venv added, updated
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
@@ -0,0 +1,66 @@
import json
import base64
import struct


def to_bytes(x, charset='utf-8', errors='strict'):
    if x is None:
        return None
    if isinstance(x, bytes):
        return x
    if isinstance(x, str):
        return x.encode(charset, errors)
    if isinstance(x, (int, float)):
        return str(x).encode(charset, errors)
    return bytes(x)


def to_unicode(x, charset='utf-8', errors='strict'):
    if x is None or isinstance(x, str):
        return x
    if isinstance(x, bytes):
        return x.decode(charset, errors)
    return str(x)


def to_native(x, encoding='ascii'):
    if isinstance(x, str):
        return x
    return x.decode(encoding)


def json_loads(s):
    return json.loads(s)


def json_dumps(data, ensure_ascii=False):
    return json.dumps(data, ensure_ascii=ensure_ascii, separators=(',', ':'))


def urlsafe_b64decode(s):
    s += b'=' * (-len(s) % 4)
    return base64.urlsafe_b64decode(s)


def urlsafe_b64encode(s):
    return base64.urlsafe_b64encode(s).rstrip(b'=')


def base64_to_int(s):
    data = urlsafe_b64decode(to_bytes(s, charset='ascii'))
    buf = struct.unpack('%sB' % len(data), data)
    return int(''.join(["%02x" % byte for byte in buf]), 16)


def int_to_base64(num):
    if num < 0:
        raise ValueError('Must be a positive integer')

    s = num.to_bytes((num.bit_length() + 7) // 8, 'big', signed=False)
    return to_unicode(urlsafe_b64encode(s))


def json_b64encode(text):
    if isinstance(text, dict):
        text = json_dumps(text)
    return urlsafe_b64encode(to_bytes(text))
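A quick usage sketch for the encoding helpers above (illustrative only, not part of the commit; the import path authlib.common.encoding is an assumption based on the other file paths in this diff and the relative import in urls.py below):

from authlib.common.encoding import (
    to_bytes, json_b64encode, int_to_base64, base64_to_int,
)

# str/int/float inputs are coerced to bytes; bytes pass through unchanged
assert to_bytes('abc') == b'abc'
assert to_bytes(42) == b'42'

# dicts are compact JSON-encoded, then URL-safe base64 without '=' padding
header_segment = json_b64encode({'alg': 'HS256'})

# integers round-trip through the unpadded base64 representation
assert base64_to_int(int_to_base64(65537)) == 65537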
63  myenv/lib/python3.12/site-packages/authlib/common/errors.py  Normal file
@@ -0,0 +1,63 @@
from authlib.consts import default_json_headers


class AuthlibBaseError(Exception):
    """Base Exception for all errors in Authlib."""

    #: short-string error code
    error = None
    #: long-string to describe this error
    description = ''
    #: web page that describes this error
    uri = None

    def __init__(self, error=None, description=None, uri=None):
        if error is not None:
            self.error = error
        if description is not None:
            self.description = description
        if uri is not None:
            self.uri = uri

        message = f'{self.error}: {self.description}'
        super().__init__(message)

    def __repr__(self):
        return f'<{self.__class__.__name__} "{self.error}">'


class AuthlibHTTPError(AuthlibBaseError):
    #: HTTP status code
    status_code = 400

    def __init__(self, error=None, description=None, uri=None,
                 status_code=None):
        super().__init__(error, description, uri)
        if status_code is not None:
            self.status_code = status_code

    def get_error_description(self):
        return self.description

    def get_body(self):
        error = [('error', self.error)]

        if self.description:
            error.append(('error_description', self.description))

        if self.uri:
            error.append(('error_uri', self.uri))
        return error

    def get_headers(self):
        return default_json_headers[:]

    def __call__(self, uri=None):
        self.uri = uri
        body = dict(self.get_body())
        headers = self.get_headers()
        return self.status_code, body, headers


class ContinueIteration(AuthlibBaseError):
    pass
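A minimal sketch of how these error classes are meant to be used (illustrative only; InvalidRequestError is a hypothetical subclass defined here for the example, not something this commit adds):

from authlib.common.errors import AuthlibHTTPError

class InvalidRequestError(AuthlibHTTPError):
    # hypothetical error code chosen for the example
    error = 'invalid_request'

err = InvalidRequestError(description='missing "client_id"')
status, body, headers = err(uri='https://example.com/errors#invalid_request')
# status == 400 (the class default)
# body == {'error': 'invalid_request',
#          'error_description': 'missing "client_id"',
#          'error_uri': 'https://example.com/errors#invalid_request'}
# headers is a copy of default_json_headers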
@@ -0,0 +1,19 @@
import os
import string
import random

UNICODE_ASCII_CHARACTER_SET = string.ascii_letters + string.digits


def generate_token(length=30, chars=UNICODE_ASCII_CHARACTER_SET):
    rand = random.SystemRandom()
    return ''.join(rand.choice(chars) for _ in range(length))


def is_secure_transport(uri):
    """Check if the uri is over ssl."""
    if os.getenv('AUTHLIB_INSECURE_TRANSPORT'):
        return True

    uri = uri.lower()
    return uri.startswith(('https://', 'http://localhost:'))
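Usage sketch for the two helpers above (illustrative only; this hunk's file name is not shown, so the import path authlib.common.security is an assumption, and the last assertion assumes AUTHLIB_INSECURE_TRANSPORT is unset):

from authlib.common.security import generate_token, is_secure_transport

# SystemRandom draws from the OS entropy source, so the default
# 30-character alphanumeric token is suitable for OAuth state/nonce values
state = generate_token()
assert len(state) == 30

assert is_secure_transport('https://provider.example.com/authorize')
assert is_secure_transport('http://localhost:5000/callback')
assert not is_secure_transport('http://provider.example.com/authorize')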
146  myenv/lib/python3.12/site-packages/authlib/common/urls.py  Normal file
@@ -0,0 +1,146 @@
"""
    authlib.util.urls
    ~~~~~~~~~~~~~~~~~

    Wrapper functions for URL encoding and decoding.
"""

import re
from urllib.parse import quote as _quote
from urllib.parse import unquote as _unquote
from urllib.parse import urlencode as _urlencode
import urllib.parse as urlparse

from .encoding import to_unicode, to_bytes

always_safe = (
    'ABCDEFGHIJKLMNOPQRSTUVWXYZ'
    'abcdefghijklmnopqrstuvwxyz'
    '0123456789_.-'
)
urlencoded = set(always_safe) | set('=&;:%+~,*@!()/?')
INVALID_HEX_PATTERN = re.compile(r'%[^0-9A-Fa-f]|%[0-9A-Fa-f][^0-9A-Fa-f]')


def url_encode(params):
    encoded = []
    for k, v in params:
        encoded.append((to_bytes(k), to_bytes(v)))
    return to_unicode(_urlencode(encoded))


def url_decode(query):
    """Decode a query string in x-www-form-urlencoded format into a sequence
    of two-element tuples.

    Unlike urlparse.parse_qsl(..., strict_parsing=True), url_decode will
    enforce correct formatting of the query string by validation. If
    validation fails, a ValueError will be raised. urllib.parse.parse_qsl
    will only raise errors if any of the name-value pairs omits the
    equals sign.
    """
    # Check if query contains invalid characters
    if query and not set(query) <= urlencoded:
        error = ("Error trying to decode a non urlencoded string. "
                 "Found invalid characters: %s "
                 "in the string: '%s'. "
                 "Please ensure the request/response body is "
                 "x-www-form-urlencoded.")
        raise ValueError(error % (set(query) - urlencoded, query))

    # Check for correctly hex encoded values using a regular expression.
    # All encoded values begin with % followed by two hex characters:
    # correct = %00, %A0, %0A, %FF
    # invalid = %G0, %5H, %PO
    if INVALID_HEX_PATTERN.search(query):
        raise ValueError('Invalid hex encoding in query string.')

    # We encode to utf-8 prior to parsing because parse_qsl behaves
    # differently on unicode input in python 2 and 3.
    # Python 2.7
    # >>> urlparse.parse_qsl(u'%E5%95%A6%E5%95%A6')
    # u'\xe5\x95\xa6\xe5\x95\xa6'
    # Python 2.7, non unicode input gives the same
    # >>> urlparse.parse_qsl('%E5%95%A6%E5%95%A6')
    # '\xe5\x95\xa6\xe5\x95\xa6'
    # but now we can decode it to unicode
    # >>> urlparse.parse_qsl('%E5%95%A6%E5%95%A6').decode('utf-8')
    # u'\u5566\u5566'
    # Python 3.3 however
    # >>> urllib.parse.parse_qsl(u'%E5%95%A6%E5%95%A6')
    # u'\u5566\u5566'

    # We want to allow queries such as "c2" whereas urlparse.parse_qsl
    # with the strict_parsing flag will not.
    params = urlparse.parse_qsl(query, keep_blank_values=True)

    # unicode all the things
    decoded = []
    for k, v in params:
        decoded.append((to_unicode(k), to_unicode(v)))
    return decoded


def add_params_to_qs(query, params):
    """Extend a query with a list of two-tuples."""
    if isinstance(params, dict):
        params = params.items()

    qs = urlparse.parse_qsl(query, keep_blank_values=True)
    qs.extend(params)
    return url_encode(qs)


def add_params_to_uri(uri, params, fragment=False):
    """Add a list of two-tuples to the uri query components."""
    sch, net, path, par, query, fra = urlparse.urlparse(uri)
    if fragment:
        fra = add_params_to_qs(fra, params)
    else:
        query = add_params_to_qs(query, params)
    return urlparse.urlunparse((sch, net, path, par, query, fra))


def quote(s, safe=b'/'):
    return to_unicode(_quote(to_bytes(s), safe))


def unquote(s):
    return to_unicode(_unquote(s))


def quote_url(s):
    return quote(s, b'~@#$&()*!+=:;,.?/\'')


def extract_params(raw):
    """Extract parameters and return them as a list of 2-tuples.

    Will successfully extract parameters from urlencoded query strings,
    dicts, or lists of 2-tuples. Empty strings/dicts/lists will return an
    empty list of parameters. Any other input will result in a return
    value of None.
    """
    if isinstance(raw, (list, tuple)):
        try:
            raw = dict(raw)
        except (TypeError, ValueError):
            return None

    if isinstance(raw, dict):
        params = []
        for k, v in raw.items():
            params.append((to_unicode(k), to_unicode(v)))
        return params

    if not raw:
        return None

    try:
        return url_decode(raw)
    except ValueError:
        return None


def is_valid_url(url):
    parsed = urlparse.urlparse(url)
    return parsed.scheme and parsed.hostname
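A usage sketch tying the URL helpers together (illustrative only, not part of the commit; the example URLs are made up):

from authlib.common.urls import add_params_to_uri, url_decode, extract_params

# append OAuth-style parameters to an authorization URL's query component
uri = add_params_to_uri(
    'https://provider.example.com/authorize?scope=openid',
    [('client_id', 'abc'), ('state', 'xyz')],
)
# -> 'https://provider.example.com/authorize?scope=openid&client_id=abc&state=xyz'

# url_decode validates before parsing: malformed percent-escapes are rejected
try:
    url_decode('a=1&b=%ZZ')
except ValueError:
    pass  # invalid hex encoding raises instead of passing through silently
assert url_decode('a=1&b=') == [('a', '1'), ('b', '')]

# extract_params accepts query strings, dicts, or lists of 2-tuples
assert extract_params({'a': '1'}) == [('a', '1')]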