added completely new version for haslach 2025
@@ -0,0 +1 @@
f2318883e549f69de597009a914603b0f1b10381e265ef5d98af499ad973fb98 /home/runner/work/aiohttp/aiohttp/aiohttp/_cparser.pxd
@@ -0,0 +1 @@
d067f01423cddb3c442933b5fcc039b18ab651fcec1bc91c577693aafc25cf78 /home/runner/work/aiohttp/aiohttp/aiohttp/_find_header.pxd
@@ -0,0 +1 @@
6682a22524b9d4fc442e123672622be7bdfb6238d9709b7b15b2113b7ca6d52b /home/runner/work/aiohttp/aiohttp/aiohttp/_helpers.pyi
@@ -0,0 +1 @@
5de2db35fb795ffe227e2f1007c8ba4f2ad1b9aca28cc48edc80c779203cf6e3 /home/runner/work/aiohttp/aiohttp/aiohttp/_helpers.pyx
@@ -0,0 +1 @@
7f32b0c1595c1a71957a218ece8d3977ed9171caad97df8fcd82aa80addfc5d2 /home/runner/work/aiohttp/aiohttp/aiohttp/_http_parser.pyx
@@ -0,0 +1 @@
6881c0a7c838655e646c645d99971efaf5e310bc3633a7c62b226e39d81842ac /home/runner/work/aiohttp/aiohttp/aiohttp/_http_writer.pyx
@@ -0,0 +1 @@
d57b8e48d0c26f20ebcc5e6e300da2b2a6aeb12b3c9768d64cb0e53432ccf48a /home/runner/work/aiohttp/aiohttp/aiohttp/_websocket.pyx
@@ -0,0 +1 @@
a30351c34760a1d7835b2a1b0552e463cf1d2db90da0cdb473313dc66e34a031 /home/runner/work/aiohttp/aiohttp/aiohttp/hdrs.py
216
.venv/lib/python3.7/site-packages/aiohttp/__init__.py
Normal file
@@ -0,0 +1,216 @@
__version__ = "3.8.6"

from typing import Tuple

from . import hdrs as hdrs
from .client import (
    BaseConnector as BaseConnector,
    ClientConnectionError as ClientConnectionError,
    ClientConnectorCertificateError as ClientConnectorCertificateError,
    ClientConnectorError as ClientConnectorError,
    ClientConnectorSSLError as ClientConnectorSSLError,
    ClientError as ClientError,
    ClientHttpProxyError as ClientHttpProxyError,
    ClientOSError as ClientOSError,
    ClientPayloadError as ClientPayloadError,
    ClientProxyConnectionError as ClientProxyConnectionError,
    ClientRequest as ClientRequest,
    ClientResponse as ClientResponse,
    ClientResponseError as ClientResponseError,
    ClientSession as ClientSession,
    ClientSSLError as ClientSSLError,
    ClientTimeout as ClientTimeout,
    ClientWebSocketResponse as ClientWebSocketResponse,
    ContentTypeError as ContentTypeError,
    Fingerprint as Fingerprint,
    InvalidURL as InvalidURL,
    NamedPipeConnector as NamedPipeConnector,
    RequestInfo as RequestInfo,
    ServerConnectionError as ServerConnectionError,
    ServerDisconnectedError as ServerDisconnectedError,
    ServerFingerprintMismatch as ServerFingerprintMismatch,
    ServerTimeoutError as ServerTimeoutError,
    TCPConnector as TCPConnector,
    TooManyRedirects as TooManyRedirects,
    UnixConnector as UnixConnector,
    WSServerHandshakeError as WSServerHandshakeError,
    request as request,
)
from .cookiejar import CookieJar as CookieJar, DummyCookieJar as DummyCookieJar
from .formdata import FormData as FormData
from .helpers import BasicAuth, ChainMapProxy, ETag
from .http import (
    HttpVersion as HttpVersion,
    HttpVersion10 as HttpVersion10,
    HttpVersion11 as HttpVersion11,
    WebSocketError as WebSocketError,
    WSCloseCode as WSCloseCode,
    WSMessage as WSMessage,
    WSMsgType as WSMsgType,
)
from .multipart import (
    BadContentDispositionHeader as BadContentDispositionHeader,
    BadContentDispositionParam as BadContentDispositionParam,
    BodyPartReader as BodyPartReader,
    MultipartReader as MultipartReader,
    MultipartWriter as MultipartWriter,
    content_disposition_filename as content_disposition_filename,
    parse_content_disposition as parse_content_disposition,
)
from .payload import (
    PAYLOAD_REGISTRY as PAYLOAD_REGISTRY,
    AsyncIterablePayload as AsyncIterablePayload,
    BufferedReaderPayload as BufferedReaderPayload,
    BytesIOPayload as BytesIOPayload,
    BytesPayload as BytesPayload,
    IOBasePayload as IOBasePayload,
    JsonPayload as JsonPayload,
    Payload as Payload,
    StringIOPayload as StringIOPayload,
    StringPayload as StringPayload,
    TextIOPayload as TextIOPayload,
    get_payload as get_payload,
    payload_type as payload_type,
)
from .payload_streamer import streamer as streamer
from .resolver import (
    AsyncResolver as AsyncResolver,
    DefaultResolver as DefaultResolver,
    ThreadedResolver as ThreadedResolver,
)
from .streams import (
    EMPTY_PAYLOAD as EMPTY_PAYLOAD,
    DataQueue as DataQueue,
    EofStream as EofStream,
    FlowControlDataQueue as FlowControlDataQueue,
    StreamReader as StreamReader,
)
from .tracing import (
    TraceConfig as TraceConfig,
    TraceConnectionCreateEndParams as TraceConnectionCreateEndParams,
    TraceConnectionCreateStartParams as TraceConnectionCreateStartParams,
    TraceConnectionQueuedEndParams as TraceConnectionQueuedEndParams,
    TraceConnectionQueuedStartParams as TraceConnectionQueuedStartParams,
    TraceConnectionReuseconnParams as TraceConnectionReuseconnParams,
    TraceDnsCacheHitParams as TraceDnsCacheHitParams,
    TraceDnsCacheMissParams as TraceDnsCacheMissParams,
    TraceDnsResolveHostEndParams as TraceDnsResolveHostEndParams,
    TraceDnsResolveHostStartParams as TraceDnsResolveHostStartParams,
    TraceRequestChunkSentParams as TraceRequestChunkSentParams,
    TraceRequestEndParams as TraceRequestEndParams,
    TraceRequestExceptionParams as TraceRequestExceptionParams,
    TraceRequestRedirectParams as TraceRequestRedirectParams,
    TraceRequestStartParams as TraceRequestStartParams,
    TraceResponseChunkReceivedParams as TraceResponseChunkReceivedParams,
)

__all__: Tuple[str, ...] = (
    "hdrs",
    # client
    "BaseConnector",
    "ClientConnectionError",
    "ClientConnectorCertificateError",
    "ClientConnectorError",
    "ClientConnectorSSLError",
    "ClientError",
    "ClientHttpProxyError",
    "ClientOSError",
    "ClientPayloadError",
    "ClientProxyConnectionError",
    "ClientResponse",
    "ClientRequest",
    "ClientResponseError",
    "ClientSSLError",
    "ClientSession",
    "ClientTimeout",
    "ClientWebSocketResponse",
    "ContentTypeError",
    "Fingerprint",
    "InvalidURL",
    "RequestInfo",
    "ServerConnectionError",
    "ServerDisconnectedError",
    "ServerFingerprintMismatch",
    "ServerTimeoutError",
    "TCPConnector",
    "TooManyRedirects",
    "UnixConnector",
    "NamedPipeConnector",
    "WSServerHandshakeError",
    "request",
    # cookiejar
    "CookieJar",
    "DummyCookieJar",
    # formdata
    "FormData",
    # helpers
    "BasicAuth",
    "ChainMapProxy",
    "ETag",
    # http
    "HttpVersion",
    "HttpVersion10",
    "HttpVersion11",
    "WSMsgType",
    "WSCloseCode",
    "WSMessage",
    "WebSocketError",
    # multipart
    "BadContentDispositionHeader",
    "BadContentDispositionParam",
    "BodyPartReader",
    "MultipartReader",
    "MultipartWriter",
    "content_disposition_filename",
    "parse_content_disposition",
    # payload
    "AsyncIterablePayload",
    "BufferedReaderPayload",
    "BytesIOPayload",
    "BytesPayload",
    "IOBasePayload",
    "JsonPayload",
    "PAYLOAD_REGISTRY",
    "Payload",
    "StringIOPayload",
    "StringPayload",
    "TextIOPayload",
    "get_payload",
    "payload_type",
    # payload_streamer
    "streamer",
    # resolver
    "AsyncResolver",
    "DefaultResolver",
    "ThreadedResolver",
    # streams
    "DataQueue",
    "EMPTY_PAYLOAD",
    "EofStream",
    "FlowControlDataQueue",
    "StreamReader",
    # tracing
    "TraceConfig",
    "TraceConnectionCreateEndParams",
    "TraceConnectionCreateStartParams",
    "TraceConnectionQueuedEndParams",
    "TraceConnectionQueuedStartParams",
    "TraceConnectionReuseconnParams",
    "TraceDnsCacheHitParams",
    "TraceDnsCacheMissParams",
    "TraceDnsResolveHostEndParams",
    "TraceDnsResolveHostStartParams",
    "TraceRequestChunkSentParams",
    "TraceRequestEndParams",
    "TraceRequestExceptionParams",
    "TraceRequestRedirectParams",
    "TraceRequestStartParams",
    "TraceResponseChunkReceivedParams",
)

try:
    from .worker import GunicornUVLoopWebWorker, GunicornWebWorker

    __all__ += ("GunicornWebWorker", "GunicornUVLoopWebWorker")
except ImportError:  # pragma: no cover
    pass
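Editor's note: a minimal usage sketch for the package vendored above, not part of the commit; the URL and timeout are illustrative placeholders.

import asyncio

import aiohttp


async def main() -> None:
    # ClientSession, ClientTimeout etc. come from the __init__.py exports above.
    async with aiohttp.ClientSession(timeout=aiohttp.ClientTimeout(total=10)) as session:
        async with session.get("https://example.com") as resp:
            print(resp.status)
            print(await resp.text())


asyncio.run(main())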
Binary file not shown.
158
.venv/lib/python3.7/site-packages/aiohttp/_cparser.pxd
Normal file
@@ -0,0 +1,158 @@
from libc.stdint cimport int32_t, uint8_t, uint16_t, uint64_t


cdef extern from "../vendor/llhttp/build/llhttp.h":

    struct llhttp__internal_s:
        int32_t _index
        void* _span_pos0
        void* _span_cb0
        int32_t error
        const char* reason
        const char* error_pos
        void* data
        void* _current
        uint64_t content_length
        uint8_t type
        uint8_t method
        uint8_t http_major
        uint8_t http_minor
        uint8_t header_state
        uint8_t lenient_flags
        uint8_t upgrade
        uint8_t finish
        uint16_t flags
        uint16_t status_code
        void* settings

    ctypedef llhttp__internal_s llhttp__internal_t
    ctypedef llhttp__internal_t llhttp_t

    ctypedef int (*llhttp_data_cb)(llhttp_t*, const char *at, size_t length) except -1
    ctypedef int (*llhttp_cb)(llhttp_t*) except -1

    struct llhttp_settings_s:
        llhttp_cb on_message_begin
        llhttp_data_cb on_url
        llhttp_data_cb on_status
        llhttp_data_cb on_header_field
        llhttp_data_cb on_header_value
        llhttp_cb on_headers_complete
        llhttp_data_cb on_body
        llhttp_cb on_message_complete
        llhttp_cb on_chunk_header
        llhttp_cb on_chunk_complete

        llhttp_cb on_url_complete
        llhttp_cb on_status_complete
        llhttp_cb on_header_field_complete
        llhttp_cb on_header_value_complete

    ctypedef llhttp_settings_s llhttp_settings_t

    enum llhttp_errno:
        HPE_OK,
        HPE_INTERNAL,
        HPE_STRICT,
        HPE_LF_EXPECTED,
        HPE_UNEXPECTED_CONTENT_LENGTH,
        HPE_CLOSED_CONNECTION,
        HPE_INVALID_METHOD,
        HPE_INVALID_URL,
        HPE_INVALID_CONSTANT,
        HPE_INVALID_VERSION,
        HPE_INVALID_HEADER_TOKEN,
        HPE_INVALID_CONTENT_LENGTH,
        HPE_INVALID_CHUNK_SIZE,
        HPE_INVALID_STATUS,
        HPE_INVALID_EOF_STATE,
        HPE_INVALID_TRANSFER_ENCODING,
        HPE_CB_MESSAGE_BEGIN,
        HPE_CB_HEADERS_COMPLETE,
        HPE_CB_MESSAGE_COMPLETE,
        HPE_CB_CHUNK_HEADER,
        HPE_CB_CHUNK_COMPLETE,
        HPE_PAUSED,
        HPE_PAUSED_UPGRADE,
        HPE_USER

    ctypedef llhttp_errno llhttp_errno_t

    enum llhttp_flags:
        F_CHUNKED,
        F_CONTENT_LENGTH

    enum llhttp_type:
        HTTP_REQUEST,
        HTTP_RESPONSE,
        HTTP_BOTH

    enum llhttp_method:
        HTTP_DELETE,
        HTTP_GET,
        HTTP_HEAD,
        HTTP_POST,
        HTTP_PUT,
        HTTP_CONNECT,
        HTTP_OPTIONS,
        HTTP_TRACE,
        HTTP_COPY,
        HTTP_LOCK,
        HTTP_MKCOL,
        HTTP_MOVE,
        HTTP_PROPFIND,
        HTTP_PROPPATCH,
        HTTP_SEARCH,
        HTTP_UNLOCK,
        HTTP_BIND,
        HTTP_REBIND,
        HTTP_UNBIND,
        HTTP_ACL,
        HTTP_REPORT,
        HTTP_MKACTIVITY,
        HTTP_CHECKOUT,
        HTTP_MERGE,
        HTTP_MSEARCH,
        HTTP_NOTIFY,
        HTTP_SUBSCRIBE,
        HTTP_UNSUBSCRIBE,
        HTTP_PATCH,
        HTTP_PURGE,
        HTTP_MKCALENDAR,
        HTTP_LINK,
        HTTP_UNLINK,
        HTTP_SOURCE,
        HTTP_PRI,
        HTTP_DESCRIBE,
        HTTP_ANNOUNCE,
        HTTP_SETUP,
        HTTP_PLAY,
        HTTP_PAUSE,
        HTTP_TEARDOWN,
        HTTP_GET_PARAMETER,
        HTTP_SET_PARAMETER,
        HTTP_REDIRECT,
        HTTP_RECORD,
        HTTP_FLUSH

    ctypedef llhttp_method llhttp_method_t;

    void llhttp_settings_init(llhttp_settings_t* settings)
    void llhttp_init(llhttp_t* parser, llhttp_type type,
                     const llhttp_settings_t* settings)

    llhttp_errno_t llhttp_execute(llhttp_t* parser, const char* data, size_t len)

    int llhttp_should_keep_alive(const llhttp_t* parser)

    void llhttp_resume_after_upgrade(llhttp_t* parser)

    llhttp_errno_t llhttp_get_errno(const llhttp_t* parser)
    const char* llhttp_get_error_reason(const llhttp_t* parser)
    const char* llhttp_get_error_pos(const llhttp_t* parser)

    const char* llhttp_method_name(llhttp_method_t method)

    void llhttp_set_lenient_headers(llhttp_t* parser, int enabled)
    void llhttp_set_lenient_optional_cr_before_lf(llhttp_t* parser, int enabled)
    void llhttp_set_lenient_spaces_after_chunk_size(llhttp_t* parser, int enabled)
@@ -0,0 +1,2 @@
cdef extern from "_find_header.h":
    int find_header(char *, int)
83
.venv/lib/python3.7/site-packages/aiohttp/_headers.pxi
Normal file
@@ -0,0 +1,83 @@
# The file is autogenerated from aiohttp/hdrs.py
# Run ./tools/gen.py to update it after the origin changing.

from . import hdrs
cdef tuple headers = (
    hdrs.ACCEPT,
    hdrs.ACCEPT_CHARSET,
    hdrs.ACCEPT_ENCODING,
    hdrs.ACCEPT_LANGUAGE,
    hdrs.ACCEPT_RANGES,
    hdrs.ACCESS_CONTROL_ALLOW_CREDENTIALS,
    hdrs.ACCESS_CONTROL_ALLOW_HEADERS,
    hdrs.ACCESS_CONTROL_ALLOW_METHODS,
    hdrs.ACCESS_CONTROL_ALLOW_ORIGIN,
    hdrs.ACCESS_CONTROL_EXPOSE_HEADERS,
    hdrs.ACCESS_CONTROL_MAX_AGE,
    hdrs.ACCESS_CONTROL_REQUEST_HEADERS,
    hdrs.ACCESS_CONTROL_REQUEST_METHOD,
    hdrs.AGE,
    hdrs.ALLOW,
    hdrs.AUTHORIZATION,
    hdrs.CACHE_CONTROL,
    hdrs.CONNECTION,
    hdrs.CONTENT_DISPOSITION,
    hdrs.CONTENT_ENCODING,
    hdrs.CONTENT_LANGUAGE,
    hdrs.CONTENT_LENGTH,
    hdrs.CONTENT_LOCATION,
    hdrs.CONTENT_MD5,
    hdrs.CONTENT_RANGE,
    hdrs.CONTENT_TRANSFER_ENCODING,
    hdrs.CONTENT_TYPE,
    hdrs.COOKIE,
    hdrs.DATE,
    hdrs.DESTINATION,
    hdrs.DIGEST,
    hdrs.ETAG,
    hdrs.EXPECT,
    hdrs.EXPIRES,
    hdrs.FORWARDED,
    hdrs.FROM,
    hdrs.HOST,
    hdrs.IF_MATCH,
    hdrs.IF_MODIFIED_SINCE,
    hdrs.IF_NONE_MATCH,
    hdrs.IF_RANGE,
    hdrs.IF_UNMODIFIED_SINCE,
    hdrs.KEEP_ALIVE,
    hdrs.LAST_EVENT_ID,
    hdrs.LAST_MODIFIED,
    hdrs.LINK,
    hdrs.LOCATION,
    hdrs.MAX_FORWARDS,
    hdrs.ORIGIN,
    hdrs.PRAGMA,
    hdrs.PROXY_AUTHENTICATE,
    hdrs.PROXY_AUTHORIZATION,
    hdrs.RANGE,
    hdrs.REFERER,
    hdrs.RETRY_AFTER,
    hdrs.SEC_WEBSOCKET_ACCEPT,
    hdrs.SEC_WEBSOCKET_EXTENSIONS,
    hdrs.SEC_WEBSOCKET_KEY,
    hdrs.SEC_WEBSOCKET_KEY1,
    hdrs.SEC_WEBSOCKET_PROTOCOL,
    hdrs.SEC_WEBSOCKET_VERSION,
    hdrs.SERVER,
    hdrs.SET_COOKIE,
    hdrs.TE,
    hdrs.TRAILER,
    hdrs.TRANSFER_ENCODING,
    hdrs.URI,
    hdrs.UPGRADE,
    hdrs.USER_AGENT,
    hdrs.VARY,
    hdrs.VIA,
    hdrs.WWW_AUTHENTICATE,
    hdrs.WANT_DIGEST,
    hdrs.WARNING,
    hdrs.X_FORWARDED_FOR,
    hdrs.X_FORWARDED_HOST,
    hdrs.X_FORWARDED_PROTO,
)
Binary file not shown.
6
.venv/lib/python3.7/site-packages/aiohttp/_helpers.pyi
Normal file
@@ -0,0 +1,6 @@
from typing import Any

class reify:
    def __init__(self, wrapped: Any) -> None: ...
    def __get__(self, inst: Any, owner: Any) -> Any: ...
    def __set__(self, inst: Any, value: Any) -> None: ...
35
.venv/lib/python3.7/site-packages/aiohttp/_helpers.pyx
Normal file
@@ -0,0 +1,35 @@
cdef class reify:
    """Use as a class method decorator. It operates almost exactly like
    the Python `@property` decorator, but it puts the result of the
    method it decorates into the instance dict after the first call,
    effectively replacing the function it decorates with an instance
    variable. It is, in Python parlance, a data descriptor.

    """

    cdef object wrapped
    cdef object name

    def __init__(self, wrapped):
        self.wrapped = wrapped
        self.name = wrapped.__name__

    @property
    def __doc__(self):
        return self.wrapped.__doc__

    def __get__(self, inst, owner):
        try:
            try:
                return inst._cache[self.name]
            except KeyError:
                val = self.wrapped(inst)
                inst._cache[self.name] = val
                return val
        except AttributeError:
            if inst is None:
                return self
            raise

    def __set__(self, inst, value):
        raise AttributeError("reified property is read-only")
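Editor's note: an illustrative sketch, not part of the commit. As the __get__ above shows, reify caches the first result in the instance's _cache dict, which the owning class must provide; the Config class and attribute names here are hypothetical.

class Config:
    def __init__(self):
        self._cache = {}  # reify stores computed values here, keyed by method name

    @reify
    def expensive(self):
        print("computed once")
        return 42


c = Config()
c.expensive  # first access: prints "computed once", stores 42 in c._cache
c.expensive  # second access: served from c._cache, no recomputation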
Binary file not shown.
836
.venv/lib/python3.7/site-packages/aiohttp/_http_parser.pyx
Normal file
@@ -0,0 +1,836 @@
#cython: language_level=3
#
# Based on https://github.com/MagicStack/httptools
#

from cpython cimport (
    Py_buffer,
    PyBUF_SIMPLE,
    PyBuffer_Release,
    PyBytes_AsString,
    PyBytes_AsStringAndSize,
    PyObject_GetBuffer,
)
from cpython.mem cimport PyMem_Free, PyMem_Malloc
from libc.limits cimport ULLONG_MAX
from libc.string cimport memcpy

from multidict import CIMultiDict as _CIMultiDict, CIMultiDictProxy as _CIMultiDictProxy
from yarl import URL as _URL

from aiohttp import hdrs
from aiohttp.helpers import DEBUG

from .http_exceptions import (
    BadHttpMessage,
    BadStatusLine,
    ContentLengthError,
    InvalidHeader,
    InvalidURLError,
    LineTooLong,
    PayloadEncodingError,
    TransferEncodingError,
)
from .http_parser import DeflateBuffer as _DeflateBuffer
from .http_writer import (
    HttpVersion as _HttpVersion,
    HttpVersion10 as _HttpVersion10,
    HttpVersion11 as _HttpVersion11,
)
from .streams import EMPTY_PAYLOAD as _EMPTY_PAYLOAD, StreamReader as _StreamReader

cimport cython

from aiohttp cimport _cparser as cparser

include "_headers.pxi"

from aiohttp cimport _find_header

DEF DEFAULT_FREELIST_SIZE = 250

cdef extern from "Python.h":
    int PyByteArray_Resize(object, Py_ssize_t) except -1
    Py_ssize_t PyByteArray_Size(object) except -1
    char* PyByteArray_AsString(object)

__all__ = ('HttpRequestParser', 'HttpResponseParser',
           'RawRequestMessage', 'RawResponseMessage')

cdef object URL = _URL
cdef object URL_build = URL.build
cdef object CIMultiDict = _CIMultiDict
cdef object CIMultiDictProxy = _CIMultiDictProxy
cdef object HttpVersion = _HttpVersion
cdef object HttpVersion10 = _HttpVersion10
cdef object HttpVersion11 = _HttpVersion11
cdef object SEC_WEBSOCKET_KEY1 = hdrs.SEC_WEBSOCKET_KEY1
cdef object CONTENT_ENCODING = hdrs.CONTENT_ENCODING
cdef object EMPTY_PAYLOAD = _EMPTY_PAYLOAD
cdef object StreamReader = _StreamReader
cdef object DeflateBuffer = _DeflateBuffer


cdef inline object extend(object buf, const char* at, size_t length):
    cdef Py_ssize_t s
    cdef char* ptr
    s = PyByteArray_Size(buf)
    PyByteArray_Resize(buf, s + length)
    ptr = PyByteArray_AsString(buf)
    memcpy(ptr + s, at, length)


DEF METHODS_COUNT = 46;

cdef list _http_method = []

for i in range(METHODS_COUNT):
    _http_method.append(
        cparser.llhttp_method_name(<cparser.llhttp_method_t> i).decode('ascii'))


cdef inline str http_method_str(int i):
    if i < METHODS_COUNT:
        return <str>_http_method[i]
    else:
        return "<unknown>"

cdef inline object find_header(bytes raw_header):
    cdef Py_ssize_t size
    cdef char *buf
    cdef int idx
    PyBytes_AsStringAndSize(raw_header, &buf, &size)
    idx = _find_header.find_header(buf, size)
    if idx == -1:
        return raw_header.decode('utf-8', 'surrogateescape')
    return headers[idx]


@cython.freelist(DEFAULT_FREELIST_SIZE)
cdef class RawRequestMessage:
    cdef readonly str method
    cdef readonly str path
    cdef readonly object version  # HttpVersion
    cdef readonly object headers  # CIMultiDict
    cdef readonly object raw_headers  # tuple
    cdef readonly object should_close
    cdef readonly object compression
    cdef readonly object upgrade
    cdef readonly object chunked
    cdef readonly object url  # yarl.URL

    def __init__(self, method, path, version, headers, raw_headers,
                 should_close, compression, upgrade, chunked, url):
        self.method = method
        self.path = path
        self.version = version
        self.headers = headers
        self.raw_headers = raw_headers
        self.should_close = should_close
        self.compression = compression
        self.upgrade = upgrade
        self.chunked = chunked
        self.url = url

    def __repr__(self):
        info = []
        info.append(("method", self.method))
        info.append(("path", self.path))
        info.append(("version", self.version))
        info.append(("headers", self.headers))
        info.append(("raw_headers", self.raw_headers))
        info.append(("should_close", self.should_close))
        info.append(("compression", self.compression))
        info.append(("upgrade", self.upgrade))
        info.append(("chunked", self.chunked))
        info.append(("url", self.url))
        sinfo = ', '.join(name + '=' + repr(val) for name, val in info)
        return '<RawRequestMessage(' + sinfo + ')>'

    def _replace(self, **dct):
        cdef RawRequestMessage ret
        ret = _new_request_message(self.method,
                                   self.path,
                                   self.version,
                                   self.headers,
                                   self.raw_headers,
                                   self.should_close,
                                   self.compression,
                                   self.upgrade,
                                   self.chunked,
                                   self.url)
        if "method" in dct:
            ret.method = dct["method"]
        if "path" in dct:
            ret.path = dct["path"]
        if "version" in dct:
            ret.version = dct["version"]
        if "headers" in dct:
            ret.headers = dct["headers"]
        if "raw_headers" in dct:
            ret.raw_headers = dct["raw_headers"]
        if "should_close" in dct:
            ret.should_close = dct["should_close"]
        if "compression" in dct:
            ret.compression = dct["compression"]
        if "upgrade" in dct:
            ret.upgrade = dct["upgrade"]
        if "chunked" in dct:
            ret.chunked = dct["chunked"]
        if "url" in dct:
            ret.url = dct["url"]
        return ret

cdef _new_request_message(str method,
                          str path,
                          object version,
                          object headers,
                          object raw_headers,
                          bint should_close,
                          object compression,
                          bint upgrade,
                          bint chunked,
                          object url):
    cdef RawRequestMessage ret
    ret = RawRequestMessage.__new__(RawRequestMessage)
    ret.method = method
    ret.path = path
    ret.version = version
    ret.headers = headers
    ret.raw_headers = raw_headers
    ret.should_close = should_close
    ret.compression = compression
    ret.upgrade = upgrade
    ret.chunked = chunked
    ret.url = url
    return ret


@cython.freelist(DEFAULT_FREELIST_SIZE)
cdef class RawResponseMessage:
    cdef readonly object version  # HttpVersion
    cdef readonly int code
    cdef readonly str reason
    cdef readonly object headers  # CIMultiDict
    cdef readonly object raw_headers  # tuple
    cdef readonly object should_close
    cdef readonly object compression
    cdef readonly object upgrade
    cdef readonly object chunked

    def __init__(self, version, code, reason, headers, raw_headers,
                 should_close, compression, upgrade, chunked):
        self.version = version
        self.code = code
        self.reason = reason
        self.headers = headers
        self.raw_headers = raw_headers
        self.should_close = should_close
        self.compression = compression
        self.upgrade = upgrade
        self.chunked = chunked

    def __repr__(self):
        info = []
        info.append(("version", self.version))
        info.append(("code", self.code))
        info.append(("reason", self.reason))
        info.append(("headers", self.headers))
        info.append(("raw_headers", self.raw_headers))
        info.append(("should_close", self.should_close))
        info.append(("compression", self.compression))
        info.append(("upgrade", self.upgrade))
        info.append(("chunked", self.chunked))
        sinfo = ', '.join(name + '=' + repr(val) for name, val in info)
        return '<RawResponseMessage(' + sinfo + ')>'


cdef _new_response_message(object version,
                           int code,
                           str reason,
                           object headers,
                           object raw_headers,
                           bint should_close,
                           object compression,
                           bint upgrade,
                           bint chunked):
    cdef RawResponseMessage ret
    ret = RawResponseMessage.__new__(RawResponseMessage)
    ret.version = version
    ret.code = code
    ret.reason = reason
    ret.headers = headers
    ret.raw_headers = raw_headers
    ret.should_close = should_close
    ret.compression = compression
    ret.upgrade = upgrade
    ret.chunked = chunked
    return ret


@cython.internal
cdef class HttpParser:

    cdef:
        cparser.llhttp_t* _cparser
        cparser.llhttp_settings_t* _csettings

        bytearray _raw_name
        bytearray _raw_value
        bint _has_value

        object _protocol
        object _loop
        object _timer

        size_t _max_line_size
        size_t _max_field_size
        size_t _max_headers
        bint _response_with_body
        bint _read_until_eof

        bint _started
        object _url
        bytearray _buf
        str _path
        str _reason
        object _headers
        list _raw_headers
        bint _upgraded
        list _messages
        object _payload
        bint _payload_error
        object _payload_exception
        object _last_error
        bint _auto_decompress
        int _limit

        str _content_encoding

        Py_buffer py_buf

    def __cinit__(self):
        self._cparser = <cparser.llhttp_t*> \
            PyMem_Malloc(sizeof(cparser.llhttp_t))
        if self._cparser is NULL:
            raise MemoryError()

        self._csettings = <cparser.llhttp_settings_t*> \
            PyMem_Malloc(sizeof(cparser.llhttp_settings_t))
        if self._csettings is NULL:
            raise MemoryError()

    def __dealloc__(self):
        PyMem_Free(self._cparser)
        PyMem_Free(self._csettings)

    cdef _init(
        self, cparser.llhttp_type mode,
        object protocol, object loop, int limit,
        object timer=None,
        size_t max_line_size=8190, size_t max_headers=32768,
        size_t max_field_size=8190, payload_exception=None,
        bint response_with_body=True, bint read_until_eof=False,
        bint auto_decompress=True,
    ):
        cparser.llhttp_settings_init(self._csettings)
        cparser.llhttp_init(self._cparser, mode, self._csettings)
        self._cparser.data = <void*>self
        self._cparser.content_length = 0

        self._protocol = protocol
        self._loop = loop
        self._timer = timer

        self._buf = bytearray()
        self._payload = None
        self._payload_error = 0
        self._payload_exception = payload_exception
        self._messages = []

        self._raw_name = bytearray()
        self._raw_value = bytearray()
        self._has_value = False

        self._max_line_size = max_line_size
        self._max_headers = max_headers
        self._max_field_size = max_field_size
        self._response_with_body = response_with_body
        self._read_until_eof = read_until_eof
        self._upgraded = False
        self._auto_decompress = auto_decompress
        self._content_encoding = None

        self._csettings.on_url = cb_on_url
        self._csettings.on_status = cb_on_status
        self._csettings.on_header_field = cb_on_header_field
        self._csettings.on_header_value = cb_on_header_value
        self._csettings.on_headers_complete = cb_on_headers_complete
        self._csettings.on_body = cb_on_body
        self._csettings.on_message_begin = cb_on_message_begin
        self._csettings.on_message_complete = cb_on_message_complete
        self._csettings.on_chunk_header = cb_on_chunk_header
        self._csettings.on_chunk_complete = cb_on_chunk_complete

        self._last_error = None
        self._limit = limit

    cdef _process_header(self):
        if self._raw_name:
            raw_name = bytes(self._raw_name)
            raw_value = bytes(self._raw_value)

            name = find_header(raw_name)
            value = raw_value.decode('utf-8', 'surrogateescape')

            self._headers.add(name, value)

            if name is CONTENT_ENCODING:
                self._content_encoding = value

            PyByteArray_Resize(self._raw_name, 0)
            PyByteArray_Resize(self._raw_value, 0)
            self._has_value = False
            self._raw_headers.append((raw_name, raw_value))

    cdef _on_header_field(self, char* at, size_t length):
        cdef Py_ssize_t size
        cdef char *buf
        if self._has_value:
            self._process_header()

        size = PyByteArray_Size(self._raw_name)
        PyByteArray_Resize(self._raw_name, size + length)
        buf = PyByteArray_AsString(self._raw_name)
        memcpy(buf + size, at, length)

    cdef _on_header_value(self, char* at, size_t length):
        cdef Py_ssize_t size
        cdef char *buf

        size = PyByteArray_Size(self._raw_value)
        PyByteArray_Resize(self._raw_value, size + length)
        buf = PyByteArray_AsString(self._raw_value)
        memcpy(buf + size, at, length)
        self._has_value = True

    cdef _on_headers_complete(self):
        self._process_header()

        method = http_method_str(self._cparser.method)
        should_close = not cparser.llhttp_should_keep_alive(self._cparser)
        upgrade = self._cparser.upgrade
        chunked = self._cparser.flags & cparser.F_CHUNKED

        raw_headers = tuple(self._raw_headers)
        headers = CIMultiDictProxy(self._headers)

        if upgrade or self._cparser.method == cparser.HTTP_CONNECT:
            self._upgraded = True

        # do not support old websocket spec
        if SEC_WEBSOCKET_KEY1 in headers:
            raise InvalidHeader(SEC_WEBSOCKET_KEY1)

        encoding = None
        enc = self._content_encoding
        if enc is not None:
            self._content_encoding = None
            enc = enc.lower()
            if enc in ('gzip', 'deflate', 'br'):
                encoding = enc

        if self._cparser.type == cparser.HTTP_REQUEST:
            msg = _new_request_message(
                method, self._path,
                self.http_version(), headers, raw_headers,
                should_close, encoding, upgrade, chunked, self._url)
        else:
            msg = _new_response_message(
                self.http_version(), self._cparser.status_code, self._reason,
                headers, raw_headers, should_close, encoding,
                upgrade, chunked)

        if (
            ULLONG_MAX > self._cparser.content_length > 0 or chunked or
            self._cparser.method == cparser.HTTP_CONNECT or
            (self._cparser.status_code >= 199 and
             self._cparser.content_length == 0 and
             self._read_until_eof)
        ):
            payload = StreamReader(
                self._protocol, timer=self._timer, loop=self._loop,
                limit=self._limit)
        else:
            payload = EMPTY_PAYLOAD

        self._payload = payload
        if encoding is not None and self._auto_decompress:
            self._payload = DeflateBuffer(payload, encoding)

        if not self._response_with_body:
            payload = EMPTY_PAYLOAD

        self._messages.append((msg, payload))

    cdef _on_message_complete(self):
        self._payload.feed_eof()
        self._payload = None

    cdef _on_chunk_header(self):
        self._payload.begin_http_chunk_receiving()

    cdef _on_chunk_complete(self):
        self._payload.end_http_chunk_receiving()

    cdef object _on_status_complete(self):
        pass

    cdef inline http_version(self):
        cdef cparser.llhttp_t* parser = self._cparser

        if parser.http_major == 1:
            if parser.http_minor == 0:
                return HttpVersion10
            elif parser.http_minor == 1:
                return HttpVersion11

        return HttpVersion(parser.http_major, parser.http_minor)

    ### Public API ###

    def feed_eof(self):
        cdef bytes desc

        if self._payload is not None:
            if self._cparser.flags & cparser.F_CHUNKED:
                raise TransferEncodingError(
                    "Not enough data for satisfy transfer length header.")
            elif self._cparser.flags & cparser.F_CONTENT_LENGTH:
                raise ContentLengthError(
                    "Not enough data for satisfy content length header.")
            elif cparser.llhttp_get_errno(self._cparser) != cparser.HPE_OK:
                desc = cparser.llhttp_get_error_reason(self._cparser)
                raise PayloadEncodingError(desc.decode('latin-1'))
            else:
                self._payload.feed_eof()
        elif self._started:
            self._on_headers_complete()
            if self._messages:
                return self._messages[-1][0]

    def feed_data(self, data):
        cdef:
            size_t data_len
            size_t nb
        cdef cparser.llhttp_errno_t errno

        PyObject_GetBuffer(data, &self.py_buf, PyBUF_SIMPLE)
        data_len = <size_t>self.py_buf.len

        errno = cparser.llhttp_execute(
            self._cparser,
            <char*>self.py_buf.buf,
            data_len)

        if errno is cparser.HPE_PAUSED_UPGRADE:
            cparser.llhttp_resume_after_upgrade(self._cparser)

            nb = cparser.llhttp_get_error_pos(self._cparser) - <char*>self.py_buf.buf

        PyBuffer_Release(&self.py_buf)

        if errno not in (cparser.HPE_OK, cparser.HPE_PAUSED_UPGRADE):
            if self._payload_error == 0:
                if self._last_error is not None:
                    ex = self._last_error
                    self._last_error = None
                else:
                    after = cparser.llhttp_get_error_pos(self._cparser)
                    before = data[:after - <char*>self.py_buf.buf]
                    after_b = after.split(b"\r\n", 1)[0]
                    before = before.rsplit(b"\r\n", 1)[-1]
                    data = before + after_b
                    pointer = " " * (len(repr(before))-1) + "^"
                    ex = parser_error_from_errno(self._cparser, data, pointer)
                self._payload = None
                raise ex

        if self._messages:
            messages = self._messages
            self._messages = []
        else:
            messages = ()

        if self._upgraded:
            return messages, True, data[nb:]
        else:
            return messages, False, b''

    def set_upgraded(self, val):
        self._upgraded = val


cdef class HttpRequestParser(HttpParser):

    def __init__(
        self, protocol, loop, int limit, timer=None,
        size_t max_line_size=8190, size_t max_headers=32768,
        size_t max_field_size=8190, payload_exception=None,
        bint response_with_body=True, bint read_until_eof=False,
        bint auto_decompress=True,
    ):
        self._init(cparser.HTTP_REQUEST, protocol, loop, limit, timer,
                   max_line_size, max_headers, max_field_size,
                   payload_exception, response_with_body, read_until_eof,
                   auto_decompress)

    cdef object _on_status_complete(self):
        cdef int idx1, idx2
        if not self._buf:
            return
        self._path = self._buf.decode('utf-8', 'surrogateescape')
        try:
            idx3 = len(self._path)
            if self._cparser.method == cparser.HTTP_CONNECT:
                # authority-form,
                # https://datatracker.ietf.org/doc/html/rfc7230#section-5.3.3
                self._url = URL.build(authority=self._path, encoded=True)
            elif idx3 > 1 and self._path[0] == '/':
                # origin-form,
                # https://datatracker.ietf.org/doc/html/rfc7230#section-5.3.1
                idx1 = self._path.find("?")
                if idx1 == -1:
                    query = ""
                    idx2 = self._path.find("#")
                    if idx2 == -1:
                        path = self._path
                        fragment = ""
                    else:
                        path = self._path[0: idx2]
                        fragment = self._path[idx2+1:]

                else:
                    path = self._path[0:idx1]
                    idx1 += 1
                    idx2 = self._path.find("#", idx1+1)
                    if idx2 == -1:
                        query = self._path[idx1:]
                        fragment = ""
                    else:
                        query = self._path[idx1: idx2]
                        fragment = self._path[idx2+1:]

                self._url = URL.build(
                    path=path,
                    query_string=query,
                    fragment=fragment,
                    encoded=True,
                )
            else:
                # absolute-form for proxy maybe,
                # https://datatracker.ietf.org/doc/html/rfc7230#section-5.3.2
                self._url = URL(self._path, encoded=True)
        finally:
            PyByteArray_Resize(self._buf, 0)


cdef class HttpResponseParser(HttpParser):

    def __init__(
        self, protocol, loop, int limit, timer=None,
        size_t max_line_size=8190, size_t max_headers=32768,
        size_t max_field_size=8190, payload_exception=None,
        bint response_with_body=True, bint read_until_eof=False,
        bint auto_decompress=True
    ):
        self._init(cparser.HTTP_RESPONSE, protocol, loop, limit, timer,
                   max_line_size, max_headers, max_field_size,
                   payload_exception, response_with_body, read_until_eof,
                   auto_decompress)
        # Use strict parsing on dev mode, so users are warned about broken servers.
        if not DEBUG:
            cparser.llhttp_set_lenient_headers(self._cparser, 1)
            cparser.llhttp_set_lenient_optional_cr_before_lf(self._cparser, 1)
            cparser.llhttp_set_lenient_spaces_after_chunk_size(self._cparser, 1)

    cdef object _on_status_complete(self):
        if self._buf:
            self._reason = self._buf.decode('utf-8', 'surrogateescape')
            PyByteArray_Resize(self._buf, 0)
        else:
            self._reason = self._reason or ''

cdef int cb_on_message_begin(cparser.llhttp_t* parser) except -1:
    cdef HttpParser pyparser = <HttpParser>parser.data

    pyparser._started = True
    pyparser._headers = CIMultiDict()
    pyparser._raw_headers = []
    PyByteArray_Resize(pyparser._buf, 0)
    pyparser._path = None
    pyparser._reason = None
    return 0


cdef int cb_on_url(cparser.llhttp_t* parser,
                   const char *at, size_t length) except -1:
    cdef HttpParser pyparser = <HttpParser>parser.data
    try:
        if length > pyparser._max_line_size:
            raise LineTooLong(
                'Status line is too long', pyparser._max_line_size, length)
        extend(pyparser._buf, at, length)
    except BaseException as ex:
        pyparser._last_error = ex
        return -1
    else:
        return 0


cdef int cb_on_status(cparser.llhttp_t* parser,
                      const char *at, size_t length) except -1:
    cdef HttpParser pyparser = <HttpParser>parser.data
    cdef str reason
    try:
        if length > pyparser._max_line_size:
            raise LineTooLong(
                'Status line is too long', pyparser._max_line_size, length)
        extend(pyparser._buf, at, length)
    except BaseException as ex:
        pyparser._last_error = ex
        return -1
    else:
        return 0


cdef int cb_on_header_field(cparser.llhttp_t* parser,
                            const char *at, size_t length) except -1:
    cdef HttpParser pyparser = <HttpParser>parser.data
    cdef Py_ssize_t size
    try:
        pyparser._on_status_complete()
        size = len(pyparser._raw_name) + length
        if size > pyparser._max_field_size:
            raise LineTooLong(
                'Header name is too long', pyparser._max_field_size, size)
        pyparser._on_header_field(at, length)
    except BaseException as ex:
        pyparser._last_error = ex
        return -1
    else:
        return 0


cdef int cb_on_header_value(cparser.llhttp_t* parser,
                            const char *at, size_t length) except -1:
    cdef HttpParser pyparser = <HttpParser>parser.data
    cdef Py_ssize_t size
    try:
        size = len(pyparser._raw_value) + length
        if size > pyparser._max_field_size:
            raise LineTooLong(
                'Header value is too long', pyparser._max_field_size, size)
        pyparser._on_header_value(at, length)
    except BaseException as ex:
        pyparser._last_error = ex
        return -1
    else:
        return 0


cdef int cb_on_headers_complete(cparser.llhttp_t* parser) except -1:
    cdef HttpParser pyparser = <HttpParser>parser.data
    try:
        pyparser._on_status_complete()
        pyparser._on_headers_complete()
    except BaseException as exc:
        pyparser._last_error = exc
        return -1
    else:
        if (
            pyparser._cparser.upgrade or
            pyparser._cparser.method == cparser.HTTP_CONNECT
        ):
            return 2
        else:
            return 0


cdef int cb_on_body(cparser.llhttp_t* parser,
                    const char *at, size_t length) except -1:
    cdef HttpParser pyparser = <HttpParser>parser.data
    cdef bytes body = at[:length]
    try:
        pyparser._payload.feed_data(body, length)
    except BaseException as exc:
        if pyparser._payload_exception is not None:
            pyparser._payload.set_exception(pyparser._payload_exception(str(exc)))
        else:
            pyparser._payload.set_exception(exc)
        pyparser._payload_error = 1
        return -1
    else:
        return 0


cdef int cb_on_message_complete(cparser.llhttp_t* parser) except -1:
    cdef HttpParser pyparser = <HttpParser>parser.data
    try:
        pyparser._started = False
        pyparser._on_message_complete()
    except BaseException as exc:
        pyparser._last_error = exc
        return -1
    else:
        return 0


cdef int cb_on_chunk_header(cparser.llhttp_t* parser) except -1:
    cdef HttpParser pyparser = <HttpParser>parser.data
    try:
        pyparser._on_chunk_header()
    except BaseException as exc:
        pyparser._last_error = exc
        return -1
    else:
        return 0


cdef int cb_on_chunk_complete(cparser.llhttp_t* parser) except -1:
    cdef HttpParser pyparser = <HttpParser>parser.data
    try:
        pyparser._on_chunk_complete()
    except BaseException as exc:
        pyparser._last_error = exc
        return -1
    else:
        return 0


cdef parser_error_from_errno(cparser.llhttp_t* parser, data, pointer):
    cdef cparser.llhttp_errno_t errno = cparser.llhttp_get_errno(parser)
    cdef bytes desc = cparser.llhttp_get_error_reason(parser)

    err_msg = "{}:\n\n {!r}\n {}".format(desc.decode("latin-1"), data, pointer)

    if errno in {cparser.HPE_CB_MESSAGE_BEGIN,
                 cparser.HPE_CB_HEADERS_COMPLETE,
                 cparser.HPE_CB_MESSAGE_COMPLETE,
                 cparser.HPE_CB_CHUNK_HEADER,
                 cparser.HPE_CB_CHUNK_COMPLETE,
                 cparser.HPE_INVALID_CONSTANT,
                 cparser.HPE_INVALID_HEADER_TOKEN,
                 cparser.HPE_INVALID_CONTENT_LENGTH,
                 cparser.HPE_INVALID_CHUNK_SIZE,
                 cparser.HPE_INVALID_EOF_STATE,
                 cparser.HPE_INVALID_TRANSFER_ENCODING}:
        return BadHttpMessage(err_msg)
    elif errno in {cparser.HPE_INVALID_STATUS,
                   cparser.HPE_INVALID_METHOD,
                   cparser.HPE_INVALID_VERSION}:
        return BadStatusLine(error=err_msg)
    elif errno == cparser.HPE_INVALID_URL:
        return InvalidURLError(err_msg)

    return BadHttpMessage(err_msg)
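Editor's note: an illustrative sketch of driving the request parser by hand, not part of the commit. It assumes the package above is importable; aiohttp.http_parser exposes the compiled HttpRequestParser when the extension is built. The protocol argument only needs the methods StreamReader calls, so a mock stands in here.

import asyncio
from unittest import mock

from aiohttp.http_parser import HttpRequestParser

loop = asyncio.new_event_loop()
protocol = mock.Mock()
parser = HttpRequestParser(protocol, loop, 2 ** 16)

# feed_data returns (messages, upgraded, tail); each message pairs a
# RawRequestMessage with its payload stream.
messages, upgraded, tail = parser.feed_data(
    b"GET /path?a=1 HTTP/1.1\r\nHost: example.com\r\n\r\n"
)
msg, payload = messages[0]
print(msg.method, msg.path, msg.url)  # GET /path?a=1 /path?a=1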
Binary file not shown.
163
.venv/lib/python3.7/site-packages/aiohttp/_http_writer.pyx
Normal file
@@ -0,0 +1,163 @@
from cpython.bytes cimport PyBytes_FromStringAndSize
from cpython.exc cimport PyErr_NoMemory
from cpython.mem cimport PyMem_Free, PyMem_Malloc, PyMem_Realloc
from cpython.object cimport PyObject_Str
from libc.stdint cimport uint8_t, uint64_t
from libc.string cimport memcpy

from multidict import istr

DEF BUF_SIZE = 16 * 1024  # 16KiB
cdef char BUFFER[BUF_SIZE]

cdef object _istr = istr


# ----------------- writer ---------------------------

cdef struct Writer:
    char *buf
    Py_ssize_t size
    Py_ssize_t pos


cdef inline void _init_writer(Writer* writer):
    writer.buf = &BUFFER[0]
    writer.size = BUF_SIZE
    writer.pos = 0


cdef inline void _release_writer(Writer* writer):
    if writer.buf != BUFFER:
        PyMem_Free(writer.buf)


cdef inline int _write_byte(Writer* writer, uint8_t ch):
    cdef char * buf
    cdef Py_ssize_t size

    if writer.pos == writer.size:
        # reallocate
        size = writer.size + BUF_SIZE
        if writer.buf == BUFFER:
            buf = <char*>PyMem_Malloc(size)
            if buf == NULL:
                PyErr_NoMemory()
                return -1
            memcpy(buf, writer.buf, writer.size)
        else:
            buf = <char*>PyMem_Realloc(writer.buf, size)
            if buf == NULL:
                PyErr_NoMemory()
                return -1
        writer.buf = buf
        writer.size = size
    writer.buf[writer.pos] = <char>ch
    writer.pos += 1
    return 0


cdef inline int _write_utf8(Writer* writer, Py_UCS4 symbol):
    cdef uint64_t utf = <uint64_t> symbol

    if utf < 0x80:
        return _write_byte(writer, <uint8_t>utf)
    elif utf < 0x800:
        if _write_byte(writer, <uint8_t>(0xc0 | (utf >> 6))) < 0:
            return -1
        return _write_byte(writer, <uint8_t>(0x80 | (utf & 0x3f)))
    elif 0xD800 <= utf <= 0xDFFF:
        # surogate pair, ignored
        return 0
    elif utf < 0x10000:
        if _write_byte(writer, <uint8_t>(0xe0 | (utf >> 12))) < 0:
            return -1
        if _write_byte(writer, <uint8_t>(0x80 | ((utf >> 6) & 0x3f))) < 0:
            return -1
        return _write_byte(writer, <uint8_t>(0x80 | (utf & 0x3f)))
    elif utf > 0x10FFFF:
        # symbol is too large
        return 0
    else:
        if _write_byte(writer, <uint8_t>(0xf0 | (utf >> 18))) < 0:
            return -1
        if _write_byte(writer,
                       <uint8_t>(0x80 | ((utf >> 12) & 0x3f))) < 0:
            return -1
        if _write_byte(writer,
                       <uint8_t>(0x80 | ((utf >> 6) & 0x3f))) < 0:
            return -1
        return _write_byte(writer, <uint8_t>(0x80 | (utf & 0x3f)))


cdef inline int _write_str(Writer* writer, str s):
    cdef Py_UCS4 ch
    for ch in s:
        if _write_utf8(writer, ch) < 0:
            return -1


# --------------- _serialize_headers ----------------------

cdef str to_str(object s):
    typ = type(s)
    if typ is str:
        return <str>s
    elif typ is _istr:
        return PyObject_Str(s)
    elif not isinstance(s, str):
        raise TypeError("Cannot serialize non-str key {!r}".format(s))
    else:
        return str(s)


cdef void _safe_header(str string) except *:
    if "\r" in string or "\n" in string:
        raise ValueError(
            "Newline or carriage return character detected in HTTP status message or "
            "header. This is a potential security issue."
        )


def _serialize_headers(str status_line, headers):
    cdef Writer writer
    cdef object key
    cdef object val
    cdef bytes ret

    _init_writer(&writer)

    for key, val in headers.items():
        _safe_header(to_str(key))
        _safe_header(to_str(val))

    try:
        if _write_str(&writer, status_line) < 0:
            raise
        if _write_byte(&writer, b'\r') < 0:
            raise
        if _write_byte(&writer, b'\n') < 0:
            raise

        for key, val in headers.items():
            if _write_str(&writer, to_str(key)) < 0:
                raise
            if _write_byte(&writer, b':') < 0:
                raise
            if _write_byte(&writer, b' ') < 0:
                raise
            if _write_str(&writer, to_str(val)) < 0:
                raise
            if _write_byte(&writer, b'\r') < 0:
                raise
            if _write_byte(&writer, b'\n') < 0:
                raise

        if _write_byte(&writer, b'\r') < 0:
            raise
        if _write_byte(&writer, b'\n') < 0:
            raise

        return PyBytes_FromStringAndSize(writer.buf, writer.pos)
    finally:
        _release_writer(&writer)
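Editor's note: an illustrative sketch of what _serialize_headers above produces, not part of the commit; it assumes the compiled extension is importable. The function writes the status line, one "Key: Value" line per header, and a terminating blank line, all CRLF-delimited.

from multidict import CIMultiDict

from aiohttp._http_writer import _serialize_headers

headers = CIMultiDict(Host="example.com", Connection="keep-alive")
raw = _serialize_headers("GET / HTTP/1.1", headers)
print(raw)
# b'GET / HTTP/1.1\r\nHost: example.com\r\nConnection: keep-alive\r\n\r\n'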
Binary file not shown.
56
.venv/lib/python3.7/site-packages/aiohttp/_websocket.pyx
Normal file
@@ -0,0 +1,56 @@
from cpython cimport PyBytes_AsString


#from cpython cimport PyByteArray_AsString # cython still not exports that
cdef extern from "Python.h":
    char* PyByteArray_AsString(bytearray ba) except NULL

from libc.stdint cimport uint32_t, uint64_t, uintmax_t


def _websocket_mask_cython(object mask, object data):
    """Note, this function mutates its `data` argument
    """
    cdef:
        Py_ssize_t data_len, i
        # bit operations on signed integers are implementation-specific
        unsigned char * in_buf
        const unsigned char * mask_buf
        uint32_t uint32_msk
        uint64_t uint64_msk

    assert len(mask) == 4

    if not isinstance(mask, bytes):
        mask = bytes(mask)

    if isinstance(data, bytearray):
        data = <bytearray>data
    else:
        data = bytearray(data)

    data_len = len(data)
    in_buf = <unsigned char*>PyByteArray_AsString(data)
    mask_buf = <const unsigned char*>PyBytes_AsString(mask)
    uint32_msk = (<uint32_t*>mask_buf)[0]

    # TODO: align in_data ptr to achieve even faster speeds
    # does it need in python ?! malloc() always aligns to sizeof(long) bytes

    if sizeof(size_t) >= 8:
        uint64_msk = uint32_msk
        uint64_msk = (uint64_msk << 32) | uint32_msk

        while data_len >= 8:
            (<uint64_t*>in_buf)[0] ^= uint64_msk
            in_buf += 8
            data_len -= 8


    while data_len >= 4:
        (<uint32_t*>in_buf)[0] ^= uint32_msk
        in_buf += 4
        data_len -= 4

    for i in range(0, data_len):
        in_buf[i] ^= mask_buf[i]
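Editor's note: an illustrative sketch, not part of the commit; it assumes the compiled extension is importable. WebSocket masking is a plain XOR with a repeating 4-byte key, so applying the same mask twice restores the original payload; the function mutates a bytearray in place, as its docstring above notes.

from aiohttp._websocket import _websocket_mask_cython

mask = b"\x01\x02\x03\x04"
data = bytearray(b"hello websocket")
_websocket_mask_cython(mask, data)  # masks in place
_websocket_mask_cython(mask, data)  # XOR again: unmasks in place
assert data == bytearray(b"hello websocket")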
207
.venv/lib/python3.7/site-packages/aiohttp/abc.py
Normal file
@@ -0,0 +1,207 @@
|
||||
import asyncio
|
||||
import logging
|
||||
from abc import ABC, abstractmethod
|
||||
from collections.abc import Sized
|
||||
from http.cookies import BaseCookie, Morsel
from typing import (
    TYPE_CHECKING,
    Any,
    Awaitable,
    Callable,
    Dict,
    Generator,
    Iterable,
    List,
    Optional,
    Tuple,
)

from multidict import CIMultiDict
from yarl import URL

from .helpers import get_running_loop
from .typedefs import LooseCookies

if TYPE_CHECKING:  # pragma: no cover
    from .web_app import Application
    from .web_exceptions import HTTPException
    from .web_request import BaseRequest, Request
    from .web_response import StreamResponse
else:
    BaseRequest = Request = Application = StreamResponse = None
    HTTPException = None


class AbstractRouter(ABC):
    def __init__(self) -> None:
        self._frozen = False

    def post_init(self, app: Application) -> None:
        """Post init stage.

        Not an abstract method for the sake of backward compatibility,
        but if the router wants to be aware of the application
        it can override this.
        """

    @property
    def frozen(self) -> bool:
        return self._frozen

    def freeze(self) -> None:
        """Freeze router."""
        self._frozen = True

    @abstractmethod
    async def resolve(self, request: Request) -> "AbstractMatchInfo":
        """Return MATCH_INFO for given request"""


class AbstractMatchInfo(ABC):
    @property  # pragma: no branch
    @abstractmethod
    def handler(self) -> Callable[[Request], Awaitable[StreamResponse]]:
        """Execute matched request handler"""

    @property
    @abstractmethod
    def expect_handler(self) -> Callable[[Request], Awaitable[None]]:
        """Expect handler for 100-continue processing"""

    @property  # pragma: no branch
    @abstractmethod
    def http_exception(self) -> Optional[HTTPException]:
        """HTTPException instance raised on router's resolving, or None"""

    @abstractmethod  # pragma: no branch
    def get_info(self) -> Dict[str, Any]:
        """Return a dict with additional info useful for introspection"""

    @property  # pragma: no branch
    @abstractmethod
    def apps(self) -> Tuple[Application, ...]:
        """Stack of nested applications.

        Top level application is left-most element.
        """

    @abstractmethod
    def add_app(self, app: Application) -> None:
        """Add application to the nested apps stack."""

    @abstractmethod
    def freeze(self) -> None:
        """Freeze the match info.

        The method is called after route resolution.

        After the call .add_app() is forbidden.
        """


class AbstractView(ABC):
    """Abstract class based view."""

    def __init__(self, request: Request) -> None:
        self._request = request

    @property
    def request(self) -> Request:
        """Request instance."""
        return self._request

    @abstractmethod
    def __await__(self) -> Generator[Any, None, StreamResponse]:
        """Execute the view handler."""


class AbstractResolver(ABC):
    """Abstract DNS resolver."""

    @abstractmethod
    async def resolve(self, host: str, port: int, family: int) -> List[Dict[str, Any]]:
        """Return IP address for given hostname"""

    @abstractmethod
    async def close(self) -> None:
        """Release resolver"""


if TYPE_CHECKING:  # pragma: no cover
    IterableBase = Iterable[Morsel[str]]
else:
    IterableBase = Iterable


ClearCookiePredicate = Callable[["Morsel[str]"], bool]


class AbstractCookieJar(Sized, IterableBase):
    """Abstract Cookie Jar."""

    def __init__(self, *, loop: Optional[asyncio.AbstractEventLoop] = None) -> None:
        self._loop = get_running_loop(loop)

    @abstractmethod
    def clear(self, predicate: Optional[ClearCookiePredicate] = None) -> None:
        """Clear all cookies if no predicate is passed."""

    @abstractmethod
    def clear_domain(self, domain: str) -> None:
        """Clear all cookies for domain and all subdomains."""

    @abstractmethod
    def update_cookies(self, cookies: LooseCookies, response_url: URL = URL()) -> None:
        """Update cookies."""

    @abstractmethod
    def filter_cookies(self, request_url: URL) -> "BaseCookie[str]":
        """Return the jar's cookies filtered by their attributes."""


class AbstractStreamWriter(ABC):
    """Abstract stream writer."""

    buffer_size = 0
    output_size = 0
    length: Optional[int] = 0

    @abstractmethod
    async def write(self, chunk: bytes) -> None:
        """Write chunk into stream."""

    @abstractmethod
    async def write_eof(self, chunk: bytes = b"") -> None:
        """Write last chunk."""

    @abstractmethod
    async def drain(self) -> None:
        """Flush the write buffer."""

    @abstractmethod
    def enable_compression(self, encoding: str = "deflate") -> None:
        """Enable HTTP body compression"""

    @abstractmethod
    def enable_chunking(self) -> None:
        """Enable HTTP chunked mode"""

    @abstractmethod
    async def write_headers(
        self, status_line: str, headers: "CIMultiDict[str]"
    ) -> None:
        """Write HTTP headers"""


class AbstractAccessLogger(ABC):
    """Abstract writer to access log."""

    def __init__(self, logger: logging.Logger, log_format: str) -> None:
        self.logger = logger
        self.log_format = log_format

    @abstractmethod
    def log(self, request: BaseRequest, response: StreamResponse, time: float) -> None:
        """Emit log to logger."""
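A minimal usage sketch (illustrative, not part of the vendored sources): subclassing the AbstractAccessLogger ABC above. The server runner supplies the logger, the log format, and the per-request arguments; such a class would be passed via `web.run_app(app, access_log_class=...)`.

# Illustrative only: a compact access logger built on the ABC above.
from aiohttp.abc import AbstractAccessLogger

class CompactAccessLogger(AbstractAccessLogger):
    def log(self, request, response, time):
        # self.logger and self.log_format come from the base __init__ above
        self.logger.info(
            "%s %s -> %d (%.3fs)",
            request.method, request.path, response.status, time,
        )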
90
.venv/lib/python3.7/site-packages/aiohttp/base_protocol.py
Normal file
@@ -0,0 +1,90 @@
import asyncio
from typing import Optional, cast

from .tcp_helpers import tcp_nodelay


class BaseProtocol(asyncio.Protocol):
    __slots__ = (
        "_loop",
        "_paused",
        "_drain_waiter",
        "_connection_lost",
        "_reading_paused",
        "transport",
    )

    def __init__(self, loop: asyncio.AbstractEventLoop) -> None:
        self._loop: asyncio.AbstractEventLoop = loop
        self._paused = False
        self._drain_waiter: Optional[asyncio.Future[None]] = None
        self._reading_paused = False

        self.transport: Optional[asyncio.Transport] = None

    @property
    def connected(self) -> bool:
        """Return True if the connection is open."""
        return self.transport is not None

    def pause_writing(self) -> None:
        assert not self._paused
        self._paused = True

    def resume_writing(self) -> None:
        assert self._paused
        self._paused = False

        waiter = self._drain_waiter
        if waiter is not None:
            self._drain_waiter = None
            if not waiter.done():
                waiter.set_result(None)

    def pause_reading(self) -> None:
        if not self._reading_paused and self.transport is not None:
            try:
                self.transport.pause_reading()
            except (AttributeError, NotImplementedError, RuntimeError):
                pass
            self._reading_paused = True

    def resume_reading(self) -> None:
        if self._reading_paused and self.transport is not None:
            try:
                self.transport.resume_reading()
            except (AttributeError, NotImplementedError, RuntimeError):
                pass
            self._reading_paused = False

    def connection_made(self, transport: asyncio.BaseTransport) -> None:
        tr = cast(asyncio.Transport, transport)
        tcp_nodelay(tr, True)
        self.transport = tr

    def connection_lost(self, exc: Optional[BaseException]) -> None:
        # Wake up the writer if currently paused.
        self.transport = None
        if not self._paused:
            return
        waiter = self._drain_waiter
        if waiter is None:
            return
        self._drain_waiter = None
        if waiter.done():
            return
        if exc is None:
            waiter.set_result(None)
        else:
            waiter.set_exception(exc)

    async def _drain_helper(self) -> None:
        if not self.connected:
            raise ConnectionResetError("Connection lost")
        if not self._paused:
            return
        waiter = self._drain_waiter
        if waiter is None:
            waiter = self._loop.create_future()
            self._drain_waiter = waiter
        await asyncio.shield(waiter)
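A sketch of the backpressure pattern implemented above (illustrative; _drain_helper is a private API used by aiohttp's own writers): pause_writing() sets the paused flag, _drain_helper() then parks the caller on a future, and resume_writing() releases it.

# Illustrative only: how a writer cooperates with the flow control above.
from aiohttp.base_protocol import BaseProtocol

async def write_chunks(proto: BaseProtocol, chunks) -> None:
    assert proto.transport is not None
    for chunk in chunks:
        proto.transport.write(chunk)
        await proto._drain_helper()  # waits only while the transport is paused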
1331
.venv/lib/python3.7/site-packages/aiohttp/client.py
Normal file
File diff suppressed because it is too large
342
.venv/lib/python3.7/site-packages/aiohttp/client_exceptions.py
Normal file
@@ -0,0 +1,342 @@
"""HTTP related errors."""

import asyncio
import warnings
from typing import TYPE_CHECKING, Any, Optional, Tuple, Union

from .http_parser import RawResponseMessage
from .typedefs import LooseHeaders

try:
    import ssl

    SSLContext = ssl.SSLContext
except ImportError:  # pragma: no cover
    ssl = SSLContext = None  # type: ignore[assignment]


if TYPE_CHECKING:  # pragma: no cover
    from .client_reqrep import ClientResponse, ConnectionKey, Fingerprint, RequestInfo
else:
    RequestInfo = ClientResponse = ConnectionKey = None

__all__ = (
    "ClientError",
    "ClientConnectionError",
    "ClientOSError",
    "ClientConnectorError",
    "ClientProxyConnectionError",
    "ClientSSLError",
    "ClientConnectorSSLError",
    "ClientConnectorCertificateError",
    "ServerConnectionError",
    "ServerTimeoutError",
    "ServerDisconnectedError",
    "ServerFingerprintMismatch",
    "ClientResponseError",
    "ClientHttpProxyError",
    "WSServerHandshakeError",
    "ContentTypeError",
    "ClientPayloadError",
    "InvalidURL",
)


class ClientError(Exception):
    """Base class for client connection errors."""


class ClientResponseError(ClientError):
    """Connection error during reading response.

    request_info: instance of RequestInfo
    """

    def __init__(
        self,
        request_info: RequestInfo,
        history: Tuple[ClientResponse, ...],
        *,
        code: Optional[int] = None,
        status: Optional[int] = None,
        message: str = "",
        headers: Optional[LooseHeaders] = None,
    ) -> None:
        self.request_info = request_info
        if code is not None:
            if status is not None:
                raise ValueError(
                    "Both code and status arguments are provided; "
                    "code is deprecated, use status instead"
                )
            warnings.warn(
                "code argument is deprecated, use status instead",
                DeprecationWarning,
                stacklevel=2,
            )
        if status is not None:
            self.status = status
        elif code is not None:
            self.status = code
        else:
            self.status = 0
        self.message = message
        self.headers = headers
        self.history = history
        self.args = (request_info, history)

    def __str__(self) -> str:
        return "{}, message={!r}, url={!r}".format(
            self.status,
            self.message,
            self.request_info.real_url,
        )

    def __repr__(self) -> str:
        args = f"{self.request_info!r}, {self.history!r}"
        if self.status != 0:
            args += f", status={self.status!r}"
        if self.message != "":
            args += f", message={self.message!r}"
        if self.headers is not None:
            args += f", headers={self.headers!r}"
        return f"{type(self).__name__}({args})"

    @property
    def code(self) -> int:
        warnings.warn(
            "code property is deprecated, use status instead",
            DeprecationWarning,
            stacklevel=2,
        )
        return self.status

    @code.setter
    def code(self, value: int) -> None:
        warnings.warn(
            "code property is deprecated, use status instead",
            DeprecationWarning,
            stacklevel=2,
        )
        self.status = value


class ContentTypeError(ClientResponseError):
    """ContentType found is not valid."""


class WSServerHandshakeError(ClientResponseError):
    """WebSocket server handshake error."""


class ClientHttpProxyError(ClientResponseError):
    """HTTP proxy error.

    Raised in :class:`aiohttp.connector.TCPConnector` if
    proxy responds with status other than ``200 OK``
    on ``CONNECT`` request.
    """


class TooManyRedirects(ClientResponseError):
    """Client was redirected too many times."""


class ClientConnectionError(ClientError):
    """Base class for client socket errors."""


class ClientOSError(ClientConnectionError, OSError):
    """OSError error."""


class ClientConnectorError(ClientOSError):
    """Client connector error.

    Raised in :class:`aiohttp.connector.TCPConnector` if
    a connection can not be established.
    """

    def __init__(self, connection_key: ConnectionKey, os_error: OSError) -> None:
        self._conn_key = connection_key
        self._os_error = os_error
        super().__init__(os_error.errno, os_error.strerror)
        self.args = (connection_key, os_error)

    @property
    def os_error(self) -> OSError:
        return self._os_error

    @property
    def host(self) -> str:
        return self._conn_key.host

    @property
    def port(self) -> Optional[int]:
        return self._conn_key.port

    @property
    def ssl(self) -> Union[SSLContext, None, bool, "Fingerprint"]:
        return self._conn_key.ssl

    def __str__(self) -> str:
        return "Cannot connect to host {0.host}:{0.port} ssl:{1} [{2}]".format(
            self, self.ssl if self.ssl is not None else "default", self.strerror
        )

    # OSError.__reduce__ does too much black magic
    __reduce__ = BaseException.__reduce__


class ClientProxyConnectionError(ClientConnectorError):
    """Proxy connection error.

    Raised in :class:`aiohttp.connector.TCPConnector` if
    connection to proxy can not be established.
    """


class UnixClientConnectorError(ClientConnectorError):
    """Unix connector error.

    Raised in :py:class:`aiohttp.connector.UnixConnector`
    if connection to unix socket can not be established.
    """

    def __init__(
        self, path: str, connection_key: ConnectionKey, os_error: OSError
    ) -> None:
        self._path = path
        super().__init__(connection_key, os_error)

    @property
    def path(self) -> str:
        return self._path

    def __str__(self) -> str:
        return "Cannot connect to unix socket {0.path} ssl:{1} [{2}]".format(
            self, self.ssl if self.ssl is not None else "default", self.strerror
        )


class ServerConnectionError(ClientConnectionError):
    """Server connection errors."""


class ServerDisconnectedError(ServerConnectionError):
    """Server disconnected."""

    def __init__(self, message: Union[RawResponseMessage, str, None] = None) -> None:
        if message is None:
            message = "Server disconnected"

        self.args = (message,)
        self.message = message


class ServerTimeoutError(ServerConnectionError, asyncio.TimeoutError):
    """Server timeout error."""


class ServerFingerprintMismatch(ServerConnectionError):
    """SSL certificate does not match expected fingerprint."""

    def __init__(self, expected: bytes, got: bytes, host: str, port: int) -> None:
        self.expected = expected
        self.got = got
        self.host = host
        self.port = port
        self.args = (expected, got, host, port)

    def __repr__(self) -> str:
        return "<{} expected={!r} got={!r} host={!r} port={!r}>".format(
            self.__class__.__name__, self.expected, self.got, self.host, self.port
        )


class ClientPayloadError(ClientError):
    """Response payload error."""


class InvalidURL(ClientError, ValueError):
    """Invalid URL.

    URL used for fetching is malformed, e.g. it doesn't contain a host
    part.
    """

    # Derive from ValueError for backward compatibility

    def __init__(self, url: Any) -> None:
        # The type of url is not yarl.URL because the exception can be raised
        # on URL(url) call
        super().__init__(url)

    @property
    def url(self) -> Any:
        return self.args[0]

    def __repr__(self) -> str:
        return f"<{self.__class__.__name__} {self.url}>"


class ClientSSLError(ClientConnectorError):
    """Base error for ssl.*Errors."""


if ssl is not None:
    cert_errors = (ssl.CertificateError,)
    cert_errors_bases = (
        ClientSSLError,
        ssl.CertificateError,
    )

    ssl_errors = (ssl.SSLError,)
    ssl_error_bases = (ClientSSLError, ssl.SSLError)
else:  # pragma: no cover
    cert_errors = tuple()
    cert_errors_bases = (
        ClientSSLError,
        ValueError,
    )

    ssl_errors = tuple()
    ssl_error_bases = (ClientSSLError,)


class ClientConnectorSSLError(*ssl_error_bases):  # type: ignore[misc]
    """Response ssl error."""


class ClientConnectorCertificateError(*cert_errors_bases):  # type: ignore[misc]
    """Response certificate error."""

    def __init__(
        self, connection_key: ConnectionKey, certificate_error: Exception
    ) -> None:
        self._conn_key = connection_key
        self._certificate_error = certificate_error
        self.args = (connection_key, certificate_error)

    @property
    def certificate_error(self) -> Exception:
        return self._certificate_error

    @property
    def host(self) -> str:
        return self._conn_key.host

    @property
    def port(self) -> Optional[int]:
        return self._conn_key.port

    @property
    def ssl(self) -> bool:
        return self._conn_key.is_ssl

    def __str__(self) -> str:
        return (
            "Cannot connect to host {0.host}:{0.port} ssl:{0.ssl} "
            "[{0.certificate_error.__class__.__name__}: "
            "{0.certificate_error.args}]".format(self)
        )
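An illustrative sketch (not vendored code) of catching the hierarchy above, most specific first; `raise_for_status()` raises ClientResponseError subclasses, and ClientConnectorError exposes the host/port/os_error properties defined above.

# Illustrative only: handling the client exception hierarchy above.
import aiohttp

async def fetch(url: str) -> str:
    try:
        async with aiohttp.ClientSession() as session:
            async with session.get(url) as resp:
                resp.raise_for_status()
                return await resp.text()
    except aiohttp.ClientConnectorError as exc:
        # host/port come from the ConnectionKey stored by __init__ above
        raise RuntimeError(f"cannot reach {exc.host}:{exc.port}") from exc
    except aiohttp.ClientResponseError as exc:
        raise RuntimeError(f"HTTP {exc.status}: {exc.message}") from exc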
251
.venv/lib/python3.7/site-packages/aiohttp/client_proto.py
Normal file
@@ -0,0 +1,251 @@
import asyncio
from contextlib import suppress
from typing import Any, Optional, Tuple

from .base_protocol import BaseProtocol
from .client_exceptions import (
    ClientOSError,
    ClientPayloadError,
    ServerDisconnectedError,
    ServerTimeoutError,
)
from .helpers import BaseTimerContext
from .http import HttpResponseParser, RawResponseMessage
from .streams import EMPTY_PAYLOAD, DataQueue, StreamReader


class ResponseHandler(BaseProtocol, DataQueue[Tuple[RawResponseMessage, StreamReader]]):
    """Helper class to adapt between Protocol and StreamReader."""

    def __init__(self, loop: asyncio.AbstractEventLoop) -> None:
        BaseProtocol.__init__(self, loop=loop)
        DataQueue.__init__(self, loop)

        self._should_close = False

        self._payload: Optional[StreamReader] = None
        self._skip_payload = False
        self._payload_parser = None

        self._timer = None

        self._tail = b""
        self._upgraded = False
        self._parser: Optional[HttpResponseParser] = None

        self._read_timeout: Optional[float] = None
        self._read_timeout_handle: Optional[asyncio.TimerHandle] = None

    @property
    def upgraded(self) -> bool:
        return self._upgraded

    @property
    def should_close(self) -> bool:
        if self._payload is not None and not self._payload.is_eof() or self._upgraded:
            return True

        return (
            self._should_close
            or self._upgraded
            or self.exception() is not None
            or self._payload_parser is not None
            or len(self) > 0
            or bool(self._tail)
        )

    def force_close(self) -> None:
        self._should_close = True

    def close(self) -> None:
        transport = self.transport
        if transport is not None:
            transport.close()
            self.transport = None
            self._payload = None
            self._drop_timeout()

    def is_connected(self) -> bool:
        return self.transport is not None and not self.transport.is_closing()

    def connection_lost(self, exc: Optional[BaseException]) -> None:
        self._drop_timeout()

        if self._payload_parser is not None:
            with suppress(Exception):
                self._payload_parser.feed_eof()

        uncompleted = None
        if self._parser is not None:
            try:
                uncompleted = self._parser.feed_eof()
            except Exception:
                if self._payload is not None:
                    self._payload.set_exception(
                        ClientPayloadError("Response payload is not completed")
                    )

        if not self.is_eof():
            if isinstance(exc, OSError):
                exc = ClientOSError(*exc.args)
            if exc is None:
                exc = ServerDisconnectedError(uncompleted)
            # assigns self._should_close to True as side effect,
            # we do it anyway below
            self.set_exception(exc)

        self._should_close = True
        self._parser = None
        self._payload = None
        self._payload_parser = None
        self._reading_paused = False

        super().connection_lost(exc)

    def eof_received(self) -> None:
        # should call parser.feed_eof() most likely
        self._drop_timeout()

    def pause_reading(self) -> None:
        super().pause_reading()
        self._drop_timeout()

    def resume_reading(self) -> None:
        super().resume_reading()
        self._reschedule_timeout()

    def set_exception(self, exc: BaseException) -> None:
        self._should_close = True
        self._drop_timeout()
        super().set_exception(exc)

    def set_parser(self, parser: Any, payload: Any) -> None:
        # TODO: actual types are:
        #   parser: WebSocketReader
        #   payload: FlowControlDataQueue
        # but they are not generic enough
        # Need an ABC for both types
        self._payload = payload
        self._payload_parser = parser

        self._drop_timeout()

        if self._tail:
            data, self._tail = self._tail, b""
            self.data_received(data)

    def set_response_params(
        self,
        *,
        timer: Optional[BaseTimerContext] = None,
        skip_payload: bool = False,
        read_until_eof: bool = False,
        auto_decompress: bool = True,
        read_timeout: Optional[float] = None,
        read_bufsize: int = 2**16,
    ) -> None:
        self._skip_payload = skip_payload

        self._read_timeout = read_timeout
        self._reschedule_timeout()

        self._parser = HttpResponseParser(
            self,
            self._loop,
            read_bufsize,
            timer=timer,
            payload_exception=ClientPayloadError,
            response_with_body=not skip_payload,
            read_until_eof=read_until_eof,
            auto_decompress=auto_decompress,
        )

        if self._tail:
            data, self._tail = self._tail, b""
            self.data_received(data)

    def _drop_timeout(self) -> None:
        if self._read_timeout_handle is not None:
            self._read_timeout_handle.cancel()
            self._read_timeout_handle = None

    def _reschedule_timeout(self) -> None:
        timeout = self._read_timeout
        if self._read_timeout_handle is not None:
            self._read_timeout_handle.cancel()

        if timeout:
            self._read_timeout_handle = self._loop.call_later(
                timeout, self._on_read_timeout
            )
        else:
            self._read_timeout_handle = None

    def _on_read_timeout(self) -> None:
        exc = ServerTimeoutError("Timeout on reading data from socket")
        self.set_exception(exc)
        if self._payload is not None:
            self._payload.set_exception(exc)

    def data_received(self, data: bytes) -> None:
        self._reschedule_timeout()

        if not data:
            return

        # custom payload parser
        if self._payload_parser is not None:
            eof, tail = self._payload_parser.feed_data(data)
            if eof:
                self._payload = None
                self._payload_parser = None

                if tail:
                    self.data_received(tail)
            return
        else:
            if self._upgraded or self._parser is None:
                # i.e. websocket connection, websocket parser is not set yet
                self._tail += data
            else:
                # parse http messages
                try:
                    messages, upgraded, tail = self._parser.feed_data(data)
                except BaseException as exc:
                    if self.transport is not None:
                        # connection.release() could be called BEFORE
                        # data_received(), the transport is already
                        # closed in this case
                        self.transport.close()
                    # should_close is True after the call
                    self.set_exception(exc)
                    return

                self._upgraded = upgraded

                payload: Optional[StreamReader] = None
                for message, payload in messages:
                    if message.should_close:
                        self._should_close = True

                    self._payload = payload

                    if self._skip_payload or message.code in (204, 304):
                        self.feed_data((message, EMPTY_PAYLOAD), 0)
                    else:
                        self.feed_data((message, payload), 0)
                if payload is not None:
                    # new message(s) was processed
                    # register timeout handler unsubscribing
                    # either on end-of-stream or immediately for
                    # EMPTY_PAYLOAD
                    if payload is not EMPTY_PAYLOAD:
                        payload.on_eof(self._drop_timeout)
                    else:
                        self._drop_timeout()

                if tail:
                    if upgraded:
                        self.data_received(tail)
                    else:
                        self._tail = tail
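An illustrative sketch (internal API; this wiring normally lives in connector.py): the connector builds the transport/protocol pair and then applies the per-response parsing parameters via set_response_params() shown above.

# Illustrative only: manual wiring of the ResponseHandler above.
import asyncio
from aiohttp.client_proto import ResponseHandler

async def open_connection(host: str, port: int) -> ResponseHandler:
    loop = asyncio.get_running_loop()
    _, proto = await loop.create_connection(
        lambda: ResponseHandler(loop=loop), host, port
    )
    # arms the read timeout and installs the HttpResponseParser
    proto.set_response_params(read_timeout=30.0, read_bufsize=2**16)
    return proto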
1135
.venv/lib/python3.7/site-packages/aiohttp/client_reqrep.py
Normal file
File diff suppressed because it is too large
300
.venv/lib/python3.7/site-packages/aiohttp/client_ws.py
Normal file
@@ -0,0 +1,300 @@
"""WebSocket client for asyncio."""

import asyncio
from typing import Any, Optional, cast

import async_timeout

from .client_exceptions import ClientError
from .client_reqrep import ClientResponse
from .helpers import call_later, set_result
from .http import (
    WS_CLOSED_MESSAGE,
    WS_CLOSING_MESSAGE,
    WebSocketError,
    WSCloseCode,
    WSMessage,
    WSMsgType,
)
from .http_websocket import WebSocketWriter  # WSMessage
from .streams import EofStream, FlowControlDataQueue
from .typedefs import (
    DEFAULT_JSON_DECODER,
    DEFAULT_JSON_ENCODER,
    JSONDecoder,
    JSONEncoder,
)


class ClientWebSocketResponse:
    def __init__(
        self,
        reader: "FlowControlDataQueue[WSMessage]",
        writer: WebSocketWriter,
        protocol: Optional[str],
        response: ClientResponse,
        timeout: float,
        autoclose: bool,
        autoping: bool,
        loop: asyncio.AbstractEventLoop,
        *,
        receive_timeout: Optional[float] = None,
        heartbeat: Optional[float] = None,
        compress: int = 0,
        client_notakeover: bool = False,
    ) -> None:
        self._response = response
        self._conn = response.connection

        self._writer = writer
        self._reader = reader
        self._protocol = protocol
        self._closed = False
        self._closing = False
        self._close_code: Optional[int] = None
        self._timeout = timeout
        self._receive_timeout = receive_timeout
        self._autoclose = autoclose
        self._autoping = autoping
        self._heartbeat = heartbeat
        self._heartbeat_cb: Optional[asyncio.TimerHandle] = None
        if heartbeat is not None:
            self._pong_heartbeat = heartbeat / 2.0
        self._pong_response_cb: Optional[asyncio.TimerHandle] = None
        self._loop = loop
        self._waiting: Optional[asyncio.Future[bool]] = None
        self._exception: Optional[BaseException] = None
        self._compress = compress
        self._client_notakeover = client_notakeover

        self._reset_heartbeat()

    def _cancel_heartbeat(self) -> None:
        if self._pong_response_cb is not None:
            self._pong_response_cb.cancel()
            self._pong_response_cb = None

        if self._heartbeat_cb is not None:
            self._heartbeat_cb.cancel()
            self._heartbeat_cb = None

    def _reset_heartbeat(self) -> None:
        self._cancel_heartbeat()

        if self._heartbeat is not None:
            self._heartbeat_cb = call_later(
                self._send_heartbeat, self._heartbeat, self._loop
            )

    def _send_heartbeat(self) -> None:
        if self._heartbeat is not None and not self._closed:
            # A fire-and-forget task is not perfect but may be OK for
            # sending pings. Otherwise we would need a long-living
            # heartbeat task in the class.
            self._loop.create_task(self._writer.ping())

            if self._pong_response_cb is not None:
                self._pong_response_cb.cancel()
            self._pong_response_cb = call_later(
                self._pong_not_received, self._pong_heartbeat, self._loop
            )

    def _pong_not_received(self) -> None:
        if not self._closed:
            self._closed = True
            self._close_code = WSCloseCode.ABNORMAL_CLOSURE
            self._exception = asyncio.TimeoutError()
            self._response.close()

    @property
    def closed(self) -> bool:
        return self._closed

    @property
    def close_code(self) -> Optional[int]:
        return self._close_code

    @property
    def protocol(self) -> Optional[str]:
        return self._protocol

    @property
    def compress(self) -> int:
        return self._compress

    @property
    def client_notakeover(self) -> bool:
        return self._client_notakeover

    def get_extra_info(self, name: str, default: Any = None) -> Any:
        """extra info from connection transport"""
        conn = self._response.connection
        if conn is None:
            return default
        transport = conn.transport
        if transport is None:
            return default
        return transport.get_extra_info(name, default)

    def exception(self) -> Optional[BaseException]:
        return self._exception

    async def ping(self, message: bytes = b"") -> None:
        await self._writer.ping(message)

    async def pong(self, message: bytes = b"") -> None:
        await self._writer.pong(message)

    async def send_str(self, data: str, compress: Optional[int] = None) -> None:
        if not isinstance(data, str):
            raise TypeError("data argument must be str (%r)" % type(data))
        await self._writer.send(data, binary=False, compress=compress)

    async def send_bytes(self, data: bytes, compress: Optional[int] = None) -> None:
        if not isinstance(data, (bytes, bytearray, memoryview)):
            raise TypeError("data argument must be byte-ish (%r)" % type(data))
        await self._writer.send(data, binary=True, compress=compress)

    async def send_json(
        self,
        data: Any,
        compress: Optional[int] = None,
        *,
        dumps: JSONEncoder = DEFAULT_JSON_ENCODER,
    ) -> None:
        await self.send_str(dumps(data), compress=compress)

    async def close(self, *, code: int = WSCloseCode.OK, message: bytes = b"") -> bool:
        # we need to break the `receive()` cycle first,
        # `close()` may be called from a different task
        if self._waiting is not None and not self._closed:
            self._reader.feed_data(WS_CLOSING_MESSAGE, 0)
            await self._waiting

        if not self._closed:
            self._cancel_heartbeat()
            self._closed = True
            try:
                await self._writer.close(code, message)
            except asyncio.CancelledError:
                self._close_code = WSCloseCode.ABNORMAL_CLOSURE
                self._response.close()
                raise
            except Exception as exc:
                self._close_code = WSCloseCode.ABNORMAL_CLOSURE
                self._exception = exc
                self._response.close()
                return True

            if self._closing:
                self._response.close()
                return True

            while True:
                try:
                    async with async_timeout.timeout(self._timeout):
                        msg = await self._reader.read()
                except asyncio.CancelledError:
                    self._close_code = WSCloseCode.ABNORMAL_CLOSURE
                    self._response.close()
                    raise
                except Exception as exc:
                    self._close_code = WSCloseCode.ABNORMAL_CLOSURE
                    self._exception = exc
                    self._response.close()
                    return True

                if msg.type == WSMsgType.CLOSE:
                    self._close_code = msg.data
                    self._response.close()
                    return True
        else:
            return False

    async def receive(self, timeout: Optional[float] = None) -> WSMessage:
        while True:
            if self._waiting is not None:
                raise RuntimeError("Concurrent call to receive() is not allowed")

            if self._closed:
                return WS_CLOSED_MESSAGE
            elif self._closing:
                await self.close()
                return WS_CLOSED_MESSAGE

            try:
                self._waiting = self._loop.create_future()
                try:
                    async with async_timeout.timeout(timeout or self._receive_timeout):
                        msg = await self._reader.read()
                        self._reset_heartbeat()
                finally:
                    waiter = self._waiting
                    self._waiting = None
                    set_result(waiter, True)
            except (asyncio.CancelledError, asyncio.TimeoutError):
                self._close_code = WSCloseCode.ABNORMAL_CLOSURE
                raise
            except EofStream:
                self._close_code = WSCloseCode.OK
                await self.close()
                return WSMessage(WSMsgType.CLOSED, None, None)
            except ClientError:
                self._closed = True
                self._close_code = WSCloseCode.ABNORMAL_CLOSURE
                return WS_CLOSED_MESSAGE
            except WebSocketError as exc:
                self._close_code = exc.code
                await self.close(code=exc.code)
                return WSMessage(WSMsgType.ERROR, exc, None)
            except Exception as exc:
                self._exception = exc
                self._closing = True
                self._close_code = WSCloseCode.ABNORMAL_CLOSURE
                await self.close()
                return WSMessage(WSMsgType.ERROR, exc, None)

            if msg.type == WSMsgType.CLOSE:
                self._closing = True
                self._close_code = msg.data
                if not self._closed and self._autoclose:
                    await self.close()
            elif msg.type == WSMsgType.CLOSING:
                self._closing = True
            elif msg.type == WSMsgType.PING and self._autoping:
                await self.pong(msg.data)
                continue
            elif msg.type == WSMsgType.PONG and self._autoping:
                continue

            return msg

    async def receive_str(self, *, timeout: Optional[float] = None) -> str:
        msg = await self.receive(timeout)
        if msg.type != WSMsgType.TEXT:
            raise TypeError(f"Received message {msg.type}:{msg.data!r} is not str")
        return cast(str, msg.data)

    async def receive_bytes(self, *, timeout: Optional[float] = None) -> bytes:
        msg = await self.receive(timeout)
        if msg.type != WSMsgType.BINARY:
            raise TypeError(f"Received message {msg.type}:{msg.data!r} is not bytes")
        return cast(bytes, msg.data)

    async def receive_json(
        self,
        *,
        loads: JSONDecoder = DEFAULT_JSON_DECODER,
        timeout: Optional[float] = None,
    ) -> Any:
        data = await self.receive_str(timeout=timeout)
        return loads(data)

    def __aiter__(self) -> "ClientWebSocketResponse":
        return self

    async def __anext__(self) -> WSMessage:
        msg = await self.receive()
        if msg.type in (WSMsgType.CLOSE, WSMsgType.CLOSING, WSMsgType.CLOSED):
            raise StopAsyncIteration
        return msg
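An illustrative sketch (not vendored code): ClientWebSocketResponse instances are obtained via ClientSession.ws_connect(); the async-iteration protocol implemented by __aiter__/__anext__ above stops automatically on CLOSE/CLOSING/CLOSED, and PING/PONG are handled by the autoping logic in receive().

# Illustrative only: driving the WebSocket client defined above.
import aiohttp

async def echo_once(url: str) -> None:
    async with aiohttp.ClientSession() as session:
        async with session.ws_connect(url, heartbeat=30.0) as ws:
            await ws.send_str("hello")
            async for msg in ws:  # ends when a close frame arrives
                if msg.type == aiohttp.WSMsgType.TEXT:
                    print(msg.data)
                    await ws.close()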
1456
.venv/lib/python3.7/site-packages/aiohttp/connector.py
Normal file
File diff suppressed because it is too large
416
.venv/lib/python3.7/site-packages/aiohttp/cookiejar.py
Normal file
@@ -0,0 +1,416 @@
import asyncio
import contextlib
import datetime
import os  # noqa
import pathlib
import pickle
import re
from collections import defaultdict
from http.cookies import BaseCookie, Morsel, SimpleCookie
from typing import (  # noqa
    DefaultDict,
    Dict,
    Iterable,
    Iterator,
    List,
    Mapping,
    Optional,
    Set,
    Tuple,
    Union,
    cast,
)

from yarl import URL

from .abc import AbstractCookieJar, ClearCookiePredicate
from .helpers import is_ip_address, next_whole_second
from .typedefs import LooseCookies, PathLike, StrOrURL

__all__ = ("CookieJar", "DummyCookieJar")


CookieItem = Union[str, "Morsel[str]"]


class CookieJar(AbstractCookieJar):
    """Implements cookie storage adhering to RFC 6265."""

    DATE_TOKENS_RE = re.compile(
        r"[\x09\x20-\x2F\x3B-\x40\x5B-\x60\x7B-\x7E]*"
        r"(?P<token>[\x00-\x08\x0A-\x1F\d:a-zA-Z\x7F-\xFF]+)"
    )

    DATE_HMS_TIME_RE = re.compile(r"(\d{1,2}):(\d{1,2}):(\d{1,2})")

    DATE_DAY_OF_MONTH_RE = re.compile(r"(\d{1,2})")

    DATE_MONTH_RE = re.compile(
        "(jan)|(feb)|(mar)|(apr)|(may)|(jun)|(jul)|(aug)|(sep)|(oct)|(nov)|(dec)",
        re.I,
    )

    DATE_YEAR_RE = re.compile(r"(\d{2,4})")

    MAX_TIME = datetime.datetime.max.replace(tzinfo=datetime.timezone.utc)

    MAX_32BIT_TIME = datetime.datetime.utcfromtimestamp(2**31 - 1)

    def __init__(
        self,
        *,
        unsafe: bool = False,
        quote_cookie: bool = True,
        treat_as_secure_origin: Union[StrOrURL, List[StrOrURL], None] = None,
        loop: Optional[asyncio.AbstractEventLoop] = None,
    ) -> None:
        super().__init__(loop=loop)
        self._cookies: DefaultDict[Tuple[str, str], SimpleCookie[str]] = defaultdict(
            SimpleCookie
        )
        self._host_only_cookies: Set[Tuple[str, str]] = set()
        self._unsafe = unsafe
        self._quote_cookie = quote_cookie
        if treat_as_secure_origin is None:
            treat_as_secure_origin = []
        elif isinstance(treat_as_secure_origin, URL):
            treat_as_secure_origin = [treat_as_secure_origin.origin()]
        elif isinstance(treat_as_secure_origin, str):
            treat_as_secure_origin = [URL(treat_as_secure_origin).origin()]
        else:
            treat_as_secure_origin = [
                URL(url).origin() if isinstance(url, str) else url.origin()
                for url in treat_as_secure_origin
            ]
        self._treat_as_secure_origin = treat_as_secure_origin
        self._next_expiration = next_whole_second()
        self._expirations: Dict[Tuple[str, str, str], datetime.datetime] = {}
        # #4515: datetime.max may not be representable on 32-bit platforms
        self._max_time = self.MAX_TIME
        try:
            self._max_time.timestamp()
        except OverflowError:
            self._max_time = self.MAX_32BIT_TIME

    def save(self, file_path: PathLike) -> None:
        file_path = pathlib.Path(file_path)
        with file_path.open(mode="wb") as f:
            pickle.dump(self._cookies, f, pickle.HIGHEST_PROTOCOL)

    def load(self, file_path: PathLike) -> None:
        file_path = pathlib.Path(file_path)
        with file_path.open(mode="rb") as f:
            self._cookies = pickle.load(f)

    def clear(self, predicate: Optional[ClearCookiePredicate] = None) -> None:
        if predicate is None:
            self._next_expiration = next_whole_second()
            self._cookies.clear()
            self._host_only_cookies.clear()
            self._expirations.clear()
            return

        to_del = []
        now = datetime.datetime.now(datetime.timezone.utc)
        for (domain, path), cookie in self._cookies.items():
            for name, morsel in cookie.items():
                key = (domain, path, name)
                if (
                    key in self._expirations and self._expirations[key] <= now
                ) or predicate(morsel):
                    to_del.append(key)

        for domain, path, name in to_del:
            self._host_only_cookies.discard((domain, name))
            key = (domain, path, name)
            if key in self._expirations:
                del self._expirations[(domain, path, name)]
            self._cookies[(domain, path)].pop(name, None)

        next_expiration = min(self._expirations.values(), default=self._max_time)
        try:
            self._next_expiration = next_expiration.replace(
                microsecond=0
            ) + datetime.timedelta(seconds=1)
        except OverflowError:
            self._next_expiration = self._max_time

    def clear_domain(self, domain: str) -> None:
        self.clear(lambda x: self._is_domain_match(domain, x["domain"]))

    def __iter__(self) -> "Iterator[Morsel[str]]":
        self._do_expiration()
        for val in self._cookies.values():
            yield from val.values()

    def __len__(self) -> int:
        return sum(1 for i in self)

    def _do_expiration(self) -> None:
        self.clear(lambda x: False)

    def _expire_cookie(
        self, when: datetime.datetime, domain: str, path: str, name: str
    ) -> None:
        self._next_expiration = min(self._next_expiration, when)
        self._expirations[(domain, path, name)] = when

    def update_cookies(self, cookies: LooseCookies, response_url: URL = URL()) -> None:
        """Update cookies."""
        hostname = response_url.raw_host

        if not self._unsafe and is_ip_address(hostname):
            # Don't accept cookies from IPs
            return

        if isinstance(cookies, Mapping):
            cookies = cookies.items()

        for name, cookie in cookies:
            if not isinstance(cookie, Morsel):
                tmp: SimpleCookie[str] = SimpleCookie()
                tmp[name] = cookie  # type: ignore[assignment]
                cookie = tmp[name]

            domain = cookie["domain"]

            # ignore domains with trailing dots
            if domain.endswith("."):
                domain = ""
                del cookie["domain"]

            if not domain and hostname is not None:
                # Set the cookie's domain to the response hostname
                # and set its host-only-flag
                self._host_only_cookies.add((hostname, name))
                domain = cookie["domain"] = hostname

            if domain.startswith("."):
                # Remove leading dot
                domain = domain[1:]
                cookie["domain"] = domain

            if hostname and not self._is_domain_match(domain, hostname):
                # Setting cookies for different domains is not allowed
                continue

            path = cookie["path"]
            if not path or not path.startswith("/"):
                # Set the cookie's path to the response path
                path = response_url.path
                if not path.startswith("/"):
                    path = "/"
                else:
                    # Cut everything from the last slash to the end
                    path = "/" + path[1 : path.rfind("/")]
                cookie["path"] = path

            max_age = cookie["max-age"]
            if max_age:
                try:
                    delta_seconds = int(max_age)
                    try:
                        max_age_expiration = datetime.datetime.now(
                            datetime.timezone.utc
                        ) + datetime.timedelta(seconds=delta_seconds)
                    except OverflowError:
                        max_age_expiration = self._max_time
                    self._expire_cookie(max_age_expiration, domain, path, name)
                except ValueError:
                    cookie["max-age"] = ""

            else:
                expires = cookie["expires"]
                if expires:
                    expire_time = self._parse_date(expires)
                    if expire_time:
                        self._expire_cookie(expire_time, domain, path, name)
                    else:
                        cookie["expires"] = ""

            self._cookies[(domain, path)][name] = cookie

        self._do_expiration()

    def filter_cookies(
        self, request_url: URL = URL()
    ) -> Union["BaseCookie[str]", "SimpleCookie[str]"]:
        """Returns this jar's cookies filtered by their attributes."""
        self._do_expiration()
        request_url = URL(request_url)
        filtered: Union["SimpleCookie[str]", "BaseCookie[str]"] = (
            SimpleCookie() if self._quote_cookie else BaseCookie()
        )
        hostname = request_url.raw_host or ""
        request_origin = URL()
        with contextlib.suppress(ValueError):
            request_origin = request_url.origin()

        is_not_secure = (
            request_url.scheme not in ("https", "wss")
            and request_origin not in self._treat_as_secure_origin
        )

        # Point 2: https://www.rfc-editor.org/rfc/rfc6265.html#section-5.4
        for cookie in sorted(self, key=lambda c: len(c["path"])):
            name = cookie.key
            domain = cookie["domain"]

            # Send shared cookies
            if not domain:
                filtered[name] = cookie.value
                continue

            if not self._unsafe and is_ip_address(hostname):
                continue

            if (domain, name) in self._host_only_cookies:
                if domain != hostname:
                    continue
            elif not self._is_domain_match(domain, hostname):
                continue

            if not self._is_path_match(request_url.path, cookie["path"]):
                continue

            if is_not_secure and cookie["secure"]:
                continue

            # It's critical we use the Morsel so the coded_value
            # (based on cookie version) is preserved
            mrsl_val = cast("Morsel[str]", cookie.get(cookie.key, Morsel()))
            mrsl_val.set(cookie.key, cookie.value, cookie.coded_value)
            filtered[name] = mrsl_val

        return filtered

    @staticmethod
    def _is_domain_match(domain: str, hostname: str) -> bool:
        """Implements domain matching adhering to RFC 6265."""
        if hostname == domain:
            return True

        if not hostname.endswith(domain):
            return False

        non_matching = hostname[: -len(domain)]

        if not non_matching.endswith("."):
            return False

        return not is_ip_address(hostname)

    @staticmethod
    def _is_path_match(req_path: str, cookie_path: str) -> bool:
        """Implements path matching adhering to RFC 6265."""
        if not req_path.startswith("/"):
            req_path = "/"

        if req_path == cookie_path:
            return True

        if not req_path.startswith(cookie_path):
            return False

        if cookie_path.endswith("/"):
            return True

        non_matching = req_path[len(cookie_path) :]

        return non_matching.startswith("/")

    @classmethod
    def _parse_date(cls, date_str: str) -> Optional[datetime.datetime]:
        """Implements date string parsing adhering to RFC 6265."""
        if not date_str:
            return None

        found_time = False
        found_day = False
        found_month = False
        found_year = False

        hour = minute = second = 0
        day = 0
        month = 0
        year = 0

        for token_match in cls.DATE_TOKENS_RE.finditer(date_str):

            token = token_match.group("token")

            if not found_time:
                time_match = cls.DATE_HMS_TIME_RE.match(token)
                if time_match:
                    found_time = True
                    hour, minute, second = (int(s) for s in time_match.groups())
                    continue

            if not found_day:
                day_match = cls.DATE_DAY_OF_MONTH_RE.match(token)
                if day_match:
                    found_day = True
                    day = int(day_match.group())
                    continue

            if not found_month:
                month_match = cls.DATE_MONTH_RE.match(token)
                if month_match:
                    found_month = True
                    assert month_match.lastindex is not None
                    month = month_match.lastindex
                    continue

            if not found_year:
                year_match = cls.DATE_YEAR_RE.match(token)
                if year_match:
                    found_year = True
                    year = int(year_match.group())

        if 70 <= year <= 99:
            year += 1900
        elif 0 <= year <= 69:
            year += 2000

        if False in (found_day, found_month, found_year, found_time):
            return None

        if not 1 <= day <= 31:
            return None

        if year < 1601 or hour > 23 or minute > 59 or second > 59:
            return None

        return datetime.datetime(
            year, month, day, hour, minute, second, tzinfo=datetime.timezone.utc
        )


class DummyCookieJar(AbstractCookieJar):
    """Implements a dummy cookie storage.

    It can be used with the ClientSession when no cookie processing is needed.
    """

    def __init__(self, *, loop: Optional[asyncio.AbstractEventLoop] = None) -> None:
        super().__init__(loop=loop)

    def __iter__(self) -> "Iterator[Morsel[str]]":
        while False:
            yield None

    def __len__(self) -> int:
        return 0

    def clear(self, predicate: Optional[ClearCookiePredicate] = None) -> None:
        pass

    def clear_domain(self, domain: str) -> None:
        pass

    def update_cookies(self, cookies: LooseCookies, response_url: URL = URL()) -> None:
        pass

    def filter_cookies(self, request_url: URL) -> "BaseCookie[str]":
        return SimpleCookie()
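An illustrative sketch (the URL and file name are hypothetical): the pickle-based save()/load() helpers above persist a jar between runs, and unsafe=True also accepts cookies from bare IP hosts, which update_cookies() otherwise rejects.

# Illustrative only: persisting cookies with the CookieJar above.
import aiohttp

async def login_and_save() -> None:
    jar = aiohttp.CookieJar(unsafe=True)  # allow cookies from IP hosts
    async with aiohttp.ClientSession(cookie_jar=jar) as session:
        await session.get("http://127.0.0.1:8080/login")  # hypothetical
    jar.save("cookies.pickle")

    fresh = aiohttp.CookieJar(unsafe=True)
    fresh.load("cookies.pickle")  # restores the pickled _cookies mapping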
172
.venv/lib/python3.7/site-packages/aiohttp/formdata.py
Normal file
@@ -0,0 +1,172 @@
import io
from typing import Any, Iterable, List, Optional
from urllib.parse import urlencode

from multidict import MultiDict, MultiDictProxy

from . import hdrs, multipart, payload
from .helpers import guess_filename
from .payload import Payload

__all__ = ("FormData",)


class FormData:
    """Helper class for form body generation.

    Supports multipart/form-data and application/x-www-form-urlencoded.
    """

    def __init__(
        self,
        fields: Iterable[Any] = (),
        quote_fields: bool = True,
        charset: Optional[str] = None,
    ) -> None:
        self._writer = multipart.MultipartWriter("form-data")
        self._fields: List[Any] = []
        self._is_multipart = False
        self._is_processed = False
        self._quote_fields = quote_fields
        self._charset = charset

        if isinstance(fields, dict):
            fields = list(fields.items())
        elif not isinstance(fields, (list, tuple)):
            fields = (fields,)
        self.add_fields(*fields)

    @property
    def is_multipart(self) -> bool:
        return self._is_multipart

    def add_field(
        self,
        name: str,
        value: Any,
        *,
        content_type: Optional[str] = None,
        filename: Optional[str] = None,
        content_transfer_encoding: Optional[str] = None,
    ) -> None:

        if isinstance(value, io.IOBase):
            self._is_multipart = True
        elif isinstance(value, (bytes, bytearray, memoryview)):
            if filename is None and content_transfer_encoding is None:
                filename = name

        type_options: MultiDict[str] = MultiDict({"name": name})
        if filename is not None and not isinstance(filename, str):
            raise TypeError(
                "filename must be an instance of str. Got: %s" % filename
            )
        if filename is None and isinstance(value, io.IOBase):
            filename = guess_filename(value, name)
        if filename is not None:
            type_options["filename"] = filename
            self._is_multipart = True

        headers = {}
        if content_type is not None:
            if not isinstance(content_type, str):
                raise TypeError(
                    "content_type must be an instance of str. Got: %s" % content_type
                )
            headers[hdrs.CONTENT_TYPE] = content_type
            self._is_multipart = True
        if content_transfer_encoding is not None:
            if not isinstance(content_transfer_encoding, str):
                raise TypeError(
                    "content_transfer_encoding must be an instance"
                    " of str. Got: %s" % content_transfer_encoding
                )
            headers[hdrs.CONTENT_TRANSFER_ENCODING] = content_transfer_encoding
            self._is_multipart = True

        self._fields.append((type_options, headers, value))

    def add_fields(self, *fields: Any) -> None:
        to_add = list(fields)

        while to_add:
            rec = to_add.pop(0)

            if isinstance(rec, io.IOBase):
                k = guess_filename(rec, "unknown")
                self.add_field(k, rec)  # type: ignore[arg-type]

            elif isinstance(rec, (MultiDictProxy, MultiDict)):
                to_add.extend(rec.items())

            elif isinstance(rec, (list, tuple)) and len(rec) == 2:
                k, fp = rec
                self.add_field(k, fp)  # type: ignore[arg-type]

            else:
                raise TypeError(
                    "Only io.IOBase, multidict and (name, file) "
                    "pairs allowed, use .add_field() for passing "
                    "more complex parameters, got {!r}".format(rec)
                )

    def _gen_form_urlencoded(self) -> payload.BytesPayload:
        # form data (x-www-form-urlencoded)
        data = []
        for type_options, _, value in self._fields:
            data.append((type_options["name"], value))

        charset = self._charset if self._charset is not None else "utf-8"

        if charset == "utf-8":
            content_type = "application/x-www-form-urlencoded"
        else:
            content_type = "application/x-www-form-urlencoded; charset=%s" % charset

        return payload.BytesPayload(
            urlencode(data, doseq=True, encoding=charset).encode(),
            content_type=content_type,
        )

    def _gen_form_data(self) -> multipart.MultipartWriter:
        """Encode a list of fields using the multipart/form-data MIME format"""
        if self._is_processed:
            raise RuntimeError("Form data has been processed already")
        for dispparams, headers, value in self._fields:
            try:
                if hdrs.CONTENT_TYPE in headers:
                    part = payload.get_payload(
                        value,
                        content_type=headers[hdrs.CONTENT_TYPE],
                        headers=headers,
                        encoding=self._charset,
                    )
                else:
                    part = payload.get_payload(
                        value, headers=headers, encoding=self._charset
                    )
            except Exception as exc:
                raise TypeError(
                    "Can not serialize value type: %r\n "
                    "headers: %r\n value: %r" % (type(value), headers, value)
                ) from exc

            if dispparams:
                part.set_content_disposition(
                    "form-data", quote_fields=self._quote_fields, **dispparams
                )
                # FIXME cgi.FieldStorage doesn't like body parts with
                # Content-Length which were sent via chunked transfer encoding
                assert part.headers is not None
                part.headers.popall(hdrs.CONTENT_LENGTH, None)

            self._writer.append_payload(part)

        self._is_processed = True
        return self._writer

    def __call__(self) -> Payload:
        if self._is_multipart:
            return self._gen_form_data()
        else:
            return self._gen_form_urlencoded()
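An illustrative sketch (the file name is hypothetical): adding a file-like value, a filename, or a content_type flips _is_multipart, so __call__() emits a multipart/form-data body instead of the default x-www-form-urlencoded one.

# Illustrative only: building a request body with the FormData class above.
import aiohttp

async def upload(session: aiohttp.ClientSession, url: str) -> None:
    form = aiohttp.FormData()
    form.add_field("note", "hello")  # plain str fields stay urlencodable
    with open("report.pdf", "rb") as f:  # hypothetical local file
        # an io.IOBase value forces the multipart code path above
        form.add_field("file", f, filename="report.pdf",
                       content_type="application/pdf")
        async with session.post(url, data=form) as resp:
            resp.raise_for_status()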
114
.venv/lib/python3.7/site-packages/aiohttp/hdrs.py
Normal file
@@ -0,0 +1,114 @@
"""HTTP Headers constants."""

# After changing the file content call ./tools/gen.py
# to regenerate the headers parser
import sys
from typing import Set

from multidict import istr

if sys.version_info >= (3, 8):
    from typing import Final
else:
    from typing_extensions import Final

METH_ANY: Final[str] = "*"
METH_CONNECT: Final[str] = "CONNECT"
METH_HEAD: Final[str] = "HEAD"
METH_GET: Final[str] = "GET"
METH_DELETE: Final[str] = "DELETE"
METH_OPTIONS: Final[str] = "OPTIONS"
METH_PATCH: Final[str] = "PATCH"
METH_POST: Final[str] = "POST"
METH_PUT: Final[str] = "PUT"
METH_TRACE: Final[str] = "TRACE"

METH_ALL: Final[Set[str]] = {
    METH_CONNECT,
    METH_HEAD,
    METH_GET,
    METH_DELETE,
    METH_OPTIONS,
    METH_PATCH,
    METH_POST,
    METH_PUT,
    METH_TRACE,
}

ACCEPT: Final[istr] = istr("Accept")
ACCEPT_CHARSET: Final[istr] = istr("Accept-Charset")
ACCEPT_ENCODING: Final[istr] = istr("Accept-Encoding")
ACCEPT_LANGUAGE: Final[istr] = istr("Accept-Language")
ACCEPT_RANGES: Final[istr] = istr("Accept-Ranges")
ACCESS_CONTROL_MAX_AGE: Final[istr] = istr("Access-Control-Max-Age")
ACCESS_CONTROL_ALLOW_CREDENTIALS: Final[istr] = istr("Access-Control-Allow-Credentials")
ACCESS_CONTROL_ALLOW_HEADERS: Final[istr] = istr("Access-Control-Allow-Headers")
ACCESS_CONTROL_ALLOW_METHODS: Final[istr] = istr("Access-Control-Allow-Methods")
ACCESS_CONTROL_ALLOW_ORIGIN: Final[istr] = istr("Access-Control-Allow-Origin")
ACCESS_CONTROL_EXPOSE_HEADERS: Final[istr] = istr("Access-Control-Expose-Headers")
ACCESS_CONTROL_REQUEST_HEADERS: Final[istr] = istr("Access-Control-Request-Headers")
ACCESS_CONTROL_REQUEST_METHOD: Final[istr] = istr("Access-Control-Request-Method")
AGE: Final[istr] = istr("Age")
ALLOW: Final[istr] = istr("Allow")
AUTHORIZATION: Final[istr] = istr("Authorization")
CACHE_CONTROL: Final[istr] = istr("Cache-Control")
CONNECTION: Final[istr] = istr("Connection")
CONTENT_DISPOSITION: Final[istr] = istr("Content-Disposition")
CONTENT_ENCODING: Final[istr] = istr("Content-Encoding")
CONTENT_LANGUAGE: Final[istr] = istr("Content-Language")
CONTENT_LENGTH: Final[istr] = istr("Content-Length")
CONTENT_LOCATION: Final[istr] = istr("Content-Location")
CONTENT_MD5: Final[istr] = istr("Content-MD5")
CONTENT_RANGE: Final[istr] = istr("Content-Range")
CONTENT_TRANSFER_ENCODING: Final[istr] = istr("Content-Transfer-Encoding")
CONTENT_TYPE: Final[istr] = istr("Content-Type")
COOKIE: Final[istr] = istr("Cookie")
DATE: Final[istr] = istr("Date")
DESTINATION: Final[istr] = istr("Destination")
DIGEST: Final[istr] = istr("Digest")
ETAG: Final[istr] = istr("Etag")
EXPECT: Final[istr] = istr("Expect")
EXPIRES: Final[istr] = istr("Expires")
FORWARDED: Final[istr] = istr("Forwarded")
FROM: Final[istr] = istr("From")
HOST: Final[istr] = istr("Host")
IF_MATCH: Final[istr] = istr("If-Match")
IF_MODIFIED_SINCE: Final[istr] = istr("If-Modified-Since")
IF_NONE_MATCH: Final[istr] = istr("If-None-Match")
IF_RANGE: Final[istr] = istr("If-Range")
IF_UNMODIFIED_SINCE: Final[istr] = istr("If-Unmodified-Since")
KEEP_ALIVE: Final[istr] = istr("Keep-Alive")
LAST_EVENT_ID: Final[istr] = istr("Last-Event-ID")
LAST_MODIFIED: Final[istr] = istr("Last-Modified")
LINK: Final[istr] = istr("Link")
LOCATION: Final[istr] = istr("Location")
MAX_FORWARDS: Final[istr] = istr("Max-Forwards")
ORIGIN: Final[istr] = istr("Origin")
PRAGMA: Final[istr] = istr("Pragma")
PROXY_AUTHENTICATE: Final[istr] = istr("Proxy-Authenticate")
PROXY_AUTHORIZATION: Final[istr] = istr("Proxy-Authorization")
RANGE: Final[istr] = istr("Range")
REFERER: Final[istr] = istr("Referer")
RETRY_AFTER: Final[istr] = istr("Retry-After")
SEC_WEBSOCKET_ACCEPT: Final[istr] = istr("Sec-WebSocket-Accept")
SEC_WEBSOCKET_VERSION: Final[istr] = istr("Sec-WebSocket-Version")
SEC_WEBSOCKET_PROTOCOL: Final[istr] = istr("Sec-WebSocket-Protocol")
SEC_WEBSOCKET_EXTENSIONS: Final[istr] = istr("Sec-WebSocket-Extensions")
SEC_WEBSOCKET_KEY: Final[istr] = istr("Sec-WebSocket-Key")
SEC_WEBSOCKET_KEY1: Final[istr] = istr("Sec-WebSocket-Key1")
SERVER: Final[istr] = istr("Server")
SET_COOKIE: Final[istr] = istr("Set-Cookie")
TE: Final[istr] = istr("TE")
TRAILER: Final[istr] = istr("Trailer")
TRANSFER_ENCODING: Final[istr] = istr("Transfer-Encoding")
UPGRADE: Final[istr] = istr("Upgrade")
URI: Final[istr] = istr("URI")
USER_AGENT: Final[istr] = istr("User-Agent")
VARY: Final[istr] = istr("Vary")
VIA: Final[istr] = istr("Via")
WANT_DIGEST: Final[istr] = istr("Want-Digest")
WARNING: Final[istr] = istr("Warning")
WWW_AUTHENTICATE: Final[istr] = istr("WWW-Authenticate")
X_FORWARDED_FOR: Final[istr] = istr("X-Forwarded-For")
X_FORWARDED_HOST: Final[istr] = istr("X-Forwarded-Host")
X_FORWARDED_PROTO: Final[istr] = istr("X-Forwarded-Proto")
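
The constants above are multidict `istr` instances, so header lookups match any casing. A minimal usage sketch (not part of the commit, just an illustration assuming the vendored virtualenv above is active):

    from multidict import CIMultiDict
    from aiohttp import hdrs

    # istr compares case-insensitively, so any header casing matches
    headers = CIMultiDict({"content-type": "application/json"})
    assert headers[hdrs.CONTENT_TYPE] == "application/json"
    assert hdrs.METH_GET in hdrs.METH_ALL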
881
.venv/lib/python3.7/site-packages/aiohttp/helpers.py
Normal file
@@ -0,0 +1,881 @@
"""Various helper functions"""

import asyncio
import base64
import binascii
import contextlib
import datetime
import functools
import inspect
import netrc
import os
import platform
import re
import sys
import time
import warnings
import weakref
from collections import namedtuple
from contextlib import suppress
from email.parser import HeaderParser
from email.utils import parsedate
from math import ceil
from pathlib import Path
from types import TracebackType
from typing import (
    Any,
    Callable,
    ContextManager,
    Dict,
    Generator,
    Generic,
    Iterable,
    Iterator,
    List,
    Mapping,
    Optional,
    Pattern,
    Set,
    Tuple,
    Type,
    TypeVar,
    Union,
    cast,
)
from urllib.parse import quote
from urllib.request import getproxies, proxy_bypass

import async_timeout
import attr
from multidict import MultiDict, MultiDictProxy
from yarl import URL

from . import hdrs
from .log import client_logger, internal_logger
from .typedefs import PathLike, Protocol  # noqa

__all__ = ("BasicAuth", "ChainMapProxy", "ETag")

IS_MACOS = platform.system() == "Darwin"
IS_WINDOWS = platform.system() == "Windows"

PY_36 = sys.version_info >= (3, 6)
PY_37 = sys.version_info >= (3, 7)
PY_38 = sys.version_info >= (3, 8)
PY_310 = sys.version_info >= (3, 10)
PY_311 = sys.version_info >= (3, 11)

if sys.version_info < (3, 7):
    import idna_ssl

    idna_ssl.patch_match_hostname()

    def all_tasks(
        loop: Optional[asyncio.AbstractEventLoop] = None,
    ) -> Set["asyncio.Task[Any]"]:
        tasks = list(asyncio.Task.all_tasks(loop))
        return {t for t in tasks if not t.done()}

else:
    all_tasks = asyncio.all_tasks


_T = TypeVar("_T")
_S = TypeVar("_S")


sentinel: Any = object()
NO_EXTENSIONS: bool = bool(os.environ.get("AIOHTTP_NO_EXTENSIONS"))

# N.B. sys.flags.dev_mode is available on Python 3.7+, use getattr
# for compatibility with older versions
DEBUG: bool = getattr(sys.flags, "dev_mode", False) or (
    not sys.flags.ignore_environment and bool(os.environ.get("PYTHONASYNCIODEBUG"))
)


CHAR = {chr(i) for i in range(0, 128)}
CTL = {chr(i) for i in range(0, 32)} | {
    chr(127),
}
SEPARATORS = {
    "(",
    ")",
    "<",
    ">",
    "@",
    ",",
    ";",
    ":",
    "\\",
    '"',
    "/",
    "[",
    "]",
    "?",
    "=",
    "{",
    "}",
    " ",
    chr(9),
}
TOKEN = CHAR ^ CTL ^ SEPARATORS


class noop:
    def __await__(self) -> Generator[None, None, None]:
        yield


class BasicAuth(namedtuple("BasicAuth", ["login", "password", "encoding"])):
    """HTTP basic authentication helper."""

    def __new__(
        cls, login: str, password: str = "", encoding: str = "latin1"
    ) -> "BasicAuth":
        if login is None:
            raise ValueError("None is not allowed as login value")

        if password is None:
            raise ValueError("None is not allowed as password value")

        if ":" in login:
            raise ValueError('A ":" is not allowed in login (RFC 1945#section-11.1)')

        return super().__new__(cls, login, password, encoding)

    @classmethod
    def decode(cls, auth_header: str, encoding: str = "latin1") -> "BasicAuth":
        """Create a BasicAuth object from an Authorization HTTP header."""
        try:
            auth_type, encoded_credentials = auth_header.split(" ", 1)
        except ValueError:
            raise ValueError("Could not parse authorization header.")

        if auth_type.lower() != "basic":
            raise ValueError("Unknown authorization method %s" % auth_type)

        try:
            decoded = base64.b64decode(
                encoded_credentials.encode("ascii"), validate=True
            ).decode(encoding)
        except binascii.Error:
            raise ValueError("Invalid base64 encoding.")

        try:
            # RFC 2617 HTTP Authentication
            # https://www.ietf.org/rfc/rfc2617.txt
            # the colon must be present, but the username and password may be
            # otherwise blank.
            username, password = decoded.split(":", 1)
        except ValueError:
            raise ValueError("Invalid credentials.")

        return cls(username, password, encoding=encoding)

    @classmethod
    def from_url(cls, url: URL, *, encoding: str = "latin1") -> Optional["BasicAuth"]:
        """Create BasicAuth from url."""
        if not isinstance(url, URL):
            raise TypeError("url should be yarl.URL instance")
        if url.user is None:
            return None
        return cls(url.user, url.password or "", encoding=encoding)

    def encode(self) -> str:
        """Encode credentials."""
        creds = (f"{self.login}:{self.password}").encode(self.encoding)
        return "Basic %s" % base64.b64encode(creds).decode(self.encoding)


def strip_auth_from_url(url: URL) -> Tuple[URL, Optional[BasicAuth]]:
    auth = BasicAuth.from_url(url)
    if auth is None:
        return url, None
    else:
        return url.with_user(None), auth


def netrc_from_env() -> Optional[netrc.netrc]:
    """Load netrc from file.

    Attempt to load it from the path specified by the env-var
    NETRC or in the default location in the user's home directory.

    Returns None if it couldn't be found or fails to parse.
    """
    netrc_env = os.environ.get("NETRC")

    if netrc_env is not None:
        netrc_path = Path(netrc_env)
    else:
        try:
            home_dir = Path.home()
        except RuntimeError as e:  # pragma: no cover
            # if pathlib can't resolve home, it may raise a RuntimeError
            client_logger.debug(
                "Could not resolve home directory when "
                "trying to look for .netrc file: %s",
                e,
            )
            return None

        netrc_path = home_dir / ("_netrc" if IS_WINDOWS else ".netrc")

    try:
        return netrc.netrc(str(netrc_path))
    except netrc.NetrcParseError as e:
        client_logger.warning("Could not parse .netrc file: %s", e)
    except OSError as e:
        netrc_exists = False
        with contextlib.suppress(OSError):
            netrc_exists = netrc_path.is_file()
        # we couldn't read the file (doesn't exist, permissions, etc.)
        if netrc_env or netrc_exists:
            # only warn if the environment wanted us to load it,
            # or it appears like the default file does actually exist
            client_logger.warning("Could not read .netrc file: %s", e)

    return None


@attr.s(auto_attribs=True, frozen=True, slots=True)
class ProxyInfo:
    proxy: URL
    proxy_auth: Optional[BasicAuth]


def proxies_from_env() -> Dict[str, ProxyInfo]:
    proxy_urls = {
        k: URL(v)
        for k, v in getproxies().items()
        if k in ("http", "https", "ws", "wss")
    }
    netrc_obj = netrc_from_env()
    stripped = {k: strip_auth_from_url(v) for k, v in proxy_urls.items()}
    ret = {}
    for proto, val in stripped.items():
        proxy, auth = val
        if proxy.scheme in ("https", "wss"):
            client_logger.warning(
                "%s proxies %s are not supported, ignoring", proxy.scheme.upper(), proxy
            )
            continue
        if netrc_obj and auth is None:
            auth_from_netrc = None
            if proxy.host is not None:
                auth_from_netrc = netrc_obj.authenticators(proxy.host)
            if auth_from_netrc is not None:
                # auth_from_netrc is a (`user`, `account`, `password`) tuple,
                # `user` and `account` both can be username,
                # if `user` is None, use `account`
                *logins, password = auth_from_netrc
                login = logins[0] if logins[0] else logins[-1]
                auth = BasicAuth(cast(str, login), cast(str, password))
        ret[proto] = ProxyInfo(proxy, auth)
    return ret


def current_task(
    loop: Optional[asyncio.AbstractEventLoop] = None,
) -> "Optional[asyncio.Task[Any]]":
    if sys.version_info >= (3, 7):
        return asyncio.current_task(loop=loop)
    else:
        return asyncio.Task.current_task(loop=loop)


def get_running_loop(
    loop: Optional[asyncio.AbstractEventLoop] = None,
) -> asyncio.AbstractEventLoop:
    if loop is None:
        loop = asyncio.get_event_loop()
    if not loop.is_running():
        warnings.warn(
            "The object should be created within an async function",
            DeprecationWarning,
            stacklevel=3,
        )
        if loop.get_debug():
            internal_logger.warning(
                "The object should be created within an async function", stack_info=True
            )
    return loop


def isasyncgenfunction(obj: Any) -> bool:
    func = getattr(inspect, "isasyncgenfunction", None)
    if func is not None:
        return func(obj)  # type: ignore[no-any-return]
    else:
        return False


def get_env_proxy_for_url(url: URL) -> Tuple[URL, Optional[BasicAuth]]:
    """Get a permitted proxy for the given URL from the env."""
    if url.host is not None and proxy_bypass(url.host):
        raise LookupError(f"Proxying is disallowed for `{url.host!r}`")

    proxies_in_env = proxies_from_env()
    try:
        proxy_info = proxies_in_env[url.scheme]
    except KeyError:
        raise LookupError(f"No proxies found for `{url!s}` in the env")
    else:
        return proxy_info.proxy, proxy_info.proxy_auth


@attr.s(auto_attribs=True, frozen=True, slots=True)
class MimeType:
    type: str
    subtype: str
    suffix: str
    parameters: "MultiDictProxy[str]"


@functools.lru_cache(maxsize=56)
def parse_mimetype(mimetype: str) -> MimeType:
    """Parses a MIME type into its components.

    mimetype is a MIME type string.

    Returns a MimeType object.

    Example:

    >>> parse_mimetype('text/html; charset=utf-8')
    MimeType(type='text', subtype='html', suffix='',
             parameters={'charset': 'utf-8'})

    """
    if not mimetype:
        return MimeType(
            type="", subtype="", suffix="", parameters=MultiDictProxy(MultiDict())
        )

    parts = mimetype.split(";")
    params: MultiDict[str] = MultiDict()
    for item in parts[1:]:
        if not item:
            continue
        key, value = cast(
            Tuple[str, str], item.split("=", 1) if "=" in item else (item, "")
        )
        params.add(key.lower().strip(), value.strip(' "'))

    fulltype = parts[0].strip().lower()
    if fulltype == "*":
        fulltype = "*/*"

    mtype, stype = (
        cast(Tuple[str, str], fulltype.split("/", 1))
        if "/" in fulltype
        else (fulltype, "")
    )
    stype, suffix = (
        cast(Tuple[str, str], stype.split("+", 1)) if "+" in stype else (stype, "")
    )

    return MimeType(
        type=mtype, subtype=stype, suffix=suffix, parameters=MultiDictProxy(params)
    )


def guess_filename(obj: Any, default: Optional[str] = None) -> Optional[str]:
    name = getattr(obj, "name", None)
    if name and isinstance(name, str) and name[0] != "<" and name[-1] != ">":
        return Path(name).name
    return default


not_qtext_re = re.compile(r"[^\041\043-\133\135-\176]")
QCONTENT = {chr(i) for i in range(0x20, 0x7F)} | {"\t"}


def quoted_string(content: str) -> str:
    """Return 7-bit content as quoted-string.

    Format content into a quoted-string as defined in RFC 5322 for
    Internet Message Format. Notice that this is not the 8-bit HTTP
    format, but the 7-bit email format. Content must be in US-ASCII or
    a ValueError is raised.
    """
    if not (QCONTENT > set(content)):
        raise ValueError(f"bad content for quoted-string {content!r}")
    return not_qtext_re.sub(lambda x: "\\" + x.group(0), content)


def content_disposition_header(
    disptype: str, quote_fields: bool = True, _charset: str = "utf-8", **params: str
) -> str:
    """Sets ``Content-Disposition`` header for MIME.

    This is the MIME payload Content-Disposition header from RFC 2183
    and RFC 7578 section 4.2, not the HTTP Content-Disposition from
    RFC 6266.

    disptype is a disposition type: inline, attachment, form-data.
    Should be a valid extension token (see RFC 2183).

    quote_fields performs value quoting to 7-bit MIME headers
    according to RFC 7578. Set quote_fields to False if the recipient
    can take 8-bit file names and field values.

    _charset specifies the charset to use when quote_fields is True.

    params is a dict with disposition params.
    """
    if not disptype or not (TOKEN > set(disptype)):
        raise ValueError("bad content disposition type {!r}".format(disptype))

    value = disptype
    if params:
        lparams = []
        for key, val in params.items():
            if not key or not (TOKEN > set(key)):
                raise ValueError(
                    "bad content disposition parameter {!r}={!r}".format(key, val)
                )
            if quote_fields:
                if key.lower() == "filename":
                    qval = quote(val, "", encoding=_charset)
                    lparams.append((key, '"%s"' % qval))
                else:
                    try:
                        qval = quoted_string(val)
                    except ValueError:
                        qval = "".join(
                            (_charset, "''", quote(val, "", encoding=_charset))
                        )
                        lparams.append((key + "*", qval))
                    else:
                        lparams.append((key, '"%s"' % qval))
            else:
                qval = val.replace("\\", "\\\\").replace('"', '\\"')
                lparams.append((key, '"%s"' % qval))
        sparams = "; ".join("=".join(pair) for pair in lparams)
        value = "; ".join((value, sparams))
    return value


class _TSelf(Protocol, Generic[_T]):
    _cache: Dict[str, _T]


class reify(Generic[_T]):
    """Use as a class method decorator.

    It operates almost exactly like
    the Python `@property` decorator, but it puts the result of the
    method it decorates into the instance dict after the first call,
    effectively replacing the function it decorates with an instance
    variable. It is, in Python parlance, a data descriptor.
    """

    def __init__(self, wrapped: Callable[..., _T]) -> None:
        self.wrapped = wrapped
        self.__doc__ = wrapped.__doc__
        self.name = wrapped.__name__

    def __get__(self, inst: _TSelf[_T], owner: Optional[Type[Any]] = None) -> _T:
        try:
            try:
                return inst._cache[self.name]
            except KeyError:
                val = self.wrapped(inst)
                inst._cache[self.name] = val
                return val
        except AttributeError:
            if inst is None:
                return self
            raise

    def __set__(self, inst: _TSelf[_T], value: _T) -> None:
        raise AttributeError("reified property is read-only")


reify_py = reify

try:
    from ._helpers import reify as reify_c

    if not NO_EXTENSIONS:
        reify = reify_c  # type: ignore[misc,assignment]
except ImportError:
    pass

_ipv4_pattern = (
    r"^(?:(?:25[0-5]|2[0-4][0-9]|[01]?[0-9][0-9]?)\.){3}"
    r"(?:25[0-5]|2[0-4][0-9]|[01]?[0-9][0-9]?)$"
)
_ipv6_pattern = (
    r"^(?:(?:(?:[A-F0-9]{1,4}:){6}|(?=(?:[A-F0-9]{0,4}:){0,6}"
    r"(?:[0-9]{1,3}\.){3}[0-9]{1,3}$)(([0-9A-F]{1,4}:){0,5}|:)"
    r"((:[0-9A-F]{1,4}){1,5}:|:)|::(?:[A-F0-9]{1,4}:){5})"
    r"(?:(?:25[0-5]|2[0-4][0-9]|1[0-9][0-9]|[1-9]?[0-9])\.){3}"
    r"(?:25[0-5]|2[0-4][0-9]|1[0-9][0-9]|[1-9]?[0-9])|(?:[A-F0-9]{1,4}:){7}"
    r"[A-F0-9]{1,4}|(?=(?:[A-F0-9]{0,4}:){0,7}[A-F0-9]{0,4}$)"
    r"(([0-9A-F]{1,4}:){1,7}|:)((:[0-9A-F]{1,4}){1,7}|:)|(?:[A-F0-9]{1,4}:){7}"
    r":|:(:[A-F0-9]{1,4}){7})$"
)
_ipv4_regex = re.compile(_ipv4_pattern)
_ipv6_regex = re.compile(_ipv6_pattern, flags=re.IGNORECASE)
_ipv4_regexb = re.compile(_ipv4_pattern.encode("ascii"))
_ipv6_regexb = re.compile(_ipv6_pattern.encode("ascii"), flags=re.IGNORECASE)


def _is_ip_address(
    regex: Pattern[str], regexb: Pattern[bytes], host: Optional[Union[str, bytes]]
) -> bool:
    if host is None:
        return False
    if isinstance(host, str):
        return bool(regex.match(host))
    elif isinstance(host, (bytes, bytearray, memoryview)):
        return bool(regexb.match(host))
    else:
        raise TypeError(f"{host} [{type(host)}] is not a str or bytes")


is_ipv4_address = functools.partial(_is_ip_address, _ipv4_regex, _ipv4_regexb)
is_ipv6_address = functools.partial(_is_ip_address, _ipv6_regex, _ipv6_regexb)


def is_ip_address(host: Optional[Union[str, bytes, bytearray, memoryview]]) -> bool:
    return is_ipv4_address(host) or is_ipv6_address(host)


def next_whole_second() -> datetime.datetime:
    """Return the current time truncated to the whole second."""
    return datetime.datetime.now(datetime.timezone.utc).replace(
        microsecond=0
    ) + datetime.timedelta(seconds=0)


_cached_current_datetime: Optional[int] = None
_cached_formatted_datetime = ""


def rfc822_formatted_time() -> str:
    global _cached_current_datetime
    global _cached_formatted_datetime

    now = int(time.time())
    if now != _cached_current_datetime:
        # Weekday and month names for HTTP date/time formatting;
        # always English!
        # Tuples are constants stored in codeobject!
        _weekdayname = ("Mon", "Tue", "Wed", "Thu", "Fri", "Sat", "Sun")
        _monthname = (
            "",  # Dummy so we can use 1-based month numbers
            "Jan",
            "Feb",
            "Mar",
            "Apr",
            "May",
            "Jun",
            "Jul",
            "Aug",
            "Sep",
            "Oct",
            "Nov",
            "Dec",
        )

        year, month, day, hh, mm, ss, wd, *tail = time.gmtime(now)
        _cached_formatted_datetime = "%s, %02d %3s %4d %02d:%02d:%02d GMT" % (
            _weekdayname[wd],
            day,
            _monthname[month],
            year,
            hh,
            mm,
            ss,
        )
        _cached_current_datetime = now
    return _cached_formatted_datetime


def _weakref_handle(info: "Tuple[weakref.ref[object], str]") -> None:
    ref, name = info
    ob = ref()
    if ob is not None:
        with suppress(Exception):
            getattr(ob, name)()


def weakref_handle(
    ob: object, name: str, timeout: float, loop: asyncio.AbstractEventLoop
) -> Optional[asyncio.TimerHandle]:
    if timeout is not None and timeout > 0:
        when = loop.time() + timeout
        if timeout >= 5:
            when = ceil(when)

        return loop.call_at(when, _weakref_handle, (weakref.ref(ob), name))
    return None


def call_later(
    cb: Callable[[], Any], timeout: float, loop: asyncio.AbstractEventLoop
) -> Optional[asyncio.TimerHandle]:
    if timeout is not None and timeout > 0:
        when = loop.time() + timeout
        if timeout > 5:
            when = ceil(when)
        return loop.call_at(when, cb)
    return None


class TimeoutHandle:
    """Timeout handle"""

    def __init__(
        self, loop: asyncio.AbstractEventLoop, timeout: Optional[float]
    ) -> None:
        self._timeout = timeout
        self._loop = loop
        self._callbacks: List[
            Tuple[Callable[..., None], Tuple[Any, ...], Dict[str, Any]]
        ] = []

    def register(
        self, callback: Callable[..., None], *args: Any, **kwargs: Any
    ) -> None:
        self._callbacks.append((callback, args, kwargs))

    def close(self) -> None:
        self._callbacks.clear()

    def start(self) -> Optional[asyncio.Handle]:
        timeout = self._timeout
        if timeout is not None and timeout > 0:
            when = self._loop.time() + timeout
            if timeout >= 5:
                when = ceil(when)
            return self._loop.call_at(when, self.__call__)
        else:
            return None

    def timer(self) -> "BaseTimerContext":
        if self._timeout is not None and self._timeout > 0:
            timer = TimerContext(self._loop)
            self.register(timer.timeout)
            return timer
        else:
            return TimerNoop()

    def __call__(self) -> None:
        for cb, args, kwargs in self._callbacks:
            with suppress(Exception):
                cb(*args, **kwargs)

        self._callbacks.clear()


class BaseTimerContext(ContextManager["BaseTimerContext"]):
    pass


class TimerNoop(BaseTimerContext):
    def __enter__(self) -> BaseTimerContext:
        return self

    def __exit__(
        self,
        exc_type: Optional[Type[BaseException]],
        exc_val: Optional[BaseException],
        exc_tb: Optional[TracebackType],
    ) -> None:
        return


class TimerContext(BaseTimerContext):
    """Low resolution timeout context manager"""

    def __init__(self, loop: asyncio.AbstractEventLoop) -> None:
        self._loop = loop
        self._tasks: List[asyncio.Task[Any]] = []
        self._cancelled = False

    def __enter__(self) -> BaseTimerContext:
        task = current_task(loop=self._loop)

        if task is None:
            raise RuntimeError(
                "Timeout context manager should be used inside a task"
            )

        if self._cancelled:
            raise asyncio.TimeoutError from None

        self._tasks.append(task)
        return self

    def __exit__(
        self,
        exc_type: Optional[Type[BaseException]],
        exc_val: Optional[BaseException],
        exc_tb: Optional[TracebackType],
    ) -> Optional[bool]:
        if self._tasks:
            self._tasks.pop()

        if exc_type is asyncio.CancelledError and self._cancelled:
            raise asyncio.TimeoutError from None
        return None

    def timeout(self) -> None:
        if not self._cancelled:
            for task in set(self._tasks):
                task.cancel()

            self._cancelled = True


def ceil_timeout(delay: Optional[float]) -> async_timeout.Timeout:
    if delay is None or delay <= 0:
        return async_timeout.timeout(None)

    loop = get_running_loop()
    now = loop.time()
    when = now + delay
    if delay > 5:
        when = ceil(when)
    return async_timeout.timeout_at(when)


class HeadersMixin:
    ATTRS = frozenset(["_content_type", "_content_dict", "_stored_content_type"])

    _content_type: Optional[str] = None
    _content_dict: Optional[Dict[str, str]] = None
    _stored_content_type = sentinel

    def _parse_content_type(self, raw: str) -> None:
        self._stored_content_type = raw
        if raw is None:
            # default value according to RFC 2616
            self._content_type = "application/octet-stream"
            self._content_dict = {}
        else:
            msg = HeaderParser().parsestr("Content-Type: " + raw)
            self._content_type = msg.get_content_type()
            params = msg.get_params()
            self._content_dict = dict(params[1:])  # First element is content type again

    @property
    def content_type(self) -> str:
        """The value of content part for Content-Type HTTP header."""
        raw = self._headers.get(hdrs.CONTENT_TYPE)  # type: ignore[attr-defined]
        if self._stored_content_type != raw:
            self._parse_content_type(raw)
        return self._content_type  # type: ignore[return-value]

    @property
    def charset(self) -> Optional[str]:
        """The value of charset part for Content-Type HTTP header."""
        raw = self._headers.get(hdrs.CONTENT_TYPE)  # type: ignore[attr-defined]
        if self._stored_content_type != raw:
            self._parse_content_type(raw)
        return self._content_dict.get("charset")  # type: ignore[union-attr]

    @property
    def content_length(self) -> Optional[int]:
        """The value of Content-Length HTTP header."""
        content_length = self._headers.get(  # type: ignore[attr-defined]
            hdrs.CONTENT_LENGTH
        )

        if content_length is not None:
            return int(content_length)
        else:
            return None


def set_result(fut: "asyncio.Future[_T]", result: _T) -> None:
    if not fut.done():
        fut.set_result(result)


def set_exception(fut: "asyncio.Future[_T]", exc: BaseException) -> None:
    if not fut.done():
        fut.set_exception(exc)


class ChainMapProxy(Mapping[str, Any]):
    __slots__ = ("_maps",)

    def __init__(self, maps: Iterable[Mapping[str, Any]]) -> None:
        self._maps = tuple(maps)

    def __init_subclass__(cls) -> None:
        raise TypeError(
            "Inheritance class {} from ChainMapProxy is forbidden".format(cls.__name__)
        )

    def __getitem__(self, key: str) -> Any:
        for mapping in self._maps:
            try:
                return mapping[key]
            except KeyError:
                pass
        raise KeyError(key)

    def get(self, key: str, default: Any = None) -> Any:
        return self[key] if key in self else default

    def __len__(self) -> int:
        # reuses stored hash values if possible
        return len(set().union(*self._maps))  # type: ignore[arg-type]

    def __iter__(self) -> Iterator[str]:
        d: Dict[str, Any] = {}
        for mapping in reversed(self._maps):
            # reuses stored hash values if possible
            d.update(mapping)
        return iter(d)

    def __contains__(self, key: object) -> bool:
        return any(key in m for m in self._maps)

    def __bool__(self) -> bool:
        return any(self._maps)

    def __repr__(self) -> str:
        content = ", ".join(map(repr, self._maps))
        return f"ChainMapProxy({content})"


# https://tools.ietf.org/html/rfc7232#section-2.3
_ETAGC = r"[!#-}\x80-\xff]+"
_ETAGC_RE = re.compile(_ETAGC)
_QUOTED_ETAG = rf'(W/)?"({_ETAGC})"'
QUOTED_ETAG_RE = re.compile(_QUOTED_ETAG)
LIST_QUOTED_ETAG_RE = re.compile(rf"({_QUOTED_ETAG})(?:\s*,\s*|$)|(.)")

ETAG_ANY = "*"


@attr.s(auto_attribs=True, frozen=True, slots=True)
class ETag:
    value: str
    is_weak: bool = False


def validate_etag_value(value: str) -> None:
    if value != ETAG_ANY and not _ETAGC_RE.fullmatch(value):
        raise ValueError(
            f"Value {value!r} is not a valid etag. Maybe it contains '\"'?"
        )


def parse_http_date(date_str: Optional[str]) -> Optional[datetime.datetime]:
    """Process a date string, return a datetime object"""
    if date_str is not None:
        timetuple = parsedate(date_str)
        if timetuple is not None:
            with suppress(ValueError):
                return datetime.datetime(*timetuple[:6], tzinfo=datetime.timezone.utc)
    return None
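
BasicAuth.encode() and BasicAuth.decode() above are inverses of each other, and from_url() pulls credentials out of a yarl URL. A minimal sketch (not part of the commit, purely illustrative):

    from yarl import URL
    from aiohttp.helpers import BasicAuth

    auth = BasicAuth("user", "secret")
    header = auth.encode()                 # 'Basic dXNlcjpzZWNyZXQ='
    assert BasicAuth.decode(header) == auth

    # credentials embedded in a URL round-trip the same way
    assert BasicAuth.from_url(URL("http://user:secret@example.com")) == auth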
70
.venv/lib/python3.7/site-packages/aiohttp/http.py
Normal file
@@ -0,0 +1,70 @@
import http.server
import sys
from typing import Mapping, Tuple

from . import __version__
from .http_exceptions import HttpProcessingError as HttpProcessingError
from .http_parser import (
    HeadersParser as HeadersParser,
    HttpParser as HttpParser,
    HttpRequestParser as HttpRequestParser,
    HttpResponseParser as HttpResponseParser,
    RawRequestMessage as RawRequestMessage,
    RawResponseMessage as RawResponseMessage,
)
from .http_websocket import (
    WS_CLOSED_MESSAGE as WS_CLOSED_MESSAGE,
    WS_CLOSING_MESSAGE as WS_CLOSING_MESSAGE,
    WS_KEY as WS_KEY,
    WebSocketError as WebSocketError,
    WebSocketReader as WebSocketReader,
    WebSocketWriter as WebSocketWriter,
    WSCloseCode as WSCloseCode,
    WSMessage as WSMessage,
    WSMsgType as WSMsgType,
    ws_ext_gen as ws_ext_gen,
    ws_ext_parse as ws_ext_parse,
)
from .http_writer import (
    HttpVersion as HttpVersion,
    HttpVersion10 as HttpVersion10,
    HttpVersion11 as HttpVersion11,
    StreamWriter as StreamWriter,
)

__all__ = (
    "HttpProcessingError",
    "RESPONSES",
    "SERVER_SOFTWARE",
    # .http_writer
    "StreamWriter",
    "HttpVersion",
    "HttpVersion10",
    "HttpVersion11",
    # .http_parser
    "HeadersParser",
    "HttpParser",
    "HttpRequestParser",
    "HttpResponseParser",
    "RawRequestMessage",
    "RawResponseMessage",
    # .http_websocket
    "WS_CLOSED_MESSAGE",
    "WS_CLOSING_MESSAGE",
    "WS_KEY",
    "WebSocketReader",
    "WebSocketWriter",
    "ws_ext_gen",
    "ws_ext_parse",
    "WSMessage",
    "WebSocketError",
    "WSMsgType",
    "WSCloseCode",
)


SERVER_SOFTWARE: str = "Python/{0[0]}.{0[1]} aiohttp/{1}".format(
    sys.version_info, __version__
)

RESPONSES: Mapping[int, Tuple[str, str]] = http.server.BaseHTTPRequestHandler.responses
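
RESPONSES simply re-exports the stdlib status table, so reason phrases can be looked up without a running server. Illustrative only:

    from aiohttp.http import RESPONSES, SERVER_SOFTWARE

    phrase, description = RESPONSES[404]
    print(phrase)           # 'Not Found'
    print(SERVER_SOFTWARE)  # e.g. 'Python/3.7 aiohttp/3.8.6'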
107
.venv/lib/python3.7/site-packages/aiohttp/http_exceptions.py
Normal file
@@ -0,0 +1,107 @@
"""Low-level http related exceptions."""


from textwrap import indent
from typing import Optional, Union

from .typedefs import _CIMultiDict

__all__ = ("HttpProcessingError",)


class HttpProcessingError(Exception):
    """HTTP error.

    Shortcut for raising HTTP errors with custom code, message and headers.

    code: HTTP Error code.
    message: (optional) Error message.
    headers: (optional) Headers to be sent in response, a list of pairs
    """

    code = 0
    message = ""
    headers = None

    def __init__(
        self,
        *,
        code: Optional[int] = None,
        message: str = "",
        headers: Optional[_CIMultiDict] = None,
    ) -> None:
        if code is not None:
            self.code = code
        self.headers = headers
        self.message = message

    def __str__(self) -> str:
        msg = indent(self.message, "  ")
        return f"{self.code}, message:\n{msg}"

    def __repr__(self) -> str:
        return f"<{self.__class__.__name__}: {self.code}, message={self.message!r}>"


class BadHttpMessage(HttpProcessingError):

    code = 400
    message = "Bad Request"

    def __init__(self, message: str, *, headers: Optional[_CIMultiDict] = None) -> None:
        super().__init__(message=message, headers=headers)
        self.args = (message,)


class HttpBadRequest(BadHttpMessage):

    code = 400
    message = "Bad Request"


class PayloadEncodingError(BadHttpMessage):
    """Base class for payload errors"""


class ContentEncodingError(PayloadEncodingError):
    """Content encoding error."""


class TransferEncodingError(PayloadEncodingError):
    """Transfer encoding error."""


class ContentLengthError(PayloadEncodingError):
    """Not enough data to satisfy the Content-Length header."""


class LineTooLong(BadHttpMessage):
    def __init__(
        self, line: str, limit: str = "Unknown", actual_size: str = "Unknown"
    ) -> None:
        super().__init__(
            f"Got more than {limit} bytes ({actual_size}) when reading {line}."
        )
        self.args = (line, limit, actual_size)


class InvalidHeader(BadHttpMessage):
    def __init__(self, hdr: Union[bytes, str]) -> None:
        if isinstance(hdr, bytes):
            hdr = hdr.decode("utf-8", "surrogateescape")
        super().__init__(f"Invalid HTTP Header: {hdr}")
        self.hdr = hdr
        self.args = (hdr,)


class BadStatusLine(BadHttpMessage):
    def __init__(self, line: str = "", error: Optional[str] = None) -> None:
        if not isinstance(line, str):
            line = repr(line)
        super().__init__(error or f"Bad status line {line!r}")
        self.args = (line,)
        self.line = line


class InvalidURLError(BadHttpMessage):
    pass
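
Because the BadHttpMessage subclasses pre-set code and message as class attributes, raising them needs only the detail text. A hedged sketch of how the hierarchy behaves (illustration, not part of the commit):

    from aiohttp.http_exceptions import BadStatusLine, HttpProcessingError

    try:
        raise BadStatusLine("GET / HTTP/9.9")
    except HttpProcessingError as exc:
        assert exc.code == 400
        print(exc)  # "400, message:" followed by the indented detail text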
1025
.venv/lib/python3.7/site-packages/aiohttp/http_parser.py
Normal file
File diff suppressed because it is too large
701
.venv/lib/python3.7/site-packages/aiohttp/http_websocket.py
Normal file
@@ -0,0 +1,701 @@
|
||||
"""WebSocket protocol versions 13 and 8."""
|
||||
|
||||
import asyncio
|
||||
import collections
|
||||
import json
|
||||
import random
|
||||
import re
|
||||
import sys
|
||||
import zlib
|
||||
from enum import IntEnum
|
||||
from struct import Struct
|
||||
from typing import Any, Callable, List, Optional, Pattern, Set, Tuple, Union, cast
|
||||
|
||||
from .base_protocol import BaseProtocol
|
||||
from .helpers import NO_EXTENSIONS
|
||||
from .streams import DataQueue
|
||||
from .typedefs import Final
|
||||
|
||||
__all__ = (
|
||||
"WS_CLOSED_MESSAGE",
|
||||
"WS_CLOSING_MESSAGE",
|
||||
"WS_KEY",
|
||||
"WebSocketReader",
|
||||
"WebSocketWriter",
|
||||
"WSMessage",
|
||||
"WebSocketError",
|
||||
"WSMsgType",
|
||||
"WSCloseCode",
|
||||
)
|
||||
|
||||
|
||||
class WSCloseCode(IntEnum):
|
||||
OK = 1000
|
||||
GOING_AWAY = 1001
|
||||
PROTOCOL_ERROR = 1002
|
||||
UNSUPPORTED_DATA = 1003
|
||||
ABNORMAL_CLOSURE = 1006
|
||||
INVALID_TEXT = 1007
|
||||
POLICY_VIOLATION = 1008
|
||||
MESSAGE_TOO_BIG = 1009
|
||||
MANDATORY_EXTENSION = 1010
|
||||
INTERNAL_ERROR = 1011
|
||||
SERVICE_RESTART = 1012
|
||||
TRY_AGAIN_LATER = 1013
|
||||
BAD_GATEWAY = 1014
|
||||
|
||||
|
||||
ALLOWED_CLOSE_CODES: Final[Set[int]] = {int(i) for i in WSCloseCode}
|
||||
|
||||
|
||||
class WSMsgType(IntEnum):
|
||||
# websocket spec types
|
||||
CONTINUATION = 0x0
|
||||
TEXT = 0x1
|
||||
BINARY = 0x2
|
||||
PING = 0x9
|
||||
PONG = 0xA
|
||||
CLOSE = 0x8
|
||||
|
||||
# aiohttp specific types
|
||||
CLOSING = 0x100
|
||||
CLOSED = 0x101
|
||||
ERROR = 0x102
|
||||
|
||||
text = TEXT
|
||||
binary = BINARY
|
||||
ping = PING
|
||||
pong = PONG
|
||||
close = CLOSE
|
||||
closing = CLOSING
|
||||
closed = CLOSED
|
||||
error = ERROR
|
||||
|
||||
|
||||
WS_KEY: Final[bytes] = b"258EAFA5-E914-47DA-95CA-C5AB0DC85B11"
|
||||
|
||||
|
||||
UNPACK_LEN2 = Struct("!H").unpack_from
|
||||
UNPACK_LEN3 = Struct("!Q").unpack_from
|
||||
UNPACK_CLOSE_CODE = Struct("!H").unpack
|
||||
PACK_LEN1 = Struct("!BB").pack
|
||||
PACK_LEN2 = Struct("!BBH").pack
|
||||
PACK_LEN3 = Struct("!BBQ").pack
|
||||
PACK_CLOSE_CODE = Struct("!H").pack
|
||||
MSG_SIZE: Final[int] = 2**14
|
||||
DEFAULT_LIMIT: Final[int] = 2**16
|
||||
|
||||
|
||||
_WSMessageBase = collections.namedtuple("_WSMessageBase", ["type", "data", "extra"])
|
||||
|
||||
|
||||
class WSMessage(_WSMessageBase):
|
||||
def json(self, *, loads: Callable[[Any], Any] = json.loads) -> Any:
|
||||
"""Return parsed JSON data.
|
||||
|
||||
.. versionadded:: 0.22
|
||||
"""
|
||||
return loads(self.data)
|
||||
|
||||
|
||||
WS_CLOSED_MESSAGE = WSMessage(WSMsgType.CLOSED, None, None)
|
||||
WS_CLOSING_MESSAGE = WSMessage(WSMsgType.CLOSING, None, None)
|
||||
|
||||
|
||||
class WebSocketError(Exception):
|
||||
"""WebSocket protocol parser error."""
|
||||
|
||||
def __init__(self, code: int, message: str) -> None:
|
||||
self.code = code
|
||||
super().__init__(code, message)
|
||||
|
||||
def __str__(self) -> str:
|
||||
return cast(str, self.args[1])
|
||||
|
||||
|
||||
class WSHandshakeError(Exception):
|
||||
"""WebSocket protocol handshake error."""
|
||||
|
||||
|
||||
native_byteorder: Final[str] = sys.byteorder
|
||||
|
||||
|
||||
# Used by _websocket_mask_python
|
||||
_XOR_TABLE: Final[List[bytes]] = [bytes(a ^ b for a in range(256)) for b in range(256)]
|
||||
|
||||
|
||||
def _websocket_mask_python(mask: bytes, data: bytearray) -> None:
|
||||
"""Websocket masking function.
|
||||
|
||||
`mask` is a `bytes` object of length 4; `data` is a `bytearray`
|
||||
object of any length. The contents of `data` are masked with `mask`,
|
||||
as specified in section 5.3 of RFC 6455.
|
||||
|
||||
Note that this function mutates the `data` argument.
|
||||
|
||||
This pure-python implementation may be replaced by an optimized
|
||||
version when available.
|
||||
|
||||
"""
|
||||
assert isinstance(data, bytearray), data
|
||||
assert len(mask) == 4, mask
|
||||
|
||||
if data:
|
||||
a, b, c, d = (_XOR_TABLE[n] for n in mask)
|
||||
data[::4] = data[::4].translate(a)
|
||||
data[1::4] = data[1::4].translate(b)
|
||||
data[2::4] = data[2::4].translate(c)
|
||||
data[3::4] = data[3::4].translate(d)
|
||||
|
||||
|
||||
if NO_EXTENSIONS: # pragma: no cover
|
||||
_websocket_mask = _websocket_mask_python
|
||||
else:
|
||||
try:
|
||||
from ._websocket import _websocket_mask_cython # type: ignore[import]
|
||||
|
||||
_websocket_mask = _websocket_mask_cython
|
||||
except ImportError: # pragma: no cover
|
||||
_websocket_mask = _websocket_mask_python
|
||||
|
||||
_WS_DEFLATE_TRAILING: Final[bytes] = bytes([0x00, 0x00, 0xFF, 0xFF])
|
||||
|
||||
|
||||
_WS_EXT_RE: Final[Pattern[str]] = re.compile(
|
||||
r"^(?:;\s*(?:"
|
||||
r"(server_no_context_takeover)|"
|
||||
r"(client_no_context_takeover)|"
|
||||
r"(server_max_window_bits(?:=(\d+))?)|"
|
||||
r"(client_max_window_bits(?:=(\d+))?)))*$"
|
||||
)
|
||||
|
||||
_WS_EXT_RE_SPLIT: Final[Pattern[str]] = re.compile(r"permessage-deflate([^,]+)?")
|
||||
|
||||
|
||||
def ws_ext_parse(extstr: Optional[str], isserver: bool = False) -> Tuple[int, bool]:
|
||||
if not extstr:
|
||||
return 0, False
|
||||
|
||||
compress = 0
|
||||
notakeover = False
|
||||
for ext in _WS_EXT_RE_SPLIT.finditer(extstr):
|
||||
defext = ext.group(1)
|
||||
# Return compress = 15 when get `permessage-deflate`
|
||||
if not defext:
|
||||
compress = 15
|
||||
break
|
||||
match = _WS_EXT_RE.match(defext)
|
||||
if match:
|
||||
compress = 15
|
||||
if isserver:
|
||||
# Server never fail to detect compress handshake.
|
||||
# Server does not need to send max wbit to client
|
||||
if match.group(4):
|
||||
compress = int(match.group(4))
|
||||
# Group3 must match if group4 matches
|
||||
# Compress wbit 8 does not support in zlib
|
||||
# If compress level not support,
|
||||
# CONTINUE to next extension
|
||||
if compress > 15 or compress < 9:
|
||||
compress = 0
|
||||
continue
|
||||
if match.group(1):
|
||||
notakeover = True
|
||||
# Ignore regex group 5 & 6 for client_max_window_bits
|
||||
break
|
||||
else:
|
||||
if match.group(6):
|
||||
compress = int(match.group(6))
|
||||
# Group5 must match if group6 matches
|
||||
# Compress wbit 8 does not support in zlib
|
||||
# If compress level not support,
|
||||
# FAIL the parse progress
|
||||
if compress > 15 or compress < 9:
|
||||
raise WSHandshakeError("Invalid window size")
|
||||
if match.group(2):
|
||||
notakeover = True
|
||||
# Ignore regex group 5 & 6 for client_max_window_bits
|
||||
break
|
||||
# Return Fail if client side and not match
|
||||
elif not isserver:
|
||||
raise WSHandshakeError("Extension for deflate not supported" + ext.group(1))
|
||||
|
||||
return compress, notakeover
|
||||
|
||||
|
||||
def ws_ext_gen(
|
||||
compress: int = 15, isserver: bool = False, server_notakeover: bool = False
|
||||
) -> str:
|
||||
# client_notakeover=False not used for server
|
||||
# compress wbit 8 does not support in zlib
|
||||
if compress < 9 or compress > 15:
|
||||
raise ValueError(
|
||||
"Compress wbits must between 9 and 15, " "zlib does not support wbits=8"
|
||||
)
|
||||
enabledext = ["permessage-deflate"]
|
||||
if not isserver:
|
||||
enabledext.append("client_max_window_bits")
|
||||
|
||||
if compress < 15:
|
||||
enabledext.append("server_max_window_bits=" + str(compress))
|
||||
if server_notakeover:
|
||||
enabledext.append("server_no_context_takeover")
|
||||
# if client_notakeover:
|
||||
# enabledext.append('client_no_context_takeover')
|
||||
return "; ".join(enabledext)
|
||||
|
||||
|
||||
class WSParserState(IntEnum):
|
||||
READ_HEADER = 1
|
||||
READ_PAYLOAD_LENGTH = 2
|
||||
READ_PAYLOAD_MASK = 3
|
||||
READ_PAYLOAD = 4
|
||||
|
||||
|
||||
class WebSocketReader:
|
||||
def __init__(
|
||||
self, queue: DataQueue[WSMessage], max_msg_size: int, compress: bool = True
|
||||
) -> None:
|
||||
self.queue = queue
|
||||
self._max_msg_size = max_msg_size
|
||||
|
||||
self._exc: Optional[BaseException] = None
|
||||
self._partial = bytearray()
|
||||
self._state = WSParserState.READ_HEADER
|
||||
|
||||
self._opcode: Optional[int] = None
|
||||
self._frame_fin = False
|
||||
self._frame_opcode: Optional[int] = None
|
||||
self._frame_payload = bytearray()
|
||||
|
||||
self._tail = b""
|
||||
self._has_mask = False
|
||||
self._frame_mask: Optional[bytes] = None
|
||||
self._payload_length = 0
|
||||
self._payload_length_flag = 0
|
||||
self._compressed: Optional[bool] = None
|
||||
self._decompressobj: Any = None # zlib.decompressobj actually
|
||||
self._compress = compress
|
||||
|
||||
def feed_eof(self) -> None:
|
||||
self.queue.feed_eof()
|
||||
|
||||
def feed_data(self, data: bytes) -> Tuple[bool, bytes]:
|
||||
if self._exc:
|
||||
return True, data
|
||||
|
||||
try:
|
||||
return self._feed_data(data)
|
||||
except Exception as exc:
|
||||
self._exc = exc
|
||||
self.queue.set_exception(exc)
|
||||
return True, b""
|
||||
|
||||
def _feed_data(self, data: bytes) -> Tuple[bool, bytes]:
|
||||
for fin, opcode, payload, compressed in self.parse_frame(data):
|
||||
if compressed and not self._decompressobj:
|
||||
self._decompressobj = zlib.decompressobj(wbits=-zlib.MAX_WBITS)
|
||||
if opcode == WSMsgType.CLOSE:
|
||||
if len(payload) >= 2:
|
||||
close_code = UNPACK_CLOSE_CODE(payload[:2])[0]
|
||||
if close_code < 3000 and close_code not in ALLOWED_CLOSE_CODES:
|
||||
raise WebSocketError(
|
||||
WSCloseCode.PROTOCOL_ERROR,
|
||||
f"Invalid close code: {close_code}",
|
||||
)
|
||||
try:
|
||||
close_message = payload[2:].decode("utf-8")
|
||||
except UnicodeDecodeError as exc:
|
||||
raise WebSocketError(
|
||||
WSCloseCode.INVALID_TEXT, "Invalid UTF-8 text message"
|
||||
) from exc
|
||||
msg = WSMessage(WSMsgType.CLOSE, close_code, close_message)
|
||||
elif payload:
|
||||
raise WebSocketError(
|
||||
WSCloseCode.PROTOCOL_ERROR,
|
||||
f"Invalid close frame: {fin} {opcode} {payload!r}",
|
||||
)
|
||||
else:
|
||||
msg = WSMessage(WSMsgType.CLOSE, 0, "")
|
||||
|
||||
self.queue.feed_data(msg, 0)
|
||||
|
||||
elif opcode == WSMsgType.PING:
|
||||
self.queue.feed_data(
|
||||
WSMessage(WSMsgType.PING, payload, ""), len(payload)
|
||||
)
|
||||
|
||||
elif opcode == WSMsgType.PONG:
|
||||
self.queue.feed_data(
|
||||
WSMessage(WSMsgType.PONG, payload, ""), len(payload)
|
||||
)
|
||||
|
||||
elif (
|
||||
opcode not in (WSMsgType.TEXT, WSMsgType.BINARY)
|
||||
and self._opcode is None
|
||||
):
|
||||
raise WebSocketError(
|
||||
WSCloseCode.PROTOCOL_ERROR, f"Unexpected opcode={opcode!r}"
|
||||
)
|
||||
else:
|
||||
# load text/binary
|
||||
if not fin:
|
||||
# got partial frame payload
|
||||
if opcode != WSMsgType.CONTINUATION:
|
||||
self._opcode = opcode
|
||||
self._partial.extend(payload)
|
||||
if self._max_msg_size and len(self._partial) >= self._max_msg_size:
|
||||
raise WebSocketError(
|
||||
WSCloseCode.MESSAGE_TOO_BIG,
|
||||
"Message size {} exceeds limit {}".format(
|
||||
len(self._partial), self._max_msg_size
|
||||
),
|
||||
)
|
||||
else:
|
||||
# previous frame was non finished
|
||||
# we should get continuation opcode
|
||||
if self._partial:
|
||||
if opcode != WSMsgType.CONTINUATION:
|
||||
raise WebSocketError(
|
||||
WSCloseCode.PROTOCOL_ERROR,
|
||||
"The opcode in non-fin frame is expected "
|
||||
"to be zero, got {!r}".format(opcode),
|
||||
)
|
||||
|
||||
if opcode == WSMsgType.CONTINUATION:
|
||||
assert self._opcode is not None
|
||||
opcode = self._opcode
|
||||
self._opcode = None
|
||||
|
||||
self._partial.extend(payload)
|
||||
if self._max_msg_size and len(self._partial) >= self._max_msg_size:
|
||||
raise WebSocketError(
|
||||
WSCloseCode.MESSAGE_TOO_BIG,
|
||||
"Message size {} exceeds limit {}".format(
|
||||
len(self._partial), self._max_msg_size
|
||||
),
|
||||
)
|
||||
|
||||
# Decompress process must to be done after all packets
|
||||
# received.
|
||||
if compressed:
|
||||
self._partial.extend(_WS_DEFLATE_TRAILING)
|
||||
payload_merged = self._decompressobj.decompress(
|
||||
self._partial, self._max_msg_size
|
||||
)
|
||||
if self._decompressobj.unconsumed_tail:
|
||||
left = len(self._decompressobj.unconsumed_tail)
|
||||
raise WebSocketError(
|
||||
WSCloseCode.MESSAGE_TOO_BIG,
|
||||
"Decompressed message size {} exceeds limit {}".format(
|
||||
self._max_msg_size + left, self._max_msg_size
|
||||
),
|
||||
)
|
||||
else:
|
||||
payload_merged = bytes(self._partial)
|
||||
|
||||
self._partial.clear()
|
||||
|
||||
if opcode == WSMsgType.TEXT:
|
||||
try:
|
||||
text = payload_merged.decode("utf-8")
|
||||
self.queue.feed_data(
|
||||
WSMessage(WSMsgType.TEXT, text, ""), len(text)
|
||||
)
|
||||
except UnicodeDecodeError as exc:
|
||||
raise WebSocketError(
|
||||
WSCloseCode.INVALID_TEXT, "Invalid UTF-8 text message"
|
||||
) from exc
|
||||
else:
|
||||
self.queue.feed_data(
|
||||
WSMessage(WSMsgType.BINARY, payload_merged, ""),
|
||||
len(payload_merged),
|
||||
)
|
||||
|
||||
return False, b""
|
||||
|
||||
def parse_frame(
|
||||
self, buf: bytes
|
||||
) -> List[Tuple[bool, Optional[int], bytearray, Optional[bool]]]:
|
||||
"""Return the next frame from the socket."""
|
||||
frames = []
|
||||
if self._tail:
|
||||
buf, self._tail = self._tail + buf, b""
|
||||
|
||||
start_pos = 0
|
||||
buf_length = len(buf)
|
||||
|
||||
while True:
|
||||
# read header
|
||||
if self._state == WSParserState.READ_HEADER:
|
||||
if buf_length - start_pos >= 2:
|
||||
data = buf[start_pos : start_pos + 2]
|
||||
start_pos += 2
|
||||
first_byte, second_byte = data
|
||||
|
||||
fin = (first_byte >> 7) & 1
|
||||
rsv1 = (first_byte >> 6) & 1
|
||||
rsv2 = (first_byte >> 5) & 1
|
||||
rsv3 = (first_byte >> 4) & 1
|
||||
opcode = first_byte & 0xF
|
||||
|
||||
# frame-fin = %x0 ; more frames of this message follow
|
||||
# / %x1 ; final frame of this message
|
||||
# frame-rsv1 = %x0 ;
|
||||
# 1 bit, MUST be 0 unless negotiated otherwise
|
||||
# frame-rsv2 = %x0 ;
|
||||
# 1 bit, MUST be 0 unless negotiated otherwise
|
||||
# frame-rsv3 = %x0 ;
|
||||
# 1 bit, MUST be 0 unless negotiated otherwise
|
||||
#
|
||||
# Remove rsv1 from this test for deflate development
|
||||
if rsv2 or rsv3 or (rsv1 and not self._compress):
|
||||
raise WebSocketError(
|
||||
WSCloseCode.PROTOCOL_ERROR,
|
||||
"Received frame with non-zero reserved bits",
|
||||
)
|
||||
|
||||
if opcode > 0x7 and fin == 0:
|
||||
raise WebSocketError(
|
||||
WSCloseCode.PROTOCOL_ERROR,
|
||||
"Received fragmented control frame",
|
||||
)
|
||||
|
||||
has_mask = (second_byte >> 7) & 1
|
||||
length = second_byte & 0x7F
|
||||
|
||||
# Control frames MUST have a payload
|
||||
# length of 125 bytes or less
|
||||
if opcode > 0x7 and length > 125:
|
||||
raise WebSocketError(
|
||||
WSCloseCode.PROTOCOL_ERROR,
|
||||
"Control frame payload cannot be " "larger than 125 bytes",
|
||||
)
|
||||
|
||||
# Set compress status if last package is FIN
|
||||
# OR set compress status if this is first fragment
|
||||
# Raise error if not first fragment with rsv1 = 0x1
|
||||
if self._frame_fin or self._compressed is None:
|
||||
self._compressed = True if rsv1 else False
|
||||
elif rsv1:
|
||||
raise WebSocketError(
|
||||
WSCloseCode.PROTOCOL_ERROR,
|
||||
"Received frame with non-zero reserved bits",
|
||||
)
|
||||
|
||||
self._frame_fin = bool(fin)
|
||||
self._frame_opcode = opcode
|
||||
self._has_mask = bool(has_mask)
|
||||
self._payload_length_flag = length
|
||||
self._state = WSParserState.READ_PAYLOAD_LENGTH
|
||||
else:
|
||||
break
|
||||
|
||||
# read payload length
|
||||
if self._state == WSParserState.READ_PAYLOAD_LENGTH:
|
||||
length = self._payload_length_flag
|
||||
if length == 126:
|
||||
if buf_length - start_pos >= 2:
|
||||
data = buf[start_pos : start_pos + 2]
|
||||
start_pos += 2
|
||||
length = UNPACK_LEN2(data)[0]
|
||||
self._payload_length = length
|
||||
self._state = (
|
||||
WSParserState.READ_PAYLOAD_MASK
|
||||
if self._has_mask
|
||||
else WSParserState.READ_PAYLOAD
|
||||
)
|
||||
else:
|
||||
break
|
||||
elif length > 126:
|
||||
if buf_length - start_pos >= 8:
|
||||
data = buf[start_pos : start_pos + 8]
|
||||
start_pos += 8
|
||||
length = UNPACK_LEN3(data)[0]
|
||||
self._payload_length = length
|
||||
self._state = (
|
||||
WSParserState.READ_PAYLOAD_MASK
|
||||
if self._has_mask
|
||||
else WSParserState.READ_PAYLOAD
|
||||
)
|
||||
else:
|
||||
break
|
||||
else:
|
||||
self._payload_length = length
|
||||
self._state = (
|
||||
WSParserState.READ_PAYLOAD_MASK
|
||||
if self._has_mask
|
||||
else WSParserState.READ_PAYLOAD
|
||||
)
|
||||
|
||||
# read payload mask
|
||||
if self._state == WSParserState.READ_PAYLOAD_MASK:
|
||||
if buf_length - start_pos >= 4:
|
||||
self._frame_mask = buf[start_pos : start_pos + 4]
|
||||
start_pos += 4
|
||||
self._state = WSParserState.READ_PAYLOAD
|
||||
else:
|
||||
break
|
||||
|
||||
if self._state == WSParserState.READ_PAYLOAD:
|
||||
length = self._payload_length
|
||||
payload = self._frame_payload
|
||||
|
||||
chunk_len = buf_length - start_pos
|
||||
if length >= chunk_len:
|
||||
self._payload_length = length - chunk_len
|
||||
                    payload.extend(buf[start_pos:])
                    start_pos = buf_length
                else:
                    self._payload_length = 0
                    payload.extend(buf[start_pos : start_pos + length])
                    start_pos = start_pos + length

                if self._payload_length == 0:
                    if self._has_mask:
                        assert self._frame_mask is not None
                        _websocket_mask(self._frame_mask, payload)

                    frames.append(
                        (self._frame_fin, self._frame_opcode, payload, self._compressed)
                    )

                    self._frame_payload = bytearray()
                    self._state = WSParserState.READ_HEADER
                else:
                    break

        self._tail = buf[start_pos:]

        return frames


class WebSocketWriter:
    def __init__(
        self,
        protocol: BaseProtocol,
        transport: asyncio.Transport,
        *,
        use_mask: bool = False,
        limit: int = DEFAULT_LIMIT,
        random: Any = random.Random(),
        compress: int = 0,
        notakeover: bool = False,
    ) -> None:
        self.protocol = protocol
        self.transport = transport
        self.use_mask = use_mask
        self.randrange = random.randrange
        self.compress = compress
        self.notakeover = notakeover
        self._closing = False
        self._limit = limit
        self._output_size = 0
        self._compressobj: Any = None  # actually compressobj

    async def _send_frame(
        self, message: bytes, opcode: int, compress: Optional[int] = None
    ) -> None:
        """Send a frame over the websocket with message as its payload."""
        if self._closing and not (opcode & WSMsgType.CLOSE):
            raise ConnectionResetError("Cannot write to closing transport")

        rsv = 0

        # Only compress larger packets (disabled)
        # Do small packets need to be compressed?
        # if self.compress and opcode < 8 and len(message) > 124:
        if (compress or self.compress) and opcode < 8:
            if compress:
                # Do not set self._compress if compressing is for this frame
                compressobj = zlib.compressobj(level=zlib.Z_BEST_SPEED, wbits=-compress)
            else:  # self.compress
                if not self._compressobj:
                    self._compressobj = zlib.compressobj(
                        level=zlib.Z_BEST_SPEED, wbits=-self.compress
                    )
                compressobj = self._compressobj

            message = compressobj.compress(message)
            message = message + compressobj.flush(
                zlib.Z_FULL_FLUSH if self.notakeover else zlib.Z_SYNC_FLUSH
            )
            if message.endswith(_WS_DEFLATE_TRAILING):
                message = message[:-4]
            rsv = rsv | 0x40

        msg_length = len(message)

        use_mask = self.use_mask
        if use_mask:
            mask_bit = 0x80
        else:
            mask_bit = 0

        if msg_length < 126:
            header = PACK_LEN1(0x80 | rsv | opcode, msg_length | mask_bit)
        elif msg_length < (1 << 16):
            header = PACK_LEN2(0x80 | rsv | opcode, 126 | mask_bit, msg_length)
        else:
            header = PACK_LEN3(0x80 | rsv | opcode, 127 | mask_bit, msg_length)
        if use_mask:
            mask = self.randrange(0, 0xFFFFFFFF)
            mask = mask.to_bytes(4, "big")
            message = bytearray(message)
            _websocket_mask(mask, message)
            self._write(header + mask + message)
            self._output_size += len(header) + len(mask) + len(message)
        else:
            if len(message) > MSG_SIZE:
                self._write(header)
                self._write(message)
            else:
                self._write(header + message)

            self._output_size += len(header) + len(message)

        if self._output_size > self._limit:
            self._output_size = 0
            await self.protocol._drain_helper()

    def _write(self, data: bytes) -> None:
        if self.transport is None or self.transport.is_closing():
            raise ConnectionResetError("Cannot write to closing transport")
        self.transport.write(data)

    async def pong(self, message: bytes = b"") -> None:
        """Send pong message."""
        if isinstance(message, str):
            message = message.encode("utf-8")
        await self._send_frame(message, WSMsgType.PONG)

    async def ping(self, message: bytes = b"") -> None:
        """Send ping message."""
        if isinstance(message, str):
            message = message.encode("utf-8")
        await self._send_frame(message, WSMsgType.PING)

    async def send(
        self,
        message: Union[str, bytes],
        binary: bool = False,
        compress: Optional[int] = None,
    ) -> None:
        """Send a frame over the websocket with message as its payload."""
        if isinstance(message, str):
            message = message.encode("utf-8")
        if binary:
            await self._send_frame(message, WSMsgType.BINARY, compress)
        else:
            await self._send_frame(message, WSMsgType.TEXT, compress)

    async def close(self, code: int = 1000, message: bytes = b"") -> None:
        """Close the websocket, sending the specified code and message."""
        if isinstance(message, str):
            message = message.encode("utf-8")
        try:
            await self._send_frame(
                PACK_CLOSE_CODE(code) + message, opcode=WSMsgType.CLOSE
            )
        finally:
            self._closing = True
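A minimal usage sketch for the writer above (illustrative only, not part of this file; `protocol` and `transport` are assumed to come from an already established websocket connection):

    # Hypothetical driver code for WebSocketWriter -- names are assumptions.
    async def demo(protocol, transport):
        writer = WebSocketWriter(protocol, transport, use_mask=True)
        await writer.send("hello")                     # TEXT frame
        await writer.send(b"\x01\x02", binary=True)    # BINARY frame
        await writer.ping(b"keepalive")                # PING frame
        await writer.close(code=1000, message=b"bye")  # CLOSE; writer then rejects writes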
198
.venv/lib/python3.7/site-packages/aiohttp/http_writer.py
Normal file
@@ -0,0 +1,198 @@
"""Http related parsers and protocol."""

import asyncio
import zlib
from typing import Any, Awaitable, Callable, NamedTuple, Optional, Union  # noqa

from multidict import CIMultiDict

from .abc import AbstractStreamWriter
from .base_protocol import BaseProtocol
from .helpers import NO_EXTENSIONS

__all__ = ("StreamWriter", "HttpVersion", "HttpVersion10", "HttpVersion11")


class HttpVersion(NamedTuple):
    major: int
    minor: int


HttpVersion10 = HttpVersion(1, 0)
HttpVersion11 = HttpVersion(1, 1)


_T_OnChunkSent = Optional[Callable[[bytes], Awaitable[None]]]
_T_OnHeadersSent = Optional[Callable[["CIMultiDict[str]"], Awaitable[None]]]


class StreamWriter(AbstractStreamWriter):
    def __init__(
        self,
        protocol: BaseProtocol,
        loop: asyncio.AbstractEventLoop,
        on_chunk_sent: _T_OnChunkSent = None,
        on_headers_sent: _T_OnHeadersSent = None,
    ) -> None:
        self._protocol = protocol

        self.loop = loop
        self.length = None
        self.chunked = False
        self.buffer_size = 0
        self.output_size = 0

        self._eof = False
        self._compress: Any = None
        self._drain_waiter = None

        self._on_chunk_sent: _T_OnChunkSent = on_chunk_sent
        self._on_headers_sent: _T_OnHeadersSent = on_headers_sent

    @property
    def transport(self) -> Optional[asyncio.Transport]:
        return self._protocol.transport

    @property
    def protocol(self) -> BaseProtocol:
        return self._protocol

    def enable_chunking(self) -> None:
        self.chunked = True

    def enable_compression(
        self, encoding: str = "deflate", strategy: int = zlib.Z_DEFAULT_STRATEGY
    ) -> None:
        zlib_mode = 16 + zlib.MAX_WBITS if encoding == "gzip" else zlib.MAX_WBITS
        self._compress = zlib.compressobj(wbits=zlib_mode, strategy=strategy)

    def _write(self, chunk: bytes) -> None:
        size = len(chunk)
        self.buffer_size += size
        self.output_size += size
        transport = self.transport
        if not self._protocol.connected or transport is None or transport.is_closing():
            raise ConnectionResetError("Cannot write to closing transport")
        transport.write(chunk)

    async def write(
        self, chunk: bytes, *, drain: bool = True, LIMIT: int = 0x10000
    ) -> None:
        """Write a chunk of data to the stream.

        write_eof() indicates the end of the stream; the writer cannot
        be used after write_eof() has been called. write() returns a
        drain future.
        """
        if self._on_chunk_sent is not None:
            await self._on_chunk_sent(chunk)

        if isinstance(chunk, memoryview):
            if chunk.nbytes != len(chunk):
                # just reshape it
                chunk = chunk.cast("c")

        if self._compress is not None:
            chunk = self._compress.compress(chunk)
            if not chunk:
                return

        if self.length is not None:
            chunk_len = len(chunk)
            if self.length >= chunk_len:
                self.length = self.length - chunk_len
            else:
                chunk = chunk[: self.length]
                self.length = 0
                if not chunk:
                    return

        if chunk:
            if self.chunked:
                chunk_len_pre = ("%x\r\n" % len(chunk)).encode("ascii")
                chunk = chunk_len_pre + chunk + b"\r\n"

            self._write(chunk)

            if self.buffer_size > LIMIT and drain:
                self.buffer_size = 0
                await self.drain()

    async def write_headers(
        self, status_line: str, headers: "CIMultiDict[str]"
    ) -> None:
        """Write request/response status and headers."""
        if self._on_headers_sent is not None:
            await self._on_headers_sent(headers)

        # status + headers
        buf = _serialize_headers(status_line, headers)
        self._write(buf)

    async def write_eof(self, chunk: bytes = b"") -> None:
        if self._eof:
            return

        if chunk and self._on_chunk_sent is not None:
            await self._on_chunk_sent(chunk)

        if self._compress:
            if chunk:
                chunk = self._compress.compress(chunk)

            chunk = chunk + self._compress.flush()
            if chunk and self.chunked:
                chunk_len = ("%x\r\n" % len(chunk)).encode("ascii")
                chunk = chunk_len + chunk + b"\r\n0\r\n\r\n"
        else:
            if self.chunked:
                if chunk:
                    chunk_len = ("%x\r\n" % len(chunk)).encode("ascii")
                    chunk = chunk_len + chunk + b"\r\n0\r\n\r\n"
                else:
                    chunk = b"0\r\n\r\n"

        if chunk:
            self._write(chunk)

        await self.drain()

        self._eof = True

    async def drain(self) -> None:
        """Flush the write buffer.

        The intended use is to write

          await w.write(data)
          await w.drain()
        """
        if self._protocol.transport is not None:
            await self._protocol._drain_helper()


def _safe_header(string: str) -> str:
    if "\r" in string or "\n" in string:
        raise ValueError(
            "Newline or carriage return detected in headers. "
            "Potential header injection attack."
        )
    return string


def _py_serialize_headers(status_line: str, headers: "CIMultiDict[str]") -> bytes:
    headers_gen = (_safe_header(k) + ": " + _safe_header(v) for k, v in headers.items())
    line = status_line + "\r\n" + "\r\n".join(headers_gen) + "\r\n\r\n"
    return line.encode("utf-8")


_serialize_headers = _py_serialize_headers

try:
    import aiohttp._http_writer as _http_writer  # type: ignore[import]

    _c_serialize_headers = _http_writer._serialize_headers
    if not NO_EXTENSIONS:
        _serialize_headers = _c_serialize_headers
except ImportError:
    pass
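A short sketch of how StreamWriter is typically driven to emit a chunked response (illustrative only, not part of this file; `protocol` and `loop` are assumed to come from a connected BaseProtocol and a running event loop):

    # Hypothetical driver code for StreamWriter -- names are assumptions.
    async def send_chunked(protocol, loop):
        writer = StreamWriter(protocol, loop)
        writer.enable_chunking()
        await writer.write_headers(
            "HTTP/1.1 200 OK",
            CIMultiDict({"Content-Type": "text/plain"}),
        )
        await writer.write(b"hello ")  # framed as a chunk: 6\r\nhello \r\n
        await writer.write(b"world")
        await writer.write_eof()       # emits the terminating 0\r\n\r\n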
41
.venv/lib/python3.7/site-packages/aiohttp/locks.py
Normal file
@@ -0,0 +1,41 @@
import asyncio
import collections
from typing import Any, Deque, Optional


class EventResultOrError:
    """Event asyncio lock helper class.

    Wraps asyncio.Event, allowing the waiting tasks to be woken either
    normally or with an exception raised in every waiter.

    thanks to @vorpalsmith for the simple design.
    """

    def __init__(self, loop: asyncio.AbstractEventLoop) -> None:
        self._loop = loop
        self._exc: Optional[BaseException] = None
        self._event = asyncio.Event()
        self._waiters: Deque[asyncio.Future[Any]] = collections.deque()

    def set(self, exc: Optional[BaseException] = None) -> None:
        self._exc = exc
        self._event.set()

    async def wait(self) -> Any:
        waiter = self._loop.create_task(self._event.wait())
        self._waiters.append(waiter)
        try:
            val = await waiter
        finally:
            self._waiters.remove(waiter)

        if self._exc is not None:
            raise self._exc

        return val

    def cancel(self) -> None:
        """Cancel all waiters"""
        for waiter in self._waiters:
            waiter.cancel()
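A minimal usage sketch for the helper above (illustrative only, not part of this file):

    # Hypothetical driver code for EventResultOrError -- names are assumptions.
    async def demo():
        loop = asyncio.get_event_loop()
        ev = EventResultOrError(loop)
        waiter = loop.create_task(ev.wait())
        ev.set()      # wake normally; ev.set(RuntimeError("boom")) would
        await waiter  # instead raise inside every waiter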
8
.venv/lib/python3.7/site-packages/aiohttp/log.py
Normal file
@@ -0,0 +1,8 @@
import logging

access_logger = logging.getLogger("aiohttp.access")
client_logger = logging.getLogger("aiohttp.client")
internal_logger = logging.getLogger("aiohttp.internal")
server_logger = logging.getLogger("aiohttp.server")
web_logger = logging.getLogger("aiohttp.web")
ws_logger = logging.getLogger("aiohttp.websocket")
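These loggers all hang off the "aiohttp" namespace, so an application can tune them individually; a minimal configuration sketch (application code, not part of aiohttp):

    import logging

    logging.basicConfig(level=logging.INFO)
    logging.getLogger("aiohttp.access").setLevel(logging.DEBUG)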
961
.venv/lib/python3.7/site-packages/aiohttp/multipart.py
Normal file
@@ -0,0 +1,961 @@
import base64
import binascii
import json
import re
import uuid
import warnings
import zlib
from collections import deque
from types import TracebackType
from typing import (
    TYPE_CHECKING,
    Any,
    AsyncIterator,
    Deque,
    Dict,
    Iterator,
    List,
    Mapping,
    Optional,
    Sequence,
    Tuple,
    Type,
    Union,
    cast,
)
from urllib.parse import parse_qsl, unquote, urlencode

from multidict import CIMultiDict, CIMultiDictProxy, MultiMapping

from .hdrs import (
    CONTENT_DISPOSITION,
    CONTENT_ENCODING,
    CONTENT_LENGTH,
    CONTENT_TRANSFER_ENCODING,
    CONTENT_TYPE,
)
from .helpers import CHAR, TOKEN, parse_mimetype, reify
from .http import HeadersParser
from .payload import (
    JsonPayload,
    LookupError,
    Order,
    Payload,
    StringPayload,
    get_payload,
    payload_type,
)
from .streams import StreamReader

__all__ = (
    "MultipartReader",
    "MultipartWriter",
    "BodyPartReader",
    "BadContentDispositionHeader",
    "BadContentDispositionParam",
    "parse_content_disposition",
    "content_disposition_filename",
)


if TYPE_CHECKING:  # pragma: no cover
    from .client_reqrep import ClientResponse


class BadContentDispositionHeader(RuntimeWarning):
    pass


class BadContentDispositionParam(RuntimeWarning):
    pass


def parse_content_disposition(
    header: Optional[str],
) -> Tuple[Optional[str], Dict[str, str]]:
    def is_token(string: str) -> bool:
        return bool(string) and TOKEN >= set(string)

    def is_quoted(string: str) -> bool:
        return string[0] == string[-1] == '"'

    def is_rfc5987(string: str) -> bool:
        return is_token(string) and string.count("'") == 2

    def is_extended_param(string: str) -> bool:
        return string.endswith("*")

    def is_continuous_param(string: str) -> bool:
        pos = string.find("*") + 1
        if not pos:
            return False
        substring = string[pos:-1] if string.endswith("*") else string[pos:]
        return substring.isdigit()

    def unescape(text: str, *, chars: str = "".join(map(re.escape, CHAR))) -> str:
        return re.sub(f"\\\\([{chars}])", "\\1", text)

    if not header:
        return None, {}

    disptype, *parts = header.split(";")
    if not is_token(disptype):
        warnings.warn(BadContentDispositionHeader(header))
        return None, {}

    params: Dict[str, str] = {}
    while parts:
        item = parts.pop(0)

        if "=" not in item:
            warnings.warn(BadContentDispositionHeader(header))
            return None, {}

        key, value = item.split("=", 1)
        key = key.lower().strip()
        value = value.lstrip()

        if key in params:
            warnings.warn(BadContentDispositionHeader(header))
            return None, {}

        if not is_token(key):
            warnings.warn(BadContentDispositionParam(item))
            continue

        elif is_continuous_param(key):
            if is_quoted(value):
                value = unescape(value[1:-1])
            elif not is_token(value):
                warnings.warn(BadContentDispositionParam(item))
                continue

        elif is_extended_param(key):
            if is_rfc5987(value):
                encoding, _, value = value.split("'", 2)
                encoding = encoding or "utf-8"
            else:
                warnings.warn(BadContentDispositionParam(item))
                continue

            try:
                value = unquote(value, encoding, "strict")
            except UnicodeDecodeError:  # pragma: nocover
                warnings.warn(BadContentDispositionParam(item))
                continue

        else:
            failed = True
            if is_quoted(value):
                failed = False
                value = unescape(value[1:-1].lstrip("\\/"))
            elif is_token(value):
                failed = False
            elif parts:
                # There may be a bare ';' inside the filename; this fixes
                # just that one case, a proper fix would require
                # redesigning the parser.
                _value = f"{value};{parts[0]}"
                if is_quoted(_value):
                    parts.pop(0)
                    value = unescape(_value[1:-1].lstrip("\\/"))
                    failed = False

            if failed:
                warnings.warn(BadContentDispositionHeader(header))
                return None, {}

        params[key] = value

    return disptype.lower(), params


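A quick illustration of the parser's contract (illustrative only, not part of this file):

    # The disposition type comes back lowercased; parameter values are unquoted.
    disptype, params = parse_content_disposition('attachment; filename="report.pdf"')
    assert disptype == "attachment"
    assert params["filename"] == "report.pdf"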
def content_disposition_filename(
    params: Mapping[str, str], name: str = "filename"
) -> Optional[str]:
    name_suf = "%s*" % name
    if not params:
        return None
    elif name_suf in params:
        return params[name_suf]
    elif name in params:
        return params[name]
    else:
        parts = []
        fnparams = sorted(
            (key, value) for key, value in params.items() if key.startswith(name_suf)
        )
        for num, (key, value) in enumerate(fnparams):
            _, tail = key.split("*", 1)
            if tail.endswith("*"):
                tail = tail[:-1]
            if tail == str(num):
                parts.append(value)
            else:
                break
        if not parts:
            return None
        value = "".join(parts)
        if "'" in value:
            encoding, _, value = value.split("'", 2)
            encoding = encoding or "utf-8"
            return unquote(value, encoding, "strict")
        return value


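Chained with the parser above, RFC 5987 extended parameters come out already percent-decoded (illustrative only):

    disptype, params = parse_content_disposition(
        "attachment; filename*=utf-8''na%C3%AFve.txt"
    )
    print(content_disposition_filename(params))  # -> naïve.txt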
class MultipartResponseWrapper:
    """Wrapper around the MultipartReader.

    It takes care of the underlying connection and closes it when needed.
    """

    def __init__(
        self,
        resp: "ClientResponse",
        stream: "MultipartReader",
    ) -> None:
        self.resp = resp
        self.stream = stream

    def __aiter__(self) -> "MultipartResponseWrapper":
        return self

    async def __anext__(
        self,
    ) -> Union["MultipartReader", "BodyPartReader"]:
        part = await self.next()
        if part is None:
            raise StopAsyncIteration
        return part

    def at_eof(self) -> bool:
        """Returns True when all response data has been read."""
        return self.resp.content.at_eof()

    async def next(
        self,
    ) -> Optional[Union["MultipartReader", "BodyPartReader"]]:
        """Emits the next multipart reader object."""
        item = await self.stream.next()
        if self.stream.at_eof():
            await self.release()
        return item

    async def release(self) -> None:
        """Release the connection gracefully.

        All remaining content is read to the void.
        """
        await self.resp.release()


class BodyPartReader:
    """Multipart reader for a single body part."""

    chunk_size = 8192

    def __init__(
        self, boundary: bytes, headers: "CIMultiDictProxy[str]", content: StreamReader
    ) -> None:
        self.headers = headers
        self._boundary = boundary
        self._content = content
        self._at_eof = False
        length = self.headers.get(CONTENT_LENGTH, None)
        self._length = int(length) if length is not None else None
        self._read_bytes = 0
        # TODO: typing.Deque is not supported by Python 3.5
        self._unread: Deque[bytes] = deque()
        self._prev_chunk: Optional[bytes] = None
        self._content_eof = 0
        self._cache: Dict[str, Any] = {}

    def __aiter__(self) -> AsyncIterator["BodyPartReader"]:
        return self  # type: ignore[return-value]

    async def __anext__(self) -> bytes:
        part = await self.next()
        if part is None:
            raise StopAsyncIteration
        return part

    async def next(self) -> Optional[bytes]:
        item = await self.read()
        if not item:
            return None
        return item

    async def read(self, *, decode: bool = False) -> bytes:
        """Reads body part data.

        decode: decodes the data according to the encoding method from
        the Content-Encoding header. If the header is missing, the data
        remains untouched.
        """
        if self._at_eof:
            return b""
        data = bytearray()
        while not self._at_eof:
            data.extend(await self.read_chunk(self.chunk_size))
        if decode:
            return self.decode(data)
        return data

    async def read_chunk(self, size: int = chunk_size) -> bytes:
        """Reads a chunk of body part content of the specified size.

        size: chunk size
        """
        if self._at_eof:
            return b""
        if self._length:
            chunk = await self._read_chunk_from_length(size)
        else:
            chunk = await self._read_chunk_from_stream(size)

        self._read_bytes += len(chunk)
        if self._read_bytes == self._length:
            self._at_eof = True
        if self._at_eof:
            crlf = await self._content.readline()
            assert (
                b"\r\n" == crlf
            ), "reader did not read all the data or it is malformed"
        return chunk

    async def _read_chunk_from_length(self, size: int) -> bytes:
        # Reads a chunk of body part content of the specified size.
        # The body part must have a Content-Length header with a proper value.
        assert self._length is not None, "Content-Length required for chunked read"
        chunk_size = min(size, self._length - self._read_bytes)
        chunk = await self._content.read(chunk_size)
        return chunk

    async def _read_chunk_from_stream(self, size: int) -> bytes:
        # Reads a content chunk of a body part with unknown length.
        # The Content-Length header for the body part is not necessary.
        assert (
            size >= len(self._boundary) + 2
        ), "Chunk size must be greater than or equal to boundary length + 2"
        first_chunk = self._prev_chunk is None
        if first_chunk:
            self._prev_chunk = await self._content.read(size)

        chunk = await self._content.read(size)
        self._content_eof += int(self._content.at_eof())
        assert self._content_eof < 3, "Reading after EOF"
        assert self._prev_chunk is not None
        window = self._prev_chunk + chunk
        sub = b"\r\n" + self._boundary
        if first_chunk:
            idx = window.find(sub)
        else:
            idx = window.find(sub, max(0, len(self._prev_chunk) - len(sub)))
        if idx >= 0:
            # pushing boundary back to content
            with warnings.catch_warnings():
                warnings.filterwarnings("ignore", category=DeprecationWarning)
                self._content.unread_data(window[idx:])
            if size > idx:
                self._prev_chunk = self._prev_chunk[:idx]
            chunk = window[len(self._prev_chunk) : idx]
            if not chunk:
                self._at_eof = True
        result = self._prev_chunk
        self._prev_chunk = chunk
        return result

    async def readline(self) -> bytes:
        """Reads the body part line by line."""
        if self._at_eof:
            return b""

        if self._unread:
            line = self._unread.popleft()
        else:
            line = await self._content.readline()

        if line.startswith(self._boundary):
            # the very last boundary may not come with \r\n,
            # so set single rules for everyone
            sline = line.rstrip(b"\r\n")
            boundary = self._boundary
            last_boundary = self._boundary + b"--"
            # ensure that we read exactly the boundary, not something alike
            if sline == boundary or sline == last_boundary:
                self._at_eof = True
                self._unread.append(line)
                return b""
        else:
            next_line = await self._content.readline()
            if next_line.startswith(self._boundary):
                line = line[:-2]  # strip CRLF but only once
            self._unread.append(next_line)

        return line

    async def release(self) -> None:
        """Like read(), but reads all the data to the void."""
        if self._at_eof:
            return
        while not self._at_eof:
            await self.read_chunk(self.chunk_size)

    async def text(self, *, encoding: Optional[str] = None) -> str:
        """Like read(), but assumes that the body part contains text data."""
        data = await self.read(decode=True)
        # see https://www.w3.org/TR/html5/forms.html#multipart/form-data-encoding-algorithm # NOQA
        # and https://dvcs.w3.org/hg/xhr/raw-file/tip/Overview.html#dom-xmlhttprequest-send # NOQA
        encoding = encoding or self.get_charset(default="utf-8")
        return data.decode(encoding)

    async def json(self, *, encoding: Optional[str] = None) -> Optional[Dict[str, Any]]:
        """Like read(), but assumes that the body part contains JSON data."""
        data = await self.read(decode=True)
        if not data:
            return None
        encoding = encoding or self.get_charset(default="utf-8")
        return cast(Dict[str, Any], json.loads(data.decode(encoding)))

    async def form(self, *, encoding: Optional[str] = None) -> List[Tuple[str, str]]:
        """Like read(), but assumes that the body part contains form urlencoded data."""
        data = await self.read(decode=True)
        if not data:
            return []
        if encoding is not None:
            real_encoding = encoding
        else:
            real_encoding = self.get_charset(default="utf-8")
        return parse_qsl(
            data.rstrip().decode(real_encoding),
            keep_blank_values=True,
            encoding=real_encoding,
        )

    def at_eof(self) -> bool:
        """Returns True if the boundary was reached, False otherwise."""
        return self._at_eof

    def decode(self, data: bytes) -> bytes:
        """Decodes data.

        Decoding is done according to the specified Content-Encoding
        or Content-Transfer-Encoding headers value.
        """
        if CONTENT_TRANSFER_ENCODING in self.headers:
            data = self._decode_content_transfer(data)
        if CONTENT_ENCODING in self.headers:
            return self._decode_content(data)
        return data

    def _decode_content(self, data: bytes) -> bytes:
        encoding = self.headers.get(CONTENT_ENCODING, "").lower()

        if encoding == "deflate":
            return zlib.decompress(data, -zlib.MAX_WBITS)
        elif encoding == "gzip":
            return zlib.decompress(data, 16 + zlib.MAX_WBITS)
        elif encoding == "identity":
            return data
        else:
            raise RuntimeError(f"unknown content encoding: {encoding}")

    def _decode_content_transfer(self, data: bytes) -> bytes:
        encoding = self.headers.get(CONTENT_TRANSFER_ENCODING, "").lower()

        if encoding == "base64":
            return base64.b64decode(data)
        elif encoding == "quoted-printable":
            return binascii.a2b_qp(data)
        elif encoding in ("binary", "8bit", "7bit"):
            return data
        else:
            raise RuntimeError(f"unknown content transfer encoding: {encoding}")

    def get_charset(self, default: str) -> str:
        """Returns the charset parameter from the Content-Type header or default."""
        ctype = self.headers.get(CONTENT_TYPE, "")
        mimetype = parse_mimetype(ctype)
        return mimetype.parameters.get("charset", default)

    @reify
    def name(self) -> Optional[str]:
        """Returns name specified in Content-Disposition header.

        If the header is missing or malformed, returns None.
        """
        _, params = parse_content_disposition(self.headers.get(CONTENT_DISPOSITION))
        return content_disposition_filename(params, "name")

    @reify
    def filename(self) -> Optional[str]:
        """Returns filename specified in Content-Disposition header.

        Returns None if the header is missing or malformed.
        """
        _, params = parse_content_disposition(self.headers.get(CONTENT_DISPOSITION))
        return content_disposition_filename(params, "filename")


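A sketch of consuming a single part with the reader above (illustrative only; `part` is assumed to be a BodyPartReader positioned at a part):

    # Hypothetical handler -- dispatches on the part's Content-Type.
    async def read_part(part):
        if part.headers.get(CONTENT_TYPE, "").startswith("application/json"):
            return await part.json()
        return await part.text()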
@payload_type(BodyPartReader, order=Order.try_first)
class BodyPartReaderPayload(Payload):
    def __init__(self, value: BodyPartReader, *args: Any, **kwargs: Any) -> None:
        super().__init__(value, *args, **kwargs)

        params: Dict[str, str] = {}
        if value.name is not None:
            params["name"] = value.name
        if value.filename is not None:
            params["filename"] = value.filename

        if params:
            self.set_content_disposition("attachment", True, **params)

    async def write(self, writer: Any) -> None:
        field = self._value
        chunk = await field.read_chunk(size=2**16)
        while chunk:
            await writer.write(field.decode(chunk))
            chunk = await field.read_chunk(size=2**16)


class MultipartReader:
    """Multipart body reader."""

    #: Response wrapper, used when a multipart reader is constructed from
    #: a response.
    response_wrapper_cls = MultipartResponseWrapper
    #: Multipart reader class, used to handle multipart/* body parts.
    #: None points to type(self)
    multipart_reader_cls = None
    #: Body part reader class for non multipart/* content types.
    part_reader_cls = BodyPartReader

    def __init__(self, headers: Mapping[str, str], content: StreamReader) -> None:
        self.headers = headers
        self._boundary = ("--" + self._get_boundary()).encode()
        self._content = content
        self._last_part: Optional[Union["MultipartReader", BodyPartReader]] = None
        self._at_eof = False
        self._at_bof = True
        self._unread: List[bytes] = []

    def __aiter__(
        self,
    ) -> AsyncIterator["BodyPartReader"]:
        return self  # type: ignore[return-value]

    async def __anext__(
        self,
    ) -> Optional[Union["MultipartReader", BodyPartReader]]:
        part = await self.next()
        if part is None:
            raise StopAsyncIteration
        return part

    @classmethod
    def from_response(
        cls,
        response: "ClientResponse",
    ) -> MultipartResponseWrapper:
        """Constructs a reader instance from an HTTP response.

        :param response: :class:`~aiohttp.client.ClientResponse` instance
        """
        obj = cls.response_wrapper_cls(
            response, cls(response.headers, response.content)
        )
        return obj

    def at_eof(self) -> bool:
        """Returns True if the final boundary was reached, False otherwise."""
        return self._at_eof

    async def next(
        self,
    ) -> Optional[Union["MultipartReader", BodyPartReader]]:
        """Emits the next multipart body part."""
        # So, if we're at BOF, we need to skip till the boundary.
        if self._at_eof:
            return None
        await self._maybe_release_last_part()
        if self._at_bof:
            await self._read_until_first_boundary()
            self._at_bof = False
        else:
            await self._read_boundary()
        if self._at_eof:  # we just read the last boundary, nothing to do there
            return None
        self._last_part = await self.fetch_next_part()
        return self._last_part

    async def release(self) -> None:
        """Reads all the body parts to the void till the final boundary."""
        while not self._at_eof:
            item = await self.next()
            if item is None:
                break
            await item.release()

    async def fetch_next_part(
        self,
    ) -> Union["MultipartReader", BodyPartReader]:
        """Returns the next body part reader."""
        headers = await self._read_headers()
        return self._get_part_reader(headers)

    def _get_part_reader(
        self,
        headers: "CIMultiDictProxy[str]",
    ) -> Union["MultipartReader", BodyPartReader]:
        """Dispatches the response by the `Content-Type` header.

        Returns a suitable reader instance.

        :param dict headers: Response headers
        """
        ctype = headers.get(CONTENT_TYPE, "")
        mimetype = parse_mimetype(ctype)

        if mimetype.type == "multipart":
            if self.multipart_reader_cls is None:
                return type(self)(headers, self._content)
            return self.multipart_reader_cls(headers, self._content)
        else:
            return self.part_reader_cls(self._boundary, headers, self._content)

    def _get_boundary(self) -> str:
        mimetype = parse_mimetype(self.headers[CONTENT_TYPE])

        assert mimetype.type == "multipart", "multipart/* content type expected"

        if "boundary" not in mimetype.parameters:
            raise ValueError(
                "boundary missing for Content-Type: %s" % self.headers[CONTENT_TYPE]
            )

        boundary = mimetype.parameters["boundary"]
        if len(boundary) > 70:
            raise ValueError("boundary %r is too long (70 chars max)" % boundary)

        return boundary

    async def _readline(self) -> bytes:
        if self._unread:
            return self._unread.pop()
        return await self._content.readline()

    async def _read_until_first_boundary(self) -> None:
        while True:
            chunk = await self._readline()
            if chunk == b"":
                raise ValueError(
                    "Could not find starting boundary %r" % (self._boundary)
                )
            chunk = chunk.rstrip()
            if chunk == self._boundary:
                return
            elif chunk == self._boundary + b"--":
                self._at_eof = True
                return

    async def _read_boundary(self) -> None:
        chunk = (await self._readline()).rstrip()
        if chunk == self._boundary:
            pass
        elif chunk == self._boundary + b"--":
            self._at_eof = True
            epilogue = await self._readline()
            next_line = await self._readline()

            # the epilogue is expected and then either the end of input or the
            # parent multipart boundary, if the parent boundary is found then
            # it should be marked as unread and handed to the parent for
            # processing
            if next_line[:2] == b"--":
                self._unread.append(next_line)
            # otherwise the request is likely missing an epilogue and both
            # lines should be passed to the parent for processing
            # (this handles the old behavior gracefully)
            else:
                self._unread.extend([next_line, epilogue])
        else:
            raise ValueError(f"Invalid boundary {chunk!r}, expected {self._boundary!r}")

    async def _read_headers(self) -> "CIMultiDictProxy[str]":
        lines = [b""]
        while True:
            chunk = await self._content.readline()
            chunk = chunk.strip()
            lines.append(chunk)
            if not chunk:
                break
        parser = HeadersParser()
        headers, raw_headers = parser.parse_headers(lines)
        return headers

    async def _maybe_release_last_part(self) -> None:
        """Ensures that the last read body part is read completely."""
        if self._last_part is not None:
            if not self._last_part.at_eof():
                await self._last_part.release()
            self._unread.extend(self._last_part._unread)
            self._last_part = None


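Typical client-side iteration over a multipart response (illustrative only, not part of this file; `resp` is assumed to be a ClientResponse carrying a multipart body):

    # Hypothetical handler code -- names are assumptions.
    async def handle(resp):
        reader = MultipartReader.from_response(resp)
        async for part in reader:
            # `part` is a BodyPartReader, or a nested MultipartReader
            # for multipart/* sub-parts.
            if isinstance(part, BodyPartReader):
                data = await part.read(decode=True)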
_Part = Tuple[Payload, str, str]


class MultipartWriter(Payload):
    """Multipart body writer."""

    def __init__(self, subtype: str = "mixed", boundary: Optional[str] = None) -> None:
        boundary = boundary if boundary is not None else uuid.uuid4().hex
        # The underlying Payload API demands a str (utf-8), not bytes,
        # so we need to ensure we don't lose anything during conversion.
        # As a result, require the boundary to be ASCII only.

        try:
            self._boundary = boundary.encode("ascii")
        except UnicodeEncodeError:
            raise ValueError("boundary should contain ASCII only chars") from None
        ctype = f"multipart/{subtype}; boundary={self._boundary_value}"

        super().__init__(None, content_type=ctype)

        self._parts: List[_Part] = []

    def __enter__(self) -> "MultipartWriter":
        return self

    def __exit__(
        self,
        exc_type: Optional[Type[BaseException]],
        exc_val: Optional[BaseException],
        exc_tb: Optional[TracebackType],
    ) -> None:
        pass

    def __iter__(self) -> Iterator[_Part]:
        return iter(self._parts)

    def __len__(self) -> int:
        return len(self._parts)

    def __bool__(self) -> bool:
        return True

    _valid_tchar_regex = re.compile(rb"\A[!#$%&'*+\-.^_`|~\w]+\Z")
    _invalid_qdtext_char_regex = re.compile(rb"[\x00-\x08\x0A-\x1F\x7F]")

    @property
    def _boundary_value(self) -> str:
        """Wrap boundary parameter value in quotes, if necessary.

        Reads self.boundary and returns a unicode string.
        """
        # Refer to RFCs 7231, 7230, 5234.
        #
        # parameter      = token "=" ( token / quoted-string )
        # token          = 1*tchar
        # quoted-string  = DQUOTE *( qdtext / quoted-pair ) DQUOTE
        # qdtext         = HTAB / SP / %x21 / %x23-5B / %x5D-7E / obs-text
        # obs-text       = %x80-FF
        # quoted-pair    = "\" ( HTAB / SP / VCHAR / obs-text )
        # tchar          = "!" / "#" / "$" / "%" / "&" / "'" / "*"
        #                  / "+" / "-" / "." / "^" / "_" / "`" / "|" / "~"
        #                  / DIGIT / ALPHA
        #                  ; any VCHAR, except delimiters
        # VCHAR          = %x21-7E
        value = self._boundary
        if re.match(self._valid_tchar_regex, value):
            return value.decode("ascii")  # cannot fail

        if re.search(self._invalid_qdtext_char_regex, value):
            raise ValueError("boundary value contains invalid characters")

        # escape %x5C and %x22
        quoted_value_content = value.replace(b"\\", b"\\\\")
        quoted_value_content = quoted_value_content.replace(b'"', b'\\"')

        return '"' + quoted_value_content.decode("ascii") + '"'

    @property
    def boundary(self) -> str:
        return self._boundary.decode("ascii")

    def append(self, obj: Any, headers: Optional[MultiMapping[str]] = None) -> Payload:
        if headers is None:
            headers = CIMultiDict()

        if isinstance(obj, Payload):
            obj.headers.update(headers)
            return self.append_payload(obj)
        else:
            try:
                payload = get_payload(obj, headers=headers)
            except LookupError:
                raise TypeError("Cannot create payload from %r" % obj)
            else:
                return self.append_payload(payload)

    def append_payload(self, payload: Payload) -> Payload:
        """Adds a new body part to the multipart writer."""
        # compression
        encoding: Optional[str] = payload.headers.get(
            CONTENT_ENCODING,
            "",
        ).lower()
        if encoding and encoding not in ("deflate", "gzip", "identity"):
            raise RuntimeError(f"unknown content encoding: {encoding}")
        if encoding == "identity":
            encoding = None

        # te encoding
        te_encoding: Optional[str] = payload.headers.get(
            CONTENT_TRANSFER_ENCODING,
            "",
        ).lower()
        if te_encoding not in ("", "base64", "quoted-printable", "binary"):
            raise RuntimeError(f"unknown content transfer encoding: {te_encoding}")
        if te_encoding == "binary":
            te_encoding = None

        # size
        size = payload.size
        if size is not None and not (encoding or te_encoding):
            payload.headers[CONTENT_LENGTH] = str(size)

        self._parts.append((payload, encoding, te_encoding))  # type: ignore[arg-type]
        return payload

    def append_json(
        self, obj: Any, headers: Optional[MultiMapping[str]] = None
    ) -> Payload:
        """Helper to append a JSON part."""
        if headers is None:
            headers = CIMultiDict()

        return self.append_payload(JsonPayload(obj, headers=headers))

    def append_form(
        self,
        obj: Union[Sequence[Tuple[str, str]], Mapping[str, str]],
        headers: Optional[MultiMapping[str]] = None,
    ) -> Payload:
        """Helper to append a form urlencoded part."""
        assert isinstance(obj, (Sequence, Mapping))

        if headers is None:
            headers = CIMultiDict()

        if isinstance(obj, Mapping):
            obj = list(obj.items())
        data = urlencode(obj, doseq=True)

        return self.append_payload(
            StringPayload(
                data, headers=headers, content_type="application/x-www-form-urlencoded"
            )
        )

    @property
    def size(self) -> Optional[int]:
        """Size of the payload."""
        total = 0
        for part, encoding, te_encoding in self._parts:
            if encoding or te_encoding or part.size is None:
                return None

            total += int(
                2
                + len(self._boundary)
                + 2
                + part.size  # b'--'+self._boundary+b'\r\n'
                + len(part._binary_headers)
                + 2  # b'\r\n'
            )

        total += 2 + len(self._boundary) + 4  # b'--'+self._boundary+b'--\r\n'
        return total

    async def write(self, writer: Any, close_boundary: bool = True) -> None:
        """Write body."""
        for part, encoding, te_encoding in self._parts:
            await writer.write(b"--" + self._boundary + b"\r\n")
            await writer.write(part._binary_headers)

            if encoding or te_encoding:
                w = MultipartPayloadWriter(writer)
                if encoding:
                    w.enable_compression(encoding)
                if te_encoding:
                    w.enable_encoding(te_encoding)
                await part.write(w)  # type: ignore[arg-type]
                await w.write_eof()
            else:
                await part.write(writer)

            await writer.write(b"\r\n")

        if close_boundary:
            await writer.write(b"--" + self._boundary + b"--\r\n")


class MultipartPayloadWriter:
    def __init__(self, writer: Any) -> None:
        self._writer = writer
        self._encoding: Optional[str] = None
        self._compress: Any = None
        self._encoding_buffer: Optional[bytearray] = None

    def enable_encoding(self, encoding: str) -> None:
        if encoding == "base64":
            self._encoding = encoding
            self._encoding_buffer = bytearray()
        elif encoding == "quoted-printable":
            self._encoding = "quoted-printable"

    def enable_compression(
        self, encoding: str = "deflate", strategy: int = zlib.Z_DEFAULT_STRATEGY
    ) -> None:
        zlib_mode = 16 + zlib.MAX_WBITS if encoding == "gzip" else -zlib.MAX_WBITS
        self._compress = zlib.compressobj(wbits=zlib_mode, strategy=strategy)

    async def write_eof(self) -> None:
        if self._compress is not None:
            chunk = self._compress.flush()
            if chunk:
                self._compress = None
                await self.write(chunk)

        if self._encoding == "base64":
            if self._encoding_buffer:
                await self._writer.write(base64.b64encode(self._encoding_buffer))

    async def write(self, chunk: bytes) -> None:
        if self._compress is not None:
            if chunk:
                chunk = self._compress.compress(chunk)
                if not chunk:
                    return

        if self._encoding == "base64":
            buf = self._encoding_buffer
            assert buf is not None
            buf.extend(chunk)

            if buf:
                div, mod = divmod(len(buf), 3)
                enc_chunk, self._encoding_buffer = (buf[: div * 3], buf[div * 3 :])
                if enc_chunk:
                    b64chunk = base64.b64encode(enc_chunk)
                    await self._writer.write(b64chunk)
        elif self._encoding == "quoted-printable":
            await self._writer.write(binascii.b2a_qp(chunk))
        else:
            await self._writer.write(chunk)
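A minimal sketch of building a multipart body with the writer above (illustrative only, not part of this file):

    with MultipartWriter("form-data") as mpwriter:
        part = mpwriter.append("some text")
        part.set_content_disposition("form-data", name="comment")
        mpwriter.append_json({"ok": True})
        mpwriter.append_form([("key", "value")])
    # mpwriter can then be used as a request body, e.g. `data=mpwriter`.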
465
.venv/lib/python3.7/site-packages/aiohttp/payload.py
Normal file
@@ -0,0 +1,465 @@
import asyncio
import enum
import io
import json
import mimetypes
import os
import warnings
from abc import ABC, abstractmethod
from itertools import chain
from typing import (
    IO,
    TYPE_CHECKING,
    Any,
    ByteString,
    Dict,
    Iterable,
    Optional,
    TextIO,
    Tuple,
    Type,
    Union,
)

from multidict import CIMultiDict

from . import hdrs
from .abc import AbstractStreamWriter
from .helpers import (
    PY_36,
    content_disposition_header,
    guess_filename,
    parse_mimetype,
    sentinel,
)
from .streams import StreamReader
from .typedefs import Final, JSONEncoder, _CIMultiDict

__all__ = (
    "PAYLOAD_REGISTRY",
    "get_payload",
    "payload_type",
    "Payload",
    "BytesPayload",
    "StringPayload",
    "IOBasePayload",
    "BytesIOPayload",
    "BufferedReaderPayload",
    "TextIOPayload",
    "StringIOPayload",
    "JsonPayload",
    "AsyncIterablePayload",
)

TOO_LARGE_BYTES_BODY: Final[int] = 2**20  # 1 MB

if TYPE_CHECKING:  # pragma: no cover
    from typing import List


class LookupError(Exception):
    pass


class Order(str, enum.Enum):
    normal = "normal"
    try_first = "try_first"
    try_last = "try_last"


def get_payload(data: Any, *args: Any, **kwargs: Any) -> "Payload":
    return PAYLOAD_REGISTRY.get(data, *args, **kwargs)


def register_payload(
    factory: Type["Payload"], type: Any, *, order: Order = Order.normal
) -> None:
    PAYLOAD_REGISTRY.register(factory, type, order=order)


class payload_type:
    def __init__(self, type: Any, *, order: Order = Order.normal) -> None:
        self.type = type
        self.order = order

    def __call__(self, factory: Type["Payload"]) -> Type["Payload"]:
        register_payload(factory, self.type, order=self.order)
        return factory


PayloadType = Type["Payload"]
_PayloadRegistryItem = Tuple[PayloadType, Any]


class PayloadRegistry:
    """Payload registry.

    note: we need zope.interface for more efficient adapter search
    """

    def __init__(self) -> None:
        self._first: List[_PayloadRegistryItem] = []
        self._normal: List[_PayloadRegistryItem] = []
        self._last: List[_PayloadRegistryItem] = []

    def get(
        self,
        data: Any,
        *args: Any,
        _CHAIN: "Type[chain[_PayloadRegistryItem]]" = chain,
        **kwargs: Any,
    ) -> "Payload":
        if isinstance(data, Payload):
            return data
        for factory, type in _CHAIN(self._first, self._normal, self._last):
            if isinstance(data, type):
                return factory(data, *args, **kwargs)

        raise LookupError()

    def register(
        self, factory: PayloadType, type: Any, *, order: Order = Order.normal
    ) -> None:
        if order is Order.try_first:
            self._first.append((factory, type))
        elif order is Order.normal:
            self._normal.append((factory, type))
        elif order is Order.try_last:
            self._last.append((factory, type))
        else:
            raise ValueError(f"Unsupported order {order!r}")


class Payload(ABC):

    _default_content_type: str = "application/octet-stream"
    _size: Optional[int] = None

    def __init__(
        self,
        value: Any,
        headers: Optional[
            Union[_CIMultiDict, Dict[str, str], Iterable[Tuple[str, str]]]
        ] = None,
        content_type: Optional[str] = sentinel,
        filename: Optional[str] = None,
        encoding: Optional[str] = None,
        **kwargs: Any,
    ) -> None:
        self._encoding = encoding
        self._filename = filename
        self._headers: _CIMultiDict = CIMultiDict()
        self._value = value
        if content_type is not sentinel and content_type is not None:
            self._headers[hdrs.CONTENT_TYPE] = content_type
        elif self._filename is not None:
            content_type = mimetypes.guess_type(self._filename)[0]
            if content_type is None:
                content_type = self._default_content_type
            self._headers[hdrs.CONTENT_TYPE] = content_type
        else:
            self._headers[hdrs.CONTENT_TYPE] = self._default_content_type
        self._headers.update(headers or {})

    @property
    def size(self) -> Optional[int]:
        """Size of the payload."""
        return self._size

    @property
    def filename(self) -> Optional[str]:
        """Filename of the payload."""
        return self._filename

    @property
    def headers(self) -> _CIMultiDict:
        """Custom item headers"""
        return self._headers

    @property
    def _binary_headers(self) -> bytes:
        return (
            "".join([k + ": " + v + "\r\n" for k, v in self.headers.items()]).encode(
                "utf-8"
            )
            + b"\r\n"
        )

    @property
    def encoding(self) -> Optional[str]:
        """Payload encoding"""
        return self._encoding

    @property
    def content_type(self) -> str:
        """Content type"""
        return self._headers[hdrs.CONTENT_TYPE]

    def set_content_disposition(
        self,
        disptype: str,
        quote_fields: bool = True,
        _charset: str = "utf-8",
        **params: Any,
    ) -> None:
        """Sets ``Content-Disposition`` header."""
        self._headers[hdrs.CONTENT_DISPOSITION] = content_disposition_header(
            disptype, quote_fields=quote_fields, _charset=_charset, **params
        )

    @abstractmethod
    async def write(self, writer: AbstractStreamWriter) -> None:
        """Write payload.

        writer is an AbstractStreamWriter instance.
        """


class BytesPayload(Payload):
    def __init__(self, value: ByteString, *args: Any, **kwargs: Any) -> None:
        if not isinstance(value, (bytes, bytearray, memoryview)):
            raise TypeError(f"value argument must be byte-ish, not {type(value)!r}")

        if "content_type" not in kwargs:
            kwargs["content_type"] = "application/octet-stream"

        super().__init__(value, *args, **kwargs)

        if isinstance(value, memoryview):
            self._size = value.nbytes
        else:
            self._size = len(value)

        if self._size > TOO_LARGE_BYTES_BODY:
            if PY_36:
                kwargs = {"source": self}
            else:
                kwargs = {}
            warnings.warn(
                "Sending a large body directly with raw bytes might"
                " lock the event loop. You should probably pass an "
                "io.BytesIO object instead",
                ResourceWarning,
                **kwargs,
            )

    async def write(self, writer: AbstractStreamWriter) -> None:
        await writer.write(self._value)


class StringPayload(BytesPayload):
    def __init__(
        self,
        value: str,
        *args: Any,
        encoding: Optional[str] = None,
        content_type: Optional[str] = None,
        **kwargs: Any,
    ) -> None:

        if encoding is None:
            if content_type is None:
                real_encoding = "utf-8"
                content_type = "text/plain; charset=utf-8"
            else:
                mimetype = parse_mimetype(content_type)
                real_encoding = mimetype.parameters.get("charset", "utf-8")
        else:
            if content_type is None:
                content_type = "text/plain; charset=%s" % encoding
            real_encoding = encoding

        super().__init__(
            value.encode(real_encoding),
            encoding=real_encoding,
            content_type=content_type,
            *args,
            **kwargs,
        )


class StringIOPayload(StringPayload):
    def __init__(self, value: IO[str], *args: Any, **kwargs: Any) -> None:
        super().__init__(value.read(), *args, **kwargs)


class IOBasePayload(Payload):
    _value: IO[Any]

    def __init__(
        self, value: IO[Any], disposition: str = "attachment", *args: Any, **kwargs: Any
    ) -> None:
        if "filename" not in kwargs:
            kwargs["filename"] = guess_filename(value)

        super().__init__(value, *args, **kwargs)

        if self._filename is not None and disposition is not None:
            if hdrs.CONTENT_DISPOSITION not in self.headers:
                self.set_content_disposition(disposition, filename=self._filename)

    async def write(self, writer: AbstractStreamWriter) -> None:
        loop = asyncio.get_event_loop()
        try:
            chunk = await loop.run_in_executor(None, self._value.read, 2**16)
            while chunk:
                await writer.write(chunk)
                chunk = await loop.run_in_executor(None, self._value.read, 2**16)
        finally:
            await loop.run_in_executor(None, self._value.close)


class TextIOPayload(IOBasePayload):
    _value: TextIO

    def __init__(
        self,
        value: TextIO,
        *args: Any,
        encoding: Optional[str] = None,
        content_type: Optional[str] = None,
        **kwargs: Any,
    ) -> None:

        if encoding is None:
            if content_type is None:
                encoding = "utf-8"
                content_type = "text/plain; charset=utf-8"
            else:
                mimetype = parse_mimetype(content_type)
                encoding = mimetype.parameters.get("charset", "utf-8")
        else:
            if content_type is None:
                content_type = "text/plain; charset=%s" % encoding

        super().__init__(
            value,
            content_type=content_type,
            encoding=encoding,
            *args,
            **kwargs,
        )

    @property
    def size(self) -> Optional[int]:
        try:
            return os.fstat(self._value.fileno()).st_size - self._value.tell()
        except OSError:
            return None

    async def write(self, writer: AbstractStreamWriter) -> None:
        loop = asyncio.get_event_loop()
        try:
            chunk = await loop.run_in_executor(None, self._value.read, 2**16)
            while chunk:
                data = (
                    chunk.encode(encoding=self._encoding)
                    if self._encoding
                    else chunk.encode()
                )
                await writer.write(data)
                chunk = await loop.run_in_executor(None, self._value.read, 2**16)
        finally:
            await loop.run_in_executor(None, self._value.close)


class BytesIOPayload(IOBasePayload):
    @property
    def size(self) -> int:
        position = self._value.tell()
        end = self._value.seek(0, os.SEEK_END)
        self._value.seek(position)
        return end - position


class BufferedReaderPayload(IOBasePayload):
    @property
    def size(self) -> Optional[int]:
        try:
            return os.fstat(self._value.fileno()).st_size - self._value.tell()
        except OSError:
            # data.fileno() is not supported, e.g.
            # io.BufferedReader(io.BytesIO(b'data'))
            return None


class JsonPayload(BytesPayload):
    def __init__(
        self,
        value: Any,
        encoding: str = "utf-8",
        content_type: str = "application/json",
        dumps: JSONEncoder = json.dumps,
        *args: Any,
        **kwargs: Any,
    ) -> None:

        super().__init__(
            dumps(value).encode(encoding),
            content_type=content_type,
            encoding=encoding,
            *args,
            **kwargs,
        )


if TYPE_CHECKING:  # pragma: no cover
    from typing import AsyncIterable, AsyncIterator

    _AsyncIterator = AsyncIterator[bytes]
    _AsyncIterable = AsyncIterable[bytes]
else:
    from collections.abc import AsyncIterable, AsyncIterator

    _AsyncIterator = AsyncIterator
    _AsyncIterable = AsyncIterable


class AsyncIterablePayload(Payload):

    _iter: Optional[_AsyncIterator] = None

    def __init__(self, value: _AsyncIterable, *args: Any, **kwargs: Any) -> None:
        if not isinstance(value, AsyncIterable):
            raise TypeError(
                "value argument must support "
                "collections.abc.AsyncIterable interface, "
                "got {!r}".format(type(value))
            )

        if "content_type" not in kwargs:
            kwargs["content_type"] = "application/octet-stream"

        super().__init__(value, *args, **kwargs)

        self._iter = value.__aiter__()

    async def write(self, writer: AbstractStreamWriter) -> None:
        if self._iter:
            try:
                # The `iter is not None` check prevents rare cases
                # when the iterable is used twice.
                while True:
                    chunk = await self._iter.__anext__()
                    await writer.write(chunk)
            except StopAsyncIteration:
                self._iter = None


class StreamReaderPayload(AsyncIterablePayload):
    def __init__(self, value: StreamReader, *args: Any, **kwargs: Any) -> None:
        super().__init__(value.iter_any(), *args, **kwargs)


PAYLOAD_REGISTRY = PayloadRegistry()
PAYLOAD_REGISTRY.register(BytesPayload, (bytes, bytearray, memoryview))
PAYLOAD_REGISTRY.register(StringPayload, str)
PAYLOAD_REGISTRY.register(StringIOPayload, io.StringIO)
PAYLOAD_REGISTRY.register(TextIOPayload, io.TextIOBase)
PAYLOAD_REGISTRY.register(BytesIOPayload, io.BytesIO)
PAYLOAD_REGISTRY.register(BufferedReaderPayload, (io.BufferedReader, io.BufferedRandom))
PAYLOAD_REGISTRY.register(IOBasePayload, io.IOBase)
PAYLOAD_REGISTRY.register(StreamReaderPayload, StreamReader)
# try_last gives more specialized async iterables like
# BodyPartReaderPayload a chance to override the default.
PAYLOAD_REGISTRY.register(AsyncIterablePayload, AsyncIterable, order=Order.try_last)
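The registry dispatches plain Python values to the payload classes registered above (illustrative only, not part of this file):

    p1 = get_payload(b"raw bytes")              # -> BytesPayload, size 9
    p2 = get_payload("some text")               # -> StringPayload, text/plain; charset=utf-8
    p3 = get_payload(io.BytesIO(b"buffered"))   # -> BytesIOPayload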
@@ -0,0 +1,75 @@
"""
Payload implementation for coroutines as data provider.

As a simple case, you can upload data from file::

   @aiohttp.streamer
   async def file_sender(writer, file_name=None):
       with open(file_name, 'rb') as f:
           chunk = f.read(2**16)
           while chunk:
               await writer.write(chunk)
               chunk = f.read(2**16)

Then you can use `file_sender` like this::

    async with session.post('http://httpbin.org/post',
                            data=file_sender(file_name='huge_file')) as resp:
        print(await resp.text())

.. note:: Coroutine must accept `writer` as first argument

"""

import types
import warnings
from typing import Any, Awaitable, Callable, Dict, Tuple

from .abc import AbstractStreamWriter
from .payload import Payload, payload_type

__all__ = ("streamer",)


class _stream_wrapper:
    def __init__(
        self,
        coro: Callable[..., Awaitable[None]],
        args: Tuple[Any, ...],
        kwargs: Dict[str, Any],
    ) -> None:
        self.coro = types.coroutine(coro)
        self.args = args
        self.kwargs = kwargs

    async def __call__(self, writer: AbstractStreamWriter) -> None:
        await self.coro(writer, *self.args, **self.kwargs)  # type: ignore[operator]


class streamer:
    def __init__(self, coro: Callable[..., Awaitable[None]]) -> None:
        warnings.warn(
            "@streamer is deprecated, use async generators instead",
            DeprecationWarning,
            stacklevel=2,
        )
        self.coro = coro

    def __call__(self, *args: Any, **kwargs: Any) -> _stream_wrapper:
        return _stream_wrapper(self.coro, args, kwargs)


@payload_type(_stream_wrapper)
class StreamWrapperPayload(Payload):
    async def write(self, writer: AbstractStreamWriter) -> None:
        await self._value(writer)


@payload_type(streamer)
class StreamPayload(StreamWrapperPayload):
    def __init__(self, value: Any, *args: Any, **kwargs: Any) -> None:
        super().__init__(value(), *args, **kwargs)

    async def write(self, writer: AbstractStreamWriter) -> None:
        await self._value(writer)
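Since @streamer raises a DeprecationWarning, a hedged sketch of the async-generator replacement it points to (not part of the vendored file; it mirrors the file_sender example from the module docstring, and httpbin.org/huge_file are illustrative):

# Sketch only: the modern equivalent of the @streamer docstring example.
# Async generators are handled by AsyncIterablePayload automatically.
import aiohttp


async def file_sender(file_name: str):
    with open(file_name, "rb") as f:
        chunk = f.read(2**16)
        while chunk:
            yield chunk
            chunk = f.read(2**16)


async def upload() -> None:
    async with aiohttp.ClientSession() as session:
        async with session.post(
            "http://httpbin.org/post", data=file_sender("huge_file")
        ) as resp:
            print(await resp.text())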
1
.venv/lib/python3.7/site-packages/aiohttp/py.typed
Normal file
@@ -0,0 +1 @@
Marker
391
.venv/lib/python3.7/site-packages/aiohttp/pytest_plugin.py
Normal file
@@ -0,0 +1,391 @@
import asyncio
import contextlib
import warnings
from collections.abc import Callable
from typing import Any, Awaitable, Callable, Dict, Generator, Optional, Union

import pytest

from aiohttp.helpers import PY_37, isasyncgenfunction
from aiohttp.web import Application

from .test_utils import (
    BaseTestServer,
    RawTestServer,
    TestClient,
    TestServer,
    loop_context,
    setup_test_loop,
    teardown_test_loop,
    unused_port as _unused_port,
)

try:
    import uvloop
except ImportError:  # pragma: no cover
    uvloop = None

try:
    import tokio
except ImportError:  # pragma: no cover
    tokio = None

AiohttpClient = Callable[[Union[Application, BaseTestServer]], Awaitable[TestClient]]


def pytest_addoption(parser):  # type: ignore[no-untyped-def]
    parser.addoption(
        "--aiohttp-fast",
        action="store_true",
        default=False,
        help="run tests faster by disabling extra checks",
    )
    parser.addoption(
        "--aiohttp-loop",
        action="store",
        default="pyloop",
        help="run tests with specific loop: pyloop, uvloop, tokio or all",
    )
    parser.addoption(
        "--aiohttp-enable-loop-debug",
        action="store_true",
        default=False,
        help="enable event loop debug mode",
    )


def pytest_fixture_setup(fixturedef):  # type: ignore[no-untyped-def]
    """Set up pytest fixture.

    Allow fixtures to be coroutines. Run coroutine fixtures in an event loop.
    """
    func = fixturedef.func

    if isasyncgenfunction(func):
        # async generator fixture
        is_async_gen = True
    elif asyncio.iscoroutinefunction(func):
        # regular async fixture
        is_async_gen = False
    else:
        # not an async fixture, nothing to do
        return

    strip_request = False
    if "request" not in fixturedef.argnames:
        fixturedef.argnames += ("request",)
        strip_request = True

    def wrapper(*args, **kwargs):  # type: ignore[no-untyped-def]
        request = kwargs["request"]
        if strip_request:
            del kwargs["request"]

        # if neither the fixture nor the test use the 'loop' fixture,
        # 'getfixturevalue' will fail because the test is not parameterized
        # (this can be removed someday if 'loop' is no longer parameterized)
        if "loop" not in request.fixturenames:
            raise Exception(
                "Asynchronous fixtures must depend on the 'loop' fixture or "
                "be used in tests depending from it."
            )

        _loop = request.getfixturevalue("loop")

        if is_async_gen:
            # for async generators, we need to advance the generator once,
            # then advance it again in a finalizer
            gen = func(*args, **kwargs)

            def finalizer():  # type: ignore[no-untyped-def]
                try:
                    return _loop.run_until_complete(gen.__anext__())
                except StopAsyncIteration:
                    pass

            request.addfinalizer(finalizer)
            return _loop.run_until_complete(gen.__anext__())
        else:
            return _loop.run_until_complete(func(*args, **kwargs))

    fixturedef.func = wrapper


@pytest.fixture
def fast(request):  # type: ignore[no-untyped-def]
    """--fast config option"""
    return request.config.getoption("--aiohttp-fast")


@pytest.fixture
def loop_debug(request):  # type: ignore[no-untyped-def]
    """--enable-loop-debug config option"""
    return request.config.getoption("--aiohttp-enable-loop-debug")


@contextlib.contextmanager
def _runtime_warning_context():  # type: ignore[no-untyped-def]
    """Context manager which checks for RuntimeWarnings.

    This exists specifically to
    avoid "coroutine 'X' was never awaited" warnings being missed.

    If RuntimeWarnings occur in the context a RuntimeError is raised.
    """
    with warnings.catch_warnings(record=True) as _warnings:
        yield
        rw = [
            "{w.filename}:{w.lineno}:{w.message}".format(w=w)
            for w in _warnings
            if w.category == RuntimeWarning
        ]
        if rw:
            raise RuntimeError(
                "{} Runtime Warning{},\n{}".format(
                    len(rw), "" if len(rw) == 1 else "s", "\n".join(rw)
                )
            )


@contextlib.contextmanager
def _passthrough_loop_context(loop, fast=False):  # type: ignore[no-untyped-def]
    """Passthrough loop context.

    Sets up and tears down a loop unless one is passed in via the loop
    argument when it's passed straight through.
    """
    if loop:
        # loop already exists, pass it straight through
        yield loop
    else:
        # this shadows loop_context's standard behavior
        loop = setup_test_loop()
        yield loop
        teardown_test_loop(loop, fast=fast)


def pytest_pycollect_makeitem(collector, name, obj):  # type: ignore[no-untyped-def]
    """Fix pytest collecting for coroutines."""
    if collector.funcnamefilter(name) and asyncio.iscoroutinefunction(obj):
        return list(collector._genfunctions(name, obj))


def pytest_pyfunc_call(pyfuncitem):  # type: ignore[no-untyped-def]
    """Run coroutines in an event loop instead of a normal function call."""
    fast = pyfuncitem.config.getoption("--aiohttp-fast")
    if asyncio.iscoroutinefunction(pyfuncitem.function):
        existing_loop = pyfuncitem.funcargs.get(
            "proactor_loop"
        ) or pyfuncitem.funcargs.get("loop", None)
        with _runtime_warning_context():
            with _passthrough_loop_context(existing_loop, fast=fast) as _loop:
                testargs = {
                    arg: pyfuncitem.funcargs[arg]
                    for arg in pyfuncitem._fixtureinfo.argnames
                }
                _loop.run_until_complete(pyfuncitem.obj(**testargs))

        return True


def pytest_generate_tests(metafunc):  # type: ignore[no-untyped-def]
    if "loop_factory" not in metafunc.fixturenames:
        return

    loops = metafunc.config.option.aiohttp_loop
    avail_factories = {"pyloop": asyncio.DefaultEventLoopPolicy}

    if uvloop is not None:  # pragma: no cover
        avail_factories["uvloop"] = uvloop.EventLoopPolicy

    if tokio is not None:  # pragma: no cover
        avail_factories["tokio"] = tokio.EventLoopPolicy

    if loops == "all":
        loops = "pyloop,uvloop?,tokio?"

    factories = {}  # type: ignore[var-annotated]
    for name in loops.split(","):
        required = not name.endswith("?")
        name = name.strip(" ?")
        if name not in avail_factories:  # pragma: no cover
            if required:
                raise ValueError(
                    "Unknown loop '%s', available loops: %s"
                    % (name, list(factories.keys()))
                )
            else:
                continue
        factories[name] = avail_factories[name]
    metafunc.parametrize(
        "loop_factory", list(factories.values()), ids=list(factories.keys())
    )


@pytest.fixture
def loop(loop_factory, fast, loop_debug):  # type: ignore[no-untyped-def]
    """Return an instance of the event loop."""
    policy = loop_factory()
    asyncio.set_event_loop_policy(policy)
    with loop_context(fast=fast) as _loop:
        if loop_debug:
            _loop.set_debug(True)  # pragma: no cover
        asyncio.set_event_loop(_loop)
        yield _loop


@pytest.fixture
def proactor_loop():  # type: ignore[no-untyped-def]
    if not PY_37:
        policy = asyncio.get_event_loop_policy()
        policy._loop_factory = asyncio.ProactorEventLoop  # type: ignore[attr-defined]
    else:
        policy = asyncio.WindowsProactorEventLoopPolicy()  # type: ignore[attr-defined]
        asyncio.set_event_loop_policy(policy)

    with loop_context(policy.new_event_loop) as _loop:
        asyncio.set_event_loop(_loop)
        yield _loop


@pytest.fixture
def unused_port(aiohttp_unused_port):  # type: ignore[no-untyped-def] # pragma: no cover
    warnings.warn(
        "Deprecated, use aiohttp_unused_port fixture instead",
        DeprecationWarning,
        stacklevel=2,
    )
    return aiohttp_unused_port


@pytest.fixture
def aiohttp_unused_port():  # type: ignore[no-untyped-def]
    """Return a port that is unused on the current host."""
    return _unused_port


@pytest.fixture
def aiohttp_server(loop):  # type: ignore[no-untyped-def]
    """Factory to create a TestServer instance, given an app.

    aiohttp_server(app, **kwargs)
    """
    servers = []

    async def go(app, *, port=None, **kwargs):  # type: ignore[no-untyped-def]
        server = TestServer(app, port=port)
        await server.start_server(loop=loop, **kwargs)
        servers.append(server)
        return server

    yield go

    async def finalize() -> None:
        while servers:
            await servers.pop().close()

    loop.run_until_complete(finalize())


@pytest.fixture
def test_server(aiohttp_server):  # type: ignore[no-untyped-def] # pragma: no cover
    warnings.warn(
        "Deprecated, use aiohttp_server fixture instead",
        DeprecationWarning,
        stacklevel=2,
    )
    return aiohttp_server


@pytest.fixture
def aiohttp_raw_server(loop):  # type: ignore[no-untyped-def]
    """Factory to create a RawTestServer instance, given a web handler.

    aiohttp_raw_server(handler, **kwargs)
    """
    servers = []

    async def go(handler, *, port=None, **kwargs):  # type: ignore[no-untyped-def]
        server = RawTestServer(handler, port=port)
        await server.start_server(loop=loop, **kwargs)
        servers.append(server)
        return server

    yield go

    async def finalize() -> None:
        while servers:
            await servers.pop().close()

    loop.run_until_complete(finalize())


@pytest.fixture
def raw_test_server(  # type: ignore[no-untyped-def] # pragma: no cover
    aiohttp_raw_server,
):
    warnings.warn(
        "Deprecated, use aiohttp_raw_server fixture instead",
        DeprecationWarning,
        stacklevel=2,
    )
    return aiohttp_raw_server


@pytest.fixture
def aiohttp_client(
    loop: asyncio.AbstractEventLoop,
) -> Generator[AiohttpClient, None, None]:
    """Factory to create a TestClient instance.

    aiohttp_client(app, **kwargs)
    aiohttp_client(server, **kwargs)
    aiohttp_client(raw_server, **kwargs)
    """
    clients = []

    async def go(
        __param: Union[Application, BaseTestServer],
        *args: Any,
        server_kwargs: Optional[Dict[str, Any]] = None,
        **kwargs: Any
    ) -> TestClient:

        if isinstance(__param, Callable) and not isinstance(  # type: ignore[arg-type]
            __param, (Application, BaseTestServer)
        ):
            __param = __param(loop, *args, **kwargs)
            kwargs = {}
        else:
            assert not args, "args should be empty"

        if isinstance(__param, Application):
            server_kwargs = server_kwargs or {}
            server = TestServer(__param, loop=loop, **server_kwargs)
            client = TestClient(server, loop=loop, **kwargs)
        elif isinstance(__param, BaseTestServer):
            client = TestClient(__param, loop=loop, **kwargs)
        else:
            raise ValueError("Unknown argument type: %r" % type(__param))

        await client.start_server()
        clients.append(client)
        return client

    yield go

    async def finalize() -> None:
        while clients:
            await clients.pop().close()

    loop.run_until_complete(finalize())


@pytest.fixture
def test_client(aiohttp_client):  # type: ignore[no-untyped-def] # pragma: no cover
    warnings.warn(
        "Deprecated, use aiohttp_client fixture instead",
        DeprecationWarning,
        stacklevel=2,
    )
    return aiohttp_client
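A hedged sketch of how the aiohttp_client fixture defined above is typically used in a test module (not part of the vendored file; the hello handler and test name are illustrative):

# Sketch only: a pytest test built on the aiohttp_client fixture.
from aiohttp import web


async def hello(request: web.Request) -> web.Response:
    return web.Response(text="hello")


async def test_hello(aiohttp_client) -> None:
    app = web.Application()
    app.router.add_get("/", hello)
    # the factory starts a TestServer around the app and returns a TestClient
    client = await aiohttp_client(app)
    resp = await client.get("/")
    assert resp.status == 200
    assert "hello" in await resp.text()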
160
.venv/lib/python3.7/site-packages/aiohttp/resolver.py
Normal file
@@ -0,0 +1,160 @@
import asyncio
import socket
from typing import Any, Dict, List, Optional, Type, Union

from .abc import AbstractResolver
from .helpers import get_running_loop

__all__ = ("ThreadedResolver", "AsyncResolver", "DefaultResolver")

try:
    import aiodns

    # aiodns_default = hasattr(aiodns.DNSResolver, 'gethostbyname')
except ImportError:  # pragma: no cover
    aiodns = None

aiodns_default = False


class ThreadedResolver(AbstractResolver):
    """Threaded resolver.

    Uses an Executor for synchronous getaddrinfo() calls.
    concurrent.futures.ThreadPoolExecutor is used by default.
    """

    def __init__(self, loop: Optional[asyncio.AbstractEventLoop] = None) -> None:
        self._loop = get_running_loop(loop)

    async def resolve(
        self, hostname: str, port: int = 0, family: int = socket.AF_INET
    ) -> List[Dict[str, Any]]:
        infos = await self._loop.getaddrinfo(
            hostname,
            port,
            type=socket.SOCK_STREAM,
            family=family,
            flags=socket.AI_ADDRCONFIG,
        )

        hosts = []
        for family, _, proto, _, address in infos:
            if family == socket.AF_INET6:
                if len(address) < 3:
                    # IPv6 is not supported by Python build,
                    # or IPv6 is not enabled in the host
                    continue
                if address[3]:  # type: ignore[misc]
                    # This is essential for link-local IPv6 addresses.
                    # LL IPv6 is a VERY rare case. Strictly speaking, we should use
                    # getnameinfo() unconditionally, but performance makes sense.
                    host, _port = socket.getnameinfo(
                        address, socket.NI_NUMERICHOST | socket.NI_NUMERICSERV
                    )
                    port = int(_port)
                else:
                    host, port = address[:2]
            else:  # IPv4
                assert family == socket.AF_INET
                host, port = address  # type: ignore[misc]
            hosts.append(
                {
                    "hostname": hostname,
                    "host": host,
                    "port": port,
                    "family": family,
                    "proto": proto,
                    "flags": socket.AI_NUMERICHOST | socket.AI_NUMERICSERV,
                }
            )

        return hosts

    async def close(self) -> None:
        pass


class AsyncResolver(AbstractResolver):
    """Use the `aiodns` package to make asynchronous DNS lookups"""

    def __init__(
        self,
        loop: Optional[asyncio.AbstractEventLoop] = None,
        *args: Any,
        **kwargs: Any
    ) -> None:
        if aiodns is None:
            raise RuntimeError("Resolver requires aiodns library")

        self._loop = get_running_loop(loop)
        self._resolver = aiodns.DNSResolver(*args, loop=loop, **kwargs)

        if not hasattr(self._resolver, "gethostbyname"):
            # aiodns 1.1 is not available, fallback to DNSResolver.query
            self.resolve = self._resolve_with_query  # type: ignore

    async def resolve(
        self, host: str, port: int = 0, family: int = socket.AF_INET
    ) -> List[Dict[str, Any]]:
        try:
            resp = await self._resolver.gethostbyname(host, family)
        except aiodns.error.DNSError as exc:
            # exc.args[1] holds the message, so require at least two elements
            msg = exc.args[1] if len(exc.args) >= 2 else "DNS lookup failed"
            raise OSError(msg) from exc
        hosts = []
        for address in resp.addresses:
            hosts.append(
                {
                    "hostname": host,
                    "host": address,
                    "port": port,
                    "family": family,
                    "proto": 0,
                    "flags": socket.AI_NUMERICHOST | socket.AI_NUMERICSERV,
                }
            )

        if not hosts:
            raise OSError("DNS lookup failed")

        return hosts

    async def _resolve_with_query(
        self, host: str, port: int = 0, family: int = socket.AF_INET
    ) -> List[Dict[str, Any]]:
        if family == socket.AF_INET6:
            qtype = "AAAA"
        else:
            qtype = "A"

        try:
            resp = await self._resolver.query(host, qtype)
        except aiodns.error.DNSError as exc:
            msg = exc.args[1] if len(exc.args) >= 2 else "DNS lookup failed"
            raise OSError(msg) from exc

        hosts = []
        for rr in resp:
            hosts.append(
                {
                    "hostname": host,
                    "host": rr.host,
                    "port": port,
                    "family": family,
                    "proto": 0,
                    "flags": socket.AI_NUMERICHOST,
                }
            )

        if not hosts:
            raise OSError("DNS lookup failed")

        return hosts

    async def close(self) -> None:
        self._resolver.cancel()


_DefaultType = Type[Union[AsyncResolver, ThreadedResolver]]
DefaultResolver: _DefaultType = AsyncResolver if aiodns_default else ThreadedResolver
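A hedged sketch of how a resolver from this module is plugged into a connector (not part of the vendored file; the URL and the 8.8.8.8 nameserver are illustrative, and AsyncResolver needs the optional aiodns package installed):

# Sketch only: pass a resolver to TCPConnector so all DNS lookups in
# the session go through it.
import aiohttp
from aiohttp.resolver import AsyncResolver


async def fetch(url: str) -> str:
    resolver = AsyncResolver(nameservers=["8.8.8.8"])  # forwarded to aiodns
    connector = aiohttp.TCPConnector(resolver=resolver)
    async with aiohttp.ClientSession(connector=connector) as session:
        async with session.get(url) as resp:
            return await resp.text()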
660
.venv/lib/python3.7/site-packages/aiohttp/streams.py
Normal file
@@ -0,0 +1,660 @@
import asyncio
import collections
import warnings
from typing import Awaitable, Callable, Deque, Generic, List, Optional, Tuple, TypeVar

from .base_protocol import BaseProtocol
from .helpers import BaseTimerContext, set_exception, set_result
from .log import internal_logger
from .typedefs import Final

__all__ = (
    "EMPTY_PAYLOAD",
    "EofStream",
    "StreamReader",
    "DataQueue",
    "FlowControlDataQueue",
)

_T = TypeVar("_T")


class EofStream(Exception):
    """eof stream indication."""


class AsyncStreamIterator(Generic[_T]):
    def __init__(self, read_func: Callable[[], Awaitable[_T]]) -> None:
        self.read_func = read_func

    def __aiter__(self) -> "AsyncStreamIterator[_T]":
        return self

    async def __anext__(self) -> _T:
        try:
            rv = await self.read_func()
        except EofStream:
            raise StopAsyncIteration
        if rv == b"":
            raise StopAsyncIteration
        return rv


class ChunkTupleAsyncStreamIterator:
    def __init__(self, stream: "StreamReader") -> None:
        self._stream = stream

    def __aiter__(self) -> "ChunkTupleAsyncStreamIterator":
        return self

    async def __anext__(self) -> Tuple[bytes, bool]:
        rv = await self._stream.readchunk()
        if rv == (b"", False):
            raise StopAsyncIteration
        return rv


class AsyncStreamReaderMixin:
    def __aiter__(self) -> AsyncStreamIterator[bytes]:
        return AsyncStreamIterator(self.readline)  # type: ignore[attr-defined]

    def iter_chunked(self, n: int) -> AsyncStreamIterator[bytes]:
        """Returns an asynchronous iterator that yields chunks of size n.

        Available for Python 3.5+ only.
        """
        return AsyncStreamIterator(
            lambda: self.read(n)  # type: ignore[attr-defined,no-any-return]
        )

    def iter_any(self) -> AsyncStreamIterator[bytes]:
        """Yield all available data as soon as it is received.

        Available for Python 3.5+ only.
        """
        return AsyncStreamIterator(self.readany)  # type: ignore[attr-defined]

    def iter_chunks(self) -> ChunkTupleAsyncStreamIterator:
        """Yield chunks of data as they are received by the server.

        The yielded objects are tuples
        of (bytes, bool) as returned by the StreamReader.readchunk method.

        Available for Python 3.5+ only.
        """
        return ChunkTupleAsyncStreamIterator(self)  # type: ignore[arg-type]


class StreamReader(AsyncStreamReaderMixin):
    """An enhancement of asyncio.StreamReader.

    Supports asynchronous iteration by line, chunk or as available::

        async for line in reader:
            ...
        async for chunk in reader.iter_chunked(1024):
            ...
        async for slice in reader.iter_any():
            ...

    """

    total_bytes = 0

    def __init__(
        self,
        protocol: BaseProtocol,
        limit: int,
        *,
        timer: Optional[BaseTimerContext] = None,
        loop: Optional[asyncio.AbstractEventLoop] = None,
    ) -> None:
        self._protocol = protocol
        self._low_water = limit
        self._high_water = limit * 2
        if loop is None:
            loop = asyncio.get_event_loop()
        self._loop = loop
        self._size = 0
        self._cursor = 0
        self._http_chunk_splits: Optional[List[int]] = None
        self._buffer: Deque[bytes] = collections.deque()
        self._buffer_offset = 0
        self._eof = False
        self._waiter: Optional[asyncio.Future[None]] = None
        self._eof_waiter: Optional[asyncio.Future[None]] = None
        self._exception: Optional[BaseException] = None
        self._timer = timer
        self._eof_callbacks: List[Callable[[], None]] = []

    def __repr__(self) -> str:
        info = [self.__class__.__name__]
        if self._size:
            info.append("%d bytes" % self._size)
        if self._eof:
            info.append("eof")
        if self._low_water != 2**16:  # default limit
            info.append("low=%d high=%d" % (self._low_water, self._high_water))
        if self._waiter:
            info.append("w=%r" % self._waiter)
        if self._exception:
            info.append("e=%r" % self._exception)
        return "<%s>" % " ".join(info)

    def get_read_buffer_limits(self) -> Tuple[int, int]:
        return (self._low_water, self._high_water)

    def exception(self) -> Optional[BaseException]:
        return self._exception

    def set_exception(self, exc: BaseException) -> None:
        self._exception = exc
        self._eof_callbacks.clear()

        waiter = self._waiter
        if waiter is not None:
            self._waiter = None
            set_exception(waiter, exc)

        waiter = self._eof_waiter
        if waiter is not None:
            self._eof_waiter = None
            set_exception(waiter, exc)

    def on_eof(self, callback: Callable[[], None]) -> None:
        if self._eof:
            try:
                callback()
            except Exception:
                internal_logger.exception("Exception in eof callback")
        else:
            self._eof_callbacks.append(callback)

    def feed_eof(self) -> None:
        self._eof = True

        waiter = self._waiter
        if waiter is not None:
            self._waiter = None
            set_result(waiter, None)

        waiter = self._eof_waiter
        if waiter is not None:
            self._eof_waiter = None
            set_result(waiter, None)

        for cb in self._eof_callbacks:
            try:
                cb()
            except Exception:
                internal_logger.exception("Exception in eof callback")

        self._eof_callbacks.clear()

    def is_eof(self) -> bool:
        """Return True if 'feed_eof' was called."""
        return self._eof

    def at_eof(self) -> bool:
        """Return True if the buffer is empty and 'feed_eof' was called."""
        return self._eof and not self._buffer

    async def wait_eof(self) -> None:
        if self._eof:
            return

        assert self._eof_waiter is None
        self._eof_waiter = self._loop.create_future()
        try:
            await self._eof_waiter
        finally:
            self._eof_waiter = None

    def unread_data(self, data: bytes) -> None:
        """rollback reading some data from stream, inserting it to buffer head."""
        warnings.warn(
            "unread_data() is deprecated "
            "and will be removed in future releases (#3260)",
            DeprecationWarning,
            stacklevel=2,
        )
        if not data:
            return

        if self._buffer_offset:
            self._buffer[0] = self._buffer[0][self._buffer_offset :]
            self._buffer_offset = 0
        self._size += len(data)
        self._cursor -= len(data)
        self._buffer.appendleft(data)
        self._eof_counter = 0

    # TODO: size is ignored, remove the param later
    def feed_data(self, data: bytes, size: int = 0) -> None:
        assert not self._eof, "feed_data after feed_eof"

        if not data:
            return

        self._size += len(data)
        self._buffer.append(data)
        self.total_bytes += len(data)

        waiter = self._waiter
        if waiter is not None:
            self._waiter = None
            set_result(waiter, None)

        if self._size > self._high_water and not self._protocol._reading_paused:
            self._protocol.pause_reading()

    def begin_http_chunk_receiving(self) -> None:
        if self._http_chunk_splits is None:
            if self.total_bytes:
                raise RuntimeError(
                    "Called begin_http_chunk_receiving when "
                    "some data was already fed"
                )
            self._http_chunk_splits = []

    def end_http_chunk_receiving(self) -> None:
        if self._http_chunk_splits is None:
            raise RuntimeError(
                "Called end_chunk_receiving without calling "
                "begin_chunk_receiving first"
            )

        # self._http_chunk_splits contains logical byte offsets from start of
        # the body transfer. Each offset is the offset of the end of a chunk.
        # "Logical" means bytes, accessible for a user.
        # If no chunks containing logical data were received, current position
        # is definitely zero.
        pos = self._http_chunk_splits[-1] if self._http_chunk_splits else 0

        if self.total_bytes == pos:
            # We should not add empty chunks here. So we check for that.
            # Note, when chunked + gzip is used, we can receive a chunk
            # of compressed data, but that data may not be enough for gzip FSM
            # to yield any uncompressed data. That's why current position may
            # not change after receiving a chunk.
            return

        self._http_chunk_splits.append(self.total_bytes)

        # wake up readchunk when end of http chunk received
        waiter = self._waiter
        if waiter is not None:
            self._waiter = None
            set_result(waiter, None)

    async def _wait(self, func_name: str) -> None:
        # StreamReader uses a future to link the protocol feed_data() method
        # to a read coroutine. Running two read coroutines at the same time
        # would have unexpected behaviour. It would not be possible to know
        # which coroutine would get the next data.
        if self._waiter is not None:
            raise RuntimeError(
                "%s() called while another coroutine is "
                "already waiting for incoming data" % func_name
            )

        waiter = self._waiter = self._loop.create_future()
        try:
            if self._timer:
                with self._timer:
                    await waiter
            else:
                await waiter
        finally:
            self._waiter = None

    async def readline(self) -> bytes:
        return await self.readuntil()

    async def readuntil(self, separator: bytes = b"\n") -> bytes:
        seplen = len(separator)
        if seplen == 0:
            raise ValueError("Separator should be at least one-byte string")

        if self._exception is not None:
            raise self._exception

        chunk = b""
        chunk_size = 0
        not_enough = True

        while not_enough:
            while self._buffer and not_enough:
                offset = self._buffer_offset
                ichar = self._buffer[0].find(separator, offset) + 1
                # Read from current offset to found separator or to the end.
                data = self._read_nowait_chunk(ichar - offset if ichar else -1)
                chunk += data
                chunk_size += len(data)
                if ichar:
                    not_enough = False

                if chunk_size > self._high_water:
                    raise ValueError("Chunk too big")

            if self._eof:
                break

            if not_enough:
                await self._wait("readuntil")

        return chunk

    async def read(self, n: int = -1) -> bytes:
        if self._exception is not None:
            raise self._exception

        # Migration note: with DataQueue you have to catch the EofStream
        # exception, so a common pattern is to run payload.read() inside an
        # infinite loop, which can cause a real infinite loop with
        # StreamReader. Let's keep this check for one major release.
        if __debug__:
            if self._eof and not self._buffer:
                self._eof_counter = getattr(self, "_eof_counter", 0) + 1
                if self._eof_counter > 5:
                    internal_logger.warning(
                        "Multiple access to StreamReader in eof state, "
                        "might be infinite loop.",
                        stack_info=True,
                    )

        if not n:
            return b""

        if n < 0:
            # This used to just loop creating a new waiter hoping to
            # collect everything in self._buffer, but that would
            # deadlock if the subprocess sends more than self.limit
            # bytes.  So just call self.readany() until EOF.
            blocks = []
            while True:
                block = await self.readany()
                if not block:
                    break
                blocks.append(block)
            return b"".join(blocks)

        # TODO: should be `if` instead of `while`
        # because waiter maybe triggered on chunk end,
        # without feeding any data
        while not self._buffer and not self._eof:
            await self._wait("read")

        return self._read_nowait(n)

    async def readany(self) -> bytes:
        if self._exception is not None:
            raise self._exception

        # TODO: should be `if` instead of `while`
        # because waiter maybe triggered on chunk end,
        # without feeding any data
        while not self._buffer and not self._eof:
            await self._wait("readany")

        return self._read_nowait(-1)

    async def readchunk(self) -> Tuple[bytes, bool]:
        """Returns a tuple of (data, end_of_http_chunk).

        When chunked transfer
        encoding is used, end_of_http_chunk is a boolean indicating if the end
        of the data corresponds to the end of an HTTP chunk, otherwise it is
        always False.
        """
        while True:
            if self._exception is not None:
                raise self._exception

            while self._http_chunk_splits:
                pos = self._http_chunk_splits.pop(0)
                if pos == self._cursor:
                    return (b"", True)
                if pos > self._cursor:
                    return (self._read_nowait(pos - self._cursor), True)
                internal_logger.warning(
                    "Skipping HTTP chunk end due to data "
                    "consumption beyond chunk boundary"
                )

            if self._buffer:
                return (self._read_nowait_chunk(-1), False)
                # return (self._read_nowait(-1), False)

            if self._eof:
                # Special case for signifying EOF.
                # (b'', True) is not a final return value actually.
                return (b"", False)

            await self._wait("readchunk")

    async def readexactly(self, n: int) -> bytes:
        if self._exception is not None:
            raise self._exception

        blocks: List[bytes] = []
        while n > 0:
            block = await self.read(n)
            if not block:
                partial = b"".join(blocks)
                raise asyncio.IncompleteReadError(partial, len(partial) + n)
            blocks.append(block)
            n -= len(block)

        return b"".join(blocks)

    def read_nowait(self, n: int = -1) -> bytes:
        # default was changed to be consistent with .read(-1)
        #
        # I believe most users don't know about the method and
        # are not affected.
        if self._exception is not None:
            raise self._exception

        if self._waiter and not self._waiter.done():
            raise RuntimeError(
                "Called while some coroutine is waiting for incoming data."
            )

        return self._read_nowait(n)

    def _read_nowait_chunk(self, n: int) -> bytes:
        first_buffer = self._buffer[0]
        offset = self._buffer_offset
        if n != -1 and len(first_buffer) - offset > n:
            data = first_buffer[offset : offset + n]
            self._buffer_offset += n

        elif offset:
            self._buffer.popleft()
            data = first_buffer[offset:]
            self._buffer_offset = 0

        else:
            data = self._buffer.popleft()

        self._size -= len(data)
        self._cursor += len(data)

        chunk_splits = self._http_chunk_splits
        # Prevent memory leak: drop useless chunk splits
        while chunk_splits and chunk_splits[0] < self._cursor:
            chunk_splits.pop(0)

        if self._size < self._low_water and self._protocol._reading_paused:
            self._protocol.resume_reading()
        return data

    def _read_nowait(self, n: int) -> bytes:
        """Read not more than n bytes, or whole buffer if n == -1"""
        chunks = []

        while self._buffer:
            chunk = self._read_nowait_chunk(n)
            chunks.append(chunk)
            if n != -1:
                n -= len(chunk)
                if n == 0:
                    break

        return b"".join(chunks) if chunks else b""


class EmptyStreamReader(StreamReader):  # lgtm [py/missing-call-to-init]
    def __init__(self) -> None:
        pass

    def exception(self) -> Optional[BaseException]:
        return None

    def set_exception(self, exc: BaseException) -> None:
        pass

    def on_eof(self, callback: Callable[[], None]) -> None:
        try:
            callback()
        except Exception:
            internal_logger.exception("Exception in eof callback")

    def feed_eof(self) -> None:
        pass

    def is_eof(self) -> bool:
        return True

    def at_eof(self) -> bool:
        return True

    async def wait_eof(self) -> None:
        return

    def feed_data(self, data: bytes, n: int = 0) -> None:
        pass

    async def readline(self) -> bytes:
        return b""

    async def read(self, n: int = -1) -> bytes:
        return b""

    # TODO add async def readuntil

    async def readany(self) -> bytes:
        return b""

    async def readchunk(self) -> Tuple[bytes, bool]:
        return (b"", True)

    async def readexactly(self, n: int) -> bytes:
        raise asyncio.IncompleteReadError(b"", n)

    def read_nowait(self, n: int = -1) -> bytes:
        return b""


EMPTY_PAYLOAD: Final[StreamReader] = EmptyStreamReader()


class DataQueue(Generic[_T]):
    """DataQueue is a general-purpose blocking queue with one reader."""

    def __init__(self, loop: asyncio.AbstractEventLoop) -> None:
        self._loop = loop
        self._eof = False
        self._waiter: Optional[asyncio.Future[None]] = None
        self._exception: Optional[BaseException] = None
        self._size = 0
        self._buffer: Deque[Tuple[_T, int]] = collections.deque()

    def __len__(self) -> int:
        return len(self._buffer)

    def is_eof(self) -> bool:
        return self._eof

    def at_eof(self) -> bool:
        return self._eof and not self._buffer

    def exception(self) -> Optional[BaseException]:
        return self._exception

    def set_exception(self, exc: BaseException) -> None:
        self._eof = True
        self._exception = exc

        waiter = self._waiter
        if waiter is not None:
            self._waiter = None
            set_exception(waiter, exc)

    def feed_data(self, data: _T, size: int = 0) -> None:
        self._size += size
        self._buffer.append((data, size))

        waiter = self._waiter
        if waiter is not None:
            self._waiter = None
            set_result(waiter, None)

    def feed_eof(self) -> None:
        self._eof = True

        waiter = self._waiter
        if waiter is not None:
            self._waiter = None
            set_result(waiter, None)

    async def read(self) -> _T:
        if not self._buffer and not self._eof:
            assert not self._waiter
            self._waiter = self._loop.create_future()
            try:
                await self._waiter
            except (asyncio.CancelledError, asyncio.TimeoutError):
                self._waiter = None
                raise

        if self._buffer:
            data, size = self._buffer.popleft()
            self._size -= size
            return data
        else:
            if self._exception is not None:
                raise self._exception
            else:
                raise EofStream

    def __aiter__(self) -> AsyncStreamIterator[_T]:
        return AsyncStreamIterator(self.read)


class FlowControlDataQueue(DataQueue[_T]):
    """FlowControlDataQueue resumes and pauses an underlying stream.

    It is a destination for parsed data.
    """

    def __init__(
        self, protocol: BaseProtocol, limit: int, *, loop: asyncio.AbstractEventLoop
    ) -> None:
        super().__init__(loop=loop)

        self._protocol = protocol
        self._limit = limit * 2

    def feed_data(self, data: _T, size: int = 0) -> None:
        super().feed_data(data, size)

        if self._size > self._limit and not self._protocol._reading_paused:
            self._protocol.pause_reading()

    async def read(self) -> _T:
        try:
            return await super().read()
        finally:
            if self._size < self._limit and self._protocol._reading_paused:
                self._protocol.resume_reading()
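A hedged sketch of consuming a StreamReader the way its docstring describes, using a response body as the stream (not part of the vendored file; url and dest are illustrative parameters):

# Sketch only: stream a large response body to disk with bounded memory
# via the iter_chunked iterator defined above.
import aiohttp


async def download(url: str, dest: str) -> None:
    async with aiohttp.ClientSession() as session:
        async with session.get(url) as resp:
            with open(dest, "wb") as f:
                # resp.content is the StreamReader from this module
                async for chunk in resp.content.iter_chunked(2**16):
                    f.write(chunk)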
37
.venv/lib/python3.7/site-packages/aiohttp/tcp_helpers.py
Normal file
@@ -0,0 +1,37 @@
"""Helper methods to tune a TCP connection"""
|
||||
|
||||
import asyncio
|
||||
import socket
|
||||
from contextlib import suppress
|
||||
from typing import Optional # noqa
|
||||
|
||||
__all__ = ("tcp_keepalive", "tcp_nodelay")
|
||||
|
||||
|
||||
if hasattr(socket, "SO_KEEPALIVE"):
|
||||
|
||||
def tcp_keepalive(transport: asyncio.Transport) -> None:
|
||||
sock = transport.get_extra_info("socket")
|
||||
if sock is not None:
|
||||
sock.setsockopt(socket.SOL_SOCKET, socket.SO_KEEPALIVE, 1)
|
||||
|
||||
else:
|
||||
|
||||
def tcp_keepalive(transport: asyncio.Transport) -> None: # pragma: no cover
|
||||
pass
|
||||
|
||||
|
||||
def tcp_nodelay(transport: asyncio.Transport, value: bool) -> None:
|
||||
sock = transport.get_extra_info("socket")
|
||||
|
||||
if sock is None:
|
||||
return
|
||||
|
||||
if sock.family not in (socket.AF_INET, socket.AF_INET6):
|
||||
return
|
||||
|
||||
value = bool(value)
|
||||
|
||||
# socket may be closed already, on windows OSError get raised
|
||||
with suppress(OSError):
|
||||
sock.setsockopt(socket.IPPROTO_TCP, socket.TCP_NODELAY, value)
|
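A hedged sketch of where these helpers are typically applied: on a live transport, e.g. in a protocol's connection_made callback (not part of the vendored file; TunedProtocol is an illustrative name):

# Sketch only: tune the socket as soon as the connection is established.
import asyncio

from aiohttp.tcp_helpers import tcp_keepalive, tcp_nodelay


class TunedProtocol(asyncio.Protocol):
    def connection_made(self, transport: asyncio.BaseTransport) -> None:
        assert isinstance(transport, asyncio.Transport)
        tcp_keepalive(transport)      # enable SO_KEEPALIVE where available
        tcp_nodelay(transport, True)  # disable Nagle for latency-sensitive writes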
706
.venv/lib/python3.7/site-packages/aiohttp/test_utils.py
Normal file
@@ -0,0 +1,706 @@
"""Utilities shared by tests."""
|
||||
|
||||
import asyncio
|
||||
import contextlib
|
||||
import gc
|
||||
import inspect
|
||||
import ipaddress
|
||||
import os
|
||||
import socket
|
||||
import sys
|
||||
import warnings
|
||||
from abc import ABC, abstractmethod
|
||||
from types import TracebackType
|
||||
from typing import (
|
||||
TYPE_CHECKING,
|
||||
Any,
|
||||
Callable,
|
||||
Iterator,
|
||||
List,
|
||||
Optional,
|
||||
Type,
|
||||
Union,
|
||||
cast,
|
||||
)
|
||||
from unittest import mock
|
||||
|
||||
from aiosignal import Signal
|
||||
from multidict import CIMultiDict, CIMultiDictProxy
|
||||
from yarl import URL
|
||||
|
||||
import aiohttp
|
||||
from aiohttp.client import _RequestContextManager, _WSRequestContextManager
|
||||
|
||||
from . import ClientSession, hdrs
|
||||
from .abc import AbstractCookieJar
|
||||
from .client_reqrep import ClientResponse
|
||||
from .client_ws import ClientWebSocketResponse
|
||||
from .helpers import PY_38, sentinel
|
||||
from .http import HttpVersion, RawRequestMessage
|
||||
from .web import (
|
||||
Application,
|
||||
AppRunner,
|
||||
BaseRunner,
|
||||
Request,
|
||||
Server,
|
||||
ServerRunner,
|
||||
SockSite,
|
||||
UrlMappingMatchInfo,
|
||||
)
|
||||
from .web_protocol import _RequestHandler
|
||||
|
||||
if TYPE_CHECKING: # pragma: no cover
|
||||
from ssl import SSLContext
|
||||
else:
|
||||
SSLContext = None
|
||||
|
||||
if PY_38:
|
||||
from unittest import IsolatedAsyncioTestCase as TestCase
|
||||
else:
|
||||
from asynctest import TestCase # type: ignore[no-redef]
|
||||
|
||||
REUSE_ADDRESS = os.name == "posix" and sys.platform != "cygwin"
|
||||
|
||||
|
||||
def get_unused_port_socket(
|
||||
host: str, family: socket.AddressFamily = socket.AF_INET
|
||||
) -> socket.socket:
|
||||
return get_port_socket(host, 0, family)
|
||||
|
||||
|
||||
def get_port_socket(
|
||||
host: str, port: int, family: socket.AddressFamily
|
||||
) -> socket.socket:
|
||||
s = socket.socket(family, socket.SOCK_STREAM)
|
||||
if REUSE_ADDRESS:
|
||||
# Windows has different semantics for SO_REUSEADDR,
|
||||
# so don't set it. Ref:
|
||||
# https://docs.microsoft.com/en-us/windows/win32/winsock/using-so-reuseaddr-and-so-exclusiveaddruse
|
||||
s.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1)
|
||||
s.bind((host, port))
|
||||
return s
|
||||
|
||||
|
||||
def unused_port() -> int:
|
||||
"""Return a port that is unused on the current host."""
|
||||
with socket.socket(socket.AF_INET, socket.SOCK_STREAM) as s:
|
||||
s.bind(("127.0.0.1", 0))
|
||||
return cast(int, s.getsockname()[1])
|
||||
|
||||
|
||||
class BaseTestServer(ABC):
|
||||
__test__ = False
|
||||
|
||||
def __init__(
|
||||
self,
|
||||
*,
|
||||
scheme: Union[str, object] = sentinel,
|
||||
loop: Optional[asyncio.AbstractEventLoop] = None,
|
||||
host: str = "127.0.0.1",
|
||||
port: Optional[int] = None,
|
||||
skip_url_asserts: bool = False,
|
||||
socket_factory: Callable[
|
||||
[str, int, socket.AddressFamily], socket.socket
|
||||
] = get_port_socket,
|
||||
**kwargs: Any,
|
||||
) -> None:
|
||||
self._loop = loop
|
||||
self.runner: Optional[BaseRunner] = None
|
||||
self._root: Optional[URL] = None
|
||||
self.host = host
|
||||
self.port = port
|
||||
self._closed = False
|
||||
self.scheme = scheme
|
||||
self.skip_url_asserts = skip_url_asserts
|
||||
self.socket_factory = socket_factory
|
||||
|
||||
async def start_server(
|
||||
self, loop: Optional[asyncio.AbstractEventLoop] = None, **kwargs: Any
|
||||
) -> None:
|
||||
if self.runner:
|
||||
return
|
||||
self._loop = loop
|
||||
self._ssl = kwargs.pop("ssl", None)
|
||||
self.runner = await self._make_runner(**kwargs)
|
||||
await self.runner.setup()
|
||||
if not self.port:
|
||||
self.port = 0
|
||||
try:
|
||||
version = ipaddress.ip_address(self.host).version
|
||||
except ValueError:
|
||||
version = 4
|
||||
family = socket.AF_INET6 if version == 6 else socket.AF_INET
|
||||
_sock = self.socket_factory(self.host, self.port, family)
|
||||
self.host, self.port = _sock.getsockname()[:2]
|
||||
site = SockSite(self.runner, sock=_sock, ssl_context=self._ssl)
|
||||
await site.start()
|
||||
server = site._server
|
||||
assert server is not None
|
||||
sockets = server.sockets
|
||||
assert sockets is not None
|
||||
self.port = sockets[0].getsockname()[1]
|
||||
if self.scheme is sentinel:
|
||||
if self._ssl:
|
||||
scheme = "https"
|
||||
else:
|
||||
scheme = "http"
|
||||
self.scheme = scheme
|
||||
self._root = URL(f"{self.scheme}://{self.host}:{self.port}")
|
||||
|
||||
@abstractmethod # pragma: no cover
|
||||
async def _make_runner(self, **kwargs: Any) -> BaseRunner:
|
||||
pass
|
||||
|
||||
def make_url(self, path: str) -> URL:
|
||||
assert self._root is not None
|
||||
url = URL(path)
|
||||
if not self.skip_url_asserts:
|
||||
assert not url.is_absolute()
|
||||
return self._root.join(url)
|
||||
else:
|
||||
return URL(str(self._root) + path)
|
||||
|
||||
@property
|
||||
def started(self) -> bool:
|
||||
return self.runner is not None
|
||||
|
||||
@property
|
||||
def closed(self) -> bool:
|
||||
return self._closed
|
||||
|
||||
@property
|
||||
def handler(self) -> Server:
|
||||
# for backward compatibility
|
||||
# web.Server instance
|
||||
runner = self.runner
|
||||
assert runner is not None
|
||||
assert runner.server is not None
|
||||
return runner.server
|
||||
|
||||
async def close(self) -> None:
|
||||
"""Close all fixtures created by the test client.
|
||||
|
||||
After that point, the TestClient is no longer usable.
|
||||
|
||||
This is an idempotent function: running close multiple times
|
||||
will not have any additional effects.
|
||||
|
||||
close is also run when the object is garbage collected, and on
|
||||
exit when used as a context manager.
|
||||
|
||||
"""
|
||||
if self.started and not self.closed:
|
||||
assert self.runner is not None
|
||||
await self.runner.cleanup()
|
||||
self._root = None
|
||||
self.port = None
|
||||
self._closed = True
|
||||
|
||||
def __enter__(self) -> None:
|
||||
raise TypeError("Use async with instead")
|
||||
|
||||
def __exit__(
|
||||
self,
|
||||
exc_type: Optional[Type[BaseException]],
|
||||
exc_value: Optional[BaseException],
|
||||
traceback: Optional[TracebackType],
|
||||
) -> None:
|
||||
# __exit__ should exist in pair with __enter__ but never executed
|
||||
pass # pragma: no cover
|
||||
|
||||
async def __aenter__(self) -> "BaseTestServer":
|
||||
await self.start_server(loop=self._loop)
|
||||
return self
|
||||
|
||||
async def __aexit__(
|
||||
self,
|
||||
exc_type: Optional[Type[BaseException]],
|
||||
exc_value: Optional[BaseException],
|
||||
traceback: Optional[TracebackType],
|
||||
) -> None:
|
||||
await self.close()
|
||||
|
||||
|
||||
class TestServer(BaseTestServer):
|
||||
def __init__(
|
||||
self,
|
||||
app: Application,
|
||||
*,
|
||||
scheme: Union[str, object] = sentinel,
|
||||
host: str = "127.0.0.1",
|
||||
port: Optional[int] = None,
|
||||
**kwargs: Any,
|
||||
):
|
||||
self.app = app
|
||||
super().__init__(scheme=scheme, host=host, port=port, **kwargs)
|
||||
|
||||
async def _make_runner(self, **kwargs: Any) -> BaseRunner:
|
||||
return AppRunner(self.app, **kwargs)
|
||||
|
||||
|
||||
class RawTestServer(BaseTestServer):
|
||||
def __init__(
|
||||
self,
|
||||
handler: _RequestHandler,
|
||||
*,
|
||||
scheme: Union[str, object] = sentinel,
|
||||
host: str = "127.0.0.1",
|
||||
port: Optional[int] = None,
|
||||
**kwargs: Any,
|
||||
) -> None:
|
||||
self._handler = handler
|
||||
super().__init__(scheme=scheme, host=host, port=port, **kwargs)
|
||||
|
||||
async def _make_runner(self, debug: bool = True, **kwargs: Any) -> ServerRunner:
|
||||
srv = Server(self._handler, loop=self._loop, debug=debug, **kwargs)
|
||||
return ServerRunner(srv, debug=debug, **kwargs)
|
||||
|
||||
|
||||
class TestClient:
|
||||
"""
|
||||
A test client implementation.
|
||||
|
||||
To write functional tests for aiohttp based servers.
|
||||
|
||||
"""
|
||||
|
||||
__test__ = False
|
||||
|
||||
def __init__(
|
||||
self,
|
||||
server: BaseTestServer,
|
||||
*,
|
||||
cookie_jar: Optional[AbstractCookieJar] = None,
|
||||
loop: Optional[asyncio.AbstractEventLoop] = None,
|
||||
**kwargs: Any,
|
||||
) -> None:
|
||||
if not isinstance(server, BaseTestServer):
|
||||
raise TypeError(
|
||||
"server must be TestServer " "instance, found type: %r" % type(server)
|
||||
)
|
||||
self._server = server
|
||||
self._loop = loop
|
||||
if cookie_jar is None:
|
||||
cookie_jar = aiohttp.CookieJar(unsafe=True, loop=loop)
|
||||
self._session = ClientSession(loop=loop, cookie_jar=cookie_jar, **kwargs)
|
||||
self._closed = False
|
||||
self._responses: List[ClientResponse] = []
|
||||
self._websockets: List[ClientWebSocketResponse] = []
|
||||
|
||||
async def start_server(self) -> None:
|
||||
await self._server.start_server(loop=self._loop)
|
||||
|
||||
@property
|
||||
def host(self) -> str:
|
||||
return self._server.host
|
||||
|
||||
@property
|
||||
def port(self) -> Optional[int]:
|
||||
return self._server.port
|
||||
|
||||
@property
|
||||
def server(self) -> BaseTestServer:
|
||||
return self._server
|
||||
|
||||
@property
|
||||
def app(self) -> Optional[Application]:
|
||||
return cast(Optional[Application], getattr(self._server, "app", None))
|
||||
|
||||
@property
|
||||
def session(self) -> ClientSession:
|
||||
"""An internal aiohttp.ClientSession.
|
||||
|
||||
Unlike the methods on the TestClient, client session requests
|
||||
do not automatically include the host in the url queried, and
|
||||
will require an absolute path to the resource.
|
||||
|
||||
"""
|
||||
return self._session
|
||||
|
||||
def make_url(self, path: str) -> URL:
|
||||
return self._server.make_url(path)
|
||||
|
||||
async def _request(self, method: str, path: str, **kwargs: Any) -> ClientResponse:
|
||||
resp = await self._session.request(method, self.make_url(path), **kwargs)
|
||||
# save it to close later
|
||||
self._responses.append(resp)
|
||||
return resp
|
||||
|
||||
def request(self, method: str, path: str, **kwargs: Any) -> _RequestContextManager:
|
||||
"""Routes a request to tested http server.
|
||||
|
||||
The interface is identical to aiohttp.ClientSession.request,
|
||||
except the loop kwarg is overridden by the instance used by the
|
||||
test server.
|
||||
|
||||
"""
|
||||
return _RequestContextManager(self._request(method, path, **kwargs))
|
||||
|
||||
def get(self, path: str, **kwargs: Any) -> _RequestContextManager:
|
||||
"""Perform an HTTP GET request."""
|
||||
return _RequestContextManager(self._request(hdrs.METH_GET, path, **kwargs))
|
||||
|
||||
def post(self, path: str, **kwargs: Any) -> _RequestContextManager:
|
||||
"""Perform an HTTP POST request."""
|
||||
return _RequestContextManager(self._request(hdrs.METH_POST, path, **kwargs))
|
||||
|
||||
def options(self, path: str, **kwargs: Any) -> _RequestContextManager:
|
||||
"""Perform an HTTP OPTIONS request."""
|
||||
return _RequestContextManager(self._request(hdrs.METH_OPTIONS, path, **kwargs))
|
||||
|
||||
def head(self, path: str, **kwargs: Any) -> _RequestContextManager:
|
||||
"""Perform an HTTP HEAD request."""
|
||||
return _RequestContextManager(self._request(hdrs.METH_HEAD, path, **kwargs))
|
||||
|
||||
def put(self, path: str, **kwargs: Any) -> _RequestContextManager:
|
||||
"""Perform an HTTP PUT request."""
|
||||
return _RequestContextManager(self._request(hdrs.METH_PUT, path, **kwargs))
|
||||
|
||||
def patch(self, path: str, **kwargs: Any) -> _RequestContextManager:
|
||||
"""Perform an HTTP PATCH request."""
|
||||
return _RequestContextManager(self._request(hdrs.METH_PATCH, path, **kwargs))
|
||||
|
||||
def delete(self, path: str, **kwargs: Any) -> _RequestContextManager:
|
||||
"""Perform an HTTP PATCH request."""
|
||||
return _RequestContextManager(self._request(hdrs.METH_DELETE, path, **kwargs))
|
||||
|
||||
def ws_connect(self, path: str, **kwargs: Any) -> _WSRequestContextManager:
|
||||
"""Initiate websocket connection.
|
||||
|
||||
The api corresponds to aiohttp.ClientSession.ws_connect.
|
||||
|
||||
"""
|
||||
return _WSRequestContextManager(self._ws_connect(path, **kwargs))
|
||||
|
||||
async def _ws_connect(self, path: str, **kwargs: Any) -> ClientWebSocketResponse:
|
||||
ws = await self._session.ws_connect(self.make_url(path), **kwargs)
|
||||
self._websockets.append(ws)
|
||||
return ws
|
||||
|
||||
async def close(self) -> None:
|
||||
"""Close all fixtures created by the test client.
|
||||
|
||||
After that point, the TestClient is no longer usable.
|
||||
|
||||
This is an idempotent function: running close multiple times
|
||||
will not have any additional effects.
|
||||
|
||||
close is also run on exit when used as a(n) (asynchronous)
|
||||
context manager.
|
||||
|
||||
"""
|
||||
if not self._closed:
|
||||
for resp in self._responses:
|
||||
resp.close()
|
||||
for ws in self._websockets:
|
||||
await ws.close()
|
||||
await self._session.close()
|
||||
await self._server.close()
|
||||
self._closed = True
|
||||
|
||||
def __enter__(self) -> None:
|
||||
raise TypeError("Use async with instead")
|
||||
|
||||
def __exit__(
|
||||
self,
|
||||
exc_type: Optional[Type[BaseException]],
|
||||
exc: Optional[BaseException],
|
||||
tb: Optional[TracebackType],
|
||||
) -> None:
|
||||
# __exit__ should exist in pair with __enter__ but never executed
|
||||
pass # pragma: no cover
|
||||
|
||||
async def __aenter__(self) -> "TestClient":
|
||||
await self.start_server()
|
||||
return self
|
||||
|
||||
async def __aexit__(
|
||||
self,
|
||||
exc_type: Optional[Type[BaseException]],
|
||||
exc: Optional[BaseException],
|
||||
tb: Optional[TracebackType],
|
||||
) -> None:
|
||||
await self.close()
|
||||
|
||||
|
||||
class AioHTTPTestCase(TestCase):
|
||||
"""A base class to allow for unittest web applications using aiohttp.
|
||||
|
||||
Provides the following:
|
||||
|
||||
* self.client (aiohttp.test_utils.TestClient): an aiohttp test client.
|
||||
* self.loop (asyncio.BaseEventLoop): the event loop in which the
|
||||
application and server are running.
|
||||
* self.app (aiohttp.web.Application): the application returned by
|
||||
self.get_application()
|
||||
|
||||
Note that the TestClient's methods are asynchronous: you have to
|
||||
execute function on the test client using asynchronous methods.
|
||||
"""
|
||||
|
||||
async def get_application(self) -> Application:
|
||||
"""Get application.
|
||||
|
||||
This method should be overridden
|
||||
to return the aiohttp.web.Application
|
||||
object to test.
|
||||
"""
|
||||
return self.get_app()
|
||||
|
||||
def get_app(self) -> Application:
|
||||
"""Obsolete method used to constructing web application.
|
||||
|
||||
Use .get_application() coroutine instead.
|
||||
"""
|
||||
raise RuntimeError("Did you forget to define get_application()?")

    def setUp(self) -> None:
        if not PY_38:
            asyncio.get_event_loop().run_until_complete(self.asyncSetUp())

    async def asyncSetUp(self) -> None:
        try:
            self.loop = asyncio.get_running_loop()
        except (AttributeError, RuntimeError):  # AttributeError->py36
            self.loop = asyncio.get_event_loop_policy().get_event_loop()

        return await self.setUpAsync()

    async def setUpAsync(self) -> None:
        self.app = await self.get_application()
        self.server = await self.get_server(self.app)
        self.client = await self.get_client(self.server)

        await self.client.start_server()

    def tearDown(self) -> None:
        if not PY_38:
            self.loop.run_until_complete(self.asyncTearDown())

    async def asyncTearDown(self) -> None:
        return await self.tearDownAsync()

    async def tearDownAsync(self) -> None:
        await self.client.close()

    async def get_server(self, app: Application) -> TestServer:
        """Return a TestServer instance."""
        return TestServer(app, loop=self.loop)

    async def get_client(self, server: TestServer) -> TestClient:
        """Return a TestClient instance."""
        return TestClient(server, loop=self.loop)


def unittest_run_loop(func: Any, *args: Any, **kwargs: Any) -> Any:
    """
    A decorator for use with asynchronous AioHTTPTestCase test methods.

    In 3.8+, this does nothing.
    """
    warnings.warn(
        "Decorator `@unittest_run_loop` is no longer needed in aiohttp 3.8+",
        DeprecationWarning,
        stacklevel=2,
    )
    return func


_LOOP_FACTORY = Callable[[], asyncio.AbstractEventLoop]


@contextlib.contextmanager
def loop_context(
    loop_factory: _LOOP_FACTORY = asyncio.new_event_loop, fast: bool = False
) -> Iterator[asyncio.AbstractEventLoop]:
    """A contextmanager that creates an event_loop, for test purposes.

    Handles the creation and cleanup of a test loop.
    """
    loop = setup_test_loop(loop_factory)
    yield loop
    teardown_test_loop(loop, fast=fast)


def setup_test_loop(
    loop_factory: _LOOP_FACTORY = asyncio.new_event_loop,
) -> asyncio.AbstractEventLoop:
    """Create and return an asyncio.BaseEventLoop instance.

    The caller should also call teardown_test_loop,
    once they are done with the loop.
    """
    loop = loop_factory()
    try:
        module = loop.__class__.__module__
        skip_watcher = "uvloop" in module
    except AttributeError:  # pragma: no cover
        # Just in case
        skip_watcher = True
    asyncio.set_event_loop(loop)
    if sys.platform != "win32" and not skip_watcher:
        policy = asyncio.get_event_loop_policy()
        watcher: asyncio.AbstractChildWatcher
        try:  # Python >= 3.8
            # Refs:
            # * https://github.com/pytest-dev/pytest-xdist/issues/620
            # * https://stackoverflow.com/a/58614689/595220
            # * https://bugs.python.org/issue35621
            # * https://github.com/python/cpython/pull/14344
            watcher = asyncio.ThreadedChildWatcher()
        except AttributeError:  # Python < 3.8
            watcher = asyncio.SafeChildWatcher()
        watcher.attach_loop(loop)
        with contextlib.suppress(NotImplementedError):
            policy.set_child_watcher(watcher)
    return loop


def teardown_test_loop(loop: asyncio.AbstractEventLoop, fast: bool = False) -> None:
    """Teardown and cleanup an event_loop created by setup_test_loop."""
    closed = loop.is_closed()
    if not closed:
        loop.call_soon(loop.stop)
        loop.run_forever()
        loop.close()

    if not fast:
        gc.collect()

    asyncio.set_event_loop(None)


def _create_app_mock() -> mock.MagicMock:
    def get_dict(app: Any, key: str) -> Any:
        return app.__app_dict[key]

    def set_dict(app: Any, key: str, value: Any) -> None:
        app.__app_dict[key] = value

    app = mock.MagicMock(spec=Application)
    app.__app_dict = {}
    app.__getitem__ = get_dict
    app.__setitem__ = set_dict

    app._debug = False
    app.on_response_prepare = Signal(app)
    app.on_response_prepare.freeze()
    return app


def _create_transport(sslcontext: Optional[SSLContext] = None) -> mock.Mock:
    transport = mock.Mock()

    def get_extra_info(key: str) -> Optional[SSLContext]:
        if key == "sslcontext":
            return sslcontext
        else:
            return None

    transport.get_extra_info.side_effect = get_extra_info
    return transport


def make_mocked_request(
    method: str,
    path: str,
    headers: Any = None,
    *,
    match_info: Any = sentinel,
    version: HttpVersion = HttpVersion(1, 1),
    closing: bool = False,
    app: Any = None,
    writer: Any = sentinel,
    protocol: Any = sentinel,
    transport: Any = sentinel,
    payload: Any = sentinel,
    sslcontext: Optional[SSLContext] = None,
    client_max_size: int = 1024**2,
    loop: Any = ...,
) -> Request:
"""Creates mocked web.Request testing purposes.
|
||||
|
||||
Useful in unit tests, when spinning full web server is overkill or
|
||||
specific conditions and errors are hard to trigger.
|
||||
"""
    task = mock.Mock()
    if loop is ...:
        loop = mock.Mock()
        loop.create_future.return_value = ()

    if version < HttpVersion(1, 1):
        closing = True

    if headers:
        headers = CIMultiDictProxy(CIMultiDict(headers))
        raw_hdrs = tuple(
            (k.encode("utf-8"), v.encode("utf-8")) for k, v in headers.items()
        )
    else:
        headers = CIMultiDictProxy(CIMultiDict())
        raw_hdrs = ()

    chunked = "chunked" in headers.get(hdrs.TRANSFER_ENCODING, "").lower()

    message = RawRequestMessage(
        method,
        path,
        version,
        headers,
        raw_hdrs,
        closing,
        None,
        False,
        chunked,
        URL(path),
    )
    if app is None:
        app = _create_app_mock()

    if transport is sentinel:
        transport = _create_transport(sslcontext)

    if protocol is sentinel:
        protocol = mock.Mock()
        protocol.transport = transport

    if writer is sentinel:
        writer = mock.Mock()
        writer.write_headers = make_mocked_coro(None)
        writer.write = make_mocked_coro(None)
        writer.write_eof = make_mocked_coro(None)
        writer.drain = make_mocked_coro(None)
        writer.transport = transport

    protocol.transport = transport
    protocol.writer = writer

    if payload is sentinel:
        payload = mock.Mock()

    req = Request(
        message, payload, protocol, writer, task, loop, client_max_size=client_max_size
    )

    match_info = UrlMappingMatchInfo(
        {} if match_info is sentinel else match_info, mock.Mock()
    )
    match_info.add_app(app)
    req._match_info = match_info

    return req


def make_mocked_coro(
    return_value: Any = sentinel, raise_exception: Any = sentinel
) -> Any:
    """Creates a coroutine mock."""

    async def mock_coro(*args: Any, **kwargs: Any) -> Any:
        if raise_exception is not sentinel:
            raise raise_exception
        if not inspect.isawaitable(return_value):
            return return_value
        await return_value

    return mock.Mock(wraps=mock_coro)
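
A minimal usage sketch (not part of the vendored file) tying the helpers above together: AioHTTPTestCase drives an app through TestClient, and make_mocked_request builds a web.Request without any server. It assumes Python 3.8+, where TestCase resolves to unittest.IsolatedAsyncioTestCase, so async test methods run without the deprecated @unittest_run_loop decorator; the route and assertions are illustrative only.

import unittest

from aiohttp import web
from aiohttp.test_utils import AioHTTPTestCase, make_mocked_request


class HelloAppTestCase(AioHTTPTestCase):
    async def get_application(self) -> web.Application:
        async def hello(request: web.Request) -> web.Response:
            return web.Response(text="Hello")

        app = web.Application()
        app.router.add_get("/", hello)
        return app

    # No @unittest_run_loop needed on aiohttp 3.8+.
    async def test_hello(self) -> None:
        resp = await self.client.get("/")
        assert resp.status == 200
        assert await resp.text() == "Hello"


class MockedRequestTestCase(unittest.TestCase):
    def test_headers(self) -> None:
        # No server at all: the request is assembled from mocks.
        req = make_mocked_request("GET", "/path", headers={"token": "x"})
        self.assertEqual(req.headers["token"], "x")


if __name__ == "__main__":
    unittest.main()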
472
.venv/lib/python3.7/site-packages/aiohttp/tracing.py
Normal file
@@ -0,0 +1,472 @@
from types import SimpleNamespace
from typing import TYPE_CHECKING, Awaitable, Optional, Type, TypeVar

import attr
from aiosignal import Signal
from multidict import CIMultiDict
from yarl import URL

from .client_reqrep import ClientResponse

if TYPE_CHECKING:  # pragma: no cover
    from .client import ClientSession
    from .typedefs import Protocol

    _ParamT_contra = TypeVar("_ParamT_contra", contravariant=True)

    class _SignalCallback(Protocol[_ParamT_contra]):
        def __call__(
            self,
            __client_session: ClientSession,
            __trace_config_ctx: SimpleNamespace,
            __params: _ParamT_contra,
        ) -> Awaitable[None]:
            ...


__all__ = (
    "TraceConfig",
    "TraceRequestStartParams",
    "TraceRequestEndParams",
    "TraceRequestExceptionParams",
    "TraceConnectionQueuedStartParams",
    "TraceConnectionQueuedEndParams",
    "TraceConnectionCreateStartParams",
    "TraceConnectionCreateEndParams",
    "TraceConnectionReuseconnParams",
    "TraceDnsResolveHostStartParams",
    "TraceDnsResolveHostEndParams",
    "TraceDnsCacheHitParams",
    "TraceDnsCacheMissParams",
    "TraceRequestRedirectParams",
    "TraceRequestChunkSentParams",
    "TraceResponseChunkReceivedParams",
    "TraceRequestHeadersSentParams",
)


class TraceConfig:
    """First-class object used to trace requests launched via ClientSession objects."""

    def __init__(
        self, trace_config_ctx_factory: Type[SimpleNamespace] = SimpleNamespace
    ) -> None:
        self._on_request_start: Signal[
            _SignalCallback[TraceRequestStartParams]
        ] = Signal(self)
        self._on_request_chunk_sent: Signal[
            _SignalCallback[TraceRequestChunkSentParams]
        ] = Signal(self)
        self._on_response_chunk_received: Signal[
            _SignalCallback[TraceResponseChunkReceivedParams]
        ] = Signal(self)
        self._on_request_end: Signal[_SignalCallback[TraceRequestEndParams]] = Signal(
            self
        )
        self._on_request_exception: Signal[
            _SignalCallback[TraceRequestExceptionParams]
        ] = Signal(self)
        self._on_request_redirect: Signal[
            _SignalCallback[TraceRequestRedirectParams]
        ] = Signal(self)
        self._on_connection_queued_start: Signal[
            _SignalCallback[TraceConnectionQueuedStartParams]
        ] = Signal(self)
        self._on_connection_queued_end: Signal[
            _SignalCallback[TraceConnectionQueuedEndParams]
        ] = Signal(self)
        self._on_connection_create_start: Signal[
            _SignalCallback[TraceConnectionCreateStartParams]
        ] = Signal(self)
        self._on_connection_create_end: Signal[
            _SignalCallback[TraceConnectionCreateEndParams]
        ] = Signal(self)
        self._on_connection_reuseconn: Signal[
            _SignalCallback[TraceConnectionReuseconnParams]
        ] = Signal(self)
        self._on_dns_resolvehost_start: Signal[
            _SignalCallback[TraceDnsResolveHostStartParams]
        ] = Signal(self)
        self._on_dns_resolvehost_end: Signal[
            _SignalCallback[TraceDnsResolveHostEndParams]
        ] = Signal(self)
        self._on_dns_cache_hit: Signal[
            _SignalCallback[TraceDnsCacheHitParams]
        ] = Signal(self)
        self._on_dns_cache_miss: Signal[
            _SignalCallback[TraceDnsCacheMissParams]
        ] = Signal(self)
        self._on_request_headers_sent: Signal[
            _SignalCallback[TraceRequestHeadersSentParams]
        ] = Signal(self)

        self._trace_config_ctx_factory = trace_config_ctx_factory

    def trace_config_ctx(
        self, trace_request_ctx: Optional[SimpleNamespace] = None
    ) -> SimpleNamespace:
        """Return a new trace_config_ctx instance"""
        return self._trace_config_ctx_factory(trace_request_ctx=trace_request_ctx)

    def freeze(self) -> None:
        self._on_request_start.freeze()
        self._on_request_chunk_sent.freeze()
        self._on_response_chunk_received.freeze()
        self._on_request_end.freeze()
        self._on_request_exception.freeze()
        self._on_request_redirect.freeze()
        self._on_connection_queued_start.freeze()
        self._on_connection_queued_end.freeze()
        self._on_connection_create_start.freeze()
        self._on_connection_create_end.freeze()
        self._on_connection_reuseconn.freeze()
        self._on_dns_resolvehost_start.freeze()
        self._on_dns_resolvehost_end.freeze()
        self._on_dns_cache_hit.freeze()
        self._on_dns_cache_miss.freeze()
        self._on_request_headers_sent.freeze()

    @property
    def on_request_start(self) -> "Signal[_SignalCallback[TraceRequestStartParams]]":
        return self._on_request_start

    @property
    def on_request_chunk_sent(
        self,
    ) -> "Signal[_SignalCallback[TraceRequestChunkSentParams]]":
        return self._on_request_chunk_sent

    @property
    def on_response_chunk_received(
        self,
    ) -> "Signal[_SignalCallback[TraceResponseChunkReceivedParams]]":
        return self._on_response_chunk_received

    @property
    def on_request_end(self) -> "Signal[_SignalCallback[TraceRequestEndParams]]":
        return self._on_request_end

    @property
    def on_request_exception(
        self,
    ) -> "Signal[_SignalCallback[TraceRequestExceptionParams]]":
        return self._on_request_exception

    @property
    def on_request_redirect(
        self,
    ) -> "Signal[_SignalCallback[TraceRequestRedirectParams]]":
        return self._on_request_redirect

    @property
    def on_connection_queued_start(
        self,
    ) -> "Signal[_SignalCallback[TraceConnectionQueuedStartParams]]":
        return self._on_connection_queued_start

    @property
    def on_connection_queued_end(
        self,
    ) -> "Signal[_SignalCallback[TraceConnectionQueuedEndParams]]":
        return self._on_connection_queued_end

    @property
    def on_connection_create_start(
        self,
    ) -> "Signal[_SignalCallback[TraceConnectionCreateStartParams]]":
        return self._on_connection_create_start

    @property
    def on_connection_create_end(
        self,
    ) -> "Signal[_SignalCallback[TraceConnectionCreateEndParams]]":
        return self._on_connection_create_end

    @property
    def on_connection_reuseconn(
        self,
    ) -> "Signal[_SignalCallback[TraceConnectionReuseconnParams]]":
        return self._on_connection_reuseconn

    @property
    def on_dns_resolvehost_start(
        self,
    ) -> "Signal[_SignalCallback[TraceDnsResolveHostStartParams]]":
        return self._on_dns_resolvehost_start

    @property
    def on_dns_resolvehost_end(
        self,
    ) -> "Signal[_SignalCallback[TraceDnsResolveHostEndParams]]":
        return self._on_dns_resolvehost_end

    @property
    def on_dns_cache_hit(self) -> "Signal[_SignalCallback[TraceDnsCacheHitParams]]":
        return self._on_dns_cache_hit

    @property
    def on_dns_cache_miss(self) -> "Signal[_SignalCallback[TraceDnsCacheMissParams]]":
        return self._on_dns_cache_miss

    @property
    def on_request_headers_sent(
        self,
    ) -> "Signal[_SignalCallback[TraceRequestHeadersSentParams]]":
        return self._on_request_headers_sent


@attr.s(auto_attribs=True, frozen=True, slots=True)
class TraceRequestStartParams:
    """Parameters sent by the `on_request_start` signal"""

    method: str
    url: URL
    headers: "CIMultiDict[str]"


@attr.s(auto_attribs=True, frozen=True, slots=True)
class TraceRequestChunkSentParams:
    """Parameters sent by the `on_request_chunk_sent` signal"""

    method: str
    url: URL
    chunk: bytes


@attr.s(auto_attribs=True, frozen=True, slots=True)
class TraceResponseChunkReceivedParams:
    """Parameters sent by the `on_response_chunk_received` signal"""

    method: str
    url: URL
    chunk: bytes


@attr.s(auto_attribs=True, frozen=True, slots=True)
class TraceRequestEndParams:
    """Parameters sent by the `on_request_end` signal"""

    method: str
    url: URL
    headers: "CIMultiDict[str]"
    response: ClientResponse


@attr.s(auto_attribs=True, frozen=True, slots=True)
class TraceRequestExceptionParams:
    """Parameters sent by the `on_request_exception` signal"""

    method: str
    url: URL
    headers: "CIMultiDict[str]"
    exception: BaseException


@attr.s(auto_attribs=True, frozen=True, slots=True)
class TraceRequestRedirectParams:
    """Parameters sent by the `on_request_redirect` signal"""

    method: str
    url: URL
    headers: "CIMultiDict[str]"
    response: ClientResponse


@attr.s(auto_attribs=True, frozen=True, slots=True)
class TraceConnectionQueuedStartParams:
    """Parameters sent by the `on_connection_queued_start` signal"""


@attr.s(auto_attribs=True, frozen=True, slots=True)
class TraceConnectionQueuedEndParams:
    """Parameters sent by the `on_connection_queued_end` signal"""


@attr.s(auto_attribs=True, frozen=True, slots=True)
class TraceConnectionCreateStartParams:
    """Parameters sent by the `on_connection_create_start` signal"""


@attr.s(auto_attribs=True, frozen=True, slots=True)
class TraceConnectionCreateEndParams:
    """Parameters sent by the `on_connection_create_end` signal"""


@attr.s(auto_attribs=True, frozen=True, slots=True)
class TraceConnectionReuseconnParams:
    """Parameters sent by the `on_connection_reuseconn` signal"""


@attr.s(auto_attribs=True, frozen=True, slots=True)
class TraceDnsResolveHostStartParams:
    """Parameters sent by the `on_dns_resolvehost_start` signal"""

    host: str


@attr.s(auto_attribs=True, frozen=True, slots=True)
class TraceDnsResolveHostEndParams:
    """Parameters sent by the `on_dns_resolvehost_end` signal"""

    host: str


@attr.s(auto_attribs=True, frozen=True, slots=True)
class TraceDnsCacheHitParams:
    """Parameters sent by the `on_dns_cache_hit` signal"""

    host: str


@attr.s(auto_attribs=True, frozen=True, slots=True)
class TraceDnsCacheMissParams:
    """Parameters sent by the `on_dns_cache_miss` signal"""

    host: str


@attr.s(auto_attribs=True, frozen=True, slots=True)
class TraceRequestHeadersSentParams:
    """Parameters sent by the `on_request_headers_sent` signal"""

    method: str
    url: URL
    headers: "CIMultiDict[str]"


class Trace:
    """Internal dependency holder class.

    Used to keep together the main dependencies used
    at the moment of sending a signal.
    """

    def __init__(
        self,
        session: "ClientSession",
        trace_config: TraceConfig,
        trace_config_ctx: SimpleNamespace,
    ) -> None:
        self._trace_config = trace_config
        self._trace_config_ctx = trace_config_ctx
        self._session = session

    async def send_request_start(
        self, method: str, url: URL, headers: "CIMultiDict[str]"
    ) -> None:
        return await self._trace_config.on_request_start.send(
            self._session,
            self._trace_config_ctx,
            TraceRequestStartParams(method, url, headers),
        )

    async def send_request_chunk_sent(
        self, method: str, url: URL, chunk: bytes
    ) -> None:
        return await self._trace_config.on_request_chunk_sent.send(
            self._session,
            self._trace_config_ctx,
            TraceRequestChunkSentParams(method, url, chunk),
        )

    async def send_response_chunk_received(
        self, method: str, url: URL, chunk: bytes
    ) -> None:
        return await self._trace_config.on_response_chunk_received.send(
            self._session,
            self._trace_config_ctx,
            TraceResponseChunkReceivedParams(method, url, chunk),
        )

    async def send_request_end(
        self,
        method: str,
        url: URL,
        headers: "CIMultiDict[str]",
        response: ClientResponse,
    ) -> None:
        return await self._trace_config.on_request_end.send(
            self._session,
            self._trace_config_ctx,
            TraceRequestEndParams(method, url, headers, response),
        )

    async def send_request_exception(
        self,
        method: str,
        url: URL,
        headers: "CIMultiDict[str]",
        exception: BaseException,
    ) -> None:
        return await self._trace_config.on_request_exception.send(
            self._session,
            self._trace_config_ctx,
            TraceRequestExceptionParams(method, url, headers, exception),
        )

    async def send_request_redirect(
        self,
        method: str,
        url: URL,
        headers: "CIMultiDict[str]",
        response: ClientResponse,
    ) -> None:
        return await self._trace_config._on_request_redirect.send(
            self._session,
            self._trace_config_ctx,
            TraceRequestRedirectParams(method, url, headers, response),
        )

    async def send_connection_queued_start(self) -> None:
        return await self._trace_config.on_connection_queued_start.send(
            self._session, self._trace_config_ctx, TraceConnectionQueuedStartParams()
        )

    async def send_connection_queued_end(self) -> None:
        return await self._trace_config.on_connection_queued_end.send(
            self._session, self._trace_config_ctx, TraceConnectionQueuedEndParams()
        )

    async def send_connection_create_start(self) -> None:
        return await self._trace_config.on_connection_create_start.send(
            self._session, self._trace_config_ctx, TraceConnectionCreateStartParams()
        )

    async def send_connection_create_end(self) -> None:
        return await self._trace_config.on_connection_create_end.send(
            self._session, self._trace_config_ctx, TraceConnectionCreateEndParams()
        )

    async def send_connection_reuseconn(self) -> None:
        return await self._trace_config.on_connection_reuseconn.send(
            self._session, self._trace_config_ctx, TraceConnectionReuseconnParams()
        )

    async def send_dns_resolvehost_start(self, host: str) -> None:
        return await self._trace_config.on_dns_resolvehost_start.send(
            self._session, self._trace_config_ctx, TraceDnsResolveHostStartParams(host)
        )

    async def send_dns_resolvehost_end(self, host: str) -> None:
        return await self._trace_config.on_dns_resolvehost_end.send(
            self._session, self._trace_config_ctx, TraceDnsResolveHostEndParams(host)
        )

    async def send_dns_cache_hit(self, host: str) -> None:
        return await self._trace_config.on_dns_cache_hit.send(
            self._session, self._trace_config_ctx, TraceDnsCacheHitParams(host)
        )

    async def send_dns_cache_miss(self, host: str) -> None:
        return await self._trace_config.on_dns_cache_miss.send(
            self._session, self._trace_config_ctx, TraceDnsCacheMissParams(host)
        )

    async def send_request_headers(
        self, method: str, url: URL, headers: "CIMultiDict[str]"
    ) -> None:
        return await self._trace_config._on_request_headers_sent.send(
            self._session,
            self._trace_config_ctx,
            TraceRequestHeadersSentParams(method, url, headers),
        )
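
A short usage sketch (not part of the vendored file): TraceConfig instances are passed to ClientSession via its trace_configs argument, and each on_* property above is an aiosignal.Signal to which callbacks can be appended before the session freezes them. The URL is a placeholder.

import asyncio
from types import SimpleNamespace

import aiohttp


async def on_request_start(
    session: aiohttp.ClientSession,
    ctx: SimpleNamespace,
    params: aiohttp.TraceRequestStartParams,
) -> None:
    # Fired by Trace.send_request_start just before the request is sent.
    print(f"-> {params.method} {params.url}")


async def main() -> None:
    trace_config = aiohttp.TraceConfig()
    trace_config.on_request_start.append(on_request_start)
    async with aiohttp.ClientSession(trace_configs=[trace_config]) as session:
        async with session.get("https://example.com/") as resp:
            await resp.read()


asyncio.run(main())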
64
.venv/lib/python3.7/site-packages/aiohttp/typedefs.py
Normal file
@@ -0,0 +1,64 @@
import json
import os
import sys
from typing import (
    TYPE_CHECKING,
    Any,
    Awaitable,
    Callable,
    Iterable,
    Mapping,
    Tuple,
    Union,
)

from multidict import CIMultiDict, CIMultiDictProxy, MultiDict, MultiDictProxy, istr
from yarl import URL

# These are for other modules to use (to avoid repeating the conditional import).
if sys.version_info >= (3, 8):
    from typing import Final as Final, Protocol as Protocol, TypedDict as TypedDict
else:
    from typing_extensions import (  # noqa: F401
        Final,
        Protocol as Protocol,
        TypedDict as TypedDict,
    )

DEFAULT_JSON_ENCODER = json.dumps
DEFAULT_JSON_DECODER = json.loads

if TYPE_CHECKING:  # pragma: no cover
    _CIMultiDict = CIMultiDict[str]
    _CIMultiDictProxy = CIMultiDictProxy[str]
    _MultiDict = MultiDict[str]
    _MultiDictProxy = MultiDictProxy[str]
    from http.cookies import BaseCookie, Morsel

    from .web import Request, StreamResponse
else:
    _CIMultiDict = CIMultiDict
    _CIMultiDictProxy = CIMultiDictProxy
    _MultiDict = MultiDict
    _MultiDictProxy = MultiDictProxy

Byteish = Union[bytes, bytearray, memoryview]
JSONEncoder = Callable[[Any], str]
JSONDecoder = Callable[[str], Any]
LooseHeaders = Union[Mapping[Union[str, istr], str], _CIMultiDict, _CIMultiDictProxy]
RawHeaders = Tuple[Tuple[bytes, bytes], ...]
StrOrURL = Union[str, URL]

LooseCookiesMappings = Mapping[str, Union[str, "BaseCookie[str]", "Morsel[Any]"]]
LooseCookiesIterables = Iterable[
    Tuple[str, Union[str, "BaseCookie[str]", "Morsel[Any]"]]
]
LooseCookies = Union[
    LooseCookiesMappings,
    LooseCookiesIterables,
    "BaseCookie[str]",
]

Handler = Callable[["Request"], Awaitable["StreamResponse"]]

PathLike = Union[str, "os.PathLike[str]"]
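
A brief sketch (not part of the vendored file) of how these aliases are consumed downstream: Handler types the second argument of a new-style middleware, and LooseHeaders admits a plain dict among other mapping shapes. The middleware name and header values are illustrative only.

from aiohttp import web
from aiohttp.typedefs import Handler, LooseHeaders


@web.middleware
async def tagging_middleware(
    request: web.Request, handler: Handler
) -> web.StreamResponse:
    # handler is Callable[[Request], Awaitable[StreamResponse]] per the alias above.
    response = await handler(request)
    response.headers["X-Tagged"] = "yes"
    return response


headers: LooseHeaders = {"Accept": "application/json"}  # a plain dict qualifies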
588
.venv/lib/python3.7/site-packages/aiohttp/web.py
Normal file
@@ -0,0 +1,588 @@
import asyncio
import logging
import socket
import sys
from argparse import ArgumentParser
from collections.abc import Iterable
from importlib import import_module
from typing import (
    Any,
    Awaitable,
    Callable,
    Iterable as TypingIterable,
    List,
    Optional,
    Set,
    Type,
    Union,
    cast,
)

from .abc import AbstractAccessLogger
from .helpers import all_tasks
from .log import access_logger
from .web_app import Application as Application, CleanupError as CleanupError
from .web_exceptions import (
    HTTPAccepted as HTTPAccepted,
    HTTPBadGateway as HTTPBadGateway,
    HTTPBadRequest as HTTPBadRequest,
    HTTPClientError as HTTPClientError,
    HTTPConflict as HTTPConflict,
    HTTPCreated as HTTPCreated,
    HTTPError as HTTPError,
    HTTPException as HTTPException,
    HTTPExpectationFailed as HTTPExpectationFailed,
    HTTPFailedDependency as HTTPFailedDependency,
    HTTPForbidden as HTTPForbidden,
    HTTPFound as HTTPFound,
    HTTPGatewayTimeout as HTTPGatewayTimeout,
    HTTPGone as HTTPGone,
    HTTPInsufficientStorage as HTTPInsufficientStorage,
    HTTPInternalServerError as HTTPInternalServerError,
    HTTPLengthRequired as HTTPLengthRequired,
    HTTPMethodNotAllowed as HTTPMethodNotAllowed,
    HTTPMisdirectedRequest as HTTPMisdirectedRequest,
    HTTPMovedPermanently as HTTPMovedPermanently,
    HTTPMultipleChoices as HTTPMultipleChoices,
    HTTPNetworkAuthenticationRequired as HTTPNetworkAuthenticationRequired,
    HTTPNoContent as HTTPNoContent,
    HTTPNonAuthoritativeInformation as HTTPNonAuthoritativeInformation,
    HTTPNotAcceptable as HTTPNotAcceptable,
    HTTPNotExtended as HTTPNotExtended,
    HTTPNotFound as HTTPNotFound,
    HTTPNotImplemented as HTTPNotImplemented,
    HTTPNotModified as HTTPNotModified,
    HTTPOk as HTTPOk,
    HTTPPartialContent as HTTPPartialContent,
    HTTPPaymentRequired as HTTPPaymentRequired,
    HTTPPermanentRedirect as HTTPPermanentRedirect,
    HTTPPreconditionFailed as HTTPPreconditionFailed,
    HTTPPreconditionRequired as HTTPPreconditionRequired,
    HTTPProxyAuthenticationRequired as HTTPProxyAuthenticationRequired,
    HTTPRedirection as HTTPRedirection,
    HTTPRequestEntityTooLarge as HTTPRequestEntityTooLarge,
    HTTPRequestHeaderFieldsTooLarge as HTTPRequestHeaderFieldsTooLarge,
    HTTPRequestRangeNotSatisfiable as HTTPRequestRangeNotSatisfiable,
    HTTPRequestTimeout as HTTPRequestTimeout,
    HTTPRequestURITooLong as HTTPRequestURITooLong,
    HTTPResetContent as HTTPResetContent,
    HTTPSeeOther as HTTPSeeOther,
    HTTPServerError as HTTPServerError,
    HTTPServiceUnavailable as HTTPServiceUnavailable,
    HTTPSuccessful as HTTPSuccessful,
    HTTPTemporaryRedirect as HTTPTemporaryRedirect,
    HTTPTooManyRequests as HTTPTooManyRequests,
    HTTPUnauthorized as HTTPUnauthorized,
    HTTPUnavailableForLegalReasons as HTTPUnavailableForLegalReasons,
    HTTPUnprocessableEntity as HTTPUnprocessableEntity,
    HTTPUnsupportedMediaType as HTTPUnsupportedMediaType,
    HTTPUpgradeRequired as HTTPUpgradeRequired,
    HTTPUseProxy as HTTPUseProxy,
    HTTPVariantAlsoNegotiates as HTTPVariantAlsoNegotiates,
    HTTPVersionNotSupported as HTTPVersionNotSupported,
)
from .web_fileresponse import FileResponse as FileResponse
from .web_log import AccessLogger
from .web_middlewares import (
    middleware as middleware,
    normalize_path_middleware as normalize_path_middleware,
)
from .web_protocol import (
    PayloadAccessError as PayloadAccessError,
    RequestHandler as RequestHandler,
    RequestPayloadError as RequestPayloadError,
)
from .web_request import (
    BaseRequest as BaseRequest,
    FileField as FileField,
    Request as Request,
)
from .web_response import (
    ContentCoding as ContentCoding,
    Response as Response,
    StreamResponse as StreamResponse,
    json_response as json_response,
)
from .web_routedef import (
    AbstractRouteDef as AbstractRouteDef,
    RouteDef as RouteDef,
    RouteTableDef as RouteTableDef,
    StaticDef as StaticDef,
    delete as delete,
    get as get,
    head as head,
    options as options,
    patch as patch,
    post as post,
    put as put,
    route as route,
    static as static,
    view as view,
)
from .web_runner import (
    AppRunner as AppRunner,
    BaseRunner as BaseRunner,
    BaseSite as BaseSite,
    GracefulExit as GracefulExit,
    NamedPipeSite as NamedPipeSite,
    ServerRunner as ServerRunner,
    SockSite as SockSite,
    TCPSite as TCPSite,
    UnixSite as UnixSite,
)
from .web_server import Server as Server
from .web_urldispatcher import (
    AbstractResource as AbstractResource,
    AbstractRoute as AbstractRoute,
    DynamicResource as DynamicResource,
    PlainResource as PlainResource,
    PrefixedSubAppResource as PrefixedSubAppResource,
    Resource as Resource,
    ResourceRoute as ResourceRoute,
    StaticResource as StaticResource,
    UrlDispatcher as UrlDispatcher,
    UrlMappingMatchInfo as UrlMappingMatchInfo,
    View as View,
)
from .web_ws import (
    WebSocketReady as WebSocketReady,
    WebSocketResponse as WebSocketResponse,
    WSMsgType as WSMsgType,
)

__all__ = (
    # web_app
    "Application",
    "CleanupError",
    # web_exceptions
    "HTTPAccepted",
    "HTTPBadGateway",
    "HTTPBadRequest",
    "HTTPClientError",
    "HTTPConflict",
    "HTTPCreated",
    "HTTPError",
    "HTTPException",
    "HTTPExpectationFailed",
    "HTTPFailedDependency",
    "HTTPForbidden",
    "HTTPFound",
    "HTTPGatewayTimeout",
    "HTTPGone",
    "HTTPInsufficientStorage",
    "HTTPInternalServerError",
    "HTTPLengthRequired",
    "HTTPMethodNotAllowed",
    "HTTPMisdirectedRequest",
    "HTTPMovedPermanently",
    "HTTPMultipleChoices",
    "HTTPNetworkAuthenticationRequired",
    "HTTPNoContent",
    "HTTPNonAuthoritativeInformation",
    "HTTPNotAcceptable",
    "HTTPNotExtended",
    "HTTPNotFound",
    "HTTPNotImplemented",
    "HTTPNotModified",
    "HTTPOk",
    "HTTPPartialContent",
    "HTTPPaymentRequired",
    "HTTPPermanentRedirect",
    "HTTPPreconditionFailed",
    "HTTPPreconditionRequired",
    "HTTPProxyAuthenticationRequired",
    "HTTPRedirection",
    "HTTPRequestEntityTooLarge",
    "HTTPRequestHeaderFieldsTooLarge",
    "HTTPRequestRangeNotSatisfiable",
    "HTTPRequestTimeout",
    "HTTPRequestURITooLong",
    "HTTPResetContent",
    "HTTPSeeOther",
    "HTTPServerError",
    "HTTPServiceUnavailable",
    "HTTPSuccessful",
    "HTTPTemporaryRedirect",
    "HTTPTooManyRequests",
    "HTTPUnauthorized",
    "HTTPUnavailableForLegalReasons",
    "HTTPUnprocessableEntity",
    "HTTPUnsupportedMediaType",
    "HTTPUpgradeRequired",
    "HTTPUseProxy",
    "HTTPVariantAlsoNegotiates",
    "HTTPVersionNotSupported",
    # web_fileresponse
    "FileResponse",
    # web_middlewares
    "middleware",
    "normalize_path_middleware",
    # web_protocol
    "PayloadAccessError",
    "RequestHandler",
    "RequestPayloadError",
    # web_request
    "BaseRequest",
    "FileField",
    "Request",
    # web_response
    "ContentCoding",
    "Response",
    "StreamResponse",
    "json_response",
    # web_routedef
    "AbstractRouteDef",
    "RouteDef",
    "RouteTableDef",
    "StaticDef",
    "delete",
    "get",
    "head",
    "options",
    "patch",
    "post",
    "put",
    "route",
    "static",
    "view",
    # web_runner
    "AppRunner",
    "BaseRunner",
    "BaseSite",
    "GracefulExit",
    "ServerRunner",
    "SockSite",
    "TCPSite",
    "UnixSite",
    "NamedPipeSite",
    # web_server
    "Server",
    # web_urldispatcher
    "AbstractResource",
    "AbstractRoute",
    "DynamicResource",
    "PlainResource",
    "PrefixedSubAppResource",
    "Resource",
    "ResourceRoute",
    "StaticResource",
    "UrlDispatcher",
    "UrlMappingMatchInfo",
    "View",
    # web_ws
    "WebSocketReady",
    "WebSocketResponse",
    "WSMsgType",
    # web
    "run_app",
)


try:
    from ssl import SSLContext
except ImportError:  # pragma: no cover
    SSLContext = Any  # type: ignore[misc,assignment]

HostSequence = TypingIterable[str]


async def _run_app(
    app: Union[Application, Awaitable[Application]],
    *,
    host: Optional[Union[str, HostSequence]] = None,
    port: Optional[int] = None,
    path: Optional[str] = None,
    sock: Optional[Union[socket.socket, TypingIterable[socket.socket]]] = None,
    shutdown_timeout: float = 60.0,
    keepalive_timeout: float = 75.0,
    ssl_context: Optional[SSLContext] = None,
    print: Callable[..., None] = print,
    backlog: int = 128,
    access_log_class: Type[AbstractAccessLogger] = AccessLogger,
    access_log_format: str = AccessLogger.LOG_FORMAT,
    access_log: Optional[logging.Logger] = access_logger,
    handle_signals: bool = True,
    reuse_address: Optional[bool] = None,
    reuse_port: Optional[bool] = None,
) -> None:
    # An internal function that does all the dirty work of running the application.
    if asyncio.iscoroutine(app):
        app = await app  # type: ignore[misc]

    app = cast(Application, app)

    runner = AppRunner(
        app,
        handle_signals=handle_signals,
        access_log_class=access_log_class,
        access_log_format=access_log_format,
        access_log=access_log,
        keepalive_timeout=keepalive_timeout,
    )

    await runner.setup()

    sites: List[BaseSite] = []

    try:
        if host is not None:
            if isinstance(host, (str, bytes, bytearray, memoryview)):
                sites.append(
                    TCPSite(
                        runner,
                        host,
                        port,
                        shutdown_timeout=shutdown_timeout,
                        ssl_context=ssl_context,
                        backlog=backlog,
                        reuse_address=reuse_address,
                        reuse_port=reuse_port,
                    )
                )
            else:
                for h in host:
                    sites.append(
                        TCPSite(
                            runner,
                            h,
                            port,
                            shutdown_timeout=shutdown_timeout,
                            ssl_context=ssl_context,
                            backlog=backlog,
                            reuse_address=reuse_address,
                            reuse_port=reuse_port,
                        )
                    )
        elif path is None and sock is None or port is not None:
            sites.append(
                TCPSite(
                    runner,
                    port=port,
                    shutdown_timeout=shutdown_timeout,
                    ssl_context=ssl_context,
                    backlog=backlog,
                    reuse_address=reuse_address,
                    reuse_port=reuse_port,
                )
            )

        if path is not None:
            if isinstance(path, (str, bytes, bytearray, memoryview)):
                sites.append(
                    UnixSite(
                        runner,
                        path,
                        shutdown_timeout=shutdown_timeout,
                        ssl_context=ssl_context,
                        backlog=backlog,
                    )
                )
            else:
                for p in path:
                    sites.append(
                        UnixSite(
                            runner,
                            p,
                            shutdown_timeout=shutdown_timeout,
                            ssl_context=ssl_context,
                            backlog=backlog,
                        )
                    )

        if sock is not None:
            if not isinstance(sock, Iterable):
                sites.append(
                    SockSite(
                        runner,
                        sock,
                        shutdown_timeout=shutdown_timeout,
                        ssl_context=ssl_context,
                        backlog=backlog,
                    )
                )
            else:
                for s in sock:
                    sites.append(
                        SockSite(
                            runner,
                            s,
                            shutdown_timeout=shutdown_timeout,
                            ssl_context=ssl_context,
                            backlog=backlog,
                        )
                    )
        for site in sites:
            await site.start()

        if print:  # pragma: no branch
            names = sorted(str(s.name) for s in runner.sites)
            print(
                "======== Running on {} ========\n"
                "(Press CTRL+C to quit)".format(", ".join(names))
            )

        # Sleep forever in 1-hour intervals; on Windows before Python 3.8,
        # wake up every second so Ctrl+C is handled smoothly.
        if sys.platform == "win32" and sys.version_info < (3, 8):
            delay = 1
        else:
            delay = 3600

        while True:
            await asyncio.sleep(delay)
    finally:
        await runner.cleanup()


def _cancel_tasks(
    to_cancel: Set["asyncio.Task[Any]"], loop: asyncio.AbstractEventLoop
) -> None:
    if not to_cancel:
        return

    for task in to_cancel:
        task.cancel()

    loop.run_until_complete(asyncio.gather(*to_cancel, return_exceptions=True))

    for task in to_cancel:
        if task.cancelled():
            continue
        if task.exception() is not None:
            loop.call_exception_handler(
                {
                    "message": "unhandled exception during asyncio.run() shutdown",
                    "exception": task.exception(),
                    "task": task,
                }
            )


def run_app(
    app: Union[Application, Awaitable[Application]],
    *,
    host: Optional[Union[str, HostSequence]] = None,
    port: Optional[int] = None,
    path: Optional[str] = None,
    sock: Optional[Union[socket.socket, TypingIterable[socket.socket]]] = None,
    shutdown_timeout: float = 60.0,
    keepalive_timeout: float = 75.0,
    ssl_context: Optional[SSLContext] = None,
    print: Callable[..., None] = print,
    backlog: int = 128,
    access_log_class: Type[AbstractAccessLogger] = AccessLogger,
    access_log_format: str = AccessLogger.LOG_FORMAT,
    access_log: Optional[logging.Logger] = access_logger,
    handle_signals: bool = True,
    reuse_address: Optional[bool] = None,
    reuse_port: Optional[bool] = None,
    loop: Optional[asyncio.AbstractEventLoop] = None,
) -> None:
    """Run an app locally"""
    if loop is None:
        loop = asyncio.new_event_loop()

    # Configure if and only if in debugging mode and using the default logger
    if loop.get_debug() and access_log and access_log.name == "aiohttp.access":
        if access_log.level == logging.NOTSET:
            access_log.setLevel(logging.DEBUG)
        if not access_log.hasHandlers():
            access_log.addHandler(logging.StreamHandler())

    main_task = loop.create_task(
        _run_app(
            app,
            host=host,
            port=port,
            path=path,
            sock=sock,
            shutdown_timeout=shutdown_timeout,
            keepalive_timeout=keepalive_timeout,
            ssl_context=ssl_context,
            print=print,
            backlog=backlog,
            access_log_class=access_log_class,
            access_log_format=access_log_format,
            access_log=access_log,
            handle_signals=handle_signals,
            reuse_address=reuse_address,
            reuse_port=reuse_port,
        )
    )

    try:
        asyncio.set_event_loop(loop)
        loop.run_until_complete(main_task)
    except (GracefulExit, KeyboardInterrupt):  # pragma: no cover
        pass
    finally:
        _cancel_tasks({main_task}, loop)
        _cancel_tasks(all_tasks(loop), loop)
        loop.run_until_complete(loop.shutdown_asyncgens())
        loop.close()


def main(argv: List[str]) -> None:
    arg_parser = ArgumentParser(
        description="aiohttp.web Application server", prog="aiohttp.web"
    )
    arg_parser.add_argument(
        "entry_func",
        help=(
            "Callable returning the `aiohttp.web.Application` instance to "
            "run. Should be specified in the 'module:function' syntax."
        ),
        metavar="entry-func",
    )
    arg_parser.add_argument(
        "-H",
        "--hostname",
        help="TCP/IP hostname to serve on (default: %(default)r)",
        default="localhost",
    )
    arg_parser.add_argument(
        "-P",
        "--port",
        help="TCP/IP port to serve on (default: %(default)r)",
        type=int,
        default="8080",
    )
    arg_parser.add_argument(
        "-U",
        "--path",
        help="Unix file system path to serve on. Specifying a path will cause "
        "hostname and port arguments to be ignored.",
    )
    args, extra_argv = arg_parser.parse_known_args(argv)

    # Import logic
    mod_str, _, func_str = args.entry_func.partition(":")
    if not func_str or not mod_str:
        arg_parser.error("'entry-func' not in 'module:function' syntax")
    if mod_str.startswith("."):
        arg_parser.error("relative module names not supported")
    try:
        module = import_module(mod_str)
    except ImportError as ex:
        arg_parser.error(f"unable to import {mod_str}: {ex}")
    try:
        func = getattr(module, func_str)
    except AttributeError:
        arg_parser.error(f"module {mod_str!r} has no attribute {func_str!r}")

    # Compatibility logic
    if args.path is not None and not hasattr(socket, "AF_UNIX"):
        arg_parser.error(
            "file system paths not supported by your operating environment"
        )

    logging.basicConfig(level=logging.DEBUG)

    app = func(extra_argv)
    run_app(app, host=args.hostname, port=args.port, path=args.path)
    arg_parser.exit(message="Stopped\n")


if __name__ == "__main__":  # pragma: no branch
    main(sys.argv[1:])  # pragma: no cover
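
A minimal usage sketch (not part of the vendored file) for run_app above: it creates a fresh event loop, schedules _run_app as main_task, and blocks until GracefulExit or Ctrl+C, then cancels outstanding tasks and closes the loop. The host, port, and route are placeholders.

from aiohttp import web


async def hello(request: web.Request) -> web.Response:
    return web.Response(text="Hello, world")


def create_app() -> web.Application:
    app = web.Application()
    app.add_routes([web.get("/", hello)])
    return app


if __name__ == "__main__":
    web.run_app(create_app(), host="127.0.0.1", port=8080)

Note that the CLI entry point defined in main() above (python -m aiohttp.web module:function) calls func(extra_argv), so a factory used that way must accept one positional argument.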
557
.venv/lib/python3.7/site-packages/aiohttp/web_app.py
Normal file
@@ -0,0 +1,557 @@
import asyncio
import logging
import warnings
from functools import partial, update_wrapper
from typing import (
    TYPE_CHECKING,
    Any,
    AsyncIterator,
    Awaitable,
    Callable,
    Dict,
    Iterable,
    Iterator,
    List,
    Mapping,
    MutableMapping,
    Optional,
    Sequence,
    Tuple,
    Type,
    Union,
    cast,
)

from aiosignal import Signal
from frozenlist import FrozenList

from . import hdrs
from .abc import (
    AbstractAccessLogger,
    AbstractMatchInfo,
    AbstractRouter,
    AbstractStreamWriter,
)
from .helpers import DEBUG
from .http_parser import RawRequestMessage
from .log import web_logger
from .streams import StreamReader
from .web_log import AccessLogger
from .web_middlewares import _fix_request_current_app
from .web_protocol import RequestHandler
from .web_request import Request
from .web_response import StreamResponse
from .web_routedef import AbstractRouteDef
from .web_server import Server
from .web_urldispatcher import (
    AbstractResource,
    AbstractRoute,
    Domain,
    MaskDomain,
    MatchedSubAppResource,
    PrefixedSubAppResource,
    UrlDispatcher,
)

__all__ = ("Application", "CleanupError")


if TYPE_CHECKING:  # pragma: no cover
    from .typedefs import Handler

    _AppSignal = Signal[Callable[["Application"], Awaitable[None]]]
    _RespPrepareSignal = Signal[Callable[[Request, StreamResponse], Awaitable[None]]]
    _Middleware = Union[
        Callable[[Request, Handler], Awaitable[StreamResponse]],
        Callable[["Application", Handler], Awaitable[Handler]],  # old-style
    ]
    _Middlewares = FrozenList[_Middleware]
    _MiddlewaresHandlers = Optional[Sequence[Tuple[_Middleware, bool]]]
    _Subapps = List["Application"]
else:
    # No type checker mode, skip types
    _AppSignal = Signal
    _RespPrepareSignal = Signal
    _Middleware = Callable
    _Middlewares = FrozenList
    _MiddlewaresHandlers = Optional[Sequence]
    _Subapps = List


class Application(MutableMapping[str, Any]):
    ATTRS = frozenset(
        [
            "logger",
            "_debug",
            "_router",
            "_loop",
            "_handler_args",
            "_middlewares",
            "_middlewares_handlers",
            "_run_middlewares",
            "_state",
            "_frozen",
            "_pre_frozen",
            "_subapps",
            "_on_response_prepare",
            "_on_startup",
            "_on_shutdown",
            "_on_cleanup",
            "_client_max_size",
            "_cleanup_ctx",
        ]
    )

    def __init__(
        self,
        *,
        logger: logging.Logger = web_logger,
        router: Optional[UrlDispatcher] = None,
        middlewares: Iterable[_Middleware] = (),
        handler_args: Optional[Mapping[str, Any]] = None,
        client_max_size: int = 1024**2,
        loop: Optional[asyncio.AbstractEventLoop] = None,
        debug: Any = ...,  # mypy doesn't support ellipsis
    ) -> None:
        if router is None:
            router = UrlDispatcher()
        else:
            warnings.warn(
                "router argument is deprecated", DeprecationWarning, stacklevel=2
            )
            assert isinstance(router, AbstractRouter), router

        if loop is not None:
            warnings.warn(
                "loop argument is deprecated", DeprecationWarning, stacklevel=2
            )

        if debug is not ...:
            warnings.warn(
                "debug argument is deprecated", DeprecationWarning, stacklevel=2
            )
        self._debug = debug
        self._router: UrlDispatcher = router
        self._loop = loop
        self._handler_args = handler_args
        self.logger = logger

        self._middlewares: _Middlewares = FrozenList(middlewares)

        # initialized on freezing
        self._middlewares_handlers: _MiddlewaresHandlers = None
        # initialized on freezing
        self._run_middlewares: Optional[bool] = None

        self._state: Dict[str, Any] = {}
        self._frozen = False
        self._pre_frozen = False
        self._subapps: _Subapps = []

        self._on_response_prepare: _RespPrepareSignal = Signal(self)
        self._on_startup: _AppSignal = Signal(self)
        self._on_shutdown: _AppSignal = Signal(self)
        self._on_cleanup: _AppSignal = Signal(self)
        self._cleanup_ctx = CleanupContext()
        self._on_startup.append(self._cleanup_ctx._on_startup)
        self._on_cleanup.append(self._cleanup_ctx._on_cleanup)
        self._client_max_size = client_max_size

    def __init_subclass__(cls: Type["Application"]) -> None:
        warnings.warn(
            "Inheritance class {} from web.Application "
            "is discouraged".format(cls.__name__),
            DeprecationWarning,
            stacklevel=2,
        )

    if DEBUG:  # pragma: no cover

        def __setattr__(self, name: str, val: Any) -> None:
            if name not in self.ATTRS:
                warnings.warn(
                    "Setting custom web.Application.{} attribute "
                    "is discouraged".format(name),
                    DeprecationWarning,
                    stacklevel=2,
                )
            super().__setattr__(name, val)

    # MutableMapping API

    def __eq__(self, other: object) -> bool:
        return self is other

    def __getitem__(self, key: str) -> Any:
        return self._state[key]

    def _check_frozen(self) -> None:
        if self._frozen:
            warnings.warn(
                "Changing state of started or joined application is deprecated",
                DeprecationWarning,
                stacklevel=3,
            )

    def __setitem__(self, key: str, value: Any) -> None:
        self._check_frozen()
        self._state[key] = value

    def __delitem__(self, key: str) -> None:
        self._check_frozen()
        del self._state[key]

    def __len__(self) -> int:
        return len(self._state)

    def __iter__(self) -> Iterator[str]:
        return iter(self._state)

    ########
    @property
    def loop(self) -> asyncio.AbstractEventLoop:
        # Technically the loop can be None,
        # but we mask it with an explicit type cast
        # to provide a more convenient type annotation.
        warnings.warn("loop property is deprecated", DeprecationWarning, stacklevel=2)
        return cast(asyncio.AbstractEventLoop, self._loop)

    def _set_loop(self, loop: Optional[asyncio.AbstractEventLoop]) -> None:
        if loop is None:
            loop = asyncio.get_event_loop()
        if self._loop is not None and self._loop is not loop:
            raise RuntimeError(
                "web.Application instance initialized with different loop"
            )

        self._loop = loop

        # set loop debug
        if self._debug is ...:
            self._debug = loop.get_debug()

        # set loop to sub applications
        for subapp in self._subapps:
            subapp._set_loop(loop)

    @property
    def pre_frozen(self) -> bool:
        return self._pre_frozen

    def pre_freeze(self) -> None:
        if self._pre_frozen:
            return

        self._pre_frozen = True
        self._middlewares.freeze()
        self._router.freeze()
        self._on_response_prepare.freeze()
        self._cleanup_ctx.freeze()
        self._on_startup.freeze()
        self._on_shutdown.freeze()
        self._on_cleanup.freeze()
        self._middlewares_handlers = tuple(self._prepare_middleware())

        # If neither the current app nor any subapp has middlewares, avoid
        # running all of the code that middleware support implies: a
        # middleware hardcoded per app sets up the current_app attribute.
        # If no middlewares are configured, the handler will receive the
        # proper current_app without needing any of this code.
        self._run_middlewares = True if self.middlewares else False
|
||||
|
||||
for subapp in self._subapps:
|
||||
subapp.pre_freeze()
|
||||
self._run_middlewares = self._run_middlewares or subapp._run_middlewares
|
||||
|
||||
@property
|
||||
def frozen(self) -> bool:
|
||||
return self._frozen
|
||||
|
||||
def freeze(self) -> None:
|
||||
if self._frozen:
|
||||
return
|
||||
|
||||
self.pre_freeze()
|
||||
self._frozen = True
|
||||
for subapp in self._subapps:
|
||||
subapp.freeze()
|
||||
|
||||
@property
|
||||
def debug(self) -> bool:
|
||||
warnings.warn("debug property is deprecated", DeprecationWarning, stacklevel=2)
|
||||
return self._debug # type: ignore[no-any-return]
|
||||
|
||||
def _reg_subapp_signals(self, subapp: "Application") -> None:
|
||||
def reg_handler(signame: str) -> None:
|
||||
subsig = getattr(subapp, signame)
|
||||
|
||||
async def handler(app: "Application") -> None:
|
||||
await subsig.send(subapp)
|
||||
|
||||
appsig = getattr(self, signame)
|
||||
appsig.append(handler)
|
||||
|
||||
reg_handler("on_startup")
|
||||
reg_handler("on_shutdown")
|
||||
reg_handler("on_cleanup")
|
||||
|
||||
def add_subapp(self, prefix: str, subapp: "Application") -> AbstractResource:
|
||||
if not isinstance(prefix, str):
|
||||
raise TypeError("Prefix must be str")
|
||||
prefix = prefix.rstrip("/")
|
||||
if not prefix:
|
||||
raise ValueError("Prefix cannot be empty")
|
||||
factory = partial(PrefixedSubAppResource, prefix, subapp)
|
||||
return self._add_subapp(factory, subapp)
|
||||
|
||||
def _add_subapp(
|
||||
self, resource_factory: Callable[[], AbstractResource], subapp: "Application"
|
||||
) -> AbstractResource:
|
||||
if self.frozen:
|
||||
raise RuntimeError("Cannot add sub application to frozen application")
|
||||
if subapp.frozen:
|
||||
raise RuntimeError("Cannot add frozen application")
|
||||
resource = resource_factory()
|
||||
self.router.register_resource(resource)
|
||||
self._reg_subapp_signals(subapp)
|
||||
self._subapps.append(subapp)
|
||||
subapp.pre_freeze()
|
||||
if self._loop is not None:
|
||||
subapp._set_loop(self._loop)
|
||||
return resource
|
||||
|
||||
def add_domain(self, domain: str, subapp: "Application") -> AbstractResource:
|
||||
if not isinstance(domain, str):
|
||||
raise TypeError("Domain must be str")
|
||||
elif "*" in domain:
|
||||
rule: Domain = MaskDomain(domain)
|
||||
else:
|
||||
rule = Domain(domain)
|
||||
factory = partial(MatchedSubAppResource, rule, subapp)
|
||||
return self._add_subapp(factory, subapp)
|
||||
|
||||
def add_routes(self, routes: Iterable[AbstractRouteDef]) -> List[AbstractRoute]:
|
||||
return self.router.add_routes(routes)
|
||||
|
||||
@property
|
||||
def on_response_prepare(self) -> _RespPrepareSignal:
|
||||
return self._on_response_prepare
|
||||
|
||||
@property
|
||||
def on_startup(self) -> _AppSignal:
|
||||
return self._on_startup
|
||||
|
||||
@property
|
||||
def on_shutdown(self) -> _AppSignal:
|
||||
return self._on_shutdown
|
||||
|
||||
@property
|
||||
def on_cleanup(self) -> _AppSignal:
|
||||
return self._on_cleanup
|
||||
|
||||
@property
|
||||
def cleanup_ctx(self) -> "CleanupContext":
|
||||
return self._cleanup_ctx
|
||||
|
||||
@property
|
||||
def router(self) -> UrlDispatcher:
|
||||
return self._router
|
||||
|
||||
@property
|
||||
def middlewares(self) -> _Middlewares:
|
||||
return self._middlewares
|
||||
|
||||
def _make_handler(
|
||||
self,
|
||||
*,
|
||||
loop: Optional[asyncio.AbstractEventLoop] = None,
|
||||
access_log_class: Type[AbstractAccessLogger] = AccessLogger,
|
||||
**kwargs: Any,
|
||||
) -> Server:
|
||||
|
||||
if not issubclass(access_log_class, AbstractAccessLogger):
|
||||
raise TypeError(
|
||||
"access_log_class must be subclass of "
|
||||
"aiohttp.abc.AbstractAccessLogger, got {}".format(access_log_class)
|
||||
)
|
||||
|
||||
self._set_loop(loop)
|
||||
self.freeze()
|
||||
|
||||
kwargs["debug"] = self._debug
|
||||
kwargs["access_log_class"] = access_log_class
|
||||
if self._handler_args:
|
||||
for k, v in self._handler_args.items():
|
||||
kwargs[k] = v
|
||||
|
||||
return Server(
|
||||
self._handle, # type: ignore[arg-type]
|
||||
request_factory=self._make_request,
|
||||
loop=self._loop,
|
||||
**kwargs,
|
||||
)
|
||||
|
||||
def make_handler(
|
||||
self,
|
||||
*,
|
||||
loop: Optional[asyncio.AbstractEventLoop] = None,
|
||||
access_log_class: Type[AbstractAccessLogger] = AccessLogger,
|
||||
**kwargs: Any,
|
||||
) -> Server:
|
||||
|
||||
warnings.warn(
|
||||
"Application.make_handler(...) is deprecated, " "use AppRunner API instead",
|
||||
DeprecationWarning,
|
||||
stacklevel=2,
|
||||
)
|
||||
|
||||
return self._make_handler(
|
||||
loop=loop, access_log_class=access_log_class, **kwargs
|
||||
)
|
||||
|
||||
    async def startup(self) -> None:
        """Causes the on_startup signal.

        Should be called in the event loop along with the request handler.
        """
        await self.on_startup.send(self)

    async def shutdown(self) -> None:
        """Causes the on_shutdown signal.

        Should be called before cleanup().
        """
        await self.on_shutdown.send(self)

    async def cleanup(self) -> None:
        """Causes the on_cleanup signal.

        Should be called after shutdown().
        """
        if self.on_cleanup.frozen:
            await self.on_cleanup.send(self)
        else:
            # If an exception occurs in startup, ensure cleanup contexts are completed.
            await self._cleanup_ctx._on_cleanup(self)
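
The cleanup-context branch above pairs naturally with cleanup_ctx: one async generator holds both sides of a resource's lifetime, with exactly one yield (CleanupContext below raises if there are more). A sketch with a placeholder resource:

from typing import AsyncIterator

from aiohttp import web

async def db_context(app: web.Application) -> AsyncIterator[None]:
    app["db"] = object()  # runs on startup (placeholder acquisition)
    yield                 # the application serves requests here
    app.pop("db", None)   # runs on cleanup

app = web.Application()
app.cleanup_ctx.append(db_context)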
    def _make_request(
        self,
        message: RawRequestMessage,
        payload: StreamReader,
        protocol: RequestHandler,
        writer: AbstractStreamWriter,
        task: "asyncio.Task[None]",
        _cls: Type[Request] = Request,
    ) -> Request:
        return _cls(
            message,
            payload,
            protocol,
            writer,
            task,
            self._loop,
            client_max_size=self._client_max_size,
        )

    def _prepare_middleware(self) -> Iterator[Tuple[_Middleware, bool]]:
        for m in reversed(self._middlewares):
            if getattr(m, "__middleware_version__", None) == 1:
                yield m, True
            else:
                warnings.warn(
                    'old-style middleware "{!r}" deprecated, see #2252'.format(m),
                    DeprecationWarning,
                    stacklevel=2,
                )
                yield m, False

        yield _fix_request_current_app(self), True
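
__middleware_version__ == 1 is the marker set by the web.middleware decorator, so a new-style middleware accepted by the branch above looks like this (the logging body is illustrative):

from aiohttp import web

@web.middleware
async def log_middleware(request: web.Request, handler) -> web.StreamResponse:
    resp = await handler(request)
    print(request.method, request.path, resp.status)
    return resp

app = web.Application(middlewares=[log_middleware])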
    async def _handle(self, request: Request) -> StreamResponse:
        loop = asyncio.get_event_loop()
        debug = loop.get_debug()
        match_info = await self._router.resolve(request)
        if debug:  # pragma: no cover
            if not isinstance(match_info, AbstractMatchInfo):
                raise TypeError(
                    "match_info should be AbstractMatchInfo "
                    "instance, not {!r}".format(match_info)
                )
        match_info.add_app(self)

        match_info.freeze()

        resp = None
        request._match_info = match_info
        expect = request.headers.get(hdrs.EXPECT)
        if expect:
            resp = await match_info.expect_handler(request)
            await request.writer.drain()

        if resp is None:
            handler = match_info.handler

            if self._run_middlewares:
                for app in match_info.apps[::-1]:
                    for m, new_style in app._middlewares_handlers:  # type: ignore[union-attr] # noqa
                        if new_style:
                            handler = update_wrapper(
                                partial(m, handler=handler), handler
                            )
                        else:
                            handler = await m(app, handler)  # type: ignore[arg-type]

            resp = await handler(request)

        return resp

    def __call__(self) -> "Application":
        """gunicorn compatibility"""
        return self

    def __repr__(self) -> str:
        return f"<Application 0x{id(self):x}>"

    def __bool__(self) -> bool:
        return True


class CleanupError(RuntimeError):
    @property
    def exceptions(self) -> List[BaseException]:
        return cast(List[BaseException], self.args[1])


if TYPE_CHECKING:  # pragma: no cover
    _CleanupContextBase = FrozenList[Callable[[Application], AsyncIterator[None]]]
else:
    _CleanupContextBase = FrozenList


class CleanupContext(_CleanupContextBase):
    def __init__(self) -> None:
        super().__init__()
        self._exits: List[AsyncIterator[None]] = []

    async def _on_startup(self, app: Application) -> None:
        for cb in self:
            it = cb(app).__aiter__()
            await it.__anext__()
            self._exits.append(it)

    async def _on_cleanup(self, app: Application) -> None:
        errors = []
        for it in reversed(self._exits):
            try:
                await it.__anext__()
            except StopAsyncIteration:
                pass
            except Exception as exc:
                errors.append(exc)
            else:
                errors.append(RuntimeError(f"{it!r} has more than one 'yield'"))
        if errors:
            if len(errors) == 1:
                raise errors[0]
            else:
                raise CleanupError("Multiple errors on cleanup stage", errors)
441
.venv/lib/python3.7/site-packages/aiohttp/web_exceptions.py
Normal file
@@ -0,0 +1,441 @@
import warnings
from typing import Any, Dict, Iterable, List, Optional, Set  # noqa

from yarl import URL

from .typedefs import LooseHeaders, StrOrURL
from .web_response import Response

__all__ = (
    "HTTPException",
    "HTTPError",
    "HTTPRedirection",
    "HTTPSuccessful",
    "HTTPOk",
    "HTTPCreated",
    "HTTPAccepted",
    "HTTPNonAuthoritativeInformation",
    "HTTPNoContent",
    "HTTPResetContent",
    "HTTPPartialContent",
    "HTTPMultipleChoices",
    "HTTPMovedPermanently",
    "HTTPFound",
    "HTTPSeeOther",
    "HTTPNotModified",
    "HTTPUseProxy",
    "HTTPTemporaryRedirect",
    "HTTPPermanentRedirect",
    "HTTPClientError",
    "HTTPBadRequest",
    "HTTPUnauthorized",
    "HTTPPaymentRequired",
    "HTTPForbidden",
    "HTTPNotFound",
    "HTTPMethodNotAllowed",
    "HTTPNotAcceptable",
    "HTTPProxyAuthenticationRequired",
    "HTTPRequestTimeout",
    "HTTPConflict",
    "HTTPGone",
    "HTTPLengthRequired",
    "HTTPPreconditionFailed",
    "HTTPRequestEntityTooLarge",
    "HTTPRequestURITooLong",
    "HTTPUnsupportedMediaType",
    "HTTPRequestRangeNotSatisfiable",
    "HTTPExpectationFailed",
    "HTTPMisdirectedRequest",
    "HTTPUnprocessableEntity",
    "HTTPFailedDependency",
    "HTTPUpgradeRequired",
    "HTTPPreconditionRequired",
    "HTTPTooManyRequests",
    "HTTPRequestHeaderFieldsTooLarge",
    "HTTPUnavailableForLegalReasons",
    "HTTPServerError",
    "HTTPInternalServerError",
    "HTTPNotImplemented",
    "HTTPBadGateway",
    "HTTPServiceUnavailable",
    "HTTPGatewayTimeout",
    "HTTPVersionNotSupported",
    "HTTPVariantAlsoNegotiates",
    "HTTPInsufficientStorage",
    "HTTPNotExtended",
    "HTTPNetworkAuthenticationRequired",
)


############################################################
# HTTP Exceptions
############################################################


class HTTPException(Response, Exception):

    # You should set in subclasses:
    # status = 200

    status_code = -1
    empty_body = False

    __http_exception__ = True

    def __init__(
        self,
        *,
        headers: Optional[LooseHeaders] = None,
        reason: Optional[str] = None,
        body: Any = None,
        text: Optional[str] = None,
        content_type: Optional[str] = None,
    ) -> None:
        if body is not None:
            warnings.warn(
                "body argument is deprecated for http web exceptions",
                DeprecationWarning,
            )
        Response.__init__(
            self,
            status=self.status_code,
            headers=headers,
            reason=reason,
            body=body,
            text=text,
            content_type=content_type,
        )
        Exception.__init__(self, self.reason)
        if self.body is None and not self.empty_body:
            self.text = f"{self.status}: {self.reason}"

    def __bool__(self) -> bool:
        return True


class HTTPError(HTTPException):
    """Base class for exceptions with status codes in the 400s and 500s."""


class HTTPRedirection(HTTPException):
    """Base class for exceptions with status codes in the 300s."""


class HTTPSuccessful(HTTPException):
    """Base class for exceptions with status codes in the 200s."""

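Because HTTPException inherits from both Response and Exception, a handler may raise one of the concrete subclasses below and aiohttp sends it as the response; a sketch using the public API (the route logic and text are illustrative):

from aiohttp import web

async def get_user(request: web.Request) -> web.Response:
    if request.match_info.get("id") != "42":
        raise web.HTTPNotFound(text="no such user")
    return web.Response(text="user 42")
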
class HTTPOk(HTTPSuccessful):
    status_code = 200


class HTTPCreated(HTTPSuccessful):
    status_code = 201


class HTTPAccepted(HTTPSuccessful):
    status_code = 202


class HTTPNonAuthoritativeInformation(HTTPSuccessful):
    status_code = 203


class HTTPNoContent(HTTPSuccessful):
    status_code = 204
    empty_body = True


class HTTPResetContent(HTTPSuccessful):
    status_code = 205
    empty_body = True


class HTTPPartialContent(HTTPSuccessful):
    status_code = 206


############################################################
# 3xx redirection
############################################################


class _HTTPMove(HTTPRedirection):
    def __init__(
        self,
        location: StrOrURL,
        *,
        headers: Optional[LooseHeaders] = None,
        reason: Optional[str] = None,
        body: Any = None,
        text: Optional[str] = None,
        content_type: Optional[str] = None,
    ) -> None:
        if not location:
            raise ValueError("HTTP redirects need a location to redirect to.")
        super().__init__(
            headers=headers,
            reason=reason,
            body=body,
            text=text,
            content_type=content_type,
        )
        self.headers["Location"] = str(URL(location))
        self.location = location
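
All _HTTPMove subclasses below take the target location as the first positional argument and set the Location header as shown above; for example (the paths are illustrative):

from aiohttp import web

async def old_path(request: web.Request) -> web.Response:
    raise web.HTTPFound("/new-path")
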
class HTTPMultipleChoices(_HTTPMove):
    status_code = 300


class HTTPMovedPermanently(_HTTPMove):
    status_code = 301


class HTTPFound(_HTTPMove):
    status_code = 302


# This one is safe after a POST (the redirected location will be
# retrieved with GET):
class HTTPSeeOther(_HTTPMove):
    status_code = 303


class HTTPNotModified(HTTPRedirection):
    # FIXME: this should include a date or etag header
    status_code = 304
    empty_body = True


class HTTPUseProxy(_HTTPMove):
    # Not a move, but looks a little like one
    status_code = 305


class HTTPTemporaryRedirect(_HTTPMove):
    status_code = 307


class HTTPPermanentRedirect(_HTTPMove):
    status_code = 308


############################################################
# 4xx client error
############################################################


class HTTPClientError(HTTPError):
    pass


class HTTPBadRequest(HTTPClientError):
    status_code = 400


class HTTPUnauthorized(HTTPClientError):
    status_code = 401


class HTTPPaymentRequired(HTTPClientError):
    status_code = 402


class HTTPForbidden(HTTPClientError):
    status_code = 403


class HTTPNotFound(HTTPClientError):
    status_code = 404


class HTTPMethodNotAllowed(HTTPClientError):
    status_code = 405

    def __init__(
        self,
        method: str,
        allowed_methods: Iterable[str],
        *,
        headers: Optional[LooseHeaders] = None,
        reason: Optional[str] = None,
        body: Any = None,
        text: Optional[str] = None,
        content_type: Optional[str] = None,
    ) -> None:
        allow = ",".join(sorted(allowed_methods))
        super().__init__(
            headers=headers,
            reason=reason,
            body=body,
            text=text,
            content_type=content_type,
        )
        self.headers["Allow"] = allow
        self.allowed_methods: Set[str] = set(allowed_methods)
        self.method = method.upper()


class HTTPNotAcceptable(HTTPClientError):
    status_code = 406


class HTTPProxyAuthenticationRequired(HTTPClientError):
    status_code = 407


class HTTPRequestTimeout(HTTPClientError):
    status_code = 408


class HTTPConflict(HTTPClientError):
    status_code = 409


class HTTPGone(HTTPClientError):
    status_code = 410


class HTTPLengthRequired(HTTPClientError):
    status_code = 411


class HTTPPreconditionFailed(HTTPClientError):
    status_code = 412


class HTTPRequestEntityTooLarge(HTTPClientError):
    status_code = 413

    def __init__(self, max_size: float, actual_size: float, **kwargs: Any) -> None:
        kwargs.setdefault(
            "text",
            "Maximum request body size {} exceeded, "
            "actual body size {}".format(max_size, actual_size),
        )
        super().__init__(**kwargs)


class HTTPRequestURITooLong(HTTPClientError):
    status_code = 414


class HTTPUnsupportedMediaType(HTTPClientError):
    status_code = 415


class HTTPRequestRangeNotSatisfiable(HTTPClientError):
    status_code = 416


class HTTPExpectationFailed(HTTPClientError):
    status_code = 417


class HTTPMisdirectedRequest(HTTPClientError):
    status_code = 421


class HTTPUnprocessableEntity(HTTPClientError):
    status_code = 422


class HTTPFailedDependency(HTTPClientError):
    status_code = 424


class HTTPUpgradeRequired(HTTPClientError):
    status_code = 426


class HTTPPreconditionRequired(HTTPClientError):
    status_code = 428


class HTTPTooManyRequests(HTTPClientError):
    status_code = 429


class HTTPRequestHeaderFieldsTooLarge(HTTPClientError):
    status_code = 431


class HTTPUnavailableForLegalReasons(HTTPClientError):
    status_code = 451

    def __init__(
        self,
        link: str,
        *,
        headers: Optional[LooseHeaders] = None,
        reason: Optional[str] = None,
        body: Any = None,
        text: Optional[str] = None,
        content_type: Optional[str] = None,
    ) -> None:
        super().__init__(
            headers=headers,
            reason=reason,
            body=body,
            text=text,
            content_type=content_type,
        )
        self.headers["Link"] = '<%s>; rel="blocked-by"' % link
        self.link = link


############################################################
# 5xx Server Error
############################################################
# Response status codes beginning with the digit "5" indicate cases in
# which the server is aware that it has erred or is incapable of
# performing the request. Except when responding to a HEAD request, the
# server SHOULD include an entity containing an explanation of the error
# situation, and whether it is a temporary or permanent condition. User
# agents SHOULD display any included entity to the user. These response
# codes are applicable to any request method.


class HTTPServerError(HTTPError):
    pass


class HTTPInternalServerError(HTTPServerError):
    status_code = 500


class HTTPNotImplemented(HTTPServerError):
    status_code = 501


class HTTPBadGateway(HTTPServerError):
    status_code = 502


class HTTPServiceUnavailable(HTTPServerError):
    status_code = 503


class HTTPGatewayTimeout(HTTPServerError):
    status_code = 504


class HTTPVersionNotSupported(HTTPServerError):
    status_code = 505


class HTTPVariantAlsoNegotiates(HTTPServerError):
    status_code = 506


class HTTPInsufficientStorage(HTTPServerError):
    status_code = 507


class HTTPNotExtended(HTTPServerError):
    status_code = 510


class HTTPNetworkAuthenticationRequired(HTTPServerError):
    status_code = 511
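
Per the "You should set in subclasses" note on HTTPException, a project-specific exception only needs a status_code; a hypothetical example (HTTPTeapot is not part of aiohttp):

from aiohttp import web

class HTTPTeapot(web.HTTPClientError):
    # hypothetical subclass for RFC 2324's 418 status
    status_code = 418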
288
.venv/lib/python3.7/site-packages/aiohttp/web_fileresponse.py
Normal file
@@ -0,0 +1,288 @@
import asyncio
import mimetypes
import os
import pathlib
import sys
from typing import (  # noqa
    IO,
    TYPE_CHECKING,
    Any,
    Awaitable,
    Callable,
    Iterator,
    List,
    Optional,
    Tuple,
    Union,
    cast,
)

from . import hdrs
from .abc import AbstractStreamWriter
from .helpers import ETAG_ANY, ETag
from .typedefs import Final, LooseHeaders
from .web_exceptions import (
    HTTPNotModified,
    HTTPPartialContent,
    HTTPPreconditionFailed,
    HTTPRequestRangeNotSatisfiable,
)
from .web_response import StreamResponse

__all__ = ("FileResponse",)

if TYPE_CHECKING:  # pragma: no cover
    from .web_request import BaseRequest


_T_OnChunkSent = Optional[Callable[[bytes], Awaitable[None]]]


NOSENDFILE: Final[bool] = bool(os.environ.get("AIOHTTP_NOSENDFILE"))


class FileResponse(StreamResponse):
    """A response object used to send files."""

    def __init__(
        self,
        path: Union[str, pathlib.Path],
        chunk_size: int = 256 * 1024,
        status: int = 200,
        reason: Optional[str] = None,
        headers: Optional[LooseHeaders] = None,
    ) -> None:
        super().__init__(status=status, reason=reason, headers=headers)

        if isinstance(path, str):
            path = pathlib.Path(path)

        self._path = path
        self._chunk_size = chunk_size

    async def _sendfile_fallback(
        self, writer: AbstractStreamWriter, fobj: IO[Any], offset: int, count: int
    ) -> AbstractStreamWriter:
        # To keep memory usage low, fobj is transferred in chunks
        # controlled by the constructor's chunk_size argument.

        chunk_size = self._chunk_size
        loop = asyncio.get_event_loop()

        await loop.run_in_executor(None, fobj.seek, offset)

        chunk = await loop.run_in_executor(None, fobj.read, chunk_size)
        while chunk:
            await writer.write(chunk)
            count = count - chunk_size
            if count <= 0:
                break
            chunk = await loop.run_in_executor(None, fobj.read, min(chunk_size, count))

        await writer.drain()
        return writer
    async def _sendfile(
        self, request: "BaseRequest", fobj: IO[Any], offset: int, count: int
    ) -> AbstractStreamWriter:
        writer = await super().prepare(request)
        assert writer is not None

        if NOSENDFILE or sys.version_info < (3, 7) or self.compression:
            return await self._sendfile_fallback(writer, fobj, offset, count)

        loop = request._loop
        transport = request.transport
        assert transport is not None

        try:
            await loop.sendfile(transport, fobj, offset, count)
        except NotImplementedError:
            return await self._sendfile_fallback(writer, fobj, offset, count)

        await super().write_eof()
        return writer

    @staticmethod
    def _strong_etag_match(etag_value: str, etags: Tuple[ETag, ...]) -> bool:
        if len(etags) == 1 and etags[0].value == ETAG_ANY:
            return True
        return any(etag.value == etag_value for etag in etags if not etag.is_weak)

    async def _not_modified(
        self, request: "BaseRequest", etag_value: str, last_modified: float
    ) -> Optional[AbstractStreamWriter]:
        self.set_status(HTTPNotModified.status_code)
        self._length_check = False
        self.etag = etag_value  # type: ignore[assignment]
        self.last_modified = last_modified  # type: ignore[assignment]
        # Delete any Content-Length headers provided by user. HTTP 304
        # should always have empty response body
        return await super().prepare(request)

    async def _precondition_failed(
        self, request: "BaseRequest"
    ) -> Optional[AbstractStreamWriter]:
        self.set_status(HTTPPreconditionFailed.status_code)
        self.content_length = 0
        return await super().prepare(request)

    async def prepare(self, request: "BaseRequest") -> Optional[AbstractStreamWriter]:
        filepath = self._path

        gzip = False
        if "gzip" in request.headers.get(hdrs.ACCEPT_ENCODING, ""):
            gzip_path = filepath.with_name(filepath.name + ".gz")

            if gzip_path.is_file():
                filepath = gzip_path
                gzip = True

        loop = asyncio.get_event_loop()
        st: os.stat_result = await loop.run_in_executor(None, filepath.stat)

        etag_value = f"{st.st_mtime_ns:x}-{st.st_size:x}"
        last_modified = st.st_mtime

        # https://tools.ietf.org/html/rfc7232#section-6
        ifmatch = request.if_match
        if ifmatch is not None and not self._strong_etag_match(etag_value, ifmatch):
            return await self._precondition_failed(request)

        unmodsince = request.if_unmodified_since
        if (
            unmodsince is not None
            and ifmatch is None
            and st.st_mtime > unmodsince.timestamp()
        ):
            return await self._precondition_failed(request)

        ifnonematch = request.if_none_match
        if ifnonematch is not None and self._strong_etag_match(etag_value, ifnonematch):
            return await self._not_modified(request, etag_value, last_modified)

        modsince = request.if_modified_since
        if (
            modsince is not None
            and ifnonematch is None
            and st.st_mtime <= modsince.timestamp()
        ):
            return await self._not_modified(request, etag_value, last_modified)

        if hdrs.CONTENT_TYPE not in self.headers:
            ct, encoding = mimetypes.guess_type(str(filepath))
            if not ct:
                ct = "application/octet-stream"
            should_set_ct = True
        else:
            encoding = "gzip" if gzip else None
            should_set_ct = False

        status = self._status
        file_size = st.st_size
        count = file_size

        start = None

        ifrange = request.if_range
        if ifrange is None or st.st_mtime <= ifrange.timestamp():
            # If-Range header check:
            # condition = cached date >= last modification date
            # return 206 if True else 200.
            # if False:
            #   Range header would not be processed, return 200
            # if True but Range header missing
            #   return 200
            try:
                rng = request.http_range
                start = rng.start
                end = rng.stop
            except ValueError:
                # https://tools.ietf.org/html/rfc7233:
                # A server generating a 416 (Range Not Satisfiable) response to
                # a byte-range request SHOULD send a Content-Range header field
                # with an unsatisfied-range value.
                # The complete-length in a 416 response indicates the current
                # length of the selected representation.
                #
                # Will do the same below. Many servers ignore this and do not
                # send a Content-Range header with HTTP 416
                self.headers[hdrs.CONTENT_RANGE] = f"bytes */{file_size}"
                self.set_status(HTTPRequestRangeNotSatisfiable.status_code)
                return await super().prepare(request)

            # If a range request has been made, convert start, end slice
            # notation into file pointer offset and count
            if start is not None or end is not None:
                if start < 0 and end is None:  # return tail of file
                    start += file_size
                    if start < 0:
                        # if Range:bytes=-1000 in request header but file size
                        # is only 200, there would be trouble without this
                        start = 0
                    count = file_size - start
                else:
                    # rfc7233: If the last-byte-pos value is
                    # absent, or if the value is greater than or equal to
                    # the current length of the representation data,
                    # the byte range is interpreted as the remainder
                    # of the representation (i.e., the server replaces the
                    # value of last-byte-pos with a value that is one less than
                    # the current length of the selected representation).
                    count = (
                        min(end if end is not None else file_size, file_size) - start
                    )

                if start >= file_size:
                    # HTTP 416 should be returned in this case.
                    #
                    # According to https://tools.ietf.org/html/rfc7233:
                    # If a valid byte-range-set includes at least one
                    # byte-range-spec with a first-byte-pos that is less than
                    # the current length of the representation, or at least one
                    # suffix-byte-range-spec with a non-zero suffix-length,
                    # then the byte-range-set is satisfiable. Otherwise, the
                    # byte-range-set is unsatisfiable.
                    self.headers[hdrs.CONTENT_RANGE] = f"bytes */{file_size}"
                    self.set_status(HTTPRequestRangeNotSatisfiable.status_code)
                    return await super().prepare(request)

                status = HTTPPartialContent.status_code
                # Even though you are sending the whole file, you should still
                # return a HTTP 206 for a Range request.
                self.set_status(status)

        if should_set_ct:
            self.content_type = ct  # type: ignore[assignment]
        if encoding:
            self.headers[hdrs.CONTENT_ENCODING] = encoding
        if gzip:
            self.headers[hdrs.VARY] = hdrs.ACCEPT_ENCODING

        self.etag = etag_value  # type: ignore[assignment]
        self.last_modified = st.st_mtime  # type: ignore[assignment]
        self.content_length = count

        self.headers[hdrs.ACCEPT_RANGES] = "bytes"

        real_start = cast(int, start)

        if status == HTTPPartialContent.status_code:
            self.headers[hdrs.CONTENT_RANGE] = "bytes {}-{}/{}".format(
                real_start, real_start + count - 1, file_size
            )

        # If we are sending 0 bytes calling sendfile() will throw a ValueError
        if count == 0 or request.method == hdrs.METH_HEAD or self.status in [204, 304]:
            return await super().prepare(request)

        fobj = await loop.run_in_executor(None, filepath.open, "rb")
        if start:  # be aware that start could be None or int=0 here.
            offset = start
        else:
            offset = 0

        try:
            return await self._sendfile(request, fobj, offset, count)
        finally:
            await loop.run_in_executor(None, fobj.close)
Some files were not shown because too many files have changed in this diff