mirror of https://github.com/yt-dlp/yt-dlp.git (synced 2024-11-08 22:17:07 +01:00)

cleanup

This commit is contained in:
parent e565e45a6f
commit 14505063ec
@@ -30,6 +30,7 @@ def __init__(self, *args, **kwargs):

@pytest.fixture(autouse=True)
def skip_handler(request, handler):
    # usage: pytest.mark.skip_handler('my_handler', 'reason')
    for marker in request.node.iter_markers('skip_handler'):
        if marker.args[0] == handler.RH_KEY:
            pytest.skip(marker.args[1] if len(marker.args) > 1 else '')


@@ -37,20 +38,19 @@ def skip_handler(request, handler):

@pytest.fixture(autouse=True)
def skip_handler_if(request, handler):
    # usage: pytest.mark.skip_handler_if('my_handler', lambda request: True, 'reason')
    for marker in request.node.iter_markers('skip_handler_if'):
        if marker.args[0] == handler.RH_KEY and marker.args[1](request):
            pytest.skip(marker.args[2] if len(marker.args) > 2 else '')


@pytest.fixture(autouse=True)
def skip_handlers_if(request, handler):
    # usage: pytest.mark.skip_handlers_if(lambda request, handler: True, 'reason')
    for marker in request.node.iter_markers('skip_handlers_if'):
        if handler and marker.args[0](request, handler):
            pytest.skip(marker.args[1] if len(marker.args) > 1 else '')


def validate_and_send(rh, req):
    rh.validate(req)
    return rh.send(req)


def pytest_configure(config):
    config.addinivalue_line(
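These autouse fixtures let individual tests opt out of a given request handler via markers. A minimal sketch of how a test might apply them, assuming only the marker shapes defined above (the class and test names here are invented for illustration):

import pytest

@pytest.mark.parametrize('handler', ['Urllib', 'Requests', 'CurlCFFI'], indirect=True)
class TestExample:
    # skip a single handler by its RH_KEY, with an optional reason
    @pytest.mark.skip_handler('CurlCFFI', 'not supported by curl-cffi')
    def test_only_standard_handlers(self, handler):
        ...

    # skip any handler for which the callable returns True
    @pytest.mark.skip_handlers_if(
        lambda request, handler: handler.RH_KEY == 'CurlCFFI', 'example condition')
    def test_conditionally_skipped(self, handler):
        ...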
@@ -338,3 +338,8 @@ def http_server_port(httpd):

def verify_address_availability(address):
    if find_available_port(address) is None:
        pytest.skip(f'Unable to bind to source address {address} (address may not exist)')


def validate_and_send(rh, req):
    rh.validate(req)
    return rh.send(req)
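validate_and_send is the small helper used throughout the tests below, and verify_address_availability skips a test when a loopback source address cannot be bound. A hedged usage sketch, assuming `rh` is a request handler instance obtained from the `handler` fixture and Request is yt_dlp.networking.Request (the URL and address are illustrative):

# skips the current test if 127.0.0.42 cannot be bound on this system
verify_address_availability('127.0.0.42')
# validate the request against the handler, then send it
res = validate_and_send(rh, Request('http://127.0.0.1:8080/headers'))
assert res.status == 200
res.close()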
@@ -17,7 +17,7 @@

from test.test_socks import IPv6ThreadingTCPServer
from yt_dlp.dependencies import urllib3
from yt_dlp.networking import Request
from yt_dlp.networking.exceptions import ProxyError, HTTPError, SSLError
from yt_dlp.networking.exceptions import HTTPError, ProxyError, SSLError


class HTTPProxyAuthMixin:

@@ -124,21 +124,6 @@ def __init__(self, request, *args, **kwargs):

        super().__init__(request, *args, **kwargs)


class WebsocketsProxyHandler(BaseRequestHandler):
    def __init__(self, *args, proxy_info=None, **kwargs):
        self.proxy_info = proxy_info
        super().__init__(*args, **kwargs)

    def handle(self):
        import websockets.sync.server
        protocol = websockets.ServerProtocol()
        connection = websockets.sync.server.ServerConnection(socket=self.request, protocol=protocol,
                                                             close_timeout=0)
        connection.handshake()
        connection.send(json.dumps(self.proxy_info))
        connection.close()


class HTTPConnectProxyHandler(BaseHTTPRequestHandler, HTTPProxyAuthMixin):
    protocol_version = 'HTTP/1.1'
    default_request_version = 'HTTP/1.1'

@@ -199,6 +184,7 @@ def proxy_server(proxy_server_class, request_handler, bind_ip=None, **proxy_serv

class HTTPProxyTestContext(abc.ABC):
    REQUEST_HANDLER_CLASS = None
    REQUEST_PROTO = None

    def http_server(self, server_class, *args, **kwargs):
        return proxy_server(server_class, self.REQUEST_HANDLER_CLASS, *args, **kwargs)
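For orientation, a concrete context presumably fills in the two class attributes and implements proxy_info_request; a minimal sketch, assuming a handler class name and target URL layout that do not appear in this diff (only the json-decoding return also shows up in the next hunk):

class ExampleHTTPProxyTestContext(HTTPProxyTestContext):
    REQUEST_HANDLER_CLASS = HTTPProxyHandler  # assumed handler class name, not taken from this diff
    REQUEST_PROTO = 'http'

    def proxy_info_request(self, handler, target_domain=None, target_port=None, **req_kwargs):
        # target URL shape mirrors the websockets variant shown below; treat it as illustrative
        request = Request(f'http://{target_domain or "127.0.0.1"}:{target_port or "40000"}', **req_kwargs)
        handler.validate(request)
        return json.loads(handler.send(request).read().decode())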
@@ -229,26 +215,9 @@ def proxy_info_request(self, handler, target_domain=None, target_port=None, **re

        return json.loads(handler.send(request).read().decode())


class HTTPProxyWebsocketsTestContext(HTTPProxyTestContext):
    REQUEST_HANDLER_CLASS = WebsocketsProxyHandler
    REQUEST_PROTO = 'ws'

    def proxy_info_request(self, handler, target_domain=None, target_port=None, **req_kwargs):
        request = Request(f'ws://{target_domain or "127.0.0.1"}:{target_port or "40000"}', **req_kwargs)
        handler.validate(request)
        ws = handler.send(request)
        ws.send('proxy_info')
        proxy_info = ws.recv()
        ws.close()
        return json.loads(proxy_info)

    # todo: wss


CTX_MAP = {
    'http': HTTPProxyHTTPTestContext,
    'https': HTTPProxyHTTPSTestContext,
    'ws': HTTPProxyWebsocketsTestContext,
}
@@ -29,8 +29,12 @@

from email.message import Message
from http.cookiejar import CookieJar

from test.conftest import validate_and_send
from test.helper import FakeYDL, http_server_port, verify_address_availability
from test.helper import (
    FakeYDL,
    http_server_port,
    validate_and_send,
    verify_address_availability,
)
from yt_dlp.cookies import YoutubeDLCookieJar
from yt_dlp.dependencies import brotli, curl_cffi, requests, urllib3
from yt_dlp.networking import (

@@ -64,21 +68,6 @@

TEST_DIR = os.path.dirname(os.path.abspath(__file__))


def _build_proxy_handler(name):
    class HTTPTestRequestHandler(http.server.BaseHTTPRequestHandler):
        proxy_name = name

        def log_message(self, format, *args):
            pass

        def do_GET(self):
            self.send_response(200)
            self.send_header('Content-Type', 'text/plain; charset=utf-8')
            self.end_headers()
            self.wfile.write(f'{self.proxy_name}: {self.path}'.encode())
    return HTTPTestRequestHandler


class HTTPTestRequestHandler(http.server.BaseHTTPRequestHandler):
    protocol_version = 'HTTP/1.1'
    default_request_version = 'HTTP/1.1'

@@ -319,8 +308,9 @@ def setup_class(cls):

        cls.https_server_thread.start()


@pytest.mark.parametrize('handler', ['Urllib', 'Requests', 'CurlCFFI'], indirect=True)
class TestHTTPRequestHandler(TestRequestHandlerBase):
    @pytest.mark.parametrize('handler', ['Urllib', 'Requests', 'CurlCFFI'], indirect=True)

    def test_verify_cert(self, handler):
        with handler() as rh:
            with pytest.raises(CertificateVerifyError):
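The pattern applied throughout the hunks that follow: the per-method @pytest.mark.parametrize('handler', ...) decorators are dropped in favour of a single class-level decorator, which pytest applies to every test method in the class. A minimal sketch of the before/after shape (class and test names are illustrative):

import pytest

# Before: each test repeats the decorator.
class TestBefore:
    @pytest.mark.parametrize('handler', ['Urllib', 'Requests', 'CurlCFFI'], indirect=True)
    def test_a(self, handler): ...

    @pytest.mark.parametrize('handler', ['Urllib', 'Requests', 'CurlCFFI'], indirect=True)
    def test_b(self, handler): ...

# After: one class-level parametrize covers every test in the class.
@pytest.mark.parametrize('handler', ['Urllib', 'Requests', 'CurlCFFI'], indirect=True)
class TestAfter:
    def test_a(self, handler): ...

    def test_b(self, handler): ...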
@@ -331,7 +321,6 @@ def test_verify_cert(self, handler):

            assert r.status == 200
            r.close()

    @pytest.mark.parametrize('handler', ['Urllib', 'Requests', 'CurlCFFI'], indirect=True)
    def test_ssl_error(self, handler):
        # HTTPS server with too old TLS version
        # XXX: is there a better way to test this than to create a new server?

@@ -349,7 +338,6 @@ def test_ssl_error(self, handler):

                validate_and_send(rh, Request(f'https://127.0.0.1:{https_port}/headers'))
            assert not issubclass(exc_info.type, CertificateVerifyError)

    @pytest.mark.parametrize('handler', ['Urllib', 'Requests', 'CurlCFFI'], indirect=True)
    def test_percent_encode(self, handler):
        with handler() as rh:
            # Unicode characters should be encoded with uppercase percent-encoding

@@ -361,7 +349,6 @@ def test_percent_encode(self, handler):

            assert res.status == 200
            res.close()

    @pytest.mark.parametrize('handler', ['Urllib', 'Requests', 'CurlCFFI'], indirect=True)
    @pytest.mark.parametrize('path', [
        '/a/b/./../../headers',
        '/redirect_dotsegments',

@@ -377,15 +364,13 @@ def test_remove_dot_segments(self, handler, path):

            assert res.url == f'http://127.0.0.1:{self.http_port}/headers'
            res.close()

    # Not supported by CurlCFFI (non-standard)
    @pytest.mark.parametrize('handler', ['Urllib', 'Requests'], indirect=True)
    @pytest.mark.skip_handler('CurlCFFI', 'not supported by curl-cffi (non-standard)')
    def test_unicode_path_redirection(self, handler):
        with handler() as rh:
            r = validate_and_send(rh, Request(f'http://127.0.0.1:{self.http_port}/302-non-ascii-redirect'))
            assert r.url == f'http://127.0.0.1:{self.http_port}/%E4%B8%AD%E6%96%87.html'
            r.close()

    @pytest.mark.parametrize('handler', ['Urllib', 'Requests', 'CurlCFFI'], indirect=True)
    def test_raise_http_error(self, handler):
        with handler() as rh:
            for bad_status in (400, 500, 599, 302):

@@ -395,7 +380,6 @@ def test_raise_http_error(self, handler):

            # Should not raise an error
            validate_and_send(rh, Request('http://127.0.0.1:%d/gen_200' % self.http_port)).close()

    @pytest.mark.parametrize('handler', ['Urllib', 'Requests', 'CurlCFFI'], indirect=True)
    def test_response_url(self, handler):
        with handler() as rh:
            # Response url should be that of the last url in redirect chain

@@ -407,7 +391,6 @@ def test_response_url(self, handler):

            res2.close()

    # Covers some basic cases we expect some level of consistency between request handlers for
    @pytest.mark.parametrize('handler', ['Urllib', 'Requests', 'CurlCFFI'], indirect=True)
    @pytest.mark.parametrize('redirect_status,method,expected', [
        # A 303 must either use GET or HEAD for subsequent request
        (303, 'POST', ('', 'GET', False)),

@@ -449,7 +432,6 @@ def test_redirect(self, handler, redirect_status, method, expected):

            assert expected[1] == res.headers.get('method')
            assert expected[2] == ('content-length' in headers.decode().lower())

    @pytest.mark.parametrize('handler', ['Urllib', 'Requests', 'CurlCFFI'], indirect=True)
    def test_request_cookie_header(self, handler):
        # We should accept a Cookie header being passed as in normal headers and handle it appropriately.
        with handler() as rh:
@@ -482,19 +464,16 @@ def test_request_cookie_header(self, handler):

            assert b'cookie: test=ytdlp' not in data.lower()
            assert b'cookie: test=test3' in data.lower()

    @pytest.mark.parametrize('handler', ['Urllib', 'Requests', 'CurlCFFI'], indirect=True)
    def test_redirect_loop(self, handler):
        with handler() as rh:
            with pytest.raises(HTTPError, match='redirect loop'):
                validate_and_send(rh, Request(f'http://127.0.0.1:{self.http_port}/redirect_loop'))

    @pytest.mark.parametrize('handler', ['Urllib', 'Requests', 'CurlCFFI'], indirect=True)
    def test_incompleteread(self, handler):
        with handler(timeout=2) as rh:
            with pytest.raises(IncompleteRead, match='13 bytes read, 234221 more expected'):
                validate_and_send(rh, Request('http://127.0.0.1:%d/incompleteread' % self.http_port)).read()

    @pytest.mark.parametrize('handler', ['Urllib', 'Requests', 'CurlCFFI'], indirect=True)
    def test_cookies(self, handler):
        cookiejar = YoutubeDLCookieJar()
        cookiejar.set_cookie(http.cookiejar.Cookie(

@@ -511,7 +490,6 @@ def test_cookies(self, handler):

                rh, Request(f'http://127.0.0.1:{self.http_port}/headers', extensions={'cookiejar': cookiejar})).read()
            assert b'cookie: test=ytdlp' in data.lower()

    @pytest.mark.parametrize('handler', ['Urllib', 'Requests', 'CurlCFFI'], indirect=True)
    def test_headers(self, handler):

        with handler(headers=HTTPHeaderDict({'test1': 'test', 'test2': 'test2'})) as rh:

@@ -527,7 +505,6 @@ def test_headers(self, handler):

            assert b'test2: test2' not in data
            assert b'test3: test3' in data

    @pytest.mark.parametrize('handler', ['Urllib', 'Requests', 'CurlCFFI'], indirect=True)
    def test_read_timeout(self, handler):
        with handler() as rh:
            # Default timeout is 20 seconds, so this should go through

@@ -543,7 +520,6 @@ def test_read_timeout(self, handler):

                validate_and_send(
                    rh, Request(f'http://127.0.0.1:{self.http_port}/timeout_1', extensions={'timeout': 4}))

    @pytest.mark.parametrize('handler', ['Urllib', 'Requests', 'CurlCFFI'], indirect=True)
    def test_connect_timeout(self, handler):
        # nothing should be listening on this port
        connect_timeout_url = 'http://10.255.255.255'

@@ -562,7 +538,6 @@ def test_connect_timeout(self, handler):

                    rh, Request(connect_timeout_url, extensions={'timeout': 0.01}))
            assert 0.01 <= time.time() - now < 20

    @pytest.mark.parametrize('handler', ['Urllib', 'Requests', 'CurlCFFI'], indirect=True)
    def test_source_address(self, handler):
        source_address = f'127.0.0.{random.randint(5, 255)}'
        # on some systems these loopback addresses we need for testing may not be available

@@ -574,13 +549,13 @@ def test_source_address(self, handler):

            assert source_address == data

    # Not supported by CurlCFFI
    @pytest.mark.parametrize('handler', ['Urllib', 'Requests'], indirect=True)
    @pytest.mark.skip_handler('CurlCFFI', 'not supported by curl-cffi')
    def test_gzip_trailing_garbage(self, handler):
        with handler() as rh:
            data = validate_and_send(rh, Request(f'http://localhost:{self.http_port}/trailing_garbage')).read().decode()
            assert data == '<html><video src="/vid.mp4" /></html>'

    @pytest.mark.parametrize('handler', ['Urllib', 'Requests'], indirect=True)
    @pytest.mark.skip_handler('CurlCFFI', 'not applicable to curl-cffi')
    @pytest.mark.skipif(not brotli, reason='brotli support is not installed')
    def test_brotli(self, handler):
        with handler() as rh:

@@ -591,7 +566,6 @@ def test_brotli(self, handler):

            assert res.headers.get('Content-Encoding') == 'br'
            assert res.read() == b'<html><video src="/vid.mp4" /></html>'

    @pytest.mark.parametrize('handler', ['Urllib', 'Requests', 'CurlCFFI'], indirect=True)
    def test_deflate(self, handler):
        with handler() as rh:
            res = validate_and_send(

@@ -601,7 +575,6 @@ def test_deflate(self, handler):

            assert res.headers.get('Content-Encoding') == 'deflate'
            assert res.read() == b'<html><video src="/vid.mp4" /></html>'

    @pytest.mark.parametrize('handler', ['Urllib', 'Requests', 'CurlCFFI'], indirect=True)
    def test_gzip(self, handler):
        with handler() as rh:
            res = validate_and_send(

@@ -611,7 +584,6 @@ def test_gzip(self, handler):

            assert res.headers.get('Content-Encoding') == 'gzip'
            assert res.read() == b'<html><video src="/vid.mp4" /></html>'

    @pytest.mark.parametrize('handler', ['Urllib', 'Requests', 'CurlCFFI'], indirect=True)
    def test_multiple_encodings(self, handler):
        with handler() as rh:
            for pair in ('gzip,deflate', 'deflate, gzip', 'gzip, gzip', 'deflate, deflate'):

@@ -622,8 +594,7 @@ def test_multiple_encodings(self, handler):

                assert res.headers.get('Content-Encoding') == pair
                assert res.read() == b'<html><video src="/vid.mp4" /></html>'

    # Not supported by curl_cffi
    @pytest.mark.parametrize('handler', ['Urllib', 'Requests'], indirect=True)
    @pytest.mark.skip_handler('CurlCFFI', 'not supported by curl-cffi')
    def test_unsupported_encoding(self, handler):
        with handler() as rh:
            res = validate_and_send(

@@ -633,7 +604,6 @@ def test_unsupported_encoding(self, handler):

            assert res.headers.get('Content-Encoding') == 'unsupported'
            assert res.read() == b'raw'

    @pytest.mark.parametrize('handler', ['Urllib', 'Requests', 'CurlCFFI'], indirect=True)
    def test_read(self, handler):
        with handler() as rh:
            res = validate_and_send(

@@ -644,7 +614,6 @@ def test_read(self, handler):

            assert res.read().decode().endswith('\n\n')
            assert res.read() == b''

    @pytest.mark.parametrize('handler', ['Urllib', 'Requests', 'CurlCFFI'], indirect=True)
    def test_request_disable_proxy(self, handler):
        for proxy_proto in handler._SUPPORTED_PROXY_SCHEMES or ['http']:
            # Given the handler is configured with a proxy

@@ -656,7 +625,6 @@ def test_request_disable_proxy(self, handler):

                res.close()
                assert res.status == 200

    @pytest.mark.parametrize('handler', ['Urllib', 'Requests', 'CurlCFFI'], indirect=True)
    @pytest.mark.skip_handlers_if(
        lambda _, handler: Features.NO_PROXY not in handler._SUPPORTED_FEATURES, 'handler does not support NO_PROXY')
    def test_noproxy(self, handler):
@@ -671,24 +639,23 @@ def test_noproxy(self, handler):

                assert nop_response.status == 200
                nop_response.close()

    @pytest.mark.parametrize('handler', ['Urllib', 'Requests', 'CurlCFFI'], indirect=True)
    @pytest.mark.skip_handlers_if(
        lambda _, handler: Features.ALL_PROXY not in handler._SUPPORTED_FEATURES, 'handler does not support ALL_PROXY')
    def test_allproxy(self, handler):
        # This is a bit of a hacky test, but it should be enough to check whether the handler is using the proxy.
        # 0.1s might not be enough of a timeout if proxy is not used in all cases, but should still get failures.
        with handler(proxies={'all': f'http://10.255.255.255'}, timeout=0.1) as rh:
        with handler(proxies={'all': 'http://10.255.255.255'}, timeout=0.1) as rh:
            with pytest.raises(TransportError):
                validate_and_send(
                    rh, Request(f'http://127.0.0.1:{self.http_port}/headers')).close()
                validate_and_send(rh, Request(f'http://127.0.0.1:{self.http_port}/headers')).close()

        with handler(timeout=0.1) as rh:
            with pytest.raises(TransportError):
                validate_and_send(
                    rh, Request(
                        f'http://127.0.0.1:{self.http_port}/headers', proxies={'all': f'http://10.255.255.255'})).close()
                        f'http://127.0.0.1:{self.http_port}/headers', proxies={'all': 'http://10.255.255.255'})).close()


@pytest.mark.parametrize('handler', ['Urllib', 'Requests', 'CurlCFFI'], indirect=True)
class TestClientCertificate:
    @classmethod
    def setup_class(cls):

@@ -715,27 +682,23 @@ def _run_test(self, handler, **handler_kwargs):

        ) as rh:
            validate_and_send(rh, Request(f'https://127.0.0.1:{self.port}/video.html')).read().decode()

    @pytest.mark.parametrize('handler', ['Urllib', 'Requests', 'CurlCFFI'], indirect=True)
    def test_certificate_combined_nopass(self, handler):
        self._run_test(handler, client_cert={
            'client_certificate': os.path.join(self.certdir, 'clientwithkey.crt'),
        })

    @pytest.mark.parametrize('handler', ['Urllib', 'Requests', 'CurlCFFI'], indirect=True)
    def test_certificate_nocombined_nopass(self, handler):
        self._run_test(handler, client_cert={
            'client_certificate': os.path.join(self.certdir, 'client.crt'),
            'client_certificate_key': os.path.join(self.certdir, 'client.key'),
        })

    @pytest.mark.parametrize('handler', ['Urllib', 'Requests', 'CurlCFFI'], indirect=True)
    def test_certificate_combined_pass(self, handler):
        self._run_test(handler, client_cert={
            'client_certificate': os.path.join(self.certdir, 'clientwithencryptedkey.crt'),
            'client_certificate_password': 'foobar',
        })

    @pytest.mark.parametrize('handler', ['Urllib', 'Requests', 'CurlCFFI'], indirect=True)
    def test_certificate_nocombined_pass(self, handler):
        self._run_test(handler, client_cert={
            'client_certificate': os.path.join(self.certdir, 'client.crt'),

@@ -775,8 +738,8 @@ def test_remove_logging_handler(self, handler, logger_name):

        assert len(logging_handlers) == before_count


@pytest.mark.parametrize('handler', ['Urllib'], indirect=True)
class TestUrllibRequestHandler(TestRequestHandlerBase):
    @pytest.mark.parametrize('handler', ['Urllib'], indirect=True)
    def test_file_urls(self, handler):
        # See https://github.com/ytdl-org/youtube-dl/issues/8227
        tf = tempfile.NamedTemporaryFile(delete=False)

@@ -798,7 +761,6 @@ def test_file_urls(self, handler):

        os.unlink(tf.name)

    @pytest.mark.parametrize('handler', ['Urllib'], indirect=True)
    def test_http_error_returns_content(self, handler):
        # urllib HTTPError will try close the underlying response if reference to the HTTPError object is lost
        def get_response():

@@ -811,7 +773,6 @@ def get_response():

        assert get_response().read() == b'<html></html>'

    @pytest.mark.parametrize('handler', ['Urllib'], indirect=True)
    def test_verify_cert_error_text(self, handler):
        # Check the output of the error message
        with handler() as rh:

@@ -821,7 +782,6 @@ def test_verify_cert_error_text(self, handler):

            ):
                validate_and_send(rh, Request(f'https://127.0.0.1:{self.https_port}/headers'))

    @pytest.mark.parametrize('handler', ['Urllib'], indirect=True)
    @pytest.mark.parametrize('req,match,version_check', [
        # https://github.com/python/cpython/blob/987b712b4aeeece336eed24fcc87a950a756c3e2/Lib/http/client.py#L1256
        # bpo-39603: Check implemented in 3.7.9+, 3.8.5+
@@ -1153,7 +1113,7 @@ class HTTPSupportedRH(ValidationRH):

    ]

    PROXY_SCHEME_TESTS = [
        # scheme, expected to fail
        # proxy scheme, expected to fail
        ('Urllib', 'http', [
            ('http', False),
            ('https', UnsupportedRequest),

@@ -1179,30 +1139,41 @@ class HTTPSupportedRH(ValidationRH):

            ('socks5', False),
            ('socks5h', False),
        ]),
        ('Websockets', 'ws', [
            ('http', UnsupportedRequest),
            ('https', UnsupportedRequest),
            ('socks4', False),
            ('socks4a', False),
            ('socks5', False),
            ('socks5h', False),
        ]),
        (NoCheckRH, 'http', [('http', False)]),
        (HTTPSupportedRH, 'http', [('http', UnsupportedRequest)]),
        ('Websockets', 'ws', [('http', UnsupportedRequest)]),
        (NoCheckRH, 'http', [('http', False)]),
        (HTTPSupportedRH, 'http', [('http', UnsupportedRequest)]),
    ]

    PROXY_KEY_TESTS = [
        # key, expected to fail
        ('Urllib', [
            ('all', False),
            ('unrelated', False),
        # proxy key, proxy scheme, expected to fail
        ('Urllib', 'http', [
            ('all', 'http', False),
            ('unrelated', 'http', False),
        ]),
        ('Requests', [
            ('all', False),
            ('unrelated', False),
        ('Requests', 'http', [
            ('all', 'http', False),
            ('unrelated', 'http', False),
        ]),
        ('CurlCFFI', [
            ('all', False),
            ('unrelated', False),
        ('CurlCFFI', 'http', [
            ('all', 'http', False),
            ('unrelated', 'http', False),
        ]),
        (NoCheckRH, [('all', False)]),
        (HTTPSupportedRH, [('all', UnsupportedRequest)]),
        (HTTPSupportedRH, [('no', UnsupportedRequest)]),
        ('Websockets', 'ws', [
            ('all', 'socks5', False),
            ('unrelated', 'socks5', False),
        ]),
        (NoCheckRH, 'http', [('all', 'http', False)]),
        (HTTPSupportedRH, 'http', [('all', 'http', UnsupportedRequest)]),
        (HTTPSupportedRH, 'http', [('no', 'http', UnsupportedRequest)]),
    ]
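For reference, the parametrize comprehension further down expands each PROXY_KEY_TESTS entry above into one parameter tuple per proxy-key case; a minimal sketch of that expansion under the new layout:

# Mirrors the comprehension used by test_proxy_key below (new layout only).
params = [
    (handler_tests[0], handler_tests[1], proxy_key, proxy_scheme, fail)
    for handler_tests in PROXY_KEY_TESTS
    for proxy_key, proxy_scheme, fail in handler_tests[2]
]
# e.g. the ('Websockets', 'ws', ...) entry above yields:
#   ('Websockets', 'ws', 'all', 'socks5', False)
#   ('Websockets', 'ws', 'unrelated', 'socks5', False)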

    EXTENSION_TESTS = [

@@ -1244,28 +1215,54 @@ class HTTPSupportedRH(ValidationRH):

        ]),
    ]

    @pytest.mark.parametrize('handler,fail,scheme', [
        ('Urllib', False, 'http'),
        ('Requests', False, 'http'),
        ('CurlCFFI', False, 'http'),
        ('Websockets', False, 'ws')
    ], indirect=['handler'])
    def test_no_proxy(self, handler, fail, scheme):
        run_validation(handler, fail, Request(f'{scheme}://', proxies={'no': '127.0.0.1,github.com'}))
        run_validation(handler, fail, Request(f'{scheme}://'), proxies={'no': '127.0.0.1,github.com'})

    @pytest.mark.parametrize('handler,scheme', [
        ('Urllib', 'http'),
        (HTTPSupportedRH, 'http'),
        ('Requests', 'http'),
        ('CurlCFFI', 'http'),
        ('Websockets', 'ws')
    ], indirect=['handler'])
    def test_empty_proxy(self, handler, scheme):
        run_validation(handler, False, Request(f'{scheme}://', proxies={scheme: None}))
        run_validation(handler, False, Request(f'{scheme}://'), proxies={scheme: None})

    @pytest.mark.parametrize('proxy_url', ['//example.com', 'example.com', '127.0.0.1', '/a/b/c'])
    @pytest.mark.parametrize('handler,scheme', [
        ('Urllib', 'http'),
        (HTTPSupportedRH, 'http'),
        ('Requests', 'http'),
        ('CurlCFFI', 'http'),
        ('Websockets', 'ws')
    ], indirect=['handler'])
    def test_invalid_proxy_url(self, handler, scheme, proxy_url):
        run_validation(handler, UnsupportedRequest, Request(f'{scheme}://', proxies={scheme: proxy_url}))

    @pytest.mark.parametrize('handler,scheme,fail,handler_kwargs', [
        (handler_tests[0], scheme, fail, handler_kwargs)
        for handler_tests in URL_SCHEME_TESTS
        for scheme, fail, handler_kwargs in handler_tests[1]

    ], indirect=['handler'])
    def test_url_scheme(self, handler, scheme, fail, handler_kwargs):
        run_validation(handler, fail, Request(f'{scheme}://'), **(handler_kwargs or {}))

    @pytest.mark.parametrize('handler,fail', [('Urllib', False), ('Requests', False), ('CurlCFFI', False)], indirect=['handler'])
    def test_no_proxy(self, handler, fail):
        run_validation(handler, fail, Request('http://', proxies={'no': '127.0.0.1,github.com'}))
        run_validation(handler, fail, Request('http://'), proxies={'no': '127.0.0.1,github.com'})

    @pytest.mark.parametrize('handler,proxy_key,fail', [
        (handler_tests[0], proxy_key, fail)
    @pytest.mark.parametrize('handler,scheme,proxy_key,proxy_scheme,fail', [
        (handler_tests[0], handler_tests[1], proxy_key, proxy_scheme, fail)
        for handler_tests in PROXY_KEY_TESTS
        for proxy_key, fail in handler_tests[1]
        for proxy_key, proxy_scheme, fail in handler_tests[2]
    ], indirect=['handler'])
    def test_proxy_key(self, handler, proxy_key, fail):
        run_validation(handler, fail, Request('http://', proxies={proxy_key: 'http://example.com'}))
        run_validation(handler, fail, Request('http://'), proxies={proxy_key: 'http://example.com'})
    def test_proxy_key(self, handler, scheme, proxy_key, proxy_scheme, fail):
        run_validation(handler, fail, Request(f'{scheme}://', proxies={proxy_key: f'{proxy_scheme}://example.com'}))
        run_validation(handler, fail, Request(f'{scheme}://'), proxies={proxy_key: f'{proxy_scheme}://example.com'})

    @pytest.mark.parametrize('handler,req_scheme,scheme,fail', [
        (handler_tests[0], handler_tests[1], scheme, fail)

@@ -1276,16 +1273,6 @@ def test_proxy_scheme(self, handler, req_scheme, scheme, fail):

        run_validation(handler, fail, Request(f'{req_scheme}://', proxies={req_scheme: f'{scheme}://example.com'}))
        run_validation(handler, fail, Request(f'{req_scheme}://'), proxies={req_scheme: f'{scheme}://example.com'})

    @pytest.mark.parametrize('handler', ['Urllib', HTTPSupportedRH, 'Requests', 'CurlCFFI'], indirect=True)
    def test_empty_proxy(self, handler):
        run_validation(handler, False, Request('http://', proxies={'http': None}))
        run_validation(handler, False, Request('http://'), proxies={'http': None})

    @pytest.mark.parametrize('proxy_url', ['//example.com', 'example.com', '127.0.0.1', '/a/b/c'])
    @pytest.mark.parametrize('handler', ['Urllib', 'Requests', 'CurlCFFI'], indirect=True)
    def test_invalid_proxy_url(self, handler, proxy_url):
        run_validation(handler, UnsupportedRequest, Request('http://', proxies={'http': proxy_url}))

    @pytest.mark.parametrize('handler,scheme,extensions,fail', [
        (handler_tests[0], handler_tests[1], extensions, fail)
        for handler_tests in EXTENSION_TESTS
@@ -19,7 +19,7 @@

import ssl
import threading

from yt_dlp import socks
from yt_dlp import socks, traverse_obj
from yt_dlp.cookies import YoutubeDLCookieJar
from yt_dlp.dependencies import websockets
from yt_dlp.networking import Request

@@ -115,6 +115,7 @@ def ws_validate_and_send(rh, req):


@pytest.mark.skipif(not websockets, reason='websockets must be installed to test websocket request handlers')
@pytest.mark.parametrize('handler', ['Websockets'], indirect=True)
class TestWebsSocketRequestHandlerConformance:
    @classmethod
    def setup_class(cls):

@@ -130,7 +131,6 @@ def setup_class(cls):

        cls.mtls_wss_thread, cls.mtls_wss_port = create_mtls_wss_websocket_server()
        cls.mtls_wss_base_url = f'wss://127.0.0.1:{cls.mtls_wss_port}'

    @pytest.mark.parametrize('handler', ['Websockets'], indirect=True)
    def test_basic_websockets(self, handler):
        with handler() as rh:
            ws = ws_validate_and_send(rh, Request(self.ws_base_url))

@@ -142,7 +142,6 @@ def test_basic_websockets(self, handler):

    # https://www.rfc-editor.org/rfc/rfc6455.html#section-5.6
    @pytest.mark.parametrize('msg,opcode', [('str', 1), (b'bytes', 2)])
    @pytest.mark.parametrize('handler', ['Websockets'], indirect=True)
    def test_send_types(self, handler, msg, opcode):
        with handler() as rh:
            ws = ws_validate_and_send(rh, Request(self.ws_base_url))

@@ -150,7 +149,6 @@ def test_send_types(self, handler, msg, opcode):

            assert int(ws.recv()) == opcode
            ws.close()

    @pytest.mark.parametrize('handler', ['Websockets'], indirect=True)
    def test_verify_cert(self, handler):
        with handler() as rh:
            with pytest.raises(CertificateVerifyError):

@@ -161,14 +159,12 @@ def test_verify_cert(self, handler):

            assert ws.status == 101
            ws.close()

    @pytest.mark.parametrize('handler', ['Websockets'], indirect=True)
    def test_ssl_error(self, handler):
        with handler(verify=False) as rh:
            with pytest.raises(SSLError, match=r'ssl(?:v3|/tls) alert handshake failure') as exc_info:
                ws_validate_and_send(rh, Request(self.bad_wss_host))
            assert not issubclass(exc_info.type, CertificateVerifyError)

    @pytest.mark.parametrize('handler', ['Websockets'], indirect=True)
    @pytest.mark.parametrize('path,expected', [
        # Unicode characters should be encoded with uppercase percent-encoding
        ('/中文', '/%E4%B8%AD%E6%96%87'),

@@ -183,7 +179,6 @@ def test_percent_encode(self, handler, path, expected):

            assert ws.status == 101
            ws.close()

    @pytest.mark.parametrize('handler', ['Websockets'], indirect=True)
    def test_remove_dot_segments(self, handler):
        with handler() as rh:
            # This isn't a comprehensive test,

@@ -196,7 +191,6 @@ def test_remove_dot_segments(self, handler):

    # We are restricted to known HTTP status codes in http.HTTPStatus
    # Redirects are not supported for websockets
    @pytest.mark.parametrize('handler', ['Websockets'], indirect=True)
    @pytest.mark.parametrize('status', (200, 204, 301, 302, 303, 400, 500, 511))
    def test_raise_http_error(self, handler, status):
        with handler() as rh:

@@ -204,7 +198,6 @@ def test_raise_http_error(self, handler, status):

                ws_validate_and_send(rh, Request(f'{self.ws_base_url}/gen_{status}'))
            assert exc_info.value.status == status

    @pytest.mark.parametrize('handler', ['Websockets'], indirect=True)
    @pytest.mark.parametrize('params,extensions', [
        ({'timeout': sys.float_info.min}, {}),
        ({}, {'timeout': sys.float_info.min}),

@@ -214,7 +207,6 @@ def test_timeout(self, handler, params, extensions):

            with pytest.raises(TransportError):
                ws_validate_and_send(rh, Request(self.ws_base_url, extensions=extensions))

    @pytest.mark.parametrize('handler', ['Websockets'], indirect=True)
    def test_cookies(self, handler):
        cookiejar = YoutubeDLCookieJar()
        cookiejar.set_cookie(http.cookiejar.Cookie(

@@ -240,7 +232,6 @@ def test_cookies(self, handler):

            assert json.loads(ws.recv())['cookie'] == 'test=ytdlp'
            ws.close()

    @pytest.mark.parametrize('handler', ['Websockets'], indirect=True)
    def test_source_address(self, handler):
        source_address = f'127.0.0.{random.randint(5, 255)}'
        verify_address_availability(source_address)

@@ -250,7 +241,6 @@ def test_source_address(self, handler):

            assert source_address == ws.recv()
            ws.close()

    @pytest.mark.parametrize('handler', ['Websockets'], indirect=True)
    def test_response_url(self, handler):
        with handler() as rh:
            url = f'{self.ws_base_url}/something'

@@ -258,7 +248,6 @@ def test_response_url(self, handler):

            assert ws.url == url
            ws.close()

    @pytest.mark.parametrize('handler', ['Websockets'], indirect=True)
    def test_request_headers(self, handler):
        with handler(headers=HTTPHeaderDict({'test1': 'test', 'test2': 'test2'})) as rh:
            # Global Headers

@@ -294,7 +283,6 @@ def test_request_headers(self, handler):

            'client_certificate_password': 'foobar',
        }
    ))
    @pytest.mark.parametrize('handler', ['Websockets'], indirect=True)
    def test_mtls(self, handler, client_cert):
        with handler(
            # Disable client-side validation of unacceptable self-signed testcert.pem

@@ -304,7 +292,6 @@ def test_mtls(self, handler, client_cert):

        ) as rh:
            ws_validate_and_send(rh, Request(self.mtls_wss_base_url)).close()

    @pytest.mark.parametrize('handler', ['Websockets'], indirect=True)
    def test_request_disable_proxy(self, handler):
        for proxy_proto in handler._SUPPORTED_PROXY_SCHEMES or ['ws']:
            # Given handler is configured with a proxy

@@ -315,7 +302,6 @@ def test_request_disable_proxy(self, handler):

                assert ws.status == 101
                ws.close()

    @pytest.mark.parametrize('handler', ['Websockets'], indirect=True)
    @pytest.mark.skip_handlers_if(
        lambda _, handler: Features.NO_PROXY not in handler._SUPPORTED_FEATURES, 'handler does not support NO_PROXY')
    def test_noproxy(self, handler):

@@ -329,6 +315,21 @@ def test_noproxy(self, handler):

                assert ws.status == 101
                ws.close()

    @pytest.mark.skip_handlers_if(
        lambda _, handler: Features.ALL_PROXY not in handler._SUPPORTED_FEATURES, 'handler does not support ALL_PROXY')
    def test_allproxy(self, handler):
        supported_proto = traverse_obj(handler._SUPPORTED_PROXY_SCHEMES, 0, default='ws')
        # This is a bit of a hacky test, but it should be enough to check whether the handler is using the proxy.
        # 0.1s might not be enough of a timeout if proxy is not used in all cases, but should still get failures.
        with handler(proxies={'all': f'{supported_proto}://10.255.255.255'}, timeout=0.1) as rh:
            with pytest.raises(TransportError):
                ws_validate_and_send(rh, Request(self.ws_base_url)).close()

        with handler(timeout=0.1) as rh:
            with pytest.raises(TransportError):
                ws_validate_and_send(
                    rh, Request(self.ws_base_url, proxies={'all': f'{supported_proto}://10.255.255.255'})).close()


def create_fake_ws_connection(raised):
    import websockets.sync.client
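The new websockets test_allproxy picks the first proxy scheme the handler declares and falls back to 'ws' when none are declared. A small illustration of the traverse_obj lookup it relies on (the schemes tuple here is made up for the example):

# index 0 of the sequence, or the default when the object is empty or None
assert traverse_obj(('socks4', 'socks5', 'socks5h'), 0, default='ws') == 'socks4'
assert traverse_obj(None, 0, default='ws') == 'ws'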