[compat, networking] Deprecate old functions (yt-dlp#2861)
Authored by: coletdjnz, pukkandan
coletdjnz authored and aalsuwaidi committed Apr 21, 2024
1 parent 874548b commit 02fc0dc
Showing 176 changed files with 706 additions and 728 deletions.
2 changes: 1 addition & 1 deletion test/test_download.py
@@ -160,7 +160,7 @@ def try_rm_tcs_files(tcs=None):
force_generic_extractor=params.get('force_generic_extractor', False))
except (DownloadError, ExtractorError) as err:
# Check if the exception is not a network related one
if not isinstance(err.exc_info[1], (TransportError, UnavailableVideoError)) or (isinstance(err.exc_info[1], HTTPError) and err.exc_info[1].code == 503):
if not isinstance(err.exc_info[1], (TransportError, UnavailableVideoError)) or (isinstance(err.exc_info[1], HTTPError) and err.exc_info[1].status == 503):
err.msg = f'{getattr(err, "msg", err)} ({tname})'
raise

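The test now checks `err.status` rather than the urllib-era `err.code` on the new `HTTPError`. A minimal sketch of that check in isolation, assuming the exception classes exported by `yt_dlp.networking.exceptions` (the helper name is illustrative):

```python
from yt_dlp.networking.exceptions import HTTPError, TransportError

def is_transient(err):
    # Transport problems (timeouts, connection resets) and HTTP 503 are treated
    # as temporary; note the new attribute is `status`, not the old `code`.
    return isinstance(err, TransportError) or (
        isinstance(err, HTTPError) and err.status == 503)
```
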
27 changes: 15 additions & 12 deletions test/test_networking.py
@@ -1057,14 +1057,15 @@ def test_compat_request(self):
urllib_req = urllib.request.Request('http://foo.bar', data=b'test', method='PUT', headers={'X-Test': '1'})
urllib_req.add_unredirected_header('Cookie', 'bob=bob')
urllib_req.timeout = 2

req = ydl.urlopen(urllib_req).request
assert req.url == urllib_req.get_full_url()
assert req.data == urllib_req.data
assert req.method == urllib_req.get_method()
assert 'X-Test' in req.headers
assert 'Cookie' in req.headers
assert req.extensions.get('timeout') == 2
with warnings.catch_warnings():
warnings.simplefilter('ignore', category=DeprecationWarning)
req = ydl.urlopen(urllib_req).request
assert req.url == urllib_req.get_full_url()
assert req.data == urllib_req.data
assert req.method == urllib_req.get_method()
assert 'X-Test' in req.headers
assert 'Cookie' in req.headers
assert req.extensions.get('timeout') == 2

with pytest.raises(AssertionError):
ydl.urlopen(None)
@@ -1362,7 +1363,9 @@ def test_get_header(self):

def test_compat(self):
res = Response(io.BytesIO(b''), url='test://', status=404, headers={'test': 'test'})
assert res.code == res.getcode() == res.status
assert res.geturl() == res.url
assert res.info() is res.headers
assert res.getheader('test') == res.get_header('test')
with warnings.catch_warnings():
warnings.simplefilter('ignore', category=DeprecationWarning)
assert res.code == res.getcode() == res.status
assert res.geturl() == res.url
assert res.info() is res.headers
assert res.getheader('test') == res.get_header('test')
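The `Response` compatibility accessors (`code`, `getcode()`, `geturl()`, `info()`, `getheader()`) now emit `DeprecationWarning`, which is why the tests silence them explicitly. A hedged sketch of the same pattern outside the test suite, assuming `res` is a `yt_dlp.networking.Response` (e.g. returned by `ydl.urlopen`):

```python
import warnings

def read_response_fields(res):
    url = res.url                    # preferred over res.geturl()
    status = res.status              # preferred over res.code / res.getcode()
    header = res.get_header('test')  # preferred over res.getheader('test')

    with warnings.catch_warnings():
        # Only needed while code still calls the deprecated aliases.
        warnings.simplefilter('ignore', category=DeprecationWarning)
        legacy_url = res.geturl()

    return url, status, header, legacy_url
```
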
64 changes: 52 additions & 12 deletions test/test_networking_utils.py
@@ -8,11 +8,13 @@

sys.path.insert(0, os.path.dirname(os.path.dirname(os.path.abspath(__file__))))

import contextlib
import io
import platform
import random
import ssl
import urllib.error
import warnings

from yt_dlp.cookies import YoutubeDLCookieJar
from yt_dlp.dependencies import certifi
@@ -202,20 +204,58 @@ def test_compat_http_error(self):
assert isinstance(error, HTTPError)
assert isinstance(error, urllib.error.HTTPError)

assert error.code == 403
assert error.getcode() == 403
assert error.hdrs is error.response.headers
assert error.info() is error.response.headers
assert error.headers is error.response.headers
assert error.filename == error.response.url
assert error.url == error.response.url
assert error.geturl() == error.response.url
@contextlib.contextmanager
def raises_deprecation_warning():
with warnings.catch_warnings(record=True) as w:
warnings.simplefilter('always')
yield

if len(w) == 0:
pytest.fail('Did not raise DeprecationWarning')
if len(w) > 1:
pytest.fail(f'Raised multiple warnings: {w}')

if not issubclass(w[-1].category, DeprecationWarning):
pytest.fail(f'Expected DeprecationWarning, got {w[-1].category}')
w.clear()

with raises_deprecation_warning():
assert error.code == 403

with raises_deprecation_warning():
assert error.getcode() == 403

with raises_deprecation_warning():
assert error.hdrs is error.response.headers

with raises_deprecation_warning():
assert error.info() is error.response.headers

with raises_deprecation_warning():
assert error.headers is error.response.headers

with raises_deprecation_warning():
assert error.filename == error.response.url

with raises_deprecation_warning():
assert error.url == error.response.url

with raises_deprecation_warning():
assert error.geturl() == error.response.url

# Passthrough file operations
assert error.read() == b'test'
assert not error.closed
# Technically Response operations are also passed through, which should not be used.
assert error.get_header('test') == 'test'
with raises_deprecation_warning():
assert error.read() == b'test'

with raises_deprecation_warning():
assert not error.closed

with raises_deprecation_warning():
# Technically Response operations are also passed through, which should not be used.
assert error.get_header('test') == 'test'

# Should not raise a warning
error.close()

@pytest.mark.skipif(
platform.python_implementation() == 'PyPy', reason='garbage collector works differently in pypy')
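The hand-rolled `raises_deprecation_warning` helper above asserts that exactly one `DeprecationWarning` is emitted per access. Where that strictness is not needed, pytest's built-in helper covers the common case; a sketch, with `error` assumed to be the compat `HTTPError` built earlier in the test:

```python
import pytest

def check_deprecated_code_attr(error):
    # Looser than the custom context manager above: pytest.warns only requires
    # that at least one matching warning was raised inside the block.
    with pytest.warns(DeprecationWarning):
        assert error.code == 403
```
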
12 changes: 7 additions & 5 deletions yt_dlp/YoutubeDL.py
@@ -33,14 +33,15 @@
from .extractor.common import UnsupportedURLIE
from .extractor.openload import PhantomJSwrapper
from .minicurses import format_text
from .networking import Request, RequestDirector
from .networking import HEADRequest, Request, RequestDirector
from .networking.common import _REQUEST_HANDLERS
from .networking.exceptions import (
HTTPError,
NoSupportingHandlers,
RequestError,
SSLError,
_CompatHTTPError,
network_exceptions,
)
from .plugins import directories as plugin_directories
from .postprocessor import _PLUGIN_CLASSES as plugin_pps
@@ -80,7 +81,6 @@
ExtractorError,
FormatSorter,
GeoRestrictedError,
HEADRequest,
ISO3166Utils,
LazyList,
MaxDownloadsReached,
@@ -122,7 +122,6 @@
locked_file,
make_archive_id,
make_dir,
network_exceptions,
number_of_digits,
orderedSet,
orderedSet_from_options,
@@ -135,7 +134,6 @@
sanitize_filename,
sanitize_path,
sanitize_url,
std_headers,
str_or_none,
strftime_or_none,
subtitles_filename,
@@ -158,6 +156,7 @@
HTTPHeaderDict,
clean_headers,
clean_proxies,
std_headers,
)
from .version import CHANNEL, RELEASE_GIT_HEAD, VARIANT, __version__

@@ -4019,6 +4018,9 @@ def urlopen(self, req):
if isinstance(req, str):
req = Request(req)
elif isinstance(req, urllib.request.Request):
self.deprecation_warning(
'Passing a urllib.request.Request object to YoutubeDL.urlopen() is deprecated. '
'Use yt_dlp.networking.common.Request instead.')
req = urllib_req_to_req(req)
assert isinstance(req, Request)

@@ -4242,7 +4244,7 @@ def _write_thumbnails(self, label, info_dict, filename, thumb_filename_base=None
ret.append((thumb_filename, thumb_filename_final))
t['filepath'] = thumb_filename
except network_exceptions as err:
if isinstance(err, urllib.error.HTTPError) and err.code == 404:
if isinstance(err, HTTPError) and err.status == 404:
self.to_screen(f'[info] {thumb_display_id.title()} does not exist')
else:
self.report_warning(f'Unable to download {thumb_display_id}: {err}')
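`YoutubeDL.urlopen` still accepts a `urllib.request.Request`, but now emits a deprecation warning before converting it. A minimal sketch of the preferred call, with a placeholder URL and header:

```python
from yt_dlp import YoutubeDL
from yt_dlp.networking import Request

with YoutubeDL() as ydl:
    # Pass the networking Request type directly; a plain URL string also works.
    req = Request('https://example.com', headers={'X-Test': '1'})
    with ydl.urlopen(req) as res:
        print(res.status, res.url)
```
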
2 changes: 1 addition & 1 deletion yt_dlp/__init__.py
@@ -57,11 +57,11 @@
read_stdin,
render_table,
setproctitle,
std_headers,
traverse_obj,
variadic,
write_string,
)
from .utils.networking import std_headers
from .YoutubeDL import YoutubeDL

_IN_CLI = False
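`std_headers` is now imported from `yt_dlp.utils.networking` rather than from `yt_dlp.utils` directly. A small sketch of building per-request headers from it under that assumption (the Referer value is illustrative):

```python
from yt_dlp.utils.networking import std_headers

# Start from the default header set and override per-request values.
headers = dict(std_headers)
headers['Referer'] = 'https://example.com'
```
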
1 change: 0 additions & 1 deletion yt_dlp/compat/_deprecated.py
@@ -8,7 +8,6 @@

compat_b64decode = base64.b64decode

compat_HTTPError = urllib.error.HTTPError
compat_urlparse = urllib.parse
compat_parse_qs = urllib.parse.parse_qs
compat_urllib_parse_unquote = urllib.parse.unquote
1 change: 1 addition & 0 deletions yt_dlp/compat/_legacy.py
@@ -70,6 +70,7 @@ def compat_setenv(key, value, env=os.environ):
compat_HTMLParser = compat_html_parser_HTMLParser = html.parser.HTMLParser
compat_http_client = http.client
compat_http_server = http.server
compat_HTTPError = urllib.error.HTTPError
compat_input = input
compat_integer_types = (int, )
compat_itertools_count = itertools.count
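`compat_HTTPError` moves out of the actively maintained `_deprecated` shims and into `_legacy`; new code is expected to catch the networking exception instead. A sketch of the replacement under that assumption (the helper is illustrative):

```python
# Old (now legacy-only):
#   from yt_dlp.compat import compat_HTTPError
# New:
from yt_dlp.networking.exceptions import HTTPError

def describe(err):
    if isinstance(err, HTTPError):
        return f'HTTP {err.status} for {err.response.url}'
    return str(err)
```
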
7 changes: 3 additions & 4 deletions yt_dlp/downloader/external.py
@@ -10,6 +10,7 @@

from .fragment import FragmentFD
from ..compat import functools
from ..networking import Request
from ..postprocessor.ffmpeg import EXT_TO_OUT_FORMATS, FFmpegPostProcessor
from ..utils import (
Popen,
@@ -25,7 +26,6 @@
encodeFilename,
find_available_port,
remove_end,
sanitized_Request,
traverse_obj,
)

@@ -357,13 +357,12 @@ def aria2c_rpc(self, rpc_port, rpc_secret, method, params=()):
'method': method,
'params': [f'token:{rpc_secret}', *params],
}).encode('utf-8')
request = sanitized_Request(
request = Request(
f'http://localhost:{rpc_port}/jsonrpc',
data=d, headers={
'Content-Type': 'application/json',
'Content-Length': f'{len(d)}',
'Ytdl-request-proxy': '__noproxy__',
})
}, proxies={'all': None})
with self.ydl.urlopen(request) as r:
resp = json.load(r)
assert resp.get('id') == sanitycheck, 'Something went wrong with RPC server'
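The aria2c RPC call switches from the internal `Ytdl-request-proxy` header to the `proxies` argument on `Request`. A hedged sketch of that construction with a placeholder port and payload:

```python
import json
from yt_dlp.networking import Request

payload = json.dumps({
    'jsonrpc': '2.0', 'id': 1,
    'method': 'aria2.getVersion', 'params': ['token:secret'],
}).encode()

req = Request(
    'http://localhost:6800/jsonrpc',  # placeholder port; the downloader picks a free one
    data=payload,
    headers={'Content-Type': 'application/json'},
    proxies={'all': None})            # bypass any configured proxy for localhost
```
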
8 changes: 4 additions & 4 deletions yt_dlp/downloader/f4m.py
@@ -3,11 +3,11 @@
import itertools
import struct
import time
import urllib.error
import urllib.parse

from .fragment import FragmentFD
from ..compat import compat_etree_fromstring
from ..networking.exceptions import HTTPError
from ..utils import fix_xml_ampersands, xpath_text


@@ -312,7 +312,7 @@ def real_download(self, filename, info_dict):
self.to_screen('[%s] Downloading f4m manifest' % self.FD_NAME)

urlh = self.ydl.urlopen(self._prepare_url(info_dict, man_url))
man_url = urlh.geturl()
man_url = urlh.url
# Some manifests may be malformed, e.g. prosiebensat1 generated manifests
# (see https://github.com/ytdl-org/youtube-dl/issues/6215#issuecomment-121704244
# and https://github.com/ytdl-org/youtube-dl/issues/7823)
@@ -407,8 +407,8 @@ def real_download(self, filename, info_dict):
if box_type == b'mdat':
self._append_fragment(ctx, box_data)
break
except urllib.error.HTTPError as err:
if live and (err.code == 404 or err.code == 410):
except HTTPError as err:
if live and (err.status == 404 or err.status == 410):
# We didn't keep up with the live window. Continue
# with the next available fragment.
msg = 'Fragment %d unavailable' % frag_i
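Downloaders now read the final (post-redirect) URL from `urlh.url` instead of the deprecated `urlh.geturl()`. A compact sketch of that manifest-fetch pattern, with the surrounding downloader class omitted:

```python
def fetch_manifest(ydl, manifest_url):
    # `ydl` is assumed to be a YoutubeDL instance; `manifest_url` a plain URL string.
    urlh = ydl.urlopen(manifest_url)
    final_url = urlh.url                        # resolved URL after redirects
    body = urlh.read().decode('utf-8', 'ignore')
    return final_url, body
```
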
19 changes: 7 additions & 12 deletions yt_dlp/downloader/fragment.py
@@ -1,24 +1,19 @@
import concurrent.futures
import contextlib
import http.client
import json
import math
import os
import struct
import time
import urllib.error

from .common import FileDownloader
from .http import HttpFD
from ..aes import aes_cbc_decrypt_bytes, unpad_pkcs7
from ..compat import compat_os_name
from ..utils import (
DownloadError,
RetryManager,
encodeFilename,
sanitized_Request,
traverse_obj,
)
from ..networking import Request
from ..networking.exceptions import HTTPError, IncompleteRead
from ..utils import DownloadError, RetryManager, encodeFilename, traverse_obj
from ..utils.networking import HTTPHeaderDict


class HttpQuietDownloader(HttpFD):
@@ -75,7 +70,7 @@ def report_skip_fragment(self, frag_index, err=None):

def _prepare_url(self, info_dict, url):
headers = info_dict.get('http_headers')
return sanitized_Request(url, None, headers) if headers else url
return Request(url, None, headers) if headers else url

def _prepare_and_start_frag_download(self, ctx, info_dict):
self._prepare_frag_download(ctx)
@@ -457,7 +452,7 @@ def download_fragment(fragment, ctx):

frag_index = ctx['fragment_index'] = fragment['frag_index']
ctx['last_error'] = None
headers = info_dict.get('http_headers', {}).copy()
headers = HTTPHeaderDict(info_dict.get('http_headers'))
byte_range = fragment.get('byte_range')
if byte_range:
headers['Range'] = 'bytes=%d-%d' % (byte_range['start'], byte_range['end'] - 1)
@@ -477,7 +472,7 @@ def error_callback(err, count, retries):
if not self._download_fragment(
ctx, fragment['url'], info_dict, headers, info_dict.get('request_data')):
return
except (urllib.error.HTTPError, http.client.IncompleteRead) as err:
except (HTTPError, IncompleteRead) as err:
retry.error = err
continue
except DownloadError: # has own retry settings
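Per-fragment headers are now built with `HTTPHeaderDict`, which tolerates a missing `http_headers` entry, so the old `.get('http_headers', {}).copy()` fallback is no longer needed. A small sketch under that assumption; the header values are illustrative:

```python
from yt_dlp.utils.networking import HTTPHeaderDict

info_dict = {}  # no 'http_headers' key at all
headers = HTTPHeaderDict(info_dict.get('http_headers'))  # None is accepted
headers['Range'] = 'bytes=0-1023'
```
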
2 changes: 1 addition & 1 deletion yt_dlp/downloader/hls.py
@@ -75,7 +75,7 @@ def real_download(self, filename, info_dict):
self.to_screen('[%s] Downloading m3u8 manifest' % self.FD_NAME)

urlh = self.ydl.urlopen(self._prepare_url(info_dict, man_url))
man_url = urlh.geturl()
man_url = urlh.url
s = urlh.read().decode('utf-8', 'ignore')

can_download, message = self.can_download(s, info_dict, self.params.get('allow_unplayable_formats')), None