Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

[WIP] Replace lambdas with function calls #7441

Closed
wants to merge 3 commits into from
Closed
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension


Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
12 changes: 6 additions & 6 deletions .sonar/analyse.py
Original file line number Diff line number Diff line change
Expand Up @@ -13,7 +13,7 @@
# These env variables should be set by Jenkins.
SERVER_URL = os.environ.get('SONAR_SERVER_URL', "https://sonarcloud.io")
PROJECT_KEY = os.environ.get('PROJECT_KEY', "org.sonarqube:tribler")
PR_COMMIT = os.environ.get('ghprbActualCommit', u'')
PR_COMMIT = os.environ.get('ghprbActualCommit', '')
TASK_PATH = os.path.join(os.environ.get('WORKSPACE', os.getcwd()), '.scannerwork', 'report-task.txt')

task_status_url = None
Expand Down Expand Up @@ -56,12 +56,12 @@
json_response = requests.get(pr_analysis_url)
data = json.loads(json_response.text)

for pull_request in data[u'pullRequests']:
print("Matching analysis:", pull_request[u'key'], PR_COMMIT, pull_request[u'key'] == PR_COMMIT)
for pull_request in data['pullRequests']:
print("Matching analysis:", pull_request['key'], PR_COMMIT, pull_request['key'] == PR_COMMIT)
# If there is an analysis result for the PR commit with status OK, then exit with success status (0)
if pull_request[u'key'] == PR_COMMIT:
print("Quality Gate:", pull_request[u'status'])
if pull_request[u'status'][u'qualityGateStatus'] == u'OK':
if pull_request['key'] == PR_COMMIT:
print("Quality Gate:", pull_request['status'])
if pull_request['status']['qualityGateStatus'] == 'OK':
print("Status: OK")
break
else:
Expand Down
31 changes: 19 additions & 12 deletions doc/conf.py
Original file line number Diff line number Diff line change
Expand Up @@ -26,6 +26,14 @@

# root_dir = Path(__file__).parent.parent

def _add_root_endpoint(add_endpoint):
def _add_endpoint(self, path, ep):
if path in ['/ipv8', '/market', '/wallets']:
return None
return add_endpoint(self, path, ep)

return _add_endpoint

root_dir = os.path.abspath(os.path.join(os.path.dirname(__name__), '..'))
tribler_source_dirs = [
os.path.join(root_dir, "src"),
Expand All @@ -49,8 +57,7 @@
from tribler.core.components.restapi.rest.root_endpoint import RootEndpoint

add_endpoint = RootEndpoint.add_endpoint
RootEndpoint.add_endpoint = lambda self, path, ep: add_endpoint(self, path, ep) \
if path not in ['/ipv8', '/market', '/wallets'] else None
RootEndpoint.add_endpoint = _add_root_endpoint(add_endpoint=add_endpoint)

# Extract Swagger docs
from extract_swagger import extract_swagger
Expand Down Expand Up @@ -93,18 +100,18 @@
master_doc = 'index'

# General information about the project.
project = u'Tribler'
copyright = u'2020, Tribler devs'
author = u'Tribler devs'
project = 'Tribler'
copyright = '2020, Tribler devs'
author = 'Tribler devs'

# The version info for the project you're documenting, acts as replacement for
# |version| and |release|, also used in various other places throughout the
# built documents.
#
# The short X.Y version.
version = u'7.5'
version = '7.5'
# The full version, including alpha/beta/rc tags.
release = u'7.5.0'
release = '7.5.0'

# The language for content autogenerated by Sphinx. Refer to documentation
# for a list of supported languages.
Expand Down Expand Up @@ -177,7 +184,7 @@
# The name for this set of Sphinx documents.
# "<project> v<release> documentation" by default.
#
# html_title = u'Tribler v6.6'
# html_title = 'Tribler v6.6'

# A shorter title for the navigation bar. Default is the same as html_title.
#
Expand Down Expand Up @@ -303,8 +310,8 @@
# (source start file, target name, title,
# author, documentclass [howto, manual, or own class]).
latex_documents = [
(master_doc, 'Tribler.tex', u'Tribler Documentation',
u'Tribler devs', 'manual'),
(master_doc, 'Tribler.tex', 'Tribler Documentation',
'Tribler devs', 'manual'),
]

# The name of an image file (relative to this directory) to place at the top of
Expand Down Expand Up @@ -339,7 +346,7 @@
# One entry per manual page. List of tuples
# (source start file, name, description, authors, manual section).
man_pages = [
(master_doc, 'tribler', u'Tribler Documentation',
(master_doc, 'tribler', 'Tribler Documentation',
[author], 1)
]

Expand All @@ -354,7 +361,7 @@
# (source start file, target name, title, author,
# dir menu entry, description, category)
texinfo_documents = [
(master_doc, 'Tribler', u'Tribler Documentation',
(master_doc, 'Tribler', 'Tribler Documentation',
author, 'Tribler', 'One line description of project.',
'Miscellaneous'),
]
Expand Down
2 changes: 1 addition & 1 deletion requirements-build.txt
Original file line number Diff line number Diff line change
Expand Up @@ -8,4 +8,4 @@ text-unidecode==1.3; sys_platform == 'darwin'
defusedxml==0.7.1; sys_platform == 'linux2' or sys_platform == 'linux'
markupsafe==2.0.1; sys_platform == 'linux2' or sys_platform == 'linux'

requests==2.25.1
requests==2.31.0
Original file line number Diff line number Diff line change
@@ -1,3 +1,4 @@
import operator
import uuid
from binascii import unhexlify
from collections import defaultdict
Expand Down Expand Up @@ -47,7 +48,7 @@ def add(self, peer: Peer, channel_pk: bytes, channel_id: int):
self._peers_channels[peer].add(id_tuple)

if len(channel_peers) > self.max_peers_per_channel:
removed_peer = min(channel_peers, key=lambda x: x.last_response)
removed_peer = min(channel_peers, key=operator.attrgetter("last_response"))
channel_peers.remove(removed_peer)
# Maintain the reverse mapping
self._peers_channels[removed_peer].remove(id_tuple)
Expand All @@ -64,7 +65,10 @@ def remove_peer(self, peer):
def get_last_seen_peers_for_channel(self, channel_pk: bytes, channel_id: int, limit=None):
id_tuple = (channel_pk, channel_id)
channel_peers = self._channels_dict.get(id_tuple, [])
return sorted(channel_peers, key=lambda x: x.last_response, reverse=True)[0:limit]
last_seen_peers = (
sorted(channel_peers, key=operator.attrgetter("last_response"), reverse=True)
)
return last_seen_peers[0:limit]


class GigaChannelCommunity(RemoteQueryCommunity):
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -248,11 +248,11 @@ def create_session(self, hops=0, store_listen_port=True):
self._logger.info(f'Dummy mode: {self.dummy_mode}. Hops: {hops}.')

# Elric: Strip out the -rcX, -beta, -whatever tail on the version string.
fingerprint = ['TL'] + [int(x) for x in version_id.split('-')[0].split('.')] + [0]
fingerprint = ['TL', *map(int, version_id.split('-')[0].split('.')), 0]
if self.dummy_mode:
from unittest.mock import Mock
ltsession = Mock()
ltsession.pop_alerts = lambda: {}
ltsession.pop_alerts = dict
ltsession.listen_port = lambda: 123
ltsession.get_settings = lambda: {"peer_fingerprint": "000"}
else:
Expand All @@ -268,10 +268,10 @@ def create_session(self, hops=0, store_listen_port=True):
settings['enable_incoming_utp'] = enable_utp

settings['prefer_rc4'] = True
settings["listen_interfaces"] = "0.0.0.0:%d" % libtorrent_port
settings["listen_interfaces"] = f"0.0.0.0:{libtorrent_port}"

settings['peer_fingerprint'] = self.peer_mid
settings['handshake_client_version'] = 'Tribler/' + version_id + '/' + hexlify(self.peer_mid)
settings['handshake_client_version'] = f"Tribler/{version_id}/{hexlify(self.peer_mid)}"
else:
settings['enable_outgoing_utp'] = True
settings['enable_incoming_utp'] = True
Expand All @@ -281,7 +281,7 @@ def create_session(self, hops=0, store_listen_port=True):
settings['force_proxy'] = True

# Anon listen port is never used anywhere, so we let Libtorrent set it
# settings["listen_interfaces"] = "0.0.0.0:%d" % anon_port
# settings["listen_interfaces"] = f"0.0.0.0:{anon_port}"

# By default block all IPs except 1.1.1.1 (which is used to ensure libtorrent makes a connection to us)
self.update_ip_filter(ltsession, ['1.1.1.1'])
Expand Down Expand Up @@ -363,7 +363,8 @@ def set_max_connections(self, conns, hops=None):
def set_upload_rate_limit(self, rate, hops=None):
# Rate conversion due to the fact that we had a different system with Swift
# and the old python BitTorrent core: unlimited == 0, stop == -1, else rate in kbytes
libtorrent_rate = int(-1 if rate == 0 else (1 if rate == -1 else rate * 1024))
torrent_rate = {0: -1, -1: 1}
libtorrent_rate = torrent_rate.get(rate, rate * 1024)

# Pass outgoing_port and num_outgoing_ports to dict due to bug in libtorrent 0.16.18
settings_dict = {'upload_rate_limit': libtorrent_rate, 'outgoing_port': 0, 'num_outgoing_ports': 1}
Expand All @@ -374,10 +375,12 @@ def get_upload_rate_limit(self, hops=None):
# Rate conversion due to the fact that we had a different system with Swift
# and the old python BitTorrent core: unlimited == 0, stop == -1, else rate in kbytes
libtorrent_rate = self.get_session(hops).upload_rate_limit()
return 0 if libtorrent_rate == -1 else (-1 if libtorrent_rate == 1 else libtorrent_rate / 1024)
torrent_rate = {-1: 0, 1: -1}
return torrent_rate.get(libtorrent_rate, libtorrent_rate // 1024)

def set_download_rate_limit(self, rate, hops=None):
libtorrent_rate = int(-1 if rate == 0 else (1 if rate == -1 else rate * 1024))
torrent_rate = {0: -1, -1: 1}
libtorrent_rate = torrent_rate.get(rate, rate * 1024)

# Pass outgoing_port and num_outgoing_ports to dict due to bug in libtorrent 0.16.18
settings_dict = {'download_rate_limit': libtorrent_rate}
Expand All @@ -386,7 +389,8 @@ def set_download_rate_limit(self, rate, hops=None):

def get_download_rate_limit(self, hops=0):
libtorrent_rate = self.get_session(hops).download_rate_limit()
return 0 if libtorrent_rate == -1 else (-1 if libtorrent_rate == 1 else libtorrent_rate / 1024)
torrent_rate = {-1: 0, 1: -1}
return torrent_rate.get(libtorrent_rate, libtorrent_rate // 1024)

def process_alert(self, alert, hops=0):
alert_type = alert.__class__.__name__
Expand Down Expand Up @@ -780,7 +784,7 @@ def update_trackers(self, infohash, trackers):
old_def = download.get_def()
old_trackers = old_def.get_trackers()
new_trackers = list(set(trackers) - old_trackers)
all_trackers = list(old_trackers) + new_trackers
all_trackers = [*old_trackers, *new_trackers]

if new_trackers:
# Add new trackers to the download
Expand Down
Original file line number Diff line number Diff line change
@@ -1,4 +1,5 @@
import asyncio
import functools
from asyncio import Future, gather, get_event_loop, sleep
from unittest.mock import MagicMock

Expand Down Expand Up @@ -27,9 +28,10 @@ def create_fake_download_and_state():
fake_download.get_def().get_name_as_unicode = lambda: "test.iso"
fake_download.get_peerlist = lambda: [fake_peer]
fake_download.hidden = False
fake_download.checkpoint = lambda: succeed(None)
fake_download.stop = lambda: succeed(None)
fake_download.shutdown = lambda: succeed(None)
succeed_none = functools.partial(succeed, None)
fake_download.checkpoint = succeed_none
fake_download.stop = succeed_none
fake_download.shutdown = succeed_none
dl_state = MagicMock()
dl_state.get_infohash = lambda: b'aaaa'
dl_state.get_status = lambda: DownloadStatus.SEEDING
Expand Down Expand Up @@ -138,9 +140,10 @@ async def test_get_metainfo_with_already_added_torrent(fake_dlmgr):

download_impl = MagicMock()
download_impl.future_metainfo = succeed(bencode(torrent_def.get_metainfo()))
download_impl.checkpoint = lambda: succeed(None)
download_impl.stop = lambda: succeed(None)
download_impl.shutdown = lambda: succeed(None)
succeed_none = functools.partial(succeed, None)
download_impl.checkpoint = succeed_none
download_impl.stop = succeed_none
download_impl.shutdown = succeed_none

fake_dlmgr.initialize()
fake_dlmgr.downloads[torrent_def.infohash] = download_impl
Expand All @@ -155,10 +158,10 @@ async def test_start_download_while_getting_metainfo(fake_dlmgr):
infohash = b"a" * 20

metainfo_session = MagicMock()
metainfo_session.get_torrents = lambda: []
metainfo_session.get_torrents = list

metainfo_dl = MagicMock()
metainfo_dl.get_def = lambda: MagicMock(get_infohash=lambda: infohash)
metainfo_dl.get_def = functools.partial(MagicMock, get_infohash=lambda: infohash)

fake_dlmgr.initialize()
fake_dlmgr.get_session = lambda *_: metainfo_session
Expand All @@ -181,7 +184,7 @@ async def test_start_download(fake_dlmgr):
infohash = b'a' * 20

mock_handle = MagicMock()
mock_handle.info_hash = lambda: hexlify(infohash)
mock_handle.info_hash = functools.partial(hexlify, infohash)
mock_handle.is_valid = lambda: True

mock_error = MagicMock()
Expand All @@ -192,7 +195,7 @@ async def test_start_download(fake_dlmgr):
category=lambda _: None))()

mock_ltsession = MagicMock()
mock_ltsession.get_torrents = lambda: []
mock_ltsession.get_torrents = list
mock_ltsession.async_add_torrent = lambda _: fake_dlmgr.register_task('post_alert',
fake_dlmgr.process_alert,
mock_alert, delay=0.1)
Expand Down Expand Up @@ -241,7 +244,7 @@ async def test_start_download_existing_handle(fake_dlmgr):
infohash = b'a' * 20

mock_handle = MagicMock()
mock_handle.info_hash = lambda: hexlify(infohash)
mock_handle.info_hash = functools.partial(hexlify, infohash)
mock_handle.is_valid = lambda: True

mock_ltsession = MagicMock()
Expand Down Expand Up @@ -319,7 +322,7 @@ def on_set_settings(settings):
assert settings['proxy_hostnames']

mock_lt_session = MagicMock()
mock_lt_session.get_settings = lambda: {}
mock_lt_session.get_settings = dict
mock_lt_session.set_settings = on_set_settings
mock_lt_session.set_proxy = on_proxy_set # Libtorrent < 1.1.0 uses set_proxy to set proxy settings
fake_dlmgr.set_proxy_settings(mock_lt_session, 0, ('a', "1234"), ('abc', 'def'))
Expand Down Expand Up @@ -406,7 +409,7 @@ def mocked_load_checkpoint(filename):
mocked_load_checkpoint.called = True

mocked_load_checkpoint.called = False
fake_dlmgr.get_checkpoint_dir = lambda: Path(tmpdir)
fake_dlmgr.get_checkpoint_dir = functools.partial(Path, tmpdir)

with open(fake_dlmgr.get_checkpoint_dir() / 'abcd.conf', 'wb') as state_file:
state_file.write(b"hi")
Expand Down
2 changes: 1 addition & 1 deletion src/tribler/core/components/libtorrent/torrentdef.py
Original file line number Diff line number Diff line change
Expand Up @@ -35,7 +35,7 @@ def escape_as_utf8(string, encoding='utf8'):
# latin1 and hope for the best (minor corruption).
return string.decode('latin1').encode('utf8', 'ignore').decode('utf8')
except (TypeError, ValueError):
# This is a very nasty string (e.g. u'\u266b'), remove the illegal entries.
# This is a very nasty string (e.g. '\u266b'), remove the illegal entries.
return string.encode('utf8', 'ignore').decode('utf8')


Expand Down
Original file line number Diff line number Diff line change
@@ -1,3 +1,4 @@
import operator
import time
from collections import defaultdict
from typing import Dict, List
Expand Down Expand Up @@ -57,7 +58,7 @@ def build_snippets(self, search_results: List[Dict]) -> List[Dict]:

# Sort the search results within each snippet by the number of seeders
for torrents_list in content_to_torrents.values():
torrents_list.sort(key=lambda x: x["num_seeders"], reverse=True)
torrents_list.sort(key=operator.itemgetter("num_seeders"), reverse=True)

# Determine the most popular content items - this is the one we show
sorted_content_info = list(content_to_torrents.items())
Expand Down
2 changes: 1 addition & 1 deletion src/tribler/core/components/reporter/reported_error.py
Original file line number Diff line number Diff line change
Expand Up @@ -7,7 +7,7 @@ class ReportedError:
type: str
text: str
event: dict = field(repr=False)
additional_information: dict = field(default_factory=lambda: {}, repr=False)
additional_information: dict = field(default_factory=dict, repr=False)

long_text: str = field(default='', repr=False)
context: str = field(default='', repr=False)
Expand Down
15 changes: 13 additions & 2 deletions src/tribler/core/components/tunnel/community/dispatcher.py
Original file line number Diff line number Diff line change
@@ -1,3 +1,4 @@
import functools
import random
from collections import defaultdict

Expand Down Expand Up @@ -97,6 +98,11 @@ async def on_socks5_tcp_data(self, tcp_connection, destination, request):
tcp_connection.transport.write(response)
tcp_connection.transport.close()

def _add_data_if_result(self, result_func, connection, request):
if not result_func.result():
return None
return self.on_socks5_udp_data(connection, request)

def select_circuit(self, connection, request):
if request.destination[1] == CIRCUIT_ID_PORT:
circuit = self.tunnels.circuits.get(self.tunnels.ip_to_circuit_id(request.destination[0]))
Expand All @@ -120,8 +126,13 @@ def select_circuit(self, connection, request):
return None
self._logger.debug("Creating circuit for data to %s. Retrying later..", request.destination)
self.cid_to_con[circuit.circuit_id] = connection
circuit.ready.add_done_callback(lambda f, c=connection.udp_connection, r=request:
self.on_socks5_udp_data(c, r) if f.result() else None)
circuit.ready.add_done_callback(
functools.partial(
self._add_data_if_result,
connection=connection.udp_connection,
request=request
)
)
return None

circuit = random.choice(options)
Expand Down
Original file line number Diff line number Diff line change
@@ -1,3 +1,4 @@
import functools
import hashlib
import math
import sys
Expand Down Expand Up @@ -503,7 +504,7 @@ def monitor_downloads(self, dslist):
if state_changed and new_state in active:
if old_state != DownloadStatus.METADATA or new_state != DownloadStatus.DOWNLOADING:
self.join_swarm(info_hash, hops[info_hash], seeding=new_state == DownloadStatus.SEEDING,
callback=lambda addr, ih=info_hash: self.on_e2e_finished(addr, ih))
callback=functools.partial(self.on_e2e_finished, info_hash=info_hash))
elif state_changed and new_state in [DownloadStatus.STOPPED, None]:
self.leave_swarm(info_hash)

Expand Down
Loading