diff --git a/.github/workflows/scripttest.yml b/.github/workflows/scripttest.yml
index 667d23a7134..ff86ef79fc5 100644
--- a/.github/workflows/scripttest.yml
+++ b/.github/workflows/scripttest.yml
@@ -98,20 +98,3 @@ jobs:
           command: python ./scripts/experiments/tunnel_community/speed_test_exit.py --fragile
           duration: ${{inputs.duration}}
-      #seedbox
-
-      - name: generate_test_data.py
-        run: |
-          python ./scripts/seedbox/generate_test_data.py --destination=./test_data --count=10
-
-      - name: disseminator.py
-        uses: ./.github/actions/timeout
-        with:
-          command: python ./scripts/seedbox/disseminator.py --source=./test_data --fragile --testnet
-          duration: ${{inputs.duration}}
-
-      - name: seeder.py
-        uses: ./.github/actions/timeout
-        with:
-          command: python ./scripts/seedbox/seeder.py --source=./test_data --testnet
-          duration: ${{inputs.duration}}
diff --git a/scripts/seedbox/README.md b/scripts/seedbox/README.md
deleted file mode 100644
index c85ed69dbb2..00000000000
--- a/scripts/seedbox/README.md
+++ /dev/null
@@ -1,98 +0,0 @@
-# Seedbox
-
-This folder contains scripts for effortlessly setting up a seedbox.
-
-The seedbox consists of two parts:
-
-1. Torrent seeding (using the LibTorrent protocol)
-1. Channel dissemination (using the Tribler network)
-
-## Prerequisites
-
-1. Clone the tribler repo:
-   ```shell
-   git clone https://github.com/Tribler/tribler.git
-   ```
-1. Install Tribler requirements:
-   ```bash
-   python3 -m pip install -r requirements.txt
-   ```
-1. Add the Tribler `src` folder to `PYTHONPATH` (bash example below):
-   ```shell
-   export PYTHONPATH=${PYTHONPATH}:./src
-   ```
-
-## Torrent seeding
-
-To start seeding torrents, run the following script:
-
-```bash
-python3 seeder.py
-```
-
-Consider the following folder structure:
-
-```
-source folder
-├ sub_directory
-| ├ file1
-| └ file2
-├ sub_directory2
-| ├ file3
-| └ file4
-├ thumbnail.png
-└ description.md
-```
-
-In this particular example, `seeder.py` will create two torrents:
-`sub_directory.torrent` and `sub_directory2.torrent`.
-
-After creating the torrents, `seeder.py` will start seeding them through the BitTorrent protocol.
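As orientation before the script itself is removed further below: a minimal sketch of the per-folder torrent creation that `seeder.py` performs. The libtorrent calls mirror the deleted script; the helper name and the example call are illustrative, not part of the original files.

```python
from pathlib import Path

import libtorrent


def make_torrent(folder: Path) -> bytes:
    """Create a bencoded .torrent for one sub-directory."""
    fs = libtorrent.file_storage()
    fs.set_name(folder.name)
    for file in sorted(p for p in folder.rglob('*') if p.is_file()):
        # Paths are stored relative to the parent folder, as seeder.py does
        fs.add_file(str(file.relative_to(folder.parent)), file.stat().st_size)

    torrent = libtorrent.create_torrent(fs, flags=libtorrent.create_torrent_flags_t.optimize)
    torrent.set_creator('TU Delft')
    libtorrent.set_piece_hashes(torrent, str(folder.parent))  # hashes the real file data
    return libtorrent.bencode(torrent.generate())


# Hypothetical usage, mirroring the layout above:
# (Path('source folder') / 'sub_directory.torrent').write_bytes(
#     make_torrent(Path('source folder') / 'sub_directory'))
```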
-
-## Data disseminating
-
-To start disseminating data through Tribler's network, run the following script:
-
-```bash
-python3 disseminator.py
-```
-
-This script will create a channel and disseminate it to the Tribler network.
-
-Consider the following folder structure:
-
-```
-source folder
-├ sub_directory.torrent
-├ sub_directory2.torrent
-├ thumbnail.png
-└ description.md
-```
-
-Above you can see two "special" files:
-
-* thumbnail.png
-* description.md
-
-The channel will be created with a description based on these files.
-The source folder's name will be used as the channel name.
-
-### Error reporting
-
-In case you want errors to be reported, you can use [Sentry](https://develop.sentry.dev/).
-
-To enable error reporting, specify the following environment variable:
-
-```bash
-export SENTRY_URL=
-```
-
-The URL can be taken directly from the corresponding Sentry project.
-
-### Generate test data
-
-The following script generates a `1GB` dataset divided into `1024` folders:
-
-```shell
-python3 generate_test_data.py -d /tmp/test_data
-```
\ No newline at end of file
diff --git a/scripts/seedbox/disseminator.py b/scripts/seedbox/disseminator.py
deleted file mode 100644
index 05a58031d13..00000000000
--- a/scripts/seedbox/disseminator.py
+++ /dev/null
@@ -1,223 +0,0 @@
-"""
-This script scans the input directory and creates a Tribler channel based on
-the torrents found.
-
-For available parameters see the "parse_args" function below.
-
-Folder structure:
-
-my channel
-├ sub_directory
-| ├ file1.torrent
-| └ file2.torrent
-├ file3.torrent
-├ thumbnail.png
-└ description.md
-"""
-
-import argparse
-import logging
-import os
-from json import dumps
-from pathlib import Path
-from types import SimpleNamespace
-
-import libtorrent
-import sentry_sdk
-from pony.orm import db_session
-
-from tribler.core.components.gigachannel.gigachannel_component import GigaChannelComponent
-from tribler.core.components.gigachannel_manager.gigachannel_manager_component import GigachannelManagerComponent
-from tribler.core.components.ipv8.ipv8_component import Ipv8Component
-from tribler.core.components.key.key_component import KeyComponent
-from tribler.core.components.knowledge.knowledge_component import KnowledgeComponent
-from tribler.core.components.libtorrent.libtorrent_component import LibtorrentComponent
-from tribler.core.components.libtorrent.torrentdef import TorrentDef
-from tribler.core.components.metadata_store.db.orm_bindings.channel_node import NEW
-from tribler.core.components.metadata_store.metadata_store_component import MetadataStoreComponent
-from tribler.core.components.socks_servers.socks_servers_component import SocksServersComponent
-from tribler.core.utilities.tiny_tribler_service import TinyTriblerService
-
-_description_file_name = 'description.md'
-_thumbnail_file_name = 'thumbnail.png'
-
-_logger = logging.getLogger('Disseminator')
-
-sentry_sdk.init(
-    os.environ.get('SENTRY_URL'),
-    traces_sample_rate=1.0
-)
-
-
-def parse_args():
-    parser = argparse.ArgumentParser(description='Disseminate data by using the Tribler network')
-
-    parser.add_argument('-s', '--source', type=str, help='path to data folder', default='.')
-    parser.add_argument('-d', '--tribler_dir', type=str, help='path to Tribler state folder', default='./.Tribler')
-    parser.add_argument('-v', '--verbosity', help='increase output verbosity', action='store_true')
-    parser.add_argument('-f', '--fragile', help='fail at the first error', action='store_true')
-    parser.add_argument('-t', '--testnet', help='testnet run', action='store_true')
-
-    return parser.parse_args()
-
-
-def setup_logger(verbosity):
-    logging_level = logging.DEBUG if verbosity else logging.INFO
-    logging.basicConfig(level=logging_level)
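The CI job removed above drove this script as `python ./scripts/seedbox/disseminator.py --source=./test_data --fragile --testnet`. A tiny, self-contained illustration of how those flags parse, trimmed from `parse_args` above; the inline argv list is hypothetical:

```python
import argparse

parser = argparse.ArgumentParser(description='Disseminate data by using the Tribler network')
parser.add_argument('-s', '--source', type=str, default='.')
parser.add_argument('-d', '--tribler_dir', type=str, default='./.Tribler')
parser.add_argument('-v', '--verbosity', action='store_true')
parser.add_argument('-f', '--fragile', action='store_true')
parser.add_argument('-t', '--testnet', action='store_true')

args = parser.parse_args(['--source=./test_data', '--fragile', '--testnet'])
assert (args.source, args.fragile, args.testnet) == ('./test_data', True, True)
assert args.tribler_dir == './.Tribler'  # the state dir falls back to its default
```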
-
-
-class ChannelHelper:
-    def __init__(self, community, manager):
-        self.community = community
-        self.manager = manager
-        self.directories = SimpleNamespace(tree={}, directory=None)
-
-    @db_session
-    def create_root_channel(self, name, description=''):
-        _logger.info(f'Creating channel: {name}')
-        channels = self.community.mds.ChannelMetadata
-
-        if len(channels.get_channels_by_title(name)) >= 1:
-            _logger.warning(f'Channel with name {name} already exists')
-            return False
-
-        self.directories.directory = channels.create_channel(name, description)
-        self.flush()
-
-        return True
-
-    @db_session
-    def add_torrent(self, file, relative_path):
-        _logger.info(f'Add torrent: {file}')
-
-        directory = self.get_directory(relative_path)
-        decoded_torrent = libtorrent.bdecode(file.read_bytes())
-        directory.add_torrent_to_channel(TorrentDef(metainfo=decoded_torrent), None)
-
-    @db_session
-    def add_thumbnail(self, thumbnail):
-        if not thumbnail:
-            return
-
-        _logger.info(f'Add thumbnail: {thumbnail}')
-
-        root_channel = self.directories.directory
-        self.community.mds.ChannelThumbnail(public_key=root_channel.public_key,
-                                            origin_id=root_channel.id_,
-                                            status=NEW,
-                                            binary_data=thumbnail,
-                                            data_type='image/png')
-
-    @db_session
-    def add_description(self, description):
-        if not description:
-            return
-
-        _logger.info(f'Add description: {description}')
-
-        root_channel = self.directories.directory
-        self.community.mds.ChannelDescription(public_key=root_channel.public_key,
-                                              origin_id=root_channel.id_,
-                                              json_text=dumps({"description_text": description}),
-                                              status=NEW)
-
-    @db_session
-    def get_directory(self, path):
-        current = self.directories
-
-        for part in path.parts[:-1]:
-            next_directory = current.tree.get(part, None)
-            if next_directory is not None:
-                current = next_directory
-                continue
-
-            next_directory = SimpleNamespace(
-                tree={},
-                directory=self.community.mds.CollectionNode(title=part, origin_id=current.directory.id_, status=NEW)
-            )
-
-            current.tree[part] = next_directory
-            current = next_directory
-            self.flush()
-
-            _logger.info(f'Directory created: {part}')
-
-        return current.directory
-
-    @db_session
-    def commit(self):
-        _logger.info('Commit changes')
-
-        for t in self.community.mds.CollectionNode.commit_all_channels():
-            self.manager.updated_my_channel(TorrentDef.load_from_dict(t))
-
-    @db_session
-    def flush(self):
-        _logger.debug('Flush')
-
-        self.community.mds.db.flush()
-
-
-class Service(TinyTriblerService):
-    def __init__(self, source_dir, testnet: bool, *args, **kwargs):
-        super().__init__(*args, **kwargs,
-                         components=[
-                             KnowledgeComponent(), MetadataStoreComponent(), KeyComponent(), Ipv8Component(),
-                             SocksServersComponent(), LibtorrentComponent(), GigachannelManagerComponent(),
-                             GigaChannelComponent()
-                         ])
-        self.config.general.testnet = testnet
-        self.source_dir = Path(source_dir)
-
-    def get_torrents_from_source(self):
-        return [(file, file.relative_to(self.source_dir)) for file in self.source_dir.rglob('*.torrent')]
-
-    def get_thumbnail(self):
-        f = self.source_dir / _thumbnail_file_name
-        return f.read_bytes() if f.exists() else None
-
-    def get_description(self):
-        f = self.source_dir / _description_file_name
-        return f.read_text() if f.exists() else None
-
-    async def create_channel(self, community, manager):
-        channel_helper = ChannelHelper(community, manager)
-        channel_name = self.source_dir.name
-
-        if not channel_helper.create_root_channel(channel_name):
-            return
-
-        torrents = self.get_torrents_from_source()
-
-        for file, relative_path in torrents:
-            channel_helper.add_torrent(file, relative_path)
-
-        channel_helper.add_thumbnail(self.get_thumbnail())
-        channel_helper.add_description(self.get_description())
-
-        channel_helper.commit()
-
-        _logger.info(f'{len(torrents)} torrents were added')
-
-    async def on_tribler_started(self):
-        await super().on_tribler_started()
-        gigachannel_component = self.session.get_instance(GigaChannelComponent)
-        gigachannel_manager_component = self.session.get_instance(GigachannelManagerComponent)
-        await self.create_channel(gigachannel_component.community,
-                                  gigachannel_manager_component.gigachannel_manager)
-
-
-if __name__ == "__main__":
-    _arguments = parse_args()
-    print(f"Arguments: {_arguments}")
-
-    setup_logger(_arguments.verbosity)
-
-    service = Service(
-        source_dir=Path(_arguments.source),
-        state_dir=Path(_arguments.tribler_dir),
-        testnet=_arguments.testnet
-    )
-
-    service.run(fragile=_arguments.fragile)
diff --git a/scripts/seedbox/generate_test_data.py b/scripts/seedbox/generate_test_data.py
deleted file mode 100644
index 7ce9275ffbe..00000000000
--- a/scripts/seedbox/generate_test_data.py
+++ /dev/null
@@ -1,43 +0,0 @@
-"""
-This script generates data for testing purposes.
-It generates a given number of folders, each containing a fixed number of files.
-
-For available parameters see the "parse_args" function below.
-"""
-import argparse
-import os
-from pathlib import Path
-
-_file_count_per_folder = 8
-_file_size = 128 * 1024
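A quick check of the README's "1GB" figure against the constants above and the `--count` default in `parse_args` below (8 files of 128 KiB per folder, 1024 folders):

```python
_file_count_per_folder = 8
_file_size = 128 * 1024        # 128 KiB per file
_default_folder_count = 1024   # the --count default below

total_bytes = _default_folder_count * _file_count_per_folder * _file_size
assert total_bytes == 2 ** 30  # exactly 1 GiB
```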
-
-
-def parse_args():
-    parser = argparse.ArgumentParser(description='Generate test data')
-
-    parser.add_argument('-d', '--destination', type=str, help='path to data folder', default='.')
-    parser.add_argument('-c', '--count', type=int, help='folders count', default=1024)
-
-    return parser.parse_args()
-
-
-def generate(arguments):
-    print(arguments)
-    destination = Path(arguments.destination)
-    destination.mkdir(exist_ok=True)
-
-    for folder_index in range(arguments.count):
-        folder = Path(destination / f'{folder_index}')
-        folder.mkdir(exist_ok=True)
-
-        for file_index in range(_file_count_per_folder):
-            f = Path(folder) / f'{file_index}.txt'
-            content = os.urandom(_file_size)
-            f.write_bytes(content)
-
-        print(folder)
-
-
-if __name__ == "__main__":
-    args = parse_args()
-    generate(args)
diff --git a/scripts/seedbox/seeder.py b/scripts/seedbox/seeder.py
deleted file mode 100644
index 0c1ca1a8ce8..00000000000
--- a/scripts/seedbox/seeder.py
+++ /dev/null
@@ -1,211 +0,0 @@
-"""
-This script generates torrents for the folders inside the input folder and seeds them.
-For available parameters see the "parse_args" function below.
-
-Folder structure:
-
-# my channel
-# ├ sub_directory
-# | ├ file1
-# | └ file2
-# ├ sub_directory2
-# | ├ file3
-# | └ file4
-# ├ file5
-# └ file6
-
-The script generates a torrent for each folder that contains files.
-Files can be excluded via the ignore patterns (see "_ignore_glob" below).
-"""
-import argparse
-import logging
-import os
-import time
-from collections import defaultdict
-from pathlib import Path
-
-import libtorrent
-import sentry_sdk
-
-# fmt: off
-# flake8: noqa
-
-UNLIMITED = -1
-
-_creator = 'TU Delft'
-
-_dht_routers = [
-    ('router.utorrent.com', 6881),
-    ('router.bittorrent.com', 6881),
-    ('dht.transmissionbt.com', 6881),
-    ('dht.aelitis.com', 6881),
-    ('router.bitcomet.com', 6881),
-]
-_port_range = (6881, 7000)
-_log_statistics_interval_in_sec = 10
-_add_torrent_delay_in_sec = 1
-_ignore_glob = [
-    '*DS_Store',
-    '*.torrent',
-    'thumbnail.png',
-    'description.md',
-]
-
-_logger = logging.getLogger('Seeder')
-
-sentry_sdk.init(
-    os.environ.get('SENTRY_URL'),
-    traces_sample_rate=1.0
-)
-
-
-def parse_args():
-    parser = argparse.ArgumentParser(description='Seed data by using the LibTorrent protocol')
-
-    parser.add_argument('-s', '--source', type=str, help='path to data folder', default='.')
-    parser.add_argument('-v', '--verbosity', help='increase output verbosity', action='store_true')
-    parser.add_argument('-t', '--testnet', help='testnet run', action='store_true')
-
-    return parser.parse_args()
-
-
-def setup_logger(verbosity):
-    logging_level = logging.DEBUG if verbosity else logging.INFO
-    logging.basicConfig(level=logging_level)
-
-
-def get_folders_with_files(source):
-    """ Return all folders that contain files
-
-    Args:
-        source: a source folder
-
-    Returns:
-        A dictionary where
-        * the key is a folder
-        * the value is the set of files it contains
-    """
-    result = {}
-
-    for file in Path(source).rglob('*'):
-        ignore = any(file.match(a) for a in _ignore_glob)
-        if file.is_file() and not ignore:
-            result.setdefault(file.parent, set()).add(file)
-
-    return result
-
-
-def create_torrents(folders, source):
-    _logger.info(f'Creating {len(folders)} torrent files...')
-
-    for folder in folders:
-        if 
folder.match(source): - continue - - torrent_file = folder.parent / f'{folder.name}.torrent' - - if not torrent_file.exists(): - original, encoded = create_torrent_from_folder(folder, folders[folder]) - torrent_file.write_bytes(encoded) - _logger.info(f'Created: {torrent_file}') - - yield original, folder - else: - _logger.info(f'Skipped (file already exists): {torrent_file}') - - encoded = torrent_file.read_bytes() - decoded = libtorrent.bdecode(encoded) - - yield decoded, folder - - -def create_torrent_from_folder(folder, files): - file_storage = libtorrent.file_storage() - file_storage.set_name(folder.name) - - for file in files: - relative = file.relative_to(folder.parent) - size = file.stat().st_size - - file_storage.add_file(str(relative), size) - - flags = libtorrent.create_torrent_flags_t.optimize - torrent = libtorrent.create_torrent(file_storage, flags=flags) - - torrent.set_creator(_creator) - libtorrent.set_piece_hashes(torrent, str(folder.parent)) - - torrent_data = torrent.generate() - return torrent_data, libtorrent.bencode(torrent_data) - - -def log_all_alerts(session): - for a in session.pop_alerts(): - if a.category() & libtorrent.alert.category_t.error_notification: - _logger.error(a) - else: - _logger.info(a) - - -def log_statistics(session, handlers, interval): - while True: - time.sleep(interval) - log_all_alerts(session) - - states = defaultdict(int) - errors = defaultdict(int) - - for h in handlers: - status = h.status() - states[status.state] += 1 - if status.errc.value() != 0: - errors[status.errc.message()] += 1 - - _logger.info(f'Torrents states: {states}') - if errors: - _logger.info(f'Torrents errors: {errors}') - - -def seed(torrents): - _logger.info(f'Create torrent session in port range: {_port_range}') - session = libtorrent.session() - session.listen_on(*_port_range) - for router in _dht_routers: - session.add_dht_router(*router) - - session.start_dht() - - session.apply_settings({ - 'active_seeds': UNLIMITED, - 'active_limit': UNLIMITED - }) - - handlers = [] - for torrent, folder in torrents: - torrent_info = libtorrent.torrent_info(torrent) - params = { - 'save_path': str(folder.parent), - 'ti': torrent_info, - 'name': folder.name, - } - - _logger.info(f'Add torrent: {params}') - result = session.add_torrent(params) - handlers.append(result) - - time.sleep(_add_torrent_delay_in_sec) - log_all_alerts(session) - - log_statistics(session, handlers, _log_statistics_interval_in_sec) - - -if __name__ == "__main__": - _arguments = parse_args() - print(f"Arguments: {_arguments}") - - setup_logger(_arguments.verbosity) - _folders = get_folders_with_files(_arguments.source) - _torrents = list(create_torrents(_folders, _arguments.source)) - if not _arguments.testnet: - seed(_torrents) diff --git a/src/tribler/core/components/gigachannel/__init__.py b/src/tribler/core/components/gigachannel/__init__.py deleted file mode 100644 index e69de29bb2d..00000000000 diff --git a/src/tribler/core/components/gigachannel/community/__init__.py b/src/tribler/core/components/gigachannel/community/__init__.py deleted file mode 100644 index e69de29bb2d..00000000000 diff --git a/src/tribler/core/components/gigachannel/community/gigachannel_community.py b/src/tribler/core/components/gigachannel/community/gigachannel_community.py deleted file mode 100644 index f00d939535c..00000000000 --- a/src/tribler/core/components/gigachannel/community/gigachannel_community.py +++ /dev/null @@ -1,272 +0,0 @@ -import time -import uuid -from binascii import unhexlify -from collections import 
defaultdict -from dataclasses import dataclass -from random import sample - -from anyio import Event, create_task_group, move_on_after -from ipv8.types import Peer -from pony.orm import db_session - -from tribler.core import notifications -from tribler.core.components.ipv8.discovery_booster import DiscoveryBooster -from tribler.core.components.metadata_store.db.serialization import CHANNEL_TORRENT -from tribler.core.components.metadata_store.remote_query_community.payload_checker import ObjState -from tribler.core.components.metadata_store.remote_query_community.remote_query_community import RemoteQueryCommunity -from tribler.core.components.metadata_store.utils import NoChannelSourcesException -from tribler.core.utilities.notifier import Notifier -from tribler.core.utilities.simpledefs import CHANNELS_VIEW_UUID -from tribler.core.utilities.unicode import hexlify - -minimal_blob_size = 200 -maximum_payload_size = 1024 -max_entries = maximum_payload_size // minimal_blob_size -max_search_peers = 5 - -happy_eyeballs_delay = 0.3 # Send request to another peer if the answer did not arrive in 0.3s - -max_address_cache_lifetime = 5.0 # seconds - - -@dataclass -class ChannelEntry: - timestamp: float - channel_version: int - - -class ChannelsPeersMapping: - def __init__(self, max_peers_per_channel=10): - self.max_peers_per_channel = max_peers_per_channel - self._channels_dict = defaultdict(set) - # Reverse mapping from peers to channels - self._peers_channels = defaultdict(set) - - def add(self, peer: Peer, channel_pk: bytes, channel_id: int): - id_tuple = (channel_pk, channel_id) - channel_peers = self._channels_dict[id_tuple] - - channel_peers.add(peer) - self._peers_channels[peer].add(id_tuple) - - if len(channel_peers) > self.max_peers_per_channel: - removed_peer = min(channel_peers, key=lambda x: x.last_response) - channel_peers.remove(removed_peer) - # Maintain the reverse mapping - self._peers_channels[removed_peer].remove(id_tuple) - if not self._peers_channels[removed_peer]: - self._peers_channels.pop(removed_peer) - - def remove_peer(self, peer): - for id_tuple in self._peers_channels[peer]: - self._channels_dict[id_tuple].discard(peer) - if not self._channels_dict[id_tuple]: - self._channels_dict.pop(id_tuple) - self._peers_channels.pop(peer) - - def get_last_seen_peers_for_channel(self, channel_pk: bytes, channel_id: int, limit=None): - id_tuple = (channel_pk, channel_id) - channel_peers = self._channels_dict.get(id_tuple, []) - return sorted(channel_peers, key=lambda x: x.last_response, reverse=True)[0:limit] - - -class GigaChannelCommunity(RemoteQueryCommunity): - community_id = unhexlify('d3512d0ff816d8ac672eab29a9c1a3a32e17cb13') - - def create_introduction_response( - self, - lan_socket_address, - socket_address, - identifier, - introduction=None, - extra_bytes=b'', - prefix=None, - new_style=False, - ): - # ACHTUNG! We add extra_bytes here to identify the newer, 7.6+ version RemoteQuery/GigaChannel community - # dialect, so that other 7.6+ are able to distinguish between the older and newer versions. - return super().create_introduction_response( - lan_socket_address, - socket_address, - identifier, - introduction=introduction, - prefix=prefix, - new_style=new_style, - ) - - def __init__( - self, *args, notifier: Notifier = None, **kwargs - ): # pylint: disable=unused-argument - # ACHTUNG! 
We create a separate instance of Network for this community because it - # walks aggressively and wants lots of peers, which can interfere with other communities - super().__init__(*args, **kwargs) - - self.notifier = notifier - - # This set contains all the peers that we queried for subscribed channels over time. - # It is emptied regularly. The purpose of this set is to work as a filter so we never query the same - # peer twice. If we do, this should happen really rarely - self.queried_peers = set() - - self.address_cache = {} - self.address_cache_created_at = time.time() - - self.discovery_booster = DiscoveryBooster() - self.discovery_booster.apply(self) - - self.channels_peers = ChannelsPeersMapping() - - def guess_address(self, interface): - # Address caching allows 100x speedup of EdgeWalk.take_step() in DiscoveryBooster, from 3.0 to 0.03 seconds. - # The overridden method can be removed after IPv8 adds internal caching of addresses. - now = time.time() - cache_lifetime = now - self.address_cache_created_at - if cache_lifetime > max_address_cache_lifetime: - self.address_cache.clear() - self.address_cache_created_at = now - - result = self.address_cache.get(interface) - if result is not None: - return result - - result = super().guess_address(interface) - self.address_cache[interface] = result - return result - - def get_random_peers(self, sample_size=None): - # Randomly sample sample_size peers from the complete list of our peers - all_peers = self.get_peers() - if sample_size is not None and sample_size < len(all_peers): - return sample(all_peers, sample_size) - return all_peers - - def introduction_response_callback(self, peer, dist, payload): - # ACHTUNG! Due to Dispersy legacy, it is possible for other peer to send us an introduction - # to ourselves (peer's public_key is not sent along with the introduction). To prevent querying - # ourselves, we add the check for blacklist_mids here, which by default contains our own peer. 
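The comments above describe a "query each peer at most once" filter with bounded memory, which the check below implements. A standalone sketch of the same pattern (the names are illustrative, not from this file):

```python
queried_peers = set()
QUERIED_PEERS_LIMIT = 1000  # stand-in for settings.queried_peers_limit


def should_query(mid: bytes) -> bool:
    """Return True at most once per peer mid between wholesale resets."""
    if mid in queried_peers:
        return False
    if len(queried_peers) >= QUERIED_PEERS_LIMIT:
        queried_peers.clear()  # cheap reset instead of per-entry LRU eviction
    queried_peers.add(mid)
    return True
```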
- if ( - peer.address in self.network.blacklist - or peer.mid in self.queried_peers - or peer.mid in self.network.blacklist_mids - ): - return - if len(self.queried_peers) >= self.settings.queried_peers_limit: - self.queried_peers.clear() - self.queried_peers.add(peer.mid) - self.send_remote_select_subscribed_channels(peer) - - def send_remote_select_subscribed_channels(self, peer): - def on_packet_callback(_, processing_results): - # We use responses for requests about subscribed channels to bump our local channels ratings - with db_session: - for c in (r.md_obj for r in processing_results if r.md_obj.metadata_type == CHANNEL_TORRENT): - self.mds.vote_bump(c.public_key, c.id_, peer.public_key.key_to_bin()[10:]) - self.channels_peers.add(peer, c.public_key, c.id_) - - # Notify GUI about the new channels - results = [ - r.md_obj.to_simple_dict() - for r in processing_results - if ( - r.obj_state == ObjState.NEW_OBJECT - and r.md_obj.metadata_type == CHANNEL_TORRENT - and r.md_obj.origin_id == 0 - ) - ] - if self.notifier and results: - self.notifier[notifications.channel_discovered]({"results": results, "uuid": str(CHANNELS_VIEW_UUID)}) - - request_dict = { - "metadata_type": [CHANNEL_TORRENT], - "subscribed": True, - "attribute_ranges": (("num_entries", 1, None),), - "complete_channel": True, - } - self.send_remote_select(peer, **request_dict, processing_callback=on_packet_callback) - - async def remote_select_channel_contents(self, **kwargs): - peers_to_query = self.get_known_subscribed_peers_for_node(kwargs["channel_pk"], kwargs["origin_id"]) - if not peers_to_query: - raise NoChannelSourcesException() - - result = [] - async with create_task_group() as tg: - got_at_least_one_response = Event() - - async def _send_remote_select(peer): - request = self.send_remote_select(peer, force_eva_response=True, **kwargs) - await request.processing_results - - # Stop execution if we already received the results from another coroutine - if result or got_at_least_one_response.is_set(): - return - - result.extend(request.processing_results.result()) - got_at_least_one_response.set() - - for peer in peers_to_query: - # Before issuing another request, check if we possibly already received a response - if got_at_least_one_response.is_set(): - break - - # Issue a request to another peer - tg.start_soon(_send_remote_select, peer) - with move_on_after(happy_eyeballs_delay): - await got_at_least_one_response.wait() - await got_at_least_one_response.wait() - - # Cancel the remaining requests so we don't have to wait for them to finish - tg.cancel_scope.cancel() - - request_results = [r.md_obj.to_simple_dict() for r in result] - return request_results - - def send_search_request(self, **kwargs): - # Send a remote query request to multiple random peers to search for some terms - request_uuid = uuid.uuid4() - - def notify_gui(request, processing_results): - results = [ - r.md_obj.to_simple_dict() - for r in processing_results - if r.obj_state in (ObjState.NEW_OBJECT, ObjState.UPDATED_LOCAL_VERSION) - ] - if self.notifier: - self.notifier[notifications.remote_query_results]( - {"results": results, "uuid": str(request_uuid), "peer": hexlify(request.peer.mid)}) - - # Try sending the request to at least some peers that we know have it - if "channel_pk" in kwargs and "origin_id" in kwargs: - peers_to_query = self.get_known_subscribed_peers_for_node( - kwargs["channel_pk"], kwargs["origin_id"], self.settings.max_mapped_query_peers - ) - else: - peers_to_query = self.get_random_peers(self.rqc_settings.max_query_peers) - 
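`remote_select_channel_contents` above races peers "happy eyeballs"-style: it asks one peer, gives it `happy_eyeballs_delay` to answer, then brings the next peer into the race, and cancels the rest once any answer lands. A self-contained sketch of that pattern with anyio, under the same imports the file uses; the function and its arguments are illustrative:

```python
import anyio

HAPPY_EYEBALLS_DELAY = 0.3  # mirrors happy_eyeballs_delay above


async def race_peers(peers, query):
    """Return the first response; callers should impose an overall timeout."""
    if not peers:
        return None
    results = []
    got_one = anyio.Event()

    async def ask(peer):
        response = await query(peer)
        if not got_one.is_set():
            results.append(response)
            got_one.set()

    async with anyio.create_task_group() as tg:
        for peer in peers:
            if got_one.is_set():
                break
            tg.start_soon(ask, peer)
            # Give this peer a head start before involving the next one
            with anyio.move_on_after(HAPPY_EYEBALLS_DELAY):
                await got_one.wait()
        await got_one.wait()
        tg.cancel_scope.cancel()  # drop the slower outstanding requests

    return results[0] if results else None
```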
- for p in peers_to_query: - self.send_remote_select(p, **kwargs, processing_callback=notify_gui) - - return request_uuid, peers_to_query - - def get_known_subscribed_peers_for_node(self, node_pk, node_id, limit=None): - # Determine the toplevel parent channel - root_id = node_id - with db_session: - node = self.mds.ChannelNode.get(public_key=node_pk, id_=node_id) - if node: - root_id = next((node.id_ for node in node.get_parent_nodes() if node.origin_id == 0), node.origin_id) - - return self.channels_peers.get_last_seen_peers_for_channel(node_pk, root_id, limit) - - def _on_query_timeout(self, request_cache): - if not request_cache.peer_responded: - self.channels_peers.remove_peer(request_cache.peer) - super()._on_query_timeout(request_cache) - - -class GigaChannelTestnetCommunity(GigaChannelCommunity): - """ - This community defines a testnet for the giga channels, used for testing purposes. - """ - - community_id = unhexlify('ad8cece0dfdb0e03344b59a4d31a38fe9812da9d') diff --git a/src/tribler/core/components/gigachannel/community/request.py b/src/tribler/core/components/gigachannel/community/request.py deleted file mode 100644 index 11bd2d4d22c..00000000000 --- a/src/tribler/core/components/gigachannel/community/request.py +++ /dev/null @@ -1,26 +0,0 @@ -from ipv8.requestcache import RandomNumberCache - - -class SearchRequestCache(RandomNumberCache): - """ - This request cache keeps track of all outstanding search requests within the GigaChannelCommunity. - """ - - def __init__(self, request_cache, uuid, peers): - super().__init__(request_cache, "remote-search-request") - self.request_cache = request_cache - self.uuid = uuid - - @property - def timeout_delay(self): - return 30.0 - - def on_timeout(self): - pass - - def remove_request(self): - if self.request_cache.has(self.prefix, self.number): - try: - self.request_cache.pop(self.prefix, self.number) - except KeyError: - pass diff --git a/src/tribler/core/components/gigachannel/community/settings.py b/src/tribler/core/components/gigachannel/community/settings.py deleted file mode 100644 index 88520fccb96..00000000000 --- a/src/tribler/core/components/gigachannel/community/settings.py +++ /dev/null @@ -1,17 +0,0 @@ -from pydantic import Field - -from tribler.core.config.tribler_config_section import TriblerConfigSection -from tribler.core.utilities.simpledefs import STATEDIR_CHANNELS_DIR - - -class ChantSettings(TriblerConfigSection): - enabled: bool = True - manager_enabled: bool = True - channel_edit: bool = False - channels_dir: str = STATEDIR_CHANNELS_DIR - testnet: bool = Field(default=False, env='CHANT_TESTNET') - - queried_peers_limit: int = 1000 - # The maximum number of peers that we got from channels to peers mapping, - # that must be queried in addition to randomly queried peers - max_mapped_query_peers = 3 diff --git a/src/tribler/core/components/gigachannel/community/tests/__init__.py b/src/tribler/core/components/gigachannel/community/tests/__init__.py deleted file mode 100644 index e69de29bb2d..00000000000 diff --git a/src/tribler/core/components/gigachannel/community/tests/test_gigachannel_community.py b/src/tribler/core/components/gigachannel/community/tests/test_gigachannel_community.py deleted file mode 100644 index 44cf7f4a8a1..00000000000 --- a/src/tribler/core/components/gigachannel/community/tests/test_gigachannel_community.py +++ /dev/null @@ -1,391 +0,0 @@ -import time -from collections.abc import Mapping -from dataclasses import asdict, dataclass, fields -from typing import Callable -from unittest.mock import 
AsyncMock, Mock - -import pytest -from ipv8.keyvault.crypto import default_eccrypto -from ipv8.peer import Peer -from pony.orm import db_session - -from tribler.core.components.gigachannel.community.gigachannel_community import ( - ChannelsPeersMapping, - GigaChannelCommunity, - NoChannelSourcesException, - happy_eyeballs_delay -) -from tribler.core.components.gigachannel.community.settings import ChantSettings -from tribler.core.components.ipv8.adapters_tests import TriblerTestBase -from tribler.core.components.metadata_store.db.store import MetadataStore -from tribler.core.components.metadata_store.remote_query_community.remote_query_community import EvaSelectRequest, \ - SelectRequest, RemoteSelectPayload, RemoteSelectPayloadEva, SelectResponsePayload -from tribler.core.components.metadata_store.remote_query_community.settings import RemoteQueryCommunitySettings -from tribler.core.components.metadata_store.utils import RequestTimeoutException -from tribler.core.utilities.notifier import Notifier -from tribler.core.utilities.path_util import Path -from tribler.core.utilities.utilities import random_infohash - -# pylint:disable=protected-access - -EMPTY_BLOB = b"" -U_CHANNEL = "ubuntu channel" -U_TORRENT = "ubuntu torrent" -CHANNEL_ID = 123 -BASE_PATH = 'tribler.core.components.metadata_store.remote_query_community.remote_query_community' -ID1, ID2, ID3 = range(3) - - -@dataclass -class ChannelKey(Mapping): - channel_pk: bytes - origin_id: int - - # The following methods allow for use as a Mapping (i.e., the "**key"-syntax) - def __iter__(self): - return iter(asdict(self)) - - def __getitem__(self, item): - return getattr(self, item) - - def __len__(self): - return len(fields(self)) - - -class TestGigaChannelUnits(TriblerTestBase): - overlay: Callable[[int], GigaChannelCommunity] - - def setUp(self): - super().setUp() - self.count = 0 - self.metadata_store_set = set() - self.initialize(GigaChannelCommunity, 3) - - async def tearDown(self): - for metadata_store in self.metadata_store_set: - metadata_store.shutdown() - await super().tearDown() - - def create_node(self, *args, **kwargs): - metadata_store = MetadataStore( - Path(self.temporary_directory()) / f"{self.count}.db", - Path(self.temporary_directory()), - default_eccrypto.generate_key("curve25519"), - disable_sync=True, - ) - self.metadata_store_set.add(metadata_store) - kwargs['metadata_store'] = metadata_store - kwargs['settings'] = ChantSettings() - kwargs['rqc_settings'] = RemoteQueryCommunitySettings() - node = super().create_node(*args, **kwargs) - - node.overlay.discovery_booster.finish() - notifier = Notifier(loop=self.loop) - notifier.notify = Mock() - node.overlay.notifier = notifier - - self.count += 1 - return node - - def channel_metadata(self, i): - return self.overlay(i).mds.ChannelMetadata - - def torrent_metadata(self, i): - return self.overlay(i).mds.TorrentMetadata - - def notifier(self, i): - return self.overlay(i).notifier - - def channel_pk(self, i): - return self.key_bin(i)[10:] - - def generate_torrents(self, overlay) -> ChannelKey: - private_key = default_eccrypto.generate_key("curve25519") - channel_key = ChannelKey(private_key.pub().key_to_bin()[10:], CHANNEL_ID) - with db_session: - for m in range(0, 50): - overlay.mds.TorrentMetadata( - title=f"bla-{m}", origin_id=channel_key.origin_id, infohash=random_infohash(), sign_with=private_key - ) - return channel_key - - async def test_gigachannel_search(self): - """ - Test searching several nodes for metadata entries based on title text - """ - - # We do not 
want the query back mechanism and introduction callback to interfere with this test - for node in self.nodes: - node.overlay.rqc_settings.max_channel_query_back = 0 - - with db_session: - # Add test metadata to node ID1 - self.channel_metadata(ID1).create_channel(U_CHANNEL, "") - self.channel_metadata(ID1).create_channel("debian channel", "") - # Add test metadata to node ID2 - self.torrent_metadata(ID2)(title=U_TORRENT, infohash=random_infohash()) - self.torrent_metadata(ID2)(title="debian torrent", infohash=random_infohash()) - - self.overlay(ID3).send_search_request(**{"txt_filter": "ubuntu*"}) - - await self.deliver_messages() - - # Check that the notifier callback was called on both entries - titles = sorted(call.args[1]["results"][0]["name"] for call in self.notifier(ID3).notify.call_args_list) - assert titles == [U_CHANNEL, U_TORRENT] - - with db_session: - assert self.overlay(ID3).mds.ChannelNode.select().count() == 2 - assert self.overlay(ID3).mds.ChannelNode.select(lambda g: g.title in (U_CHANNEL, U_TORRENT)).count() == 2 - - async def test_query_on_introduction(self): - """ - Test querying a peer that was just introduced to us. - """ - with self.assertReceivedBy(ID1, [SelectResponsePayload], message_filter=[SelectResponsePayload]): - self.overlay(ID2).send_introduction_request(self.peer(ID1)) - await self.deliver_messages() - self.assertIn(self.mid(ID1), self.overlay(ID2).queried_peers) - - # Make sure the same peer will not be queried twice in case the walker returns to it - with self.assertReceivedBy(ID1, [], message_filter=[SelectResponsePayload]): - self.overlay(ID2).send_introduction_request(self.peer(ID1)) - await self.deliver_messages() - - # Test clearing queried peers set when it outgrows its capacity - self.overlay(ID2).settings.queried_peers_limit = 2 - with self.assertReceivedBy(ID3, [SelectResponsePayload], message_filter=[SelectResponsePayload]): - self.overlay(ID2).send_introduction_request(self.peer(ID3)) - await self.deliver_messages() - self.assertEqual(len(self.overlay(ID2).queried_peers), 2) - - self.add_node_to_experiment(self.create_node()) - with self.assertReceivedBy(3, [SelectResponsePayload], message_filter=[SelectResponsePayload]): - self.overlay(ID2).send_introduction_request(self.peer(3)) - await self.deliver_messages() - # The set has been cleared, so the number of queried peers must be dropped back to 1 - self.assertEqual(len(self.overlay(ID2).queried_peers), 1) - - # Ensure that we're not going to query ourselves - with self.assertReceivedBy(ID1, [], message_filter=[SelectResponsePayload]): - self.overlay(ID2).send_introduction_request(self.peer(ID2)) - self.assertEqual(len(self.overlay(ID2).queried_peers), 1) - - async def test_remote_select_subscribed_channels(self): - """ - Test querying remote peers for subscribed channels and updating local votes accordingly. 
- """ - - # We do not want the query back mechanism to interfere with this test - self.overlay(ID2).rqc_settings.max_channel_query_back = 0 - - num_channels = 5 - - with db_session: - # Create one channel with zero contents, to check that only non-empty channels are served - self.channel_metadata(ID1).create_channel("channel sub", "") - # Create one channel that has not yet been processed (with local_version 0.0) - - def test_channels_peers_mapping_drop_excess_peers(self): - """ - Test dropping old excess peers from a channel to peers mapping - """ - mapping = ChannelsPeersMapping() - key = ChannelKey(self.channel_pk(ID1), CHANNEL_ID) - - num_excess_peers = 20 - t = time.time() - 1000 - first_peer_timestamp = t - for k in range(0, mapping.max_peers_per_channel + num_excess_peers): - peer = Peer(default_eccrypto.generate_key("very-low"), ("1.2.3.4", 5)) - peer.last_response = t - t += 1.0 - mapping.add(peer, *key.values()) - if k == 0: - first_peer_timestamp = peer.last_response - - chan_peers_3 = mapping.get_last_seen_peers_for_channel(*key.values(), limit=3) - assert len(chan_peers_3) == 3 - - chan_peers = mapping.get_last_seen_peers_for_channel(*key.values()) - assert len(chan_peers) == mapping.max_peers_per_channel - - assert chan_peers_3 == chan_peers[0:3] - assert chan_peers == sorted(chan_peers, key=lambda x: x.last_response, reverse=True) - - # Make sure only the older peers are dropped as excess - for p in chan_peers: - assert p.last_response > first_peer_timestamp - - # Test removing a peer directly, e.g. as a result of a query timeout - peer = Peer(default_eccrypto.generate_key("very-low"), ("1.2.3.4", 5)) - mapping.add(peer, *key.values()) - mapping.remove_peer(peer) - for p in chan_peers: - mapping.remove_peer(p) - - assert mapping.get_last_seen_peers_for_channel(*key.values()) == [] - - # Make sure the stuff is cleaned up - assert len(mapping._peers_channels) == 0 - assert len(mapping._channels_dict) == 0 - - def test_get_known_subscribed_peers_for_node(self): - key = default_eccrypto.generate_key("curve25519") - with db_session: - channel = self.channel_metadata(ID1)(origin_id=0, infohash=random_infohash(), sign_with=key) - folder1 = self.overlay(ID1).mds.CollectionNode(origin_id=channel.id_, sign_with=key) - folder2 = self.overlay(ID1).mds.CollectionNode(origin_id=folder1.id_, sign_with=key) - - orphan = self.overlay(ID1).mds.CollectionNode(origin_id=123123, sign_with=key) - - self.overlay(ID1).channels_peers.add(self.peer(ID2), channel.public_key, channel.id_) - expected = [self.peer(ID2)] - assert expected == self.overlay(ID1).get_known_subscribed_peers_for_node(channel.public_key, channel.id_) - assert expected == self.overlay(ID1).get_known_subscribed_peers_for_node(folder1.public_key, folder1.id_) - assert expected == self.overlay(ID1).get_known_subscribed_peers_for_node(folder2.public_key, folder2.id_) - assert [] == self.overlay(ID1).get_known_subscribed_peers_for_node(orphan.public_key, orphan.id_) - - async def test_remote_search_mapped_peers(self): - """ - Test using mapped peers for channel queries. 
- """ - key = ChannelKey(self.channel_pk(ID1), CHANNEL_ID) - self.network(ID3).remove_peer(self.peer(ID1)) - self.network(ID3).remove_peer(self.peer(ID2)) - self.overlay(ID3).channels_peers.add(self.peer(ID2), *key.values()) - - # The only source for peers is channels peers map - # The peer must have queried its known channel peer - with self.assertReceivedBy(ID2, [RemoteSelectPayload, SelectResponsePayload]): - self.overlay(ID3).send_search_request(**key) - await self.deliver_messages() - - async def test_drop_silent_peer(self): - # We do not want the query back mechanism to interfere with this test - self.overlay(ID2).rqc_settings.max_channel_query_back = 0 - self.overlay(ID2).channels_peers.add(self.peer(ID1), self.channel_pk(ID1), CHANNEL_ID) - - seen_peers = self.overlay(ID2).channels_peers.get_last_seen_peers_for_channel(self.channel_pk(ID1), CHANNEL_ID) - assert [self.peer(ID1)] == seen_peers - - with self.overlay(ID2).request_cache.passthrough(SelectRequest): - self.overlay(ID2).send_remote_select(self.peer(ID1), txt_filter="ubuntu*") - await self.deliver_messages() - - # the `remove_peer` function must have been called because of the timeout - seen_peers = self.overlay(ID2).channels_peers.get_last_seen_peers_for_channel(self.channel_pk(ID1), CHANNEL_ID) - assert [] == seen_peers - - async def test_drop_silent_peer_empty_response_packet(self): - # We do not want the query back mechanism to interfere with this test - self.overlay(ID2).rqc_settings.max_channel_query_back = 0 - self.overlay(ID2).channels_peers.add(self.peer(ID1), self.channel_pk(ID1), CHANNEL_ID) - - seen_peers = self.overlay(ID2).channels_peers.get_last_seen_peers_for_channel(self.channel_pk(ID1), CHANNEL_ID) - assert [self.peer(ID1)] == seen_peers - - # Now test that even in the case of an empty response packet, remove_peer is not called on timeout - self.overlay(ID2).send_remote_select(self.peer(ID1), txt_filter="ubuntu*") - await self.deliver_messages() - - # the `remove_peer` function must not have been called because of the timeout - seen_peers = self.overlay(ID2).channels_peers.get_last_seen_peers_for_channel(self.channel_pk(ID1), CHANNEL_ID) - assert [self.peer(ID1)] == seen_peers - - async def test_remote_select_channel_contents(self): - """ - Test awaiting for response from remote peer - """ - key = self.generate_torrents(self.overlay(ID2)) - with db_session: - self.overlay(ID1).channels_peers.add(self.peer(ID2), *key.values()) - generated = [p.to_simple_dict() for p in self.overlay(ID2).mds.get_entries(**key)] - - results = await self.overlay(ID1).remote_select_channel_contents(**key) - - assert results == generated - assert len(results) == 50 - - async def test_remote_select_channel_contents_empty(self): - """ - Test awaiting for response from remote peer and getting empty results - """ - key = ChannelKey(self.channel_pk(ID3), CHANNEL_ID) - with db_session: - self.overlay(ID1).channels_peers.add(self.peer(ID2), *key.values()) - - results = await self.overlay(ID1).remote_select_channel_contents(**key) - - assert [] == results - - async def test_remote_select_channel_timeout(self): - key = self.generate_torrents(self.overlay(ID2)) - with db_session: - self.overlay(ID1).channels_peers.add(self.peer(ID2), *key.values()) - self.overlay(ID2).endpoint.close() - - with pytest.raises(RequestTimeoutException): - with self.overlay(ID1).request_cache.passthrough(EvaSelectRequest): # Immediately timeout the cache - await self.overlay(ID1).remote_select_channel_contents(**key) - - async def 
test_remote_select_channel_no_peers(self): - key = self.generate_torrents(self.overlay(ID2)) - - with pytest.raises(NoChannelSourcesException): - await self.overlay(ID1).remote_select_channel_contents(**key) - - async def test_remote_select_channel_contents_happy_eyeballs(self): - """ - Test trying to connect to the first server, then timing out and falling back to the second one - """ - key = self.generate_torrents(self.overlay(ID3)) - with db_session: - self.overlay(ID1).channels_peers.add(self.peer(ID2), *key.values()) - self.overlay(ID1).channels_peers.add(self.peer(ID3), *key.values()) - - # Neither responds early, so each should act as each others fallback. - # Neither responds at all actually, so the remote select will time out. - self.overlay(ID2)._on_remote_select_basic = AsyncMock() - self.overlay(ID3)._on_remote_select_basic = AsyncMock() - - # Check that ID2 received a call - with self.assertReceivedBy(ID2, [RemoteSelectPayloadEva]): - # Check that ID3 received a call - with self.assertReceivedBy(ID3, [RemoteSelectPayloadEva]): - # Make sure that happy_eyeballs_delay finishes. - # Both ID2 and ID3 should then have received a request (asserted by assertReceivedBy) - with self.overlay(ID1).request_cache.passthrough(timeout=happy_eyeballs_delay + 0.05): - with self.assertRaises(RequestTimeoutException): - await self.overlay(ID1).remote_select_channel_contents(**key) diff --git a/src/tribler/core/components/gigachannel/gigachannel_component.py b/src/tribler/core/components/gigachannel/gigachannel_component.py deleted file mode 100644 index 527f57a75d9..00000000000 --- a/src/tribler/core/components/gigachannel/gigachannel_component.py +++ /dev/null @@ -1,50 +0,0 @@ -from ipv8.peerdiscovery.network import Network - -from tribler.core.components.component import Component -from tribler.core.components.database.database_component import DatabaseComponent -from tribler.core.components.gigachannel.community.gigachannel_community import ( - GigaChannelCommunity, - GigaChannelTestnetCommunity, -) -from tribler.core.components.gigachannel.community.sync_strategy import RemovePeers -from tribler.core.components.ipv8.ipv8_component import INFINITE, Ipv8Component -from tribler.core.components.knowledge.knowledge_component import KnowledgeComponent -from tribler.core.components.metadata_store.metadata_store_component import MetadataStoreComponent -from tribler.core.components.reporter.reporter_component import ReporterComponent - - -class GigaChannelComponent(Component): - community: GigaChannelCommunity = None - _ipv8_component: Ipv8Component = None - - async def run(self): - await super().run() - await self.get_component(ReporterComponent) - - config = self.session.config - notifier = self.session.notifier - - self._ipv8_component = await self.require_component(Ipv8Component) - metadata_store_component = await self.require_component(MetadataStoreComponent) - db_component = await self.get_component(DatabaseComponent) - - giga_channel_cls = GigaChannelTestnetCommunity if config.general.testnet else GigaChannelCommunity - community = giga_channel_cls( - self._ipv8_component.peer, - self._ipv8_component.ipv8.endpoint, - Network(), - notifier=notifier, - settings=config.chant, - rqc_settings=config.remote_query_community, - metadata_store=metadata_store_component.mds, - max_peers=50, - tribler_db=db_component.db if db_component else None - ) - self.community = community - self._ipv8_component.initialise_community_by_default(community, default_random_walk_max_peers=30) - 
self._ipv8_component.ipv8.add_strategy(community, RemovePeers(community), INFINITE) - - async def shutdown(self): - await super().shutdown() - if self._ipv8_component and self.community: - await self._ipv8_component.unload_community(self.community) diff --git a/src/tribler/core/components/gigachannel/tests/test_gigachannel_component.py b/src/tribler/core/components/gigachannel/tests/test_gigachannel_component.py deleted file mode 100644 index bd5e75b66bc..00000000000 --- a/src/tribler/core/components/gigachannel/tests/test_gigachannel_component.py +++ /dev/null @@ -1,23 +0,0 @@ -from tribler.core.components.database.database_component import DatabaseComponent -from tribler.core.components.gigachannel.gigachannel_component import GigaChannelComponent -from tribler.core.components.ipv8.ipv8_component import Ipv8Component -from tribler.core.components.key.key_component import KeyComponent -from tribler.core.components.knowledge.knowledge_component import KnowledgeComponent -from tribler.core.components.metadata_store.metadata_store_component import MetadataStoreComponent -from tribler.core.components.session import Session - - -# pylint: disable=protected-access - - -async def test_giga_channel_component(tribler_config): - tribler_config.ipv8.enabled = True - tribler_config.libtorrent.enabled = True - tribler_config.chant.enabled = True - components = [DatabaseComponent(), KnowledgeComponent(), MetadataStoreComponent(), KeyComponent(), Ipv8Component(), - GigaChannelComponent()] - async with Session(tribler_config, components) as session: - comp = session.get_instance(GigaChannelComponent) - assert comp.started_event.is_set() and not comp.failed - assert comp.community - assert comp._ipv8_component diff --git a/src/tribler/core/components/gigachannel_manager/__init__.py b/src/tribler/core/components/gigachannel_manager/__init__.py deleted file mode 100644 index e69de29bb2d..00000000000 diff --git a/src/tribler/core/components/gigachannel_manager/gigachannel_manager.py b/src/tribler/core/components/gigachannel_manager/gigachannel_manager.py deleted file mode 100644 index 523f3fbfbde..00000000000 --- a/src/tribler/core/components/gigachannel_manager/gigachannel_manager.py +++ /dev/null @@ -1,328 +0,0 @@ -import asyncio -from asyncio import CancelledError, wait_for -from pathlib import Path - -from ipv8.taskmanager import TaskManager, task -from pony.orm import db_session - -from tribler.core import notifications -from tribler.core.components.libtorrent.download_manager.download_config import DownloadConfig -from tribler.core.components.libtorrent.download_manager.download_manager import DownloadManager -from tribler.core.components.libtorrent.torrentdef import TorrentDef -from tribler.core.components.metadata_store.db.orm_bindings.channel_node import COMMITTED -from tribler.core.components.metadata_store.db.serialization import CHANNEL_TORRENT -from tribler.core.components.metadata_store.db.store import MetadataStore -from tribler.core.utilities.db_corruption_handling.base import DatabaseIsCorrupted -from tribler.core.utilities.notifier import Notifier -from tribler.core.utilities.pony_utils import run_threaded -from tribler.core.utilities.simpledefs import DownloadStatus -from tribler.core.utilities.unicode import hexlify - -PROCESS_CHANNEL_DIR = 1 -REMOVE_CHANNEL_DOWNLOAD = 2 -CLEANUP_UNSUBSCRIBED_CHANNEL = 3 - - -class GigaChannelManager(TaskManager): - """ - This class represents the main manager for gigachannels. 
- It provides methods to manage channels, download new channels or remove existing ones. - """ - - def __init__( - self, - state_dir: Path = None, - metadata_store: MetadataStore = None, - notifier: Notifier = None, - download_manager: DownloadManager = None, - ): - super().__init__() - self.notifier = notifier - self.download_manager = download_manager - self.mds = metadata_store - self.state_dir = state_dir - - # We queue up processing of the channels because we do it in a separate thread, and we don't want - # to run more that one of these simultaneously - self.channels_processing_queue = {} - self.processing = False - - def start(self): - """ - The Metadata Store checks the database at regular intervals to see if new channels are available for preview - or subscribed channels require updating. - """ - - self.register_task("Check and regen personal channels", self.check_and_regen_personal_channels) - - channels_check_interval = 5.0 # seconds - self.register_task( - "Process channels download queue and remove cruft", self.service_channels, interval=channels_check_interval - ) - - async def check_and_regen_personal_channels(self): - # Test if our channels are there, but we don't share these because Tribler was closed unexpectedly - try: - with db_session: - for channel in self.mds.ChannelMetadata.get_my_channels().where(lambda g: g.status == COMMITTED): - channel_download = self.download_manager.get_download(bytes(channel.infohash)) - if channel_download is None: - self._logger.warning( - "Torrent for personal channel %s %i does not exist.", - hexlify(channel.public_key), - channel.id_, - ) - self.regenerate_channel_torrent(channel.public_key, channel.id_) - else: - self.register_task( - f"Check personal channel {hexlify(channel.public_key), channel.id_}", - self.check_and_regen_personal_channel_torrent, - channel.public_key, - channel.id_, - channel_download, - ) - except DatabaseIsCorrupted: - raise # re-raise this exception and terminate the Core process - except Exception: - self._logger.exception("Error when tried to resume personal channel seeding on GigaChannel Manager startup") - - @task - async def regenerate_channel_torrent(self, channel_pk, channel_id): - self._logger.info("Regenerating personal channel %s %i", hexlify(channel_pk), channel_id) - with db_session: - channel = self.mds.ChannelMetadata.get(public_key=channel_pk, id_=channel_id) - if channel is None: - self._logger.warning("Tried to regenerate non-existing channel %s %i", hexlify(channel_pk), channel_id) - return None - channel_dirname = channel.dirname - for d in self.download_manager.get_downloads_by_name(channel_dirname): - await self.download_manager.remove_download(d, remove_content=True) - with db_session: - channel = self.mds.ChannelMetadata.get_for_update(public_key=channel_pk, id_=channel_id) - regenerated = channel.consolidate_channel_torrent() - # If the user created their channel, but added no torrents to it, - # the channel torrent will not be created. 
- if regenerated is None: - return None - tdef = TorrentDef.load_from_dict(regenerated) - self.updated_my_channel(tdef) - return tdef - - async def check_and_regen_personal_channel_torrent(self, channel_pk, channel_id, channel_download, timeout=60): - try: - await wait_for(channel_download.wait_for_status(DownloadStatus.SEEDING), timeout=timeout) - except asyncio.TimeoutError: - self._logger.warning("Time out waiting for personal channel %s %i to seed", hexlify(channel_pk), channel_id) - await self.regenerate_channel_torrent(channel_pk, channel_id) - - async def shutdown(self): - """ - Stop the gigachannel manager. - """ - await self.shutdown_task_manager() - - def remove_cruft_channels(self): - """ - Assembles a list of obsolete channel torrents to be removed. - The list is formed from older versions of channels we are subscribed to and from channel torrents we are not - subscribed to (i.e. we recently unsubscribed from these). The unsubscribed channels are removed completely - with their contents, while in the case of older versions the files are left in place because the newer version - possibly uses them. - :return: list of tuples (download_to_remove=download, remove_files=Bool) - """ - with db_session: - # FIXME: if someone is subscribed to more than 1000 channels, they are in trouble... - channels = self.mds.get_entries(last=1000, subscribed=True, metadata_type=CHANNEL_TORRENT) - subscribed_infohashes = [bytes(c.infohash) for c in list(channels)] - dirnames = [c.dirname for c in channels] - - # TODO: add some more advanced logic for removal of older channel versions - cruft_list = [ - (d, d.get_def().get_name_utf8() not in dirnames) - for d in self.download_manager.get_channel_downloads() - if bytes(d.get_def().infohash) not in subscribed_infohashes - ] - - for d, remove_content in cruft_list: - self.channels_processing_queue[d.get_def().infohash] = (REMOVE_CHANNEL_DOWNLOAD, (d, remove_content)) - - async def service_channels(self): - if self.processing: - return - try: - self.clean_unsubscribed_channels() - except Exception: - self._logger.exception("Error when deleting unsubscribed channels") - try: - self.remove_cruft_channels() - except Exception: - self._logger.exception("Error when tried to check for cruft channels") - try: - self.check_channels_updates() - except Exception: - self._logger.exception("Error when checking for channel updates") - try: - self.process_queued_channels() - except Exception: - self._logger.exception("Error when tried to start processing queued channel torrents changes") - - @task - async def process_queued_channels(self): - self.processing = True - while self.channels_processing_queue: - infohash, (action, data) = next(iter(self.channels_processing_queue.items())) - self.channels_processing_queue.pop(infohash) - if action == PROCESS_CHANNEL_DIR: - await self.process_channel_dir_threaded(data) # data is a channel object (used read-only!) - elif action == REMOVE_CHANNEL_DOWNLOAD: - await self.remove_channel_download(data) # data is a tuple (download, remove_content bool) - elif action == CLEANUP_UNSUBSCRIBED_CHANNEL: - self.cleanup_channel(data) # data is a tuple (public_key, id_) - self.processing = False - - def check_channels_updates(self): - """ - Check whether there are channels that are updated. If so, download the new version of the channel. 
- """ - # FIXME: These naughty try-except-pass workarounds are necessary to keep the loop going in all circumstances - - with db_session: - channels = list(self.mds.ChannelMetadata.get_updated_channels()) - - for channel in channels: - try: - infohash = bytes(channel.infohash) - if self.download_manager.metainfo_requests.get(infohash): - continue - if not self.download_manager.download_exists(infohash): - self._logger.info( - "Downloading new channel version %s ver %i->%i", - channel.dirname, - channel.local_version, - channel.timestamp, - ) - self.download_channel(channel) - continue - - channel_download = self.download_manager.get_download(infohash) - if channel_download and channel_download.get_state().get_status() == DownloadStatus.SEEDING: - self._logger.info( - "Processing previously downloaded, but unprocessed channel torrent %s ver %i->%i", - channel.dirname, - channel.local_version, - channel.timestamp, - ) - self.channels_processing_queue[channel.infohash] = (PROCESS_CHANNEL_DIR, channel) - except Exception as e: - self._logger.exception("Error when tried to download a newer version of channel " - f"{hexlify(channel.public_key)}: {type(e).__name__}: {e}") - - async def remove_channel_download(self, to_remove): - """ - :param to_remove: a tuple (download_to_remove=download, remove_files=Bool) - """ - - # TODO: make file removal from older versions safe (i.e. check if it overlaps with newer downloads) - - d, remove_content = to_remove - try: - await self.download_manager.remove_download(d, remove_content=remove_content) - except Exception as e: - self._logger.error("Error when removing the channel download: %s", e) - - @task - async def download_channel(self, channel): - """ - Download a channel with a given infohash and title. - :param channel: The channel metadata ORM object. - """ - - metainfo = await self.download_manager.get_metainfo(bytes(channel.infohash), timeout=60, hops=0) - if metainfo is None: - # Timeout looking for the channel metainfo. Probably, there are no seeds. 
-
-    def check_channels_updates(self):
-        """
-        Check whether there are channels that are updated. If so, download the new version of the channel.
-        """
-        # FIXME: These naughty try-except-pass workarounds are necessary to keep the loop going in all circumstances
-
-        with db_session:
-            channels = list(self.mds.ChannelMetadata.get_updated_channels())
-
-        for channel in channels:
-            try:
-                infohash = bytes(channel.infohash)
-                if self.download_manager.metainfo_requests.get(infohash):
-                    continue
-                if not self.download_manager.download_exists(infohash):
-                    self._logger.info(
-                        "Downloading new channel version %s ver %i->%i",
-                        channel.dirname,
-                        channel.local_version,
-                        channel.timestamp,
-                    )
-                    self.download_channel(channel)
-                    continue
-
-                channel_download = self.download_manager.get_download(infohash)
-                if channel_download and channel_download.get_state().get_status() == DownloadStatus.SEEDING:
-                    self._logger.info(
-                        "Processing previously downloaded, but unprocessed channel torrent %s ver %i->%i",
-                        channel.dirname,
-                        channel.local_version,
-                        channel.timestamp,
-                    )
-                    self.channels_processing_queue[channel.infohash] = (PROCESS_CHANNEL_DIR, channel)
-            except Exception as e:
-                self._logger.exception("Error while downloading a newer version of channel "
-                                       f"{hexlify(channel.public_key)}: {type(e).__name__}: {e}")
-
-    async def remove_channel_download(self, to_remove):
-        """
-        :param to_remove: a tuple (download_to_remove=download, remove_files=Bool)
-        """
-
-        # TODO: make file removal from older versions safe (i.e. check if it overlaps with newer downloads)
-
-        d, remove_content = to_remove
-        try:
-            await self.download_manager.remove_download(d, remove_content=remove_content)
-        except Exception as e:
-            self._logger.error("Error when removing the channel download: %s", e)
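# NOTE: illustrative sketch by the editor, not part of the original files.
# The per-channel branch in check_channels_updates() above has four outcomes;
# roughly, assuming the same download-manager query methods:

from tribler.core.utilities.simpledefs import DownloadStatus

def classify(dm, infohash):
    if dm.metainfo_requests.get(infohash):
        return "skip"       # a metainfo lookup is already in flight
    if not dm.download_exists(infohash):
        return "download"   # fetch the newer channel torrent
    download = dm.get_download(infohash)
    if download and download.get_state().get_status() == DownloadStatus.SEEDING:
        return "process"    # fully downloaded, but not yet processed locally
    return "wait"           # still downloading; re-check on the next pass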
-
-    @task
-    async def download_channel(self, channel):
-        """
-        Download a channel with a given infohash and title.
-        :param channel: The channel metadata ORM object.
-        """
-
-        metainfo = await self.download_manager.get_metainfo(bytes(channel.infohash), timeout=60, hops=0)
-        if metainfo is None:
-            # Timeout looking for the channel metainfo. Probably, there are no seeds.
-            # TODO: count the number of tries we had with the channel, so we can stop trying eventually
-            return
-        try:
-            if metainfo[b'info'][b'name'].decode('utf-8') != channel.dirname:
-                # Malformed channel
-                # TODO: stop trying to download this channel until it is updated with a new infohash
-                return
-        except (KeyError, TypeError):
-            return
-
-        dcfg = DownloadConfig(state_dir=self.state_dir)
-        dcfg.set_dest_dir(self.mds.channels_dir)
-        dcfg.set_channel_download(True)
-        tdef = TorrentDef(metainfo=metainfo)
-
-        download = await self.download_manager.start_download(tdef=tdef, config=dcfg, hidden=True)
-        try:
-            await download.future_finished
-        except CancelledError:
-            pass
-        else:
-            self.channels_processing_queue[channel.infohash] = (PROCESS_CHANNEL_DIR, channel)
-        return download
-
-    async def process_channel_dir_threaded(self, channel):
-        mds: MetadataStore = self.mds
-
-        def _process_download():
-            channel_dirname = mds.get_channel_dir_path(channel)
-            mds.process_channel_dir(channel_dirname, channel.public_key, channel.id_, external_thread=True)
-
-        try:
-            await run_threaded(mds.db, _process_download)
-        except Exception as e:  # pylint: disable=broad-except  # pragma: no cover
-            self._logger.error("Error when processing channel dir download: %s", e)
-
-        with db_session:
-            updated_channel = self.mds.ChannelMetadata.get(public_key=channel.public_key, id_=channel.id_)
-            channel_dict = updated_channel.to_simple_dict() if updated_channel else None
-        if updated_channel:
-            self.notifier[notifications.channel_entity_updated](channel_dict)
-
-    @task
-    async def updated_my_channel(self, tdef):
-        """
-        Notify the core that we updated our channel.
-        """
-        with db_session:
-            my_channel = self.mds.ChannelMetadata.get(infohash=tdef.get_infohash())
-        if (
-            my_channel
-            and my_channel.status == COMMITTED
-            and not self.download_manager.download_exists(bytes(my_channel.infohash))
-        ):
-            dcfg = DownloadConfig(state_dir=self.state_dir)
-            dcfg.set_dest_dir(self.mds.channels_dir)
-            dcfg.set_channel_download(True)
-            return await self.download_manager.start_download(tdef=tdef, config=dcfg)
-
-    @db_session
-    def clean_unsubscribed_channels(self):
-
-        unsubscribed_list = list(
-            self.mds.ChannelMetadata.select(
-                lambda g: not g.subscribed and g.local_version > 0 and g.metadata_type == CHANNEL_TORRENT
-            )
-        )  # do not delete `g.metadata_type == CHANNEL_TORRENT` condition, it is used by partial index!
-
-        for channel in unsubscribed_list:
-            self.channels_processing_queue[channel.infohash] = (
-                CLEANUP_UNSUBSCRIBED_CHANNEL,
-                (channel.public_key, channel.id_),
-            )
-
-    def cleanup_channel(self, to_cleanup):
-        public_key, id_ = to_cleanup
-        # TODO: Maybe run it threaded?
-        try:
-            with db_session:
-                channel = self.mds.ChannelMetadata.get_for_update(public_key=public_key, id_=id_)
-                if not channel:
-                    return
-                channel.local_version = 0
-                channel.contents.delete(bulk=True)
-        except Exception as e:
-            self._logger.warning("Exception while cleaning unsubscribed channel: %s", str(e))
diff --git a/src/tribler/core/components/gigachannel_manager/gigachannel_manager_component.py b/src/tribler/core/components/gigachannel_manager/gigachannel_manager_component.py
deleted file mode 100644
index 300e6db9b05..00000000000
--- a/src/tribler/core/components/gigachannel_manager/gigachannel_manager_component.py
+++ /dev/null
@@ -1,30 +0,0 @@
-from tribler.core.components.component import Component
-from tribler.core.components.gigachannel_manager.gigachannel_manager import GigaChannelManager
-from tribler.core.components.libtorrent.libtorrent_component import LibtorrentComponent
-from tribler.core.components.metadata_store.metadata_store_component import MetadataStoreComponent
-
-
-class GigachannelManagerComponent(Component):
-    gigachannel_manager: GigaChannelManager = None
-
-    async def run(self):
-        await super().run()
-
-        config = self.session.config
-        notifier = self.session.notifier
-
-        libtorrent_component = await self.require_component(LibtorrentComponent)
-        download_manager = libtorrent_component.download_manager if libtorrent_component else None
-
-        metadata_store_component = await self.require_component(MetadataStoreComponent)
-
-        self.gigachannel_manager = GigaChannelManager(
-            notifier=notifier, metadata_store=metadata_store_component.mds, download_manager=download_manager
-        )
-        if not config.gui_test_mode:
-            self.gigachannel_manager.start()
-
-    async def shutdown(self):
-        await super().shutdown()
-        if self.gigachannel_manager:
-            await self.gigachannel_manager.shutdown()
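# NOTE: illustrative sketch by the editor, not part of the original files.
# The component above is lifecycle glue around GigaChannelManager. A rough
# standalone equivalent, assuming already-constructed notifier, metadata store
# and download manager objects:

from tribler.core.components.gigachannel_manager.gigachannel_manager import GigaChannelManager

async def run_manager(notifier, mds, download_manager):
    manager = GigaChannelManager(notifier=notifier, metadata_store=mds,
                                 download_manager=download_manager)
    manager.start()               # schedules the periodic service_channels() pass
    try:
        ...                       # application work happens here
    finally:
        await manager.shutdown()  # cancels pending work via shutdown_task_manager()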
diff --git a/src/tribler/core/components/gigachannel_manager/tests/__init__.py b/src/tribler/core/components/gigachannel_manager/tests/__init__.py
deleted file mode 100644
index e69de29bb2d..00000000000
diff --git a/src/tribler/core/components/gigachannel_manager/tests/test_gigachannel_manager.py b/src/tribler/core/components/gigachannel_manager/tests/test_gigachannel_manager.py
deleted file mode 100644
index d814fe26308..00000000000
--- a/src/tribler/core/components/gigachannel_manager/tests/test_gigachannel_manager.py
+++ /dev/null
@@ -1,409 +0,0 @@
-import asyncio
-import os
-import random
-from asyncio import Future
-from datetime import datetime
-from pathlib import Path
-from unittest.mock import AsyncMock, MagicMock, patch
-
-import pytest
-from ipv8.util import succeed
-from pony.orm import db_session
-
-from tribler.core.components.gigachannel_manager.gigachannel_manager import GigaChannelManager
-from tribler.core.components.libtorrent.torrentdef import TorrentDef
-from tribler.core.components.metadata_store.db.orm_bindings.channel_node import NEW
-from tribler.core.tests.tools.base_test import MockObject
-from tribler.core.tests.tools.common import TORRENT_UBUNTU_FILE
-from tribler.core.utilities.simpledefs import DownloadStatus
-from tribler.core.utilities.utilities import random_infohash
-
-# pylint: disable=redefined-outer-name
-
-
-update_metainfo = None
-
-
-@pytest.fixture
-def torrent_template():
-    return {"title": "", "infohash": b"", "torrent_date": datetime(1970, 1, 1), "tags": "video"}
-
-
-@pytest.fixture
-async def personal_channel(metadata_store):
-    global update_metainfo
-    tdef = await TorrentDef.load(TORRENT_UBUNTU_FILE)
-    with db_session:
-        chan = metadata_store.ChannelMetadata.create_channel(title="my test chan", description="test")
-        chan.add_torrent_to_channel(tdef, None)
-        update_metainfo = chan.commit_channel_torrent()
-        return chan
-
-
-@pytest.fixture(name="gigachannel_manager")
-async def gigachannel_manager_fixture(metadata_store):
-    chanman = GigaChannelManager(
-        state_dir=metadata_store.channels_dir.parent,
-        metadata_store=metadata_store,
-        download_manager=MagicMock(),
-        notifier=MagicMock()
-    )
-    yield chanman
-    await chanman.shutdown()
-
-
-async def test_regen_personal_channel_no_torrent(personal_channel, gigachannel_manager):
-    """
-    Test regenerating a non-existing personal channel torrent at startup
-    """
-    gigachannel_manager.download_manager.get_download = lambda _: None
-    gigachannel_manager.regenerate_channel_torrent = MagicMock()
-    await gigachannel_manager.check_and_regen_personal_channels()
-    gigachannel_manager.regenerate_channel_torrent.assert_called_once()
-
-
-async def test_regen_personal_channel_damaged_torrent(personal_channel, gigachannel_manager):
-    """
-    Test regenerating a damaged personal channel torrent at startup
-    """
-    complete = Future()
-
-    async def mock_regen(*_, **__):
-        complete.set_result(True)
-
-    gigachannel_manager.check_and_regen_personal_channel_torrent = mock_regen
-    gigachannel_manager.start()
-    await complete
-
-
-async def test_regenerate_channel_torrent(personal_channel, metadata_store, gigachannel_manager):
-    with db_session:
-        chan_pk, chan_id = personal_channel.public_key, personal_channel.id_
-        channel_dir = Path(metadata_store.ChannelMetadata._channels_dir) / Path(personal_channel.dirname)
-        for f in channel_dir.iterdir():
-            f.unlink()
-
-    # Test trying to regenerate a non-existing channel
-    assert await gigachannel_manager.regenerate_channel_torrent(chan_pk, chan_id + 1) is None
-
-    # MagicMock existing downloads removal-related functions
-    gigachannel_manager.download_manager.get_downloads_by_name = lambda *_: [MagicMock()]
-    downloads_to_remove = []
-
-    async def mock_remove_download(download_obj, **_):
-        downloads_to_remove.append(download_obj)
-
-    gigachannel_manager.download_manager.remove_download = mock_remove_download
-
-    # Test regenerating an empty channel
-    metadata_store.ChannelMetadata.consolidate_channel_torrent = lambda *_: None
-    assert await gigachannel_manager.regenerate_channel_torrent(chan_pk, chan_id) is None
-    assert len(downloads_to_remove) == 1
-
-    # Test regenerating a non-empty channel
-    gigachannel_manager.updated_my_channel = MagicMock()
-    metadata_store.ChannelMetadata.consolidate_channel_torrent = lambda *_: MagicMock()
-    with patch("tribler.core.components.libtorrent.torrentdef.TorrentDef.load_from_dict"):
-        await gigachannel_manager.regenerate_channel_torrent(chan_pk, chan_id)
-        gigachannel_manager.updated_my_channel.assert_called_once()
-
-
-async def test_updated_my_channel(personal_channel, gigachannel_manager, tmpdir):
-    tdef = TorrentDef.load_from_dict(update_metainfo)
-    gigachannel_manager.download_manager.start_download = AsyncMock()
-    gigachannel_manager.download_manager.download_exists = lambda *_: False
-    await gigachannel_manager.updated_my_channel(tdef)
-    gigachannel_manager.download_manager.start_download.assert_called_once()
-
-
-async def test_check_and_regen_personal_channel_torrent_wait(personal_channel, gigachannel_manager):
-    # Test wait for status OK
-    await gigachannel_manager.check_and_regen_personal_channel_torrent(
-        channel_pk=personal_channel.public_key,
-        channel_id=personal_channel.id_,
channel_download=MagicMock(wait_for_status=AsyncMock()), - timeout=0.5 - ) - - -async def test_check_and_regen_personal_channel_torrent_sleep(personal_channel, gigachannel_manager): - async def mock_wait(*_): - await asyncio.sleep(3) - - mock_regen = AsyncMock() - with patch.object(GigaChannelManager, 'regenerate_channel_torrent', mock_regen): - # Test timeout waiting for seeding state and then regen - await gigachannel_manager.check_and_regen_personal_channel_torrent( - channel_pk=personal_channel.public_key, - channel_id=personal_channel.id_, - channel_download=MagicMock(wait_for_status=mock_wait), - timeout=0.5 - ) - mock_regen.assert_called_once() - - -async def test_check_channels_updates(personal_channel, gigachannel_manager, metadata_store): - torrents_added = 0 - # We add our personal channel in an inconsistent state to make sure the GigaChannel Manager will - # not try to update it in the same way it should update other's channels - with db_session: - my_channel = metadata_store.ChannelMetadata.get_my_channels().first() - my_channel.local_version -= 1 - - # Subscribed, not updated - metadata_store.ChannelMetadata( - title="bla1", - public_key=b'123', - signature=b'345', - skip_key_check=True, - timestamp=123, - local_version=123, - subscribed=True, - infohash=random_infohash(), - ) - # Not subscribed, updated - metadata_store.ChannelMetadata( - title="bla2", - public_key=b'124', - signature=b'346', - skip_key_check=True, - timestamp=123, - local_version=122, - subscribed=False, - infohash=random_infohash(), - ) - # Subscribed, updated - only this one should be downloaded - chan3 = metadata_store.ChannelMetadata( - title="bla3", - public_key=b'125', - signature=b'347', - skip_key_check=True, - timestamp=123, - local_version=122, - subscribed=True, - infohash=random_infohash(), - ) - - def mock_download_channel(chan1): - nonlocal torrents_added - torrents_added += 1 - assert chan1 == chan3 - - gigachannel_manager.download_channel = mock_download_channel - - @db_session - def fake_get_metainfo(infohash, **_): - return {'info': {'name': metadata_store.ChannelMetadata.get(infohash=infohash).dirname}} - - gigachannel_manager.download_manager.get_metainfo = fake_get_metainfo - gigachannel_manager.download_manager.metainfo_requests = {} - gigachannel_manager.download_manager.download_exists = lambda _: False - - # Manually fire the channel updates checking routine - gigachannel_manager.check_channels_updates() - # download_channel should only fire once - for the original subscribed channel - assert torrents_added == 1 - - # Check that downloaded, but unprocessed channel torrent is added to the processing queue - gigachannel_manager.download_manager = MockObject() - gigachannel_manager.download_manager.download_exists = lambda _: True - - mock_download = MagicMock() - mock_download.get_state.get_status = DownloadStatus.SEEDING - - gigachannel_manager.download_manager.get_download = lambda _: mock_download - - def mock_process_channel_dir(c, _): - # Only the subscribed, but not processed (with local_version < timestamp) channel should be processed - assert c == chan3 - - gigachannel_manager.process_channel_dir = mock_process_channel_dir - - # Manually fire the channel updates checking routine - gigachannel_manager.check_channels_updates() - await gigachannel_manager.process_queued_channels() - - # The queue should be empty afterwards - assert not gigachannel_manager.channels_processing_queue - - -@db_session -def test_check_channel_updates_for_different_states(gigachannel_manager, 
metadata_store): - def random_subscribed_channel(): - return metadata_store.ChannelMetadata( - title=f"Channel {random.randint(0, 100)}", - public_key=os.urandom(32), - signature=os.urandom(32), - skip_key_check=True, - timestamp=123, - local_version=122, - subscribed=True, - infohash=random_infohash(), - ) - - # Three channels in different states based on the setup - channel_with_metainfo = random_subscribed_channel() - already_downloaded_channel = random_subscribed_channel() - non_downloaded_channel = random_subscribed_channel() - - # Setup 1: metainfo is already available for channel torrent. - def mock_get_metainfo(infohash): - return MagicMock() if infohash == channel_with_metainfo.infohash else None - - gigachannel_manager.download_manager.metainfo_requests = MagicMock(get=mock_get_metainfo) - - # Setup 2: Only one specific channel torrent is already downloaded. - def mock_download_exists(infohash): - return infohash == already_downloaded_channel.infohash - - gigachannel_manager.download_manager.download_exists = mock_download_exists - - # Setup 2 (contd): We expect non-downloaded channel to be downloaded - # so mocking download_channel() method. - gigachannel_manager.download_channel = MagicMock() - - # Setup 3: Downloaded channel torrent is set on Seeding state. - def mock_get_download(infohash): - if infohash != already_downloaded_channel.infohash: - return None - - seeding_state = MagicMock(get_status=lambda: DownloadStatus.SEEDING) - return MagicMock(get_state=lambda: seeding_state) - - gigachannel_manager.download_manager.get_download = mock_get_download - - # Act - gigachannel_manager.check_channels_updates() - - # Assert - gigachannel_manager.download_channel.assert_called_once_with(non_downloaded_channel) - assert len(gigachannel_manager.channels_processing_queue) == 1 - assert already_downloaded_channel.infohash in gigachannel_manager.channels_processing_queue - - -async def test_remove_cruft_channels(torrent_template, personal_channel, gigachannel_manager, metadata_store): - remove_list = [] - with db_session: - # Our personal chan is created, then updated, so there are 2 files on disk and there are 2 torrents: - # the old one and the new one - personal_channel = metadata_store.ChannelMetadata.get_my_channels().first() - my_chan_old_infohash = personal_channel.infohash - metadata_store.TorrentMetadata.from_dict(dict(torrent_template, origin_id=personal_channel.id_, status=NEW)) - personal_channel.commit_channel_torrent() - - # Now we add an external channel we are subscribed to. 
- chan2 = metadata_store.ChannelMetadata( - title="bla1", - infohash=b'123', - public_key=b'123', - signature=b'345', - skip_key_check=True, - timestamp=123, - local_version=123, - subscribed=True, - ) - - # Another external channel, but there is a catch: we recently unsubscribed from it - chan3 = metadata_store.ChannelMetadata( - title="bla2", - infohash=b'124', - public_key=b'124', - signature=b'346', - skip_key_check=True, - timestamp=123, - local_version=123, - subscribed=False, - ) - - class MockDownload(MockObject): - def __init__(self, infohash, dirname): - self.infohash = infohash - self.dirname = dirname - self.tdef = MockObject() - self.tdef.get_name_utf8 = lambda: self.dirname - self.tdef.get_infohash = lambda: infohash - - def get_def(self): - a = MockObject() - a.infohash = self.infohash - a.get_name_utf8 = lambda: self.dirname - a.get_infohash = lambda: self.infohash - return a - - # Double conversion is required to make sure that buffers signatures are not the same - mock_dl_list = [ - # Downloads for the personal channel - MockDownload(my_chan_old_infohash, personal_channel.dirname), - MockDownload(personal_channel.infohash, personal_channel.dirname), - # Downloads for the updated external channel: "old ones" and "recent" - MockDownload(b'12331244', chan2.dirname), - MockDownload(chan2.infohash, chan2.dirname), - # Downloads for the unsubscribed external channel - MockDownload(b'1231551', chan3.dirname), - MockDownload(chan3.infohash, chan3.dirname), - # Orphaned download - MockDownload(b'333', "blabla"), - ] - - def mock_get_channel_downloads(**_): - return mock_dl_list - - def mock_remove(infohash, remove_content=False): - nonlocal remove_list - d = Future() - d.set_result(None) - remove_list.append((infohash, remove_content)) - return d - - gigachannel_manager.download_manager.get_channel_downloads = mock_get_channel_downloads - gigachannel_manager.download_manager.remove_download = mock_remove - - gigachannel_manager.remove_cruft_channels() - await gigachannel_manager.process_queued_channels() - # We want to remove torrents for (a) deleted channels and (b) unsubscribed channels - assert remove_list == [ - (mock_dl_list[0], False), - (mock_dl_list[2], False), - (mock_dl_list[4], True), - (mock_dl_list[5], True), - (mock_dl_list[6], True), - ] - - -initiated_download = False - - -async def test_reject_malformed_channel( - gigachannel_manager, metadata_store -): # pylint: disable=unused-argument, redefined-outer-name - global initiated_download - with db_session: - channel = metadata_store.ChannelMetadata(title="bla1", public_key=b'123', infohash=random_infohash()) - - def mock_get_metainfo_bad(*args, **kwargs): - return succeed({b'info': {b'name': b'bla'}}) - - def mock_get_metainfo_good(*args, **kwargs): - return succeed({b'info': {b'name': channel.dirname.encode('utf-8')}}) - - initiated_download = False - - async def mock_download_from_tdef(*_, **__): - global initiated_download - initiated_download = True - mock_dl = MockObject() - mock_dl.future_finished = succeed(None) - return mock_dl - - gigachannel_manager.download_manager.start_download = mock_download_from_tdef - - # Check that we skip channels with incorrect dirnames - gigachannel_manager.download_manager.get_metainfo = mock_get_metainfo_bad - await gigachannel_manager.download_channel(channel) - assert not initiated_download - - with patch.object(TorrentDef, "__init__", lambda *_, **__: None): - # Check that we download channels with correct dirname - gigachannel_manager.download_manager.get_metainfo = 
mock_get_metainfo_good
-        await gigachannel_manager.download_channel(channel)
-        assert initiated_download
diff --git a/src/tribler/core/components/gigachannel_manager/tests/test_gigachannel_manager_component.py b/src/tribler/core/components/gigachannel_manager/tests/test_gigachannel_manager_component.py
deleted file mode 100644
index 78435fa18f0..00000000000
--- a/src/tribler/core/components/gigachannel_manager/tests/test_gigachannel_manager_component.py
+++ /dev/null
@@ -1,21 +0,0 @@
-from tribler.core.components.database.database_component import DatabaseComponent
-from tribler.core.components.gigachannel_manager.gigachannel_manager_component import GigachannelManagerComponent
-from tribler.core.components.ipv8.ipv8_component import Ipv8Component
-from tribler.core.components.key.key_component import KeyComponent
-from tribler.core.components.knowledge.knowledge_component import KnowledgeComponent
-from tribler.core.components.libtorrent.libtorrent_component import LibtorrentComponent
-from tribler.core.components.metadata_store.metadata_store_component import MetadataStoreComponent
-from tribler.core.components.session import Session
-from tribler.core.components.socks_servers.socks_servers_component import SocksServersComponent
-
-
-# pylint: disable=protected-access
-
-
-async def test_gigachannel_manager_component(tribler_config):
-    components = [DatabaseComponent(), Ipv8Component(), KnowledgeComponent(), SocksServersComponent(), KeyComponent(),
-                  MetadataStoreComponent(), LibtorrentComponent(), GigachannelManagerComponent()]
-    async with Session(tribler_config, components) as session:
-        comp = session.get_instance(GigachannelManagerComponent)
-        assert comp.started_event.is_set() and not comp.failed
-        assert comp.gigachannel_manager
diff --git a/src/tribler/core/components/knowledge/knowledge_component.py b/src/tribler/core/components/knowledge/knowledge_component.py
index d1cfc487853..10830b43fee 100644
--- a/src/tribler/core/components/knowledge/knowledge_component.py
+++ b/src/tribler/core/components/knowledge/knowledge_component.py
@@ -5,7 +5,6 @@
 from tribler.core.components.key.key_component import KeyComponent
 from tribler.core.components.knowledge.community.knowledge_community import KnowledgeCommunity
 from tribler.core.components.knowledge.rules.knowledge_rules_processor import KnowledgeRulesProcessor
-from tribler.core.components.metadata_store.utils import generate_test_channels
 
 
 class KnowledgeComponent(Component):
@@ -39,9 +38,6 @@ async def run(self):
 
         self._ipv8_component.initialise_community_by_default(self.community)
 
-        if self.session.config.gui_test_mode:
-            generate_test_channels(mds_component.mds, db_component.db)
-
     async def shutdown(self):
         await super().shutdown()
         if self._ipv8_component and self.community:
diff --git a/src/tribler/core/components/metadata_store/db/orm_bindings/binary_node.py b/src/tribler/core/components/metadata_store/db/orm_bindings/binary_node.py
deleted file mode 100644
index 279237c663a..00000000000
--- a/src/tribler/core/components/metadata_store/db/orm_bindings/binary_node.py
+++ /dev/null
@@ -1,26 +0,0 @@
-from pony.orm import Optional
-
-from tribler.core.components.metadata_store.db.serialization import BINARY_NODE, BinaryNodePayload
-
-
-def define_binding(db, db_version: int):
-    class BinaryNode(db.ChannelNode):
-        """
-        This ORM class represents channel nodes that carry raw binary data, e.g. channel thumbnails.
-        """
-
-        _discriminator_ = BINARY_NODE
-
-        # Serializable
-        if db_version >= 12:
-            binary_data = Optional(bytes, default=b"")
-            data_type = Optional(str, default="")
-
-        # Special class-level properties
-        _payload_class = BinaryNodePayload
-        payload_arguments = _payload_class.__init__.__code__.co_varnames[
-            : _payload_class.__init__.__code__.co_argcount
-        ][1:]
-        nonpersonal_attributes = db.ChannelNode.nonpersonal_attributes + ('binary_data', 'data_type')
-
-    return BinaryNode
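# NOTE: illustrative sketch by the editor, not part of the original files.
# binary_node.py above gates its serializable columns on db_version, so one
# binding definition can open both pre- and post-v12 databases. A toy,
# self-contained Pony sketch of the same conditional-schema trick:

from pony import orm

db = orm.Database()

DB_VERSION = 12  # assumed to be read from the database being opened

class Item(db.Entity):
    rowid = orm.PrimaryKey(int, auto=True)
    if DB_VERSION >= 12:           # the column is only declared for newer schemas
        extra = orm.Optional(str, default="")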
- """ - - _discriminator_ = BINARY_NODE - - # Serializable - if db_version >= 12: - binary_data = Optional(bytes, default=b"") - data_type = Optional(str, default="") - - # Special class-level properties - _payload_class = BinaryNodePayload - payload_arguments = _payload_class.__init__.__code__.co_varnames[ - : _payload_class.__init__.__code__.co_argcount - ][1:] - nonpersonal_attributes = db.ChannelNode.nonpersonal_attributes + ('binary_data', 'data_type') - - return BinaryNode diff --git a/src/tribler/core/components/metadata_store/db/orm_bindings/channel_description.py b/src/tribler/core/components/metadata_store/db/orm_bindings/channel_description.py deleted file mode 100644 index f75f3c4540c..00000000000 --- a/src/tribler/core/components/metadata_store/db/orm_bindings/channel_description.py +++ /dev/null @@ -1,12 +0,0 @@ -from tribler.core.components.metadata_store.db.serialization import CHANNEL_DESCRIPTION - - -def define_binding(db): - class ChannelDescription(db.JsonNode): - """ - This ORM class represents channel descriptions. - """ - - _discriminator_ = CHANNEL_DESCRIPTION - - return ChannelDescription diff --git a/src/tribler/core/components/metadata_store/db/orm_bindings/channel_metadata.py b/src/tribler/core/components/metadata_store/db/orm_bindings/channel_metadata.py deleted file mode 100644 index 4cfc4d650c9..00000000000 --- a/src/tribler/core/components/metadata_store/db/orm_bindings/channel_metadata.py +++ /dev/null @@ -1,568 +0,0 @@ -import os -from binascii import unhexlify -from datetime import datetime - -from lz4.frame import LZ4FrameCompressor -from pony import orm -from pony.orm import db_session, raw_sql, select - -from tribler.core.components.libtorrent.utils.libtorrent_helper import libtorrent as lt -from tribler.core.components.metadata_store.db.orm_bindings.channel_node import ( - CHANNEL_DESCRIPTION_FLAG, - CHANNEL_THUMBNAIL_FLAG, - COMMITTED, - LEGACY_ENTRY, - NEW, - PUBLIC_KEY_LEN, - TODELETE, - UPDATED, -) -from tribler.core.components.metadata_store.db.orm_bindings.discrete_clock import clock -from tribler.core.components.metadata_store.db.serialization import ( - CHANNEL_TORRENT, - ChannelMetadataPayload, - HealthItemsPayload, -) -from tribler.core.utilities.path_util import Path -from tribler.core.utilities.simpledefs import CHANNEL_STATE -from tribler.core.utilities.unicode import hexlify -from tribler.core.utilities.utilities import random_infohash - -CHANNEL_DIR_NAME_PK_LENGTH = 32 # Its not 40 so it could be distinguished from infohash -CHANNEL_DIR_NAME_ID_LENGTH = 16 # Zero-padded long int in hex form -CHANNEL_DIR_NAME_LENGTH = CHANNEL_DIR_NAME_PK_LENGTH + CHANNEL_DIR_NAME_ID_LENGTH -BLOB_EXTENSION = '.mdblob' -LZ4_END_MARK_SIZE = 4 # in bytes, from original specification. 
We don't use CRC -HEALTH_ITEM_HEADER_SIZE = 4 # in bytes, len of varlenI header - -LZ4_EMPTY_ARCHIVE = unhexlify("04224d184040c000000000") - - -def chunks(l, n): - """Yield successive n-sized chunks from l.""" - for i in range(0, len(l), n): - yield l[i: i + n] - - -def create_torrent_from_dir(directory, torrent_filename): - fs = lt.file_storage() - lt.add_files(fs, str(directory)) - t = lt.create_torrent(fs) - # t = create_torrent(fs, flags=17) # piece alignment - t.set_priv(False) - lt.set_piece_hashes(t, str(directory.parent)) - torrent = t.generate() - with open(torrent_filename, 'wb') as f: - f.write(lt.bencode(torrent)) - - infohash = lt.torrent_info(torrent).info_hash().to_bytes() - return torrent, infohash - - -def get_mdblob_sequence_number(filename): - filepath = Path(filename) - if filepath.suffixes == [BLOB_EXTENSION]: - return int(filename.stem) - if filepath.suffixes == [BLOB_EXTENSION, '.lz4']: - return int(Path(filepath.stem).stem) - return None - - -def entries_to_chunk(metadata_list, chunk_size, start_index=0, include_health=False): - """ - :param metadata_list: the list of metadata to process. - :param chunk_size: the desired chunk size limit, in bytes. - :param start_index: the index of the element of metadata_list from which the processing should start. - :param include_health: if True, put metadata health information into the chunk. - :return: (chunk, last_entry_index) tuple, where chunk is the resulting chunk in string form and - last_entry_index is the index of the element of the input list that was put into the chunk the last. - """ - # Try to fit as many blobs into this chunk as permitted by chunk_size and - # calculate their ends' offsets in the blob - if start_index >= len(metadata_list): - raise Exception('Could not serialize chunk: incorrect start_index', metadata_list, chunk_size, start_index) - - compressor = MetadataCompressor(chunk_size, include_health) - index = start_index - while index < len(metadata_list): - metadata = metadata_list[index] - was_able_to_add = compressor.put(metadata) - if not was_able_to_add: - break - index += 1 - - return compressor.close(), index - - -class MetadataCompressor: - """ - This class provides methods to put serialized data of one or more metadata entries into a single binary chunk. - - The data is added incrementally until it stops fitting into the designated chunk size. The first entry is added - regardless of violating the chunk size limit. - - The chunk format is: - - - [] - - The optional health information is serialized separately, as it was not originally included in the serialized - metadata format. If present, it contains the same number of items as the serialized list of metadata - entries. The N-th health info item in the health block corresponds to the N-th metadata entry. - - For the details of the health info format see the documentation: doc/metadata_store/serialization_format.rst - - While it is possible to put the health info items into the second LZ4-compressed frame, it is more efficient to - serialize them without any compression. The reason for this is that a typical health info item has a 1-byte - length (about 17 bytes if a torrent has actual health information), and the number of items is few for a single - chunk (usually less then 10 items). If we use LZ4 compressor, we want to use it incrementally in order to detect - when items stop fitting into a chunk. 
LZ4 algorithm cannot compress such small items efficiently in an incremental - fashion, and the resulting "compressed" size can be significantly bigger than the original data size. - """ - - def __init__(self, chunk_size: int, include_health: bool = False): - """ - :param chunk_size: the desired chunk size limit, in bytes. - :param include_health: if True, put metadata health information into the chunk. - """ - self.chunk_size = chunk_size - self.include_health = include_health - self.compressor = LZ4FrameCompressor(auto_flush=True) - # The next line is not necessary, added just to be safe - # in case of possible future changes of LZ4FrameCompressor - assert self.compressor.__enter__() is self.compressor - - metadata_header: bytes = self.compressor.begin() - self.count = 0 - self.size = len(metadata_header) + LZ4_END_MARK_SIZE - self.metadata_buffer = [metadata_header] - - if include_health: - self.health_buffer = [] - self.size += HEALTH_ITEM_HEADER_SIZE - else: - self.health_buffer = None - - self.closed = False - - def put(self, metadata) -> bool: - """ - Tries to add a metadata entry to chunk. The first entry is always added successfully. Then next entries are - added only if it possible to fit data into the chunk. - - :param metadata: a metadata entry to process. - :return: False if it was not possible to fit data into the chunk - """ - if self.closed: - raise TypeError('Compressor is already closed') - - metadata_bytes = metadata.serialized_delete() if metadata.status == TODELETE else metadata.serialized() - compressed_metadata_bytes = self.compressor.compress(metadata_bytes) - new_size = self.size + len(compressed_metadata_bytes) - health_bytes = b'' # To satisfy linter - if self.include_health: - health_bytes = metadata.serialized_health() - new_size += len(health_bytes) - - if new_size > self.chunk_size and self.count > 0: - # The first entry is always added even if the resulted size exceeds the chunk size. - # This lets higher levels to decide what to do in this case, e.g. send it through EVA protocol. - return False - - self.count += 1 - self.size = new_size - self.metadata_buffer.append(compressed_metadata_bytes) - if self.include_health: - self.health_buffer.append(health_bytes) - - return True - - def close(self) -> bytes: - """ - Closes compressor object and returns packed data. - - :return: serialized binary data - """ - if self.closed: - raise TypeError('Compressor is already closed') - self.closed = True - - end_mark = self.compressor.flush() - self.metadata_buffer.append(end_mark) - result = b''.join(self.metadata_buffer) - - # The next lines aren't necessary, added just to be safe - # in case of possible future changes of LZ4FrameCompressor - self.compressor.__exit__(None, None, None) - - if self.include_health: - result += HealthItemsPayload(b''.join(self.health_buffer)).serialize() - - return result - - -def define_binding(db): # pylint: disable=R0915 - class ChannelMetadata(db.TorrentMetadata, db.CollectionNode): - """ - This ORM binding represents Channel entries in the GigaChannel system. Each channel is a Collection that - additionally has Torrent properties, such as infohash, etc. The torrent properties are used to associate - a torrent that holds the contents of the channel dumped on the disk in the serialized form. - Methods for committing channels into the torrent form are implemented in this class. 
- """ - - _discriminator_ = CHANNEL_TORRENT - - # Serializable - start_timestamp = orm.Optional(int, size=64, default=0) - - # Local - subscribed = orm.Optional(bool, default=False) - share = orm.Optional(bool, default=False) - votes = orm.Optional(float, default=0.0) - individual_votes = orm.Set("ChannelVote", reverse="channel") - local_version = orm.Optional(int, size=64, default=0) - - votes_scaling = 1.0 - - # Special class-level properties - _payload_class = ChannelMetadataPayload - _channels_dir = None - _category_filter = None - _CHUNK_SIZE_LIMIT = 1 * 1024 * 1024 # We use 1MB chunks as a workaround for Python's lack of string pointers - payload_arguments = _payload_class.__init__.__code__.co_varnames[ - : _payload_class.__init__.__code__.co_argcount - ][1:] - - # As channel metadata depends on the public key, we can't include the infohash in nonpersonal_attributes - nonpersonal_attributes = set(db.CollectionNode.nonpersonal_attributes) - - infohash_to_channel_name_cache = {} - - @classmethod - @db_session - def get_my_channels(cls): - return ChannelMetadata.select( - lambda g: g.origin_id == 0 and g.public_key == cls._my_key.pub().key_to_bin()[10:] - ) - - @classmethod - @db_session - def create_channel(cls, title, description="", origin_id=0): - """ - Create a channel and sign it with a given key. - :param title: The title of the channel - :param description: The description of the channel - :param origin_id: id_ of the parent channel - :return: The channel metadata - """ - my_channel = cls( - origin_id=origin_id, - public_key=cls._my_key.pub().key_to_bin()[10:], - title=title, - tags=description, - subscribed=True, - share=True, - status=NEW, - infohash=random_infohash(), - ) - # random infohash is necessary to avoid triggering DB uniqueness constraints - my_channel.sign() - return my_channel - - @db_session - def consolidate_channel_torrent(self): - """ - Delete the channel dir contents and create it anew. - Use it to consolidate fragmented channel torrent directories. 
-            """
-
-            # Remark: there should be a way to optimize this stuff with SQL and better tree traversal algorithms
-            # Cleanup entries marked for deletion
-
-            db.CollectionNode.collapse_deleted_subtrees()
-            # Note: It should be possible to stop calling get_contents_to_commit here
-            commit_queue = self.get_contents_to_commit()
-            for entry in commit_queue:
-                if entry.status == TODELETE:
-                    entry.delete()
-
-            folder = Path(self._channels_dir) / self.dirname
-            # We check if we need to re-create the channel dir in case it was deleted for some reason
-            if not folder.is_dir():
-                os.makedirs(folder)
-            for filename in os.listdir(folder):
-                file_path = folder / filename
-                # We only remove mdblobs and leave the rest as it is
-                if filename.endswith(BLOB_EXTENSION) or filename.endswith(BLOB_EXTENSION + '.lz4'):
-                    os.unlink(Path.fix_win_long_file(file_path))
-
-            # Channel should get a new starting timestamp and its contents should get higher timestamps
-            start_timestamp = clock.tick()
-
-            def update_timestamps_recursive(node):
-                if issubclass(type(node), db.CollectionNode):
-                    for child in node.contents:
-                        update_timestamps_recursive(child)
-                if node.status in [COMMITTED, UPDATED, NEW]:
-                    node.status = UPDATED
-                    node.timestamp = clock.tick()
-                    node.sign()
-
-            update_timestamps_recursive(self)
-
-            return self.commit_channel_torrent(new_start_timestamp=start_timestamp)
-
-        def update_channel_torrent(self, metadata_list):
-            """
-            Channel torrents are append-only to support seeding the old versions
-            from the same dir and avoid updating already downloaded blobs.
-            :param metadata_list: The list of metadata entries to add to the torrent dir.
-            ACHTUNG: TODELETE entries _MUST_ be sorted to the end of the list to prevent channel corruption!
-            :return The newly created channel torrent infohash, final timestamp for the channel and torrent date
-            """
-            # As a workaround for delete entries not having a timestamp in the DB, delete entries should
-            # be placed after create/modify entries:
-            # | create/modify entries | delete entries |  <- final timestamp
-
-            # Create dir for the metadata files
-            channel_dir = Path(self._channels_dir / self.dirname).absolute()
-            if not channel_dir.is_dir():
-                os.makedirs(Path.fix_win_long_file(channel_dir))
-
-            existing_contents = sorted(channel_dir.iterdir())
-            last_existing_blob_number = get_mdblob_sequence_number(existing_contents[-1]) if existing_contents else None
-
-            index = 0
-            while index < len(metadata_list):
-                # Squash several serialized and signed metadata entries into a single file
-                data, index = entries_to_chunk(metadata_list, self._CHUNK_SIZE_LIMIT, start_index=index)
-                # Blobs ending with TODELETE entries increase the final timestamp as a workaround for delete commands
-                # possessing no timestamp.
-                if metadata_list[index - 1].status == TODELETE:
-                    blob_timestamp = clock.tick()
-                else:
-                    blob_timestamp = metadata_list[index - 1].timestamp
-
-                # The final file in the sequence should get a timestamp that is higher than the timestamp of
-                # the last channel contents entry. This final timestamp then should be returned to the calling function
-                # to be assigned to the corresponding channel entry.
-                # Otherwise, the local channel version will never become equal to its timestamp.
- if index >= len(metadata_list): - blob_timestamp = clock.tick() - # Check that the mdblob we're going to create has a greater timestamp than the existing ones - assert last_existing_blob_number is None or (blob_timestamp > last_existing_blob_number) - - blob_filename = Path(channel_dir, str(blob_timestamp).zfill(12) + BLOB_EXTENSION + '.lz4') - assert not blob_filename.exists() # Never ever write over existing files. - blob_filename.write_bytes(data) - last_existing_blob_number = blob_timestamp - - with db_session: - thumb_exists = db.ChannelThumbnail.exists( - lambda g: g.public_key == self.public_key and g.origin_id == self.id_ and g.status != TODELETE - ) - descr_exists = db.ChannelDescription.exists( - lambda g: g.public_key == self.public_key and g.origin_id == self.id_ and g.status != TODELETE - ) - - flags = CHANNEL_THUMBNAIL_FLAG * (int(thumb_exists)) + CHANNEL_DESCRIPTION_FLAG * (int(descr_exists)) - - # Note: the timestamp can end up messed in case of an error - - # Make torrent out of dir with metadata files - torrent, infohash = create_torrent_from_dir(channel_dir, self._channels_dir / (self.dirname + ".torrent")) - torrent_date = datetime.utcfromtimestamp(torrent[b'creation date']) - - return { - "infohash": infohash, - "timestamp": last_existing_blob_number, - "torrent_date": torrent_date, - "reserved_flags": flags, - }, torrent - - def commit_channel_torrent(self, new_start_timestamp=None, commit_list=None): - """ - Collect new/uncommitted and marked for deletion metadata entries, commit them to a channel torrent and - remove the obsolete entries if the commit succeeds. - :param new_start_timestamp: change the start_timestamp of the committed channel entry to this value - :param commit_list: the list of ORM objects to commit into this channel torrent - :return The new infohash, should be used to update the downloads - """ - md_list = commit_list or self.get_contents_to_commit() - - if not md_list: - return None - - try: - update_dict, torrent = self.update_channel_torrent(md_list) - except OSError: - self._logger.error( - "Error during channel torrent commit, not going to garbage collect the channel. Channel %s", - hexlify(self.public_key), - ) - return None - - if new_start_timestamp: - update_dict['start_timestamp'] = new_start_timestamp - # Update channel infohash, etc - for attr, val in update_dict.items(): - setattr(self, attr, val) - self.local_version = self.timestamp - self.sign() - - # Change the statuses of committed entries and clean up obsolete TODELETE entries - for g in md_list: - if g.status in [NEW, UPDATED]: - g.status = COMMITTED - elif g.status == TODELETE: - g.delete() - - # Write the channel mdblob to disk - self.status = COMMITTED # pylint: disable=W0201 - self.to_file(self._channels_dir / (self.dirname + BLOB_EXTENSION)) - - self._logger.info( - "Channel %s committed with %i new entries. 
New version is %i", - hexlify(self.public_key), - len(md_list), - update_dict['timestamp'], - ) - return torrent - - @property - def dirname(self): - # Have to limit this to support Windows file path length limit - return hexlify(self.public_key)[:CHANNEL_DIR_NAME_PK_LENGTH] + f"{self.id_:0>16x}" - - @classmethod - @db_session - def get_channels_by_title(cls, title): - return cls.select(lambda g: g.title == title) - - @classmethod - @db_session - def get_channel_with_infohash(cls, infohash): - return cls.get(infohash=infohash) - - @classmethod - @db_session - def get_channel_with_dirname(cls, dirname): - # Parse the public key part of the dirname - pk_part = dirname[:-CHANNEL_DIR_NAME_ID_LENGTH] - - def extend_to_bitmask(txt): - return txt + "0" * (PUBLIC_KEY_LEN * 2 - CHANNEL_DIR_NAME_LENGTH) - - pk_binmask_start = "x'" + extend_to_bitmask(pk_part) + "'" - pk_plus_one = f"{int(pk_part, 16) + 1:X}".zfill(len(pk_part)) - pk_binmask_end = "x'" + extend_to_bitmask(pk_plus_one) + "'" - # It is impossible to use LIKE queries on BLOBs, so we have to use comparisons - sql = "g.public_key >= " + pk_binmask_start + " AND g.public_key < " + pk_binmask_end - - # Parse the id part of the dirname - id_part = dirname[-CHANNEL_DIR_NAME_ID_LENGTH:] - id_ = int(id_part, 16) - - return orm.select(g for g in cls if g.id_ == id_ and raw_sql(sql)).first() - - @classmethod - @db_session - def get_updated_channels(cls): - return select( - g - for g in cls - if g.subscribed == 1 - and g.status != LEGACY_ENTRY - and (g.local_version < g.timestamp) - and g.public_key != cls._my_key.pub().key_to_bin()[10:] - ) # don't simplify `g.subscribed == 1` to bool form, it is used by partial index! - - @property - @db_session - def state(self): - """ - This property describes the current state of the channel. - :return: Text-based status - """ - if self.is_personal: - return CHANNEL_STATE.PERSONAL.value - if self.status == LEGACY_ENTRY: - return CHANNEL_STATE.LEGACY.value - if self.local_version == self.timestamp: - return CHANNEL_STATE.COMPLETE.value - if self.local_version > 0: - return CHANNEL_STATE.UPDATING.value - if self.subscribed: - return CHANNEL_STATE.METAINFO_LOOKUP.value - return CHANNEL_STATE.PREVIEW.value - - def to_simple_dict(self, **kwargs): - """ - Return a basic dictionary with information about the channel. - """ - result = super().to_simple_dict(**kwargs) - result.update( - { - "state": self.state, - "subscribed": self.subscribed, - "votes": self.votes / db.ChannelMetadata.votes_scaling, - "dirty": self.dirty if self.is_personal else False, - } - ) - return result - - @classmethod - def get_channel_name_cached(cls, dl_name, infohash): - # Querying the database each time is costly so we cache the name request in a dict. - chan_name = cls.infohash_to_channel_name_cache.get(infohash) - if chan_name is None: - chan_name = cls.get_channel_name(dl_name, infohash) - cls.infohash_to_channel_name_cache[infohash] = chan_name - return chan_name - - @classmethod - @db_session - def get_channel_name(cls, dl_name, infohash): - """ - Try to translate a Tribler download name into matching channel name. By searching for a channel with the - given dirname and/or infohash. Try do determine if infohash belongs to an older version of - some channel we already have. - :param dl_name - name of the download. Should match the directory name of the channel. - :param infohash - infohash of the download. 
- :return: Channel title as a string, prefixed with 'OLD:' for older versions - """ - channel = cls.get_channel_with_infohash(infohash) - if not channel: - try: - channel = cls.get_channel_with_dirname(dl_name) - except UnicodeEncodeError: - channel = None - - if not channel: - return dl_name - if channel.infohash == infohash: - return channel.title - return 'OLD:' + channel.title - - @db_session - def update_properties(self, update_dict): - updated_self = super().update_properties(update_dict) - if updated_self.origin_id != 0: - # Coerce to CollectionNode - # ACHTUNG! This is a little bit awkward way to re-create the entry as an instance of - # another class. Be very careful with it! - self_dict = updated_self.to_dict() - updated_self.delete(recursive=False) - self_dict.pop("rowid") - self_dict.pop("metadata_type") - self_dict["sign_with"] = self._my_key - updated_self = db.CollectionNode.from_dict(self_dict) - return updated_self - - def make_copy(self, tgt_parent_id, **kwargs): - return db.CollectionNode.make_copy( - self, tgt_parent_id, attributes_override={'infohash': random_infohash()}, **kwargs - ) - - return ChannelMetadata diff --git a/src/tribler/core/components/metadata_store/db/orm_bindings/channel_node.py b/src/tribler/core/components/metadata_store/db/orm_bindings/channel_node.py deleted file mode 100644 index 7dcb636ff91..00000000000 --- a/src/tribler/core/components/metadata_store/db/orm_bindings/channel_node.py +++ /dev/null @@ -1,317 +0,0 @@ -import random -from datetime import datetime - -from ipv8.keyvault.crypto import default_eccrypto -from pony import orm -from pony.orm.core import DEFAULT, db_session - -from tribler.core.components.metadata_store.db.orm_bindings.discrete_clock import clock -from tribler.core.components.metadata_store.db.serialization import ( - CHANNEL_NODE, - ChannelNodePayload, - DELETED, - DeletedMetadataPayload, -) -from tribler.core.exceptions import InvalidChannelNodeException, InvalidSignatureException -from tribler.core.utilities.path_util import Path -from tribler.core.utilities.unicode import hexlify - -# Metadata, torrents and channel statuses -NEW = 0 # The entry is newly created and is not published yet. It will be committed at the next commit. -TODELETE = 1 # The entry is marked to be removed at the next commit. -COMMITTED = 2 # The entry is committed and seeded. -UPDATED = 6 # One of the entry's properties was updated. It will be committed at the next commit. -LEGACY_ENTRY = 1000 # The entry was converted from the old Tribler DB. It has no signature and should not be shared. - -DIRTY_STATUSES = (NEW, TODELETE, UPDATED) - -PUBLIC_KEY_LEN = 64 - -CHANNEL_DESCRIPTION_FLAG = 1 -CHANNEL_THUMBNAIL_FLAG = 2 - - -def generate_dict_from_pony_args(cls, skip_list=None, **kwargs): - """ - Note: this is a way to manually define Pony entity default attributes in case we - have to generate the signature before creating an object - """ - d = {} - skip_list = skip_list or [] - for attr in cls._attrs_: # pylint: disable=W0212 - val = kwargs.get(attr.name, DEFAULT) - if attr.name in skip_list: - continue - d[attr.name] = attr.validate(val, entity=cls) - return d - - -def define_binding(db, logger=None, key=None): # pylint: disable=R0915 - class ChannelNode(db.Entity): - """ - This is the base class of our ORM bindings. It implements methods for signing and serialization of ORM objects. - All other GigaChannel-related ORM classes are derived from it. It is not intended for direct use. - Instead, other classes should derive from it. 
- """ - - _discriminator_ = CHANNEL_NODE - - rowid = orm.PrimaryKey(int, size=64, auto=True) - - # Serializable - metadata_type = orm.Discriminator(int, size=16) - reserved_flags = orm.Optional(int, size=16, default=0) - origin_id = orm.Optional(int, size=64, default=0, index=True) - - public_key = orm.Required(bytes) - id_ = orm.Required(int, size=64) - orm.composite_key(public_key, id_) - orm.composite_index(public_key, origin_id) - - timestamp = orm.Required(int, size=64, default=0) - # Signature is nullable. This means that "None" entries are stored in DB as NULLs instead of empty strings. - # NULLs are not checked for uniqueness and not indexed. - # This is necessary to store unsigned signatures without violating the uniqueness constraints. - signature = orm.Optional(bytes, unique=True, nullable=True, default=None) - - # Local - added_on = orm.Optional(datetime, default=datetime.utcnow) - status = orm.Optional(int, default=COMMITTED) - - # Special class-level properties - _payload_class = ChannelNodePayload - _my_key = key - _logger = logger - - # This attribute holds the names of the class attributes that are used by the serializer for the - # corresponding payload type. We only initialize it once on class creation as an optimization. - payload_arguments = _payload_class.__init__.__code__.co_varnames[ - : _payload_class.__init__.__code__.co_argcount - ][1:] - - # A non - personal attribute of an entry is an attribute that would have the same value regardless of where, - # when and who created the entry. - # In other words, it does not depend on the Tribler instance that created it. - # ACHTUNG! On object creation, Pony does not check if discriminator is wrong for the created ORM type! - nonpersonal_attributes = ('metadata_type',) - - def __init__(self, *args, **kwargs): - """ - Initialize a metadata object. - All this dance is required to ensure that the signature is there and it is correct. - """ - skip_key_check = False - - # Process special keyworded arguments - # "sign_with" argument given, sign with it - private_key_override = None - if "sign_with" in kwargs: - kwargs["public_key"] = kwargs["sign_with"].pub().key_to_bin()[10:] - private_key_override = kwargs.pop("sign_with") - - # Free-for-all entries require special treatment - if "public_key" in kwargs and kwargs["public_key"] == b"": - # We have to give the entry an unique sig to honor the DB constraints. We use the entry's id_ - # as the sig to keep it unique and short. The uniqueness is guaranteed by DB as it already - # imposes uniqueness constraints on the id_+public_key combination. - if "id_" in kwargs: - kwargs["signature"] = None - skip_key_check = True - else: - # Trying to create an FFA entry without specifying the id_ should be considered an error, - # because assigning id_ automatically by clock breaks anonymity. - # FFA entries should be "timeless" and anonymous. - raise InvalidChannelNodeException( - "Attempted to create %s free-for-all (unsigned) object without specifying id_ : " - % str(self.__class__.__name__) - ) - - # For putting legacy/test stuff in - skip_key_check = kwargs.pop("skip_key_check", skip_key_check) - - if "timestamp" not in kwargs: - kwargs["timestamp"] = clock.tick() - - if "id_" not in kwargs: - kwargs["id_"] = int(random.getrandbits(63)) - - if not private_key_override and not skip_key_check: - # No key/signature given, sign with our own key. 
- if ("signature" not in kwargs) and ( - ("public_key" not in kwargs) or (kwargs["public_key"] == self._my_key.pub().key_to_bin()[10:]) - ): - private_key_override = self._my_key - - # Key/signature given, check them for correctness - elif ("public_key" in kwargs) and ("signature" in kwargs): - try: - self._payload_class(**kwargs) - except InvalidSignatureException as e: - raise InvalidSignatureException( - f"Attempted to create {str(self.__class__.__name__)} object with invalid signature/PK: " - + (hexlify(kwargs["signature"]) if "signature" in kwargs else "empty signature ") - + " / " - + (hexlify(kwargs["public_key"]) if "public_key" in kwargs else " empty PK") - ) from e - - if private_key_override: - # Get default values for Pony class attributes. We have to do it manually because we need - # to know the payload signature *before* creating the object. - kwargs = generate_dict_from_pony_args(self.__class__, skip_list=["signature", "public_key"], **kwargs) - payload = self._payload_class( - **dict( - kwargs, - public_key=private_key_override.pub().key_to_bin()[10:], - key=private_key_override, - metadata_type=self.metadata_type, - ) - ) - kwargs["public_key"] = payload.public_key - kwargs["signature"] = payload.signature - - super().__init__(*args, **kwargs) - - def _serialized(self, key=None): - """ - Serializes the object and returns the result with added signature (tuple output) - :param key: private key to sign object with - :return: (serialized_data, signature) tuple - """ - return self._payload_class( # pylint: disable=W0212 - key=key, unsigned=(self.signature is None), **self.to_dict() - )._serialized() # pylint: disable=W0212 - - def serialized(self, key=None): - """ - Serializes the object and returns the result with added signature (blob output) - :param key: private key to sign object with - :return: serialized_data+signature binary string - """ - return b''.join(self._serialized(key)) - - def _serialized_delete(self): - """ - Create a special command to delete this metadata and encode it for transfer (tuple output). - :return: (serialized_data, signature) tuple - """ - my_dict = ChannelNode.to_dict(self) - my_dict.update({"metadata_type": DELETED, "delete_signature": self.signature}) - return DeletedMetadataPayload(key=self._my_key, **my_dict)._serialized() # pylint: disable=W0212 - - def serialized_delete(self): - """ - Create a special command to delete this metadata and encode it for transfer (blob output). 
-            :return: serialized_data+signature binary string
-            """
-            return b''.join(self._serialized_delete())
-
-        def serialized_health(self) -> bytes:
-            return b';'
-
-        def to_file(self, filename, key=None):
-            with open(Path.fix_win_long_file(filename), 'wb') as output_file:
-                output_file.write(self.serialized(key))
-
-        def to_delete_file(self, filename):
-            with open(Path.fix_win_long_file(filename), 'wb') as output_file:
-                output_file.write(self.serialized_delete())
-
-        def sign(self, key=None):
-            if not key:
-                key = self._my_key
-            self.public_key = key.pub().key_to_bin()[10:]
-            _, self.signature = self._serialized(key)
-
-        def has_valid_signature(self):
-            crypto = default_eccrypto
-            signature_correct = False
-            key_correct = crypto.is_valid_public_bin(b"LibNaCLPK:" + bytes(self.public_key))
-
-            if key_correct:
-                try:
-                    self._payload_class(**self.to_dict())
-                except InvalidSignatureException:
-                    signature_correct = False
-                else:
-                    signature_correct = True
-
-            return key_correct and signature_correct
-
-        @classmethod
-        def from_payload(cls, payload):
-            return cls(**payload.to_dict())
-
-        @classmethod
-        def from_dict(cls, dct):
-            return cls(**dct)
-
-        @property
-        @db_session
-        def is_personal(self):
-            return self._my_key.pub().key_to_bin()[10:] == self.public_key
-
-        @db_session
-        def soft_delete(self):
-            if self.status == NEW:
-                # Uncommitted metadata. Delete immediately
-                self.delete()
-            else:
-                self.status = TODELETE
-
-        def update_properties(self, update_dict):
-            signed_attribute_changed = False
-            for k, value in update_dict.items():
-                if getattr(self, k) != value:
-                    setattr(self, k, value)
-                    signed_attribute_changed = signed_attribute_changed or (k in self.payload_arguments)
-
-            if signed_attribute_changed:
-                if self.status != NEW:
-                    self.status = UPDATED
-                # ACHTUNG! When using the key argument, the thing will still use _local_ timestamp counter!
-                self.timestamp = clock.tick()
-                self.sign()
-
-            return self
-
-        def get_parent_nodes(self):
-            full_path = {self: True}
-            node = self
-            while node:
-                node = db.CollectionNode.get(public_key=self.public_key, id_=node.origin_id)
-                if node is None:
-                    break
-                if node in full_path:
-                    # Found an id loop, but we return it nonetheless to keep the logic from breaking.
-                    break
-                full_path[node] = True
-                if node.origin_id == 0:
-                    break
-            return tuple(reversed(list(full_path)))
-
-        def make_copy(self, tgt_parent_id, attributes_override=None):
-            dst_dict = attributes_override or {}
-
-            for k in self.nonpersonal_attributes:
-                dst_dict[k] = getattr(self, k)
-            dst_dict.update({"origin_id": tgt_parent_id, "status": NEW})
-            return self.__class__(**dst_dict)
-
-        def get_type(self) -> int:
-            return self._discriminator_
-
-        def to_simple_dict(self):
-            """
-            Return a basic dictionary with information about the node
-            """
-            simple_dict = {
-                "type": self.get_type(),
-                "id": self.id_,
-                "origin_id": self.origin_id,
-                "public_key": hexlify(self.public_key),
-                "status": self.status,
-            }
-
-            return simple_dict
-
-    return ChannelNode
diff --git a/src/tribler/core/components/metadata_store/db/orm_bindings/channel_peer.py b/src/tribler/core/components/metadata_store/db/orm_bindings/channel_peer.py
deleted file mode 100644
index 11b23685531..00000000000
--- a/src/tribler/core/components/metadata_store/db/orm_bindings/channel_peer.py
+++ /dev/null
@@ -1,18 +0,0 @@
-from datetime import datetime
-
-from pony import orm
-
-
-def define_binding(db):
-    class ChannelPeer(db.Entity):
-        """
-        This binding stores public keys of IPv8 peers that sent us some GigaChannel data.
It is used by the
-        voting system.
-        """
-
-        rowid = orm.PrimaryKey(int, size=64, auto=True)
-        public_key = orm.Required(bytes, unique=True)
-        individual_votes = orm.Set("ChannelVote", reverse='voter')
-        added_on = orm.Optional(datetime, default=datetime.utcnow)
-
-    return ChannelPeer
diff --git a/src/tribler/core/components/metadata_store/db/orm_bindings/channel_thumbnail.py b/src/tribler/core/components/metadata_store/db/orm_bindings/channel_thumbnail.py
deleted file mode 100644
index 55f1f889745..00000000000
--- a/src/tribler/core/components/metadata_store/db/orm_bindings/channel_thumbnail.py
+++ /dev/null
@@ -1,12 +0,0 @@
-from tribler.core.components.metadata_store.db.serialization import CHANNEL_THUMBNAIL
-
-
-def define_binding(db):
-    class ChannelThumbnail(db.BinaryNode):
-        """
-        This ORM class represents channel thumbnails.
-        """
-
-        _discriminator_ = CHANNEL_THUMBNAIL
-
-    return ChannelThumbnail
diff --git a/src/tribler/core/components/metadata_store/db/orm_bindings/channel_vote.py b/src/tribler/core/components/metadata_store/db/orm_bindings/channel_vote.py
deleted file mode 100644
index fd63235c8d2..00000000000
--- a/src/tribler/core/components/metadata_store/db/orm_bindings/channel_vote.py
+++ /dev/null
@@ -1,22 +0,0 @@
-from datetime import datetime
-
-from pony import orm
-
-
-def define_binding(db):
-    class ChannelVote(db.Entity):
-        """
-        This ORM class represents votes cast for a channel. A single instance (row) represents a vote from a single
-        peer (public key) for a single channel (a ChannelMetadata entry, essentially represented by a public_key+id_
-        pair). To allow at most one counted vote per peer per channel, it keeps track of when the vote was cast
-        (vote_date) and what amount was used locally to bump it (last_amount).
-        """
-
-        rowid = orm.PrimaryKey(int, size=64, auto=True)
-        voter = orm.Required("ChannelPeer")
-        channel = orm.Required("ChannelMetadata", reverse='individual_votes')
-        orm.composite_key(voter, channel)
-        last_amount = orm.Optional(float, default=0.0)
-        vote_date = orm.Optional(datetime, default=datetime.utcnow)
-
-    return ChannelVote
diff --git a/src/tribler/core/components/metadata_store/db/orm_bindings/collection_node.py b/src/tribler/core/components/metadata_store/db/orm_bindings/collection_node.py
deleted file mode 100644
index 8c2b99fb461..00000000000
--- a/src/tribler/core/components/metadata_store/db/orm_bindings/collection_node.py
+++ /dev/null
@@ -1,444 +0,0 @@
-import os
-from pathlib import Path
-
-from pony import orm
-from pony.orm import db_session, select
-
-from tribler.core.components.libtorrent.torrentdef import TorrentDef
-from tribler.core.components.metadata_store.db.orm_bindings.channel_metadata import chunks
-from tribler.core.components.metadata_store.db.orm_bindings.channel_node import (
-    CHANNEL_DESCRIPTION_FLAG,
-    CHANNEL_THUMBNAIL_FLAG,
-    COMMITTED,
-    DIRTY_STATUSES,
-    NEW,
-    TODELETE,
-    UPDATED,
-)
-from tribler.core.components.metadata_store.db.orm_bindings.discrete_clock import clock
-from tribler.core.components.metadata_store.db.orm_bindings.torrent_metadata import tdef_to_metadata_dict
-from tribler.core.components.metadata_store.db.serialization import (
-    CHANNEL_TORRENT,
-    COLLECTION_NODE,
-    CollectionNodePayload,
-)
-from tribler.core.utilities.simpledefs import CHANNEL_STATE
-from tribler.core.utilities.utilities import random_infohash
-
-
-# pylint: disable=too-many-statements
-
-
-def define_binding(db):
-    class CollectionNode(db.MetadataNode):
-        """
-        This ORM class represents a generic named container, i.e.
a folder. It is used as an intermediary node - in building the nested channels tree. - Methods for copying stuff recursively are bound to it. - """ - - _discriminator_ = COLLECTION_NODE - - # ACHTUNG! PONY BUG! attributes inherited from multiple inheritance are not cached! - # Therefore, we are forced to move the attributes to common ancestor class of CollectionNode and ChannelTorrent, - # that is MetadataNode. When Pony fixes it, we must move it here for clarity. - # num_entries = orm.Optional(int, size=64, default=0) - - # Special class-level properties - _payload_class = CollectionNodePayload - payload_arguments = _payload_class.__init__.__code__.co_varnames[ - : _payload_class.__init__.__code__.co_argcount - ][1:] - nonpersonal_attributes = db.MetadataNode.nonpersonal_attributes + ('num_entries',) - - @property - @db_session - def state(self): - if self.is_personal: - return CHANNEL_STATE.PERSONAL.value - - toplevel_parent = self.get_parent_nodes()[0] - if ( - toplevel_parent.metadata_type == CHANNEL_TORRENT - and toplevel_parent.local_version == toplevel_parent.timestamp - ): - return CHANNEL_STATE.COMPLETE.value - - return CHANNEL_STATE.PREVIEW.value - - def to_simple_dict(self): - result = super().to_simple_dict() - result.update( - { - "torrents": self.num_entries, - "state": self.state, - "description_flag": self.description_flag, - "thumbnail_flag": self.thumbnail_flag, - } - ) - return result - - def make_copy(self, tgt_parent_id, recursion_depth=15, **kwargs): - new_node = db.MetadataNode.make_copy(self, tgt_parent_id, **kwargs) - # Recursive copying - if recursion_depth: - for node in self.actual_contents: - if issubclass(type(node), CollectionNode): - node.make_copy(new_node.id_, recursion_depth=recursion_depth - 1) - else: - node.make_copy(new_node.id_) - return new_node - - @db_session - def copy_torrent_from_infohash(self, infohash): - """ - Search the database for a given infohash and create a copy of the matching entry in the current channel - :param infohash: - :return: New TorrentMetadata signed with your key. - """ - - existing = db.TorrentMetadata.select(lambda g: g.infohash == infohash).first() - - if not existing: - return None - - new_entry_dict = { - "origin_id": self.id_, - "infohash": existing.infohash, - "title": existing.title, - "tags": existing.tags, - "size": existing.size, - "torrent_date": existing.torrent_date, - "tracker_info": existing.tracker_info, - "status": NEW, - } - return db.TorrentMetadata.from_dict(new_entry_dict) - - @property - def dirty(self): - return self.contents.where(lambda g: g.status in DIRTY_STATUSES).exists() - - @property - def contents(self): - return db.ChannelNode.select( - lambda g: g.public_key == self.public_key and g.origin_id == self.id_ and g != self - ) - - @property - def actual_contents(self): - return self.contents.where(lambda g: g.status != TODELETE) - - @property - @db_session - def contents_list(self): - return list(self.contents) - - @property - def contents_len(self): - return orm.count(self.contents) - - @property - def thumbnail_flag(self): - return bool(self.reserved_flags & CHANNEL_THUMBNAIL_FLAG) - - @property - def description_flag(self): - return bool(self.reserved_flags & CHANNEL_DESCRIPTION_FLAG) - - @db_session - def add_torrent_to_channel(self, tdef, extra_info=None): - """ - Add a torrent to your channel. 
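-            If a torrent with the same infohash is already in the channel, no duplicate is created: the
-            existing entry is returned, and an entry previously marked TODELETE is revived and set to UPDATED.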
- :param tdef: The torrent definition file of the torrent to add - :param extra_info: Optional extra info to add to the torrent - """ - new_entry_dict = dict(tdef_to_metadata_dict(tdef), status=NEW) - if extra_info: - new_entry_dict['tags'] = extra_info.get('description', '') - - # See if the torrent is already in the channel - old_torrent = db.TorrentMetadata.get(public_key=self.public_key, infohash=tdef.get_infohash()) - torrent_metadata = old_torrent - if old_torrent: - # If it is there, check if we were going to delete it - if old_torrent.status == TODELETE: - new_timestamp = clock.tick() - old_torrent.set(timestamp=new_timestamp, origin_id=self.id_, **new_entry_dict) - old_torrent.sign() - # As we really don't know what status this torrent had _before_ it got its TODELETE status, - # we _must_ set its status to UPDATED, for safety - old_torrent.status = UPDATED - else: - torrent_metadata = db.TorrentMetadata.from_dict(dict(origin_id=self.id_, **new_entry_dict)) - return torrent_metadata - - @db_session - def pprint_tree(self, file=None, _prefix="", _last=True): - print(_prefix, "`- " if _last else "|- ", (self.num_entries, self.metadata_type), sep="", file=file) # noqa - _prefix += " " if _last else "| " - child_count = self.actual_contents.count() - for i, child in enumerate(list(self.actual_contents)): - if issubclass(type(child), CollectionNode): - _last = i == (child_count - 1) - child.pprint_tree(file, _prefix, _last) - else: - print(_prefix, "`- " if _last else "|- ", child.metadata_type, sep="", file=file) # noqa - - @db_session - def get_contents_recursive(self): - results_stack = [] - for subnode in self.contents: - if issubclass(type(subnode), CollectionNode): - results_stack.extend(subnode.get_contents_recursive()) - results_stack.append(subnode) - return results_stack - - async def add_torrents_from_dir(self, torrents_dir, recursive=False): - torrents_list = [] - errors_list = [] - - def rec_gen(dir_): - for root, _, filenames in os.walk(dir_): - for fn in filenames: - yield Path(root) / fn - - filename_generator = rec_gen(torrents_dir) if recursive else os.listdir(torrents_dir) - # Build list of .torrents to process - torrents_list_generator = (Path(torrents_dir, f) for f in filename_generator) - torrents_list = [f for f in torrents_list_generator if f.is_file() and f.suffix == ".torrent"] - - torrent_defs = [] - for filename in torrents_list: - try: - torrent_defs.append(await TorrentDef.load(filename)) - except Exception: # pylint: disable=W0703 - # Have to use the broad exception clause because Py3 versions of libtorrent - # generate generic Exceptions - errors_list.append(filename) - - # 100 is a reasonable chunk size for commits - with db_session: - for chunk in chunks(torrent_defs, 100): - for tdef in chunk: - self.add_torrent_to_channel(tdef) - orm.commit() - - return torrents_list, errors_list - - @staticmethod - @db_session - def commit_all_channels(): - committed_channels = [] - commit_queues_list = db.ChannelMetadata.get_commit_forest() - for _, queue in commit_queues_list.items(): - channel = queue[-1] - # Committing empty channels - if len(queue) == 1: - # Empty top-level channels are deleted on-sight - if channel.status == TODELETE: - channel.delete() - else: - # Only the top-level channel entry was changed. Just mark it committed and do nothing. 
- channel.status = COMMITTED - continue - - # Committing non-empty channels - queue_prepared = db.ChannelMetadata.prepare_commit_queue_for_channel(queue) - if isinstance(channel, db.ChannelMetadata): - committed_channels.append(channel.commit_channel_torrent(commit_list=queue_prepared)) - # Top-level collections get special treatment. - # These can be used for e.g. non-published personal favourites collections. - elif isinstance(channel, db.CollectionNode): - for g in queue: - if g.status in [NEW, UPDATED]: - g.status = COMMITTED - elif g.status == TODELETE: - g.delete() - - return committed_channels - - @staticmethod - @db_session - def get_children_dict_to_commit(): - db.CollectionNode.collapse_deleted_subtrees() - upd_dict = {} - children = {} - - # Remark: it should be possible to optimize this by rewriting in pure SQL with recursive CTEs - - def update_node_info(n): - # Add the node to its parent's set of children - if n.origin_id not in children: - children[n.origin_id] = {n} - else: - children[n.origin_id].add(n) - upd_dict[n.id_] = n - - dead_parents = set() - # First we traverse the tree upwards from changed leaves to find all nodes affected by changes - for node in db.ChannelNode.select( - lambda g: g.public_key == db.ChannelNode._my_key.pub().key_to_bin()[10:] # pylint: disable=W0212 - and g.status in DIRTY_STATUSES - ): - update_node_info(node) - # This process resolves the parents completely. - # Therefore, if a parent is already in the dict, its path has already been resolved. - while node and (node.origin_id not in upd_dict): - # Add the node to its parent's set of children - update_node_info(node) - # Get parent node - parent = db.CollectionNode.get(public_key=node.public_key, id_=node.origin_id) - if not parent: - dead_parents.add(node.origin_id) - node = parent - - # Normally, dead_parents should consist only of 0 node, which is root. Otherwise, we got some orphans. - if 0 in dead_parents: - dead_parents.remove(0) - # Delete orphans - db.ChannelNode.select( - lambda g: db.ChannelNode._my_key.pub().key_to_bin()[10:] == g.public_key # pylint: disable=W0212 - and g.origin_id in dead_parents - ).delete() - orm.flush() # Just in case... 
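-            # "children" maps a parent id_ to the set of its descendants that need committing; key 0,
-            # the tree root, must be present for the result to be usable.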
- if not children or 0 not in children: - return {} - return children - - @staticmethod - @db_session - def get_commit_forest(): - children = db.CollectionNode.get_children_dict_to_commit() - if not children: - return {} - # We want a separate commit tree/queue for each toplevel channel - forest = {} - toplevel_nodes = children.pop(0) - for root_node in toplevel_nodes: - # Tree -> stack -> queue - commit_queue = [] - tree_stack = [root_node] - while tree_stack and children.get(tree_stack[-1].id_, None): - # Traverse the tree from top to bottom converting it to a stack - while children.get(tree_stack[-1].id_, None): - node = children[tree_stack[-1].id_].pop() - tree_stack.append(node) - - while not issubclass(type(tree_stack[-1]), db.CollectionNode): - commit_queue.append(tree_stack.pop()) - # Unwind the tree stack until either the stack is empty or we meet a non-empty node - while tree_stack and not children.get(tree_stack[-1].id_, None): - while not issubclass(type(tree_stack[-1]), db.CollectionNode): - commit_queue.append(tree_stack.pop()) - - # It was a terminal collection - collection = tree_stack.pop() - commit_queue.append(collection) - - if not commit_queue or commit_queue[-1] != root_node: - commit_queue.append(root_node) - forest[root_node.id_] = tuple(commit_queue) - - return forest - - @staticmethod - def prepare_commit_queue_for_channel(commit_queue): - """ - This routine prepares the raw commit queue for commit by updating the elements' properties and - re-signing them. Also, it removes the channel entry itself from the queue [:-1], because its - meaningless to put it in the blobs, as it must be updated with the new infohash after commit. - - :param commit_queue: - :return: - """ - for node in commit_queue: - # Avoid updating entries that must be deleted: - # soft delete payloads require signatures of unmodified entries - if issubclass(type(node), db.CollectionNode) and node.status != TODELETE: - # Update recursive count of actual non-collection contents - node.num_entries = select( - # For each subnode, if it is a collection, add the count of its contents to the recursive sum. - # Otherwise, add just 1 to the sum (to count the subnode itself). - (g.num_entries if g.metadata_type == COLLECTION_NODE else 1) - for g in node.actual_contents - ).sum() - node.timestamp = clock.tick() - node.sign() - # This perverted comparator lambda is necessary to ensure that delete entries are always - # sorted to the end of the list, as required by the channel serialization routine. - return sorted(commit_queue[:-1], key=lambda x: int(x.status == TODELETE) - 1 / x.timestamp) - - def delete(self, *args, **kwargs): - # Recursively delete contents - if kwargs.pop('recursive', True): - for node in self.contents: - node.delete(*args, **kwargs) - super().delete(*args, **kwargs) - - @staticmethod - @db_session - def collapse_deleted_subtrees(): - """ - This procedure scans personal channels for collection nodes marked TODELETE and recursively removes - their contents. The top-level nodes themselves are left intact so soft delete entries can be generated - in the future. - This procedure should be always run _before_ committing personal channels. 
- """ - - # Remark: it should be possible to optimize this by rewriting in pure SQL with recursive CTEs - - def get_highest_deleted_parent(node, highest_deleted_parent=None): - if node.origin_id == 0: - return highest_deleted_parent - parent = db.CollectionNode.get(public_key=node.public_key, id_=node.origin_id) - if not parent: - return highest_deleted_parent - if parent.status == TODELETE: - highest_deleted_parent = parent - return get_highest_deleted_parent(parent, highest_deleted_parent) - - deletion_set = { - get_highest_deleted_parent(node, highest_deleted_parent=node).rowid - for node in db.CollectionNode.select( - lambda g: g.public_key == db.CollectionNode._my_key.pub().key_to_bin()[10:] # pylint: disable=W0212 - and g.status == TODELETE - ) - if node - } - - for node in [db.CollectionNode[rowid] for rowid in deletion_set]: - for subnode in node.contents: - subnode.delete() - - @db_session - def get_contents_to_commit(self): - return db.ChannelMetadata.prepare_commit_queue_for_channel(self.get_commit_forest().get(self.id_, [])) - - def update_properties(self, update_dict): - # Sanity checks: check that we don't create a recursive dependency or an orphaned channel - new_origin_id = update_dict.get('origin_id', self.origin_id) - if new_origin_id not in (0, self.origin_id): - new_parent = CollectionNode.get(public_key=self.public_key, id_=new_origin_id) - if not new_parent: - raise ValueError("Target collection does not exists") - root_path = new_parent.get_parent_nodes() - if new_origin_id == self.id_ or self in root_path[:-1]: - raise ValueError("Can't move collection into itself or its descendants!") - if root_path[0].origin_id != 0: - raise ValueError("Tried to move collection into an orphaned hierarchy!") - updated_self = super().update_properties(update_dict) - if updated_self.origin_id == 0 and self.metadata_type == COLLECTION_NODE: - # Coerce to ChannelMetadata - # ACHTUNG! This is a somewhat awkward way to re-create the entry as an instance of - # another class. Be very careful with it! - self_dict = updated_self.to_dict() - updated_self.delete(recursive=False) - self_dict.pop("rowid") - self_dict.pop("metadata_type") - self_dict.pop("timestamp") - self_dict['infohash'] = random_infohash() - self_dict["sign_with"] = self._my_key - updated_self = db.ChannelMetadata.from_dict(self_dict) - return updated_self - - return CollectionNode diff --git a/src/tribler/core/components/metadata_store/db/orm_bindings/discrete_clock.py b/src/tribler/core/components/metadata_store/db/orm_bindings/discrete_clock.py deleted file mode 100644 index 73784b1641e..00000000000 --- a/src/tribler/core/components/metadata_store/db/orm_bindings/discrete_clock.py +++ /dev/null @@ -1,22 +0,0 @@ -# Discrete clock-like counter, initialized from the system clock. -# It produces monotonically increasing timestamps for user-generated channel elements. -# Note that we only use the system clock to initialize the counter -# when starting Tribler. Afterwards, we increase the counter ourselves. This supposes -# that users do not create more than a 1000 entries per second and their clock does -# not go backwards between Tribler restarts. 
-from datetime import datetime - -from tribler.core.components.metadata_store.db.serialization import time2int - - -class DiscreteClock: - def __init__(self): - # We assume people are not adding 1000 torrents per second constantly to their channels - self.clock = time2int(datetime.utcnow()) * 1000 - - def tick(self): - self.clock += 1 - return self.clock - - -clock = DiscreteClock() diff --git a/src/tribler/core/components/metadata_store/db/orm_bindings/json_node.py b/src/tribler/core/components/metadata_store/db/orm_bindings/json_node.py deleted file mode 100644 index b6dae083aa9..00000000000 --- a/src/tribler/core/components/metadata_store/db/orm_bindings/json_node.py +++ /dev/null @@ -1,31 +0,0 @@ -from pony.orm import Optional - -from tribler.core.components.metadata_store.db.serialization import JSON_NODE, JsonNodePayload - - -def define_binding(db, db_version: int): - class JsonNode(db.ChannelNode): - """ - This ORM class represents channel descriptions. - """ - - _discriminator_ = JSON_NODE - - # Serializable - if db_version >= 12: - json_text = Optional(str, default="{}") - - # Special class-level properties - _payload_class = JsonNodePayload - payload_arguments = _payload_class.__init__.__code__.co_varnames[ - : _payload_class.__init__.__code__.co_argcount - ][1:] - nonpersonal_attributes = db.ChannelNode.nonpersonal_attributes + ('json_text',) - - def to_simple_dict(self): - simple_dict = super().to_simple_dict() - simple_dict.update({"json_text": self.json_text}) - - return simple_dict - - return JsonNode diff --git a/src/tribler/core/components/metadata_store/db/orm_bindings/metadata_node.py b/src/tribler/core/components/metadata_store/db/orm_bindings/metadata_node.py deleted file mode 100644 index dc9d6470ace..00000000000 --- a/src/tribler/core/components/metadata_store/db/orm_bindings/metadata_node.py +++ /dev/null @@ -1,46 +0,0 @@ -from pony import orm - -from tribler.core.components.metadata_store.db.serialization import METADATA_NODE, MetadataNodePayload - - -def define_binding(db): - class MetadataNode(db.ChannelNode): - """ - This ORM class extends ChannelNode by adding metadata-storing attributes such as "title" and "tags". - It implements methods for indexed text search based on the "title" field. - It is not intended for direct use. Instead, other classes should derive from it. - """ - - _discriminator_ = METADATA_NODE - - # Serializable - title = orm.Optional(str, default='') - tags = orm.Optional(str, default='') - - # ACHTUNG! PONY BUG! This is a workaround for Pony not caching attributes from multiple inheritance! - # Its real home is CollectionNode, but we are forced to put it here so it is loaded by default on all queries. - # When Pony fixes it, we must move it back to CollectionNode for clarity. - num_entries = orm.Optional(int, size=64, default=0) - - # Special class-level properties - _payload_class = MetadataNodePayload - payload_arguments = _payload_class.__init__.__code__.co_varnames[ - : _payload_class.__init__.__code__.co_argcount - ][1:] - nonpersonal_attributes = db.ChannelNode.nonpersonal_attributes + ('title', 'tags') - - def to_simple_dict(self): - """ - Return a basic dictionary with information about the channel. 
- """ - simple_dict = super().to_simple_dict() - simple_dict.update( - { - "name": self.title, - "category": self.tags, - } - ) - - return simple_dict - - return MetadataNode diff --git a/src/tribler/core/components/metadata_store/db/orm_bindings/torrent_metadata.py b/src/tribler/core/components/metadata_store/db/orm_bindings/torrent_metadata.py index fe76814cd4f..a65a0153193 100644 --- a/src/tribler/core/components/metadata_store/db/orm_bindings/torrent_metadata.py +++ b/src/tribler/core/components/metadata_store/db/orm_bindings/torrent_metadata.py @@ -1,20 +1,43 @@ +from binascii import unhexlify from datetime import datetime +import random from struct import unpack +from typing import Optional +from lz4.frame import LZ4FrameCompressor from pony import orm from pony.orm import db_session +from pony.orm.core import DEFAULT from tribler.core import notifications from tribler.core.components.metadata_store.category_filter.category import Category, default_category_filter from tribler.core.components.metadata_store.category_filter.family_filter import default_xxx_filter -from tribler.core.components.metadata_store.db.orm_bindings.channel_node import COMMITTED -from tribler.core.components.metadata_store.db.serialization import EPOCH, REGULAR_TORRENT, TorrentMetadataPayload +from tribler.core.components.metadata_store.db.serialization import EPOCH, REGULAR_TORRENT, TorrentMetadataPayload, \ + HealthItemsPayload, time2int +from tribler.core.exceptions import InvalidChannelNodeException from tribler.core.utilities.notifier import Notifier from tribler.core.utilities.tracker_utils import get_uniformed_tracker_url from tribler.core.utilities.unicode import ensure_unicode, hexlify NULL_KEY_SUBST = b"\00" +CHANNEL_DIR_NAME_PK_LENGTH = 32 # It's not 40, so it could be distinguished from infohash +CHANNEL_DIR_NAME_ID_LENGTH = 16 # Zero-padded long int in hex form +CHANNEL_DIR_NAME_LENGTH = CHANNEL_DIR_NAME_PK_LENGTH + CHANNEL_DIR_NAME_ID_LENGTH + +LZ4_EMPTY_ARCHIVE = unhexlify("04224d184040c000000000") +LZ4_END_MARK_SIZE = 4 # in bytes, from original specification. We don't use CRC + +HEALTH_ITEM_HEADER_SIZE = 4 # in bytes, len of varlenI header + +# Metadata, torrents and channel statuses +NEW = 0 # The entry is newly created and is not published yet. It will be committed at the next commit. +TODELETE = 1 # The entry is marked to be removed at the next commit. +COMMITTED = 2 # The entry is committed and seeded. +UPDATED = 6 # One of the entry's properties was updated. It will be committed at the next commit. + +PUBLIC_KEY_LEN = 64 + # This function is used to devise id_ from infohash in deterministic way. Used in FFA channels. def infohash_to_id(infohash): @@ -52,49 +75,134 @@ def tdef_to_metadata_dict(tdef, category_filter: Category = None): } -def define_binding(db, notifier: Notifier, tag_processor_version: int): - class TorrentMetadata(db.MetadataNode): +def entries_to_chunk(metadata_list, chunk_size, start_index=0, include_health=False): + """ + Put serialized data of one or more metadata entries into a single binary chunk. The data is added + incrementally until it stops fitting into the designated chunk size. The first entry is added + regardless of violating the chunk size limit. + + The chunk format is: + + + [] + + For the details of the health info format see the documentation: doc/metadata_store/serialization_format.rst + + :param metadata_list: the list of metadata to process. + :param chunk_size: the desired chunk size limit, in bytes. 
+ :param start_index: the index of the element of metadata_list from which the processing should start. + :param include_health: if True, put metadata health information into the chunk. + :return: (chunk, last_entry_index) tuple, where chunk is the resulting chunk in string form and + last_entry_index is the index of the element of the input list that was put into the chunk the last. + """ + if start_index >= len(metadata_list): + raise Exception('Could not serialize chunk: incorrect start_index', metadata_list, chunk_size, start_index) + + compressor = LZ4FrameCompressor(auto_flush=True) + metadata_buffer = compressor.begin() + health_buffer = b'' + + index = 0 + size = len(metadata_buffer) + LZ4_END_MARK_SIZE + if include_health: + size += HEALTH_ITEM_HEADER_SIZE + + for count in range(start_index, len(metadata_list)): + metadata = metadata_list[count] + metadata_bytes = compressor.compress(metadata.serialized()) + health_bytes = metadata.serialized_health() if include_health else b'' + size += len(metadata_bytes) + len(health_bytes) + + if size > chunk_size and count > 0: + # The first entry is always added even if the resulted size exceeds the chunk size. + # This lets higher levels to decide what to do in this case, e.g. send it through EVA protocol. + break + + metadata_buffer += metadata_bytes + if include_health: + health_buffer += health_bytes + index = count + + result = metadata_buffer + compressor.flush() + if include_health: + result += HealthItemsPayload(health_buffer).serialize() + + return result, index + 1 + + +def define_binding(db, notifier: Optional[Notifier], tag_processor_version: int): # noqa: MC0001 + class TorrentMetadata(db.Entity): """ This ORM binding class is intended to store Torrent objects, i.e. infohashes along with some related metadata. """ - _discriminator_ = REGULAR_TORRENT + _table_ = "ChannelNode" + _CHUNK_SIZE_LIMIT = 1 * 1024 * 1024 # We use 1MB chunks as a workaround for Python's lack of string pointers + + rowid = orm.PrimaryKey(int, size=64, auto=True) # Serializable infohash = orm.Required(bytes, index=True) size = orm.Optional(int, size=64, default=0) torrent_date = orm.Optional(datetime, default=datetime.utcnow, index=True) tracker_info = orm.Optional(str, default='') + title = orm.Optional(str, default='') + tags = orm.Optional(str, default='') + metadata_type = orm.Discriminator(int, size=16) + reserved_flags = orm.Optional(int, size=16, default=0) + origin_id = orm.Optional(int, size=64, default=0, index=True) + public_key = orm.Required(bytes) + id_ = orm.Required(int, size=64) + timestamp = orm.Required(int, size=64, default=0) + # Signature is nullable. This means that "None" entries are stored in DB as NULLs instead of empty strings. + # NULLs are not checked for uniqueness and not indexed. + # This is necessary to store unsigned signatures without violating the uniqueness constraints. 
+        signature = orm.Optional(bytes, unique=True, nullable=True, default=None)
+
+        orm.composite_key(public_key, id_)
+        orm.composite_index(public_key, origin_id)

        # Local
        added_on = orm.Optional(datetime, default=datetime.utcnow)
+        status = orm.Optional(int, default=COMMITTED)
        xxx = orm.Optional(float, default=0)
        health = orm.Optional('TorrentState', reverse='metadata')
        tag_processor_version = orm.Required(int, default=0)

        # Special class-level properties
-        _payload_class = TorrentMetadataPayload
-        payload_arguments = _payload_class.__init__.__code__.co_varnames[
-            : _payload_class.__init__.__code__.co_argcount
-        ][1:]
-        nonpersonal_attributes = db.MetadataNode.nonpersonal_attributes + (
-            'infohash',
-            'size',
-            'torrent_date',
-            'tracker_info',
-        )
+        payload_class = TorrentMetadataPayload

        def __init__(self, *args, **kwargs):
+            # Any public keys + signatures are considered to be correct at this point, and should
+            # be checked after receiving the payload from the network.
+
            if "health" not in kwargs and "infohash" in kwargs:
                infohash = kwargs["infohash"]
                health = db.TorrentState.get_for_update(infohash=infohash) or db.TorrentState(infohash=infohash)
                kwargs["health"] = health
+
            if 'xxx' not in kwargs:
                kwargs["xxx"] = default_xxx_filter.isXXXTorrentMetadataDict(kwargs)

+            if "timestamp" not in kwargs:
+                kwargs["timestamp"] = time2int(datetime.utcnow()) * 1000
+
+            if "id_" not in kwargs:
+                kwargs["id_"] = int(random.getrandbits(63))
+
+            # Free-for-all entries require special treatment
+            kwargs["public_key"] = kwargs.get("public_key", b"")
+            if kwargs["public_key"] == b"":
+                # Unsigned (FFA) entries get a NULL signature: NULLs are exempt from the uniqueness
+                # constraint on the signature column (see the comment on the attribute above), so any
+                # number of unsigned entries can be stored without violating the DB constraints.
+                kwargs["signature"] = None
+
            super().__init__(*args, **kwargs)

            if 'tracker_info' in kwargs:
                self.add_tracker(kwargs["tracker_info"])
+
            if notifier:
                notifier[notifications.new_torrent_metadata_created](infohash=kwargs.get("infohash"), title=self.title)
            self.tag_processor_version = tag_processor_version
@@ -109,7 +217,7 @@ def before_update(self):
            self.add_tracker(self.tracker_info)

        def get_magnet(self):
-            return (f"magnet:?xt=urn:btih:{hexlify(self.infohash)}&dn={self.title}") + (
+            return f"magnet:?xt=urn:btih:{hexlify(self.infohash)}&dn={self.title}" + (
                f"&tr={self.tracker_info}" if self.tracker_info else ""
            )
@@ -134,31 +242,34 @@ def to_simple_dict(self):
        """
        Return a basic dictionary with information about the torrent.
""" - simple_dict = super().to_simple_dict() epoch = datetime.utcfromtimestamp(0) - simple_dict.update( - { - "infohash": hexlify(self.infohash), - "size": self.size, - "num_seeders": self.health.seeders, - "num_leechers": self.health.leechers, - "last_tracker_check": self.health.last_check, - "created": int((self.torrent_date - epoch).total_seconds()), - "tag_processor_version": self.tag_processor_version, - } - ) + return { + "name": self.title, + "category": self.tags, + "infohash": hexlify(self.infohash), + "size": self.size, + "num_seeders": self.health.seeders, + "num_leechers": self.health.leechers, + "last_tracker_check": self.health.last_check, + "created": int((self.torrent_date - epoch).total_seconds()), + "tag_processor_version": self.tag_processor_version, + "type": self.get_type(), + "id": self.id_, + "origin_id": self.origin_id, + "public_key": hexlify(self.public_key), + "status": self.status, + } - return simple_dict + def get_type(self) -> int: + return self._discriminator_ - def metadata_conflicting(self, b): - # Check if metadata in the given dict has conflicts with this entry - # WARNING! This does NOT check the INFOHASH - a = self.to_dict() - for comp in ["title", "size", "tags", "torrent_date", "tracker_info"]: - if (comp not in b) or (str(a[comp]) == str(b[comp])): - continue - return True - return False + @classmethod + def from_payload(cls, payload): + return cls(**payload.to_dict()) + + @classmethod + def from_dict(cls, dct): + return cls(**dct) @classmethod @db_session @@ -177,4 +288,17 @@ def serialized_health(self) -> bytes: return b';' return b'%d,%d,%d;' % (health.seeders or 0, health.leechers or 0, health.last_check or 0) + def serialized(self, key=None): + """ + Serializes the object and returns the result with added signature (blob output) + :param key: private key to sign object with + :return: serialized_data+signature binary string + """ + kwargs = self.to_dict() + payload = self.payload_class.from_dict(**kwargs) + payload.signature = kwargs.pop('signature', None) or payload.signature + if key: + payload.add_signature(key) + return payload.serialized() + payload.signature + return TorrentMetadata diff --git a/src/tribler/core/components/metadata_store/db/orm_bindings/vsids.py b/src/tribler/core/components/metadata_store/db/orm_bindings/vsids.py deleted file mode 100644 index 2541e7ed712..00000000000 --- a/src/tribler/core/components/metadata_store/db/orm_bindings/vsids.py +++ /dev/null @@ -1,114 +0,0 @@ -import datetime -import math - -from pony import orm -from pony.orm import db_session - -from tribler.core.components.metadata_store.db.orm_bindings.channel_node import LEGACY_ENTRY - - -def define_binding(db): - # ACHTUNG! This thing should be used as a singleton, i.e. there should be only a single row there! - # We store it as a DB object only to make the counters persistent. - - # VSIDS-based votes ratings - # We use VSIDS since it provides an efficient way to add temporal decay to the voting system. - # Temporal decay is necessary for two reasons: - # 1. We do not gossip _unsubscription_ events, but we want votes decline for channels that go out of favor - # 2. We want to promote the fresh content - # - # There are two differences with the classic VSIDS: - # a. We scale the bump amount with passage of time, instead of on each bump event. - # By default, the bump amount scales 2.71 per 23hrs. Note though, that we only count Tribler uptime - # for this purpose. 
This is intentional, so the ratings do not suddenly drop after the user skips a week - # of uptime. - # b. Repeated votes by some peer to some channel _do not add up_. Instead, the vote is refreshed by substracting - # the old amount from the current vote (it is stored in the DB), and adding the new one (1.0 votes, scaled). This - # is the reason why we have to keep the old votes in the DB, and normalize the old votes last_amount values - to - # keep them in the same "normalization space" to be compatible with the current votes values. - - # This binding is used to store normalization data and stats for VSIDS - class Vsids(db.Entity): - """ - This ORM class is used to hold persistent information for the state of VSIDS scoring system. - ACHTUNG! At all times there should be no more than one row/entity of this class. A single entity is - enough to keep the information for the whole GigaChannels. - In a sense, *this is a singleton*. - """ - - rowid = orm.PrimaryKey(int) - bump_amount = orm.Required(float) - total_activity = orm.Required(float) - last_bump = orm.Required(datetime.datetime) - rescale_threshold = orm.Optional(float, default=10.0 ** 100) - exp_period = orm.Optional(float, default=24.0 * 60 * 60 * 3) # decay e times over this period of seconds - max_val = orm.Optional(float, default=1.0) - - @db_session - def rescale(self, norm): - for channel in db.ChannelMetadata.select(lambda g: g.status != LEGACY_ENTRY): - channel.votes /= norm - for vote in db.ChannelVote.select(): - vote.last_amount /= norm - - self.max_val /= norm - self.total_activity /= norm - self.bump_amount /= norm - db.ChannelMetadata.votes_scaling = self.max_val - - # Normalization routine should normally be called only in case the values in the DB do not look normal - @db_session - def normalize(self): - # If we run the normalization for the first time during the runtime, we have to gather the activity from DB - self.total_activity = self.total_activity or orm.sum(g.votes for g in db.ChannelMetadata) - channel_count = orm.count(db.ChannelMetadata.select(lambda g: g.status != LEGACY_ENTRY)) - if not channel_count: - return - if self.total_activity > 0.0: - self.rescale(self.total_activity / channel_count) - self.bump_amount = 1.0 - - @db_session - def bump_channel(self, channel, vote): - now = datetime.datetime.utcnow() - - # Subtract the last vote by the same peer from the total vote amount for this channel. - # This effectively puts a cap of 1.0 vote from a peer on a channel - channel.votes -= vote.last_amount - self.total_activity -= vote.last_amount - - # Next, increase the bump amount based on the time passed since the last bump - # (Increasing the bump amount is the equivalent of decaying the values of votes, - # cast for all other channels) - self.bump_amount *= math.exp((now - self.last_bump).total_seconds() / self.exp_period) - self.last_bump = now - - # To cap the future votes from this peer, note the last bump vote - # amount added to this channel by the voting peer. 
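-            # (a repeated vote from the same peer is therefore first subtracted at its old scale above,
-            # and then re-added at the current, larger scale)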
- vote.last_amount = self.bump_amount - - # Add the vote to the accumulated sum of all votes for the channel - channel.votes += self.bump_amount - - # Keep track of total activity and max vote amount to assist with votes scaling/normalization - self.total_activity += self.bump_amount - if channel.votes > self.max_val: - self.max_val = channel.votes - db.ChannelMetadata.votes_scaling = self.max_val - - # Renormalize all votes in the database if current bump amount is nearing the float - # precision threshold - if self.bump_amount > self.rescale_threshold: - self.rescale(self.bump_amount) - - @classmethod - @db_session - def create_default_vsids(cls): - return cls( - rowid=0, - bump_amount=1.0, - total_activity=(orm.sum(g.votes for g in db.ChannelMetadata) or 0.0), - last_bump=datetime.datetime.utcnow(), - ) - - return Vsids diff --git a/src/tribler/core/components/metadata_store/db/serialization.py b/src/tribler/core/components/metadata_store/db/serialization.py index 58be16a695e..301e0474719 100644 --- a/src/tribler/core/components/metadata_store/db/serialization.py +++ b/src/tribler/core/components/metadata_store/db/serialization.py @@ -6,12 +6,12 @@ from ipv8.keyvault.crypto import default_eccrypto from ipv8.messaging.lazy_payload import VariablePayload, vp_compile -from ipv8.messaging.payload import Payload -from ipv8.messaging.serialization import default_serializer +from ipv8.messaging.serialization import default_serializer, VarLenUtf8 -from tribler.core.exceptions import InvalidSignatureException from tribler.core.utilities.unicode import hexlify +default_serializer.add_packer('varlenIutf8', VarLenUtf8('>I')) # TODO: move to IPv8 + EPOCH = datetime(1970, 1, 1) SIGNATURE_SIZE = 64 @@ -67,495 +67,130 @@ class UnknownBlobTypeException(Exception): def read_payload_with_offset(data, offset=0): # First we have to determine the actual payload type metadata_type = struct.unpack_from('>H', data, offset=offset)[0] - payload_class = DISCRIMINATOR_TO_PAYLOAD_CLASS.get(metadata_type) + payload_class = METADATA_TYPE_TO_PAYLOAD_CLASS.get(metadata_type) if payload_class is not None: - return payload_class.from_signed_blob_with_offset(data, offset=offset) + payload, offset = default_serializer.unpack_serializable(payload_class, data, offset=offset) + payload.signature = data[offset: offset + 64] + return payload, offset + 64 # Unknown metadata type, raise exception raise UnknownBlobTypeException -class SignedPayload(Payload): - """ - Payload for metadata. - """ - +@vp_compile +class SignedPayload(VariablePayload): + names = ['metadata_type', 'reserved_flags', 'public_key'] format_list = ['H', 'H', '64s'] + signature: bytes = NULL_SIG - def __init__(self, metadata_type, reserved_flags, public_key, **kwargs): - super().__init__() - self.metadata_type = metadata_type - self.reserved_flags = reserved_flags - self.public_key = bytes(public_key) - self.signature = bytes(kwargs["signature"]) if "signature" in kwargs and kwargs["signature"] else None - - # Special case: free-for-all entries are allowed to go with zero key and without sig check - if "unsigned" in kwargs and kwargs["unsigned"]: - self.public_key = NULL_KEY - self.signature = NULL_SIG - return - - if "skip_key_check" in kwargs and kwargs["skip_key_check"]: - return - - # This is integrity check for FFA payloads. 
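-        # FFA (free-for-all) entries are unsigned: a null public key is only acceptable in
-        # combination with a null signature.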
- if self.public_key == NULL_KEY: - if self.signature == NULL_SIG: - return - raise InvalidSignatureException("Tried to create FFA payload with non-null signature") - - serialized_data = default_serializer.pack_serializable(self) - if "key" in kwargs and kwargs["key"]: - key = kwargs["key"] - if self.public_key != key.pub().key_to_bin()[10:]: - raise KeysMismatchException(self.public_key, key.pub().key_to_bin()[10:]) - self.signature = default_eccrypto.create_signature(key, serialized_data) - elif "signature" in kwargs: - # This check ensures that an entry with a wrong signature will not proliferate further - if not default_eccrypto.is_valid_signature( - default_eccrypto.key_from_public_bin(b"LibNaCLPK:" + self.public_key), serialized_data, - self.signature - ): - raise InvalidSignatureException("Tried to create payload with wrong signature") - else: - raise InvalidSignatureException("Tried to create payload without signature") - - def to_pack_list(self): - data = [('H', self.metadata_type), ('H', self.reserved_flags), ('64s', self.public_key)] - return data - - @classmethod - def from_unpack_list(cls, metadata_type, reserved_flags, public_key, **kwargs): # pylint: disable=W0221 - return SignedPayload(metadata_type, reserved_flags, public_key, **kwargs) - - @classmethod - def from_signed_blob(cls, data, check_signature=True): - return cls.from_signed_blob_with_offset(data, check_signature)[0] - - @classmethod - def from_signed_blob_with_offset(cls, data, check_signature=True, offset=0): - unpack_list = [] - for format_str in cls.format_list: - offset = default_serializer.get_packer_for(format_str).unpack(data, offset, unpack_list) - if check_signature: - signature = data[offset: offset + SIGNATURE_SIZE] - payload = cls.from_unpack_list(*unpack_list, signature=signature) # pylint: disable=E1120 - else: - payload = cls.from_unpack_list(*unpack_list, skip_key_check=True) # pylint: disable=E1120 - return payload, offset + SIGNATURE_SIZE - - def to_dict(self): - return { - "metadata_type": self.metadata_type, - "reserved_flags": self.reserved_flags, - "public_key": self.public_key, - "signature": self.signature, - } - - def _serialized(self): - serialized_data = default_serializer.pack_serializable(self) - return serialized_data, self.signature + public_key: bytes def serialized(self): - return b''.join(self._serialized()) - - @classmethod - def from_file(cls, filepath): - with open(filepath, 'rb') as f: - return cls.from_signed_blob(f.read()) - - -# fmt: off -class ChannelNodePayload(SignedPayload): - format_list = SignedPayload.format_list + ['Q', 'Q', 'Q'] - - def __init__( - self, - metadata_type, reserved_flags, public_key, # SignedPayload - id_, origin_id, timestamp, # ChannelNodePayload - **kwargs): - self.id_ = id_ - self.origin_id = origin_id - self.timestamp = timestamp - super().__init__( - metadata_type, reserved_flags, public_key, # SignedPayload - **kwargs) - - def to_pack_list(self): - data = super().to_pack_list() - data.append(('Q', self.id_)) - data.append(('Q', self.origin_id)) - data.append(('Q', self.timestamp)) - return data + return default_serializer.pack_serializable(self) @classmethod - def from_unpack_list( # pylint: disable=arguments-differ - cls, - metadata_type, reserved_flags, public_key, # SignedPayload - id_, origin_id, timestamp, # ChannelNodePayload - **kwargs): - return ChannelNodePayload( - metadata_type, reserved_flags, public_key, # SignedPayload - id_, origin_id, timestamp, # ChannelNodePayload - **kwargs) + def from_signed_blob(cls, serialized): + 
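+        # unpack_serializable() parses the payload fields and returns the offset where they end;
+        # everything after that offset is the trailing 64-byte signature.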
payload, offset = default_serializer.unpack_serializable(cls, serialized) + payload.signature = serialized[offset:] + return payload def to_dict(self): - dct = super().to_dict() - dct.update( - {"id_": self.id_, - "origin_id": self.origin_id, - "timestamp": self.timestamp - }) - return dct - - -class JsonNodePayload(ChannelNodePayload): - format_list = ChannelNodePayload.format_list + ['varlenI'] - - def __init__( - self, - metadata_type, reserved_flags, public_key, # SignedPayload - id_, origin_id, timestamp, # ChannelNodePayload - json_text, # JsonNodePayload - **kwargs): - self.json_text = json_text.decode('utf-8') if isinstance(json_text, bytes) else json_text - super().__init__( - metadata_type, reserved_flags, public_key, # SignedPayload - id_, origin_id, timestamp, # ChannelNodePayload - **kwargs - ) - - def to_pack_list(self): - data = super().to_pack_list() - data.append(('varlenI', self.json_text.encode('utf-8'))) - return data + return {name: getattr(self, name) for name in (self.names + ['signature'])} @classmethod - def from_unpack_list( # pylint: disable=arguments-differ - cls, - metadata_type, reserved_flags, public_key, # SignedPayload - id_, origin_id, timestamp, # ChannelNodePayload - json_text, # JsonNodePayload - **kwargs - ): - return cls( - metadata_type, reserved_flags, public_key, # SignedPayload - id_, origin_id, timestamp, # ChannelNodePayload - json_text, # JsonNodePayload - **kwargs - ) + def from_dict(cls, **kwargs): + return cls(**{key: value for key, value in kwargs.items() if key in cls.names}) - def to_dict(self): - dct = super().to_dict() - dct.update({"json_text": self.json_text}) - return dct + def add_signature(self, key): + self.public_key = key.pub().key_to_bin()[10:] + self.signature = default_eccrypto.create_signature(key, self.serialized()) + def has_signature(self): + return self.public_key != NULL_KEY or self.signature != NULL_SIG -class BinaryNodePayload(ChannelNodePayload): - format_list = ChannelNodePayload.format_list + ['varlenI', 'varlenI'] - - def __init__( - self, - metadata_type, reserved_flags, public_key, # SignedPayload - id_, origin_id, timestamp, # ChannelNodePayload - binary_data, data_type, # BinaryNodePayload - **kwargs): - self.binary_data = binary_data - self.data_type = data_type.decode('utf-8') if isinstance(data_type, bytes) else data_type - super().__init__( - metadata_type, reserved_flags, public_key, # SignedPayload - id_, origin_id, timestamp, # ChannelNodePayload - **kwargs + def check_signature(self): + return default_eccrypto.is_valid_signature( + default_eccrypto.key_from_public_bin(b"LibNaCLPK:" + self.public_key), + self.serialized(), + self.signature ) - def to_pack_list(self): - data = super().to_pack_list() - data.append(('varlenI', self.binary_data)) - data.append(('varlenI', self.data_type.encode('utf-8'))) - return data - @classmethod - def from_unpack_list( # pylint: disable=arguments-differ - cls, - metadata_type, reserved_flags, public_key, # SignedPayload - id_, origin_id, timestamp, # ChannelNodePayload - binary_data, data_type, # BinaryNodePayload - **kwargs - ): - return cls( - metadata_type, reserved_flags, public_key, # SignedPayload - id_, origin_id, timestamp, # ChannelNodePayload - binary_data, data_type, # BinaryNodePayload - **kwargs - ) - - def to_dict(self): - dct = super().to_dict() - dct.update({"binary_data": self.binary_data}) - dct.update({"data_type": self.data_type}) - return dct +@vp_compile +class ChannelNodePayload(SignedPayload): + names = SignedPayload.names + ['id_', 'origin_id', 
'timestamp'] + format_list = SignedPayload.format_list + ['Q', 'Q', 'Q'] +@vp_compile class MetadataNodePayload(ChannelNodePayload): - format_list = ChannelNodePayload.format_list + ['varlenI', 'varlenI'] - - def __init__( - self, - metadata_type, reserved_flags, public_key, # SignedPayload - id_, origin_id, timestamp, # ChannelNodePayload - title, tags, # MetadataNodePayload - **kwargs): - self.title = title.decode('utf-8') if isinstance(title, bytes) else title - self.tags = tags.decode('utf-8') if isinstance(tags, bytes) else tags - super().__init__( - metadata_type, reserved_flags, public_key, # SignedPayload - id_, origin_id, timestamp, # ChannelNodePayload - **kwargs - ) + names = ChannelNodePayload.names + ['title', 'tags'] + format_list = ChannelNodePayload.format_list + ['varlenIutf8', 'varlenIutf8'] - def to_pack_list(self): - data = super().to_pack_list() - data.append(('varlenI', self.title.encode('utf-8'))) - data.append(('varlenI', self.tags.encode('utf-8'))) - return data - @classmethod - def from_unpack_list( # pylint: disable=arguments-differ - cls, - metadata_type, reserved_flags, public_key, # SignedPayload - id_, origin_id, timestamp, # ChannelNodePayload - title, tags, # MetadataNodePayload - **kwargs - ): - return MetadataNodePayload( - metadata_type, reserved_flags, public_key, # SignedPayload - id_, origin_id, timestamp, # ChannelNodePayload - title, tags, # MetadataNodePayload - **kwargs - ) +@vp_compile +class JsonNodePayload(ChannelNodePayload): + names = ChannelNodePayload.names + ['json_text'] + format_list = ChannelNodePayload.format_list + ['varlenIutf8'] - def to_dict(self): - dct = super().to_dict() - dct.update( - {"title": self.title, - "tags": self.tags}) - return dct +@vp_compile +class BinaryNodePayload(ChannelNodePayload): + names = ChannelNodePayload.names + ['binary_data', 'data_type'] + format_list = ChannelNodePayload.format_list + ['varlenI', 'varlenIutf8'] -class CollectionNodePayload(MetadataNodePayload): - """ - Payload for metadata that stores a collection - """ +@vp_compile +class CollectionNodePayload(MetadataNodePayload): + names = MetadataNodePayload.names + ['num_entries'] format_list = MetadataNodePayload.format_list + ['Q'] - def __init__( - self, - metadata_type, reserved_flags, public_key, # SignedPayload - id_, origin_id, timestamp, # ChannelNodePayload - title, tags, # MetadataNodePayload - num_entries, # CollectionNodePayload - **kwargs - ): - self.num_entries = num_entries - super().__init__( - metadata_type, reserved_flags, public_key, # SignedPayload - id_, origin_id, timestamp, # ChannelNodePayload - title, tags, # MetadataNodePayload - **kwargs - ) - - def to_pack_list(self): - data = super().to_pack_list() - data.append(('Q', self.num_entries)) - return data - - @classmethod - def from_unpack_list( # pylint: disable=arguments-differ - cls, - metadata_type, reserved_flags, public_key, # SignedPayload - id_, origin_id, timestamp, # ChannelNodePayload - title, tags, # MetadataNodePayload - num_entries, # CollectionNodePayload - **kwargs - ): - return CollectionNodePayload( - metadata_type, reserved_flags, public_key, # SignedPayload - id_, origin_id, timestamp, # ChannelNodePayload - title, tags, # MetadataNodePayload - num_entries, # CollectionNodePayload - **kwargs - ) - - def to_dict(self): - dct = super().to_dict() - dct.update({"num_entries": self.num_entries}) - return dct - +@vp_compile class TorrentMetadataPayload(ChannelNodePayload): """ Payload for metadata that stores a torrent. 
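    On top of the channel-node fields it adds the fixed-size infohash, size and torrent_date,
    followed by variable-length utf-8 title, tags and tracker_info strings.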
""" - format_list = ChannelNodePayload.format_list + ['20s', 'Q', 'I', 'varlenI', 'varlenI', 'varlenI'] - - def __init__( - self, - metadata_type, reserved_flags, public_key, # SignedPayload - id_, origin_id, timestamp, # ChannelNodePayload - infohash, size, torrent_date, title, tags, tracker_info, # TorrentMetadataPayload - **kwargs): - self.infohash = bytes(infohash) - self.size = size - self.torrent_date = time2int(torrent_date) if isinstance(torrent_date, datetime) else torrent_date - self.title = title.decode('utf-8') if isinstance(title, bytes) else title - self.tags = tags.decode('utf-8') if isinstance(tags, bytes) else tags - self.tracker_info = tracker_info.decode('utf-8') if isinstance(tracker_info, bytes) else tracker_info - super().__init__( - metadata_type, reserved_flags, public_key, # SignedPayload - id_, origin_id, timestamp, # ChannelNodePayload - **kwargs - ) + names = ChannelNodePayload.names + ['infohash', 'size', 'torrent_date', 'title', 'tags', 'tracker_info'] + format_list = ChannelNodePayload.format_list + ['20s', 'Q', 'I', 'varlenIutf8', 'varlenIutf8', 'varlenIutf8'] - def to_pack_list(self): - data = super().to_pack_list() - data.append(('20s', self.infohash)) - data.append(('Q', self.size)) - data.append(('I', self.torrent_date)) - data.append(('varlenI', self.title.encode('utf-8'))) - data.append(('varlenI', self.tags.encode('utf-8'))) - data.append(('varlenI', self.tracker_info.encode('utf-8'))) - return data + def fix_pack_torrent_date(self, value): + if isinstance(value, datetime): + return time2int(value) + return value @classmethod - def from_unpack_list( # pylint: disable=arguments-differ - cls, - metadata_type, reserved_flags, public_key, # SignedPayload - id_, origin_id, timestamp, # ChannelNodePayload - infohash, size, torrent_date, title, tags, tracker_info, # TorrentMetadataPayload - **kwargs): - return TorrentMetadataPayload( - metadata_type, reserved_flags, public_key, # SignedPayload - id_, origin_id, timestamp, # ChannelNodePayload - infohash, size, torrent_date, title, tags, tracker_info, # TorrentMetadataPayload - **kwargs) - - def to_dict(self): - dct = super().to_dict() - dct.update( - { - "infohash": self.infohash, - "size": self.size, - "torrent_date": int2time(self.torrent_date), - "title": self.title, - "tags": self.tags, - "tracker_info": self.tracker_info, - } - ) - return dct + def fix_unpack_torrent_date(cls, value): + return int2time(value) def get_magnet(self): - return (f"magnet:?xt=urn:btih:{hexlify(self.infohash)}&dn={self.title.encode('utf8')}") + ( + return f"magnet:?xt=urn:btih:{hexlify(self.infohash)}&dn={self.title.encode('utf8')}" + ( f"&tr={self.tracker_info.encode('utf8')}" if self.tracker_info else "" ) +@vp_compile class ChannelMetadataPayload(TorrentMetadataPayload): """ Payload for metadata that stores a channel. 
""" + names = TorrentMetadataPayload.names + ['num_entries', 'start_timestamp'] format_list = TorrentMetadataPayload.format_list + ['Q'] + ['Q'] - def __init__( - self, - metadata_type, reserved_flags, public_key, # SignedPayload - id_, origin_id, timestamp, # ChannelNodePayload - infohash, size, torrent_date, title, tags, tracker_info, # TorrentMetadataPayload - num_entries, start_timestamp, # ChannelMetadataPayload - **kwargs): - self.num_entries = num_entries - self.start_timestamp = start_timestamp - super().__init__( - metadata_type, reserved_flags, public_key, # SignedPayload - id_, origin_id, timestamp, # ChannelNodePayload - infohash, size, torrent_date, title, tags, tracker_info, # TorrentMetadataPayload - **kwargs) - - def to_pack_list(self): - data = super().to_pack_list() - data.append(('Q', self.num_entries)) - data.append(('Q', self.start_timestamp)) - return data - - @classmethod - def from_unpack_list( # pylint: disable=arguments-differ - cls, - metadata_type, reserved_flags, public_key, # SignedPayload - id_, origin_id, timestamp, # ChannelNodePayload - infohash, size, torrent_date, title, tags, tracker_info, # TorrentMetadataPayload - num_entries, start_timestamp, # ChannelMetadataPayload - **kwargs): - return ChannelMetadataPayload( - metadata_type, reserved_flags, public_key, # SignedPayload - id_, origin_id, timestamp, # ChannelNodePayload - infohash, size, torrent_date, title, tags, tracker_info, # TorrentMetadataPayload - num_entries, start_timestamp, # ChannelMetadataPayload - **kwargs) - - def to_dict(self): - dct = super().to_dict() - dct.update({ - "num_entries": self.num_entries, - "start_timestamp": self.start_timestamp - }) - return dct - +@vp_compile class DeletedMetadataPayload(SignedPayload): """ Payload for metadata that stores deleted metadata. 
""" + names = SignedPayload.names + ['delete_signature'] format_list = SignedPayload.format_list + ['64s'] - def __init__( - self, - metadata_type, reserved_flags, public_key, # SignedPayload - delete_signature, # DeletedMetadataPayload - **kwargs): - self.delete_signature = bytes(delete_signature) - super().__init__( - metadata_type, reserved_flags, public_key, # SignedPayload - **kwargs) - - def to_pack_list(self): - data = super().to_pack_list() - data.append(('64s', self.delete_signature)) - return data - - @classmethod - def from_unpack_list( # pylint: disable=arguments-differ - cls, - metadata_type, reserved_flags, public_key, # SignedPayload - delete_signature, # DeletedMetadataPayload - **kwargs): - return DeletedMetadataPayload( - metadata_type, reserved_flags, public_key, # SignedPayload - delete_signature, # DeletedMetadataPayload - **kwargs) - - def to_dict(self): - dct = super().to_dict() - dct.update({"delete_signature": self.delete_signature}) - return dct - - -# fmt: on - -DISCRIMINATOR_TO_PAYLOAD_CLASS = { +METADATA_TYPE_TO_PAYLOAD_CLASS = { REGULAR_TORRENT: TorrentMetadataPayload, CHANNEL_TORRENT: ChannelMetadataPayload, COLLECTION_NODE: CollectionNodePayload, diff --git a/src/tribler/core/components/metadata_store/db/store.py b/src/tribler/core/components/metadata_store/db/store.py index 58c781058ae..4d4663a4bc9 100644 --- a/src/tribler/core/components/metadata_store/db/store.py +++ b/src/tribler/core/components/metadata_store/db/store.py @@ -1,3 +1,5 @@ +import enum +from dataclasses import dataclass, field import logging import re from datetime import datetime, timedelta @@ -9,51 +11,51 @@ from pony.orm import db_session, desc, left_join, raw_sql, select from pony.orm.dbproviders.sqlite import keep_exception -from tribler.core import notifications +from tribler.core.components.metadata_store.category_filter.l2_filter import is_forbidden from tribler.core.components.metadata_store.db.orm_bindings import ( - binary_node, - channel_description, - channel_metadata, - channel_node, - channel_peer, - channel_thumbnail, - channel_vote, - collection_node, - json_node, - metadata_node, misc, torrent_metadata, torrent_state as torrent_state_, tracker_state, - vsids, ) -from tribler.core.components.metadata_store.db.orm_bindings.channel_metadata import get_mdblob_sequence_number -from tribler.core.components.metadata_store.db.orm_bindings.channel_node import LEGACY_ENTRY, TODELETE from tribler.core.components.metadata_store.db.orm_bindings.torrent_metadata import NULL_KEY_SUBST from tribler.core.components.metadata_store.db.serialization import ( - BINARY_NODE, - CHANNEL_DESCRIPTION, - CHANNEL_NODE, - CHANNEL_THUMBNAIL, CHANNEL_TORRENT, COLLECTION_NODE, HealthItemsPayload, - JSON_NODE, - METADATA_NODE, REGULAR_TORRENT, read_payload_with_offset, + NULL_KEY ) -from tribler.core.components.metadata_store.remote_query_community.payload_checker import process_payload + from tribler.core.components.torrent_checker.torrent_checker.dataclasses import HealthInfo -from tribler.core.exceptions import InvalidSignatureException from tribler.core.utilities.db_corruption_handling.base import DatabaseIsCorrupted, handle_db_if_corrupted from tribler.core.utilities.notifier import Notifier from tribler.core.utilities.path_util import Path from tribler.core.utilities.pony_utils import TrackedDatabase, get_max, get_or_create, run_threaded from tribler.core.utilities.search_utils import torrent_rank -from tribler.core.utilities.unicode import hexlify from tribler.core.utilities.utilities import 
MEMORY_DB + +class ObjState(enum.Enum): + UPDATED_LOCAL_VERSION = enum.auto() # We updated the local version of the ORM object with the received one + LOCAL_VERSION_NEWER = enum.auto() # The local version of the ORM object is newer than the received one + LOCAL_VERSION_SAME = enum.auto() # The local version of the ORM object is the same as the received one + NEW_OBJECT = enum.auto() # The received object is unknown to us and thus added to ORM + DUPLICATE_OBJECT = enum.auto() # We already know about the received object + + +@dataclass +class ProcessingResult: + # This class is used to return results of processing of a payload by process_payload. + # It includes the ORM object created as a result of processing, the state of the object + # as indicated by the ObjState enum, and a list of missing dependencies, given as query + # arguments for get_entries to query the sender back through the Remote Query Community + md_obj: object = None + obj_state: object = None + missing_deps: list = field(default_factory=list) + + BETA_DB_VERSIONS = [0, 1, 2, 3, 4, 5] CURRENT_DB_VERSION = 15 @@ -103,31 +105,6 @@ END; """ -sql_create_partial_index_channelnode_subscribed = """ - CREATE INDEX IF NOT EXISTS idx_channelnode__metadata_subscribed__partial ON "ChannelNode" (subscribed) - WHERE subscribed = 1 -""" - -# ACHTUNG! When adding a new metadata_type which should be indexed, you need to add -# it to this list and write a database upgrade which recreates the partial index -indexed_metadata_types = [ - CHANNEL_NODE, - METADATA_NODE, - COLLECTION_NODE, - JSON_NODE, - CHANNEL_DESCRIPTION, - BINARY_NODE, - CHANNEL_THUMBNAIL, - CHANNEL_TORRENT, -] # Does not include REGULAR_TORRENT! We don't want regular torrents to be added to the partial index. - -sql_create_partial_index_channelnode_metadata_type = """ - CREATE INDEX IF NOT EXISTS idx_channelnode__metadata_type__partial ON "ChannelNode" (metadata_type) - WHERE %s; -""" % ' OR '.join( - f'metadata_type = {discriminator_value}' for discriminator_value in indexed_metadata_types -) - sql_create_partial_index_torrentstate_last_check = """ CREATE INDEX IF NOT EXISTS idx_torrentstate__last_check__partial ON TorrentState (last_check, seeders, leechers, self_checked) @@ -191,29 +168,11 @@ def on_connect(_, connection): self.TrackerState = tracker_state.define_binding(self.db) self.TorrentState = torrent_state_.define_binding(self.db) - - self.ChannelNode = channel_node.define_binding(self.db, logger=self._logger, key=my_key) - - self.MetadataNode = metadata_node.define_binding(self.db) - self.CollectionNode = collection_node.define_binding(self.db) self.TorrentMetadata = torrent_metadata.define_binding( self.db, notifier=notifier, tag_processor_version=tag_processor_version ) - self.ChannelMetadata = channel_metadata.define_binding(self.db) - - self.JsonNode = json_node.define_binding(self.db, db_version) - self.ChannelDescription = channel_description.define_binding(self.db) - - self.BinaryNode = binary_node.define_binding(self.db, db_version) - self.ChannelThumbnail = channel_thumbnail.define_binding(self.db) - - self.ChannelVote = channel_vote.define_binding(self.db) - self.ChannelPeer = channel_peer.define_binding(self.db) - self.Vsids = vsids.define_binding(self.db) - - self.ChannelMetadata._channels_dir = channels_dir # pylint: disable=protected-access if db_filename is MEMORY_DB: create_db = True @@ -233,18 +192,11 @@ def on_connect(_, connection): self.db.execute(sql_create_fts_table) self.create_fts_triggers() self.create_torrentstate_triggers() - 
self.create_partial_indexes() if create_db: with db_session: self.MiscData(name="db_version", value=str(db_version)) - with db_session: - default_vsids = self.Vsids.get(rowid=0) - if not default_vsids: - default_vsids = self.Vsids.create_default_vsids() - self.ChannelMetadata.votes_scaling = default_vsids.max_val - def set_value(self, key: str, value: str): key_value = get_or_create(self.MiscData, name=key) key_value.value = value @@ -295,161 +247,14 @@ def create_torrentstate_triggers(self): cursor.execute(sql_add_torrentstate_trigger_after_insert) cursor.execute(sql_add_torrentstate_trigger_after_update) - def create_partial_indexes(self): - cursor = self.db.get_connection().cursor() - cursor.execute(sql_create_partial_index_channelnode_subscribed) - cursor.execute(sql_create_partial_index_channelnode_metadata_type) - - @db_session - def upsert_vote(self, channel, peer_pk): - voter = self.ChannelPeer.get_for_update(public_key=peer_pk) - if not voter: - voter = self.ChannelPeer(public_key=peer_pk) - vote = self.ChannelVote.get_for_update(voter=voter, channel=channel) - if not vote: - vote = self.ChannelVote(voter=voter, channel=channel) - else: - vote.vote_date = datetime.utcnow() - return vote - - @db_session - def vote_bump(self, public_key, id_, voter_pk): - channel = self.ChannelMetadata.get_for_update(public_key=public_key, id_=id_) - if not channel: - return - vote = self.upsert_vote(channel, voter_pk) - - self.Vsids[0].bump_channel(channel, vote) - def shutdown(self): self._shutting_down = True self.db.disconnect() - @staticmethod - def get_list_of_channel_blobs_to_process(dirname, start_timestamp): - blobs_to_process = [] - total_blobs_size = 0 - for full_filename in sorted(dirname.iterdir()): - blob_sequence_number = get_mdblob_sequence_number(full_filename.name) - - if blob_sequence_number is None or blob_sequence_number <= start_timestamp: - continue - blob_size = full_filename.stat().st_size - total_blobs_size += blob_size - blobs_to_process.append((blob_sequence_number, full_filename, blob_size)) - return blobs_to_process, total_blobs_size - @db_session def get_channel_dir_path(self, channel): return self.channels_dir / channel.dirname - @db_session - def compute_channel_update_progress(self, channel): - blobs_to_process, total_blobs_size = self.get_list_of_channel_blobs_to_process( - self.get_channel_dir_path(channel), channel.start_timestamp - ) - processed_blobs_size = 0 - for blob_sequence_number, _, blob_size in blobs_to_process: - if channel.local_version >= blob_sequence_number >= channel.start_timestamp: - processed_blobs_size += blob_size - return float(processed_blobs_size) / total_blobs_size - - def process_channel_dir(self, dirname, public_key, id_, **kwargs): - """ - Load all metadata blobs in a given directory. - :param dirname: The directory containing the metadata blobs. - :param skip_personal_metadata_payload: if this is set to True, personal torrent metadata payload received - through gossip will be ignored. The default value is True. - :param external_thread: indicate to lower levels that this is running on a background thread - :param public_key: public_key of the channel. - :param id_: id_ of the channel. 
- """ - # We use multiple separate db_sessions here to limit the memory and reactor time impact, - # but we must check the existence of the channel every time to avoid race conditions - with db_session: - channel = self.ChannelMetadata.get(public_key=public_key, id_=id_) - if not channel: - return - self._logger.debug( - "Starting processing channel dir %s. Channel %s local/max version %i/%i", - dirname, - hexlify(channel.public_key), - channel.local_version, - channel.timestamp, - ) - - blobs_to_process, total_blobs_size = self.get_list_of_channel_blobs_to_process(dirname, channel.start_timestamp) - - # We count total size of all the processed blobs to estimate the progress of channel processing - # Counting the blobs' sizes are the only reliable way to estimate the remaining processing time, - # because it accounts for potential deletions, entry modifications, etc. - processed_blobs_size = 0 - for blob_sequence_number, full_filename, blob_size in blobs_to_process: - processed_blobs_size += blob_size - # Skip blobs containing data we already have and those that are - # ahead of the channel version known to us - # ==================| channel data |=== - # ===start_timestamp|---local_version----timestamp|=== - # local_version is essentially a cursor pointing into the current state of update process - with db_session: - channel = self.ChannelMetadata.get(public_key=public_key, id_=id_) - if not channel: - return - if ( - blob_sequence_number <= channel.start_timestamp - or blob_sequence_number <= channel.local_version - or blob_sequence_number > channel.timestamp - ): - continue - try: - self.process_mdblob_file(str(full_filename), **kwargs, channel_public_key=public_key) - # If we stopped mdblob processing due to shutdown flag, we should stop - # processing immediately, so that the channel local version will not increase - if self._shutting_down: - return - # We track the local version of the channel while reading blobs - with db_session: - channel = self.ChannelMetadata.get_for_update(public_key=public_key, id_=id_) - if not channel: - return - channel.local_version = blob_sequence_number - if self.notifier: - channel_update_dict = channel.to_simple_dict() - channel_update_dict["progress"] = float(processed_blobs_size) / total_blobs_size - self.notifier[notifications.channel_entity_updated](channel_update_dict) - except InvalidSignatureException: - self._logger.error("Not processing metadata located at %s: invalid signature", full_filename) - - with db_session: - channel = self.ChannelMetadata.get(public_key=public_key, id_=id_) - if not channel: - return - self._logger.debug( - "Finished processing channel dir %s. Channel %s local/max version %i/%i", - dirname, - hexlify(bytes(channel.public_key)), - channel.local_version, - channel.timestamp, - ) - - def process_mdblob_file(self, filepath, **kwargs): - """ - Process a file with metadata in a channel directory. - :param filepath: The path to the file - :param skip_personal_metadata_payload: if this is set to True, personal torrent metadata payload received - through gossip will be ignored. The default value is True. 
- :param external_thread: indicate to the lower level that we're running in the background thread, - to possibly pace down the upload process - :return: a list of tuples of (<metadata or payload>, <action type>) - """ - path = Path.fix_win_long_file(filepath) - with open(path, 'rb') as f: - serialized_data = f.read() - - if path.endswith('.lz4'): - return self.process_compressed_mdblob(serialized_data, **kwargs) - return self.process_squashed_mdblob(serialized_data, **kwargs) - async def process_compressed_mdblob_threaded(self, compressed_data, **kwargs): try: return await run_threaded(self.db, self.process_compressed_mdblob, compressed_data, **kwargs) @@ -475,6 +280,7 @@ def process_compressed_mdblob(self, compressed_data, **kwargs): health_info = HealthItemsPayload.unpack(unused_data) except Exception as e: # pylint: disable=broad-except # pragma: no cover self._logger.warning(f"Unable to parse health information: {type(e).__name__}: {str(e)}") + raise return self.process_squashed_mdblob(decompressed_data, health_info=health_info, **kwargs) @@ -520,7 +326,8 @@ def process_squashed_mdblob(self, chunk_data, external_thread=False, health_info payload_list = [] while offset < len(chunk_data): payload, offset = read_payload_with_offset(chunk_data, offset) - payload_list.append(payload) + if payload: + payload_list.append(payload) if health_info and len(health_info) == len(payload_list): with db_session: @@ -571,28 +378,41 @@ def process_squashed_mdblob(self, chunk_data, external_thread=False, health_info return result @db_session - def process_payload(self, payload, **kwargs): - return process_payload(self, payload, **kwargs) + def process_payload(self, payload, skip_personal_metadata_payload=True, **kwargs): + # Don't process our own torrents + if skip_personal_metadata_payload and payload.public_key == self.my_public_key_bin: + return [] - @db_session - def get_num_channels(self): - return orm.count(self.ChannelMetadata.select(lambda g: g.metadata_type == CHANNEL_TORRENT)) + # Don't process unknown/deprecated payloads + if payload.metadata_type != REGULAR_TORRENT: + return [] + + # Don't process torrents with a bad signature + if payload.has_signature() and not payload.check_signature(): + return [] + + # Don't allow forbidden words in the title/tags + if is_forbidden(f'{payload.title} {payload.tags}'): + return [] + + # Process unsigned torrents + if payload.public_key == NULL_KEY: + node = self.TorrentMetadata.add_ffa_from_dict(payload.to_dict()) + return [ProcessingResult(md_obj=node, obj_state=ObjState.NEW_OBJECT)] if node else [] + + # Do we already know about this object? In that case, we keep the first one (i.e., no versioning).
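+        # Unlike the old PayloadChecker flow, no timestamp comparison or in-place update is attempted here: +        # any entry with a matching (public_key, id_) pair is reported as DUPLICATE_OBJECT and left untouched.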
+ node = self.TorrentMetadata.get_for_update(public_key=payload.public_key, id_=payload.id_) + if node: + return [ProcessingResult(md_obj=node, obj_state=ObjState.DUPLICATE_OBJECT)] + + # Process signed torrents + obj = self.TorrentMetadata.from_payload(payload) + return [ProcessingResult(md_obj=obj, obj_state=ObjState.NEW_OBJECT)] @db_session def get_num_torrents(self): return orm.count(self.TorrentMetadata.select(lambda g: g.metadata_type == REGULAR_TORRENT)) - @db_session - def torrent_exists_in_personal_channel(self, infohash): - """ - Return True if torrent with given infohash exists in any of user's channels - :param infohash: The infohash of the torrent - :return: True if torrent exists else False - """ - return self.TorrentMetadata.exists( - lambda g: g.public_key == self.my_public_key_bin and g.infohash == infohash and g.status != LEGACY_ENTRY - ) - # pylint: disable=unused-argument def search_keyword(self, query, origin_id=None): # Requires FTS5 table "FtsIndex" to be generated and populated. @@ -641,30 +461,24 @@ def search_keyword(self, query, origin_id=None): ORDER BY coalesce(ts.seeders, 0) DESC, fts.rowid DESC LIMIT 1000 """) - return left_join(g for g in self.MetadataNode if g.rowid in fts_ids) # pylint: disable=E1135 + return left_join(g for g in self.TorrentMetadata if g.rowid in fts_ids) # pylint: disable=E1135 @db_session def get_entries_query( self, metadata_type=None, channel_pk=None, - exclude_deleted=False, hide_xxx=False, - exclude_legacy=False, origin_id=None, sort_by=None, sort_desc=True, max_rowid=None, txt_filter=None, - subscribed=None, category=None, - attribute_ranges=None, infohash=None, infohash_set=None, id_=None, - complete_channel=None, self_checked_torrent=None, - cls=None, health_checked_after=None, popular=None, ): @@ -674,13 +488,10 @@ def get_entries_query( """ # Warning! For Pony magic to work, iteration variable name (e.g. 'g') should be the same everywhere! - if cls is None: - cls = self.ChannelNode - if txt_filter: pony_query = self.search_keyword(txt_filter, origin_id=origin_id) else: - pony_query = left_join(g for g in cls) + pony_query = left_join(g for g in self.TorrentMetadata) infohash_set = infohash_set or ({infohash} if infohash else None) if popular: @@ -714,38 +525,17 @@ def get_entries_query( else pony_query ) - if attribute_ranges is not None: - for attr, left, right in attribute_ranges: - if ( - self.ChannelNode._adict_.get(attr) # pylint: disable=W0212 - or self.ChannelNode._subclass_adict_.get(attr) # pylint: disable=W0212 - ) is None: # Check against code injection - raise AttributeError("Tried to query for non-existent attribute") - if left is not None: - pony_query = pony_query.where(f"g.{attr} >= left") - if right is not None: - pony_query = pony_query.where(f"g.{attr} < right") - # origin_id can be zero, for e.g. 
root channel pony_query = pony_query.where(id_=id_) if id_ is not None else pony_query pony_query = pony_query.where(origin_id=origin_id) if origin_id is not None else pony_query - pony_query = pony_query.where(lambda g: g.subscribed) if subscribed is not None else pony_query pony_query = pony_query.where(lambda g: g.tags == category) if category else pony_query - pony_query = pony_query.where(lambda g: g.status != TODELETE) if exclude_deleted else pony_query pony_query = pony_query.where(lambda g: g.xxx == 0) if hide_xxx else pony_query - pony_query = pony_query.where(lambda g: g.status != LEGACY_ENTRY) if exclude_legacy else pony_query pony_query = pony_query.where(lambda g: g.infohash in infohash_set) if infohash_set else pony_query pony_query = ( pony_query.where(lambda g: g.health.self_checked == self_checked_torrent) if self_checked_torrent is not None else pony_query ) - # ACHTUNG! Setting complete_channel to True forces the metadata type to Channels only! - pony_query = ( - pony_query.where(lambda g: g.metadata_type == CHANNEL_TORRENT and g.timestamp == g.local_version) - if complete_channel - else pony_query - ) if health_checked_after is not None: pony_query = pony_query.where(lambda g: g.health.last_check >= health_checked_after) @@ -759,10 +549,10 @@ def get_entries_query( if sort_desc else "(g.health.seeders, g.health.leechers)" ) - elif sort_by == "size" and not issubclass(cls, self.ChannelMetadata): + elif sort_by == "size": # Remark: this can be optimized to skip cases where size field does not matter # When querying for mixed channels / torrents lists, channels should have priority over torrents - sort_expression = "desc(g.num_entries), desc(g.size)" if sort_desc else "g.num_entries, g.size" + sort_expression = "desc(g.size)" if sort_desc else "g.size" pony_query = pony_query.sort_by(sort_expression) elif sort_by: sort_expression = raw_sql(f"g.{sort_by} COLLATE NOCASE" + (" DESC" if sort_desc else "")) @@ -855,7 +645,7 @@ def get_entries_count(self, **kwargs): @db_session def get_max_rowid(self) -> int: - return get_max(self.ChannelNode) + return get_max(self.TorrentMetadata) fts_keyword_search_re = re.compile(r'\w+', re.UNICODE) diff --git a/src/tribler/core/components/metadata_store/db/tests/test_serialization.py b/src/tribler/core/components/metadata_store/db/tests/test_serialization.py new file mode 100644 index 00000000000..9f15b40cc74 --- /dev/null +++ b/src/tribler/core/components/metadata_store/db/tests/test_serialization.py @@ -0,0 +1,26 @@ +from datetime import datetime + +from tribler.core.components.metadata_store.db.serialization import TorrentMetadataPayload, int2time + + +def test_fix_torrent_metadata_payload(): + """ + Check that TorrentMetadataPayload can handle both timestamps and datetime "torrent_date"s. + """ + payload_1 = TorrentMetadataPayload(0, 0, bytes(range(64)), 0, 0, 0, bytes(range(20)), 0, 0, + "title", "tags", "tracker_info") + payload_2 = TorrentMetadataPayload(0, 0, bytes(range(64)), 0, 0, 0, bytes(range(20)), 0, datetime(1970, 1, 1), + "title", "tags", "tracker_info") + + assert payload_1.serialized() == payload_2.serialized() + + +def test_torrent_metadata_payload_magnet(): + """ + Check that TorrentMetadataPayload produces an appropriate magnet link. 
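+    Note that the expected link below embeds the bytes reprs of the title and tracker fields +    (b'title', b'tracker_info'), i.e. get_magnet() is expected to interpolate the raw byte strings.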
+ """ + payload = TorrentMetadataPayload(0, 0, bytes(range(64)), 0, 0, 0, bytes(range(20)), 0, 0, + "title", "tags", "tracker_info") + expected = "magnet:?xt=urn:btih:000102030405060708090a0b0c0d0e0f10111213&dn=b'title'&tr=b'tracker_info'" + + assert expected == payload.get_magnet() diff --git a/src/tribler/core/components/metadata_store/db/tests/test_store.py b/src/tribler/core/components/metadata_store/db/tests/test_store.py index c3d83d9cc98..f07b1cfb0a2 100644 --- a/src/tribler/core/components/metadata_store/db/tests/test_store.py +++ b/src/tribler/core/components/metadata_store/db/tests/test_store.py @@ -1,32 +1,18 @@ -import os import random import string import threading -from binascii import unhexlify from datetime import datetime -from unittest.mock import patch import pytest from ipv8.keyvault.crypto import default_eccrypto from pony.orm import db_session -from tribler.core.components.metadata_store.db.orm_bindings.channel_metadata import ( - CHANNEL_DIR_NAME_LENGTH, - entries_to_chunk, -) -from tribler.core.components.metadata_store.db.orm_bindings.channel_node import NEW +from tribler.core.components.metadata_store.db.orm_bindings.torrent_metadata import entries_to_chunk from tribler.core.components.metadata_store.db.serialization import ( - CHANNEL_TORRENT, - ChannelMetadataPayload, - DeletedMetadataPayload, SignedPayload, - UnknownBlobTypeException, int2time, ) -from tribler.core.components.metadata_store.remote_query_community.payload_checker import ObjState, ProcessingResult -from tribler.core.components.metadata_store.tests.test_channel_download import CHANNEL_METADATA_UPDATED -from tribler.core.tests.tools.common import TESTS_DATA_DIR -from tribler.core.utilities.path_util import Path +from tribler.core.components.metadata_store.db.store import ObjState from tribler.core.utilities.pony_utils import run_threaded from tribler.core.utilities.utilities import random_infohash @@ -35,15 +21,9 @@ def get_payloads(entity_class, key): - orig_key = entity_class._my_key - - entity_class._my_key = key c = entity_class(infohash=random_infohash()) - payload = c._payload_class.from_signed_blob(c.serialized()) - deleted_payload = DeletedMetadataPayload.from_signed_blob(c.serialized_delete()) - - entity_class._my_key = orig_key - return c, payload, deleted_payload + payload = c.payload_class.from_signed_blob(c.serialized()) + return c, payload def make_wrong_payload(filename): @@ -53,52 +33,10 @@ def make_wrong_payload(filename): output_file.write(metadata_payload.serialized()) -SAMPLE_DIR = TESTS_DATA_DIR / 'sample_channel' -# Just get the first and only subdir there, and assume it is the sample channel dir -CHANNEL_DIR = [ - SAMPLE_DIR / subdir - for subdir in os.listdir(SAMPLE_DIR) - if (SAMPLE_DIR / subdir).is_dir() and len(subdir) == CHANNEL_DIR_NAME_LENGTH -][0] -CHANNEL_METADATA = TESTS_DATA_DIR / 'sample_channel' / 'channel.mdblob' - - -@db_session -def test_process_channel_dir_file(tmpdir, metadata_store): - """ - Test whether we are able to process files in a directory containing node metadata - """ - test_node_metadata = metadata_store.TorrentMetadata(title='test', infohash=random_infohash()) - metadata_path = tmpdir / 'metadata.data' - test_node_metadata.to_file(metadata_path) - # We delete this TorrentMeta info now, it should be added again to the database when loading it - test_node_metadata.delete() - loaded_metadata = metadata_store.process_mdblob_file(metadata_path, skip_personal_metadata_payload=False) - assert loaded_metadata[0].md_obj.title == 'test' - - # Test whether 
we delete existing metadata when loading a DeletedMetadata blob - metadata = metadata_store.TorrentMetadata(infohash=b'1' * 20) - public_key = metadata.public_key - metadata.to_delete_file(metadata_path) - metadata_sig = metadata.signature - loaded_metadata = metadata_store.process_mdblob_file( - metadata_path, skip_personal_metadata_payload=False, channel_public_key=public_key - ) - assert loaded_metadata == [] - # Make sure the original metadata is deleted - assert metadata_store.TorrentMetadata.get(signature=metadata_sig) is None - - # Test an unknown metadata type, this should raise an exception - invalid_metadata = tmpdir / 'invalidtype.mdblob' - make_wrong_payload(invalid_metadata) - with pytest.raises(UnknownBlobTypeException): - metadata_store.process_mdblob_file(invalid_metadata, skip_personal_metadata_payload=False) - - @db_session def test_squash_mdblobs(metadata_store): r = random.Random(123) - chunk_size = metadata_store.ChannelMetadata._CHUNK_SIZE_LIMIT + chunk_size = metadata_store.TorrentMetadata._CHUNK_SIZE_LIMIT md_list = [ metadata_store.TorrentMetadata( title=''.join(r.choice(string.ascii_uppercase + string.digits) for _ in range(20)), @@ -130,6 +68,9 @@ def test_squash_mdblobs_multiple_chunks(metadata_store): ) for _ in range(0, 10) ] + for md in md_list: + md.public_key = metadata_store.my_public_key_bin + md.signature = md.serialized(metadata_store.my_key)[-64:] # Test splitting into multiple chunks chunk, index = entries_to_chunk(md_list, chunk_size=900) chunk2, _ = entries_to_chunk(md_list, chunk_size=900, start_index=index) @@ -147,72 +88,15 @@ def test_squash_mdblobs_multiple_chunks(metadata_store): ] -@db_session -def test_multiple_squashed_commit_and_read(metadata_store): - """ - Test committing entries into several squashed blobs and reading them back - """ - metadata_store.ChannelMetadata._CHUNK_SIZE_LIMIT = 500 - - num_entries = 10 - channel = metadata_store.ChannelMetadata.create_channel('testchan') - md_list = [ - metadata_store.TorrentMetadata( - origin_id=channel.id_, title='test' + str(x), status=NEW, infohash=random_infohash() - ) - for x in range(0, num_entries) - ] - channel.commit_channel_torrent() - - channel.local_version = 0 - for md in md_list: - md.delete() - - channel_dir = Path(metadata_store.ChannelMetadata._channels_dir) / channel.dirname - assert len(os.listdir(channel_dir)) > 1 # make sure it was broken into more than one .mdblob file - metadata_store.process_channel_dir( - channel_dir, channel.public_key, channel.id_, skip_personal_metadata_payload=False - ) - assert num_entries == len(channel.contents) - - -@db_session -def test_skip_processing_of_received_personal_channel_torrents(metadata_store): - """ - Test that personal torrent is ignored by default when processing the torrent metadata payload - """ - channel = metadata_store.ChannelMetadata.create_channel('testchan') - torrent_md = metadata_store.TorrentMetadata( - origin_id=channel.id_, title='test', status=NEW, infohash=random_infohash() - ) - channel.commit_channel_torrent() - torrent_md.delete() - - channel_dir = Path(metadata_store.ChannelMetadata._channels_dir) / channel.dirname - assert os.listdir(Path.fix_win_long_file(channel_dir)) - - # By default, personal channel torrent metadata processing is skipped so there should be no torrents - # added to the channel - channel.local_version = 0 - metadata_store.process_channel_dir(channel_dir, channel.public_key, channel.id_) - assert not channel.contents - - # Enable processing of personal channel torrent metadata - 
channel.local_version = 0 - metadata_store.process_channel_dir( - channel_dir, channel.public_key, channel.id_, skip_personal_metadata_payload=False - ) - assert len(channel.contents) == 1 - - @db_session def test_skip_processing_mdblob_with_forbidden_terms(metadata_store): """ Test that an mdblob with forbidden terms cannot ever get into the local database """ key = default_eccrypto.generate_key("curve25519") - chan_entry = metadata_store.ChannelMetadata(title="12yo", infohash=random_infohash(), sign_with=key) - chan_payload = chan_entry._payload_class(**chan_entry.to_dict()) + chan_entry = metadata_store.TorrentMetadata(title="12yo", infohash=random_infohash()) + chan_payload = chan_entry.payload_class.from_dict(**chan_entry.to_dict()) + chan_payload.add_signature(key) chan_entry.delete() assert metadata_store.process_payload(chan_payload) == [] @@ -226,70 +110,79 @@ def test_process_invalid_compressed_mdblob(metadata_store): @db_session -def test_process_channel_dir(metadata_store): - """ - Test processing a directory containing metadata blobs - """ - payload = ChannelMetadataPayload.from_file(CHANNEL_METADATA) - channel = metadata_store.process_payload(payload)[0].md_obj - assert not channel.contents_list - metadata_store.process_channel_dir(CHANNEL_DIR, channel.public_key, channel.id_) - assert len(channel.contents_list) == 4 - assert channel.timestamp == 1565621688015 - assert channel.local_version == channel.timestamp +def test_process_forbidden_payload(metadata_store): + _, node_payload = get_payloads( + metadata_store.TorrentMetadata, default_eccrypto.generate_key("curve25519") + ) + + assert metadata_store.process_payload(node_payload) == [] @db_session -def test_compute_channel_update_progress(metadata_store, tmpdir): - """ - Test estimating progress of channel processing - """ - payload = ChannelMetadataPayload.from_file(CHANNEL_METADATA_UPDATED) - channel = metadata_store.process_payload(payload)[0].md_obj - with patch.object(metadata_store, 'get_channel_dir_path', lambda _: Path(CHANNEL_DIR)): - assert metadata_store.compute_channel_update_progress(channel) == 0.0 - metadata_store.process_channel_dir(CHANNEL_DIR, channel.public_key, channel.id_) - assert metadata_store.compute_channel_update_progress(channel) == 1.0 +def test_process_payload(metadata_store): + sender_key = default_eccrypto.generate_key("curve25519") + node, node_payload = get_payloads(metadata_store.TorrentMetadata, sender_key) + node_payload.add_signature(sender_key) + node_dict = node.to_dict() + node.delete() + + # Check if node metadata object is properly created on payload processing + result, = metadata_store.process_payload(node_payload) + assert result.obj_state == ObjState.NEW_OBJECT + assert node_dict['metadata_type'] == result.md_obj.to_dict()['metadata_type'] + + # Check that we flag this as duplicate in case we already know about the local node + result, = metadata_store.process_payload(node_payload) + assert result.obj_state == ObjState.DUPLICATE_OBJECT @db_session -def test_process_forbidden_payload(metadata_store): - _, node_payload, node_deleted_payload = get_payloads( - metadata_store.ChannelNode, default_eccrypto.generate_key("curve25519") - ) +def test_process_payload_invalid_sig(metadata_store): + sender_key = default_eccrypto.generate_key("curve25519") + node, node_payload = get_payloads(metadata_store.TorrentMetadata, sender_key) + node_payload.add_signature(sender_key) + node_payload.signature = bytes(127 ^ byte for byte in node_payload.signature) + node.delete() - assert not 
metadata_store.process_payload(node_payload) - assert metadata_store.process_payload(node_deleted_payload) == [] + assert [] == metadata_store.process_payload(node_payload) - # Do nothing in case it is unknown/abstract payload type, like ChannelNode - assert not metadata_store.process_payload(node_payload) + +@db_session +def test_process_payload_invalid_metadata_type(metadata_store): + sender_key = default_eccrypto.generate_key("curve25519") + node, node_payload = get_payloads(metadata_store.TorrentMetadata, sender_key) + node_payload.metadata_type = -1 + node.delete() + + assert [] == metadata_store.process_payload(node_payload) @db_session -def test_process_payload(metadata_store): +def test_process_payload_skip_personal(metadata_store): sender_key = default_eccrypto.generate_key("curve25519") - for md_class in ( - metadata_store.ChannelMetadata, - metadata_store.TorrentMetadata, - metadata_store.CollectionNode, - metadata_store.ChannelDescription, - metadata_store.ChannelThumbnail, - ): - node, node_payload, node_deleted_payload = get_payloads(md_class, sender_key) - node_dict = node.to_dict() - node.delete() + metadata_store.my_public_key_bin = sender_key.pub().key_to_bin()[10:] + node, node_payload = get_payloads(metadata_store.TorrentMetadata, sender_key) + node_payload.add_signature(sender_key) + node.delete() - # Check that there is no action if trying to delete an unknown object - assert not metadata_store.process_payload(node_deleted_payload) + assert [] == metadata_store.process_payload(node_payload) - # Check if node metadata object is properly created on payload processing - result = metadata_store.process_payload(node_payload)[0] - assert result.obj_state == ObjState.NEW_OBJECT - assert node_dict['metadata_type'] == result.md_obj.to_dict()['metadata_type'] +@db_session +def test_process_payload_unsigned(metadata_store): sender_key = default_eccrypto.generate_key("curve25519") + node, node_payload = get_payloads(metadata_store.TorrentMetadata, sender_key) + node_dict = node.to_dict() + infohash = node_dict['infohash'] + node.delete() - # Check that nothing happens in case in case we already know about the local node - assert metadata_store.process_payload(node_payload)[0].obj_state == ObjState.LOCAL_VERSION_SAME + # Check if node metadata object is properly created on payload processing + result, = metadata_store.process_payload(node_payload) + assert result.obj_state == ObjState.NEW_OBJECT + assert node_dict['metadata_type'] == result.md_obj.to_dict()['metadata_type'] + + # Check that nothing happens in case we already know about the local node + assert metadata_store.process_payload(node_payload) == [] @db_session @@ -297,12 +190,12 @@ def test_process_payload_ffa(metadata_store): infohash = b"1" * 20 ffa_title = "abcabc" ffa_torrent = metadata_store.TorrentMetadata.add_ffa_from_dict(dict(infohash=infohash, title=ffa_title)) - ffa_payload = metadata_store.TorrentMetadata._payload_class.from_signed_blob(ffa_torrent.serialized()) + ffa_payload = metadata_store.TorrentMetadata.payload_class.from_signed_blob(ffa_torrent.serialized()) ffa_torrent.delete() # Assert that FFA is never added to DB if there is already a signed entry with the same infohash signed_md = metadata_store.TorrentMetadata(infohash=infohash, title='') - metadata_store.TorrentMetadata._payload_class.from_signed_blob(signed_md.serialized()) + metadata_store.TorrentMetadata.payload_class.from_signed_blob(signed_md.serialized()) assert metadata_store.process_payload(ffa_payload) == [] assert 
metadata_store.TorrentMetadata.get(title=ffa_title) is None signed_md.delete() @@ -316,142 +209,6 @@ def test_process_payload_ffa(metadata_store): assert metadata_store.process_payload(ffa_payload) == [] -@db_session -def test_process_payload_with_known_channel_public_key(metadata_store): - """ - Test processing a payload when the channel public key is known, e.g. from disk. - """ - key1 = default_eccrypto.generate_key("curve25519") - key2 = default_eccrypto.generate_key("curve25519") - torrent = metadata_store.TorrentMetadata(infohash=random_infohash(), sign_with=key1) - payload = torrent._payload_class(**torrent.to_dict()) - torrent.delete() - # Check rejecting a payload with non-matching public key - assert [] == metadata_store.process_payload(payload, channel_public_key=key2.pub().key_to_bin()[10:]) - assert metadata_store.TorrentMetadata.get() is None - - # Check accepting a payload with matching public key - assert ( - metadata_store.process_payload(payload, channel_public_key=key1.pub().key_to_bin()[10:])[0].obj_state - == ObjState.NEW_OBJECT - ) - assert metadata_store.TorrentMetadata.get() - - -@db_session -def test_process_payload_reject_older(metadata_store): - # Check there is no action if the processed payload has a timestamp that is less than the - # local_version of the corresponding local channel. (I.e. remote peer trying to push back a deleted entry) - key = default_eccrypto.generate_key("curve25519") - channel = metadata_store.ChannelMetadata( - title='bla', - version=123, - timestamp=12, - local_version=12, - infohash=random_infohash(), - sign_with=key, - ) - torrent = metadata_store.TorrentMetadata( - title='blabla', timestamp=11, origin_id=channel.id_, infohash=random_infohash(), sign_with=key - ) - payload = torrent._payload_class(**torrent.to_dict()) - torrent.delete() - assert metadata_store.process_payload(payload) == [] - assert metadata_store.TorrentMetadata.get(title='blabla') is None - - # Now test the same, but for a torrent within a hierarchy of nested channels - folder_1 = metadata_store.CollectionNode(origin_id=channel.id_, sign_with=key) - folder_2 = metadata_store.CollectionNode(origin_id=folder_1.id_, sign_with=key) - - torrent = metadata_store.TorrentMetadata( - title='blabla', timestamp=11, origin_id=folder_2.id_, infohash=random_infohash(), sign_with=key - ) - payload = torrent._payload_class(**torrent.to_dict()) - torrent.delete() - assert metadata_store.process_payload(payload) == [] - assert metadata_store.TorrentMetadata.get(title='blabla') is None - - # Now test that we still add the torrent for the case of a broken hierarchy - folder_1 = metadata_store.CollectionNode(origin_id=123123, sign_with=key) - folder_2 = metadata_store.CollectionNode(origin_id=folder_1.id_, sign_with=key) - torrent = metadata_store.TorrentMetadata( - title='blabla', timestamp=11, origin_id=folder_2.id_, infohash=random_infohash(), sign_with=key - ) - payload = torrent._payload_class(**torrent.to_dict()) - torrent.delete() - assert metadata_store.process_payload(payload)[0].obj_state == ObjState.NEW_OBJECT - assert metadata_store.TorrentMetadata.get(title='blabla') - - -@db_session -def test_process_payload_reject_older_entry(metadata_store): - """ - Test rejecting and returning LOCAL_VERSION_NEWER upon receiving an older version - of an already known metadata entry - """ - key = default_eccrypto.generate_key("curve25519") - torrent_old = metadata_store.TorrentMetadata( - title='blabla', timestamp=11, id_=3, infohash=random_infohash(), sign_with=key - ) - payload_old = 
torrent_old._payload_class(**torrent_old.to_dict()) - torrent_old.delete() - - torrent_updated = metadata_store.TorrentMetadata( - title='blabla', timestamp=12, id_=3, infohash=random_infohash(), sign_with=key - ) - # Test rejecting older version of the same entry - assert metadata_store.process_payload(payload_old, skip_personal_metadata_payload=False)[0] == ProcessingResult( - md_obj=torrent_updated, obj_state=ObjState.LOCAL_VERSION_NEWER - ) - - -@db_session -def test_get_num_channels_nodes(metadata_store): - metadata_store.ChannelMetadata(title='testchan', id_=0, infohash=random_infohash()) - metadata_store.ChannelMetadata(title='testchan', id_=123, infohash=random_infohash()) - metadata_store.ChannelMetadata( - title='testchan', - id_=0, - public_key=unhexlify('0' * 20), - signature=unhexlify('0' * 64), - skip_key_check=True, - infohash=random_infohash(), - ) - metadata_store.ChannelMetadata( - title='testchan', - id_=0, - public_key=unhexlify('1' * 20), - signature=unhexlify('1' * 64), - skip_key_check=True, - infohash=random_infohash(), - ) - - _ = [ - metadata_store.TorrentMetadata(title='test' + str(x), status=NEW, infohash=random_infohash()) - for x in range(0, 3) - ] - - assert metadata_store.get_num_channels() == 4 - assert metadata_store.get_num_torrents() == 3 - - -@db_session -def test_process_payload_update_type(metadata_store): - # Check if applying class-changing update to an entry works - # First, create a node and get a payload from it, then update it to another type, then get payload - # for the updated version, then delete the updated version, then bring back the original one by processing it, - # then try processing the payload of updated version and see if it works. Phew! - node, node_payload, _ = get_payloads(metadata_store.CollectionNode, metadata_store.my_key) - updated_node = node.update_properties({"origin_id": 0}) # This will implicitly change the node to ChannelTorrent - assert updated_node.metadata_type == CHANNEL_TORRENT - updated_node_payload = updated_node._payload_class.from_signed_blob(updated_node.serialized()) - updated_node.delete() - - metadata_store.process_payload(node_payload, skip_personal_metadata_payload=False) - updated_node2 = metadata_store.process_payload(updated_node_payload, skip_personal_metadata_payload=False)[0].md_obj - assert updated_node2.metadata_type == CHANNEL_TORRENT - - class ThreadedTestException(Exception): pass @@ -469,3 +226,15 @@ def f1(a, b, *, c, d): with pytest.raises(ThreadedTestException, match='^test exception$'): await run_threaded(metadata_store.db, f1, 1, 2, c=5, d=6) + + +def test_get_entries_query_sort_by_size(metadata_store): + with db_session: + metadata_store.TorrentMetadata.add_ffa_from_dict({"infohash": b"\xab" * 20, "title": "abc", "size": 20}) + metadata_store.TorrentMetadata.add_ffa_from_dict({"infohash": b"\xcd" * 20, "title": "def", "size": 1}) + metadata_store.TorrentMetadata.add_ffa_from_dict({"infohash": b"\xef" * 20, "title": "ghi", "size": 10}) + + ordered1, ordered2, ordered3 = metadata_store.get_entries_query(sort_by="size", sort_desc=True)[:] + assert ordered1.size == 20 + assert ordered2.size == 10 + assert ordered3.size == 1 diff --git a/src/tribler/core/components/metadata_store/db/tests/test_torrent_metadata.py b/src/tribler/core/components/metadata_store/db/tests/test_torrent_metadata.py index 2b56a00bc1f..5878823157c 100644 --- a/src/tribler/core/components/metadata_store/db/tests/test_torrent_metadata.py +++ b/src/tribler/core/components/metadata_store/db/tests/test_torrent_metadata.py 
@@ -1,17 +1,15 @@ +import os from datetime import datetime from time import time from unittest.mock import MagicMock, Mock import pytest -from ipv8.keyvault.crypto import default_eccrypto from pony import orm from pony.orm import db_session -from tribler.core.components.conftest import TEST_PERSONAL_KEY from tribler.core.components.libtorrent.torrentdef import TorrentDef -from tribler.core.components.metadata_store.db.orm_bindings.channel_node import TODELETE -from tribler.core.components.metadata_store.db.orm_bindings.discrete_clock import clock -from tribler.core.components.metadata_store.db.orm_bindings.torrent_metadata import tdef_to_metadata_dict +from tribler.core.components.metadata_store.db.orm_bindings.torrent_metadata import tdef_to_metadata_dict, TODELETE, \ + entries_to_chunk from tribler.core.components.metadata_store.db.serialization import CHANNEL_TORRENT, REGULAR_TORRENT from tribler.core.tests.tools.common import TORRENT_UBUNTU_FILE from tribler.core.utilities.utilities import random_infohash @@ -34,6 +32,17 @@ def test_serialization(metadata_store): assert torrent_metadata.serialized() +def test_entries_to_chunk(): + """ + Test that calling entries_to_chunk with a start index >= the length of the metadata list raises an Exception. + """ + with pytest.raises(Exception): + entries_to_chunk([], 10, 0) + + with pytest.raises(Exception): + entries_to_chunk([], 10, 1) + + async def test_create_ffa_from_dict(metadata_store): """ Test creating a free-for-all torrent entry @@ -42,7 +51,9 @@ async def test_create_ffa_from_dict(metadata_store): with db_session: # Make sure that FFA entry with the infohash that is already known to GigaChannel cannot be created - signed_entry = metadata_store.TorrentMetadata.from_dict(tdef_to_metadata_dict(tdef)) + signed_md_dict = tdef_to_metadata_dict(tdef) + signed_md_dict.update({'public_key': os.urandom(64), 'signature': os.urandom(64)}) + signed_entry = metadata_store.TorrentMetadata(**signed_md_dict) metadata_store.TorrentMetadata.add_ffa_from_dict(tdef_to_metadata_dict(tdef)) assert metadata_store.TorrentMetadata.select(lambda g: g.public_key == EMPTY_BLOB).count() == 0 @@ -104,10 +115,9 @@ def test_search_deduplicated(metadata_store): """ Test SQL-query base deduplication of search results with the same infohash """ - key2 = default_eccrypto.generate_key("curve25519") torrent = rnd_torrent() metadata_store.TorrentMetadata.from_dict(dict(torrent, title="foo bar 123")) - metadata_store.TorrentMetadata.from_dict(dict(torrent, title="eee 123", sign_with=key2)) + metadata_store.TorrentMetadata.from_dict(dict(torrent, title="eee 123")) results = metadata_store.search_keyword("foo")[:] assert len(results) == 1 @@ -233,8 +243,8 @@ def test_get_entries_for_infohashes(metadata_store): infohash2 = random_infohash() infohash3 = random_infohash() - metadata_store.TorrentMetadata(title='title', infohash=infohash1, size=0, sign_with=TEST_PERSONAL_KEY) - metadata_store.TorrentMetadata(title='title', infohash=infohash2, size=0, sign_with=TEST_PERSONAL_KEY) + metadata_store.TorrentMetadata(title='title', infohash=infohash1, size=0) + metadata_store.TorrentMetadata(title='title', infohash=infohash2, size=0) def count(*args, **kwargs): return len(metadata_store.get_entries_query(*args, **kwargs)) @@ -256,20 +266,12 @@ def test_get_entries(metadata_store): """ Test base method for getting torrents """ - clock.clock = 0 # We want deterministic discrete clock values for tests - # First we create a few channels and add some torrents to these channels tlist = [] - 
keys = [*(default_eccrypto.generate_key('curve25519') for _ in range(4)), metadata_store.ChannelNode._my_key] - for ind, key in enumerate(keys): - metadata_store.ChannelMetadata( - title='channel%d' % ind, subscribed=(ind % 2 == 0), infohash=random_infohash(), num_entries=5, sign_with=key - ) + for _ in range(5): tlist.extend( [ - metadata_store.TorrentMetadata( - title='torrent%d' % torrent_ind, infohash=random_infohash(), size=123, sign_with=key - ) + metadata_store.TorrentMetadata(title=f'torrent{torrent_ind}', infohash=random_infohash(), size=123) for torrent_ind in range(5) ] ) @@ -282,60 +284,6 @@ def test_get_entries(metadata_store): count = metadata_store.get_entries_count(metadata_type=REGULAR_TORRENT) assert count == 25 - # Test fetching torrents in a channel - channel_pk = metadata_store.ChannelNode._my_key.pub().key_to_bin()[10:] - - args = dict(channel_pk=channel_pk, hide_xxx=True, exclude_deleted=True, metadata_type=REGULAR_TORRENT) - torrents = metadata_store.get_entries_query(**args)[:] - assert tlist[-5:-2] == list(torrents)[::-1] - - count = metadata_store.get_entries_count(**args) - assert count == 3 - - args = dict(sort_by='title', channel_pk=channel_pk, origin_id=0, metadata_type=REGULAR_TORRENT) - torrents = metadata_store.get_entries(first=1, last=10, **args) - assert len(torrents) == 5 - - count = metadata_store.get_entries_count(**args) - assert count == 5 - - # Test that channels get priority over torrents when querying for mixed content - args = dict(sort_by='size', sort_desc=True, channel_pk=channel_pk, origin_id=0) - torrents = metadata_store.get_entries(first=1, last=10, **args) - assert torrents[0].metadata_type == CHANNEL_TORRENT - - args = dict(sort_by='size', sort_desc=False, channel_pk=channel_pk, origin_id=0) - torrents = metadata_store.get_entries(first=1, last=10, **args) - assert torrents[-1].metadata_type == CHANNEL_TORRENT - - # Test getting entries by timestamp range - args = dict(channel_pk=channel_pk, origin_id=0, attribute_ranges=(("timestamp", 3, 30),)) - torrents = metadata_store.get_entries(first=1, last=10, **args) - assert sorted([t.timestamp for t in torrents]) == list(range(25, 30)) - - # Test catching SQL injection - args = dict(channel_pk=channel_pk, origin_id=0, attribute_ranges=(("timestamp < 3 and g.timestamp", 3, 30),)) - with pytest.raises(AttributeError): - metadata_store.get_entries(**args) - - # Test getting entry by id_ - with db_session: - entry = metadata_store.TorrentMetadata(id_=123, infohash=random_infohash()) - args = dict(channel_pk=channel_pk, id_=123) - torrents = metadata_store.get_entries(first=1, last=10, **args) - assert list(torrents) == [entry] - - # Test getting complete channels - with db_session: - complete_chan = metadata_store.ChannelMetadata( - infohash=random_infohash(), title='bla', local_version=222, timestamp=222 - ) - incomplete_chan = metadata_store.ChannelMetadata( - infohash=random_infohash(), title='bla', local_version=222, timestamp=223 - ) - channels = metadata_store.get_entries(complete_channel=True) - assert [complete_chan] == channels - @db_session def test_get_entries_health_checked_after(metadata_store): @@ -354,31 +302,6 @@ def test_get_entries_health_checked_after(metadata_store): assert torrents == [t1] -@db_session -def test_metadata_conflicting(metadata_store): - tdict = dict(rnd_torrent(), title="lakes sheep", tags="video", infohash=b'\x00\xff') - md = metadata_store.TorrentMetadata.from_dict(tdict) - assert not md.metadata_conflicting(tdict) - assert 
md.metadata_conflicting(dict(tdict, title="bla")) - tdict.pop('title') - assert not md.metadata_conflicting(tdict) - - -@db_session -def test_update_properties(metadata_store): - """ - Test the updating of several properties of a TorrentMetadata object - """ - metadata = metadata_store.TorrentMetadata(title='foo', infohash=random_infohash()) - orig_timestamp = metadata.timestamp - - # Test updating the status only - assert metadata.update_properties({"status": 456}).status == 456 - assert orig_timestamp == metadata.timestamp - assert metadata.update_properties({"title": "bar"}).title == "bar" - assert metadata.timestamp > orig_timestamp - - @db_session def test_popular_torrens_with_metadata_type(metadata_store): """ diff --git a/src/tribler/core/components/metadata_store/remote_query_community/payload_checker.py b/src/tribler/core/components/metadata_store/remote_query_community/payload_checker.py deleted file mode 100644 index 4792d7ca068..00000000000 --- a/src/tribler/core/components/metadata_store/remote_query_community/payload_checker.py +++ /dev/null @@ -1,357 +0,0 @@ -import enum -from dataclasses import dataclass, field - -from pony.orm import db_session - -from tribler.core.components.metadata_store.category_filter.l2_filter import is_forbidden -from tribler.core.components.metadata_store.db.serialization import ( - CHANNEL_DESCRIPTION, - CHANNEL_THUMBNAIL, - CHANNEL_TORRENT, - COLLECTION_NODE, - DELETED, - NULL_KEY, - REGULAR_TORRENT, -) -from tribler.core.utilities.sentinels import sentinel -from tribler.core.utilities.unicode import hexlify - - -class ObjState(enum.Enum): - UPDATED_LOCAL_VERSION = enum.auto() # We updated the local version of the ORM object with the received one - LOCAL_VERSION_NEWER = enum.auto() # The local version of the ORM object is newer than the received one - LOCAL_VERSION_SAME = enum.auto() # The local version of the ORM object is the same as the received one - NEW_OBJECT = enum.auto() # The received object is unknown to us and thus added to ORM - - -CONTINUE = sentinel('CONTINUE') # Sentinel object indicating that the check yielded no result - - -@dataclass -class ProcessingResult: - # This class is used to return results of processing of a payload by process_payload. - # It includes the ORM object created as a result of processing, the state of the object - # as indicated by ObjState enum, and missing dependencies list that includes a list of query - # arguments for get_entries to query the sender back through Remote Query Community - md_obj: object = None - obj_state: object = None - missing_deps: list = field(default_factory=list) - - -class PayloadChecker: - def __init__(self, mds, payload, skip_personal_metadata_payload=True, channel_public_key=None): - self.mds = mds - self.payload = payload - self.skip_personal_metadata_payload = skip_personal_metadata_payload - self.channel_public_key = channel_public_key - self._logger = self.mds._logger # pylint: disable=W0212 - - def reject_payload_with_nonmatching_public_key(self, channel_public_key): - """ - This check rejects payloads that do not match the given public key. It is used during authoritative - updates of channels from disk (serialized and downloaded in the torrent form) to prevent - channel creators from injecting random garbage into local database. 
- """ - if self.payload.public_key != channel_public_key: - self._logger.warning( - "Tried to push metadata entry with foreign public key.\ - Expected public key: %s, entry public key / id: %s / %i", - hexlify(channel_public_key), - self.payload.public_key, - self.payload.id_, - ) - return [] - return CONTINUE - - def process_delete_node_command(self): - """ - Check if the payload is a command to delete an existing node. If it is, delete the node - and return empty list. Otherwise, CONTINUE control to further checks. - """ - if self.payload.metadata_type == DELETED: - # We only allow people to delete their own entries, thus PKs must match - node = self.mds.ChannelNode.get_for_update( - signature=self.payload.delete_signature, public_key=self.payload.public_key - ) - if node: - node.delete() - return [] - return CONTINUE - - def reject_unknown_payload_type(self): - """ - Check if the payload contains metadata of a known type. - If it does not, stop processing and return empty list. - Otherwise, CONTINUE control to further checks. - """ - if self.payload.metadata_type not in [ - CHANNEL_TORRENT, - REGULAR_TORRENT, - COLLECTION_NODE, - CHANNEL_DESCRIPTION, - CHANNEL_THUMBNAIL, - ]: - return [] - return CONTINUE - - def reject_payload_with_offending_words(self): - """ - Check if the payload contains strong offending words. - If it does, stop processing and return empty list. - Otherwise, CONTINUE control to further checks. - """ - if is_forbidden( - " ".join( - getattr(self.payload, attr) for attr in ("title", "tags", "text") if hasattr(self.payload, attr)) - ): - return [] - return CONTINUE - - def add_ffa_node(self): - """ - Check if the payload contains metadata of Free-For-All (FFA) type, which is just a REGULAR_TORRENT payload - without signature. If it does, create a corresponding node in the local database. - Otherwise, CONTINUE control to further checks. - """ - if self.payload.public_key == NULL_KEY: - if self.payload.metadata_type == REGULAR_TORRENT: - node = self.mds.TorrentMetadata.add_ffa_from_dict(self.payload.to_dict()) - if node: - return [ProcessingResult(md_obj=node, obj_state=ObjState.NEW_OBJECT)] - return [] - return CONTINUE - - def add_node(self): - """ - Try to create a local node from the payload. - If it is impossible, CONTINUE control to further checks (there should not be any more, really). - """ - for orm_class in ( - self.mds.TorrentMetadata, - self.mds.ChannelMetadata, - self.mds.CollectionNode, - self.mds.ChannelThumbnail, - self.mds.ChannelDescription, - ): - if orm_class._discriminator_ == self.payload.metadata_type: # pylint: disable=W0212 - obj = orm_class.from_payload(self.payload) - return [ProcessingResult(md_obj=obj, obj_state=ObjState.NEW_OBJECT)] - return CONTINUE - - def reject_personal_metadata(self): - """ - Check if the payload contains metadata signed by our private key. This could happen in a situation where - someone else tries to push us our old channel data, for example. - Since we are the only authoritative source of information about our own channel, we reject - such payloads and thus return empty list. - Otherwise, CONTINUE control to further checks. - """ - if self.payload.public_key == self.mds.my_public_key_bin: - return [] - return CONTINUE - - def reject_obsolete_metadata(self): - """ - Check if the received payload contains older deleted metadata for a channel we are subscribed to. - In that case, we reject the metadata and return an empty list. - Otherwise, CONTINUE control to further checks. - """ - - # ACHTUNG! 
Due to deficiencies in the current Channels design, it is impossible to - # reliably tell if the received entry belongs to a channel we already subscribed, - # if some of the intermediate folders were deleted earlier. - # Also, this means we must return empty list for the case when the local subscribed channel - # version is higher than the receive payload. This behavior does not conform to the - # "local results == remote results" contract, but that is not a problem in most important cases - # (e.g. browsing a non-subscribed channel). One situation where it can still matter is when - # a remote search returns deleted results for a channel that we subscribe locally. - parent = self.mds.CollectionNode.get(public_key=self.payload.public_key, id_=self.payload.origin_id) - if parent is None: - # Probably, this is a payload for an unknown object, so nothing to do here - return CONTINUE - # If the immediate parent is not a real channel, look for its toplevel parent in turn - parent = parent.get_parent_nodes()[0] if parent.metadata_type != CHANNEL_TORRENT else parent - - if parent.metadata_type == CHANNEL_TORRENT and self.payload.timestamp <= parent.local_version: - # The received metadata is an older entry from a channel we are subscribed to. Reject it. - return [] - return CONTINUE - - def update_local_node(self): - """ - Check if the received payload contains an updated version of metadata node we already have - in the local database (e.g. a newer version of channel entry gossiped to us). - We try to update the local metadata node in that case, returning UPDATED_LOCAL_VERSION status. - Conversely, if we got a newer version of the metadata node, we return it to higher level - with a LOCAL_VERSION_NEWER mark, so the higher level can possibly push an update back to the sender. - If we don't have some version of the node locally, CONTINUE control to further checks. - """ - # Check for the older version of the added node - node = self.mds.ChannelNode.get_for_update(public_key=self.payload.public_key, id_=self.payload.id_) - if not node: - return CONTINUE - - node.to_simple_dict() # Force loading of related objects (like TorrentMetadata.health) in db_session - - if node.timestamp == self.payload.timestamp: - # We got the same version locally and do nothing. - # Nevertheless, it is important to indicate to upper levels that we recognised - # the entry, for e.g. channel votes bumping - return [ProcessingResult(md_obj=node, obj_state=ObjState.LOCAL_VERSION_SAME)] - if node.timestamp > self.payload.timestamp: - # We got the newer version, return it to upper level (for e.g. a pushback update) - return [ProcessingResult(md_obj=node, obj_state=ObjState.LOCAL_VERSION_NEWER)] - if node.timestamp < self.payload.timestamp: - # The received metadata has newer version than the stuff we got, so we have to update the local version. - return self.update_channel_node(node) - - # This should never happen, really. But nonetheless, to appease the linter... - return CONTINUE - - def update_channel_node(self, node): - # Update the local metadata entry - if node.metadata_type == self.payload.metadata_type: - node.set(**self.payload.to_dict()) - return [ProcessingResult(md_obj=node, obj_state=ObjState.UPDATED_LOCAL_VERSION)] - - # Remote change of md type. - # We delete the original node and replace it with the updated one. 
- for orm_class in (self.mds.ChannelMetadata, self.mds.CollectionNode): - if orm_class._discriminator_ == self.payload.metadata_type: # pylint: disable=W0212 - node.delete() - obj = orm_class.from_payload(self.payload) - return [ProcessingResult(md_obj=obj, obj_state=ObjState.UPDATED_LOCAL_VERSION)] - - # Something went wrong, log it - self._logger.warning( - f"Tried to update channel node to illegal type: " - f" original type: {node.metadata_type}" - f" updated type: {self.payload.metadata_type}" - f" {hexlify(self.payload.public_key)}, {self.payload.id_} " - ) - return [] - - def request_missing_dependencies(self, node_list): - """ - Scan the results for entries with locally missing dependencies, such as thumbnail and description nodes, - and modify the results by adding a dict with request for missing nodes in the get_entries format. - """ - for r in node_list: - updated_local_channel_node = ( - r.obj_state == ObjState.UPDATED_LOCAL_VERSION and r.md_obj.metadata_type == CHANNEL_TORRENT - ) - r.missing_deps.extend( - self.requests_for_child_dependencies(r.md_obj, include_newer=updated_local_channel_node) - ) - - return node_list - - def perform_checks(self): - """ - This method runs checks on the received payload. Essentially, it acts like a firewall, rejecting - incorrect or conflicting entries. Individual checks can return either CONTINUE, an empty list or a list - of ProcessingResult objects. If CONTINUE sentinel object is returned, checks will proceed further. - If non-CONTINUE result is returned by a check, the checking process stops. - """ - if self.channel_public_key: - yield self.reject_payload_with_nonmatching_public_key(self.channel_public_key) - if self.skip_personal_metadata_payload: - yield self.reject_personal_metadata() - # We only allow deleting entries during authoritative updates - if self.channel_public_key: - yield self.process_delete_node_command() - yield self.reject_unknown_payload_type() - yield self.reject_payload_with_offending_words() - yield self.reject_obsolete_metadata() - yield self.add_ffa_node() - yield self.update_local_node() - yield self.add_node() - - # Something went wrong, log it - self._logger.warning( - f"Payload processing ended without actions, this should not happen normally." - f" Payload type: {self.payload.metadata_type}" - f" {hexlify(self.payload.public_key)}, {self.payload.id_} " - f" {self.payload.timestamp}" - ) - - yield [] - - def requests_for_child_dependencies(self, node, include_newer=False): - """ - This method checks the given ORM node (object) for missing dependencies, such as thumbnails and/or - descriptions. To do so, it checks for existence of special dependency flags in the object's - "reserved_flags" field and checks for existence of the corresponding dependencies in the local database. - """ - if node.metadata_type not in (CHANNEL_TORRENT, COLLECTION_NODE): - return [] - - result = [] - if node.description_flag: - result.extend(self.check_and_request_child_dependency(node, CHANNEL_DESCRIPTION, include_newer)) - if node.thumbnail_flag: - result.extend(self.check_and_request_child_dependency(node, CHANNEL_THUMBNAIL, include_newer)) - - return result - - def check_and_request_child_dependency(self, node, dep_type, include_newer=False): - """ - For each missing dependency it will generate a query in the "get_entry" format that should be addressed to the - peer that sent the original payload/node/object. - If include_newer argument is true, it will generate a query even if the dependencies exist in the local - database. 
However, this query will limit the selection to dependencies with a higher timestamp than that
-        of the local versions. Effectively, this query asks the remote peer for updates on dependencies. Thus,
-        it should only be issued when it is known that the parent object was updated.
-        """
-        dep_node = self.mds.ChannelNode.select(
-            lambda g: g.origin_id == node.id_ and g.public_key == node.public_key and g.metadata_type == dep_type
-        ).first()
-        request_dict = {
-            "metadata_type": [dep_type],
-            "channel_pk": node.public_key,
-            "origin_id": node.id_,
-            "first": 0,
-            "last": 1,
-        }
-        if not dep_node:
-            return [request_dict]
-        if include_newer:
-            request_dict["attribute_ranges"] = (("timestamp", dep_node.timestamp + 1, None),)
-            return [request_dict]
-        return []
-
-    @db_session
-    def process_payload(self):
-        result = []
-        for result in self.perform_checks():
-            if result is not CONTINUE:
-                break
-
-        if self.channel_public_key is None:
-            # The request came from the network, so check for missing dependencies
-            result = self.request_missing_dependencies(result)
-        return result
-
-
-def process_payload(metadata_store, payload, skip_personal_metadata_payload=True, channel_public_key=None):
-    """
-    This routine decides what to do with a given payload and executes the necessary actions.
-    To do so, it looks into the database, compares version numbers, etc.
-    It returns a list of tuples, each of which contains the corresponding new/old object and the actions
-    that were performed on that object.
-    :param metadata_store: Metadata Store object serving the database
-    :param payload: payload to work on
-    :param skip_personal_metadata_payload: if this is set to True, personal torrent metadata payloads received
-        through gossip will be ignored. The default value is True.
-    :param channel_public_key: rejects payloads that do not belong to this key.
-        Enabling this makes it possible to skip some costly checks during e.g. channel processing.
- - :return: a list of ProcessingResult objects - """ - - return PayloadChecker( - metadata_store, - payload, - skip_personal_metadata_payload=skip_personal_metadata_payload, - channel_public_key=channel_public_key, - ).process_payload() diff --git a/src/tribler/core/components/metadata_store/remote_query_community/remote_query_community.py b/src/tribler/core/components/metadata_store/remote_query_community/remote_query_community.py index a51b953375d..0128765043c 100644 --- a/src/tribler/core/components/metadata_store/remote_query_community/remote_query_community.py +++ b/src/tribler/core/components/metadata_store/remote_query_community/remote_query_community.py @@ -1,4 +1,5 @@ import json +import logging import struct import time from asyncio import Future @@ -17,16 +18,15 @@ from tribler.core.components.ipv8.eva.result import TransferResult from tribler.core.components.ipv8.tribler_community import TriblerCommunity from tribler.core.components.knowledge.community.knowledge_validator import is_valid_resource -from tribler.core.components.metadata_store.db.orm_bindings.channel_metadata import LZ4_EMPTY_ARCHIVE, entries_to_chunk -from tribler.core.components.metadata_store.db.serialization import CHANNEL_TORRENT, COLLECTION_NODE, REGULAR_TORRENT +from tribler.core.components.metadata_store.db.orm_bindings.torrent_metadata import LZ4_EMPTY_ARCHIVE, entries_to_chunk from tribler.core.components.metadata_store.db.store import MetadataStore -from tribler.core.components.metadata_store.remote_query_community.payload_checker import ObjState from tribler.core.components.metadata_store.remote_query_community.settings import RemoteQueryCommunitySettings from tribler.core.components.metadata_store.utils import RequestTimeoutException from tribler.core.utilities.pony_utils import run_threaded from tribler.core.utilities.unicode import hexlify BINARY_FIELDS = ("infohash", "channel_pk") +DEPRECATED_PARAMETERS = ['subscribed', 'attribute_ranges', 'complete_channel'] def sanitize_query(query_dict: Dict[str, Any], cap=100) -> Dict[str, Any]: @@ -272,6 +272,11 @@ def parse_parameters(self, json_bytes: bytes) -> Dict[str, Any]: async def _on_remote_select_basic(self, peer, request_payload, force_eva_response=False): try: sanitized_parameters = self.parse_parameters(request_payload.json) + # Drop selects with deprecated queries + if any(param in sanitized_parameters for param in DEPRECATED_PARAMETERS): + self.logger.warning(f"Remote select with deprecated parameters: {sanitized_parameters}") + self.ez_send(peer, SelectResponsePayload(request_payload.id, LZ4_EMPTY_ARCHIVE)) + return db_results = await self.process_rpc_query_rate_limited(sanitized_parameters) # When we send our response to a host, we open a window of opportunity @@ -286,7 +291,7 @@ async def _on_remote_select_basic(self, peer, request_payload, force_eva_respons @lazy_wrapper(SelectResponsePayload) async def on_remote_select_response(self, peer, response_payload): """ - Match the the response that we received from the network to a query cache + Match the response that we received from the network to a query cache and process it by adding the corresponding entries to the MetadataStore database. 
This processes both direct responses and pushback (updates) responses """ @@ -309,36 +314,6 @@ async def on_remote_select_response(self, peer, response_payload): if isinstance(request, EvaSelectRequest) and not request.processing_results.done(): request.processing_results.set_result(processing_results) - # If we know about updated versions of the received stuff, push the updates back - if isinstance(request, SelectRequest) and self.rqc_settings.push_updates_back_enabled: - newer_entities = [r.md_obj for r in processing_results if r.obj_state == ObjState.LOCAL_VERSION_NEWER] - self.send_db_results(peer, response_payload.id, newer_entities) - - if self.rqc_settings.channel_query_back_enabled: - for result in processing_results: - # Query back the sender for preview contents for the new channels - # The fact that the object is previously unknown is indicated by process_payload in the - # .obj_state property of returned ProcessingResults objects. - if result.obj_state == ObjState.NEW_OBJECT and result.md_obj.metadata_type in ( - CHANNEL_TORRENT, - COLLECTION_NODE, - ): - request_dict = { - "metadata_type": [COLLECTION_NODE, REGULAR_TORRENT], - "channel_pk": result.md_obj.public_key, - "origin_id": result.md_obj.id_, - "first": 0, - "last": self.rqc_settings.max_channel_query_back, - } - self.send_remote_select(peer=peer, **request_dict) - - # Query back for missing dependencies, e.g. thumbnail/description. - # The fact that some dependency is missing is checked by the lower layer during - # the query to process_payload and indicated through .missing_deps property of the - # ProcessingResults objects returned by process_payload. - for dep_query_dict in result.missing_deps: - self.send_remote_select(peer=peer, **dep_query_dict) - if isinstance(request, SelectRequest) and request.processing_callback: request.processing_callback(request, processing_results) @@ -346,6 +321,8 @@ async def on_remote_select_response(self, peer, response_payload): if isinstance(request, SelectRequest): request.peer_responded = True + return processing_results + def _on_query_timeout(self, request_cache): if not request_cache.peer_responded: self.logger.debug( diff --git a/src/tribler/core/components/metadata_store/remote_query_community/settings.py b/src/tribler/core/components/metadata_store/remote_query_community/settings.py index 9fcd21bdbf5..ae1c1e49dfa 100644 --- a/src/tribler/core/components/metadata_store/remote_query_community/settings.py +++ b/src/tribler/core/components/metadata_store/remote_query_community/settings.py @@ -16,9 +16,4 @@ class RemoteQueryCommunitySettings(TriblerConfigSection): max_query_peers: int = 20 max_response_size: int = 100 # Max number of entries returned by SQL query - max_channel_query_back: int = 4 # Max number of entries to query back on receiving an unknown channel push_updates_back_enabled = True - - @property - def channel_query_back_enabled(self): - return self.max_channel_query_back > 0 diff --git a/src/tribler/core/components/metadata_store/remote_query_community/tests/test_remote_query_community.py b/src/tribler/core/components/metadata_store/remote_query_community/tests/test_remote_query_community.py index b53eb1db690..a174eccaf15 100644 --- a/src/tribler/core/components/metadata_store/remote_query_community/tests/test_remote_query_community.py +++ b/src/tribler/core/components/metadata_store/remote_query_community/tests/test_remote_query_community.py @@ -3,7 +3,7 @@ import string import time from asyncio import sleep -from binascii import unhexlify +from binascii import 
hexlify, unhexlify from operator import attrgetter from os import urandom from unittest.mock import Mock, patch @@ -14,12 +14,13 @@ from pony.orm.dbapiprovider import OperationalError from tribler.core.components.ipv8.adapters_tests import TriblerTestBase -from tribler.core.components.metadata_store.db.orm_bindings.channel_node import NEW -from tribler.core.components.metadata_store.db.serialization import CHANNEL_THUMBNAIL, CHANNEL_TORRENT, REGULAR_TORRENT +from tribler.core.components.metadata_store.db.orm_bindings.torrent_metadata import NEW, LZ4_EMPTY_ARCHIVE +from tribler.core.components.metadata_store.db.serialization import CHANNEL_THUMBNAIL, REGULAR_TORRENT, \ + NULL_KEY from tribler.core.components.metadata_store.db.store import MetadataStore from tribler.core.components.metadata_store.remote_query_community.remote_query_community import ( RemoteQueryCommunity, - sanitize_query, + sanitize_query, SelectResponsePayload, ) from tribler.core.components.metadata_store.remote_query_community.settings import RemoteQueryCommunitySettings from tribler.core.utilities.path_util import Path @@ -34,18 +35,16 @@ def random_string(): return ''.join(random.choices(string.ascii_uppercase + string.digits, k=100)) -def add_random_torrent(metadata_cls, name="test", channel=None, seeders=None, leechers=None, last_check=None): - d = {"infohash": random_infohash(), "title": name, "tags": "", "size": 1234, "status": NEW} - if channel: - d.update({"origin_id": channel.id_}) +def add_random_torrent(metadata_cls, name="test", seeders=None, leechers=None, last_check=None): + d = {"infohash": random_infohash(), "public_key": NULL_KEY, "title": name, "tags": "", "size": 1234, "status": NEW} torrent_metadata = metadata_cls.from_dict(d) - torrent_metadata.sign() if seeders: torrent_metadata.health.seeders = seeders if leechers: torrent_metadata.health.leechers = leechers if last_check: torrent_metadata.health.last_check = last_check + return torrent_metadata class BasicRemoteQueryCommunity(RemoteQueryCommunity): @@ -87,9 +86,6 @@ def create_node(self, *args, **kwargs): self.count += 1 return node - def channel_metadata(self, i): - return self.nodes[i].overlay.mds.ChannelMetadata - def torrent_metadata(self, i): return self.nodes[i].overlay.mds.TorrentMetadata @@ -100,17 +96,12 @@ async def test_remote_select(self): mds0 = self.nodes[0].overlay.mds mds1 = self.nodes[1].overlay.mds - # We do not want the query back mechanism to interfere with this test - self.nodes[1].overlay.rqc_settings.max_channel_query_back = 0 - # Fill Node 0 DB with channels and torrents with db_session: - channel = mds0.ChannelMetadata.create_channel("ubuntu channel", "ubuntu") for i in range(20): add_random_torrent( mds0.TorrentMetadata, name=f"ubuntu {i}", - channel=channel, seeders=2 * i, leechers=i, last_check=int(time.time()) + i, @@ -142,85 +133,16 @@ async def test_remote_select(self): await self.deliver_messages(timeout=0.5) callback.assert_called() - async def test_remote_select_query_back(self): - """ - Test querying back preview contents for previously unknown channels. 
- """ - num_channels = 5 - max_received_torrents_per_channel_query_back = 4 - - mds0 = self.nodes[0].overlay.mds - mds1 = self.nodes[1].overlay.mds - - with db_session: - # Generate channels on Node 0 - for _ in range(0, num_channels): - chan = mds0.ChannelMetadata.create_channel("channel", "") - # Generate torrents in each channel - for i in range(0, max_received_torrents_per_channel_query_back): - torrent = mds0.TorrentMetadata(origin_id=chan.id_, infohash=random_infohash()) - torrent.health.seeders = i - - peer = self.nodes[0].my_peer - kwargs_dict = {"metadata_type": [CHANNEL_TORRENT]} - self.nodes[1].overlay.send_remote_select(peer, **kwargs_dict) - - await self.deliver_messages(timeout=0.5) - - with db_session: - received_channels = list(mds1.ChannelMetadata.select(lambda g: g.title == "channel")) - assert len(received_channels) == num_channels - # For each unknown channel that we received, we should have queried the sender for 4 preview torrents. - received_torrents = list(mds1.TorrentMetadata.select(lambda g: g.metadata_type == REGULAR_TORRENT)) - assert num_channels * max_received_torrents_per_channel_query_back == len(received_torrents) - seeders = {t.health.seeders for t in received_torrents} - assert seeders == set(range(max_received_torrents_per_channel_query_back)) - - async def test_push_back_entry_update(self): + async def test_remote_select_deprecated(self): """ - Test pushing back update for an entry. - Scenario: both hosts 0 and 1 have metadata entries for the same channel, - but host 1's version was created later (its timestamp is higher). - When host 1 queries -> host 0 for channel info, host 0 sends it back. - Upon receiving the response, host 1 sees that it has a newer version of the channel entry, - so it pushes it back to host 0. + Test deprecated search keys receiving an empty archive response. """ + with self.assertReceivedBy(0, [SelectResponsePayload]) as responses: + self.overlay(0).send_remote_select(self.peer(1), subscribed=1) + await self.deliver_messages() + response, = responses - mds0 = self.nodes[0].overlay.mds - mds1 = self.nodes[1].overlay.mds - - # Create the old and new versions of the test channel - # We sign it with a different private key to prevent the special treatment - # of personal channels during processing interfering with the test. 
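The test below signs the channel entry with a throwaway curve25519 key so that it is not treated as a personal channel. As a rough sketch of what signing with a separate key entails at the IPv8 level — hedged: this uses IPv8's `ECCrypto` helpers (`create_signature`, `is_valid_signature`) directly rather than the exact Tribler serialization path, and the blob is a placeholder:

```python
# Minimal sketch of signing and verifying a blob with a throwaway curve25519
# key via IPv8's crypto helpers, approximating what sign_with=fake_key does.
from ipv8.keyvault.crypto import default_eccrypto

fake_key = default_eccrypto.generate_key("curve25519")
blob = b"serialized metadata entry"  # placeholder for the real serialized payload

signature = default_eccrypto.create_signature(fake_key, blob)
assert default_eccrypto.is_valid_signature(fake_key.pub(), blob, signature)
```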
- fake_key = default_eccrypto.generate_key("curve25519") - with db_session: - chan = mds0.ChannelMetadata(infohash=random_infohash(), title="foo", sign_with=fake_key) - # pylint: disable=protected-access - chan_payload_old = chan._payload_class.from_signed_blob(chan.serialized()) - chan.timestamp = chan.timestamp + 1 - chan.sign(key=fake_key) - # pylint: disable=protected-access - chan_payload_updated = chan._payload_class.from_signed_blob(chan.serialized()) - chan.delete() - - # Add the older channel version to node 0 - mds0.ChannelMetadata.from_payload(chan_payload_old) - - # Add the updated channel version to node 1 - mds1.ChannelMetadata.from_payload(chan_payload_updated) - - # Just in case, assert the first node only got the older version for now - assert mds0.ChannelMetadata.get(timestamp=chan_payload_old.timestamp) - - # Node 1 requests channel peers from node 0 - peer = self.nodes[0].my_peer - kwargs_dict = {"metadata_type": [CHANNEL_TORRENT]} - self.nodes[1].overlay.send_remote_select(peer, **kwargs_dict) - await self.deliver_messages(timeout=0.5) - - with db_session: - # Check that node0 now got the updated version - assert mds0.ChannelMetadata.get(timestamp=chan_payload_updated.timestamp) + assert response.raw_blob == LZ4_EMPTY_ARCHIVE async def test_push_entry_update(self): """ @@ -238,10 +160,8 @@ async def test_remote_select_torrents(self): mds1 = self.nodes[1].overlay.mds with db_session: - chan = mds0.ChannelMetadata.create_channel(random_string(), "") torrent_infohash = random_infohash() - torrent = mds0.TorrentMetadata(origin_id=chan.id_, infohash=torrent_infohash, title='title1') - torrent.sign() + mds0.TorrentMetadata(infohash=torrent_infohash, public_key=NULL_KEY, title='title1') callback_called = asyncio.Event() processing_results = [] @@ -262,25 +182,6 @@ def callback(_, results): assert obj.title == 'title1' assert obj.health.seeders == 0 - with db_session: - torrent = mds0.TorrentMetadata.get(infohash=torrent_infohash) - torrent.timestamp += 1 - torrent.title = 'title2' - torrent.sign() - - processing_results = [] - callback_called.clear() - - self.nodes[1].overlay.send_remote_select( - peer, metadata_type=[REGULAR_TORRENT], infohash=torrent_infohash, processing_callback=callback - ) - - await callback_called.wait() - - assert len(processing_results) == 1 - obj = processing_results[0].md_obj - assert isinstance(obj, mds1.TorrentMetadata) - assert obj.health.seeders == 0 async def test_remote_select_packets_limit(self): """ @@ -290,15 +191,15 @@ async def test_remote_select_packets_limit(self): mds0 = self.nodes[0].overlay.mds mds1 = self.nodes[1].overlay.mds - # We do not want the query back mechanism to interfere with this test - self.nodes[1].overlay.rqc_settings.max_channel_query_back = 0 - with db_session: for _ in range(0, 100): - mds0.ChannelMetadata.create_channel(random_string(), "") + md = add_random_torrent(mds0.TorrentMetadata, name=random_string()) + key = default_eccrypto.generate_key("curve25519") + md.public_key = key.pub().key_to_bin()[10:] + md.signature = md.serialized(key)[-64:] peer = self.nodes[0].my_peer - kwargs_dict = {"metadata_type": [CHANNEL_TORRENT]} + kwargs_dict = {"metadata_type": [REGULAR_TORRENT]} self.nodes[1].overlay.send_remote_select(peer, **kwargs_dict) # There should be an outstanding request in the list self.assertTrue(self.nodes[1].overlay.request_cache._identifiers) # pylint: disable=protected-access @@ -306,10 +207,10 @@ async def test_remote_select_packets_limit(self): await self.deliver_messages(timeout=1.5) with 
db_session: - received_channels = list(mds1.ChannelMetadata.select()) - # We should receive less that 6 packets, so all the channels should not fit there. - received_channels_count = len(received_channels) - assert 40 < received_channels_count < 60 + received_torrents = list(mds1.TorrentMetadata.select()) + # We should receive less than 6 packets, so all the channels should not fit there. + received_torrents_count = len(received_torrents) + assert 40 <= received_torrents_count < 60 # The list of outstanding requests should be empty self.assertFalse(self.nodes[1].overlay.request_cache._identifiers) # pylint: disable=protected-access @@ -350,28 +251,28 @@ async def test_process_rpc_query_match_many(self): Check if a correct query with a match in our database returns a result. """ with db_session: - channel = self.channel_metadata(0).create_channel("a channel", "") - add_random_torrent(self.torrent_metadata(0), name="a torrent", channel=channel) + add_random_torrent(self.torrent_metadata(0), name="torrent1") + add_random_torrent(self.torrent_metadata(0), name="torrent2") results = await self.overlay(0).process_rpc_query({}) self.assertEqual(2, len(results)) - channel_md, torrent_md = results if isinstance(results[0], self.channel_metadata(0)) else results[::-1] - self.assertEqual("a channel", channel_md.title) - self.assertEqual("a torrent", torrent_md.title) + torrent1_md, torrent2_md = results if results[0].title == "torrent1" else results[::-1] + self.assertEqual("torrent1", torrent1_md.title) + self.assertEqual("torrent2", torrent2_md.title) async def test_process_rpc_query_match_one(self): """ Check if a correct query with one match in our database returns one result. """ with db_session: - self.channel_metadata(0).create_channel("a channel", "") + add_random_torrent(self.torrent_metadata(0), name="a torrent") results = await self.overlay(0).process_rpc_query({}) self.assertEqual(1, len(results)) - (channel_md,) = results - self.assertEqual("a channel", channel_md.title) + (torrent_md,) = results + self.assertEqual("a torrent", torrent_md.title) async def test_process_rpc_query_match_none(self): """ @@ -398,8 +299,6 @@ async def test_process_rpc_query_match_invalid_json(self): """ Check if processing a request with invalid JSON causes a ValueError to be raised. """ - with db_session: - self.channel_metadata(0).create_channel("a channel", "") query = b'{"id_":' + b'\x31' * 200 + b'}' with self.assertRaises(ValueError): parameters = self.overlay(0).parse_parameters(query) @@ -426,9 +325,9 @@ async def test_remote_query_big_response(self): mds0 = self.nodes[0].overlay.mds mds1 = self.nodes[1].overlay.mds - value = urandom(20000) + value = urandom(10000) with db_session: - mds1.ChannelThumbnail(binary_data=value) + add_random_torrent(mds1.TorrentMetadata, name=hexlify(value)) kwargs_dict = {"metadata_type": [CHANNEL_THUMBNAIL]} callback = Mock() @@ -444,100 +343,7 @@ async def test_remote_query_big_response(self): torrents1 = mds1.get_entries(**kwargs_dict) self.assertEqual(len(torrents0), len(torrents1)) - async def test_remote_select_query_back_thumbs_and_descriptions(self): - """ - Test querying back preview thumbnail and description for previously unknown and updated channels. 
- """ - mds0 = self.nodes[0].overlay.mds - mds1 = self.nodes[1].overlay.mds - - with db_session: - # Generate channels on Node 0 - chan = mds0.ChannelMetadata.create_channel("channel", "") - mds0.ChannelThumbnail( - public_key=chan.public_key, - origin_id=chan.id_, - binary_data=urandom(2000), - data_type="image/png", - status=NEW, - ) - - mds0.ChannelDescription( - public_key=chan.public_key, origin_id=chan.id_, json_text='{"description_text": "foobar"}', status=NEW - ) - chan.commit_all_channels() - chan_v = chan.timestamp - - peer = self.nodes[0].my_peer - kwargs_dict = {"metadata_type": [CHANNEL_TORRENT]} - self.nodes[1].overlay.send_remote_select(peer, **kwargs_dict) - - await self.deliver_messages(timeout=0.5) - - with db_session: - assert mds1.ChannelMetadata.get(lambda g: g.title == "channel") - assert mds1.ChannelThumbnail.get() - assert mds1.ChannelDescription.get() - - # Now test querying for updated version of description/thumbnail - with db_session: - thumb = mds0.ChannelThumbnail.get() - new_pic_bytes = urandom(2500) - thumb.update_properties({"binary_data": new_pic_bytes}) - descr = mds0.ChannelDescription.get() - descr.update_properties({"json_text": '{"description_text": "yummy"}'}) - - chan = mds0.ChannelMetadata.get() - chan.commit_all_channels() - chan_v2 = chan.timestamp - assert chan_v2 > chan_v - - self.nodes[1].overlay.send_remote_select(peer, **kwargs_dict) - - await self.deliver_messages(timeout=1) - - with db_session: - assert mds1.ChannelMetadata.get(lambda g: g.title == "channel") - assert mds1.ChannelThumbnail.get().binary_data == new_pic_bytes - assert mds1.ChannelDescription.get().json_text == '{"description_text": "yummy"}' - - # Test querying for missing dependencies (e.g. thumbnails and descriptions lost due to transfer errors) - with db_session: - mds1.ChannelThumbnail.get().delete() - mds1.ChannelDescription.get().delete() - self.nodes[1].overlay.send_remote_select(peer, **kwargs_dict) - - await self.deliver_messages(timeout=1) - - with db_session: - mds1.ChannelThumbnail.get() - mds1.ChannelDescription.get() - - # Test that we're only going to query for updated objects and skip old ones - with db_session: - chan = mds0.ChannelMetadata.get() - mds0.TorrentMetadata(public_key=chan.public_key, origin_id=chan.id_, infohash=random_infohash(), status=NEW) - - chan.commit_all_channels() - chan_v3 = chan.timestamp - assert chan_v3 > chan_v2 - - self.nodes[0].overlay.eva_send_binary = Mock() - - self.nodes[1].overlay.send_remote_select(peer, **kwargs_dict) - await self.deliver_messages(timeout=1) - - # Big transfer should not have been called, because we only queried for updated version of the thumbnail - self.nodes[0].overlay.eva_send_binary.assert_not_called() - - with db_session: - assert mds1.ChannelMetadata.get(lambda g: g.title == "channel").timestamp == chan_v3 - async def test_drop_silent_peer(self): - - # We do not want the query back mechanism to interfere with this test - self.nodes[1].overlay.rqc_settings.max_channel_query_back = 0 - kwargs_dict = {"txt_filter": "ubuntu*"} basic_path = 'tribler.core.components.metadata_store.remote_query_community.remote_query_community' @@ -554,9 +360,6 @@ async def test_drop_silent_peer(self): async def test_dont_drop_silent_peer_on_empty_response(self): # Test that even in the case of an empty response packet, remove_peer is not called on timeout - # We do not want the query back mechanism to interfere with this test - self.nodes[1].overlay.rqc_settings.max_channel_query_back = 0 - was_called = [] async def 
mock_on_remote_select_response(*_, **__): @@ -575,9 +378,9 @@ async def test_remote_select_force_eva(self): # Test requesting usage of EVA for sending multiple smaller entries with db_session: for _ in range(0, 10): - self.nodes[1].overlay.mds.ChannelThumbnail(binary_data=urandom(500)) + add_random_torrent(self.nodes[1].overlay.mds.TorrentMetadata, name=hexlify(urandom(250))) - kwargs_dict = {"metadata_type": [CHANNEL_THUMBNAIL]} + kwargs_dict = {"metadata_type": [REGULAR_TORRENT]} self.nodes[1].overlay.eva.send_binary = Mock() self.nodes[0].overlay.send_remote_select(self.nodes[1].my_peer, **kwargs_dict, force_eva_response=True) diff --git a/src/tribler/core/components/metadata_store/remote_query_community/tests/test_remote_search_by_tags.py b/src/tribler/core/components/metadata_store/remote_query_community/tests/test_remote_search_by_tags.py index 504202b0aa6..ef55fbfd485 100644 --- a/src/tribler/core/components/metadata_store/remote_query_community/tests/test_remote_search_by_tags.py +++ b/src/tribler/core/components/metadata_store/remote_query_community/tests/test_remote_search_by_tags.py @@ -10,7 +10,7 @@ TestKnowledgeAccessLayerBase from tribler.core.components.database.db.tribler_database import TriblerDatabase from tribler.core.components.ipv8.adapters_tests import TriblerTestBase -from tribler.core.components.metadata_store.db.orm_bindings.channel_node import NEW +from tribler.core.components.metadata_store.db.orm_bindings.torrent_metadata import NEW from tribler.core.components.metadata_store.db.store import MetadataStore from tribler.core.components.metadata_store.remote_query_community.remote_query_community import RemoteQueryCommunity from tribler.core.components.metadata_store.remote_query_community.settings import RemoteQueryCommunitySettings diff --git a/src/tribler/core/components/metadata_store/restapi/channels_endpoint.py b/src/tribler/core/components/metadata_store/restapi/channels_endpoint.py deleted file mode 100644 index e4170812ac6..00000000000 --- a/src/tribler/core/components/metadata_store/restapi/channels_endpoint.py +++ /dev/null @@ -1,519 +0,0 @@ -import base64 -import json -from asyncio import CancelledError -from binascii import unhexlify -from pathlib import Path - -from aiohttp import ClientSession, ContentTypeError, web -from aiohttp_apispec import docs, json_schema, querystring_schema -from ipv8.REST.schema import schema -from marshmallow.fields import Boolean, Integer, String -from pony.orm import db_session - -from tribler.core.components.gigachannel.community.gigachannel_community import GigaChannelCommunity -from tribler.core.components.gigachannel_manager.gigachannel_manager import GigaChannelManager -from tribler.core.components.libtorrent.download_manager.download_manager import DownloadManager -from tribler.core.components.libtorrent.torrentdef import TorrentDef -from tribler.core.components.metadata_store.db.orm_bindings.channel_node import DIRTY_STATUSES, NEW -from tribler.core.components.metadata_store.db.serialization import CHANNEL_TORRENT, REGULAR_TORRENT -from tribler.core.components.metadata_store.restapi.metadata_endpoint_base import MetadataEndpointBase -from tribler.core.components.metadata_store.restapi.metadata_schema import ChannelSchema, MetadataParameters, MetadataSchema, TorrentSchema -from tribler.core.components.metadata_store.utils import NoChannelSourcesException, RequestTimeoutException -from tribler.core.components.restapi.rest.rest_endpoint import HTTP_BAD_REQUEST, HTTP_NOT_FOUND, RESTResponse -from 
tribler.core.components.restapi.rest.schema import HandledErrorSchema -from tribler.core.utilities.simpledefs import CHANNEL_STATE -from tribler.core.utilities.unicode import hexlify -from tribler.core.utilities.utilities import froze_it, parse_bool, parse_magnetlink - -ERROR_INVALID_MAGNET_LINK = "Invalid magnet link: %s" - - -async def _fetch_uri(uri): - async with ClientSession() as session: - response = await session.get(uri) - data = await response.read() - return data - - -@froze_it -class ChannelsEndpoint(MetadataEndpointBase): - path = '/channels' - - def __init__(self, - download_manager: DownloadManager, - gigachannel_manager: GigaChannelManager, - gigachannel_community: GigaChannelCommunity, - *args, **kwargs): - MetadataEndpointBase.__init__(self, *args, **kwargs) - self.download_manager = download_manager - self.gigachannel_manager = gigachannel_manager - self.gigachannel_community = gigachannel_community - - def setup_routes(self): - self.app.add_routes( - [ - web.get('', self.get_channels), - web.get(r'/{channel_pk:\w*}/{channel_id:\w*}', self.get_channel_contents), - web.get(r'/{channel_pk:\w*}/{channel_id:\w*}/description', self.get_channel_description), - web.put(r'/{channel_pk:\w*}/{channel_id:\w*}/description', self.put_channel_description), - web.get(r'/{channel_pk:\w*}/{channel_id:\w*}/thumbnail', self.get_channel_thumbnail), - web.put(r'/{channel_pk:\w*}/{channel_id:\w*}/thumbnail', self.put_channel_thumbnail), - web.post(r'/{channel_pk:\w*}/{channel_id:\w*}/copy', self.copy_channel), - web.post(r'/{channel_pk:\w*}/{channel_id:\w*}/channels', self.create_channel), - web.post(r'/{channel_pk:\w*}/{channel_id:\w*}/collections', self.create_collection), - web.put(r'/{channel_pk:\w*}/{channel_id:\w*}/torrents', self.add_torrent_to_channel), - web.post(r'/{channel_pk:\w*}/{channel_id:\w*}/commit', self.post_commit), - web.get(r'/{channel_pk:\w*}/{channel_id:\w*}/commit', self.is_channel_dirty), - web.get('/popular_torrents', self.get_popular_torrents_channel), - ] - ) - - def add_download_progress_to_metadata_list(self, contents_list): - for torrent in contents_list: - if torrent['type'] == REGULAR_TORRENT: - dl = self.download_manager.get_download(unhexlify(torrent['infohash'])) - if dl is not None and dl.tdef.infohash not in self.download_manager.metainfo_requests: - torrent['progress'] = dl.get_state().get_progress() - - def get_channel_from_request(self, request): - channel_pk = ( - self.mds.my_key.pub().key_to_bin()[10:] - if request.match_info['channel_pk'] == 'mychannel' - else unhexlify(request.match_info['channel_pk']) - ) - channel_id = int(request.match_info['channel_id']) - return channel_pk, channel_id - - @docs( - tags=['Metadata'], - summary='Get a list of all channels known to the system.', - responses={ - 200: { - 'schema': schema( - GetChannelsResponse={ - 'results': [ChannelSchema], - 'first': Integer(), - 'last': Integer(), - 'sort_by': String(), - 'sort_desc': Integer(), - 'total': Integer(), - } - ) - } - }, - ) - @querystring_schema(MetadataParameters) - async def get_channels(self, request): - sanitized = self.sanitize_parameters(request.query) - sanitized['subscribed'] = None if 'subscribed' not in request.query else parse_bool(request.query['subscribed']) - include_total = request.query.get('include_total', '') - sanitized.update({"origin_id": 0}) - sanitized['metadata_type'] = CHANNEL_TORRENT - - with db_session: - channels = self.mds.get_entries(**sanitized) - total = self.mds.get_total_count(**sanitized) if include_total else None - 
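Note the tri-state handling of the `subscribed` flag above: an absent parameter means "do not filter" (`None`), not `False`. A minimal sketch of that pattern — this `parse_bool` is a stand-in for Tribler's utility of the same name, not its actual implementation:

```python
# Tri-state query flag: None = "don't filter", True/False = explicit filter.
# parse_bool here is a stand-in for tribler.core.utilities.utilities.parse_bool.
from typing import Optional


def parse_bool(value: str) -> bool:
    return value.strip().lower() not in ("", "0", "false", "no")


def subscribed_filter(query: dict) -> Optional[bool]:
    return None if "subscribed" not in query else parse_bool(query["subscribed"])


assert subscribed_filter({}) is None                        # no filtering at all
assert subscribed_filter({"subscribed": "1"}) is True       # only subscribed channels
assert subscribed_filter({"subscribed": "false"}) is False  # only unsubscribed channels
```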
channels_list = [] - for channel in channels: - channel_dict = channel.to_simple_dict() - # Add progress info for those channels that are still being processed - if channel.subscribed: - if channel_dict["state"] == CHANNEL_STATE.UPDATING.value: - try: - progress = self.mds.compute_channel_update_progress(channel) - channel_dict["progress"] = progress - except (ZeroDivisionError, FileNotFoundError) as e: - self._logger.error( - "Error %s when calculating channel update progress. Channel data: %s-%i %i/%i", - e, - hexlify(channel.public_key), - channel.id_, - channel.start_timestamp, - channel.local_version, - ) - elif channel_dict["state"] == CHANNEL_STATE.METAINFO_LOOKUP.value: - if not self.download_manager.metainfo_requests.get( - bytes(channel.infohash) - ) and self.download_manager.download_exists(bytes(channel.infohash)): - channel_dict["state"] = CHANNEL_STATE.DOWNLOADING.value - - channels_list.append(channel_dict) - response_dict = { - "results": channels_list, - "first": sanitized["first"], - "last": sanitized["last"], - "sort_by": sanitized["sort_by"], - "sort_desc": int(sanitized["sort_desc"]), - } - if total is not None: - response_dict.update({"total": total}) - return RESTResponse(response_dict) - - @docs( - tags=['Metadata'], - summary='Get a list of the channel\'s contents (torrents/channels/etc.).', - responses={ - 200: { - 'schema': schema( - GetChannelContentsResponse={ - 'results': [MetadataSchema], - 'first': Integer(), - 'last': Integer(), - 'sort_by': String(), - 'sort_desc': Integer(), - 'total': Integer(), - } - ) - } - }, - ) - async def get_channel_contents(self, request): - self._logger.info('Get channel content') - sanitized = self.sanitize_parameters(request.query) - include_total = request.query.get('include_total', '') - channel_pk, channel_id = self.get_channel_from_request(request) - sanitized.update({"channel_pk": channel_pk, "origin_id": channel_id}) - remote = sanitized.pop("remote", None) - - total = None - - remote_failed = False - if remote: - try: - self._logger.info('Receive remote content') - contents_list = await self.gigachannel_community.remote_select_channel_contents(**sanitized) - except (RequestTimeoutException, NoChannelSourcesException, CancelledError): - remote_failed = True - self._logger.info('Remote request failed') - - if not remote or remote_failed: - self._logger.info('Receive local content') - with db_session: - contents = self.mds.get_entries(**sanitized) - contents_list = [] - for entry in contents: - contents_list.append(entry.to_simple_dict()) - total = self.mds.get_total_count(**sanitized) if include_total else None - - if self.tag_rules_processor: - await self.tag_rules_processor.process_queue() - - self.add_download_progress_to_metadata_list(contents_list) - self.add_statements_to_metadata_list(contents_list, hide_xxx=sanitized["hide_xxx"]) - response_dict = { - "results": contents_list, - "first": sanitized['first'], - "last": sanitized['last'], - "sort_by": sanitized['sort_by'], - "sort_desc": int(sanitized['sort_desc']), - } - if total is not None: - response_dict.update({"total": total}) - - return RESTResponse(response_dict) - - async def get_channel_description(self, request): - channel_pk, channel_id = self.get_channel_from_request(request) - with db_session: - channel_description = self.mds.ChannelDescription.select( - lambda g: g.public_key == channel_pk and g.origin_id == channel_id - ).first() - - response_dict = json.loads(channel_description.json_text) if (channel_description is not None) else {} - return 
RESTResponse(response_dict) - - async def put_channel_description(self, request): - channel_pk, channel_id = self.get_channel_from_request(request) - request_parsed = await request.json() - updated_json_text = json.dumps({"description_text": request_parsed["description_text"]}) - with db_session: - channel_description = self.mds.ChannelDescription.select( - lambda g: g.public_key == channel_pk and g.origin_id == channel_id - ).first() - if channel_description is not None: - channel_description.update_properties({"json_text": updated_json_text}) - else: - channel_description = self.mds.ChannelDescription( - public_key=channel_pk, origin_id=channel_id, json_text=updated_json_text, status=NEW - ) - return RESTResponse(json.loads(channel_description.json_text)) - - async def get_channel_thumbnail(self, request): - channel_pk, channel_id = self.get_channel_from_request(request) - with db_session: - obj = self.mds.ChannelThumbnail.select( - lambda g: g.public_key == channel_pk and g.origin_id == channel_id - ).first() - return web.Response(body=obj.binary_data, content_type=obj.data_type) if obj else web.Response(status=400) - - async def put_channel_thumbnail(self, request): - content_type = request.headers["Content-Type"] - post_body = await request.read() - channel_pk, channel_id = self.get_channel_from_request(request) - obj_properties = {"binary_data": post_body, "data_type": content_type} - with db_session: - obj = self.mds.ChannelThumbnail.select( - lambda g: g.public_key == channel_pk and g.origin_id == channel_id, - ).first() - if obj is not None: - obj.update_properties(obj_properties) - else: - self.mds.ChannelThumbnail(public_key=channel_pk, origin_id=channel_id, status=NEW, **obj_properties) - return web.Response(status=201) - - @docs( - tags=['Metadata'], - summary='Create a copy of an entry/entries from another channel.', - parameters=[ - { - 'in': 'body', - 'name': 'entries', - 'description': 'List of entries to copy', - 'example': [{'public_key': '1234567890', 'id': 123}], - 'required': True, - } - ], - responses={ - 200: {'description': 'Returns a list of copied content'}, - HTTP_NOT_FOUND: {'schema': HandledErrorSchema, 'example': {"error": "Target channel not found"}}, - HTTP_BAD_REQUEST: {'schema': HandledErrorSchema, 'example': {"error": "Source entry not found"}}, - }, - ) - async def copy_channel(self, request): - with db_session: - channel_pk, channel_id = self.get_channel_from_request(request) - personal_root = channel_id == 0 and channel_pk == self.mds.my_key.pub().key_to_bin()[10:] - # TODO: better error handling - target_collection = self.mds.CollectionNode.get(public_key=channel_pk, id_=channel_id) - try: - request_parsed = await request.json() - except (ContentTypeError, ValueError): - return RESTResponse({"error": "Bad JSON"}, status=HTTP_BAD_REQUEST) - - if not target_collection and not personal_root: - return RESTResponse({"error": "Target channel not found"}, status=HTTP_NOT_FOUND) - results_list = [] - for entry in request_parsed: - public_key, id_ = unhexlify(entry["public_key"]), entry["id"] - source = self.mds.ChannelNode.get(public_key=public_key, id_=id_) - if not source: - return RESTResponse({"error": "Source entry not found"}, status=HTTP_BAD_REQUEST) - # We must upgrade Collections to Channels when moving them to root channel, and, vice-versa, - # downgrade Channels to Collections when moving them into existing channels - if isinstance(source, self.mds.CollectionNode): - src_dict = source.to_dict() - if channel_id == 0: - rslt = 
self.mds.ChannelMetadata.create_channel(title=source.title) - else: - dst_dict = {'origin_id': channel_id, "status": NEW} - for k in self.mds.CollectionNode.nonpersonal_attributes: - dst_dict[k] = src_dict[k] - dst_dict.pop("metadata_type") - rslt = self.mds.CollectionNode(**dst_dict) - for child in source.actual_contents: - child.make_copy(rslt.id_) - else: - rslt = source.make_copy(channel_id) - results_list.append(rslt.to_simple_dict()) - return RESTResponse(results_list) - - @docs( - tags=['Metadata'], - summary='Create a new channel entry in the given channel.', - responses={ - 200: { - 'description': 'Returns the newly created channel', - 'schema': schema(CreateChannelResponse={'results': [ChannelSchema]}), - } - }, - ) - async def create_channel(self, request): - with db_session: - _, channel_id = self.get_channel_from_request(request) - request_parsed = await request.json() - channel_name = request_parsed.get("name", "New channel") - md = self.mds.ChannelMetadata.create_channel(channel_name, origin_id=channel_id) - return RESTResponse({"results": [md.to_simple_dict()]}) - - @docs( - tags=['Metadata'], - summary='Create a new collection entry in the given channel.', - responses={ - 200: { - 'description': 'Returns the newly created collection', - 'schema': schema(CreateCollectionResponse={'results': [ChannelSchema]}), - } - }, - ) - async def create_collection(self, request): - with db_session: - _, channel_id = self.get_channel_from_request(request) - request_parsed = await request.json() - collection_name = request_parsed.get("name", "New collection") - md = self.mds.CollectionNode(origin_id=channel_id, title=collection_name, status=NEW) - return RESTResponse({"results": [md.to_simple_dict()]}) - - @docs( - tags=['Metadata'], - summary='Add a torrent file to your own channel.', - responses={ - 200: { - 'schema': schema( - AddTorrentToChannelResponse={'added': (Integer, 'Number of torrent that were added to the channel')} - ) - }, - HTTP_NOT_FOUND: {'schema': HandledErrorSchema, 'example': {"error": "Unknown channel"}}, - HTTP_BAD_REQUEST: {'schema': HandledErrorSchema, 'example': {"error": "unknown uri type"}}, - }, - ) - @json_schema( - schema( - AddTorrentToChannelRequest={ - 'torrent': (String, 'Base64-encoded torrent file'), - 'uri': (String, 'Add a torrent from a magnet link or URL'), - 'torrents_dir': (String, 'Add all .torrent files from a chosen directory'), - 'recursive': (Boolean, 'Toggle recursive scanning of the chosen directory for .torrent files'), - 'description': (String, 'Description for the torrent'), - } - ) - ) - async def add_torrent_to_channel(self, request): - channel_pk, channel_id = self.get_channel_from_request(request) - with db_session: - channel = self.mds.CollectionNode.get(public_key=channel_pk, id_=channel_id) - if not channel: - return RESTResponse({"error": "Unknown channel"}, status=HTTP_NOT_FOUND) - - parameters = await request.json() - - extra_info = {} - if parameters.get('description', None): - extra_info = {'description': parameters['description']} - - # First, check whether we did upload a magnet link or URL - if parameters.get('uri', None): - uri = parameters['uri'] - if uri.startswith("http:") or uri.startswith("https:"): - data = await _fetch_uri(uri) - tdef = TorrentDef.load_from_memory(data) - elif uri.startswith("magnet:"): - _, xt, _ = parse_magnetlink(uri) - - if not xt: - return RESTResponse({"error": ERROR_INVALID_MAGNET_LINK.format(uri)}, status=HTTP_BAD_REQUEST) - - if self.mds.torrent_exists_in_personal_channel(xt) or 
channel.copy_torrent_from_infohash(xt): - return RESTResponse({"added": 1}) - - meta_info = await self.download_manager.get_metainfo(xt, timeout=30, url=uri) - if not meta_info: - return RESTResponse(f'Metainfo request for {uri} timed out.', status=HTTP_BAD_REQUEST) - tdef = TorrentDef.load_from_dict(meta_info) - else: - return RESTResponse({"error": "unknown uri type"}, status=HTTP_BAD_REQUEST) - - added = 0 - if tdef: - channel.add_torrent_to_channel(tdef, extra_info) - added = 1 - return RESTResponse({"added": added}) - - torrents_dir = None - if parameters.get('torrents_dir', None): - torrents_dir = parameters['torrents_dir'] - if not Path(torrents_dir).is_absolute(): - return RESTResponse({"error": "the torrents_dir should point to a directory"}, status=HTTP_BAD_REQUEST) - - recursive = False - if parameters.get('recursive'): - recursive = parameters['recursive'] - if not torrents_dir: - return RESTResponse( - {"error": "the torrents_dir parameter should be provided when the recursive parameter is set"}, - status=HTTP_BAD_REQUEST, - ) - - if torrents_dir: - torrents_list, errors_list = await channel.add_torrents_from_dir(torrents_dir, recursive) - return RESTResponse({"added": len(torrents_list), "errors": errors_list}) - - if not parameters.get('torrent', None): - return RESTResponse({"error": "torrent parameter missing"}, status=HTTP_BAD_REQUEST) - - # Try to parse the torrent data - # Any errors will be handled by the error_middleware - torrent = base64.b64decode(parameters['torrent']) - torrent_def = TorrentDef.load_from_memory(torrent) - channel.add_torrent_to_channel(torrent_def, extra_info) - return RESTResponse({"added": 1}) - - @docs( - tags=['Metadata'], - summary='Commit a channel.', - responses={200: {'schema': schema(CommitResponse={'success': Boolean()})}}, - ) - async def post_commit(self, request): - channel_pk, channel_id = self.get_channel_from_request(request) - with db_session: - if channel_id == 0: - for t in self.mds.CollectionNode.commit_all_channels(): - self.gigachannel_manager.updated_my_channel(TorrentDef.load_from_dict(t)) - else: - coll = self.mds.CollectionNode.get(public_key=channel_pk, id_=channel_id) - if not coll: - return RESTResponse({"success": False}, status=HTTP_NOT_FOUND) - torrent_dict = coll.commit_channel_torrent() - if torrent_dict: - self.gigachannel_manager.updated_my_channel(TorrentDef.load_from_dict(torrent_dict)) - - return RESTResponse({"success": True}) - - @docs( - tags=['Metadata'], - summary='Check if a channel has uncommitted changes.', - responses={200: {'schema': schema(IsChannelDirtyResponse={'dirty': Boolean()})}}, - ) - async def is_channel_dirty(self, request): - channel_pk, _ = self.get_channel_from_request(request) - with db_session: - dirty = self.mds.MetadataNode.exists(lambda g: g.public_key == channel_pk and g.status in DIRTY_STATUSES) - return RESTResponse({"dirty": dirty}) - - @docs( - tags=['Metadata'], - summary='Get the list of most popular torrents. 
Functions as a pseudo-channel.', - responses={ - 200: { - 'schema': schema( - GetPopularTorrentsResponse={ - 'results': [TorrentSchema], - 'first': Integer(), - 'last': Integer(), - } - ) - } - }, - ) - async def get_popular_torrents_channel(self, request): - sanitized = self.sanitize_parameters(request.query) - sanitized["metadata_type"] = REGULAR_TORRENT - sanitized["popular"] = True - - with db_session: - contents = self.mds.get_entries(**sanitized) - contents_list = [] - for entry in contents: - contents_list.append(entry.to_simple_dict()) - - if self.tag_rules_processor: - await self.tag_rules_processor.process_queue() - - self.add_download_progress_to_metadata_list(contents_list) - self.add_statements_to_metadata_list(contents_list, hide_xxx=sanitized["hide_xxx"]) - response_dict = { - "results": contents_list, - "first": sanitized['first'], - "last": sanitized['last'], - } - - return RESTResponse(response_dict) diff --git a/src/tribler/core/components/metadata_store/restapi/metadata_endpoint.py b/src/tribler/core/components/metadata_store/restapi/metadata_endpoint.py index 4f05ecb7164..0b1bb35a473 100644 --- a/src/tribler/core/components/metadata_store/restapi/metadata_endpoint.py +++ b/src/tribler/core/components/metadata_store/restapi/metadata_endpoint.py @@ -1,48 +1,26 @@ from binascii import unhexlify from typing import Optional -from aiohttp import ContentTypeError, web +from aiohttp import web from aiohttp_apispec import docs -from ipv8.REST.base_endpoint import HTTP_BAD_REQUEST, HTTP_NOT_FOUND +from ipv8.REST.base_endpoint import HTTP_BAD_REQUEST from ipv8.REST.schema import schema -from marshmallow.fields import Boolean +from marshmallow.fields import Boolean, Integer from pony.orm import db_session -from tribler.core.components.metadata_store.db.orm_bindings.channel_node import LEGACY_ENTRY +from tribler.core.components.libtorrent.download_manager.download_manager import DownloadManager +from tribler.core.components.metadata_store.db.serialization import REGULAR_TORRENT from tribler.core.components.metadata_store.restapi.metadata_endpoint_base import MetadataEndpointBase +from tribler.core.components.metadata_store.restapi.metadata_schema import TorrentSchema from tribler.core.components.restapi.rest.rest_endpoint import RESTResponse -from tribler.core.components.restapi.rest.schema import HandledErrorSchema from tribler.core.components.torrent_checker.torrent_checker.torrent_checker import TorrentChecker -from tribler.core.utilities.unicode import hexlify from tribler.core.utilities.utilities import froze_it TORRENT_CHECK_TIMEOUT = 20 -class UpdateEntryMixin: - @db_session - def update_entry(self, public_key, id_, update_dict): - entry = self.mds.ChannelNode.get(public_key=public_key, id_=id_) - if not entry: - return HTTP_NOT_FOUND, {"error": "Object with the specified pk+id could not be found."} - - signed_parameters_to_change = set(entry.payload_arguments).intersection(set(update_dict.keys())) - if signed_parameters_to_change: - if 'status' in update_dict: - return HTTP_BAD_REQUEST, {"error": "Cannot set status manually when changing signed attributes."} - if entry.status == LEGACY_ENTRY: - return HTTP_BAD_REQUEST, {"error": "Changing parameters of legacy entries is not supported."} - if not entry.is_personal: - return ( - HTTP_BAD_REQUEST, - {"error": "Changing signed parameters in non-personal entries is not supported."}, - ) - - return None, entry.update_properties(update_dict).to_simple_dict() - - @froze_it -class MetadataEndpoint(MetadataEndpointBase, 
UpdateEntryMixin): +class MetadataEndpoint(MetadataEndpointBase): """ This is the top-level endpoint class that serves other endpoints. @@ -52,127 +30,20 @@ class MetadataEndpoint(MetadataEndpointBase, UpdateEntryMixin): """ path = '/metadata' - def __init__(self, torrent_checker: Optional[TorrentChecker], *args, **kwargs): + def __init__(self, download_manager: DownloadManager, + torrent_checker: Optional[TorrentChecker], *args, **kwargs): MetadataEndpointBase.__init__(self, *args, **kwargs) + self.download_manager = download_manager self.torrent_checker = torrent_checker def setup_routes(self): self.app.add_routes( [ - web.patch('', self.update_channel_entries), - web.delete('', self.delete_channel_entries), web.get('/torrents/{infohash}/health', self.get_torrent_health), - web.patch(r'/{public_key:\w*}/{id:\w*}', self.update_channel_entry), - web.get(r'/{public_key:\w*}/{id:\w*}', self.get_channel_entries), + web.get('/torrents/popular', self.get_popular_torrents), ] ) - @docs( - tags=['Metadata'], - summary='Update channel entries.', - parameters=[ - { - 'in': 'body', - 'name': 'entries', - 'description': 'List of entries to update', - 'example': [{'public_key': '1234567890', 'id': 123, 'property_to_update': 'new_value'}], - 'required': True, - } - ], - responses={ - 200: {'description': 'Returns a list of updated entries'}, - HTTP_NOT_FOUND: {'schema': HandledErrorSchema}, - HTTP_BAD_REQUEST: {'schema': HandledErrorSchema}, - }, - ) - async def update_channel_entries(self, request): - try: - request_parsed = await request.json() - except (ContentTypeError, ValueError): - return RESTResponse({"error": "Bad JSON"}, status=HTTP_BAD_REQUEST) - results_list = [] - for entry in request_parsed: - public_key = unhexlify(entry.pop("public_key")) - id_ = entry.pop("id") - error, result = self.update_entry(public_key, id_, entry) - # TODO: handle the results for a list that contains some errors in a smarter way - if error: - return RESTResponse(result, status=error) - results_list.append(result) - return RESTResponse(results_list) - - @docs( - tags=['Metadata'], - summary='Delete channel entries.', - parameters=[ - { - 'in': 'body', - 'name': 'entries', - 'description': 'List of entries to delete', - 'example': [{'public_key': '1234567890', 'id': 123}], - 'required': True, - } - ], - responses={ - 200: {'description': 'Returns a list of deleted entries'}, - HTTP_BAD_REQUEST: {'schema': HandledErrorSchema}, - }, - ) - async def delete_channel_entries(self, request): - with db_session: - request_parsed = await request.json() - results_list = [] - for entry in request_parsed: - public_key = unhexlify(entry.pop("public_key")) - id_ = entry.pop("id") - entry = self.mds.ChannelNode.get(public_key=public_key, id_=id_) - if not entry: - return RESTResponse({"error": "Entry %i not found" % id_}, status=HTTP_BAD_REQUEST) - entry.delete() - result = {"public_key": hexlify(public_key), "id": id_, "state": "Deleted"} - results_list.append(result) - return RESTResponse(results_list) - - @docs( - tags=['Metadata'], - summary='Update a single channel entry.', - responses={ - 200: {'description': 'The updated entry'}, - HTTP_NOT_FOUND: {'schema': HandledErrorSchema}, - HTTP_BAD_REQUEST: {'schema': HandledErrorSchema}, - }, - ) - async def update_channel_entry(self, request): - # TODO: unify checks for parts of the path, i.e. proper hex for public key, etc. 
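With these handlers deleted, `/metadata` shrinks to the two routes registered in the new `setup_routes` above: the torrent health check and the popular-torrents listing. A hedged client-side sketch of the surviving surface — the port, `X-Api-Key` header, and infohash are assumptions, not values from this diff:

```python
# Hypothetical client for the two remaining /metadata routes after this change;
# port, API key and infohash are placeholders.
import asyncio
from aiohttp import ClientSession

BASE = "http://localhost:52194/metadata"
HEADERS = {"X-Api-Key": "<api key>"}


async def main():
    infohash = "00" * 20  # hex-encoded 20-byte infohash (placeholder)
    async with ClientSession(headers=HEADERS) as session:
        async with session.get(f"{BASE}/torrents/{infohash}/health",
                               params={"timeout": 20}) as resp:
            print(await resp.json())  # {"checking": True} while the check runs
        async with session.get(f"{BASE}/torrents/popular",
                               params={"first": 1, "last": 20}) as resp:
            print((await resp.json())["results"])


asyncio.run(main())
```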
- try: - parameters = await request.json() - except (ContentTypeError, ValueError): - return RESTResponse({"error": "Bad JSON input data"}, status=HTTP_BAD_REQUEST) - - public_key = unhexlify(request.match_info['public_key']) - id_ = request.match_info['id'] - error, result = self.update_entry(public_key, id_, parameters) - return RESTResponse(result, status=error or 200) - - @docs( - tags=['Metadata'], - summary='Get channel entries.', - responses={200: {'description': 'Returns a list of entries'}, HTTP_NOT_FOUND: {'schema': HandledErrorSchema}}, - ) - async def get_channel_entries(self, request): - public_key = unhexlify(request.match_info['public_key']) - id_ = request.match_info['id'] - with db_session: - entry = self.mds.ChannelNode.get(public_key=public_key, id_=id_) - - if entry: - # TODO: handle costly attributes in a more graceful and generic way for all types of metadata - entry_dict = entry.to_simple_dict() - else: - return RESTResponse({"error": "entry not found in database"}, status=HTTP_NOT_FOUND) - - return RESTResponse(entry_dict) - @docs( tags=["Metadata"], summary="Fetch the swarm health of a specific torrent.", @@ -220,3 +91,49 @@ async def get_torrent_health(self, request): check_coro = self.torrent_checker.check_torrent_health(infohash, timeout=timeout, scrape_now=True) self.async_group.add_task(check_coro) return RESTResponse({'checking': True}) + + def add_download_progress_to_metadata_list(self, contents_list): + for torrent in contents_list: + if torrent['type'] == REGULAR_TORRENT: + dl = self.download_manager.get_download(unhexlify(torrent['infohash'])) + if dl is not None and dl.tdef.infohash not in self.download_manager.metainfo_requests: + torrent['progress'] = dl.get_state().get_progress() + + @docs( + tags=['Metadata'], + summary='Get the list of most popular torrents.', + responses={ + 200: { + 'schema': schema( + GetPopularTorrentsResponse={ + 'results': [TorrentSchema], + 'first': Integer(), + 'last': Integer(), + } + ) + } + }, + ) + async def get_popular_torrents(self, request): + sanitized = self.sanitize_parameters(request.query) + sanitized["metadata_type"] = REGULAR_TORRENT + sanitized["popular"] = True + + with db_session: + contents = self.mds.get_entries(**sanitized) + contents_list = [] + for entry in contents: + contents_list.append(entry.to_simple_dict()) + + if self.tag_rules_processor: + await self.tag_rules_processor.process_queue() + + self.add_download_progress_to_metadata_list(contents_list) + self.add_statements_to_metadata_list(contents_list, hide_xxx=sanitized["hide_xxx"]) + response_dict = { + "results": contents_list, + "first": sanitized['first'], + "last": sanitized['last'], + } + + return RESTResponse(response_dict) diff --git a/src/tribler/core/components/metadata_store/restapi/metadata_endpoint_base.py b/src/tribler/core/components/metadata_store/restapi/metadata_endpoint_base.py index 9d3ff8d03fe..541452d20ac 100644 --- a/src/tribler/core/components/metadata_store/restapi/metadata_endpoint_base.py +++ b/src/tribler/core/components/metadata_store/restapi/metadata_endpoint_base.py @@ -62,7 +62,6 @@ def sanitize_parameters(cls, parameters): "txt_filter": parameters.get('txt_filter'), "hide_xxx": parse_bool(parameters.get('hide_xxx', False)), "category": parameters.get('category'), - "exclude_deleted": parse_bool(parameters.get('exclude_deleted', False)), } if 'tags' in parameters: sanitized['tags'] = parameters.getall('tags') diff --git a/src/tribler/core/components/metadata_store/restapi/remote_query_endpoint.py 
b/src/tribler/core/components/metadata_store/restapi/remote_query_endpoint.py deleted file mode 100644 index 873361d0628..00000000000 --- a/src/tribler/core/components/metadata_store/restapi/remote_query_endpoint.py +++ /dev/null @@ -1,94 +0,0 @@ -import time -from binascii import unhexlify - -from aiohttp import web -from aiohttp_apispec import docs, querystring_schema -from ipv8.REST.schema import schema -from marshmallow.fields import String, List -from pony.orm import db_session - -from tribler.core.components.gigachannel.community.gigachannel_community import GigaChannelCommunity -from tribler.core.components.metadata_store.restapi.metadata_endpoint import MetadataEndpointBase -from tribler.core.components.metadata_store.restapi.metadata_schema import RemoteQueryParameters -from tribler.core.components.restapi.rest.rest_endpoint import HTTP_BAD_REQUEST, RESTResponse -from tribler.core.utilities.unicode import hexlify -from tribler.core.utilities.utilities import froze_it - - -@froze_it -class RemoteQueryEndpoint(MetadataEndpointBase): - """ - This endpoint fires a remote search in the IPv8 GigaChannel Community. - """ - path = '/remote_query' - - def __init__(self, gigachannel_community: GigaChannelCommunity, *args, **kwargs): - MetadataEndpointBase.__init__(self, *args, **kwargs) - self.gigachannel_community = gigachannel_community - - def setup_routes(self): - self.app.add_routes([web.put('', self.create_remote_search_request)]) - self.app.add_routes([web.get('/channels_peers', self.get_channels_peers)]) - - def sanitize_parameters(self, parameters): - sanitized = super().sanitize_parameters(parameters) - - if "channel_pk" in parameters: - sanitized["channel_pk"] = unhexlify(parameters["channel_pk"]) - if "origin_id" in parameters: - sanitized["origin_id"] = int(parameters["origin_id"]) - - return sanitized - - @docs( - tags=['Metadata'], - summary="Perform a search for a given query.", - responses={200: { - 'schema': schema(RemoteSearchResponse={'request_uuid': String(), 'peers': List(String())})}, - "examples": { - 'Success': { - "request_uuid": "268560c0-3f28-4e6e-9d85-d5ccb0269693", - "peers": ["50e9a2ce646c373985a8e827e328830e053025c6", "107c84e5d9636c17b46c88c3ddb54842d80081b0"] - } - } - }, - ) - @querystring_schema(RemoteQueryParameters) - async def create_remote_search_request(self, request): - self._logger.info('Create remote search request') - # Query remote results from the GigaChannel Community. - # Results are returned over the Events endpoint. 
- try: - sanitized = self.sanitize_parameters(request.query) - except (ValueError, KeyError) as e: - return RESTResponse({"error": f"Error processing request parameters: {e}"}, status=HTTP_BAD_REQUEST) - self._logger.info(f'Parameters: {sanitized}') - - request_uuid, peers_list = self.gigachannel_community.send_search_request(**sanitized) - peers_mid_list = [hexlify(p.mid) for p in peers_list] - - return RESTResponse({"request_uuid": str(request_uuid), "peers": peers_mid_list}) - - async def get_channels_peers(self, _): - # Get debug stats for peers serving channels - current_time = time.time() - result = [] - mapping = self.gigachannel_community.channels_peers - with db_session: - for id_tuple, peers in mapping._channels_dict.items(): # pylint:disable=W0212 - channel_pk, channel_id = id_tuple - chan = self.mds.ChannelMetadata.get(public_key=channel_pk, id_=channel_id) - - peers_list = [] - for p in peers: - peers_list.append((hexlify(p.mid), int(current_time - p.last_response))) - - chan_dict = { - "channel_name": chan.title if chan else None, - "channel_pk": hexlify(channel_pk), - "channel_id": channel_id, - "peers": peers_list, - } - result.append(chan_dict) - - return RESTResponse({"channels_list": result}) diff --git a/src/tribler/core/components/metadata_store/restapi/search_endpoint.py b/src/tribler/core/components/metadata_store/restapi/search_endpoint.py index 6bdad682f4a..bd04fde58f8 100644 --- a/src/tribler/core/components/metadata_store/restapi/search_endpoint.py +++ b/src/tribler/core/components/metadata_store/restapi/search_endpoint.py @@ -1,18 +1,21 @@ import time +import typing +from binascii import unhexlify, hexlify from collections import defaultdict -from typing import Dict, List from aiohttp import web from aiohttp_apispec import docs, querystring_schema from ipv8.REST.schema import schema -from marshmallow.fields import Integer, String +from marshmallow.fields import Integer, String, List from pony.orm import db_session from tribler.core.components.database.db.layers.knowledge_data_access_layer import ResourceType from tribler.core.components.metadata_store.db.serialization import SNIPPET from tribler.core.components.metadata_store.db.store import MetadataStore from tribler.core.components.metadata_store.restapi.metadata_endpoint import MetadataEndpointBase -from tribler.core.components.metadata_store.restapi.metadata_schema import MetadataSchema, SearchMetadataParameters +from tribler.core.components.metadata_store.restapi.metadata_schema import SearchMetadataParameters, MetadataSchema, \ + RemoteQueryParameters +from tribler.core.components.popularity.community.popularity_community import PopularityCommunity from tribler.core.components.restapi.rest.rest_endpoint import HTTP_BAD_REQUEST, RESTResponse from tribler.core.utilities.pony_utils import run_threaded from tribler.core.utilities.utilities import froze_it @@ -28,17 +31,27 @@ class SearchEndpoint(MetadataEndpointBase): """ path = '/search' + def __init__(self, popularity_community: PopularityCommunity, *args, **kwargs): + MetadataEndpointBase.__init__(self, *args, **kwargs) + self.popularity_community = popularity_community + def setup_routes(self): - self.app.add_routes([web.get('', self.search), web.get('/completions', self.completions)]) + self.app.add_routes([web.get('/local', self.local_search), + web.put('/remote', self.remote_search), + web.get('/completions', self.completions)]) @classmethod def sanitize_parameters(cls, parameters): sanitized = super().sanitize_parameters(parameters) if "max_rowid" in 
parameters:
             sanitized["max_rowid"] = int(parameters["max_rowid"])
+        if "channel_pk" in parameters:
+            sanitized["channel_pk"] = unhexlify(parameters["channel_pk"])
+        if "origin_id" in parameters:
+            sanitized["origin_id"] = int(parameters["origin_id"])
         return sanitized
 
-    def build_snippets(self, search_results: List[Dict]) -> List[Dict]:
+    def build_snippets(self, search_results: typing.List[typing.Dict]) -> typing.List[typing.Dict]:
         """
         Build a list of snippets that bundle torrents describing the same content item.
         For each search result we determine the content item it is associated to and bundle it inside a snippet.
@@ -46,14 +59,15 @@ def build_snippets(self, search_results: List[Dict]) -> List[Dict]:
         Within each snippet, we sort on torrent popularity, putting the torrent with the most seeders on top.
         Torrents bundled in a snippet are filtered out from the search results.
         """
-        content_to_torrents: Dict[str, list] = defaultdict(list)
+        content_to_torrents: typing.Dict[str, list] = defaultdict(list)
         for search_result in search_results:
             if "infohash" not in search_result:
                 continue
             with db_session:
-                content_items: List[str] = self.tribler_db.knowledge.get_objects(subject_type=ResourceType.TORRENT,
-                                                                                 subject=search_result["infohash"],
-                                                                                 predicate=ResourceType.CONTENT_ITEM)
+                content_items: typing.List[str] = self.tribler_db.knowledge.get_objects(
+                    subject_type=ResourceType.TORRENT,
+                    subject=search_result["infohash"],
+                    predicate=ResourceType.CONTENT_ITEM)
             if content_items:
                 for content_id in content_items:
                     content_to_torrents[content_id].append(search_result)
@@ -66,7 +80,7 @@ def build_snippets(self, search_results: List[Dict]) -> List[Dict]:
         sorted_content_info = list(content_to_torrents.items())
         sorted_content_info.sort(key=lambda x: x[1][0]["num_seeders"], reverse=True)
 
-        snippets: List[Dict] = []
+        snippets: typing.List[typing.Dict] = []
         for content_info in sorted_content_info:
             content_id = content_info[0]
             torrents_in_snippet = content_to_torrents[content_id][:MAX_TORRENTS_IN_SNIPPETS]
@@ -114,7 +128,7 @@ def build_snippets(self, search_results: List[Dict]) -> List[Dict]:
         },
     )
     @querystring_schema(SearchMetadataParameters)
-    async def search(self, request):
+    async def local_search(self, request):
         try:
             sanitized = self.sanitize_parameters(request.query)
             tags = sanitized.pop('tags', None)
@@ -210,3 +224,32 @@ async def completions(self, request):
         # TODO: add XXX filtering for completion terms
         results = self.mds.get_auto_complete_terms(keywords, max_terms=5)
         return RESTResponse({"completions": results})
+
+    @docs(
+        tags=['Metadata'],
+        summary="Perform a search for a given query.",
+        responses={200: {
+            'schema': schema(RemoteSearchResponse={'request_uuid': String(), 'peers': List(String())})},
+            "examples": {
+                'Success': {
+                    "request_uuid": "268560c0-3f28-4e6e-9d85-d5ccb0269693",
+                    "peers": ["50e9a2ce646c373985a8e827e328830e053025c6", "107c84e5d9636c17b46c88c3ddb54842d80081b0"]
+                }
+            }
+        },
+    )
+    @querystring_schema(RemoteQueryParameters)
+    async def remote_search(self, request):
+        self._logger.info('Create remote search request')
+        # Query remote results from the PopularityCommunity.
+        # Results are returned over the Events endpoint.
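+        # Expected exchange, following the docs example above (the uuid and peer
+        # mids shown there are illustrative values, not real ones):
+        #   PUT /search/remote?txt_filter=ubuntu
+        #   -> {"request_uuid": "<uuid>", "peers": ["<hex-encoded peer mid>", ...]}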
+ try: + sanitized = self.sanitize_parameters(request.query) + except (ValueError, KeyError) as e: + return RESTResponse({"error": f"Error processing request parameters: {e}"}, status=HTTP_BAD_REQUEST) + self._logger.info(f'Parameters: {sanitized}') + + request_uuid, peers_list = self.popularity_community.send_search_request(**sanitized) + peers_mid_list = [hexlify(p.mid).decode() for p in peers_list] + + return RESTResponse({"request_uuid": str(request_uuid), "peers": peers_mid_list}) diff --git a/src/tribler/core/components/metadata_store/restapi/tests/conftest.py b/src/tribler/core/components/metadata_store/restapi/tests/conftest.py index 897ed1c6786..4ba11ff2c6a 100644 --- a/src/tribler/core/components/metadata_store/restapi/tests/conftest.py +++ b/src/tribler/core/components/metadata_store/restapi/tests/conftest.py @@ -5,7 +5,7 @@ from ipv8.keyvault.crypto import default_eccrypto from pony.orm import db_session -from tribler.core.components.metadata_store.db.orm_bindings.channel_node import NEW +from tribler.core.components.metadata_store.db.orm_bindings.torrent_metadata import NEW from tribler.core.components.metadata_store.utils import tag_torrent from tribler.core.utilities.utilities import random_infohash diff --git a/src/tribler/core/components/metadata_store/restapi/tests/test_channels_endpoint.py b/src/tribler/core/components/metadata_store/restapi/tests/test_channels_endpoint.py deleted file mode 100644 index a2133d162bc..00000000000 --- a/src/tribler/core/components/metadata_store/restapi/tests/test_channels_endpoint.py +++ /dev/null @@ -1,674 +0,0 @@ -import base64 -import json -from binascii import unhexlify -from unittest.mock import AsyncMock, Mock, patch - -import pytest -from ipv8.keyvault.crypto import default_eccrypto -from ipv8.util import succeed -from pony.orm import db_session - -from tribler.core.components.database.db.layers.knowledge_data_access_layer import ResourceType -from tribler.core.components.gigachannel.community.gigachannel_community import NoChannelSourcesException -from tribler.core.components.libtorrent.torrentdef import TorrentDef -from tribler.core.components.metadata_store.category_filter.family_filter import default_xxx_filter -from tribler.core.components.metadata_store.db.orm_bindings.channel_node import NEW -from tribler.core.components.metadata_store.db.serialization import CHANNEL_TORRENT, COLLECTION_NODE, REGULAR_TORRENT -from tribler.core.components.metadata_store.restapi.channels_endpoint import ChannelsEndpoint, ERROR_INVALID_MAGNET_LINK -from tribler.core.components.metadata_store.utils import RequestTimeoutException, tag_torrent -from tribler.core.components.restapi.rest.base_api_test import do_request -from tribler.core.components.restapi.rest.rest_endpoint import HTTP_BAD_REQUEST -from tribler.core.tests.tools.common import TORRENT_UBUNTU_FILE -from tribler.core.utilities.simpledefs import CHANNEL_STATE -from tribler.core.utilities.unicode import hexlify -from tribler.core.utilities.utilities import random_infohash - -PNG_DATA = unhexlify( - "89504e470d0a1a0a0000000d494844520" - "0000001000000010100000000376ef924" - "0000001049444154789c626001000000f" - "fff03000006000557bfabd40000000049454e44ae426082" -) - - -# pylint: disable=unused-argument, redefined-outer-name - -@pytest.fixture -def endpoint(mock_dlmgr, metadata_store, tribler_db): - def return_exc(*args, **kwargs): - raise RequestTimeoutException - - mock_dlmgr.metainfo_requests = {} - - return ChannelsEndpoint( - mock_dlmgr, - Mock(), - 
Mock(remote_select_channel_contents=return_exc), - metadata_store, - tribler_db=tribler_db - ) - - -async def test_get_channels(rest_api, add_fake_torrents_channels, add_subscribed_and_not_downloaded_channel, mock_dlmgr, - metadata_store): - """ - Test whether we can query some channels in the database with the REST API - """ - mock_dlmgr.download_exists = lambda *args: None - json_dict = await do_request(rest_api, 'channels') - assert len(json_dict['results']) == 11 - assert json_dict['results'][0]['state'] == CHANNEL_STATE.METAINFO_LOOKUP.value - - # We test out different combinations of channels' states and download progress - # State UPDATING: - metadata_store.compute_channel_update_progress = lambda _: 0.5 - with db_session: - channel = metadata_store.ChannelMetadata.select().first() - channel.subscribed = True - channel.local_version = 123 - - json_dict = await do_request(rest_api, 'channels') - assert json_dict['results'][-1]['progress'] == 0.5 - - # State DOWNLOADING - with db_session: - channel = metadata_store.ChannelMetadata.select().first() - channel.subscribed = True - channel.local_version = 0 - - mock_dlmgr.download_exists = lambda _: True - json_dict = await do_request(rest_api, 'channels') - assert json_dict['results'][-1]['state'] == CHANNEL_STATE.DOWNLOADING.value - - -async def test_get_channels_sort_by_health(rest_api, add_fake_torrents_channels, mock_dlmgr): - json_dict = await do_request(rest_api, 'channels?sort_by=health') - assert len(json_dict['results']) == 10 - - -async def test_get_channels_invalid_sort(add_fake_torrents_channels, mock_dlmgr, rest_api): - """ - Test whether we can query some channels in the database with the REST API and an invalid sort parameter - """ - json_dict = await do_request(rest_api, 'channels?sort_by=fdsafsdf') - assert len(json_dict['results']) == 10 - - -async def test_get_subscribed_channels(add_fake_torrents_channels, mock_dlmgr, rest_api): - """ - Test whether we can successfully query channels we are subscribed to with the REST API - """ - json_dict = await do_request(rest_api, 'channels?subscribed=1') - assert len(json_dict['results']) == 5 - - -async def test_get_channels_count(add_fake_torrents_channels, mock_dlmgr, rest_api): - """ - Test getting the total number of channels through the API - """ - json_dict = await do_request(rest_api, 'channels?subscribed=1&include_total=1') - assert json_dict['total'] == 5 - - -async def test_create_channel(rest_api, metadata_store): - """ - Test creating a channel in your channel with REST API POST request - """ - await do_request(rest_api, 'channels/mychannel/0/channels', request_type='POST', expected_code=200) - with db_session: - assert metadata_store.ChannelMetadata.get(title="New channel") - await do_request( - rest_api, 'channels/mychannel/0/channels', request_type='POST', post_data={"name": "foobar"}, expected_code=200 - ) - with db_session: - assert metadata_store.ChannelMetadata.get(title="foobar") - - -async def test_get_contents_count(add_fake_torrents_channels, mock_dlmgr, rest_api, metadata_store): - """ - Test getting the total number of items in a specific channel - """ - mock_dlmgr.get_download = lambda _: None - with db_session: - chan = metadata_store.ChannelMetadata.select().first() - - json_dict = await do_request(rest_api, f'channels/{hexlify(chan.public_key)}/123?include_total=1') - assert json_dict['total'] == 5 - - -async def test_get_channel_contents(metadata_store, add_fake_torrents_channels, mock_dlmgr, rest_api): - """ - Test whether we can query torrents from 
a channel - """ - mock_dlmgr.get_download().get_state().get_progress = lambda: 0.5 - with db_session: - chan = metadata_store.ChannelMetadata.select().first() - json_dict = await do_request(rest_api, f'channels/{hexlify(chan.public_key)}/123', expected_code=200) - assert len(json_dict['results']) == 5 - assert 'status' in json_dict['results'][0] - assert json_dict['results'][0]['progress'] == 0.5 - - -async def test_get_channel_contents_remote(metadata_store, add_fake_torrents_channels, mock_dlmgr, rest_api): - """ - Test whether we can query torrents from a channel from a remote peer - """ - mock_dlmgr.get_download().get_state().get_progress = lambda: 0.5 - - async def mock_select(**kwargs): - with db_session: - return [r.to_simple_dict() for r in metadata_store.get_entries(**kwargs)] - - rest_api.gigachannel_community = Mock() - rest_api.gigachannel_community.remote_select_channel_contents = mock_select - with db_session: - chan = metadata_store.ChannelMetadata.select().first() - json_dict = await do_request(rest_api, f'channels/{hexlify(chan.public_key)}/123?remote=1', expected_code=200) - assert len(json_dict['results']) == 5 - assert 'status' in json_dict['results'][0] - assert json_dict['results'][0]['progress'] == 0.5 - - -async def test_get_channel_contents_remote_request_timeout( - metadata_store, add_fake_torrents_channels, mock_dlmgr, rest_api -): - """ - Test whether we can query torrents from a channel from a remote peer. - In case of remote query timeout, the results should still be served from the local DB - """ - mock_dlmgr.get_download().get_state().get_progress = lambda: 0.5 - - async def mock_select(**kwargs): - raise RequestTimeoutException() - - rest_api.gigachannel_community = Mock() - rest_api.gigachannel_community.remote_select_channel_contents = mock_select - - with db_session: - chan = metadata_store.ChannelMetadata.select().first() - json_dict = await do_request(rest_api, f'channels/{hexlify(chan.public_key)}/123?remote=1', expected_code=200) - assert len(json_dict['results']) == 5 - assert 'status' in json_dict['results'][0] - assert json_dict['results'][0]['progress'] == 0.5 - - -async def test_get_channel_contents_remote_request_no_peers( - add_fake_torrents_channels, mock_dlmgr_get_download, rest_api, metadata_store -): - """ - Test whether we can query torrents from a channel from a remote peer. 
- In case of zero available remote sources for the channel, the results should still be served from the local DB - """ - - async def mock_select(**kwargs): - raise NoChannelSourcesException() - - rest_api.gigachannel_community = Mock() - rest_api.gigachannel_community.remote_select_channel_contents = mock_select - - with db_session: - chan = metadata_store.ChannelMetadata.select().first() - json_dict = await do_request(rest_api, f'channels/{hexlify(chan.public_key)}/123?remote=1', expected_code=200) - assert len(json_dict['results']) == 5 - assert 'status' in json_dict['results'][0] - - -async def test_get_channel_description(rest_api, metadata_store): - """ - Test getting description of the channel from the database - """ - descr_txt = "foobar" - with db_session: - chan = metadata_store.ChannelMetadata.create_channel(title="bla") - channel_description = metadata_store.ChannelDescription( - origin_id=chan.id_, json_text=json.dumps({"description_text": descr_txt}) - ) - response_dict = await do_request( - rest_api, f'channels/{hexlify(chan.public_key)}/{chan.id_}/description', expected_code=200 - ) - assert response_dict == json.loads(channel_description.json_text) - - -async def test_put_new_channel_description(rest_api, metadata_store): - """ - Test adding description to a channel - """ - new_descr = "lalala" - with db_session: - chan = metadata_store.ChannelMetadata.create_channel(title="bla") - response_dict = await do_request( - rest_api, - f'channels/{hexlify(chan.public_key)}/{chan.id_}/description', - request_type="PUT", - post_data={"description_text": new_descr}, - expected_code=200, - ) - - assert response_dict == {"description_text": new_descr} - - # Test updating description of a channel - updated_descr = "foobar" - response_dict = await do_request( - rest_api, - f'channels/{hexlify(chan.public_key)}/{chan.id_}/description', - request_type="PUT", - post_data={"description_text": updated_descr}, - expected_code=200, - ) - - assert response_dict == {"description_text": updated_descr} - - -async def test_get_popular_torrents(add_fake_torrents_channels, mock_dlmgr_get_download, mock_dlmgr, rest_api): - """ - Test getting the list of popular torrents. 
The list is served as contents of a pseudo-channel - """ - json_dict = await do_request(rest_api, 'channels/popular_torrents', expected_code=200) - # torrents2 & torrent4 in each of 10 channels (but not torrent0, as it has 0 seeders) - assert len(json_dict['results']) == 20 - - def fields(d, *args): - return {key: d[key] for key in args} - - seeders_orig_order = [fields(d, 'type', 'num_seeders', 'num_leechers') for d in json_dict['results']] - - def sort_key(d): - a = 1 if d["type"] == CHANNEL_TORRENT else 2 if d["type"] == COLLECTION_NODE else 3 - b = -d["num_seeders"] - c = -d["num_leechers"] - return (a, b, c) - - assert seeders_orig_order == sorted(seeders_orig_order, key=sort_key) - - -async def test_get_popular_torrents_mdtype(add_fake_torrents_channels, mock_dlmgr_get_download, rest_api): - """ - It should be not possible to specify metadata_type argument for popular torrents endpoint - """ - json_dict1 = await do_request(rest_api, 'channels/popular_torrents') - json_dict2 = await do_request(rest_api, 'channels/popular_torrents?metadata_type=300') - json_dict3 = await do_request(rest_api, 'channels/popular_torrents?metadata_type=400') - - # Currently popularity page force-set metadata_type to 300 (REGULAR_TORRENT) for all requests - assert json_dict1 == json_dict2 == json_dict3 - - -async def test_get_channel_contents_by_type(metadata_store, my_channel, mock_dlmgr_get_download, rest_api): - """ - Test filtering channel contents by a list of data types - """ - with db_session: - metadata_store.CollectionNode(title='some_folder', origin_id=my_channel.id_) - - json_dict = await do_request( - rest_api, - 'channels/%s/%d?metadata_type=%d&metadata_type=%d' - % (hexlify(my_channel.public_key), my_channel.id_, COLLECTION_NODE, REGULAR_TORRENT), - expected_code=200, - ) - - assert len(json_dict['results']) == 10 - assert 'status' in json_dict['results'][0] - - -async def test_commit_no_channel(rest_api): - """ - Test whether we get an error if we try to commit a channel without it being created - """ - await do_request(rest_api, 'channels/mychannel/123/commit', expected_code=404, request_type='POST') - - -async def test_commit_single_channel(my_channel, mock_dlmgr, rest_api): - """ - Test whether we can successfully commit changes to a single personal channel with the REST API - """ - json_dict = await do_request(rest_api, 'channels/mychannel/%i/commit' % my_channel.id_, request_type='POST') - assert json_dict["success"] - - -async def test_commit_all_channels(my_channel, mock_dlmgr, rest_api): - """ - Test whether we can successfully commit changes to a single personal channel with the REST API - """ - json_dict = await do_request(rest_api, 'channels/mychannel/0/commit', request_type='POST') - assert json_dict["success"] - - -async def test_get_commit_state(my_channel, rest_api): - """ - Test getting dirty status of a channel through its commit endpoint - """ - await do_request(rest_api, 'channels/mychannel/0/commit', expected_json={'dirty': True}) - - -async def test_add_torrents_no_channel(metadata_store, my_channel, rest_api): - """ - Test whether an error is returned when we try to add a torrent to your unexisting channel - """ - with db_session: - my_chan = metadata_store.ChannelMetadata.get_my_channels().first() - my_chan.delete() - await do_request( - rest_api, - f'channels/{hexlify(my_channel.public_key)}/{my_channel.id_}/torrents', - request_type='PUT', - expected_code=404, - ) - - -async def test_add_torrents_no_dir(my_channel, rest_api): - """ - Test whether an error is returned 
when pointing to a file instead of a directory when adding torrents - """ - post_params = {'torrents_dir': 'nonexisting'} - await do_request( - rest_api, - f'channels/{hexlify(my_channel.public_key)}/{my_channel.id_}/torrents', - request_type='PUT', - post_data=post_params, - expected_code=HTTP_BAD_REQUEST, - ) - - -async def test_add_torrents_recursive_no_dir(my_channel, rest_api): - """ - Test whether an error is returned when recursively adding torrents without a specified directory - """ - post_params = {'recursive': True} - await do_request( - rest_api, - f'channels/{hexlify(my_channel.public_key)}/{my_channel.id_}/torrents', - request_type='PUT', - post_data=post_params, - expected_code=HTTP_BAD_REQUEST, - ) - - -async def test_add_torrents_from_dir(my_channel, state_dir, rest_api): - """ - Test whether adding torrents from a directory to your channels works - """ - post_params = {'torrents_dir': str(state_dir), 'recursive': True} - await do_request( - rest_api, - f'channels/{hexlify(my_channel.public_key)}/{my_channel.id_}/torrents', - request_type='PUT', - post_data=post_params, - ) - - -async def test_add_torrent_missing_torrent(my_channel, rest_api): - """ - Test whether an error is returned when adding a torrent to your channel but with a missing torrent parameter - """ - post_params = {} - await do_request( - rest_api, - f'channels/{hexlify(my_channel.public_key)}/{my_channel.id_}/torrents', - request_type='PUT', - post_data=post_params, - expected_code=HTTP_BAD_REQUEST, - ) - - -async def test_add_invalid_torrent(my_channel, rest_api): - """ - Test whether an error is returned when adding an invalid torrent file to your channel - """ - post_params = {'torrent': 'bla'} - await do_request( - rest_api, - f'channels/{hexlify(my_channel.public_key)}/{my_channel.id_}/torrents', - request_type='PUT', - post_data=post_params, - expected_code=500, - ) - - -async def test_add_torrent_duplicate(my_channel, rest_api): - """ - Test that adding a duplicate torrent to you channel does not result in an error - """ - tdef = await TorrentDef.load(TORRENT_UBUNTU_FILE) - with db_session: - my_channel.add_torrent_to_channel(tdef, {'description': 'blabla'}) - - with open(TORRENT_UBUNTU_FILE, "rb") as torrent_file: - base64_content = base64.b64encode(torrent_file.read()).decode('utf-8') - - post_params = {'torrent': base64_content} - await do_request( - rest_api, - f'channels/{hexlify(my_channel.public_key)}/{my_channel.id_}/torrents', - request_type='PUT', - post_data=post_params, - expected_code=200, - ) - - -async def test_add_torrent(my_channel, rest_api): - """ - Test adding a torrent to your channel - """ - with open(TORRENT_UBUNTU_FILE, "rb") as torrent_file: - base64_content = base64.b64encode(torrent_file.read()) - - post_params = {'torrent': base64_content.decode('utf-8')} - await do_request( - rest_api, - f'channels/{hexlify(my_channel.public_key)}/{my_channel.id_}/torrents', - request_type='PUT', - post_data=post_params, - ) - - -async def test_add_torrent_invalid_uri(my_channel, rest_api): - """ - Test whether adding a torrent to your channel with an invalid URI results in an error - """ - post_params = {'uri': 'thisisinvalid'} - await do_request( - rest_api, - f'channels/{hexlify(my_channel.public_key)}/{my_channel.id_}/torrents', - request_type='PUT', - post_data=post_params, - expected_code=HTTP_BAD_REQUEST, - ) - - -async def test_add_torrent_from_url(my_channel, tmpdir, rest_api): - """ - Test whether we can add a torrent to your channel from an URL - """ - post_params = {'uri': 
'http://localhost:123/ubuntu.torrent'} - - async def _mock_fetch(*args): - with open(TORRENT_UBUNTU_FILE, 'rb') as f: - return f.read() - - with patch('tribler.core.components.metadata_store.restapi.channels_endpoint._fetch_uri', new=_mock_fetch): - await do_request( - rest_api, - f'channels/{hexlify(my_channel.public_key)}/{my_channel.id_}/torrents', - request_type='PUT', - post_data=post_params, - ) - - -async def test_add_torrent_from_magnet(my_channel, mock_dlmgr, rest_api, metadata_store): - """ - Test whether we can add a torrent to your channel from a magnet link - """ - metadata_store.torrent_exists_in_personal_channel = Mock() - - post_params = {'uri': 'magnet:?xt=urn:btih:1111111111111111111111111111111111111111'} - await do_request( - rest_api, - f'channels/{hexlify(my_channel.public_key)}/{my_channel.id_}/torrents', - request_type='PUT', - post_data=post_params, - ) - metadata_store.torrent_exists_in_personal_channel.assert_called_once() - - -async def test_add_torrent_from_magnet_error(my_channel, mock_dlmgr, rest_api): - """ - Test whether an error while adding magnets to your channel results in a proper 500 error - """ - - def fake_get_metainfo(*_, **__): - return succeed(None) - - mock_dlmgr.get_metainfo = fake_get_metainfo - - invalid_magnet_link = 'magnet:?fake' - post_params = {'uri': invalid_magnet_link} - response = await do_request( - rest_api, - f'channels/{hexlify(my_channel.public_key)}/{my_channel.id_}/torrents', - request_type='PUT', - post_data=post_params, - expected_code=HTTP_BAD_REQUEST, - ) - assert response['error'] == ERROR_INVALID_MAGNET_LINK.format(invalid_magnet_link) - - -async def test_get_torrents(my_channel, mock_dlmgr_get_download, rest_api, metadata_store): - """ - Test whether we can query some torrents in the database with the REST API - """ - with db_session: - chan = metadata_store.ChannelMetadata.select().first() - json_dict = await do_request(rest_api, 'channels/%s/%d' % (hexlify(chan.public_key), my_channel.id_)) - assert len(json_dict['results']) == 9 - - -async def test_get_torrents_ffa_channel(my_channel, mock_dlmgr_get_download, rest_api, metadata_store): - """ - Test whether we can query channel contents for unsigned (legacy/FFA) channels - """ - with db_session: - channel = metadata_store.ChannelMetadata(title='ffa', infohash=random_infohash(), public_key=b"", id_=123) - metadata_store.TorrentMetadata( - public_key=b"", id_=333333, origin_id=channel.id_, title='torrent', infohash=random_infohash() - ) - - def on_response(json_dict): - assert len(json_dict['results']) == 1 - - on_response(await do_request(rest_api, 'channels/00/123')) - - -async def test_put_channel_thumbnail(rest_api, metadata_store): - """ - Test adding description to a channel - """ - with db_session: - chan = metadata_store.ChannelMetadata.create_channel(title="bla") - await do_request( - rest_api, - f'channels/{hexlify(chan.public_key)}/{chan.id_}/thumbnail', - request_type="PUT", - headers={'Content-Type': 'image/png'}, - json_response=False, - post_data=PNG_DATA, - expected_code=201, - ) - with db_session: - obj = metadata_store.ChannelThumbnail.get(public_key=chan.public_key, origin_id=chan.id_) - assert obj.binary_data == PNG_DATA - assert obj.data_type == 'image/png' - - # Test updating channel thumbnail - await do_request( - rest_api, - f'channels/{hexlify(chan.public_key)}/{chan.id_}/thumbnail', - request_type="PUT", - headers={'Content-Type': 'image/foo'}, - json_response=False, - post_data=b"ffff", - expected_code=201, - ) - with db_session: - obj = 
metadata_store.ChannelThumbnail.get(public_key=chan.public_key, origin_id=chan.id_) - assert obj.binary_data == b"ffff" - assert obj.data_type == 'image/foo' - - -async def test_get_channel_thumbnail(rest_api, metadata_store): - """ - Test getting a channel thumbnail from MetadataStore - """ - - with db_session: - chan = metadata_store.ChannelMetadata.create_channel(title="bla") - metadata_store.ChannelThumbnail( - public_key=chan.public_key, origin_id=chan.id_, binary_data=PNG_DATA, data_type="image/png" - ) - endpoint = f'channels/{hexlify(chan.public_key)}/{chan.id_}/thumbnail' - url = f'/{endpoint}' - - async with rest_api.request("GET", url, ssl=False) as response: - assert response.status == 200 - assert await response.read() == PNG_DATA - assert response.headers["Content-Type"] == "image/png" - - -async def test_get_my_channel_tags(metadata_store, mock_dlmgr_get_download, my_channel, - rest_api): - """ - Test whether tags are correctly returned over the REST API - """ - with db_session: - json_dict = await do_request( - rest_api, - 'channels/%s/%d?metadata_type=%d' - % (hexlify(my_channel.public_key), my_channel.id_, REGULAR_TORRENT), - expected_code=200, - ) - - assert len(json_dict['results']) == 9 - for item in json_dict['results']: - assert len(item["statements"]) >= 2 - - -async def test_get_my_channel_tags_xxx(metadata_store, tribler_db, mock_dlmgr_get_download, my_channel, - rest_api): - """ - Test whether XXX tags are correctly filtered - """ - with db_session: - chan = metadata_store.ChannelMetadata.create_channel('test', 'test') - infohash = random_infohash() - _ = metadata_store.TorrentMetadata(origin_id=chan.id_, title='taggedtorrent', status=NEW, infohash=infohash) - default_xxx_filter.xxx_terms = {"wrongterm"} - - # Add a few tags to our new torrent - tags = ["totally safe", "wrongterm", "wRonGtErM", "a wrongterm b"] - tag_torrent(infohash, tribler_db, tags=tags) - - json_dict = await do_request( - rest_api, - 'channels/%s/%d?metadata_type=%d&hide_xxx=1' - % (hexlify(my_channel.public_key), chan.id_, REGULAR_TORRENT), - expected_code=200, - ) - - assert len(json_dict['results']) == 1 - print(json_dict) - tag_statements = [s for s in json_dict["results"][0]["statements"] if s["predicate"] == ResourceType.TAG] - assert len(tag_statements) == 1 - - -async def test_timeout_for_metainfo_request(my_channel, rest_api, mock_dlmgr): - """ Test that in the case of a timeout, the client receives HTTP_BAD_REQUEST """ - mock_dlmgr.get_metainfo = AsyncMock(return_value=None) - await do_request( - rest_api, - f'channels/{hexlify(my_channel.public_key)}/{my_channel.id_}/torrents', - request_type='PUT', - post_data={'uri': 'magnet:?xt=urn:btih:1111111111111111111111111111111111111111'}, - expected_code=HTTP_BAD_REQUEST, - ) - - assert mock_dlmgr.get_metainfo.called diff --git a/src/tribler/core/components/metadata_store/restapi/tests/test_metadata_endpoint.py b/src/tribler/core/components/metadata_store/restapi/tests/test_metadata_endpoint.py index f609f8658a4..8d742fa217b 100644 --- a/src/tribler/core/components/metadata_store/restapi/tests/test_metadata_endpoint.py +++ b/src/tribler/core/components/metadata_store/restapi/tests/test_metadata_endpoint.py @@ -1,16 +1,13 @@ -import json -from unittest.mock import MagicMock +from unittest.mock import MagicMock, Mock, AsyncMock import pytest -from pony.orm import db_session -from tribler.core.components.metadata_store.db.orm_bindings.channel_node import COMMITTED, TODELETE, UPDATED +from 
tribler.core.components.metadata_store.db.serialization import REGULAR_TORRENT from tribler.core.components.metadata_store.restapi.metadata_endpoint import MetadataEndpoint, TORRENT_CHECK_TIMEOUT from tribler.core.components.restapi.rest.base_api_test import do_request from tribler.core.components.torrent_checker.torrent_checker.torrent_checker import TorrentChecker from tribler.core.config.tribler_config import TriblerConfig from tribler.core.utilities.unicode import hexlify -from tribler.core.utilities.utilities import random_infohash # pylint: disable=unused-argument, redefined-outer-name @@ -38,160 +35,7 @@ async def torrent_checker(mock_dlmgr, metadata_store): @pytest.fixture def endpoint(torrent_checker, metadata_store): - return MetadataEndpoint(torrent_checker, metadata_store) - - -async def test_update_multiple_metadata_entries(metadata_store, add_fake_torrents_channels, rest_api): - """ - Test updating attributes of several metadata entities at once with a PATCH request to REST API - """ - # Test handling the wrong/empty JSON gracefully - await do_request(rest_api, 'metadata', expected_code=400, request_type='PATCH', post_data='abc') - - # Test trying update a non-existing entry - await do_request( - rest_api, - 'metadata', - post_data=[{'public_key': hexlify(b'1' * 64), 'id': 111}], - expected_code=404, - request_type='PATCH', - ) - with db_session: - md1 = metadata_store.TorrentMetadata(title='old1', infohash=random_infohash()) - md2 = metadata_store.ChannelMetadata(title='old2', infohash=random_infohash(), subscribed=False) - - NEW_NAME1 = "updated1" - NEW_NAME2 = "updated2" - patch_data = [ - {'public_key': hexlify(md1.public_key), 'id': md1.id_, 'title': NEW_NAME1}, - {'public_key': hexlify(md2.public_key), 'id': md2.id_, 'title': NEW_NAME2, 'subscribed': 1}, - ] - await do_request(rest_api, 'metadata', post_data=patch_data, expected_code=200, request_type='PATCH') - with db_session: - entry1 = metadata_store.ChannelNode.get(rowid=md1.rowid) - assert NEW_NAME1 == entry1.title - assert UPDATED == entry1.status - - entry2 = metadata_store.ChannelNode.get(rowid=md2.rowid) - assert NEW_NAME2 == entry2.title - assert UPDATED == entry2.status - assert entry2.subscribed - - -async def test_delete_multiple_metadata_entries(rest_api, metadata_store): - """ - Test deleting multiple entries with JSON REST API - """ - with db_session: - md1 = metadata_store.TorrentMetadata(title='old1', infohash=random_infohash()) - md2 = metadata_store.TorrentMetadata(title='old2', infohash=random_infohash()) - assert metadata_store.ChannelNode.select().count() == 2 - - patch_data = [ - {'public_key': hexlify(md1.public_key), 'id': md1.id_}, - {'public_key': hexlify(md2.public_key), 'id': md2.id_}, - ] - await do_request(rest_api, 'metadata', post_data=patch_data, expected_code=200, request_type='DELETE') - with db_session: - assert metadata_store.ChannelNode.select().count() == 0 - - -async def test_update_entry_missing_json(metadata_store, rest_api): - """ - Test whether an error is returned if we try to change entry with the REST API and missing JSON data - """ - channel_pk = hexlify(metadata_store.ChannelNode._my_key.pub().key_to_bin()[10:]) - await do_request(rest_api, f'metadata/{channel_pk}/123', expected_code=400, request_type='PATCH', post_data='abc') - - -async def test_update_entry_not_found(metadata_store, rest_api): - """ - Test whether an error is returned if we try to change some metadata entry that is not there - """ - patch_params = {'subscribed': '1'} - await do_request(rest_api, 
'metadata/aa/123', expected_code=404, request_type='PATCH', post_data=patch_params) - - -async def test_update_entry_status_and_name(metadata_store, rest_api): - """ - Test whether an error is returned if try to modify both the status and name of a torrent - """ - with db_session: - chan = metadata_store.ChannelMetadata.create_channel(title="bla") - patch_params = {'status': TODELETE, 'title': 'test'} - await do_request( - rest_api, - 'metadata/%s/%i' % (hexlify(chan.public_key), chan.id_), - request_type='PATCH', - post_data=patch_params, - expected_code=400, - ) - - -async def test_update_entry(rest_api, metadata_store): - """ - Test updating a metadata entry with REST API - """ - new_title = 'bla2' - new_tags = "Compressed" - - with db_session: - chan = metadata_store.ChannelMetadata.create_channel(title="bla") - chan.status = COMMITTED - - patch_params = {'title': new_title, 'tags': new_tags} - - result = await do_request( - rest_api, - 'metadata/%s/%i' % (hexlify(chan.public_key), chan.id_), - request_type='PATCH', - post_data=patch_params, - expected_code=200, - ) - - assert new_title == result['name'] - assert new_tags == result['category'] - with db_session: - chan = metadata_store.ChannelMetadata.get_my_channels().first() - assert chan.status == UPDATED - assert chan.tags == new_tags - assert chan.title == new_title - - -async def test_get_entry(rest_api, metadata_store): - """ - Test getting an entry with REST API GET request - """ - for md_type, kwargs in ( - ( - metadata_store.TorrentMetadata, - {"title": "bla", "infohash": random_infohash(), - "tracker_info": "http://sometracker.local/announce"}, - ), - ( - metadata_store.ChannelDescription, - { - "text": json.dumps( - {"description_text": "*{{}bla <\\> [)]// /ee2323㋛㋛㋛ ", "channel_thumbnail": "ffffff.jpg"} - ) - }, - ), - ): - with db_session: - md = md_type(**kwargs) - md.status = COMMITTED - await do_request( - rest_api, - 'metadata/%s/%i' % (hexlify(md.public_key), md.id_), - expected_json=md.to_simple_dict(), - ) - - -async def test_get_entry_not_found(rest_api, metadata_store): - """ - Test trying to get a non-existing entry with the REST API GET request - """ - await do_request(rest_api, 'metadata/%s/%i' % (hexlify(b"0" * 64), 123), expected_code=404) + return MetadataEndpoint(torrent_checker.download_manager, torrent_checker, metadata_store) async def test_check_torrent_health(rest_api, mock_dlmgr, udp_tracker, metadata_store): @@ -210,3 +54,33 @@ async def test_check_torrent_query(rest_api, udp_tracker, metadata_store): """ infohash = b'a' * 20 await do_request(rest_api, f"metadata/torrents/{infohash}/health?timeout=wrong_value&refresh=1", expected_code=400) + + +async def test_get_popular_torrents(rest_api, endpoint, metadata_store): + """ + Test that the endpoint responds with its known entries. 
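+    Each regular torrent entry should also be enriched with the download progress
+    reported by the download manager (mocked to 0.5 in this setup).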
+ """ + fake_entry = { + "name": "Torrent Name", + "category": "", + "infohash": "ab" * 20, + "size": 1, + "num_seeders": 1234, + "num_leechers": 123, + "last_tracker_check": 17000000, + "created": 15000000, + "tag_processor_version": 1, + "type": REGULAR_TORRENT, + "id": 0, + "origin_id": 0, + "public_key": "ab" * 64, + "status": 2, + } + fake_state = Mock(return_value=Mock(get_progress=Mock(return_value=0.5))) + metadata_store.get_entries = Mock(return_value=[Mock(to_simple_dict=Mock(return_value=fake_entry.copy()))]) + endpoint.tag_rules_processor = Mock(process_queue=AsyncMock()) + endpoint.download_manager.get_download = Mock(return_value=Mock(get_state=fake_state)) + response = await do_request(rest_api, f"metadata/torrents/popular") + + endpoint.tag_rules_processor.process_queue.assert_called_once() + assert response == {'results': [{**fake_entry, **{"progress": 0.5}}], 'first': 1, 'last': 50} diff --git a/src/tribler/core/components/metadata_store/restapi/tests/test_remote_query_endpoint.py b/src/tribler/core/components/metadata_store/restapi/tests/test_remote_query_endpoint.py deleted file mode 100644 index a3ff0118a2f..00000000000 --- a/src/tribler/core/components/metadata_store/restapi/tests/test_remote_query_endpoint.py +++ /dev/null @@ -1,87 +0,0 @@ -import uuid -from unittest.mock import Mock - -import pytest -from ipv8.keyvault.crypto import default_eccrypto -from ipv8.peer import Peer -from pony.orm import db_session - -from tribler.core.components.gigachannel.community.gigachannel_community import ChannelsPeersMapping -from tribler.core.components.metadata_store.restapi.remote_query_endpoint import RemoteQueryEndpoint -from tribler.core.components.restapi.rest.base_api_test import do_request -from tribler.core.utilities.unicode import hexlify -from tribler.core.utilities.utilities import random_infohash - - -# pylint: disable=unused-argument,redefined-outer-name,multiple-statements - - -@pytest.fixture -def mock_gigachannel_community(): - return Mock() - - -@pytest.fixture -def endpoint(metadata_store, mock_gigachannel_community): - return RemoteQueryEndpoint(mock_gigachannel_community, metadata_store) - - -async def test_create_remote_search_request(rest_api, mock_gigachannel_community): - """ - Test that remote search call is sent on a REST API search request - """ - sent = {} - peers = [] - request_uuid = uuid.uuid4() - - def mock_send(**kwargs): - sent.update(kwargs) - return request_uuid, peers - - # Test querying for keywords - mock_gigachannel_community.send_search_request = mock_send - search_txt = "foo" - await do_request( - rest_api, - f'remote_query?txt_filter={search_txt}', - request_type="PUT", - expected_code=200, - expected_json={"request_uuid": str(request_uuid), "peers": peers}, - ) - assert sent['txt_filter'] == search_txt - sent.clear() - - # Test querying channel data by public key, e.g. 
for channel preview purposes - channel_pk = "ff" - await do_request( - rest_api, f'remote_query?channel_pk={channel_pk}&metadata_type=torrent', request_type="PUT", expected_code=200 - ) - assert hexlify(sent['channel_pk']) == channel_pk - - -async def test_get_channels_peers(rest_api, metadata_store, mock_gigachannel_community): - """ - Test getting debug info about the state of channels to peers mapping - """ - - mapping = mock_gigachannel_community.channels_peers = ChannelsPeersMapping() - - peer_key = default_eccrypto.generate_key("curve25519") - chan_key = default_eccrypto.generate_key("curve25519") - with db_session: - chan = metadata_store.ChannelMetadata(sign_with=chan_key, name="bla", infohash=random_infohash()) - - peer = Peer(peer_key, ("1.2.3.4", 5)) - mapping.add(peer, chan.public_key, chan.id_) - - result = await do_request( - rest_api, - 'remote_query/channels_peers', - request_type="GET", - expected_code=200, - ) - first_result = result["channels_list"][0] - assert first_result["channel_name"] == chan.title - assert first_result["channel_pk"] == hexlify(chan.public_key) - assert first_result["channel_id"] == chan.id_ - assert first_result["peers"][0][0] == hexlify(peer.mid) diff --git a/src/tribler/core/components/metadata_store/restapi/tests/test_search_endpoint.py b/src/tribler/core/components/metadata_store/restapi/tests/test_search_endpoint.py index d3186fcc70f..d149d6c90f7 100644 --- a/src/tribler/core/components/metadata_store/restapi/tests/test_search_endpoint.py +++ b/src/tribler/core/components/metadata_store/restapi/tests/test_search_endpoint.py @@ -1,7 +1,8 @@ import os +import uuid from binascii import unhexlify from typing import List, Set -from unittest.mock import patch +from unittest.mock import patch, Mock import pytest from pony.orm import db_session @@ -21,24 +22,28 @@ def needle_in_haystack_mds(metadata_store): num_hay = 100 with db_session: - _ = metadata_store.ChannelMetadata(title='test', tags='test', subscribed=True, infohash=random_infohash()) for x in range(0, num_hay): - metadata_store.TorrentMetadata(title='hay ' + str(x), infohash=random_infohash()) - metadata_store.TorrentMetadata(title='needle', infohash=random_infohash()) - metadata_store.TorrentMetadata(title='needle2', infohash=random_infohash()) + metadata_store.TorrentMetadata(title='hay ' + str(x), infohash=random_infohash(), public_key=b'') + metadata_store.TorrentMetadata(title='needle', infohash=random_infohash(), public_key=b'') + metadata_store.TorrentMetadata(title='needle2', infohash=random_infohash(), public_key=b'') return metadata_store @pytest.fixture -def endpoint(needle_in_haystack_mds, tribler_db): - return SearchEndpoint(needle_in_haystack_mds, tribler_db=tribler_db) +def mock_popularity_community(): + return Mock() + + +@pytest.fixture +def endpoint(mock_popularity_community, needle_in_haystack_mds, tribler_db): + return SearchEndpoint(mock_popularity_community, needle_in_haystack_mds, tribler_db=tribler_db) async def test_search_wrong_mdtype(rest_api): """ Testing whether the API returns an error 400 if wrong metadata type is passed in the query """ - await do_request(rest_api, 'search?txt_filter=bla&metadata_type=ddd', expected_code=400) + await do_request(rest_api, 'search/local?txt_filter=bla&metadata_type=ddd', expected_code=400) async def test_search(rest_api): @@ -46,22 +51,19 @@ async def test_search(rest_api): Test a search query that should return a few new type channels """ - parsed = await do_request(rest_api, 'search?txt_filter=needle', expected_code=200) + 
parsed = await do_request(rest_api, 'search/local?txt_filter=needle', expected_code=200) assert len(parsed["results"]) == 1 - parsed = await do_request(rest_api, 'search?txt_filter=hay', expected_code=200) + parsed = await do_request(rest_api, 'search/local?txt_filter=hay', expected_code=200) assert len(parsed["results"]) == 50 - parsed = await do_request(rest_api, 'search?txt_filter=test&type=channel', expected_code=200) - assert len(parsed["results"]) == 1 - - parsed = await do_request(rest_api, 'search?txt_filter=needle&type=torrent', expected_code=200) + parsed = await do_request(rest_api, 'search/local?txt_filter=needle&type=torrent', expected_code=200) assert parsed["results"][0]['name'] == 'needle' - parsed = await do_request(rest_api, 'search?txt_filter=needle&sort_by=name', expected_code=200) + parsed = await do_request(rest_api, 'search/local?txt_filter=needle&sort_by=name', expected_code=200) assert len(parsed["results"]) == 1 - parsed = await do_request(rest_api, 'search?txt_filter=needle%2A&sort_by=name&sort_desc=1', expected_code=200) + parsed = await do_request(rest_api, 'search/local?txt_filter=needle%2A&sort_by=name&sort_desc=1', expected_code=200) assert len(parsed["results"]) == 2 assert parsed["results"][0]['name'] == "needle2" @@ -73,10 +75,11 @@ def mocked_get_subjects_intersection(*_, objects: Set[str], **__): return {hexlify(os.urandom(20))} with patch.object(KnowledgeDataAccessLayer, 'get_subjects_intersection', wraps=mocked_get_subjects_intersection): - parsed = await do_request(rest_api, 'search?txt_filter=needle&tags=real_tag', expected_code=200) + parsed = await do_request(rest_api, 'search/local?txt_filter=needle&tags=real_tag', expected_code=200) + assert len(parsed["results"]) == 0 - parsed = await do_request(rest_api, 'search?txt_filter=needle&tags=missed_tag', expected_code=200) + parsed = await do_request(rest_api, 'search/local?txt_filter=needle&tags=missed_tag', expected_code=200) assert len(parsed["results"]) == 1 @@ -85,35 +88,35 @@ async def test_search_with_include_total_and_max_rowid(rest_api): Test search queries with include_total and max_rowid options """ - parsed = await do_request(rest_api, 'search?txt_filter=needle', expected_code=200) + parsed = await do_request(rest_api, 'search/local?txt_filter=needle', expected_code=200) assert len(parsed["results"]) == 1 assert "total" not in parsed assert "max_rowid" not in parsed - parsed = await do_request(rest_api, 'search?txt_filter=needle&include_total=1', expected_code=200) + parsed = await do_request(rest_api, 'search/local?txt_filter=needle&include_total=1', expected_code=200) assert parsed["total"] == 1 - assert parsed["max_rowid"] == 103 + assert parsed["max_rowid"] == 102 - parsed = await do_request(rest_api, 'search?txt_filter=hay&include_total=1', expected_code=200) + parsed = await do_request(rest_api, 'search/local?txt_filter=hay&include_total=1', expected_code=200) assert parsed["total"] == 100 - assert parsed["max_rowid"] == 103 + assert parsed["max_rowid"] == 102 - parsed = await do_request(rest_api, 'search?txt_filter=hay', expected_code=200) + parsed = await do_request(rest_api, 'search/local?txt_filter=hay', expected_code=200) assert len(parsed["results"]) == 50 - parsed = await do_request(rest_api, 'search?txt_filter=hay&max_rowid=0', expected_code=200) + parsed = await do_request(rest_api, 'search/local?txt_filter=hay&max_rowid=0', expected_code=200) assert len(parsed["results"]) == 0 - parsed = await do_request(rest_api, 'search?txt_filter=hay&max_rowid=20', expected_code=200) + 
parsed = await do_request(rest_api, 'search/local?txt_filter=hay&max_rowid=19', expected_code=200) assert len(parsed["results"]) == 19 - parsed = await do_request(rest_api, 'search?txt_filter=needle&sort_by=name', expected_code=200) + parsed = await do_request(rest_api, 'search/local?txt_filter=needle&sort_by=name', expected_code=200) assert len(parsed["results"]) == 1 - parsed = await do_request(rest_api, 'search?txt_filter=needle&sort_by=name&max_rowid=20', expected_code=200) + parsed = await do_request(rest_api, 'search/local?txt_filter=needle&sort_by=name&max_rowid=20', expected_code=200) assert len(parsed["results"]) == 0 - parsed = await do_request(rest_api, 'search?txt_filter=needle&sort_by=name&max_rowid=200', expected_code=200) + parsed = await do_request(rest_api, 'search/local?txt_filter=needle&sort_by=name&max_rowid=200', expected_code=200) assert len(parsed["results"]) == 1 @@ -134,12 +137,11 @@ async def test_completions(rest_api): async def test_search_with_space(rest_api, metadata_store): with db_session: - _ = metadata_store.ChannelMetadata(title='test', tags='test', subscribed=True, infohash=random_infohash()) - metadata_store.TorrentMetadata(title='abc', infohash=random_infohash()) - metadata_store.TorrentMetadata(title='abc.def', infohash=random_infohash()) - metadata_store.TorrentMetadata(title='abc def', infohash=random_infohash()) - metadata_store.TorrentMetadata(title='abcxyz def', infohash=random_infohash()) - metadata_store.TorrentMetadata(title='abc defxyz', infohash=random_infohash()) + metadata_store.TorrentMetadata(title='abc', infohash=random_infohash(), public_key=b'') + metadata_store.TorrentMetadata(title='abc.def', infohash=random_infohash(), public_key=b'') + metadata_store.TorrentMetadata(title='abc def', infohash=random_infohash(), public_key=b'') + metadata_store.TorrentMetadata(title='abcxyz def', infohash=random_infohash(), public_key=b'') + metadata_store.TorrentMetadata(title='abc defxyz', infohash=random_infohash(), public_key=b'') s1 = to_fts_query("abc") assert s1 == '"abc"' @@ -150,11 +152,11 @@ async def test_search_with_space(rest_api, metadata_store): ss2 = to_fts_query(s2) assert ss2 == s2 - parsed = await do_request(rest_api, f'search?txt_filter={s1}', expected_code=200) + parsed = await do_request(rest_api, f'search/local?txt_filter={s1}', expected_code=200) results = {item["name"] for item in parsed["results"]} assert results == {'abc', 'abc.def', 'abc def', 'abc defxyz'} - parsed = await do_request(rest_api, f'search?txt_filter={s2}', expected_code=200) + parsed = await do_request(rest_api, f'search/local?txt_filter={s2}', expected_code=200) results = {item["name"] for item in parsed["results"]} assert results == {'abc.def', 'abc def'} # but not 'abcxyz def' @@ -172,7 +174,7 @@ def mocked_get_subjects(*_, **__) -> List[str]: with patch.object(KnowledgeDataAccessLayer, 'get_objects', wraps=mocked_get_subjects): s1 = to_fts_query("abc") - results = await do_request(rest_api, f'search?txt_filter={s1}', expected_code=200) + results = await do_request(rest_api, f'search/local?txt_filter={s1}', expected_code=200) assert len(results["results"]) == 1 snippet = results["results"][0] @@ -190,7 +192,7 @@ async def test_multiple_snippets_in_search(rest_api, metadata_store, tribler_db) infohashes = [random_infohash() for _ in range(5)] for ind, infohash in enumerate(infohashes): torrent_state = metadata_store.TorrentState(infohash=infohash, seeders=ind) - metadata_store.TorrentMetadata(title=f'abc {ind}', infohash=infohash, health=torrent_state) + 
metadata_store.TorrentMetadata(title=f'abc {ind}', infohash=infohash, health=torrent_state, public_key=b'')
 
     def mocked_get_objects(*__, subject=None, **___) -> List[str]:
         subject = unhexlify(subject)
@@ -202,7 +204,7 @@ def mocked_get_objects(*__, subject=None, **___) -> List[str]:
 
     with patch.object(KnowledgeDataAccessLayer, 'get_objects', wraps=mocked_get_objects):
         s1 = to_fts_query("abc")
-        parsed = await do_request(rest_api, f'search?txt_filter={s1}', expected_code=200)
+        parsed = await do_request(rest_api, f'search/local?txt_filter={s1}', expected_code=200)
 
     results = parsed["results"]
     assert len(results) == 3
@@ -227,3 +229,49 @@ def test_build_snippets_no_infohash(endpoint: SearchEndpoint):
     search_results = [{'dictionary': 'without infohash'}]
     result = endpoint.build_snippets(search_results)
     assert result == search_results
+
+
+async def test_create_remote_search_request(rest_api, mock_popularity_community):
+    """
+    Test that a remote search call is sent on a REST API search request
+    """
+    sent = {}
+    peers = []
+    request_uuid = uuid.uuid4()
+
+    def mock_send(**kwargs):
+        sent.update(kwargs)
+        return request_uuid, peers
+
+    # Test querying for keywords
+    mock_popularity_community.send_search_request = mock_send
+    search_txt = "foo"
+    await do_request(
+        rest_api,
+        f'search/remote?txt_filter={search_txt}',
+        request_type="PUT",
+        expected_code=200,
+        expected_json={"request_uuid": str(request_uuid), "peers": peers},
+    )
+    assert sent['txt_filter'] == search_txt
+    sent.clear()
+
+    # Test querying channel data by public key, e.g. for channel preview purposes
+    channel_pk = "ff"
+    await do_request(
+        rest_api, f'search/remote?channel_pk={channel_pk}&metadata_type=torrent', request_type="PUT", expected_code=200
+    )
+    assert hexlify(sent['channel_pk']) == channel_pk
+
+
+async def test_create_remote_search_request_illegal(rest_api, mock_popularity_community):
+    """
+    Test that a remote search request with an illegal parameter value results in a bad request error
+    """
+    response = await do_request(
+        rest_api,
+        'search/remote?origin_id=a',
+        request_type="PUT",
+        expected_code=400
+    )
+    assert "error" in response
diff --git a/src/tribler/core/components/metadata_store/tests/test_channel_download.py b/src/tribler/core/components/metadata_store/tests/test_channel_download.py
deleted file mode 100644
index 9ab58b0eae8..00000000000
--- a/src/tribler/core/components/metadata_store/tests/test_channel_download.py
+++ /dev/null
@@ -1,105 +0,0 @@
-from unittest.mock import MagicMock
-
-import pytest
-from ipv8.util import succeed
-from pony.orm import db_session
-
-from tribler.core.components.gigachannel_manager.gigachannel_manager import GigaChannelManager
-from tribler.core.components.libtorrent.download_manager.download_config import DownloadConfig
-from tribler.core.components.libtorrent.download_manager.download_manager import DownloadManager
-from tribler.core.components.libtorrent.settings import LibtorrentSettings
-from tribler.core.components.libtorrent.torrentdef import TorrentDef
-from tribler.core.components.metadata_store.db.serialization import ChannelMetadataPayload
-from tribler.core.tests.tools.common import TESTS_DATA_DIR
-from tribler.core.utilities.simpledefs import DownloadStatus
-
-CHANNEL_DIR = TESTS_DATA_DIR / 'sample_channel'
-CHANNEL_TORRENT = CHANNEL_DIR / 'channel.torrent'
-CHANNEL_TORRENT_UPDATED = CHANNEL_DIR / 'channel_upd.torrent'
-CHANNEL_METADATA = CHANNEL_DIR / 'channel.mdblob'
-CHANNEL_METADATA_UPDATED = CHANNEL_DIR / 'channel_upd.mdblob'
-
-
-# pylint: disable=redefined-outer-name
-
-@pytest.fixture -async def channel_tdef(): - return await TorrentDef.load(TESTS_DATA_DIR / 'sample_channel' / 'channel_upd.torrent') - - -@pytest.fixture -async def channel_seeder(channel_tdef, tmp_path_factory): # pylint: disable=unused-argument - config = LibtorrentSettings() - config.dht = False - config.upnp = False - config.natpmp = False - config.lsd = False - seeder_dlmgr = DownloadManager(state_dir=tmp_path_factory.mktemp('state_dir'), config=config, notifier=MagicMock(), - peer_mid=b"0000") - seeder_dlmgr.metadata_tmpdir = tmp_path_factory.mktemp('metadata_tmpdir') - seeder_dlmgr.initialize() - dscfg_seed = DownloadConfig() - dscfg_seed.set_dest_dir(TESTS_DATA_DIR / 'sample_channel') - upload = await seeder_dlmgr.start_download(tdef=channel_tdef, config=dscfg_seed) - await upload.wait_for_status(DownloadStatus.SEEDING) - yield seeder_dlmgr - await seeder_dlmgr.shutdown() - - -@pytest.fixture -async def gigachannel_manager(metadata_store, download_manager: DownloadManager): - manager = GigaChannelManager( - state_dir=metadata_store.channels_dir.parent, - download_manager=download_manager, - metadata_store=metadata_store, - notifier=MagicMock(), - ) - yield manager - await manager.shutdown() - - -@pytest.mark.looptime(False) -async def test_channel_update_and_download( - channel_tdef, channel_seeder, metadata_store, download_manager, gigachannel_manager -): - """ - Test whether we can successfully update a channel and download the new version - """ - - # First we have to manually add the old version - old_payload = ChannelMetadataPayload.from_file(CHANNEL_METADATA) - with db_session: - old_channel = metadata_store.ChannelMetadata.from_payload(old_payload) - chan_dir = CHANNEL_DIR / old_channel.dirname - - metadata_store.process_channel_dir(chan_dir, old_payload.public_key, old_payload.id_) - - payload = ChannelMetadataPayload.from_file(CHANNEL_METADATA_UPDATED) - # Download the channel in our session - with db_session: - metadata_store.process_payload(payload) - channel = metadata_store.ChannelMetadata.get(signature=payload.signature) - - def fake_get_metainfo(*args, **kwargs): - return succeed(channel_tdef.get_metainfo()) - - download_manager.get_metainfo = fake_get_metainfo - # The leecher should be hinted to leech from localhost. Thus, we must extend start_download_from_tdef - # and get_metainfo to provide the hint. 
- original_start_download_from_tdef = download_manager.start_download - - async def hinted_start_download(tdef=None, config=None, hidden=False): - download = await original_start_download_from_tdef(tdef=tdef, config=config, hidden=hidden) - download.add_peer(("127.0.0.1", channel_seeder.libtorrent_port)) - return download - - download_manager.start_download = hinted_start_download - await gigachannel_manager.download_channel(channel) - await gigachannel_manager.process_queued_channels() - - with db_session: - # There should be 8 torrents + 1 channel torrent - channel2 = metadata_store.ChannelMetadata.get(public_key=payload.public_key) - assert channel2.timestamp == channel2.local_version - assert channel2.timestamp == 1565621688018 - assert metadata_store.ChannelNode.select().count() == 8 diff --git a/src/tribler/core/components/metadata_store/tests/test_channel_metadata.py b/src/tribler/core/components/metadata_store/tests/test_channel_metadata.py index f1a6666072d..3f6cfc5ca87 100644 --- a/src/tribler/core/components/metadata_store/tests/test_channel_metadata.py +++ b/src/tribler/core/components/metadata_store/tests/test_channel_metadata.py @@ -1,1098 +1,4 @@ -import os -from binascii import unhexlify -from datetime import datetime, timedelta -from itertools import combinations -from pathlib import Path -from unittest.mock import Mock, patch - -import pytest -from ipv8.keyvault.crypto import default_eccrypto -from lz4.frame import LZ4FrameDecompressor -from pony.orm import ObjectNotFound, db_session - -from tribler.core.components.libtorrent.torrentdef import TorrentDef -from tribler.core.components.metadata_store.db.orm_bindings.channel_metadata import ( - CHANNEL_DIR_NAME_LENGTH, - MetadataCompressor, - entries_to_chunk, -) -from tribler.core.components.metadata_store.db.orm_bindings.channel_node import COMMITTED, NEW, TODELETE, UPDATED -from tribler.core.components.metadata_store.db.serialization import ( - CHANNEL_TORRENT, - COLLECTION_NODE, - REGULAR_TORRENT, - int2time, -) from tribler.core.components.metadata_store.db.store import HealthItemsPayload -from tribler.core.tests.tools.common import TESTS_DATA_DIR, TORRENT_UBUNTU_FILE -from tribler.core.utilities.date_utils import freeze_time -from tribler.core.utilities.simpledefs import CHANNEL_STATE -from tribler.core.utilities.utilities import random_infohash - - -# pylint: disable=protected-access, redefined-outer-name - -@pytest.fixture -def my_key(): - return default_eccrypto.generate_key("curve25519") - - -@pytest.fixture -def torrent_template(): - return {"title": "", "infohash": b"", "torrent_date": datetime(1970, 1, 1), "tags": "video"} - - -@pytest.fixture -def sample_torrent_dict(my_key): - return { - "infohash": b"1" * 20, - "size": 123, - "torrent_date": datetime.utcnow(), - "tags": "bla", - "id_": 123, - "public_key": my_key.pub().key_to_bin()[10:], - "title": "lalala", - } - - -@pytest.fixture -def sample_channel_dict(sample_torrent_dict): - return dict(sample_torrent_dict, votes=222, subscribed=False, timestamp=1) - - -@pytest.fixture(name='mds_with_some_torrents') -@db_session -def mds_with_some_torrents_fixture(metadata_store): - # channel1 - # torrent1 aaa bbb seeders=10 - # folder1 aaa ccc - # torrent2 bbb aaa no seeders - # torrent3 ccc ddd seeders=5 - # folder2 aaa bbb - # fodler2_1 aaa bbb - # folder2_2 bbb ccc - # torrent2_1 aaa ccc seeders=20 - # torrent4 ccc ddd seeders=30 - # channel2 - # torrent5 aaa zzz seeders=1 - # torrent6 aaa zzz - - def save(): - metadata_store.db.flush() - - def datetime_generator(): 
- dt = datetime.utcnow() - timedelta(days=100) - for i in range(100): - yield dt + timedelta(days=i) - assert False, "too many values requested" - - next_datetime = datetime_generator().__next__ - - def new_channel(**kwargs): - params = dict(subscribed=True, share=True, status=NEW, infohash=random_infohash(), torrent_date=next_datetime()) - params.update(kwargs) - return metadata_store.ChannelMetadata(**params) - - def new_torrent(**kwargs): - params = dict(origin_id=channel.id_, staus=NEW, infohash=random_infohash(), torrent_date=next_datetime()) - params.update(kwargs) - return metadata_store.TorrentMetadata(**params) - - def new_folder(**kwargs): - params = dict(origin_id=channel.id_) - params.update(kwargs) - return metadata_store.CollectionNode(**params) - - # Prepare some data - - channel = new_channel(title='channel1 aaa bbb') - save() # to obtain channel.id_ - - new_torrent(title='torrent1 aaa bbb').health.set(seeders=10, leechers=20) - new_folder(title='folder1 aaa ccc') - new_torrent(title='torrent2 bbb aaa') - new_torrent(title='torrent3 ccc ddd').health.set(seeders=5, leechers=10) - folder2 = new_folder(title='folder2 aaa bbb') - new_torrent(title='torrent4 ccc ddd').health.set(seeders=30, leechers=40) - save() # to obtain folder2.id_ - new_folder(title='folder2_1 aaa bbb', origin_id=folder2.id_) - new_folder(title='folder2_2 bbb ccc', origin_id=folder2.id_) - new_torrent(title='torrent2_1 aaa ccc', origin_id=folder2.id_).health.set(seeders=20, leechers=10) - save() - - key = default_eccrypto.generate_key("curve25519") - channel2 = new_channel(title='channel2 aaa bbb', sign_with=key) - save() # to obtain channel2.id_ - new_torrent(title='torrent5 aaa zzz', origin_id=channel2.id_, sign_with=key).health.set(seeders=1, leechers=2) - new_torrent(title='torrent6 aaa zzz', origin_id=channel2.id_, sign_with=key) - - return metadata_store, channel - - -@db_session -def test_serialization(metadata_store): - """ - Test converting channel metadata to serialized data - """ - channel_metadata = metadata_store.ChannelMetadata.from_dict({"infohash": random_infohash()}) - assert channel_metadata.serialized() - - -@db_session -def test_list_contents(metadata_store, torrent_template): - """ - Test whether a correct list with channel content is returned from the database - """ - metadata_store.ChannelNode._my_key = default_eccrypto.generate_key('low') - channel1 = metadata_store.ChannelMetadata(infohash=random_infohash()) - metadata_store.TorrentMetadata.from_dict(dict(torrent_template, origin_id=channel1.id_)) - - metadata_store.ChannelNode._my_key = default_eccrypto.generate_key('low') - channel2 = metadata_store.ChannelMetadata(infohash=random_infohash()) - metadata_store.TorrentMetadata.from_dict(dict(torrent_template, infohash=b"1", origin_id=channel2.id_)) - metadata_store.TorrentMetadata.from_dict(dict(torrent_template, infohash=b"2", origin_id=channel2.id_)) - - assert len(channel1.contents_list) == 1 - assert len(channel2.contents_list) == 2 - assert channel2.contents_len == 2 - - -@db_session -def test_get_dirname(sample_channel_dict, metadata_store): - """ - Test whether the correct directory name is returned for channel metadata - """ - channel_metadata = metadata_store.ChannelMetadata.from_dict(sample_channel_dict) - assert len(channel_metadata.dirname) == CHANNEL_DIR_NAME_LENGTH - - -@db_session -def test_get_channel_with_dirname(sample_channel_dict, metadata_store): - """ - Test getting a channel with a specific name - """ - channel_metadata = 
metadata_store.ChannelMetadata.from_dict(sample_channel_dict) - dirname = channel_metadata.dirname - channel_result = metadata_store.ChannelMetadata.get_channel_with_dirname(dirname) - assert channel_metadata == channel_result - - # Test for corner-case of channel PK starting with zeroes - channel_metadata.public_key = unhexlify('0' * 128) - channel_result = metadata_store.ChannelMetadata.get_channel_with_dirname(channel_metadata.dirname) - assert channel_metadata == channel_result - - -@db_session -def test_add_metadata_to_channel(torrent_template, metadata_store): - """ - Test whether adding new torrents to a channel works as expected - """ - channel_metadata = metadata_store.ChannelMetadata.create_channel('test', 'test') - original_channel = channel_metadata.to_dict() - md = metadata_store.TorrentMetadata.from_dict(dict(torrent_template, status=NEW, origin_id=channel_metadata.id_)) - channel_metadata.commit_channel_torrent() - - assert original_channel["timestamp"] < channel_metadata.timestamp - assert md.timestamp < channel_metadata.timestamp - assert channel_metadata.num_entries == 1 - - -async def test_add_torrent_to_channel(metadata_store): - """ - Test adding a torrent to your channel - """ - tdef = await TorrentDef.load(TORRENT_UBUNTU_FILE) - - with db_session: - channel_metadata = metadata_store.ChannelMetadata.create_channel('test', 'test') - - channel_metadata.add_torrent_to_channel(tdef, {'description': 'blabla'}) - assert channel_metadata.contents_list - - # Make sure trying to add a duplicate torrent does not result in an error - channel_metadata.add_torrent_to_channel(tdef, None) - - -@db_session -def test_torrent_exists_in_channel(torrent_template, metadata_store): - """ - Test torrent already exists in the personal channel. - """ - channel_metadata = metadata_store.ChannelMetadata.create_channel('test', 'test') - metadata_store.TorrentMetadata.from_dict(dict(torrent_template, infohash=b"1", origin_id=channel_metadata.id_)) - assert metadata_store.torrent_exists_in_personal_channel(b"1") - assert not metadata_store.torrent_exists_in_personal_channel(b"0") - - -@db_session -def test_copy_to_channel(torrent_template, metadata_store): - """ - Test copying a torrent from an another channel. - """ - metadata_store.ChannelNode._my_key = default_eccrypto.generate_key('low') - channel1 = metadata_store.ChannelMetadata(infohash=random_infohash()) - metadata_store.TorrentMetadata.from_dict(dict(torrent_template, infohash=b"1", origin_id=channel1.id_)) - - metadata_store.ChannelNode._my_key = default_eccrypto.generate_key('low') - channel2 = metadata_store.ChannelMetadata(infohash=random_infohash()) - - # Trying copying existing torrent to channel - new_torrent = channel2.copy_torrent_from_infohash(b"1") - assert new_torrent - assert len(channel1.contents_list) == 1 - assert len(channel2.contents_list) == 1 - - # Try copying non-existing torrent ot channel - new_torrent2 = channel2.copy_torrent_from_infohash(b"2") - assert new_torrent2 is None - assert len(channel1.contents_list) == 1 - assert len(channel2.contents_list) == 1 - - -async def test_restore_torrent_in_channel(metadata_store): - """ - Test if the torrent scheduled for deletion is restored/updated after the user tries to re-add it. 
- """ - tdef = await TorrentDef.load(TORRENT_UBUNTU_FILE) - - with db_session: - channel_metadata = metadata_store.ChannelMetadata.create_channel('test', 'test') - md = channel_metadata.add_torrent_to_channel(tdef, None) - - # Check correct re-add - md.status = TODELETE - md_updated = channel_metadata.add_torrent_to_channel(tdef, None) - assert UPDATED == md.status - assert md_updated == md - assert md.has_valid_signature - - # Check update of torrent properties from a new tdef - md.status = TODELETE - new_tracker_address = 'http://tribler.org/announce' - tdef.torrent_parameters[b'announce'] = new_tracker_address.encode('utf-8') - md_updated = channel_metadata.add_torrent_to_channel(tdef, None) - assert md_updated == md - assert md.status == UPDATED - assert md.tracker_info == new_tracker_address - assert md.has_valid_signature - # In addition, check that the trackers table was properly updated - assert len(md.health.trackers) == 2 - - -async def test_delete_torrent_from_channel(metadata_store): - """ - Test deleting a torrent from your channel - """ - tdef = await TorrentDef.load(TORRENT_UBUNTU_FILE) - - with db_session: - channel_metadata = metadata_store.ChannelMetadata.create_channel('test', 'test') - - # Check that nothing is committed when deleting uncommited torrent metadata - torrent = channel_metadata.add_torrent_to_channel(tdef, None) - torrent.soft_delete() - assert not channel_metadata.contents_list - - # Check append-only deletion process - torrent = channel_metadata.add_torrent_to_channel(tdef, None) - channel_metadata.commit_channel_torrent() - assert len(channel_metadata.contents_list) == 1 - - torrent.soft_delete() - channel_metadata.commit_channel_torrent() - assert not channel_metadata.contents_list - - -@db_session -def test_correct_commit_of_delete_entries(metadata_store): - """ - Test that delete entries are committed to disk within mdblobs with correct filenames. - GitHub issue #5295 - """ - - channel = metadata_store.ChannelMetadata.create_channel('test', 'test') - # To trigger the bug we must ensure that the deletion commands will not fit in a single mdblob - with patch.object(metadata_store.ChannelMetadata, "_CHUNK_SIZE_LIMIT", 300): - torrents = [ - metadata_store.TorrentMetadata(infohash=random_infohash(), origin_id=channel.id_, status=NEW) - for _ in range(0, metadata_store.ChannelMetadata._CHUNK_SIZE_LIMIT * 2 // 100) - ] - channel.commit_channel_torrent() - for t in torrents: - t.soft_delete() - channel.commit_channel_torrent() - - torrents = [ - metadata_store.TorrentMetadata(infohash=random_infohash(), origin_id=channel.id_, status=NEW) - for _ in range(0, metadata_store.ChannelMetadata._CHUNK_SIZE_LIMIT * 2 // 100) - ] - channel.commit_channel_torrent() - torrents.append(metadata_store.TorrentMetadata(infohash=random_infohash(), origin_id=channel.id_, status=NEW)) - for t in torrents[:-1]: - t.soft_delete() - channel.commit_channel_torrent() - - -@pytest.fixture(name="freezer") -def fixture_freezer(): - with freeze_time("2021-09-24") as freezer: - yield freezer - - -@db_session -def test_vsids(freezer, metadata_store): - """ - Test VSIDS-based channel popularity system. 
- """ - peer_key = default_eccrypto.generate_key("curve25519") - assert metadata_store.Vsids[0].bump_amount == 1.0 - - channel = metadata_store.ChannelMetadata.create_channel('test', 'test') - metadata_store.vote_bump(channel.public_key, channel.id_, peer_key.pub().key_to_bin()[10:]) - freezer.move_to('2021-09-25') - metadata_store.vote_bump(channel.public_key, channel.id_, peer_key.pub().key_to_bin()[10:]) - assert channel.votes > 0.0 - assert metadata_store.Vsids[0].bump_amount > 1.0 - - # Make sure normalization for display purposes work - assert channel.to_simple_dict()["votes"] == 1.0 - - # Make sure the rescale works for the channels - metadata_store.Vsids[0].normalize() - assert metadata_store.Vsids[0].bump_amount == 1.0 - assert channel.votes == 1.0 - - # Ensure that vote by another person counts - peer_key = default_eccrypto.generate_key("curve25519") - metadata_store.vote_bump(channel.public_key, channel.id_, peer_key.pub().key_to_bin()[10:]) - assert channel.votes == 2.0 - - freezer.move_to('2021-09-26') - # Ensure that a repeated vote supersedes the first vote but does not count as a new one - metadata_store.vote_bump(channel.public_key, channel.id_, peer_key.pub().key_to_bin()[10:]) - assert 2.0 < channel.votes < 2.5 - - -async def test_commit_channel_torrent(metadata_store): - """ - Test committing a channel torrent - """ - tdef = await TorrentDef.load(TORRENT_UBUNTU_FILE) - - with db_session: - channel = metadata_store.ChannelMetadata.create_channel('test', 'test') - channel.add_torrent_to_channel(tdef, None) - # The first run should return the infohash, the second should return None, because nothing was really done - assert channel.commit_channel_torrent() - assert not channel.commit_channel_torrent() - - # Test adding flags to channel torrent when adding thumbnail and description - metadata_store.ChannelThumbnail(public_key=channel.public_key, origin_id=channel.id_, status=NEW) - metadata_store.ChannelDescription(public_key=channel.public_key, origin_id=channel.id_, status=NEW) - assert channel.commit_channel_torrent() - assert channel.reserved_flags == 3 - assert not channel.commit_channel_torrent() - - -@db_session -def test_recursive_commit_channel_torrent(metadata_store): - status_types = [NEW, UPDATED, TODELETE, COMMITTED] - - def all_status_combinations(): - result = [] - for card in range(0, len(status_types) + 1): - result.extend(list(combinations(status_types, card))) - return result - - def generate_collection(parent, collection_status, contents_statuses, recurse=False): - chan = metadata_store.CollectionNode( - title=parent.title + '->child_new_nonempty', origin_id=parent.id_, status=collection_status - ) - for s in contents_statuses: - metadata_store.TorrentMetadata(infohash=random_infohash(), origin_id=chan.id_, status=s) - if recurse: - for status in status_types: - generate_collection(chan, status, [NEW]) - return chan - - def generate_channel(recurse=False, status=NEW): - toplevel_channel = metadata_store.ChannelMetadata.create_channel('root', 'test') - metadata_store.ChannelThumbnail( - public_key=toplevel_channel.public_key, - origin_id=toplevel_channel.id_, - binary_data=os.urandom(20000), - data_type="image/png", - ) - metadata_store.ChannelDescription( - public_key=toplevel_channel.public_key, - origin_id=toplevel_channel.id_, - json_text='{"description_text":"foobar"}', - ) - toplevel_channel.status = status - for s in status_types: - metadata_store.TorrentMetadata(infohash=random_infohash(), origin_id=toplevel_channel.id_, status=s) - if recurse: - 
for status_combination in all_status_combinations(): - generate_collection(toplevel_channel, s, status_combination, recurse=recurse) - metadata_store.ChannelDescription( - text="foobar", - origin_id=toplevel_channel.id_, - ) - return toplevel_channel - - # Make sure running commit on empty channels produces no error - metadata_store.CollectionNode.commit_all_channels() - - # All types of non-empty and empty toplevel channels - for s in status_types: - empty_chan = metadata_store.ChannelMetadata.create_channel('root', 'test') - empty_chan.status = s - generate_channel(status=s) - - # A committed channel with a single deleted collection in it. It should not be deleted - single_del_cont_chan = metadata_store.ChannelMetadata.create_channel('root', 'test') - metadata_store.CollectionNode(status=TODELETE, origin_id=single_del_cont_chan.id_) - - # Create some orphaned MDs - chan = generate_channel() - orphaned_contents_rowids = [c.rowid for c in chan.get_contents_recursive()] - metadata_store.ChannelNode.delete(chan) # We use it to delete non-recursively - - # Create a top-level collection node - coll = metadata_store.CollectionNode(origin_id=0, status=NEW) - generate_collection(coll, NEW, [NEW, UPDATED, TODELETE]) - - commit_results = metadata_store.CollectionNode.commit_all_channels() - # Check that commit results in the correct number of torrents produced - assert len(commit_results) == 4 - # Check that top-level collection node, while not committed to disk, still has its num_entries recalculated - assert coll.num_entries == 2 - # Check that all orphaned entries are deleted during commit - assert not metadata_store.ChannelNode.exists(lambda g: g.rowid in orphaned_contents_rowids) - - # Create a single nested channel - chan = generate_channel(recurse=True) - - chan.commit_channel_torrent() - chan.local_version = 0 - len(chan.get_contents_recursive()) - - chan.consolidate_channel_torrent() - # Remove the channel and read it back from disk - for c in chan.contents: - c.delete() - my_dir = Path(metadata_store.ChannelMetadata._channels_dir / chan.dirname).absolute() - metadata_store.process_channel_dir(my_dir, chan.public_key, chan.id_, skip_personal_metadata_payload=False) - assert chan.num_entries == 366 - - -async def test_consolidate_channel_torrent(torrent_template, metadata_store): - """ - Test completely re-commit your channel - """ - tdef = await TorrentDef.load(TORRENT_UBUNTU_FILE) - - with db_session: - channel = metadata_store.ChannelMetadata.create_channel('test', 'test') - my_dir = Path(metadata_store.ChannelMetadata._channels_dir / channel.dirname).absolute() - - # 1st torrent - torrent_entry = channel.add_torrent_to_channel(tdef, None) - channel.commit_channel_torrent() - - # 2nd torrent - metadata_store.TorrentMetadata.from_dict( - dict(torrent_template, public_key=channel.public_key, origin_id=channel.id_, status=NEW) - ) - channel.commit_channel_torrent() - # Delete entry - torrent_entry.soft_delete() - channel.commit_channel_torrent() - - assert len(channel.contents_list) == 1 - assert len(os.listdir(my_dir)) == 3 - - torrent3 = metadata_store.TorrentMetadata( - public_key=channel.public_key, origin_id=channel.id_, status=NEW, infohash=random_infohash() - ) - channel.commit_channel_torrent() - torrent3.soft_delete() - - channel.consolidate_channel_torrent() - assert len(os.listdir(my_dir)) == 1 - metadata_store.TorrentMetadata.select(lambda g: g.metadata_type == REGULAR_TORRENT).delete() - channel.local_version = 0 - metadata_store.process_channel_dir(my_dir, channel.public_key, 
channel.id_, - skip_personal_metadata_payload=False) - assert len(channel.contents[:]) == 1 - - -@db_session -def test_data_dont_fit_in_mdblob(metadata_store): - import random as rng # pylint: disable=import-outside-toplevel - - rng.seed(123) - md_list = [ - metadata_store.TorrentMetadata( - title='test' + str(x), - infohash=random_infohash(rng), - id_=rng.randint(0, 100000000), - torrent_date=int2time(rng.randint(0, 4000000)), - timestamp=rng.randint(0, 100000000), - ) - for x in range(0, 1) - ] - chunk, index = entries_to_chunk(md_list, chunk_size=1) - assert index == 1 - assert len(chunk) == 205 - - # Test corner case of empty list and/or too big index - with pytest.raises(Exception): - entries_to_chunk(md_list, chunk_size=1000, start_index=1000) - with pytest.raises(Exception): - entries_to_chunk([], chunk_size=1) - - -@db_session -def test_get_channels(metadata_store): - """ - Test whether we can get channels - """ - - # First we create a few channels - for ind in range(10): - metadata_store.ChannelNode._my_key = default_eccrypto.generate_key('low') - metadata_store.ChannelMetadata(title='channel%d' % ind, subscribed=(ind % 2 == 0), infohash=random_infohash()) - metadata_store.TorrentMetadata(title='tor%d' % ind, infohash=random_infohash()) - channels = metadata_store.get_entries(first=1, last=5, metadata_type=CHANNEL_TORRENT) - assert len(channels) == 5 - - # Test filtering - channels = metadata_store.get_entries(first=1, last=5, metadata_type=CHANNEL_TORRENT, txt_filter='channel5') - assert len(channels) == 1 - - # Test sorting - channels = metadata_store.get_entries( - first=1, last=10, metadata_type=CHANNEL_TORRENT, sort_by='title', sort_desc=True - ) - assert len(channels) == 10 - assert channels[0].title == 'channel9' - - # Test fetching subscribed channels - channels = metadata_store.get_entries( - first=1, last=10, metadata_type=CHANNEL_TORRENT, sort_by='title', subscribed=True - ) - assert len(channels) == 5 - - -@db_session -def test_default_sorting_no_fts(mds_with_some_torrents): - metadata_store, channel = mds_with_some_torrents - - # Search through the entire set of torrents & folders. - # Currently objects are returned in order "newest at first" - objects = metadata_store.get_entries() - titles = [obj.title.partition(' ')[0] for obj in objects] - assert titles == [ - 'torrent6', - 'torrent5', - 'channel2', - 'torrent2_1', - 'folder2_2', - 'folder2_1', - 'torrent4', - 'folder2', - 'torrent3', - 'torrent2', - 'folder1', - 'torrent1', - 'channel1', - ] - - objects = metadata_store.get_entries(channel_pk=channel.public_key) - titles = [obj.title.partition(' ')[0] for obj in objects] - assert titles == [ - 'torrent2_1', - 'folder2_2', - 'folder2_1', - 'torrent4', - 'folder2', - 'torrent3', - 'torrent2', - 'folder1', - 'torrent1', - 'channel1', - ] - - objects = metadata_store.get_entries(origin_id=channel.id_) - titles = [obj.title.partition(' ')[0] for obj in objects] - assert titles == ['torrent4', 'folder2', 'torrent3', 'torrent2', 'folder1', 'torrent1'] - - -@db_session -def test_default_sorting_with_fts(mds_with_some_torrents): - metadata_store, channel = mds_with_some_torrents - - # Search through the entire set of torrents & folders. 
- # Returns channels at first, then folders (newest at first), - # then torrents (with seeders at first) - objects = metadata_store.get_entries(txt_filter='aaa') - titles = [obj.title.partition(' ')[0] for obj in objects] - assert titles == [ - 'channel2', - 'channel1', - 'folder2_1', - 'folder2', - 'folder1', - 'torrent2_1', # has seeders - 'torrent1', # has seeders - 'torrent5', # has seeders - 'torrent6', # no seeders - 'torrent2', # no seeders - ] - - objects = metadata_store.get_entries(channel_pk=channel.public_key, txt_filter='aaa') - titles = [obj.title.partition(' ')[0] for obj in objects] - assert titles == [ - 'channel1', - 'folder2_1', - 'folder2', - 'folder1', - 'torrent2_1', # has seeders - 'torrent1', # has seeders - 'torrent2', # no seeders - ] - - objects = metadata_store.get_entries(origin_id=channel.id_, txt_filter='aaa') - titles = [obj.title.partition(' ')[0] for obj in objects] - assert titles == ['folder2', 'folder1', 'torrent1', 'torrent2'] - - -@db_session -def test_sort_by_health_no_fts(mds_with_some_torrents): - metadata_store, channel = mds_with_some_torrents - - objects = metadata_store.get_entries(sort_by='HEALTH', sort_desc=True) - titles = [obj.title.partition(' ')[0] for obj in objects] - assert titles == [ - 'torrent4', # 30 seeders - 'torrent2_1', # 20 seeders - 'torrent1', # 10 seeders - 'torrent3', # 5 seeders - 'torrent5', # 1 seeders - 'torrent6', # no seeders - 'channel2', - 'torrent2', # no seeders - 'channel1', - 'folder2_2', - 'folder2_1', - 'folder2', - 'folder1', - ] - - objects = metadata_store.get_entries(sort_by='HEALTH', sort_desc=False) - titles = [obj.title.partition(' ')[0] for obj in objects] - assert titles == [ - 'folder1', - 'folder2', - 'folder2_1', - 'folder2_2', - 'channel1', - 'torrent2', # no seeders - 'channel2', - 'torrent6', # no seeders - 'torrent5', # 1 seeders - 'torrent3', # 2 seeders - 'torrent1', # 10 seeders - 'torrent2_1', # 20 seeders - 'torrent4', # 30 seeders - ] - - objects = metadata_store.get_entries(channel_pk=channel.public_key, sort_by='HEALTH', sort_desc=True) - titles = [obj.title.partition(' ')[0] for obj in objects] - assert titles == [ - 'torrent4', # has seeders - 'torrent2_1', # has seeders - 'torrent1', # has seeders - 'torrent3', # has seeders - 'torrent2', # no seeders - 'channel1', - 'folder2_2', - 'folder2_1', - 'folder2', - 'folder1', - ] - - objects = metadata_store.get_entries(channel_pk=channel.public_key, sort_by='HEALTH', sort_desc=False) - titles = [obj.title.partition(' ')[0] for obj in objects] - assert titles == [ - 'folder1', - 'folder2', - 'folder2_1', - 'folder2_2', - 'channel1', - 'torrent2', # no seeders - 'torrent3', # has seeders - 'torrent1', # has seeders - 'torrent2_1', # has seeders - 'torrent4', # has seeders - ] - - objects = metadata_store.get_entries( - origin_id=channel.id_, - sort_by='HEALTH', - sort_desc=True, - ) - titles = [obj.title.partition(' ')[0] for obj in objects] - assert titles == [ - 'torrent4', # has seeders - 'torrent1', # has seeders - 'torrent3', # has seeders - 'torrent2', # no seeders - 'folder2', - 'folder1', - ] - - objects = metadata_store.get_entries(origin_id=channel.id_, sort_by='HEALTH', sort_desc=False) - titles = [obj.title.partition(' ')[0] for obj in objects] - assert titles == [ - 'folder1', - 'folder2', - 'torrent2', # no seeders - 'torrent3', # has seeders - 'torrent1', # has seeders - 'torrent4', # has seeders - ] - - -@db_session -def test_sort_by_health_with_fts(mds_with_some_torrents): - metadata_store, channel = mds_with_some_torrents 
- - objects = metadata_store.get_entries(txt_filter='aaa', sort_by='HEALTH', sort_desc=True) - titles = [obj.title.partition(' ')[0] for obj in objects] - assert titles == [ - 'torrent2_1', # 20 seeders - 'torrent1', # 10 seeders - 'torrent5', # 1 seeder - 'torrent6', # no seeders - 'channel2', - 'torrent2', - 'channel1', - 'folder2_1', - 'folder2', - 'folder1', - ] - - objects = metadata_store.get_entries(txt_filter='aaa', sort_by='HEALTH', sort_desc=False) - titles = [obj.title.partition(' ')[0] for obj in objects] - assert titles == [ - 'folder1', - 'folder2', - 'folder2_1', - 'channel1', - 'torrent2', - 'channel2', - 'torrent6', # no seeders - 'torrent5', # 1 seeder - 'torrent1', # 10 seeders - 'torrent2_1', # 20 seeders - ] - - objects = metadata_store.get_entries( - channel_pk=channel.public_key, txt_filter='aaa', sort_by='HEALTH', sort_desc=True - ) - titles = [obj.title.partition(' ')[0] for obj in objects] - assert titles == [ - 'torrent2_1', # 20 seeders - 'torrent1', # 10 seeders - 'torrent2', # no seeders - 'channel1', - 'folder2_1', - 'folder2', - 'folder1', - ] - - objects = metadata_store.get_entries( - channel_pk=channel.public_key, txt_filter='aaa', sort_by='HEALTH', sort_desc=False - ) - titles = [obj.title.partition(' ')[0] for obj in objects] - assert titles == [ - 'folder1', - 'folder2', - 'folder2_1', - 'channel1', - 'torrent2', # no seeders - 'torrent1', # 10 seeders - 'torrent2_1', # 20 seeders - ] - - objects = metadata_store.get_entries( - origin_id=channel.id_, - txt_filter='aaa', - sort_by='HEALTH', - sort_desc=True, - ) - titles = [obj.title.partition(' ')[0] for obj in objects] - assert titles == ['torrent1', 'torrent2', 'folder2', 'folder1'] - - objects = metadata_store.get_entries(origin_id=channel.id_, txt_filter='aaa', sort_by='HEALTH', sort_desc=False) - titles = [obj.title.partition(' ')[0] for obj in objects] - assert titles == ['folder1', 'folder2', 'torrent2', 'torrent1'] - - -@db_session -def test_get_channel_name(metadata_store): - """ - Test getting torrent name for a channel to be displayed in the downloads list - """ - infohash = b"\x00" * 20 - title = "testchan" - chan = metadata_store.ChannelMetadata(title=title, infohash=infohash) - dirname = chan.dirname - - assert title == metadata_store.ChannelMetadata.get_channel_name(dirname, infohash) - assert title == metadata_store.ChannelMetadata.get_channel_name_cached(dirname, infohash) - chan.infohash = b"\x11" * 20 - assert "OLD:" + title == metadata_store.ChannelMetadata.get_channel_name(dirname, infohash) - chan.delete() - assert dirname == metadata_store.ChannelMetadata.get_channel_name(dirname, infohash) - # Check that the cached version of the name is returned even if the channel has been deleted - metadata_store.ChannelMetadata.get_channel_name = Mock() - assert title == metadata_store.ChannelMetadata.get_channel_name_cached(dirname, infohash) - metadata_store.ChannelMetadata.get_channel_name.assert_not_called() - - -async def check_add(metadata_store, torrents_in_dir, errors, recursive): - TEST_TORRENTS_DIR = TESTS_DATA_DIR / 'linux_torrents' - with db_session: - chan = metadata_store.ChannelMetadata.create_channel(title='testchan') - torrents, e = await chan.add_torrents_from_dir(TEST_TORRENTS_DIR, recursive) - assert torrents_in_dir == len(torrents) - assert errors == len(e) - with db_session: - q = metadata_store.TorrentMetadata.select(lambda g: g.metadata_type == REGULAR_TORRENT) - assert torrents_in_dir - len(e) == q.count() - - -async def test_add_torrents_from_dir(metadata_store): - 
await check_add(metadata_store, 9, 0, recursive=False) - - -async def test_add_torrents_from_dir_recursive(metadata_store): - await check_add(metadata_store, 11, 1, recursive=True) - - -@db_session -def create_ext_chan(metadata_store, ext_key): - src_chan = metadata_store.ChannelMetadata(sign_with=ext_key, title="bla", infohash=random_infohash()) - metadata_store.TorrentMetadata(origin_id=src_chan.id_, sign_with=ext_key, infohash=random_infohash()) - l2_coll1 = metadata_store.CollectionNode(origin_id=src_chan.id_, sign_with=ext_key, title="bla-l2-1") - metadata_store.TorrentMetadata(origin_id=l2_coll1.id_, sign_with=ext_key, infohash=random_infohash()) - metadata_store.TorrentMetadata(origin_id=l2_coll1.id_, sign_with=ext_key, infohash=random_infohash()) - l2_coll2 = metadata_store.CollectionNode(origin_id=src_chan.id_, sign_with=ext_key, title="bla-l2-2") - metadata_store.TorrentMetadata(origin_id=l2_coll2.id_, sign_with=ext_key, infohash=random_infohash()) - metadata_store.TorrentMetadata(origin_id=l2_coll2.id_, sign_with=ext_key, infohash=random_infohash()) - return src_chan - - -@db_session -def test_make_copy(metadata_store): - """ - Test copying if recursive copying an external channel to a personal channel works as expected - """ - src_chan = create_ext_chan(metadata_store, default_eccrypto.generate_key("curve25519")) - - tgt_chan = metadata_store.ChannelMetadata(title='our chan', infohash=random_infohash(), status=NEW) - src_chan.make_copy(tgt_chan.id_) - src_chan.pprint_tree() - tgt_chan.pprint_tree() - copy = metadata_store.CollectionNode.get(public_key=tgt_chan.public_key, origin_id=tgt_chan.id_) - assert copy.title == "bla" - assert 1 + len(src_chan.get_contents_recursive()) == len(tgt_chan.get_contents_recursive()) - - -@db_session -def test_update_properties_move(metadata_store): - """ - Test moving a Channel/Collection into another Channel/Collection or at the top of channel hierachy. 
- """ - src_chan = create_ext_chan(metadata_store, metadata_store.ChannelMetadata._my_key) - src_chan_contents = src_chan.get_contents_recursive() - tgt_chan = metadata_store.ChannelMetadata.create_channel('dstchan') - - # Move channel into another channel so it becomes a collection - result_chan = src_chan.update_properties({'origin_id': tgt_chan.id_}) - # Assert the moved channel changed type to collection - assert isinstance(result_chan, metadata_store.CollectionNode) - assert result_chan.metadata_type == COLLECTION_NODE - assert 1 + len(src_chan_contents) == len(tgt_chan.get_contents_recursive()) - - # Move collection to top level so it become a channel - result_chan = result_chan.update_properties({'origin_id': 0}) - # Assert the move collection changed type to channel - assert isinstance(result_chan, metadata_store.ChannelMetadata) - assert result_chan.metadata_type == CHANNEL_TORRENT - - -@db_session -def test_delete_recursive(metadata_store): - """ - Test deleting channel and its contents recursively - """ - src_chan = create_ext_chan(metadata_store, default_eccrypto.generate_key("curve25519")) - src_chan.delete() - assert not metadata_store.ChannelNode.select().count() - - src_chan = create_ext_chan(metadata_store, default_eccrypto.generate_key("curve25519")) - src_chan_rowid = src_chan.rowid - src_chan.delete(recursive=False) - assert metadata_store.ChannelNode.select().count() == 7 - with pytest.raises(ObjectNotFound): - metadata_store.ChannelNode.__getitem__(src_chan_rowid) - - -@db_session -def test_get_parents(metadata_store): - """ - Test the routine that gets the full set (path) of a node's predecessors in the channels tree - """ - key = default_eccrypto.generate_key("curve25519") - src_chan = create_ext_chan(metadata_store, key) - coll1 = metadata_store.CollectionNode.select(lambda g: g.origin_id == src_chan.id_).first() - torr1 = coll1.contents.first() - assert (src_chan, coll1, torr1) == torr1.get_parent_nodes() - - loop = metadata_store.CollectionNode(id_=777, origin_id=777) - assert loop.get_parent_nodes() == (loop,) - - -@db_session -def test_collection_node_state(metadata_store): - """ - Test that CollectionNode state is inherited from the top-level parent channel - """ - key = default_eccrypto.generate_key("curve25519") - src_chan = create_ext_chan(metadata_store, key) - coll1 = metadata_store.CollectionNode.select(lambda g: g.origin_id == src_chan.id_).first() - - # Initially, the top level parent channel is in the preview state, so must be the collection - assert coll1.state == CHANNEL_STATE.PREVIEW.value - - src_chan.local_version = src_chan.timestamp - # Now the top level parent channel is complete, so must become the collection - assert coll1.state == CHANNEL_STATE.COMPLETE.value - - # For personal collections, state should always be "personal" no matter what - pers_chan = metadata_store.ChannelMetadata(infohash=random_infohash()) - pers_coll = metadata_store.CollectionNode(origin_id=pers_chan.id_) - assert pers_coll.state == CHANNEL_STATE.PERSONAL.value - - -@db_session -def test_metadata_compressor(): - SERIALIZED_METADATA = f"<{'S' * 1000}>".encode('ascii') - SERIALIZED_DELETE = f"<{'D' * 100}>".encode('ascii') - SERIALIZED_HEALTH = "1,2,1234567890;".encode('ascii') - - metadata = Mock() - metadata.status = NEW - metadata.serialized = Mock(return_value=SERIALIZED_METADATA) - metadata.serialized_delete = Mock(return_value=SERIALIZED_DELETE) - metadata.serialized_health = Mock(return_value=SERIALIZED_HEALTH) - - def add_items(mc: MetadataCompressor, 
expected_items_count: int): - prev_size = 0 - for i in range(1, 1000): - item_was_added = mc.put(metadata) - if not item_was_added: - assert mc.count == i - 1 # last item was not added - assert mc.count == expected_items_count # compressor was able to add 10 items only - break - - assert mc.count == i # after the element was successfully added, the count should increase - assert mc.size > prev_size # with each item the total size should become bigger - prev_size = mc.size - else: - assert False # too many items was added, something is wrong - - assert prev_size < mc.chunk_size # total size should fit into the chunk - - assert not mc.closed - result = mc.close() - assert mc.closed - assert isinstance(result, bytes) - assert len(result) == prev_size - assert len(result) < len(SERIALIZED_METADATA) * expected_items_count # our test data should be easy to compress - - return result - - # compressing a normal data without a health info - - mc = MetadataCompressor(200) - assert mc.chunk_size == 200 - assert not mc.include_health # include_health is False by default - assert mc.count == 0 # no items added yet - - expected_items_count = 10 # chunk of size 200 should be enough to put 10 test items - data = add_items(mc, expected_items_count) - - d = LZ4FrameDecompressor() - decompressed = d.decompress(data) - assert decompressed == SERIALIZED_METADATA * expected_items_count # check the correctness of the decompressed data - unused_data = d.unused_data - assert not unused_data # if health info is not included, no unused_data should be placed after the LZ4 frame - - assert metadata.serialized_health.assert_not_called - assert metadata.serialized_delete.assert_not_called - - # cannot operate on closed MetadataCompressor - - with pytest.raises(TypeError, match='^Compressor is already closed$'): - mc.put(metadata) - - with pytest.raises(TypeError, match='^Compressor is already closed$'): - mc.close() - - # chunk size is not enough even for a single item - - mc = MetadataCompressor(10) - added = mc.put(metadata) - # first item should be added successfully even if the size of compressed item is bigger than the chunk size - assert added - size = mc.size - assert size > mc.chunk_size - - added = mc.put(metadata) - assert not added # second item was not added - assert mc.count == 1 - assert mc.size == size # size was not changed - - data = mc.close() - d = LZ4FrameDecompressor() - decompressed = d.decompress(data) - assert decompressed == SERIALIZED_METADATA - - # include health info - - mc = MetadataCompressor(200, True) - assert mc.include_health - - expected_items_count = 5 # with health info we can put at most 10 test items into the chunk of size 200 - data = add_items(mc, expected_items_count) - - d = LZ4FrameDecompressor() - decompressed = d.decompress(data) - assert decompressed == SERIALIZED_METADATA * expected_items_count # check the correctness of the decompressed data - unused_data = d.unused_data - - assert metadata.serialized_health.assert_called - assert metadata.serialized_delete.assert_not_called - - health_items = HealthItemsPayload.unpack(unused_data) - assert len(health_items) == expected_items_count - for health_item in health_items: - assert health_item == (1, 2, 1234567890) def test_unpack_health_items(): diff --git a/src/tribler/core/components/metadata_store/tests/test_metadata.py b/src/tribler/core/components/metadata_store/tests/test_metadata.py index 299fe33e7ef..2e5b68cc73a 100644 --- a/src/tribler/core/components/metadata_store/tests/test_metadata.py +++ 
b/src/tribler/core/components/metadata_store/tests/test_metadata.py @@ -1,25 +1,15 @@ -import pytest +import os from ipv8.keyvault.crypto import default_eccrypto from pony import orm from pony.orm import db_session -from tribler.core.components.metadata_store.db.serialization import ( - CHANNEL_NODE, - ChannelNodePayload, - KeysMismatchException, - NULL_KEY, - NULL_SIG, -) -from tribler.core.exceptions import InvalidChannelNodeException, InvalidSignatureException -from tribler.core.utilities.unicode import hexlify - @db_session def test_to_dict(metadata_store): """ Test whether converting metadata to a dictionary works """ - metadata = metadata_store.ChannelNode.from_dict({}) + metadata = metadata_store.TorrentMetadata.from_dict({'infohash': os.urandom(20), 'public_key': b''}) assert metadata.to_dict() @@ -28,120 +18,43 @@ def test_serialization(metadata_store): """ Test converting metadata to serialized data and back """ - for md_type in [ - metadata_store.ChannelNode, - metadata_store.MetadataNode, - metadata_store.CollectionNode, - metadata_store.ChannelDescription, - metadata_store.ChannelThumbnail, - ]: - metadata1 = md_type() - serialized1 = metadata1.serialized() - metadata1.delete() - orm.flush() - - metadata2 = md_type.from_payload(md_type._payload_class.from_signed_blob(serialized1)) - serialized2 = metadata2.serialized() - assert serialized1 == serialized2 - - # Test no signature exception - metadata2_dict = metadata2.to_dict() - metadata2_dict.pop("signature") - with pytest.raises(InvalidSignatureException): - md_type._payload_class(**metadata2_dict) - - serialized3 = serialized2[:-5] + b"\xee" * 5 - with pytest.raises(InvalidSignatureException): - md_type._payload_class.from_signed_blob(serialized3) - # Test bypass signature check - md_type._payload_class.from_signed_blob(serialized3, check_signature=False) - - -@db_session -def test_ffa_serialization(metadata_store): - """ - Test converting free-for-all (unsigned) torrent metadata to payload and back - """ - metadata1 = metadata_store.ChannelNode.from_dict({"public_key": b"", "id_": "123"}) - serialized1 = metadata1.serialized() - # Make sure sig is really zeroes - assert hexlify(serialized1).endswith(hexlify(NULL_SIG)) + md_type = metadata_store.TorrentMetadata + metadata1 = md_type(infohash=os.urandom(20)) + serialized1 = metadata1.serialized(metadata_store.my_key) metadata1.delete() orm.flush() - metadata2 = metadata_store.ChannelNode.from_payload(ChannelNodePayload.from_signed_blob(serialized1)) + metadata2 = md_type.from_payload(md_type.payload_class.from_signed_blob(serialized1)) serialized2 = metadata2.serialized() assert serialized1 == serialized2 - # Check that it is impossible to create FFA node without specifying id_ - with pytest.raises(InvalidChannelNodeException): - metadata_store.ChannelNode.from_dict({"public_key": b""}) - # Check that it is impossible to create FFA payload with non-null signature - with pytest.raises(InvalidSignatureException): - ChannelNodePayload(CHANNEL_NODE, 0, NULL_KEY, 0, 0, 0, signature=b"123") - # Check that creating a pair of metadata entries do not trigger uniqueness constraints error - metadata_store.ChannelNode.from_dict({"public_key": b"", "id_": "124"}) - metadata_store.ChannelNode.from_dict({"public_key": b"", "id_": "125"}) + metadata2_dict = metadata2.to_dict() + metadata2_dict.pop("signature") + assert not md_type.payload_class.from_dict(**metadata2_dict).check_signature() + serialized3 = serialized2[:-5] + b"\xee" * 5 + metadata3 = 
md_type.payload_class.from_signed_blob(serialized3) + assert metadata3.has_signature() + assert not metadata3.check_signature() -@db_session -def test_key_mismatch_exception(metadata_store): - mismatched_key = default_eccrypto.generate_key("curve25519") - metadata = metadata_store.ChannelNode.from_dict({}) - with pytest.raises(KeysMismatchException): - metadata.serialized(key=mismatched_key) - - -@db_session -def test_to_file(tmpdir, metadata_store): - """ - Test writing metadata to a file - """ - metadata = metadata_store.ChannelNode.from_dict({}) - file_path = tmpdir / 'metadata.file' - metadata.to_file(file_path) - assert file_path.exists() - - -@db_session -def test_has_valid_signature(metadata_store): - """ - Test whether a signature can be validated correctly - """ - metadata = metadata_store.ChannelNode.from_dict({}) - assert metadata.has_valid_signature() - - md_dict = metadata.to_dict() - - # Mess with the signature - metadata.signature = b'a' - assert not metadata.has_valid_signature() - - # Create metadata with wrong key - metadata.delete() - md_dict.update(public_key=b"aaa") - md_dict.pop("rowid") - - metadata = metadata_store.ChannelNode(skip_key_check=True, **md_dict) - assert not metadata.has_valid_signature() - + # Test adding a signature and checking for correctness key = default_eccrypto.generate_key("curve25519") - metadata2 = metadata_store.ChannelNode(sign_with=key, **md_dict) - assert key.pub().key_to_bin()[10:], metadata2.public_key - md_dict2 = metadata2.to_dict() - md_dict2["signature"] = md_dict["signature"] - with pytest.raises(InvalidSignatureException): - metadata_store.ChannelNode(**md_dict2) - + metadata3.add_signature(key) + assert metadata3.has_signature() + assert metadata3.check_signature() + metadata3.signature = os.urandom(64) + assert metadata3.has_signature() + assert not metadata3.check_signature() @db_session def test_from_payload(metadata_store): """ Test converting a metadata payload to a metadata object """ - metadata = metadata_store.ChannelNode.from_dict({}) + md_type = metadata_store.TorrentMetadata + metadata = md_type.from_dict({'infohash': os.urandom(20), 'public_key': b''}) metadata_dict = metadata.to_dict() metadata.delete() orm.flush() - metadata_payload = ChannelNodePayload(**metadata_dict) - assert metadata_store.ChannelNode.from_payload(metadata_payload) + metadata_payload = md_type.payload_class.from_dict(**metadata_dict) + assert md_type.from_payload(metadata_payload) diff --git a/src/tribler/core/components/metadata_store/utils.py b/src/tribler/core/components/metadata_store/utils.py index eecdd4f1c3a..a4a439428c4 100644 --- a/src/tribler/core/components/metadata_store/utils.py +++ b/src/tribler/core/components/metadata_store/utils.py @@ -6,11 +6,8 @@ from pony.orm import db_session from tribler.core.components.database.db.layers.knowledge_data_access_layer import Operation, ResourceType -from tribler.core.components.database.db.tribler_database import TriblerDatabase from tribler.core.components.knowledge.community.knowledge_payload import StatementOperation from tribler.core.components.knowledge.knowledge_constants import MIN_RESOURCE_LENGTH -from tribler.core.components.metadata_store.db.store import MetadataStore -from tribler.core.tests.tools.common import PNG_FILE from tribler.core.utilities.unicode import hexlify from tribler.core.utilities.utilities import random_infohash @@ -99,61 +96,3 @@ def generate_torrent(metadata_store, db, parent, title=None): origin_id=parent.id_, health=torrent_state, tags=category) 
tag_torrent(infohash, db) - - -@db_session -def generate_collection(metadata_store, tags_db, parent): - coll = metadata_store.CollectionNode(title=generate_title(words_count=3), origin_id=parent.id_) - for _ in range(0, 3): - generate_torrent(metadata_store, tags_db, coll) - - -@db_session -def generate_channel(metadata_store: MetadataStore, db: TriblerDatabase, title=None, subscribed=False): - # Remember and restore the original key - orig_key = metadata_store.ChannelNode._my_key - - metadata_store.ChannelNode._my_key = default_eccrypto.generate_key('low') - chan = metadata_store.ChannelMetadata( - title=title or generate_title(words_count=5), subscribed=subscribed, infohash=random_infohash() - ) - - # add some collections to the channel - for _ in range(0, 3): - generate_collection(metadata_store, db, chan) - - metadata_store.ChannelNode._my_key = orig_key - - -@db_session -def generate_test_channels(metadata_store, tags_db) -> None: - # First, generate some foreign channels - for ind in range(0, 10): - generate_channel(metadata_store, tags_db, subscribed=ind % 2 == 0) - - # This one is necessary to test filters, etc - generate_channel(metadata_store, tags_db, title="nonrandom unsubscribed channel name") - - # The same, but subscribed - generate_channel(metadata_store, tags_db, title="nonrandom subscribed channel name", subscribed=True) - - # Now generate a couple of personal channels - chan1 = metadata_store.ChannelMetadata.create_channel(title="personal channel with nonrandom name") - generate_torrent(metadata_store, tags_db, chan1, title='Some torrent with nonrandom name') - generate_torrent(metadata_store, tags_db, chan1, title='Another torrent with nonrandom name') - - with open(PNG_FILE, "rb") as f: - pic_bytes = f.read() - metadata_store.ChannelThumbnail(binary_data=pic_bytes, data_type="image/png", origin_id=chan1.id_) - metadata_store.ChannelDescription(json_text='{"description_text": "# Hi guys"}', origin_id=chan1.id_) - - for _ in range(0, 3): - generate_collection(metadata_store, tags_db, chan1) - chan1.commit_channel_torrent() - - chan2 = metadata_store.ChannelMetadata.create_channel(title="personal channel " + generate_title(words_count=2)) - for _ in range(0, 3): - generate_collection(metadata_store, tags_db, chan2) - - # add 'Tribler' entry to facilitate keyword search tests - generate_channel(metadata_store, tags_db, title="Tribler tribler chan", subscribed=True) diff --git a/src/tribler/core/components/popularity/community/popularity_community.py b/src/tribler/core/components/popularity/community/popularity_community.py index 4e7e31c90c4..0a6453faf77 100644 --- a/src/tribler/core/components/popularity/community/popularity_community.py +++ b/src/tribler/core/components/popularity/community/popularity_community.py @@ -1,16 +1,22 @@ from __future__ import annotations import random +import time +import uuid from binascii import unhexlify from typing import List, TYPE_CHECKING from ipv8.lazy_community import lazy_wrapper from pony.orm import db_session +from tribler.core import notifications +from tribler.core.components.ipv8.discovery_booster import DiscoveryBooster +from tribler.core.components.metadata_store.db.store import ObjState from tribler.core.components.metadata_store.remote_query_community.remote_query_community import RemoteQueryCommunity from tribler.core.components.popularity.community.payload import PopularTorrentsRequest, TorrentsHealthPayload from tribler.core.components.popularity.community.version_community_mixin import VersionCommunityMixin from 
tribler.core.components.torrent_checker.torrent_checker.dataclasses import HealthInfo +from tribler.core.utilities.notifier import Notifier from tribler.core.utilities.pony_utils import run_threaded from tribler.core.utilities.unicode import hexlify from tribler.core.utilities.utilities import get_normally_distributed_positive_integers @@ -18,6 +24,8 @@ if TYPE_CHECKING: from tribler.core.components.torrent_checker.torrent_checker.torrent_checker import TorrentChecker +max_address_cache_lifetime = 5.0 # seconds + class PopularityCommunity(RemoteQueryCommunity, VersionCommunityMixin): """ @@ -37,10 +45,11 @@ class PopularityCommunity(RemoteQueryCommunity, VersionCommunityMixin): community_id = unhexlify('9aca62f878969c437da9844cba29a134917e1648') - def __init__(self, *args, torrent_checker=None, **kwargs): + def __init__(self, *args, torrent_checker=None, notifier=None, **kwargs): # Creating a separate instance of Network for this community to find more peers super().__init__(*args, **kwargs) self.torrent_checker: TorrentChecker = torrent_checker + self.notifier: Notifier = notifier self.add_message_handler(TorrentsHealthPayload, self.on_torrents_health) self.add_message_handler(PopularTorrentsRequest, self.on_popular_torrents_request) @@ -52,6 +61,12 @@ def __init__(self, *args, torrent_checker=None, **kwargs): # Init version community message handlers self.init_version_community() + self.address_cache = {} + self.address_cache_created_at = time.time() + + self.discovery_booster = DiscoveryBooster() + self.discovery_booster.apply(self) + def introduction_request_callback(self, peer, dist, payload): super().introduction_request_callback(peer, dist, payload) # Send request to peer to send popular torrents @@ -141,3 +156,29 @@ def get_random_torrents(self) -> List[HealthInfo]: random_torrents = random.sample(checked_and_alive, num_torrents_to_send) return random_torrents + + def get_random_peers(self, sample_size=None): + # Randomly sample sample_size peers from the complete list of our peers + all_peers = self.get_peers() + return random.sample(all_peers, min(sample_size or len(all_peers), len(all_peers))) + + def send_search_request(self, **kwargs): + # Send a remote query request to multiple random peers to search for some terms + request_uuid = uuid.uuid4() + + def notify_gui(request, processing_results): + results = [ + r.md_obj.to_simple_dict() + for r in processing_results + if r.obj_state == ObjState.NEW_OBJECT + ] + if self.notifier: + self.notifier[notifications.remote_query_results]( + {"results": results, "uuid": str(request_uuid), "peer": hexlify(request.peer.mid)}) + + peers_to_query = self.get_random_peers(self.rqc_settings.max_query_peers) + + for p in peers_to_query: + self.send_remote_select(p, **kwargs, processing_callback=notify_gui) + + return request_uuid, peers_to_query diff --git a/src/tribler/core/components/gigachannel/community/sync_strategy.py b/src/tribler/core/components/popularity/community/sync_strategy.py similarity index 100% rename from src/tribler/core/components/gigachannel/community/sync_strategy.py rename to src/tribler/core/components/popularity/community/sync_strategy.py diff --git a/src/tribler/core/components/popularity/community/tests/test_popularity_community.py b/src/tribler/core/components/popularity/community/tests/test_popularity_community.py index c7be0b7b9a1..f449782eccc 100644 --- a/src/tribler/core/components/popularity/community/tests/test_popularity_community.py +++ 
b/src/tribler/core/components/popularity/community/tests/test_popularity_community.py @@ -1,211 +1 @@ -import time -from random import randint -from typing import List -from unittest.mock import Mock - -from ipv8.keyvault.crypto import default_eccrypto -from pony.orm import db_session - -from tribler.core.components.ipv8.adapters_tests import TriblerMockIPv8, TriblerTestBase -from tribler.core.components.metadata_store.db.store import MetadataStore -from tribler.core.components.metadata_store.remote_query_community.settings import RemoteQueryCommunitySettings -from tribler.core.components.popularity.community.popularity_community import PopularityCommunity -from tribler.core.components.torrent_checker.torrent_checker.torrentchecker_session import HealthInfo -from tribler.core.tests.tools.base_test import MockObject -from tribler.core.utilities.path_util import Path -from tribler.core.utilities.utilities import random_infohash - - -def _generate_single_checked_torrent(status: str = None) -> HealthInfo: - """ - Assumptions - DEAD -> peers: 0 - POPULAR -> Peers: [101, 1000] - DEFAULT -> peers: [1, 100] # alive - """ - - def get_peers_for(health_status): - if health_status == 'DEAD': - return 0 - if health_status == 'POPULAR': - return randint(101, 1000) - return randint(1, 100) - - return HealthInfo(random_infohash(), seeders=get_peers_for(status), leechers=get_peers_for(status)) - - -def _generate_checked_torrents(count: int, status: str = None) -> List[HealthInfo]: - return [_generate_single_checked_torrent(status) for _ in range(count)] - - -class TestPopularityCommunity(TriblerTestBase): - NUM_NODES = 2 - - def setUp(self): - super().setUp() - self.count = 0 - self.metadata_store_set = set() - self.initialize(PopularityCommunity, self.NUM_NODES) - - async def tearDown(self): - for metadata_store in self.metadata_store_set: - metadata_store.shutdown() - await super().tearDown() - - def create_node(self, *args, **kwargs): - mds = MetadataStore(Path(self.temporary_directory()) / f"{self.count}", - Path(self.temporary_directory()), - default_eccrypto.generate_key("curve25519")) - self.metadata_store_set.add(mds) - torrent_checker = MockObject() - torrent_checker.torrents_checked = {} - - self.count += 1 - - rqc_settings = RemoteQueryCommunitySettings() - return TriblerMockIPv8("curve25519", PopularityCommunity, metadata_store=mds, - torrent_checker=torrent_checker, - rqc_settings=rqc_settings - ) - - @db_session - def fill_database(self, metadata_store, last_check_now=False): - for torrent_ind in range(5): - last_check = int(time.time()) if last_check_now else 0 - metadata_store.TorrentState( - infohash=str(torrent_ind).encode() * 20, seeders=torrent_ind + 1, last_check=last_check) - - async def init_first_node_and_gossip(self, checked_torrent_info: HealthInfo, deliver_timeout: float = 0.1): - self.nodes[0].overlay.torrent_checker.torrents_checked[checked_torrent_info.infohash] = checked_torrent_info - await self.introduce_nodes() - - self.nodes[0].overlay.gossip_random_torrents_health() - - await self.deliver_messages(timeout=deliver_timeout) - - async def test_torrents_health_gossip(self): - """ - Test whether torrent health information is correctly gossiped around - """ - checked_torrent_info = HealthInfo(b'a' * 20, seeders=200, leechers=0) - node0_db = self.nodes[0].overlay.mds.TorrentState - node1_db2 = self.nodes[1].overlay.mds.TorrentState - - with db_session: - assert node0_db.select().count() == 0 - assert node1_db2.select().count() == 0 - - await 
self.init_first_node_and_gossip(checked_torrent_info) - - # Check whether node 1 has new torrent health information - with db_session: - torrent = node1_db2.select().first() - assert torrent.infohash == checked_torrent_info.infohash - assert torrent.seeders == checked_torrent_info.seeders - assert torrent.leechers == checked_torrent_info.leechers - assert torrent.last_check == checked_torrent_info.last_check - - def test_get_alive_torrents(self): - dead_torrents = _generate_checked_torrents(100, 'DEAD') - popular_torrents = _generate_checked_torrents(100, 'POPULAR') - alive_torrents = _generate_checked_torrents(100) - - all_checked_torrents = dead_torrents + alive_torrents + popular_torrents - self.nodes[0].overlay.torrent_checker.torrents_checked.update( - {health.infohash: health for health in all_checked_torrents}) - - actual_alive_torrents = self.nodes[0].overlay.get_alive_checked_torrents() - assert len(actual_alive_torrents) == len(alive_torrents + popular_torrents) - - async def test_torrents_health_gossip_multiple(self): - """ - Test whether torrent health information is correctly gossiped around - """ - dead_torrents = _generate_checked_torrents(100, 'DEAD') - popular_torrents = _generate_checked_torrents(100, 'POPULAR') - alive_torrents = _generate_checked_torrents(100) - - all_checked_torrents = dead_torrents + alive_torrents + popular_torrents - - node0_db = self.nodes[0].overlay.mds.TorrentState - node1_db = self.nodes[1].overlay.mds.TorrentState - - # Given, initially there are no torrents in the database - with db_session: - node0_count = node0_db.select().count() - node1_count = node1_db.select().count() - assert node0_count == 0 - assert node1_count == 0 - - # Setup, node 0 checks some torrents, both dead and alive (including popular ones). - self.nodes[0].overlay.torrent_checker.torrents_checked.update( - {health.infohash: health for health in all_checked_torrents}) - - # Nodes are introduced - await self.introduce_nodes() - - # Since on introduction request callback, node asks for popular torrents, we expect that - # popular torrents are shared by node 0 to node 1. - with db_session: - node0_count = node0_db.select().count() - node1_count = node1_db.select().count() - - assert node0_count == 0 # Nothing received from Node 1 because it hasn't checked anything to share. - assert node1_count == PopularityCommunity.GOSSIP_POPULAR_TORRENT_COUNT - - node1_db_last_count = node1_count - - # Now, assuming Node 0 gossips random torrents to Node 1 multiple times to simulate periodic nature - for _ in range(10): - self.nodes[0].overlay.gossip_random_torrents_health() - await self.deliver_messages(timeout=0.1) - - # After gossip, Node 1 should have received some random torrents from Node 0. - # Note that random torrents can also include popular torrents sent during introduction - # and random torrents sent in earlier gossip since no state is maintained. 
- with db_session: - node0_count = node0_db.select().count() - node1_count = node1_db.select().count() - - assert node0_count == 0 # Still nothing received from Node 1 because it hasn't checked torrents - assert node1_count >= node1_db_last_count - - node1_db_last_count = node1_count - - async def test_torrents_health_update(self): - """ - Test updating the local torrent health information from network - """ - self.fill_database(self.nodes[1].overlay.mds) - - checked_torrent_info = HealthInfo(b'0' * 20, seeders=200, leechers=0) - await self.init_first_node_and_gossip(checked_torrent_info, deliver_timeout=0.5) - - # Check whether node 1 has new torrent health information - with db_session: - state = self.nodes[1].overlay.mds.TorrentState.get(infohash=b'0' * 20) - self.assertIsNot(state.last_check, 0) - - async def test_unknown_torrent_query_back(self): - """ - Test querying sender for metadata upon receiving an unknown torrent - """ - - infohash = b'1' * 20 - with db_session: - self.nodes[0].overlay.mds.TorrentMetadata(infohash=infohash) - await self.init_first_node_and_gossip( - HealthInfo(infohash, seeders=200, leechers=0)) - with db_session: - assert self.nodes[1].overlay.mds.TorrentMetadata.get() - - async def test_skip_torrent_query_back_for_known_torrent(self): - # Test that we _don't_ send the query if we already know about the infohash - infohash = b'1' * 20 - with db_session: - self.nodes[0].overlay.mds.TorrentMetadata(infohash=infohash) - self.nodes[1].overlay.mds.TorrentMetadata(infohash=infohash) - self.nodes[1].overlay.send_remote_select = Mock() - await self.init_first_node_and_gossip( - HealthInfo(infohash, seeders=200, leechers=0)) - self.nodes[1].overlay.send_remote_select.assert_not_called() +import time from random import randint from typing import List from unittest.mock import Mock from ipv8.keyvault.crypto import default_eccrypto from pony.orm import db_session from tribler.core import notifications from tribler.core.components.ipv8.adapters_tests import TriblerMockIPv8, TriblerTestBase from tribler.core.components.metadata_store.db.store import MetadataStore from tribler.core.components.metadata_store.remote_query_community.settings import RemoteQueryCommunitySettings from tribler.core.components.popularity.community.popularity_community import PopularityCommunity from tribler.core.components.torrent_checker.torrent_checker.torrentchecker_session import HealthInfo from tribler.core.tests.tools.base_test import MockObject from tribler.core.utilities.path_util import Path from tribler.core.utilities.utilities import random_infohash def _generate_single_checked_torrent(status: str = None) -> HealthInfo: """ Assumptions DEAD -> peers: 0 POPULAR -> Peers: [101, 1000] DEFAULT -> peers: [1, 100] # alive """ def get_peers_for(health_status): if health_status == 'DEAD': return 0 if health_status == 'POPULAR': return randint(101, 1000) return randint(1, 100) return HealthInfo(random_infohash(), seeders=get_peers_for(status), leechers=get_peers_for(status)) def _generate_checked_torrents(count: int, status: str = None) -> List[HealthInfo]: return [_generate_single_checked_torrent(status) for _ in range(count)] class TestPopularityCommunity(TriblerTestBase): NUM_NODES = 2 def setUp(self): super().setUp() self.count = 0 self.metadata_store_set = set() self.initialize(PopularityCommunity, self.NUM_NODES) async def tearDown(self): for metadata_store in self.metadata_store_set: metadata_store.shutdown() await super().tearDown() def create_node(self, *args, **kwargs): mds = 
MetadataStore(Path(self.temporary_directory()) / f"{self.count}", Path(self.temporary_directory()), default_eccrypto.generate_key("curve25519")) self.metadata_store_set.add(mds) torrent_checker = MockObject() torrent_checker.torrents_checked = {} self.count += 1 rqc_settings = RemoteQueryCommunitySettings() return TriblerMockIPv8("curve25519", PopularityCommunity, metadata_store=mds, torrent_checker=torrent_checker, rqc_settings=rqc_settings ) @db_session def fill_database(self, metadata_store, last_check_now=False): for torrent_ind in range(5): last_check = int(time.time()) if last_check_now else 0 metadata_store.TorrentState( infohash=str(torrent_ind).encode() * 20, seeders=torrent_ind + 1, last_check=last_check) async def init_first_node_and_gossip(self, checked_torrent_info: HealthInfo, deliver_timeout: float = 0.1): self.nodes[0].overlay.torrent_checker.torrents_checked[checked_torrent_info.infohash] = checked_torrent_info await self.introduce_nodes() self.nodes[0].overlay.gossip_random_torrents_health() await self.deliver_messages(timeout=deliver_timeout) def torrent_metadata(self, i): return self.overlay(i).mds.TorrentMetadata async def test_torrents_health_gossip(self): """ Test whether torrent health information is correctly gossiped around """ checked_torrent_info = HealthInfo(b'a' * 20, seeders=200, leechers=0) node0_db = self.nodes[0].overlay.mds.TorrentState node1_db2 = self.nodes[1].overlay.mds.TorrentState with db_session: assert node0_db.select().count() == 0 assert node1_db2.select().count() == 0 await self.init_first_node_and_gossip(checked_torrent_info) # Check whether node 1 has new torrent health information with db_session: torrent = node1_db2.select().first() assert torrent.infohash == checked_torrent_info.infohash assert torrent.seeders == checked_torrent_info.seeders assert torrent.leechers == checked_torrent_info.leechers assert torrent.last_check == checked_torrent_info.last_check def test_get_alive_torrents(self): dead_torrents = _generate_checked_torrents(100, 'DEAD') popular_torrents = _generate_checked_torrents(100, 'POPULAR') alive_torrents = _generate_checked_torrents(100) all_checked_torrents = dead_torrents + alive_torrents + popular_torrents self.nodes[0].overlay.torrent_checker.torrents_checked.update( {health.infohash: health for health in all_checked_torrents}) actual_alive_torrents = self.nodes[0].overlay.get_alive_checked_torrents() assert len(actual_alive_torrents) == len(alive_torrents + popular_torrents) async def test_torrents_health_gossip_multiple(self): """ Test whether torrent health information is correctly gossiped around """ dead_torrents = _generate_checked_torrents(100, 'DEAD') popular_torrents = _generate_checked_torrents(100, 'POPULAR') alive_torrents = _generate_checked_torrents(100) all_checked_torrents = dead_torrents + alive_torrents + popular_torrents node0_db = self.nodes[0].overlay.mds.TorrentState node1_db = self.nodes[1].overlay.mds.TorrentState # Given, initially there are no torrents in the database with db_session: node0_count = node0_db.select().count() node1_count = node1_db.select().count() assert node0_count == 0 assert node1_count == 0 # Setup, node 0 checks some torrents, both dead and alive (including popular ones). 
self.nodes[0].overlay.torrent_checker.torrents_checked.update( {health.infohash: health for health in all_checked_torrents}) # Nodes are introduced await self.introduce_nodes() # Since on introduction request callback, node asks for popular torrents, we expect that # popular torrents are shared by node 0 to node 1. with db_session: node0_count = node0_db.select().count() node1_count = node1_db.select().count() assert node0_count == 0 # Nothing received from Node 1 because it hasn't checked anything to share. assert node1_count == PopularityCommunity.GOSSIP_POPULAR_TORRENT_COUNT node1_db_last_count = node1_count # Now, assuming Node 0 gossips random torrents to Node 1 multiple times to simulate periodic nature for _ in range(10): self.nodes[0].overlay.gossip_random_torrents_health() await self.deliver_messages(timeout=0.1) # After gossip, Node 1 should have received some random torrents from Node 0. # Note that random torrents can also include popular torrents sent during introduction # and random torrents sent in earlier gossip since no state is maintained. with db_session: node0_count = node0_db.select().count() node1_count = node1_db.select().count() assert node0_count == 0 # Still nothing received from Node 1 because it hasn't checked torrents assert node1_count >= node1_db_last_count node1_db_last_count = node1_count async def test_torrents_health_update(self): """ Test updating the local torrent health information from network """ self.fill_database(self.nodes[1].overlay.mds) checked_torrent_info = HealthInfo(b'0' * 20, seeders=200, leechers=0) await self.init_first_node_and_gossip(checked_torrent_info, deliver_timeout=0.5) # Check whether node 1 has new torrent health information with db_session: state = self.nodes[1].overlay.mds.TorrentState.get(infohash=b'0' * 20) self.assertIsNot(state.last_check, 0) async def test_unknown_torrent_query_back(self): """ Test querying sender for metadata upon receiving an unknown torrent """ infohash = b'1' * 20 with db_session: self.nodes[0].overlay.mds.TorrentMetadata(infohash=infohash) await self.init_first_node_and_gossip( HealthInfo(infohash, seeders=200, leechers=0)) with db_session: assert self.nodes[1].overlay.mds.TorrentMetadata.get() async def test_skip_torrent_query_back_for_known_torrent(self): # Test that we _don't_ send the query if we already know about the infohash infohash = b'1' * 20 with db_session: self.nodes[0].overlay.mds.TorrentMetadata(infohash=infohash) self.nodes[1].overlay.mds.TorrentMetadata(infohash=infohash) self.nodes[1].overlay.send_remote_select = Mock() await self.init_first_node_and_gossip( HealthInfo(infohash, seeders=200, leechers=0)) self.nodes[1].overlay.send_remote_select.assert_not_called() async def test_popularity_search(self): """ Test searching several nodes for metadata entries based on title text """ with db_session: # Add test metadata to node ID2 self.torrent_metadata(1)(title="ubuntu torrent", infohash=random_infohash()) self.torrent_metadata(1)(title="debian torrent", infohash=random_infohash()) notifier = Mock() self.overlay(0).notifier = {notifications.remote_query_results: notifier} self.overlay(0).send_search_request(**{"txt_filter": "ubuntu*"}) await self.deliver_messages() notifier.assert_called() \ No newline at end of file diff --git a/src/tribler/core/components/gigachannel/community/tests/test_sync_strategy.py b/src/tribler/core/components/popularity/community/tests/test_sync_strategy.py similarity index 95% rename from 
src/tribler/core/components/gigachannel/community/tests/test_sync_strategy.py
rename to src/tribler/core/components/popularity/community/tests/test_sync_strategy.py
index af2d77a98aa..7d9e27a3e2a 100644
--- a/src/tribler/core/components/gigachannel/community/tests/test_sync_strategy.py
+++ b/src/tribler/core/components/popularity/community/tests/test_sync_strategy.py
@@ -2,7 +2,7 @@
 from ipv8.peer import Peer
 from ipv8.peerdiscovery.network import Network
 
-from tribler.core.components.gigachannel.community.sync_strategy import RemovePeers
+from tribler.core.components.popularity.community.sync_strategy import RemovePeers
 from tribler.core.components.ipv8.adapters_tests import TriblerTestBase
diff --git a/src/tribler/core/components/popularity/popularity_component.py b/src/tribler/core/components/popularity/popularity_component.py
index bbc54a5ac86..0c0b00ff566 100644
--- a/src/tribler/core/components/popularity/popularity_component.py
+++ b/src/tribler/core/components/popularity/popularity_component.py
@@ -1,10 +1,10 @@
 from ipv8.peerdiscovery.network import Network
 
 from tribler.core.components.component import Component
-from tribler.core.components.gigachannel.community.sync_strategy import RemovePeers
 from tribler.core.components.ipv8.ipv8_component import INFINITE, Ipv8Component
 from tribler.core.components.metadata_store.metadata_store_component import MetadataStoreComponent
 from tribler.core.components.popularity.community.popularity_community import PopularityCommunity
+from tribler.core.components.popularity.community.sync_strategy import RemovePeers
 from tribler.core.components.reporter.reporter_component import ReporterComponent
 from tribler.core.components.torrent_checker.torrent_checker_component import TorrentCheckerComponent
@@ -29,7 +29,8 @@ async def run(self):
             settings=config.popularity_community,
             rqc_settings=config.remote_query_community,
             metadata_store=metadata_store_component.mds,
-            torrent_checker=torrent_checker_component.torrent_checker)
+            torrent_checker=torrent_checker_component.torrent_checker,
+            notifier=self.session.notifier)
         self.community = community
         self._ipv8_component.initialise_community_by_default(community, default_random_walk_max_peers=30)
diff --git a/src/tribler/core/components/popularity/settings.py b/src/tribler/core/components/popularity/settings.py
index 8ede6ada29a..e305372723c 100644
--- a/src/tribler/core/components/popularity/settings.py
+++ b/src/tribler/core/components/popularity/settings.py
@@ -1,6 +1,22 @@
+from pydantic import Field
+
 from tribler.core.config.tribler_config_section import TriblerConfigSection
+from tribler.core.utilities.simpledefs import STATEDIR_CHANNELS_DIR
 
 
 class PopularityCommunitySettings(TriblerConfigSection):
     enabled: bool = True
     cache_dir: str = 'health_cache'
+
+
+class ChantSettings(TriblerConfigSection):
+    enabled: bool = True
+    manager_enabled: bool = True
+    channel_edit: bool = False
+    channels_dir: str = STATEDIR_CHANNELS_DIR
+    testnet: bool = Field(default=False, env='CHANT_TESTNET')
+
+    queried_peers_limit: int = 1000
+    # The maximum number of peers that we got from channels to peers mapping,
+    # that must be queried in addition to randomly queried peers
+    max_mapped_query_peers = 3
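Note how `ChantSettings.testnet` is declared with pydantic's `Field(env=...)`, so the flag can be flipped per-process from the environment without touching the config file. A standalone sketch of that mechanism, assuming pydantic v1 `BaseSettings` semantics (`TriblerConfigSection` layers more behaviour on top; `ExampleSettings` is illustrative):

```python
import os

from pydantic import BaseSettings, Field


class ExampleSettings(BaseSettings):
    # False by default; the CHANT_TESTNET environment variable overrides it.
    testnet: bool = Field(default=False, env='CHANT_TESTNET')


os.environ['CHANT_TESTNET'] = '1'
assert ExampleSettings().testnet is True
```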
diff --git a/src/tribler/core/components/restapi/rest/statistics_endpoint.py b/src/tribler/core/components/restapi/rest/statistics_endpoint.py
index dc60ae95f16..8e2a197407c 100644
--- a/src/tribler/core/components/restapi/rest/statistics_endpoint.py
+++ b/src/tribler/core/components/restapi/rest/statistics_endpoint.py
@@ -32,7 +32,6 @@ def setup_routes(self):
             200: {
                 "schema": schema(TriblerStatisticsResponse={
                     'statistics': schema(TriblerStatistics={
-                        'num_channels': Integer,
                         'database_size': Integer,
                         'torrent_queue_stats': [
                             schema(TorrentQueueStats={
@@ -53,7 +52,6 @@ async def get_tribler_stats(self, request):
         if self.mds:
             db_size = self.mds.get_db_file_size()
             stats_dict = {"db_size": db_size,
-                          "num_channels": self.mds.get_num_channels(),
                           "num_torrents": self.mds.get_num_torrents()}
 
         return RESTResponse({'tribler_statistics': stats_dict})
diff --git a/src/tribler/core/components/restapi/rest/tests/test_statistics_endpoint.py b/src/tribler/core/components/restapi/rest/tests/test_statistics_endpoint.py
index fa2cad6a5ed..a61be19e762 100644
--- a/src/tribler/core/components/restapi/rest/tests/test_statistics_endpoint.py
+++ b/src/tribler/core/components/restapi/rest/tests/test_statistics_endpoint.py
@@ -34,8 +34,7 @@ async def test_get_tribler_statistics(rest_api):
     """
     stats = (await do_request(rest_api, 'statistics/tribler', expected_code=200))['tribler_statistics']
     assert 'db_size' in stats
-    assert 'num_channels' in stats
-    assert 'num_channels' in stats
+    assert 'num_torrents' in stats
 
 
 async def test_get_ipv8_statistics(rest_api):
diff --git a/src/tribler/core/components/restapi/restapi_component.py b/src/tribler/core/components/restapi/restapi_component.py
index 603f468a5ba..d0ceb5ff4d4 100644
--- a/src/tribler/core/components/restapi/restapi_component.py
+++ b/src/tribler/core/components/restapi/restapi_component.py
@@ -8,8 +8,6 @@
 from tribler.core.components.component import Component
 from tribler.core.components.database.database_component import DatabaseComponent
 from tribler.core.components.exceptions import NoneComponent
-from tribler.core.components.gigachannel.gigachannel_component import GigaChannelComponent
-from tribler.core.components.gigachannel_manager.gigachannel_manager_component import GigachannelManagerComponent
 from tribler.core.components.ipv8.ipv8_component import Ipv8Component
 from tribler.core.components.key.key_component import KeyComponent
 from tribler.core.components.knowledge.knowledge_component import KnowledgeComponent
@@ -20,10 +18,9 @@
 from tribler.core.components.libtorrent.restapi.libtorrent_endpoint import LibTorrentEndpoint
 from tribler.core.components.libtorrent.restapi.torrentinfo_endpoint import TorrentInfoEndpoint
 from tribler.core.components.metadata_store.metadata_store_component import MetadataStoreComponent
-from tribler.core.components.metadata_store.restapi.channels_endpoint import ChannelsEndpoint
 from tribler.core.components.metadata_store.restapi.metadata_endpoint import MetadataEndpoint
-from tribler.core.components.metadata_store.restapi.remote_query_endpoint import RemoteQueryEndpoint
 from tribler.core.components.metadata_store.restapi.search_endpoint import SearchEndpoint
+from tribler.core.components.popularity.popularity_component import PopularityComponent
 from tribler.core.components.reporter.exception_handler import CoreExceptionHandler, default_core_exception_handler
 from tribler.core.components.reporter.reported_error import ReportedError
 from tribler.core.components.reporter.reporter_component import ReporterComponent
@@ -80,11 +77,10 @@ async def run(self):
         libtorrent_component = await self.maybe_component(LibtorrentComponent)
         resource_monitor_component = await self.maybe_component(ResourceMonitorComponent)
         bandwidth_accounting_component = await self.maybe_component(BandwidthAccountingComponent)
-        gigachannel_component = await
self.maybe_component(GigaChannelComponent) + popularity_component = await self.maybe_component(PopularityComponent) knowledge_component = await self.maybe_component(KnowledgeComponent) tunnel_component = await self.maybe_component(TunnelsComponent) torrent_checker_component = await self.maybe_component(TorrentCheckerComponent) - gigachannel_manager_component = await self.maybe_component(GigachannelManagerComponent) db_component = await self.maybe_component(DatabaseComponent) public_key = key_component.primary_key.key.pk if not isinstance(key_component, NoneComponent) else b'' @@ -93,7 +89,6 @@ async def run(self): torrent_checker = None if config.gui_test_mode else torrent_checker_component.torrent_checker tunnel_community = None if config.gui_test_mode else tunnel_component.community - gigachannel_manager = None if config.gui_test_mode else gigachannel_manager_component.gigachannel_manager # add endpoints self.root_endpoint.add_endpoint(EventsEndpoint.path, self._events_endpoint) @@ -110,15 +105,11 @@ async def run(self): self.maybe_add(StatisticsEndpoint, ipv8=ipv8_component.ipv8, metadata_store=metadata_store_component.mds) self.maybe_add(LibTorrentEndpoint, libtorrent_component.download_manager) self.maybe_add(TorrentInfoEndpoint, libtorrent_component.download_manager) - self.maybe_add(MetadataEndpoint, torrent_checker, metadata_store_component.mds, - tribler_db=db_component.db, + self.maybe_add(MetadataEndpoint, libtorrent_component.download_manager, torrent_checker, + metadata_store_component.mds, tribler_db=db_component.db, tag_rules_processor=knowledge_component.rules_processor) - self.maybe_add(ChannelsEndpoint, libtorrent_component.download_manager, gigachannel_manager, - gigachannel_component.community, metadata_store_component.mds, - tribler_db=db_component.db, - tag_rules_processor=knowledge_component.rules_processor) - self.maybe_add(SearchEndpoint, metadata_store_component.mds, tribler_db=db_component.db) - self.maybe_add(RemoteQueryEndpoint, gigachannel_component.community, metadata_store_component.mds) + self.maybe_add(SearchEndpoint, popularity_component.community, + metadata_store_component.mds, tribler_db=db_component.db) self.maybe_add(KnowledgeEndpoint, db=db_component.db, community=knowledge_component.community) if not isinstance(ipv8_component, NoneComponent): diff --git a/src/tribler/core/components/restapi/tests/test_restapi_component.py b/src/tribler/core/components/restapi/tests/test_restapi_component.py index fe9f8fc8f8a..501a600bbc4 100644 --- a/src/tribler/core/components/restapi/tests/test_restapi_component.py +++ b/src/tribler/core/components/restapi/tests/test_restapi_component.py @@ -5,7 +5,6 @@ from tribler.core.components.bandwidth_accounting.bandwidth_accounting_component import BandwidthAccountingComponent from tribler.core.components.database.database_component import DatabaseComponent from tribler.core.components.exceptions import NoneComponent -from tribler.core.components.gigachannel.gigachannel_component import GigaChannelComponent from tribler.core.components.ipv8.ipv8_component import Ipv8Component from tribler.core.components.key.key_component import KeyComponent from tribler.core.components.knowledge.knowledge_component import KnowledgeComponent @@ -22,7 +21,7 @@ # pylint: disable=protected-access, not-callable, redefined-outer-name async def test_rest_component(tribler_config): components = [KeyComponent(), RESTComponent(), Ipv8Component(), LibtorrentComponent(), ResourceMonitorComponent(), - BandwidthAccountingComponent(), 
GigaChannelComponent(), KnowledgeComponent(), SocksServersComponent(), + BandwidthAccountingComponent(), KnowledgeComponent(), SocksServersComponent(), MetadataStoreComponent(), DatabaseComponent()] async with Session(tribler_config, components) as session: # Test REST component starts normally diff --git a/src/tribler/core/components/tunnel/community/caches.py b/src/tribler/core/components/tunnel/community/caches.py index c7d62c58e87..49aa437cec2 100644 --- a/src/tribler/core/components/tunnel/community/caches.py +++ b/src/tribler/core/components/tunnel/community/caches.py @@ -26,7 +26,7 @@ def __init__(self, community, circuit_id): def add_response(self, payload): self.response[payload.part] = payload.response - if len(self.response) == payload.total: + if len(self.response) == payload.total and not self.response_future.done(): self.response_future.set_result(b''.join([t[1] for t in sorted(self.response.items())])) return True return False diff --git a/src/tribler/core/config/tribler_config.py b/src/tribler/core/config/tribler_config.py index c41af5cdc9f..e8478756276 100644 --- a/src/tribler/core/config/tribler_config.py +++ b/src/tribler/core/config/tribler_config.py @@ -10,7 +10,6 @@ from pydantic import BaseSettings, Extra, PrivateAttr, validate_model from tribler.core.components.bandwidth_accounting.settings import BandwidthAccountingSettings -from tribler.core.components.gigachannel.community.settings import ChantSettings from tribler.core.components.ipv8.settings import ( BootstrapSettings, DHTSettings, @@ -20,7 +19,7 @@ from tribler.core.components.key.settings import TrustchainSettings from tribler.core.components.libtorrent.settings import DownloadDefaultsSettings, LibtorrentSettings from tribler.core.components.metadata_store.remote_query_community.settings import RemoteQueryCommunitySettings -from tribler.core.components.popularity.settings import PopularityCommunitySettings +from tribler.core.components.popularity.settings import PopularityCommunitySettings, ChantSettings from tribler.core.components.resource_monitor.settings import ResourceMonitorSettings from tribler.core.components.restapi.rest.settings import APISettings from tribler.core.components.torrent_checker.settings import TorrentCheckerSettings diff --git a/src/tribler/core/start_core.py b/src/tribler/core/start_core.py index e119d6b2224..1164c6d0e9b 100644 --- a/src/tribler/core/start_core.py +++ b/src/tribler/core/start_core.py @@ -15,8 +15,6 @@ from tribler.core.components.bandwidth_accounting.bandwidth_accounting_component import BandwidthAccountingComponent from tribler.core.components.component import Component from tribler.core.components.database.database_component import DatabaseComponent -from tribler.core.components.gigachannel.gigachannel_component import GigaChannelComponent -from tribler.core.components.gigachannel_manager.gigachannel_manager_component import GigachannelManagerComponent from tribler.core.components.gui_process_watcher.gui_process_watcher import GuiProcessWatcher from tribler.core.components.gui_process_watcher.gui_process_watcher_component import GuiProcessWatcherComponent from tribler.core.components.ipv8.ipv8_component import Ipv8Component @@ -69,17 +67,11 @@ def components_gen(config: TriblerConfig): if config.libtorrent.enabled: yield LibtorrentComponent() - if config.ipv8.enabled and config.chant.enabled: - yield GigaChannelComponent() if config.ipv8.enabled: yield BandwidthAccountingComponent() if config.resource_monitor.enabled: yield ResourceMonitorComponent() - # The 
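The `caches.py` change above is a classic asyncio guard: `Future.set_result` raises `asyncio.InvalidStateError` once the future is already resolved (for example, when a duplicate final part arrives or the request was cancelled), so the cache now checks `done()` first. A minimal reproduction of the failure mode and the fix, independent of the Tribler classes:

```python
import asyncio


async def main():
    future = asyncio.get_running_loop().create_future()
    future.set_result(b'response')

    # Unguarded: resolving the same future twice raises InvalidStateError.
    try:
        future.set_result(b'response again')
    except asyncio.InvalidStateError:
        print('second set_result rejected')

    # Guarded, as in the patched add_response: a no-op once the future is done.
    if not future.done():
        future.set_result(b'response again')


asyncio.run(main())
```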
components below are skipped if config.gui_test_mode == True - if config.gui_test_mode: - return - if config.libtorrent.enabled: yield SocksServersComponent() @@ -87,6 +79,11 @@ def components_gen(config: TriblerConfig): yield TorrentCheckerComponent() if config.ipv8.enabled and config.torrent_checking.enabled and config.popularity_community.enabled: yield PopularityComponent() + + # The components below are skipped if config.gui_test_mode == True + if config.gui_test_mode: + return + if config.ipv8.enabled and config.tunnel_community.enabled: yield TunnelsComponent() if config.ipv8.enabled: @@ -94,8 +91,6 @@ def components_gen(config: TriblerConfig): yield WatchFolderComponent() if config.general.version_checker_enabled: yield VersionCheckComponent() - if config.chant.enabled and config.chant.manager_enabled and config.libtorrent.enabled: - yield GigachannelManagerComponent() async def core_session(config: TriblerConfig, components: List[Component]) -> int: diff --git a/src/tribler/core/tests/test_start_core.py b/src/tribler/core/tests/test_start_core.py index 7b5f20842fc..43f0e1b8330 100644 --- a/src/tribler/core/tests/test_start_core.py +++ b/src/tribler/core/tests/test_start_core.py @@ -14,3 +14,15 @@ def test_start_tribler_core_no_exceptions(mocked_core_session): # test that base logic of tribler core runs without exceptions run_tribler_core_session(1, 'key', Path('.'), False) mocked_core_session.assert_called_once() + + +@patch('tribler.core.logger.logger.load_logger_config', new=MagicMock()) +@patch('tribler.core.start_core.set_process_priority', new=MagicMock()) +@patch('tribler.core.start_core.check_and_enable_code_tracing', new=MagicMock()) +@patch('asyncio.get_event_loop', new=MagicMock()) +@patch('tribler.core.start_core.TriblerConfig.load', new=MagicMock()) +@patch('tribler.core.start_core.core_session') +def test_start_tribler_core_gui_test_mode(mocked_core_session): + # test that base logic of tribler core runs without exceptions + run_tribler_core_session(1, 'key', Path('.'), True) + mocked_core_session.assert_called_once() diff --git a/src/tribler/core/upgrade/tags_to_knowledge/previous_dbs/tags_db.py b/src/tribler/core/upgrade/tags_to_knowledge/previous_dbs/tags_db.py index a39fa25d31c..a0834e2f28a 100644 --- a/src/tribler/core/upgrade/tags_to_knowledge/previous_dbs/tags_db.py +++ b/src/tribler/core/upgrade/tags_to_knowledge/previous_dbs/tags_db.py @@ -1,7 +1,9 @@ import datetime +from contextlib import suppress from typing import Optional from pony import orm +from pony.orm import db_session, OperationalError from tribler.core.utilities.pony_utils import TrackedDatabase, get_or_create @@ -11,6 +13,10 @@ def __init__(self, filename: Optional[str] = None, *, create_tables: bool = True self.instance = TrackedDatabase() self.define_binding(self.instance) self.instance.bind(provider='sqlite', filename=filename or ':memory:', create_db=True) + if create_tables: + with db_session, suppress(OperationalError): + cursor = self.instance.execute("ALTER TABLE TorrentTagOp ADD auto_generated INTEGER") + cursor.close() generate_mapping_kwargs['create_tables'] = create_tables self.instance.generate_mapping(**generate_mapping_kwargs) diff --git a/src/tribler/core/upgrade/tests/test_upgrader.py b/src/tribler/core/upgrade/tests/test_upgrader.py index 21b6e94d70d..d04610be0c6 100644 --- a/src/tribler/core/upgrade/tests/test_upgrader.py +++ b/src/tribler/core/upgrade/tests/test_upgrader.py @@ -10,7 +10,7 @@ from pony.orm import db_session, select from 
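The `tags_db.py` hunk above makes the schema tweak idempotent: SQLite raises `OperationalError` ("duplicate column name") when the column already exists, and `suppress(OperationalError)` swallows exactly that case, so the upgrade runs safely against both old and new databases. The same pattern with the standard `sqlite3` module (the real code routes through Pony's `Database.execute`; table and column names are taken from the hunk):

```python
import sqlite3
from contextlib import suppress

conn = sqlite3.connect(':memory:')
conn.execute('CREATE TABLE TorrentTagOp (id INTEGER PRIMARY KEY)')

# Safe to run any number of times: the second ALTER raises
# "duplicate column name", which suppress() turns into a no-op.
for _ in range(2):
    with suppress(sqlite3.OperationalError):
        conn.execute('ALTER TABLE TorrentTagOp ADD auto_generated INTEGER')
```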
tribler.core.components.bandwidth_accounting.db.database import BandwidthDatabase -from tribler.core.components.metadata_store.db.orm_bindings.channel_metadata import CHANNEL_DIR_NAME_LENGTH +from tribler.core.components.metadata_store.db.orm_bindings.torrent_metadata import CHANNEL_DIR_NAME_LENGTH from tribler.core.components.metadata_store.db.store import CURRENT_DB_VERSION, MetadataStore from tribler.core.tests.tools.common import TESTS_DATA_DIR from tribler.core.upgrade.db8_to_db10 import calc_progress @@ -100,11 +100,7 @@ def test_upgrade_pony_db_complete(upgrader, channels_dir, state_dir, trustchain_ mds = MetadataStore(mds_path, channels_dir, trustchain_keypair) db = mds.db - existing_indexes = [ - 'idx_channelnode__metadata_type__partial', - 'idx_channelnode__metadata_subscribed__partial', - 'idx_torrentstate__last_check__partial', - ] + existing_indexes = [] removed_indexes = [ 'idx_channelnode__public_key', @@ -120,8 +116,6 @@ def test_upgrade_pony_db_complete(upgrader, channels_dir, state_dir, trustchain_ ] with db_session: - assert mds.TorrentMetadata.select().count() == 23 - assert mds.ChannelMetadata.select().count() == 2 assert mds.get_value("db_version") == str(CURRENT_DB_VERSION) for index_name in existing_indexes: assert list(db.execute(f'PRAGMA index_info("{index_name}")')) @@ -161,7 +155,6 @@ def test_upgrade_pony_8to10(upgrader, channels_dir, mds_path, trustchain_keypair mds = MetadataStore(mds_path, channels_dir, trustchain_keypair, check_tables=False, db_version=10) with db_session: assert mds.get_value("db_version") == '10' - assert mds.ChannelNode.select().count() == 23 mds.shutdown() @@ -288,11 +281,7 @@ def test_upgrade_pony12to13(upgrader, channels_dir, mds_path, trustchain_keypair mds = MetadataStore(mds_path, channels_dir, trustchain_keypair, check_tables=False, db_version=12) db = mds.db - existing_indexes = [ - 'idx_channelnode__metadata_type__partial', - 'idx_channelnode__metadata_subscribed__partial', - 'idx_torrentstate__last_check__partial', - ] + existing_indexes = ['idx_torrentstate__last_check__partial'] removed_indexes = [ 'idx_channelnode__public_key', @@ -308,8 +297,7 @@ def test_upgrade_pony12to13(upgrader, channels_dir, mds_path, trustchain_keypair ] with db_session: - assert mds.TorrentMetadata.select().count() == 23 - assert mds.ChannelMetadata.select().count() == 2 + assert mds.TorrentMetadata.select().count() == 21 assert mds.get_value("db_version") == '13' for index_name in existing_indexes: assert list(db.execute(f'PRAGMA index_info("{index_name}")')), index_name diff --git a/src/tribler/core/upgrade/upgrade.py b/src/tribler/core/upgrade/upgrade.py index a33ccf47b26..884393a877f 100644 --- a/src/tribler/core/upgrade/upgrade.py +++ b/src/tribler/core/upgrade/upgrade.py @@ -11,11 +11,9 @@ from pony.orm import db_session, delete from tribler.core.components.bandwidth_accounting.db.database import BandwidthDatabase -from tribler.core.components.metadata_store.db.orm_bindings.channel_metadata import CHANNEL_DIR_NAME_LENGTH +from tribler.core.components.metadata_store.db.orm_bindings.torrent_metadata import CHANNEL_DIR_NAME_LENGTH from tribler.core.components.metadata_store.db.store import ( CURRENT_DB_VERSION, MetadataStore, - sql_create_partial_index_channelnode_metadata_type, - sql_create_partial_index_channelnode_subscribed, sql_create_partial_index_torrentstate_last_check, ) from tribler.core.upgrade.config_converter import convert_config_to_tribler76 @@ -338,9 +336,6 @@ def do_upgrade_pony_db_12to13(self, mds): 
db.execute(sql_create_partial_index_torrentstate_last_check) mds.create_torrentstate_triggers() - db.execute(sql_create_partial_index_channelnode_metadata_type) - db.execute(sql_create_partial_index_channelnode_subscribed) - db_version.value = str(to_version) def do_upgrade_pony_db_14to15(self, mds: Optional[MetadataStore]): diff --git a/src/tribler/gui/debug_window.py b/src/tribler/gui/debug_window.py index 1356c2ff7b6..c6544dc77ce 100644 --- a/src/tribler/gui/debug_window.py +++ b/src/tribler/gui/debug_window.py @@ -115,10 +115,6 @@ def __init__(self, settings, gui_settings, tribler_version): # Libtorrent tab self.init_libtorrent_tab() - # Channels tab - connect(self.window().channels_tab_widget.currentChanged, self.channels_tab_changed) - self.window().channels_tab_widget.setCurrentIndex(0) - # Position to center frame_geometry = self.frameGeometry() screen = QDesktopWidget().screenNumber(QDesktopWidget().cursor().pos()) @@ -199,8 +195,6 @@ def tab_changed(self, index): self.load_libtorrent_data() elif index == 9: self.load_logs_tab() - elif index == 10: - self.channels_tab_changed(self.window().channels_tab_widget.currentIndex()) def ipv8_tab_changed(self, index): if index == 0: @@ -266,7 +260,6 @@ def on_tribler_statistics(self, data): self.create_and_add_widget_item("BEP33 support", has_bep33_support(), self.window().general_tree_widget) self.create_and_add_widget_item("", "", self.window().general_tree_widget) - self.create_and_add_widget_item("Number of channels", data["num_channels"], self.window().general_tree_widget) self.create_and_add_widget_item( "Database size", format_size(data["db_size"]), self.window().general_tree_widget ) @@ -943,31 +936,3 @@ def save_to_file(self, filename, data): torrent_file.write(json.dumps(data)) except OSError as exc: ConfirmationDialog.show_error(self.window(), "Error exporting file", str(exc)) - - def on_channels_peers(self, data): - widget = self.window().channels_peers_tree_widget - widget.clear() - if not data: - return - - for c in data["channels_list"]: - channel_item = QTreeWidgetItem() - channel_item.setText(0, str(c["channel_name"])) - channel_item.setText(1, str(c["channel_pk"])) - channel_item.setText(2, str(c["channel_id"])) - channel_item.setData(3, Qt.DisplayRole, len(c["peers"])) # Peers count - for p in c["peers"]: - peer_item = QTreeWidgetItem() - peer_item.setText(1, str(p[0])) # Peer mid - peer_item.setData(4, Qt.DisplayRole, p[1]) # Peer age - channel_item.addChild(peer_item) - widget.addTopLevelItem(channel_item) - - def load_channels_peers_tab(self): - request_manager.get("remote_query/channels_peers", self.on_channels_peers) - - def channels_tab_changed(self, index): - if index == 0: - self.run_with_timer(self.load_channels_peers_tab) - elif index == 1: - pass diff --git a/src/tribler/gui/defs.py b/src/tribler/gui/defs.py index c1fca85cd0c..6b37fef0a57 100644 --- a/src/tribler/gui/defs.py +++ b/src/tribler/gui/defs.py @@ -16,12 +16,7 @@ PAGE_SETTINGS = 1 PAGE_DOWNLOADS = 2 PAGE_LOADING = 3 -PAGE_DISCOVERING = 4 -PAGE_DISCOVERED = 5 -PAGE_TRUST = 6 -PAGE_TRUST_GRAPH_PAGE = 7 -PAGE_CHANNEL_CONTENTS = 8 -PAGE_POPULAR = 9 +PAGE_POPULAR = 4 PAGE_EDIT_CHANNEL_TORRENTS = 2 diff --git a/src/tribler/gui/dialogs/addtopersonalchanneldialog.py b/src/tribler/gui/dialogs/addtopersonalchanneldialog.py deleted file mode 100644 index 737a78ed277..00000000000 --- a/src/tribler/gui/dialogs/addtopersonalchanneldialog.py +++ /dev/null @@ -1,143 +0,0 @@ -import json - -from PyQt5 import QtWidgets, uic -from PyQt5.QtCore import pyqtSignal - -from 
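For context on the index statements kept and dropped in `do_upgrade_pony_db_12to13`: these are SQLite partial indexes, which only cover rows matching a `WHERE` clause and therefore stay small on columns where most rows are uninteresting. A generic sketch of the kind of statement behind the kept `TorrentState.last_check` index (the `WHERE` condition here is illustrative, not Tribler's exact schema):

```python
import sqlite3

conn = sqlite3.connect(':memory:')
conn.execute('CREATE TABLE TorrentState (infohash BLOB, seeders INTEGER, last_check INTEGER)')

# A partial index only materialises entries for rows satisfying the WHERE
# clause, keeping lookups on "recently checked" torrents cheap.
conn.execute(
    'CREATE INDEX IF NOT EXISTS idx_torrentstate__last_check__partial '
    'ON TorrentState(last_check) WHERE last_check > 0'
)
```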
tribler.core.components.metadata_store.db.serialization import CHANNEL_TORRENT, COLLECTION_NODE -from tribler.gui.dialogs.dialogcontainer import DialogContainer -from tribler.gui.dialogs.new_channel_dialog import NewChannelDialog -from tribler.gui.network.request_manager import request_manager -from tribler.gui.utilities import connect, get_ui_file_path - - -class ChannelQTreeWidgetItem(QtWidgets.QTreeWidgetItem): - def __init__(self, *args, **kwargs): - self.id_ = kwargs.pop("id_") if "id_" in kwargs else 0 - QtWidgets.QTreeWidgetItem.__init__(self, *args, **kwargs) - - -class AddToChannelDialog(DialogContainer): - create_torrent_notification = pyqtSignal(dict) - - def __init__(self, parent): - DialogContainer.__init__(self, parent) - uic.loadUi(get_ui_file_path('addtochanneldialog.ui'), self.dialog_widget) - connect(self.dialog_widget.btn_cancel.clicked, self.close_dialog) - connect(self.dialog_widget.btn_confirm.clicked, self.on_confirm_clicked) - connect(self.dialog_widget.btn_new_channel.clicked, self.on_create_new_channel_clicked) - connect(self.dialog_widget.btn_new_folder.clicked, self.on_create_new_folder_clicked) - - self.confirm_clicked_callback = None - - self.root_requests_list = [] - - self.channels_tree = {} - self.id2wt_mapping = {0: self.dialog_widget.channels_tree_wt} - connect(self.dialog_widget.channels_tree_wt.itemExpanded, self.on_item_expanded) - - self.dialog_widget.channels_tree_wt.setHeaderLabels(['Name']) - self.on_main_window_resize() - - def on_new_channel_response(self, response): - if not response or not response.get("results", None): - return - self.window().channels_menu_list.reload_if_necessary(response["results"]) - self.load_channel(response["results"][0]["origin_id"]) - - def on_create_new_channel_clicked(self, checked): - def create_channel_callback(channel_name=None): - request_manager.post("channels/mychannel/0/channels", self.on_new_channel_response, - data=json.dumps({"name": channel_name}) if channel_name else None) - - NewChannelDialog(self, create_channel_callback) - - def on_create_new_folder_clicked(self, checked): - selected = self.dialog_widget.channels_tree_wt.selectedItems() - if not selected: - return - - channel_id = selected[0].id_ - postfix = "channels" if not channel_id else "collections" - endpoint = f"channels/mychannel/{channel_id}/{postfix}" - - def create_channel_callback(channel_name=None): - request_manager.post(endpoint, self.on_new_channel_response, - data=json.dumps({"name": channel_name}) if channel_name else None) - - NewChannelDialog(self, create_channel_callback) - - def clear_channels_tree(self): - # ACHTUNG! All running requests must always be cancelled first to prevent race condition! 
- for rq in self.root_requests_list: - rq.cancel() - self.dialog_widget.channels_tree_wt.clear() - self.id2wt_mapping = {0: self.dialog_widget.channels_tree_wt} - self.load_channel(0) - - def show_dialog(self, on_confirm, confirm_button_text="CONFIRM_BUTTON"): - self.dialog_widget.btn_confirm.setText(confirm_button_text) - self.show() - self.confirm_clicked_callback = on_confirm - - def on_item_expanded(self, item): - # Load the grand-children - for channel_id in self.channels_tree.get(item.id_, None): - # "None" means that the node was previously loaded and has no children - # Empty set means it is still not known if it has children or not - # Non-empty set means it was already loaded before - subchannels_set = self.channels_tree.get(channel_id, set()) - if subchannels_set is None or subchannels_set: - continue - self.load_channel(channel_id) - - def load_channel(self, channel_id): - request = request_manager.get( - f"channels/mychannel/{channel_id}", - on_success=lambda result: self.on_channel_contents(result, channel_id), - url_params={ - "metadata_type": [CHANNEL_TORRENT, COLLECTION_NODE], - "first": 1, - "last": 1000, - "exclude_deleted": True, - } - ) - if request: - self.root_requests_list.append(request) - - def get_selected_channel_id(self): - selected = self.dialog_widget.channels_tree_wt.selectedItems() - return None if not selected else selected[0].id_ - - def on_confirm_clicked(self, checked): - channel_id = self.get_selected_channel_id() - if channel_id is None: - return - if self.confirm_clicked_callback: - self.confirm_clicked_callback(channel_id) - self.close_dialog() - - def on_channel_contents(self, response, channel_id): - if not response: - return - - # No results means this node is a leaf - self.channels_tree[channel_id] = set() if response.get("results") else None - - for subchannel in response.get("results", []): - subchannel_id = subchannel["id"] - if subchannel_id in self.id2wt_mapping: - continue - wt = ChannelQTreeWidgetItem(self.id2wt_mapping[channel_id], [subchannel["name"]], id_=subchannel_id) - self.id2wt_mapping[subchannel_id] = wt - # Add the received node to the tree - self.channels_tree[channel_id].add(subchannel_id) - # For top-level channels, we want to immediately load their children so "expand" arrows are shown - if channel_id == 0: - self.load_channel(subchannel_id) - - def close_dialog(self, checked=False): - # Instead of deleting the dialog, hide it. We do this for two reasons: - # a. we do not want to lose the channels tree structure loaded from the core. - # b. we want the tree state (open subtrees, selection) to stay the same, as the user is - # likely to put stuff into the same channel they did before. 
- self.hide() diff --git a/src/tribler/gui/dialogs/createtorrentdialog.py b/src/tribler/gui/dialogs/createtorrentdialog.py index 329a7b73acd..dc27711d35d 100644 --- a/src/tribler/gui/dialogs/createtorrentdialog.py +++ b/src/tribler/gui/dialogs/createtorrentdialog.py @@ -45,7 +45,6 @@ def sanitize_filename(filename: str) -> str: class CreateTorrentDialog(DialogContainer): create_torrent_notification = pyqtSignal(dict) - add_to_channel_selected = pyqtSignal(str) def __init__(self, parent): DialogContainer.__init__(self, parent) @@ -61,7 +60,6 @@ def __init__(self, parent): connect(self.dialog_widget.create_torrent_files_list.customContextMenuRequested, self.on_right_click_file_item) self.dialog_widget.create_torrent_files_list.clear() connect(self.dialog_widget.save_directory_chooser.clicked, self.on_select_save_directory) - self.dialog_widget.edit_channel_create_torrent_progress_label.setText("") self.dialog_widget.file_export_dir.setText(os.path.expanduser("~")) self.dialog_widget.adjustSize() @@ -158,8 +156,6 @@ def on_torrent_created(self, result): if 'torrent' in result: self.create_torrent_notification.emit({"msg": tr("Torrent successfully created")}) self.close_dialog() - if self.dialog_widget.add_to_channel_checkbox.isChecked(): - self.add_to_channel_selected.emit(result['torrent']) def on_select_save_directory(self, checked): chosen_dir = QFileDialog.getExistingDirectory( diff --git a/src/tribler/gui/dialogs/new_channel_dialog.py b/src/tribler/gui/dialogs/new_channel_dialog.py deleted file mode 100644 index 8df7b94a8a6..00000000000 --- a/src/tribler/gui/dialogs/new_channel_dialog.py +++ /dev/null @@ -1,32 +0,0 @@ -from tribler.gui.defs import BUTTON_TYPE_CONFIRM, BUTTON_TYPE_NORMAL -from tribler.gui.dialogs.confirmationdialog import ConfirmationDialog -from tribler.gui.utilities import connect, tr - - -class NewChannelDialog(ConfirmationDialog): - def __init__(self, parent, create_channel_callback): - super().__init__( - parent, - tr("Create new channel"), - tr("Enter the name of the channel/folder to create:"), - [(tr("NEW"), BUTTON_TYPE_NORMAL), (tr("CANCEL"), BUTTON_TYPE_CONFIRM)], - show_input=True, - ) - - # Submitting channel model is necessary because the model will trigger - # some signals to update its on-screen data on adding a new subchannel - # Also, the type of the created entity (channel vs collection) is decided - # by the model. That is a rough hack, but works. 
- self.create_channel_callback = create_channel_callback - self.dialog_widget.dialog_input.setPlaceholderText(tr("Channel name")) - self.dialog_widget.dialog_input.setFocus() - connect(self.button_clicked, self.on_channel_name_dialog_done) - self.show() - - def on_channel_name_dialog_done(self, action): - if action == 0: - text = self.dialog_widget.dialog_input.text() - if text: - self.create_channel_callback(channel_name=text) - - self.close_dialog() diff --git a/src/tribler/gui/dialogs/startdownloaddialog.py b/src/tribler/gui/dialogs/startdownloaddialog.py index 339f660be64..4d3c43b75dc 100644 --- a/src/tribler/gui/dialogs/startdownloaddialog.py +++ b/src/tribler/gui/dialogs/startdownloaddialog.py @@ -102,9 +102,6 @@ def __init__(self, parent, download_uri): self.dialog_widget.safe_seed_checkbox.setChecked( self.window().tribler_settings['download_defaults']['safeseeding_enabled'] ) - self.dialog_widget.add_to_channel_checkbox.setChecked( - self.window().tribler_settings['download_defaults']['add_download_to_channel'] - ) self.dialog_widget.safe_seed_checkbox.setEnabled(self.dialog_widget.anon_download_checkbox.isChecked()) diff --git a/src/tribler/gui/event_request_manager.py b/src/tribler/gui/event_request_manager.py index 88f68108c57..24a03cc8f06 100644 --- a/src/tribler/gui/event_request_manager.py +++ b/src/tribler/gui/event_request_manager.py @@ -28,7 +28,6 @@ class EventRequestManager(QNetworkAccessManager): received_remote_query_results = pyqtSignal(object) core_connected = pyqtSignal(object) new_version_available = pyqtSignal(str) - discovered_channel = pyqtSignal(object) torrent_finished = pyqtSignal(object) low_storage_signal = pyqtSignal(object) tribler_shutdown_signal = pyqtSignal(str) @@ -56,9 +55,7 @@ def __init__(self, api_port: Optional[int], api_key, error_handler): self.notifier = notifier = Notifier() notifier.add_observer(notifications.events_start, self.on_events_start) notifier.add_observer(notifications.tribler_exception, self.on_tribler_exception) - notifier.add_observer(notifications.channel_entity_updated, self.on_channel_entity_updated) notifier.add_observer(notifications.tribler_new_version, self.on_tribler_new_version) - notifier.add_observer(notifications.channel_discovered, self.on_channel_discovered) notifier.add_observer(notifications.torrent_finished, self.on_torrent_finished) notifier.add_observer(notifications.low_space, self.on_low_space) notifier.add_observer(notifications.remote_query_results, self.on_remote_query_results) @@ -87,15 +84,9 @@ def on_events_start(self, public_key: str, version: str): def on_tribler_exception(self, error: dict): self.error_handler.core_error(ReportedError(**error)) - def on_channel_entity_updated(self, channel_update_dict: dict): - self.node_info_updated.emit(channel_update_dict) - def on_tribler_new_version(self, version: str): self.new_version_available.emit(version) - def on_channel_discovered(self, data: dict): - self.discovered_channel.emit(data) - def on_torrent_finished(self, infohash: str, name: str, hidden: bool): self.torrent_finished.emit(dict(infohash=infohash, name=name, hidden=hidden)) diff --git a/src/tribler/gui/qt_resources/addtochanneldialog.ui b/src/tribler/gui/qt_resources/addtochanneldialog.ui deleted file mode 100644 index c03b18c683c..00000000000 --- a/src/tribler/gui/qt_resources/addtochanneldialog.ui +++ /dev/null @@ -1,323 +0,0 @@ - - - Form - - - - 0 - 0 - 1030 - 500 - - - - - 0 - 0 - - - - - 0 - 0 - - - - - 16777215 - 16777215 - - - - ArrowCursor - - - Form - - - false - - - QWidget { 
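The `event_request_manager.py` hunks a little earlier show both halves of the `Notifier` contract this PR leans on: the GUI side registers callbacks with `add_observer`, while the core side fires `notifier[notifications.<topic>](...)`. A toy sketch of that round trip, assuming the notifier forwards the call arguments to each registered observer:

```python
from tribler.core import notifications
from tribler.core.utilities.notifier import Notifier

notifier = Notifier()


def on_torrent_finished(infohash: str, name: str, hidden: bool):
    # Mirrors the observer signature used by EventRequestManager.
    print(f'torrent finished: {name}')


# GUI side: subscribe to a topic.
notifier.add_observer(notifications.torrent_finished, on_torrent_finished)

# Core side: look the topic up and fire it with the topic's arguments.
notifier[notifications.torrent_finished](infohash='aa' * 20, name='ubuntu.iso', hidden=False)
```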
-background-color: #333333; -border-radius: 2px; -} -QLabel { - color: #ffffff; -} -QToolButton { -border: 1px solid #B5B5B5; -border-radius: 12px; -color: white; -padding-left: 4px; -padding-right: 4px; -} -QToolButton::hover { -border: 1px solid white; -color: white; -} - - - - 0 - - - QLayout::SetMinimumSize - - - 0 - - - 0 - - - 0 - - - 0 - - - - - 0 - - - 0 - - - 0 - - - 0 - - - 0 - - - - - - 0 - - - 0 - - - 0 - - - 0 - - - 0 - - - - - padding: 12px 12px 8px 12px; -font-size: 16px; -font-weight: bold; -color: white; - - - Add torrent(s) to personal channel - - - - - - - PointingHandCursor - - - border-radius: 4px; -padding: 4px; -margin-right:8px; -border: None; -background-color:#555; - - - CANCEL (X) - - - - - - - - - - - 0 - 0 - - - - - - - - 1 - - - - - - - - - - - border-radius: 12px; -padding-left: 4px; -padding-right: 4px; - - - - 8 - - - 8 - - - 8 - - - 8 - - - - - Qt::Horizontal - - - QSizePolicy::Fixed - - - - 20 - 20 - - - - - - - - - 0 - 24 - - - - - 16777212 - 24 - - - - PointingHandCursor - - - border-radius: 12px; -padding-left: 4px; -padding-right: 4px; - - - New channel - - - - - - - Qt::Horizontal - - - QSizePolicy::Fixed - - - - 10 - 20 - - - - - - - - - 0 - 24 - - - - - 16777212 - 24 - - - - PointingHandCursor - - - border-radius: 12px; -padding-left: 4px; -padding-right: 4px; - - - New folder - - - - - - - Qt::Horizontal - - - - 570 - 20 - - - - - - - - - 0 - 24 - - - - - 16777212 - 24 - - - - PointingHandCursor - - - border-radius: 12px; -padding-left: 4px; -padding-right: 4px; - - - CONFIRM_BUTTON - - - - - - - - - - - - - diff --git a/src/tribler/gui/qt_resources/channel_description.ui b/src/tribler/gui/qt_resources/channel_description.ui deleted file mode 100644 index 2e60ec319c1..00000000000 --- a/src/tribler/gui/qt_resources/channel_description.ui +++ /dev/null @@ -1,387 +0,0 @@ - - - channel_description_widget - - - - 0 - 0 - 823 - 463 - - - - - - - - 0 - - - QLayout::SetMinimumSize - - - 0 - - - 0 - - - - - - 0 - - - 0 - - - 0 - - - - - - 0 - - - 0 - - - 0 - - - 0 - - - 0 - - - - - - - - Qt::Horizontal - - - QSizePolicy::Fixed - - - - 10 - 20 - - - - - - - - - 0 - 0 - - - - - 0 - 36 - - - - - 16777215 - 36 - - - - PointingHandCursor - - - EDIT - - - true - - - false - - - - - - - - 0 - 0 - - - - - 0 - 36 - - - - - 16777215 - 36 - - - - PointingHandCursor - - - - - - PREVIEW - - - true - - - - - - - - - - Qt::Horizontal - - - - 40 - 20 - - - - - - - - - - - - - - 0 - 0 - - - - - 200 - 200 - - - - - - - ../images/chan_thumb.png - - - false - - - Qt::AlignCenter - - - - - - - Qt::Horizontal - - - QSizePolicy::Fixed - - - - 5 - 20 - - - - - - - - 0 - - - - - 0 - - - - QTextEdit{border-radius: 4px; background-color: rgba(255,255,255,0); border: 0px;} - - - # Welcome -* You can use Markdown syntax here -* https://guides.github.com/features/mastering-markdown/ - - - - - - QPlainTextEdit{border-radius: 4px; background-color: #303030} - - - - - - - - - - - Qt::Horizontal - - - QSizePolicy::Fixed - - - - 10 - 20 - - - - - - - - Save - - - - - - - Qt::Horizontal - - - QSizePolicy::Fixed - - - - 5 - 20 - - - - - - - - Cancel - - - - - - - Qt::Horizontal - - - - 40 - 20 - - - - - - - - - - - - - - - - - - - 0 - - - 0 - - - 0 - - - - - QFrame::StyledPanel - - - QFrame::Raised - - - - - - You can create a description for your channel - - - Qt::AlignCenter - - - - - - - - - - 0 - 0 - - - - Create - - - - - - - - - - - - - - - - ClickableLabel - QLabel -
tribler.gui.widgets.clickablewidgets.h
-
- - UnderlineTabButton - QToolButton -
tribler.gui.widgets.underlinetabbutton.h
-
- - TabButtonPanel - QWidget -
tribler.gui.widgets.tabbuttonpanel.h
- 1 -
-
- - -
diff --git a/src/tribler/gui/qt_resources/createtorrentdialog.ui b/src/tribler/gui/qt_resources/createtorrentdialog.ui
index c32ca46ec10..721eea0287e 100644
--- a/src/tribler/gui/qt_resources/createtorrentdialog.ui
+++ b/src/tribler/gui/qt_resources/createtorrentdialog.ui
@@ -398,19 +398,6 @@ padding:4px;
[XML hunk garbled in extraction; recoverable content: removes the "Add this torrent to your channel" checkbox (styled "color: #bbb;", checked by default) from the create-torrent dialog.]
diff --git a/src/tribler/gui/qt_resources/debugwindow.ui b/src/tribler/gui/qt_resources/debugwindow.ui
index e693e3086f2..9c0866ac45a 100644
--- a/src/tribler/gui/qt_resources/debugwindow.ui
+++ b/src/tribler/gui/qt_resources/debugwindow.ui
@@ -1721,95 +1721,6 @@
[XML hunk garbled in extraction; recoverable content: removes the "Channels" debug tab and its "Channels peers" table with columns Channel name, Public key, Id, Peer count, and Peer age (seconds).]
diff --git a/src/tribler/gui/qt_resources/mainwindow.ui b/src/tribler/gui/qt_resources/mainwindow.ui
index 32028b7e0bb..1e1a96d55bd 100644
--- a/src/tribler/gui/qt_resources/mainwindow.ui
+++ b/src/tribler/gui/qt_resources/mainwindow.ui
@@ -357,6 +357,22 @@ QPushButton::menu-indicator{width:0px;}
[XML hunk garbled in extraction; recoverable content: adds a fixed vertical spacer (20x14) to the left menu.]
@@ -407,64 +423,11 @@ QPushButton::menu-indicator{width:0px;}
[XML hunk garbled in extraction; recoverable content: shrinks a spacer from 20x18 to 20x14 and removes the "CHANNELS" section header together with the checkable "Discovered" left-menu button (discovered.png icon, 16x16).]
@@ -508,96 +471,6 @@ QPushButton::menu-indicator{width:0px;}
[XML hunk garbled in extraction; recoverable content: removes a fixed vertical spacer (20x14), the "MY CHANNELS" header, the styled channels list widget beneath it, and the "Create channel" button (plus.svg icon).]
@@ -1298,13 +1171,6 @@ color: white;margin-top:10px;
[XML hunk garbled in extraction; recoverable content: removes the "Add torrent to My channel" checkbox from the settings page.]
@@ -1315,7 +1181,7 @@ color: white;margin-top:10px;
@@ -1422,7 +1288,7 @@ color: white;margin-top:10px;
[Two tag-only hunks garbled in extraction: renumber the surrounding settings widgets.]
@@ -1433,32 +1299,21 @@ color: white;margin-top:10px
[XML hunk garbled in extraction; recoverable content: renumbers the containers of "Minimize to system tray?", "Use monochrome icon?", and "Commit changes automatically (requires Tribler restart)", and removes the bold "Personal channel settings" header.]
@@ -1496,7 +1351,7 @@ color: white;margin-top:10px;
@@ -1513,7 +1368,7 @@ color: white;margin-top:10px;
[Two tag-only hunks garbled in extraction: renumber the rows around "Hide tags from content items".]
@@ -3150,31 +3005,6 @@ padding-left: 2px;
[XML hunk garbled in extraction; recoverable content: removes the checkable "CHANNELS" top-bar button (36px high, PointingHandCursor).]
@@ -4089,1015 +3919,6 @@ font-weight:bold;
[XML hunk garbled in extraction; recoverable content: removes three stacked pages. (1) The discovery splash: "Discovering your first content..." with the note "This process might take around a minute." (2) The "Trust statistics" page: an "i" info button, a "Show trust graph" tool button (network.png), the explanation "You can build trust by contributing bandwidth to the Tribler network. This is done by letting Tribler run idle.", and two MBytes counter boxes, "Given to community" / "People you helped" and "Taken from community" / "People who helped you". (3) The "Trust Graph" page: a REFRESH button (refresh.png), the caption "The graph below is based on your historical interactions with other users in the network.", a "Progress %p%" progress bar, and "Peer :" / "Balance Given: XX Taken: YY" detail labels.]
@@ -5341,77 +4162,6 @@ border: 1px solid #FF924F;
[XML hunk garbled in extraction; recoverable content: removes the 92x34 "Token balance" widget from the top bar.]
@@ -5558,33 +4308,16 @@ background: none;
   <header>tribler.gui.widgets.downloadprogressbar.h</header>
   <container>1</container>
  </customwidget>
- <customwidget>
-  <class>TrustPage</class>
-  <extends>QWidget</extends>
-  <header>tribler.gui.widgets.trustpage.h</header>
-  <container>1</container>
- </customwidget>
  <customwidget>
   <class>ChannelContentsWidget</class>
   <extends>QWidget</extends>
   <header>tribler.gui.widgets.channelcontentswidget.h</header>
  </customwidget>
- <customwidget>
-  <class>TrustGraphPage</class>
-  <extends>QWidget</extends>
-  <header>tribler.gui.widgets.trustgraphpage.h</header>
-  <container>1</container>
- </customwidget>
  <customwidget>
   <class>ClickableLineEdit</class>
   <extends>QLineEdit</extends>
   <header>tribler.gui.widgets.clickable_line_edit.h</header>
  </customwidget>
- <customwidget>
-  <class>ChannelsMenuListWidget</class>
-  <extends>QListWidget</extends>
-  <header>tribler.gui.widgets.channelsmenulistwidget.h</header>
- </customwidget>
  <customwidget>
   <class>SearchResultsWidget</class>
   <extends>QWidget</extends>
@@ -5689,22 +4422,6 @@ background: none;
[XML hunk garbled in extraction; recoverable content: removes the <connection> wiring left_menu_button_discovered's clicked() signal to MainWindow's clicked_menu_button_discovered() slot (hint coordinates 101,187 and 427,327); the left_menu_button_popular connection that follows is kept.]
@@ -5726,13 +4443,9 @@
  <slot>on_top_search_button_click()</slot>
  <slot>on_add_torrent_button_click()</slot>
  <slot>on_top_menu_button_click()</slot>
- <slot>on_channel_item_click(QListWidgetItem*)</slot>
- <slot>clicked_menu_button_my_channel()</slot>
  <slot>clicked_menu_button_downloads()</slot>
  <slot>clicked_menu_button_subscriptions()</slot>
- <slot>clicked_edit_channel_commit_button()</slot>
  <slot>on_search_text_change()</slot>
- <slot>on_edit_channel_clicked()</slot>
  <slot>clicked_menu_button_discovered()</slot>
  <slot>clicked_menu_button_trust()</slot>
  <slot>on_settings_button_click()</slot>
diff --git a/src/tribler/gui/qt_resources/startdownloaddialog.ui b/src/tribler/gui/qt_resources/startdownloaddialog.ui
index ca6cf04f20e..ca56477462d 100644
--- a/src/tribler/gui/qt_resources/startdownloaddialog.ui
+++ b/src/tribler/gui/qt_resources/startdownloaddialog.ui
@@ -417,25 +406,6 @@ background: yellow;
[XML hunk garbled in extraction; recoverable content: removes the 24px-high "Add to my channel" checkbox from the start-download dialog.]
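The hunk above removes the last dialog-level entry point into the personal-channel flow; the tribler_window.py hunk later in this patch drops the matching add_to_channel parameter from perform_start_download_request. As a minimal sketch of the post-change call, assuming the dialog_widget attribute names used at the call site in this patch (dw is an abbreviation introduced here for self.dialog.dialog_widget, not source code):

# Sketch only, not verbatim source: assembling a download request once the
# "Add to my channel" checkbox is gone; dw abbreviates self.dialog.dialog_widget.
window.perform_start_download_request(
    uri,                                              # torrent file path or magnet URI
    dw.anon_download_checkbox.isChecked(),            # download through anonymity tunnels
    dw.safe_seed_checkbox.isChecked(),                # keep seeding safely afterwards
    dw.destination_input.currentText(),               # destination directory
    dw.files_list_view.get_selected_files_indexes(),  # indexes of the selected files
)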
diff --git a/src/tribler/gui/qt_resources/torrents_list.ui b/src/tribler/gui/qt_resources/torrents_list.ui
index 3946789c5db..dd6464bb2ff 100644
--- a/src/tribler/gui/qt_resources/torrents_list.ui
+++ b/src/tribler/gui/qt_resources/torrents_list.ui
@@ -174,7 +174,7 @@ font-size: 20px;
[XML hunk garbled in extraction; recoverable content: clears the placeholder channel-name label text "Some fancy channel"; the bold 20px style and the Qt::LinksAccessibleByMouse interaction flag are kept.]
@@ -435,101 +435,9 @@ QToolButton{border:none;margin-left:16px;}
[XML hunk garbled in extraction; recoverable content: removes the subscription controls (the "|||||" votes indicator with its fixed spacers) from the channel header.]
@@ -627,96 +535,6 @@ QToolButton{border:none;margin-left:16px;}
[XML hunk garbled in extraction; recoverable content: removes the commit control bar, i.e. the warning label "Your channel has uncommitted changes." (14px, #cc6600) and the rounded "APPLY CHANGES" button.]
@@ -840,12 +658,6 @@ QTableView::item::hover {
   <header>tribler.gui.widgets.qtbug.h</header>
   <container>1</container>
  </customwidget>
- <customwidget>
-  <class>SubscriptionsWidget</class>
-  <extends>QWidget</extends>
-  <header>tribler.gui.widgets.subscriptionswidget.h</header>
-  <container>1</container>
- </customwidget>
  <customwidget>
   <class>TriblerContentTableView</class>
   <extends>QTableView</extends>
@@ -856,11 +668,6 @@
   <extends>QWidget</extends>
   <header>tribler.gui.widgets.togglebutton.h</header>
  </customwidget>
- <customwidget>
-  <class>ChannelDescriptionWidget</class>
-  <extends>QWidget</extends>
-  <header>tribler.gui.widgets.channeldescriptionwidget.h</header>
- </customwidget>
InstantTooltipButton QToolButton diff --git a/src/tribler/gui/tests/gui_test_data.py b/src/tribler/gui/tests/gui_test_data.py deleted file mode 100644 index ad8d6f20b79..00000000000 --- a/src/tribler/gui/tests/gui_test_data.py +++ /dev/null @@ -1,104 +0,0 @@ -# Test data for token balance history - -negative_token_balance_history = [ - {'timestamp': 1639989682705, 'balance': -4242445126}, - {'timestamp': 1639989682715, 'balance': -4242942932}, - {'timestamp': 1639989682725, 'balance': -4243010528}, - {'timestamp': 1639989836725, 'balance': -4243010528}, - {'timestamp': 1639989836756, 'balance': -4243010528}, - {'timestamp': 1639989837713, 'balance': -4242920753}, - {'timestamp': 1639989838040, 'balance': -4242920753}, - {'timestamp': 1639989838050, 'balance': -4242920753}, - {'timestamp': 1639989865600, 'balance': -4243183609}, - {'timestamp': 1639989865600, 'balance': -4243364423}, - {'timestamp': 1639989865615, 'balance': -4243542847}, - {'timestamp': 1639989865615, 'balance': -4243706616}, - {'timestamp': 1639989865631, 'balance': -4243826841}, - {'timestamp': 1639989865647, 'balance': -4243891015}, - {'timestamp': 1639989904528, 'balance': -4243891015}, - {'timestamp': 1639989921434, 'balance': -4243891015}, - {'timestamp': 1639989921450, 'balance': -4243891015}, - {'timestamp': 1639989921997, 'balance': -4243891015}, - {'timestamp': 1639989921997, 'balance': -4243891015}, - {'timestamp': 1639989931318, 'balance': -4244475952}, - {'timestamp': 1639989931334, 'balance': -4244704214}, - {'timestamp': 1639989931334, 'balance': -4244732128}, - {'timestamp': 1639989931350, 'balance': -4244758963}, - {'timestamp': 1639989931365, 'balance': -4244781725}, - {'timestamp': 1639989931365, 'balance': -4244806208}, - {'timestamp': 1639990014979, 'balance': -4245061842}, - {'timestamp': 1639990014995, 'balance': -4245120697}, - {'timestamp': 1639990014995, 'balance': -4245168539}, - {'timestamp': 1639990015010, 'balance': -4245216358}, - {'timestamp': 1639990015010, 'balance': -4245221001}, - {'timestamp': 1639990015026, 'balance': -4245225659}, - {'timestamp': 1639990193728, 'balance': -4245241816}, - {'timestamp': 1639990250821, 'balance': -4245243295}, - {'timestamp': 1639990448300, 'balance': -4245243295}, - {'timestamp': 1639990454609, 'balance': -4245243295}, - {'timestamp': 1639991174571, 'balance': -4245852308}, - {'timestamp': 1639991174587, 'balance': -4245986649}, - {'timestamp': 1639991174587, 'balance': -4246441060}, - {'timestamp': 1639991174602, 'balance': -4248106527}, - {'timestamp': 1639991174602, 'balance': -4249141793}, - {'timestamp': 1639991174618, 'balance': -4249402255}, - {'timestamp': 1639991279504, 'balance': -4249454759}, - {'timestamp': 1639991279519, 'balance': -4249569078}, - {'timestamp': 1639991279519, 'balance': -4249804508}, - {'timestamp': 1639991279535, 'balance': -4249919744}, - {'timestamp': 1639991279582, 'balance': -4249927169}, - {'timestamp': 1639991279598, 'balance': -4249939590}, - {'timestamp': 1639991355956, 'balance': -4250162322}, - {'timestamp': 1639991355971, 'balance': -4250284312}, - {'timestamp': 1639991355971, 'balance': -4250512181}, - {'timestamp': 1639991355987, 'balance': -4250567205}, - {'timestamp': 1639991355987, 'balance': -4250726076}, - {'timestamp': 1639991356003, 'balance': -4251008765}, - {'timestamp': 1639991356003, 'balance': -4251491838}, - {'timestamp': 1639991484789, 'balance': -4251490530}, - {'timestamp': 1639991484789, 'balance': -4251491838}, - {'timestamp': 1639991491322, 'balance': -4251493162}, - {'timestamp': 
1639991491338, 'balance': -4251497771}, - {'timestamp': 1639991491338, 'balance': -4251498931}, - {'timestamp': 1639991491353, 'balance': -4251501656}, - {'timestamp': 1639991491369, 'balance': -4251502083}, - {'timestamp': 1639991491385, 'balance': -4251502510}, - {'timestamp': 1639991901542, 'balance': -4251500986}, - {'timestamp': 1639991948147, 'balance': -4253313336}, - {'timestamp': 1639991948162, 'balance': -4253442797}, - {'timestamp': 1639991948162, 'balance': -4253643279}, - {'timestamp': 1639991948162, 'balance': -4257892944}, - {'timestamp': 1639991948178, 'balance': -4260661821}, - {'timestamp': 1639991948188, 'balance': -4261099251}, - {'timestamp': 1639992013196, 'balance': -4261261311}, - {'timestamp': 1639992013206, 'balance': -4261438136}, - {'timestamp': 1639992013216, 'balance': -4261805891}, - {'timestamp': 1639992013216, 'balance': -4262175431}, - {'timestamp': 1639992013236, 'balance': -4262246474}, - {'timestamp': 1639992013236, 'balance': -4262542764}, - {'timestamp': 1639992111113, 'balance': -4262590904}, - {'timestamp': 1639992111128, 'balance': -4262657779}, - {'timestamp': 1639992111144, 'balance': -4262720033}, - {'timestamp': 1639992111144, 'balance': -4262831837}, - {'timestamp': 1639992111160, 'balance': -4262832264}, - {'timestamp': 1639992111160, 'balance': -4262832691}, - {'timestamp': 1639992534358, 'balance': -4262824219}, - {'timestamp': 1639992597448, 'balance': -4262825830}, - {'timestamp': 1639992740444, 'balance': -4262820880}, - {'timestamp': 1639992740447, 'balance': -4262822860}, - {'timestamp': 1639992740728, 'balance': -4262822860}, - {'timestamp': 1639992863178, 'balance': -4457312394}, - {'timestamp': 1639992863194, 'balance': -4526475834}, - {'timestamp': 1639992863213, 'balance': -4640392288}, - {'timestamp': 1639992863222, 'balance': -4806582573}, - {'timestamp': 1639992863232, 'balance': -4929355104}, - {'timestamp': 1639992863490, 'balance': -4929355104}, - {'timestamp': 1639992863513, 'balance': -4815438650}, - {'timestamp': 1639992863517, 'balance': -4815438650}, - {'timestamp': 1639992863604, 'balance': -4929355104}, - {'timestamp': 1639992866229, 'balance': -4929355104}, - {'timestamp': 1639992868239, 'balance': -5008495201}, - {'timestamp': 1639992868395, 'balance': -5008495201}, - {'timestamp': 1639992883269, 'balance': -5008495201}, - {'timestamp': 1639992883456, 'balance': -5008495201}, -] diff --git a/src/tribler/gui/tests/test_gui.py b/src/tribler/gui/tests/test_gui.py index 42bef5469fc..4ec71cfefd3 100644 --- a/src/tribler/gui/tests/test_gui.py +++ b/src/tribler/gui/tests/test_gui.py @@ -20,13 +20,10 @@ from tribler.core.utilities.unicode import hexlify from tribler.gui.app_manager import AppManager from tribler.gui.dialogs.feedbackdialog import FeedbackDialog -from tribler.gui.dialogs.new_channel_dialog import NewChannelDialog -from tribler.gui.tests.gui_test_data import negative_token_balance_history from tribler.gui.tribler_app import TriblerApplication from tribler.gui.tribler_window import TriblerWindow from tribler.gui.utilities import connect from tribler.gui.widgets.loading_list_item import LoadingListItem -from tribler.gui.widgets.tablecontentmodel import Column from tribler.gui.widgets.tagbutton import TagButton from tribler.gui.widgets.torrentfiletreewidget import CHECKBOX_COL, PreformattedTorrentFileTreeWidget @@ -265,107 +262,50 @@ def get_index_of_row_column(table_view, row, column): return table_view.indexAt(QPoint(x, y)) -def tst_channels_widget(window, widget, widget_name, sort_column=1, test_filter=True, 
test_subscribe=True): - wait_for_list_populated(widget.content_table) - screenshot(window, name=f"{widget_name}-page") - - # Sort - widget.content_table.sortByColumn(sort_column, 1) - wait_for_list_populated(widget.content_table) - screenshot(window, name=f"{widget_name}-sorted") - total = widget.content_table.model().channel_info.get("total") - if total is not None: - max_items = min(total, 50) - assert widget.content_table.verticalHeader().count() <= max_items - - # Filter - if test_filter: - old_num_items = widget.content_table.verticalHeader().count() - widget.channel_torrents_filter_input.setText("nonrandom") - widget.controller.on_filter_input_return_pressed() - wait_for_list_populated(widget.content_table) - screenshot(window, name=f"{widget_name}-filtered") - assert widget.content_table.verticalHeader().count() <= old_num_items - widget.channel_torrents_filter_input.setText("") - widget.controller.on_filter_input_return_pressed() - wait_for_list_populated(widget.content_table) - - if test_subscribe: - widget.content_table.sortByColumn(0, 0) - wait_for_list_populated(widget.content_table) - screenshot(window, name=f"{widget_name}-sorted-on-subscribe") - # Subscribe - index = get_index_of_row_column(widget.content_table, 0, widget.model.column_position[Column.VOTES]) - widget.content_table.on_subscribe_control_clicked(index) - QTest.qWait(200) - - # Unsubscribe - widget.content_table.on_subscribe_control_clicked(index) - QTest.qWait(200) - screenshot(window, name=f"{widget_name}-unsubscribed") - window.dialog.button_clicked.emit(0) - - # Test channel view - index = get_index_of_row_column(widget.content_table, 0, widget.model.column_position[Column.NAME]) - widget.content_table.on_table_item_clicked(index) - wait_for_list_populated(widget.content_table) - screenshot(window, name=f"{widget_name}-channel_loaded") - - # Click the first torrent - index = get_index_of_row_column(widget.content_table, 0, widget.model.column_position[Column.NAME]) - widget.content_table.on_table_item_clicked(index) - QTest.qWait(WAIT_INTERVAL_MSEC) - screenshot(window, name=f"{widget_name}-torrent_details") - - -@pytest.mark.guitest -def test_discovered_page(window): - QTest.mouseClick(window.left_menu_button_discovered, Qt.LeftButton) - tst_channels_widget(window, window.discovered_page, "discovered_page", sort_column=2) +def fake_core_response_popular(window): + widget = window.popular_page + wait_for_list_populated(widget.content_table, num_items=0) + widget.model.on_query_results({ + 'results': [ + { + 'name': 'Some Torrent', + 'category': 'other', + 'infohash': 'af' * 20, + 'size': 1234, + 'num_seeders': 1, + 'num_leechers': 1000000, + 'last_tracker_check': 1500000000, + 'created': 1000000000, + 'tag_processor_version': 5, + 'type': 300, + 'id': 1, + 'origin_id': 0, + 'public_key': '', + 'status': 2, + 'statements': [{ + 'subject_type': 102, + 'object': '2023', + 'predicate': 101, + 'subject': 'ec34c231dde3e92d8d26a17c223152c8541295aa' + }, { + 'subject_type': 102, + 'object': 'Ubuntu', + 'predicate': 101, + 'subject': '382b14edddae478f2148d1ec7c6cc6311d261caf' + }] + }] + }) @pytest.mark.guitest def test_popular_page(window): QTest.mouseClick(window.left_menu_button_popular, Qt.LeftButton) widget = window.popular_page + fake_core_response_popular(window) wait_for_list_populated(widget.content_table) screenshot(window, name="popular_page") -def wait_for_thumbnail(chan_widget): - """ Wait for the thumbnail to be populated. 
- - Args: - chan_widget: The channel widget to check - """ - # Wait for the thumbnail to be populated in intervals of `DEFAULT_WAIT_INTERVAL_MSEC` - for _ in range(0, 1000 * DEFAULT_TIMEOUT_SEC, WAIT_INTERVAL_MSEC): - QTest.qWait(WAIT_INTERVAL_MSEC) - if chan_widget.channel_description_container.channel_thumbnail_bytes is not None: - return - - # thumbnail was not populated in time, fail the test - raise TimeoutException(f"The thumbnail was not shown within {DEFAULT_TIMEOUT_SEC} seconds") - - -@pytest.mark.guitest -def test_edit_channel_torrents(window): - wait_for_list_populated(window.channels_menu_list) - - idx = window.channels_menu_list.model().index(0, 0) - item_pos = window.channels_menu_list.visualRect(idx).center() - QTest.mouseClick(window.channels_menu_list.viewport(), Qt.LeftButton, pos=item_pos) - wait_for_list_populated(window.channel_contents_page.content_table) - screenshot(window, name="edit_channel_committed") - - idx = window.channels_menu_list.model().index(1, 0) - item_pos = window.channels_menu_list.visualRect(idx).center() - QTest.mouseClick(window.channels_menu_list.viewport(), Qt.LeftButton, pos=item_pos) - wait_for_list_populated(window.channel_contents_page.content_table) - wait_for_thumbnail(window.channel_contents_page) - screenshot(window, name="edit_channel_thumbnail_description") - - @pytest.mark.guitest def test_settings(window): QTest.mouseClick(window.settings_button, Qt.LeftButton) @@ -399,8 +339,6 @@ def test_downloads(window): screenshot(window, name="downloads_active") QTest.mouseClick(window.downloads_inactive_button, Qt.LeftButton) screenshot(window, name="downloads_inactive") - QTest.mouseClick(window.downloads_channels_button, Qt.LeftButton) - screenshot(window, name="downloads_channels") @pytest.mark.guitest @@ -451,14 +389,6 @@ def test_search(window): QTest.keyClick(window.top_search_bar, Qt.Key_Enter) QTest.qWait(WAIT_INTERVAL_MSEC) screenshot(window, name="search_loading_page") - tst_channels_widget( - window, - window.search_results_page.results_page_content, - "search_results", - sort_column=2, - test_filter=False, - test_subscribe=False, - ) @pytest.mark.guitest @@ -631,39 +561,13 @@ def test_debug_pane(window): window.debug_window.close() -@pytest.mark.guitest -@pytest.mark.skip(reason="This element not in UI anymore") -def test_trust_page(window): - QTest.mouseClick(window.token_balance_widget, Qt.LeftButton) - wait_for_variable(window, "trust_page.history") - screenshot(window, name="trust_page_values") - - -@pytest.mark.guitest -@pytest.mark.skip(reason="This element not in UI anymore") -def test_big_negative_token_balance(window): - QTest.mouseClick(window.token_balance_widget, Qt.LeftButton) - wait_for_variable(window, "trust_page.history") - window.trust_page.history = negative_token_balance_history - window.trust_page.plot_absolute_values() - screenshot(window, name="big_negative_token_balance") - - -@pytest.mark.guitest -def test_close_dialog_with_esc_button(window): - QTest.mouseClick(window.left_menu_button_new_channel, Qt.LeftButton) - screenshot(window, name="create_new_channel_dialog") - assert window.findChildren(NewChannelDialog) - QTest.keyPress(window, Qt.Key_Escape) - assert not window.findChildren(NewChannelDialog) - - @pytest.mark.guitest def test_tags_dialog(window): """ Test the behaviour of the dialog where a user can edit tags. 
""" QTest.mouseClick(window.left_menu_button_popular, Qt.LeftButton) + fake_core_response_popular(window) widget = window.popular_page wait_for_list_populated(widget.content_table) @@ -757,6 +661,7 @@ def test_tags_dialog(window): assert not tags_input.hasFocus() # Click on a suggestion + widget.content_table.add_tags_dialog.on_received_tag_suggestions({"suggestions": ["Tribler"]}) tag_suggestion_buttons = widget.content_table.add_tags_dialog.dialog_widget.suggestions.findChildren(TagButton) assert tag_suggestion_buttons QTest.mouseClick(tag_suggestion_buttons[0], Qt.LeftButton) @@ -783,6 +688,7 @@ def test_no_tags(window): Test removing all tags from a content item. """ QTest.mouseClick(window.left_menu_button_popular, Qt.LeftButton) + fake_core_response_popular(window) widget = window.popular_page wait_for_list_populated(widget.content_table) diff --git a/src/tribler/gui/tribler_window.py b/src/tribler/gui/tribler_window.py index a7608e8a154..0d67e65d7d3 100644 --- a/src/tribler/gui/tribler_window.py +++ b/src/tribler/gui/tribler_window.py @@ -3,7 +3,6 @@ import signal import sys import time -from base64 import b64encode from pathlib import Path from typing import Optional @@ -47,9 +46,6 @@ from tribler.core.utilities.network_utils import default_network_utils from tribler.core.utilities.process_manager import ProcessManager from tribler.core.utilities.rest_utils import ( - FILE_SCHEME, - MAGNET_SCHEME, - scheme_from_url, url_is_valid_file, url_to_path, ) @@ -65,22 +61,15 @@ BUTTON_TYPE_NORMAL, CATEGORY_SELECTOR_FOR_POPULAR_ITEMS, DARWIN, - PAGE_CHANNEL_CONTENTS, - PAGE_DISCOVERED, - PAGE_DISCOVERING, PAGE_DOWNLOADS, PAGE_LOADING, PAGE_POPULAR, PAGE_SEARCH_RESULTS, PAGE_SETTINGS, - PAGE_TRUST, - PAGE_TRUST_GRAPH_PAGE, SHUTDOWN_WAITING_PERIOD, ) -from tribler.gui.dialogs.addtopersonalchanneldialog import AddToChannelDialog from tribler.gui.dialogs.confirmationdialog import ConfirmationDialog from tribler.gui.dialogs.createtorrentdialog import CreateTorrentDialog -from tribler.gui.dialogs.new_channel_dialog import NewChannelDialog from tribler.gui.dialogs.startdownloaddialog import StartDownloadDialog from tribler.gui.error_handler import ErrorHandler from tribler.gui.event_request_manager import EventRequestManager @@ -105,10 +94,8 @@ show_message_box, tr, ) -from tribler.gui.widgets.channelsmenulistwidget import ChannelsMenuListWidget from tribler.gui.widgets.instanttooltipstyle import InstantTooltipStyle from tribler.gui.widgets.tablecontentmodel import ( - DiscoveredChannelsModel, PopularTorrentsModel, ) from tribler.gui.widgets.triblertablecontrollers import ( @@ -215,7 +202,6 @@ def __init__( self.start_download_dialog_active = False self.selected_torrent_files = [] self.start_time = time.time() - self.token_refresh_timer = None self.shutdown_timer = None self.add_torrent_url_dialog_active = False @@ -236,8 +222,6 @@ def __init__( RequestManager.window = self self.tribler_status_bar.hide() - self.token_balance_widget.mouseReleaseEvent = self.on_token_balance_click - self.magnet_handler = MagnetHandler(self.window) QDesktopServices.setUrlHandler("magnet", self.magnet_handler, "on_open_magnet_link") @@ -265,7 +249,6 @@ def __init__( self.menu_buttons = [ self.left_menu_button_downloads, - self.left_menu_button_discovered, self.left_menu_button_popular, ] @@ -276,9 +259,6 @@ def __init__( self.settings_page.initialize_settings_page(version_history=self.version_history) self.downloads_page.initialize_downloads_page() self.loading_page.initialize_loading_page() - 
self.discovering_page.initialize_discovering_page() - - self.discovered_page.initialize_content_page(hide_xxx=self.hide_xxx) self.popular_page.initialize_content_page( hide_xxx=self.hide_xxx, @@ -286,9 +266,6 @@ def __init__( categories=CATEGORY_SELECTOR_FOR_POPULAR_ITEMS, ) - self.trust_page.initialize_trust_page() - self.trust_graph_page.initialize_trust_graph() - self.stackedWidget.setCurrentIndex(PAGE_LOADING) # Create the system tray icon @@ -315,7 +292,6 @@ def __init__( self.debug_panel_button.setHidden(True) self.top_menu_button.setHidden(True) self.left_menu.setHidden(True) - self.token_balance_widget.setHidden(True) self.settings_button.setHidden(True) self.add_torrent_button.setHidden(True) self.top_search_bar.setHidden(True) @@ -366,24 +342,10 @@ def sigint_handler(*_): self.show() - self.add_to_channel_dialog = AddToChannelDialog(self.window()) - self.add_torrent_menu = self.create_add_torrent_menu() self.add_torrent_button.setMenu(self.add_torrent_menu) - self.channels_menu_list = self.findChild(ChannelsMenuListWidget, "channels_menu_list") - - connect(self.channels_menu_list.itemClicked, self.open_channel_contents_page) - - # The channels content page is only used to show subscribed channels, so we always show xxx - # contents in it. - connect( - self.core_manager.events_manager.node_info_updated, - lambda data: self.channels_menu_list.reload_if_necessary([data]), - ) - connect(self.left_menu_button_new_channel.clicked, self.create_new_channel) connect(self.debug_panel_button.clicked, self.clicked_debug_panel_button) - connect(self.trust_graph_button.clicked, self.clicked_trust_graph_page_button) # Apply a custom style to our checkboxes, with custom images. stylesheet = self.styleSheet() @@ -435,27 +397,6 @@ def restore_position(): restore_size() restore_position() - def create_new_channel(self, checked): - # TODO: DRY this with tablecontentmodel, possibly using QActions - - def update_channels_state(_): - self.channels_menu_list.load_channels() - self.add_to_channel_dialog.clear_channels_tree() - - def create_channel_callback(channel_name): - request_manager.post("channels/mychannel/0/channels", update_channels_state, - data={"name": channel_name} if channel_name else None) - - NewChannelDialog(self, create_channel_callback) - - def open_channel_contents_page(self, channel_list_item): - if not channel_list_item.flags() & Qt.ItemIsEnabled: - return - - self.channel_contents_page.initialize_root_model_from_channel_info(channel_list_item.channel_info) - self.stackedWidget.setCurrentIndex(PAGE_CHANNEL_CONTENTS) - self.deselect_all_menu_buttons() - def update_tray_icon(self, use_monochrome_icon): if not QSystemTrayIcon.isSystemTrayAvailable() or not self.tray_icon: return @@ -510,7 +451,6 @@ def on_torrent_finished(self, torrent_info): def show_loading_screen(self): self.top_menu_button.setHidden(True) self.left_menu.setHidden(True) - self.token_balance_widget.setHidden(True) self.debug_panel_button.setHidden(True) self.settings_button.setHidden(True) self.add_torrent_button.setHidden(True) @@ -564,7 +504,6 @@ def start_ui(self): self.top_menu_button.setHidden(False) self.left_menu.setHidden(False) - # self.token_balance_widget.setHidden(False) # restore it after the token balance calculation is fixed self.settings_button.setHidden(False) self.add_torrent_button.setHidden(False) self.top_search_bar.setHidden(False) @@ -574,35 +513,12 @@ def start_ui(self): self.setAcceptDrops(True) self.setWindowTitle(f"Tribler {self.tribler_version}") - autocommit_enabled = ( - 
get_gui_setting(self.gui_settings, "autocommit_enabled", True, is_bool=True) if self.gui_settings else True - ) - self.channel_contents_page.initialize_content_page(autocommit_enabled=autocommit_enabled, hide_xxx=False) - - self.discovered_page.initialize_root_model( - DiscoveredChannelsModel( - channel_info={"name": tr("Discovered channels")}, endpoint_url="channels", hide_xxx=self.hide_xxx - ) - ) - connect(self.core_manager.events_manager.discovered_channel, self.discovered_page.model.on_new_entry_received) - self.popular_page.initialize_root_model( PopularTorrentsModel(channel_info={"name": tr("Popular torrents")}, hide_xxx=self.hide_xxx) ) self.popular_page.explanation_tooltip_button.setHidden(False) - self.add_to_channel_dialog.load_channel(0) - - if not self.gui_settings.value("first_discover", False) and not self.core_manager.use_existing_core: - connect(self.core_manager.events_manager.discovered_channel, self.stop_discovering) - self.window().gui_settings.setValue("first_discover", True) - self.discovering_page.is_discovering = True - self.stackedWidget.setCurrentIndex(PAGE_DISCOVERING) - else: - self.clicked_menu_button_discovered() - self.left_menu_button_discovered.setChecked(True) - - self.channels_menu_list.load_channels() + self.clicked_menu_button_downloads() # Toggle debug if developer mode is enabled self.window().debug_panel_button.setHidden(not get_gui_setting(self.gui_settings, "debug", False, is_bool=True)) @@ -615,15 +531,6 @@ def start_ui(self): def hide_xxx(self): return get_gui_setting(self.gui_settings, "family_filter", True, is_bool=True) - def stop_discovering(self, response): - if not self.discovering_page.is_discovering: - return - disconnect(self.core_manager.events_manager.discovered_channel, self.stop_discovering) - self.discovering_page.is_discovering = False - if self.stackedWidget.currentIndex() == PAGE_DISCOVERING: - self.clicked_menu_button_discovered() - self.left_menu_button_discovered.setChecked(True) - def on_events_started(self, json_dict): self.setWindowTitle(f"Tribler {json_dict['version']}") @@ -669,7 +576,6 @@ def perform_start_download_request( safe_seeding, destination, selected_files, - add_to_channel=False, callback=None, ): # Check if destination directory is writable @@ -698,40 +604,6 @@ def perform_start_download_request( self.update_recent_download_locations(destination) - if add_to_channel: - self.show_add_torrent_to_channel_dialog_from_uri(uri) - - def show_add_torrent_to_channel_dialog_from_uri(self, uri): - def on_add_button_pressed(channel_id): - post_data = {} - scheme = scheme_from_url(uri) - if scheme == FILE_SCHEME: - file_path = url_to_path(uri) - content = Path(file_path).read_bytes() - post_data['torrent'] = b64encode(content).decode('ascii') - elif scheme == MAGNET_SCHEME: - post_data['uri'] = uri - - if post_data: - request_manager.put(f"channels/mychannel/{channel_id}/torrents", - on_success=lambda _: self.tray_show_message(tr("Channel update"), - tr("Torrent(s) added to your channel")), - data=post_data) - - self.window().add_to_channel_dialog.show_dialog(on_add_button_pressed, confirm_button_text="Add torrent") - - def show_add_torrent_to_channel_dialog_from_torrent_data(self, torrent_data): - def on_add_button_pressed(channel_id): - post_data = {'torrent': torrent_data} - - if post_data: - request_manager.put(f"channels/mychannel/{channel_id}/torrents", - on_success=lambda _: self.tray_show_message(tr("Channel update"), - tr("Torrent(s) added to your channel")), - data=post_data) - - 
self.window().add_to_channel_dialog.show_dialog(on_add_button_pressed, confirm_button_text="Add torrent") - def on_new_version_available(self, version): self.upgrade_manager.on_new_version_available(tribler_window=self, new_version=version) @@ -756,49 +628,6 @@ def on_settings_button_click(self): self.stackedWidget.setCurrentIndex(PAGE_SETTINGS) self.settings_page.load_settings() - def enable_token_balance_refresh(self): - # Set token balance refresh timer and load the token balance - self.token_refresh_timer = QTimer() - connect(self.token_refresh_timer.timeout, self.load_token_balance) - self.token_refresh_timer.start(2000) - - self.load_token_balance() - - def on_token_balance_click(self, _): - self.raise_window() - self.deselect_all_menu_buttons() - self.stackedWidget.setCurrentIndex(PAGE_TRUST) - self.load_token_balance() - self.trust_page.load_history() - - def load_token_balance(self): - request_manager.get("bandwidth/statistics", self.received_bandwidth_statistics, capture_errors=False) - - def received_bandwidth_statistics(self, statistics): - if not statistics or "statistics" not in statistics: - return - - self.trust_page.received_bandwidth_statistics(statistics) - - statistics = statistics["statistics"] - balance = statistics["total_given"] - statistics["total_taken"] - self.set_token_balance(balance) - - # If trust page is currently visible, then load the graph as well - if self.stackedWidget.currentIndex() == PAGE_TRUST: - self.trust_page.load_history() - - def set_token_balance(self, balance): - if abs(balance) > 1024 ** 4: # Balance is over a TB - balance /= 1024.0 ** 4 - self.token_balance_label.setText(f"{balance:.1f} TB") - elif abs(balance) > 1024 ** 3: # Balance is over a GB - balance /= 1024.0 ** 3 - self.token_balance_label.setText(f"{balance:.1f} GB") - else: - balance /= 1024.0 ** 2 - self.token_balance_label.setText("%d MB" % balance) - def on_system_tray_icon_activated(self, reason): if reason != QSystemTrayIcon.DoubleClick: return @@ -845,7 +674,6 @@ def on_create_torrent(self, checked): self.create_dialog = CreateTorrentDialog(self) connect(self.create_dialog.create_torrent_notification, self.on_create_torrent_updates) - connect(self.create_dialog.add_to_channel_selected, self.show_add_torrent_to_channel_dialog_from_torrent_data) self.create_dialog.show() def on_create_torrent_updates(self, update_dict): @@ -893,8 +721,7 @@ def on_start_download_action(self, action): self.dialog.dialog_widget.anon_download_checkbox.isChecked(), self.dialog.dialog_widget.safe_seed_checkbox.isChecked(), self.dialog.dialog_widget.destination_input.currentText(), - self.dialog.dialog_widget.files_list_view.get_selected_files_indexes(), - add_to_channel=self.dialog.dialog_widget.add_to_channel_checkbox.isChecked(), + self.dialog.dialog_widget.files_list_view.get_selected_files_indexes() ) else: ConfirmationDialog.show_error( @@ -934,25 +761,6 @@ def on_add_torrent_browse_dir(self, checked): def on_confirm_add_directory_dialog(self, action): if action == 0: - if self.dialog.checkbox.isChecked(): - # TODO: add recursive directory scanning - def on_add_button_pressed(channel_id): - if not Path(self.chosen_dir).is_dir(): - show_message_box(f'"{self.chosen_dir}" is not a directory') - return - - request_manager.put( - endpoint=f"channels/mychannel/{channel_id}/torrents", - on_success=lambda _: self.tray_show_message( - tr("Channels update"), tr("%s added to your channel") % self.chosen_dir - ), - data={"torrents_dir": self.chosen_dir} - ) - - 
self.window().add_to_channel_dialog.show_dialog( - on_add_button_pressed, confirm_button_text=tr("Add torrent(s)") - ) - for torrent_file in self.selected_torrent_files: self.perform_start_download_request( torrent_file.as_uri(), @@ -1049,15 +857,6 @@ def on_top_search_bar_return_pressed(self): self.deselect_all_menu_buttons() self.stackedWidget.setCurrentIndex(PAGE_SEARCH_RESULTS) - def clicked_menu_button_discovered(self): - self.deselect_all_menu_buttons() - self.left_menu_button_discovered.setChecked(True) - if self.stackedWidget.currentIndex() == PAGE_DISCOVERED: - self.discovered_page.go_back_to_level(0) - self.discovered_page.reset_view() - self.stackedWidget.setCurrentIndex(PAGE_DISCOVERED) - self.discovered_page.content_table.setFocus() - def clicked_menu_button_popular(self): self.deselect_all_menu_buttons() self.left_menu_button_popular.setChecked(True) @@ -1067,10 +866,6 @@ def clicked_menu_button_popular(self): self.stackedWidget.setCurrentIndex(PAGE_POPULAR) self.popular_page.content_table.setFocus() - def clicked_trust_graph_page_button(self, _): - self.deselect_all_menu_buttons() - self.stackedWidget.setCurrentIndex(PAGE_TRUST_GRAPH_PAGE) - def clicked_menu_button_downloads(self): self.deselect_all_menu_buttons(self.left_menu_button_downloads) self.raise_window() @@ -1120,10 +915,6 @@ def show_force_shutdown(): self.downloads_page.stop_refreshing_downloads() request_manager.clear() - # Stop the token balance timer - if self.token_refresh_timer: - self.token_refresh_timer.stop() - def closeEvent(self, close_event): self.close_tribler() close_event.ignore() @@ -1184,58 +975,6 @@ def clicked_skip_conversion(self): def node_info_updated(self, node_info): self.core_manager.events_manager.node_info_updated.emit(node_info) - def on_channel_subscribe(self, channel_info): - patch_data = [{ - "public_key": channel_info['public_key'], - "id": channel_info['id'], - "subscribed": True - }] - request_manager.patch("metadata", lambda data: self.node_info_updated(data[0]), data=patch_data) - - def on_channel_unsubscribe(self, channel_info): - def _on_unsubscribe_action(action): - if action == 0: - patch_data = [{"public_key": channel_info['public_key'], "id": channel_info['id'], "subscribed": False}] - request_manager.patch("metadata", lambda data: self.node_info_updated(data[0]), data=patch_data) - if self.dialog: - self.dialog.close_dialog() - self.dialog = None - - self.dialog = ConfirmationDialog( - self, - tr("Unsubscribe from channel"), - tr("Are you sure you want to unsubscribe from channel
") - + '\"' - + f"{channel_info['name']}" - + '\"' - + tr("
and remove its contents?"), - [(tr("UNSUBSCRIBE"), BUTTON_TYPE_NORMAL), (tr("CANCEL"), BUTTON_TYPE_CONFIRM)], - ) - connect(self.dialog.button_clicked, _on_unsubscribe_action) - self.dialog.show() - - def on_channel_delete(self, channel_info): - def _on_delete_action(action): - if action == 0: - delete_data = [{"public_key": channel_info['public_key'], "id": channel_info['id']}] - request_manager.delete("metadata", lambda data: self.node_info_updated(data[0]), data=delete_data) - if self.dialog: - self.dialog.close_dialog() - self.dialog = None - - self.dialog = ConfirmationDialog( - self, - tr("Delete channel"), - tr("Are you sure you want to delete your personal channel
") - + '\"' - + f"{channel_info['name']}" - + '\"' - + tr("
and all its contents?"), - [(tr("DELETE"), BUTTON_TYPE_NORMAL), (tr("CANCEL"), BUTTON_TYPE_CONFIRM)], - ) - connect(self.dialog.button_clicked, _on_delete_action) - self.dialog.show() - def on_skip_conversion_dialog(self, action): if action == 0: self.upgrade_manager.stop_upgrade() diff --git a/src/tribler/gui/widgets/channelcontentswidget.py b/src/tribler/gui/widgets/channelcontentswidget.py index 7645fc5ca85..4831d3f4b2d 100644 --- a/src/tribler/gui/widgets/channelcontentswidget.py +++ b/src/tribler/gui/widgets/channelcontentswidget.py @@ -1,12 +1,12 @@ from base64 import b64encode from PyQt5 import uic -from PyQt5.QtCore import QDir, QTimer, Qt, pyqtSignal +from PyQt5.QtCore import QDir, Qt, pyqtSignal from PyQt5.QtGui import QIcon from PyQt5.QtWidgets import QAction, QFileDialog from psutil import LINUX -from tribler.core.components.metadata_store.db.orm_bindings.channel_node import DIRTY_STATUSES, NEW +from tribler.core.components.metadata_store.db.orm_bindings.torrent_metadata import NEW from tribler.core.components.metadata_store.db.serialization import CHANNEL_TORRENT, COLLECTION_NODE from tribler.core.utilities.simpledefs import CHANNEL_STATE from tribler.gui.defs import ( @@ -16,22 +16,13 @@ ContentCategories, ) from tribler.gui.dialogs.confirmationdialog import ConfirmationDialog -from tribler.gui.dialogs.new_channel_dialog import NewChannelDialog from tribler.gui.network.request_manager import request_manager from tribler.gui.sentry_mixin import AddBreadcrumbOnShowMixin from tribler.gui.tribler_action_menu import TriblerActionMenu from tribler.gui.utilities import connect, disconnect, get_image_path, get_ui_file_path, tr -from tribler.gui.widgets.tablecontentmodel import ( - ChannelContentModel, - ChannelPreviewModel, - DiscoveredChannelsModel, - PersonalChannelsModel, - SearchResultsModel, - SimplifiedPersonalChannelsModel, -) +from tribler.gui.widgets.tablecontentmodel import ChannelContentModel, SearchResultsModel from tribler.gui.widgets.triblertablecontrollers import ContentTableViewController -CHANNEL_COMMIT_DELAY = 30000 # milliseconds widget_form, widget_class = uic.loadUiType(get_ui_file_path('torrents_list.ui')) @@ -63,8 +54,6 @@ def __init__(self, parent=None): self.chosen_dir = None self.dialog = None self.controller = None - self.commit_timer = None - self.autocommit_enabled = None self.channel_options_menu = None self.channels_stack = [] @@ -92,20 +81,14 @@ def __exit__(self, *args): obj.blockSignals(False) self.freeze_controls = freeze_controls_class - self.channel_description_container.setHidden(True) self.explanation_tooltip_button.setHidden(True) def hide_all_labels(self): self.edit_channel_contents_top_bar.setHidden(True) - self.subscription_widget.setHidden(True) self.channel_num_torrents_label.setHidden(True) self.channel_state_label.setHidden(True) - @property - def personal_channel_model(self): - return SimplifiedPersonalChannelsModel if self.autocommit_enabled else PersonalChannelsModel - @property def model(self): return self.channels_stack[-1] if self.channels_stack else None @@ -114,27 +97,8 @@ def model(self): def root_model(self): return self.channels_stack[0] if self.channels_stack else None - def on_channel_committed(self, response): - if not response or not response.get("success", False): - return - - if not self.autocommit_enabled: - self.commit_control_bar.setHidden(True) - - if not self.model: - return - - info = self.model.channel_info - if info.get("state") == "Personal" and info.get("dirty"): - self.model.reset() - self.update_labels() - - 
def commit_channels(self, checked=False): # pylint: disable=W0613 - request_manager.post("channels/mychannel/0/commit", on_success=self.on_channel_committed) - def initialize_content_page( self, - autocommit_enabled=False, hide_xxx=None, controller_class=ContentTableViewController, categories=CATEGORY_SELECTOR_FOR_SEARCH_ITEMS, @@ -148,9 +112,9 @@ def initialize_content_page( self.category_selector.addItems(self.categories) connect(self.category_selector.currentIndexChanged, self.on_category_selector_changed) self.channel_back_button.setIcon(QIcon(get_image_path('page_back.png'))) + self.channel_back_button.setHidden(True) connect(self.channel_back_button.clicked, self.go_back) connect(self.channel_name_label.linkActivated, self.on_breadcrumb_clicked) - self.commit_control_bar.setHidden(True) if LINUX: # On Linux, the default font sometimes does not contain the emoji characters. @@ -158,30 +122,7 @@ def initialize_content_page( self.controller = controller_class(self.content_table, filter_input=self.channel_torrents_filter_input) - # Hide channel description on scroll - connect(self.controller.table_view.verticalScrollBar().valueChanged, self._on_table_scroll) - - self.autocommit_enabled = autocommit_enabled - if self.autocommit_enabled: - self._enable_autocommit_timer() - - # New channel button - connect(self.new_channel_button.clicked, self.create_new_channel) - connect(self.content_table.channel_clicked, self.on_channel_clicked) - connect(self.edit_channel_commit_button.clicked, self.commit_channels) - - self.subscription_widget.initialize(self) - - self.channel_options_menu = self.create_channel_options_menu() - self.channel_options_button.setMenu(self.channel_options_menu) - connect(self.channel_description_container.became_hidden, self.run_brain_dead_refresh) - connect(self.channel_description_container.description_changed, self._description_changed) - def _description_changed(self): - # Initialize commit timer on channel description change - if self.autocommit_enabled: - self.commit_timer.stop() - self.commit_timer.start(CHANNEL_COMMIT_DELAY) self.model.channel_info["dirty"] = True self.update_labels() @@ -189,33 +130,6 @@ def run_brain_dead_refresh(self): if self.model: self.controller.brain_dead_refresh() - def _on_table_scroll(self, event): # pylint: disable=unused-argument - # Hide the description widget when the channel is scrolled down - if not self.model.data_items: - return - - scrollbar = self.controller.table_view.verticalScrollBar() - container = self.channel_description_container - - is_time_to_hide = scrollbar.minimum() < scrollbar.value() - 10 and scrollbar.maximum() > 100 - is_time_to_show = scrollbar.minimum() == scrollbar.value() - - if is_time_to_hide and not container.isHidden(): - container.setHidden(True) - elif is_time_to_show and container.isHidden() and container.initialized: - container.setHidden(False) - - def _enable_autocommit_timer(self): - self.commit_timer = QTimer() - self.commit_timer.setSingleShot(True) - connect(self.commit_timer.timeout, self.commit_channels) - - # Commit the channel just in case there are uncommitted changes left since the last time (e.g. 
Tribler crashed) - # The timer thing here is a workaround for race condition with the core startup - self.controller.table_view.setColumnHidden(3, True) - self.commit_timer.stop() - self.commit_timer.start(10000) - def on_category_selector_changed(self, ind): category = self.categories[ind] if ind else None content_category = ContentCategories.get(category) @@ -246,22 +160,10 @@ def on_model_info_changed(self, changed_entries): self.window().channels_menu_list.reload_if_necessary(changed_entries) dirty = False structure_changed = False - for entry in changed_entries: - dirty = dirty or entry.get('status', None) in DIRTY_STATUSES - structure_changed = ( - structure_changed - or entry.get("state", None) == "Deleted" - or (entry.get("type", None) in [CHANNEL_TORRENT, COLLECTION_NODE] and entry[ - "status"] in DIRTY_STATUSES) - ) if structure_changed: self.window().add_to_channel_dialog.clear_channels_tree() - if self.autocommit_enabled and dirty: - self.commit_timer.stop() - self.commit_timer.start(CHANNEL_COMMIT_DELAY) - self.model.channel_info["dirty"] = dirty self.update_labels() @@ -300,13 +202,11 @@ def reset_view(self, text_filter=None, category_filter=None): def disconnect_current_model(self): disconnect(self.window().core_manager.events_manager.node_info_updated, self.model.update_node_info) - disconnect(self.model.info_changed, self.on_model_info_changed) disconnect(self.model.query_complete, self.on_model_query_completed) self.controller.unset_model() # Disconnect the selectionChanged signal def connect_current_model(self): - connect(self.model.info_changed, self.on_model_info_changed) connect(self.model.query_complete, self.on_model_query_completed) connect(self.window().core_manager.events_manager.node_info_updated, self.model.update_node_info) @@ -364,26 +264,6 @@ def go_back_to_level(self, level): self.connect_current_model() self.update_labels() - def on_channel_clicked(self, channel_dict): - self.initialize_with_channel(channel_dict) - - def create_new_channel(self, checked): # pylint: disable=W0613 - NewChannelDialog(self, self.model.create_new_channel) - - def initialize_with_channel(self, channel_info): - # Hide the edit controls by default, to prevent the user clicking the buttons prematurely - self.hide_all_labels() - # Turn off sorting by default to speed up SQL queries - if channel_info.get("state") == CHANNEL_STATE.PREVIEW.value: - self.push_channels_stack(ChannelPreviewModel(channel_info=channel_info)) - else: - self.push_channels_stack(self.default_channel_model(channel_info=channel_info)) - self.controller.set_model(self.model) - self.update_navigation_breadcrumbs() - self.controller.table_view.deselect_all_rows() - self.controller.table_view.resizeEvent(None) - - self.content_table.setFocus() def update_navigation_breadcrumbs(self): # Assemble the channels navigation breadcrumb by utilising RichText links feature @@ -432,41 +312,20 @@ def update_labels(self): personal = self.model.channel_info.get("state", None) == CHANNEL_STATE.PERSONAL.value root = self.current_level == 0 legacy = self.model.channel_info.get("state", None) == CHANNEL_STATE.LEGACY.value - discovered = isinstance(self.model, DiscoveredChannelsModel) - personal_model = isinstance(self.model, PersonalChannelsModel) is_a_channel = self.model.channel_info.get("type", None) == CHANNEL_TORRENT - description_flag = self.model.channel_info.get("description_flag") - thumbnail_flag = self.model.channel_info.get("thumbnail_flag") - dirty = self.model.channel_info.get("dirty") 
self.update_navigation_breadcrumbs() - info = self.model.channel_info container = self.channel_description_container - if is_a_channel and (description_flag or thumbnail_flag or personal_model): - container.initialize_with_channel(info["public_key"], info["id"], edit=personal and personal_model) - else: - container.initialized = False - container.setHidden(True) - - self.category_selector.setHidden(root and (discovered or personal_model)) - # initialize the channel page - - self.edit_channel_contents_top_bar.setHidden(not personal) - self.new_channel_button.setText(tr("NEW CHANNEL") if not is_a_channel and not folder else tr("NEW FOLDER")) - self.channel_options_button.setHidden(not personal_model or not personal or (root and not is_a_channel)) - self.new_channel_button.setHidden(not personal_model or not personal) + container.initialized = False + container.setHidden(True) - self.channel_state_label.setText(self.model.channel_info.get("state", "This text should not ever be shown")) + self.category_selector.setHidden(root) self.subscription_widget.setHidden(not is_a_channel or personal or folder or legacy) if not self.subscription_widget.isHidden(): self.subscription_widget.update_subscribe_button(self.model.channel_info) - self.channel_state_label.setHidden((root and not is_a_channel) or personal) - - self.commit_control_bar.setHidden(self.autocommit_enabled or not dirty or not personal) - if "total" in self.model.channel_info: self.channel_num_torrents_label.setHidden(False) if "torrents" in self.model.channel_info: diff --git a/src/tribler/gui/widgets/channeldescriptionwidget.py b/src/tribler/gui/widgets/channeldescriptionwidget.py deleted file mode 100644 index 3f84d8dd998..00000000000 --- a/src/tribler/gui/widgets/channeldescriptionwidget.py +++ /dev/null @@ -1,280 +0,0 @@ -from pathlib import Path - -from PyQt5 import QtCore, uic -from PyQt5.QtCore import QDir, pyqtSignal, pyqtSlot -from PyQt5.QtGui import QIcon, QImage, QPixmap -from PyQt5.QtWidgets import QFileDialog, QPushButton - -from tribler.gui.dialogs.confirmationdialog import ConfirmationDialog -from tribler.gui.network.request_manager import request_manager -from tribler.gui.sentry_mixin import AddBreadcrumbOnShowMixin -from tribler.gui.utilities import connect, get_image_path, get_ui_file_path, tr - -widget_form, widget_class = uic.loadUiType(get_ui_file_path('channel_description.ui')) - -EDIT_BUTTON = "edit_mode_button" -PREVIEW_BUTTON = "preview_mode_button" -EDIT_BUTTON_NUM = 0 -PREVIEW_BUTTON_NUM = 1 - -DEFAULT_THUMBNAIL_PIXMAP = QPixmap(get_image_path('chan_thumb.png')) -CREATE_THUMBNAIL_TEXT = tr("Click this to add \n channel thumbnail \n (max. 
1MB JPG/PNG)") - -PREVIEW_PAGE = 0 -EDIT_PAGE = 1 - - -class FloatingButtonWidget(QPushButton): - # Solution inspired by https://gist.github.com/namuan/floating_button_widget.py - - def __init__(self, parent): - super().__init__(QIcon(QPixmap(get_image_path('edit.png'))), "", parent) - - self.setGeometry(20, 20, 20, 20) - - self.setFlat(True) - self.paddingRight = 5 - self.paddingTop = 5 - - def update_position(self): - if hasattr(self.parent(), 'viewport'): - parent_rect = self.parent().viewport().rect() - else: - parent_rect = self.parent().rect() - - if not parent_rect: - return - - x = parent_rect.width() - self.width() - self.paddingRight - y = self.paddingTop - self.setGeometry(x, y, self.width(), self.height()) - - def resizeEvent(self, event): - super().resizeEvent(event) - self.update_position() - - -class ChannelDescriptionWidget(AddBreadcrumbOnShowMixin, widget_form, widget_class): - became_hidden = pyqtSignal() - description_changed = pyqtSignal() - - def __init__(self, parent=None): - widget_class.__init__(self, parent=parent) - try: - self.setupUi(self) - except SystemError: - pass - self.edit_mode_tab.initialize() - - # Set the preview tab and button as default - self.edit_mode_tab.buttons[PREVIEW_BUTTON_NUM].setEnabled(True) - self.edit_mode_tab.buttons[PREVIEW_BUTTON_NUM].setChecked(True) - - # Note that button signals are connected - # automatically by connectSlotsByName when loading the .ui file - connect(self.edit_mode_tab.clicked_tab_button, self.tab_button_clicked) - - self.description_text = None - self.channel_thumbnail_bytes = None - self.channel_thumbnail_qimage = None - - self.channel_pk = None - self.channel_id = None - - self.edit_enabled = False - - self.bottom_buttons_container.setHidden(True) - - self.initialized = False - - self.dialog = None - - self.floating_edit_button = FloatingButtonWidget(parent=self.description_text_preview) - self.floating_edit_button.setHidden(True) - connect(self.floating_edit_button.pressed, self.on_start_editing) - - def resizeEvent(self, event): - super().resizeEvent(event) - self.floating_edit_button.update_position() - - def hideEvent(self, event): - # This one is unfortunately necessary to ensure thant brain_dead_refresh will - # run every time this thing is hidden - self.became_hidden.emit() - super().hideEvent(event) - - def showEvent(self, *args): - # This one is unfortunately necessary to ensure thant brain_dead_refresh will - # run every time this thing is shown - self.became_hidden.emit() - super().showEvent(*args) - - def tab_button_clicked(self, button_name): - if button_name == EDIT_BUTTON: - self.switch_to_edit() - elif button_name == PREVIEW_BUTTON: - self.description_text = self.description_text_edit.toPlainText() - self.switch_to_preview() - - def on_start_editing(self): - self.edit_buttons_panel_widget.setHidden(False) - self.floating_edit_button.setHidden(True) - self.switch_to_edit(update_buttons=True) - self.bottom_buttons_container.setHidden(False) - if self.channel_thumbnail_bytes is None: - self.channel_thumbnail.setText(CREATE_THUMBNAIL_TEXT) - - @pyqtSlot() - def on_create_description_button_clicked(self, *args): - self.description_text = "" - self.channel_thumbnail_bytes = None - self.show_description_page() - self.on_start_editing() - - @pyqtSlot() - def on_save_button_clicked(self): - self.bottom_buttons_container.setHidden(True) - self.description_text = self.description_text_edit.toPlainText() - - self.switch_to_preview(update_buttons=True) - - descr_changed = False - thumb_changed = False - - if 
self.description_text is not None: - descr_changed = True - request_manager.put(f'channels/{self.channel_pk}/{self.channel_id}/description', - on_success=self._on_description_received, - data={"description_text": self.description_text}) - - if self.channel_thumbnail_bytes is not None: - thumb_changed = True - - def _on_thumbnail_updated(_): - pass - - request_manager.put(f'channels/{self.channel_pk}/{self.channel_id}/thumbnail', - on_success=_on_thumbnail_updated, - data=self.channel_thumbnail_bytes, - raw_response=True) - - if descr_changed or thumb_changed: - self.description_changed.emit() - - def on_channel_thumbnail_clicked(self): - if not (self.edit_enabled and self.edit_mode_tab.get_selected_index() == EDIT_BUTTON_NUM): - return - filename = QFileDialog.getOpenFileName( - self, - tr("Please select a thumbnail file"), - QDir.homePath(), - filter=(tr("PNG/XPM/JPG images %s") % '(*.png *.xpm *.jpg)'), - )[0] - - if not filename: - return - - content_type = f"image/{str(Path(filename).suffix)[1:]}" - - with open(filename, "rb") as f: - data = f.read() - - if len(data) > 1024 ** 2: - self.dialog = ConfirmationDialog.show_error( - self, - tr(tr("Image too large error")), - tr(tr("Image file you're trying to upload is too large.")), - ) - return - - self.channel_thumbnail_bytes = data - self.update_channel_thumbnail(data, content_type) - - @pyqtSlot() - def on_cancel_button_clicked(self): - self.initialize_with_channel(self.channel_pk, self.channel_id, edit=self.edit_enabled) - - def switch_to_preview(self, update_buttons=False): - self.description_stack_widget.setCurrentIndex(PREVIEW_PAGE) - if self.edit_enabled: - self.floating_edit_button.setHidden(False) - self.description_text_preview.setMarkdown(self.description_text) - self.description_text_preview.setReadOnly(True) - if self.channel_thumbnail_bytes is None: - self.channel_thumbnail.setPixmap(DEFAULT_THUMBNAIL_PIXMAP) - if update_buttons: - self.edit_mode_tab.deselect_all_buttons(except_select=self.edit_mode_tab.buttons[PREVIEW_BUTTON_NUM]) - - def switch_to_edit(self, update_buttons=False): - self.description_stack_widget.setCurrentIndex(EDIT_PAGE) - self.floating_edit_button.setHidden(True) - self.description_text_edit.setPlainText(self.description_text) - self.description_text_edit.setReadOnly(False) - if self.channel_thumbnail_bytes is None: - self.channel_thumbnail.setText(CREATE_THUMBNAIL_TEXT) - if update_buttons: - self.edit_mode_tab.deselect_all_buttons(except_select=self.edit_mode_tab.buttons[EDIT_BUTTON_NUM]) - - def show_create_page(self): - self.create_page.setHidden(False) - self.description_page.setHidden(True) - - def show_description_page(self): - self.create_page.setHidden(True) - self.description_page.setHidden(False) - - def _on_description_received(self, result): - self.description_text = result.get("description_text") if result else None - self.description_text_preview.setMarkdown(self.description_text or "") - - request_manager.get(f'channels/{self.channel_pk}/{self.channel_id}/thumbnail', - on_success=self._on_thumbnail_received, - raw_response=True) - - def set_widget_visible(self, show): - self.bottom_buttons_container.setHidden(True) - self.setHidden(not self.edit_enabled) - if not show: - # No data + edit enabled = invite to create a description - if self.edit_enabled: - self.show_create_page() - return - self.show_description_page() - self.setHidden(False) - self.initialized = True - self.switch_to_preview(update_buttons=True) - self.edit_buttons_panel_widget.setHidden(True) - if self.edit_enabled: - 
self.enable_edit() - else: - self.disable_edit() - - def update_channel_thumbnail(self, image_data: bytes, image_type: str): - w = self.channel_thumbnail.width() - h = self.channel_thumbnail.height() - qimage = QImage.fromData(image_data, image_type.split("/")[1]) - self.channel_thumbnail.setPixmap(QPixmap.fromImage(qimage).scaled(w, h, QtCore.Qt.KeepAspectRatio)) - - def _on_thumbnail_received(self, result_and_header): - result, header = result_and_header - if not (result and header): - self.channel_thumbnail_bytes = None - self.channel_thumbnail.setPixmap(DEFAULT_THUMBNAIL_PIXMAP) - self.set_widget_visible(self.description_text is not None) - return - self.channel_thumbnail_bytes = result - self.update_channel_thumbnail(result, header) - self.set_widget_visible(True) - - def initialize_with_channel(self, channel_pk, channel_id, edit=True): - self.initialized = False - self.edit_enabled = edit - self.floating_edit_button.setHidden(not self.edit_enabled) - self.channel_pk, self.channel_id = channel_pk, channel_id - request_manager.get(f'channels/{self.channel_pk}/{self.channel_id}/description', self._on_description_received) - - def enable_edit(self): - self.floating_edit_button.setHidden(False) - - def disable_edit(self): - self.edit_buttons_panel_widget.setHidden(True) diff --git a/src/tribler/gui/widgets/channelsmenulistwidget.py b/src/tribler/gui/widgets/channelsmenulistwidget.py deleted file mode 100644 index dead62c5279..00000000000 --- a/src/tribler/gui/widgets/channelsmenulistwidget.py +++ /dev/null @@ -1,142 +0,0 @@ -from PyQt5.QtCore import QSize, Qt -from PyQt5.QtGui import QBrush, QColor, QIcon, QPixmap -from PyQt5.QtWidgets import QAbstractItemView, QAbstractScrollArea, QAction, QListWidget, QListWidgetItem - -from tribler.core.components.metadata_store.db.serialization import CHANNEL_TORRENT -from tribler.core.utilities.simpledefs import CHANNEL_STATE -from tribler.gui.network.request_manager import request_manager -from tribler.gui.tribler_action_menu import TriblerActionMenu -from tribler.gui.utilities import connect, get_image_path, tr - - -def entry_to_tuple(entry): - return entry["public_key"], entry["id"], entry.get('subscribed', False), entry.get('state'), entry.get('progress') - - -class ChannelListItem(QListWidgetItem): - loading_brush = QBrush(Qt.darkGray) - - def __init__(self, parent=None, channel_info=None): - self.channel_info = channel_info - title = channel_info.get('name') - QListWidgetItem.__init__(self, title, parent=parent) - # This is necessary to increase vertical height of the items - self.setSizeHint(QSize(50, 25)) - if channel_info.get('state') not in (CHANNEL_STATE.COMPLETE.value, CHANNEL_STATE.PERSONAL.value): - self.setForeground(self.loading_brush) - - def setData(self, role, new_value): - # TODO: call higher-level signal to propagate the change to other widgets - if role == Qt.EditRole: - item = self.channel_info - if item['name'] != new_value: - request_manager.patch(f"metadata/{item['public_key']}/{item['id']}", data={"title": new_value}) - - return super().setData(role, new_value) - - -class ChannelsMenuListWidget(QListWidget): - def __init__(self, parent=None): - QListWidget.__init__(self, parent=parent) - self.base_url = "channels" - self.setSizeAdjustPolicy(QAbstractScrollArea.AdjustToContents) - - # Items set, used for checking changes - self.items_set = frozenset() - self.personal_channel_icon = QIcon(get_image_path("share.png")) - empty_transparent_image = QPixmap(15, 15) - empty_transparent_image.fill(QColor(0, 0, 0, 0)) - 
        self.empty_image = QIcon(empty_transparent_image)
-
-        self.foreign_channel_menu = self.create_foreign_menu()
-        self.personal_channel_menu = self.create_personal_menu()
-        self.setSelectionMode(QAbstractItemView.NoSelection)
-
-    def sizeHint(self):
-        count = self.count()
-        height = self.sizeHintForRow(0) * count if count else 0
-        # !!!ACHTUNG!!!
-        # !!! Qt Bug !!!
-        # Qt never shrinks a QListWidget vertically below the size required to
-        # contain three list items, even if there are no items. sizeHint is
-        # ignored completely; the real minimum size is always at least three
-        # items. Qt also ignores the overloaded self.maximumHeight method.
-        # So, the only way to shrink the widget is to call setMaximumHeight
-        # manually. Qt, I hate you! Why are you doing this to me!?
-        self.setMaximumHeight(height)
-        return QSize(self.width(), height)
-
-    def contextMenuEvent(self, event):
-        item = self.itemAt(event.pos())
-        if item is None:
-            return
-
-        if item.channel_info["state"] == CHANNEL_STATE.PERSONAL.value:
-            self.personal_channel_menu.exec_(self.mapToGlobal(event.pos()))
-        else:
-            self.foreign_channel_menu.exec_(self.mapToGlobal(event.pos()))
-
-    def create_foreign_menu(self):
-        menu = TriblerActionMenu(self)
-        unsubscribe_action = QAction(tr("Unsubscribe"), self)
-        connect(unsubscribe_action.triggered, self._on_unsubscribe_action)
-        menu.addAction(unsubscribe_action)
-        return menu
-
-    def create_personal_menu(self):
-        menu = TriblerActionMenu(self)
-        delete_action = QAction(tr("Delete channel"), self)
-        connect(delete_action.triggered, self._on_delete_action)
-        menu.addAction(delete_action)
-
-        rename_action = QAction(tr("Rename channel"), self)
-        connect(rename_action.triggered, self._trigger_name_editor)
-        menu.addAction(rename_action)
-        return menu
-
-    def _trigger_name_editor(self, checked):
-        self.editItem(self.currentItem())
-
-    def _on_unsubscribe_action(self, checked):
-        self.window().on_channel_unsubscribe(self.currentItem().channel_info)
-
-    def _on_delete_action(self, checked):
-        self.window().on_channel_delete(self.currentItem().channel_info)
-
-    def on_query_results(self, response):
-        channels = response.get('results')
-        if channels is None:
-            return
-        self.clear()
-        for channel_info in sorted(channels, key=lambda x: x.get('state') != 'Personal'):
-            item = ChannelListItem(channel_info=channel_info)
-            self.addItem(item)
-            # ACHTUNG! Qt bug prevents moving this thing into ChannelListItem!
- if channel_info.get('state') == CHANNEL_STATE.PERSONAL.value: - item.setFlags(Qt.ItemIsEnabled | Qt.ItemIsSelectable | Qt.ItemIsEditable) - item.setIcon(self.personal_channel_icon) - else: - # We assign a transparent icon to foreign channels to align - # their text with the personal ones - item.setIcon(self.empty_image) - tooltip_text = channel_info['name'] + "\n" + channel_info['state'] - if channel_info.get('progress'): - tooltip_text += f" {int(float(channel_info['progress']) * 100)}%" - item.setToolTip(tooltip_text) - - self.items_set = frozenset(entry_to_tuple(channel_info) for channel_info in channels) - - def load_channels(self): - request_manager.get(self.base_url, self.on_query_results, url_params={"subscribed": True, "last": 1000}) - - def reload_if_necessary(self, changed_entries): - # Compare the state changes in the changed entries list to our current list - # and update the list if necessary - changeset = frozenset( - entry_to_tuple(entry) - for entry in changed_entries - if entry.get("state") == "Deleted" or entry.get("type") == CHANNEL_TORRENT - ) - need_update = not self.items_set.issuperset(changeset) - if need_update: - self.load_channels() diff --git a/src/tribler/gui/widgets/createtorrentpage.py b/src/tribler/gui/widgets/createtorrentpage.py index 4f377beb021..03168e332c0 100644 --- a/src/tribler/gui/widgets/createtorrentpage.py +++ b/src/tribler/gui/widgets/createtorrentpage.py @@ -4,7 +4,7 @@ from PyQt5.QtGui import QIcon from PyQt5.QtWidgets import QAction, QFileDialog, QWidget -from tribler.gui.defs import BUTTON_TYPE_NORMAL, PAGE_EDIT_CHANNEL_TORRENTS +from tribler.gui.defs import BUTTON_TYPE_NORMAL from tribler.gui.dialogs.confirmationdialog import ConfirmationDialog from tribler.gui.network.request_manager import request_manager from tribler.gui.sentry_mixin import AddBreadcrumbOnShowMixin @@ -19,7 +19,6 @@ class CreateTorrentPage(AddBreadcrumbOnShowMixin, QWidget): def __init__(self): QWidget.__init__(self) - self.channel_identifier = None self.dialog = None self.selected_item_index = -1 self.initialized = False @@ -44,9 +43,6 @@ def initialize(self): self.initialized = True - def on_create_torrent_manage_back_clicked(self, checked): - self.window().edit_channel_details_stacked_widget.setCurrentIndex(PAGE_EDIT_CHANNEL_TORRENTS) - def on_choose_files_clicked(self, checked): filenames, _ = QFileDialog.getOpenFileNames(self.window(), "Please select the files", QDir.homePath()) @@ -104,19 +100,6 @@ def on_torrent_created(self, result): if not result: return self.window().edit_channel_create_torrent_button.setEnabled(True) - if 'torrent' in result: - self.add_torrent_to_channel(result['torrent']) - - def add_torrent_to_channel(self, torrent): - request_manager.put("mychannel/torrents", self.on_torrent_to_channel_added, data={"torrent": torrent}) - - def on_torrent_to_channel_added(self, result): - if not result: - return - self.window().edit_channel_create_torrent_progress_label.hide() - if 'added' in result: - self.window().edit_channel_details_stacked_widget.setCurrentIndex(PAGE_EDIT_CHANNEL_TORRENTS) - self.window().personal_channel_page.load_my_torrents() def on_remove_entry(self): self.window().create_torrent_files_list.takeItem(self.selected_item_index) diff --git a/src/tribler/gui/widgets/discoveringpage.py b/src/tribler/gui/widgets/discoveringpage.py deleted file mode 100644 index dc92a607d09..00000000000 --- a/src/tribler/gui/widgets/discoveringpage.py +++ /dev/null @@ -1,33 +0,0 @@ -from PyQt5.QtWidgets import QWidget - -from tribler.gui.sentry_mixin 
import AddBreadcrumbOnShowMixin -from tribler.gui.utilities import connect -from tribler.gui.widgets.loadingpage import LOADING_ANIMATION - - -class DiscoveringPage(AddBreadcrumbOnShowMixin, QWidget): - """ - The DiscoveringPage is shown when users are starting Tribler for the first time. It hides when there are at least - five discovered channels. - """ - - def __init__(self): - QWidget.__init__(self) - self.found_channels = 0 - self.is_discovering = False - - def initialize_discovering_page(self): - self.window().discovering_svg_view.setScene(LOADING_ANIMATION) - connect(self.window().core_manager.events_manager.discovered_channel, self.on_discovered_channel) - - def on_discovered_channel(self, _): - self.found_channels += 1 - - if self.found_channels >= 5 and self.is_discovering: - self.is_discovering = False - self.window().clicked_menu_button_discovered() - return - - self.window().discovering_top_label.setText( - "Discovering your first content...\n\nFound %d channels" % self.found_channels - ) diff --git a/src/tribler/gui/widgets/downloadspage.py b/src/tribler/gui/widgets/downloadspage.py index 9706a36f2de..1baa861cd33 100644 --- a/src/tribler/gui/widgets/downloadspage.py +++ b/src/tribler/gui/widgets/downloadspage.py @@ -264,14 +264,9 @@ def update_download_visibility(self): continue filter_match = self.window().downloads_filter_input.text().lower() in item.download_info["name"].lower() - is_channel = item.download_info["channel_download"] - if self.filter == DOWNLOADS_FILTER_CHANNELS: - hide = not (is_channel and filter_match) - item.setHidden(hide) - else: - filtered = DOWNLOADS_FILTER_DEFINITION[self.filter] - hide = item.get_status() not in filtered or not filter_match or is_channel - item.setHidden(hide) + filtered = DOWNLOADS_FILTER_DEFINITION[self.filter] + hide = item.get_status() not in filtered or not filter_match + item.setHidden(hide) def on_downloads_tab_button_clicked(self, button_name): self.filter = button_name2filter[button_name] @@ -516,22 +511,6 @@ def on_export_download_request_done(self, filename, data): tr("Torrent file exported"), tr("Torrent file exported to %s") % str(dest_path) ) - def on_add_to_channel(self, checked): - def on_add_button_pressed(channel_id): - for item in self.selected_items: - infohash = item.infohash - name = item.download_info["name"] - request_manager.put( - f"channels/mychannel/{channel_id}/torrents", - on_success=lambda _: self.window().tray_show_message( - tr("Channel update"), - tr("Torrent(s) added to your channel") - ), - data={"uri": compose_magnetlink(infohash, name=name)} - ) - - self.window().add_to_channel_dialog.show_dialog(on_add_button_pressed, confirm_button_text=tr("Add torrent(s)")) - def on_right_click_item(self, pos): item_clicked = self.window().downloads_list.itemAt(pos) if not item_clicked or not self.selected_items: @@ -545,7 +524,6 @@ def on_right_click_item(self, pos): start_action = QAction(tr("Start"), self) stop_action = QAction(tr("Stop"), self) remove_download_action = QAction(tr("Remove download"), self) - add_to_channel_action = QAction(tr("Add to my channel"), self) force_recheck_action = QAction(tr("Force recheck"), self) export_download_action = QAction(tr("Export .torrent file"), self) explore_files_action = QAction(tr("Explore files"), self) @@ -560,7 +538,6 @@ def on_right_click_item(self, pos): start_action.setEnabled(DownloadsPage.start_download_enabled(self.selected_items)) connect(stop_action.triggered, self.on_stop_download_clicked) 
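With the channel special case removed, the visibility rule in `update_download_visibility` above reduces to a single predicate: an entry stays visible only when its status is allowed by the active tab and its name matches the filter text. A rough standalone sketch of that rule, using an illustrative status enum and filter table rather than Tribler's real `DOWNLOADS_FILTER_DEFINITION`:

```python
from enum import Enum, auto


class Status(Enum):
    DOWNLOADING = auto()
    SEEDING = auto()
    STOPPED = auto()


# Illustrative stand-in for the real filter table keyed by tab name.
FILTER_DEFINITION = {
    "all": {Status.DOWNLOADING, Status.SEEDING, Status.STOPPED},
    "active": {Status.DOWNLOADING, Status.SEEDING},
}


def is_hidden(status: Status, name: str, active_filter: str, query: str) -> bool:
    # Mirrors the simplified logic: no channel special-casing, just
    # "status allowed by the tab" AND "name contains the filter text".
    filter_match = query.lower() in name.lower()
    return status not in FILTER_DEFINITION[active_filter] or not filter_match


assert is_hidden(Status.STOPPED, "ubuntu.iso", "active", "ubuntu")
assert not is_hidden(Status.SEEDING, "ubuntu.iso", "active", "ubu")
```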
stop_action.setEnabled(DownloadsPage.stop_download_enabled(self.selected_items)) - connect(add_to_channel_action.triggered, self.on_add_to_channel) connect(remove_download_action.triggered, self.on_remove_download_clicked) connect(force_recheck_action.triggered, self.on_force_recheck_download) force_recheck_action.setEnabled(DownloadsPage.force_recheck_download_enabled(self.selected_items)) @@ -576,8 +553,6 @@ def on_right_click_item(self, pos): menu.addAction(start_action) menu.addAction(stop_action) - menu.addSeparator() - menu.addAction(add_to_channel_action) menu.addSeparator() menu.addAction(remove_download_action) menu.addSeparator() diff --git a/src/tribler/gui/widgets/lazytableview.py b/src/tribler/gui/widgets/lazytableview.py index 15ca7e49024..fa966149a5e 100644 --- a/src/tribler/gui/widgets/lazytableview.py +++ b/src/tribler/gui/widgets/lazytableview.py @@ -5,10 +5,7 @@ from PyQt5.QtGui import QGuiApplication, QMouseEvent, QMovie from PyQt5.QtWidgets import QAbstractItemView, QApplication, QHeaderView, QLabel, QTableView -from tribler.core.components.metadata_store.db.orm_bindings.channel_node import LEGACY_ENTRY -from tribler.core.components.metadata_store.db.serialization import CHANNEL_TORRENT, COLLECTION_NODE, REGULAR_TORRENT, \ - SNIPPET -from tribler.gui.defs import COMMIT_STATUS_COMMITTED +from tribler.core.components.metadata_store.db.serialization import SNIPPET from tribler.gui.dialogs.editmetadatadialog import EditMetadataDialog from tribler.gui.network.request_manager import request_manager from tribler.gui.utilities import connect, data_item2uri, get_image_path, index2uri @@ -49,7 +46,6 @@ class TriblerContentTableView(QTableView): When the user reached the end of the table, it will ask the model for more items, and load them dynamically. """ - channel_clicked = pyqtSignal(dict) torrent_clicked = pyqtSignal(dict) torrent_doubleclicked = pyqtSignal(dict) edited_metadata = pyqtSignal(dict) @@ -183,19 +179,6 @@ def redraw(self, index, redraw_whole_row): for control in self.delegate.controls: control.rect = QRect() - def on_subscribe_control_clicked(self, index): - item = index.model().data_items[index.row()] - # skip LEGACY entries, regular torrents and personal channel - if 'subscribed' not in item or item['status'] == LEGACY_ENTRY or item['state'] == 'Personal': - return - - status = int(item['subscribed']) - - if status: - self.window().on_channel_unsubscribe(item) - else: - self.window().on_channel_subscribe(item) - def on_edit_tags_clicked(self, index: QModelIndex) -> None: self.add_tags_dialog = EditMetadataDialog(self.window(), index) self.add_tags_dialog.show() @@ -218,29 +201,11 @@ def on_table_item_clicked(self, item, doubleclick=False): return data_item = self.model().data_items[item.row()] - # Safely determine if the thing is a channel. 
A little bit hackish - if data_item.get('type') in [CHANNEL_TORRENT, COLLECTION_NODE]: - self.channel_clicked.emit(data_item) - elif data_item.get('type') == REGULAR_TORRENT: - if not doubleclick: - self.torrent_clicked.emit(data_item) - else: - self.torrent_doubleclicked.emit(data_item) - - def on_torrent_status_updated(self, json_result, index): - if not json_result: - return - - if 'success' in json_result and json_result['success']: - index.model().data_items[index.row()]['status'] = json_result['new_status'] - # Note: this should instead use signal and do not address the widget globally - # and properly handle entry removal - self.window().personal_channel_page.channel_dirty = ( - self.table_view.window().edit_channel_page.channel_dirty - or json_result['new_status'] != COMMIT_STATUS_COMMITTED - ) - self.window().personal_channel_page.update_channel_commit_views(deleted_index=index) + if not doubleclick: + self.torrent_clicked.emit(data_item) + else: + self.torrent_doubleclicked.emit(data_item) def on_delete_button_clicked(self, _index): self.model().delete_rows(self.selectionModel().selectedRows()) diff --git a/src/tribler/gui/widgets/searchresultswidget.py b/src/tribler/gui/widgets/searchresultswidget.py index 0bc502782cb..e7112ff4b44 100644 --- a/src/tribler/gui/widgets/searchresultswidget.py +++ b/src/tribler/gui/widgets/searchresultswidget.py @@ -65,7 +65,6 @@ def __init__(self, parent=None): def initialize(self, hide_xxx=False): self.hide_xxx = hide_xxx self.results_page_content.initialize_content_page(hide_xxx=hide_xxx) - self.results_page_content.channel_torrents_filter_input.setHidden(True) @property def has_results(self): @@ -93,7 +92,7 @@ def search(self, query: Query) -> bool: self.last_search_time = time.time() model = SearchResultsModel( - endpoint_url="search", + endpoint_url="search/local", hide_xxx=self.results_page_content.hide_xxx, original_query=query.original_query, text_filter=to_fts_query(query.fts_text), @@ -117,7 +116,7 @@ def register_request(response): params = {'txt_filter': fts_query, 'hide_xxx': self.hide_xxx, 'tags': list(query.tags), 'metadata_type': REGULAR_TORRENT, 'exclude_deleted': True} - request_manager.put('remote_query', register_request, url_params=params) + request_manager.put('search/remote', register_request, url_params=params) return True diff --git a/src/tribler/gui/widgets/settingspage.py b/src/tribler/gui/widgets/settingspage.py index 1db4ba3dd94..d6e7e7e1ccf 100644 --- a/src/tribler/gui/widgets/settingspage.py +++ b/src/tribler/gui/widgets/settingspage.py @@ -60,7 +60,6 @@ def initialize_settings_page(self, version_history): connect(self.window().download_location_chooser_button.clicked, self.on_choose_download_dir_clicked) connect(self.window().watch_folder_chooser_button.clicked, self.on_choose_watch_dir_clicked) - connect(self.window().channel_autocommit_checkbox.stateChanged, self.on_channel_autocommit_checkbox_changed) connect(self.window().family_filter_checkbox.stateChanged, self.on_family_filter_checkbox_changed) connect(self.window().developer_mode_enabled_checkbox.stateChanged, self.on_developer_mode_checkbox_changed) connect(self.window().use_monochrome_icon_checkbox.stateChanged, self.on_use_monochrome_icon_checkbox_changed) @@ -84,9 +83,6 @@ def showEvent(self, *args): super().showEvent(*args) self.window().settings_tab.process_button_click(self.window().settings_general_button) - def on_channel_autocommit_checkbox_changed(self, _): - self.window().gui_settings.setValue("autocommit_enabled", 
self.window().channel_autocommit_checkbox.isChecked()) - def on_family_filter_checkbox_changed(self, _): self.window().gui_settings.setValue("family_filter", self.window().family_filter_checkbox.isChecked()) @@ -176,17 +172,9 @@ def initialize_with_settings(self, settings): self.window().download_settings_anon_seeding_checkbox.setChecked( settings['download_defaults']['safeseeding_enabled'] ) - self.window().download_settings_add_to_channel_checkbox.setChecked( - settings['download_defaults']['add_download_to_channel'] - ) self.window().watchfolder_enabled_checkbox.setChecked(settings['watch_folder']['enabled']) self.window().watchfolder_location_input.setText(settings['watch_folder']['directory']) - # Channel settings - self.window().channel_autocommit_checkbox.setChecked( - get_gui_setting(gui_settings, "autocommit_enabled", True, is_bool=True) - ) - # Tags settings self.window().disable_tags_checkbox.setChecked( get_gui_setting(gui_settings, "disable_tags", False, is_bool=True) @@ -499,9 +487,6 @@ def save_settings(self, checked): settings_data['download_defaults'][ 'safeseeding_enabled' ] = self.window().download_settings_anon_seeding_checkbox.isChecked() - settings_data['download_defaults'][ - 'add_download_to_channel' - ] = self.window().download_settings_add_to_channel_checkbox.isChecked() settings_data['resource_monitor']['enabled'] = self.window().checkbox_enable_resource_log.isChecked() settings_data['resource_monitor']['cpu_priority'] = int(self.window().slider_cpu_level.value()) @@ -536,7 +521,6 @@ def on_settings_saved(self, data): gui_settings.setValue("family_filter", self.window().family_filter_checkbox.isChecked()) gui_settings.setValue("disable_tags", self.window().disable_tags_checkbox.isChecked()) - gui_settings.setValue("autocommit_enabled", self.window().channel_autocommit_checkbox.isChecked()) gui_settings.setValue("ask_download_settings", self.window().always_ask_location_checkbox.isChecked()) gui_settings.setValue("use_monochrome_icon", self.window().use_monochrome_icon_checkbox.isChecked()) gui_settings.setValue("minimize_to_tray", self.window().minimize_to_tray_checkbox.isChecked()) diff --git a/src/tribler/gui/widgets/subscriptionswidget.py b/src/tribler/gui/widgets/subscriptionswidget.py deleted file mode 100644 index a86725f2063..00000000000 --- a/src/tribler/gui/widgets/subscriptionswidget.py +++ /dev/null @@ -1,81 +0,0 @@ -from PyQt5.QtCore import Qt -from PyQt5.QtGui import QFont -from PyQt5.QtWidgets import QLabel, QWidget - -from tribler.gui.sentry_mixin import AddBreadcrumbOnShowMixin -from tribler.gui.utilities import connect, format_votes_rich_text, get_votes_rating_description, tr -from tribler.gui.widgets.tablecontentdelegate import DARWIN, WINDOWS - - -class SubscriptionsWidget(AddBreadcrumbOnShowMixin, QWidget): - """ - This widget shows a favorite button and the number of subscriptions that a specific channel has. 
- """ - - def __init__(self, parent): - QWidget.__init__(self, parent) - self.subscribe_button = None - self.initialized = False - self.contents_widget = None - self.channel_rating_label = None - - def initialize(self, contents_widget): - if not self.initialized: - # We supply a link to the parent channelcontentswidget to use its property that - # returns the current model in use (top of the stack) - self.contents_widget = contents_widget - self.subscribe_button = self.findChild(QWidget, "subscribe_button") - self.channel_rating_label = self.findChild(QLabel, "channel_rating_label") - self.channel_rating_label.setTextFormat(Qt.RichText) - - connect(self.subscribe_button.clicked, self.on_subscribe_button_click) - self.subscribe_button.setToolTip(tr("Click to subscribe/unsubscribe")) - connect(self.subscribe_button.toggled, self._adjust_tooltip) - self.initialized = True - - def _adjust_tooltip(self, toggled): - tooltip = (tr("Subscribed.") if toggled else tr("Not subscribed.")) + tr("\n(Click to unsubscribe)") - self.subscribe_button.setToolTip(tooltip) - - def update_subscribe_button_if_channel_matches(self, changed_channels_list): - # TODO: the stuff requires MAJOR refactoring with properly implemented QT MVC model... - if not (self.contents_widget.model and self.contents_widget.model.channel_info.get("public_key")): - return - for channel_info in changed_channels_list: - if ( - self.contents_widget.model.channel_info["public_key"] == channel_info["public_key"] - and self.contents_widget.model.channel_info["id"] == channel_info["id"] - ): - self.update_subscribe_button(remote_response=channel_info) - return - - def update_subscribe_button(self, remote_response=None): - # A safeguard against race condition that happens when the user changed - # the channel view before the response came in - if self.isHidden(): - return - - if remote_response and "subscribed" in remote_response: - self.contents_widget.model.channel_info["subscribed"] = remote_response["subscribed"] - - self.subscribe_button.setChecked(bool(remote_response["subscribed"])) - self._adjust_tooltip(bool(remote_response["subscribed"])) - - # Update rating display - votes = remote_response['votes'] - self.channel_rating_label.setText(format_votes_rich_text(votes)) - if DARWIN or WINDOWS: - font = QFont() - font.setLetterSpacing(QFont.PercentageSpacing, 60.0) - self.channel_rating_label.setFont(font) - - self.channel_rating_label.setToolTip(get_votes_rating_description(votes)) - - def on_subscribe_button_click(self, checked): - self.subscribe_button.setCheckedInstant(bool(self.contents_widget.model.channel_info["subscribed"])) - channel_info = self.contents_widget.model.channel_info - if channel_info["subscribed"]: - # Show the global unsubscribe confirmation dialog - self.window().on_channel_unsubscribe(channel_info) - else: - self.window().on_channel_subscribe(channel_info) diff --git a/src/tribler/gui/widgets/tablecontentdelegate.py b/src/tribler/gui/widgets/tablecontentdelegate.py index e0d5df3e4b5..005171674aa 100644 --- a/src/tribler/gui/widgets/tablecontentdelegate.py +++ b/src/tribler/gui/widgets/tablecontentdelegate.py @@ -7,7 +7,6 @@ from psutil import LINUX from tribler.core.components.database.db.layers.knowledge_data_access_layer import ResourceType -from tribler.core.components.metadata_store.db.orm_bindings.channel_node import LEGACY_ENTRY from tribler.core.components.metadata_store.db.serialization import CHANNEL_TORRENT, COLLECTION_NODE, REGULAR_TORRENT, \ SNIPPET from tribler.core.utilities.simpledefs import 
CHANNEL_STATE @@ -302,74 +301,6 @@ def createEditor(self, parent, option, index): return super().createEditor(parent, option, index) -class ChannelStateMixin: - wait_png = QIcon(get_image_path("wait.png")) - share_icon = QIcon(get_image_path("share.png")) - downloading_icon = QIcon(get_image_path("downloads.png")) - - @staticmethod - def get_indicator_rect(rect): - r = rect - indicator_border = 1 - indicator_side = (r.height() if r.width() > r.height() else r.width()) - indicator_border * 2 - y = int(r.top() + (r.height() - indicator_side) // 2) - x = r.left() + indicator_border - w = indicator_side - h = indicator_side - indicator_rect = QRect(x, y, w, h) - return indicator_rect - - def draw_channel_state(self, painter, option, index, data_item): - # Draw empty cell as the background - - self.paint_empty_background(painter, option) - text_rect = option.rect - - if data_item['status'] == CHANNEL_STATE.LEGACY.value: - painter.drawText(text_rect, Qt.AlignCenter, "Legacy") - return True - - if 'type' in data_item and data_item['type'] != CHANNEL_TORRENT: - return True - if data_item['state'] == CHANNEL_STATE.COMPLETE.value: - painter.drawText(text_rect, Qt.AlignCenter, "✔") - return True - if data_item['state'] == CHANNEL_STATE.PERSONAL.value: - self.share_icon.paint(painter, self.get_indicator_rect(option.rect)) - return True - if data_item['state'] == CHANNEL_STATE.DOWNLOADING.value: - painter.drawText(text_rect, Qt.AlignCenter, "⏳") - return True - if data_item['state'] == CHANNEL_STATE.METAINFO_LOOKUP.value: - painter.drawText(text_rect, Qt.AlignCenter, "❓") - return True - if data_item['state'] == CHANNEL_STATE.UPDATING.value: - progress = data_item.get('progress') - if progress is not None: - draw_progress_bar(painter, option.rect, float(progress)) - return True - return True - - -class SubscribedControlMixin: - def draw_subscribed_control(self, painter, option, index, data_item): - # Draw empty cell as the background - self.paint_empty_background(painter, option) - - if 'type' in data_item and data_item['type'] != CHANNEL_TORRENT: - return True - if data_item['status'] == LEGACY_ENTRY: - return True - if data_item['state'] == 'Personal': - return True - - self.subscribe_control.paint( - painter, option.rect, index, toggled=data_item.get('subscribed'), hover=index == self.hover_index - ) - - return True - - class TagsMixin: edit_tags_icon = QIcon(get_image_path("edit_white.png")) edit_tags_icon_hover = QIcon(get_image_path("edit_orange.png")) @@ -530,13 +461,6 @@ def draw_rating_control(self, painter, option, index, data_item): # Draw empty cell as the background self.paint_empty_background(painter, option) - if 'type' in data_item and data_item['type'] != CHANNEL_TORRENT: - return True - if data_item['status'] == LEGACY_ENTRY: - return True - - self.rating_control.paint(painter, option.rect, index, votes=data_item['votes']) - return True @@ -545,12 +469,7 @@ def draw_category_label(self, painter, option, index, data_item): # Draw empty cell as the background self.paint_empty_background(painter, option) - if 'type' in data_item and data_item['type'] == CHANNEL_TORRENT: - if data_item['state'] == 'Personal': - category_txt = "\U0001F3E0" # 'home' emoji - else: - category_txt = "🌐" - elif 'type' in data_item and data_item['type'] == COLLECTION_NODE: + if 'type' in data_item and data_item['type'] == COLLECTION_NODE: category_txt = "\U0001F4C1" # 'folder' emoji else: # Precautions to safely draw wrong category descriptions @@ -617,15 +536,11 @@ class TriblerContentDelegate( 
RatingControlMixin, DownloadControlsMixin, HealthLabelMixin, - ChannelStateMixin, - SubscribedControlMixin, TagsMixin, ): def __init__(self, table_view, parent=None): # TODO: refactor this not to rely on inheritance order, but instead use interface method pattern TriblerButtonsDelegate.__init__(self, parent) - self.subscribe_control = SubscribeToggleControl(Column.SUBSCRIBED) - self.rating_control = RatingControl(Column.VOTES) self.download_button = DownloadIconButton() self.ondemand_container = [self.download_button] @@ -633,21 +548,16 @@ def __init__(self, table_view, parent=None): self.commit_control = CommitStatusControl(Column.STATUS) self.health_status_widget = HealthStatusControl(Column.HEALTH) self.controls = [ - self.subscribe_control, self.download_button, - self.commit_control, - self.rating_control, self.health_status_widget, ] self.column_drawing_actions = [ - (Column.SUBSCRIBED, self.draw_subscribed_control), (Column.NAME, self.draw_title_and_tags), (Column.VOTES, self.draw_rating_control), (Column.ACTIONS, self.draw_action_column), (Column.CATEGORY, self.draw_category_label), (Column.HEALTH, self.draw_health_column), (Column.STATUS, self.draw_commit_status_column), - (Column.STATE, self.draw_channel_state), ] self.table_view = table_view @@ -705,95 +615,6 @@ def paint(self, painter, option, _, draw_border=True): painter.restore() -class SubscribeToggleControl(QObject, CheckClickedMixin): - clicked = pyqtSignal(QModelIndex) - - def __init__(self, column_name, parent=None): - QObject.__init__(self, parent=parent) - self.column_name = column_name - self.last_index = QModelIndex() - - self._track_radius = 10 - self._thumb_radius = 8 - self._line_thickness = self._track_radius - self._thumb_radius - self._margin = max(0, self._thumb_radius - self._track_radius) - self._base_offset = max(self._thumb_radius, self._track_radius) - - self._width = 4 * self._track_radius + 2 * self._margin - self._height = 2 * self._track_radius + 2 * self._margin - - self._end_offset = {True: lambda: self._width - self._base_offset, False: lambda: self._base_offset} - - self._offset = self._base_offset - - self._thumb_color = {True: TRIBLER_PALETTE.highlightedText(), False: TRIBLER_PALETTE.light()} - self._track_color = {True: TRIBLER_PALETTE.highlight(), False: TRIBLER_PALETTE.dark()} - self._text_color = {True: TRIBLER_PALETTE.highlight().color(), False: TRIBLER_PALETTE.dark().color()} - self._thumb_text = {True: '✔', False: '✕'} - self._track_opacity = 0.8 - - def paint(self, painter, rect, index, toggled=False, hover=False): - data_item = index.model().data_items[index.row()] - complete = data_item.get('state') == CHANNEL_STATE.COMPLETE.value - - painter.save() - - x = int(rect.x() + (rect.width() - self._width) // 2) - y = int(rect.y() + (rect.height() - self._height) // 2) - - offset = self._end_offset[toggled]() - p = painter - - p.setRenderHint(QPainter.Antialiasing, True) - track_opacity = 1.0 if hover else self._track_opacity - thumb_opacity = 1.0 - text_opacity = 1.0 - track_brush = self._track_color[toggled] - thumb_brush = self._thumb_color[toggled] - text_color = self._text_color[toggled] - - p.setBrush(track_brush) - p.setPen(QPen(track_brush.color(), 2)) - if not complete and toggled: - p.setBrush(Qt.NoBrush) - p.setOpacity(track_opacity) - p.drawRoundedRect( - x, - y, - self._width - 2 * self._margin, - self._height - 2 * self._margin, - self._track_radius, - self._track_radius, - ) - p.setPen(Qt.NoPen) - - p.setBrush(thumb_brush) - p.setOpacity(thumb_opacity) - p.drawEllipse( - x + 
offset - self._thumb_radius, - y + self._base_offset - self._thumb_radius, - 2 * self._thumb_radius, - 2 * self._thumb_radius, - ) - p.setPen(text_color) - p.setOpacity(text_opacity) - font = p.font() - font.setPixelSize(int(1.5 * self._thumb_radius)) - p.setFont(font) - p.drawText( - QRectF( - x + offset - self._thumb_radius, - y + self._base_offset - self._thumb_radius, - 2 * self._thumb_radius, - 2 * self._thumb_radius, - ), - Qt.AlignCenter, - self._thumb_text[toggled], - ) - - painter.restore() - - class CommitStatusControl(QObject, CheckClickedMixin): # Column-level controls are stateless collections of methods for visualizing cell data and # triggering corresponding events. @@ -922,33 +743,3 @@ def __init__(self, column_name, parent=None): QObject.__init__(self, parent=parent) self.column_name = column_name self.last_index = QModelIndex() - - -class RatingControl(QObject, CheckClickedMixin): - """ - Controls for visualizing the votes and subscription information for channels. - """ - - rating_colors = { - "BACKGROUND": QColor("#444444"), - "FOREGROUND": QColor("#BBBBBB"), - # "SUBSCRIBED_HOVER": QColor("#FF5722"), - } - - clicked = pyqtSignal(QModelIndex) - - def __init__(self, column_name, parent=None): - QObject.__init__(self, parent=parent) - self.column_name = column_name - self.last_index = QModelIndex() - self.font = None - # For some reason, on MacOS default inter-character spacing for some symbols - # is too wide. We have to adjust it manually. - if DARWIN or WINDOWS: - self.font = QFont() - self.font.setLetterSpacing(QFont.PercentageSpacing, 60.0) - - def paint(self, painter, rect, _index, votes=0): - lpad = " " # we pad it to move it closer to the center - draw_text(painter, rect, lpad + format_votes(1.0), color=self.rating_colors["BACKGROUND"], font=self.font) - draw_text(painter, rect, lpad + format_votes(votes), color=self.rating_colors["FOREGROUND"], font=self.font) diff --git a/src/tribler/gui/widgets/tablecontentmodel.py b/src/tribler/gui/widgets/tablecontentmodel.py index b5175b827cb..cd057d7f7a8 100644 --- a/src/tribler/gui/widgets/tablecontentmodel.py +++ b/src/tribler/gui/widgets/tablecontentmodel.py @@ -10,13 +10,11 @@ from PyQt5.QtCore import QAbstractTableModel, QModelIndex, QRectF, QSize, QTimerEvent, Qt, pyqtSignal -from tribler.core.components.metadata_store.db.orm_bindings.channel_node import NEW -from tribler.core.components.metadata_store.db.serialization import CHANNEL_TORRENT, COLLECTION_NODE, REGULAR_TORRENT, \ - SNIPPET +from tribler.core.components.metadata_store.db.serialization import COLLECTION_NODE, REGULAR_TORRENT, SNIPPET from tribler.core.utilities.search_utils import item_rank -from tribler.core.utilities.simpledefs import CHANNELS_VIEW_UUID, CHANNEL_STATE +from tribler.core.utilities.simpledefs import CHANNEL_STATE from tribler.core.utilities.utilities import to_fts_query -from tribler.gui.defs import BITTORRENT_BIRTHDAY, COMMIT_STATUS_TODELETE, HEALTH_CHECKING +from tribler.gui.defs import BITTORRENT_BIRTHDAY, HEALTH_CHECKING from tribler.gui.network.request_manager import request_manager from tribler.gui.utilities import connect, format_size, format_votes, get_votes_rating_description, pretty_date, tr @@ -438,11 +436,9 @@ def __init__( self.edit_tags_rects: Dict[QModelIndex, QRectF] = {} self.download_popular_content_rects: Dict[QModelIndex, List[QRectF]] = {} - # Current channel attributes. This is intentionally NOT copied, so local changes - # can propagate to the origin, e.g. parent channel. 
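The removed "intentionally NOT copied" comment above leaned on Python's reference semantics: the old model stored the caller's `channel_info` dict directly, so edits made through the model were visible to the parent channel view as well. A tiny illustration of the difference an explicit copy would have made (the dict contents here are made up):

```python
parent_channel_info = {"name": "My channel", "subscribed": False}

model_info = parent_channel_info        # shared reference, as in the old model
model_info["subscribed"] = True         # ...so the change propagates to the origin

copied_info = dict(parent_channel_info)  # an explicit copy would decouple them
copied_info["name"] = "Renamed"

assert parent_channel_info["subscribed"] is True     # shared edit propagated
assert parent_channel_info["name"] == "My channel"   # the copy's edit did not
```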
- self.channel_info = channel_info or {"name": "My channels", "status": 123} + self.channel_info = channel_info - self.endpoint_url_override = endpoint_url + self.endpoint_url = endpoint_url # Load the initial batch of entries self.perform_initial_query() @@ -451,13 +447,6 @@ def __init__( def edit_enabled(self): return False - @property - def endpoint_url(self): - return self.endpoint_url_override or "channels/%s/%i" % ( - self.channel_info["public_key"], - self.channel_info["id"], - ) - def headerData(self, num, orientation, role=None): if orientation == Qt.Horizontal and role == Qt.DisplayRole: header_text = self.columns[num].header @@ -502,7 +491,7 @@ def item_txt(self, index, role, is_editing: bool = False): column_type == Column.SIZE and "torrents" not in self.columns and "torrents" in item - and item["type"] in (CHANNEL_TORRENT, COLLECTION_NODE, SNIPPET) + and item["type"] in (COLLECTION_NODE, SNIPPET) ): if item["type"] == SNIPPET: return "" @@ -646,12 +635,6 @@ def on_new_entry_received(self, response): self.on_query_results(response, remote=True) -class ChannelPreviewModel(ChannelContentModel): - def perform_query(self, **kwargs): - kwargs["remote"] = True - super().perform_query(**kwargs) - - class SearchResultsModel(ChannelContentModel): def __init__(self, original_query, **kwargs): self.original_query = original_query @@ -712,91 +695,5 @@ class PopularTorrentsModel(ChannelContentModel): columns_shown = (Column.CATEGORY, Column.NAME, Column.SIZE, Column.CREATED) def __init__(self, *args, **kwargs): - kwargs["endpoint_url"] = 'channels/popular_torrents' - super().__init__(*args, **kwargs) - - -class DiscoveredChannelsModel(ChannelContentModel): - columns_shown = (Column.SUBSCRIBED, Column.NAME, Column.STATE, Column.TORRENTS, Column.VOTES, Column.CREATED) - - @property - def default_sort_column(self): - return self.columns_shown.index(Column.VOTES) - - def __init__(self, *args, **kwargs): - super().__init__(*args, **kwargs) - # Subscribe to new channels updates notified over the Events endpoint - self.remote_queries.add(CHANNELS_VIEW_UUID) - - -class PersonalChannelsModel(ChannelContentModel): - columns_shown = ( - Column.ACTIONS, - Column.CATEGORY, - Column.NAME, - Column.SIZE, - Column.HEALTH, - Column.CREATED, - Column.STATUS, - ) - - def __init__(self, *args, **kwargs): - kwargs["hide_xxx"] = kwargs.get("hide_xxx", False) - super().__init__(*args, **kwargs) - self.columns[self.column_position[Column.CATEGORY]].qt_flags |= Qt.ItemIsEditable - self.columns[self.column_position[Column.NAME]].qt_flags |= Qt.ItemIsEditable - - def delete_rows(self, rows): - patch_data = [] - delete_data = [] - for entry in [row.model().data_items[row.row()] for row in rows]: - if entry["status"] == NEW: - delete_data.append({"public_key": entry['public_key'], "id": entry['id']}) - else: - patch_data.append( - {"public_key": entry['public_key'], "id": entry['id'], "status": COMMIT_STATUS_TODELETE} - ) - - # We don't wait for the Core to report back and emit - # the info_changed signal speculativley to prevent the race condition between - # Python object deletion and PyQT one. Otherwise, if the users e.g. clicks the back - # button, by the moment the request callback triggers some actions on the model, - # QT could have already deleted the underlying model object, which will result in - # "wrapped C/C++ object has been deleted" error (see e.g. 
https://github.com/Tribler/tribler/issues/6083) - - if patch_data: - self.remove_items(patch_data) - request_manager.patch("metadata", data=patch_data) - - if delete_data: - self.remove_items(delete_data) - request_manager.delete("metadata", data=delete_data) - - def create_new_channel(self, channel_name=None): - public_key = self.channel_info.get("public_key", '') - channel_id = self.channel_info.get("id", 0) - - endpoint = self.endpoint_url_override or f"channels/{public_key}/{channel_id}" - postfix = "channels" if not channel_id else "collections" - request_manager.post(f'{endpoint}/{postfix}', self.on_create_query_results, - data=json.dumps({"name": channel_name}) if channel_name else None) - - def on_create_query_results(self, response, **kwargs): - # This is a hack to put the newly created object at the top of the table - kwargs["on_top"] = 1 - self.on_query_results(response, **kwargs) - if not response or self.qt_object_destroyed: - return False - self.info_changed.emit(response['results']) - - @property - def edit_enabled(self): - return self.channel_info.get("state", None) == "Personal" - - -class SimplifiedPersonalChannelsModel(PersonalChannelsModel): - columns_shown = (Column.ACTIONS, Column.CATEGORY, Column.NAME, Column.SIZE, Column.HEALTH, Column.CREATED) - - def __init__(self, *args, **kwargs): - kwargs["exclude_deleted"] = kwargs.get("exclude_deleted", True) + kwargs["endpoint_url"] = 'metadata/torrents/popular' super().__init__(*args, **kwargs) diff --git a/src/tribler/gui/widgets/triblertablecontrollers.py b/src/tribler/gui/widgets/triblertablecontrollers.py index 7b17d0b2799..e130036835e 100644 --- a/src/tribler/gui/widgets/triblertablecontrollers.py +++ b/src/tribler/gui/widgets/triblertablecontrollers.py @@ -10,8 +10,7 @@ from PyQt5.QtNetwork import QNetworkRequest from PyQt5.QtWidgets import QAction -from tribler.core.components.metadata_store.db.serialization import CHANNEL_TORRENT, COLLECTION_NODE, REGULAR_TORRENT -from tribler.core.utilities.simpledefs import CHANNEL_STATE +from tribler.core.components.metadata_store.db.serialization import REGULAR_TORRENT from tribler.gui.defs import HEALTH_CHECKING, HEALTH_UNCHECKED from tribler.gui.network.request_manager import request_manager from tribler.gui.tribler_action_menu import TriblerActionMenu @@ -33,7 +32,6 @@ def __init__(self, table_view, *args, filter_input=None, **kwargs): self.table_view = table_view connect(self.table_view.verticalScrollBar().valueChanged, self._on_list_scroll) - connect(self.table_view.delegate.subscribe_control.clicked, self.table_view.on_subscribe_control_clicked) connect(self.table_view.delegate.download_button.clicked, self.table_view.start_download_from_index) connect(self.table_view.torrent_doubleclicked, self.table_view.start_download_from_dataitem) @@ -223,54 +221,6 @@ def _show_context_menu(self, pos): item_index.model().data_items[item_index.row()], lambda x: self.check_torrent_health(x, forced=True), ) - if num_selected == 1 and item_index.model().column_position.get(Column.SUBSCRIBED) is not None: - data_item = item_index.model().data_items[item_index.row()] - if data_item["type"] == CHANNEL_TORRENT and data_item["state"] != CHANNEL_STATE.PERSONAL.value: - self.add_menu_item( - menu, - tr("Unsubscribe channel") if data_item["subscribed"] else tr("Subscribe channel"), - item_index.model().index(item_index.row(), item_index.model().column_position[Column.SUBSCRIBED]), - self.table_view.delegate.subscribe_control.clicked.emit, - ) - - # Add menu separator for channel stuff - 
menu.addSeparator() - - entries = [self.model.data_items[index.row()] for index in self.table_view.selectionModel().selectedRows()] - - def on_add_to_channel(_): - def on_confirm_clicked(channel_id): - request_manager.post(f"channels/mychannel/{channel_id}/copy", - on_success=lambda _: self.table_view.window().tray_show_message( - tr("Channel update"), tr("Torrent(s) added to your channel") - ), - data=json.dumps(entries)) - - self.table_view.window().add_to_channel_dialog.show_dialog( - on_confirm_clicked, confirm_button_text=tr("Copy") - ) - - def on_move(_): - def on_confirm_clicked(channel_id): - changes_list = [ - {'public_key': entry['public_key'], 'id': entry['id'], 'origin_id': channel_id} for entry in entries - ] - self.model.remove_items(entries) - request_manager.patch("metadata", data=changes_list) - - self.table_view.window().add_to_channel_dialog.show_dialog( - on_confirm_clicked, confirm_button_text=tr("Move") - ) - - if not self.model.edit_enabled: - if self.selection_can_be_added_to_channel(): - self.add_menu_item(menu, tr(" Copy into personal channel"), item_index, on_add_to_channel) - else: - self.add_menu_item(menu, tr(" Move "), item_index, on_move) - self.add_menu_item(menu, tr(" Rename "), item_index, self._trigger_name_editor) - self.add_menu_item(menu, tr(" Change category "), item_index, self._trigger_category_editor) - menu.addSeparator() - self.add_menu_item(menu, tr(" Remove from channel"), item_index, self.table_view.on_delete_button_clicked) menu.exec_(QCursor.pos()) @@ -279,13 +229,6 @@ def add_menu_item(self, menu, name, item_index, callback): connect(action.triggered, lambda _: callback(item_index)) menu.addAction(action) - def selection_can_be_added_to_channel(self): - for row in self.table_view.selectionModel().selectedRows(): - data_item = row.model().data_items[row.row()] - if dict_item_is_any_of(data_item, 'type', [REGULAR_TORRENT, CHANNEL_TORRENT, COLLECTION_NODE]): - return True - return False - class PopularContentTableViewController( TableSelectionMixin, ContextMenuMixin, TableLoadingAnimationMixin, TriblerTableViewController