diff --git a/.coveragerc b/.coveragerc
index aed1caaf7c1..5f7f3c422a5 100644
--- a/.coveragerc
+++ b/.coveragerc
@@ -1,4 +1,3 @@
 # .coveragerc to control coverage.py
 [run]
 branch = True
-omit = Tribler/dispersy/*
diff --git a/.gitignore b/.gitignore
index fc2c6d84d48..bad786de0c3 100644
--- a/.gitignore
+++ b/.gitignore
@@ -5,6 +5,9 @@
 
 # Apple
 .DS_Store
+
+*.autosave
+*.stats
 .*project
 local.properties
 *.bak
@@ -41,6 +44,7 @@ htmlcov/
 # Ignore nosetest output logs
 Tribler/Test/logs/
+Tribler/Test/Core/logs/
 # Ignore trial temp dirs
 _trial_temp*/
 _trial_temp*.lock
diff --git a/.gitmodules b/.gitmodules
index bac6c46120e..ce13b135588 100644
--- a/.gitmodules
+++ b/.gitmodules
@@ -1,6 +1,3 @@
-[submodule "Tribler/dispersy"]
-    path = Tribler/dispersy
-    url = https://github.com/Tribler/dispersy.git
 [submodule "py-ipv8"]
     path = Tribler/pyipv8
     url = https://github.com/Tribler/py-ipv8.git
diff --git a/.pylintrc b/.pylintrc
index 8886d7dbb65..519f097542d 100644
--- a/.pylintrc
+++ b/.pylintrc
@@ -9,7 +9,7 @@
 
 # Add files or directories to the blacklist. They should be base names, not
 # paths.
-ignore=.git,dispersy,libnacl,data
+ignore=.git,libnacl,data
 
 # Pickle collected data for later comparisons.
 persistent=yes
@@ -150,7 +150,7 @@ generated-members=
 
 # sudo apt install aspell-en
 # List of comma separated words that should not be checked.
-spelling-ignore-words=Tribler,dispersy
+spelling-ignore-words=Tribler
 
 # A path to a file that contains private dictionary; one word per line.
 spelling-private-dict-file=
diff --git a/Tribler/Core/APIImplementation/IPv8EndpointAdapter.py b/Tribler/Core/APIImplementation/IPv8EndpointAdapter.py
deleted file mode 100644
index bbc623620ee..00000000000
--- a/Tribler/Core/APIImplementation/IPv8EndpointAdapter.py
+++ /dev/null
@@ -1,65 +0,0 @@
-import socket
-from time import time
-
-from Tribler.pyipv8.ipv8.messaging.interfaces.endpoint import Endpoint
-
-
-class IPv8EndpointAdapter(Endpoint):
-    """
-    Wrap a Dispersy MIMEndpoint as an IPv8 Endpoint
-    """
-
-    def __init__(self, mimep):
-        super(IPv8EndpointAdapter, self).__init__()
-        mimep.mim = self
-        self.endpoint = mimep
-        self._is_open = False
-        self._prefixes = []
-
-    def add_listener(self, listener):
-        super(IPv8EndpointAdapter, self).add_listener(listener)
-        if hasattr(listener, "_prefix") and listener.__class_.__name__ != "DiscoveryCommunity":
-            self._prefixes.append(listener._prefix)
-
-    def close(self, timeout=0.0):
-        """
-        Stop the Endpoint. Because we are wrapping a Dispersy endpoint, this does nothing.
-        Otherwise, Dispersy would error out.
-
-        The proper way of closing the wrapped endpoint would be:
-        self.endpoint.close(timeout)
-        """
-        pass
-
-    @property
-    def _port(self):
-        return self.endpoint._port
-
-    def assert_open(self):
-        assert self._is_open
-
-    def is_open(self):
-        return True
-
-    def open(self, dispersy=None):
-        self._is_open = self.endpoint.open(dispersy)
-
-    def send(self, socket_address, packet):
-        try:
-            self.endpoint._socket.sendto(packet, socket_address)
-        except socket.error:
-            with self.endpoint._sendqueue_lock:
-                did_have_senqueue = bool(self.endpoint._sendqueue)
-                self.endpoint._sendqueue.append((time(), socket_address, packet))
-                if not did_have_senqueue:
-                    self.endpoint._process_sendqueue()
-
-    def get_address(self):
-        return (self.endpoint._ip, self.endpoint._port)
-
-    def data_came_in(self, packets):
-        for packet in packets:
-            self.notify_listeners(packet)
-        if packets:
-            _, data = packets[0]
-            return any([data.startswith(prefix) for prefix in self._prefixes])
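The adapter above only ever had to satisfy py-ipv8's Endpoint interface: the open/close lifecycle, send(), get_address(), and pushing incoming packets to listeners via notify_listeners(). A minimal in-memory stand-in built against that same interface might look like the sketch below; it is illustrative only, not code from this change, and LoopbackEndpoint is a made-up name.

    from Tribler.pyipv8.ipv8.messaging.interfaces.endpoint import Endpoint

    class LoopbackEndpoint(Endpoint):
        """Test double: 'sends' packets straight back to its own listeners."""

        def __init__(self):
            super(LoopbackEndpoint, self).__init__()
            self._open = False

        def open(self):
            self._open = True
            return True

        def close(self, timeout=0.0):
            self._open = False

        def is_open(self):
            return self._open

        def assert_open(self):
            assert self._open

        def get_address(self):
            return ("127.0.0.1", 0)

        def send(self, socket_address, packet):
            # Instead of going over UDP, deliver locally; this mirrors
            # what data_came_in() does in the deleted adapter above.
            self.notify_listeners((socket_address, packet))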
diff --git a/Tribler/Core/APIImplementation/IPv8Module.py b/Tribler/Core/APIImplementation/IPv8Module.py
deleted file mode 100644
index c9345d2715f..00000000000
--- a/Tribler/Core/APIImplementation/IPv8Module.py
+++ /dev/null
@@ -1,16 +0,0 @@
-from os import path
-
-from Tribler.Core.APIImplementation.IPv8EndpointAdapter import IPv8EndpointAdapter
-from Tribler.pyipv8.ipv8_service import IPv8
-from Tribler.pyipv8.ipv8.configuration import get_default_configuration
-
-
-class IPv8Module(IPv8):
-
-    def __init__(self, mimendpoint, working_dir="."):
-        config = get_default_configuration()
-        config['overlays'] = []
-        for key in config['keys']:
-            key['file'] = path.abspath(path.join(working_dir, key['file']))
-        config['keys'] = [key for key in config['keys'] if path.isdir(path.dirname(key['file']))]
-        super(IPv8Module, self).__init__(config, IPv8EndpointAdapter(mimendpoint))
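IPv8Module's configuration handling survives in spirit in register() below: start from get_default_configuration() and strip out the parts Tribler manages itself. A condensed sketch of that pattern follows; the port value is a placeholder (Tribler reads the real one from its session config), and the bare IPv8(config) call assumes the vendored service accepts a configuration dict on its own.

    from Tribler.pyipv8.ipv8.configuration import get_default_configuration
    from Tribler.pyipv8.ipv8_service import IPv8

    config = get_default_configuration()
    config['port'] = 7759      # placeholder; see get_ipv8_port() in the diff below
    config['overlays'] = []    # overlays are added by hand in load_ipv8_overlays()
    config['keys'] = []        # keys are loaded by the session, not by IPv8
    ipv8 = IPv8(config)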
diff --git a/Tribler/Core/APIImplementation/LaunchManyCore.py b/Tribler/Core/APIImplementation/LaunchManyCore.py
index cb3b9bf4e65..b8d3af5f289 100644
--- a/Tribler/Core/APIImplementation/LaunchManyCore.py
+++ b/Tribler/Core/APIImplementation/LaunchManyCore.py
@@ -15,23 +15,18 @@
 from threading import Event, enumerate as enumerate_threads
 from traceback import print_exc
 
-from pony.orm import db_session
-
-from six import text_type
-
 from twisted.internet import reactor
 from twisted.internet.defer import Deferred, DeferredList, inlineCallbacks, succeed
 from twisted.internet.task import LoopingCall
 from twisted.internet.threads import deferToThread
 from twisted.python.threadable import isInIOThread
 
-from Tribler.Core.CacheDB.sqlitecachedb import forceDBThread
-from Tribler.Core.DownloadConfig import DefaultDownloadStartupConfig, DownloadStartupConfig
-from Tribler.Core.Modules.MetadataStore.serialization import ChannelMetadataPayload, float2time
+from Tribler.Core.DownloadConfig import DownloadStartupConfig
 from Tribler.Core.Modules.MetadataStore.store import MetadataStore
+from Tribler.Core.Modules.gigachannel_manager import GigaChannelManager
 from Tribler.Core.Modules.payout_manager import PayoutManager
 from Tribler.Core.Modules.resource_monitor import ResourceMonitor
-from Tribler.Core.Modules.search_manager import SearchManager
+from Tribler.Core.Modules.tracker_manager import TrackerManager
 from Tribler.Core.Modules.versioncheck_manager import VersionCheckManager
 from Tribler.Core.Modules.wallet.dummy_wallet import DummyWallet1, DummyWallet2
 from Tribler.Core.Modules.wallet.tc_wallet import TrustchainWallet
@@ -41,41 +36,20 @@
 from Tribler.Core.Utilities.configparser import CallbackConfigParser
 from Tribler.Core.Utilities.install_dir import get_lib_path
 from Tribler.Core.Video.VideoServer import VideoServer
-from Tribler.Core.exceptions import InvalidSignatureException
-from Tribler.Core.simpledefs import (DLSTATUS_DOWNLOADING, DLSTATUS_SEEDING, DLSTATUS_STOPPED_ON_ERROR, NTFY_DISPERSY,
-                                     NTFY_ERROR, NTFY_FINISHED, NTFY_STARTED, NTFY_TORRENT, NTFY_TORRENTS, NTFY_TRIBLER,
-                                     NTFY_UPDATE, STATE_INITIALIZE_CHANNEL_MGR, STATE_LOADING_COMMUNITIES,
-                                     STATE_STARTING_DISPERSY, STATE_START_API_ENDPOINTS, STATE_START_CREDIT_MINING,
-                                     STATE_START_LIBTORRENT, STATE_START_REMOTE_TORRENT_HANDLER,
-                                     STATE_START_TORRENT_CHECKER, STATE_START_WATCH_FOLDER)
+from Tribler.Core.simpledefs import (DLSTATUS_DOWNLOADING, DLSTATUS_SEEDING, DLSTATUS_STOPPED_ON_ERROR, NTFY_ERROR,
+                                     NTFY_FINISHED, NTFY_STARTED, NTFY_TORRENT, NTFY_TRIBLER,
+                                     STATE_START_API_ENDPOINTS, STATE_START_CREDIT_MINING,
+                                     STATE_START_LIBTORRENT, STATE_START_TORRENT_CHECKER, STATE_START_WATCH_FOLDER)
 from Tribler.pyipv8.ipv8.dht.provider import DHTCommunityProvider
-from Tribler.pyipv8.ipv8.keyvault.private.m2crypto import M2CryptoSK
 from Tribler.pyipv8.ipv8.messaging.anonymization.community import TunnelSettings
 from Tribler.pyipv8.ipv8.peer import Peer
 from Tribler.pyipv8.ipv8.peerdiscovery.churn import RandomChurn
 from Tribler.pyipv8.ipv8.peerdiscovery.community import DiscoveryCommunity, PeriodicSimilarity
 from Tribler.pyipv8.ipv8.peerdiscovery.discovery import EdgeWalk, RandomWalk
 from Tribler.pyipv8.ipv8.taskmanager import TaskManager
-from Tribler.pyipv8.ipv8.util import blockingCallFromThread
 from Tribler.pyipv8.ipv8_service import IPv8
 
 
-class DualStackDiscoveryCommunity(DiscoveryCommunity):
-    """
-    This is a stopgap measure until Dispersy is removed.
-    The reason for this class is that Dispersy bypasses IPv8's load balancing.
-    By injecting peers into IPv8, Dispersy then causes a peer explosion.
-    This subclass can be removed once Dispersy is gone.
-    """
-
-    def on_introduction_response(self, source_address, data):
-        if self.max_peers >= 0 and len(self.get_peers()) > self.max_peers:
-            self.logger.info("Dropping introduction response from (%s, %d): too many peers!",
-                             source_address[0], source_address[1])
-            return
-        return super(DualStackDiscoveryCommunity, self).on_introduction_response(source_address, data)
-
-
 class TriblerLaunchMany(TaskManager):
 
     def __init__(self):
@@ -84,7 +58,6 @@ def __init__(self):
 
         self.initComplete = False
         self.registered = False
-        self.dispersy = None
         self.ipv8 = None
        self.ipv8_start_time = 0
         self.state_cb_count = 0
@@ -104,10 +77,6 @@ def __init__(self):
         self.shutdownstarttime = None
 
         # modules
-        self.torrent_store = None
-        self.metadata_store = None
-        self.rtorrent_handler = None
-        self.tftp_handler = None
         self.api_manager = None
         self.watch_folder = None
         self.version_check_manager = None
@@ -120,8 +89,7 @@ def __init__(self):
         self.votecast_db = None
         self.channelcast_db = None
 
-        self.search_manager = None
-        self.channel_manager = None
+        self.gigachannel_manager = None
 
         self.video_server = None
 
@@ -150,56 +118,13 @@ def register(self, session, session_lock):
         self.session = session
         self.session_lock = session_lock
 
+        self.tracker_manager = TrackerManager(self.session)
+
         # On Mac, we bundle the root certificate for the SSL validation since Twisted is not using the root
         # certificates provided by the system trust store.
         if sys.platform == 'darwin':
             os.environ['SSL_CERT_FILE'] = os.path.join(get_lib_path(), 'root_certs_mac.pem')
 
-        if self.session.config.get_torrent_store_enabled():
-            from Tribler.Core.leveldbstore import LevelDbStore
-            self.torrent_store = LevelDbStore(self.session.config.get_torrent_store_dir())
-            if not self.torrent_store.get_db():
-                raise RuntimeError("Torrent store (leveldb) is None which should not normally happen")
-
-        if self.session.config.get_metadata_enabled():
-            from Tribler.Core.leveldbstore import LevelDbStore
-            self.metadata_store = LevelDbStore(self.session.config.get_metadata_store_dir())
-            if not self.metadata_store.get_db():
-                raise RuntimeError("Metadata store (leveldb) is None which should not normally happen")
-
-        # torrent collecting: RemoteTorrentHandler
-        if self.session.config.get_torrent_collecting_enabled() and self.session.config.get_dispersy_enabled():
-            from Tribler.Core.RemoteTorrentHandler import RemoteTorrentHandler
-            self.rtorrent_handler = RemoteTorrentHandler(self.session)
-
-        # TODO(emilon): move this to a megacache component or smth
-        if self.session.config.get_megacache_enabled():
-            from Tribler.Core.CacheDB.SqliteCacheDBHandler import (PeerDBHandler, TorrentDBHandler,
-                                                                   MyPreferenceDBHandler, VoteCastDBHandler,
-                                                                   ChannelCastDBHandler)
-            from Tribler.Core.Category.Category import Category
-
-            self._logger.debug('tlm: Reading Session state from %s', self.session.config.get_state_dir())
-
-            self.category = Category()
-
-            # create DBHandlers
-            self.peer_db = PeerDBHandler(self.session)
-            self.torrent_db = TorrentDBHandler(self.session)
-            self.mypref_db = MyPreferenceDBHandler(self.session)
-            self.votecast_db = VoteCastDBHandler(self.session)
-            self.channelcast_db = ChannelCastDBHandler(self.session)
-
-            # initializes DBHandlers
-            self.peer_db.initialize()
-            self.torrent_db.initialize()
-            self.mypref_db.initialize()
-            self.votecast_db.initialize()
-            self.channelcast_db.initialize()
-
-        from Tribler.Core.Modules.tracker_manager import TrackerManager
-        self.tracker_manager = TrackerManager(self.session)
-
         if self.session.config.get_video_server_enabled():
             self.video_server = VideoServer(self.session.config.get_video_server_port(), self.session)
             self.video_server.start()
@@ -208,7 +133,7 @@ def register(self, session, session_lock):
         if self.session.config.get_ipv8_enabled():
             from Tribler.pyipv8.ipv8.configuration import get_default_configuration
             ipv8_config = get_default_configuration()
-            ipv8_config['port'] = self.session.config.get_dispersy_port()
+            ipv8_config['port'] = self.session.config.get_ipv8_port()
             ipv8_config['address'] = self.session.config.get_ipv8_address()
             ipv8_config['overlays'] = []
             ipv8_config['keys'] = []  # We load the keys ourselves
@@ -223,33 +148,6 @@ def register(self, session, session_lock):
             self.session.config.set_anon_proxy_settings(2, ("127.0.0.1",
                                                             self.session.config.get_tunnel_community_socks5_listen_ports()))
 
-        # Dispersy
-        self.tftp_handler = None
-        if self.session.config.get_dispersy_enabled():
-            from Tribler.dispersy.dispersy import Dispersy
-            from Tribler.dispersy.endpoint import MIMEndpoint
-            from Tribler.dispersy.endpoint import IPv8toDispersyAdapter
-
-            # set communication endpoint
-            if self.session.config.get_ipv8_enabled():
-                dispersy_endpoint = IPv8toDispersyAdapter(self.ipv8.endpoint)
-            else:
-                dispersy_endpoint = MIMEndpoint(self.session.config.get_dispersy_port())
-
-            working_directory = text_type(self.session.config.get_state_dir())
-            self.dispersy = Dispersy(dispersy_endpoint, working_directory)
-            self.dispersy.statistics.enable_debug_statistics(False)
-
-            # register TFTP service
-            from Tribler.Core.TFTP.handler import TftpHandler
-            self.tftp_handler = TftpHandler(self.session, dispersy_endpoint, "fffffffd".decode('hex'),
-                                            block_size=1024)
-            self.tftp_handler.initialize()
-
-        # Torrent search
-        if self.session.config.get_torrent_search_enabled() or self.session.config.get_channel_search_enabled():
-            self.search_manager = SearchManager(self.session)
-            self.search_manager.initialize()
 
         if not self.initComplete:
             self.init()
@@ -262,24 +160,16 @@ def on_tribler_started(self, subject, changetype, objectID, *args):
         reactor.callFromThread(self.startup_deferred.callback, None)
 
     def load_ipv8_overlays(self):
-        # Discovery Community
-        with open(self.session.config.get_permid_keypair_filename(), 'r') as key_file:
-            content = key_file.read()
-        content = content[31:-30].replace('\n', '').decode("BASE64")
-        peer = Peer(M2CryptoSK(keystring=content))
-        discovery_community = DualStackDiscoveryCommunity(peer, self.ipv8.endpoint, self.ipv8.network)
-        discovery_community.resolve_dns_bootstrap_addresses()
-        self.ipv8.overlays.append(discovery_community)
-        self.ipv8.strategies.append((RandomChurn(discovery_community), -1))
-        self.ipv8.strategies.append((PeriodicSimilarity(discovery_community), -1))
-
-        if not self.session.config.get_dispersy_enabled():
-            self.ipv8.strategies.append((RandomWalk(discovery_community), 20))
-
         if self.session.config.get_testnet():
             peer = Peer(self.session.trustchain_testnet_keypair)
         else:
             peer = Peer(self.session.trustchain_keypair)
+        discovery_community = DiscoveryCommunity(peer, self.ipv8.endpoint, self.ipv8.network)
+        discovery_community.resolve_dns_bootstrap_addresses()
+        self.ipv8.overlays.append(discovery_community)
+        self.ipv8.strategies.append((RandomChurn(discovery_community), -1))
+        self.ipv8.strategies.append((PeriodicSimilarity(discovery_community), -1))
+        self.ipv8.strategies.append((RandomWalk(discovery_community), 20))
 
         # TrustChain Community
         if self.session.config.get_trustchain_enabled():
@@ -313,7 +203,7 @@ def load_ipv8_overlays(self):
             random_slots = self.session.config.get_tunnel_community_random_slots()
             competing_slots = self.session.config.get_tunnel_community_competing_slots()
 
-            dht_provider = DHTCommunityProvider(self.dht_community, self.session.config.get_dispersy_port())
+            dht_provider = DHTCommunityProvider(self.dht_community, self.session.config.get_ipv8_port())
             settings = TunnelSettings()
             settings.min_circuits = 3
             settings.max_circuits = 10
@@ -349,7 +239,7 @@ def load_ipv8_overlays(self):
             from Tribler.community.popularity.community import PopularityCommunity
 
             self.popularity_community = PopularityCommunity(peer, self.ipv8.endpoint, self.ipv8.network,
-                                                            torrent_db=self.session.lm.torrent_db, session=self.session)
+                                                            metadata_store=self.session.lm.mds, session=self.session)
 
             self.ipv8.overlays.append(self.popularity_community)
 
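Every overlay in load_ipv8_overlays() is wired up with the same three steps, now uniformly keyed to the TrustChain peer rather than the old Dispersy member. Stripped to its core, the pattern looks as follows; CommunityClass is a placeholder for any of the communities in this diff.

    # Illustrative registration pattern only; CommunityClass is hypothetical.
    peer = Peer(trustchain_keypair)                       # identity the overlay runs under
    community = CommunityClass(peer, ipv8.endpoint, ipv8.network)
    ipv8.overlays.append(community)                       # route incoming packets to it
    ipv8.strategies.append((RandomWalk(community), 20))   # walk until ~20 peers are known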
@@ -363,7 +253,7 @@ def load_ipv8_overlays(self):
             from Tribler.community.gigachannel.sync_strategy import SyncChannels
 
             community_cls = GigaChannelTestnetCommunity if self.session.config.get_testnet() else GigaChannelCommunity
-            self.gigachannel_community = community_cls(peer, self.ipv8.endpoint, self.ipv8.network, self.session)
+            self.gigachannel_community = community_cls(peer, self.ipv8.endpoint, self.ipv8.network, self.mds)
 
             self.ipv8.overlays.append(self.gigachannel_community)
 
@@ -375,73 +265,7 @@ def enable_ipv8_statistics(self):
                 for overlay in self.ipv8.overlays:
                     self.ipv8.endpoint.enable_community_statistics(overlay.get_prefix(), True)
 
-    def load_dispersy_communities(self):
-        self._logger.info("tribler: Preparing Dispersy communities...")
-        now_time = timemod.time()
-        default_kwargs = {'tribler_session': self.session}
-
-        # Search Community
-        if self.session.config.get_torrent_search_enabled() and self.dispersy:
-            from Tribler.community.search.community import SearchCommunity
-            self.dispersy.define_auto_load(SearchCommunity, self.session.dispersy_member, load=True,
-                                           kargs=default_kwargs)
-
-        # AllChannel Community
-        if self.session.config.get_channel_search_enabled() and self.dispersy:
-            from Tribler.community.allchannel.community import AllChannelCommunity
-            self.dispersy.define_auto_load(AllChannelCommunity, self.session.dispersy_member, load=True,
-                                           kargs=default_kwargs)
-
-        # Channel Community
-        if self.session.config.get_channel_community_enabled() and self.dispersy:
-            from Tribler.community.channel.community import ChannelCommunity
-            self.dispersy.define_auto_load(ChannelCommunity,
-                                           self.session.dispersy_member, load=True, kargs=default_kwargs)
-
-        # PreviewChannel Community
-        if self.session.config.get_preview_channel_community_enabled() and self.dispersy:
-            from Tribler.community.channel.preview import PreviewChannelCommunity
-            self.dispersy.define_auto_load(PreviewChannelCommunity,
-                                           self.session.dispersy_member, kargs=default_kwargs)
-
-        self._logger.info("tribler: communities are ready in %.2f seconds", timemod.time() - now_time)
-
     def init(self):
-        if self.dispersy:
-            from Tribler.dispersy.community import HardKilledCommunity
-
-            self._logger.info("lmc: Starting Dispersy...")
-
-            self.session.readable_status = STATE_STARTING_DISPERSY
-            now = timemod.time()
-            success = self.dispersy.start(self.session.autoload_discovery)
-
-            diff = timemod.time() - now
-            if success:
-                self._logger.info("lmc: Dispersy started successfully in %.2f seconds [port: %d]",
-                                  diff, self.dispersy.wan_address[1])
-            else:
-                self._logger.info("lmc: Dispersy failed to start in %.2f seconds", diff)
-
-            self.upnp_ports.append((self.dispersy.wan_address[1], 'UDP'))
-
-            from Tribler.dispersy.crypto import M2CryptoSK
-            private_key = self.dispersy.crypto.key_to_bin(
-                M2CryptoSK(filename=self.session.config.get_permid_keypair_filename()))
-            self.session.dispersy_member = blockingCallFromThread(reactor, self.dispersy.get_member,
                                                                  private_key=private_key)
-
-            blockingCallFromThread(reactor, self.dispersy.define_auto_load, HardKilledCommunity,
-                                   self.session.dispersy_member, load=True)
-
-            if self.session.config.get_megacache_enabled():
-                self.dispersy.database.attach_commit_callback(self.session.sqlite_db.commit_now)
-
-            # notify dispersy finished loading
-            self.session.notifier.notify(NTFY_DISPERSY, NTFY_STARTED, None)
-
-        self.session.readable_status = STATE_LOADING_COMMUNITIES
-
         # Wallets
         if self.session.config.get_bitcoinlib_enabled():
             try:
@@ -454,6 +278,11 @@ def init(self):
             except ImportError:
                 self._logger.error("bitcoinlib library cannot be found, Bitcoin wallet not available!")
 
+        if self.session.config.get_chant_enabled():
+            channels_dir = os.path.join(self.session.config.get_chant_channels_dir())
+            database_path = os.path.join(self.session.config.get_state_dir(), 'sqlite', 'metadata.db')
+            self.mds = MetadataStore(database_path, channels_dir, self.session.trustchain_keypair)
+
         if self.session.config.get_dummy_wallets_enabled():
             # For debugging purposes, we create dummy wallets
             dummy_wallet1 = DummyWallet1()
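This hunk moves MetadataStore construction to the front of init(), since later components (GigaChannelManager, and the gigachannel/popularity communities above) now receive self.mds. Standalone construction takes the same three arguments; the paths and key name below are placeholders, not values from this change.

    import os
    from Tribler.Core.Modules.MetadataStore.store import MetadataStore

    state_dir = '/tmp/tribler_state'  # placeholder state directory
    mds = MetadataStore(os.path.join(state_dir, 'sqlite', 'metadata.db'),  # sqlite database file
                        os.path.join(state_dir, 'channels'),               # channel torrents directory
                        trustchain_keypair)                                # key used to sign own metadata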
- """ - if not payload.has_valid_signature(): - raise InvalidSignatureException("The signature of the channel metadata is invalid.") - - channel = self.mds.ChannelMetadata.get_channel_with_id(payload.public_key) - if channel: - if float2time(payload.timestamp) > channel.timestamp: - # Update the channel that is already there. - self._logger.info("Updating channel metadata %s ts %s->%s", str(channel.public_key).encode("hex"), - str(channel.timestamp), str(float2time(payload.timestamp))) - channel.set(**ChannelMetadataPayload.to_dict(payload)) - else: - # Add new channel object to DB - channel = self.mds.ChannelMetadata.from_payload(payload) - channel.subscribed = True - - if channel.version > channel.local_version: - self._logger.info("Downloading new channel version %s ver %i->%i", str(channel.public_key).encode("hex"), - channel.local_version, channel.version) - #TODO: handle the case where the local version is the same as the new one and is not seeded - return self.download_channel(channel) - - def download_channel(self, channel): - """ - Download a channel with a given infohash and title. - :param channel: The channel metadata ORM object. - """ - finished_deferred = Deferred() - - dcfg = DownloadStartupConfig() - dcfg.set_dest_dir(self.mds.channels_dir) - dcfg.set_channel_download(True) - tdef = TorrentDefNoMetainfo(infohash=str(channel.infohash), name=channel.title) - download = self.session.start_download_from_tdef(tdef, dcfg) - channel_id = channel.public_key - download.finished_callback = lambda dl: self.on_channel_download_finished(dl, channel_id, finished_deferred) - if download.get_state().get_status() == DLSTATUS_SEEDING and not download.finished_callback_already_called: - download.finished_callback_already_called = True - download.finished_callback(download) - return download, finished_deferred - - def updated_my_channel(self, new_torrent_path): - """ - Notify the core that we updated our channel. 
-
-    def download_channel(self, channel):
-        """
-        Download a channel with a given infohash and title.
-        :param channel: The channel metadata ORM object.
-        """
-        finished_deferred = Deferred()
-
-        dcfg = DownloadStartupConfig()
-        dcfg.set_dest_dir(self.mds.channels_dir)
-        dcfg.set_channel_download(True)
-        tdef = TorrentDefNoMetainfo(infohash=str(channel.infohash), name=channel.title)
-        download = self.session.start_download_from_tdef(tdef, dcfg)
-        channel_id = channel.public_key
-        download.finished_callback = lambda dl: self.on_channel_download_finished(dl, channel_id, finished_deferred)
-        if download.get_state().get_status() == DLSTATUS_SEEDING and not download.finished_callback_already_called:
-            download.finished_callback_already_called = True
-            download.finished_callback(download)
-        return download, finished_deferred
-
-    def updated_my_channel(self, new_torrent_path):
-        """
-        Notify the core that we updated our channel.
-        :param new_torrent_path: path to the new torrent file
-        """
-        # Start the new download
-        tdef = TorrentDef.load(new_torrent_path)
-        dcfg = DownloadStartupConfig()
-        dcfg.set_dest_dir(self.mds.channels_dir)
-        dcfg.set_channel_download(True)
-        self.add(tdef, dcfg)
-
     def add(self, tdef, dscfg, pstate=None, setupDelay=0, hidden=False,
             share_mode=False, checkpoint_disabled=False):
         """ Called by any thread """
@@ -636,24 +385,6 @@ def add(self, tdef, dscfg, pstate=None, setupDelay=0, hidden=False,
                                  share_mode=share_mode, checkpoint_disabled=checkpoint_disabled)
             setup_deferred.addCallback(self.on_download_handle_created)
 
-            if d and not hidden and self.session.config.get_megacache_enabled():
-                @forceDBThread
-                def write_my_pref():
-                    torrent_id = self.torrent_db.getTorrentID(infohash)
-                    data = {'destination_path': d.get_dest_dir()}
-                    self.mypref_db.addMyPreference(torrent_id, data)
-
-                if isinstance(tdef, TorrentDefNoMetainfo):
-                    self.torrent_db.addOrGetTorrentID(tdef.get_infohash())
-                    self.torrent_db.updateTorrent(tdef.get_infohash(), name=tdef.get_name_as_unicode())
-                    self.torrent_db._db.commit_now()
-                    write_my_pref()
-                elif self.rtorrent_handler:
-                    self.rtorrent_handler.save_torrent(tdef, write_my_pref)
-                else:
-                    self.torrent_db.addExternalTorrent(tdef, extra_info={'status': 'good'})
-                    write_my_pref()
-
         return d
 
     def on_download_handle_created(self, download):
@@ -672,26 +403,17 @@ def remove(self, d, removecontent=False, removestate=True, hidden=False):
             if infohash in self.downloads:
                 del self.downloads[infohash]
 
-            if not hidden:
-                self.remove_id(infohash)
-
         return out or succeed(None)
 
-    def remove_id(self, infohash):
-        @forceDBThread
-        def do_db():
-            torrent_id = self.torrent_db.getTorrentID(infohash)
-            if torrent_id:
-                self.mypref_db.deletePreference(torrent_id)
-
-        if self.session.config.get_megacache_enabled():
-            do_db()
-
     def get_downloads(self):
         """ Called by any thread """
         with self.session_lock:
             return self.downloads.values()  # copy, is mutable
 
+    def get_channel_downloads(self):
+        with self.session_lock:
+            return [download for download in self.downloads.values() if download.get_channel_download()]
+
     def get_download(self, infohash):
         """ Called by any thread """
         with self.session_lock:
@@ -752,21 +474,6 @@ def update_trackers(self, infohash, trackers):
                 dl.set_def(new_def)
                 dl.checkpoint()
 
-            if isinstance(old_def, TorrentDefNoMetainfo):
-                @forceDBThread
-                def update_trackers_db(infohash, new_trackers):
-                    torrent_id = self.torrent_db.getTorrentID(infohash)
-                    if torrent_id is not None:
-                        self.torrent_db.addTorrentTrackerMappingInBatch(torrent_id, new_trackers)
-                        self.session.notifier.notify(NTFY_TORRENTS, NTFY_UPDATE, infohash)
-
-                if self.session.config.get_megacache_enabled():
-                    update_trackers_db(infohash, new_trackers)
-
-            elif not isinstance(old_def, TorrentDefNoMetainfo) and self.rtorrent_handler:
-                # Update collected torrents
-                self.rtorrent_handler.save_torrent(new_def)
-
     #
     # State retrieval
     #
@@ -857,9 +564,6 @@ def sesscb_states_callback(self, states_list):
         if self.credit_mining_manager:
             self.credit_mining_manager.monitor_downloads(states_list)
 
-        if self.gigachannel_community:
-            self.gigachannel_community.update_states(states_list)
-
         return []
 
     #
     #
@@ -894,41 +598,19 @@ def load_download_pstate_noexc(self, infohash):
 
     def resume_download(self, filename, setupDelay=0):
         tdef = dscfg = pstate = None
-        try:
-            pstate = self.load_download_pstate(filename)
-
-            # SWIFTPROC
-            metainfo = pstate.get('state', 'metainfo')
-            if 'infohash' in metainfo:
-                tdef = TorrentDefNoMetainfo(metainfo['infohash'], metainfo['name'], metainfo.get('url', None))
-            else:
-                tdef = TorrentDef.load_from_dict(metainfo)
-
-            if pstate.has_option('download_defaults', 'saveas') and \
-                    isinstance(pstate.get('download_defaults', 'saveas'), tuple):
-                pstate.set('download_defaults', 'saveas', pstate.get('download_defaults', 'saveas')[-1])
-
-            dscfg = DownloadStartupConfig(pstate)
-
-        except:
-            # pstate is invalid or non-existing
-            _, file = os.path.split(filename)
+        pstate = self.load_download_pstate(filename)
 
-            infohash = binascii.unhexlify(file[:-6])
+        metainfo = pstate.get('state', 'metainfo')
+        if 'infohash' in metainfo:
+            tdef = TorrentDefNoMetainfo(metainfo['infohash'], metainfo['name'], metainfo.get('url', None))
+        else:
+            tdef = TorrentDef.load_from_dict(metainfo)
 
-            torrent_data = self.torrent_store.get(infohash)
-            if torrent_data:
-                try:
-                    tdef = TorrentDef.load_from_memory(torrent_data)
-                    defaultDLConfig = DefaultDownloadStartupConfig.getInstance()
-                    dscfg = defaultDLConfig.copy()
+        if pstate.has_option('download_defaults', 'saveas') and \
+                isinstance(pstate.get('download_defaults', 'saveas'), tuple):
+            pstate.set('download_defaults', 'saveas', pstate.get('download_defaults', 'saveas')[-1])
 
-                    if self.mypref_db is not None:
-                        dest_dir = self.mypref_db.getMyPrefStatsInfohash(infohash)
-                        if dest_dir and os.path.isdir(dest_dir):
-                            dscfg.set_dest_dir(dest_dir)
-                except ValueError:
-                    self._logger.warning("tlm: torrent data invalid")
+        dscfg = DownloadStartupConfig(pstate)
 
         if pstate is not None:
             has_resume_data = pstate.get('state', 'engineresumedata') is not None
@@ -992,7 +674,8 @@ def do_remove():
                     self._logger.exception("Could not remove state")
             else:
                 self._logger.warning("remove pstate: download is back, restarted? Canceling removal! %s",
-                                     repr(infohash))
+                                         repr(infohash))
+
         reactor.callFromThread(do_remove)
 
     @inlineCallbacks
@@ -1018,20 +701,10 @@ def early_shutdown(self):
             yield self.torrent_checker.shutdown()
             self.torrent_checker = None
 
-        if self.channel_manager:
-            self.session.notify_shutdown_state("Shutting down Channel Manager...")
-            yield self.channel_manager.shutdown()
-            self.channel_manager = None
-
-        if self.search_manager:
-            self.session.notify_shutdown_state("Shutting down Search Manager...")
-            yield self.search_manager.shutdown()
-            self.search_manager = None
-
-        if self.rtorrent_handler:
-            self.session.notify_shutdown_state("Shutting down Remote Torrent Handler...")
-            yield self.rtorrent_handler.shutdown()
-            self.rtorrent_handler = None
+        if self.gigachannel_manager:
+            self.session.notify_shutdown_state("Shutting down Gigachannel Manager...")
+            yield self.gigachannel_manager.shutdown()
+            self.gigachannel_manager = None
 
         if self.video_server:
             self.session.notify_shutdown_state("Shutting down Video Server...")
@@ -1050,11 +723,6 @@ def early_shutdown(self):
 
             self.tracker_manager = None
 
-        if self.tftp_handler is not None:
-            self.session.notify_shutdown_state("Shutting down TFTP Handler...")
-            yield self.tftp_handler.shutdown()
-            self.tftp_handler = None
-
         if self.tunnel_community and self.trustchain_community:
             # We unload these overlays manually since the TrustChain has to be unloaded after the tunnel overlay.
             tunnel_community = self.tunnel_community
@@ -1066,31 +734,10 @@ def early_shutdown(self):
             self.session.notify_shutdown_state("Shutting down TrustChain Community...")
             yield self.ipv8.unload_overlay(trustchain_community)
 
-        if self.dispersy:
-            self._logger.info("lmc: Shutting down Dispersy...")
-            self.session.notify_shutdown_state("Shutting down Dispersy...")
-            now = timemod.time()
-            try:
-                success = yield self.dispersy.stop()
-            except:
-                print_exc()
-                success = False
-
-            diff = timemod.time() - now
-            if success:
-                self._logger.info("lmc: Dispersy successfully shutdown in %.2f seconds", diff)
-            else:
-                self._logger.info("lmc: Dispersy failed to shutdown in %.2f seconds", diff)
-
         if self.ipv8:
             self.session.notify_shutdown_state("Shutting down IPv8...")
             yield self.ipv8.stop(stop_reactor=False)
 
-        if self.metadata_store is not None:
-            self.session.notify_shutdown_state("Shutting down Metadata Store...")
-            yield self.metadata_store.close()
-            self.metadata_store = None
-
         if self.channelcast_db is not None:
             self.session.notify_shutdown_state("Shutting down ChannelCast DB...")
             yield self.channelcast_db.close()
@@ -1116,11 +763,6 @@ def early_shutdown(self):
             yield self.peer_db.close()
             self.peer_db = None
 
-        if self.torrent_store is not None:
-            self.session.notify_shutdown_state("Shutting down Torrent Store...")
-            yield self.torrent_store.close()
-            self.torrent_store = None
-
         if self.watch_folder is not None:
             self.session.notify_shutdown_state("Shutting down Watch Folder...")
             yield self.watch_folder.stop()
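The one ordering constraint left in early_shutdown() is spelled out in the comment above: TrustChain must outlive the tunnel overlay, which is why both are unloaded by hand instead of through the generic IPv8 stop. The shape of that pattern, roughly (a sketch only; the yields assume Twisted's inlineCallbacks, as used throughout this file):

    from twisted.internet.defer import inlineCallbacks

    @inlineCallbacks
    def unload_ordered(ipv8, tunnel_community, trustchain_community):
        # Tunnel teardown may still touch TrustChain, so TrustChain goes last.
        yield ipv8.unload_overlay(tunnel_community)
        yield ipv8.unload_overlay(trustchain_community)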
diff --git a/Tribler/Core/CacheDB/SqliteCacheDBHandler.py b/Tribler/Core/CacheDB/SqliteCacheDBHandler.py
deleted file mode 100644
index 0a603d0f6ee..00000000000
--- a/Tribler/Core/CacheDB/SqliteCacheDBHandler.py
+++ /dev/null
@@ -1,2587 +0,0 @@
-"""
-SqlitecacheDBHanler.
-
-Author(s): Jie Yang
-"""
-import logging
-import math
-import os
-import threading
-from collections import OrderedDict, defaultdict
-from copy import deepcopy
-from itertools import chain
-from libtorrent import bencode
-from pprint import pformat
-from struct import unpack_from
-from time import time
-from traceback import print_exc
-
-from six import text_type
-from twisted.internet.task import LoopingCall
-
-from Tribler.Core.CacheDB.sqlitecachedb import bin2str, str2bin
-from Tribler.Core.TorrentDef import TorrentDef
-import Tribler.Core.Utilities.json_util as json
-from Tribler.Core.Utilities.search_utils import split_into_keywords, filter_keywords
-from Tribler.Core.Utilities.tracker_utils import get_uniformed_tracker_url
-from Tribler.Core.Utilities.unicode import dunno2unicode
-from Tribler.Core.simpledefs import (INFOHASH_LENGTH, NTFY_UPDATE, NTFY_INSERT, NTFY_DELETE, NTFY_CREATE,
-                                     NTFY_MODIFIED, NTFY_TRACKERINFO, NTFY_MYPREFERENCES, NTFY_VOTECAST, NTFY_TORRENTS,
-                                     NTFY_CHANNELCAST, NTFY_COMMENTS, NTFY_PLAYLISTS, NTFY_MODIFICATIONS,
-                                     NTFY_MODERATIONS, NTFY_MARKINGS, NTFY_STATE,
-                                     SIGNAL_CHANNEL_COMMUNITY, SIGNAL_ON_TORRENT_UPDATED)
-from Tribler.pyipv8.ipv8.taskmanager import TaskManager
-
-VOTECAST_FLUSH_DB_INTERVAL = 15
-
-DEFAULT_ID_CACHE_SIZE = 1024 * 5
-
-
-class LimitedOrderedDict(OrderedDict):
-
-    def __init__(self, limit, *args, **kargs):
-        super(LimitedOrderedDict, self).__init__(*args, **kargs)
-        self._limit = limit
-
-    def __setitem__(self, *args, **kargs):
-        super(LimitedOrderedDict, self).__setitem__(*args, **kargs)
-        if len(self) > self._limit:
-            self.popitem(last=False)
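LimitedOrderedDict was the only caching primitive this file relied on: a dict with a size cap and FIFO eviction (popitem(last=False) drops the oldest insertion, so it is not LRU). Its behaviour in isolation, as a self-contained demo:

    from collections import OrderedDict

    class LimitedOrderedDict(OrderedDict):
        """Dict with a size cap: inserting past the limit evicts the oldest entry."""

        def __init__(self, limit, *args, **kwargs):
            super(LimitedOrderedDict, self).__init__(*args, **kwargs)
            self._limit = limit

        def __setitem__(self, key, value):
            super(LimitedOrderedDict, self).__setitem__(key, value)
            if len(self) > self._limit:
                self.popitem(last=False)  # FIFO eviction, not LRU

    cache = LimitedOrderedDict(2)
    cache['a'] = 1
    cache['b'] = 2
    cache['c'] = 3                    # over the cap: 'a' is evicted
    assert list(cache) == ['b', 'c']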
-
-
-class BasicDBHandler(TaskManager):
-
-    def __init__(self, session, table_name):
-        super(BasicDBHandler, self).__init__()
-        self._logger = logging.getLogger(self.__class__.__name__)
-
-        self.session = session
-        self._db = self.session.sqlite_db
-        self.table_name = table_name
-        self.notifier = session.notifier
-
-    def initialize(self, *args, **kwargs):
-        """
-        Initializes this DBHandler.
-        """
-        pass
-
-    def close(self):
-        self.shutdown_task_manager()
-
-    def size(self):
-        return self._db.size(self.table_name)
-
-    def getOne(self, value_name, where=None, conj=u"AND", **kw):
-        return self._db.getOne(self.table_name, value_name, where=where, conj=conj, **kw)
-
-    def getAll(self, value_name, where=None, group_by=None, having=None, order_by=None, limit=None, offset=None, conj=u"AND", **kw):
-        return self._db.getAll(self.table_name, value_name, where=where, group_by=group_by, having=having, order_by=order_by, limit=limit, offset=offset, conj=conj, **kw)
-
-
-class PeerDBHandler(BasicDBHandler):
-
-    def __init__(self, session):
-        super(PeerDBHandler, self).__init__(session, u"Peer")
-
-        self.permid_id = LimitedOrderedDict(DEFAULT_ID_CACHE_SIZE)
-
-    def getPeerID(self, permid):
-        return self.getPeerIDS([permid, ])[0]
-
-    def getPeerIDS(self, permids):
-        to_select = []
-
-        for permid in permids:
-            assert isinstance(permid, str), permid
-
-            if permid not in self.permid_id:
-                to_select.append(bin2str(permid))
-
-        if len(to_select) > 0:
-            parameters = u", ".join(u'?' * len(to_select))
-            sql_get_peer_ids = u"SELECT peer_id, permid FROM Peer WHERE permid IN (%s)" % parameters
-            peerids = self._db.fetchall(sql_get_peer_ids, to_select)
-            for peer_id, permid in peerids:
-                self.permid_id[str2bin(permid)] = peer_id
-
-        to_return = []
-        for permid in permids:
-            if permid in self.permid_id:
-                to_return.append(self.permid_id[permid])
-            else:
-                to_return.append(None)
-        return to_return
-
-    def addOrGetPeerID(self, permid):
-        peer_id = self.getPeerID(permid)
-        if peer_id is None:
-            self.addPeer(permid, {})
-            peer_id = self.getPeerID(permid)
-
-        return peer_id
-
-    def getPeer(self, permid, keys=None):
-        if keys is not None:
-            res = self.getOne(keys, permid=bin2str(permid))
-            return res
-        else:
-            # return a dictionary
-            # make it compatible for calls to old bsddb interface
-            value_name = (u'peer_id', u'permid', u'name')
-
-            item = self.getOne(value_name, permid=bin2str(permid))
-            if not item:
-                return None
-            peer = dict(zip(value_name, item))
-            peer['permid'] = str2bin(peer['permid'])
-            return peer
-
-    def getPeerById(self, peer_id, keys=None):
-        if keys is not None:
-            res = self.getOne(keys, peer_id=peer_id)
-            return res
-        else:
-            # return a dictionary
-            # make it compatible for calls to old bsddb interface
-            value_name = (u'peer_id', u'permid', u'name')
-
-            item = self.getOne(value_name, peer_id=peer_id)
-            if not item:
-                return None
-            peer = dict(zip(value_name, item))
-            peer['permid'] = str2bin(peer['permid'])
-            return peer
-
-    def addPeer(self, permid, value):
-        # add or update a peer
-        # ARNO: AAARGGH a method that silently changes the passed value param!!!
-        # Jie: deepcopy(value)?
-
-        _permid = None
-        if 'permid' in value:
-            _permid = value.pop('permid')
-
-        peer_id = self.getPeerID(permid)
-        if 'name' in value:
-            value['name'] = dunno2unicode(value['name'])
-        if peer_id is not None:
-            where = u'peer_id == %d' % peer_id
-            self._db.update('Peer', where, **value)
-        else:
-            self._db.insert_or_ignore('Peer', permid=bin2str(permid), **value)
-
-        if _permid is not None:
-            value['permid'] = permid
-
-    def hasPeer(self, permid, check_db=False):
-        if not check_db:
-            return bool(self.getPeerID(permid))
-        else:
-            permid_str = bin2str(permid)
-            sql_get_peer_id = u"SELECT peer_id FROM Peer WHERE permid == ?"
-            peer_id = self._db.fetchone(sql_get_peer_id, (permid_str,))
-            if peer_id is None:
-                return False
-            else:
-                return True
-
-    def deletePeer(self, permid=None, peer_id=None):
-        # don't delete friend of superpeers, except that force is True
-        if peer_id is None:
-            peer_id = self.getPeerID(permid)
-        if peer_id is None:
-            return
-
-        self._db.delete(u"Peer", peer_id=peer_id)
-        deleted = not self.hasPeer(permid, check_db=True)
-        if deleted and permid in self.permid_id:
-            self.permid_id.pop(permid)
-
-
-class TorrentDBHandler(BasicDBHandler):
-
-    def __init__(self, session):
-        super(TorrentDBHandler, self).__init__(session, u"Torrent")
-
-        self.torrent_dir = None
-
-        self.keys = ['torrent_id', 'name', 'length', 'creation_date', 'num_files',
-                     'insert_time', 'secret', 'relevance', 'category', 'status',
-                     'num_seeders', 'num_leechers', 'comment', 'last_tracker_check', 'is_collected']
-        self.existed_torrents = set()
-
-        self.value_name = ['C.torrent_id', 'category', 'status', 'name', 'creation_date', 'num_files',
-                           'num_leechers', 'num_seeders', 'length', 'secret', 'insert_time',
-                           'relevance', 'infohash', 'last_tracker_check']
-
-        self.value_name_for_channel = ['C.torrent_id', 'infohash', 'name', 'length',
-                                       'creation_date', 'num_files', 'insert_time', 'secret',
-                                       'relevance', 'category', 'status',
-                                       'num_seeders', 'num_leechers', 'comment']
-
-        self.category = None
-        self.mypref_db = self.votecast_db = self.channelcast_db = self._rtorrent_handler = None
-
-        self.infohash_id = LimitedOrderedDict(DEFAULT_ID_CACHE_SIZE)
-
-        # We are saving the latest match info object we got so we can assign a relevance score
-        # to incoming remote torrents without doing a full text search.
-        self.latest_matchinfo_torrent = None
-
-    def initialize(self, *args, **kwargs):
-        super(TorrentDBHandler, self).initialize(*args, **kwargs)
-        self.category = self.session.lm.category
-        self.mypref_db = self.session.open_dbhandler(NTFY_MYPREFERENCES)
-        self.votecast_db = self.session.open_dbhandler(NTFY_VOTECAST)
-        self.channelcast_db = self.session.open_dbhandler(NTFY_CHANNELCAST)
-        self._rtorrent_handler = self.session.lm.rtorrent_handler
-
-    def close(self):
-        super(TorrentDBHandler, self).close()
-        self.category = None
-        self.mypref_db = None
-        self.votecast_db = None
-        self.channelcast_db = None
-        self._rtorrent_handler = None
-
-    def getTorrentID(self, infohash):
-        return self.getTorrentIDS([infohash, ]).get(infohash)
-
-    def getTorrentIDS(self, infohashes):
-        unique_infohashes = set(infohashes)
-
-        to_return = {}
-
-        to_select = []
-        for infohash in unique_infohashes:
-            assert isinstance(infohash, str), "INFOHASH has invalid type: %s" % type(infohash)
-            assert len(infohash) == INFOHASH_LENGTH, "INFOHASH has invalid length: %d" % len(infohash)
-
-            if infohash in self.infohash_id:
-                to_return[infohash] = self.infohash_id[infohash]
-            else:
-                to_select.append(bin2str(infohash))
-
-        parameters = '?,' * len(to_select)
-        parameters = parameters[:-1]
-        sql_stmt = u"SELECT torrent_id, infohash FROM Torrent WHERE infohash IN (%s)" % parameters
-        torrents = self._db.fetchall(sql_stmt, to_select)
-        for torrent_id, infohash in torrents:
-            self.infohash_id[str2bin(infohash)] = torrent_id
-
-        for infohash in unique_infohashes:
-            if infohash not in to_return:
-                to_return[infohash] = self.infohash_id.get(infohash)
-
-        if __debug__ and len(to_return) != len(unique_infohashes):
-            self._logger.error("to_return doesn't match infohashes:")
-            self._logger.error("to_return:")
-            self._logger.error(pformat(to_return))
-            self._logger.error("infohashes:")
-            self._logger.error(pformat([bin2str(infohash) for infohash in unique_infohashes]))
-        assert len(to_return) == len(unique_infohashes), (len(to_return), len(unique_infohashes))
-
-        return to_return
-
-    def getTorrentFiles(self, torrent_id):
-        return self._db.fetchall("SELECT path, length FROM TorrentFiles WHERE torrent_id = ?", (torrent_id,))
-
-    def getInfohash(self, torrent_id):
-        sql_get_infohash = "SELECT infohash FROM Torrent WHERE torrent_id==?"
-        ret = self._db.fetchone(sql_get_infohash, (torrent_id,))
-        if ret:
-            ret = str2bin(ret)
-        return ret
-
-    def hasTorrent(self, infohash):
-        assert isinstance(infohash, str), "INFOHASH has invalid type: %s" % type(infohash)
-        assert len(infohash) == INFOHASH_LENGTH, "INFOHASH has invalid length: %d" % len(infohash)
-        if infohash in self.existed_torrents:  # to do: not thread safe
-            return True
-        infohash_str = bin2str(infohash)
-        existed = self._db.getOne('CollectedTorrent', 'torrent_id', infohash=infohash_str)
-        if existed is None:
-            return False
-        else:
-            self.existed_torrents.add(infohash)
-            return True
-
-    def addExternalTorrent(self, torrentdef, extra_info={}):
-        assert isinstance(torrentdef, TorrentDef), "TORRENTDEF has invalid type: %s" % type(torrentdef)
-        assert torrentdef.is_finalized(), "TORRENTDEF is not finalized"
-        infohash = torrentdef.get_infohash()
-        if not self.hasTorrent(infohash):
-            torrent_id = self._addTorrentToDB(torrentdef, extra_info)
-            files = sorted(torrentdef.get_files_with_length(), key=lambda x: x[0])
-            insert_files = [(torrent_id, unicode(path), length) for path, length in files]
-            sql_insert_files = "INSERT OR IGNORE INTO TorrentFiles (torrent_id, path, length) VALUES (?,?,?)"
-            self._db.executemany(sql_insert_files, insert_files)
-            self.notifier.notify(NTFY_TORRENTS, NTFY_INSERT, infohash)
-
-    def addExternalTorrentNoDef(self, infohash, name, files, trackers, timestamp, extra_info={}):
-        if self.hasTorrent(infohash):
-            return
-
-        metainfo = {'info': {}, 'encoding': 'utf_8'}
-        metainfo['info']['name'] = name.encode('utf_8')
-        metainfo['info']['piece length'] = -1
-        metainfo['info']['pieces'] = ''
-
-        if len(files) > 1:
-            files_as_dict = []
-            for filename, file_length in files:
-                filename = filename.encode('utf_8')
-                files_as_dict.append({'path': [filename], 'length': file_length})
-            metainfo['info']['files'] = files_as_dict
-
-        elif len(files) == 1:
-            metainfo['info']['length'] = files[0][1]
-        else:
-            return
-
-        if len(trackers) > 0:
-            metainfo['announce'] = trackers[0]
-            metainfo['announce-list'] = [list(trackers)]
-        else:
-            metainfo['nodes'] = []
-
-        metainfo['creation date'] = timestamp
-
-        try:
-            torrentdef = TorrentDef.load_from_dict(metainfo)
-            torrentdef.infohash = infohash
-
-            torrent_id = self._addTorrentToDB(torrentdef, extra_info)
-            if self._rtorrent_handler:
-                self._rtorrent_handler.notify_possible_torrent_infohash(infohash)
-
-            insert_files = [(torrent_id, unicode(path), length) for path, length in files]
-            sql_insert_files = "INSERT OR IGNORE INTO TorrentFiles (torrent_id, path, length) VALUES (?,?,?)"
-            self._db.executemany(sql_insert_files, insert_files)
-        except:
-            self._logger.error("Could not create a TorrentDef instance %r %r %r %r %r %r",
-                               infohash, timestamp, name, files, trackers, extra_info)
-
-    def addOrGetTorrentID(self, infohash):
-        assert isinstance(infohash, str), "INFOHASH has invalid type: %s" % type(infohash)
-        assert len(infohash) == INFOHASH_LENGTH, "INFOHASH has invalid length: %d" % len(infohash)
-
-        torrent_id = self.getTorrentID(infohash)
-        if torrent_id is None:
-            self._db.insert('Torrent', infohash=bin2str(infohash), status=u'unknown')
-            torrent_id = self.getTorrentID(infohash)
-        return torrent_id
-
-    def addOrGetTorrentIDSReturn(self, infohashes):
-        to_be_inserted = set()
-        torrent_id_results = self.getTorrentIDS(infohashes)
-        for infohash, torrent_id in torrent_id_results.iteritems():
-            if torrent_id is None:
-                to_be_inserted.add(infohash)
-
-        sql = "INSERT INTO Torrent (infohash, status) VALUES (?, ?)"
-        self._db.executemany(sql, [(bin2str(infohash), u'unknown') for infohash in to_be_inserted])
-
-        torrent_id_results = self.getTorrentIDS(infohashes)
-        torrent_ids = []
-        for infohash in infohashes:
-            torrent_ids.append(torrent_id_results[infohash])
-        assert all(torrent_id for torrent_id in torrent_ids), torrent_ids
-        return torrent_ids, to_be_inserted
-
-    def _get_database_dict(self, torrentdef, extra_info={}):
-        assert isinstance(torrentdef, TorrentDef), "TORRENTDEF has invalid type: %s" % type(torrentdef)
-        assert torrentdef.is_finalized(), "TORRENTDEF is not finalized"
-
-        dict = {"infohash": bin2str(torrentdef.get_infohash()),
-                "name": torrentdef.get_name_as_unicode(),
-                "length": torrentdef.get_length(),
-                "creation_date": torrentdef.get_creation_date(),
-                "num_files": len(torrentdef.get_files()),
-                "insert_time": long(time()),
-                "secret": 1 if torrentdef.is_private() else 0,
-                "relevance": 0.0,
-                "category": self.category.calculateCategory(torrentdef.metainfo, torrentdef.get_name_as_unicode()),
-                "status": extra_info.get("status", "unknown"),
-                "comment": torrentdef.get_comment_as_unicode(),
-                "is_collected": extra_info.get('is_collected', 0)
-                }
-
-        if extra_info.get("seeder", -1) != -1:
-            dict["num_seeders"] = extra_info["seeder"]
-        if extra_info.get("leecher", -1) != -1:
-            dict["num_leechers"] = extra_info["leecher"]
-
-        return dict
-
-    def _addTorrentToDB(self, torrentdef, extra_info):
-        assert isinstance(torrentdef, TorrentDef), "TORRENTDEF has invalid type: %s" % type(torrentdef)
-        assert torrentdef.is_finalized(), "TORRENTDEF is not finalized"
-
-        infohash = torrentdef.get_infohash()
-        swarmname = torrentdef.get_name_as_unicode()
-        database_dict = self._get_database_dict(torrentdef, extra_info)
-
-        # see if there is already a torrent in the database with this infohash
-        torrent_id = self.getTorrentID(infohash)
-        if torrent_id is None:  # not in database
-            self._db.insert("Torrent", **database_dict)
-            torrent_id = self.getTorrentID(infohash)
-
-        else:  # infohash in db
-            del database_dict["infohash"]  # no need for infohash, its already stored
-            where = "torrent_id = %d" % torrent_id
-            self._db.update('Torrent', where=where, **database_dict)
-
-        if not torrentdef.is_multifile_torrent():
-            swarmname, _ = os.path.splitext(swarmname)
-        self._indexTorrent(torrent_id, swarmname, torrentdef.get_files())
-
-        self._addTorrentTracker(torrent_id, torrentdef, extra_info)
-        return torrent_id
-
-    def _indexTorrent(self, torrent_id, swarmname, files):
-        # Niels: new method for indexing, replaces invertedindex
-        # Making sure that swarmname does not include extension for single file torrents
-        swarm_keywords = " ".join(split_into_keywords(swarmname))
-
-        filedict = {}
-        fileextensions = set()
-        for filename in files:
-            filename, extension = os.path.splitext(filename)
-            for keyword in split_into_keywords(filename, to_filter_stopwords=True):
-                filedict[keyword] = filedict.get(keyword, 0) + 1
-
-            fileextensions.add(extension[1:])
-
-        filenames = filedict.keys()
-        if len(filenames) > 1000:
-            def popSort(a, b):
-                return filedict[a] - filedict[b]
-            filenames.sort(cmp=popSort, reverse=True)
-            filenames = filenames[:1000]
-
-        values = (torrent_id, swarm_keywords, " ".join(filenames), " ".join(fileextensions))
-        try:
-            # INSERT OR REPLACE not working for fts3 table
-            self._db.execute_write(u"DELETE FROM FullTextIndex WHERE rowid = ?", (torrent_id,))
-            self._db.execute_write(
-                u"INSERT INTO FullTextIndex (rowid, swarmname, filenames, fileextensions) VALUES(?,?,?,?)", values)
-        except:
-            # this will fail if the fts3 module cannot be found
-            print_exc()
-
-    # ------------------------------------------------------------
-    # Adds the trackers of a given torrent into the database.
-    # ------------------------------------------------------------
-    def _addTorrentTracker(self, torrent_id, torrentdef, extra_info={}):
-        # Set add_all to True if you want to put all multi-trackers into db.
-        # In the current version (4.2) only the main tracker is used.
-
-        announce = torrentdef.get_tracker()
-        announce_list = torrentdef.get_tracker_hierarchy()
-
-        # check if to use DHT
-        new_tracker_set = set()
-        if torrentdef.is_private():
-            new_tracker_set.add(u'no-DHT')
-        else:
-            new_tracker_set.add(u'DHT')
-
-        # get rid of junk trackers
-        # prepare the tracker list to add
-        if announce:
-            tracker_url = get_uniformed_tracker_url(announce)
-            if tracker_url:
-                new_tracker_set.add(tracker_url)
-        if announce_list:
-            for tier in announce_list:
-                for tracker in tier:
-                    tracker_url = get_uniformed_tracker_url(tracker)
-                    if tracker_url:
-                        new_tracker_set.add(tracker_url)
-
-        # add trackers in batch
-        self.addTorrentTrackerMappingInBatch(torrent_id, list(new_tracker_set))
-
-    def updateTorrent(self, infohash, notify=True, **kw):  # watch the schema of database
-        if 'seeder' in kw:
-            kw['num_seeders'] = kw.pop('seeder')
-        if 'leecher' in kw:
-            kw['num_leechers'] = kw.pop('leecher')
-
-        for key in kw.keys():
-            if key not in self.keys:
-                kw.pop(key)
-
-        if len(kw) > 0:
-            infohash_str = bin2str(infohash)
-            where = "infohash='%s'" % infohash_str
-            self._db.update(self.table_name, where, **kw)
-
-        if notify:
-            self.notifier.notify(NTFY_TORRENTS, NTFY_UPDATE, infohash)
-
-    def update_torrent_with_metainfo(self, infohash, metainfo):
-        """ Updates name, length and num files from metainfo if record does not exist in the database.
""" - torrent_id = self.addOrGetTorrentID(infohash) - name = self.getOne('name', torrent_id=torrent_id) - if not name: - num_files, length = 0, 0 - if 'info' in metainfo: - info = metainfo['info'] - name = u''.join([unichr(ord(c)) for c in info["name"]]) if "name" in info else "" - if 'files' in info: - num_files = len(info['files']) - for piece in info['files']: - length += piece['length'] - - if name and num_files and length: - self.updateTorrent(infohash, notify=False, name=name, num_files=num_files, length=length) - - def on_torrent_collect_response(self, infohashes): - infohash_list = [(bin2str(infohash)) for infohash in infohashes] - - i_parameters = u"?," * len(infohash_list) - i_parameters = i_parameters[:-1] - - sql = u"SELECT torrent_id, infohash FROM Torrent WHERE infohash in (%s)" % i_parameters - results = self._db.fetchall(sql, infohash_list) - - info_dict = {} - for torrent_id, infohash in results: - if infohash: - info_dict[infohash] = torrent_id - - to_be_inserted = [] - for infohash in infohash_list: - if infohash in info_dict: - continue - to_be_inserted.append((infohash,)) - - if len(to_be_inserted) > 0: - sql = u"INSERT OR IGNORE INTO Torrent (infohash) VALUES (?)" - self._db.executemany(sql, to_be_inserted) - - def on_search_response(self, torrents): - status = u'unknown' - - torrents = [(bin2str(torrent[0]), torrent[1], torrent[2], torrent[3], torrent[4][0], - torrent[5]) for torrent in torrents] - infohash = [(torrent[0],) for torrent in torrents] - - sql = u"SELECT torrent_id, infohash, is_collected, name FROM Torrent WHERE infohash == ?" - results = self._db.executemany(sql, infohash) or [] - - infohash_tid = {} - - tid_collected = set() - tid_name = {} - for torrent_id, infohash, is_collected, name in results: - infohash = str(infohash) - - if infohash: - infohash_tid[infohash] = torrent_id - if is_collected: - tid_collected.add(torrent_id) - tid_name[torrent_id] = name - - insert = [] - update = [] - update_infohash = [] - to_be_indexed = [] - for infohash, swarmname, length, nrfiles, category, creation_date in torrents: - tid = infohash_tid.get(infohash, None) - - if tid: # we know this torrent - if tid not in tid_collected and swarmname != tid_name.get(tid, ''): # if not collected and name not equal then do fullupdate - update.append((swarmname, length, nrfiles, category, creation_date, infohash, status, tid)) - to_be_indexed.append((tid, swarmname)) - - elif infohash and infohash not in infohash_tid: - update_infohash.append((infohash, tid)) - else: - insert.append((swarmname, length, nrfiles, category, creation_date, infohash, status)) - - if len(update) > 0: - sql = u"UPDATE Torrent SET name = ?, length = ?, num_files = ?, category = ?, creation_date = ?," \ - u" infohash = ?, status = ? WHERE torrent_id = ?" - self._db.executemany(sql, update) - - if len(update_infohash) > 0: - sql = u"UPDATE Torrent SET infohash = ? WHERE torrent_id = ?" - self._db.executemany(sql, update_infohash) - - if len(insert) > 0: - sql = u"INSERT INTO Torrent (name, length, num_files, category, creation_date, infohash," \ - u" status) VALUES (?, ?, ?, ?, ?, ?, ?)" - try: - self._db.executemany(sql, insert) - - were_inserted = [(inserted[5],) for inserted in insert] - sql = u"SELECT torrent_id, name FROM Torrent WHERE infohash == ?" 
-                to_be_indexed = to_be_indexed + list(self._db.executemany(sql, were_inserted))
-            except:
-                print_exc()
-                self._logger.error(u"infohashes: %s", insert)
-
-        for torrent_id, swarmname in to_be_indexed:
-            self._indexTorrent(torrent_id, swarmname, [])
-
-    def getTorrentCheckRetries(self, torrent_id):
-        sql = u"SELECT tracker_check_retries FROM Torrent WHERE torrent_id = ?"
-        result = self._db.fetchone(sql, (torrent_id,))
-        return result
-
-    def updateTorrentCheckResult(self, torrent_id, infohash, seeders, leechers, last_check, next_check, status,
-                                 retries):
-        sql = u"UPDATE Torrent SET num_seeders = ?, num_leechers = ?, last_tracker_check = ?, next_tracker_check = ?," \
-              u" status = ?, tracker_check_retries = ? WHERE torrent_id = ?"
-
-        self._db.execute_write(sql, (seeders, leechers, last_check, next_check, status, retries, torrent_id))
-
-        self._logger.debug(u"update result %d/%d for %s/%d", seeders, leechers, bin2str(infohash), torrent_id)
-
-        # notify
-        self.notifier.notify(NTFY_TORRENTS, NTFY_UPDATE, infohash)
-
-    def addTorrentTrackerMapping(self, torrent_id, tracker):
-        self.addTorrentTrackerMappingInBatch(torrent_id, [tracker, ])
-
-    def addTorrentTrackerMappingInBatch(self, torrent_id, tracker_list):
-        if not tracker_list:
-            return
-
-        parameters = u"?," * len(tracker_list)
-        parameters = parameters[:-1]
-        sql = u"SELECT tracker FROM TrackerInfo WHERE tracker IN (%s)" % parameters
-
-        found_tracker_list = self._db.fetchall(sql, tuple(tracker_list))
-        found_tracker_list = [tracker[0] for tracker in found_tracker_list]
-
-        # update tracker info
-        not_found_tracker_list = [tracker for tracker in tracker_list if tracker not in found_tracker_list]
-        for tracker in not_found_tracker_list:
-            if self.session.lm.tracker_manager is not None:
-                self.session.lm.tracker_manager.add_tracker(tracker)
-
-        # update torrent-tracker mapping
-        sql = 'INSERT OR IGNORE INTO TorrentTrackerMapping(torrent_id, tracker_id)'\
-              + ' VALUES(?, (SELECT tracker_id FROM TrackerInfo WHERE tracker = ?))'
-        new_mapping_list = [(torrent_id, tracker) for tracker in tracker_list]
-        if new_mapping_list:
-            self._db.executemany(sql, new_mapping_list)
-
-        # add trackers into the torrent file if it has been collected
-        if not self.session.config.get_torrent_store_enabled() or self.session.lm.torrent_store is None:
-            return
-
-        infohash = self.getInfohash(torrent_id)
-        if infohash and self.session.has_collected_torrent(infohash):
-            torrent_data = self.session.get_collected_torrent(infohash)
-
-            try:
-                tdef = TorrentDef.load_from_memory(torrent_data)
-            except ValueError:
-                self._logger.warning("Invalid torrent file when adding trackers to database.")
-                return
-
-            new_tracker_list = []
-            for tracker in tracker_list:
-                if tdef.get_tracker() and tracker == tdef.get_tracker():
-                    continue
-                if tdef.get_tracker_hierarchy() and tracker in tdef.get_tracker_hierarchy():
-                    continue
-                if tracker in ('DHT', 'no-DHT'):
-                    continue
-                tracker = get_uniformed_tracker_url(tracker)
-                if tracker and [tracker] not in new_tracker_list:
-                    new_tracker_list.append([tracker])
-
-            if tdef.get_tracker_hierarchy():
-                new_tracker_list = tdef.get_tracker_hierarchy() + new_tracker_list
-            if new_tracker_list:
-                tdef.set_tracker_hierarchy(new_tracker_list)
-                # have to use bencode to get around the TorrentDef.is_finalized() check in TorrentDef.encode()
-                self.session.save_collected_torrent(infohash, bencode(tdef.metainfo))
-
-    def getTorrentsOnTracker(self, tracker, current_time, limit=30):
-        sql = """
-            SELECT T.infohash
-            FROM Torrent T, TrackerInfo TI, TorrentTrackerMapping TTM
-            WHERE TI.tracker = ?
-            AND TI.tracker_id = TTM.tracker_id AND T.torrent_id = TTM.torrent_id
-            AND next_tracker_check < ?
-            ORDER BY next_tracker_check DESC
-            LIMIT ?
-        """
-        return [str2bin(tinfo[0]) for tinfo in self._db.fetchall(sql, (tracker, current_time, limit))]
TorrentTrackerMapping TTM - WHERE TI.tracker = ? - AND TI.tracker_id = TTM.tracker_id AND T.torrent_id = TTM.torrent_id - AND next_tracker_check < ? - ORDER BY next_tracker_check DESC - LIMIT ? - """ - return [str2bin(tinfo[0]) for tinfo in self._db.fetchall(sql, (tracker, current_time, limit))] - - def getTrackerListByTorrentID(self, torrent_id): - sql = 'SELECT TR.tracker FROM TrackerInfo TR, TorrentTrackerMapping MP'\ - + ' WHERE MP.torrent_id = ?'\ - + ' AND TR.tracker_id = MP.tracker_id' - tracker_list = self._db.fetchall(sql, (torrent_id,)) - return [tracker[0] for tracker in tracker_list] - - def getTrackerListByInfohash(self, infohash): - torrent_id = self.getTorrentID(infohash) - return self.getTrackerListByTorrentID(torrent_id) - - def addTrackerInfo(self, tracker, to_notify=True): - self.addTrackerInfoInBatch([tracker, ], to_notify) - - def addTrackerInfoInBatch(self, tracker_list, to_notify=True): - sql = 'INSERT INTO TrackerInfo(tracker) VALUES(?)' - self._db.executemany(sql, [(tracker,) for tracker in tracker_list]) - - if to_notify: - self.notifier.notify(NTFY_TRACKERINFO, NTFY_INSERT, tracker_list) - - def getTrackerInfoList(self): - sql = 'SELECT tracker, last_check, failures, is_alive FROM TrackerInfo' - tracker_info_list = self._db.fetchall(sql) - return tracker_info_list - - def updateTrackerInfo(self, args): - sql = 'UPDATE TrackerInfo SET'\ - + ' last_check = ?, failures = ?, is_alive = ?'\ - + ' WHERE tracker = ?' - self._db.executemany(sql, args) - - def getRecentlyAliveTrackers(self, limit=10): - sql = """ - SELECT DISTINCT tracker FROM TrackerInfo - WHERE is_alive = 1 - AND tracker != 'no-DHT' AND tracker != 'DHT' - ORDER BY last_check DESC LIMIT ? - """ - trackers = self._db.fetchall(sql, (limit,)) - return [tracker[0] for tracker in trackers] - - def getTorrent(self, infohash, keys=None, include_mypref=True): - assert isinstance(infohash, str), "INFOHASH has invalid type: %s" % type(infohash) - assert len(infohash) == INFOHASH_LENGTH, "INFOHASH has invalid length: %d" % len(infohash) - - if keys is None: - keys = deepcopy(self.value_name) - else: - keys = list(keys) - - res = self._db.getOne('Torrent C', keys, infohash=bin2str(infohash)) - - if not res: - return None - torrent = dict(zip(keys, res)) - - torrent['infohash'] = infohash - - if include_mypref: - tid = torrent['C.torrent_id'] - stats = self.mypref_db.getMyPrefStats(tid) - - if stats: - torrent['myDownloadHistory'] = True - torrent['destination_path'] = stats[tid] - else: - torrent['myDownloadHistory'] = False - - return torrent - - def getLibraryTorrents(self, keys): - sql = u"SELECT " + u", ".join(keys) + u""" FROM MyPreference, Torrent LEFT JOIN ChannelTorrents - ON Torrent.torrent_id = ChannelTorrents.torrent_id WHERE destination_path != '' - AND MyPreference.torrent_id = Torrent.torrent_id""" - data = self._db.fetchall(sql) - - fixed = self.__fixTorrents(keys, data) - return fixed - - def __fixTorrents(self, keys, results): - def fix_value(key): - if key in keys: - key_index = keys.index(key) - for i in range(len(results)): - result = list(results[i]) - if result[key_index]: - result[key_index] = str2bin(result[key_index]) - results[i] = result - fix_value('infohash') - return results - - def getNumberCollectedTorrents(self): - # return self._db.size('CollectedTorrent') - return self._db.getOne('CollectedTorrent', 'count(torrent_id)') - - def getRecentlyCollectedTorrents(self, limit): - sql = u""" - SELECT CT.infohash, CT.num_seeders, CT.num_leechers, T.last_tracker_check, CT.insert_time - FROM 
Torrent T, CollectedTorrent CT - WHERE CT.torrent_id = T.torrent_id - AND T.secret is not 1 ORDER BY CT.insert_time DESC LIMIT ? - """ - results = self._db.fetchall(sql, (limit,)) - return [[str2bin(result[0]), result[1], result[2], result[3] or 0, result[4]] for result in results] - - def getRecentlyCheckedTorrents(self, limit): - sql = u""" - SELECT T.infohash, T.num_seeders, T.num_leechers, T.last_tracker_check - FROM Torrent T - WHERE T.is_collected = 0 AND T.num_seeders > 1 - AND T.secret is not 1 ORDER BY T.last_tracker_check, T.num_seeders DESC LIMIT ? - """ - results = self._db.fetchall(sql, (limit,)) - return [[str2bin(result[0]), result[1], result[2], result[3] or 0] for result in results] - - def getRandomlyCollectedTorrents(self, insert_time, limit): - sql = u""" - SELECT CT.infohash, CT.num_seeders, CT.num_leechers, T.last_tracker_check - FROM Torrent T, CollectedTorrent CT - WHERE CT.torrent_id = T.torrent_id - AND CT.insert_time < ? - AND T.secret is not 1 ORDER BY RANDOM() DESC LIMIT ? - """ - results = self._db.fetchall(sql, (insert_time, limit)) - return [[str2bin(result[0]), result[1], result[2], result[3] or 0] for result in results] - - def select_torrents_to_collect(self, hashes): - parameters = '?,' * len(hashes) - parameters = parameters[:-1] - - # TODO: bias according to votecast, popular first - - sql = u"SELECT infohash FROM Torrent WHERE is_collected == 0 AND infohash IN (%s)" % parameters - results = self._db.fetchall(sql, map(bin2str, hashes)) - return [str2bin(infohash) for infohash, in results] - - def getTorrentsStats(self): - return self._db.getOne('CollectedTorrent', ['count(torrent_id)', 'sum(length)', 'sum(num_files)']) - - def freeSpace(self, torrents2del): - if self.channelcast_db and self.channelcast_db._channel_id: - sql = U""" - SELECT name, torrent_id, infohash, relevance, - MIN(relevance, 2500) + MIN(500, num_leechers) + 4*MIN(500, num_seeders) - (MAX(0, MIN(500, (%d - creation_date)/86400)) ) AS weight - FROM CollectedTorrent - WHERE torrent_id NOT IN (SELECT torrent_id FROM MyPreference) - AND torrent_id NOT IN (SELECT torrent_id FROM ChannelTorrents WHERE channel_id == %d) - ORDER BY weight - LIMIT %d - """ % (int(time()), self.channelcast_db._channel_id, torrents2del) - else: - sql = u""" - SELECT name, torrent_id, infohash, relevance, - min(relevance,2500) + min(500,num_leechers) + 4*min(500,num_seeders) - (max(0,min(500,(%d-creation_date)/86400)) ) AS weight - FROM CollectedTorrent - WHERE torrent_id NOT IN (SELECT torrent_id FROM MyPreference) - ORDER BY weight - LIMIT %d - """ % (int(time()), torrents2del) - - res_list = self._db.fetchall(sql) - if len(res_list) == 0: - return 0 - - # delete torrents from db - sql_del_torrent = u"UPDATE Torrent SET name = NULL, is_collected = 0 WHERE torrent_id = ?" - # sql_del_pref = "delete from Preference where torrent_id=?" 
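The freeSpace query above ranks collected torrents by a clamped popularity-minus-age weight and erases the lowest-ranked ones first. A plain-Python re-implementation of that formula (the function name is ours, for illustration only):

    import time

    def retention_weight(relevance, num_leechers, num_seeders, creation_date, now=None):
        # mirrors: MIN(relevance, 2500) + MIN(500, num_leechers)
        #          + 4 * MIN(500, num_seeders) - MAX(0, MIN(500, age_in_days))
        now = int(time.time()) if now is None else now
        age_days = (now - creation_date) // 86400
        return (min(relevance, 2500) + min(500, num_leechers)
                + 4 * min(500, num_seeders) - max(0, min(500, age_days)))

    # a fresh, well-seeded torrent outweighs an old dead one, so the dead one is erased first
    fresh = retention_weight(100, 20, 300, int(time.time()) - 2 * 86400)
    stale = retention_weight(100, 0, 0, int(time.time()) - 400 * 86400)
    assert fresh > stale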
-
- tids = []
- for _name, torrent_id, infohash, _relevance, _weight in res_list:
- tids.append((torrent_id,))
- self.session.delete_collected_torrent(infohash)
-
- self._db.executemany(sql_del_torrent, tids)
- # self._db.executemany(sql_del_tracker, tids)
- deleted = self._db.connection.changes()
- # self._db.executemany(sql_del_pref, tids)
-
- # but keep the infohash in db to maintain consistency with preference db
- # torrent_id_infohashes = [(torrent_id,infohash_str,relevance) for torrent_file_name, torrent_id, infohash_str, relevance, weight in res_list]
- # sql_insert = "insert into Torrent (torrent_id, infohash, relevance) values (?,?,?)"
- # self._db.executemany(sql_insert, torrent_id_infohashes)
-
- self._logger.info("Erased %d torrents", deleted)
- return deleted
-
- def relevance_score_remote_torrent(self, torrent_name):
- """
- Calculate the relevance score of a remote torrent, based on the name and the matchinfo object
- of the last torrent from the database.
- The algorithm used is the same one as in search_in_local_torrents_db in SqliteCacheDBHandler.py.
- """
- if self.latest_matchinfo_torrent is None:
- return 0.0
- matchinfo, raw_keywords = self.latest_matchinfo_torrent
-
- # Make sure the strings are utf-8 encoded
- keywords = []
- for keyword in raw_keywords:
- if not isinstance(keyword, text_type):
- keyword = keyword.decode('raw_unicode_escape')
- keywords.append(keyword)
-
- if not isinstance(torrent_name, text_type):
- torrent_name = torrent_name.decode('raw_unicode_escape')
-
- num_phrases, num_cols, num_rows = unpack_from('III', matchinfo)
- unpack_str = 'I' * (3 * num_cols * num_phrases)
- matchinfo = unpack_from('I' * 9 + unpack_str, matchinfo)[9:]
-
- score = 0.0
- for phrase_ind in xrange(num_phrases):
- rows_with_term = matchinfo[3 * (phrase_ind * num_cols) + 2]
- term_freq = torrent_name.lower().count(keywords[phrase_ind])
-
- inv_doc_freq = math.log((num_rows - rows_with_term + 0.5) / (rows_with_term + 0.5), 2)
- right_side = ((term_freq * (1.2 + 1)) / (term_freq + 1.2))
-
- score += inv_doc_freq * right_side
- return score
-
- def search_in_local_torrents_db(self, query, keys=None):
- """
- Search in the local database for torrents matching a specific query. This method also assigns a relevance
- score to each torrent, based on the name, files and file extensions.
- The algorithm is based on BM25. The document length factor is disregarded since our "documents" are very small
- (often a few keywords).
- See https://en.wikipedia.org/wiki/Okapi_BM25 for more information about BM25.
- """
- search_results = []
- keys_str = ", ".join(keys)
- keywords = split_into_keywords(query, to_filter_stopwords=True)
- infohash_index = keys.index('infohash')
-
- # This query gets torrents matching specific keywords. The matchinfo object is also returned. For more
- # information about the returned matchinfo parameters, see https://www.sqlite.org/fts3.html#matchinfo.
- results = self._db.fetchall("SELECT DISTINCT %s, Matchinfo(FullTextIndex, 'pcnalx') "
- "FROM Torrent T, FullTextIndex "
- "LEFT OUTER JOIN _ChannelTorrents C ON T.torrent_id = C.torrent_id "
- "WHERE t.name IS NOT NULL AND t.torrent_id = FullTextIndex.rowid "
- "AND C.deleted_at IS NULL AND FullTextIndex MATCH ?"
- % keys_str, (" OR ".join(keywords),)) - - for result in results: - result = list(result) # We convert the result to a mutable list since we have to decode the infohash - result[infohash_index] = str2bin(result[infohash_index]) - matchinfo = result[len(keys)] # The matchinfo is the last element in the results tuple - self.latest_matchinfo_torrent = matchinfo, keywords - num_phrases, num_cols, num_rows = unpack_from('III', matchinfo) - - unpack_str = 'I' * (3 * num_cols * num_phrases) - matchinfo = unpack_from('I' * 9 + unpack_str, matchinfo)[9:] - - scores = [] - - for col_ind in xrange(num_cols): - score = 0 - for phrase_ind in xrange(num_phrases): - # Fetch info about the current matching term. This number is fetched from the matchinfo object. - # See https://www.sqlite.org/fts3.html#matchinfo for info about the offset calculation. - base_term_offset = 3 * (col_ind + phrase_ind * num_cols) - rows_with_term = matchinfo[base_term_offset + 2] - term_freq = matchinfo[base_term_offset] - - inv_doc_freq = math.log((num_rows - rows_with_term + 0.5) / (rows_with_term + 0.5), 2) - right_side = ((term_freq * (1.2 + 1)) / (term_freq + 1.2)) - - score += inv_doc_freq * right_side - - scores.append(score) - - # Our score is 80% dependent on matching in the name of the torrent, 10% on the names of the files in the - # torrent and 10% on the extensions of files in the torrent. - rel_score = 0.8 * scores[0] + 0.1 * scores[1] + 0.1 * scores[2] - if 'num_seeders' in keys and result[keys.index('num_seeders')] > 0: - # If this torrent has a non-zero amount of seeders, we make it more relevant - rel_score += result[keys.index('num_seeders')] - - extended_result = result + [rel_score] - search_results.append(extended_result) - - return search_results - - def searchNames(self, kws, local=True, keys=None, doSort=True): - assert 'infohash' in keys - assert not doSort or ('num_seeders' in keys or 'T.num_seeders' in keys) - - infohash_index = keys.index('infohash') - num_seeders_index = keys.index('num_seeders') if 'num_seeders' in keys else -1 - - if num_seeders_index == -1: - doSort = False - - values = ", ".join(keys) - mainsql = "SELECT " + values + ", C.channel_id, Matchinfo(FullTextIndex) FROM" - if local: - mainsql += " Torrent T" - else: - mainsql += " CollectedTorrent T" - - mainsql += """, FullTextIndex - LEFT OUTER JOIN _ChannelTorrents C ON T.torrent_id = C.torrent_id - WHERE t.name IS NOT NULL AND t.torrent_id = FullTextIndex.rowid AND C.deleted_at IS NULL AND FullTextIndex MATCH ? 
- """ - - if not local: - mainsql += "AND T.secret is not 1 LIMIT 250" - - query = " ".join(filter_keywords(kws)) - not_negated = [kw for kw in filter_keywords(kws) if kw[0] != '-'] - - results = self._db.fetchall(mainsql, (query,)) - - channels = set() - channel_dict = {} - for result in results: - if result[-2]: - channels.add(result[-2]) - - if len(channels) > 0: - # results are tuples of (id, str(dispersy_cid), name, description, - # nr_torrents, nr_favorites, nr_spam, my_vote, modified, id == - # self._channel_id) - for channel in self.channelcast_db.getChannels(channels): - if channel[1] != '-1': - channel_dict[channel[0]] = channel - - myChannelId = self.channelcast_db._channel_id or 0 - - result_dict = {} - - # step 1, merge torrents keep one with best channel - for result in results: - channel_id = result[-2] - channel = channel_dict.get(channel_id, None) - - infohash = result[infohash_index] - if channel: - # ignoring spam channels - if channel[7] < 0: - continue - - # see if we have a better channel in torrents_dict - if infohash in result_dict: - old_channel = channel_dict.get(result_dict[infohash][-2], False) - if old_channel: - - # allways prefer my channel - if old_channel[0] == myChannelId: - continue - - # allways prefer channel with higher vote - if channel[7] < old_channel[7]: - continue - - votes = (channel[5] or 0) - (channel[6] or 0) - oldvotes = (old_channel[5] or 0) - (old_channel[6] or 0) - if votes < oldvotes: - continue - - result_dict[infohash] = result - - elif infohash not in result_dict: - result_dict[infohash] = result - - - # step 2, fix all dict fields - dont_sort_list = [] - results = [list(result) for result in result_dict.values()] - for index in xrange(len(results) - 1, -1, -1): - result = results[index] - - result[infohash_index] = str2bin(result[infohash_index]) - - matches = {'swarmname': set(), 'filenames': set(), 'fileextensions': set()} - - # Matchinfo is documented at: http://www.sqlite.org/fts3.html#matchinfo - matchinfo = str(result[-1]) - num_phrases, num_cols = unpack_from('II', matchinfo) - unpack_str = 'I' * (3 * num_cols * num_phrases) - matchinfo = unpack_from('II' + unpack_str, matchinfo) - - swarmnames, filenames, fileextensions = [ - [matchinfo[3 * (i + p * num_cols) + 2] for p in range(num_phrases)] - for i in range(num_cols) - ] - - for i, keyword in enumerate(not_negated): - if swarmnames[i]: - matches['swarmname'].add(keyword) - if filenames[i]: - matches['filenames'].add(keyword) - if fileextensions[i]: - matches['fileextensions'].add(keyword) - result[-1] = matches - - channel = channel_dict.get(result[-2], (result[-2], None, '', '', 0, 0, 0, 0, 0, False)) - result.extend(channel) - - if doSort and result[num_seeders_index] <= 0: - dont_sort_list.append((index, result)) - - if doSort: - # Remove the items with 0 seeders from the results list so the sort is faster, append them to the - # results list afterwards. - for index, result in dont_sort_list: - results.pop(index) - - def compare(a, b): - return cmp(a[num_seeders_index], b[num_seeders_index]) - results.sort(compare, reverse=True) - - for index, result in dont_sort_list: - results.append(result) - - if not local: - results = results[:25] - - return results - - def getAutoCompleteTerms(self, keyword, max_terms, limit=100): - sql = "SELECT swarmname FROM FullTextIndex WHERE swarmname MATCH ? LIMIT ?" 
- result = self._db.fetchall(sql, ('"%s*"' % keyword, limit))
-
- all_terms = set()
- for line, in result:
- if len(all_terms) >= max_terms:
- break
- i1 = line.find(keyword)
- i2 = line.find(' ', i1 + len(keyword))
- all_terms.add(line[i1:i2] if i2 >= 0 else line[i1:])
-
- if keyword in all_terms:
- all_terms.remove(keyword)
- if '' in all_terms:
- all_terms.remove('')
-
- return list(all_terms)
-
- def getSearchSuggestion(self, keywords, limit=1):
- match = [keyword.lower() for keyword in keywords if len(keyword) > 3]
-
- def lev(a, b):
- "Calculates the Levenshtein distance between a and b."
- n, m = len(a), len(b)
- if n > m:
- # Make sure n <= m, to use O(min(n,m)) space
- a, b = b, a
- n, m = m, n
-
- current = range(n + 1)
- for i in range(1, m + 1):
- previous, current = current, [i] + [0] * n
- for j in range(1, n + 1):
- add, delete = previous[j] + 1, current[j - 1] + 1
- change = previous[j - 1]
- if a[j - 1] != b[i - 1]:
- change = change + 1
- current[j] = min(add, delete, change)
-
- return current[n]
-
- def levcollate(s1, s2):
- l1 = sum(sorted([lev(a, b) for a in s1.split() for b in match])[:len(match)])
- l2 = sum(sorted([lev(a, b) for a in s2.split() for b in match])[:len(match)])
-
- # return -1 if s1 < s2, +1 if s1 > s2, else 0
- if l1 < l2:
- return -1
- if l1 > l2:
- return 1
- return 0
-
- cursor = self._db.get_cursor()
- connection = cursor.getconnection()
- connection.createcollation("leven", levcollate)
-
- sql = "SELECT swarmname FROM FullTextIndex WHERE swarmname MATCH ? ORDER By swarmname collate leven ASC LIMIT ?"
- results = self._db.fetchall(sql, (' OR '.join(['*%s*' % m for m in match]), limit))
- connection.createcollation("leven", None)
- return [result[0] for result in results]
-
-
-class MyPreferenceDBHandler(BasicDBHandler):
-
- def __init__(self, session):
- super(MyPreferenceDBHandler, self).__init__(session, u"MyPreference")
-
- self.rlock = threading.RLock()
-
- self.recent_preflist = None
- self._torrent_db = None
-
- def initialize(self, *args, **kwargs):
- self._torrent_db = self.session.open_dbhandler(NTFY_TORRENTS)
-
- def close(self):
- super(MyPreferenceDBHandler, self).close()
- self._torrent_db = None
-
- def getMyPrefListInfohash(self, returnDeleted=True, limit=None):
- # Arno, 2012-08-01: having MyPreference (the shorter list) first makes
- # this faster.
- sql = u"SELECT infohash FROM MyPreference, Torrent WHERE Torrent.torrent_id == MyPreference.torrent_id"
- if not returnDeleted:
- sql += u' AND destination_path != ""'
-
- if limit:
- sql += u" ORDER BY creation_time DESC LIMIT %d" % limit
-
- res = self._db.fetchall(sql)
- res = [item for sublist in res for item in sublist]
- return [str2bin(p) if p else '' for p in res]
-
- def getMyPrefStats(self, torrent_id=None):
- value_name = ('torrent_id', 'destination_path',)
- if torrent_id is not None:
- where = 'torrent_id == %s' % torrent_id
- else:
- where = None
- res = self.getAll(value_name, where)
- mypref_stats = {}
- for torrent_id, destination_path in res:
- mypref_stats[torrent_id] = destination_path
- return mypref_stats
-
- def getMyPrefStatsInfohash(self, infohash):
- torrent_id = self._torrent_db.getTorrentID(infohash)
- if torrent_id is not None:
- return self.getMyPrefStats(torrent_id)[torrent_id]
-
- def addMyPreference(self, torrent_id, data):
- # keys in data: destination_path, creation_time, torrent_id
- if self.getOne('torrent_id', torrent_id=torrent_id) is not None:
- # Arno, 2009-03-09: Torrent already exists in myrefs.
- # Hack for hiding from lib while keeping in myprefs.
- # see standardOverview.removeTorrentFromLibrary() - # - self.updateDestDir(torrent_id, data.get('destination_path')) - infohash = self._torrent_db.getInfohash(torrent_id) - if infohash: - self.notifier.notify(NTFY_MYPREFERENCES, NTFY_UPDATE, infohash) - return False - - d = {} - d['destination_path'] = data.get('destination_path') - d['creation_time'] = data.get('creation_time', int(time())) - d['torrent_id'] = torrent_id - - self._db.insert(self.table_name, **d) - - infohash = self._torrent_db.getInfohash(torrent_id) - if infohash: - self.notifier.notify(NTFY_MYPREFERENCES, NTFY_INSERT, infohash) - - return True - - def deletePreference(self, torrent_id): - # Preferences are never actually deleted from the database, only their destdirs get reset. - # self._db.delete(self.table_name, **{'torrent_id': torrent_id}) - self.updateDestDir(torrent_id, "") - - infohash = self._torrent_db.getInfohash(torrent_id) - if infohash: - self.notifier.notify(NTFY_MYPREFERENCES, NTFY_DELETE, infohash) - - def updateDestDir(self, torrent_id, destdir): - if not isinstance(destdir, basestring): - self._logger.info('DESTDIR IS NOT STRING: %s', destdir) - return - self._db.update(self.table_name, 'torrent_id=%d' % torrent_id, destination_path=destdir) - - -class VoteCastDBHandler(BasicDBHandler): - - def __init__(self, session): - super(VoteCastDBHandler, self).__init__(session, u"VoteCast") - - self.my_votes = None - self.updatedChannels = set() - - self.channelcast_db = None - - def initialize(self, *args, **kwargs): - self.channelcast_db = self.session.open_dbhandler(NTFY_CHANNELCAST) - self.session.sqlite_db.register_task(u"flush to database", - LoopingCall(self._flush_to_database)).start(VOTECAST_FLUSH_DB_INTERVAL, - now=False) - - def close(self): - super(VoteCastDBHandler, self).close() - self.channelcast_db = None - - def on_votes_from_dispersy(self, votes): - insert_vote = "INSERT OR REPLACE INTO _ChannelVotes (channel_id, voter_id, dispersy_id, vote, time_stamp) VALUES (?,?,?,?,?)" - self._db.executemany(insert_vote, votes) - - for channel_id, voter_id, _, vote, _ in votes: - if voter_id is None: - self.notifier.notify(NTFY_VOTECAST, NTFY_UPDATE, channel_id, voter_id is None) - if self.my_votes is not None: - self.my_votes[channel_id] = vote - self.updatedChannels.add(channel_id) - - def on_remove_votes_from_dispersy(self, votes, contains_my_vote): - remove_vote = "UPDATE _ChannelVotes SET deleted_at = ? WHERE channel_id = ? AND dispersy_id = ?" - self._db.executemany(remove_vote, votes) - - if contains_my_vote: - for _, channel_id, _ in votes: - self.notifier.notify(NTFY_VOTECAST, NTFY_UPDATE, channel_id, contains_my_vote) - - for _, channel_id, _ in votes: - self.updatedChannels.add(channel_id) - - def _flush_to_database(self): - channel_ids = list(self.updatedChannels) - self.updatedChannels.clear() - - if channel_ids: - parameters = ",".join("?" * len(channel_ids)) - sql = "Select channel_id, vote FROM ChannelVotes WHERE channel_id in (" + parameters + ")" - positive_votes = {} - negative_votes = {} - for channel_id, vote in self._db.fetchall(sql, channel_ids): - if vote == 2: - positive_votes[channel_id] = positive_votes.get(channel_id, 0) + 1 - elif vote == -1: - negative_votes[channel_id] = negative_votes.get(channel_id, 0) + 1 - - updates = [(positive_votes.get(channel_id, 0), negative_votes.get(channel_id, 0), channel_id) - for channel_id in channel_ids] - self._db.executemany("UPDATE OR IGNORE _Channels SET nr_favorite = ?, nr_spam = ? 
WHERE id = ?", updates) - - for channel_id in channel_ids: - self.notifier.notify(NTFY_VOTECAST, NTFY_UPDATE, channel_id) - - def get_latest_vote_dispersy_id(self, channel_id, voter_id): - if voter_id: - select_vote = """SELECT dispersy_id FROM ChannelVotes - WHERE channel_id = ? AND voter_id = ? AND dispersy_id != -1 - ORDER BY time_stamp DESC Limit 1""" - return self._db.fetchone(select_vote, (channel_id, voter_id)) - - select_vote = """SELECT dispersy_id FROM ChannelVotes - WHERE channel_id = ? AND voter_id ISNULL AND dispersy_id != -1 - ORDER BY time_stamp DESC Limit 1""" - return self._db.fetchone(select_vote, (channel_id,)) - - def getPosNegVotes(self, channel_id): - sql = 'select nr_favorite, nr_spam from Channels where id = ?' - result = self._db.fetchone(sql, (channel_id,)) - if result: - return result - return 0, 0 - - def getVoteOnChannel(self, channel_id, voter_id): - """ return the vote status if such record exists, otherwise None """ - if voter_id: - sql = "select vote from ChannelVotes where channel_id = ? and voter_id = ?" - return self._db.fetchone(sql, (channel_id, voter_id)) - sql = "select vote from ChannelVotes where channel_id = ? and voter_id ISNULL" - return self._db.fetchone(sql, (channel_id,)) - - def getVoteForMyChannel(self, voter_id): - return self.getVoteOnChannel(self.channelcast_db._channel_id, voter_id) - - def getDispersyId(self, channel_id, voter_id): - """ return the dispersy_id for this vote """ - if voter_id: - sql = "select dispersy_id from ChannelVotes where channel_id = ? and voter_id = ?" - return self._db.fetchone(sql, (channel_id, voter_id)) - sql = "select dispersy_id from ChannelVotes where channel_id = ? and voter_id ISNULL" - return self._db.fetchone(sql, (channel_id,)) - - def getTimestamp(self, channel_id, voter_id): - """ return the timestamp for this vote """ - if voter_id: - sql = "select time_stamp from ChannelVotes where channel_id = ? and voter_id = ?" - return self._db.fetchone(sql, (channel_id, voter_id)) - sql = "select time_stamp from ChannelVotes where channel_id = ? and voter_id ISNULL" - return self._db.fetchone(sql, (channel_id,)) - - def getMyVotes(self): - if not self.my_votes: - sql = "SELECT channel_id, vote FROM ChannelVotes WHERE voter_id ISNULL" - - self.my_votes = {} - for channel_id, vote in self._db.fetchall(sql): - self.my_votes[channel_id] = vote - return self.my_votes - - -class ChannelCastDBHandler(BasicDBHandler): - - def __init__(self, session): - super(ChannelCastDBHandler, self).__init__(session, u"_Channels") - - self._channel_id = None - self.my_dispersy_cid = None - - self.votecast_db = None - self.torrent_db = None - - def initialize(self, *args, **kwargs): - self._channel_id = self.getMyChannelId() - self._logger.debug(u"Channels: my channel is %s", self._channel_id) - - self.votecast_db = self.session.open_dbhandler(NTFY_VOTECAST) - self.torrent_db = self.session.open_dbhandler(NTFY_TORRENTS) - - def update_nr_torrents(): - rows = self.getChannelNrTorrents(50) - update = "UPDATE _Channels SET nr_torrents = ? WHERE id = ?" - self._db.executemany(update, rows) - - rows = self.getChannelNrTorrentsLatestUpdate(50) - update = "UPDATE _Channels SET nr_torrents = ?, modified = ? WHERE id = ?" 
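The _flush_to_database routine above collects dirty channel ids, tallies their votes in a single query, and writes one aggregated row per channel via executemany. The aggregation step in isolation, with plain dicts standing in for the live tables:

    # (channel_id, vote) rows as returned by the ChannelVotes query; 2 = favorite, -1 = spam
    rows = [(1, 2), (1, 2), (1, -1), (7, -1)]

    positive_votes, negative_votes = {}, {}
    for channel_id, vote in rows:
        if vote == 2:
            positive_votes[channel_id] = positive_votes.get(channel_id, 0) + 1
        elif vote == -1:
            negative_votes[channel_id] = negative_votes.get(channel_id, 0) + 1

    # one (nr_favorite, nr_spam, id) tuple per updated channel, ready for executemany
    updates = [(positive_votes.get(cid, 0), negative_votes.get(cid, 0), cid) for cid in (1, 7)]
    assert updates == [(2, 1, 1), (0, 1, 7)]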
- self._db.executemany(update, rows) - - self.register_task(u"update_nr_torrents", LoopingCall(update_nr_torrents)).start(300, now=False) - - def close(self): - super(ChannelCastDBHandler, self).close() - self._channel_id = None - self.my_dispersy_cid = None - - self.votecast_db = None - self.torrent_db = None - - def get_metadata_torrents(self, is_collected=True, limit=20): - stmt = u""" -SELECT T.torrent_id, T.infohash, T.name, T.length, T.category, T.status, T.num_seeders, T.num_leechers, CMD.value -FROM MetaDataTorrent, ChannelTorrents AS CT, ChannelMetaData AS CMD, Torrent AS T -WHERE CT.id == MetaDataTorrent.channeltorrent_id - AND CMD.id == MetaDataTorrent.metadata_id - AND T.torrent_id == CT.torrent_id - AND CMD.type == 'metadata-json' - AND CMD.value LIKE '%thumb_hash%' - AND T.is_collected == ? -ORDER BY CMD.time_stamp DESC LIMIT ?; -""" - result_list = self._db.fetchall(stmt, (int(is_collected), limit)) or [] - torrent_list = [] - for torrent_id, info_hash, name, length, category, status, num_seeders, num_leechers, metadata_json in result_list: - torrent_dict = {'id': torrent_id, - 'info_hash': str2bin(info_hash), - 'name': name, - 'length': length, - 'category': category, - 'status': status, - 'num_seeders': num_seeders, - 'num_leechers': num_leechers, - 'metadata-json': metadata_json} - torrent_list.append(torrent_dict) - - return torrent_list - - # dispersy helper functions - def _get_my_dispersy_cid(self): - if not self.my_dispersy_cid: - from Tribler.community.channel.community import ChannelCommunity - - for community in self.session.lm.dispersy.get_communities(): - if isinstance(community, ChannelCommunity) and community.master_member and community.master_member.private_key: - self.my_dispersy_cid = community.cid - break - - return self.my_dispersy_cid - - def get_torrent_metadata(self, channel_torrent_id): - stmt = u"""SELECT ChannelMetadata.value FROM ChannelMetadata, MetaDataTorrent - WHERE type = 'metadata-json' - AND ChannelMetadata.id = MetaDataTorrent.metadata_id - AND MetaDataTorrent.channeltorrent_id = ?""" - result = self._db.fetchone(stmt, (channel_torrent_id,)) - if result: - metadata_dict = json.loads(result) - metadata_dict['thumb_hash'] = metadata_dict['thumb_hash'].decode('hex') - return metadata_dict - - def getDispersyCIDFromChannelId(self, channel_id): - return self._db.fetchone(u"SELECT dispersy_cid FROM Channels WHERE id = ?", (channel_id,)) - - def getChannelIdFromDispersyCID(self, dispersy_cid): - return self._db.fetchone(u"SELECT id FROM Channels WHERE dispersy_cid = ?", (dispersy_cid,)) - - def getCountMaxFromChannelId(self, channel_id): - sql = u"SELECT COUNT(*), MAX(inserted) FROM ChannelTorrents WHERE channel_id = ? LIMIT 1" - return self._db.fetchone(sql, (channel_id,)) - - def on_channel_from_dispersy(self, dispersy_cid, peer_id, name, description): - if isinstance(dispersy_cid, (str)): - _dispersy_cid = buffer(dispersy_cid) - else: - _dispersy_cid = dispersy_cid - - # merge channels if we detect upgrade from old-channelcast to new-dispersy-channelcast - get_channel = "SELECT id FROM Channels Where peer_id = ? and dispersy_cid == -1" - channel_id = self._db.fetchone(get_channel, (peer_id,)) - - if channel_id: # update this channel - update_channel = "UPDATE _Channels SET dispersy_cid = ?, name = ?, description = ? WHERE id = ?" 
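get_torrent_metadata above stores the thumbnail hash hex-encoded inside a 'metadata-json' value and decodes it on read; .decode('hex') is Python 2 only. An equivalent standalone sketch (made-up value) using binascii, which also works on Python 3:

    import json
    from binascii import unhexlify

    raw = '{"thumb_hash": "0123456789abcdef0123456789abcdef01234567"}'
    metadata_dict = json.loads(raw)
    metadata_dict['thumb_hash'] = unhexlify(metadata_dict['thumb_hash'])
    assert len(metadata_dict['thumb_hash']) == 20  # 40 hex chars -> 20 raw bytes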
- self._db.execute_write(update_channel, (_dispersy_cid, name, description, channel_id)) - - self.notifier.notify(NTFY_CHANNELCAST, NTFY_UPDATE, channel_id) - - else: - get_channel = "SELECT id FROM Channels Where dispersy_cid = ?" - channel_id = self._db.fetchone(get_channel, (_dispersy_cid,)) - - if channel_id: - update_channel = "UPDATE _Channels SET name = ?, description = ?, peer_id = ? WHERE dispersy_cid = ?" - self._db.execute_write(update_channel, (name, description, peer_id, _dispersy_cid)) - - else: - # insert channel - insert_channel = "INSERT INTO _Channels (dispersy_cid, peer_id, name, description) VALUES (?, ?, ?, ?); SELECT last_insert_rowid();" - channel_id = self._db.fetchone(insert_channel, (_dispersy_cid, peer_id, name, description)) - - self.notifier.notify(NTFY_CHANNELCAST, NTFY_INSERT, channel_id) - - if not self._channel_id and self._get_my_dispersy_cid() == dispersy_cid: - self._channel_id = channel_id - self.notifier.notify(NTFY_CHANNELCAST, NTFY_CREATE, channel_id) - return channel_id - - def on_channel_modification_from_dispersy(self, channel_id, modification_type, modification_value): - if modification_type in ['name', 'description']: - update_channel = "UPDATE _Channels Set " + modification_type + " = ?, modified = ? WHERE id = ?" - self._db.execute_write(update_channel, (modification_value, long(time()), channel_id)) - - self.notifier.notify(NTFY_CHANNELCAST, NTFY_MODIFIED, channel_id) - - def on_torrents_from_dispersy(self, torrentlist): - infohashes = [torrent[3] for torrent in torrentlist] - torrent_ids, inserted = self.torrent_db.addOrGetTorrentIDSReturn(infohashes) - - insert_data = [] - updated_channels = {} - - for i, torrent in enumerate(torrentlist): - channel_id, dispersy_id, peer_id, infohash, timestamp, name, files, trackers = torrent - torrent_id = torrent_ids[i] - - # if new or not yet collected - if infohash in inserted: - self.torrent_db.addExternalTorrentNoDef( - infohash, name, files, trackers, timestamp, {'dispersy_id': dispersy_id}) - - insert_data.append((dispersy_id, torrent_id, channel_id, peer_id, name, timestamp)) - updated_channels[channel_id] = updated_channels.get(channel_id, 0) + 1 - - if len(insert_data) > 0: - sql_insert_torrent = "INSERT INTO _ChannelTorrents (dispersy_id, torrent_id, channel_id, peer_id, name, time_stamp) VALUES (?,?,?,?,?,?)" - self._db.executemany(sql_insert_torrent, insert_data) - - updated_channel_torrent_dict = defaultdict(list) - for torrent in torrentlist: - channel_id, dispersy_id, peer_id, infohash, timestamp, name, files, trackers = torrent - channel_torrent_id = self.get_channel_torrent_id(channel_id, infohash) - updated_channel_torrent_dict[channel_id].append({u'info_hash': infohash, - u'channel_torrent_id': channel_torrent_id}) - - sql_update_channel = "UPDATE _Channels SET modified = strftime('%s','now'), nr_torrents = nr_torrents+? WHERE id = ?" - update_channels = [(new_torrents, channel_id) for channel_id, new_torrents in updated_channels.iteritems()] - self._db.executemany(sql_update_channel, update_channels) - - for channel_id in updated_channels.keys(): - self.notifier.notify(NTFY_CHANNELCAST, NTFY_UPDATE, channel_id) - - for channel_id, item in updated_channel_torrent_dict.items(): - # inform the channel_manager about new channel torrents - self.notifier.notify(SIGNAL_CHANNEL_COMMUNITY, SIGNAL_ON_TORRENT_UPDATED, channel_id, item) - - def on_remove_torrent_from_dispersy(self, channel_id, dispersy_id, redo): - sql = "UPDATE _ChannelTorrents SET deleted_at = ? WHERE channel_id = ? 
and dispersy_id = ?" - - if redo: - deleted_at = None - else: - deleted_at = long(time()) - self._db.execute_write(sql, (deleted_at, channel_id, dispersy_id)) - - self.notifier.notify(NTFY_CHANNELCAST, NTFY_UPDATE, channel_id) - - sql = """SELECT infohash, dispersy_cid FROM Torrent, _ChannelTorrents, Channels - WHERE Torrent.torrent_id = _ChannelTorrents.torrent_id - AND _ChannelTorrents.channel_id = ? AND _ChannelTorrents.dispersy_id = ? - AND Channels.id = _ChannelTorrents.channel_id""" - infohash, dispersy_cid = self._db.fetchone(sql, (channel_id, dispersy_id)) - - if infohash: - self.notifier.notify(NTFY_TORRENTS, NTFY_DELETE, None, - {"infohash": str2bin(infohash).encode('hex'), - "dispersy_cid": str(dispersy_cid).encode('hex')}) - - def on_torrent_modification_from_dispersy(self, channeltorrent_id, modification_type, modification_value): - if modification_type in ['name', 'description']: - update_torrent = "UPDATE _ChannelTorrents SET " + modification_type + " = ?, modified = ? WHERE id = ?" - self._db.execute_write(update_torrent, (modification_value, long(time()), channeltorrent_id)) - - sql = "Select infohash From Torrent, ChannelTorrents Where Torrent.torrent_id = ChannelTorrents.torrent_id And ChannelTorrents.id = ?" - infohash = self._db.fetchone(sql, (channeltorrent_id,)) - - if infohash: - infohash = str2bin(infohash) - self.notifier.notify(NTFY_TORRENTS, NTFY_UPDATE, infohash) - - def addOrGetChannelTorrentID(self, channel_id, infohash): - torrent_id = self.torrent_db.addOrGetTorrentID(infohash) - - sql = "SELECT id FROM _ChannelTorrents WHERE torrent_id = ? AND channel_id = ?" - channeltorrent_id = self._db.fetchone(sql, (torrent_id, channel_id)) - if not channeltorrent_id: - insert_torrent = "INSERT OR IGNORE INTO _ChannelTorrents (dispersy_id, torrent_id, channel_id, time_stamp) VALUES (?,?,?,?);" - self._db.execute_write(insert_torrent, (-1, torrent_id, channel_id, -1)) - - channeltorrent_id = self._db.fetchone(sql, (torrent_id, channel_id)) - return channeltorrent_id - - def get_channel_torrent_id(self, channel_id, info_hash): - torrent_id = self.torrent_db.getTorrentID(info_hash) - if torrent_id: - sql = "SELECT id FROM ChannelTorrents WHERE torrent_id = ? and channel_id = ?" - channeltorrent_id = self._db.fetchone(sql, (torrent_id, channel_id)) - return channeltorrent_id - - def hasTorrent(self, channel_id, infohash): - return True if self.get_channel_torrent_id(channel_id, infohash) else False - - def hasTorrents(self, channel_id, infohashes): - returnAr = [] - torrent_id_results = self.torrent_db.getTorrentIDS(infohashes) - - for infohash in infohashes: - if torrent_id_results[infohash] is None: - returnAr.append(False) - else: - torrent_id = torrent_id_results[infohash] - sql = "SELECT id FROM ChannelTorrents WHERE torrent_id = ? AND channel_id = ? AND dispersy_id <> -1" - channeltorrent_id = self._db.fetchone(sql, (torrent_id, channel_id)) - returnAr.append(True if channeltorrent_id else False) - return returnAr - - def playlistHasTorrent(self, playlist_id, channeltorrent_id): - sql = "SELECT id FROM PlaylistTorrents WHERE playlist_id = ? AND channeltorrent_id = ?" 
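addOrGetChannelTorrentID above is a classic get-or-create: select first, insert-or-ignore on a miss, then re-select. A self-contained sketch of the same pattern against a simplified table:

    import sqlite3

    conn = sqlite3.connect(":memory:")
    conn.execute("CREATE TABLE _ChannelTorrents (id INTEGER PRIMARY KEY, torrent_id INTEGER, "
                 "channel_id INTEGER, UNIQUE(torrent_id, channel_id))")

    def add_or_get_channel_torrent_id(torrent_id, channel_id):
        sql = "SELECT id FROM _ChannelTorrents WHERE torrent_id = ? AND channel_id = ?"
        row = conn.execute(sql, (torrent_id, channel_id)).fetchone()
        if row is None:
            # INSERT OR IGNORE makes a lost race with a concurrent insert harmless
            conn.execute("INSERT OR IGNORE INTO _ChannelTorrents (torrent_id, channel_id) "
                         "VALUES (?, ?)", (torrent_id, channel_id))
            row = conn.execute(sql, (torrent_id, channel_id)).fetchone()
        return row[0]

    assert add_or_get_channel_torrent_id(5, 1) == add_or_get_channel_torrent_id(5, 1)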
- playlisttorrent_id = self._db.fetchone(sql, (playlist_id, channeltorrent_id))
- if playlisttorrent_id:
- return True
- return False
-
- # dispersy receiving comments
- def on_comment_from_dispersy(self, channel_id, dispersy_id, mid_global_time, peer_id, comment, timestamp,
- reply_to, reply_after, playlist_dispersy_id, infohash):
- # both reply_to and reply_after could be loose pointers to a not-yet-received dispersy message
- if isinstance(reply_to, (str)):
- reply_to = buffer(reply_to)
-
- if isinstance(reply_after, (str)):
- reply_after = buffer(reply_after)
- mid_global_time = buffer(mid_global_time)
-
- sql = """INSERT OR REPLACE INTO _Comments
- (channel_id, dispersy_id, peer_id, comment, reply_to_id, reply_after_id, time_stamp)
- VALUES (?, ?, ?, ?, ?, ?, ?); SELECT last_insert_rowid();"""
- comment_id = self._db.fetchone(
- sql, (channel_id, dispersy_id, peer_id, comment, reply_to, reply_after, timestamp))
-
- if playlist_dispersy_id or infohash:
- if playlist_dispersy_id:
- sql = "SELECT id FROM Playlists WHERE dispersy_id = ?"
- playlist_id = self._db.fetchone(sql, (playlist_dispersy_id,))
-
- sql = "INSERT INTO CommentPlaylist (comment_id, playlist_id) VALUES (?, ?)"
- self._db.execute_write(sql, (comment_id, playlist_id))
-
- if infohash:
- channeltorrent_id = self.addOrGetChannelTorrentID(channel_id, infohash)
-
- sql = "INSERT INTO CommentTorrent (comment_id, channeltorrent_id) VALUES (?, ?)"
- self._db.execute_write(sql, (comment_id, channeltorrent_id))
-
- # try to fix loose reply_to and reply_after pointers
- sql = "UPDATE _Comments SET reply_to_id = ? WHERE reply_to_id = ?"
- self._db.execute_write(sql, (dispersy_id, mid_global_time))
- sql = "UPDATE _Comments SET reply_after_id = ? WHERE reply_after_id = ?"
- self._db.execute_write(sql, (dispersy_id, mid_global_time))
-
- self.notifier.notify(NTFY_COMMENTS, NTFY_INSERT, channel_id)
- if playlist_dispersy_id:
- self.notifier.notify(NTFY_COMMENTS, NTFY_INSERT, playlist_id)
- if infohash:
- self.notifier.notify(NTFY_COMMENTS, NTFY_INSERT, infohash)
-
- # dispersy removing comments
- def on_remove_comment_from_dispersy(self, channel_id, dispersy_id, infohash=None, redo=False):
- sql = "UPDATE _Comments SET deleted_at = ? WHERE dispersy_id = ?"
-
- if redo:
- deleted_at = None
- self._db.execute_write(sql, (deleted_at, dispersy_id))
-
- self.notifier.notify(NTFY_COMMENTS, NTFY_INSERT, channel_id)
- if infohash:
- self.notifier.notify(NTFY_COMMENTS, NTFY_INSERT, infohash)
- else:
- deleted_at = long(time())
- self._db.execute_write(sql, (deleted_at, dispersy_id))
-
- self.notifier.notify(NTFY_COMMENTS, NTFY_DELETE, channel_id)
- if infohash:
- self.notifier.notify(NTFY_COMMENTS, NTFY_DELETE, infohash)
-
- # dispersy receiving, modifying playlists
- def on_playlist_from_dispersy(self, channel_id, dispersy_id, peer_id, name, description):
- sql = "INSERT OR REPLACE INTO _Playlists (channel_id, dispersy_id, peer_id, name, description) VALUES (?, ?, ?, ?, ?)"
- self._db.execute_write(sql, (channel_id, dispersy_id, peer_id, name, description))
-
- self.notifier.notify(NTFY_PLAYLISTS, NTFY_INSERT, channel_id)
-
- def on_remove_playlist_from_dispersy(self, channel_id, dispersy_id, redo):
- sql = "UPDATE _Playlists SET deleted_at = ? WHERE channel_id = ? and dispersy_id = ?"
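on_comment_from_dispersy above tolerates out-of-order delivery: a reply may reference a comment that has not arrived yet by a provisional mid+global_time key, which is rewritten to the real dispersy_id once the target arrives. The repair step in isolation, as a simplified single-table sketch:

    import sqlite3

    conn = sqlite3.connect(":memory:")
    conn.execute("CREATE TABLE _Comments (dispersy_id INTEGER, reply_to_id BLOB)")

    provisional = b"mid+global_time"  # placeholder key for a not-yet-received comment
    conn.execute("INSERT INTO _Comments VALUES (?, ?)", (2, provisional))  # reply arrives first

    dispersy_id = 1  # the referenced comment finally arrives
    conn.execute("INSERT INTO _Comments VALUES (?, NULL)", (dispersy_id,))
    conn.execute("UPDATE _Comments SET reply_to_id = ? WHERE reply_to_id = ?",
                 (dispersy_id, provisional))

    assert conn.execute("SELECT reply_to_id FROM _Comments WHERE dispersy_id = 2").fetchone()[0] == 1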
- - if redo: - deleted_at = None - self._db.execute_write(sql, (deleted_at, channel_id, dispersy_id)) - self.notifier.notify(NTFY_PLAYLISTS, NTFY_INSERT, channel_id) - - else: - deleted_at = long(time()) - self._db.execute_write(sql, (deleted_at, channel_id, dispersy_id)) - self.notifier.notify(NTFY_PLAYLISTS, NTFY_DELETE, channel_id) - - def on_playlist_modification_from_dispersy(self, playlist_id, modification_type, modification_value): - if modification_type in ['name', 'description']: - update_playlist = "UPDATE _Playlists Set " + modification_type + " = ?, modified = ? WHERE id = ?" - self._db.execute_write(update_playlist, (modification_value, long(time()), playlist_id)) - - self.notifier.notify(NTFY_PLAYLISTS, NTFY_UPDATE, playlist_id) - - def on_playlist_torrent(self, dispersy_id, playlist_dispersy_id, peer_id, infohash): - get_playlist = "SELECT id, channel_id FROM _Playlists WHERE dispersy_id = ?" - playlist_id, channel_id = self._db.fetchone(get_playlist, (playlist_dispersy_id,)) - - channeltorrent_id = self.addOrGetChannelTorrentID(channel_id, infohash) - sql = "INSERT INTO _PlaylistTorrents (dispersy_id, playlist_id, peer_id, channeltorrent_id) VALUES (?,?,?,?)" - self._db.execute_write(sql, (dispersy_id, playlist_id, peer_id, channeltorrent_id)) - - self.notifier.notify(NTFY_PLAYLISTS, NTFY_UPDATE, playlist_id, infohash) - - def on_remove_playlist_torrent(self, channel_id, playlist_dispersy_id, infohash, redo): - get_playlist = "SELECT id FROM _Playlists WHERE dispersy_id = ? AND channel_id = ?" - playlist_id = self._db.fetchone(get_playlist, (playlist_dispersy_id, channel_id)) - - if playlist_id: - get_channeltorent_id = """SELECT _ChannelTorrents.id FROM _ChannelTorrents, Torrent, _PlaylistTorrents - WHERE _ChannelTorrents.torrent_id = Torrent.torrent_id AND _ChannelTorrents.id = - _PlaylistTorrents.channeltorrent_id AND playlist_id = ? AND Torrent.infohash = ?""" - channeltorrent_id = self._db.fetchone(get_channeltorent_id, (playlist_id, bin2str(infohash))) - - if channeltorrent_id: - sql = "UPDATE _PlaylistTorrents SET deleted_at = ? WHERE playlist_id = ? AND channeltorrent_id = ?" 
-
- if redo:
- deleted_at = None
- else:
- deleted_at = long(time())
- self._db.execute_write(sql, (deleted_at, playlist_id, channeltorrent_id))
-
- self.notifier.notify(NTFY_PLAYLISTS, NTFY_UPDATE, playlist_id)
-
- def on_metadata_from_dispersy(self, type, channeltorrent_id, playlist_id, channel_id, dispersy_id, peer_id,
- mid_global_time, modification_type, modification_value, timestamp,
- prev_modification_id, prev_modification_global_time):
- if isinstance(prev_modification_id, (str)):
- prev_modification_id = buffer(prev_modification_id)
-
- sql = """INSERT OR REPLACE INTO _ChannelMetaData
- (dispersy_id, channel_id, peer_id, type, value, time_stamp, prev_modification, prev_global_time)
- VALUES (?, ?, ?, ?, ?, ?, ?, ?); SELECT last_insert_rowid();"""
- metadata_id = self._db.fetchone(sql, (dispersy_id, channel_id, peer_id,
- modification_type,
- modification_value, timestamp,
- prev_modification_id,
- prev_modification_global_time))
-
- if channeltorrent_id:
- sql = "INSERT INTO MetaDataTorrent (metadata_id, channeltorrent_id) VALUES (?,?)"
- self._db.execute_write(sql, (metadata_id, channeltorrent_id))
-
- self.notifier.notify(NTFY_MODIFICATIONS, NTFY_INSERT, channeltorrent_id)
-
- if playlist_id:
- sql = "INSERT INTO MetaDataPlaylist (metadata_id, playlist_id) VALUES (?,?)"
- self._db.execute_write(sql, (metadata_id, playlist_id))
-
- self.notifier.notify(NTFY_MODIFICATIONS, NTFY_INSERT, playlist_id)
- self.notifier.notify(NTFY_MODIFICATIONS, NTFY_INSERT, channel_id)
-
- # try to fix loose prev_modification pointers
- sql = "UPDATE _ChannelMetaData SET prev_modification = ? WHERE prev_modification = ?;"
- self._db.execute_write(sql, (dispersy_id, buffer(mid_global_time)))
-
- def on_remove_metadata_from_dispersy(self, channel_id, dispersy_id, redo):
- sql = "UPDATE _ChannelMetaData SET deleted_at = ? WHERE dispersy_id = ? AND channel_id = ?"
-
- if redo:
- deleted_at = None
- else:
- deleted_at = long(time())
- self._db.execute_write(sql, (deleted_at, dispersy_id, channel_id))
-
- def on_moderation(self, channel_id, dispersy_id, peer_id, by_peer_id, cause, message, timestamp, severity):
- sql = """INSERT OR REPLACE INTO _Moderations
- (dispersy_id, channel_id, peer_id, by_peer_id, message, cause, time_stamp, severity)
- VALUES (?,?,?,?,?,?,?,?)"""
- self._db.execute_write(sql, (dispersy_id, channel_id, peer_id, by_peer_id, message, cause, timestamp, severity))
-
- self.notifier.notify(NTFY_MODERATIONS, NTFY_INSERT, channel_id)
-
- def on_remove_moderation(self, channel_id, dispersy_id, redo):
- sql = "UPDATE _Moderations SET deleted_at = ? WHERE dispersy_id = ? AND channel_id = ?"
- if redo:
- deleted_at = None
- else:
- deleted_at = long(time())
- self._db.execute_write(sql, (deleted_at, dispersy_id, channel_id))
-
- def on_mark_torrent(self, channel_id, dispersy_id, global_time, peer_id, infohash, type, timestamp):
- channeltorrent_id = self.addOrGetChannelTorrentID(channel_id, infohash)
-
- if peer_id:
- select = "SELECT global_time FROM TorrentMarkings WHERE channeltorrent_id = ? AND peer_id = ?"
- prev_global_time = self._db.fetchone(select, (channeltorrent_id, peer_id))
- else:
- select = "SELECT global_time FROM TorrentMarkings WHERE channeltorrent_id = ? AND peer_id IS NULL"
- prev_global_time = self._db.fetchone(select, (channeltorrent_id,))
-
- if prev_global_time:
- if global_time > prev_global_time:
- if peer_id:
- sql = "DELETE FROM _TorrentMarkings WHERE channeltorrent_id = ? AND peer_id = ?"
- self._db.execute_write(sql, (channeltorrent_id, peer_id)) - else: - sql = "DELETE FROM _TorrentMarkings WHERE channeltorrent_id = ? AND peer_id IS NULL" - self._db.execute_write(sql, (channeltorrent_id,)) - else: - return - - sql = """INSERT INTO _TorrentMarkings (dispersy_id, global_time, channeltorrent_id, peer_id, type, time_stamp) - VALUES (?,?,?,?,?,?)""" - self._db.execute_write(sql, (dispersy_id, global_time, channeltorrent_id, peer_id, type, timestamp)) - self.notifier.notify(NTFY_MARKINGS, NTFY_INSERT, channeltorrent_id) - - def on_remove_mark_torrent(self, channel_id, dispersy_id, redo): - sql = "UPDATE _TorrentMarkings SET deleted_at = ? WHERE dispersy_id = ?" - - if redo: - deleted_at = None - else: - deleted_at = long(time()) - self._db.execute_write(sql, (deleted_at, dispersy_id)) - - def on_dynamic_settings(self, channel_id): - self.notifier.notify(NTFY_CHANNELCAST, NTFY_STATE, channel_id) - - def getNrTorrentsDownloaded(self, channel_id): - sql = """select count(*) from MyPreference, ChannelTorrents - WHERE MyPreference.torrent_id = ChannelTorrents.torrent_id and ChannelTorrents.channel_id = ? LIMIT 1""" - return self._db.fetchone(sql, (channel_id,)) - - def getChannelNrTorrents(self, limit=None): - if limit: - sql = """select count(torrent_id), channel_id from Channels, ChannelTorrents - WHERE Channels.id = ChannelTorrents.channel_id AND dispersy_cid <> -1 - GROUP BY channel_id ORDER BY RANDOM() LIMIT ?""" - return self._db.fetchall(sql, (limit,)) - - sql = """SELECT count(torrent_id), channel_id FROM Channels, ChannelTorrents - WHERE Channels.id = ChannelTorrents.channel_id AND dispersy_cid <> -1 GROUP BY channel_id""" - return self._db.fetchall(sql) - - def getChannelNrTorrentsLatestUpdate(self, limit=None): - if limit: - sql = """SELECT count(CollectedTorrent.torrent_id), max(ChannelTorrents.time_stamp), - channel_id from Channels, ChannelTorrents, CollectedTorrent - WHERE ChannelTorrents.torrent_id = CollectedTorrent.torrent_id - AND Channels.id = ChannelTorrents.channel_id AND dispersy_cid == -1 - GROUP BY channel_id ORDER BY RANDOM() LIMIT ?""" - return self._db.fetchall(sql, (limit,)) - - sql = """SELECT count(CollectedTorrent.torrent_id), max(ChannelTorrents.time_stamp), channel_id from Channels, - ChannelTorrents, CollectedTorrent - WHERE ChannelTorrents.torrent_id = CollectedTorrent.torrent_id - AND Channels.id = ChannelTorrents.channel_id AND dispersy_cid == -1 GROUP BY channel_id""" - return self._db.fetchall(sql) - - def getNrChannels(self): - sql = "select count(DISTINCT id) from Channels LIMIT 1" - return self._db.fetchone(sql) - - def getRecentAndRandomTorrents(self, NUM_OWN_RECENT_TORRENTS=15, NUM_OWN_RANDOM_TORRENTS=10, - NUM_OTHERS_RECENT_TORRENTS=15, NUM_OTHERS_RANDOM_TORRENTS=10, - NUM_OTHERS_DOWNLOADED=5): - torrent_dict = {} - - least_recent = -1 - sql = """SELECT dispersy_cid, infohash, time_stamp from ChannelTorrents, Channels, Torrent - WHERE ChannelTorrents.torrent_id = Torrent.torrent_id AND Channels.id = ChannelTorrents.channel_id - AND ChannelTorrents.channel_id==? 
and ChannelTorrents.dispersy_id <> -1 order by time_stamp desc limit ?"""
- myrecenttorrents = self._db.fetchall(sql, (self._channel_id, NUM_OWN_RECENT_TORRENTS))
- for cid, infohash, timestamp in myrecenttorrents:
- torrent_dict.setdefault(str(cid), set()).add(str2bin(infohash))
- least_recent = timestamp
-
- if len(myrecenttorrents) == NUM_OWN_RECENT_TORRENTS and least_recent != -1:
- sql = """SELECT dispersy_cid, infohash from ChannelTorrents, Channels, Torrent
- WHERE ChannelTorrents.torrent_id = Torrent.torrent_id AND Channels.id = ChannelTorrents.channel_id
- AND ChannelTorrents.channel_id==? AND time_stamp < ? and ChannelTorrents.dispersy_id <> -1 order by random() limit ?"""
- myrandomtorrents = self._db.fetchall(sql, (self._channel_id, least_recent, NUM_OWN_RANDOM_TORRENTS))
- for cid, infohash, _ in myrecenttorrents:
- torrent_dict.setdefault(str(cid), set()).add(str2bin(infohash))
-
- for cid, infohash in myrandomtorrents:
- torrent_dict.setdefault(str(cid), set()).add(str2bin(infohash))
-
- nr_records = sum(len(torrents) for torrents in torrent_dict.values())
- additionalSpace = (NUM_OWN_RECENT_TORRENTS + NUM_OWN_RANDOM_TORRENTS) - nr_records
-
- if additionalSpace > 0:
- NUM_OTHERS_RECENT_TORRENTS += additionalSpace / 2
- NUM_OTHERS_RANDOM_TORRENTS += additionalSpace - (additionalSpace / 2)
-
- # Niels 6-12-2011: we should subtract additionalSpace from recent and
- # random, otherwise the totals will not be correct.
- NUM_OWN_RECENT_TORRENTS -= additionalSpace / 2
- NUM_OWN_RANDOM_TORRENTS -= additionalSpace - (additionalSpace / 2)
-
- least_recent = -1
- sql = """SELECT dispersy_cid, infohash, time_stamp from ChannelTorrents, Channels, Torrent
- WHERE ChannelTorrents.torrent_id = Torrent.torrent_id AND Channels.id = ChannelTorrents.channel_id
- AND ChannelTorrents.channel_id in (select channel_id from ChannelVotes
- WHERE voter_id ISNULL AND vote=2) and ChannelTorrents.dispersy_id <> -1 ORDER BY time_stamp desc limit ?"""
- othersrecenttorrents = self._db.fetchall(sql, (NUM_OTHERS_RECENT_TORRENTS,))
- for cid, infohash, timestamp in othersrecenttorrents:
- torrent_dict.setdefault(str(cid), set()).add(str2bin(infohash))
- least_recent = timestamp
-
- if othersrecenttorrents and len(othersrecenttorrents) == NUM_OTHERS_RECENT_TORRENTS and least_recent != -1:
- sql = """SELECT dispersy_cid, infohash FROM ChannelTorrents, Channels, Torrent
- WHERE ChannelTorrents.torrent_id = Torrent.torrent_id AND Channels.id = ChannelTorrents.channel_id
- AND ChannelTorrents.channel_id in (select channel_id from ChannelVotes
- WHERE voter_id ISNULL and vote=2) and time_stamp < ?
- AND ChannelTorrents.dispersy_id <> -1 order by random() limit ?"""
- othersrandomtorrents = self._db.fetchall(sql, (least_recent, NUM_OTHERS_RANDOM_TORRENTS))
- for cid, infohash in othersrandomtorrents:
- torrent_dict.setdefault(str(cid), set()).add(str2bin(infohash))
-
- twomonthsago = long(time() - 5259487)
- nr_records = sum(len(torrents) for torrents in torrent_dict.values())
- additionalSpace = (NUM_OWN_RECENT_TORRENTS + NUM_OWN_RANDOM_TORRENTS +
- NUM_OTHERS_RECENT_TORRENTS + NUM_OTHERS_RANDOM_TORRENTS) - nr_records
- NUM_OTHERS_DOWNLOADED += additionalSpace
-
- sql = """SELECT dispersy_cid, infohash from ChannelTorrents, Channels, Torrent
- WHERE ChannelTorrents.torrent_id = Torrent.torrent_id AND Channels.id = ChannelTorrents.channel_id
- AND ChannelTorrents.channel_id in (select distinct channel_id from ChannelTorrents
- WHERE torrent_id in (select torrent_id from MyPreference))
- AND ChannelTorrents.dispersy_id <> -1 and Channels.modified > ?
order by time_stamp desc limit ?"""
- interesting_records = self._db.fetchall(sql, (twomonthsago, NUM_OTHERS_DOWNLOADED))
- for cid, infohash in interesting_records:
- torrent_dict.setdefault(str(cid), set()).add(str2bin(infohash))
-
- return torrent_dict
-
- def getRandomTorrents(self, channel_id, limit=15):
- sql = """SELECT infohash FROM ChannelTorrents, Torrent WHERE ChannelTorrents.torrent_id = Torrent.torrent_id
- AND channel_id = ? ORDER BY RANDOM() LIMIT ?"""
-
- returnar = []
- for infohash, in self._db.fetchall(sql, (channel_id, limit)):
- returnar.append(str2bin(infohash))
- return returnar
-
- def getTorrentFromChannelId(self, channel_id, infohash, keys):
- sql = "SELECT " + ", ".join(keys) + """ FROM Torrent, ChannelTorrents
- WHERE Torrent.torrent_id = ChannelTorrents.torrent_id AND channel_id = ? AND infohash = ?"""
- result = self._db.fetchone(sql, (channel_id, bin2str(infohash)))
-
- return self.__fixTorrent(keys, result)
-
- def getChannelTorrents(self, infohash, keys):
- sql = "SELECT " + ", ".join(keys) + """ FROM Torrent, ChannelTorrents
- WHERE Torrent.torrent_id = ChannelTorrents.torrent_id AND infohash = ?"""
- results = self._db.fetchall(sql, (bin2str(infohash),))
-
- return self.__fixTorrents(keys, results)
-
- def get_random_channel_torrents(self, keys, limit=10):
- """
- Return some random (channel) torrents from the database.
- """
- sql = "SELECT %s FROM ChannelTorrents, Torrent " \
- "WHERE ChannelTorrents.torrent_id = Torrent.torrent_id AND Torrent.name IS NOT NULL " \
- "ORDER BY RANDOM() LIMIT ?" % ", ".join(keys)
- results = self._db.fetchall(sql, (limit,))
- return self.__fixTorrents(keys, results)
-
- def getTorrentFromChannelTorrentId(self, channeltorrent_id, keys):
- sql = "SELECT " + ", ".join(keys) + """ FROM Torrent, ChannelTorrents
- WHERE Torrent.torrent_id = ChannelTorrents.torrent_id AND ChannelTorrents.id = ?"""
- result = self._db.fetchone(sql, (channeltorrent_id,))
- if not result:
- self._logger.info("COULD NOT FIND CHANNELTORRENT_ID %s", channeltorrent_id)
- else:
- return self.__fixTorrent(keys, result)
-
- def getTorrentsFromChannelId(self, channel_id, isDispersy, keys, limit=None):
- if isDispersy:
- sql = "SELECT " + ", ".join(keys) + """ FROM Torrent, ChannelTorrents
- WHERE Torrent.torrent_id = ChannelTorrents.torrent_id"""
- else:
- sql = "SELECT " + ", ".join(keys) + """ FROM CollectedTorrent as Torrent, ChannelTorrents
- WHERE Torrent.torrent_id = ChannelTorrents.torrent_id"""
-
- if channel_id:
- sql += " AND channel_id = ?"
- sql += " ORDER BY time_stamp DESC"
-
- if limit:
- sql += " LIMIT %d" % limit
-
- if channel_id:
- results = self._db.fetchall(sql, (channel_id,))
- else:
- results = self._db.fetchall(sql)
-
- if limit is None and channel_id:
- # use this possibility to update nrtorrent in channel
-
- if 'time_stamp' in keys and len(results) > 0:
- update = "UPDATE _Channels SET nr_torrents = ?, modified = ? WHERE id = ?"
- self._db.execute_write(update, (len(results), results[0][keys.index('time_stamp')], channel_id))
- else:
- # use this possibility to update nrtorrent in channel
- update = "UPDATE _Channels SET nr_torrents = ? WHERE id = ?"
- self._db.execute_write(update, (len(results), channel_id))
-
- return self.__fixTorrents(keys, results)
-
- def getRecentReceivedTorrentsFromChannelId(self, channel_id, keys, limit=None):
- sql = "SELECT " + ", ".join(keys) + " FROM Torrent, ChannelTorrents " + \
- "WHERE Torrent.torrent_id = ChannelTorrents.torrent_id AND channel_id = ?
ORDER BY inserted DESC" - if limit: - sql += " LIMIT %d" % limit - results = self._db.fetchall(sql, (channel_id,)) - return self.__fixTorrents(keys, results) - - def getRecentModificationsFromChannelId(self, channel_id, keys, limit=None): - sql = "SELECT " + ", ".join(keys) + """ FROM ChannelMetaData - LEFT JOIN MetaDataTorrent ON ChannelMetaData.id = MetaDataTorrent.metadata_id - LEFT JOIN Moderations ON Moderations.cause = ChannelMetaData.dispersy_id - WHERE ChannelMetaData.channel_id = ? - ORDER BY -Moderations.time_stamp ASC, ChannelMetaData.inserted DESC""" - if limit: - sql += " LIMIT %d" % limit - return self._db.fetchall(sql, (channel_id,)) - - def getRecentModerationsFromChannel(self, channel_id, keys, limit=None): - sql = "SELECT " + ", ".join(keys) + """ FROM Moderations, MetaDataTorrent, ChannelMetaData - WHERE Moderations.cause = ChannelMetaData.dispersy_id - AND ChannelMetaData.id = MetaDataTorrent.metadata_id - AND Moderations.channel_id = ? - ORDER BY Moderations.inserted DESC""" - if limit: - sql += " LIMIT %d" % limit - return self._db.fetchall(sql, (channel_id,)) - - def getRecentMarkingsFromChannel(self, channel_id, keys, limit=None): - sql = "SELECT " + ", ".join(keys) + """ FROM TorrentMarkings, ChannelTorrents - WHERE TorrentMarkings.channeltorrent_id = ChannelTorrents.id - AND ChannelTorrents.channel_id = ? - ORDER BY TorrentMarkings.time_stamp DESC""" - if limit: - sql += " LIMIT %d" % limit - return self._db.fetchall(sql, (channel_id,)) - - def getTorrentsFromPlaylist(self, playlist_id, keys, limit=None): - sql = "SELECT " + ", ".join(keys) + """ FROM Torrent, ChannelTorrents, PlaylistTorrents - WHERE Torrent.torrent_id = ChannelTorrents.torrent_id - AND ChannelTorrents.id = PlaylistTorrents.channeltorrent_id - AND playlist_id = ? ORDER BY time_stamp DESC""" - if limit: - sql += " LIMIT %d" % limit - results = self._db.fetchall(sql, (playlist_id,)) - return self.__fixTorrents(keys, results) - - def getTorrentFromPlaylist(self, playlist_id, infohash, keys): - sql = "SELECT " + ", ".join(keys) + """ FROM Torrent, ChannelTorrents, PlaylistTorrents - WHERE Torrent.torrent_id = ChannelTorrents.torrent_id - AND ChannelTorrents.id = PlaylistTorrents.channeltorrent_id - AND playlist_id = ? AND infohash = ?""" - result = self._db.fetchone(sql, (playlist_id, bin2str(infohash))) - - return self.__fixTorrent(keys, result) - - def getRecentTorrentsFromPlaylist(self, playlist_id, keys, limit=None): - sql = "SELECT " + ", ".join(keys) + """ FROM Torrent, ChannelTorrents, PlaylistTorrents - WHERE Torrent.torrent_id = ChannelTorrents.torrent_id - AND ChannelTorrents.id = PlaylistTorrents.channeltorrent_id - AND playlist_id = ? 
ORDER BY inserted DESC""" - if limit: - sql += " LIMIT %d" % limit - results = self._db.fetchall(sql, (playlist_id,)) - return self.__fixTorrents(keys, results) - - def getRecentModificationsFromPlaylist(self, playlist_id, keys, limit=None): - playlistKeys = keys[:] - if 'MetaDataTorrent.channeltorrent_id' in playlistKeys: - playlistKeys[playlistKeys.index('MetaDataTorrent.channeltorrent_id')] = '""' - - sql = "SELECT " + ", ".join(playlistKeys) + """ FROM MetaDataPlaylist, ChannelMetaData - LEFT JOIN Moderations ON Moderations.cause = ChannelMetaData.dispersy_id - WHERE MetaDataPlaylist.metadata_id = ChannelMetaData.id AND playlist_id = ?""" - if limit: - sql += " LIMIT %d" % limit - playlist_modifications = self._db.fetchall(sql, (playlist_id,)) - - sql = "SELECT " + ", ".join(keys) + """ FROM MetaDataTorrent, ChannelMetaData, PlaylistTorrents - LEFT JOIN Moderations ON Moderations.cause = ChannelMetaData.dispersy_id - WHERE MetaDataTorrent.metadata_id = ChannelMetaData.id - AND PlaylistTorrents.channeltorrent_id = MetaDataTorrent.channeltorrent_id AND playlist_id = ?""" - if limit: - sql += " LIMIT %d" % limit - torrent_modifications = self._db.fetchall(sql, (playlist_id,)) - - # merge two lists - orderIndex = keys.index('ChannelMetaData.time_stamp') - revertIndex = keys.index('Moderations.time_stamp') - data = [(row[revertIndex], row[orderIndex], row) for row in playlist_modifications] - data += [(row[revertIndex], row[orderIndex], row) for row in torrent_modifications] - data.sort(reverse=True) - - if limit: - data = data[:limit] - data = [item for _, _, item in data] - return data - - def getRecentModerationsFromPlaylist(self, playlist_id, keys, limit=None): - sql = "SELECT " + ", ".join(keys) + """ FROM Moderations, MetaDataTorrent, ChannelMetaData, PlaylistTorrents - WHERE Moderations.cause = ChannelMetaData.dispersy_id - AND ChannelMetaData.id = MetaDataTorrent.metadata_id - AND MetaDataTorrent.channeltorrent_id = PlaylistTorrents.channeltorrent_id - AND PlaylistTorrents.playlist_id = ? ORDER BY Moderations.inserted DESC""" - if limit: - sql += " LIMIT %d" % limit - return self._db.fetchall(sql, (playlist_id,)) - - def getRecentMarkingsFromPlaylist(self, playlist_id, keys, limit=None): - sql = "SELECT " + ", ".join(keys) + """ FROM TorrentMarkings, PlaylistTorrents, ChannelTorrents - WHERE TorrentMarkings.channeltorrent_id = PlaylistTorrents.channeltorrent_id - AND ChannelTorrents.id = PlaylistTorrents.channeltorrent_id - AND PlaylistTorrents.playlist_id = ? - AND ChannelTorrents.dispersy_id <> -1 ORDER BY TorrentMarkings.time_stamp DESC""" - if limit: - sql += " LIMIT %d" % limit - return self._db.fetchall(sql, (playlist_id,)) - - def getTorrentsNotInPlaylist(self, channel_id, keys): - sql = "SELECT " + ", ".join(keys) + " FROM Torrent, ChannelTorrents " + \ - "WHERE Torrent.torrent_id = ChannelTorrents.torrent_id " + \ - "AND channel_id = ? " + \ - "And ChannelTorrents.id NOT IN (Select channeltorrent_id From PlaylistTorrents) " + \ - "ORDER BY time_stamp DESC" - results = self._db.fetchall(sql, (channel_id,)) - return self.__fixTorrents(keys, results) - - def getPlaylistForTorrent(self, channeltorrent_id, keys): - sql = "SELECT " + ", ".join(keys) + \ - ", count(DISTINCT channeltorrent_id) FROM Playlists, PlaylistTorrents " + \ - "WHERE Playlists.id = PlaylistTorrents.playlist_id AND channeltorrent_id = ?" 
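Most of these handlers assemble their SELECT clause by joining a caller-supplied keys list onto a fixed prefix. Note that getChannelTorrents above lost the "+" between "SELECT " and ", ".join(keys): Python concatenates the adjacent string literals first, so the prefix silently becomes part of the join separator. A minimal standalone sketch of the difference (the keys list is illustrative):

```python
# Column-joining pattern used throughout these handlers; 'keys' is illustrative.
keys = ['infohash', 'name', 'time_stamp']

correct = "SELECT " + ", ".join(keys)
# Without the '+', the adjacent literals "SELECT " and ", " fuse into one
# separator string before .join() runs, producing a broken statement:
broken = "SELECT " ", ".join(keys)

assert correct == "SELECT infohash, name, time_stamp"
assert broken == "infohashSELECT , nameSELECT , time_stamp"
```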
- result = self._db.fetchone(sql, (channeltorrent_id,)) - # Niels: 29-02-2012 due to the count this always returns one row, check - # count to return None if playlist was actually not found. - if result[-1]: - return result - - def getPlaylistsForTorrents(self, torrent_ids, keys): - torrent_ids = " ,".join(map(str, torrent_ids)) - - sql = "SELECT channeltorrent_id, " + ", ".join(keys) + \ - ", count(DISTINCT channeltorrent_id) FROM Playlists, PlaylistTorrents " + \ - "WHERE Playlists.id = PlaylistTorrents.playlist_id AND channeltorrent_id IN (" + \ - torrent_ids + ") GROUP BY Playlists.id" - return self._db.fetchall(sql) - - def __fixTorrent(self, keys, torrent): - if len(keys) == 1: - if keys[0] == 'infohash': - return str2bin(torrent) - return torrent - - def fix_value(key, torrent): - if key in keys: - key_index = keys.index(key) - if torrent[key_index]: - torrent[key_index] = str2bin(torrent[key_index]) - if torrent: - torrent = list(torrent) - fix_value('infohash', torrent) - return torrent - - def __fixTorrents(self, keys, results): - def fix_value(key): - if key in keys: - key_index = keys.index(key) - for i in range(len(results)): - result = list(results[i]) - if result[key_index]: - result[key_index] = str2bin(result[key_index]) - results[i] = result - fix_value('infohash') - return results - - def getPlaylistsFromChannelId(self, channel_id, keys): - sql = "SELECT " + ", ".join(keys) + \ - ", count(DISTINCT ChannelTorrents.id) FROM Playlists " + \ - "LEFT JOIN PlaylistTorrents ON Playlists.id = PlaylistTorrents.playlist_id " + \ - "LEFT JOIN ChannelTorrents ON PlaylistTorrents.channeltorrent_id = ChannelTorrents.id " + \ - "WHERE Playlists.channel_id = ? GROUP BY Playlists.id ORDER BY Playlists.name DESC" - return self._db.fetchall(sql, (channel_id,)) - - def getPlaylist(self, playlist_id, keys): - sql = "SELECT " + ", ".join(keys) + \ - ", count(DISTINCT ChannelTorrents.id) FROM Playlists " + \ - "LEFT JOIN PlaylistTorrents ON Playlists.id = PlaylistTorrents.playlist_id " + \ - "LEFT JOIN ChannelTorrents ON PlaylistTorrents.channeltorrent_id = ChannelTorrents.id " + \ - "WHERE Playlists.id = ? GROUP BY Playlists.id" - return self._db.fetchone(sql, (playlist_id,)) - - def getCommentsFromChannelId(self, channel_id, keys, limit=None): - sql = "SELECT " + ", ".join(keys) + " FROM Comments " + \ - "LEFT JOIN Peer ON Comments.peer_id = Peer.peer_id " + \ - "LEFT JOIN CommentPlaylist ON Comments.id = CommentPlaylist.comment_id " + \ - "LEFT JOIN CommentTorrent ON Comments.id = CommentTorrent.comment_id " + \ - "WHERE channel_id = ? ORDER BY time_stamp DESC" - if limit: - sql += " LIMIT %d" % limit - return self._db.fetchall(sql, (channel_id,)) - - def getCommentsFromPlayListId(self, playlist_id, keys, limit=None): - playlistKeys = keys[:] - if 'CommentTorrent.channeltorrent_id' in playlistKeys: - playlistKeys[playlistKeys.index('CommentTorrent.channeltorrent_id')] = '""' - - sql = "SELECT " + ", ".join(playlistKeys) + " FROM Comments " + \ - "LEFT JOIN Peer ON Comments.peer_id = Peer.peer_id " + \ - "LEFT JOIN CommentPlaylist ON Comments.id = CommentPlaylist.comment_id WHERE playlist_id = ?" 
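The __fixTorrent and __fixTorrents helpers above exist because infohashes are stored base64-encoded and must be turned back into raw bytes, located by their position in the keys list. A compact sketch of the same row fix-up, with str2bin stubbed as plain base64 decoding (which is what the sqlitecachedb helpers wrap):

```python
from base64 import b64decode, b64encode

def str2bin(str_data):
    # Stand-in for the sqlitecachedb helper: base64 text back to raw bytes.
    return b64decode(str_data)

def fix_rows(keys, rows, binary_key='infohash'):
    # Decode the binary column in every row, mirroring __fixTorrents.
    if binary_key not in keys:
        return rows
    idx = keys.index(binary_key)
    fixed = []
    for row in rows:
        row = list(row)
        if row[idx]:
            row[idx] = str2bin(row[idx])
        fixed.append(row)
    return fixed

# Illustrative row: a 20-byte infohash stored as base64 next to a name column.
encoded = b64encode(b'\x01' * 20).decode()
print(fix_rows(['infohash', 'name'], [(encoded, 'ubuntu.iso')]))
```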
- if limit: - sql += " LIMIT %d" % limit - - playlist_comments = self._db.fetchall(sql, (playlist_id,)) - - sql = "SELECT " + ", ".join(keys) + " FROM Comments, CommentTorrent, PlaylistTorrents " + \ - "LEFT JOIN Peer ON Comments.peer_id = Peer.peer_id " + \ - "WHERE Comments.id = CommentTorrent.comment_id " + \ - "AND PlaylistTorrents.channeltorrent_id = CommentTorrent.channeltorrent_id AND playlist_id = ?" - if limit: - sql += " LIMIT %d" % limit - - torrent_comments = self._db.fetchall(sql, (playlist_id,)) - - # merge two lists - orderIndex = keys.index('time_stamp') - data = [(row[orderIndex], row) for row in playlist_comments] - data += [(row[orderIndex], row) for row in torrent_comments] - data.sort(reverse=True) - - if limit: - data = data[:limit] - data = [item for _, item in data] - return data - - def getCommentsFromChannelTorrentId(self, channeltorrent_id, keys, limit=None): - sql = "SELECT " + ", ".join(keys) + " FROM Comments, CommentTorrent " + \ - "LEFT JOIN Peer ON Comments.peer_id = Peer.peer_id WHERE Comments.id = CommentTorrent.comment_id " + \ - "AND channeltorrent_id = ? ORDER BY time_stamp DESC" - if limit: - sql += " LIMIT %d" % limit - - return self._db.fetchall(sql, (channeltorrent_id,)) - - def searchChannelsTorrent(self, keywords, limitChannels=None, limitTorrents=None, dispersyOnly=False): - # search channels based on keywords - keywords = split_into_keywords(keywords) - keywords = [keyword for keyword in keywords if len(keyword) > 1] - - if len(keywords) > 0: - sql = "SELECT distinct id, dispersy_cid, name FROM Channels WHERE" - for keyword in keywords: - sql += " name like '%" + keyword + "%' and" - - if dispersyOnly: - sql += " dispersy_cid != '-1'" - else: - sql = sql[:-3] - - if limitChannels: - sql += " LIMIT %d" % limitChannels - - channels = self._db.fetchall(sql) - select_torrents = "SELECT infohash, ChannelTorrents.name, Torrent.name, time_stamp " + \ - "FROM Torrent, ChannelTorrents " + \ - "WHERE Torrent.torrent_id = ChannelTorrents.torrent_id AND channel_id = ? " + \ - "ORDER BY num_seeders DESC LIMIT ?" - - limitTorrents = limitTorrents or 20 - - results = [] - for channel_id, dispersy_cid, name in channels: - dispersy_cid = str(dispersy_cid) - torrents = self._db.fetchall(select_torrents, (channel_id, limitTorrents)) - for infohash, ChTname, CoTname, time_stamp in torrents: - infohash = str2bin(infohash) - results.append((channel_id, dispersy_cid, name, infohash, ChTname or CoTname, time_stamp)) - return results - return [] - - @staticmethod - def calculate_score_channel(keywords, channel_name, channel_description): - """ - Calculate the relevance score of a channel from the database. - The algorithm used is a very stripped-down version of BM25 where only the matching terms are counted. - """ - values = [channel_name, channel_description] - scores = [] - for col_ind in xrange(2): - score = 0 - for keyword in keywords: - term_freq = values[col_ind].lower().count(keyword) - - right_side = ((term_freq * (1.2 + 1)) / (term_freq + 1.2)) - score += right_side - - scores.append(score) - - # The relevance score is 80% dependent on the matching in the channel name - # and 20% on the matching in the channel description. - return 0.8 * scores[0] + 0.2 * scores[1] - - def search_in_local_channels_db(self, query): - """ - Searches for matching channels against a given query in the database. 
- """ - search_results = [] - keywords = split_into_keywords(query, to_filter_stopwords=True) - sql = "SELECT id, dispersy_cid, name, description, nr_torrents, nr_favorite, nr_spam, modified " \ - "FROM Channels WHERE " - for _ in xrange(len(keywords)): - sql += " name LIKE ? OR description LIKE ? OR " - sql = sql[:-4] - - bindings = list(chain.from_iterable(['%%%s%%' % keyword] * 2 for keyword in keywords)) - results = self._db.fetchall(sql, bindings) - - my_votes = self.votecast_db.getMyVotes() - - for result in results: - my_vote = my_votes.get(result[0], 0) - - relevance_score = ChannelCastDBHandler.calculate_score_channel(keywords, result[2], result[3]) - extended_result = (result[0], str(result[1]), result[2], result[3], - result[4], result[5], result[6], my_vote, result[7], relevance_score) - search_results.append(extended_result) - - return search_results - - def searchChannels(self, keywords): - sql = "SELECT id, name, description, dispersy_cid, modified, nr_torrents, nr_favorite, nr_spam " + \ - "FROM Channels WHERE" - for keyword in keywords: - sql += " name like '%" + keyword + "%' and" - sql = sql[:-3] - return self._getChannels(sql) - - def getChannel(self, channel_id): - sql = "Select id, name, description, dispersy_cid, modified, nr_torrents, nr_favorite, nr_spam " + \ - "FROM Channels WHERE id = ?" - channels = self._getChannels(sql, (channel_id,)) - if len(channels) > 0: - return channels[0] - - def getChannels(self, channel_ids): - channel_ids = "','".join(map(str, channel_ids)) - sql = "Select id, name, description, dispersy_cid, modified, " + \ - "nr_torrents, nr_favorite, nr_spam FROM Channels " + \ - "WHERE id IN ('" + \ - channel_ids + \ - "')" - return self._getChannels(sql) - - def getChannelsByCID(self, channel_cids): - parameters = '?,' * len(channel_cids) - parameters = parameters[:-1] - - channel_cids = map(buffer, channel_cids) - sql = "Select id, name, description, dispersy_cid, modified, nr_torrents, nr_favorite, nr_spam " + \ - "FROM Channels WHERE dispersy_cid IN (" + \ - parameters + \ - ")" - return self._getChannels(sql, channel_cids) - - def getAllChannels(self): - """ Returns all the channels """ - sql = "Select id, name, description, dispersy_cid, modified, nr_torrents, nr_favorite, nr_spam FROM Channels" - return self._getChannels(sql) - - def getNewChannels(self, updated_since=0): - """ Returns all newest unsubscribed channels, ie the ones with no votes (positive or negative)""" - sql = "Select id, name, description, dispersy_cid, modified, nr_torrents, nr_favorite, nr_spam " + \ - "FROM Channels WHERE nr_favorite = 0 AND nr_spam = 0 AND modified > ?" - return self._getChannels(sql, (updated_since,)) - - def getLatestUpdated(self, max_nr=20): - def channel_sort(a, b): - # first compare local vote, spam -> return -1 - if a[7] == -1: - return 1 - if b[7] == -1: - return -1 - - # then compare latest update - if a[8] < b[8]: - return 1 - if a[8] > b[8]: - return -1 - # finally compare nr_torrents - return cmp(a[4], b[4]) - - sql = "Select id, name, description, dispersy_cid, modified, nr_torrents, nr_favorite, nr_spam " + \ - "FROM Channels Order By modified DESC Limit ?" - return self._getChannels(sql, (max_nr,), cmpF=channel_sort) - - def getMostPopularChannels(self, max_nr=20): - sql = "Select id, name, description, dispersy_cid, modified, nr_torrents, nr_favorite, nr_spam " + \ - "FROM Channels ORDER BY nr_favorite DESC, modified DESC LIMIT ?" 
- return self._getChannels(sql, (max_nr,), includeSpam=False) - - def getMySubscribedChannels(self, include_dispersy=False): - sql = "SELECT id, name, description, dispersy_cid, modified, nr_torrents, nr_favorite, nr_spam " + \ - "FROM Channels, ChannelVotes " + \ - "WHERE Channels.id = ChannelVotes.channel_id AND voter_id ISNULL AND vote == 2" - if not include_dispersy: - sql += " AND dispersy_cid == -1" - - return self._getChannels(sql) - - def _getChannels(self, sql, args=None, cmpF=None, includeSpam=True): - """Returns the channels based on the input sql, if the number of positive votes - is less than maxvotes and the number of torrent > 0""" - if self.votecast_db is None: - return [] - - channels = [] - results = self._db.fetchall(sql, args) - - my_votes = self.votecast_db.getMyVotes() - for id, name, description, dispersy_cid, modified, nr_torrents, nr_favorites, nr_spam in results: - my_vote = my_votes.get(id, 0) - if not includeSpam and my_vote < 0: - continue - if len(name.strip()) == 0: - name = "Unnamed channel" - - channels.append((id, str(dispersy_cid), name, description, nr_torrents, - nr_favorites, nr_spam, my_vote, modified, id == self._channel_id)) - - def channel_sort(a, b): - # first compare local vote, spam -> return -1 - if a[7] == -1: - return 1 - if b[7] == -1: - return -1 - - # then compare nr_favorites - if a[5] < b[5]: - return 1 - if a[5] > b[5]: - return -1 - - # then compare latest update - if a[8] < b[8]: - return 1 - if a[8] > b[8]: - return -1 - - # finally compare nr_torrents - return cmp(a[4], b[4]) - - if cmpF is None: - cmpF = channel_sort - channels.sort(cmpF) - return channels - - def getMyChannelId(self): - if self._channel_id: - return self._channel_id - return self._db.fetchone('SELECT id FROM Channels WHERE peer_id ISNULL LIMIT 1') - - def getTorrentMarkings(self, channeltorrent_id): - counts = {} - sql = "SELECT type, peer_id FROM TorrentMarkings WHERE channeltorrent_id = ?" - for type, peer_id in self._db.fetchall(sql, (channeltorrent_id,)): - if type not in counts: - counts[type] = [type, 0, False] - counts[type][1] += 1 - if not peer_id: - counts[type][2] = True - return counts.values() - - def getTorrentModifications(self, channeltorrent_id, keys): - sql = "SELECT " + ", ".join(keys) + """ FROM MetaDataTorrent, ChannelMetaData - LEFT JOIN Moderations ON Moderations.cause = ChannelMetaData.dispersy_id - WHERE metadata_id = ChannelMetaData.id AND channeltorrent_id = ? 
- ORDER BY -Moderations.time_stamp ASC, prev_global_time DESC""" - return self._db.fetchall(sql, (channeltorrent_id,)) - - def getMostPopularChannelFromTorrent(self, infohash): - """Returns channel id, name, nrfavorites of most popular channel if any""" - sql = """SELECT Channels.id, Channels.dispersy_cid, Channels.name, Channels.description, - Channels.nr_torrents, Channels.nr_favorite, Channels.nr_spam, Channels.modified, - ChannelTorrents.id - FROM Channels, ChannelTorrents, Torrent - WHERE Channels.id = ChannelTorrents.channel_id - AND ChannelTorrents.torrent_id = Torrent.torrent_id AND infohash = ?""" - channels = self._db.fetchall(sql, (bin2str(infohash),)) - - if len(channels) > 0: - channel_ids = set() - for result in channels: - channel_ids.add(result[0]) - - myVotes = self.votecast_db.getMyVotes() - - best_channel = None - for id, dispersy_cid, name, description, nr_torrents, nr_favorites, nr_spam, modified, channeltorrent_id in channels: - channel = id, dispersy_cid, name, description, nr_torrents, nr_favorites, nr_spam, myVotes.get( - id, 0), modified, id == self._channel_id, channeltorrent_id - - # allways prefer mychannel - if channel[-1]: - return channel - - if not best_channel or channel[5] > best_channel[5]: - best_channel = channel - elif channel[5] == best_channel[5] and channel[4] > best_channel[4]: - best_channel = channel - return best_channel - - def get_torrent_ids_from_playlist(self, playlist_id): - """ - Returns the torrent dispersy IDs from a specified playlist. - """ - sql = "SELECT dispersy_id FROM PlaylistTorrents WHERE playlist_id = ?" - return self._db.fetchall(sql, (playlist_id,)) diff --git a/Tribler/Core/CacheDB/__init__.py b/Tribler/Core/CacheDB/__init__.py deleted file mode 100644 index 615239131c9..00000000000 --- a/Tribler/Core/CacheDB/__init__.py +++ /dev/null @@ -1,5 +0,0 @@ -""" -The CacheDB package contains the cachedDB for Tribler including a notifier and manages different versions. 
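getMostPopularChannelFromTorrent above encodes a simple preference order: the user's own channel always wins, otherwise the candidate with the most favorites, with torrent count as the tie-breaker. The same selection sketched over plain tuples (the field layout is illustrative, not the handler's real row shape):

```python
def pick_channel(channels, my_channel_id):
    # channels: iterable of (channel_id, nr_torrents, nr_favorites) tuples.
    best = None
    for channel_id, nr_torrents, nr_favorites in channels:
        if channel_id == my_channel_id:
            return channel_id  # own channel is always preferred
        if best is None or (nr_favorites, nr_torrents) > (best[2], best[1]):
            best = (channel_id, nr_torrents, nr_favorites)
    return best[0] if best else None

# Channel 3 wins on favorites even though channel 2 has more torrents.
print(pick_channel([(1, 5, 2), (2, 9, 2), (3, 1, 7)], my_channel_id=99))  # -> 3
```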
- -Author(s): Jie Yang -""" diff --git a/Tribler/Core/CacheDB/db_versions.py b/Tribler/Core/CacheDB/db_versions.py deleted file mode 100644 index 82c72d6d591..00000000000 --- a/Tribler/Core/CacheDB/db_versions.py +++ /dev/null @@ -1,35 +0,0 @@ -# Database versions: -# 17 is used by Tribler 5.9.x - 6.0 -# 18 is used by Tribler 6.1.x - 6.2.0 -# 22 is used by Tribler 6.3.x -# 23 is used by Tribler 6.4.0 RC1 -# 24 is used by Tribler 6.4.0 RC2 - 6.4.X -# 25 is used by Tribler 6.5-git -# 26 is used by Tribler 6.5-git (with database upgrade scripts) -# 27 is used by Tribler 6.5-git (TorrentStatus and Category tables are removed) -# 28 is used by Tribler 6.5-git (cleanup Metadata stuff) - -TRIBLER_59_DB_VERSION = 17 -TRIBLER_60_DB_VERSION = 17 - -TRIBLER_61_DB_VERSION = 18 -TRIBLER_62_DB_VERSION = 18 - -TRIBLER_63_DB_VERSION = 22 - -TRIBLER_64RC1_DB_VERSION = 23 - -TRIBLER_64RC2_DB_VERSION = 24 - -TRIBLER_65PRE_DB_VERSION = 25 -TRIBLER_65PRE2_DB_VERSION = 26 -TRIBLER_65PRE3_DB_VERSION = 27 -TRIBLER_65PRE4_DB_VERSION = 28 - -TRIBLER_66_DB_VERSION = 29 - -# the lowest supported database version number -LOWEST_SUPPORTED_DB_VERSION = TRIBLER_59_DB_VERSION - -# the latest database version number -LATEST_DB_VERSION = TRIBLER_66_DB_VERSION diff --git a/Tribler/Core/CacheDB/schema_sdb_v29.sql b/Tribler/Core/CacheDB/schema_sdb_v29.sql deleted file mode 100644 index 1a996987091..00000000000 --- a/Tribler/Core/CacheDB/schema_sdb_v29.sql +++ /dev/null @@ -1,285 +0,0 @@ -BEGIN TRANSACTION create_table; - ----------------------------------------- - -CREATE TABLE MyInfo ( - entry PRIMARY KEY, - value text -); - ----------------------------------------- - -CREATE TABLE MyPreference ( - torrent_id integer PRIMARY KEY NOT NULL, - destination_path text NOT NULL, - creation_time integer NOT NULL -); - ----------------------------------------- - -CREATE TABLE Peer ( - peer_id integer PRIMARY KEY AUTOINCREMENT NOT NULL, - permid text NOT NULL, - name text, - thumbnail text -); - -CREATE UNIQUE INDEX permid_idx - ON Peer - (permid); - ----------------------------------------- - -CREATE TABLE Torrent ( - torrent_id integer PRIMARY KEY AUTOINCREMENT NOT NULL, - infohash text NOT NULL, - name text, - length integer, - creation_date integer, - num_files integer, - insert_time numeric, - secret integer, - relevance numeric DEFAULT 0, - category text, - status text DEFAULT 'unknown', - num_seeders integer, - num_leechers integer, - comment text, - dispersy_id integer, - is_collected integer DEFAULT 0, - last_tracker_check integer DEFAULT 0, - tracker_check_retries integer DEFAULT 0, - next_tracker_check integer DEFAULT 0 -); - -CREATE UNIQUE INDEX infohash_idx - ON Torrent - (infohash); - ----------------------------------------- - -CREATE TABLE TrackerInfo ( - tracker_id integer PRIMARY KEY AUTOINCREMENT, - tracker text UNIQUE NOT NULL, - last_check numeric DEFAULT 0, - failures integer DEFAULT 0, - is_alive integer DEFAULT 1 -); - -CREATE TABLE TorrentTrackerMapping ( - torrent_id integer NOT NULL, - tracker_id integer NOT NULL, - FOREIGN KEY (torrent_id) REFERENCES Torrent(torrent_id), - FOREIGN KEY (tracker_id) REFERENCES TrackerInfo(tracker_id), - PRIMARY KEY (torrent_id, tracker_id) -); - ----------------------------------------- - -CREATE VIEW CollectedTorrent AS SELECT * FROM Torrent WHERE is_collected == 1; - ----------------------------------------- --- v9: Open2Edit replacing ChannelCast tables - -CREATE TABLE IF NOT EXISTS _Channels ( - id integer PRIMARY KEY ASC, - dispersy_cid text, - peer_id integer, - name 
text NOT NULL, - description text, - modified integer DEFAULT (strftime('%s','now')), - inserted integer DEFAULT (strftime('%s','now')), - deleted_at integer, - nr_torrents integer DEFAULT 0, - nr_spam integer DEFAULT 0, - nr_favorite integer DEFAULT 0 -); -CREATE VIEW Channels AS SELECT * FROM _Channels WHERE deleted_at IS NULL; - -CREATE TABLE IF NOT EXISTS _ChannelTorrents ( - id integer PRIMARY KEY ASC, - dispersy_id integer, - torrent_id integer NOT NULL, - channel_id integer NOT NULL, - peer_id integer, - name text, - description text, - time_stamp integer, - modified integer DEFAULT (strftime('%s','now')), - inserted integer DEFAULT (strftime('%s','now')), - deleted_at integer, - FOREIGN KEY (channel_id) REFERENCES Channels(id) ON DELETE CASCADE -); -CREATE VIEW ChannelTorrents AS SELECT * FROM _ChannelTorrents WHERE deleted_at IS NULL; -CREATE INDEX IF NOT EXISTS TorChannelIndex ON _ChannelTorrents(channel_id); -CREATE INDEX IF NOT EXISTS ChannelTorIndex ON _ChannelTorrents(torrent_id); -CREATE INDEX IF NOT EXISTS ChannelTorChanIndex ON _ChannelTorrents(torrent_id, channel_id); - -CREATE TABLE IF NOT EXISTS _Playlists ( - id integer PRIMARY KEY ASC, - channel_id integer NOT NULL, - dispersy_id integer NOT NULL, - peer_id integer, - playlist_id integer, - name text NOT NULL, - description text, - modified integer DEFAULT (strftime('%s','now')), - inserted integer DEFAULT (strftime('%s','now')), - deleted_at integer, - UNIQUE (dispersy_id), - FOREIGN KEY (channel_id) REFERENCES Channels(id) ON DELETE CASCADE -); -CREATE VIEW Playlists AS SELECT * FROM _Playlists WHERE deleted_at IS NULL; -CREATE INDEX IF NOT EXISTS PlayChannelIndex ON _Playlists(channel_id); - -CREATE TABLE IF NOT EXISTS _PlaylistTorrents ( - id integer PRIMARY KEY ASC, - dispersy_id integer NOT NULL, - peer_id integer, - playlist_id integer, - channeltorrent_id integer, - deleted_at integer, - FOREIGN KEY (playlist_id) REFERENCES Playlists(id) ON DELETE CASCADE, - FOREIGN KEY (channeltorrent_id) REFERENCES ChannelTorrents(id) ON DELETE CASCADE -); -CREATE VIEW PlaylistTorrents AS SELECT * FROM _PlaylistTorrents WHERE deleted_at IS NULL; -CREATE INDEX IF NOT EXISTS PlayTorrentIndex ON _PlaylistTorrents(playlist_id); - -CREATE TABLE IF NOT EXISTS _Comments ( - id integer PRIMARY KEY ASC, - dispersy_id integer NOT NULL, - peer_id integer, - channel_id integer NOT NULL, - comment text NOT NULL, - reply_to_id integer, - reply_after_id integer, - time_stamp integer, - inserted integer DEFAULT (strftime('%s','now')), - deleted_at integer, - UNIQUE (dispersy_id), - FOREIGN KEY (channel_id) REFERENCES Channels(id) ON DELETE CASCADE -); -CREATE VIEW Comments AS SELECT * FROM _Comments WHERE deleted_at IS NULL; -CREATE INDEX IF NOT EXISTS ComChannelIndex ON _Comments(channel_id); - -CREATE TABLE IF NOT EXISTS CommentPlaylist ( - comment_id integer, - playlist_id integer, - PRIMARY KEY (comment_id,playlist_id), - FOREIGN KEY (playlist_id) REFERENCES Playlists(id) ON DELETE CASCADE - FOREIGN KEY (comment_id) REFERENCES Comments(id) ON DELETE CASCADE -); -CREATE INDEX IF NOT EXISTS CoPlaylistIndex ON CommentPlaylist(playlist_id); - -CREATE TABLE IF NOT EXISTS CommentTorrent ( - comment_id integer, - channeltorrent_id integer, - PRIMARY KEY (comment_id, channeltorrent_id), - FOREIGN KEY (comment_id) REFERENCES Comments(id) ON DELETE CASCADE - FOREIGN KEY (channeltorrent_id) REFERENCES ChannelTorrents(id) ON DELETE CASCADE -); -CREATE INDEX IF NOT EXISTS CoTorrentIndex ON CommentTorrent(channeltorrent_id); - -CREATE TABLE IF NOT 
EXISTS _Moderations ( - id integer PRIMARY KEY ASC, - dispersy_id integer NOT NULL, - channel_id integer NOT NULL, - peer_id integer, - severity integer NOT NULL DEFAULT (0), - message text NOT NULL, - cause integer NOT NULL, - by_peer_id integer, - time_stamp integer NOT NULL, - inserted integer DEFAULT (strftime('%s','now')), - deleted_at integer, - UNIQUE (dispersy_id), - FOREIGN KEY (channel_id) REFERENCES Channels(id) ON DELETE CASCADE -); -CREATE VIEW Moderations AS SELECT * FROM _Moderations WHERE deleted_at IS NULL; -CREATE INDEX IF NOT EXISTS MoChannelIndex ON _Moderations(channel_id); - -CREATE TABLE IF NOT EXISTS _ChannelMetaData ( - id integer PRIMARY KEY ASC, - dispersy_id integer NOT NULL, - channel_id integer NOT NULL, - peer_id integer, - type text NOT NULL, - value text NOT NULL, - prev_modification integer, - prev_global_time integer, - time_stamp integer NOT NULL, - inserted integer DEFAULT (strftime('%s','now')), - deleted_at integer, - UNIQUE (dispersy_id) -); -CREATE VIEW ChannelMetaData AS SELECT * FROM _ChannelMetaData WHERE deleted_at IS NULL; - -CREATE TABLE IF NOT EXISTS MetaDataTorrent ( - metadata_id integer, - channeltorrent_id integer, - PRIMARY KEY (metadata_id, channeltorrent_id), - FOREIGN KEY (metadata_id) REFERENCES ChannelMetaData(id) ON DELETE CASCADE - FOREIGN KEY (channeltorrent_id) REFERENCES ChannelTorrents(id) ON DELETE CASCADE -); -CREATE INDEX IF NOT EXISTS MeTorrentIndex ON MetaDataTorrent(channeltorrent_id); - -CREATE TABLE IF NOT EXISTS MetaDataPlaylist ( - metadata_id integer, - playlist_id integer, - PRIMARY KEY (metadata_id,playlist_id), - FOREIGN KEY (playlist_id) REFERENCES Playlists(id) ON DELETE CASCADE - FOREIGN KEY (metadata_id) REFERENCES ChannelMetaData(id) ON DELETE CASCADE -); -CREATE INDEX IF NOT EXISTS MePlaylistIndex ON MetaDataPlaylist(playlist_id); - -CREATE TABLE IF NOT EXISTS _ChannelVotes ( - channel_id integer, - voter_id integer, - dispersy_id integer, - vote integer, - time_stamp integer, - deleted_at integer, - PRIMARY KEY (channel_id, voter_id) -); -CREATE VIEW ChannelVotes AS SELECT * FROM _ChannelVotes WHERE deleted_at IS NULL; -CREATE INDEX IF NOT EXISTS ChaVotIndex ON _ChannelVotes(channel_id); -CREATE INDEX IF NOT EXISTS VotChaIndex ON _ChannelVotes(voter_id); - -CREATE TABLE IF NOT EXISTS TorrentFiles ( - torrent_id integer NOT NULL, - path text NOT NULL, - length integer NOT NULL, - PRIMARY KEY (torrent_id, path) -); -CREATE INDEX IF NOT EXISTS TorFileIndex ON TorrentFiles(torrent_id); - -CREATE TABLE IF NOT EXISTS _TorrentMarkings ( - dispersy_id integer NOT NULL, - channeltorrent_id integer NOT NULL, - peer_id integer, - global_time integer, - type text NOT NULL, - time_stamp integer NOT NULL, - deleted_at integer, - UNIQUE (dispersy_id), - PRIMARY KEY (channeltorrent_id, peer_id) -); -CREATE VIEW TorrentMarkings AS SELECT * FROM _TorrentMarkings WHERE deleted_at IS NULL; -CREATE INDEX IF NOT EXISTS TorMarkIndex ON _TorrentMarkings(channeltorrent_id); - -CREATE VIRTUAL TABLE FullTextIndex USING fts4(swarmname, filenames, fileextensions); - -------------------------------------- - -COMMIT TRANSACTION create_table; - ----------------------------------------- - -BEGIN TRANSACTION init_values; - -INSERT INTO MyInfo VALUES ('version', 28); - -INSERT INTO TrackerInfo (tracker) VALUES ('no-DHT'); -INSERT INTO TrackerInfo (tracker) VALUES ('DHT'); - -COMMIT TRANSACTION init_values; diff --git a/Tribler/Core/CacheDB/sqlitecachedb.py b/Tribler/Core/CacheDB/sqlitecachedb.py deleted file mode 100644 index 
34b539fcd03..00000000000 --- a/Tribler/Core/CacheDB/sqlitecachedb.py +++ /dev/null @@ -1,526 +0,0 @@ -""" -Sqlitecachedb. - -Author(s): Jie Yang -""" -import logging -import os - -import apsw -from apsw import CantOpenError, SQLError -from base64 import encodestring, decodestring -from threading import currentThread, RLock - -from twisted.python.threadable import isInIOThread - -from Tribler.Core.CacheDB.db_versions import LATEST_DB_VERSION -from Tribler.Core.Utilities.install_dir import get_lib_path -from Tribler.pyipv8.ipv8.taskmanager import TaskManager -from Tribler.pyipv8.ipv8.util import blocking_call_on_reactor_thread - -DB_SCRIPT_NAME = "schema_sdb_v%s.sql" % str(LATEST_DB_VERSION) - -DB_DIR_NAME = u"sqlite" -DB_FILE_NAME = u"tribler.sdb" -DB_FILE_RELATIVE_PATH = os.path.join(DB_DIR_NAME, DB_FILE_NAME) -DB_SCRIPT_ABSOLUTE_PATH = os.path.join(get_lib_path(), 'Core', 'CacheDB', DB_SCRIPT_NAME) - -DEFAULT_BUSY_TIMEOUT = 10000 - -forceDBThread = blocking_call_on_reactor_thread -forceAndReturnDBThread = blocking_call_on_reactor_thread - - -class CorruptedDatabaseError(Exception): - pass - - -def bin2str(bin_data): - return encodestring(bin_data).replace("\n", "") - - -def str2bin(str_data): - return decodestring(str_data) - - -class SQLiteCacheDB(TaskManager): - - def __init__(self, db_path, db_script_path=DB_SCRIPT_ABSOLUTE_PATH, busytimeout=DEFAULT_BUSY_TIMEOUT): - super(SQLiteCacheDB, self).__init__() - - self._logger = logging.getLogger(self.__class__.__name__) - - self._cursor_lock = RLock() - self._cursor_table = {} - - self._connection = None - self.sqlite_db_path = db_path - self.db_script_path = db_script_path - self._busytimeout = busytimeout # busytimeout is in milliseconds - - self._version = None - - self._should_commit = False - self._show_execute = False - self.initialize() - self.initial_begin() - - @property - def version(self): - """The version of this database.""" - return self._version - - @property - def connection(self): - """ - Returns the connection of the database, which may be None if not initialized or closed. - :return: The connection object of the database - """ - return self._connection - - def initialize(self): - """ Initializes the database. If the database doesn't exist, we create a new one. Otherwise, we check the - version and upgrade to the latest version. - """ - - # open a connection to the database - self._open_connection() - - def close(self): - """ - Cancels all pending tasks and closes all cursors. Then, it closes the connection. - """ - self.shutdown_task_manager() - with self._cursor_lock: - self.commit_now(exiting=True) - for cursor in self._cursor_table.itervalues(): - cursor.close() - self._cursor_table = {} - self._connection.close() - self._connection = None - - def _open_connection(self): - """ Opens a connection to the database. If the database doesn't exist, we create a new one and run the - initialization SQL scripts. If the database doesn't exist, we simply connect to it. - And finally, we read the database version. 
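SQLiteCacheDB hands every thread its own cursor: __init__ above sets up a cursor table guarded by an RLock, and get_cursor further down fills it keyed by thread name. A minimal standalone sketch of that pattern on stdlib sqlite3 (the deleted class sits on apsw, but the caching logic is the same):

```python
import sqlite3
import threading

class PerThreadCursors(object):
    # Mirrors the _cursor_table / _cursor_lock bookkeeping described above.

    def __init__(self, db_path=":memory:"):
        # check_same_thread=False only to allow the multi-thread access that
        # apsw permits by default; this is a sketch, not hardened code.
        self._connection = sqlite3.connect(db_path, check_same_thread=False)
        self._cursor_lock = threading.RLock()
        self._cursor_table = {}

    def get_cursor(self):
        thread_name = threading.current_thread().name
        with self._cursor_lock:
            if thread_name not in self._cursor_table:
                self._cursor_table[thread_name] = self._connection.cursor()
            return self._cursor_table[thread_name]

db = PerThreadCursors()
print(db.get_cursor() is db.get_cursor())  # True: one cursor per thread
```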
- """ - # check if it is in memory - is_in_memory = self.sqlite_db_path == u":memory:" - is_new_db = is_in_memory - - # check if database file exists - if not is_in_memory: - if not os.path.exists(self.sqlite_db_path): - # create a new one - is_new_db = True - elif not os.path.isfile(self.sqlite_db_path): - msg = u"Not a file: %s" % self.sqlite_db_path - raise OSError(msg) - - # create connection - try: - self._connection = apsw.Connection(self.sqlite_db_path) - self._connection.setbusytimeout(self._busytimeout) - except CantOpenError as e: - msg = u"Failed to open connection to %s: %s" % (self.sqlite_db_path, e) - raise CantOpenError(msg) - - cursor = self.get_cursor() - - # Check integrity of the database only if there is Walk-Ahead Log (WAL) or Shared-Memory (shm) files present - shm_file = "%s-shm" % self.sqlite_db_path - wal_file = "%s-wal" % self.sqlite_db_path - if os.path.exists(shm_file) or os.path.exists(wal_file): - self.do_quick_integrity_check() - - # Enable memory map in sqlite (256MB) - cursor.execute(u"PRAGMA mmap_size=268435456;") - - # apply pragma - page_size, = next(cursor.execute(u"PRAGMA page_size")) - if page_size < 8192: - # journal_mode and page_size only need to be set once. because of the VACUUM this - # is very expensive - self._logger.info(u"begin page_size upgrade...") - cursor.execute(u"PRAGMA journal_mode = DELETE;") - cursor.execute(u"PRAGMA page_size = 8192;") - cursor.execute(u"VACUUM;") - self._logger.info(u"...end page_size upgrade") - - # http://www.sqlite.org/pragma.html - # When synchronous is NORMAL, the SQLite database engine will still - # pause at the most critical moments, but less often than in FULL - # mode. There is a very small (though non-zero) chance that a power - # failure at just the wrong time could corrupt the database in - # NORMAL mode. But in practice, you are more likely to suffer a - # catastrophic disk failure or some other unrecoverable hardware - # fault. - # - cursor.execute(u"PRAGMA synchronous = NORMAL;") - cursor.execute(u"PRAGMA cache_size = 10000;") - - # Niels 19-09-2012: even though my database upgraded to increase the pagesize it did not keep wal mode? 
- # Enabling WAL on every starup - cursor.execute(u"PRAGMA journal_mode = WAL;") - - # create tables if this is a new database - if is_new_db and self.db_script_path is not None: - self._logger.info(u"Initializing new database...") - # check if the SQL script exists - if not os.path.exists(self.db_script_path): - msg = u"SQL script doesn't exist: %s" % self.db_script_path - raise OSError(msg) - if not os.path.isfile(self.db_script_path): - msg = u"SQL script is not a file: %s" % self.db_script_path - raise OSError(msg) - - try: - f = open(self.db_script_path, "r") - sql_script = f.read() - f.close() - except IOError as e: - msg = u"Failed to load SQL script %s: %s" % (self.db_script_path, e) - raise IOError(msg) - - cursor.execute(sql_script) - - if self.db_script_path is not None: - # read database version - self._logger.info(u"Reading database version...") - try: - version_str, = cursor.execute(u"SELECT value FROM MyInfo WHERE entry == 'version'").next() - self._version = int(version_str) - self._logger.info(u"Current database version is %s", self._version) - except (StopIteration, SQLError) as e: - msg = u"Failed to load database version: %s" % e - raise CorruptedDatabaseError(msg) - else: - self._version = 1 - - def do_quick_integrity_check(self): - check_response, = self.execute(u"PRAGMA quick_check").next() - if check_response != 'ok': - msg = u"Quick integrity check of database failed" - self._logger.error(msg) - raise CorruptedDatabaseError(msg) - - def get_cursor(self): - thread_name = currentThread().getName() - - with self._cursor_lock: - if thread_name not in self._cursor_table: - self._cursor_table[thread_name] = self._connection.cursor() - return self._cursor_table[thread_name] - - def initial_begin(self): - try: - self._logger.info(u"Beginning the first transaction...") - self.execute(u"BEGIN;") - - except: - self._logger.exception(u"Failed to begin the first transaction") - raise - self._should_commit = False - - def write_version(self, version): - assert isinstance(version, int), u"Invalid version type: %s is not int" % type(version) - assert version <= LATEST_DB_VERSION, u"Invalid version value: %s > the latest %s" % (version, LATEST_DB_VERSION) - - sql = u"UPDATE MyInfo SET value = ? WHERE entry == 'version'" - self.execute_write(sql, (version,)) - self.commit_now() - self._version = version - - def commit_now(self, vacuum=False, exiting=False): - if self._should_commit and isInIOThread(): - try: - self._logger.info(u"Start committing...") - self.execute(u"COMMIT;") - except: - self._logger.exception(u"COMMIT FAILED") - if exiting: - # If we are exiting we don't propagate the error. - # The reason for the exit may be the reason this exception occurred. 
- self._logger.exception(u"Not propagating commit error, as we are exiting") - return - else: - raise - self._should_commit = False - - if vacuum: - self._logger.info(u"Start vacuuming...") - self.execute(u"VACUUM;") - - if not exiting: - try: - self._logger.info(u"Beginning another transaction...") - self.execute(u"BEGIN;") - except: - self._logger.exception(u"Failed to execute BEGIN") - raise - else: - self._logger.info(u"Exiting, not beginning another transaction") - - elif vacuum: - self.execute(u"VACUUM;") - - def clean_db(self, vacuum=False, exiting=False): - self.execute_write(u"DELETE FROM TorrentFiles WHERE torrent_id IN (SELECT torrent_id FROM CollectedTorrent)") - self.execute_write(u"DELETE FROM Torrent WHERE name IS NULL" - u" AND torrent_id NOT IN (SELECT torrent_id FROM _ChannelTorrents)") - - if vacuum: - self.commit_now(vacuum, exiting=exiting) - - def set_show_sql(self, switch): - self._show_execute = switch - - # --------- generic functions ------------- - - def execute(self, sql, args=None): - cur = self.get_cursor() - - if self._show_execute: - thread_name = currentThread().getName() - self._logger.info(u"===%s===\n%s\n-----\n%s\n======\n", thread_name, sql, args) - - try: - if args is None: - return cur.execute(sql) - else: - return cur.execute(sql, args) - - except Exception as msg: - if str(msg).startswith(u"BusyError"): - self._logger.error(u"cachedb: busylock error") - - else: - thread_name = currentThread().getName() - self._logger.exception(u"cachedb: ===%s===\nSQL Type: %s\n-----\n%s\n-----\n%s\n======\n", - thread_name, type(sql), sql, args) - - raise msg - - def executemany(self, sql, args=None): - self._should_commit = True - - cur = self.get_cursor() - if self._show_execute: - thread_name = currentThread().getName() - self._logger.info(u"===%s===\n%s\n-----\n%s\n======\n", thread_name, sql, args) - - try: - if args is None: - result = cur.executemany(sql) - else: - result = cur.executemany(sql, args) - - return result - - except Exception as msg: - thread_name = currentThread().getName() - self._logger.exception(u"===%s===\nSQL Type: %s\n-----\n%s\n-----\n%s\n======\n", - thread_name, type(sql), sql, args) - raise msg - - def execute_read(self, sql, args=None): - return self.execute(sql, args) - - def execute_write(self, sql, args=None): - self._should_commit = True - - self.execute(sql, args) - - def insert_or_ignore(self, table_name, **argv): - if len(argv) == 1: - sql = u'INSERT OR IGNORE INTO %s (%s) VALUES (?);' % (table_name, argv.keys()[0]) - else: - questions = '?,' * len(argv) - sql = u'INSERT OR IGNORE INTO %s %s VALUES (%s);' % (table_name, tuple(argv.keys()), questions[:-1]) - self.execute_write(sql, argv.values()) - - def insert(self, table_name, **argv): - if len(argv) == 1: - sql = u'INSERT INTO %s (%s) VALUES (?);' % (table_name, argv.keys()[0]) - else: - questions = '?,' * len(argv) - sql = u'INSERT INTO %s %s VALUES (%s);' % (table_name, tuple(argv.keys()), questions[:-1]) - self.execute_write(sql, argv.values()) - - # TODO: may remove this, only used by test_sqlitecachedb.py - def insertMany(self, table_name, values, keys=None): - """ values must be a list of tuples """ - - questions = u'?,' * len(values[0]) - if keys is None: - sql = u'INSERT INTO %s VALUES (%s);' % (table_name, questions[:-1]) - else: - sql = u'INSERT INTO %s %s VALUES (%s);' % (table_name, tuple(keys), questions[:-1]) - self.executemany(sql, values) - - def update(self, table_name, where=None, **argv): - assert len(argv) > 0, 'NO VALUES TO UPDATE SPECIFIED' - if 
len(argv) > 0: - sql = u'UPDATE %s SET ' % table_name - arg = [] - for k, v in argv.iteritems(): - if isinstance(v, tuple): - sql += u'%s %s ?,' % (k, v[0]) - arg.append(v[1]) - else: - sql += u'%s=?,' % k - arg.append(v) - sql = sql[:-1] - if where is not None: - sql += u' WHERE %s' % where - self.execute_write(sql, arg) - - def delete(self, table_name, **argv): - sql = u'DELETE FROM %s WHERE ' % table_name - arg = [] - for k, v in argv.iteritems(): - if isinstance(v, tuple): - sql += u'%s %s ? AND ' % (k, v[0]) - arg.append(v[1]) - else: - sql += u'%s=? AND ' % k - arg.append(v) - sql = sql[:-5] - self.execute_write(sql, arg) - - # -------- Read Operations -------- - def size(self, table_name): - num_rec_sql = u"SELECT count(*) FROM %s LIMIT 1" % table_name - result = self.fetchone(num_rec_sql) - return result - - def fetchone(self, sql, args=None): - find = self.execute_read(sql, args) - if not find: - return - else: - find = list(find) - if len(find) > 0: - if len(find) > 1: - self._logger.debug( - u"FetchONE resulted in many more rows than one, consider putting a LIMIT 1 in the sql statement %s, %s", sql, len(find)) - find = find[0] - else: - return - if len(find) > 1: - return find - else: - return find[0] - - def fetchall(self, sql, args=None): - res = self.execute_read(sql, args) - if res is not None: - find = list(res) - return find - else: - return [] # should it return None? - - def getOne(self, table_name, value_name, where=None, conj=u"AND", **kw): - """ value_name could be a string, a tuple of strings, or '*' - """ - if isinstance(value_name, tuple): - value_names = u",".join(value_name) - elif isinstance(value_name, list): - value_names = u",".join(value_name) - else: - value_names = value_name - - if isinstance(table_name, tuple): - table_names = u",".join(table_name) - elif isinstance(table_name, list): - table_names = u",".join(table_name) - else: - table_names = table_name - - sql = u'SELECT %s FROM %s' % (value_names, table_names) - - if where or kw: - sql += u' WHERE ' - if where: - sql += where - if kw: - sql += u' %s ' % conj - if kw: - arg = [] - for k, v in kw.iteritems(): - if isinstance(v, tuple): - operator = v[0] - arg.append(v[1]) - else: - operator = "=" - arg.append(v) - sql += u' %s %s ? ' % (k, operator) - sql += conj - sql = sql[:-len(conj)] - else: - arg = None - - # print >> sys.stderr, 'SQL: %s %s' % (sql, arg) - return self.fetchone(sql, arg) - - def getAll(self, table_name, value_name, where=None, group_by=None, having=None, order_by=None, limit=None, - offset=None, conj=u"AND", **kw): - """ value_name could be a string, or a tuple of strings - order by is represented as order_by - group by is represented as group_by - """ - if isinstance(value_name, tuple): - value_names = u",".join(value_name) - elif isinstance(value_name, list): - value_names = u",".join(value_name) - else: - value_names = value_name - - if isinstance(table_name, tuple): - table_names = u",".join(table_name) - elif isinstance(table_name, list): - table_names = u",".join(table_name) - else: - table_names = table_name - - sql = u'SELECT %s FROM %s' % (value_names, table_names) - - if where or kw: - sql += u' WHERE ' - if where: - sql += where - if kw: - sql += u' %s ' % conj - if kw: - arg = [] - for k, v in kw.iteritems(): - if isinstance(v, tuple): - operator = v[0] - arg.append(v[1]) - else: - operator = u"=" - arg.append(v) - - sql += u' %s %s ?' 
% (k, operator) - sql += conj - sql = sql[:-len(conj)] - else: - arg = None - - if group_by is not None: - sql += u' GROUP BY ' + group_by - if having is not None: - sql += u' HAVING ' + having - if order_by is not None: - # you should add desc after order_by to reversely sort, i.e, 'last_seen desc' as order_by - sql += u' ORDER BY ' + order_by - if limit is not None: - sql += u' LIMIT %d' % limit - if offset is not None: - sql += u' OFFSET %d' % offset - - try: - return self.fetchall(sql, arg) or [] - except Exception as msg: - self._logger.exception(u"Wrong getAll sql statement: %s", sql) - raise Exception(msg) diff --git a/Tribler/Core/Category/Category.py b/Tribler/Core/Category/Category.py index 106f3ef2089..d2d0678d183 100644 --- a/Tribler/Core/Category/Category.py +++ b/Tribler/Core/Category/Category.py @@ -4,57 +4,53 @@ Author(s): Yuan Yuan, Jelle Roozenburg """ from __future__ import absolute_import, division -from functools import cmp_to_key + import logging import os import re -from six.moves.configparser import MissingSectionHeaderError, ParsingError +from functools import cmp_to_key -from Tribler.Core.Category.FamilyFilter import XXXFilter +from Tribler.Core.Category.FamilyFilter import default_xxx_filter from Tribler.Core.Category.init_category import getCategoryInfo from Tribler.Core.Utilities.install_dir import get_lib_path CATEGORY_CONFIG_FILE = "category.conf" -class Category(object): +def cmp_rank(a, b): + if 'rank' not in a: + return 1 + if 'rank' not in b: + return -1 + if a['rank'] == b['rank']: + return 0 + if a['rank'] == -1: + return 1 + if b['rank'] == -1: + return -1 + if a['rank'] < b['rank']: + return -1 + return 1 - __size_change = 1024 * 1024 - def __init__(self, ffEnabled=False): - self._logger = logging.getLogger(self.__class__.__name__) +class Category(object): + __size_change = 1024 * 1024 + _logger = logging.getLogger("Category") - filename = os.path.join(get_lib_path(), 'Core', 'Category', CATEGORY_CONFIG_FILE) - try: - self.category_info = getCategoryInfo(filename) - self.category_info.sort(key=cmp_to_key(cmp_rank)) - except (MissingSectionHeaderError, ParsingError, IOError): - self.category_info = [] - self._logger.critical('', exc_info=True) - - self.xxx_filter = XXXFilter() - - self._logger.debug("category: Categories defined by user: %s", self.getCategoryNames()) - - self.ffEnabled = ffEnabled - self.set_family_filter(None) - - def getCategoryNames(self, filter=True): - if self.category_info is None: - return [] - keys = [] - for category in self.category_info: - rank = category['rank'] - if rank == -1 and filter: - break - keys.append((category['name'], category['displayname'])) - return keys + category_info = getCategoryInfo(os.path.join(get_lib_path(), 'Core', 'Category', CATEGORY_CONFIG_FILE)) + category_info.sort(key=cmp_to_key(cmp_rank)) def calculateCategory(self, torrent_dict, display_name): """ Calculate the category for a given torrent_dict of a torrent file. :return a list of categories this torrent belongs to. 
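The refactored Category sorts its category dicts with the module-level cmp_rank comparator above, via functools.cmp_to_key: entries sort ascending by rank, while a missing rank or rank == -1 (the family-filtered slot) sinks to the end. A quick standalone check of that ordering with illustrative dicts:

```python
from functools import cmp_to_key

def cmp_rank(a, b):
    # Same ordering contract as the comparator above.
    if 'rank' not in a:
        return 1
    if 'rank' not in b:
        return -1
    if a['rank'] == b['rank']:
        return 0
    if a['rank'] == -1:
        return 1
    if b['rank'] == -1:
        return -1
    return -1 if a['rank'] < b['rank'] else 1

cats = [{'name': 'xxx', 'rank': -1}, {'name': 'Video', 'rank': 1},
        {'name': 'other'}, {'name': 'Audio', 'rank': 3}]
cats.sort(key=cmp_to_key(cmp_rank))
print([c['name'] for c in cats])  # ['Video', 'Audio', 'xxx', 'other']
```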
""" + is_xxx = default_xxx_filter.isXXXTorrent( + files_list=torrent_dict['info']["files"] if "files" in torrent_dict['info'] else [], + torrent_name=torrent_dict['info'].get("name"), + tracker=torrent_dict['info'].get("announce")) + if is_xxx: + return "xxx" files_list = [] try: # the multi-files mode @@ -75,9 +71,6 @@ def calculateCategory(self, torrent_dict, display_name): return self.calculateCategoryNonDict(files_list, display_name, tracker, comment) def calculateCategoryNonDict(self, files_list, display_name, tracker, comment): - if self.xxx_filter.isXXXTorrent(files_list, display_name, tracker, comment): - return 'xxx' - torrent_category = None # filename_list ready strongest_cat = 0.0 @@ -109,9 +102,9 @@ def judge(self, category, files_list, display_name=''): pass if (1 - factor) > 0.5: if 'strength' in category: - return (True, category['strength']) + return True, category['strength'] else: - return (True, (1 - factor)) + return True, (1 - factor) # judge each file matchSize = 0 @@ -160,51 +153,6 @@ def judge(self, category, files_list, display_name=''): def _getWords(self, string): return self.WORDS_REGEXP.findall(string) - def family_filter_enabled(self): - """ - Return is xxx filtering is enabled in this client - """ - return self.ffEnabled - - def set_family_filter(self, b=None): - assert b in (True, False, None) - old = self.family_filter_enabled() - if b != old or b is None: # update category data if initial call, or if state changes - if b is None: - b = old - - self.ffEnabled = b - - # change category data - for category in self.category_info: - if category['name'] == 'xxx': - if b: - category['old-rank'] = category['rank'] - category['rank'] = -1 - elif category['rank'] == -1: - category['rank'] = category['old-rank'] - break - - def get_family_filter_sql(self): - if self.family_filter_enabled(): - forbiddencats = [cat['name'] for cat in self.category_info if cat['rank'] == -1] - if forbiddencats: - return " and category not in (%s)" % ','.join(["'%s'" % cat for cat in forbiddencats]) - return '' - -def cmp_rank(a, b): - if not ('rank' in a): - return 1 - elif not ('rank' in b): - return -1 - elif a['rank'] == -1: - return 1 - elif b['rank'] == -1: - return -1 - elif a['rank'] == b['rank']: - return 0 - elif a['rank'] < b['rank']: - return -1 - else: - return 1 +# Category filter should be stateless +default_category_filter = Category() diff --git a/Tribler/Core/Category/FamilyFilter.py b/Tribler/Core/Category/FamilyFilter.py index c4b44df7c9d..21f0b53cf5a 100644 --- a/Tribler/Core/Category/FamilyFilter.py +++ b/Tribler/Core/Category/FamilyFilter.py @@ -8,41 +8,40 @@ import logging import os import re + from six.moves import xrange from Tribler.Core.Utilities.install_dir import get_lib_path WORDS_REGEXP = re.compile('[a-zA-Z0-9]+') +termfilename = os.path.join(get_lib_path(), 'Core', 'Category', 'filter_terms.filter') -class XXXFilter(object): - def __init__(self): - super(XXXFilter, self).__init__() - self._logger = logging.getLogger(self.__class__.__name__) +def initTerms(filename): + terms = set() + searchterms = set() - termfilename = os.path.join(get_lib_path(), 'Core', 'Category', 'filter_terms.filter') - self.xxx_terms, self.xxx_searchterms = self.initTerms(termfilename) + try: + f = open(filename, 'r') + lines = f.read().lower().splitlines() - def initTerms(self, filename): - terms = set() - searchterms = set() + for line in lines: + if line.startswith('*'): + searchterms.add(line[1:]) + else: + terms.add(line) + f.close() + except IOError: + raise IOError(u"Could 
not open %s, initTerms failed.", filename) - try: - f = open(filename, 'r') - lines = f.read().lower().splitlines() + return terms, searchterms - for line in lines: - if line.startswith('*'): - searchterms.add(line[1:]) - else: - terms.add(line) - f.close() - except IOError: - self._logger.exception(u"Could not open %s, initTerms failed.", filename) - self._logger.debug('Read %d XXX terms from file %s', len(terms) + len(searchterms), filename) - return terms, searchterms +class XXXFilter(object): + _logger = logging.getLogger("XXXFilter") + + xxx_terms, xxx_searchterms = initTerms(termfilename) def _getWords(self, string): return [a.lower() for a in WORDS_REGEXP.findall(string)] @@ -52,12 +51,11 @@ def isXXXTorrent(self, files_list, torrent_name, tracker, comment=None): tracker = tracker.lower().replace('http://', '').replace('announce', '') else: tracker = '' - terms = [a[0].lower() for a in files_list] + terms = [a["path"][0] for a in files_list] if files_list else [] is_xxx = (self.isXXX(torrent_name, False) or self.isXXX(tracker, False) or any(self.isXXX(term) for term in terms) or - (comment and self.isXXX(comment, False)) - ) + (comment and self.isXXX(comment, False))) tracker = repr(tracker) if is_xxx: self._logger.debug(u"Torrent is XXX: %s %s", torrent_name, tracker) @@ -65,7 +63,13 @@ def isXXXTorrent(self, files_list, torrent_name, tracker, comment=None): self._logger.debug(u"Torrent is NOT XXX: %s %s", torrent_name, tracker) return is_xxx - def isXXX(self, s, isFilename=True): + def isXXXTorrentMetadataDict(self, md_dict): + terms_combined = " ".join([md_dict[f] for f in ["title", "tags", "tracker"] if f in md_dict]) + non_xxx = "tags" in md_dict and \ + (md_dict["tags"].startswith(u"audio") or md_dict["tags"].startswith(u"CD/DVD/BD")) + return self.isXXX(terms_combined, nonXXXFormat=non_xxx) + + def isXXX(self, s, isFilename=True, nonXXXFormat=False): if not s: return False @@ -77,10 +81,9 @@ def isXXX(self, s, isFilename=True): words = self._getWords(s) words2 = [' '.join(words[i:i + 2]) for i in xrange(0, len(words) - 1)] num_xxx = len([w for w in words + words2 if self.isXXXTerm(w, s)]) - if isFilename and self.isAudio(s): + if nonXXXFormat or (isFilename and self.isAudio(s)): return num_xxx > 2 # almost never classify mp3 as porn - else: - return num_xxx > 0 + return num_xxx > 0 def foundXXXTerm(self, s): for term in self.xxx_searchterms: @@ -110,3 +113,7 @@ def isXXXTerm(self, s, title=None): def isAudio(self, s): return s[s.rfind('.') + 1:] in self.audio_extensions + + +# XXX filter should be stateless +default_xxx_filter = XXXFilter() diff --git a/Tribler/Core/Category/category.conf b/Tribler/Core/Category/category.conf index cec1a0bb84a..143366977ba 100644 --- a/Tribler/Core/Category/category.conf +++ b/Tribler/Core/Category/category.conf @@ -1,23 +1,3 @@ -[xxx] -rank = 10 -displayname = XXX -matchpercentage = 0.001 -strength = 1.1 -# Keywords are in seperate file: filter_content.filter - - -[other] -rank = 8 -displayname = Other -matchpercentage = 0.0 - - -[unknown] -rank = 9 -displayname = Unknown -matchpercentage = 0.0 - - [Video] rank = 1 displayname = Video Files @@ -45,15 +25,15 @@ suffix = cda, flac, m3u, mp2, mp3, vorbis, wav, wma, ogg, ape matchpercentage = 0.8 [Document] -rank = 5 +rank = 4 displayname = Documents -suffix = doc, pdf, ppt, ps, tex, txt, vsd +suffix = doc, pdf, ppt, ps, tex, txt, vsd, xls matchpercentage = 0.8 [Compressed] -rank = 4 +rank = 5 displayname = Compressed -suffix = ace, bin, bwt, cab, ccd, cdi, cue, gzip, iso, jar, mdf, mds, nrg, 
rar, tar, vcd, z, zip +suffix = ace, bin, bwt, cab, gzip, jar, rar, tar, z, zip matchpercentage = 0.8 *.r0 = 1 @@ -67,8 +47,33 @@ matchpercentage = 0.8 *.r8 = 1 *.r9 = 1 -[Picture] +[CD/DVD/BD] rank = 6 +displayname = CD/DVD/BD +suffix = iso, mdf, mds, nrg, vcd, dmg, toast, cue, dvd, imd, md0, md1, md2, mdx, udf, wii, dmgpart, cso, ccd, cdi, dax, xvd, sub, daa, vc4, lcd +matchpercentage = 0.8 + +[Picture] +rank = 7 displayname = Pictures suffix = bmp, dib, dwg, gif, ico, jpeg, jpg, pic, png, swf, tif, tiff matchpercentage = 0.8 + +[other] +rank = 8 +displayname = Other +matchpercentage = 0.0 + +[unknown] +rank = 9 +displayname = Unknown +matchpercentage = 0.0 + +[xxx] +rank = 10 +displayname = XXX +matchpercentage = 0.001 +strength = 1.1 +# Keywords are in seperate file: filter_content.filter + + diff --git a/Tribler/Core/Category/filter_terms.filter b/Tribler/Core/Category/filter_terms.filter index 607e29734f3..7a9b4b551a8 100644 --- a/Tribler/Core/Category/filter_terms.filter +++ b/Tribler/Core/Category/filter_terms.filter @@ -33,7 +33,6 @@ abspritzen2 abspritzer abuse accidental -action adel miller adriana adrianna @@ -47,17 +46,15 @@ adults adultsex adulttv advertenties -afro afscheiding aftrekken agustina akiba -akira +asa akira alba albanian alektra alektra blue -alex alex divine alexa alexandra @@ -78,7 +75,6 @@ alsscan alysha leigh alyssa alyssa chase -amanda amanda dawkins amanda white amanda3 @@ -131,7 +127,6 @@ amor amore ampland amputee -amy anaal anaallikken anaalneuken @@ -153,14 +148,12 @@ analneuken analnow5 analpassion analthe -anderson andrea andrea spinks anetta keys anette angel eyes angel long -angela angelica angelica sin angelina @@ -169,20 +162,16 @@ angewichst angewixt angie angie george -angus anika animalporno animalsex animalsexcom animalsexverhalen anime -anita anita crystal anja anjali -anna anna malle -anne anneke annika antistress @@ -200,12 +189,8 @@ arab arabian archieven archiv -argentina -argentine -argentino ariana ariana jollee -army arsch arschbesamung arschdildo @@ -282,9 +267,6 @@ bakire2002 balkon balls balmoral -banana -banane -bananen bang bangbros bangbross @@ -319,12 +301,9 @@ bdsmstartpagina bdsmverhalen bdsmvideosnet bdsmzaken -beach beachgirls beastiality beavis -become -bed bedroom bedtime beefy @@ -338,7 +317,6 @@ bekijksex bekommt belgian belgischeporno -bella bella starr belladonna bellydance @@ -352,7 +330,6 @@ bestialiteit bestiality besuch_beim_na bethroom -betty bev cox bianca bianca black @@ -454,9 +431,7 @@ blown blowputa blows bobbi eden -bobbie bocca -body bodypanty bodystocking boerensex @@ -485,19 +460,11 @@ borstenforum borstjes botergeil botergeile -boxing -boyfriend -boyfriends boyfuckmom boyz bra brandi brandibelle -brasil -brasilian -brazil -braziliaanse -brazilian brazzers breast breasted @@ -505,7 +472,6 @@ breasts breezahchicks breezersletjes briana banks -bridget brigitte brinquedinho britney spears @@ -520,7 +486,6 @@ brunettes brutal brutaldildos brutalviolence -bubble bud buitenbloot buitensex @@ -530,7 +495,6 @@ bukake bukkake bukkakeshop bukkakeshopcom -bunny bunnyteens busen bust @@ -563,7 +527,6 @@ camcrush cameltoe cameltoes cameras -cameron camgirl camgirls camgirlyoung @@ -581,7 +544,6 @@ camsletten camwithher canaal canaaldigitaal -canal canaldigitaal canaldigital canalplus @@ -589,12 +551,10 @@ candi candice candice paris candid -candy carly carmel carmen electra caroline -carrot carsex cartoonsex carupaneras @@ -621,13 +581,11 @@ chandigarhdicke changingroom chantal chantelle stevens -charisma 
charlestonkleding charlie charlie holays charlie laine charlotte -charming chatbabe chathonk chatrooms @@ -638,7 +596,6 @@ cheating cheatingnice cheerleader chenfick -cherry chica chicca chick @@ -720,9 +677,7 @@ convulsions coolios copines coppia -coral cornelia -corrida corset cosplay couch @@ -799,7 +754,6 @@ cunny cunt cuntlicker cursus -cute cutie cuties cuty @@ -829,7 +783,6 @@ debbie tomlins debora deborah deborandome -deep deepest deepthroat deepthroatblowjob @@ -843,7 +796,6 @@ desire despues destiny deville destiny st claire -deutsche devasso devinn lane devon @@ -913,8 +865,6 @@ dogsex doityourself dolitha dolithas -doll -dolls domai domina dominica leoni @@ -1035,7 +985,6 @@ euromillions eutube eva angelina eva vortex -eve everysexhasits evgeniya executies @@ -1046,7 +995,6 @@ exhibitionist exibicionista exotic exotische -expert extreem extreemsex extrem @@ -1065,7 +1013,6 @@ facialsusana facialthreesome familie familiesex -famous famouspornstars fanny fart @@ -1163,7 +1110,6 @@ floral flower tucci follada follando -foot footjob footjob1 footjobbrazilian @@ -1184,7 +1130,6 @@ fran lord francaise francesa francine -frankfurt frankie fraportgrenzsc frauen @@ -1344,14 +1289,12 @@ gigolo gijl gilly sampson giovanni -girl girlblowjob girlfreind girlfriend girlfriends girlfucking girlmilking -girls girlsanal girlslikegirls girlsprive @@ -1424,7 +1367,6 @@ gratistrailers gratisverhalen gratiswebcamsex gratisxxx -greece greekvsenglish grieksesex grilfriend @@ -1480,7 +1422,6 @@ hardeporno hardeseks harige harigepoes -harmony harmony hex hart hathaway @@ -1503,7 +1444,6 @@ hella hentai hermano hermaphrodite -herself hete heterosexverhalen hetesletjes @@ -1531,8 +1471,6 @@ hoertje hoertjes hoes hogtied -hole -holes hollandse hollandsesex homefuck @@ -1562,7 +1500,6 @@ homoverhalen hondenneuken hondentrimmen hondenzaad -honey honeymoon honeysuckle hooker @@ -1622,19 +1559,15 @@ hustlermagazine hustlertv ideepthroat ideepthroatcom -idols inari vachs incest incestporno incestsex inclusive incubus -indian -indonesian industrion ingetrokken ingoio -innocent inserted insertinons insertion @@ -1877,7 +1810,6 @@ lass lasses latex latexsex -latina latincouple latino laura @@ -1959,7 +1891,6 @@ linh linsey dawn mckenzie lips lipstick -lisa lisa daniels lisa marie literotica @@ -2001,7 +1932,6 @@ lolly badcock lombardi londonamateur long dong silver -lords lorna lace lory lotion @@ -2012,7 +1942,6 @@ lube lubricando lucia luciana -lucky lucy gresty lucy law lucygirl @@ -2029,11 +1958,9 @@ lutschen lutscht luxi lyndsey love -lynn lynn stone lynsey madame sindi -mafia maid maids maidstone @@ -2136,11 +2063,9 @@ mellons mercedez merel messy -mexican mexicana mexicano mia stone -michelle michelle b michelle thorne microbikini @@ -2166,14 +2091,12 @@ miranda miriams mirjam mirjams -miss missbunny missionary missy mistreated mistres mistress -misty miyah mmmmm moby @@ -2189,7 +2112,6 @@ molige moms mondneuken moni -monica monica sweetheart monika monique @@ -2219,7 +2141,6 @@ mundfick muschie muschifingern mya diamond -myself mysexgames mystique naakt @@ -2261,8 +2182,6 @@ nattekutten nattepoes nattepoesjes nattespleet -natural -naturals naturewonderwoman naturisme naturist @@ -2323,7 +2242,6 @@ nightfly nightfuck1 nightie niki blond -nikita nikita devine nikki nikki hunter @@ -2523,7 +2441,6 @@ pinay pinklips pinkpornstars pinky -pipe pis piskut pisshunters @@ -2571,11 +2488,9 @@ poesjes poland polandbathroom polderrape -polish pompino pompoarism ponygirl -pool poolbest poolhappy poolside @@ -2763,7 
+2678,6 @@ reinundraus reiten reiter reitet -renaissance renee pornero renee richards rica @@ -2828,18 +2742,15 @@ samantha sambal sammi jayne sandie caine -sandra sandra romain sandra russo sandramodel sandwich -sandy sapphic sapphicerotica sapphire raw sara nice sara stone -sarah sarah young sarahs sasha @@ -2902,7 +2813,6 @@ searchbigtits secretary seduction seductions -seeker sekfilms sekret seks @@ -3130,7 +3040,6 @@ showering showershaved showing shows -shy shyla siffredi silicon diff --git a/Tribler/Core/Config/config.spec b/Tribler/Core/Config/config.spec index e9ce65574ad..9d3fa605d66 100644 --- a/Tribler/Core/Config/config.spec +++ b/Tribler/Core/Config/config.spec @@ -1,23 +1,8 @@ [general] -family_filter = boolean(default=True) state_dir = string(default='') -ec_keypair_filename = string(default='') -megacache = boolean(default=True) log_dir = string(default=None) testnet = boolean(default=False) -[allchannel_community] -enabled = boolean(default=True) - -[channel_community] -enabled = boolean(default=True) - -[preview_channel_community] -enabled = boolean(default=True) - -[search_community] -enabled = boolean(default=True) - [tunnel_community] enabled = boolean(default=True) socks5_listen_ports = string_list(default=list('-1', '-1', '-1', '-1', '-1')) @@ -28,7 +13,6 @@ competing_slots = integer(default=15) [market_community] enabled = boolean(default=True) matchmaker = boolean(default=True) -ec_keypair_filename = string(default='') record_transactions = boolean(default=False) [dht] @@ -49,26 +33,9 @@ enabled = boolean(default=True) channel_edit = boolean(default=False) channels_dir = string(default='channels') -[metadata] -enabled = boolean(default=True) -store_dir = string(default=collected_metadata) - -[mainline_dht] -enabled = boolean(default=True) -port = integer(min=-1, max=65536, default=-1) - [torrent_checking] enabled = boolean(default=True) -[torrent_store] -enabled = boolean(default=True) -store_dir = string(default=collected_torrents) - -[torrent_collecting] -enabled = boolean(default=True) -max_torrents = integer(default=50000) -directory = string(default='') - [libtorrent] enabled = boolean(default=True) port = integer(min=-1, max=65536, default=-1) @@ -97,12 +64,9 @@ seeding_ratio = float(default=2.0) seeding_time = float(default=60) channel_download = boolean(default=False) -[dispersy] -enabled = boolean(default=True) -port = integer(min=-1, max=65536, default=-1) - [ipv8] enabled = boolean(default=True) +port = integer(min=-1, max=65536, default=-1) address = string(default='0.0.0.0') bootstrap_override = string(default='') statistics = boolean(default=False) diff --git a/Tribler/Core/Config/tribler_config.py b/Tribler/Core/Config/tribler_config.py index e133f227c47..9659b1de8a3 100644 --- a/Tribler/Core/Config/tribler_config.py +++ b/Tribler/Core/Config/tribler_config.py @@ -6,16 +6,17 @@ import logging import os +from configobj import ConfigObj + from six import text_type -from configobj import ConfigObj from validate import Validator +from Tribler.Core.DownloadConfig import get_default_dest_dir from Tribler.Core.Utilities.install_dir import get_lib_path from Tribler.Core.Utilities.network_utils import get_random_port from Tribler.Core.exceptions import InvalidConfigException from Tribler.Core.osutils import get_appstate_dir -from Tribler.Core.DownloadConfig import get_default_dest_dir FILENAME = 'triblerd.conf' SPEC_FILENAME = 'config.spec' @@ -138,19 +139,6 @@ def get_chant_channels_dir(self): path = self.config['chant']['channels_dir'] return path if 
os.path.isabs(path) else os.path.join(self.get_state_dir(), path) - def set_chant_channel_edit(self, value): - self.config['chant']['channel_edit'] = bool(value) - - def get_chant_channel_edit(self): - return self.config['chant']['channel_edit'] - - # General - def set_family_filter_enabled(self, value): - self.config['general']['family_filter'] = bool(value) - - def get_family_filter_enabled(self): - return self.config['general'].as_bool('family_filter') - def set_state_dir(self, state_dir): self.config["general"]["state_dir"] = state_dir @@ -160,16 +148,6 @@ def get_state_dir(self): return self.config["general"]["state_dir"] - def set_permid_keypair_filename(self, keypair_filename): - self.config['general']['ec_keypair_filename'] = keypair_filename - - def get_permid_keypair_filename(self): - file_name = self.config["general"]["ec_keypair_filename"] - if not file_name: - file_name = os.path.join(self.get_state_dir(), 'ec.pem') - self.set_permid_keypair_filename(file_name) - return file_name - def set_trustchain_keypair_filename(self, keypairfilename): self.config['trustchain']['ec_keypair_filename'] = keypairfilename @@ -202,12 +180,6 @@ def set_trustchain_live_edges_enabled(self, value): def get_trustchain_live_edges_enabled(self): return self.config['trustchain']['live_edges_enabled'] - def set_megacache_enabled(self, value): - self.config['general']['megacache'] = value - - def get_megacache_enabled(self): - return self.config['general']['megacache'] - def set_log_dir(self, value): self.config['general']['log_dir'] = value @@ -258,20 +230,6 @@ def set_http_api_retry_port(self, retry_port): def get_http_api_retry_port(self): return self.config['http_api']['retry_port'] - # Dispersy - - def set_dispersy_enabled(self, value): - self.config['dispersy']['enabled'] = value - - def get_dispersy_enabled(self): - return self.config['dispersy']['enabled'] - - def set_dispersy_port(self, value): - self.config['dispersy']['port'] = value - - def get_dispersy_port(self): - return self._obtain_port('dispersy', 'port') - # IPv8 def set_ipv8_enabled(self, value): @@ -280,6 +238,12 @@ def set_ipv8_enabled(self, value): def get_ipv8_enabled(self): return self.config['ipv8']['enabled'] + def set_ipv8_port(self, value): + self.config['ipv8']['port'] = value + + def get_ipv8_port(self): + return self._obtain_port('ipv8', 'port') + def set_ipv8_bootstrap_override(self, value): self.config['ipv8']['bootstrap_override'] = value @@ -447,20 +411,6 @@ def set_libtorrent_dht_enabled(self, value): def get_libtorrent_dht_enabled(self): return self.config['libtorrent']['dht'] - # Mainline DHT - - def set_mainline_dht_enabled(self, value): - self.config['mainline_dht']['enabled'] = value - - def get_mainline_dht_enabled(self): - return self.config['mainline_dht']['enabled'] - - def set_mainline_dht_port(self, port): - self.config['mainline_dht']['port'] = port - - def get_mainline_dht_port(self): - return self._obtain_port('mainline_dht', 'port') - # Video server def set_video_server_enabled(self, value): @@ -584,86 +534,6 @@ def get_popularity_community_enabled(self): def set_popularity_community_enabled(self, value): self.config['popularity_community']['enabled'] = value - # Torrent store - - def get_torrent_store_enabled(self): - return self.config['torrent_store']['enabled'] - - def set_torrent_store_enabled(self, value): - self.config['torrent_store']['enabled'] = value - - def get_torrent_store_dir(self): - return os.path.join(self.get_state_dir(), self.config['torrent_store']['store_dir']) - - def 
set_torrent_store_dir(self, value): - self.config['torrent_store']['store_dir'] = value - - # Metadata - - def get_metadata_enabled(self): - return self.config['metadata']['enabled'] - - def set_metadata_enabled(self, mode): - self.config['metadata']['enabled'] = mode - - def get_metadata_store_dir(self): - return os.path.join(self.get_state_dir(), self.config['metadata']['store_dir']) - - def set_metadata_store_dir(self, value): - self.config['metadata']['store_dir'] = value - - # Torrent collecting - - def set_torrent_collecting_enabled(self, value): - self.config['torrent_collecting']['enabled'] = value - - def get_torrent_collecting_enabled(self): - return self.config['torrent_collecting']['enabled'] - - def set_torrent_collecting_max_torrents(self, value): - self.config['torrent_collecting']['max_torrents'] = value - - def get_torrent_collecting_max_torrents(self): - return self.config['torrent_collecting']['max_torrents'] - - def set_torrent_collecting_dir(self, value): - self.config['torrent_collecting']['directory'] = value - - def get_torrent_collecting_dir(self): - return self.config['torrent_collecting']['directory'] - - # Search Community - - def set_torrent_search_enabled(self, mode): - self.config['search_community']['enabled'] = mode - - def get_torrent_search_enabled(self): - return self.config['search_community']['enabled'] - - # AllChannel Community - - def set_channel_search_enabled(self, mode): - self.config['allchannel_community']['enabled'] = mode - - def get_channel_search_enabled(self): - return self.config['allchannel_community']['enabled'] - - # Channel Community - - def set_channel_community_enabled(self, value): - self.config['channel_community']['enabled'] = value - - def get_channel_community_enabled(self): - return self.config['channel_community']['enabled'] - - # PreviewChannel Community - - def set_preview_channel_community_enabled(self, value): - self.config['preview_channel_community']['enabled'] = value - - def get_preview_channel_community_enabled(self): - return self.config['preview_channel_community']['enabled'] - # Watch folder def set_watch_folder_enabled(self, value): diff --git a/Tribler/Core/CreditMining/CreditMiningManager.py b/Tribler/Core/CreditMining/CreditMiningManager.py index 3c42ddc4f11..5647483ae55 100644 --- a/Tribler/Core/CreditMining/CreditMiningManager.py +++ b/Tribler/Core/CreditMining/CreditMiningManager.py @@ -1,5 +1,4 @@ -from __future__ import absolute_import -from __future__ import division +from __future__ import absolute_import, division import logging import os @@ -19,7 +18,8 @@ from Tribler.Core.DownloadConfig import DownloadStartupConfig from Tribler.Core.TorrentDef import TorrentDefNoMetainfo from Tribler.Core.simpledefs import DLSTATUS_DOWNLOADING, DLSTATUS_SEEDING, DLSTATUS_STOPPED, \ - DLSTATUS_STOPPED_ON_ERROR, DOWNLOAD, NTFY_CREDIT_MINING, NTFY_ERROR, UPLOAD + DLSTATUS_STOPPED_ON_ERROR, NTFY_CREDIT_MINING, NTFY_ERROR, UPLOAD +from Tribler.Core.simpledefs import DOWNLOAD from Tribler.pyipv8.ipv8.taskmanager import TaskManager @@ -27,6 +27,7 @@ class CreditMiningTorrent(object): """ Wrapper class for Credit Mining download """ + def __init__(self, infohash, name, download=None, state=None): self.infohash = infohash self.name = name @@ -49,6 +50,7 @@ class CreditMiningSettings(object): """ This class contains settings used by the credit mining manager """ + def __init__(self, config=None): self.max_torrents_active = 8 self.max_torrents_listed = 100 @@ -158,8 +160,8 @@ def add_source(self, source_str): if source_str not 
in self.sources: num_torrents = len(self.torrents) - if isinstance(source_str, string_types) and len(source_str) == 40: - source = ChannelSource(self.session, source_str, self.on_torrent_insert) + if isinstance(source_str, string_types): + source = ChannelSource(self.session, unhexlify(source_str), self.on_torrent_insert) else: self._logger.error('Cannot add unknown source %s', source_str) return @@ -222,7 +224,7 @@ def on_torrent_insert(self, source_str, infohash, name): # If a download already exists or already has a checkpoint, skip this torrent if self.session.get_download(unhexlify(infohash)) or \ - os.path.exists(os.path.join(self.session.get_downloads_pstate_dir(), infohash + '.state')): + os.path.exists(os.path.join(self.session.get_downloads_pstate_dir(), infohash + '.state')): self._logger.debug('Skipping torrent %s (download already running or scheduled to run)', infohash) return @@ -337,7 +339,7 @@ def monitor_downloads(self, dslist): self._logger.info('Downloading: %d, Uploading: %d, Stopped: %d', num_seeding, num_downloading, stopped) self._logger.info('%d active download(s), %.3f MB uploaded, %.3f MB downloaded', - num_seeding + num_downloading, bytes_uploaded/MB, bytes_downloaded/MB) + num_seeding + num_downloading, bytes_uploaded / MB, bytes_downloaded / MB) if not self.session_ready.called and len(dslist) == self.num_checkpoints: self.session_ready.callback(None) diff --git a/Tribler/Core/CreditMining/CreditMiningSource.py b/Tribler/Core/CreditMining/CreditMiningSource.py index 8132e58d4b3..de29fda6938 100644 --- a/Tribler/Core/CreditMining/CreditMiningSource.py +++ b/Tribler/Core/CreditMining/CreditMiningSource.py @@ -1,17 +1,15 @@ from __future__ import absolute_import import logging +from binascii import hexlify -from binascii import hexlify, unhexlify - -from Tribler.dispersy.exception import CommunityNotFoundException -from Tribler.Core.simpledefs import NTFY_DISCOVERED, NTFY_TORRENT, NTFY_CHANNELCAST from Tribler.pyipv8.ipv8.taskmanager import TaskManager class BaseSource(TaskManager): """ - Base class for credit mining source. For now, it can only be a Dispersy channel + Base class for a credit mining source. + The source specifies where to get torrents from. """ def __init__(self, session, source, torrent_insert_cb): @@ -47,57 +45,20 @@ def __str__(self): class ChannelSource(BaseSource): """ - Credit mining source from a channel. + Credit mining source from a (giga)channel. 
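+    The source string is the channel's public key; its torrents are read from the local metadata store.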
""" - def __init__(self, session, dispersy_cid, torrent_insert_cb): - super(ChannelSource, self).__init__(session, dispersy_cid, torrent_insert_cb) - self.community = None - self.channelcast_db = self.session.open_dbhandler(NTFY_CHANNELCAST) - def start(self): super(ChannelSource, self).start() - # Join the community if needed - dispersy = self.session.get_dispersy_instance() - try: - self.community = dispersy.get_community(unhexlify(self.source), True) - except CommunityNotFoundException: - from Tribler.community.allchannel.community import AllChannelCommunity - from Tribler.community.channel.community import ChannelCommunity - - allchannelcommunity = None - for community in dispersy.get_communities(): - if isinstance(community, AllChannelCommunity): - allchannelcommunity = community - - if allchannelcommunity: - self.community = ChannelCommunity.init_community(dispersy, - dispersy.get_member(mid=unhexlify(self.source)), - allchannelcommunity.my_member, self.session) - self._logger.info('Joined channel community %s', self.source) - else: - self._logger.error('Could not find AllChannelCommunity') - return + channel = self.session.lm.mds.ChannelMetadata.get_channel_with_id(self.source) + if not channel: + self._logger.error("Could not find channel!") + return # Add torrents from database - channel_id = self.community.get_channel_id() - torrents = self.channelcast_db.getTorrentsFromChannelId(channel_id, True, - ['infohash', 'ChannelTorrents.name']) - - for infohash_bin, name in torrents: - self.torrent_insert_callback(self.source, hexlify(infohash_bin), name) - - self.session.add_observer(self.on_torrent_discovered, NTFY_TORRENT, [NTFY_DISCOVERED]) - - def stop(self): - super(ChannelSource, self).stop() - self.session.remove_observer(self.on_torrent_discovered) - - def on_torrent_discovered(self, subject, changetype, objectID, object_dict): - # Add newly discovered torrents - if self.source == object_dict['dispersy_cid']: - self.torrent_insert_callback(self.source, object_dict['infohash'], object_dict['name']) + for torrent in channel.contents_list: + self.torrent_insert_callback(self.source, hexlify(torrent.infohash), torrent.title) def __str__(self): return 'channel:' + self.source diff --git a/Tribler/Core/Libtorrent/LibtorrentDownloadImpl.py b/Tribler/Core/Libtorrent/LibtorrentDownloadImpl.py index b891a581b02..8f16bd64800 100644 --- a/Tribler/Core/Libtorrent/LibtorrentDownloadImpl.py +++ b/Tribler/Core/Libtorrent/LibtorrentDownloadImpl.py @@ -11,6 +11,7 @@ import sys import time from binascii import hexlify +from threading import RLock import libtorrent as lt @@ -22,7 +23,6 @@ from twisted.internet.task import LoopingCall from twisted.python.failure import Failure -from Tribler.Core import NoDispersyRLock from Tribler.Core.DownloadConfig import DownloadConfigInterface, DownloadStartupConfig, get_default_dest_dir from Tribler.Core.DownloadState import DownloadState from Tribler.Core.Libtorrent import checkHandleAndSynchronize @@ -51,7 +51,7 @@ def __init__(self, f, d): self._download = d pieces = self._download.tdef.get_pieces() - self.pieces = [pieces[x:x + 20]for x in xrange(0, len(pieces), 20)] + self.pieces = [pieces[x:x + 20] for x in xrange(0, len(pieces), 20)] self.piecesize = self._download.tdef.get_piece_length() self.startpiece = get_info_from_handle(self._download.handle).map_file( @@ -64,7 +64,9 @@ def read(self, *args): self._logger.debug('VODFile: get bytes %s - %s', oldpos, oldpos + args[0]) - while not self._file.closed and 
self._download.get_byte_progress([(self._download.get_vod_fileindex(), oldpos, oldpos + args[0])]) < 1 and self._download.vod_seekpos is not None: + while not self._file.closed and self._download.get_byte_progress([ + (self._download.get_vod_fileindex(), oldpos, oldpos + args[0])]) < 1 \ + and self._download.vod_seekpos is not None: time.sleep(1) if self._file.closed: @@ -94,7 +96,7 @@ def seek(self, *args): self._logger.debug('VODFile: seek, get pieces %s', self._download.handle.piece_priorities()) self._logger.debug('VODFile: seek, got pieces %s', [ - int(piece) for piece in self._download.handle.status().pieces]) + int(piece) for piece in self._download.handle.status().pieces]) def close(self, *args): self._file.close(*args) @@ -105,7 +107,6 @@ def closed(self): class LibtorrentDownloadImpl(DownloadConfigInterface, TaskManager): - """ Download subclass that represents a libtorrent download.""" def __init__(self, session, tdef): @@ -113,7 +114,7 @@ def __init__(self, session, tdef): self._logger = logging.getLogger(self.__class__.__name__) - self.dllock = NoDispersyRLock() + self.dllock = RLock() self.session = session self.tdef = tdef self.handle = None @@ -396,7 +397,7 @@ def get_pieces_base64(self): encoded_str = "" for i in range(0, len(bitstr), 8): - encoded_str += chr(int(bitstr[i:i+8].ljust(8, '0'), 2)) + encoded_str += chr(int(bitstr[i:i + 8].ljust(8, '0'), 2)) return base64.b64encode(encoded_str) @checkHandleAndSynchronize(0.0) @@ -438,7 +439,8 @@ def set_piece_priority(self, pieces_need, priority): do_prio = True else: self._logger.info( - "LibtorrentDownloadImpl: could not set priority for non-existing piece %d / %d", piece, len(piecepriorities)) + "LibtorrentDownloadImpl: could not set priority for non-existing piece %d / %d", piece, + len(piecepriorities)) if do_prio: self.handle.prioritize_pieces(piecepriorities) else: @@ -574,11 +576,6 @@ def on_metadata_received_alert(self, alert): self.set_filepieceranges() self.set_selected_files() - if self.session.lm.rtorrent_handler: - self.session.lm.rtorrent_handler.save_torrent(self.tdef) - elif self.session.lm.torrent_db: - self.session.lm.torrent_db.addExternalTorrent(self.tdef, extra_info={'status': 'good'}) - self.checkpoint() def on_file_renamed_alert(self, alert): @@ -626,9 +623,9 @@ def on_torrent_finished_alert(self, alert): if self.finished_callback_already_called: self._logger.warning("LibtorrentDownloadImpl: tried to repeat the call to finished_callback %s", self.tdef.get_name()) - else: - self.finished_callback_already_called = True + self.finished_callback(self) + self.finished_callback_already_called = True progress = self.get_state().get_progress() if self.get_mode() == DLMODE_VOD: @@ -645,6 +642,7 @@ def reset_priorities(): return if self.get_state().get_progress() == 1.0: self.set_byte_priority([(self.get_vod_fileindex(), 0, -1)], 1) + self.register_anonymous_task("reset_priorities", reactor.callLater(5, reset_priorities)) if self.endbuffsize: @@ -670,7 +668,8 @@ def set_corrected_infoname(self): self.correctedinfoname = fix_filebasename(self.tdef.get_name_as_unicode()) # Allow correctedinfoname to be overwritten for multifile torrents only - if self.get_corrected_filename() and self.get_corrected_filename() != '' and 'files' in self.tdef.get_metainfo()['info']: + if self.get_corrected_filename() and self.get_corrected_filename() != '' and 'files' in \ + self.tdef.get_metainfo()['info']: self.correctedinfoname = self.get_corrected_filename() @property @@ -814,8 +813,9 @@ def calc_prebuf_frac(self, 
consecutive=False): [(self.get_vod_fileindex(), self.vod_seekpos, self.vod_seekpos + self.prebuffsize), (self.get_vod_fileindex(), -self.endbuffsize - 1, -1)], consecutive=consecutive) else: - return self.get_byte_progress([(self.get_vod_fileindex(), self.vod_seekpos, self.vod_seekpos + self.prebuffsize)], - consecutive=consecutive) + return self.get_byte_progress( + [(self.get_vod_fileindex(), self.vod_seekpos, self.vod_seekpos + self.prebuffsize)], + consecutive=consecutive) else: return 0.0 @@ -1042,7 +1042,7 @@ def checkpoint(self): filename = os.path.join(self.session.get_downloads_pstate_dir(), basename) if not os.path.isfile(filename): resume_data = self.pstate_for_restart.get('state', 'engineresumedata') \ - if self.pstate_for_restart else None + if self.pstate_for_restart else None # 2. If there is no saved data for this infohash, checkpoint it without data so we do not # lose it when we crash or restart before the download becomes known. @@ -1051,7 +1051,7 @@ def checkpoint(self): 'file-version': 1, 'info-hash': self.tdef.get_infohash() } - alert = type('anonymous_alert', (object, ), dict(resume_data=resume_data)) + alert = type('anonymous_alert', (object,), dict(resume_data=resume_data)) self.on_save_resume_data_alert(alert) return succeed(None) @@ -1068,7 +1068,8 @@ def get_persistent_download_config(self): pstate.set('state', 'version', PERSISTENTSTATE_CURRENTVERSION) if isinstance(self.tdef, TorrentDefNoMetainfo): pstate.set('state', 'metainfo', { - 'infohash': self.tdef.get_infohash(), 'name': self.tdef.get_name_as_unicode(), 'url': self.tdef.get_url()}) + 'infohash': self.tdef.get_infohash(), 'name': self.tdef.get_name_as_unicode(), + 'url': self.tdef.get_url()}) else: pstate.set('state', 'metainfo', self.tdef.get_metainfo()) diff --git a/Tribler/Core/Libtorrent/LibtorrentMgr.py b/Tribler/Core/Libtorrent/LibtorrentMgr.py index 9bc4b4ce7d7..7cd73513594 100644 --- a/Tribler/Core/Libtorrent/LibtorrentMgr.py +++ b/Tribler/Core/Libtorrent/LibtorrentMgr.py @@ -155,6 +155,7 @@ def create_session(self, hops=0, store_listen_port=True): # the settings dictionary settings['outgoing_port'] = 0 settings['num_outgoing_ports'] = 1 + settings['allow_multiple_connections_per_ip'] = 0 # Copy construct so we don't modify the default list extensions = list(DEFAULT_LT_EXTENSIONS) diff --git a/Tribler/Core/Modules/MetadataStore/OrmBindings/channel_metadata.py b/Tribler/Core/Modules/MetadataStore/OrmBindings/channel_metadata.py index 24c27c38727..22687e26c92 100644 --- a/Tribler/Core/Modules/MetadataStore/OrmBindings/channel_metadata.py +++ b/Tribler/Core/Modules/MetadataStore/OrmBindings/channel_metadata.py @@ -1,18 +1,36 @@ from __future__ import absolute_import import os +import sys +from binascii import hexlify from datetime import datetime -from libtorrent import file_storage, add_files, create_torrent, set_piece_hashes, bencode, torrent_info -from pony import orm -from pony.orm import db_session +from libtorrent import add_files, bencode, create_torrent, file_storage, set_piece_hashes, torrent_info + +import lz4.frame -from Tribler.Core.Modules.MetadataStore.serialization import ChannelMetadataPayload, CHANNEL_TORRENT -from Tribler.Core.exceptions import DuplicateTorrentFileError, DuplicateChannelNameError +from pony import orm +from pony.orm import db_session, raw_sql, select + +from Tribler.Core.Category.Category import default_category_filter +from Tribler.Core.Modules.MetadataStore.OrmBindings.channel_node import COMMITTED, LEGACY_ENTRY, NEW, PUBLIC_KEY_LEN, \ + TODELETE +from 
Tribler.Core.Modules.MetadataStore.serialization import CHANNEL_TORRENT, ChannelMetadataPayload
+from Tribler.Core.TorrentDef import TorrentDef
+from Tribler.Core.Utilities.tracker_utils import get_uniformed_tracker_url
+from Tribler.Core.exceptions import DuplicateChannelIdError, DuplicateTorrentFileError
 from Tribler.pyipv8.ipv8.database import database_blob
 
-CHANNEL_DIR_NAME_LENGTH = 60  # Its not 40 so it could be distinguished from infohash
+CHANNEL_DIR_NAME_LENGTH = 32  # It's not 40, so it can be distinguished from an infohash
 BLOB_EXTENSION = '.mdblob'
+LZ4_END_MARK_SIZE = 4  # in bytes, from original specification. We don't use CRC
+ROOT_CHANNEL_ID = 0
+
+
+def chunks(l, n):
+    """Yield successive n-sized chunks from l."""
+    for i in range(0, len(l), n):
+        yield l[i:i + n]
 
 
 def create_torrent_from_dir(directory, torrent_filename):
@@ -34,26 +52,30 @@ def entries_to_chunk(metadata_list, chunk_size, start_index=0):
     """
     For efficiency reasons, this is deliberately written in C style
     :param metadata_list: the list of metadata to process.
-    :param limit: maximum size of a resulting chunk, in bytes.
+    :param chunk_size: the desired chunk size limit, in bytes. The produced chunk's size will never exceed this value.
     :param start_index: the index of the element of metadata_list from which the processing should start.
     :return: (chunk, last_entry_index) tuple, where chunk is the resulting chunk in string form and
     last_entry_index is the index of the element of the input list that was put into the chunk the last.
     """
     # Try to fit as many blobs into this chunk as permitted by chunk_size and
     # calculate their ends' offsets in the blob
-    out_list = []
-    offset = 0
     last_entry_index = None
-    for index, metadata in enumerate(metadata_list[start_index:], start_index):
-        blob = ''.join(metadata.serialized_delete() if metadata.deleted else metadata.serialized())
-        # Chunk size limit reached?
-        if offset + len(blob) > chunk_size:
-            break
-        # Now that we now it will fit in, we can safely append it
-        offset += len(blob)
-        last_entry_index = index
-        out_list.append(blob)
+    with lz4.frame.LZ4FrameCompressor(auto_flush=True) as c:
+        header = c.begin()
+        offset = len(header)
+        out_list = [header]  # LZ4 header
+        for index, metadata in enumerate(metadata_list[start_index:], start_index):
+            blob = c.compress(
+                ''.join(metadata.serialized_delete() if metadata.status == TODELETE else metadata.serialized()))
+            # Chunk size limit reached?
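+            # (The subtraction below reserves LZ4_END_MARK_SIZE bytes for the end mark that c.flush() appends.)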
+            if offset + len(blob) > (chunk_size - LZ4_END_MARK_SIZE):
+                break
+            # Now that we know it will fit in, we can safely append it
+            offset += len(blob)
+            last_entry_index = index
+            out_list.append(blob)
+        out_list.append(c.flush())  # LZ4 end mark
 
     chunk = ''.join(out_list)
     if last_entry_index is None:
@@ -65,23 +87,25 @@ def define_binding(db):
     class ChannelMetadata(db.TorrentMetadata):
         _discriminator_ = CHANNEL_TORRENT
-        version = orm.Optional(int, size=64, default=0)
+
+        # Serializable
+        num_entries = orm.Optional(int, size=64, default=0)
+        start_timestamp = orm.Optional(int, size=64, default=0)
+
+        # Local
         subscribed = orm.Optional(bool, default=False)
         votes = orm.Optional(int, size=64, default=0)
         local_version = orm.Optional(int, size=64, default=0)
+
         _payload_class = ChannelMetadataPayload
         _channels_dir = None
+        _category_filter = None
         _CHUNK_SIZE_LIMIT = 1 * 1024 * 1024  # We use 1MB chunks as a workaround for Python's lack of string pointers
 
         @db_session
         def update_metadata(self, update_dict=None):
-            now = datetime.utcnow()
             channel_dict = self.to_dict()
            channel_dict.update(update_dict or {})
-            channel_dict.update({
-                "size": self.contents_len,
-                "timestamp": now,
-            })
             self.set(**channel_dict)
             self.sign()
@@ -97,24 +121,29 @@ def process_channel_metadata_payload(cls, payload):
             if not channel:
                 return ChannelMetadata.from_payload(payload)
 
-            if payload.version > channel.version:
+            if payload.timestamp > channel.timestamp:
                 channel.set(**payload.to_dict())
             return channel
 
         @classmethod
         @db_session
-        def create_channel(cls, title, description):
+        def get_my_channel(cls):
+            return ChannelMetadata.get_channel_with_id(cls._my_key.pub().key_to_bin()[10:])
+
+        @classmethod
+        @db_session
+        def create_channel(cls, title, description=""):
            """
             Create a channel and sign it with a given key.
             :param title: The title of the channel
             :param description: The description of the channel
             :return: The channel metadata
             """
-            if ChannelMetadata.get_channel_with_id(cls._my_key.pub().key_to_bin()):
-                raise DuplicateChannelNameError()
+            if ChannelMetadata.get_channel_with_id(cls._my_key.pub().key_to_bin()[10:]):
+                raise DuplicateChannelIdError()
 
-            my_channel = cls(public_key=database_blob(cls._my_key.pub().key_to_bin()), title=title,
-                             tags=description, subscribed=True)
+            my_channel = cls(id_=ROOT_CHANNEL_ID, public_key=database_blob(cls._my_key.pub().key_to_bin()[10:]),
+                             title=title, tags=description, subscribed=True)
             my_channel.sign()
             return my_channel
@@ -131,9 +160,18 @@ def consolidate_channel_torrent(self):
             for filename in os.listdir(folder):
                 file_path = os.path.join(folder, filename)
                 # We only remove mdblobs and leave the rest as it is
-                if filename.endswith(BLOB_EXTENSION):
+                if filename.endswith(BLOB_EXTENSION) or filename.endswith(BLOB_EXTENSION + '.lz4'):
                     os.unlink(file_path)
-            self.update_channel_torrent(self.contents_list)
+
+            # Channel should get a new starting timestamp and its contents should get higher timestamps
+            start_timestamp = self._clock.tick()
+            for g in self.contents:
+                if g.status == COMMITTED:
+                    g.status = NEW
+                    g.timestamp = self._clock.tick()
+                    g.sign()
+
+            self.commit_channel_torrent(new_start_timestamp=start_timestamp)
 
         def update_channel_torrent(self, metadata_list):
             """
@@ -147,93 +185,121 @@
             if not os.path.isdir(channel_dir):
                 os.makedirs(channel_dir)
 
-            # Basically, a channel's version represents the count of unique entries it ever had.
- # For a channel that never had deleted anything, it's version = len(contents) - old_version = self.version index = 0 + new_timestamp = self.timestamp while index < len(metadata_list): # Squash several serialized and signed metadata entries into a single file data, index = entries_to_chunk(metadata_list, self._CHUNK_SIZE_LIMIT, start_index=index) - blob_filename = str(old_version + index).zfill(12) + BLOB_EXTENSION + new_timestamp = self._clock.tick() + blob_filename = str(new_timestamp).zfill(12) + BLOB_EXTENSION + '.lz4' with open(os.path.join(channel_dir, blob_filename), 'wb') as f: f.write(data) - new_version = self.version + len(metadata_list) - + # TODO: add error-handling routines to make sure the timestamp is not messed up in case of an error # Make torrent out of dir with metadata files - start_ts = datetime.utcnow() torrent, infohash = create_torrent_from_dir(channel_dir, os.path.join(self._channels_dir, self.dir_name + ".torrent")) - - # Torrent files have time resolution of 1 second. If a channel torrent is created in the same second as - # a new metadata entry, the latter would still be listened as a staged entry. To account for this, - # we store torrent_date with higher resolution. As libtorrent uses the moment of beginning of the torrent - # creation as a source for 'creation date' for torrent, we sample it just before calling it. Then we select - # the larger of two timestamps. torrent_date = datetime.utcfromtimestamp(torrent['creation date']) - torrent_date_corrected = start_ts if start_ts > torrent_date else torrent_date - - self.update_metadata(update_dict={"infohash": infohash, "version": new_version, - "torrent_date": torrent_date_corrected}) - self.local_version = new_version - # Write the channel mdblob away - with open(os.path.join(self._channels_dir, self.dir_name + BLOB_EXTENSION), 'wb') as out_file: - out_file.write(''.join(self.serialized())) + return {"infohash": infohash, "num_entries": self.contents_len, + "timestamp": new_timestamp, "torrent_date": torrent_date}, torrent - self._logger.info("Channel %s committed with %i new entries. New version is %i", - str(self.public_key).encode("hex"), len(metadata_list), new_version) - return infohash - - def commit_channel_torrent(self): + def commit_channel_torrent(self, new_start_timestamp=None): """ Collect new/uncommitted and marked for deletion metadata entries, commit them to a channel torrent and remove the obsolete entries if the commit succeeds. :return The new infohash, should be used to update the downloads """ new_infohash = None + torrent = None + md_list = self.staged_entries_list + if not md_list: + return None + try: - new_infohash = self.update_channel_torrent(self.staged_entries_list) + update_dict, torrent = self.update_channel_torrent(md_list) except IOError: self._logger.error( "Error during channel torrent commit, not going to garbage collect the channel. 
Channel %s", str(self.public_key).encode("hex")) else: - # Clean up obsolete entries - self.garbage_collect() - return new_infohash + if new_start_timestamp: + update_dict['start_timestamp'] = new_start_timestamp + new_infohash = update_dict['infohash'] if self.infohash != update_dict['infohash'] else None + self.update_metadata(update_dict) + self.local_version = self.timestamp + # Change status of committed metadata and clean up obsolete TODELETE entries + for g in md_list: + if g.status == NEW: + g.status = COMMITTED + elif g.status == TODELETE: + g.delete() + + # Write the channel mdblob to disk + self.to_file(os.path.join(self._channels_dir, self.dir_name + BLOB_EXTENSION)) + + self._logger.info("Channel %s committed with %i new entries. New version is %i", + str(self.public_key).encode("hex"), len(md_list), update_dict['timestamp']) + return torrent @db_session - def has_torrent(self, infohash): + def get_torrent(self, infohash): """ - Check whether this channel contains the torrent with a provided infohash. + Return the torrent with a provided infohash. :param infohash: The infohash of the torrent to search for - :return: True if the torrent exists in the channel, else False + :return: TorrentMetadata if the torrent exists in the channel, else None """ - return db.TorrentMetadata.get(public_key=self.public_key, infohash=infohash) is not None + return db.TorrentMetadata.get(public_key=self.public_key, infohash=infohash) @db_session - def add_torrent_to_channel(self, tdef, extra_info): + def add_torrent_to_channel(self, tdef, extra_info=None): """ Add a torrent to your channel. :param tdef: The torrent definition file of the torrent to add :param extra_info: Optional extra info to add to the torrent """ - if self.has_torrent(tdef.get_infohash()): - raise DuplicateTorrentFileError() + if extra_info: + tags = extra_info.get('description', '') + else: + # We only want to determine the type of the data. XXX filtering is done by the receiving side + tags = default_category_filter.calculateCategory(tdef.metainfo, tdef.get_name_as_unicode()) - torrent_metadata = db.TorrentMetadata.from_dict({ + new_entry_dict = { "infohash": tdef.get_infohash(), - "title": tdef.get_name_as_unicode(), - "tags": extra_info.get('description', '') if extra_info else '', + "title": tdef.get_name_as_unicode()[:300], # TODO: do proper size checking based on bytes + "tags": tags[:200], # TODO: do proper size checking based on bytes "size": tdef.get_length(), "torrent_date": datetime.fromtimestamp(tdef.get_creation_date()), - "tc_pointer": 0, - "tracker_info": tdef.get_tracker() or '', - "public_key": self._my_key.pub().key_to_bin() - }) - torrent_metadata.sign() + "tracker_info": get_uniformed_tracker_url(tdef.get_tracker() or '') or '', + "status": NEW} + + # See if the torrent is already in the channel + old_torrent = self.get_torrent(tdef.get_infohash()) + if old_torrent: + # If it is there, check if we were going to delete it + if old_torrent.status == TODELETE: + if old_torrent.metadata_conflicting(new_entry_dict): + # Metadata from torrent we're trying to add is conflicting with the + # deleted old torrent's metadata. We will replace the old metadata. + new_timestamp = self._clock.tick() + old_torrent.set(timestamp=new_timestamp, **new_entry_dict) + old_torrent.sign() + else: + # No conflict. This means the user is trying to replace the deleted torrent + # with the same one. Just recover the old one. 
+                        old_torrent.status = COMMITTED
+                        torrent_metadata = old_torrent
+                else:
+                    raise DuplicateTorrentFileError()
+            else:
+                torrent_metadata = db.TorrentMetadata.from_dict(new_entry_dict)
+            torrent_metadata.parents.add(self)
+            return torrent_metadata
+
+        @property
+        def dirty(self):
+            return self.contents.where(lambda g: g.status == NEW or g.status == TODELETE).exists()
 
         @property
         def contents(self):
@@ -241,39 +307,21 @@
 
         @property
         def uncommitted_contents(self):
-            return (g for g in self.newer_entries if not g.deleted)
-
-        @property
-        def committed_contents(self):
-            return (g for g in self.older_entries if not g.deleted)
+            return self.contents.where(lambda g: g.status == NEW)
 
         @property
         def deleted_contents(self):
-            return (g for g in self.contents if g.deleted)
+            return self.contents.where(lambda g: g.status == TODELETE)
 
         @property
        def dir_name(self):
             # Have to limit this to support Windows file path length limit
-            return str(self.public_key).encode('hex')[-CHANNEL_DIR_NAME_LENGTH:]
-
-        @property
-        def newer_entries(self):
-            return db.Metadata.select(
-                lambda g: g.timestamp > self.torrent_date and g.public_key == self.public_key and g != self)
-
-        @property
-        def older_entries(self):
-            return db.Metadata.select(
-                lambda g: g.timestamp < self.torrent_date and g.public_key == self.public_key and g != self)
-
-        @db_session
-        def garbage_collect(self):
-            orm.delete(g for g in self.older_entries if g.deleted)
+            return hexlify(self.public_key)[:CHANNEL_DIR_NAME_LENGTH]
 
         @property
         @db_session
         def staged_entries_list(self):
-            return list(self.deleted_contents) + list(self.newer_entries)
+            return list(self.deleted_contents) + list(self.uncommitted_contents)
 
         @property
         @db_session
@@ -285,22 +333,23 @@
         def contents_len(self):
             return orm.count(self.contents)
 
         @db_session
-        def delete_torrent_from_channel(self, infohash):
+        def delete_torrent(self, infohash):
             """
             Remove a torrent from this channel.
             Obsolete blob files are never deleted except on defragmentation of the channel.
             :param infohash: The infohash of the torrent to remove
             :return True if deleted, False if no MD with the given infohash found
             """
-            if self.has_torrent(infohash):
-                torrent_metadata = db.TorrentMetadata.get(public_key=self.public_key, infohash=infohash)
-            else:
+            torrent_metadata = db.TorrentMetadata.get(public_key=self.public_key, infohash=infohash)
+            if not torrent_metadata:
                 return False
-            if torrent_metadata.timestamp > self.torrent_date:
+
+            if torrent_metadata.status == NEW:  # Uncommitted metadata.
Delete immediately
+                torrent_metadata.delete()
+            else:
+                torrent_metadata.status = TODELETE
+
             return True
 
         @classmethod
@@ -313,6 +362,17 @@ def get_channel_with_id(cls, channel_id):
             """
             return cls.get(public_key=database_blob(channel_id))
 
+        @db_session
+        def drop_channel_contents(self):
+            """
+            Remove all torrents from the channel
+            """
+            # Immediately delete uncommitted metadata
+            self.uncommitted_contents.delete()
+            # Mark the rest as deleted
+            for g in self.contents:
+                g.status = TODELETE
+
         @classmethod
         @db_session
         def get_channel_with_infohash(cls, infohash):
@@ -320,14 +380,136 @@
 
         @classmethod
         @db_session
-        def get_random_channels(cls, limit):
+        def get_channel_with_dirname(cls, dirname):
+            # It is impossible to use LIKE queries on BLOBs, so we have to use comparisons
+            def extend_to_bitmask(txt):
+                return txt + "0" * (PUBLIC_KEY_LEN * 2 - CHANNEL_DIR_NAME_LENGTH)
+
+            dirname_binmask_start = "x'" + extend_to_bitmask(dirname) + "'"
+
+            binmask_plus_one = ("%X" % (int(dirname, 16) + 1)).zfill(len(dirname))
+            dirname_binmask_end = "x'" + extend_to_bitmask(binmask_plus_one) + "'"
+
+            sql = "g.public_key >= " + dirname_binmask_start + " AND g.public_key < " + dirname_binmask_end
+            return orm.get(g for g in cls if raw_sql(sql))
+
+        @classmethod
+        @db_session
+        def get_random_channels(cls, limit, only_subscribed=False):
             """
-            Fetch up to some limit of channels we are subscribed to.
+            Fetch up to some limit of random channels.
 
-            :param limit: the maximum amount of channels to fetch
+            :param limit: the maximum number of channels to fetch
+            :param only_subscribed: whether we only want random channels we are subscribed to
             :return: the subset of random channels we are subscribed to
             :rtype: list
             """
-            return db.ChannelMetadata.select(lambda g: g.subscribed).random(limit)
+            if only_subscribed:
+                select_lambda = lambda g: g.subscribed and g.status not in [LEGACY_ENTRY, NEW,
+                                                                            TODELETE] and g.num_entries > 0
+            else:
+                select_lambda = lambda g: g.status not in [LEGACY_ENTRY, NEW, TODELETE] and g.num_entries > 0
+
+            return db.ChannelMetadata.select(select_lambda).random(limit)
+
+        @db_session
+        def get_random_torrents(self, limit):
+            return self.contents.where(lambda g: g.status not in [NEW, TODELETE]).random(limit)
+
+        @classmethod
+        @db_session
+        def get_updated_channels(cls):
+            return select(g for g in cls if g.subscribed and (g.local_version < g.timestamp))
+
+        @classmethod
+        @db_session
+        def get_entries(cls, first=None, last=None, subscribed=False, metadata_type=CHANNEL_TORRENT, **kwargs):
+            """
+            Get some channels. Optionally sort the results by a specific field, or filter the channels based
+            on a keyword/whether you are subscribed to it.
+            :return: A tuple. The first entry is a list of ChannelMetadata entries. The second entry indicates
+            the total number of results, regardless of the passed first/last parameters.
+            """
+            pony_query, count = super(ChannelMetadata, cls).get_entries(metadata_type=metadata_type, **kwargs)
+            if subscribed:
+                pony_query = pony_query.where(subscribed=subscribed)
+
+            return pony_query[(first or 1) - 1:last] if first or last else pony_query, count
+
+        @db_session
+        def to_simple_dict(self):
+            """
+            Return a basic dictionary with information about the channel.
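+            It contains the rowid, public key, title, torrent count, subscription status, votes, status flag and
+            whether the channel is ours.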
+            """
+            return {
+                "id": self.rowid,
+                "public_key": hexlify(self.public_key),
+                "name": self.title,
+                "torrents": self.contents_len,
+                "subscribed": self.subscribed,
+                "votes": self.votes,
+                "status": self.status,
+
+                # TODO: optimize this?
+                "my_channel": database_blob(self._my_key.pub().key_to_bin()[10:]) == database_blob(self.public_key)
+            }
+
+        @classmethod
+        @db_session
+        def get_channel_name(cls, name, infohash):
+            """
+            Try to translate a Tribler download name into a matching channel name by searching for a channel with
+            the given dirname and/or infohash, and try to determine whether the infohash belongs to an older version
+            of some channel we already have.
+            :param name: name of the download. Should match the directory name of the channel.
+            :param infohash: infohash of the download.
+            :return: Channel title as a string, prefixed with 'OLD:' for older versions
+            """
+            try:
+                channel = cls.get_channel_with_dirname(name)
+            except UnicodeEncodeError:
+                channel = cls.get_channel_with_infohash(infohash)
+
+            if not channel:
+                return name
+            if channel.infohash == database_blob(infohash):
+                return channel.title
+            else:
+                return u'OLD:' + channel.title
+
+        @db_session
+        def add_torrents_from_dir(self, torrents_dir, recursive=False):
+            # TODO: Optimize this properly!!!!
+            torrents_list = []
+            errors_list = []
+
+            if recursive:
+                def rec_gen():
+                    for root, _, filenames in os.walk(torrents_dir):
+                        for fn in filenames:
+                            yield os.path.join(root, fn)
+
+                filename_generator = rec_gen()
+            else:
+                filename_generator = os.listdir(torrents_dir)
+
+            # Build list of .torrents to process
+            for f in filename_generator:
+                filepath = os.path.join(torrents_dir, f)
+                filename = str(filepath) if sys.platform == 'win32' else filepath.decode('utf-8')
+                if os.path.isfile(filepath) and filename.endswith(u'.torrent'):
+                    torrents_list.append(filepath)
+
+            for chunk in chunks(torrents_list, 100):  # 100 is a reasonable chunk size for commits
+                for f in chunk:
+                    try:
+                        self.add_torrent_to_channel(TorrentDef.load(f))
+                    except DuplicateTorrentFileError:
+                        pass
+                    except Exception:
+                        errors_list.append(f)
+                orm.commit()  # Kinda optimization to drop excess cache?
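+                # (Committing once per chunk keeps transactions short and stops Pony's session cache from growing without bound.)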
+ + return torrents_list, errors_list return ChannelMetadata diff --git a/Tribler/Core/Modules/MetadataStore/OrmBindings/channel_node.py b/Tribler/Core/Modules/MetadataStore/OrmBindings/channel_node.py new file mode 100644 index 00000000000..cb84b9bf3cf --- /dev/null +++ b/Tribler/Core/Modules/MetadataStore/OrmBindings/channel_node.py @@ -0,0 +1,198 @@ +from __future__ import absolute_import + +from binascii import hexlify +from datetime import datetime + +from pony import orm +from pony.orm.core import DEFAULT + +from Tribler.Core.Modules.MetadataStore.serialization import CHANNEL_NODE, ChannelNodePayload, DELETED, \ + DeletedMetadataPayload +from Tribler.Core.exceptions import InvalidSignatureException +from Tribler.pyipv8.ipv8.database import database_blob +from Tribler.pyipv8.ipv8.keyvault.crypto import default_eccrypto + +# Metadata, torrents and channel statuses +NEW = 0 +TODELETE = 1 +COMMITTED = 2 +JUST_RECEIVED = 3 +UPDATE_AVAILABLE = 4 +PREVIEW_UPDATE_AVAILABLE = 5 +LEGACY_ENTRY = 6 + +PUBLIC_KEY_LEN = 64 + + +def generate_dict_from_pony_args(cls, skip_list=None, **kwargs): + """ + Note: this is a way to manually define Pony entity default attributes in case we really + have to generate the signature before creating the object + """ + d = {} + skip_list = skip_list or [] + for attr in cls._attrs_: + val = kwargs.get(attr.name, DEFAULT) + if attr.name in skip_list: + continue + d[attr.name] = attr.validate(val, entity=cls) + return d + + +def define_binding(db, logger=None, key=None, clock=None): + class ChannelNode(db.Entity): + _discriminator_ = CHANNEL_NODE + + rowid = orm.PrimaryKey(int, size=64, auto=True) + + # Serializable + metadata_type = orm.Discriminator(int, size=16) + reserved_flags = orm.Optional(int, size=16, default=0) + origin_id = orm.Optional(int, size=64, default=0) + + public_key = orm.Required(database_blob) + id_ = orm.Required(int, size=64) + orm.composite_index(public_key, id_) + + timestamp = orm.Required(int, size=64, default=0) + signature = orm.Required(database_blob, unique=True) + + # Local + added_on = orm.Optional(datetime, default=datetime.utcnow) + status = orm.Optional(int, default=COMMITTED) + + parents = orm.Set('ChannelNode', reverse='children') + children = orm.Set('ChannelNode', reverse='parents') + + # Special properties + _payload_class = ChannelNodePayload + _my_key = key + _logger = logger + _clock = clock + + def __init__(self, *args, **kwargs): + """ + Initialize a metadata object. + All this dance is required to ensure that the signature is there and it is correct. + """ + + # Process special keyworded arguments + # "sign_with" argument given, sign with it + private_key_override = None + if "sign_with" in kwargs: + kwargs["public_key"] = database_blob(kwargs["sign_with"].pub().key_to_bin()[10:]) + private_key_override = kwargs["sign_with"] + kwargs.pop("sign_with") + + # For putting legacy/test stuff in + skip_key_check = False + if "skip_key_check" in kwargs and kwargs["skip_key_check"]: + skip_key_check = True + kwargs.pop("skip_key_check") + + if "id_" not in kwargs: + kwargs["id_"] = self._clock.tick() + + if not private_key_override and not skip_key_check: + # No key/signature given, sign with our own key. 
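+                # (That is, no signature was supplied and the public key is either absent or our own.)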
+ if ("signature" not in kwargs) and \ + (("public_key" not in kwargs) or ( + kwargs["public_key"] == database_blob(self._my_key.pub().key_to_bin()[10:]))): + private_key_override = self._my_key + + # Key/signature given, check them for correctness + elif ("public_key" in kwargs) and ("signature" in kwargs): + try: + self._payload_class(**kwargs) + except InvalidSignatureException: + raise InvalidSignatureException( + ("Attempted to create %s object with invalid signature/PK: " % str( + self.__class__.__name__)) + + (hexlify(kwargs["signature"]) if "signature" in kwargs else "empty signature ") + " / " + + (hexlify(kwargs["public_key"]) if "public_key" in kwargs else " empty PK")) + + if private_key_override: + # Get default values for Pony class attributes. We have to do it manually because we need + # to know the payload signature *before* creating the object. + kwargs = generate_dict_from_pony_args(self.__class__, skip_list=["signature", "public_key"], **kwargs) + payload = self._payload_class( + **dict(kwargs, + public_key=str(private_key_override.pub().key_to_bin()[10:]), + key=private_key_override, + metadata_type=self.metadata_type)) + kwargs["public_key"] = payload.public_key + kwargs["signature"] = payload.signature + + super(ChannelNode, self).__init__(*args, **kwargs) + + def _serialized(self, key=None): + """ + Serializes the object and returns the result with added signature (tuple output) + :param key: private key to sign object with + :return: (serialized_data, signature) tuple + """ + return self._payload_class(key=key, **self.to_dict())._serialized() + + def serialized(self, key=None): + """ + Serializes the object and returns the result with added signature (blob output) + :param key: private key to sign object with + :return: serialized_data+signature binary string + """ + return ''.join(self._serialized(key)) + + def _serialized_delete(self): + """ + Create a special command to delete this metadata and encode it for transfer (tuple output). + :return: (serialized_data, signature) tuple + """ + my_dict = ChannelNode.to_dict(self) + my_dict.update({"metadata_type": DELETED, + "delete_signature": self.signature}) + return DeletedMetadataPayload(key=self._my_key, **my_dict)._serialized() + + def serialized_delete(self): + """ + Create a special command to delete this metadata and encode it for transfer (blob output). 
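+            The command refers to the original entry through its signature (the delete_signature field).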
+ :return: serialized_data+signature binary string + """ + return ''.join(self._serialized_delete()) + + def to_file(self, filename, key=None): + with open(filename, 'wb') as output_file: + output_file.write(self.serialized(key)) + + def to_delete_file(self, filename): + with open(filename, 'wb') as output_file: + output_file.write(self.serialized_delete()) + + def sign(self, key=None): + if not key: + key = self._my_key + self.public_key = database_blob(key.pub().key_to_bin()[10:]) + _, self.signature = self._serialized(key) + + def has_valid_signature(self): + crypto = default_eccrypto + signature_correct = False + key_correct = crypto.is_valid_public_bin(b"LibNaCLPK:" + str(self.public_key)) + + if key_correct: + try: + self._payload_class(**self.to_dict()) + except InvalidSignatureException: + signature_correct = False + else: + signature_correct = True + + return key_correct and signature_correct + + @classmethod + def from_payload(cls, payload): + return cls(**payload.to_dict()) + + @classmethod + def from_dict(cls, dct): + return cls(**dct) + + return ChannelNode diff --git a/Tribler/Core/Modules/MetadataStore/OrmBindings/metadata.py b/Tribler/Core/Modules/MetadataStore/OrmBindings/metadata.py deleted file mode 100644 index 857f3a2aa6d..00000000000 --- a/Tribler/Core/Modules/MetadataStore/OrmBindings/metadata.py +++ /dev/null @@ -1,94 +0,0 @@ -from __future__ import absolute_import - -from datetime import datetime - -from pony import orm - -from Tribler.Core.Modules.MetadataStore.serialization import MetadataPayload, DeletedMetadataPayload, TYPELESS, DELETED -from Tribler.pyipv8.ipv8.database import database_blob -from Tribler.pyipv8.ipv8.keyvault.crypto import default_eccrypto - - -def define_binding(db): - class Metadata(db.Entity): - rowid = orm.PrimaryKey(int, auto=True) - metadata_type = orm.Discriminator(int) - _discriminator_ = TYPELESS - # We want to make signature unique=True for safety, but can't do it in Python2 because of Pony bug #390 - signature = orm.Optional(database_blob) - timestamp = orm.Optional(datetime, default=datetime.utcnow) - tc_pointer = orm.Optional(int, size=64, default=0) - public_key = orm.Optional(database_blob, default='\x00' * 74) - addition_timestamp = orm.Optional(datetime, default=datetime.utcnow) - deleted = orm.Optional(bool, default=False) - _payload_class = MetadataPayload - _my_key = None - _logger = None - - def __init__(self, *args, **kwargs): - super(Metadata, self).__init__(*args, **kwargs) - # If no key/signature given, sign with our own key. - if "public_key" not in kwargs or (kwargs["public_key"] == self._my_key and "signature" not in kwargs): - self.sign(self._my_key) - - def _serialized(self, key=None): - """ - Serializes the object and returns the result with added signature (tuple output) - :param key: private key to sign object with - :return: (serialized_data, signature) tuple - """ - return self._payload_class(**self.to_dict())._serialized(key) - - def serialized(self, key=None): - """ - Serializes the object and returns the result with added signature (blob output) - :param key: private key to sign object with - :return: serialized_data+signature binary string - """ - return ''.join(self._serialized(key)) - - def _serialized_delete(self): - """ - Create a special command to delete this metadata and encode it for transfer (tuple output). 
- :return: (serialized_data, signature) tuple - """ - my_dict = Metadata.to_dict(self) - my_dict.update({"metadata_type": DELETED, - "delete_signature": self.signature}) - return DeletedMetadataPayload(**my_dict)._serialized(self._my_key) - - def serialized_delete(self): - """ - Create a special command to delete this metadata and encode it for transfer (blob output). - :return: serialized_data+signature binary string - """ - return ''.join(self._serialized_delete()) - - def to_file(self, filename, key=None): - with open(filename, 'wb') as output_file: - output_file.write(self.serialized(key)) - - def to_delete_file(self, filename): - with open(filename, 'wb') as output_file: - output_file.write(self.serialized_delete()) - - def sign(self, key=None): - if not key: - key = self._my_key - self.public_key = database_blob(key.pub().key_to_bin()) - _, self.signature = self._serialized(key) - - def has_valid_signature(self): - crypto = default_eccrypto - return (crypto.is_valid_public_bin(str(self.public_key)) - and self._payload_class(**self.to_dict()).has_valid_signature()) - - @classmethod - def from_payload(cls, payload): - return cls(**payload.to_dict()) - - @classmethod - def from_dict(cls, dct): - return cls(**dct) - - return Metadata diff --git a/Tribler/Core/Modules/MetadataStore/OrmBindings/misc.py b/Tribler/Core/Modules/MetadataStore/OrmBindings/misc.py new file mode 100644 index 00000000000..c95c989de65 --- /dev/null +++ b/Tribler/Core/Modules/MetadataStore/OrmBindings/misc.py @@ -0,0 +1,12 @@ +from __future__ import absolute_import + +from pony import orm + +# This binding is used to store all kinds of values, like DB version, counters, etc. + +def define_binding(db): + class MiscData(db.Entity): + name = orm.PrimaryKey(str) + value = orm.Optional(str) + + return MiscData diff --git a/Tribler/Core/Modules/MetadataStore/OrmBindings/torrent_metadata.py b/Tribler/Core/Modules/MetadataStore/OrmBindings/torrent_metadata.py index c9aeaaf0f44..dae75192daf 100644 --- a/Tribler/Core/Modules/MetadataStore/OrmBindings/torrent_metadata.py +++ b/Tribler/Core/Modules/MetadataStore/OrmBindings/torrent_metadata.py @@ -1,53 +1,83 @@ from __future__ import absolute_import +from binascii import hexlify from datetime import datetime from pony import orm -from pony.orm import db_session +from pony.orm import db_session, desc, raw_sql, select -from Tribler.Core.Modules.MetadataStore.serialization import TorrentMetadataPayload, REGULAR_TORRENT +from Tribler.Core.Category.FamilyFilter import default_xxx_filter +from Tribler.Core.Modules.MetadataStore.OrmBindings.channel_node import LEGACY_ENTRY, TODELETE +from Tribler.Core.Modules.MetadataStore.serialization import REGULAR_TORRENT, TorrentMetadataPayload +from Tribler.Core.Utilities.tracker_utils import get_uniformed_tracker_url from Tribler.pyipv8.ipv8.database import database_blob def define_binding(db): - class TorrentMetadata(db.Metadata): + class TorrentMetadata(db.ChannelNode): _discriminator_ = REGULAR_TORRENT + + # Serializable infohash = orm.Optional(database_blob, default='\x00' * 20) - title = orm.Optional(str, default='') size = orm.Optional(int, size=64, default=0) + torrent_date = orm.Optional(datetime, default=datetime.utcnow) + title = orm.Optional(str, default='') tags = orm.Optional(str, default='') tracker_info = orm.Optional(str, default='') - torrent_date = orm.Optional(datetime, default=datetime.utcnow) + + # Local + xxx = orm.Optional(float, default=0) + health = orm.Optional('TorrentState', reverse='metadata') + _payload_class = 
TorrentMetadataPayload + def __init__(self, *args, **kwargs): + if "health" not in kwargs and "infohash" in kwargs: + kwargs["health"] = db.TorrentState.get(infohash=kwargs["infohash"]) or db.TorrentState( + infohash=kwargs["infohash"]) + if 'xxx' not in kwargs: + kwargs["xxx"] = default_xxx_filter.isXXXTorrentMetadataDict(kwargs) + + super(TorrentMetadata, self).__init__(*args, **kwargs) + + if 'tracker_info' in kwargs: + self.add_tracker(kwargs["tracker_info"]) + + def add_tracker(self, tracker_url): + sanitized_url = get_uniformed_tracker_url(tracker_url) + if sanitized_url: + tracker = db.TrackerState.get(url=sanitized_url) or db.TrackerState(url=sanitized_url) + self.health.trackers.add(tracker) + + def before_update(self): + self.add_tracker(self.tracker_info) + def get_magnet(self): return ("magnet:?xt=urn:btih:%s&dn=%s" % (str(self.infohash).encode('hex'), self.title)) + \ ("&tr=%s" % self.tracker_info if self.tracker_info else "") @classmethod - def search_keyword(cls, query, entry_type=None, lim=100): + def search_keyword(cls, query, lim=100): # Requires FTS5 table "FtsIndex" to be generated and populated. # FTS table is maintained automatically by SQL triggers. # BM25 ranking is embedded in FTS5. # Sanitize FTS query - if not query: + if not query or query == "*": return [] - if query.endswith("*"): - query = "\"" + query[:-1] + "\"" + "*" - else: - query = "\"" + query + "\"" - metadata_type = entry_type or cls._discriminator_ - sql_search_fts = "metadata_type = %d AND rowid IN (SELECT rowid FROM FtsIndex WHERE " \ - "FtsIndex MATCH $query ORDER BY bm25(FtsIndex) LIMIT %d)" % (metadata_type, lim) - return cls.select(lambda x: orm.raw_sql(sql_search_fts))[:] + fts_ids = raw_sql( + 'SELECT rowid FROM FtsIndex WHERE FtsIndex MATCH $query ORDER BY bm25(FtsIndex) LIMIT $lim') + return cls.select(lambda g: g.rowid in fts_ids) @classmethod - def get_auto_complete_terms(cls, keyword, max_terms, limit=100): + def get_auto_complete_terms(cls, keyword, max_terms, limit=10): + if not keyword: + return [] + + with db_session: - result = cls.search_keyword(keyword + "*", lim=limit) + result = cls.search_keyword("\"" + keyword + "\"*", lim=limit)[:] titles = [g.title.lower() for g in result] # Copy-pasted from the old DBHandler (almost) completely @@ -62,4 +92,100 @@ def get_auto_complete_terms(cls, keyword, max_terms, limit=100): all_terms.add(term) return list(all_terms) + @classmethod + @db_session + def get_random_torrents(cls, limit): + """ + Return some random torrents from the database. + """ + return TorrentMetadata.select( + lambda g: g.metadata_type == REGULAR_TORRENT and g.status != LEGACY_ENTRY).random(limit) + + @classmethod + @db_session + def get_entries_query(cls, sort_by=None, sort_asc=True, query_filter=None): + """ + Get some metadata entries. Optionally sort the results by a specific field, or filter them on a keyword. + :return: a Pony query object that yields the matching entries. + """ + # Warning! For Pony magic to work, iteration variable name (e.g. 'g') should be the same everywhere!
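For reference, a sketch of how the FTS5-backed search above is meant to be called (mds is an assumed MetadataStore instance; the quoting/asterisk syntax is FTS5 prefix matching):

    from pony.orm import db_session

    with db_session:
        hits = mds.TorrentMetadata.search_keyword('"ubuntu"*', lim=50)[:]
        suggestions = mds.TorrentMetadata.get_auto_complete_terms(u"ubu", 5)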
+ # Filter the results on a keyword or some keywords + pony_query = cls.search_keyword(query_filter, lim=1000) if query_filter else select(g for g in cls) + + # Sort the query + if sort_by: + if sort_by == "HEALTH": + pony_query = pony_query.sort_by("(g.health.seeders, g.health.leechers)") if sort_asc else \ + pony_query.sort_by("(desc(g.health.seeders), desc(g.health.leechers))") + else: + sort_expression = "g." + sort_by + sort_expression = sort_expression if sort_asc else desc(sort_expression) + pony_query = pony_query.sort_by(sort_expression) + return pony_query + + + @classmethod + @db_session + def get_entries(cls, first=None, last=None, metadata_type=REGULAR_TORRENT, channel_pk=False, + exclude_deleted=False, hide_xxx=False, **kwargs): + """ + Get some torrents. Optionally sort the results by a specific field, or filter them on a keyword or by channel. + :return: A tuple. The first entry is a list of matching entries. The second entry indicates + the total number of results, regardless of the passed first/last parameters. + """ + pony_query = cls.get_entries_query(**kwargs) + + if isinstance(metadata_type, list): + pony_query = pony_query.where(lambda g: g.metadata_type in metadata_type) + else: + pony_query = pony_query.where(metadata_type=metadata_type) + + if exclude_deleted: + pony_query = pony_query.where(lambda g: g.status != TODELETE) + if hide_xxx: + pony_query = pony_query.where(lambda g: g.xxx == 0) + + # Filter on channel + if channel_pk: + pony_query = pony_query.where(public_key=channel_pk) + + count = pony_query.count() + + return pony_query[(first or 1) - 1:last] if first or last else pony_query, count + + @db_session + def to_simple_dict(self, include_trackers=False): + """ + Return a basic dictionary with information about the torrent. + """ + simple_dict = { + "id": self.rowid, + "name": self.title, + "infohash": hexlify(self.infohash), + "size": self.size, + "category": self.tags, + "num_seeders": self.health.seeders, + "num_leechers": self.health.leechers, + "last_tracker_check": self.health.last_check, + "status": self.status + } + + if include_trackers: + simple_dict['trackers'] = [tracker.url for tracker in self.health.trackers] + + return simple_dict + + def metadata_conflicting(self, b): + # Check if metadata in the given dict has conflicts with this entry + # WARNING!
This does NOT check the INFOHASH + a = self.to_dict() + for comp in ["title", "size", "tags", "torrent_date", "tracker_info"]: + if (comp not in b) or (str(a[comp]) == str(b[comp])): + continue + return True + return False + return TorrentMetadata diff --git a/Tribler/Core/Modules/MetadataStore/OrmBindings/torrent_state.py b/Tribler/Core/Modules/MetadataStore/OrmBindings/torrent_state.py new file mode 100644 index 00000000000..7fb1233e40f --- /dev/null +++ b/Tribler/Core/Modules/MetadataStore/OrmBindings/torrent_state.py @@ -0,0 +1,18 @@ +from __future__ import absolute_import + +from pony import orm + +from Tribler.pyipv8.ipv8.database import database_blob + + +def define_binding(db): + class TorrentState(db.Entity): + rowid = orm.PrimaryKey(int, auto=True) + infohash = orm.Required(database_blob, unique=True) + seeders = orm.Optional(int, default=0) + leechers = orm.Optional(int, default=0) + last_check = orm.Optional(int, size=64, default=0) + metadata = orm.Set('TorrentMetadata', reverse='health') + trackers = orm.Set('TrackerState', reverse='torrents') + + return TorrentState diff --git a/Tribler/Core/Modules/MetadataStore/OrmBindings/tracker_state.py b/Tribler/Core/Modules/MetadataStore/OrmBindings/tracker_state.py new file mode 100644 index 00000000000..9d9275b1d19 --- /dev/null +++ b/Tribler/Core/Modules/MetadataStore/OrmBindings/tracker_state.py @@ -0,0 +1,27 @@ +from __future__ import absolute_import + +from pony import orm + +from Tribler.Core.Utilities.tracker_utils import MalformedTrackerURLException, get_uniformed_tracker_url + + +def define_binding(db): + class TrackerState(db.Entity): + rowid = orm.PrimaryKey(int, auto=True) + url = orm.Required(str, unique=True) + last_check = orm.Optional(int, size=64, default=0) + alive = orm.Optional(bool, default=True) + torrents = orm.Set('TorrentState', reverse='trackers') + failures = orm.Optional(int, size=32, default=0) + + def __init__(self, *args, **kwargs): + # Sanitize and canonicalize the tracker URL + sanitized = get_uniformed_tracker_url(kwargs['url']) + if sanitized: + kwargs['url'] = sanitized + else: + raise MalformedTrackerURLException("Could not canonicalize tracker URL (%s)" % kwargs['url']) + + super(TrackerState, self).__init__(*args, **kwargs) + + return TrackerState diff --git a/Tribler/Core/Modules/MetadataStore/serialization.py b/Tribler/Core/Modules/MetadataStore/serialization.py index 6ed8cdd4bc2..c4559c9fe10 100644 --- a/Tribler/Core/Modules/MetadataStore/serialization.py +++ b/Tribler/Core/Modules/MetadataStore/serialization.py @@ -9,7 +9,6 @@ from Tribler.pyipv8.ipv8.messaging.payload import Payload from Tribler.pyipv8.ipv8.messaging.serialization import default_serializer - EPOCH = datetime(1970, 1, 1) INFOHASH_SIZE = 20 # bytes @@ -17,46 +16,34 @@ EMPTY_SIG = '0' * 64 # Metadata types. Should have been an enum, but in Python its unwieldy. -TYPELESS = 1 -REGULAR_TORRENT = 2 -CHANNEL_TORRENT = 3 -DELETED = 4 - - -# We have to write our own serialization procedure for timestamps, since -# there is no standard for this, except Unix time, and that is -# deprecated by 2038, that is very soon. +TYPELESS = 100 +CHANNEL_NODE = 200 +REGULAR_TORRENT = 300 +CHANNEL_TORRENT = 400 +DELETED = 500 -def time2float(date_time, epoch=EPOCH): +def time2int(date_time, epoch=EPOCH): """ - Convert a datetime object to a float. + Convert a datetime object to an int. :param date_time: The datetime object to convert. :param epoch: The epoch time, defaults to Jan 1, 1970. - :return: The floating point representation of date_time.
+ :return: The int representation of date_time. WARNING: TZ-aware timestamps are madhouse... - For Python3 we could use a simpler method: - timestamp = (dt - datetime(1970, 1, 1, tzinfo=timezone.utc)) / timedelta(seconds=1) """ - time_diff = date_time - epoch - return float((time_diff.microseconds + (time_diff.seconds + time_diff.days * 86400) * 10 ** 6) / 10 ** 6) + return int((date_time - epoch).total_seconds()) -def float2time(timestamp, epoch=EPOCH): + +def int2time(timestamp, epoch=EPOCH): """ - Convert a float into a datetime object. + Convert an int into a datetime object. :param timestamp: The timestamp to be converted. :param epoch: The epoch time, defaults to Jan 1, 1970. :return: The datetime representation of timestamp. """ - microseconds_total = int(timestamp * 10 ** 6) - microseconds = microseconds_total % 10 ** 6 - seconds_total = (microseconds_total - microseconds) / 10 ** 6 - seconds = seconds_total % 86400 - days = (seconds_total - seconds) / 86400 - dt = epoch + timedelta(days=days, seconds=seconds, microseconds=microseconds) - return dt + return epoch + timedelta(seconds=timestamp) class KeysMismatchException(Exception): @@ -69,13 +56,13 @@ class UnknownBlobTypeException(Exception): def read_payload_with_offset(data, offset=0): # First we have to determine the actual payload type - metadata_type = struct.unpack_from('>I', database_blob(data), offset=offset)[0] + metadata_type = struct.unpack_from('>H', database_blob(data), offset=offset)[0] if metadata_type == DELETED: - return DeletedMetadataPayload.from_signed_blob_with_offset(data, check_signature=True, offset=offset) + return DeletedMetadataPayload.from_signed_blob_with_offset(data, offset=offset) elif metadata_type == REGULAR_TORRENT: - return TorrentMetadataPayload.from_signed_blob_with_offset(data, check_signature=True, offset=offset) + return TorrentMetadataPayload.from_signed_blob_with_offset(data, offset=offset) elif metadata_type == CHANNEL_TORRENT: - return ChannelMetadataPayload.from_signed_blob_with_offset(data, check_signature=True, offset=offset) + return ChannelMetadataPayload.from_signed_blob_with_offset(data, offset=offset) # Unknown metadata type, raise exception raise UnknownBlobTypeException @@ -85,35 +72,48 @@ def read_payload(data): return read_payload_with_offset(data)[0] -class MetadataPayload(Payload): +class SignedPayload(Payload): """ Payload for metadata. 
""" - format_list = ['I', '74s', 'f', 'Q'] + format_list = ['H', 'H', '64s'] - def __init__(self, metadata_type, public_key, timestamp, tc_pointer, **kwargs): - super(MetadataPayload, self).__init__() + def __init__(self, metadata_type, reserved_flags, public_key, **kwargs): + super(SignedPayload, self).__init__() self.metadata_type = metadata_type + self.reserved_flags = reserved_flags self.public_key = str(public_key) - self.timestamp = time2float(timestamp) if isinstance(timestamp, datetime) else timestamp - self.tc_pointer = tc_pointer self.signature = str(kwargs["signature"]) if "signature" in kwargs else EMPTY_SIG - def has_valid_signature(self): - sig_data = default_serializer.pack_multiple(self.to_pack_list())[0] - return default_eccrypto.is_valid_signature(default_eccrypto.key_from_public_bin(self.public_key), sig_data, self.signature) + skip_key_check = kwargs["skip_key_check"] if "skip_key_check" in kwargs else False + + serialized_data = default_serializer.pack_multiple(self.to_pack_list())[0] + if not skip_key_check: + if "key" in kwargs and kwargs["key"]: + key = kwargs["key"] + if self.public_key != str(key.pub().key_to_bin()[10:]): + raise KeysMismatchException(self.public_key, str(key.pub().key_to_bin()[10:])) + + self.signature = default_eccrypto.create_signature(key, serialized_data) + elif "signature" in kwargs: + # This check ensures that an entry with a wrong signature will not proliferate further + if not default_eccrypto.is_valid_signature( + default_eccrypto.key_from_public_bin(b"LibNaCLPK:" + self.public_key), + serialized_data, self.signature): + raise InvalidSignatureException("Tried to create payload with wrong signature") + else: + raise InvalidSignatureException("Tried to create payload without signature") def to_pack_list(self): - data = [('I', self.metadata_type), - ('74s', self.public_key), - ('f', self.timestamp), - ('Q', self.tc_pointer)] + data = [('H', self.metadata_type), + ('H', self.reserved_flags), + ('64s', self.public_key)] return data @classmethod - def from_unpack_list(cls, metadata_type, public_key, timestamp, tc_pointer): - return MetadataPayload(metadata_type, public_key, timestamp, tc_pointer) + def from_unpack_list(cls, metadata_type, reserved_flags, public_key, **kwargs): + return SignedPayload(metadata_type, reserved_flags, public_key, **kwargs) @classmethod def from_signed_blob(cls, data, check_signature=True): @@ -121,36 +121,29 @@ def from_signed_blob(cls, data, check_signature=True): @classmethod def from_signed_blob_with_offset(cls, data, check_signature=True, offset=0): + # TODO: stop serializing/deserializing the stuff twice unpack_list, end_offset = default_serializer.unpack_multiple(cls.format_list, data, offset=offset) - payload = cls.from_unpack_list(*unpack_list) if check_signature: - payload.signature = data[end_offset:end_offset + SIGNATURE_SIZE] - data_unsigned = data[offset:end_offset] - key = default_eccrypto.key_from_public_bin(payload.public_key) - if not default_eccrypto.is_valid_signature(key, data_unsigned, payload.signature): - raise InvalidSignatureException + signature = data[end_offset:end_offset + SIGNATURE_SIZE] + payload = cls.from_unpack_list(*unpack_list, signature=signature) + else: + payload = cls.from_unpack_list(*unpack_list, skip_key_check=True) return payload, end_offset + SIGNATURE_SIZE def to_dict(self): return { "metadata_type": self.metadata_type, + "reserved_flags": self.reserved_flags, "public_key": self.public_key, - "timestamp": float2time(self.timestamp), - "tc_pointer": self.tc_pointer, 
"signature": self.signature } - def _serialized(self, key=None): - # If we are going to sign it, we must provide a matching key - if key and self.public_key != str(key.pub().key_to_bin()): - raise KeysMismatchException(self.public_key, str(key.pub().key_to_bin())) - + def _serialized(self): serialized_data = default_serializer.pack_multiple(self.to_pack_list())[0] - signature = default_eccrypto.create_signature(key, serialized_data) if key else self.signature - return str(serialized_data), str(signature) + return str(serialized_data), str(self.signature) - def serialized(self, key=None): - return ''.join(self._serialized(key)) + def serialized(self): + return ''.join(self._serialized()) @classmethod def from_file(cls, filepath): @@ -158,41 +151,87 @@ def from_file(cls, filepath): return cls.from_signed_blob(f.read()) -class TorrentMetadataPayload(MetadataPayload): +class ChannelNodePayload(SignedPayload): + format_list = SignedPayload.format_list + ['Q', 'Q', 'Q'] + + def __init__(self, metadata_type, reserved_flags, public_key, + id_, origin_id, timestamp, + **kwargs): + self.id_ = id_ + self.origin_id = origin_id + self.timestamp = timestamp + super(ChannelNodePayload, self).__init__(metadata_type, reserved_flags, public_key, + **kwargs) + + def to_pack_list(self): + data = super(ChannelNodePayload, self).to_pack_list() + data.append(('Q', self.id_)) + data.append(('Q', self.origin_id)) + data.append(('Q', self.timestamp)) + return data + + @classmethod + def from_unpack_list(cls, metadata_type, reserved_flags, public_key, + id_, origin_id, timestamp, + **kwargs): + return ChannelNodePayload(metadata_type, reserved_flags, public_key, + id_, origin_id, timestamp, + **kwargs) + + def to_dict(self): + dct = super(ChannelNodePayload, self).to_dict() + dct.update({ + "id_": self.id_, + "origin_id": self.origin_id, + "timestamp": self.timestamp + }) + return dct + + +class TorrentMetadataPayload(ChannelNodePayload): """ Payload for metadata that stores a torrent. 
""" - format_list = MetadataPayload.format_list + ['20s', 'Q', 'varlenI', 'varlenI', 'varlenI'] + format_list = ChannelNodePayload.format_list + ['20s', 'Q', 'I', 'varlenI', 'varlenI', 'varlenI'] - def __init__(self, metadata_type, public_key, timestamp, tc_pointer, infohash, size, title, tags, tracker_info, + def __init__(self, metadata_type, reserved_flags, public_key, + id_, origin_id, timestamp, + infohash, size, torrent_date, title, tags, tracker_info, **kwargs): - super(TorrentMetadataPayload, self).__init__(metadata_type, public_key, timestamp, tc_pointer, **kwargs) self.infohash = str(infohash) self.size = size - self.title = title.encode("utf-8") - self.tags = tags.encode("utf-8") - self.tracker_info = tracker_info.encode("utf-8") + self.torrent_date = time2int(torrent_date) if isinstance(torrent_date, datetime) else torrent_date + self.title = title.decode('utf-8') if isinstance(title, str) else title + self.tags = tags.decode('utf-8') if isinstance(tags, str) else tags + self.tracker_info = tracker_info.decode('utf-8') if isinstance(tracker_info, str) else tracker_info + super(TorrentMetadataPayload, self).__init__(metadata_type, reserved_flags, public_key, + id_, origin_id, timestamp, + **kwargs) def to_pack_list(self): data = super(TorrentMetadataPayload, self).to_pack_list() data.append(('20s', self.infohash)) data.append(('Q', self.size)) - data.append(('varlenI', self.title)) - data.append(('varlenI', self.tags)) - data.append(('varlenI', self.tracker_info)) + data.append(('I', self.torrent_date)) + data.append(('varlenI', self.title.encode('utf-8'))) + data.append(('varlenI', self.tags.encode('utf-8'))) + data.append(('varlenI', self.tracker_info.encode('utf-8'))) return data @classmethod - def from_unpack_list(cls, metadata_type, public_key, timestamp, tc_pointer, infohash, size, title, tags, - tracker_info): - return TorrentMetadataPayload(metadata_type, public_key, timestamp, tc_pointer, infohash, size, title, tags, - tracker_info) + def from_unpack_list(cls, metadata_type, reserved_flags, public_key, + id_, origin_id, timestamp, + infohash, size, torrent_date, title, tags, tracker_info, **kwargs): + return TorrentMetadataPayload(metadata_type, reserved_flags, public_key, + id_, origin_id, timestamp, + infohash, size, torrent_date, title, tags, tracker_info, **kwargs) def to_dict(self): dct = super(TorrentMetadataPayload, self).to_dict() dct.update({ "infohash": self.infohash, "size": self.size, + "torrent_date": int2time(self.torrent_date), "title": self.title, "tags": self.tags, "tracker_info": self.tracker_info @@ -210,40 +249,57 @@ class ChannelMetadataPayload(TorrentMetadataPayload): """ Payload for metadata that stores a channel. 
""" - format_list = TorrentMetadataPayload.format_list + ['Q'] + format_list = TorrentMetadataPayload.format_list + ['Q'] + ['Q'] - def __init__(self, metadata_type, public_key, timestamp, tc_pointer, infohash, size, title, tags, tracker_info, - version, **kwargs): - super(ChannelMetadataPayload, self).__init__(metadata_type, public_key, timestamp, tc_pointer, - infohash, size, title, tags, tracker_info, **kwargs) - self.version = version + def __init__(self, metadata_type, reserved_flags, public_key, + id_, origin_id, timestamp, + infohash, size, torrent_date, title, tags, tracker_info, + num_entries, start_timestamp, + **kwargs): + self.num_entries = num_entries + self.start_timestamp = start_timestamp + super(ChannelMetadataPayload, self).__init__(metadata_type, reserved_flags, public_key, + id_, origin_id, timestamp, + infohash, size, torrent_date, title, tags, tracker_info, + **kwargs) def to_pack_list(self): data = super(ChannelMetadataPayload, self).to_pack_list() - data.append(('Q', self.version)) + data.append(('Q', self.num_entries)) + data.append(('Q', self.start_timestamp)) return data @classmethod - def from_unpack_list(cls, metadata_type, public_key, timestamp, tc_pointer, infohash, size, title, tags, - tracker_info, version): - return ChannelMetadataPayload(metadata_type, public_key, timestamp, tc_pointer, infohash, size, - title, tags, tracker_info, version) + def from_unpack_list(cls, metadata_type, reserved_flags, public_key, + id_, origin_id, timestamp, + infohash, size, torrent_date, title, tags, tracker_info, + num_entries, start_timestamp, + **kwargs): + return ChannelMetadataPayload(metadata_type, reserved_flags, public_key, + id_, origin_id, timestamp, + infohash, size, torrent_date, title, tags, tracker_info, + num_entries, start_timestamp, + **kwargs) def to_dict(self): dct = super(ChannelMetadataPayload, self).to_dict() - dct.update({"version": self.version}) + dct.update({"num_entries": self.num_entries, + "start_timestamp": self.start_timestamp}) return dct -class DeletedMetadataPayload(MetadataPayload): +class DeletedMetadataPayload(SignedPayload): """ Payload for metadata that stores deleted metadata. 
""" - format_list = MetadataPayload.format_list + ['64s'] + format_list = SignedPayload.format_list + ['64s'] - def __init__(self, metadata_type, public_key, timestamp, tc_pointer, delete_signature, **kwargs): - super(DeletedMetadataPayload, self).__init__(metadata_type, public_key, timestamp, tc_pointer, **kwargs) + def __init__(self, metadata_type, reserved_flags, public_key, + delete_signature, + **kwargs): self.delete_signature = str(delete_signature) + super(DeletedMetadataPayload, self).__init__(metadata_type, reserved_flags, public_key, + **kwargs) def to_pack_list(self): data = super(DeletedMetadataPayload, self).to_pack_list() @@ -251,8 +307,12 @@ def to_pack_list(self): return data @classmethod - def from_unpack_list(cls, metadata_type, public_key, timestamp, tc_pointer, delete_signature): - return DeletedMetadataPayload(metadata_type, public_key, timestamp, tc_pointer, delete_signature) + def from_unpack_list(cls, metadata_type, reserved_flags, public_key, + delete_signature, + **kwargs): + return DeletedMetadataPayload(metadata_type, reserved_flags, public_key, + delete_signature, + **kwargs) def to_dict(self): dct = super(DeletedMetadataPayload, self).to_dict() diff --git a/Tribler/Core/Modules/MetadataStore/store.py b/Tribler/Core/Modules/MetadataStore/store.py index 6b0476ead83..c8ed6b2dfba 100644 --- a/Tribler/Core/Modules/MetadataStore/store.py +++ b/Tribler/Core/Modules/MetadataStore/store.py @@ -1,53 +1,98 @@ +from __future__ import absolute_import + import logging import os +from datetime import datetime + +import lz4.frame from pony import orm from pony.orm import db_session -from Tribler.Core.Modules.MetadataStore.OrmBindings import metadata, torrent_metadata, channel_metadata +from Tribler.Core.Modules.MetadataStore.OrmBindings import channel_metadata, channel_node, misc, torrent_metadata, \ + torrent_state, tracker_state from Tribler.Core.Modules.MetadataStore.OrmBindings.channel_metadata import BLOB_EXTENSION -from Tribler.Core.Modules.MetadataStore.serialization import read_payload_with_offset, REGULAR_TORRENT, \ - CHANNEL_TORRENT, DELETED +from Tribler.Core.Modules.MetadataStore.serialization import CHANNEL_TORRENT, ChannelMetadataPayload, DELETED, \ + REGULAR_TORRENT, read_payload_with_offset, time2int +from Tribler.Core.exceptions import InvalidSignatureException + +CLOCK_STATE_FILE = "clock.state" + +UNKNOWN_CHANNEL = 1 +UPDATED_OUR_VERSION = 2 +GOT_SAME_VERSION = 3 +GOT_NEWER_VERSION = 4 +UNKNOWN_TORRENT = 5 +NO_ACTION = 6 +DELETED_METADATA = 7 + # This table should never be used from ORM directly. # It is created as a VIRTUAL table by raw SQL and # maintained by SQL triggers. 
-from Tribler.Core.exceptions import InvalidSignatureException -from Tribler.pyipv8.ipv8.messaging.serialization import Serializer - sql_create_fts_table = """ CREATE VIRTUAL TABLE IF NOT EXISTS FtsIndex USING FTS5 - (title, tags, content='Metadata', + (title, tags, content='ChannelNode', prefix = '2 3 4 5', tokenize='porter unicode61 remove_diacritics 1');""" sql_add_fts_trigger_insert = """ - CREATE TRIGGER IF NOT EXISTS fts_ai AFTER INSERT ON Metadata + CREATE TRIGGER IF NOT EXISTS fts_ai AFTER INSERT ON ChannelNode BEGIN INSERT INTO FtsIndex(rowid, title, tags) VALUES (new.rowid, new.title, new.tags); END;""" sql_add_fts_trigger_delete = """ - CREATE TRIGGER IF NOT EXISTS fts_ad AFTER DELETE ON Metadata + CREATE TRIGGER IF NOT EXISTS fts_ad AFTER DELETE ON ChannelNode BEGIN DELETE FROM FtsIndex WHERE rowid = old.rowid; END;""" sql_add_fts_trigger_update = """ - CREATE TRIGGER IF NOT EXISTS fts_au AFTER UPDATE ON Metadata BEGIN + CREATE TRIGGER IF NOT EXISTS fts_au AFTER UPDATE ON ChannelNode BEGIN DELETE FROM FtsIndex WHERE rowid = old.rowid; INSERT INTO FtsIndex(rowid, title, tags) VALUES (new.rowid, new.title, new.tags); END;""" -sql_add_signature_index = "CREATE INDEX SignatureIndex ON Metadata(signature);" -sql_add_public_key_index = "CREATE INDEX PublicKeyIndex ON Metadata(public_key);" -sql_add_infohash_index = "CREATE INDEX InfohashIndex ON Metadata(infohash);" +sql_add_signature_index = "CREATE INDEX SignatureIndex ON ChannelNode(signature);" +sql_add_public_key_index = "CREATE INDEX PublicKeyIndex ON ChannelNode(public_key);" +sql_add_infohash_index = "CREATE INDEX InfohashIndex ON ChannelNode(infohash);" class BadChunkException(Exception): pass +class DiscreteClock(object): + # Lamport-clock-like persistent counter + # Horribly inefficient and stupid, but works + store_value_name = "discrete_clock" + + def __init__(self, datastore=None): + # This is a stupid workaround for people who reinstall Tribler + # and lose their database. We don't know what was their channel + # clock before, but at least we can assume that they were not + # adding to it 1000 torrents per second constantly... + self.clock = time2int(datetime.utcnow()) * 1000 + self.datastore = datastore + + def init_clock(self): + if self.datastore: + with db_session: + store_object = self.datastore.get(name=self.store_value_name, ) + if not store_object: + self.datastore(name=self.store_value_name, value=str(self.clock)) + else: + self.clock = int(store_object.value) + + def tick(self): + self.clock += 1 + if self.datastore: + with db_session: + self.datastore[self.store_value_name].value = str(self.clock) + return self.clock + + class MetadataStore(object): def __init__(self, db_filename, channels_dir, my_key): self.db_filename = db_filename @@ -62,14 +107,18 @@ def __init__(self, db_filename, channels_dir, my_key): # at definition. 
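The DiscreteClock defined above only has to produce strictly increasing values; a tiny sketch of its contract (no persistence when datastore is None):

    clock = DiscreteClock(datastore=None)  # in-memory only
    first = clock.tick()
    assert clock.tick() == first + 1       # every tick() is strictly larger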
self._db = orm.Database() - # Accessors for ORM-managed classes - self.Metadata = metadata.define_binding(self._db) + self.MiscData = misc.define_binding(self._db) + + self.TrackerState = tracker_state.define_binding(self._db) + self.TorrentState = torrent_state.define_binding(self._db) + + self.clock = DiscreteClock(None if db_filename == ":memory:" else self.MiscData) + + self.ChannelNode = channel_node.define_binding(self._db, logger=self._logger, key=my_key, clock=self.clock) self.TorrentMetadata = torrent_metadata.define_binding(self._db) self.ChannelMetadata = channel_metadata.define_binding(self._db) - self.Metadata._my_key = my_key self.ChannelMetadata._channels_dir = channels_dir - self.Metadata._logger = self._logger # Use Store-level logger for every ORM-based class self._db.bind(provider='sqlite', filename=db_filename, create_db=create_db) if create_db: @@ -85,6 +134,12 @@ def __init__(self, db_filename, channels_dir, my_key): self._db.execute(sql_add_public_key_index) self._db.execute(sql_add_infohash_index) + if create_db: + with db_session: + self.MiscData(name="db_version", value="0") + + self.clock.init_clock() + def shutdown(self): self._db.disconnect() @@ -95,21 +150,32 @@ def process_channel_dir(self, dirname, channel_id): :param channel_id: public_key of the channel. """ # We use multiple separate db_sessions here to limit memory usage when reading big channels - with db_session: channel = self.ChannelMetadata.get(public_key=channel_id) self._logger.debug("Starting processing channel dir %s. Channel %s local/max version %i/%i", - dirname, str(channel.public_key).encode("hex"), channel.local_version, channel.version) + dirname, str(channel.public_key).encode("hex"), channel.local_version, + channel.timestamp) for filename in sorted(os.listdir(dirname)): with db_session: channel = self.ChannelMetadata.get(public_key=channel_id) full_filename = os.path.join(dirname, filename) + + blob_sequence_number = None if filename.endswith(BLOB_EXTENSION): blob_sequence_number = int(filename[:-len(BLOB_EXTENSION)]) + elif filename.endswith(BLOB_EXTENSION + '.lz4'): + blob_sequence_number = int(filename[:-len(BLOB_EXTENSION + '.lz4')]) + + if blob_sequence_number is not None: # Skip blobs containing data we already have and those that are # ahead of the channel version known to us - if blob_sequence_number <= channel.local_version or blob_sequence_number > channel.version: + # ==================| channel data |=== + # ===start_timestamp|---local_version----timestamp|=== + # local_version is essentially a cursor pointing into the current state of update process + if blob_sequence_number <= channel.start_timestamp or \ + blob_sequence_number <= channel.local_version or \ + blob_sequence_number > channel.timestamp: continue try: self.process_mdblob_file(full_filename) @@ -119,47 +185,95 @@ def process_channel_dir(self, dirname, channel_id): self._logger.error("Not processing metadata located at %s: invalid signature", full_filename) self._logger.debug("Finished processing channel dir %s. Channel %s local/max version %i/%i", - dirname, str(channel.public_key).encode("hex"), channel.local_version, channel.version) + dirname, str(channel.public_key).encode("hex"), channel.local_version, + channel.timestamp) @db_session def process_mdblob_file(self, filepath): """ Process a file with metadata in a channel directory. 
:param filepath: The path to the file - :return a Metadata object if we can correctly load the metadata + :return: a list of ChannelNode objects if we can correctly load the metadata """ with open(filepath, 'rb') as f: serialized_data = f.read() - return self.process_squashed_mdblob(serialized_data) + + return (self.process_compressed_mdblob(serialized_data) if filepath.endswith('.lz4') else + self.process_squashed_mdblob(serialized_data)) + @db_session + def process_compressed_mdblob(self, compressed_data): + return self.process_squashed_mdblob(lz4.frame.decompress(compressed_data)) @db_session def process_squashed_mdblob(self, chunk_data): - metadata_list = [] + results_list = [] offset = 0 while offset < len(chunk_data): payload, offset = read_payload_with_offset(chunk_data, offset) - md = self.process_payload(payload) - if md: - metadata_list.append(md) - return metadata_list + results_list.append(self.process_payload(payload)) + return results_list # Can't use db_session wrapper here, performance drops 10 times! Pony bug! def process_payload(self, payload): with db_session: - if self.Metadata.exists(signature=payload.signature): - return self.Metadata.get(signature=payload.signature) + if self.ChannelNode.exists(signature=payload.signature): + return None, GOT_SAME_VERSION if payload.metadata_type == DELETED: # We only allow people to delete their own entries, thus PKs must match - existing_metadata = self.Metadata.get(signature=payload.delete_signature, public_key=payload.public_key) + existing_metadata = self.ChannelNode.get(signature=payload.delete_signature, + public_key=payload.public_key) if existing_metadata: existing_metadata.delete() - return None + return None, DELETED_METADATA + else: + return None, NO_ACTION elif payload.metadata_type == REGULAR_TORRENT: - return self.TorrentMetadata.from_payload(payload) + return self.TorrentMetadata.from_payload(payload), UNKNOWN_TORRENT elif payload.metadata_type == CHANNEL_TORRENT: - return self.ChannelMetadata.from_payload(payload) + return self.update_channel_info(payload) + + return None, NO_ACTION + + @db_session + def update_channel_info(self, payload): + """ + We received some channel metadata, possibly over the network. + Validate the signature, update the local metadata store and put it at the beginning of the download queue + if necessary. + :param payload: The channel metadata, in serialized form. + :returns (metadata, status): tuple consisting of possibly newer metadata and result status + """ + + channel = self.ChannelMetadata.get_channel_with_id(payload.public_key) + if channel: + if payload.timestamp > channel.timestamp: + # Update the channel that is already there.
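Callers are expected to dispatch on the (metadata, status) pairs produced above; a sketch of a consumer, assuming an mds store and a serialized blob received from the network:

    from pony.orm import db_session
    from Tribler.Core.Modules.MetadataStore.store import (
        UNKNOWN_CHANNEL, UPDATED_OUR_VERSION)

    with db_session:
        results = mds.process_squashed_mdblob(blob)
        new_channels = [md for md, status in results if status == UNKNOWN_CHANNEL]
        updated = [md for md, status in results if status == UPDATED_OUR_VERSION]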
+ self._logger.info("Updating channel metadata %s ts %s->%s", str(channel.public_key).encode("hex"), + str(channel.timestamp), str(payload.timestamp)) + channel.set(**ChannelMetadataPayload.to_dict(payload)) + status = UPDATED_OUR_VERSION + elif payload.timestamp == channel.timestamp: + status = GOT_SAME_VERSION + else: + status = GOT_NEWER_VERSION + + else: + status = UNKNOWN_CHANNEL + # Add new channel object to DB + channel = self.ChannelMetadata.from_payload(payload) + + #TODO: handle the case where the local version is the same as the new one and is not seeded + return channel, status @db_session def get_my_channel(self): - return self.ChannelMetadata.get_channel_with_id(self.my_key.pub().key_to_bin()) + return self.ChannelMetadata.get_channel_with_id(self.my_key.pub().key_to_bin()[10:]) + + @db_session + def get_num_channels(self): + return orm.count(self.ChannelMetadata.select(lambda g: g.metadata_type == CHANNEL_TORRENT)) + + @db_session + def get_num_torrents(self): + return orm.count(self.TorrentMetadata.select(lambda g: g.metadata_type == REGULAR_TORRENT)) diff --git a/Tribler/Core/Modules/channel/__init__.py b/Tribler/Core/Modules/channel/__init__.py deleted file mode 100644 index 51f29aa53c7..00000000000 --- a/Tribler/Core/Modules/channel/__init__.py +++ /dev/null @@ -1,3 +0,0 @@ -""" -Channels are lists of torrents created by users. -""" diff --git a/Tribler/Core/Modules/channel/cache.py b/Tribler/Core/Modules/channel/cache.py deleted file mode 100644 index 7ac9b46a550..00000000000 --- a/Tribler/Core/Modules/channel/cache.py +++ /dev/null @@ -1,46 +0,0 @@ -import codecs -import logging -import os - -import Tribler.Core.Utilities.json_util as json - - -class SimpleCache(object): - """ - This is a cache for recording the keys that we have seen before. 
- """ - def __init__(self, file_path): - self._logger = logging.getLogger(self.__class__.__name__) - self._file_path = file_path - - self._cache_list = list() - self._initial_cache_size = 0 - - def add(self, key): - if not self.has(key): - self._cache_list.append(key) - - def has(self, key): - return key in self._cache_list - - def load(self): - if os.path.exists(self._file_path): - try: - with codecs.open(self._file_path, 'rb', encoding='utf-8') as f: - self._cache_list = json.load(f) - except Exception as e: - self._logger.error(u"Failed to load cache file %s: %s", self._file_path, repr(e)) - else: - self._cache_list = list() - self._initial_cache_size = len(self._cache_list) - - def save(self): - if self._initial_cache_size == len(self._cache_list): - return - try: - with codecs.open(self._file_path, 'wb', encoding='utf-8') as f: - json.dump(self._cache_list, f) - self._initial_cache_size = len(self._cache_list) - except Exception as e: - self._logger.error(u"Failed to save cache file %s: %s", self._file_path, repr(e)) - return diff --git a/Tribler/Core/Modules/channel/channel.py b/Tribler/Core/Modules/channel/channel.py deleted file mode 100644 index a118555b6b6..00000000000 --- a/Tribler/Core/Modules/channel/channel.py +++ /dev/null @@ -1,130 +0,0 @@ -import codecs -import collections -import logging -import os -from binascii import hexlify -from twisted.internet import reactor -from twisted.internet.defer import DeferredList - -from Tribler.Core.Modules.channel.channel_rss import ChannelRssParser -import Tribler.Core.Utilities.json_util as json -from Tribler.Core.simpledefs import SIGNAL_CHANNEL, SIGNAL_ON_CREATED, SIGNAL_RSS_FEED, SIGNAL_ON_UPDATED -from Tribler.pyipv8.ipv8.taskmanager import TaskManager - - -class ChannelObject(TaskManager): - - def __init__(self, session, channel_community, is_created=False): - super(ChannelObject, self).__init__() - self._logger = logging.getLogger(self.__class__.__name__) - - self._session = session - self._channel_community = channel_community - self._is_created = is_created - self._rss_feed_dict = collections.OrderedDict() - - rss_name = u"channel_rss_%s.json" % hexlify(self._channel_community.cid) - self._rss_file_path = os.path.join(self._session.config.get_state_dir(), rss_name) - - @property - def channel_id(self): - return self._channel_community.get_channel_id() - - @property - def name(self): - return self._channel_community.get_channel_name() - - @property - def description(self): - return self._channel_community.get_channel_description() - - @property - def mode(self): - return self._channel_community.get_channel_mode() - - def get_rss_feed_url_list(self): - return [url for url in self._rss_feed_dict.iterkeys()] - - def refresh_all_feeds(self): - deferreds = [feed.parse_feed() for feed in self._rss_feed_dict.itervalues()] - return DeferredList(deferreds, consumeErrors=True) - - def initialize(self): - # load existing rss_feeds - if os.path.exists(self._rss_file_path): - self._logger.debug(u"loading existing channel rss list from %s...", self._rss_file_path) - - with codecs.open(self._rss_file_path, 'rb', encoding='utf8') as f: - rss_list = json.load(f) - for rss_url in rss_list: - self._rss_feed_dict[rss_url] = None - - if self._is_created: - # create rss-parsers - for rss_feed_url in self._rss_feed_dict: - rss_parser = ChannelRssParser(self._session, self._channel_community, rss_feed_url) - rss_parser.initialize() - self._rss_feed_dict[rss_feed_url] = rss_parser - else: - # subscribe to the channel creation event - 
self._session.add_observer(self._on_channel_created, SIGNAL_CHANNEL, [SIGNAL_ON_CREATED]) - - def shutdown(self): - self.shutdown_task_manager() - for key, rss_parser in self._rss_feed_dict.iteritems(): - if rss_parser is not None: - rss_parser.shutdown() - self._rss_feed_dict = None - self._channel_community = None - self._session = None - - def _on_channel_created(self, subject, change_type, object_id, channel_data): - if channel_data[u'channel'].cid != self._channel_community.cid: - return - - def _create_rss_feed(channel_date): - self._is_created = True - - # create rss feed parsers - self._logger.debug(u"channel %s %s created", self.name, hexlify(self._channel_community.cid)) - for rss_feed_url in self._rss_feed_dict: - assert self._rss_feed_dict[rss_feed_url] is None - rss_parser = ChannelRssParser(self._session, self._channel_community, rss_feed_url) - rss_parser.initialize() - self._rss_feed_dict[rss_feed_url] = rss_parser - - task_name = u'create_rss_%s' % hexlify(channel_data[u'channel'].cid) - self.register_task(task_name, reactor.callLater(0, _create_rss_feed, channel_data)) - - def create_rss_feed(self, rss_feed_url): - if rss_feed_url in self._rss_feed_dict: - self._logger.warn(u"skip existing rss feed: %s", repr(rss_feed_url)) - return - - if not self._is_created: - # append the rss url if the channel has not been created yet - self._rss_feed_dict[rss_feed_url] = None - else: - # create an rss feed parser for this - rss_parser = ChannelRssParser(self._session, self._channel_community, rss_feed_url) - rss_parser.initialize() - self._rss_feed_dict[rss_feed_url] = rss_parser - - # flush the rss_feed_url to json file - with codecs.open(self._rss_file_path, 'wb', encoding='utf8') as f: - rss_list = [rss_url for rss_url in self._rss_feed_dict.iterkeys()] - json.dump(rss_list, f) - - def remove_rss_feed(self, rss_feed_url): - if rss_feed_url not in self._rss_feed_dict: - self._logger.warn(u"skip existing rss feed: %s", repr(rss_feed_url)) - return - - rss_parser = self._rss_feed_dict[rss_feed_url] - if rss_parser is not None: - rss_parser.shutdown() - del self._rss_feed_dict[rss_feed_url] - - rss_feed_data = {u'channel': self._channel_community, - u'rss_feed_url': rss_feed_url} - self._session.notifier.notify(SIGNAL_RSS_FEED, SIGNAL_ON_UPDATED, None, rss_feed_data) diff --git a/Tribler/Core/Modules/channel/channel_manager.py b/Tribler/Core/Modules/channel/channel_manager.py deleted file mode 100644 index 472dcb7a557..00000000000 --- a/Tribler/Core/Modules/channel/channel_manager.py +++ /dev/null @@ -1,127 +0,0 @@ -from __future__ import absolute_import - -import logging -from binascii import hexlify - -from six import string_types - -from Tribler.Core.Modules.channel.channel import ChannelObject -from Tribler.Core.exceptions import DuplicateChannelNameError -from Tribler.community.channel.community import ChannelCommunity -from Tribler.pyipv8.ipv8.taskmanager import TaskManager - - -class ChannelManager(TaskManager): - """ - The Manager class that handles the Channels owned by ourselves. - It supports multiple-Channel creation and RSS feed. 
- """ - - def __init__(self, session): - super(ChannelManager, self).__init__() - self._logger = logging.getLogger(self.__class__.__name__) - self.session = session - self.dispersy = None - - self._channel_mode_map = {u'open': ChannelCommunity.CHANNEL_OPEN, - u'semi-open': ChannelCommunity.CHANNEL_SEMI_OPEN, - u'closed': ChannelCommunity.CHANNEL_CLOSED, - } - - self._channel_list = [] - - def initialize(self): - self.dispersy = self.session.get_dispersy_instance() - - # get all channels owned by me - from Tribler.community.channel.community import ChannelCommunity - for community in self.session.lm.dispersy.get_communities(): - if isinstance(community, ChannelCommunity) and community.master_member and community.master_member.private_key: - channel_obj = ChannelObject(self.session, community, is_created=True) - channel_obj.initialize() - self._channel_list.append(channel_obj) - - self._logger.debug(u"loaded channel '%s', %s", channel_obj.name, hexlify(community.cid)) - - def shutdown(self): - self.shutdown_task_manager() - self._channel_mode_map = None - - for channel_object in self._channel_list: - channel_object.shutdown() - self._channel_list = None - - self.dispersy = None - self.session = None - - def create_channel(self, name, description, mode, rss_url=None): - """ - Creates a new Channel. - :param name: Name of the Channel. - :param description: Description of the Channel. - :param mode: Mode of the Channel ('open', 'semi-open', or 'closed'). - :param rss_url: RSS URL for the Channel. - :return: Channel ID - :raises DuplicateChannelNameError if name already exists - """ - assert isinstance(name, string_types), u"name is not a string_types: %s" % type(name) - assert isinstance(description, string_types), u"description is not a string_types: %s" % type(description) - assert mode in self._channel_mode_map, u"invalid mode: %s" % mode - assert isinstance(rss_url, string_types) or rss_url is None, (u"rss_url is not a string_types or None: %s" - % type(rss_url)) - - # if two channels have the same name, this will not work - for channel_object in self._channel_list: - if name == channel_object.name: - raise DuplicateChannelNameError(u"Channel name already exists: %s" % name) - - channel_mode = self._channel_mode_map[mode] - community = ChannelCommunity.create_community(self.dispersy, self.session.dispersy_member, - tribler_session=self.session) - - channel_obj = ChannelObject(self.session, community) - channel_obj.initialize() - - community.set_channel_mode(channel_mode) - community.create_channel(name, description) - - # create channel object - self._channel_list.append(channel_obj) - - if rss_url is not None: - channel_obj.create_rss_feed(rss_url) - - self._logger.debug(u"creating channel '%s', %s", channel_obj.name, hexlify(community.cid)) - return channel_obj.channel_id - - def get_my_channel(self, channel_id): - """ - Gets the ChannelObject with the given channel id. - :return: The ChannelObject if exists, otherwise None. - """ - channel_object = None - for obj in self._channel_list: - if obj.channel_id == channel_id: - channel_object = obj - break - return channel_object - - def get_channel(self, name): - """ - Gets a Channel by name. - :param name: Channel name. - :return: The channel object if exists, otherwise None. - """ - channel_object = None - for obj in self._channel_list: - if obj.name == name: - channel_object = obj - break - return channel_object - - def get_channel_list(self): - """ - Gets a list of all channel objects. - :return: The list of all channel objects. 
- """ - return self._channel_list diff --git a/Tribler/Core/Modules/channel/channel_rss.py b/Tribler/Core/Modules/channel/channel_rss.py deleted file mode 100644 index e220844ef61..00000000000 --- a/Tribler/Core/Modules/channel/channel_rss.py +++ /dev/null @@ -1,268 +0,0 @@ -import hashlib -import logging -import os -import re -import time -from binascii import hexlify - -import feedparser -from twisted.internet import reactor -from twisted.internet.defer import DeferredList, succeed -from twisted.web.client import getPage - -from Tribler.Core.Modules.channel.cache import SimpleCache -from Tribler.Core.TorrentDef import TorrentDef -import Tribler.Core.Utilities.json_util as json -from Tribler.Core.Utilities.utilities import http_get -from Tribler.Core.simpledefs import (SIGNAL_CHANNEL_COMMUNITY, SIGNAL_ON_TORRENT_UPDATED, SIGNAL_RSS_FEED, - SIGNAL_ON_UPDATED) -from Tribler.pyipv8.ipv8.taskmanager import TaskManager - -try: - long # pylint: disable=long-builtin -except NameError: - long = int # pylint: disable=redefined-builtin - -DEFAULT_CHECK_INTERVAL = 1800 # half an hour - - -class ChannelRssParser(TaskManager): - - def __init__(self, session, channel_community, rss_url, check_interval=DEFAULT_CHECK_INTERVAL): - super(ChannelRssParser, self).__init__() - self._logger = logging.getLogger(self.__class__.__name__) - - self.session = session - self.channel_community = channel_community - self.rss_url = rss_url - self.check_interval = check_interval - - self._url_cache = None - - self._pending_metadata_requests = {} - - self._to_stop = False - - self.running = False - - def initialize(self): - # initialize URL cache - # use the SHA1 of channel cid + rss_url as key - cache_key = hashlib.sha1(self.channel_community.cid) - cache_key.update(self.rss_url) - cache_key_str = hexlify(cache_key.digest()) - self._logger.debug(u"using key %s for channel %s, rss %s", - cache_key_str, hexlify(self.channel_community.cid), self.rss_url) - - url_cache_name = u"rss_cache_%s.txt" % cache_key_str - url_cache_path = os.path.join(self.session.config.get_state_dir(), url_cache_name) - self._url_cache = SimpleCache(url_cache_path) - self._url_cache.load() - - # schedule the scraping task - self.register_task(u"rss_scrape", - reactor.callLater(2, self._task_scrape)) - - # subscribe to channel torrent creation - self.session.notifier.add_observer(self.on_channel_torrent_created, SIGNAL_CHANNEL_COMMUNITY, - [SIGNAL_ON_TORRENT_UPDATED], self.channel_community.get_channel_id()) - - # notify that a RSS feed has been created - rss_feed_data = {u'channel': self.channel_community, - u'rss_feed_url': self.rss_url} - self.session.notifier.notify(SIGNAL_RSS_FEED, SIGNAL_ON_UPDATED, None, rss_feed_data) - self.running = True - - def shutdown(self): - self._to_stop = True - self.shutdown_task_manager() - - self._url_cache.save() - self._url_cache = None - - self.channel_community = None - self.session = None - self.running = False - - def parse_feed(self): - rss_parser = RSSFeedParser() - - def on_rss_items(rss_items): - if not rss_items: - self._logger.warning(u"No RSS items found.") - return succeed(None) - - def_list = [] - for rss_item in rss_items: - if self._to_stop: - continue - - torrent_url = rss_item[u'torrent_url'].encode('utf-8') - if torrent_url.startswith('magnet:'): - self._logger.warning(u"Tribler does not support adding magnet links to a channel from a RSS feed.") - continue - - torrent_deferred = getPage(torrent_url) - torrent_deferred.addCallbacks(lambda t, r=rss_item: self.on_got_torrent(t, rss_item=r), - 
self.on_got_torrent_error) - def_list.append(torrent_deferred) - - return DeferredList(def_list, consumeErrors=True) - - return rss_parser.parse(self.rss_url, self._url_cache).addCallback(on_rss_items) - - def _task_scrape(self): - deferred = self.parse_feed() - - if not self._to_stop: - # schedule the next scraping task - self._logger.info(u"Finish scraping %s, schedule task after %s", self.rss_url, self.check_interval) - self.register_task(u'rss_scrape', - reactor.callLater(self.check_interval, self._task_scrape)) - - return deferred - - def on_got_torrent(self, torrent_data, rss_item=None): - if self._to_stop: - return - - # save torrent - tdef = TorrentDef.load_from_memory(torrent_data) - self.session.lm.rtorrent_handler.save_torrent(tdef) - - # add metadata pending request - info_hash = tdef.get_infohash() - if u'thumbnail_list' in rss_item and rss_item[u'thumbnail_list']: - # only use the first thumbnail - rss_item[u'thumbnail_url'] = rss_item[u'thumbnail_list'][0] - if info_hash not in self._pending_metadata_requests: - self._pending_metadata_requests[info_hash] = rss_item - - # create channel torrent - self.channel_community._disp_create_torrent_from_torrentdef(tdef, long(time.time())) - - # update URL cache - self._url_cache.add(rss_item[u'torrent_url']) - self._url_cache.save() - - self._logger.info(u"Channel torrent %s created", tdef.get_name_as_unicode()) - - def on_got_torrent_error(self, failure): - """ - This callback is invoked when the lookup for a specific torrent failed. - """ - self._logger.warning(u"Failed to fetch torrent info from RSS feed: %s", failure) - - def on_channel_torrent_created(self, subject, events, object_id, data_list): - if self._to_stop: - return - - for data in data_list: - if data[u'info_hash'] in self._pending_metadata_requests: - rss_item = self._pending_metadata_requests.pop(data[u'info_hash']) - rss_item[u'info_hash'] = data[u'info_hash'] - rss_item[u'channel_torrent_id'] = data[u'channel_torrent_id'] - - metadata_deferred = getPage(rss_item[u'thumbnail_url'].encode('utf-8')) - metadata_deferred.addCallback(lambda md, r=rss_item: self.on_got_metadata(md, rss_item=r)) - - def on_got_metadata(self, metadata_data, rss_item=None): - # save metadata - thumb_hash = hashlib.sha1(metadata_data).digest() - self.session.lm.rtorrent_handler.save_metadata(thumb_hash, metadata_data) - - # create modification message for channel - modifications = {u'metadata-json': json.dumps({u'title': rss_item['title'][:64], - u'description': rss_item['description'][:768], - u'thumb_hash': thumb_hash.encode('hex')})} - self.channel_community.modifyTorrent(rss_item[u'channel_torrent_id'], modifications) - - -class RSSFeedParser(object): - - def __init__(self): - self._logger = logging.getLogger(self.__class__.__name__) - - def _parse_html(self, content): - """ - Parses an HTML content and find links. - """ - if content is None: - return None - url_set = set() - - a_list = re.findall(r'<a.+href=[\'"]?([^\'" >]+)', content) - for a_href in a_list: - url_set.add(a_href) - - img_list = re.findall(r'<img.+src=[\'"]?([^\'" >]+)', content) - for img_src in img_list: - url_set.add(img_src) - - return url_set - - def _html2plaintext(self, html_content): - """ - Converts an HTML document to plain text. - """ - content = html_content.replace('\r\n', '\n') - - content = re.sub('<br/>', '\n', content) - content = re.sub('<br>', '\n', content) - - content = re.sub('<p>', '', content) - content = re.sub('</p>', '\n', content) - - content = re.sub('<.+/>', '', content) - content = re.sub('<.+>', '', content) - content = re.sub('</.+>', '', content) - - content = re.sub('[\n]+', '\n', content) - content = re.sub('[ \t\v\f]+', ' ', content) - - parsed_html_content = u'' - for line in content.split('\n'): - trimmed_line = line.strip() - if trimmed_line: - parsed_html_content += trimmed_line + u'\n' - - return parsed_html_content - - def parse(self, url, cache): - """ - Parses a RSS feed. This methods supports RSS 2.0 and Media RSS. - """ - def on_rss_response(response): - feed = feedparser.parse(response) - feed_items = [] - - for item in feed.entries: - # ignore the ones that we have seen before - link = item.get(u'link', None) - if link is None or cache.has(link): - continue - - title = self._html2plaintext(item[u'title']).strip() - description = self._html2plaintext(item.get(u'media_description', u'')).strip() - torrent_url = item[u'link'] - - thumbnail_list = [] - media_thumbnail_list = item.get(u'media_thumbnail', None) - if media_thumbnail_list: - for thumbnail in media_thumbnail_list: - thumbnail_list.append(thumbnail[u'url']) - - # assemble the information - parsed_item = {u'title': title, - u'description': description, - u'torrent_url': torrent_url, - u'thumbnail_list': thumbnail_list} - - feed_items.append(parsed_item) - - return feed_items - - def on_rss_error(failure): - self._logger.error("Error when fetching RSS feed: %s", failure) - - return http_get(str(url)).addCallbacks(on_rss_response, on_rss_error) diff --git a/Tribler/Core/Modules/gigachannel_manager.py b/Tribler/Core/Modules/gigachannel_manager.py new file mode 100644 index 00000000000..d54b450bf35 --- /dev/null +++ b/Tribler/Core/Modules/gigachannel_manager.py @@ -0,0 +1,201 @@ +from __future__ import absolute_import + +import os +from binascii import hexlify + +from pony.orm import db_session + +from twisted.internet.defer import Deferred +from twisted.internet.task import LoopingCall + +from Tribler.Core.DownloadConfig import DownloadStartupConfig +from Tribler.Core.Modules.MetadataStore.OrmBindings.channel_node import COMMITTED +from Tribler.Core.TorrentDef import TorrentDef, TorrentDefNoMetainfo +from Tribler.Core.simpledefs import DLSTATUS_SEEDING +from Tribler.pyipv8.ipv8.taskmanager import TaskManager + + +class GigaChannelManager(TaskManager): + """ + This class represents the main manager for gigachannels. + It provides methods to manage channels, download new channels or remove existing ones. + """ + + def __init__(self, session): + super(GigaChannelManager, self).__init__() + self.session = session + self.channels_lc = None + + def start(self): + """ + The Metadata Store checks the database at regular intervals to see if new channels are available for preview + or subscribed channels require updating.
+ """ + + # Test if we our channel is there, but we don't share it because Tribler was closed unexpectedly + try: + with db_session: + my_channel = self.session.lm.mds.ChannelMetadata.get_my_channel() + if my_channel and my_channel.status == COMMITTED and \ + not self.session.has_download(str(my_channel.infohash)): + torrent_path = os.path.join(self.session.lm.mds.channels_dir, my_channel.dir_name + ".torrent") + self.updated_my_channel(TorrentDef.load(torrent_path)) + except: + pass + + channels_check_interval = 5.0 # seconds + self.channels_lc = self.register_task("Process channels download queue and remove cruft", + LoopingCall(self.service_channels)).start(channels_check_interval) + + def shutdown(self): + """ + Stop the gigachannel manager. + """ + self.shutdown_task_manager() + + def remove_cruft_channels(self): + """ + Assembles a list of obsolete channel torrents to be removed. + The list is formed from older versions of channels we are subscribed to and from channel torrents we are not + subscribed to (i.e. we recently unsubscribed from these). The unsubscribed channels are removed completely + with their contents, while in the case of older versions the files are left in place because the newer version + possibly uses them. + :return: list of tuples (download_to_remove=download, remove_files=Bool) + """ + with db_session: + channels, _ = self.session.lm.mds.ChannelMetadata.get_entries(last=10000, subscribed=True) + subscribed_infohashes = [bytes(c.infohash) for c in list(channels)] + dirnames = [c.dir_name for c in channels] + + # TODO: add some more advanced logic for removal of older channel versions + cruft_list = [(d, d.get_def().get_name_utf8() not in dirnames) \ + for d in self.session.lm.get_channel_downloads() \ + if bytes(d.get_def().infohash) not in subscribed_infohashes] + self.remove_channels_downloads(cruft_list) + + def service_channels(self): + try: + self.remove_cruft_channels() + except: + pass + try: + self.check_channels_updates() + except: + pass + + def check_channels_updates(self): + """ + Check whether there are channels that are updated. If so, download the new version of the channel. + """ + # FIXME: These naughty try-except-pass workarounds are necessary to keep the loop going in all circumstances + + with db_session: + channels_queue = list(self.session.lm.mds.ChannelMetadata.get_updated_channels()) + + for channel in channels_queue: + try: + if not self.session.has_download(str(channel.infohash)): + self._logger.info("Downloading new channel version %s ver %i->%i", + str(channel.public_key).encode("hex"), + channel.local_version, channel.timestamp) + self.download_channel(channel) + except: + pass + + def on_channel_download_finished(self, download, channel_id, finished_deferred=None): + """ + We have finished with downloading a channel. + :param download: The channel download itself. + :param channel_id: The ID of the channel. + :param finished_deferred: An optional deferred that should fire if the channel download has finished. + """ + if download.finished_callback_already_called: + return + channel_dirname = os.path.join(self.session.lm.mds.channels_dir, download.get_def().get_name()) + self.session.lm.mds.process_channel_dir(channel_dirname, channel_id) + if finished_deferred: + finished_deferred.callback(download) + + # TODO: finish this routine + # This thing should check if the files in the torrent we're going to delete are used in another torrent for + # the newer version of the same channel, and determine a safe sub-set to delete. 
+ """ + def safe_files_to_remove(self, download): + # Check for intersection of files from old download with files from the newer version of the same channel + dirname = download.get_def().get_name_utf8() + files_to_remove = [] + with db_session: + channel = self.session.lm.mds.ChannelMetadata.get_channel_with_dirname(dirname) + if channel and channel.subscribed: + print self.session.lm.downloads + current_version = self.session.get_download(hexlify(channel.infohash)) + current_version_files = set(current_version.get_tdef().get_files()) + obsolete_version_files = set(download.get_tdef().get_files()) + files_to_remove_relative = obsolete_version_files - current_version_files + for f in files_to_remove_relative: + files_to_remove.append(os.path.join(dirname, f)) + return files_to_remove + """ + + def remove_channels_downloads(self, to_remove_list): + """ + :param to_remove_list: list of tuples (download_to_remove=download, remove_files=Bool) + """ + + #TODO: make file removal from older versions safe (i.e. check if it overlaps with newer downloads) + + """ + files_to_remove = [] + for download in to_remove_list: + files_to_remove.extend(self.safe_files_to_remove(download)) + """ + + def _on_remove_failure(failure): + self._logger.error("Error when removing the channel download: %s", failure) + + for i, dl_tuple in enumerate(to_remove_list): + d, remove_content = dl_tuple + deferred = self.session.remove_download(d, remove_content=remove_content) + deferred.addErrback(_on_remove_failure) + self.register_task(u'remove_channel' + d.tdef.get_name_utf8() + u'-' + hexlify(d.tdef.get_infohash()) + + u'-' + str(i), deferred) + + """ + def _on_torrents_removed(torrent): + print files_to_remove + dl = DeferredList(removed_list) + dl.addCallback(_on_torrents_removed) + self.register_task(u'remove_channels_files-' + "_".join([d.tdef.get_name_utf8() for d in to_remove_list]), dl) + """ + + def download_channel(self, channel): + """ + Download a channel with a given infohash and title. + :param channel: The channel metadata ORM object. + """ + finished_deferred = Deferred() + + dcfg = DownloadStartupConfig() + dcfg.set_dest_dir(self.session.lm.mds.channels_dir) + dcfg.set_channel_download(True) + tdef = TorrentDefNoMetainfo(infohash=str(channel.infohash), name=channel.dir_name) + download = self.session.start_download_from_tdef(tdef, dcfg) + channel_id = channel.public_key + # TODO: add errbacks here! + download.finished_callback = lambda dl: self.on_channel_download_finished(dl, channel_id, finished_deferred) + if download.get_state().get_status() == DLSTATUS_SEEDING and not download.finished_callback_already_called: + download.finished_callback_already_called = True + download.finished_callback(download) + return download, finished_deferred + + def updated_my_channel(self, tdef): + """ + Notify the core that we updated our channel. 
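+        If the committed channel torrent is not already being downloaded or seeded, this starts a seeding
+        download from the channels directory.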
+ """ + with db_session: + my_channel = self.session.lm.mds.ChannelMetadata.get_my_channel() + if my_channel and my_channel.status == COMMITTED and not self.session.has_download(str(my_channel.infohash)): + dcfg = DownloadStartupConfig() + dcfg.set_dest_dir(self.session.lm.mds.channels_dir) + dcfg.set_channel_download(True) + self.session.lm.add(tdef, dcfg) diff --git a/Tribler/Core/Modules/restapi/channels/__init__.py b/Tribler/Core/Modules/restapi/channels/__init__.py deleted file mode 100644 index 1724474da8f..00000000000 --- a/Tribler/Core/Modules/restapi/channels/__init__.py +++ /dev/null @@ -1,3 +0,0 @@ -""" -This package contains endpoints to manage items in a given channel such as torrents, rss feeds and playlists. -""" diff --git a/Tribler/Core/Modules/restapi/channels/base_channels_endpoint.py b/Tribler/Core/Modules/restapi/channels/base_channels_endpoint.py deleted file mode 100644 index 39acc664866..00000000000 --- a/Tribler/Core/Modules/restapi/channels/base_channels_endpoint.py +++ /dev/null @@ -1,88 +0,0 @@ -from __future__ import absolute_import - -import logging -import time -from twisted.web import http, resource - -from Tribler.Core.simpledefs import NTFY_CHANNELCAST -import Tribler.Core.Utilities.json_util as json -from Tribler.dispersy.exception import CommunityNotFoundException - -UNKNOWN_CHANNEL_RESPONSE_MSG = "the channel with the provided cid is not known" -UNAUTHORIZED_RESPONSE_MSG = "you are not authorized to perform this request" - - -class BaseChannelsEndpoint(resource.Resource): - """ - This class contains some utility methods to work with raw channels from the database. - All endpoints that are using the database, should derive from this class. - """ - - def __init__(self, session): - resource.Resource.__init__(self) - self.session = session - self.channel_db_handler = self.session.open_dbhandler(NTFY_CHANNELCAST) - self._logger = logging.getLogger(self.__class__.__name__) - - @staticmethod - def return_404(request, message=UNKNOWN_CHANNEL_RESPONSE_MSG): - """ - Returns a 404 response code if your channel has not been created. - """ - request.setResponseCode(http.NOT_FOUND) - return json.dumps({"error": message}) - - def return_500(self, request, exception): - self._logger.exception(exception) - request.setResponseCode(http.INTERNAL_SERVER_ERROR) - return json.dumps({ - u"error": { - u"handled": True, - u"code": exception.__class__.__name__, - u"message": exception.message - } - }) - - @staticmethod - def return_401(request, message=UNAUTHORIZED_RESPONSE_MSG): - """ - Returns a 401 response code if you are not authorized to perform a specific request. - """ - request.setResponseCode(http.UNAUTHORIZED) - return json.dumps({"error": message}) - - def get_channel_from_db(self, cid): - """ - Returns information about the channel from the database. Returns None if the channel with given cid - does not exist. - """ - channels_list = self.channel_db_handler.getChannelsByCID([cid]) - return channels_list[0] if len(channels_list) > 0 else None - - def get_my_channel_object(self): - """ - Returns the Channel object associated with a channel that is used to manage rss feeds. - """ - my_channel_id = self.channel_db_handler.getMyChannelId() - return self.session.lm.channel_manager.get_my_channel(my_channel_id) - - def vote_for_channel(self, cid, vote): - """ - Make a vote in the channel specified by the cid. Returns a deferred that fires when the vote is done. 
- """ - # TODO remove when we remove Dispersy - from Tribler.community.allchannel.community import AllChannelCommunity - for community in self.session.get_dispersy_instance().get_communities(): - if isinstance(community, AllChannelCommunity): - return community.disp_create_votecast(cid, vote, int(time.time())) - - def get_community_for_channel_id(self, channel_id): - """ - Returns a Dispersy community from the given channel id. The Community object can be used to delete/add torrents - or modify playlists in a specific channel. - """ - dispersy_cid = str(self.channel_db_handler.getDispersyCIDFromChannelId(channel_id)) - try: - return self.session.get_dispersy_instance().get_community(dispersy_cid) - except CommunityNotFoundException: - return None diff --git a/Tribler/Core/Modules/restapi/channels/channels_discovered_endpoint.py b/Tribler/Core/Modules/restapi/channels/channels_discovered_endpoint.py deleted file mode 100644 index 938b0fc1abb..00000000000 --- a/Tribler/Core/Modules/restapi/channels/channels_discovered_endpoint.py +++ /dev/null @@ -1,221 +0,0 @@ -from pony.orm import db_session -from twisted.web import http - -from Tribler.Core.Modules.restapi.channels.base_channels_endpoint import BaseChannelsEndpoint -from Tribler.Core.Modules.restapi.channels.channels_playlists_endpoint import ChannelsPlaylistsEndpoint -from Tribler.Core.Modules.restapi.channels.channels_rss_endpoint import ChannelsRssFeedsEndpoint, \ - ChannelsRecheckFeedsEndpoint -from Tribler.Core.Modules.restapi.channels.channels_torrents_endpoint import ChannelsTorrentsEndpoint -from Tribler.Core.Modules.restapi.util import convert_db_channel_to_json, convert_channel_metadata_to_tuple -from Tribler.Core.exceptions import DuplicateChannelNameError -import Tribler.Core.Utilities.json_util as json - - -class ChannelsDiscoveredEndpoint(BaseChannelsEndpoint): - """ - This class is responsible for requests regarding the discovered channels. - """ - def getChild(self, path, request): - return ChannelsDiscoveredSpecificEndpoint(self.session, path) - - @db_session - def render_GET(self, _): - """ - .. http:get:: /channels/discovered - - A GET request to this endpoint returns all channels discovered in Tribler. - - **Example request**: - - .. sourcecode:: none - - curl -X GET http://localhost:8085/channels/discovered - - **Example response**: - - .. sourcecode:: javascript - - { - "channels": [{ - "id": 3, - "dispersy_cid": "da69aaad39ccf468aba2ab9177d5f8d8160135e6", - "name": "My fancy channel", - "description": "A description of this fancy channel", - "subscribed": False, - "votes": 23, - "torrents": 3, - "spam": 5, - "modified": 14598395, - "can_edit": True - }, ...] - } - """ - all_channels_db = self.channel_db_handler.getAllChannels() - - if self.session.config.get_chant_enabled(): - chant_channels = list(self.session.lm.mds.ChannelMetadata.select()) - for chant_channel in chant_channels: - all_channels_db.append(convert_channel_metadata_to_tuple(chant_channel)) - - results_json = [] - for channel in all_channels_db: - channel_json = convert_db_channel_to_json(channel) - if self.session.config.get_family_filter_enabled() and \ - self.session.lm.category.xxx_filter.isXXX(channel_json['name']): - continue - - results_json.append(channel_json) - - return json.dumps({"channels": results_json}) - - def render_PUT(self, request): - """ - .. http:put:: /channels/discovered - - Create your own new channel. The passed mode and descriptions are optional. - Valid modes include: 'open', 'semi-open' or 'closed'. 
By default, the mode of the new channel is 'closed'. - - **Example request**: - - .. sourcecode:: none - - curl -X PUT http://localhost:8085/channels/discovered - --data "name=fancy name&description=fancy description&mode=open" - - **Example response**: - - .. sourcecode:: javascript - - { - "added": 23 - } - - :statuscode 500: if a channel with the specified name already exists. - """ - parameters = http.parse_qs(request.content.read(), 1) - - if 'name' not in parameters or len(parameters['name']) == 0 or len(parameters['name'][0]) == 0: - request.setResponseCode(http.BAD_REQUEST) - return json.dumps({"error": "channel name cannot be empty"}) - - if 'description' not in parameters or len(parameters['description']) == 0: - description = u'' - else: - description = unicode(parameters['description'][0], 'utf-8') - - if self.session.config.get_chant_channel_edit(): - my_key = self.session.trustchain_keypair - my_channel_id = my_key.pub().key_to_bin() - - # Do not allow to add a channel twice - if self.session.lm.mds.get_my_channel(): - request.setResponseCode(http.INTERNAL_SERVER_ERROR) - return json.dumps({"error": "channel already exists"}) - - title = unicode(parameters['name'][0], 'utf-8') - self.session.lm.mds.ChannelMetadata.create_channel(title, description) - return json.dumps({ - "added": str(my_channel_id).encode("hex"), - }) - - if 'mode' not in parameters or len(parameters['mode']) == 0: - # By default, the mode of the new channel is closed. - mode = u'closed' - else: - mode = unicode(parameters['mode'][0], 'utf-8') - - try: - channel_id = self.session.create_channel(unicode(parameters['name'][0], 'utf-8'), description, mode) - except DuplicateChannelNameError as ex: - return BaseChannelsEndpoint.return_500(self, request, ex) - - return json.dumps({"added": channel_id}) - - -class ChannelsDiscoveredSpecificEndpoint(BaseChannelsEndpoint): - """ - This class is responsible for dispatching requests to perform operations in a specific discovered channel. - """ - - def __init__(self, session, cid): - BaseChannelsEndpoint.__init__(self, session) - self.cid = bytes(cid.decode('hex')) - - child_handler_dict = {"torrents": ChannelsTorrentsEndpoint, "rssfeeds": ChannelsRssFeedsEndpoint, - "playlists": ChannelsPlaylistsEndpoint, "recheckfeeds": ChannelsRecheckFeedsEndpoint, - "mdblob": ChannelsDiscoveredExportEndpoint} - for path, child_cls in child_handler_dict.iteritems(): - self.putChild(path, child_cls(session, self.cid)) - - def render_GET(self, request): - """ - .. http:get:: /channels/discovered/(string: channelid) - - Return the name, description and identifier of a channel. - - **Example request**: - - .. sourcecode:: none - - curl -X GET http://localhost:8085/channels/discovered/4a9cfc7ca9d15617765f4151dd9fae94c8f3ba11 - - **Example response**: - - .. sourcecode:: javascript - - { - "overview": { - "name": "My Tribler channel", - "description": "A great collection of open-source movies", - "identifier": "4a9cfc7ca9d15617765f4151dd9fae94c8f3ba11" - } - } - - :statuscode 404: if your channel has not been created (yet). - """ - channel_info = self.get_channel_from_db(self.cid) - if channel_info is None: - return ChannelsDiscoveredSpecificEndpoint.return_404(request) - - return json.dumps({'overview': {'identifier': channel_info[1].encode('hex'), 'name': channel_info[2], - 'description': channel_info[3]}}) - - -class ChannelsDiscoveredExportEndpoint(BaseChannelsEndpoint): - """ - This class is responsible for serving .mdblob file export requests for a specific channel. 
- """ - - def __init__(self, session, cid): - BaseChannelsEndpoint.__init__(self, session) - self.cid = cid - self.is_chant_channel = (len(cid) == 74) - - def render_GET(self, request): - """ - .. http:get:: /channels/discovered/(string: channelid)/mdblob - - Return the mdblob binary - - **Example request**: - - .. sourcecode:: none - - curl -X GET http://localhost:8085/channels/discovered/(string: channel_id)/mdblob - - **Example response**: - - The .mdblob file containing the serialized and signed metadata for the channelid. - - :statuscode 404: if channel with given channeld is not found. - """ - with db_session: - channel = self.session.lm.mds.ChannelMetadata.get_channel_with_id(self.cid) - if not channel: - return ChannelsDiscoveredSpecificEndpoint.return_404(request) - else: - mdblob = channel.serialized() - - request.setHeader(b'content-type', 'application/octet-stream') - request.setHeader(b'Content-Disposition', 'attachment; filename=%s.mdblob' % self.cid.encode('hex')) - return mdblob diff --git a/Tribler/Core/Modules/restapi/channels/channels_endpoint.py b/Tribler/Core/Modules/restapi/channels/channels_endpoint.py deleted file mode 100644 index 5de7e5939ac..00000000000 --- a/Tribler/Core/Modules/restapi/channels/channels_endpoint.py +++ /dev/null @@ -1,18 +0,0 @@ -from Tribler.Core.Modules.restapi.channels.base_channels_endpoint import BaseChannelsEndpoint -from Tribler.Core.Modules.restapi.channels.channels_discovered_endpoint import ChannelsDiscoveredEndpoint -from Tribler.Core.Modules.restapi.channels.channels_popular_endpoint import ChannelsPopularEndpoint -from Tribler.Core.Modules.restapi.channels.channels_subscription_endpoint import ChannelsSubscribedEndpoint - - -class ChannelsEndpoint(BaseChannelsEndpoint): - """ - This endpoint is responsible for handing all requests regarding channels in Tribler. - """ - - def __init__(self, session): - BaseChannelsEndpoint.__init__(self, session) - - child_handler_dict = {"subscribed": ChannelsSubscribedEndpoint, "discovered": ChannelsDiscoveredEndpoint, - "popular": ChannelsPopularEndpoint} - for path, child_cls in child_handler_dict.iteritems(): - self.putChild(path, child_cls(self.session)) diff --git a/Tribler/Core/Modules/restapi/channels/channels_playlists_endpoint.py b/Tribler/Core/Modules/restapi/channels/channels_playlists_endpoint.py deleted file mode 100644 index a915cc26337..00000000000 --- a/Tribler/Core/Modules/restapi/channels/channels_playlists_endpoint.py +++ /dev/null @@ -1,359 +0,0 @@ -from twisted.web import http - -from Tribler.Core.Modules.restapi.channels.base_channels_endpoint import BaseChannelsEndpoint -from Tribler.Core.Modules.restapi.util import convert_db_torrent_to_json -import Tribler.Core.Utilities.json_util as json - - -class ChannelsPlaylistsEndpoint(BaseChannelsEndpoint): - """ - This class is responsible for handling requests regarding playlists in a channel. - """ - def __init__(self, session, cid): - BaseChannelsEndpoint.__init__(self, session) - self.cid = cid - - def getChild(self, path, request): - return ChannelsModifyPlaylistsEndpoint(self.session, self.cid, path) - - def render_GET(self, request): - """ - .. http:get:: /channels/discovered/(string: channelid)/playlists - - Returns the playlists in your channel. Returns error 404 if you have not created a channel. - - disable_filter: whether the family filter should be disabled for this request (1 = disabled) - - **Example request**: - - .. 
sourcecode:: none - - curl -X GET http://localhost:8085/channels/discovered/abcd/playlists - - **Example response**: - - .. sourcecode:: javascript - - { - "playlists": [{ - "id": 1, - "name": "My first playlist", - "description": "Funny movies", - "torrents": [{ - "id": 4, - "infohash": "97d2d8f5d37e56cfaeaae151d55f05b077074779", - "name": "Ubuntu-16.04-desktop-amd64", - "size": 8592385, - "category": "other", - "num_seeders": 42, - "num_leechers": 184, - "last_tracker_check": 1463176959 - }, ... ] - }, ...] - } - - :statuscode 404: if you have not created a channel. - """ - - channel = self.get_channel_from_db(self.cid) - if channel is None: - return ChannelsPlaylistsEndpoint.return_404(request) - - playlists = [] - req_columns = ['Playlists.id', 'Playlists.name', 'Playlists.description'] - req_columns_torrents = ['Torrent.torrent_id', 'infohash', 'Torrent.name', 'length', 'Torrent.category', - 'num_seeders', 'num_leechers', 'last_tracker_check', 'ChannelTorrents.inserted'] - - should_filter = self.session.config.get_family_filter_enabled() - if 'disable_filter' in request.args and len(request.args['disable_filter']) > 0 \ - and request.args['disable_filter'][0] == "1": - should_filter = False - - for playlist in self.channel_db_handler.getPlaylistsFromChannelId(channel[0], req_columns): - # Fetch torrents in the playlist - playlist_torrents = self.channel_db_handler.getTorrentsFromPlaylist(playlist[0], req_columns_torrents) - torrents = [] - for torrent_result in playlist_torrents: - torrent = convert_db_torrent_to_json(torrent_result) - if (should_filter and torrent['category'] == 'xxx') or torrent['name'] is None: - continue - torrents.append(torrent) - - playlists.append({"id": playlist[0], "name": playlist[1], "description": playlist[2], "torrents": torrents}) - - return json.dumps({"playlists": playlists}) - - def render_PUT(self, request): - """ - .. http:put:: /channels/discovered/(string: channelid)/playlists - - Create a new empty playlist with a given name and description. The name and description parameters are - mandatory. - - **Example request**: - - .. sourcecode:: none - - curl -X PUT http://localhost:8085/channels/discovered/abcd/playlists - --data "name=My fancy playlist&description=This playlist contains some random movies" - - **Example response**: - - .. 
sourcecode:: javascript - - { - "created": True - } - - :statuscode 400: if you are missing the name and/or description parameter - :statuscode 404: if the specified channel does not exist - """ - parameters = http.parse_qs(request.content.read(), 1) - - if 'name' not in parameters or len(parameters['name']) == 0: - request.setResponseCode(http.BAD_REQUEST) - return json.dumps({"error": "name parameter missing"}) - - if 'description' not in parameters or len(parameters['description']) == 0: - request.setResponseCode(http.BAD_REQUEST) - return json.dumps({"error": "description parameter missing"}) - - channel_info = self.get_channel_from_db(self.cid) - if channel_info is None: - return ChannelsPlaylistsEndpoint.return_404(request) - - channel_community = self.get_community_for_channel_id(channel_info[0]) - if channel_community is None: - return BaseChannelsEndpoint.return_404(request, - message="the community for the specific channel cannot be found") - - channel_community.create_playlist(unicode(parameters['name'][0], 'utf-8'), - unicode(parameters['description'][0], 'utf-8'), []) - - return json.dumps({"created": True}) - - -class ChannelsModifyPlaylistsEndpoint(BaseChannelsEndpoint): - """ - This class is responsible for requests that are modifying a specific playlist in a channel. - """ - - def __init__(self, session, cid, playlist_id): - BaseChannelsEndpoint.__init__(self, session) - self.cid = cid - self.playlist_id = playlist_id - - def getChild(self, path, request): - return ChannelsModifyPlaylistTorrentsEndpoint(self.session, self.cid, self.playlist_id, path) - - def render_DELETE(self, request): - """ - .. http:delete:: /channels/discovered/(string: channelid)/playlists/(int: playlistid) - - Remove a playlist with a specified playlist id. - - **Example request**: - - .. sourcecode:: none - - curl -X DELETE http://localhost:8085/channels/discovered/abcd/playlists/3 - - **Example response**: - - .. sourcecode:: javascript - - { - "removed": True - } - - :statuscode 404: if the specified channel (community) or playlist does not exist - """ - channel_info = self.get_channel_from_db(self.cid) - if channel_info is None: - return ChannelsPlaylistsEndpoint.return_404(request) - - playlist = self.channel_db_handler.getPlaylist(self.playlist_id, ['Playlists.dispersy_id', 'Playlists.id']) - if playlist is None: - return BaseChannelsEndpoint.return_404(request, message="this playlist cannot be found") - - channel_community = self.get_community_for_channel_id(channel_info[0]) - if channel_community is None: - return BaseChannelsEndpoint.return_404(request, - message="the community for the specific channel cannot be found") - - # Remove all torrents from this playlist - playlist_torrents = self.channel_db_handler.get_torrent_ids_from_playlist(playlist[1]) - channel_community.remove_playlist_torrents(playlist[0], [dispersy_id for dispersy_id, in playlist_torrents]) - - # Remove the playlist itself - channel_community.remove_playlists([playlist[0]]) - - return json.dumps({"removed": True}) - - def render_POST(self, request): - """ - .. http:post:: /channels/discovered/(string: channelid)/playlists/(int: playlistid) - - Edit a specific playlist. The new name and description should be passed as parameter. - - **Example request**: - - .. sourcecode:: none - - curl -X POST http://localhost:8085/channels/discovered/abcd/playlists/3 - --data "name=test&description=my test description" - - **Example response**: - - .. 
sourcecode:: javascript - - { - "modified": True - } - - :statuscode 404: if the specified channel (community) or playlist does not exist or if the - name and description parameters are missing. - """ - parameters = http.parse_qs(request.content.read(), 1) - - if 'name' not in parameters or len(parameters['name']) == 0: - request.setResponseCode(http.BAD_REQUEST) - return json.dumps({"error": "name parameter missing"}) - - if 'description' not in parameters or len(parameters['description']) == 0: - request.setResponseCode(http.BAD_REQUEST) - return json.dumps({"error": "description parameter missing"}) - - channel_info = self.get_channel_from_db(self.cid) - if channel_info is None: - return ChannelsPlaylistsEndpoint.return_404(request) - - playlist = self.channel_db_handler.getPlaylist(self.playlist_id, ['Playlists.id']) - if playlist is None: - return BaseChannelsEndpoint.return_404(request, message="this playlist cannot be found") - - channel_community = self.get_community_for_channel_id(channel_info[0]) - if channel_community is None: - return BaseChannelsEndpoint.return_404(request, - message="the community for the specific channel cannot be found") - - channel_community.modifyPlaylist(playlist[0], {'name': parameters['name'][0], - 'description': parameters['description'][0]}) - - return json.dumps({"modified": True}) - - -class ChannelsModifyPlaylistTorrentsEndpoint(BaseChannelsEndpoint): - - def __init__(self, session, cid, playlist_id, infohash): - BaseChannelsEndpoint.__init__(self, session) - self.cid = cid - self.playlist_id = playlist_id - self.infohash = infohash.decode('hex') - - def render_PUT(self, request): - """ - .. http:put:: /channels/discovered/(string: channelid)/playlists/(int: playlistid)/(string: infohash) - - Add a torrent with a specified infohash to a specified playlist. The torrent that is added to the playlist, - should be present in the channel. - - **Example request**: - - .. sourcecode:: none - - curl -X PUT http://localhost:8085/channels/discovered/abcd/playlists/3/abcdef - - **Example response**: - - .. sourcecode:: javascript - - { - "added": True - } - - :statuscode 404: if the specified channel/playlist/torrent does not exist. - :statuscode 409: if the specified torrent is already in the specified playlist. 
- """ - channel_info = self.get_channel_from_db(self.cid) - if channel_info is None: - return ChannelsPlaylistsEndpoint.return_404(request) - - channel_community = self.get_community_for_channel_id(channel_info[0]) - if channel_community is None: - return BaseChannelsEndpoint.return_404(request, - message="the community for the specific channel cannot be found") - - playlist = self.channel_db_handler.getPlaylist(self.playlist_id, ['Playlists.dispersy_id']) - if playlist is None: - return BaseChannelsEndpoint.return_404(request, message="this playlist cannot be found") - - # Check whether this torrent is present in your channel - torrent_in_channel = False - for torrent in self.channel_db_handler.getTorrentsFromChannelId(channel_info[0], True, ["infohash"]): - if torrent[0] == self.infohash: - torrent_in_channel = True - break - - if not torrent_in_channel: - return BaseChannelsEndpoint.return_404(request, message="this torrent is not available in your channel") - - # Check whether this torrent is not already present in this playlist - for torrent in self.channel_db_handler.getTorrentsFromPlaylist(self.playlist_id, ["infohash"]): - if torrent[0] == self.infohash: - request.setResponseCode(http.CONFLICT) - return json.dumps({"error": "this torrent is already in your playlist"}) - - channel_community.create_playlist_torrents(int(self.playlist_id), [self.infohash]) - - return json.dumps({"added": True}) - - def render_DELETE(self, request): - """ - .. http:delete:: /channels/discovered/(string: channelid)/playlists/(int: playlistid)/(string: infohash) - - Remove a torrent with a specified infohash from a specified playlist. - - **Example request**: - - .. sourcecode:: none - - curl -X DELETE http://localhost:8085/channels/discovered/abcd/playlists/3/abcdef - - **Example response**: - - .. sourcecode:: javascript - - { - "removed": True - } - - :statuscode 404: if the specified channel/playlist/torrent does not exist. 
- """ - channel_info = self.get_channel_from_db(self.cid) - if channel_info is None: - return ChannelsPlaylistsEndpoint.return_404(request) - - playlist = self.channel_db_handler.getPlaylist(self.playlist_id, ['Playlists.dispersy_id']) - if playlist is None: - return BaseChannelsEndpoint.return_404(request, message="this playlist cannot be found") - - channel_community = self.get_community_for_channel_id(channel_info[0]) - if channel_community is None: - return BaseChannelsEndpoint.return_404(request, - message="the community for the specific channel cannot be found") - - # Check whether this torrent is present in this playlist and if so, get the dispersy ID - torrent_dispersy_id = -1 - for torrent in self.channel_db_handler.getTorrentsFromPlaylist(self.playlist_id, - ["infohash", "PlaylistTorrents.dispersy_id"]): - if torrent[0] == self.infohash: - torrent_dispersy_id = torrent[1] - break - - if torrent_dispersy_id == -1: - request.setResponseCode(http.NOT_FOUND) - return json.dumps({"error": "this torrent is not in your playlist"}) - - channel_community.remove_playlist_torrents(int(self.playlist_id), [torrent_dispersy_id]) - - return json.dumps({"removed": True}) diff --git a/Tribler/Core/Modules/restapi/channels/channels_popular_endpoint.py b/Tribler/Core/Modules/restapi/channels/channels_popular_endpoint.py deleted file mode 100644 index 828b0faf106..00000000000 --- a/Tribler/Core/Modules/restapi/channels/channels_popular_endpoint.py +++ /dev/null @@ -1,61 +0,0 @@ -from twisted.web import http - -from Tribler.Core.Modules.restapi.channels.base_channels_endpoint import BaseChannelsEndpoint -from Tribler.Core.Modules.restapi.util import convert_db_channel_to_json -import Tribler.Core.Utilities.json_util as json - - -class ChannelsPopularEndpoint(BaseChannelsEndpoint): - - def render_GET(self, request): - """ - .. http:get:: /channels/popular?limit=(int:max nr of channels) - - A GET request to this endpoint will return the most popular discovered channels in Tribler. - You can optionally pass a limit parameter to limit the number of results. - - **Example request**: - - .. sourcecode:: none - - curl -X GET http://localhost:8085/channels/popular?limit=1 - - **Example response**: - - .. 
sourcecode:: javascript - - { - "channels": [{ - "id": 3, - "dispersy_cid": "da69aaad39ccf468aba2ab9177d5f8d8160135e6", - "name": "My fancy channel", - "description": "A description of this fancy channel", - "subscribed": False, - "votes": 23, - "torrents": 3, - "spam": 5, - "modified": 14598395, - "can_edit": True, - }] - } - """ - limit_channels = 10 - - if 'limit' in request.args and len(request.args['limit']) > 0: - limit_channels = int(request.args['limit'][0]) - - if limit_channels <= 0: - request.setResponseCode(http.BAD_REQUEST) - return json.dumps({"error": "the limit parameter must be a positive number"}) - - popular_channels = self.channel_db_handler.getMostPopularChannels(max_nr=limit_channels) - results_json = [] - for channel in popular_channels: - channel_json = convert_db_channel_to_json(channel) - if self.session.config.get_family_filter_enabled() and \ - self.session.lm.category.xxx_filter.isXXX(channel_json['name']): - continue - - results_json.append(channel_json) - - return json.dumps({"channels": results_json}) diff --git a/Tribler/Core/Modules/restapi/channels/channels_rss_endpoint.py b/Tribler/Core/Modules/restapi/channels/channels_rss_endpoint.py deleted file mode 100644 index c65c06e19d7..00000000000 --- a/Tribler/Core/Modules/restapi/channels/channels_rss_endpoint.py +++ /dev/null @@ -1,190 +0,0 @@ -from twisted.web import http -from twisted.web.server import NOT_DONE_YET - -from Tribler.Core.Modules.restapi.channels.base_channels_endpoint import BaseChannelsEndpoint -import Tribler.Core.Utilities.json_util as json - - -class BaseChannelsRssFeedsEndpoint(BaseChannelsEndpoint): - - def __init__(self, session, cid): - BaseChannelsEndpoint.__init__(self, session) - self.cid = cid - - def get_my_channel_obj_or_error(self, request): - """ - Returns a tuple of (channel_obj, error). Callers of this method should check whether the channel_obj is None and - if so, return the error. - """ - channel_info = self.get_channel_from_db(self.cid) - if channel_info is None: - return None, BaseChannelsRssFeedsEndpoint.return_404(request) - - if channel_info[0] != self.channel_db_handler.getMyChannelId(): - return None, BaseChannelsRssFeedsEndpoint.return_401(request) - - channel_obj = self.get_my_channel_object() - if channel_obj is None: - return None, BaseChannelsRssFeedsEndpoint.return_404(request) - - return channel_obj, None - - -class ChannelsRssFeedsEndpoint(BaseChannelsRssFeedsEndpoint): - """ - This class is responsible for handling requests regarding rss feeds in a channel. - """ - - def getChild(self, path, request): - return ChannelModifyRssFeedEndpoint(self.session, self.cid, path) - - def render_GET(self, request): - """ - .. http:get:: /channels/discovered/(string: channelid)/rssfeeds - - Returns the RSS feeds in your channel. - - .. sourcecode:: none - - curl -X GET http://localhost:8085/channels/discovered/abcd/rssfeeds - - **Example response**: - - .. sourcecode:: javascript - - { - "rssfeeds": [{ - "url": "http://rssprovider.com/feed.xml", - }, ...] - } - """ - channel_obj, error = self.get_my_channel_obj_or_error(request) - if channel_obj is None: - return error - - request.setHeader('Content-Type', 'text/json') - feeds_list = [{'url': rss_item} for rss_item in channel_obj.get_rss_feed_url_list()] - - return json.dumps({"rssfeeds": feeds_list}) - - -class ChannelsRecheckFeedsEndpoint(BaseChannelsRssFeedsEndpoint): - """ - This class is responsible for handling requests regarding refreshing rss feeds in your channel. 
- """ - - def render_POST(self, request): - """ - .. http:post:: /channels/discovered/(string: channelid)/recheckfeeds - - Rechecks all rss feeds in your channel. Returns error 404 if you channel does not exist. - - **Example request**: - - .. sourcecode:: none - - curl -X POST http://localhost:8085/channels/discovered/recheckrssfeeds - - **Example response**: - - .. sourcecode:: javascript - - { - "rechecked": True - } - - :statuscode 404: if you have not created a channel. - """ - channel_obj, error = self.get_my_channel_obj_or_error(request) - if channel_obj is None: - return error - - def on_refreshed(_): - request.write(json.dumps({"rechecked": True})) - request.finish() - - def on_refresh_error(failure): - self._logger.exception(failure.value) - request.write(BaseChannelsEndpoint.return_500(self, request, failure.value)) - request.finish() - - channel_obj.refresh_all_feeds().addCallbacks(on_refreshed, on_refresh_error) - - return NOT_DONE_YET - - -class ChannelModifyRssFeedEndpoint(BaseChannelsRssFeedsEndpoint): - """ - This class is responsible for methods that modify the list of RSS feed URLs (adding/removing feeds). - """ - - def __init__(self, session, cid, feed_url): - BaseChannelsRssFeedsEndpoint.__init__(self, session, cid) - self.feed_url = feed_url - - def render_PUT(self, request): - """ - .. http:put:: /channels/discovered/(string: channelid)/rssfeeds/http%3A%2F%2Ftest.com%2Frss.xml - - Add a RSS feed to your channel. Returns error 409 if the supplied RSS feed already exists. - Note that the rss feed url should be URL-encoded. - - **Example request**: - - .. sourcecode:: none - - curl -X PUT http://localhost:8085/channels/discovered/abcd/rssfeeds/http%3A%2F%2Ftest.com%2Frss.xml - - **Example response**: - - .. sourcecode:: javascript - - { - "added": True - } - - :statuscode 409: (conflict) if the specified RSS URL is already present in your feeds. - """ - channel_obj, error = self.get_my_channel_obj_or_error(request) - if channel_obj is None: - return error - - if self.feed_url in channel_obj.get_rss_feed_url_list(): - request.setResponseCode(http.CONFLICT) - return json.dumps({"error": "this rss feed already exists"}) - - channel_obj.create_rss_feed(self.feed_url) - return json.dumps({"added": True}) - - def render_DELETE(self, request): - """ - .. http:delete:: /channels/discovered/(string: channelid)/rssfeeds/http%3A%2F%2Ftest.com%2Frss.xml - - Delete a RSS feed from your channel. Returns error 404 if the RSS feed that is being removed does not exist. - Note that the rss feed url should be URL-encoded. - - **Example request**: - - .. sourcecode:: none - - curl -X DELETE http://localhost:8085/channels/discovered/abcd/rssfeeds/http%3A%2F%2Ftest.com%2Frss.xml - - **Example response**: - - .. sourcecode:: javascript - - { - "removed": True - } - - :statuscode 404: if the specified RSS URL is not in your feed list. 
- """ - channel_obj, error = self.get_my_channel_obj_or_error(request) - if channel_obj is None: - return error - - if self.feed_url not in channel_obj.get_rss_feed_url_list(): - return ChannelModifyRssFeedEndpoint.return_404(request, message="this url is not added to your RSS feeds") - - channel_obj.remove_rss_feed(self.feed_url) - return json.dumps({"removed": True}) diff --git a/Tribler/Core/Modules/restapi/channels/channels_subscription_endpoint.py b/Tribler/Core/Modules/restapi/channels/channels_subscription_endpoint.py deleted file mode 100644 index 2b0b979eb03..00000000000 --- a/Tribler/Core/Modules/restapi/channels/channels_subscription_endpoint.py +++ /dev/null @@ -1,197 +0,0 @@ -from __future__ import absolute_import - -from pony.orm import db_session -from twisted.web import http -from twisted.web.server import NOT_DONE_YET - -import Tribler.Core.Utilities.json_util as json -from Tribler.Core.Modules.restapi import VOTE_SUBSCRIBE, VOTE_UNSUBSCRIBE -from Tribler.Core.Modules.restapi.channels.base_channels_endpoint import BaseChannelsEndpoint -from Tribler.Core.Modules.restapi.util import convert_db_channel_to_json, convert_chant_channel_to_json -from Tribler.pyipv8.ipv8.database import database_blob - -ALREADY_SUBSCRIBED_RESPONSE_MSG = "you are already subscribed to this channel" -NOT_SUBSCRIBED_RESPONSE_MSG = "you are not subscribed to this channel" -CHANNEL_NOT_FOUND = "this channel is not found" - - -class ChannelsSubscribedEndpoint(BaseChannelsEndpoint): - """ - This class is responsible for requests regarding the subscriptions to channels. - """ - def getChild(self, path, request): - return ChannelsModifySubscriptionEndpoint(self.session, path) - - def render_GET(self, _): - """ - .. http:get:: /channels/subscribed - - Returns all the channels the user is subscribed to. - - **Example request**: - - .. sourcecode:: none - - curl -X GET http://localhost:8085/channels/subscribed - - **Example response**: - - .. sourcecode:: javascript - - { - "subscribed": [{ - "id": 3, - "dispersy_cid": "da69aaad39ccf468aba2ab9177d5f8d8160135e6", - "name": "My fancy channel", - "description": "A description of this fancy channel", - "subscribed": True, - "votes": 23, - "torrents": 3, - "spam": 5, - "modified": 14598395, - "can_edit": True, - }, ...] - } - """ - subscribed_channels_db = self.channel_db_handler.getMySubscribedChannels(include_dispersy=True) - results_json = [convert_db_channel_to_json(channel) for channel in subscribed_channels_db] - if self.session.config.get_chant_enabled(): - with db_session: - channels_list = list(self.session.lm.mds.ChannelMetadata.select(lambda g: g.subscribed)) - results_json.extend([convert_chant_channel_to_json(channel) for channel in channels_list]) - return json.dumps({"subscribed": results_json}) - - -class ChannelsModifySubscriptionEndpoint(BaseChannelsEndpoint): - """ - This class is responsible for methods that modify the list of RSS feed URLs (adding/removing feeds). - """ - - def __init__(self, session, cid): - BaseChannelsEndpoint.__init__(self, session) - self.cid = bytes(cid.decode('hex')) - - def render_GET(self, request): - """ - .. http:get:: /channels/subscribed/(string: channelid) - - Shows the status of subscription to a specific channel along with number of existing votes in the channel - - **Example request**: - - .. sourcecode:: none - - curl -X GET http://localhost:8085/channels/subscribed/da69aaad39ccf468aba2ab9177d5f8d8160135e6 - - **Example response**: - - .. 
sourcecode:: javascript - - { - "subscribed" : True, "votes": 111 - } - """ - request.setHeader('Content-Type', 'text/json') - channel_info = self.get_channel_from_db(self.cid) - - if channel_info is None: - return ChannelsModifySubscriptionEndpoint.return_404(request) - - response = dict() - response[u'subscribed'] = channel_info[7] == VOTE_SUBSCRIBE - response[u'votes'] = channel_info[5] - - return json.dumps(response) - - def render_PUT(self, request): - """ - .. http:put:: /channels/subscribed/(string: channelid) - - Subscribe to a specific channel. Returns error 409 if you are already subscribed to this channel. - - **Example request**: - - .. sourcecode:: none - - curl -X PUT http://localhost:8085/channels/subscribed/da69aaad39ccf468aba2ab9177d5f8d8160135e6 - - **Example response**: - - .. sourcecode:: javascript - - { - "subscribed" : True - } - - :statuscode 409: (conflict) if you are already subscribed to the specified channel. - """ - request.setHeader('Content-Type', 'text/json') - - if self.session.config.get_chant_channel_edit(): - with db_session: - channel = self.session.lm.mds.ChannelMetadata.get(public_key=database_blob(self.cid)) - if not channel: - request.setResponseCode(http.NOT_FOUND) - return json.dumps({"error": CHANNEL_NOT_FOUND}) - - if channel.subscribed: - request.setResponseCode(http.CONFLICT) - return json.dumps({"error": ALREADY_SUBSCRIBED_RESPONSE_MSG}) - channel.subscribed = True - - return json.dumps({"subscribed": True}) - - channel_info = self.get_channel_from_db(self.cid) - - if channel_info is not None and channel_info[7] == VOTE_SUBSCRIBE: - request.setResponseCode(http.CONFLICT) - return json.dumps({"error": ALREADY_SUBSCRIBED_RESPONSE_MSG}) - - def on_vote_done(_): - request.write(json.dumps({"subscribed": True})) - request.finish() - - def on_vote_error(failure): - request.processingFailed(failure) - - self.vote_for_channel(self.cid, VOTE_SUBSCRIBE).addCallback(on_vote_done).addErrback(on_vote_error) - - return NOT_DONE_YET - - def render_DELETE(self, request): - """ - .. http:delete:: /channels/subscribed/(string: channelid) - - Unsubscribe from a specific channel. Returns error 404 if you are not subscribed to this channel. - - **Example request**: - - .. sourcecode:: none - - curl -X DELETE http://localhost:8085/channels/subscribed/da69aaad39ccf468aba2ab9177d5f8d8160135e6 - - **Example response**: - - .. sourcecode:: javascript - - { - "unsubscribed" : True - } - - :statuscode 404: if you are not subscribed to the specified channel. 
- """ - request.setHeader('Content-Type', 'text/json') - channel_info = self.get_channel_from_db(self.cid) - if channel_info is None: - return ChannelsModifySubscriptionEndpoint.return_404(request) - - if channel_info[7] != VOTE_SUBSCRIBE: - return ChannelsModifySubscriptionEndpoint.return_404(request, message=NOT_SUBSCRIBED_RESPONSE_MSG) - - def on_vote_done(_): - request.write(json.dumps({"unsubscribed": True})) - request.finish() - - self.vote_for_channel(self.cid, VOTE_UNSUBSCRIBE).addCallback(on_vote_done) - - return NOT_DONE_YET diff --git a/Tribler/Core/Modules/restapi/channels/channels_torrents_endpoint.py b/Tribler/Core/Modules/restapi/channels/channels_torrents_endpoint.py deleted file mode 100644 index 42738066aa2..00000000000 --- a/Tribler/Core/Modules/restapi/channels/channels_torrents_endpoint.py +++ /dev/null @@ -1,374 +0,0 @@ -import base64 -from twisted.web.error import SchemeNotSupported - -from pony.orm import db_session -from twisted.internet.defer import Deferred -from twisted.web import http -from twisted.web.server import NOT_DONE_YET - -import Tribler.Core.Utilities.json_util as json -from Tribler.Core.Modules.restapi.channels.base_channels_endpoint import BaseChannelsEndpoint -from Tribler.Core.Modules.restapi.util import convert_db_torrent_to_json, convert_torrent_metadata_to_tuple -from Tribler.Core.TorrentDef import TorrentDef -from Tribler.Core.Utilities.utilities import http_get -from Tribler.Core.exceptions import DuplicateTorrentFileError, HttpError -from TriblerGUI.defs import UNCOMMITTED, TODELETE, COMMITTED - -UNKNOWN_TORRENT_MSG = "this torrent is not found in the specified channel" -UNKNOWN_COMMUNITY_MSG = "the community for the specified channel cannot be found" - - -class ChannelsTorrentsEndpoint(BaseChannelsEndpoint): - """ - This class is responsible for managing requests regarding torrents in a channel. - """ - - def __init__(self, session, cid): - BaseChannelsEndpoint.__init__(self, session) - self.cid = cid - self.is_chant_channel = (len(cid) == 74) - - def getChild(self, path, request): - return ChannelModifyTorrentEndpoint(self.session, self.cid, path) - - def render_GET(self, request): - """ - .. http:get:: /channels/discovered/(string: channelid)/torrents - - A GET request to this endpoint returns all discovered torrents in a specific channel. The size of the torrent is - in number of bytes. The last_tracker_check value will be 0 if we did not check the tracker state of the torrent - yet. Optionally, we can disable the family filter for this particular request by passing the following flag: - - disable_filter: whether the family filter should be disabled for this request (1 = disabled) - - **Example request**: - - .. sourcecode:: none - - curl -X GET http://localhost:8085/channels/discovered/da69aaad39ccf468aba2ab9177d5f8d8160135e6/torrents - - **Example response**: - - .. sourcecode:: javascript - - { - "torrents": [{ - "id": 4, - "infohash": "97d2d8f5d37e56cfaeaae151d55f05b077074779", - "name": "Ubuntu-16.04-desktop-amd64", - "size": 8592385, - "category": "other", - "num_seeders": 42, - "num_leechers": 184, - "last_tracker_check": 1463176959 - }, ...] - } - - :statuscode 404: if the specified channel cannot be found. 
- """ - chant_dirty = False - if self.is_chant_channel: - with db_session: - channel = self.session.lm.mds.ChannelMetadata.get(public_key=self.cid) - if channel: - if channel == self.session.lm.mds.get_my_channel(): - # That's our channel, it gets special treatment - uncommitted = [convert_torrent_metadata_to_tuple(x, UNCOMMITTED) for x in - list(channel.uncommitted_contents)] - deleted = [convert_torrent_metadata_to_tuple(x, TODELETE) for x in - list(channel.deleted_contents)] - committed = [convert_torrent_metadata_to_tuple(x, COMMITTED) for x in - list(channel.committed_contents)] - results_local_torrents_channel = uncommitted + deleted + committed - chant_dirty = bool(uncommitted + deleted) - else: - results_local_torrents_channel = map(convert_torrent_metadata_to_tuple, - list(channel.contents)) - else: - return ChannelsTorrentsEndpoint.return_404(request) - else: - channel_info = self.get_channel_from_db(self.cid) - if channel_info is None: - return ChannelsTorrentsEndpoint.return_404(request) - - torrent_db_columns = ['Torrent.torrent_id', 'infohash', 'Torrent.name', 'length', 'Torrent.category', - 'num_seeders', 'num_leechers', 'last_tracker_check', 'ChannelTorrents.inserted'] - results_local_torrents_channel = self.channel_db_handler\ - .getTorrentsFromChannelId(channel_info[0], True, torrent_db_columns) - - should_filter = self.session.config.get_family_filter_enabled() - if 'disable_filter' in request.args and len(request.args['disable_filter']) > 0 \ - and request.args['disable_filter'][0] == "1": - should_filter = False - - results_json = [] - for torrent_result in results_local_torrents_channel: - torrent_json = convert_db_torrent_to_json(torrent_result) - if torrent_json['name'] is None or (should_filter and torrent_json['category'] == 'xxx'): - continue - - results_json.append(torrent_json) - - return json.dumps({"torrents": results_json, "chant_dirty": chant_dirty}) - - @db_session - def render_PUT(self, request): - """ - .. http:put:: /channels/discovered/(string: channelid)/torrents - - Add a torrent file to your own channel. Returns error 500 if something is wrong with the torrent file - and DuplicateTorrentFileError if already added to your channel. The torrent data is passed as base-64 encoded - string. The description is optional. - - **Example request**: - - .. sourcecode:: none - - curl -X PUT http://localhost:8085/channels/discovered/abcd/torrents - --data "torrent=...&description=funny video" - - **Example response**: - - .. sourcecode:: javascript - - { - "added": True - } - - :statuscode 404: if your channel does not exist. - :statuscode 500: if the passed torrent data is corrupt. 
- """ - key = self.session.trustchain_keypair - my_channel_id = key.pub().key_to_bin() - - # First check whether the channel actually exists - if self.is_chant_channel: - if my_channel_id != self.cid: - request.setResponseCode(http.NOT_ALLOWED) - return json.dumps({"error": "you can only add torrents to your own chant channel"}) - - channel = self.session.lm.mds.ChannelMetadata.get_channel_with_id(my_channel_id) - if not channel: - return ChannelsTorrentsEndpoint.return_404(request) - else: - channel = self.get_channel_from_db(self.cid) - if channel is None: - return ChannelsTorrentsEndpoint.return_404(request) - - parameters = http.parse_qs(request.content.read(), 1) - - if 'torrent' not in parameters or len(parameters['torrent']) == 0: - request.setResponseCode(http.BAD_REQUEST) - return json.dumps({"error": "torrent parameter missing"}) - - if 'description' not in parameters or len(parameters['description']) == 0: - extra_info = {} - else: - extra_info = {'description': parameters['description'][0]} - - # Try to parse the torrent data - try: - torrent = base64.b64decode(parameters['torrent'][0]) - torrent_def = TorrentDef.load_from_memory(torrent) - except ValueError as exc: - return BaseChannelsEndpoint.return_500(self, request, exc) - - if self.is_chant_channel: - try: - channel.add_torrent_to_channel(torrent_def, extra_info) - except DuplicateTorrentFileError as exc: - return BaseChannelsEndpoint.return_500(self, request, exc) - else: - try: - self.session.add_torrent_def_to_channel(channel[0], torrent_def, extra_info, forward=True) - except (DuplicateTorrentFileError, HttpError) as ex: - return BaseChannelsEndpoint.return_500(self, request, ex) - - return json.dumps({"added": True}) - - -class ChannelModifyTorrentEndpoint(BaseChannelsEndpoint): - """ - This class is responsible for methods that modify the list of torrents (adding/removing torrents). - """ - - def __init__(self, session, cid, path): - BaseChannelsEndpoint.__init__(self, session) - self.cid = cid - self.path = path - self.deferred = Deferred() - self.is_chant_channel = (len(cid) == 74) - - @db_session - def render_PUT(self, request): - """ - .. http:put:: /channels/discovered/(string: channelid)/torrents/http%3A%2F%2Ftest.com%2Ftest.torrent - - Add a torrent by magnet or url to your channel. Returns error 500 if something is wrong with the torrent file - and DuplicateTorrentFileError if already added to your channel (except with magnet links). - - **Example request**: - - .. sourcecode:: none - - curl -X PUT http://localhost:8085/channels/discovered/abcdefg/torrents/ - http%3A%2F%2Ftest.com%2Ftest.torrent --data "description=nice video" - - **Example response**: - - .. sourcecode:: javascript - - { - "added": "http://test.com/test.torrent" - } - - :statuscode 404: if your channel does not exist. - :statuscode 500: if the specified torrent is already in your channel. 
- """ - my_key = self.session.trustchain_keypair - my_channel_id = my_key.pub().key_to_bin() - - if self.is_chant_channel: - if my_channel_id != self.cid: - request.setResponseCode(http.NOT_ALLOWED) - return json.dumps({"error": "you can only add torrents to your own chant channel"}) - channel = self.session.lm.mds.ChannelMetadata.get_channel_with_id(my_channel_id) - else: - channel = self.get_channel_from_db(self.cid) - - if channel is None: - return BaseChannelsEndpoint.return_404(request) - - parameters = http.parse_qs(request.content.read(), 1) - - if 'description' not in parameters or len(parameters['description']) == 0: - extra_info = {} - else: - extra_info = {'description': parameters['description'][0]} - - def _on_url_fetched(data): - return TorrentDef.load_from_memory(data) - - def _on_magnet_fetched(meta_info): - return TorrentDef.load_from_dict(meta_info) - - def _on_torrent_def_loaded(torrent_def): - if self.is_chant_channel: - # We have to get my channel again since we are in a different database session now - with db_session: - channel = self.session.lm.mds.get_my_channel() - channel.add_torrent_to_channel(torrent_def, extra_info) - else: - channel = self.get_channel_from_db(self.cid) - self.session.add_torrent_def_to_channel(channel[0], torrent_def, extra_info, forward=True) - return self.path - - def _on_added(added): - request.write(json.dumps({"added": added})) - request.finish() - - def _on_add_failed(failure): - failure.trap(ValueError, DuplicateTorrentFileError, SchemeNotSupported) - self._logger.exception(failure.value) - request.write(BaseChannelsEndpoint.return_500(self, request, failure.value)) - request.finish() - - def _on_timeout(_): - request.write(BaseChannelsEndpoint.return_500(self, request, RuntimeError("Metainfo timeout"))) - request.finish() - - if self.path.startswith("http:") or self.path.startswith("https:"): - self.deferred = http_get(self.path) - self.deferred.addCallback(_on_url_fetched) - - if self.path.startswith("magnet:"): - try: - self.session.lm.ltmgr.get_metainfo(self.path, callback=self.deferred.callback, - timeout=30, timeout_callback=_on_timeout, notify=True) - except Exception as ex: - self.deferred.errback(ex) - - self.deferred.addCallback(_on_magnet_fetched) - - self.deferred.addCallback(_on_torrent_def_loaded) - self.deferred.addCallback(_on_added) - self.deferred.addErrback(_on_add_failed) - return NOT_DONE_YET - - def render_DELETE(self, request): - """ - .. http:delete:: /channels/discovered/(string: channelid)/torrents/(string: comma separated torrent infohashes) - - Remove a single or multiple torrents with the given comma separated infohashes from a given channel. - - **Example request**: - - .. sourcecode:: none - - curl -X DELETE http://localhost:8085/channels/discovered/abcdefg/torrents/ - 97d2d8f5d37e56cfaeaae151d55f05b077074779,971d55f05b077074779d2d8f5d37e56cfaeaae15 - - **Example response**: - - .. sourcecode:: javascript - - { - "removed": True - } - - .. 
sourcecode:: javascript - - { - "removed": False, "failed_torrents":["97d2d8f5d37e56cfaeaae151d55f05b077074779"] - } - - :statuscode 404: if the channel is not found - """ - if self.is_chant_channel: - with db_session: - my_key = self.session.trustchain_keypair - my_channel_id = my_key.pub().key_to_bin() - failed_torrents = [] - - if my_channel_id != self.cid: - request.setResponseCode(http.NOT_ALLOWED) - return json.dumps({"error": "you can only remove torrents from your own chant channel"}) - - my_channel = self.session.lm.mds.get_my_channel() - if not my_channel: - return ChannelsTorrentsEndpoint.return_404(request) - - for torrent_path in self.path.split(","): - infohash = torrent_path.decode('hex') - if not my_channel.delete_torrent_from_channel(infohash): - failed_torrents.append(torrent_path) - - if failed_torrents: - return json.dumps({"removed": False, "failed_torrents": failed_torrents}) - return json.dumps({"removed": True}) - else: - channel_info = self.get_channel_from_db(self.cid) - if channel_info is None: - return ChannelsTorrentsEndpoint.return_404(request) - - channel_community = self.get_community_for_channel_id(channel_info[0]) - if channel_community is None: - return BaseChannelsEndpoint.return_404(request, message=UNKNOWN_COMMUNITY_MSG) - - torrent_db_columns = ['Torrent.torrent_id', 'infohash', 'Torrent.name', 'length', 'Torrent.category', - 'num_seeders', 'num_leechers', 'last_tracker_check', 'ChannelTorrents.dispersy_id'] - - failed_torrents = [] - for torrent_path in self.path.split(","): - torrent_info = self.channel_db_handler.getTorrentFromChannelId(channel_info[0], - torrent_path.decode('hex'), - torrent_db_columns) - if torrent_info is None: - failed_torrents.append(torrent_path) - else: - # the 8th index is the dispersy id of the channel torrent - channel_community.remove_torrents([torrent_info[8]]) - - if failed_torrents: - return json.dumps({"removed": False, "failed_torrents": failed_torrents}) - - return json.dumps({"removed": True}) diff --git a/Tribler/Core/Modules/restapi/channels/my_channel_endpoint.py b/Tribler/Core/Modules/restapi/channels/my_channel_endpoint.py deleted file mode 100644 index 449a1910ffb..00000000000 --- a/Tribler/Core/Modules/restapi/channels/my_channel_endpoint.py +++ /dev/null @@ -1,149 +0,0 @@ -import os - -from pony.orm import db_session -from twisted.web import http - -import Tribler.Core.Utilities.json_util as json -from Tribler.Core.Modules.restapi.channels.base_channels_endpoint import BaseChannelsEndpoint -from Tribler.Core.Modules.restapi.util import get_parameter - -NO_CHANNEL_CREATED_RESPONSE_MSG = "your channel has not been created" - - -class MyChannelEndpoint(BaseChannelsEndpoint): - """ - This class is responsible for managing requests regarding your channel. - """ - - def render_GET(self, request): - """ - .. http:get:: /mychannel - - Return the name, description and identifier of your channel. - This endpoint returns a 404 HTTP response if you have not created a channel (yet). - - **Example request**: - - .. sourcecode:: none - - curl -X GET http://localhost:8085/mychannel - - **Example response**: - - .. sourcecode:: javascript - - { - "overview": { - "name": "My Tribler channel", - "description": "A great collection of open-source movies", - "identifier": "4a9cfc7ca9d15617765f4151dd9fae94c8f3ba11" - } - } - - :statuscode 404: if your channel has not been created (yet). 
- """ - if self.session.config.get_chant_channel_edit(): - my_channel_id = self.session.trustchain_keypair.pub().key_to_bin() - with db_session: - my_channel = self.session.lm.mds.ChannelMetadata.get_channel_with_id(my_channel_id) - - if not my_channel: - request.setResponseCode(http.NOT_FOUND) - return json.dumps({"error": NO_CHANNEL_CREATED_RESPONSE_MSG}) - - my_channel = my_channel.to_dict() - return json.dumps({ - 'mychannel': { - 'identifier': str(my_channel["public_key"]).encode('hex'), - 'name': my_channel["title"], - 'description': my_channel["tags"], - 'chant': True - }}) - else: - my_channel_id = self.channel_db_handler.getMyChannelId() - if my_channel_id is None: - request.setResponseCode(http.NOT_FOUND) - return json.dumps({"error": NO_CHANNEL_CREATED_RESPONSE_MSG}) - - my_channel = self.channel_db_handler.getChannel(my_channel_id) - - return json.dumps({'mychannel': {'identifier': my_channel[1].encode('hex'), 'name': my_channel[2], - 'description': my_channel[3]}}) - - def render_POST(self, request): - """ - .. http:post:: /mychannel - - Modify the name and/or the description of your channel. - This endpoint returns a 404 HTTP response if you have not created a channel (yet). - - **Example request**: - - .. sourcecode:: none - - curl -X POST http://localhost:8085/mychannel - --data "name=My fancy playlist&description=This playlist contains some random movies" - - **Example response**: - - .. sourcecode:: javascript - - { - "modified": True - } - - :statuscode 404: if your channel has not been created (yet). - """ - parameters = http.parse_qs(request.content.read(), 1) - - if not get_parameter(parameters, 'name') and not get_parameter(parameters, 'commit_changes'): - request.setResponseCode(http.BAD_REQUEST) - return json.dumps({"error": 'channel name cannot be empty'}) - - if self.session.config.get_chant_channel_edit(): - with db_session: - modified = False - my_key = self.session.trustchain_keypair - my_channel_id = my_key.pub().key_to_bin() - my_channel = self.session.lm.mds.ChannelMetadata.get_channel_with_id(my_channel_id) - - if not my_channel: - request.setResponseCode(http.NOT_FOUND) - return json.dumps({"error": NO_CHANNEL_CREATED_RESPONSE_MSG}) - - if get_parameter(parameters, 'name'): - my_channel.update_metadata(update_dict={ - "tags": unicode(get_parameter(parameters, 'description'), 'utf-8'), - "title": unicode(get_parameter(parameters, 'name'), 'utf-8') - }) - modified = True - - if get_parameter(parameters, 'commit_changes') and my_channel.staged_entries_list: - # Update torrent if we have uncommitted content in the channel - my_channel.commit_channel_torrent() - torrent_path = os.path.join(self.session.lm.mds.channels_dir, my_channel.dir_name + ".torrent") - self.session.lm.updated_my_channel(torrent_path) - modified = True - - return json.dumps({'modified': modified}) - else: - my_channel_id = self.channel_db_handler.getMyChannelId() - if my_channel_id is None: - request.setResponseCode(http.NOT_FOUND) - return json.dumps({"error": NO_CHANNEL_CREATED_RESPONSE_MSG}) - - channel_community = self.get_community_for_channel_id(my_channel_id) - if channel_community is None: - return BaseChannelsEndpoint.return_404(request, - message="the community for the your channel cannot be found") - - my_channel = self.channel_db_handler.getChannel(my_channel_id) - changes = {} - if my_channel[2] != get_parameter(parameters, 'name'): - changes['name'] = unicode(get_parameter(parameters, 'name'), 'utf-8') - if my_channel[3] != get_parameter(parameters, 'description'): - 
changes['description'] = unicode(get_parameter(parameters, 'description'), 'utf-8') - - channel_community.modifyChannel(changes) - - return json.dumps({'modified': True}) diff --git a/Tribler/Core/Modules/restapi/downloads_endpoint.py b/Tribler/Core/Modules/restapi/downloads_endpoint.py index ba84876da93..f49a75357c5 100644 --- a/Tribler/Core/Modules/restapi/downloads_endpoint.py +++ b/Tribler/Core/Modules/restapi/downloads_endpoint.py @@ -1,21 +1,28 @@ from __future__ import absolute_import import logging +from binascii import hexlify -from six import text_type, unichr # pylint: disable=redefined-builtin +from libtorrent import bencode, create_torrent + +from pony.orm import db_session + +import six +from six import unichr # pylint: disable=redefined-builtin from six.moves.urllib.parse import unquote_plus from six.moves.urllib.request import url2pathname from twisted.web import http, resource from twisted.web.server import NOT_DONE_YET +import Tribler.Core.Utilities.json_util as json from Tribler.Core.DownloadConfig import DownloadStartupConfig from Tribler.Core.Modules.MetadataStore.serialization import ChannelMetadataPayload from Tribler.Core.Modules.restapi.util import return_handled_exception +from Tribler.Core.Utilities.torrent_utils import get_info_from_handle from Tribler.Core.Utilities.utilities import unichar_string from Tribler.Core.exceptions import InvalidSignatureException -from Tribler.Core.simpledefs import DOWNLOAD, UPLOAD, dlstatus_strings, DLMODE_VOD -import Tribler.Core.Utilities.json_util as json +from Tribler.Core.simpledefs import DLMODE_VOD, DOWNLOAD, UPLOAD, dlstatus_strings from Tribler.util import cast_to_unicode_utf8 @@ -85,11 +92,11 @@ def create_dconfig_from_params(parameters): download_config.set_safe_seeding(True) if 'destination' in parameters and len(parameters['destination']) > 0: - dest_dir = unicode(parameters['destination'][0], 'utf-8') + dest_dir = cast_to_unicode_utf8(parameters['destination'][0]) download_config.set_dest_dir(dest_dir) if 'selected_files[]' in parameters: - selected_files_list = [unicode(f, 'utf-8') for f in parameters['selected_files[]']] + selected_files_list = [cast_to_unicode_utf8(f) for f in parameters['selected_files[]']] download_config.set_selected_files(selected_files_list) return download_config, None @@ -223,38 +230,43 @@ def render_GET(self, request): num_seeds, num_peers = state.get_num_seeds_peers() num_connected_seeds, num_connected_peers = download.get_num_connected_seeds_peers() - def get_chant_name(download): - infohash = download.tdef.get_infohash() - channel = self.session.lm.mds.ChannelMetadata.get_channel_with_infohash(infohash) - if channel: - return channel.title - else: - return u"" - - download_json = {"name": get_chant_name(download) if download.get_channel_download() else tdef.get_name_utf8(), - "progress": state.get_progress(), - "infohash": tdef.get_infohash().encode('hex'), - "speed_down": state.get_current_payload_speed(DOWNLOAD), - "speed_up": state.get_current_payload_speed(UPLOAD), - "status": dlstatus_strings[state.get_status()], - "size": tdef.get_length(), "eta": state.get_eta(), - "num_peers": num_peers, "num_seeds": num_seeds, - "num_connected_peers": num_connected_peers, "num_connected_seeds": num_connected_seeds, - "total_up": state.get_total_transferred(UPLOAD), - "total_down": state.get_total_transferred(DOWNLOAD), "ratio": state.get_seeding_ratio(), - "trackers": tracker_info, "hops": download.get_hops(), - "anon_download": download.get_anon_mode(), "safe_seeding": 
download.get_safe_seeding(), - # Maximum upload/download rates are set for entire sessions - "max_upload_speed": self.session.config.get_libtorrent_max_upload_rate(), - "max_download_speed": self.session.config.get_libtorrent_max_download_rate(), - "destination": download.get_dest_dir(), "availability": state.get_availability(), - "total_pieces": tdef.get_nr_pieces(), "vod_mode": download.get_mode() == DLMODE_VOD, - "vod_prebuffering_progress": state.get_vod_prebuffering_progress(), - "vod_prebuffering_progress_consec": state.get_vod_prebuffering_progress_consec(), - "error": repr(state.get_error()) if state.get_error() else "", - "time_added": download.get_time_added(), - "credit_mining": download.get_credit_mining(), - "channel_download": download.get_channel_download()} + download_name = self.session.lm.mds.ChannelMetadata.get_channel_name( + tdef.get_name_utf8(), tdef.get_infohash()) if download.get_channel_download() else tdef.get_name_utf8() + + download_json = { + "name": download_name, + "progress": state.get_progress(), + "infohash": hexlify(tdef.get_infohash()), + "speed_down": state.get_current_payload_speed(DOWNLOAD), + "speed_up": state.get_current_payload_speed(UPLOAD), + "status": dlstatus_strings[state.get_status()], + "size": tdef.get_length(), + "eta": state.get_eta(), + "num_peers": num_peers, + "num_seeds": num_seeds, + "num_connected_peers": num_connected_peers, + "num_connected_seeds": num_connected_seeds, + "total_up": state.get_total_transferred(UPLOAD), + "total_down": state.get_total_transferred(DOWNLOAD), + "ratio": state.get_seeding_ratio(), + "trackers": tracker_info, + "hops": download.get_hops(), + "anon_download": download.get_anon_mode(), + "safe_seeding": download.get_safe_seeding(), + # Maximum upload/download rates are set for entire sessions + "max_upload_speed": self.session.config.get_libtorrent_max_upload_rate(), + "max_download_speed": self.session.config.get_libtorrent_max_download_rate(), + "destination": download.get_dest_dir(), + "availability": state.get_availability(), + "total_pieces": tdef.get_nr_pieces(), + "vod_mode": download.get_mode() == DLMODE_VOD, + "vod_prebuffering_progress": state.get_vod_prebuffering_progress(), + "vod_prebuffering_progress_consec": state.get_vod_prebuffering_progress_consec(), + "error": repr(state.get_error()) if state.get_error() else "", + "time_added": download.get_time_added(), + "credit_mining": download.get_credit_mining(), + "channel_download": download.get_channel_download() + } # Add peers information if requested if get_peers: @@ -326,7 +338,7 @@ def on_error(error): uri = parameters['uri'][0] if uri.startswith("file:"): if uri.endswith(".mdblob"): - filename = url2pathname(uri[5:].encode('utf-8') if isinstance(uri, text_type) else uri[5:]) + filename = url2pathname(uri[5:].encode('utf-8') if isinstance(uri, six.text_type) else uri[5:]) try: payload = ChannelMetadataPayload.from_file(filename) except IOError: @@ -336,7 +348,14 @@ def on_error(error): request.setResponseCode(http.BAD_REQUEST) return json.dumps({"error": "Metadata has invalid signature"}) - download, _ = self.session.lm.update_channel(payload) + with db_session: + channel, _ = self.session.lm.mds.process_payload(payload) + if channel and not channel.subscribed and channel.local_version < channel.timestamp: + channel.subscribed = True + download, _ = self.session.lm.gigachannel_manager.download_channel(channel) + else: + return json.dumps({"error": "Already subscribed"}) + return json.dumps({"started": True, "infohash": 
str(download.get_def().get_infohash()).encode('hex')}) else: download_uri = u"file:%s" % url2pathname(uri[5:]).decode('utf-8') @@ -535,13 +554,21 @@ def render_GET(self, request): The contents of the .torrent file. """ - torrent = self.session.get_collected_torrent(self.infohash) - if not torrent: - return DownloadExportTorrentEndpoint.return_404(request) + download = self.session.get_download(self.infohash) + if not download: + return DownloadSpecificEndpoint.return_404(request) + + if not download.handle or not download.handle.is_valid() or not download.handle.has_metadata(): + return DownloadSpecificEndpoint.return_404(request) + + torrent_info = get_info_from_handle(download.handle) + t = create_torrent(torrent_info) + torrent = t.generate() + bencoded_torrent = bencode(torrent) request.setHeader(b'content-type', 'application/x-bittorrent') request.setHeader(b'Content-Disposition', 'attachment; filename=%s.torrent' % self.infohash.encode('hex')) - return torrent + return bencoded_torrent class DownloadFilesEndpoint(DownloadBaseEndpoint): diff --git a/Tribler/Core/Modules/restapi/events_endpoint.py b/Tribler/Core/Modules/restapi/events_endpoint.py index 2a10a231cf4..9438b2a08a1 100644 --- a/Tribler/Core/Modules/restapi/events_endpoint.py +++ b/Tribler/Core/Modules/restapi/events_endpoint.py @@ -1,18 +1,17 @@ from __future__ import absolute_import import time +from binascii import hexlify from twisted.web import resource, server import Tribler.Core.Utilities.json_util as json -from Tribler.Core.Modules.restapi.util import convert_db_channel_to_json, convert_search_torrent_to_json, \ - fix_unicode_dict -from Tribler.Core.simpledefs import NTFY_CHANNEL, NTFY_CREDIT_MINING, NTFY_DELETE, NTFY_DISCOVERED, NTFY_ERROR,\ - NTFY_FINISHED, NTFY_INSERT, NTFY_MARKET_ON_ASK, NTFY_MARKET_ON_ASK_TIMEOUT, NTFY_MARKET_ON_BID,\ - NTFY_MARKET_ON_BID_TIMEOUT, NTFY_MARKET_ON_PAYMENT_RECEIVED, NTFY_MARKET_ON_PAYMENT_SENT,\ - NTFY_MARKET_ON_TRANSACTION_COMPLETE, NTFY_NEW_VERSION, NTFY_REMOVE, NTFY_STARTED, NTFY_TORRENT, NTFY_TRIBLER,\ - NTFY_TUNNEL, NTFY_UPDATE, NTFY_UPGRADER, NTFY_UPGRADER_TICK, NTFY_WATCH_FOLDER_CORRUPT_TORRENT, SIGNAL_CHANNEL,\ - SIGNAL_LOW_SPACE, SIGNAL_ON_SEARCH_RESULTS, SIGNAL_RESOURCE_CHECK, SIGNAL_TORRENT, STATE_SHUTDOWN +from Tribler.Core.Modules.restapi.util import fix_unicode_dict +from Tribler.Core.simpledefs import NTFY_CHANNEL, NTFY_CREDIT_MINING, NTFY_DISCOVERED, NTFY_ERROR, NTFY_FINISHED, \ + NTFY_INSERT, NTFY_MARKET_ON_ASK, NTFY_MARKET_ON_ASK_TIMEOUT, NTFY_MARKET_ON_BID, NTFY_MARKET_ON_BID_TIMEOUT, \ + NTFY_MARKET_ON_PAYMENT_RECEIVED, NTFY_MARKET_ON_PAYMENT_SENT, NTFY_MARKET_ON_TRANSACTION_COMPLETE, \ + NTFY_NEW_VERSION, NTFY_REMOVE, NTFY_STARTED, NTFY_TORRENT, NTFY_TRIBLER, NTFY_TUNNEL, NTFY_UPDATE, NTFY_UPGRADER, \ + NTFY_UPGRADER_TICK, NTFY_WATCH_FOLDER_CORRUPT_TORRENT, SIGNAL_LOW_SPACE, SIGNAL_RESOURCE_CHECK, STATE_SHUTDOWN from Tribler.Core.version import version_id from Tribler.pyipv8.ipv8.messaging.anonymization.tunnel import Circuit @@ -68,8 +67,6 @@ def __init__(self, session): self.infohashes_sent = set() self.channel_cids_sent = set() - self.session.add_observer(self.on_search_results_channels, SIGNAL_CHANNEL, [SIGNAL_ON_SEARCH_RESULTS]) - self.session.add_observer(self.on_search_results_torrents, SIGNAL_TORRENT, [SIGNAL_ON_SEARCH_RESULTS]) self.session.add_observer(self.on_upgrader_started, NTFY_UPGRADER, [NTFY_STARTED]) self.session.add_observer(self.on_upgrader_finished, NTFY_UPGRADER, [NTFY_FINISHED]) self.session.add_observer(self.on_upgrader_tick, 
NTFY_UPGRADER_TICK, [NTFY_STARTED]) @@ -79,9 +76,9 @@ def __init__(self, session): self.session.add_observer(self.on_tribler_started, NTFY_TRIBLER, [NTFY_STARTED]) self.session.add_observer(self.on_channel_discovered, NTFY_CHANNEL, [NTFY_DISCOVERED]) self.session.add_observer(self.on_torrent_discovered, NTFY_TORRENT, [NTFY_DISCOVERED]) - self.session.add_observer(self.on_torrent_removed_from_channel, NTFY_TORRENT, [NTFY_DELETE]) self.session.add_observer(self.on_torrent_finished, NTFY_TORRENT, [NTFY_FINISHED]) self.session.add_observer(self.on_torrent_error, NTFY_TORRENT, [NTFY_ERROR]) + self.session.add_observer(self.on_torrent_info_updated, NTFY_TORRENT, [NTFY_UPDATE]) self.session.add_observer(self.on_market_ask, NTFY_MARKET_ON_ASK, [NTFY_UPDATE]) self.session.add_observer(self.on_market_bid, NTFY_MARKET_ON_BID, [NTFY_UPDATE]) self.session.add_observer(self.on_market_ask_timeout, NTFY_MARKET_ON_ASK_TIMEOUT, [NTFY_UPDATE]) @@ -110,46 +107,6 @@ def write_data(self, message): else: [request.write(message_str + '\n') for request in self.events_requests] - def start_new_query(self): - self.infohashes_sent = set() - self.channel_cids_sent = set() - - def on_search_results_channels(self, subject, changetype, objectID, results): - """ - Returns the channel search results over the events endpoint. - """ - query = ' '.join(results['keywords']) - - for channel in results['result_list']: - channel_json = convert_db_channel_to_json(channel, include_rel_score=True) - - if self.session.config.get_family_filter_enabled() and \ - self.session.lm.category.xxx_filter.isXXX(channel_json['name']): - continue - - if channel_json['dispersy_cid'] not in self.channel_cids_sent: - self.write_data({"type": "search_result_channel", "event": {"query": query, "result": channel_json}}) - self.channel_cids_sent.add(channel_json['dispersy_cid']) - - def on_search_results_torrents(self, subject, changetype, objectID, results): - """ - Returns the torrent search results over the events endpoint. 
- """ - query = ' '.join(results['keywords']) - - for torrent in results['result_list']: - torrent_json = convert_search_torrent_to_json(torrent) - torrent_name = torrent_json['name'] - torrent_json['relevance_score'] = torrent_json['relevance_score'] if 'relevance_score' in torrent_json \ - else self.session.lm.torrent_db.relevance_score_remote_torrent(torrent_name) - - if self.session.config.get_family_filter_enabled() and torrent_json['category'] == 'xxx': - continue - - if 'infohash' in torrent_json and torrent_json['infohash'] not in self.infohashes_sent: - self.write_data({"type": "search_result_torrent", "event": {"query": query, "result": torrent_json}}) - self.infohashes_sent.add(torrent_json['infohash']) - def on_upgrader_started(self, subject, changetype, objectID, *args): self.write_data({"type": "upgrader_started"}) @@ -174,14 +131,14 @@ def on_channel_discovered(self, subject, changetype, objectID, *args): def on_torrent_discovered(self, subject, changetype, objectID, *args): self.write_data({"type": "torrent_discovered", "event": args[0]}) - def on_torrent_removed_from_channel(self, subject, changetype, objectID, *args): - self.write_data({"type": "torrent_removed_from_channel", "event": args[0]}) - def on_torrent_finished(self, subject, changetype, objectID, *args): - self.write_data({"type": "torrent_finished", "event": {"infohash": objectID.encode('hex'), "name": args[0]}}) + self.write_data({"type": "torrent_finished", "event": {"infohash": hexlify(objectID), "name": args[0]}}) def on_torrent_error(self, subject, changetype, objectID, *args): - self.write_data({"type": "torrent_error", "event": {"infohash": objectID.encode('hex'), "error": args[0]}}) + self.write_data({"type": "torrent_error", "event": {"infohash": hexlify(objectID), "error": args[0]}}) + + def on_torrent_info_updated(self, subject, changetype, objectID, *args): + self.write_data({"type": "torrent_info_updated", "event": dict(infohash=hexlify(objectID), **args[0])}) def on_tribler_exception(self, exception_text): self.write_data({"type": "tribler_exception", "event": {"text": exception_text}}) @@ -238,6 +195,7 @@ def render_GET(self, request): curl -X GET http://localhost:8085/events """ + def on_request_finished(_): self.events_requests.remove(request) diff --git a/Tribler/Core/Modules/restapi/metadata_endpoint.py b/Tribler/Core/Modules/restapi/metadata_endpoint.py new file mode 100644 index 00000000000..45a66421619 --- /dev/null +++ b/Tribler/Core/Modules/restapi/metadata_endpoint.py @@ -0,0 +1,311 @@ +from __future__ import absolute_import + +import json +import logging +from binascii import unhexlify + +from pony.orm import db_session + +from twisted.web import http, resource +from twisted.web.server import NOT_DONE_YET + +from Tribler.pyipv8.ipv8.database import database_blob +from Tribler.util import cast_to_unicode_utf8 + + +class BaseMetadataEndpoint(resource.Resource): + + def __init__(self, session): + resource.Resource.__init__(self) + self.session = session + + @staticmethod + def sanitize_parameters(parameters): + """ + Sanitize the parameters for a request that fetches channels. 
+ """ + sanitized = { + "first": 1 if 'first' not in parameters else int(parameters['first'][0]), + "last": 50 if 'last' not in parameters else int(parameters['last'][0]), + "sort_by": None if 'sort_by' not in parameters else BaseMetadataEndpoint.convert_sort_param_to_pony_col( + parameters['sort_by'][0]), + "sort_asc": True if 'sort_asc' not in parameters else bool(int(parameters['sort_asc'][0])), + "query_filter": None if 'filter' not in parameters else cast_to_unicode_utf8(parameters['filter'][0]), + "hide_xxx": False if 'hide_xxx' not in parameters else bool(int(parameters['hide_xxx'][0]) > 0)} + + return sanitized + + @staticmethod + def convert_sort_param_to_pony_col(sort_param): + """ + Convert an incoming sort parameter to a pony column in the database. + :return a string with the right column. None if there exists no value for the given key. + """ + json2pony_columns = { + u'category': "tags", + u'id': "rowid", + u'name': "title", + u'size': "size", + u'infohash': "infohash", + u'date': "torrent_date", + u'status': 'status', + u'torrents': 'num_entries', + u'health': 'HEALTH' + } + + return json2pony_columns[sort_param] if sort_param in json2pony_columns else None + + +class MetadataEndpoint(resource.Resource): + + def __init__(self, session): + resource.Resource.__init__(self) + + child_handler_dict = { + "channels": ChannelsEndpoint, + "torrents": TorrentsEndpoint + } + + for path, child_cls in child_handler_dict.items(): + self.putChild(path, child_cls(session)) + + +class BaseChannelsEndpoint(BaseMetadataEndpoint): + @staticmethod + def sanitize_parameters(parameters): + """ + Sanitize the parameters for a request that fetches channels. + """ + sanitized = BaseMetadataEndpoint.sanitize_parameters(parameters) + + if 'subscribed' in parameters: + sanitized['subscribed'] = bool(int(parameters['subscribed'][0])) + + return sanitized + + +class ChannelsEndpoint(BaseChannelsEndpoint): + + def getChild(self, path, request): + if path == "popular": + return ChannelsPopularEndpoint(self.session) + + return SpecificChannelEndpoint(self.session, path) + + def render_GET(self, request): + sanitized = ChannelsEndpoint.sanitize_parameters(request.args) + with db_session: + channels, total = self.session.lm.mds.ChannelMetadata.get_entries(**sanitized) + channels_list = [channel.to_simple_dict() for channel in channels] + + return json.dumps({ + "channels": channels_list, + "first": sanitized["first"], + "last": sanitized["last"], + "sort_by": sanitized["sort_by"], + "sort_asc": int(sanitized["sort_asc"]), + "total": total + }) + + +class ChannelsPopularEndpoint(BaseChannelsEndpoint): + + def render_GET(self, request): + limit_channels = 10 + + if 'limit' in request.args and request.args['limit']: + limit_channels = int(request.args['limit'][0]) + + if limit_channels <= 0: + request.setResponseCode(http.BAD_REQUEST) + return json.dumps({"error": "the limit parameter must be a positive number"}) + + popular_channels = self.session.lm.mds.ChannelMetadata.get_random_channels(limit=limit_channels) + return json.dumps({"channels": [channel.to_simple_dict() for channel in popular_channels]}) + + +class SpecificChannelEndpoint(BaseChannelsEndpoint): + + def __init__(self, session, channel_pk): + BaseChannelsEndpoint.__init__(self, session) + self.channel_pk = unhexlify(channel_pk) + + self.putChild("torrents", SpecificChannelTorrentsEndpoint(session, self.channel_pk)) + + def render_POST(self, request): + parameters = http.parse_qs(request.content.read(), 1) + if 'subscribe' not in parameters: + 
request.setResponseCode(http.BAD_REQUEST) + return json.dumps({"success": False, "error": "subscribe parameter missing"}) + + to_subscribe = bool(int(parameters['subscribe'][0])) + with db_session: + channel = self.session.lm.mds.ChannelMetadata.get(public_key=database_blob(self.channel_pk)) + if not channel: + request.setResponseCode(http.NOT_FOUND) + return json.dumps({"error": "this channel cannot be found"}) + + channel.subscribed = to_subscribe + channel.local_version = 0 + + return json.dumps({"success": True, "subscribed": to_subscribe}) + + +class SpecificChannelTorrentsEndpoint(BaseMetadataEndpoint): + + def __init__(self, session, channel_pk): + BaseMetadataEndpoint.__init__(self, session) + self.channel_pk = channel_pk + + def render_GET(self, request): + sanitized = SpecificChannelTorrentsEndpoint.sanitize_parameters(request.args) + with db_session: + torrents, total = self.session.lm.mds.TorrentMetadata.get_entries(channel_pk=self.channel_pk, **sanitized) + torrents_list = [torrent.to_simple_dict() for torrent in torrents] + + return json.dumps({ + "torrents": torrents_list, + "first": sanitized['first'], + "last": sanitized['last'], + "sort_by": sanitized['sort_by'], + "sort_asc": int(sanitized['sort_asc']), + "total": total + }) + + +class TorrentsEndpoint(BaseMetadataEndpoint): + + def __init__(self, session): + BaseMetadataEndpoint.__init__(self, session) + self.putChild("random", TorrentsRandomEndpoint(session)) + + def getChild(self, path, request): + return SpecificTorrentEndpoint(self.session, path) + + +class SpecificTorrentEndpoint(resource.Resource): + """ + This class handles requests for a specific torrent. + """ + + def __init__(self, session, infohash): + resource.Resource.__init__(self) + self.session = session + self.infohash = unhexlify(infohash) + + self.putChild("health", TorrentHealthEndpoint(self.session, self.infohash)) + + def render_GET(self, request): + with db_session: + md = self.session.lm.mds.TorrentMetadata.select(lambda g: g.infohash == database_blob(self.infohash))[:1] + torrent_dict = md[0].to_simple_dict(include_trackers=True) if md else None + + if not md: + # render_GET must return a body (or NOT_DONE_YET); a bare return would crash Twisted + request.setResponseCode(http.NOT_FOUND) + return json.dumps({"error": "torrent not found in database"}) + + return json.dumps({"torrent": torrent_dict}) + + +class TorrentsRandomEndpoint(BaseMetadataEndpoint): + + def render_GET(self, request): + limit_torrents = 10 + + if 'limit' in request.args and request.args['limit']: + limit_torrents = int(request.args['limit'][0]) + + if limit_torrents <= 0: + request.setResponseCode(http.BAD_REQUEST) + return json.dumps({"error": "the limit parameter must be a positive number"}) + + with db_session: + random_torrents = self.session.lm.mds.TorrentMetadata.get_random_torrents(limit=limit_torrents) + torrents = [torrent.to_simple_dict() for torrent in random_torrents] + return json.dumps({"torrents": torrents}) + + +class TorrentHealthEndpoint(resource.Resource): + """ + This class is responsible for endpoints regarding the health of a torrent. + """ + + def __init__(self, session, infohash): + resource.Resource.__init__(self) + self.session = session + self.infohash = infohash + self._logger = logging.getLogger(self.__class__.__name__) + + def finish_request(self, request): + try: + request.finish() + except RuntimeError: + self._logger.warning("Writing response failed, probably the client closed the connection already.") + + def render_GET(self, request): + """ + ..
http:get:: /torrents/(string: torrent infohash)/health + + Fetch the swarm health of a specific torrent. You can optionally specify the timeout to be used in the + connections to the trackers. This is by default 20 seconds. + By default, we will not check the health of a torrent again if it was recently checked. You can force a health + recheck by passing the refresh parameter. + + **Example request**: + + .. sourcecode:: none + + curl http://localhost:8085/metadata/torrents/97d2d8f5d37e56cfaeaae151d55f05b077074779/health + ?timeout=15&refresh=1 + + **Example response**: + + .. sourcecode:: javascript + + { + "health": { + "http://mytracker.com:80/announce": { + "seeders": 43, + "leechers": 20, + "infohash": "97d2d8f5d37e56cfaeaae151d55f05b077074779" + }, + "http://nonexistingtracker.com:80/announce": { + "error": "timeout" + } + } + } + + :statuscode 404: if the torrent is not found in the database + """ + timeout = 20 + if 'timeout' in request.args: + timeout = int(request.args['timeout'][0]) + + refresh = False + if 'refresh' in request.args and request.args['refresh'] and request.args['refresh'][0] == "1": + refresh = True + + nowait = False + if 'nowait' in request.args and request.args['nowait'] and request.args['nowait'][0] == "1": + nowait = True + + def on_health_result(result): + request.write(json.dumps({'health': result})) + self.finish_request(request) + + def on_request_error(failure): + if not request.finished: + request.setResponseCode(http.BAD_REQUEST) + request.write(json.dumps({"error": failure.getErrorMessage()})) + # If the above request.write failed, the request will have already been finished + if not request.finished: + self.finish_request(request) + + result_deferred = self.session.check_torrent_health(self.infohash, timeout=timeout, scrape_now=refresh) + # return immediately. 
Used by GUI to schedule health updates through the EventsEndpoint + if nowait: + return json.dumps({'checking': '1'}) + result_deferred.addCallback(on_health_result).addErrback(on_request_error) + + return NOT_DONE_YET diff --git a/Tribler/Core/Modules/restapi/mychannel_endpoint.py b/Tribler/Core/Modules/restapi/mychannel_endpoint.py new file mode 100644 index 00000000000..2f8cfe44217 --- /dev/null +++ b/Tribler/Core/Modules/restapi/mychannel_endpoint.py @@ -0,0 +1,297 @@ +from __future__ import absolute_import + +import base64 +import json +import os +import urllib +from binascii import hexlify, unhexlify + +from pony.orm import db_session + +from twisted.web import http, resource + +from Tribler.Core.Modules.restapi.metadata_endpoint import SpecificChannelTorrentsEndpoint +from Tribler.Core.TorrentDef import TorrentDef +from Tribler.Core.exceptions import DuplicateTorrentFileError +from Tribler.pyipv8.ipv8.database import database_blob + + +class BaseMyChannelEndpoint(resource.Resource): + + def __init__(self, session): + resource.Resource.__init__(self) + self.session = session + + +class MyChannelEndpoint(BaseMyChannelEndpoint): + + def __init__(self, session): + BaseMyChannelEndpoint.__init__(self, session) + self.putChild("torrents", MyChannelTorrentsEndpoint(session)) + self.putChild("commit", MyChannelCommitEndpoint(session)) + + def render_GET(self, request): + with db_session: + my_channel = self.session.lm.mds.ChannelMetadata.get_my_channel() + if not my_channel: + request.setResponseCode(http.NOT_FOUND) + return json.dumps({"error": "your channel has not been created"}) + + return json.dumps({ + 'mychannel': { + 'public_key': hexlify(my_channel.public_key), + 'name': my_channel.title, + 'description': my_channel.tags, + 'dirty': my_channel.dirty + } + }) + + def render_POST(self, request): + parameters = http.parse_qs(request.content.read(), 1) + if 'name' not in parameters and 'description' not in parameters: + request.setResponseCode(http.BAD_REQUEST) + return json.dumps({"error": "name or description parameter missing"}) + + with db_session: + my_channel = self.session.lm.mds.ChannelMetadata.get_my_channel() + if not my_channel: + request.setResponseCode(http.NOT_FOUND) + return json.dumps({"error": "your channel has not been created"}) + + # Fall back to the stored values so a request carrying only one of the two parameters cannot raise a KeyError + my_channel.update_metadata(update_dict={ + "tags": urllib.unquote(parameters['description'][0]).decode('utf-8') if 'description' in parameters else my_channel.tags, + "title": urllib.unquote(parameters['name'][0]).decode('utf-8') if 'name' in parameters else my_channel.title + }) + + return json.dumps({"edited": True}) + + def render_PUT(self, request): + parameters = http.parse_qs(request.content.read(), 1) + + if 'name' not in parameters or not parameters['name'] or not parameters['name'][0]: + request.setResponseCode(http.BAD_REQUEST) + return json.dumps({"error": "channel name cannot be empty"}) + + if 'description' not in parameters or not parameters['description']: + description = u'' + else: + description = urllib.unquote(parameters['description'][0]).decode('utf-8') + + my_key = self.session.trustchain_keypair + my_channel_pk = my_key.pub().key_to_bin() + + # Do not allow to add a channel twice + if self.session.lm.mds.get_my_channel(): + request.setResponseCode(http.CONFLICT) + return json.dumps({"error": "channel already exists"}) + + title = urllib.unquote(parameters['name'][0]).decode('utf-8') + self.session.lm.mds.ChannelMetadata.create_channel(title, description) + return json.dumps({ + "added": str(my_channel_pk).encode("hex"), + }) + + +class MyChannelTorrentsEndpoint(BaseMyChannelEndpoint): + + def getChild(self,
path, request): + return MyChannelSpecificTorrentEndpoint(self.session, path) + + def render_GET(self, request): + with db_session: + my_channel = self.session.lm.mds.ChannelMetadata.get_my_channel() + if not my_channel: + request.setResponseCode(http.NOT_FOUND) + return json.dumps({"error": "your channel has not been created"}) + + sanitized = SpecificChannelTorrentsEndpoint.sanitize_parameters(request.args) + if 'exclude_deleted' in request.args: + sanitized['exclude_deleted'] = request.args['exclude_deleted'] + + torrents, total = self.session.lm.mds.TorrentMetadata.get_entries( + channel_pk=database_blob(my_channel.public_key), **sanitized) + torrents = [torrent.to_simple_dict() for torrent in torrents] + + return json.dumps({ + "torrents": torrents, + "first": sanitized['first'], + "last": sanitized['last'], + "sort_by": sanitized['sort_by'], + "sort_asc": int(sanitized['sort_asc']), + "total": total, + "dirty": my_channel.dirty + }) + + def render_POST(self, request): + parameters = http.parse_qs(request.content.read(), 1) + if 'status' not in parameters or 'infohashes' not in parameters: + request.setResponseCode(http.BAD_REQUEST) + return json.dumps({"error": "status or infohashes parameter missing"}) + + new_status = int(parameters['status'][0]) + infohashes = parameters['infohashes'][0].split(',') + + with db_session: + my_channel = self.session.lm.mds.ChannelMetadata.get_my_channel() + if not my_channel: + request.setResponseCode(http.NOT_FOUND) + return json.dumps({"error": "your channel has not been created"}) + + for infohash in infohashes: + torrent = my_channel.get_torrent(unhexlify(infohash)) + if not torrent: + continue + torrent.status = new_status + + return json.dumps({"success": True}) + + def render_DELETE(self, request): + with db_session: + my_channel = self.session.lm.mds.ChannelMetadata.get_my_channel() + if not my_channel: + request.setResponseCode(http.NOT_FOUND) + return json.dumps({"error": "your channel has not been created"}) + + my_channel.drop_channel_contents() + + return json.dumps({"success": True}) + + @db_session + def render_PUT(self, request): + """ + .. http:put:: /mychannel/torrents + + Add a torrent file to your own channel. Returns error 500 if something is wrong with the torrent file + and DuplicateTorrentFileError if already added to your channel. The torrent data is passed as base-64 encoded + string. The description is optional. + + Option torrents_dir adds all .torrent files from a chosen directory + Option recursive enables recursive scanning of the chosen directory for .torrent files + + **Example request**: + + .. sourcecode:: none + + curl -X PUT http://localhost:8085/mychannel/torrents + --data "torrent=...&description=funny video" + + **Example response**: + + .. sourcecode:: javascript + + { + "added": True + } + + **Example request**: + + .. sourcecode:: none + + curl -X PUT http://localhost:8085/mychannel/torrents? --data "torrents_dir=some_dir&recursive=1" + + **Example response**: + + .. sourcecode:: javascript + + { + "added": 13 + } + + :statuscode 404: if your channel does not exist. + :statuscode 500: if the passed torrent data is corrupt. 
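A sketch of the client side of the single-torrent variant (hypothetical, not part of this patch; it assumes Python 2, the default port 8085, and a local file named my.torrent):

.. sourcecode:: python

    import base64
    import urllib
    import urllib2

    with open("my.torrent", "rb") as torrent_file:
        body = urllib.urlencode({"torrent": base64.b64encode(torrent_file.read()),
                                 "description": "funny video"})

    put_request = urllib2.Request("http://localhost:8085/mychannel/torrents", data=body)
    put_request.get_method = lambda: "PUT"  # urllib2 only emits GET/POST by itself
    print urllib2.urlopen(put_request).read()  # expect {"added": 1}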
+ """ + my_channel = self.session.lm.mds.ChannelMetadata.get_my_channel() + if not my_channel: + request.setResponseCode(http.NOT_FOUND) + return json.dumps({"error": "your channel has not been created yet"}) + + parameters = http.parse_qs(request.content.read(), 1) + + torrents_dir = None + if 'torrents_dir' in parameters and parameters['torrents_dir'] > 0: + torrents_dir = parameters['torrents_dir'][0] + if not os.path.isabs(torrents_dir): + request.setResponseCode(http.BAD_REQUEST) + return json.dumps({"error": "the torrents_dir should point to a directory"}) + + recursive = False + if 'recursive' in parameters and parameters['recursive'] > 0: + recursive = parameters['recursive'][0] + if not torrents_dir: + request.setResponseCode(http.BAD_REQUEST) + return json.dumps({"error": "the torrents_dir parameter should be provided when the recursive " + "parameter is set"}) + + if torrents_dir: + torrents_list, errors_list = my_channel.add_torrents_from_dir(torrents_dir, recursive) + return json.dumps({"added": len(torrents_list), "errors": errors_list}) + + if 'torrent' not in parameters or not parameters['torrent']: + request.setResponseCode(http.BAD_REQUEST) + return json.dumps({"error": "torrent parameter missing"}) + + if 'description' not in parameters or not parameters['description']: + extra_info = {} + else: + extra_info = {'description': parameters['description'][0]} + + # Try to parse the torrent data + try: + torrent = base64.b64decode(parameters['torrent'][0]) + torrent_def = TorrentDef.load_from_memory(torrent) + except (TypeError, ValueError): + request.setResponseCode(http.INTERNAL_SERVER_ERROR) + return json.dumps({"error": "invalid torrent file"}) + + try: + my_channel.add_torrent_to_channel(torrent_def, extra_info) + except DuplicateTorrentFileError: + request.setResponseCode(http.INTERNAL_SERVER_ERROR) + return json.dumps({"error": "this torrent already exists in your channel"}) + + return json.dumps({"added": 1}) + + +class MyChannelSpecificTorrentEndpoint(BaseMyChannelEndpoint): + + def __init__(self, session, infohash): + BaseMyChannelEndpoint.__init__(self, session) + self.infohash = unhexlify(infohash) + + @db_session + def render_PATCH(self, request): + parameters = http.parse_qs(request.content.read(), 1) + if 'status' not in parameters: + request.setResponseCode(http.BAD_REQUEST) + return json.dumps({"error": "status parameter missing"}) + + my_channel = self.session.lm.mds.ChannelMetadata.get_my_channel() + if not my_channel: + request.setResponseCode(http.NOT_FOUND) + return json.dumps({"error": "your channel has not been created"}) + + torrent = my_channel.get_torrent(self.infohash) + if not torrent: + request.setResponseCode(http.NOT_FOUND) + return json.dumps({"error": "torrent with the specified infohash could not be found"}) + + new_status = int(parameters['status'][0]) + torrent.status = new_status + + return json.dumps({"success": True, "new_status": new_status, "dirty": my_channel.dirty}) + + +class MyChannelCommitEndpoint(BaseMyChannelEndpoint): + + def render_POST(self, request): + with db_session: + my_channel = self.session.lm.mds.ChannelMetadata.get_my_channel() + if not my_channel: + request.setResponseCode(http.NOT_FOUND) + return json.dumps({"error": "your channel has not been created"}) + + torrent_dict = my_channel.commit_channel_torrent() + if torrent_dict: + self.session.lm.gigachannel_manager.updated_my_channel(TorrentDef.load_from_dict(torrent_dict)) + + return json.dumps({"success": True}) diff --git 
a/Tribler/Core/Modules/restapi/rest_manager.py b/Tribler/Core/Modules/restapi/rest_manager.py index 7cf9ed9fef2..d1178199ac7 100644 --- a/Tribler/Core/Modules/restapi/rest_manager.py +++ b/Tribler/Core/Modules/restapi/rest_manager.py @@ -3,15 +3,16 @@ import logging import os from traceback import format_tb + from twisted.internet import reactor from twisted.internet.defer import maybeDeferred from twisted.internet.error import CannotListenError from twisted.python.compat import intToBytes from twisted.python.failure import Failure -from twisted.web import server, http +from twisted.web import http, server -from Tribler.Core.Modules.restapi.root_endpoint import RootEndpoint import Tribler.Core.Utilities.json_util as json +from Tribler.Core.Modules.restapi.root_endpoint import RootEndpoint from Tribler.pyipv8.ipv8.taskmanager import TaskManager @@ -48,6 +49,8 @@ def start(self): except CannotListenError: bind_attempts += 1 + self._logger.info("Starting REST API on port %d", self.site.port) + # REST Manager does not accept any new requests if Tribler is shutting down. # Note that environment variable 'TRIBLER_SHUTTING_DOWN' is set to 'TRUE' (string) # when shutdown has started. Also see RESTRequest.process() method below. diff --git a/Tribler/Core/Modules/restapi/root_endpoint.py b/Tribler/Core/Modules/restapi/root_endpoint.py index c813cebd228..e2ec46a4c05 100644 --- a/Tribler/Core/Modules/restapi/root_endpoint.py +++ b/Tribler/Core/Modules/restapi/root_endpoint.py @@ -1,20 +1,21 @@ +from __future__ import absolute_import + from twisted.web import resource -from Tribler.Core.Modules.restapi.channels.channels_endpoint import ChannelsEndpoint -from Tribler.Core.Modules.restapi.channels.my_channel_endpoint import MyChannelEndpoint from Tribler.Core.Modules.restapi.create_torrent_endpoint import CreateTorrentEndpoint from Tribler.Core.Modules.restapi.debug_endpoint import DebugEndpoint from Tribler.Core.Modules.restapi.downloads_endpoint import DownloadsEndpoint from Tribler.Core.Modules.restapi.events_endpoint import EventsEndpoint from Tribler.Core.Modules.restapi.libtorrent_endpoint import LibTorrentEndpoint from Tribler.Core.Modules.restapi.market_endpoint import MarketEndpoint +from Tribler.Core.Modules.restapi.metadata_endpoint import MetadataEndpoint +from Tribler.Core.Modules.restapi.mychannel_endpoint import MyChannelEndpoint from Tribler.Core.Modules.restapi.search_endpoint import SearchEndpoint from Tribler.Core.Modules.restapi.settings_endpoint import SettingsEndpoint from Tribler.Core.Modules.restapi.shutdown_endpoint import ShutdownEndpoint from Tribler.Core.Modules.restapi.state_endpoint import StateEndpoint from Tribler.Core.Modules.restapi.statistics_endpoint import StatisticsEndpoint from Tribler.Core.Modules.restapi.torrentinfo_endpoint import TorrentInfoEndpoint -from Tribler.Core.Modules.restapi.torrents_endpoint import TorrentsEndpoint from Tribler.Core.Modules.restapi.trustchain_endpoint import TrustchainEndpoint from Tribler.Core.Modules.restapi.wallets_endpoint import WalletsEndpoint from Tribler.pyipv8.ipv8.REST.root_endpoint import RootEndpoint as IPV8RootEndpoint @@ -45,16 +46,22 @@ def start_endpoints(self): This method is only called when Tribler has started. It enables the other endpoints that are dependent on a fully started Tribler. 
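The mounting pattern used by the handler dictionary below, reduced to a self-contained sketch (the endpoint name and response are invented for illustration):

.. sourcecode:: python

    from twisted.web import resource

    class PingEndpoint(resource.Resource):
        isLeaf = True

        def render_GET(self, request):
            return '{"pong": true}'

    root = resource.Resource()
    # One Resource subclass per URL path, exactly like the child_handler_dict loop
    root.putChild("ping", PingEndpoint())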
""" - child_handler_dict = {"settings": SettingsEndpoint, "downloads": DownloadsEndpoint, - "createtorrent": CreateTorrentEndpoint, "torrents": TorrentsEndpoint, - "debug": DebugEndpoint, "shutdown": ShutdownEndpoint, "trustchain": TrustchainEndpoint, - "statistics": StatisticsEndpoint, "torrentinfo": TorrentInfoEndpoint, - "market": MarketEndpoint, "wallets": WalletsEndpoint, "libtorrent": LibTorrentEndpoint} - - if self.session.config.get_megacache_enabled(): - child_handler_dict["search"] = SearchEndpoint - child_handler_dict["channels"] = ChannelsEndpoint - child_handler_dict["mychannel"] = MyChannelEndpoint + child_handler_dict = { + "settings": SettingsEndpoint, + "downloads": DownloadsEndpoint, + "createtorrent": CreateTorrentEndpoint, + "debug": DebugEndpoint, + "shutdown": ShutdownEndpoint, + "trustchain": TrustchainEndpoint, + "statistics": StatisticsEndpoint, + "market": MarketEndpoint, + "wallets": WalletsEndpoint, + "libtorrent": LibTorrentEndpoint, + "torrentinfo": TorrentInfoEndpoint, + "metadata": MetadataEndpoint, + "mychannel": MyChannelEndpoint, + "search": SearchEndpoint + } for path, child_cls in child_handler_dict.iteritems(): self.putChild(path, child_cls(self.session)) diff --git a/Tribler/Core/Modules/restapi/search_endpoint.py b/Tribler/Core/Modules/restapi/search_endpoint.py index 8a266c29a69..3bf78c3f619 100644 --- a/Tribler/Core/Modules/restapi/search_endpoint.py +++ b/Tribler/Core/Modules/restapi/search_endpoint.py @@ -1,17 +1,18 @@ +from __future__ import absolute_import + import logging from pony.orm import db_session + from twisted.web import http, resource -from Tribler.Core.Modules.restapi.util import convert_channel_metadata_to_tuple, convert_torrent_metadata_to_tuple -from Tribler.Core.Utilities.search_utils import split_into_keywords -from Tribler.Core.exceptions import OperationNotEnabledByConfigurationException -from Tribler.Core.simpledefs import NTFY_CHANNELCAST, NTFY_TORRENTS, SIGNAL_TORRENT, SIGNAL_ON_SEARCH_RESULTS, \ - SIGNAL_CHANNEL import Tribler.Core.Utilities.json_util as json +from Tribler.Core.Modules.MetadataStore.serialization import CHANNEL_TORRENT, REGULAR_TORRENT +from Tribler.Core.Modules.restapi.metadata_endpoint import BaseMetadataEndpoint +from Tribler.util import cast_to_unicode_utf8 -class SearchEndpoint(resource.Resource): +class SearchEndpoint(BaseMetadataEndpoint): """ This endpoint is responsible for searching in channels and torrents present in the local Tribler database. It also fires a remote search in the Dispersy communities. @@ -21,83 +22,92 @@ def __init__(self, session): resource.Resource.__init__(self) self.session = session self.events_endpoint = None - self.channel_db_handler = self.session.open_dbhandler(NTFY_CHANNELCAST) - self.torrent_db_handler = self.session.open_dbhandler(NTFY_TORRENTS) self._logger = logging.getLogger(self.__class__.__name__) self.putChild("completions", SearchCompletionsEndpoint(session)) + @staticmethod + def convert_datatype_param_to_search_scope(data_type): + return {'': [REGULAR_TORRENT, CHANNEL_TORRENT], + "channel": CHANNEL_TORRENT, + "torrent": REGULAR_TORRENT}.get(data_type) + + @staticmethod + def sanitize_parameters(parameters): + sanitized = BaseMetadataEndpoint.sanitize_parameters(parameters) + sanitized['metadata_type'] = SearchEndpoint.convert_datatype_param_to_search_scope( + parameters['metadata_type'][0] if 'metadata_type' in parameters else '') + return sanitized + def render_GET(self, request): """ .. 
http:get:: /search?q=(string:query) - A GET request to this endpoint will create a search. Results are returned over the events endpoint, one by one. - First, the results available in the local database will be pushed. After that, incoming Dispersy results are - pushed. The query to this endpoint is passed using the url, i.e. /search?q=pioneer. + A GET request to this endpoint will create a search. + + first and last options limit the range of the query. + hide_xxx option drops XXX entries from the results + sort_by option sorts results on a column name (e.g. "name" or "size"); sort_asc selects the direction + filter option uses FTS search on the chosen word* terms + metadata_type option limits the query to certain metadata types (e.g. "torrent" or "channel") **Example request**: .. sourcecode:: none - curl -X GET http://localhost:8085/search?q=tribler + curl -X GET 'http://localhost:8085/search?filter=ubuntu&first=0&last=30&metadata_type=torrent&sort_by=size' **Example response**: .. sourcecode:: javascript { - "type": "search_result_channel", - "query": "test", - "result": { - "id": 3, - "dispersy_cid": "da69aaad39ccf468aba2ab9177d5f8d8160135e6", - "name": "My fancy channel", - "description": "A description of this fancy channel", - "subscribed": True, - "votes": 23, - "torrents": 3, - "spam": 5, - "modified": 14598395, - "can_edit": False - } + "results":[ + { + "commit_status":1, + "num_leechers":0, + "date":"1539867830.0", + "relevance_score":0, + "id":21, + "size":923795456, + "category":"unknown", + "public_key":"4c69624e...", + "name":"ubuntu-18.10-live-server-amd64.iso", + "last_tracker_check":0, + "infohash":"8c4adbf9ebe66f1d804fb6a4fb9b74966c3ab609", + "num_seeders":0, + "type":"torrent" + }, + ... + ], + "first":0, + "last":30, + "sort_by":"size", + "sort_asc":true, + "total":1 } """ - if 'q' not in request.args: - request.setResponseCode(http.BAD_REQUEST) - return json.dumps({"error": "query parameter missing"}) - - # Notify the events endpoint that we are starting a new search query - self.events_endpoint.start_new_query() - # We first search the local database for torrents and channels - query = unicode(request.args['q'][0], 'utf-8') - keywords = split_into_keywords(query) + sanitized = SearchEndpoint.sanitize_parameters(request.args) - results_local_channels = self.channel_db_handler.search_in_local_channels_db(query) - with db_session: - results_local_channels.extend(map(convert_channel_metadata_to_tuple, - self.session.lm.mds.ChannelMetadata.search_keyword(query))) + if not sanitized["query_filter"]: + request.setResponseCode(http.BAD_REQUEST) + return json.dumps({"error": "filter parameter missing"}) - results_dict = {"keywords": keywords, "result_list": results_local_channels} - self.session.notifier.notify(SIGNAL_CHANNEL, SIGNAL_ON_SEARCH_RESULTS, None, results_dict) + if not sanitized["metadata_type"]: + request.setResponseCode(http.BAD_REQUEST) + return json.dumps({"error": "Trying to query for unknown type of metadata"}) - torrent_db_columns = ['T.torrent_id', 'infohash', 'T.name', 'length', 'category', - 'num_seeders', 'num_leechers', 'last_tracker_check'] - results_local_torrents = self.torrent_db_handler.search_in_local_torrents_db(query, keys=torrent_db_columns) with db_session: - results_local_torrents.extend(map(convert_torrent_metadata_to_tuple, - self.session.lm.mds.TorrentMetadata.search_keyword(query))) - results_dict = {"keywords": keywords, "result_list": results_local_torrents} - self.session.notifier.notify(SIGNAL_TORRENT, SIGNAL_ON_SEARCH_RESULTS, None, results_dict) - - # Create remote searches -
try: - self.session.search_remote_torrents(keywords) - self.session.search_remote_channels(keywords) - except OperationNotEnabledByConfigurationException as exc: - self._logger.error(exc) + pony_query, total = self.session.lm.mds.TorrentMetadata.get_entries(**sanitized) + search_results = [(dict(type={REGULAR_TORRENT: 'torrent', CHANNEL_TORRENT: 'channel'}[r.metadata_type], + **(r.to_simple_dict()))) for r in pony_query] - return json.dumps({"queried": True}) + return json.dumps({ + "results": search_results, + "first": sanitized["first"], + "last": sanitized["last"], + "sort_by": sanitized["sort_by"], + "sort_asc": sanitized["sort_asc"], + "total": total + }) class SearchCompletionsEndpoint(resource.Resource): @@ -108,7 +118,6 @@ class SearchCompletionsEndpoint(resource.Resource): def __init__(self, session): resource.Resource.__init__(self) self.session = session - self.torrent_db_handler = self.session.open_dbhandler(NTFY_TORRENTS) def render_GET(self, request): """ @@ -136,7 +145,7 @@ def render_GET(self, request): request.setResponseCode(http.BAD_REQUEST) return json.dumps({"error": "query parameter missing"}) - keywords = unicode(request.args['q'][0], 'utf-8').lower() - results = self.torrent_db_handler.getAutoCompleteTerms(keywords, max_terms=5) - results.extend(self.session.lm.mds.TorrentMetadata.get_auto_complete_terms(keywords, max_terms=5)) + keywords = cast_to_unicode_utf8(request.args['q'][0]).lower() + # TODO: add XXX filtering for completion terms + results = self.session.lm.mds.TorrentMetadata.get_auto_complete_terms(keywords, max_terms=5) return json.dumps({"completions": results}) diff --git a/Tribler/Core/Modules/restapi/statistics_endpoint.py b/Tribler/Core/Modules/restapi/statistics_endpoint.py index 0d400f8d2d7..c822eb1e065 100644 --- a/Tribler/Core/Modules/restapi/statistics_endpoint.py +++ b/Tribler/Core/Modules/restapi/statistics_endpoint.py @@ -13,7 +13,6 @@ def __init__(self, session): child_handler_dict = { "tribler": StatisticsTriblerEndpoint, - "dispersy": StatisticsDispersyEndpoint, "ipv8": StatisticsIPv8Endpoint, } @@ -64,50 +63,6 @@ def render_GET(self, request): return json.dumps({'tribler_statistics': self.session.get_tribler_statistics()}) -class StatisticsDispersyEndpoint(resource.Resource): - """ - This class handles requests regarding Dispersy statistics. - """ - - def __init__(self, session): - resource.Resource.__init__(self) - self.session = session - - def render_GET(self, request): - """ - .. http:get:: /statistics/dispersy - - A GET request to this endpoint returns general statistics in Dispersy. - The returned runtime is the amount of seconds that Dispersy is active. The total uploaded and total downloaded - statistics are in bytes. - - **Example request**: - - .. sourcecode:: none - - curl -X GET http://localhost:8085/statistics/dispersy - - **Example response**: - - .. sourcecode:: javascript - - { - "dispersy_statistics": { - "wan_address": "123.321.456.654:1234", - "lan_address": "192.168.1.2:1435", - "connection": "unknown", - "runtime": 859.34, - "total_downloaded": 538.53, - "total_uploaded": 983.24, - "packets_sent": 43, - "packets_received": 89, - ... - } - } - """ - return json.dumps({'dispersy_statistics': self.session.get_dispersy_statistics()}) - - class StatisticsIPv8Endpoint(resource.Resource): """ This class handles requests regarding IPv8 statistics. 
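With the Dispersy statistics route gone, clients should fall back to the remaining IPv8 route; a minimal sketch (assuming the default port, and that the route answers with a JSON body like its sibling endpoints; the exact key layout is not shown in this patch):

.. sourcecode:: python

    import json
    import urllib2

    ipv8_stats = json.load(urllib2.urlopen("http://localhost:8085/statistics/ipv8"))
    print ipv8_stats  # key layout depends on the running IPv8 session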
diff --git a/Tribler/Core/Modules/restapi/torrentinfo_endpoint.py b/Tribler/Core/Modules/restapi/torrentinfo_endpoint.py index a9955514c7a..d1abc1b9e67 100644 --- a/Tribler/Core/Modules/restapi/torrentinfo_endpoint.py +++ b/Tribler/Core/Modules/restapi/torrentinfo_endpoint.py @@ -2,12 +2,14 @@ import hashlib import logging + from libtorrent import bdecode, bencode from six import text_type from six.moves.urllib.request import url2pathname + from twisted.internet.defer import Deferred -from twisted.internet.error import DNSLookupError, ConnectError, ConnectionLost +from twisted.internet.error import ConnectError, ConnectionLost, DNSLookupError from twisted.web import http, resource from twisted.web.server import NOT_DONE_YET @@ -15,9 +17,9 @@ from Tribler.Core.Modules.MetadataStore.OrmBindings.channel_metadata import BLOB_EXTENSION from Tribler.Core.Modules.MetadataStore.serialization import CHANNEL_TORRENT, \ REGULAR_TORRENT, read_payload -from Tribler.Core.TorrentDef import TorrentDef from Tribler.Core.Utilities.utilities import fix_torrent, http_get, parse_magnetlink, unichar_string from Tribler.Core.exceptions import HttpError, InvalidSignatureException +from Tribler.util import cast_to_unicode_utf8 class TorrentInfoEndpoint(resource.Resource): @@ -65,23 +67,12 @@ def on_got_metainfo(metainfo): self.finish_request(request) return + # TODO(Martijn): store the stuff in a database!!! infohash = hashlib.sha1(bencode(metainfo['info'])).digest() # Check if the torrent is already in the downloads metainfo['download_exists'] = infohash in self.session.lm.downloads - # Update the torrent database with metainfo if it is an unnamed torrent - if self.session.lm.torrent_db: - self.session.lm.torrent_db.update_torrent_with_metainfo(infohash, metainfo) - self.session.lm.torrent_db._db.commit_now() - - # Save the torrent to our store - try: - self.session.save_collected_torrent(infohash, bencode(metainfo)) - except TypeError: - # Note: in libtorrent 1.1.1, bencode throws a TypeError which is a known bug - pass - request.write(json.dumps({"metainfo": metainfo}, ensure_ascii=False)) self.finish_request(request) @@ -140,15 +131,6 @@ def on_magnet(mlink=None): request.setResponseCode(http.BAD_REQUEST) return json.dumps({"error": "missing infohash"}) - if self.session.has_collected_torrent(infohash): - try: - tdef = TorrentDef.load_from_memory(self.session.get_collected_torrent(infohash)) - except ValueError as exc: - request.setResponseCode(http.INTERNAL_SERVER_ERROR) - return json.dumps({"error": "invalid torrent file: %s" % str(exc)}) - on_got_metainfo(tdef.get_metainfo()) - return NOT_DONE_YET - self.session.lm.ltmgr.get_metainfo(mlink or uri, callback=metainfo_deferred.callback, timeout=20, timeout_callback=on_metainfo_timeout, notify=True) return NOT_DONE_YET @@ -160,7 +142,7 @@ def on_magnet(mlink=None): request.setResponseCode(http.BAD_REQUEST) return json.dumps({"error": "uri parameter missing"}) - uri = unicode(request.args['uri'][0], 'utf-8') + uri = cast_to_unicode_utf8(request.args['uri'][0]) if uri.startswith('file:'): return on_file() diff --git a/Tribler/Core/Modules/restapi/torrents_endpoint.py b/Tribler/Core/Modules/restapi/torrents_endpoint.py deleted file mode 100644 index af868c4b94c..00000000000 --- a/Tribler/Core/Modules/restapi/torrents_endpoint.py +++ /dev/null @@ -1,350 +0,0 @@ -from __future__ import absolute_import - -import logging - -from pony.orm import db_session - -from twisted.web import http, resource -from twisted.web.server import NOT_DONE_YET - -import 
Tribler.Core.Utilities.json_util as json -from Tribler.Core.Modules.restapi.util import convert_db_torrent_to_json -from Tribler.Core.TorrentDef import TorrentDef -from Tribler.Core.simpledefs import NTFY_TORRENTS, NTFY_CHANNELCAST -from Tribler.pyipv8.ipv8.database import database_blob - - -class TorrentsEndpoint(resource.Resource): - - def __init__(self, session): - resource.Resource.__init__(self) - self.session = session - - def getChild(self, path, request): - if path == "random": - return TorrentsRandomEndpoint(self.session) - return SpecificTorrentEndpoint(self.session, path) - - -class TorrentsRandomEndpoint(resource.Resource): - - def __init__(self, session): - resource.Resource.__init__(self) - self.session = session - self.channel_db_handler = self.session.open_dbhandler(NTFY_CHANNELCAST) - self.torrents_db_handler = self.session.open_dbhandler(NTFY_TORRENTS) - - def render_GET(self, request): - """ - .. http:get:: /torrents/random?limit=(int: max nr of torrents) - - A GET request to this endpoint returns random (channel) torrents. - You can optionally specify a limit parameter to limit the maximum number of results. By default, this is 10. - - **Example request**: - - .. sourcecode:: none - - curl -X GET http://localhost:8085/torrents/random?limit=1 - - **Example response**: - - .. sourcecode:: javascript - - { - "torrents": [{ - "id": 4, - "infohash": "97d2d8f5d37e56cfaeaae151d55f05b077074779", - "name": "Ubuntu-16.04-desktop-amd64", - "size": 8592385, - "category": "other", - "num_seeders": 42, - "num_leechers": 184, - "last_tracker_check": 1463176959 - }] - } - """ - limit_torrents = 10 - - if 'limit' in request.args and len(request.args['limit']) > 0: - limit_torrents = int(request.args['limit'][0]) - - if limit_torrents <= 0: - request.setResponseCode(http.BAD_REQUEST) - return json.dumps({"error": "the limit parameter must be a positive number"}) - - torrent_db_columns = ['Torrent.torrent_id', 'infohash', 'Torrent.name', 'length', 'Torrent.category', - 'num_seeders', 'num_leechers', 'last_tracker_check', 'ChannelTorrents.inserted'] - - popular_torrents = self.channel_db_handler.get_random_channel_torrents(torrent_db_columns, limit=limit_torrents) - - results_json = [] - for popular_torrent in popular_torrents: - torrent_json = convert_db_torrent_to_json(popular_torrent) - if (self.session.config.get_family_filter_enabled() and - self.session.lm.category.xxx_filter.isXXX(torrent_json['category'])) \ - or torrent_json['name'] is None \ - or torrent_json['infohash'] in self.session.lm.downloads: - continue - - results_json.append(torrent_json) - - return json.dumps({"torrents": results_json}) - - -class SpecificTorrentEndpoint(resource.Resource): - """ - This class handles requests for a specific torrent. - """ - - def __init__(self, session, infohash): - resource.Resource.__init__(self) - self.session = session - self.infohash = infohash - self.torrent_db_handler = self.session.open_dbhandler(NTFY_TORRENTS) - - self.putChild("health", TorrentHealthEndpoint(self.session, self.infohash)) - self.putChild("trackers", TorrentTrackersEndpoint(self.session, self.infohash)) - - def render_GET(self, request): - """ - .. http:get:: /torrents/(string: torrent infohash) - - Get information of a torrent with a given infohash from a given channel. - - **Example request**: - - .. sourcecode:: none - - curl -X GET http://localhost:8085/torrents/97d2d8f5d37e56cfaeaae151d55f05b077074779 - - **Example response**: - - .. 
sourcecode:: javascript - - { - "id": 4, - "infohash": "97d2d8f5d37e56cfaeaae151d55f05b077074779", - "name": "Ubuntu-16.04-desktop-amd64", - "size": 8592385, - "category": "other", - "num_seeders": 42, - "num_leechers": 184, - "last_tracker_check": 1463176959, - "files": [{"path": "test.txt", "length": 1234}, ...], - "trackers": ["http://tracker.org:8080", ...] - } - - :statuscode 404: if the torrent is not found in the specified channel - """ - torrent_db_columns = ['C.torrent_id', 'infohash', 'name', 'length', 'category', - 'num_seeders', 'num_leechers', 'last_tracker_check'] - torrent_info = self.torrent_db_handler.getTorrent(self.infohash.decode('hex'), keys=torrent_db_columns) - if torrent_info is None: - # Maybe this is a chant torrent? - infohash = self.infohash.decode('hex') - with db_session: - md_list = list(self.session.lm.mds.TorrentMetadata.select(lambda g: - g.infohash == database_blob(infohash))) - if md_list: - torrent_md = md_list[0] # Any MD containing this infohash is fine - # FIXME: replace these placeholder values when Dispersy is gone - torrent_info = { - "C.torrent_id": "", - "name": torrent_md.title, - "length": torrent_md.size, - "category": torrent_md.tags.split(",")[0] or '', - "last_tracker_check": 0, - "num_seeders": 0, - "num_leechers": 0 - } - - if torrent_info is None: - request.setResponseCode(http.NOT_FOUND) - return json.dumps({"error": "Unknown torrent"}) - - torrent_files = [] - for path, length in self.torrent_db_handler.getTorrentFiles(torrent_info['C.torrent_id']): - torrent_files.append({"path": path, "size": length}) - - torrent_json = { - "id": torrent_info['C.torrent_id'], - "infohash": self.infohash, - "name": torrent_info['name'], - "size": torrent_info['length'], - "category": torrent_info['category'], - "num_seeders": torrent_info['num_seeders'] if torrent_info['num_seeders'] else 0, - "num_leechers": torrent_info['num_leechers'] if torrent_info['num_leechers'] else 0, - "last_tracker_check": torrent_info['last_tracker_check'], - "files": torrent_files, - "trackers": self.torrent_db_handler.getTrackerListByTorrentID(torrent_info['C.torrent_id']) - } - - return json.dumps(torrent_json) - - -class TorrentTrackersEndpoint(resource.Resource): - """ - This class is responsible for fetching all trackers of a specific torrent. - """ - - def __init__(self, session, infohash): - resource.Resource.__init__(self) - self.session = session - self.infohash = infohash - self.torrent_db = self.session.open_dbhandler(NTFY_TORRENTS) - - def render_GET(self, request): - """ - .. http:get:: /torrents/(string: torrent infohash)/tracker - - Fetch all trackers of a specific torrent. - - **Example request**: - - .. sourcecode:: none - - curl http://localhost:8085/torrents/97d2d8f5d37e56cfaeaae151d55f05b077074779/trackers - - **Example response**: - - .. sourcecode:: javascript - - { - "trackers": [ - "http://mytracker.com:80/announce", - "udp://fancytracker.org:1337/announce" - ] - } - - :statuscode 404: if the torrent is not found in the database - """ - torrent_info = self.torrent_db.getTorrent(self.infohash.decode('hex'), ['C.torrent_id', 'num_seeders']) - - if torrent_info is None: - request.setResponseCode(http.NOT_FOUND) - return json.dumps({"error": "torrent not found in database"}) - - trackers = self.torrent_db.getTrackerListByInfohash(self.infohash.decode('hex')) - return json.dumps({"trackers": trackers}) - - -class TorrentHealthEndpoint(resource.Resource): - """ - This class is responsible for endpoints regarding the health of a torrent. 
- """ - - def __init__(self, session, infohash): - resource.Resource.__init__(self) - self.session = session - self.infohash = infohash - self.torrent_db = self.session.open_dbhandler(NTFY_TORRENTS) - self._logger = logging.getLogger(self.__class__.__name__) - - def finish_request(self, request): - try: - request.finish() - except RuntimeError: - self._logger.warning("Writing response failed, probably the client closed the connection already.") - - def render_GET(self, request): - """ - .. http:get:: /torrents/(string: torrent infohash)/health - - Fetch the swarm health of a specific torrent. You can optionally specify the timeout to be used in the - connections to the trackers. This is by default 20 seconds. - By default, we will not check the health of a torrent again if it was recently checked. You can force a health - recheck by passing the refresh parameter. - - **Example request**: - - .. sourcecode:: none - - curl http://localhost:8085/torrents/97d2d8f5d37e56cfaeaae151d55f05b077074779/health?timeout=15&refresh=1 - - **Example response**: - - .. sourcecode:: javascript - - { - "http://mytracker.com:80/announce": [{ - "seeders": 43, - "leechers": 20, - "infohash": "97d2d8f5d37e56cfaeaae151d55f05b077074779" - }], - "http://nonexistingtracker.com:80/announce": { - "error": "timeout" - } - } - - :statuscode 404: if the torrent is not found in the database - """ - timeout = 20 - if 'timeout' in request.args: - timeout = int(request.args['timeout'][0]) - - refresh = False - if 'refresh' in request.args and len(request.args['refresh']) > 0 and request.args['refresh'][0] == "1": - refresh = True - - torrent_db_columns = ['C.torrent_id', 'num_seeders', 'num_leechers', 'next_tracker_check'] - torrent_info = self.torrent_db.getTorrent(self.infohash.decode('hex'), torrent_db_columns) - - def on_health_result(result): - request.write(json.dumps({'health': result})) - self.finish_request(request) - - def on_magnet_timeout_error(_): - if not request.finished: - request.setResponseCode(http.NOT_FOUND) - request.write(json.dumps({"error": "torrent not found in database"})) - if not request.finished: - self.finish_request(request) - - def on_request_error(failure): - if not request.finished: - request.setResponseCode(http.BAD_REQUEST) - request.write(json.dumps({"error": failure.getErrorMessage()})) - # If the above request.write failed, the request will have already been finished - if not request.finished: - self.finish_request(request) - - def make_torrent_health_request(): - self.session.check_torrent_health(self.infohash.decode('hex'), timeout=timeout, scrape_now=refresh) \ - .addCallback(on_health_result).addErrback(on_request_error) - - magnet = None - if torrent_info is None: - # Maybe this is a chant torrent? - infohash = self.infohash.decode('hex') - with db_session: - md_list = list(self.session.lm.mds.TorrentMetadata.select(lambda g: - g.infohash == database_blob(infohash))) - if md_list: - torrent_md = md_list[0] # Any MD containing this infohash is fine - magnet = torrent_md.get_magnet() - if 'timeout' in request.args: - timeout = int(request.args['timeout'][0]) - else: - timeout = 50 - - def _add_torrent_and_check(metainfo): - tdef = TorrentDef.load_from_dict(metainfo) - assert (tdef.infohash == infohash), "DHT infohash does not match locally generated one" - self._logger.info("Chant-managed torrent fetched from DHT. 
Adding it to local cache, %s", self.infohash) - self.session.lm.torrent_db.addExternalTorrent(tdef) - self.session.lm.torrent_db._db.commit_now() - make_torrent_health_request() - - if magnet: - # Try to get the torrent from DHT and add it to the local cache - self._logger.info("Chant-managed torrent not in cache. Going to fetch it from DHT, %s", self.infohash) - self.session.lm.ltmgr.get_metainfo(magnet, callback=_add_torrent_and_check, - timeout=timeout, timeout_callback=on_magnet_timeout_error, notify=False) - elif torrent_info is None: - request.setResponseCode(http.NOT_FOUND) - return json.dumps({"error": "torrent not found in database"}) - else: - make_torrent_health_request() - - return NOT_DONE_YET diff --git a/Tribler/Core/Modules/restapi/util.py b/Tribler/Core/Modules/restapi/util.py index 35b19dde460..055ab2abde9 100644 --- a/Tribler/Core/Modules/restapi/util.py +++ b/Tribler/Core/Modules/restapi/util.py @@ -9,8 +9,6 @@ from twisted.web import http import Tribler.Core.Utilities.json_util as json -from Tribler.Core.Modules.MetadataStore.serialization import time2float -from Tribler.Core.Modules.restapi import VOTE_SUBSCRIBE def return_handled_exception(request, exception): @@ -29,109 +27,6 @@ def return_handled_exception(request, exception): }) -def convert_channel_metadata_to_tuple(metadata): - """ - Convert some given channel metadata to a tuple, similar to returned channels from the database. - :param metadata: The metadata to convert. - :return: A tuple with information about the torrent. - """ - # TODO: the values here are totally random temporary placeholders, and should be removed eventually. - votes = 1 - my_vote = 2 - spam = 0 - relevance = 0.9 - unix_timestamp = time2float(metadata.timestamp) - return (metadata.rowid, str(metadata.public_key), metadata.title, metadata.tags, int(metadata.size), votes, spam, - my_vote, unix_timestamp, relevance) - - -def convert_torrent_metadata_to_tuple(metadata, commit_status=None): - """ - Convert some given torrent metadata to a tuple, similar to returned torrents from the database. - :param metadata: The metadata to convert. - :return: A tuple with information about the torrent. - """ - seeders = 0 - leechers = 0 - last_tracker_check = 0 - category = 'unknown' - infohash = str(metadata.infohash) - relevance = 0.9 - - return (metadata.rowid, infohash, metadata.title, int(metadata.size), category, seeders, leechers, - last_tracker_check, None, relevance, commit_status) - - -def convert_search_torrent_to_json(torrent): - """ - Converts a given torrent to a JSON dictionary. Note that the torrent might be either a result from the local - database in which case it is a tuple or a remote search result in which case it is a dictionary. - """ - if isinstance(torrent, dict): - return convert_remote_torrent_to_json(torrent) - return convert_db_torrent_to_json(torrent, include_rel_score=True) - - -def convert_db_channel_to_json(channel, include_rel_score=False): - """ - This method converts a channel in the database to a JSON dictionary. - """ - res_json = {"id": channel[0], "dispersy_cid": channel[1].encode('hex'), "name": channel[2], - "description": channel[3], "votes": channel[5], "torrents": channel[4], "spam": channel[6], - "modified": channel[8], "subscribed": (channel[7] == VOTE_SUBSCRIBE)} - - if include_rel_score: - res_json["relevance_score"] = channel[9] - - return res_json - - -def convert_chant_channel_to_json(channel): - """ - This method converts a chant channel entry to a JSON dictionary. 
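The converters in this (removed) module all assume the positional layout of rows coming from the SQLite handlers. A worked example against convert_db_torrent_to_json, defined below; the sample values are invented:

.. sourcecode:: python

    # Index layout assumed by convert_db_torrent_to_json:
    # 0: id, 1: infohash (binary), 2: name, 3: length, 4: category,
    # 5: num_seeders, 6: num_leechers, 7: last_tracker_check
    # (9: relevance and 10: commit_status are optional trailing fields).
    row = (4, '\x97\xd2' * 10, u"Ubuntu-16.04-desktop-amd64",
           8592385, u"other", 42, 184, 1463176959)

    # convert_db_torrent_to_json(row) then yields:
    # {"id": 4, "infohash": "97d2...", "name": u"Ubuntu-16.04-desktop-amd64",
    #  "size": 8592385, "category": u"other", "num_seeders": 42,
    #  "num_leechers": 184, "last_tracker_check": 1463176959}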
- """ - # TODO: this stuff is mostly placeholder, especially 'modified' field. Should be changed when Dispersy is out. - res_json = {"id": 0, "dispersy_cid": str(channel.public_key).encode('hex'), "name": channel.title, - "description": channel.tags, "votes": channel.votes, "torrents": channel.size, "spam": 0, - "modified": channel.version, "subscribed": channel.subscribed} - - return res_json - - -def convert_db_torrent_to_json(torrent, include_rel_score=False): - """ - This method converts a torrent in the database to a JSON dictionary. - """ - torrent_name = torrent[2] - if torrent_name is None or len(torrent_name.strip()) == 0: - torrent_name = "Unnamed torrent" - - res_json = {"id": torrent[0], "infohash": torrent[1].encode('hex'), "name": torrent_name, "size": torrent[3], - "category": torrent[4], "num_seeders": torrent[5] or 0, "num_leechers": torrent[6] or 0, - "last_tracker_check": torrent[7] or 0} - - if len(torrent) >= 11: - res_json["commit_status"] = torrent[10] - - if include_rel_score: - res_json["relevance_score"] = torrent[9] - - return res_json - - -def convert_remote_torrent_to_json(torrent): - """ - This method converts a torrent that has been received by remote peers in the network to a JSON dictionary. - """ - torrent_name = torrent['name'] - if torrent_name is None or len(torrent_name.strip()) == 0: - torrent_name = "Unnamed torrent" - - return {'id': torrent['torrent_id'], "infohash": torrent['infohash'].encode('hex'), "name": torrent_name, - 'size': torrent['length'], 'category': torrent['category'], 'num_seeders': torrent['num_seeders'], - 'num_leechers': torrent['num_leechers'], 'last_tracker_check': 0} - - def get_parameter(parameters, name): """ Return a specific parameter with a name from a HTTP request (or None if that parameter is not available). diff --git a/Tribler/Core/Modules/search_manager.py b/Tribler/Core/Modules/search_manager.py deleted file mode 100644 index c809ef71e10..00000000000 --- a/Tribler/Core/Modules/search_manager.py +++ /dev/null @@ -1,216 +0,0 @@ -from __future__ import absolute_import - -import logging -import os - -from Tribler.Core.Utilities.search_utils import split_into_keywords -from Tribler.Core.simpledefs import (SIGNAL_SEARCH_COMMUNITY, SIGNAL_ALLCHANNEL_COMMUNITY, SIGNAL_ON_SEARCH_RESULTS, - NTFY_CHANNELCAST, SIGNAL_TORRENT, SIGNAL_CHANNEL) -from Tribler.pyipv8.ipv8.taskmanager import TaskManager - - -class SearchManager(TaskManager): - - def __init__(self, session): - super(SearchManager, self).__init__() - self._logger = logging.getLogger(self.__class__.__name__) - self.session = session - self.dispersy = None - self.channelcast_db = None - - self._current_keywords = None - - def initialize(self): - self.dispersy = self.session.lm.dispersy - self.channelcast_db = self.session.open_dbhandler(NTFY_CHANNELCAST) - - self.session.add_observer(self._on_torrent_search_results, - SIGNAL_SEARCH_COMMUNITY, [SIGNAL_ON_SEARCH_RESULTS]) - self.session.add_observer(self._on_channel_search_results, - SIGNAL_ALLCHANNEL_COMMUNITY, [SIGNAL_ON_SEARCH_RESULTS]) - - def shutdown(self): - self.shutdown_task_manager() - self.channelcast_db = None - self.dispersy = None - self.session = None - - def search_for_torrents(self, keywords): - """ - Searches for torrents using SearchCommunity with the given keywords. - :param keywords: The given keywords. 
- """ - nr_requests_made = 0 - if self.dispersy is None: - return nr_requests_made - - # TODO remove when we remove Dispersy - from Tribler.community.search.community import SearchCommunity - for community in self.dispersy.get_communities(): - if isinstance(community, SearchCommunity): - self._current_keywords = keywords - nr_requests_made = community.create_search(keywords) - if not nr_requests_made: - self._logger.warn("Could not send search in SearchCommunity, no verified candidates found") - break - - self._current_keywords = keywords - # If popularity community is enabled, send the search request there as well - if self.session.lm.popularity_community: - self.session.lm.popularity_community.send_torrent_search_request(keywords) - - return nr_requests_made - - def _on_torrent_search_results(self, subject, change_type, object_id, search_results): - """ - The callback function handles the search results from SearchCommunity. - :param subject: Must be SIGNAL_SEARCH_COMMUNITY. - :param change_type: Must be SIGNAL_ON_SEARCH_RESULTS. - :param object_id: Must be None. - :param search_results: The result dictionary which has 'keywords', 'results', and 'candidate'. - """ - if self.session is None: - return 0 - - keywords = search_results['keywords'] - results = search_results['results'] - candidate = search_results['candidate'] - - self._logger.debug("Got torrent search results %s, keywords %s, candidate %s", - len(results), keywords, candidate) - - # drop it if these are the results of an old keyword - if keywords != self._current_keywords: - return - - # results is a list of tuples that are: - # (1) infohash, (2) name, (3) length, (4) num_files, (5) category, (6) creation_date, (7) num_seeders - # (8) num_leechers, (9) channel_cid - - remote_torrent_result_list = [] - - # get and cache channels - channel_cid_list = [result[-1] for result in results if result[-1] is not None] - channel_cache_list = self.channelcast_db.getChannelsByCID(channel_cid_list) - channel_cache_dict = {} - for channel in channel_cache_list: - # index 1 is cid - channel_cache_dict[channel[1]] = channel - - # create result dictionaries that are understandable - for result in results: - remote_torrent_result = {'torrent_type': 'remote', # indicates if it is a remote torrent - 'relevance_score': None, - 'torrent_id':-1, - 'infohash': result[0], - 'name': result[1], - 'length': result[2], - 'num_files': result[3], - 'category': result[4][0], - 'creation_date': result[5], - 'num_seeders': result[6], - 'num_leechers': result[7], - 'status': u'good', - 'query_candidates': {candidate}, - 'channel': None} - - channel_cid = result[-1] - if channel_cid is not None and channel_cid in channel_cache_dict: - channel = channel_cache_dict[channel_cid] - channel_result = {'id': channel[0], - 'name': channel[2], - 'description': channel[3], - 'dispersy_cid': channel[1], - 'num_torrents': channel[4], - 'num_favorite': channel[5], - 'num_spam': channel[6], - 'modified': channel[8], - } - remote_torrent_result['channel'] = channel_result - - # guess matches - keyword_set = set(keywords) - swarmname_set = set(split_into_keywords(remote_torrent_result['name'])) - matches = {'fileextensions': set(), - 'swarmname': swarmname_set & keyword_set, # all keywords matching in swarmname - } - matches['filenames'] = keyword_set - matches['swarmname'] # remaining keywords should thus me matching in filenames or fileextensions - - if len(matches['filenames']) == 0: - _, ext = os.path.splitext(result[0]) - ext = ext[1:] - - matches['filenames'] = 
matches['swarmname'] - matches['filenames'].discard(ext) - - if ext in keyword_set: - matches['fileextensions'].add(ext) - - # Find the lowest term position of the matching keywords - pos_score = None - if matches['swarmname']: - swarmnameTerms = split_into_keywords(remote_torrent_result['name']) - swarmnameMatches = matches['swarmname'] - - for i, term in enumerate(swarmnameTerms): - if term in swarmnameMatches: - pos_score = -i - break - - remote_torrent_result['relevance_score'] = [len(matches['swarmname']), - pos_score, - len(matches['filenames']), - len(matches['fileextensions']), - 0] - - # append the result into the result list - remote_torrent_result_list.append(remote_torrent_result) - - results_data = {'keywords': keywords, - 'result_list': remote_torrent_result_list} - # inform other components about the results - self.session.notifier.notify(SIGNAL_TORRENT, SIGNAL_ON_SEARCH_RESULTS, None, results_data) - - def search_for_channels(self, keywords): - """ - Searches for channels using AllChannelCommunity with the given keywords. - :param keywords: The given keywords. - """ - if self.dispersy is None: - return - - #TODO remove when we remove Dispersy - from Tribler.community.allchannel.community import AllChannelCommunity - for community in self.dispersy.get_communities(): - if isinstance(community, AllChannelCommunity): - self._current_keywords = keywords - community.create_channelsearch(keywords) - break - - def _on_channel_search_results(self, subject, change_type, object_id, search_results): - """ - The callback function handles the search results from AllChannelCommunity. - :param subject: Must be SIGNAL_ALLCHANNEL_COMMUNITY. - :param change_type: Must be SIGNAL_ON_SEARCH_RESULTS. - :param object_id: Must be None. - :param search_results: The result dictionary which has 'keywords', 'results', and 'candidate'. - """ - if self.session is None: - return - - keywords = search_results['keywords'] - results = search_results['torrents'] - - self._logger.debug("Got channel search results %s. keywords %s", - len(results), keywords) - - if keywords != self._current_keywords: - return - - channel_cids = results.keys() - channel_results = self.channelcast_db.getChannelsByCID(channel_cids) - - results_data = {'keywords': keywords, - 'result_list': channel_results} - # inform other components about the results - self.session.notifier.notify(SIGNAL_CHANNEL, SIGNAL_ON_SEARCH_RESULTS, None, results_data) diff --git a/Tribler/Core/Modules/tracker_manager.py b/Tribler/Core/Modules/tracker_manager.py index 64966db9c56..afa042250d8 100644 --- a/Tribler/Core/Modules/tracker_manager.py +++ b/Tribler/Core/Modules/tracker_manager.py @@ -1,6 +1,10 @@ +from __future__ import absolute_import + import logging import time +from pony.orm import count, db_session + from Tribler.Core.Utilities.tracker_utils import get_uniformed_tracker_url MAX_TRACKER_FAILURES = 5 # if a tracker fails this amount of times in a row, its 'is_alive' will be marked as 0 (dead). @@ -13,6 +17,10 @@ def __init__(self, session): self._logger = logging.getLogger(self.__class__.__name__) self._session = session + @property + def tracker_store(self): + return self._session.lm.mds.TrackerState + def get_tracker_info(self, tracker_url): """ Gets the tracker information with the given tracker URL. @@ -20,13 +28,17 @@ def get_tracker_info(self, tracker_url): :return: The tracker info dict if exists, None otherwise. 
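The TrackerManager changes below replace hand-written SQL with Pony ORM queries executed inside a db_session. The same pattern as a self-contained sketch; the entity here only mirrors the fields this file touches, while the real TrackerState lives in the metadata store and also carries a torrents relation:

.. sourcecode:: python

    from pony.orm import Database, Required, db_session, select

    db = Database()

    class TrackerState(db.Entity):
        url = Required(str)
        last_check = Required(int, default=0)
        failures = Required(int, default=0)
        alive = Required(bool, default=True)

    db.bind('sqlite', ':memory:')
    db.generate_mapping(create_tables=True)

    with db_session:
        TrackerState(url="http://tracker.example.org/announce")
        hit = select(t for t in TrackerState
                     if t.url == "http://tracker.example.org/announce").first()
        assert hit is not None and hit.alive

The methods below use the equivalent Entity.select(lambda ...) form of the same query API.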
""" sanitized_tracker_url = get_uniformed_tracker_url(tracker_url) if tracker_url != u"DHT" else tracker_url - try: - sql_stmt = u"SELECT tracker_id, tracker, last_check, failures, is_alive FROM TrackerInfo WHERE tracker = ?" - result = self._session.sqlite_db.execute(sql_stmt, (sanitized_tracker_url,)).next() - except StopIteration: - return None - return {u'id': result[0], u'last_check': result[2], u'failures': result[3], u'is_alive': bool(result[4])} + with db_session: + tracker = list(self.tracker_store.select(lambda g: g.url == sanitized_tracker_url)) + if tracker: + return { + u'id': tracker[0].url, + u'last_check': tracker[0].last_check, + u'failures': tracker[0].failures, + u'is_alive': tracker[0].alive + } + return None def add_tracker(self, tracker_url): """ @@ -38,24 +50,18 @@ def add_tracker(self, tracker_url): self._logger.warn(u"skip invalid tracker: %s", repr(tracker_url)) return - sql_stmt = u"SELECT COUNT() FROM TrackerInfo WHERE tracker = ?" - num = self._session.sqlite_db.execute(sql_stmt, (sanitized_tracker_url,)).next()[0] - if num > 0: - self._logger.debug(u"skip existing tracker: %s", repr(tracker_url)) - return - - # add the tracker into dict and database - tracker_info = {u'last_check': 0, - u'failures': 0, - u'is_alive': True} + with db_session: + num = count(g for g in self.tracker_store if g.url == sanitized_tracker_url) + if num > 0: + self._logger.debug(u"skip existing tracker: %s", repr(tracker_url)) + return - # insert into database - sql_stmt = u"""INSERT INTO TrackerInfo(tracker, last_check, failures, is_alive) VALUES(?,?,?,?); - SELECT tracker_id FROM TrackerInfo WHERE tracker = ?; - """ - value_tuple = (sanitized_tracker_url, tracker_info[u'last_check'], tracker_info[u'failures'], - tracker_info[u'is_alive'], sanitized_tracker_url) - self._session.sqlite_db.execute(sql_stmt, value_tuple).next() + # insert into database + self.tracker_store(url=sanitized_tracker_url, + last_check=0, + failures=0, + alive=True, + torrents={}) def remove_tracker(self, tracker_url): """ @@ -65,48 +71,50 @@ def remove_tracker(self, tracker_url): :param tracker_url: The URL of the tracker to be deleted. """ sanitized_tracker_url = get_uniformed_tracker_url(tracker_url) - sql_stmt = u"DELETE FROM TrackerInfo WHERE tracker = ?;" - if sanitized_tracker_url: - self._session.sqlite_db.execute(sql_stmt, (sanitized_tracker_url,)) - else: - self._session.sqlite_db.execute(sql_stmt, (tracker_url,)) + with db_session: + options = self.tracker_store.select(lambda g: g.url in [tracker_url, sanitized_tracker_url]) + for option in options[:]: + option.delete() + + @db_session def update_tracker_info(self, tracker_url, is_successful): """ Updates a tracker information. :param tracker_url: The given tracker_url. :param is_successful: If the check was successful. 
""" - tracker_info = self.get_tracker_info(tracker_url) - if not tracker_info: + + if tracker_url == u"DHT": + return + + sanitized_tracker_url = get_uniformed_tracker_url(tracker_url) + tracker = self.tracker_store.get(lambda g: g.url == sanitized_tracker_url) + + if not tracker: self._logger.error("Trying to update the tracker info of an unknown tracker URL") return current_time = int(time.time()) - failures = 0 if is_successful else tracker_info[u'failures'] + 1 - is_alive = tracker_info[u'failures'] < MAX_TRACKER_FAILURES + failures = 0 if is_successful else tracker.failures + 1 + is_alive = tracker.alive < MAX_TRACKER_FAILURES # update the dict - tracker_info[u'last_check'] = current_time - tracker_info[u'failures'] = failures - tracker_info[u'is_alive'] = is_alive - - # update the database - sql_stmt = u"UPDATE TrackerInfo SET last_check = ?, failures = ?, is_alive = ? WHERE tracker_id = ?" - value_tuple = (tracker_info[u'last_check'], tracker_info[u'failures'], tracker_info[u'is_alive'], - tracker_info[u'id']) - self._session.sqlite_db.execute(sql_stmt, value_tuple) + tracker.last_check = current_time + tracker.failures = failures + tracker.alive = is_alive + @db_session def get_next_tracker_for_auto_check(self): """ Gets the next tracker for automatic tracker-checking. :return: The next tracker for automatic tracker-checking. """ - try: - sql_stmt = u"SELECT tracker FROM TrackerInfo WHERE tracker != 'no-DHT' AND tracker != 'DHT' AND " \ - u"last_check + ? <= strftime('%s','now') AND is_alive = 1 ORDER BY last_check LIMIT 1;" - result = self._session.sqlite_db.execute(sql_stmt, (TRACKER_RETRY_INTERVAL,)).next() - except StopIteration: - return None + tracker = self.tracker_store.select(lambda g: g.url not in ['no-DHT', 'DHT'] + and g.alive + and g.last_check + TRACKER_RETRY_INTERVAL <= int(time.time()))\ + .order_by(self.tracker_store.last_check).limit(1) - return result[0] + if not tracker: + return None + return tracker[0].url diff --git a/Tribler/Core/CacheDB/Notifier.py b/Tribler/Core/Notifier.py similarity index 97% rename from Tribler/Core/CacheDB/Notifier.py rename to Tribler/Core/Notifier.py index 0d003b06cd2..a434a8d7e02 100644 --- a/Tribler/Core/CacheDB/Notifier.py +++ b/Tribler/Core/Notifier.py @@ -42,7 +42,8 @@ def __init__(self): self.observertimers = {} self.observerLock = threading.Lock() - def add_observer(self, func, subject, changeTypes=[NTFY_UPDATE, NTFY_INSERT, NTFY_DELETE], id=None, cache=0): + def add_observer(self, func, subject, changeTypes=None, id=None, cache=0): + changeTypes = changeTypes or [NTFY_UPDATE, NTFY_INSERT, NTFY_DELETE] """ Add observer function which will be called upon certain event Example: diff --git a/Tribler/Core/RemoteTorrentHandler.py b/Tribler/Core/RemoteTorrentHandler.py deleted file mode 100644 index ecafeaea8b5..00000000000 --- a/Tribler/Core/RemoteTorrentHandler.py +++ /dev/null @@ -1,628 +0,0 @@ -""" -Handles the case where the user did a remote query and now selected one of the -returned torrents for download. 
- -Author(s): Niels Zeilemaker -""" -from __future__ import absolute_import - -import logging -import sys -import urllib -from abc import ABCMeta, abstractmethod -from binascii import hexlify, unhexlify -from collections import deque - -from decorator import decorator - -from twisted.internet import reactor -from twisted.internet.task import LoopingCall - -from Tribler.Core.TFTP.handler import METADATA_PREFIX -from Tribler.Core.TorrentDef import TorrentDef -from Tribler.Core.exceptions import LevelDBKeyDeletionException -from Tribler.Core.simpledefs import INFOHASH_LENGTH, NTFY_TORRENTS -from Tribler.pyipv8.ipv8.taskmanager import TaskManager - -TORRENT_OVERFLOW_CHECKING_INTERVAL = 30 * 60 -LOW_PRIO_COLLECTING = 0 -MAGNET_TIMEOUT = 5.0 -MAX_PRIORITY = 1 - -@decorator -def pass_when_stopped(f, self, *argv, **kwargs): - if self.running: - return f(self, *argv, **kwargs) - - -class RemoteTorrentHandler(TaskManager): - - def __init__(self, session): - super(RemoteTorrentHandler, self).__init__() - self._logger = logging.getLogger(self.__class__.__name__) - - self.running = False - - self.torrent_callbacks = {} - self.metadata_callbacks = {} - - self.torrent_requesters = {} - self.torrent_message_requesters = {} - self.magnet_requesters = {} - self.metadata_requester = None - - self.num_torrents = 0 - - self.session = session - self.dispersy = None - self.max_num_torrents = 0 - self.tor_col_dir = None - self.torrent_db = None - - def initialize(self): - self.dispersy = self.session.get_dispersy_instance() - self.max_num_torrents = self.session.config.get_torrent_collecting_max_torrents() - - self.torrent_db = None - if self.session.config.get_megacache_enabled(): - self.torrent_db = self.session.open_dbhandler(NTFY_TORRENTS) - self.__check_overflow() - - self.running = True - - for priority in (0, 1): - self.magnet_requesters[priority] = MagnetRequester(self.session, self, priority) - self.torrent_requesters[priority] = TftpRequester(u"tftp_torrent_%s" % priority, - self.session, self, priority) - self.torrent_message_requesters[priority] = TorrentMessageRequester(self.session, self, priority) - - self.metadata_requester = TftpRequester(u"tftp_metadata_%s" % 0, self.session, self, 0) - - - def shutdown(self): - self.running = False - for requester in self.torrent_requesters.itervalues(): - requester.stop() - self.shutdown_task_manager() - - def set_max_num_torrents(self, max_num_torrents): - self.max_num_torrents = max_num_torrents - - def __check_overflow(self): - def clean_until_done(num_delete, deletions_per_step): - """ - Delete torrents in steps to avoid too much IO at once. - """ - if num_delete > 0: - to_remove = min(num_delete, deletions_per_step) - num_delete -= to_remove - try: - self.torrent_db.freeSpace(to_remove) - self.register_task(u"remote_torrent clean_until_done", - reactor.callLater(5, clean_until_done, num_delete, deletions_per_step)) - except LevelDBKeyDeletionException: - self._logger.error("Failed to remove collected torrents above limit.") - - def torrent_overflow_check(): - """ - Check if we have reached the collected torrent limit and throttle its collection if so. 
- """ - self.num_torrents = self.torrent_db.getNumberCollectedTorrents() - self._logger.debug(u"check overflow: current %d max %d", self.num_torrents, self.max_num_torrents) - - if self.num_torrents > self.max_num_torrents: - num_delete = int(self.num_torrents - self.max_num_torrents * 0.95) - deletions_per_step = max(25, num_delete / 180) - clean_until_done(num_delete, deletions_per_step) - self._logger.info(u"** limit space:: %d %d %d", self.num_torrents, self.max_num_torrents, num_delete) - - self.register_task(u"remote_torrent overflow_check", - LoopingCall(torrent_overflow_check)).start(TORRENT_OVERFLOW_CHECKING_INTERVAL, now=True) - - def schedule_task(self, name, task, delay_time=0.0, *args, **kwargs): - self.register_task(name, reactor.callLater(delay_time, task, *args, **kwargs)) - - def download_torrent(self, candidate, infohash, user_callback=None, priority=1, timeout=None): - assert isinstance(infohash, str), u"infohash has invalid type: %s" % type(infohash) - assert len(infohash) == INFOHASH_LENGTH, u"infohash has invalid length: %s" % len(infohash) - - # fix prio levels to 1 and 0 - priority = min(priority, 1) - - # we use DHT if we don't have candidate - if candidate: - self.torrent_requesters[priority].add_request(infohash, candidate, timeout) - else: - self.magnet_requesters[priority].add_request(infohash) - - if user_callback: - callback = lambda ih = infohash: user_callback(ih) - self.torrent_callbacks.setdefault(infohash, set()).add(callback) - - def save_torrent(self, tdef, callback=None): - infohash = tdef.get_infohash() - infohash_str = hexlify(infohash) - - if self.session.lm.torrent_store is None: - self._logger.error("Torrent store is not loaded") - return - - # TODO(emilon): could we check the database instead of the store? - # Checking if a key is present fetches the whole torrent from disk if its - # not on the writeback cache. - if infohash_str not in self.session.lm.torrent_store: - # save torrent to file - try: - bdata = tdef.encode() - - except Exception as e: - self._logger.error(u"failed to encode torrent %s: %s", infohash_str, e) - return - try: - self.session.lm.torrent_store[infohash_str] = bdata - except Exception as e: - self._logger.error(u"failed to store torrent data for %s, exception was: %s", infohash_str, e) - - # add torrent to database - if self.torrent_db.hasTorrent(infohash): - self.torrent_db.updateTorrent(infohash, is_collected=1) - else: - self.torrent_db.addExternalTorrent(tdef, extra_info={u"is_collected": 1, u"status": u"good"}) - - if callback: - # TODO(emilon): should we catch exceptions from the callback? 
- callback() - - # notify all - self.notify_possible_torrent_infohash(infohash) - - def download_torrentmessage(self, candidate, infohash, user_callback=None, priority=1): - assert isinstance(infohash, str), u"infohash has invalid type: %s" % type(infohash) - assert len(infohash) == INFOHASH_LENGTH, u"infohash has invalid length: %s" % len(infohash) - - if user_callback: - callback = lambda ih = infohash: user_callback(ih) - self.torrent_callbacks.setdefault(infohash, set()).add(callback) - - requester = self.torrent_message_requesters[priority] - - # make request - requester.add_request(infohash, candidate) - self._logger.debug(u"adding torrent messages request: %s %s %s", hexlify(infohash), candidate, priority) - - def has_metadata(self, thumb_hash): - thumb_hash_str = hexlify(thumb_hash) - return thumb_hash_str in self.session.lm.metadata_store - - def get_metadata(self, thumb_hash): - thumb_hash_str = hexlify(thumb_hash) - return self.session.lm.metadata_store[thumb_hash_str] - - def download_metadata(self, candidate, thumb_hash, usercallback=None, timeout=None): - if self.has_metadata(thumb_hash): - return - - if usercallback: - self.metadata_callbacks.setdefault(thumb_hash, set()).add(usercallback) - - self.metadata_requester.add_request(thumb_hash, candidate, timeout, is_metadata=True) - - self._logger.debug(u"added metadata request: %s %s", hexlify(thumb_hash), candidate) - - def save_metadata(self, thumb_hash, data): - # save data to a temporary tarball and extract it to the torrent collecting directory - thumb_hash_str = hexlify(thumb_hash) - if thumb_hash_str not in self.session.lm.metadata_store: - self.session.lm.metadata_store[thumb_hash_str] = data - - # notify about the new metadata - if thumb_hash in self.metadata_callbacks: - for callback in self.metadata_callbacks[thumb_hash]: - reactor.callInThread(callback, hexlify(thumb_hash)) - - del self.metadata_callbacks[thumb_hash] - - def notify_possible_torrent_infohash(self, infohash): - if infohash not in self.torrent_callbacks: - return - - for callback in self.torrent_callbacks[infohash]: - reactor.callInThread(callback, hexlify(infohash)) - - del self.torrent_callbacks[infohash] - - def get_queue_size_stats(self): - def get_queue_size_stats(qname, requesters): - qsize = {} - for requester in requesters.itervalues(): - qsize[requester.priority] = requester.pending_request_queue_size - items = qsize.items() - items.sort() - return {"type": qname, "size_stats": [{"priority": prio, "size": size} for prio, size in items]} - - return [stats_dict for stats_dict in (get_queue_size_stats("TFTP", self.torrent_requesters), - get_queue_size_stats("DHT", self.magnet_requesters), - get_queue_size_stats("Msg", self.torrent_message_requesters))] - - def get_queue_stats(self): - def get_queue_stats(qname, requesters): - pending_requests = success = failed = 0 - for requester in requesters.itervalues(): - pending_requests += requester.pending_request_queue_size - success += requester.requests_succeeded - failed += requester.requests_failed - total_requests = pending_requests + success + failed - - return {"type": qname, "total": total_requests, "success": success, - "pending": pending_requests, "failed": failed} - - return [stats_dict for stats_dict in [get_queue_stats("TFTP", self.torrent_requesters), - get_queue_stats("DHT", self.magnet_requesters), - get_queue_stats("Msg", self.torrent_message_requesters)]] - - def get_bandwidth_stats(self): - def get_bandwidth_stats(qname, requesters): - bw = 0 - for requester in 
requesters.itervalues(): - bw += requester.total_bandwidth - return {"type": qname, "bandwidth": bw} - return [stats_dict for stats_dict in [get_bandwidth_stats("TQueue", self.torrent_requesters), - get_bandwidth_stats("DQueue", self.magnet_requesters)]] - - -class Requester(object): - __metaclass__ = ABCMeta - - REQUEST_INTERVAL = 0.5 - - def __init__(self, name, session, remote_torrent_handler, priority): - self._logger = logging.getLogger(self.__class__.__name__) - self._name = name - self._session = session - self._remote_torrent_handler = remote_torrent_handler - self._priority = priority - - self._pending_request_queue = deque() - - self._requests_succeeded = 0 - self._requests_failed = 0 - self._total_bandwidth = 0 - - self.running = True - - def stop(self): - self._remote_torrent_handler.cancel_pending_task(self._name) - self.running = False - - @property - def priority(self): - return self._priority - - @property - def pending_request_queue_size(self): - return len(self._pending_request_queue) - - @property - def requests_succeeded(self): - return self._requests_succeeded - - @property - def requests_failed(self): - return self._requests_failed - - @property - def total_bandwidth(self): - return self._total_bandwidth - - @pass_when_stopped - def schedule_task(self, task, delay_time=0.0, *args, **kwargs): - """ - Uses RemoteTorrentHandler to schedule a task. - """ - self._remote_torrent_handler.schedule_task(self._name, task, delay_time=delay_time, *args, **kwargs) - - @pass_when_stopped - def _start_pending_requests(self): - """ - Starts pending requests. - """ - if self._remote_torrent_handler.is_pending_task_active(self._name): - return - if self._pending_request_queue: - self.schedule_task(self._do_request, - delay_time=Requester.REQUEST_INTERVAL * (MAX_PRIORITY - self._priority)) - - @abstractmethod - def add_request(self, key, candidate, timeout=None): - """ - Adds a new request. - """ - pass - - @abstractmethod - def _do_request(self): - """ - Starts processing pending requests. 
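The Requester base class above owns the queue, retry and statistics bookkeeping; concrete requesters only implement add_request and _do_request. A minimal subclass, purely illustrative and never part of the code base:

.. sourcecode:: python

    class ExampleRequester(Requester):
        def add_request(self, key, candidate, timeout=None):
            # Queue the key and kick the scheduler, like the real subclasses.
            self._pending_request_queue.append(key)
            self._start_pending_requests()

        def _do_request(self):
            # Drain the queue, counting every request as an instant success.
            while self._pending_request_queue:
                self._pending_request_queue.popleft()
                self._requests_succeeded += 1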
- """ - pass - - -class TorrentMessageRequester(Requester): - - def __init__(self, session, remote_torrent_handler, priority): - super(TorrentMessageRequester, self).__init__(u"torrent_message_requester", - session, remote_torrent_handler, priority) - if sys.platform == "darwin": - # Mac has just 256 fds per process, be less aggressive - self.REQUEST_INTERVAL = 1.0 - - self._source_dict = {} - self._search_community = None - - @pass_when_stopped - def add_request(self, infohash, candidate, timeout=None): - addr = candidate.sock_addr - queue_was_empty = len(self._pending_request_queue) == 0 - - if infohash in self._source_dict and candidate in self._source_dict[infohash]: - self._logger.debug(u"already has request %s from %s:%s, skip", hexlify(infohash), addr[0], addr[1]) - - if infohash not in self._pending_request_queue: - self._pending_request_queue.append(infohash) - self._source_dict[infohash] = [] - if candidate in self._source_dict[infohash]: - self._logger.warn(u"ignore duplicate torrent message request %s from %s:%s", - hexlify(infohash), addr[0], addr[1]) - return - - self._source_dict[infohash].append(candidate) - self._logger.debug(u"added request %s from %s:%s", hexlify(infohash), addr[0], addr[1]) - - # start scheduling tasks if the queue was empty, which means there was no task running previously - if queue_was_empty: - self._start_pending_requests() - - @pass_when_stopped - def _do_request(self): - # find search community - if not self._search_community: - for community in self._session.lm.dispersy.get_communities(): - from Tribler.community.search.community import SearchCommunity - if isinstance(community, SearchCommunity): - self._search_community = community - break - if not self._search_community: - self._logger.error(u"no SearchCommunity found.") - return - - # requesting messages - while self._pending_request_queue: - infohash = self._pending_request_queue.popleft() - - for candidate in self._source_dict[infohash]: - self._logger.debug(u"requesting torrent message %s from %s:%s", - hexlify(infohash), candidate.sock_addr[0], candidate.sock_addr[1]) - self._search_community.create_torrent_request(infohash, candidate) - - del self._source_dict[infohash] - - -class MagnetRequester(Requester): - - MAX_CONCURRENT = 1 - TIMEOUT = 30.0 - - def __init__(self, session, remote_torrent_handler, priority): - super(MagnetRequester, self).__init__(u"magnet_requester", session, remote_torrent_handler, priority) - if sys.platform == "darwin": - # Mac has just 256 fds per process, be less aggressive - self.REQUEST_INTERVAL = 15.0 - - if priority <= 1 and not sys.platform == "darwin": - self.MAX_CONCURRENT = 3 - - self._torrent_db_handler = session.open_dbhandler(NTFY_TORRENTS) - - self._running_requests = [] - - @pass_when_stopped - def add_request(self, infohash, candidate=None, timeout=None): - queue_was_empty = len(self._pending_request_queue) == 0 - if infohash not in self._pending_request_queue and infohash not in self._running_requests: - self._pending_request_queue.append(infohash) - - # start scheduling tasks if the queue was empty, which means there was no task running previously - if queue_was_empty: - self._start_pending_requests() - - @pass_when_stopped - def _do_request(self): - while self._pending_request_queue and self.running: - if len(self._running_requests) >= self.MAX_CONCURRENT: - self._logger.debug(u"max concurrency %s reached, request later", self.MAX_CONCURRENT) - return - - infohash = self._pending_request_queue.popleft() - infohash_str = hexlify(infohash) - - # 
try magnet link - magnetlink = "magnet:?xt=urn:btih:" + infohash_str - - # see if we know any trackers for this magnet - trackers = self._torrent_db_handler.getTrackerListByInfohash(infohash) - for tracker in trackers: - if tracker not in (u"no-DHT", u"DHT"): - magnetlink += "&tr=" + urllib.quote_plus(tracker) - - self._logger.debug(u"requesting %s priority %s through magnet link %s", - infohash_str, self._priority, magnetlink) - - self._session.lm.ltmgr.get_metainfo(magnetlink, self._success_callback, - timeout=self.TIMEOUT, timeout_callback=self._failure_callback) - self._running_requests.append(infohash) - - def _success_callback(self, meta_info): - """ - The callback that will be called by LibtorrentMgr when a download was successful. - """ - tdef = TorrentDef.load_from_dict(meta_info) - assert tdef.get_infohash() in self._running_requests - - infohash = tdef.get_infohash() - self._logger.debug(u"received torrent %s through magnet", hexlify(infohash)) - - self._remote_torrent_handler.save_torrent(tdef) - self._running_requests.remove(infohash) - - self._requests_succeeded += 1 - self._total_bandwidth += tdef.get_torrent_size() - - self._start_pending_requests() - - def _failure_callback(self, infohash): - """ - The callback that will be called by LibtorrentMgr when a download failed. - """ - if infohash not in self._running_requests: - self._logger.debug(u"++ failed INFOHASH: %s", hexlify(infohash)) - for ih in self._running_requests: - self._logger.debug(u"++ INFOHASH in running_requests: %s", hexlify(ih)) - - self._logger.debug(u"failed to retrieve torrent %s through magnet", hexlify(infohash)) - self._running_requests.remove(infohash) - - self._requests_failed += 1 - - self._start_pending_requests() - - -class TftpRequester(Requester): - - def __init__(self, name, session, remote_torrent_handler, priority): - super(TftpRequester, self).__init__(name, session, remote_torrent_handler, priority) - - self.REQUEST_INTERVAL = 5.0 - - self._active_request_list = [] - self._untried_sources = {} - self._tried_sources = {} - - @pass_when_stopped - def add_request(self, key, candidate, timeout=None, is_metadata=False): - ip, port = candidate.sock_addr - # no binary for keys - if is_metadata: - key = "%s%s" % (METADATA_PREFIX, hexlify(key)) - key_str = key - else: - key = hexlify(key) - key_str = hexlify(key) - - if key in self._pending_request_queue or key in self._active_request_list: - # append to the active one - if candidate in self._untried_sources[key] or candidate in self._tried_sources[key]: - self._logger.debug(u"already has request %s from %s:%s, skip", key_str, ip, port) - return - - self._untried_sources[key].append(candidate) - self._logger.debug(u"appending to existing request: %s from %s:%s", key_str, ip, port) - - else: - # new request - self._logger.debug(u"adding new request: %s from %s:%s", key_str, ip, port) - self._pending_request_queue.append(key) - self._untried_sources[key] = deque([candidate]) - self._tried_sources[key] = deque() - - # start pending tasks if there is no task running - if not self._active_request_list: - self._start_pending_requests() - - @pass_when_stopped - def _do_request(self): - assert not self._active_request_list, "active_request_list is not empty = %s" % repr(self._active_request_list) - - # starts to download a torrent - key = self._pending_request_queue.popleft() - - candidate = self._untried_sources[key].popleft() - self._tried_sources[key].append(candidate) - - ip, port = candidate.sock_addr - - if key.startswith(METADATA_PREFIX): - # 
metadata requests has a METADATA_PREFIX prefix - thumb_hash = unhexlify(key[len(METADATA_PREFIX):]) - file_name = key - extra_info = {u'key': key, u'thumb_hash': thumb_hash} - else: - # key is the hexlified info hash - info_hash = unhexlify(key) - file_name = hexlify(info_hash) + u'.torrent' - extra_info = {u'key': key, u'info_hash': info_hash} - - self._logger.debug(u"start TFTP download for %s from %s:%s", file_name, ip, port) - - # do not download if TFTP has been shutdown - if self._session.lm.tftp_handler is None: - return - self._session.lm.tftp_handler.download_file(file_name, ip, port, extra_info=extra_info, - success_callback=self._on_download_successful, - failure_callback=self._on_download_failed) - self._active_request_list.append(key) - - def _clear_active_request(self, key): - del self._untried_sources[key] - del self._tried_sources[key] - self._active_request_list.remove(key) - - def _on_download_successful(self, address, file_name, file_data, extra_info): - self._logger.debug(u"successfully downloaded %s from %s:%s", file_name, address[0], address[1]) - - key = extra_info[u'key'] - info_hash = extra_info.get(u"info_hash") - thumb_hash = extra_info.get(u"thumb_hash") - - assert key in self._active_request_list, u"key = %s, active_request_list = %s" % (repr(key), - self._active_request_list) - - self._requests_succeeded += 1 - self._total_bandwidth += len(file_data) - - # save data - try: - if info_hash is not None: - # save torrent - tdef = TorrentDef.load_from_memory(file_data) - self._remote_torrent_handler.save_torrent(tdef) - elif thumb_hash is not None: - # save metadata - self._remote_torrent_handler.save_metadata(thumb_hash, file_data) - except ValueError: - self._logger.warning("Remote peer sent us invalid (torrent) content over TFTP socket, ignoring it.") - finally: - # start the next request - self._clear_active_request(key) - self._start_pending_requests() - - def _on_download_failed(self, address, file_name, error_msg, extra_info): - self._logger.debug(u"failed to download %s from %s:%s: %s", file_name, address[0], address[1], error_msg) - - key = extra_info[u'key'] - assert key in self._active_request_list, u"key = %s, active_request_list = %s" % (repr(key), - self._active_request_list) - - self._requests_failed += 1 - - if self._untried_sources[key]: - # try to download this data from another candidate - self._logger.debug(u"scheduling next try for %s", repr(key)) - - self._pending_request_queue.appendleft(key) - self._active_request_list.remove(key) - self.schedule_task(self._do_request) - - else: - # no more available candidates, download the next requested infohash - self._clear_active_request(key) - self._start_pending_requests() diff --git a/Tribler/Core/Session.py b/Tribler/Core/Session.py index 4980a8d8fd7..ea04ab1fc7a 100644 --- a/Tribler/Core/Session.py +++ b/Tribler/Core/Session.py @@ -3,12 +3,13 @@ Author(s): Arno Bakker """ +from __future__ import absolute_import + import errno import logging import os import sys -import time -from binascii import hexlify +from threading import RLock from twisted.internet import threads from twisted.internet.defer import fail, inlineCallbacks @@ -17,26 +18,22 @@ from twisted.python.threadable import isInIOThread import Tribler.Core.permid as permid_module -from Tribler.Core import NoDispersyRLock from Tribler.Core.APIImplementation.LaunchManyCore import TriblerLaunchMany -from Tribler.Core.CacheDB.Notifier import Notifier -from Tribler.Core.CacheDB.sqlitecachedb import DB_DIR_NAME, DB_FILE_RELATIVE_PATH, 
SQLiteCacheDB from Tribler.Core.Config.tribler_config import TriblerConfig from Tribler.Core.Modules.restapi.rest_manager import RESTManager +from Tribler.Core.Notifier import Notifier from Tribler.Core.Upgrade.upgrade import TriblerUpgrader from Tribler.Core.Utilities import torrent_utils from Tribler.Core.Utilities.crypto_patcher import patch_crypto_be_discovery -from Tribler.Core.exceptions import DuplicateTorrentFileError, NotYetImplementedException, \ - OperationNotEnabledByConfigurationException -from Tribler.Core.simpledefs import (NTFY_CHANNELCAST, NTFY_DELETE, NTFY_INSERT, NTFY_MYPREFERENCES, NTFY_PEERS, - NTFY_TORRENTS, NTFY_TRIBLER, NTFY_UPDATE, NTFY_VOTECAST, STATEDIR_DLPSTATE_DIR, - STATEDIR_WALLET_DIR, STATE_LOAD_CHECKPOINTS, STATE_OPEN_DB, STATE_READABLE_STARTED, - STATE_SHUTDOWN, STATE_START_API, STATE_UPGRADING_READABLE) +from Tribler.Core.exceptions import NotYetImplementedException, OperationNotEnabledByConfigurationException +from Tribler.Core.simpledefs import NTFY_DELETE, NTFY_INSERT, NTFY_TRIBLER, NTFY_UPDATE, STATEDIR_CHANNELS_DIR, \ + STATEDIR_DLPSTATE_DIR, STATEDIR_WALLET_DIR, STATE_LOAD_CHECKPOINTS, STATE_READABLE_STARTED, STATE_SHUTDOWN, \ + STATE_START_API, STATE_UPGRADING_READABLE +from Tribler.Core.simpledefs import STATEDIR_DB_DIR from Tribler.Core.statistics import TriblerStatistics -from Tribler.pyipv8.ipv8.util import cast_to_long try: - long # pylint: disable=long-builtin + long # pylint: disable=long-builtin except NameError: long = int # pylint: disable=redefined-builtin @@ -71,7 +68,7 @@ def __init__(self, config=None, autoload_discovery=True): self._logger = logging.getLogger(self.__class__.__name__) - self.session_lock = NoDispersyRLock() + self.session_lock = RLock() self.config = config or TriblerConfig() self._logger.info("Session is using state directory: %s", self.config.get_state_dir()) @@ -79,9 +76,6 @@ def __init__(self, config=None, autoload_discovery=True): self.get_ports_in_config() self.create_state_directory_structure() - if not self.config.get_megacache_enabled(): - self.config.set_torrent_checking_enabled(False) - self.selected_ports = self.config.selected_ports self.init_keypair() @@ -89,15 +83,14 @@ def __init__(self, config=None, autoload_discovery=True): self.lm = TriblerLaunchMany() self.notifier = Notifier() - self.sqlite_db = None self.upgrader_enabled = True - self.dispersy_member = None self.readable_status = '' # Human-readable string to indicate the status during startup/shutdown of Tribler self.autoload_discovery = autoload_discovery def create_state_directory_structure(self): """Create directory structure of the state directory.""" + def create_dir(path): if not os.path.isdir(path): os.makedirs(path) @@ -106,17 +99,14 @@ def create_in_state_dir(path): create_dir(os.path.join(self.config.get_state_dir(), path)) create_dir(self.config.get_state_dir()) - create_dir(self.config.get_torrent_store_dir()) - create_dir(self.config.get_metadata_store_dir()) - create_in_state_dir(DB_DIR_NAME) + create_in_state_dir(STATEDIR_DB_DIR) create_in_state_dir(STATEDIR_DLPSTATE_DIR) create_in_state_dir(STATEDIR_WALLET_DIR) + create_in_state_dir(STATEDIR_CHANNELS_DIR) def get_ports_in_config(self): """Claim all required random ports.""" self.config.get_libtorrent_port() - self.config.get_dispersy_port() - self.config.get_mainline_dht_port() self.config.get_video_server_port() self.config.get_anon_listen_port() @@ -126,22 +116,6 @@ def init_keypair(self): """ Set parameters that depend on state_dir. 
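For orientation, the layout that create_state_directory_structure (above) now produces under the state directory; the concrete directory names are assumptions inferred from the constant names, not shown in this diff:

.. sourcecode:: none

    <state_dir>/
        sqlite/         # STATEDIR_DB_DIR (assumed value)
        dlcheckpoints/  # STATEDIR_DLPSTATE_DIR (assumed value)
        wallet/         # STATEDIR_WALLET_DIR (assumed value)
        channels/       # STATEDIR_CHANNELS_DIR (assumed value)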
""" - permid_module.init() - # Set params that depend on state_dir - # - # 1. keypair - # - pair_filename = self.config.get_permid_keypair_filename() - if os.path.exists(pair_filename): - self.keypair = permid_module.read_keypair(pair_filename) - else: - self.keypair = permid_module.generate_keypair() - - # Save keypair - public_key_filename = os.path.join(self.config.get_state_dir(), 'ecpub.pem') - permid_module.save_keypair(self.keypair, pair_filename) - permid_module.save_pub_key(self.keypair, public_key_filename) - trustchain_pairfilename = self.config.get_trustchain_keypair_filename() if os.path.exists(trustchain_pairfilename): self.trustchain_keypair = permid_module.read_keypair_trustchain(trustchain_pairfilename) @@ -334,8 +308,6 @@ def remove_download_by_id(self, infohash, remove_content=False, remove_state=Tru if download.get_def().get_infohash() == infohash: return self.remove_download(download, remove_content, remove_state) - self.lm.remove_id(infohash) - def set_download_states_callback(self, user_callback, interval=1.0): """ See Download.set_state_callback. Calls user_callback with a list of @@ -353,18 +325,6 @@ def set_download_states_callback(self, user_callback, interval=1.0): """ self.lm.set_download_states_callback(user_callback, interval) - # - # Config parameters that only exist at runtime - # - def get_permid(self): - """ - Returns the PermID of the Session, as determined by the - TriblerConfig.set_permid() parameter. A PermID is a public key. - - :return: the PermID encoded in a string in DER format - """ - return str(self.keypair.pub().get_der()) - # # Notification of events in the Session # @@ -399,45 +359,10 @@ def remove_observer(self, function): """ self.notifier.remove_observer(function) - def open_dbhandler(self, subject): - """ - Opens a connection to the specified database. Only the thread calling this method may - use this connection. The connection must be closed with close_dbhandler() when this - thread exits. This function is called by any thread. - - ;param subject: the database to open. Must be one of the subjects specified here. - :return: a reference to a DBHandler class for the specified subject or - None when the Session was not started with megacache enabled. - """ - if not self.config.get_megacache_enabled(): - raise OperationNotEnabledByConfigurationException() - - if subject == NTFY_PEERS: - return self.lm.peer_db - elif subject == NTFY_TORRENTS: - return self.lm.torrent_db - elif subject == NTFY_MYPREFERENCES: - return self.lm.mypref_db - elif subject == NTFY_VOTECAST: - return self.lm.votecast_db - elif subject == NTFY_CHANNELCAST: - return self.lm.channelcast_db - else: - raise ValueError(u"Cannot open DB subject: %s" % subject) - - @staticmethod - def close_dbhandler(database_handler): - """Closes the given database connection.""" - database_handler.close() - def get_tribler_statistics(self): """Return a dictionary with general Tribler statistics.""" return TriblerStatistics(self).get_tribler_statistics() - def get_dispersy_statistics(self): - """Return a dictionary with general Dispersy statistics.""" - return TriblerStatistics(self).get_dispersy_statistics() - def get_ipv8_statistics(self): """Return a dictionary with IPv8 statistics.""" return TriblerStatistics(self).get_ipv8_statistics() @@ -461,15 +386,6 @@ def checkpoint(self): """ self.lm.checkpoint_downloads() - def start_database(self): - """ - Start the SQLite database. 
- """ - db_path = os.path.join(self.config.get_state_dir(), DB_FILE_RELATIVE_PATH) - - self.sqlite_db = SQLiteCacheDB(db_path) - self.readable_status = STATE_OPEN_DB - def start(self): """ Start a Tribler session by initializing the LaunchManyCore class, opening the database and running the upgrader. @@ -481,10 +397,8 @@ def start(self): self.readable_status = STATE_START_API self.lm.api_manager.start() - self.start_database() - if self.upgrader_enabled: - upgrader = TriblerUpgrader(self, self.sqlite_db) + upgrader = TriblerUpgrader(self) self.readable_status = STATE_UPGRADING_READABLE upgrader.run() @@ -528,11 +442,6 @@ def on_early_shutdown_complete(_): self.notify_shutdown_state("Shutting down Metadata Store...") self.lm.mds.shutdown() - if self.sqlite_db: - self.notify_shutdown_state("Shutting down SQLite Database...") - self.sqlite_db.close() - self.sqlite_db = None - # We close the API manager as late as possible during shutdown. if self.lm.api_manager is not None: self.notify_shutdown_state("Shutting down API Manager...") @@ -562,69 +471,6 @@ def get_downloads_pstate_dir(self): """ return os.path.join(self.config.get_state_dir(), STATEDIR_DLPSTATE_DIR) - def download_torrentfile(self, infohash=None, user_callback=None, priority=0): - """ - Try to download the torrent file without a known source. A possible source could be the DHT. - If the torrent is received successfully, the user_callback method is called with the infohash as first - and the contents of the torrent file (bencoded dict) as second parameter. If the torrent could not - be obtained, the callback is not called. The torrent will have been added to the TorrentDBHandler (if enabled) - at the time of the call. - - :param infohash: the infohash of the torrent - :param user_callback: a function adhering to the above spec - :param priority: the priority of this download - """ - if not self.lm.rtorrent_handler: - raise OperationNotEnabledByConfigurationException() - - self.lm.rtorrent_handler.download_torrent(None, infohash, user_callback=user_callback, priority=priority) - - def download_torrentfile_from_peer(self, candidate, infohash=None, user_callback=None, priority=0): - """ - Ask the designated peer to send us the torrent file for the torrent - identified by the passed infohash. If the torrent is successfully - received, the user_callback method is called with the infohash as first - and the contents of the torrent file (bencoded dict) as second parameter. - If the torrent could not be obtained, the callback is not called. - The torrent will have been added to the TorrentDBHandler (if enabled) - at the time of the call. - - :param candidate: the designated peer - :param infohash: the infohash of the torrent - :param user_callback: a function adhering to the above spec - :param priority: priority of this request - """ - if not self.lm.rtorrent_handler: - raise OperationNotEnabledByConfigurationException() - - self.lm.rtorrent_handler.download_torrent(candidate, infohash, user_callback=user_callback, priority=priority) - - def download_torrentmessage_from_peer(self, candidate, infohash, user_callback, priority=0): - """ - Ask the designated peer to send us the torrent message for the torrent - identified by the passed infohash. If the torrent message is successfully - received, the user_callback method is called with the infohash as first - and the contents of the torrent file (bencoded dict) as second parameter. - If the torrent could not be obtained, the callback is not called. 
- The torrent will have been added to the TorrentDBHandler (if enabled) - at the time of the call. - - :param candidate: the designated peer - :param infohash: the infohash of the torrent - :param user_callback: a function adhering to the above spec - :param priority: priority of this request - """ - if not self.lm.rtorrent_handler: - raise OperationNotEnabledByConfigurationException() - - self.lm.rtorrent_handler.download_torrentmessage(candidate, infohash, user_callback, priority) - - def get_dispersy_instance(self): - if not self.config.get_dispersy_enabled(): - raise OperationNotEnabledByConfigurationException() - - return self.lm.dispersy - def get_ipv8_instance(self): if not self.config.get_ipv8_enabled(): raise OperationNotEnabledByConfigurationException() @@ -653,71 +499,6 @@ def update_trackers(self, infohash, trackers): """ return self.lm.update_trackers(infohash, trackers) - def has_collected_torrent(self, infohash): - """ - Checks if the given torrent infohash exists in the torrent_store database. - - :param infohash: The given infohash binary - :return: True or False indicating if we have the torrent - """ - if not self.config.get_torrent_store_enabled(): - raise OperationNotEnabledByConfigurationException("torrent_store is not enabled") - return hexlify(infohash) in self.lm.torrent_store - - def get_collected_torrent(self, infohash): - """ - Gets the given torrent from the torrent_store database. - - :param infohash: the given infohash binary - :return: the torrent data if exists, None otherwise - """ - if not self.config.get_torrent_store_enabled(): - raise OperationNotEnabledByConfigurationException("torrent_store is not enabled") - return self.lm.torrent_store.get(hexlify(infohash)) - - def save_collected_torrent(self, infohash, data): - """ - Saves the given torrent into the torrent_store database. - - :param infohash: the given infohash binary - :param data: the torrent file data - """ - if not self.config.get_torrent_store_enabled(): - raise OperationNotEnabledByConfigurationException("torrent_store is not enabled") - self.lm.torrent_store.put(hexlify(infohash), data) - - def delete_collected_torrent(self, infohash): - """ - Deletes the given torrent from the torrent_store database. - - :param infohash: the given infohash binary - """ - if not self.config.get_torrent_store_enabled(): - raise OperationNotEnabledByConfigurationException("torrent_store is not enabled") - - del self.lm.torrent_store[hexlify(infohash)] - - def search_remote_torrents(self, keywords): - """ - Searches for remote torrents through SearchCommunity with the given keywords. - - :param keywords: the given keywords - :return: the number of requests made - """ - if not self.config.get_torrent_search_enabled(): - raise OperationNotEnabledByConfigurationException("torrent_search is not enabled") - return self.lm.search_manager.search_for_torrents(keywords) - - def search_remote_channels(self, keywords): - """ - Searches for remote channels through AllChannelCommunity with the given keywords. 
- - :param keywords: the given keywords - """ - if not self.config.get_channel_search_enabled(): - raise OperationNotEnabledByConfigurationException("channel_search is not enabled") - self.lm.search_manager.search_for_channels(keywords) - @staticmethod def create_torrent_file(file_path_list, params=None): """ @@ -738,46 +519,10 @@ def create_channel(self, name, description, mode=u'closed'): :param description: description of the Channel :param mode: mode of the Channel ('open', 'semi-open', or 'closed') :return: a channel ID - :raises a DuplicateChannelNameError if name already exists + :raises a DuplicateChannelIdError if name already exists """ return self.lm.channel_manager.create_channel(name, description, mode) - def add_torrent_def_to_channel(self, channel_id, torrent_def, extra_info=None, forward=True): - """ - Adds a TorrentDef to a Channel. - - :param channel_id: id of the Channel to add the Torrent to - :param torrent_def: definition of the Torrent to add - :param extra_info: description of the Torrent to add - :param forward: when True the messages are forwarded (as defined by their message - destination policy) to other nodes in the community. This parameter should (almost always) - be True, its inclusion is mostly to allow certain debugging scenarios - """ - extra_info = extra_info or {} - # Make sure that this new torrent_def is also in collected torrents - self.lm.rtorrent_handler.save_torrent(torrent_def) - - channelcast_db = self.open_dbhandler(NTFY_CHANNELCAST) - if channelcast_db.hasTorrent(channel_id, torrent_def.infohash): - raise DuplicateTorrentFileError("This torrent file already exists in your channel.") - - dispersy_cid = str(channelcast_db.getDispersyCIDFromChannelId(channel_id)) - community = self.get_dispersy_instance().get_community(dispersy_cid) - - community._disp_create_torrent( - torrent_def.infohash, - cast_to_long(time.time()), - torrent_def.get_name_as_unicode(), - tuple(torrent_def.get_files_with_length()), - torrent_def.get_trackers_as_single_tuple(), - forward=forward) - - if 'description' in extra_info: - desc = extra_info['description'].strip() - if desc != '': - data = channelcast_db.getTorrentFromChannelId(channel_id, torrent_def.infohash, ['ChannelTorrents.id']) - community.modifyTorrent(data, {'description': desc}, forward=forward) - def check_torrent_health(self, infohash, timeout=20, scrape_now=False): """ Checks the given torrent's health on its trackers. @@ -790,17 +535,6 @@ def check_torrent_health(self, infohash, timeout=20, scrape_now=False): return self.lm.torrent_checker.add_gui_request(infohash, timeout=timeout, scrape_now=scrape_now) return fail(Failure(RuntimeError("Torrent checker not available"))) - def get_thumbnail_data(self, thumb_hash): - """ - Gets the thumbnail data. 
- - :param thumb_hash: the thumbnail SHA1 hash - :return: the thumbnail data - """ - if not self.lm.metadata_store: - raise OperationNotEnabledByConfigurationException("libtorrent is not enabled") - return self.lm.rtorrent_handler.get_metadata(thumb_hash) - def notify_shutdown_state(self, state): self._logger.info("Tribler shutdown state notification:%s", state) self.notifier.notify(NTFY_TRIBLER, STATE_SHUTDOWN, None, state) diff --git a/Tribler/Core/TFTP/__init__.py b/Tribler/Core/TFTP/__init__.py deleted file mode 100644 index 035a3ad2855..00000000000 --- a/Tribler/Core/TFTP/__init__.py +++ /dev/null @@ -1,3 +0,0 @@ -""" -Contains the the TFTP handler that should be registered at the thread pool to handle TFTP packets -""" diff --git a/Tribler/Core/TFTP/exception.py b/Tribler/Core/TFTP/exception.py deleted file mode 100644 index 2e7fdeb2256..00000000000 --- a/Tribler/Core/TFTP/exception.py +++ /dev/null @@ -1,18 +0,0 @@ -""" -All exceptions used in the TFTP package. -""" - - -class InvalidPacketException(Exception): - """Indicates an invalid packet.""" - pass - - -class InvalidStringException(Exception): - """Indicates an invalid zero-terminated string.""" - pass - - -class FileNotFound(OSError): - """Indicates that a file is not found.""" - pass diff --git a/Tribler/Core/TFTP/handler.py b/Tribler/Core/TFTP/handler.py deleted file mode 100644 index 1fbb4ec2c5a..00000000000 --- a/Tribler/Core/TFTP/handler.py +++ /dev/null @@ -1,568 +0,0 @@ -import logging -from base64 import b64encode -from binascii import hexlify -from hashlib import sha1 -from random import randint -from socket import inet_aton -from struct import unpack -from time import time - -from twisted.internet import reactor -from twisted.internet.task import LoopingCall - -from Tribler.dispersy.candidate import Candidate -from Tribler.dispersy.util import (call_on_reactor_thread, attach_runtime_statistics, is_valid_address) -from Tribler.pyipv8.ipv8.taskmanager import TaskManager - -from .exception import InvalidPacketException, FileNotFound -from .packet import (encode_packet, decode_packet, OPCODE_RRQ, OPCODE_WRQ, OPCODE_ACK, OPCODE_DATA, OPCODE_OACK, - OPCODE_ERROR, ERROR_DICT) -from .session import Session, DEFAULT_BLOCK_SIZE, DEFAULT_TIMEOUT - -MAX_INT16 = 2 ** 16 - 1 - -SEPARATOR = ":" -METADATA_PREFIX = "metadata" + SEPARATOR - -DEFAULT_RETIES = 5 - - -class TftpHandler(TaskManager): - - """ - This is the TFTP handler that should be registered at the thread pool to handle TFTP packets. - """ - - def __init__(self, session, endpoint, prefix, block_size=DEFAULT_BLOCK_SIZE, timeout=DEFAULT_TIMEOUT, - max_retries=DEFAULT_RETIES): - """ The constructor. - :param session: The tribler session. - :param endpoint: The endpoint to use. - :param prefix: The prefix to use. - :param block_size: Transmission block size. - :param timeout: Transmission timeout. - :param max_retries: Transmission maximum retries. - """ - super(TftpHandler, self).__init__() - self._logger = logging.getLogger(self.__class__.__name__) - - self.session = session - - self._endpoint = endpoint - self._prefix = prefix - - self._block_size = block_size - self._timeout = timeout - self._max_retries = max_retries - - self._timeout_check_interval = 0.5 - - self._session_id_dict = {} - self._session_dict = {} - - self._callback_scheduled = False - self._callbacks = [] - - self._is_running = False - - def initialize(self): - """ Initializes the TFTP service. We create a UDP socket and a server session. 
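# --- editor's sketch (not part of the patch): the timeout check that
# initialize() registers is a plain Twisted LoopingCall; Clock replaces the
# real reactor so this sketch runs standalone.

from twisted.internet.task import Clock, LoopingCall

clock = Clock()
ticks = []

loop = LoopingCall(lambda: ticks.append(clock.seconds()))
loop.clock = clock
loop.start(0.5, now=True)       # 0.5 s, like _timeout_check_interval

clock.advance(0.5)
clock.advance(0.5)
assert ticks == [0.0, 0.5, 1.0]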
- """ - self._endpoint.listen_to(self._prefix, self.data_came_in) - # start a looping call that checks timeout - self.register_task(u"tftp timeout check", - LoopingCall(self._task_check_timeout)).start(self._timeout_check_interval, now=True) - self._is_running = True - - def shutdown(self): - """ Shuts down the TFTP service. - """ - self.shutdown_task_manager() - if self._endpoint: - self._endpoint.stop_listen_to(self._prefix) - self._endpoint = None - - self._session_id_dict = None - self._session_dict = None - - self._is_running = False - - @call_on_reactor_thread - def download_file(self, file_name, ip, port, extra_info=None, success_callback=None, failure_callback=None): - """ Downloads a file from a remote host. - :param file_name: The file name of the file to be downloaded. - :param ip: The IP of the remote host. - :param port: The port of the remote host. - :param success_callback: The success callback. - :param failure_callback: The failure callback. - """ - # generate a unique session id - # if the target address is higher than ours, we use even number. Otherwise, we use odd number. - if not self._is_running: - return - - target_ip = unpack('!L', inet_aton(ip))[0] - target_port = port - self_ip, self_port = self.session.lm.dispersy.wan_address - self_ip = unpack('!L', inet_aton(self_ip))[0] - if target_ip > self_ip: - generate_session = lambda: randint(0, MAX_INT16) & 0xfff0 - elif target_ip < self_ip: - generate_session = lambda: randint(0, MAX_INT16) | 1 - else: - if target_port > self_port: - generate_session = lambda: randint(0, MAX_INT16) & 0xfff0 - elif target_port < self_port: - generate_session = lambda: randint(0, MAX_INT16) | 1 - else: - self._logger.critical(u"communicating to myself %s:%s", ip, port) - generate_session = lambda: randint(0, MAX_INT16) - - session_id = generate_session() - while (ip, port, session_id) in self._session_dict: - session_id = generate_session() - - # create session - assert session_id is not None, u"session_id = %s" % session_id - self._logger.debug(u"start downloading %s from %s:%s, sid = %s", file_name, ip, port, session_id) - session = Session(True, session_id, (ip, port), OPCODE_RRQ, file_name, '', None, None, - extra_info=extra_info, block_size=self._block_size, timeout=self._timeout, - success_callback=success_callback, failure_callback=failure_callback) - - self._add_new_session(session) - self._send_request_packet(session) - - self._logger.info(u"%s started", session) - - @attach_runtime_statistics(u"{0.__class__.__name__}.{function_name}") - def _task_check_timeout(self): - """ A scheduled task that checks for timeout. - """ - if not self._is_running: - return - - need_session_cleanup = False - for key, session in self._session_dict.items(): - if self._check_session_timeout(session): - need_session_cleanup = True - - # fail as timeout - self._logger.info(u"%s timed out", session) - if session.failure_callback: - callback = lambda cb = session.failure_callback, addr = session.address, fn = session.file_name,\ - msg = "timeout", ei = session.extra_info: cb(addr, fn, msg, ei) - self._callbacks.append(callback) - - self._cleanup_session(key) - - if need_session_cleanup: - self._schedule_callback_processing() - - def _check_session_timeout(self, session): - """ - Checks if a session has timed out and tries to retransmit packet if allowed. - :param session: The given session. - :return: True or False indicating if the session has failed. 
- """ - has_failed = False - timeout = session.timeout * (2**session.retries) - if session.last_contact_time + timeout < time(): - # we do NOT resend packets that are not data-related - if session.retries < self._max_retries and session.last_sent_packet['opcode'] in (OPCODE_ACK, OPCODE_DATA): - self._send_packet(session, session.last_sent_packet) - session.retries += 1 - else: - has_failed = True - return has_failed - - def _schedule_callback_processing(self): - """ - Schedules a task to process callbacks. - """ - if not self._callback_scheduled: - self.register_task(u"tftp_process_callback", reactor.callLater(0, self._process_callbacks)) - self._callback_scheduled = True - - @attach_runtime_statistics(u"{0.__class__.__name__}.{function_name}") - def _process_callbacks(self): - """ - Process the callbacks - """ - for callback in self._callbacks: - callback() - self._callbacks = [] - self._callback_scheduled = False - - def _add_new_session(self, session): - self._session_id_dict[session.session_id] = 1 + self._session_id_dict.get(session.session_id, 0) - self._session_dict[(session.address[0], session.address[1], session.session_id)] = session - - def _cleanup_session(self, key): - session_id = key[2] - self._session_id_dict[session_id] -= 1 - if self._session_id_dict[session_id] == 0: - del self._session_id_dict[session_id] - del self._session_dict[key] - - @attach_runtime_statistics(u"{0.__class__.__name__}.{function_name}") - @call_on_reactor_thread - def data_came_in(self, addr, data): - """ The callback function that the thread pool will call when there is incoming data. - :param addr: The (IP, port) address tuple of the sender. - :param data: The data received. - """ - if not self._is_running or not is_valid_address(addr): - return - - ip, port = addr - - # decode the packet - try: - packet = decode_packet(data) - except InvalidPacketException as e: - self._logger.error(u"Invalid packet from [%s:%s], packet=[%s], error=%s", ip, port, hexlify(data), e) - return - - if packet['opcode'] == OPCODE_WRQ: - self._logger.error(u"WRQ is not supported from [%s:%s], packet=[%s]", ip, port, repr(packet)) - return - - self._logger.debug(u"GOT packet opcode[%s] from %s:%s", packet['opcode'], ip, port) - # a new request - if packet['opcode'] == OPCODE_RRQ: - self._logger.debug(u"start handling new request: %s", packet) - self._handle_new_request(ip, port, packet) - return - - if (ip, port, packet['session_id']) not in self._session_dict: - self._logger.warn(u"got non-existing session from %s:%s, id = %s", ip, port, packet['session_id']) - return - - # handle the response - session = self._session_dict[(ip, port, packet['session_id'])] - self._process_packet(session, packet) - - if not session.is_done and not session.is_failed: - return - - self._cleanup_session((ip, port, packet['session_id'])) - - # schedule callback - if session.is_failed: - self._logger.info(u"%s failed", session) - if session.failure_callback: - callback = lambda cb = session.failure_callback, a = session.address, fn = session.file_name,\ - msg = "download failed", ei = session.extra_info: cb(a, fn, msg, ei) - self._callbacks.append(callback) - elif session.is_done: - self._logger.info(u"%s finished", session) - if session.success_callback: - callback = lambda cb = session.success_callback, a = session.address, fn = session.file_name,\ - fd = session.file_data, ei = session.extra_info: cb(a, fn, fd, ei) - self._callbacks.append(callback) - - self._schedule_callback_processing() - - def _handle_new_request(self, ip, port, 
packet): - """ Handles a new request. - :param ip: The IP of the client. - :param port: The port of the client. - :param packet: The packet. - """ - if packet['opcode'] != OPCODE_RRQ: - self._logger.error(u"Unexpected request from %s:%s, opcode=%s: packet=%s", - ip, port, packet['opcode'], repr(packet)) - return - if 'options' not in packet: - self._logger.error(u"No 'options' in request from %s:%s, opcode=%s, packet=%s", - ip, port, packet['opcode'], repr(packet)) - return - if 'blksize' not in packet['options'] or 'timeout' not in packet['options']: - self._logger.error(u"No 'blksize' or 'timeout' not in 'options' from %s:%s, opcode=%s, packet=%s", - ip, port, packet['opcode'], repr(packet)) - return - - file_name = packet['file_name'].decode('utf8') - block_size = packet['options']['blksize'] - timeout = packet['options']['timeout'] - - # check session_id - if (ip, port, packet['session_id']) in self._session_dict: - self._logger.warn(u"Existing session_id %s from %s:%s", packet['session_id'], ip, port) - dummy_session = Session(False, packet['session_id'], (ip, port), packet['opcode'], - file_name, None, None, None, block_size=block_size, timeout=timeout) - self._handle_error(dummy_session, 50) - return - - # read the file/directory into memory - try: - if file_name.startswith(METADATA_PREFIX): - if not self.session.config.get_metadata_enabled(): - return - file_data, file_size = self._load_metadata(file_name[len(METADATA_PREFIX):]) - else: - if not self.session.config.get_torrent_store_enabled(): - return - file_data, file_size = self._load_torrent(file_name) - checksum = b64encode(sha1(file_data).digest()) - except FileNotFound as e: - self._logger.warn(u"[READ %s:%s] file not found: %s", ip, port, e) - dummy_session = Session(False, packet['session_id'], (ip, port), packet['opcode'], - file_name, None, None, None, block_size=block_size, timeout=timeout) - self._handle_error(dummy_session, 1) - return - except Exception as e: - self._logger.error(u"[READ %s:%s] failed to load file: %s", ip, port, e) - dummy_session = Session(False, packet['session_id'], (ip, port), packet['opcode'], - file_name, None, None, None, block_size=block_size, timeout=timeout) - self._handle_error(dummy_session, 2) - raise - - # create a session object - session = Session(False, packet['session_id'], (ip, port), packet['opcode'], - file_name, file_data, file_size, checksum, block_size=block_size, timeout=timeout) - - # insert session_id and session - self._add_new_session(session) - self._logger.debug(u"got new request: %s", session) - - # send back OACK now - self._send_oack_packet(session) - - def _load_metadata(self, thumb_hash): - """ Loads a thumbnail into memory. - :param thumb_hash: The thumbnail hash. - """ - file_data = self.session.lm.metadata_store.get(thumb_hash.encode('utf8')) - # check if file exists - if not file_data: - msg = u"Metadata not in store: %s" % thumb_hash - raise FileNotFound(msg) - - return file_data, len(file_data) - - def _load_torrent(self, file_name): - """ Loads a file into memory. - :param file_name: The file name. - """ - infohash = (file_name[:-8]).encode('utf8') # len('.torrent') = 8 - - file_data = self.session.lm.torrent_store.get(infohash) - # check if file exists - if not file_data: - msg = u"Torrent not in store: %s" % infohash - raise FileNotFound(msg) - - return file_data, len(file_data) - - def _get_next_data(self, session): - """ Gets the next block of data to be uploaded. This method is only used for data uploading. - :return The data to transfer. 
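# --- editor's sketch (not part of the patch): the request routing of the
# removed _handle_new_request()/_load_torrent(): a 'metadata:' prefix selects
# the metadata store, anything else is '<key>.torrent' and the store key is
# the file name minus the 8-character suffix.

SEPARATOR = ":"
METADATA_PREFIX = "metadata" + SEPARATOR

def resolve_request(file_name):
    if file_name.startswith(METADATA_PREFIX):
        return 'metadata', file_name[len(METADATA_PREFIX):]
    return 'torrent', file_name[:-len('.torrent')]

assert resolve_request(u'metadata:abcd') == ('metadata', u'abcd')
assert resolve_request(u'00ff.torrent') == ('torrent', u'00ff')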
- """ - start_idx = session.block_number * session.block_size - end_idx = start_idx + session.block_size - data = session.file_data[start_idx:end_idx] - session.block_number += 1 - - # check if we are done - if len(data) < session.block_size: - session.is_waiting_for_last_ack = True - - return data - - def _process_packet(self, session, packet): - """ processes an incoming packet. - :param packet: The incoming packet dictionary. - """ - session.last_contact_time = time() - # check if it is an ERROR packet - if packet['opcode'] == OPCODE_ERROR: - self._logger.warning(u"%s got ERROR message: code = %s, msg = %s", - session, packet['error_code'], packet['error_msg']) - session.is_failed = True - return - - # client is the receiver, server is the sender - if session.is_client: - self._handle_packet_as_receiver(session, packet) - else: - self._handle_packet_as_sender(session, packet) - - def _handle_packet_as_receiver(self, session, packet): - """ Processes an incoming packet as a receiver. - :param packet: The incoming packet dictionary. - """ - # if this is the first packet, check OACK - if packet['opcode'] == OPCODE_OACK: - if session.last_received_packet is None: - # check options - if session.block_size != packet['options']['blksize']: - msg = "%s OACK blksize mismatch: %s != %s (expected)" %\ - (session, session.block_size, packet['options']['blksize']) - self._logger.error(msg) - self._handle_error(session, 0, error_msg=msg) # Error: blksize mismatch - return - - if session.timeout != packet['options']['timeout']: - msg = "%s OACK timeout mismatch: %s != %s (expected)" %\ - (session, session.timeout, packet['options']['timeout']) - self._logger.error(msg) - self._handle_error(session, 0, error_msg=msg) # Error: timeout mismatch - return - - session.file_size = packet['options']['tsize'] - session.checksum = packet['options']['checksum'] - - if session.request == OPCODE_RRQ: - # send ACK - self._send_ack_packet(session, session.block_number) - session.block_number += 1 - session.file_data = "" - - else: - self._logger.error(u"%s Got OPCODE %s which is not expected", session, packet['opcode']) - self._handle_error(session, 4) # illegal TFTP operation - return - - # expect a DATA - if packet['opcode'] != OPCODE_DATA: - self._logger.error(u"%s Got OPCODE %s while expecting %s", session, packet['opcode'], OPCODE_DATA) - self._handle_error(session, 4) # illegal TFTP operation - return - - self._logger.debug(u"%s Got data, #block = %s size = %s", session, packet['block_number'], len(packet['data'])) - - # check block_number - # ignore old ones, they may be retransmissions - if packet['block_number'] < session.block_number: - self._logger.warn(u"%s ignore old block number DATA %s < %s", - session, packet['block_number'], session.block_number) - return - - if packet['block_number'] != session.block_number: - msg = "%s Got ACK with block# %s while expecting %s" %\ - (session, packet['block_number'], session.block_number) - self._logger.error(msg) - self._handle_error(session, 0, error_msg=msg) # Error: block_number mismatch - return - - # save data - session.file_data += packet['data'] - self._send_ack_packet(session, session.block_number) - session.block_number += 1 - - # check if it is the end - if len(packet['data']) < session.block_size: - self._logger.info(u"%s transfer finished. 
checking data integrity...", session) - # check file size and checksum - if session.file_size != len(session.file_data): - self._logger.error(u"%s file size %s doesn't match expectation %s", - session, len(session.file_data), session.file_size) - session.is_failed = True - return - - # compare checksum - data_checksum = b64encode(sha1(session.file_data).digest()) - if session.checksum != data_checksum: - self._logger.error(u"%s file checksum %s doesn't match expectation %s", - session, data_checksum, session.checksum) - session.is_failed = True - return - - session.is_done = True - - def _handle_packet_as_sender(self, session, packet): - """ Processes an incoming packet as a sender. - :param packet: The incoming packet dictionary. - """ - # expect an ACK packet - if packet['opcode'] != OPCODE_ACK: - self._logger.error(u"%s got OPCODE(%s) while expecting %s", session, packet['opcode'], OPCODE_ACK) - self._handle_error(session, 4) # illegal TFTP operation - return - - # check block number - # ignore old ones, they may be retransmissions - if packet['block_number'] < session.block_number: - self._logger.warn(u"%s ignore old block number ACK %s < %s", - session, packet['block_number'], session.block_number) - return - - if packet['block_number'] != session.block_number: - msg = "%s got ACK with block# %s while expecting %s" %\ - (session, packet['block_number'], session.block_number) - self._logger.error(msg) - self._handle_error(session, 0, error_msg=msg) # Error: block_number mismatch - return - - if session.is_waiting_for_last_ack: - session.is_done = True - return - - data = self._get_next_data(session) - # send DATA - self._send_data_packet(session, session.block_number, data) - - def _handle_error(self, session, error_code, error_msg=""): - """ Handles an error during packet processing. - :param error_code: The error code. 
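# --- editor's sketch (not part of the patch): the integrity check performed
# when the last DATA block arrives. The OACK advertised this exact digest
# format, and a mismatch flips the session to is_failed.

from base64 import b64encode
from hashlib import sha1

def tftp_checksum(data):
    return b64encode(sha1(data).digest())

advertised = tftp_checksum(b'block one' + b'block two')
assert tftp_checksum(b'block one' + b'block two') == advertised  # accepted
assert tftp_checksum(b'corrupted') != advertised                 # is_failed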
- """ - session.is_failed = True - msg = error_msg if error_msg else ERROR_DICT.get(error_code, error_msg) - self._send_error_packet(session, error_code, msg) - - def _send_packet(self, session, packet): - packet_buff = encode_packet(packet) - extra_msg = u" block_number = %s" % packet['block_number'] if packet.get('block_number') is not None else "" - extra_msg += u" block_size = %s" % len(packet['data']) if packet.get('data') is not None else "" - - self._logger.debug(u"SEND OP[%s] -> %s:%s %s", - packet['opcode'], session.address[0], session.address[1], extra_msg) - self._endpoint.send_packet(Candidate(session.address, False), packet_buff, prefix=self._prefix) - - # update information - session.last_contact_time = time() - session.last_sent_packet = packet - - def _send_request_packet(self, session): - assert session.request == OPCODE_RRQ, u"Invalid request_opcode %s" % repr(session.request) - - packet = {'opcode': session.request, - 'session_id': session.session_id, - 'file_name': session.file_name.encode('utf8'), - 'options': {'blksize': session.block_size, - 'timeout': session.timeout, - }} - self._send_packet(session, packet) - - def _send_data_packet(self, session, block_number, data): - packet = {'opcode': OPCODE_DATA, - 'session_id': session.session_id, - 'block_number': block_number, - 'data': data} - self._send_packet(session, packet) - - def _send_ack_packet(self, session, block_number): - packet = {'opcode': OPCODE_ACK, - 'session_id': session.session_id, - 'block_number': block_number} - self._send_packet(session, packet) - - def _send_error_packet(self, session, error_code, error_msg): - packet = {'opcode': OPCODE_ERROR, - 'session_id': session.session_id, - 'error_code': error_code, - 'error_msg': error_msg - } - self._send_packet(session, packet) - - def _send_oack_packet(self, session): - packet = {'opcode': OPCODE_OACK, - 'session_id': session.session_id, - 'block_number': session.block_number, - 'options': {'blksize': session.block_size, - 'timeout': session.timeout, - 'tsize': session.file_size, - 'checksum': session.checksum, - }} - self._send_packet(session, packet) diff --git a/Tribler/Core/TFTP/packet.py b/Tribler/Core/TFTP/packet.py deleted file mode 100644 index 212531b4b47..00000000000 --- a/Tribler/Core/TFTP/packet.py +++ /dev/null @@ -1,231 +0,0 @@ -import struct -from binascii import hexlify - -from .exception import InvalidStringException, InvalidPacketException - -# OPCODE -OPCODE_RRQ = 1 -OPCODE_WRQ = 2 -OPCODE_DATA = 3 -OPCODE_ACK = 4 -OPCODE_ERROR = 5 -OPCODE_OACK = 6 - -# supported options -OPTIONS = ("blksize", "timeout", "tsize", "checksum") - -# error codes and messages -ERROR_DICT = { - 0: "Not defined, see error message (if any).", - 1: "File not found", - 2: "Access violation", - 3: "Disk full or allocation exceeded", - 4: "Illegal TFTP operation", - 5: "Unknown transfer ID", - 6: "File already exists", - 7: "No such user", - 8: "Failed to negotiate options", - 50: "Session ID already exists", -} - - -def _get_string(buff, start_idx): - """ Gets a zero-terminated string from a given buffer. - :param buff: The buffer. - :param start_idx: The index to start from. - :return: A (str, idx) tuple that has the zero-terminated string and the next index. 
- """ - str_data = "" - next_idx = start_idx + 1 - got_end = False - for c in buff[start_idx:]: - if ord(c) == 0: - got_end = True - break - str_data += c - next_idx += 1 - - if not got_end: - raise InvalidStringException() - return str_data, next_idx - - -def _decode_options(packet, buff, start_idx): - """ Decodes options from a given packet buffer. - :param packet: The packet dictionary to use. - :param buff: The packet buffer. - :param start_idx: The index to start from. - :return: None - """ - packet['options'] = {} - idx = start_idx - while idx < len(buff): - option, idx = _get_string(buff, idx) - value, idx = _get_string(buff, idx) - if option == "": - raise InvalidPacketException(u"Empty option") - if value == "": - raise InvalidPacketException(u"Empty value for option[%s]" % repr(option)) - - packet['options'][option] = value - - # validate options and convert them to proper format - for k, v in packet['options'].items(): - if k not in OPTIONS: - raise InvalidPacketException(u"Unknown option[%s]" % repr(k)) - - # blksize, timeout, and tsize are all integers - try: - if k in ("blksize", "timeout", "tsize"): - packet['options'][k] = int(v) - else: - packet['options'][k] = v - except ValueError: - raise InvalidPacketException(u"Invalid value for option %s: %s" % (repr(k), repr(v))) - - -def _decode_rrq_wrq(packet, packet_buff, offset): - """ Decodes a RRQ/WRQ packet. - :param packet: The packet dictionary. - :param packet_buff: The packet buffer. - :return: The decoded packet as a dictionary. - """ - # get file_name and mode - file_name, idx = _get_string(packet_buff, offset) - - packet['file_name'] = file_name - - # get options - _decode_options(packet, packet_buff, idx) - return packet - - -def _decode_data(packet, packet_buff, offset): - """ Decodes a DATA packet. - :param packet: The packet dictionary. - :param packet_buff: The packet buffer. - :return: The decoded packet as a dictionary. - """ - # get block number and data - if len(packet_buff) < offset + 2: - raise InvalidPacketException(u"DATA packet too small (<4): %s" % repr(packet_buff)) - block_number, = struct.unpack_from("!H", packet_buff, offset) - data = packet_buff[offset + 2:] - - packet['block_number'] = block_number - packet['data'] = data - - return packet - - -def _decode_ack(packet, packet_buff, offset): - """ Decodes a ACK packet. - :param packet: The packet dictionary. - :param packet_buff: The packet buffer. - :return: The decoded packet as a dictionary. - """ - # get block number - if len(packet_buff) != offset + 2: - raise InvalidPacketException(u"ACK packet has invalid size (!=%s): %s" % (offset + 2, hexlify(packet_buff))) - block_number, = struct.unpack_from("!H", packet_buff, offset) - - packet['block_number'] = block_number - - return packet - - -def _decode_error(packet, packet_buff, offset): - """ Decodes a ERROR packet. - :param packet: The packet dictionary. - :param packet_buff: The packet buffer. - :return: The decoded packet as a dictionary. 
- """ - if len(packet_buff) < offset + 3: - raise InvalidPacketException(u"ERROR packet too small (<%s): %s" % (offset + 3, hexlify(packet_buff))) - error_code, = struct.unpack_from("!H", packet_buff, offset) - error_msg, idx = _get_string(packet_buff, offset + 2) - - if not error_msg: - raise InvalidPacketException(u"ERROR packet has empty error message: %s" % hexlify(packet_buff)) - if idx != len(packet_buff): - raise InvalidPacketException(u"Invalid ERROR packet: %s" % hexlify(packet_buff)) - - packet['error_code'] = error_code - packet['error_msg'] = error_msg - - return packet - - -def _decode_oack(packet, packet_buff, offset): - """ Decodes a OACK packet. - :param packet: The packet dictionary. - :param packet_buff: The packet buffer. - :return: The decoded packet as a dictionary. - """ - # get block number and data - _decode_options(packet, packet_buff, offset) - - return packet - - -PACKET_DECODE_DICT = { - OPCODE_RRQ: _decode_rrq_wrq, - OPCODE_WRQ: _decode_rrq_wrq, - OPCODE_DATA: _decode_data, - OPCODE_ACK: _decode_ack, - OPCODE_ERROR: _decode_error, - OPCODE_OACK: _decode_oack, -} - - -# =================================================================================== -# Public APIs for encoding and decoding -# =================================================================================== -def decode_packet(packet_buff): - """ Decodes a packet binary string into a packet dictionary. - :param packet_buff: The packet binary string. - :return: The decoded packet dictionary. - """ - # get the opcode - if len(packet_buff) < 4: - raise InvalidPacketException(u"Packet too small (<4): %s" % hexlify(packet_buff)) - opcode, session_id = struct.unpack_from("!HH", packet_buff, 0) - - if opcode not in PACKET_DECODE_DICT: - raise InvalidPacketException(u"Invalid opcode: %s" % opcode) - - # decode the packet - packet = {'opcode': opcode, - 'session_id': session_id} - return PACKET_DECODE_DICT[opcode](packet, packet_buff, 4) - - -def encode_packet(packet): - """ Encodes a packet dictionary into a binary string. - :param packet: The packet dictionary. - :return: The encoded packet buffer. 
- """ - # get block number and data - packet_buff = struct.pack("!HH", packet['opcode'], packet['session_id']) - if packet['opcode'] in (OPCODE_RRQ, OPCODE_WRQ): - packet_buff += packet['file_name'] + "\x00" - - for k, v in packet['options'].iteritems(): - packet_buff += "%s\x00%s\x00" % (k, v) - - elif packet['opcode'] == OPCODE_DATA: - packet_buff += struct.pack("!H", packet['block_number']) - packet_buff += packet['data'] - - elif packet['opcode'] == OPCODE_ACK: - packet_buff += struct.pack("!H", packet['block_number']) - - elif packet['opcode'] == OPCODE_ERROR: - packet_buff += struct.pack("!H", packet['error_code']) - packet_buff += packet['error_msg'] + "\x00" - - elif packet['opcode'] == OPCODE_OACK: - for k, v in packet['options'].iteritems(): - packet_buff += "%s\x00%s\x00" % (k, v) - - return packet_buff diff --git a/Tribler/Core/TFTP/session.py b/Tribler/Core/TFTP/session.py deleted file mode 100644 index 1f94a93b802..00000000000 --- a/Tribler/Core/TFTP/session.py +++ /dev/null @@ -1,52 +0,0 @@ -from time import time - - -# default packet data size -DEFAULT_BLOCK_SIZE = 512 - -# default timeout and maximum retries -DEFAULT_TIMEOUT = 2 - - -class Session(object): - - def __init__(self, is_client, session_id, address, request, file_name, file_data, file_size, checksum, - extra_info=None, block_size=DEFAULT_BLOCK_SIZE, timeout=DEFAULT_TIMEOUT, - success_callback=None, failure_callback=None): - self.is_client = is_client - self.session_id = session_id - self.address = address - self.request = request - self.file_name = file_name - self.file_data = file_data - self.file_size = file_size - self.checksum = checksum - - self.extra_info = extra_info - - self.block_number = 0 - self.block_size = block_size - self.timeout = timeout - self.success_callback = success_callback - self.failure_callback = failure_callback - - self.last_contact_time = time() - self.last_received_packet = None - self.last_sent_packet = None - self.is_waiting_for_last_ack = False - - self.retries = 0 - - self.is_done = False - self.is_failed = False - - self.next_func = None - - def __str__(self): - type_str = "C" if self.is_client else "S" - return "TFTP[%s %s %s:%s][%s]" % (self.session_id, type_str, self.address[0], self.address[1], - self.file_name.encode('utf8')) - - def __unicode__(self): - type_str = u"C" if self.is_client else u"S" - return u"TFTP[%s %s %s:%s][%s]" % (self.session_id, type_str, self.address[0], self.address[1], self.file_name) diff --git a/Tribler/Core/TorrentChecker/session.py b/Tribler/Core/TorrentChecker/session.py index a032f5afa3a..98f3afb1d03 100644 --- a/Tribler/Core/TorrentChecker/session.py +++ b/Tribler/Core/TorrentChecker/session.py @@ -4,8 +4,10 @@ import random import socket import struct +import sys import time from abc import ABCMeta, abstractmethod, abstractproperty +from binascii import hexlify from libtorrent import bdecode @@ -108,7 +110,7 @@ def can_add_request(self): :return: True or False. 
""" - #TODO(ardhi) : quickfix for etree.org can't handle multiple infohash in single call + # TODO(ardhi) : quickfix for etree.org can't handle multiple infohash in single call etree_condition = "etree" not in self.tracker_url return not self._is_initiated and len(self._infohash_list) < MAX_TRACKER_MULTI_SCRAPE and etree_condition @@ -327,7 +329,7 @@ def _process_scrape_response(self, body): leechers = incomplete # Store the information in the dictionary - response_list.append({'infohash': infohash.encode('hex'), 'seeders': seeders, 'leechers': leechers}) + response_list.append({'infohash': hexlify(infohash), 'seeders': seeders, 'leechers': leechers}) # remove this infohash in the infohash list of this session if infohash in unprocessed_infohash_list: @@ -340,7 +342,7 @@ def _process_scrape_response(self, body): # handle the infohashes with no result (seeders/leechers = 0/0) for infohash in unprocessed_infohash_list: - response_list.append({'infohash': infohash.encode('hex'), 'seeders': 0, 'leechers': 0}) + response_list.append({'infohash': hexlify(infohash), 'seeders': 0, 'leechers': 0}) self._is_finished = True if self.result_deferred and not self.result_deferred.called: @@ -597,8 +599,13 @@ def handle_connection_response(self, response): self.generate_transaction_id() # pack and send the message + if sys.version_info.major > 2: + infohash_list = self._infohash_list + else: + infohash_list = [str(infohash) for infohash in self._infohash_list] + fmt = '!qii' + ('20s' * len(self._infohash_list)) - message = struct.pack(fmt, self._connection_id, self.action, self.transaction_id, *self._infohash_list) + message = struct.pack(fmt, self._connection_id, self.action, self.transaction_id, *infohash_list) # Send the scrape message self.socket_mgr.send_request(message, self) @@ -645,7 +652,7 @@ def handle_scrape_response(self, response): # Store the information in the hash dict to be returned. # Sow complete as seeders. "complete: number of peers with the entire file, i.e. seeders (integer)" # - https://wiki.theory.org/BitTorrentSpecification#Tracker_.27scrape.27_Convention - response_list.append({'infohash': infohash.encode('hex'), 'seeders': complete, 'leechers': incomplete}) + response_list.append({'infohash': hexlify(infohash), 'seeders': complete, 'leechers': incomplete}) # close this socket and remove its transaction ID from the list self.remove_transaction_id() @@ -696,8 +703,9 @@ def connect_to_tracker(self): Fakely connects to a tracker. :return: A deferred with a callback containing an empty dictionary. 
""" + def on_metainfo_received(metainfo): - self.result_deferred.callback({'DHT': [{'infohash': self.infohash.encode('hex'), + self.result_deferred.callback({'DHT': [{'infohash': hexlify(self.infohash), 'seeders': metainfo['seeders'], 'leechers': metainfo['leechers']}]}) def on_metainfo_timeout(_): diff --git a/Tribler/Core/TorrentChecker/torrent_checker.py b/Tribler/Core/TorrentChecker/torrent_checker.py index 3328274c99b..7988db7cc53 100644 --- a/Tribler/Core/TorrentChecker/torrent_checker.py +++ b/Tribler/Core/TorrentChecker/torrent_checker.py @@ -1,19 +1,23 @@ +from __future__ import absolute_import + import logging import socket import time -from Tribler.Core.Utilities.utilities import is_valid_url from binascii import hexlify +from pony.orm import db_session + from twisted.internet import reactor -from twisted.internet.defer import DeferredList, CancelledError, fail, succeed, maybeDeferred +from twisted.internet.defer import CancelledError, DeferredList, fail, maybeDeferred, succeed from twisted.internet.error import ConnectingCancelledError, ConnectionLost from twisted.python.failure import Failure from twisted.web.client import HTTPConnectionPool -from Tribler.Core.TorrentChecker.session import create_tracker_session, FakeDHTSession, UdpSocketManager +from Tribler.Core.TorrentChecker.session import FakeDHTSession, UdpSocketManager, create_tracker_session from Tribler.Core.Utilities.tracker_utils import MalformedTrackerURLException -from Tribler.Core.simpledefs import NTFY_TORRENTS -from Tribler.community.popularity.repository import TYPE_TORRENT_HEALTH +from Tribler.Core.Utilities.utilities import is_valid_url +from Tribler.Core.simpledefs import NTFY_TORRENT, NTFY_UPDATE +from Tribler.pyipv8.ipv8.database import database_blob from Tribler.pyipv8.ipv8.taskmanager import TaskManager # some settings @@ -31,8 +35,6 @@ def __init__(self, session): self._logger = logging.getLogger(self.__class__.__name__) self.tribler_session = session - self._torrent_db = None - self._should_stop = False self._torrent_check_interval = DEFAULT_TORRENT_CHECK_INTERVAL @@ -49,7 +51,6 @@ def __init__(self, session): self.connection_pool = None def initialize(self): - self._torrent_db = self.tribler_session.open_dbhandler(NTFY_TORRENTS) self._reschedule_tracker_select() self.connection_pool = HTTPConnectionPool(reactor, False) self.socket_mgr = UdpSocketManager() @@ -99,16 +100,8 @@ def _reschedule_tracker_select(self): """ Changes the tracker selection interval dynamically and schedules the task. 
""" - # dynamically change the interval: update at least every 2h - num_torrents = self._torrent_db.getNumberCollectedTorrents() - - tracker_select_interval = min(max(7200 / num_torrents, 10), 100) if num_torrents \ - else DEFAULT_TORRENT_SELECTION_INTERVAL - - self._logger.debug(u"tracker selection interval changed to %s", tracker_select_interval) - self.register_task(u"torrent_checker_tracker_selection", - reactor.callLater(tracker_select_interval, self._task_select_tracker)) + reactor.callLater(DEFAULT_TORRENT_SELECTION_INTERVAL, self._task_select_tracker)) def _task_select_tracker(self): """ @@ -127,10 +120,18 @@ def _task_select_tracker(self): self._logger.debug(u"Start selecting torrents on tracker %s.", tracker_url) # get the torrents that should be checked - infohashes = self._torrent_db.getTorrentsOnTracker(tracker_url, int(time.time())) + infohashes = [] + with db_session: + tracker = self.tribler_session.lm.mds.TrackerState.get(url=tracker_url) + if tracker: + torrents = tracker.torrents + for torrent in torrents: + dynamic_interval = self._torrent_check_retry_interval * (2 ** tracker.failures) + if torrent.last_check + dynamic_interval < int(time.time()): + infohashes.append(torrent.infohash) if len(infohashes) == 0: - # We have not torrent to recheck for this tracker. Still update the last_check for this tracker. + # We have no torrent to recheck for this tracker. Still update the last_check for this tracker. self._logger.info("No torrent to check for tracker %s", tracker_url) self.update_tracker_info(tracker_url, True) return succeed(None) @@ -147,7 +148,7 @@ def _task_select_tracker(self): session.add_infohash(infohash) self._logger.info(u"Selected %d new torrents to check on tracker: %s", len(infohashes), tracker_url) - return session.connect_to_tracker().addCallbacks(*self.get_callbacks_for_session(session))\ + return session.connect_to_tracker().addCallbacks(*self.get_callbacks_for_session(session)) \ .addErrback(lambda _: None) def get_callbacks_for_session(self, session): @@ -171,78 +172,81 @@ def remove_tracker(self, tracker_url): def update_tracker_info(self, tracker_url, value): self.tribler_session.lm.tracker_manager.update_tracker_info(tracker_url, value) + @db_session def get_valid_trackers_of_torrent(self, torrent_id): """ Get a set of valid trackers for torrent. 
Also remove any invalid torrent.""" - db_tracker_list = self._torrent_db.getTrackerListByTorrentID(torrent_id) - return set([tracker for tracker in db_tracker_list if is_valid_url(tracker) or tracker == u'DHT']) + db_tracker_list = self.tribler_session.lm.mds.TorrentState.get(infohash=database_blob(torrent_id)).trackers + return set([str(tracker.url) for tracker in db_tracker_list if is_valid_url(str(tracker.url))]) def on_gui_request_completed(self, infohash, result): final_response = {} - torrent_update_dict = {'infohash': infohash, 'seeders': 0, 'leechers': 0, 'last_check': time.time()} - for success, response in result: + torrent_update_dict = {'infohash': infohash, 'seeders': 0, 'leechers': 0, 'last_check': int(time.time())} + for success, response in reversed(result): if not success and isinstance(response, Failure): final_response[response.tracker_url] = {'error': response.getErrorMessage()} continue + final_response[response.keys()[0]] = response[response.keys()[0]][0] - response_seeders = response[response.keys()[0]][0]['seeders'] - response_leechers = response[response.keys()[0]][0]['leechers'] - if response_seeders > torrent_update_dict['seeders'] or \ - (response_seeders == torrent_update_dict['seeders'] - and response_leechers < torrent_update_dict['leechers']): - torrent_update_dict['seeders'] = response_seeders - torrent_update_dict['leechers'] = response_leechers + s = response[response.keys()[0]][0]['seeders'] + l = response[response.keys()[0]][0]['leechers'] - final_response[response.keys()[0]] = response[response.keys()[0]][0] + # More leeches is better, because undefined peers are marked as leeches in DHT + if s > torrent_update_dict['seeders'] or \ + (s == torrent_update_dict['seeders'] and l > torrent_update_dict['leechers']): + torrent_update_dict['seeders'] = s + torrent_update_dict['leechers'] = l self._update_torrent_result(torrent_update_dict) # Add this result to popularity community to publish to subscribers self.publish_torrent_result(torrent_update_dict) + # TODO: DRY! Stop doing lots of formats, just make REST endpoint automatically encode binary data to hex! + self.tribler_session.notifier.notify(NTFY_TORRENT, NTFY_UPDATE, infohash, + {"num_seeders": torrent_update_dict["seeders"], + "num_leechers": torrent_update_dict["leechers"], + "last_tracker_check": torrent_update_dict["last_check"], + "health": "updated"}) return final_response - def add_gui_request(self, infohash, timeout=20, scrape_now=False): + def add_gui_request(self, infohash, timeout=20, scrape_now=False, notify=False): """ Public API for adding a GUI request. :param infohash: Torrent infohash. :param timeout: The timeout to use in the performed requests :param scrape_now: Flag whether we want to force scraping immediately """ - result = self._torrent_db.getTorrent(infohash, (u'torrent_id', u'last_tracker_check', - u'num_seeders', u'num_leechers'), False) - if result is None: - self._logger.warn(u"torrent info not found, skip. infohash: %s", hexlify(infohash)) - return fail(Failure(RuntimeError("Torrent not found"))) - - torrent_id = result[u'torrent_id'] - last_check = result[u'last_tracker_check'] - time_diff = time.time() - last_check - if time_diff < self._torrent_check_interval and not scrape_now: - self._logger.debug(u"time interval too short, skip GUI request. 
infohash: %s", hexlify(infohash)) - return succeed({"db": {"seeders": result[u'num_seeders'], - "leechers": result[u'num_leechers'], "infohash": infohash.encode('hex')}}) - - # get torrent's tracker list from DB - tracker_set = self.get_valid_trackers_of_torrent(torrent_id) - if not tracker_set: - self._logger.warn(u"no trackers, skip GUI request. infohash: %s", hexlify(infohash)) - # TODO: add code to handle torrents with no tracker - return fail(Failure(RuntimeError("No trackers available for this torrent"))) + with db_session: + result = self.tribler_session.lm.mds.TorrentState.get(infohash=database_blob(infohash)) + if not result: + self._logger.warn(u"torrent info not found, skip. infohash: %s", hexlify(infohash)) + return fail(Failure(RuntimeError("Torrent not found"))) + + torrent_id = str(result.infohash) + last_check = result.last_check + time_diff = time.time() - last_check + if time_diff < self._torrent_check_interval and not scrape_now: + self._logger.debug(u"time interval too short, skip GUI request. infohash: %s", hexlify(infohash)) + return succeed({"db": {"seeders": result.seeders, + "leechers": result.leechers, + "infohash": hexlify(infohash)}}) + + # get torrent's tracker list from DB + tracker_set = self.get_valid_trackers_of_torrent(torrent_id) deferred_list = [] for tracker_url in tracker_set: - if tracker_url == u'DHT': - # Create a (fake) DHT session for the lookup - session = FakeDHTSession(self.tribler_session, infohash, timeout) - self._session_list['DHT'].append(session) - deferred_list.append(session.connect_to_tracker(). - addCallbacks(*self.get_callbacks_for_session(session))) - elif tracker_url != u'no-DHT': - session = self._create_session_for_request(tracker_url, timeout=timeout) - session.add_infohash(infohash) - deferred_list.append(session.connect_to_tracker(). - addCallbacks(*self.get_callbacks_for_session(session))) + session = self._create_session_for_request(tracker_url, timeout=timeout) + session.add_infohash(infohash) + deferred_list.append(session.connect_to_tracker(). + addCallbacks(*self.get_callbacks_for_session(session))) + + # Create a (fake) DHT session for the lookup + session = FakeDHTSession(self.tribler_session, infohash, timeout) + self._session_list['DHT'].append(session) + deferred_list.append(session.connect_to_tracker(). 
+ addCallbacks(*self.get_callbacks_for_session(session))) return DeferredList(deferred_list, consumeErrors=True).addCallback( lambda res: self.on_gui_request_completed(infohash, res)) @@ -301,32 +305,17 @@ def _update_torrent_result(self, response): leechers = response['leechers'] last_check = response['last_check'] - # the torrent status logic, TODO: do it in other way self._logger.debug(u"Update result %s/%s for %s", seeders, leechers, hexlify(infohash)) - result = self._torrent_db.getTorrent(infohash, (u'torrent_id', u'tracker_check_retries'), include_mypref=False) - torrent_id = result[u'torrent_id'] - retries = result[u'tracker_check_retries'] - - # the status logic - if seeders > 0: - retries = 0 - status = u'good' - else: - retries += 1 - if retries < self._max_torrent_check_retries: - status = u'unknown' - else: - status = u'dead' - # prevent retries from exceeding the maximum - retries = self._max_torrent_check_retries - - # calculate next check time: + * (2 ^ ) - next_check = last_check + self._torrent_check_retry_interval * (2 ** retries) - - self._torrent_db.updateTorrentCheckResult(torrent_id, - infohash, seeders, leechers, last_check, next_check, - status, retries) + with db_session: + # Update torrent state + torrent = self.tribler_session.lm.mds.TorrentState.get(infohash=database_blob(infohash)) + if not torrent: + # Something is wrong, there should exist a corresponding TorrentState entry in the DB. + return + torrent.seeders = seeders + torrent.leechers = leechers + torrent.last_check = last_check def publish_torrent_result(self, response): if response['seeders'] == 0: @@ -334,6 +323,6 @@ def publish_torrent_result(self, response): return content = (response['infohash'], response['seeders'], response['leechers'], response['last_check']) if self.tribler_session.lm.popularity_community: - self.tribler_session.lm.popularity_community.queue_content(TYPE_TORRENT_HEALTH, content) + self.tribler_session.lm.popularity_community.queue_content(content) else: self._logger.info("Popular community not available to publish torrent checker result") diff --git a/Tribler/Core/Upgrade/config_converter.py b/Tribler/Core/Upgrade/config_converter.py index efdce05cf62..fd97e8795f6 100644 --- a/Tribler/Core/Upgrade/config_converter.py +++ b/Tribler/Core/Upgrade/config_converter.py @@ -1,11 +1,11 @@ from __future__ import absolute_import import ast -import os import logging +import os from glob import iglob -from six.moves.configparser import DuplicateSectionError, MissingSectionHeaderError, NoSectionError, ParsingError, \ - RawConfigParser + +from six.moves.configparser import DuplicateSectionError, MissingSectionHeaderError, NoSectionError, RawConfigParser from Tribler.Core.Config.tribler_config import TriblerConfig from Tribler.Core.exceptions import InvalidConfigException @@ -131,20 +131,8 @@ def add_libtribler_config(new_config, old_config): temp_config = config.copy() if section == "general" and name == "state_dir": temp_config.set_state_dir(value) - elif section == "general" and name == "eckeypairfilename": - temp_config.set_permid_keypair_filename(value) - elif section == "general" and name == "megacache": - temp_config.set_megacache_enabled(value) elif section == "general" and name == "log_dir": temp_config.set_log_dir(value) - elif section == "allchannel_community" and name == "enabled": - temp_config.set_channel_search_enabled(value) - elif section == "channel_community" and name == "enabled": - temp_config.set_channel_community_enabled(value) - elif section == 
"preview_channel_community" and name == "enabled": - temp_config.set_preview_channel_community_enabled(value) - elif section == "search_community" and name == "enabled": - temp_config.set_torrent_search_enabled(value) elif section == "tunnel_community" and name == "enabled": temp_config.set_tunnel_community_enabled(value) elif section == "tunnel_community" and name == "socks5_listen_ports": @@ -154,26 +142,8 @@ def add_libtribler_config(new_config, old_config): temp_config.set_tunnel_community_exitnode_enabled(value) elif section == "general" and name == "ec_keypair_filename_multichain": temp_config.set_trustchain_keypair_filename(value) - elif section == "metadata" and name == "enabled": - temp_config.set_metadata_enabled(value) - elif section == "metadata" and name == "store_dir": - temp_config.set_metadata_store_dir(value) - elif section == "mainline_dht" and name == "enabled": - temp_config.set_mainline_dht_enabled(value) - elif section == "mainline_dht" and name == "mainline_dht_port": - temp_config.set_mainline_dht_port(value) elif section == "torrent_checking" and name == "enabled": temp_config.set_torrent_checking_enabled(value) - elif section == "torrent_store" and name == "enabled": - temp_config.set_torrent_store_enabled(value) - elif section == "torrent_store" and name == "dir": - temp_config.set_torrent_store_dir(value) - elif section == "torrent_collecting" and name == "enabled": - temp_config.set_torrent_collecting_enabled(value) - elif section == "torrent_collecting" and name == "torrent_collecting_max_torrents": - temp_config.set_torrent_collecting_max_torrents(value) - elif section == "torrent_collecting" and name == "torrent_collecting_dir": - temp_config.set_torrent_collecting_dir(value) elif section == "libtorrent" and name == "lt_proxytype": temp_config.config["libtorrent"]["proxy_type"] = value elif section == "libtorrent" and name == "lt_proxyserver": @@ -198,10 +168,6 @@ def add_libtribler_config(new_config, old_config): temp_config.config["libtorrent"]["anon_proxy_server_ports"] = [str(port) for port in value[1]] elif section == "libtorrent" and name == "anon_proxyauth": temp_config.config["libtorrent"]["anon_proxy_auth"] = value - elif section == "dispersy" and name == "enabled": - temp_config.set_dispersy_enabled(value) - elif section == "dispersy" and name == "dispersy_port": - temp_config.set_dispersy_port(value) elif section == "video" and name == "enabled": temp_config.set_video_server_enabled(value) elif section == "video" and name == "port": diff --git a/Tribler/Core/Upgrade/db72_to_pony.py b/Tribler/Core/Upgrade/db72_to_pony.py new file mode 100644 index 00000000000..5d0c594796e --- /dev/null +++ b/Tribler/Core/Upgrade/db72_to_pony.py @@ -0,0 +1,417 @@ +from __future__ import absolute_import, division + +import base64 +import datetime +import logging +import os +import sqlite3 +from binascii import unhexlify + +from pony import orm +from pony.orm import db_session +from six import text_type + +from Tribler.Core.Modules.MetadataStore.OrmBindings.channel_node import LEGACY_ENTRY, NEW +from Tribler.Core.Modules.MetadataStore.serialization import REGULAR_TORRENT +from Tribler.Core.Utilities.tracker_utils import get_uniformed_tracker_url +from Tribler.pyipv8.ipv8.database import database_blob + +BATCH_SIZE = 10000 + +DISCOVERED_CONVERSION_STARTED = "discovered_conversion_started" +CHANNELS_CONVERSION_STARTED = "channels_conversion_started" +TRACKERS_CONVERSION_STARTED = "trackers_conversion_started" +PERSONAL_CONVERSION_STARTED = "personal_conversion_started" 
+CONVERSION_FINISHED = "conversion_finished" +CONVERSION_FROM_72 = "conversion_from_72" + + +def dispesy_cid_to_pk(dispersy_cid): + return database_blob(unhexlify(("%X" % dispersy_cid).zfill(128))) + + +def pseudo_signature(): + return database_blob(os.urandom(32)) + + +def final_timestamp(): + return 1 << 62 + + +class DispersyToPonyMigration(object): + select_channels_sql = "SELECT id, name, dispersy_cid, modified, nr_torrents, nr_favorite, nr_spam " \ + + "FROM Channels " \ + + "WHERE nr_torrents >= 3 " \ + + "AND name not NULL;" + + select_trackers_sql = "SELECT tracker_id, tracker, last_check, failures, is_alive FROM TrackerInfo" + + select_full = "SELECT" \ + " (SELECT ti.tracker FROM TorrentTrackerMapping ttm, TrackerInfo ti WHERE " \ + "ttm.torrent_id == t.torrent_id AND ttm.tracker_id == ti.tracker_id AND ti.tracker != 'DHT' " \ + "AND ti.tracker != 'http://retracker.local/announce' ORDER BY ti.is_alive ASC, ti.failures DESC, " \ + "ti.last_check ASC), ct.channel_id, ct.name, t.infohash, t.length, t.creation_date, t.torrent_id, " \ + "t.category, t.num_seeders, t.num_leechers, t.last_tracker_check " \ + "FROM _ChannelTorrents ct, Torrent t WHERE ct.name NOT NULL and t.length > 0 AND " \ + "t.category NOT NULL AND ct.deleted_at IS NULL AND t.torrent_id == ct.torrent_id AND " \ + "t.infohash NOT NULL " + + select_torrents_sql = " FROM _ChannelTorrents ct, Torrent t WHERE " + \ + "ct.name NOT NULL and t.length>0 AND t.category NOT NULL AND ct.deleted_at IS NULL " + \ + " AND t.torrent_id == ct.torrent_id AND t.infohash NOT NULL " + + def __init__(self, tribler_db, notifier_callback=None, logger=None): + self._logger = logger or logging.getLogger(self.__class__.__name__) + self.notifier_callback = notifier_callback + self.tribler_db = tribler_db + self.mds = None + + self.personal_channel_id = None + self.personal_channel_title = None + + def initialize(self, mds): + self.mds = mds + try: + self.personal_channel_id, self.personal_channel_title = self.get_personal_channel_id_title() + self.personal_channel_title = self.personal_channel_title[:200] # limit the title size + except: + self._logger.info("No personal channel found") + + def get_old_channels(self): + connection = sqlite3.connect(self.tribler_db) + cursor = connection.cursor() + + channels = [] + for id_, name, dispersy_cid, modified, nr_torrents, nr_favorite, nr_spam in cursor.execute( + self.select_channels_sql): + if nr_torrents and nr_torrents > 0: + channels.append({"id_": 0, + "infohash": database_blob(os.urandom(20)), + "title": name or '', + "public_key": dispesy_cid_to_pk(id_), + "timestamp": final_timestamp(), + "votes": int(nr_favorite or 0), + # "xxx": float(nr_spam or 0), + "origin_id": 0, + "signature": pseudo_signature(), + "skip_key_check": True, + "size": 0, + "local_version": final_timestamp(), + "subscribed": False, + "status": LEGACY_ENTRY, + "num_entries": int(nr_torrents or 0)}) + return channels + + def get_personal_channel_id_title(self): + connection = sqlite3.connect(self.tribler_db) + cursor = connection.cursor() + cursor.execute('SELECT id,name FROM Channels WHERE peer_id ISNULL LIMIT 1') + return cursor.fetchone() + + def get_old_trackers(self): + connection = sqlite3.connect(self.tribler_db) + cursor = connection.cursor() + + trackers = {} + for tracker_id, tracker, last_check, failures, is_alive in cursor.execute(self.select_trackers_sql): + try: + tracker_url_sanitized = get_uniformed_tracker_url(tracker) + if not tracker_url_sanitized: + continue + except: + # Skip malformed trackers + 
+    def get_old_trackers(self):
+        connection = sqlite3.connect(self.tribler_db)
+        cursor = connection.cursor()
+
+        trackers = {}
+        for tracker_id, tracker, last_check, failures, is_alive in cursor.execute(self.select_trackers_sql):
+            try:
+                tracker_url_sanitized = get_uniformed_tracker_url(tracker)
+                if not tracker_url_sanitized:
+                    continue
+            except:
+                # Skip malformed trackers
+                continue
+            trackers[tracker_url_sanitized] = ({
+                "last_check": last_check,
+                "failures": failures,
+                "alive": is_alive})
+        return trackers
+
+    def get_old_torrents_count(self, personal_channel_only=False):
+        personal_channel_filter = ""
+        if self.personal_channel_id:
+            personal_channel_filter = " AND ct.channel_id " + \
+                                      (" == " if personal_channel_only else " != ") + \
+                                      (" %i " % self.personal_channel_id)
+
+        connection = sqlite3.connect(self.tribler_db)
+        cursor = connection.cursor()
+        cursor.execute("SELECT COUNT(*) FROM (SELECT t.torrent_id " + self.select_torrents_sql + \
+                       personal_channel_filter + "group by infohash )")
+        return cursor.fetchone()[0]
+
+    def get_personal_channel_torrents_count(self):
+        connection = sqlite3.connect(self.tribler_db)
+        cursor = connection.cursor()
+        cursor.execute("SELECT COUNT(*) FROM (SELECT t.torrent_id " + self.select_torrents_sql + \
+                       (" AND ct.channel_id == %s " % self.personal_channel_id) + \
+                       " group by infohash )")
+        return cursor.fetchone()[0]
+
+    def get_old_torrents(self, personal_channel_only=False, batch_size=BATCH_SIZE, offset=0,
+                         sign=False):
+        connection = sqlite3.connect(self.tribler_db)
+        cursor = connection.cursor()
+
+        personal_channel_filter = ""
+        if self.personal_channel_id:
+            personal_channel_filter = " AND ct.channel_id " + \
+                                      (" == " if personal_channel_only else " != ") + \
+                                      (" %i " % self.personal_channel_id)
+
+        torrents = []
+        for tracker_url, channel_id, name, infohash, length, creation_date, torrent_id, category, num_seeders, \
+                num_leechers, last_tracker_check in \
+                cursor.execute(
+                    self.select_full + personal_channel_filter + " group by infohash" +
+                    (" LIMIT " + str(batch_size) + " OFFSET " + str(offset))):
+            # check if name is valid unicode data
+            try:
+                name = text_type(name)
+            except UnicodeDecodeError:
+                continue
+
+            try:
+                # decode the infohash once, then validate its length
+                infohash = base64.decodestring(infohash)
+                if len(infohash) != 20:
+                    continue
+
+                torrent_dict = {
+                    "status": NEW,
+                    "infohash": infohash,
+                    "size": int(length or 0),
+                    "torrent_date": datetime.datetime.utcfromtimestamp(creation_date or 0),
+                    "title": name or '',
+                    "tags": category or '',
+                    "id_": torrent_id or 0,
+                    "origin_id": 0,
+                    "tracker_info": tracker_url or '',
+                    "xxx": int(category == u'xxx')}
+                if not sign:
+                    torrent_dict.update({
+                        "timestamp": int(torrent_id or 0),
+                        "status": LEGACY_ENTRY,
+                        "public_key": dispersy_cid_to_pk(channel_id),
+                        "signature": pseudo_signature(),
+                        "skip_key_check": True})
+
+                health_dict = {
+                    "seeders": int(num_seeders or 0),
+                    "leechers": int(num_leechers or 0),
+                    "last_check": int(last_tracker_check or 0)}
+                torrents.append((torrent_dict, health_dict))
+            except:
+                continue
+
+        return torrents
+
+    def convert_personal_channel(self):
+        # Reflect conversion state
+        with db_session:
+            v = self.mds.MiscData.get(name=CONVERSION_FROM_72)
+            if v:
+                if v.value == PERSONAL_CONVERSION_STARTED:
+                    # Just drop the entries from the previous try
+                    my_channel = self.mds.ChannelMetadata.get_my_channel()
+                    for g in my_channel.contents_list:
+                        g.delete()
+                    my_channel.delete()
+                elif v.value == CHANNELS_CONVERSION_STARTED:
+                    v.set(value=PERSONAL_CONVERSION_STARTED)
+                else:
+                    return
+            else:
+                self.mds.MiscData(name=CONVERSION_FROM_72, value=PERSONAL_CONVERSION_STARTED)
+
+        if not self.personal_channel_id or not self.get_personal_channel_torrents_count():
+            return
+
+        # Make sure there is nothing left of old personal channel, just in case
+        if self.mds.ChannelMetadata.get_my_channel():
+            return
+
+        old_torrents =
self.get_old_torrents(personal_channel_only=True, sign=True) + with db_session: + my_channel = self.mds.ChannelMetadata.create_channel(title=self.personal_channel_title, description='') + for (torrent, _) in old_torrents: + try: + md = self.mds.TorrentMetadata(**torrent) + md.parents.add(my_channel) + except: + continue + my_channel.commit_channel_torrent() + + def convert_discovered_torrents(self): + offset = 0 + # Reflect conversion state + with db_session: + v = self.mds.MiscData.get(name=CONVERSION_FROM_72) + if v: + offset = orm.count( + g for g in self.mds.TorrentMetadata if + g.status == LEGACY_ENTRY and g.metadata_type == REGULAR_TORRENT) + v.set(value=DISCOVERED_CONVERSION_STARTED) + else: + self.mds.MiscData(name=CONVERSION_FROM_72, value=DISCOVERED_CONVERSION_STARTED) + + start = datetime.datetime.utcnow() + x = 0 + offset + batch_size = 1000 + total_to_convert = self.get_old_torrents_count() + + while True: + old_torrents = self.get_old_torrents(batch_size=batch_size, offset=x) + if not old_torrents: + break + with db_session: + for (t, _) in old_torrents: + try: + self.mds.TorrentMetadata(**t) + except: + continue + + x += batch_size + if self.notifier_callback: + self.notifier_callback("%i/%i" % (x, total_to_convert)) + self._logger.info("Converted old torrents: %i/%i" % (x, total_to_convert)) + + stop = datetime.datetime.utcnow() + elapsed = (stop - start).total_seconds() + + if self.notifier_callback: + self.notifier_callback("%i entries converted in %i seconds (%i e/s)" % (x, int(elapsed), int(x / elapsed))) + + def convert_discovered_channels(self): + # Reflect conversion state + with db_session: + v = self.mds.MiscData.get(name=CONVERSION_FROM_72) + if v: + if v.value == CHANNELS_CONVERSION_STARTED: + # Just drop the entries from the previous try + orm.delete(g for g in self.mds.ChannelMetadata if g.status == LEGACY_ENTRY) + else: + v.set(value=CHANNELS_CONVERSION_STARTED) + else: + self.mds.MiscData(name=CONVERSION_FROM_72, value=CHANNELS_CONVERSION_STARTED) + + with db_session: + old_channels = self.get_old_channels() + for c in old_channels: + try: + self.mds.ChannelMetadata(**c) + except: + continue + + with db_session: + for c in self.mds.ChannelMetadata.select()[:]: + c.num_entries = c.contents_len + if c.num_entries == 0: + c.delete() + + def update_trackers_info(self): + old_trackers = self.get_old_trackers() + with db_session: + trackers = self.mds.TrackerState.select()[:] + for tracker in trackers: + if tracker.url in old_trackers: + tracker.set(**old_trackers[tracker.url]) + + def mark_conversion_finished(self): + with db_session: + v = self.mds.MiscData.get(name=CONVERSION_FROM_72) + if v: + v.set(value=CONVERSION_FINISHED) + else: + self.mds.MiscData(name=CONVERSION_FROM_72, value=CONVERSION_FINISHED) + + def do_migration(self): + self.convert_discovered_torrents() + self.convert_discovered_channels() + self.convert_personal_channel() + self.update_trackers_info() + self.mark_conversion_finished() + + +def old_db_version_ok(old_database_path): + # Check the old DB version + connection = sqlite3.connect(old_database_path) + with connection: + cursor = connection.cursor() + cursor.execute('SELECT value FROM MyInfo WHERE entry == "version"') + version = int(cursor.fetchone()[0]) + if version == 29: + return True + return False + + +def cleanup_pony_experimental_db(new_database_path): + # Check for the old experimental version database + # ACHTUNG!!! NUCLEAR OPTION!!! DO NOT MESS WITH IT!!! 
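Aside: together, the `MiscData` markers and the "nuclear option" below give two ways to classify an existing metadata.db. A database without a `MiscData` table is a leftover from the experimental Pony build and gets deleted; otherwise the `conversion_from_72` value records how far an interrupted conversion got. A standalone sketch of both checks (the `inspect_metadata_db` name is ours; the table and key names match the patch); the new file continues below:

```python
import sqlite3

def inspect_metadata_db(path):
    # Returns (is_experimental, conversion_marker) for a metadata.db file.
    connection = sqlite3.connect(path)
    cursor = connection.cursor()
    cursor.execute("SELECT name FROM sqlite_master WHERE type = 'table' AND name = 'MiscData'")
    if cursor.fetchone() is None:
        connection.close()
        return True, None  # experimental schema: no MiscData table at all
    cursor.execute('SELECT value FROM MiscData WHERE name == "conversion_from_72"')
    row = cursor.fetchone()
    connection.close()
    return False, (row[0] if row else None)  # marker is None on a fresh DB
```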
+    connection = sqlite3.connect(new_database_path)
+    with connection:
+        cursor = connection.cursor()
+        cursor.execute("SELECT name FROM sqlite_master WHERE type = 'table' AND name = 'MiscData'")
+        # No MiscData table means we're looking at the old experimental version database. Delete it.
+        delete_old_pony_db = cursor.fetchone() is None
+    connection.close()
+    if delete_old_pony_db:
+        os.unlink(new_database_path)
+
+
+def new_db_version_ok(new_database_path):
+    # Let's check if we converted all/some entries before
+    connection = sqlite3.connect(new_database_path)
+    with connection:
+        cursor = connection.cursor()
+        cursor.execute('SELECT value FROM MiscData WHERE name == "db_version"')
+        version = int(cursor.fetchone()[0])
+        if version != 0:
+            return False
+    return True
+
+
+def already_upgraded(new_database_path):
+    connection = sqlite3.connect(new_database_path)
+    with connection:
+        # Check if already upgraded
+        cursor = connection.cursor()
+        cursor.execute('SELECT value FROM MiscData WHERE name == "%s"' % CONVERSION_FROM_72)
+        result = cursor.fetchone()
+        if result:
+            state = result[0]
+            if state == CONVERSION_FINISHED:
+                return True
+    return False
+
+
+def should_upgrade(old_database_path, new_database_path, logger=None):
+    """
+    Decide whether data can be migrated from the old DB to Pony.
+    :return: False if the upgrade is unnecessary, impossible, or something goes wrong
+    """
+    if not os.path.exists(old_database_path):
+        # no old DB to upgrade
+        return False
+
+    try:
+        if not old_db_version_ok(old_database_path):
+            return False
+    except:
+        logger.error("Can't open the old tribler.sdb file")
+        return False
+
+    if os.path.exists(new_database_path):
+        try:
+            cleanup_pony_experimental_db(new_database_path)
+            if not new_db_version_ok(new_database_path):
+                return False
+            if already_upgraded(new_database_path):
+                return False
+        except:
+            logger.error("Error while trying to open Pony DB file %s", new_database_path)
+            return False
+
+    return True
diff --git a/Tribler/Core/Upgrade/db_upgrader.py b/Tribler/Core/Upgrade/db_upgrader.py
deleted file mode 100644
index 4ab191c9839..00000000000
--- a/Tribler/Core/Upgrade/db_upgrader.py
+++ /dev/null
@@ -1,588 +0,0 @@
-"""
-Upgrades the database from one version to a newer version.
-
-Author(s): Elric Milon
-"""
-import logging
-import os
-from binascii import hexlify
-from shutil import rmtree
-from sqlite3 import Connection
-
-from Tribler.Core.CacheDB.SqliteCacheDBHandler import TorrentDBHandler
-from Tribler.Core.CacheDB.db_versions import LOWEST_SUPPORTED_DB_VERSION, LATEST_DB_VERSION
-from Tribler.Core.CacheDB.sqlitecachedb import str2bin
-from Tribler.Core.Category.Category import Category
-from Tribler.Core.TorrentDef import TorrentDef
-from Tribler.Core.Utilities.search_utils import split_into_keywords
-
-
-class VersionNoLongerSupportedError(Exception):
-    pass
-
-
-class DatabaseUpgradeError(Exception):
-    pass
-
-
-class DBUpgrader(object):
-
-    """
-    Migration tool for upgrading the collected torrent files/thumbnails on disk
-    structure from Tribler version 6.3 to 6.4.
- """ - - def __init__(self, session, db, torrent_store, status_update_func=None): - self._logger = logging.getLogger(self.__class__.__name__) - self.session = session - self.db = db - self.status_update_func = status_update_func if status_update_func else lambda _: None - self.torrent_store = torrent_store - - self.failed = True - self.torrent_collecting_dir = self.session.config.get_torrent_collecting_dir() - - def start_migrate(self): - """ - Starts migrating from Tribler 6.3 to 6.4. - """ - - if self.db.version == 17: - self._upgrade_17_to_18() - - # version 18 -> 22 - if self.db.version == 18: - self._upgrade_18_to_22() - - # version 22 -> 23 - if self.db.version == 22: - self._upgrade_22_to_23() - - # version 23 -> 24 (24 is a dummy version in which we only cleans up thumbnail files - if self.db.version == 23: - self._upgrade_23_to_24() - - # version 24 -> 25 (25 is also a dummy version, where the torrent files get migrated to a levedb based store. - if self.db.version == 24: - self._upgrade_24_to_25() - - # version 25 -> 26 - if self.db.version == 25: - self._upgrade_25_to_26() - - # version 26 -> 27 - if self.db.version == 26: - self._upgrade_26_to_27() - - # version 27 -> 28 - if self.db.version == 27: - self._upgrade_27_to_28() - - # version 28 -> 29 - if self.db.version == 28: - self._upgrade_28_to_29() - - # check if we managed to upgrade to the latest DB version. - if self.db.version == LATEST_DB_VERSION: - self.status_update_func(u"Database upgrade finished.") - self.failed = False - else: - if self.db.version < LOWEST_SUPPORTED_DB_VERSION: - msg = u"Database is too old %s < %s" % (self.db.version, LOWEST_SUPPORTED_DB_VERSION) - self.status_update_func(msg) - raise VersionNoLongerSupportedError(msg) - else: - msg = u"Database upgrade failed: %s -> %s" % (self.db.version, LATEST_DB_VERSION) - self.status_update_func(msg) - raise DatabaseUpgradeError(msg) - - def _purge_old_search_metadata_communities(self): - """ - Cleans up all SearchCommunity and MetadataCommunity stuff in dispersy database. - """ - db_path = os.path.join(self.session.config.get_state_dir(), u"sqlite", u"dispersy.db") - if not os.path.isfile(db_path): - return - - communities_to_delete = (u"SearchCommunity", u"MetadataCommunity", u"TunnelCommunity") - - connection = Connection(db_path) - cursor = connection.cursor() - - for community in communities_to_delete: - try: - result = list(cursor.execute(u"SELECT id FROM community WHERE classification == ?;", (community,))) - - for community_id, in result: - cursor.execute(u"DELETE FROM community WHERE id == ?;", (community_id,)) - cursor.execute(u"DELETE FROM meta_message WHERE community == ?;", (community_id,)) - cursor.execute(u"DELETE FROM sync WHERE community == ?;", (community_id,)) - except StopIteration: - continue - - cursor.close() - connection.commit() - connection.close() - - def _upgrade_17_to_18(self): - self.current_status = u"Upgrading database from v%s to v%s..." % (17, 18) - - self.db.execute(u""" -DROP TABLE IF EXISTS BarterCast; -DROP INDEX IF EXISTS bartercast_idx; -INSERT OR IGNORE INTO MetaDataTypes ('name') VALUES ('swift-thumbnails'); -INSERT OR IGNORE INTO MetaDataTypes ('name') VALUES ('video-info'); -""") - # update database version - self.db.write_version(18) - - def _upgrade_18_to_22(self): - self.current_status = u"Upgrading database from v%s to v%s..." 
% (18, 22) - - self.db.execute(u""" -DROP INDEX IF EXISTS Torrent_swift_hash_idx; - -DROP VIEW IF EXISTS Friend; -DROP VIEW IF EXISTS SuperPeer; - -ALTER TABLE Peer RENAME TO __Peer_tmp; -CREATE TABLE IF NOT EXISTS Peer ( - peer_id integer PRIMARY KEY AUTOINCREMENT NOT NULL, - permid text NOT NULL, - name text, - thumbnail text -); - -INSERT INTO Peer (peer_id, permid, name, thumbnail) SELECT peer_id, permid, name, thumbnail FROM __Peer_tmp; - -DROP TABLE IF EXISTS __Peer_tmp; - -ALTER TABLE Torrent ADD COLUMN last_tracker_check integer DEFAULT 0; -ALTER TABLE Torrent ADD COLUMN tracker_check_retries integer DEFAULT 0; -ALTER TABLE Torrent ADD COLUMN next_tracker_check integer DEFAULT 0; - -CREATE TABLE IF NOT EXISTS TrackerInfo ( - tracker_id integer PRIMARY KEY AUTOINCREMENT, - tracker text UNIQUE NOT NULL, - last_check numeric DEFAULT 0, - failures integer DEFAULT 0, - is_alive integer DEFAULT 1 -); - -CREATE TABLE IF NOT EXISTS TorrentTrackerMapping ( - torrent_id integer NOT NULL, - tracker_id integer NOT NULL, - FOREIGN KEY (torrent_id) REFERENCES Torrent(torrent_id), - FOREIGN KEY (tracker_id) REFERENCES TrackerInfo(tracker_id), - PRIMARY KEY (torrent_id, tracker_id) -); - -INSERT OR IGNORE INTO TrackerInfo (tracker) VALUES ('no-DHT'); -INSERT OR IGNORE INTO TrackerInfo (tracker) VALUES ('DHT'); - -DROP INDEX IF EXISTS torrent_biterm_phrase_idx; -DROP TABLE IF EXISTS TorrentBiTermPhrase; -DROP INDEX IF EXISTS termfrequency_freq_idx; -DROP TABLE IF EXISTS TermFrequency; -DROP INDEX IF EXISTS Torrent_insert_idx; -DROP INDEX IF EXISTS Torrent_info_roothash_idx; - -DROP TABLE IF EXISTS ClicklogSearch; -DROP INDEX IF EXISTS idx_search_term; -DROP INDEX IF EXISTS idx_search_torrent; -""") - # update database version - self.db.write_version(22) - - def _upgrade_22_to_23(self): - """ - Migrates the database to the new version. - """ - self.status_update_func(u"Upgrading database from v%s to v%s..." 
% (22, 23)) - - self.db.execute(u""" -DROP TABLE IF EXISTS BarterCast; -DROP INDEX IF EXISTS bartercast_idx; - -DROP INDEX IF EXISTS Torrent_swift_torrent_hash_idx; -""") - - try: - next(self.db.execute(u"SELECT * From sqlite_master WHERE name == '_tmp_Torrent' and type == 'table';")) - - except StopIteration: - # no _tmp_Torrent table, check if the current Torrent table is new - lines = [(0, u'torrent_id', u'integer', 1, None, 1), - (1, u'infohash', u'text', 1, None, 0), - (2, u'name', u'text', 0, None, 0), - (3, u'torrent_file_name', u'text', 0, None, 0), - (4, u'length', u'integer', 0, None, 0), - (5, u'creation_date', u'integer', 0, None, 0), - (6, u'num_files', u'integer', 0, None, 0), - (7, u'thumbnail', u'integer', 0, None, 0), - (8, u'insert_time', u'numeric', 0, None, 0), - (9, u'secret', u'integer', 0, None, 0), - (10, u'relevance', u'numeric', 0, u'0', 0), - (11, u'source_id', u'integer', 0, None, 0), - (12, u'category_id', u'integer', 0, None, 0), - (13, u'status_id', u'integer', 0, u'0', 0), - (14, u'num_seeders', u'integer', 0, None, 0), - (15, u'num_leechers', u'integer', 0, None, 0), - (16, u'comment', u'text', 0, None, 0), - (17, u'dispersy_id', u'integer', 0, None, 0), - (18, u'last_tracker_check', u'integer', 0, u'0', 0), - (19, u'tracker_check_retries', u'integer', 0, u'0', 0), - (20, u'next_tracker_check', u'integer', 0, u'0', 0) - ] - i = 0 - is_new = True - for line in self.db.execute(u"PRAGMA table_info(Torrent);"): - if line != lines[i]: - is_new = False - break - i += 1 - - if not is_new: - # create the temporary table - self.db.execute(u""" -CREATE TABLE IF NOT EXISTS _tmp_Torrent ( - torrent_id integer PRIMARY KEY AUTOINCREMENT NOT NULL, - infohash text NOT NULL, - name text, - torrent_file_name text, - length integer, - creation_date integer, - num_files integer, - thumbnail integer, - insert_time numeric, - secret integer, - relevance numeric DEFAULT 0, - source_id integer, - category_id integer, - status_id integer DEFAULT 0, - num_seeders integer, - num_leechers integer, - comment text, - dispersy_id integer, - last_tracker_check integer DEFAULT 0, - tracker_check_retries integer DEFAULT 0, - next_tracker_check integer DEFAULT 0 -); -""") - - # migrate Torrent table - keys = (u"torrent_id", u"infohash", u"name", u"torrent_file_name", u"length", u"creation_date", - u"num_files", u"thumbnail", u"insert_time", u"secret", u"relevance", u"source_id", - u"category_id", u"status_id", u"num_seeders", u"num_leechers", u"comment", u"dispersy_id", - u"last_tracker_check", u"tracker_check_retries", u"next_tracker_check") - - keys_str = u", ".join(keys) - values_str = u"?," * len(keys) - insert_stmt = u"INSERT INTO _tmp_Torrent(%s) VALUES(%s)" % (keys_str, values_str[:-1]) - current_count = 0 - - results = self.db.execute(u"SELECT %s FROM Torrent;" % keys_str) - new_torrents = [] - for torrent in results: - torrent_id, infohash, name, torrent_file_name = torrent[:4] - - filepath = os.path.join(self.torrent_collecting_dir, hexlify(str2bin(infohash)) + u".torrent") - - # Check if we have the actual .torrent - torrent_file_name = None - if os.path.exists(filepath): - torrent_file_name = filepath - tdef = TorrentDef.load(filepath) - # Use the name on the .torrent file instead of the one stored in the database. - name = tdef.get_name_as_unicode() or name - - new_torrents.append((torrent_id, infohash, name, torrent_file_name) + torrent[4:]) - - current_count += 1 - self.status_update_func(u"Upgrading database, %s records upgraded..." 
% current_count) - - self.status_update_func(u"All torrent entries processed, inserting in database...") - self.db.executemany(insert_stmt, new_torrents) - self.status_update_func(u"All updated torrent entries inserted.") - - self.db.execute(u""" -DROP VIEW IF EXISTS CollectedTorrent; -DROP TABLE IF EXISTS Torrent; -ALTER TABLE _tmp_Torrent RENAME TO Torrent; -CREATE VIEW CollectedTorrent AS SELECT * FROM Torrent WHERE torrent_file_name IS NOT NULL; -""") - - # cleanup metadata tables - self.db.execute(u""" -DROP TABLE IF EXISTS MetadataMessage; -DROP TABLE IF EXISTS MetadataData; - -CREATE TABLE IF NOT EXISTS MetadataMessage ( - message_id INTEGER PRIMARY KEY AUTOINCREMENT, - dispersy_id INTEGER NOT NULL, - this_global_time INTEGER NOT NULL, - this_mid TEXT NOT NULL, - infohash TEXT NOT NULL, - previous_mid TEXT, - previous_global_time INTEGER -); - -CREATE TABLE IF NOT EXISTS MetadataData ( - message_id INTEGER, - data_key TEXT NOT NULL, - data_value INTEGER, - FOREIGN KEY (message_id) REFERENCES MetadataMessage(message_id) ON DELETE CASCADE -); -""") - - # cleanup all SearchCommunity and MetadataCommunity data in dispersy database - self._purge_old_search_metadata_communities() - - # update database version - self.db.write_version(23) - - def _upgrade_23_to_24(self): - self.status_update_func(u"Upgrading database from v%s to v%s..." % (23, 24)) - - # remove all thumbnail files - for root, dirs, _ in os.walk(self.session.config.get_torrent_collecting_dir()): - for d in dirs: - dir_path = os.path.join(root, d) - rmtree(dir_path, ignore_errors=True) - break - - # update database version - self.db.write_version(24) - - def _upgrade_24_to_25(self): - self.status_update_func(u"Upgrading database from v%s to v%s..." % (24, 25)) - - # update database version (that one was easy :D) - self.db.write_version(25) - - def _upgrade_25_to_26(self): - self.status_update_func(u"Upgrading database from v%s to v%s..." 
% (25, 26)) - - # remove UserEventLog, TorrentSource, and TorrentCollecting tables - self.status_update_func(u"Removing unused tables...") - self.db.execute(u""" -DROP TABLE IF EXISTS UserEventLog; -DROP TABLE IF EXISTS TorrentSource; -DROP TABLE IF EXISTS TorrentCollecting; -""") - - # remove click_position, reranking_strategy, and progress from MyPreference - self.status_update_func(u"Updating MyPreference table...") - self.db.execute(u""" -CREATE TABLE _tmp_MyPreference ( - torrent_id integer PRIMARY KEY NOT NULL, - destination_path text NOT NULL, - creation_time integer NOT NULL -); - -INSERT INTO _tmp_MyPreference SELECT torrent_id, destination_path, creation_time FROM MyPreference; - -DROP TABLE MyPreference; -ALTER TABLE _tmp_MyPreference RENAME TO MyPreference; -""") - - # remove source_id and thumbnail columns from Torrent table - # replace torrent_file_name column with is_collected column - # change CollectedTorrent view - self.status_update_func(u"Updating Torrent table...") - self.db.execute(u""" -CREATE TABLE _tmp_Torrent ( - torrent_id integer PRIMARY KEY AUTOINCREMENT NOT NULL, - infohash text NOT NULL, - name text, - length integer, - creation_date integer, - num_files integer, - insert_time numeric, - secret integer, - relevance numeric DEFAULT 0, - category_id integer, - status_id integer DEFAULT 0, - num_seeders integer, - num_leechers integer, - comment text, - dispersy_id integer, - is_collected integer DEFAULT 0, - last_tracker_check integer DEFAULT 0, - tracker_check_retries integer DEFAULT 0, - next_tracker_check integer DEFAULT 0 -); - -UPDATE Torrent SET torrent_file_name = '1' WHERE torrent_file_name IS NOT NULL; -UPDATE Torrent SET torrent_file_name = '0' WHERE torrent_file_name IS NULL; - -INSERT INTO _tmp_Torrent -SELECT torrent_id, infohash, name, length, creation_date, num_files, insert_time, secret, relevance, category_id, -status_id, num_seeders, num_leechers, comment, dispersy_id, CAST(torrent_file_name AS INTEGER), -last_tracker_check, tracker_check_retries, next_tracker_check FROM Torrent; - -DROP VIEW IF EXISTS CollectedTorrent; -DROP TABLE Torrent; -ALTER TABLE _tmp_Torrent RENAME TO Torrent; - -CREATE VIEW CollectedTorrent AS SELECT * FROM Torrent WHERE is_collected == 1; -""") - - # update database version - self.db.write_version(26) - - def _upgrade_26_to_27(self): - self.status_update_func(u"Upgrading database from v%s to v%s..." 
% (26, 27)) - - # replace status_id and category_id in Torrent table with status and category - self.status_update_func(u"Updating Torrent table and removing unused tables...") - self.db.execute(u""" -CREATE TABLE _tmp_Torrent ( - torrent_id integer PRIMARY KEY AUTOINCREMENT NOT NULL, - infohash text NOT NULL, - name text, - length integer, - creation_date integer, - num_files integer, - insert_time numeric, - secret integer, - relevance numeric DEFAULT 0, - category text, - status text DEFAULT 'unknown', - num_seeders integer, - num_leechers integer, - comment text, - dispersy_id integer, - is_collected integer DEFAULT 0, - last_tracker_check integer DEFAULT 0, - tracker_check_retries integer DEFAULT 0, - next_tracker_check integer DEFAULT 0 -); - -INSERT INTO _tmp_Torrent -SELECT torrent_id, infohash, T.name, length, creation_date, num_files, insert_time, secret, relevance, C.name, TS.name, -num_seeders, num_leechers, comment, dispersy_id, is_collected, last_tracker_check, tracker_check_retries, -next_tracker_check -FROM Torrent AS T -LEFT JOIN Category AS C ON T.category_id == C.category_id -LEFT JOIN TorrentStatus AS TS ON T.status_id == TS.status_id; - -DROP VIEW IF EXISTS CollectedTorrent; -DROP TABLE Torrent; -ALTER TABLE _tmp_Torrent RENAME TO Torrent; -CREATE VIEW CollectedTorrent AS SELECT * FROM Torrent WHERE is_collected == 1; - -DROP TABLE Category; -DROP TABLE TorrentStatus; -""") - - # update database version - self.db.write_version(27) - - def _upgrade_27_to_28(self): - self.status_update_func(u"Upgrading database from v%s to v%s..." % (27, 28)) - - # remove old metadata stuff - self.status_update_func(u"Removing old metadata tables...") - self.db.execute(u""" -DROP TABLE IF EXISTS MetadataMessage; -DROP TABLE IF EXISTS MetadataData; -""") - # replace type_id with type in ChannelMetadata - self.db.execute(u""" -DROP TABLE IF EXISTS _ChannelMetaData_new; - -CREATE TABLE _ChannelMetaData_new ( - id integer PRIMARY KEY ASC, - dispersy_id integer NOT NULL, - channel_id integer NOT NULL, - peer_id integer, - type text NOT NULL, - value text NOT NULL, - prev_modification integer, - prev_global_time integer, - time_stamp integer NOT NULL, - inserted integer DEFAULT (strftime('%s','now')), - deleted_at integer, - UNIQUE (dispersy_id) -); - -INSERT INTO _ChannelMetaData_new -SELECT _ChannelMetaData.id, dispersy_id, channel_id, peer_id, MetadataTypes.name, value, prev_modification, prev_global_time, time_stamp, inserted, deleted_at -FROM _ChannelMetaData -LEFT JOIN MetadataTypes ON _ChannelMetaData.type_id == MetadataTypes.id; - -DROP VIEW IF EXISTS ChannelMetaData; -DROP TABLE IF EXISTS _ChannelMetaData; - -ALTER TABLE _ChannelMetaData_new RENAME TO _ChannelMetaData; -CREATE VIEW ChannelMetaData AS SELECT * FROM _ChannelMetaData WHERE deleted_at IS NULL; -DROP TABLE IF EXISTS MetaDataTypes; -""") - - # update database version - self.db.write_version(28) - - def _upgrade_28_to_29(self): - self.status_update_func(u"Upgrading FTS engine...") - - self.db.execute(u""" -DROP TABLE IF EXISTS FullTextIndex; -CREATE VIRTUAL TABLE FullTextIndex USING fts4(swarmname, filenames, fileextensions); - """) - self.db.commit_now() - - self.status_update_func(u"Reindexing torrents...") - self.reindex_torrents() - - # update database version - self.db.write_version(29) - - def reimport_torrents(self): - """Import all torrent files in the collected torrent dir, all the files already in the database will be ignored. 
- """ - self.status_update_func("Opening TorrentDBHandler...") - # TODO(emilon): That's a freakishly ugly hack. - torrent_db_handler = TorrentDBHandler(self.session) - torrent_db_handler.category = Category() - - # TODO(emilon): It would be nice to drop the corrupted torrent data from the store as a bonus. - self.status_update_func("Registering recovered torrents...") - try: - for infoshash_str, torrent_data in self.torrent_store.iteritems(): - self.status_update_func("> %s" % infoshash_str) - torrentdef = TorrentDef.load_from_memory(torrent_data) - if torrentdef.is_finalized(): - infohash = torrentdef.get_infohash() - if not torrent_db_handler.hasTorrent(infohash): - self.status_update_func(u"Registering recovered torrent: %s" % hexlify(infohash)) - torrent_db_handler._addTorrentToDB(torrentdef, extra_info={"filename": infoshash_str}) - finally: - torrent_db_handler.close() - self.db.commit_now() - return self.torrent_store.flush() - - def reindex_torrents(self): - """ - Reindex all torrents in the database. Required when upgrading to a newer FTS engine. - """ - results = self.db.fetchall("SELECT torrent_id, name FROM Torrent") - for torrent_result in results: - if torrent_result[1] is None: - continue - - swarmname = split_into_keywords(torrent_result[1]) - files_results = self.db.fetchall("SELECT path FROM TorrentFiles WHERE torrent_id = ?", (torrent_result[0],)) - filenames = "" - fileexts = "" - for file_result in files_results: - filename, ext = os.path.splitext(file_result[0]) - parts = split_into_keywords(filename) - filenames += " ".join(parts) + " " - fileexts += ext[1:] + " " - - self.db.execute_write(u"INSERT INTO FullTextIndex (rowid, swarmname, filenames, fileextensions)" - u" VALUES(?,?,?,?)", - (torrent_result[0], " ".join(swarmname), filenames[:-1], fileexts[:-1])) - - self.db.commit_now() diff --git a/Tribler/Core/Upgrade/pickle_converter.py b/Tribler/Core/Upgrade/pickle_converter.py deleted file mode 100644 index 47c32097384..00000000000 --- a/Tribler/Core/Upgrade/pickle_converter.py +++ /dev/null @@ -1,115 +0,0 @@ -from __future__ import absolute_import - -import glob -import os -import pickle -from six.moves.configparser import RawConfigParser - -from Tribler.Core.simpledefs import PERSISTENTSTATE_CURRENTVERSION - - -class PickleConverter(object): - """ - This class is responsible for converting old .pickle files used for configuration files to a newer ConfigObj format. - """ - - def __init__(self, session): - self.session = session - - def convert(self): - """ - Calling this method will convert all configuration files to the ConfigObj.state format. - """ - self.convert_session_config() - self.convert_main_config() - self.convert_download_checkpoints() - - def convert_session_config(self): - """ - Convert the sessionconfig.pickle file to triblerd.conf. Do nothing if we do not have a pickle file. - Remove the pickle file after we are done. 
- """ - old_filename = os.path.join(self.session.config.get_state_dir(), 'sessconfig.pickle') - - if not os.path.exists(old_filename): - return - - with open(old_filename, "rb") as old_file: - sessconfig = pickle.load(old_file) - - # Upgrade to .state config - new_config = self.session.config - for key, value in sessconfig.iteritems(): - if key == 'minport': - new_config.config['libtorrent']['port'] = value - if key in ['state_dir', 'install_dir', 'eckeypairfilename', 'megacache']: - new_config.config['general'][key] = value - if key == 'mainline_dht': - new_config.config['mainline_dht']['enabled'] = value - if key == 'mainline_dht_port': - new_config.config['mainline_dht']['port'] = value - if key == 'torrent_checking': - new_config.config['torrent_checking']['enabled'] = value - if key in ['torrent_collecting', 'torrent_collecting_max_torrents', 'torrent_collecting_dir']: - new_config.config['torrent_collecting']['enabled' if key == 'torrent_collecting' else key] = value - if key in ['libtorrent', 'lt_proxytype', 'lt_proxyserver', 'lt_proxyauth']: - new_config.config['libtorrent']['enabled' if key == 'libtorrent' else key] = value - if key in ['dispersy_port', 'dispersy']: - new_config.config['dispersy']['enabled' if key == 'dispersy' else 'port'] = value - - # Save the new file, remove the old one - new_config.write() - os.remove(old_filename) - - def convert_main_config(self): - """ - Convert the abc.conf, user_download_choice.pickle, gui_settings and recent download history files - to triblerd.conf. - """ - new_config = self.session.config - - # Convert user_download_choice.pickle - udcfilename = os.path.join(self.session.config.get_state_dir(), 'user_download_choice.pickle') - if os.path.exists(udcfilename): - with open(udcfilename, "r") as udc_file: - choices = pickle.Unpickler(udc_file).load() - choices = dict([(k.encode('hex'), v) for k, v in choices["download_state"].iteritems()]) - new_config.config['user_download_states'] = choices - new_config.write() - os.remove(udcfilename) - - def convert_download_checkpoints(self): - """ - Convert all pickle download checkpoints to .state files. 
- """ - checkpoint_dir = self.session.get_downloads_pstate_dir() - - filelist = os.listdir(checkpoint_dir) - if not any([filename.endswith('.pickle') for filename in filelist]): - return - - if os.path.exists(checkpoint_dir): - for old_filename in glob.glob(os.path.join(checkpoint_dir, '*.pickle')): - try: - with open(old_filename, "rb") as old_file: - old_checkpoint = pickle.load(old_file) - except (EOFError, KeyError): - # Pickle file appears to be corrupted, remove it and continue - os.remove(old_filename) - continue - - new_checkpoint = RawConfigParser() - new_checkpoint.add_section('downloadconfig') - new_checkpoint.add_section('state') - for key, value in old_checkpoint['dlconfig'].iteritems(): - if key in ['saveas', 'max_upload_rate', 'max_download_rate', 'super_seeder', 'mode', - 'selected_files', 'correctedfilename']: - new_checkpoint.set('downloadconfig', key, value) - new_checkpoint.set('state', 'version', PERSISTENTSTATE_CURRENTVERSION) - new_checkpoint.set('state', 'engineresumedata', old_checkpoint['engineresumedata']) - new_checkpoint.set('state', 'dlstate', old_checkpoint['dlstate']) - new_checkpoint.set('state', 'metainfo', old_checkpoint['metainfo']) - with open(old_filename.replace('.pickle', '.state'), "wb") as new_file: - new_checkpoint.write(new_file) - - os.remove(old_filename) diff --git a/Tribler/Core/Upgrade/torrent_upgrade64.py b/Tribler/Core/Upgrade/torrent_upgrade64.py deleted file mode 100644 index e6f55871473..00000000000 --- a/Tribler/Core/Upgrade/torrent_upgrade64.py +++ /dev/null @@ -1,224 +0,0 @@ -""" -Migration scripts for migrating to 6.4 - -Author(s): Elric Milon -""" -from __future__ import absolute_import - -import logging -import os -from binascii import hexlify -from shutil import rmtree, move -from sqlite3 import Connection - -from six.moves import xrange -from Tribler.Core.TorrentDef import TorrentDef - - -class TorrentMigrator64(object): - - """ - Migration tool for upgrading the collected torrent files/thumbnails on disk - structure from Tribler version 6.3 to 6.4. - """ - - def __init__(self, torrent_collecting_dir, state_dir, status_update_func=None): - self._logger = logging.getLogger(self.__class__.__name__) - self.status_update_func = status_update_func if status_update_func else lambda _: None - - self.torrent_collecting_dir = torrent_collecting_dir - self.state_dir = state_dir - - self.swift_files_deleted = 0 - self.torrent_files_dropped = 0 - self.torrent_files_migrated = 0 - self.total_torrent_files_processed = 0 - - self.total_swift_file_count = 0 - self.total_torrent_file_count = 0 - - self.total_file_count = 0 - self.processed_file_count = 0 - - # an empty file, if it doesn't exist then we need still need to migrate the torrent collecting directory - self.tmp_migration_tcd_file = os.path.join(self.state_dir, u".tmp_migration_v64_tcd") - - # we put every migrated torrent file in a temporary directory - self.tmp_migration_dir = os.path.abspath(os.path.join(self.state_dir, u".tmp_migration_v64")) - - def start_migrate(self): - """ - Starts migrating from Tribler 6.3 to 6.4. 
- """ - # remove some previous left files - useless_files = [u"upgradingdb.txt", u"upgradingdb2.txt", u"upgradingdb3.txt", u"upgradingdb4.txt"] - for i in xrange(len(useless_files)): - useless_tmp_file = os.path.join(self.state_dir, useless_files[i]) - if os.path.exists(useless_tmp_file): - os.unlink(useless_tmp_file) - - self._migrate_torrent_collecting_dir() - - # remove the temporary file if exists - if os.path.exists(self.tmp_migration_tcd_file): - os.unlink(self.tmp_migration_tcd_file) - - def _migrate_torrent_collecting_dir(self): - """ - Migrates the torrent collecting directory. - """ - if os.path.exists(self.tmp_migration_tcd_file): - return - - # check and create the temporary migration directory if necessary - if not os.path.exists(self.tmp_migration_dir): - try: - os.mkdir(self.tmp_migration_dir) - except OSError as e: - msg = u"Failed to create temporary torrent collecting migration directory %s: %s" %\ - (self.tmp_migration_dir, e) - raise OSError(msg) - elif not os.path.isdir(self.tmp_migration_dir): - msg = u"The temporary torrent collecting migration path is not a directory: %s" % self.tmp_migration_dir - raise RuntimeError(msg) - - if not os.path.isdir(self.torrent_collecting_dir): - raise RuntimeError(u"The torrent collecting directory doesn't exist: %s", self.torrent_collecting_dir) - - self._delete_swift_reseeds() - - # get total file numbers and then start cleaning up - self._get_total_file_count() - self._delete_swift_files() - self._rename_torrent_files() - - # delete all directories in the torrent collecting directory, we don't migrate thumbnails - self._delete_all_directories() - - # replace the old directory with the new one - rmtree(self.torrent_collecting_dir) - move(self.tmp_migration_dir, self.torrent_collecting_dir) - - # create the empty file to indicate that we have finished the torrent collecting directory migration - open(self.tmp_migration_tcd_file, "wb").close() - - def _get_total_file_count(self): - """ - Walks through the torrent collecting directory and gets the total number of file. - """ - self.status_update_func( - u"Scanning torrent directory. This may take a while if you have a big torrent collection...") - for root, _, files in os.walk(self.torrent_collecting_dir): - for name in files: - if name.endswith(u".mbinmap") or name.endswith(u".mhash") or name.startswith(u"tmp_"): - self.total_swift_file_count += 1 - else: - self.total_torrent_file_count += 1 - self.total_file_count += 1 - self.status_update_func(u"Getting file count: %s..." % self.total_file_count) - # We don't want to walk through the child directories - break - - def _delete_swift_reseeds(self): - """ - Deletes the reseeds dir, not used anymore. - """ - reseeds_path = os.path.join(self.torrent_collecting_dir, u"swift_reseeds") - if os.path.exists(reseeds_path): - if not os.path.isdir(reseeds_path): - raise RuntimeError(u"The swift_reseeds path is not a directory: %s", reseeds_path) - rmtree(reseeds_path) - self.swift_files_deleted += 1 - - def _delete_swift_files(self): - """ - Deletes all partial swift downloads, also clean up obsolete .mhash and .mbinmap files. - """ - def update_status(): - progress = 1.0 - if self.total_swift_file_count > 0: - progress = float(self.swift_files_deleted) / self.total_swift_file_count - progress *= 100 - self.status_update_func(u"Deleting swift files %.1f%%..." 
% progress) - - for root, _, files in os.walk(self.torrent_collecting_dir): - for name in files: - if name.endswith(u".mbinmap") or name.endswith(u".mhash") or name.startswith(u"tmp_"): - os.unlink(os.path.join(root, name)) - # update progress - self.swift_files_deleted += 1 - self.processed_file_count += 1 - update_status() - - # We don't want to walk through the child directories - break - - def _rename_torrent_files(self): - """ - Renames all the torrent files to INFOHASH.torrent and delete unparseable ones. - """ - def update_status(): - progress = 1.0 - if self.total_torrent_file_count > 0: - progress = float(self.total_torrent_files_processed) / self.total_torrent_file_count - progress *= 100 - self.status_update_func(u"Migrating torrent files %.2f%%..." % progress) - - for root, _, files in os.walk(self.torrent_collecting_dir): - for name in files: - file_path = os.path.join(root, name) - try: - tdef = TorrentDef.load(file_path) - move(file_path, os.path.join(self.tmp_migration_dir, hexlify(tdef.infohash) + u".torrent")) - self.torrent_files_migrated += 1 - except Exception as e: - self._logger.error(u"dropping corrupted torrent file %s: %s", file_path, str(e)) - os.unlink(file_path) - self.torrent_files_dropped += 1 - self.total_torrent_files_processed += 1 - update_status() - - # We don't want to walk through the child directories - break - - def _delete_all_directories(self): - """ - Deletes all directories in the torrent collecting directory. - """ - self.status_update_func(u"Checking all directories in torrent collecting directory...") - for root, dirs, files in os.walk(self.torrent_collecting_dir): - for d in dirs: - dir_path = os.path.join(root, d) - rmtree(dir_path, ignore_errors=True) - - def _update_dispersy(self): - """ - Cleans up all SearchCommunity and MetadataCommunity stuff in dispersy database. 
- """ - db_path = os.path.join(self.state_dir, u"sqlite", u"dispersy.db") - if not os.path.isfile(db_path): - return - - communities_to_delete = (u"SearchCommunity", u"MetadataCommunity") - - connection = Connection(db_path) - cursor = connection.cursor() - - data_updated = False - for community in communities_to_delete: - try: - result = list(cursor.execute(u"SELECT id FROM community WHERE classification == ?", (community,))) - - for community_id, in result: - self._logger.info(u"deleting all data for community %s...", community_id) - cursor.execute(u"DELETE FROM community WHERE id == ?", (community_id,)) - cursor.execute(u"DELETE FROM meta_message WHERE community == ?", (community_id,)) - cursor.execute(u"DELETE FROM sync WHERE community == ?", (community_id,)) - data_updated = True - except StopIteration: - continue - - if data_updated: - connection.commit() - cursor.close() - connection.close() diff --git a/Tribler/Core/Upgrade/torrent_upgrade65.py b/Tribler/Core/Upgrade/torrent_upgrade65.py deleted file mode 100644 index 473c329289b..00000000000 --- a/Tribler/Core/Upgrade/torrent_upgrade65.py +++ /dev/null @@ -1,77 +0,0 @@ -""" -Migration scripts for migrating to 6.5 - -Author(s): Elric Milon -""" -import os -from binascii import hexlify -from shutil import rmtree - -from Tribler.Core.TorrentDef import TorrentDef -from .torrent_upgrade64 import TorrentMigrator64 - - -class TorrentMigrator65(TorrentMigrator64): - - def __init__(self, torrent_collecting_dir, state_dir, torrent_store, status_update_func=None): - super(TorrentMigrator65, self).__init__(torrent_collecting_dir, state_dir, status_update_func) - self.torrent_store = torrent_store - - def _migrate_torrent_collecting_dir(self): - """ - Migrates the torrent collecting directory. - """ - if self.torrent_collecting_dir is None or not os.path.isdir(self.torrent_collecting_dir): - self._logger.info(u"torrent collecting directory not found, skip: %s", self.torrent_collecting_dir) - return - - self._delete_swift_reseeds() - - # get total file numbers and then start cleaning up - self._get_total_file_count() - self._delete_swift_files() - self._ingest_torrent_files() - - # delete all directories in the torrent collecting directory, we don't migrate thumbnails - self._delete_all_directories() - - # replace the old directory with the new one - rmtree(self.torrent_collecting_dir) - - def _ingest_torrent_files(self): - """ - Renames all the torrent files to INFOHASH.torrent and delete unparseable ones. - """ - def update_status(): - progress = 1.0 - if self.total_torrent_file_count > 0: - progress = float(self.total_torrent_files_processed) / self.total_torrent_file_count - progress *= 100 - self.status_update_func(u"Ingesting torrent files %.1f%% (%d/%d)..." - % (progress, self.torrent_files_migrated, - self.torrent_files_dropped)) - - self.status_update_func("Ingesting torrent files...") - for root, _, files in os.walk(self.torrent_collecting_dir): - for name in files: - file_path = os.path.join(root, name) - try: - tdef = TorrentDef.load(file_path) - # TODO(emilon): This should be moved out of the try block so - # an error there doesn't wipe the whole torrent collection. 
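Aside: the TODO in the removed hunk above points at a real hazard: because `open()` sits inside the `try`, a transient I/O error is treated like a corrupt torrent and the file is unlinked. A sketch of the shape the TODO asks for, reading first and only then parsing (the `ingest_one` helper and its signature are ours; `TorrentDef.load_from_memory` is the same API the old upgrader code uses elsewhere); the removed hunk continues below:

```python
import os
from binascii import hexlify

from Tribler.Core.TorrentDef import TorrentDef

def ingest_one(torrent_store, file_path, logger):
    # Read first: an I/O error now propagates instead of deleting the file.
    with open(file_path, 'rb') as torrent_file:
        raw_data = torrent_file.read()
    try:
        tdef = TorrentDef.load_from_memory(raw_data)
    except Exception as e:
        # Only genuine parse failures reach this branch.
        logger.error(u"dropping corrupted torrent file %s: %s", file_path, str(e))
        os.unlink(file_path)
        return False
    torrent_store[hexlify(tdef.infohash)] = raw_data
    return True
```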
- with open(file_path, 'rb') as torrent_file: - self.torrent_store[hexlify(tdef.infohash)] = torrent_file.read() - # self.torrent_store[hexlify(tdef.infohash)] = tdef.encode() - self.torrent_files_migrated += 1 - except Exception as e: - self._logger.error(u"dropping corrupted torrent file %s: %s", file_path, str(e)) - self.torrent_files_dropped += 1 - os.unlink(file_path) - self.total_torrent_files_processed += 1 - if not self.total_torrent_files_processed % 2000: - self.torrent_store.flush() - update_status() - - # We don't want to walk through the child directories - break - self.status_update_func("All torrent files processed.") diff --git a/Tribler/Core/Upgrade/upgrade.py b/Tribler/Core/Upgrade/upgrade.py index d9df37098f9..e090a416e58 100644 --- a/Tribler/Core/Upgrade/upgrade.py +++ b/Tribler/Core/Upgrade/upgrade.py @@ -1,30 +1,19 @@ +from __future__ import absolute_import + import logging import os -import shutil -from twisted.internet.defer import inlineCallbacks -from Tribler.Core.CacheDB.db_versions import LATEST_DB_VERSION, LOWEST_SUPPORTED_DB_VERSION +from Tribler.Core.Modules.MetadataStore.store import MetadataStore from Tribler.Core.Upgrade.config_converter import convert_config_to_tribler71 -from Tribler.Core.Upgrade.db_upgrader import DBUpgrader -from Tribler.Core.Upgrade.pickle_converter import PickleConverter -from Tribler.Core.Upgrade.torrent_upgrade65 import TorrentMigrator65 -from Tribler.Core.simpledefs import NTFY_UPGRADER, NTFY_FINISHED, NTFY_STARTED, NTFY_UPGRADER_TICK - - -# Database versions: -# *earlier versions are no longer supported -# 17 is used by Tribler 5.9.x - 6.0 -# 18 is used by Tribler 6.1.x - 6.2.0 -# 22 is used by Tribler 6.3.x -# 23 is used by Tribler 6.4 +from Tribler.Core.Upgrade.db72_to_pony import DispersyToPonyMigration, should_upgrade +from Tribler.Core.simpledefs import NTFY_FINISHED, NTFY_STARTED, NTFY_UPGRADER, NTFY_UPGRADER_TICK class TriblerUpgrader(object): - def __init__(self, session, db): + def __init__(self, session): self._logger = logging.getLogger(self.__class__.__name__) self.session = session - self.db = db self.notified = False self.is_done = False @@ -39,29 +28,33 @@ def run(self): Note that by default, upgrading is enabled in the config. It is then disabled after upgrading to Tribler 7. """ - self.current_status = u"Checking Tribler version..." 
- failed, has_to_upgrade = self.check_should_upgrade_database() - if has_to_upgrade and not failed: - self.notify_starting() - self.upgrade_database_to_current_version() - - # Convert old (pre 6.3 Tribler) pickle files to the newer .state format - pickle_converter = PickleConverter(self.session) - pickle_converter.convert() - - if self.failed: - self.notify_starting() - self.stash_database() + self.notify_starting() - self.upgrade_to_tribler7() + self.upgrade_72_to_pony() + # self.upgrade_config_to_71() + self.notify_done() def update_status(self, status_text): self.session.notifier.notify(NTFY_UPGRADER_TICK, NTFY_STARTED, None, status_text) self.current_status = status_text - def upgrade_to_tribler7(self): + def upgrade_72_to_pony(self): + old_database_path = os.path.join(self.session.config.get_state_dir(), 'sqlite', 'tribler.sdb') + new_database_path = os.path.join(self.session.config.get_state_dir(), 'sqlite', 'metadata.db') + channels_dir = os.path.join(self.session.config.get_chant_channels_dir()) + + d = DispersyToPonyMigration(old_database_path, self.update_status, logger=self._logger) + if not should_upgrade(old_database_path, new_database_path, logger=self._logger): + return + # We have to create the Metadata Store object because the LaunchManyCore has not been started yet + mds = MetadataStore(new_database_path, channels_dir, self.session.trustchain_keypair) + d.initialize(mds) + d.do_migration() + mds.shutdown() + + def upgrade_config_to_71(self): """ - This method performs actions necessary to upgrade to Tribler 7. + This method performs actions necessary to upgrade the configuration files to Tribler 7.1. """ self.session.config = convert_config_to_tribler71(self.session.config) self.session.config.write() @@ -81,64 +74,3 @@ def notify_done(self): Broadcast a notification (event) that the upgrader is done. """ self.session.notifier.notify(NTFY_UPGRADER, NTFY_FINISHED, None) - - def check_should_upgrade_database(self): - self.failed = True - should_upgrade = False - if self.db.version > LATEST_DB_VERSION: - msg = u"The on-disk tribler database is newer than your tribler version. Your database will be backed up." - self.current_status = msg - self._logger.info(msg) - elif self.db.version < LOWEST_SUPPORTED_DB_VERSION: - msg = u"Database is too old %s < %s" % (self.db.version, LOWEST_SUPPORTED_DB_VERSION) - self.current_status = msg - elif self.db.version == LATEST_DB_VERSION: - self._logger.info(u"tribler is in the latest version, no need to upgrade") - self.failed = False - self.is_done = True - self.notify_done() - else: - should_upgrade = True - self.failed = False - - return (self.failed, should_upgrade) - - @inlineCallbacks - def upgrade_database_to_current_version(self): - """ Checks the database version and upgrade if it is not the latest version. - """ - try: - from Tribler.Core.leveldbstore import LevelDbStore - torrent_store = LevelDbStore(self.session.config.get_torrent_store_dir()) - torrent_migrator = TorrentMigrator65( - self.session.config.get_torrent_collecting_dir(), self.session.config.get_state_dir(), - torrent_store=torrent_store, status_update_func=self.update_status) - yield torrent_migrator.start_migrate() - - db_migrator = DBUpgrader( - self.session, self.db, torrent_store=torrent_store, status_update_func=self.update_status) - yield db_migrator.start_migrate() - - # Import all the torrent files not in the database, we do this in - # case we have some unhandled torrent files left due to - # bugs/crashes, etc. 
- self.update_status("Recovering unregistered torrents...") - yield db_migrator.reimport_torrents() - - yield torrent_store.close() - del torrent_store - - self.failed = False - self.is_done = True - except Exception as e: - self._logger.exception(u"failed to upgrade: %s", e) - - def stash_database(self): - self.db.close() - old_dir = os.path.dirname(self.db.sqlite_db_path) - new_dir = u'%s_backup_%d' % (old_dir, LATEST_DB_VERSION) - shutil.move(old_dir, new_dir) - os.makedirs(old_dir) - self.db.initialize() - self.is_done = True - self.notify_done() diff --git a/Tribler/Core/Utilities/tracker_utils.py b/Tribler/Core/Utilities/tracker_utils.py index 4f0e8ddf535..da12b9c7e81 100644 --- a/Tribler/Core/Utilities/tracker_utils.py +++ b/Tribler/Core/Utilities/tracker_utils.py @@ -1,5 +1,7 @@ from __future__ import absolute_import +import re + from six import string_types, text_type from six.moves.http_client import HTTP_PORT from six.moves.urllib.parse import urlparse @@ -9,6 +11,21 @@ class MalformedTrackerURLException(Exception): pass +delimiters_regex = re.compile(r'[\r\n\x00\s\t;]*(%20)*') + + +url_regex = re.compile( + r'^(?:http|udp|wss)s?://' # http:// or https:// + r'(?:(?:[A-Z0-9](?:[A-Z0-9-]{0,61}[A-Z0-9])?\.)+(?:[A-Z]{2,6}\.?|[A-Z0-9-]{2,}\.?)|' # domain... + r'localhost|' # localhost... + r'\d{1,3}\.\d{1,3}\.\d{1,3}\.\d{1,3})' # ...or ip + r'(?::\d+)?' # optional port + r'(?:/?|[/?]\S+)$', re.IGNORECASE) + +remove_trailing_junk = re.compile(r'[,*.:]+\Z') +truncated_url_detector = re.compile(r'\.\.\.') + + def get_uniformed_tracker_url(tracker_url): """ Parse a tracker url of string_types type. @@ -37,45 +54,56 @@ def get_uniformed_tracker_url(tracker_url): except UnicodeDecodeError: return None - url = urlparse(tracker_url) - - # accessing urlparse attributes may throw UnicodeError's or ValueError's - try: - # scheme must be either UDP or HTTP - if url.scheme == 'udp' or url.scheme == 'http': - uniformed_scheme = url.scheme - else: - return None - - uniformed_hostname = url.hostname - - if not url.port: - # UDP trackers must have a port + # Search the string for delimiters and try to get the first correct URL + for tracker_url in re.split(delimiters_regex, tracker_url): + # Rule out truncated URLs + if re.search(truncated_url_detector, tracker_url): + continue + # Try to match it against a simple regexp + if not re.match(url_regex, tracker_url): + continue + + tracker_url = re.sub(remove_trailing_junk, '', tracker_url) + url = urlparse(tracker_url) + + # accessing urlparse attributes may throw UnicodeError's or ValueError's + try: + # scheme must be either UDP or HTTP + if url.scheme == 'udp' or url.scheme == 'http': + uniformed_scheme = url.scheme + else: + continue + + uniformed_hostname = url.hostname + + if not url.port: + # UDP trackers must have a port + if url.scheme == 'udp': + continue + # HTTP trackers default to port HTTP_PORT + elif url.scheme == 'http': + uniformed_port = HTTP_PORT + else: + uniformed_port = url.port + + # UDP trackers have no path if url.scheme == 'udp': - return None - # HTTP trackers default to port HTTP_PORT - elif url.scheme == 'http': - uniformed_port = HTTP_PORT - else: - uniformed_port = url.port - - # UDP trackers have no path - if url.scheme == 'udp': - uniformed_path = '' + uniformed_path = '' + else: + uniformed_path = url.path.rstrip('/') + # HTTP trackers must have a path + if url.scheme == 'http' and not uniformed_path: + continue + + if url.scheme == 'http' and uniformed_port == HTTP_PORT: + uniformed_url = u'%s://%s%s' % 
(uniformed_scheme, uniformed_hostname, uniformed_path) + else: + uniformed_url = u'%s://%s:%d%s' % (uniformed_scheme, uniformed_hostname, uniformed_port, uniformed_path) + except ValueError: + continue else: - uniformed_path = url.path.rstrip('/') - # HTTP trackers must have a path - if url.scheme == 'http' and not url.path: - return None - - if url.scheme == 'http' and uniformed_port == HTTP_PORT: - uniformed_url = u'%s://%s%s' % (uniformed_scheme, uniformed_hostname, uniformed_path) - else: - uniformed_url = u'%s://%s:%d%s' % (uniformed_scheme, uniformed_hostname, uniformed_port, uniformed_path) - except (UnicodeError, ValueError): - return None - - return uniformed_url + return uniformed_url + return None def parse_tracker_url(tracker_url): diff --git a/Tribler/Core/__init__.py b/Tribler/Core/__init__.py index 0aba50dd3ff..3c29597bc9b 100644 --- a/Tribler/Core/__init__.py +++ b/Tribler/Core/__init__.py @@ -3,58 +3,3 @@ Author(s): Arno Bakker """ -import logging -from threading import RLock - -try: - long # pylint: disable=long-builtin -except NameError: - long = int # pylint: disable=redefined-builtin - -logger = logging.getLogger(__name__) - - -def warnIfDispersyThread(func): - """ - We'd rather not be on the Dispersy thread, but if we are lets continue and - hope for the best. This was introduced after the database thread stuffs - caused deadlocks. We weren't sure we got all of them, so we implemented - warnings instead of errors because they probably wouldn't cause a deadlock, - but if they did we would have the warning somewhere. - - Niels dixit. - """ - def invoke_func(*args, **kwargs): - from twisted.python.threadable import isInIOThread - from traceback import print_stack - - if isInIOThread(): - import inspect - caller = inspect.stack()[1] - callerstr = "%s %s:%s" % (caller[3], caller[1], caller[2]) - - from time import time - logger.error("%d CANNOT BE ON DISPERSYTHREAD %s %s:%s called by %s", long(time()), - func.__name__, func.func_code.co_filename, func.func_code.co_firstlineno, callerstr) - print_stack() - - return func(*args, **kwargs) - - invoke_func.__name__ = func.__name__ - return invoke_func - - -class NoDispersyRLock(): - - def __init__(self): - self.lock = RLock() - self.__enter__ = self.lock.__enter__ - self.__exit__ = self.lock.__exit__ - - @warnIfDispersyThread - def acquire(self, blocking=1): - return self.lock.acquire(blocking) - - @warnIfDispersyThread - def release(self): - return self.lock.release() diff --git a/Tribler/Core/exceptions.py b/Tribler/Core/exceptions.py index c61462c3722..d1d9f35619f 100644 --- a/Tribler/Core/exceptions.py +++ b/Tribler/Core/exceptions.py @@ -47,7 +47,7 @@ class InvalidSignatureException(TriblerException): pass -class DuplicateChannelNameError(TriblerException): +class DuplicateChannelIdError(TriblerException): """ The Channel name already exists in the ChannelManager channel list, i.e., one of your own Channels with the same name already exists. @@ -90,9 +90,3 @@ class InvalidConfigException(TriblerException): """The config file doesn't adhere to the config specification.""" def __init__(self, msg=None): TriblerException.__init__(self, msg) - - -class LevelDBKeyDeletionException(TriblerException): - """This error is used to indicate failure to delete a key from LevelDB. 
""" - def __init__(self, msg=None): - TriblerException.__init__(self, msg) diff --git a/Tribler/Core/leveldbstore.py b/Tribler/Core/leveldbstore.py deleted file mode 100644 index 38b7a4637e6..00000000000 --- a/Tribler/Core/leveldbstore.py +++ /dev/null @@ -1,161 +0,0 @@ -""" -LevelDBStore. - -Author(s): Elric Milon -""" -from __future__ import absolute_import - -import logging -import os -import sys -from collections import MutableMapping -from itertools import chain -from shutil import rmtree - -from twisted.internet import reactor -from twisted.internet.task import LoopingCall - -from Tribler.Core.exceptions import LevelDBKeyDeletionException -from Tribler.pyipv8.ipv8.taskmanager import TaskManager - - -def get_write_batch_leveldb(self, _): - from leveldb import WriteBatch - return WriteBatch() - - -def get_write_batch_plyvel(self, db): - from Tribler.Core.plyveladapter import WriteBatch - return WriteBatch(db) - -try: - from leveldb import LevelDB, LevelDBError - - use_leveldb = True - get_write_batch = get_write_batch_leveldb - -except ImportError: - from Tribler.Core.plyveladapter import LevelDB # pylint: disable=ungrouped-imports - - use_leveldb = False - get_write_batch = get_write_batch_plyvel - - -WRITEBACK_PERIOD = 120 - -# TODO(emilon): Make sure the caching makes an actual difference in IO and kill -# it if it doesn't as it complicates the code. - - -class LevelDbStore(MutableMapping, TaskManager): - _reactor = reactor - _leveldb = LevelDB - _writebatch = get_write_batch - - def __init__(self, store_dir): - super(LevelDbStore, self).__init__() - - self._store_dir = store_dir - self._pending_torrents = {} - self._logger = logging.getLogger(self.__class__.__name__) - # This is done to work around LevelDB's inability to deal with non-ascii paths on windows. - try: - db_path = store_dir.decode('windows-1252') if sys.platform == "win32" else store_dir - self._db = self._leveldb(db_path) - except ValueError: - # This can happen on Windows when the state dir and Tribler installation are on different disks. - # In this case, hope for the best by using the full path. - self._db = self._leveldb(store_dir) - except Exception as exc: - # We cannot simply catch LevelDBError since that class might not be available on some systems. 
- if use_leveldb and isinstance(exc, LevelDBError): - # The database might be corrupt, start with a fresh one - self._logger.error("Corrupt LevelDB store detected; recreating database") - rmtree(self._store_dir) - os.makedirs(self._store_dir) - self._db = self._leveldb(os.path.relpath(store_dir, os.getcwdu())) - else: # If something else goes wrong, we throw the exception again - raise - - self._writeback_lc = self.register_task("flush cache ", LoopingCall(self.flush)) - self._writeback_lc.clock = self._reactor - self._writeback_lc.start(WRITEBACK_PERIOD) - - def get_db(self): - return self._db - - def __getitem__(self, key): - try: - return self._pending_torrents[key] - except KeyError: - return self._db.Get(key) - - def __setitem__(self, key, value): - self._pending_torrents[key] = value - - def __delitem__(self, key): - if key in self._pending_torrents: - self._pending_torrents.pop(key) - try: - self._db.Delete(key) - except Exception: - raise LevelDBKeyDeletionException(msg="Failed to delete key: %s" % key) - - def __iter__(self): - for k in self._pending_torrents.iterkeys(): - yield k - for k, _ in self._db.RangeIter(): - yield k - - def __contains__(self, key): - if key in self._pending_torrents: - return True - try: - self.__getitem__(key) - return True - except KeyError: - pass - - return False - - def __len__(self): - return len(self._pending_torrents) + len(list(self.keys())) - - def keys(self): - return [k for k, _ in self._db.RangeIter()] - - def iteritems(self): - return chain(self._pending_torrents, self._db.RangeIter()) - - def put(self, k, v): - self.__setitem__(k, v) - - def rangescan(self, start=None, end=None): - if start is None and end is None: - return self._db.RangeIter() - elif end is None: - return self._db.RangeIter(key_from=start) - else: - return self._db.RangeIter(key_from=start, key_to=end) - - def flush(self, retry=3, write_batch=None): - if not write_batch and self._pending_torrents: - write_batch = self._writebatch(self._db) - for k, v in self._pending_torrents.iteritems(): - write_batch.Put(k, v) - self._pending_torrents.clear() - - if write_batch: - if not retry: - self._logger.error("Failed to flush LevelDB cache. Max retry done.") - return - try: - self._db.Write(write_batch) - except Exception as ex: - self._logger.error("Failed to flush LevelDB cache. Will retry %s times. 
Error:%s", retry-1, ex) - self.flush(retry=retry-1, write_batch=write_batch) - - def close(self): - self.shutdown_task_manager() - self.flush() - self._db = None diff --git a/Tribler/Core/permid.py b/Tribler/Core/permid.py index 5942e4a3e7f..d56f22db7f7 100644 --- a/Tribler/Core/permid.py +++ b/Tribler/Core/permid.py @@ -4,55 +4,11 @@ Author(s): Arno Bakker """ import logging -import os -from M2Crypto import Rand, EC, BIO from Tribler.pyipv8.ipv8.keyvault.private.libnaclkey import LibNaCLSK logger = logging.getLogger(__name__) -# Internal constants -KEYPAIR_ECC_CURVE = EC.NID_sect233k1 -NUM_RANDOM_BITS = 1024 * 8 # bits - -# Exported functions - -# a workaround is needed for Tribler to function on Windows 64 bit -# instead of invoking EC.load_key(filename), we should use the M2Crypto.BIO buffer -# see http://stackoverflow.com/questions/33720087/error-when-importing-m2crypto-in-python-on-windows-x64 - -def init(): - Rand.rand_seed(os.urandom(NUM_RANDOM_BITS / 8)) - - -def generate_keypair(): - ec_keypair = EC.gen_params(KEYPAIR_ECC_CURVE) - ec_keypair.gen_key() - return ec_keypair - - -def read_keypair(keypairfilename): - membuf = BIO.MemoryBuffer(open(keypairfilename, 'rb').read()) - key = EC.load_key_bio(membuf) - membuf.close() - return key - - -def save_keypair(keypair, keypairfilename): - membuf = BIO.MemoryBuffer() - keypair.save_key_bio(membuf, None) - with open(keypairfilename, 'w') as file: - file.write(membuf.read()) - membuf.close() - - -def save_pub_key(keypair, pubkeyfilename): - membuf = BIO.MemoryBuffer() - keypair.save_pub_key_bio(membuf) - with open(pubkeyfilename, 'w') as file: - file.write(membuf.read()) - membuf.close() - def generate_keypair_trustchain(): return LibNaCLSK() diff --git a/Tribler/Core/plyveladapter.py b/Tribler/Core/plyveladapter.py deleted file mode 100644 index dbbe4cd9dcc..00000000000 --- a/Tribler/Core/plyveladapter.py +++ /dev/null @@ -1,42 +0,0 @@ -import plyvel - - -class LevelDB(object): - - def __init__(self, store_dir, create_if_missing=True): - self._db = plyvel.DB(store_dir, create_if_missing=create_if_missing) - - def Get(self, key, verify_checksums=False, fill_cache=True): - val = self._db.get(key, verify_checksums=verify_checksums, fill_cache=fill_cache) - if val: - return val - raise KeyError('No value for key {key}'.format(key=key)) - - def Put(self, key, value, sync=False): - self._db.put(key, value, sync=sync) - - def Delete(self, key, sync=False): - return self._db.delete(key, sync=sync) - - def RangeIter(self, key_from=None, key_to=None, include_value=True, verify_checksums=False, fill_cache=True): - return self._db.iterator(start=key_from, stop=key_to, include_value=include_value, - verify_checksums=verify_checksums, fill_cache=fill_cache) - - def Write(self, write_batch, sync=False): - write_batch._batch.write() - - def GetStats(self): - pass # No such method in plyvel - - -class WriteBatch(object): - - def __init__(self, db): - # Using transaction and sync in Windows to prevent CorruptionError - self._batch = db._db.write_batch(transaction=True, sync=True) - - def Put(self, key, value): - self._batch.put(key, value) - - def Delete(self, key): - self._batch.delete(key) diff --git a/Tribler/Core/simpledefs.py b/Tribler/Core/simpledefs.py index e52e06ec2af..bd00438b52a 100644 --- a/Tribler/Core/simpledefs.py +++ b/Tribler/Core/simpledefs.py @@ -34,6 +34,8 @@ STATEDIR_DLPSTATE_DIR = u'dlcheckpoints' STATEDIR_WALLET_DIR = u'wallet' +STATEDIR_CHANNELS_DIR = u'channels' +STATEDIR_DB_DIR = u"sqlite" # For observer/callback mechanism, see 
Session.add_observer() # subjects @@ -144,17 +146,11 @@ STATE_EXCEPTION = "EXCEPTION" STATE_SHUTDOWN = "SHUTDOWN" -STATE_OPEN_DB = 'Opening database...' STATE_START_API = 'Starting HTTP API...' STATE_UPGRADING_READABLE = 'Upgrading Tribler...' STATE_LOAD_CHECKPOINTS = 'Loading download checkpoints...' -STATE_STARTING_DISPERSY = 'Starting Dispersy...' -STATE_LOADING_COMMUNITIES = 'Loading communities...' -STATE_INITIALIZE_CHANNEL_MGR = 'Initializing channel manager...' -STATE_START_MAINLINE_DHT = 'Starting mainline DHT...' STATE_START_LIBTORRENT = 'Starting libtorrent...' STATE_START_TORRENT_CHECKER = 'Starting torrent checker...' -STATE_START_REMOTE_TORRENT_HANDLER = 'Starting remote torrent handler...' STATE_START_API_ENDPOINTS = 'Starting API endpoints...' STATE_START_WATCH_FOLDER = 'Starting watch folder...' STATE_START_CREDIT_MINING = 'Starting credit mining...' diff --git a/Tribler/Core/statistics.py b/Tribler/Core/statistics.py index 6dc97d62203..88e9996edc9 100644 --- a/Tribler/Core/statistics.py +++ b/Tribler/Core/statistics.py @@ -3,11 +3,7 @@ import os import time -from six import text_type - -from Tribler.Core.CacheDB.sqlitecachedb import DB_FILE_RELATIVE_PATH from Tribler.Core.exceptions import OperationNotEnabledByConfigurationException -from Tribler.Core.simpledefs import NTFY_CHANNELCAST, NTFY_TORRENTS DATA_NONE = u"None" @@ -25,60 +21,13 @@ def get_tribler_statistics(self): """ Return a dictionary with some general Tribler statistics. """ - torrent_db_handler = self.session.open_dbhandler(NTFY_TORRENTS) - channel_db_handler = self.session.open_dbhandler(NTFY_CHANNELCAST) - - torrent_stats = torrent_db_handler.getTorrentsStats() - torrent_total_size = 0 if torrent_stats[1] is None else torrent_stats[1] - - stats_dict = {"torrents": {"num_collected": torrent_stats[0], "total_size": torrent_total_size, - "num_files": torrent_stats[2]}, - - "num_channels": channel_db_handler.getNrChannels(), - "database_size": os.path.getsize( - os.path.join(self.session.config.get_state_dir(), DB_FILE_RELATIVE_PATH))} - - if self.session.lm.rtorrent_handler: - torrent_queue_stats = self.session.lm.rtorrent_handler.get_queue_stats() - torrent_queue_size_stats = self.session.lm.rtorrent_handler.get_queue_size_stats() - torrent_queue_bandwidth_stats = self.session.lm.rtorrent_handler.get_bandwidth_stats() - - stats_dict["torrent_queue_stats"] = torrent_queue_stats - stats_dict["torrent_queue_size_stats"] = torrent_queue_size_stats - stats_dict["torrent_queue_bandwidth_stats"] = torrent_queue_bandwidth_stats + db_size = os.path.getsize(self.session.lm.mds.db_filename) if self.session.lm.mds else 0 + stats_dict = {"db_size": db_size, + "num_channels": self.session.lm.mds.get_num_channels(), + "num_torrents": self.session.lm.mds.get_num_torrents()} return stats_dict - def get_dispersy_statistics(self): - """ - Return a dictionary with some general Dispersy statistics. 
-        """
-        dispersy = self.session.get_dispersy_instance()
-        dispersy.statistics.update()
-        stats = dispersy.statistics
-        return {
-            "wan_address": "%s:%d" % stats.wan_address,
-            "lan_address": "%s:%d" % stats.lan_address,
-            "connection": text_type(stats.connection_type),
-            "runtime": stats.timestamp - stats.start,
-            "total_downloaded": stats.total_down,
-            "total_uploaded": stats.total_up,
-            "packets_sent": stats.total_send,
-            "packets_received": stats.total_received,
-            "packets_success": stats.msg_statistics.success_count,
-            "packets_dropped": stats.msg_statistics.drop_count,
-            "packets_delayed_sent": stats.msg_statistics.delay_send_count,
-            "packets_delayed_received": stats.msg_statistics.delay_received_count,
-            "packets_delayed_success": stats.msg_statistics.delay_success_count,
-            "packets_delayed_timeout": stats.msg_statistics.delay_timeout_count,
-            "total_walk_attempts": stats.walk_attempt_count,
-            "total_walk_success": stats.walk_success_count,
-            "sync_messages_created": stats.msg_statistics.created_count,
-            "bloom_new": sum(c.sync_bloom_new for c in stats.communities),
-            "bloom_reused": sum(c.sync_bloom_reuse for c in stats.communities),
-            "bloom_skipped": sum(c.sync_bloom_skip for c in stats.communities),
-        }
-
     def get_ipv8_statistics(self):
         """
         Return generic IPv8 statistics.
diff --git a/Tribler/Main/Build/Win/tribler.nsi b/Tribler/Main/Build/Win/tribler.nsi
index 03219509fd6..497e7111fed 100644
--- a/Tribler/Main/Build/Win/tribler.nsi
+++ b/Tribler/Main/Build/Win/tribler.nsi
@@ -122,11 +122,7 @@ Section "!Main EXE" SecMain
   ; Install MSVCR 2008, 2012 and 2015
   SetOutPath "$INSTDIR"
-  ; Libraries dependant on 2008 are: APSW
-  File vc_redist_90.exe
-  ExecWait "$INSTDIR\vc_redist_90.exe /q /norestart"
-
-  ; Libraries dependant on 2012 are: LevelDB, LibTorrent
+  ; Libraries dependent on 2012 are: LibTorrent
   File vc_redist_110.exe
   ExecWait "$INSTDIR\vc_redist_110.exe /q /norestart"
diff --git a/Tribler/Test/API/test_download.py b/Tribler/Test/API/test_download.py
index 5ac62a0728e..96111120d53 100644
--- a/Tribler/Test/API/test_download.py
+++ b/Tribler/Test/API/test_download.py
@@ -1,16 +1,18 @@
+from __future__ import absolute_import
+
 import logging
 import os
 import shutil
 from binascii import hexlify
 from unittest import skip
 
-from Tribler.Test.tools import trial_timeout
 from twisted.internet.defer import Deferred
 
 from Tribler.Core.Utilities.network_utils import get_random_port
-from Tribler.Core.simpledefs import dlstatus_strings, DLSTATUS_DOWNLOADING
-from Tribler.Test.common import UBUNTU_1504_INFOHASH, TORRENT_UBUNTU_FILE
+from Tribler.Core.simpledefs import DLSTATUS_DOWNLOADING, dlstatus_strings
+from Tribler.Test.common import TORRENT_UBUNTU_FILE, UBUNTU_1504_INFOHASH
 from Tribler.Test.test_as_server import TestAsServer
+from Tribler.Test.tools import trial_timeout
 
 
 class TestDownload(TestAsServer):
 
@@ -28,7 +30,6 @@ def setUpPreSession(self):
         super(TestDownload, self).setUpPreSession()
 
         self.config.set_libtorrent_enabled(True)
-        self.config.set_dispersy_enabled(False)
         self.config.set_libtorrent_max_conn_download(2)
 
     def on_download(self, download):
diff --git a/Tribler/Test/Community/AbstractTestCommunity.py b/Tribler/Test/Community/AbstractTestCommunity.py
deleted file mode 100644
index eb03403b0f7..00000000000
--- a/Tribler/Test/Community/AbstractTestCommunity.py
+++ /dev/null
@@ -1,28 +0,0 @@
-from twisted.internet.defer import inlineCallbacks
-
-from Tribler.Test.test_as_server import AbstractServer
-from Tribler.dispersy.dispersy import Dispersy
-from 
Tribler.dispersy.endpoint import ManualEnpoint -from Tribler.dispersy.member import DummyMember - - -class AbstractTestCommunity(AbstractServer): - - # We have to initialize Dispersy and the tunnel community on the reactor thread - - @inlineCallbacks - def setUp(self): - yield super(AbstractTestCommunity, self).setUp() - self.dispersy = Dispersy(ManualEnpoint(0), self.getStateDir()) - self.dispersy._database.open() - self.master_member = DummyMember(self.dispersy, 1, "a" * 20) - self.member = self.dispersy.get_new_member(u"curve25519") - - @inlineCallbacks - def tearDown(self): - for community in self.dispersy.get_communities(): - yield community.unload_community() - - self.master_member = None - self.member = None - yield super(AbstractTestCommunity, self).tearDown() diff --git a/Tribler/Test/Community/Allchannel/__init__.py b/Tribler/Test/Community/Allchannel/__init__.py deleted file mode 100644 index c952975cc3e..00000000000 --- a/Tribler/Test/Community/Allchannel/__init__.py +++ /dev/null @@ -1,3 +0,0 @@ -""" -This package contains tests for the AllChannel community. -""" diff --git a/Tribler/Test/Community/Allchannel/test_allchannel_community.py b/Tribler/Test/Community/Allchannel/test_allchannel_community.py deleted file mode 100644 index 1b4c2b794fa..00000000000 --- a/Tribler/Test/Community/Allchannel/test_allchannel_community.py +++ /dev/null @@ -1,43 +0,0 @@ -from Tribler.Test.tools import trial_timeout -from twisted.internet.defer import inlineCallbacks - -from Tribler.community.allchannel.community import AllChannelCommunity -from Tribler.community.channel.preview import PreviewChannelCommunity -from Tribler.dispersy.member import DummyMember -from Tribler.dispersy.message import Message -from Tribler.Test.Community.AbstractTestCommunity import AbstractTestCommunity - - -class TestAllChannelCommunity(AbstractTestCommunity): - - @inlineCallbacks - def setUp(self): - yield super(TestAllChannelCommunity, self).setUp() - self.community = AllChannelCommunity(self.dispersy, self.master_member, self.member) - self.dispersy._communities['a' * 20] = self.community - self.community.initialize(auto_join_channel=True) - - @trial_timeout(10) - def test_create_votecast(self): - """ - Testing whether a votecast can be created in the community - """ - def verify(message): - self.assertTrue(isinstance(message, Message.Implementation)) - - return self.community.disp_create_votecast("c" * 20, 2, 300).addCallback(verify) - - @trial_timeout(10) - def test_unload_preview(self): - """ - Test the unloading of the preview community - """ - def verify_unloaded(_): - self.assertEqual(len(self.dispersy.get_communities()), 1) - - preview_member = DummyMember(self.dispersy, 2, "c" * 20) - preview_community = PreviewChannelCommunity(self.dispersy, preview_member, self.member) - preview_community.initialize() - preview_community.init_timestamp = -500 - self.dispersy._communities['c' * 20] = preview_community - return self.community.unload_preview().addCallback(verify_unloaded) diff --git a/Tribler/Test/Community/Search/FullSession/__init__.py b/Tribler/Test/Community/Search/FullSession/__init__.py deleted file mode 100644 index adcba0c47f9..00000000000 --- a/Tribler/Test/Community/Search/FullSession/__init__.py +++ /dev/null @@ -1,3 +0,0 @@ -""" -This package contains tests to test the remote search with real Tribler session. 
-""" diff --git a/Tribler/Test/Community/Search/FullSession/test_search_community.py b/Tribler/Test/Community/Search/FullSession/test_search_community.py deleted file mode 100644 index 13f0daaaa69..00000000000 --- a/Tribler/Test/Community/Search/FullSession/test_search_community.py +++ /dev/null @@ -1,179 +0,0 @@ -from __future__ import absolute_import -from six import unichr -from twisted.internet import reactor -from twisted.internet.defer import inlineCallbacks, Deferred - -from Tribler.Core.Session import Session -from Tribler.Core.simpledefs import NTFY_TORRENTS, SIGNAL_CHANNEL, SIGNAL_ON_SEARCH_RESULTS, SIGNAL_TORRENT, \ - NTFY_CHANNELCAST -from Tribler.Test.test_as_server import TestAsServer -from Tribler.Test.tools import trial_timeout -from Tribler.community.allchannel.community import AllChannelCommunity -from Tribler.community.search.community import SearchCommunity -from Tribler.dispersy.candidate import Candidate - -MASTER_KEY = "3081a7301006072a8648ce3d020106052b81040027038192000400f4771c58e65f2cc0385a14027a937a0eb54df0e" \ - "4ae2f72acd8f8286066a48a5e8dcff81c7dfa369fbc33bfe9823587057557cf168b41586dc9ff7615a7e5213f3ec6" \ - "c9b4f9f57f00dbc0dd8ca8b9f6d76fd63a432a56d5938ce9dd7bd291daa92bec52ffcd58d9718836163868f493063" \ - "77c3b8bf36d43ea99122c3276e1a89fb5b9b2ff3f7f6f1702d057dca3e8c0" -MASTER_KEY_SEARCH = "3081a7301006072a8648ce3d020106052b8104002703819200040759eff226a7e2efc62ff61538267f837c" \ - "34d2a32927a10ff31618a69773e4123e405a6d4a930ceeae9a01cfde07496ec21bdb60eb23c92009bf2c93" \ - "f9fd32653953f136e6704d04077c457497cea70d1b3809f7ee7c4fa40faad7d9ed00a622183ae8623fe64e" \ - "1017af273a53b347f11bc6a919c01e9db8f6a98eaf1fcea0a1f18b339b013c7eb134797c29d4c4c429" - - -class AllChannelCommunityTests(AllChannelCommunity): - """ - We define our own AllChannelCommunity. - """ - - @classmethod - def get_master_members(cls, dispersy): - return [dispersy.get_member(public_key=MASTER_KEY.decode("HEX"))] - - @property - def dispersy_enable_fast_candidate_walker(self): - return True - - def check_channelsearch_response(self, messages): - for message in messages: - yield message - - -class SearchCommunityTests(SearchCommunity): - """ - We define our own SearchCommunity. - """ - - @classmethod - def get_master_members(cls, dispersy): - return [dispersy.get_member(public_key=MASTER_KEY_SEARCH.decode("HEX"))] - - -class TestSearchCommunity(TestAsServer): - """ - Contains tests to test remote search with booted Tribler sessions. - """ - - @inlineCallbacks - def setUp(self): - yield super(TestSearchCommunity, self).setUp() - - self.config2 = None - self.session2 = None - self.dispersy2 = None - self.search_community = None - self.allchannel_community = None - - self.dispersy = self.session.get_dispersy_instance() - yield self.setup_peer() - - def setUpPreSession(self): - TestAsServer.setUpPreSession(self) - self.config.set_dispersy_enabled(True) - self.config.set_torrent_store_enabled(True) - self.config.set_torrent_search_enabled(True) - self.config.set_channel_search_enabled(True) - self.config.set_metadata_enabled(True) - self.config.set_channel_community_enabled(True) - self.config.set_preview_channel_community_enabled(True) - self.config.set_torrent_collecting_enabled(True) - self.config.set_torrent_checking_enabled(True) - self.config.set_megacache_enabled(True) - - @inlineCallbacks - def setup_peer(self): - """ - Setup a second peer that contains some search results. 
- """ - self.setUpPreSession() - - self.config2 = self.config.copy() - self.config2.set_state_dir(self.getStateDir(2)) - - self.session2 = Session(self.config2) - - yield self.session2.start() - self.dispersy2 = self.session2.get_dispersy_instance() - - @inlineCallbacks - def unload_communities(): - for community in self.dispersy.get_communities(): - if isinstance(community, SearchCommunity) or isinstance(community, AllChannelCommunity): - yield community.unload_community() - - for community in self.dispersy2.get_communities(): - if isinstance(community, SearchCommunity) or isinstance(community, AllChannelCommunity): - yield community.unload_community() - - def load_communities(): - self.search_community = \ - self.dispersy.define_auto_load(SearchCommunityTests, self.session.dispersy_member, load=True, - kargs={'tribler_session': self.session})[0] - self.dispersy2.define_auto_load(SearchCommunityTests, self.session2.dispersy_member, load=True, - kargs={'tribler_session': self.session2}) - - self.allchannel_community = \ - self.dispersy.define_auto_load(AllChannelCommunityTests, self.session.dispersy_member, load=True, - kargs={'tribler_session': self.session})[0] - self.dispersy2.define_auto_load(AllChannelCommunityTests, self.session2.dispersy_member, load=True, - kargs={'tribler_session': self.session2}) - - yield unload_communities() - load_communities() - - self.search_community.add_discovered_candidate(Candidate(self.dispersy2.lan_address, tunnel=False)) - self.allchannel_community.add_discovered_candidate(Candidate(self.dispersy2.lan_address, tunnel=False)) - - # Add some content to second session - torrent_db_handler = self.session2.open_dbhandler(NTFY_TORRENTS) - torrent_db_handler.addExternalTorrentNoDef(str(unichr(97)) * 20, 'test test', [('Test.txt', 1337)], [], 1337) - torrent_db_handler.updateTorrent(str(unichr(97)) * 20, is_collected=1) - - channel_db_handler = self.session2.open_dbhandler(NTFY_CHANNELCAST) - channel_db_handler.on_channel_from_dispersy('f' * 20, 42, "test", "channel for unit tests") - torrent_list = [ - [1, 1, 1, ('a' * 40).decode('hex'), 1460000000, "ubuntu-torrent.iso", [['file1.txt', 42]], []] - ] - channel_db_handler.on_torrents_from_dispersy(torrent_list) - - # We also need to add the channel to the database of the session initiating the search - channel_db_handler = self.session.open_dbhandler(NTFY_CHANNELCAST) - channel_db_handler.on_channel_from_dispersy('f' * 20, 42, "test", "channel for unit tests") - - @trial_timeout(20) - def test_torrent_search(self): - """ - Test whether we receive results when searching remotely for torrents - """ - test_deferred = Deferred() - - def on_search_results_torrents(_dummy1, _dummy2, _dummy3, results): - self.assertEqual(len(results['result_list']), 1) - test_deferred.callback(None) - - reactor.callLater(2, self.session.search_remote_torrents, [u"test"]) - self.session.add_observer(on_search_results_torrents, SIGNAL_TORRENT, [SIGNAL_ON_SEARCH_RESULTS]) - - return test_deferred - - @trial_timeout(20) - def test_channel_search(self): - """ - Test whether we receive results when searching remotely for channels - """ - test_deferred = Deferred() - - def on_search_results_channels(_dummy1, _dummy2, _dummy3, results): - self.assertEqual(len(results['result_list']), 1) - test_deferred.callback(None) - - reactor.callLater(5, self.session.search_remote_channels, [u"test"]) - self.session.add_observer(on_search_results_channels, SIGNAL_CHANNEL, [SIGNAL_ON_SEARCH_RESULTS]) - - return test_deferred - - @inlineCallbacks - def 
tearDown(self): - yield self.session2.shutdown() - yield super(TestSearchCommunity, self).tearDown() diff --git a/Tribler/Test/Community/Search/__init__.py b/Tribler/Test/Community/Search/__init__.py deleted file mode 100644 index 23b1102cd66..00000000000 --- a/Tribler/Test/Community/Search/__init__.py +++ /dev/null @@ -1,3 +0,0 @@ -""" -This package contains tests for the Search community. -""" diff --git a/Tribler/Test/Community/Search/test_search_community.py b/Tribler/Test/Community/Search/test_search_community.py deleted file mode 100644 index 619bb8266b2..00000000000 --- a/Tribler/Test/Community/Search/test_search_community.py +++ /dev/null @@ -1,89 +0,0 @@ -import os - -from nose.tools import raises -from twisted.internet.defer import inlineCallbacks - -from Tribler.Test.Community.AbstractTestCommunity import AbstractTestCommunity -from Tribler.Test.Core.base_test import MockObject -from Tribler.Test.common import TESTS_DATA_DIR -from Tribler.community.search.community import SearchCommunity -from Tribler.community.search.conversion import SearchConversion -from Tribler.dispersy.message import DropPacket - - -class TestSearchCommunity(AbstractTestCommunity): - - @inlineCallbacks - def setUp(self): - yield super(TestSearchCommunity, self).setUp() - self.search_community = SearchCommunity(self.dispersy, self.master_member, self.member) - - @inlineCallbacks - def tearDown(self): - self.search_community.cancel_all_pending_tasks() - yield super(TestSearchCommunity, self).tearDown() - - def test_on_search(self): - """ - Test whether we are creating a search response when we receive a search request - """ - def log_incoming_searches(sock_addr, keywords): - log_incoming_searches.called = True - - log_incoming_searches.called = False - - def create_search_response(id, results, candidate): - create_search_response.called = True - self.assertEqual(id, "abc") - self.assertEqual(results, []) - self.assertEqual(candidate.sock_addr, "1234") - - create_search_response.called = False - - def search_names(keywords, local=False, keys=None): - return [] - - self.search_community._torrent_db = MockObject() - self.search_community._torrent_db.searchNames = search_names - - fake_message = MockObject() - fake_message.candidate = MockObject() - fake_message.candidate.sock_addr = "1234" - fake_message.payload = MockObject() - fake_message.payload.keywords = "test" - fake_message.payload.identifier = "abc" - - self.search_community._create_search_response = create_search_response - self.search_community.log_incoming_searches = log_incoming_searches - self.search_community.on_search([fake_message]) - - self.assertTrue(log_incoming_searches.called) - self.assertTrue(create_search_response.called) - - @raises(DropPacket) - def test_decode_response_invalid(self): - """ - Test whether decoding an invalid search response does not crash the program - """ - self.search_community._initialize_meta_messages() - search_conversion = SearchConversion(self.search_community) - search_conversion._decode_search_response(None, 0, "a[]") - - def test_create_torrent(self): - """ - Test the creation of a torrent in the search community - """ - with open(os.path.join(TESTS_DATA_DIR, "bak_single.torrent"), mode='rb') as torrent_file: - torrent_data = torrent_file.read() - - mock_session = MockObject() - mock_session.get_collected_torrent = lambda _: torrent_data - mock_session.open_dbhandler = lambda _: None - mock_session.notifier = None - mock_session.lm = MockObject() - mock_session.lm.rtorrent_handler = None - - 
self.search_community.initialize(mock_session) - self.search_community._torrent_db = MockObject() - self.search_community._torrent_db.updateTorrent = lambda *_, **ignored: None - self.assertTrue(self.search_community.create_torrent('a' * 20)) diff --git a/Tribler/Test/Community/Tunnel/FullSession/test_tunnel_base.py b/Tribler/Test/Community/Tunnel/FullSession/test_tunnel_base.py index 36b83bec8d1..4427213f5cb 100644 --- a/Tribler/Test/Community/Tunnel/FullSession/test_tunnel_base.py +++ b/Tribler/Test/Community/Tunnel/FullSession/test_tunnel_base.py @@ -63,7 +63,6 @@ def setUp(self): def setUpPreSession(self): TestAsServer.setUpPreSession(self) - self.config.set_dispersy_enabled(False) self.config.set_ipv8_enabled(True) self.config.set_libtorrent_enabled(True) self.config.set_trustchain_enabled(False) @@ -102,10 +101,9 @@ def setup_nodes(self, num_relays=1, num_exitnodes=1, seed_hops=0): self.tunnel_communities.append(self.tunnel_community) self._logger.info("Introducing all nodes to each other in tests") - for community_introduce in self.tunnel_communities + ([self.tunnel_community_seeder] if - self.tunnel_community_seeder else []): - for community in self.tunnel_communities + ([self.tunnel_community_seeder] if - self.tunnel_community_seeder else []): + other_tunnel_communities = [self.tunnel_community_seeder] if self.tunnel_community_seeder else [] + for community_introduce in self.tunnel_communities + other_tunnel_communities: + for community in self.tunnel_communities + other_tunnel_communities: if community != community_introduce: community.walk_to(community_introduce.endpoint.get_address()) @@ -152,7 +150,6 @@ def create_proxy(self, index, exitnode=False): self.setUpPreSession() config = self.config.copy() config.set_libtorrent_enabled(True) - config.set_dispersy_enabled(False) config.set_state_dir(self.getStateDir(index)) config.set_tunnel_community_socks5_listen_ports(self.get_socks5_ports()) @@ -170,7 +167,6 @@ def setup_tunnel_seeder(self, hops): self.seed_config = self.config.copy() self.seed_config.set_state_dir(self.getStateDir(2)) - self.seed_config.set_megacache_enabled(True) self.seed_config.set_tunnel_community_socks5_listen_ports(self.get_socks5_ports()) if self.session2 is None: self.session2 = Session(self.seed_config) diff --git a/Tribler/Test/Community/channel/__init__.py b/Tribler/Test/Community/channel/__init__.py deleted file mode 100644 index efca9dada69..00000000000 --- a/Tribler/Test/Community/channel/__init__.py +++ /dev/null @@ -1,3 +0,0 @@ -""" -This package contains unit tests for the channel community. -""" diff --git a/Tribler/Test/Community/channel/test_channel_base.py b/Tribler/Test/Community/channel/test_channel_base.py deleted file mode 100644 index fd7e1a5e537..00000000000 --- a/Tribler/Test/Community/channel/test_channel_base.py +++ /dev/null @@ -1,21 +0,0 @@ -from twisted.internet.defer import inlineCallbacks - -from Tribler.Test.Community.AbstractTestCommunity import AbstractTestCommunity -from Tribler.community.channel.community import ChannelCommunity - - -class AbstractTestChannelCommunity(AbstractTestCommunity): - - # We have to initialize Dispersy and the tunnel community on the reactor thread - - @inlineCallbacks - def setUp(self): - yield super(AbstractTestChannelCommunity, self).setUp() - self.channel_community = ChannelCommunity(self.dispersy, self.master_member, self.member) - - @inlineCallbacks - def tearDown(self): - # Don't unload_community() as it never got registered in dispersy on the first place. 
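
The teardown that follows cancels the community's outstanding reactor calls instead of performing a full unload, which is the right move for an object that was never registered with Dispersy. A minimal sketch of that cancel-and-drop pattern, with illustrative names rather than Dispersy APIs:

    from twisted.internet import reactor

    class SchedulesWork(object):
        # Toy stand-in for a community that schedules delayed reactor calls.

        def __init__(self):
            self._pending = []

        def schedule(self, delay, func):
            self._pending.append(reactor.callLater(delay, func))

        def cancel_all_pending_tasks(self):
            # Cancel anything that has not fired yet; no unregistration is
            # needed because this object was never registered anywhere.
            for call in self._pending:
                if call.active():
                    call.cancel()
            del self._pending[:]
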
- self.channel_community.cancel_all_pending_tasks() - self.channel_community = None - yield super(AbstractTestChannelCommunity, self).tearDown() diff --git a/Tribler/Test/Community/channel/test_channel_community.py b/Tribler/Test/Community/channel/test_channel_community.py deleted file mode 100644 index e430fe86f32..00000000000 --- a/Tribler/Test/Community/channel/test_channel_community.py +++ /dev/null @@ -1,56 +0,0 @@ -from Tribler.Core.TorrentDef import TorrentDef -from Tribler.Test.Community.channel.test_channel_base import AbstractTestChannelCommunity -from Tribler.Test.Core.base_test import MockObject - - -class TestChannelCommunity(AbstractTestChannelCommunity): - - def test_initialize(self): - def raise_runtime(): - raise RuntimeError() - self.channel_community._get_latest_channel_message = raise_runtime - self.channel_community.initialize() - self.assertIsNone(self.channel_community._channelcast_db) - - def test_remove_playlist_torrents(self): - """ - Testing whether the right methods are called when a torrent is removed from a playlist - """ - def mocked_load_message(undone, community, packet_id): - fake_message = MockObject() - fake_message.undone = undone - return fake_message - - def mocked_create_undo(_): - mocked_create_undo.called = True - mocked_create_undo.called = False - - def mocked_undo_playlist_torrent(_): - mocked_undo_playlist_torrent.called = True - mocked_undo_playlist_torrent.called = False - - self.channel_community.create_undo = mocked_create_undo - self.channel_community._disp_undo_playlist_torrent = mocked_undo_playlist_torrent - - self.channel_community._dispersy.load_message_by_packetid = \ - lambda community, pid: mocked_load_message(False, community, pid) - self.channel_community.remove_playlist_torrents(1234, [1234]) - self.assertTrue(mocked_create_undo.called) - - self.channel_community._dispersy.load_message_by_packetid = \ - lambda community, pid: mocked_load_message(True, community, pid) - self.channel_community.remove_playlist_torrents(1234, [1234]) - self.assertTrue(mocked_undo_playlist_torrent.called) - - def test_create_torrent_from_def(self): - """ - Testing whether a correct Dispersy message is created when we add a torrent to our channel - """ - metainfo = {"info": {"name": "my_torrent", "piece length": 12345, "pieces": "12345678901234567890", - "files": [{'path': ['test.txt'], 'length': 1234}]}} - torrent = TorrentDef.load_from_dict(metainfo) - self.channel_community.initialize() - - message = self.channel_community._disp_create_torrent_from_torrentdef(torrent, 12345) - self.assertEqual(message.payload.name, "my_torrent") - self.assertEqual(len(message.payload.files), 1) diff --git a/Tribler/Test/Community/channel/test_channel_conversion.py b/Tribler/Test/Community/channel/test_channel_conversion.py deleted file mode 100644 index 41f0c8a500f..00000000000 --- a/Tribler/Test/Community/channel/test_channel_conversion.py +++ /dev/null @@ -1,67 +0,0 @@ -import zlib -from struct import pack - -from twisted.internet.defer import inlineCallbacks - -from Tribler.Test.Community.channel.test_channel_base import AbstractTestChannelCommunity -from Tribler.Test.Core.base_test import MockObject -from Tribler.community.channel.conversion import ChannelConversion -from Tribler.dispersy.message import DropPacket -from Tribler.pyipv8.ipv8.messaging.deprecated.encoding import encode - - -class TestChannelConversion(AbstractTestChannelCommunity): - - @inlineCallbacks - def setUp(self): - yield super(TestChannelConversion, self).setUp() - 
self.channel_community.initialize() - self.conversion = ChannelConversion(self.channel_community) - - self.placeholder = MockObject() - - def test_encode_torrent(self): - """ - Test the encoding of a torrent file - """ - message = MockObject() - message.payload = MockObject() - - message.payload.name = u'test' - message.payload.infohash = 'a' * 20 - message.payload.timestamp = 1234 - message.payload.files = [(u'a', 1234)] - message.payload.trackers = ['udp://tracker.openbittorrent.com:80/announce', 'http://google.com'] - - meta = self.channel_community.get_meta_message(u"torrent") - msg = MockObject() - msg.meta = meta - - decoded_message = self.conversion._decode_torrent(msg, 0, self.conversion._encode_torrent(message)[0])[1] - self.assertEqual(len(decoded_message.files), 1) - self.assertEqual(len(decoded_message.trackers), 1) - - message.payload.files = [(u'a', 1234)] * 1000 - message.payload.trackers = ['udp://tracker.openbittorrent.com:80/announce'] * 100 - - decoded_message = self.conversion._decode_torrent(msg, 0, self.conversion._encode_torrent(message)[0])[1] - self.assertGreaterEqual(len(decoded_message.files), 133) - self.assertEqual(len(decoded_message.trackers), 10) - - def test_decode_torrent(self): - """ - Test the decoding of a torrent message - """ - self.assertRaises(DropPacket, self.conversion._decode_torrent, None, 0, "abcd") - self.assertRaises(DropPacket, self.conversion._decode_torrent, None, 0, zlib.compress("abcd")) - - # Test a successful decoding - meta = self.channel_community.get_meta_message(u"torrent") - msg = MockObject() - msg.meta = meta - - torrent_msg = encode((pack('!20sQ', 'a' * 20, 12345), u'torrent', ((u'a', 1234),), ('http://track.er',))) - _, msg = self.conversion._decode_torrent(msg, 0, zlib.compress(torrent_msg)) - - self.assertEqual(msg.infohash, 'a' * 20) - self.assertEqual(msg.name, u'torrent') diff --git a/Tribler/Test/Community/gigachannel/test_community.py b/Tribler/Test/Community/gigachannel/test_community.py index c9998226688..60a332f5383 100644 --- a/Tribler/Test/Community/gigachannel/test_community.py +++ b/Tribler/Test/Community/gigachannel/test_community.py @@ -1,229 +1,112 @@ +from __future__ import absolute_import + +import os + +from pony.orm import db_session + +from six.moves import xrange + from twisted.internet.defer import inlineCallbacks -from Tribler.community.gigachannel.community import ChannelDownloadCache, GigaChannelCommunity +from Tribler.Core.Modules.MetadataStore.OrmBindings.channel_node import NEW +from Tribler.Core.Modules.MetadataStore.store import MetadataStore +from Tribler.Core.Utilities.random_utils import random_infohash +from Tribler.community.gigachannel.community import GigaChannelCommunity +from Tribler.pyipv8.ipv8.keyvault.crypto import default_eccrypto from Tribler.pyipv8.ipv8.peer import Peer from Tribler.pyipv8.ipv8.test.base import TestBase -from Tribler.Test.mocking.channel import MockChannel -from Tribler.Test.mocking.download import MockDownload -from Tribler.Test.mocking.session import MockSession class TestGigaChannelUnits(TestBase): - """ Unit tests for the GigaChannel community which do not need a real Session. 
""" def setUp(self): super(TestGigaChannelUnits, self).setUp() - self.session = MockSession() - - self.initialize(GigaChannelCommunity, 1) + self.count = 0 + self.initialize(GigaChannelCommunity, 2) def create_node(self, *args, **kwargs): - kwargs['tribler_session'] = self.session - return super(TestGigaChannelUnits, self).create_node(*args, **kwargs) - - def _setup_fetch_next(self): - """ - Setup phase for fetch_next() tests. - - Provides: - - Database entry for a mocked Channel. - - download_channel() functionality for the mocked channel. - - Pending overlay.download_queue for the mocked channel. - """ - channel, download = self._setup_download_completed() - self.session.lm.set_download_channel(download) - self.nodes[0].overlay.download_queue = [channel.infohash] - - return channel, download - - def _setup_download_completed(self): - """ - Setup phase for the download_completed() tests. - - Provides: - - Database entry for a mocked Channel. - - Mocked (empty) download_channel() functionality. - """ - channel = MockChannel('\x00' * 20, 'LibNaCLPK:' + '\x00' * 64, 'test', 1, 0) - self.session.lm.mds.ChannelMetadata.add(channel) - download = MockDownload() - download.tdef.set_infohash(channel.infohash) - - return channel, download - - def test_select_random_none(self): - """ - No entries in the database should yield no results. - """ - channel_list = [] - self.session.lm.mds.ChannelMetadata.set_random_channels(channel_list) - - entries = self.nodes[0].overlay.get_random_entries() - - self.assertEqual(0, len(entries)) - - def test_select_random_one(self): - """ - One entry in the database should yield one result. - """ - channel_list = [MockChannel('\x00' * 20, 'LibNaCLPK:' + '\x00' * 64, 'test', 1, 0)] - self.session.lm.mds.ChannelMetadata.set_random_channels(channel_list) - - entries = self.nodes[0].overlay.get_random_entries() - - self.assertEqual(1, len(entries)) - self.assertEqual(entries[0].infohash, channel_list[0].infohash) - self.assertEqual(entries[0].public_key, channel_list[0].public_key[10:]) - self.assertEqual(entries[0].title, channel_list[0].title) - self.assertEqual(entries[0].version, channel_list[0].version) - - def test_select_random_many(self): - """ - Six entries in the database should yield six results. - """ - channel_list = [MockChannel('\x00' * 20, 'LibNaCLPK:' + '\x00' * 64, 'test', 1, 0)] * 6 - self.session.lm.mds.ChannelMetadata.set_random_channels(channel_list) - - entries = self.nodes[0].overlay.get_random_entries() - - self.assertEqual(6, len(entries)) - for entry in entries: - self.assertEqual(entry.infohash, channel_list[0].infohash) - self.assertEqual(entry.public_key, channel_list[0].public_key[10:]) - self.assertEqual(entry.title, channel_list[0].title) - self.assertEqual(entry.version, channel_list[0].version) - - def test_select_random_too_many(self): - """ - Ten entries in the database should be capped at seven results. 
- """ - channel_list = [MockChannel('\x00' * 20, 'LibNaCLPK:' + '\x00' * 64, 'test', 1, 0)] * 10 - self.session.lm.mds.ChannelMetadata.set_random_channels(channel_list) - - entries = self.nodes[0].overlay.get_random_entries() - - self.assertEqual(7, len(entries)) - for entry in entries: - self.assertEqual(entry.infohash, channel_list[0].infohash) - self.assertEqual(entry.public_key, channel_list[0].public_key[10:]) - self.assertEqual(entry.title, channel_list[0].title) - self.assertEqual(entry.version, channel_list[0].version) - - def test_update_with_download(self): - """ - Test if an update with a download extracts the seeder count as votes. - """ - channel, download = self._setup_download_completed() - - self.assertEqual(0, channel.votes) - - self.nodes[0].overlay.update_from_download(download) - - self.assertEqual(42, channel.votes) - - def test_download_completed_no_token(self): - """ - Test if the download completed callback extracts the seeder count as votes. - """ - channel, download = self._setup_download_completed() - - self.assertEqual(0, channel.votes) - - self.nodes[0].overlay.download_completed(download) - - self.assertEqual(42, channel.votes) - - def test_download_completed_with_token(self): - """ - Test if the download completed callback releases the download token. - """ - channel, download = self._setup_download_completed() - - token = ChannelDownloadCache(self.nodes[0].overlay.request_cache) - self.nodes[0].overlay.request_cache.add(token) - - self.nodes[0].overlay.download_completed(download) - - self.assertFalse(self.nodes[0].overlay.request_cache.has(token.prefix, token.number)) - - def test_fetch_next_no_token(self): - """ - Test if nothing happens when we fetch the next download without holding the download token. - """ - channel, download = self._setup_fetch_next() - - token = ChannelDownloadCache(self.nodes[0].overlay.request_cache) - self.nodes[0].overlay.request_cache.add(token) - - self.nodes[0].overlay.fetch_next() - - self.nodes[0].overlay.request_cache.pop(token.prefix, token.number) - - self.assertEqual(1, len(self.nodes[0].overlay.download_queue)) - - def test_fetch_next_already_known(self): - """ - Test if we throw out a download when we fetch a download we already know. - """ - channel, download = self._setup_fetch_next() - self.session.add_known_infohash(channel.infohash) - - self.nodes[0].overlay.fetch_next() - - self.assertEqual(0, len(self.nodes[0].overlay.download_queue)) + metadata_store = MetadataStore(os.path.join(self.temporary_directory(), "%d.db" % self.count), + self.temporary_directory(), default_eccrypto.generate_key(u"curve25519")) + kwargs['metadata_store'] = metadata_store + node = super(TestGigaChannelUnits, self).create_node(*args, **kwargs) + self.count += 1 + return node + + def add_random_torrent(self, metadata_cls): + torrent_metadata = metadata_cls.from_dict({ + "infohash": random_infohash(), + "title": "test", + "tags": "", + "size": 1234, + "status": NEW + }) + torrent_metadata.sign() @inlineCallbacks - def test_fetch_next(self): + def test_send_random_one_channel(self): """ - Test if we download a channel if we have nothing else to do. 
+        Test whether sending a single channel with a single torrent to another peer works correctly
         """
-        channel, download = self._setup_fetch_next()
-
-        self.nodes[0].overlay.fetch_next()
+        with db_session:
+            channel = self.nodes[0].overlay.metadata_store.ChannelMetadata.create_channel("test", "bla")
+            self.add_random_torrent(self.nodes[0].overlay.metadata_store.TorrentMetadata)
+            channel.commit_channel_torrent()
 
-        self.assertTrue(self.session.lm.downloading)
+        self.nodes[0].overlay.send_random_to(Peer(self.nodes[1].my_peer.public_key, self.nodes[1].endpoint.wan_address))
 
-        self.assertEqual(0, channel.votes)
-
-        self.session.lm.finish_download_channel()
-
-        yield self.session.lm.downloaded_channel_deferred
+        yield self.deliver_messages()
 
-        self.assertFalse(self.session.lm.downloading)
-        self.assertEqual(42, channel.votes)
+        with db_session:
+            self.assertEqual(len(self.nodes[1].overlay.metadata_store.ChannelMetadata.select()), 1)
+            channel = self.nodes[1].overlay.metadata_store.ChannelMetadata.select()[:][0]
+            self.assertEqual(channel.contents_len, 1)
 
     @inlineCallbacks
-    def test_send_random_to_known_new(self):
+    def test_send_random_multiple_torrents(self):
         """
-        Test if we do not add new downloads to the queue if we get sent a new channel.
+        Test whether sending a single channel with multiple torrents to another peer works correctly
         """
-        channel = MockChannel('\x00' * 20, 'LibNaCLPK:' + '\x00' * 64, 'test', 1, 0)
-        self.session.lm.mds.ChannelMetadata.set_random_channels([channel])
+        with db_session:
+            channel = self.nodes[0].overlay.metadata_store.ChannelMetadata.create_channel("test", "bla")
+            for _ in xrange(20):
+                self.add_random_torrent(self.nodes[0].overlay.metadata_store.TorrentMetadata)
+            channel.commit_channel_torrent()
 
-        self.nodes[0].overlay.send_random_to(Peer(self.nodes[0].my_peer.public_key, self.nodes[0].endpoint.wan_address))
+        self.nodes[0].overlay.send_random_to(Peer(self.nodes[1].my_peer.public_key, self.nodes[1].endpoint.wan_address))
 
         yield self.deliver_messages()
 
-        self.assertEqual(1, len(self.nodes[0].overlay.download_queue))
-        self.assertIn(channel.infohash, self.nodes[0].overlay.download_queue)
+        with db_session:
+            self.assertEqual(len(self.nodes[1].overlay.metadata_store.ChannelMetadata.select()), 1)
+            channel = self.nodes[1].overlay.metadata_store.ChannelMetadata.select()[:][0]
+            self.assertLess(channel.contents_len, 20)
 
     @inlineCallbacks
-    def test_send_random_to_known_update(self):
+    def test_send_and_get_channel_update_back(self):
         """
-        Test if we do not add new downloads to the queue if we get sent a new channel. 
+        Test if sending back information on an updated version of a channel works
         """
-        old_channel = MockChannel('\x00' * 20, 'LibNaCLPK:' + '\x00' * 64, 'test', 1, 0)
-        self.session.lm.mds.ChannelMetadata.add(old_channel)
-        new_channel = MockChannel('\x01' * 20, 'LibNaCLPK:' + '\x00' * 64, 'test', 2, 0)
-        self.session.lm.mds.ChannelMetadata.set_random_channels([new_channel])
+        with db_session:
+            # Add channel to node 0
+            channel = self.nodes[0].overlay.metadata_store.ChannelMetadata.create_channel("test", "bla")
+            for _ in xrange(20):
+                self.add_random_torrent(self.nodes[0].overlay.metadata_store.TorrentMetadata)
+            channel.commit_channel_torrent()
+            channel_v1_dict = channel.to_dict()
+            self.add_random_torrent(self.nodes[0].overlay.metadata_store.TorrentMetadata)
+            channel.commit_channel_torrent()
+
+            # Add the outdated version of the channel to node 1
+            self.nodes[1].overlay.metadata_store.ChannelMetadata.from_dict(channel_v1_dict)
 
-        self.nodes[0].overlay.send_random_to(Peer(self.nodes[0].my_peer.public_key, self.nodes[0].endpoint.wan_address))
+        # node1 --outdated_channel--> node0
+        self.nodes[1].overlay.send_random_to(Peer(self.nodes[0].my_peer.public_key, self.nodes[0].endpoint.wan_address))
 
         yield self.deliver_messages()
 
-        self.assertEqual(1, len(self.nodes[0].overlay.download_queue))
-        self.assertIn(old_channel.infohash, self.nodes[0].overlay.download_queue)
-        self.assertEqual(old_channel.infohash, new_channel.infohash)
+        with db_session:
+            self.assertEqual(self.nodes[1].overlay.metadata_store.ChannelMetadata.select()[:][0].timestamp,
+                             self.nodes[0].overlay.metadata_store.ChannelMetadata.select()[:][0].timestamp)
diff --git a/Tribler/Test/Community/gigachannel/test_community_fullsession.py b/Tribler/Test/Community/gigachannel/test_community_fullsession.py
deleted file mode 100644
index 43c50c16a76..00000000000
--- a/Tribler/Test/Community/gigachannel/test_community_fullsession.py
+++ /dev/null
@@ -1,130 +0,0 @@
-from __future__ import absolute_import
-import os
-
-from pony.orm import db_session
-from six.moves import xrange
-from twisted.internet import reactor
-from twisted.internet.defer import inlineCallbacks
-from twisted.internet.task import deferLater
-
-from Tribler.community.gigachannel.community import GigaChannelCommunity
-from Tribler.Core.Session import Session
-from Tribler.pyipv8.ipv8.keyvault.crypto import default_eccrypto
-from Tribler.pyipv8.ipv8.peer import Peer
-from Tribler.Test.test_as_server import TestAsServer
-
-
-class TestGigaChannelCommunity(TestAsServer):
-
-    @inlineCallbacks
-    def setUp(self):
-        yield TestAsServer.setUp(self)
-
-        self.config2 = self.localize_config(self.config, 1)
-        self.session2 = Session(self.config2)
-        self.session2.upgrader_enabled = False
-        yield self.session2.start()
-
-        self.sessions = [self.session, self.session2]
-
-        self.test_class = GigaChannelCommunity
-        self.test_class.master_peer = Peer(default_eccrypto.generate_key(u"curve25519"))
-
-    def localize_config(self, config, nr=0):
-        out = config.copy()
-        out.set_state_dir(self.getStateDir(nr))
-        out.set_default_destination_dir(self.getDestDir(nr))
-        out.set_permid_keypair_filename(os.path.join(self.getStateDir(nr), "keypair_" + str(nr)))
-        out.set_trustchain_keypair_filename(os.path.join(self.getStateDir(nr), "tc_keypair_" + str(nr)))
-        return out
-
-    def setUpPreSession(self):
-        TestAsServer.setUpPreSession(self)
-        self.config.set_dispersy_enabled(False)
-        self.config.set_ipv8_enabled(True)
-        self.config.set_libtorrent_enabled(True)
-        self.config.set_trustchain_enabled(False)
-        
self.config.set_resource_monitor_enabled(False) - self.config.set_tunnel_community_socks5_listen_ports(self.get_socks5_ports()) - self.config.set_chant_enabled(True) - self.config = self.localize_config(self.config) - - @inlineCallbacks - def tearDown(self): - yield self.session2.shutdown() - yield TestAsServer.tearDown(self) - - def _create_channel(self): - self.session.lm.mds.ChannelMetadata.create_channel('test' + ''.join(str(i) for i in range(100)), 'test') - my_key = self.session.trustchain_keypair - my_channel_id = my_key.pub().key_to_bin() - with db_session: - my_channel = self.session.lm.mds.ChannelMetadata.get_channel_with_id(my_channel_id) - for ind in xrange(20): - random_infohash = '\x00' * 20 - self.session.lm.mds.TorrentMetadata(title='test ind %d' % ind, tags='test', - size=1234, infohash=random_infohash) - my_channel.commit_channel_torrent() - torrent_path = os.path.join(self.session.lm.mds.channels_dir, my_channel.dir_name + ".torrent") - self.session.lm.updated_my_channel(torrent_path) - return my_channel_id - - def introduce_nodes(self): - self.session.lm.gigachannel_community.walk_to(self.session2.lm.gigachannel_community.my_estimated_lan) - return self.deliver_messages() - - @inlineCallbacks - def test_fetch_channel(self): - """ - Test if a fetch_next() call is answered with a channel. - """ - # Peer 1 creates a channel and introduces itself to peer 2 - channel_id = self._create_channel() - yield self.introduce_nodes() - - # Peer 1 sends its channel to peer 2 - peer2 = self.session2.lm.gigachannel_community.my_peer - peer2.address = self.session2.lm.gigachannel_community.my_estimated_lan - self.session.lm.gigachannel_community.send_random_to(peer2) - yield self.deliver_messages() - - # Peer 2 acts upon the known channels - self.session2.lm.gigachannel_community.fetch_next() - yield self.deliver_messages() - - with db_session: - channel_list1 = list(self.session.lm.mds.ChannelMetadata.select()) - channel_list2 = list(self.session2.lm.mds.ChannelMetadata.select()) - - self.assertEqual(1, len(channel_list1)) - self.assertEqual(1, len(channel_list2)) - self.assertEqual(channel_id, str(channel_list1[0].public_key)) - self.assertEqual(channel_id, str(channel_list2[0].public_key)) - self.assertTrue(self.session.has_download(str(channel_list1[0].infohash))) - self.assertTrue(self.session2.has_download(str(channel_list1[0].infohash))) - - @inlineCallbacks - def deliver_messages(self, timeout=.1): - """ - Allow peers to communicate. - The strategy is as follows: - 1. Measure the amount of working threads in the threadpool - 2. After 10 milliseconds, check if we are down to 0 twice in a row - 3. 
If not, go back to handling calls (step 2) or return, if the timeout has been reached - :param timeout: the maximum time to wait for messages to be delivered - """ - rtime = 0 - probable_exit = False - while rtime < timeout: - yield self.sleep(.01) - rtime += .01 - if len(reactor.getThreadPool().working) == 0: - if probable_exit: - break - probable_exit = True - else: - probable_exit = False - - @inlineCallbacks - def sleep(self, time=.05): - yield deferLater(reactor, time, lambda: None) diff --git a/Tribler/Test/Community/gigachannel/test_sync_strategy.py b/Tribler/Test/Community/gigachannel/test_sync_strategy.py index b3257005d41..0538fc6daaf 100644 --- a/Tribler/Test/Community/gigachannel/test_sync_strategy.py +++ b/Tribler/Test/Community/gigachannel/test_sync_strategy.py @@ -20,6 +20,7 @@ def fetch_next(self): def get_peers(self): return self.get_peers_return + class TestSyncChannels(TestBase): def setUp(self): @@ -29,12 +30,11 @@ def setUp(self): def test_strategy_no_peers(self): """ - If we have no peers, we should still inspect our download queue. + If we have no peers, no random entries should have been sent. """ self.strategy.take_step() self.assertListEqual([], self.community.send_random_to_called) - self.assertTrue(self.community.fetch_next_called) def test_strategy_one_peer(self): """ @@ -45,7 +45,6 @@ def test_strategy_one_peer(self): self.assertEqual(1, len(self.community.send_random_to_called)) self.assertEqual(self.community.get_peers_return[0], self.community.send_random_to_called[0]) - self.assertTrue(self.community.fetch_next_called) def test_strategy_multi_peer(self): """ @@ -59,4 +58,3 @@ def test_strategy_multi_peer(self): self.assertEqual(1, len(self.community.send_random_to_called)) self.assertIn(self.community.send_random_to_called[0], self.community.get_peers_return) - self.assertTrue(self.community.fetch_next_called) diff --git a/Tribler/Test/Community/popularity/test_community.py b/Tribler/Test/Community/popularity/test_community.py index 28bf583f55c..8634b2cd4e0 100644 --- a/Tribler/Test/Community/popularity/test_community.py +++ b/Tribler/Test/Community/popularity/test_community.py @@ -1,223 +1,41 @@ +from __future__ import absolute_import + +import os import random +from pony.orm import db_session + +from six.moves import xrange + from twisted.internet.defer import inlineCallbacks -from Tribler.Core.Utilities.random_utils import random_infohash, random_string, random_utf8_string -from Tribler.Test.Core.base_test import MockObject -from Tribler.community.popularity import constants -from Tribler.community.popularity.community import PopularityCommunity, MSG_TORRENT_HEALTH_RESPONSE, \ - MSG_CHANNEL_HEALTH_RESPONSE, ERROR_UNKNOWN_PEER, ERROR_NO_CONTENT, \ - ERROR_UNKNOWN_RESPONSE -from Tribler.community.popularity.constants import SEARCH_TORRENT_REQUEST, MSG_TORRENT_INFO_RESPONSE, MSG_SUBSCRIPTION -from Tribler.community.popularity.payload import SearchResponseItemPayload, TorrentInfoResponsePayload, \ - TorrentHealthPayload, ContentSubscription -from Tribler.community.popularity.repository import TYPE_TORRENT_HEALTH -from Tribler.community.popularity.request import ContentRequest + +from Tribler.Core.Modules.MetadataStore.store import MetadataStore +from Tribler.community.popularity.community import MSG_TORRENT_HEALTH_RESPONSE, PopularityCommunity +from Tribler.pyipv8.ipv8.keyvault.crypto import default_eccrypto from Tribler.pyipv8.ipv8.test.base import TestBase from Tribler.pyipv8.ipv8.test.mocking.ipv8 import MockIPv8 -from Tribler.Test.tools import 
trial_timeout -class TestPopularityCommunityBase(TestBase): + +class TestPopularityCommunity(TestBase): NUM_NODES = 2 def setUp(self): - super(TestPopularityCommunityBase, self).setUp() + super(TestPopularityCommunity, self).setUp() + self.shared_key = default_eccrypto.generate_key(u"curve25519") self.initialize(PopularityCommunity, self.NUM_NODES) def create_node(self, *args, **kwargs): - def load_random_torrents(limit): - return [ - ['\xfdC\xf9+V\x11A\xe7QG\xfb\xb1*6\xef\xa5\xaeu\xc2\xe0', - random.randint(200, 250), random.randint(1, 10), 1525704192.166107] for _ in range(limit) - ] - - torrent_db = MockObject() - torrent_db.getTorrent = lambda *args, **kwargs: None - torrent_db.updateTorrent = lambda *args, **kwargs: None - torrent_db.getRecentlyCheckedTorrents = load_random_torrents - - channel_db = MockObject() - - return MockIPv8(u"curve25519", PopularityCommunity, torrent_db=torrent_db, channel_db=channel_db) - - -class MockRepository(object): - - def __init__(self): - super(MockRepository, self).__init__() - self.sample_torrents = [] - self.setup_torrents() - - def setup_torrents(self): - for _ in range(10): - infohash = random_infohash() - name = random_utf8_string() - length = random.randint(1000, 9999) - num_files = random.randint(1, 10) - category_list = ['video', 'audio'] - creation_date = random.randint(1000000, 111111111) - seeders = random.randint(10, 200) - leechers = random.randint(5, 1000) - cid = random_string(size=20) - - self.sample_torrents.append([infohash, name, length, num_files, category_list, creation_date, - seeders, leechers, cid]) - - def search_torrent(self, _): - sample_items = [] - for torrent in self.sample_torrents: - sample_items.append(SearchResponseItemPayload(*torrent)) - return sample_items - - def search_channels(self, _): - return [] - - def has_torrent(self, _): - return False - - def cleanup(self): - pass - - def update_from_search_results(self, results): - pass - - def get_torrent(self, _): - torrent = self.sample_torrents[0] - db_torrent = {'name': torrent[1], - 'length': torrent[2], - 'creation_date': torrent[5], - 'num_files': torrent[3], - 'comment': ''} - return db_torrent - - def get_top_torrents(self): - return self.sample_torrents - - def update_from_torrent_search_results(self, search_results): - pass - - -class TestPopularityCommunity(TestPopularityCommunityBase): - __testing__ = False - NUM_NODES = 2 - - @inlineCallbacks - def test_subscribe_peers(self): - """ - Tests subscribing to peers populate publishers and subscribers list. - """ - self.nodes[1].overlay.send_torrent_info_response = lambda infohash, peer: None - yield self.introduce_nodes() - self.nodes[0].overlay.subscribe_peers() - yield self.deliver_messages() - - # Node 0 should have a publisher added - self.assertGreater(len(self.nodes[0].overlay.publishers), 0, "Publisher expected") - # Node 1 should have a subscriber added - self.assertGreater(len(self.nodes[1].overlay.subscribers), 0, "Subscriber expected") + mds = MetadataStore(os.path.join(self.temporary_directory(), 'test.db'), self.temporary_directory(), + self.shared_key) - @inlineCallbacks - def test_subscribe_unsubscribe_individual_peers(self): - """ - Tests subscribing/subscribing an individual peer. 
- """ - self.nodes[1].overlay.send_torrent_info_response = lambda infohash, peer: None - self.nodes[1].overlay.publish_latest_torrents = lambda *args, **kwargs: None - - yield self.introduce_nodes() - self.nodes[0].overlay.subscribe(self.nodes[1].my_peer, subscribe=True) - yield self.deliver_messages() - - self.assertEqual(len(self.nodes[0].overlay.publishers), 1, "Expected one publisher") - self.assertEqual(len(self.nodes[1].overlay.subscribers), 1, "Expected one subscriber") - - self.nodes[0].overlay.subscribe(self.nodes[1].my_peer, subscribe=False) - yield self.deliver_messages() - - self.assertEqual(len(self.nodes[0].overlay.publishers), 0, "Expected no publisher") - self.assertEqual(len(self.nodes[1].overlay.subscribers), 0, "Expected no subscriber") - - def test_unsubscribe_multiple_peers(self): - """ - Tests unsubscribing multiple peers works as expected. - """ - def send_popular_content_subscribe(my_peer, _, subscribe): - if not subscribe: - my_peer.unsubsribe_called += 1 - - self.nodes[0].overlay.subscribe = lambda peer, subscribe: \ - send_popular_content_subscribe(self.nodes[0], peer, subscribe) - - # Add some peers - num_peers = 10 - default_peers = [self.create_node() for _ in range(num_peers)] - self.nodes[0].overlay.get_peers = lambda: default_peers - self.assertEqual(len(self.nodes[0].overlay.get_peers()), num_peers) - - # Add some publishers - for peer in default_peers: - self.nodes[0].overlay.publishers.add(peer) - self.assertEqual(len(self.nodes[0].overlay.publishers), num_peers) + # Add some content to the metadata database + with db_session: + mds.ChannelMetadata.create_channel('test', 'test') + for torrent_ind in xrange(5): + torrent = mds.TorrentMetadata(title='torrent%d' % torrent_ind, infohash=('%d' % torrent_ind) * 20) + torrent.health.seeders = torrent_ind + 1 - # Unsubscribe all the peers - self.nodes[0].unsubsribe_called = 0 - self.nodes[0].overlay.unsubscribe_peers() - - # Check if unsubscription was successful - self.assertEqual(self.nodes[0].unsubsribe_called, num_peers) - self.assertEqual(len(self.nodes[0].overlay.publishers), 0) - - def test_refresh_peers(self): - """ - Tests if refresh_peer_list() updates the publishers and subscribers list - """ - default_peers = [self.create_node() for _ in range(10)] - - for peer in default_peers: - self.nodes[0].overlay.publishers.add(peer) - self.nodes[0].overlay.subscribers.add(peer) - - self.nodes[0].overlay.get_peers = lambda: default_peers - self.assertEqual(len(self.nodes[0].overlay.get_peers()), 10) - - # Remove half of the peers and refresh peer list - default_peers = default_peers[:5] - self.nodes[0].overlay.refresh_peer_list() - - # List of publishers and subscribers should be updated - self.assertEqual(len(self.nodes[0].overlay.get_peers()), 5) - self.assertEqual(len(self.nodes[0].overlay.subscribers), 5) - self.assertEqual(len(self.nodes[0].overlay.publishers), 5) - - @trial_timeout(6) - @inlineCallbacks - def test_start(self): - """ - Tests starting of the community. Peer should start subscribing to other connected peers. 
- """ - self.nodes[1].overlay.send_torrent_info_response = lambda infohash, peer: None - - def fake_refresh_peer_list(peer): - peer.called_refresh_peer_list = True - - def fake_publish_next_content(peer): - peer.called_publish_next_content = True - - self.nodes[0].called_refresh_peer_list = False - self.nodes[0].called_publish_next_content = False - self.nodes[0].overlay.refresh_peer_list = lambda: fake_refresh_peer_list(self.nodes[0]) - self.nodes[0].overlay.publish_next_content = lambda: fake_publish_next_content(self.nodes[0]) - - yield self.introduce_nodes() - self.nodes[0].overlay.start() - yield self.sleep(constants.PUBLISH_INTERVAL) - - # Node 0 should have a publisher added - self.assertEqual(len(self.nodes[0].overlay.publishers), 1, "Expected one publisher") - # Node 1 should have a subscriber added - self.assertEqual(len(self.nodes[1].overlay.subscribers), 1, "Expected one subscriber") - - self.assertTrue(self.nodes[0].called_refresh_peer_list) - self.assertTrue(self.nodes[0].called_publish_next_content) + return MockIPv8(u"curve25519", PopularityCommunity, metadata_store=mds) @inlineCallbacks def test_content_publishing(self): @@ -225,6 +43,7 @@ def test_content_publishing(self): Tests publishing next available content. :return: """ + def on_torrent_health_response(peer, source_address, data): peer.torrent_health_response_received = True @@ -238,7 +57,7 @@ def on_torrent_health_response(peer, source_address, data): # Add something to queue health_info = ('a' * 20, random.randint(1, 100), random.randint(1, 10), random.randint(1, 111111)) - self.nodes[1].overlay.queue_content(TYPE_TORRENT_HEALTH, health_info) + self.nodes[1].overlay.queue_content(health_info) self.nodes[1].overlay.publish_next_content() @@ -247,495 +66,22 @@ def on_torrent_health_response(peer, source_address, data): self.assertTrue(self.nodes[0].torrent_health_response_received, "Expected to receive torrent response") @inlineCallbacks - def test_publish_no_content(self): - """ - Tests publishing next content if no content is available. - """ - original_logger = self.nodes[0].overlay.logger - self.nodes[0].overlay.logger.debug = lambda *args, **kw: self.fake_logger_error(self.nodes[0], *args) - - # Assume a subscribers exist - self.nodes[0].overlay.subscribers = [self.create_node()] - # No content - self.nodes[0].overlay.content_repository.pop_content = lambda: (None, None) - - # Try publishing the next available content - self.nodes[0].no_content = False - self.nodes[0].overlay.publish_next_content() - yield self.deliver_messages() - - # Expect no content found to be logged - self.assertTrue(self.nodes[0].no_content) - - # Restore logger - self.nodes[0].overlay.logger = original_logger - - @inlineCallbacks - def test_send_torrent_health_response(self): - """ - Tests sending torrent health response. 
- """ - original_logger = self.nodes[0].overlay.logger - self.nodes[0].overlay.logger.debug = lambda *args, **kw: self.fake_logger_error(self.nodes[0], *args) - - self.nodes[0].overlay.create_message_packet = lambda _type, _payload: \ - self.fake_create_message_packet(self.nodes[0], _type, _payload) - self.nodes[0].overlay.broadcast_message = lambda packet, peer: \ - self.fake_broadcast_message(self.nodes[0], packet, peer) - - # Two default peers - default_peers = [self.create_node() for _ in range(2)] - # Assuming only one is connected - self.nodes[0].overlay.get_peers = lambda: default_peers[:1] - - # Case1: Try to send subscribe response to non-connected peer - self.nodes[0].unknown_peer_found = False - self.nodes[0].logger_error_called = False - payload = MockObject() - self.nodes[0].overlay.send_torrent_health_response(payload, peer=default_peers[1]) - yield self.deliver_messages() - - # Expected unknown peer error log - self.assertTrue(self.nodes[0].logger_error_called) - self.assertTrue(self.nodes[0].unknown_peer_found) - - # Case2: Try to send response to the connected peer - self.nodes[0].broadcast_called = False - self.nodes[0].broadcast_packet_type = None - self.nodes[0].overlay.send_torrent_health_response(payload, peer=default_peers[0]) - yield self.deliver_messages() - - # Expect message to be sent - self.assertTrue(self.nodes[0].packet_created, "Create packet failed") - self.assertEqual(self.nodes[0].packet_type, MSG_TORRENT_HEALTH_RESPONSE, "Unexpected payload type found") - self.assertTrue(self.nodes[0].broadcast_called, "Should send a message to the peer") - self.assertEqual(self.nodes[0].receiver, default_peers[0], "Intended receiver is different") - - # Restore logger - self.nodes[0].overlay.logger = original_logger - - @inlineCallbacks - def test_send_channel_health_response(self): - """ - Tests sending torrent health response. 
- """ - original_logger = self.nodes[0].overlay.logger - self.nodes[0].overlay.logger.debug = lambda *args, **kw: self.fake_logger_error(self.nodes[0], *args) - - self.nodes[0].overlay.create_message_packet = lambda _type, _payload: \ - self.fake_create_message_packet(self.nodes[0], _type, _payload) - self.nodes[0].overlay.broadcast_message = lambda packet, peer: \ - self.fake_broadcast_message(self.nodes[0], packet, peer) - - # Two default peers - default_peers = [self.create_node() for _ in range(2)] - # Assuming only one is connected - self.nodes[0].overlay.get_peers = lambda: default_peers[:1] - - # Case1: Try to send response to non-connected peer - self.nodes[0].unknown_peer_found = False - self.nodes[0].logger_error_called = False - payload = MockObject() - self.nodes[0].overlay.send_channel_health_response(payload, peer=default_peers[1]) - yield self.deliver_messages() - - # Expected unknown peer error log - self.assertTrue(self.nodes[0].logger_error_called) - self.assertTrue(self.nodes[0].unknown_peer_found) - - # Case2: Try to send response to the connected peer - self.nodes[0].broadcast_called = False - self.nodes[0].broadcast_packet_type = None - self.nodes[0].overlay.send_channel_health_response(payload, peer=default_peers[0]) - yield self.deliver_messages() - - # Expect message to be sent - self.assertTrue(self.nodes[0].packet_created, "Create packet failed") - self.assertEqual(self.nodes[0].packet_type, MSG_CHANNEL_HEALTH_RESPONSE, "Unexpected payload type found") - self.assertTrue(self.nodes[0].broadcast_called, "Should send a message to the peer") - self.assertEqual(self.nodes[0].receiver, default_peers[0], "Intended receiver is different") - - # Restore logger - self.nodes[0].overlay.logger = original_logger - - @inlineCallbacks - def test_send_torrent_info_request_response(self): - """ Test if torrent info request response works as expected. 
""" - self.nodes[1].called_send_torrent_info_response = False - original_send_torrent_info_response = self.nodes[1].overlay.send_torrent_info_response - - def send_torrent_info_response(node, infohash, peer): - node.called_infohash = infohash - node.called_peer = peer - node.called_send_torrent_info_response = True - - self.nodes[1].overlay.send_torrent_info_response = lambda infohash, peer: \ - send_torrent_info_response(self.nodes[1], infohash, peer) - - yield self.introduce_nodes() - self.nodes[0].overlay.subscribe_peers() - yield self.deliver_messages() - - infohash = 'a'*20 - self.nodes[0].overlay.send_torrent_info_request(infohash, self.nodes[1].my_peer) - yield self.deliver_messages() - - self.assertTrue(self.nodes[1].called_send_torrent_info_response) - self.nodes[1].overlay.send_torrent_info_response = original_send_torrent_info_response - - @inlineCallbacks - def test_send_content_info_request_response(self): - """ Test if content info request response works as expected """ - - self.nodes[0].overlay.content_repository = MockRepository() - self.nodes[1].overlay.content_repository = MockRepository() - self.nodes[1].overlay.publish_latest_torrents = lambda *args, **kwargs: None - - self.nodes[1].called_send_content_info_response = False - - def send_content_info_response(node, peer, content_type): - node.called_send_content_info_response = True - node.called_peer = peer - node.called_content_type = content_type - - self.nodes[1].overlay.send_content_info_response = lambda peer, identifier, content_type, _: \ - send_content_info_response(self.nodes[1], peer, content_type) - - yield self.introduce_nodes() - self.nodes[0].overlay.subscribe_peers() - yield self.deliver_messages() - - content_type = SEARCH_TORRENT_REQUEST - request_list = ['ubuntu'] - self.nodes[0].overlay.send_content_info_request(content_type, request_list, peer=self.nodes[1].my_peer) - yield self.deliver_messages() - - self.assertTrue(self.nodes[1].called_send_content_info_response) - - @inlineCallbacks - def test_on_torrent_health_response_from_unknown_peer(self): + def test_publish_latest_torrents(self): """ - Tests receiving torrent health response from unknown peer + Test publishing all latest torrents """ - original_logger = self.nodes[0].overlay.logger - self.nodes[0].overlay.logger.error = lambda *args, **kw: self.fake_logger_error(self.nodes[0], *args) - - infohash = 'a' * 20 - num_seeders = 10 - num_leechers = 5 - timestamp = 123123123 - - payload = TorrentHealthPayload(infohash, num_seeders, num_leechers, timestamp) - source_address = ('1.1.1.1', 1024) - data = self.nodes[0].overlay.create_message_packet(MSG_TORRENT_HEALTH_RESPONSE, payload) - - self.nodes[0].unknown_response = False - self.nodes[0].overlay.on_torrent_health_response(source_address, data) - yield self.deliver_messages() - - self.assertTrue(self.nodes[0].unknown_response) - - # Restore logger - self.nodes[0].overlay.logger = original_logger - - @inlineCallbacks - def test_on_torrent_health_response(self): - """ - Tests receiving torrent health response from unknown peer - """ - def fake_update_torrent(peer): - peer.called_update_torrent = True - - self.nodes[0].overlay.content_repository = MockRepository() - self.nodes[0].overlay.content_repository.update_torrent_health = lambda payload, peer_trust: \ - fake_update_torrent(self.nodes[0]) - - infohash = 'a' * 20 - num_seeders = 10 - num_leechers = 5 - timestamp = 123123123 - - payload = TorrentHealthPayload(infohash, num_seeders, num_leechers, timestamp) - data = 
self.nodes[1].overlay.create_message_packet(MSG_TORRENT_HEALTH_RESPONSE, payload) - - yield self.introduce_nodes() - - # Add node 1 in publisher list of node 0 - self.nodes[0].overlay.publishers.add(self.nodes[1].my_peer) - self.nodes[0].overlay.on_torrent_health_response(self.nodes[1].my_peer.address, data) - yield self.deliver_messages() - - self.assertTrue(self.nodes[0].called_update_torrent) - - @inlineCallbacks - def test_on_torrent_info_response(self): - """ - Tests receiving torrent health response. - """ - def fake_update_torrent_info(peer): - peer.called_update_torrent = True - - self.nodes[0].overlay.content_repository = MockRepository() - self.nodes[0].overlay.content_repository.update_torrent_info = lambda payload: \ - fake_update_torrent_info(self.nodes[0]) - - infohash = 'a' * 20 - name = "ubuntu" - length = 100 - creation_date = 123123123 - num_files = 33 - comment = '' - - payload = TorrentInfoResponsePayload(infohash, name, length, creation_date, num_files, comment) - data = self.nodes[1].overlay.create_message_packet(MSG_TORRENT_INFO_RESPONSE, payload) - - yield self.introduce_nodes() - - # Add node 1 in publisher list of node 0 - self.nodes[0].overlay.publishers.add(self.nodes[1].my_peer) - self.nodes[0].overlay.on_torrent_info_response(self.nodes[1].my_peer.address, data) - yield self.deliver_messages() - - self.assertTrue(self.nodes[0].called_update_torrent) - - @inlineCallbacks - def test_on_torrent_info_response_from_unknown_peer(self): - """ - Tests receiving torrent health response from unknown peer. - """ - - def fake_update_torrent_info(peer): - peer.called_update_torrent = True - - self.nodes[0].overlay.content_repository = MockRepository() - self.nodes[0].overlay.content_repository.update_torrent_info = lambda payload: \ - fake_update_torrent_info(self.nodes[0]) - - infohash = 'a' * 20 - name = "ubuntu" - length = 100 - creation_date = 123123123 - num_files = 33 - comment = '' - - payload = TorrentInfoResponsePayload(infohash, name, length, creation_date, num_files, comment) - data = self.nodes[1].overlay.create_message_packet(MSG_TORRENT_INFO_RESPONSE, payload) - - yield self.introduce_nodes() - - self.nodes[0].called_update_torrent = False - self.nodes[0].overlay.on_torrent_info_response(self.nodes[1].my_peer.address, data) - yield self.deliver_messages() - - self.assertFalse(self.nodes[0].called_update_torrent) - - @inlineCallbacks - def test_on_subscription_status(self): - """ - Tests receiving subscription status. - """ - subscribe = True - identifier = 123123123 - payload = ContentSubscription(identifier, subscribe) - data = self.nodes[1].overlay.create_message_packet(MSG_SUBSCRIPTION, payload) - # Set the cache request - self.nodes[0].overlay.request_cache.pop = lambda prefix, identifer: MockObject() - self.nodes[0].overlay.request_cache.has = lambda prefix, identifer: True - yield self.introduce_nodes() - self.assertEqual(len(self.nodes[0].overlay.publishers), 0) - - self.nodes[0].overlay.on_subscription_status(self.nodes[1].my_peer.address, data) + self.nodes[1].overlay.subscribe_peers() yield self.deliver_messages() - self.assertEqual(len(self.nodes[0].overlay.publishers), 1) - - @inlineCallbacks - def test_on_subscription_status_no_cache(self): - """ - Tests receiving subscription status when request is not available in cache. 
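TorrentHealthPayload itself survives this cleanup and keeps its serialization contract (see the retained test_torrent_health_payload in test_payload.py further down). A condensed sketch of the pack/unpack round trip, using only calls that appear in that test:

    from Tribler.community.popularity.payload import TorrentHealthPayload
    from Tribler.pyipv8.ipv8.messaging.serialization import Serializer

    serializer = Serializer()

    # Pack the payload into its wire representation...
    payload = TorrentHealthPayload('a' * 20, 10, 4, 123123123)
    serialized = serializer.pack_multiple(payload.to_pack_list())[0]

    # ...then unpack and rebuild it; the fields should round-trip unchanged.
    (unpacked, _) = serializer.unpack_multiple(TorrentHealthPayload.format_list, serialized)
    deserialized = TorrentHealthPayload.from_unpack_list(*unpacked)
    assert deserialized.num_seeders == 10
    assert deserialized.num_leechers == 4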
- """ - subscribe = True - identifier = 123123123 - payload = ContentSubscription(identifier, subscribe) - data = self.nodes[1].overlay.create_message_packet(MSG_SUBSCRIPTION, payload) - - # Assume cache request is present - self.nodes[0].overlay.request_cache.has = lambda prefix, identifer: False - - yield self.introduce_nodes() - self.assertEqual(len(self.nodes[0].overlay.publishers), 0) + # Update the health of some torrents + with db_session: + torrents = self.nodes[0].overlay.content_repository.get_top_torrents() + torrents[0].health.seeders = 500 - self.nodes[0].overlay.on_subscription_status(self.nodes[1].my_peer.address, data) + self.nodes[0].overlay.publish_latest_torrents(self.nodes[1].overlay.my_peer) yield self.deliver_messages() - self.assertEqual(len(self.nodes[0].overlay.publishers), 0) - - @inlineCallbacks - def test_on_subscription_status_with_unsubscribe(self): - """ - Tests receiving subscription status with unsubscribe status. - """ - yield self.introduce_nodes() - self.nodes[0].overlay.publishers.add(self.nodes[1].my_peer) - self.assertEqual(len(self.nodes[0].overlay.publishers), 1) - # Set the cache request - self.nodes[0].overlay.request_cache.pop = lambda prefix, identifer: MockObject() - self.nodes[0].overlay.request_cache.has = lambda prefix, identifer: True - - subscribe = False - identifier = 123123123 - payload = ContentSubscription(identifier, subscribe) - data = self.nodes[1].overlay.create_message_packet(MSG_SUBSCRIPTION, payload) - - self.nodes[0].overlay.on_subscription_status(self.nodes[1].my_peer.address, data) - yield self.deliver_messages() - - self.assertEqual(len(self.nodes[0].overlay.publishers), 0) - - @inlineCallbacks - def test_search_request_response(self): - self.nodes[0].overlay.content_repository = MockRepository() - self.nodes[1].overlay.content_repository = MockRepository() - self.nodes[1].overlay.publish_latest_torrents = lambda *args, **kwargs: None - - def fake_process_torrent_search_response(peer): - peer.called_process_torrent_search_response = True - - self.nodes[0].overlay.process_torrent_search_response = lambda query, payload: \ - fake_process_torrent_search_response(self.nodes[0]) - - yield self.introduce_nodes() - self.nodes[0].overlay.subscribe_peers() - yield self.deliver_messages() - - # Create a search request - query = "ubuntu" - self.nodes[0].overlay.send_torrent_search_request(query) - - yield self.deliver_messages() - - self.assertTrue(self.nodes[0].called_process_torrent_search_response) - - @inlineCallbacks - def test_process_search_response(self): - self.nodes[0].overlay.content_repository = MockRepository() - self.nodes[1].overlay.content_repository = MockRepository() - self.nodes[1].overlay.publish_latest_torrents = lambda *args, **kwargs: None - - def fake_notify(peer, result_dict): - peer.called_search_result_notify = True - self.assertEqual(result_dict['keywords'], 'ubuntu') - self.assertGreater(len(result_dict['results']), 1) - - self.nodes[0].overlay.tribler_session = MockObject() - self.nodes[0].overlay.tribler_session.notifier = MockObject() - self.nodes[0].overlay.tribler_session.notifier.notify = lambda signal1, signal2, _, result_dict: \ - fake_notify(self.nodes[0], result_dict) - - yield self.introduce_nodes() - self.nodes[0].overlay.subscribe_peers() - yield self.deliver_messages() - - # Create a search request - query = "ubuntu" - self.nodes[0].called_search_result_notify = False - - self.nodes[0].overlay.send_torrent_search_request(query) - yield self.deliver_messages() - - 
self.assertTrue(self.nodes[0].called_search_result_notify) - - @inlineCallbacks - def test_send_content_info_request(self): - self.nodes[0].overlay.content_repository = MockRepository() - self.nodes[1].overlay.content_repository = MockRepository() - self.nodes[1].overlay.publish_latest_torrents = lambda *args, **kwargs: None - - self.nodes[0].received_response = False - self.nodes[0].received_query = None - - def process_torrent_search_response(node, query): - node.received_response = True - node.received_query = query - - self.nodes[0].overlay.process_torrent_search_response = lambda query, data: \ - process_torrent_search_response(self.nodes[0], query) - - yield self.introduce_nodes() - self.nodes[0].overlay.subscribe_peers() - yield self.deliver_messages() - - content_type = SEARCH_TORRENT_REQUEST - request_list = ["ubuntu"] - self.nodes[0].overlay.send_content_info_request(content_type, request_list, limit=5, peer=None) - yield self.deliver_messages() - - self.assertTrue(self.nodes[0].received_response) - self.assertEqual(self.nodes[0].received_query, request_list) - - @inlineCallbacks - def test_send_torrent_info_response(self): - self.nodes[1].overlay.publish_latest_torrents = lambda *args, **kwargs: None - self.nodes[0].overlay.content_repository = MockRepository() - self.nodes[1].overlay.content_repository = MockRepository() - - self.nodes[0].called_on_torrent_info_response = False - - def on_torrent_info_response(node): - node.called_on_torrent_info_response = True - - self.nodes[0].overlay.decode_map[chr(MSG_TORRENT_INFO_RESPONSE)] = lambda _source_address, _data: \ - on_torrent_info_response(self.nodes[0]) - - yield self.introduce_nodes() - self.nodes[0].overlay.subscribe_peers() - yield self.deliver_messages() - - infohash = 'a'*20 - self.nodes[1].overlay.send_torrent_info_response(infohash, self.nodes[0].my_peer) - yield self.deliver_messages() - self.assertTrue(self.nodes[0].called_on_torrent_info_response) - - @inlineCallbacks - def test_search_request_timeout(self): - """ - Test whether the callback is called with an empty list when the search request times out - """ - ContentRequest.CONTENT_TIMEOUT = 0.1 - - self.nodes[0].overlay.content_repository = MockRepository() - self.nodes[1].overlay.content_repository = MockRepository() - self.nodes[1].overlay.publish_latest_torrents = lambda *args, **kwargs: None - - yield self.introduce_nodes() - self.nodes[0].overlay.subscribe_peers() - yield self.deliver_messages() - - # Make sure that the other node does not respond to our search query - self.nodes[1].overlay.send_content_info_response = lambda *_, **__: None - - def on_results(results): - self.assertIsInstance(results, list) - self.assertFalse(results) - - content_type = SEARCH_TORRENT_REQUEST - deferred = self.nodes[0].overlay.send_content_info_request(content_type, ["ubuntu"], limit=5, peer=None) - yield deferred.addCallback(on_results) - - def fake_logger_error(self, my_peer, *args): - if ERROR_UNKNOWN_PEER in args[0]: - my_peer.unknown_peer_found = True - if ERROR_NO_CONTENT in args[0]: - my_peer.no_content = True - if ERROR_UNKNOWN_RESPONSE in args[0]: - my_peer.unknown_response = True - my_peer.logger_error_called = True - - def fake_create_message_packet(self, my_peer, _type, _payload): - my_peer.packet_created = True - my_peer.packet_type = _type - - def fake_broadcast_message(self, my_peer, _, peer): - my_peer.broadcast_called = True - my_peer.receiver = peer + with db_session: + torrents = self.nodes[1].overlay.content_repository.get_top_torrents() + 
self.assertEqual(torrents[0].health.seeders, 500) diff --git a/Tribler/Test/Community/popularity/test_payload.py b/Tribler/Test/Community/popularity/test_payload.py index eab877eabce..32858ae294c 100644 --- a/Tribler/Test/Community/popularity/test_payload.py +++ b/Tribler/Test/Community/popularity/test_payload.py @@ -4,13 +4,7 @@ import string from unittest import TestCase -from six.moves import xrange - -from Tribler.community.popularity.payload import (ChannelHealthPayload, ContentInfoRequest, ContentInfoResponse, - ContentSubscription, Pagination, SearchResponseItemPayload, - SearchResponsePayload, TorrentHealthPayload, - TorrentInfoResponsePayload, decode_values, encode_values, - unpack_responses) +from Tribler.community.popularity.payload import ContentSubscription, TorrentHealthPayload from Tribler.pyipv8.ipv8.messaging.serialization import Serializer @@ -25,12 +19,6 @@ def random_string(self, size=6, chars=string.ascii_uppercase + string.digits): def random_infohash(self): return ''.join(random.choice('0123456789abcdef') for _ in range(20)) - def test_encode_decode(self): - value_list = [u'\u0432\u0441\u0435\u043c', u'\u043f\u0440\u0438\u0432\u0435\u0442'] - encoded_value = encode_values(value_list) - decoded_value = decode_values(encoded_value) - self.assertEqual(value_list, decoded_value) - def test_content_subscription(self): """ Test serialization/deserialization of Content subscription """ subscribe = True @@ -63,155 +51,3 @@ def test_torrent_health_payload(self): self.assertEqual(num_seeders, deserialized_payload.num_seeders) self.assertEqual(num_leechers, deserialized_payload.num_leechers) self.assertEqual(timestamp, deserialized_payload.timestamp) - - def test_channel_health_payload(self): - """ Test serialization/deserialization of Channel health payload """ - channel_id = self.random_string(size=20) - num_votes = 100 - num_torrents = 5 - swarm_size_sum = 20 - timestamp = 123123123 - - health_payload = ChannelHealthPayload(channel_id, num_votes, num_torrents, swarm_size_sum, timestamp) - serialized = self.serializer.pack_multiple(health_payload.to_pack_list())[0] - - # Deserialize and test it - (deserialized, _) = self.serializer.unpack_multiple(ChannelHealthPayload.format_list, serialized) - deserialized_payload = ChannelHealthPayload.from_unpack_list(*deserialized) - - self.assertEqual(channel_id, deserialized_payload.channel_id) - self.assertEqual(num_votes, deserialized_payload.num_votes) - self.assertEqual(num_torrents, deserialized_payload.num_torrents) - self.assertEqual(swarm_size_sum, deserialized_payload.swarm_size_sum) - self.assertEqual(timestamp, deserialized_payload.timestamp) - - def test_torrent_info_response_payload_for_default_values(self): - """ Test serialization/deserialization of Torrent health info response payload for default values. 
""" - infohash = 'a' * 20 - name = None - length = None - creation_date = None - num_files = None - comment = None - - health_payload = TorrentInfoResponsePayload(infohash, name, length, creation_date, num_files, comment) - serialized = self.serializer.pack_multiple(health_payload.to_pack_list())[0] - - # Deserialize and test it - (deserialized, _) = self.serializer.unpack_multiple(TorrentInfoResponsePayload.format_list, serialized) - deserialized_payload = TorrentInfoResponsePayload.from_unpack_list(*deserialized) - - self.assertEqual(infohash, deserialized_payload.infohash) - self.assertEqual('', deserialized_payload.name) - self.assertEqual(0, deserialized_payload.length) - self.assertEqual(0, deserialized_payload.creation_date) - self.assertEqual(0, deserialized_payload.num_files) - self.assertEqual('', deserialized_payload.comment) - - def test_search_result_payload_serialization(self): - """ Test serialization & deserialization of search payload """ - # sample search response items - sample_items = [] - for index in range(10): - infohash = self.random_infohash() - name = self.random_string() - length = random.randint(1000, 9999) - num_files = random.randint(1, 10) - category_list = ['video', 'audio'] - creation_date = random.randint(1000000, 111111111) - seeders = random.randint(10, 200) - leechers = random.randint(5, 1000) - cid = self.random_string(size=20) - - sample_items.append(SearchResponseItemPayload(infohash, name, length, num_files, category_list, - creation_date, seeders, leechers, cid)) - - # Search identifier - identifier = 111 - response_type = 1 - - # Serialize the results - results = '' - for item in sample_items: - results += self.serializer.pack_multiple(item.to_pack_list())[0] - serialized_results = self.serializer.pack_multiple( - SearchResponsePayload(identifier, response_type, results).to_pack_list())[0] - - # De-serialize the response payload and check the identifier and get the results - response_format = SearchResponsePayload.format_list - (search_results, _) = self.serializer.unpack_multiple(response_format, serialized_results) - - # De-serialize each individual search result items - all_items = unpack_responses(search_results[2], True) - for index in xrange(len(all_items)): - response_item = all_items[index] - sample_item = sample_items[index] - - self.assertEqual(sample_item.infohash, response_item.infohash) - self.assertEqual(sample_item.name, response_item.name) - self.assertEqual(sample_item.length, response_item.length) - self.assertEqual(sample_item.num_files, response_item.num_files) - self.assertEqual(sample_item.creation_date, response_item.creation_date) - self.assertEqual(sample_item.category_list, response_item.category_list) - self.assertEqual(sample_item.seeders, response_item.seeders) - self.assertEqual(sample_item.leechers, response_item.leechers) - self.assertEqual(sample_item.cid, response_item.cid) - - def test_pagination(self): - """ Test if pagination serialization & deserialization works as expected. 
""" - page_num = 1 - page_size = 10 - max_results = 50 - more = False - - page = Pagination(page_num, page_size, max_results, more) - serialized_page = page.serialize() - - # Deserialize and test the parameters - deserialized_page = Pagination.deserialize(serialized_page) - self.assertEqual(page.page_number, deserialized_page.page_number) - self.assertEqual(page.page_size, deserialized_page.page_size) - self.assertEqual(page.max_results, deserialized_page.max_results) - self.assertEqual(page.more, deserialized_page.more) - - def test_content_info_request(self): - """ Test serialization & deserialization of content info request """ - identifier = 1 - content_type = 1 - query_list = "ubuntu 18.04".split() - limit = 10 - - # Serialize request - in_request = ContentInfoRequest(identifier, content_type, query_list, limit) - serialized_request = self.serializer.pack_multiple(in_request.to_pack_list())[0] - - # Deserialize request and test it - (deserialized_request, _) = self.serializer.unpack_multiple(ContentInfoRequest.format_list, serialized_request) - out_request = ContentInfoRequest.from_unpack_list(*deserialized_request) - self.assertEqual(in_request.identifier, out_request.identifier) - self.assertEqual(in_request.query_list, out_request.query_list) - self.assertEqual(in_request.content_type, out_request.content_type) - self.assertEqual(in_request.limit, out_request.limit) - - def test_content_info_response(self): - """ Test serialization & deserialization of content info response """ - identifier = 1 - content_type = 1 - response = self.random_string(size=128) - more = True - pagination = Pagination(1, 10, 50, more) - - # Serialize request - in_response = ContentInfoResponse(identifier, content_type, response, pagination) - serialized_response = self.serializer.pack_multiple(in_response.to_pack_list())[0] - - # Deserialize request and test it - (deserialized_response, _) = self.serializer.unpack_multiple(ContentInfoResponse.format_list, - serialized_response) - out_request = ContentInfoResponse.from_unpack_list(*deserialized_response) - self.assertEqual(in_response.identifier, out_request.identifier) - self.assertEqual(in_response.response, out_request.response) - self.assertEqual(in_response.content_type, out_request.content_type) - self.assertEqual(in_response.pagination.page_number, out_request.pagination.page_number) - self.assertEqual(in_response.pagination.page_size, out_request.pagination.page_size) - self.assertEqual(in_response.pagination.max_results, out_request.pagination.max_results) diff --git a/Tribler/Test/Community/popularity/test_pubsub_community.py b/Tribler/Test/Community/popularity/test_pubsub_community.py new file mode 100644 index 00000000000..d3f9f281d1a --- /dev/null +++ b/Tribler/Test/Community/popularity/test_pubsub_community.py @@ -0,0 +1,140 @@ +from __future__ import absolute_import + +from twisted.internet.defer import inlineCallbacks + +from Tribler.Test.tools import trial_timeout +from Tribler.community.popularity.constants import PUBLISH_INTERVAL +from Tribler.community.popularity.pubsub import PubSubCommunity +from Tribler.pyipv8.ipv8.test.base import TestBase +from Tribler.pyipv8.ipv8.test.mocking.ipv8 import MockIPv8 + + +class TestPubSubCommunity(TestBase): + NUM_NODES = 2 + + def setUp(self): + super(TestPubSubCommunity, self).setUp() + self.initialize(PubSubCommunity, self.NUM_NODES) + + def create_node(self, *args, **kwargs): + return MockIPv8(u"curve25519", PubSubCommunity) + + @inlineCallbacks + def test_subscribe_peers(self): + """ + Tests 
that subscribing to peers populates the publishers and subscribers lists.
+        """
+        self.nodes[1].overlay.send_torrent_info_response = lambda infohash, peer: None
+        yield self.introduce_nodes()
+        self.nodes[0].overlay.subscribe_peers()
+        yield self.deliver_messages()
+
+        # Node 0 should have a publisher added
+        self.assertGreater(len(self.nodes[0].overlay.publishers), 0, "Publisher expected")
+        # Node 1 should have a subscriber added
+        self.assertGreater(len(self.nodes[1].overlay.subscribers), 0, "Subscriber expected")
+
+    @inlineCallbacks
+    def test_subscribe_unsubscribe_individual_peers(self):
+        """
+        Tests subscribing/unsubscribing an individual peer.
+        """
+        self.nodes[1].overlay.send_torrent_info_response = lambda infohash, peer: None
+        self.nodes[1].overlay.publish_latest_torrents = lambda *args, **kwargs: None
+
+        yield self.introduce_nodes()
+        self.nodes[0].overlay.subscribe(self.nodes[1].my_peer, subscribe=True)
+        yield self.deliver_messages()
+
+        self.assertEqual(len(self.nodes[0].overlay.publishers), 1, "Expected one publisher")
+        self.assertEqual(len(self.nodes[1].overlay.subscribers), 1, "Expected one subscriber")
+
+        self.nodes[0].overlay.subscribe(self.nodes[1].my_peer, subscribe=False)
+        yield self.deliver_messages()
+
+        self.assertEqual(len(self.nodes[0].overlay.publishers), 0, "Expected no publisher")
+        self.assertEqual(len(self.nodes[1].overlay.subscribers), 0, "Expected no subscriber")
+
+    def test_unsubscribe_multiple_peers(self):
+        """
+        Tests that unsubscribing multiple peers works as expected.
+        """
+
+        def send_popular_content_subscribe(my_peer, _, subscribe):
+            if not subscribe:
+                my_peer.unsubscribe_called += 1
+
+        self.nodes[0].overlay.subscribe = lambda peer, subscribe: \
+            send_popular_content_subscribe(self.nodes[0], peer, subscribe)
+
+        # Add some peers
+        num_peers = 10
+        default_peers = [self.create_node() for _ in range(num_peers)]
+        self.nodes[0].overlay.get_peers = lambda: default_peers
+        self.assertEqual(len(self.nodes[0].overlay.get_peers()), num_peers)
+
+        # Add some publishers
+        for peer in default_peers:
+            self.nodes[0].overlay.publishers.add(peer)
+        self.assertEqual(len(self.nodes[0].overlay.publishers), num_peers)
+
+        # Unsubscribe all the peers
+        self.nodes[0].unsubscribe_called = 0
+        self.nodes[0].overlay.unsubscribe_peers()
+
+        # Check if unsubscription was successful
+        self.assertEqual(self.nodes[0].unsubscribe_called, num_peers)
+        self.assertEqual(len(self.nodes[0].overlay.publishers), 0)
+
+    def test_refresh_peers(self):
+        """
+        Tests if refresh_peer_list() updates the publishers and subscribers lists.
+        """
+        default_peers = [self.create_node() for _ in range(10)]
+
+        for peer in default_peers:
+            self.nodes[0].overlay.publishers.add(peer)
+            self.nodes[0].overlay.subscribers.add(peer)
+
+        self.nodes[0].overlay.get_peers = lambda: default_peers
+        self.assertEqual(len(self.nodes[0].overlay.get_peers()), 10)
+
+        # Remove half of the peers and refresh peer list
+        default_peers = default_peers[:5]
+        self.nodes[0].overlay.refresh_peer_list()
+
+        # List of publishers and subscribers should be updated
+        self.assertEqual(len(self.nodes[0].overlay.get_peers()), 5)
+        self.assertEqual(len(self.nodes[0].overlay.subscribers), 5)
+        self.assertEqual(len(self.nodes[0].overlay.publishers), 5)
+
+    @trial_timeout(6)
+    @inlineCallbacks
+    def test_start(self):
+        """
+        Tests starting the community. The peer should start subscribing to other connected peers.
+ """ + self.nodes[1].overlay.send_torrent_info_response = lambda infohash, peer: None + + def fake_refresh_peer_list(peer): + peer.called_refresh_peer_list = True + + def fake_publish_next_content(peer): + peer.called_publish_next_content = True + + self.nodes[0].called_refresh_peer_list = False + self.nodes[0].called_publish_next_content = False + self.nodes[0].overlay.refresh_peer_list = lambda: fake_refresh_peer_list(self.nodes[0]) + self.nodes[0].overlay.publish_next_content = lambda: fake_publish_next_content(self.nodes[0]) + + yield self.introduce_nodes() + self.nodes[0].overlay.start() + yield self.sleep(PUBLISH_INTERVAL) + + # Node 0 should have a publisher added + self.assertEqual(len(self.nodes[0].overlay.publishers), 1, "Expected one publisher") + # Node 1 should have a subscriber added + self.assertEqual(len(self.nodes[1].overlay.subscribers), 1, "Expected one subscriber") + + self.assertTrue(self.nodes[0].called_refresh_peer_list) + self.assertTrue(self.nodes[0].called_publish_next_content) diff --git a/Tribler/Test/Community/popularity/test_repository.py b/Tribler/Test/Community/popularity/test_repository.py index 029c4e4b2be..5a85f8fc15f 100644 --- a/Tribler/Test/Community/popularity/test_repository.py +++ b/Tribler/Test/Community/popularity/test_repository.py @@ -1,393 +1,84 @@ -import os -import random -import string -import tarfile +from __future__ import absolute_import + import time -import unittest -from binascii import unhexlify -from Tribler.Core.CacheDB.SqliteCacheDBHandler import TorrentDBHandler -from Tribler.Core.CacheDB.sqlitecachedb import SQLiteCacheDB -from Tribler.Test.Core.base_test import MockObject -from Tribler.Test.Core.test_sqlitecachedbhandler import BUSYTIMEOUT -from Tribler.Test.common import TESTS_DATA_DIR +from pony.orm import db_session + +from six.moves import xrange + +from twisted.internet.defer import inlineCallbacks + +from Tribler.Core.Modules.MetadataStore.store import MetadataStore +from Tribler.Test.Core.base_test import TriblerCoreTest from Tribler.community.popularity.payload import TorrentHealthPayload -from Tribler.community.popularity.repository import ContentRepository, DEFAULT_FRESHNESS_LIMIT -from Tribler.pyipv8.ipv8.test.base import TestBase +from Tribler.community.popularity.repository import ContentRepository +from Tribler.pyipv8.ipv8.keyvault.crypto import default_eccrypto -class TestContentRepository(unittest.TestCase): +class TestContentRepository(TriblerCoreTest): + @inlineCallbacks def setUp(self): - torrent_db = MockObject() - channel_db = MockObject() - self.content_repository = ContentRepository(torrent_db, channel_db) + yield super(TestContentRepository, self).setUp() + self.my_key = default_eccrypto.generate_key(u"curve25519") + mds = MetadataStore(':memory:', self.session_base_dir, self.my_key) + self.content_repository = ContentRepository(mds) + + # Add some content to the metadata database + with db_session: + mds.ChannelMetadata.create_channel('test', 'test') + for torrent_ind in xrange(5): + torrent = mds.TorrentMetadata(title='torrent%d' % torrent_ind, infohash=('%d' % torrent_ind) * 20) + torrent.health.seeders = torrent_ind + 1 + + def test_has_get_torrent(self): + """ + Test fetching a torrent from the metadata store + """ + self.assertFalse(self.content_repository.get_torrent('9' * 20)) + self.assertTrue(self.content_repository.get_torrent('0' * 20)) + self.assertFalse(self.content_repository.has_torrent('9' * 20)) + self.assertTrue(self.content_repository.has_torrent('0' * 20)) + 
self.assertFalse(self.content_repository.get_torrent('\x89' * 20)) + + @db_session + def test_get_top_torrents(self): + """ + Test fetching the top torrents from the metadata store + """ + torrents = self.content_repository.get_top_torrents() + self.assertEqual(len(torrents), 5) + self.assertEqual(torrents[0].health.seeders, 5) + + self.assertEqual(len(self.content_repository.get_top_torrents(limit=1)), 1) def test_add_content(self): """ Test adding and removing content works as expected. """ # Initial content queue is zero - self.assertEqual(self.content_repository.count_content(), 0, "No item expected in queue initially") + self.assertEqual(self.content_repository.queue_length(), 0, "No item expected in queue initially") # Add a sample content and check the size - sample_content = ('a' * 20, 6, 3, 123456789) - sample_content_type = 1 - self.content_repository.add_content(sample_content_type, sample_content) - self.assertEqual(self.content_repository.count_content(), 1, "One item expected in queue") + torrent = self.content_repository.get_torrent('0' * 20) + self.content_repository.add_content_to_queue(torrent) + self.assertEqual(self.content_repository.queue_length(), 1, "One item expected in queue") # Pop an item - (content_type, content) = self.content_repository.pop_content() - self.assertEqual(content_type, sample_content_type, "Content type should be equal") - self.assertEqual(content, sample_content, "Content should be equal") + content = self.content_repository.pop_content() + self.assertEqual(content, torrent, "Content should be equal") # Check size again - self.assertEqual(self.content_repository.count_content(), 0, "No item expected in queue") - - def test_get_top_torrents(self): - """ - Test if content repository returns expected top torrents. - """ - - def get_fake_torrents(limit): - return [[chr(x) * 20, x, 0, 1525704192] for x in range(limit)] - - self.content_repository.torrent_db.getRecentlyCheckedTorrents = get_fake_torrents - - limit = 10 - self.assertEqual(self.content_repository.get_top_torrents(limit=limit), get_fake_torrents(limit)) + self.assertEqual(self.content_repository.queue_length(), 0, "No item expected in queue") def test_update_torrent_health(self): """ Tests update torrent health. """ - def update_torrent(repo, _): - repo.update_torrent_called = True - - # Assume a fake torrent response - fake_torrent_health_payload = TorrentHealthPayload('a' * 20, 10, 4, time.time()) - - self.content_repository.torrent_db = MockObject() - self.content_repository.torrent_db.updateTorrent = lambda infohash, *args, **kw: \ - update_torrent(self.content_repository, infohash) - - # If torrent does not exist in the database, then it should be added to the database - self.content_repository.has_torrent = lambda infohash: False + fake_torrent_health_payload = TorrentHealthPayload('0' * 20, 10, 4, time.time()) self.content_repository.update_torrent_health(fake_torrent_health_payload, peer_trust=0) - self.assertTrue(self.content_repository.update_torrent_called) - - def test_update_torrent_with_higher_trust(self): - """ - Scenario: The database torrent has still fresh last_check_time and you receive a new response from - peer with trust > 1. - Expect: Torrent in database is updated. 
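The rewritten repository above exposes a small queue-plus-store API in place of the old torrent_db plumbing. A condensed sketch of the queue flow, assuming the same in-memory MetadataStore fixture built in setUp():

    # Fetch a known torrent from the store and push it onto the publish queue.
    torrent = self.content_repository.get_torrent('0' * 20)
    self.content_repository.add_content_to_queue(torrent)
    assert self.content_repository.queue_length() == 1

    # Popping returns the same entry and empties the queue again.
    assert self.content_repository.pop_content() == torrent
    assert self.content_repository.queue_length() == 0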
- """ - # last_check_time for existing torrent in database - db_last_time_check = time.time() - 10 - # Peer trust, higher than 1 in this scenario - peer_trust = 10 - - # Database record is expected to be updated - self.assertTrue(self.try_torrent_update_with_options(db_last_time_check, peer_trust)) - - def test_update_torrent_with_stale_check_time(self): - """ - Scenario: The database torrent has stale last_check_time and you receive a new response from - peer with no previous trust. - Expect: Torrent in database is still updated. - """ - # last_check_time for existing torrent in database - db_last_time_check = time.time() - DEFAULT_FRESHNESS_LIMIT - # Peer trust, higher than 1 in this scenario - peer_trust = 0 - - # Database record is expected to be updated - self.assertTrue(self.try_torrent_update_with_options(db_last_time_check, peer_trust)) - - def try_torrent_update_with_options(self, db_last_check_time, peer_trust): - """ - Tries updating torrent considering the given last check time of existing torrent and a new response - obtained from a peer with given peer_trust value. - """ - sample_infohash, seeders, leechers, timestamp = 'a' * 20, 10, 5, db_last_check_time - sample_payload = TorrentHealthPayload(sample_infohash, seeders, leechers, timestamp) - - def update_torrent(content_repo, _): - content_repo.update_torrent_called = True - - def get_torrent(infohash): - return {'infohash': infohash, 'num_seeders': seeders, - 'num_leechers': leechers, 'last_tracker_check': timestamp} - - self.content_repository.torrent_db.getTorrent = lambda infohash, **kw: get_torrent(infohash) - self.content_repository.torrent_db.hasTorrent = lambda infohash: infohash == sample_infohash - self.content_repository.torrent_db.updateTorrent = \ - lambda infohash, *args, **kw: update_torrent(self.content_repository, infohash) - - self.content_repository.update_torrent_called = False - self.content_repository.update_torrent_health(sample_payload, peer_trust=peer_trust) - - return self.content_repository.update_torrent_called - - def test_update_torrent_info(self): - """ Test updating torrent info """ - self.content_repository.called_update_torrent = False - - def fake_update_torrent(ref): - ref.called_update_torrent = True - - self.content_repository.torrent_db.updateTorrent = lambda infohash, **kw: \ - fake_update_torrent(self.content_repository) - self.content_repository.has_torrent = lambda infohash: False - torrent_info_response = MockObject() - torrent_info_response.infohash = 'a' * 20 - - torrent_info_response.name = 'ubuntu' - torrent_info_response.length = 123 - torrent_info_response.creation_date = 123123123 - torrent_info_response.num_files = 2 - torrent_info_response.comment = 'Ubuntu ISO' - - self.content_repository.update_torrent_info(torrent_info_response) - self.assertTrue(self.content_repository.called_update_torrent) - - def test_update_conflicting_torrent_info(self): - """ Test updating torrent info response with existing record in the database.""" - torrent_info_response = MockObject() - torrent_info_response.infohash = 'a' * 20 - torrent_info_response.name = 'ubuntu' - torrent_info_response.length = 123 - torrent_info_response.creation_date = 123123123 - torrent_info_response.num_files = 2 - torrent_info_response.comment = 'Ubuntu ISO' - - self.content_repository.called_update_torrent = False - - def fake_update_torrent(ref): - ref.called_update_torrent = True - - def fake_get_torrent(infohash, name): - torrent = {'infohash': infohash, 'name': name} - return torrent - - 
self.content_repository.torrent_db.updateTorrent = lambda infohash, **kw: fake_update_torrent( - self.content_repository) - self.content_repository.has_torrent = lambda infohash: True - self.content_repository.get_torrent = lambda infohash: fake_get_torrent(infohash, torrent_info_response.name) - - self.content_repository.update_torrent_info(torrent_info_response) - self.assertFalse(self.content_repository.called_update_torrent) - - def test_search_torrent(self): - """ Test torrent search """ - def random_string(size=6, chars=string.ascii_uppercase + string.digits): - return ''.join(random.choice(chars) for _ in range(size)) - - def random_infohash(): - return ''.join(random.choice('0123456789abcdef') for _ in range(20)) - - sample_torrents = [] - for _ in range(10): - infohash = random_infohash() - name = random_string() - length = random.randint(1000, 9999) - num_files = random.randint(1, 10) - category_list = ['video', 'audio'] - creation_date = random.randint(1000000, 111111111) - seeders = random.randint(10, 200) - leechers = random.randint(5, 1000) - cid = random_string(size=20) - - sample_torrents.append([infohash, name, length, num_files, category_list, creation_date, seeders, - leechers, cid]) - - def fake_torrentdb_search_names(_): - return sample_torrents - - self.content_repository.torrent_db.searchNames = lambda query, **kw: fake_torrentdb_search_names(query) - - search_query = "Ubuntu" - search_results = self.content_repository.search_torrent(search_query) - - for index in range(10): - db_torrent = sample_torrents[index] - search_result = search_results[index] - - self.assertEqual(db_torrent[0], search_result.infohash) - self.assertEqual(db_torrent[1], search_result.name) - self.assertEqual(db_torrent[2], search_result.length) - self.assertEqual(db_torrent[3], search_result.num_files) - self.assertEqual(db_torrent[6], search_result.seeders) - self.assertEqual(db_torrent[7], search_result.leechers) - - def test_search_channel(self): - """ Test channel search """ - def random_string(size=6, chars=string.ascii_uppercase + string.digits): - return ''.join(random.choice(chars) for _ in range(size)) - - sample_channels = [] - for index in range(10): - dbid = index - cid = random_string(size=20) - name = random_string() - description = random_string(20) - nr_torrents = random.randint(1, 10) - nr_favorite = random.randint(1, 10) - nr_spam = random.randint(1, 10) - my_vote = 1 - modified = random.randint(1, 10000000) - relevance_score = 0.0 - - sample_channels.append([dbid, cid, name, description, nr_torrents, nr_favorite, nr_spam, my_vote, - modified, relevance_score]) - - def fake_torrentdb_search_channels(_): - return sample_channels - - self.content_repository.channel_db.search_in_local_channels_db = lambda query, **kw: \ - fake_torrentdb_search_channels(query) - - search_query = "Ubuntu" - search_results = self.content_repository.search_channels(search_query) - - for index in range(10): - db_channel = sample_channels[index] - search_result = search_results[index] - - self.assertEqual(db_channel[0], search_result.id) - self.assertEqual(db_channel[1], search_result.cid) - self.assertEqual(db_channel[2], search_result.name) - self.assertEqual(db_channel[3], search_result.description) - self.assertEqual(db_channel[4], search_result.nr_torrents) - self.assertEqual(db_channel[5], search_result.nr_favorite) - self.assertEqual(db_channel[6], search_result.nr_spam) - self.assertEqual(db_channel[8], search_result.modified) - - def test_update_torrent_from_search_results(self): - """ Tests 
updating database from the search results """ - def random_string(size=6, chars=string.ascii_uppercase + string.digits): - return ''.join(random.choice(chars) for _ in range(size)) - - def random_infohash(): - return ''.join(random.choice('0123456789abcdef') for _ in range(20)) - - search_results = dict() - for _ in range(10): - infohash = random_infohash() - name = random_string() - length = random.randint(1000, 9999) - num_files = random.randint(1, 10) - category_list = ['video', 'audio'] - creation_date = random.randint(1000000, 111111111) - seeders = random.randint(10, 200) - leechers = random.randint(5, 1000) - cid = random_string(size=20) - - search_results[infohash] = [infohash, name, length, num_files, category_list, creation_date, - seeders, leechers, cid] - - def get_torrent(torrent_as_list): - return {'infohash': torrent_as_list[0], - 'name': torrent_as_list[1], - 'length': torrent_as_list[2], - 'num_files': torrent_as_list[3], - 'category_list': torrent_as_list[4], - 'creation_date': torrent_as_list[5], - 'seeders': torrent_as_list[6], - 'leechers': torrent_as_list[7], - 'cid': torrent_as_list[8]} - - def fake_update_torrent(ref): - ref.called_update_torrent = True - - def fake_add_or_get_torrent_id(ref): - ref.called_add_or_get_torrent_id = True - - self.content_repository.torrent_db.updateTorrent = lambda infohash, **kw: fake_update_torrent( - self.content_repository) - self.content_repository.torrent_db.addOrGetTorrentID = lambda infohash: fake_add_or_get_torrent_id( - self.content_repository) - - # Case 1: Assume torrent does not exist in the database - self.content_repository.has_torrent = lambda infohash: False - self.content_repository.get_torrent = lambda infohash: None - - self.content_repository.torrent_db._db = MockObject() - self.content_repository.torrent_db._db.commit_now = lambda x=None: None - - self.content_repository.called_update_torrent = False - self.content_repository.update_from_torrent_search_results(search_results.values()) - self.assertTrue(self.content_repository.called_update_torrent) - self.assertTrue(self.content_repository.called_add_or_get_torrent_id) - - # Case 2: Torrent already exist in the database - self.content_repository.has_torrent = lambda infohash: infohash in search_results - self.content_repository.get_torrent = lambda infohash: get_torrent(search_results[infohash]) - - self.content_repository.called_update_torrent = False - self.content_repository.called_add_or_get_torrent_id = False - self.content_repository.update_from_torrent_search_results(search_results.values()) - self.assertFalse(self.content_repository.called_update_torrent) - self.assertFalse(self.content_repository.called_add_or_get_torrent_id) - - -class TestContentRepositoryWithRealDatabase(TestBase): - """ - Tests content repository with real database. 
- """ - - def setUp(self): - super(TestContentRepositoryWithRealDatabase, self).setUp() - - session_base_dir = self.temporary_directory() - tar = tarfile.open(os.path.join(TESTS_DATA_DIR, 'bak_new_tribler.sdb.tar.gz'), 'r|gz') - tar.extractall(session_base_dir) - db_path = os.path.join(session_base_dir, 'bak_new_tribler.sdb') - self.sqlitedb = SQLiteCacheDB(db_path, busytimeout=BUSYTIMEOUT) - - session = MockObject() - session.sqlite_db = self.sqlitedb - session.notifier = MockObject() - - self.torrent_db = TorrentDBHandler(session) - channel_db = MockObject() - self.content_repository = ContentRepository(self.torrent_db, channel_db) - - def tearDown(self): - self.torrent_db.close() - self.sqlitedb.close() - return super(TestContentRepositoryWithRealDatabase, self).tearDown() - - def test_update_db_from_search_results(self): - """ - Test if database is properly updated with the search results. - Should not raise any UnicodeDecodeError. - """ - # Add a torrent infohash before updating from search results - infohash = unhexlify('ed81da94d21ad1b305133f2726cdaec5a57fed98') - self.content_repository.torrent_db.addOrGetTorrentID(infohash) - - # Sample search results - name = 'Puppy.Linux.manual.301.espa\xc3\xb1ol.pdf' - length = random.randint(1000, 9999) - num_files = random.randint(1, 10) - category_list = ['other'] - creation_date = random.randint(1000000, 111111111) - seeders = random.randint(10, 200) - leechers = random.randint(5, 1000) - cid = None - search_results = [[infohash, name, length, num_files, category_list, creation_date, seeders, leechers, cid]] - - # Update from search results - self.content_repository.update_from_torrent_search_results(search_results) - - # Check if database has correct results - torrent_info = self.content_repository.get_torrent(infohash) - expected_name = u'Puppy.Linux.manual.301.espa\xc3\xb1ol.pdf' - self.assertEqual(expected_name, torrent_info['name']) - self.assertEqual(seeders, torrent_info['num_seeders']) - self.assertEqual(leechers, torrent_info['num_leechers']) - self.assertEqual(creation_date, torrent_info['creation_date']) - self.assertEqual(num_files, torrent_info['num_files']) - self.assertEqual(length, torrent_info['length']) + with db_session: + torrent = self.content_repository.get_torrent('0' * 20) + self.assertEqual(torrent.health.seeders, 10) + self.assertEqual(torrent.health.leechers, 4) diff --git a/Tribler/Test/Core/Category/test_category.py b/Tribler/Test/Core/Category/test_category.py index 39c7502bd64..146f3688088 100644 --- a/Tribler/Test/Core/Category/test_category.py +++ b/Tribler/Test/Core/Category/test_category.py @@ -1,5 +1,9 @@ +from __future__ import absolute_import + from twisted.internet.defer import inlineCallbacks -from Tribler.Core.Category.Category import Category, cmp_rank + +from Tribler.Core.Category.Category import cmp_rank, default_category_filter +from Tribler.Core.Category.FamilyFilter import default_xxx_filter from Tribler.Test.test_as_server import AbstractServer @@ -7,15 +11,11 @@ class TriblerCategoryTest(AbstractServer): def setUp(self): super(TriblerCategoryTest, self).setUp() - self.category = Category() - self.category.xxx_filter.xxx_terms.add("term1") - - def test_category_names_none_names(self): - self.category.category_info = None - self.assertFalse(self.category.getCategoryNames()) + self.category = default_category_filter + default_xxx_filter.xxx_terms.add("term1") def test_get_category_names(self): - self.assertEquals(len(self.category.category_info), 9) + 
self.assertEquals(len(self.category.category_info), 10) def test_calculate_category_multi_file(self): torrent_info = {"info": {"files": [{"path": "/my/path/video.avi", "length": 1234}]}, @@ -30,28 +30,17 @@ def test_calculate_category_single_file(self): def test_calculate_category_xxx(self): torrent_info = {"info": {"name": "term1", "length": 1234}, "announce-list": [["http://tracker.org"]], "comment": "lorem ipsum"} - self.assertEquals(self.category.calculateCategory(torrent_info, "my torrent"), 'xxx') + self.assertEquals('xxx', self.category.calculateCategory(torrent_info, "my torrent")) def test_calculate_category_invalid_announce_list(self): torrent_info = {"info": {"name": "term1", "length": 1234}, "announce-list": [[]], "comment": "lorem ipsum"} self.assertEquals(self.category.calculateCategory(torrent_info, "my torrent"), 'xxx') - def test_get_family_filter_sql(self): - self.assertFalse(self.category.get_family_filter_sql()) - self.category.set_family_filter(b=True) - self.assertTrue(self.category.get_family_filter_sql()) - def test_cmp_rank(self): self.assertEquals(cmp_rank({'bla': 3}, {'bla': 4}), 1) self.assertEquals(cmp_rank({'rank': 3}, {'bla': 4}), -1) - def test_non_existent_conf_file(self): - import Tribler.Core.Category.Category as category_file - category_file.CATEGORY_CONFIG_FILE = "thisfiledoesnotexist.conf" - test_category = Category() - self.assertEqual(test_category.category_info, []) - @inlineCallbacks def tearDown(self): import Tribler.Core.Category.Category as category_file diff --git a/Tribler/Test/Core/Category/test_family_filter.py b/Tribler/Test/Core/Category/test_family_filter.py index 1ad713deb7b..23274c96907 100644 --- a/Tribler/Test/Core/Category/test_family_filter.py +++ b/Tribler/Test/Core/Category/test_family_filter.py @@ -11,11 +11,6 @@ def setUp(self): self.family_filter.xxx_terms.add("term2") self.family_filter.xxx_searchterms.add("term3") - def test_filter_torrent(self): - self.assertFalse(self.family_filter.isXXXTorrent(["file1.txt"], "mytorrent", "http://tracker.org")) - self.assertFalse(self.family_filter.isXXXTorrent(["file1.txt"], "mytorrent", "")) - self.assertTrue(self.family_filter.isXXXTorrent(["term1.txt"], "term2", "")) - def test_is_xxx(self): self.assertFalse(self.family_filter.isXXX(None)) self.assertTrue(self.family_filter.isXXX("term1")) @@ -28,8 +23,10 @@ def test_is_xxx_term(self): self.assertTrue(self.family_filter.isXXXTerm("term1s")) self.assertFalse(self.family_filter.isXXXTerm("term0n")) - def test_invalid_filename_exception(self): - terms, searchterms = self.family_filter.initTerms("thisfiledoesnotexist.txt") - self.assertEqual(len(terms), 0) - self.assertEqual(len(searchterms), 0) - + def test_xxx_torrent_metadata_dict(self): + d = { + "title": "XXX", + "tags": "", + "tracker": "http://sooo.dfd/announce" + } + self.assertTrue(self.family_filter.isXXXTorrentMetadataDict(d)) diff --git a/Tribler/Test/Core/Config/test_tribler_config.py b/Tribler/Test/Core/Config/test_tribler_config.py index b12473a200f..e6add05238e 100644 --- a/Tribler/Test/Core/Config/test_tribler_config.py +++ b/Tribler/Test/Core/Config/test_tribler_config.py @@ -4,7 +4,7 @@ from configobj import ConfigObj -from Tribler.Core.Config.tribler_config import TriblerConfig, CONFIG_SPEC_PATH, FILENAME +from Tribler.Core.Config.tribler_config import CONFIG_SPEC_PATH, FILENAME, TriblerConfig from Tribler.Test.Core.base_test import TriblerCoreTest @@ -104,17 +104,9 @@ def test_get_set_methods_general(self): """ Check whether general get and set methods are working as 
expected. """ - self.tribler_config.set_family_filter_enabled(False) - self.assertEqual(self.tribler_config.get_family_filter_enabled(), False) - self.tribler_config.set_state_dir("TEST") self.assertEqual(self.tribler_config.get_state_dir(), "TEST") - self.tribler_config.set_permid_keypair_filename(None) - self.assertEqual(self.tribler_config.get_permid_keypair_filename(), os.path.join("TEST", "ec.pem")) - self.tribler_config.set_permid_keypair_filename("TEST") - self.assertEqual(self.tribler_config.get_permid_keypair_filename(), "TEST") - self.tribler_config.set_trustchain_keypair_filename(None) self.assertEqual(self.tribler_config.get_trustchain_keypair_filename(), os.path.join("TEST", "ec_multichain.pem")) @@ -127,9 +119,6 @@ def test_get_set_methods_general(self): self.tribler_config.set_trustchain_testnet_keypair_filename("TEST") self.assertEqual(self.tribler_config.get_trustchain_testnet_keypair_filename(), "TEST") - self.tribler_config.set_megacache_enabled(True) - self.assertEqual(self.tribler_config.get_megacache_enabled(), True) - self.tribler_config.set_testnet(True) self.assertTrue(self.tribler_config.get_testnet()) @@ -164,21 +153,14 @@ def test_get_set_methods_http_api(self): self.tribler_config.set_http_api_retry_port(True) self.assertTrue(self.tribler_config.get_http_api_retry_port()) - def test_get_set_methods_dispersy(self): - """ - Check whether dispersy get and set methods are working as expected. - """ - self.tribler_config.set_dispersy_enabled(True) - self.assertEqual(self.tribler_config.get_dispersy_enabled(), True) - self.tribler_config.set_dispersy_port(True) - self.assertEqual(self.tribler_config.get_dispersy_port(), True) - def test_get_set_methods_ipv8(self): """ Check whether IPv8 get and set methods are working as expected. """ self.tribler_config.set_ipv8_enabled(False) self.assertEqual(self.tribler_config.get_ipv8_enabled(), False) + self.tribler_config.set_ipv8_port(1234) + self.assertEqual(self.tribler_config.get_ipv8_port(), 1234) self.tribler_config.set_ipv8_bootstrap_override("127.0.0.1:12345") self.assertEqual(self.tribler_config.get_ipv8_bootstrap_override(), ("127.0.0.1", 12345)) self.tribler_config.set_ipv8_statistics(True) @@ -214,15 +196,6 @@ def test_get_set_methods_libtorrent(self): self.tribler_config.set_libtorrent_dht_enabled(False) self.assertFalse(self.tribler_config.get_libtorrent_dht_enabled()) - def test_get_set_methods_mainline_dht(self): - """ - Check whether mainline dht get and set methods are working as expected. - """ - self.tribler_config.set_mainline_dht_enabled(True) - self.assertEqual(self.tribler_config.get_mainline_dht_enabled(), True) - self.tribler_config.set_mainline_dht_port(True) - self.assertEqual(self.tribler_config.get_mainline_dht_port(), True) - def test_get_set_methods_video_server(self): """ Check whether video server get and set methods are working as expected. @@ -257,16 +230,6 @@ def test_get_set_methods_tunnel_community(self): self.tribler_config.set_tunnel_community_competing_slots(20) self.assertEqual(self.tribler_config.get_tunnel_community_competing_slots(), 20) - def test_get_set_methods_torrent_store(self): - """ - Check whether torrent store get and set methods are working as expected. 
- """ - self.tribler_config.set_torrent_store_enabled(True) - self.assertEqual(self.tribler_config.get_torrent_store_enabled(), True) - self.tribler_config.set_torrent_store_dir("TESTDIR") - self.tribler_config.set_state_dir("TEST") - self.assertEqual(self.tribler_config.get_torrent_store_dir(), os.path.join("TEST", "TESTDIR")) - def test_get_set_methods_wallets(self): """ Check whether wallet get and set methods are working as expected. @@ -282,8 +245,6 @@ def test_get_set_chant_methods(self): """ self.tribler_config.set_chant_enabled(False) self.assertFalse(self.tribler_config.get_chant_enabled()) - self.tribler_config.set_chant_channel_edit(True) - self.assertTrue(self.tribler_config.get_chant_channel_edit()) self.tribler_config.set_chant_channels_dir('test') self.assertEqual(self.tribler_config.get_chant_channels_dir(), os.path.join(self.tribler_config.get_state_dir(), 'test')) @@ -295,55 +256,6 @@ def test_get_set_is_matchmaker(self): self.tribler_config.set_is_matchmaker(False) self.assertFalse(self.tribler_config.get_is_matchmaker()) - def test_get_set_methods_metadata(self): - """ - Check whether metadata get and set methods are working as expected. - """ - self.tribler_config.set_metadata_enabled(True) - self.assertEqual(self.tribler_config.get_metadata_enabled(), True) - self.tribler_config.set_metadata_store_dir("TESTDIR") - self.tribler_config.set_state_dir("TEST") - self.assertEqual(self.tribler_config.get_metadata_store_dir(), os.path.join("TEST", "TESTDIR")) - - def test_get_set_methods_torrent_collecting(self): - """ - Check whether torrent collecting get and set methods are working as expected. - """ - self.tribler_config.set_torrent_collecting_enabled(True) - self.assertEqual(self.tribler_config.get_torrent_collecting_enabled(), True) - self.tribler_config.set_torrent_collecting_max_torrents(True) - self.assertEqual(self.tribler_config.get_torrent_collecting_max_torrents(), True) - self.tribler_config.set_torrent_collecting_dir(True) - self.assertEqual(self.tribler_config.get_torrent_collecting_dir(), True) - - def test_get_set_methods_search_community(self): - """ - Check whether search community get and set methods are working as expected. - """ - self.tribler_config.set_torrent_search_enabled(True) - self.assertEqual(self.tribler_config.get_torrent_search_enabled(), True) - - def test_get_set_methods_allchannel_community(self): - """ - Check whether allchannel community get and set methods are working as expected. - """ - self.tribler_config.set_channel_search_enabled(True) - self.assertEqual(self.tribler_config.get_channel_search_enabled(), True) - - def test_get_set_methods_channel_community(self): - """ - Check whether channel community get and set methods are working as expected. - """ - self.tribler_config.set_channel_community_enabled(True) - self.assertEqual(self.tribler_config.get_channel_community_enabled(), True) - - def test_get_set_methods_preview_channel_community(self): - """ - Check whether preview channel community get and set methods are working as expected. - """ - self.tribler_config.set_preview_channel_community_enabled(True) - self.assertEqual(self.tribler_config.get_preview_channel_community_enabled(), True) - def test_get_set_methods_popularity_community(self): """ Check whether popularity community get and set methods are working as expected. 
diff --git a/Tribler/Test/Core/CreditMining/test_credit_mining_manager.py b/Tribler/Test/Core/CreditMining/test_credit_mining_manager.py index 60c6a8bc819..663f8c6970d 100644 --- a/Tribler/Test/Core/CreditMining/test_credit_mining_manager.py +++ b/Tribler/Test/Core/CreditMining/test_credit_mining_manager.py @@ -98,7 +98,7 @@ class TestCreditMiningManager(TestAsServer): def __init__(self, *argv, **kwargs): super(TestCreditMiningManager, self).__init__(*argv, **kwargs) # Some fake data for convenience - self.cid = '0' * 40 + self.cid = '0' * 64 self.infohash = '0' * 40 self.infohash_bin = '\00' * 20 self.name = u'torrent' @@ -111,11 +111,9 @@ def setUp(self): def setUpPreSession(self): super(TestCreditMiningManager, self).setUpPreSession() - self.config.set_megacache_enabled(True) - self.config.set_dispersy_enabled(True) self.config.set_libtorrent_enabled(True) self.config.set_credit_mining_enabled(True) - self.config.set_market_community_enabled(False) + self.config.set_chant_enabled(True) def test_source_add_remove(self): self.credit_mining_manager.add_source(self.cid) diff --git a/Tribler/Test/Core/CreditMining/test_credit_mining_sources.py b/Tribler/Test/Core/CreditMining/test_credit_mining_sources.py index 531619d5d81..e7426c2c2dd 100644 --- a/Tribler/Test/Core/CreditMining/test_credit_mining_sources.py +++ b/Tribler/Test/Core/CreditMining/test_credit_mining_sources.py @@ -3,18 +3,15 @@ Author(s): Mihai Capota, Ardhi Putra """ +from __future__ import absolute_import -from binascii import unhexlify, hexlify -from twisted.internet import reactor +from pony.orm import db_session -from twisted.internet.defer import inlineCallbacks -from twisted.internet.task import deferLater +from twisted.internet.defer import Deferred from Tribler.Core.CreditMining.CreditMiningSource import ChannelSource -from Tribler.Core.simpledefs import NTFY_CHANNELCAST, NTFY_DISCOVERED, NTFY_TORRENT -from Tribler.community.allchannel.community import AllChannelCommunity -from Tribler.community.channel.community import ChannelCommunity from Tribler.Test.test_as_server import TestAsServer +from Tribler.Test.tools import trial_timeout class TestCreditMiningSources(TestAsServer): @@ -22,73 +19,18 @@ class TestCreditMiningSources(TestAsServer): Class to test the credit mining sources """ - def __init__(self, *argv, **kwargs): - super(TestCreditMiningSources, self).__init__(*argv, **kwargs) - # Fake channel id for testing - self.cid = '0' * 40 - def setUpPreSession(self): super(TestCreditMiningSources, self).setUpPreSession() - self.config.set_megacache_enabled(True) - self.config.set_dispersy_enabled(True) - self.config.set_channel_search_enabled(True) + self.config.set_chant_enabled(True) + @trial_timeout(5) def test_channel_lookup(self): - source = ChannelSource(self.session, self.cid, lambda: None) - source.start() - self.assertIsInstance(source.community, ChannelCommunity, 'ChannelSource failed to create ChannelCommunity') - source.stop() + test_deferred = Deferred() - def test_existing_channel_lookup(self): - # Find AllChannel - for community in self.session.lm.dispersy.get_communities(): - if isinstance(community, AllChannelCommunity): - allchannelcommunity = community + with db_session: + my_channel = self.session.lm.mds.ChannelMetadata.create_channel('test', 'test') + _ = self.session.lm.mds.TorrentMetadata(title='testtorrent') - # Load the channel - community = ChannelCommunity.init_community(self.session.lm.dispersy, - self.session.lm.dispersy.get_member(mid=unhexlify(self.cid)), - allchannelcommunity.my_member, 
- self.session) - - # Check if we find the channel - source = ChannelSource(self.session, self.cid, lambda: None) + source = ChannelSource(self.session, str(my_channel.public_key), lambda *_: test_deferred.callback(None)) source.start() - self.assertEqual(source.community, community, 'ChannelSource failed to find existing ChannelCommunity') - source.stop() - - @inlineCallbacks - def test_torrent_from_db(self): - # Torrent is a tuple: (channel_id, dispersy_id, peer_id, infohash, timestamp, name, files, trackers) - torrent = (0, self.cid, 42, '\00' * 20, 0, u'torrent', [], []) - channel_db_handler = self.session.open_dbhandler(NTFY_CHANNELCAST) - channel_db_handler.on_torrents_from_dispersy([torrent]) - - torrent_inserteds = [] - torrent_insert_callback = lambda source, infohash, name: torrent_inserteds.append((source, infohash, name)) - source = ChannelSource(self.session, self.cid, torrent_insert_callback) - source.start() - - yield deferLater(reactor, 1, lambda: None) - self.assertIn((self.cid, hexlify(torrent[3]), torrent[5]), torrent_inserteds, - 'ChannelSource failed to insert torrent') - - source.stop() - - def test_torrent_discovered(self): - torrent_inserteds = [] - torrent_insert_callback = lambda source, infohash, name: torrent_inserteds.append((source, infohash, name)) - source = ChannelSource(self.session, self.cid, torrent_insert_callback) - source.start() - - source.on_torrent_discovered(NTFY_TORRENT, NTFY_DISCOVERED, self.cid, {'dispersy_cid': self.cid, - 'infohash': '\00' * 20, - 'name': 'torrent'}) - self.assertIn((self.cid, '\00' * 20, 'torrent'), torrent_inserteds, 'ChannelSource failed to insert torrent') - - source.on_torrent_discovered(NTFY_TORRENT, NTFY_DISCOVERED, self.cid, {'dispersy_cid': '1' * 40, - 'infohash': '\01' * 20, - 'name': 'torrent'}) - self.assertTrue(len(torrent_inserteds) == 1, 'ChannelSource inserted torrent with wrong dispersy_cid') - - source.stop() + return test_deferred diff --git a/Tribler/Test/Core/Libtorrent/test_libtorrent_download_impl.py b/Tribler/Test/Core/Libtorrent/test_libtorrent_download_impl.py index 33ee2f32248..36680d0cf85 100644 --- a/Tribler/Test/Core/Libtorrent/test_libtorrent_download_impl.py +++ b/Tribler/Test/Core/Libtorrent/test_libtorrent_download_impl.py @@ -30,11 +30,7 @@ class TestLibtorrentDownloadImpl(TestAsServer): def setUpPreSession(self): super(TestLibtorrentDownloadImpl, self).setUpPreSession() self.config.set_torrent_checking_enabled(False) - self.config.set_megacache_enabled(True) - self.config.set_dispersy_enabled(False) self.config.set_tunnel_community_enabled(False) - self.config.set_mainline_dht_enabled(False) - self.config.set_torrent_collecting_enabled(False) self.config.set_libtorrent_enabled(True) self.config.set_video_server_enabled(False) @@ -481,7 +477,6 @@ def mocked_checkpoint(): self.libtorrent_download_impl.checkpoint = mocked_checkpoint self.libtorrent_download_impl.session = MockObject() self.libtorrent_download_impl.session.lm = MockObject() - self.libtorrent_download_impl.session.lm.rtorrent_handler = None self.libtorrent_download_impl.session.lm.torrent_db = None self.libtorrent_download_impl.handle.save_path = lambda: None self.libtorrent_download_impl.handle.prioritize_files = lambda _: None diff --git a/Tribler/Test/Core/Libtorrent/test_libtorrent_mgr.py b/Tribler/Test/Core/Libtorrent/test_libtorrent_mgr.py index 5c8e789eed7..07ef046d25f 100644 --- a/Tribler/Test/Core/Libtorrent/test_libtorrent_mgr.py +++ b/Tribler/Test/Core/Libtorrent/test_libtorrent_mgr.py @@ -1,3 +1,5 @@ +from __future__ 
import absolute_import + import binascii import os import shutil @@ -5,18 +7,17 @@ from libtorrent import bencode -from twisted.internet.task import deferLater - -from Tribler.Test.tools import trial_timeout -from twisted.internet.defer import inlineCallbacks, Deferred from twisted.internet import reactor +from twisted.internet.defer import Deferred, inlineCallbacks +from twisted.internet.task import deferLater -from Tribler.Core.CacheDB.Notifier import Notifier from Tribler.Core.Libtorrent.LibtorrentDownloadImpl import LibtorrentDownloadImpl from Tribler.Core.Libtorrent.LibtorrentMgr import LibtorrentMgr +from Tribler.Core.Notifier import Notifier from Tribler.Core.exceptions import TorrentFileException from Tribler.Test.Core.base_test import MockObject from Tribler.Test.test_as_server import AbstractServer +from Tribler.Test.tools import trial_timeout class TestLibtorrentMgr(AbstractServer): diff --git a/Tribler/Test/Core/Modules/Channel/__init__.py b/Tribler/Test/Core/Modules/Channel/__init__.py deleted file mode 100644 index e403d5f7a98..00000000000 --- a/Tribler/Test/Core/Modules/Channel/__init__.py +++ /dev/null @@ -1,3 +0,0 @@ -""" -This package contains tests for the channel management objects. -""" diff --git a/Tribler/Test/Core/Modules/Channel/test_channel.py b/Tribler/Test/Core/Modules/Channel/test_channel.py deleted file mode 100644 index a756a7a01dd..00000000000 --- a/Tribler/Test/Core/Modules/Channel/test_channel.py +++ /dev/null @@ -1,30 +0,0 @@ -from twisted.internet.defer import inlineCallbacks - -from Tribler.Core.Modules.channel.channel import ChannelObject -from Tribler.Core.Modules.channel.channel_rss import ChannelRssParser -from Tribler.Test.Core.base_test_channel import BaseTestChannel - - -class TestChannel(BaseTestChannel): - """ - This class contains some tests for the ChannelObject class. - """ - - @inlineCallbacks - def setUp(self): - """ - Setup the tests by creating the ChannelObject instance. 
- """ - yield super(TestChannel, self).setUp() - self.channel_object = ChannelObject(self.fake_session, self.fake_channel_community) - - def test_get_channel_id(self): - self.assertEqual(self.channel_object.channel_id, 42) - - def test_get_channel_name(self): - self.assertEqual(self.channel_object.name, "my fancy channel") - - def test_get_rss_feed_url_list(self): - rss_parser = ChannelRssParser(self.fake_session, self.fake_channel_community, 'a') - self.channel_object._rss_feed_dict['a'] = rss_parser - self.assertEqual(self.channel_object.get_rss_feed_url_list(), ['a']) diff --git a/Tribler/Test/Core/Modules/Channel/test_channel_manager.py b/Tribler/Test/Core/Modules/Channel/test_channel_manager.py deleted file mode 100644 index 1faff5ff528..00000000000 --- a/Tribler/Test/Core/Modules/Channel/test_channel_manager.py +++ /dev/null @@ -1,45 +0,0 @@ -from twisted.internet.defer import inlineCallbacks -from twisted.python.log import removeObserver - -from Tribler.Core.Config.tribler_config import TriblerConfig -from Tribler.Core.Modules.channel.channel import ChannelObject -from Tribler.Core.Modules.channel.channel_manager import ChannelManager -from Tribler.Core.Session import Session -from Tribler.Core.exceptions import DuplicateChannelNameError -from Tribler.Test.Core.base_test import TriblerCoreTest - - -class TestChannelManager(TriblerCoreTest): - - @inlineCallbacks - def setUp(self): - yield super(TestChannelManager, self).setUp() - self.session = None - - @inlineCallbacks - def tearDown(self): - removeObserver(self.session.unhandled_error_observer) - yield super(TestChannelManager, self).tearDown() - - def test_create_channel_duplicate_name_error(self): - config = TriblerConfig() - config.set_state_dir(self.getStateDir()) - self.session = Session(config) - - class LmMock(object): - channel_manager = ChannelManager(self.session) - - self.session.lm = LmMock() - - class MockCommunity(object): - cid = "" - - def get_channel_name(self): - return "Channel name" - - channel_obj = ChannelObject(self.session, MockCommunity(), is_created=True) - self.session.lm.channel_manager._channel_list = [channel_obj] - - with self.assertRaises(DuplicateChannelNameError) as cm: - self.session.lm.channel_manager.create_channel("Channel name", "description", "open") - self.assertEqual(cm.exception.message, u"Channel name already exists: Channel name") diff --git a/Tribler/Test/Core/Modules/Channel/test_channel_rss.py b/Tribler/Test/Core/Modules/Channel/test_channel_rss.py deleted file mode 100644 index 3a94f1fff07..00000000000 --- a/Tribler/Test/Core/Modules/Channel/test_channel_rss.py +++ /dev/null @@ -1,138 +0,0 @@ -import os -import shutil - -from Tribler.Test.tools import trial_timeout -from twisted.internet.defer import inlineCallbacks - -from Tribler.Core.Modules.channel.cache import SimpleCache -from Tribler.Core.Modules.channel.channel_rss import ChannelRssParser, RSSFeedParser -from Tribler.Core.Utilities.network_utils import get_random_port -from Tribler.Test.Core.base_test import TriblerCoreTest -from Tribler.Test.Core.base_test_channel import BaseTestChannel -from Tribler.Test.common import TESTS_DATA_DIR - - -class TestChannelRss(BaseTestChannel): - - @inlineCallbacks - def setUp(self): - """ - Setup the tests by creating the ChannelRssParser instance and initializing it. 
- """ - yield super(TestChannelRss, self).setUp() - self.channel_rss = ChannelRssParser(self.fake_session, self.fake_channel_community, 'a') - self.channel_rss.initialize() - - # Setup a test rss file server - test_rss_file = os.path.join(TESTS_DATA_DIR, 'test_rss.xml') - files_path = os.path.join(self.session_base_dir, 'files') - os.mkdir(files_path) - shutil.copyfile(test_rss_file, os.path.join(files_path, 'test_rss.xml')) - self.file_server_port = get_random_port() - self.setUpFileServer(self.file_server_port, files_path) - - @inlineCallbacks - def tearDown(self): - if self.channel_rss.running: - self.channel_rss.shutdown() - - yield super(TestChannelRss, self).tearDown() - - @trial_timeout(10) - def test_task_scrape_no_stop(self): - self.channel_rss.rss_url = 'http://localhost:%d/test_rss.xml' % self.file_server_port - self.channel_rss.cancel_all_pending_tasks() - test_deferred = self.channel_rss._task_scrape() - self.assertTrue(self.channel_rss.is_pending_task_active("rss_scrape")) - return test_deferred - - @trial_timeout(10) - def test_task_scrape_stop(self): - self.channel_rss.rss_url = 'http://localhost:%d/test_rss.xml' % self.file_server_port - self.channel_rss.cancel_all_pending_tasks() - self.channel_rss._to_stop = True - test_deferred = self.channel_rss._task_scrape() - self.assertFalse(self.channel_rss.is_pending_task_active("rss_scrape")) - return test_deferred - - def test_initialize(self): - self.assertTrue(self.channel_rss.is_pending_task_active("rss_scrape")) - - def test_shutdown(self): - cache_path = self.channel_rss._url_cache._file_path - self.channel_rss._url_cache.add('a') - self.channel_rss.shutdown() - self.assertTrue(os.path.exists(cache_path)) - self.assertFalse(self.channel_rss.is_pending_task_active("rss_scrape")) - - @trial_timeout(10) - def test_parse_rss_feed(self): - """ - Test parsing a rss feed - """ - self.channel_rss.rss_url = 'http://localhost:%d/test_rss.xml' % self.file_server_port - - def verify_rss(items): - self.assertEqual(len(items), 2) - - return self.channel_rss.parse_feed().addCallback(verify_rss) - - @trial_timeout(10) - def test_parse_no_rss(self): - """ - Test parsing a non-rss feed - """ - self.channel_rss.rss_url = 'http://localhost:%d/test_rsszz.xml' % self.file_server_port - - def verify_rss(items): - self.assertIsNone(items) - - return self.channel_rss.parse_feed().addCallback(verify_rss) - - @trial_timeout(10) - def test_parse_feed_stopped(self): - """ - Test whether items are not parsed anymore when the parse feeder is stopped - """ - self.channel_rss.rss_url = 'http://localhost:%d/test_rss.xml' % self.file_server_port - self.channel_rss._url_cache = SimpleCache(os.path.join(self.session_base_dir, 'cache.txt')) - self.channel_rss._to_stop = True - - def verify_rss(items): - self.assertEqual(len(items), 0) - - return self.channel_rss.parse_feed().addCallback(verify_rss) - - -class TestRssParser(TriblerCoreTest): - - def test_parse_html(self): - parser = RSSFeedParser() - self.assertEqual(parser._parse_html("
<p>Hi</p>"), set()) - self.assertEqual(parser._parse_html("<img src=\"abc\"/>"), {'abc'}) - self.assertEqual(parser._parse_html("<img src=\"abc\"/><img src=\"def\"/>"), {'abc', 'def'}) - - def test_html2plaintext(self): - parser = RSSFeedParser() - self.assertEqual(parser._html2plaintext("<p>test</p>"), "test\n") - self.assertEqual(parser._html2plaintext("test"), "test\n") - self.assertEqual(parser._html2plaintext("<p>test\ntest2</p><p>test3</p>
"), "test\ntest2\ntest3\n") - - @trial_timeout(10) - def test_parse(self): - test_rss_file = os.path.join(TESTS_DATA_DIR, 'test_rss.xml') - files_path = os.path.join(self.session_base_dir, 'files') - os.mkdir(files_path) - shutil.copyfile(test_rss_file, os.path.join(files_path, 'test_rss.xml')) - file_server_port = get_random_port() - self.setUpFileServer(file_server_port, files_path) - - parser = RSSFeedParser() - cache = SimpleCache(os.path.join(self.session_base_dir, 'cache.txt')) - cache.add('http://localhost:RANDOMPORT/ubuntu.torrent') - - def on_items(rss_items): - self.assertEqual(len(rss_items), 2) - self.assertEqual(len(rss_items[0]['thumbnail_list']), 1) - - return parser.parse('http://localhost:%d/test_rss.xml' % file_server_port, cache).addCallback(on_items) diff --git a/Tribler/Test/Core/Modules/MetadataStore/gen_test_data.py b/Tribler/Test/Core/Modules/MetadataStore/gen_test_data.py new file mode 100644 index 00000000000..3f65ac42d5b --- /dev/null +++ b/Tribler/Test/Core/Modules/MetadataStore/gen_test_data.py @@ -0,0 +1,67 @@ +from __future__ import absolute_import + +import os +import random +from datetime import datetime + +from pony.orm import db_session + +from Tribler.Core.Modules.MetadataStore.OrmBindings.channel_node import NEW +from Tribler.Core.Modules.MetadataStore.store import MetadataStore +from Tribler.Core.TorrentDef import TorrentDef +from Tribler.Test.Core.Modules.MetadataStore.test_channel_download import CHANNEL_METADATA, CHANNEL_METADATA_UPDATED, \ + CHANNEL_TORRENT, CHANNEL_TORRENT_UPDATED +from Tribler.Test.common import TORRENT_UBUNTU_FILE, TORRENT_VIDEO_FILE +from Tribler.pyipv8.ipv8.keyvault.crypto import default_eccrypto + +DATA_DIR = os.path.join(os.path.abspath(os.path.dirname(os.path.realpath(__file__))), '..', '..', 'data') +SAMPLE_DIR = os.path.join(DATA_DIR, 'sample_channel') + +my_key = default_eccrypto.generate_key(u"curve25519") + + +def gen_random_entry(): + return { + "title": "test entry " + str(random.randint(0, 1000000)), + "infohash": str(random.getrandbits(160)), + "torrent_date": datetime(1970, 1, 1), + "size": 100 + random.randint(0, 10000), + "tags": "video", + "status": NEW + } + + +@db_session +def gen_sample_channel(mds): + my_channel = mds.ChannelMetadata.create_channel('test_channel', 'test description') + + _ = my_channel.add_torrent_to_channel(TorrentDef.load(TORRENT_UBUNTU_FILE), None) + my_channel.commit_channel_torrent() + + t2 = my_channel.add_torrent_to_channel(TorrentDef.load(TORRENT_VIDEO_FILE), None) + _ = mds.TorrentMetadata.from_dict(gen_random_entry()) + _ = mds.TorrentMetadata.from_dict(gen_random_entry()) + my_channel.commit_channel_torrent() + + my_channel.delete_torrent(t2.infohash) + my_channel.commit_channel_torrent() + + # Rename files to stable names + mdblob_name = os.path.join(SAMPLE_DIR, my_channel.dir_name + ".mdblob") + torrent_name = os.path.join(SAMPLE_DIR, my_channel.dir_name + ".torrent") + + os.rename(mdblob_name, CHANNEL_METADATA) + os.rename(torrent_name, CHANNEL_TORRENT) + + # Update channel + _ = mds.TorrentMetadata.from_dict(gen_random_entry()) + my_channel.commit_channel_torrent() + + # Rename updated files to stable names + os.rename(mdblob_name, CHANNEL_METADATA_UPDATED) + os.rename(torrent_name, CHANNEL_TORRENT_UPDATED) + + +if __name__ == "__main__": + mds = MetadataStore(":memory:", SAMPLE_DIR, my_key) + gen_sample_channel(mds) diff --git a/Tribler/Test/Core/Modules/MetadataStore/test_channel_download.py b/Tribler/Test/Core/Modules/MetadataStore/test_channel_download.py index 
ce647315d81..717716f8080 100644 --- a/Tribler/Test/Core/Modules/MetadataStore/test_channel_download.py +++ b/Tribler/Test/Core/Modules/MetadataStore/test_channel_download.py @@ -1,12 +1,12 @@ import os from pony.orm import db_session + from twisted.internet.defer import inlineCallbacks from Tribler.Core.Modules.MetadataStore.serialization import ChannelMetadataPayload from Tribler.Core.TorrentDef import TorrentDef from Tribler.Core.Utilities.network_utils import get_random_port -from Tribler.Core.exceptions import InvalidSignatureException from Tribler.Test.test_as_server import TestAsServer from Tribler.Test.tools import trial_timeout @@ -43,21 +43,18 @@ def test_channel_update_and_download(self): yield self.setup_seeder(channel_tdef, CHANNEL_DIR, libtorrent_port) payload = ChannelMetadataPayload.from_file(CHANNEL_METADATA_UPDATED) - # Download the channel in our session - download, finished_deferred = self.session.lm.update_channel(payload) + with db_session: + self.session.lm.mds.process_payload(payload) + channel = self.session.lm.mds.ChannelMetadata.get(signature=payload.signature) + + download, finished_deferred = self.session.lm.gigachannel_manager.download_channel(channel) download.add_peer(("127.0.0.1", self.seeder_session.config.get_libtorrent_port())) yield finished_deferred with db_session: # There should be 4 torrents + 1 channel torrent - channel = self.session.lm.mds.ChannelMetadata.get_channel_with_id(payload.public_key) + channel2 = self.session.lm.mds.ChannelMetadata.get_channel_with_id(payload.public_key) self.assertEqual(5, len(list(self.session.lm.mds.TorrentMetadata.select()))) - self.assertEqual(4, channel.local_version) - - def test_wrong_signature_exception_on_channel_update(self): - # Test wrong signature exception - old_payload = ChannelMetadataPayload.from_file(CHANNEL_METADATA) - payload = ChannelMetadataPayload.from_file(CHANNEL_METADATA_UPDATED) - payload.signature = old_payload.signature - self.assertRaises(InvalidSignatureException, self.session.lm.update_channel, payload) + self.assertEqual(9, channel2.timestamp) + self.assertEqual(channel2.timestamp, channel2.local_version) diff --git a/Tribler/Test/Core/Modules/MetadataStore/test_channel_metadata.py b/Tribler/Test/Core/Modules/MetadataStore/test_channel_metadata.py index ff7d3acd1f5..3d631803373 100644 --- a/Tribler/Test/Core/Modules/MetadataStore/test_channel_metadata.py +++ b/Tribler/Test/Core/Modules/MetadataStore/test_channel_metadata.py @@ -1,17 +1,22 @@ from __future__ import absolute_import import os +from binascii import unhexlify from datetime import datetime from pony.orm import db_session + from six.moves import xrange + from twisted.internet.defer import inlineCallbacks -from Tribler.Core.Modules.MetadataStore.OrmBindings.channel_metadata import entries_to_chunk -from Tribler.Core.Modules.MetadataStore.serialization import ChannelMetadataPayload +from Tribler.Core.Modules.MetadataStore.OrmBindings.channel_metadata import CHANNEL_DIR_NAME_LENGTH, ROOT_CHANNEL_ID, \ + entries_to_chunk +from Tribler.Core.Modules.MetadataStore.OrmBindings.channel_node import COMMITTED, NEW, TODELETE +from Tribler.Core.Modules.MetadataStore.serialization import ChannelMetadataPayload, REGULAR_TORRENT from Tribler.Core.Modules.MetadataStore.store import MetadataStore from Tribler.Core.TorrentDef import TorrentDef -from Tribler.Core.exceptions import DuplicateTorrentFileError, DuplicateChannelNameError +from Tribler.Core.exceptions import DuplicateChannelIdError, DuplicateTorrentFileError from 
Tribler.Test.Core.base_test import TriblerCoreTest from Tribler.Test.common import TORRENT_UBUNTU_FILE from Tribler.pyipv8.ipv8.database import database_blob @@ -35,8 +40,7 @@ def setUp(self): "tags": "video" } self.my_key = default_eccrypto.generate_key(u"curve25519") - self.mds = MetadataStore(os.path.join(self.session_base_dir, 'test.db'), self.session_base_dir, - self.my_key) + self.mds = MetadataStore(":memory:", self.session_base_dir, self.my_key) @inlineCallbacks def tearDown(self): @@ -51,11 +55,10 @@ def get_sample_torrent_dict(my_key): return { "infohash": database_blob("1" * 20), "size": 123, - "timestamp": datetime.utcnow(), "torrent_date": datetime.utcnow(), "tags": "bla", - "tc_pointer": 123, - "public_key": database_blob(my_key.pub().key_to_bin()), + "id_": 123, + "public_key": database_blob(my_key.pub().key_to_bin()[10:]), "title": "lalala" } @@ -64,7 +67,7 @@ def get_sample_channel_dict(my_key): """ Utility method to return a dictionary with a channel information. """ - return dict(TestChannelMetadata.get_sample_torrent_dict(my_key), votes=222, subscribed=False, version=1) + return dict(TestChannelMetadata.get_sample_torrent_dict(my_key), votes=222, subscribed=False, timestamp=1) @db_session def test_serialization(self): @@ -79,21 +82,19 @@ def test_list_contents(self): """ Test whether a correct list with channel content is returned from the database """ - pub_key1 = default_eccrypto.generate_key('low').pub().key_to_bin() - pub_key2 = default_eccrypto.generate_key('low').pub().key_to_bin() - - channel1 = self.mds.ChannelMetadata(public_key=pub_key1) - self.mds.TorrentMetadata.from_dict(dict(self.torrent_template, public_key=pub_key1)) + self.mds.ChannelNode._my_key = default_eccrypto.generate_key('low') + channel1 = self.mds.ChannelMetadata() + self.mds.TorrentMetadata.from_dict(dict(self.torrent_template)) - channel2 = self.mds.ChannelMetadata(public_key=pub_key2) - self.mds.TorrentMetadata.from_dict(dict(self.torrent_template, public_key=pub_key2)) - self.mds.TorrentMetadata.from_dict(dict(self.torrent_template, public_key=pub_key2)) + self.mds.ChannelNode._my_key = default_eccrypto.generate_key('low') + channel2 = self.mds.ChannelMetadata() + self.mds.TorrentMetadata.from_dict(dict(self.torrent_template)) + self.mds.TorrentMetadata.from_dict(dict(self.torrent_template)) self.assertEqual(1, len(channel1.contents_list)) self.assertEqual(2, len(channel2.contents_list)) self.assertEqual(2, channel2.contents_len) - @db_session def test_create_channel(self): """ @@ -102,7 +103,7 @@ def test_create_channel(self): channel_metadata = self.mds.ChannelMetadata.create_channel('test', 'test') self.assertTrue(channel_metadata) - self.assertRaises(DuplicateChannelNameError, + self.assertRaises(DuplicateChannelIdError, self.mds.ChannelMetadata.create_channel, 'test', 'test') @db_session @@ -114,7 +115,7 @@ def test_update_metadata(self): channel_metadata = self.mds.ChannelMetadata.from_dict(sample_channel_dict) self.mds.TorrentMetadata.from_dict(self.torrent_template) update_dict = { - "tc_pointer": 222, + "id_": 222, "tags": "eee", "title": "qqq" } @@ -135,10 +136,10 @@ def test_process_channel_metadata_payload(self): self.assertEqual(len(self.mds.ChannelMetadata.select()), 1) # Check that we always take the latest version - channel_metadata.version -= 1 - self.assertEqual(channel_metadata.version, 2) + channel_metadata.timestamp -= 1 + self.assertEqual(channel_metadata.timestamp, 6) channel_metadata = self.mds.ChannelMetadata.process_channel_metadata_payload(payload) - 
self.assertEqual(channel_metadata.version, 3) + self.assertEqual(channel_metadata.timestamp, 7) self.assertEqual(len(self.mds.ChannelMetadata.select()), 1) @db_session @@ -149,7 +150,20 @@ def test_get_dirname(self): sample_channel_dict = TestChannelMetadata.get_sample_channel_dict(self.my_key) channel_metadata = self.mds.ChannelMetadata.from_dict(sample_channel_dict) - self.assertEqual(len(channel_metadata.dir_name), 60) + self.assertEqual(len(channel_metadata.dir_name), CHANNEL_DIR_NAME_LENGTH) + + @db_session + def test_get_channel_with_dirname(self): + sample_channel_dict = TestChannelMetadata.get_sample_channel_dict(self.my_key) + channel_metadata = self.mds.ChannelMetadata.from_dict(sample_channel_dict) + dirname = channel_metadata.dir_name + channel_result = self.mds.ChannelMetadata.get_channel_with_dirname(dirname) + self.assertEqual(channel_metadata, channel_result) + + # Test for corner-case of channel PK starting with zeroes + channel_metadata.public_key = database_blob(unhexlify('0' * 128)) + channel_result = self.mds.ChannelMetadata.get_channel_with_dirname(channel_metadata.dir_name) + self.assertEqual(channel_metadata, channel_result) @db_session def test_get_channel_with_id(self): @@ -166,12 +180,14 @@ def test_add_metadata_to_channel(self): Test whether adding new torrents to a channel works as expected """ channel_metadata = self.mds.ChannelMetadata.create_channel('test', 'test') - self.mds.TorrentMetadata.from_dict( - dict(self.torrent_template, public_key=channel_metadata.public_key)) + original_channel = channel_metadata.to_dict() + md = self.mds.TorrentMetadata.from_dict(dict(self.torrent_template, status=NEW)) channel_metadata.commit_channel_torrent() - self.assertEqual(channel_metadata.version, 1) - self.assertEqual(channel_metadata.size, 1) + self.assertEqual(channel_metadata.id_, ROOT_CHANNEL_ID) + self.assertLess(original_channel["timestamp"], channel_metadata.timestamp) + self.assertLess(md.timestamp, channel_metadata.timestamp) + self.assertEqual(channel_metadata.num_entries, 1) @db_session def test_add_torrent_to_channel(self): @@ -180,10 +196,39 @@ def test_add_torrent_to_channel(self): """ channel_metadata = self.mds.ChannelMetadata.create_channel('test', 'test') tdef = TorrentDef.load(TORRENT_UBUNTU_FILE) - channel_metadata.add_torrent_to_channel(tdef, None) + channel_metadata.add_torrent_to_channel(tdef, {'description': 'blabla'}) self.assertTrue(channel_metadata.contents_list) self.assertRaises(DuplicateTorrentFileError, channel_metadata.add_torrent_to_channel, tdef, None) + @db_session + def test_restore_torrent_in_channel(self): + """ + Test if the torrent scheduled for deletion is restored/updated after the user + tries to re-add it. 
+ """ + channel_metadata = self.mds.ChannelMetadata.create_channel('test', 'test') + tdef = TorrentDef.load(TORRENT_UBUNTU_FILE) + md = channel_metadata.add_torrent_to_channel(tdef, None) + + # Check correct re-add + md.status = TODELETE + md_updated = channel_metadata.add_torrent_to_channel(tdef, None) + self.assertEqual(md.status, COMMITTED) + self.assertEqual(md_updated, md) + self.assertTrue(md.has_valid_signature) + + # Check update of torrent properties from a new tdef + md.status = TODELETE + new_tracker_address = u'http://tribler.org/announce' + tdef.input['announce'] = new_tracker_address + md_updated = channel_metadata.add_torrent_to_channel(tdef, None) + self.assertEqual(md_updated, md) + self.assertEqual(md.status, NEW) + self.assertEqual(md.tracker_info, new_tracker_address) + self.assertTrue(md.has_valid_signature) + # In addition, check that the trackers table was properly updated + self.assertEqual(len(md.health.trackers), 2) + @db_session def test_delete_torrent_from_channel(self): """ @@ -194,17 +239,26 @@ def test_delete_torrent_from_channel(self): # Check that nothing is committed when deleting uncommited torrent metadata channel_metadata.add_torrent_to_channel(tdef, None) - channel_metadata.delete_torrent_from_channel(tdef.get_infohash()) + channel_metadata.delete_torrent(tdef.get_infohash()) self.assertEqual(0, len(channel_metadata.contents_list)) # Check append-only deletion process channel_metadata.add_torrent_to_channel(tdef, None) channel_metadata.commit_channel_torrent() self.assertEqual(1, len(channel_metadata.contents_list)) - channel_metadata.delete_torrent_from_channel(tdef.get_infohash()) + channel_metadata.delete_torrent(tdef.get_infohash()) channel_metadata.commit_channel_torrent() self.assertEqual(0, len(channel_metadata.contents_list)) + @db_session + def test_commit_channel_torrent(self): + channel = self.mds.ChannelMetadata.create_channel('test', 'test') + tdef = TorrentDef.load(TORRENT_UBUNTU_FILE) + channel.add_torrent_to_channel(tdef, None) + # The first run should return the infohash, the second should return None, because nothing was really done + self.assertTrue(channel.commit_channel_torrent()) + self.assertFalse(channel.commit_channel_torrent()) + @db_session def test_consolidate_channel_torrent(self): """ @@ -219,12 +273,12 @@ def test_consolidate_channel_torrent(self): channel.commit_channel_torrent() # 2nd torrent - self.mds.TorrentMetadata.from_dict( - dict(self.torrent_template, public_key=channel.public_key)) + md = self.mds.TorrentMetadata.from_dict( + dict(self.torrent_template, public_key=channel.public_key, status=NEW)) channel.commit_channel_torrent() # Delete entry - channel.delete_torrent_from_channel(tdef.get_infohash()) + channel.delete_torrent(tdef.get_infohash()) channel.commit_channel_torrent() self.assertEqual(1, len(channel.contents_list)) @@ -236,3 +290,61 @@ def test_mdblob_dont_fit_exception(self): with db_session: md_list = [self.mds.TorrentMetadata(title='test' + str(x)) for x in xrange(0, 1)] self.assertRaises(Exception, entries_to_chunk, md_list, chunk_size=1) + + @db_session + def test_get_channels(self): + """ + Test whether we can get channels + """ + + # First we create a few channels + for ind in xrange(10): + self.mds.ChannelNode._my_key = default_eccrypto.generate_key('low') + _ = self.mds.ChannelMetadata(title='channel%d' % ind, subscribed=(ind % 2 == 0)) + channels = self.mds.ChannelMetadata.get_entries(first=1, last=5) + self.assertEqual(len(channels[0]), 5) + self.assertEqual(channels[1], 10) + + # Test 
filtering + channels = self.mds.ChannelMetadata.get_entries(first=1, last=5, query_filter='channel5') + self.assertEqual(len(channels[0]), 1) + + # Test sorting + channels = self.mds.ChannelMetadata.get_entries(first=1, last=10, sort_by='title', sort_asc=False) + self.assertEqual(len(channels[0]), 10) + self.assertEqual(channels[0][0].title, 'channel9') + + # Test fetching subscribed channels + channels = self.mds.ChannelMetadata.get_entries(first=1, last=10, sort_by='title', subscribed=True) + self.assertEqual(len(channels[0]), 5) + + @db_session + def test_get_channel_name(self): + infohash = "\x00" * 20 + title = "testchan" + chan = self.mds.ChannelMetadata(title=title, infohash=database_blob(infohash)) + dirname = chan.dir_name + + self.assertEqual(title, self.mds.ChannelMetadata.get_channel_name(dirname, infohash)) + chan.infohash = "\x11" * 20 + self.assertEqual("OLD:" + title, self.mds.ChannelMetadata.get_channel_name(dirname, infohash)) + chan.delete() + self.assertEqual(dirname, self.mds.ChannelMetadata.get_channel_name(dirname, infohash)) + + @db_session + def check_add(self, torrents_in_dir, errors, recursive): + TEST_TORRENTS_DIR = os.path.join(os.path.abspath(os.path.dirname(os.path.realpath(__file__))), + '..', '..', '..', 'data', 'linux_torrents') + chan = self.mds.ChannelMetadata.create_channel(title='testchan') + torrents, e = chan.add_torrents_from_dir(TEST_TORRENTS_DIR, recursive) + self.assertEqual(torrents_in_dir, len(torrents)) + self.assertEqual(errors, len(e)) + with db_session: + q = self.mds.TorrentMetadata.select(lambda g: g.metadata_type == REGULAR_TORRENT) + self.assertEqual(torrents_in_dir - len(e), q.count()) + + def test_add_torrents_from_dir(self): + self.check_add(9, 0, recursive=False) + + def test_add_torrents_from_dir_recursive(self): + self.check_add(11, 1, recursive=True) diff --git a/Tribler/Test/Core/Modules/MetadataStore/test_metadata.py b/Tribler/Test/Core/Modules/MetadataStore/test_metadata.py index 6a084e5374c..73fdc60188b 100644 --- a/Tribler/Test/Core/Modules/MetadataStore/test_metadata.py +++ b/Tribler/Test/Core/Modules/MetadataStore/test_metadata.py @@ -1,25 +1,30 @@ +from __future__ import absolute_import + import os +from pony import orm from pony.orm import db_session + from twisted.internet.defer import inlineCallbacks -from Tribler.Core.Modules.MetadataStore.serialization import MetadataPayload, KeysMismatchException +from Tribler.Core.Modules.MetadataStore.serialization import ChannelNodePayload, KeysMismatchException from Tribler.Core.Modules.MetadataStore.store import MetadataStore +from Tribler.Core.exceptions import InvalidSignatureException from Tribler.Test.Core.base_test import TriblerCoreTest +from Tribler.pyipv8.ipv8.database import database_blob from Tribler.pyipv8.ipv8.keyvault.crypto import default_eccrypto class TestMetadata(TriblerCoreTest): """ - Contains various tests for the Metadata type. + Contains various tests for the ChannelNode type. 
""" @inlineCallbacks def setUp(self): yield super(TestMetadata, self).setUp() self.my_key = default_eccrypto.generate_key(u"curve25519") - self.mds = MetadataStore(os.path.join(self.session_base_dir, 'test.db'), self.session_base_dir, - self.my_key) + self.mds = MetadataStore(':memory:', self.session_base_dir, self.my_key) @inlineCallbacks def tearDown(self): @@ -31,7 +36,7 @@ def test_to_dict(self): """ Test whether converting metadata to a dictionary works """ - metadata = self.mds.Metadata.from_dict({}) + metadata = self.mds.ChannelNode.from_dict({}) self.assertTrue(metadata.to_dict()) @db_session @@ -39,18 +44,30 @@ def test_serialization(self): """ Test converting metadata to serialized data and back """ - metadata1 = self.mds.Metadata.from_dict({}) + metadata1 = self.mds.ChannelNode.from_dict({}) serialized1 = metadata1.serialized() metadata1.delete() + orm.flush() - metadata2 = self.mds.Metadata.from_payload(MetadataPayload.from_signed_blob(serialized1)) + metadata2 = self.mds.ChannelNode.from_payload(ChannelNodePayload.from_signed_blob(serialized1)) serialized2 = metadata2.serialized() self.assertEqual(serialized1, serialized2) + # Test no signature exception + metadata2_dict = metadata2.to_dict() + metadata2_dict.pop("signature") + self.assertRaises(InvalidSignatureException, ChannelNodePayload, **metadata2_dict) + + serialized3 = serialized2[:-5] + "\xee" * 5 + self.assertRaises(InvalidSignatureException, ChannelNodePayload.from_signed_blob, serialized3) + # Test bypass signature check + ChannelNodePayload.from_signed_blob(serialized3, check_signature=False) + + @db_session def test_key_mismatch_exception(self): mismatched_key = default_eccrypto.generate_key(u"curve25519") - metadata = self.mds.Metadata.from_dict({}) + metadata = self.mds.ChannelNode.from_dict({}) self.assertRaises(KeysMismatchException, metadata.serialized, key=mismatched_key) @db_session @@ -58,7 +75,7 @@ def test_to_file(self): """ Test writing metadata to a file """ - metadata = self.mds.Metadata.from_dict({}) + metadata = self.mds.ChannelNode.from_dict({}) file_path = os.path.join(self.session_base_dir, 'metadata.file') metadata.to_file(file_path) self.assertTrue(os.path.exists(file_path)) @@ -68,26 +85,38 @@ def test_has_valid_signature(self): """ Test whether a signature can be validated correctly """ - metadata = self.mds.Metadata.from_dict({}) + metadata = self.mds.ChannelNode.from_dict({}) self.assertTrue(metadata.has_valid_signature()) - saved_key = metadata.public_key - # Mess with the public key - metadata.public_key = 'a' - self.assertFalse(metadata.has_valid_signature()) + md_dict = metadata.to_dict() # Mess with the signature - metadata.public_key = saved_key metadata.signature = 'a' self.assertFalse(metadata.has_valid_signature()) + # Create metadata with wrong key + metadata.delete() + md_dict.update(public_key=database_blob("aaa")) + md_dict.pop("rowid") + + metadata = self.mds.ChannelNode(skip_key_check=True, **md_dict) + self.assertFalse(metadata.has_valid_signature()) + + key = default_eccrypto.generate_key(u"curve25519") + metadata2 = self.mds.ChannelNode(sign_with=key, **md_dict) + self.assertTrue(database_blob(key.pub().key_to_bin()[10:]), metadata2.public_key) + md_dict2 = metadata2.to_dict() + md_dict2["signature"] = md_dict["signature"] + self.assertRaises(InvalidSignatureException, self.mds.ChannelNode, **md_dict2) + @db_session def test_from_payload(self): """ Test converting a metadata payload to a metadata object """ - metadata = self.mds.Metadata.from_dict({}) + metadata = 
self.mds.ChannelNode.from_dict({}) metadata_dict = metadata.to_dict() metadata.delete() - metadata_payload = MetadataPayload(**metadata_dict) - self.assertTrue(self.mds.Metadata.from_payload(metadata_payload)) + orm.flush() + metadata_payload = ChannelNodePayload(**metadata_dict) + self.assertTrue(self.mds.ChannelNode.from_payload(metadata_payload)) diff --git a/Tribler/Test/Core/Modules/MetadataStore/test_serialize.py b/Tribler/Test/Core/Modules/MetadataStore/test_serialize.py index 0fa8f6cf0f2..50e2c9ffb46 100644 --- a/Tribler/Test/Core/Modules/MetadataStore/test_serialize.py +++ b/Tribler/Test/Core/Modules/MetadataStore/test_serialize.py @@ -1,6 +1,8 @@ +from __future__ import absolute_import + import datetime -from Tribler.Core.Modules.MetadataStore.serialization import float2time, time2float, EPOCH +from Tribler.Core.Modules.MetadataStore.serialization import EPOCH, int2time, time2int from Tribler.Test.Core.base_test import TriblerCoreTest @@ -11,22 +13,22 @@ def test_time_convert(self): Test converting various datetime objects to float """ test_time_list = [ - datetime.datetime(2005, 7, 14, 12, 30, 12, 1234), - datetime.datetime(2039, 7, 14, 12, 30, 12, 1234), - datetime.datetime.utcnow() + datetime.datetime(2005, 7, 14, 12, 30, 12), + datetime.datetime(2039, 7, 14, 12, 30, 12), + datetime.datetime.utcnow().replace(second=0, microsecond=0) ] for test_time in test_time_list: - self.assertTrue(test_time == float2time(time2float(test_time))) + self.assertTrue(test_time == int2time(time2int(test_time))) def test_zero_time(self): """ Test whether a time of zero converts to the epoch time """ - self.assertTrue(float2time(0.0) == EPOCH) + self.assertTrue(int2time(0.0) == EPOCH) def test_negative_time(self): """ Test whether we are able to deal with time below the epoch time """ negative_time = EPOCH - datetime.timedelta(1) - self.assertTrue(negative_time == float2time(time2float(negative_time))) + self.assertTrue(negative_time == int2time(time2int(negative_time))) diff --git a/Tribler/Test/Core/Modules/MetadataStore/test_store.py b/Tribler/Test/Core/Modules/MetadataStore/test_store.py index 7d29a637460..69b00868995 100644 --- a/Tribler/Test/Core/Modules/MetadataStore/test_store.py +++ b/Tribler/Test/Core/Modules/MetadataStore/test_store.py @@ -1,15 +1,20 @@ from __future__ import absolute_import + import os -from datetime import datetime +import random +import string +from binascii import unhexlify from pony.orm import db_session -from six.moves import xrange + from twisted.internet.defer import inlineCallbacks -from Tribler.Core.Modules.MetadataStore.OrmBindings.channel_metadata import entries_to_chunk -from Tribler.Core.Modules.MetadataStore.serialization import (ChannelMetadataPayload, MetadataPayload, - UnknownBlobTypeException) -from Tribler.Core.Modules.MetadataStore.store import MetadataStore +from Tribler.Core.Modules.MetadataStore.OrmBindings.channel_metadata import CHANNEL_DIR_NAME_LENGTH, entries_to_chunk +from Tribler.Core.Modules.MetadataStore.OrmBindings.channel_node import NEW +from Tribler.Core.Modules.MetadataStore.serialization import ChannelMetadataPayload, DeletedMetadataPayload, \ + SignedPayload, UnknownBlobTypeException +from Tribler.Core.Modules.MetadataStore.store import DELETED_METADATA, GOT_SAME_VERSION, MetadataStore, NO_ACTION, \ + UNKNOWN_CHANNEL, UNKNOWN_TORRENT from Tribler.Test.Core.base_test import TriblerCoreTest from Tribler.pyipv8.ipv8.database import database_blob from Tribler.pyipv8.ipv8.keyvault.crypto import default_eccrypto @@ -17,9 +22,10 @@ def 
make_wrong_payload(filename): key = default_eccrypto.generate_key(u"curve25519") - metadata_payload = MetadataPayload(666, database_blob(key.pub().key_to_bin()), datetime.utcnow(), 123) + metadata_payload = SignedPayload(666, 0, database_blob(key.pub().key_to_bin()[10:]), + signature='\x00'*64, skip_key_check=True) with open(filename, 'wb') as output_file: - output_file.write(''.join(metadata_payload.serialized(key))) + output_file.write(''.join(metadata_payload.serialized())) class TestMetadataStore(TriblerCoreTest): @@ -27,43 +33,53 @@ class TestMetadataStore(TriblerCoreTest): This class contains tests for the metadata store. """ DATA_DIR = os.path.join(os.path.abspath(os.path.dirname(os.path.realpath(__file__))), '..', '..', 'data') - CHANNEL_DIR = os.path.join(DATA_DIR, 'sample_channel', - 'd24941643ff471e40d7761c71f4e3a4c21a4a5e89b0281430d01e78a4e46') + SAMPLE_DIR = os.path.join(DATA_DIR, 'sample_channel') + # Just get the first and only subdir there, and assume it is the sample channel dir + CHANNEL_DIR = [os.path.join(SAMPLE_DIR, subdir) for subdir in os.listdir(SAMPLE_DIR) if + os.path.isdir(os.path.join(SAMPLE_DIR, subdir)) and len(subdir) == CHANNEL_DIR_NAME_LENGTH][0] CHANNEL_METADATA = os.path.join(DATA_DIR, 'sample_channel', 'channel.mdblob') @inlineCallbacks def setUp(self): yield super(TestMetadataStore, self).setUp() my_key = default_eccrypto.generate_key(u"curve25519") - - self.mds = MetadataStore(os.path.join(self.session_base_dir, 'test.db'), self.session_base_dir, - my_key) + self.mds = MetadataStore(":memory:", self.session_base_dir, my_key) @inlineCallbacks def tearDown(self): self.mds.shutdown() yield super(TestMetadataStore, self).tearDown() + + def test_store_clock(self): + my_key = default_eccrypto.generate_key(u"curve25519") + mds2 = MetadataStore(os.path.join(self.session_base_dir, 'test.db'), self.session_base_dir, my_key) + tick = mds2.clock.tick() + mds2.shutdown() + mds2 = MetadataStore(os.path.join(self.session_base_dir, 'test.db'), self.session_base_dir, my_key) + self.assertEqual(mds2.clock.clock, tick) + mds2.shutdown() + @db_session def test_process_channel_dir_file(self): """ - Test whether we are able to process files in a directory containing torrent metadata + Test whether we are able to process files in a directory containing node metadata """ - test_torrent_metadata = self.mds.TorrentMetadata(title='test') + test_node_metadata = self.mds.TorrentMetadata(title='test') metadata_path = os.path.join(self.session_base_dir, 'metadata.data') - test_torrent_metadata.to_file(metadata_path) + test_node_metadata.to_file(metadata_path) # We delete this TorrentMeta info now, it should be added again to the database when loading it - test_torrent_metadata.delete() + test_node_metadata.delete() loaded_metadata = self.mds.process_mdblob_file(metadata_path) - self.assertEqual(loaded_metadata[0].title, 'test') + self.assertEqual(loaded_metadata[0][0].title, 'test') # Test whether we delete existing metadata when loading a DeletedMetadata blob metadata = self.mds.TorrentMetadata(infohash='1' * 20) metadata.to_delete_file(metadata_path) loaded_metadata = self.mds.process_mdblob_file(metadata_path) # Make sure the original metadata is deleted - self.assertListEqual(loaded_metadata, []) + self.assertEqual(loaded_metadata[0], (None, 7)) self.assertIsNone(self.mds.TorrentMetadata.get(infohash='1' * 20)) # Test an unknown metadata type, this should raise an exception @@ -74,14 +90,30 @@ def test_process_channel_dir_file(self): @db_session def test_squash_mdblobs(self): 
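        # Round-trip outline of the assertions below: entries_to_chunk() packs the
        # serialized entries into a single compressed blob, process_compressed_mdblob()
        # recreates them from that blob, and, since the original entries are deleted
        # before processing, equivalence is checked by comparing the entries' signatures.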
chunk_size = self.mds.ChannelMetadata._CHUNK_SIZE_LIMIT - md_list = [self.mds.TorrentMetadata(title='test' + str(x)) for x in xrange(0, 10)] + md_list = [self.mds.TorrentMetadata( + title=''.join(random.choice(string.ascii_uppercase + string.digits) + for _ in range(20))) for _ in range(0, 10)] chunk, _ = entries_to_chunk(md_list, chunk_size=chunk_size) - self.assertItemsEqual(md_list, self.mds.process_squashed_mdblob(chunk)) + dict_list = [d.to_dict()["signature"] for d in md_list] + for d in md_list: + d.delete() + self.assertListEqual(dict_list, [d[0].to_dict()["signature"] + for d in self.mds.process_compressed_mdblob(chunk)]) + @db_session + def test_squash_mdblobs_multiple_chunks(self): + md_list = [self.mds.TorrentMetadata(title=''.join(random.choice(string.ascii_uppercase + string.digits) + for _ in range(20))) for _ in range(0, 10)] # Test splitting into multiple chunks - chunk, index = entries_to_chunk(md_list, chunk_size=1000) - chunk += entries_to_chunk(md_list, chunk_size=1000, start_index=index)[0] - self.assertItemsEqual(md_list, self.mds.process_squashed_mdblob(chunk)) + chunk, index = entries_to_chunk(md_list, chunk_size=900) + chunk2, _ = entries_to_chunk(md_list, chunk_size=900, start_index=index) + dict_list = [d.to_dict()["signature"] for d in md_list] + for d in md_list: + d.delete() + self.assertListEqual(dict_list[:index], [d[0].to_dict()["signature"] + for d in self.mds.process_compressed_mdblob(chunk)]) + self.assertListEqual(dict_list[index:], [d[0].to_dict()["signature"] + for d in self.mds.process_compressed_mdblob(chunk2)]) @db_session def test_multiple_squashed_commit_and_read(self): @@ -92,7 +124,7 @@ def test_multiple_squashed_commit_and_read(self): num_entries = 10 channel = self.mds.ChannelMetadata(title='testchan') - md_list = [self.mds.TorrentMetadata(title='test' + str(x)) for x in xrange(0, num_entries)] + md_list = [self.mds.TorrentMetadata(title='test' + str(x), status=NEW) for x in range(0, num_entries)] channel.commit_channel_torrent() channel.local_version = 0 @@ -114,4 +146,52 @@ def test_process_channel_dir(self): self.assertFalse(channel.contents_list) self.mds.process_channel_dir(self.CHANNEL_DIR, channel.public_key) self.assertEqual(len(channel.contents_list), 3) - self.assertEqual(channel.local_version, 3) + self.assertEqual(channel.timestamp, 7) + self.assertEqual(channel.local_version, channel.timestamp) + + @db_session + def test_process_payload(self): + def get_payloads(entity_class): + c = entity_class() + payload = c._payload_class.from_signed_blob(c.serialized()) + deleted_payload = DeletedMetadataPayload.from_signed_blob(c.serialized_delete()) + return c, payload, deleted_payload + + _, node_payload, node_deleted_payload = get_payloads(self.mds.ChannelNode) + + self.assertEqual((None, GOT_SAME_VERSION), self.mds.process_payload(node_payload)) + self.assertEqual((None, DELETED_METADATA), self.mds.process_payload(node_deleted_payload)) + # Do nothing in case it is unknown/abstract payload type, like ChannelNode + self.assertEqual((None, NO_ACTION), self.mds.process_payload(node_payload)) + + # Check if node metadata object is properly created on payload processing + node, node_payload, node_deleted_payload = get_payloads(self.mds.TorrentMetadata) + node_dict = node.to_dict() + node.delete() + result = self.mds.process_payload(node_payload) + self.assertEqual(UNKNOWN_TORRENT, result[1]) + self.assertEqual(node_dict['metadata_type'], result[0].to_dict()['metadata_type']) + + # Check the same for a channel + node, node_payload, 
node_deleted_payload = get_payloads(self.mds.ChannelMetadata) + node_dict = node.to_dict() + node.delete() + # Check there is no action if the signature on the delete object is unknown + self.assertEqual((None, NO_ACTION), self.mds.process_payload(node_deleted_payload)) + result = self.mds.process_payload(node_payload) + self.assertEqual(UNKNOWN_CHANNEL, result[1]) + self.assertEqual(node_dict['metadata_type'], result[0].to_dict()['metadata_type']) + + @db_session + def test_get_num_channels_nodes(self): + self.mds.ChannelMetadata(title='testchan', id_=0) + self.mds.ChannelMetadata(title='testchan', id_=123) + self.mds.ChannelMetadata(title='testchan', id_=0, public_key=unhexlify('0'*20), + signature=unhexlify('0'*64), skip_key_check=True) + self.mds.ChannelMetadata(title='testchan', id_=0, public_key=unhexlify('1'*20), + signature=unhexlify('1'*64), skip_key_check=True) + + _ = [self.mds.TorrentMetadata(title='test' + str(x), status=NEW) for x in range(0, 3)] + + self.assertEqual(4, self.mds.get_num_channels()) + self.assertEqual(3, self.mds.get_num_torrents()) diff --git a/Tribler/Test/Core/Modules/MetadataStore/test_torrent_metadata.py b/Tribler/Test/Core/Modules/MetadataStore/test_torrent_metadata.py index 84eced82ce0..9f0fea9072f 100644 --- a/Tribler/Test/Core/Modules/MetadataStore/test_torrent_metadata.py +++ b/Tribler/Test/Core/Modules/MetadataStore/test_torrent_metadata.py @@ -1,10 +1,16 @@ # -*- coding: utf-8 -*- -import os +from __future__ import absolute_import + from datetime import datetime +from pony import orm from pony.orm import db_session + +from six.moves import xrange + from twisted.internet.defer import inlineCallbacks +from Tribler.Core.Modules.MetadataStore.OrmBindings.channel_node import TODELETE from Tribler.Core.Modules.MetadataStore.store import MetadataStore from Tribler.Test.Core.base_test import TriblerCoreTest from Tribler.pyipv8.ipv8.keyvault.crypto import default_eccrypto @@ -25,8 +31,8 @@ def setUp(self): "tags": "video" } self.my_key = default_eccrypto.generate_key(u"curve25519") - self.mds = MetadataStore(os.path.join(self.session_base_dir, 'test.db'), self.session_base_dir, - self.my_key) + self.mds = MetadataStore(':memory:', self.session_base_dir, self.my_key) + @inlineCallbacks def tearDown(self): self.mds.shutdown() @@ -47,7 +53,7 @@ def test_get_magnet(self): """ torrent_metadata = self.mds.TorrentMetadata.from_dict({}) self.assertTrue(torrent_metadata.get_magnet()) - torrent_metadata2 = self.mds.TorrentMetadata.from_dict({'title':u'\U0001f4a9'}) + torrent_metadata2 = self.mds.TorrentMetadata.from_dict({'title': u'\U0001f4a9'}) self.assertTrue(torrent_metadata2.get_magnet()) @db_session @@ -63,30 +69,35 @@ def test_search_keyword(self): dict(self.torrent_template, title="xoxoxo bar", tags="video")) self.mds.TorrentMetadata.from_dict( dict(self.torrent_template, title="xoxoxo bar", tags="audio")) + self.mds.TorrentMetadata.from_dict( + dict(self.torrent_template, title=u"\"", tags="audio")) + self.mds.TorrentMetadata.from_dict( + dict(self.torrent_template, title=u"\'", tags="audio")) + orm.flush() # Search for torrents with the keyword 'foo', it should return one result - results = self.mds.TorrentMetadata.search_keyword("foo") + results = self.mds.TorrentMetadata.search_keyword("foo")[:] self.assertEqual(len(results), 1) self.assertEqual(results[0].rowid, torrent1.rowid) # Search for torrents with the keyword 'eee', it should return one result - results = self.mds.TorrentMetadata.search_keyword("eee") + results = 
self.mds.TorrentMetadata.search_keyword("eee")[:] self.assertEqual(len(results), 1) self.assertEqual(results[0].rowid, torrent2.rowid) # Search for torrents with the keyword '123', it should return two results - results = self.mds.TorrentMetadata.search_keyword("123") + results = self.mds.TorrentMetadata.search_keyword("123")[:] self.assertEqual(len(results), 2) # Search for torrents with the keyword 'video', it should return three results - results = self.mds.TorrentMetadata.search_keyword("video") + results = self.mds.TorrentMetadata.search_keyword("video")[:] self.assertEqual(len(results), 3) def test_search_empty_query(self): """ Test whether an empty query returns nothing """ - self.assertFalse(self.mds.TorrentMetadata.search_keyword(None)) + self.assertFalse(self.mds.TorrentMetadata.search_keyword(None)[:]) @db_session def test_unicode_search(self): @@ -94,7 +105,7 @@ def test_unicode_search(self): Test searching in the database with unicode characters """ self.mds.TorrentMetadata.from_dict(dict(self.torrent_template, title=u"Ñ Ð¼Ð°Ð»ÐµÐ½ÑŒÐºÐ¸Ð¹ апельÑин")) - results = self.mds.TorrentMetadata.search_keyword(u"маленький") + results = self.mds.TorrentMetadata.search_keyword(u"маленький")[:] self.assertEqual(1, len(results)) @db_session @@ -104,9 +115,10 @@ def test_wildcard_search(self): """ self.mds.TorrentMetadata.from_dict(dict(self.torrent_template, title="foobar 123")) self.mds.TorrentMetadata.from_dict(dict(self.torrent_template, title="foobla 123")) - self.assertEqual(0, len(self.mds.TorrentMetadata.search_keyword("*"))) - self.assertEqual(1, len(self.mds.TorrentMetadata.search_keyword("foobl*"))) - self.assertEqual(2, len(self.mds.TorrentMetadata.search_keyword("foo*"))) + self.assertEqual(0, len(self.mds.TorrentMetadata.search_keyword("*")[:])) + self.assertEqual(1, len(self.mds.TorrentMetadata.search_keyword("foobl*")[:])) + self.assertEqual(2, len(self.mds.TorrentMetadata.search_keyword("foo*")[:])) + self.assertEqual(1, len(self.mds.TorrentMetadata.search_keyword("(\"12\"* AND \"foobl\"*)")[:])) @db_session def test_stemming_search(self): @@ -117,11 +129,11 @@ def test_stemming_search(self): dict(self.torrent_template, title="mountains sheep", tags="video")) # Search with the word 'mountain' should return the torrent with 'mountains' in the title - results = self.mds.TorrentMetadata.search_keyword("mountain") + results = self.mds.TorrentMetadata.search_keyword("mountain")[:] self.assertEqual(torrent.rowid, results[0].rowid) # Search with the word 'sheeps' should return the torrent with 'sheep' in the title - results = self.mds.TorrentMetadata.search_keyword("sheeps") + results = self.mds.TorrentMetadata.search_keyword("sheeps")[:] self.assertEqual(torrent.rowid, results[0].rowid) @db_session @@ -140,6 +152,9 @@ def test_get_autocomplete_terms(self): autocomplete_terms = self.mds.TorrentMetadata.get_auto_complete_terms("shee", 10) self.assertIn('sheepish', autocomplete_terms) + autocomplete_terms = self.mds.TorrentMetadata.get_auto_complete_terms("", 10) + self.assertEqual([], autocomplete_terms) + @db_session def test_get_autocomplete_terms_max(self): """ @@ -154,3 +169,45 @@ def test_get_autocomplete_terms_max(self): autocomplete_terms = self.mds.TorrentMetadata.get_auto_complete_terms("sheep", 2) self.assertEqual(len(autocomplete_terms), 2) + # Check that we can chew the special character "." 
+ autocomplete_terms = self.mds.TorrentMetadata.get_auto_complete_terms(".", 2) + + @db_session + def test_get_entries(self): + """ + Test whether we can get torrents + """ + + # First we create a few channels and add some torrents to these channels + tlist = [] + for ind in xrange(5): + self.mds.ChannelNode._my_key = default_eccrypto.generate_key('curve25519') + _ = self.mds.ChannelMetadata(title='channel%d' % ind, subscribed=(ind % 2 == 0)) + tlist.extend([self.mds.TorrentMetadata(title='torrent%d' % torrent_ind) for torrent_ind in xrange(5)]) + tlist[-1].xxx = 1 + tlist[-2].status = TODELETE + + torrents, count = self.mds.TorrentMetadata.get_entries(first=1, last=5) + self.assertEqual(5, len(torrents)) + self.assertEqual(25, count) + + # Test fetching torrents in a channel + channel_pk = self.mds.ChannelNode._my_key.pub().key_to_bin()[10:] + torrents, count = self.mds.TorrentMetadata.get_entries(first=1, last=10, sort_by='title', channel_pk=channel_pk) + self.assertEqual(5, len(torrents)) + self.assertEqual(5, count) + + torrents, count = self.mds.TorrentMetadata.get_entries( + channel_pk=channel_pk, hide_xxx=True, exclude_deleted=True)[:] + + self.assertListEqual(tlist[-5:-2], list(torrents)) + self.assertEqual(count, 3) + + @db_session + def test_metadata_conflicting(self): + tdict = dict(self.torrent_template, title="lakes sheep", tags="video", infohash='\x00\xff') + md = self.mds.TorrentMetadata.from_dict(tdict) + self.assertFalse(md.metadata_conflicting(tdict)) + self.assertTrue(md.metadata_conflicting(dict(tdict, title="bla"))) + tdict.pop('title') + self.assertFalse(md.metadata_conflicting(tdict)) diff --git a/Tribler/Test/Core/Modules/MetadataStore/test_tracker_state.py b/Tribler/Test/Core/Modules/MetadataStore/test_tracker_state.py new file mode 100644 index 00000000000..680b38b0f31 --- /dev/null +++ b/Tribler/Test/Core/Modules/MetadataStore/test_tracker_state.py @@ -0,0 +1,41 @@ +from __future__ import absolute_import + +from pony.orm import db_session + +from twisted.internet.defer import inlineCallbacks + +from Tribler.Core.Modules.MetadataStore.store import MetadataStore +from Tribler.Core.Utilities.tracker_utils import MalformedTrackerURLException +from Tribler.Test.Core.base_test import TriblerCoreTest +from Tribler.pyipv8.ipv8.keyvault.crypto import default_eccrypto + + +class TestTrackerState(TriblerCoreTest): + """ + Contains various tests for the TrackerState class. 
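+ Tracker URLs are canonicalized on creation (see the canonicalization tests below), so equivalent forms such as an explicit default port or a trailing slash map to a single entry.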
+ """ + @inlineCallbacks + def setUp(self): + yield super(TestTrackerState, self).setUp() + self.my_key = default_eccrypto.generate_key(u"curve25519") + self.mds = MetadataStore(":memory:", self.session_base_dir, self.my_key) + + @inlineCallbacks + def tearDown(self): + self.mds.shutdown() + yield super(TestTrackerState, self).tearDown() + + @db_session + def test_create_tracker_state(self): + ts = self.mds.TrackerState(url='http://tracker.tribler.org:80/announce') + self.assertEqual(list(self.mds.TrackerState.select())[0], ts) + + @db_session + def test_canonicalize_tracker_state(self): + ts = self.mds.TrackerState(url='http://tracker.tribler.org:80/announce/') + self.assertEqual(self.mds.TrackerState.get(url='http://tracker.tribler.org/announce'), ts) + + @db_session + def test_canonicalize_raise_on_malformed_url(self): + self.assertRaises(MalformedTrackerURLException, self.mds.TrackerState, + url='udp://tracker.tribler.org/announce/') diff --git a/Tribler/Test/Core/Modules/RestApi/Channels/__init__.py b/Tribler/Test/Core/Modules/RestApi/Channels/__init__.py deleted file mode 100644 index f01f634857e..00000000000 --- a/Tribler/Test/Core/Modules/RestApi/Channels/__init__.py +++ /dev/null @@ -1,3 +0,0 @@ -""" -This package contains tests for the channels endpoints. -""" diff --git a/Tribler/Test/Core/Modules/RestApi/Channels/test_channels_discovered_endpoint.py b/Tribler/Test/Core/Modules/RestApi/Channels/test_channels_discovered_endpoint.py deleted file mode 100644 index 3090671db3b..00000000000 --- a/Tribler/Test/Core/Modules/RestApi/Channels/test_channels_discovered_endpoint.py +++ /dev/null @@ -1,114 +0,0 @@ -from __future__ import absolute_import - -from binascii import hexlify -import json -import os - -import six -from pony.orm import db_session - -from Tribler.Core.Modules.MetadataStore.serialization import ChannelMetadataPayload -from Tribler.Core.Modules.restapi.channels.base_channels_endpoint import UNKNOWN_CHANNEL_RESPONSE_MSG -from Tribler.Test.Core.Modules.RestApi.Channels.test_channels_endpoint import AbstractTestChannelsEndpoint, \ - AbstractTestChantEndpoint -from Tribler.Test.test_as_server import TESTS_DATA_DIR -from Tribler.Test.tools import trial_timeout - - -class TestChannelsDiscoveredEndpoints(AbstractTestChannelsEndpoint): - - @trial_timeout(10) - def test_get_channel_info_non_existent(self): - """ - Testing whether the API returns error 404 if an unknown channel is queried - """ - self.should_check_equality = True - expected_json = {"error": UNKNOWN_CHANNEL_RESPONSE_MSG} - return self.do_request('channels/discovered/aabb', expected_code=404, expected_json=expected_json) - - @trial_timeout(10) - def test_get_channel_info(self): - """ - Testing whether the API returns the right JSON data if a channel overview is requested - """ - channel_json = {u'overview': {u'name': u'testname', u'description': u'testdescription', - u'identifier': six.text_type(hexlify(b'fake'))}} - self.insert_channel_in_db('fake', 3, channel_json[u'overview'][u'name'], - channel_json[u'overview'][u'description']) - - return self.do_request('channels/discovered/%s' % hexlify(b'fake'), expected_code=200, - expected_json=channel_json) - - -class TestChannelsDiscoveredChantEndpoints(AbstractTestChantEndpoint): - - @trial_timeout(10) - def test_get_discovered_chant_channel(self): - """ - Test whether we successfully retrieve a discovered chant channel - """ - - def verify_response(response): - json_response = json.loads(response) - self.assertTrue(json_response['channels']) - - 
self.create_my_channel('test', 'test') - self.should_check_equality = False - return self.do_request('channels/discovered', expected_code=200).addCallback(verify_response) - - @trial_timeout(10) - def test_create_my_channel(self): - """ - Test whether we can create a new chant channel using the API - """ - - def verify_created(_): - my_channel_id = self.session.trustchain_keypair.pub().key_to_bin() - self.assertTrue(self.session.lm.mds.ChannelMetadata.get_channel_with_id(my_channel_id)) - - post_params = {'name': 'test1', 'description': 'test'} - self.should_check_equality = False - return self.do_request('channels/discovered', expected_code=200, expected_json={}, - post_data=post_params, request_type='PUT').addCallback(verify_created) - - @trial_timeout(10) - def test_create_my_channel_twice(self): - """ - Test whether the API returns error 500 when we try to add a channel twice - """ - self.create_my_channel('test', 'test2') - post_params = {'name': 'test1', 'description': 'test'} - self.should_check_equality = False - return self.do_request('channels/discovered', expected_code=500, expected_json={}, - post_data=post_params, request_type='PUT') - - @trial_timeout(10) - def test_export_channel_mdblob(self): - """ - Test if export of a channel .mdblob through the endpoint works correctly - """ - with open(os.path.join(TESTS_DATA_DIR, 'channel.mdblob'), 'rb') as f: - mdblob = f.read() - payload = ChannelMetadataPayload.from_signed_blob(mdblob) - with db_session: - self.session.lm.mds.ChannelMetadata.from_payload(payload) - - def verify_exported_data(result): - self.assertEqual(mdblob, result) - - self.should_check_equality = False - return self.do_request('channels/discovered/%s/mdblob' % hexlify(payload.public_key), - expected_code=200, request_type='GET').addCallback(verify_exported_data) - - @trial_timeout(10) - def test_export_channel_mdblob_notfound(self): - """ - Test if export of a channel .mdblob through the endpoint works correctly - """ - with open(os.path.join(TESTS_DATA_DIR, 'channel.mdblob'), 'rb') as f: - mdblob = f.read() - payload = ChannelMetadataPayload.from_signed_blob(mdblob) - - self.should_check_equality = False - return self.do_request('channels/discovered/%s/mdblob' % hexlify(payload.public_key), - expected_code=404, request_type='GET') diff --git a/Tribler/Test/Core/Modules/RestApi/Channels/test_channels_endpoint.py b/Tribler/Test/Core/Modules/RestApi/Channels/test_channels_endpoint.py deleted file mode 100644 index 5f9bc7d9156..00000000000 --- a/Tribler/Test/Core/Modules/RestApi/Channels/test_channels_endpoint.py +++ /dev/null @@ -1,147 +0,0 @@ -from __future__ import absolute_import -from pony.orm import db_session -from six.moves import xrange -from twisted.internet.defer import inlineCallbacks - -import Tribler.Core.Utilities.json_util as json -from Tribler.Core.Modules.channel.channel import ChannelObject -from Tribler.Core.Modules.channel.channel_manager import ChannelManager -from Tribler.Core.exceptions import DuplicateChannelNameError -from Tribler.Test.Core.Modules.RestApi.base_api_test import AbstractApiTest -from Tribler.Test.Core.base_test_channel import BaseTestChannel -from Tribler.Test.tools import trial_timeout -from Tribler.pyipv8.ipv8.database import database_blob -from Tribler.pyipv8.ipv8.keyvault.crypto import default_eccrypto - - -class ChannelCommunityMock(object): - - def __init__(self, channel_id, name, description, mode): - self.cid = 'a' * 20 - self._channel_id = channel_id - self._channel_name = name - self._channel_description = 
description - self._channel_mode = mode - - def get_channel_id(self): - return self._channel_id - - def get_channel_name(self): - return self._channel_name - - def get_channel_description(self): - return self._channel_description - - def get_channel_mode(self): - return self._channel_mode - - -class AbstractTestChantEndpoint(AbstractApiTest): - - def setUpPreSession(self): - super(AbstractTestChantEndpoint, self).setUpPreSession() - self.config.set_libtorrent_enabled(True) - self.config.set_chant_enabled(True) - self.config.set_chant_channel_edit(True) - - @db_session - def create_my_channel(self, name, description): - """ - Create your channel, with a given name and description. - """ - return self.session.lm.mds.ChannelMetadata.create_channel(name, description) - - @db_session - def add_random_torrent_to_my_channel(self, name=None): - """ - Add a random torrent to your channel. - """ - return self.session.lm.mds.TorrentMetadata(title='test' if not name else name, - infohash='a' * 20) - - @db_session - def add_random_channel(self): - """ - Add a random channel to the metadata store. - :return: The metadata of the added channel. - """ - rand_key = default_eccrypto.generate_key('low') - new_channel = self.session.lm.mds.ChannelMetadata( - public_key=database_blob(rand_key.pub().key_to_bin()), title='test', tags='test') - new_channel.sign(rand_key) - return new_channel - - @db_session - def get_my_channel(self): - """ - Return the metadata object of your channel, or None if it does not exist yet. - """ - my_channel_id = self.session.trustchain_keypair.pub().key_to_bin() - return self.session.lm.mds.ChannelMetadata.get_channel_with_id(my_channel_id) - - -class AbstractTestChannelsEndpoint(AbstractApiTest, BaseTestChannel): - - @inlineCallbacks - def setUp(self): - yield super(AbstractTestChannelsEndpoint, self).setUp() - self.channel_db_handler._get_my_dispersy_cid = lambda: "myfakedispersyid" - - def vote_for_channel(self, cid, vote_time): - self.votecast_db_handler.on_votes_from_dispersy([[cid, None, 'random', 2, vote_time]]) - - def create_my_channel(self, name, description): - self.channel_db_handler._get_my_dispersy_cid = lambda: "myfakedispersyid" - self.channel_db_handler.on_channel_from_dispersy('fakedispersyid', None, name, description) - return self.channel_db_handler.getMyChannelId() - - def create_fake_channel(self, name, description, mode=u'closed'): - # Use a fake ChannelCommunity object (we don't actually want to create a Dispersy community) - my_channel_id = self.create_my_channel(name, description) - self.session.lm.channel_manager = ChannelManager(self.session) - - channel_obj = ChannelObject(self.session, ChannelCommunityMock(my_channel_id, name, description, mode)) - self.session.lm.channel_manager._channel_list.append(channel_obj) - return my_channel_id - - def create_fake_channel_with_existing_name(self, name, description, mode=u'closed'): - raise DuplicateChannelNameError(u"Channel name already exists: %s" % name) - - -class TestChannelsEndpoint(AbstractTestChannelsEndpoint): - - @trial_timeout(10) - def test_channels_unknown_endpoint(self): - """ - Testing whether the API returns an error if an unknown endpoint is queried - """ - self.should_check_equality = False - return self.do_request('channels/thisendpointdoesnotexist123', expected_code=404) - - @trial_timeout(10) - def test_get_discovered_channels_no_channels(self): - """ - Testing whether the API returns no channels when fetching discovered channels - and there are no channels in the database - """ - 
expected_json = {u'channels': []} - return self.do_request('channels/discovered', expected_code=200, expected_json=expected_json) - - @trial_timeout(10) - def test_get_discovered_channels(self): - """ - Testing whether the API returns inserted channels when fetching discovered channels - """ - self.should_check_equality = False - for i in xrange(0, 10): - self.insert_channel_in_db('rand%d' % i, 42 + i, 'Test channel %d' % i, 'Test description %d' % i) - self.insert_channel_in_db('randbad', 100, 'badterm', 'Test description bad') - - def verify_channels(channels): - channels_json = json.loads(channels)['channels'] - self.assertEqual(len(channels_json), 10) - channels_json = sorted(channels_json, key=lambda channel: channel['name']) - for ind in xrange(len(channels_json)): - self.assertEqual(channels_json[ind]['name'], 'Test channel %d' % ind) - - return self.do_request('channels/discovered', expected_code=200).addCallback(verify_channels) diff --git a/Tribler/Test/Core/Modules/RestApi/Channels/test_channels_playlist_endpoint.py b/Tribler/Test/Core/Modules/RestApi/Channels/test_channels_playlist_endpoint.py deleted file mode 100644 index 18530a7ee17..00000000000 --- a/Tribler/Test/Core/Modules/RestApi/Channels/test_channels_playlist_endpoint.py +++ /dev/null @@ -1,488 +0,0 @@ -from twisted.internet.defer import inlineCallbacks - -from Tribler.Core.Modules.restapi.channels.base_channels_endpoint import UNKNOWN_CHANNEL_RESPONSE_MSG -import Tribler.Core.Utilities.json_util as json -from Tribler.Test.Core.Modules.RestApi.Channels.test_channels_endpoint import AbstractTestChannelsEndpoint -from Tribler.Test.Core.base_test import MockObject -from Tribler.Test.tools import trial_timeout -from Tribler.dispersy.exception import CommunityNotFoundException - - -class AbstractTestChannelsPlaylistsEndpoint(AbstractTestChannelsEndpoint): - """ - This class is the base class for all playlist-related tests. 
- """ - - def create_playlist(self, channel_id, dispersy_id, peer_id, name, description): - self.channel_db_handler.on_playlist_from_dispersy(channel_id, dispersy_id, peer_id, name, description) - - def insert_torrent_into_playlist(self, playlist_disp_id, infohash): - self.channel_db_handler.on_playlist_torrent(42, playlist_disp_id, 42, infohash) - - -class TestChannelsPlaylistEndpoints(AbstractTestChannelsPlaylistsEndpoint): - - @trial_timeout(10) - def test_get_playlists_endpoint_without_channel(self): - """ - Testing whether the API returns error 404 if an unknown channel is queried for playlists - """ - self.should_check_equality = True - expected_json = {"error": UNKNOWN_CHANNEL_RESPONSE_MSG} - return self.do_request('channels/discovered/aabb/playlists', expected_code=404, expected_json=expected_json) - - @trial_timeout(10) - def test_playlists_endpoint_no_playlists(self): - """ - Testing whether the API returns the right JSON data if no playlists have been added to your channel - """ - channel_cid = 'fakedispersyid'.encode('hex') - self.create_my_channel("my channel", "this is a short description") - return self.do_request('channels/discovered/%s/playlists' % channel_cid, - expected_code=200, expected_json={"playlists": []}) - - @trial_timeout(10) - def test_playlists_endpoint(self): - """ - Testing whether the API returns the right JSON data if playlists are fetched - """ - my_channel_id = self.create_my_channel("my channel", "this is a short description") - channel_cid = 'fakedispersyid'.encode('hex') - self.create_playlist(my_channel_id, 1234, 42, "test playlist", "test description") - torrent_list = [ - [my_channel_id, 1, 1, ('a' * 40).decode('hex'), 1460000000, "ubuntu-torrent.iso", [['file1.txt', 42]], []], - [my_channel_id, 1, 1, ('b' * 40).decode('hex'), 1460000000, "badterm", [['file1.txt', 42]], []] - ] - self.insert_torrents_into_channel(torrent_list) - self.insert_torrent_into_playlist(1234, ('a' * 40).decode('hex')) - self.insert_torrent_into_playlist(1234, ('b' * 40).decode('hex')) - - def verify_playlists(results): - json_result = json.loads(results) - self.assertTrue('playlists' in json_result) - self.assertEqual(len(json_result['playlists']), 1) - self.assertTrue('torrents' in json_result['playlists'][0]) - self.assertEqual(len(json_result['playlists'][0]['torrents']), 1) - torrent = json_result['playlists'][0]['torrents'][0] - self.assertEqual(torrent['infohash'], 'a' * 40) - self.assertEqual(torrent['name'], 'ubuntu-torrent.iso') - - self.should_check_equality = False - return self.do_request('channels/discovered/%s/playlists' % channel_cid, - expected_code=200).addCallback(verify_playlists) - - @trial_timeout(10) - def test_create_playlist_no_channel(self): - """ - Testing whether the API returns error 404 if the channel does not exist when creating a playlist - """ - self.create_my_channel("my channel", "this is a short description") - post_params = {"name": "test1", "description": "test2"} - return self.do_request('channels/discovered/abcd/playlists', expected_code=404, - post_data=post_params, request_type='PUT') - - @trial_timeout(10) - def test_create_playlist_no_name(self): - """ - Testing whether the API returns error 400 if the name is missing when creating a new playlist - """ - self.create_my_channel("my channel", "this is a short description") - expected_json = {"error": "name parameter missing"} - return self.do_request('channels/discovered/%s/playlists' % 'fakedispersyid'.encode('hex'), - expected_code=400, expected_json=expected_json, 
request_type='PUT') - - @trial_timeout(10) - def test_create_playlist_no_description(self): - """ - Testing whether the API returns error 400 if the description is missing when creating a new playlist - """ - self.create_my_channel("my channel", "this is a short description") - expected_json = {"error": "description parameter missing"} - post_params = {"name": "test"} - return self.do_request('channels/discovered/%s/playlists' % 'fakedispersyid'.encode('hex'), expected_code=400, - expected_json=expected_json, post_data=post_params, request_type='PUT') - - @trial_timeout(10) - def test_create_playlist_no_cmty(self): - """ - Testing whether the API returns error 404 if the the channel community is missing when creating a new playlist - """ - self.create_my_channel("my channel", "this is a short description") - expected_json = {"error": "description parameter missing"} - post_params = {"name": "test1", "description": "test2"} - - def mocked_get_community(_): - raise CommunityNotFoundException("abcd") - - mock_dispersy = MockObject() - mock_dispersy.get_community = mocked_get_community - self.session.get_dispersy_instance = lambda: mock_dispersy - - return self.do_request('channels/discovered/%s/playlists' % 'fakedispersyid'.encode('hex'), expected_code=404, - expected_json=expected_json, post_data=post_params, request_type='PUT') - - @trial_timeout(10) - def test_create_playlist(self): - """ - Testing whether the API can create a new playlist in a given channel - """ - mock_channel_community = MockObject() - mock_channel_community.called_create = False - self.create_fake_channel("channel", "") - - def verify_playlist_created(_): - self.assertTrue(mock_channel_community.called_create) - - def create_playlist_called(name, description, _): - self.assertEqual(name, "test1") - self.assertEqual(description, "test2") - mock_channel_community.called_create = True - - mock_channel_community.create_playlist = create_playlist_called - mock_dispersy = MockObject() - mock_dispersy.get_community = lambda _: mock_channel_community - self.session.get_dispersy_instance = lambda: mock_dispersy - - expected_json = {"created": True} - post_params = {"name": "test1", "description": "test2"} - - return self.do_request('channels/discovered/%s/playlists' % 'fakedispersyid'.encode('hex'), expected_code=200, - expected_json=expected_json, post_data=post_params, request_type='PUT')\ - .addCallback(verify_playlist_created) - - -class TestChannelsModifyPlaylistsEndpoints(AbstractTestChannelsPlaylistsEndpoint): - """ - This class contains tests to verify the modification of playlists. 
- """ - - @trial_timeout(10) - def test_delete_playlist_no_channel(self): - """ - Testing whether an error 404 is returned when a playlist is removed from a non-existent channel - """ - return self.do_request('channels/discovered/abcd/playlists/1', expected_code=404, request_type='DELETE') - - @trial_timeout(10) - def test_delete_playlist_no_playlist(self): - """ - Testing whether an error 404 is returned when a non-existent playlist is removed from a channel - """ - channel_cid = 'fakedispersyid'.encode('hex') - self.create_my_channel("my channel", "this is a short description") - return self.do_request('channels/discovered/%s/playlists/1' % channel_cid, - expected_code=404, request_type='DELETE') - - @trial_timeout(10) - def test_delete_playlist_no_community(self): - """ - Testing whether an error 404 is returned when a playlist is removed from a channel without community - """ - def mocked_get_community(_): - raise CommunityNotFoundException("abcd") - - mock_dispersy = MockObject() - mock_dispersy.get_community = mocked_get_community - self.session.get_dispersy_instance = lambda: mock_dispersy - - channel_cid = 'fakedispersyid'.encode('hex') - my_channel_id = self.create_my_channel("my channel", "this is a short description") - self.create_playlist(my_channel_id, 1234, 42, "test playlist", "test description") - return self.do_request('channels/discovered/%s/playlists/1' % channel_cid, - expected_code=404, request_type='DELETE') - - @trial_timeout(10) - def test_delete_playlist(self): - """ - Testing whether a playlist is correctly removed - """ - mock_channel_community = MockObject() - mock_channel_community.called_remove = False - mock_channel_community.called_remove_torrents = False - my_channel_id = self.create_fake_channel("channel", "") - - def verify_playlist_removed(_): - self.assertTrue(mock_channel_community.called_remove_torrents) - self.assertTrue(mock_channel_community.called_remove) - - def remove_playlist_called(playlists): - self.assertEqual(playlists, [1234]) - mock_channel_community.called_remove = True - - def remove_torrents_called(playlist_id, torrents): - self.assertEqual(playlist_id, 1234) - self.assertEqual(torrents, [42]) - mock_channel_community.called_remove_torrents = True - - mock_channel_community.remove_playlists = remove_playlist_called - mock_channel_community.remove_playlist_torrents = remove_torrents_called - mock_dispersy = MockObject() - mock_dispersy.get_community = lambda _: mock_channel_community - self.session.get_dispersy_instance = lambda: mock_dispersy - - # Create a playlist and add a torrent to it - self.create_playlist(my_channel_id, 1234, 42, "test playlist", "test description") - torrent_list = [[my_channel_id, 1, 1, ('a' * 40).decode('hex'), 1460000000, "ubuntu-torrent.iso", - [['file1.txt', 42]], []]] - self.insert_torrents_into_channel(torrent_list) - self.insert_torrent_into_playlist(1234, ('a' * 40).decode('hex')) - - return self.do_request('channels/discovered/%s/playlists/1' % 'fakedispersyid'.encode('hex'), - expected_code=200, expected_json={"removed": True}, - request_type='DELETE').addCallback(verify_playlist_removed) - - @trial_timeout(10) - def test_edit_playlist_no_name(self): - """ - Testing whether an error 400 is returned when a playlist is edit without a name parameter passed - """ - post_params = {'description': 'test'} - expected_json = {'error': 'name parameter missing'} - return self.do_request('channels/discovered/abcd/playlists/1', expected_code=400, - post_data=post_params, request_type='POST', 
expected_json=expected_json) - - @trial_timeout(10) - def test_edit_playlist_no_description(self): - """ - Testing whether an error 400 is returned when a playlist is edit without a description parameter passed - """ - post_params = {'name': 'test'} - expected_json = {'error': 'description parameter missing'} - return self.do_request('channels/discovered/abcd/playlists/1', expected_code=400, - post_data=post_params, request_type='POST', expected_json=expected_json) - - @trial_timeout(10) - def test_edit_playlist_no_channel(self): - """ - Testing whether an error 404 is returned when a playlist is edit from a non-existent channel - """ - post_params = {'name': 'test', 'description': 'test'} - return self.do_request('channels/discovered/abcd/playlists/1', expected_code=404, - post_data=post_params, request_type='POST') - - @trial_timeout(10) - def test_edit_playlist_no_playlist(self): - """ - Testing whether an error 404 is returned when a non-existent playlist is edited - """ - post_params = {'name': 'test', 'description': 'test'} - channel_cid = 'fakedispersyid'.encode('hex') - self.create_my_channel("my channel", "this is a short description") - return self.do_request('channels/discovered/%s/playlists/1' % channel_cid, - expected_code=404, request_type='POST', post_data=post_params) - - @trial_timeout(10) - def test_edit_playlist_no_community(self): - """ - Testing whether an error 404 is returned when a playlist is edited from a channel without community - """ - def mocked_get_community(_): - raise CommunityNotFoundException("abcd") - - mock_dispersy = MockObject() - mock_dispersy.get_community = mocked_get_community - self.session.get_dispersy_instance = lambda: mock_dispersy - - post_params = {'name': 'test', 'description': 'test'} - channel_cid = 'fakedispersyid'.encode('hex') - my_channel_id = self.create_my_channel("my channel", "this is a short description") - self.create_playlist(my_channel_id, 1234, 42, "test playlist", "test description") - return self.do_request('channels/discovered/%s/playlists/1' % channel_cid, - expected_code=404, request_type='POST', post_data=post_params) - - @trial_timeout(10) - def test_edit_playlist(self): - """ - Testing whether a playlist is correctly modified - """ - mock_channel_community = MockObject() - mock_channel_community.called_modify = False - my_channel_id = self.create_fake_channel("channel", "") - - def verify_playlist_modified(_): - self.assertTrue(mock_channel_community.called_modify) - - def modify_playlist_called(playlist_id, modifications): - self.assertEqual(playlist_id, 1) - self.assertEqual(modifications['name'], 'test') - self.assertEqual(modifications['description'], 'test') - mock_channel_community.called_modify = True - - mock_channel_community.modifyPlaylist = modify_playlist_called - mock_dispersy = MockObject() - mock_dispersy.get_community = lambda _: mock_channel_community - self.session.get_dispersy_instance = lambda: mock_dispersy - - self.create_playlist(my_channel_id, 1234, 42, "test playlist", "test description") - - post_params = {'name': 'test', 'description': 'test'} - return self.do_request('channels/discovered/%s/playlists/1' % 'fakedispersyid'.encode('hex'), - expected_code=200, expected_json={"modified": True}, post_data=post_params, - request_type='POST').addCallback(verify_playlist_modified) - - -class TestChannelsModifyPlaylistsAddTorrentEndpoints(AbstractTestChannelsPlaylistsEndpoint): - """ - This class contains tests to verify the addition of torrents to playlists. 
- """ - @trial_timeout(10) - def test_add_torrent_no_channel(self): - """ - Testing whether an error 404 is returned when a torrent is added to a playlist with a non-existent channel - """ - return self.do_request('channels/discovered/abcd/playlists/1/abcd', expected_code=404, request_type='PUT') - - @trial_timeout(10) - def test_add_torrent_no_playlist(self): - """ - Testing whether an error 404 is returned when a torrent is added to a non-existing playlist - """ - mock_channel_community = MockObject() - mock_channel_community.called_add = False - self.create_fake_channel("channel", "") - - mock_dispersy = MockObject() - mock_dispersy.get_community = lambda _: mock_channel_community - self.session.get_dispersy_instance = lambda: mock_dispersy - - channel_cid = 'fakedispersyid'.encode('hex') - self.create_my_channel("my channel", "this is a short description") - return self.do_request('channels/discovered/%s/playlists/1/abcd' % channel_cid, - expected_code=404, request_type='PUT') - - @trial_timeout(10) - def test_add_torrent_no_community(self): - """ - Testing whether an error 404 is returned when a torrent is added to a playlist without channel community - """ - def mocked_get_community(_): - raise CommunityNotFoundException("abcd") - - mock_dispersy = MockObject() - mock_dispersy.get_community = mocked_get_community - self.session.get_dispersy_instance = lambda: mock_dispersy - - channel_cid = 'fakedispersyid'.encode('hex') - my_channel_id = self.create_my_channel("my channel", "this is a short description") - self.create_playlist(my_channel_id, 1234, 42, "test playlist", "test description") - return self.do_request('channels/discovered/%s/playlists/1/abcd' % channel_cid, - expected_code=404, request_type='PUT') - - @trial_timeout(15) - @inlineCallbacks - def test_add_torrent_playlist(self): - """ - Testing whether a torrent can successfully be added to a playlist - """ - mock_channel_community = MockObject() - mock_channel_community.called_add = False - my_channel_id = self.create_fake_channel("channel", "") - - def verify_torrent_added(_): - self.assertTrue(mock_channel_community.called_add) - - def modify_add_called(playlist_id, torrents): - self.assertEqual(playlist_id, 1) - self.assertEqual(torrents, [('a' * 40).decode('hex')]) - mock_channel_community.called_add = True - - mock_channel_community.create_playlist_torrents = modify_add_called - mock_dispersy = MockObject() - mock_dispersy.get_community = lambda _: mock_channel_community - self.session.get_dispersy_instance = lambda: mock_dispersy - - self.create_playlist(my_channel_id, 1234, 42, "test playlist", "test description") - - yield self.do_request('channels/discovered/%s/playlists/1/abcd' % 'fakedispersyid'.encode('hex'), - expected_code=404, request_type='PUT') - - torrent_list = [[my_channel_id, 1, 1, ('a' * 40).decode('hex'), 1460000000, "ubuntu-torrent.iso", - [['file1.txt', 42]], []]] - self.insert_torrents_into_channel(torrent_list) - - yield self.do_request('channels/discovered/%s/playlists/1/%s' % ('fakedispersyid'.encode('hex'), 'a' * 40), - expected_code=200, expected_json={'added': True}, request_type='PUT')\ - .addCallback(verify_torrent_added) - - self.insert_torrent_into_playlist(1234, ('a' * 40).decode('hex')) - - yield self.do_request('channels/discovered/%s/playlists/1/%s' % ('fakedispersyid'.encode('hex'), 'a' * 40), - expected_code=409, request_type='PUT') - - -class TestChannelsModifyPlaylistsRemoveTorrentEndpoints(AbstractTestChannelsPlaylistsEndpoint): - """ - This class contains tests to verify the 
removal of torrents from playlists. - """ - - @trial_timeout(10) - def test_delete_torrent_no_channel(self): - """ - Testing whether an error 404 is returned when a torrent from a playlist is removed from a non-existent channel - """ - return self.do_request('channels/discovered/abcd/playlists/1/abcd', expected_code=404, request_type='DELETE') - - @trial_timeout(10) - def test_delete_torrent_no_playlist(self): - """ - Testing whether an error 404 is returned when a torrent from a playlist is removed from a non-existent playlist - """ - channel_cid = 'fakedispersyid'.encode('hex') - self.create_my_channel("my channel", "this is a short description") - return self.do_request('channels/discovered/%s/playlists/1/abcd' % channel_cid, - expected_code=404, request_type='DELETE') - - @trial_timeout(10) - def test_remove_torrent_no_community(self): - """ - Testing whether an error 404 is returned when a torrent from a playlist without channel community - """ - def mocked_get_community(_): - raise CommunityNotFoundException("abcd") - - mock_dispersy = MockObject() - mock_dispersy.get_community = mocked_get_community - self.session.get_dispersy_instance = lambda: mock_dispersy - - channel_cid = 'fakedispersyid'.encode('hex') - my_channel_id = self.create_my_channel("my channel", "this is a short description") - self.create_playlist(my_channel_id, 1234, 42, "test playlist", "test description") - return self.do_request('channels/discovered/%s/playlists/1/abcd' % channel_cid, - expected_code=404, request_type='DELETE') - - @trial_timeout(15) - @inlineCallbacks - def test_remove_torrent_playlist(self): - """ - Testing whether a torrent can be successfully removed from a playlist - """ - mock_channel_community = MockObject() - mock_channel_community.called_remove = False - my_channel_id = self.create_fake_channel("channel", "") - - def verify_torrent_removed(_): - self.assertTrue(mock_channel_community.called_remove) - - def modify_remove_called(playlist_id, torrents): - self.assertEqual(playlist_id, 1) - self.assertEqual(torrents, [42]) - mock_channel_community.called_remove = True - - mock_channel_community.remove_playlist_torrents = modify_remove_called - mock_dispersy = MockObject() - mock_dispersy.get_community = lambda _: mock_channel_community - self.session.get_dispersy_instance = lambda: mock_dispersy - - self.create_playlist(my_channel_id, 1234, 42, "test playlist", "test description") - - yield self.do_request('channels/discovered/%s/playlists/1/abcd' % 'fakedispersyid'.encode('hex'), - expected_code=404, request_type='DELETE') - - torrent_list = [[my_channel_id, 1, 1, ('a' * 40).decode('hex'), 1460000000, "ubuntu-torrent.iso", - [['file1.txt', 42]], []]] - self.insert_torrents_into_channel(torrent_list) - self.insert_torrent_into_playlist(1234, ('a' * 40).decode('hex')) - - yield self.do_request('channels/discovered/%s/playlists/1/%s' % ('fakedispersyid'.encode('hex'), 'a' * 40), - expected_code=200, request_type='DELETE', expected_json={'removed': True})\ - .addCallback(verify_torrent_removed) diff --git a/Tribler/Test/Core/Modules/RestApi/Channels/test_channels_popular_endpoint.py b/Tribler/Test/Core/Modules/RestApi/Channels/test_channels_popular_endpoint.py deleted file mode 100644 index ca4e039712e..00000000000 --- a/Tribler/Test/Core/Modules/RestApi/Channels/test_channels_popular_endpoint.py +++ /dev/null @@ -1,40 +0,0 @@ -from __future__ import absolute_import -from six.moves import xrange -from twisted.internet.defer import inlineCallbacks - -import Tribler.Core.Utilities.json_util as 
json -from Tribler.Test.Core.Modules.RestApi.Channels.test_channels_endpoint import AbstractTestChannelsEndpoint -from Tribler.Test.tools import trial_timeout - - -class TestChannelsPlaylistEndpoints(AbstractTestChannelsEndpoint): - - @trial_timeout(10) - @inlineCallbacks - def test_popular_channels_endpoint(self): - """ - Testing whether the API returns some popular channels if the are queried - """ - def verify_channels_limit(results): - json_results = json.loads(results) - self.assertEqual(len(json_results['channels']), 5) - - def verify_channels(results): - json_results = json.loads(results) - self.assertEqual(len(json_results['channels']), 9) - - for i in xrange(0, 9): - self.insert_channel_in_db('rand%d' % i, 42 + i, 'Test channel %d' % i, 'Test description %d' % i) - - self.insert_channel_in_db('badterm1', 200, 'badterm', 'Test description badterm') - self.should_check_equality = False - yield self.do_request('channels/popular?limit=5', expected_code=200).addCallback(verify_channels_limit) - yield self.do_request('channels/popular', expected_code=200).addCallback(verify_channels) - - @trial_timeout(10) - def test_popular_channels_limit_neg(self): - """ - Testing whether error 400 is returned when a negative limit is passed to the request to fetch popular channels - """ - expected_json = {"error": "the limit parameter must be a positive number"} - return self.do_request('channels/popular?limit=-5', expected_code=400, expected_json=expected_json) diff --git a/Tribler/Test/Core/Modules/RestApi/Channels/test_channels_rss_endpoint.py b/Tribler/Test/Core/Modules/RestApi/Channels/test_channels_rss_endpoint.py deleted file mode 100644 index ba6d9580831..00000000000 --- a/Tribler/Test/Core/Modules/RestApi/Channels/test_channels_rss_endpoint.py +++ /dev/null @@ -1,178 +0,0 @@ -import os - -from twisted.internet.defer import fail -from Tribler.Core.Modules.channel.channel_manager import ChannelManager -from Tribler.Core.Modules.restapi.channels.base_channels_endpoint import UNKNOWN_CHANNEL_RESPONSE_MSG, \ - UNAUTHORIZED_RESPONSE_MSG -from Tribler.Test.Core.Modules.RestApi.Channels.test_channels_endpoint import AbstractTestChannelsEndpoint -from Tribler.Test.test_as_server import TESTS_DATA_DIR -from Tribler.Test.tools import trial_timeout - - -class TestChannelsRssEndpoints(AbstractTestChannelsEndpoint): - - @trial_timeout(10) - def test_rss_feeds_endpoint_with_channel(self): - """ - Testing whether the API returns the right JSON data if a rss feeds from a channel are fetched - """ - expected_json = {u'rssfeeds': [{u'url': u'http://test1.com/feed.xml'}, {u'url': u'http://test2.com/feed.xml'}]} - channel_name = "my channel" - self.create_fake_channel(channel_name, "this is a short description") - channel_obj = self.session.lm.channel_manager.get_channel(channel_name) - for rss_item in expected_json[u'rssfeeds']: - channel_obj.create_rss_feed(rss_item[u'url']) - - return self.do_request('channels/discovered/%s/rssfeeds' % 'fakedispersyid'.encode('hex'), - expected_code=200, expected_json=expected_json) - - @trial_timeout(10) - def test_add_rss_feed_no_my_channel(self): - """ - Testing whether the API returns a 404 if no channel has been created when adding a rss feed - """ - self.session.lm.channel_manager = ChannelManager(self.session) - channel_cid = 'fakedispersyid'.encode('hex') - expected_json = {"error": UNKNOWN_CHANNEL_RESPONSE_MSG} - return self.do_request('channels/discovered/' + channel_cid + - '/rssfeeds/http%3A%2F%2Frssfeed.com%2Frss.xml', - expected_code=404, 
expected_json=expected_json, request_type='PUT') - - @trial_timeout(10) - def test_add_rss_feed_conflict(self): - """ - Testing whether the API returns error 409 if a channel the rss feed already exists - """ - expected_json = {"error": "this rss feed already exists"} - my_channel_id = self.create_fake_channel("my channel", "this is a short description") - channel_obj = self.session.lm.channel_manager.get_my_channel(my_channel_id) - channel_obj.create_rss_feed("http://rssfeed.com/rss.xml") - - return self.do_request('channels/discovered/' + 'fakedispersyid'.encode('hex') + - '/rssfeeds/http%3A%2F%2Frssfeed.com%2Frss.xml', expected_code=409, - expected_json=expected_json, request_type='PUT') - - @trial_timeout(10) - def test_add_rss_feed_with_channel(self): - """ - Testing whether the API returns a 200 if a channel has been created and when adding a rss feed - """ - - def verify_rss_added(_): - channel_obj = self.session.lm.channel_manager.get_my_channel(my_channel_id) - self.assertEqual(channel_obj.get_rss_feed_url_list(), ["http://rssfeed.com/rss.xml"]) - - expected_json = {"added": True} - my_channel_id = self.create_fake_channel("my channel", "this is a short description") - return self.do_request('channels/discovered/' + 'fakedispersyid'.encode('hex') + - '/rssfeeds/http%3A%2F%2Frssfeed.com%2Frss.xml', expected_code=200, - expected_json=expected_json, request_type='PUT')\ - .addCallback(verify_rss_added) - - @trial_timeout(10) - def test_remove_rss_feed_no_channel(self): - """ - Testing whether the API returns a 404 if no channel has been removed when adding a rss feed - """ - self.session.lm.channel_manager = ChannelManager(self.session) - expected_json = {"error": UNKNOWN_CHANNEL_RESPONSE_MSG} - return self.do_request('channels/discovered/' + 'fakedispersyid'.encode('hex') + - '/rssfeeds/http%3A%2F%2Frssfeed.com%2Frss.xml', - expected_code=404, expected_json=expected_json, request_type='DELETE') - - @trial_timeout(10) - def test_remove_rss_feed_invalid_url(self): - """ - Testing whether the API returns a 404 and error if the url parameter does not exist in the existing feeds - """ - expected_json = {"error": "this url is not added to your RSS feeds"} - self.create_fake_channel("my channel", "this is a short description") - return self.do_request('channels/discovered/' + 'fakedispersyid'.encode('hex') + - '/rssfeeds/http%3A%2F%2Frssfeed.com%2Frss.xml', expected_code=404, - expected_json=expected_json, request_type='DELETE') - - @trial_timeout(10) - def test_remove_rss_feed_with_channel(self): - """ - Testing whether the API returns a 200 if a channel has been created and when removing a rss feed - """ - def verify_rss_removed(_): - channel_obj = self.session.lm.channel_manager.get_my_channel(my_channel_id) - self.assertEqual(channel_obj.get_rss_feed_url_list(), []) - - expected_json = {"removed": True} - my_channel_id = self.create_fake_channel("my channel", "this is a short description") - channel_obj = self.session.lm.channel_manager.get_my_channel(my_channel_id) - channel_obj.create_rss_feed("http://rssfeed.com/rss.xml") - - return self.do_request('channels/discovered/' + 'fakedispersyid'.encode('hex') + - '/rssfeeds/http%3A%2F%2Frssfeed.com%2Frss.xml', expected_code=200, - expected_json=expected_json, request_type='DELETE').addCallback(verify_rss_removed) - - @trial_timeout(10) - def test_recheck_rss_feeds_no_channel(self): - """ - Testing whether the API returns a 404 if no channel has been created when rechecking rss feeds - """ - self.session.lm.channel_manager = 
ChannelManager(self.session) - expected_json = {"error": UNKNOWN_CHANNEL_RESPONSE_MSG} - return self.do_request('channels/discovered/%s/recheckfeeds' % 'fakedispersyid'.encode('hex'), - expected_code=404, expected_json=expected_json, request_type='POST') - - @trial_timeout(10) - def test_recheck_rss_feeds(self): - """ - Testing whether the API returns a 200 if the rss feeds are rechecked in your channel - """ - expected_json = {"rechecked": True} - my_channel_id = self.create_fake_channel("my channel", "this is a short description") - channel_obj = self.session.lm.channel_manager.get_my_channel(my_channel_id) - channel_obj._is_created = True - channel_obj.create_rss_feed(os.path.join(TESTS_DATA_DIR, 'test_rss_empty.xml')) - - return self.do_request('channels/discovered/%s/recheckfeeds' % 'fakedispersyid'.encode('hex'), - expected_code=200, expected_json=expected_json, request_type='POST') - - @trial_timeout(10) - def test_recheck_rss_feeds_error(self): - """ - Testing whether the API returns error 500 if refresh of rss feeds is failing - """ - my_channel_id = self.create_fake_channel("my channel", "this is a short description") - channel_obj = self.session.lm.channel_manager.get_my_channel(my_channel_id) - channel_obj._is_created = True - channel_obj.create_rss_feed(os.path.join(TESTS_DATA_DIR, 'test_rss_empty.xml')) - - def mocked_refresh_all_feeds(): - return fail(RuntimeError("test fail")) - - channel_obj.refresh_all_feeds = mocked_refresh_all_feeds - - self.should_check_equality = False - return self.do_request('channels/discovered/%s/recheckfeeds' % 'fakedispersyid'.encode('hex'), - expected_code=500, request_type='POST') - - @trial_timeout(10) - def test_get_rss_feed_no_authorization(self): - """ - Testing whether the API returns unauthorized error if attempting to recheck feeds in another channel - """ - self.channel_db_handler.on_channel_from_dispersy('fake', 3, 'test name', 'test description') - - expected_json = {"error": UNAUTHORIZED_RESPONSE_MSG} - - return self.do_request('channels/discovered/%s/rssfeeds' % 'fake'.encode('hex'), - expected_code=401, expected_json=expected_json, request_type='GET') - - @trial_timeout(10) - def test_get_rss_feed_no_channel_obj(self): - """ - Testing whether the API returns error 404 if no channel object exists in the channel manager - """ - self.create_fake_channel("my channel", "this is a short description") - self.session.lm.channel_manager._channel_list = [] - - expected_json = {"error": UNKNOWN_CHANNEL_RESPONSE_MSG} - - return self.do_request('channels/discovered/%s/rssfeeds' % 'fakedispersyid'.encode('hex'), - expected_code=404, expected_json=expected_json, request_type='GET') diff --git a/Tribler/Test/Core/Modules/RestApi/Channels/test_channels_subscription_endpoint.py b/Tribler/Test/Core/Modules/RestApi/Channels/test_channels_subscription_endpoint.py deleted file mode 100644 index 8a575f8d3a3..00000000000 --- a/Tribler/Test/Core/Modules/RestApi/Channels/test_channels_subscription_endpoint.py +++ /dev/null @@ -1,245 +0,0 @@ -from __future__ import absolute_import - -from binascii import hexlify -import time - -from pony.orm import db_session -import six -from six.moves import xrange -from twisted.internet.defer import succeed, fail, inlineCallbacks -from twisted.python.failure import Failure - -from Tribler.Core.Modules.restapi import VOTE_SUBSCRIBE, VOTE_UNSUBSCRIBE -from Tribler.Core.Modules.restapi.channels.base_channels_endpoint import UNKNOWN_CHANNEL_RESPONSE_MSG -from 
Tribler.Core.Modules.restapi.channels.channels_subscription_endpoint import ALREADY_SUBSCRIBED_RESPONSE_MSG, \ - NOT_SUBSCRIBED_RESPONSE_MSG, ChannelsModifySubscriptionEndpoint -from Tribler.Test.Core.Modules.RestApi.Channels.test_channels_endpoint import AbstractTestChannelsEndpoint, \ - AbstractTestChantEndpoint -from Tribler.Test.tools import trial_timeout - - -class TestChannelsSubscriptionEndpoint(AbstractTestChannelsEndpoint): - - @inlineCallbacks - def setUp(self): - """ - The startup method of this class creates a fake Dispersy instance with a fake AllChannel community. It also - inserts some random channels so we have some data to work with. - """ - yield super(TestChannelsSubscriptionEndpoint, self).setUp() - self.expected_votecast_cid = None - self.expected_votecast_vote = None - self.create_votecast_called = False - - fake_community = self.create_fake_allchannel_community() - fake_community.disp_create_votecast = self.on_dispersy_create_votecast - self.session.config.get_dispersy_enabled = lambda: True - self.session.lm.dispersy.attach_community(fake_community) - for i in xrange(0, 10): - self.insert_channel_in_db('rand%d' % i, 42 + i, 'Test channel %d' % i, 'Test description %d' % i) - - def on_dispersy_create_votecast(self, cid, vote, _): - """ - Check whether we have the expected parameters when this method is called. - """ - self.assertEqual(cid, self.expected_votecast_cid) - self.assertEqual(vote, self.expected_votecast_vote) - self.create_votecast_called = True - return succeed(None) - - @trial_timeout(10) - def test_subscribe_channel_already_subscribed(self): - """ - Testing whether the API returns error 409 when subscribing to an already subscribed channel - """ - cid = self.insert_channel_in_db('rand1', 42, 'Test channel', 'Test description') - self.vote_for_channel(cid, int(time.time())) - expected_json = {"error": ALREADY_SUBSCRIBED_RESPONSE_MSG} - - return self.do_request('channels/subscribed/%s' % hexlify(b'rand1'), - expected_code=409, expected_json=expected_json, request_type='PUT') - - @trial_timeout(10) - def test_subscribe_channel(self): - """ - Testing whether the API creates a request in the AllChannel community when subscribing to a channel - """ - - def verify_votecast_made(_): - self.assertTrue(self.create_votecast_called) - - expected_json = {"subscribed": True} - self.expected_votecast_cid = 'rand1' - self.expected_votecast_vote = VOTE_SUBSCRIBE - return self.do_request('channels/subscribed/%s' % hexlify(b'rand1'), expected_code=200, - expected_json=expected_json, request_type='PUT').addCallback(verify_votecast_made) - - @trial_timeout(10) - def test_sub_channel_throw_error(self): - """ - Testing whether an error is returned when we subscribe to a channel and an error pops up - """ - - def mocked_vote(*_): - return fail(Failure(RuntimeError("error"))) - - mod_sub_endpoint = ChannelsModifySubscriptionEndpoint(self.session, '') - mod_sub_endpoint.vote_for_channel = mocked_vote - subscribed_endpoint = self.session.lm.api_manager.root_endpoint.children['channels'].children["subscribed"] - subscribed_endpoint.getChild = lambda *_: mod_sub_endpoint - - self.should_check_equality = False - return self.do_request('channels/subscribed/', expected_code=500, request_type='PUT') - - @trial_timeout(10) - def test_unsubscribe_channel_not_exist(self): - """ - Testing whether the API returns an error when unsubscribing if the channel with the specified CID does not exist - """ - expected_json = {"error": UNKNOWN_CHANNEL_RESPONSE_MSG} - return 
self.do_request('channels/subscribed/abcdef', expected_code=404, expected_json=expected_json, - request_type='DELETE') - - @trial_timeout(10) - def test_unsubscribe_channel_not_subscribed(self): - """ - Testing whether the API returns error 404 when unsubscribing from an already unsubscribed channel - """ - expected_json = {"error": NOT_SUBSCRIBED_RESPONSE_MSG} - self.insert_channel_in_db('rand1', 42, 'Test channel', 'Test description') - return self.do_request('channels/subscribed/%s' % hexlify(b'rand1'), - expected_code=404, expected_json=expected_json, request_type='DELETE') - - @trial_timeout(10) - def test_get_subscribed_channels_no_subscriptions(self): - """ - Testing whether the API returns no channels when you have not subscribed to any channel - """ - expected_json = {"subscribed": []} - return self.do_request('channels/subscribed', expected_code=200, expected_json=expected_json) - - @trial_timeout(10) - def test_get_subscribed_channels_one_subscription(self): - """ - Testing whether the API returns the right channel when subscribed to one channel - """ - expected_json = {u'subscribed': [{u'description': u'This is a description', u'id': -1, - u'dispersy_cid': six.text_type(hexlify(b'rand')), - u'modified': int(time.time()), - u'name': u'Test Channel', u'spam': 0, - u'subscribed': True, u'torrents': 0, u'votes': 0}]} - - cid = self.insert_channel_in_db('rand', 42, expected_json[u'subscribed'][0][u'name'], - expected_json[u'subscribed'][0][u'description']) - expected_json[u'subscribed'][0][u'id'] = cid - self.vote_for_channel(cid, expected_json[u'subscribed'][0][u'modified']) - return self.do_request('channels/subscribed', expected_code=200, expected_json=expected_json) - - @trial_timeout(10) - def test_unsubscribe_channel(self): - """ - Testing whether the API creates a request in the AllChannel community when unsubscribing from a channel - """ - - def verify_votecast_made(_): - self.assertTrue(self.create_votecast_called) - - cid = self.insert_channel_in_db('rand1', 42, 'Test channel', 'Test description') - self.vote_for_channel(cid, int(time.time())) - - expected_json = {"unsubscribed": True} - self.expected_votecast_cid = 'rand1' - self.expected_votecast_vote = VOTE_UNSUBSCRIBE - return self.do_request('channels/subscribed/%s' % hexlify(b'rand1'), expected_code=200, - expected_json=expected_json, request_type='DELETE').addCallback(verify_votecast_made) - - @trial_timeout(10) - def test_is_channel_subscribed(self): - """ - Testing the subscription status of channel - """ - cid = self.insert_channel_in_db('rand1', 42, 'Test channel', 'Test description') - self.vote_for_channel(cid, int(time.time())) - - expected_json = {"subscribed": True, "votes": 0} # here votes represent previous dispersy votes which is zero - return self.do_request('channels/subscribed/%s' % hexlify(b'rand1'), expected_code=200, - expected_json=expected_json, request_type='GET') - - @trial_timeout(10) - def test_subscribed_status_of_non_existing_channel(self): - """ - Testing the subscription status of non-existing channel - """ - expected_json = {"error": UNKNOWN_CHANNEL_RESPONSE_MSG} - return self.do_request('channels/subscribed/deadbeef', expected_code=404, expected_json=expected_json, - request_type='GET') - - -class TestChannelsSubscriptionChantEndpoint(AbstractTestChantEndpoint): - - @trial_timeout(10) - def test_subscribe(self): - """ - Test subscribing to a (random) chant channel with the API - """ - random_channel = self.add_random_channel() - random_channel_id = hexlify(random_channel.public_key) - - 
def verify_response(_): - updated_channel = self.session.lm.mds.ChannelMetadata.get_channel_with_id(random_channel.public_key) - self.assertTrue(updated_channel.subscribed) - - self.should_check_equality = False - return self.do_request('channels/subscribed/%s' % random_channel_id, expected_code=200, request_type='PUT') \ - .addCallback(verify_response) - - @trial_timeout(10) - def test_subscribe_twice(self): - """ - Test whether an error is raised when subscribing to a channel we are already subscribed to - """ - with db_session: - random_channel = self.add_random_channel() - random_channel.subscribed = True - random_channel_id = hexlify(random_channel.public_key) - - self.should_check_equality = False - return self.do_request('channels/subscribed/%s' % random_channel_id, expected_code=409, request_type='PUT') - - @trial_timeout(10) - def test_subscribe_unknown_channel(self): - """ - Test whether an error is raised when subscribing to an unknown channel - """ - self.should_check_equality = False - return self.do_request('channels/subscribed/aaaa', expected_code=404, request_type='PUT') - - @trial_timeout(10) - def test_get_subscribed_channels_no_subscriptions(self): - """ - Testing whether the API returns no channels when you have not subscribed to any channel - """ - expected_json = {"subscribed": []} - return self.do_request('channels/subscribed', expected_code=200, expected_json=expected_json) - - @trial_timeout(10) - def test_get_subscribed_channels_one_subscription(self): - """ - Testing whether the API returns the right channel when subscribed to one channel - """ - with db_session: - md = self.session.lm.mds.ChannelMetadata(title="Test channel", subscribed=True) - title = md.title - cid = hexlify(md.public_key) - version = md.version - subscribed = md.subscribed - torrents = md.size - votes = md.votes - tags = md.tags - expected_json = {u'subscribed': [{u'description': six.text_type(tags), u'id': 0, - u'dispersy_cid': six.text_type(cid), - u'modified': version, - u'name': six.text_type(title), u'spam': 0, - u'subscribed': subscribed, u'torrents': torrents, u'votes': votes}]} - - return self.do_request('channels/subscribed', expected_code=200, expected_json=expected_json) diff --git a/Tribler/Test/Core/Modules/RestApi/Channels/test_channels_torrents_endpoint.py b/Tribler/Test/Core/Modules/RestApi/Channels/test_channels_torrents_endpoint.py deleted file mode 100644 index a4b9397c4c4..00000000000 --- a/Tribler/Test/Core/Modules/RestApi/Channels/test_channels_torrents_endpoint.py +++ /dev/null @@ -1,614 +0,0 @@ -import base64 -import os -import shutil -import urllib - -from pony.orm import db_session - -from Tribler.Core.exceptions import HttpError -from Tribler.Test.tools import trial_timeout -from twisted.internet.defer import inlineCallbacks - -from Tribler.Core.TorrentDef import TorrentDef -import Tribler.Core.Utilities.json_util as json -from Tribler.Core.Utilities.network_utils import get_random_port -from Tribler.Test.Core.Modules.RestApi.Channels.test_channels_endpoint import AbstractTestChannelsEndpoint, \ - AbstractTestChantEndpoint -from Tribler.Test.Core.base_test import MockObject -from Tribler.Test.common import TORRENT_UBUNTU_FILE -from Tribler.dispersy.exception import CommunityNotFoundException -from Tribler.Test.Core.Modules.MetadataStore.test_channel_download import CHANNEL_DIR, CHANNEL_METADATA - - -class TestChannelTorrentsEndpoint(AbstractTestChannelsEndpoint): - - @trial_timeout(10) - def test_get_torrents_in_channel_invalid_cid(self): - """ - Testing whether the 
API returns error 404 if a non-existent channel is queried for torrents - """ - self.should_check_equality = False - return self.do_request('channels/discovered/abcd/torrents', expected_code=404) - - @trial_timeout(15) - @inlineCallbacks - def test_get_torrents_in_channel(self): - """ - Testing whether the API returns inserted torrents when fetching discovered channels, with and without the family filter - """ - def verify_torrents_filter(torrents): - torrents_json = json.loads(torrents) - self.assertEqual(len(torrents_json['torrents']), 1) - self.assertEqual(torrents_json['torrents'][0]['infohash'], 'a' * 40) - - def verify_torrents_no_filter(torrents): - torrents_json = json.loads(torrents) - self.assertEqual(len(torrents_json['torrents']), 2) - - self.should_check_equality = False - channel_id = self.insert_channel_in_db('rand', 42, 'Test channel', 'Test description') - - torrent_list = [ - [channel_id, 1, 1, ('a' * 40).decode('hex'), 1460000000, "ubuntu-torrent.iso", [['file1.txt', 42]], []], - [channel_id, 1, 1, ('b' * 40).decode('hex'), 1460000000, "badterm", [['file1.txt', 42]], []] - ] - self.insert_torrents_into_channel(torrent_list) - - yield self.do_request('channels/discovered/%s/torrents' % 'rand'.encode('hex'), expected_code=200)\ - .addCallback(verify_torrents_filter) - yield self.do_request('channels/discovered/%s/torrents?disable_filter=1' % 'rand'.encode('hex'), - expected_code=200).addCallback(verify_torrents_no_filter) - - @trial_timeout(10) - def test_add_torrent_to_channel(self): - """ - Testing whether adding a torrent to your channel works - """ - my_channel_id = self.create_fake_channel("channel", "") - torrent_path = TORRENT_UBUNTU_FILE - - def verify_method_invocation(channel_id, torrent_def, extra_info=None, forward=True): - self.assertEqual(my_channel_id, channel_id) - self.assertEqual(TorrentDef.load(torrent_path), torrent_def) - self.assertEqual({}, extra_info or {}) - self.assertEqual(True, forward) - - self.session.add_torrent_def_to_channel = verify_method_invocation - - with open(torrent_path, mode='rb') as torrent_file: - torrent_64 = base64.b64encode(torrent_file.read()) - - post_data = { - "torrent": torrent_64 - } - expected_json = {"added": True} - return self.do_request('channels/discovered/%s/torrents' % 'fakedispersyid'.encode('hex'), 200, - expected_json, 'PUT', post_data) - - @trial_timeout(10) - def test_add_torrent_to_channel_with_description(self): - """ - Testing whether adding a torrent with a description to a channel works - """ - my_channel_id = self.create_fake_channel("channel", "") - torrent_path = TORRENT_UBUNTU_FILE - - def verify_method_invocation(channel_id, torrent_def, extra_info=None, forward=True): - self.assertEqual(my_channel_id, channel_id) - self.assertEqual(TorrentDef.load(torrent_path), torrent_def) - self.assertEqual({"description": "video of my cat"}, extra_info or {}) - self.assertEqual(True, forward) - - self.session.add_torrent_def_to_channel = verify_method_invocation - - with open(torrent_path, mode='rb') as torrent_file: - torrent_64 = base64.b64encode(torrent_file.read()) - - post_data = { - "torrent": torrent_64, - "description": "video of my cat" - } - expected_json = {"added": True} - return self.do_request('channels/discovered/%s/torrents' % 'fakedispersyid'.encode('hex'), - 200, expected_json, 'PUT', post_data) - - @trial_timeout(10) - def test_add_torrent_to_channel_404(self): - """ - Testing whether adding a torrent to a non-existing channel returns error 404 - """ - return
self.do_request('channels/discovered/%s/torrents' % 'fakedispersyid'.encode('hex'), - expected_code=404, request_type='PUT') - - @trial_timeout(10) - def test_add_torrent_to_channel_missing_parameter(self): - """ - Testing whether error 400 is returned when the torrent parameter is missing while adding a torrent to a channel - """ - self.create_fake_channel("channel", "") - expected_json = {"error": "torrent parameter missing"} - return self.do_request('channels/discovered/%s/torrents' % 'fakedispersyid'.encode('hex'), 400, - expected_json, 'PUT') - - @trial_timeout(10) - def test_add_torrent_to_channel_500(self): - """ - Testing whether the API returns a formatted 500 error if an HttpError is raised - """ - self.create_fake_channel("channel", "") - torrent_path = TORRENT_UBUNTU_FILE - - def fake_error(channel_id, torrent_def, extra_info=None, forward=True): - raise HttpError(msg="Test error") - - self.session.add_torrent_def_to_channel = fake_error - - def verify_error_message(body): - error_response = json.loads(body) - expected_response = { - u"error": { - u"handled": True, - u"code": u"HttpError", - u"message": u"Test error" - } - } - self.assertDictContainsSubset(expected_response[u"error"], error_response[u"error"]) - - with open(torrent_path, mode='rb') as torrent_file: - torrent_64 = base64.b64encode(torrent_file.read()) - - post_data = { - "torrent": torrent_64 - } - self.should_check_equality = False - return self.do_request('channels/discovered/%s/torrents' % 'fakedispersyid'.encode('hex'), - expected_code=500, expected_json=None, request_type='PUT', - post_data=post_data).addCallback(verify_error_message) - - -class TestModifyChannelTorrentEndpoint(AbstractTestChannelsEndpoint): - - @inlineCallbacks - def setUp(self): - yield super(TestModifyChannelTorrentEndpoint, self).setUp() - self.session.lm.ltmgr = MockObject() - self.session.lm.ltmgr.shutdown = lambda: True - - @trial_timeout(10) - def test_add_torrent_from_url_to_channel_with_description(self): - """ - Testing whether a torrent can be added to a channel from a URL, with a description, using the API - """ - my_channel_id = self.create_fake_channel("channel", "") - - # Setup file server to serve torrent file - files_path = os.path.join(self.session_base_dir, 'http_torrent_files') - os.mkdir(files_path) - shutil.copyfile(TORRENT_UBUNTU_FILE, os.path.join(files_path, 'ubuntu.torrent')) - file_server_port = get_random_port() - self.setUpFileServer(file_server_port, files_path) - - def verify_method_invocation(channel_id, torrent_def, extra_info=None, forward=True): - self.assertEqual(my_channel_id, channel_id) - self.assertEqual(TorrentDef.load(TORRENT_UBUNTU_FILE), torrent_def) - self.assertEqual({"description": "test"}, extra_info or {}) - self.assertEqual(True, forward) - - self.session.add_torrent_def_to_channel = verify_method_invocation - - torrent_url = 'http://localhost:%d/ubuntu.torrent' % file_server_port - url = 'channels/discovered/%s/torrents/%s' % ('fakedispersyid'.encode('hex'), urllib.quote_plus(torrent_url)) - return self.do_request(url, expected_code=200, expected_json={"added": torrent_url}, request_type='PUT', - post_data={"description": "test"}) - - @trial_timeout(10) - def test_add_torrent_from_magnet_to_channel(self): - """ - Testing whether adding a torrent with a magnet link to a channel without a description works - """ - my_channel_id = self.create_fake_channel("channel", "") - - def fake_get_metainfo(_, callback, timeout=10, timeout_callback=None, notify=True): - meta_info = TorrentDef.load(TORRENT_UBUNTU_FILE).get_metainfo() -
callback(meta_info) - - self.session.lm.ltmgr.get_metainfo = fake_get_metainfo - - def verify_method_invocation(channel_id, torrent_def, extra_info=None, forward=True): - self.assertEqual(my_channel_id, channel_id) - self.assertEqual(TorrentDef.load(TORRENT_UBUNTU_FILE), torrent_def) - self.assertEqual({}, extra_info or {}) - self.assertEqual(True, forward) - - self.session.add_torrent_def_to_channel = verify_method_invocation - - magnet_url = 'magnet:?fake' - url = 'channels/discovered/%s/torrents/%s' % ('fakedispersyid'.encode('hex'), urllib.quote_plus(magnet_url)) - return self.do_request(url, expected_code=200, expected_json={"added": magnet_url}, request_type='PUT') - - @trial_timeout(10) - def test_add_torrent_to_channel_404(self): - """ - Testing whether adding a torrent to a non-existing channel returns error code 404 - """ - self.should_check_equality = False - return self.do_request('channels/discovered/abcd/torrents/fake_url', - expected_code=404, expected_json=None, request_type='PUT') - - @trial_timeout(10) - def test_add_magnet_to_channel_500(self): - """ - Testing whether the API returns a formatted 500 error if ValueError is raised - """ - self.create_fake_channel("channel", "") - - def fake_get_metainfo(_, callback, timeout=10, timeout_callback=None, notify=True): - raise ValueError(u"Test error") - - self.session.lm.ltmgr.get_metainfo = fake_get_metainfo - - def verify_error_message(body): - error_response = json.loads(body) - expected_response = { - u"error": { - u"handled": True, - u"code": u"ValueError", - u"message": u"Test error" - } - } - self.assertDictContainsSubset(expected_response[u"error"], error_response[u"error"]) - - torrent_url = 'magnet:fake' - url = 'channels/discovered/%s/torrents/%s' % ('fakedispersyid'.encode('hex'), urllib.quote_plus(torrent_url)) - self.should_check_equality = False - return self.do_request(url, expected_code=500, expected_json=None, request_type='PUT')\ - .addCallback(verify_error_message) - - @trial_timeout(10) - def test_timeout_on_add_torrent(self): - """ - Testing whether a metainfo timeout while adding a torrent returns a formatted 500 error - """ - self.create_fake_channel("channel", "") - - def on_get_metainfo(_, callback, timeout=10, timeout_callback=None, notify=True): - timeout_callback("infohash_whatever") - - self.session.lm.ltmgr.get_metainfo = on_get_metainfo - - def verify_error_message(body): - error_response = json.loads(body) - expected_response = { - u"error": { - u"handled": True, - u"code": u"RuntimeError", - u"message": u"Metainfo timeout" - } - } - self.assertDictContainsSubset(expected_response[u"error"], error_response[u"error"]) - - torrent_url = 'magnet:fake' - url = 'channels/discovered/%s/torrents/%s' % ('fakedispersyid'.encode('hex'), urllib.quote_plus(torrent_url)) - self.should_check_equality = False - return self.do_request(url, expected_code=500, expected_json=None, request_type='PUT')\ - .addCallback(verify_error_message) - - @trial_timeout(10) - def test_remove_tor_unknown_channel(self): - """ - Testing whether the API returns error 404 if a torrent is removed from an unknown channel - """ - return self.do_request('channels/discovered/abcd/torrents/abcd', expected_code=404, request_type='DELETE')
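
The removal tests that follow all stub Dispersy the same way: a bare attribute-bag mock stands in for the channel community, and session.get_dispersy_instance is monkey-patched to resolve it. A minimal sketch of that pattern, using the MockObject helper from Tribler.Test.Core.base_test; the stub_channel_community function is illustrative, not part of the codebase:

    from Tribler.Test.Core.base_test import MockObject

    def stub_channel_community(session):
        # Fake channel community; tests flip called_remove to prove that
        # the endpoint really delegated the removal to the community.
        community = MockObject()
        community.called_remove = False

        def remove_torrents(_):
            community.called_remove = True
        community.remove_torrents = remove_torrents

        # Any community lookup on the session now resolves to the fake one.
        mock_dispersy = MockObject()
        mock_dispersy.get_community = lambda _: community
        session.get_dispersy_instance = lambda: mock_dispersy
        return community
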
- - @trial_timeout(10) - def test_remove_tor_unknown_infohash(self): - """ - Testing whether the API returns {"removed": False, "failed_torrents":[ infohash ]} if an unknown torrent is - removed from a channel - """ - unknown_torrent_infohash = 'a' * 40 - - mock_channel_community = MockObject() - mock_channel_community.called_remove = False - - mock_dispersy = MockObject() - mock_dispersy.get_community = lambda _: mock_channel_community - - self.create_fake_channel("channel", "") - self.session.get_dispersy_instance = lambda: mock_dispersy - - def verify_delete_response(response): - json_response = json.loads(response) - self.assertFalse(json_response["removed"], "Tribler removed an unknown torrent") - self.assertTrue(unknown_torrent_infohash in json_response["failed_torrents"]) - self.assertFalse(mock_channel_community.called_remove) - - self.should_check_equality = False - url = 'channels/discovered/%s/torrents/%s' % ('fakedispersyid'.encode('hex'), unknown_torrent_infohash) - return self.do_request(url, expected_code=200, request_type='DELETE').addCallback(verify_delete_response) - - @trial_timeout(10) - def test_remove_tor_unknown_cmty(self): - """ - Testing whether the API returns error 404 if a torrent is removed from a channel without a community - """ - channel_id = self.create_fake_channel("channel", "") - torrent_list = [[channel_id, 1, 1, ('a' * 40).decode('hex'), 1460000000, "ubuntu-torrent.iso", - [['file1.txt', 42]], []]] - self.insert_torrents_into_channel(torrent_list) - - def mocked_get_community(_): - raise CommunityNotFoundException("abcd") - - mock_dispersy = MockObject() - mock_dispersy.get_community = mocked_get_community - self.session.get_dispersy_instance = lambda: mock_dispersy - - url = 'channels/discovered/%s/torrents/%s' % ('fakedispersyid'.encode('hex'), 'a' * 40) - return self.do_request(url, expected_code=404, request_type='DELETE') - - @trial_timeout(10) - def test_remove_torrent(self): - """ - Testing whether the API can remove a torrent from a channel - """ - mock_channel_community = MockObject() - mock_channel_community.called_remove = False - - def verify_torrent_removed(_): - self.assertTrue(mock_channel_community.called_remove) - - channel_id = self.create_fake_channel("channel", "") - torrent_list = [[channel_id, 1, 1, ('a' * 40).decode('hex'), 1460000000, "ubuntu-torrent.iso", - [['file1.txt', 42]], []]] - self.insert_torrents_into_channel(torrent_list) - - def remove_torrents_called(_): - mock_channel_community.called_remove = True - - mock_channel_community.remove_torrents = remove_torrents_called - mock_dispersy = MockObject() - mock_dispersy.get_community = lambda _: mock_channel_community - self.session.get_dispersy_instance = lambda: mock_dispersy - - self.should_check_equality = False - url = 'channels/discovered/%s/torrents/%s' % ('fakedispersyid'.encode('hex'), 'a' * 40) - return self.do_request(url, expected_code=200, request_type='DELETE').addCallback(verify_torrent_removed) - - @trial_timeout(10) - def test_remove_selected_torrents(self): - """ - Testing whether the API can remove selected torrents from a channel - """ - mock_channel_community = MockObject() - mock_channel_community.called_remove = False - - def remove_torrents_called(_): - mock_channel_community.called_remove = True - - mock_channel_community.remove_torrents = remove_torrents_called - mock_dispersy = MockObject() - mock_dispersy.get_community = lambda _: mock_channel_community - - channel_id = self.create_fake_channel("channel", "") - self.session.get_dispersy_instance = lambda: mock_dispersy - - torrent_list = [[channel_id, 1, 1, ('a' * 40).decode('hex'), 1460000000, "ubuntu-torrent.iso", - [['file1.txt', 42]], []], - [channel_id, 1, 1, ('b' * 40).decode('hex'), 1460002000, "ubuntu-torrent2.iso", - [['file2.txt', 42]], []]] - self.insert_torrents_into_channel(torrent_list) - - def
verify_torrent_removed(response): - json_response = json.loads(response) - self.assertTrue(json_response["removed"], "Removing selected torrents failed") - self.assertTrue(mock_channel_community.called_remove) - - self.should_check_equality = False - url = 'channels/discovered/%s/torrents/%s' % ('fakedispersyid'.encode('hex'), 'a' * 40 + "," + 'b' * 40) - return self.do_request(url, expected_code=200, request_type='DELETE').addCallback(verify_torrent_removed) - - -class TestChannelTorrentsChantEndpoint(AbstractTestChantEndpoint): - - @trial_timeout(10) - def test_get_torrents_unknown_channel(self): - """ - Test whether querying torrents in an unknown chant channel with the API results in an error - """ - return self.do_request('channels/discovered/%s/torrents' % ('a' * (74 * 2)), expected_code=404) - - @trial_timeout(10) - def test_get_torrents_from_my_channel(self): - """ - Test whether the API returns the correct torrents from our chant channel - """ - def verify_response(response): - json_response = json.loads(response) - self.assertEqual(len(json_response['torrents']), 1) - self.assertEqual(json_response['torrents'][0]['name'], 'forthetest') - - my_channel = self.create_my_channel('test', 'test') - self.add_random_torrent_to_my_channel(name='forthetest') - - self.should_check_equality = False - return self.do_request('channels/discovered/%s/torrents' % str(my_channel.public_key).encode('hex'), - expected_code=200).addCallback(verify_response) - - @trial_timeout(10) - def test_get_torrents_from_channel(self): - """ - Test whether the API returns the correct torrents from another user's chant channel - """ - - with db_session: - channel = self.session.lm.mds.process_mdblob_file(CHANNEL_METADATA)[0] - public_key = channel.public_key - channel_dir = os.path.join(CHANNEL_DIR, channel.dir_name) - self.session.lm.mds.process_channel_dir(channel_dir, public_key) - channel_size = len(channel.contents_list) - - def verify_response(response): - json_response = json.loads(response) - self.assertEqual(len(json_response['torrents']), channel_size) - - self.should_check_equality = False - return self.do_request('channels/discovered/%s/torrents' % str(public_key).encode('hex'), - expected_code=200).addCallback(verify_response) - - @trial_timeout(10) - def test_add_torrent_to_external_channel(self): - """ - Test whether adding a torrent to a channel that you do not own results in an error - """ - self.should_check_equality = False - return self.do_request('channels/discovered/%s/torrents' % ('a' * (74 * 2)), - expected_code=405, request_type='PUT') - - @trial_timeout(10) - def test_add_torrent_to_non_existing_channel(self): - """ - Test whether adding a torrent to your non-existent channel results in an error - """ - my_channel_id = self.session.trustchain_keypair.pub().key_to_bin() - self.should_check_equality = False - return self.do_request('channels/discovered/%s/torrents' % my_channel_id.encode('hex'), - expected_code=404, request_type='PUT') - - @trial_timeout(10) - def test_add_torrent_to_channel(self): - """ - Test adding a torrent to a chant channel using the API - """ - my_channel = self.create_my_channel('test', 'test') - with open(TORRENT_UBUNTU_FILE, mode='rb') as torrent_file: - torrent_64 = base64.b64encode(torrent_file.read()) - - def verify_added(_): - updated_my_channel = self.get_my_channel() - with db_session: - self.assertEqual(len(updated_my_channel.contents_list), 1) - - self.should_check_equality = False - post_data = {'torrent': torrent_64, 'description': 'description'} - return
self.do_request('channels/discovered/%s/torrents' % str(my_channel.public_key).encode('hex'), - expected_code=200, request_type='PUT', post_data=post_data).addCallback(verify_added) - - @trial_timeout(10) - @db_session - def test_add_torrent_to_channel_twice(self): - """ - Test whether adding a torrent to a chant channel twice results in an error - """ - my_channel = self.create_my_channel('test', 'test') - tdef = TorrentDef.load(TORRENT_UBUNTU_FILE) - my_channel.add_torrent_to_channel(tdef, None) - - with open(TORRENT_UBUNTU_FILE, mode='rb') as torrent_file: - torrent_64 = base64.b64encode(torrent_file.read()) - - self.should_check_equality = False - post_data = {'torrent': torrent_64, 'description': 'description'} - return self.do_request('channels/discovered/%s/torrents' % str(my_channel.public_key).encode('hex'), - expected_code=500, request_type='PUT', post_data=post_data) - - @trial_timeout(10) - def test_add_invalid_torrent_to_channel(self): - my_channel = self.create_my_channel('test', 'test') - self.should_check_equality = False - post_data = {'torrent': base64.b64encode('test'), 'description': 'description'} - return self.do_request('channels/discovered/%s/torrents' % str(my_channel.public_key).encode('hex'), - expected_code=500, request_type='PUT', post_data=post_data) - - -class TestModifyChantChannelTorrentEndpoint(AbstractTestChantEndpoint): - - @trial_timeout(10) - def test_add_magnet_to_external_channel(self): - """ - Test whether adding a magnet URL to a channel that you do not own results in an error - """ - self.should_check_equality = False - return self.do_request('channels/discovered/%s/torrents/fake_url' % ('a' * (74 * 2)), - expected_code=405, request_type='PUT') - - @trial_timeout(10) - def test_add_magnet_to_non_existing_channel(self): - """ - Test whether adding a magnet URL to your non-existent channel results in an error - """ - my_channel_id = self.session.trustchain_keypair.pub().key_to_bin() - self.should_check_equality = False - return self.do_request('channels/discovered/%s/torrents/fake_url' % my_channel_id.encode('hex'), - expected_code=404, request_type='PUT') - - @trial_timeout(10) - def test_add_magnet_to_channel(self): - """ - Test adding a magnet to a chant channel using the API - """ - def fake_get_metainfo(_, callback, timeout=10, timeout_callback=None, notify=True): - meta_info = TorrentDef.load(TORRENT_UBUNTU_FILE).get_metainfo() - callback(meta_info) - - self.session.lm.ltmgr.get_metainfo = fake_get_metainfo - my_channel = self.create_my_channel('test', 'test') - - def verify_added(_): - updated_my_channel = self.get_my_channel() - with db_session: - self.assertEqual(len(updated_my_channel.contents_list), 1) - - self.should_check_equality = False - return self.do_request('channels/discovered/%s/torrents/magnet:?fake' % - str(my_channel.public_key).encode('hex'), - expected_code=200, request_type='PUT').addCallback(verify_added) - - @trial_timeout(10) - def test_remove_torrent_from_external_channel(self): - """ - Test whether removing a torrent from a channel that you do not own results in an error - """ - self.should_check_equality = False - return self.do_request('channels/discovered/%s/torrents/%s' % ('a' * (74 * 2), 'a' * 40), - expected_code=405, request_type='DELETE')
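The chant endpoints in these tests identify a channel by the hex-encoded serialized public key of its owner; that serialization appears to be 74 bytes (a 10-byte 'LibNaCLPK:' tag plus the key material, consistent with the key_to_bin()[10:] slicing used later in this diff), which is why the tests use 'a' * (74 * 2) as a syntactically valid but unknown identifier. A sketch of how a real URL would be built; the helper name is illustrative:

    from binascii import hexlify

    def chant_torrent_url(session, infohash):
        # 74 raw key bytes -> 148 hex characters in the URL path.
        channel_id = hexlify(session.trustchain_keypair.pub().key_to_bin())
        return 'channels/discovered/%s/torrents/%s' % (channel_id, hexlify(infohash))
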
- - @trial_timeout(10) - def test_remove_torrent_from_unknown_channel(self): - """ - Test whether removing a torrent from your (non-existent) channel results in an error - """ - my_channel_id = self.session.trustchain_keypair.pub().key_to_bin() - self.should_check_equality = False - return self.do_request('channels/discovered/%s/torrents/%s' % (my_channel_id.encode('hex'), 'a' * 40), - expected_code=404, request_type='DELETE') - - @trial_timeout(10) - def test_remove_single_torrent_from_my_channel(self): - """ - Test whether we can remove a torrent from our channel using the API - """ - with db_session: - my_channel = self.create_my_channel('test', 'test123') - random_torrent = self.add_random_torrent_to_my_channel(name='bla') - my_channel.commit_channel_torrent() - - self.should_check_equality = False - return self.do_request('channels/discovered/%s/torrents/%s' % - (str(my_channel.public_key).encode('hex'), str(random_torrent.infohash).encode('hex')), - expected_code=200, request_type='DELETE') - - @trial_timeout(10) - def test_remove_multiple_torrents_from_my_channel_fail(self): - """ - Test removing non-existent torrents from your channel with the API, expecting them to be reported as failed - """ - def verify_response(response): - json_response = json.loads(response) - self.assertIn('failed_torrents', json_response) - - my_channel = self.create_my_channel('test', 'test123') - self.should_check_equality = False - return self.do_request('channels/discovered/%s/torrents/%s' % - (str(my_channel.public_key).encode('hex'), 'aa'), - expected_code=200, request_type='DELETE').addCallback(verify_response) diff --git a/Tribler/Test/Core/Modules/RestApi/Channels/test_create_channel_endpoint.py b/Tribler/Test/Core/Modules/RestApi/Channels/test_create_channel_endpoint.py deleted file mode 100644 index beedf2bf50b..00000000000 --- a/Tribler/Test/Core/Modules/RestApi/Channels/test_create_channel_endpoint.py +++ /dev/null @@ -1,112 +0,0 @@ -import Tribler.Core.Utilities.json_util as json -from Tribler.Test.Core.Modules.RestApi.Channels.test_channels_endpoint import AbstractTestChannelsEndpoint -from Tribler.Test.tools import trial_timeout - - -class TestCreateChannelEndpoint(AbstractTestChannelsEndpoint): - - @trial_timeout(10) - def test_my_channel_endpoint_create(self): - """ - Testing whether the API returns the right JSON data if a channel is created - """ - - def verify_channel_created(body): - channel_obj = self.session.lm.channel_manager._channel_list[0] - self.assertEqual(channel_obj.name, post_data["name"]) - self.assertEqual(channel_obj.description, post_data["description"]) - self.assertEqual(channel_obj.mode, post_data["mode"]) - self.assertDictEqual(json.loads(body), {"added": channel_obj.channel_id}) - - post_data = { - "name": "John Smit's channel", - "description": "Video's of my cat", - "mode": "semi-open" - } - self.session.create_channel = self.create_fake_channel - self.should_check_equality = False - return self.do_request('channels/discovered', expected_code=200, expected_json=None, - request_type='PUT', post_data=post_data).addCallback(verify_channel_created)
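
All of these endpoint tests follow the same Twisted idiom: do_request returns a Deferred that fires with the raw response body, assertions run in a chained callback, and trial waits on the Deferred returned from the test method. A condensed sketch of that flow; request_then_verify is an illustrative helper, not part of the test harness:

    import Tribler.Core.Utilities.json_util as json

    def request_then_verify(test, url, expected_key):
        # Disable the harness's built-in body comparison and assert manually
        # once the body arrives.
        def verify(body):
            response = json.loads(body)
            test.assertIn(expected_key, response)
        test.should_check_equality = False
        return test.do_request(url, expected_code=200).addCallback(verify)
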
- - @trial_timeout(10) - def test_my_channel_endpoint_create_default_mode(self): - """ - Testing whether the API returns the right JSON data if a channel is created without specifying a mode - """ - - def verify_channel_created(body): - channel_obj = self.session.lm.channel_manager._channel_list[0] - self.assertEqual(channel_obj.name, post_data["name"]) - self.assertEqual(channel_obj.description, post_data["description"]) - self.assertEqual(channel_obj.mode, u'closed') - self.assertDictEqual(json.loads(body), {"added": channel_obj.channel_id}) - - post_data = { - "name": "John Smit's channel", - "description": "Video's of my cat" - } - self.session.create_channel = self.create_fake_channel - self.should_check_equality = False - return self.do_request('channels/discovered', expected_code=200, expected_json=None, - request_type='PUT', post_data=post_data).addCallback(verify_channel_created) - - @trial_timeout(10) - def test_my_channel_endpoint_create_duplicate_name_error(self): - """ - Testing whether the API returns a formatted 500 error if DuplicateChannelNameError is raised - """ - - def verify_error_message(body): - error_response = json.loads(body) - expected_response = { - u"error": { - u"handled": True, - u"code": u"DuplicateChannelNameError", - u"message": u"Channel name already exists: %s" % post_data["name"] - } - } - self.assertDictContainsSubset(expected_response[u"error"], error_response[u"error"]) - - post_data = { - "name": "John Smit's channel", - "description": "Video's of my cat", - "mode": "semi-open" - } - self.session.create_channel = self.create_fake_channel_with_existing_name - self.should_check_equality = False - return self.do_request('channels/discovered', expected_code=500, expected_json=None, request_type='PUT', - post_data=post_data).addCallback(verify_error_message) - - @trial_timeout(10) - def test_my_channel_endpoint_create_no_name_param(self): - """ - Testing whether the API returns error 400 if the name parameter is not passed - """ - post_data = { - "description": "Video's of my cat", - "mode": "semi-open" - } - expected_json = {"error": "channel name cannot be empty"} - return self.do_request('channels/discovered', expected_code=400, expected_json=expected_json, - request_type='PUT', post_data=post_data) - - @trial_timeout(10) - def test_my_channel_endpoint_create_no_description_param(self): - """ - Testing whether the API returns the right JSON data if the description parameter is not passed - """ - def verify_channel_created(body): - channel_obj = self.session.lm.channel_manager._channel_list[0] - self.assertEqual(channel_obj.name, post_data["name"]) - self.assertEqual(channel_obj.description, u'') - self.assertEqual(channel_obj.mode, post_data["mode"]) - self.assertDictEqual(json.loads(body), {"added": channel_obj.channel_id}) - - post_data = { - "name": "John Smit's channel", - "mode": "semi-open" - } - self.session.create_channel = self.create_fake_channel - self.should_check_equality = False - return self.do_request('channels/discovered', expected_code=200, expected_json=None, request_type='PUT', - post_data=post_data).addCallback(verify_channel_created) diff --git a/Tribler/Test/Core/Modules/RestApi/Channels/test_my_channel_endpoints.py b/Tribler/Test/Core/Modules/RestApi/Channels/test_my_channel_endpoints.py deleted file mode 100644 index 68a59d72156..00000000000 --- a/Tribler/Test/Core/Modules/RestApi/Channels/test_my_channel_endpoints.py +++ /dev/null @@ -1,140 +0,0 @@ -from __future__ import absolute_import - -from binascii import hexlify -import six -from pony.orm import db_session -from twisted.internet.defer import inlineCallbacks - -from Tribler.Core.Modules.restapi.channels.my_channel_endpoint import NO_CHANNEL_CREATED_RESPONSE_MSG -from Tribler.Test.Core.Modules.RestApi.Channels.test_channels_endpoint import AbstractTestChannelsEndpoint, \ - AbstractTestChantEndpoint -from Tribler.Test.Core.base_test import MockObject -from Tribler.Test.tools import trial_timeout - - -class TestMyChannelEndpoints(AbstractTestChannelsEndpoint): - - @trial_timeout(10) - def test_my_channel_overview_endpoint_no_my_channel(self): - """ - Testing whether the API returns response code 404 if no channel has been created - """ - expected_json = {"error": NO_CHANNEL_CREATED_RESPONSE_MSG} - return self.do_request('mychannel', expected_json=expected_json, expected_code=404)
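
Two identifier conventions are in play in the overview tests around here: the legacy endpoint reports the hex-encoded Dispersy channel id, while the chant variant reports the hex-encoded trustchain public key. A small sketch mirroring the values these tests expect; overview_identifiers is illustrative:

    from binascii import hexlify

    def overview_identifiers(session):
        # Legacy 'mychannel' overview: identifier is the hex Dispersy channel
        # id (these tests fake it with the literal b'fakedispersyid').
        legacy_identifier = hexlify(b'fakedispersyid')
        # Chant 'mychannel' overview: identifier is the hex trustchain public key.
        chant_identifier = hexlify(session.trustchain_keypair.pub().key_to_bin())
        return legacy_identifier, chant_identifier
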
- - @trial_timeout(10) - def test_my_channel_overview_endpoint_with_channel(self): - """ - Testing whether the API returns the right JSON data if a channel overview is requested - """ - channel_json = {u'mychannel': {u'name': u'testname', u'description': u'testdescription', - u'identifier': six.text_type(hexlify(b'fakedispersyid'))}} - self.create_my_channel(channel_json[u'mychannel'][u'name'], channel_json[u'mychannel'][u'description']) - - return self.do_request('mychannel', expected_code=200, expected_json=channel_json) - - @trial_timeout(10) - def test_edit_my_channel_no_channel(self): - """ - Testing whether an error 404 is returned when trying to edit your non-existing channel - """ - post_params = {'name': 'test'} - return self.do_request('mychannel', expected_code=404, request_type='POST', post_data=post_params) - - @trial_timeout(10) - def test_edit_my_channel_no_cmty(self): - """ - Testing whether an error 404 is returned when trying to edit your channel without a community - """ - mock_dispersy = MockObject() - mock_dispersy.get_community = lambda _: None - self.session.get_dispersy_instance = lambda: mock_dispersy - - self.create_my_channel('test', 'test') - - post_params = {'name': 'test'} - return self.do_request('mychannel', expected_code=404, request_type='POST', post_data=post_params) - - @inlineCallbacks - def test_edit_channel(self): - """ - Testing whether a channel is correctly modified - """ - self.create_my_channel('test', 'test') - mock_channel_community = MockObject() - mock_channel_community.called_modify = False - - def verify_channel_modified(_): - self.assertTrue(mock_channel_community.called_modify) - - def modify_channel_called(modifications): - self.assertEqual(modifications['name'], 'test1') - self.assertEqual(modifications['description'], 'test2') - mock_channel_community.called_modify = True - - mock_channel_community.modifyChannel = modify_channel_called - mock_dispersy = MockObject() - mock_dispersy.get_community = lambda _: mock_channel_community - self.session.get_dispersy_instance = lambda: mock_dispersy - - self.should_check_equality = False - post_params = {'name': '', 'description': 'test2'} - yield self.do_request('mychannel', expected_code=400, post_data=post_params, request_type='POST') - - self.should_check_equality = True - post_params = {'name': 'test1', 'description': 'test2'} - yield self.do_request('mychannel', expected_code=200, expected_json={"modified": True}, post_data=post_params, - request_type='POST').addCallback(verify_channel_modified) - - -class TestMyChannelChantEndpoints(AbstractTestChantEndpoint): - - @trial_timeout(10) - def test_my_channel_overview_endpoint_no_my_channel(self): - """ - Testing whether the API returns response code 404 if no chant channel has been created - """ - expected_json = {"error": NO_CHANNEL_CREATED_RESPONSE_MSG} - return self.do_request('mychannel', expected_json=expected_json, expected_code=404) - - @trial_timeout(10) - def test_my_channel_overview_endpoint_with_channel(self): - """ - Testing whether the API returns the right JSON data if an existing chant channel overview is requested - """ - channel_json = {u'mychannel': {u'chant': True, u'name': u'testname', u'description': u'testdescription', - u'identifier': hexlify(self.session.trustchain_keypair.pub().key_to_bin())}} - self.create_my_channel(channel_json[u'mychannel'][u'name'], channel_json[u'mychannel'][u'description']) - - return self.do_request('mychannel',
expected_code=200, expected_json=channel_json) - - @trial_timeout(10) - def test_edit_channel_not_exist(self): - """ - Test whether the API returns error 404 when trying to edit a non-existing channel - """ - post_params = {'name': 'new channel', 'description': 'new description'} - self.should_check_equality = False - return self.do_request('mychannel', request_type='POST', post_data=post_params, expected_code=404) - - @trial_timeout(10) - def test_edit_channel(self): - """ - Test editing your chant channel - """ - self.create_my_channel('my channel', 'fancy description') - self.add_random_torrent_to_my_channel() - post_params = {'name': 'new channel', 'description': 'new description', 'commit_changes': 1} - - @db_session - def verify_response(_): - my_channel = self.get_my_channel() - self.assertEqual(my_channel.title, 'new channel') - self.assertEqual(my_channel.tags, 'new description') - self.assertEqual(len(my_channel.contents_list), 1) - self.assertEqual(len(my_channel.staged_entries_list), 0) - - channel_json = {'modified': 1} - self.should_check_equality = False - return self.do_request('mychannel', request_type='POST', post_data=post_params, expected_json=channel_json, - expected_code=200).addCallback(verify_response) diff --git a/Tribler/Test/Core/Modules/RestApi/base_api_test.py b/Tribler/Test/Core/Modules/RestApi/base_api_test.py index c2ba56833ce..c4c0fe84ea2 100644 --- a/Tribler/Test/Core/Modules/RestApi/base_api_test.py +++ b/Tribler/Test/Core/Modules/RestApi/base_api_test.py @@ -1,16 +1,18 @@ +from __future__ import absolute_import + import os import urllib from twisted.internet import reactor -from twisted.internet.defer import succeed, inlineCallbacks -from twisted.python.threadable import isInIOThread -from twisted.web.client import Agent, readBody, HTTPConnectionPool +from twisted.internet.defer import inlineCallbacks, succeed +from twisted.web.client import Agent, HTTPConnectionPool, readBody from twisted.web.http_headers import Headers from twisted.web.iweb import IBodyProducer + from zope.interface import implements -from Tribler.Core.Modules.restapi import get_param import Tribler.Core.Utilities.json_util as json +from Tribler.Core.Modules.restapi import get_param from Tribler.Core.Utilities.network_utils import get_random_port from Tribler.Core.version import version_id from Tribler.Test.test_as_server import TestAsServer @@ -44,9 +46,6 @@ class AbstractBaseApiTest(TestAsServer): def setUp(self): yield super(AbstractBaseApiTest, self).setUp() self.connection_pool = HTTPConnectionPool(reactor, False) - terms = self.session.lm.category.xxx_filter.xxx_terms - terms.add("badterm") - self.session.lm.category.xxx_filter.xxx_terms = terms @inlineCallbacks def tearDown(self): @@ -60,7 +59,6 @@ def setUpPreSession(self): super(AbstractBaseApiTest, self).setUpPreSession() self.config.set_http_api_enabled(True) self.config.set_http_api_retry_port(True) - self.config.set_megacache_enabled(True) self.config.set_tunnel_community_enabled(False) # Make sure we select a random port for the HTTP API diff --git a/Tribler/Test/Core/Modules/RestApi/test_downloads_endpoint.py b/Tribler/Test/Core/Modules/RestApi/test_downloads_endpoint.py index 4d074682282..5c88669a58b 100644 --- a/Tribler/Test/Core/Modules/RestApi/test_downloads_endpoint.py +++ b/Tribler/Test/Core/Modules/RestApi/test_downloads_endpoint.py @@ -1,17 +1,21 @@ +from __future__ import absolute_import + import os -from binascii import hexlify +from binascii import hexlify, unhexlify from urllib import pathname2url from 
pony.orm import db_session + from twisted.internet.defer import fail import Tribler.Core.Utilities.json_util as json +from Tribler.Core import TorrentDef from Tribler.Core.DownloadConfig import DownloadStartupConfig from Tribler.Core.DownloadState import DownloadState from Tribler.Core.Utilities.network_utils import get_random_port from Tribler.Test.Core.Modules.RestApi.base_api_test import AbstractApiTest from Tribler.Test.Core.base_test import MockObject -from Tribler.Test.common import UBUNTU_1504_INFOHASH, TESTS_DATA_DIR, TESTS_DIR +from Tribler.Test.common import TESTS_DATA_DIR, TESTS_DIR, UBUNTU_1504_INFOHASH from Tribler.Test.tools import trial_timeout @@ -20,8 +24,6 @@ class TestDownloadsEndpoint(AbstractApiTest): def setUpPreSession(self): super(TestDownloadsEndpoint, self).setUpPreSession() self.config.set_libtorrent_enabled(True) - self.config.set_megacache_enabled(True) - self.config.set_torrent_store_enabled(True) @trial_timeout(10) def test_get_downloads_no_downloads(self): @@ -483,18 +485,17 @@ def test_export_download(self): Testing whether the API returns the contents of the torrent file if a download is exported """ video_tdef, _ = self.create_local_torrent(os.path.join(TESTS_DATA_DIR, 'video.avi')) - self.session.start_download_from_tdef(video_tdef, DownloadStartupConfig()) - - with open(os.path.join(TESTS_DATA_DIR, 'bak_single.torrent')) as torrent_file: - raw_data = torrent_file.read() - self.session.get_collected_torrent = lambda _: raw_data + download = self.session.start_download_from_tdef(video_tdef, DownloadStartupConfig()) def verify_exported_data(result): - self.assertEqual(raw_data, result) + self.assertTrue(result) - self.should_check_equality = False - return self.do_request('downloads/%s/torrent' % video_tdef.get_infohash().encode('hex'), - expected_code=200, request_type='GET').addCallback(verify_exported_data) + def on_handle_available(_): + self.should_check_equality = False + return self.do_request('downloads/%s/torrent' % video_tdef.get_infohash().encode('hex'), + expected_code=200, request_type='GET').addCallback(verify_exported_data) + + return download.get_handle().addCallback(on_handle_available) @trial_timeout(10) def test_get_files_unknown_download(self): @@ -578,10 +579,22 @@ def verify_download(_): self.assertGreaterEqual(len(self.session.get_downloads()), 1) post_data = {'uri': 'file:%s' % os.path.join(TESTS_DIR, 'Core/data/sample_channel/channel.mdblob')} - expected_json = {'started': True, 'infohash': '24eb2ff24c3a738eb1257a2fb4575db064848e25'} + expected_json = {'started': True, 'infohash': '6853d25535a1c7593e716dd6a69fc3dd7a7bfcc8'} return self.do_request('downloads', expected_code=200, request_type='PUT', post_data=post_data, expected_json=expected_json).addCallback(verify_download) + + @trial_timeout(10) + def test_add_metadata_download_already_added(self): + """ + Test whether adding an already-added channel metadata download results in an 'Already subscribed' error + """ + with db_session: + self.session.lm.mds.process_mdblob_file(os.path.join(TESTS_DIR, 'Core/data/sample_channel/channel.mdblob')) + post_data = {'uri': 'file:%s' % os.path.join(TESTS_DIR, 'Core/data/sample_channel/channel.mdblob')} + expected_json = {u'error': u'Already subscribed'} + return self.do_request('downloads', expected_code=200, request_type='PUT', post_data=post_data, + expected_json=expected_json) + @trial_timeout(10) def test_add_metadata_download_invalid_sig(self): """ @@ -591,8 +604,9 @@ def test_add_metadata_download_invalid_sig(self): with open(file_path, "wb") as out_file: with db_session:
my_channel = self.session.lm.mds.ChannelMetadata.create_channel('test', 'test') - my_channel.signature = "lalala" - out_file.write(my_channel.serialized()) + + hexed = hexlify(my_channel.serialized())[:-5] + "aaaaa" + out_file.write(unhexlify(hexed)) post_data = {'uri': 'file:%s' % file_path, 'metadata_download': '1'} expected_json = {'error': "Metadata has invalid signature"} @@ -608,3 +622,32 @@ def test_add_invalid_metadata_download(self): post_data = {'uri': 'file:%s' % os.path.join(TESTS_DATA_DIR, 'notexisting.mdblob'), 'metadata_download': '1'} self.should_check_equality = False return self.do_request('downloads', expected_code=400, request_type='PUT', post_data=post_data) + + @trial_timeout(20) + def test_get_downloads_with_channels(self): + """ + Testing whether the API flags the channel download among the returned downloads + """ + + test_channel_name = 'testchan' + + def verify_download(downloads): + downloads_json = json.loads(downloads) + self.assertEqual(len(downloads_json['downloads']), 3) + self.assertEqual(test_channel_name, + [d for d in downloads_json["downloads"] if d["channel_download"]][0]["name"]) + + video_tdef, _ = self.create_local_torrent(os.path.join(TESTS_DATA_DIR, 'video.avi')) + self.session.start_download_from_tdef(video_tdef, DownloadStartupConfig()) + self.session.start_download_from_uri("file:" + pathname2url( + os.path.join(TESTS_DATA_DIR, "bak_single.torrent"))) + + with db_session: + my_channel = self.session.lm.mds.ChannelMetadata.create_channel(test_channel_name, 'test') + my_channel.add_torrent_to_channel(video_tdef) + torrent_dict = my_channel.commit_channel_torrent() + self.session.lm.gigachannel_manager.updated_my_channel(TorrentDef.TorrentDef.load_from_dict(torrent_dict)) + + self.should_check_equality = False + return self.do_request('downloads?get_peers=1&get_pieces=1', + expected_code=200).addCallback(verify_download) diff --git a/Tribler/Test/Core/Modules/RestApi/test_events_endpoint.py b/Tribler/Test/Core/Modules/RestApi/test_events_endpoint.py index 4c4ab1d2334..c75338222da 100644 --- a/Tribler/Test/Core/Modules/RestApi/test_events_endpoint.py +++ b/Tribler/Test/Core/Modules/RestApi/test_events_endpoint.py @@ -10,12 +10,11 @@ from twisted.web.http_headers import Headers import Tribler.Core.Utilities.json_util as json -from Tribler.Core.simpledefs import NTFY_CHANNEL, NTFY_CREDIT_MINING, NTFY_DELETE, NTFY_DISCOVERED, NTFY_ERROR,\ - NTFY_FINISHED, NTFY_INSERT, NTFY_MARKET_ON_ASK, NTFY_MARKET_ON_ASK_TIMEOUT, NTFY_MARKET_ON_BID,\ - NTFY_MARKET_ON_BID_TIMEOUT, NTFY_MARKET_ON_PAYMENT_RECEIVED, NTFY_MARKET_ON_PAYMENT_SENT,\ - NTFY_MARKET_ON_TRANSACTION_COMPLETE, NTFY_NEW_VERSION, NTFY_REMOVE, NTFY_STARTED, NTFY_TORRENT, NTFY_TUNNEL,\ - NTFY_UPDATE, NTFY_UPGRADER, NTFY_UPGRADER_TICK, NTFY_WATCH_FOLDER_CORRUPT_TORRENT, SIGNAL_CHANNEL,\ - SIGNAL_LOW_SPACE, SIGNAL_ON_SEARCH_RESULTS, SIGNAL_RESOURCE_CHECK, SIGNAL_TORRENT +from Tribler.Core.simpledefs import NTFY_CHANNEL, NTFY_CREDIT_MINING, NTFY_DISCOVERED, NTFY_ERROR, NTFY_FINISHED,\ + NTFY_INSERT, NTFY_MARKET_ON_ASK, NTFY_MARKET_ON_ASK_TIMEOUT, NTFY_MARKET_ON_BID, NTFY_MARKET_ON_BID_TIMEOUT,\ + NTFY_MARKET_ON_PAYMENT_RECEIVED, NTFY_MARKET_ON_PAYMENT_SENT, NTFY_MARKET_ON_TRANSACTION_COMPLETE,\ + NTFY_NEW_VERSION, NTFY_REMOVE, NTFY_STARTED, NTFY_TORRENT, NTFY_TUNNEL, NTFY_UPDATE, NTFY_UPGRADER,\ + NTFY_UPGRADER_TICK, NTFY_WATCH_FOLDER_CORRUPT_TORRENT, SIGNAL_LOW_SPACE, SIGNAL_RESOURCE_CHECK from Tribler.Core.version import version_id from Tribler.Test.Core.Modules.RestApi.base_api_test import
AbstractApiTest from Tribler.Test.tools import trial_timeout @@ -26,6 +25,7 @@ class EventDataProtocol(Protocol): """ This class is responsible for reading the data received over the event socket. """ + def __init__(self, messages_to_wait_for, finished, response): self.json_buffer = [] self._logger = logging.getLogger(self.__class__.__name__) @@ -69,45 +69,20 @@ def on_event_socket_opened(self, response): def open_events_socket(self, _): agent = Agent(reactor, pool=self.connection_pool) return agent.request('GET', 'http://localhost:%s/events' % self.session.config.get_http_api_port(), - Headers({'User-Agent': ['Tribler ' + version_id]}), None)\ + Headers({'User-Agent': ['Tribler ' + version_id]}), None) \ .addCallback(self.on_event_socket_opened) def close_connections(self): return self.connection_pool.closeCachedConnections() - @trial_timeout(20) - def test_search_results(self): - """ - Testing whether the event endpoint returns search results when we have search results available - """ - def verify_search_results(results): - self.assertEqual(len(results), 2) - - self.messages_to_wait_for = 2 - - def send_notifications(_): - self.session.lm.api_manager.root_endpoint.events_endpoint.start_new_query() - - results_dict = {"keywords": ["test"], "result_list": [('a',) * 10]} - self.session.notifier.notify(SIGNAL_CHANNEL, SIGNAL_ON_SEARCH_RESULTS, None, results_dict) - self.session.notifier.notify(SIGNAL_TORRENT, SIGNAL_ON_SEARCH_RESULTS, None, results_dict) - - self.socket_open_deferred.addCallback(send_notifications) - - return self.events_deferred.addCallback(verify_search_results) - @trial_timeout(20) def test_events(self): """ Testing whether various events are coming through the events endpoints """ - self.messages_to_wait_for = 22 + self.messages_to_wait_for = 20 def send_notifications(_): - self.session.lm.api_manager.root_endpoint.events_endpoint.start_new_query() - results_dict = {"keywords": ["test"], "result_list": [('a',) * 10]} - self.session.notifier.notify(SIGNAL_TORRENT, SIGNAL_ON_SEARCH_RESULTS, None, results_dict) - self.session.notifier.notify(SIGNAL_CHANNEL, SIGNAL_ON_SEARCH_RESULTS, None, results_dict) self.session.notifier.notify(NTFY_UPGRADER, NTFY_STARTED, None, None) self.session.notifier.notify(NTFY_UPGRADER_TICK, NTFY_STARTED, None, None) self.session.notifier.notify(NTFY_UPGRADER, NTFY_FINISHED, None, None) @@ -115,7 +90,6 @@ def send_notifications(_): self.session.notifier.notify(NTFY_NEW_VERSION, NTFY_INSERT, None, None) self.session.notifier.notify(NTFY_CHANNEL, NTFY_DISCOVERED, None, None) self.session.notifier.notify(NTFY_TORRENT, NTFY_DISCOVERED, None, {'a': 'Invalid character \xa1'}) - self.session.notifier.notify(NTFY_TORRENT, NTFY_DELETE, None, {'a': 'b'}) self.session.notifier.notify(NTFY_TORRENT, NTFY_FINISHED, 'a' * 10, None) self.session.notifier.notify(NTFY_TORRENT, NTFY_ERROR, 'a' * 10, 'This is an error message') self.session.notifier.notify(NTFY_MARKET_ON_ASK, NTFY_UPDATE, None, {'a': 'b'}) @@ -133,29 +107,3 @@ def send_notifications(_): self.socket_open_deferred.addCallback(send_notifications) return self.events_deferred - - @trial_timeout(20) - def test_family_filter_search(self): - """ - Testing the family filter when searching for torrents and channels - """ - self.messages_to_wait_for = 2 - - def send_searches(_): - events_endpoint = self.session.lm.api_manager.root_endpoint.events_endpoint - - channels = [['a', ] * 10, ['a', ] * 10] - channels[0][2] = 'badterm' - events_endpoint.on_search_results_channels(None, None, None, {"keywords": 
["test"], - "result_list": channels}) - self.assertEqual(len(events_endpoint.channel_cids_sent), 1) - - torrents = [['a', ] * 10, ['a', ] * 10] - torrents[0][4] = 'xxx' - events_endpoint.on_search_results_torrents(None, None, None, {"keywords": ["test"], - "result_list": torrents}) - self.assertEqual(len(events_endpoint.infohashes_sent), 1) - - self.socket_open_deferred.addCallback(send_searches) - - return self.events_deferred diff --git a/Tribler/Test/Core/Modules/RestApi/test_market_endpoint.py b/Tribler/Test/Core/Modules/RestApi/test_market_endpoint.py index ca55ba60fc4..3a370e2cdd4 100644 --- a/Tribler/Test/Core/Modules/RestApi/test_market_endpoint.py +++ b/Tribler/Test/Core/Modules/RestApi/test_market_endpoint.py @@ -1,5 +1,14 @@ +from __future__ import absolute_import + import json +from twisted.internet.defer import inlineCallbacks, succeed + +from Tribler.Core.Modules.restapi.market import BaseMarketEndpoint +from Tribler.Core.Modules.wallet.dummy_wallet import DummyWallet1, DummyWallet2 +from Tribler.Test.Core.Modules.RestApi.base_api_test import AbstractApiTest +from Tribler.Test.Core.base_test import MockObject +from Tribler.Test.tools import trial_timeout from Tribler.community.market.community import MarketCommunity from Tribler.community.market.core.assetamount import AssetAmount from Tribler.community.market.core.assetpair import AssetPair @@ -11,13 +20,7 @@ from Tribler.community.market.core.timestamp import Timestamp from Tribler.community.market.core.trade import Trade from Tribler.community.market.core.wallet_address import WalletAddress -from Tribler.Core.Modules.restapi.market import BaseMarketEndpoint -from Tribler.Core.Modules.wallet.dummy_wallet import DummyWallet1, DummyWallet2 -from Tribler.Test.Core.Modules.RestApi.base_api_test import AbstractApiTest -from Tribler.Test.Core.base_test import MockObject -from Tribler.Test.tools import trial_timeout from Tribler.pyipv8.ipv8.test.mocking.ipv8 import MockIPv8 -from twisted.internet.defer import inlineCallbacks, succeed class TestMarketEndpoint(AbstractApiTest): diff --git a/Tribler/Test/Core/Modules/RestApi/test_metadata_endpoint.py b/Tribler/Test/Core/Modules/RestApi/test_metadata_endpoint.py new file mode 100644 index 00000000000..47b86056cd6 --- /dev/null +++ b/Tribler/Test/Core/Modules/RestApi/test_metadata_endpoint.py @@ -0,0 +1,291 @@ +from __future__ import absolute_import + +import json +import sys +from binascii import hexlify +from unittest import skipIf + +from pony.orm import db_session + +import six +from six.moves import xrange + +from twisted.internet.defer import inlineCallbacks + +from Tribler.Core.TorrentChecker.torrent_checker import TorrentChecker +from Tribler.Core.Utilities.network_utils import get_random_port +from Tribler.Core.Utilities.random_utils import random_infohash +from Tribler.Test.Core.Modules.RestApi.base_api_test import AbstractApiTest +from Tribler.Test.Core.base_test import MockObject +from Tribler.Test.tools import trial_timeout +from Tribler.Test.util.Tracker.HTTPTracker import HTTPTracker +from Tribler.Test.util.Tracker.UDPTracker import UDPTracker +from Tribler.pyipv8.ipv8.keyvault.crypto import default_eccrypto + + +class BaseTestMetadataEndpoint(AbstractApiTest): + + @inlineCallbacks + def setUp(self): + yield super(BaseTestMetadataEndpoint, self).setUp() + self.infohashes = [] + + torrents_per_channel = 5 + # Add a few channels + with db_session: + for ind in xrange(10): + self.session.lm.mds.ChannelNode._my_key = default_eccrypto.generate_key('curve25519') + _ = 
self.session.lm.mds.ChannelMetadata(title='channel%d' % ind, subscribed=(ind % 2 == 0), + num_entries=torrents_per_channel, infohash=random_infohash()) + for torrent_ind in xrange(torrents_per_channel): + rand_infohash = random_infohash() + self.infohashes.append(rand_infohash) + _ = self.session.lm.mds.TorrentMetadata(title='torrent%d' % torrent_ind, infohash=rand_infohash) + + def setUpPreSession(self): + super(BaseTestMetadataEndpoint, self).setUpPreSession() + self.config.set_chant_enabled(True) + + +class TestChannelsEndpoint(BaseTestMetadataEndpoint): + + def test_get_channels(self): + """ + Test whether we can query some channels in the database with the REST API + """ + + def on_response(response): + json_dict = json.loads(response) + self.assertEqual(len(json_dict['channels']), 10) + + self.should_check_equality = False + return self.do_request('metadata/channels?sort_by=title', expected_code=200).addCallback(on_response) + + @skipIf(sys.platform == "darwin", "Skipping this test on Mac due to Pony bug") + def test_get_channels_sort_by_health(self): + def on_response(response): + json_dict = json.loads(response) + self.assertEqual(len(json_dict['channels']), 10) + + self.should_check_equality = False + return self.do_request('metadata/channels?sort_by=health', expected_code=200).addCallback(on_response) + + def test_get_channels_invalid_sort(self): + """ + Test whether we can query some channels in the database with the REST API and an invalid sort parameter + """ + + def on_response(response): + json_dict = json.loads(response) + self.assertEqual(len(json_dict['channels']), 10) + + self.should_check_equality = False + return self.do_request('metadata/channels?sort_by=fdsafsdf', expected_code=200).addCallback(on_response) + + def test_get_subscribed_channels(self): + """ + Test whether we can successfully query channels we are subscribed to with the REST API + """ + + def on_response(response): + json_dict = json.loads(response) + self.assertEqual(len(json_dict['channels']), 5) + + self.should_check_equality = False + return self.do_request('metadata/channels?subscribed=1', expected_code=200).addCallback(on_response) + + +class TestSpecificChannelEndpoint(BaseTestMetadataEndpoint): + + def test_subscribe_missing_parameter(self): + """ + Test whether an error is returned if we try to subscribe to a channel with the REST API and missing parameters + """ + self.should_check_equality = False + channel_pk = hexlify(self.session.lm.mds.ChannelNode._my_key.pub().key_to_bin()[10:]) + return self.do_request('metadata/channels/%s' % channel_pk, expected_code=400, request_type='POST') + + def test_subscribe_no_channel(self): + """ + Test whether an error is returned if we try to subscribe to a channel with the REST API and a missing channel + """ + self.should_check_equality = False + post_params = {'subscribe': '1'} + return self.do_request('metadata/channels/aa', expected_code=404, request_type='POST', post_data=post_params) + + def test_subscribe(self): + """ + Test whether we can subscribe to a channel with the REST API + """ + self.should_check_equality = False + post_params = {'subscribe': '1'} + channel_pk = hexlify(self.session.lm.mds.ChannelNode._my_key.pub().key_to_bin()[10:]) + return self.do_request('metadata/channels/%s' % channel_pk, expected_code=200, + request_type='POST', post_data=post_params) + + +class TestSpecificChannelTorrentsEndpoint(BaseTestMetadataEndpoint): + + def test_get_torrents(self): + """ + Test whether we can query some torrents in the database with the REST 
API + """ + + def on_response(response): + json_dict = json.loads(response) + self.assertEqual(len(json_dict['torrents']), 5) + + self.should_check_equality = False + channel_pk = hexlify(self.session.lm.mds.ChannelNode._my_key.pub().key_to_bin()[10:]) + return self.do_request('metadata/channels/%s/torrents' % channel_pk, expected_code=200).addCallback(on_response) + + +class TestPopularChannelsEndpoint(BaseTestMetadataEndpoint): + + def test_get_popular_channels_neg_limit(self): + """ + Test whether an error is returned if we use a negative value for the limit parameter + """ + self.should_check_equality = False + return self.do_request('metadata/channels/popular?limit=-1', expected_code=400) + + def test_get_popular_channels(self): + """ + Test whether we can retrieve popular channels with the REST API + """ + + def on_response(response): + json_dict = json.loads(response) + self.assertEqual(len(json_dict['channels']), 5) + + self.should_check_equality = False + return self.do_request('metadata/channels/popular?limit=5', expected_code=200).addCallback(on_response) + + +class TestSpecificTorrentEndpoint(BaseTestMetadataEndpoint): + + def test_get_info_torrent_not_exist(self): + """ + Test if an error is returned when querying information of a torrent that does not exist + """ + self.should_check_equality = False + return self.do_request('metadata/torrents/aabbcc', expected_code=404) + + def test_get_info_torrent(self): + """ + Test whether we can successfully query information about a torrent with the REST API + """ + self.should_check_equality = False + return self.do_request('metadata/torrents/%s' % hexlify(self.infohashes[0]), expected_code=200) + + +class TestRandomTorrentsEndpoint(BaseTestMetadataEndpoint): + + def test_get_random_torrents_neg_limit(self): + """ + Test if an error is returned if we query some random torrents with the REST API and a negative limit + """ + self.should_check_equality = False + return self.do_request('metadata/torrents/random?limit=-5', expected_code=400) + + def test_get_random_torrents(self): + """ + Test whether we can retrieve some random torrents with the REST API + """ + + def on_response(response): + json_dict = json.loads(response) + self.assertEqual(len(json_dict['torrents']), 5) + + self.should_check_equality = False + return self.do_request('metadata/torrents/random?limit=5', expected_code=200).addCallback(on_response) + + +class TestTorrentHealthEndpoint(AbstractApiTest): + + def setUpPreSession(self): + super(TestTorrentHealthEndpoint, self).setUpPreSession() + self.config.set_chant_enabled(True) + + @inlineCallbacks + def setUp(self): + yield super(TestTorrentHealthEndpoint, self).setUp() + + min_base_port, max_base_port = self.get_bucket_range_port() + + self.udp_port = get_random_port(min_port=min_base_port, max_port=max_base_port) + self.udp_tracker = UDPTracker(self.udp_port) + + self.http_port = get_random_port(min_port=min_base_port, max_port=max_base_port) + self.http_tracker = HTTPTracker(self.http_port) + + @inlineCallbacks + def tearDown(self): + self.session.lm.ltmgr = None + if self.udp_tracker: + yield self.udp_tracker.stop() + if self.http_tracker: + yield self.http_tracker.stop() + yield super(TestTorrentHealthEndpoint, self).tearDown() + + @trial_timeout(20) + @inlineCallbacks + def test_check_torrent_health(self): + """ + Test the endpoint to fetch the health of a chant-managed, infohash-only torrent + """ + infohash = 'a' * 20 + tracker_url = 'udp://localhost:%s/announce' % self.udp_port + 
self.udp_tracker.tracker_info.add_info_about_infohash(infohash, 12, 11, 1) + + with db_session: + tracker_state = self.session.lm.mds.TrackerState(url=tracker_url) + torrent_state = self.session.lm.mds.TorrentState(trackers=tracker_state, infohash=infohash) + self.session.lm.mds.TorrentMetadata(infohash=infohash, + title='ubuntu-torrent.iso', + size=42, + tracker_info=tracker_url, + health=torrent_state) + url = 'metadata/torrents/%s/health?timeout=10&refresh=1' % hexlify(infohash) + self.should_check_equality = False + + # Initialize the torrent checker + self.session.lm.torrent_checker = TorrentChecker(self.session) + self.session.lm.torrent_checker.initialize() + + def verify_response_no_trackers(response): + json_response = json.loads(response) + expected_dict = { + u"health": { + u"udp://localhost:%d" % self.udp_tracker.port: { + u"leechers": 11, + u"seeders": 12, + u"infohash": six.text_type(hexlify(infohash)) + }, + u"DHT": { + u"leechers": 2, + u"seeders": 1, + u"infohash": six.text_type(hexlify(infohash)) + } + } + } + self.assertDictEqual(json_response, expected_dict) + + # Add mock DHT response + def get_metainfo(infohash, callback, **_): + callback({"seeders": 1, "leechers": 2}) + + self.session.lm.ltmgr = MockObject() + self.session.lm.ltmgr.get_metainfo = get_metainfo + + # Left for compatibility with other tests in this object + self.udp_tracker.start() + self.http_tracker.start() + yield self.do_request(url, expected_code=200, request_type='GET').addCallback(verify_response_no_trackers) + + def verify_response_nowait(response): + json_response = json.loads(response) + self.assertDictEqual(json_response, {u'checking': u'1'}) + + yield self.do_request(url + '&nowait=1', expected_code=200, request_type='GET').addCallback( + verify_response_nowait) diff --git a/Tribler/Test/Core/Modules/RestApi/test_mychannel_endpoint.py b/Tribler/Test/Core/Modules/RestApi/test_mychannel_endpoint.py new file mode 100644 index 00000000000..601e3abbc57 --- /dev/null +++ b/Tribler/Test/Core/Modules/RestApi/test_mychannel_endpoint.py @@ -0,0 +1,363 @@ +from __future__ import absolute_import + +import base64 +import json +from binascii import hexlify + +from pony.orm import db_session + +from six.moves import xrange + +from twisted.internet.defer import inlineCallbacks + +from Tribler.Core.Modules.MetadataStore.OrmBindings.channel_node import NEW, TODELETE +from Tribler.Core.TorrentDef import TorrentDef +from Tribler.Test.Core.Modules.RestApi.base_api_test import AbstractApiTest +from Tribler.Test.Core.base_test import MockObject +from Tribler.Test.common import TORRENT_UBUNTU_FILE +from Tribler.Test.tools import trial_timeout + + +class BaseTestMyChannelEndpoint(AbstractApiTest): + + @inlineCallbacks + def setUp(self): + yield super(BaseTestMyChannelEndpoint, self).setUp() + self.session.lm.gigachannel_manager = MockObject() + self.session.lm.gigachannel_manager.shutdown = lambda: None + self.session.lm.gigachannel_manager.updated_my_channel = lambda _: None + + def create_my_channel(self): + with db_session: + _ = self.session.lm.mds.ChannelMetadata.create_channel('test', 'test') + for ind in xrange(5): + _ = self.session.lm.mds.TorrentMetadata(title='torrent%d' % ind, status=NEW, infohash=('%d' % ind) * 20) + for ind in xrange(5, 9): + _ = self.session.lm.mds.TorrentMetadata(title='torrent%d' % ind, infohash=('%d' % ind) * 20) + + def setUpPreSession(self): + super(BaseTestMyChannelEndpoint, self).setUpPreSession() + self.config.set_chant_enabled(True) + + +class 
+
+ @trial_timeout(10)
+ def test_get_channel_no_channel(self):
+ """
+ Test whether requesting information about your uncreated channel results in an error
+ """
+ self.should_check_equality = False
+ return self.do_request('mychannel', expected_code=404)
+
+ @trial_timeout(10)
+ def test_get_channel(self):
+ """
+ Test whether requesting information about your channel with the REST API works
+ """
+ self.create_my_channel()
+ self.should_check_equality = False
+ return self.do_request('mychannel', expected_code=200)
+
+ @trial_timeout(10)
+ def test_edit_channel_missing_params(self):
+ """
+ Test whether updating your uncreated channel with missing parameters results in an error
+ """
+ self.should_check_equality = False
+ return self.do_request('mychannel', request_type='POST', expected_code=400)
+
+ @trial_timeout(10)
+ def test_edit_channel_no_channel(self):
+ """
+ Test whether updating your uncreated channel results in an error
+ """
+ self.should_check_equality = False
+ post_params = {'name': 'bla', 'description': 'bla'}
+ return self.do_request('mychannel', request_type='POST', post_data=post_params, expected_code=404)
+
+ @trial_timeout(10)
+ def test_edit_channel(self):
+ """
+ Test whether editing your channel with the REST API works
+ """
+ def on_response(_):
+ with db_session:
+ my_channel = self.session.lm.mds.ChannelMetadata.get_my_channel()
+ self.assertEqual(my_channel.title, 'bla')
+
+ self.create_my_channel()
+ self.should_check_equality = False
+ post_params = {'name': 'bla', 'description': 'bla'}
+ return self.do_request('mychannel', request_type='POST', post_data=post_params, expected_code=200)\
+ .addCallback(on_response)
+
+ @trial_timeout(10)
+ def test_create_channel_missing_name(self):
+ """
+ Test whether creating a channel with a missing name parameter results in an error
+ """
+ self.should_check_equality = False
+ return self.do_request('mychannel', request_type='PUT', expected_code=400)
+
+ @trial_timeout(10)
+ def test_create_channel_exists(self):
+ """
+ Test whether creating a channel when one already exists results in an error
+ """
+ self.create_my_channel()
+ self.should_check_equality = False
+ post_params = {'name': 'bla', 'description': 'bla'}
+ return self.do_request('mychannel', request_type='PUT', post_data=post_params, expected_code=409)
+
+ @trial_timeout(10)
+ def test_create_channel(self):
+ """
+ Test whether creating a channel with the REST API works
+ """
+ def on_response(_):
+ with db_session:
+ my_channel = self.session.lm.mds.ChannelMetadata.get_my_channel()
+ self.assertTrue(my_channel)
+ self.assertEqual(my_channel.title, 'bla')
+
+ self.should_check_equality = False
+ post_params = {'name': 'bla', 'description': 'bla'}
+ return self.do_request('mychannel', request_type='PUT', post_data=post_params, expected_code=200)\
+ .addCallback(on_response)
+
+
+class TestMyChannelCommitEndpoint(BaseTestMyChannelEndpoint):
+
+ @trial_timeout(10)
+ def test_commit_no_channel(self):
+ """
+ Test whether we get an error if we try to commit a channel without it being created
+ """
+ self.should_check_equality = False
+ return self.do_request('mychannel/commit', expected_code=404, request_type='POST')
+
+ @trial_timeout(10)
+ def test_commit(self):
+ """
+ Test whether we can successfully commit changes to your channel with the REST API
+ """
+ self.should_check_equality = False
+ self.create_my_channel()
+ return self.do_request('mychannel/commit', expected_code=200, request_type='POST')
+
+
+class TestMyChannelTorrentsEndpoint(BaseTestMyChannelEndpoint):
+
+ @trial_timeout(10)
+ def test_get_my_torrents_no_channel(self):
+ """
+ Test whether we get an error if we try to get torrents from your channel without it being created
+ """
+ self.should_check_equality = False
+ return self.do_request('mychannel/torrents', expected_code=404)
+
+ @trial_timeout(10)
+ def test_get_my_torrents(self):
+ """
+ Test whether we can query torrents from your channel
+ """
+ def on_response(response):
+ json_response = json.loads(response)
+ self.assertEqual(len(json_response['torrents']), 9) # 5 NEW and 4 committed torrents from create_my_channel
+ self.assertIn('status', json_response['torrents'][0])
+
+ self.create_my_channel()
+ self.should_check_equality = False
+ return self.do_request('mychannel/torrents', expected_code=200).addCallback(on_response)
+
+ @trial_timeout(10)
+ def test_delete_all_torrents_no_channel(self):
+ """
+ Test whether we get an error if we remove all torrents from your uncreated channel
+ """
+ self.should_check_equality = False
+ return self.do_request('mychannel/torrents', request_type='DELETE', expected_code=404)
+
+ @trial_timeout(10)
+ def test_delete_all_torrents(self):
+ """
+ Test whether we can remove all torrents from your channel
+ """
+ def on_response(_):
+ with db_session:
+ my_channel = self.session.lm.mds.ChannelMetadata.get_my_channel()
+ torrents = my_channel.contents_list
+ for torrent in torrents:
+ self.assertEqual(torrent.status, TODELETE)
+
+ self.should_check_equality = False
+ self.create_my_channel()
+ return self.do_request('mychannel/torrents', request_type='DELETE', expected_code=200).addCallback(on_response)
+
+ @trial_timeout(10)
+ def test_update_my_torrents_invalid_params(self):
+ """
+ Test whether we get an error if we pass invalid parameters when updating multiple torrents in your channel
+ """
+ self.should_check_equality = False
+ return self.do_request('mychannel/torrents', request_type='POST', expected_code=400)
+
+ @trial_timeout(10)
+ def test_update_my_torrents_no_channel(self):
+ """
+ Test whether we get an error if we update multiple torrents in your uncreated channel
+ """
+ self.should_check_equality = False
+ post_params = {'status': TODELETE, 'infohashes': '0' * 20}
+ return self.do_request('mychannel/torrents', request_type='POST', post_data=post_params, expected_code=404)
+
+ @trial_timeout(10)
+ def test_update_my_torrents(self):
+ """
+ Test whether we can update the status of multiple torrents in your channel at once
+ """
+ def on_response(_):
+ with db_session:
+ my_channel = self.session.lm.mds.ChannelMetadata.get_my_channel()
+ torrent = my_channel.get_torrent('0' * 20)
+ self.assertEqual(torrent.status, TODELETE)
+
+ self.should_check_equality = False
+ self.create_my_channel()
+ post_params = {'status': TODELETE, 'infohashes': hexlify('0' * 20)}
+ return self.do_request('mychannel/torrents', request_type='POST', post_data=post_params, expected_code=200)\
+ .addCallback(on_response)
+
+ @trial_timeout(10)
+ def test_add_torrents_no_channel(self):
+ """
+ Test whether an error is returned when we try to add a torrent to your uncreated channel
+ """
+ self.should_check_equality = False
+ return self.do_request('mychannel/torrents', request_type='PUT', expected_code=404)
+
+ @trial_timeout(10)
+ def test_add_torrents_no_dir(self):
+ """
+ Test whether an error is returned when the specified torrents directory does not exist
+ """
+ self.create_my_channel()
+ self.should_check_equality = False
+ post_params = {'torrents_dir': 'nonexisting'}
+ return self.do_request('mychannel/torrents', request_type='PUT', post_data=post_params, expected_code=400)
+
+ @trial_timeout(10)
+ def test_add_torrents_recursive_no_dir(self):
+ """
+ Test whether an error is returned when recursively adding torrents without a specified directory
+ """
+ self.create_my_channel()
+ self.should_check_equality = False
+ post_params = {'recursive': True}
+ return self.do_request('mychannel/torrents', request_type='PUT', post_data=post_params, expected_code=400)
+
+ @trial_timeout(10)
+ def test_add_torrents_from_dir(self):
+ """
+ Test whether adding torrents from a directory to your channel works
+ """
+ self.create_my_channel()
+ self.should_check_equality = False
+ post_params = {'torrents_dir': self.session_base_dir, 'recursive': True}
+ return self.do_request('mychannel/torrents', request_type='PUT', post_data=post_params, expected_code=200)
+
+ @trial_timeout(10)
+ def test_add_torrent_missing_torrent(self):
+ """
+ Test whether an error is returned when adding a torrent to your channel but with a missing torrent parameter
+ """
+ self.create_my_channel()
+ self.should_check_equality = False
+ post_params = {}
+ return self.do_request('mychannel/torrents', request_type='PUT', post_data=post_params, expected_code=400)
+
+ @trial_timeout(10)
+ def test_add_invalid_torrent(self):
+ """
+ Test whether an error is returned when adding an invalid torrent file to your channel
+ """
+ self.create_my_channel()
+ self.should_check_equality = False
+ post_params = {'torrent': 'bla'}
+ return self.do_request('mychannel/torrents', request_type='PUT', post_data=post_params, expected_code=500)
+
+ @trial_timeout(10)
+ @db_session
+ def test_add_torrent_duplicate(self):
+ """
+ Test whether adding a duplicate torrent to your channel results in an error
+ """
+ self.create_my_channel()
+ my_channel = self.session.lm.mds.ChannelMetadata.get_my_channel()
+ tdef = TorrentDef.load(TORRENT_UBUNTU_FILE)
+ my_channel.add_torrent_to_channel(tdef, {'description': 'blabla'})
+
+ with open(TORRENT_UBUNTU_FILE, "rb") as torrent_file:
+ base64_content = base64.b64encode(torrent_file.read())
+
+ self.should_check_equality = False
+ post_params = {'torrent': base64_content}
+ return self.do_request('mychannel/torrents', request_type='PUT', post_data=post_params, expected_code=500)
+
+ @trial_timeout(10)
+ def test_add_torrent(self):
+ """
+ Test adding a torrent to your channel
+ """
+ self.create_my_channel()
+
+ with open(TORRENT_UBUNTU_FILE, "rb") as torrent_file:
+ base64_content = base64.b64encode(torrent_file.read())
+
+ self.should_check_equality = False
+ post_params = {'torrent': base64_content}
+ return self.do_request('mychannel/torrents', request_type='PUT', post_data=post_params, expected_code=200)
+
+
+class TestMyChannelSpecificTorrentEndpoint(BaseTestMyChannelEndpoint):
+
+ @trial_timeout(10)
+ def test_update_my_torrent_no_status(self):
+ """
+ Test whether an error is returned if we do not pass the status parameter
+ """
+ self.should_check_equality = False
+ return self.do_request('mychannel/torrents/abcd', request_type='PATCH', expected_code=400)
+
+ @trial_timeout(10)
+ def test_update_my_torrent_no_channel(self):
+ """
+ Test whether an error is returned if your channel is not created when updating your torrents
+ """
+ self.should_check_equality = False
+ post_params = {'status': TODELETE}
+ return self.do_request('mychannel/torrents/abcd',
+ post_data=post_params, request_type='PATCH', expected_code=404)
+
+ @trial_timeout(10)
+ def 
test_update_my_torrent_no_torrent(self): + """ + Test whether an error is returned when updating an unknown torrent in your channel + """ + self.should_check_equality = False + self.create_my_channel() + post_params = {'status': TODELETE} + return self.do_request('mychannel/torrents/abcd', + post_data=post_params, request_type='PATCH', expected_code=404) + + @trial_timeout(10) + def test_update_my_torrent(self): + """ + Test whether you are able to update a torrent in your channel with the REST API + """ + self.should_check_equality = False + self.create_my_channel() + post_params = {'status': TODELETE} + return self.do_request('mychannel/torrents/%s' % hexlify('0' * 20), + post_data=post_params, request_type='PATCH', expected_code=200) diff --git a/Tribler/Test/Core/Modules/RestApi/test_rest_manager.py b/Tribler/Test/Core/Modules/RestApi/test_rest_manager.py index 6406197bfde..0ef120870eb 100644 --- a/Tribler/Test/Core/Modules/RestApi/test_rest_manager.py +++ b/Tribler/Test/Core/Modules/RestApi/test_rest_manager.py @@ -1,16 +1,18 @@ from __future__ import absolute_import + import os -from Tribler.Core.exceptions import TriblerException import Tribler.Core.Utilities.json_util as json -from Tribler.Test.tools import trial_timeout +from Tribler.Core.Modules.restapi.settings_endpoint import SettingsEndpoint +from Tribler.Core.exceptions import TriblerException from Tribler.Test.Core.Modules.RestApi.base_api_test import AbstractApiTest +from Tribler.Test.tools import trial_timeout -class RestRequestTest(AbstractApiTest): +def RaiseException(*args, **kwargs): + raise TriblerException(u"Oops! Something went wrong. Please restart Tribler") - def throw_unhandled_exception(self, name, description, mode=u'closed'): - raise TriblerException(u"Oops! Something went wrong. 
Please restart Tribler") +class RestRequestTest(AbstractApiTest): @trial_timeout(10) def test_unhandled_exception(self): @@ -29,15 +31,11 @@ def verify_error_message(body): } self.assertDictContainsSubset(expected_response[u"error"], error_response[u"error"]) - post_data = { - "name": "John Smit's channel", - "description": "Video's of my cat", - "mode": "semi-open" - } - self.session.create_channel = self.throw_unhandled_exception + post_data = json.dumps({"settings": "bla", "ports": "bla"}) + SettingsEndpoint.parse_settings_dict = RaiseException self.should_check_equality = False - return self.do_request('channels/discovered', expected_code=500, expected_json=None, request_type='PUT', - post_data=post_data).addCallback(verify_error_message) + return self.do_request('settings', expected_code=500, raw_data=True, expected_json=None, request_type='POST', + post_data=post_data.encode('latin_1')).addCallback(verify_error_message) @trial_timeout(10) def test_tribler_shutting_down(self): diff --git a/Tribler/Test/Core/Modules/RestApi/test_search_endpoint.py b/Tribler/Test/Core/Modules/RestApi/test_search_endpoint.py index 6a8234ccde3..1d3373a9a1f 100644 --- a/Tribler/Test/Core/Modules/RestApi/test_search_endpoint.py +++ b/Tribler/Test/Core/Modules/RestApi/test_search_endpoint.py @@ -1,160 +1,95 @@ from __future__ import absolute_import -from six import unichr +import json +import random + from pony.orm import db_session + from six.moves import xrange + from twisted.internet.defer import inlineCallbacks -from Tribler.Core.simpledefs import (NTFY_CHANNELCAST, NTFY_TORRENTS, SIGNAL_CHANNEL, - SIGNAL_ON_SEARCH_RESULTS, SIGNAL_TORRENT) -from Tribler.pyipv8.ipv8.database import database_blob from Tribler.Test.Core.Modules.RestApi.base_api_test import AbstractApiTest from Tribler.Test.tools import trial_timeout - - -class FakeSearchManager(object): - """ - This class is used to test whether Tribler starts searching for channels/torrents when a search is performed. 
- """ - - def __init__(self, notifier): - self.notifier = notifier - - def search_for_torrents(self, keywords): - results_dict = {"keywords": keywords, "result_list": []} - self.notifier.notify(SIGNAL_TORRENT, SIGNAL_ON_SEARCH_RESULTS, None, results_dict) - - def search_for_channels(self, keywords): - results_dict = {"keywords": keywords, "result_list": []} - self.notifier.notify(SIGNAL_CHANNEL, SIGNAL_ON_SEARCH_RESULTS, None, results_dict) - - def shutdown(self): - pass +from Tribler.pyipv8.ipv8.database import database_blob class TestSearchEndpoint(AbstractApiTest): - def __init__(self, *args, **kwargs): - super(TestSearchEndpoint, self).__init__(*args, **kwargs) - self.expected_events_messages = [] - - @inlineCallbacks - def setUp(self): - yield super(TestSearchEndpoint, self).setUp() - self.channel_db_handler = self.session.open_dbhandler(NTFY_CHANNELCAST) - self.channel_db_handler._get_my_dispersy_cid = lambda: "myfakedispersyid" - self.torrent_db_handler = self.session.open_dbhandler(NTFY_TORRENTS) - - self.session.add_observer(self.on_search_results_channels, SIGNAL_CHANNEL, [SIGNAL_ON_SEARCH_RESULTS]) - self.session.add_observer(self.on_search_results_torrents, SIGNAL_TORRENT, [SIGNAL_ON_SEARCH_RESULTS]) - - self.results_torrents_called = False - self.results_channels_called = False - - self.search_results_list = [] # List of incoming torrent/channel results - self.expected_num_results_list = [] # List of expected number of results for each item in search_results_list - def setUpPreSession(self): super(TestSearchEndpoint, self).setUpPreSession() self.config.set_chant_enabled(True) - def on_search_results_torrents(self, subject, changetype, objectID, results): - self.search_results_list.append(results['result_list']) - self.results_torrents_called = True - - def on_search_results_channels(self, subject, changetype, objectID, results): - self.search_results_list.append(results['result_list']) - self.results_channels_called = True - - def insert_channels_in_db(self, num): - for i in xrange(0, num): - self.channel_db_handler.on_channel_from_dispersy('rand%d' % i, 42 + i, - 'Test channel %d' % i, 'Test description %d' % i) - - def insert_torrents_in_db(self, num): - for i in xrange(0, num): - self.torrent_db_handler.addExternalTorrentNoDef(str(unichr(97 + i)) * 20, - 'Test %d' % i, [('Test.txt', 1337)], [], 1337) - @trial_timeout(10) - def test_search_no_parameter(self): + def test_search_no_query(self): """ - Testing whether the API returns an error 400 if no search query is passed with the request + Testing whether the API returns an error 400 if no query is passed when doing a search """ - expected_json = {"error": "query parameter missing"} - return self.do_request('search', expected_code=400, expected_json=expected_json) - - def verify_search_results(self, _): - self.assertTrue(self.results_channels_called) - self.assertTrue(self.results_torrents_called) - self.assertEqual(len(self.search_results_list), len(self.expected_num_results_list)) - - for ind in xrange(len(self.search_results_list)): - self.assertEqual(len(self.search_results_list[ind]), self.expected_num_results_list[ind]) + self.should_check_equality = False + return self.do_request('search', expected_code=400) @trial_timeout(10) - def test_search_no_matches(self): + def test_search_wrong_mdtype(self): """ - Testing whether the API finds no channels/torrents when searching if they are not in the database + Testing whether the API returns an error 400 if wrong metadata type is passed in the query """ - 
self.insert_channels_in_db(5)
- self.insert_torrents_in_db(6)
- self.expected_num_results_list = [0, 0]
-
- expected_json = {"queried": True}
- return self.do_request('search?q=tribler', expected_code=200, expected_json=expected_json)\
- .addCallback(self.verify_search_results)
+ self.should_check_equality = False
+ return self.do_request('search?filter=bla&metadata_type=ddd', expected_code=400)

 @trial_timeout(10)
+ @inlineCallbacks
 def test_search(self):
 """
- Testing whether the API finds channels/torrents when searching if there is some inserted data in the database
+ Test that a search query returns the expected torrent and channel results from the metadata store
 """
- self.insert_channels_in_db(5)
- self.insert_torrents_in_db(6)
- self.expected_num_results_list = [5, 6, 0, 0]
+ num_hay = 100
+ with db_session:
+ _ = self.session.lm.mds.ChannelMetadata(title='test', tags='test', subscribed=True)
+ for x in xrange(0, num_hay):
+ self.session.lm.mds.TorrentMetadata(title='hay ' + str(x), infohash=database_blob(
+ bytearray(random.getrandbits(8) for _ in xrange(20))))
+ self.session.lm.mds.TorrentMetadata(title='needle',
+ infohash=database_blob(
+ bytearray(random.getrandbits(8) for _ in xrange(20))))
+
+ self.should_check_equality = False
- self.session.config.get_torrent_search_enabled = lambda: True
- self.session.config.get_channel_search_enabled = lambda: True
- self.session.lm.search_manager = FakeSearchManager(self.session.notifier)
+ result = yield self.do_request('search?filter=needle', expected_code=200)
+ parsed = json.loads(result)
+ self.assertEqual(len(parsed["results"]), 1)
- expected_json = {"queried": True}
- return self.do_request('search?q=test', expected_code=200, expected_json=expected_json)\
- .addCallback(self.verify_search_results)
+ result = yield self.do_request('search?filter=hay', expected_code=200)
+ parsed = json.loads(result)
+ self.assertEqual(len(parsed["results"]), 50)
- @trial_timeout(10)
- def test_search_chant(self):
- """
- Test a search query that should return a few new type channels
- """
- def verify_search_results(_):
- self.assertTrue(self.search_results_list)
+ result = yield self.do_request('search?filter=test&type=channel', expected_code=200)
+ parsed = json.loads(result)
+ self.assertEqual(len(parsed["results"]), 1)
- with db_session:
- my_channel_id = self.session.trustchain_keypair.pub().key_to_bin()
- self.session.lm.mds.ChannelMetadata(public_key=database_blob(my_channel_id), title='test', tags='test')
+ result = yield self.do_request('search?filter=needle&type=torrent', expected_code=200)
+ parsed = json.loads(result)
+ self.assertEqual(parsed["results"][0][u'name'], 'needle')
- self.should_check_equality = False
- self.expected_num_results_list = []
- return self.do_request('search?q=test', expected_code=200).addCallback(verify_search_results)
+ result = yield self.do_request('search?filter=needle&sort_by=name', expected_code=200)
+ parsed = json.loads(result)
+ self.assertEqual(len(parsed["results"]), 1)

 @trial_timeout(10)
 def test_completions_no_query(self):
 """
 Testing whether the API returns an error 400 if no query is passed when getting search completion terms
 """
- expected_json = {"error": "query parameter missing"}
- return self.do_request('search/completions', expected_code=400, expected_json=expected_json)
+ self.should_check_equality = False
+ return self.do_request('search/completions', expected_code=400)

 @trial_timeout(10)
 def test_completions(self):
 """
 Testing whether the API returns the right terms when getting search completion terms
 """
- 
torrent_db_handler = self.session.open_dbhandler(NTFY_TORRENTS) - torrent_db_handler.getAutoCompleteTerms = lambda keyword, max_terms: ["%s %d" % (keyword, ind) - for ind in xrange(max_terms)] + def on_response(response): + json_response = json.loads(response) + self.assertEqual(json_response['completions'], []) - expected_json = {"completions": ["tribler %d" % ind for ind in xrange(5)]} - - return self.do_request('search/completions?q=tribler', expected_code=200, expected_json=expected_json) + self.should_check_equality = False + return self.do_request('search/completions?q=tribler', expected_code=200).addCallback(on_response) diff --git a/Tribler/Test/Core/Modules/RestApi/test_settings_endpoint.py b/Tribler/Test/Core/Modules/RestApi/test_settings_endpoint.py index 8e6f0c1ed37..30a89dc5a52 100644 --- a/Tribler/Test/Core/Modules/RestApi/test_settings_endpoint.py +++ b/Tribler/Test/Core/Modules/RestApi/test_settings_endpoint.py @@ -1,11 +1,13 @@ from __future__ import absolute_import + from six import unichr + from twisted.internet.defer import inlineCallbacks import Tribler.Core.Utilities.json_util as json +from Tribler.Core.DownloadConfig import DownloadConfigInterface from Tribler.Test.Core.Modules.RestApi.base_api_test import AbstractApiTest from Tribler.Test.tools import trial_timeout -from Tribler.Core.DownloadConfig import DownloadConfigInterface class TestSettingsEndpoint(AbstractApiTest): @@ -18,9 +20,8 @@ def verify_settings(self, settings): """ Verify that the expected sections are present. """ - check_section = ['libtorrent', 'mainline_dht', 'torrent_store', 'general', 'torrent_checking', - 'allchannel_community', 'tunnel_community', 'http_api', 'torrent_collecting', 'dispersy', - 'trustchain', 'watch_folder', 'search_community', 'metadata'] + check_section = ['libtorrent', 'general', 'torrent_checking', + 'tunnel_community', 'http_api', 'trustchain', 'watch_folder'] settings_json = json.loads(settings) self.assertTrue(settings_json['settings']) @@ -99,16 +100,12 @@ def test_set_settings(self): download.get_credit_mining = lambda: False self.session.get_downloads = lambda: [download] - old_filter_setting = self.session.config.get_family_filter_enabled() - def verify_response1(_): - self.assertNotEqual(self.session.config.get_family_filter_enabled(), old_filter_setting) self.assertEqual(download.get_seeding_mode(), 'time') self.assertEqual(download.get_seeding_time(), 100) self.should_check_equality = False - post_data = json.dumps({'general': {'family_filter': not old_filter_setting}, - 'libtorrent': {'utp': False, 'max_download_rate': 50}, + post_data = json.dumps({'libtorrent': {'utp': False, 'max_download_rate': 50}, 'download_defaults': {'seeding_mode': 'time', 'seeding_time': 100}}) yield self.do_request('settings', expected_code=200, request_type='POST', post_data=post_data, raw_data=True) \ .addCallback(verify_response1) diff --git a/Tribler/Test/Core/Modules/RestApi/test_statistics_endpoint.py b/Tribler/Test/Core/Modules/RestApi/test_statistics_endpoint.py index 09f308aaba6..977b81a82fe 100644 --- a/Tribler/Test/Core/Modules/RestApi/test_statistics_endpoint.py +++ b/Tribler/Test/Core/Modules/RestApi/test_statistics_endpoint.py @@ -1,11 +1,15 @@ from __future__ import absolute_import +import os + from twisted.internet.defer import inlineCallbacks import Tribler.Core.Utilities.json_util as json +from Tribler.Core.Modules.MetadataStore.store import MetadataStore from Tribler.Test.Core.Modules.RestApi.base_api_test import AbstractApiTest from Tribler.Test.tools import 
trial_timeout from Tribler.pyipv8.ipv8.attestation.trustchain.community import TrustChainCommunity +from Tribler.pyipv8.ipv8.keyvault.crypto import default_eccrypto from Tribler.pyipv8.ipv8.test.mocking.ipv8 import MockIPv8 @@ -23,45 +27,35 @@ def setUp(self): self.mock_ipv8.endpoint.bytes_down = 20 self.session.lm.ipv8 = self.mock_ipv8 self.session.config.set_ipv8_enabled(True) + my_key = default_eccrypto.generate_key(u"curve25519") + self.session.lm.mds = MetadataStore(os.path.join(self.session_base_dir, 'test.db'), self.session_base_dir, + my_key) @inlineCallbacks def tearDown(self): + self.session.lm.mds.shutdown() self.session.lm.ipv8 = None yield self.mock_ipv8.unload() yield super(TestStatisticsEndpoint, self).tearDown() - def setUpPreSession(self): - super(TestStatisticsEndpoint, self).setUpPreSession() - self.config.set_dispersy_enabled(True) - self.config.set_torrent_collecting_enabled(True) - @trial_timeout(10) def test_get_tribler_statistics(self): """ Testing whether the API returns a correct Tribler statistics dictionary when requested """ - def verify_dict(data): - self.assertTrue(json.loads(data)["tribler_statistics"]) - - self.should_check_equality = False - return self.do_request('statistics/tribler', expected_code=200).addCallback(verify_dict) - @trial_timeout(10) - def test_get_dispersy_statistics(self): - """ - Testing whether the API returns a correct Dispersy statistics dictionary when requested - """ def verify_dict(data): - self.assertTrue(json.loads(data)["dispersy_statistics"]) + self.assertIn("tribler_statistics", json.loads(data)) self.should_check_equality = False - return self.do_request('statistics/dispersy', expected_code=200).addCallback(verify_dict) + return self.do_request('statistics/tribler', expected_code=200).addCallback(verify_dict) @trial_timeout(10) def test_get_ipv8_statistics(self): """ Testing whether the API returns a correct Dispersy statistics dictionary when requested """ + def verify_dict(data): self.assertTrue(json.loads(data)["ipv8_statistics"]) diff --git a/Tribler/Test/Core/Modules/RestApi/test_torrentinfo_endpoint.py b/Tribler/Test/Core/Modules/RestApi/test_torrentinfo_endpoint.py index 9a7ecb623a0..c7c19e6b1f0 100644 --- a/Tribler/Test/Core/Modules/RestApi/test_torrentinfo_endpoint.py +++ b/Tribler/Test/Core/Modules/RestApi/test_torrentinfo_endpoint.py @@ -1,25 +1,25 @@ +from __future__ import absolute_import + import os import shutil from binascii import hexlify from urllib import pathname2url, quote_plus -from Tribler.Test.tools import trial_timeout from twisted.internet.defer import inlineCallbacks -from Tribler.Core.TorrentDef import TorrentDef import Tribler.Core.Utilities.json_util as json +from Tribler.Core.TorrentDef import TorrentDef from Tribler.Core.Utilities.network_utils import get_random_port from Tribler.Test.Core.Modules.RestApi.base_api_test import AbstractApiTest from Tribler.Test.Core.base_test import MockObject -from Tribler.Test.common import UBUNTU_1504_INFOHASH, TORRENT_UBUNTU_FILE -from Tribler.Test.test_as_server import TESTS_DATA_DIR +from Tribler.Test.common import TORRENT_UBUNTU_FILE, UBUNTU_1504_INFOHASH +from Tribler.Test.test_as_server import TESTS_DATA_DIR, TESTS_DIR +from Tribler.Test.tools import trial_timeout +SAMPLE_CHANNEL_FILES_DIR = os.path.join(TESTS_DIR, "Core", "data", "sample_channel") -class TestTorrentInfoEndpoint(AbstractApiTest): - def setUpPreSession(self): - super(TestTorrentInfoEndpoint, self).setUpPreSession() - self.config.set_torrent_store_enabled(True) +class 
TestTorrentInfoEndpoint(AbstractApiTest): @inlineCallbacks def test_get_torrentinfo(self): @@ -70,24 +70,11 @@ def get_metainfo_timeout(*args, **kwargs): self.session.lm.ltmgr.get_metainfo = get_metainfo self.session.lm.ltmgr.shutdown = lambda: None yield self.do_request('torrentinfo?uri=%s' % path, expected_code=200).addCallback(verify_valid_dict) - yield self.do_request('torrentinfo?uri=%s' % path, expected_code=200).addCallback(verify_valid_dict) # Cached - yield self.do_request('torrentinfo?uri=%s' % path, expected_code=200).addCallback(verify_valid_dict) # Cached # mdblob file - path_blob = "file:" + pathname2url(os.path.join(TESTS_DATA_DIR, "channel.mdblob")).encode('utf-8') + path_blob = "file:" + pathname2url(os.path.join(SAMPLE_CHANNEL_FILES_DIR, "channel.mdblob")).encode('utf-8') yield self.do_request('torrentinfo?uri=%s' % path_blob, expected_code=200).addCallback(verify_valid_dict) - # invalid mdblob file - path_blob = "file:" + pathname2url(os.path.join(TESTS_DATA_DIR, "bad.mdblob")).encode('utf-8') - yield self.do_request('torrentinfo?uri=%s' % path_blob, expected_code=500) - - # non-torrent mdblob file - path_blob = "file:" + pathname2url(os.path.join(TESTS_DATA_DIR, "delete.mdblob")).encode('utf-8') - yield self.do_request('torrentinfo?uri=%s' % path_blob, expected_code=500) - - self.session.get_collected_torrent = lambda _: 'a8fdsafsdjlfdsafs{}{{{[][]][' # invalid torrent file - yield self.do_request('torrentinfo?uri=%s' % path, expected_code=500) - path = 'magnet:?xt=urn:ed2k:354B15E68FB8F36D7CD88FF94116CDC1' # No infohash yield self.do_request('torrentinfo?uri=%s' % path, expected_code=400) @@ -95,11 +82,7 @@ def get_metainfo_timeout(*args, **kwargs): self.session.lm.ltmgr.get_metainfo = get_metainfo_timeout yield self.do_request('torrentinfo?uri=%s' % path, expected_code=408) - def mocked_save_torrent(*_): - raise TypeError() - self.session.lm.ltmgr.get_metainfo = get_metainfo - self.session.save_collected_torrent = mocked_save_torrent yield self.do_request('torrentinfo?uri=%s' % path, expected_code=200).addCallback(verify_valid_dict) path = 'http://fdsafksdlafdslkdksdlfjs9fsafasdf7lkdzz32.n38/324.torrent' diff --git a/Tribler/Test/Core/Modules/RestApi/test_torrents_endpoint.py b/Tribler/Test/Core/Modules/RestApi/test_torrents_endpoint.py deleted file mode 100644 index 6a2958335c0..00000000000 --- a/Tribler/Test/Core/Modules/RestApi/test_torrents_endpoint.py +++ /dev/null @@ -1,270 +0,0 @@ -from __future__ import absolute_import - -from binascii import hexlify, unhexlify -import time - -import six -from pony.orm import db_session -from twisted.internet.defer import inlineCallbacks - -import Tribler.Core.Utilities.json_util as json -from Tribler.Core.TorrentChecker.torrent_checker import TorrentChecker -from Tribler.Core.TorrentDef import TorrentDef -from Tribler.Core.Utilities.network_utils import get_random_port -from Tribler.Core.simpledefs import NTFY_CHANNELCAST, NTFY_TORRENTS -from Tribler.Test.Core.Modules.RestApi.base_api_test import AbstractApiTest -from Tribler.Test.Core.base_test import MockObject -from Tribler.Test.tools import trial_timeout -from Tribler.Test.util.Tracker.HTTPTracker import HTTPTracker -from Tribler.Test.util.Tracker.UDPTracker import UDPTracker - - -class TestTorrentsEndpoint(AbstractApiTest): - - def setUpPreSession(self): - super(TestTorrentsEndpoint, self).setUpPreSession() - self.config.set_chant_enabled(True) - - @trial_timeout(10) - def test_get_random_torrents(self): - """ - Testing whether random torrents are returned if random 
torrents are fetched - """ - def verify_torrents(results): - json_results = json.loads(results) - self.assertEqual(len(json_results['torrents']), 2) - - channel_db_handler = self.session.open_dbhandler(NTFY_CHANNELCAST) - channel_db_handler._get_my_dispersy_cid = lambda: "myfakedispersyid" - channel_id = channel_db_handler.on_channel_from_dispersy('rand', 42, 'Fancy channel', 'Fancy description') - - torrent_list = [ - [channel_id, 1, 1, unhexlify('a' * 40), 1460000000, "ubuntu-torrent.iso", [['file1.txt', 42]], []], - [channel_id, 2, 2, unhexlify('b' * 40), 1470000000, "ubuntu2-torrent.iso", [['file2.txt', 42]], []], - [channel_id, 3, 3, unhexlify('c' * 40), 1480000000, "badterm", [['file1.txt', 42]], []], - [channel_id, 4, 4, unhexlify('d' * 40), 1490000000, "badterm", [['file2.txt', 42]], []], - [channel_id, 5, 5, unhexlify('e' * 40), 1500000000, "badterm", [['file3.txt', 42]], []], - ] - channel_db_handler.on_torrents_from_dispersy(torrent_list) - - self.should_check_equality = False - return self.do_request('torrents/random?limit=5', expected_code=200).addCallback(verify_torrents) - - @trial_timeout(10) - def test_random_torrents_negative(self): - """ - Testing whether error 400 is returned when a negative limit is passed to the request to fetch random torrents - """ - expected_json = {"error": "the limit parameter must be a positive number"} - return self.do_request('torrents/random?limit=-5', expected_code=400, expected_json=expected_json) - - @trial_timeout(10) - def test_info_torrent_404(self): - """ - Test whether we get an error 404 if we are fetching info from a non-existing torrent - """ - self.should_check_equality = False - return self.do_request('torrents/%s' % ('a' * 40), expected_code=404) - - @trial_timeout(10) - def test_info_torrent_chant(self): - """ - Testing whether the API returns the right information for a request of a specific chant-managed torrent - """ - infohash_hex = six.text_type(hexlify(b'a' * 20)) - with db_session: - self.session.lm.mds.TorrentMetadata(infohash=infohash_hex.decode('hex'), - title=u'ubuntu-torrent.iso', size=42) - return self.do_request('torrents/%s' % hexlify(b'a' * 20), expected_json={ - u"id": u'', - u"category": u"", - u"infohash": six.text_type(hexlify(b'a' * 20)), - u"name": u'ubuntu-torrent.iso', - u"size": 42, - u"trackers": [], - u"num_seeders": 0, - u"num_leechers": 0, - u"last_tracker_check": 0, - u'files': [] - }) - - @trial_timeout(10) - def test_info_torrent(self): - """ - Testing whether the API returns the right information for a request of a specific torrent - """ - torrent_db = self.session.open_dbhandler(NTFY_TORRENTS) - torrent_db.addExternalTorrentNoDef('a' * 20, 'ubuntu-torrent.iso', [['file1.txt', 42]], - ('udp://trackerurl.com:1234/announce',), time.time()) - - return self.do_request('torrents/%s' % hexlify(b'a' * 20), expected_json={ - u"id": 1, - u"infohash": six.text_type(hexlify(b'a' * 20)), - u"name": u'ubuntu-torrent.iso', - u"size": 42, - u"category": u"Compressed", - u"num_seeders": 0, - u"num_leechers": 0, - u"last_tracker_check": 0, - u"files": [{u"path": u"file1.txt", u"size": 42}], - u"trackers": [u"DHT", u"udp://trackerurl.com:1234"] - }) - - -class TestTorrentTrackersEndpoint(AbstractApiTest): - - @trial_timeout(10) - def test_get_torrent_trackers_404(self): - """ - Testing whether we get an error 404 if we are fetching the trackers of a non-existent torrent - """ - self.should_check_equality = False - return self.do_request('torrents/%s/trackers' % ('a' * 40), expected_code=404) - - 
@trial_timeout(10) - def test_get_torrent_trackers(self): - """ - Testing whether fetching the trackers of a non-existent torrent is successful - """ - torrent_db = self.session.open_dbhandler(NTFY_TORRENTS) - torrent_db.addExternalTorrentNoDef('a' * 20, 'ubuntu-torrent.iso', [['file1.txt', 42]], - ('udp://trackerurl.com:1234/announce', - 'http://trackerurl.com:4567/announce'), time.time()) - - def verify_trackers(trackers): - self.assertIn('DHT', trackers) - self.assertIn('udp://trackerurl.com:1234', trackers) - self.assertIn('http://trackerurl.com:4567/announce', trackers) - - self.should_check_equality = False - return self.do_request('torrents/%s/trackers' % hexlify(b'a' * 20), - expected_code=200).addCallback(verify_trackers) - - -class TestTorrentHealthEndpoint(AbstractApiTest): - - def setUpPreSession(self): - super(TestTorrentHealthEndpoint, self).setUpPreSession() - self.config.set_chant_enabled(True) - - @inlineCallbacks - def setUp(self): - yield super(TestTorrentHealthEndpoint, self).setUp() - - min_base_port, max_base_port = self.get_bucket_range_port() - - self.udp_port = get_random_port(min_port=min_base_port, max_port=max_base_port) - self.udp_tracker = UDPTracker(self.udp_port) - - self.http_port = get_random_port(min_port=min_base_port, max_port=max_base_port) - self.http_tracker = HTTPTracker(self.http_port) - - @inlineCallbacks - def tearDown(self): - self.session.lm.ltmgr = None - if self.udp_tracker: - yield self.udp_tracker.stop() - if self.http_tracker: - yield self.http_tracker.stop() - yield super(TestTorrentHealthEndpoint, self).tearDown() - - @trial_timeout(20) - @inlineCallbacks - def test_check_torrent_health(self): - """ - Test the endpoint to fetch the health of a torrent - """ - torrent_db = self.session.open_dbhandler(NTFY_TORRENTS) - torrent_db.addExternalTorrentNoDef('a' * 20, 'ubuntu-torrent.iso', [['file1.txt', 42]], - ('udp://localhost:%s/announce' % self.udp_port, - 'http://localhost:%s/announce' % self.http_port), time.time()) - - url = 'torrents/%s/health?timeout=10&refresh=1' % hexlify(b'a' * 20) - - self.should_check_equality = False - yield self.do_request(url, expected_code=400, request_type='GET') # No torrent checker - - def call_cb(infohash, callback, **_): - callback({"seeders": 1, "leechers": 2}) - - # Initialize the torrent checker - self.session.lm.torrent_checker = TorrentChecker(self.session) - self.session.lm.torrent_checker.initialize() - self.session.lm.ltmgr = MockObject() - self.session.lm.ltmgr.get_metainfo = call_cb - - yield self.do_request('torrents/%s/health' % ('f' * 40), expected_code=404, request_type='GET') - - def verify_response_no_trackers(response): - json_response = json.loads(response) - self.assertTrue('DHT' in json_response['health']) - - def verify_response_with_trackers(response): - hex_as = hexlify(b'a' * 20) - json_response = json.loads(response) - expected_dict = {u"health": - {u"DHT": - {u"leechers": 2, u"seeders": 1, u"infohash": hex_as}, - u"udp://localhost:%s" % self.udp_port: - {u"leechers": 20, u"seeders": 10, u"infohash": hex_as}, - u"http://localhost:%s/announce" % self.http_port: - {u"leechers": 30, u"seeders": 20, u"infohash": hex_as}}} - self.assertDictEqual(json_response, expected_dict) - - yield self.do_request(url, expected_code=200, request_type='GET').addCallback(verify_response_no_trackers) - - self.udp_tracker.start() - self.udp_tracker.tracker_info.add_info_about_infohash('a' * 20, 10, 20) - - self.http_tracker.start() - self.http_tracker.tracker_info.add_info_about_infohash('a' * 20, 20, 
30) - - yield self.do_request(url, expected_code=200, request_type='GET').addCallback(verify_response_with_trackers) - - @trial_timeout(20) - @inlineCallbacks - def test_check_torrent_health_chant(self): - """ - Test the endpoint to fetch the health of a chant-managed, infohash-only torrent - """ - infohash = 'a' * 20 - tracker_url = 'udp://localhost:%s/announce' % self.udp_port - - meta_info = {"info": {"name": "my_torrent", "piece length": 42, - "root hash": infohash, "files": [], - "url-list": tracker_url}} - tdef = TorrentDef.load_from_dict(meta_info) - - with db_session: - self.session.lm.mds.TorrentMetadata(infohash=tdef.infohash, - title='ubuntu-torrent.iso', - size=42, - tracker_info=tracker_url) - url = 'torrents/%s/health?timeout=10&refresh=1' % tdef.infohash.encode('hex') - self.should_check_equality = False - - def fake_get_metainfo(_, callback, timeout=10, timeout_callback=None, notify=True): - meta_info_extended = meta_info.copy() - meta_info_extended['seeders'] = 12 - meta_info_extended['leechers'] = 11 - callback(meta_info_extended) - - # Initialize the torrent checker - self.session.lm.torrent_checker = TorrentChecker(self.session) - self.session.lm.torrent_checker.initialize() - self.session.lm.ltmgr = MockObject() - self.session.lm.ltmgr.get_metainfo = fake_get_metainfo - - def verify_response_no_trackers(response): - json_response = json.loads(response) - expected_dict = {u"health": - {u"DHT": - {u"leechers": 11, u"seeders": 12, - u"infohash": six.text_type(tdef.infohash.encode('hex'))}}} - self.assertDictEqual(json_response, expected_dict) - - # Left for compatibility with other tests in this object - self.udp_tracker.start() - self.http_tracker.start() - # TODO: add test for DHT timeout - yield self.do_request(url, expected_code=200, request_type='GET').addCallback(verify_response_no_trackers) diff --git a/Tribler/Test/Core/Modules/RestApi/test_util.py b/Tribler/Test/Core/Modules/RestApi/test_util.py index 2a8eaa29470..e5fd3c5109e 100644 --- a/Tribler/Test/Core/Modules/RestApi/test_util.py +++ b/Tribler/Test/Core/Modules/RestApi/test_util.py @@ -1,11 +1,10 @@ # -*- coding:utf-8 -*- -import struct +from __future__ import absolute_import from Tribler.Core.Config.tribler_config import TriblerConfig -from Tribler.Core.Modules.restapi.util import convert_search_torrent_to_json, convert_db_channel_to_json,\ - get_parameter, fix_unicode_array, fix_unicode_dict +from Tribler.Core.Modules.restapi.util import fix_unicode_array, fix_unicode_dict, get_parameter from Tribler.Core.Session import Session -from Tribler.Test.Core.base_test import TriblerCoreTest, MockObject +from Tribler.Test.Core.base_test import MockObject, TriblerCoreTest class TestRestApiUtil(TriblerCoreTest): @@ -23,41 +22,6 @@ def setUp(self): def tearDown(self): TriblerCoreTest.tearDown(self) - def test_convert_torrent_to_json_dict(self): - """ - Test whether the conversion from remote torrent dict to json works - """ - input = {'torrent_id': 42, 'infohash': 'a', 'name': 'test torrent', 'length': 43, - 'category': 'other', 'num_seeders': 1, 'num_leechers': 2} - output = {'id': 42, 'infohash': 'a'.encode('hex'), 'name': 'test torrent', 'size': 43, 'category': 'other', - 'num_seeders': 1, 'num_leechers': 2, 'last_tracker_check': 0} - self.assertEqual(convert_search_torrent_to_json(input), output) - - input['name'] = None - output['name'] = 'Unnamed torrent' - self.assertEqual(convert_search_torrent_to_json(input), output) - - input['name'] = ' \t\n\n\t \t' - output['name'] = 'Unnamed torrent' - 
self.assertEqual(convert_search_torrent_to_json(input), output) - - def test_convert_torrent_to_json_tuple(self): - """ - Test whether the conversion from db torrent tuple to json works - """ - input_tuple = (1, '2', 'abc', 4, 5, 6, 7, 8, 0, 0.123) - output = {'id': 1, 'infohash': '2'.encode('hex'), 'name': 'abc', 'size': 4, 'category': 5, - 'num_seeders': 6, 'num_leechers': 7, 'last_tracker_check': 8, 'relevance_score': 0.123} - self.assertEqual(convert_search_torrent_to_json(input_tuple), output) - - input_tuple = (1, '2', None, 4, 5, 6, 7, 8, 0, 0.123) - output['name'] = 'Unnamed torrent' - self.assertEqual(convert_search_torrent_to_json(input_tuple), output) - - input_tuple = (1, '2', ' \t\n\n\t \t', 4, 5, 6, 7, 8, 0, 0.123) - output['name'] = 'Unnamed torrent' - self.assertEqual(convert_search_torrent_to_json(input_tuple), output) - def test_get_parameter(self): """ Testing the get_parameters method in REST API util class @@ -65,15 +29,6 @@ def test_get_parameter(self): self.assertEqual(42, get_parameter({'test': [42]}, 'test')) self.assertEqual(None, get_parameter({}, 'test')) - def test_convert_db_channel_to_json(self): - """ - Test whether the conversion from a db channel tuple to json works - """ - input_tuple = (1, 'aaaa'.decode('hex'), 'test', 'desc', 42, 43, 44, 2, 1234, 0.123) - output = {'id': 1, 'dispersy_cid': 'aaaa', 'name': 'test', 'description': 'desc', 'torrents': 42, 'votes': 43, - 'spam': 44, 'subscribed': True, 'modified': 1234, 'relevance_score': 0.123} - self.assertEqual(convert_db_channel_to_json(input_tuple, include_rel_score=True), output) - def test_fix_unicode_array(self): """ Testing the fix of a unicode array diff --git a/Tribler/Test/Core/Modules/test_gigachannel_manager.py b/Tribler/Test/Core/Modules/test_gigachannel_manager.py new file mode 100644 index 00000000000..ff098e1cd40 --- /dev/null +++ b/Tribler/Test/Core/Modules/test_gigachannel_manager.py @@ -0,0 +1,174 @@ +from __future__ import absolute_import + +from datetime import datetime + +from pony.orm import db_session + +from twisted.internet.defer import Deferred, inlineCallbacks + +from Tribler.Core.Modules.MetadataStore.OrmBindings.channel_node import NEW +from Tribler.Core.Modules.MetadataStore.store import MetadataStore +from Tribler.Core.Modules.gigachannel_manager import GigaChannelManager +from Tribler.Core.TorrentDef import TorrentDef +from Tribler.Test.Core.base_test import MockObject, TriblerCoreTest +from Tribler.Test.common import TORRENT_UBUNTU_FILE +from Tribler.pyipv8.ipv8.database import database_blob +from Tribler.pyipv8.ipv8.keyvault.crypto import default_eccrypto + + +class TestGigaChannelManager(TriblerCoreTest): + + @db_session + def generate_personal_channel(self): + chan = self.mock_session.lm.mds.ChannelMetadata.create_channel(title="my test chan", description="test") + tdef = TorrentDef.load(TORRENT_UBUNTU_FILE) + chan.add_torrent_to_channel(tdef, None) + return chan + + @inlineCallbacks + def setUp(self): + yield super(TestGigaChannelManager, self).setUp() + self.torrent_template = { + "title": "", + "infohash": "", + "torrent_date": datetime(1970, 1, 1), + "tags": "video" + } + my_key = default_eccrypto.generate_key(u"curve25519") + self.mock_session = MockObject() + self.mock_session.lm = MockObject() + self.mock_session.lm.mds = MetadataStore(":memory:", self.session_base_dir, my_key) + + self.chanman = GigaChannelManager(self.mock_session) + self.torrents_added = 0 + + @inlineCallbacks + def tearDown(self): + self.mock_session.lm.mds.shutdown() + yield 
super(TestGigaChannelManager, self).tearDown()
+
+ @db_session
+ def test_update_my_channel(self):
+ chan = self.generate_personal_channel()
+ chan.commit_channel_torrent()
+
+ def mock_add(*_):
+ self.torrents_added = 1
+
+ self.mock_session.lm.add = mock_add
+
+ # Check that the personal channel is added for download on startup
+ self.mock_session.has_download = lambda _: False
+ self.chanman.service_channels = lambda: None # Disable looping call
+ self.chanman.start()
+ self.chanman.check_channels_updates()
+ self.assertTrue(self.torrents_added)
+ self.chanman.shutdown()
+
+ # Check that an already-added personal channel is skipped
+ self.mock_session.has_download = lambda x: x == str(chan.infohash)
+ self.torrents_added = False
+ self.chanman.start()
+ self.chanman.check_channels_updates()
+ self.assertFalse(self.torrents_added)
+ self.chanman.shutdown()
+
+ def test_check_channels_updates(self):
+ with db_session:
+ chan = self.generate_personal_channel()
+ chan.commit_channel_torrent()
+ chan.local_version -= 1
+ _ = self.mock_session.lm.mds.ChannelMetadata(title="bla", public_key=database_blob(str(123)),
+ signature=database_blob(str(345)), skip_key_check=True,
+ timestamp=123, local_version=123, subscribed=True)
+ _ = self.mock_session.lm.mds.ChannelMetadata(title="bla", public_key=database_blob(str(124)),
+ signature=database_blob(str(346)), skip_key_check=True,
+ timestamp=123, local_version=122, subscribed=False)
+ self.mock_session.has_download = lambda _: False
+ self.torrents_added = 0
+
+ def mock_dl(_):
+ self.torrents_added += 1
+
+ self.chanman.download_channel = mock_dl
+
+ self.chanman.check_channels_updates()
+ # download_channel should only fire once - for the original subscribed channel
+ self.assertEqual(1, self.torrents_added)
+
+ def test_remove_cruft_channels(self):
+ with db_session:
+ # Our personal chan is created, then updated, so there are 2 files on disk and there are 2 torrents:
+ # the old one and the new one
+ my_chan = self.generate_personal_channel()
+ my_chan.commit_channel_torrent()
+ my_chan_old_infohash = my_chan.infohash
+ _ = self.mock_session.lm.mds.TorrentMetadata.from_dict(dict(self.torrent_template, status=NEW))
+ my_chan.commit_channel_torrent()
+
+ # Now we add an external channel we are subscribed to. 
+ chan2 = self.mock_session.lm.mds.ChannelMetadata(title="bla1", infohash=database_blob(str(123)),
+ public_key=database_blob(str(123)),
+ signature=database_blob(str(345)), skip_key_check=True,
+ timestamp=123, local_version=123, subscribed=True)
+
+ # Another external channel, but there is a catch: we recently unsubscribed from it
+ chan3 = self.mock_session.lm.mds.ChannelMetadata(title="bla2", infohash=database_blob(str(124)),
+ public_key=database_blob(str(124)),
+ signature=database_blob(str(346)), skip_key_check=True,
+ timestamp=123, local_version=123, subscribed=False)
+
+ class MockDownload(MockObject):
+ def __init__(self, infohash, dirname):
+ self.infohash = infohash
+ self.dirname = dirname
+ self.tdef = MockObject()
+ self.tdef.get_name_utf8 = lambda: self.dirname
+ self.tdef.get_infohash = lambda: infohash
+
+ def get_def(self):
+ a = MockObject()
+ a.infohash = self.infohash
+ a.get_name_utf8 = lambda: self.dirname
+ a.get_infohash = lambda: self.infohash
+ return a
+
+ # Double conversion is required to make sure that buffer signatures are not the same
+ mock_dl_list = [
+ # Downloads for our personal channel
+ MockDownload(database_blob(bytes(my_chan_old_infohash)), my_chan.dir_name),
+ MockDownload(database_blob(bytes(my_chan.infohash)), my_chan.dir_name),
+
+ # Downloads for the updated external channel: "old ones" and "recent"
+ MockDownload(database_blob(bytes(str(12331244))), chan2.dir_name),
+ MockDownload(database_blob(bytes(chan2.infohash)), chan2.dir_name),
+
+ # Downloads for the unsubscribed external channel
+ MockDownload(database_blob(bytes(str(1231551))), chan3.dir_name),
+ MockDownload(database_blob(bytes(chan3.infohash)), chan3.dir_name),
+ # Orphaned download
+ MockDownload(database_blob(str(333)), u"blabla")]
+
+ def mock_get_channel_downloads():
+ return mock_dl_list
+
+ self.remove_list = []
+
+ def mock_remove_download(download, remove_content=False):
+ d = Deferred()
+ d.callback(None)
+ self.remove_list.append((download, remove_content))
+ return d
+
+ self.chanman.session.remove_download = mock_remove_download
+
+ self.mock_session.lm.get_channel_downloads = mock_get_channel_downloads
+ self.chanman.remove_cruft_channels()
+ # We want to remove torrents for (a) deleted channels and (b) unsubscribed channels
+ self.assertItemsEqual(self.remove_list,
+ [(mock_dl_list[0], False),
+ (mock_dl_list[2], False),
+ (mock_dl_list[4], True),
+ (mock_dl_list[5], True),
+ (mock_dl_list[6], True)])
diff --git a/Tribler/Test/Core/Modules/test_tracker_manager.py b/Tribler/Test/Core/Modules/test_tracker_manager.py
index 5be1b9cdece..055957dbd3a 100644
--- a/Tribler/Test/Core/Modules/test_tracker_manager.py
+++ b/Tribler/Test/Core/Modules/test_tracker_manager.py
@@ -1,33 +1,17 @@
-from twisted.internet.defer import inlineCallbacks
+from __future__ import absolute_import
-from Tribler.Core.Config.tribler_config import TriblerConfig
-from Tribler.Core.Modules.tracker_manager import TrackerManager
-from Tribler.Core.Session import Session
-from Tribler.Test.Core.base_test import TriblerCoreTest
+from Tribler.Test.test_as_server import TestAsServer
-class TestTrackerManager(TriblerCoreTest):
+class TestTrackerManager(TestAsServer):
 def setUpPreSession(self):
- self.config = TriblerConfig()
- self.config.set_state_dir(self.getStateDir())
-
- @inlineCallbacks
- def setUp(self):
- yield super(TestTrackerManager, self).setUp()
-
- self.setUpPreSession()
- self.session = Session(self.config)
- self.session.start_database()
- self.tracker_manager = 
TrackerManager(self.session) - - @inlineCallbacks - def tearDown(self): - if self.session is not None: - yield self.session.shutdown() - assert self.session.has_shutdown() - self.session = None - yield super(TestTrackerManager, self).tearDown() + super(TestTrackerManager, self).setUpPreSession() + self.config.set_chant_enabled(True) + + @property + def tracker_manager(self): + return self.session.lm.tracker_manager def test_add_tracker(self): """ diff --git a/Tribler/Test/Core/Modules/test_watch_folder.py b/Tribler/Test/Core/Modules/test_watch_folder.py index e6074029d7e..64222ff0210 100644 --- a/Tribler/Test/Core/Modules/test_watch_folder.py +++ b/Tribler/Test/Core/Modules/test_watch_folder.py @@ -1,7 +1,9 @@ +from __future__ import absolute_import + import os import shutil -from Tribler.Test.common import TORRENT_UBUNTU_FILE, TESTS_DATA_DIR +from Tribler.Test.common import TESTS_DATA_DIR, TORRENT_UBUNTU_FILE from Tribler.Test.test_as_server import TestAsServer @@ -11,7 +13,6 @@ def setUpPreSession(self): super(TestWatchFolder, self).setUpPreSession() self.config.set_libtorrent_enabled(True) self.config.set_watch_folder_enabled(True) - self.config.set_dispersy_enabled(True) self.watch_dir = os.path.join(self.session_base_dir, 'watch') os.mkdir(self.watch_dir) diff --git a/Tribler/Test/Core/TFTP/__init__.py b/Tribler/Test/Core/TFTP/__init__.py deleted file mode 100644 index ca1d8f0d09a..00000000000 --- a/Tribler/Test/Core/TFTP/__init__.py +++ /dev/null @@ -1,3 +0,0 @@ -""" -This package contains tests for the TFTP mechanism in Tribler. -""" diff --git a/Tribler/Test/Core/TFTP/test_tftp_handler.py b/Tribler/Test/Core/TFTP/test_tftp_handler.py deleted file mode 100644 index 40809a81f9a..00000000000 --- a/Tribler/Test/Core/TFTP/test_tftp_handler.py +++ /dev/null @@ -1,227 +0,0 @@ -from nose.tools import raises -from twisted.internet.defer import inlineCallbacks - -from Tribler.Core.TFTP.exception import FileNotFound -from Tribler.Core.TFTP.handler import TftpHandler, METADATA_PREFIX -from Tribler.Core.TFTP.packet import OPCODE_OACK, OPCODE_ERROR, OPCODE_RRQ -from Tribler.Test.Core.base_test import TriblerCoreTest, MockObject - - -class TestTFTPHandler(TriblerCoreTest): - """ - This class contains tests for the TFTP handler class. 
- """ - - @inlineCallbacks - def setUp(self): - yield TriblerCoreTest.setUp(self) - self.handler = TftpHandler(None, None, None) - - @inlineCallbacks - def tearDown(self): - self.handler.shutdown_task_manager() - yield TriblerCoreTest.tearDown(self) - - def test_download_file_not_running(self): - """ - Testing whether we do nothing if we are not running a session - """ - def mocked_add_new_session(_): - raise RuntimeError("_add_new_session not be called") - - self.handler._add_new_session = mocked_add_new_session - self.handler.download_file("test", "127.0.0.1", 1234) - - def test_check_session_timeout(self): - """ - Testing whether we fail if we exceed our maximum amount of retries - """ - mock_session = MockObject() - mock_session.retries = 2 - mock_session.timeout = 1 - mock_session.last_contact_time = 2 - self.handler._max_retries = 1 - self.assertTrue(self.handler._check_session_timeout(mock_session)) - - def test_schedule_callback_processing(self): - """ - Testing whether scheduling a TFTP callback works correctly - """ - self.assertFalse(self.handler.is_pending_task_active("tftp_process_callback")) - self.handler._callback_scheduled = True - self.handler._schedule_callback_processing() - self.assertFalse(self.handler.is_pending_task_active("tftp_process_callback")) - - def test_cleanup_session(self): - """ - Testing whether a tftp session is correctly cleaned up - """ - self.handler._session_id_dict["c"] = 1 - self.handler._session_dict = {"abc": "test"} - self.handler._cleanup_session("abc") - self.assertFalse('c' in self.handler._session_id_dict) - - def test_data_came_in(self): - """ - Testing whether we do nothing when data comes in and the handler is not running - """ - def mocked_process_packet(_dummy1, _dummy2): - raise RuntimeError("_process_packet may not be called") - - self.handler._process_packet = mocked_process_packet - self.handler._is_running = False - self.handler.data_came_in(None, None) - - def test_data_came_in_invalid_candidate(self): - """ - Testing whether we do nothing when data comes in from an invalid candidate - """ - setattr(self.handler, "_process_packet", - lambda x, y: (_ for _ in ()).throw(RuntimeError("_process_packet may not be called"))) - self.handler.data_came_in(('182.30.65.219', 0), None) - - def test_handle_new_request_no_metadata(self): - """ - When the metadata_store from LaunchManyCore is not available, return - from the function rather than trying to load the metadata. - - :return: - """ - # Make sure the packet appears to have the correct attributes - fake_packet = {"opcode": OPCODE_RRQ, - "file_name": METADATA_PREFIX + "abc", - "options": {"blksize": 1, - "timeout": 1}, - "session_id": 1} - self.handler._load_metadata = lambda _: self.fail("This line should not be called") - - def test_function(): - test_function.is_called = True - return False - test_function.is_called = False - self.handler.session = MockObject() - self.handler.session.config = MockObject() - self.handler.session.config.get_metadata_enabled = test_function - - self.handler._handle_new_request("123", "456", fake_packet) - self.assertTrue(test_function.is_called) - - def test_handle_new_request_no_torrent_store(self): - """ - When the torrent_store from LaunchManyCore is not available, return - from the function rather than trying to load the metadata. 
- - :return: - """ - self.handler.session = MockObject() - # Make sure the packet appears to have the correct attributes - fake_packet = {"opcode": OPCODE_RRQ, - "file_name": "abc", - "options": {"blksize": 1, - "timeout": 1}, - "session_id": 1} - self.handler._load_metadata = lambda _: self.fail("This line should not be called") - - def test_function(): - test_function.is_called = True - return False - test_function.is_called = False - self.handler.session.config = MockObject() - self.handler.session.config.get_torrent_store_enabled = test_function - - self.handler._handle_new_request("123", "456", fake_packet) - self.assertTrue(test_function.is_called) - - @raises(FileNotFound) - def test_load_metadata_not_found(self): - """ - Testing whether a FileNotFound exception is raised when metadata cannot be found - """ - self.handler.session = MockObject() - self.handler.session.lm = MockObject() - self.handler.session.lm.metadata_store = MockObject() - self.handler.session.lm.metadata_store.get = lambda _: None - self.handler._load_metadata("abc") - - @raises(FileNotFound) - def test_load_torrent_not_found(self): - """ - Testing whether a FileNotFound exception is raised when a torrent cannot be found - """ - self.handler.session = MockObject() - self.handler.session.lm = MockObject() - self.handler.session.lm.torrent_store = MockObject() - self.handler.session.lm.torrent_store.get = lambda _: None - self.handler._load_torrent("abc") - - def test_handle_packet_as_receiver(self): - """ - Testing the handle_packet_as_receiver method - """ - def mocked_handle_error(_dummy1, _dummy2, error_msg=None): - mocked_handle_error.called = True - - mocked_handle_error.called = False - self.handler._handle_error = mocked_handle_error - - mock_session = MockObject() - mock_session.last_received_packet = None - mock_session.block_size = 42 - mock_session.timeout = 44 - packet = {'opcode': OPCODE_OACK, 'options': {'blksize': 43, 'timeout': 45}} - self.handler._handle_packet_as_receiver(mock_session, packet) - self.assertTrue(mocked_handle_error.called) - - mocked_handle_error.called = False - packet['options']['blksize'] = 42 - self.handler._handle_packet_as_receiver(mock_session, packet) - self.assertTrue(mocked_handle_error.called) - - mock_session.last_received_packet = True - mocked_handle_error.called = False - self.handler._handle_packet_as_receiver(mock_session, packet) - self.assertTrue(mocked_handle_error.called) - - packet['options']['timeout'] = 44 - packet['opcode'] = OPCODE_ERROR - mocked_handle_error.called = False - self.handler._handle_packet_as_receiver(mock_session, packet) - self.assertTrue(mocked_handle_error.called) - - def test_handle_packet_as_sender(self): - """ - Testing the handle_packet_as_sender method - """ - def mocked_handle_error(_dummy1, _dummy2, error_msg=None): - mocked_handle_error.called = True - - mocked_handle_error.called = False - self.handler._handle_error = mocked_handle_error - - packet = {'opcode': OPCODE_ERROR} - self.handler._handle_packet_as_sender(None, packet) - self.assertTrue(mocked_handle_error.called) - - def test_handle_error(self): - """ - Testing the error handling of a tftp handler - """ - mock_session = MockObject() - mock_session.is_failed = False - self.handler._send_error_packet = lambda _dummy1, _dummy2, _dummy3: None - self.handler._handle_error(mock_session, None) - self.assertTrue(mock_session.is_failed) - - def test_send_error_packet(self): - """ - Testing whether a correct error message is sent in the tftp handler - """ - def 
mocked_send_packet(_, packet): - self.assertEqual(packet['session_id'], 42) - self.assertEqual(packet['error_code'], 43) - self.assertEqual(packet['error_msg'], "test") - - self.handler._send_packet = mocked_send_packet - mock_session = MockObject() - mock_session.session_id = 42 - self.handler._send_error_packet(mock_session, 43, "test") diff --git a/Tribler/Test/Core/TFTP/test_tftp_packet.py b/Tribler/Test/Core/TFTP/test_tftp_packet.py deleted file mode 100644 index ab1cd71d054..00000000000 --- a/Tribler/Test/Core/TFTP/test_tftp_packet.py +++ /dev/null @@ -1,104 +0,0 @@ -from nose.tools import raises - -from Tribler.Core.TFTP.exception import InvalidStringException, InvalidPacketException -from Tribler.Core.TFTP.packet import _get_string, _decode_options, _decode_data, _decode_ack, _decode_error, \ - decode_packet, OPCODE_ERROR, encode_packet -from Tribler.Test.Core.base_test import TriblerCoreTest - - -class TestTFTPPacket(TriblerCoreTest): - """ - This class contains tests for the TFTP packet class. - """ - - @raises(InvalidStringException) - def test_get_string_no_end(self): - """ - Testing whether the get_string method raises InvalidStringException when no zero terminator is found - """ - _get_string("", 0) - - @raises(InvalidPacketException) - def test_decode_options_no_option(self): - """ - Testing whether decoding the options raises InvalidPacketException if no options are found - """ - _decode_options({}, "\0a\0", 0) - - @raises(InvalidPacketException) - def test_decode_options_no_value(self): - """ - Testing whether decoding the options raises InvalidPacketException if no value is found - """ - _decode_options({}, "b\0\0", 0) - - @raises(InvalidPacketException) - def test_decode_options_unknown(self): - """ - Testing whether decoding the options raises InvalidPacketException if an invalid option is found - """ - _decode_options({}, "b\0a\0", 0) - - @raises(InvalidPacketException) - def test_decode_options_invalid(self): - """ - Testing whether decoding the options raises InvalidPacketException if an invalid option is found - """ - _decode_options({}, "blksize\0a\0", 0) - - @raises(InvalidPacketException) - def test_decode_data(self): - """ - Testing whether an InvalidPacketException is raised when our incoming data is too small - """ - _decode_data(None, "aa", 42) - - @raises(InvalidPacketException) - def test_decode_ack(self): - """ - Testing whether an InvalidPacketException is raised when our incoming ack has an invalid size - """ - _decode_ack(None, "aa", 42) - - @raises(InvalidPacketException) - def test_decode_error_too_small(self): - """ - Testing whether an InvalidPacketException is raised when our incoming error has an invalid size - """ - _decode_error(None, "aa", 42) - - @raises(InvalidPacketException) - def test_decode_error_no_message(self): - """ - Testing whether an InvalidPacketException is raised when our incoming error has an empty message - """ - _decode_error({}, "aa\0", 0) - - @raises(InvalidPacketException) - def test_decode_error_invalid_pkg(self): - """ - Testing whether an InvalidPacketException is raised when our incoming error has an invalid structure - """ - _decode_error({}, "aaa\0\0", 0) - - @raises(InvalidPacketException) - def test_decode_packet_too_small(self): - """ - Testing whether an InvalidPacketException is raised when our incoming packet is too small - """ - decode_packet("aaa") - - @raises(InvalidPacketException) - def test_decode_packet_opcode(self): - """ - Testing whether an InvalidPacketException is raised when our incoming packet 
contains an invalid opcode - """ - decode_packet("aaaaaaaaaa") - - def test_encode_packet_error(self): - """ - Testing whether the encoding of an error packet is correct - """ - encoded = encode_packet({'opcode': OPCODE_ERROR, 'session_id': 123, 'error_code': 1, 'error_msg': 'hi'}) - self.assertEqual(encoded[-3], 'h') - self.assertEqual(encoded[-2], 'i') diff --git a/Tribler/Test/Core/TorrentChecker/test_torrentchecker.py b/Tribler/Test/Core/TorrentChecker/test_torrentchecker.py index 12591d8fe30..31f0a650a04 100644 --- a/Tribler/Test/Core/TorrentChecker/test_torrentchecker.py +++ b/Tribler/Test/Core/TorrentChecker/test_torrentchecker.py @@ -1,18 +1,20 @@ +from __future__ import absolute_import + import socket import time +from binascii import hexlify + +from pony.orm import db_session -from Tribler.Test.tools import trial_timeout from twisted.internet.defer import Deferred, inlineCallbacks +from twisted.python.failure import Failure -from Tribler.Core.CacheDB.SqliteCacheDBHandler import TorrentDBHandler -from Tribler.Core.Category.Category import Category from Tribler.Core.Modules.tracker_manager import TrackerManager from Tribler.Core.TorrentChecker.session import HttpTrackerSession, UdpSocketManager from Tribler.Core.TorrentChecker.torrent_checker import TorrentChecker -from Tribler.Core.simpledefs import NTFY_TORRENTS from Tribler.Test.Core.base_test import MockObject from Tribler.Test.test_as_server import TestAsServer -from Tribler.community.popularity.repository import TYPE_TORRENT_HEALTH +from Tribler.Test.tools import trial_timeout class TestTorrentChecker(TestAsServer): @@ -22,34 +24,43 @@ class TestTorrentChecker(TestAsServer): def setUpPreSession(self): super(TestTorrentChecker, self).setUpPreSession() - self.config.set_megacache_enabled(True) + self.config.set_chant_enabled(True) @inlineCallbacks def setUp(self): yield super(TestTorrentChecker, self).setUp() - self.session.lm.torrent_db = TorrentDBHandler(self.session) self.session.lm.torrent_checker = TorrentChecker(self.session) self.session.lm.tracker_manager = TrackerManager(self.session) self.session.lm.popularity_community = MockObject() self.torrent_checker = self.session.lm.torrent_checker - self.torrent_checker._torrent_db = self.session.open_dbhandler(NTFY_TORRENTS) - self.torrent_checker._torrent_db.category = Category() self.torrent_checker.listen_on_udp = lambda: None + def get_metainfo(_, callback, **__): + callback({"seeders": 1, "leechers": 2}) + + self.session.lm.ltmgr = MockObject() + self.session.lm.ltmgr.get_metainfo = get_metainfo + self.session.lm.ltmgr.shutdown = lambda: None + + @inlineCallbacks + def tearDown(self): + yield self.torrent_checker.shutdown() + yield super(TestTorrentChecker, self).tearDown() + def test_initialize(self): """ Test the initialization of the torrent checker """ self.torrent_checker.initialize() - self.assertIsNotNone(self.torrent_checker._torrent_db) self.assertTrue(self.torrent_checker.is_pending_task_active("torrent_checker_tracker_selection")) def test_create_socket_or_schedule_fail(self): """ Test creation of the UDP socket of the torrent checker when it fails """ + def mocked_listen_on_udp(): raise socket.error("Something went wrong") @@ -72,10 +83,8 @@ def test_add_gui_request_no_trackers(self): Test whether adding a request to fetch health of a trackerless torrent fails """ test_deferred = Deferred() - self.torrent_checker._torrent_db.addExternalTorrentNoDef('a' * 20, 'ubuntu.iso', [['a.test', 1234]], [], 5) - - # Remove the DHT tracker - 
self.torrent_checker._torrent_db._db.execute_write("DELETE FROM TorrentTrackerMapping",) + with db_session: + self.session.lm.mds.TorrentState(infohash='a' * 20) self.torrent_checker.add_gui_request('a' * 20).addErrback(lambda _: test_deferred.callback(None)) return test_deferred @@ -84,9 +93,10 @@ def test_add_gui_request_cached(self): """ Test whether cached results of a torrent are returned when fetching the health of a torrent """ - self.torrent_checker._torrent_db.addExternalTorrentNoDef('a' * 20, 'ubuntu.iso', [['a.test', 1234]], [], 5) - self.torrent_checker._torrent_db.updateTorrentCheckResult( - 1, 'a' * 20, 5, 10, time.time(), time.time(), 'good', 0) + with db_session: + tracker = self.session.lm.mds.TrackerState(url="http://localhost/tracker") + self.session.lm.mds.TorrentState(infohash='a' * 20, seeders=5, leechers=10, trackers={tracker}, + last_check=int(time.time())) def verify_response(result): self.assertTrue('db' in result) @@ -108,8 +118,9 @@ def test_task_select_no_tracker(self): return self.torrent_checker._task_select_tracker() def test_task_select_tracker(self): - self.torrent_checker._torrent_db.addExternalTorrentNoDef( - 'a' * 20, 'ubuntu.iso', [['a.test', 1234]], ['http://google.com/announce'], 5) + with db_session: + tracker = self.session.lm.mds.TrackerState(url="http://localhost/tracker") + self.session.lm.mds.TorrentState(infohash='a' * 20, seeders=5, leechers=10, trackers={tracker}) controlled_session = HttpTrackerSession(None, None, None, None) controlled_session.connect_to_tracker = lambda: Deferred() @@ -124,35 +135,17 @@ def test_tracker_test_error_resolve(self): """ Test whether we capture the error when a tracker check fails """ + def verify_cleanup(_): # Verify whether we successfully cleaned up the session after an error self.assertEqual(len(self.torrent_checker._session_list), 1) - self.torrent_checker._torrent_db.addExternalTorrentNoDef( - 'a' * 20, 'ubuntu.iso', [['a.test', 1234]], ['udp://non123exiszzting456tracker89fle.abc:80/announce'], 5) + with db_session: + tracker = self.session.lm.mds.TrackerState(url="http://localhost/tracker") + self.session.lm.mds.TorrentState(infohash='a' * 20, seeders=5, leechers=10, trackers={tracker}, + last_check=int(time.time())) return self.torrent_checker._task_select_tracker().addCallback(verify_cleanup) - @trial_timeout(30) - def test_tracker_test_invalid_tracker(self): - """ - Test whether we do nothing when tracker URL is invalid - """ - tracker_url = u'udp://non123exiszzting456tracker89fle.abc:80' - bad_tracker_url = u'xyz://non123exiszzting456tracker89fle.abc:80' - - self.torrent_checker._torrent_db.addExternalTorrentNoDef( - 'a' * 20, 'ubuntu.iso', [['a.test', 1234]], [tracker_url], 5) - - # Write invalid url to the database - sql_stmt = u"UPDATE TrackerInfo SET tracker = ? WHERE tracker = ?" 
- self.session.sqlite_db.execute(sql_stmt, (bad_tracker_url, tracker_url)) - - def verify_response(resp): - self.assertFalse(self.session.lm.tracker_manager.get_tracker_info(bad_tracker_url)) - self.assertIsNone(resp) - - return self.torrent_checker._task_select_tracker().addCallback(verify_response) - @trial_timeout(10) def test_tracker_no_infohashes(self): """ @@ -192,15 +185,14 @@ def _fake_logger_info(torrent_checker, msg): original_logger_info = self.torrent_checker._logger.info self.torrent_checker._logger.info = lambda msg: _fake_logger_info(self.torrent_checker, msg) - def popularity_community_queue_content(torrent_checker, _type, _): + def popularity_community_queue_content(torrent_checker, _): torrent_checker.popularity_community_queue_content_called = True - torrent_checker.popularity_community_queue_content_called_type = _type - self.torrent_checker.tribler_session.lm.popularity_community.queue_content = lambda _type, _content: \ - popularity_community_queue_content(self.torrent_checker, _type, _content) + self.torrent_checker.tribler_session.lm.popularity_community.queue_content = lambda _content: \ + popularity_community_queue_content(self.torrent_checker, _content) # Case1: Fake torrent checker response, seeders:0 - fake_response = {'infohash': 'a'*20, 'seeders': 0, 'leechers': 0, 'last_check': time.time()} + fake_response = {'infohash': 'a' * 20, 'seeders': 0, 'leechers': 0, 'last_check': time.time()} self.torrent_checker.publish_torrent_result(fake_response) self.assertTrue(self.torrent_checker.zero_seed_torrent) @@ -211,7 +203,6 @@ def popularity_community_queue_content(torrent_checker, _type, _): self.torrent_checker.publish_torrent_result(fake_response) self.assertTrue(self.torrent_checker.popularity_community_queue_content_called) - self.assertEqual(self.torrent_checker.popularity_community_queue_content_called_type, TYPE_TORRENT_HEALTH) # Case3: Popular community is None self.torrent_checker.tribler_session.lm.popularity_community = None @@ -220,7 +211,43 @@ def popularity_community_queue_content(torrent_checker, _type, _): self.torrent_checker._logger.info = original_logger_info - @inlineCallbacks - def tearDown(self): - yield self.torrent_checker.shutdown() - yield super(TestTorrentChecker, self).tearDown() + def test_on_gui_request_completed(self): + tracker1 = 'udp://localhost:2801' + tracker2 = "http://badtracker.org/announce" + infohash_bin = '\xee'*20 + infohash_hex = hexlify(infohash_bin) + self.session.lm.popularity_community.queue_content = lambda _: None + + failure = Failure() + failure.tracker_url = tracker2 + result = [ + (True, {tracker1: [{'leechers': 1, 'seeders': 2, 'infohash': infohash_hex}]}), + (False, failure), + (True, {'DHT': [{'leechers': 12, 'seeders': 13, 'infohash': infohash_hex}]}) + ] + # Check that everything works fine even if the database contains no proper infohash + res_dict = { + 'DHT': { + 'leechers': 12, + 'seeders': 13, + 'infohash': infohash_hex + }, + 'http://badtracker.org/announce': { + 'error': '' + }, + 'udp://localhost:2801': { + 'leechers': 1, + 'seeders': 2, + 'infohash': infohash_hex + } + } + self.torrent_checker.on_gui_request_completed(infohash_bin, result) + self.assertDictEqual(self.torrent_checker.on_gui_request_completed(infohash_bin, result), res_dict) + + with db_session: + ts = self.session.lm.mds.TorrentState(infohash=infohash_bin) + previous_check = ts.last_check + self.torrent_checker.on_gui_request_completed(infohash_bin, result) + self.assertEqual(result[2][1]['DHT'][0]['leechers'], ts.leechers) + 
self.assertEqual(result[2][1]['DHT'][0]['seeders'], ts.seeders) + self.assertLess(previous_check, ts.last_check) diff --git a/Tribler/Test/Core/Upgrade/test_config_upgrade_70_71.py b/Tribler/Test/Core/Upgrade/test_config_upgrade_70_71.py index f1eecf6aa49..b2dc3079038 100644 --- a/Tribler/Test/Core/Upgrade/test_config_upgrade_70_71.py +++ b/Tribler/Test/Core/Upgrade/test_config_upgrade_70_71.py @@ -2,13 +2,14 @@ import os import shutil -from six.moves.configparser import RawConfigParser -from Tribler.Core.simpledefs import STATEDIR_DLPSTATE_DIR from configobj import ConfigObj -from Tribler.Core.Config.tribler_config import TriblerConfig, CONFIG_SPEC_PATH +from six.moves.configparser import RawConfigParser + +from Tribler.Core.Config.tribler_config import CONFIG_SPEC_PATH, TriblerConfig from Tribler.Core.Upgrade.config_converter import add_libtribler_config, add_tribler_config, convert_config_to_tribler71 +from Tribler.Core.simpledefs import STATEDIR_DLPSTATE_DIR from Tribler.Test.Core.base_test import TriblerCoreTest @@ -38,9 +39,7 @@ def test_read_test_libtribler_conf(self): old_config.read(os.path.join(self.CONFIG_PATH, "libtribler70.conf")) new_config = TriblerConfig() result_config = add_libtribler_config(new_config, old_config) - self.assertEqual(result_config.get_permid_keypair_filename(), "/anon/TriblerDir.gif") self.assertEqual(result_config.get_tunnel_community_socks5_listen_ports(), [1, 2, 3, 4, 5, 6]) - self.assertTrue(result_config.get_metadata_store_dir().endswith("/home/.Tribler/testFile")) self.assertEqual(result_config.get_anon_proxy_settings(), (2, ("127.0.0.1", [5, 4, 3, 2, 1]), '')) self.assertEqual(result_config.get_credit_mining_sources(), ['source1', 'source2']) self.assertEqual(result_config.get_log_dir(), '/a/b/c') @@ -73,9 +72,7 @@ def test_read_test_corr_libtribler_conf(self): result_config = add_libtribler_config(new_config, old_config) - self.assertTrue(result_config.get_permid_keypair_filename().endswith("ec.pem")) self.assertTrue(len(result_config.get_tunnel_community_socks5_listen_ports()), 5) - self.assertTrue(result_config.get_metadata_store_dir().endswith("collected_metadata")) self.assertEqual(result_config.get_anon_proxy_settings(), (2, ('127.0.0.1', [-1, -1, -1, -1, -1]), '')) self.assertEqual(result_config.get_credit_mining_sources(), new_config.get_credit_mining_sources()) diff --git a/Tribler/Test/Core/Upgrade/test_db72_to_pony.py b/Tribler/Test/Core/Upgrade/test_db72_to_pony.py new file mode 100644 index 00000000000..7d069fcc2d2 --- /dev/null +++ b/Tribler/Test/Core/Upgrade/test_db72_to_pony.py @@ -0,0 +1,180 @@ +from __future__ import absolute_import + +import os +import shutil +import sqlite3 + +from pony.orm import db_session +from twisted.internet.defer import inlineCallbacks + +from Tribler.Core.Modules.MetadataStore.OrmBindings.channel_node import LEGACY_ENTRY +from Tribler.Core.Modules.MetadataStore.store import MetadataStore +from Tribler.Core.Upgrade.db72_to_pony import DispersyToPonyMigration, CONVERSION_FINISHED, \ + CONVERSION_FROM_72, old_db_version_ok, cleanup_pony_experimental_db, new_db_version_ok, already_upgraded, \ + should_upgrade +from Tribler.Test.Core.base_test import TriblerCoreTest, MockObject +from Tribler.pyipv8.ipv8.keyvault.crypto import default_eccrypto + +OLD_DB_SAMPLE = os.path.join(os.path.abspath(os.path.dirname(os.path.realpath(__file__))), '..', 'data', + 'upgrade_databases', 'tribler_v29.sdb') + + +class TestUpgradeDB72ToPony(TriblerCoreTest): + @inlineCallbacks + def setUp(self): + yield 
super(TestUpgradeDB72ToPony, self).setUp()
+
+        self.my_key = default_eccrypto.generate_key(u"curve25519")
+        mds_db = os.path.join(self.session_base_dir, 'test.db')
+        mds_channels_dir = self.session_base_dir
+
+        self.mds = MetadataStore(mds_db, mds_channels_dir, self.my_key)
+        self.m = DispersyToPonyMigration(OLD_DB_SAMPLE)
+        self.m.initialize(self.mds)
+
+    @inlineCallbacks
+    def tearDown(self):
+        self.mds.shutdown()
+        yield super(TestUpgradeDB72ToPony, self).tearDown()
+
+    def test_get_personal_channel_title(self):
+        self.assertTrue(self.m.personal_channel_title)
+
+    def test_get_old_torrents_count(self):
+        self.assertEqual(self.m.get_old_torrents_count(), 19)
+
+    def test_get_personal_torrents_count(self):
+        self.assertEqual(self.m.get_personal_channel_torrents_count(), 2)
+
+    def test_convert_personal_channel(self):
+        self.m.convert_personal_channel()
+        my_channel = self.mds.ChannelMetadata.get_my_channel()
+        self.assertEqual(len(my_channel.contents_list), 2)
+        self.assertEqual(my_channel.num_entries, 2)
+        for t in my_channel.contents_list:
+            self.assertTrue(t.has_valid_signature())
+        self.assertTrue(my_channel.has_valid_signature())
+        self.assertEqual(self.m.personal_channel_title[:200], my_channel.title)
+
+    @db_session
+    def test_convert_all_channels(self):
+        self.m.convert_discovered_torrents()
+        self.m.convert_discovered_channels()
+        chans = self.mds.ChannelMetadata.get_entries()
+
+        self.assertEqual(len(chans[0]), 2)
+        for c in chans[0]:
+            self.assertNotEqual(self.m.personal_channel_title[:200], c.title)
+            self.assertEqual(c.status, LEGACY_ENTRY)
+            self.assertTrue(c.contents_list)
+            for t in c.contents_list:
+                self.assertEqual(t.status, LEGACY_ENTRY)
+
+    @db_session
+    def test_update_trackers(self):
+        tr = self.mds.TrackerState(url="http://ipv6.torrent.ubuntu.com:6969/announce")
+        self.m.update_trackers_info()
+        self.assertEqual(tr.failures, 2)
+        self.assertEqual(tr.alive, True)
+        self.assertEqual(tr.last_check, 1548776649)
+
+
+class TestUpgradePreconditionChecker(TriblerCoreTest):
+
+    def test_old_db_version_check(self):
+        # Correct old database
+        self.assertTrue(old_db_version_ok(OLD_DB_SAMPLE))
+
+        # Wrong old database version
+        old_db = os.path.join(self.session_base_dir, 'old.db')
+        shutil.copyfile(OLD_DB_SAMPLE, old_db)
+        conn = sqlite3.connect(old_db)
+        with conn:
+            cursor = conn.cursor()
+            cursor.execute("UPDATE MyInfo SET value = 28 WHERE entry == 'version'")
+        self.assertFalse(old_db_version_ok(old_db))
+
+    def test_cleanup_pony_experimental_db(self):
+        # Create a Pony database of older experimental version
+        pony_db = os.path.join(self.session_base_dir, 'pony.db')
+        pony_db_bak = os.path.join(self.session_base_dir, 'pony2.db')
+        my_key = default_eccrypto.generate_key(u"curve25519")
+        mds = MetadataStore(pony_db, self.session_base_dir, my_key)
+        mds.shutdown()
+        shutil.copyfile(pony_db, pony_db_bak)
+
+        connection = sqlite3.connect(pony_db)
+        with connection:
+            cursor = connection.cursor()
+            cursor.execute("DROP TABLE MiscData")
+        connection.close()
+
+        # Assert older experimental version is deleted
+        self.assertFalse(cleanup_pony_experimental_db(pony_db))
+        self.assertFalse(os.path.exists(pony_db))
+
+        # Assert recent database version is left untouched
+        self.assertFalse(cleanup_pony_experimental_db(pony_db_bak))
+        self.assertTrue(os.path.exists(pony_db_bak))
+
+        # Assert a DatabaseError is raised for a garbled db and the file is left untouched
+        garbled_db = os.path.join(self.session_base_dir, 'garbled.db')
+        with open(garbled_db, 'w') as f:
+            f.write("123")
+        self.assertRaises(sqlite3.DatabaseError, cleanup_pony_experimental_db, garbled_db)
+        self.assertTrue(os.path.exists(garbled_db))
+
+    def test_new_db_version_ok(self):
+        pony_db = os.path.join(self.session_base_dir, 'pony.db')
+        my_key = default_eccrypto.generate_key(u"curve25519")
+        mds = MetadataStore(pony_db, self.session_base_dir, my_key)
+        mds.shutdown()
+
+        self.assertTrue(new_db_version_ok(pony_db))
+
+        connection = sqlite3.connect(pony_db)
+        with connection:
+            cursor = connection.cursor()
+            cursor.execute("UPDATE MiscData SET value = 12313512 WHERE name == 'db_version'")
+        self.assertFalse(new_db_version_ok(pony_db))
+
+    def test_already_upgraded(self):
+        pony_db = os.path.join(self.session_base_dir, 'pony.db')
+        my_key = default_eccrypto.generate_key(u"curve25519")
+        mds = MetadataStore(pony_db, self.session_base_dir, my_key)
+        mds.shutdown()
+
+        self.assertFalse(already_upgraded(pony_db))
+
+        mds = MetadataStore(pony_db, self.session_base_dir, my_key)
+        with db_session:
+            mds.MiscData(name=CONVERSION_FROM_72, value=CONVERSION_FINISHED)
+        mds.shutdown()
+
+        self.assertTrue(already_upgraded(pony_db))
+
+    def test_should_upgrade(self):
+        from Tribler.Core.Upgrade import db72_to_pony
+        pony_db = os.path.join(self.session_base_dir, 'pony.db')
+
+        # Old DB does not exist
+        self.assertFalse(should_upgrade(os.path.join(self.session_base_dir, 'nonexistent.db'), None))
+
+        # Old DB is not OK
+        db72_to_pony.old_db_version_ok = lambda _: False
+        self.assertFalse(should_upgrade(OLD_DB_SAMPLE, None))
+
+        # Pony DB does not exist
+        db72_to_pony.old_db_version_ok = lambda _: True
+        self.assertTrue(should_upgrade(OLD_DB_SAMPLE, pony_db))
+
+
+        mock_logger = MockObject()
+        mock_logger.error = lambda _, a: None
+
+        # Bad Pony DB
+        with open(pony_db, 'w') as f:
+            f.write("")
+        self.assertFalse(should_upgrade(OLD_DB_SAMPLE, pony_db, logger=mock_logger))
+
+
diff --git a/Tribler/Test/Core/Upgrade/test_db_upgrader.py b/Tribler/Test/Core/Upgrade/test_db_upgrader.py
deleted file mode 100644
index a9a369e6993..00000000000
--- a/Tribler/Test/Core/Upgrade/test_db_upgrader.py
+++ /dev/null
@@ -1,64 +0,0 @@
-import os
-
-from Tribler.Core.CacheDB.SqliteCacheDBHandler import TorrentDBHandler
-from Tribler.Core.CacheDB.db_versions import LATEST_DB_VERSION
-from Tribler.Core.Upgrade.db_upgrader import DBUpgrader, VersionNoLongerSupportedError, DatabaseUpgradeError
-from Tribler.Core.Utilities.utilities import fix_torrent
-from Tribler.Core.leveldbstore import LevelDbStore
-from Tribler.Test.Core.Upgrade.upgrade_base import AbstractUpgrader, MockTorrentStore
-from Tribler.Test.common import TORRENT_UBUNTU_FILE, TORRENT_UBUNTU_FILE_INFOHASH
-
-
-class TestDBUpgrader(AbstractUpgrader):
-
-    def test_upgrade_from_obsolete_version(self):
-        """We no longer support DB versions older than 17 (Tribler 6.0)"""
-        self.copy_and_initialize_upgrade_database('tribler_v12.sdb')
-
-        db_migrator = DBUpgrader(self.session, self.sqlitedb, torrent_store=MockTorrentStore())
-        self.assertRaises(VersionNoLongerSupportedError, db_migrator.start_migrate)
-
-    def test_upgrade_17_to_latest(self):
-        self.copy_and_initialize_upgrade_database('tribler_v17.sdb')
-        db_migrator = DBUpgrader(self.session, self.sqlitedb, torrent_store=MockTorrentStore())
-        db_migrator.start_migrate()
-        self.assertEqual(self.sqlitedb.version, LATEST_DB_VERSION)
-        self.assertFalse(os.path.exists(os.path.join(self.session.config.get_torrent_collecting_dir(), 'dir1')))
-
-    def test_upgrade_17_to_latest_no_dispersy(self):
-        # upgrade without dispersy DB should not raise 
an error - self.copy_and_initialize_upgrade_database('tribler_v17.sdb') - os.unlink(os.path.join(self.session.config.get_state_dir(), 'sqlite', 'dispersy.db')) - db_migrator = DBUpgrader(self.session, self.sqlitedb, torrent_store=MockTorrentStore()) - db_migrator.start_migrate() - self.assertEqual(self.sqlitedb.version, LATEST_DB_VERSION) - - # Check whether the torrents in the database are reindexed - results = self.sqlitedb.fetchall("SELECT * FROM FullTextIndex") - self.assertEqual(len(results), 1) - self.assertTrue('test' in results[0][0]) - self.assertTrue('random' in results[0][1]) - self.assertTrue('tribler' in results[0][1]) - self.assertTrue('txt' in results[0][2]) - self.assertTrue('txt' in results[0][2]) - - def test_upgrade_wrong_version(self): - self.copy_and_initialize_upgrade_database('tribler_v17.sdb') - db_migrator = DBUpgrader(self.session, self.sqlitedb, torrent_store=MockTorrentStore()) - db_migrator.db._version = LATEST_DB_VERSION + 1 - self.assertRaises(DatabaseUpgradeError, db_migrator.start_migrate) - - def test_reimport_torrents(self): - self.copy_and_initialize_upgrade_database('tribler_v17.sdb') - self.torrent_store = LevelDbStore(self.session.config.get_torrent_store_dir()) - db_migrator = DBUpgrader(self.session, self.sqlitedb, torrent_store=self.torrent_store) - db_migrator.start_migrate() - - # Import a torrent - self.torrent_store[TORRENT_UBUNTU_FILE_INFOHASH] = fix_torrent(TORRENT_UBUNTU_FILE) - self.torrent_store.flush() - - db_migrator.reimport_torrents() - - torrent_db_handler = TorrentDBHandler(self.session) - self.assertEqual(torrent_db_handler.getTorrentID(TORRENT_UBUNTU_FILE_INFOHASH), 3) diff --git a/Tribler/Test/Core/Upgrade/test_pickle_converter.py b/Tribler/Test/Core/Upgrade/test_pickle_converter.py deleted file mode 100644 index db56d0a6e48..00000000000 --- a/Tribler/Test/Core/Upgrade/test_pickle_converter.py +++ /dev/null @@ -1,67 +0,0 @@ -import os -import pickle - -from Tribler.Core.Config.tribler_config import TriblerConfig, FILENAME as TRIBLER_CONFIG_FILENAME -from Tribler.Core.Upgrade.pickle_converter import PickleConverter -from Tribler.Test.Core.base_test import TriblerCoreTest, MockObject - - -class TestPickleConverter(TriblerCoreTest): - """ - This file contains tests for the converter that converts older pickle files to the .state format. 
- """ - - def setUp(self): - super(TestPickleConverter, self).setUp() - - self.mock_session = MockObject() - self.mock_session.get_downloads_pstate_dir = lambda: self.session_base_dir - self.mock_session.config = TriblerConfig() - self.mock_session.config.get_state_dir = lambda: self.session_base_dir - - def write_pickle_file(self, content, filename): - pickle_filepath = os.path.join(self.session_base_dir, filename) - pickle.dump(content, open(pickle_filepath, "wb")) - - def test_convert_session_config(self): - old_pickle_dict = {"state_dir": "/", "mainline_dht_port": 1337, "torrent_checking": "false", - "torrent_collecting": "true", "libtorrent": False, "dispersy_port": 1337, - "minport": 1234} - self.write_pickle_file(old_pickle_dict, "sessconfig.pickle") - - PickleConverter(self.mock_session).convert_session_config() - - self.assertTrue(os.path.exists(os.path.join(self.session_base_dir, TRIBLER_CONFIG_FILENAME))) - self.assertFalse(os.path.exists(os.path.join(self.session_base_dir, "sessconfig.pickle"))) - - # Check the content of the config file - config = TriblerConfig.load(config_path=os.path.join(self.session_base_dir, TRIBLER_CONFIG_FILENAME)) - self.assertEqual(config.get_state_dir(), '/') - self.assertEqual(config.get_mainline_dht_port(), 1337) - self.assertEqual(config.get_torrent_checking_enabled(), False) - self.assertEqual(config.get_torrent_collecting_enabled(), True) - self.assertFalse(config.get_libtorrent_enabled()) - self.assertEqual(config.get_dispersy_port(), 1337) - self.assertEqual(config.get_libtorrent_port(), 1234) - - def test_convert_download_checkpoints(self): - with open(os.path.join(self.session_base_dir, 'corrupt.pickle'), 'wb') as corrupt_file: - corrupt_file.write("This is not a pickle file!") - - old_pickle_dict = {"dlconfig": {"saveas": "dunno", "abc": "def"}, "engineresumedata": "test", - "dlstate": "test", "metainfo": "none"} - self.write_pickle_file(old_pickle_dict, "download.pickle") - - PickleConverter(self.mock_session).convert_download_checkpoints() - - self.assertTrue(os.path.exists(os.path.join(self.session_base_dir, 'download.state'))) - self.assertFalse(os.path.exists(os.path.join(self.session_base_dir, 'corrupt.pickle'))) - - def test_convert_main_config(self): - pickle_dict = {"download_state": {"abc": "stop"}} - self.write_pickle_file(pickle_dict, "user_download_choice.pickle") - - PickleConverter(self.mock_session).convert_main_config() - - self.assertFalse(os.path.exists(os.path.join(self.session_base_dir, "user_download_choice.pickle"))) - self.assertTrue(os.path.exists(os.path.join(self.session_base_dir, TRIBLER_CONFIG_FILENAME))) diff --git a/Tribler/Test/Core/Upgrade/test_torrent_upgrade_63_64.py b/Tribler/Test/Core/Upgrade/test_torrent_upgrade_63_64.py deleted file mode 100644 index 828c2635346..00000000000 --- a/Tribler/Test/Core/Upgrade/test_torrent_upgrade_63_64.py +++ /dev/null @@ -1,122 +0,0 @@ -import os -import shutil -from apsw import Connection -from nose.tools import raises -from twisted.internet.defer import inlineCallbacks - -from Tribler.Core.Upgrade.torrent_upgrade64 import TorrentMigrator64 -from Tribler.Test.Core.base_test import TriblerCoreTest -from Tribler.Test.common import TORRENT_UBUNTU_FILE - - -class AbstractTorrentUpgrade63to64(TriblerCoreTest): - - FILE_DIR = os.path.abspath(os.path.dirname(os.path.realpath(__file__))) - DB_DATA_DIR = os.path.abspath(os.path.join(FILE_DIR, u"../data/upgrade_databases/")) - - def write_data_to_file(self, file_name): - with open(file_name, 'w') as file: - file.write("lorem 
ipsum") - file.close() - - - # This setup creates a directory with files that should be used for the 6.3 -> 6.4 upgrade - @inlineCallbacks - def setUp(self): - yield super(AbstractTorrentUpgrade63to64, self).setUp() - - self.torrent_collecting_dir = os.path.join(self.session_base_dir, "torrent_collecting") - self.sqlite_path = os.path.join(self.session_base_dir, "sqlite") - os.mkdir(self.torrent_collecting_dir) - os.mkdir(os.path.join(self.torrent_collecting_dir, "test_dir")) - os.mkdir(self.sqlite_path) - - # write and create files - self.write_data_to_file(os.path.join(self.session_base_dir, "upgradingdb.txt")) - self.write_data_to_file(os.path.join(self.torrent_collecting_dir, "test1.mbinmap")) - self.write_data_to_file(os.path.join(self.torrent_collecting_dir, "test2.mhash")) - self.write_data_to_file(os.path.join(self.torrent_collecting_dir, "tmp_test3")) - self.write_data_to_file(os.path.join(self.torrent_collecting_dir, "torrent1.torrent")) - self.write_data_to_file(os.path.join(self.torrent_collecting_dir, "torrent2.torrent")) - os.mkdir(os.path.join(self.torrent_collecting_dir, "swift_reseeds")) - shutil.copyfile(TORRENT_UBUNTU_FILE, os.path.join(self.torrent_collecting_dir, "torrent3.torrent")) - shutil.copyfile(os.path.join(self.DB_DATA_DIR, "torrent_upgrade_64_dispersy.db"), - os.path.join(self.sqlite_path, "dispersy.db")) - - self.torrent_upgrader = TorrentMigrator64(self.torrent_collecting_dir, self.session_base_dir) - - def assert_upgrade_successful(self): - self.assertFalse(os.path.isfile(os.path.join(self.session_base_dir, "upgradingdb.txt"))) - self.assertGreater(self.torrent_upgrader.swift_files_deleted, 0) - self.assertGreater(self.torrent_upgrader.total_swift_file_count, 0) - self.assertGreater(self.torrent_upgrader.total_torrent_file_count, 0) - self.assertGreater(self.torrent_upgrader.processed_file_count, 0) - self.assertGreater(self.torrent_upgrader.torrent_files_dropped, 0) - self.assertGreater(self.torrent_upgrader.total_file_count, 0) - self.assertGreater(self.torrent_upgrader.torrent_files_migrated, 0) - self.assertFalse(os.path.isdir(os.path.join(self.torrent_collecting_dir, "test_dir"))) - - -class TestUpgrade63to64(AbstractTorrentUpgrade63to64): - - def test_upgrade_success(self): - self.torrent_upgrader.start_migrate() - self.torrent_upgrader._update_dispersy() - self.assert_upgrade_successful() - - @raises(OSError) - def test_upgrade_no_valid_basedir(self): - self.torrent_upgrader = TorrentMigrator64(self.torrent_collecting_dir, - os.path.join(self.session_base_dir, "bla")) - self.torrent_upgrader.start_migrate() - - @raises(RuntimeError) - def test_upgrade_no_valid_torrent_collecting_dir(self): - self.torrent_upgrader = TorrentMigrator64(os.path.join(self.torrent_collecting_dir, "bla"), - self.session_base_dir) - self.torrent_upgrader.start_migrate() - - @raises(RuntimeError) - def test_upgrade_temp_torrent_dir_is_file(self): - self.write_data_to_file(os.path.join(self.session_base_dir, ".tmp_migration_v64")) - self.torrent_upgrader = TorrentMigrator64(self.torrent_collecting_dir, self.session_base_dir) - self.torrent_upgrader.start_migrate() - - @raises(RuntimeError) - def test_upgrade_swift_reseeds_dir_no_dir(self): - os.rmdir(os.path.join(self.torrent_collecting_dir, "swift_reseeds")) - self.write_data_to_file(os.path.join(self.torrent_collecting_dir, "swift_reseeds")) - self.torrent_upgrader.start_migrate() - - def test_upgrade_torrent_tcd_file_exists(self): - tcd_path = os.path.join(self.session_base_dir, ".tmp_migration_v64_tcd") - 
self.write_data_to_file(tcd_path) - self.torrent_upgrader.start_migrate() - self.assertFalse(os.path.exists(tcd_path)) - - def test_upgrade_migration_dir_already_exists(self): - os.mkdir(os.path.join(self.session_base_dir, ".tmp_migration_v64")) - self.torrent_upgrader.start_migrate() - self.assert_upgrade_successful() - - def test_upgrade_empty_torrent_dir(self): - shutil.rmtree(self.torrent_collecting_dir) - os.mkdir(self.torrent_collecting_dir) - self.torrent_upgrader.start_migrate() - self.assertEqual(self.torrent_upgrader.total_torrent_file_count, 0) - self.assertEqual(self.torrent_upgrader.total_swift_file_count, 0) - - def test_upgrade_dispersy_no_database(self): - os.unlink(os.path.join(self.sqlite_path, "dispersy.db")) - self.torrent_upgrader._update_dispersy() - - def test_upgrade_dispersy(self): - self.torrent_upgrader._update_dispersy() - - db_path = os.path.join(self.sqlite_path, u"dispersy.db") - connection = Connection(db_path) - cursor = connection.cursor() - self.assertFalse(list(cursor.execute(u"SELECT * FROM community WHERE classification == 'SearchCommunity'"))) - self.assertFalse(list(cursor.execute(u"SELECT * FROM community WHERE classification == 'MetadataCommunity'"))) - cursor.close() - connection.close() diff --git a/Tribler/Test/Core/Upgrade/test_torrent_upgrade_64_65.py b/Tribler/Test/Core/Upgrade/test_torrent_upgrade_64_65.py deleted file mode 100644 index 5c5f35567de..00000000000 --- a/Tribler/Test/Core/Upgrade/test_torrent_upgrade_64_65.py +++ /dev/null @@ -1,45 +0,0 @@ -import os -import shutil -from twisted.internet.defer import inlineCallbacks - -from Tribler.Core.Upgrade.torrent_upgrade65 import TorrentMigrator65 -from Tribler.Core.leveldbstore import LevelDbStore -from Tribler.Test.Core.Upgrade.test_torrent_upgrade_63_64 import AbstractTorrentUpgrade63to64 - - -class AbstractTorrentUpgrade64to65(AbstractTorrentUpgrade63to64): - - @inlineCallbacks - def setUp(self): - yield super(AbstractTorrentUpgrade64to65, self).setUp() - - leveldb_path = os.path.join(self.session_base_dir, "leveldbstore") - os.mkdir(leveldb_path) - self.torrent_store = LevelDbStore(leveldb_path) - self.torrent_upgrader = TorrentMigrator65(self.torrent_collecting_dir, - self.session_base_dir, self.torrent_store) - - def tearDown(self): - self.torrent_store.close() - super(AbstractTorrentUpgrade64to65, self).tearDown() - - def assert_upgrade_successful(self): - self.assertGreater(self.torrent_upgrader.torrent_files_migrated, 0) - self.assertGreater(self.torrent_upgrader.processed_file_count, 0) - self.assertGreater(len(self.torrent_store), 0) - - -class TestTorrentUpgrade63to64(AbstractTorrentUpgrade64to65): - - def test_upgrade_success(self): - self.torrent_upgrader._migrate_torrent_collecting_dir() - self.assert_upgrade_successful() - - def test_torrent_collecting_dir_no_dir(self): - shutil.rmtree(self.torrent_collecting_dir) - self.write_data_to_file(self.torrent_collecting_dir) - self.torrent_upgrader._migrate_torrent_collecting_dir() - - self.assertEqual(self.torrent_upgrader.torrent_files_migrated, 0) - self.assertEqual(self.torrent_upgrader.processed_file_count, 0) - self.assertEqual(len(self.torrent_store), 0) diff --git a/Tribler/Test/Core/Upgrade/test_upgrader.py b/Tribler/Test/Core/Upgrade/test_upgrader.py index 685fc6a55d0..55942bc7681 100644 --- a/Tribler/Test/Core/Upgrade/test_upgrader.py +++ b/Tribler/Test/Core/Upgrade/test_upgrader.py @@ -1,9 +1,15 @@ +from __future__ import absolute_import + import os +import shutil + +from pony.orm import db_session + from 
twisted.internet.defer import Deferred, inlineCallbacks -from Tribler.Core.CacheDB.db_versions import LATEST_DB_VERSION, LOWEST_SUPPORTED_DB_VERSION +from Tribler.Core.Modules.MetadataStore.store import MetadataStore from Tribler.Core.Upgrade.upgrade import TriblerUpgrader -from Tribler.Core.simpledefs import NTFY_UPGRADER_TICK, NTFY_STARTED +from Tribler.Core.simpledefs import NTFY_STARTED, NTFY_UPGRADER_TICK from Tribler.Test.Core.Upgrade.upgrade_base import AbstractUpgrader from Tribler.Test.tools import trial_timeout @@ -13,38 +19,7 @@ class TestUpgrader(AbstractUpgrader): @inlineCallbacks def setUp(self): yield super(TestUpgrader, self).setUp() - self.copy_and_initialize_upgrade_database('tribler_v17.sdb') - self.upgrader = TriblerUpgrader(self.session, self.sqlitedb) - - def test_stash_database(self): - self.upgrader.stash_database() - old_dir = os.path.dirname(self.sqlitedb.sqlite_db_path) - self.assertTrue(os.path.exists(u'%s_backup_%d' % (old_dir, LATEST_DB_VERSION))) - self.assertIsNotNone(self.sqlitedb._connection) - self.assertTrue(self.upgrader.is_done) - - def test_should_upgrade(self): - self.sqlitedb._version = LATEST_DB_VERSION + 1 - self.assertTrue(self.upgrader.check_should_upgrade_database()[0]) - self.assertFalse(self.upgrader.check_should_upgrade_database()[1]) - - self.sqlitedb._version = LOWEST_SUPPORTED_DB_VERSION - 1 - self.assertTrue(self.upgrader.check_should_upgrade_database()[0]) - self.assertFalse(self.upgrader.check_should_upgrade_database()[1]) - - self.sqlitedb._version = LATEST_DB_VERSION - self.assertFalse(self.upgrader.check_should_upgrade_database()[0]) - self.assertFalse(self.upgrader.check_should_upgrade_database()[1]) - - self.sqlitedb._version = LATEST_DB_VERSION - 1 - self.assertFalse(self.upgrader.check_should_upgrade_database()[0]) - self.assertTrue(self.upgrader.check_should_upgrade_database()[1]) - - def test_upgrade_with_upgrader_enabled(self): - self.upgrader.run() - - self.assertTrue(self.upgrader.is_done) - self.assertFalse(self.upgrader.failed) + self.upgrader = TriblerUpgrader(self.session) def test_run(self): """ @@ -70,3 +45,16 @@ def on_upgrade_tick(subject, changetype, objectID, status_text): self.session.notifier.add_observer(on_upgrade_tick, NTFY_UPGRADER_TICK, [NTFY_STARTED]) self.upgrader.update_status("12345") return test_deferred + + def test_upgrade_72_to_pony(self): + OLD_DB_SAMPLE = os.path.abspath(os.path.join(os.path.abspath( + os.path.dirname(os.path.realpath(__file__))), '..', 'data', 'upgrade_databases', 'tribler_v29.sdb')) + old_database_path = os.path.join(self.session.config.get_state_dir(), 'sqlite', 'tribler.sdb') + new_database_path = os.path.join(self.session.config.get_state_dir(), 'sqlite', 'metadata.db') + channels_dir = os.path.join(self.session.config.get_chant_channels_dir()) + shutil.copyfile(OLD_DB_SAMPLE, old_database_path) + self.upgrader.upgrade_72_to_pony() + mds = MetadataStore(new_database_path, channels_dir, self.session.trustchain_keypair) + with db_session: + self.assertEqual(mds.TorrentMetadata.select().count(), 24) + mds.shutdown() diff --git a/Tribler/Test/Core/Upgrade/upgrade_base.py b/Tribler/Test/Core/Upgrade/upgrade_base.py index 094d8e9908d..bd1e5c85661 100644 --- a/Tribler/Test/Core/Upgrade/upgrade_base.py +++ b/Tribler/Test/Core/Upgrade/upgrade_base.py @@ -1,12 +1,12 @@ +from __future__ import absolute_import + import os -import shutil from configobj import ConfigObj + from twisted.internet.defer import inlineCallbacks -import Tribler -from Tribler.Core.CacheDB.sqlitecachedb import 
SQLiteCacheDB -from Tribler.Core.Config.tribler_config import TriblerConfig, CONFIG_SPEC_PATH +from Tribler.Core.Config.tribler_config import CONFIG_SPEC_PATH, TriblerConfig from Tribler.Core.Session import Session from Tribler.Test.Core.base_test import TriblerCoreTest @@ -20,43 +20,9 @@ class AbstractUpgrader(TriblerCoreTest): FILE_DIR = os.path.abspath(os.path.dirname(os.path.realpath(__file__))) DATABASES_DIR = os.path.abspath(os.path.join(FILE_DIR, u"../data/upgrade_databases/")) - def write_data_to_file(self, file_name): - with open(file_name, 'w') as file: - file.write("lorem ipsum") - file.close() - @inlineCallbacks def setUp(self): yield super(AbstractUpgrader, self).setUp() self.config = TriblerConfig(ConfigObj(configspec=CONFIG_SPEC_PATH)) self.config.set_state_dir(self.getStateDir()) - self.config.set_torrent_collecting_dir(os.path.join(self.session_base_dir, 'torrent_collecting_dir')) self.session = Session(self.config) - self.sqlitedb = None - self.torrent_store = None - - def tearDown(self): - if self.torrent_store: - self.torrent_store.close() - - if self.sqlitedb: - self.sqlitedb.close() - self.sqlitedb = None - - super(AbstractUpgrader, self).tearDown() - - def copy_and_initialize_upgrade_database(self, db_name): - - # create a file to be removed in the thumbnails - os.mkdir(self.session.config.get_torrent_collecting_dir()) - os.mkdir(os.path.join(self.session.config.get_torrent_collecting_dir(), 'dir1')) - self.write_data_to_file(os.path.join(self.session.config.get_torrent_collecting_dir(), 'dir1', 'file1.txt')) - - os.mkdir(os.path.join(self.session_base_dir, 'sqlite')) - shutil.copyfile(os.path.join(self.DATABASES_DIR, db_name), - os.path.join(self.session.config.get_state_dir(), 'sqlite', 'tribler.sdb')) - shutil.copyfile(os.path.join(self.DATABASES_DIR, 'torrent_upgrade_64_dispersy.db'), - os.path.join(self.session.config.get_state_dir(), 'sqlite', 'dispersy.db')) - db_path = os.path.join(self.session.config.get_state_dir(), 'sqlite', 'tribler.sdb') - self.sqlitedb = SQLiteCacheDB(db_path) - self.session.sqlite_db = self.sqlitedb diff --git a/Tribler/Test/Core/Utilities/test_tracker_utils.py b/Tribler/Test/Core/Utilities/test_tracker_utils.py index bfb9ad4dd84..9c7561edf80 100644 --- a/Tribler/Test/Core/Utilities/test_tracker_utils.py +++ b/Tribler/Test/Core/Utilities/test_tracker_utils.py @@ -1,7 +1,9 @@ +from __future__ import absolute_import + from nose.tools import raises -from Tribler.Core.Utilities.tracker_utils import parse_tracker_url, get_uniformed_tracker_url, \ - MalformedTrackerURLException +from Tribler.Core.Utilities.tracker_utils import MalformedTrackerURLException, get_uniformed_tracker_url,\ + parse_tracker_url from Tribler.Test.Core.base_test import TriblerCoreTest @@ -50,10 +52,35 @@ def test_uniform_http_default_port_given(self): result = get_uniformed_tracker_url("http://torrent.ubuntu.com:80/announce") self.assertEqual(result, u'http://torrent.ubuntu.com/announce') - def test_uniform_trailing_hex(self): + def test_uniform_trailing_zero_hex(self): result = get_uniformed_tracker_url("udp://tracker.1337x.org:80\x00") + self.assertEqual(result, u'udp://tracker.1337x.org:80') + + def test_uniform_trailing_hex(self): + result = get_uniformed_tracker_url("udp://tracker.1337x.org:80\xff") + self.assertIsNone(result) + + def test_uniform_bad_urlenc(self): + result = get_uniformed_tracker_url(u'http://btjunkie.org/?do=upload') + self.assertIsNone(result) + + def test_uniform_empty(self): + result = get_uniformed_tracker_url(u'') self.assertIsNone(result) 
+ def test_skip_truncated_url(self): + result = get_uniformed_tracker_url(u'http://tracker.1337x.org:80/anno...') + self.assertIsNone(result) + + def test_skip_wrong_url_scheme(self): + result = get_uniformed_tracker_url(u'wss://tracker.1337x.org:80/announce') + self.assertIsNone(result) + + def test_skip_value_error(self): + result = get_uniformed_tracker_url("ftp://tracker.1337\xffx.org:80/announce") + self.assertIsNone(result) + + class TestParseTrackerUrl(TriblerCoreTest): """ diff --git a/Tribler/Test/Core/Video/test_vod.py b/Tribler/Test/Core/Video/test_vod.py index 31b1c8221f7..a3be21b2a9b 100644 --- a/Tribler/Test/Core/Video/test_vod.py +++ b/Tribler/Test/Core/Video/test_vod.py @@ -1,15 +1,16 @@ +from __future__ import absolute_import + import os from tempfile import mkstemp -from M2Crypto import Rand -from Tribler.Test.tools import trial_timeout -from twisted.internet.defer import inlineCallbacks, Deferred +from twisted.internet.defer import Deferred, inlineCallbacks from Tribler.Core.DownloadConfig import DownloadStartupConfig from Tribler.Core.Libtorrent.LibtorrentDownloadImpl import VODFile from Tribler.Core.TorrentDef import TorrentDef -from Tribler.Core.simpledefs import dlstatus_strings, UPLOAD, DOWNLOAD, DLMODE_VOD +from Tribler.Core.simpledefs import DLMODE_VOD, DOWNLOAD, UPLOAD, dlstatus_strings from Tribler.Test.test_as_server import TestAsServer +from Tribler.Test.tools import trial_timeout class TestVideoOnDemand(TestAsServer): @@ -37,7 +38,7 @@ def setUpPreSession(self): def create_torrent(self): [srchandle, sourcefn] = mkstemp() - self.content = Rand.rand_bytes(self.contentlen) + self.content = '0' * self.contentlen os.write(srchandle, self.content) os.close(srchandle) diff --git a/Tribler/Test/Core/base_test_channel.py b/Tribler/Test/Core/base_test_channel.py deleted file mode 100644 index b14122cdbf5..00000000000 --- a/Tribler/Test/Core/base_test_channel.py +++ /dev/null @@ -1,74 +0,0 @@ -from twisted.internet.defer import inlineCallbacks - -from Tribler.Core.simpledefs import NTFY_CHANNELCAST -from Tribler.Core.simpledefs import NTFY_VOTECAST -from Tribler.Test.Core.base_test import MockObject -from Tribler.Test.test_as_server import TestAsServer -from Tribler.community.allchannel.community import AllChannelCommunity -from Tribler.dispersy.dispersy import Dispersy -from Tribler.dispersy.endpoint import ManualEnpoint -from Tribler.dispersy.member import DummyMember - - -class BaseTestChannel(TestAsServer): - - @inlineCallbacks - def setUp(self): - """ - Setup some classes and files that are used by the tests in this module. 
- """ - yield super(BaseTestChannel, self).setUp() - - self.fake_session = MockObject() - self.fake_session.add_observer = lambda a, b, c: False - - self.fake_session_config = MockObject() - self.fake_session_config.get_state_dir = lambda: self.session_base_dir - self.fake_session.config = self.fake_session_config - - fake_notifier = MockObject() - fake_notifier.add_observer = lambda a, b, c, d: False - fake_notifier.notify = lambda a, b, c, d: False - self.fake_session.notifier = fake_notifier - - self.fake_channel_community = MockObject() - self.fake_channel_community.get_channel_id = lambda: 42 - self.fake_channel_community.cid = 'a' * 20 - self.fake_channel_community.get_channel_name = lambda: "my fancy channel" - - self.channel_db_handler = self.session.open_dbhandler(NTFY_CHANNELCAST) - self.votecast_db_handler = self.session.open_dbhandler(NTFY_VOTECAST) - - self.session.get_dispersy = lambda: True - self.session.lm.dispersy = Dispersy(ManualEnpoint(0), self.getStateDir()) - - def setUpPreSession(self): - super(BaseTestChannel, self).setUpPreSession() - self.config.set_megacache_enabled(True) - - def insert_channel_in_db(self, dispersy_cid, peer_id, name, description): - return self.channel_db_handler.on_channel_from_dispersy(dispersy_cid, peer_id, name, description) - - def insert_torrents_into_channel(self, torrent_list): - self.channel_db_handler.on_torrents_from_dispersy(torrent_list) - - def create_fake_allchannel_community(self): - """ - This method creates a fake AllChannel community so we can check whether a request is made in the community - when doing stuff with a channel. - """ - self.session.lm.dispersy._database.open() - fake_member = DummyMember(self.session.lm.dispersy, 1, "a" * 20) - member = self.session.lm.dispersy.get_new_member(u"curve25519") - fake_community = AllChannelCommunity(self.session.lm.dispersy, fake_member, member) - self.session.lm.dispersy._communities = {"allchannel": fake_community} - return fake_community - - @inlineCallbacks - def tearDown(self): - self.session.lm.dispersy.cancel_all_pending_tasks() - # Ugly way to check if database is open in Dispersy - if self.session.lm.dispersy._database._cursor: - yield self.session.lm.dispersy._database.close() - self.session.lm.dispersy = None - yield super(BaseTestChannel, self).tearDown() diff --git a/Tribler/Test/Core/data/config_files/config1.conf b/Tribler/Test/Core/data/config_files/config1.conf index 3c45fdd1256..945b53101ae 100644 --- a/Tribler/Test/Core/data/config_files/config1.conf +++ b/Tribler/Test/Core/data/config_files/config1.conf @@ -32,10 +32,6 @@ exitnode_enabled = False enabled = True store_dir = /Users/tribler/.Tribler/collected_metadata -[mainline_dht] -enabled = True -mainline_dht_port = -1 - [torrent_checking] enabled = 1 diff --git a/Tribler/Test/Core/data/sample_channel/893e876d3d09f0bb87bf95036b3d5e26/000000000002.mdblob.lz4 b/Tribler/Test/Core/data/sample_channel/893e876d3d09f0bb87bf95036b3d5e26/000000000002.mdblob.lz4 new file mode 100644 index 00000000000..3328257a00d Binary files /dev/null and b/Tribler/Test/Core/data/sample_channel/893e876d3d09f0bb87bf95036b3d5e26/000000000002.mdblob.lz4 differ diff --git a/Tribler/Test/Core/data/sample_channel/893e876d3d09f0bb87bf95036b3d5e26/000000000006.mdblob.lz4 b/Tribler/Test/Core/data/sample_channel/893e876d3d09f0bb87bf95036b3d5e26/000000000006.mdblob.lz4 new file mode 100644 index 00000000000..609cd7635d3 Binary files /dev/null and b/Tribler/Test/Core/data/sample_channel/893e876d3d09f0bb87bf95036b3d5e26/000000000006.mdblob.lz4 
differ diff --git a/Tribler/Test/Core/data/sample_channel/893e876d3d09f0bb87bf95036b3d5e26/000000000007.mdblob.lz4 b/Tribler/Test/Core/data/sample_channel/893e876d3d09f0bb87bf95036b3d5e26/000000000007.mdblob.lz4 new file mode 100644 index 00000000000..1d968bf915d Binary files /dev/null and b/Tribler/Test/Core/data/sample_channel/893e876d3d09f0bb87bf95036b3d5e26/000000000007.mdblob.lz4 differ diff --git a/Tribler/Test/Core/data/sample_channel/893e876d3d09f0bb87bf95036b3d5e26/000000000009.mdblob.lz4 b/Tribler/Test/Core/data/sample_channel/893e876d3d09f0bb87bf95036b3d5e26/000000000009.mdblob.lz4 new file mode 100644 index 00000000000..d45aab16898 Binary files /dev/null and b/Tribler/Test/Core/data/sample_channel/893e876d3d09f0bb87bf95036b3d5e26/000000000009.mdblob.lz4 differ diff --git a/Tribler/Test/Core/data/sample_channel/channel.mdblob b/Tribler/Test/Core/data/sample_channel/channel.mdblob index 5b7b3bc6361..8037b5e2a40 100644 Binary files a/Tribler/Test/Core/data/sample_channel/channel.mdblob and b/Tribler/Test/Core/data/sample_channel/channel.mdblob differ diff --git a/Tribler/Test/Core/data/sample_channel/channel.torrent b/Tribler/Test/Core/data/sample_channel/channel.torrent index dc18fdeee63..0e751ae3caf 100644 --- a/Tribler/Test/Core/data/sample_channel/channel.torrent +++ b/Tribler/Test/Core/data/sample_channel/channel.torrent @@ -1 +1 @@ -d13:creation datei1540204365e4:infod5:filesld6:lengthi561e4:pathl19:000000000003.mdblobeed6:lengthi265e4:pathl19:000000000001.mdblobeee4:name60:d24941643ff471e40d7761c71f4e3a4c21a4a5e89b0281430d01e78a4e4612:piece lengthi16384e6:pieces20:Ô~€á «(ÑXlj>*`ÝR¡ee \ No newline at end of file +d13:creation datei1548884024e4:infod5:filesld6:lengthi538e4:pathl23:000000000006.mdblob.lz4eed6:lengthi283e4:pathl23:000000000002.mdblob.lz4eed6:lengthi211e4:pathl23:000000000007.mdblob.lz4eee4:name32:893e876d3d09f0bb87bf95036b3d5e2612:piece lengthi16384e6:pieces20:Ç ™'·’ ÜkÙ·Í{ {Ïä75ee \ No newline at end of file diff --git a/Tribler/Test/Core/data/sample_channel/channel_upd.mdblob b/Tribler/Test/Core/data/sample_channel/channel_upd.mdblob index 42aa539de92..5b252817503 100644 Binary files a/Tribler/Test/Core/data/sample_channel/channel_upd.mdblob and b/Tribler/Test/Core/data/sample_channel/channel_upd.mdblob differ diff --git a/Tribler/Test/Core/data/sample_channel/channel_upd.torrent b/Tribler/Test/Core/data/sample_channel/channel_upd.torrent index 57ddbe0b2ea..b72fc4d5a25 100644 --- a/Tribler/Test/Core/data/sample_channel/channel_upd.torrent +++ b/Tribler/Test/Core/data/sample_channel/channel_upd.torrent @@ -1 +1 @@ -d13:creation datei1540204617e4:infod5:filesld6:lengthi561e4:pathl19:000000000003.mdblobeed6:lengthi300e4:pathl19:000000000004.mdblobeed6:lengthi265e4:pathl19:000000000001.mdblobeee4:name60:d24941643ff471e40d7761c71f4e3a4c21a4a5e89b0281430d01e78a4e4612:piece lengthi16384e6:pieces20:ðF•)NÞ#d°„ÚÓŠŽT¯ee \ No newline at end of file +d13:creation datei1548884024e4:infod5:filesld6:lengthi538e4:pathl23:000000000006.mdblob.lz4eed6:lengthi283e4:pathl23:000000000002.mdblob.lz4eed6:lengthi223e4:pathl23:000000000009.mdblob.lz4eed6:lengthi211e4:pathl23:000000000007.mdblob.lz4eee4:name32:893e876d3d09f0bb87bf95036b3d5e2612:piece lengthi16384e6:pieces20:E™¨·SzN²I"ˆùÅÙ Íee \ No newline at end of file diff --git a/Tribler/Test/Core/data/sample_channel/d24941643ff471e40d7761c71f4e3a4c21a4a5e89b0281430d01e78a4e46/000000000001.mdblob b/Tribler/Test/Core/data/sample_channel/d24941643ff471e40d7761c71f4e3a4c21a4a5e89b0281430d01e78a4e46/000000000001.mdblob deleted file mode 
100644 index 1312de5a5e8..00000000000 Binary files a/Tribler/Test/Core/data/sample_channel/d24941643ff471e40d7761c71f4e3a4c21a4a5e89b0281430d01e78a4e46/000000000001.mdblob and /dev/null differ diff --git a/Tribler/Test/Core/data/sample_channel/d24941643ff471e40d7761c71f4e3a4c21a4a5e89b0281430d01e78a4e46/000000000003.mdblob b/Tribler/Test/Core/data/sample_channel/d24941643ff471e40d7761c71f4e3a4c21a4a5e89b0281430d01e78a4e46/000000000003.mdblob deleted file mode 100644 index 9bbc582f643..00000000000 Binary files a/Tribler/Test/Core/data/sample_channel/d24941643ff471e40d7761c71f4e3a4c21a4a5e89b0281430d01e78a4e46/000000000003.mdblob and /dev/null differ diff --git a/Tribler/Test/Core/data/sample_channel/d24941643ff471e40d7761c71f4e3a4c21a4a5e89b0281430d01e78a4e46/000000000004.mdblob b/Tribler/Test/Core/data/sample_channel/d24941643ff471e40d7761c71f4e3a4c21a4a5e89b0281430d01e78a4e46/000000000004.mdblob deleted file mode 100644 index ee9a6a99b82..00000000000 Binary files a/Tribler/Test/Core/data/sample_channel/d24941643ff471e40d7761c71f4e3a4c21a4a5e89b0281430d01e78a4e46/000000000004.mdblob and /dev/null differ diff --git a/Tribler/Test/Core/data/sqlite_scripts/script1.sql b/Tribler/Test/Core/data/sqlite_scripts/script1.sql deleted file mode 100644 index b2e27b6a827..00000000000 --- a/Tribler/Test/Core/data/sqlite_scripts/script1.sql +++ /dev/null @@ -1,13 +0,0 @@ -BEGIN TRANSACTION create_table; - ----------------------------------------- - -CREATE TABLE MyPreference ( - torrent_id integer PRIMARY KEY NOT NULL, - destination_path text NOT NULL, - creation_time integer NOT NULL -); - -------------------------------------- - -COMMIT TRANSACTION create_table; diff --git a/Tribler/Test/Core/data/upgrade_databases/torrent_upgrade_64_dispersy.db b/Tribler/Test/Core/data/upgrade_databases/torrent_upgrade_64_dispersy.db deleted file mode 100644 index 8975912bc25..00000000000 Binary files a/Tribler/Test/Core/data/upgrade_databases/torrent_upgrade_64_dispersy.db and /dev/null differ diff --git a/Tribler/Test/Core/data/upgrade_databases/tribler_v12.sdb b/Tribler/Test/Core/data/upgrade_databases/tribler_v12.sdb deleted file mode 100644 index 44ab32df471..00000000000 Binary files a/Tribler/Test/Core/data/upgrade_databases/tribler_v12.sdb and /dev/null differ diff --git a/Tribler/Test/Core/data/upgrade_databases/tribler_v17.sdb b/Tribler/Test/Core/data/upgrade_databases/tribler_v17.sdb deleted file mode 100644 index 444501e0d55..00000000000 Binary files a/Tribler/Test/Core/data/upgrade_databases/tribler_v17.sdb and /dev/null differ diff --git a/Tribler/Test/Core/data/upgrade_databases/tribler_v29.sdb b/Tribler/Test/Core/data/upgrade_databases/tribler_v29.sdb new file mode 100644 index 00000000000..429caabd0ab Binary files /dev/null and b/Tribler/Test/Core/data/upgrade_databases/tribler_v29.sdb differ diff --git a/Tribler/Test/Core/test_launch_many_cores.py b/Tribler/Test/Core/test_launch_many_cores.py index 0d5c75e9f8a..c3e33a15ba9 100644 --- a/Tribler/Test/Core/test_launch_many_cores.py +++ b/Tribler/Test/Core/test_launch_many_cores.py @@ -1,21 +1,23 @@ +from __future__ import absolute_import + import os +from threading import RLock from nose.tools import raises -from Tribler.Test.tools import trial_timeout + from twisted.internet.defer import Deferred -from Tribler.Core import NoDispersyRLock from Tribler.Core.APIImplementation.LaunchManyCore import TriblerLaunchMany from Tribler.Core.Modules.payout_manager import PayoutManager from Tribler.Core.TorrentDef import TorrentDef from 
Tribler.Core.Utilities.configparser import CallbackConfigParser -from Tribler.Core.simpledefs import DLSTATUS_STOPPED_ON_ERROR, DLSTATUS_SEEDING, DLSTATUS_DOWNLOADING -from Tribler.Test.Core.base_test import TriblerCoreTest, MockObject -from Tribler.Test.common import TESTS_DATA_DIR +from Tribler.Core.simpledefs import DLSTATUS_DOWNLOADING, DLSTATUS_SEEDING, DLSTATUS_STOPPED_ON_ERROR +from Tribler.Test.Core.base_test import MockObject, TriblerCoreTest from Tribler.Test.test_as_server import TestAsServer -from Tribler.community.allchannel.community import AllChannelCommunity -from Tribler.community.search.community import SearchCommunity -from Tribler.dispersy.discovery.community import DiscoveryCommunity +from Tribler.Test.tools import trial_timeout +from Tribler.community.gigachannel.community import GigaChannelCommunity +from Tribler.community.market.community import MarketCommunity +from Tribler.pyipv8.ipv8.attestation.trustchain.community import TrustChainCommunity class TestLaunchManyCore(TriblerCoreTest): @@ -27,7 +29,7 @@ class TestLaunchManyCore(TriblerCoreTest): def setUp(self): TriblerCoreTest.setUp(self) self.lm = TriblerLaunchMany() - self.lm.session_lock = NoDispersyRLock() + self.lm.session_lock = RLock() self.lm.session = MockObject() self.lm.session.config = MockObject() self.lm.session.config.get_max_upload_rate = lambda: 100 @@ -150,29 +152,6 @@ def mocked_resume_download(filename, setupDelay=3): self.lm.load_checkpoint() self.assertTrue(mocked_resume_download.called) - def test_resume_download(self): - with open(os.path.join(TESTS_DATA_DIR, "bak_single.torrent"), mode='rb') as torrent_file: - torrent_data = torrent_file.read() - - def mocked_load_download_pstate(_): - raise ValueError() - - def mocked_add(tdef, dscfg, pstate, **_): - self.assertTrue(tdef) - self.assertTrue(dscfg) - self.assertIsNone(pstate) - mocked_add.called = True - mocked_add.called = False - - self.lm.load_download_pstate = mocked_load_download_pstate - self.lm.torrent_store = MockObject() - self.lm.torrent_store.get = lambda _: torrent_data - self.lm.add = mocked_add - self.lm.mypref_db = MockObject() - self.lm.mypref_db.getMyPrefStatsInfohash = lambda _: TESTS_DATA_DIR - self.lm.resume_download('%s.state' % ('a' * 20)) - self.assertTrue(mocked_add.called) - class TestLaunchManyCoreFullSession(TestAsServer): """ @@ -183,26 +162,24 @@ def setUpPreSession(self): TestAsServer.setUpPreSession(self) # Enable all communities - config_sections = ['search_community', 'trustchain', 'allchannel_community', 'channel_community', - 'preview_channel_community', 'tunnel_community', 'dispersy', 'ipv8', 'dht'] + config_sections = ['trustchain', 'tunnel_community', 'ipv8', 'dht', 'chant', 'market_community'] for section in config_sections: self.config.config[section]['enabled'] = True - self.config.set_megacache_enabled(True) self.config.set_tunnel_community_socks5_listen_ports(self.get_socks5_ports()) - self.config.set_ipv8_bootstrap_override("127.0.0.1:12345") + self.config.set_ipv8_bootstrap_override("127.0.0.1:12345") # So we do not contact the real trackers - def get_community(self, community_cls): - for community in self.session.get_dispersy_instance().get_communities(): - if isinstance(community, community_cls): - return community + def get_community(self, overlay_cls): + for overlay in self.session.get_ipv8_instance().overlays: + if isinstance(overlay, overlay_cls): + return overlay def test_load_communities(self): """ - Testing whether all Dispersy/IPv8 communities can be succesfully loaded + Testing 
whether all IPv8 communities can be successfully loaded """ - self.assertTrue(self.get_community(DiscoveryCommunity)) self.assertTrue(self.session.lm.initComplete) - self.assertTrue(self.get_community(SearchCommunity)) - self.assertTrue(self.get_community(AllChannelCommunity)) + self.assertTrue(self.get_community(GigaChannelCommunity)) + self.assertTrue(self.get_community(MarketCommunity)) + self.assertTrue(self.get_community(TrustChainCommunity)) diff --git a/Tribler/Test/Core/test_leveldb_store.py b/Tribler/Test/Core/test_leveldb_store.py deleted file mode 100644 index 5d966ae7bbe..00000000000 --- a/Tribler/Test/Core/test_leveldb_store.py +++ /dev/null @@ -1,150 +0,0 @@ -""" -Tests for the LevelDB. - -Author(s): Elric Milon -""" -import os - -from nose.tools import raises -from shutil import rmtree -from tempfile import mkdtemp -from twisted.internet.task import Clock - -from Tribler.Core.leveldbstore import LevelDbStore, WRITEBACK_PERIOD, get_write_batch_leveldb -from Tribler.Test.Core.base_test import MockObject -from Tribler.Test.test_as_server import BaseTestCase - - -K = "foo" -V = "bar" - - -class ClockedAbstractLevelDBStore(LevelDbStore): - _reactor = Clock() - - -class ClockedLevelDBStore(ClockedAbstractLevelDBStore): - from leveldb import LevelDB - _leveldb = LevelDB - _writebatch = get_write_batch_leveldb - - -class AbstractTestLevelDBStore(BaseTestCase): - - skip = True - _storetype = None - - def __init__(self, *argv, **kwargs): - super(AbstractTestLevelDBStore, self).__init__(*argv, **kwargs) - - self.store_dir = None - self.store = None - - def setUp(self): - self.openStore(mkdtemp(prefix=__name__)) - - def tearDown(self): - self.closeStore() - - def closeStore(self): - self.store.close() - rmtree(self.store_dir) - self.store = None - - def openStore(self, store_dir): - self.store_dir = store_dir - self.store = self._storetype(self.store_dir) - - def test_storeIsPersistent(self): - self.store.put(K, V) - self.assertEqual(self.store.get(K), V) - store_dir = self.store._store_dir - self.store.close() - self.openStore(store_dir) - self.assertEqual(self.store.get(K), V) - - def test_canPutAndDelete(self): - self.store[K] = V - self.assertEqual(self.store[K], V) - del self.store[K] - self.assertEqual(None, self.store.get(K)) - with self.assertRaises(KeyError) as raises: - self.store[K] - - def test_PutGet(self): - self.store._db.Put(K, V) - self.assertEqual(V, self.store._db.Get(K)) - - def test_cacheIsFlushed(self): - self.store[K] = V - self.assertEqual(1, len(self.store._pending_torrents)) - self.store._reactor.advance(WRITEBACK_PERIOD) - self.assertEqual(0, len(self.store._pending_torrents)) - - def test_len(self): - self.assertEqual(0, len(self.store)) - self.store[K] = V - self.assertEqual(1, len(self.store), 1) - # test that even after writing the cached data, the lenght is still the same - self.store.flush() - self.assertEqual(1, len(self.store), 2) - - def test_contains(self): - self.assertFalse(K in self.store) - self.store[K] = V - self.assertTrue(K in self.store) - - @raises(StopIteration) - def test_iter_empty(self): - iteritems = self.store.iteritems() - self.assertTrue(iteritems.next()) - - def test_iter_one_element(self): - self.store[K] = V - iteritems = self.store.iteritems() - self.assertEqual(iteritems.next(), K) - - def test_iter(self): - self.store[K] = V - for key in iter(self.store): - self.assertTrue(key) - - -class TestLevelDBStore(AbstractTestLevelDBStore): - skip = False - _storetype = ClockedLevelDBStore - - def test_invalid_handle(self): - 
self.store.close() - - open(os.path.join(self.store_dir, 'test.txt'), 'a').close() - - # Make the leveldb files corrupt - for dir_file in os.listdir(self.store_dir): - with open(os.path.join(self.store_dir, dir_file), 'a') as file_handler: - file_handler.write('abcde') - - self.openStore(self.store_dir) - self.assertFalse(os.path.exists(os.path.join(self.store_dir, 'test.txt'))) - - def test_flush(self): - """ Tests if flush() does multiple retries incase of failure. """ - def mock_db_write(store, _): - store.write_retry += 1 - raise Exception("SomeLevelDBError") - - self.store._db = MockObject() - self.store._db.Write = lambda batch: mock_db_write(self.store, batch) - - self.store.write_retry = 0 - - # No store operation yet, no write should be called on flush - self.store.flush() - self.assertEqual(self.store.write_retry, 0) - - # Store something and check if it is in cache - self.store[K] = V - self.assertIsNotNone(self.store._pending_torrents) - # Now flush; Mock DB write through an exception so flush() should be tried 3 times - self.store.flush() - self.assertEqual(self.store.write_retry, 3, "Three retry to flush was expected incase of error") diff --git a/Tribler/Test/Core/test_leveldb_store_plyvel.py b/Tribler/Test/Core/test_leveldb_store_plyvel.py deleted file mode 100644 index 6da8df03fa7..00000000000 --- a/Tribler/Test/Core/test_leveldb_store_plyvel.py +++ /dev/null @@ -1,13 +0,0 @@ -from Tribler.Core.leveldbstore import get_write_batch_plyvel -from Tribler.Test.Core.test_leveldb_store import ClockedAbstractLevelDBStore, AbstractTestLevelDBStore - - -class ClockedPlyvelStore(ClockedAbstractLevelDBStore): - from Tribler.Core.plyveladapter import LevelDB - _leveldb = LevelDB - _writebatch = get_write_batch_plyvel - - -class TestPlyvelStore(AbstractTestLevelDBStore): - skip = False - _storetype = ClockedPlyvelStore diff --git a/Tribler/Test/Core/test_notifier.py b/Tribler/Test/Core/test_notifier.py index e21e67b289f..6b549008784 100644 --- a/Tribler/Test/Core/test_notifier.py +++ b/Tribler/Test/Core/test_notifier.py @@ -1,7 +1,9 @@ -from twisted.internet.defer import inlineCallbacks, Deferred +from __future__ import absolute_import -from Tribler.Core.CacheDB.Notifier import Notifier -from Tribler.Core.simpledefs import NTFY_TORRENTS, NTFY_STARTED, NTFY_FINISHED +from twisted.internet.defer import Deferred, inlineCallbacks + +from Tribler.Core.Notifier import Notifier +from Tribler.Core.simpledefs import NTFY_FINISHED, NTFY_STARTED, NTFY_TORRENTS from Tribler.Test.Core.base_test import TriblerCoreTest from Tribler.Test.tools import trial_timeout diff --git a/Tribler/Test/Core/test_permid.py b/Tribler/Test/Core/test_permid.py index 8630e9ce4c2..d697f3db173 100644 --- a/Tribler/Test/Core/test_permid.py +++ b/Tribler/Test/Core/test_permid.py @@ -1,11 +1,12 @@ +from __future__ import absolute_import + import os -from M2Crypto.EC import EC from twisted.internet.defer import inlineCallbacks from Tribler.Core import permid -from Tribler.pyipv8.ipv8.keyvault.private.libnaclkey import LibNaCLSK from Tribler.Test.Core.base_test import TriblerCoreTest +from Tribler.pyipv8.ipv8.keyvault.private.libnaclkey import LibNaCLSK class TriblerCoreTestPermid(TriblerCoreTest): @@ -14,26 +15,10 @@ class TriblerCoreTestPermid(TriblerCoreTest): def setUp(self): yield super(TriblerCoreTestPermid, self).setUp() # All the files are in self.session_base_dir, so they will automatically be cleaned on tearDown() - self.pub_key_path = os.path.join(self.session_base_dir, 'pub_key.pem') - self.key_pair_path = 
os.path.join(self.session_base_dir, 'pair.pem') self.pub_key_path_trustchain = os.path.join(self.session_base_dir, 'pub_key_multichain.pem') self.key_pair_path_trustchain = os.path.join(self.session_base_dir, 'pair_multichain.pem') - def test_save_load_keypair_pubkey(self): - permid.init() - key = permid.generate_keypair() - - permid.save_keypair(key, self.key_pair_path) - permid.save_pub_key(key, self.pub_key_path) - - self.assertTrue(os.path.isfile(self.pub_key_path)) - self.assertTrue(os.path.isfile(self.key_pair_path)) - - loaded_key = permid.read_keypair(self.key_pair_path) - self.assertIsInstance(loaded_key, EC) - def test_save_load_keypair_pubkey_trustchain(self): - permid.init() key = permid.generate_keypair_trustchain() permid.save_keypair_trustchain(key, self.key_pair_path_trustchain) diff --git a/Tribler/Test/Core/test_session.py b/Tribler/Test/Core/test_session.py index 3a4d8070714..fed565b0c70 100644 --- a/Tribler/Test/Core/test_session.py +++ b/Tribler/Test/Core/test_session.py @@ -1,97 +1,26 @@ -from binascii import hexlify, unhexlify +from __future__ import absolute_import + +from binascii import unhexlify from nose.tools import raises -from Tribler.Test.tools import trial_timeout -from twisted.internet.defer import Deferred, inlineCallbacks -from Tribler.Core.Config.tribler_config import TriblerConfig +from twisted.internet.defer import inlineCallbacks + from Tribler.Core.DownloadConfig import DownloadStartupConfig -from Tribler.Core.Session import Session, SOCKET_BLOCK_ERRORCODE +from Tribler.Core.Session import SOCKET_BLOCK_ERRORCODE from Tribler.Core.TorrentDef import TorrentDef -from Tribler.Core.exceptions import OperationNotEnabledByConfigurationException, DuplicateTorrentFileError -from Tribler.Core.leveldbstore import LevelDbStore -from Tribler.Core.simpledefs import NTFY_CHANNELCAST, SIGNAL_CHANNEL, SIGNAL_ON_CREATED -from Tribler.Test.Core.base_test import TriblerCoreTest, MockObject +from Tribler.Core.exceptions import OperationNotEnabledByConfigurationException +from Tribler.Test.Core.base_test import MockObject from Tribler.Test.common import TORRENT_UBUNTU_FILE from Tribler.Test.test_as_server import TestAsServer - - -class TestSession(TriblerCoreTest): - - @raises(OperationNotEnabledByConfigurationException) - def test_torrent_store_not_enabled(self): - config = TriblerConfig() - config.set_state_dir(self.getStateDir()) - config.set_torrent_store_enabled(False) - session = Session(config) - session.delete_collected_torrent(None) - - def test_torrent_store_delete(self): - config = TriblerConfig() - config.set_state_dir(self.getStateDir()) - config.set_torrent_store_enabled(True) - session = Session(config) - # Manually set the torrent store as we don't want to start the session. - session.lm.torrent_store = LevelDbStore(session.config.get_torrent_store_dir()) - session.lm.torrent_store[hexlify("fakehash")] = "Something" - self.assertEqual("Something", session.lm.torrent_store[hexlify("fakehash")]) - session.delete_collected_torrent("fakehash") - - raised_key_error = False - # This structure is needed because if we add a @raises above the test, we cannot close the DB - # resulting in a dirty reactor. - try: - self.assertRaises(KeyError,session.lm.torrent_store[hexlify("fakehash")]) - except KeyError: - raised_key_error = True - finally: - session.lm.torrent_store.close() - - self.assertTrue(raised_key_error) - - def test_create_channel(self): - """ - Test the pass through function of Session.create_channel to the ChannelManager. 
- """ - - class LmMock(object): - class ChannelManager(object): - invoked_name = None - invoked_desc = None - invoked_mode = None - - def create_channel(self, name, description, mode=u"closed"): - self.invoked_name = name - self.invoked_desc = description - self.invoked_mode = mode - - channel_manager = ChannelManager() - - config = TriblerConfig() - config.set_state_dir(self.getStateDir()) - session = Session(config) - session.lm = LmMock() - session.lm.api_manager = None - - session.create_channel("name", "description", "open") - self.assertEqual(session.lm.channel_manager.invoked_name, "name") - self.assertEqual(session.lm.channel_manager.invoked_desc, "description") - self.assertEqual(session.lm.channel_manager.invoked_mode, "open") +from Tribler.Test.tools import trial_timeout class TestSessionAsServer(TestAsServer): - def setUpPreSession(self): - super(TestSessionAsServer, self).setUpPreSession() - self.config.set_megacache_enabled(True) - self.config.set_torrent_collecting_enabled(True) - self.config.set_channel_search_enabled(True) - self.config.set_dispersy_enabled(True) - @inlineCallbacks def setUp(self): yield super(TestSessionAsServer, self).setUp() - self.channel_db_handler = self.session.open_dbhandler(NTFY_CHANNELCAST) self.called = None def mock_endpoints(self): @@ -145,48 +74,6 @@ def on_tribler_exception(_): self.session.unhandled_error_observer({'isError': True, 'log_failure': 'exceptions.RuntimeError: invalid info-hash'}) - @trial_timeout(10) - def test_add_torrent_def_to_channel(self): - """ - Test whether adding a torrent def to a channel works - """ - test_deferred = Deferred() - - torrent_def = TorrentDef.load(TORRENT_UBUNTU_FILE) - - def on_channel_created(subject, change_type, object_id, channel_data): - channel_id = self.channel_db_handler.getMyChannelId() - self.session.add_torrent_def_to_channel(channel_id, torrent_def, {"description": "iso"}, forward=False) - self.assertTrue(self.channel_db_handler.hasTorrent(channel_id, torrent_def.get_infohash())) - test_deferred.callback(None) - - self.session.add_observer(on_channel_created, SIGNAL_CHANNEL, [SIGNAL_ON_CREATED]) - self.session.create_channel("name", "description", "open") - - return test_deferred - - @trial_timeout(10) - def test_add_torrent_def_to_channel_duplicate(self): - """ - Test whether adding a torrent def twice to a channel raises an exception - """ - test_deferred = Deferred() - - torrent_def = TorrentDef.load(TORRENT_UBUNTU_FILE) - - def on_channel_created(subject, change_type, object_id, channel_data): - channel_id = self.channel_db_handler.getMyChannelId() - try: - self.session.add_torrent_def_to_channel(channel_id, torrent_def, forward=False) - self.session.add_torrent_def_to_channel(channel_id, torrent_def, forward=False) - except DuplicateTorrentFileError: - test_deferred.callback(None) - - self.session.add_observer(on_channel_created, SIGNAL_CHANNEL, [SIGNAL_ON_CREATED]) - self.session.create_channel("name", "description", "open") - - return test_deferred - def test_load_checkpoint(self): self.load_checkpoint_called = False @@ -205,69 +92,6 @@ def test_get_libtorrent_process_not_enabled(self): self.session.config.get_libtorrent_enabled = lambda: False self.session.get_libtorrent_process() - @raises(OperationNotEnabledByConfigurationException) - def test_open_dbhandler(self): - """ - Opening the database without the megacache enabled should raise an exception. 
- """ - self.session.config.get_megacache_enabled = lambda: False - self.session.open_dbhandler("x") - - def test_close_dbhandler(self): - handler = MockObject() - self.called = False - - def verify_close_called(): - self.called = True - handler.close = verify_close_called - Session.close_dbhandler(handler) - self.assertTrue(self.called) - - def test_download_torrentfile(self): - """ - When libtorrent is not enabled, an exception should be thrown when downloading a torrentfile. - """ - self.called = False - - def verify_download_torrentfile_call(*args, **kwargs): - self.called = True - self.session.lm.rtorrent_handler.download_torrent = verify_download_torrentfile_call - - self.session.download_torrentfile() - self.assertTrue(self.called) - - def test_download_torrentfile_from_peer(self): - """ - When libtorrent is not enabled, an exception should be thrown when downloading a torrentfile from a peer. - """ - self.called = False - - def verify_download_torrentfile_call(*args, **kwargs): - self.called = True - self.session.lm.rtorrent_handler.download_torrent = verify_download_torrentfile_call - - self.session.download_torrentfile_from_peer("a") - self.assertTrue(self.called) - - def test_download_torrentmessage_from_peer(self): - """ - When libtorrent is not enabled, an exception should be thrown when downloading a torrentfile from a peer. - """ - self.called = False - - def verify_download_torrentmessage_call(*args, **kwargs): - self.called = True - self.session.lm.rtorrent_handler.download_torrentmessage = verify_download_torrentmessage_call - - self.session.download_torrentmessage_from_peer("a", "b", "c") - self.assertTrue(self.called) - - def test_get_permid(self): - """ - Retrieving the string encoded permid should be successful. - """ - self.assertIsInstance(self.session.get_permid(), str) - def test_remove_download_by_id_empty(self): """ Remove downloads method when empty. @@ -295,14 +119,6 @@ def verify_remove_download_called(*args, **kwargs): self.session.remove_download_by_id(infohash) self.assertTrue(self.called) - @raises(OperationNotEnabledByConfigurationException) - def test_get_dispersy_instance(self): - """ - Test whether the get dispersy instance throws an exception if dispersy is not enabled. - """ - self.session.config.get_dispersy_enabled = lambda: False - self.session.get_dispersy_instance() - @raises(OperationNotEnabledByConfigurationException) def test_get_ipv8_instance(self): """ @@ -311,54 +127,6 @@ def test_get_ipv8_instance(self): self.session.config.set_ipv8_enabled(False) self.session.get_ipv8_instance() - @raises(OperationNotEnabledByConfigurationException) - def test_has_collected_torrent(self): - """ - Test whether the has_collected_torrent throws an exception if dispersy is not enabled. - """ - self.session.config.get_torrent_store_enabled = lambda: False - self.session.has_collected_torrent(None) - - @raises(OperationNotEnabledByConfigurationException) - def test_get_collected_torrent(self): - """ - Test whether the get_collected_torrent throws an exception if dispersy is not enabled. - """ - self.session.config.get_torrent_store_enabled = lambda: False - self.session.get_collected_torrent(None) - - @raises(OperationNotEnabledByConfigurationException) - def test_save_collected_torrent(self): - """ - Test whether the save_collected_torrent throws an exception if dispersy is not enabled. 
- """ - self.session.config.get_torrent_store_enabled = lambda: False - self.session.save_collected_torrent(None, None) - - @raises(OperationNotEnabledByConfigurationException) - def test_delete_collected_torrent(self): - """ - Test whether the delete_collected_torrent throws an exception if dispersy is not enabled. - """ - self.session.config.get_torrent_store_enabled = lambda: False - self.session.delete_collected_torrent(None) - - @raises(OperationNotEnabledByConfigurationException) - def test_search_remote_channels(self): - """ - Test whether the search_remote_channels throws an exception if dispersy is not enabled. - """ - self.session.config.get_channel_search_enabled = lambda: False - self.session.search_remote_channels(None) - - @raises(OperationNotEnabledByConfigurationException) - def test_get_thumbnail_data(self): - """ - Test whether the get_thumbnail_data throws an exception if dispersy is not enabled. - """ - self.session.lm.metadata_store = None - self.session.get_thumbnail_data(None) - class TestSessionWithLibTorrent(TestSessionAsServer): diff --git a/Tribler/Test/Core/test_sqlitecachedb.py b/Tribler/Test/Core/test_sqlitecachedb.py deleted file mode 100644 index 8beb3500916..00000000000 --- a/Tribler/Test/Core/test_sqlitecachedb.py +++ /dev/null @@ -1,231 +0,0 @@ -import os -import shutil -import sys -from unittest import skipIf - -from apsw import SQLError, CantOpenError -from nose.tools import raises -from twisted.internet.defer import inlineCallbacks - -from Tribler.Core.CacheDB.sqlitecachedb import SQLiteCacheDB, DB_SCRIPT_ABSOLUTE_PATH, CorruptedDatabaseError -from Tribler.Test.Core.base_test import TriblerCoreTest, MockObject - - -class TestSqliteCacheDB(TriblerCoreTest): - - FILE_DIR = os.path.abspath(os.path.dirname(os.path.realpath(__file__))) - SQLITE_SCRIPTS_DIR = os.path.abspath(os.path.join(FILE_DIR, u"data/sqlite_scripts/")) - - @inlineCallbacks - def setUp(self): - yield super(TestSqliteCacheDB, self).setUp() - - db_path = u":memory:" - - self.sqlite_test = SQLiteCacheDB(db_path) - self.sqlite_test.set_show_sql(True) - - def tearDown(self): - self.sqlite_test.close() - self.sqlite_test = None - super(TestSqliteCacheDB, self).tearDown() - - def test_create_db(self): - sql = u"CREATE TABLE person(lastname, firstname);" - self.sqlite_test.execute(sql) - - self.assertIsInstance(self.sqlite_test.version, int) - - @raises(OSError) - def test_no_file_db_error(self): - file_dir = os.path.abspath(os.path.dirname(os.path.realpath(__file__))) - sqlite_test_2 = SQLiteCacheDB(file_dir) - - def test_open_db_new_file(self): - db_path = os.path.join(self.session_base_dir, "test_db.db") - sqlite_test_2 = SQLiteCacheDB(db_path) - self.assertTrue(os.path.isfile(db_path)) - - @raises(OSError) - def test_open_db_script_file_invalid_location(self): - sqlite_test_2 = SQLiteCacheDB(os.path.join(self.session_base_dir, "test_db.db"), u'myfakelocation') - - @raises(OSError) - def test_open_db_script_file_directory(self): - file_dir = os.path.abspath(os.path.dirname(os.path.realpath(__file__))) - sqlite_test_2 = SQLiteCacheDB(os.path.join(self.session_base_dir, "test_db.db"), file_dir) - - def test_open_db_script_file(self): - sqlite_test_2 = SQLiteCacheDB(os.path.join(self.session_base_dir, "test_db.db"), DB_SCRIPT_ABSOLUTE_PATH) - - sqlite_test_2.write_version(4) - self.assertEqual(sqlite_test_2.version, 4) - - @raises(SQLError) - def test_failed_commit(self): - sqlite_test_2 = SQLiteCacheDB(os.path.join(self.session_base_dir, "test_db.db"), DB_SCRIPT_ABSOLUTE_PATH) - 
sqlite_test_2.initial_begin() - sqlite_test_2.write_version(4) - - @skipIf(sys.platform == "win32", "chmod does not work on Windows") - @raises(IOError) - def test_no_permission_on_script(self): - db_path = os.path.join(self.session_base_dir, "test_db.db") - new_script_path = os.path.join(self.session_base_dir, "script.sql") - shutil.copyfile(DB_SCRIPT_ABSOLUTE_PATH, new_script_path) - os.chmod(new_script_path, 0) - sqlite_test_2 = SQLiteCacheDB(db_path, new_script_path) - - @raises(CorruptedDatabaseError) - def test_no_version_info_in_database(self): - sqlite_test_2 = SQLiteCacheDB(os.path.join(self.session_base_dir, "test_db.db"), - os.path.join(self.SQLITE_SCRIPTS_DIR, "script1.sql")) - - @raises(CorruptedDatabaseError) - def test_integrity_check_failed(self): - sqlite_test_2 = SQLiteCacheDB(os.path.join(self.session_base_dir, "test_db.db"), - os.path.join(self.SQLITE_SCRIPTS_DIR, "script1.sql")) - - def execute(sql): - if sql == u"PRAGMA quick_check": - db_response = MockObject() - db_response.next = lambda: ("Error: database disk image is malformed", ) - return db_response - - sqlite_test_2.execute = execute - - def test_integrity_check_triggered(self): - """ Tests if integrity check is triggered if temporary rollback files are present.""" - def do_integrity_check(_): - do_integrity_check.called = True - - db_path = os.path.join(self.session_base_dir, "test_db.db") - sqlite_test = SQLiteCacheDB(db_path) - sqlite_test.do_quick_integrity_check = do_integrity_check - do_integrity_check.called = False - self.assertFalse(do_integrity_check.called) - - db_path2 = os.path.join(self.session_base_dir, "test_db2.db") - wal_file = open(os.path.join(self.session_base_dir, "test_db2.db-shm"), 'w') - wal_file.close() - - do_integrity_check.called = False - SQLiteCacheDB.do_quick_integrity_check = do_integrity_check - sqlite_test_2 = SQLiteCacheDB(db_path2) - self.assertTrue(do_integrity_check.called) - - def test_clean_db(self): - sqlite_test_2 = SQLiteCacheDB(os.path.join(self.session_base_dir, "test_db.db"), DB_SCRIPT_ABSOLUTE_PATH) - sqlite_test_2.clean_db(vacuum=True, exiting=False) - sqlite_test_2.close() - - @skipIf(sys.platform == "win32", "chmod does not work on Windows") - @raises(CantOpenError) - def test_open_db_connection_no_permission(self): - os.chmod(os.path.join(self.session_base_dir), 0) - sqlite_test_2 = SQLiteCacheDB(os.path.join(self.session_base_dir, "test_db.db")) - - def test_insert(self): - self.test_create_db() - - self.sqlite_test.insert('person', lastname='a', firstname='b') - self.assertEqual(self.sqlite_test.size('person'), 1) - - def test_fetchone(self): - self.test_insert() - one = self.sqlite_test.fetchone(u"SELECT * FROM person") - self.assertEqual(one, ('a', 'b')) - - one = self.sqlite_test.fetchone(u"SELECT lastname FROM person WHERE firstname == 'b'") - self.assertEqual(one, 'a') - - one = self.sqlite_test.fetchone(u"SELECT lastname FROM person WHERE firstname == 'c'") - self.assertIsNone(one) - - def test_insertmany(self): - self.test_create_db() - - values = [] - for i in range(100): - value = (str(i), str(i ** 2)) - values.append(value) - self.sqlite_test.insertMany('person', values) - self.assertEqual(self.sqlite_test.size('person'), 100) - - def test_fetchall(self): - self.test_insertmany() - - all = self.sqlite_test.fetchall('select * from person') - self.assertEqual(len(all), 100) - - all = self.sqlite_test.fetchall("select * from person where lastname=='101'") - self.assertEqual(all, []) - - def test_insertorder(self): - self.test_insertmany() - - 
self.sqlite_test.insert('person', lastname='1', firstname='abc') - one = self.sqlite_test.fetchone("select firstname from person where lastname == '1'") - self.assertTrue(one == '1' or one == 'abc') - - all = self.sqlite_test.fetchall("select firstname from person where lastname == '1'") - self.assertEqual(len(all), 2) - - def test_update(self): - self.test_insertmany() - - self.sqlite_test.update('person', "lastname == '2'", firstname='56') - one = self.sqlite_test.fetchone("select firstname from person where lastname == '2'") - self.assertEqual(one, '56') - - self.sqlite_test.update('person', "lastname == '3'", firstname=65) - one = self.sqlite_test.fetchone("select firstname from person where lastname == '3'") - self.assertEqual(one, 65) - - self.sqlite_test.update('person', "lastname == '4'", firstname=654, lastname=44) - one = self.sqlite_test.fetchone("select firstname from person where lastname == 44") - self.assertEqual(one, 654) - - def test_delete_single_element(self): - """ - This test tests whether deleting using a single element as value works. - """ - self.test_insert() - self.sqlite_test.insert('person', lastname='x', firstname='z') - one = self.sqlite_test.fetchone(u"SELECT * FROM person") - self.assertEqual(one, ('a', 'b')) - self.sqlite_test.delete("person", lastname="a") - one = self.sqlite_test.fetchone(u"SELECT * FROM person") - self.assertEqual(one, ('x', 'z')) - - def test_delete_tuple(self): - """ - This test tests whether deleting using a tuple as value works. - """ - self.test_insert() - self.sqlite_test.insert('person', lastname='x', firstname='z') - one = self.sqlite_test.fetchone(u"SELECT * FROM person") - self.assertEqual(one, ('a', 'b')) - self.sqlite_test.delete("person", lastname=("LIKE", "a")) - one = self.sqlite_test.fetchone(u"SELECT * FROM person") - self.assertEqual(one, ('x', 'z')) - - def test_commit_now_error_non_exit(self): - """ - Test if commit_now raises an error when we are not exiting. - """ - self.test_insert() - self.sqlite_test.insert('person', lastname='x', firstname='z') - self.sqlite_test.execute(u"COMMIT;") - self.assertRaises(SQLError, self.sqlite_test.commit_now) - - def test_commit_now_error_on_exit(self): - """ - Test if commit_now does not raise an error when we are exiting. - - See also test_commit_now_error_non_exit. 
- """ - self.test_insert() - self.sqlite_test.insert('person', lastname='x', firstname='z') - self.sqlite_test.execute(u"COMMIT;") - self.assertIsNone(self.sqlite_test.commit_now(exiting=True)) diff --git a/Tribler/Test/Core/test_sqlitecachedbhandler.py b/Tribler/Test/Core/test_sqlitecachedbhandler.py deleted file mode 100644 index 4fb77fad68a..00000000000 --- a/Tribler/Test/Core/test_sqlitecachedbhandler.py +++ /dev/null @@ -1,77 +0,0 @@ -import os -import tarfile - -from configobj import ConfigObj -from twisted.internet.defer import inlineCallbacks - -from Tribler.Core.CacheDB.SqliteCacheDBHandler import (BasicDBHandler, LimitedOrderedDict) -from Tribler.Core.CacheDB.sqlitecachedb import SQLiteCacheDB -from Tribler.Core.Config.tribler_config import TriblerConfig, CONFIG_SPEC_PATH -from Tribler.Core.Session import Session -from Tribler.Test.Core.base_test import TriblerCoreTest -from Tribler.Test.common import TESTS_DATA_DIR - - -BUSYTIMEOUT = 5000 - - -class TestLimitedOrderedDict(TriblerCoreTest): - - def test_limited_ordered_dict(self): - od = LimitedOrderedDict(3) - od['foo'] = 'bar' - od['bar'] = 'foo' - od['foobar'] = 'foobar' - self.assertEqual(len(od), 3) - od['another'] = 'another' - self.assertEqual(len(od), 3) - - -class AbstractDB(TriblerCoreTest): - - def setUpPreSession(self): - self.config = TriblerConfig(ConfigObj(configspec=CONFIG_SPEC_PATH)) - self.config.set_state_dir(self.getStateDir()) - self.config.set_torrent_checking_enabled(False) - self.config.set_megacache_enabled(False) - self.config.set_dispersy_enabled(False) - self.config.set_mainline_dht_enabled(False) - self.config.set_torrent_collecting_enabled(False) - self.config.set_libtorrent_enabled(False) - self.config.set_video_server_enabled(False) - self.config.set_torrent_store_enabled(False) - - @inlineCallbacks - def setUp(self): - yield super(AbstractDB, self).setUp() - - self.setUpPreSession() - self.session = Session(self.config) - - tar = tarfile.open(os.path.join(TESTS_DATA_DIR, 'bak_new_tribler.sdb.tar.gz'), 'r|gz') - tar.extractall(self.session_base_dir) - - db_path = os.path.join(self.session_base_dir, 'bak_new_tribler.sdb') - - self.sqlitedb = SQLiteCacheDB(db_path, busytimeout=BUSYTIMEOUT) - self.session.sqlite_db = self.sqlitedb - - @inlineCallbacks - def tearDown(self): - self.sqlitedb.close() - self.sqlitedb = None - self.session = None - - yield super(AbstractDB, self).tearDown() - - -class TestSqliteBasicDBHandler(AbstractDB): - - @inlineCallbacks - def setUp(self): - yield super(TestSqliteBasicDBHandler, self).setUp() - self.db = BasicDBHandler(self.session, u"Peer") - - def test_size(self): - size = self.db.size() # there are 3995 peers in the table, however the upgrade scripts remove 8 superpeers - assert size == 3987, size diff --git a/Tribler/Test/Core/test_sqlitecachedbhandler_channels.py b/Tribler/Test/Core/test_sqlitecachedbhandler_channels.py deleted file mode 100644 index 862bc2cd64b..00000000000 --- a/Tribler/Test/Core/test_sqlitecachedbhandler_channels.py +++ /dev/null @@ -1,126 +0,0 @@ -from binascii import unhexlify - -from twisted.internet.defer import inlineCallbacks - -from Tribler.Core.CacheDB.SqliteCacheDBHandler import ChannelCastDBHandler, TorrentDBHandler, VoteCastDBHandler -from Tribler.Core.CacheDB.sqlitecachedb import str2bin -from Tribler.Test.Core.test_sqlitecachedbhandler import AbstractDB - - -class TestChannelDBHandler(AbstractDB): - - @inlineCallbacks - def setUp(self): - yield super(TestChannelDBHandler, self).setUp() - - self.cdb = 
ChannelCastDBHandler(self.session) - self.tdb = TorrentDBHandler(self.session) - self.vdb = VoteCastDBHandler(self.session) - self.cdb.votecast_db = self.vdb - self.cdb.torrent_db = self.tdb - - def test_get_metadata_torrents(self): - self.assertEqual(len(self.cdb.get_metadata_torrents()), 2) - self.assertEqual(len(self.cdb.get_metadata_torrents(is_collected=False)), 1) - - def test_get_torrent_metadata(self): - result = self.cdb.get_torrent_metadata(1) - self.assertEqual(result, {"thumb_hash": unhexlify("1234")}) - self.assertIsNone(self.cdb.get_torrent_metadata(200)) - - def test_get_dispersy_cid_from_channel_id(self): - self.assertEqual(self.cdb.getDispersyCIDFromChannelId(1), "1") - self.assertEqual(self.cdb.getDispersyCIDFromChannelId(3), "3") - - def test_get_channel_id_from_dispersy_cid(self): - self.assertEqual(self.cdb.getChannelIdFromDispersyCID(1), 1) - self.assertEqual(self.cdb.getChannelIdFromDispersyCID(3), 3) - - def test_get_count_max_from_channel_id(self): - self.assertEqual(self.cdb.getCountMaxFromChannelId(1), (2, 1457809687)) - self.assertEqual(self.cdb.getCountMaxFromChannelId(2), (1, 1457809861)) - - def test_search_channel(self): - self.assertEqual(len(self.cdb.searchChannels("another")), 1) - self.assertEqual(len(self.cdb.searchChannels("fancy")), 2) - - def test_get_channel(self): - channel = self.cdb.getChannel(1) - self.assertEqual(channel, (1, '1', u'Test Channel 1', u'Test', 3, 7, 5, 2, 1457795713, False)) - self.assertIsNone(self.cdb.getChannel(1234)) - - def test_get_channels(self): - channels = self.cdb.getChannels([1, 2, 3]) - self.assertEqual(len(channels), 3) - - def test_get_channels_by_cid(self): - self.assertEqual(len(self.cdb.getChannelsByCID(["3"])), 0) - - def test_get_all_channels(self): - self.assertEqual(len(self.cdb.getAllChannels()), 8) - - def test_get_new_channels(self): - self.assertEqual(len(self.cdb.getNewChannels()), 1) - - def test_get_latest_updated(self): - res = self.cdb.getLatestUpdated() - self.assertEqual(res[0][0], 6) - self.assertEqual(res[1][0], 7) - self.assertEqual(res[2][0], 5) - - def test_get_most_popular_channels(self): - res = self.cdb.getMostPopularChannels() - self.assertEqual(res[0][0], 6) - self.assertEqual(res[1][0], 7) - self.assertEqual(res[2][0], 8) - - def test_get_my_subscribed_channels(self): - res = self.cdb.getMySubscribedChannels(include_dispersy=True) - self.assertEqual(len(res), 1) - res = self.cdb.getMySubscribedChannels() - self.assertEqual(len(res), 0) - - def test_get_channels_no_votecast(self): - self.cdb.votecast_db = None - self.assertFalse(self.cdb._getChannels("SELECT id FROM channels")) - - def test_get_channel_empty_name(self): - update_channel = "INSERT INTO _Channels (dispersy_cid, peer_id, name, description) VALUES(?, ?, ?, ?)" - self.cdb._db.execute_write(update_channel, ('', '', '', 'unique_desc_123')) - - sql = "Select id, name, description, dispersy_cid, modified, nr_torrents, nr_favorite, nr_spam " + \ - "FROM Channels WHERE description = 'unique_desc_123'" - self.assertEqual(self.cdb._getChannels(sql)[0][2], 'Unnamed channel') - - def test_get_my_channel_id(self): - self.cdb._channel_id = 42 - self.assertEqual(self.cdb.getMyChannelId(), 42) - self.cdb._channel_id = None - self.assertEqual(self.cdb.getMyChannelId(), 1) - - def test_get_torrent_markings(self): - res = self.cdb.getTorrentMarkings(3) - self.assertEqual(res, [[u'test', 2, True], [u'another', 1, True]]) - res = self.cdb.getTorrentMarkings(1) - self.assertEqual(res, [[u'test', 1, True]]) - - def 
test_on_remove_playlist_torrent(self): - self.assertEqual(len(self.cdb.getTorrentsFromPlaylist(1, ['Torrent.torrent_id'])), 1) - self.cdb.on_remove_playlist_torrent(1, 1, str2bin('AA8cTG7ZuPsyblbRE7CyxsrKUCg='), False) - self.assertEqual(len(self.cdb.getTorrentsFromPlaylist(1, ['Torrent.torrent_id'])), 0) - - def test_on_remove_torrent_from_dispersy(self): - self.assertEqual(self.cdb.getTorrentFromChannelTorrentId(1, ['ChannelTorrents.dispersy_id']), 3) - self.cdb.on_remove_torrent_from_dispersy(1, 3, False) - self.assertIsNone(self.cdb.getTorrentFromChannelTorrentId(1, ['ChannelTorrents.dispersy_id'])) - - def test_search_local_channels(self): - """ - Testing whether the right results are returned when searching in the local database for channels - """ - results = self.cdb.search_in_local_channels_db("fancy") - self.assertEqual(len(results), 2) - self.assertNotEqual(results[0][-1], 0.0) # Relevance score of result should not be zero - - results = self.cdb.search_in_local_channels_db("fdajlkerhui") - self.assertEqual(len(results), 0) diff --git a/Tribler/Test/Core/test_sqlitecachedbhandler_peers.py b/Tribler/Test/Core/test_sqlitecachedbhandler_peers.py deleted file mode 100644 index e40ecbb628d..00000000000 --- a/Tribler/Test/Core/test_sqlitecachedbhandler_peers.py +++ /dev/null @@ -1,96 +0,0 @@ -from twisted.internet.defer import inlineCallbacks - -from Tribler.Core.CacheDB.SqliteCacheDBHandler import PeerDBHandler -from Tribler.Core.CacheDB.sqlitecachedb import str2bin -from Tribler.Test.Core.test_sqlitecachedbhandler import AbstractDB - - -FAKE_PERMID_X = 'fake_permid_x' + '0R0\x10\x00\x07*\x86H\xce=\x02\x01\x06\x05+\x81\x04\x00\x1a\x03>\x00\x04' - - -class TestSqlitePeerDBHandler(AbstractDB): - - @inlineCallbacks - def setUp(self): - yield super(TestSqlitePeerDBHandler, self).setUp() - - self.p1 = str2bin( - 'MFIwEAYHKoZIzj0CAQYFK4EEABoDPgAEAAA6SYI4NHxwQ8P7P8QXgWAP+v8SaMVzF5+fSUHdAMrs6NvL5Epe1nCNSdlBHIjNjEiC5iiwSFZhRLsr') - self.p2 = str2bin( - 'MFIwEAYHKoZIzj0CAQYFK4EEABoDPgAEAABo69alKy95H7RHzvDCsolAurKyrVvtDdT9/DzNAGvky6YejcK4GWQXBkIoQGQgxVEgIn8dwaR9B+3U') - - self.pdb = PeerDBHandler(self.session) - - self.assertFalse(self.pdb.hasPeer(FAKE_PERMID_X)) - - @inlineCallbacks - def tearDown(self): - self.pdb.close() - self.pdb = None - yield super(TestSqlitePeerDBHandler, self).tearDown() - - def test_getList(self): - peer1 = self.pdb.getPeer(self.p1) - peer2 = self.pdb.getPeer(self.p2) - self.assertIsInstance(peer1, dict) - self.assertIsInstance(peer2, dict) - self.assertEqual(peer1[u'peer_id'], 1) - self.assertEqual(peer2[u'peer_id'], 2) - - def test_addPeer(self): - peer_x = {'permid': FAKE_PERMID_X, 'name': 'fake peer x'} - oldsize = self.pdb.size() - self.pdb.addPeer(FAKE_PERMID_X, peer_x) - self.assertEqual(self.pdb.size(), oldsize + 1) - - p = self.pdb.getPeer(FAKE_PERMID_X) - self.assertEqual(p['name'], 'fake peer x') - - self.assertEqual(self.pdb.getPeer(FAKE_PERMID_X, 'name'), 'fake peer x') - - self.pdb.deletePeer(FAKE_PERMID_X) - p = self.pdb.getPeer(FAKE_PERMID_X) - self.assertIsNone(p) - self.assertEqual(self.pdb.size(), oldsize) - - self.pdb.addPeer(FAKE_PERMID_X, peer_x) - self.pdb.addPeer(FAKE_PERMID_X, {'permid': FAKE_PERMID_X, 'name': 'faka peer x'}) - p = self.pdb.getPeer(FAKE_PERMID_X) - self.assertEqual(p['name'], 'faka peer x') - - def test_aa_hasPeer(self): - self.assertTrue(self.pdb.hasPeer(self.p1)) - self.assertTrue(self.pdb.hasPeer(self.p1, check_db=True)) - self.assertTrue(self.pdb.hasPeer(self.p2)) - self.assertFalse(self.pdb.hasPeer(FAKE_PERMID_X)) - - def 
test_deletePeer(self): - peer_x = {'permid': FAKE_PERMID_X, 'name': 'fake peer x'} - oldsize = self.pdb.size() - p = self.pdb.getPeer(FAKE_PERMID_X) - self.assertIsNone(p) - - self.pdb.addPeer(FAKE_PERMID_X, peer_x) - self.assertEqual(self.pdb.size(), oldsize + 1) - self.assertTrue(self.pdb.hasPeer(FAKE_PERMID_X)) - p = self.pdb.getPeer(FAKE_PERMID_X) - self.assertIsNotNone(p) - - self.pdb.deletePeer(FAKE_PERMID_X) - self.assertFalse(self.pdb.hasPeer(FAKE_PERMID_X)) - self.assertEqual(self.pdb.size(), oldsize) - - p = self.pdb.getPeer(FAKE_PERMID_X) - self.assertIsNone(p) - - self.assertFalse(self.pdb.deletePeer(FAKE_PERMID_X)) - - def test_add_or_get_peer(self): - self.assertIsInstance(self.pdb.addOrGetPeerID(FAKE_PERMID_X), int) - self.assertIsInstance(self.pdb.addOrGetPeerID(FAKE_PERMID_X), int) - - def test_get_peer_by_id(self): - self.assertEqual(self.pdb.getPeerById(1, ['name']), 'Peer 1') - p = self.pdb.getPeerById(1) - self.assertEqual(p['name'], 'Peer 1') - self.assertFalse(self.pdb.getPeerById(1234567)) diff --git a/Tribler/Test/Core/test_sqlitecachedbhandler_preferences.py b/Tribler/Test/Core/test_sqlitecachedbhandler_preferences.py deleted file mode 100644 index 6482401351f..00000000000 --- a/Tribler/Test/Core/test_sqlitecachedbhandler_preferences.py +++ /dev/null @@ -1,101 +0,0 @@ -from __future__ import absolute_import - -from six import string_types -from twisted.internet.defer import inlineCallbacks - -from Tribler.Core.CacheDB.SqliteCacheDBHandler import TorrentDBHandler, MyPreferenceDBHandler -from Tribler.Core.CacheDB.sqlitecachedb import str2bin -from Tribler.Test.Core.test_sqlitecachedbhandler import AbstractDB - - -class TestMyPreferenceDBHandler(AbstractDB): - - @inlineCallbacks - def setUp(self): - yield super(TestMyPreferenceDBHandler, self).setUp() - - self.tdb = TorrentDBHandler(self.session) - self.mdb = MyPreferenceDBHandler(self.session) - self.mdb._torrent_db = self.tdb - - def tearDown(self): - self.mdb.close() - self.mdb = None - self.tdb.close() - self.tdb = None - - super(TestMyPreferenceDBHandler, self).tearDown() - - def test_getPrefList(self): - pl = self.mdb.getMyPrefListInfohash() - self.assertEqual(len(pl), 12) - - def test_addMyPreference_deletePreference(self): - p = self.mdb.getOne(('torrent_id', 'destination_path', 'creation_time'), torrent_id=126) - torrent_id = p[0] - infohash = self.tdb.getInfohash(torrent_id) - destpath = p[1] - creation_time = p[2] - self.mdb.deletePreference(torrent_id) - pl = self.mdb.getMyPrefListInfohash() - self.assertEqual(len(pl), 12) - self.assertIn(infohash, pl) - - data = {'destination_path': destpath} - self.mdb.addMyPreference(torrent_id, data) - p2 = self.mdb.getOne(('torrent_id', 'destination_path', 'creation_time'), torrent_id=126) - self.assertTrue(p2[0] == p[0]) - self.assertTrue(p2[1] == p[1]) - - self.mdb.deletePreference(torrent_id) - pl = self.mdb.getMyPrefListInfohash(returnDeleted=False) - self.assertEqual(len(pl), 11) - self.assertNotIn(infohash, pl) - - data = {'destination_path': destpath, 'creation_time': creation_time} - self.mdb.addMyPreference(torrent_id, data) - p3 = self.mdb.getOne(('torrent_id', 'destination_path', 'creation_time'), torrent_id=126) - self.assertEqual(p3, p) - - def test_getMyPrefListInfohash(self): - preflist = self.mdb.getMyPrefListInfohash() - for p in preflist: - self.assertTrue(not p or len(p) == 20) - self.assertEqual(len(preflist), 12) - - def test_get_my_pref_stats(self): - res = self.mdb.getMyPrefStats() - self.assertEqual(len(res), 12) - for k in res: - data = 
res[k] - self.assertIsInstance(data, string_types, "data is not destination_path: %s" % type(data)) - - res = self.mdb.getMyPrefStats(torrent_id=126) - self.assertEqual(len(res), 1) - - def test_my_pref_stats_infohash(self): - infohash = str2bin('AB8cTG7ZuPsyblbRE7CyxsrKUCg=') - self.assertIsNone(self.mdb.getMyPrefStatsInfohash(infohash)) - infohash = str2bin('ByJho7yj9mWY1ORWgCZykLbU1Xc=') - self.assertTrue(self.mdb.getMyPrefStatsInfohash(infohash)) - - def test_get_my_pref_list_infohash_limit(self): - self.assertEqual(len(self.mdb.getMyPrefListInfohash(limit=10)), 10) - - def test_add_my_preference(self): - self.assertTrue(self.mdb.addMyPreference(127, {'destination_path': 'C:/mytorrent'})) - self.assertTrue(self.mdb.addMyPreference(12345678, {'destination_path': 'C:/mytorrent'})) - self.assertFalse(self.mdb.addMyPreference(12345678, {'destination_path': 'C:/mytorrent'})) - - def test_delete_my_preference(self): - self.mdb.deletePreference(126) - res = self.mdb.getMyPrefStats(126) - self.assertFalse(res[126]) - self.mdb.deletePreference(12348934) - - def test_update_dest_dir(self): - self.mdb.updateDestDir(126, 'C:/mydest') - res = self.mdb.getMyPrefStats(126) - self.assertEqual(res[126], 'C:/mydest') - self.mdb.updateDestDir(126, {}) - self.assertEqual(res[126], 'C:/mydest') diff --git a/Tribler/Test/Core/test_sqlitecachedbhandler_torrents.py b/Tribler/Test/Core/test_sqlitecachedbhandler_torrents.py deleted file mode 100644 index 149a920b04a..00000000000 --- a/Tribler/Test/Core/test_sqlitecachedbhandler_torrents.py +++ /dev/null @@ -1,306 +0,0 @@ -import os -import struct -from binascii import unhexlify -from shutil import copy as copyfile - -from twisted.internet.defer import inlineCallbacks - -from Tribler.Core.CacheDB.SqliteCacheDBHandler import TorrentDBHandler, MyPreferenceDBHandler, ChannelCastDBHandler -from Tribler.Core.CacheDB.sqlitecachedb import str2bin -from Tribler.Core.Category.Category import Category -from Tribler.Core.TorrentDef import TorrentDef -from Tribler.Core.leveldbstore import LevelDbStore -from Tribler.Test.Core.test_sqlitecachedbhandler import AbstractDB -from Tribler.Test.common import TESTS_DATA_DIR - -S_TORRENT_PATH_BACKUP = os.path.join(TESTS_DATA_DIR, 'bak_single.torrent') -M_TORRENT_PATH_BACKUP = os.path.join(TESTS_DATA_DIR, 'bak_multiple.torrent') - - -class TestTorrentFullSessionDBHandler(AbstractDB): - - def setUpPreSession(self): - super(TestTorrentFullSessionDBHandler, self).setUpPreSession() - self.config.set_megacache_enabled(True) - - @inlineCallbacks - def setUp(self): - yield super(TestTorrentFullSessionDBHandler, self).setUp() - self.tdb = TorrentDBHandler(self.session) - - def test_initialize(self): - self.tdb.initialize() - self.assertIsNone(self.tdb.mypref_db) - self.assertIsNone(self.tdb.votecast_db) - self.assertIsNone(self.tdb.channelcast_db) - - -class TestTorrentDBHandler(AbstractDB): - - def addTorrent(self): - old_size = self.tdb.size() - old_tracker_size = self.tdb._db.size('TrackerInfo') - - s_infohash = unhexlify('44865489ac16e2f34ea0cd3043cfd970cc24ec09') - m_infohash = unhexlify('ed81da94d21ad1b305133f2726cdaec5a57fed98') - - single_torrent_file_path = os.path.join(self.getStateDir(), 'single.torrent') - multiple_torrent_file_path = os.path.join(self.getStateDir(), 'multiple.torrent') - - copyfile(S_TORRENT_PATH_BACKUP, single_torrent_file_path) - copyfile(M_TORRENT_PATH_BACKUP, multiple_torrent_file_path) - - single_tdef = TorrentDef.load(single_torrent_file_path) - self.assertEqual(s_infohash, single_tdef.get_infohash()) - 
multiple_tdef = TorrentDef.load(multiple_torrent_file_path)
-        self.assertEqual(m_infohash, multiple_tdef.get_infohash())
-
-        self.tdb.addExternalTorrent(single_tdef)
-        self.tdb.addExternalTorrent(multiple_tdef)
-
-        single_torrent_id = self.tdb.getTorrentID(s_infohash)
-        multiple_torrent_id = self.tdb.getTorrentID(m_infohash)
-
-        self.assertEqual(self.tdb.getInfohash(single_torrent_id), s_infohash)
-
-        single_name = 'Tribler_4.1.7_src.zip'
-        multiple_name = 'Tribler_4.1.7_src'
-
-        self.assertEqual(self.tdb.size(), old_size + 2)
-        new_tracker_table_size = self.tdb._db.size('TrackerInfo')
-        self.assertLess(old_tracker_size, new_tracker_table_size)
-
-        sname = self.tdb.getOne('name', torrent_id=single_torrent_id)
-        self.assertEqual(sname, single_name)
-        mname = self.tdb.getOne('name', torrent_id=multiple_torrent_id)
-        self.assertEqual(mname, multiple_name)
-
-        s_size = self.tdb.getOne('length', torrent_id=single_torrent_id)
-        self.assertEqual(s_size, 1583233)
-        m_size = self.tdb.getOne('length', torrent_id=multiple_torrent_id)
-        self.assertEqual(m_size, 5358560)
-
-        cat = self.tdb.getOne('category', torrent_id=multiple_torrent_id)
-        self.assertEqual(cat, u'xxx')
-
-        s_status = self.tdb.getOne('status', torrent_id=single_torrent_id)
-        self.assertEqual(s_status, u'unknown')
-
-        m_comment = self.tdb.getOne('comment', torrent_id=multiple_torrent_id)
-        comments = 'www.tribler.org'
-        self.assertGreater(m_comment.find(comments), -1)
-        comments = 'something not inside'
-        self.assertEqual(m_comment.find(comments), -1)
-
-        m_trackers = self.tdb.getTrackerListByInfohash(m_infohash)
-        self.assertEqual(len(m_trackers), 8)
-        self.assertIn('http://tpb.tracker.thepiratebay.org/announce', m_trackers)
-
-        s_torrent = self.tdb.getTorrent(s_infohash)
-        m_torrent = self.tdb.getTorrent(m_infohash)
-        self.assertEqual(s_torrent['name'], 'Tribler_4.1.7_src.zip')
-        self.assertEqual(m_torrent['name'], 'Tribler_4.1.7_src')
-        self.assertEqual(m_torrent['last_tracker_check'], 0)
-
-    def updateTorrent(self):
-        m_infohash = unhexlify('ed81da94d21ad1b305133f2726cdaec5a57fed98')
-        self.tdb.updateTorrent(m_infohash, relevance=3.1415926, category=u'Videoclips',
-                               status=u'good', seeder=123, leecher=321,
-                               last_tracker_check=1234567,
-                               other_key1='abcd', other_key2=123)
-        multiple_torrent_id = self.tdb.getTorrentID(m_infohash)
-        category = self.tdb.getOne('category', torrent_id=multiple_torrent_id)
-        self.assertEqual(category, u'Videoclips')
-        status = self.tdb.getOne('status', torrent_id=multiple_torrent_id)
-        self.assertEqual(status, u'good')
-        seeder = self.tdb.getOne('num_seeders', torrent_id=multiple_torrent_id)
-        self.assertEqual(seeder, 123)
-        leecher = self.tdb.getOne('num_leechers', torrent_id=multiple_torrent_id)
-        self.assertEqual(leecher, 321)
-        last_tracker_check = self.tdb.getOne('last_tracker_check', torrent_id=multiple_torrent_id)
-        self.assertEqual(last_tracker_check, 1234567)
-
-    def setUpPreSession(self):
-        super(TestTorrentDBHandler, self).setUpPreSession()
-        self.config.set_megacache_enabled(True)
-        self.config.set_torrent_store_enabled(True)
-
-    @inlineCallbacks
-    def setUp(self):
-        yield super(TestTorrentDBHandler, self).setUp()
-
-        from Tribler.Core.APIImplementation.LaunchManyCore import TriblerLaunchMany
-        from Tribler.Core.Modules.tracker_manager import TrackerManager
-        self.session.lm = TriblerLaunchMany()
-        self.session.lm.tracker_manager = TrackerManager(self.session)
-        self.tdb = TorrentDBHandler(self.session)
-        self.tdb.torrent_dir = TESTS_DATA_DIR
-        self.tdb.category = Category()
-        self.tdb.mypref_db = MyPreferenceDBHandler(self.session)
-
-    @inlineCallbacks
-    def tearDown(self):
-        self.tdb.mypref_db.close()
-        self.tdb.mypref_db = None
-        self.tdb.close()
-        self.tdb = None
-
-        yield super(TestTorrentDBHandler, self).tearDown()
-
-    def test_hasTorrent(self):
-        infohash_str = 'AA8cTG7ZuPsyblbRE7CyxsrKUCg='
-        infohash = str2bin(infohash_str)
-        self.assertTrue(self.tdb.hasTorrent(infohash))
-        self.assertTrue(self.tdb.hasTorrent(infohash))  # cache will trigger
-        fake_infohash = 'fake_infohash_100000'
-        self.assertFalse(self.tdb.hasTorrent(fake_infohash))
-
-    def test_get_infohash(self):
-        self.assertTrue(self.tdb.getInfohash(1))
-        self.assertFalse(self.tdb.getInfohash(1234567))
-
-    def test_add_update_torrent(self):
-        self.addTorrent()
-        self.updateTorrent()
-
-    def test_update_torrent_from_metainfo(self):
-        # Add torrent first
-        infohash = unhexlify('ed81da94d21ad1b305133f2726cdaec5a57fed98')
-        # Only infohash is added to the database
-        self.tdb.addOrGetTorrentID(infohash)
-
-        # Then update the torrent with metainfo
-        metainfo = {'info': {'files': [{'path': ['Something.something.pdf'], 'length': 123456789},
-                                       {'path': ['Another-thing.jpg'], 'length': 100000000}],
-                             'piece length': 2097152,
-                             'name': '\xc3Something awesome (2015)',
-                             'pieces': ''},
-                    'seeders': 0, 'initial peers': [],
-                    'leechers': 36, 'download_exists': False, 'nodes': []}
-        self.tdb.update_torrent_with_metainfo(infohash, metainfo)
-
-        # Check updates are correct
-        torrent_id = self.tdb.getTorrentID(infohash)
-        name = self.tdb.getOne('name', torrent_id=torrent_id)
-        self.assertEqual(name, u'\xc3Something awesome (2015)')
-        num_files = self.tdb.getOne('num_files', torrent_id=torrent_id)
-        self.assertEqual(num_files, 2)
-        length = self.tdb.getOne('length', torrent_id=torrent_id)
-        self.assertEqual(length, 223456789)
-
-    def test_add_external_torrent_no_def_existing(self):
-        infohash = str2bin('AA8cTG7ZuPsyblbRE7CyxsrKUCg=')
-        self.tdb.addExternalTorrentNoDef(infohash, "test torrent", [], [], 1234)
-        self.assertTrue(self.tdb.hasTorrent(infohash))
-
-    def test_add_external_torrent_no_def_no_files(self):
-        infohash = unhexlify('48865489ac16e2f34ea0cd3043cfd970cc24ec09')
-        self.tdb.addExternalTorrentNoDef(infohash, "test torrent", [], [], 1234)
-        self.assertFalse(self.tdb.hasTorrent(infohash))
-
-    def test_add_external_torrent_no_def_one_file(self):
-        infohash = unhexlify('49865489ac16e2f34ea0cd3043cfd970cc24ec09')
-        self.tdb.addExternalTorrentNoDef(infohash, "test torrent", [("file1", 42)],
-                                         ['http://localhost/announce'], 1234)
-        self.assertTrue(self.tdb.getTorrentID(infohash))
-
-    def test_add_external_torrent_no_def_more_files(self):
-        infohash = unhexlify('50865489ac16e2f34ea0cd3043cfd970cc24ec09')
-        self.tdb.addExternalTorrentNoDef(infohash, "test torrent", [("file1", 42), ("file2", 43)],
-                                         [], 1234, extra_info={"seeder": 2, "leecher": 3})
-        self.assertTrue(self.tdb.getTorrentID(infohash))
-
-    def test_add_external_torrent_no_def_invalid(self):
-        infohash = unhexlify('50865489ac16e2f34ea0cd3043cfd970cc24ec09')
-        self.tdb.addExternalTorrentNoDef(infohash, "test torrent", [("file1", {}), ("file2", 43)],
-                                         [], 1234)
-        self.assertFalse(self.tdb.getTorrentID(infohash))
-
-    def test_add_get_torrent_id(self):
-        infohash = str2bin('AA8cTG7ZuPsyblbRE7CyxsrKUCg=')
-        self.assertEqual(self.tdb.addOrGetTorrentID(infohash), 1)
-
-        new_infohash = unhexlify('50865489ac16e2f34ea0cd3043cfd970cc24ec09')
-        self.assertEqual(self.tdb.addOrGetTorrentID(new_infohash), 4859)
-
-    def test_add_get_torrent_ids_return(self):
-        infohash = str2bin('AA8cTG7ZuPsyblbRE7CyxsrKUCg=')
-        new_infohash = unhexlify('50865489ac16e2f34ea0cd3043cfd970cc24ec09')
-        tids, inserted = self.tdb.addOrGetTorrentIDSReturn([infohash, new_infohash])
-        self.assertEqual(tids, [1, 4859])
-        self.assertEqual(len(inserted), 1)
-
-    def test_index_torrent_existing(self):
-        self.tdb._indexTorrent(1, "test", [])
-
-    def test_getCollectedTorrentHashes(self):
-        res = self.tdb.getNumberCollectedTorrents()
-        self.assertEqual(res, 4847)
-
-    def test_freeSpace(self):
-        # Manually set the torrent store because register is not called.
-        self.session.lm.torrent_store = LevelDbStore(self.session.config.get_torrent_store_dir())
-        old_res = self.tdb.getNumberCollectedTorrents()
-        self.tdb.freeSpace(20)
-        res = self.tdb.getNumberCollectedTorrents()
-        self.session.lm.torrent_store.close()
-        self.assertEqual(res, old_res-20)
-
-    def test_get_search_suggestions(self):
-        self.assertEqual(self.tdb.getSearchSuggestion(["content", "cont"]), ["content 1"])
-
-    def test_get_autocomplete_terms(self):
-        self.assertEqual(len(self.tdb.getAutoCompleteTerms("content", 100)), 0)
-
-    def test_get_recently_randomly_collected_torrents(self):
-        self.assertEqual(len(self.tdb.getRecentlyCollectedTorrents(limit=10)), 10)
-        self.assertEqual(len(self.tdb.getRandomlyCollectedTorrents(100000000, limit=10)), 3)
-
-    def test_get_recently_checked_torrents(self):
-        self.assertEqual(len(self.tdb.getRecentlyCheckedTorrents(limit=5)), 5)
-
-    def test_select_torrents_to_collect(self):
-        infohash = str2bin('AA8cTG7ZuPsyblbRE7CyxsrKUCg=')
-        self.assertEqual(len(self.tdb.select_torrents_to_collect(infohash)), 0)
-
-    def test_get_torrents_stats(self):
-        self.assertEqual(self.tdb.getTorrentsStats(), (4847, 6519179841442, 187195))
-
-    def test_get_library_torrents(self):
-        self.assertEqual(len(self.tdb.getLibraryTorrents(['infohash'])), 12)
-
-    def test_search_names_no_sort(self):
-        """
-        Test whether the right amount of torrents are returned when searching for torrents in db
-        """
-        columns = ['T.torrent_id', 'infohash', 'status', 'num_seeders']
-        self.tdb.channelcast_db = ChannelCastDBHandler(self.session)
-        self.assertEqual(len(self.tdb.searchNames(['content'], keys=columns, doSort=False)), 4849)
-        self.assertEqual(len(self.tdb.searchNames(['content', '1'], keys=columns, doSort=False)), 1)
-
-    def test_search_names_sort(self):
-        """
-        Test whether the right amount of sorted torrents are returned when searching for torrents in db
-        """
-        columns = ['T.torrent_id', 'infohash', 'status', 'num_seeders']
-        self.tdb.channelcast_db = ChannelCastDBHandler(self.session)
-        results = self.tdb.searchNames(['content'], keys=columns)
-        self.assertEqual(len(results), 4849)
-        self.assertEqual(results[0][3], 493785)
-
-    def test_search_local_torrents(self):
-        """
-        Test the search procedure in the local database when searching for torrents
-        """
-        results = self.tdb.search_in_local_torrents_db('content', ['infohash', 'num_seeders'])
-        self.assertEqual(len(results), 4849)
-        self.assertNotEqual(results[0][-1], 0.0)  # Relevance score of result should not be zero
-        results = self.tdb.search_in_local_torrents_db('fdsafasfds', ['infohash'])
-        self.assertEqual(len(results), 0)
-
-    def test_rel_score_remote_torrent(self):
-        self.tdb.latest_matchinfo_torrent = struct.pack("I" * 12, *([1] * 12)), u"torrent"
-        self.assertNotEqual(self.tdb.relevance_score_remote_torrent("\xe2my-torrent.iso"), 0.0)
-
-        self.tdb.latest_matchinfo_torrent = struct.pack("I" * 12, *([1] * 12)), "torrent"
-        self.assertNotEqual(self.tdb.relevance_score_remote_torrent(u"my-torrent.iso"), 0.0)
diff --git a/Tribler/Test/Core/test_sqlitecachedbhandler_votecasts.py b/Tribler/Test/Core/test_sqlitecachedbhandler_votecasts.py
deleted file mode 100644
index e91451f22df..00000000000
--- a/Tribler/Test/Core/test_sqlitecachedbhandler_votecasts.py
+++ /dev/null
@@ -1,82 +0,0 @@
-from twisted.internet.defer import inlineCallbacks
-
-from Tribler.Core.CacheDB.SqliteCacheDBHandler import VoteCastDBHandler, ChannelCastDBHandler
-from Tribler.Test.Core.test_sqlitecachedbhandler import AbstractDB
-
-
-class TestVotecastDBHandler(AbstractDB):
-
-    @inlineCallbacks
-    def setUp(self):
-        yield super(TestVotecastDBHandler, self).setUp()
-
-        self.cdb = ChannelCastDBHandler(self.session)
-        self.vdb = VoteCastDBHandler(self.session)
-        self.vdb.channelcast_db = self.cdb
-
-    def tearDown(self):
-        self.cdb.close()
-        self.cdb = None
-        self.vdb.close()
-        self.vdb = None
-
-        super(TestVotecastDBHandler, self).tearDown()
-
-    def test_on_votes_from_dispersy(self):
-        self.vdb.my_votes = {}
-        votes = [[1, None, 1, 2, 12345], [1, None, 2, -1, 12346], [2, 3, 2, -1, 12347]]
-        self.vdb.on_votes_from_dispersy(votes)
-        self.vdb._flush_to_database()
-        self.assertEqual(self.vdb.getPosNegVotes(1), (3, 1))
-
-        self.vdb.my_votes = None
-        votes = [[4, None, 1, 2, 12346]]
-        self.vdb.on_votes_from_dispersy(votes)
-        self.assertEqual(self.vdb.updatedChannels, {4})
-
-    def test_on_remove_votes_from_dispersy(self):
-        remove_votes = [[12345, 2, 3]]
-        self.vdb.on_remove_votes_from_dispersy(remove_votes, False)
-        self.assertEqual(self.vdb.updatedChannels, {2})
-        remove_votes = [[12345, 2, 3], [12346, 1, 3]]
-        self.vdb.on_remove_votes_from_dispersy(remove_votes, True)
-
-    def test_flush_to_database(self):
-        self.assertEqual(self.vdb.getPosNegVotes(1), (7, 5))
-        self.vdb.updatedChannels = {1}
-        self.vdb._flush_to_database()
-        self.assertEqual(self.vdb.getPosNegVotes(1), (2, 0))
-        self.vdb.updatedChannels = {}
-        self.vdb._flush_to_database()
-
-    def test_get_latest_vote_dispersy_id(self):
-        self.assertEqual(self.vdb.get_latest_vote_dispersy_id(2, 5), 3)
-        self.assertEqual(self.vdb.get_latest_vote_dispersy_id(1, None), 3)
-
-    def test_get_pos_neg_votes(self):
-        self.assertEqual(self.vdb.getPosNegVotes(1), (7, 5))
-        self.assertEqual(self.vdb.getPosNegVotes(2), (93, 83))
-        self.assertEqual(self.vdb.getPosNegVotes(42), (0, 0))
-
-    def test_get_vote_on_channel(self):
-        self.assertEqual(self.vdb.getVoteOnChannel(3, 6), -1)
-        self.assertEqual(self.vdb.getVoteOnChannel(4, None), -1)
-
-    def test_get_vote_for_my_channel(self):
-        self.vdb.channelcast_db._channel_id = 1
-        self.assertEqual(self.vdb.getVoteForMyChannel(6), 2)
-
-    def test_get_dispersy_id(self):
-        self.assertEqual(self.vdb.getDispersyId(2, 5), 3)
-        self.assertEqual(self.vdb.getDispersyId(2, None), 3)
-
-    def test_get_timestamp(self):
-        self.assertEqual(self.vdb.getTimestamp(2, 5), 8440)
-        self.assertEqual(self.vdb.getTimestamp(2, None), 8439)
-
-    def test_get_my_votes(self):
-        my_votes = self.vdb.getMyVotes()
-        self.assertEqual(my_votes, {1: 2, 2: -1, 4: -1})
-        self.assertIsNotNone(self.vdb.my_votes)
-        my_votes = self.vdb.getMyVotes()
-        self.assertEqual(my_votes, {1: 2, 2: -1, 4: -1})
diff --git a/Tribler/Test/GUI/test_gui.py b/Tribler/Test/GUI/test_gui.py
index ce81f18ff44..8c35e2a9945 100644
--- a/Tribler/Test/GUI/test_gui.py
+++ b/Tribler/Test/GUI/test_gui.py
@@ -5,13 +5,13 @@ import sys
 import threading
 import time
-from random import randint
 from unittest import TestCase, skipIf, skipUnless
 from PyQt5.QtCore import QPoint, QTimer, Qt
 from PyQt5.QtGui import QPixmap, QRegion
 from PyQt5.QtTest import QTest
 from PyQt5.QtWidgets import QApplication, QListWidget, QTextEdit, QTreeWidget
+from PyQt5.QtWidgets import QTableView
 
 from six.moves import xrange
@@ -22,7 +22,6 @@ import TriblerGUI.defs
 from TriblerGUI.dialogs.feedbackdialog import FeedbackDialog
 from TriblerGUI.tribler_window import TriblerWindow
-from TriblerGUI.widgets.channel_torrent_list_item import ChannelTorrentListItem
 from TriblerGUI.widgets.home_recommended_item import HomeRecommendedItem
 from TriblerGUI.widgets.loading_list_item import LoadingListItem
@@ -77,6 +76,8 @@ def generate_tribler_data():
 def no_abort(*args, **kwargs):
     sys.__excepthook__(*args, **kwargs)
+
+
 sys.excepthook = no_abort
@@ -141,6 +142,8 @@ def wait_for_list_populated(self, llist, num_items=1, timeout=10):
             elif isinstance(llist, QTreeWidget) and llist.topLevelItemCount() >= num_items:
                 if not isinstance(llist.topLevelItem(0), LoadingListItem):
                     return
+            elif isinstance(llist, QTableView) and llist.verticalHeader().count() >= num_items:
+                return
 
         # List was not populated in time, fail the test
         raise TimeoutException("The list was not populated within 10 seconds")
@@ -206,6 +209,11 @@ def wait_for_qtext_edit_populated(self, qtext_edit, timeout=10):
         # QTextEdit was not populated in time, fail the test
         raise TimeoutException("QTextEdit was not populated within 10 seconds")
 
+    def get_index_of_row(self, table_view, row):
+        x = table_view.columnViewportPosition(0)
+        y = table_view.rowViewportPosition(row)
+        return table_view.indexAt(QPoint(x, y))
+
 
 @skipUnless(os.environ.get("TEST_GUI") == "yes", "Not testing the GUI by default")
 class TriblerGUITest(AbstractTriblerGUITest):
@@ -233,12 +241,67 @@ def test_subscriptions(self):
         self.wait_for_list_populated(window.subscribed_channels_list)
         self.screenshot(window, name="subscriptions")
 
-        first_widget = window.subscribed_channels_list.itemWidget(window.subscribed_channels_list.item(0))
-        QTest.mouseClick(first_widget.subscribe_button, Qt.LeftButton)
-        self.wait_for_signal(first_widget.subscriptions_widget.unsubscribed_channel)
+        # Sort
+        window.subscribed_channels_list.sortByColumn(1, 1)
+        self.wait_for_list_populated(window.subscribed_channels_list)
+        self.screenshot(window, name="subscriptions_sorted")
+        max_items = min(window.subscribed_channels_list.model().total_items, 50)
+        self.assertLessEqual(window.subscribed_channels_list.verticalHeader().count(), max_items)
+
+        # Filter
+        old_num_items = window.subscribed_channels_list.verticalHeader().count()
+        QTest.keyClick(window.subscribed_channels_filter_input, '1')
+        self.wait_for_list_populated(window.subscribed_channels_list)
+        self.screenshot(window, name="subscriptions_filtered")
+        self.assertLessEqual(window.subscribed_channels_list.verticalHeader().count(), old_num_items)
+        window.subscribed_channels_filter_input.setText('')
+        self.wait_for_list_populated(window.subscribed_channels_list)
+
+        # Unsubscribe and subscribe again
+        index = self.get_index_of_row(window.subscribed_channels_list, 0)
+        window.subscribed_channels_list.on_subscribe_control_clicked(index)
+        self.wait_for_signal(window.subscribed_channels_list.on_unsubscribed_channel)
         self.screenshot(window, name="unsubscribed")
-        QTest.mouseClick(first_widget.subscribe_button, Qt.LeftButton)
-        self.wait_for_signal(first_widget.subscriptions_widget.subscribed_channel)
+
+        window.subscribed_channels_list.on_subscribe_control_clicked(index)
+        self.wait_for_signal(window.subscribed_channels_list.on_subscribed_channel)
+
+    def test_discovered_page(self):
+        QTest.mouseClick(window.left_menu_button_discovered, Qt.LeftButton)
+        self.wait_for_list_populated(window.discovered_channels_list)
+        self.screenshot(window, name="discovered_page")
+
+        # Sort
+        window.discovered_channels_list.sortByColumn(1, 1)
+        self.wait_for_list_populated(window.discovered_channels_list)
+        self.screenshot(window, name="discovered_sorted")
+        max_items = min(window.discovered_channels_list.model().total_items, 50)
+        self.assertLessEqual(window.discovered_channels_list.verticalHeader().count(), max_items)
+
+        # Filter
+        old_num_items = window.discovered_channels_list.verticalHeader().count()
+        QTest.keyClick(window.discovered_channels_filter_input, '1')
+        self.wait_for_list_populated(window.discovered_channels_list)
+        self.screenshot(window, name="discovered_filtered")
+        self.assertLessEqual(window.discovered_channels_list.verticalHeader().count(), old_num_items)
+
+    def test_channel_torrents(self):
+        QTest.mouseClick(window.left_menu_button_subscriptions, Qt.LeftButton)
+        self.wait_for_list_populated(window.subscribed_channels_list)
+        index = self.get_index_of_row(window.subscribed_channels_list, 0)
+        window.subscribed_channels_list.on_table_item_clicked(index)
+        self.wait_for_list_populated(window.channel_page_container.content_table)
+        self.screenshot(window, name="channel_torrents_loaded")
+
+        # Toggle credit mining
+        QTest.mouseClick(window.credit_mining_button, Qt.LeftButton)
+        self.wait_for_signal(window.subscription_widget.credit_mining_toggled)
+
+        # Click the first torrent
+        index = self.get_index_of_row(window.channel_page_container.content_table, 0)
+        window.channel_page_container.content_table.on_table_item_clicked(index)
+        QTest.qWait(100)
+        self.screenshot(window, name="channel_overview_details")
 
     def test_edit_channel_overview(self):
         QTest.mouseClick(window.left_menu_button_my_channel, Qt.LeftButton)
@@ -258,53 +321,55 @@ def test_edit_channel_torrents(self):
         self.wait_for_variable("edit_channel_page.channel_overview")
         QTest.mouseClick(window.edit_channel_torrents_button, Qt.LeftButton)
         self.screenshot(window, name="edit_channel_torrents_loading")
-        self.wait_for_list_populated(window.edit_channel_torrents_list)
+        self.wait_for_list_populated(window.edit_channel_torrents_container.content_table)
         self.screenshot(window, name="edit_channel_torrents")
-        first_widget = window.edit_channel_torrents_list.itemWidget(window.edit_channel_torrents_list.item(0))
-        QTest.mouseClick(first_widget, Qt.LeftButton)
-        self.screenshot(window, name="edit_channel_torrents_selected")
-        QTest.mouseClick(window.edit_channel_torrents_remove_selected_button, Qt.LeftButton)
-        self.screenshot(window, name="remove_channel_torrent_dialog")
-        QTest.mouseClick(window.edit_channel_page.dialog.buttons[1], Qt.LeftButton)
-
-        QTest.mouseClick(window.edit_channel_torrents_remove_all_button, Qt.LeftButton)
-        self.screenshot(window, name="remove_all_channel_torrent_dialog")
-        QTest.mouseClick(window.edit_channel_page.dialog.buttons[1], Qt.LeftButton)
+        # Sort
+        window.edit_channel_torrents_container.content_table.sortByColumn(2, 1)  # Size
+        self.wait_for_list_populated(window.edit_channel_torrents_container.content_table)
+        self.screenshot(window, name="edit_channel_torrents_sorted")
+        max_items = min(window.edit_channel_torrents_container.content_table.model().total_items, 50)
+        self.assertLessEqual(window.edit_channel_torrents_container.content_table.verticalHeader().count(), max_items)
+
+        # Filter
+        old_num_items = window.edit_channel_torrents_container.content_table.verticalHeader().count()
+        QTest.keyClick(window.edit_channel_torrents_filter, 'a')
+        self.wait_for_list_populated(window.edit_channel_torrents_container.content_table)
+        self.screenshot(window, name="edit_channel_torrents_filtered")
+        self.assertLessEqual(window.edit_channel_torrents_container.content_table.verticalHeader().count(),
+                             old_num_items)
+        window.edit_channel_torrents_filter.setText('')
+        self.wait_for_list_populated(window.edit_channel_torrents_container.content_table)
+
+        # Remove a single torrent
+        index = self.get_index_of_row(window.edit_channel_torrents_container.content_table, 0)
+        window.edit_channel_torrents_container.content_table.setCurrentIndex(index)
+        QTest.mouseClick(window.remove_selected_button, Qt.LeftButton)
+        self.screenshot(window, name="edit_channel_remove_torrent_dialog")
+        QTest.mouseClick(window.edit_channel_page.dialog.buttons[0], Qt.LeftButton)
+        self.wait_for_signal(window.edit_channel_page.on_torrents_removed)
 
-    def test_edit_channel_playlists(self):
-        QTest.mouseClick(window.left_menu_button_my_channel, Qt.LeftButton)
-        self.wait_for_variable("edit_channel_page.channel_overview")
-        QTest.mouseClick(window.edit_channel_playlists_button, Qt.LeftButton)
-        self.screenshot(window, name="edit_channel_playlists_loading")
-        self.wait_for_list_populated(window.edit_channel_playlists_list)
-        self.screenshot(window, name="edit_channel_playlists")
+        # Remove all torrents
+        QTest.mouseClick(window.remove_all_button, Qt.LeftButton)
+        self.screenshot(window, name="edit_channel_remove_all_dialog")
+        QTest.mouseClick(window.edit_channel_page.dialog.buttons[0], Qt.LeftButton)
+        self.wait_for_signal(window.edit_channel_page.on_all_torrents_removed, no_args=True)
+        self.wait_for_list_populated(window.edit_channel_torrents_container.content_table)
+        self.screenshot(window, name="edit_channel_remove_all_pending")
 
-    def test_edit_channel_rssfeeds(self):
-        QTest.mouseClick(window.left_menu_button_my_channel, Qt.LeftButton)
-        self.wait_for_variable("edit_channel_page.channel_overview")
-        QTest.mouseClick(window.edit_channel_rss_feeds_button, Qt.LeftButton)
-        self.screenshot(window, name="edit_channel_rssfeeds_loading")
-        self.wait_for_list_populated(window.edit_channel_rss_feeds_list)
-        self.screenshot(window, name="edit_channel_rssfeeds")
+        # Commit the result
+        QTest.mouseClick(window.edit_channel_commit_button, Qt.LeftButton)
+        self.wait_for_signal(window.edit_channel_page.on_commit, no_args=True)
+        self.screenshot(window, name="edit_channel_committed")
 
-    def test_add_remove_refresh_rssfeed(self):
+    def test_create_torrent(self):
         QTest.mouseClick(window.left_menu_button_my_channel, Qt.LeftButton)
         self.wait_for_variable("edit_channel_page.channel_overview")
-        QTest.mouseClick(window.edit_channel_rss_feeds_button, Qt.LeftButton)
-        self.wait_for_list_populated(window.edit_channel_rss_feeds_list)
-        QTest.mouseClick(window.edit_channel_details_rss_add_button, Qt.LeftButton)
-        self.screenshot(window, name="edit_channel_add_rssfeeds_dialog")
-        window.edit_channel_page.dialog.dialog_widget.dialog_input.setText("http://test.com/rss.xml")
-        QTest.mouseClick(window.edit_channel_page.dialog.buttons[0], Qt.LeftButton)
-
-        # Remove item
-        window.edit_channel_rss_feeds_list.topLevelItem(0).setSelected(True)
-        QTest.mouseClick(window.edit_channel_details_rss_feeds_remove_selected_button, Qt.LeftButton)
-        self.screenshot(window, name="edit_channel_remove_rssfeeds_dialog")
-        QTest.mouseClick(window.edit_channel_page.dialog.buttons[0], Qt.LeftButton)
-
-        QTest.mouseClick(window.edit_channel_details_rss_refresh_button, Qt.LeftButton)
+        QTest.mouseClick(window.edit_channel_torrents_button, Qt.LeftButton)
+        self.wait_for_list_populated(window.edit_channel_torrents_container.content_table)
+        window.edit_channel_page.on_create_torrent_from_files()
+        self.screenshot(window, name="create_torrent_page")
+        QTest.mouseClick(window.manage_channel_create_torrent_back, Qt.LeftButton)
 
     def test_settings(self):
         QTest.mouseClick(window.settings_button, Qt.LeftButton)
@@ -366,7 +431,7 @@ def test_search_suggestions(self):
     def test_search(self):
         window.top_search_bar.setText("trib")
         QTest.keyClick(window.top_search_bar, Qt.Key_Enter)
-        self.wait_for_list_populated(window.search_results_list, num_items=20)
+        self.wait_for_list_populated(window.search_results_list)
         self.screenshot(window, name="search_results_all")
 
         QTest.mouseClick(window.search_results_channels_button, Qt.LeftButton)
@@ -376,82 +441,6 @@
         self.wait_for_list_populated(window.search_results_list)
         self.screenshot(window, name="search_results_torrents")
 
-    def test_channel_playlist(self):
-        QTest.mouseClick(window.left_menu_button_subscriptions, Qt.LeftButton)
-        self.wait_for_list_populated(window.subscribed_channels_list)
-        first_widget = window.subscribed_channels_list.itemWidget(window.subscribed_channels_list.item(0))
-        QTest.mouseClick(first_widget, Qt.LeftButton)
-        self.screenshot(window, name="channel_loading")
-        self.wait_for_list_populated(window.channel_torrents_list)
-        self.screenshot(window, name="channel")
-
-        first_widget = window.channel_torrents_list.itemWidget(window.channel_torrents_list.item(0))
-        QTest.mouseClick(first_widget, Qt.LeftButton)
-        self.screenshot(window, name="channel_playlist")
-
-    def test_start_download(self):
-        QTest.mouseClick(window.left_menu_button_subscriptions, Qt.LeftButton)
-        self.wait_for_list_populated(window.subscribed_channels_list)
-        first_widget = window.subscribed_channels_list.itemWidget(window.subscribed_channels_list.item(0))
-        QTest.mouseClick(first_widget, Qt.LeftButton)
-        self.wait_for_list_populated(window.channel_torrents_list)
-
-        torrent_widget = None
-        for ind in xrange(window.channel_torrents_list.count()):
-            cur_widget = window.channel_torrents_list.itemWidget(window.channel_torrents_list.item(ind))
-            if isinstance(cur_widget, ChannelTorrentListItem):
-                torrent_widget = cur_widget
-                break
-
-        QTest.mouseClick(torrent_widget.torrent_download_button, Qt.LeftButton)
-        self.screenshot(window, name="start_download_dialog")
-        QTest.mouseClick(window.dialog.dialog_widget.cancel_button, Qt.LeftButton)
-
-    def test_create_remove_playlist(self):
-        QTest.mouseClick(window.left_menu_button_my_channel, Qt.LeftButton)
-        self.wait_for_variable("edit_channel_page.channel_overview")
-        QTest.mouseClick(window.edit_channel_playlists_button, Qt.LeftButton)
-        self.wait_for_list_populated(window.edit_channel_playlists_list)
-        old_count = window.edit_channel_playlists_list.count()
-        QTest.mouseClick(window.edit_channel_create_playlist_button, Qt.LeftButton)
-        self.screenshot(window, "create_playlist")
-
-        # Create playlist
-        window.playlist_edit_name.setText("Unit test playlist")
-        window.playlist_edit_description.setText("Unit test playlist description")
-        QTest.mouseClick(window.playlist_edit_save_button, Qt.LeftButton)
-        self.wait_for_signal(window.edit_channel_page.playlists_loaded)
-        self.assertEqual(old_count + 1, window.edit_channel_playlists_list.count())
-
-        # Remove playlist
-        last_widget = window.edit_channel_playlists_list.itemWidget(window.edit_channel_playlists_list.item(old_count))
-        QTest.mouseClick(last_widget.remove_playlist_button, Qt.LeftButton)
-        self.screenshot(window, name="remove_playlist_dialog")
-        QTest.mouseClick(window.edit_channel_page.dialog.buttons[0], Qt.LeftButton)
-        self.wait_for_signal(window.edit_channel_page.playlists_loaded)
-        self.assertEqual(old_count, window.edit_channel_playlists_list.count())
-
-    def test_edit_playlist(self):
-        QTest.mouseClick(window.left_menu_button_my_channel, Qt.LeftButton)
-        self.wait_for_variable("edit_channel_page.channel_overview")
-        QTest.mouseClick(window.edit_channel_playlists_button, Qt.LeftButton)
-        self.wait_for_list_populated(window.edit_channel_playlists_list)
-
-        first_widget = window.edit_channel_playlists_list.itemWidget(window.edit_channel_playlists_list.item(0))
-        QTest.mouseClick(first_widget.edit_playlist_button, Qt.LeftButton)
-        self.screenshot(window, name="edit_playlist")
-
-        rand_name = "Random name %d" % randint(1, 1000)
-        rand_desc = "Random description %d" % randint(1, 1000)
-
-        window.playlist_edit_name.setText(rand_name)
-        window.playlist_edit_description.setText(rand_desc)
-        QTest.mouseClick(window.playlist_edit_save_button, Qt.LeftButton)
-        self.wait_for_signal(window.edit_channel_page.playlists_loaded)
-
-        first_widget = window.edit_channel_playlists_list.itemWidget(window.edit_channel_playlists_list.item(0))
-        self.assertEqual(first_widget.playlist_name.text(), rand_name)
-
     def test_add_download_url(self):
         window.on_add_torrent_from_url()
         self.go_to_and_wait_for_downloads()
@@ -525,11 +514,6 @@ def on_report_sent(response):
         QTimer.singleShot(1000, screenshot_dialog)
         dialog.exec_()
 
-    def test_discovered_page(self):
-        QTest.mouseClick(window.left_menu_button_discovered, Qt.LeftButton)
-        self.wait_for_list_populated(window.discovered_channels_list)
-        self.screenshot(window, name="discovered_page")
-
     def test_debug_pane(self):
         self.wait_for_variable("tribler_settings")
         QTest.mouseClick(window.settings_button, Qt.LeftButton)
@@ -596,7 +580,7 @@ def test_debug_pane(self):
         # logs from FakeTriblerApi
         fake_logs = ''.join(["Sample log [%d]\n" % i for i in xrange(10)]).strip()
 
-        window.debug_window.log_tab_widget.setCurrentIndex(0) # Core tab
+        window.debug_window.log_tab_widget.setCurrentIndex(0)  # Core tab
         self.wait_for_qtext_edit_populated(window.debug_window.core_log_display_area)
         core_logs = window.debug_window.core_log_display_area.toPlainText().strip()
         self.assertEqual(core_logs, fake_logs, "Core logs found different than expected.")
@@ -626,37 +610,6 @@
         window.debug_window.close()
 
-    def test_create_torrent(self):
-        QTest.mouseClick(window.left_menu_button_my_channel, Qt.LeftButton)
-        self.wait_for_variable("edit_channel_page.channel_overview")
-        QTest.mouseClick(window.edit_channel_torrents_button, Qt.LeftButton)
-        self.wait_for_list_populated(window.edit_channel_torrents_list)
-        window.edit_channel_page.on_create_torrent_from_files()
-        self.screenshot(window, name="create_torrent_page")
-        QTest.mouseClick(window.manage_channel_create_torrent_back, Qt.LeftButton)
-
-    def test_manage_playlist(self):
-        QTest.mouseClick(window.left_menu_button_my_channel, Qt.LeftButton)
-        self.wait_for_variable("edit_channel_page.channel_overview")
-        QTest.mouseClick(window.edit_channel_playlists_button, Qt.LeftButton)
-        self.wait_for_list_populated(window.edit_channel_playlists_list)
-        first_widget = window.edit_channel_playlists_list.itemWidget(window.edit_channel_playlists_list.item(0))
-        QTest.mouseClick(first_widget, Qt.LeftButton)
-        QTest.mouseClick(window.edit_channel_playlist_manage_torrents_button, Qt.LeftButton)
-        self.wait_for_list_populated(window.playlist_manage_in_playlist_list)
-        self.screenshot(window, name="manage_playlist_before")
-
-        # Swap the first item of the lists around
-        window.playlist_manage_in_playlist_list.setCurrentRow(0)
-        QTest.mouseClick(window.playlist_manage_remove_from_playlist, Qt.LeftButton)
-
-        window.playlist_manage_in_channel_list.setCurrentRow(0)
-        QTest.mouseClick(window.playlist_manage_add_to_playlist, Qt.LeftButton)
-
-        self.screenshot(window, name="manage_playlist_after")
-
-        QTest.mouseClick(window.edit_channel_manage_playlist_save_button, Qt.LeftButton)
-
     def test_trust_page(self):
         QTest.mouseClick(window.token_balance_widget, Qt.LeftButton)
         self.wait_for_variable("trust_page.blocks")
diff --git a/Tribler/Test/data/Prebloc.2010.Xvid-VODO.torrent b/Tribler/Test/data/Prebloc.2010.Xvid-VODO.torrent
deleted file mode 100644
index e9a0fe7da44..00000000000
Binary files a/Tribler/Test/data/Prebloc.2010.Xvid-VODO.torrent and /dev/null differ
diff --git a/Tribler/Test/data/bak_multiple.torrent b/Tribler/Test/data/bak_multiple.torrent
deleted file mode 100644
index 2c7d51446c9..00000000000
Binary files a/Tribler/Test/data/bak_multiple.torrent and /dev/null differ
diff --git a/Tribler/Test/data/bak_new_tribler.sdb.tar.gz b/Tribler/Test/data/bak_new_tribler.sdb.tar.gz
deleted file mode 100644
index 1f67cc983ef..00000000000
Binary files a/Tribler/Test/data/bak_new_tribler.sdb.tar.gz and /dev/null differ
diff --git a/Tribler/Test/data/bak_old_tribler.sdb.tar.gz b/Tribler/Test/data/bak_old_tribler.sdb.tar.gz
deleted file mode 100644
index 6dc887f5b0d..00000000000
Binary files a/Tribler/Test/data/bak_old_tribler.sdb.tar.gz and /dev/null differ
diff --git a/Tribler/Test/data/file.wmv b/Tribler/Test/data/file.wmv
deleted file mode 100644
index ff5f772b6aa..00000000000
Binary files a/Tribler/Test/data/file.wmv and /dev/null differ
diff --git a/Tribler/Test/data/linux_torrents/nested_dir/corrupt_torrent.torrent b/Tribler/Test/data/linux_torrents/nested_dir/corrupt_torrent.torrent
new file mode 100644
index 00000000000..da78bf233fa
--- /dev/null
+++ b/Tribler/Test/data/linux_torrents/nested_dir/corrupt_torrent.torrent
@@ -0,0 +1 @@
+This is not a valid torrent file.
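The corrupt torrent file added above gives the directory-scanning tests a fixture that cannot be parsed. A minimal sketch of how such a fixture could be exercised, not part of this patch; the TESTS_DATA_DIR import location and the ValueError behaviour of TorrentDef.load are assumptions based on the surrounding test code:

    import os
    from unittest import TestCase

    from Tribler.Core.TorrentDef import TorrentDef
    from Tribler.Test.test_as_server import TESTS_DATA_DIR  # assumed to point at Tribler/Test/data

    class TestCorruptTorrentFixture(TestCase):
        # Hypothetical test, for illustration only.

        def test_load_corrupt_torrent(self):
            corrupt_path = os.path.join(TESTS_DATA_DIR, "linux_torrents",
                                        "nested_dir", "corrupt_torrent.torrent")
            # Data that cannot be bdecoded should be rejected rather than
            # silently accepted as a valid torrent.
            self.assertRaises(ValueError, TorrentDef.load, corrupt_path)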
diff --git a/Tribler/Test/data/linux_torrents/nested_dir/ubuntu-15.04-desktop-amd64.iso.torrent b/Tribler/Test/data/linux_torrents/nested_dir/ubuntu-15.04-desktop-amd64.iso.torrent
new file mode 100644
index 00000000000..cf47669abb5
Binary files /dev/null and b/Tribler/Test/data/linux_torrents/nested_dir/ubuntu-15.04-desktop-amd64.iso.torrent differ
diff --git a/Tribler/Test/mocking/channel.py b/Tribler/Test/mocking/channel.py
deleted file mode 100644
index 44c8ec00e9d..00000000000
--- a/Tribler/Test/mocking/channel.py
+++ /dev/null
@@ -1,9 +0,0 @@
-class MockChannel(object):
-
-    def __init__(self, infohash, public_key, title, version, votes=0, local_version=0):
-        self.infohash = infohash
-        self.public_key = public_key
-        self.title = title
-        self.version = version
-        self.votes = votes
-        self.local_version = local_version
diff --git a/Tribler/Test/mocking/download.py b/Tribler/Test/mocking/download.py
deleted file mode 100644
index 645b57d105c..00000000000
--- a/Tribler/Test/mocking/download.py
+++ /dev/null
@@ -1,17 +0,0 @@
-class MockDownload(object):
-
-    class MockTdef(object):
-
-        def __init__(self):
-            self.infohash = ""
-
-        def set_infohash(self, infohash):
-            self.infohash = infohash
-
-        def get_infohash(self):
-            return self.infohash
-
-    tdef = MockTdef()
-
-    def get_num_connected_seeds_peers(self):
-        return 42, 1337
diff --git a/Tribler/Test/mocking/session.py b/Tribler/Test/mocking/session.py
deleted file mode 100644
index 18cb0536c21..00000000000
--- a/Tribler/Test/mocking/session.py
+++ /dev/null
@@ -1,66 +0,0 @@
-from twisted.internet.defer import Deferred
-
-from .channel import MockChannel
-
-class MockSession(object):
-
-    class MockLm(object):
-
-        class MockMds(object):
-
-            class MockChannelMetadata(object):
-
-                def __init__(self):
-                    self.random_channels = []
-                    self.channel_with_infohash = {}
-                    self.channel_with_id = {}
-
-                def set_random_channels(self, channel_list):
-                    self.random_channels = channel_list
-
-                def get_random_channels(self, limit):
-                    return self.random_channels[:limit]
-
-                def add(self, channel):
-                    self.channel_with_infohash[channel.infohash] = channel
-                    self.channel_with_id[channel.public_key] = channel
-
-                def get_channel_with_infohash(self, infohash):
-                    return self.channel_with_infohash.get(infohash, None)
-
-                def get_channel_with_id(self, public_key):
-                    return self.channel_with_id.get(public_key, None)
-
-                def from_dict(self, dictionary):
-                    return MockChannel(**dictionary)
-
-            ChannelMetadata = MockChannelMetadata()
-
-        mds = MockMds()
-
-        def __init__(self):
-            self.downloaded_channel = None
-            self.downloaded_channel_deferred = Deferred()
-            self.downloading = False
-
-        def set_download_channel(self, download):
-            self.downloaded_channel = download
-
-        def finish_download_channel(self):
-            self.downloading = False
-            self.downloaded_channel_deferred.callback(self.downloaded_channel)
-
-        def download_channel(self, channel):
-            self.downloading = True
-            return self.downloaded_channel, self.downloaded_channel_deferred
-
-    lm = MockLm()
-
-    def __init__(self):
-        self.known_infohashes = []
-
-    def add_known_infohash(self, infohash):
-        self.known_infohashes.append(infohash)
-
-    def has_download(self, infohash):
-        return infohash in self.known_infohashes
diff --git a/Tribler/Test/test_as_server.py b/Tribler/Test/test_as_server.py
index 699b99d006a..320739cca20 100644
--- a/Tribler/Test/test_as_server.py
+++ b/Tribler/Test/test_as_server.py
@@ -4,6 +4,7 @@
 Author(s): Arno Bakker, Jie Yang, Niels Zeilemaker
 """
 from __future__ import absolute_import
+
 import functools
 import inspect
 import logging
@@ -16,13 +17,15 @@
 from threading import enumerate as enumerate_threads
 
 from configobj import ConfigObj
+
 import six
 from six.moves import xrange
+
 import twisted
 from twisted.internet import interfaces
 from twisted.internet import reactor
 from twisted.internet.base import BasePort
-from twisted.internet.defer import maybeDeferred, inlineCallbacks, Deferred, succeed
+from twisted.internet.defer import Deferred, inlineCallbacks, maybeDeferred, succeed
 from twisted.internet.task import deferLater
 from twisted.internet.tcp import Client
 from twisted.trial import unittest
@@ -30,13 +33,13 @@
 from twisted.web.server import Site
 from twisted.web.static import File
 
-from Tribler.Core.Config.tribler_config import TriblerConfig, CONFIG_SPEC_PATH
+from Tribler.Core.Config.tribler_config import CONFIG_SPEC_PATH, TriblerConfig
 from Tribler.Core.DownloadConfig import DownloadStartupConfig
 from Tribler.Core.Session import Session
 from Tribler.Core.TorrentDef import TorrentDef
 from Tribler.Core.Utilities.instrumentation import WatchDog
 from Tribler.Core.Utilities.network_utils import get_random_port
-from Tribler.Core.simpledefs import dlstatus_strings, DLSTATUS_SEEDING
+from Tribler.Core.simpledefs import DLSTATUS_SEEDING, dlstatus_strings
 from Tribler.Test.util.util import process_unhandled_exceptions, process_unhandled_twisted_exceptions
 
 TESTS_DIR = os.path.abspath(os.path.dirname(os.path.realpath(__file__)))
@@ -286,17 +289,9 @@ def setUpPreSession(self):
         self.config.set_default_destination_dir(self.dest_dir)
         self.config.set_state_dir(self.getStateDir())
         self.config.set_torrent_checking_enabled(False)
-        self.config.set_megacache_enabled(False)
-        self.config.set_dispersy_enabled(False)
         self.config.set_ipv8_enabled(False)
-        self.config.set_mainline_dht_enabled(False)
-        self.config.set_torrent_store_enabled(False)
-        self.config.set_torrent_search_enabled(False)
-        self.config.set_channel_search_enabled(False)
-        self.config.set_torrent_collecting_enabled(False)
         self.config.set_libtorrent_enabled(False)
         self.config.set_video_server_enabled(False)
-        self.config.set_metadata_enabled(False)
         self.config.set_http_api_enabled(False)
         self.config.set_tunnel_community_enabled(False)
         self.config.set_credit_mining_enabled(False)
@@ -349,18 +344,10 @@ def create_local_torrent(self, source_file):
     def setup_seeder(self, tdef, seed_dir, port=None):
         self.seed_config = TriblerConfig()
         self.seed_config.set_torrent_checking_enabled(False)
-        self.seed_config.set_megacache_enabled(False)
-        self.seed_config.set_dispersy_enabled(False)
         self.seed_config.set_ipv8_enabled(False)
-        self.seed_config.set_mainline_dht_enabled(False)
-        self.seed_config.set_torrent_store_enabled(False)
-        self.seed_config.set_torrent_search_enabled(False)
-        self.seed_config.set_channel_search_enabled(False)
         self.seed_config.set_http_api_enabled(False)
-        self.seed_config.set_torrent_collecting_enabled(False)
         self.seed_config.set_libtorrent_enabled(True)
         self.seed_config.set_video_server_enabled(False)
-        self.seed_config.set_metadata_enabled(False)
         self.seed_config.set_tunnel_community_enabled(False)
         self.seed_config.set_market_community_enabled(False)
         self.seed_config.set_dht_enabled(False)
diff --git a/Tribler/Test/util/Tracker/TrackerInfo.py b/Tribler/Test/util/Tracker/TrackerInfo.py
index 3f2d3c3a525..61aa1bd4bf6 100644
--- a/Tribler/Test/util/Tracker/TrackerInfo.py
+++ b/Tribler/Test/util/Tracker/TrackerInfo.py
@@ -2,6 +2,7 @@
 Keeping track of information about a tracker.
""" + class TrackerInfo(object): """ This class keeps track of info about a tracker. This info is used when a request to a tracker is performed. diff --git a/Tribler/community/allchannel/__init__.py b/Tribler/community/allchannel/__init__.py deleted file mode 100644 index 7daf1e9839c..00000000000 --- a/Tribler/community/allchannel/__init__.py +++ /dev/null @@ -1,3 +0,0 @@ -""" -The allchannel community is used to collect votes for channels and thereby discover which channels are most popular. -""" diff --git a/Tribler/community/allchannel/community.py b/Tribler/community/allchannel/community.py deleted file mode 100644 index 591a8f7192c..00000000000 --- a/Tribler/community/allchannel/community.py +++ /dev/null @@ -1,698 +0,0 @@ -from random import sample -from time import time -from twisted.internet.defer import returnValue, inlineCallbacks -from twisted.internet.task import LoopingCall -from twisted.python.threadable import isInIOThread - -from Tribler.community.allchannel.message import DelayMessageReqChannelMessage -from Tribler.community.allchannel.payload import (ChannelCastRequestPayload, ChannelCastPayload, VoteCastPayload, - ChannelSearchPayload, ChannelSearchResponsePayload) -from Tribler.community.channel.community import ChannelCommunity -from Tribler.community.channel.preview import PreviewChannelCommunity -from Tribler.dispersy.authentication import MemberAuthentication -from Tribler.dispersy.community import Community -from Tribler.dispersy.conversion import DefaultConversion -from Tribler.dispersy.database import IgnoreCommits -from Tribler.dispersy.destination import CandidateDestination, CommunityDestination -from Tribler.dispersy.distribution import FullSyncDistribution, DirectDistribution -from Tribler.dispersy.exception import CommunityNotFoundException -from Tribler.dispersy.message import Message, BatchConfiguration -from Tribler.dispersy.resolution import PublicResolution -from .conversion import AllChannelConversion - -CHANNELCAST_FIRST_MESSAGE = 3.0 -CHANNELCAST_INTERVAL = 15.0 -CHANNELCAST_BLOCK_PERIOD = 10.0 * 60.0 # block for 10 minutes -UNLOAD_COMMUNITY_INTERVAL = 60.0 - -DEBUG = False - - -class AllChannelCommunity(Community): - """ - A single community that all Tribler members join and use to disseminate .torrent files. - - The dissemination of .torrent files, using 'community-propagate' messages, is NOT done using a - dispersy sync mechanism. We prefer more specific dissemination mechanism than dispersy - provides. 
-    provides. Dissemination occurs by periodically sending:
-
-     - N most recently received .torrent files
-     - M random .torrent files
-     - O most recent .torrent files, created by ourselves
-     - P randomly chosen .torrent files, created by ourselves
-    """
-    @classmethod
-    def get_master_members(cls, dispersy):
-# generated: Fri Nov 25 10:51:27 2011
-# curve: high <<< NID_sect571r1 >>>
-# len: 571 bits ~ 144 bytes signature
-# pub: 170 3081a7301006072a8648ce3d020106052b81040027038192000405548a13626683d4788ab19393fa15c9e9d6f5ce0ff47737747fa511af6c4e956f523dc3d1ae8d7b83b850f21ab157dd4320331e2f136aa01e70d8c96df665acd653725e767da9b5079f25cebea808832cd16015815797906e90753d135ed2d796b9dfbafaf1eae2ebea3b8846716c15814e96b93ae0f5ffaec44129688a38ea35f879205fdbe117323e73076561f112
-# pub-sha1 8164f55c2f828738fa779570e4605a81fec95c9d
-# -----BEGIN PUBLIC KEY-----
-# MIGnMBAGByqGSM49AgEGBSuBBAAnA4GSAAQFVIoTYmaD1HiKsZOT+hXJ6db1zg/0
-# dzd0f6URr2xOlW9SPcPRro17g7hQ8hqxV91DIDMeLxNqoB5w2Mlt9mWs1lNyXnZ9
-# qbUHnyXOvqgIgyzRYBWBV5eQbpB1PRNe0teWud+6+vHq4uvqO4hGcWwVgU6WuTrg
-# 9f+uxEEpaIo46jX4eSBf2+EXMj5zB2Vh8RI=
-# -----END PUBLIC KEY-----
-        master_key = "3081a7301006072a8648ce3d020106052b81040027038192000405548a13626683d4788ab19393fa15c9e9d6f5ce0ff47737747fa511af6c4e956f523dc3d1ae8d7b83b850f21ab157dd4320331e2f136aa01e70d8c96df665acd653725e767da9b5079f25cebea808832cd16015815797906e90753d135ed2d796b9dfbafaf1eae2ebea3b8846716c15814e96b93ae0f5ffaec44129688a38ea35f879205fdbe117323e73076561f112".decode("HEX")
-        master = dispersy.get_member(public_key=master_key)
-        return [master]
-
-    @property
-    def dispersy_sync_bloom_filter_strategy(self):
-        return self._dispersy_claim_sync_bloom_filter_modulo
-
-    def initiate_meta_messages(self):
-        batch_delay = 1.0
-
-        return super(AllChannelCommunity, self).initiate_meta_messages() + [
-            Message(self, u"channelcast",
-                    MemberAuthentication(),
-                    PublicResolution(),
-                    DirectDistribution(),
-                    CandidateDestination(),
-                    ChannelCastPayload(),
-                    self.check_channelcast,
-                    self.on_channelcast),
-            Message(self, u"channelcast-request",
-                    MemberAuthentication(),
-                    PublicResolution(),
-                    DirectDistribution(),
-                    CandidateDestination(),
-                    ChannelCastRequestPayload(),
-                    self.check_channelcast_request,
-                    self.on_channelcast_request),
-            Message(self, u"channelsearch",
-                    MemberAuthentication(),
-                    PublicResolution(),
-                    DirectDistribution(),
-                    CommunityDestination(node_count=10),
-                    ChannelSearchPayload(),
-                    self.check_channelsearch,
-                    self.on_channelsearch),
-            Message(self, u"channelsearch-response",
-                    MemberAuthentication(),
-                    PublicResolution(),
-                    DirectDistribution(),
-                    CandidateDestination(),
-                    ChannelSearchResponsePayload(),
-                    self.check_channelsearch_response,
-                    self.on_channelsearch_response),
-            Message(self, u"votecast",
-                    MemberAuthentication(),
-                    PublicResolution(),
-                    FullSyncDistribution(enable_sequence_number=False, synchronization_direction=u"DESC", priority=128),
-                    CommunityDestination(node_count=10),
-                    VoteCastPayload(),
-                    self.check_votecast,
-                    self.on_votecast,
-                    self.undo_votecast,
-                    batch=BatchConfiguration(max_window=batch_delay))
-        ]
-
-    def __init__(self, *args, **kwargs):
-        super(AllChannelCommunity, self).__init__(*args, **kwargs)
-
-        self._blocklist = {}
-        self._recentlyRequested = []
-
-        self.tribler_session = None
-        self.auto_join_channel = None
-
-        self._channelcast_db = None
-        self._votecast_db = None
-        self._peer_db = None
-
-    def initialize(self, tribler_session=None, auto_join_channel=False):
-        super(AllChannelCommunity, self).initialize()
-
-        self.tribler_session = tribler_session
-        self.auto_join_channel = auto_join_channel
-
-        if tribler_session is not None:
-            from Tribler.Core.simpledefs import NTFY_CHANNELCAST, NTFY_VOTECAST, NTFY_PEERS
-
-            # tribler channelcast database
-            self._channelcast_db = tribler_session.open_dbhandler(NTFY_CHANNELCAST)
-            self._votecast_db = tribler_session.open_dbhandler(NTFY_VOTECAST)
-            self._peer_db = tribler_session.open_dbhandler(NTFY_PEERS)
-
-        else:
-            self._channelcast_db = ChannelCastDBStub(self._dispersy)
-            self._votecast_db = VoteCastDBStub(self._dispersy)
-            self._peer_db = PeerDBStub(self._dispersy)
-
-        self.register_task(u"channelcast",
-                           LoopingCall(self.create_channelcast)).start(CHANNELCAST_FIRST_MESSAGE, now=True)
-
-        self.register_task(u"unload preview",
-                           LoopingCall(self.unload_preview)).start(UNLOAD_COMMUNITY_INTERVAL, now=False)
-
-    def initiate_conversions(self):
-        return [DefaultConversion(self), AllChannelConversion(self)]
-
-    @property
-    def dispersy_auto_download_master_member(self):
-        # there is no dispersy-identity for the master member, so don't try to download
-        return False
-
-    @property
-    def dispersy_sync_response_limit(self):
-        return 25 * 1024
-
-    def create_channelcast(self):
-        assert isInIOThread()
-        now = time()
-
-        favoriteTorrents = None
-        normalTorrents = None
-
-        # cleanup blocklist
-        for candidate in self._blocklist.keys():
-            if self._blocklist[candidate] + CHANNELCAST_BLOCK_PERIOD < now:  # unblock address
-                self._blocklist.pop(candidate)
-
-        mychannel_id = self._channelcast_db.getMyChannelId()
-
-        # loop through all candidates to see if we can find a non-blocked address
-        for candidate in [candidate for candidate in self._iter_categories([u'walk', u'stumble'], once=True) if candidate not in self._blocklist]:
-            if not candidate:
-                continue
-
-            didFavorite = False
-            # only check if we actually have a channel
-            if mychannel_id:
-                peer_ids = set()
-                key = candidate.get_member().public_key
-                peer_ids.add(self._peer_db.addOrGetPeerID(key))
-
-                # see if all members on this address are subscribed to my channel
-                didFavorite = len(peer_ids) > 0
-                for peer_id in peer_ids:
-                    vote = self._votecast_db.getVoteForMyChannel(peer_id)
-                    if vote != 2:
-                        didFavorite = False
-                        break
-
-            # Modify type of message depending on if all peers have marked my channels as their favorite
-            if didFavorite:
-                if not favoriteTorrents:
-                    favoriteTorrents = self._channelcast_db.getRecentAndRandomTorrents(0, 0, 25, 25, 5)
-                torrents = favoriteTorrents
-            else:
-                if not normalTorrents:
-                    normalTorrents = self._channelcast_db.getRecentAndRandomTorrents()
-                torrents = normalTorrents
-
-            # torrents is a dictionary of channel_id (key) and infohashes (value)
-            if len(torrents) > 0:
-                meta = self.get_meta_message(u"channelcast")
-                message = meta.impl(authentication=(self._my_member,),
-                                    distribution=(self.global_time,), destination=(candidate,), payload=(torrents,))
-
-                self._dispersy._forward([message])
-
-                # we've sent something to this address, add to blocklist
-                self._blocklist[candidate] = now
-
-                nr_torrents = sum(len(infohashes) for infohashes in torrents.itervalues())
-                self._logger.debug("sending channelcast message containing %s torrents to %s didFavorite %s",
-                                   nr_torrents, candidate.sock_addr, didFavorite)
-                # we're done
-                break
-
-        else:
-            self._logger.debug("Did not send channelcast messages, no candidates or torrents")
-
-    def get_nr_connections(self):
-        return len(list(self.dispersy_yield_candidates()))
-
-    def check_channelcast(self, messages):
-        with self._dispersy.database:
-            for message in messages:
-                for cid in message.payload.torrents.iterkeys():
-                    channel_id = self._get_channel_id(cid)
-                    if not channel_id:
-                        community = self._get_channel_community(cid)
-                        yield DelayMessageReqChannelMessage(message, community, includeSnapshot=True)
-                        break
-                else:
-                    yield message
-
-            # ensure that no commits occur
-            raise IgnoreCommits()
-
-    def on_channelcast(self, messages):
-        for message in messages:
-            toCollect = {}
-            for cid, infohashes in message.payload.torrents.iteritems():
-                for infohash in self._selectTorrentsToCollect(cid, infohashes):
-                    toCollect.setdefault(cid, set()).add(infohash)
-
-            nr_requests = sum([len(infohashes) for infohashes in toCollect.values()])
-            if nr_requests > 0:
-                self.create_channelcast_request(toCollect, message.candidate)
-
-    def create_channelcast_request(self, toCollect, candidate):
-        # create channelcast request message
-        meta = self.get_meta_message(u"channelcast-request")
-        message = meta.impl(authentication=(self._my_member,),
-                            distribution=(self.global_time,), destination=(candidate,), payload=(toCollect,))
-        self._dispersy._forward([message])
-
-        nr_requests = sum([len(torrents) for torrents in toCollect.itervalues()])
-        self._logger.debug("requesting %s torrents from %s", nr_requests, candidate)
-
-    def check_channelcast_request(self, messages):
-        # no timeline check because PublicResolution policy is used
-        return messages
-
-    def on_channelcast_request(self, messages):
-        for message in messages:
-            requested_packets = []
-            for cid, infohashes in message.payload.torrents.iteritems():
-                requested_packets.extend(self._get_packets_from_infohashes(cid, infohashes))
-
-            if requested_packets:
-                self._dispersy._send_packets([message.candidate], requested_packets,
-                                             self, "-caused by channelcast-request-")
-
-            self._logger.debug("got request for %s torrents from %s", len(requested_packets), message.candidate)
-
-    def create_channelsearch(self, keywords):
-        # clear searchcallbacks if new search
-        query = " ".join(keywords)
-
-        meta = self.get_meta_message(u"channelsearch")
-        message = meta.impl(authentication=(self._my_member,),
-                            distribution=(self.global_time,),
-                            payload=(keywords,))
-
-        self._logger.debug("searching for channel matching '%s'", query)
-
-        return self._dispersy._forward([message])
-
-    def check_channelsearch(self, messages):
-        # no timeline check because PublicResolution policy is used
-        return messages
-
-    def on_channelsearch(self, messages):
-        for message in messages:
-            keywords = message.payload.keywords
-            query = " ".join(keywords)
-
-            self._logger.debug("got search request for '%s'", query)
-
-            results = self._channelcast_db.searchChannelsTorrent(query, 7, 7, dispersyOnly=True)
-            if len(results) > 0:
-                responsedict = {}
-                for channel_id, dispersy_cid, name, infohash, torname, time_stamp in results:
-                    infohashes = responsedict.setdefault(dispersy_cid, set())
-                    infohashes.add(infohash)
-
-                    self._logger.debug("found cid: %s infohash: %s", dispersy_cid.encode("HEX"), infohash.encode("HEX"))
-
-                self.create_channelsearch_response(keywords, responsedict, message.candidate)
-
-            else:
-                self._logger.debug("no results")
-
-    def create_channelsearch_response(self, keywords, torrents, candidate):
-        # create channelsearch-response message
-        meta = self.get_meta_message(u"channelsearch-response")
-        message = meta.impl(authentication=(self._my_member,),
-                            distribution=(self.global_time,), destination=(candidate,), payload=(keywords, torrents))
-
-        self._dispersy._forward([message])
-
-        nr_requests = sum([len(tors) for tors in torrents.values()])
self._logger.debug("sending %s results", nr_requests) - - def check_channelsearch_response(self, messages): - with self._dispersy.database: - for message in messages: - for cid in message.payload.torrents.iterkeys(): - channel_id = self._get_channel_id(cid) - if not channel_id: - community = self._get_channel_community(cid) - yield DelayMessageReqChannelMessage(message, community, includeSnapshot=True) - break - else: - yield message - - # ensure that no commits occur - raise IgnoreCommits() - - def on_channelsearch_response(self, messages): - # request missing torrents - self.on_channelcast(messages) - - for message in messages: - # show results in gui - keywords = message.payload.keywords - query = " ".join(keywords) - - self._logger.debug("got search response for '%s'", query) - - # emit a results signal if integrated with Tribler - if self.tribler_session is not None: - from Tribler.Core.simpledefs import SIGNAL_ALLCHANNEL_COMMUNITY, SIGNAL_ON_SEARCH_RESULTS - torrents = message.payload.torrents - results = {'keywords': keywords, - 'torrents': torrents} - self.tribler_session.notifier.notify(SIGNAL_ALLCHANNEL_COMMUNITY, SIGNAL_ON_SEARCH_RESULTS, None, results) - - @inlineCallbacks - def disp_create_votecast(self, cid, vote, timestamp, store=True, update=True, forward=True): - # reclassify community - if vote == 2: - communityclass = ChannelCommunity - else: - communityclass = PreviewChannelCommunity - - community_old = self._get_channel_community(cid) - community = yield self.dispersy.reclassify_community(community_old, communityclass) - community._candidates = community_old._candidates - - # check if we need to cancel a previous vote - latest_dispersy_id = self._votecast_db.get_latest_vote_dispersy_id(community._channel_id, None) - if latest_dispersy_id: - message = self._dispersy.load_message_by_packetid(self, latest_dispersy_id) - if message: - self.create_undo(message) - - # create new vote message - meta = self.get_meta_message(u"votecast") - message = meta.impl(authentication=(self._my_member,), - distribution=(self.claim_global_time(),), - payload=(cid, vote, timestamp)) - self._dispersy.store_update_forward([message], store, update, forward) - - returnValue(message) - - def check_votecast(self, messages): - with self._dispersy.database: - communities = {} - channel_ids = {} - for cid in set([message.payload.cid for message in messages]): - channel_id = self._get_channel_id(cid) - if channel_id: - channel_ids[cid] = channel_id - else: - communities[cid] = self._get_channel_community(cid) - - for message in messages: - community = communities.get(message.payload.cid) - if community: - # at this point we should NOT have the channel message for this community - if __debug__: - try: - self._dispersy.database.execute( - u"SELECT * FROM sync WHERE community = ? AND meta_message = ? AND undone = 0", - (community.database_id, community.get_meta_message(u"channel").database_id)).next() - self._logger.error("We already have the channel message... 
no need to wait for it %s", - community.cid.encode("HEX")) - except StopIteration: - pass - - self._logger.debug("Did not receive channel, requesting channel message '%s' from %s", - community.cid.encode("HEX"), message.candidate.sock_addr) - # request torrents if positive vote - yield DelayMessageReqChannelMessage(message, community, includeSnapshot=message.payload.vote > 0) - - else: - message.channel_id = channel_ids[message.payload.cid] - yield message - - # ensure that no commits occur - raise IgnoreCommits() - - def on_votecast(self, messages): - if self.tribler_session is not None: - votelist = [] - for message in messages: - dispersy_id = message.packet_id - channel_id = getattr(message, "channel_id", 0) - - authentication_member = message.authentication.member - if authentication_member == self._my_member: - peer_id = None - - # if channel_id is not found, then this is a manual join - # insert placeholder into database which will be replaced after channelmessage has been received - if not channel_id: - select_channel = "SELECT id FROM _Channels WHERE dispersy_cid = ?" - channel_id = self._channelcast_db._db.fetchone(select_channel, (buffer(message.payload.cid),)) - - if not channel_id: - insert_channel = "INSERT INTO _Channels (dispersy_cid, peer_id, name) " \ - "VALUES (?, ?, ?); SELECT last_insert_rowid();" - channel_id = self._channelcast_db._db.fetchone(insert_channel, - (buffer(message.payload.cid), -1, '')) - else: - peer_id = self._peer_db.addOrGetPeerID(authentication_member.public_key) - - votelist.append((channel_id, peer_id, dispersy_id, message.payload.vote, message.payload.timestamp)) - - self._votecast_db.on_votes_from_dispersy(votelist) - - def undo_votecast(self, descriptors, redo=False): - if self.tribler_session is not None: - contains_my_vote = False - votelist = [] - now = long(time()) - for _, _, packet in descriptors: - message = packet.load_message() - dispersy_id = message.packet_id - - channel_id = self._get_channel_id(message.payload.cid) - votelist.append((None if redo else now, channel_id, dispersy_id)) - - authentication_member = message.authentication.member - my_vote = authentication_member == self._my_member - if my_vote: - contains_my_vote = True - - self._votecast_db.on_remove_votes_from_dispersy(votelist, contains_my_vote) - - def _get_channel_community(self, cid): - assert isinstance(cid, str) - assert len(cid) == 20 - - try: - return self._dispersy.get_community(cid, True) - except CommunityNotFoundException: - if self.auto_join_channel: - self._logger.info("join channel community %s", cid.encode("HEX")) - return ChannelCommunity.init_community(self._dispersy, self._dispersy.get_member(mid=cid), - self._my_member, tribler_session=self.tribler_session) - else: - self._logger.info("join preview community %s", cid.encode("HEX")) - return PreviewChannelCommunity.init_community(self._dispersy, self._dispersy.get_member(mid=cid), - self._my_member, tribler_session=self.tribler_session) - - @inlineCallbacks - def unload_preview(self): - cleanpoint = time() - 300 - inactive = [community for community in self.dispersy._communities.itervalues() if isinstance( - community, PreviewChannelCommunity) and community.init_timestamp < cleanpoint] - self._logger.debug("cleaning %d/%d previewchannel communities", len(inactive), len(self.dispersy._communities)) - - for community in inactive: - yield community.unload_community() - - def _get_channel_id(self, cid): - assert isinstance(cid, str) - assert len(cid) == 20 - - return 
-        return self._channelcast_db.getChannelIdFromDispersyCID(buffer(cid))
-
-    def _selectTorrentsToCollect(self, cid, infohashes):
-        channel_id = self._get_channel_id(cid)
-
-        row = self._channelcast_db.getCountMaxFromChannelId(channel_id)
-        if row:
-            nrTorrents, latestUpdate = row
-        else:
-            nrTorrents = 0
-            latestUpdate = 0
-
-        collect = []
-
-        # filter infohashes using recentlyRequested
-        infohashes = filter(lambda infohash: infohash not in self._recentlyRequested, infohashes)
-
-        # only request updates if nrT < 100 or we have not received an update in the last half hour
-        if nrTorrents < 100 or latestUpdate < (time() - 1800):
-            infohashes = list(infohashes)
-            haveTorrents = self._channelcast_db.hasTorrents(channel_id, infohashes)
-            for i in range(len(infohashes)):
-                if not haveTorrents[i]:
-                    collect.append(infohashes[i])
-
-        self._recentlyRequested.extend(collect)
-        self._recentlyRequested = self._recentlyRequested[:100]
-
-        return collect
-
-    def _get_packets_from_infohashes(self, cid, infohashes):
-        assert all(isinstance(infohash, str) for infohash in infohashes)
-        assert all(len(infohash) == 20 for infohash in infohashes)
-
-        channel_id = self._get_channel_id(cid)
-
-        packets = []
-        for infohash in infohashes:
-            dispersy_id = self._channelcast_db.getTorrentFromChannelId(
-                channel_id, infohash, ['ChannelTorrents.dispersy_id'])
-
-            if dispersy_id and dispersy_id > 0:
-                try:
-                    # 2. get the message
-                    packets.append(self._get_packet_from_dispersy_id(dispersy_id, "torrent"))
-                except RuntimeError:
-                    pass
-
-        return packets
-
-    def _get_packet_from_dispersy_id(self, dispersy_id, messagename):
-        try:
-            packet, = self._dispersy.database.execute(
-                u"SELECT sync.packet FROM community JOIN sync ON sync.community = community.id WHERE sync.id = ?", (dispersy_id,)).next()
-        except StopIteration:
-            raise RuntimeError("Unknown dispersy_id")
-        return str(packet)
-
-
-class ChannelCastDBStub():
-
-    def __init__(self, dispersy):
-        self._dispersy = dispersy
-        self.channel_id = None
-        self.mychannel = False
-        self.latest_result = 0
-
-        self.cachedTorrents = None
-        self.recentTorrents = []
-
-    def convert_to_messages(self, results):
-        messages = self._dispersy.convert_packets_to_messages(str(packet) for packet, _ in results)
-        for packet_id, message in zip((packet_id for _, packet_id in results), messages):
-            if message:
-                message.packet_id = packet_id
-                yield message.community.cid, message
-
-    def getChannelIdFromDispersyCID(self, cid):
-        return self.channel_id
-
-    def getCountMaxFromChannelId(self, channel_id):
-        if self.cachedTorrents:
-            return len(self.cachedTorrents), self.latest_result
-
-    def getRecentAndRandomTorrents(self, NUM_OWN_RECENT_TORRENTS=15, NUM_OWN_RANDOM_TORRENTS=10, NUM_OTHERS_RECENT_TORRENTS=15, NUM_OTHERS_RANDOM_TORRENTS=10, NUM_OTHERS_DOWNLOADED=5):
-        torrent_dict = {}
-
-        for _, payload in self.recentTorrents[:max(NUM_OWN_RECENT_TORRENTS, NUM_OTHERS_RECENT_TORRENTS)]:
-            torrent_dict.setdefault(self.channel_id, set()).add(payload.infohash)
-
-        if len(self.recentTorrents) >= NUM_OWN_RECENT_TORRENTS:
-            for infohash in self.getRandomTorrents(self.channel_id, max(NUM_OWN_RANDOM_TORRENTS, NUM_OTHERS_RANDOM_TORRENTS)):
-                torrent_dict.setdefault(self.channel_id, set()).add(infohash)
-
-        return torrent_dict
-
-    def getRandomTorrents(self, channel_id, limit=15):
-        torrents = self._cachedTorrents.keys()
-        if len(torrents) > limit:
-            return sample(torrents, limit)
-        return torrents
-
-    def newTorrent(self, message):
-        self._cachedTorrents[message.payload.infohash] = message
self.recentTorrents.append((message.distribution.global_time, message.payload))
-        self.recentTorrents.sort(reverse=True)
-        self.recentTorrents = self.recentTorrents[:50]
-
-        self.latest_result = time()
-
-    def setChannelId(self, channel_id, mychannel):
-        self.channel_id = channel_id
-        self.mychannel = mychannel
-
-    def getMyChannelId(self):
-        if self.mychannel:
-            return self.channel_id
-
-    def hasTorrents(self, channel_id, infohashes):
-        returnAr = []
-        for infohash in infohashes:
-            if infohash in self._cachedTorrents:
-                returnAr.append(True)
-            else:
-                returnAr.append(False)
-        return returnAr
-
-    def getTorrentFromChannelId(self, channel_id, infohash, keys):
-        if infohash in self._cachedTorrents:
-            return self._cachedTorrents[infohash].packet_id
-
-    def on_dynamic_settings(self, channel_id):
-        pass
-
-    @property
-    def _cachedTorrents(self):
-        if self.cachedTorrents is None:
-            self.cachedTorrents = {}
-            self._cacheTorrents()
-
-        return self.cachedTorrents
-
-    def _cacheTorrents(self):
-        sql = u"SELECT sync.packet, sync.id FROM sync JOIN meta_message ON sync.meta_message = meta_message.id JOIN community ON community.id = sync.community WHERE meta_message.name = 'torrent'"
-        results = list(self._dispersy.database.execute(sql))
-        messages = self.convert_to_messages(results)
-
-        for _, message in messages:
-            self._cachedTorrents[message.payload.infohash] = message
-            self.recentTorrents.append((message.distribution.global_time, message.payload))
-
-        self.recentTorrents.sort(reverse=True)
-        self.recentTorrents = self.recentTorrents[:50]
-
-
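Note: the votecast records that VoteCastDBStub.getDispersyId (below) looks up are fixed-size on the wire; AllChannelConversion, further down in this changeset, packs them with the struct format '!20shl' (20-byte channel id, signed short vote, signed 32-bit timestamp), which is why its decoder requires offset + 26 bytes. A stand-alone sanity check of that layout, using only the standard library; the literal values here are illustrative, not taken from the codebase:

from struct import calcsize, pack, unpack_from

# '!' = network byte order, no padding: 20 (cid) + 2 (vote) + 4 (timestamp)
assert calcsize('!20shl') == 26

raw = pack('!20shl', b'\x00' * 20, 2, 1234567890)
cid, vote, timestamp = unpack_from('!20shl', raw, 0)
assert (vote, timestamp) == (2, 1234567890)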
-class VoteCastDBStub():
-
-    def __init__(self, dispersy):
-        self._dispersy = dispersy
-        self._votecache = {}
-
-    def getDispersyId(self, cid, public_key):
-        if public_key in self._votecache:
-            return self._votecache[public_key]
-
-        sql = u"SELECT sync.id FROM sync JOIN member ON sync.member = member.id JOIN community ON community.id = sync.community JOIN meta_message ON sync.meta_message = meta_message.id WHERE community.classification = 'AllChannelCommunity' AND meta_message.name = 'votecast' AND member.public_key = ? ORDER BY global_time DESC LIMIT 1"
-        try:
-            id, = self._dispersy.database.execute(sql, (buffer(public_key),)).next()
-            self._votecache[public_key] = int(id)
-            return self._votecache[public_key]
-        except StopIteration:
-            return
-
-    def getVoteForMyChannel(self, public_key):
-        id = self.getDispersyId(None, public_key)
-        if id:  # if we have a votecastmessage from this peer in our sync table, then signal a mark as favorite
-            return 2
-        return 0
-
-    def get_latest_vote_dispersy_id(self, channel_id, voter_id):
-        return
-
-
-class PeerDBStub():
-
-    def __init__(self, dispersy):
-        self._dispersy = dispersy
-
-    def addOrGetPeerID(self, public_key):
-        return public_key
diff --git a/Tribler/community/allchannel/conversion.py b/Tribler/community/allchannel/conversion.py
deleted file mode 100644
index e103f61f427..00000000000
--- a/Tribler/community/allchannel/conversion.py
+++ /dev/null
@@ -1,130 +0,0 @@
-from random import choice, sample
-from struct import pack, unpack_from
-
-from Tribler.dispersy.conversion import BinaryConversion
-from Tribler.dispersy.message import DropPacket
-from Tribler.pyipv8.ipv8.messaging.deprecated.encoding import encode, decode
-
-
-class AllChannelConversion(BinaryConversion):
-
-    def __init__(self, community):
-        super(AllChannelConversion, self).__init__(community, "\x01")
-        self.define_meta_message(chr(1), community.get_meta_message(u"channelcast"),
-                                 self._encode_channelcast, self._decode_channelcast)
-        self.define_meta_message(chr(2), community.get_meta_message(u"channelcast-request"),
-                                 self._encode_channelcast, self._decode_channelcast)
-        self.define_meta_message(chr(3), community.get_meta_message(u"channelsearch"),
-                                 self._encode_channelsearch, self._decode_channelsearch)
-        self.define_meta_message(chr(4), community.get_meta_message(u"channelsearch-response"),
-                                 self._encode_channelsearch_response, self._decode_channelsearch_response)
-        self.define_meta_message(chr(5), community.get_meta_message(u"votecast"),
-                                 self._encode_votecast, self._decode_votecast)
-
-    def _encode_channelcast(self, message):
-        max_len = self._community.dispersy_sync_bloom_filter_bits / 8
-
-        def create_msg():
-            return encode(message.payload.torrents)
-
-        packet = create_msg()
-        while len(packet) > max_len:
-            community = choice(message.payload.torrents.keys())
-            nrTorrents = len(message.payload.torrents[community])
-            if nrTorrents == 1:
-                del message.payload.torrents[community]
-            else:
-                message.payload.torrents[community] = set(sample(message.payload.torrents[community], nrTorrents - 1))
-
-            packet = create_msg()
-
-        return packet,
-
-    def _decode_channelcast(self, placeholder, offset, data):
-        try:
-            offset, payload = decode(data, offset)
-        except ValueError:
-            raise DropPacket("Unable to decode the channelcast-payload")
-
-        if not isinstance(payload, dict):
-            raise DropPacket("Invalid payload type")
-
-        for cid, infohashes in payload.iteritems():
-            if not (isinstance(cid, str) and len(cid) == 20):
-                raise DropPacket("Invalid 'cid' type or value")
-
-            for infohash in infohashes:
-                if not (isinstance(infohash, str) and len(infohash) == 20):
-                    raise DropPacket("Invalid 'infohash' type or value")
-        return offset, placeholder.meta.payload.implement(payload)
-
-    def _encode_channelsearch(self, message):
-        packet = encode(message.payload.keywords)
-        return packet,
-
-    def _decode_channelsearch(self, placeholder, offset, data):
-        try:
-            offset, payload = decode(data, offset)
-        except ValueError:
-            raise DropPacket("Unable to decode the channelsearch-payload")
-
-        if not isinstance(payload,
list): - raise DropPacket("Invalid payload type") - - for keyword in payload: - if not isinstance(keyword, unicode): - raise DropPacket("Invalid 'keyword' type") - return offset, placeholder.meta.payload.implement(payload) - - def _encode_channelsearch_response(self, message): - packet = encode((message.payload.keywords, message.payload.torrents)) - return packet, - - def _decode_channelsearch_response(self, placeholder, offset, data): - try: - offset, payload = decode(data, offset) - except ValueError: - raise DropPacket("Unable to decode the channelcast-payload") - - if not isinstance(payload, tuple): - raise DropPacket("Invalid payload type") - - keywords, torrents = payload - for keyword in keywords: - if not isinstance(keyword, unicode): - raise DropPacket("Invalid 'keyword' type") - - for cid, infohashes in torrents.iteritems(): - if not (isinstance(cid, str) and len(cid) == 20): - raise DropPacket("Invalid 'cid' type or value") - - for infohash in infohashes: - if not (isinstance(infohash, str) and len(infohash) == 20): - raise DropPacket("Invalid 'infohash' type or value") - - return offset, placeholder.meta.payload.implement(keywords, torrents) - - def _encode_votecast(self, message): - return pack('!20shl', message.payload.cid, message.payload.vote, message.payload.timestamp), - - def _decode_votecast(self, placeholder, offset, data): - if len(data) < offset + 26: - raise DropPacket("Unable to decode the payload") - - cid, vote, timestamp = unpack_from('!20shl', data, offset) - if not vote in [-1, 0, 2]: - raise DropPacket("Invalid 'vote' type or value") - - return offset + 26, placeholder.meta.payload.implement(cid, vote, timestamp) - - # def _encode_torrent_request(self, message): - # return message.payload.infohash, - - # def _decode_torrent_request(self, placeholder, offset, data): - # if len(data) < offset + 20: - # raise DropPacket("Insufficient packet size") - - # infohash = data[offset:offset+20] - # offset += 20 - - # return offset, placeholder.meta.payload.implement(infohash) diff --git a/Tribler/community/allchannel/message.py b/Tribler/community/allchannel/message.py deleted file mode 100644 index 0c006642d87..00000000000 --- a/Tribler/community/allchannel/message.py +++ /dev/null @@ -1,27 +0,0 @@ -from Tribler.community.channel.community import ChannelCommunity -from Tribler.dispersy.message import DelayMessage - - -class DelayMessageReqChannelMessage(DelayMessage): - """ - Raised during ChannelCommunity.check_ if the channel message has not been received yet. 
- """ - - def __init__(self, delayed, channel_community, includeSnapshot=False): - super(DelayMessageReqChannelMessage, self).__init__(delayed) - if __debug__: - from Tribler.dispersy.message import Message - assert isinstance(delayed, Message.Implementation), type(delayed) - assert isinstance(channel_community, ChannelCommunity), type(channel_community) - - self._channel_community = channel_community - self._includeSnapshot = includeSnapshot - - @property - def match_info(self): - # we return the channel_community cid here, to register the delay at that community - return (self._channel_community.cid, u"channel", None, None, []), - - def send_request(self, community, candidate): - # the request is sent from within the channel_community - self._channel_community.disp_create_missing_channel(candidate, self._includeSnapshot) diff --git a/Tribler/community/allchannel/payload.py b/Tribler/community/allchannel/payload.py deleted file mode 100644 index e202ee1e4b7..00000000000 --- a/Tribler/community/allchannel/payload.py +++ /dev/null @@ -1,116 +0,0 @@ -from Tribler.dispersy.payload import Payload - - -class ChannelCastPayload(Payload): - """ - Propagate semi random channel data. - - One channel-propagate message could contain a list with the following ChannelCommunity packets: - - torrent - """ - class Implementation(Payload.Implementation): - - def __init__(self, meta, torrents): - if __debug__: - assert isinstance(torrents, dict), 'torrents should be a dictionary containing cid:set(infohashes)' - for cid, infohashes in torrents.iteritems(): - assert isinstance(cid, str) - assert len(cid) == 20 - assert isinstance(infohashes, set) - assert not filter(lambda x: not isinstance(x, str), infohashes) - assert not filter(lambda x: not len(x) == 20, infohashes) - assert len(infohashes) > 0 - - super(ChannelCastPayload.Implementation, self).__init__(meta) - self._torrents = torrents - - @property - def torrents(self): - return self._torrents - - -class ChannelCastRequestPayload(ChannelCastPayload): - pass - - -class ChannelSearchPayload(Payload): - - """ - Propagate a search for a channel - """ - class Implementation(Payload.Implementation): - - def __init__(self, meta, keywords): - if __debug__: - assert isinstance(keywords, list), 'keywords should be list' - for keyword in keywords: - assert isinstance(keyword, unicode), '%s is type %s' % (keyword, type(keyword)) - assert len(keyword) > 0 - - super(ChannelSearchPayload.Implementation, self).__init__(meta) - self._keywords = keywords - - @property - def keywords(self): - return self._keywords - - -class ChannelSearchResponsePayload(Payload): - - class Implementation(Payload.Implementation): - - def __init__(self, meta, keywords, torrents): - if __debug__: - assert isinstance(keywords, list), 'keywords should be list' - assert isinstance(torrents, dict), 'torrents should be a dictionary containing cid:set(infohashes)' - for cid, infohashes in torrents.iteritems(): - assert isinstance(cid, str) - assert len(cid) == 20 - assert isinstance(infohashes, set) - assert not filter(lambda x: not isinstance(x, str), infohashes) - assert not filter(lambda x: not len(x) == 20, infohashes) - assert len(infohashes) > 0 - - super(ChannelSearchResponsePayload.Implementation, self).__init__(meta) - self._keywords = keywords - self._torrents = torrents - - @property - def keywords(self): - return self._keywords - - @property - def torrents(self): - return self._torrents - - -class VoteCastPayload(Payload): - - """ - Propagate vote for a channel - """ - class 
Implementation(Payload.Implementation):
-
-        def __init__(self, meta, cid, vote, timestamp):
-            assert isinstance(cid, str)
-            assert len(cid) == 20
-            assert isinstance(vote, int)
-            assert vote in [-1, 0, 2]
-            assert isinstance(timestamp, (int, long))
-
-            super(VoteCastPayload.Implementation, self).__init__(meta)
-            self._cid = cid
-            self._vote = vote
-            self._timestamp = timestamp
-
-        @property
-        def cid(self):
-            return self._cid
-
-        @property
-        def vote(self):
-            return self._vote
-
-        @property
-        def timestamp(self):
-            return self._timestamp
diff --git a/Tribler/community/channel/__init__.py b/Tribler/community/channel/__init__.py
deleted file mode 100644
index cc89965b023..00000000000
--- a/Tribler/community/channel/__init__.py
+++ /dev/null
@@ -1,3 +0,0 @@
-"""
-Channels are lists of "favorite" .torrents created by one or more users.
-"""
diff --git a/Tribler/community/channel/community.py b/Tribler/community/channel/community.py
deleted file mode 100644
index 3db59a2f7cd..00000000000
--- a/Tribler/community/channel/community.py
+++ /dev/null
@@ -1,1286 +0,0 @@
-import logging
-from binascii import hexlify
-from struct import pack
-from time import time
-from traceback import print_stack
-from twisted.python.threadable import isInIOThread
-
-from Tribler.Core.CacheDB.sqlitecachedb import str2bin
-from Tribler.Core.simpledefs import NTFY_CHANNEL, NTFY_TORRENT
-from Tribler.Core.simpledefs import NTFY_DISCOVERED
-import Tribler.Core.Utilities.json_util as json
-from Tribler.community.channel.payload import ModerationPayload
-from Tribler.dispersy.authentication import MemberAuthentication, NoAuthentication
-from Tribler.dispersy.candidate import CANDIDATE_WALK_LIFETIME
-from Tribler.dispersy.community import Community
-from Tribler.dispersy.conversion import DefaultConversion
-from Tribler.dispersy.destination import CandidateDestination, CommunityDestination
-from Tribler.dispersy.distribution import FullSyncDistribution, DirectDistribution
-from Tribler.dispersy.exception import MetaNotFoundException
-from Tribler.dispersy.message import BatchConfiguration, Message, DropMessage, DelayMessageByProof
-from Tribler.dispersy.resolution import LinearResolution, PublicResolution, DynamicResolution
-from Tribler.dispersy.util import call_on_reactor_thread
-from .conversion import ChannelConversion
-from .message import DelayMessageReqChannelMessage
-from .payload import (ChannelPayload, TorrentPayload, PlaylistPayload, CommentPayload, ModificationPayload,
-                      PlaylistTorrentPayload, MissingChannelPayload, MarkTorrentPayload)
-
-logger = logging.getLogger(__name__)
-
-
-METADATA_TYPES = [u'name', u'description', u'swift-url', u'swift-thumbnails', u'video-info', u'metadata-json']
-
-
-def warnIfNotDispersyThread(func):
-    def invoke_func(*args, **kwargs):
-        if not isInIOThread():
-            logger.critical("This method MUST be called on the DispersyThread")
-            print_stack()
-            return None
-        else:
-            return func(*args, **kwargs)
-
-    invoke_func.__name__ = func.__name__
-    return invoke_func
-
-
-class ChannelCommunity(Community):
-
-    """
-    Each user owns zero or more ChannelCommunities that others can join and use to discuss.
- """ - - def __init__(self, *args, **kwargs): - super(ChannelCommunity, self).__init__(*args, **kwargs) - - self._channel_id = None - self._channel_name = None - self._channel_description = None - - self.tribler_session = None - self.integrate_with_tribler = None - - self._peer_db = None - self._channelcast_db = None - - def initialize(self, tribler_session=None): - self.tribler_session = tribler_session - self.integrate_with_tribler = tribler_session is not None - - super(ChannelCommunity, self).initialize() - - if self.integrate_with_tribler: - from Tribler.Core.simpledefs import NTFY_PEERS, NTFY_CHANNELCAST - - # tribler channelcast database - self._peer_db = tribler_session.open_dbhandler(NTFY_PEERS) - self._channelcast_db = tribler_session.open_dbhandler(NTFY_CHANNELCAST) - - # tribler channel_id - result = self._channelcast_db._db.fetchone( - u"SELECT id, name, description FROM Channels WHERE dispersy_cid = ? and (peer_id <> -1 or peer_id ISNULL)", - (buffer(self._master_member.mid), - )) - if result is not None: - self._channel_id, self._channel_name, self._channel_description = result - - else: - try: - message = self._get_latest_channel_message() - if message: - self._channel_id = self.cid - except (MetaNotFoundException, RuntimeError): - pass - - from Tribler.community.allchannel.community import AllChannelCommunity - for community in self.dispersy.get_communities(): - if isinstance(community, AllChannelCommunity): - self._channelcast_db = community._channelcast_db - - def initiate_meta_messages(self): - batch_delay = 3.0 - - # 30/11/11 Boudewijn: we frequently see dropped packets when joining a channel. this can be - # caused when a sync results in both torrent and modification messages. when the - # modification messages are processed first they will all cause the associated torrent - # message to be requested, when these are received they are duplicates. solution: ensure - # that the modification messages are processed after messages that they can request. 
normal - # priority is 128, therefore, modification_priority is one less - modification_priority = 128 - 1 - - return super(ChannelCommunity, self).initiate_meta_messages() + [ - Message(self, u"channel", - MemberAuthentication(), - LinearResolution(), - FullSyncDistribution(enable_sequence_number=False, synchronization_direction=u"DESC", priority=130), - CommunityDestination(node_count=10), - ChannelPayload(), - self._disp_check_channel, - self._disp_on_channel), - Message(self, u"torrent", - MemberAuthentication(), - DynamicResolution(LinearResolution(), PublicResolution()), - FullSyncDistribution(enable_sequence_number=False, synchronization_direction=u"DESC", priority=129), - CommunityDestination(node_count=10), - TorrentPayload(), - self._disp_check_torrent, - self._disp_on_torrent, - self._disp_undo_torrent, - batch=BatchConfiguration(max_window=batch_delay)), - Message(self, u"playlist", - MemberAuthentication(), - LinearResolution(), - FullSyncDistribution(enable_sequence_number=False, synchronization_direction=u"DESC", priority=128), - CommunityDestination(node_count=10), - PlaylistPayload(), - self._disp_check_playlist, - self._disp_on_playlist, - self._disp_undo_playlist, - batch=BatchConfiguration(max_window=batch_delay)), - Message(self, u"comment", - MemberAuthentication(), - DynamicResolution(LinearResolution(), PublicResolution()), - FullSyncDistribution(enable_sequence_number=False, synchronization_direction=u"DESC", priority=128), - CommunityDestination(node_count=10), - CommentPayload(), - self._disp_check_comment, - self._disp_on_comment, - self._disp_undo_comment, - batch=BatchConfiguration(max_window=batch_delay)), - Message(self, u"modification", - MemberAuthentication(), - DynamicResolution(LinearResolution(), PublicResolution()), - FullSyncDistribution(enable_sequence_number=False, - synchronization_direction=u"DESC", - priority=modification_priority), - CommunityDestination(node_count=10), - ModificationPayload(), - self._disp_check_modification, - self._disp_on_modification, - self._disp_undo_modification, - batch=BatchConfiguration(max_window=batch_delay)), - Message(self, u"playlist_torrent", - MemberAuthentication(), - DynamicResolution(LinearResolution(), PublicResolution()), - FullSyncDistribution(enable_sequence_number=False, synchronization_direction=u"DESC", priority=128), - CommunityDestination(node_count=10), - PlaylistTorrentPayload(), - self._disp_check_playlist_torrent, - self._disp_on_playlist_torrent, - self._disp_undo_playlist_torrent, - batch=BatchConfiguration(max_window=batch_delay)), - Message(self, u"moderation", - MemberAuthentication(), - DynamicResolution(LinearResolution(), PublicResolution()), - FullSyncDistribution(enable_sequence_number=False, synchronization_direction=u"DESC", priority=128), - CommunityDestination(node_count=10), - ModerationPayload(), - self._disp_check_moderation, - self._disp_on_moderation, - self._disp_undo_moderation, - batch=BatchConfiguration(max_window=batch_delay)), - Message(self, u"mark_torrent", - MemberAuthentication(), - DynamicResolution(LinearResolution(), PublicResolution()), - FullSyncDistribution(enable_sequence_number=False, synchronization_direction=u"DESC", priority=128), - CommunityDestination(node_count=10), - MarkTorrentPayload(), - self._disp_check_mark_torrent, - self._disp_on_mark_torrent, - self._disp_undo_mark_torrent, - batch=BatchConfiguration(max_window=batch_delay)), - Message(self, u"missing-channel", - NoAuthentication(), - PublicResolution(), - DirectDistribution(), - 
CandidateDestination(), - MissingChannelPayload(), - self._disp_check_missing_channel, - self._disp_on_missing_channel), - ] - - @property - def dispersy_sync_response_limit(self): - return 25 * 1024 - - def initiate_conversions(self): - return [DefaultConversion(self), ChannelConversion(self)] - - CHANNEL_CLOSED, CHANNEL_SEMI_OPEN, CHANNEL_OPEN, CHANNEL_MODERATOR = range(4) - CHANNEL_ALLOWED_MESSAGES = ([], - [u"comment", u"mark_torrent"], - [u"torrent", - u"comment", - u"modification", - u"playlist_torrent", - u"moderation", - u"mark_torrent"], - [u"channel", - u"torrent", - u"playlist", - u"comment", - u"modification", - u"playlist_torrent", - u"moderation", - u"mark_torrent"]) - - def get_channel_id(self): - return self._channel_id - - def get_channel_name(self): - return self._channel_name - - def get_channel_description(self): - return self._channel_description - - def get_channel_mode(self): - public = set() - permitted = set() - - for meta in self.get_meta_messages(): - if isinstance(meta.resolution, DynamicResolution): - policy, _ = self._timeline.get_resolution_policy(meta, self.global_time + 1) - else: - policy = meta.resolution - - if isinstance(policy, PublicResolution): - public.add(meta.name) - else: - allowed, _ = self._timeline.allowed(meta) - if allowed: - permitted.add(meta.name) - - def isCommunityType(state, checkPermitted=False): - for type in ChannelCommunity.CHANNEL_ALLOWED_MESSAGES[state]: - if type not in public: - if checkPermitted and type in permitted: - continue - return False - return True - - isModerator = isCommunityType(ChannelCommunity.CHANNEL_MODERATOR, True) - if isCommunityType(ChannelCommunity.CHANNEL_OPEN): - return ChannelCommunity.CHANNEL_OPEN, isModerator - - if isCommunityType(ChannelCommunity.CHANNEL_SEMI_OPEN): - return ChannelCommunity.CHANNEL_SEMI_OPEN, isModerator - - return ChannelCommunity.CHANNEL_CLOSED, isModerator - - def set_channel_mode(self, mode): - curmode, isModerator = self.get_channel_mode() - if isModerator and mode != curmode: - public_messages = ChannelCommunity.CHANNEL_ALLOWED_MESSAGES[mode] - - new_policies = [] - for meta in self.get_meta_messages(): - if isinstance(meta.resolution, DynamicResolution): - if meta.name in public_messages: - new_policies.append((meta, meta.resolution.policies[1])) - else: - new_policies.append((meta, meta.resolution.policies[0])) - - self.create_dynamic_settings(new_policies) - - def create_channel(self, name, description, store=True, update=True, forward=True): - self._disp_create_channel(name, description, store, update, forward) - - @call_on_reactor_thread - def _disp_create_channel(self, name, description, store=True, update=True, forward=True): - name = unicode(name[:255]) - description = unicode(description[:1023]) - - meta = self.get_meta_message(u"channel") - message = meta.impl(authentication=(self._my_member,), - distribution=(self.claim_global_time(),), - payload=(name, description)) - self._dispersy.store_update_forward([message], store, update, forward) - return message - - def _disp_check_channel(self, messages): - for message in messages: - accepted, proof = self._timeline.check(message) - if not accepted: - yield DelayMessageByProof(message) - continue - - yield message - - def _disp_on_channel(self, messages): - if self.integrate_with_tribler: - for message in messages: - assert self._cid == self._master_member.mid - logger.debug("%s %s", message.candidate, self._cid.encode("HEX")) - - authentication_member = message.authentication.member - if authentication_member == 
self._my_member: - peer_id = None - else: - peer_id = self._peer_db.addOrGetPeerID(authentication_member.public_key) - self._channel_id = self._channelcast_db.on_channel_from_dispersy(self._master_member.mid, - peer_id, - message.payload.name, - message.payload.description) - - self.tribler_session.notifier.notify(NTFY_CHANNEL, NTFY_DISCOVERED, None, - {"name": message.payload.name, - "description": message.payload.description, - "dispersy_cid": self._cid.encode("hex")}) - - # emit signal of channel creation if the channel is created by us - if authentication_member == self._my_member: - self._channel_name = message.payload.name - self._channel_description = message.payload.description - - from Tribler.Core.simpledefs import SIGNAL_CHANNEL, SIGNAL_ON_CREATED - channel_data = {u'channel': self, - u'name': message.payload.name, - u'description': message.payload.description} - self.tribler_session.notifier.notify(SIGNAL_CHANNEL, SIGNAL_ON_CREATED, None, channel_data) - else: - for message in messages: - self._channel_id = self._master_member.mid - authentication_member = message.authentication.member - - self._channelcast_db.setChannelId(self._channel_id, authentication_member == self._my_member) - - def _disp_create_torrent_from_torrentdef(self, torrentdef, timestamp, store=True, update=True, forward=True): - files = torrentdef.get_files_with_length() - return (self._disp_create_torrent(torrentdef.get_infohash(), timestamp, - torrentdef.get_name_as_unicode(), tuple(files), - torrentdef.get_trackers_as_single_tuple(), store, update, forward)) - - def _disp_create_torrent(self, infohash, timestamp, name, files, trackers, store=True, update=True, forward=True): - meta = self.get_meta_message(u"torrent") - - global_time = self.claim_global_time() - current_policy, _ = self._timeline.get_resolution_policy(meta, global_time) - message = meta.impl(authentication=(self._my_member,), - resolution=(current_policy.implement(),), - distribution=(global_time,), - payload=(infohash, timestamp, name, files, trackers)) - self._dispersy.store_update_forward([message], store, update, forward) - return message - - def _disp_create_torrents(self, torrentlist, store=True, update=True, forward=True): - messages = [] - - meta = self.get_meta_message(u"torrent") - current_policy, _ = self._timeline.get_resolution_policy(meta, self.global_time + 1) - for infohash, timestamp, name, files, trackers in torrentlist: - message = meta.impl(authentication=(self._my_member,), - resolution=(current_policy.implement(),), - distribution=(self.claim_global_time(),), - payload=(infohash, timestamp, name, files, trackers)) - - messages.append(message) - - self._dispersy.store_update_forward(messages, store, update, forward) - return messages - - def _disp_check_torrent(self, messages): - for message in messages: - if not self._channel_id: - yield DelayMessageReqChannelMessage(message) - continue - - accepted, proof = self._timeline.check(message) - if not accepted: - yield DelayMessageByProof(message) - continue - yield message - - def _disp_on_torrent(self, messages): - if self.integrate_with_tribler: - torrentlist = [] - for message in messages: - dispersy_id = message.packet_id - authentication_member = message.authentication.member - if authentication_member == self._my_member: - peer_id = None - else: - peer_id = self._peer_db.addOrGetPeerID(authentication_member.public_key) - - # sha_other_peer = (sha1(str(message.candidate.sock_addr) + self.my_member.mid)) - torrentlist.append( - (self._channel_id, - dispersy_id, - 
peer_id, - message.payload.infohash, - message.payload.timestamp, - message.payload.name, - message.payload.files, - message.payload.trackers)) - self._logger.debug("torrent received: %s on channel: %s", hexlify(message.payload.infohash), self._master_member) - - self.tribler_session.notifier.notify(NTFY_TORRENT, NTFY_DISCOVERED, None, - {"infohash": hexlify(message.payload.infohash), - "timestamp": message.payload.timestamp, - "name": message.payload.name, - "files": message.payload.files, - "trackers": message.payload.trackers, - "dispersy_cid": self._cid.encode("hex")}) - - self._channelcast_db.on_torrents_from_dispersy(torrentlist) - else: - for message in messages: - self._channelcast_db.newTorrent(message) - self._logger.debug("torrent received: %s on channel: %s", message.payload.infohash, self._master_member) - - def _disp_undo_torrent(self, descriptors, redo=False): - for _, _, packet in descriptors: - dispersy_id = packet.packet_id - self._channelcast_db.on_remove_torrent_from_dispersy(self._channel_id, dispersy_id, redo) - - def remove_torrents(self, dispersy_ids): - for dispersy_id in dispersy_ids: - message = self._dispersy.load_message_by_packetid(self, dispersy_id) - if message: - if not message.undone: - self.create_undo(message) - - else: # hmm signal gui that this message has been removed already - self._disp_undo_torrent([(None, None, message)]) - - def remove_playlists(self, dispersy_ids): - for dispersy_id in dispersy_ids: - message = self._dispersy.load_message_by_packetid(self, dispersy_id) - if message: - if not message.undone: - self.create_undo(message) - - else: # hmm signal gui that this message has been removed already - self._disp_undo_playlist([(None, None, message)]) - - # create, check or receive playlists - @call_on_reactor_thread - def create_playlist(self, name, description, infohashes=[], store=True, update=True, forward=True): - message = self._disp_create_playlist(name, description) - if len(infohashes) > 0: - self._disp_create_playlist_torrents(message, infohashes, store, update, forward) - - @call_on_reactor_thread - def _disp_create_playlist(self, name, description, store=True, update=True, forward=True): - name = unicode(name[:255]) - description = unicode(description[:1023]) - - meta = self.get_meta_message(u"playlist") - message = meta.impl(authentication=(self._my_member,), - distribution=(self.claim_global_time(),), - payload=(name, description)) - self._dispersy.store_update_forward([message], store, update, forward) - return message - - def _disp_check_playlist(self, messages): - for message in messages: - if not self._channel_id: - yield DelayMessageReqChannelMessage(message) - continue - - accepted, proof = self._timeline.check(message) - if not accepted: - yield DelayMessageByProof(message) - continue - yield message - - def _disp_on_playlist(self, messages): - if self.integrate_with_tribler: - for message in messages: - dispersy_id = message.packet_id - authentication_member = message.authentication.member - if authentication_member == self._my_member: - peer_id = None - else: - peer_id = self._peer_db.addOrGetPeerID(authentication_member.public_key) - - self._channelcast_db.on_playlist_from_dispersy(self._channel_id, - dispersy_id, - peer_id, - message.payload.name, - message.payload.description) - - def _disp_undo_playlist(self, descriptors, redo=False): - if self.integrate_with_tribler: - for _, _, packet in descriptors: - dispersy_id = packet.packet_id - self._channelcast_db.on_remove_playlist_from_dispersy(self._channel_id, 
dispersy_id, redo) - - # create, check or receive comments - @call_on_reactor_thread - def create_comment(self, text, timestamp, reply_to, reply_after, playlist_id, infohash, store=True, update=True, - forward=True): - reply_to_message = reply_to - reply_after_message = reply_after - playlist_message = playlist_id - - if reply_to: - reply_to_message = self._dispersy.load_message_by_packetid(self, reply_to) - if reply_after: - reply_after_message = self._dispersy.load_message_by_packetid(self, reply_after) - if playlist_id: - playlist_message = self._get_message_from_playlist_id(playlist_id) - self._disp_create_comment(text, timestamp, reply_to_message, - reply_after_message, playlist_message, - infohash, store, update, forward) - - @call_on_reactor_thread - def _disp_create_comment(self, text, timestamp, reply_to_message, reply_after_message, playlist_message, infohash, - store=True, update=True, forward=True): - reply_to_mid = None - reply_to_global_time = None - if reply_to_message: - message = reply_to_message.load_message() - reply_to_mid = message.authentication.member.mid - reply_to_global_time = message.distribution.global_time - - reply_after_mid = None - reply_after_global_time = None - if reply_after_message: - message = reply_after_message.load_message() - reply_after_mid = message.authentication.member.mid - reply_after_global_time = message.distribution.global_time - - text = unicode(text[:1023]) - - meta = self.get_meta_message(u"comment") - global_time = self.claim_global_time() - current_policy, _ = self._timeline.get_resolution_policy(meta, global_time) - message = meta.impl(authentication=(self._my_member,), - resolution=(current_policy.implement(),), - distribution=(global_time,), payload=(text, - timestamp, reply_to_mid, reply_to_global_time, - reply_after_mid, reply_after_global_time, - playlist_message, infohash)) - self._dispersy.store_update_forward([message], store, update, forward) - return message - - def _disp_check_comment(self, messages): - for message in messages: - if not self._channel_id: - yield DelayMessageReqChannelMessage(message) - continue - - accepted, proof = self._timeline.check(message) - if not accepted: - yield DelayMessageByProof(message) - continue - yield message - - def _disp_on_comment(self, messages): - if self.integrate_with_tribler: - - for message in messages: - dispersy_id = message.packet_id - - authentication_member = message.authentication.member - if authentication_member == self._my_member: - peer_id = None - else: - peer_id = self._peer_db.addOrGetPeerID(authentication_member.public_key) - - mid_global_time = pack('!20sQ', message.authentication.member.mid, message.distribution.global_time) - - reply_to_id = None - if message.payload.reply_to_mid: - try: - reply_to_id = self._get_packet_id( - message.payload.reply_to_global_time, - message.payload.reply_to_mid) - except: - reply_to_id = pack('!20sQ', message.payload.reply_to_mid, message.payload.reply_to_global_time) - - reply_after_id = None - if message.payload.reply_after_mid: - try: - reply_after_id = self._get_packet_id( - message.payload.reply_after_global_time, - message.payload.reply_after_mid) - except: - reply_after_id = pack( - '!20sQ', - message.payload.reply_after_mid, - message.payload.reply_after_global_time) - - playlist_dispersy_id = None - if message.payload.playlist_packet: - playlist_dispersy_id = message.payload.playlist_packet.packet_id - - self._channelcast_db.on_comment_from_dispersy(self._channel_id, - dispersy_id, - mid_global_time, - peer_id, - 
message.payload.text,
-                                                          message.payload.timestamp,
-                                                          reply_to_id,
-                                                          reply_after_id,
-                                                          playlist_dispersy_id,
-                                                          message.payload.infohash)
-
-    def _disp_undo_comment(self, descriptors, redo=False):
-        if self.integrate_with_tribler:
-            for _, _, packet in descriptors:
-                dispersy_id = packet.packet_id
-
-                message = packet.load_message()
-                infohash = message.payload.infohash
-                self._channelcast_db.on_remove_comment_from_dispersy(self._channel_id, dispersy_id, infohash, redo)
-
-    def remove_comment(self, dispersy_id):
-        message = self._dispersy.load_message_by_packetid(self, dispersy_id)
-        if message:
-            self.create_undo(message)
-
-    # modify channel, playlist or torrent
-    @call_on_reactor_thread
-    def modifyChannel(self, modifications, store=True, update=True, forward=True):
-        latest_modifications = {}
-        for type, value in modifications.iteritems():
-            type = unicode(type)
-            latest_modifications[type] = self._get_latest_modification_from_channel_id(type)
-        modification_on_message = self._get_latest_channel_message()
-
-        for type, value in modifications.iteritems():
-            type = unicode(type)
-            timestamp = long(time())
-            self._disp_create_modification(type, value, timestamp,
-                                           modification_on_message,
-                                           latest_modifications[type], store,
-                                           update, forward)
-
-    @call_on_reactor_thread
-    def modifyPlaylist(self, playlist_id, modifications, store=True, update=True, forward=True):
-        latest_modifications = {}
-        for type, value in modifications.iteritems():
-            type = unicode(type)
-            latest_modifications[type] = self._get_latest_modification_from_playlist_id(playlist_id, type)
-
-        modification_on_message = self._get_message_from_playlist_id(playlist_id)
-        for type, value in modifications.iteritems():
-            type = unicode(type)
-            timestamp = long(time())
-            self._disp_create_modification(type, value, timestamp,
-                                           modification_on_message,
-                                           latest_modifications[type], store,
-                                           update, forward)
-
-    @call_on_reactor_thread
-    def modifyTorrent(self, channeltorrent_id, modifications, store=True, update=True, forward=True):
-        latest_modifications = {}
-        for type, value in modifications.iteritems():
-            type = unicode(type)
-            try:
-                latest_modifications[type] = self._get_latest_modification_from_torrent_id(channeltorrent_id, type)
-            except:
-                logger.error("Failed to get latest modification", exc_info=True)
-
-        modification_on_message = self._get_message_from_torrent_id(channeltorrent_id)
-        for type, value in modifications.iteritems():
-            timestamp = long(time())
-            self._disp_create_modification(type, value, timestamp,
-                                           modification_on_message,
-                                           latest_modifications[type], store,
-                                           update, forward)
-
-    def _disp_create_modification(self, modification_type, modification_value, timestamp, modification_on,
-                                  latest_modification, store=True, update=True, forward=True):
-        modification_type = unicode(modification_type)
-        modification_value = unicode(modification_value[:1023])
-
-        latest_modification_mid = None
-        latest_modification_global_time = None
-        if latest_modification:
-            message = latest_modification.load_message()
-            latest_modification_mid = message.authentication.member.mid
-            latest_modification_global_time = message.distribution.global_time
-
-        meta = self.get_meta_message(u"modification")
-        global_time = self.claim_global_time()
-        current_policy, _ = self._timeline.get_resolution_policy(meta, global_time)
-        message = meta.impl(authentication=(self._my_member,),
-                            resolution=(current_policy.implement(),),
-                            distribution=(global_time,),
-                            payload=(modification_type, modification_value,
-                                     timestamp, modification_on, latest_modification,
-                                     latest_modification_mid,
- latest_modification_global_time)) - self._dispersy.store_update_forward([message], store, update, forward) - return message - - def _disp_check_modification(self, messages): - th_handler = self.tribler_session.lm.rtorrent_handler - - for message in messages: - if not self._channel_id: - yield DelayMessageReqChannelMessage(message) - continue - - accepted, proof = self._timeline.check(message) - if not accepted: - yield DelayMessageByProof(message) - continue - - if message.payload.modification_on.name == u"torrent" and message.payload.modification_type == u"metadata-json": - try: - data = json.loads(message.payload.modification_value) - thumbnail_hash = data[u'thumb_hash'].decode('hex') - except ValueError: - yield DropMessage(message, "Not compatible json format") - continue - else: - modifying_dispersy_id = message.payload.modification_on.packet_id - torrent_id = self._channelcast_db._db.fetchone( - u"SELECT torrent_id FROM _ChannelTorrents WHERE dispersy_id = ?", - (modifying_dispersy_id,)) - infohash = self._channelcast_db._db.fetchone( - u"SELECT infohash FROM Torrent WHERE torrent_id = ?", (torrent_id,)) - if infohash: - infohash = str2bin(infohash) - logger.debug( - "Incoming metadata-json with infohash %s from %s", - infohash.encode("HEX"), - message.candidate.sock_addr[0]) - - if not th_handler.has_metadata(thumbnail_hash): - @call_on_reactor_thread - def callback(_, message=message): - self.on_messages([message]) - logger.debug( - "Will try to download metadata-json thumbnail with infohash %s from %s", - infohash.encode("HEX"), - message.candidate.sock_addr[0]) - th_handler.download_metadata(message.candidate, thumbnail_hash, usercallback=callback, - timeout=CANDIDATE_WALK_LIFETIME) - continue - - yield message - - def _disp_on_modification(self, messages): - if self.integrate_with_tribler: - channeltorrentDict = {} - playlistDict = {} - - for message in messages: - dispersy_id = message.packet_id - message_name = message.payload.modification_on.name - mid_global_time = "%s@%d" % (message.authentication.member.mid, message.distribution.global_time) - - modifying_dispersy_id = message.payload.modification_on.packet_id - modification_type = unicode(message.payload.modification_type) - modification_value = message.payload.modification_value - timestamp = message.payload.timestamp - - if message.payload.prev_modification_packet: - prev_modification_id = message.payload.prev_modification_packet.packet_id - else: - prev_modification_id = message.payload.prev_modification_id - prev_modification_global_time = message.payload.prev_modification_global_time - - # load local ids from database - if message_name == u"torrent": - channeltorrent_id = self._get_torrent_id_from_message(modifying_dispersy_id) - if not channeltorrent_id: - self._logger.info("CANNOT FIND channeltorrent_id %s", modifying_dispersy_id) - channeltorrentDict[modifying_dispersy_id] = channeltorrent_id - - elif message_name == u"playlist": - playlist_id = self._get_playlist_id_from_message(modifying_dispersy_id) - playlistDict[modifying_dispersy_id] = playlist_id - - authentication_member = message.authentication.member - if authentication_member == self._my_member: - peer_id = None - else: - peer_id = self._peer_db.addOrGetPeerID(authentication_member.public_key) - - # always store metadata - self._channelcast_db.on_metadata_from_dispersy(message_name, - channeltorrentDict.get(modifying_dispersy_id, None), - playlistDict.get(modifying_dispersy_id, None), - self._channel_id, - dispersy_id, - peer_id, - mid_global_time, - 
modification_type, - modification_value, - timestamp, - prev_modification_id, - prev_modification_global_time) - - for message in messages: - dispersy_id = message.packet_id - message_name = message.payload.modification_on.name - - modifying_dispersy_id = message.payload.modification_on.packet_id - modification_type = unicode(message.payload.modification_type) - modification_value = message.payload.modification_value - - # see if this is new information, if so call on_X_from_dispersy to update local 'cached' information - if message_name == u"torrent": - channeltorrent_id = channeltorrentDict[modifying_dispersy_id] - - if channeltorrent_id: - latest = self._get_latest_modification_from_torrent_id(channeltorrent_id, modification_type) - if not latest or latest.packet_id == dispersy_id: - self._channelcast_db.on_torrent_modification_from_dispersy( - channeltorrent_id, modification_type, modification_value) - - elif message_name == u"playlist": - playlist_id = playlistDict[modifying_dispersy_id] - - latest = self._get_latest_modification_from_playlist_id(playlist_id, modification_type) - if not latest or latest.packet_id == dispersy_id: - self._channelcast_db.on_playlist_modification_from_dispersy( - playlist_id, modification_type, modification_value) - - elif message_name == u"channel": - latest = self._get_latest_modification_from_channel_id(modification_type) - if not latest or latest.packet_id == dispersy_id: - self._channelcast_db.on_channel_modification_from_dispersy( - self._channel_id, modification_type, modification_value) - - def _disp_undo_modification(self, descriptors, redo=False): - if self.integrate_with_tribler: - for _, _, packet in descriptors: - dispersy_id = packet.packet_id - - message = packet.load_message() - message_name = message.name - modifying_dispersy_id = message.payload.modification_on.packet_id - modification_type = unicode(message.payload.modification_type) - - # load local ids from database - playlist_id = channeltorrent_id = None - if message_name == u"torrent": - channeltorrent_id = self._get_torrent_id_from_message(modifying_dispersy_id) - - elif message_name == u"playlist": - playlist_id = self._get_playlist_id_from_message(modifying_dispersy_id) - self._channelcast_db.on_remove_metadata_from_dispersy(self._channel_id, dispersy_id, redo) - - if message_name == u"torrent": - latest = self._get_latest_modification_from_torrent_id(channeltorrent_id, modification_type) - - if not latest or latest.packet_id == dispersy_id: - modification_value = latest.payload.modification_value if latest else '' - self._channelcast_db.on_torrent_modification_from_dispersy( - channeltorrent_id, modification_type, modification_value) - - elif message_name == u"playlist": - latest = self._get_latest_modification_from_playlist_id(playlist_id, modification_type) - - if not latest or latest.packet_id == dispersy_id: - modification_value = latest.payload.modification_value if latest else '' - self._channelcast_db.on_playlist_modification_from_dispersy( - playlist_id, modification_type, modification_value) - - elif message_name == u"channel": - latest = self._get_latest_modification_from_channel_id(modification_type) - - if not latest or latest.packet_id == dispersy_id: - modification_value = latest.payload.modification_value if latest else '' - self._channelcast_db.on_channel_modification_from_dispersy( - self._channel_id, modification_type, modification_value) - - # create, check or receive playlist_torrent messages - @call_on_reactor_thread - def create_playlist_torrents(self, 
playlist_id, infohashes, store=True, update=True, forward=True):
-        playlist_packet = self._get_message_from_playlist_id(playlist_id)
-        self._disp_create_playlist_torrents(playlist_packet, infohashes, store, update, forward)
-
-    def remove_playlist_torrents(self, playlist_id, dispersy_ids):
-        for dispersy_id in dispersy_ids:
-            message = self._dispersy.load_message_by_packetid(self, dispersy_id)
-            if message:
-                if not message.undone:
-                    self.create_undo(message)
-                else:
-                    self._disp_undo_playlist_torrent([(None, None, message)])
-
-    @call_on_reactor_thread
-    def _disp_create_playlist_torrents(self, playlist_packet, infohashes, store=True, update=True, forward=True):
-        meta = self.get_meta_message(u"playlist_torrent")
-        current_policy, _ = self._timeline.get_resolution_policy(meta, self.global_time + 1)
-
-        messages = []
-        for infohash in infohashes:
-            message = meta.impl(authentication=(self._my_member,),
-                                resolution=(current_policy.implement(),),
-                                distribution=(self.claim_global_time(),),
-                                payload=(infohash, playlist_packet))
-            messages.append(message)
-
-        self._dispersy.store_update_forward(messages, store, update, forward)
-        return messages
-
-    def _disp_check_playlist_torrent(self, messages):
-        for message in messages:
-            if not self._channel_id:
-                yield DelayMessageReqChannelMessage(message)
-                continue
-
-            accepted, proof = self._timeline.check(message)
-            if not accepted:
-                yield DelayMessageByProof(message)
-                continue
-            yield message
-
-    def _disp_on_playlist_torrent(self, messages):
-        if self.integrate_with_tribler:
-            for message in messages:
-                dispersy_id = message.packet_id
-                playlist_dispersy_id = message.payload.playlist.packet_id
-
-                authentication_member = message.authentication.member
-                if authentication_member == self._my_member:
-                    peer_id = None
-                else:
-                    peer_id = self._peer_db.addOrGetPeerID(authentication_member.public_key)
-
-                self._channelcast_db.on_playlist_torrent(dispersy_id,
-                                                         playlist_dispersy_id,
-                                                         peer_id,
-                                                         message.payload.infohash)
-
-    def _disp_undo_playlist_torrent(self, descriptors, redo=False):
-        if self.integrate_with_tribler:
-            for _, _, packet in descriptors:
-                message = packet.load_message()
-                infohash = message.payload.infohash
-                playlist_dispersy_id = message.payload.playlist.packet_id
-
-                self._channelcast_db.on_remove_playlist_torrent(self._channel_id, playlist_dispersy_id, infohash, redo)
-
-    # check or receive moderation messages
-    @call_on_reactor_thread
-    def _disp_create_moderation(self, text, timestamp, severity, cause, store=True, update=True, forward=True):
-        causemessage = self._dispersy.load_message_by_packetid(self, cause)
-        if causemessage:
-            text = unicode(text[:1023])
-
-            meta = self.get_meta_message(u"moderation")
-            global_time = self.claim_global_time()
-            current_policy, _ = self._timeline.get_resolution_policy(meta, global_time)
-
-            message = meta.impl(authentication=(self._my_member,),
-                                resolution=(current_policy.implement(),),
-                                distribution=(global_time,),
-                                payload=(text, timestamp, severity, causemessage))
-            self._dispersy.store_update_forward([message], store, update, forward)
-            return message
-
-    def _disp_check_moderation(self, messages):
-        for message in messages:
-            if not self._channel_id:
-                yield DelayMessageReqChannelMessage(message)
-                continue
-
-            accepted, proof = self._timeline.check(message)
-            if not accepted:
-                yield DelayMessageByProof(message)
-                continue
-
-            yield message
-
-    def _disp_on_moderation(self, messages):
-        if self.integrate_with_tribler:
-            for message in messages:
-                dispersy_id = message.packet_id
-
-                authentication_member
= message.authentication.member - if authentication_member == self._my_member: - peer_id = None - else: - peer_id = self._peer_db.addOrGetPeerID(authentication_member.public_key) - - # if cause packet is present, it is enforced by conversion - cause = message.payload.causepacket.packet_id - cause_message = message.payload.causepacket.load_message() - authentication_member = cause_message.authentication.member - if authentication_member == self._my_member: - by_peer_id = None - else: - by_peer_id = self._peer_db.addOrGetPeerID(authentication_member.public_key) - - # determine if we are reverting latest - updateTorrent = False - - modifying_dispersy_id = cause_message.payload.modification_on.packet_id - channeltorrent_id = self._get_torrent_id_from_message(modifying_dispersy_id) - if channeltorrent_id: - modification_type = unicode(cause_message.payload.modification_type) - - latest = self._get_latest_modification_from_torrent_id(channeltorrent_id, modification_type) - if not latest or latest.packet_id == cause_message.packet_id: - updateTorrent = True - - self._channelcast_db.on_moderation(self._channel_id, - dispersy_id, peer_id, - by_peer_id, cause, - message.payload.text, - message.payload.timestamp, - message.payload.severity) - - if updateTorrent: - latest = self._get_latest_modification_from_torrent_id(channeltorrent_id, modification_type) - - modification_value = latest.payload.modification_value if latest else '' - self._channelcast_db.on_torrent_modification_from_dispersy( - channeltorrent_id, modification_type, modification_value) - - def _disp_undo_moderation(self, descriptors, redo=False): - if self.integrate_with_tribler: - for _, _, packet in descriptors: - dispersy_id = packet.packet_id - self._channelcast_db.on_remove_moderation(self._channel_id, dispersy_id, redo) - - # check or receive torrent_mark messages - @call_on_reactor_thread - def _disp_create_mark_torrent(self, infohash, type, timestamp, store=True, update=True, forward=True): - meta = self.get_meta_message(u"mark_torrent") - global_time = self.claim_global_time() - current_policy, _ = self._timeline.get_resolution_policy(meta, global_time) - - message = meta.impl(authentication=(self._my_member,), - resolution=(current_policy.implement(),), - distribution=(global_time,), - payload=(infohash, type, timestamp)) - self._dispersy.store_update_forward([message], store, update, forward) - return message - - def _disp_check_mark_torrent(self, messages): - for message in messages: - if not self._channel_id: - yield DelayMessageReqChannelMessage(message) - continue - - accepted, proof = self._timeline.check(message) - if not accepted: - yield DelayMessageByProof(message) - yield message - - def _disp_on_mark_torrent(self, messages): - if self.integrate_with_tribler: - for message in messages: - dispersy_id = message.packet_id - global_time = message.distribution.global_time - - authentication_member = message.authentication.member - if authentication_member == self._my_member: - peer_id = None - else: - peer_id = self._peer_db.addOrGetPeerID(authentication_member.public_key) - self._channelcast_db.on_mark_torrent( - self._channel_id, - dispersy_id, - global_time, - peer_id, - message.payload.infohash, - message.payload.type, - message.payload.timestamp) - - def _disp_undo_mark_torrent(self, descriptors, redo=False): - if self.integrate_with_tribler: - for _, _, packet in descriptors: - dispersy_id = packet.packet_id - self._channelcast_db.on_remove_mark_torrent(self._channel_id, dispersy_id, redo) - - def 
disp_create_missing_channel(self, candidate, includeSnapshot): - logger.debug("%s sending missing-channel %s %s", candidate, self._cid.encode("HEX"), includeSnapshot) - meta = self._meta_messages[u"missing-channel"] - request = meta.impl(distribution=(self.global_time,), destination=(candidate,), payload=(includeSnapshot,)) - self._dispersy._forward([request]) - - # check or receive missing channel messages - def _disp_check_missing_channel(self, messages): - return messages - - def _disp_on_missing_channel(self, messages): - channelmessage = self._get_latest_channel_message() - packets = None - - for message in messages: - if message.payload.includeSnapshot: - if packets is None: - packets = [] - packets.append(channelmessage.packet) - - torrents = self._channelcast_db.getRandomTorrents(self._channel_id) - for infohash in torrents: - tormessage = self._get_message_from_torrent_infohash(infohash) - if tormessage: - packets.append(tormessage.packet) - - self._dispersy._send_packets([message.candidate], packets, - self, "-caused by missing-channel-response-snapshot-") - - else: - self._dispersy._send_packets([message.candidate], [channelmessage.packet], - self, "-caused by missing-channel-response-") - - def on_dynamic_settings(self, *args, **kwargs): - Community.on_dynamic_settings(self, *args, **kwargs) - if self._channel_id and self.integrate_with_tribler: - self._channelcast_db.on_dynamic_settings(self._channel_id) - - # helper functions - @warnIfNotDispersyThread - def _get_latest_channel_message(self): - channel_meta = self.get_meta_message(u"channel") - - # 1. get the packet - try: - packet, packet_id = self._dispersy.database.execute( - u"SELECT packet, id FROM sync WHERE meta_message = ? ORDER BY global_time DESC LIMIT 1", - (channel_meta.database_id,)).next() - except StopIteration: - raise RuntimeError("Could not find requested packet") - - message = self._dispersy.convert_packet_to_message(str(packet)) - if message: - assert message.name == u"channel", "Expecting a 'channel' message" - message.packet_id = packet_id - else: - raise RuntimeError("Unable to convert packet, could not find channel-message for channel %d" % - channel_meta.database_id) - - return message - - def _get_message_from_playlist_id(self, playlist_id): - assert isinstance(playlist_id, (int, long)) - - # 1. get the dispersy identifier from the channel_id - dispersy_id, _ = self._channelcast_db.getPlaylist(playlist_id, ('Playlists.dispersy_id',)) - - # 2. get the message - if dispersy_id and dispersy_id > 0: - return self._dispersy.load_message_by_packetid(self, dispersy_id) - - def _get_playlist_id_from_message(self, dispersy_id): - assert isinstance(dispersy_id, (int, long)) - return self._channelcast_db._db.fetchone(u"SELECT id FROM _Playlists WHERE dispersy_id = ?", (dispersy_id,)) - - def _get_message_from_torrent_id(self, torrent_id): - assert isinstance(torrent_id, (int, long)) - - # 1. get the dispersy identifier from the channel_id - dispersy_id = self._channelcast_db.getTorrentFromChannelTorrentId(torrent_id, ['ChannelTorrents.dispersy_id']) - - # 2. get the message - if dispersy_id and dispersy_id > 0: - return self._dispersy.load_message_by_packetid(self, dispersy_id) - - def _get_message_from_torrent_infohash(self, torrent_infohash): - assert isinstance(torrent_infohash, str), 'infohash is a %s' % type(torrent_infohash) - assert len(torrent_infohash) == 20, 'infohash has length %d' % len(torrent_infohash) - - # 1. 
get the dispersy identifier from the channel_id - dispersy_id = self._channelcast_db.getTorrentFromChannelId(self._channel_id, - torrent_infohash, - ['ChannelTorrents.dispersy_id']) - - if dispersy_id and dispersy_id > 0: - # 2. get the message - return self._dispersy.load_message_by_packetid(self, dispersy_id) - - def _get_torrent_id_from_message(self, dispersy_id): - assert isinstance(dispersy_id, (int, long)), "dispersy_id type is '%s'" % type(dispersy_id) - - return self._channelcast_db._db.fetchone(u"SELECT id FROM _ChannelTorrents WHERE dispersy_id = ?", (dispersy_id,)) - - def _get_latest_modification_from_channel_id(self, type_name): - assert isinstance(type_name, basestring), "type_name is not a basestring: %s" % repr(type_name) - - # 1. get the dispersy identifier from the channel_id - dispersy_ids = self._channelcast_db._db.fetchall( - u"SELECT dispersy_id, prev_global_time " + \ - u"FROM ChannelMetaData WHERE type = ? " + \ - u"AND channel_id = ? " + \ - u"AND id NOT IN (SELECT metadata_id FROM MetaDataTorrent) " + \ - u"AND id NOT IN (SELECT metadata_id FROM MetaDataPlaylist) " + \ - u"AND dispersy_id not in (SELECT cause FROM Moderations " + \ - u"WHERE channel_id = ?) ORDER BY prev_global_time DESC", - (type_name, self._channel_id, self._channel_id)) - return self._determine_latest_modification(dispersy_ids) - - def _get_latest_modification_from_torrent_id(self, channeltorrent_id, type_name): - assert isinstance(channeltorrent_id, (int, long)), "channeltorrent_id type is '%s'" % type(channeltorrent_id) - assert isinstance(type_name, basestring), "type_name is not a basestring: %s" % repr(type_name) - - # 1. get the dispersy identifier from the channel_id - dispersy_ids = self._channelcast_db._db.fetchall(u"SELECT dispersy_id, prev_global_time " + \ - u"FROM ChannelMetaData, MetaDataTorrent " + \ - u"WHERE ChannelMetaData.id = MetaDataTorrent.metadata_id " + \ - u"AND type = ? AND channeltorrent_id = ? " + \ - u"AND dispersy_id not in " + \ - u"(SELECT cause FROM Moderations WHERE channel_id = ?) " + \ - u"ORDER BY prev_global_time DESC", - (type_name, channeltorrent_id, self._channel_id)) - return self._determine_latest_modification(dispersy_ids) - - def _get_latest_modification_from_playlist_id(self, playlist_id, type_name): - assert isinstance(playlist_id, (int, long)), "playlist_id type is '%s'" % type(playlist_id) - assert isinstance(type_name, basestring), "type_name is not a basestring: %s" % repr(type_name) - - # 1. get the dispersy identifier from the channel_id - dispersy_ids = self._channelcast_db._db.fetchall(u"SELECT dispersy_id, prev_global_time " + \ - u"FROM ChannelMetaData, MetaDataPlaylist " + \ - u"WHERE ChannelMetaData.id = MetaDataPlaylist.metadata_id " + \ - u"AND type = ? AND playlist_id = ? " + \ - u"AND dispersy_id not in " + \ - u"(SELECT cause FROM Moderations WHERE channel_id = ?) " + \ - u"ORDER BY prev_global_time DESC", - (type_name, playlist_id, self._channel_id)) - return self._determine_latest_modification(dispersy_ids) - - @warnIfNotDispersyThread - def _determine_latest_modification(self, list): - - if len(list) > 0: - # 1. 
determine if we have a conflict - max_global_time = list[0][1] - conflicting_messages = [] - for dispersy_id, prev_global_time in list: - if prev_global_time >= max_global_time: - try: - message = self._dispersy.load_message_by_packetid(self, dispersy_id) - if message: - message = message.load_message() - conflicting_messages.append(message) - - max_global_time = prev_global_time - except RuntimeError: - pass - else: - break - - # 2. see if we have a conflict - if len(conflicting_messages) > 1: - - # 3. solve conflict using mid to sort on - def cleverSort(message_a, message_b): - public_key_a = message_a.authentication.member.public_key - public_key_b = message_a.authentication.member.public_key - - if public_key_a == public_key_b: - return cmp(message_b.distribution.global_time, message_a.distribution.global_time) - - return cmp(public_key_a, public_key_b) - - conflicting_messages.sort(cleverSort) - - if len(conflicting_messages) > 0: - # 4. return first message - return conflicting_messages[0] - - @warnIfNotDispersyThread - def _get_packet_id(self, global_time, mid): - if global_time and mid: - try: - packet_id, = self._dispersy.database.execute(u""" - SELECT sync.id - FROM sync - JOIN member ON (member.id = sync.member) - JOIN meta_message ON (meta_message.id = sync.meta_message) - WHERE sync.community = ? AND sync.global_time = ? AND member.mid = ?""", - (self.database_id, global_time, buffer(mid))).next() - except StopIteration: - pass - return packet_id diff --git a/Tribler/community/channel/conversion.py b/Tribler/community/channel/conversion.py deleted file mode 100644 index 7ddd6674f00..00000000000 --- a/Tribler/community/channel/conversion.py +++ /dev/null @@ -1,448 +0,0 @@ -import zlib -from random import sample -from struct import pack, unpack_from - -from Tribler.Core.Utilities.tracker_utils import get_uniformed_tracker_url -from Tribler.dispersy.conversion import BinaryConversion -from Tribler.dispersy.message import DropPacket, Packet, DelayPacketByMissingMessage, DelayPacketByMissingMember -from Tribler.pyipv8.ipv8.messaging.deprecated.encoding import encode, decode - -DEBUG = False - - -class ChannelConversion(BinaryConversion): - - def __init__(self, community): - super(ChannelConversion, self).__init__(community, "\x01") - self.define_meta_message(chr(1), community.get_meta_message(u"channel"), - self._encode_channel, - self._decode_channel) - self.define_meta_message(chr(2), community.get_meta_message(u"torrent"), - self._encode_torrent, - self._decode_torrent) - self.define_meta_message(chr(3), community.get_meta_message(u"playlist"), - self._encode_playlist, - self._decode_playlist) - self.define_meta_message(chr(4), community.get_meta_message(u"comment"), - self._encode_comment, - self._decode_comment) - self.define_meta_message(chr(5), - community.get_meta_message(u"modification"), - self._encode_modification, - self._decode_modification) - self.define_meta_message(chr(6), - community.get_meta_message(u"playlist_torrent"), - self._encode_playlist_torrent, - self._decode_playlist_torrent) - self.define_meta_message(chr(7), - community.get_meta_message(u"missing-channel"), - self._encode_missing_channel, - self._decode_missing_channel) - self.define_meta_message(chr(8), - community.get_meta_message(u"moderation"), - self._encode_moderation, - self._decode_moderation) - self.define_meta_message(chr(9), community.get_meta_message(u"mark_torrent"), - self._encode_mark_torrent, - self._decode_mark_torrent) - - def _encode_channel(self, message): - return 
encode((message.payload.name, message.payload.description)), - - def _decode_channel(self, placeholder, offset, data): - try: - offset, values = decode(data, offset) - if len(values) != 2: - raise ValueError - except ValueError: - raise DropPacket("Unable to decode the channel-payload") - - name = values[0] - if not (isinstance(name, unicode) and len(name) < 256): - raise DropPacket("Invalid 'name' type or value") - - description = values[1] - if not (isinstance(description, unicode) and len(description) < 1024): - raise DropPacket("Invalid 'description' type or value") - - return offset, placeholder.meta.payload.implement(name, description) - - def _encode_playlist(self, message): - return self._encode_channel(message) - - def _decode_playlist(self, placeholder, offset, data): - return self._decode_channel(placeholder, offset, data) - - def _encode_torrent(self, message): - files = message.payload.files - trackers = list(message.payload.trackers) - name = message.payload.name - - # Filter out invalid trackers - for tracker in trackers: - if not get_uniformed_tracker_url(tracker) or len(tracker) > 200: - trackers.remove(tracker) - - # files is a tuple of tuples (actually a list in tuple form) - max_len = self._community.dispersy_sync_bloom_filter_bits / 8 - base_len = 20 + 8 + len(name) # infohash, timestamp, name - tracker_len = sum([len(tracker) for tracker in trackers]) - file_len = sum([len(f[0]) + 8 for f in files]) # file name, length - # Check if the message fits in the bloomfilter - if (base_len + tracker_len + file_len > max_len) and (len(trackers) > 10): - # only use first 10 trackers, .torrents in the wild have been seen to have 1000+ trackers... - trackers = trackers[:10] - tracker_len = sum([len(tracker) for tracker in trackers]) - if base_len + tracker_len + file_len > max_len: - # reduce files by the amount we are currently to big - reduce_by = max_len / (base_len + tracker_len + file_len * 1.0) - nr_files_to_include = int(len(files) * reduce_by) - files = sample(files, nr_files_to_include) - - normal_msg = (pack('!20sQ', message.payload.infohash, message.payload.timestamp), message.payload.name, - tuple(files), tuple(trackers)) - - return zlib.compress(encode(normal_msg)), - - def _decode_torrent(self, placeholder, offset, data): - try: - uncompressed_data = zlib.decompress(data[offset:]) - except zlib.error: - raise DropPacket("Invalid zlib data") - offset = len(data) - - try: - _, values = decode(uncompressed_data) - except ValueError: - raise DropPacket("Unable to decode the torrent-payload") - - infohash_time, name, files, trackers = values - if len(infohash_time) != 28: - raise DropPacket("Unable to decode the torrent-payload, got %d bytes expected 28" % (len(infohash_time))) - infohash, timestamp = unpack_from('!20sQ', infohash_time) - - if not isinstance(name, unicode): - raise DropPacket("Invalid 'name' type") - - if not isinstance(files, tuple): - raise DropPacket("Invalid 'files' type") - - if len(files) == 0: - raise DropPacket("Should have at least one file") - - for file in files: - if len(file) != 2: - raise DropPacket("Invalid 'file_len' type") - - path, length = file - if not isinstance(path, unicode): - raise DropPacket("Invalid 'files_path' type is %s" % type(path)) - if not isinstance(length, (int, long)): - raise DropPacket("Invalid 'files_length' type is %s" % type(length)) - - if not isinstance(trackers, tuple): - raise DropPacket("Invalid 'trackers' type") - for tracker in trackers: - if not isinstance(tracker, str): - raise DropPacket("Invalid 
'tracker' type") - - return offset, placeholder.meta.payload.implement(infohash, timestamp, name, files, trackers) - - def _encode_comment(self, message): - dict = {"text": message.payload.text, - "timestamp": message.payload.timestamp} - - playlist_packet = message.payload.playlist_packet - infohash = message.payload.infohash - - if message.payload.reply_to_mid: - dict["reply-to-mid"] = message.payload.reply_to_mid - dict["reply-to-global-time"] = message.payload.reply_to_global_time - - if message.payload.reply_after_mid: - dict["reply-after-mid"] = message.payload.reply_after_mid - dict["reply-after-global-time"] = message.payload.reply_after_global_time - - if playlist_packet: - message = playlist_packet.load_message() - dict["playlist-mid"] = message.authentication.member.mid - dict["playlist-global-time"] = message.distribution.global_time - - if infohash: - dict['infohash'] = infohash - return encode(dict), - - def _decode_comment(self, placeholder, offset, data): - try: - offset, dic = decode(data, offset) - except ValueError: - raise DropPacket("Unable to decode the payload") - - if not "text" in dic: - raise DropPacket("Missing 'text'") - text = dic["text"] - if not (isinstance(text, unicode) and len(text) < 1024): - raise DropPacket("Invalid 'text' type or value") - - if not "timestamp" in dic: - raise DropPacket("Missing 'timestamp'") - timestamp = dic["timestamp"] - if not isinstance(timestamp, (int, long)): - raise DropPacket("Invalid 'timestamp' type or value") - - reply_to_mid = dic.get("reply-to-mid", None) - if reply_to_mid and not (isinstance(reply_to_mid, str) and len(reply_to_mid) == 20): - raise DropPacket("Invalid 'reply-to-mid' type or value") - - reply_to_global_time = dic.get("reply-to-global-time", None) - if reply_to_global_time and not isinstance(reply_to_global_time, (int, long)): - raise DropPacket("Invalid 'reply-to-global-time' type") - - reply_after_mid = dic.get("reply-after-mid", None) - if reply_after_mid and not (isinstance(reply_after_mid, str) and len(reply_after_mid) == 20): - raise DropPacket("Invalid 'reply-after-mid' type or value") - - reply_after_global_time = dic.get("reply-after-global-time", None) - if reply_after_global_time and not isinstance(reply_after_global_time, (int, long)): - raise DropPacket("Invalid 'reply-after-global-time' type") - - playlist_mid = dic.get("playlist-mid", None) - if playlist_mid and not (isinstance(playlist_mid, str) and len(playlist_mid) == 20): - raise DropPacket("Invalid 'playlist-mid' type or value") - - playlist_global_time = dic.get("playlist-global-time", None) - if playlist_global_time and not isinstance(playlist_global_time, (int, long)): - raise DropPacket("Invalid 'playlist-global-time' type") - - if playlist_mid and playlist_global_time: - try: - packet_id, packet, message_name = self._get_message(playlist_global_time, playlist_mid) - playlist = Packet(self._community.get_meta_message(message_name), packet, packet_id) - except DropPacket: - member = self._community.get_member(mid=playlist_mid) - if not member: - raise DelayPacketByMissingMember(self._community, playlist_mid) - raise DelayPacketByMissingMessage(self._community, member, playlist_global_time) - else: - playlist = None - - infohash = dic.get("infohash", None) - if infohash and not (isinstance(infohash, str) and len(infohash) == 20): - raise DropPacket("Invalid 'infohash' type or value") - return offset, placeholder.meta.payload.implement(text, timestamp, reply_to_mid, reply_to_global_time, reply_after_mid, reply_after_global_time, 
playlist, infohash) - - def _encode_moderation(self, message): - dict = {"text": message.payload.text, - "timestamp": message.payload.timestamp, - "severity": message.payload.severity} - - dict["cause-mid"] = message.payload.cause_mid - dict["cause-global-time"] = message.payload.cause_global_time - return encode(dict), - - def _decode_moderation(self, placeholder, offset, data): - try: - offset, dic = decode(data, offset) - except ValueError: - raise DropPacket("Unable to decode the payload") - - if not "text" in dic: - raise DropPacket("Missing 'text'") - text = dic["text"] - if not (isinstance(text, unicode) and len(text) < 1024): - raise DropPacket("Invalid 'text' type or value") - - if not "timestamp" in dic: - raise DropPacket("Missing 'timestamp'") - timestamp = dic["timestamp"] - if not isinstance(timestamp, (int, long)): - raise DropPacket("Invalid 'timestamp' type or value") - - if not "severity" in dic: - raise DropPacket("Missing 'severity'") - severity = dic["severity"] - if not isinstance(severity, (int, long)): - raise DropPacket("Invalid 'severity' type or value") - - cause_mid = dic.get("cause-mid", None) - if not (isinstance(cause_mid, str) and len(cause_mid) == 20): - raise DropPacket("Invalid 'cause-mid' type or value") - - cause_global_time = dic.get("cause-global-time", None) - if not isinstance(cause_global_time, (int, long)): - raise DropPacket("Invalid 'cause-global-time' type") - - try: - packet_id, packet, message_name = self._get_message(cause_global_time, cause_mid) - cause_packet = Packet(self._community.get_meta_message(message_name), packet, packet_id) - - except DropPacket: - member = self._community.get_member(mid=cause_mid) - if not member: - raise DelayPacketByMissingMember(self._community, cause_mid) - raise DelayPacketByMissingMessage(self._community, member, cause_global_time) - - return offset, placeholder.meta.payload.implement(text, timestamp, severity, cause_packet) - - def _encode_mark_torrent(self, message): - dict = {"infohash": message.payload.infohash, - "timestamp": message.payload.timestamp, - "type": message.payload.type} - - return encode(dict), - - def _decode_mark_torrent(self, placeholder, offset, data): - try: - offset, dic = decode(data, offset) - except ValueError: - raise DropPacket("Unable to decode the payload") - - if not "infohash" in dic: - raise DropPacket("Missing 'infohash'") - infohash = dic["infohash"] - if not (isinstance(infohash, str) and len(infohash) == 20): - raise DropPacket("Invalid 'infohash' type or value") - - if not "timestamp" in dic: - raise DropPacket("Missing 'timestamp'") - timestamp = dic["timestamp"] - if not isinstance(timestamp, (int, long)): - raise DropPacket("Invalid 'timestamp' type or value") - - if not "type" in dic: - raise DropPacket("Missing 'type'") - type = dic["type"] - if not (isinstance(type, unicode) and len(type) < 25): - raise DropPacket("Invalid 'type' type or value") - - return offset, placeholder.meta.payload.implement(infohash, type, timestamp) - - def _encode_modification(self, message): - modification_on = message.payload.modification_on.load_message() - dict = {"modification-type": message.payload.modification_type, - "modification-value": message.payload.modification_value, - "timestamp": message.payload.timestamp, - "modification-on-mid": modification_on.authentication.member.mid, - "modification-on-global-time": modification_on.distribution.global_time} - - prev_modification = message.payload.prev_modification_packet - if prev_modification: - message = 
prev_modification.load_message() - dict["prev-modification-mid"] = message.authentication.member.mid - dict["prev-modification-global-time"] = message.distribution.global_time - - return encode(dict), - - def _decode_modification(self, placeholder, offset, data): - try: - offset, dic = decode(data, offset) - except ValueError: - raise DropPacket("Unable to decode the payload") - - if not "modification-type" in dic: - raise DropPacket("Missing 'modification-type'") - modification_type = dic["modification-type"] - if not isinstance(modification_type, unicode): - raise DropPacket("Invalid 'modification_type' type") - - if not "modification-value" in dic: - raise DropPacket("Missing 'modification-value'") - modification_value = dic["modification-value"] - if not (isinstance(modification_value, unicode) and len(modification_value) < 1024): - raise DropPacket("Invalid 'modification_value' type or value") - - if not "timestamp" in dic: - raise DropPacket("Missing 'timestamp'") - timestamp = dic["timestamp"] - if not isinstance(timestamp, (int, long)): - raise DropPacket("Invalid 'timestamp' type or value") - - if not "modification-on-mid" in dic: - raise DropPacket("Missing 'modification-on-mid'") - modification_on_mid = dic["modification-on-mid"] - if not (isinstance(modification_on_mid, str) and len(modification_on_mid) == 20): - raise DropPacket("Invalid 'modification-on-mid' type or value") - - if not "modification-on-global-time" in dic: - raise DropPacket("Missing 'modification-on-global-time'") - modification_on_global_time = dic["modification-on-global-time"] - if not isinstance(modification_on_global_time, (int, long)): - raise DropPacket("Invalid 'modification-on-global-time' type") - - try: - packet_id, packet, message_name = self._get_message(modification_on_global_time, modification_on_mid) - modification_on = Packet(self._community.get_meta_message(message_name), packet, packet_id) - except DropPacket: - member = self._community.get_member(mid=modification_on_mid) - if not member: - raise DelayPacketByMissingMember(self._community, modification_on_mid) - raise DelayPacketByMissingMessage(self._community, member, modification_on_global_time) - - prev_modification_mid = dic.get("prev-modification-mid", None) - if prev_modification_mid and not (isinstance(prev_modification_mid, str) and len(prev_modification_mid) == 20): - raise DropPacket("Invalid 'prev-modification-mid' type or value") - - prev_modification_global_time = dic.get("prev-modification-global-time", None) - if prev_modification_global_time and not isinstance(prev_modification_global_time, (int, long)): - raise DropPacket("Invalid 'prev-modification-global-time' type") - - try: - packet_id, packet, message_name = self._get_message(prev_modification_global_time, prev_modification_mid) - prev_modification_packet = Packet(self._community.get_meta_message(message_name), packet, packet_id) - except: - prev_modification_packet = None - - return offset, placeholder.meta.payload.implement(modification_type, modification_value, timestamp, modification_on, prev_modification_packet, prev_modification_mid, prev_modification_global_time) - - def _encode_playlist_torrent(self, message): - playlist = message.payload.playlist.load_message() - return pack('!20s20sQ', message.payload.infohash, playlist.authentication.member.mid, playlist.distribution.global_time), - - def _decode_playlist_torrent(self, placeholder, offset, data): - if len(data) < offset + 48: - raise DropPacket("Unable to decode the payload") - - infohash, playlist_mid, 
playlist_global_time = unpack_from('!20s20sQ', data, offset) - try: - packet_id, packet, message_name = self._get_message(playlist_global_time, playlist_mid) - - except DropPacket: - member = self._community.dispersy.get_member(mid=playlist_mid) - if not member: - raise DelayPacketByMissingMember(self._community, playlist_mid) - raise DelayPacketByMissingMessage(self._community, member, playlist_global_time) - - playlist = Packet(self._community.get_meta_message(message_name), packet, packet_id) - return offset + 48, placeholder.meta.payload.implement(infohash, playlist) - - def _get_message(self, global_time, mid): - assert isinstance(global_time, (int, long)) - assert isinstance(mid, str) - assert len(mid) == 20 - if global_time and mid: - try: - packet_id, packet, message_name = self._community.dispersy.database.execute( - u""" SELECT sync.id, sync.packet, meta_message.name - FROM sync - JOIN member ON (member.id = sync.member) - JOIN meta_message ON (meta_message.id = sync.meta_message) - WHERE sync.community = ? AND sync.global_time = ? AND member.mid = ?""", - (self._community.database_id, global_time, buffer(mid))).next() - except StopIteration: - raise DropPacket("Missing message") - - return packet_id, str(packet), message_name - - def _encode_missing_channel(self, message): - return pack('!B', int(message.payload.includeSnapshot)), - - def _decode_missing_channel(self, placeholder, offset, data): - if len(data) < offset + 1: - raise DropPacket("Unable to decode the payload") - - includeSnapshot, = unpack_from('!B', data, offset) - if not (includeSnapshot == 0 or includeSnapshot == 1): - raise DropPacket("Unable to decode includeSnapshot") - includeSnapshot = bool(includeSnapshot) - - return offset + 1, placeholder.meta.payload.implement(includeSnapshot) diff --git a/Tribler/community/channel/message.py b/Tribler/community/channel/message.py deleted file mode 100644 index f3dded93d45..00000000000 --- a/Tribler/community/channel/message.py +++ /dev/null @@ -1,21 +0,0 @@ -from Tribler.dispersy.message import DelayMessage - - -class DelayMessageReqChannelMessage(DelayMessage): - """ - Raised during ChannelCommunity.check_ if the channel message has not been received yet. 
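For reference, the missing-channel message deleted above is the simplest wire format in this conversion: one network-order byte that must be 0 or 1. A minimal, self-contained sketch of the same encode/decode contract (plain functions rather than Tribler's conversion classes):

    from struct import pack, unpack_from

    def encode_missing_channel(include_snapshot):
        # One unsigned byte in network byte order, restricted to 0 or 1.
        return pack('!B', 1 if include_snapshot else 0)

    def decode_missing_channel(data, offset=0):
        if len(data) < offset + 1:
            raise ValueError("packet too short")
        flag, = unpack_from('!B', data, offset)
        if flag not in (0, 1):
            raise ValueError("includeSnapshot must be 0 or 1")
        return offset + 1, bool(flag)

    assert decode_missing_channel(encode_missing_channel(True)) == (1, True)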
- """ - - def __init__(self, delayed, includeSnapshot=False): - super(DelayMessageReqChannelMessage, self).__init__(delayed) - if __debug__: - from Tribler.dispersy.message import Message - assert isinstance(delayed, Message.Implementation) - self._includeSnapshot = includeSnapshot - - @property - def match_info(self): - return (self._cid, u"channel", None, None, []), - - def send_request(self, community, candidate): - self._community.disp_create_missing_channel(candidate, self._includeSnapshot) diff --git a/Tribler/community/channel/payload.py b/Tribler/community/channel/payload.py deleted file mode 100644 index 01ea412eb18..00000000000 --- a/Tribler/community/channel/payload.py +++ /dev/null @@ -1,325 +0,0 @@ -from Tribler.dispersy.message import Packet -from Tribler.dispersy.payload import Payload - - -class ChannelPayload(Payload): - - class Implementation(Payload.Implementation): - - def __init__(self, meta, name, description): - assert isinstance(name, unicode) - assert len(name) < 256 - assert isinstance(description, unicode) - assert len(description) < 1024 - super(ChannelPayload.Implementation, self).__init__(meta) - self._name = name - self._description = description - - @property - def name(self): - return self._name - - @property - def description(self): - return self._description - - -class PlaylistPayload(ChannelPayload): - pass - - -class TorrentPayload(Payload): - - class Implementation(Payload.Implementation): - - def __init__(self, meta, infohash, timestamp, name, files, trackers): - assert isinstance(infohash, str), 'infohash is a %s' % type(infohash) - assert len(infohash) == 20, 'infohash has length %d' % len(infohash) - assert isinstance(timestamp, (int, long)) - - assert isinstance(name, unicode) - assert isinstance(files, tuple) - for path, length in files: - assert isinstance(path, unicode) - assert isinstance(length, (int, long)) - - assert isinstance(trackers, tuple) - for tracker in trackers: - assert isinstance(tracker, str), 'tracker is a %s' % type(tracker) - - super(TorrentPayload.Implementation, self).__init__(meta) - self._infohash = infohash - self._timestamp = timestamp - self._name = name - self._files = files - self._trackers = trackers - - @property - def infohash(self): - return self._infohash - - @property - def timestamp(self): - return self._timestamp - - @property - def name(self): - return self._name - - @property - def files(self): - return self._files - - @property - def trackers(self): - return self._trackers - - -class CommentPayload(Payload): - - class Implementation(Payload.Implementation): - - def __init__(self, meta, text, timestamp, reply_to_mid, reply_to_global_time, reply_after_mid, - reply_after_global_time, playlist_packet, infohash): - assert isinstance(text, unicode) - assert len(text) < 1024 - assert isinstance(timestamp, (int, long)) - - assert not reply_to_mid or isinstance(reply_to_mid, str), 'reply_to_mid is a %s' % type(reply_to_mid) - assert not reply_to_mid or len(reply_to_mid) == 20, 'reply_to_mid has length %d' % len(reply_to_mid) - assert not reply_to_global_time or isinstance(reply_to_global_time, ( - int, long)), 'reply_to_global_time is a %s' % type(reply_to_global_time) - - assert not reply_after_mid or isinstance( - reply_after_mid, str), 'reply_after_mid is a %s' % type(reply_after_mid) - assert not reply_after_mid or len( - reply_after_mid) == 20, 'reply_after_mid has length %d' % len(reply_after_global_time) - assert not reply_after_global_time or isinstance(reply_after_global_time, ( - int, long)), 
'reply_after_global_time is a %s' % type(reply_to_global_time) - - assert not playlist_packet or isinstance(playlist_packet, Packet) - - assert not infohash or isinstance(infohash, str), 'infohash is a %s' % type(infohash) - assert not infohash or len(infohash) == 20, 'infohash has length %d' % len(infohash) - - super(CommentPayload.Implementation, self).__init__(meta) - self._text = text - self._timestamp = timestamp - self._reply_to_mid = reply_to_mid - self._reply_to_global_time = reply_to_global_time - - self._reply_after_mid = reply_after_mid - self._reply_after_global_time = reply_after_global_time - - self._playlist_packet = playlist_packet - self._infohash = infohash - - @property - def text(self): - return self._text - - @property - def timestamp(self): - return self._timestamp - - @property - def reply_to_mid(self): - return self._reply_to_mid - - @property - def reply_to_global_time(self): - return self._reply_to_global_time - - @property - def reply_after_mid(self): - return self._reply_after_mid - - @property - def reply_after_global_time(self): - return self._reply_after_global_time - - @property - def playlist_packet(self): - return self._playlist_packet - - @property - def infohash(self): - return self._infohash - - -class ModerationPayload(Payload): - - class Implementation(Payload.Implementation): - - def __init__(self, meta, text, timestamp, severity, causepacket): - - assert isinstance(causepacket, Packet) - - assert isinstance(text, unicode) - assert len(text) < 1024 - assert isinstance(timestamp, (int, long)) - assert isinstance(severity, (int, long)) - - super(ModerationPayload.Implementation, self).__init__(meta) - self._text = text - self._timestamp = timestamp - self._severity = severity - self._causepacket = causepacket - - message = causepacket.load_message() - self._mid = message.authentication.member.mid - self._global_time = message.distribution.global_time - - @property - def text(self): - return self._text - - @property - def timestamp(self): - return self._timestamp - - @property - def severity(self): - return self._severity - - @property - def causepacket(self): - return self._causepacket - - @property - def cause_mid(self): - return self._mid - - @property - def cause_global_time(self): - return self._global_time - - -class MarkTorrentPayload(Payload): - - class Implementation(Payload.Implementation): - - def __init__(self, meta, infohash, type_str, timestamp): - assert isinstance(infohash, str), 'infohash is a %s' % type(infohash) - assert len(infohash) == 20, 'infohash has length %d' % len(infohash) - - assert isinstance(type_str, unicode) - assert len(type_str) < 25 - assert isinstance(timestamp, (int, long)) - - super(MarkTorrentPayload.Implementation, self).__init__(meta) - self._infohash = infohash - self._type = type_str - self._timestamp = timestamp - - @property - def infohash(self): - return self._infohash - - @property - def type(self): - return self._type - - @property - def timestamp(self): - return self._timestamp - - -class ModificationPayload(Payload): - - class Implementation(Payload.Implementation): - - def __init__(self, meta, modification_type, modification_value, timestamp, modification_on, prev_modification_packet, prev_modification_mid, prev_modification_global_time): - assert isinstance(modification_type, unicode) - assert modification_value is not None - assert isinstance(modification_value, unicode) - assert len(modification_value) < 1024 - assert isinstance(modification_on, Packet) - - assert not prev_modification_packet or 
isinstance(prev_modification_packet, Packet) - assert not prev_modification_mid or isinstance( - prev_modification_mid, str), 'prev_modification_mid is a %s' % type(prev_modification_mid) - assert not prev_modification_mid or len( - prev_modification_mid) == 20, 'prev_modification_mid has length %d' % len(prev_modification_mid) - assert not prev_modification_global_time or isinstance(prev_modification_global_time, ( - int, long)), 'prev_modification_global_time is a %s' % type(prev_modification_global_time) - - super(ModificationPayload.Implementation, self).__init__(meta) - self._modification_type = modification_type - self._modification_value = modification_value - self._timestamp = timestamp - - self._modification_on = modification_on - - self._prev_modification_packet = prev_modification_packet - self._prev_modification_mid = prev_modification_mid - self._prev_modification_global_time = prev_modification_global_time - - @property - def modification_type(self): - return self._modification_type - - @property - def modification_value(self): - return self._modification_value - - @property - def timestamp(self): - return self._timestamp - - @property - def modification_on(self): - return self._modification_on - - @property - def prev_modification_packet(self): - return self._prev_modification_packet - - @property - def prev_modification_id(self): - if self._prev_modification_mid and self._prev_modification_global_time: - return "%s@%d" % (self._prev_modification_mid, self._prev_modification_global_time) - - @property - def prev_modification_mid(self): - return self._prev_modification_mid - - @property - def prev_modification_global_time(self): - return self._prev_modification_global_time - - -class PlaylistTorrentPayload(Payload): - - class Implementation(Payload.Implementation): - - def __init__(self, meta, infohash, playlist): - assert isinstance(infohash, str), 'infohash is a %s' % type(infohash) - assert len(infohash) == 20, 'infohash has length %d' % len(infohash) - assert isinstance(playlist, Packet), type(playlist) - super(PlaylistTorrentPayload.Implementation, self).__init__(meta) - self._infohash = infohash - self._playlist = playlist - - @property - def infohash(self): - return self._infohash - - @property - def playlist(self): - return self._playlist - - -class MissingChannelPayload(Payload): - - class Implementation(Payload.Implementation): - - def __init__(self, meta, includeSnapshot=False): - assert isinstance(includeSnapshot, bool), 'includeSnapshot is a %s' % type(includeSnapshot) - super(MissingChannelPayload.Implementation, self).__init__(meta) - - self._includeSnapshot = includeSnapshot - - @property - def includeSnapshot(self): - return self._includeSnapshot diff --git a/Tribler/community/channel/preview.py b/Tribler/community/channel/preview.py deleted file mode 100644 index 6f9bbbad6d8..00000000000 --- a/Tribler/community/channel/preview.py +++ /dev/null @@ -1,25 +0,0 @@ -from time import time - -from Tribler.community.channel.community import ChannelCommunity - - -class PreviewChannelCommunity(ChannelCommunity): - """ - The PreviewChannelCommunity extends the ChannelCommunity to allow ChannelCommunity messages to - be decoded while not actually joining or participating in an actual ChannelCommunity. 
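The deleted payload classes all follow one defensive pattern: validate every field with asserts in __init__, store it on a private attribute, and expose it through a read-only property. A stripped-down illustration of that pattern, using MarkTorrentPayload's fields (no Dispersy base classes, so this is a sketch rather than the real Payload.Implementation):

    class MarkTorrentData(object):
        """Illustrative stand-in for MarkTorrentPayload.Implementation."""

        def __init__(self, infohash, type_str, timestamp):
            assert isinstance(infohash, bytes) and len(infohash) == 20, 'invalid infohash'
            assert isinstance(type_str, type(u'')) and len(type_str) < 25, 'invalid type'
            assert isinstance(timestamp, int), 'invalid timestamp'
            self._infohash = infohash
            self._type = type_str
            self._timestamp = timestamp

        @property
        def infohash(self):
            return self._infohash

        @property
        def type(self):
            return self._type

        @property
        def timestamp(self):
            return self._timestamp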
- """ - - def __init__(self, *args, **kargs): - super(PreviewChannelCommunity, self).__init__(*args, **kargs) - self.init_timestamp = time() - - @property - def dispersy_enable_bloom_filter_sync(self): - return False - - @property - def dispersy_enable_candidate_walker(self): - return False - - def get_channel_mode(self): - return ChannelCommunity.CHANNEL_CLOSED, False diff --git a/Tribler/community/gigachannel/community.py b/Tribler/community/gigachannel/community.py index d56813380bb..4524afafd69 100644 --- a/Tribler/community/gigachannel/community.py +++ b/Tribler/community/gigachannel/community.py @@ -1,32 +1,36 @@ -from time import time +from __future__ import absolute_import + +from binascii import unhexlify from pony.orm import db_session -from Tribler.community.gigachannel.payload import TruncatedChannelPayload, TruncatedChannelPlayloadBlob -from Tribler.Core.Modules.MetadataStore.OrmBindings.channel_metadata import CHANNEL_DIR_NAME_LENGTH +from Tribler.Core.Modules.MetadataStore.OrmBindings.channel_metadata import entries_to_chunk +from Tribler.Core.Modules.MetadataStore.serialization import CHANNEL_TORRENT +from Tribler.Core.Modules.MetadataStore.store import GOT_NEWER_VERSION from Tribler.pyipv8.ipv8.community import Community from Tribler.pyipv8.ipv8.lazy_community import lazy_wrapper +from Tribler.pyipv8.ipv8.messaging.payload import Payload from Tribler.pyipv8.ipv8.messaging.payload_headers import BinMemberAuthenticationPayload from Tribler.pyipv8.ipv8.peer import Peer -from Tribler.pyipv8.ipv8.requestcache import NumberCache, RequestCache +minimal_blob_size = 200 +maximum_payload_size = 1024 +max_entries = maximum_payload_size // minimal_blob_size -class ChannelDownloadCache(NumberCache): - """ - Token for channel downloads. - This token is held for a maximum of 10 seconds or until the current download finishes. - """ +class RawBlobPayload(Payload): + format_list = ['raw'] - def __init__(self, request_cache): - super(ChannelDownloadCache, self).__init__(request_cache, u"channel-download-cache", 0) + def __init__(self, raw_blob): + super(RawBlobPayload, self).__init__() + self.raw_blob = raw_blob - @property - def timeout_delay(self): - return 10.0 + def to_pack_list(self): + return [('raw', self.raw_blob)] - def on_timeout(self): - pass + @classmethod + def from_unpack_list(cls, raw_blob): + return RawBlobPayload(raw_blob) class GigaChannelCommunity(Community): @@ -34,155 +38,73 @@ class GigaChannelCommunity(Community): Community to gossip around gigachannels. 
""" - master_peer = Peer("3081a7301006072a8648ce3d020106052b81040027038192000400118911f5102bac4fca2d6ee5c3cb41978a4b657" - "e9707ce2031685c7face02bb3bf42b74a47c1d2c5f936ea2fa2324af12de216abffe01f10f97680e8fe548b82dedf" - "362eb29d3b074187bcfbce6869acb35d8bcef3bb8713c9e9c3b3329f59ff3546c3cd560518f03009ca57895a5421b" - "4afc5b90a59d2096b43eb22becfacded111e84d605a01e91a600e2b55a79d".decode('hex')) + master_peer = Peer(unhexlify("3081a7301006072a8648ce3d020106052b8104002703819200040448a078b597b62d3761a061872cd86" + "10f58cb513f1dc21e66dd59f1e01d582f633b182d9ca6e5859a9a34e61eb77b768e5e9202f642fd50c6" + "0b89d8d8b0bdc355cdf8caac262f6707c80da00b1bcbe7bf91ed5015e5163a76a2b2e630afac96925f5" + "daa8556605043c6da4db7d26113cba9f9cbe63fddf74625117598317e05cb5b8cbd606d0911683570ad" + "bb921c91")) + + NEWS_PUSH_MESSAGE = 1 - def __init__(self, my_peer, endpoint, network, tribler_session): + def __init__(self, my_peer, endpoint, network, metadata_store): super(GigaChannelCommunity, self).__init__(my_peer, endpoint, network) - self.tribler_session = tribler_session - self.download_queue = [] - self.request_cache = RequestCache() + self.metadata_store = metadata_store + self.auth = BinMemberAuthenticationPayload(self.my_peer.public_key.key_to_bin()).to_pack_list() self.decode_map.update({ - chr(1): self.on_truncated_blob + chr(self.NEWS_PUSH_MESSAGE): self.on_blob }) - def get_random_entries(self): - """ - Fetch some random entries from our subscribed channels. - - :return: the truncated payloads to share with other peers - :rtype: [TruncatedChannelPayload] - """ - out = [] - with db_session: - for channel in self.tribler_session.lm.mds.ChannelMetadata.get_random_channels(7): - out.append(TruncatedChannelPayload(str(channel.infohash), str(channel.title), - str(channel.public_key[10:]), int(channel.version))) - return out - def send_random_to(self, peer): """ Send random entries from our subscribed channels to another peer. :param peer: the peer to send to :type peer: Peer - :returs: None - """ - entries = self.get_random_entries() - if entries: - payload = TruncatedChannelPlayloadBlob(entries).to_pack_list() - auth = BinMemberAuthenticationPayload(self.my_peer.public_key.key_to_bin()).to_pack_list() - self.endpoint.send(peer.address, self._ez_pack(self._prefix, 1, [auth, payload])) - - @lazy_wrapper(TruncatedChannelPlayloadBlob) - def on_truncated_blob(self, peer, blob): - """ - Callback for when a TruncatedChannelPlayloadBlob message comes in. - - :param peer: the peer that sent us the blob - :type peer: Peer - :param blob: the truncated channel message - :type blob: TruncatedChannelPlayloadBlob :returns: None """ - for truncated_channel in blob.payload_list: - # The database stores the long format of the keys - longpk = "LibNaCLPK:" + truncated_channel.public_key - if truncated_channel.infohash not in self.download_queue: - with db_session: - channel = self.tribler_session.lm.mds.ChannelMetadata.get_channel_with_id(longpk) - if not channel: - # Insert a new channel entry into the database. - # We set the version to 0 so that we receive the up-to-date information later. - self.tribler_session.lm.mds.ChannelMetadata.from_dict({ - 'infohash': truncated_channel.infohash, - 'public_key': longpk, - 'title': truncated_channel.title, - 'version': 0 - }) - self.download_queue.append(truncated_channel.infohash) - elif truncated_channel.version > channel.local_version: - # The sent version is newer than the one we have, queue the download. 
- channel.infohash = truncated_channel.infohash - self.download_queue.append(truncated_channel.infohash) - # We don't update anything if the channel version is older than the one we know. - - def update_from_download(self, download): - """ - Given a channel download, update the amount of votes. - - :param download: the channel download to inspect - :type download: LibtorrentDownloadImpl - :returns: None - """ - infohash = download.tdef.get_infohash() + # Choose some random entries and try to pack them into maximum_payload_size bytes + md_list = [] with db_session: - channel = self.tribler_session.lm.mds.ChannelMetadata.get_channel_with_infohash(infohash) - if channel: - channel.votes = download.get_num_connected_seeds_peers()[0] - else: - # We have an older version in our list, decide what to do with it - my_key_hex = str(self.tribler_session.lm.mds.my_key.pub().key_to_bin()).encode('hex') - dirname = my_key_hex[-CHANNEL_DIR_NAME_LENGTH:] - if download.tdef.get_name() != dirname or time() - download.tdef.get_creation_date() > 604800: - # This is not our channel or more than a week old version of our channel: delete it - self.logger.debug("Removing old channel version %s", infohash.encode('hex')) - self.tribler_session.remove_download(download) - - def download_completed(self, download): - """ - Callback for when a channel download finished. - - :param download: the channel download which completed - :type download: LibtorrentDownloadImpl - :returns: None - """ - if self.request_cache.has(u"channel-download-cache", 0): - self.request_cache.pop(u"channel-download-cache", 0) - self.update_from_download(download) - - def update_states(self, states_list): + # TODO: when the health table will be there, send popular torrents instead + channel_l = self.metadata_store.ChannelMetadata.get_random_channels(1, only_subscribed=True)[:] + if not channel_l: + return + channel = channel_l[0] + md_list.append(channel) + md_list.extend(list(channel.get_random_torrents(max_entries - 1))) + blob = entries_to_chunk(md_list, maximum_payload_size)[0] if md_list else None + self.endpoint.send(peer.address, self._ez_pack(self._prefix, self.NEWS_PUSH_MESSAGE, + [self.auth, RawBlobPayload(blob).to_pack_list()])) + + @lazy_wrapper(RawBlobPayload) + def on_blob(self, peer, blob): """ - Callback for when the download states are updated in Tribler. - We still need to filter out the channel downloads from this list. + Callback for when a MetadataBlob message comes in. - :param states_list: the list of download states - :type states_list: [DownloadState] - :returns: None + :param peer: the peer that sent us the blob + :param data: payload raw data """ - for ds in states_list: - if ds.get_download().dlconfig.get('download_defaults', 'channel_download'): - self.update_from_download(ds.get_download()) - def fetch_next(self): - """ - If we have nothing to process right now, start downloading a new channel. + with db_session: + md_list = self.metadata_store.process_compressed_mdblob(blob.raw_blob) + # Check if the guy who send us this metadata actually has an older version of this md than + # we do, and queue to send it back. 
- :returns: None - """ - if self.request_cache.has(u"channel-download-cache", 0): - return - if self.download_queue: - infohash = self.download_queue.pop(0) - if not self.tribler_session.has_download(infohash): - self._logger.info("Starting channel download with infohash %s", infohash.encode('hex')) - # Reserve the token - self.request_cache.add(ChannelDownloadCache(self.request_cache)) - # Start downloading this channel - with db_session: - channel = self.tribler_session.lm.mds.ChannelMetadata.get_channel_with_infohash(infohash) - finished_deferred = self.tribler_session.lm.download_channel(channel)[1] - finished_deferred.addCallback(self.download_completed) + reply_list = [md for md, result in md_list if + (md and (md.metadata_type == CHANNEL_TORRENT)) and (result == GOT_NEWER_VERSION)] + reply_blob = entries_to_chunk(reply_list, maximum_payload_size)[0] if reply_list else None + if reply_blob: + self.endpoint.send(peer.address, + self._ez_pack(self._prefix, 1, [self.auth, RawBlobPayload(reply_blob).to_pack_list()])) class GigaChannelTestnetCommunity(GigaChannelCommunity): """ This community defines a testnet for the giga channels, used for testing purposes. """ - master_peer = Peer("3081a7301006072a8648ce3d020106052b8104002703819200040726f5b6558151e1b82c3d30c08175c446f5f696b" - "e9b005ee23050fe55f7e4f73c1b84bf30eb0a254c350705f89369ba2c6b6795a50f0aa562b3095bfa8aa069747221" - "c0fb92e207052b7d03fa8a76e0b236d74ac650de37e5dfa02cbd6b9fe2146147f3555bfa7410b9c499a8ec49a80ac" - "84b433fb2bf1740a15e96a5bad2b90b0488bdc791633ee7d829dcd583ee5f".decode('hex')) + master_peer = Peer(unhexlify("3081a7301006072a8648ce3d020106052b81040027038192000401b9f303778e7727b35a4c26487481f" + "a7011e252cc4a6f885f3756bd8898c9620cf1c32e79dd5e75ae277a56702a47428ce47676d005e262fa" + "fd1a131a2cb66be744d52cb1e0fca503658cb3368e9ebe232e7b8c01e3172ebfdb0620b316467e5b2c4" + "c6809565cf2142e8d4322f66a3d13a8c4bb18059c9ed97975a97716a085a93e3e62b0387e63f0bf389a" + "0e9bffe6")) diff --git a/Tribler/community/gigachannel/payload.py b/Tribler/community/gigachannel/payload.py deleted file mode 100644 index a398add060e..00000000000 --- a/Tribler/community/gigachannel/payload.py +++ /dev/null @@ -1,54 +0,0 @@ -from Tribler.pyipv8.ipv8.messaging.payload import Payload - - -class TruncatedChannelPayload(Payload): - """ - Small representation of a channel containing a: - - - 20 character infohash - - 64 character channel title (possibly truncated) - - 64 character public key (LibNaCLPK without "LibNaCLPK:" prefix) - - 8 byte channel version - - In total this message is 156 bytes. - """ - - format_list = ['20s', '64s', '64s', 'Q'] - - def __init__(self, infohash, title, public_key, version): - self.infohash = infohash - self.title = title - self.public_key = public_key - self.version = version - - def to_pack_list(self): - return [('20s', self.infohash), - ('64s', self.title), - ('64s', self.public_key), - ('Q', self.version)] - - @classmethod - def from_unpack_list(cls, infohash, title, public_key, version): - return cls(infohash, title, public_key, version) - - -class TruncatedChannelPlayloadBlob(Payload): - """ - Collection of TruncatedChannelPayloads. - - This message can fit from 1 up to 7 TruncatedChannelPayloads. - The size of this message is therefore from 156 up to 1092 bytes. 
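The size claims in the deleted docstrings check out: the fixed-width format '20s 64s 64s Q' is 20 + 64 + 64 + 8 = 156 bytes per truncated channel, and seven of them make 1092 bytes. struct can confirm this directly:

    import struct

    entry_size = struct.calcsize('!20s64s64sQ')  # infohash, title, public key, version
    assert entry_size == 156
    assert 7 * entry_size == 1092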
- """ - - format_list = [TruncatedChannelPayload] - optional_format_list = [TruncatedChannelPayload] * 6 - - def __init__(self, payload_list): - self.payload_list = payload_list - - def to_pack_list(self): - return [('payload', payload) for payload in self.payload_list[:7]] - - @classmethod - def from_unpack_list(cls, *args): - return cls(args) diff --git a/Tribler/community/gigachannel/sync_strategy.py b/Tribler/community/gigachannel/sync_strategy.py index 8051373656d..d754c057ce1 100644 --- a/Tribler/community/gigachannel/sync_strategy.py +++ b/Tribler/community/gigachannel/sync_strategy.py @@ -7,9 +7,7 @@ class SyncChannels(DiscoveryStrategy): """ Synchronization strategy for gigachannels. - On each tick we: - 1. Send a random peer some of our random subscribed channels. - 2. Check if we have any pending channels to download. + On each tick we send a random peer some of our random subscribed channels. """ def take_step(self): @@ -19,5 +17,3 @@ def take_step(self): if peers: peer = choice(peers) self.overlay.send_random_to(peer) - # Try to fetch pending channels - self.overlay.fetch_next() diff --git a/Tribler/community/popularity/community.py b/Tribler/community/popularity/community.py index f245bf23265..4880fb0af2e 100644 --- a/Tribler/community/popularity/community.py +++ b/Tribler/community/popularity/community.py @@ -1,20 +1,14 @@ from __future__ import absolute_import +from pony.orm import db_session + from twisted.internet.defer import inlineCallbacks -from Tribler.Core.simpledefs import SIGNAL_ON_SEARCH_RESULTS, SIGNAL_SEARCH_COMMUNITY -from Tribler.community.popularity.constants import (ERROR_NO_CONTENT, ERROR_UNKNOWN_PEER, ERROR_UNKNOWN_RESPONSE, - MAX_PACKET_PAYLOAD_SIZE, MSG_CHANNEL_HEALTH_RESPONSE, - MSG_CONTENT_INFO_REQUEST, MSG_CONTENT_INFO_RESPONSE, - MSG_TORRENT_HEALTH_RESPONSE, MSG_TORRENT_INFO_REQUEST, - MSG_TORRENT_INFO_RESPONSE, SEARCH_TORRENT_REQUEST, - SEARCH_TORRENT_RESPONSE) -from Tribler.community.popularity.payload import (ContentInfoRequest, ContentInfoResponse, ContentSubscription, - Pagination, TorrentHealthPayload, TorrentInfoRequestPayload, - TorrentInfoResponsePayload, unpack_responses) +from Tribler.community.popularity.constants import ERROR_UNKNOWN_PEER, ERROR_UNKNOWN_RESPONSE, \ + MSG_TORRENT_HEALTH_RESPONSE +from Tribler.community.popularity.payload import ContentSubscription, TorrentHealthPayload from Tribler.community.popularity.pubsub import PubSubCommunity -from Tribler.community.popularity.repository import ContentRepository, TYPE_TORRENT_HEALTH -from Tribler.community.popularity.request import ContentRequest +from Tribler.community.popularity.repository import ContentRepository from Tribler.pyipv8.ipv8.peer import Peer @@ -30,22 +24,15 @@ class PopularityCommunity(PubSubCommunity): master_peer = Peer(MASTER_PUBLIC_KEY.decode('hex')) def __init__(self, *args, **kwargs): - self.torrent_db = kwargs.pop('torrent_db', None) - self.channel_db = kwargs.pop('channel_db', None) - self.trustchain = kwargs.pop('trustchain_community', None) + self.metadata_store = kwargs.pop('metadata_store', None) self.tribler_session = kwargs.pop('session', None) super(PopularityCommunity, self).__init__(*args, **kwargs) - self.content_repository = ContentRepository(self.torrent_db, self.channel_db) + self.content_repository = ContentRepository(self.metadata_store) self.decode_map.update({ - chr(MSG_TORRENT_HEALTH_RESPONSE): self.on_torrent_health_response, - chr(MSG_CHANNEL_HEALTH_RESPONSE): self.on_channel_health_response, - chr(MSG_TORRENT_INFO_REQUEST): 
self.on_torrent_info_request, - chr(MSG_TORRENT_INFO_RESPONSE): self.on_torrent_info_response, - chr(MSG_CONTENT_INFO_REQUEST): self.on_content_info_request, - chr(MSG_CONTENT_INFO_RESPONSE): self.on_content_info_response + chr(MSG_TORRENT_HEALTH_RESPONSE): self.on_torrent_health_response }) self.logger.info('Popular Community initialized (peer mid %s)', self.my_peer.mid.encode('HEX')) @@ -82,81 +69,12 @@ def on_torrent_health_response(self, source_address, data): infohash = payload.infohash if not self.content_repository.has_torrent(infohash): - self.send_torrent_info_request(infohash, peer=peer) + # TODO(Martijn): we should probably try to fetch the torrent info from the other peer + return peer_trust = self.trustchain.get_trust(peer) if self.trustchain else 0 self.content_repository.update_torrent_health(payload, peer_trust) - def on_channel_health_response(self, source_address, data): - """ - Message handler for channel health response. Currently, not sure how to handle it. - """ - - def on_torrent_info_request(self, source_address, data): - """ - Message handler for torrent info request. - """ - self.logger.debug("Got torrent info request from %s", source_address) - auth, _, payload = self._ez_unpack_auth(TorrentInfoRequestPayload, data) - peer = self.get_peer_from_auth(auth, source_address) - - if peer not in self.subscribers: - self.logger.error(ERROR_UNKNOWN_RESPONSE) - return - - self.send_torrent_info_response(payload.infohash, peer=peer) - - def on_torrent_info_response(self, source_address, data): - """ - Message handler for torrent info response. - """ - self.logger.debug("Got torrent info response from %s", source_address) - auth, _, payload = self._ez_unpack_auth(TorrentInfoResponsePayload, data) - peer = self.get_peer_from_auth(auth, source_address) - - if peer not in self.publishers: - self.logger.error(ERROR_UNKNOWN_RESPONSE) - return - - self.content_repository.update_torrent_info(payload) - - def on_content_info_request(self, source_address, data): - auth, _, payload = self._ez_unpack_auth(ContentInfoRequest, data) - peer = self.get_peer_from_auth(auth, source_address) - - if payload.content_type == SEARCH_TORRENT_REQUEST: - db_results = self.content_repository.search_torrent(payload.query_list) - self.send_content_info_response(peer, payload.identifier, SEARCH_TORRENT_RESPONSE, db_results) - - def on_content_info_response(self, source_address, data): - _, _, payload = self._ez_unpack_auth(ContentInfoResponse, data) - - identifier = int(payload.identifier) - if not self.request_cache.has(u'request', identifier): - return - cache = self.request_cache.get(u'request', identifier) - - if payload.content_type == SEARCH_TORRENT_RESPONSE: - self.process_torrent_search_response(cache.query, payload) - - if not payload.pagination.more: - cache = self.request_cache.pop(u'request', identifier) - cache.finish() - - def process_torrent_search_response(self, query, payload): - response = unpack_responses(payload.response) - - self.content_repository.update_from_torrent_search_results(response) - - result_dict = dict() - result_dict['keywords'] = query - result_dict['results'] = response - result_dict['candidate'] = None - - if self.tribler_session: - self.tribler_session.notifier.notify(SIGNAL_SEARCH_COMMUNITY, SIGNAL_ON_SEARCH_RESULTS, None, - result_dict) - # MESSAGE SENDING FUNCTIONS def send_torrent_health_response(self, payload, peer=None): @@ -171,124 +89,21 @@ def send_torrent_health_response(self, payload, peer=None): packet = 
self.create_message_packet(MSG_TORRENT_HEALTH_RESPONSE, payload) self.broadcast_message(packet, peer=peer) - def send_channel_health_response(self, payload, peer=None): - """ - Method to send channel health response. This message is sent to all the subscribers by default but if a - peer is specified then only that peer receives this message. - """ - if peer and peer not in self.get_peers(): - self.logger.debug(ERROR_UNKNOWN_PEER) - return - - packet = self.create_message_packet(MSG_CHANNEL_HEALTH_RESPONSE, payload) - self.broadcast_message(packet, peer=peer) - - def send_torrent_info_request(self, infohash, peer): - """ - Method to request information about a torrent with given infohash to a peer. - """ - if peer not in self.get_peers(): - self.logger.debug(ERROR_UNKNOWN_PEER) - return - - info_request = TorrentInfoRequestPayload(infohash) - packet = self.create_message_packet(MSG_TORRENT_INFO_REQUEST, info_request) - self.broadcast_message(packet, peer=peer) - - def send_torrent_info_response(self, infohash, peer): - """ - Method to send information about a torrent with given infohash to the requesting peer. - """ - if peer not in self.get_peers(): - self.logger.debug(ERROR_UNKNOWN_PEER) - return - - db_torrent = self.content_repository.get_torrent(infohash) - info_response = TorrentInfoResponsePayload(infohash, db_torrent['name'], db_torrent['length'], - db_torrent['creation_date'], db_torrent['num_files'], - db_torrent['comment']) - packet = self.create_message_packet(MSG_TORRENT_INFO_RESPONSE, info_response) - self.broadcast_message(packet, peer=peer) - - def send_content_info_request(self, content_type, request_list, limit=25, peer=None): - """ - Sends the generic content request of given content_type. - :param content_type: request content type - :param request_list: List request queries - :param limit: Number of expected responses - :param peer: Peer to send this request to - :return a Deferred that fires when we receive the content - :rtype Deferred - """ - cache = self.request_cache.add(ContentRequest(self.request_cache, content_type, request_list)) - self.logger.debug("Sending search request query:%s, identifier:%s", request_list, cache.number) - - content_request = ContentInfoRequest(cache.number, content_type, request_list, limit) - packet = self.create_message_packet(MSG_CONTENT_INFO_REQUEST, content_request) - - if peer: - self.broadcast_message(packet, peer=peer) - else: - for connected_peer in self.get_peers(): - self.broadcast_message(packet, peer=connected_peer) - - return cache.deferred - - def send_content_info_response(self, peer, identifier, content_type, response_list): - """ - Sends the generic content info response with payload response list. - :param peer: Receiving peer - :param identifier: Request identifier - :param content_type: Message content type - :param response_list: Content response - """ - num_results = len(response_list) - current_index = 0 - page_num = 1 - while current_index < num_results: - serialized_results, current_index, page_size = self.pack_sized(response_list, MAX_PACKET_PAYLOAD_SIZE, - start_index=current_index) - if not serialized_results: - self.logger.info("Item too big probably to fit into package. 
Skipping it") - current_index += 1 - else: - pagination = Pagination(page_num, page_size, num_results, more=current_index == num_results) - response_payload = ContentInfoResponse(identifier, content_type, serialized_results, pagination) - packet = self.create_message_packet(MSG_CONTENT_INFO_RESPONSE, response_payload) - self.broadcast_message(packet, peer=peer) - - def send_torrent_search_request(self, query): - """ - Sends torrent search query as a content info request with content_type as SEARCH_TORRENT_REQUEST. - """ - self.send_content_info_request(SEARCH_TORRENT_REQUEST, query) - - def send_channel_search_request(self, query): - """ - Sends channel search query to All Channel 2.0 to get a list of channels. - """ - # TODO: Not implemented yet. Waiting for All Channel 2.0 - # CONTENT REPOSITORY STUFFS def publish_next_content(self): """ Publishes the next content from the queue to the subscribers. - Does nothing if there are none subscribers. + Does nothing if there are no subscribers. Only Torrent health response is published at the moment. """ - self.logger.info("Content to publish: %d", self.content_repository.count_content()) + self.logger.info("Content to publish: %d", self.content_repository.queue_length()) if not self.subscribers: self.logger.info("No subscribers found. Not publishing anything") return - content_type, content = self.content_repository.pop_content() - if content_type is None: - self.logger.debug(ERROR_NO_CONTENT) - return - - self.logger.info("Publishing content[type:%d]", content_type) - if content_type == TYPE_TORRENT_HEALTH: + content = self.content_repository.pop_content() + if content: infohash, seeders, leechers, timestamp = content payload = TorrentHealthPayload(infohash, seeders, leechers, timestamp) self.send_torrent_health_response(payload) @@ -297,15 +112,17 @@ def publish_latest_torrents(self, peer): """ Publishes the latest torrents in local database to the given peer. """ - torrents = self.content_repository.get_top_torrents() - self.logger.info("Publishing %d torrents to peer %s", len(torrents), peer) - for torrent in torrents: - infohash, seeders, leechers, timestamp = torrent[:4] - payload = TorrentHealthPayload(infohash, seeders, leechers, timestamp) + with db_session: + torrents = self.content_repository.get_top_torrents() + self.logger.info("Publishing %d torrents to peer %s", len(torrents), peer) + + to_send = [TorrentHealthPayload(str(torrent.infohash), torrent.health.seeders, torrent.health.leechers, + torrent.health.last_check) for torrent in torrents] + for payload in to_send: self.send_torrent_health_response(payload, peer=peer) - def queue_content(self, content_type, content): + def queue_content(self, content): """ Basically adds a given content to the queue of content repository. 
""" - self.content_repository.add_content(content_type, content) + self.content_repository.add_content_to_queue(content) diff --git a/Tribler/community/popularity/constants.py b/Tribler/community/popularity/constants.py index 70542090734..cff151fd64f 100644 --- a/Tribler/community/popularity/constants.py +++ b/Tribler/community/popularity/constants.py @@ -2,31 +2,11 @@ MSG_SUBSCRIBE = 1 MSG_SUBSCRIPTION = 2 MSG_TORRENT_HEALTH_RESPONSE = 3 -MSG_CHANNEL_HEALTH_RESPONSE = 4 -MSG_TORRENT_INFO_REQUEST = 5 -MSG_TORRENT_INFO_RESPONSE = 6 -MSG_SEARCH_REQUEST = 7 -MSG_SEARCH_RESPONSE = 8 -MSG_CONTENT_INFO_REQUEST = 9 -MSG_CONTENT_INFO_RESPONSE = 10 MAX_SUBSCRIBERS = 10 MAX_PUBLISHERS = 10 PUBLISH_INTERVAL = 5 -TORRENT_SEARCH_RESPONSE_TYPE = 0 -CHANNEL_SEARCH_RESPONSE_TYPE = 1 - -# Search types -TYPE_TORRENT = 0 -TYPE_CHANNEL = 1 - -# Request types -SEARCH_TORRENT_REQUEST = 11 -SEARCH_TORRENT_RESPONSE = 12 -SEARCH_CHANNEL_REQUEST = 13 -SEARCH_CHANNEL_RESPONSE = 14 - # Maximum packet payload size in bytes MAX_PACKET_PAYLOAD_SIZE = 500 diff --git a/Tribler/community/popularity/payload.py b/Tribler/community/popularity/payload.py index 2be274e1f6a..f0794e33064 100644 --- a/Tribler/community/popularity/payload.py +++ b/Tribler/community/popularity/payload.py @@ -1,45 +1,6 @@ from __future__ import absolute_import -import struct -from struct import calcsize, pack, unpack_from - from Tribler.pyipv8.ipv8.messaging.payload import Payload -from Tribler.pyipv8.ipv8.messaging.serialization import default_serializer - - -def encode_values(values): - encoded_list = [value.encode('utf-8') for value in values] - return ''.join([pack('!H', len(encoded)) + encoded for encoded in encoded_list]) - - -def decode_values(values_str): - values = [] - index = 0 - while index < len(values_str): - length = unpack_from('!H', values_str[index:])[0] - index += calcsize('!H') - values.append(values_str[index:index + length].decode('utf-8')) - index += length - return values - - -def unpack_responses(serialized_responses, as_payload=False): - """ - Unpack a custom serialization of a list of SearchResponseItemPayload payloads. - - :param serialized_responses: the serialized data - :return: [[str, str, int, int, str, int, int, int, str]] or SearchResponseItemPayload - """ - data = serialized_responses - out = [] - while data: - unpacked, data = default_serializer.unpack_to_serializables([SearchResponseItemPayload], data) - if as_payload: - out.append(unpacked) - else: - out.append([unpacked.infohash, unpacked.name, unpacked.length, unpacked.num_files, unpacked.category_list, - unpacked.creation_date, unpacked.seeders, unpacked.leechers, unpacked.cid]) - return out class ContentSubscription(Payload): @@ -85,245 +46,3 @@ def to_pack_list(self): def from_unpack_list(cls, *args): (infohash, num_seeders, num_leechers, timestamp) = args return TorrentHealthPayload(infohash, num_seeders, num_leechers, timestamp) - - -class ChannelHealthPayload(Payload): - """ - Payload for a channel popularity message in the popularity community. 
- """ - - format_list = ['varlenI', 'I', 'I', 'I', 'Q'] - - def __init__(self, channel_id, num_votes, num_torrents, swarm_size_sum, timestamp): - super(ChannelHealthPayload, self).__init__() - self.channel_id = channel_id - self.num_votes = num_votes or 0 - self.num_torrents = num_torrents or 0 - self.swarm_size_sum = swarm_size_sum or 0 - self.timestamp = timestamp or 0 - - def to_pack_list(self): - data = [('varlenI', self.channel_id), - ('I', self.num_votes), - ('I', self.num_torrents), - ('I', self.swarm_size_sum), - ('Q', self.timestamp)] - - return data - - @classmethod - def from_unpack_list(cls, *args): - (channel_id, num_votes, num_torrents, swarm_size_sum, timestamp) = args - return ChannelHealthPayload(channel_id, num_votes, num_torrents, swarm_size_sum, timestamp) - - -class TorrentInfoRequestPayload(Payload): - """ - Payload for requesting torrent info for a given infohash. - """ - format_list = ['20s'] - - def __init__(self, infohash): - super(TorrentInfoRequestPayload, self).__init__() - self.infohash = infohash - - def to_pack_list(self): - data = [('20s', str(self.infohash))] - return data - - @classmethod - def from_unpack_list(cls, *args): - (infohash, ) = args - return TorrentInfoRequestPayload(infohash) - - -class TorrentInfoResponsePayload(Payload): - """ - Payload for torrent info response. - """ - format_list = ['20s', 'varlenH', 'Q', 'Q', 'I', 'varlenH'] - - def __init__(self, infohash, name, length, creation_date, num_files, comment): - super(TorrentInfoResponsePayload, self).__init__() - self.infohash = infohash - self.name = name or '' - self.length = length or 0 - self.creation_date = creation_date or 0 - self.num_files = num_files or 0 - self.comment = comment or '' - - def to_pack_list(self): - data = [('20s', self.infohash), - ('varlenH', self.name.encode('utf-8')), - ('Q', self.length), - ('Q', self.creation_date), - ('I', self.num_files), - ('varlenH', str(self.comment))] - return data - - @classmethod - def from_unpack_list(cls, *args): - (infohash, name, length, creation_date, num_files, comment) = args - return TorrentInfoResponsePayload(infohash, name.decode('utf-8'), length, creation_date, num_files, comment) - - -class SearchResponseItemPayload(Payload): - """ - Payload for search response items - """ - - format_list = ['20s', 'varlenH', 'Q', 'I', 'varlenH', 'Q', 'I', 'I', '20s'] - is_list_descriptor = True - - def __init__(self, infohash, name, length, num_files, category_list, creation_date, seeders, leechers, cid): - self.infohash = infohash - self.name = name - self.length = length or 0 - self.num_files = num_files or 0 - self.category_list = category_list or [] - self.creation_date = creation_date or 0 - self.seeders = seeders or 0 - self.leechers = leechers or 0 - self.cid = cid - - def to_pack_list(self): - data = [('20s', str(self.infohash)), - ('varlenH', self.name.encode('utf-8')), - ('Q', self.length), - ('I', self.num_files), - ('varlenH', encode_values(self.category_list)), - ('Q', self.creation_date), - ('I', self.seeders), - ('I', self.leechers), - ('20s', self.cid if self.cid else '')] - return data - - @classmethod - def from_unpack_list(cls, *args): - (infohash, name, length, num_files, category_list_str, creation_date, seeders, leechers, cid) = args - category_list = decode_values(category_list_str) - return SearchResponseItemPayload(infohash, name.decode('utf-8'), length, num_files, category_list, - creation_date, seeders, leechers, cid) - - -class ChannelItemPayload(Payload): - """ - Payload for search response channel items - 
""" - format_list = ['I', '20s', 'varlenH', 'varlenH', 'I', 'I', 'I', 'Q'] - is_list_descriptor = True - - def __init__(self, dbid, dispersy_cid, name, description, nr_torrents, nr_favorite, nr_spam, modified): - self.id = dbid - self.name = name - self.description = description or '' - self.cid = dispersy_cid - self.modified = modified or 0 - self.nr_torrents = nr_torrents or 0 - self.nr_favorite = nr_favorite or 0 - self.nr_spam = nr_spam or 0 - - def to_pack_list(self): - data = [('I', id), - ('20s', str(self.cid)), - ('varlenH', self.name), - ('varlenH', self.description.encode('utf-8')), - ('I', self.nr_torrents), - ('I', self.nr_favorite), - ('I', self.nr_spam), - ('Q', self.modified)] - return data - - @classmethod - def from_unpack_list(cls, dbid, dispersy_cid, name, description, nr_torrents, nr_favorite, nr_spam, modified): - return ChannelItemPayload(dbid, dispersy_cid, name.decode('utf-8'), description.decode('utf-8'), nr_torrents, - nr_favorite, nr_spam, modified) - - -class SearchResponsePayload(Payload): - """ - Payload for search response - """ - format_list = ['varlenI', 'I', 'varlenH'] - - def __init__(self, identifier, response_type, results): - self.identifier = identifier - self.response_type = response_type - self.results = results - - def to_pack_list(self): - data = [('varlenI', str(self.identifier)), - ('I', self.response_type), - ('varlenH', self.results)] - return data - - @classmethod - def from_unpack_list(cls, *args): - (identifier, response_type, results) = args - return SearchResponsePayload(identifier, response_type, results) - - -class Pagination(object): - - def __init__(self, page_number, page_size, max_results, more): - self.page_number = page_number - self.page_size = page_size - self.max_results = max_results - self.more = more - - def serialize(self): - return struct.pack('!HHH?', self.page_number, self.page_size, self.max_results, self.more) - - @classmethod - def deserialize(cls, pagination_str): - return Pagination(*struct.unpack('!HHH?', pagination_str)) - - -class ContentInfoRequest(Payload): - """ Payload for generic content request """ - - format_list = ['I', 'I', 'varlenH', 'I'] - - def __init__(self, identifier, content_type, query_list, limit): - self.identifier = identifier - self.content_type = content_type - self.query_list = query_list - self.limit = limit - - def to_pack_list(self): - data = [('I', self.identifier), - ('I', self.content_type), - ('varlenH', encode_values(self.query_list)), - ('I', self.limit)] - return data - - @classmethod - def from_unpack_list(cls, *args): - (identifier, content_type, query_list_str, limit) = args - query_list = decode_values(query_list_str) - return ContentInfoRequest(identifier, content_type, query_list, limit) - - -class ContentInfoResponse(Payload): - """ Payload for generic content response """ - - format_list = ['I', 'I', 'varlenH', 'varlenH'] - - def __init__(self, identifier, content_type, response, pagination): - self.identifier = identifier - self.content_type = content_type - self.response = response - self.pagination = pagination - - def to_pack_list(self): - data = [('I', self.identifier), - ('I', self.content_type), - ('varlenH', self.response), - ('varlenH', self.pagination.serialize())] - return data - - @classmethod - def from_unpack_list(cls, *args): - (identifier, content_type, response, pagination_str) = args - pagination = Pagination.deserialize(pagination_str) - return ContentInfoResponse(identifier, content_type, response, pagination) diff --git 
a/Tribler/community/popularity/pubsub.py b/Tribler/community/popularity/pubsub.py index 313b3a843f8..5c1d8e9fe4c 100644 --- a/Tribler/community/popularity/pubsub.py +++ b/Tribler/community/popularity/pubsub.py @@ -1,13 +1,14 @@ from __future__ import absolute_import import logging -from abc import abstractmethod +from binascii import unhexlify from copy import copy + from twisted.internet.defer import inlineCallbacks from twisted.internet.task import LoopingCall -from Tribler.community.popularity.constants import MSG_SUBSCRIPTION, ERROR_UNKNOWN_PEER, MAX_SUBSCRIBERS, \ - MSG_SUBSCRIBE, MAX_PUBLISHERS, PUBLISH_INTERVAL +from Tribler.community.popularity.constants import ERROR_UNKNOWN_PEER, MAX_PUBLISHERS, MAX_SUBSCRIBERS, MSG_SUBSCRIBE, \ + MSG_SUBSCRIPTION, PUBLISH_INTERVAL from Tribler.community.popularity.payload import ContentSubscription from Tribler.community.popularity.request import ContentRequest from Tribler.pyipv8.ipv8.community import Community @@ -25,9 +26,16 @@ class PubSubCommunity(Community): All the derived community should implement publish_next_content() method which is responsible for publishing the next available content to all the subscribers. """ + MASTER_PUBLIC_KEY = "3081a7301006072a8648ce3d020106052b8104002703819200040504278d20d6776ce7081ad57d99fe066bb2a93" \ + "ce7cc92405a534ef7175bab702be557d8c7d3b725ea0eb09c686e798f6c7ad85e8781a4c3b20e54c15ede38077c" \ + "8f5c801b71d13105f261da7ddcaa94ae14bd177bf1a05a66f595b9bb99117d11f73b4c8d3dcdcdc2b3f838b8ba3" \ + "5a9f600d2c543e8b3ba646083307b917bbbccfc53fc5ab6ded90b711d7eeda46f5f" + + master_peer = Peer(unhexlify(MASTER_PUBLIC_KEY)) def __init__(self, *args, **kwargs): super(PubSubCommunity, self).__init__(*args, **kwargs) + self.trustchain = kwargs.pop('trustchain_community', None) self.logger = logging.getLogger(self.__class__.__name__) self.request_cache = RequestCache() @@ -233,7 +241,6 @@ def pack_sized(self, payload_list, fit_size, start_index=0): current_index += 1 return serialized_results, current_index, current_index - start_index - @abstractmethod def publish_next_content(self): """ Method responsible for publishing content during periodic push """ pass diff --git a/Tribler/community/popularity/repository.py b/Tribler/community/popularity/repository.py index d71168a5aca..65bc2fcb544 100644 --- a/Tribler/community/popularity/repository.py +++ b/Tribler/community/popularity/repository.py @@ -4,9 +4,10 @@ import time from collections import deque -import six +from pony.orm import db_session, desc -from Tribler.community.popularity.payload import SearchResponseItemPayload, ChannelItemPayload +from Tribler.Core.Modules.MetadataStore.serialization import REGULAR_TORRENT +from Tribler.pyipv8.ipv8.database import database_blob try: long # pylint: disable=long-builtin @@ -19,7 +20,6 @@ DEFAULT_FRESHNESS_LIMIT = 60 TYPE_TORRENT_HEALTH = 1 -TYPE_CHANNEL_HEALTH = 2 class ContentRepository(object): @@ -30,161 +30,65 @@ class ContentRepository(object): It also maintains a content queue which stores the content for publishing in the next publishing cycle. 
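
As the docstring above notes, ContentRepository keeps a bounded publish queue: deque(maxlen=MAX_CACHE) silently evicts the oldest entry once full, queue_length() is just len(), and pop_content(), written with deque.pop(), takes the most recently queued item. The value of MAX_CACHE is not visible in this hunk, so 1000 below is purely illustrative:

from collections import deque

MAX_CACHE = 1000  # illustrative; the real constant is defined elsewhere in repository.py

queue = deque(maxlen=MAX_CACHE)
for i in range(MAX_CACHE + 5):
    queue.append(i)                  # appends beyond maxlen drop the oldest entries

assert len(queue) == MAX_CACHE       # queue_length() is just len(queue)
assert queue[0] == 5                 # items 0..4 were evicted
assert queue.pop() == MAX_CACHE + 4  # pop_content() returns the newest item (LIFO end)
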
""" - def __init__(self, torrent_db, channel_db): + def __init__(self, metadata_store): super(ContentRepository, self).__init__() self.logger = logging.getLogger(self.__class__.__name__) - self.torrent_db = torrent_db - self.channel_db = channel_db + self.metadata_store = metadata_store self.queue = deque(maxlen=MAX_CACHE) def cleanup(self): - self.torrent_db = None self.queue = None - def add_content(self, content_type, content): + def add_content_to_queue(self, content): if self.queue is not None: - self.queue.append((content_type, content)) + self.queue.append(content) - def count_content(self): + def queue_length(self): return len(self.queue) if self.queue else 0 def pop_content(self): - return self.queue.pop() if self.queue else (None, None) + return self.queue.pop() if self.queue else None + @db_session def get_top_torrents(self, limit=DEFAULT_TORRENT_LIMIT): - return self.torrent_db.getRecentlyCheckedTorrents(limit) + return list(self.metadata_store.TorrentMetadata.select( + lambda g: g.metadata_type == REGULAR_TORRENT).sort_by(desc("g.health.seeders")).limit(limit)) + @db_session def update_torrent_health(self, torrent_health_payload, peer_trust=0): - - def update_torrent(db_handler, health_payload): - db_handler.updateTorrent(infohash, notify=False, num_seeders=health_payload.num_seeders, - num_leechers=health_payload.num_leechers, - last_tracker_check=int(health_payload.timestamp), - status=u"good" if health_payload.num_seeders > 1 else u"unknown") - - if not self.torrent_db: - self.logger.error("Torrent DB is not available. Skipping torrent health update.") + """ + Update the health of a torrent in the database. + """ + if not self.metadata_store: + self.logger.error("Metadata store is not available. Skipping torrent health update.") return infohash = torrent_health_payload.infohash - if self.has_torrent(infohash): - db_torrent = self.get_torrent(infohash) - is_fresh = time.time() - db_torrent['last_tracker_check'] < DEFAULT_FRESHNESS_LIMIT - if is_fresh and peer_trust < 2: - self.logger.info("Database record is already fresh and the sending peer trust " - "score is too low so we just ignore the response.") - return - - # Update the torrent health anyway. A torrent info request should be sent separately to request additional info. - update_torrent(self.torrent_db, torrent_health_payload) - - def update_torrent_info(self, torrent_info_response): - infohash = torrent_info_response.infohash - if self.has_torrent(infohash): - db_torrent = self.get_torrent(infohash) - if db_torrent['name'] and db_torrent['name'] == torrent_info_response.name: - self.logger.info("Conflicting names for torrent. Ignoring the response") - return - - # Update local database - self.torrent_db.updateTorrent(infohash, notify=False, name=torrent_info_response.name, - length=torrent_info_response.length, - creation_date=torrent_info_response.creation_date, - num_files=torrent_info_response.num_files, - comment=torrent_info_response.comment) + if not self.has_torrent(infohash): + return + torrent = self.get_torrent(infohash) + is_fresh = time.time() - torrent.health.last_check < DEFAULT_FRESHNESS_LIMIT + if is_fresh and peer_trust < 2: + self.logger.info("Database record is already fresh and the sending peer trust " + "score is too low so we just ignore the response.") + else: + # Update the torrent health anyway. A torrent info request should be sent separately + # to request additional info. 
+ torrent.health.seeders = torrent_health_payload.num_seeders + torrent.health.leechers = torrent_health_payload.num_leechers + torrent.health.last_check = int(torrent_health_payload.timestamp) + + @db_session def get_torrent(self, infohash): - keys = ('name', 'length', 'creation_date', 'num_files', 'num_seeders', 'num_leechers', 'comment', - 'last_tracker_check') - return self.torrent_db.getTorrent(infohash, keys=keys, include_mypref=False) - - def has_torrent(self, infohash): - return self.get_torrent(infohash) is not None - - def search_torrent(self, query): """ - Searches for best torrents for the given query and packs them into a list of SearchResponseItemPayload. - :param query: Search query - :return: List + Return a torrent with a specific infohash from the database. """ + results = list(self.metadata_store.TorrentMetadata.select( + lambda g: g.infohash == database_blob(infohash) and g.metadata_type == REGULAR_TORRENT).limit(1)) + if results: + return results[0] + return None - db_results = self.torrent_db.searchNames(query, local=True, - keys=['infohash', 'T.name', 'T.length', 'T.num_files', 'T.category', - 'T.creation_date', 'T.num_seeders', 'T.num_leechers']) - if not db_results: - return [] - - results = [] - for dbresult in db_results: - channel_details = dbresult[-10:] - - dbresult = list(dbresult[:8]) - dbresult[2] = long(dbresult[2]) # length - dbresult[3] = int(dbresult[3]) # num_files - dbresult[4] = [dbresult[4]] # category - dbresult[5] = long(dbresult[5]) # creation_date - dbresult[6] = int(dbresult[6] or 0) # num_seeders - dbresult[7] = int(dbresult[7] or 0) # num_leechers - - # cid - if channel_details[1]: - channel_details[1] = str(channel_details[1]) - dbresult.append(channel_details[1]) - - results.append(SearchResponseItemPayload(*tuple(dbresult))) - - return results - - def search_channels(self, query): - """ - Search best channels for the given query. - :param query: Search query - :return: List - """ - db_channels = self.channel_db.search_in_local_channels_db(query) - if not db_channels: - return [] - - results = [] - if db_channels: - for channel in db_channels: - channel_payload = channel[:8] - channel_payload[7] = channel[8] # modified - results.append(ChannelItemPayload(*channel_payload)) - return results - - def update_from_torrent_search_results(self, search_results): - """ - Updates the torrent database with the provided search results. It also checks for conflicting torrents, meaning - if torrent already exists in the database, we simply ignore the search result. - """ - for result in search_results: - (infohash, name, length, num_files, category_list, creation_date, seeders, leechers, cid) = result - name = u''.join([six.unichr(ord(c)) for c in name]) - torrent_item = SearchResponseItemPayload(infohash, name, length, num_files, category_list, - creation_date, seeders, leechers, cid) - if self.has_torrent(infohash): - db_torrent = self.get_torrent(infohash) - if db_torrent['name'] and db_torrent['name'] == torrent_item.name: - self.logger.info("Conflicting names for torrent. 
Ignoring the response") - continue - else: - self.logger.debug("Adding new torrent from search results to database") - self.torrent_db.addOrGetTorrentID(infohash) - self.torrent_db._db.commit_now() - - # Update local database - self.torrent_db.updateTorrent(infohash, notify=False, name=torrent_item.name, - length=torrent_item.length, - creation_date=torrent_item.creation_date, - num_files=torrent_item.num_files, - seeder=seeders, - leecher=leechers, - comment='') - - def update_from_channel_search_results(self, all_items): - """ - TODO: updates the channel database with the search results. - Waiting for all channel 2.0 - """ - pass + def has_torrent(self, infohash): + return self.get_torrent(infohash) is not None diff --git a/Tribler/community/search/__init__.py b/Tribler/community/search/__init__.py deleted file mode 100644 index 4a117311fe6..00000000000 --- a/Tribler/community/search/__init__.py +++ /dev/null @@ -1,3 +0,0 @@ -""" -A Dispersy community used to implement decentralized search in Tribler. It allows peers to discover new .torrents. -""" diff --git a/Tribler/community/search/community.py b/Tribler/community/search/community.py deleted file mode 100644 index 5efe96f2061..00000000000 --- a/Tribler/community/search/community.py +++ /dev/null @@ -1,738 +0,0 @@ -""" -Peer to peer search functionality and discovering new torrents. - -Author(s): Niels Zeilemaker -""" -from binascii import hexlify -from random import shuffle -from time import time -from traceback import print_exc -from twisted.internet.task import LoopingCall - -from Tribler.Core.CacheDB.sqlitecachedb import bin2str -from Tribler.Core.TorrentDef import TorrentDef -from Tribler.community.channel.payload import TorrentPayload -from Tribler.community.channel.preview import PreviewChannelCommunity -from Tribler.community.search.conversion import SearchConversion -from Tribler.community.search.payload import (SearchRequestPayload, SearchResponsePayload, TorrentRequestPayload, - TorrentCollectRequestPayload, TorrentCollectResponsePayload, - TasteIntroPayload) -from Tribler.dispersy.authentication import MemberAuthentication -from Tribler.dispersy.bloomfilter import BloomFilter -from Tribler.dispersy.candidate import CANDIDATE_WALK_LIFETIME, WalkCandidate -from Tribler.dispersy.community import Community -from Tribler.dispersy.conversion import DefaultConversion -from Tribler.dispersy.database import IgnoreCommits -from Tribler.dispersy.destination import CandidateDestination, CommunityDestination -from Tribler.dispersy.distribution import DirectDistribution, FullSyncDistribution -from Tribler.dispersy.exception import CommunityNotFoundException -from Tribler.dispersy.message import Message -from Tribler.dispersy.requestcache import RandomNumberCache, IntroductionRequestCache -from Tribler.dispersy.resolution import PublicResolution - -DEBUG = False -SWIFT_INFOHASHES = 0 -CREATE_TORRENT_COLLECT_INTERVAL = 5 - - -class SearchCommunity(Community): - - """ - A single community that all Tribler members join and use to disseminate .torrent files. 
- """ - @classmethod - def get_master_members(cls, dispersy): -# generated: Mon Nov 24 10:37:11 2014 -# curve: NID_sect571r1 -# len: 571 bits ~ 144 bytes signature -# pub: 170 3081a7301006072a8648ce3d020106052b810400270381920004034a9031d07ed6d5d98b0a98cacd4bef2e19125ea7635927708babefa8e66deeb6cb4e78cc0efda39a581a679032a95ebc4a0fbdf913aa08af31f14753839b620cb5547c6e6cf42f03629b1b3dc199a3b1a262401c7ae615e87a1cf13109c7fb532f45c492ba927787257bf994e989a15fb16f20751649515fc58d87e0c861ca5b467a5c450bf57f145743d794057e75 -# pub-sha1 fb04df93369587ec8fd9b74559186fa356cffda8 -# -----BEGIN PUBLIC KEY----- -# MIGnMBAGByqGSM49AgEGBSuBBAAnA4GSAAQDSpAx0H7W1dmLCpjKzUvvLhkSXqdj -# WSdwi6vvqOZt7rbLTnjMDv2jmlgaZ5AyqV68Sg+9+ROqCK8x8UdTg5tiDLVUfG5s -# 9C8DYpsbPcGZo7GiYkAceuYV6Hoc8TEJx/tTL0XEkrqSd4cle/mU6YmhX7FvIHUW -# SVFfxY2H4MhhyltGelxFC/V/FFdD15QFfnU= -# -----END PUBLIC KEY----- - master_key = "3081a7301006072a8648ce3d020106052b810400270381920004034a9031d07ed6d5d98b0a98cacd4bef2e19125ea7635927708babefa8e66deeb6cb4e78cc0efda39a581a679032a95ebc4a0fbdf913aa08af31f14753839b620cb5547c6e6cf42f03629b1b3dc199a3b1a262401c7ae615e87a1cf13109c7fb532f45c492ba927787257bf994e989a15fb16f20751649515fc58d87e0c861ca5b467a5c450bf57f145743d794057e75".decode("HEX") - master = dispersy.get_member(public_key=master_key) - return [master] - - def __init__(self, *args, **kwargs): - super(SearchCommunity, self).__init__(*args, **kwargs) - self.tribler_session = None - self.integrate_with_tribler = None - self.log_incoming_searches = None - self.taste_buddies = [] - - self._channelcast_db = None - self._torrent_db = None - self._mypref_db = None - self._notifier = None - - self._rtorrent_handler = None - - self.taste_bloom_filter = None - self.taste_bloom_filter_key = None - - self.torrent_cache = None - - def initialize(self, tribler_session=None, log_incoming_searches=False): - self.tribler_session = tribler_session - self.integrate_with_tribler = tribler_session is not None - self.log_incoming_searches = log_incoming_searches - - super(SearchCommunity, self).initialize() - # To always connect to a peer uncomment/modify the following line - # self.taste_buddies.append([1, time(), Candidate(("127.0.0.1", 1234), False)) - - if self.integrate_with_tribler: - from Tribler.Core.simpledefs import NTFY_CHANNELCAST, NTFY_TORRENTS, NTFY_MYPREFERENCES - - # tribler channelcast database - self._channelcast_db = tribler_session.open_dbhandler(NTFY_CHANNELCAST) - self._torrent_db = tribler_session.open_dbhandler(NTFY_TORRENTS) - self._mypref_db = tribler_session.open_dbhandler(NTFY_MYPREFERENCES) - self._notifier = tribler_session.notifier - - # torrent collecting - self._rtorrent_handler = tribler_session.lm.rtorrent_handler - else: - self._channelcast_db = ChannelCastDBStub(self._dispersy) - self._torrent_db = None - self._mypref_db = None - self._notifier = None - - self.register_task(u"create torrent collect requests", - LoopingCall(self.create_torrent_collect_requests)).start(CREATE_TORRENT_COLLECT_INTERVAL, - now=True) - - def initiate_meta_messages(self): - return super(SearchCommunity, self).initiate_meta_messages() + [ - Message(self, u"search-request", - MemberAuthentication(), - PublicResolution(), - DirectDistribution(), - CandidateDestination(), - SearchRequestPayload(), - self._generic_timeline_check, - self.on_search), - Message(self, u"search-response", - MemberAuthentication(), - PublicResolution(), - DirectDistribution(), - CandidateDestination(), - SearchResponsePayload(), - self._generic_timeline_check, - 
self.on_search_response), - Message(self, u"torrent-request", - MemberAuthentication(), - PublicResolution(), - DirectDistribution(), - CandidateDestination(), - TorrentRequestPayload(), - self._generic_timeline_check, - self.on_torrent_request), - Message(self, u"torrent-collect-request", - MemberAuthentication(), - PublicResolution(), - DirectDistribution(), - CandidateDestination(), - TorrentCollectRequestPayload(), - self._generic_timeline_check, - self.on_torrent_collect_request), - Message(self, u"torrent-collect-response", - MemberAuthentication(), - PublicResolution(), - DirectDistribution(), - CandidateDestination(), - TorrentCollectResponsePayload(), - self._generic_timeline_check, - self.on_torrent_collect_response), - Message(self, u"torrent", - MemberAuthentication(), - PublicResolution(), - FullSyncDistribution(enable_sequence_number=False, synchronization_direction=u"ASC", priority=128), - CommunityDestination(node_count=0), - TorrentPayload(), - self._generic_timeline_check, - self.on_torrent), - ] - - def _initialize_meta_messages(self): - super(SearchCommunity, self)._initialize_meta_messages() - - ori = self._meta_messages[u"dispersy-introduction-request"] - new = Message(self, ori.name, ori.authentication, ori.resolution, ori.distribution, ori.destination, TasteIntroPayload(), ori.check_callback, ori.handle_callback) - self._meta_messages[u"dispersy-introduction-request"] = new - - def initiate_conversions(self): - return [DefaultConversion(self), SearchConversion(self)] - - @property - def dispersy_enable_fast_candidate_walker(self): - return self.integrate_with_tribler - - @property - def dispersy_auto_download_master_member(self): - # there is no dispersy-identity for the master member, so don't try to download - return False - - @property - def dispersy_enable_bloom_filter_sync(self): - # 1. disable bloom filter sync in walker - # 2. 
accept messages in any global time range - return False - - def add_taste_buddies(self, new_taste_buddies): - for new_tb_tuple in new_taste_buddies[:]: - for tb_tuple in self.taste_buddies: - if tb_tuple[-1].sock_addr == new_tb_tuple[-1].sock_addr: - - # update similarity - tb_tuple[0] = max(new_tb_tuple[0], tb_tuple[0]) - new_taste_buddies.remove(new_tb_tuple) - break - else: - self.taste_buddies.append(new_tb_tuple) - - self.taste_buddies.sort(reverse=True) - self.taste_buddies = self.taste_buddies[:10] - - # Send ping to all new candidates - if len(new_taste_buddies) > 0: - self.create_torrent_collect_requests([tb_tuple[-1] for tb_tuple in new_taste_buddies]) - - def get_nr_connections(self): - return len(self.get_connections()) - - def get_connections(self): - # add 10 taste buddies and 20 - len(taste_buddies) to candidates - candidates = set(candidate for _, _, candidate in self.taste_buddies) - sock_addresses = set(candidate.sock_addr for _, _, candidate in self.taste_buddies) - - for candidate in self.dispersy_yield_verified_candidates(): - if candidate.sock_addr not in sock_addresses: - candidates.add(candidate) - sock_addresses.add(candidate.sock_addr) - - if len(candidates) == 20: - break - return candidates - - def __calc_similarity(self, candidate, myPrefs, hisPrefs, overlap): - if myPrefs > 0 and hisPrefs > 0: - my_root = 1.0 / (myPrefs ** .5) - sim = overlap * (my_root * (1.0 / (hisPrefs ** .5))) - return [sim, time(), candidate] - - return [0, time(), candidate] - - def create_introduction_request(self, destination, allow_sync, is_fast_walker=False): - assert isinstance(destination, WalkCandidate), [type(destination), destination] - - if DEBUG: - self._logger.debug(u"SearchCommunity: sending introduction request to %s", destination) - - advice = True - if not is_fast_walker: - my_preferences = sorted(self._mypref_db.getMyPrefListInfohash(limit=500)) - num_preferences = len(my_preferences) - - my_pref_key = u",".join(map(bin2str, my_preferences)) - if my_pref_key != self.taste_bloom_filter_key: - if num_preferences > 0: - # no prefix changing, we want false positives (make sure it is a single char) - self.taste_bloom_filter = BloomFilter(0.005, len(my_preferences), prefix=' ') - self.taste_bloom_filter.add_keys(my_preferences) - else: - self.taste_bloom_filter = None - - self.taste_bloom_filter_key = my_pref_key - - taste_bloom_filter = self.taste_bloom_filter - - cache = self._request_cache.add(IntroductionRequestCache(self, destination)) - payload = (destination.sock_addr, self._dispersy._lan_address, self._dispersy._wan_address, advice, self._dispersy._connection_type, None, cache.number, num_preferences, taste_bloom_filter) - else: - cache = self._request_cache.add(IntroductionRequestCache(self, destination)) - payload = (destination.sock_addr, self._dispersy._lan_address, self._dispersy._wan_address, advice, self._dispersy._connection_type, None, cache.number, 0, None) - - destination.walk(time()) - self.add_candidate(destination) - - meta_request = self.get_meta_message(u"dispersy-introduction-request") - request = meta_request.impl(authentication=(self.my_member,), - distribution=(self.global_time,), - destination=(destination,), - payload=payload) - - self._logger.debug(u"%s %s sending introduction request to %s", self.cid.encode("HEX"), type(self), destination) - - self._dispersy._forward([request]) - return request - - def on_introduction_request(self, messages): - super(SearchCommunity, self).on_introduction_request(messages) - - if 
any(message.payload.taste_bloom_filter for message in messages): - my_preferences = self._mypref_db.getMyPrefListInfohash(limit=500) - else: - my_preferences = [] - - new_taste_buddies = [] - for message in messages: - taste_bloom_filter = message.payload.taste_bloom_filter - num_preferences = message.payload.num_preferences - if taste_bloom_filter: - overlap = sum(infohash in taste_bloom_filter for infohash in my_preferences) - else: - overlap = 0 - - new_taste_buddies.append(self.__calc_similarity(message.candidate, len(my_preferences), num_preferences, overlap)) - - if len(new_taste_buddies) > 0: - self.add_taste_buddies(new_taste_buddies) - - if self._notifier: - from Tribler.Core.simpledefs import NTFY_ACT_MEET, NTFY_ACTIVITIES, NTFY_INSERT - for message in messages: - self._notifier.notify(NTFY_ACTIVITIES, NTFY_INSERT, NTFY_ACT_MEET, - "%s:%d" % message.candidate.sock_addr) - - class SearchRequest(RandomNumberCache): - - def __init__(self, request_cache, keywords): - super(SearchCommunity.SearchRequest, self).__init__(request_cache, u"search") - self.keywords = keywords - - @property - def timeout_delay(self): - return 30.0 - - def on_timeout(self): - pass - - def create_search(self, keywords): - candidates = self.get_connections() - if len(candidates) > 0: - if DEBUG: - self._logger.debug(u"sending search request for %s to %s", keywords, map(str, candidates)) - - # register callback/fetch identifier - cache = self._request_cache.add(SearchCommunity.SearchRequest(self._request_cache, keywords)) - - # create search request message - meta = self.get_meta_message(u"search-request") - message = meta.impl(authentication=(self._my_member,), - distribution=(self.global_time,), payload=(cache.number, keywords)) - - self._dispersy._send(candidates, [message]) - - return len(candidates) - - def on_search(self, messages): - for message in messages: - keywords = message.payload.keywords - - if DEBUG: - self._logger.debug(u"got search request for %s", keywords) - - if self.log_incoming_searches: - self.log_incoming_searches(message.candidate.sock_addr, keywords) - - results = [] - dbresults = self._torrent_db.searchNames(keywords, local=False, keys=['infohash', 'T.name', 'T.length', 'T.num_files', 'T.category', 'T.creation_date', 'T.num_seeders', 'T.num_leechers']) - if len(dbresults) > 0: - for dbresult in dbresults: - channel_details = dbresult[-10:] - - dbresult = list(dbresult[:8]) - dbresult[2] = long(dbresult[2]) # length - dbresult[3] = int(dbresult[3]) # num_files - dbresult[4] = [dbresult[4]] # category - dbresult[5] = long(dbresult[5]) # creation_date - dbresult[6] = int(dbresult[6] or 0) # num_seeders - dbresult[7] = int(dbresult[7] or 0) # num_leechers - - # cid - if channel_details[1]: - channel_details[1] = str(channel_details[1]) - dbresult.append(channel_details[1]) - - results.append(tuple(dbresult)) - elif DEBUG: - self._logger.debug(u"no results") - - self._create_search_response(message.payload.identifier, results, message.candidate) - - def _create_search_response(self, identifier, results, candidate): - # create search-response message - meta = self.get_meta_message(u"search-response") - message = meta.impl(authentication=(self._my_member,), - distribution=(self.global_time,), destination=(candidate,), payload=(identifier, results)) - self._dispersy._forward([message]) - - if DEBUG: - self._logger.debug(u"returning %s results to %s", len(results), candidate) - - def on_search_response(self, messages): - # _get_channel_community could cause multiple commits, using this with 
clause this is reduced to only one. - with self._dispersy.database: - for message in messages: - # fetch callback using identifier - search_request = self._request_cache.get(u"search", message.payload.identifier) - if search_request: - if DEBUG: - self._logger.debug(u"SearchCommunity: got search response for %s %s %s", - search_request.keywords, len(message.payload.results), message.candidate) - - if len(message.payload.results) > 0: - self._torrent_db.on_search_response(message.payload.results) - - # emit signal of search results - if self.tribler_session is not None: - from Tribler.Core.simpledefs import SIGNAL_SEARCH_COMMUNITY, SIGNAL_ON_SEARCH_RESULTS - search_results = {'keywords': search_request.keywords, - 'results': message.payload.results, - 'candidate': message.candidate} - self._notifier.notify(SIGNAL_SEARCH_COMMUNITY, SIGNAL_ON_SEARCH_RESULTS, None, - search_results) - - # see if we need to join some channels - channels = set([result[8] for result in message.payload.results if result[8]]) - if channels: - channels = self._get_unknown_channels(channels) - - if DEBUG: - self._logger.debug(u"SearchCommunity: joining %d preview communities", len(channels)) - - for cid in channels: - community = self._get_channel_community(cid) - community.disp_create_missing_channel(message.candidate, includeSnapshot=False) - else: - if DEBUG: - self._logger.debug(u"SearchCommunity: got search response identifier not found %s", - message.payload.identifier) - - # ensure that no commits occur - raise IgnoreCommits() - - def create_torrent_request(self, infohash, candidate): - torrentdict = {} - torrentdict[self._master_member.mid] = set([infohash, ]) - - # create torrent-request message - meta = self.get_meta_message(u"torrent-request") - message = meta.impl(authentication=(self._my_member,), - distribution=(self.global_time,), destination=(candidate,), payload=(torrentdict,)) - self._dispersy._forward([message]) - - if DEBUG: - nr_requests = sum([len(cid_torrents) for cid_torrents in torrentdict.values()]) - self._logger.debug(u"requesting %s TorrentMessages from %s", nr_requests, candidate) - - def on_torrent_request(self, messages): - for message in messages: - requested_packets = [] - for cid, torrents in message.payload.torrents.iteritems(): - requested_packets.extend(self._get_packets_from_infohashes(cid, torrents)) - - if requested_packets: - self._dispersy._send_packets([message.candidate], requested_packets, - self, u"-caused by on-torrent-request-") - - if DEBUG: - self._logger.debug(u"got request for %s torrents from %s", len(requested_packets), message.candidate) - - class PingRequestCache(RandomNumberCache): - - def __init__(self, community, candidate): - super(SearchCommunity.PingRequestCache, self).__init__(community._request_cache, u"ping") - - self.community = community - self.candidate = candidate - - @property - def timeout_delay(self): - return 10.5 - - def on_timeout(self): - refresh_if = time() - CANDIDATE_WALK_LIFETIME - remove = None - for taste_buddy in self.community.taste_buddies: - if taste_buddy[2] == self.candidate: - if taste_buddy[1] < refresh_if: - remove = taste_buddy - break - - if remove: - self.community.taste_buddies.remove(remove) - - def create_torrent_collect_requests(self, candidates=None): - if candidates is None: - refresh_if = time() - CANDIDATE_WALK_LIFETIME - # determine to which peers we need to send a ping - candidates = [candidate for _, prev, candidate in self.taste_buddies if prev < refresh_if] - - if len(candidates) > 0: - 
self._create_pingpong(u"torrent-collect-request", candidates) - - def on_torrent_collect_request(self, messages): - candidates = [message.candidate for message in messages] - identifiers = [message.payload.identifier for message in messages] - - self._create_pingpong(u"torrent-collect-response", candidates, identifiers) - self._process_collect_request_response(messages) - - def on_torrent_collect_response(self, messages): - self._process_collect_request_response(messages) - - def _process_collect_request_response(self, messages): - to_insert_list = [] - to_collect_dict = {} - to_popularity_dict = {} - for message in messages: - # check if the identifier is still in the request_cache because it could be timed out - if not self.request_cache.has(u"ping", message.payload.identifier): - self._logger.warn(u"message from %s cannot be found in the request cache, skipping it", - message.candidate) - continue - self.request_cache.pop(u"ping", message.payload.identifier) - - if message.payload.hashtype == SWIFT_INFOHASHES: - for infohash, seeders, leechers, ago in message.payload.torrents: - if not infohash: - continue - elif infohash not in to_insert_list: - to_insert_list.append(infohash) - to_popularity_dict[infohash] = [seeders, leechers, time() - (ago * 60)] - to_collect_dict.setdefault(infohash, []).append(message.candidate) - - if len(to_insert_list) > 0: - while to_insert_list: - self._torrent_db.on_torrent_collect_response(to_insert_list[:50]) - to_insert_list = to_insert_list[50:] - - infohashes_to_collect = [infohash for infohash in to_collect_dict - if infohash and not self.tribler_session.has_collected_torrent(infohash)] - if infohashes_to_collect: - for infohash in infohashes_to_collect[:5]: - for candidate in to_collect_dict[infohash]: - self._logger.debug(u"requesting .torrent after receiving ping/pong %s %s", - candidate, hexlify(infohash)) - - # low_prio changes, hence we need to import it here - from Tribler.Core.RemoteTorrentHandler import LOW_PRIO_COLLECTING - self._rtorrent_handler.download_torrent(candidate, infohash, priority=LOW_PRIO_COLLECTING, - timeout=CANDIDATE_WALK_LIFETIME) - - sock_addrs = [message.candidate.sock_addr for message in messages] - for taste_buddy in self.taste_buddies: - if taste_buddy[2].sock_addr in sock_addrs: - taste_buddy[1] = time() - - def _create_pingpong(self, meta_name, candidates, identifiers=None): - max_len = self.dispersy_sync_bloom_filter_bits / 8 - torrents = self.__get_torrents(int(max_len / 44)) - for index, candidate in enumerate(candidates): - if identifiers: - identifier = identifiers[index] - else: - cache = self._request_cache.add(SearchCommunity.PingRequestCache(self, candidate)) - identifier = cache.number - - # create torrent-collect-request/response message - meta = self.get_meta_message(meta_name) - message = meta.impl(authentication=(self._my_member,), - distribution=(self.global_time,), destination=(candidate,), - payload=(identifier, SWIFT_INFOHASHES, torrents)) - - self._dispersy._forward([message]) - self._logger.debug(u"send %s to %s", meta_name, candidate) - - def __get_torrents(self, limit): - cache_timeout = CANDIDATE_WALK_LIFETIME - if self.torrent_cache and self.torrent_cache[0] > (time() - cache_timeout): - return self.torrent_cache[1] - - # we want roughly 1/3 random, 2/3 recent - limit_recent = int(limit * 0.66) - limit_random = limit - limit_recent - - torrents = self._torrent_db.getRecentlyCollectedTorrents(limit=limit_recent) or [] - if len(torrents) == limit_recent: - # index 4 is insert_time - least_recent = 
torrents[-1][4] - random_torrents = self._torrent_db.getRandomlyCollectedTorrents(least_recent, limit=limit_random) or [] - else: - random_torrents = [] - - torrents = [[tor[0], tor[1], tor[2], tor[3]] for tor in torrents] - random_torrents = [[tor[0], tor[1], tor[2], tor[3]] for tor in random_torrents] - - # combine random and recent + shuffle to obscure categories - torrents = torrents + random_torrents - shuffle(torrents) - - # fix leechers, seeders to max 2**16 (shift values +2 to accomodate -2 and -1 values) - max_value = (2 ** 16) - 1 - for torrent in torrents: - # index 1 and 2 are num_seeders and num_leechers respectively - torrent[1] = min(max_value, (torrent[1] or -1) + 2) - torrent[2] = min(max_value, (torrent[2] or -1) + 2) - - # index 3 is last_tracker_check, convert to minutes - torrent[3] /= 60 - if torrent[3] > max_value or torrent[3] < 0: - torrent[3] = max_value - - self.torrent_cache = (time(), torrents) - return torrents - - def create_torrent(self, infohash, store=True, update=True, forward=True): - torrent_data = self.tribler_session.get_collected_torrent(infohash) - if torrent_data is not None: - try: - torrentdef = TorrentDef.load_from_memory(torrent_data) - files = torrentdef.get_files_with_length() - - meta = self.get_meta_message(u"torrent") - message = meta.impl(authentication=(self._my_member,), - distribution=(self.claim_global_time(),), - payload=(torrentdef.get_infohash(), long(time()), torrentdef.get_name_as_unicode(), - tuple(files), torrentdef.get_trackers_as_single_tuple())) - - self._dispersy.store_update_forward([message], store, update, forward) - self._torrent_db.updateTorrent(torrentdef.get_infohash(), notify=False, dispersy_id=message.packet_id) - - return message - except ValueError: - pass - except: - print_exc() - return False - - def on_torrent(self, messages): - for message in messages: - self._torrent_db.addExternalTorrentNoDef(message.payload.infohash, message.payload.name, message.payload.files, message.payload.trackers, message.payload.timestamp, {'dispersy_id': message.packet_id}) - - def _get_channel_id(self, cid): - assert isinstance(cid, str) - assert len(cid) == 20 - - return self._channelcast_db._db.fetchone(u"SELECT id FROM Channels WHERE dispersy_cid = ?", (buffer(cid),)) - - def _get_unknown_channels(self, cids): - assert all(isinstance(cid, str) for cid in cids) - assert all(len(cid) == 20 for cid in cids) - - parameters = u",".join(["?"] * len(cids)) - known_cids = self._channelcast_db._db.fetchall(u"SELECT dispersy_cid FROM Channels WHERE dispersy_cid in (" + parameters + u")", map(buffer, cids)) - known_cids = map(str, known_cids) - return [cid for cid in cids if cid not in known_cids] - - def _get_channel_community(self, cid): - assert isinstance(cid, str) - assert len(cid) == 20 - - try: - return self._dispersy.get_community(cid, True) - except CommunityNotFoundException: - self._logger.debug(u"join preview community %s", cid.encode("HEX")) - return PreviewChannelCommunity.init_community(self._dispersy, self._dispersy.get_member(mid=cid), - self._my_member, tribler_session=self.tribler_session) - - def _get_packets_from_infohashes(self, cid, infohashes): - packets = [] - - def add_packet(dispersy_id): - if dispersy_id and dispersy_id > 0: - try: - packet = self._get_packet_from_dispersy_id(dispersy_id, "torrent") - if packet: - packets.append(packet) - except RuntimeError: - pass - - if cid == self._master_member.mid: - channel_id = None - else: - channel_id = self._get_channel_id(cid) - - for infohash in infohashes: - 
dispersy_id = None - - # 1. try to find the torrentmessage for this cid, infohash combination - if channel_id: - dispersy_id = self._channelcast_db.getTorrentFromChannelId(channel_id, infohash, ['ChannelTorrents.dispersy_id']) - else: - torrent = self._torrent_db.getTorrent(infohash, ['dispersy_id'], include_mypref=False) - if torrent: - dispersy_id = torrent['dispersy_id'] - - # 2. if still not found, create a new torrentmessage and return this one - if not dispersy_id: - message = self.create_torrent(infohash, store=True, update=False, forward=False) - if message: - packets.append(message.packet) - add_packet(dispersy_id) - return packets - - def _get_packet_from_dispersy_id(self, dispersy_id, messagename): - # 1. get the packet - try: - packet, _ = self._dispersy.database.execute(u"SELECT sync.packet, sync.id FROM community JOIN sync ON sync.community = community.id WHERE sync.id = ?", (dispersy_id,)).next() - except StopIteration: - raise RuntimeError(u"Unknown dispersy_id") - - return str(packet) - - -class ChannelCastDBStub(object): - - def __init__(self, dispersy): - self._dispersy = dispersy - - self.cachedTorrents = None - - def convert_to_messages(self, results): - messages = self._dispersy.convert_packets_to_messages(str(packet) for packet, _ in results) - for packet_id, message in zip((packet_id for _, packet_id in results), messages): - if message: - message.packet_id = packet_id - yield message.community.cid, message - - def newTorrent(self, message): - self._cachedTorrents[message.payload.infohash] = message - - def hasTorrents(self, channel_id, infohashes): - returnAr = [] - for infohash in infohashes: - if infohash in self._cachedTorrents: - returnAr.append(True) - else: - returnAr.append(False) - return returnAr - - def getTorrentFromChannelId(self, channel_id, infohash, keys): - if infohash in self._cachedTorrents: - return self._cachedTorrents[infohash].packet_id - - def on_dynamic_settings(self, channel_id): - pass - - @property - def _cachedTorrents(self): - if self.cachedTorrents is None: - self.cachedTorrents = {} - self._cacheTorrents() - - return self.cachedTorrents - - def _cacheTorrents(self): - sql = u"SELECT sync.packet, sync.id FROM sync JOIN meta_message ON sync.meta_message = meta_message.id JOIN community ON community.id = sync.community WHERE meta_message.name = 'torrent'" - results = list(self._dispersy.database.execute(sql)) - messages = self.convert_to_messages(results) - - for _, message in messages: - self._cachedTorrents[message.payload.infohash] = message diff --git a/Tribler/community/search/conversion.py b/Tribler/community/search/conversion.py deleted file mode 100644 index 21b2f1c0a71..00000000000 --- a/Tribler/community/search/conversion.py +++ /dev/null @@ -1,332 +0,0 @@ -""" -Data conversions for the search community. 
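
One detail of the SearchCommunity deleted above worth spelling out is its ranking math: __calc_similarity scores a candidate by the preference overlap normalised by the geometric mean of the two peers' preference-list sizes, a cosine-style similarity over bloom-filter hits. Restated as a plain function that returns only the score, without the [sim, time, candidate] bookkeeping:

def calc_similarity(my_prefs, his_prefs, overlap):
    # overlap / (sqrt(my_prefs) * sqrt(his_prefs)); zero when either list is empty
    if my_prefs > 0 and his_prefs > 0:
        return overlap / ((my_prefs ** 0.5) * (his_prefs ** 0.5))
    return 0.0

assert calc_similarity(0, 10, 0) == 0.0
assert abs(calc_similarity(100, 100, 50) - 0.5) < 1e-9
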
- -Author(s): Niels Zeilemaker -""" -import zlib -from math import ceil -from random import choice, sample -from struct import pack, unpack_from - -from Tribler.dispersy.bloomfilter import BloomFilter -from Tribler.dispersy.conversion import BinaryConversion -from Tribler.dispersy.message import DropPacket -from Tribler.pyipv8.ipv8.messaging.deprecated.encoding import encode, decode - - -class SearchConversion(BinaryConversion): - - def __init__(self, community): - super(SearchConversion, self).__init__(community, "\x02") - self.define_meta_message(chr(1), community.get_meta_message(u"search-request"), self._encode_search_request, self._decode_search_request) - self.define_meta_message(chr(2), community.get_meta_message(u"search-response"), self._encode_search_response, self._decode_search_response) - self.define_meta_message(chr(3), community.get_meta_message(u"torrent-request"), self._encode_torrent_request, self._decode_torrent_request) - self.define_meta_message(chr(4), community.get_meta_message(u"torrent-collect-request"), self._encode_torrent_collect_request, self._decode_torrent_collect_request) - self.define_meta_message(chr(5), community.get_meta_message(u"torrent-collect-response"), self._encode_torrent_collect_response, self._decode_torrent_collect_response) - self.define_meta_message(chr(6), community.get_meta_message(u"torrent"), self._encode_torrent, self._decode_torrent) - - def _encode_introduction_request(self, message): - data = BinaryConversion._encode_introduction_request(self, message) - - if message.payload.taste_bloom_filter: - data.extend((pack('!IBH', message.payload.num_preferences, message.payload.taste_bloom_filter.functions, message.payload.taste_bloom_filter.size), message.payload.taste_bloom_filter.prefix, message.payload.taste_bloom_filter.bytes)) - return data - - def _decode_introduction_request(self, placeholder, offset, data): - offset, payload = BinaryConversion._decode_introduction_request(self, placeholder, offset, data) - - # if there's still bytes in this request, treat them as taste_bloom_filter - has_stuff = len(data) > offset - if has_stuff: - if len(data) < offset + 8: - raise DropPacket("Insufficient packet size") - - num_preferences, functions, size = unpack_from('!IBH', data, offset) - offset += 7 - - prefix = data[offset] - offset += 1 - - if not 0 < num_preferences: - raise DropPacket("Invalid num_preferences value") - if not 0 < functions: - raise DropPacket("Invalid functions value") - if not 0 < size: - raise DropPacket("Invalid size value") - if not size % 8 == 0: - raise DropPacket("Invalid size value, must be a multiple of eight") - - length = int(ceil(size / 8)) - if not length == len(data) - offset: - raise DropPacket("Invalid number of bytes available (irq) %d, %d, %d" % (length, len(data) - offset, size)) - - taste_bloom_filter = BloomFilter(data[offset:offset + length], functions, prefix=prefix) - offset += length - - payload.set_num_preferences(num_preferences) - payload.set_taste_bloom_filter(taste_bloom_filter) - - return offset, payload - - def _encode_search_request(self, message): - packet = pack('!H', message.payload.identifier), message.payload.keywords - if message.payload.bloom_filter: - packet = packet + (message.payload.bloom_filter.functions, message.payload.bloom_filter.prefix, message.payload.bloom_filter.bytes) - packet = encode(packet) - return packet, - - def _decode_search_request(self, placeholder, offset, data): - try: - offset, payload = decode(data, offset) - except ValueError: - raise DropPacket("Unable 
to decode the search-payload")
-
-        if len(payload) < 2:
-            raise DropPacket("Invalid payload length")
-
-        identifier, keywords = payload[:2]
-
-        if len(identifier) != 2:
-            raise DropPacket("Unable to decode the search-payload, got %d bytes expected 2" % (len(identifier)))
-        identifier, = unpack_from('!H', identifier)
-
-        if not isinstance(keywords, list):
-            raise DropPacket("Invalid 'keywords' type")
-        for keyword in keywords:
-            if not isinstance(keyword, unicode):
-                raise DropPacket("Invalid 'keyword' type")
-
-        if len(payload) > 5:
-            functions, prefix, bytes_ = payload[2:6]
-
-            if not isinstance(functions, int):
-                raise DropPacket("Invalid functions type")
-            if not 0 < functions:
-                raise DropPacket("Invalid functions value")
-
-            size = len(bytes_)
-            if not 0 < size:
-                raise DropPacket("Invalid size of bloomfilter")
-            if not size % 8 == 0:
-                raise DropPacket("Invalid size of bloomfilter, must be a multiple of eight")
-
-            if not isinstance(prefix, str):
-                raise DropPacket("Invalid prefix type")
-            if not 0 <= len(prefix) < 256:
-                raise DropPacket("Invalid prefix length")
-
-            bloom_filter = BloomFilter(bytes_, functions, prefix=prefix)
-        else:
-            bloom_filter = None
-
-        return offset, placeholder.meta.payload.implement(identifier, keywords, bloom_filter)
-
-    def _encode_search_response(self, message):
-        packet = pack('!H', message.payload.identifier), message.payload.results
-        return encode(packet),
-
-    def _decode_search_response(self, placeholder, offset, data):
-        try:
-            offset, payload = decode(data, offset)
-        except (ValueError, KeyError):
-            raise DropPacket("Unable to decode the search-response-payload")
-
-        if len(payload) < 2:
-            raise DropPacket("Invalid payload length")
-
-        identifier, results = payload[:2]
-
-        if len(identifier) != 2:
-            raise DropPacket("Unable to decode the search-response-payload, got %d bytes expected 2" % (len(identifier)))
-        identifier, = unpack_from('!H', identifier)
-
-        if not isinstance(results, list):
-            raise DropPacket("Invalid 'results' type")
-
-        for result in results:
-            if not isinstance(result, tuple):
-                raise DropPacket("Invalid result type")
-
-            if len(result) < 9:
-                raise DropPacket("Invalid result length")
-
-            infohash, swarmname, length, nrfiles, category_list, creation_date, seeders, leechers, cid = result[:9]
-
-            if not isinstance(infohash, str):
-                raise DropPacket("Invalid infohash type")
-            if len(infohash) != 20:
-                raise DropPacket("Invalid infohash length")
-
-            if not isinstance(swarmname, unicode):
-                raise DropPacket("Invalid swarmname type")
-
-            if not isinstance(length, long):
-                raise DropPacket("Invalid length type '%s'" % type(length))
-
-            if not isinstance(nrfiles, int):
-                raise DropPacket("Invalid nrfiles type")
-
-            if not isinstance(category_list, list) or not all(isinstance(key, unicode) for key in category_list):
-                raise DropPacket("Invalid category_list type")
-
-            if not isinstance(creation_date, long):
-                raise DropPacket("Invalid creation_date type")
-
-            if not isinstance(seeders, int):
-                raise DropPacket("Invalid seeders type '%s'" % type(seeders))
-
-            if not isinstance(leechers, int):
-                raise DropPacket("Invalid leechers type '%s'" % type(leechers))
-
-            if cid:
-                if not isinstance(cid, str):
-                    raise DropPacket("Invalid cid type")
-
-                if len(cid) != 20:
-                    raise DropPacket("Invalid cid length")
-
-        return offset, placeholder.meta.payload.implement(identifier, results)
-
-    def _encode_torrent_request(self, message):
-        max_len = self._community.dispersy_sync_bloom_filter_bits / 8
-
-        def create_msg():
-            return 
encode(message.payload.torrents) - - packet = create_msg() - while len(packet) > max_len: - community = choice(message.payload.torrents.keys()) - nrTorrents = len(message.payload.torrents[community]) - if nrTorrents == 1: - del message.payload.torrents[community] - else: - message.payload.torrents[community] = set(sample(message.payload.torrents[community], nrTorrents - 1)) - - packet = create_msg() - return packet, - - def _decode_torrent_request(self, placeholder, offset, data): - try: - offset, payload = decode(data, offset) - except ValueError: - raise DropPacket("Unable to decode the torrent-request") - - if not isinstance(payload, dict): - raise DropPacket("Invalid payload type") - - for cid, infohashes in payload.iteritems(): - if not (isinstance(cid, str) and len(cid) == 20): - raise DropPacket("Invalid 'cid' type or value") - - for infohash in infohashes: - if not (isinstance(infohash, str) and len(infohash) == 20): - raise DropPacket("Invalid 'infohash' type or value") - return offset, placeholder.meta.payload.implement(payload) - - def _encode_torrent_collect_request(self, message): - for torrent in message.payload.torrents: - if torrent[1] > 2 ** 16 or torrent[1] < 0: - self._logger.info("seeder value is incorrect %s", torrent[1]) - if torrent[2] > 2 ** 16 or torrent[2] < 0: - self._logger.info("leecher value is incorrect %s", torrent[2]) - if torrent[3] > 2 ** 16 or torrent[3] < 0: - self._logger.info("since value is incorrect %s", torrent[3]) - - hashpack = '20sHHH' * len(message.payload.torrents) - torrents = [item for sublist in message.payload.torrents for item in sublist] - return pack('!HH' + hashpack, message.payload.identifier, message.payload.hashtype, *torrents), - - def _decode_torrent_collect_request(self, placeholder, offset, data): - if len(data) < offset + 4: - raise DropPacket("Insufficient packet size") - - identifier, hashtype = unpack_from('!HH', data, offset) - offset += 4 - - length = len(data) - offset - if length % 26 != 0: - raise DropPacket("Invalid number of bytes available (tcr)") - - if length: - hashpack = '20sHHH' * (length / 26) - hashes = unpack_from('!' + hashpack, data, offset) - offset += length - - torrents = [] - for i in range(0, len(hashes), 4): - torrents.append([hashes[i], hashes[i + 1], hashes[i + 2], hashes[i + 3]]) - else: - torrents = [] - return offset, placeholder.meta.payload.implement(identifier, hashtype, torrents) - - def _encode_torrent_collect_response(self, message): - return self._encode_torrent_collect_request(message) - - def _decode_torrent_collect_response(self, placeholder, offset, data): - return self._decode_torrent_collect_request(placeholder, offset, data) - - def _encode_torrent(self, message): - max_len = self._community.dispersy_sync_bloom_filter_bits / 8 - - files = message.payload.files - trackers = message.payload.trackers - - def create_msg(): - normal_msg = pack('!20sQ', message.payload.infohash, message.payload.timestamp), message.payload.name, tuple(files), tuple(trackers) - normal_msg = encode(normal_msg) - return zlib.compress(normal_msg) - - compressed_msg = create_msg() - while len(compressed_msg) > max_len: - if len(trackers) > 10: - # only use first 10 trackers, .torrents in the wild have been seen to have 1000+ trackers... 
- trackers = trackers[:10] - else: - # reduce files by the amount we are currently to big - reduce_by = max_len / (len(compressed_msg) * 1.0) - nr_files_to_include = int(len(files) * reduce_by) - files = sample(files, nr_files_to_include) - - compressed_msg = create_msg() - return compressed_msg, - - def _decode_torrent(self, placeholder, offset, data): - uncompressed_data = zlib.decompress(data[offset:]) - offset = len(data) - - try: - _, values = decode(uncompressed_data) - except ValueError: - raise DropPacket("Unable to decode the torrent-payload") - - infohash_time, name, files, trackers = values - if len(infohash_time) != 28: - raise DropPacket("Unable to decode the torrent-payload, got %d bytes expected 28" % (len(infohash_time))) - infohash, timestamp = unpack_from('!20sQ', infohash_time) - - if not isinstance(name, unicode): - raise DropPacket("Invalid 'name' type") - - if not isinstance(files, tuple): - raise DropPacket("Invalid 'files' type") - - if len(files) == 0: - raise DropPacket("Should have at least one file") - - for file in files: - if len(file) != 2: - raise DropPacket("Invalid 'file_len' type") - - path, length = file - if not isinstance(path, unicode): - raise DropPacket("Invalid 'files_path' type is %s" % type(path)) - if not isinstance(length, (int, long)): - raise DropPacket("Invalid 'files_length' type is %s" % type(length)) - - if not isinstance(trackers, tuple): - raise DropPacket("Invalid 'trackers' type") - for tracker in trackers: - if not isinstance(tracker, str): - raise DropPacket("Invalid 'tracker' type") - - return offset, placeholder.meta.payload.implement(infohash, timestamp, name, files, trackers) diff --git a/Tribler/community/search/payload.py b/Tribler/community/search/payload.py deleted file mode 100644 index e05a8941ffb..00000000000 --- a/Tribler/community/search/payload.py +++ /dev/null @@ -1,170 +0,0 @@ -""" -Dispersy Payload implementation for the search community. 
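
The torrent-collect wire format removed in the conversion code above is a fixed-record stream: a '!HH' header (identifier, hashtype) followed by one 26-byte '20sHHH' record per torrent (infohash, seeders, leechers, age in minutes), which is why the decoder rejects any remainder that is not a multiple of 26 bytes. A round-trip sketch with hypothetical helper names:

from struct import calcsize, pack, unpack_from

def encode_collect(identifier, hashtype, torrents):
    # header, then one fixed 26-byte record per (infohash, seeders, leechers, ago)
    flat = [field for record in torrents for field in record]
    return pack('!HH' + '20sHHH' * len(torrents), identifier, hashtype, *flat)

def decode_collect(data):
    identifier, hashtype = unpack_from('!HH', data)
    body = len(data) - calcsize('!HH')
    assert body % 26 == 0, "truncated record stream"
    fields = unpack_from('!' + '20sHHH' * (body // 26), data, calcsize('!HH'))
    torrents = [tuple(fields[i:i + 4]) for i in range(0, len(fields), 4)]
    return identifier, hashtype, torrents

msg = encode_collect(7, 0, [(b'\x01' * 20, 5, 2, 60)])
assert decode_collect(msg) == (7, 0, [(b'\x01' * 20, 5, 2, 60)])
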
- -Author(s): Niels Zeilemaker -""" -from Tribler.dispersy.bloomfilter import BloomFilter -from Tribler.dispersy.payload import Payload, IntroductionRequestPayload - - -class TasteIntroPayload(IntroductionRequestPayload): - - class Implementation(IntroductionRequestPayload.Implementation): - - def __init__(self, meta, destination_address, source_lan_address, source_wan_address, advice, connection_type, sync, identifier, num_preferences=0, taste_bloom_filter=None): - IntroductionRequestPayload.Implementation.__init__(self, meta, destination_address, source_lan_address, source_wan_address, advice, connection_type, sync, identifier) - - self._num_preferences = num_preferences - self._taste_bloom_filter = taste_bloom_filter - - def set_num_preferences(self, num_preferences): - self._num_preferences = num_preferences - - def set_taste_bloom_filter(self, taste_bloom_filter): - self._taste_bloom_filter = taste_bloom_filter - - @property - def num_preferences(self): - return self._num_preferences - - @property - def taste_bloom_filter(self): - return self._taste_bloom_filter - - -class SearchRequestPayload(Payload): - - class Implementation(Payload.Implementation): - - def __init__(self, meta, identifier, keywords, bloom_filter=None): - if __debug__: - assert isinstance(identifier, int), type(identifier) - assert isinstance(keywords, list), 'keywords should be list' - for keyword in keywords: - assert isinstance(keyword, unicode), '%s is type %s' % (keyword, type(keyword)) - assert len(keyword) > 0 - - assert not bloom_filter or isinstance(bloom_filter, BloomFilter), type(bloom_filter) - - super(SearchRequestPayload.Implementation, self).__init__(meta) - self._identifier = identifier - self._keywords = keywords - self._bloom_filter = bloom_filter - - @property - def identifier(self): - return self._identifier - - @property - def keywords(self): - return self._keywords - - @property - def bloom_filter(self): - return self._bloom_filter - - -class SearchResponsePayload(Payload): - - class Implementation(Payload.Implementation): - - def __init__(self, meta, identifier, results): - if __debug__: - assert isinstance(identifier, int), type(identifier) - assert isinstance(results, list), type(results) - for result in results: - assert isinstance(result, tuple), type(result) - assert len(result) > 8 - - infohash, swarmname, length, nrfiles, category_list, creation_date, seeders, leechers, cid = result[:9] - assert isinstance(infohash, str), type(infohash) - assert len(infohash) == 20 - assert isinstance(swarmname, unicode), type(swarmname) - assert isinstance(length, long), type(length) - assert isinstance(nrfiles, int), type(nrfiles) - assert isinstance(category_list, list), type(category_list) - assert all(isinstance(key, unicode) for key in category_list), category_list - assert isinstance(creation_date, long), type(creation_date) - assert isinstance(seeders, int), type(seeders) - assert isinstance(leechers, int), type(leechers) - assert not cid or isinstance(cid, str), type(cid) - assert not cid or len(cid) == 20, cid - - super(SearchResponsePayload.Implementation, self).__init__(meta) - self._identifier = identifier - self._results = results - - @property - def identifier(self): - return self._identifier - - @property - def results(self): - return self._results - - -class TorrentRequestPayload(Payload): - - class Implementation(Payload.Implementation): - - def __init__(self, meta, torrents): - if __debug__: - assert isinstance(torrents, dict), type(torrents) - for cid, infohashes in 
torrents.iteritems(): - assert isinstance(cid, str) - assert len(cid) == 20 - assert isinstance(infohashes, set) - assert not filter(lambda x: not isinstance(x, str), infohashes) - assert not filter(lambda x: not len(x) == 20, infohashes) - assert len(infohashes) > 0 - - super(TorrentRequestPayload.Implementation, self).__init__(meta) - self._torrents = torrents - - @property - def torrents(self): - return self._torrents - - -class TorrentCollectRequestPayload(Payload): - - class Implementation(Payload.Implementation): - - def __init__(self, meta, identifier, hashtype, torrents): - if __debug__: - assert isinstance(identifier, int), type(identifier) - assert isinstance(torrents, list), type(torrents) - for infohash, seeders, leechers, ago in torrents: - assert isinstance(infohash, str) - assert len(infohash) == 20, "%d, %s" % (len(infohash), infohash) - assert isinstance(seeders, int), type(seeders) - assert 0 <= seeders < 2 ** 16, seeders - assert isinstance(leechers, int), type(leechers) - assert 0 <= leechers < 2 ** 16, leechers - assert isinstance(ago, int), type(ago) - assert 0 <= ago < 2 ** 16, ago - - assert isinstance(hashtype, int), type(hashtype) - assert 0 <= hashtype < 2 ** 16, hashtype - - super(TorrentCollectRequestPayload.Implementation, self).__init__(meta) - - self._identifier = identifier - self._hashtype = hashtype - self._torrents = torrents - - @property - def identifier(self): - return self._identifier - - @property - def hashtype(self): - return self._hashtype - - @property - def torrents(self): - return self._torrents - - -class TorrentCollectResponsePayload(TorrentCollectRequestPayload): - pass diff --git a/Tribler/dispersy b/Tribler/dispersy deleted file mode 160000 index 51045631865..00000000000 --- a/Tribler/dispersy +++ /dev/null @@ -1 +0,0 @@ -Subproject commit 5104563186541a3413da34f6cce5123d972a2a47 diff --git a/TriblerGUI/debug_window.py b/TriblerGUI/debug_window.py index a4c80f156bb..51608893391 100644 --- a/TriblerGUI/debug_window.py +++ b/TriblerGUI/debug_window.py @@ -275,13 +275,9 @@ def on_tribler_statistics(self, data): self.window().general_tree_widget.clear() self.create_and_add_widget_item("Tribler version", self.tribler_version, self.window().general_tree_widget) self.create_and_add_widget_item("Number of channels", data["num_channels"], self.window().general_tree_widget) - self.create_and_add_widget_item("Database size", format_size(data["database_size"]), + self.create_and_add_widget_item("Database size", format_size(data["db_size"]), self.window().general_tree_widget) - self.create_and_add_widget_item("Number of collected torrents", data["torrents"]["num_collected"], - self.window().general_tree_widget) - self.create_and_add_widget_item("Number of torrent files", data["torrents"]["num_files"], - self.window().general_tree_widget) - self.create_and_add_widget_item("Total size of torrent files", format_size(data["torrents"]["total_size"]), + self.create_and_add_widget_item("Number of known torrents", data["num_torrents"], self.window().general_tree_widget) self.create_and_add_widget_item("", "", self.window().general_tree_widget) diff --git a/TriblerGUI/defs.py b/TriblerGUI/defs.py index 1915646f2c0..514d76f071e 100644 --- a/TriblerGUI/defs.py +++ b/TriblerGUI/defs.py @@ -14,16 +14,15 @@ PAGE_VIDEO_PLAYER = 5 PAGE_SUBSCRIBED_CHANNELS = 6 PAGE_DOWNLOADS = 7 -PAGE_PLAYLIST_DETAILS = 8 -PAGE_LOADING = 9 -PAGE_DISCOVERING = 10 -PAGE_DISCOVERED = 11 -PAGE_TRUST = 12 -PAGE_MARKET = 13 -PAGE_MARKET_TRANSACTIONS = 14 -PAGE_MARKET_WALLETS = 15 
-PAGE_MARKET_ORDERS = 16 -PAGE_TOKEN_MINING_PAGE = 17 +PAGE_LOADING = 8 +PAGE_DISCOVERING = 9 +PAGE_DISCOVERED = 10 +PAGE_TRUST = 11 +PAGE_MARKET = 12 +PAGE_MARKET_TRANSACTIONS = 13 +PAGE_MARKET_WALLETS = 14 +PAGE_MARKET_ORDERS = 15 +PAGE_TOKEN_MINING_PAGE = 16 PAGE_CHANNEL_CONTENT = 0 PAGE_CHANNEL_COMMENTS = 1 @@ -32,12 +31,7 @@ PAGE_EDIT_CHANNEL_OVERVIEW = 0 PAGE_EDIT_CHANNEL_SETTINGS = 1 PAGE_EDIT_CHANNEL_TORRENTS = 2 -PAGE_EDIT_CHANNEL_PLAYLISTS = 3 -PAGE_EDIT_CHANNEL_RSS_FEEDS = 4 -PAGE_EDIT_CHANNEL_PLAYLIST_EDIT = 5 -PAGE_EDIT_CHANNEL_PLAYLIST_TORRENTS = 6 -PAGE_EDIT_CHANNEL_PLAYLIST_MANAGE = 7 -PAGE_EDIT_CHANNEL_CREATE_TORRENT = 8 +PAGE_EDIT_CHANNEL_CREATE_TORRENT = 3 PAGE_SETTINGS_GENERAL = 0 PAGE_SETTINGS_CONNECTION = 1 @@ -92,16 +86,26 @@ STATUS_UNKNOWN = 1 STATUS_DEAD = 2 -# Torrent channel commit status -COMMITTED = 0 -UNCOMMITTED = 1 -TODELETE = 2 - # Tribler shutdown grace period in milliseconds SHUTDOWN_WAITING_PERIOD = 120000 # Garbage collection timer (in minutes) GC_TIMEOUT = 10 +ACTION_BUTTONS = u'action_buttons' + +# Torrent commit status constants +COMMIT_STATUS_NEW = 0 +COMMIT_STATUS_TODELETE = 1 +COMMIT_STATUS_COMMITTED = 2 + + +HEALTH_CHECKING = u'Checking..' +HEALTH_DEAD = u'No peers' +HEALTH_ERROR = u'Error' +HEALTH_MOOT = u'Peers found' +HEALTH_GOOD = u'Seeds found' +HEALTH_UNCHECKED = u'Unknown' + # Interval for refreshing the results in the debug pane DEBUG_PANE_REFRESH_TIMEOUT = 5000 # 5 seconds diff --git a/TriblerGUI/dialogs/confirmationdialog.py b/TriblerGUI/dialogs/confirmationdialog.py index 2e7a8ade6f3..519cfb3c3c7 100644 --- a/TriblerGUI/dialogs/confirmationdialog.py +++ b/TriblerGUI/dialogs/confirmationdialog.py @@ -1,7 +1,10 @@ +from __future__ import absolute_import + from PyQt5 import uic -from PyQt5.QtCore import pyqtSignal, Qt +from PyQt5.QtCore import Qt, pyqtSignal from PyQt5.QtGui import QCursor from PyQt5.QtWidgets import QSizePolicy, QSpacerItem + from TriblerGUI.defs import BUTTON_TYPE_NORMAL from TriblerGUI.dialogs.dialogcontainer import DialogContainer from TriblerGUI.utilities import get_ui_file_path @@ -9,10 +12,9 @@ class ConfirmationDialog(DialogContainer): - button_clicked = pyqtSignal(int) - def __init__(self, parent, title, main_text, buttons, show_input=False): + def __init__(self, parent, title, main_text, buttons, show_input=False, checkbox_text=None): DialogContainer.__init__(self, parent) uic.loadUi(get_ui_file_path('buttonsdialog.ui'), self.dialog_widget) @@ -23,12 +25,18 @@ def __init__(self, parent, title, main_text, buttons, show_input=False): self.dialog_widget.dialog_main_text_label.setText(main_text) self.dialog_widget.dialog_main_text_label.adjustSize() + self.checkbox = self.dialog_widget.checkbox if not show_input: self.dialog_widget.dialog_input.setHidden(True) else: self.dialog_widget.dialog_input.returnPressed.connect(lambda: self.button_clicked.emit(0)) + if not checkbox_text: + self.dialog_widget.checkbox.setHidden(True) + else: + self.dialog_widget.checkbox.setText(checkbox_text) + hspacer_left = QSpacerItem(1, 1, QSizePolicy.Expanding, QSizePolicy.Fixed) self.dialog_widget.dialog_button_container.layout().addSpacerItem(hspacer_left) diff --git a/TriblerGUI/dialogs/feedbackdialog.py b/TriblerGUI/dialogs/feedbackdialog.py index 58c6bff1c4c..4c83a8309c9 100644 --- a/TriblerGUI/dialogs/feedbackdialog.py +++ b/TriblerGUI/dialogs/feedbackdialog.py @@ -2,19 +2,18 @@ import json import os -from urllib import quote_plus -from PyQt5 import uic -from PyQt5.QtWidgets import QApplication -from PyQt5.QtWidgets import 
QDialog, QTreeWidgetItem, QAction -import sys import platform +import sys import time +from PyQt5 import uic +from PyQt5.QtWidgets import QAction, QApplication, QDialog, QMessageBox, QTreeWidgetItem + from six.moves import xrange -from PyQt5.QtWidgets import QMessageBox + from TriblerGUI.event_request_manager import received_events from TriblerGUI.tribler_action_menu import TriblerActionMenu -from TriblerGUI.tribler_request_manager import performed_requests as tribler_performed_requests, TriblerRequestManager +from TriblerGUI.tribler_request_manager import TriblerRequestManager, performed_requests as tribler_performed_requests from TriblerGUI.utilities import get_ui_file_path @@ -128,20 +127,24 @@ def on_send_clicked(self): sys_info = "" for ind in xrange(self.env_variables_list.topLevelItemCount()): item = self.env_variables_list.topLevelItem(ind) - sys_info += "%s\t%s\n" % (quote_plus(item.text(0)), quote_plus(item.text(1))) + sys_info += "%s\t%s\n" % (item.text(0), item.text(1)) comments = self.comments_text_edit.toPlainText() if len(comments) == 0: comments = "Not provided" - comments = quote_plus(comments) - - stack = quote_plus(self.error_text_edit.toPlainText()) - - post_data = "version=%s&machine=%s&os=%s×tamp=%s&sysinfo=%s&comments=%s&stack=%s" % \ - (self.tribler_version, platform.machine(), platform.platform(), - int(time.time()), sys_info, comments, stack) - - self.request_mgr.perform_request(endpoint, self.on_report_sent, data=str(post_data), method='POST') + stack = self.error_text_edit.toPlainText() + + post_data = { + "version": self.tribler_version, + "machine": platform.machine(), + "os": platform.platform(), + "timestamp": int(time.time()), + "sysinfo": sys_info, + "comments": comments, + "stack": stack + } + + self.request_mgr.perform_request(endpoint, self.on_report_sent, data=post_data, method='POST') def closeEvent(self, close_event): QApplication.quit() diff --git a/TriblerGUI/event_request_manager.py b/TriblerGUI/event_request_manager.py index 54f729200d7..97a4e370398 100644 --- a/TriblerGUI/event_request_manager.py +++ b/TriblerGUI/event_request_manager.py @@ -1,8 +1,11 @@ +from __future__ import absolute_import + import logging -from PyQt5.QtCore import QUrl, pyqtSignal, QTimer -from PyQt5.QtNetwork import QNetworkAccessManager, QNetworkRequest, QNetworkReply import time +from PyQt5.QtCore import QTimer, QUrl, pyqtSignal +from PyQt5.QtNetwork import QNetworkAccessManager, QNetworkReply, QNetworkRequest + import Tribler.Core.Utilities.json_util as json received_events = [] @@ -13,6 +16,7 @@ class EventRequestManager(QNetworkAccessManager): The EventRequestManager class handles the events connection over which important events in Tribler are pushed. 
""" + torrent_info_updated = pyqtSignal(object) received_search_result_channel = pyqtSignal(object) received_search_result_torrent = pyqtSignal(object) tribler_started = pyqtSignal() @@ -80,10 +84,8 @@ def on_read_data(self): if len(received_events) > 100: # Only buffer the last 100 events received_events.pop() - if json_dict["type"] == "search_result_channel": - self.received_search_result_channel.emit(json_dict["event"]["result"]) - elif json_dict["type"] == "search_result_torrent": - self.received_search_result_torrent.emit(json_dict["event"]["result"]) + if json_dict["type"] == "torrent_info_updated": + self.torrent_info_updated.emit(json_dict["event"]) elif json_dict["type"] == "tribler_started" and not self.emitted_tribler_started: self.tribler_started.emit() self.emitted_tribler_started = True diff --git a/TriblerGUI/images/check.svg b/TriblerGUI/images/check.svg new file mode 100644 index 00000000000..383b1a04f10 --- /dev/null +++ b/TriblerGUI/images/check.svg @@ -0,0 +1,54 @@ + + + + + + image/svg+xml + + + + + + + + diff --git a/TriblerGUI/images/minus.svg b/TriblerGUI/images/minus.svg new file mode 100644 index 00000000000..df2fc2a01ec --- /dev/null +++ b/TriblerGUI/images/minus.svg @@ -0,0 +1,69 @@ + + + + + + image/svg+xml + + + + + + + + + + + + + diff --git a/TriblerGUI/images/plus.svg b/TriblerGUI/images/plus.svg new file mode 100644 index 00000000000..20c9f690bea --- /dev/null +++ b/TriblerGUI/images/plus.svg @@ -0,0 +1,45 @@ + +image/svg+xml \ No newline at end of file diff --git a/TriblerGUI/images/trash.svg b/TriblerGUI/images/trash.svg new file mode 100644 index 00000000000..b612031895d --- /dev/null +++ b/TriblerGUI/images/trash.svg @@ -0,0 +1,61 @@ + + + + + + image/svg+xml + + + + + + + + trash + + + + diff --git a/TriblerGUI/images/undo.svg b/TriblerGUI/images/undo.svg new file mode 100644 index 00000000000..2ed5c52936e --- /dev/null +++ b/TriblerGUI/images/undo.svg @@ -0,0 +1,50 @@ + +image/svg+xml \ No newline at end of file diff --git a/TriblerGUI/qt_resources/buttonsdialog.ui b/TriblerGUI/qt_resources/buttonsdialog.ui index a8e156276c3..9c4a7a57e86 100644 --- a/TriblerGUI/qt_resources/buttonsdialog.ui +++ b/TriblerGUI/qt_resources/buttonsdialog.ui @@ -166,6 +166,46 @@ padding: 4px; + + + + + + Qt::Horizontal + + + + 40 + 20 + + + + + + + + color: #B5B5B5; + + + CheckBox + + + + + + + Qt::Horizontal + + + + 40 + 20 + + + + + + diff --git a/TriblerGUI/qt_resources/channel_list_item.ui b/TriblerGUI/qt_resources/channel_list_item.ui deleted file mode 100644 index 7e8eaa5df31..00000000000 --- a/TriblerGUI/qt_resources/channel_list_item.ui +++ /dev/null @@ -1,430 +0,0 @@ - - - Form - - - - 0 - 0 - 585 - 60 - - - - - 0 - 0 - - - - - 0 - 60 - - - - - 16777215 - 60 - - - - PointingHandCursor - - - Form - - - false - - - background-color: #666; - - - - 0 - - - 0 - - - 0 - - - 0 - - - 0 - - - - - Qt::Horizontal - - - QSizePolicy::Fixed - - - - 14 - 20 - - - - - - - - 0 - - - 0 - - - - - Qt::Vertical - - - QSizePolicy::Fixed - - - - 10 - 7 - - - - - - - - 4 - - - - - - 0 - 0 - - - - - 58 - 18 - - - - - 58 - 18 - - - - border: 1px solid #B5B5B5; -border-radius: 9px; -color: #B5B5B5; -font-size: 12px; -background-color: transparent; - - - channel - - - Qt::AlignCenter - - - - - - - - 0 - 0 - - - - QLabel { -color: #eee; -border: none; -background-color: transparent; -font-size: 15px; -} - - - TextLabel - - - - - - - - - -1 - - - - - - 0 - 0 - - - - - 120 - 0 - - - - - 16777215 - 16777215 - - - - QLabel { -color: #B5B5B5; -border: none; -background-color: transparent; 
-font-size: 15px; -} - - - active 6 days ago • 143 items - - - - - - - - - Qt::Vertical - - - QSizePolicy::Fixed - - - - 20 - 7 - - - - - - - - - - - 0 - 0 - - - - - 0 - 0 - - - - - 10000 - 16777215 - - - - - 0 - - - 0 - - - 0 - - - 0 - - - 0 - - - - - - 0 - 0 - - - - - 28 - 28 - - - - - 28 - 28 - - - - QPushButton { -border: none; -background-color: transparent; -} - - - - - - - ../images/subscribed_not.png../images/subscribed_not.png - - - - 18 - 18 - - - - - - - - Qt::Horizontal - - - QSizePolicy::Fixed - - - - 2 - 20 - - - - - - - - - 0 - 0 - - - - - 0 - 24 - - - - - 16777215 - 24 - - - - color: #B5B5B5; -background-color: transparent; - - - - - - - - - - - Qt::Horizontal - - - QSizePolicy::Fixed - - - - 5 - 20 - - - - - - - - - 0 - 0 - - - - - 28 - 28 - - - - - 28 - 28 - - - - QPushButton { -border: none; -background-color: transparent; -} - - - - - - - ../images/credit_mining_not.png../images/credit_mining_not.png - - - - 18 - 18 - - - - - - - - - - - Qt::Horizontal - - - QSizePolicy::Fixed - - - - 14 - 20 - - - - - - - - - SubscriptionsWidget - QWidget -
- <header>TriblerGUI.widgets.subscriptionswidget.h</header>
- <container>1</container>
- </customwidget>
- </customwidgets>
- <resources/>
- <connections/>
-</ui>
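The channel_list_item.ui template deleted above was instantiated once for every visible row of a QListWidget. This PR replaces that pattern with QTableView subclasses (ChannelsTableView, TorrentsTableView, registered near the end of this diff against lazytableview.h) that pull rows from a model on demand. Below is a minimal sketch of that model/view contract, assuming a paged REST backend; the class name and the paging comments are illustrative and not taken from this patch.

from PyQt5.QtCore import QAbstractTableModel, QModelIndex, Qt

class LazyTorrentListModel(QAbstractTableModel):
    """Feeds a QTableView in pages instead of one widget per row."""

    columns = [u'name', u'size', u'health']

    def __init__(self, parent=None):
        super(LazyTorrentListModel, self).__init__(parent)
        self.items = []  # rows received from the core so far

    def rowCount(self, parent=QModelIndex()):
        return len(self.items)

    def columnCount(self, parent=QModelIndex()):
        return len(self.columns)

    def data(self, index, role=Qt.DisplayRole):
        if role == Qt.DisplayRole:
            return self.items[index.row()].get(self.columns[index.column()])
        return None

    def canFetchMore(self, parent):
        # The view calls this when it runs out of rows to draw; a real model
        # would compare len(self.items) against a total reported by the core.
        return False

    def fetchMore(self, parent):
        # A real implementation would issue a paged REST request here, e.g.
        # first=len(self.items), last=len(self.items) + 50, and append the
        # decoded rows inside beginInsertRows()/endInsertRows().
        pass

Because Qt only calls fetchMore() when the viewport runs dry, scrolling drives the paging and no per-row widgets are ever created.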
diff --git a/TriblerGUI/qt_resources/channel_torrent_list_item.ui b/TriblerGUI/qt_resources/channel_torrent_list_item.ui deleted file mode 100644 index f988f92b9be..00000000000 --- a/TriblerGUI/qt_resources/channel_torrent_list_item.ui +++ /dev/null @@ -1,628 +0,0 @@ - - - Form - - - - 0 - 0 - 585 - 60 - - - - - 0 - 0 - - - - - 0 - 60 - - - - - 16777215 - 60 - - - - ArrowCursor - - - Form - - - QWidget { -background-color: #666; -} - - - - - 0 - - - 0 - - - 0 - - - 0 - - - 0 - - - - - Qt::Horizontal - - - QSizePolicy::Fixed - - - - 10 - 20 - - - - - - - - - 0 - 0 - - - - - 60 - 42 - - - - - 60 - 42 - - - - PO - - - Qt::AlignCenter - - - - - - - Qt::Horizontal - - - QSizePolicy::Fixed - - - - 10 - 20 - - - - - - - - 0 - - - - - Qt::Vertical - - - QSizePolicy::Fixed - - - - 20 - 7 - - - - - - - - 4 - - - - - - 0 - 0 - - - - - 0 - 18 - - - - - 200 - 18 - - - - border: 1px solid #B5B5B5; -border-radius: 9px; -color: #B5B5B5; -font-size: 12px; -background-color: transparent; -padding-left: 5px; -padding-right: 5px; - - - video - - - Qt::AlignCenter - - - - - - - - 0 - 0 - - - - color: #eee; -border: none; -background-color: transparent; -font-size: 15px; - - - TextLabel - - - - - - - - - 0 - - - - - - 0 - 0 - - - - - 120 - 0 - - - - - 120 - 16777215 - - - - color: #b5b5b5; -border: none; -background-color: transparent; -font-size: 13px; - - - 384MB (3 files) - - - - - - - Qt::Horizontal - - - QSizePolicy::Fixed - - - - 10 - 20 - - - - - - - - - 0 - 0 - - - - - 10 - 10 - - - - - 10 - 10 - - - - background-color: orange; -border-radius: 5px; - - - - - - - - - 0 - 0 - - - - - 0 - 20 - - - - - 16777215 - 20 - - - - color: #b5b5b5; -border: none; -background-color: transparent; -font-size: 13px; -padding-left: 2px; -padding-bottom: 1px; - - - unknown health - - - - - - - Qt::Horizontal - - - - 40 - 20 - - - - - - - - - - Qt::Vertical - - - QSizePolicy::Fixed - - - - 20 - 7 - - - - - - - - - - - - color: #bbb; -border: none; -background-color: transparent; -font-size: 15px; - - - STATE - - - Qt::NoTextInteraction - - - - - - - Qt::Horizontal - - - QSizePolicy::Fixed - - - - 5 - 0 - - - - - - - - - - - 0 - 0 - - - - - 0 - 30 - - - - - 16777215 - 30 - - - - background-color: transparent; - - - - 0 - - - 0 - - - 0 - - - 0 - - - 0 - - - - - - 28 - 28 - - - - - 28 - 28 - - - - border-radius: 14px; - - - - - - - - ../images/delete.png../images/delete.png - - - - 12 - 12 - - - - - - - - Qt::Horizontal - - - QSizePolicy::Fixed - - - - 10 - 20 - - - - - - - - - - - background-color: transparent; - - - - 0 - - - 0 - - - 0 - - - 0 - - - 0 - - - - - - 28 - 28 - - - - - 28 - 28 - - - - border-radius: 14px; -padding-left: 2px; - - - - - - - ../images/play.png../images/play.png - - - - 14 - 14 - - - - - - - - Qt::Horizontal - - - QSizePolicy::Fixed - - - - 6 - 20 - - - - - - - - - 28 - 28 - - - - - 28 - 28 - - - - border-radius: 14px; - - - - - - - ../images/downloads.png../images/downloads.png - - - - 14 - 14 - - - - - - - - Qt::Horizontal - - - QSizePolicy::Fixed - - - - 6 - 20 - - - - - - - - - - - Qt::Horizontal - - - QSizePolicy::Fixed - - - - 14 - 20 - - - - - - - - - CircleButton - QToolButton -
- <header>TriblerGUI.widgets.circlebutton.h</header>
- </customwidget>
- <customwidget>
- <class>ThumbnailWidget</class>
- <extends>QLabel</extends>
- <header>TriblerGUI.widgets.thumbnailwidget.h</header>
- </customwidget>
- </customwidgets>
- <resources/>
- <connections/>
-</ui>
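The per-row play/download/delete CircleButtons from the template deleted above have no direct equivalent once rows live in a QTableView; the ACTION_BUTTONS column key added to TriblerGUI/defs.py suggests they become an ordinary column instead. A hypothetical delegate sketch, not code from this patch, of how such a column can be rendered without keeping live widgets per row:

from PyQt5.QtCore import Qt
from PyQt5.QtWidgets import QStyle, QStyledItemDelegate

class ActionButtonsDelegate(QStyledItemDelegate):
    """Paints row controls on demand instead of holding widget instances."""

    def paint(self, painter, option, index):
        QStyledItemDelegate.paint(self, painter, option, index)
        if option.state & QStyle.State_MouseOver:
            painter.save()
            painter.setPen(Qt.white)
            # Textual stand-ins for the old play/download/delete buttons.
            painter.drawText(option.rect, Qt.AlignCenter, u"\u25b6  \u2b07  \u2715")
            painter.restore()

Click handling would then go through the delegate's editorEvent() or the view's clicked signal rather than per-button slots.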
diff --git a/TriblerGUI/qt_resources/mainwindow.ui b/TriblerGUI/qt_resources/mainwindow.ui index ab9aa32d9b3..6ff8153f1e4 100644 --- a/TriblerGUI/qt_resources/mainwindow.ui +++ b/TriblerGUI/qt_resources/mainwindow.ui @@ -6,7 +6,7 @@ 0 0 - 875 + 969 777 @@ -48,6 +48,46 @@ background-color: red; } QStatusBar::item { border: 0px solid black; +} +QTableView { +border: none; +font-size: 13px; +outline: 0; +} +QTableView::item::hover { +background-color: rgba(255,255,255, 50); +} +QTableView::item { +color: white; +height: 40px; +border-bottom: 1px solid #303030; +} + +QHeaderView { +background-color: transparent; +} +QHeaderView::section { +background-color: transparent; +border: none; +color: #B5B5B5; +padding: 10px; +font-size: 14px; +border-bottom: 1px solid #303030; +} +QHeaderView::section:hover { +color: white; +} +QTableCornerButton::section { +background-color: transparent; +} +QHeaderView::section:up-arrow { +color: white; +} +QHeaderView::section:down-arrow { +color: white; +} +QHeaderView { +qproperty-defaultAlignment: AlignLeft; } @@ -1050,7 +1090,7 @@ background-color: #e67300; - 12 + 1 @@ -1203,6 +1243,25 @@ margin: 10px; + + + + + 0 + 0 + + + + color: white; font-size: 16px; border-top: 1px solid #555; + + + No recommended torrents found. + + + Qt::AlignCenter + + + @@ -1218,6 +1277,11 @@ padding-left: 10px; QTableWidget::item { padding-right: 10px; padding-bottom: 10px; +border: none; +} + +QTableWidget::item::hover { +background: transparent; } @@ -1816,62 +1880,6 @@ padding-bottom: 4px; - - - - - 0 - 36 - - - - - 16777215 - 36 - - - - PointingHandCursor - - - - - - PLAYLISTS - - - true - - - - - - - - 0 - 36 - - - - - 16777215 - 36 - - - - PointingHandCursor - - - - - - RSS FEEDS - - - true - - - @@ -1906,7 +1914,7 @@ font-size: 14px; 1 - 0 + 2 @@ -1929,7 +1937,7 @@ font-size: 14px; font-size: 14px; - <html><head/><body><p>Welcome to the management interface of your channel!</p><p>Here, you can change settings of you channel, manage your shared torrents, manage your playlists and add rss feeds which are periodically polled.</p></body></html> + <html><head/><body><p>Welcome to the management interface of your channel!</p><p>Here, you can change settings of you channel and manage your shared torrents.</p></body></html> true @@ -2356,297 +2364,128 @@ font-size: 14px; 0 - - - - - - - - 32 - 32 - 32 - - - - - - - 32 - 32 - 32 - - - - - - - 32 - 32 - 32 - - - - - - - - - 32 - 32 - 32 - - - - - - - 32 - 32 - 32 - - - - - - - 32 - 32 - 32 - - - - - - - - - 32 - 32 - 32 - - - - - - - 32 - 32 - 32 - - - - - - - 32 - 32 - 32 - - - - - + + + + 0 + 50 + - - + + + 16777215 + 50 + - + + + 0 + + + 0 + + + 0 + + + 0 + + + 0 + - + Qt::Horizontal + + QSizePolicy::Fixed + - 40 + 10 20 - - - false + + + + 0 + 24 + + + + + 16777215 + 24 + + + + PointingHandCursor + + + border-radius: 12px; +padding-left: 4px; +padding-right: 4px; - Your channel has uncommitted and/or deleted torrents. 
+ ADD - + Qt::Horizontal + + QSizePolicy::Fixed + - 40 + 10 20 - - - true - - - - 0 - 0 - - + 0 - 35 + 24 - - - - - - - 32 - 32 - 32 - - - - - - - 186 - 189 - 182 - - - - - - - 32 - 32 - 32 - - - - - - - 32 - 32 - 32 - - - - - - - - - 32 - 32 - 32 - - - - - - - 186 - 189 - 182 - - - - - - - 32 - 32 - 32 - - - - - - - 32 - 32 - 32 - - - - - - - - - 32 - 32 - 32 - - - - - - - 190 - 190 - 190 - - - - - - - 32 - 32 - 32 - - - - - - - 32 - 32 - 32 - - - - - + + + 16777215 + 24 + - - Apply changes to your channel and publish the new version + + PointingHandCursor - - false + + border-radius: 12px; +padding-left: 4px; +padding-right: 4px; - Apply changes - - - false - - - false + REMOVE ALL - + Qt::Horizontal - QSizePolicy::Fixed + QSizePolicy::Maximum @@ -2656,849 +2495,24 @@ font-size: 14px; - - - - - - - QListWidget::item:hover { -background-color: #303030; -} -QListWidget::item:selected { -background-color: #404040; -} -QListWidget::item { -border-bottom: 1px solid #303030; -} -QListWidget { -border: none; -} - - - QAbstractItemView::ContiguousSelection - - - QAbstractItemView::ScrollPerPixel - - - false - - - - - - - - 8 - - - 8 - - - 8 - - - 8 - - - 8 - - - - Qt::Horizontal - - - - 40 - 20 - - - - - - - - - 0 - 0 - - - - - 0 - 24 - - - - - 16777215 - 24 - - - - PointingHandCursor - - - border-radius: 12px; -padding-left: 4px; -padding-right: 4px; - - - REMOVE SELECTED - - - - - - - - 0 - 0 - - - - - 0 - 24 - - - - - 16777215 - 24 - - - - PointingHandCursor - - - border-radius: 12px; -padding-left: 4px; -padding-right: 4px; - - - REMOVE ALL - - - - - - - - 0 - 0 - - - - - 0 - 24 - - - - - 16777215 - 24 - - - - PointingHandCursor - - - border-radius: 12px; -padding-left: 4px; -padding-right: 4px; - - - ADD - - - - - - - - - - - - 0 - - - 0 - - - 0 - - - 0 - - - 0 - - - - - QListWidget::item:hover { -background-color: #303030; -} -QListWidget::item:selected { -background-color: #404040; -} -QListWidget::item { -border-bottom: 1px solid #303030; -} -QListWidget { -border: none; -} - - - QAbstractItemView::NoSelection - - - QAbstractItemView::ScrollPerPixel - - - - - - - - 8 - - - 8 - - - 8 - - - 8 - - - - - Qt::Horizontal - - - - 40 - 20 - - - - - - - - - 0 - 24 - - - - - 16777215 - 24 - - - - PointingHandCursor - - - border-radius: 12px; -padding-left: 4px; -padding-right: 4px; - - - NEW PLAYLIST - - - - - - - - - - - - 0 - - - 0 - - - 0 - - - 0 - - - 0 - - - - - QTreeWidget { -border: none; -font-size: 14px; -} -QTreeWidget::item { -color: white; -border-bottom: 1px solid #303030; -padding-left: 14px; -height: 40px; -} -QTreeWidget::item::selected { -background-color: #404040;; -} -QTreeWidget::item:hover { -background-color: #303030; -} -QHeaderView { -background-color: transparent; -} -QHeaderView::section { -background-color: transparent; -border: none; -color: #B5B5B5; -padding: 10px; -padding-left: 20px; -font-size: 14px; -border-bottom: 1px solid #303030; -} -QTableCornerButton::section { -background-color: transparent; -} - - - false - - - QAbstractItemView::ContiguousSelection - - - QAbstractItemView::ScrollPerPixel - - - 0 - - - - RSS FEED URL - - - - - - - - - 8 - - - 8 - - - 8 - - - 8 - - - - - Qt::Horizontal - - - - 40 - 20 - - - - - - - - - 0 - 24 - - - - - 16777215 - 24 - - - - PointingHandCursor - - - border-radius: 12px; -padding-left: 4px; -padding-right: 4px; - - - REMOVE SELECTED - - - - - - - - 0 - 24 - - - - - 16777215 - 24 - - - - PointingHandCursor - - - border-radius: 12px; -padding-left: 4px; -padding-right: 4px; - - - ADD - - - - - - - - 0 - 24 - - - - - 16777215 - 
24 - - - - PointingHandCursor - - - border-radius: 12px; -padding-left: 4px; -padding-right: 4px; - - - REFRESH ALL - - - - - - - - - - - - 0 - - - 0 - - - 0 - - - 0 - - - 0 - - - - - margin: 10px; - - - Please enter the details of your playlist below. - - - - - - - - QFormLayout::ExpandingFieldsGrow - - - - - color: #B5B5B5; - - - Playlist name - - - - - - - - - - Playlist name - - - - - - - color: #B5B5B5; - - - Playlist description - - - - - - - - - - Playlist description - - - - - - - - 0 - - - 0 - - - 0 - - - 0 - - - 0 - - - - - Qt::Horizontal - - - - 40 - 20 - - - - - - - - - 0 - 26 - - - - - 16777215 - 26 - - - - PointingHandCursor - - - border-radius: 13px; -font-size: 14px; - - - CANCEL - - - - - - - Qt::Horizontal - - - QSizePolicy::Fixed - - - - 10 - 20 - - - - - - - - - 0 - 0 - - - - - 0 - 26 - - - - - 6000 - 26 - - - - PointingHandCursor - - - border-radius: 13px; -font-size: 14px; -padding-left: 4px; -padding-right: 4px; - - - SAVE - - - - - - - Qt::Horizontal - - - - 40 - 20 - - - - - - - - - - - Qt::Vertical - - - QSizePolicy::Minimum - - - - 20 - 2 - - - - - - - - Qt::Vertical - - - QSizePolicy::Fixed - - - - 20 - 10 - - - - - - - - - - - Qt::Vertical - - - - 20 - 40 - - - - - - - - - - 0 - - - 0 - - - 0 - - - 0 - - - 0 - - - - - - 0 - - - 0 - - - 0 - - - 0 - - - 0 - - - - - Qt::Horizontal - - - QSizePolicy::Fixed - - - - 10 - 20 - - - - - - - - - 16 - 18 - - - - - 16 - 18 - - - - PointingHandCursor - - - border: none; -border-image: url(images/page_back.png) 0 0 0 0 stretch stretch; -background: none; - - - - - - - - - - margin: 10px; -font-size: 15px; -font-weight: bold; -margin-left: 5px; -color: white; - - - Torrents in playlist 'bla' - - - - - - - - - - QListWidget::item:hover { -background-color: #303030; -} -QListWidget::item:selected { -background-color: #404040; -} -QListWidget::item { -border-bottom: 1px solid #303030; -} -QListWidget { -border: none; -} - - - QAbstractItemView::NoSelection - - - - - - - - 8 - - - 8 - - - 8 - - - 8 - - - + Qt::Horizontal + + QSizePolicy::Fixed + - 506 + 5 20 - + 0 @@ -3520,347 +2534,217 @@ padding-left: 4px; padding-right: 4px; - MANAGE TORRENTS + REMOVE SELECTED - - - - - - - - - 0 - - - 0 - - - 0 - - - 0 - - - 0 - - - - - - 0 - - - 0 - - - 0 - - - 0 - - - 0 - - + Qt::Horizontal - - QSizePolicy::Fixed - - 10 + 40 20 - - - - 16 - 18 - + + + + 0 + 0 + - + - 16 - 18 + 50 + 0 - - PointingHandCursor - - - border: none; -border-image: url(images/page_back.png) 0 0 0 0 stretch stretch; -background: none; - - - - - - - - - - margin: 10px; -font-size: 15px; -font-weight: bold; -margin-left: 5px; -color: white; - - - Manage torrents in playlist 'bla' - - - - - - - - - - QListWidget { -background-color: #303030; -border: none; -} -QListWidget::item { -color: white; -} -QToolButton { -font-size: 16px; -border-radius: 13px; -color: white; -} - - - - - - - 6 - - - 0 - - - 0 - - - 0 - - - 0 - + - - - Qt::Vertical - - - - 20 - 40 - + + + + 0 + 0 + - - - - - 26 - 26 + 0 + 30 - 26 - 26 + 16777215 + 30 - - PointingHandCursor - - + font-size: 14px; background-color: #cc6600; +color: #eee; - < + Your channel has uncommitted changes. 
+ + + Qt::AlignCenter - + - Qt::Vertical - - - QSizePolicy::Fixed + Qt::Horizontal - 20 - 10 + 10 + 20 - + - 26 - 26 + 0 + 28 - 26 - 26 + 16777215 + 28 - - PointingHandCursor - - + border-radius: 12px; +padding-left: 4px; +padding-right: 4px; - > + APPLY CHANGES - - - - Qt::Vertical - - - - 20 - 40 - - - - - - - - QAbstractItemView::ExtendedSelection - - - QAbstractItemView::ScrollPerPixel - - - QAbstractItemView::ScrollPerPixel - - - - - - - QAbstractItemView::ExtendedSelection - - - QAbstractItemView::ScrollPerPixel - - - QAbstractItemView::ScrollPerPixel - - - - - - - Torrents in playlist - - - - - - - Torrents in channel but not in playlist - - - - - - - - - - - 8 - - - 8 - - - 8 - - - 8 - - + Qt::Horizontal + + QSizePolicy::Maximum + - 581 + 16 20 - + - + 0 0 - 0 - 24 + 180 + 28 + + + + + 180 + 28 + + + + + 200 + 0 - + + QLineEdit { +border-radius: 3px; +} +QLineEdit:focus, QLineEdit::hover { +background-color: #404040; +color: white; +} + + + + + + Filter + + + + + + + Qt::Horizontal + + + QSizePolicy::Fixed + + - 16777215 - 24 + 10 + 20 - - PointingHandCursor - + + + + - border-radius: 12px; -padding-left: 4px; -padding-right: 4px; + - SAVE + 0 items + + + + Qt::Horizontal + + + QSizePolicy::Fixed + + + + 10 + 0 + + + + + + + @@ -4429,7 +3313,7 @@ font-weight: bold; color: #B5B5B5; - - results + 0 results @@ -4611,13 +3495,86 @@ color: #B5B5B5; - - - - 0 - 0 - - + + + + 0 + + + 0 + + + 0 + + + 0 + + + 0 + + + + + QSplitter::handle { background-color: #555; } + + + Qt::Vertical + + + false + + + false + + + + Qt::ScrollBarAlwaysOff + + + QAbstractItemView::SingleSelection + + + QAbstractItemView::SelectRows + + + false + + + true + + + false + + + false + + + false + + + + + + 0 + 0 + + + + + 0 + 200 + + + + + 16777215 + 200 + + + + + + @@ -4805,6 +3762,56 @@ font-weight: bold; + + + + + 0 + 0 + + + + + 180 + 28 + + + + + 180 + 28 + + + + QLineEdit { +border-radius: 3px; +} +QLineEdit:focus, QLineEdit::hover { +background-color: #404040; +color: white; +} + + + Filter + + + + + + + Qt::Horizontal + + + QSizePolicy::Minimum + + + + 16 + 20 + + + + @@ -5005,7 +4012,7 @@ background-color: transparent; - + @@ -5411,8 +4418,8 @@ border-top: 1px solid #555; 0 0 - 300 - 616 + 755 + 646 @@ -5495,7 +4502,8 @@ color: white; - Family filter enabled? + Family filter enabled? 
+(requires Tribler restart) @@ -5506,6 +4514,32 @@ color: white; + + + + font-weight: bold; +color: white; + + + Personal channel settings + + + + + + + Commit changes automatically +(requires Tribler restart) + + + + + + + + + + @@ -5832,34 +4866,6 @@ color: white; - - - - font-weight: bold; -color: white; - - - Beta features - - - - - - - Enable Channel 2.0 editing - - - - - - - margin-top: 2px; - - - - - - @@ -7661,6 +6667,12 @@ font-size: 12px; + + + 0 + 0 + + 0 @@ -7680,7 +6692,7 @@ font-size: 12px; - + 0 0 @@ -7709,6 +6721,34 @@ margin: 10px; + + + + + 180 + 28 + + + + + 180 + 28 + + + + QLineEdit { +border-radius: 3px; +} +QLineEdit:focus, QLineEdit::hover { +background-color: #404040; +color: white; +} + + + filter + + + @@ -7726,32 +6766,24 @@ margin: 10px; - - - - 42 - 24 - - - - - 42 - 24 - - - - PointingHandCursor + + + + 0 + 0 + - + font-size: 14px; +color: #B5B5B5; - ADD + 0 items - + Qt::Horizontal @@ -7770,22 +6802,28 @@ margin: 10px; - - - QListWidget::item:hover { -background-color: #303030; -} -QListWidget::item { -border-bottom: 1px solid #303030; -} -QListWidget { -border: none; -border-top: 1px solid #555; -} + + + Qt::ScrollBarAlwaysOff - - QAbstractItemView::ScrollPerPixel + + QAbstractItemView::NoSelection + + + false + + + true + + + false + + false + + + false + @@ -7891,7 +6929,6 @@ color: white; QLineEdit { -background-color: transparent; border-radius: 3px; } QLineEdit:focus, QLineEdit::hover { @@ -8668,8 +7705,8 @@ QTabBar::tab:selected { 0 0 - 121 - 238 + 133 + 246 @@ -9184,206 +8221,6 @@ background-color: #303030; - - - - 0 - - - 0 - - - 0 - - - 0 - - - 0 - - - - - - 0 - - - 0 - - - 0 - - - 0 - - - 0 - - - - - Qt::Horizontal - - - QSizePolicy::Fixed - - - - 10 - 20 - - - - - - - - - 16 - 18 - - - - - 16 - 18 - - - - PointingHandCursor - - - border: none; -border-image: url(images/page_back.png) 0 0 0 0 stretch stretch; - - - - - - - - - - Qt::Horizontal - - - QSizePolicy::Fixed - - - - 10 - 20 - - - - - - - - - 0 - 0 - - - - - 0 - 50 - - - - - 16777215 - 50 - - - - color: #eee; -background-color: transparent; -font-size: 20px; -font-weight: bold; - - - My Playlist - - - - - - - Qt::Horizontal - - - QSizePolicy::Fixed - - - - 10 - 20 - - - - - - - - - 0 - 0 - - - - font-size: 14px; -color: #B5B5B5; - - - 23 items - - - - - - - Qt::Horizontal - - - QSizePolicy::Fixed - - - - 10 - 20 - - - - - - - - - - - QListWidget::item:hover { -background-color: #303030; -} -QListWidget::item:selected { -background-color: #404040; -} -QListWidget::item { -border-bottom: 1px solid #303030; -} -QListWidget { -border: none; -border-top: 1px solid #555; -} - - - QAbstractItemView::ScrollPerPixel - - - - - @@ -9610,6 +8447,12 @@ font-weight:bold; + + + 0 + 0 + + 0 @@ -9658,6 +8501,34 @@ margin: 10px; + + + + + 180 + 28 + + + + + 180 + 28 + + + + QLineEdit { +border-radius: 3px; +} +QLineEdit:focus, QLineEdit::hover { +background-color: #404040; +color: white; +} + + + filter + + + @@ -9711,22 +8582,34 @@ color: #B5B5B5; - - - QListWidget::item:hover { -background-color: #303030; -} -QListWidget::item { -border-bottom: 1px solid #303030; -} -QListWidget { -border: none; -border-top: 1px solid #555; -} + + + Qt::ScrollBarAlwaysOff + + + QAbstractItemView::NoSelection + + + false - - QAbstractItemView::ScrollPerPixel + + true + + + false + + false + + + false + + + false + + + false + @@ -12038,7 +10921,7 @@ QTabBar::tab:selected { } - 1 + 0 @@ -12618,7 +11501,7 @@ margin-top: 9px; QWidget { -background-color: #383838; +background-color: #383838; } QLabel { border: none; @@ 
-12702,7 +11585,7 @@ border: none; QWidget { -background-color: #383838; +background-color: #383838; } QLabel { border: none; @@ -12775,7 +11658,7 @@ border: none; QWidget { -background-color: #383838; +background-color: #383838; } QLabel { border: none; @@ -12983,7 +11866,7 @@ color: #eee; font-size: 14px; - Transaction in process, Please don't close Tribler. + Transaction in progress, Please don't close Tribler. Qt::AlignCenter @@ -12998,17 +11881,17 @@ color: #eee; + + EllipseButton + QToolButton +
+ <header>TriblerGUI.widgets.ellipsebutton.h</header>
+ </customwidget>
 <customwidget>
  <class>SubscriptionsWidget</class>
  <extends>QWidget</extends>
  <header>TriblerGUI.widgets.subscriptionswidget.h</header>
  <container>1</container>
 </customwidget>
- <customwidget>
- <class>EllipseButton</class>
- <extends>QToolButton</extends>
- <header>TriblerGUI.widgets.ellipsebutton.h</header>
- </customwidget>
VideoPlayerPage QWidget @@ -13061,11 +11944,6 @@ color: #eee;
TriblerGUI.widgets.downloadspage.h
1
- <customwidget>
- <class>LazyLoadList</class>
- <extends>QListWidget</extends>
- <header>TriblerGUI.widgets.lazyloadlist.h</header>
- </customwidget>
SubscribedChannelsPage QWidget @@ -13089,24 +11967,12 @@ color: #eee;
TriblerGUI.widgets.homepage.h
1
- <customwidget>
- <class>PlaylistPage</class>
- <extends>QWidget</extends>
- <header>TriblerGUI.widgets.playlistpage.h</header>
- <container>1</container>
- </customwidget>
DownloadsDetailsTabWidget QTabWidget
TriblerGUI.widgets.downloadsdetailstabwidget.h
1
- <customwidget>
- <class>ManagePlaylistPage</class>
- <extends>QWidget</extends>
- <header>TriblerGUI.widgets.manageplaylistpage.h</header>
- <container>1</container>
- </customwidget>
CreateTorrentPage QWidget @@ -13183,6 +12049,27 @@ color: #eee;
TriblerGUI.widgets.marketorderspage.h
1
+ <customwidget>
+ <class>TorrentsListWidget</class>
+ <extends>QWidget</extends>
+ <header>TriblerGUI.widgets.torrentslistwidget.h</header>
+ </customwidget>
+ <customwidget>
+ <class>ChannelsTableView</class>
+ <extends>QTableView</extends>
+ <header>TriblerGUI.widgets.lazytableview.h</header>
+ </customwidget>
+ <customwidget>
+ <class>TorrentDetailsContainer</class>
+ <extends>QWidget</extends>
+ <header>TriblerGUI.widgets.torrentdetailscontainer.h</header>
+ <container>1</container>
+ </customwidget>
+ <customwidget>
+ <class>SearchResultsTableView</class>
+ <extends>QTableView</extends>
+ <header>TriblerGUI.widgets.lazytableview.h</header>
+ </customwidget>
TokenMiningPage QWidget @@ -13193,45 +12080,45 @@ color: #eee; - top_search_bar - returnPressed() + wallets_back_button + clicked() MainWindow - on_top_search_button_click() + on_page_back_clicked() - 308 - 24 + 218 + 75 427 - 317 + 327 - add_torrent_button + transactions_back_button clicked() MainWindow - on_add_torrent_button_click() + on_page_back_clicked() - 826 - 24 + 218 + 75 427 - 317 + 327 - top_menu_button - clicked() + top_search_bar + textChanged(QString) MainWindow - on_top_menu_button_click() + on_search_text_change() - 17 + 308 24 @@ -13241,14 +12128,14 @@ color: #eee; - subscribed_channels_list - itemClicked(QListWidgetItem*) + top_search_bar + returnPressed() MainWindow - on_channel_item_click(QListWidgetItem*) + on_top_search_button_click() - 250 - 65 + 308 + 24 427 @@ -13257,14 +12144,14 @@ color: #eee; - left_menu_button_home + top_menu_button clicked() MainWindow - clicked_menu_button_home() + on_top_menu_button_click() - 100 - 77 + 17 + 24 427 @@ -13273,62 +12160,62 @@ color: #eee; - left_menu_button_my_channel + settings_button clicked() MainWindow - clicked_menu_button_my_channel() + on_settings_button_click() - 100 - 121 + 789 + 24 427 - 317 + 327 - left_menu_button_video_player + orders_back_button clicked() MainWindow - clicked_menu_button_video_player() + on_page_back_clicked() - 100 - 237 + 218 + 75 427 - 317 + 327 - left_menu_button_downloads + market_back_button clicked() MainWindow - clicked_menu_button_downloads() + on_page_back_clicked() - 100 - 197 + 218 + 75 427 - 317 + 327 - left_menu_button_subscriptions + left_menu_button_video_player clicked() MainWindow - clicked_menu_button_subscriptions() + clicked_menu_button_video_player() 100 - 157 + 237 427 @@ -13337,14 +12224,14 @@ color: #eee; - top_search_bar - textChanged(QString) + left_menu_button_subscriptions + clicked() MainWindow - on_search_text_change() + clicked_menu_button_subscriptions() - 308 - 24 + 100 + 157 427 @@ -13353,30 +12240,30 @@ color: #eee; - channel_back_button + left_menu_button_search clicked() MainWindow - on_page_back_clicked() + clicked_menu_button_search() - 218 - 75 + 94 + 111 427 - 317 + 327 - playlist_back_button + left_menu_button_my_channel clicked() MainWindow - on_page_back_clicked() + clicked_menu_button_my_channel() - 221 - 75 + 100 + 121 427 @@ -13385,62 +12272,46 @@ color: #eee; - left_menu_button_discovered + left_menu_button_home clicked() MainWindow - clicked_menu_button_discovered() + clicked_menu_button_home() 100 - 117 - - - 427 - 327 - - - - - discovered_channels_list - itemClicked(QListWidgetItem*) - MainWindow - on_channel_item_click(QListWidgetItem*) - - - 527 - 366 + 77 427 - 327 + 317 - left_menu_button_debug + left_menu_button_downloads clicked() MainWindow - clicked_menu_button_debug() + clicked_menu_button_downloads() - 94 - 327 + 100 + 197 427 - 327 + 317 - settings_button + left_menu_button_discovered clicked() MainWindow - on_settings_button_click() + clicked_menu_button_discovered() - 789 - 24 + 100 + 117 427 @@ -13449,31 +12320,15 @@ color: #eee; - left_menu_button_search + left_menu_button_debug clicked() MainWindow - clicked_menu_button_search() + clicked_menu_button_debug() 94 - 111 - - - 427 327 - - - - market_back_button - clicked() - MainWindow - on_page_back_clicked() - - - 218 - 75 - 427 327 @@ -13481,23 +12336,23 @@ color: #eee; - transactions_back_button + force_shutdown_btn clicked() MainWindow - on_page_back_clicked() + clicked_force_shutdown() - 218 - 75 + 20 + 20 - 427 - 327 + 20 + 20 - wallets_back_button + channel_back_button 
clicked() MainWindow on_page_back_clicked() @@ -13508,39 +12363,23 @@ color: #eee; 427 - 327 + 317 - orders_back_button + add_torrent_button clicked() MainWindow - on_page_back_clicked() + on_add_torrent_button_click() - 218 - 75 + 826 + 24 427 - 327 - - - - - force_shutdown_btn - clicked() - MainWindow - clicked_force_shutdown() - - - 20 - 20 - - - 20 - 20 + 317 @@ -13585,4 +12424,3 @@ color: #eee; clicked_force_shutdown() - diff --git a/TriblerGUI/qt_resources/playlist_list_item.ui b/TriblerGUI/qt_resources/playlist_list_item.ui deleted file mode 100644 index fd77e4ae63b..00000000000 --- a/TriblerGUI/qt_resources/playlist_list_item.ui +++ /dev/null @@ -1,414 +0,0 @@ - - - Form - - - - 0 - 0 - 585 - 60 - - - - - 0 - 0 - - - - - 0 - 60 - - - - - 16777215 - 60 - - - - PointingHandCursor - - - Form - - - QWidget { -background-color: #666; -} - - - - 0 - - - 0 - - - 0 - - - 0 - - - 0 - - - - - Qt::Horizontal - - - QSizePolicy::Fixed - - - - 10 - 20 - - - - - - - - - 0 - 0 - - - - - 60 - 42 - - - - - 60 - 42 - - - - PO - - - Qt::AlignCenter - - - - - - - Qt::Horizontal - - - QSizePolicy::Fixed - - - - 10 - 20 - - - - - - - - 0 - - - - - Qt::Vertical - - - QSizePolicy::Fixed - - - - 20 - 7 - - - - - - - - 4 - - - - - - 0 - 0 - - - - - 54 - 18 - - - - - 54 - 18 - - - - border: 1px solid #B5B5B5; -border-radius: 9px; -color: #B5B5B5; -font-size: 12px; -background-color: transparent; - - - playlist - - - Qt::AlignCenter - - - - - - - - 0 - 0 - - - - color: #eee; -border: none; -background-color: transparent; -font-size: 15px; - - - TextLabel - - - - - - - - - - - - 0 - 0 - - - - color: #b5b5b5; -border: none; -background-color: transparent; -font-size: 15px; - - - 34 items - - - - - - - - - Qt::Vertical - - - QSizePolicy::Fixed - - - - 20 - 7 - - - - - - - - - - Qt::Horizontal - - - QSizePolicy::Fixed - - - - 10 - 20 - - - - - - - - - 0 - 0 - - - - - 0 - 30 - - - - - 16777215 - 30 - - - - background: transparent; - - - - 0 - - - 0 - - - 0 - - - 0 - - - 0 - - - - - - 28 - 28 - - - - - 28 - 28 - - - - border-radius: 14px; - - - - - - - ../images/edit_white.png../images/edit_white.png - - - - 12 - 12 - - - - - - - - Qt::Horizontal - - - QSizePolicy::Fixed - - - - 6 - 20 - - - - - - - - - 28 - 28 - - - - - 28 - 28 - - - - PointingHandCursor - - - border-radius: 14px; - - - - - - - ../images/delete.png../images/delete.png - - - - 12 - 12 - - - - - - - - Qt::Horizontal - - - QSizePolicy::Fixed - - - - 14 - 20 - - - - - - - - - - - - CircleButton - QToolButton -
- <header>TriblerGUI.widgets.circlebutton.h</header>
- </customwidget>
- <customwidget>
- <class>ThumbnailWidget</class>
- <extends>QLabel</extends>
- <header>TriblerGUI.widgets.thumbnailwidget.h</header>
- </customwidget>
- </customwidgets>
- <resources/>
- <connections/>
-</ui>
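Playlist rows used a dedicated delete button plus a plain ConfirmationDialog; the checkbox_text parameter this PR adds to ConfirmationDialog (see the confirmationdialog.py hunk earlier in this diff) lets a single dialog carry an extra opt-in instead. A hypothetical call site follows, assuming the usual (label, type) button tuples; the host widget and helper names are placeholders, not code from this patch:

from PyQt5.QtWidgets import QWidget
from TriblerGUI.defs import BUTTON_TYPE_CONFIRM, BUTTON_TYPE_NORMAL
from TriblerGUI.dialogs.confirmationdialog import ConfirmationDialog

class ChannelTorrentsPage(QWidget):  # hypothetical host widget

    def confirm_remove(self):
        self.dialog = ConfirmationDialog(
            self, "Remove torrent",
            "Are you sure you want to remove the selected torrent from your channel?",
            [('REMOVE', BUTTON_TYPE_CONFIRM), ('CANCEL', BUTTON_TYPE_NORMAL)],
            checkbox_text="Also remove the downloaded files")
        self.dialog.button_clicked.connect(self.on_remove_dialog_done)
        self.dialog.show()

    def on_remove_dialog_done(self, action):
        # Button index 0 is REMOVE; the dialog now exposes its checkbox directly.
        remove_data = self.dialog.checkbox.isChecked()
        self.dialog.setParent(None)
        self.dialog = None
        if action == 0:
            self.remove_torrent(remove_data)  # hypothetical helper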
diff --git a/TriblerGUI/qt_resources/torrent_channel_list_container.ui b/TriblerGUI/qt_resources/torrent_channel_list_container.ui deleted file mode 100644 index bd46b656277..00000000000 --- a/TriblerGUI/qt_resources/torrent_channel_list_container.ui +++ /dev/null @@ -1,453 +0,0 @@ - - - torrents_channels_container - - - - 0 - 0 - 830 - 536 - - - - - 0 - 0 - - - - - 0 - 0 - - - - - 16777215 - 16777215 - - - - ArrowCursor - - - Form - - - false - - - background-color: #202020; - - - - 0 - - - 0 - - - 0 - - - 0 - - - 0 - - - - - - 0 - 0 - - - - QSplitter::handle { background-color: #555; } - - - Qt::Vertical - - - - QListWidget::item:hover { -background-color: #303030; -} -QListWidget::item:selected { -background-color: #404040; -} -QListWidget::item { -border-bottom: 1px solid #303030; -} -QListWidget { -border: none; -border-top: 1px solid #555; -background-color: #202020; -} - - - - - QLabel { -color: white; -} -QTabWidget { -border: none; -background-color: #202020; -} -QTabBar::tab { - color: white; - background-color: #555; -} -QTabBar::tab:selected { - color: #555; - background-color: #777; -} - - - 0 - - - - background-color: #202020; - - - Details - - - - 0 - - - 12 - - - 12 - - - 12 - - - 12 - - - - - border: none; - - - true - - - - - 0 - 0 - 806 - 239 - - - - - Qt::AlignCenter - - - Qt::AlignLeading|Qt::AlignLeft|Qt::AlignTop - - - - - font-weight: bold; - - - Name - - - - - - - - - - - - - - font-weight: bold; - - - Category - - - - - - - font-weight: bold; - - - Size - - - - - - - - - - - - - - - - - - - - - font-weight: bold; margin-top:5px - - - Health - - - - - - - - Qt::AlignCenter - - - Qt::AlignLeading|Qt::AlignLeft|Qt::AlignTop - - - - - - - - - - - - Re-check - - - - EllipseButton{ - border: 1px solid #b5b5b5; - border-radius: 4px; - color: white - } - EllipseButton::hover{ - color: #333; - background-color:#c5c5c5; - } - - - - PointingHandCursor - - - - - - - - - - - - - - - Files - - - - 0 - - - 0 - - - 0 - - - 0 - - - 0 - - - - - QTreeWidget { -border: none; -font-size: 13px; -} -QTreeWidget::item { -color: white; -border-bottom: 1px solid #303030; -} -QTreeWidget::item:hover { -background-color: #303030; -} -QTreeWidget::item::selected { -background-color: #444; -} -QHeaderView { -background-color: transparent; -} -QHeaderView::section { -background-color: transparent; -border: none; -color: #B5B5B5; -padding: 10px; -font-size: 14px; -border-bottom: 1px solid #303030; -} -QHeaderView::drop-down { -color: red; -} -QHeaderView::section:hover { -color: white; -} -QTableCornerButton::section { -background-color: transparent; -} - - - QAbstractItemView::NoSelection - - - true - - - 0 - - - 300 - - - false - - - true - - - - PATH - - - - - SIZE - - - - - - - - - Trackers - - - - 0 - - - 0 - - - 0 - - - 0 - - - 0 - - - - - QTreeWidget { -border: none; -font-size: 13px; -} -QTreeWidget::item { -color: white; -border-bottom: 1px solid #303030; -} -QTreeWidget::item:hover { -background-color: #303030; -} -QTreeWidget::item::selected { -background-color: #444; -} -QHeaderView { -background-color: transparent; -} -QHeaderView::section { -background-color: transparent; -border: none; -color: #B5B5B5; -padding: 10px; -font-size: 14px; -border-bottom: 1px solid #303030; -} -QHeaderView::drop-down { -color: red; -} -QHeaderView::section:hover { -color: white; -} -QTableCornerButton::section { -background-color: transparent; -} - - - 0 - - - - NAME - - - - - - - - - - - - - - LazyLoadList - QListWidget -
- <header>TriblerGUI.widgets.lazyloadlist.h</header>
- </customwidget>
- <customwidget>
- <class>TorrentDetailsTabWidget</class>
- <extends>QTabWidget</extends>
- <header>TriblerGUI.widgets.torrentdetailstabwidget.h</header>
- <container>1</container>
- </customwidget>
- <customwidget>
- <class>EllipseButton</class>
- <extends>QToolButton</extends>
- <header>TriblerGUI.widgets.ellipsebutton.h</header>
- </customwidget>
- </customwidgets>
- <resources/>
- <connections/>
-</ui>
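The deleted container above paired a LazyLoadList with a details tab widget; its replacement (torrent_details_container.ui, added next) is refreshed through the new torrent_info_updated event that event_request_manager.py now emits in place of the removed search_result_* signals. A small consumer sketch; the handler name and the wiring comment are mine, only the signal itself comes from this patch:

def on_torrent_info_updated(event_dict):
    # event_dict is the "event" member of the pushed
    # {"type": "torrent_info_updated", "event": {...}} message,
    # e.g. refreshed health numbers for the torrent on display.
    print("torrent info updated: %s" % event_dict)

# Hypothetical wiring, from code with access to the main window:
# self.core_manager.events_manager.torrent_info_updated.connect(on_torrent_info_updated)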
diff --git a/TriblerGUI/qt_resources/torrent_details_container.ui b/TriblerGUI/qt_resources/torrent_details_container.ui new file mode 100644 index 00000000000..24a2e899c88 --- /dev/null +++ b/TriblerGUI/qt_resources/torrent_details_container.ui @@ -0,0 +1,345 @@ + + + details_container + + + + 0 + 0 + 585 + 241 + + + + + 0 + 0 + + + + + 0 + 0 + + + + + 16777215 + 16777215 + + + + ArrowCursor + + + Form + + + false + + + background-color: #202020; + + + + 0 + + + 0 + + + 0 + + + 0 + + + 0 + + + + + + 0 + 0 + + + + + 0 + 0 + + + + + 16777215 + 16777215 + + + + QLabel { +color: white; +} +QTabWidget { +border: none; +background-color: #202020; +} +QTabBar::tab { + color: white; + background-color: #555; +} +QTabBar::tab:selected { + color: #555; + background-color: #777; +} + + + 0 + + + + background-color: #202020; + + + Details + + + + 0 + + + 12 + + + 12 + + + 12 + + + 12 + + + + + border: none; + + + true + + + + + 0 + 0 + 561 + 196 + + + + + Qt::AlignCenter + + + Qt::AlignLeading|Qt::AlignLeft|Qt::AlignTop + + + + + font-weight: bold; + + + Name + + + + + + + + + + true + + + + + + + font-weight: bold; + + + Category + + + + + + + + + + + + + + font-weight: bold; + + + Size + + + + + + + + + + + + + + Qt::AlignCenter + + + Qt::AlignLeading|Qt::AlignLeft|Qt::AlignTop + + + + + + + + + + + + PointingHandCursor + + + + EllipseButton{ + border: 1px solid #b5b5b5; + border-radius: 4px; + color: white + } + EllipseButton::hover{ + color: #333; + background-color:#c5c5c5; + } + + + + Re-check + + + + + + + + + font-weight: bold; + + + Health + + + + + + + + + + + + Trackers + + + + 0 + + + 0 + + + 0 + + + 0 + + + 0 + + + + + QTreeWidget { +border: none; +font-size: 13px; +} +QTreeWidget::item { +color: white; +border-bottom: 1px solid #303030; +} +QTreeWidget::item:hover { +background-color: #303030; +} +QTreeWidget::item::selected { +background-color: #444; +} +QHeaderView { +background-color: transparent; +} +QHeaderView::section { +background-color: transparent; +border: none; +color: #B5B5B5; +padding: 10px; +font-size: 14px; +border-bottom: 1px solid #303030; +} +QHeaderView::drop-down { +color: red; +} +QHeaderView::section:hover { +color: white; +} +QTableCornerButton::section { +background-color: transparent; +} + + + 0 + + + + NAME + + + + + + + + + + + + + EllipseButton + QToolButton +
+ <header>TriblerGUI.widgets.ellipsebutton.h</header>
+ </customwidget>
+ <customwidget>
+ <class>TorrentDetailsTabWidget</class>
+ <extends>QTabWidget</extends>
+ <header>TriblerGUI.widgets.torrentdetailstabwidget.h</header>
+ <container>1</container>
+ </customwidget>
+ </customwidgets>
+ <resources/>
+ <connections/>
+</ui>
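The Health row and Re-check button in the container added above display the HEALTH_* strings introduced in TriblerGUI/defs.py. A plausible mapping from tracker numbers to those labels, written out here as an illustration; the actual rules live in the widget code, which this hunk does not show:

from TriblerGUI.defs import (HEALTH_CHECKING, HEALTH_DEAD, HEALTH_GOOD,
                             HEALTH_MOOT, HEALTH_UNCHECKED)

def health_label(seeders, leechers, checking=False, checked=True):
    if checking:
        return HEALTH_CHECKING   # u'Checking..'
    if not checked:
        return HEALTH_UNCHECKED  # u'Unknown'
    if seeders > 0:
        return HEALTH_GOOD       # u'Seeds found'
    if leechers > 0:
        return HEALTH_MOOT       # u'Peers found'
    return HEALTH_DEAD           # u'No peers'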
diff --git a/TriblerGUI/qt_resources/torrents_list.ui b/TriblerGUI/qt_resources/torrents_list.ui new file mode 100644 index 00000000000..a08c129326e --- /dev/null +++ b/TriblerGUI/qt_resources/torrents_list.ui @@ -0,0 +1,181 @@ + + + torrents_list + + + + 0 + 0 + 830 + 569 + + + + + 0 + 0 + + + + + 0 + 0 + + + + + 16777215 + 16777215 + + + + ArrowCursor + + + Form + + + false + + + background-color: #202020; + + + + 0 + + + 0 + + + 0 + + + 0 + + + 0 + + + + + QSplitter::handle { background-color: #555; } + + + Qt::Vertical + + + false + + + false + + + + QTableView { +border: none; +font-size: 13px; +outline: 0; +} + +QTableView::item { +color: white; +height: 40px; +border-bottom: 1px solid #303030; +} + + +QTableView::item::hover { + background-color: rgba(255,255,255, 50); + } + + QHeaderView { + background-color: transparent; + } + QHeaderView::section { + background-color: transparent; + border: none; + color: #B5B5B5; + padding: 10px; + font-size: 14px; + border-bottom: 1px solid #303030; + } + QHeaderView::section:hover { + color: white; + } + QTableCornerButton::section { + background-color: transparent; + } + QHeaderView::section:up-arrow { + color: white; + } + QHeaderView::section:down-arrow { + color: white; + } + + + + Qt::ScrollBarAlwaysOff + + + QAbstractItemView::SingleSelection + + + QAbstractItemView::SelectRows + + + false + + + true + + + false + + + false + + + false + + + + + + 0 + 0 + + + + + 0 + 200 + + + + + 16777215 + 200 + + + + + + + + + + TorrentDetailsContainer + QWidget +
+ <header>TriblerGUI.widgets.torrentdetailscontainer.h</header>
+ <container>1</container>
+ </customwidget>
+ <customwidget>
+ <class>TorrentsTableView</class>
+ <extends>QTableView</extends>
+ <header>TriblerGUI.widgets.lazytableview.h</header>
+ </customwidget>
+ </customwidgets>
+ <resources/>
+ <connections/>
+</ui>
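The tribler_request_manager.py hunk below stops callers from hand-building query strings: perform_request() grows url_params, data and raw_data keyword arguments, backed by a tribler_urlencode() helper that quote_plus-encodes UTF-8 keys and values and gives list values special key[] handling. A usage sketch against that new signature; the two payload shapes are copied from the tribler_window.py hunks in this diff, while the callbacks and concrete values are placeholders:

from TriblerGUI.tribler_request_manager import TriblerRequestManager

def on_completions(response):
    pass  # placeholder callback

def on_download_added(response):
    pass  # placeholder callback

request_mgr = TriblerRequestManager()

# GET search/completions?q=... : url_params becomes the encoded query string.
request_mgr.perform_request("search/completions", on_completions,
                            url_params={'q': u'ubuntu'})

# PUT downloads : the data dict is form-encoded into the request body.
request_mgr.perform_request("downloads", on_download_added, method='PUT',
                            data={"uri": "magnet:?xt=urn:btih:...",
                                  "anon_hops": 2,
                                  "safe_seeding": 1,
                                  "destination": "/downloads",
                                  "selected_files": []})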
diff --git a/TriblerGUI/tribler_app.py b/TriblerGUI/tribler_app.py index c77cfae5f66..f598ff51daa 100644 --- a/TriblerGUI/tribler_app.py +++ b/TriblerGUI/tribler_app.py @@ -1,3 +1,5 @@ +from __future__ import absolute_import + import os import sys diff --git a/TriblerGUI/tribler_request_manager.py b/TriblerGUI/tribler_request_manager.py index 24f8c35764c..7fd6cfaa84f 100644 --- a/TriblerGUI/tribler_request_manager.py +++ b/TriblerGUI/tribler_request_manager.py @@ -1,16 +1,47 @@ -from collections import deque, namedtuple +from __future__ import absolute_import + import logging +from collections import deque, namedtuple from threading import RLock from time import time +from urllib import quote_plus -from PyQt5.QtCore import QUrl, pyqtSignal, QIODevice, QBuffer, QObject +from PyQt5.QtCore import QBuffer, QIODevice, QObject, QUrl, pyqtSignal from PyQt5.QtNetwork import QNetworkAccessManager, QNetworkRequest +from six import string_types, text_type +from six.moves import xrange + import Tribler.Core.Utilities.json_util as json -from TriblerGUI.defs import BUTTON_TYPE_NORMAL, DEFAULT_API_PORT, DEFAULT_API_PROTOCOL, DEFAULT_API_HOST + +from TriblerGUI.defs import BUTTON_TYPE_NORMAL, DEFAULT_API_HOST, DEFAULT_API_PORT, DEFAULT_API_PROTOCOL from TriblerGUI.dialogs.confirmationdialog import ConfirmationDialog +def tribler_urlencode(data): + # Convert all values that are an array to uri-encoded values + for key in data.keys(): + value = data[key] + if isinstance(value, list): + if value: + data[key + "[]"] = "&".join(value) + else: + del data[key] + + # Convert all keys and values in the data to utf-8 unicode strings + utf8_items = [] + for key, value in data.items(): + utf8_key = quote_plus(text_type(key).encode('utf-8')) + # Convert bool values to ints + if isinstance(value, bool): + value = int(value) + utf8_value = quote_plus(text_type(value).encode('utf-8')) + utf8_items.append("%s=%s" % (utf8_key, utf8_value)) + + data = "&".join(utf8_items) + return data + + class QueuePriorityEnum(object): """ Enum for HTTP request priority. @@ -45,7 +76,7 @@ def __init__(self, max_outstanding=50, timeout=15): self.medium_queue = [] self.low_queue = [] - self.lock = RLock() # Don't allow asynchronous access to the queue + self.lock = RLock() # Don't allow asynchronous access to the queue def parse_queue(self): """ @@ -115,7 +146,7 @@ def enqueue(self, request_manager, method, endpoint, data, read_callback, captur self.high_queue.append(queue_item) else: # Get the last item of the queue - last_item = self.high_queue.pop(self.max_outstanding -1) + last_item = self.high_queue.pop(self.max_outstanding - 1) # Add the original queue_item to the front of the queue self.high_queue.insert(0, queue_item) # reduce the priority of last_item and try to put in medium queue @@ -234,31 +265,42 @@ def __init__(self, window=None): def set_reply_handle(self, reply): self.reply = reply - def perform_request(self, endpoint, read_callback, data="", method='GET', capture_errors=True, - priority=QueuePriorityEnum.CRITICAL, on_cancel=lambda: None): + def perform_request(self, endpoint, read_callback, url_params=None, data=None, raw_data="", method='GET', + capture_errors=True, priority=QueuePriorityEnum.CRITICAL, on_cancel=lambda: None): """ Perform a HTTP request. :param endpoint: the endpoint to call (i.e. 
"statistics") :param read_callback: the callback to be called with result info when we have the data + :param url_params: an optional dictionary with parameters that should be included in the URL :param data: optional POST data to be sent with the request + :param raw_data: optional raw data to include in the request, will get priority over data if defined :param method: the HTTP verb (GET/POST/PUT/PATCH) :param capture_errors: whether errors should be handled by this class (defaults to True) + :param priority: the priority of this request + :param on_cancel: optional callback to invoke when the request has been cancelled """ self.on_cancel = on_cancel if read_callback: self.received_json.connect(read_callback) + url = endpoint + (("?" + tribler_urlencode(url_params)) if url_params else "") + + if data and not raw_data: + data = tribler_urlencode(data) + elif raw_data: + data = raw_data.encode('utf-8') + def reply_callback(reply, log): log[-1] = reply.attribute(QNetworkRequest.HttpStatusCodeAttribute) self.on_finished(reply, capture_errors) - request_queue.enqueue(self, method, endpoint, data, reply_callback, priority) + request_queue.enqueue(self, method, url, data, reply_callback, priority) @staticmethod def get_message_from_error(error): return_error = None - if isinstance(error['error'], (str, unicode)): + if isinstance(error['error'], string_types): return_error = error['error'] elif 'message' in error['error']: return_error = error['error']['message'] diff --git a/TriblerGUI/tribler_window.py b/TriblerGUI/tribler_window.py index 468a04b8d79..510113116e3 100644 --- a/TriblerGUI/tribler_window.py +++ b/TriblerGUI/tribler_window.py @@ -11,15 +11,11 @@ from urllib import pathname2url, unquote from PyQt5 import uic -from PyQt5.QtCore import QCoreApplication, QObject, QPoint, QSettings, QStringListModel, QTimer, QUrl, Qt, \ +from PyQt5.QtCore import QCoreApplication, QDir, QObject, QPoint, QSettings, QStringListModel, QTimer, QUrl, Qt, \ pyqtSignal, pyqtSlot -from PyQt5.QtCore import QDir -from PyQt5.QtGui import QDesktopServices, QIcon -from PyQt5.QtGui import QKeySequence -from PyQt5.QtGui import QPixmap +from PyQt5.QtGui import QDesktopServices, QIcon, QKeySequence, QPixmap from PyQt5.QtWidgets import QAction, QApplication, QCompleter, QFileDialog, QLineEdit, QListWidget, QMainWindow, \ - QStyledItemDelegate, QSystemTrayIcon, QTreeWidget -from PyQt5.QtWidgets import QShortcut + QShortcut, QStyledItemDelegate, QSystemTrayIcon, QTreeWidget import six @@ -29,19 +25,17 @@ from TriblerGUI.debug_window import DebugWindow from TriblerGUI.defs import BUTTON_TYPE_CONFIRM, BUTTON_TYPE_NORMAL, DEFAULT_API_PORT, PAGE_CHANNEL_DETAILS, \ PAGE_DISCOVERED, PAGE_DISCOVERING, PAGE_DOWNLOADS, PAGE_EDIT_CHANNEL, PAGE_HOME, PAGE_LOADING, \ - PAGE_PLAYLIST_DETAILS, PAGE_SEARCH_RESULTS, PAGE_SETTINGS, PAGE_SUBSCRIBED_CHANNELS, PAGE_TRUST, \ - PAGE_VIDEO_PLAYER, SHUTDOWN_WAITING_PERIOD + PAGE_SEARCH_RESULTS, PAGE_SETTINGS, PAGE_SUBSCRIBED_CHANNELS, PAGE_TRUST, PAGE_VIDEO_PLAYER, SHUTDOWN_WAITING_PERIOD from TriblerGUI.dialogs.confirmationdialog import ConfirmationDialog from TriblerGUI.dialogs.feedbackdialog import FeedbackDialog from TriblerGUI.dialogs.startdownloaddialog import StartDownloadDialog from TriblerGUI.tribler_action_menu import TriblerActionMenu from TriblerGUI.tribler_request_manager import TriblerRequestManager, dispatcher, request_queue -from TriblerGUI.utilities import get_gui_setting, get_image_path, get_ui_file_path, is_dir_writable, quote_plus_unicode +from TriblerGUI.utilities import 
diff --git a/TriblerGUI/tribler_window.py b/TriblerGUI/tribler_window.py index 468a04b8d79..510113116e3 100644 --- a/TriblerGUI/tribler_window.py +++ b/TriblerGUI/tribler_window.py @@ -11,15 +11,11 @@ from urllib import pathname2url, unquote from PyQt5 import uic -from PyQt5.QtCore import QCoreApplication, QObject, QPoint, QSettings, QStringListModel, QTimer, QUrl, Qt, \ +from PyQt5.QtCore import QCoreApplication, QDir, QObject, QPoint, QSettings, QStringListModel, QTimer, QUrl, Qt, \ pyqtSignal, pyqtSlot -from PyQt5.QtCore import QDir -from PyQt5.QtGui import QDesktopServices, QIcon -from PyQt5.QtGui import QKeySequence -from PyQt5.QtGui import QPixmap +from PyQt5.QtGui import QDesktopServices, QIcon, QKeySequence, QPixmap from PyQt5.QtWidgets import QAction, QApplication, QCompleter, QFileDialog, QLineEdit, QListWidget, QMainWindow, \ - QStyledItemDelegate, QSystemTrayIcon, QTreeWidget -from PyQt5.QtWidgets import QShortcut + QShortcut, QStyledItemDelegate, QSystemTrayIcon, QTreeWidget import six @@ -29,19 +25,17 @@ from TriblerGUI.debug_window import DebugWindow from TriblerGUI.defs import BUTTON_TYPE_CONFIRM, BUTTON_TYPE_NORMAL, DEFAULT_API_PORT, PAGE_CHANNEL_DETAILS, \ PAGE_DISCOVERED, PAGE_DISCOVERING, PAGE_DOWNLOADS, PAGE_EDIT_CHANNEL, PAGE_HOME, PAGE_LOADING, \ - PAGE_PLAYLIST_DETAILS, PAGE_SEARCH_RESULTS, PAGE_SETTINGS, PAGE_SUBSCRIBED_CHANNELS, PAGE_TRUST, \ - PAGE_VIDEO_PLAYER, SHUTDOWN_WAITING_PERIOD + PAGE_SEARCH_RESULTS, PAGE_SETTINGS, PAGE_SUBSCRIBED_CHANNELS, PAGE_TRUST, PAGE_VIDEO_PLAYER, SHUTDOWN_WAITING_PERIOD from TriblerGUI.dialogs.confirmationdialog import ConfirmationDialog from TriblerGUI.dialogs.feedbackdialog import FeedbackDialog from TriblerGUI.dialogs.startdownloaddialog import StartDownloadDialog from TriblerGUI.tribler_action_menu import TriblerActionMenu from TriblerGUI.tribler_request_manager import TriblerRequestManager, dispatcher, request_queue -from TriblerGUI.utilities import get_gui_setting, get_image_path, get_ui_file_path, is_dir_writable, quote_plus_unicode +from TriblerGUI.utilities import get_gui_setting, get_image_path, get_ui_file_path, is_dir_writable +from TriblerGUI.widgets.triblertablecontrollers import sanitize_for_fts # Pre-load form UI classes -fc_channel_torrent_list_item, _ = uic.loadUiType(get_ui_file_path('channel_torrent_list_item.ui')) -fc_channel_list_item, _ = uic.loadUiType(get_ui_file_path('channel_list_item.ui')) -fc_playlist_list_item, _ = uic.loadUiType(get_ui_file_path('playlist_list_item.ui')) + fc_home_recommended_item, _ = uic.loadUiType(get_ui_file_path('home_recommended_item.ui')) fc_loading_list_item, _ = uic.loadUiType(get_ui_file_path('loading_list_item.ui')) @@ -135,19 +129,6 @@ def __init__(self, core_args=None, core_env=None, api_port=None): TriblerRequestManager.window = self self.tribler_status_bar.hide() - # Load dynamic widgets - uic.loadUi(get_ui_file_path('torrent_channel_list_container.ui'), self.channel_page_container) - self.channel_torrents_list = self.channel_page_container.items_list - self.channel_torrents_detail_widget = self.channel_page_container.details_tab_widget - self.channel_torrents_detail_widget.initialize_details_widget() - self.channel_torrents_list.itemSelectionChanged.connect(self.channel_page.clicked_item) - - uic.loadUi(get_ui_file_path('torrent_channel_list_container.ui'), self.search_page_container) - self.search_results_list = self.search_page_container.items_list - self.search_torrents_detail_widget = self.search_page_container.details_tab_widget - self.search_torrents_detail_widget.initialize_details_widget() - self.search_results_list.itemClicked.connect(self.on_channel_item_click) - self.search_results_list.itemSelectionChanged.connect(self.search_results_page.clicked_item) self.token_balance_widget.mouseReleaseEvent = self.on_token_balance_click def on_state_update(new_state): @@ -170,15 +151,16 @@ self.left_menu_button_downloads, self.left_menu_button_discovered] self.video_player_page.initialize_player() - self.search_results_page.initialize_search_results_page() + self.search_results_page.initialize_search_results_page(self.gui_settings) self.settings_page.initialize_settings_page() self.subscribed_channels_page.initialize() - self.edit_channel_page.initialize_edit_channel_page() + self.edit_channel_page.initialize_edit_channel_page(self.gui_settings) self.downloads_page.initialize_downloads_page() self.home_page.initialize_home_page() self.loading_page.initialize_loading_page() self.discovering_page.initialize_discovering_page() - self.discovered_page.initialize_discovered_page() + self.discovered_page.initialize_discovered_page(self.gui_settings) + self.channel_page.initialize_channel_page(self.gui_settings) self.trust_page.initialize_trust_page() self.token_mining_page.initialize_token_mining_page() @@ -249,10 +231,6 @@ def on_state_update(new_state): # Start Tribler self.core_manager.start(core_args=core_args, core_env=core_env) - self.core_manager.events_manager.received_search_result_channel.connect( self.search_results_page.received_search_result_channel) - self.core_manager.events_manager.received_search_result_torrent.connect( self.search_results_page.received_search_result_torrent) self.core_manager.events_manager.torrent_finished.connect(self.on_torrent_finished) self.core_manager.events_manager.new_version_available.connect(self.on_new_version_available) self.core_manager.events_manager.tribler_started.connect(self.on_tribler_started) @@ -408,18 +386,19 @@ def perform_start_download_request(self, uri, anon_download, safe_seeding, destination,
ConfirmationDialog.show_message(self.window(), "Download error %s" % uri, gui_error_message, "OK") return - selected_files_uri = "" + selected_files_list = [] if len(selected_files) != total_files: # Not all files included - selected_files_uri = u'&' + u''.join(u"selected_files[]=%s&" % - quote_plus_unicode(filename) for filename in selected_files)[:-1] + selected_files_list = [filename for filename in selected_files] anon_hops = int(self.tribler_settings['download_defaults']['number_hops']) if anon_download else 0 safe_seeding = 1 if safe_seeding else 0 - post_data = "uri=%s&anon_hops=%d&safe_seeding=%d&destination=%s%s" % (quote_plus_unicode(uri), anon_hops, - safe_seeding, destination, - selected_files_uri) - post_data = post_data.encode('utf-8') # We need to send bytes in the request, not unicode - + post_data = { + "uri": uri, + "anon_hops": anon_hops, + "safe_seeding": safe_seeding, + "destination": destination, + "selected_files": selected_files_list + } request_mgr = TriblerRequestManager() request_mgr.perform_request("downloads", callback if callback else self.on_download_added, method='PUT', data=post_data) @@ -464,7 +443,7 @@ def on_new_version_dialog_done(self, version, action): def on_search_text_change(self, text): self.search_suggestion_mgr = TriblerRequestManager() self.search_suggestion_mgr.perform_request( - "search/completions?q=%s" % text, self.on_received_search_completions) + "search/completions", self.on_received_search_completions, url_params={'q': sanitize_for_fts(text)}) def on_received_search_completions(self, completions): if completions is None: @@ -489,8 +468,6 @@ def received_settings(self, settings): self.video_player_page.video_player_port = settings["ports"]["video_server~port"] # Disable various components based on the settings - if not self.tribler_settings['search_community']['enabled']: - self.window().top_search_bar.setHidden(True) if not self.tribler_settings['video_server']['enabled']: self.left_menu_button_video_player.setHidden(True) self.downloads_creditmining_button.setHidden(not self.tribler_settings["credit_mining"]["enabled"]) @@ -521,8 +498,6 @@ def on_top_search_button_click(self): self.has_search_results = True self.clicked_menu_button_search() self.search_results_page.perform_search(current_search_query) - self.search_request_mgr = TriblerRequestManager() - self.search_request_mgr.perform_request("search?q=%s" % current_search_query, None) self.last_search_query = current_search_query self.last_search_time = current_ts @@ -566,7 +541,7 @@ def received_trustchain_statistics(self, statistics): self.trust_page.load_blocks() def set_token_balance(self, balance): - if abs(balance) > 1024 ** 4: # Balance is over a TB + if abs(balance) > 1024 ** 4: # Balance is over a TB balance /= 1024.0 ** 4 self.token_balance_label.setText("%.1f TB" % balance) elif abs(balance) > 1024 ** 3: # Balance is over a GB @@ -704,9 +679,9 @@ def on_confirm_add_directory_dialog(self, action): escaped_uri = u"file:%s" % pathname2url(torrent_file.encode('utf-8')) self.perform_start_download_request(escaped_uri, self.window().tribler_settings['download_defaults'][ - 'anonymity_enabled'], + 'anonymity_enabled'], self.window().tribler_settings['download_defaults'][ - 'safeseeding_enabled'], + 'safeseeding_enabled'], self.tribler_settings['download_defaults']['saveas'], [], 0) if self.dialog: @@ -801,9 +776,9 @@ def clicked_menu_button_video_player(self): self.show_left_menu_playlist() def clicked_menu_button_downloads(self): + 
self.deselect_all_menu_buttons(self.left_menu_button_downloads) self.raise_window() self.left_menu_button_downloads.setChecked(True) - self.deselect_all_menu_buttons(self.left_menu_button_downloads) self.stackedWidget.setCurrentIndex(PAGE_DOWNLOADS) self.navigation_stack = [] self.hide_left_menu_playlist() @@ -815,8 +790,8 @@ def clicked_menu_button_debug(self): def clicked_menu_button_subscriptions(self): self.deselect_all_menu_buttons(self.left_menu_button_subscriptions) - self.subscribed_channels_page.load_subscribed_channels() self.stackedWidget.setCurrentIndex(PAGE_SUBSCRIBED_CHANNELS) + self.subscribed_channels_page.load_subscribed_channels() self.navigation_stack = [] self.hide_left_menu_playlist() @@ -830,34 +805,15 @@ def show_left_menu_playlist(self): self.left_menu_playlist_label.setHidden(False) self.left_menu_playlist.setHidden(False) - def on_channel_item_click(self, channel_list_item): - list_widget = channel_list_item.listWidget() - from TriblerGUI.widgets.channel_list_item import ChannelListItem - if isinstance(list_widget.itemWidget(channel_list_item), ChannelListItem): - channel_info = channel_list_item.data(Qt.UserRole) - self.channel_page.initialize_with_channel(channel_info) - self.navigation_stack.append(self.stackedWidget.currentIndex()) - self.stackedWidget.setCurrentIndex(PAGE_CHANNEL_DETAILS) - - def on_playlist_item_click(self, playlist_list_item): - list_widget = playlist_list_item.listWidget() - from TriblerGUI.widgets.playlist_list_item import PlaylistListItem - if isinstance(list_widget.itemWidget(playlist_list_item), PlaylistListItem): - playlist_info = playlist_list_item.data(Qt.UserRole) - self.playlist_page.initialize_with_playlist(playlist_info) - self.navigation_stack.append(self.stackedWidget.currentIndex()) - self.stackedWidget.setCurrentIndex(PAGE_PLAYLIST_DETAILS) + def on_channel_clicked(self, channel_info): + self.channel_page.initialize_with_channel(channel_info) + self.navigation_stack.append(self.stackedWidget.currentIndex()) + self.stackedWidget.setCurrentIndex(PAGE_CHANNEL_DETAILS) def on_page_back_clicked(self): try: prev_page = self.navigation_stack.pop() self.stackedWidget.setCurrentIndex(prev_page) - if prev_page == PAGE_SEARCH_RESULTS: - self.stackedWidget.widget(prev_page).load_search_results_in_list() - if prev_page == PAGE_SUBSCRIBED_CHANNELS: - self.stackedWidget.widget(prev_page).load_subscribed_channels() - if prev_page == PAGE_DISCOVERED: - self.stackedWidget.widget(prev_page).load_discovered_channels() except IndexError: logging.exception("Unknown page found in stack") @@ -895,7 +851,6 @@ def show_force_shutdown(): self.show_loading_screen() self.hide_status_bar() self.loading_text_label.setText("Shutting down...") - if self.debug_window: self.debug_window.setHidden(True) diff --git a/TriblerGUI/utilities.py b/TriblerGUI/utilities.py index a0c9134b8de..d7d0d7c2175 100644 --- a/TriblerGUI/utilities.py +++ b/TriblerGUI/utilities.py @@ -1,3 +1,5 @@ +from __future__ import absolute_import + import hashlib import os import re @@ -6,7 +8,13 @@ from urllib import quote_plus import TriblerGUI -from TriblerGUI.defs import VIDEO_EXTS +from TriblerGUI.defs import HEALTH_DEAD, HEALTH_GOOD, HEALTH_MOOT, HEALTH_UNCHECKED, VIDEO_EXTS + + +def index2uri(index): + infohash = index.model().data_items[index.row()][u'infohash'] + name = index.model().data_items[index.row()][u'name'] + return u"magnet:?xt=urn:btih:%s&dn=%s" % (infohash, name) def format_size(num, suffix='B'): @@ -175,27 +183,6 @@ def get_image_path(filename): return 
os.path.join(get_base_path(), 'images/%s' % filename) -def bisect_right(item, item_list, is_torrent): - """ - This method inserts a channel/torrent in a sorted list. The sorting is based on relevance score. - The implementation is based on bisect_right. - """ - lo = 0 - hi = len(item_list) - while lo < hi: - mid = (lo+hi) // 2 - if item['relevance_score'] == item_list[mid]['relevance_score'] and is_torrent: - if len(split_into_keywords(item['name'])) < len(split_into_keywords(item_list[mid]['name'])): - hi = mid - else: - lo = mid + 1 - elif item['relevance_score'] > item_list[mid]['relevance_score']: - hi = mid - else: - lo = mid + 1 - return lo - - def get_gui_setting(gui_settings, value, default, is_bool=False): """ Utility method to get a specific GUI setting. The is_bool flag defines whether we expect a boolean so we convert it @@ -254,3 +241,14 @@ def prec_div(number, precision): Divide a given number by 10^precision. """ return float(number) / float(10 ** precision) + + +def get_health(seeders, leechers, last_tracker_check): + if last_tracker_check == 0: + return HEALTH_UNCHECKED + if seeders > 0: + return HEALTH_GOOD + elif leechers > 0: + return HEALTH_MOOT + else: + return HEALTH_DEAD diff --git a/TriblerGUI/widgets/channel_list_item.py b/TriblerGUI/widgets/channel_list_item.py deleted file mode 100644 index a5f05f1beda..00000000000 --- a/TriblerGUI/widgets/channel_list_item.py +++ /dev/null @@ -1,21 +0,0 @@ -from PyQt5.QtWidgets import QWidget -from TriblerGUI.tribler_window import fc_channel_list_item - - -class ChannelListItem(QWidget, fc_channel_list_item): - """ - This class is responsible for managing the item in the list of channels. - The list item supports a fade-in effect, which can be enabled with the should_fade parameter in the constructor. - """ - - def __init__(self, parent, channel): - QWidget.__init__(self, parent) - fc_channel_list_item.__init__(self) - - self.setupUi(self) - - self.channel_info = channel - self.channel_name.setText(channel["name"]) - self.channel_description_label.setText("%d items" % channel["torrents"]) - - self.subscriptions_widget.initialize_with_channel(channel) diff --git a/TriblerGUI/widgets/channel_torrent_list_item.py b/TriblerGUI/widgets/channel_torrent_list_item.py deleted file mode 100644 index 9162540532d..00000000000 --- a/TriblerGUI/widgets/channel_torrent_list_item.py +++ /dev/null @@ -1,198 +0,0 @@ -from urllib import quote_plus - -import logging -from PyQt5.QtGui import QIcon -from PyQt5.QtWidgets import QWidget -from TriblerGUI.defs import STATUS_GOOD, STATUS_DEAD, COMMITTED, TODELETE, UNCOMMITTED -from TriblerGUI.defs import STATUS_UNKNOWN - -from TriblerGUI.tribler_request_manager import TriblerRequestManager -from TriblerGUI.tribler_window import fc_channel_torrent_list_item -from TriblerGUI.utilities import format_size, get_image_path, get_gui_setting - - -class ChannelTorrentListItem(QWidget, fc_channel_torrent_list_item): - """ - This class is responsible for managing the item in the torrents list of a channel. 
- """ - - def __init__(self, parent, torrent, show_controls=False, on_remove_clicked=None): - QWidget.__init__(self, parent) - fc_channel_torrent_list_item.__init__(self) - - self.torrent_info = torrent - self._logger = logging.getLogger('TriblerGUI') - - self.setupUi(self) - self.show_controls = show_controls - self.remove_control_button_container.setHidden(True) - self.control_buttons_container.setHidden(True) - self.is_health_checking = False - self.has_health = False - self.health_request_mgr = None - self.request_mgr = None - self.download_uri = None - self.dialog = None - - self.channel_torrent_name.setText(torrent["name"]) - if torrent["size"] is None: - self.channel_torrent_description.setText("Size: -") - else: - self.channel_torrent_description.setText("Size: %s" % format_size(float(torrent["size"]))) - - if torrent["category"]: - self.channel_torrent_category.setText(torrent["category"].lower()) - else: - self.channel_torrent_category.setText("unknown") - self.thumbnail_widget.initialize(torrent["name"], 24) - - if torrent["last_tracker_check"] > 0: - self.has_health = True - self.update_health(int(torrent["num_seeders"]), int(torrent["num_leechers"])) - - if "commit_status" in torrent: - self.update_commit_status(torrent["commit_status"]) - else: - self.commit_state_label.setHidden(True) - - self.torrent_play_button.clicked.connect(self.on_play_button_clicked) - self.torrent_download_button.clicked.connect(self.on_download_clicked) - - if not self.window().vlc_available: - self.torrent_play_button.setHidden(True) - - if on_remove_clicked is not None: - self.remove_torrent_button.clicked.connect(lambda: on_remove_clicked(self)) - - def on_download_clicked(self): - self.download_uri = (u"magnet:?xt=urn:btih:%s&dn=%s" % - (self.torrent_info["infohash"], self.torrent_info['name'])).encode('utf-8') - self.window().start_download_from_uri(self.download_uri) - - def on_play_button_clicked(self): - self.download_uri = (u"magnet:?xt=urn:btih:%s&dn=%s" % - (self.torrent_info["infohash"], self.torrent_info['name'])).encode('utf-8') - - self.window().perform_start_download_request(self.download_uri, - self.window().tribler_settings['download_defaults'][ - 'anonymity_enabled'], - self.window().tribler_settings['download_defaults'][ - 'safeseeding_enabled'], - self.window().tribler_settings['download_defaults']['saveas'], - [], 0, callback=self.on_play_request_done) - - def on_play_request_done(self, result): - if not self: - return - self.window().left_menu_button_video_player.click() - self.window().video_player_page.play_media_item(self.torrent_info["infohash"], -1) - - def show_buttons(self): - if not self.show_controls: - self.remove_control_button_container.setHidden(True) - self.control_buttons_container.setHidden(False) - self.torrent_play_button.setIcon(QIcon(get_image_path('play.png'))) - self.torrent_download_button.setIcon(QIcon(get_image_path('downloads.png'))) - else: - self.control_buttons_container.setHidden(True) - self.remove_control_button_container.setHidden(False) - self.remove_torrent_button.setIcon(QIcon(get_image_path('delete.png'))) - - def hide_buttons(self): - self.remove_control_button_container.setHidden(True) - self.control_buttons_container.setHidden(True) - - def enterEvent(self, _): - self.show_buttons() - - def leaveEvent(self, _): - self.hide_buttons() - - def on_cancel_health_check(self): - """ - The request for torrent health could not be queued. - Go back to the intial state. 
- """ - try: - self.health_text.setText("unknown health") - self.set_health_indicator(STATUS_UNKNOWN) - self.is_health_checking = False - self.has_health = False - except RuntimeError: - self._logger.error("The underlying GUI widget has already been removed.") - - def check_health(self): - """ - Perform a request to check the health of the torrent that is represented by this widget. - Don't do this if we are already checking the health or if we have the health info. - """ - if self.is_health_checking or self.has_health: # Don't check health again - return - - self.health_text.setText("checking health...") - self.set_health_indicator(STATUS_UNKNOWN) - self.is_health_checking = True - self.health_request_mgr = TriblerRequestManager() - self.health_request_mgr.perform_request("torrents/%s/health?timeout=15" % self.torrent_info["infohash"], - self.on_health_response, capture_errors=False, priority="LOW", - on_cancel=self.on_cancel_health_check) - - def on_health_response(self, response): - """ - When we receive a health response, update the health status. - """ - if not self: # The channel list item might have been deleted already (i.e. by doing another search). - return - - self.has_health = True - total_seeders = 0 - total_leechers = 0 - - if not response or 'error' in response: - self.update_health(0, 0) # Just set the health to 0 seeders, 0 leechers - return - - for _, status in response['health'].iteritems(): - if 'error' in status: - continue # Timeout or invalid status - - total_seeders += int(status['seeders']) - total_leechers += int(status['leechers']) - - self.is_health_checking = False - self.update_health(total_seeders, total_leechers) - - def update_health(self, seeders, leechers): - try: - if seeders > 0: - self.health_text.setText("good health (S%d L%d)" % (seeders, leechers)) - self.set_health_indicator(STATUS_GOOD) - elif leechers > 0: - self.health_text.setText("unknown health (found peers)") - self.set_health_indicator(STATUS_UNKNOWN) - else: - self.health_text.setText("no peers found") - self.set_health_indicator(STATUS_DEAD) - except RuntimeError: - self._logger.error("The underlying GUI widget has already been removed.") - - def update_commit_status(self, status): - if status == COMMITTED: - self.commit_state_label.setText("Committed") - if status == TODELETE: - self.commit_state_label.setText("To delete") - self.remove_torrent_button.setHidden(True) - if status == UNCOMMITTED: - self.commit_state_label.setText("Uncommitted") - - def set_health_indicator(self, status): - color = "orange" - if status == STATUS_GOOD: - color = "green" - elif status == STATUS_UNKNOWN: - color = "orange" - elif status == STATUS_DEAD: - color = "red" - - self.health_indicator.setStyleSheet("background-color: %s; border-radius: %dpx" - % (color, self.health_indicator.height() / 2)) diff --git a/TriblerGUI/widgets/channelpage.py b/TriblerGUI/widgets/channelpage.py index ec190cea4ef..d62fabea39b 100644 --- a/TriblerGUI/widgets/channelpage.py +++ b/TriblerGUI/widgets/channelpage.py @@ -3,35 +3,36 @@ from PyQt5.QtGui import QIcon from PyQt5.QtWidgets import QWidget -from TriblerGUI.tribler_request_manager import TriblerRequestManager -from TriblerGUI.utilities import get_image_path -from TriblerGUI.widgets.channel_torrent_list_item import ChannelTorrentListItem -from TriblerGUI.widgets.loading_list_item import LoadingListItem -from TriblerGUI.widgets.playlist_list_item import PlaylistListItem -from TriblerGUI.widgets.text_list_item import TextListItem +from TriblerGUI.utilities import 
get_gui_setting, get_image_path +from TriblerGUI.widgets.tablecontentmodel import TorrentsContentModel +from TriblerGUI.widgets.triblertablecontrollers import TorrentsTableViewController class ChannelPage(QWidget): """ - The ChannelPage is the page with an overview of each channel and displays the list of torrents/playlist available. + The ChannelPage displays a list of a channel's contents. """ def __init__(self): QWidget.__init__(self) - - self.playlists = [] - self.torrents = [] - self.loaded_channels = False - self.loaded_playlists = False self.channel_info = None - - self.get_torents_in_channel_manager = None - self.get_playlists_in_channel_manager = None + self.model = None + self.controller = None + self.gui_settings = None + + def initialize_channel_page(self, gui_settings): + self.gui_settings = gui_settings + self.model = TorrentsContentModel(hide_xxx=get_gui_setting(self.gui_settings, "family_filter", True, + is_bool=True) if self.gui_settings else True) + self.window().core_manager.events_manager.torrent_info_updated.connect(self.model.update_torrent_info) + self.controller = TorrentsTableViewController(self.model, self.window().channel_page_container, + None, self.window().channel_torrents_filter_input) + + # Remove the commit control from the delegate for performance + commit_control = self.window().channel_page_container.content_table.delegate.commit_control + self.window().channel_page_container.content_table.delegate.controls.remove(commit_control) def initialize_with_channel(self, channel_info): - if not channel_info: - return - self.playlists = [] self.torrents = [] self.loaded_channels = False @@ -42,77 +43,18 @@ def initialize_with_channel(self, channel_info): self.channel_info = channel_info - self.window().channel_torrents_list.set_data_items([(LoadingListItem, None)]) - self.window().channel_torrents_detail_widget.hide() - self.window().channel_preview_label.setHidden(channel_info['subscribed']) self.window().channel_back_button.setIcon(QIcon(get_image_path('page_back.png'))) - self.get_torents_in_channel_manager = TriblerRequestManager() - self.get_torents_in_channel_manager.perform_request("channels/discovered/%s/torrents" % - channel_info['dispersy_cid'], - self.received_torrents_in_channel) - - if len(channel_info['dispersy_cid']) == 148: # Check-hack for Channel2.0 style address - self.loaded_playlists = True - else: - self.get_playlists_in_channel_manager = TriblerRequestManager() - self.get_playlists_in_channel_manager.perform_request("channels/discovered/%s/playlists" % - channel_info['dispersy_cid'], - self.received_playlists_in_channel) - # initialize the page about a channel self.window().channel_name_label.setText(channel_info['name']) self.window().num_subs_label.setText(str(channel_info['votes'])) self.window().subscription_widget.initialize_with_channel(channel_info) + self.window().channel_page_container.details_container.hide() - def clicked_item(self): - if len(self.window().channel_torrents_list.selectedItems()) != 1: - self.window().channel_torrents_detail_widget.hide() - else: - item = self.window().channel_torrents_list.selectedItems()[0] - list_widget = item.listWidget() - list_item = list_widget.itemWidget(item) - if isinstance(list_item, ChannelTorrentListItem): - self.window().channel_torrents_detail_widget.update_with_torrent(list_item.torrent_info) - self.window().channel_torrents_detail_widget.show() - else: - self.window().channel_torrents_detail_widget.hide() - - def update_result_list(self): - if self.loaded_channels and 
self.loaded_playlists: - self.window().channel_torrents_list.set_data_items(self.playlists + self.torrents) - - def received_torrents_in_channel(self, results): - if not results: - return - def sort_key(torrent): - """ Scoring algorithm for sorting the torrent to show liveness. The score is basically the sum of number - of seeders and leechers. If swarm info is unknown, we give unknown seeder and leecher as 0.5 & 0.4 so - that the sum is less than 1 and higher than zero. This means unknown torrents will have higher score - than dead torrent with no seeders and leechers and lower score than any barely alive torrent with a - single seeder or leecher. - """ - seeder_score = torrent['num_seeders'] if torrent['num_seeders'] or torrent['last_tracker_check'] > 0\ - else 0.5 - leecher_score = torrent['num_leechers'] if torrent['num_leechers'] or torrent['last_tracker_check'] > 0\ - else 0.5 - return seeder_score + .5 * leecher_score - - for result in sorted(results['torrents'], key=sort_key, reverse=True): - self.torrents.append((ChannelTorrentListItem, result)) - - if not self.channel_info['subscribed']: - self.torrents.append((TextListItem, "You're looking at a preview of this channel.\n" - "Subscribe to this channel to see the full content.")) - - self.loaded_channels = True - self.update_result_list() + self.model.channel_pk = channel_info['public_key'] + self.load_torrents() - def received_playlists_in_channel(self, results): - if not results: - return - for result in results['playlists']: - self.playlists.append((PlaylistListItem, result)) - self.loaded_playlists = True - self.update_result_list() + def load_torrents(self): + self.controller.model.reset() + self.controller.load_torrents(1, 50) # Load the first 50 torrents
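The channel page now follows the model/controller pattern shared by the reworked GUI pages: a content model caches the rows fetched so far, while a table-view controller pages data in from the core on demand. A minimal sketch of the shared wiring (the widget arguments here are placeholders; the call sites above and below show the page-specific widgets actually passed):

    model = TorrentsContentModel(hide_xxx=True)            # family filter on by default
    controller = TorrentsTableViewController(model, table_view_or_container,
                                             num_items_label, filter_input)
    # Refreshing a page is always the same two steps:
    controller.model.reset()         # drop previously fetched rows
    controller.load_torrents(1, 50)  # fetch page 1 (50 items) via the REST API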
diff --git a/TriblerGUI/widgets/createtorrentpage.py b/TriblerGUI/widgets/createtorrentpage.py index 91b82067273..451c3341525 100644 --- a/TriblerGUI/widgets/createtorrentpage.py +++ b/TriblerGUI/widgets/createtorrentpage.py @@ -1,14 +1,16 @@ +from __future__ import absolute_import + import os -import urllib from PyQt5.QtCore import QDir from PyQt5.QtGui import QIcon +from PyQt5.QtWidgets import QAction, QFileDialog, QWidget -from PyQt5.QtWidgets import QWidget, QFileDialog, QAction +from six.moves import xrange -from TriblerGUI.tribler_action_menu import TriblerActionMenu -from TriblerGUI.defs import PAGE_EDIT_CHANNEL_TORRENTS, BUTTON_TYPE_NORMAL +from TriblerGUI.defs import BUTTON_TYPE_NORMAL, PAGE_EDIT_CHANNEL_TORRENTS from TriblerGUI.dialogs.confirmationdialog import ConfirmationDialog +from TriblerGUI.tribler_action_menu import TriblerActionMenu from TriblerGUI.tribler_request_manager import TriblerRequestManager from TriblerGUI.utilities import get_image_path @@ -27,8 +29,7 @@ def __init__(self): self.selected_item_index = -1 self.initialized = False - def initialize(self, identifier): - self.channel_identifier = identifier + def initialize(self): self.window().create_torrent_name_field.setText('') self.window().create_torrent_description_field.setText('') self.window().create_torrent_files_list.clear() @@ -81,14 +82,18 @@ def on_create_clicked(self): self.window().edit_channel_create_torrent_button.setEnabled(False) - files_str = u"" + files_list = [] for ind in xrange(self.window().create_torrent_files_list.count()): - files_str += u"files[]=%s&" % urllib.quote_plus( - self.window().create_torrent_files_list.item(ind).text().encode('utf-8')) - - name = urllib.quote_plus(self.window().create_torrent_name_field.text().encode('utf-8')) - description = urllib.quote_plus(self.window().create_torrent_description_field.toPlainText().encode('utf-8')) - post_data = (u"%s&name=%s&description=%s" % (files_str[:-1], name, description)).encode('utf-8') + file_str = self.window().create_torrent_files_list.item(ind).text() + files_list.append(file_str) + + name = self.window().create_torrent_name_field.text() + description = self.window().create_torrent_description_field.toPlainText() + post_data = { + "name": name, + "description": description, + "files": files_list + } url = "createtorrent?download=1" if self.window().seed_after_adding_checkbox.isChecked() else "createtorrent" self.request_mgr = TriblerRequestManager() self.request_mgr.perform_request(url, self.on_torrent_created, data=post_data, method='POST') @@ -107,11 +112,9 @@ def on_torrent_created(self, result): self.add_torrent_to_channel(result['torrent']) def add_torrent_to_channel(self, torrent): - post_data = str("torrent=%s" % urllib.quote_plus(torrent)) self.request_mgr = TriblerRequestManager() - self.request_mgr.perform_request("channels/discovered/%s/torrents" % - self.channel_identifier, self.on_torrent_to_channel_added, - data=post_data, method='PUT') + self.request_mgr.perform_request("mychannel/torrents", self.on_torrent_to_channel_added, + data={"torrent": torrent}, method='PUT') def on_torrent_to_channel_added(self, result): if not result: @@ -119,7 +122,7 @@ def on_torrent_to_channel_added(self, result): self.window().edit_channel_create_torrent_progress_label.hide() if 'added' in result: self.window().edit_channel_details_stacked_widget.setCurrentIndex(PAGE_EDIT_CHANNEL_TORRENTS) - self.window().edit_channel_page.load_channel_torrents() + self.window().edit_channel_page.load_my_torrents() def on_remove_entry(self): self.window().create_torrent_files_list.takeItem(self.selected_item_index)
diff --git a/TriblerGUI/widgets/discoveredpage.py b/TriblerGUI/widgets/discoveredpage.py index 6e9874e3e07..7035bd2dfc2 100644 --- a/TriblerGUI/widgets/discoveredpage.py +++ b/TriblerGUI/widgets/discoveredpage.py @@ -1,7 +1,11 @@ +from __future__ import absolute_import + +from PyQt5.QtCore import Qt from PyQt5.QtWidgets import QWidget -from TriblerGUI.widgets.channel_list_item import ChannelListItem -from TriblerGUI.tribler_request_manager import TriblerRequestManager +from TriblerGUI.utilities import get_gui_setting +from TriblerGUI.widgets.tablecontentmodel import ChannelsContentModel +from TriblerGUI.widgets.triblertablecontrollers import ChannelsTableViewController class DiscoveredPage(QWidget): @@ -11,42 +15,25 @@ class DiscoveredPage(QWidget): def __init__(self): QWidget.__init__(self) - self.discovered_channels = [] - self.request_mgr = None self.initialized = False + self.model = None + self.controller = None + self.gui_settings = None - def initialize_discovered_page(self): + def initialize_discovered_page(self, gui_settings): if not self.initialized: - self.window().core_manager.events_manager.discovered_channel.connect(self.on_discovered_channel) self.initialized = True + self.gui_settings = gui_settings + self.model = ChannelsContentModel(hide_xxx=get_gui_setting(self.gui_settings, "family_filter", True, + is_bool=True) if self.gui_settings else True) + # Set the default sorting column/order to num_torrents/descending + default_sort_column = self.model.columns.index(u'torrents') + self.window().discovered_channels_list.horizontalHeader().setSortIndicator( + default_sort_column, Qt.DescendingOrder) + self.controller = ChannelsTableViewController(self.model, 
self.window().discovered_channels_list, + self.window().num_discovered_channels_label, + self.window().discovered_channels_filter_input) def load_discovered_channels(self): - self.request_mgr = TriblerRequestManager() - self.request_mgr.perform_request("channels/discovered", self.received_discovered_channels) - - def received_discovered_channels(self, results): - if not results or 'channels' not in results: - return - - self.discovered_channels = [] - self.window().discovered_channels_list.set_data_items([]) - items = [] - - results['channels'].sort(key=lambda x: x['torrents'], reverse=True) - - for result in results['channels']: - items.append((ChannelListItem, result)) - self.discovered_channels.append(result) - self.update_num_label() - self.window().discovered_channels_list.set_data_items(items) - - def on_discovered_channel(self, channel_info): - channel_info['torrents'] = 0 - channel_info['subscribed'] = False - channel_info['votes'] = 0 - self.window().discovered_channels_list.append_item((ChannelListItem, channel_info)) - self.discovered_channels.append(channel_info) - self.update_num_label() - - def update_num_label(self): - self.window().num_discovered_channels_label.setText("%d items" % len(self.discovered_channels)) + self.controller.model.reset() + self.controller.load_channels(1, 50) # Load the first 50 discovered channels diff --git a/TriblerGUI/widgets/downloadsdetailstabwidget.py b/TriblerGUI/widgets/downloadsdetailstabwidget.py index 4737c2a96dc..b74ca3394db 100644 --- a/TriblerGUI/widgets/downloadsdetailstabwidget.py +++ b/TriblerGUI/widgets/downloadsdetailstabwidget.py @@ -1,10 +1,10 @@ -from urllib import quote_plus +from __future__ import absolute_import from PyQt5.QtCore import Qt -from PyQt5.QtWidgets import QTabWidget, QTreeWidgetItem, QAction +from PyQt5.QtWidgets import QAction, QTabWidget, QTreeWidgetItem -from TriblerGUI.tribler_action_menu import TriblerActionMenu from TriblerGUI.defs import * +from TriblerGUI.tribler_action_menu import TriblerActionMenu from TriblerGUI.tribler_request_manager import TriblerRequestManager from TriblerGUI.utilities import format_size, format_speed, is_video_file from TriblerGUI.widgets.downloadfilewidgetitem import DownloadFileWidgetItem @@ -225,7 +225,7 @@ def on_play_file(self, file_info): self.get_video_file_index(file_info["index"])) def set_included_files(self, files): - data_str = ''.join("selected_files[]=%s&" % ind for ind in files)[:-1] + post_data = {"selected_files": [ind for ind in files]} self.request_mgr = TriblerRequestManager() self.request_mgr.perform_request("downloads/%s" % self.current_download['infohash'], lambda _: None, - method='PATCH', data=data_str) + method='PATCH', data=post_data) diff --git a/TriblerGUI/widgets/downloadspage.py b/TriblerGUI/widgets/downloadspage.py index 2f9e2949177..057d9cb1e80 100644 --- a/TriblerGUI/widgets/downloadspage.py +++ b/TriblerGUI/widgets/downloadspage.py @@ -290,7 +290,7 @@ def on_start_download_clicked(self): infohash = selected_item.download_info["infohash"] self.request_mgr = TriblerRequestManager() self.request_mgr.perform_request("downloads/%s" % infohash, self.on_download_resumed, - method='PATCH', data="state=resume") + method='PATCH', data={"state": "resume"}) def on_download_resumed(self, json_result): if json_result and 'modified' in json_result: @@ -305,7 +305,7 @@ def on_stop_download_clicked(self): infohash = selected_item.download_info["infohash"] self.request_mgr = TriblerRequestManager() self.request_mgr.perform_request("downloads/%s" % infohash, 
self.on_download_stopped, - method='PATCH', data="state=stop") + method='PATCH', data={"state": "stop"}) def on_play_download_clicked(self): self.window().left_menu_button_video_player.click() @@ -341,7 +341,7 @@ def on_remove_download_dialog(self, action): self.request_mgr = TriblerRequestManager() self.request_mgr.perform_request("downloads/%s" % infohash, self.on_download_removed, - method='DELETE', data="remove_data=%d" % action) + method='DELETE', data={"remove_data": action}) if self.dialog: self.dialog.close_dialog() self.dialog = None @@ -356,7 +356,7 @@ def on_force_recheck_download(self): infohash = selected_item.download_info["infohash"] self.request_mgr = TriblerRequestManager() self.request_mgr.perform_request("downloads/%s" % infohash, self.on_forced_recheck, - method='PATCH', data='state=recheck') + method='PATCH', data={"state": "recheck"}) def on_forced_recheck(self, result): if result and "modified" in result: @@ -371,7 +371,7 @@ def change_anonymity(self, hops): infohash = selected_item.download_info["infohash"] self.request_mgr = TriblerRequestManager() self.request_mgr.perform_request("downloads/%s" % infohash, lambda _: None, - method='PATCH', data='anon_hops=%d' % hops) + method='PATCH', data={"anon_hops": hops}) def on_explore_files(self): for selected_item in self.selected_items: diff --git a/TriblerGUI/widgets/editchannelpage.py b/TriblerGUI/widgets/editchannelpage.py index d1d0e469413..552b17a00d0 100644 --- a/TriblerGUI/widgets/editchannelpage.py +++ b/TriblerGUI/widgets/editchannelpage.py @@ -1,92 +1,109 @@ -import base64 -import glob +from __future__ import absolute_import + import os import urllib -from urllib import pathname2url - -from PyQt5.QtCore import Qt, pyqtSignal, QDir -from PyQt5.QtGui import QIcon, QCursor +from base64 import b64encode -from PyQt5.QtWidgets import QWidget, QAction, QTreeWidgetItem, QFileDialog +from PyQt5.QtCore import QDir, QTimer, pyqtSignal +from PyQt5.QtGui import QCursor +from PyQt5.QtWidgets import QAction, QFileDialog, QWidget -from TriblerGUI.tribler_action_menu import TriblerActionMenu -from TriblerGUI.widgets.channel_torrent_list_item import ChannelTorrentListItem -from TriblerGUI.defs import PAGE_EDIT_CHANNEL_OVERVIEW, BUTTON_TYPE_NORMAL, BUTTON_TYPE_CONFIRM, \ - PAGE_EDIT_CHANNEL_PLAYLISTS, PAGE_EDIT_CHANNEL_PLAYLIST_TORRENTS, PAGE_EDIT_CHANNEL_PLAYLIST_MANAGE, \ - PAGE_EDIT_CHANNEL_PLAYLIST_EDIT, PAGE_EDIT_CHANNEL_SETTINGS, PAGE_EDIT_CHANNEL_TORRENTS,\ - PAGE_EDIT_CHANNEL_RSS_FEEDS, PAGE_EDIT_CHANNEL_CREATE_TORRENT +from TriblerGUI.defs import BUTTON_TYPE_CONFIRM, BUTTON_TYPE_NORMAL, COMMIT_STATUS_TODELETE, \ + PAGE_EDIT_CHANNEL_CREATE_TORRENT, PAGE_EDIT_CHANNEL_OVERVIEW, PAGE_EDIT_CHANNEL_SETTINGS, PAGE_EDIT_CHANNEL_TORRENTS from TriblerGUI.dialogs.confirmationdialog import ConfirmationDialog -from TriblerGUI.widgets.loading_list_item import LoadingListItem -from TriblerGUI.widgets.playlist_list_item import PlaylistListItem +from TriblerGUI.tribler_action_menu import TriblerActionMenu from TriblerGUI.tribler_request_manager import TriblerRequestManager -from TriblerGUI.utilities import get_image_path +from TriblerGUI.utilities import get_gui_setting +from TriblerGUI.widgets.tablecontentmodel import MyTorrentsContentModel +from TriblerGUI.widgets.triblertablecontrollers import MyTorrentsTableViewController +CHANNEL_COMMIT_DELAY = 30000 # milliseconds -chant_welcome_text = \ -"""Welcome to the management interface of your channel! -Here, you can change settings of you channel and manage your shared torrents. 
-Note that this is a New-style channel, which is still experimental.""" class EditChannelPage(QWidget): """ - This class is responsible for managing lists and data on your channel page, including torrents, playlists - and rss feeds. + This class is responsible for managing lists and data on your channel page """ - playlists_loaded = pyqtSignal(object) + on_torrents_removed = pyqtSignal(list) + on_all_torrents_removed = pyqtSignal() + on_commit = pyqtSignal() def __init__(self): QWidget.__init__(self) - self.remove_torrent_requests = [] self.channel_overview = None - self.playlists = None - self.editing_playlist = None - self.viewing_playlist = None + self.chosen_dir = None self.dialog = None self.editchannel_request_mgr = None + self.model = None + self.controller = None + self.channel_dirty = False + self.gui_settings = None + self.commit_timer = None + self.autocommit_enabled = None + + def initialize_edit_channel_page(self, gui_settings): + self.gui_settings = gui_settings - def initialize_edit_channel_page(self): self.window().create_channel_intro_button.clicked.connect(self.on_create_channel_intro_button_clicked) self.window().create_channel_form.hide() + self.update_channel_commit_views() self.window().edit_channel_stacked_widget.setCurrentIndex(1) self.window().edit_channel_details_stacked_widget.setCurrentIndex(PAGE_EDIT_CHANNEL_OVERVIEW) self.window().create_channel_button.clicked.connect(self.on_create_channel_button_pressed) self.window().edit_channel_save_button.clicked.connect(self.on_edit_channel_save_button_pressed) - - self.window().edit_channel_torrents_remove_selected_button.clicked.connect( - self.on_torrents_remove_selected_clicked) - self.window().edit_channel_torrents_remove_all_button.clicked.connect(self.on_torrents_remove_all_clicked) - self.window().edit_channel_torrents_add_button.clicked.connect(self.on_torrents_add_clicked) - - self.window().edit_channel_details_playlist_manage.playlist_saved.connect(self.load_channel_playlists) - - self.window().edit_channel_playlist_torrents_back.clicked.connect(self.on_playlist_torrents_back_clicked) - self.window().edit_channel_playlists_list.itemClicked.connect(self.on_playlist_item_clicked) - self.window().edit_channel_playlist_manage_torrents_button.clicked.connect(self.on_playlist_manage_clicked) - self.window().edit_channel_create_playlist_button.clicked.connect(self.on_playlist_created_clicked) - - self.window().playlist_edit_save_button.clicked.connect(self.on_playlist_edit_save_clicked) - self.window().playlist_edit_cancel_button.clicked.connect(self.on_playlist_edit_cancel_clicked) - - self.window().edit_channel_details_rss_feeds_remove_selected_button.clicked.connect( - self.on_rss_feeds_remove_selected_clicked) - self.window().edit_channel_details_rss_add_button.clicked.connect(self.on_rss_feed_add_clicked) - self.window().edit_channel_details_rss_refresh_button.clicked.connect(self.on_rss_feeds_refresh_clicked) + self.window().edit_channel_commit_button.clicked.connect(self.clicked_edit_channel_commit_button) # Tab bar buttons self.window().channel_settings_tab.initialize() self.window().channel_settings_tab.clicked_tab_button.connect(self.clicked_tab_button) - # Chant publish widget is hidden by default and only shown when necessary - self.window().dirty_channel_widget.setHidden(True) - self.window().edit_channel_commit_button.clicked.connect(self.clicked_edit_channel_commit_button) - self.window().export_channel_button.clicked.connect(self.on_export_mdblob) - self.window().export_channel_button.setHidden(True) 
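+        # Channel edits are now staged locally and pushed to the core in a single
+        # "commit" step, either via the commit button connected above or via the
+        # autocommit QTimer configured below (see update_channel_commit_views).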
+ # TODO: re-enable remove_selected button + self.window().remove_selected_button.setHidden(True) + + # Connect torrent addition/removal buttons + self.window().remove_selected_button.clicked.connect(self.on_torrents_remove_selected_clicked) + self.window().remove_all_button.clicked.connect(self.on_torrents_remove_all_clicked) + self.window().add_button.clicked.connect(self.on_torrents_add_clicked) + + self.model = MyTorrentsContentModel() + self.controller = MyTorrentsTableViewController(self.model, self.window().edit_channel_torrents_container, + self.window().edit_channel_torrents_num_items_label, + self.window().edit_channel_torrents_filter) + self.window().edit_channel_torrents_container.details_container.hide() + self.autocommit_enabled = get_gui_setting(self.gui_settings, "autocommit_enabled", True, + is_bool=True) if self.gui_settings else True + + # Commit the channel just in case there are uncommitted changes left since the last time (e.g. Tribler crashed) + # The timer thing here is a workaround for race condition with the core startup + if self.autocommit_enabled: + if not self.commit_timer: + self.commit_timer = QTimer() + self.commit_timer.setSingleShot(True) + self.commit_timer.timeout.connect(self.autocommit_fired) + + self.controller.table_view.setColumnHidden(3, True) + self.model.exclude_deleted = True + self.commit_timer.stop() + self.commit_timer.start(10000) + else: + self.controller.table_view.setColumnHidden(4, True) + self.model.exclude_deleted = False + + def update_channel_commit_views(self, deleted_index=None): + if self.channel_dirty and self.autocommit_enabled: + self.commit_timer.stop() + self.commit_timer.start(CHANNEL_COMMIT_DELAY) + if deleted_index: + # TODO: instead of reloading the whole table, just remove the deleted row and update start and end + self.load_my_torrents() + + self.window().commit_control_bar.setHidden(not self.channel_dirty or self.autocommit_enabled) def load_my_channel_overview(self): if not self.channel_overview: @@ -104,86 +121,22 @@ def initialize_with_channel_overview(self, overview): return self.channel_overview = overview["mychannel"] - if "chant" in self.channel_overview: - self.window().edit_channel_playlists_button.setHidden(True) - self.window().edit_channel_rss_feeds_button.setHidden(True) - self.window().label_7.setText(chant_welcome_text) - self.window().export_channel_button.setHidden(False) + self.channel_dirty = self.channel_overview['dirty'] + self.update_channel_commit_views() + + self.window().export_channel_button.setHidden(False) self.window().edit_channel_name_label.setText("My channel") self.window().edit_channel_overview_name_label.setText(self.channel_overview["name"]) self.window().edit_channel_description_label.setText(self.channel_overview["description"]) - self.window().edit_channel_identifier_label.setText(self.channel_overview["identifier"]) + self.window().edit_channel_identifier_label.setText(self.channel_overview["public_key"]) self.window().edit_channel_name_edit.setText(self.channel_overview["name"]) self.window().edit_channel_description_edit.setText(self.channel_overview["description"]) self.window().edit_channel_stacked_widget.setCurrentIndex(1) - def load_channel_torrents(self): - self.window().edit_channel_torrents_list.set_data_items([(LoadingListItem, None)]) - self.editchannel_request_mgr = TriblerRequestManager() - self.editchannel_request_mgr.perform_request("channels/discovered/%s/torrents?disable_filter=1" % - self.channel_overview["identifier"], self.initialize_with_torrents) - - def 
initialize_with_torrents(self, torrents): - if not torrents: - return - self.window().edit_channel_torrents_list.set_data_items([]) - - self.window().dirty_channel_widget.setHidden(not("chant_dirty" in torrents and torrents["chant_dirty"])) - items = [] - for result in torrents['torrents']: - items.append((ChannelTorrentListItem, result, - {"show_controls": True, "on_remove_clicked": self.on_torrent_remove_clicked})) - self.window().edit_channel_torrents_list.set_data_items(items) - - def load_channel_playlists(self): - self.window().edit_channel_playlists_list.set_data_items([(LoadingListItem, None)]) - self.editchannel_request_mgr = TriblerRequestManager() - self.editchannel_request_mgr.perform_request("channels/discovered/%s/playlists?disable_filter=1" % - self.channel_overview["identifier"], - self.initialize_with_playlists) - - def initialize_with_playlists(self, playlists): - if not playlists: - return - self.playlists_loaded.emit(playlists) - self.playlists = playlists - self.window().edit_channel_playlists_list.set_data_items([]) - - self.update_playlist_list() - - viewing_playlist_index = self.get_index_of_viewing_playlist() - if viewing_playlist_index != -1: - self.viewing_playlist = self.playlists['playlists'][viewing_playlist_index] - self.update_playlist_torrent_list() - - def load_channel_rss_feeds(self): - self.editchannel_request_mgr = TriblerRequestManager() - self.editchannel_request_mgr.perform_request("channels/discovered/%s/rssfeeds" % - self.channel_overview["identifier"], - self.initialize_with_rss_feeds) - - def initialize_with_rss_feeds(self, rss_feeds): - if not rss_feeds: - return - self.window().edit_channel_rss_feeds_list.clear() - for feed in rss_feeds["rssfeeds"]: - item = QTreeWidgetItem(self.window().edit_channel_rss_feeds_list) - item.setText(0, feed["url"]) - - self.window().edit_channel_rss_feeds_list.addTopLevelItem(item) - - def on_torrent_remove_clicked(self, item): - if "chant" in self.channel_overview: - self.on_torrents_remove_selected_action(0, item) - return - self.dialog = ConfirmationDialog(self, "Remove selected torrent", - "Are you sure that you want to remove the selected torrent from this channel?", - [('CONFIRM', BUTTON_TYPE_NORMAL), ('CANCEL', BUTTON_TYPE_CONFIRM)]) - self.dialog.button_clicked.connect(lambda action: self.on_torrents_remove_selected_action(action, item)) - self.dialog.show() + self.model.channel_pk = self.channel_overview["public_key"] def on_create_channel_button_pressed(self): channel_name = self.window().new_channel_name_edit.text() @@ -192,12 +145,13 @@ def on_create_channel_button_pressed(self): self.window().new_channel_name_label.setStyleSheet("color: red;") return - self.window().create_channel_button.setEnabled(False) + post_data = { + "name": channel_name, + "description": channel_description + } self.editchannel_request_mgr = TriblerRequestManager() - self.editchannel_request_mgr.perform_request("channels/discovered", self.on_channel_created, - data=unicode('name=%s&description=%s' % - (channel_name, channel_description)).encode('utf-8'), - method='PUT') + self.editchannel_request_mgr.perform_request("mychannel", self.on_channel_created, + data=post_data, method='PUT') def on_channel_created(self, result): if not result: @@ -209,24 +163,14 @@ def on_channel_created(self, result): def on_edit_channel_save_button_pressed(self): channel_name = self.window().edit_channel_name_edit.text() channel_description = self.window().edit_channel_description_edit.toPlainText() + post_data = { + "name": channel_name, + 
"description": channel_description + } self.editchannel_request_mgr = TriblerRequestManager() self.editchannel_request_mgr.perform_request("mychannel", self.on_channel_edited, - data=unicode('name=%s&description=%s' % - (channel_name, channel_description)).encode('utf-8'), - method='POST') - - def clicked_edit_channel_commit_button(self): - self.editchannel_request_mgr = TriblerRequestManager() - self.editchannel_request_mgr.perform_request("mychannel", self.on_channel_committed, - data=unicode('commit_changes=1').encode('utf-8'), - method='POST') - - def on_channel_committed(self, result): - if not result: - return - if 'modified' in result: - self.load_channel_torrents() + data=post_data, method='POST') def on_channel_edited(self, result): if not result: @@ -236,401 +180,37 @@ def on_channel_edited(self, result): self.window().edit_channel_description_label.setText( self.window().edit_channel_description_edit.toPlainText()) - def on_torrents_remove_selected_clicked(self): - num_selected = len(self.window().edit_channel_torrents_list.selectedItems()) - if num_selected == 0: - return - - selected_torrent_items = [self.window().edit_channel_torrents_list.itemWidget(list_widget_item) - for list_widget_item in self.window().edit_channel_torrents_list.selectedItems()] - - self.dialog = ConfirmationDialog(self, "Remove %s selected torrents" % num_selected, - "Are you sure that you want to remove %s selected torrents " - "from your channel?" % num_selected, - [('CONFIRM', BUTTON_TYPE_NORMAL), ('CANCEL', BUTTON_TYPE_CONFIRM)]) - self.dialog.button_clicked.connect(lambda action: - self.on_torrents_remove_selected_action(action, selected_torrent_items)) - self.dialog.show() - - def on_torrents_remove_all_clicked(self): - self.dialog = ConfirmationDialog(self.window(), "Remove all torrents", - "Are you sure that you want to remove all torrents from your channel? 
" - "You cannot undo this action.", - [('CONFIRM', BUTTON_TYPE_NORMAL), ('CANCEL', BUTTON_TYPE_CONFIRM)]) - self.dialog.button_clicked.connect(self.on_torrents_remove_all_action) - self.dialog.show() - - def on_torrents_add_clicked(self): - menu = TriblerActionMenu(self) - - browse_files_action = QAction('Import torrent from file', self) - browse_dir_action = QAction('Import torrent(s) from dir', self) - add_url_action = QAction('Add URL', self) - create_torrent_action = QAction('Create torrent from file(s)', self) - - browse_files_action.triggered.connect(self.on_add_torrent_browse_file) - browse_dir_action.triggered.connect(self.on_add_torrents_browse_dir) - add_url_action.triggered.connect(self.on_add_torrent_from_url) - create_torrent_action.triggered.connect(self.on_create_torrent_from_files) - - menu.addAction(browse_files_action) - menu.addAction(browse_dir_action) - menu.addAction(add_url_action) - menu.addAction(create_torrent_action) - - menu.exec_(QCursor.pos()) - - def add_torrent_to_channel(self, filename): - with open(filename, "rb") as torrent_file: - torrent_content = urllib.quote_plus(base64.b64encode(torrent_file.read())) - editchannel_request_mgr = TriblerRequestManager() - editchannel_request_mgr.perform_request("channels/discovered/%s/torrents" % - self.channel_overview['identifier'], - self.on_torrent_to_channel_added, method='PUT', - data='torrent=%s' % torrent_content) - - def on_add_torrent_browse_file(self): - filename = QFileDialog.getOpenFileName(self, "Please select the .torrent file", "", "Torrent files (*.torrent)") - if len(filename[0]) == 0: - return - self.add_torrent_to_channel(filename[0]) - - - def on_add_torrent_from_url(self): - self.dialog = ConfirmationDialog(self, "Add torrent from URL/magnet link", - "Please enter the URL/magnet link in the field below:", - [('ADD', BUTTON_TYPE_NORMAL), ('CANCEL', BUTTON_TYPE_CONFIRM)], - show_input=True) - self.dialog.dialog_widget.dialog_input.setPlaceholderText('URL/magnet link') - self.dialog.button_clicked.connect(self.on_torrent_from_url_dialog_done) - self.dialog.show() - - def on_torrent_from_url_dialog_done(self, action): - if action == 0: - url = urllib.quote_plus(self.dialog.dialog_widget.dialog_input.text()) - self.editchannel_request_mgr = TriblerRequestManager() - self.editchannel_request_mgr.perform_request("channels/discovered/%s/torrents/%s" % - (self.channel_overview['identifier'], url), - self.on_torrent_to_channel_added, method='PUT') - self.dialog.close_dialog() - self.dialog = None - - def on_torrent_to_channel_added(self, result): - if not result: - return - if 'added' in result: - self.load_channel_torrents() - - def on_create_torrent_from_files(self): - self.window().edit_channel_details_create_torrent.initialize(self.channel_overview['identifier']) - self.window().edit_channel_details_stacked_widget.setCurrentIndex(PAGE_EDIT_CHANNEL_CREATE_TORRENT) - - def on_playlist_torrents_back_clicked(self): - self.window().edit_channel_details_stacked_widget.setCurrentIndex(PAGE_EDIT_CHANNEL_PLAYLISTS) - - def on_playlist_item_clicked(self, item): - playlist_info = item.data(Qt.UserRole) - if not playlist_info: - return - self.window().edit_channel_playlist_torrents_list.set_data_items([]) - self.window().edit_channel_details_playlist_torrents_header.setText("Torrents in '%s'" % playlist_info['name']) - self.window().edit_channel_playlist_torrents_back.setIcon(QIcon(get_image_path('page_back.png'))) - - self.viewing_playlist = playlist_info - self.update_playlist_torrent_list() - - 
self.window().edit_channel_details_stacked_widget.setCurrentIndex(PAGE_EDIT_CHANNEL_PLAYLIST_TORRENTS) - - def update_playlist_list(self): - self.playlists['playlists'].sort(key=lambda torrent: len(torrent['torrents']), reverse=True) - - items = [] - for result in self.playlists['playlists']: - items.append((PlaylistListItem, result, - {"show_controls": True, "on_remove_clicked": self.on_playlist_remove_clicked, - "on_edit_clicked": self.on_playlist_edit_clicked})) - self.window().edit_channel_playlists_list.set_data_items(items) - - def update_playlist_torrent_list(self): - items = [] - for torrent in self.viewing_playlist["torrents"]: - items.append((ChannelTorrentListItem, torrent, - {"show_controls": True, "on_remove_clicked": self.on_playlist_torrent_remove_clicked})) - self.window().edit_channel_playlist_torrents_list.set_data_items(items) - - def on_playlist_manage_clicked(self): - self.window().edit_channel_details_playlist_manage.initialize(self.channel_overview, self.viewing_playlist) - self.window().edit_channel_details_stacked_widget.setCurrentIndex(PAGE_EDIT_CHANNEL_PLAYLIST_MANAGE) - - def on_playlist_torrent_remove_clicked(self, item): - self.dialog = ConfirmationDialog(self, - "Remove selected torrent from playlist", - "Are you sure that you want to remove the selected torrent " - "from this playlist?", - [('CONFIRM', BUTTON_TYPE_NORMAL), ('CANCEL', BUTTON_TYPE_CONFIRM)]) - self.dialog.button_clicked.connect(lambda action: self.on_playlist_torrent_remove_selected_action(item, action)) - self.dialog.show() - - def on_playlist_torrent_remove_selected_action(self, item, action): - if action == 0: - self.editchannel_request_mgr = TriblerRequestManager() - self.editchannel_request_mgr.perform_request("channels/discovered/%s/playlists/%s/%s" % - (self.channel_overview["identifier"], - self.viewing_playlist['id'], item.torrent_info['infohash']), - lambda result: self.on_playlist_torrent_removed( - result, item.torrent_info), - method='DELETE') - - self.dialog.close_dialog() - self.dialog = None - - def on_playlist_torrent_removed(self, result, torrent): - if not result: - return - self.remove_torrent_from_playlist(torrent) - - def get_index_of_viewing_playlist(self): - if self.viewing_playlist is None: - return -1 - - for index in xrange(len(self.playlists['playlists'])): - if self.playlists['playlists'][index]['id'] == self.viewing_playlist['id']: - return index - - return -1 - - def remove_torrent_from_playlist(self, torrent): - playlist_index = self.get_index_of_viewing_playlist() - - torrent_index = -1 - for index in xrange(len(self.viewing_playlist['torrents'])): - if self.viewing_playlist['torrents'][index]['infohash'] == torrent['infohash']: - torrent_index = index - break - - if torrent_index != -1: - del self.playlists['playlists'][playlist_index]['torrents'][torrent_index] - self.viewing_playlist = self.playlists['playlists'][playlist_index] - self.update_playlist_list() - self.update_playlist_torrent_list() - - def on_playlist_edit_save_clicked(self): - if len(self.window().playlist_edit_name.text()) == 0: - return - - name = self.window().playlist_edit_name.text() - description = self.window().playlist_edit_description.toPlainText() - - self.editchannel_request_mgr = TriblerRequestManager() - if self.editing_playlist is None: - self.editchannel_request_mgr.perform_request("channels/discovered/%s/playlists" % - self.channel_overview["identifier"], self.on_playlist_created, - data=unicode('name=%s&description=%s' % - (name, description)).encode('utf-8'), - method='PUT') 
- else: - self.editchannel_request_mgr.perform_request("channels/discovered/%s/playlists/%s" % - (self.channel_overview["identifier"], - self.editing_playlist["id"]), self.on_playlist_edited, - data=unicode('name=%s&description=%s' % - (name, description)).encode('utf-8'), - method='POST') - - def on_playlist_created(self, json_result): - if not json_result: - return - if 'created' in json_result and json_result['created']: - self.on_playlist_edited_done() - - def on_playlist_edited(self, json_result): - if not json_result: - return - if 'modified' in json_result and json_result['modified']: - self.on_playlist_edited_done() - - def on_playlist_edited_done(self): - self.window().playlist_edit_name.setText('') - self.window().playlist_edit_description.setText('') - self.load_channel_playlists() - self.window().edit_channel_details_stacked_widget.setCurrentIndex(PAGE_EDIT_CHANNEL_PLAYLISTS) - - def on_playlist_edit_cancel_clicked(self): - self.window().edit_channel_details_stacked_widget.setCurrentIndex(PAGE_EDIT_CHANNEL_PLAYLISTS) - - def on_playlist_created_clicked(self): - self.editing_playlist = None - self.window().playlist_edit_save_button.setText("CREATE") - self.window().edit_channel_details_stacked_widget.setCurrentIndex(PAGE_EDIT_CHANNEL_PLAYLIST_EDIT) - - def on_playlist_remove_clicked(self, item): - self.dialog = ConfirmationDialog(self, "Remove selected playlist", - "Are you sure that you want to remove the selected playlist " - "from your channel?", - [('CONFIRM', BUTTON_TYPE_NORMAL), ('CANCEL', BUTTON_TYPE_CONFIRM)]) - self.dialog.button_clicked.connect(lambda action: self.on_playlist_remove_selected_action(item, action)) - self.dialog.show() - - def on_playlist_remove_selected_action(self, item, action): - if action == 0: - self.editchannel_request_mgr = TriblerRequestManager() - self.editchannel_request_mgr.perform_request("channels/discovered/%s/playlists/%s" % - (self.channel_overview["identifier"], - item.playlist_info['id']), - self.on_playlist_removed, method='DELETE') - - self.dialog.close_dialog() - self.dialog = None - - def on_playlist_removed(self, json_result): - if not json_result: - return - if 'removed' in json_result and json_result['removed']: - self.load_channel_playlists() - - def on_playlist_edit_clicked(self, item): - self.editing_playlist = item.playlist_info - self.window().playlist_edit_save_button.setText("CREATE") - self.window().playlist_edit_name.setText(item.playlist_info["name"]) - self.window().playlist_edit_description.setText(item.playlist_info["description"]) - self.window().edit_channel_details_stacked_widget.setCurrentIndex(PAGE_EDIT_CHANNEL_PLAYLIST_EDIT) - - def on_torrents_remove_selected_action(self, action, items): - if action == 0: - if isinstance(items, list): - infohash = ",".join([torrent_item.torrent_info['infohash'] for torrent_item in items]) - else: - infohash = items.torrent_info['infohash'] - self.editchannel_request_mgr = TriblerRequestManager() - self.editchannel_request_mgr.perform_request("channels/discovered/%s/torrents/%s" % - (self.channel_overview["identifier"], - infohash), - self.on_torrent_removed, method='DELETE') - if self.dialog: - self.dialog.close_dialog() - self.dialog = None - - def on_torrent_removed(self, json_result): - if not json_result: - return - if 'removed' in json_result and json_result['removed']: - self.load_channel_torrents() - - def on_torrents_remove_all_action(self, action): - if action == 0: - for torrent_ind in xrange(self.window().edit_channel_torrents_list.count()): - torrent_data = 
self.window().edit_channel_torrents_list.item(torrent_ind).data(Qt.UserRole) - request_mgr = TriblerRequestManager() - request_mgr.perform_request("channels/discovered/%s/torrents/%s" % - (self.channel_overview["identifier"], torrent_data['infohash']), - None, method='DELETE') - self.remove_torrent_requests.append(request_mgr) - - self.window().edit_channel_torrents_list.set_data_items([]) - if "chant" in self.channel_overview: - self.load_channel_torrents() - - self.dialog.close_dialog() - self.dialog = None - def clicked_tab_button(self, tab_button_name): if tab_button_name == "edit_channel_overview_button": self.window().edit_channel_details_stacked_widget.setCurrentIndex(PAGE_EDIT_CHANNEL_OVERVIEW) elif tab_button_name == "edit_channel_settings_button": self.window().edit_channel_details_stacked_widget.setCurrentIndex(PAGE_EDIT_CHANNEL_SETTINGS) elif tab_button_name == "edit_channel_torrents_button": + self.load_my_torrents() self.window().edit_channel_details_stacked_widget.setCurrentIndex(PAGE_EDIT_CHANNEL_TORRENTS) - self.load_channel_torrents() - elif tab_button_name == "edit_channel_playlists_button": - self.window().edit_channel_details_stacked_widget.setCurrentIndex(PAGE_EDIT_CHANNEL_PLAYLISTS) - self.load_channel_playlists() - elif tab_button_name == "edit_channel_rss_feeds_button": - self.window().edit_channel_details_stacked_widget.setCurrentIndex(PAGE_EDIT_CHANNEL_RSS_FEEDS) - self.load_channel_rss_feeds() + + def load_my_torrents(self): + self.controller.model.reset() + self.controller.load_torrents(1, 50) # Load the first 50 torrents def on_create_channel_intro_button_clicked(self): self.window().create_channel_form.show() self.window().create_channel_intro_button_container.hide() self.window().create_new_channel_intro_label.setText("Please enter your channel details below.") - def on_rss_feed_add_clicked(self): - self.dialog = ConfirmationDialog(self, "Add RSS feed", "Please enter the RSS feed URL in the field below:", - [('ADD', BUTTON_TYPE_NORMAL), ('CANCEL', BUTTON_TYPE_CONFIRM)], - show_input=True) - self.dialog.dialog_widget.dialog_input.setPlaceholderText('RSS feed URL') - self.dialog.button_clicked.connect(self.on_rss_feed_dialog_added) - self.dialog.show() - - def on_rss_feed_dialog_added(self, action): - if action == 0: - url = urllib.quote_plus(self.dialog.dialog_widget.dialog_input.text()) - self.editchannel_request_mgr = TriblerRequestManager() - self.editchannel_request_mgr.perform_request("channels/discovered/%s/rssfeeds/%s" % - (self.channel_overview["identifier"], url), - self.on_rss_feed_added, method='PUT') - - self.dialog.close_dialog() - self.dialog = None - - def on_rss_feed_added(self, json_result): - if not json_result: - return - if json_result['added']: - self.load_channel_rss_feeds() - - def on_rss_feeds_remove_selected_clicked(self): - if len(self.window().edit_channel_rss_feeds_list.selectedItems()) == 0: - ConfirmationDialog.show_message(self, "Remove RSS Feeds", - "Selection is empty. 
Please select the feeds to remove.", "OK") - return - self.dialog = ConfirmationDialog(self, "Remove RSS feed", - "Are you sure you want to remove the selected RSS feed?", - [('REMOVE', BUTTON_TYPE_NORMAL), ('CANCEL', BUTTON_TYPE_CONFIRM)]) - self.dialog.button_clicked.connect(self.on_rss_feed_dialog_removed) - self.dialog.show() - - def on_rss_feed_dialog_removed(self, action): - if action == 0: - url = urllib.quote_plus(self.window().edit_channel_rss_feeds_list.selectedItems()[0].text(0)) - self.editchannel_request_mgr = TriblerRequestManager() - self.editchannel_request_mgr.perform_request("channels/discovered/%s/rssfeeds/%s" % - (self.channel_overview["identifier"], url), - self.on_rss_feed_removed, method='DELETE') - - self.dialog.close_dialog() - self.dialog = None - - def on_rss_feed_removed(self, json_result): - if not json_result: - return - if json_result['removed']: - self.load_channel_rss_feeds() - - def on_rss_feeds_refresh_clicked(self): - self.window().edit_channel_details_rss_refresh_button.setEnabled(False) - self.editchannel_request_mgr = TriblerRequestManager() - self.editchannel_request_mgr.perform_request('channels/discovered/%s/recheckfeeds' % - self.channel_overview["identifier"], self.on_rss_feeds_refreshed,\ - method='POST') - - def on_rss_feeds_refreshed(self, json_result): - if not json_result: - return - if json_result["rechecked"]: - self.window().edit_channel_details_rss_refresh_button.setEnabled(True) - def on_export_mdblob(self): - - export_dir = QFileDialog.getExistingDirectory(self, "Please select the destination directory", "", QFileDialog.ShowDirsOnly) + export_dir = QFileDialog.getExistingDirectory(self, "Please select the destination directory", "", + QFileDialog.ShowDirsOnly) if len(export_dir) == 0: return # Show confirmation dialog where we specify the name of the file - mdblob_name = self.channel_overview["identifier"] + mdblob_name = self.channel_overview["public_key"] dialog = ConfirmationDialog(self, "Export mdblob file", - "Please enter the name of the channel metadata file:", - [('SAVE', BUTTON_TYPE_NORMAL), ('CANCEL', BUTTON_TYPE_CONFIRM)], - show_input=True) + "Please enter the name of the channel metadata file:", + [('SAVE', BUTTON_TYPE_NORMAL), ('CANCEL', BUTTON_TYPE_CONFIRM)], + show_input=True) def on_export_download_dialog_done(action): if action == 0: @@ -659,31 +239,201 @@ def on_export_download_request_done(dest_path, data): dialog.button_clicked.connect(on_export_download_dialog_done) dialog.show() + # Torrent removal-related methods + def on_torrents_remove_selected_clicked(self): + selected_items = self.controller.table_view.selectedIndexes() + num_selected = len(selected_items) + if num_selected == 0: + return + + selected_infohashes = [self.model.data_items[row][u'infohash'] for row in + set([index.row() for index in selected_items])] + self.dialog = ConfirmationDialog(self, "Remove %s selected torrents" % len(selected_infohashes), + "Are you sure that you want to remove %s selected torrents " + "from your channel?" 
% len(selected_infohashes), + [('CONFIRM', BUTTON_TYPE_NORMAL), ('CANCEL', BUTTON_TYPE_CONFIRM)]) + self.dialog.button_clicked.connect(lambda action: + self.on_torrents_remove_selected_action(action, selected_infohashes)) + self.dialog.show() + + def on_torrents_remove_selected_action(self, action, items): + if action == 0: + items = [str(item) for item in items] + infohashes = ",".join(items) + + post_data = { + "infohashes": infohashes, + "status": COMMIT_STATUS_TODELETE + } + + request_mgr = TriblerRequestManager() + request_mgr.perform_request("mychannel/torrents", + lambda response: self.on_torrents_removed_response(response, items), + data=post_data, method='POST') + if self.dialog: + self.dialog.close_dialog() + self.dialog = None + + def on_torrents_removed_response(self, json_result, infohashes): + if not json_result: + return + + if 'success' in json_result and json_result['success']: + self.on_torrents_removed.emit(infohashes) + self.load_my_torrents() + + def on_torrents_remove_all_clicked(self): + self.dialog = ConfirmationDialog(self.window(), "Remove all torrents", + "Are you sure that you want to remove all torrents from your channel?", + [('CONFIRM', BUTTON_TYPE_NORMAL), ('CANCEL', BUTTON_TYPE_CONFIRM)]) + self.dialog.button_clicked.connect(self.on_torrents_remove_all_action) + self.dialog.show() + def on_torrents_remove_all_action(self, action): + if action == 0: + request_mgr = TriblerRequestManager() + request_mgr.perform_request("mychannel/torrents", self.on_all_torrents_removed_response, method='DELETE') + + self.dialog.close_dialog() + self.dialog = None + + def on_all_torrents_removed_response(self, json_result): + if not json_result: + return + + if 'success' in json_result and json_result['success']: + self.on_all_torrents_removed.emit() + self.load_my_torrents() + + # Torrent addition-related methods def on_add_torrents_browse_dir(self): chosen_dir = QFileDialog.getExistingDirectory(self, "Please select the directory containing the .torrent files", QDir.homePath(), QFileDialog.ShowDirsOnly) - if len(chosen_dir) == 0: + if not chosen_dir: return - self.selected_torrent_files = [torrent_file for torrent_file in glob.glob(chosen_dir + "/*.torrent")] + self.chosen_dir = chosen_dir self.dialog = ConfirmationDialog(self, "Add torrents from directory", - "Are you sure you want to add %d torrents to your Tribler channel?" 
% - len(self.selected_torrent_files), - [('ADD', BUTTON_TYPE_NORMAL), ('CANCEL', BUTTON_TYPE_CONFIRM)]) + "Add all torrent files from the following directory " + "to your Tribler channel:\n\n%s" % + chosen_dir, + [('ADD', BUTTON_TYPE_NORMAL), ('CANCEL', BUTTON_TYPE_CONFIRM)], + checkbox_text="Include subdirectories (recursive mode)") self.dialog.button_clicked.connect(self.on_confirm_add_directory_dialog) self.dialog.show() def on_confirm_add_directory_dialog(self, action): if action == 0: - for filename in self.selected_torrent_files: - self.add_torrent_to_channel(filename) + self.add_dir_to_channel(self.chosen_dir, recursive=self.dialog.checkbox.isChecked()) if self.dialog: self.dialog.close_dialog() self.dialog = None + self.chosen_dir = None + + def on_torrents_add_clicked(self): + menu = TriblerActionMenu(self) + + browse_files_action = QAction('Import torrent from file', self) + browse_dir_action = QAction('Import torrent(s) from dir', self) + add_url_action = QAction('Add URL', self) + create_torrent_action = QAction('Create torrent from file(s)', self) + + browse_files_action.triggered.connect(self.on_add_torrent_browse_file) + browse_dir_action.triggered.connect(self.on_add_torrents_browse_dir) + add_url_action.triggered.connect(self.on_add_torrent_from_url) + create_torrent_action.triggered.connect(self.on_create_torrent_from_files) + + menu.addAction(browse_files_action) + menu.addAction(browse_dir_action) + menu.addAction(add_url_action) + menu.addAction(create_torrent_action) + + menu.exec_(QCursor.pos()) + + def on_create_torrent_from_files(self): + self.window().edit_channel_details_create_torrent.initialize() + self.window().edit_channel_details_stacked_widget.setCurrentIndex(PAGE_EDIT_CHANNEL_CREATE_TORRENT) + def on_add_torrent_browse_file(self): + filename = QFileDialog.getOpenFileName(self, "Please select the .torrent file", "", "Torrent files (*.torrent)") + if not filename[0]: + return + self.add_torrent_to_channel(filename[0]) + def on_add_torrent_from_url(self): + self.dialog = ConfirmationDialog(self, "Add torrent from URL/magnet link", + "Please enter the URL/magnet link in the field below:", + [('ADD', BUTTON_TYPE_NORMAL), ('CANCEL', BUTTON_TYPE_CONFIRM)], + show_input=True) + self.dialog.dialog_widget.dialog_input.setPlaceholderText('URL/magnet link') + self.dialog.button_clicked.connect(self.on_torrent_from_url_dialog_done) + self.dialog.show() + def on_torrent_from_url_dialog_done(self, action): + if action == 0: + url = urllib.quote_plus(self.dialog.dialog_widget.dialog_input.text()) + self.add_torrent_url_to_channel(url) + self.dialog.close_dialog() + self.dialog = None + + def autocommit_fired(self): + def commit_channel(overview): + try: + if overview and overview['mychannel']['dirty']: + TriblerRequestManager().perform_request("mychannel/commit", lambda _: None, method='POST', + capture_errors=False) + except KeyError: + return + + if self.channel_overview: + self.clicked_edit_channel_commit_button() + else: + TriblerRequestManager().perform_request("mychannel", commit_channel, capture_errors=False) + + # Commit button-related methods + def clicked_edit_channel_commit_button(self): + request_mgr = TriblerRequestManager() + request_mgr.perform_request("mychannel/commit", self.on_channel_committed, + method='POST') + + def on_channel_committed(self, result): + if not result: + return + if 'success' in result and result['success']: + self.channel_dirty = False + self.update_channel_commit_views() + self.on_commit.emit() + if not self.autocommit_enabled: + 
self.load_my_torrents() + + def add_torrent_to_channel(self, filename): + with open(filename, "rb") as torrent_file: + torrent_content = b64encode(torrent_file.read()) + request_mgr = TriblerRequestManager() + request_mgr.perform_request("mychannel/torrents", + self.on_torrent_to_channel_added, method='PUT', + data={"torrent": torrent_content}) + + def add_dir_to_channel(self, dirname, recursive=False): + post_data = { + "torrents_dir": dirname, + "recursive": int(recursive) + } + request_mgr = TriblerRequestManager() + request_mgr.perform_request("mychannel/torrents", + self.on_torrent_to_channel_added, method='PUT', data=post_data) + + def add_torrent_url_to_channel(self, url): + request_mgr = TriblerRequestManager() + request_mgr.perform_request("mychannel/torrents/%s" % url, + self.on_torrent_to_channel_added, method='PUT') + + def on_torrent_to_channel_added(self, result): + if not result: + return + + if 'added' in result: + self.load_my_torrents() diff --git a/TriblerGUI/widgets/home_recommended_item.py b/TriblerGUI/widgets/home_recommended_item.py index bf5434e3878..4c0740d4803 100644 --- a/TriblerGUI/widgets/home_recommended_item.py +++ b/TriblerGUI/widgets/home_recommended_item.py @@ -5,7 +5,7 @@ from PyQt5.QtWidgets import QLabel, QSizePolicy, QToolButton, QWidget from TriblerGUI.tribler_window import fc_home_recommended_item -from TriblerGUI.utilities import format_size, get_image_path, pretty_date +from TriblerGUI.utilities import format_size, get_image_path HOME_ITEM_FONT_SIZE = 44 @@ -96,7 +96,7 @@ def update_with_channel(self, channel): self.thumbnail_widget.initialize(channel["name"], HOME_ITEM_FONT_SIZE) self.main_label.setText(channel["name"]) - self.detail_label.setText("Updated " + pretty_date(channel["modified"])) + self.detail_label.setText("%d torrents" % channel["torrents"]) self.category_label.setHidden(True) self.setCursor(Qt.PointingHandCursor) diff --git a/TriblerGUI/widgets/homepage.py b/TriblerGUI/widgets/homepage.py index cfacc62b52c..f99832d730e 100644 --- a/TriblerGUI/widgets/homepage.py +++ b/TriblerGUI/widgets/homepage.py @@ -1,9 +1,13 @@ +from __future__ import absolute_import, division + +from PyQt5.QtCore import QTimer from PyQt5.QtWidgets import QWidget +from six.moves import xrange + from TriblerGUI.defs import PAGE_CHANNEL_DETAILS -from TriblerGUI.widgets.home_recommended_item import HomeRecommendedItem -from TriblerGUI.widgets.loading_list_item import LoadingListItem from TriblerGUI.tribler_request_manager import TriblerRequestManager +from TriblerGUI.widgets.home_recommended_item import HomeRecommendedItem class HomePage(QWidget): @@ -14,9 +18,9 @@ class HomePage(QWidget): def __init__(self): QWidget.__init__(self) - self.has_loaded_cells = False self.recommended_request_mgr = None self.show_channels = False + self.resize_event_timer = None def initialize_home_page(self): self.window().home_page_table_view.cellClicked.connect(self.on_home_page_item_clicked) @@ -24,72 +28,78 @@ def initialize_home_page(self): self.window().home_tab.initialize() self.window().home_tab.clicked_tab_button.connect(self.clicked_tab_button) - def load_cells(self): + def load_cells(self, num_items): self.window().home_page_table_view.clear() - for x in xrange(0, 3): - for y in xrange(0, 3): + for y in xrange(0, 3): + for x in xrange(0, 3): widget_item = HomeRecommendedItem(self) - self.window().home_page_table_view.setCellWidget(x, y, widget_item) - self.has_loaded_cells = True + self.window().home_page_table_view.setCellWidget(y, x, widget_item) + if y * 3 + x >= 
num_items - 1:
+                    return

     def load_popular_torrents(self):
         self.recommended_request_mgr = TriblerRequestManager()
-        self.recommended_request_mgr.perform_request("torrents/random?limit=50", self.received_popular_torrents)
+        self.recommended_request_mgr.perform_request("metadata/torrents/random?limit=50",
+                                                     self.received_popular_torrents)

     def clicked_tab_button(self, tab_button_name):
         if tab_button_name == "home_tab_channels_button":
             self.recommended_request_mgr = TriblerRequestManager()
-            self.recommended_request_mgr.perform_request("channels/popular?limit=50", self.received_popular_channels)
+            self.recommended_request_mgr.perform_request("metadata/channels/popular?limit=50",
+                                                         self.received_popular_channels)
         elif tab_button_name == "home_tab_torrents_button":
             self.load_popular_torrents()

-    def set_no_results_table(self, label_text):
-        self.has_loaded_cells = False
-        self.window().home_page_table_view.clear()
-        for x in xrange(0, 3):
-            for y in xrange(0, 3):
-                widget_item = LoadingListItem(self, label_text="")
-                self.window().home_page_table_view.setCellWidget(x, y, widget_item)
-
-        self.window().home_page_table_view.setCellWidget(
-            0, 1, LoadingListItem(self, label_text=label_text))
-        self.window().resizeEvent(None)
-
     def received_popular_channels(self, result):
         if not result:
             return
         self.show_channels = True
-        if not self.has_loaded_cells:
-            self.load_cells()
         if len(result["channels"]) == 0:
-            self.set_no_results_table(label_text="No recommended channels")
+            self.update_home_page_views(False)
+            self.window().home_page_no_items_label.setText("No recommended channels found.")
             return

         cur_ind = 0
+        self.update_home_page_views(True)
+        self.load_cells(len(result["channels"][:9]))
         for channel in result["channels"][:9]:
-            self.window().home_page_table_view.cellWidget(cur_ind % 3, cur_ind / 3).update_with_channel(channel)
+            self.window().home_page_table_view.cellWidget(cur_ind // 3, cur_ind % 3).update_with_channel(channel)
             cur_ind += 1
-        self.window().resizeEvent(None)
+        self.start_resize_timer()
+
+    def update_home_page_views(self, has_results):
+        self.window().home_page_table_view.setHidden(not has_results)
+        self.window().home_page_no_items_label.setHidden(has_results)

     def received_popular_torrents(self, result):
         if not result:
             return
         self.show_channels = False
-        if not self.has_loaded_cells:
-            self.load_cells()
         if len(result["torrents"]) == 0:
-            self.set_no_results_table(label_text="No recommended torrents")
+            self.update_home_page_views(False)
+            self.window().home_page_no_items_label.setText("No recommended torrents found.")
             return

         cur_ind = 0
+        self.update_home_page_views(True)
+        self.load_cells(len(result["torrents"][:9]))
         for torrent in result["torrents"][:9]:
-            self.window().home_page_table_view.cellWidget(cur_ind % 3, cur_ind / 3).update_with_torrent(torrent)
+            self.window().home_page_table_view.cellWidget(cur_ind // 3, cur_ind % 3).update_with_torrent(torrent)
             cur_ind += 1
-        self.window().resizeEvent(None)
+        self.start_resize_timer()
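A note on the `+` lines above: homepage.py now has `from __future__ import division` at the top, so a bare `/` yields a float that PyQt5 will reject as a row index; the grid mapping needs floor division. A standard-library-only sketch of the mapping (not patch code):

    from __future__ import division

    # Walk a linear index over the 3x3 grid row-major, exactly as
    # received_popular_channels() and received_popular_torrents() do.
    for cur_ind in range(9):
        row, col = divmod(cur_ind, 3)  # divmod floors even under true division
        assert (row, col) == (cur_ind // 3, cur_ind % 3)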
+ """ + self.resize_event_timer = QTimer() + self.resize_event_timer.timeout.connect(lambda: self.window().resizeEvent(None)) + self.resize_event_timer.start(100) def on_home_page_item_clicked(self, row, col): cell_widget = self.window().home_page_table_view.cellWidget(row, col) diff --git a/TriblerGUI/widgets/lazyloadlist.py b/TriblerGUI/widgets/lazyloadlist.py deleted file mode 100644 index 0c9f8347a48..00000000000 --- a/TriblerGUI/widgets/lazyloadlist.py +++ /dev/null @@ -1,79 +0,0 @@ -from PyQt5.QtCore import QSize, Qt -from PyQt5.QtWidgets import QListWidget, QListWidgetItem, QAbstractItemView - -from TriblerGUI.widgets.channel_torrent_list_item import ChannelTorrentListItem - -ITEM_LOAD_BATCH = 30 - - -class LazyLoadList(QListWidget): - """ - This class implements a list where widget items are lazy-loaded. When the user has reached the end of the list - when scrolling, the next items are created and displayed. - """ - def __init__(self, parent): - QListWidget.__init__(self, parent) - self.setSelectionMode(QAbstractItemView.ExtendedSelection) - self.verticalScrollBar().valueChanged.connect(self.on_list_scroll) - self.itemSelectionChanged.connect(self.on_item_clicked) - self.data_items = [] # Tuple of (ListWidgetClass, json data) - - def load_next_items(self): - for i in range(self.count(), min(self.count() + ITEM_LOAD_BATCH, len(self.data_items))): - self.load_item(i) - - def load_item(self, index): - if index < len(self.data_items): - item = QListWidgetItem() - item.setSizeHint(QSize(-1, 60)) - data_item = self.data_items[index] - item.setData(Qt.UserRole, data_item[1]) - if len(data_item) > 2: - widget_item = data_item[0](self, data_item[1], **data_item[2]) - else: - widget_item = data_item[0](self, data_item[1]) - self.insertItem(index, item) - self.setItemWidget(item, widget_item) - - def insert_item(self, index, item): - self.data_items.insert(index, item) - if index < ITEM_LOAD_BATCH: - self.load_item(index) - - def set_data_items(self, items): - self.clear() - self.data_items = items - self.load_next_items() - - def append_item(self, item): - self.data_items.append(item) - if self.count() < ITEM_LOAD_BATCH: - self.load_item(self.count()) - - def on_list_scroll(self, event): - if self.verticalScrollBar().value() == self.verticalScrollBar().maximum(): - self.load_next_items() - - def get_first_items(self, num, cls=None): - """ - Return the first num widget items with type cls. - This can be useful when for instance you need the first five search results. 
- """ - result = [] - for i in xrange(self.count()): - widget_item = self.itemWidget(self.item(i)) - if not cls or (cls and isinstance(widget_item, cls)): - result.append(widget_item) - - if len(result) >= num: - break - - return result - - def on_item_clicked(self): - if len(self.selectedItems()) == 0: - return - - for item_widget in (self.itemWidget(widget) for widget in self.selectedItems()): - if isinstance(item_widget, ChannelTorrentListItem): - item_widget.check_health() diff --git a/TriblerGUI/widgets/lazytableview.py b/TriblerGUI/widgets/lazytableview.py new file mode 100644 index 00000000000..397f132c391 --- /dev/null +++ b/TriblerGUI/widgets/lazytableview.py @@ -0,0 +1,233 @@ +from __future__ import absolute_import, division + +from abc import abstractmethod + +from PyQt5.QtCore import QModelIndex, QPoint, pyqtSignal +from PyQt5.QtWidgets import QTableView + +from TriblerGUI.defs import ACTION_BUTTONS, COMMIT_STATUS_COMMITTED, COMMIT_STATUS_NEW, COMMIT_STATUS_TODELETE, \ + PAGE_CHANNEL_DETAILS +from TriblerGUI.tribler_request_manager import TriblerRequestManager +from TriblerGUI.utilities import index2uri +from TriblerGUI.widgets.tablecontentdelegate import ChannelsButtonsDelegate, SearchResultsDelegate, \ + TorrentsButtonsDelegate +from TriblerGUI.widgets.tablecontentmodel import MyTorrentsContentModel + + +class LazyTableView(QTableView): + """ + This table view is designed to support lazy loading. + When the user reached the end of the table, it will ask the model for more items, and load them dynamically. + """ + pass + + +class TriblerContentTableView(LazyTableView): + # TODO: add redraw when the mouse leaves the view through the header + # overloading leaveEvent method could be used for that + mouse_moved = pyqtSignal(QPoint, QModelIndex) + + on_channel_clicked = pyqtSignal(dict) + on_torrent_clicked = pyqtSignal(QModelIndex, dict) + + def __init__(self, parent=None): + LazyTableView.__init__(self, parent) + self.setMouseTracking(True) + + self.delegate = self.init_delegate() + + self.setItemDelegate(self.delegate) + self.mouse_moved.connect(self.delegate.on_mouse_moved) + self.delegate.redraw_required.connect(self.redraw) + + @abstractmethod + def init_delegate(self): + # This method should create a QT Delegate object and return it + pass + + def mouseMoveEvent(self, event): + index = QModelIndex(self.indexAt(event.pos())) + self.mouse_moved.emit(event.pos(), index) + + def redraw(self): + self.viewport().update() + + +class DownloadButtonMixin(TriblerContentTableView): + def on_download_button_clicked(self, index): + self.window().start_download_from_uri(index2uri(index)) + + +class PlayButtonMixin(TriblerContentTableView): + def on_play_button_clicked(self, index): + infohash = index.model().data_items[index.row()][u'infohash'] + + def on_play_request_done(_): + if not self: + return + self.window().left_menu_button_video_player.click() + self.window().video_player_page.play_media_item(infohash, -1) + + self.window().perform_start_download_request(index2uri(index), + self.window().tribler_settings['download_defaults'][ + 'anonymity_enabled'], + self.window().tribler_settings['download_defaults'][ + 'safeseeding_enabled'], + self.window().tribler_settings['download_defaults']['saveas'], + [], 0, callback=on_play_request_done) + + +class SubscribeButtonMixin(TriblerContentTableView): + def on_subscribe_control_clicked(self, index): + if index.model().data_items[index.row()][u'status'] == 6: # LEGACY ENTRIES! 
+ return + if index.model().data_items[index.row()][u'my_channel']: + return + status = int(index.model().data_items[index.row()][u'subscribed']) + public_key = index.model().data_items[index.row()][u'public_key'] + request_mgr = TriblerRequestManager() + request_mgr.perform_request("metadata/channels/%s" % public_key, + (lambda _: self.on_unsubscribed_channel.emit(index)) if status else + (lambda _: self.on_subscribed_channel.emit(index)), + data={"subscribe": int(not status)}, method='POST') + index.model().data_items[index.row()][u'subscribed'] = int(not status) + + +class ItemClickedMixin(TriblerContentTableView): + def on_table_item_clicked(self, item): + column_position = self.model().column_position + if (ACTION_BUTTONS in column_position and item.column() == column_position[ACTION_BUTTONS]) or \ + (u'status' in column_position and item.column() == column_position[u'status']) or \ + (u'subscribed' in column_position and item.column() == column_position[u'subscribed']): + return + + content_info = self.model().data_items[item.row()] + # Safely determine if the thing is a channel. A little bit hackish + if 'torrents' in content_info: + self.window().channel_page.initialize_with_channel(content_info) + self.window().navigation_stack.append(self.window().stackedWidget.currentIndex()) + self.window().stackedWidget.setCurrentIndex(PAGE_CHANNEL_DETAILS) + self.on_channel_clicked.emit(content_info) + else: + self.on_torrent_clicked.emit(item, content_info) + + +class CommitControlMixin(TriblerContentTableView): + + def on_commit_control_clicked(self, index): + infohash = index.model().data_items[index.row()][u'infohash'] + status = index.model().data_items[index.row()][u'status'] + + new_status = COMMIT_STATUS_COMMITTED + if status == COMMIT_STATUS_NEW or status == COMMIT_STATUS_COMMITTED: + new_status = COMMIT_STATUS_TODELETE + + request_mgr = TriblerRequestManager() + request_mgr.perform_request("mychannel/torrents/%s" % infohash, + lambda response: self.on_torrent_status_updated(response, index), + data={"status": new_status}, method='PATCH') + + def on_torrent_status_updated(self, json_result, index): + if not json_result: + return + + if 'success' in json_result and json_result['success']: + index.model().data_items[index.row()][u'status'] = json_result['new_status'] + + self.window().edit_channel_page.channel_dirty = json_result['dirty'] + self.window().edit_channel_page.update_channel_commit_views(deleted_index=index) + + +class DeleteButtonMixin(CommitControlMixin): + def on_delete_button_clicked(self, index): + request_mgr = TriblerRequestManager() + request_mgr.perform_request("mychannel/torrents/%s" % index.model().data_items[index.row()][u'infohash'], + lambda response: self.on_torrent_status_updated(response, index), + data={"status" : COMMIT_STATUS_TODELETE}, method='PATCH') + + +class SearchResultsTableView(ItemClickedMixin, DownloadButtonMixin, PlayButtonMixin, SubscribeButtonMixin, + TriblerContentTableView): + on_subscribed_channel = pyqtSignal(QModelIndex) + on_unsubscribed_channel = pyqtSignal(QModelIndex) + + """ + This table displays search results, which can be both torrents and channels. 
+ """ + + def __init__(self, parent=None): + TriblerContentTableView.__init__(self, parent) + + # Mix-in connects + self.clicked.connect(self.on_table_item_clicked) + self.delegate.play_button.clicked.connect(self.on_play_button_clicked) + self.delegate.subscribe_control.clicked.connect(self.on_subscribe_control_clicked) + self.delegate.download_button.clicked.connect(self.on_download_button_clicked) + + def init_delegate(self): + return SearchResultsDelegate() + + def resizeEvent(self, _): + self.setColumnWidth(0, 100) + self.setColumnWidth(2, 100) + self.setColumnWidth(3, 100) + self.setColumnWidth(1, self.width() - 304) # Few pixels offset so the horizontal scrollbar does not appear + + +class TorrentsTableView(ItemClickedMixin, DeleteButtonMixin, DownloadButtonMixin, PlayButtonMixin, + TriblerContentTableView): + """ + This table displays various torrents. + """ + + def __init__(self, parent=None): + TriblerContentTableView.__init__(self, parent) + + # Mix-in connects + self.clicked.connect(self.on_table_item_clicked) + self.delegate.play_button.clicked.connect(self.on_play_button_clicked) + self.delegate.commit_control.clicked.connect(self.on_commit_control_clicked) + self.delegate.delete_button.clicked.connect(self.on_delete_button_clicked) + self.delegate.download_button.clicked.connect(self.on_download_button_clicked) + + def init_delegate(self): + return TorrentsButtonsDelegate() + + def resizeEvent(self, _): + if isinstance(self.model(), MyTorrentsContentModel): + self.setColumnWidth(0, 100) + self.setColumnWidth(2, 100) + self.setColumnWidth(3, 100) + self.setColumnWidth(4, 100) + self.setColumnWidth(1, self.width() - 404) # Few pixels offset so the horizontal scrollbar does not appear + else: + self.setColumnWidth(0, 100) + self.setColumnWidth(2, 100) + self.setColumnWidth(3, 100) + self.setColumnWidth(4, 100) + self.setColumnWidth(1, self.width() - 404) # Few pixels offset so the horizontal scrollbar does not appear + + +class ChannelsTableView(ItemClickedMixin, SubscribeButtonMixin, + TriblerContentTableView): + on_subscribed_channel = pyqtSignal(QModelIndex) + on_unsubscribed_channel = pyqtSignal(QModelIndex) + + """ + This table displays various channels. 
+ """ + + def __init__(self, parent=None): + TriblerContentTableView.__init__(self, parent) + + # Mix-in connects + self.clicked.connect(self.on_table_item_clicked) + self.delegate.subscribe_control.clicked.connect(self.on_subscribe_control_clicked) + + def init_delegate(self): + return ChannelsButtonsDelegate() + + def resizeEvent(self, _): + self.setColumnWidth(1, 150) + self.setColumnWidth(2, 100) + self.setColumnWidth(0, self.width() - 254) # Few pixels offset so the horizontal scrollbar does not appear diff --git a/TriblerGUI/widgets/leftmenuplaylist.py b/TriblerGUI/widgets/leftmenuplaylist.py index 159bc08f102..81223650685 100644 --- a/TriblerGUI/widgets/leftmenuplaylist.py +++ b/TriblerGUI/widgets/leftmenuplaylist.py @@ -1,6 +1,9 @@ +from __future__ import absolute_import + from PyQt5.QtCore import QTimer from PyQt5.QtCore import pyqtSignal from PyQt5.QtWidgets import QListWidget + from TriblerGUI.tribler_request_manager import TriblerRequestManager from TriblerGUI.utilities import is_video_file diff --git a/TriblerGUI/widgets/manageplaylistpage.py b/TriblerGUI/widgets/manageplaylistpage.py deleted file mode 100644 index 68b8d31a353..00000000000 --- a/TriblerGUI/widgets/manageplaylistpage.py +++ /dev/null @@ -1,154 +0,0 @@ -from PyQt5.QtCore import Qt, pyqtSignal -from PyQt5.QtGui import QIcon -from PyQt5.QtWidgets import QWidget, QListWidgetItem -from TriblerGUI.defs import PAGE_EDIT_CHANNEL_PLAYLIST_TORRENTS -from TriblerGUI.tribler_request_manager import TriblerRequestManager -from TriblerGUI.utilities import get_image_path - - -class ManagePlaylistPage(QWidget): - """ - On this page, users can add or remove torrents from/to a playlist. - """ - - playlist_saved = pyqtSignal() - - def __init__(self): - QWidget.__init__(self) - - self.channel_info = None - self.playlist_info = None - self.request_mgr = None - - self.torrents_in_playlist = [] - self.torrents_in_channel = [] - - self.torrents_to_create = [] - self.torrents_to_remove = [] - - self.pending_requests = [] - self.requests_done = 0 - - def initialize(self, channel_info, playlist_info): - self.channel_info = channel_info - self.playlist_info = playlist_info - self.window().edit_channel_details_manage_playlist_header.setText("Manage torrents in playlist '%s'" % - playlist_info['name']) - self.window().manage_channel_playlist_torrents_back.setIcon(QIcon(get_image_path('page_back.png'))) - - self.window().playlist_manage_add_to_playlist.clicked.connect(self.on_add_clicked) - self.window().playlist_manage_remove_from_playlist.clicked.connect(self.on_remove_clicked) - self.window().edit_channel_manage_playlist_save_button.clicked.connect(self.on_save_clicked) - self.window().manage_channel_playlist_torrents_back.clicked.connect(self.on_playlist_manage_back_clicked) - - # Load torrents in your channel - self.request_mgr = TriblerRequestManager() - self.request_mgr.perform_request("channels/discovered/%s/torrents?disable_filter=1" % - channel_info["identifier"], self.on_received_channel_torrents) - - self.torrents_in_playlist = [] - self.torrents_in_channel = [] - - self.torrents_to_create = [] - self.torrents_to_remove = [] - - self.pending_requests = [] - self.requests_done = 0 - - def on_playlist_manage_back_clicked(self): - self.window().edit_channel_details_stacked_widget.setCurrentIndex(PAGE_EDIT_CHANNEL_PLAYLIST_TORRENTS) - - def update_lists(self): - self.window().playlist_manage_in_channel_list.clear() - self.window().playlist_manage_in_playlist_list.clear() - - for torrent in self.torrents_in_channel: - item = 
QListWidgetItem(torrent["name"], self.window().playlist_manage_in_channel_list) - item.setData(Qt.UserRole, torrent) - self.window().playlist_manage_in_channel_list.addItem(item) - - for torrent in self.torrents_in_playlist: - item = QListWidgetItem(torrent["name"], self.window().playlist_manage_in_playlist_list) - item.setData(Qt.UserRole, torrent) - self.window().playlist_manage_in_playlist_list.addItem(item) - - @staticmethod - def remove_torrent_from_list(torrent, remove_from_list): - index = -1 - for torrent_index in xrange(len(remove_from_list)): - if remove_from_list[torrent_index]['infohash'] == torrent['infohash']: - index = torrent_index - break - - if index != -1: - del remove_from_list[index] - - def on_received_channel_torrents(self, result): - if not result: - return - self.torrents_in_playlist = self.playlist_info['torrents'] - - self.torrents_in_channel = [] - for torrent in result['torrents']: - if not ManagePlaylistPage.list_contains_torrent(self.torrents_in_playlist, torrent): - self.torrents_in_channel.append(torrent) - - self.update_lists() - - @staticmethod - def list_contains_torrent(torrent_list, torrent): - for playlist_torrent in torrent_list: - if torrent['infohash'] == playlist_torrent['infohash']: - return True - return False - - def on_add_clicked(self): - for item in self.window().playlist_manage_in_channel_list.selectedItems(): - torrent = item.data(Qt.UserRole) - ManagePlaylistPage.remove_torrent_from_list(torrent, self.torrents_in_channel) - self.torrents_in_playlist.append(torrent) - - if ManagePlaylistPage.list_contains_torrent(self.torrents_to_remove, torrent): - ManagePlaylistPage.remove_torrent_from_list(torrent, self.torrents_to_remove) - self.torrents_to_create.append(torrent) - - self.update_lists() - - def on_remove_clicked(self): - for item in self.window().playlist_manage_in_playlist_list.selectedItems(): - torrent = item.data(Qt.UserRole) - ManagePlaylistPage.remove_torrent_from_list(torrent, self.torrents_in_playlist) - self.torrents_in_channel.append(torrent) - - if ManagePlaylistPage.list_contains_torrent(self.torrents_to_create, torrent): - ManagePlaylistPage.remove_torrent_from_list(torrent, self.torrents_to_create) - self.torrents_to_remove.append(torrent) - - self.update_lists() - - def on_save_clicked(self): - self.requests_done = 0 - self.pending_requests = [] - for torrent in self.torrents_to_create: - request = TriblerRequestManager() - request.perform_request("channels/discovered/%s/playlists/%s/%s" % - (self.channel_info["identifier"], self.playlist_info['id'], - torrent['infohash']), self.on_request_done, method="PUT") - self.pending_requests.append(request) - for torrent in self.torrents_to_remove: - request = TriblerRequestManager() - request.perform_request("channels/discovered/%s/playlists/%s/%s" % - (self.channel_info["identifier"], self.playlist_info['id'], torrent['infohash']), - self.on_request_done, method="DELETE") - self.pending_requests.append(request) - - def on_request_done(self, result): - if not result: - return - self.requests_done += 1 - if self.requests_done == len(self.pending_requests): - self.on_requests_done() - - def on_requests_done(self): - self.window().edit_channel_details_stacked_widget.setCurrentIndex(PAGE_EDIT_CHANNEL_PLAYLIST_TORRENTS) - self.playlist_saved.emit() diff --git a/TriblerGUI/widgets/marketpage.py b/TriblerGUI/widgets/marketpage.py index 88dbd67bee8..e9d6c6a3e09 100644 --- a/TriblerGUI/widgets/marketpage.py +++ b/TriblerGUI/widgets/marketpage.py @@ -1,14 +1,14 @@ +from __future__ import 
absolute_import + import datetime -from PyQt5.QtCore import Qt -from PyQt5.QtCore import pyqtSignal -from PyQt5.QtGui import QCursor -from PyQt5.QtGui import QIcon -from PyQt5.QtWidgets import QAction -from PyQt5.QtWidgets import QSizePolicy -from PyQt5.QtWidgets import QSpacerItem -from PyQt5.QtWidgets import QWidget - -from TriblerGUI.defs import PAGE_MARKET_TRANSACTIONS, PAGE_MARKET_WALLETS, PAGE_MARKET_ORDERS + +from PyQt5.QtCore import Qt, pyqtSignal +from PyQt5.QtGui import QCursor, QIcon +from PyQt5.QtWidgets import QAction, QSizePolicy, QSpacerItem, QWidget + +from six.moves import xrange + +from TriblerGUI.defs import PAGE_MARKET_ORDERS, PAGE_MARKET_TRANSACTIONS, PAGE_MARKET_WALLETS from TriblerGUI.dialogs.confirmationdialog import ConfirmationDialog from TriblerGUI.dialogs.newmarketorderdialog import NewMarketOrderDialog from TriblerGUI.tribler_action_menu import TriblerActionMenu @@ -246,8 +246,12 @@ def create_order(self, is_ask, asset1_amount, asset1_type, asset2_amount, asset2 """ Create a new ask or bid order. """ - post_data = str("first_asset_amount=%d&first_asset_type=%s&second_asset_amount=%d&second_asset_type=%s" % - (asset1_amount, asset1_type, asset2_amount, asset2_type)) + post_data = { + "first_asset_amount": asset1_amount, + "first_asset_type": asset1_type, + "second_asset_amount": asset2_amount, + "second_asset_type": asset2_type + } self.request_mgr = TriblerRequestManager() self.request_mgr.perform_request("market/%s" % ('asks' if is_ask else 'bids'), lambda response: self.on_order_created(response, is_ask), diff --git a/TriblerGUI/widgets/marketwalletspage.py b/TriblerGUI/widgets/marketwalletspage.py index 886e995d766..8339c253c41 100644 --- a/TriblerGUI/widgets/marketwalletspage.py +++ b/TriblerGUI/widgets/marketwalletspage.py @@ -1,13 +1,11 @@ +from __future__ import absolute_import + from PIL.ImageQt import ImageQt -from PyQt5 import QtGui, QtCore -from PyQt5.QtGui import QCursor -from PyQt5.QtGui import QIcon -from PyQt5.QtWidgets import QAction, QPushButton, QSizePolicy -from PyQt5.QtWidgets import QTreeWidgetItem -from PyQt5.QtWidgets import QWidget +from PyQt5 import QtCore, QtGui +from PyQt5.QtGui import QCursor, QIcon +from PyQt5.QtWidgets import QAction, QPushButton, QTreeWidgetItem, QWidget -from TriblerGUI.defs import BUTTON_TYPE_NORMAL, BUTTON_TYPE_CONFIRM from TriblerGUI.dialogs.confirmationdialog import ConfirmationDialog from TriblerGUI.tribler_action_menu import TriblerActionMenu from TriblerGUI.tribler_request_manager import TriblerRequestManager @@ -172,7 +170,7 @@ def should_create_wallet(self, wallet_id): return self.request_mgr = TriblerRequestManager() - self.request_mgr.perform_request("wallets/%s" % wallet_id, self.on_wallet_created, method='PUT', data='') + self.request_mgr.perform_request("wallets/%s" % wallet_id, self.on_wallet_created, method='PUT') def on_wallet_created(self, response): if not response: diff --git a/TriblerGUI/widgets/playlist_list_item.py b/TriblerGUI/widgets/playlist_list_item.py deleted file mode 100644 index 4cce3950e82..00000000000 --- a/TriblerGUI/widgets/playlist_list_item.py +++ /dev/null @@ -1,45 +0,0 @@ -from PyQt5.QtGui import QIcon -from PyQt5.QtWidgets import QWidget - -from TriblerGUI.tribler_window import fc_playlist_list_item -from TriblerGUI.utilities import get_image_path - - -class PlaylistListItem(QWidget, fc_playlist_list_item): - """ - This class is responsible for managing the playlist item widget. 
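On the create_order() change in marketpage.py above: the hand-formatted body is replaced by a dict, which the request manager presumably form-encodes itself (the same split shows up later in this diff, where settingspage.py switches its pre-serialized JSON body from `data=` to `raw_data=`). A sketch of the equivalence, illustration only:

    from six.moves.urllib.parse import urlencode

    post_data = {
        "first_asset_amount": 10,
        "first_asset_type": "BTC",
        "second_asset_amount": 13,
        "second_asset_type": "MB",
    }
    # Sorting gives a deterministic field order; the result reproduces the
    # string the old code interpolated by hand:
    assert urlencode(sorted(post_data.items())) == \
        "first_asset_amount=10&first_asset_type=BTC&second_asset_amount=13&second_asset_type=MB"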
- """ - - def __init__(self, parent, playlist, show_controls=False, on_remove_clicked=None, on_edit_clicked=None): - QWidget.__init__(self, parent) - fc_playlist_list_item.__init__(self) - - self.setupUi(self) - - self.playlist_info = playlist - - self.edit_playlist_button.setIcon(QIcon(get_image_path("edit_white.png"))) - self.remove_playlist_button.setIcon(QIcon(get_image_path("delete.png"))) - - self.playlist_name.setText(playlist["name"]) - self.playlist_num_items.setText("%d items" % len(playlist["torrents"])) - - self.thumbnail_widget.initialize(playlist["name"], 24) - - self.controls_container.setHidden(True) - self.show_controls = show_controls - - if on_remove_clicked is not None: - self.remove_playlist_button.clicked.connect(lambda: on_remove_clicked(self)) - - if on_edit_clicked is not None: - self.edit_playlist_button.clicked.connect(lambda: on_edit_clicked(self)) - - def enterEvent(self, _): - if self.show_controls: - self.controls_container.setHidden(False) - self.edit_playlist_button.setIcon(QIcon(get_image_path('edit_white.png'))) - self.remove_playlist_button.setIcon(QIcon(get_image_path('delete.png'))) - - def leaveEvent(self, _): - self.controls_container.setHidden(True) diff --git a/TriblerGUI/widgets/playlistpage.py b/TriblerGUI/widgets/playlistpage.py deleted file mode 100644 index 4d2eaafa946..00000000000 --- a/TriblerGUI/widgets/playlistpage.py +++ /dev/null @@ -1,26 +0,0 @@ -from PyQt5.QtGui import QIcon -from PyQt5.QtWidgets import QWidget - -from TriblerGUI.widgets.channel_torrent_list_item import ChannelTorrentListItem -from TriblerGUI.utilities import get_image_path - - -class PlaylistPage(QWidget): - """ - This page shows torrents inside a specific playlist. - """ - - def __init__(self): - QWidget.__init__(self) - self.playlist = None - - def initialize_with_playlist(self, playlist): - self.playlist = playlist - self.window().playlist_name_label.setText(playlist["name"]) - self.window().playlist_num_items_label.setText("%d items" % len(playlist["torrents"])) - self.window().playlist_back_button.setIcon(QIcon(get_image_path('page_back.png'))) - - items = [] - for result in playlist['torrents']: - items.append((ChannelTorrentListItem, result)) - self.window().playlist_torrents_list.set_data_items(items) diff --git a/TriblerGUI/widgets/searchresultspage.py b/TriblerGUI/widgets/searchresultspage.py index 6e18fcf3c91..7d7b4db6ab8 100644 --- a/TriblerGUI/widgets/searchresultspage.py +++ b/TriblerGUI/widgets/searchresultspage.py @@ -1,9 +1,10 @@ -from PyQt5.QtCore import QTimer +from __future__ import absolute_import + from PyQt5.QtWidgets import QWidget -from TriblerGUI.widgets.channel_list_item import ChannelListItem -from TriblerGUI.widgets.channel_torrent_list_item import ChannelTorrentListItem -from TriblerGUI.utilities import bisect_right +from TriblerGUI.utilities import get_gui_setting +from TriblerGUI.widgets.tablecontentmodel import SearchResultsContentModel +from TriblerGUI.widgets.triblertablecontrollers import SearchResultsTableViewController class SearchResultsPage(QWidget): @@ -13,119 +14,45 @@ class SearchResultsPage(QWidget): def __init__(self): QWidget.__init__(self) - self.search_results = {'channels': [], 'torrents': []} - self.health_timer = None - self.show_torrents = True - self.show_channels = True + self.query = None + self.controller = None + self.model = None + self.gui_settings = None - def initialize_search_results_page(self): + def initialize_search_results_page(self, gui_settings): + self.gui_settings = gui_settings 
self.window().search_results_tab.initialize() self.window().search_results_tab.clicked_tab_button.connect(self.clicked_tab_button) - self.window().search_torrents_detail_widget.hide() + self.model = SearchResultsContentModel(hide_xxx=get_gui_setting(self.gui_settings, "family_filter", True, + is_bool=True) if self.gui_settings else True) + self.controller = SearchResultsTableViewController(self.model, self.window().search_results_list, + self.window().search_details_container, + self.window().num_search_results_label) + self.window().search_details_container.details_tab_widget.initialize_details_widget() def perform_search(self, query): - self.search_results = {'channels': [], 'torrents': []} + self.query = query + self.model.reset() + self.window().num_search_results_label.setText("") + self.window().search_details_container.hide() trimmed_query = query if len(query) < 50 else "%s..." % query[:50] self.window().search_results_header_label.setText("Search results for: %s" % trimmed_query) - self.window().search_results_list.set_data_items([]) # To clean the list - self.window().search_results_tab.on_tab_button_click(self.window().search_results_all_button) - - # Start the health timer that checks the health of the first five results - if self.health_timer: - self.health_timer.stop() - - self.health_timer = QTimer() - self.health_timer.setSingleShot(True) - self.health_timer.timeout.connect(self.check_health_of_results) - self.health_timer.start(2000) - - def check_health_of_results(self): - first_torrents = self.window().search_results_list.get_first_items(5, cls=ChannelTorrentListItem) - for torrent_item in first_torrents: - torrent_item.check_health() - - def clicked_tab_button(self, tab_button_name): - if tab_button_name == "search_results_all_button": - self.show_torrents = True - self.show_channels = True - self.load_search_results_in_list() - elif tab_button_name == "search_results_channels_button": - self.show_torrents = False - self.show_channels = True - self.load_search_results_in_list() - elif tab_button_name == "search_results_torrents_button": - self.show_torrents = True - self.show_channels = False - self.load_search_results_in_list() - - def update_num_search_results(self): - self.window().num_search_results_label.setText("%d results" % - (len(self.search_results['channels']) + - len(self.search_results['torrents']))) - - def clicked_item(self): - if len(self.window().search_results_list.selectedItems()) != 1: - self.window().search_torrents_detail_widget.hide() - else: - item = self.window().search_results_list.selectedItems()[0] - list_widget = item.listWidget() - list_item = list_widget.itemWidget(item) - if isinstance(list_item, ChannelTorrentListItem): - self.window().search_torrents_detail_widget.update_with_torrent(list_item.torrent_info) - self.window().search_torrents_detail_widget.show() - else: - self.window().search_torrents_detail_widget.hide() - - def load_search_results_in_list(self): - all_items = [] - if self.show_channels: - for channel_item in self.search_results['channels']: - all_items.append((ChannelListItem, channel_item)) - - if self.show_torrents: - self.search_results['torrents'] = sorted(self.search_results['torrents'], - key=lambda item: item['relevance_score'], - reverse=True) - for torrent_item in self.search_results['torrents']: - all_items.append((ChannelTorrentListItem, torrent_item)) - - self.window().search_results_list.set_data_items(all_items) - - def received_search_result_channel(self, result): - # Ignore channels that have a small 
amount of torrents or have no votes
-        if result['torrents'] <= 2 or result['votes'] == 0:
-            return
-        if self.is_duplicate_channel(result):
-            return
-        channel_index = bisect_right(result, self.search_results['channels'], is_torrent=False)
-        if self.show_channels:
-            self.window().search_results_list.insert_item(channel_index, (ChannelListItem, result))
-
-        self.search_results['channels'].insert(channel_index, result)
-        self.update_num_search_results()

-    def received_search_result_torrent(self, result):
-        if self.is_duplicate_torrent(result):
-            return
-        torrent_index = bisect_right(result, self.search_results['torrents'], is_torrent=True)
-        num_channels_visible = len(self.search_results['channels']) if self.show_channels else 0
-        if self.show_torrents:
-            self.window().search_results_list.insert_item(
-                torrent_index + num_channels_visible, (ChannelTorrentListItem, result))
+        self.controller.load_search_results(query, 1, 50)

-        self.search_results['torrents'].insert(torrent_index, result)
-        self.update_num_search_results()
+    def set_columns_visibility(self, column_names, visible=True):
+        for column_name in column_names:
+            self.window().search_page_container.content_table.setColumnHidden(
+                self.model.column_position[column_name], not visible)

-    def is_duplicate_channel(self, result):
-        for channel_item in self.search_results['channels']:
-            if result[u'dispersy_cid'] == channel_item[u'dispersy_cid']:
-                return True
-        return False
+    def clicked_tab_button(self, _):
+        if self.window().search_results_tab.get_selected_index() == 0:
+            self.model.type_filter = None
+        elif self.window().search_results_tab.get_selected_index() == 1:
+            self.model.type_filter = 'channel'
+        elif self.window().search_results_tab.get_selected_index() == 2:
+            self.model.type_filter = 'torrent'

-    def is_duplicate_torrent(self, result):
-        for torrent_item in self.search_results['torrents']:
-            if result[u'infohash'] == torrent_item[u'infohash']:
-                return True
-        return False
+        self.perform_search(self.query)
diff --git a/TriblerGUI/widgets/settingspage.py b/TriblerGUI/widgets/settingspage.py
index cc25c166520..04d3b26dc24 100644
--- a/TriblerGUI/widgets/settingspage.py
+++ b/TriblerGUI/widgets/settingspage.py
@@ -1,10 +1,19 @@
+from __future__ import absolute_import, division
+
 import sys

 from PIL.ImageQt import ImageQt
-from PyQt5 import QtGui, QtCore
-from PyQt5.QtWidgets import QSizePolicy
-from PyQt5.QtWidgets import QWidget, QLabel, QFileDialog
+from PyQt5 import QtCore, QtGui
+from PyQt5.QtWidgets import QFileDialog, QLabel, QSizePolicy, QWidget
+
+import Tribler.Core.Utilities.json_util as json
+
+from TriblerGUI.defs import BUTTON_TYPE_CONFIRM, BUTTON_TYPE_NORMAL, DEFAULT_API_PORT, PAGE_SETTINGS_ANONYMITY, \
+    PAGE_SETTINGS_BANDWIDTH, PAGE_SETTINGS_CONNECTION, PAGE_SETTINGS_DEBUG, PAGE_SETTINGS_GENERAL, PAGE_SETTINGS_SEEDING
+from TriblerGUI.dialogs.confirmationdialog import ConfirmationDialog
+from TriblerGUI.tribler_request_manager import TriblerRequestManager
+from TriblerGUI.utilities import get_gui_setting, is_dir_writable, seconds_to_hhmm_string, string_to_seconds

 try:
     import qrcode
@@ -13,14 +22,6 @@
 except ImportError:
     has_qr = False

-import Tribler.Core.Utilities.json_util as json
-from TriblerGUI.defs import PAGE_SETTINGS_GENERAL, PAGE_SETTINGS_CONNECTION, PAGE_SETTINGS_BANDWIDTH, \
-    PAGE_SETTINGS_SEEDING, PAGE_SETTINGS_ANONYMITY, BUTTON_TYPE_NORMAL, BUTTON_TYPE_CONFIRM, DEFAULT_API_PORT, \
-    PAGE_SETTINGS_DEBUG
-from TriblerGUI.dialogs.confirmationdialog import ConfirmationDialog
-from
TriblerGUI.tribler_request_manager import TriblerRequestManager -from TriblerGUI.utilities import string_to_seconds, get_gui_setting, seconds_to_hhmm_string, is_dir_writable - DEPENDENCY_ERROR_TITLE = "Dependency missing" DEPENDENCY_ERROR_MESSAGE = "'qrcode' module is missing. This module can be installed through apt-get or pip" @@ -50,6 +51,8 @@ def initialize_settings_page(self): self.window().download_location_chooser_button.clicked.connect(self.on_choose_download_dir_clicked) self.window().watch_folder_chooser_button.clicked.connect(self.on_choose_watch_dir_clicked) + self.window().channel_autocommit_checkbox.stateChanged.connect(self.on_channel_autocommit_checkbox_changed) + self.window().family_filter_checkbox.stateChanged.connect(self.on_family_filter_checkbox_changed) self.window().developer_mode_enabled_checkbox.stateChanged.connect(self.on_developer_mode_checkbox_changed) self.window().use_monochrome_icon_checkbox.stateChanged.connect(self.on_use_monochrome_icon_checkbox_changed) self.window().download_settings_anon_checkbox.stateChanged.connect(self.on_anon_download_state_changed) @@ -155,6 +158,12 @@ def on_emptying_tokens(self, data): else: ConfirmationDialog.show_error(self.window(), DEPENDENCY_ERROR_TITLE, DEPENDENCY_ERROR_MESSAGE) + def on_channel_autocommit_checkbox_changed(self, _): + self.window().gui_settings.setValue("autocommit_enabled", self.window().channel_autocommit_checkbox.isChecked()) + + def on_family_filter_checkbox_changed(self, _): + self.window().gui_settings.setValue("family_filter", self.window().family_filter_checkbox.isChecked()) + def on_developer_mode_checkbox_changed(self, _): self.window().gui_settings.setValue("debug", self.window().developer_mode_enabled_checkbox.isChecked()) self.window().left_menu_button_debug.setHidden(not self.window().developer_mode_enabled_checkbox.isChecked()) @@ -214,7 +223,8 @@ def initialize_with_settings(self, settings): gui_settings = self.window().gui_settings # General settings - self.window().family_filter_checkbox.setChecked(settings['general']['family_filter']) + self.window().family_filter_checkbox.setChecked(get_gui_setting(gui_settings, 'family_filter', + True, is_bool=True)) self.window().use_monochrome_icon_checkbox.setChecked(get_gui_setting(gui_settings, "use_monochrome_icon", False, is_bool=True)) self.window().download_location_input.setText(settings['download_defaults']['saveas']) @@ -226,6 +236,10 @@ def initialize_with_settings(self, settings): self.window().watchfolder_enabled_checkbox.setChecked(settings['watch_folder']['enabled']) self.window().watchfolder_location_input.setText(settings['watch_folder']['directory']) + # Channel settings + self.window().channel_autocommit_checkbox.setChecked( + get_gui_setting(gui_settings, "autocommit_enabled", True, is_bool=True)) + # Log directory self.window().log_location_input.setText(settings['general']['log_dir']) @@ -267,13 +281,11 @@ def initialize_with_settings(self, settings): self.window().credit_mining_enabled_checkbox.setChecked(settings['credit_mining']['enabled']) self.window().max_disk_space_input.setText(str(settings['credit_mining']['max_disk_space'])) - # chant settings - self.window().chant_channel_edit.setChecked(settings['chant']['channel_edit']) - # Debug self.window().developer_mode_enabled_checkbox.setChecked(get_gui_setting(gui_settings, "debug", False, is_bool=True)) self.window().checkbox_enable_resource_log.setChecked(settings['resource_monitor']['enabled']) + cpu_priority = 1 if 'cpu_priority' in settings['resource_monitor']: 
cpu_priority = int(settings['resource_monitor']['cpu_priority']) @@ -330,10 +342,8 @@ def save_settings(self): settings_data = {'general': {}, 'Tribler': {}, 'download_defaults': {}, 'libtorrent': {}, 'watch_folder': {}, 'tunnel_community': {}, 'trustchain': {}, 'credit_mining': {}, 'resource_monitor': {}, 'ipv8': {}, 'chant': {}} - settings_data['general']['family_filter'] = self.window().family_filter_checkbox.isChecked() settings_data['download_defaults']['saveas'] = self.window().download_location_input.text().encode('utf-8') settings_data['general']['log_dir'] = self.window().log_location_input.text() - settings_data['chant']['channel_edit'] = self.window().chant_channel_edit.isChecked() settings_data['watch_folder']['enabled'] = self.window().watchfolder_enabled_checkbox.isChecked() if settings_data['watch_folder']['enabled']: @@ -354,8 +364,7 @@ def save_settings(self): else: settings_data['libtorrent']['proxy_server'] = ":" - if len(self.window().lt_proxy_username_input.text()) > 0 and \ - len(self.window().lt_proxy_password_input.text()) > 0: + if self.window().lt_proxy_username_input.text() and self.window().lt_proxy_password_input.text(): settings_data['libtorrent']['proxy_auth'] = "%s:%s" % (self.window().lt_proxy_username_input.text(), self.window().lt_proxy_password_input.text()) else: @@ -390,7 +399,7 @@ def save_settings(self): except ValueError: ConfirmationDialog.show_error(self.window(), "Invalid value for bandwidth limit", "You've entered an invalid value for the maximum upload/download rate. " - "Please enter a whole number (max: %d)" % (sys.maxsize/1000)) + "Please enter a whole number (max: %d)" % (sys.maxsize / 1000)) return try: @@ -446,12 +455,16 @@ def save_settings(self): self.settings_request_mgr = TriblerRequestManager() self.settings_request_mgr.perform_request("settings", self.on_settings_saved, - method='POST', data=json.dumps(settings_data)) + method='POST', raw_data=json.dumps(settings_data)) def on_settings_saved(self, data): if not data: return # Now save the GUI settings + self.window().gui_settings.setValue("family_filter", + self.window().family_filter_checkbox.isChecked()) + self.window().gui_settings.setValue("autocommit_enabled", + self.window().channel_autocommit_checkbox.isChecked()) self.window().gui_settings.setValue("ask_download_settings", self.window().always_ask_location_checkbox.isChecked()) self.window().gui_settings.setValue("use_monochrome_icon", diff --git a/TriblerGUI/widgets/subscribedchannelspage.py b/TriblerGUI/widgets/subscribedchannelspage.py index 432cb6a53e4..bdd9aa17bc2 100644 --- a/TriblerGUI/widgets/subscribedchannelspage.py +++ b/TriblerGUI/widgets/subscribedchannelspage.py @@ -1,10 +1,9 @@ +from __future__ import absolute_import + from PyQt5.QtWidgets import QWidget -from TriblerGUI.widgets.channel_list_item import ChannelListItem -from TriblerGUI.defs import BUTTON_TYPE_NORMAL, BUTTON_TYPE_CONFIRM -from TriblerGUI.dialogs.confirmationdialog import ConfirmationDialog -from TriblerGUI.widgets.loading_list_item import LoadingListItem -from TriblerGUI.tribler_request_manager import TriblerRequestManager +from TriblerGUI.widgets.tablecontentmodel import ChannelsContentModel +from TriblerGUI.widgets.triblertablecontrollers import ChannelsTableViewController class SubscribedChannelsPage(QWidget): @@ -14,53 +13,17 @@ class SubscribedChannelsPage(QWidget): def __init__(self): QWidget.__init__(self) - self.dialog = None self.request_mgr = None + self.model = None + self.controller = None def initialize(self): - 
self.window().add_subscription_button.clicked.connect(self.on_add_subscription_clicked) + self.model = ChannelsContentModel(subscribed=True) + self.controller = ChannelsTableViewController(self.model, self.window().subscribed_channels_list, + self.window().num_subscribed_channels_label, + self.window().subscribed_channels_filter_input) def load_subscribed_channels(self): - self.window().subscribed_channels_list.set_data_items([(LoadingListItem, None)]) - - self.request_mgr = TriblerRequestManager() - self.request_mgr.perform_request("channels/subscribed", self.received_subscribed_channels) - - def received_subscribed_channels(self, results): - if not results: - return - self.window().subscribed_channels_list.set_data_items([]) - items = [] - - if len(results['subscribed']) == 0: - self.window().subscribed_channels_list.set_data_items( - [(LoadingListItem, "You are not subscribed to any channel.")]) - return - - for result in results['subscribed']: - items.append((ChannelListItem, result)) - self.window().subscribed_channels_list.set_data_items(items) - - def on_add_subscription_clicked(self): - self.dialog = ConfirmationDialog(self, "Add subscribed channel", - "Please enter the identifier of the channel you want to subscribe to below. " - "It can take up to a minute before the channel is visible in your list of " - "subscribed channels.", - [('ADD', BUTTON_TYPE_NORMAL), ('CANCEL', BUTTON_TYPE_CONFIRM)], - show_input=True) - self.dialog.dialog_widget.dialog_input.setPlaceholderText('Channel identifier') - self.dialog.button_clicked.connect(self.on_subscription_added) - self.dialog.show() - - def on_subscription_added(self, action): - if action == 0: - self.request_mgr = TriblerRequestManager() - self.request_mgr.perform_request("channels/subscribed/%s" % self.dialog.dialog_widget.dialog_input.text(), - self.on_channel_subscribed, method='PUT') - - self.dialog.close_dialog() - self.dialog = None - - def on_channel_subscribed(self, _): - pass + self.controller.model.reset() + self.controller.load_channels(1, 50) # Load the first 50 subscribed channels diff --git a/TriblerGUI/widgets/subscriptionswidget.py b/TriblerGUI/widgets/subscriptionswidget.py index 5b223026caf..94d0849f7ed 100644 --- a/TriblerGUI/widgets/subscriptionswidget.py +++ b/TriblerGUI/widgets/subscriptionswidget.py @@ -15,6 +15,7 @@ class SubscriptionsWidget(QWidget): unsubscribed_channel = pyqtSignal(object) subscribed_channel = pyqtSignal(object) + credit_mining_toggled = pyqtSignal(bool) def __init__(self, parent): QWidget.__init__(self, parent) @@ -40,13 +41,14 @@ def initialize_with_channel(self, channel): self.update_subscribe_button() def update_subscribe_button(self, remote_response=None): + if remote_response and 'subscribed' in remote_response: self.channel_info["subscribed"] = remote_response['subscribed'] if remote_response and 'votes' in remote_response: self.channel_info["votes"] = remote_response['votes'] - if self.channel_info["subscribed"]: + if int(self.channel_info["subscribed"]): self.subscribe_button.setIcon(QIcon(QPixmap(get_image_path('subscribed_yes.png')))) else: self.subscribe_button.setIcon(QIcon(QPixmap(get_image_path('subscribed_not.png')))) @@ -55,28 +57,35 @@ def update_subscribe_button(self, remote_response=None): if self.window().tribler_settings: # It could be that the settings are not loaded yet self.credit_mining_button.setHidden(not self.window().tribler_settings["credit_mining"]["enabled"]) - if self.channel_info["dispersy_cid"] in self.window().tribler_settings["credit_mining"]["sources"]: 
+ if self.channel_info["public_key"] in self.window().tribler_settings["credit_mining"]["sources"]: self.credit_mining_button.setIcon(QIcon(QPixmap(get_image_path('credit_mining_yes.png')))) else: self.credit_mining_button.setIcon(QIcon(QPixmap(get_image_path('credit_mining_not.png')))) else: self.credit_mining_button.hide() + # Disable channel control buttons for LEGACY_ENTRY channels + hide_controls = (self.channel_info["status"] == 6) + self.num_subs_label.setHidden(hide_controls) + self.subscribe_button.setHidden( + hide_controls or ("my_channel" in self.channel_info and self.channel_info["my_channel"])) + self.credit_mining_button.setHidden(hide_controls) + def on_subscribe_button_click(self): self.request_mgr = TriblerRequestManager() - if self.channel_info["subscribed"]: - self.request_mgr.perform_request("channels/subscribed/%s" % - self.channel_info['dispersy_cid'], - self.on_channel_unsubscribed, method='DELETE') + if int(self.channel_info["subscribed"]): + self.request_mgr.perform_request("metadata/channels/%s" % + self.channel_info['public_key'], + self.on_channel_unsubscribed, data={"subscribe": 0}, method='POST') else: - self.request_mgr.perform_request("channels/subscribed/%s" % - self.channel_info['dispersy_cid'], - self.on_channel_subscribed, method='PUT') + self.request_mgr.perform_request("metadata/channels/%s" % + self.channel_info['public_key'], + self.on_channel_subscribed, data={"subscribe": 1}, method='POST') def on_channel_unsubscribed(self, json_result): if not json_result or not self: return - if json_result["unsubscribed"]: + if json_result["success"]: self.unsubscribed_channel.emit(self.channel_info) self.channel_info["subscribed"] = False self.channel_info["votes"] -= 1 @@ -85,7 +94,7 @@ def on_channel_unsubscribed(self, json_result): def on_channel_subscribed(self, json_result): if not json_result or not self: return - if json_result["subscribed"]: + if json_result["success"]: self.subscribed_channel.emit(self.channel_info) self.channel_info["subscribed"] = True self.channel_info["votes"] += 1 @@ -93,28 +102,26 @@ def on_channel_subscribed(self, json_result): def on_credit_mining_button_click(self): old_sources = self.window().tribler_settings["credit_mining"]["sources"] - new_sources = [] if self.channel_info["dispersy_cid"] in old_sources else [self.channel_info["dispersy_cid"]] + new_sources = [] if self.channel_info["public_key"] in old_sources \ + else [self.channel_info["public_key"]] settings = {"credit_mining": {"sources": new_sources}} self.request_mgr = TriblerRequestManager() self.request_mgr.perform_request("settings", self.on_credit_mining_sources, - method='POST', data=json.dumps(settings)) + method='PUT', raw_data=json.dumps(settings)) def on_credit_mining_sources(self, json_result): if not json_result: return if json_result["modified"]: old_source = next(iter(self.window().tribler_settings["credit_mining"]["sources"]), None) + if self.channel_info["public_key"] != old_source: + self.credit_mining_toggled.emit(True) + new_sources = [self.channel_info["public_key"]] + else: + self.credit_mining_toggled.emit(False) + new_sources = [] - new_sources = [self.channel_info["dispersy_cid"]] if self.channel_info["dispersy_cid"] != old_source else [] self.window().tribler_settings["credit_mining"]["sources"] = new_sources self.update_subscribe_button() - - channels_list = self.window().discovered_channels_list - for index, data_item in enumerate(channels_list.data_items): - if data_item[1]['dispersy_cid'] == old_source: - channel_item = 
channels_list.itemWidget(channels_list.item(index))
-                if channel_item:
-                    channel_item.subscriptions_widget.update_subscribe_button()
-                break
diff --git a/TriblerGUI/widgets/tabbuttonpanel.py b/TriblerGUI/widgets/tabbuttonpanel.py
index c9280c84f70..d3390f1a4d1 100644
--- a/TriblerGUI/widgets/tabbuttonpanel.py
+++ b/TriblerGUI/widgets/tabbuttonpanel.py
@@ -29,3 +29,9 @@ def deselect_all_buttons(self, except_select=None):
         button.setEnabled(True)
         button.setChecked(False)
     except_select.setChecked(True)
+
+    def get_selected_index(self):
+        for index, button in enumerate(self.buttons):
+            if button.isChecked():
+                return index
+        return -1
diff --git a/TriblerGUI/widgets/tablecontentdelegate.py b/TriblerGUI/widgets/tablecontentdelegate.py
new file mode 100644
index 00000000000..db74e173abd
--- /dev/null
+++ b/TriblerGUI/widgets/tablecontentdelegate.py
@@ -0,0 +1,520 @@
+from __future__ import absolute_import, division
+
+from abc import abstractmethod
+
+from PyQt5.QtCore import QEvent, QModelIndex, QObject, QRect, QSize, Qt, pyqtSignal
+from PyQt5.QtGui import QBrush, QColor, QIcon, QPainter, QPen
+from PyQt5.QtWidgets import QStyle, QStyledItemDelegate
+
+from TriblerGUI.defs import ACTION_BUTTONS, COMMIT_STATUS_COMMITTED, COMMIT_STATUS_NEW, COMMIT_STATUS_TODELETE, \
+    HEALTH_CHECKING, HEALTH_DEAD, HEALTH_ERROR, HEALTH_GOOD, HEALTH_MOOT, HEALTH_UNCHECKED
+from TriblerGUI.utilities import get_health, get_image_path
+from TriblerGUI.widgets.tableiconbuttons import DeleteIconButton, DownloadIconButton, PlayIconButton
+
+
+class TriblerButtonsDelegate(QStyledItemDelegate):
+    redraw_required = pyqtSignal()
+
+    def __init__(self, parent=None):
+        QStyledItemDelegate.__init__(self, parent)
+        self.no_index = QModelIndex()
+        self.hoverrow = None
+        self.hover_index = None
+        self.controls = []
+
+        # We have to track whether the mouse is inside the button box, to add some tolerance
+        # for vertical mouse misplacement around the buttons. The button box effectively
+        # overlaps the upper and lower rows.
+        #   row 0
+        #             --------- <- tolerance zone
+        #   row 1     |buttons|
+        #             --------- <- tolerance zone
+        #   row 2
+        # button_box_extended_border_ratio controls the thickness of the tolerance zone
+        self.button_box = QRect()
+        self.button_box_extended_border_ratio = float(0.3)
+
+    def paint_empty_background(self, painter, option):
+        super(TriblerButtonsDelegate, self).paint(painter, option, self.no_index)
+
+    def on_mouse_moved(self, pos, index):
+        # This method controls for which rows the buttons/box should be drawn
+        redraw = False
+        if self.hover_index != index:
+            self.hover_index = index
+            self.hoverrow = index.row()
+            if not self.button_box.contains(pos):
+                redraw = True
+        # Redraw when the mouse leaves the table
+        if index.row() == -1 and self.hoverrow != -1:
+            self.hoverrow = -1
+            redraw = True
+
+        for controls in self.controls:
+            redraw = controls.on_mouse_moved(pos, index) or redraw
+
+        if redraw:
+            # TODO: optimize me to only redraw the rows that actually changed!
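+            # One way to act on the TODO above (illustrative sketch only; names such
+            # as previous_row/current_row are hypothetical): instead of a blanket
+            # redraw, the slot connected to redraw_required could repaint just the
+            # two affected rows through the view, e.g.:
+            #     for row in (previous_row, current_row):
+            #         if row >= 0:
+            #             table_view.update(model.index(row, 0))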
+            self.redraw_required.emit()
+
+    @staticmethod
+    def split_rect_into_squares(rect, buttons):
+        r = rect
+        side_size = min(r.width() / len(buttons), r.height() - 2)
+        y_border = (r.height() - side_size) / 2
+        for n, button in enumerate(buttons):
+            x = r.left() + n * side_size
+            y = r.top() + y_border
+            h = side_size
+            w = side_size
+            yield QRect(x, y, w, h), button
+
+    def paint(self, painter, option, index):
+        # Draw 'hover' state highlight for every cell of a row
+        if index.row() == self.hoverrow:
+            option.state |= QStyle.State_MouseOver
+        if not self.paint_exact(painter, option, index):
+            # Draw the rest of the columns
+            super(TriblerButtonsDelegate, self).paint(painter, option, index)
+
+    @abstractmethod
+    def paint_exact(self, painter, option, index):
+        pass
+
+    def editorEvent(self, event, model, option, index):
+        for control in self.controls:
+            result = control.check_clicked(event, model, option, index)
+            if result:
+                return result
+        return False
+
+    def createEditor(self, parent, option, index):
+        # Add null editor to action buttons column
+        if index.column() == index.model().column_position[ACTION_BUTTONS]:
+            return
+
+        return super(TriblerButtonsDelegate, self).createEditor(parent, option, index)
+
+
+class SearchResultsDelegate(TriblerButtonsDelegate):
+
+    def __init__(self, parent=None):
+        TriblerButtonsDelegate.__init__(self, parent)
+        self.subscribe_control = SubscribeToggleControl(ACTION_BUTTONS)
+        self.health_status_widget = HealthStatusDisplay()
+
+        self.play_button = PlayIconButton()
+        self.download_button = DownloadIconButton()
+        self.ondemand_container = [self.play_button, self.download_button]
+        self.controls = [self.play_button, self.download_button, self.subscribe_control]
+
+    def paint_exact(self, painter, option, index):
+        data_item = index.model().data_items[index.row()]
+
+        # Draw the download controls
+        if ACTION_BUTTONS in index.model().column_position and \
+                index.column() == index.model().column_position[ACTION_BUTTONS]:
+            # Draw empty cell as the background
+            self.paint_empty_background(painter, option)
+
+            if data_item['type'] == 'channel':
+                # Draw subscribed widget
+                if index == self.hover_index:
+                    self.subscribe_control.paint_hover(painter, option.rect, index)
+                else:
+                    self.subscribe_control.paint(painter, option.rect, index, toggled=data_item['subscribed'])
+            else:
+                # When the cursor leaves the table, we must "forget" about the button_box
+                if self.hoverrow == -1:
+                    self.button_box = QRect()
+                if index.row() == self.hoverrow:
+                    extended_border_height = int(option.rect.height() * self.button_box_extended_border_ratio)
+                    button_box_extended_rect = option.rect.adjusted(0, -extended_border_height,
+                                                                    0, extended_border_height)
+                    self.button_box = button_box_extended_rect
+
+                    active_buttons = [b for b in self.ondemand_container if b.should_draw(index)]
+                    if active_buttons:
+                        for rect, button in ChannelsButtonsDelegate.split_rect_into_squares(
+                                button_box_extended_rect, active_buttons):
+                            button.paint(painter, rect, index)
+
+            return True
+
+        # Draw 'category' column
+        elif u'category' in index.model().column_position and \
+                index.column() == index.model().column_position[u'category']:
+            if data_item['type'] == 'channel':
+                category = data_item['type']
+            else:
+                category = data_item[u'category']
+
+            # Draw empty cell as the background
+            self.paint_empty_background(painter, option)
+            CategoryLabel(category).paint(painter, option, index)
+            return True
+
+        # Draw 'health' column
+        elif u'health' in index.model().column_position and index.column() == 
index.model().column_position[u'health']: + # Draw empty cell as the background + self.paint_empty_background(painter, option) + + if data_item['type'] == 'torrent': + self.health_status_widget.paint(painter, option.rect, index) + + return True + + +class ChannelsButtonsDelegate(TriblerButtonsDelegate): + + def __init__(self, parent=None): + TriblerButtonsDelegate.__init__(self, parent) + self.subscribe_control = SubscribeToggleControl(u'subscribed') + self.controls = [self.subscribe_control] + + def paint_exact(self, painter, option, index): + # Draw 'subscribed' column + if index.column() == index.model().column_position[u'subscribed']: + # Draw empty cell as the background + self.paint_empty_background(painter, option) + + if index.model().data_items[index.row()][u'status'] == 6: # LEGACY ENTRIES! + return True + if index.model().data_items[index.row()][u'my_channel']: + return True + + data_item = index.model().data_items[index.row()] + + if index == self.hover_index: + self.subscribe_control.paint_hover(painter, option.rect, index) + else: + self.subscribe_control.paint(painter, option.rect, index, toggled=data_item['subscribed']) + + return True + + +class TorrentsButtonsDelegate(TriblerButtonsDelegate): + + def __init__(self, parent=None): + TriblerButtonsDelegate.__init__(self, parent) + + # On-demand buttons + self.play_button = PlayIconButton() + self.download_button = DownloadIconButton() + self.delete_button = DeleteIconButton() + self.ondemand_container = [self.delete_button, self.play_button, self.download_button] + self.commit_control = CommitStatusControl(u'status') + + self.controls = [self.play_button, self.download_button, self.commit_control, self.delete_button] + + self.health_status_widget = HealthStatusDisplay() + + def paint_exact(self, painter, option, index): + # Draw 'health' column + if u'health' in index.model().column_position and index.column() == index.model().column_position[u'health']: + # Draw empty cell as the background + self.paint_empty_background(painter, option) + + self.health_status_widget.paint(painter, option.rect, index) + + return True + + # Draw buttons in the ACTION_BUTTONS column + elif ACTION_BUTTONS in index.model().column_position and \ + index.column() == index.model().column_position[ACTION_BUTTONS]: + # Draw empty cell as the background + self.paint_empty_background(painter, option) + + # When the cursor leaves the table, we must "forget" about the button_box + if self.hoverrow == -1: + self.button_box = QRect() + if index.row() == self.hoverrow: + extended_border_height = int(option.rect.height() * self.button_box_extended_border_ratio) + button_box_extended_rect = option.rect.adjusted(0, -extended_border_height, + 0, extended_border_height) + self.button_box = button_box_extended_rect + + active_buttons = [b for b in self.ondemand_container if b.should_draw(index)] + if active_buttons: + for rect, button in ChannelsButtonsDelegate.split_rect_into_squares( + button_box_extended_rect, active_buttons): + button.paint(painter, rect, index) + + return True + + # Draw 'commit_status' column + elif u'status' in index.model().column_position and index.column() == index.model().column_position[u'status']: + # Draw empty cell as the background + self.paint_empty_background(painter, option) + + if index == self.hover_index: + self.commit_control.paint_hover(painter, option.rect, index) + else: + self.commit_control.paint(painter, option.rect, index) + + return True + + # Draw 'category' column + elif u'category' in 
index.model().column_position and \ + index.column() == index.model().column_position[u'category']: + # Draw empty cell as the background + self.paint_empty_background(painter, option) + CategoryLabel(index.model().data_items[index.row()]['category']).paint(painter, option, index) + return True + + +class CategoryLabel(QObject): + """ + A label that indicates the category of some metadata. + """ + + def __init__(self, category, parent=None): + QObject.__init__(self, parent=parent) + self.category = category + + def paint(self, painter, option, _): + painter.save() + + lines = QPen(QColor("#B5B5B5"), 1, Qt.SolidLine, Qt.RoundCap) + painter.setPen(lines) + + text_flags = Qt.AlignHCenter | Qt.AlignVCenter | Qt.TextSingleLine + text_box = painter.boundingRect(option.rect, text_flags, self.category) + + painter.drawText(text_box, text_flags, self.category) + bezel_thickness = 4 + bezel_box = QRect(text_box.left() - bezel_thickness, + text_box.top() - bezel_thickness, + text_box.width() + bezel_thickness * 2, + text_box.height() + bezel_thickness * 2) + + painter.setRenderHint(QPainter.Antialiasing) + painter.drawRoundedRect(bezel_box, 20, 80, mode=Qt.RelativeSize) + + painter.restore() + + +class ToggleControl(QObject): + """ + Column-level controls are stateless collections of methods for visualizing cell data and + triggering corresponding events. + """ + icon_border = 4 + icon_size = 16 + h = icon_size + 2 * icon_border + w = h + size = QSize(w, h) + + clicked = pyqtSignal(QModelIndex) + + def __init__(self, column_name, on_icon, off_icon, hover_icon, parent=None): + QObject.__init__(self, parent=parent) + self.on_icon = on_icon + self.off_icon = off_icon + self.hover_icon = hover_icon + self.column_name = column_name + self.last_index = QModelIndex() + + def paint(self, painter, rect, _, toggled=False): + icon = self.on_icon if toggled else self.off_icon + x = rect.left() + (rect.width() - self.w) / 2 + y = rect.top() + (rect.height() - self.h) / 2 + icon_rect = QRect(x, y, self.w, self.h) + + icon.paint(painter, icon_rect) + + def paint_hover(self, painter, rect, _): + icon = self.hover_icon + x = rect.left() + (rect.width() - self.w) / 2 + y = rect.top() + (rect.height() - self.h) / 2 + icon_rect = QRect(x, y, self.w, self.h) + + icon.paint(painter, icon_rect) + + def check_clicked(self, event, _, __, index): + if event.type() == QEvent.MouseButtonRelease and \ + index.model().column_position[self.column_name] == index.column(): + self.clicked.emit(index) + return True + return False + + def size_hint(self, _, __): + return self.size + + def on_mouse_moved(self, pos, index): + if self.last_index != index: + # Handle the case when the cursor leaves the table + if not index.model() or (index.model().column_position[self.column_name] == index.column()): + self.last_index = index + return True + return False + + +class SubscribeToggleControl(ToggleControl): + + def __init__(self, column_name, parent=None): + ToggleControl.__init__(self, column_name, + QIcon(get_image_path("subscribed_yes.png")), + QIcon(get_image_path("subscribed_not.png")), + QIcon(get_image_path("subscribed.png")), + parent=parent) + + +class CommitStatusControl(QObject): + # Column-level controls are stateless collections of methods for visualizing cell data and + # triggering corresponding events. 
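+    # For illustration, a page widget can react to clicks on this column roughly
+    # like this (wiring sketch only; the handler name is hypothetical):
+    #     delegate.commit_control.clicked.connect(on_commit_status_clicked)
+    # The connected handler receives the QModelIndex of the clicked cell.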
+ icon_border = 4 + icon_size = 16 + h = icon_size + 2 * icon_border + w = h + size = QSize(w, h) + + clicked = pyqtSignal(QModelIndex) + new_icon = QIcon(get_image_path("plus.svg")) + committed_icon = QIcon(get_image_path("check.svg")) + todelete_icon = QIcon(get_image_path("minus.svg")) + + delete_action_icon = QIcon(get_image_path("delete.png")) + restore_action_icon = QIcon(get_image_path("undo.svg")) + + def __init__(self, column_name, parent=None): + QObject.__init__(self, parent=parent) + self.column_name = column_name + self.rect = QRect() + self.last_index = QModelIndex() + + def paint(self, painter, rect, index): + data_item = index.model().data_items[index.row()] + if self.column_name not in data_item or data_item[self.column_name] == '': + return + state = data_item[self.column_name] + icon = QIcon() + if state == COMMIT_STATUS_COMMITTED: + icon = self.committed_icon + elif state == COMMIT_STATUS_NEW: + icon = self.new_icon + elif state == COMMIT_STATUS_TODELETE: + icon = self.todelete_icon + + x = rect.left() + (rect.width() - self.w) / 2 + y = rect.top() + (rect.height() - self.h) / 2 + icon_rect = QRect(x, y, self.w, self.h) + + icon.paint(painter, icon_rect) + self.rect = rect + + def paint_hover(self, painter, rect, index): + data_item = index.model().data_items[index.row()] + if self.column_name not in data_item or data_item[self.column_name] == '': + return + state = data_item[self.column_name] + icon = QIcon() + + if state == COMMIT_STATUS_COMMITTED: + icon = self.delete_action_icon + elif state == COMMIT_STATUS_NEW: + icon = self.delete_action_icon + elif state == COMMIT_STATUS_TODELETE: + icon = self.restore_action_icon + + x = rect.left() + (rect.width() - self.w) / 2 + y = rect.top() + (rect.height() - self.h) / 2 + icon_rect = QRect(x, y, self.w, self.h) + + icon.paint(painter, icon_rect) + self.rect = rect + + def check_clicked(self, event, _, __, index): + data_item = index.model().data_items[index.row()] + if event.type() == QEvent.MouseButtonRelease and \ + index.model().column_position[self.column_name] == index.column() and \ + data_item[self.column_name] != '': + self.clicked.emit(index) + return True + return False + + def size_hint(self, _, __): + return self.size + + def on_mouse_moved(self, _, index): + if self.last_index != index: + # Handle the case when the cursor leaves the table + if not index.model(): + self.last_index = index + return True + elif index.model().column_position[self.column_name] == index.column(): + self.last_index = index + return True + return False + + +class HealthStatusDisplay(QObject): + indicator_side = 10 + indicator_border = 6 + health_colors = { + HEALTH_GOOD: QColor(Qt.green), + HEALTH_DEAD: QColor(Qt.red), + HEALTH_MOOT: QColor(Qt.yellow), + HEALTH_UNCHECKED: QColor("#B5B5B5"), + HEALTH_CHECKING: QColor(Qt.yellow), + HEALTH_ERROR: QColor(Qt.red) + + } + + def draw_text(self, painter, rect, text, color=QColor("#B5B5B5"), font=None, alignment=Qt.AlignVCenter): + painter.save() + text_flags = Qt.AlignLeft | alignment | Qt.TextSingleLine + text_box = painter.boundingRect(rect, text_flags, text) + painter.setPen(QPen(color, 1, Qt.SolidLine, Qt.RoundCap)) + if font: + painter.setFont(font) + + painter.drawText(text_box, text_flags, text) + painter.restore() + + def paint(self, painter, rect, index): + data_item = index.model().data_items[index.row()] + + if u'health' not in data_item or data_item[u'health'] == "updated": + data_item[u'health'] = get_health(data_item['num_seeders'], + data_item['num_leechers'], + 
data_item['last_tracker_check']) + health = data_item[u'health'] + + # ---------------- + # |b---b| | + # |b|i|b| 0S 0L | + # |b---b| | + # ---------------- + + r = rect + + # Indicator ellipse rectangle + y = r.top() + (r.height() - self.indicator_side) / 2 + x = r.left() + self.indicator_border + w = self.indicator_side + h = self.indicator_side + indicator_rect = QRect(x, y, w, h) + + # Paint indicator + painter.save() + painter.setBrush(QBrush(self.health_colors[health])) + painter.setPen(QPen(self.health_colors[health], 0, Qt.SolidLine, Qt.RoundCap)) + painter.drawEllipse(indicator_rect) + painter.restore() + + x = indicator_rect.left() + indicator_rect.width() + 2 * self.indicator_border + y = r.top() + w = r.width() - indicator_rect.width() - 2 * self.indicator_border + h = r.height() + text_box = QRect(x, y, w, h) + + # Paint status text, if necessary + if health in [HEALTH_CHECKING, HEALTH_UNCHECKED, HEALTH_ERROR]: + self.draw_text(painter, text_box, health) + else: + seeders = int(data_item[u'num_seeders']) + leechers = int(data_item[u'num_leechers']) + + txt = u'S' + str(seeders) + u' L' + str(leechers) + + self.draw_text(painter, text_box, txt) diff --git a/TriblerGUI/widgets/tablecontentmodel.py b/TriblerGUI/widgets/tablecontentmodel.py new file mode 100644 index 00000000000..85246f87d12 --- /dev/null +++ b/TriblerGUI/widgets/tablecontentmodel.py @@ -0,0 +1,183 @@ +from __future__ import absolute_import + +from abc import abstractmethod + +from PyQt5.QtCore import QAbstractTableModel, QModelIndex, Qt, pyqtSignal + +from TriblerGUI.defs import ACTION_BUTTONS +from TriblerGUI.utilities import format_size + + +class RemoteTableModel(QAbstractTableModel): + """ + The base model for the tables in the Tribler GUI. + It is specifically designed to fetch data from a remote data source, i.e. over a RESTful API. 
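+
+    A subclass is expected to fetch items in batches and feed them back through
+    add_items(). A minimal sketch of the pattern (the "items" endpoint name and
+    response shape are illustrative assumptions, not part of this patch):
+
+        def _get_remote_data(self, start, end, **kwargs):
+            request_mgr = TriblerRequestManager()
+            request_mgr.perform_request("items",
+                                        lambda response: self.add_items(response["items"]),
+                                        url_params={"first": start, "last": end})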
+    """
+    on_sort = pyqtSignal(str, bool)
+
+    def __init__(self, parent=None):
+        super(RemoteTableModel, self).__init__(parent)
+        self.data_items = []
+        self.item_load_batch = 50
+        self.total_items = 0  # The total number of items without pagination
+        self.infohashes = {}
+
+    @abstractmethod
+    def _get_remote_data(self, start, end, **kwargs):
+        # Implementations must feed the items they receive back through self.add_items()
+        pass
+
+    @abstractmethod
+    def _set_remote_data(self):
+        pass
+
+    def reset(self):
+        self.beginResetModel()
+        self.data_items = []
+        self.endResetModel()
+
+    def sort(self, column, order):
+        self.reset()
+        self.on_sort.emit(self.columns[column], bool(order))
+
+    def add_items(self, new_data_items):
+        if not new_data_items:
+            return
+        # If we ever want to block signals such as itemChanged, we must use a QSignalBlocker object
+        old_end = self.rowCount()
+        new_end = self.rowCount() + len(new_data_items)
+        self.beginInsertRows(QModelIndex(), old_end, new_end - 1)
+        self.data_items.extend(new_data_items)
+        self.endInsertRows()
+
+
+class TriblerContentModel(RemoteTableModel):
+    column_headers = []
+    column_width = {}
+    column_flags = {}
+    column_display_filters = {}
+
+    def __init__(self, hide_xxx=False):
+        RemoteTableModel.__init__(self, parent=None)
+        self.data_items = []
+        self.column_position = {name: i for i, name in enumerate(self.columns)}
+        self.edit_enabled = False
+        self.hide_xxx = hide_xxx
+
+    def headerData(self, num, orientation, role=None):
+        if orientation == Qt.Horizontal and role == Qt.DisplayRole:
+            return self.column_headers[num]
+
+    def _get_remote_data(self, start, end, **kwargs):
+        pass
+
+    def _set_remote_data(self):
+        pass
+
+    def rowCount(self, parent=QModelIndex()):
+        return len(self.data_items)
+
+    def columnCount(self, parent=QModelIndex()):
+        return len(self.columns)
+
+    def flags(self, index):
+        return self.column_flags[self.columns[index.column()]]
+
+    def data(self, index, role):
+        if role == Qt.DisplayRole:
+            column = self.columns[index.column()]
+            data = self.data_items[index.row()].get(column, u'UNDEFINED')
+            if column in self.column_display_filters:
+                return self.column_display_filters[column](data)
+            return data
+
+    def add_items(self, new_data_items):
+        super(TriblerContentModel, self).add_items(new_data_items)
+        # Build reverse mapping from infohashes to rows
+        items_len = len(self.data_items)
+        new_items_len = len(new_data_items)
+        for i, item in enumerate(new_data_items):
+            if "infohash" in item:
+                self.infohashes[item["infohash"]] = items_len - new_items_len + i
+
+    def reset(self):
+        self.infohashes.clear()
+        super(TriblerContentModel, self).reset()
+
+    def update_torrent_info(self, update_dict):
+        row = self.infohashes.get(update_dict["infohash"])
+        if row is not None:
+            self.data_items[row].update(**update_dict)
+            self.dataChanged.emit(self.index(row, 0), self.index(row, len(self.columns) - 1), [])
+
+
+class SearchResultsContentModel(TriblerContentModel):
+    """
+    Model for a list that shows search results.
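+
+    The type_filter attribute narrows the query to one metadata type; the
+    search page tabs drive it before re-running the search, roughly:
+
+        model.type_filter = 'channel'   # or 'torrent', or None for everything
+        controller.load_search_results(query, 1, 50)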
+    """
+    columns = [u'category', u'name', u'health', ACTION_BUTTONS]
+    column_headers = [u'Category', u'Name', u'Health', u'']
+    column_flags = {
+        u'category': Qt.ItemIsEnabled | Qt.ItemIsSelectable,
+        u'name': Qt.ItemIsEnabled | Qt.ItemIsSelectable,
+        u'health': Qt.ItemIsEnabled | Qt.ItemIsSelectable,
+        ACTION_BUTTONS: Qt.ItemIsEnabled | Qt.ItemIsSelectable
+    }
+
+    def __init__(self, **kwargs):
+        TriblerContentModel.__init__(self, **kwargs)
+        self.type_filter = None
+
+
+class ChannelsContentModel(TriblerContentModel):
+    """
+    This model represents a list of channels that can be displayed in a table view.
+    """
+    columns = [u'name', u'torrents', u'subscribed']
+    column_headers = [u'Channel name', u'Torrents', u'']
+    column_flags = {
+        u'name': Qt.ItemIsEnabled,
+        u'torrents': Qt.ItemIsEnabled,
+        u'subscribed': Qt.ItemIsEnabled,
+        ACTION_BUTTONS: Qt.ItemIsEnabled
+    }
+
+    def __init__(self, subscribed=False, **kwargs):
+        TriblerContentModel.__init__(self, **kwargs)
+        self.subscribed = subscribed
+
+
+class TorrentsContentModel(TriblerContentModel):
+    columns = [u'category', u'name', u'size', u'health', ACTION_BUTTONS]
+    column_headers = [u'Category', u'Name', u'Size', u'Health', u'']
+    column_flags = {
+        u'category': Qt.ItemIsEnabled | Qt.ItemIsSelectable,
+        u'name': Qt.ItemIsEnabled | Qt.ItemIsSelectable,
+        u'size': Qt.ItemIsEnabled | Qt.ItemIsSelectable,
+        u'health': Qt.ItemIsEnabled | Qt.ItemIsSelectable,
+        ACTION_BUTTONS: Qt.ItemIsEnabled | Qt.ItemIsSelectable
+    }
+
+    column_display_filters = {
+        u'size': lambda data: format_size(float(data)),
+    }
+
+    def __init__(self, channel_pk='', **kwargs):
+        TriblerContentModel.__init__(self, **kwargs)
+        self.channel_pk = channel_pk
+
+
+class MyTorrentsContentModel(TorrentsContentModel):
+    columns = [u'category', u'name', u'size', u'status', ACTION_BUTTONS]
+    column_headers = [u'Category', u'Name', u'Size', u'', u'']
+    column_flags = {
+        u'category': Qt.ItemIsEnabled | Qt.ItemIsSelectable,
+        u'name': Qt.ItemIsEnabled | Qt.ItemIsSelectable,
+        u'size': Qt.ItemIsEnabled | Qt.ItemIsSelectable,
+        u'status': Qt.ItemIsEnabled | Qt.ItemIsSelectable,
+        ACTION_BUTTONS: Qt.ItemIsEnabled | Qt.ItemIsSelectable
+    }
+
+    def __init__(self, channel_pk='', **kwargs):
+        TorrentsContentModel.__init__(self, channel_pk=channel_pk, **kwargs)
+        self.exclude_deleted = False
+        self.edit_enabled = True
diff --git a/TriblerGUI/widgets/tableiconbuttons.py b/TriblerGUI/widgets/tableiconbuttons.py
new file mode 100644
index 00000000000..2724d3b3bfd
--- /dev/null
+++ b/TriblerGUI/widgets/tableiconbuttons.py
@@ -0,0 +1,72 @@
+from __future__ import absolute_import, division
+
+from PyQt5.QtCore import QEvent, QModelIndex, QObject, QRect, QSize, pyqtSignal
+from PyQt5.QtGui import QIcon
+
+from TriblerGUI.utilities import get_image_path
+
+
+class IconButton(QObject):
+    icon = QIcon()
+    icon_border_ratio = float(0.1)
+    clicked = pyqtSignal(QModelIndex)
+
+    icon_border = 4
+    icon_size = 16
+    h = icon_size + 2 * icon_border
+    w = h
+    size = QSize(w, h)
+
+    def __init__(self, parent=None):
+        super(IconButton, self).__init__(parent=parent)
+        # rect property contains the active zone for the button
+        self.rect = QRect()
+        self.icon_rect = QRect()
+        self.icon_mode = QIcon.Normal
+
+    def should_draw(self, _):
+        return True
+
+    def paint(self, painter, rect, _):
+        # Update button activation rect from the drawing call
+        self.rect = rect
+
+        x = rect.left() + (rect.width() - self.w) / 2
+        y = rect.top() + (rect.height() - self.h) / 2
+        icon_rect = QRect(x, y, self.w, self.h)
+
+
self.icon.paint(painter, icon_rect, mode=self.icon_mode) + + def check_clicked(self, event, _, __, index): + if event.type() == QEvent.MouseButtonRelease and self.rect.contains(event.pos()): + self.clicked.emit(index) + return True + return False + + def on_mouse_moved(self, pos, _): + old_icon_mode = self.icon_mode + if self.rect.contains(pos): + self.icon_mode = QIcon.Selected + else: + self.icon_mode = QIcon.Normal + return old_icon_mode != self.icon_mode + + def size_hint(self, _, __): + return self.size + + +class DownloadIconButton(IconButton): + icon = QIcon(get_image_path("downloads.png")) + + +class PlayIconButton(IconButton): + icon = QIcon(get_image_path("play.png")) + + def should_draw(self, index): + return index.model().data_items[index.row()][u'category'] == u'Video' + +class DeleteIconButton(IconButton): + icon = QIcon(get_image_path("trash.svg")) + + def should_draw(self, index): + return index.model().edit_enabled diff --git a/TriblerGUI/widgets/torrentdetailscontainer.py b/TriblerGUI/widgets/torrentdetailscontainer.py new file mode 100644 index 00000000000..d6c7ee85c39 --- /dev/null +++ b/TriblerGUI/widgets/torrentdetailscontainer.py @@ -0,0 +1,13 @@ +from __future__ import absolute_import + +from PyQt5 import uic +from PyQt5.QtWidgets import QWidget + +from TriblerGUI.utilities import get_ui_file_path + + +class TorrentDetailsContainer(QWidget): + + def __init__(self, parent): + QWidget.__init__(self, parent) + uic.loadUi(get_ui_file_path('torrent_details_container.ui'), self) diff --git a/TriblerGUI/widgets/torrentdetailstabwidget.py b/TriblerGUI/widgets/torrentdetailstabwidget.py index dcbe9239f51..885ab06d6de 100644 --- a/TriblerGUI/widgets/torrentdetailstabwidget.py +++ b/TriblerGUI/widgets/torrentdetailstabwidget.py @@ -1,20 +1,21 @@ +from __future__ import absolute_import + import logging import time -from PyQt5.QtWidgets import QLabel -from PyQt5.QtWidgets import QTabWidget -from PyQt5.QtWidgets import QTreeWidget -from PyQt5.QtWidgets import QTreeWidgetItem +from PyQt5.QtCore import QModelIndex +from PyQt5.QtWidgets import QLabel, QTabWidget, QTreeWidget, QTreeWidgetItem +from TriblerGUI.defs import HEALTH_CHECKING, HEALTH_GOOD, HEALTH_MOOT, HEALTH_UNCHECKED from TriblerGUI.tribler_request_manager import TriblerRequestManager -from TriblerGUI.utilities import format_size +from TriblerGUI.utilities import format_size, get_health from TriblerGUI.widgets.ellipsebutton import EllipseButton class TorrentDetailsTabWidget(QTabWidget): """ The TorrentDetailsTabWidget is the tab that provides details about a specific selected torrent. This information - includes the generic info about the torrent, files and trackers. + includes the generic info about the torrent and trackers. 
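+
+    The table-view controllers hand it the selected row, roughly (sketch based
+    on the controllers in triblertablecontrollers.py):
+
+        details_tab_widget.update_with_torrent(selected_index, torrent_info)
+
+    after which the full record is fetched from "metadata/torrents/<infohash>".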
""" def __init__(self, parent): @@ -26,13 +27,12 @@ def __init__(self, parent): self.torrent_detail_category_label = None self.torrent_detail_size_label = None self.torrent_detail_health_label = None - self.torrent_detail_files_list = None self.torrent_detail_trackers_list = None self.check_health_button = None self.request_mgr = None self.health_request_mgr = None self.is_health_checking = False - self.last_health_check_ts = -1 + self.index = QModelIndex() def initialize_details_widget(self): """ @@ -43,44 +43,38 @@ def initialize_details_widget(self): self.torrent_detail_category_label = self.findChild(QLabel, "torrent_detail_category_label") self.torrent_detail_size_label = self.findChild(QLabel, "torrent_detail_size_label") self.torrent_detail_health_label = self.findChild(QLabel, "torrent_detail_health_label") - self.torrent_detail_files_list = self.findChild(QTreeWidget, "torrent_detail_files_list") self.torrent_detail_trackers_list = self.findChild(QTreeWidget, "torrent_detail_trackers_list") self.setCurrentIndex(0) self.check_health_button = self.findChild(EllipseButton, "check_health_button") - self.check_health_button.clicked.connect(lambda: self.on_check_health_clicked(timeout=15)) + self.check_health_button.clicked.connect(self.on_check_health_clicked) def on_torrent_info(self, torrent_info): if not torrent_info: return self.setTabEnabled(1, True) - self.setTabEnabled(2, True) - self.torrent_detail_files_list.clear() self.torrent_detail_trackers_list.clear() - for file_info in torrent_info["files"]: - item = QTreeWidgetItem(self.torrent_detail_files_list) - item.setText(0, file_info["path"]) - item.setText(1, format_size(float(file_info["size"]))) - - for tracker in torrent_info["trackers"]: - if tracker == 'DHT': - continue + for tracker in torrent_info["torrent"]["trackers"]: item = QTreeWidgetItem(self.torrent_detail_trackers_list) item.setText(0, tracker) - if torrent_info["num_seeders"] > 0: - self.torrent_detail_health_label.setText("good health (S%d L%d)" % (torrent_info["num_seeders"], - torrent_info["num_leechers"])) - elif torrent_info["num_leechers"] > 0: - self.torrent_detail_health_label.setText("unknown health (found peers)") - else: - self.torrent_detail_health_label.setText("no peers found") + if self.is_health_checking: + self.health_request_mgr.cancel_request() + self.is_health_checking = False + + self.update_health_label(torrent_info["torrent"]['num_seeders'], + torrent_info["torrent"]['num_leechers'], + torrent_info["torrent"]['last_tracker_check']) - def update_with_torrent(self, torrent_info): + # If we do not have the health of this torrent, query it + if torrent_info['torrent']['last_tracker_check'] == 0: + self.check_torrent_health() + + def update_with_torrent(self, index, torrent_info): self.torrent_info = torrent_info - self.is_health_checking = False + self.index = index self.torrent_detail_name_label.setText(self.torrent_info["name"]) if self.torrent_info["category"]: self.torrent_detail_category_label.setText(self.torrent_info["category"].lower()) @@ -92,64 +86,99 @@ def update_with_torrent(self, torrent_info): else: self.torrent_detail_size_label.setText("%s" % format_size(float(self.torrent_info["size"]))) - if self.torrent_info["num_seeders"] > 0: - self.torrent_detail_health_label.setText("good health (S%d L%d)" % (self.torrent_info["num_seeders"], - self.torrent_info["num_leechers"])) - elif self.torrent_info["num_leechers"] > 0: - self.torrent_detail_health_label.setText("unknown health (found peers)") - else: - 
self.torrent_detail_health_label.setText("no peers found") + self.update_health_label(torrent_info['num_seeders'], torrent_info['num_leechers'], + torrent_info['last_tracker_check']) self.setCurrentIndex(0) self.setTabEnabled(1, False) - self.setTabEnabled(2, False) self.request_mgr = TriblerRequestManager() - self.request_mgr.perform_request("torrents/%s" % self.torrent_info["infohash"], self.on_torrent_info) + self.request_mgr.perform_request("metadata/torrents/%s" % self.torrent_info["infohash"], self.on_torrent_info) - def on_check_health_clicked(self, timeout=15): - if self.is_health_checking and (time.time() - self.last_health_check_ts < timeout): - return + def on_check_health_clicked(self): + if not self.is_health_checking: + self.check_torrent_health() + + def update_health_label(self, seeders, leechers, last_tracker_check): + try: + health = get_health(seeders, leechers, last_tracker_check) + + if health == HEALTH_UNCHECKED: + self.torrent_detail_health_label.setText("Unknown health") + elif health == HEALTH_GOOD: + self.torrent_detail_health_label.setText("Good health (S%d L%d)" % (seeders, leechers)) + elif health == HEALTH_MOOT: + self.torrent_detail_health_label.setText("Unknown health (found peers)") + else: + self.torrent_detail_health_label.setText("No peers found") + except RuntimeError: + self._logger.error("The underlying GUI widget has already been removed.") + + def check_torrent_health(self): + infohash = self.torrent_info[u'infohash'] + + def on_cancel_health_check(): + self.is_health_checking = False + + if u'health' in self.index.model().column_position: + # TODO: DRY this copypaste! + # Check if details widget is still showing the same entry and the entry still exists in the table + try: + data_item = self.index.model().data_items[self.index.row()] + except IndexError: + return + if self.torrent_info["infohash"] != data_item[u'infohash']: + return + data_item[u'health'] = HEALTH_CHECKING + index = self.index.model().index(self.index.row(), self.index.model().column_position[u'health']) + self.index.model().dataChanged.emit(index, index, []) - self.is_health_checking = True self.torrent_detail_health_label.setText("Checking...") - self.last_health_check_ts = time.time() self.health_request_mgr = TriblerRequestManager() - self.health_request_mgr.perform_request("torrents/%s/health?timeout=%s&refresh=%d" % - (self.torrent_info["infohash"], timeout, 1), - self.on_health_response, capture_errors=False, priority="LOW", - on_cancel=self.on_cancel_health_check) + self.health_request_mgr.perform_request("metadata/torrents/%s/health" % infohash, + self.on_health_response, + url_params={"nowait": True, + "refresh": True}, + capture_errors=False, priority="LOW", + on_cancel=on_cancel_health_check) def on_health_response(self, response): - if not response: - return total_seeders = 0 total_leechers = 0 if not response or 'error' in response: - self.update_health(0, 0) # Just set the health to 0 seeders, 0 leechers + self.update_torrent_health(0, 0) # Just set the health to 0 seeders, 0 leechers return - for _, status in response['health'].iteritems(): + if 'checking' in response: + return + for _, status in response['health'].items(): if 'error' in status: continue # Timeout or invalid status - total_seeders += int(status['seeders']) total_leechers += int(status['leechers']) - self.is_health_checking = False - self.update_health(total_seeders, total_leechers) + self.update_torrent_health(total_seeders, total_leechers) - def update_health(self, seeders, leechers): + def 
update_torrent_health(self, seeders, leechers): + # Check if details widget is still showing the same entry and the entry still exists in the table try: - if seeders > 0: - self.torrent_detail_health_label.setText("good health (S%d L%d)" % (seeders, leechers)) - elif leechers > 0: - self.torrent_detail_health_label.setText("unknown health (found peers)") - else: - self.torrent_detail_health_label.setText("no peers found") - except RuntimeError: - self._logger.error("The underlying GUI widget has already been removed.") + data_item = self.index.model().data_items[self.index.row()] + except IndexError: + return + if self.torrent_info["infohash"] != data_item[u'infohash']: + return - def on_cancel_health_check(self): - self.is_health_checking = False + data_item[u'num_seeders'] = seeders + data_item[u'num_leechers'] = leechers + data_item[u'last_tracker_check'] = time.time() + data_item[u'health'] = get_health(data_item[u'num_seeders'], data_item[u'num_leechers'], + data_item[u'last_tracker_check']) + + if u'health' in self.index.model().column_position: + index = self.index.model().index(self.index.row(), self.index.model().column_position[u'health']) + self.index.model().dataChanged.emit(index, index, []) + + # Update the health label of the detail widget + self.update_health_label(data_item[u'num_seeders'], data_item[u'num_leechers'], + data_item[u'last_tracker_check']) diff --git a/TriblerGUI/widgets/torrentslistwidget.py b/TriblerGUI/widgets/torrentslistwidget.py new file mode 100644 index 00000000000..ae466d80d5b --- /dev/null +++ b/TriblerGUI/widgets/torrentslistwidget.py @@ -0,0 +1,22 @@ +from __future__ import absolute_import + +from PyQt5 import uic +from PyQt5.QtCore import pyqtSignal +from PyQt5.QtWidgets import QWidget + +from TriblerGUI.utilities import get_ui_file_path +from TriblerGUI.widgets.torrentdetailstabwidget import TorrentDetailsTabWidget + + +class TorrentsListWidget(QWidget): + on_torrent_clicked = pyqtSignal(dict) + + def __init__(self, parent=None): + QWidget.__init__(self, parent=parent) + uic.loadUi(get_ui_file_path('torrents_list.ui'), self) + + self.model = None + self.details_tab_widget = None + + self.details_tab_widget = self.findChild(TorrentDetailsTabWidget, "details_tab_widget") + self.details_tab_widget.initialize_details_widget() diff --git a/TriblerGUI/widgets/triblertablecontrollers.py b/TriblerGUI/widgets/triblertablecontrollers.py new file mode 100644 index 00000000000..650ff2ecadb --- /dev/null +++ b/TriblerGUI/widgets/triblertablecontrollers.py @@ -0,0 +1,300 @@ +""" +This file contains various controllers for table views. +The responsibility of the controller is to populate the table view with some data, contained in a specific model. +""" +from __future__ import absolute_import + +from six import text_type + +from TriblerGUI.tribler_request_manager import TriblerRequestManager + + +def sanitize_for_fts(text): + return text_type(text).translate({ord(u"\""): u"\"\"", ord(u"\'"): u"\'\'"}) + + +def to_fts_query(text): + if not text: + return "" + words = text.split(" ") + + # TODO: add support for quoted exact searches + query_list = [u'\"' + sanitize_for_fts(word) + u'\"*' for word in words] + + return " AND ".join(query_list) + + +class TriblerTableViewController(object): + """ + Base controller for a table view that displays some data. 
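+
+    All the concrete controllers in this file follow the same flow: a sort
+    change resets the model and re-requests the first page, and scrolling to
+    the bottom of the view requests the next batch. Free-text filters are
+    passed to the backend through to_fts_query() above, which produces
+    prefix-match FTS terms, e.g. (illustrative):
+
+        to_fts_query(u'big buck bunny')
+        # -> u'"big"* AND "buck"* AND "bunny"*'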
+ """ + + def __init__(self, model, table_view): + self.model = model + self.model.on_sort.connect(self._on_view_sort) + self.table_view = table_view + self.table_view.setModel(self.model) + self.table_view.verticalScrollBar().valueChanged.connect(self._on_list_scroll) + self.request_mgr = None + + def _on_list_scroll(self, event): + pass + + def _on_view_sort(self, column, ascending): + pass + + def _get_sort_parameters(self): + """ + Return a tuple (column_name, sort_asc) that indicates the sorting column/order of the table view. + """ + sort_by = self.model.columns[self.table_view.horizontalHeader().sortIndicatorSection()] + sort_asc = self.table_view.horizontalHeader().sortIndicatorOrder() + return sort_by, sort_asc + + +class SearchResultsTableViewController(TriblerTableViewController): + """ + Controller for the table view that handles search results. + """ + + def __init__(self, model, table_view, details_container, num_search_results_label=None): + TriblerTableViewController.__init__(self, model, table_view) + self.num_search_results_label = num_search_results_label + self.details_container = details_container + self.query = None + table_view.selectionModel().selectionChanged.connect(self._on_selection_changed) + + def _on_selection_changed(self, _): + selected_indices = self.table_view.selectedIndexes() + if not selected_indices: + return + + torrent_info = selected_indices[0].model().data_items[selected_indices[0].row()] + if torrent_info['type'] == 'channel': + self.details_container.hide() + self.table_view.clearSelection() + return + + self.details_container.show() + self.details_container.details_tab_widget.update_with_torrent(selected_indices[0], torrent_info) + + def _on_view_sort(self, column, ascending): + self.model.reset() + self.load_search_results(self.query, 1, 50) + + def _on_list_scroll(self, event): + if self.table_view.verticalScrollBar().value() == self.table_view.verticalScrollBar().maximum() and \ + self.model.data_items: # workaround for duplicate calls to _on_list_scroll on view creation + self.load_search_results(self.query) + + def load_search_results(self, query, start=None, end=None): + """ + Fetch search results for a given query. + """ + self.query = query + + if not start or not end: + start, end = self.model.rowCount() + 1, self.model.rowCount() + self.model.item_load_batch + + sort_by, sort_asc = self._get_sort_parameters() + url_params = { + "filter": to_fts_query(query), + "first": start if start else '', + "last": end if end else '', + "sort_by": sort_by if sort_by else '', + "sort_asc": sort_asc, + "hide_xxx": self.model.hide_xxx, + "metadata_type": self.model.type_filter if self.model.type_filter else '' + } + self.request_mgr = TriblerRequestManager() + self.request_mgr.perform_request("search", self.on_search_results, url_params=url_params) + + def on_search_results(self, response): + if not response: + return + + self.model.total_items = response['total'] + + if self.num_search_results_label: + self.num_search_results_label.setText("%d results" % response['total']) + + if response['first'] >= self.model.rowCount(): + self.model.add_items(response['results']) + + +class ChannelsTableViewController(TriblerTableViewController): + """ + This class manages a list with channels. 
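+
+    The subscribed-channels page wires it up roughly as follows (sketch based
+    on SubscribedChannelsPage.initialize):
+
+        model = ChannelsContentModel(subscribed=True)
+        controller = ChannelsTableViewController(model, subscribed_channels_list,
+                                                 num_subscribed_channels_label,
+                                                 subscribed_channels_filter_input)
+        controller.load_channels(1, 50)  # first 50 channels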
+ """ + + def __init__(self, model, table_view, num_channels_label=None, filter_input=None): + TriblerTableViewController.__init__(self, model, table_view) + self.num_channels_label = num_channels_label + self.filter_input = filter_input + + if self.filter_input: + self.filter_input.textChanged.connect(self._on_filter_input_change) + + def _on_filter_input_change(self, _): + self.model.reset() + self.load_channels(1, 50) + + def _on_view_sort(self, column, ascending): + self.model.reset() + self.load_channels(1, 50) + + def _on_list_scroll(self, event): + if self.table_view.verticalScrollBar().value() == self.table_view.verticalScrollBar().maximum() and \ + self.model.data_items: # workaround for duplicate calls to _on_list_scroll on view creation + self.load_channels() + + def load_channels(self, start=None, end=None): + """ + Fetch various channels. + """ + if not start and not end: + start, end = self.model.rowCount() + 1, self.model.rowCount() + self.model.item_load_batch + + if self.filter_input and self.filter_input.text().lower(): + filter_text = self.filter_input.text().lower() + else: + filter_text = '' + + sort_by, sort_asc = self._get_sort_parameters() + + self.request_mgr = TriblerRequestManager() + self.request_mgr.perform_request( + "metadata/channels", + self.on_channels, + url_params={ + "first": start, + "last": end, + "sort_by": sort_by, + "sort_asc": sort_asc, + "filter": to_fts_query(filter_text), + "hide_xxx": self.model.hide_xxx, + "subscribed": self.model.subscribed}) + + def on_channels(self, response): + if not response: + return + + self.model.total_items = response['total'] + + if self.num_channels_label: + self.num_channels_label.setText("%d items" % response['total']) + + if response['first'] >= self.model.rowCount(): + self.model.add_items(response['channels']) + + +class TorrentsTableViewController(TriblerTableViewController): + """ + This class manages a list with torrents. + """ + + def __init__(self, model, torrents_container, num_torrents_label=None, filter_input=None): + TriblerTableViewController.__init__(self, model, torrents_container.content_table) + self.torrents_container = torrents_container + self.num_torrents_label = num_torrents_label + self.filter_input = filter_input + torrents_container.content_table.selectionModel().selectionChanged.connect(self._on_selection_changed) + + if self.filter_input: + self.filter_input.textChanged.connect(self._on_filter_input_change) + + def _on_selection_changed(self, _): + selected_indices = self.table_view.selectedIndexes() + if not selected_indices: + return + + self.torrents_container.details_container.show() + torrent_info = selected_indices[0].model().data_items[selected_indices[0].row()] + self.torrents_container.details_tab_widget.update_with_torrent(selected_indices[0], torrent_info) + + def _on_filter_input_change(self, _): + self.model.reset() + self.load_torrents(1, 50) + + def _on_view_sort(self, column, ascending): + self.model.reset() + self.load_torrents(1, 50) + + def _on_list_scroll(self, event): + if self.table_view.verticalScrollBar().value() == self.table_view.verticalScrollBar().maximum() and \ + self.model.data_items: # workaround for duplicate calls to _on_list_scroll on view creation + self.load_torrents() + + def load_torrents(self, start=None, end=None): + """ + Fetch various torrents. 
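+
+        If start and end are omitted, the next batch is requested: rows
+        rowCount() + 1 through rowCount() + item_load_batch. This is what keeps
+        the infinite scroll appending pages, e.g. (controller is illustrative):
+
+            controller.load_torrents(1, 50)  # explicit first page
+            controller.load_torrents()       # continue from the current row count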
+ """ + if not start and not end: + start, end = self.model.rowCount() + 1, self.model.rowCount() + self.model.item_load_batch + + if self.filter_input and self.filter_input.text().lower(): + filter_text = self.filter_input.text().lower() + else: + filter_text = '' + + sort_by, sort_asc = self._get_sort_parameters() + + self.request_mgr = TriblerRequestManager() + self.request_mgr.perform_request( + "metadata/channels/%s/torrents" % self.model.channel_pk, + self.on_torrents, + url_params={ + "first": start, + "last": end, + "sort_by": sort_by, + "sort_asc": sort_asc, + "hide_xxx": self.model.hide_xxx, + "filter": to_fts_query(filter_text)}) + + def on_torrents(self, response): + if not response: + return None + + self.model.total_items = response['total'] + + if self.num_torrents_label: + self.num_torrents_label.setText("%d items" % response['total']) + + if response['first'] >= self.model.rowCount(): + self.model.add_items(response['torrents']) + return True + + +class MyTorrentsTableViewController(TorrentsTableViewController): + """ + This class manages the list with the torrents in your own channel. + """ + + def load_torrents(self, start=None, end=None): + """ + Fetch various torrents. + """ + if not start and not end: + start, end = self.model.rowCount() + 1, self.model.rowCount() + self.model.item_load_batch + + if self.filter_input and self.filter_input.text().lower(): + filter_text = self.filter_input.text().lower() + else: + filter_text = '' + + sort_by, sort_asc = self._get_sort_parameters() + + self.request_mgr = TriblerRequestManager() + self.request_mgr.perform_request( + "mychannel/torrents", + self.on_torrents, + url_params={ + "sort_by": sort_by, + "sort_asc": sort_asc, + "filter": to_fts_query(filter_text), + "exclude_deleted": self.model.exclude_deleted}) + + def on_torrents(self, response): + if super(MyTorrentsTableViewController, self).on_torrents(response): + self.table_view.window().edit_channel_page.channel_dirty = response['dirty'] + self.table_view.window().edit_channel_page.update_channel_commit_views() diff --git a/debian/control b/debian/control index 181bbfefbdb..51a00afbb67 100644 --- a/debian/control +++ b/debian/control @@ -16,21 +16,15 @@ Architecture: all Depends: ${misc:Depends}, ${python:Depends}, ffmpeg | libav-tools, - libjs-excanvas, - libjs-mootools, libsodium18 | libsodium13 | libsodium23, libx11-6, - python-apsw, python-chardet, python-cherrypy3, python-configobj, python-cryptography, python-decorator, - python-feedparser, - python-leveldb, python-libnacl, python-libtorrent (>= 0.16.18), - python-m2crypto, python-matplotlib, python-meliae, python-netifaces, diff --git a/debian/tribler.install b/debian/tribler.install index d8cc6d77139..d25bf6fbfbc 100644 --- a/debian/tribler.install +++ b/debian/tribler.install @@ -1,6 +1,5 @@ Tribler usr/share/tribler TriblerGUI usr/share/tribler -Tribler/Core/CacheDB/schema_sdb_v*.sql usr/share/tribler/Tribler Tribler/Main/Build/Ubuntu/tribler.desktop usr/share/applications Tribler/Main/Build/Ubuntu/tribler.xpm usr/share/pixmaps Tribler/Main/Build/Ubuntu/tribler_big.xpm usr/share/pixmaps diff --git a/doc/building/building_on_windows.rst b/doc/building/building_on_windows.rst index e28046002aa..50d92904683 100644 --- a/doc/building/building_on_windows.rst +++ b/doc/building/building_on_windows.rst @@ -27,10 +27,9 @@ Inside the ``build`` folder, put the following items: 1. A folder ``certs`` containing a ``.pfx`` key. In our case it's named ``swarmplayerprivatekey.pfx``. 
   Make sure to rename paths in ``makedist_win.bat`` to match your file name.
 2. A folder ``vlc`` that contains ``libvlc.dll``, ``libvlccore.dll`` and a directory ``plugins`` that contains the VLC plugins.
-3. ``vc_redist_90.exe`` (Microsoft Visual C++ 2008 Redistributable Package), which is available `here `_. In case you build 32 bit, get the x86 version `here `_. Don't forget to rename the file.
-4. ``vc_redist_110.exe`` (Visual C++ Redistributable for Visual Studio 2012), which is available `here `_. In case you build 32 bit, get the x86 version. Once more, don't forget to rename the file.
-5. ``libsodium.dll`` which can be downloaded from `libsodium.org `_ (as of writing version 1.0.8).
-6. The openssl dll files ``libeay32.dll``, ``libssl32.dll`` and ``ssleay32.dll`` (place them in a directory named ``openssl``).
+3. ``vc_redist_110.exe`` (Visual C++ Redistributable for Visual Studio 2012), which is available `here `_. In case you build 32 bit, get the x86 version. Once more, don't forget to rename the file.
+4. ``libsodium.dll`` which can be downloaded from `libsodium.org `_ (as of writing version 1.0.8).
+5. The openssl dll files ``libeay32.dll``, ``libssl32.dll`` and ``ssleay32.dll`` (place them in a directory named ``openssl``).
 
 Then, set a ``PASSWORD`` `environment variable `_ whose value is the password of your ``.pfx`` file.
diff --git a/doc/conf.py b/doc/conf.py
index 1228feb64bf..f06310788ad 100644
--- a/doc/conf.py
+++ b/doc/conf.py
@@ -18,9 +18,10 @@
 #
 import os
 import sys
-sys.path.append(os.path.join(os.path.dirname(__name__), '..'))
+
 from mock import Mock as MagicMock
+sys.path.append(os.path.join(os.path.dirname(__name__), '..'))
 
 # Mock various libraries
 _classnames = {
@@ -37,9 +38,9 @@ def __getattr__(cls, name):
 MOCK_MODULES = ['twisted', 'twisted.web', 'twisted.web.server', 'twisted.web.client', 'twisted.web.http_headers',
                 'twisted.internet', 'twisted.internet.defer', 'twisted.internet.interfaces', 'twisted.internet.task',
                 'twisted.python', 'twisted.python.failure', 'twisted.python.threadable', 'twisted.internet.base',
-                'twisted.internet.error', 'twisted.internet.protocol', 'apsw', 'libtorrent', 'treq',
+                'twisted.internet.error', 'twisted.internet.protocol', 'libtorrent', 'treq',
                 'Tribler.community.tunnel.crypto.cryptowrapper', 'Tribler.community.market.core.orderbook',
-                'M2Crypto', 'M2Crypto.EC', 'M2Crypto.BIO', 'Tribler.dispersy.crypto', 'psutil', 'meliae', 'libnacl',
+                'Tribler.dispersy.crypto', 'psutil', 'meliae', 'libnacl',
                 'decorator', 'libnacl.dual', 'libnacl.sign', 'libnacl.encode', 'libnacl.public', 'networkx',
                 'netifaces']
 sys.modules.update((mod_name, Mock()) for mod_name in MOCK_MODULES)
diff --git a/doc/development/development_on_linux.rst b/doc/development/development_on_linux.rst
index f8541fabe66..88bbbe892e7 100644
--- a/doc/development/development_on_linux.rst
+++ b/doc/development/development_on_linux.rst
@@ -7,7 +7,7 @@ First, install the required dependencies by executing the following command in y
 
.. 
code-block:: none - sudo apt-get install libav-tools libsodium18 libx11-6 python-apsw python-cherrypy3 python-cryptography python-decorator python-feedparser python-leveldb python-libtorrent python-matplotlib python-meliae python-m2crypto python-netifaces python-pil python-psutil python-pyasn1 python-scipy python-twisted python2.7 vlc python-chardet python-configobj python-pyqt5 python-pyqt5.qtsvg python-libnacl + sudo apt-get install libav-tools libsodium18 libx11-6 python-cherrypy3 python-cryptography python-decorator python-libtorrent python-matplotlib python-meliae python-netifaces python-pil python-psutil python-pyasn1 python-scipy python-twisted python2.7 vlc python-chardet python-configobj python-pyqt5 python-pyqt5.qtsvg python-libnacl Next, download the latest .deb file from `here `_. @@ -71,4 +71,4 @@ Execute the following command in your terminal: .. code-block:: none - pacman -S libsodium libtorrent-rasterbar python2-pyqt5 qt5-svg phonon-qt5-vlc python2-apsw python2-cherrypy python2-cryptography python2-decorator python2-feedparser python2-chardet python2-m2crypto python2-netifaces python2-plyvel python2-twisted python2-configobj python2-matplotlib python2-networkx python2-psutil python2-scipy python2-libnacl + pacman -S libsodium libtorrent-rasterbar python2-pyqt5 qt5-svg phonon-qt5-vlc python2-cherrypy python2-cryptography python2-decorator python2-chardet python2-netifaces python2-twisted python2-configobj python2-matplotlib python2-networkx python2-psutil python2-scipy python2-libnacl diff --git a/doc/development/development_on_osx.rst b/doc/development/development_on_osx.rst index e543fa93d55..32a3ee1b0b0 100644 --- a/doc/development/development_on_osx.rst +++ b/doc/development/development_on_osx.rst @@ -13,7 +13,7 @@ To install the Tribler dependencies using MacPorts, please run the following com .. code-block:: bash - sudo port -N install git ffmpeg qt5-qtcreator libtorrent-rasterbar gmp mpfr libmpc libsodium py27-m2crypto py27-apsw py27-Pillow py27-twisted py27-cherrypy3 py27-cffi py27-chardet py27-configobj py27-gmpy2 py27-pycparser py27-numpy py27-idna py27-leveldb py27-cryptography py27-decorator py27-feedparser py27-netifaces py27-service_identity py27-asn1-modules py27-pyinstaller py27-pyqt5 py27-sqlite py27-matplotlib py27-libnacl + sudo port -N install git ffmpeg qt5-qtcreator libtorrent-rasterbar gmp mpfr libmpc libsodium py27-Pillow py27-twisted py27-cherrypy3 py27-cffi py27-chardet py27-configobj py27-gmpy2 py27-pycparser py27-numpy py27-idna py27-cryptography py27-decorator py27-netifaces py27-service_identity py27-asn1-modules py27-pyinstaller py27-pyqt5 py27-sqlite py27-matplotlib py27-libnacl HomeBrew -------- @@ -55,51 +55,6 @@ Next, download PyQt5 from `here `_, extract it and install it: - -.. code-block:: none - - ./config --prefix=/usr/local - make && make test - sudo make install - openssl version # this should be 0.98 - -Also Swig 3.0.4 is required for the compilation of the M2Crypto library. The easiest way to install it, it to download Swig 3.0.4 from source `here `_ and compile it using: - -.. code-block:: none - - ./configure - make - sudo make install - -Note: if you get an error about a missing PCRE library, install it with brew using ``brew install pcre``. - -Now we can install M2Crypto. First download the `source `_ (version 0.22.3 is confirmed to work on El Capitan and Yosemite) and install it: - -.. 
code-block:: none - - python setup.py build build_ext --openssl=/usr/local - sudo python setup.py install build_ext --openssl=/usr/local - -Reopen your terminal window and test it out by executing: - -.. code-block:: none - - python -c "import M2Crypto" - -Apsw -~~~~ - -Apsw can be installed by brew but this does not seem to work to compile the last version (the Clang compiler uses the ``sqlite.h`` include shipped with Xcode which is outdated). Instead, the source should be downloaded from their `Github repository `_ (make sure to download a release version) and compiled using: - -.. code-block:: none - - sudo python setup.py fetch --all build --enable-all-extensions install test - python -c "import apsw" # verify whether apsw is successfully installed - Libtorrent ~~~~~~~~~~ @@ -139,7 +94,7 @@ There are a bunch of other packages that can easily be installed using pip and b brew install homebrew/python/pillow gmp mpfr libmpc libsodium sudo easy_install pip pip install --user cython # Needs to be installed first for meliae - pip install --user cherrypy cffi chardet configobj cryptography decorator feedparser gmpy2 idna leveldb meliae netifaces numpy pillow psutil pyasn1 pycparser scipy twisted service_identity libnacl bitcoinlib + pip install --user cherrypy cffi chardet configobj cryptography decorator gmpy2 idna meliae netifaces numpy pillow psutil pyasn1 pycparser scipy twisted service_identity libnacl bitcoinlib If you encounter any error during the installation of Pillow, make sure that libjpeg and zlib are installed. They can be installed using: diff --git a/doc/development/development_on_windows.rst b/doc/development/development_on_windows.rst index 91147ab6274..8e83f3e78c9 100644 --- a/doc/development/development_on_windows.rst +++ b/doc/development/development_on_windows.rst @@ -24,18 +24,6 @@ One other DLL that was missing was ``MSVCR110.DLL``, which belongs to the `Visua After installing these two packages, there should be no more import errors. It may be required to enable Visual C++ Toolset on the Command Line if Native Command Line tool is not available. You can do that by following article `here `__. -M2Crypto --------- - -The first package to be installed is M2Crypto which can be installed using pip (the M2Crypto binary is precompiled): - -.. code-block:: none - - pip install M2CryptoWin64 # use M2CryptoWin32 for the 32-bit version of M2Crypto - python -c "import M2Crypto" # test whether M2Crypto can be successfully imported - -If the second statement does not raise an error, M2Crypto is successfully installed. - PyQt5 ----- @@ -79,14 +67,6 @@ pyWin32 Tools In order to access some of the Windows API functions, pywin32 should be installed. The pywin32 installer can be downloaded from `Sourceforge `__ and make sure to select the amd64 version and the version compatible with Python 2.7. -apsw ----- -The apsw (Another Python SQLite Wrapper) installer can be downloaded from `GitHub `__. Again, make sure to select the amd64 version that is compatible with Python 2.7. You can test whether it is installed correctly by running: - -.. code-block:: none - - python -c "import apsw" - libtorrent ---------- @@ -121,33 +101,6 @@ Libsodium can be download as precompiled binary from `their website `__ and place it inside your python site-packages directory. Then, check check if installation was successful: - -.. code-block:: none - - python -c "import leveldb" # this should work without any error - -**Alternatively,** -you will compile leveldb from source. 
First, download the source code from `GitHub `__ (either clone the repository or download the source code as zip). The readme on this repo contains some basic instructions on how to compile leveldb. - -Next, open the ``levedb_ext.sln`` file in Visual Studio. This guide is based on the ``x64 release`` configuration. If you want to build a 32-bit leveldb project, change the configuration to ``win32 release``. - -You should edit the file paths of the include directories and the linker directories. These can be edited by right clicking on the project and selecting ``properties``. You will need to update ``additional include directories`` (under C/C++ -> general) to point to your Python include directory (often located in ``C:\\Python27\\include``). This is needed for the compilation of the Python bindings. Also, make sure that the following ``preprocessor definitions`` (found under C/C++ -> preprocessor) are defined: ``WIN32`` and ``LEVELDB_PLATFORM_WINDOWS``. - -Next, ``additional library directories`` should be adjusted, found under Linker -> General. You should add the directory where your Python libraries are residing, often in ``C:\\Python27\\libs``. - -Compile by pressing the ``build leveldb_ext`` in the build menu. If any errors are showing up during compilation, please refer to the Visual Studio log file and check what's going wrong. Often, this should be a missing include/linker directory. If compilation is successful, a ``leveldb_ext.pyd`` file should have been created in the project directory. Copy this file to your site-packages location and rename it to ``leveldb.pyd`` so Python is able to find it. You can test whether your binary is working by using the following command which should execute without any errors: - -.. code-block:: none - - python -c "import leveldb" - - VLC --- @@ -172,7 +125,7 @@ There are some additional packages which should be installed. They can easily be .. 
code-block:: none pip install cython # Needs to be installed first for meliae - pip install bitcoinlib cherrypy chardet configobj cryptography decorator feedparser meliae netifaces networkx pillow psutil twisted libnacl + pip install bitcoinlib cherrypy chardet configobj cryptography decorator meliae netifaces networkx pillow psutil twisted libnacl Running Tribler --------------- diff --git a/doc/restapi/introduction.rst b/doc/restapi/introduction.rst index ba489b35e7d..90fff5e0e89 100644 --- a/doc/restapi/introduction.rst +++ b/doc/restapi/introduction.rst @@ -40,7 +40,7 @@ If a valid request of a client caused a recoverable error the response will have { "error": { "handled": True, - "code": "DuplicateChannelNameError", + "code": "DuplicateChannelIdError", "message": "Channel name already exists: foo" } } diff --git a/Tribler/Test/mocking/__init__.py b/gen_db.py similarity index 100% rename from Tribler/Test/mocking/__init__.py rename to gen_db.py diff --git a/logger.conf b/logger.conf index 462e3a03399..5005203fa26 100644 --- a/logger.conf +++ b/logger.conf @@ -1,26 +1,19 @@ [loggers] -keys=root,candidates,twisted, +keys=root,twisted, TriblerGUI, RequestCache, TriblerLaunchMany, - Dispersy, - Timeline, - IPv8toDispersyAdapter, - LibtorrentMgr, LibtorrentDownloadImpl, CreditMiningManager, CreditMiningSource, - SQLiteCacheDB, TrustChainDB, MarketDB, - AllChannelCommunity, DiscoveryCommunity, - SearchCommunity, HiddenTunnelCommunity, TriblerTunnelCommunity, TrustChainCommunity, @@ -28,6 +21,8 @@ keys=root,candidates,twisted, DHTDiscoveryCommunity, PreviewChannelCommunity, MarketCommunity, + + MetadataStore, TunnelDispatcher, TunnelMain, @@ -92,13 +87,7 @@ args=(4*1024,) # 4KB buffer [logger_root] level=INFO -handlers=default,infoMemoryHandler,errorHandler - -[logger_candidates] -level=ERROR -qualname=dispersy-stats-detailed-candidates -handlers=default -propagate=0 +handlers=default,infoMemoryHandler,errorHandler,debugging [logger_twisted] level=ERROR @@ -123,24 +112,6 @@ qualname=TriblerLaunchMany handlers=default propagate=0 -[logger_Dispersy] -level=INFO -qualname=Dispersy -handlers=default -propagate=0 - -[logger_Timeline] -level=INFO -qualname=Timeline -handlers=default -propagate=0 - -[logger_IPv8toDispersyAdapter] -level=INFO -qualname=IPv8toDispersyAdapter -handlers=default -propagate=0 - [logger_LibtorrentMgr] level=INFO qualname=LibtorrentMgr @@ -167,12 +138,6 @@ propagate=0 ; *** database loggers *** -[logger_SQLiteCacheDB] -level=INFO -qualname=SQLiteCacheDB -handlers=default -propagate=0 - [logger_TrustChainDB] level=INFO qualname=TrustChainDB @@ -187,24 +152,12 @@ propagate=0 ; *** community loggers *** -[logger_AllChannelCommunity] -level=INFO -qualname=AllChannelCommunity -handlers=default -propagate=0 - [logger_DiscoveryCommunity] level=INFO qualname=DiscoveryCommunity handlers=default propagate=0 -[logger_SearchCommunity] -level=INFO -qualname=SearchCommunity -handlers=default -propagate=0 - [logger_HiddenTunnelCommunity] level=ERROR qualname=HiddenTunnelCommunity @@ -292,3 +245,9 @@ level=INFO qualname=ResourceMonitor handlers=default propagate=0 + +[logger_MetadataStore] +level=DEBUG +qualname=MetadataStore +handlers=default +propagate=1 diff --git a/run_tribler.py b/run_tribler.py index 427d76b4aa6..3a80272bc20 100644 --- a/run_tribler.py +++ b/run_tribler.py @@ -1,14 +1,16 @@ +from __future__ import absolute_import + +import logging.config import os +import signal import sys -import logging.config -import signal +from check_os import check_and_enable_code_tracing, 
check_environment, check_free_space, enable_fault_handler, \
+    error_and_exit, set_process_priority, setup_gui_logging, should_kill_other_tribler_instances
 
 from Tribler.Core.Config.tribler_config import TriblerConfig
 from Tribler.Core.exceptions import TriblerException
-from check_os import check_environment, check_free_space, error_and_exit, setup_gui_logging, \
-    should_kill_other_tribler_instances, enable_fault_handler, set_process_priority, \
-    check_and_enable_code_tracing
+
 
 # https://github.com/Tribler/tribler/issues/3702
 # We need to make sure that anyone running cp65001 can print to the stdout before we print anything.
@@ -114,13 +116,6 @@ def start_tribler():
 
     app = TriblerApplication("triblerapp", sys.argv)
 
-    if app.is_running():
-        for arg in sys.argv[1:]:
-            if os.path.exists(arg) and arg.endswith(".torrent"):
-                app.send_message("file:%s" % arg)
-            elif arg.startswith('magnet'):
-                app.send_message(arg)
-        sys.exit(1)
 
     window = TriblerWindow()
     window.setWindowTitle("Tribler")
diff --git a/tribler.spec b/tribler.spec
index 5aa9ba69d62..72666a3b0d8 100644
--- a/tribler.spec
+++ b/tribler.spec
@@ -48,10 +48,6 @@ if sys.platform.startswith('darwin'):
 
 excluded_libs = ['wx', 'bitcoinlib', 'PyQt4']
 
-# We use plyvel on Windows since leveldb is unable to deal with unicode paths
-if sys.platform == 'win32':
-    excluded_libs.append('leveldb')
-
 # Pony dependencies; each package needs to be added separately; added as hidden import
 pony_deps = ['pony', 'pony.orm', 'pony.orm.dbproviders', 'pony.orm.dbproviders.sqlite']
 
diff --git a/twisted/plugins/market_plugin.py b/twisted/plugins/market_plugin.py
index 3d061a4083f..f9d77c8f530 100644
--- a/twisted/plugins/market_plugin.py
+++ b/twisted/plugins/market_plugin.py
@@ -1,25 +1,24 @@
 """
 This twistd plugin makes it possible to start Tribler headless using the twistd command.
""" +from __future__ import absolute_import + import os import signal -from Tribler.Core.Config.tribler_config import TriblerConfig -from twisted.application.service import MultiService, IServiceMaker +from twisted.application.service import IServiceMaker, MultiService from twisted.conch import manhole_tap from twisted.internet import reactor from twisted.plugin import IPlugin from twisted.python import usage from twisted.python.log import msg + from zope.interface import implements +from Tribler.Core.Config.tribler_config import TriblerConfig from Tribler.Core.Modules.process_checker import ProcessChecker from Tribler.Core.Session import Session -# Register yappi profiler -from Tribler.community.market.community import MarketCommunity -from Tribler.dispersy.utils import twistd_yappi - class Options(usage.Options): optParameters = [ @@ -70,15 +69,9 @@ def signal_handler(sig, _): config = TriblerConfig() config.set_torrent_checking_enabled(False) - config.set_megacache_enabled(True) - config.set_dispersy_enabled(False) - config.set_mainline_dht_enabled(True) - config.set_torrent_collecting_enabled(False) config.set_libtorrent_enabled(False) config.set_http_api_enabled(True) config.set_video_server_enabled(False) - config.set_torrent_search_enabled(False) - config.set_channel_search_enabled(False) config.set_credit_mining_enabled(False) config.set_dummy_wallets_enabled(True) config.set_popularity_community_enabled(False) diff --git a/twisted/plugins/tribler_plugin.py b/twisted/plugins/tribler_plugin.py index 538fe081b52..2bad4c135fe 100644 --- a/twisted/plugins/tribler_plugin.py +++ b/twisted/plugins/tribler_plugin.py @@ -1,30 +1,28 @@ """ This twistd plugin enables to start Tribler headless using the twistd command. """ -from socket import inet_aton -from datetime import date +from __future__ import absolute_import + import os +import re import signal import time +from datetime import date +from socket import inet_aton -import re -from twisted.application.service import MultiService, IServiceMaker +from twisted.application.service import IServiceMaker, MultiService from twisted.conch import manhole_tap from twisted.internet import reactor from twisted.plugin import IPlugin from twisted.python import usage from twisted.python.log import msg + from zope.interface import implements from Tribler.Core.Config.tribler_config import TriblerConfig from Tribler.Core.Modules.process_checker import ProcessChecker from Tribler.Core.Session import Session -# Register yappi profiler -from Tribler.community.allchannel.community import AllChannelCommunity -from Tribler.community.search.community import SearchCommunity -from Tribler.dispersy.utils import twistd_yappi - def check_ipv8_bootstrap_override(val): parsed = re.match(r"^([\d\.]+)\:(\d+)$", val) @@ -47,15 +45,13 @@ class Options(usage.Options): optParameters = [ ["manhole", "m", 0, "Enable manhole telnet service listening at the specified port", int], ["statedir", "s", None, "Use an alternate statedir", str], - ["restapi", "p", -1, "Use an alternate port for the REST API", int], - ["dispersy", "d", -1, "Use an alternate port for Dispersy", int], + ["restapi", "p", 8085, "Use an alternate port for the REST API", int], + ["ipv8", "i", -1, "Use an alternate port for IPv8", int], ["libtorrent", "l", -1, "Use an alternate port for libtorrent", int], ["ipv8_bootstrap_override", "b", None, "Force the usage of specific IPv8 bootstrap server (ip:port)", check_ipv8_bootstrap_override] ] optFlags = [ - ["auto-join-channel", "a", "Automatically join a channel 
when discovered"], - ["log-incoming-searches", "i", "Write information about incoming remote searches to a file"], ["testnet", "t", "Join the testnet"] ] @@ -119,10 +115,10 @@ def signal_handler(sig, _): config.set_http_api_enabled(True) config.set_http_api_port(options["restapi"]) - if options["dispersy"] > 0: - config.set_dispersy_port(options["dispersy"]) - elif options["dispersy"] == 0: - config.set_dispersy_enabled(False) + if options["ipv8"] > 0: + config.set_ipv8_port(options["ipv8"]) + elif options["ipv8"] == 0: + config.set_ipv8_enabled(False) if options["libtorrent"] != -1 and options["libtorrent"] > 0: config.set_libtorrent_port(options["libtorrent"]) @@ -137,18 +133,6 @@ def signal_handler(sig, _): self.session.start().addErrback(lambda failure: self.shutdown_process(failure.getErrorMessage())) msg("Tribler started") - if "auto-join-channel" in options and options["auto-join-channel"]: - msg("Enabling auto-joining of channels") - for community in self.session.get_dispersy_instance().get_communities(): - if isinstance(community, AllChannelCommunity): - community.auto_join_channel = True - - if "log-incoming-searches" in options and options["log-incoming-searches"]: - msg("Logging incoming remote searches") - for community in self.session.get_dispersy_instance().get_communities(): - if isinstance(community, SearchCommunity): - community.log_incoming_searches = self.log_incoming_remote_search - def makeService(self, options): """ Construct a Tribler service. diff --git a/twisted/plugins/tunnel_helper_plugin.py b/twisted/plugins/tunnel_helper_plugin.py index 1c73e7eb67c..72faed54b93 100644 --- a/twisted/plugins/tunnel_helper_plugin.py +++ b/twisted/plugins/tunnel_helper_plugin.py @@ -23,7 +23,31 @@ from Tribler.Core.Config.tribler_config import TriblerConfig from Tribler.Core.Session import Session from Tribler.Core.simpledefs import NTFY_REMOVE, NTFY_TUNNEL -from Tribler.dispersy.tool.clean_observers import clean_twisted_observers + +logger = logging.getLogger(__name__) + + +def clean_twisted_observers(publisher=None): + try: + from twisted.logger import LogPublisher, LimitedHistoryLogObserver, globalLogPublisher + if not publisher: + publisher = globalLogPublisher + except ImportError: + logger.debug("Running an older version of twisted, no need to clean the observers") + return + + logger.debug("Looking for rogue observers in %r", publisher._observers) + + for observer in publisher._observers: + if isinstance(observer, LogPublisher): + clean_twisted_observers(observer) + + elif isinstance(observer, LimitedHistoryLogObserver): + publisher.removeObserver(observer) + logger.debug("Removing observer %s", observer) + + else: + logger.debug("Leaving alone observer %s", observer) def check_api_port(val): @@ -52,7 +76,7 @@ def check_ipv8_address(val): def check_ipv8_bootstrap_override(val): - parsed = re.match(r"^([\d\.]+)\:(\d+)$", val) + parsed = re.match(r"^([\d\.]+)\:(\d+)$", val) if not parsed: raise ValueError("Invalid bootstrap address:port") @@ -155,20 +179,14 @@ def start(self): config.set_tunnel_community_random_slots(self.options["random_slots"]) config.set_tunnel_community_competing_slots(self.options["competing_slots"]) config.set_torrent_checking_enabled(False) - config.set_megacache_enabled(False) - config.set_dispersy_enabled(False) config.set_ipv8_enabled(True) - config.set_torrent_collecting_enabled(False) config.set_libtorrent_enabled(False) config.set_video_server_enabled(False) - config.set_dispersy_port(ipv8_port) + config.set_ipv8_port(ipv8_port) 
         config.set_ipv8_address(self.options["ipv8_address"])
-        config.set_torrent_search_enabled(False)
-        config.set_channel_search_enabled(False)
         config.set_trustchain_enabled(True)
         config.set_credit_mining_enabled(False)
         config.set_market_community_enabled(False)
-        config.set_mainline_dht_enabled(False)
         config.set_dht_enabled(True)
         config.set_tunnel_community_exitnode_enabled(bool(self.options["exit"]))
         config.set_popularity_community_enabled(False)
diff --git a/win/makedist_win.bat b/win/makedist_win.bat
index 93bd90f272b..3a1b0f3e25c 100644
--- a/win/makedist_win.bat
+++ b/win/makedist_win.bat
@@ -59,7 +59,6 @@ mkdir dist\tribler\tools
 copy win\tools\reset*.bat dist\tribler\tools
 
-REM Laurens, 2016-04-20: Copy the redistributables of 2008, 2012 and 2015 and the VLC installer to the install dir
-copy C:\build\vc_redist_90.exe dist\tribler
+REM Laurens, 2016-04-20: Copy the redistributables of 2012 and 2015 and the VLC installer to the install dir
 copy C:\build\vc_redist_110.exe dist\tribler
 copy C:\build\vc_redist_140.exe dist\tribler
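Reviewer note: the new GUI controllers added above page through the core's REST API with 1-based ``first``/``last`` row indices, and only append a batch when ``response['first']`` lies past the current row count. Below is a minimal sketch of the same request issued outside the GUI. It assumes a local core with the HTTP API enabled on port 8085 (the new default in ``tribler_plugin.py``) and uses the third-party ``requests`` package as a stand-in for the GUI's ``TriblerRequestManager``; the URL root and the parameter values are illustrative, not taken from this patch.

.. code-block:: python

    import requests

    def fetch_channels(first=1, last=50, sort_by="title", sort_asc=1, filter_text=""):
        """Fetch one page of channels, mirroring ChannelsTableViewController.load_channels."""
        response = requests.get(
            "http://localhost:8085/metadata/channels",  # endpoint from the patch; host/port assumed
            params={
                "first": first,         # 1-based index of the first row in this batch
                "last": last,           # index of the last row in this batch
                "sort_by": sort_by,
                "sort_asc": sort_asc,
                "filter": filter_text,  # free-text query, cf. to_fts_query() in the GUI
            },
        )
        response.raise_for_status()
        return response.json()

    page = fetch_channels()
    print("%d channels in total, %d in this batch" % (page["total"], len(page["channels"])))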
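Related: ``doc/restapi/introduction.rst`` now documents the error code ``DuplicateChannelIdError`` instead of ``DuplicateChannelNameError``, so clients that match on the error envelope need updating. A small, hypothetical handler for the documented envelope (the payload is the example from the docs; ``raise_on_handled_error`` is ours, not part of Tribler):

.. code-block:: python

    import json

    def raise_on_handled_error(body):
        """Turn the documented {"error": {...}} envelope into an exception."""
        data = json.loads(body)
        error = data.get("error")
        if error and error.get("handled"):
            raise RuntimeError("%s: %s" % (error["code"], error["message"]))
        return data

    body = ('{"error": {"handled": true, "code": "DuplicateChannelIdError", '
            '"message": "Channel name already exists: foo"}}')
    try:
        raise_on_handled_error(body)
    except RuntimeError as exc:
        print(exc)  # DuplicateChannelIdError: Channel name already exists: foo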