Skip to content

Commit

Permalink
Add tests
Browse files Browse the repository at this point in the history
  • Loading branch information
drew2a committed Jan 6, 2022
1 parent 444a030 commit b0217f3
Show file tree
Hide file tree
Showing 4 changed files with 144 additions and 33 deletions.
Original file line number Diff line number Diff line change
Expand Up @@ -192,8 +192,8 @@ async def process_rpc_query(self, json_bytes: bytes):
:raises pony.orm.dbapiprovider.OperationalError: if an illegal query was performed.
"""
parameters = json.loads(json_bytes)
parameters['infohash_set'] = await self.mds.run_threaded(self.search_for_tags, parameters.get('tags'))
request_sanitized = sanitize_query(parameters, self.rqc_settings.max_response_size)
parameters['infohash_set'] = await self.mds.run_threaded(self.search_for_tags, parameters.get('tags'))
return await self.mds.get_entries_threaded(**request_sanitized)

@db_session
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -69,7 +69,6 @@ def setUp(self):
self.count = 0
self.metadata_store_set = set()
self.initialize(BasicRemoteQueryCommunity, 2)
self.torrent_template = {"title": "", "infohash": b"", "torrent_date": datetime(1970, 1, 1), "tags": "video"}

async def tearDown(self):
for metadata_store in self.metadata_store_set:
Expand Down
Original file line number Diff line number Diff line change
@@ -0,0 +1,103 @@
from json import dumps
from unittest.mock import AsyncMock, Mock, PropertyMock, patch

from pony.orm import db_session

from ipv8.keyvault.crypto import default_eccrypto
from ipv8.test.base import TestBase
from tribler_core.components.metadata_store.db.orm_bindings.channel_node import NEW
from tribler_core.components.metadata_store.db.store import MetadataStore
from tribler_core.components.metadata_store.remote_query_community.remote_query_community import RemoteQueryCommunity
from tribler_core.components.metadata_store.remote_query_community.settings import RemoteQueryCommunitySettings
from tribler_core.components.metadata_store.remote_query_community.tests.test_remote_query_community import \
BasicRemoteQueryCommunity
from tribler_core.components.tag.db.tag_db import TagDatabase
from tribler_core.components.tag.db.tests.test_tag_db import Tag, TestTagDB
from tribler_core.utilities.path_util import Path


class TestRemoteSearchByTags(TestBase):
    """Tests for remote search by tags.

    Only one node instance is used, as it is sufficient for testing
    remote search by tags.
    """

    def setUp(self):
        super().setUp()
        # Initialized to None so tearDown can safely shut the store down
        # even if create_node was never invoked.
        self.metadata_store = None
        self.initialize(BasicRemoteQueryCommunity, 1)

    async def tearDown(self):
        if self.metadata_store:
            self.metadata_store.shutdown()

        await super().tearDown()

    def create_node(self, *args, **kwargs):
        """Create a community node backed by fresh on-disk metadata and tag databases."""
        self.metadata_store = MetadataStore(
            # plain strings: the original used f-strings with no placeholders (F541)
            Path(self.temporary_directory()) / "mds.db",
            Path(self.temporary_directory()),
            default_eccrypto.generate_key("curve25519"),
            disable_sync=True,
        )

        kwargs['metadata_store'] = self.metadata_store
        kwargs['tags_db'] = TagDatabase(str(Path(self.temporary_directory()) / "tags.db"))
        kwargs['rqc_settings'] = RemoteQueryCommunitySettings()
        return super().create_node(*args, **kwargs)

    @property
    def rqc(self) -> RemoteQueryCommunity:
        """Shortcut to the single community instance under test."""
        return self.overlay(0)

    @patch.object(RemoteQueryCommunity, 'tags_db', new=PropertyMock(return_value=None), create=True)
    async def test_search_for_tags_no_db(self):
        # Test that in case of a missing `tags_db`, `search_for_tags` returns None.
        assert self.rqc.search_for_tags(tags=['tag']) is None

    @patch.object(TagDatabase, 'get_infohashes')
    async def test_search_for_tags_only_valid_tags(self, mocked_get_infohashes: Mock):
        # Test that `search_for_tags` forwards only valid tags to the database
        # (tags containing spaces are filtered out).
        self.rqc.search_for_tags(tags=['invalid tag', 'valid_tag'])
        mocked_get_infohashes.assert_called_with({'valid_tag'})

    @patch.object(MetadataStore, 'get_entries_threaded', new_callable=AsyncMock)
    async def test_process_rpc_query_no_tags(self, mocked_get_entries_threaded: AsyncMock):
        # Test that a query without tags reaches the metadata store with its
        # parameters intact and `infohash_set` left as None.
        parameters = {'first': 0, 'infohash_set': None, 'last': 100}
        json_query = dumps(parameters).encode('utf-8')

        await self.rqc.process_rpc_query(json_query)

        # expected call: the sanitized query parameters plus the (None) infohash set
        expected_parameters = {**parameters, 'infohash_set': None}
        mocked_get_entries_threaded.assert_called_with(**expected_parameters)

    async def test_process_rpc_query_with_tags(self):
        # Test that a query containing tags is processed end-to-end: the tag
        # database and metadata store are pre-filled, then the query runs.
        # NOTE(review): no assertion on the result yet — TODO determine the
        # expected infohash set and assert it here.
        @db_session
        def fill_tags_database():
            TestTagDB.add_operation_set(
                self.rqc.tags_db,
                {
                    b'infohash1': [
                        Tag(name='tag1', count=2),
                    ],
                    b'infohash2': [
                        Tag(name='tag2', count=1),
                    ]
                })

        @db_session
        def fill_mds():
            # The decorator already opens a db_session; the nested
            # `with db_session:` from the original was redundant.
            torrent = {"infohash": b'infohash1', "title": 'title', "tags": "", "size": 1, "status": NEW}
            torrent_metadata = self.rqc.mds.TorrentMetadata.from_dict(torrent)
            torrent_metadata.sign()

        fill_tags_database()
        fill_mds()

        parameters = {'first': 0, 'infohash_set': None, 'last': 100, 'tags': ['tag1', 'tag2', 'tag3']}
        json_query = dumps(parameters).encode('utf-8')

        await self.rqc.process_rpc_query(json_query)
Original file line number Diff line number Diff line change
Expand Up @@ -53,14 +53,16 @@ def create_torrent_tag(self, tag='tag', infohash=b'infohash'):
def create_operation(infohash=b'infohash', tag='tag', peer=b'', operation=TagOperationEnum.ADD, clock=0):
return TagOperation(infohash=infohash, tag=tag, operation=operation, clock=clock, creator_public_key=peer)

def add_operation(self, infohash=b'infohash', tag='tag', peer=b'', operation=TagOperationEnum.ADD,
@staticmethod
def add_operation(tag_db: TagDatabase, infohash=b'infohash', tag='tag', peer=b'', operation=TagOperationEnum.ADD,
is_local_peer=False, clock=None):
operation = self.create_operation(infohash, tag, peer, operation, clock)
operation.clock = clock or self.db.get_clock(operation) + 1
assert self.db.add_tag_operation(operation, signature=b'', is_local_peer=is_local_peer)
operation = TestTagDB.create_operation(infohash, tag, peer, operation, clock)
operation.clock = clock or tag_db.get_clock(operation) + 1
assert tag_db.add_tag_operation(operation, signature=b'', is_local_peer=is_local_peer)
commit()

def add_operation_set(self, dictionary):
@staticmethod
def add_operation_set(tag_db: TagDatabase, dictionary):
index = count(0)

def generate_n_peer_names(n):
Expand All @@ -70,7 +72,7 @@ def generate_n_peer_names(n):
for infohash, tags in dictionary.items():
for tag in tags:
for peer in generate_n_peer_names(tag.count):
self.add_operation(infohash, tag.name, peer)
TestTagDB.add_operation(tag_db, infohash, tag.name, peer)

@db_session
async def test_get_or_create(self):
Expand Down Expand Up @@ -129,56 +131,57 @@ def assert_all_tables_have_the_only_one_entity():
assert self.db.instance.TorrentTagOp.select().count() == 1

# add the first operation
self.add_operation(b'infohash', 'tag', b'peer1')
self.add_operation(self.db, b'infohash', 'tag', b'peer1')
assert_all_tables_have_the_only_one_entity()

# add the same operation
self.add_operation(b'infohash', 'tag', b'peer1')
self.add_operation(self.db, b'infohash', 'tag', b'peer1')
assert_all_tables_have_the_only_one_entity()

# add an operation from the past
self.add_operation(b'infohash', 'tag', b'peer1', clock=0)
self.add_operation(self.db, b'infohash', 'tag', b'peer1', clock=0)
assert_all_tables_have_the_only_one_entity()

# add a duplicate operation but from the future
self.add_operation(b'infohash', 'tag', b'peer1', clock=1000)
self.add_operation(self.db, b'infohash', 'tag', b'peer1', clock=1000)
assert_all_tables_have_the_only_one_entity()

assert self.db.instance.TorrentTagOp.get().operation == TagOperationEnum.ADD
assert self.db.instance.TorrentTag.get().added_count == 1
assert self.db.instance.TorrentTag.get().removed_count == 0

# add a unique operation from the future
self.add_operation(b'infohash', 'tag', b'peer1', operation=TagOperationEnum.REMOVE, clock=1001)
self.add_operation(self.db, b'infohash', 'tag', b'peer1', operation=TagOperationEnum.REMOVE, clock=1001)
assert_all_tables_have_the_only_one_entity()
assert self.db.instance.TorrentTagOp.get().operation == TagOperationEnum.REMOVE
assert self.db.instance.TorrentTag.get().added_count == 0
assert self.db.instance.TorrentTag.get().removed_count == 1

@db_session
async def test_remote_add_multiple_tag_operations(self):
self.add_operation(b'infohash', 'tag', b'peer1')
self.add_operation(b'infohash', 'tag', b'peer2')
self.add_operation(b'infohash', 'tag', b'peer3')
self.add_operation(self.db, b'infohash', 'tag', b'peer1')
self.add_operation(self.db, b'infohash', 'tag', b'peer2')
self.add_operation(self.db, b'infohash', 'tag', b'peer3')

assert self.db.instance.TorrentTag.get().added_count == 3
assert self.db.instance.TorrentTag.get().removed_count == 0

self.add_operation(b'infohash', 'tag', b'peer2', operation=TagOperationEnum.REMOVE)
self.add_operation(self.db, b'infohash', 'tag', b'peer2', operation=TagOperationEnum.REMOVE)
assert self.db.instance.TorrentTag.get().added_count == 2
assert self.db.instance.TorrentTag.get().removed_count == 1

self.add_operation(b'infohash', 'tag', b'peer1', operation=TagOperationEnum.REMOVE)
self.add_operation(self.db, b'infohash', 'tag', b'peer1', operation=TagOperationEnum.REMOVE)
assert self.db.instance.TorrentTag.get().added_count == 1
assert self.db.instance.TorrentTag.get().removed_count == 2

self.add_operation(b'infohash', 'tag', b'peer1')
self.add_operation(self.db, b'infohash', 'tag', b'peer1')
assert self.db.instance.TorrentTag.get().added_count == 2
assert self.db.instance.TorrentTag.get().removed_count == 1

@db_session
async def test_multiple_tags(self):
self.add_operation_set(
self.db,
{
b'infohash1': [
Tag(name='tag1', count=2),
Expand Down Expand Up @@ -212,6 +215,7 @@ def assert_entities_count():
@db_session
async def test_get_tags_added(self):
self.add_operation_set(
self.db,
{
b'infohash1': [
Tag(name='tag1', count=1),
Expand All @@ -227,6 +231,7 @@ async def test_get_tags_added(self):
@db_session
async def test_get_tags_removed(self):
self.add_operation_set(
self.db,
{
b'infohash1': [
Tag(name='tag1', count=2),
Expand All @@ -235,64 +240,67 @@ async def test_get_tags_removed(self):
}
)

self.add_operation(infohash=b'infohash1', tag='tag2', peer=b'4', operation=TagOperationEnum.REMOVE)
self.add_operation(self.db, infohash=b'infohash1', tag='tag2', peer=b'4', operation=TagOperationEnum.REMOVE)

assert self.db.get_tags(b'infohash1') == ['tag1']

@db_session
async def test_show_local_tags(self):
# Test that locally added tags have a priority to show.
# That means no matter of other peers opinions, locally added tag should be visible.
self.add_operation(b'infohash1', 'tag1', b'peer1', operation=TagOperationEnum.REMOVE)
self.add_operation(b'infohash1', 'tag1', b'peer2', operation=TagOperationEnum.REMOVE)
self.add_operation(self.db, b'infohash1', 'tag1', b'peer1', operation=TagOperationEnum.REMOVE)
self.add_operation(self.db, b'infohash1', 'tag1', b'peer2', operation=TagOperationEnum.REMOVE)
assert not self.db.get_tags(b'infohash1')

# test local add
self.add_operation(b'infohash1', 'tag1', b'peer3', operation=TagOperationEnum.ADD, is_local_peer=True)
self.add_operation(self.db, b'infohash1', 'tag1', b'peer3', operation=TagOperationEnum.ADD, is_local_peer=True)
assert self.db.get_tags(b'infohash1') == ['tag1']

@db_session
async def test_hide_local_tags(self):
# Test that locally removed tags should not be visible to local user.
# No matter of other peers opinions, locally removed tag should be not visible.
self.add_operation(b'infohash1', 'tag1', b'peer1')
self.add_operation(b'infohash1', 'tag1', b'peer2')
self.add_operation(self.db, b'infohash1', 'tag1', b'peer1')
self.add_operation(self.db, b'infohash1', 'tag1', b'peer2')
assert self.db.get_tags(b'infohash1') == ['tag1']

# test local remove
self.add_operation(b'infohash1', 'tag1', b'peer3', operation=TagOperationEnum.REMOVE, is_local_peer=True)
self.add_operation(self.db, b'infohash1', 'tag1', b'peer3', operation=TagOperationEnum.REMOVE,
is_local_peer=True)
assert self.db.get_tags(b'infohash1') == []

@db_session
async def test_suggestions(self):
# Test whether the database returns the right suggestions.
# Suggestions are tags that have not gathered enough support for display yet.
self.add_operation(tag='tag1', peer=b'1')
self.add_operation(self.db, tag='tag1', peer=b'1')
assert self.db.get_suggestions(b'infohash') == ["tag1"]

self.add_operation(tag='tag1', peer=b'2')
self.add_operation(self.db, tag='tag1', peer=b'2')
assert self.db.get_suggestions(b'infohash') == [] # This tag now has enough support

self.add_operation(tag='tag1', peer=b'3', operation=TagOperationEnum.REMOVE) # score:1
self.add_operation(self.db, tag='tag1', peer=b'3', operation=TagOperationEnum.REMOVE) # score:1
assert self.db.get_suggestions(b'infohash') == ["tag1"]

self.add_operation(tag='tag1', peer=b'4', operation=TagOperationEnum.REMOVE) # score:0
self.add_operation(tag='tag1', peer=b'5', operation=TagOperationEnum.REMOVE) # score:-1
self.add_operation(tag='tag1', peer=b'6', operation=TagOperationEnum.REMOVE) # score:-2
self.add_operation(self.db, tag='tag1', peer=b'4', operation=TagOperationEnum.REMOVE) # score:0
self.add_operation(self.db, tag='tag1', peer=b'5', operation=TagOperationEnum.REMOVE) # score:-1
self.add_operation(self.db, tag='tag1', peer=b'6', operation=TagOperationEnum.REMOVE) # score:-2
assert not self.db.get_suggestions(b'infohash') # below the threshold

@db_session
async def test_get_clock_of_operation(self):
operation = self.create_operation(tag='tag1')
assert self.db.get_clock(operation) == 0

self.add_operation(infohash=operation.infohash, tag=operation.tag, peer=operation.creator_public_key, clock=1)
self.add_operation(self.db, infohash=operation.infohash, tag=operation.tag, peer=operation.creator_public_key,
clock=1)
assert self.db.get_clock(operation) == 1

@db_session
async def test_get_tags_operations_for_gossip(self):
time_delta = {'minutes': 1}
self.add_operation_set(
self.db,
{
b'infohash1': [
Tag(name='tag1', count=1),
Expand All @@ -312,6 +320,7 @@ async def test_get_tags_operations_for_gossip(self):
@db_session
async def test_get_infohashes(self):
self.add_operation_set(
self.db,
{
b'infohash1': [
Tag(name='tag1', count=2),
Expand Down

0 comments on commit b0217f3

Please sign in to comment.