From 52d0fbbe25afd349b74110e787976fa6a24c88de Mon Sep 17 00:00:00 2001 From: argaen Date: Sat, 13 May 2017 00:09:04 +0200 Subject: [PATCH] Add performance footprint tests --- .travis.yml | 1 + aiocache/backends/memcached.py | 2 +- tests/performance/conftest.py | 21 +++++ tests/performance/test_footprint.py | 131 ++++++++++++++++++++++++++++ 4 files changed, 154 insertions(+), 1 deletion(-) create mode 100644 tests/performance/conftest.py create mode 100644 tests/performance/test_footprint.py diff --git a/.travis.yml b/.travis.yml index c6f8be500..4c13a9512 100644 --- a/.travis.yml +++ b/.travis.yml @@ -15,6 +15,7 @@ script: - python setup.py develop - pytest --cov-report term-missing --cov=aiocache -sv tests/ut - pytest -sv tests/acceptance + - pytest -sv tests/performance/test_footprint.py - bash examples/run_all.sh services: diff --git a/aiocache/backends/memcached.py b/aiocache/backends/memcached.py index 059ccdbc5..b53987e6a 100644 --- a/aiocache/backends/memcached.py +++ b/aiocache/backends/memcached.py @@ -67,7 +67,7 @@ async def _multi_set(self, pairs, ttl=0): tasks = [] for key, value in pairs: value = str.encode(value) if isinstance(value, str) else value - tasks.append(asyncio.ensure_future(self.client.set(key, value, exptime=ttl or 0))) + tasks.append(self.client.set(key, value, exptime=ttl or 0)) await asyncio.gather(*tasks) diff --git a/tests/performance/conftest.py b/tests/performance/conftest.py new file mode 100644 index 000000000..68dac713f --- /dev/null +++ b/tests/performance/conftest.py @@ -0,0 +1,21 @@ +import pytest + +from aiocache import MemcachedCache, RedisCache +from aiocache.backends.redis import RedisBackend + + +@pytest.fixture +def redis_cache(event_loop): + cache = RedisCache( + namespace="test", loop=event_loop, pool_max_size=1) + yield cache + + for _, pool in RedisBackend.pools.items(): + pool.close() + event_loop.run_until_complete(pool.wait_closed()) + + +@pytest.fixture +def memcached_cache(event_loop): + cache = 
MemcachedCache(namespace="test", loop=event_loop, pool_size=1) + yield cache diff --git a/tests/performance/test_footprint.py b/tests/performance/test_footprint.py new file mode 100644 index 000000000..648db3c1a --- /dev/null +++ b/tests/performance/test_footprint.py @@ -0,0 +1,131 @@ +import pytest +import time + +import aioredis +import aiomcache + + +@pytest.fixture +def aioredis_pool(event_loop): + return event_loop.run_until_complete( + aioredis.create_pool(("127.0.0.1", 6379), maxsize=1)) + + +class TestRedis: + + @pytest.mark.asyncio + async def test_redis_getsetdel(self, aioredis_pool, redis_cache): + N = 10000 + aioredis_total_time = 0 + for n in range(N): + start = time.time() + with await aioredis_pool as redis: + await redis.set("hi", "value") + with await aioredis_pool as redis: + await redis.get("hi") + with await aioredis_pool as redis: + await redis.delete("hi") + aioredis_total_time += time.time() - start + + aiocache_total_time = 0 + for n in range(N): + start = time.time() + await redis_cache.set("hi", "value", timeout=0) + await redis_cache.get("hi", timeout=0) + await redis_cache.delete("hi", timeout=0) + aiocache_total_time += time.time() - start + + print("{:0.2f}/{:0.2f}: {:0.2f}".format( + aiocache_total_time, aioredis_total_time, aiocache_total_time/aioredis_total_time)) + assert aiocache_total_time/aioredis_total_time < 1.25 + + @pytest.mark.asyncio + async def test_redis_multigetsetdel(self, aioredis_pool, redis_cache): + N = 5000 + aioredis_total_time = 0 + values = ["a", "b", "c", "d", "e", "f"] + for n in range(N): + start = time.time() + with await aioredis_pool as redis: + await redis.mset(*[x for x in values*2]) + with await aioredis_pool as redis: + await redis.mget(*values) + for k in values: + with await aioredis_pool as redis: + await redis.delete(k) + aioredis_total_time += time.time() - start + + aiocache_total_time = 0 + for n in range(N): + start = time.time() + await redis_cache.multi_set([(x, x) for x in values], 
timeout=0) + await redis_cache.multi_get(values, timeout=0) + for k in values: + await redis_cache.delete(k, timeout=0) + aiocache_total_time += time.time() - start + + print("{:0.2f}/{:0.2f}: {:0.2f}".format( + aiocache_total_time, aioredis_total_time, aiocache_total_time/aioredis_total_time)) + assert aiocache_total_time/aioredis_total_time < 1.25 + + +@pytest.fixture +def aiomcache_pool(event_loop): + yield aiomcache.Client("127.0.0.1", 11211, pool_size=1, loop=event_loop) + + +class TestMemcached: + + @pytest.mark.asyncio + async def test_memcached_getsetdel(self, aiomcache_pool, memcached_cache): + N = 10000 + aiomcache_total_time = 0 + for n in range(N): + start = time.time() + await aiomcache_pool.set(b"hi", b"value") + await aiomcache_pool.get(b"hi") + await aiomcache_pool.delete(b"hi") + aiomcache_total_time += time.time() - start + + aiocache_total_time = 0 + for n in range(N): + start = time.time() + await memcached_cache.set("hi", "value", timeout=0) + await memcached_cache.get("hi", timeout=0) + await memcached_cache.delete("hi", timeout=0) + aiocache_total_time += time.time() - start + + print("{:0.2f}/{:0.2f}: {:0.2f}".format( + aiocache_total_time, aiomcache_total_time, aiocache_total_time/aiomcache_total_time)) + assert aiocache_total_time/aiomcache_total_time < 1.30 + + @pytest.mark.asyncio + async def test_memcached_multigetsetdel(self, aiomcache_pool, memcached_cache): + N = 2000 + aiomcache_total_time = 0 + values = [b"a", b"b", b"c", b"d", b"e", b"f"] + for n in range(N): + start = time.time() + for k in values: + await aiomcache_pool.set(k, k) + await aiomcache_pool.multi_get(*values) + for k in values: + await aiomcache_pool.delete(k) + aiomcache_total_time += time.time() - start + + aiocache_total_time = 0 + values = [b"a", b"b", b"c", b"d", b"e", b"f"] + for n in range(N): + start = time.time() + # TODO: aiomcache pool behaves really BAD with concurrent requests so multi_set + # is not ideal.
With the new MR I've submitted aiomcache/#46 it will improve + # although its not ideal... + await memcached_cache.multi_set([(x, x) for x in values], timeout=0) + await memcached_cache.multi_get(values, timeout=0) + for k in values: + await memcached_cache.delete(k, timeout=0) + aiocache_total_time += time.time() - start + + print("{:0.2f}/{:0.2f}: {:0.2f}".format( + aiocache_total_time, aiomcache_total_time, aiocache_total_time/aiomcache_total_time)) + assert aiocache_total_time/aiomcache_total_time < 1.90