Add performance footprint tests #228

Merged · 2 commits · May 13, 2017
1 change: 1 addition & 0 deletions .travis.yml
@@ -15,6 +15,7 @@ script:
   - python setup.py develop
   - pytest --cov-report term-missing --cov=aiocache -sv tests/ut
   - pytest -sv tests/acceptance
+  - pytest -sv tests/performance/test_footprint.py
   - bash examples/run_all.sh
 
 services:
2 changes: 1 addition & 1 deletion aiocache/backends/memcached.py
@@ -67,7 +67,7 @@ async def _multi_set(self, pairs, ttl=0):
         tasks = []
         for key, value in pairs:
             value = str.encode(value) if isinstance(value, str) else value
-            tasks.append(asyncio.ensure_future(self.client.set(key, value, exptime=ttl or 0)))
+            tasks.append(self.client.set(key, value, exptime=ttl or 0))
 
         await asyncio.gather(*tasks)

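Note on the memcached change above: asyncio.gather() already wraps bare coroutines in tasks before awaiting them, so the asyncio.ensure_future() call was redundant overhead. A minimal standalone sketch of that behavior (demo() and the sleep are illustrative, not aiocache code):

import asyncio


async def demo(i):
    # Stand-in for self.client.set(...); just yields to the event loop.
    await asyncio.sleep(0.01)
    return i


async def main():
    coros = [demo(i) for i in range(3)]  # plain coroutines, no ensure_future()
    # gather() schedules each coroutine as a Task and awaits them all.
    print(await asyncio.gather(*coros))  # -> [0, 1, 2]


asyncio.get_event_loop().run_until_complete(main())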
23 changes: 14 additions & 9 deletions aiocache/backends/redis.py
@@ -66,19 +66,24 @@ async def _multi_set(self, pairs, ttl=None):
         """
         ttl = ttl or 0
 
-        with await self._connect() as redis:
-            transaction = redis.multi_exec()
-            flattened = list(itertools.chain.from_iterable(
-                (key, value) for key, value in pairs))
-            transaction.mset(*flattened)
-            if ttl > 0:
-                for key in flattened[::2]:
-                    transaction.expire(key, timeout=ttl)
+        flattened = list(itertools.chain.from_iterable(
+            (key, value) for key, value in pairs))
 
-            await transaction.execute()
+        with await self._connect() as redis:
+            if ttl:
+                await self.__multi_set_ttl(redis, flattened, ttl)
+            else:
+                await redis.mset(*flattened)
 
         return True
 
+    async def __multi_set_ttl(self, conn, flattened, ttl):
+        redis = conn.multi_exec()
+        redis.mset(*flattened)
+        for key in flattened[::2]:
+            redis.expire(key, timeout=ttl)
+        await redis.execute()
+
     async def _add(self, key, value, ttl=None):
         """
         Stores the value in the given key. Raises an error if the
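For context, the new __multi_set_ttl helper uses aioredis's MULTI/EXEC transaction API: commands called on the object returned by multi_exec() are queued locally and only sent when execute() is awaited, so the mset and per-key expire calls commit atomically. A minimal sketch of that pattern against a local Redis (assuming the aioredis 1.x-era API this diff targets; the keys and TTL are illustrative):

import asyncio

import aioredis


async def main():
    redis = await aioredis.create_redis(("127.0.0.1", 6379))
    tr = redis.multi_exec()            # begin queueing a MULTI/EXEC block
    tr.mset("k1", "v1", "k2", "v2")    # queued locally, not yet sent
    tr.expire("k1", timeout=10)        # per-key TTL, as in __multi_set_ttl
    tr.expire("k2", timeout=10)
    await tr.execute()                 # EXEC: all queued commands run atomically
    redis.close()
    await redis.wait_closed()


asyncio.get_event_loop().run_until_complete(main())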
21 changes: 21 additions & 0 deletions tests/performance/conftest.py
@@ -0,0 +1,21 @@
import pytest

from aiocache import MemcachedCache, RedisCache
from aiocache.backends.redis import RedisBackend


@pytest.fixture
def redis_cache(event_loop):
    cache = RedisCache(
        namespace="test", loop=event_loop, pool_max_size=1)
    yield cache

    for _, pool in RedisBackend.pools.items():
        pool.close()
        event_loop.run_until_complete(pool.wait_closed())


@pytest.fixture
def memcached_cache(event_loop):
    cache = MemcachedCache(namespace="test", loop=event_loop, pool_size=1)
    yield cache
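A quick usage sketch for the fixtures above (assumes pytest-asyncio and a Redis server on the default port; the test name and keys are illustrative, not part of the PR):

import pytest


@pytest.mark.asyncio
async def test_roundtrip(redis_cache):
    # The fixture hands over a ready cache; pool cleanup happens after yield.
    await redis_cache.set("key", "value")
    assert await redis_cache.get("key") == "value"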
134 changes: 134 additions & 0 deletions tests/performance/test_footprint.py
@@ -0,0 +1,134 @@
import pytest
import time

import aioredis
import aiomcache


@pytest.fixture
def aioredis_pool(event_loop):
    return event_loop.run_until_complete(
        aioredis.create_pool(("127.0.0.1", 6379), maxsize=1))


class TestRedis:

    @pytest.mark.asyncio
    async def test_redis_getsetdel(self, aioredis_pool, redis_cache):
        N = 10000
        aioredis_total_time = 0
        for n in range(N):
            start = time.time()
            with await aioredis_pool as redis:
                await redis.set("hi", "value")
            with await aioredis_pool as redis:
                await redis.get("hi")
            with await aioredis_pool as redis:
                await redis.delete("hi")
            aioredis_total_time += time.time() - start

        aiocache_total_time = 0
        for n in range(N):
            start = time.time()
            await redis_cache.set("hi", "value", timeout=0)
            await redis_cache.get("hi", timeout=0)
            await redis_cache.delete("hi", timeout=0)
            aiocache_total_time += time.time() - start

        print("{:0.2f}/{:0.2f}: {:0.2f}".format(
            aiocache_total_time, aioredis_total_time, aiocache_total_time/aioredis_total_time))
        assert aiocache_total_time/aioredis_total_time < 1.30

    @pytest.mark.asyncio
    async def test_redis_multigetsetdel(self, aioredis_pool, redis_cache):
        N = 5000
        aioredis_total_time = 0
        values = ["a", "b", "c", "d", "e", "f"]
        for n in range(N):
            start = time.time()
            with await aioredis_pool as redis:
                await redis.mset(*[x for x in values*2])
            with await aioredis_pool as redis:
                await redis.mget(*values)
            for k in values:
                with await aioredis_pool as redis:
                    await redis.delete(k)
            aioredis_total_time += time.time() - start

        aiocache_total_time = 0
        for n in range(N):
            start = time.time()
            await redis_cache.multi_set([(x, x) for x in values], timeout=0)
            await redis_cache.multi_get(values, timeout=0)
            for k in values:
                await redis_cache.delete(k, timeout=0)
            aiocache_total_time += time.time() - start

        print("{:0.2f}/{:0.2f}: {:0.2f}".format(
            aiocache_total_time, aioredis_total_time, aiocache_total_time/aioredis_total_time))
        assert aiocache_total_time/aioredis_total_time < 1.30


@pytest.fixture
def aiomcache_pool(event_loop):
    yield aiomcache.Client("127.0.0.1", 11211, pool_size=1, loop=event_loop)


class TestMemcached:

    @pytest.mark.asyncio
    async def test_memcached_getsetdel(self, aiomcache_pool, memcached_cache):
        N = 10000
        aiomcache_total_time = 0
        for n in range(N):
            start = time.time()
            await aiomcache_pool.set(b"hi", b"value")
            await aiomcache_pool.get(b"hi")
            await aiomcache_pool.delete(b"hi")
            aiomcache_total_time += time.time() - start

        aiocache_total_time = 0
        for n in range(N):
            start = time.time()
            await memcached_cache.set("hi", "value", timeout=0)
            await memcached_cache.get("hi", timeout=0)
            await memcached_cache.delete("hi", timeout=0)
            aiocache_total_time += time.time() - start

        print("{:0.2f}/{:0.2f}: {:0.2f}".format(
            aiocache_total_time, aiomcache_total_time, aiocache_total_time/aiomcache_total_time))
        assert aiocache_total_time/aiomcache_total_time < 1.30

    @pytest.mark.asyncio
    async def test_memcached_multigetsetdel(self, aiomcache_pool, memcached_cache):
        N = 2000
        aiomcache_total_time = 0
        values = [b"a", b"b", b"c", b"d", b"e", b"f"]
        for n in range(N):
            start = time.time()
            for k in values:
                await aiomcache_pool.set(k, k)
            await aiomcache_pool.multi_get(*values)
            for k in values:
                await aiomcache_pool.delete(k)
            aiomcache_total_time += time.time() - start

        aiocache_total_time = 0
        values = [b"a", b"b", b"c", b"d", b"e", b"f"]
        for n in range(N):
            start = time.time()
            # TODO: the aiomcache pool behaves really badly with concurrent
            # requests, so multi_set is not ideal. The MR I've submitted
            # (aiomcache/#46) will improve this, although it's still not ideal.
            # Also, performance is far worse locally because latency is stable,
            # so we get no benefit from concurrency; in CI and real
            # environments the numbers are better.
            await memcached_cache.multi_set([(x, x) for x in values], timeout=0)
            await memcached_cache.multi_get(values, timeout=0)
            for k in values:
                await memcached_cache.delete(k, timeout=0)
            aiocache_total_time += time.time() - start

        print("{:0.2f}/{:0.2f}: {:0.2f}".format(
            aiocache_total_time, aiomcache_total_time, aiocache_total_time/aiomcache_total_time))
        assert aiocache_total_time/aiomcache_total_time < 1.90
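The four tests above repeat the same time-and-accumulate loop around different cache calls. A possible refactor sketch (the timed() helper is mine, not part of the PR) that factors the pattern out:

import time


async def timed(n, op):
    """Await the zero-argument coroutine function `op` n times; return total seconds."""
    total = 0.0
    for _ in range(n):
        start = time.time()
        await op()
        total += time.time() - start
    return total


# Usage inside a test, e.g. for the aiocache side of test_redis_getsetdel:
#
#     async def op():
#         await redis_cache.set("hi", "value", timeout=0)
#         await redis_cache.get("hi", timeout=0)
#         await redis_cache.delete("hi", timeout=0)
#
#     aiocache_total_time = await timed(10000, op)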
4 changes: 1 addition & 3 deletions tests/ut/backends/test_redis.py
@@ -142,9 +142,7 @@ async def test_multi_get(self, redis):
     async def test_multi_set(self, redis):
         cache, pool = redis
         await cache._multi_set([(pytest.KEY, "value"), (pytest.KEY_1, "random")])
-        assert pool.client.multi_exec.call_count == 1
-        pool.transaction.mset.assert_called_with(pytest.KEY, "value", pytest.KEY_1, "random")
-        assert pool.transaction.execute.call_count == 1
+        pool.client.mset.assert_called_with(pytest.KEY, "value", pytest.KEY_1, "random")
 
     @pytest.mark.asyncio
     async def test_multi_set_with_ttl(self, redis):
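The body of test_multi_set_with_ttl is cut off above. Purely as a hypothetical illustration (not the PR's actual assertions), a mock-based check for the TTL path might reuse the pool.transaction names from the deleted lines, since with a ttl the call now routes through the MULTI/EXEC helper:

    @pytest.mark.asyncio
    async def test_multi_set_with_ttl(self, redis):
        cache, pool = redis
        await cache._multi_set([(pytest.KEY, "value")], ttl=1)
        # Hypothetical: the transaction, not the plain client, should be used.
        assert pool.client.multi_exec.call_count == 1
        pool.transaction.mset.assert_called_with(pytest.KEY, "value")
        pool.transaction.expire.assert_called_with(pytest.KEY, timeout=1)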