Add performance footprint tests
argaen committed May 12, 2017
1 parent eeb1b21 commit 52d0fbb
Showing 4 changed files with 154 additions and 1 deletion.
1 change: 1 addition & 0 deletions .travis.yml
@@ -15,6 +15,7 @@ script:
   - python setup.py develop
   - pytest --cov-report term-missing --cov=aiocache -sv tests/ut
   - pytest -sv tests/acceptance
+  - pytest -sv tests/performance/test_footprint.py
   - bash examples/run_all.sh
 
 services:
2 changes: 1 addition & 1 deletion aiocache/backends/memcached.py
@@ -67,7 +67,7 @@ async def _multi_set(self, pairs, ttl=0):
         tasks = []
         for key, value in pairs:
             value = str.encode(value) if isinstance(value, str) else value
-            tasks.append(asyncio.ensure_future(self.client.set(key, value, exptime=ttl or 0)))
+            tasks.append(self.client.set(key, value, exptime=ttl or 0))
 
         await asyncio.gather(*tasks)
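
The deleted line wrapped each set in asyncio.ensure_future, which schedules a Task per call; asyncio.gather already wraps the awaitables it receives, so passing bare coroutines avoids that per-call overhead. A minimal standalone sketch of the pattern (the fetch coroutine is invented for illustration):

import asyncio


async def fetch(i):
    await asyncio.sleep(0)  # stand-in for a network round-trip
    return i


async def main():
    # gather() accepts bare coroutines and schedules them itself,
    # so pre-wrapping each call in ensure_future() is redundant.
    results = await asyncio.gather(*(fetch(i) for i in range(3)))
    print(results)  # [0, 1, 2]


asyncio.get_event_loop().run_until_complete(main())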

21 changes: 21 additions & 0 deletions tests/performance/conftest.py
@@ -0,0 +1,21 @@
import pytest

from aiocache import MemcachedCache, RedisCache
from aiocache.backends.redis import RedisBackend


@pytest.fixture
def redis_cache(event_loop):
    cache = RedisCache(
        namespace="test", loop=event_loop, pool_max_size=1)
    yield cache

    for _, pool in RedisBackend.pools.items():
        pool.close()
        event_loop.run_until_complete(pool.wait_closed())


@pytest.fixture
def memcached_cache(event_loop):
    cache = MemcachedCache(namespace="test", loop=event_loop, pool_size=1)
    yield cache
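
Note that redis_cache closes its pools after the test while memcached_cache yields without teardown. A possible cleanup, assuming the backend exposes its underlying aiomcache.Client as cache.client (a sketch, not part of this commit):

@pytest.fixture
def memcached_cache(event_loop):
    cache = MemcachedCache(namespace="test", loop=event_loop, pool_size=1)
    yield cache
    # Close the pooled connections once the test is done (assumes the
    # backend keeps its aiomcache client on `cache.client`).
    event_loop.run_until_complete(cache.client.close())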
131 changes: 131 additions & 0 deletions tests/performance/test_footprint.py
@@ -0,0 +1,131 @@
import pytest
import time

import aioredis
import aiomcache


@pytest.fixture
def aioredis_pool(event_loop):
    return event_loop.run_until_complete(
        aioredis.create_pool(("127.0.0.1", 6379), maxsize=1))


class TestRedis:

    @pytest.mark.asyncio
    async def test_redis_getsetdel(self, aioredis_pool, redis_cache):
        N = 10000
        aioredis_total_time = 0
        for n in range(N):
            start = time.time()
            with await aioredis_pool as redis:
                await redis.set("hi", "value")
            with await aioredis_pool as redis:
                await redis.get("hi")
            with await aioredis_pool as redis:
                await redis.delete("hi")
            aioredis_total_time += time.time() - start

        aiocache_total_time = 0
        for n in range(N):
            start = time.time()
            await redis_cache.set("hi", "value", timeout=0)
            await redis_cache.get("hi", timeout=0)
            await redis_cache.delete("hi", timeout=0)
            aiocache_total_time += time.time() - start

        print("{:0.2f}/{:0.2f}: {:0.2f}".format(
            aiocache_total_time, aioredis_total_time, aiocache_total_time/aioredis_total_time))
        assert aiocache_total_time/aioredis_total_time < 1.25

    @pytest.mark.asyncio
    async def test_redis_multigetsetdel(self, aioredis_pool, redis_cache):
        N = 5000
        aioredis_total_time = 0
        values = ["a", "b", "c", "d", "e", "f"]
        for n in range(N):
            start = time.time()
            with await aioredis_pool as redis:
                # Flatten (key, value) pairs so each key maps to itself,
                # matching the multi_set call below.
                await redis.mset(*[x for v in values for x in (v, v)])
            with await aioredis_pool as redis:
                await redis.mget(*values)
            for k in values:
                with await aioredis_pool as redis:
                    await redis.delete(k)
            aioredis_total_time += time.time() - start

        aiocache_total_time = 0
        for n in range(N):
            start = time.time()
            await redis_cache.multi_set([(x, x) for x in values], timeout=0)
            await redis_cache.multi_get(values, timeout=0)
            for k in values:
                await redis_cache.delete(k, timeout=0)
            aiocache_total_time += time.time() - start

        print("{:0.2f}/{:0.2f}: {:0.2f}".format(
            aiocache_total_time, aioredis_total_time, aiocache_total_time/aioredis_total_time))
        assert aiocache_total_time/aioredis_total_time < 1.25
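
Every benchmark in this file repeats the same measure-and-compare loop. A hypothetical helper (not in this commit) could factor it out; bench is an invented name, and time.perf_counter() is preferred over time.time() for timing because it is monotonic and higher-resolution:

import time


async def bench(n, op):
    """Await the no-argument coroutine function `op` n times; return total seconds."""
    total = 0.0
    for _ in range(n):
        start = time.perf_counter()
        await op()
        total += time.perf_counter() - start
    return total

A test body would then reduce to something like ratio = await bench(N, aiocache_ops) / await bench(N, aioredis_ops), with each ops function holding the three cache calls.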


@pytest.fixture
def aiomcache_pool(event_loop):
    yield aiomcache.Client("127.0.0.1", 11211, pool_size=1, loop=event_loop)


class TestMemcached:

    @pytest.mark.asyncio
    async def test_memcached_getsetdel(self, aiomcache_pool, memcached_cache):
        N = 10000
        aiomcache_total_time = 0
        for n in range(N):
            start = time.time()
            await aiomcache_pool.set(b"hi", b"value")
            await aiomcache_pool.get(b"hi")
            await aiomcache_pool.delete(b"hi")
            aiomcache_total_time += time.time() - start

        aiocache_total_time = 0
        for n in range(N):
            start = time.time()
            await memcached_cache.set("hi", "value", timeout=0)
            await memcached_cache.get("hi", timeout=0)
            await memcached_cache.delete("hi", timeout=0)
            aiocache_total_time += time.time() - start

        print("{:0.2f}/{:0.2f}: {:0.2f}".format(
            aiocache_total_time, aiomcache_total_time, aiocache_total_time/aiomcache_total_time))
        assert aiocache_total_time/aiomcache_total_time < 1.30

    @pytest.mark.asyncio
    async def test_memcached_multigetsetdel(self, aiomcache_pool, memcached_cache):
        N = 2000
        aiomcache_total_time = 0
        values = [b"a", b"b", b"c", b"d", b"e", b"f"]
        for n in range(N):
            start = time.time()
            for k in values:
                await aiomcache_pool.set(k, k)
            await aiomcache_pool.multi_get(*values)
            for k in values:
                await aiomcache_pool.delete(k)
            aiomcache_total_time += time.time() - start

        aiocache_total_time = 0
        for n in range(N):
            start = time.time()
            # TODO: the aiomcache pool behaves really badly with concurrent
            # requests, so multi_set is not ideal here. The MR submitted as
            # aiomcache/#46 will improve this, although it is still not ideal.
            await memcached_cache.multi_set([(x, x) for x in values], timeout=0)
            await memcached_cache.multi_get(values, timeout=0)
            for k in values:
                await memcached_cache.delete(k, timeout=0)
            aiocache_total_time += time.time() - start

        print("{:0.2f}/{:0.2f}: {:0.2f}".format(
            aiocache_total_time, aiomcache_total_time, aiocache_total_time/aiomcache_total_time))
        assert aiocache_total_time/aiomcache_total_time < 1.90
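
The looser 1.90 threshold reflects the TODO above: with pool_size=1, the concurrent set calls issued by multi_set all contend for a single connection, so gathering them adds scheduling overhead without any real parallelism. A hypothetical illustration, where client is assumed to be an aiomcache.Client:

import asyncio


async def sequential_sets(client, keys):
    for k in keys:
        await client.set(k, k)  # one round-trip at a time


async def concurrent_sets(client, keys):
    # With a single pooled connection these tasks serialize anyway,
    # so the gather() mostly adds task-scheduling overhead.
    await asyncio.gather(*(client.set(k, k) for k in keys))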
