chore: Generate environments for each individual test based on its markers/fixtures (#2648)

* generating test environments based on test markers and fixtures

Signed-off-by: Oleksii Moskalenko <[email protected]>

* remove "universal" marker

Signed-off-by: Oleksii Moskalenko <[email protected]>
pyalex authored May 11, 2022
1 parent 6589f15 commit d4b0b1a
Showing 21 changed files with 368 additions and 368 deletions.
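With this change a test opts into extra environments through the pytest markers it carries and the fixtures it requests, rather than through a blanket --universal run. As a minimal, hypothetical sketch of that mechanism (the universal_online_stores marker appears in the diffs below, but the registry, fixture, and test names here are illustrative, not Feast's actual harness):

# conftest.py -- illustrative sketch of marker-driven environment generation
import pytest

AVAILABLE_ONLINE_STORES = {
    "sqlite": {"type": "sqlite"},
    "redis": {"type": "redis", "connection_string": "localhost:6379"},
}


def pytest_configure(config):
    config.addinivalue_line(
        "markers", "universal_online_stores: run against every online store"
    )


def pytest_generate_tests(metafunc):
    # Only tests carrying the marker are expanded into one environment per
    # online store; everything else falls back to the default fixture below.
    if "online_store" in metafunc.fixturenames and metafunc.definition.get_closest_marker(
        "universal_online_stores"
    ):
        metafunc.parametrize(
            "online_store",
            AVAILABLE_ONLINE_STORES.values(),
            ids=list(AVAILABLE_ONLINE_STORES),
        )


@pytest.fixture
def online_store(request):
    # Unmarked tests get only the local default store.
    return getattr(request, "param", AVAILABLE_ONLINE_STORES["sqlite"])


# test_example.py -- a marked test runs once per registered online store
@pytest.mark.universal_online_stores
def test_read(online_store):
    assert "type" in online_store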
10 changes: 5 additions & 5 deletions Makefile
@@ -76,7 +76,7 @@ test-python-universal-contrib:
 	FULL_REPO_CONFIGS_MODULE=sdk.python.feast.infra.offline_stores.contrib.contrib_repo_configuration \
 	PYTEST_PLUGINS=feast.infra.offline_stores.contrib.trino_offline_store.tests \
 	FEAST_USAGE=False IS_TEST=True \
-	python -m pytest -n 8 --integration --universal \
+	python -m pytest -n 8 --integration \
 	-k "not test_historical_retrieval_fails_on_validation and \
 	not test_historical_retrieval_with_validation and \
 	not test_historical_features_persisting and \
@@ -93,7 +93,7 @@ test-python-universal-postgres:
 	PYTEST_PLUGINS=sdk.python.feast.infra.offline_stores.contrib.postgres_offline_store.tests \
 	FEAST_USAGE=False \
 	IS_TEST=True \
-	python -m pytest -x --integration --universal \
+	python -m pytest -x --integration \
 	-k "not test_historical_retrieval_fails_on_validation and \
 	not test_historical_retrieval_with_validation and \
 	not test_historical_features_persisting and \
@@ -105,10 +105,10 @@ test-python-universal-postgres:
 	sdk/python/tests
 
 test-python-universal-local:
-	FEAST_USAGE=False IS_TEST=True FEAST_IS_LOCAL_TEST=True python -m pytest -n 8 --integration --universal sdk/python/tests
+	FEAST_USAGE=False IS_TEST=True FEAST_IS_LOCAL_TEST=True python -m pytest -n 8 --integration sdk/python/tests
 
 test-python-universal:
-	FEAST_USAGE=False IS_TEST=True python -m pytest -n 8 --integration --universal sdk/python/tests
+	FEAST_USAGE=False IS_TEST=True python -m pytest -n 8 --integration sdk/python/tests
 
 test-python-go-server: compile-go-lib
 	FEAST_USAGE=False IS_TEST=True FEAST_GO_FEATURE_RETRIEVAL=True pytest --integration --goserver sdk/python/tests
@@ -158,7 +158,7 @@ start-trino-locally:
 	sleep 15
 
 test-trino-plugin-locally:
-	cd ${ROOT_DIR}/sdk/python; FULL_REPO_CONFIGS_MODULE=feast.infra.offline_stores.contrib.trino_offline_store.test_config.manual_tests FEAST_USAGE=False IS_TEST=True python -m pytest --integration --universal tests/
+	cd ${ROOT_DIR}/sdk/python; FULL_REPO_CONFIGS_MODULE=feast.infra.offline_stores.contrib.trino_offline_store.test_config.manual_tests FEAST_USAGE=False IS_TEST=True python -m pytest --integration tests/
 
 kill-trino-locally:
 	cd ${ROOT_DIR}; docker stop trino
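Both the dropped --universal flag and the --integration flag these targets keep are custom pytest options, not built-ins. As a rough illustration only (Feast's real conftest may differ), such a flag is typically registered via pytest_addoption and used to deselect marked tests:

# conftest.py -- illustrative registration of a custom --integration flag
import pytest


def pytest_addoption(parser):
    parser.addoption(
        "--integration",
        action="store_true",
        default=False,
        help="run tests that need real infrastructure",
    )


def pytest_collection_modifyitems(config, items):
    if config.getoption("--integration"):
        return
    skip = pytest.mark.skip(reason="needs --integration")
    for item in items:
        if "integration" in item.keywords:
            item.add_marker(skip)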
@@ -4,11 +4,14 @@
 from feast.infra.offline_stores.contrib.trino_offline_store.tests.data_source import (
     TrinoSourceCreator,
 )
-from tests.integration.feature_repos.integration_test_repo_config import (
-    IntegrationTestRepoConfig,
+from tests.integration.feature_repos.repo_configuration import REDIS_CONFIG
+from tests.integration.feature_repos.universal.online_store.redis import (
+    RedisOnlineStoreCreator,
 )
 
-FULL_REPO_CONFIGS = [
-    IntegrationTestRepoConfig(offline_store_creator=SparkDataSourceCreator),
-    IntegrationTestRepoConfig(offline_store_creator=TrinoSourceCreator),
+AVAILABLE_OFFLINE_STORES = [
+    ("local", SparkDataSourceCreator),
+    ("local", TrinoSourceCreator),
 ]
+
+AVAILABLE_ONLINE_STORES = {"redis": (REDIS_CONFIG, RedisOnlineStoreCreator)}
@@ -1,14 +1,7 @@
 from feast.infra.offline_stores.contrib.postgres_offline_store.tests.data_source import (
     PostgreSQLDataSourceCreator,
 )
-from tests.integration.feature_repos.integration_test_repo_config import (
-    IntegrationTestRepoConfig,
-)
 
-FULL_REPO_CONFIGS = [
-    IntegrationTestRepoConfig(
-        provider="local",
-        offline_store_creator=PostgreSQLDataSourceCreator,
-        online_store_creator=PostgreSQLDataSourceCreator,
-    ),
-]
+AVAILABLE_OFFLINE_STORES = [("local", PostgreSQLDataSourceCreator)]
+
+AVAILABLE_ONLINE_STORES = {"postgres": (None, PostgreSQLDataSourceCreator)}
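Both contrib configuration modules now export the same two registries: AVAILABLE_OFFLINE_STORES, a list of (provider, offline-store creator) pairs, and AVAILABLE_ONLINE_STORES, a dict mapping a label to an (online-store config, creator) pair. The commit does not show how the harness consumes them; a hypothetical consumer might simply take the cross product to yield one environment per combination:

# Hypothetical consumer of the two registries; environment_params() and the
# placeholder creator strings are illustrative, not Feast helpers.
from itertools import product

AVAILABLE_OFFLINE_STORES = [("local", "PostgreSQLDataSourceCreator")]
AVAILABLE_ONLINE_STORES = {"postgres": (None, "PostgreSQLDataSourceCreator")}


def environment_params():
    # One environment per (offline store, online store) combination.
    for (provider, offline), (label, (online_cfg, online)) in product(
        AVAILABLE_OFFLINE_STORES, AVAILABLE_ONLINE_STORES.items()
    ):
        yield {
            "provider": provider,
            "offline_store_creator": offline,
            "online_store": online_cfg,
            "online_store_creator": online,
            "id": f"{provider}:{offline}:{label}",
        }


for params in environment_params():
    print(params["id"])  # -> local:PostgreSQLDataSourceCreator:postgres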
22 changes: 12 additions & 10 deletions sdk/python/feast/infra/online_stores/datastore.py
@@ -15,7 +15,7 @@
 import logging
 from datetime import datetime
 from multiprocessing.pool import ThreadPool
-from queue import Queue
+from queue import Empty, Queue
 from threading import Lock, Thread
 from typing import Any, Callable, Dict, Iterator, List, Optional, Sequence, Tuple
 
@@ -292,22 +292,24 @@ def increment(self):
 
     def worker(shared_counter):
         while True:
-            client.delete_multi(deletion_queue.get())
+            try:
+                job = deletion_queue.get(block=False)
+            except Empty:
+                return
+
+            client.delete_multi(job)
             shared_counter.increment()
             LOGGER.debug(
                 f"batch deletions completed: {shared_counter.value} ({shared_counter.value * BATCH_SIZE} total entries) & outstanding queue size: {deletion_queue.qsize()}"
             )
             deletion_queue.task_done()
 
-    for _ in range(NUM_THREADS):
-        Thread(target=worker, args=(status_info_counter,), daemon=True).start()
-
     query = client.query(kind="Row", ancestor=key)
-    while True:
-        entities = list(query.fetch(limit=BATCH_SIZE))
-        if not entities:
-            break
-        deletion_queue.put([entity.key for entity in entities])
+    for page in query.fetch().pages:
+        deletion_queue.put([entity.key for entity in page])
+
+    for _ in range(NUM_THREADS):
+        Thread(target=worker, args=(status_info_counter,)).start()
 
     deletion_queue.join()
 
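The behavioural change in this hunk: the deletion queue is now filled completely (one batch of keys per fetched page) before any worker thread starts, and each worker drains it with a non-blocking get(), returning cleanly on queue.Empty instead of blocking forever as the old daemon-thread loop did. A self-contained sketch of that produce-then-drain pattern, with delete_batch() standing in for client.delete_multi():

# Generic produce-then-drain sketch; delete_batch() is a stand-in.
from queue import Empty, Queue
from threading import Thread

NUM_THREADS = 3
work_queue: Queue = Queue()


def delete_batch(batch):
    print(f"deleting {len(batch)} keys")


def worker():
    while True:
        try:
            batch = work_queue.get(block=False)
        except Empty:
            # The queue was fully populated before workers started, so an
            # empty queue means there is nothing left to do.
            return
        delete_batch(batch)
        work_queue.task_done()


# 1. Enqueue every batch first ...
for page in range(10):
    work_queue.put([f"key-{page}-{i}" for i in range(5)])

# 2. ... then start the workers and wait for the queue to drain.
threads = [Thread(target=worker) for _ in range(NUM_THREADS)]
for t in threads:
    t.start()
work_queue.join()
for t in threads:
    t.join()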
@@ -17,6 +17,7 @@
 
 @pytest.mark.benchmark
 @pytest.mark.integration
+@pytest.mark.universal_online_stores
 def test_online_retrieval(environment, universal_data_sources, benchmark):
     fs = environment.feature_store
     entities, datasets, data_sources = universal_data_sources