2 changes: 2 additions & 0 deletions README.md
@@ -1,3 +1,5 @@
[![codecov](https://codecov.io/gh/BioPack-team/shepherd/graph/badge.svg?token=NTPV9WF7EO)](https://codecov.io/gh/BioPack-team/shepherd)

# Translator Shepherd Service

Shepherd is a shared platform for ARA implementation. Incorporated ARAs have access to a plethora of shared ARA functionality while retaining the ability to implement their own custom operations.
1 change: 1 addition & 0 deletions test-requirements.txt
@@ -1,4 +1,5 @@
fakeredis
httpx
pytest
pytest-asyncio
pytest-cov
42 changes: 42 additions & 0 deletions tests/conftest.py
@@ -0,0 +1,42 @@
"""Import pytest fixtures."""

import fakeredis.aioredis as fakeredis
from psycopg_pool import AsyncConnectionPool
import pytest
import pytest_asyncio
import redis.asyncio as aioredis
from unittest.mock import AsyncMock, MagicMock


@pytest.fixture
def postgres_mock():
    """Factory for creating a mock postgres pool with a custom return value."""

    def _create_mock(return_value):
        mock_conn = AsyncMock()
        mock_conn.execute.side_effect = return_value

        mock_pool = AsyncMock(spec=AsyncConnectionPool)
        mock_pool.connection.return_value.__aenter__.return_value = mock_conn
        mock_pool.connection.return_value.__aexit__.return_value = None

        return mock_conn, mock_pool

    return _create_mock


@pytest.fixture()
def redis_mock(monkeypatch):
    """Create a fake redis client and patch aioredis.Redis to return it."""
    fake_redis = fakeredis.FakeRedis(decode_responses=True)

    def mock_redis_constructor(*args, **kwargs):
        return fake_redis

    monkeypatch.setattr(aioredis, "Redis", mock_redis_constructor)

    return fake_redis
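Nothing in this PR exercises the new postgres_mock factory yet. A minimal sketch of how a test might consume it, assuming pytest-asyncio is configured as in the existing tests; the test name, SQL string, and result list below are illustrative and not taken from this repo:

```python
import pytest


@pytest.mark.asyncio
async def test_postgres_mock_usage(postgres_mock):
    # The factory argument becomes the mocked connection's execute() side_effect,
    # so pass an iterable of successive results (or an exception to raise).
    mock_conn, mock_pool = postgres_mock([None])

    # Code under test would receive mock_pool in place of a real AsyncConnectionPool.
    async with mock_pool.connection() as conn:
        await conn.execute("SELECT 1")

    mock_conn.execute.assert_awaited_once_with("SELECT 1")
```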
Empty file added tests/helpers/__init__.py
21 changes: 0 additions & 21 deletions tests/helpers/mock_redis.py

This file was deleted.

Empty file added tests/unit/__init__.py
49 changes: 45 additions & 4 deletions tests/unit/test_example.py
@@ -3,14 +3,15 @@
import pytest
import redis.asyncio
from shepherd_utils.broker import get_task

from workers.example_ara.worker import example_ara
from tests.helpers.mock_redis import redis_mock
from workers.example_lookup.worker import example_lookup


@pytest.mark.asyncio
async def test_example(monkeypatch, mocker):
    _, redis_constructor = await redis_mock()
    monkeypatch.setattr(redis.asyncio, "Redis", redis_constructor)
async def test_example(redis_mock):
    # _, redis_constructor = await redis_mock()
    # monkeypatch.setattr(redis.asyncio, "Redis", redis_constructor)
    logger = logging.getLogger(__name__)

    await example_ara(["test", {"query_id": "test"}], logger)
@@ -28,3 +29,43 @@ async def test_example(monkeypatch, mocker):
        "filter_results_top_n",
        "filter_kgraph_orphans",
    ] == [op["id"] for op in workflow]


@pytest.mark.asyncio
async def test_example_lookup(mocker, redis_mock):
    mock_callback_id = mocker.patch("workers.example_lookup.worker.add_callback_id")
    mock_callback_id.return_value = "test"
    mock_callback_response = mocker.patch(
        "workers.example_lookup.worker.save_callback_response"
    )
    mock_callback_response.return_value = {}
    mock_running_callbacks = mocker.patch(
        "workers.example_lookup.worker.get_running_callbacks"
    )
    mock_running_callbacks.return_value = []
    mock_response = mocker.Mock()
    mocker.patch("httpx.AsyncClient.post", return_value=mock_response)
    logger = logging.getLogger(__name__)

    await example_lookup(
        [
            "test",
            {
                "query_id": "test",
                "workflow": json.dumps(
                    [{"id": "example.lookup"}, {"id": "example.score"}]
                ),
            },
        ],
        logger,
    )

    # Get the task that the ara should have put on the queue
    task = await get_task("example.score", "consumer", "test", logger)
    assert task is not None
    workflow = json.loads(task[1]["workflow"])
    # make sure the workflow was correctly passed
    assert len(workflow) == 1
    assert [
        "example.score",
    ] == [op["id"] for op in workflow]
4 changes: 3 additions & 1 deletion workers/example_lookup/worker.py
@@ -4,6 +4,7 @@
import httpx
import json
import logging
from pathlib import Path
import time
import uuid
from shepherd_utils.db import (
@@ -30,7 +31,8 @@ async def example_lookup(task, logger: logging.Logger):
    # Do query expansion or whatever lookup process
    # We're going to stub a response
    start = time.time()
    with open("test_response.json", "r") as f:
    test_response = Path(__file__).parent / "test_response.json"
    with open(test_response, "r") as f:
        response = json.load(f)
    logger.debug(f"Loading json took {time.time() - start}")

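A note on the open() change above, with a small illustrative sketch (not repo code; it assumes test_response.json sits next to worker.py): a bare relative path resolves against the worker process's current working directory, while anchoring on __file__ finds the stub regardless of where the process is launched from.

```python
from pathlib import Path

# Relative to the current working directory; only works if the process
# happens to start inside workers/example_lookup/:
# open("test_response.json")

# Relative to this module's directory, independent of the CWD
# (assumes test_response.json is shipped next to worker.py):
stub = Path(__file__).parent / "test_response.json"
with stub.open() as f:
    data = f.read()
```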