Skip to content

Commit

Permalink
chore(test): fix and re-activate integration and unit tests (#1004)
Browse files Browse the repository at this point in the history
  • Loading branch information
justinthelaw authored Sep 9, 2024
1 parent 254a27a commit d32bd72
Show file tree
Hide file tree
Showing 14 changed files with 314 additions and 176 deletions.
13 changes: 11 additions & 2 deletions .github/workflows/pytest-shim.yaml
Original file line number Diff line number Diff line change
Expand Up @@ -38,14 +38,23 @@ permissions:
# https://docs.github.com/en/pull-requests/collaborating-with-pull-requests/collaborating-on-repositories-with-code-quality-features/troubleshooting-required-status-checks#handling-skipped-but-required-checks

# Abort prior jobs in the same workflow / PR

concurrency:
group: pytest-skip-${{ github.ref }}
group: pytest-integration-skip-${{ github.ref }}
cancel-in-progress: true


jobs:
pytest:
runs-on: ubuntu-latest

steps:
- name: Skipped
run: |
echo skipped
integration:
runs-on: ubuntu-latest

steps:
- name: Skipped
run: |
Expand Down
110 changes: 99 additions & 11 deletions .github/workflows/pytest.yaml
Original file line number Diff line number Diff line change
@@ -1,4 +1,5 @@
name: pytest

on:
pull_request:
types:
Expand All @@ -25,15 +26,15 @@ on:
- "!.gitignore"
- "!LICENSE"

# Ignore LFAI-UI things (no Python)
# Ignore UI things (no Python)
- "!src/leapfrogai_ui/**"
- "!packages/ui/**"

# Declare default permissions as read only.
permissions: read-all

concurrency:
group: pytest-${{ github.ref }}
group: pytest-integration-${{ github.ref }}
cancel-in-progress: true

jobs:
Expand All @@ -44,22 +45,109 @@ jobs:
- name: Checkout Repo
uses: actions/checkout@b4ffde65f46336ab88eb53be808477a3936bae11 # v4.1.1

- uses: actions/setup-python@0a5c61591373683505ea898e09a3ea4f39ef2b9c #v5.0.0
- name: Cache Python Dependencies
uses: actions/cache@0c45773b623bea8c8e75f6c82b208c3cf94ea4f9 # v4.0.2
with:
path: |
~/.cache/pip
**/src/leapfrogai_api
**/src/leapfrogai_sdk
key: pytest-integration-pip-${{ github.ref }}

- name: Setup Python
uses: actions/setup-python@0a5c61591373683505ea898e09a3ea4f39ef2b9c #v5.0.0
with:
python-version-file: 'pyproject.toml'
python-version-file: "pyproject.toml"

- name: Install Python Dependencies
run: pip install ".[dev]" "src/leapfrogai_api" "src/leapfrogai_sdk"

- name: Build Repeater
- name: Setup Repeater
env:
LOCAL_VERSION: dev
LOCAL_VERSION: e2e-test
run: |
make docker-repeater
- name: Run Repeater
run: docker run -p 50051:50051 -d --name=repeater ghcr.io/defenseunicorns/leapfrogai/repeater:dev
docker run -p 50051:50051 -d --name=repeater ghcr.io/defenseunicorns/leapfrogai/repeater:$LOCAL_VERSION
- name: Run Pytest
run: make test-api-unit
env:
LFAI_RUN_REPEATER_TESTS: true

integration:
runs-on: ai-ubuntu-big-boy-8-core

# If basic unit tests fail, do not run this job
needs: pytest

steps:
- name: Checkout Repo
uses: actions/checkout@b4ffde65f46336ab88eb53be808477a3936bae11 # v4.1.1

- name: Use Cached Python Dependencies
uses: actions/cache@0c45773b623bea8c8e75f6c82b208c3cf94ea4f9 # v4.0.2
with:
path: |
~/.cache/pip
**/src/leapfrogai_api
**/src/leapfrogai_sdk
key: pytest-integration-pip-${{ github.ref }}

- name: Setup Python
uses: actions/setup-python@0a5c61591373683505ea898e09a3ea4f39ef2b9c #v5.0.0
with:
python-version-file: "pyproject.toml"

- name: Install Python Deps
run: pip install ".[dev]" "src/leapfrogai_api" "src/leapfrogai_sdk"

- name: Run Pytest
run: python -m pytest tests/pytest -v
- name: Setup Repeater
env:
LFAI_RUN_REPEATER_TESTS: true
LOCAL_VERSION: e2e-test
run: |
make docker-repeater
docker run -p 50051:50051 -d --name=repeater ghcr.io/defenseunicorns/leapfrogai/repeater:$LOCAL_VERSION
- name: Setup UDS Cluster
uses: ./.github/actions/uds-cluster
with:
registry1Username: ${{ secrets.IRON_BANK_ROBOT_USERNAME }}
registry1Password: ${{ secrets.IRON_BANK_ROBOT_PASSWORD }}
ghToken: ${{ secrets.GITHUB_TOKEN }}

- name: Setup API and Supabase
uses: ./.github/actions/lfai-core

- name: Generate Secrets
id: generate_secrets
run: |
SUPABASE_PASS=$(cat <(openssl rand -base64 32 | tr -dc 'a-zA-Z0-9!@#$%^&*()_+-=[]{}|;:,.<>?' | head -c 20) <(echo '!@1Aa') | fold -w1 | shuf | tr -d '\n')
echo "::add-mask::$SUPABASE_PASS"
echo "SUPABASE_PASS=$SUPABASE_PASS" >> $GITHUB_OUTPUT
SUPABASE_ANON_KEY=$(uds zarf tools kubectl get secret supabase-bootstrap-jwt -n leapfrogai -o jsonpath='{.data.anon-key}' | base64 -d)
echo "::add-mask::$SUPABASE_ANON_KEY"
echo "SUPABASE_ANON_KEY=$SUPABASE_ANON_KEY" >> $GITHUB_OUTPUT
- name: Verify Secrets
run: |
echo "SUPABASE_ANON_KEY is set: ${{ steps.generate_secrets.outputs.SUPABASE_ANON_KEY != '' }}"
echo "SUPABASE_PASS is set: ${{ steps.generate_secrets.outputs.SUPABASE_PASS != '' }}"
- name: Setup Text-Embeddings
run: |
make build-text-embeddings LOCAL_VERSION=e2e-test
docker image prune -af
uds zarf package deploy packages/text-embeddings/zarf-package-text-embeddings-amd64-e2e-test.tar.zst -l=trace --confirm
rm packages/text-embeddings/zarf-package-text-embeddings-amd64-e2e-test.tar.zst
- name: Run Integration Tests
env:
SUPABASE_ANON_KEY: ${{ steps.generate_secrets.outputs.SUPABASE_ANON_KEY }}
SUPABASE_PASS: ${{ steps.generate_secrets.outputs.SUPABASE_PASS }}
SUPABASE_EMAIL: [email protected]
SUPABASE_URL: https://supabase-kong.uds.dev
# Turn off NIAH tests that are not applicable for integration testing using the Repeater model
LFAI_RUN_NIAH_TESTS: "false"
run: |
make test-user-pipeline
env $(cat .env | xargs) python -m pytest -v -s tests/integration/api
2 changes: 2 additions & 0 deletions .gitignore
Original file line number Diff line number Diff line change
Expand Up @@ -20,6 +20,8 @@ build/
**/*.whl
.model/
*.gguf
.env.password
.env.email
.env
.ruff_cache
.branches
Expand Down
3 changes: 2 additions & 1 deletion Makefile
Original file line number Diff line number Diff line change
Expand Up @@ -16,7 +16,8 @@ help: ## Display this help information
| sort | awk 'BEGIN {FS = ":.*?## "}; \
{printf "\033[36m%-30s\033[0m %s\n", $$1, $$2}'

clean: ## Clean up all the things (packages, build dirs, compiled .whl files, python eggs)
clean: ## Clean up all the things (test artifacts, packages, build dirs, compiled .whl files, python eggs)
-rm -rf .env .env.email .env.password .pytest_cache
-rm -rf .logs
-rm zarf-package-*.tar.zst
-rm packages/**/zarf-package-*.tar.zst
Expand Down
6 changes: 3 additions & 3 deletions packages/repeater/Dockerfile
Original file line number Diff line number Diff line change
Expand Up @@ -26,11 +26,11 @@ ENV PATH="/leapfrogai/.venv/bin:$PATH"
WORKDIR /leapfrogai

COPY --from=builder /leapfrogai/.venv/ /leapfrogai/.venv/

COPY packages/repeater/repeater.py .
COPY packages/repeater/main.py .
COPY packages/repeater/config.yaml .

# Publish port
EXPOSE 50051:50051

# Run the repeater model
ENTRYPOINT ["python", "-u", "repeater.py"]
ENTRYPOINT ["python", "-m", "leapfrogai_sdk.cli", "--app-dir=.", "main:Model"]
14 changes: 14 additions & 0 deletions packages/repeater/config.yaml
Original file line number Diff line number Diff line change
@@ -0,0 +1,14 @@
# Model configuration for the Repeater test backend.
# NOTE: for testing purposes only — these values are not actually used by
# Repeater; presumably they exist to satisfy the SDK's config loader
# (TODO confirm against leapfrogai_sdk).
model:
  source: "."
# Effectively unlimited context so no test prompt is ever truncated.
max_context_length: 10000000000000
stop_tokens:
  - "</s>"
# "{}" placeholders suggest each role's text is inserted as-is — verify
# against the SDK's prompt-formatting code.
prompt_format:
  chat:
    system: "{}"
    assistant: "{}"
    user: "{}"
defaults:
  top_p: 1.0
  top_k: 0
71 changes: 71 additions & 0 deletions packages/repeater/main.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,71 @@
import logging
import os
from typing import Any, AsyncGenerator

from leapfrogai_sdk import (
CompletionServiceServicer,
EmbeddingsServiceServicer,
ChatCompletionServiceServicer,
ChatCompletionStreamServiceServicer,
AudioServicer,
GrpcContext,
EmbeddingRequest,
EmbeddingResponse,
Embedding,
AudioRequest,
AudioResponse,
NameResponse,
serve,
)
from leapfrogai_sdk.llm import LLM, GenerationConfig

# Configure root logging once at import time. The level may be overridden
# through the LFAI_LOG_LEVEL environment variable (defaults to INFO).
logging.basicConfig(
    level=os.getenv("LFAI_LOG_LEVEL", logging.INFO),
    format="%(name)s: %(asctime)s | %(levelname)s | %(filename)s:%(lineno)s >>> %(message)s",
)
# Module-level logger used by the Model servicer below.
logger = logging.getLogger(__name__)


@LLM
class Model(
    CompletionServiceServicer,
    EmbeddingsServiceServicer,
    ChatCompletionServiceServicer,
    ChatCompletionStreamServiceServicer,
    AudioServicer,
):
    """Test double ("repeater") model backend.

    Streams its prompt back verbatim for completion/chat requests and
    returns fixed stub values for embeddings, transcription, and name
    lookups, so the surrounding gRPC plumbing can be exercised without
    a real model.
    """

    async def generate(
        self, prompt: str, config: GenerationConfig
    ) -> AsyncGenerator[str, Any]:
        """Stream the prompt back, one character per yielded chunk."""
        logger.info("Begin generating streamed response")
        for piece in iter(prompt):
            yield piece  # type: ignore
        logger.info("Streamed response complete")

    async def count_tokens(self, raw_text: str) -> int:
        """Token-count stub: one "token" per character of input."""
        return len(raw_text)

    async def CreateEmbedding(
        self,
        request: EmbeddingRequest,
        context: GrpcContext,
    ) -> EmbeddingResponse:
        """Return a single fixed 10-dimensional zero embedding vector."""
        zero_vector = [0.0] * 10
        return EmbeddingResponse(
            embeddings=[Embedding(embedding=zero_vector)]
        )

    async def Transcribe(
        self, request: AudioRequest, context: GrpcContext
    ) -> AudioResponse:
        """Return a canned transcription response (ignores the audio)."""
        return AudioResponse(
            text="The repeater model received a transcribe request",
            duration=1,
            language="en",
        )

    async def Name(self, request, context):
        """Report the model's name for service discovery."""
        return NameResponse(name="repeater")


# Direct-run entry point; starts the SDK's gRPC server with this model.
# (The package's Dockerfile instead launches the model via
# `python -m leapfrogai_sdk.cli`, so this path is mainly for local runs.)
if __name__ == "__main__":
    serve(Model())
Loading

0 comments on commit d32bd72

Please sign in to comment.