Skip to content

Commit

Permalink
Merge pull request #14 from SamBroomy/version-bump
Browse files Browse the repository at this point in the history
Version bump
  • Loading branch information
SamBroomy authored Dec 10, 2024
2 parents c3fbd78 + 2347d85 commit 4481573
Show file tree
Hide file tree
Showing 9 changed files with 132 additions and 77 deletions.
5 changes: 1 addition & 4 deletions .github/workflows/publish.yml
Original file line number Diff line number Diff line change
@@ -1,8 +1,5 @@
---
name: CI Pipeline
# git commit -m "Release v0.1.0"
# git tag v0.1.0
# git push origin main v0.1.0

on:
push:
Expand Down Expand Up @@ -85,7 +82,7 @@ jobs:
with:
python-version: "3.12"

- name: Bump Version, Build, Publish
- name: Build, Publish
run: |
uvx --from rust-just just publish-package
Expand Down
73 changes: 38 additions & 35 deletions README.md
Original file line number Diff line number Diff line change
@@ -1,3 +1,6 @@
[![PyPI version](https://badge.fury.io/py/typed-prompt.svg)](https://badge.fury.io/py/typed-prompt)
[![PyPI - Downloads](https://img.shields.io/pypi/dm/typed-prompt)](https://pypi.org/project/typed-prompt/)

# typed-prompt

A type-safe, validated prompt management system for LLMs that catches errors early, enforces type safety, and provides a structured way to manage prompts.
Expand Down Expand Up @@ -48,35 +51,7 @@ class UnusedVarPrompt(BasePrompt[UserVars]):
variables: UserVars
```

### 2. Custom Configuration

```python
from typed_prompt import RenderOutput
from pydantic import BaseModel, Field


class MyConfig(BaseModel):
temperature: float = Field(default=0.7, ge=0, le=2)
model: str = Field(default="gpt-4")

class MyPrompt(BasePrompt[UserVars]):
"""Assistant for {{name}}"""
prompt_template: str = "Help with {{topic}}"
variables: UserVars
config: MyConfig = Field(default_factory=MyConfig)

def render(self, *, topic: str, **extra_vars) -> RenderOutput:
extra_vars["topic"] = topic
return super().render(**extra_vars)

# Use custom config
prompt = MyPrompt(
variables=UserVars(name="Alice", expertise="intermediate"),
config=MyConfig(temperature=0.9, model="gpt-3.5-turbo")
)
```

### 3. Conditional Templates
### 2. Conditional Templates

```python
from typing import Union
Expand Down Expand Up @@ -107,6 +82,34 @@ class ConditionalPrompt(BasePrompt[TemplateVars]):
return super().render(**extra_vars)
```

### 3. LLM configuration defined with the template

```python
from typed_prompt import RenderOutput
from pydantic import BaseModel, Field


class MyConfig(BaseModel):
temperature: float = Field(default=0.7, ge=0, le=2)
model: str = Field(default="gpt-4")

class MyPrompt(BasePrompt[UserVars]):
"""Assistant for {{name}}"""
prompt_template: str = "Help with {{topic}}"
variables: UserVars
config: MyConfig = Field(default_factory=MyConfig)

def render(self, *, topic: str, **extra_vars) -> RenderOutput:
extra_vars["topic"] = topic
return super().render(**extra_vars)

# Use custom config
prompt = MyPrompt(
variables=UserVars(name="Alice", expertise="intermediate"),
config=MyConfig(temperature=0.9, model="gpt-3.5-turbo")
)
```

> **Note**: Using `None` as a value for optional variables will render as `None` in the prompt.
> e.g. `Test example {{var}}` will render as `Test example None` if `var` is `None`.
> This is the default behaviour of Jinja.
Expand Down Expand Up @@ -233,13 +236,14 @@ For complex prompts, you can load templates from external files:

```python
class ComplexPrompt(BasePrompt[ComplexVariables]):
with open("templates/system_prompt.j2") as f:
system_prompt_template = f.read()
system_prompt_template = Path("templates/system_prompt.j2").read_text()

prompt_template: str = Path("templates/user_prompt.j2").read_text()

with open("templates/user_prompt.j2") as f:
prompt_template = f.read()
```

> **Note**: With templating engines like Jinja2, you can normally hot reload templates, but this is not supported in typed-prompt as the templates are validated at class definition time.
## API Reference

### BasePrompt[T]
Expand Down Expand Up @@ -284,8 +288,7 @@ Structure your templates for maximum readability and maintainability:
2. **Separate Complex Templates**: For longer templates, use external files:

```python
with open("templates/system_prompt.j2") as f:
system_prompt_template = f.read()
system_prompt_template = Path("templates/system_prompt.j2").read_text()
```

## Common Patterns
Expand Down
6 changes: 3 additions & 3 deletions examples/user.py
Original file line number Diff line number Diff line change
@@ -1,6 +1,6 @@
from pydantic import BaseModel, Field

from typed_prompt import BasePrompt, RenderOutput
from typed_prompt import BasePrompt, RenderedOutput


# Example 1: Basic Prompt with Custom Configuration
Expand All @@ -27,7 +27,7 @@ class ChatPrompt(BasePrompt[ChatVariables]):
variables: ChatVariables
config: ChatConfig = Field(default_factory=ChatConfig)

def render(self, *, topic: str, **extra_vars) -> RenderOutput:
def render(self, *, topic: str, **extra_vars) -> RenderedOutput:
extra_vars["topic"] = topic
return super().render(**extra_vars)

Expand Down Expand Up @@ -115,7 +115,7 @@ class CodeReviewPrompt(BasePrompt[ReviewVariables]):

def render(
self, *, code_snippet: str, specific_concerns: str | None = None, review_depth: str = "detailed", **extra_vars
) -> RenderOutput:
) -> RenderedOutput:
extra_vars.update({
"code_snippet": code_snippet,
"specific_concerns": specific_concerns,
Expand Down
52 changes: 49 additions & 3 deletions justfile
Original file line number Diff line number Diff line change
Expand Up @@ -22,6 +22,45 @@ publish:
[group('ci')]
publish-package: build publish

# Bump the project version (major/minor/patch chosen from commit-message
# markers), propagate it to every nested pyproject.toml, commit, tag and push.
[group('ci')]
bump:
    #! /bin/bash
    set -euo pipefail

    # Print the `version = "X.Y.Z"` value from the given pyproject.toml.
    project_version() {
        # BUG FIX: the minor component previously lacked a '+'
        # ('[0-9]\.'), so any version with a multi-digit minor
        # (e.g. 0.12.3) was silently never matched.
        grep -E '^version = "[0-9]+\.[0-9]+\.[0-9]+"$' "${1:?}" | head -n 1 | awk '{print $3}' | tr -d '"'
    }

    # Decide the bump level from the CI commit message:
    # "(MAJOR)" -> major, "(MINOR)" -> minor, anything else -> patch.
    BRANCH=${CI_MERGE_REQUEST_SOURCE_BRANCH_NAME:-$CI_COMMIT_BRANCH}
    echo "BRANCH '$BRANCH'."
    MAJOR_RE='\(MAJOR\)'
    MINOR_RE='\(MINOR\)'
    if [[ "$CI_COMMIT_MESSAGE" =~ $MAJOR_RE ]]; then
        bump=major
    elif [[ "$CI_COMMIT_MESSAGE" =~ $MINOR_RE ]]; then
        bump=minor
    else
        bump=patch
    fi

    # Make sure we are on the target branch and tracking its remote.
    git fetch --all
    git checkout -B "$BRANCH"
    git branch --set-upstream-to="origin/$BRANCH"

    # Bump the root project first, then force every nested pyproject.toml to
    # the same new version so the whole workspace stays in lockstep.
    ROOT_VERSION=$(project_version ./pyproject.toml)
    uvx bump-my-version bump --current-version "$ROOT_VERSION" $bump ./pyproject.toml
    NEW_VERSION=$(project_version ./pyproject.toml)

    find . -mindepth 2 -type f -name pyproject.toml | while read -r pyproject_file; do
        CURRENT_VERSION=$(project_version "$pyproject_file")
        uvx bump-my-version bump --current-version "$CURRENT_VERSION" --new-version "$NEW_VERSION" $bump "$pyproject_file"
    done
    uv lock

    # Commit, tag and push with [skip-ci]/ci.skip so the bump commit does not
    # retrigger the pipeline.
    # NOTE(review): the tag is pushed WITHOUT a 'v' prefix, while the release
    # workflow's comments reference tags like 'v0.1.0' — confirm the tag
    # filter in CI matches this format.
    MESSAGE="Bump version ($bump): $ROOT_VERSION -> $NEW_VERSION [skip-ci]"
    echo "$MESSAGE"
    git commit -am "$MESSAGE"
    git tag -am "$MESSAGE" "$NEW_VERSION"
    git push origin "$BRANCH" -o ci.skip
    git push origin "$NEW_VERSION" -o ci.skip

# Install uv
[group('env')]
install-uv:
Expand Down Expand Up @@ -111,8 +150,11 @@ stage-all:
git add -A

[group('git')]
@generate-commit-message:
ollama run qwen2.5-coder "'Output a very short commit message of the following diffs. Only output message text to pipe into the commit message:\n$(git diff --cached)'"
# Ask a local ollama model to draft a one-line commit message from the
# currently staged diff (summary + minimal unified diff are piped as context).
# NOTE(review): plain 'echo' does not expand '\n' under bash — confirm the
# recipe shell interprets backslash escapes, or switch to printf.
generate-commit-message:
    @(echo "Generate a concise git commit message (max 72 chars) for these changes:"; \
    echo "\n# Files changed\n\n\`\`\`\n$(git diff --cached --stat --compact-summary)\n\`\`\`\n\n"; \
    echo "\n# Detailed changes\n\n\`\`\`\n$(git diff --cached --unified=1 --minimal)\n\`\`\`\n\n") | \
    ollama run qwen2.5-coder "You are a commit message generator. Output only the commit message text in imperative mood. No formatting, code blocks, or JSON. Examples: 'Add user authentication', 'Fix memory leak in worker', 'Update API docs'.\n\n"

[group('git')]
commit-message:
Expand Down Expand Up @@ -160,7 +202,11 @@ commit m="":
Y='\033[0;33m' # Yellow
END='\033[0m' # Reset color
if [ -z "{{ m }}" ]; then
m=$(just commit-message)
# Capture both output and exit status
if ! m=$(just commit-message); then
echo -e "${R}Error: ${Y}Failed to generate commit message${END}" >&2
exit 1
fi
else
m="{{ m }}"
fi
Expand Down
2 changes: 1 addition & 1 deletion pyproject.toml
Original file line number Diff line number Diff line change
@@ -1,6 +1,6 @@
[project]
name = "typed-prompt"
version = "0.1.1"
version = "0.1.2"
description = "A simple type-safe, validated prompt management system for LLMs"
readme = "README.md"
authors = [
Expand Down
4 changes: 2 additions & 2 deletions src/typed_prompt/__init__.py
Original file line number Diff line number Diff line change
Expand Up @@ -20,6 +20,6 @@
"""

from typed_prompt import exceptions
from typed_prompt.template import BasePrompt, RenderOutput
from typed_prompt.template import BasePrompt, RenderedOutput

__all__ = ["BasePrompt", "RenderOutput", "exceptions"]
__all__ = ["BasePrompt", "RenderedOutput", "exceptions"]
39 changes: 24 additions & 15 deletions src/typed_prompt/template.py
Original file line number Diff line number Diff line change
Expand Up @@ -5,15 +5,13 @@
from typing import Any, Generic, NamedTuple, TypeVar

import jinja2
import jinja2.meta
import jinja2.nodes
from jinja2 import meta
from pydantic import BaseModel, ConfigDict
from pydantic._internal._model_construction import ModelMetaclass

from typed_prompt.exceptions import MissingVariablesError, UndeclaredVariableError, UnusedVariablesError

T = TypeVar("T", bound=BaseModel)


class PromptMeta(ModelMetaclass):
"""Metaclass for the BasePrompt class that handles template validation and compilation.
Expand Down Expand Up @@ -48,7 +46,7 @@ class UserPrompt(BasePrompt[UserVariables]):
If validation fails, clear error messages are provided:
- Missing variables: "Template uses variables not defined..."
- Unused variables: "Variables defined but not used..."
- Missing templates: "Both 'prompt_template' and variables..."
- Missing templates: "Both 'prompt_template' and a 'variables' model must be defined..."
"""

compiled_system_prompt_template: jinja2.Template | None
Expand Down Expand Up @@ -93,15 +91,15 @@ def __new__(mcs, cls_name: str, bases: tuple[type[Any], ...], namespace: dict[st
template_env: jinja2.Environment = cls._setup_template_env()
prompt_template: str = cls._get_template_string(fetch_prompt_template)
template_node = template_env.parse(prompt_template)
template_vars = meta.find_undeclared_variables(template_node)
# # Handle system prompt template,
template_vars = jinja2.meta.find_undeclared_variables(template_node)
# Handle system prompt template
fetch_system_prompt_template: str | None = namespace.get("system_prompt_template", namespace.get("__doc__"))
system_prompt_template: str = ""
system_template_vars = set()
if fetch_system_prompt_template:
system_prompt_template: str = cls._get_template_string(fetch_system_prompt_template)
system_template_node = template_env.parse(system_prompt_template)
system_template_vars = meta.find_undeclared_variables(system_template_node)
system_template_vars = jinja2.meta.find_undeclared_variables(system_template_node)
# Validate variable coverage
template_vars |= system_template_vars
variable_fields = set(variables_model.model_fields.keys())
Expand Down Expand Up @@ -155,7 +153,7 @@ def _get_template_string(template_string: str) -> str:
return dedent(template_string).strip()


class RenderOutput(NamedTuple):
class RenderedOutput(NamedTuple):
"""Structured output from prompt rendering.
This class provides named access to the rendered system and user prompts,
Expand All @@ -167,7 +165,6 @@ class RenderOutput(NamedTuple):
Example:
```python
result = prompt.render()
print(f"System: {result.system_prompt}")
print(f"User: {result.user_prompt}")
```
Expand All @@ -177,6 +174,9 @@ class RenderOutput(NamedTuple):
user_prompt: str


T = TypeVar("T", bound=BaseModel)


class BasePrompt(BaseModel, Generic[T], ABC, metaclass=PromptMeta):
"""Base class for creating type-safe, validated prompt templates.
Expand Down Expand Up @@ -226,10 +226,6 @@ def render(self, *, topic: str, **extra_vars) -> RenderOutput:
return super().render(**extra_vars)
# Usage
variables = UserVars(name="Alice", expertise="intermediate")
prompt = UserPrompt(variables=variables)
result = prompt.render(topic="Python")
```
Notes:
Expand All @@ -248,7 +244,7 @@ def render(self, *, topic: str, **extra_vars) -> RenderOutput:

model_config = ConfigDict(arbitrary_types_allowed=True, protected_namespaces=())

def render(self, **extra_vars: Any) -> RenderOutput:
def render(self, **extra_vars: Any) -> RenderedOutput:
"""Render prompt templates with provided variables.
This method combines the variables model data with any additional variables
Expand Down Expand Up @@ -283,4 +279,17 @@ def render(self, *, topic: str, difficulty: str = "intermediate", **extra_vars)
)
user_prompt = self.compiled_prompt_template.render(**context).strip()

return RenderOutput(system_prompt, user_prompt)
return RenderedOutput(system_prompt, user_prompt)

async def render_async(self, **extra_vars: Any) -> RenderedOutput:
    """Asynchronously render the system and user prompt templates.

    Mirrors ``render``: the values from ``self.variables`` are merged with
    any extra keyword arguments (extras win on key collisions), and both
    compiled templates are rendered against the combined context.

    Args:
        **extra_vars: Additional template variables merged over the values
            from ``self.variables``.

    Returns:
        RenderedOutput: The stripped system prompt (``None`` when no system
        template is defined) and the stripped user prompt.

    NOTE(review): Jinja's ``render_async`` requires the environment to be
    created with ``enable_async=True`` — confirm the template environment
    setup does so.
    """
    context: dict[str, Any] = {**self.variables.model_dump(), **extra_vars}

    if self.compiled_system_prompt_template:
        rendered_system = await self.compiled_system_prompt_template.render_async(**context)
        system_prompt = rendered_system.strip()
    else:
        system_prompt = None

    user_prompt = (await self.compiled_prompt_template.render_async(**context)).strip()

    return RenderedOutput(system_prompt, user_prompt)
Loading

0 comments on commit 4481573

Please sign in to comment.