Feat: Add method to add multiple outputs to a prompt in AIConfig #689

Merged
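For context, here is a minimal usage sketch of the `add_outputs` method this PR introduces; it is not part of the diff below. The `Prompt` and `ExecuteResult` construction mirrors the tests in this PR, while the import paths, the config name, and the `AIConfigRuntime.create` call are illustrative assumptions.

# Illustrative only: import paths, config name, and create() call are assumptions, not part of this PR.
from aiconfig import AIConfigRuntime
from aiconfig.schema import ExecuteResult, Prompt, PromptMetadata

config = AIConfigRuntime.create("demo_config")  # hypothetical config
prompt = Prompt(
    name="GreetingPrompt",
    input="Hello, how are you?",
    metadata=PromptMetadata(model="fakemodel"),
)
config.add_prompt(prompt.name, prompt)

outputs = [
    ExecuteResult(
        output_type="execute_result",
        execution_count=0,
        data={"role": "assistant", "content": f"output {i}"},
        metadata={"finish_reason": "stop"},
    )
    for i in range(2)
]

# Default (overwrite=False): the new outputs are appended to the prompt's output list.
config.add_outputs("GreetingPrompt", outputs)

# overwrite=True: the prompt's existing outputs are replaced with the new list.
config.add_outputs("GreetingPrompt", outputs, overwrite=True)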
23 changes: 23 additions & 0 deletions python/src/aiconfig/schema.py
@@ -696,6 +696,29 @@ def add_output(self, prompt_name: str, output: Output, overwrite: bool = False):
        else:
            prompt.outputs.append(output)

    def add_outputs(self, prompt_name: str, outputs: List[Output], overwrite: bool = False):
        """
        Add multiple outputs to the prompt with the given name in the AIConfig.

        Args:
            prompt_name (str): The name of the prompt to add the outputs to.
            outputs (List[Output]): List of outputs to add.
            overwrite (bool, optional): Overwrites the existing outputs if True. Otherwise appends the outputs to the prompt's output list. Defaults to False.
        """
        prompt = self.get_prompt(prompt_name)
        if not prompt:
            raise IndexError(
                f"Cannot add outputs. Prompt '{prompt_name}' not found in config."
            )
        if not outputs:
            raise IndexError(
                f"Cannot add outputs. No outputs provided for prompt '{prompt_name}'."
            )
        if overwrite:
            prompt.outputs = outputs
        else:
            prompt.outputs.extend(outputs)

    def delete_output(self, prompt_name: str):
        """
        Deletes the outputs for the prompt with the given prompt_name.
91 changes: 91 additions & 0 deletions python/tests/test_programmatically_create_an_AIConfig.py
@@ -587,6 +587,97 @@ def test_add_output_existing_prompt_no_overwrite(ai_config_runtime: AIConfigRuntime):

    assert ai_config_runtime.get_latest_output("GreetingPrompt") == None

def test_add_outputs_existing_prompt_no_overwrite(ai_config_runtime: AIConfigRuntime):
"""Test adding outputs to an existing prompt without overwriting."""
base_result = ExecuteResult(
output_type="execute_result",
execution_count=0.0,
data={"role": "assistant", "content": "base output"},
metadata={"finish_reason": "stop"},)
    prompt1 = Prompt(
        name="GreetingPrompt",
        input="Hello, how are you?",
        metadata=PromptMetadata(model="fakemodel"),
        outputs=[base_result],
    )
    ai_config_runtime.add_prompt(prompt1.name, prompt1)

    assert ai_config_runtime.get_latest_output("GreetingPrompt") == base_result

    test_result1 = ExecuteResult(
        output_type="execute_result",
        execution_count=0.0,
        data={"role": "assistant", "content": "test output 1"},
        metadata={"finish_reason": "stop"},
    )

    test_result2 = ExecuteResult(
        output_type="execute_result",
        execution_count=0.0,
        data={"role": "assistant", "content": "test output 2"},
        metadata={"finish_reason": "stop"},
    )
    ai_config_runtime.add_outputs("GreetingPrompt", [test_result1, test_result2])

    assert ai_config_runtime.get_latest_output("GreetingPrompt") == test_result2
    assert prompt1.outputs == [base_result, test_result1, test_result2]

def test_add_outputs_existing_prompt_with_overwrite(ai_config_runtime: AIConfigRuntime):
"""Test adding outputs to an existing prompt with overwriting."""
base_result = ExecuteResult(
output_type="execute_result",
execution_count=0.0,
data={"role": "assistant", "content": "base output"},
metadata={"finish_reason": "stop"},)
prompt1 = Prompt(
name="GreetingPrompt",
input="Hello, how are you?",
metadata=PromptMetadata(model="fakemodel"),
outputs=[base_result],
)
ai_config_runtime.add_prompt(prompt1.name, prompt1)

assert ai_config_runtime.get_latest_output("GreetingPrompt") == base_result

test_result1 = ExecuteResult(
output_type="execute_result",
execution_count=0.0,
data={"role": "assistant", "content": "test output 1"},
metadata={"finish_reason": "stop"},
)

test_result2 = ExecuteResult(
output_type="execute_result",
execution_count=0.0,
data={"role": "assistant", "content": "test output 2"},
metadata={"finish_reason": "stop"},
)
ai_config_runtime.add_outputs("GreetingPrompt", [test_result1, test_result2], True)

assert ai_config_runtime.get_latest_output("GreetingPrompt") == test_result2
assert prompt1.outputs == [test_result1, test_result2]

def test_add_empty_outputs_to_prompt(ai_config_runtime: AIConfigRuntime):
"""Test for adding an empty output to an existing prompt with/without overwriting using add_ouputs."""
    prompt1 = Prompt(
        name="GreetingPrompt",
        input="Hello, how are you?",
        metadata=PromptMetadata(model="fakemodel"),
    )
    ai_config_runtime.add_prompt(prompt1.name, prompt1)
    assert ai_config_runtime.get_latest_output("GreetingPrompt") == None
    # Case 1: No outputs, no overwrite
    with pytest.raises(
        Exception,
        match=r"Cannot add outputs. No outputs provided for prompt 'GreetingPrompt'.",
    ):
        ai_config_runtime.add_outputs("GreetingPrompt", [])
    # Case 2: No outputs, with overwrite
    with pytest.raises(
        Exception,
        match=r"Cannot add outputs. No outputs provided for prompt 'GreetingPrompt'.",
    ):
        ai_config_runtime.add_outputs("GreetingPrompt", [], True)

def test_extract_override_settings(ai_config_runtime: AIConfigRuntime):
    initial_settings = {"topP": 0.9}