diff --git a/py/samples/prompt_demo/prompts/_shared_partial.prompt b/py/samples/prompt_demo/prompts/_shared_partial.prompt
deleted file mode 100644
index 72827d8671..0000000000
--- a/py/samples/prompt_demo/prompts/_shared_partial.prompt
+++ /dev/null
@@ -1,4 +0,0 @@
----
-model: googleai/gemini-2.5-flash
----
-This is a PARTIAL that says: {{my_helper "Partial content with helper"}}
diff --git a/py/samples/prompt_demo/prompts/_style.prompt b/py/samples/prompt_demo/prompts/_style.prompt
new file mode 100644
index 0000000000..e7c0055ca4
--- /dev/null
+++ b/py/samples/prompt_demo/prompts/_style.prompt
@@ -0,0 +1,3 @@
+{{ role "system" }}
+You should speak as if you are a {{#if personality}}{{personality}}{{else}}pirate{{/if}}.
+{{role "user"}}
\ No newline at end of file
diff --git a/py/samples/prompt_demo/prompts/dot.name.test.prompt b/py/samples/prompt_demo/prompts/dot.name.test.prompt
deleted file mode 100644
index d92138d493..0000000000
--- a/py/samples/prompt_demo/prompts/dot.name.test.prompt
+++ /dev/null
@@ -1 +0,0 @@
-Hello {{name}}, I am a dot name test!
diff --git a/py/samples/prompt_demo/prompts/hello.prompt b/py/samples/prompt_demo/prompts/hello.prompt
deleted file mode 100644
index 790c214694..0000000000
--- a/py/samples/prompt_demo/prompts/hello.prompt
+++ /dev/null
@@ -1,8 +0,0 @@
----
-model: googleai/gemini-3-flash-preview
-input:
-  schema:
-    name: string
----
-
-Hello {{name}}!
diff --git a/py/samples/prompt_demo/prompts/hello.variant.prompt b/py/samples/prompt_demo/prompts/hello.variant.prompt
deleted file mode 100644
index 508ee21263..0000000000
--- a/py/samples/prompt_demo/prompts/hello.variant.prompt
+++ /dev/null
@@ -1 +0,0 @@
-Hola {{name}}!
diff --git a/py/samples/prompt_demo/prompts/nested/nested_hello.prompt b/py/samples/prompt_demo/prompts/nested/nested_hello.prompt
deleted file mode 100644
index 546cc223e6..0000000000
--- a/py/samples/prompt_demo/prompts/nested/nested_hello.prompt
+++ /dev/null
@@ -1,9 +0,0 @@
----
-model: googleai/gemini-2.5-flash
-input:
-  schema:
-    name: string
----
-
-This is a nested prompt, hello {{name}}!
-{{> shared_partial}}
diff --git a/py/samples/prompt_demo/prompts/recipe.prompt b/py/samples/prompt_demo/prompts/recipe.prompt
new file mode 100644
index 0000000000..0d6138947c
--- /dev/null
+++ b/py/samples/prompt_demo/prompts/recipe.prompt
@@ -0,0 +1,18 @@
+---
+model: googleai/gemini-pro
+input:
+  schema:
+    food: string
+    ingredients?(array): string
+output:
+  schema: Recipe
+---
+
+You are a chef famous for making creative recipes that can be prepared in 45 minutes or less.
+
+Generate a recipe for {{food}}.
+
+{{#if ingredients}}
+Make sure to include the following ingredients:
+{{list ingredients}}
+{{/if}}
\ No newline at end of file
diff --git a/py/samples/prompt_demo/prompts/recipe.robot.prompt b/py/samples/prompt_demo/prompts/recipe.robot.prompt
new file mode 100644
index 0000000000..75c2a77c9e
--- /dev/null
+++ b/py/samples/prompt_demo/prompts/recipe.robot.prompt
@@ -0,0 +1,17 @@
+---
+model: googleai/gemini-pro
+input:
+  schema:
+    food: string
+output:
+  schema:
+    title: string, recipe title
+    ingredients(array):
+      name: string
+      quantity: string
+    steps(array, the steps required to complete the recipe): string
+---
+
+You are a robot chef famous for making creative recipes that robots love to eat. Robots love things like motor oil, RAM, bolts, and uranium.
+
+Generate a recipe for {{food}}.
\ No newline at end of file
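For reference, `recipe.prompt` points at the named `Recipe` schema (registered from a Pydantic model in `main.py` below), while `recipe.robot.prompt` spells out an equivalent shape inline in picoschema. A rough sketch of that correspondence, mirroring the models the patch itself defines:

```python
from pydantic import BaseModel, Field


class Ingredient(BaseModel):
    name: str
    quantity: str


class Recipe(BaseModel):
    title: str = Field(..., description='recipe title')
    ingredients: list[Ingredient]
    steps: list[str] = Field(..., description='the steps required to complete the recipe')


# Picoschema's inline descriptions (e.g. `title: string, recipe title`) correspond
# to Field descriptions, which surface in the generated JSON schema:
print(Recipe.model_json_schema()['properties']['title']['description'])  # recipe title
```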
diff --git a/py/samples/prompt_demo/prompts/story.prompt b/py/samples/prompt_demo/prompts/story.prompt
new file mode 100644
index 0000000000..994d810f7f
--- /dev/null
+++ b/py/samples/prompt_demo/prompts/story.prompt
@@ -0,0 +1,12 @@
+---
+model: googleai/gemini-pro
+input:
+  schema:
+    subject: string
+    personality?: string
+output:
+  format: text
+---
+{{>style personality=personality}}

+Tell me a story about {{subject}}.
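The `{{>style personality=personality}}` line includes the `_style.prompt` partial added above (partial files are prefixed with an underscore and referenced without it), so every story request is wrapped in the system/user roles the partial defines. A hypothetical direct invocation, assuming the `ai` instance configured in `main.py` below:

```python
async def demo_story() -> None:
    story = await ai.prompt('story')
    # With a personality, the partial's {{#if personality}} branch is taken.
    res = await story(input={'subject': 'dragons', 'personality': 'grumpy wizard'})
    print(res.text)
    # Without one, the system message falls back to the partial's pirate default.
    res = await story(input={'subject': 'dragons'})
    print(res.text)
```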
diff --git a/py/samples/prompt_demo/src/main.py b/py/samples/prompt_demo/src/main.py
index d11e6e7579..22d1b98d4d 100755
--- a/py/samples/prompt_demo/src/main.py
+++ b/py/samples/prompt_demo/src/main.py
@@ -14,13 +14,14 @@
 #
 # SPDX-License-Identifier: Apache-2.0

-import asyncio
+import weakref
 from pathlib import Path

 import structlog
-from pydantic import BaseModel
+from pydantic import BaseModel, Field

 from genkit.ai import Genkit
+from genkit.core.action import ActionRunContext
 from genkit.plugins.google_genai import GoogleAI

 logger = structlog.get_logger(__name__)
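The new `ActionRunContext` import supports the streaming `tell_story` flow added in the next hunk: a flow that accepts a context argument can push incremental chunks to its caller while still returning a final value. A minimal sketch of the pattern with a hypothetical flow, assuming the same `.stream()` and `send_chunk` API this patch uses:

```python
@ai.flow(name='echo_stream')  # hypothetical example flow, not part of the patch
async def echo_stream(text: str, ctx: ActionRunContext) -> str:
    for word in text.split():
        ctx.send_chunk(word)  # emitted to the caller while the flow is still running
    return text  # final result, available once the stream is drained


async def consume() -> None:
    stream, _ = echo_stream.stream('hello streaming world')
    async for chunk in stream:
        print(chunk)
```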
@@ -32,95 +33,141 @@
 ai = Genkit(plugins=[GoogleAI()], model='googleai/gemini-3-flash-preview', prompt_dir=prompts_path)


-def my_helper(content, *_, **__):
-    if isinstance(content, list):
-        content = content[0] if content else ''
-    return f'*** {content} ***'
+def list_helper(data, *args, **kwargs):
+    if not isinstance(data, list):
+        return ''
+    return '\n'.join(f'- {item}' for item in data)


-ai.define_helper('my_helper', my_helper)
+ai.define_helper('list', list_helper)


-class OutputSchema(BaseModel):
-    short: str
-    friendly: str
-    like_a_pirate: str
+class Ingredient(BaseModel):
+    name: str
+    quantity: str


-@ai.flow(name='simplePrompt')
-async def simple_prompt(input: str = ''):
-    return await ai.generate(prompt='You are a helpful AI assistant named Walt, say hello')
+class Recipe(BaseModel):
+    title: str = Field(..., description='recipe title')
+    ingredients: list[Ingredient]
+    steps: list[str] = Field(..., description='the steps required to complete the recipe')


-@ai.flow(name='simpleTemplate')
-async def simple_template(input: str = ''):
-    name = 'Fred'
-    return await ai.generate(prompt=f'You are a helpful AI assistant named Walt. Say hello to {name}.')
+if hasattr(ai.registry.dotprompt, '_schemas'):
+    ai.registry.dotprompt._schemas['Recipe'] = Recipe
+
+_sticky_prompts = {}

-hello_dotprompt = ai.define_prompt(
-    input_schema={'name': str},
-    prompt='You are a helpful AI assistant named Walt. Say hello to {{name}}',
-)

+async def get_sticky_prompt(name: str, variant: str | None = None):
+    """Load a prompt and cache a strong reference so it stays alive."""
+    key = f'{name}:{variant}' if variant else name
+    if key in _sticky_prompts:
+        return _sticky_prompts[key]

-class NameInput(BaseModel):
-    name: str = 'Fred'
+    prompt = await ai.prompt(name, variant=variant)
+    if isinstance(prompt, weakref.ReferenceType):
+        ref = prompt
+        prompt = ref()
+        if prompt is None:
+            # Stale weak reference; reload the prompt and retry once.
+            prompt = await ai.prompt(name, variant=variant)
+            if isinstance(prompt, weakref.ReferenceType):
+                prompt = prompt()
+    if prompt is None:
+        raise RuntimeError(f"Failed to load prompt '{name}' with variant '{variant}' after retry.")
+    # Keep a strong reference so the loaded prompt is not garbage collected.
+    _sticky_prompts[key] = prompt
+    return prompt


-@ai.flow(name='simpleDotprompt')
-async def simple_dotprompt(input: NameInput):
-    return await hello_dotprompt(input={'name': input.name})
+class ChefInput(BaseModel):
+    food: str


-three_greetings_prompt = ai.define_prompt(
-    input_schema={'name': str},
-    output_schema=OutputSchema,
-    prompt='You are a helpful AI assistant named Walt. Say hello to {{name}}, write a response for each of the styles requested',
-)
+@ai.flow(name='chef_flow')
+async def chef_flow(input: ChefInput) -> Recipe:
+    await logger.ainfo(f'chef_flow called with input: {input}')
+    recipe_prompt = await get_sticky_prompt('recipe')
+    recipe_prompt._output_format = 'json'
+    recipe_prompt._output_schema = Recipe
+    recipe_prompt._model = 'googleai/gemini-3-flash-preview'

-
-@ai.flow(name='threeGreetingsPrompt')
-async def three_greetings(input: str = 'Fred') -> OutputSchema:
-    response = await three_greetings_prompt(input={'name': input})
-    return response.output
+    response = await recipe_prompt(input={'food': input.food})
+    # Ensure we return a Pydantic model, as the type hint and caller expect.
+    result = Recipe.model_validate(response.output)
+    await logger.ainfo(f'chef_flow result: {result}')
+    return result
+
+
+@ai.flow(name='robot_chef_flow')
+async def robot_chef_flow(input: ChefInput) -> Recipe:
+    await logger.ainfo(f'robot_chef_flow called with input: {input}')
+    recipe_prompt = await get_sticky_prompt('recipe', variant='robot')
+    recipe_prompt._output_format = 'json'
+    recipe_prompt._output_schema = Recipe
+    recipe_prompt._model = 'googleai/gemini-3-flash-preview'
+    result = Recipe.model_validate((await recipe_prompt(input={'food': input.food})).output)
+    await logger.ainfo(f'robot_chef_flow result: {result}')
+    return result
+
+
+class StoryInput(BaseModel):
+    subject: str
+    personality: str | None = None
+
+
+@ai.flow(name='tell_story')
+async def tell_story(input: StoryInput, ctx: ActionRunContext) -> str:
+    await logger.ainfo(f'tell_story called with input: {input}')
+    story_prompt = await get_sticky_prompt('story')
+    story_prompt._model = 'googleai/gemini-3-flash-preview'
+    story_prompt._output_format = None
+    stream, response = story_prompt.stream(input={'subject': input.subject, 'personality': input.personality})
+
+    full_text = ''
+    async for chunk in stream:
+        if chunk.text:
+            ctx.send_chunk(chunk.text)
+            full_text += chunk.text
+
+    await logger.ainfo(f'tell_story completed, returning length: {len(full_text)}')
+    return full_text


 async def main():
-    # List actions to verify loading
+
     actions = ai.registry.list_serializable_actions()
-    # Filter for prompts to be specific
-    # Keys start with /prompt
+    # Filter for prompts
     prompts = [key for key in actions.keys() if key.startswith(('/prompt/', '/executable-prompt/'))]
-    await logger.ainfo('Registry Status', total_actions=len(actions), loaded_prompts=prompts)

     if not prompts:
         await logger.awarning('No prompts found! Check directory structure.')
         return

-    # Execute the 'hello' prompt
-    hello_prompt = await ai.prompt('hello')
-    response = await hello_prompt(input={'name': 'Genkit User'})
-
-    await logger.ainfo('Prompt Execution Result', text=response.text)
-
-    res = await simple_prompt()
-    await logger.ainfo('Flow: simplePrompt', text=res.text)
+    # Chef Flow
+    await logger.ainfo('--- Running Chef Flow ---')
+    chef_result = await chef_flow(ChefInput(food='banana bread'))
+    await logger.ainfo('Chef Flow Result', result=chef_result.model_dump())

-    res = await simple_template()
-    await logger.ainfo('Flow: simpleTemplate', text=res.text)
+    # Robot Chef Flow
+    await logger.ainfo('--- Running Robot Chef Flow ---')
+    robot_result = await robot_chef_flow(ChefInput(food='cookie'))
+    await logger.ainfo('Robot Chef Flow Result', result=robot_result)

-    res = await simple_dotprompt(NameInput(name='Fred'))
-    await logger.ainfo('Flow: simpleDotprompt', text=res.text)
+    # Tell Story Flow (Streaming)
+    await logger.ainfo('--- Running Tell Story Flow ---')
+    # To demonstrate streaming, consume the flow's stream directly, the way a client would.
+    story_stream, _ = tell_story.stream(StoryInput(subject='a brave little toaster', personality='courageous'))

-    res = await three_greetings()
-    await logger.ainfo('Flow: threeGreetingsPrompt', output=res)
+    async for chunk in story_stream:
+        print(chunk, end='', flush=True)

-    # Call one of the prompts just to validate everything is hooked up properly
-    res = await hello_dotprompt(input={'name': 'Bob'})
-    await logger.ainfo('Prompt: hello_dotprompt', text=res.text)
+    print()  # Newline after stream
+    await logger.ainfo('Tell Story Flow Completed')


 if __name__ == '__main__':
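A closing note on the `_sticky_prompts` cache: `get_sticky_prompt` defensively handles `ai.prompt()` handing back a weak reference, dereferencing it and pinning a strong reference so the prompt object cannot be garbage collected between flow invocations. A standalone sketch of the failure mode it guards against:

```python
import weakref


class Prompt:  # stand-in object for illustration
    pass


p = Prompt()
ref = weakref.ref(p)
assert ref() is p     # a live weak reference dereferences to the object
del p                 # drop the only strong reference
assert ref() is None  # the weak reference is now stale

# Caching the dereferenced object, as _sticky_prompts does, keeps a strong
# reference alive and prevents this staleness on later lookups.
```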