Skip to content
Open
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
4 changes: 0 additions & 4 deletions py/samples/prompt_demo/prompts/_shared_partial.prompt

This file was deleted.

3 changes: 3 additions & 0 deletions py/samples/prompt_demo/prompts/_style.prompt
Original file line number Diff line number Diff line change
@@ -0,0 +1,3 @@
{{ role "system" }}
You should speak as if you are a {{#if personality}}{{personality}}{{else}}pirate{{/if}}.
{{role "user"}}
1 change: 0 additions & 1 deletion py/samples/prompt_demo/prompts/dot.name.test.prompt

This file was deleted.

8 changes: 0 additions & 8 deletions py/samples/prompt_demo/prompts/hello.prompt

This file was deleted.

1 change: 0 additions & 1 deletion py/samples/prompt_demo/prompts/hello.variant.prompt

This file was deleted.

9 changes: 0 additions & 9 deletions py/samples/prompt_demo/prompts/nested/nested_hello.prompt

This file was deleted.

18 changes: 18 additions & 0 deletions py/samples/prompt_demo/prompts/recipe.prompt
Original file line number Diff line number Diff line change
@@ -0,0 +1,18 @@
---
model: googleai/gemini-pro
input:
schema:
food: string
ingredients?(array): string
output:
schema: Recipe
---

You are a chef famous for making creative recipes that can be prepared in 45 minutes or less.

Generate a recipe for {{food}}.

{{#if ingredients}}
Make sure to include the following ingredients:
{{list ingredients}}
{{/if}}
17 changes: 17 additions & 0 deletions py/samples/prompt_demo/prompts/recipe.robot.prompt
Original file line number Diff line number Diff line change
@@ -0,0 +1,17 @@
---
model: googleai/gemini-pro
input:
schema:
food: string
output:
schema:
title: string, recipe title
ingredients(array):
name: string
quantity: string
steps(array, the steps required to complete the recipe): string
---

You are a robot chef famous for making creative recipes that robots love to eat. Robots love things like motor oil, RAM, bolts, and uranium.

Generate a recipe for {{food}}.
12 changes: 12 additions & 0 deletions py/samples/prompt_demo/prompts/story.prompt
Original file line number Diff line number Diff line change
@@ -0,0 +1,12 @@
---
model: googleai/gemini-pro
input:
schema:
subject: string
personality?: string
output:
format: text
---
{{>style personality=personality}}

Tell me a story about {{subject}}.
161 changes: 104 additions & 57 deletions py/samples/prompt_demo/src/main.py
Original file line number Diff line number Diff line change
Expand Up @@ -14,13 +14,14 @@
#
# SPDX-License-Identifier: Apache-2.0

import asyncio
import weakref
from pathlib import Path

import structlog
from pydantic import BaseModel
from pydantic import BaseModel, Field

from genkit.ai import Genkit
from genkit.core.action import ActionRunContext
from genkit.plugins.google_genai import GoogleAI

logger = structlog.get_logger(__name__)
Expand All @@ -32,95 +33,141 @@
# Initialize Genkit with the GoogleAI plugin, a default model, and the on-disk
# prompt directory (prompts_path is computed earlier in the file — collapsed in
# this view).
ai = Genkit(plugins=[GoogleAI()], model='googleai/gemini-3-flash-preview', prompt_dir=prompts_path)


def my_helper(content, *_, **__):
    """Template helper: wrap *content* in triple asterisks.

    A list argument is collapsed to its first element; an empty list
    becomes the empty string. Extra positional/keyword arguments from the
    template engine are accepted and ignored.
    """
    value = content
    if isinstance(value, list):
        if value:
            value = value[0]
        else:
            value = ''
    return f'*** {value} ***'
def list_helper(data, *args, **kwargs):
    """Template helper: render a list as one `- item` bullet per line.

    Anything that is not a list renders as the empty string. Extra
    arguments from the template engine are accepted and ignored.
    """
    if isinstance(data, list):
        bullets = [f'- {item}' for item in data]
        return '\n'.join(bullets)
    return ''


# Register the helpers with Genkit so prompt templates can call them,
# e.g. `{{list ingredients}}` in recipe.prompt.
ai.define_helper('my_helper', my_helper)
ai.define_helper('list', list_helper)


class OutputSchema(BaseModel):
    """Structured output for the three-greetings prompt: one greeting per style."""

    short: str
    friendly: str
    like_a_pirate: str
class Ingredient(BaseModel):
    """A single recipe ingredient (name plus quantity), matching the
    `ingredients` entries declared in the recipe prompt schemas."""

    name: str
    quantity: str


@ai.flow(name='simplePrompt')
async def simple_prompt(input: str = ''):
    """Flow that asks the model for a plain greeting; *input* is unused."""
    response = await ai.generate(prompt='You are a helpful AI assistant named Walt, say hello')
    return response
class Recipe(BaseModel):
    """Structured recipe output; mirrors the `Recipe` schema referenced by
    `output.schema` in recipe.prompt."""

    title: str = Field(..., description='recipe title')
    ingredients: list[Ingredient]
    steps: list[str] = Field(..., description='the steps required to complete the recipe')


@ai.flow(name='simpleTemplate')
async def simple_template(input: str = ''):
    """Flow demonstrating Python-side string templating; *input* is unused."""
    who = 'Fred'
    prompt_text = f'You are a helpful AI assistant named Walt. Say hello to {who}.'
    return await ai.generate(prompt=prompt_text)
# HACK: register the Recipe model in dotprompt's private schema table so that
# `schema: Recipe` in recipe.prompt resolves to this Pydantic model. Guarded
# with hasattr because `_schemas` is a private attribute that may not exist.
# TODO(review): prefer a public schema-registration API if one is available.
if hasattr(ai.registry.dotprompt, '_schemas'):
    ai.registry.dotprompt._schemas['Recipe'] = Recipe

# Strong references to loaded prompts, keyed by 'name' or 'name:variant'.
_sticky_prompts = {}

# Inline dotprompt that greets the templated {{name}}.
hello_dotprompt = ai.define_prompt(
    input_schema={'name': str},
    prompt='You are a helpful AI assistant named Walt. Say hello to {{name}}',
)

class NameInput(BaseModel):
    """Input model carrying a name with a default.

    NOTE(review): in the pasted diff these two lines sat interleaved inside
    get_sticky_prompt's body (a diff artifact that breaks the function); the
    class is hoisted out here so both definitions are valid Python.
    """

    name: str = 'Fred'


async def get_sticky_prompt(name: str, variant: str | None = None):
    """Load a prompt by name (and optional variant) and keep it alive.

    The registry may hand back a weak reference; this dereferences it,
    retries the load once if the referent has been collected, and caches
    the resulting strong reference in `_sticky_prompts`.

    Args:
        name: Prompt name as registered from the prompt directory.
        variant: Optional prompt variant (e.g. 'robot').

    Returns:
        The resolved prompt object.

    Raises:
        RuntimeError: if the prompt cannot be resolved even after a retry.
    """
    key = f'{name}:{variant}' if variant else name
    if key in _sticky_prompts:
        return _sticky_prompts[key]

    prompt = await ai.prompt(name, variant=variant)
    if isinstance(prompt, weakref.ReferenceType):
        prompt = prompt()
    if prompt is None:
        # Stale weak reference: retry loading the prompt once.
        prompt = await ai.prompt(name, variant=variant)
        if isinstance(prompt, weakref.ReferenceType):
            prompt = prompt()
        if prompt is None:
            raise RuntimeError(f"Failed to load prompt '{name}' with variant '{variant}' after retry.")

    # Store a strong reference so the prompt is not collected between calls.
    _sticky_prompts[key] = prompt
    return prompt

@ai.flow(name='simpleDotprompt')
async def simple_dotprompt(input: NameInput):
    """Flow that renders hello_dotprompt for the given name."""
    payload = {'name': input.name}
    return await hello_dotprompt(input=payload)

class ChefInput(BaseModel):
    """Input for the chef flows: the food to generate a recipe for."""

    food: str

# Prompt that asks for one greeting per style declared by OutputSchema.
three_greetings_prompt = ai.define_prompt(
    input_schema={'name': str},
    output_schema=OutputSchema,
    prompt='You are a helpful AI assistant named Walt. Say hello to {{name}}, write a response for each of the styles requested',
)

@ai.flow(name='threeGreetingsPrompt')
async def three_greetings(input: str = 'Fred') -> OutputSchema:
    """Run three_greetings_prompt and return its structured output.

    NOTE(review): in the pasted diff these lines sat interleaved inside
    chef_flow's body (a diff artifact); the flow is hoisted out here so
    both definitions are valid Python.
    """
    response = await three_greetings_prompt(input={'name': input})
    return response.output


@ai.flow(name='chef_flow')
async def chef_flow(input: ChefInput) -> Recipe:
    """Generate a recipe for `input.food` via the file-based recipe.prompt.

    Args:
        input: The food to generate a recipe for.

    Returns:
        The model output validated into a Recipe instance.
    """
    await logger.ainfo(f'chef_flow called with input: {input}')
    recipe_prompt = await get_sticky_prompt('recipe')
    # NOTE(review): mutates private attributes of the cached prompt object,
    # presumably to force JSON output against the Recipe schema and pin the
    # model — confirm there is no public API for this before keeping it.
    recipe_prompt._output_format = 'json'
    recipe_prompt._output_schema = Recipe
    recipe_prompt._model = 'googleai/gemini-3-flash-preview'

    response = await recipe_prompt(input={'food': input.food})
    # Ensure we return a Pydantic model as expected by the type hint and caller.
    result = Recipe.model_validate(response.output)
    await logger.ainfo(f'chef_flow result: {result}')
    return result


@ai.flow(name='robot_chef_flow')
async def robot_chef_flow(input: ChefInput) -> Recipe:
    """Generate a robot-themed recipe using the 'robot' variant of recipe.prompt.

    Args:
        input: The food to generate a recipe for.

    Returns:
        The model output validated into a Recipe instance.
    """
    await logger.ainfo(f'robot_chef_flow called with input: {input}')
    recipe_prompt = await get_sticky_prompt('recipe', variant='robot')
    # NOTE(review): mutates private attributes of the cached prompt object,
    # presumably to force JSON output against the Recipe schema and pin the
    # model — confirm there is no public API for this before keeping it.
    recipe_prompt._output_format = 'json'
    recipe_prompt._output_schema = Recipe
    recipe_prompt._model = 'googleai/gemini-3-flash-preview'
    # Split the dense one-liner into explicit steps for readability,
    # matching chef_flow's style.
    response = await recipe_prompt(input={'food': input.food})
    result = Recipe.model_validate(response.output)
    await logger.ainfo(f'robot_chef_flow result: {result}')
    return result


class StoryInput(BaseModel):
    """Input for tell_story: a subject plus an optional narrator personality
    (falls back to the prompt's default when omitted)."""

    subject: str
    personality: str | None = None


@ai.flow(name='tell_story')
async def tell_story(input: StoryInput, ctx: ActionRunContext) -> str:
    """Stream a story about `input.subject` narrated per `input.personality`.

    Chunks are forwarded to the caller via `ctx.send_chunk` as they arrive;
    the concatenated text is returned once the stream is exhausted.

    Args:
        input: Story subject and optional personality.
        ctx: Action context used to emit streaming chunks.

    Returns:
        The full story text.
    """
    await logger.ainfo(f'tell_story called with input: {input}')
    story_prompt = await get_sticky_prompt('story')
    story_prompt._model = 'googleai/gemini-3-flash-preview'
    story_prompt._output_format = None
    # The aggregate-response future is unused; only the stream is consumed
    # (renamed from `response` to `_response` to mark it as intentionally unused).
    stream, _response = story_prompt.stream(input={'subject': input.subject, 'personality': input.personality})

    full_text = ''
    async for chunk in stream:
        if chunk.text:
            ctx.send_chunk(chunk.text)
            full_text += chunk.text

    await logger.ainfo(f'tell_story completed, returning length: {len(full_text)}')
    return full_text


async def main():
    """Demo driver: verifies prompts loaded from disk, then runs each sample flow.

    NOTE(review): this body appears to interleave statements from two versions
    of the file (a diff-paste artifact) — e.g. the old simple* / hello demos
    alternate with the new chef/story demos. Confirm against the real file
    before relying on the exact statement order.
    """
    # List registered actions to verify prompt loading.

    actions = ai.registry.list_serializable_actions()

    # File-based prompts register under keys starting with /prompt/ or
    # /executable-prompt/.
    prompts = [key for key in actions.keys() if key.startswith(('/prompt/', '/executable-prompt/'))]

    await logger.ainfo('Registry Status', total_actions=len(actions), loaded_prompts=prompts)

    if not prompts:
        await logger.awarning('No prompts found! Check directory structure.')
        return

    # Execute the 'hello' prompt directly.
    hello_prompt = await ai.prompt('hello')
    response = await hello_prompt(input={'name': 'Genkit User'})

    await logger.ainfo('Prompt Execution Result', text=response.text)

    res = await simple_prompt()
    await logger.ainfo('Flow: simplePrompt', text=res.text)
    # Chef flow: structured Recipe output from recipe.prompt.
    await logger.ainfo('--- Running Chef Flow ---')
    chef_result = await chef_flow(ChefInput(food='banana bread'))
    await logger.ainfo('Chef Flow Result', result=chef_result.model_dump())

    res = await simple_template()
    await logger.ainfo('Flow: simpleTemplate', text=res.text)
    # Robot chef flow: the 'robot' prompt variant.
    # NOTE(review): logged without .model_dump(), unlike chef_result above.
    await logger.ainfo('--- Running Robot Chef Flow ---')
    robot_result = await robot_chef_flow(ChefInput(food='cookie'))
    await logger.ainfo('Robot Chef Flow Result', result=robot_result)

    res = await simple_dotprompt(NameInput(name='Fred'))
    await logger.ainfo('Flow: simpleDotprompt', text=res.text)
    # Tell-story flow: consume the flow's stream directly to demonstrate
    # streaming output.
    await logger.ainfo('--- Running Tell Story Flow ---')
    story_stream, _ = tell_story.stream(StoryInput(subject='a brave little toaster', personality='courageous'))

    res = await three_greetings()
    await logger.ainfo('Flow: threeGreetingsPrompt', output=res)

    async for chunk in story_stream:
        print(chunk, end='', flush=True)

    # Call one of the prompts just to validate everything is hooked up properly
    res = await hello_dotprompt(input={'name': 'Bob'})
    await logger.ainfo('Prompt: hello_dotprompt', text=res.text)
    print()  # Newline after stream
    await logger.ainfo('Tell Story Flow Completed')


if __name__ == '__main__':
Expand Down
Loading