Prompts
Prompt vs String
We recommend using our Prompt class to create prompts, as it provides many extra features and capabilities over a simple string.
The main advantage is that as features and formatting for AI models improve over time, the Prompt class keeps your project's formatting consistent.
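As a rough comparison (a minimal sketch using only the Prompt methods shown later on this page; exact rendered spacing may differ), the same content can be written as a raw string or built with the Prompt class, which handles the formatting for you:

from dandy.llm.prompt import Prompt

# A plain string works, but you are responsible for the formatting yourself:
plain_prompt = '# Car Generator\nI would like you to create me a new type of car.'

# The Prompt class produces the same structure and keeps the formatting consistent:
prompt = (
    Prompt()
    .title('Car Generator')
    .text('I would like you to create me a new type of car.')
)

print(prompt.to_str())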
Creating a Prompt
Creating a new prompt is simple and can be done in multiple ways, each with its own pros and cons.
Structured Style Prompt
This method is best for prompts that are complex and can be statically typed.
from dandy.llm.prompt import Prompt
prompt = (
    Prompt()
    .title('Car Generator')
    .line_break()
    .heading('Instructions')
    .text('I would like you to create me a new type of car.')
    .line_break()
    .heading('Rules')
    .list([
        'The car should be fast',
        'The car should be safe',
        'The car should be fun to drive',
    ])
)
print(prompt.to_str())
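Printing this prompt should produce output roughly like the following (based on the formatting shown in the Prompt Formatting section below; exact spacing may differ):

# Car Generator

## Instructions
I would like you to create me a new type of car.

## Rules
- The car should be fast
- The car should be safe
- The car should be fun to drive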
Dynamic Style Prompt
This method is best for prompts that are complex or need values injected into them at runtime.
from dandy.llm.prompt import Prompt
CAR_RULES = [
    'The car should be fast',
    'The car should be safe',
    'The car should be fun to drive',
]
prompt = Prompt()
prompt.title('Car Generator')
prompt.line_break()
prompt.heading('Instructions')
prompt.text('I would like you to create me a new type of car.')
prompt.line_break()
prompt.heading('Rules')
prompt.list(CAR_RULES)
print(prompt.to_str())
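Because each call mutates the same Prompt instance, you can inject content at runtime, for example based on a flag or on data loaded elsewhere. A minimal sketch, reusing CAR_RULES from above (the include_safety_notes flag is illustrative only, not part of the Dandy API):

include_safety_notes = True  # illustrative runtime flag

prompt = Prompt()
prompt.title('Car Generator')
prompt.heading('Rules')
prompt.list(CAR_RULES)

# Only add the extra section when the flag is set.
if include_safety_notes:
    prompt.line_break()
    prompt.heading('Safety Notes')
    prompt.text('Prioritize passenger safety over top speed.')

print(prompt.to_str())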
String Style Prompt
This method is best for prompts that are simple and do not need structured formatting.
from dandy.llm.prompt import Prompt
prompt = Prompt("""
# Car Generator
## Instructions
I would like you to create me a new type of car.
## Rules
- The car should be fast
- The car should be safe
- The car should be fun to drive
""")
print(prompt.to_str())
Prompt Formatting
There are many different types of formatting that can be used to create prompts.
from dandy.llm.prompt import Prompt
from dandy.intel import BaseIntel
class PersonIntel(BaseIntel):
    name: str
    age: int
person_intel = PersonIntel(name='John', age=30)
another_prompt = (
    Prompt()
    .text('Hello from another prompt')
)
new_prompt = (
    Prompt()
    .dict(dictionary={'key': 'value'})
    .divider()
    .array(items=['item1', 'item2'])
    .array_random_order(items=['item1', 'item2'])
    .file(file_path='docs/tutorials/prompt_test_document.md')
    .heading(heading='Heading Followed by a line break')
    .line_break()
    .list(items=['item1 after a line break', 'item2'])
    .intel(intel=person_intel)
    .intel_schema(intel_class=PersonIntel)
    .module_source(module_name='dandy.llm.bot.llm_bot')
    .ordered_list(items=['item1', 'item2'])
    .prompt(prompt=another_prompt)
    .random_choice(choices=['choice1', 'choice2'])
    .sub_heading(sub_heading='Sub Heading')
    .text('Hello World')
    .title(title='Title')
    .unordered_list(items=['item1', 'item2'])
    .unordered_random_list(items=['item1', 'item2'])
)
print(new_prompt.to_str())
{
  "key": "value"
}
----------
[
  "item1",
  "item2"
]
[
  "item2",
  "item1"
]
# Hello From the Prompt Information Test File
Hello World in the test file
## Heading Followed by a line break
- item1 after a line break
- item2
{
  "name": "John",
  "age": 30
}
{
  "properties": {
    "name": {
      "title": "Name",
      "type": "string"
    },
    "age": {
      "title": "Age",
      "type": "integer"
    }
  },
  "required": [
    "name",
    "age"
  ],
  "title": "PersonIntel",
  "type": "object"
}
""" dandy.llm.bot.llm_bot
from abc import ABC
from pathlib import Path
from pydantic.main import IncEx
from typing_extensions import Type, Generic, Union, List
from dandy.core.future import AsyncFuture
from dandy.core.utils import encode_file_to_base64
from dandy.intel import BaseIntel
from dandy.intel.type_vars import IntelType
from dandy.llm.conf import llm_configs
from dandy.llm.intel import DefaultLlmIntel
from dandy.llm.processor.llm_processor import BaseLlmProcessor
from dandy.llm.prompt import Prompt
from dandy.llm.service.config.options import LlmConfigOptions
class BaseLlmBot(BaseLlmProcessor, ABC, Generic[IntelType]):
    config: str = 'DEFAULT'
    config_options: LlmConfigOptions = LlmConfigOptions()
    instructions_prompt: Prompt = Prompt("You're a helpful assistant please follow the users instructions.")
    intel_class: Type[BaseIntel] = DefaultLlmIntel
    @classmethod
    def process_prompt_to_intel(
            cls,
            prompt: Union[Prompt, str],
            intel_class: Union[Type[IntelType], None] = None,
            intel_object: Union[IntelType, None] = None,
            images: Union[List[str], None] = None,
            image_files: Union[List[str | Path], None] = None,
            include_fields: Union[IncEx, None] = None,
            exclude_fields: Union[IncEx, None] = None,
            postfix_system_prompt: Union[Prompt, None] = None
    ) -> IntelType:
        if intel_class is None and intel_object is None:
            intel_class = cls.intel_class
        if image_files:
            images = [] if images is None else images
            for image_file in image_files:
                images.append(encode_file_to_base64(image_file))
        system_prompt = Prompt()
        system_prompt.prompt(cls.instructions_prompt)
        if postfix_system_prompt:
            system_prompt.line_break()
            system_prompt.prompt(postfix_system_prompt)
        return llm_configs[cls.config].generate_service(
            llm_options=cls.config_options
        ).process_prompt_to_intel(
            prompt=prompt if isinstance(prompt, Prompt) else Prompt(prompt),
            intel_class=intel_class,
            intel_object=intel_object,
            images=images,
            include_fields=include_fields,
            exclude_fields=exclude_fields,
            system_prompt=system_prompt
        )
    @classmethod
    def process_to_future(cls, *args, **kwargs) -> AsyncFuture[IntelType]:
        return AsyncFuture[IntelType](cls.process, *args, **kwargs)
class LlmBot(BaseLlmBot, Generic[IntelType]):
    intel_class: Type[BaseIntel] = DefaultLlmIntel
    @classmethod
    def process(
            cls,
            prompt: Union[Prompt, str],
            intel_class: Union[Type[IntelType], None] = None,
            intel_object: Union[IntelType, None] = None,
            images: Union[List[str], None] = None,
            image_files: Union[List[str | Path], None] = None,
            include_fields: Union[IncEx, None] = None,
            exclude_fields: Union[IncEx, None] = None,
            postfix_system_prompt: Union[Prompt, None] = None
    ) -> IntelType:
        return cls.process_prompt_to_intel(
            prompt=prompt,
            intel_class=intel_class or cls.intel_class,
            intel_object=intel_object,
            images=images,
            image_files=image_files,
            include_fields=include_fields,
            exclude_fields=exclude_fields,
            postfix_system_prompt=postfix_system_prompt
        )
"""
1. item1
2. item2
Hello from another prompt
choice2
### Sub Heading
Hello World
# Title
- item1
- item2
- item2
- item1
Tip
Check out the Prompt and Snippets API documentation for more information on all the possibilities.
Advanced Prompts
Let's make a function that returns a dynamically constructed prompt based on the function arguments.
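A minimal sketch of what such a function might look like (the build_vehicle_prompt name and its arguments are illustrative only, not part of the Dandy API):

from dandy.llm.prompt import Prompt

def build_vehicle_prompt(vehicle_type: str, rules: list[str]) -> Prompt:
    # Illustrative helper: builds a structured prompt from the caller's arguments.
    return (
        Prompt()
        .title(f'{vehicle_type.title()} Generator')
        .line_break()
        .heading('Instructions')
        .text(f'I would like you to create me a new type of {vehicle_type}.')
        .line_break()
        .heading('Rules')
        .list(rules)
    )

print(build_vehicle_prompt('boat', ['The boat should float', 'The boat should be fast']).to_str())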