Prompts
Prompt vs String
We recommend using our Prompt class to create prompts as it provides a lot of extra features and capabilities over a simple string.
The main advantage of this is that as Prompt features and formatting improve over time for AI models, you can ensure that your project uses consistent formatting.
Creating a Prompt
Creating a new prompt is simple and can be done in multiple ways, each with its own pros and cons.
Structured Style Prompt
This method is the best for creating prompts that are complex and can be statically typed.
from dandy import Prompt
prompt = (
Prompt()
.title('Car Generator')
.line_break()
.heading('Instructions')
.text('I would like you to create me a new type of car.')
.line_break()
.heading('Rules')
.list([
'The car should be fast',
'The car should be safe',
'The car should be fun to drive',
])
)
print(prompt.to_str())
Dynamic Style Prompt
This method is the best for creating prompts that are complex or need to have things injected into them.
from dandy import Prompt
CAR_RULES = [
'The car should be fast',
'The car should be safe',
'The car should be fun to drive',
]
prompt = Prompt()
prompt.title('Car Generator')
prompt.line_break()
prompt.heading('Instructions')
prompt.text('I would like you to create me a new type of car.')
prompt.line_break()
prompt.heading('Rules')
prompt.list(CAR_RULES)
print(prompt.to_str())
String Style Prompt
This method is the best for creating prompts that are simple and do not need structured formatting.
from dandy import Prompt
prompt = Prompt("""
# Car Generator
## Instructions
I would like you to create me a new type of car.
## Rules
- The car should be fast
- The car should be safe
- The car should be fun to drive
""")
print(prompt.to_str())
Prompt Formatting
There are many different types of formatting that can be used to create prompts.
from dandy import Prompt, BaseIntel
class PersonIntel(BaseIntel):
name: str
age: int
person_intel = PersonIntel(name='John', age=30)
another_prompt = (
Prompt()
.text('Hello from another prompt')
)
new_prompt = (
Prompt()
.dict(dictionary={'key': 'value'})
.divider()
.array(items=['item1', 'item2'])
.array_random_order(items=['item1', 'item2'])
.file(file_path='docs/tutorials/prompt_test_document.md')
.heading(heading='Heading Followed by a line break')
.line_break()
.list(items=['item1 after a line break', 'item2'])
.intel(intel=person_intel)
.intel_schema(intel_class=PersonIntel)
.module_source(module_name='dandy.bot.bot')
.object_source(object_module_name='dandy.bot.bot.Bot')
.ordered_list(items=['item1', 'item2'])
.prompt(prompt=another_prompt)
.random_choice(choices=['choice1', 'choice2'])
.sub_heading(sub_heading='Sub Heading')
.text('Hello World')
.title(title='Title')
.unordered_list(items=['item1', 'item2'])
.unordered_random_list(items=['item1', 'item2'])
)
print(new_prompt.to_str())
{
"key": "value"
}
----------
[
"item1",
"item2"
]
[
"item2",
"item1"
]
# Hello From the Prompt Information Test File
Hello World in the test file
## Heading Followed by a line break
- item1 after a line break
- item2
{
"name": "John",
"age": 30
}
{
"properties": {
"name": {
"title": "Name",
"type": "string"
},
"age": {
"title": "Age",
"type": "integer"
}
},
"required": [
"name",
"age"
],
"title": "PersonIntel",
"type": "object"
}
**module: dandy.bot.bot**
```python
from abc import ABC
from typing import Any, Self
from dandy.bot.recorder import record_process_wrapper
from dandy.core.future.future import AsyncFuture
from dandy.core.future.tools import process_to_future
from dandy.file.mixin import FileServiceMixin
from dandy.http.mixin import HttpServiceMixin
from dandy.intel.mixin import IntelServiceMixin
from dandy.llm.mixin import LlmServiceMixin
from dandy.llm.prompt.prompt import Prompt
class Bot(
FileServiceMixin,
LlmServiceMixin,
HttpServiceMixin,
IntelServiceMixin,
):
def __init__(
self,
llm_config: str | None = None,
llm_temperature: float | None = None,
**kwargs,
) -> None:
super().__init__(
llm_config=llm_config,
llm_temperature=llm_temperature,
**kwargs,
)
self.recorder_event_id = ''
self._recorder_called = None
for key, value in kwargs.items():
setattr(self, key, value)
self.__post_init__()
def __init_subclass__(cls) -> None:
super().__init_subclass__()
if ABC not in cls.__bases__:
# Typing Does not work properly for processors if you override __getattribute__ in the BaseProcessor class.
# This is a workaround and should be fixed in future versions of the python lsp.
def __getattribute__(self: Self, name: str) -> Any: # noqa: N807
attr = super().__getattribute__(name)
if (
name == 'process'
and callable(attr)
and not hasattr(attr, '_wrapped')
):
wrapped = record_process_wrapper(self, attr)
setattr(wrapped, '_wrapped', True)
return wrapped
return attr
cls.__getattribute__ = __getattribute__
def __post_init__(self) -> None: # noqa: B027
pass
@classmethod
def get_description(cls) -> str | None:
pass
def process(
self,
*args,
**kwargs,
) -> Any:
if len(args) >= 1 and isinstance(args[0], Prompt | str):
kwargs['prompt'] = args[0]
if 'prompt' in kwargs:
return self.llm.prompt_to_intel(**kwargs)
message = '`Bot.process` requires `prompt` as an argument.'
raise ValueError(message)
def process_to_future(self, *args, **kwargs) -> AsyncFuture:
return process_to_future(self.process, *args, **kwargs)
def reset(self) -> None:
super().reset()
```
```dandy.bot.bot.Bot
class Bot(
FileServiceMixin,
LlmServiceMixin,
HttpServiceMixin,
IntelServiceMixin,
):
def __init__(
self,
llm_config: str | None = None,
llm_temperature: float | None = None,
**kwargs,
) -> None:
super().__init__(
llm_config=llm_config,
llm_temperature=llm_temperature,
**kwargs,
)
self.recorder_event_id = ''
self._recorder_called = None
for key, value in kwargs.items():
setattr(self, key, value)
self.__post_init__()
def __init_subclass__(cls) -> None:
super().__init_subclass__()
if ABC not in cls.__bases__:
# Typing Does not work properly for processors if you override __getattribute__ in the BaseProcessor class.
# This is a workaround and should be fixed in future versions of the python lsp.
def __getattribute__(self: Self, name: str) -> Any: # noqa: N807
attr = super().__getattribute__(name)
if (
name == 'process'
and callable(attr)
and not hasattr(attr, '_wrapped')
):
wrapped = record_process_wrapper(self, attr)
setattr(wrapped, '_wrapped', True)
return wrapped
return attr
cls.__getattribute__ = __getattribute__
def __post_init__(self) -> None: # noqa: B027
pass
@classmethod
def get_description(cls) -> str | None:
pass
def process(
self,
*args,
**kwargs,
) -> Any:
if len(args) >= 1 and isinstance(args[0], Prompt | str):
kwargs['prompt'] = args[0]
if 'prompt' in kwargs:
return self.llm.prompt_to_intel(**kwargs)
message = '`Bot.process` requires `prompt` as an argument.'
raise ValueError(message)
def process_to_future(self, *args, **kwargs) -> AsyncFuture:
return process_to_future(self.process, *args, **kwargs)
def reset(self) -> None:
super().reset()
```
1. item1
2. item2
Hello from another prompt
choice1
### Sub Heading
Hello World
# Title
- item1
- item2
- item1
- item2
Tip
Check out the API Reference documentation for more information on all the possibilities.
Advanced Prompts
Let's make a function that returns a dynamically constructed prompt based on the function arguments.