Skip to content

mixin

dandy.llm.mixin

LlmServiceMixin

Bases: BaseServiceMixin

Source code in dandy/llm/mixin.py
def __init__(
    self,
    role: Prompt | str | None = None,
    task: Prompt | str | None = None,
    guidelines: Prompt | str | None = None,
    diligence: float | None = None,
    llm_config: str | None = None,
    llm_temperature: float | None = None,
    **kwargs,
) -> None:
    """Override the mixin's class-level LLM settings per instance.

    Each argument is applied only when it was actually provided with an
    acceptable type; otherwise the class-level default is left untouched.

    Args:
        role: Persona text or Prompt assigned to ``self.role``.
        task: Task text or Prompt assigned to ``self.task``.
        guidelines: Guideline text or Prompt assigned to ``self.guidelines``.
        diligence: Numeric value stored on ``self.diligence`` when given.
        llm_config: Config key string (e.g. ``'DEFAULT'``) selecting which
            LlmConfig this instance uses.
        llm_temperature: Sampling temperature forwarded to the llm options.
        **kwargs: Passed through to the next ``__init__`` in the MRO.
    """
    super().__init__(**kwargs)

    if isinstance(role, (Prompt, str)):
        self.role = role

    if isinstance(task, (Prompt, str)):
        self.task = task

    if isinstance(guidelines, (Prompt, str)):
        self.guidelines = guidelines

    # Fix: the original `isinstance(diligence, float)` silently discarded
    # integer arguments such as `diligence=1`. Accept ints and coerce to
    # float; exclude bool, which is an int subclass and surely a mistake.
    if isinstance(diligence, (int, float)) and not isinstance(diligence, bool):
        self.diligence = float(diligence)

    if isinstance(llm_config, str):
        self.llm_config = llm_config

    # Fix: likewise, integer temperatures (e.g. `llm_temperature=0`) were
    # silently ignored by the float-only isinstance check.
    if isinstance(llm_temperature, (int, float)) and not isinstance(llm_temperature, bool):
        self.llm.options.temperature = float(llm_temperature)

diligence = 1.0 — class attribute (may be overridden per instance)

llm_config = 'DEFAULT' — class attribute (may be overridden per instance)

intel_class = DefaultIntel — class attribute (may be overridden per instance)

role = 'Assistant' — class attribute (may be overridden per instance)

task = 'Provide a response based on the users request, context or instructions.' — class attribute (may be overridden per instance)

guidelines = None — class attribute (may be overridden per instance)

system_override_prompt = None — class attribute (may be overridden per instance)

llm — property

get_llm_config

Source code in dandy/llm/mixin.py
def get_llm_config(self) -> LlmConfig:
    """Build the LlmConfig identified by this instance's ``llm_config`` key."""
    config_key = self.llm_config
    return LlmConfig(config_key)

reset

Source code in dandy/llm/mixin.py
def reset(self) -> None:
    """Return this mixin to a clean state.

    Resets the inherited service state first, then the llm client, so
    both layers start fresh. Order matches the original implementation.
    """
    for target in (super(), self.llm):
        target.reset()