dandy.cli.llm.generate.generate

LlmBotSourceIntel

Bases: BaseIntel

file_name instance-attribute — name of the file the generated bot source is written to.

source instance-attribute — the generated bot's Python source code.
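
Together these two fields carry everything generate needs to persist a result. A minimal sketch of constructing one by hand, assuming BaseIntel supports keyword construction (as pydantic-style models typically do); the field values are illustrative only:

# Hypothetical example; assumes BaseIntel accepts keyword arguments.
intel = LlmBotSourceIntel(
    file_name='changelog_bot.py',
    source='# generated bot source goes here\n',
)
print(intel.file_name)  # changelog_bot.py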

GenerateChoices

Bases: Enum

LLM_BOT = 'llm_bot' class-attribute
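
Because GenerateChoices is a standard Enum, a raw string from the command line can be mapped back to a member by value; a quick sketch using only the declaration shown above:

# Standard Enum value lookup; no assumptions beyond the declaration.
assert GenerateChoices('llm_bot') is GenerateChoices.LLM_BOT
assert GenerateChoices.LLM_BOT.value == 'llm_bot'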

generate

Source code in dandy/cli/llm/generate/generate.py
from pathlib import Path
from typing import Union

# Note: this listing omits the module's own imports. BaseLlmConfig and the
# generate_llm_bot_* prompt helpers come from elsewhere in dandy, while
# GenerateChoices and LlmBotSourceIntel are defined above in this module.
def generate(
    llm_config: BaseLlmConfig,
    choice: GenerateChoices,
    output_path: Union[Path, str],
    generate_description: Union[str, None] = None,
    output_to_file: bool = True,
) -> None:
    # Prompt interactively when no description was supplied.
    if generate_description is None:
        generate_description = input('Describe what you want to generate: ')

    if choice == GenerateChoices.LLM_BOT:
        print(f'Generating {choice} ... depending on your llm configuration this may take up to a couple minutes')

        # Ask the configured LLM service to map the user and system prompts
        # onto a structured LlmBotSourceIntel instance.
        llm_bot_source_intel = llm_config.service._process_map_prompt_to_intel(
            prompt=generate_llm_bot_user_prompt(generate_description),
            intel_class=LlmBotSourceIntel,
            system_prompt=generate_llm_bot_system_prompt(),
        )

        if llm_bot_source_intel:
            if output_to_file:
                # Ensure the output directory exists before writing.
                Path(output_path).mkdir(parents=True, exist_ok=True)

                with open(Path(output_path, llm_bot_source_intel.file_name), 'w') as f:
                    f.write(llm_bot_source_intel.source)

                print(f'Done ... saved to "{Path(output_path, llm_bot_source_intel.file_name)}"')

            else:
                # Print the generated source instead of writing it to disk.
                print(llm_bot_source_intel.source)

        else:
            print('Failed to generate ... try again')
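
A hedged example of calling generate from your own code; my_llm_config is a placeholder for whatever configured BaseLlmConfig instance your dandy setup provides, and the description string is illustrative:

# Hypothetical invocation with a placeholder config instance.
generate(
    llm_config=my_llm_config,
    choice=GenerateChoices.LLM_BOT,
    output_path='generated_bots',
    generate_description='A bot that writes release notes from git commits',
)
# With output_to_file=True (the default), the generated module is written
# to generated_bots/<file_name>, as reported by the "Done ..." message.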