LocalLm types documentation

    Interface InferenceOptions

    Options for inference requests.

    Example

    const inferenceOptions: InferenceOptions = {
        debug: true,
        tools: [weatherTool],
        history: [
            { user: "Hello", assistant: "Hi there!" }
        ],
        system: "You are a helpful assistant."
    };
    interface InferenceOptions {
        assistant?: string;
        debug?: boolean;
        history?: HistoryTurn[];
        onToolCall?: (tc: ToolCallSpec) => void;
        onToolCallEnd?: (tr: any) => void;
        system?: string;
        tools?: ToolSpec[];
        verbose?: boolean;
    }

    Properties

    assistant?: string

    Assistant message to include in the context.

    debug?: boolean

    Enable debug mode for detailed logging.

    history?: HistoryTurn[]

    Conversation history to include in the inference.
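
    Each HistoryTurn pairs a user message with the assistant reply it received, as in the example above. A minimal sketch of building a history array (the exchange text is illustrative):

    const history: HistoryTurn[] = [
        { user: "What is the capital of France?", assistant: "Paris." },
        { user: "And of Italy?", assistant: "Rome." }
    ];

    const options: InferenceOptions = {
        history,
        system: "You are a helpful assistant."
    };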

    onToolCall?: (tc: ToolCallSpec) => void

    Callback invoked when the model requests a tool call.

    onToolCallEnd?: (tr: any) => void

    Callback invoked when a tool call finishes, receiving the tool result.
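
    A minimal sketch of wiring both callbacks to observe tool usage during inference; the logging is illustrative, and the result passed to onToolCallEnd is whatever the tool returns:

    const options: InferenceOptions = {
        tools: [weatherTool],
        onToolCall: (tc: ToolCallSpec) => {
            // Fired when the model requests a tool call
            console.log("Tool call requested:", tc);
        },
        onToolCallEnd: (tr) => {
            // Fired once the tool call has been executed and a result is available
            console.log("Tool result:", tr);
        }
    };
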
    system?: string

    System message to set the context for the conversation.

    tools?: ToolSpec[]

    Array of available tools for the conversation.

    verbose?: boolean

    Enable verbose output.