LmParams

Parameters for initializing a language model.

interface LmParams {
    apiKey?: string;
    defaults?: LmDefaults;
    onEndEmit?: ((result) => void);
    onError?: ((err) => void);
    onStartEmit?: ((data) => void);
    onToken?: ((t) => void);
    providerType: LmProviderType;
    serverUrl: string;
}
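
A minimal construction sketch is shown below. The import path, the local Ollama endpoint, and the use of Node's process.stdout are assumptions for illustration, not requirements of the interface.

import type { LmParams } from "@locallm/types"; // adjust to wherever LmParams is exported in your setup

const params: LmParams = {
    providerType: "ollama",                  // "koboldcpp" | "ollama" | "goinfer"
    serverUrl: "http://localhost:11434",     // assumed local Ollama endpoint
    onToken: (t) => process.stdout.write(t), // stream tokens as they arrive
    onError: (err) => console.error("inference error:", err),
};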

Properties

apiKey?: string

Optional API key for authentication.

defaults?: LmDefaults

Default settings.

onEndEmit?: ((result) => void)

Callback triggered when inference ends.

Type declaration

    • (result): void
    • Parameters

      • result

      Returns void

onError?: ((err) => void)

Callback triggered on errors.

Type declaration

    • (err): void
    • Parameters

      • err: string

      Returns void

onStartEmit?: ((data) => void)

Callback triggered when inference starts.

Type declaration

    • (data): void
    • Parameters

      • data

      Returns void

onToken?: ((t) => void)

Callback triggered when a new token is received.

Type declaration

    • (t): void
    • Parameters

      • t: string

      Returns void
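
Because t is the raw token string, a typical handler appends it to a buffer while streaming it to the user. The sketch below uses Node's process.stdout and is purely illustrative.

let text = "";

const onToken = (t: string): void => {
    text += t;                // accumulate the full response incrementally
    process.stdout.write(t);  // echo the token without adding a newline
};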

providerType: LmProviderType

The type of provider to use: "koboldcpp", "ollama", or "goinfer".

serverUrl: string

The URL endpoint for the LM service.
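
Because the lifecycle is callback-based, a common pattern is to wrap the callbacks in a Promise so callers can await the complete text. The sketch below is an assumption about typical usage: the import path is illustrative, and the resulting params object still has to be handed to whatever provider actually runs the inference.

import type { LmParams, LmProviderType } from "@locallm/types"; // illustrative import path

function makeParams(providerType: LmProviderType, serverUrl: string): {
    params: LmParams;
    done: Promise<string>;
} {
    let text = "";
    let resolve!: (value: string) => void;
    let reject!: (reason: string) => void;
    const done = new Promise<string>((res, rej) => {
        resolve = res;
        reject = rej;
    });
    const params: LmParams = {
        providerType,
        serverUrl,
        onToken: (t) => { text += t; },  // accumulate streamed tokens
        onEndEmit: () => resolve(text),  // resolve with the full text when inference ends
        onError: (err) => reject(err),   // surface provider errors as a rejection
    };
    return { params, done };
}

A caller would pass params to the provider of their choice and then await done to obtain the accumulated text.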