Represents a Language Model (LM) that acts as a high-level interface to the various underlying LM providers.

Implements

  • LmProvider

Constructors

  • Constructs a new Lm instance for the specified provider using the given parameters.

    Parameters

    • params: LmParams

      The parameters for initializing the LM.

    Returns Lm

    Throws an error if an unknown provider type is specified or if required parameters for a provider are missing.
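
    A minimal construction sketch. The import path and the LmParams field names shown below (providerType, serverUrl, onToken) are assumptions that mirror the properties of the resulting instance; check the LmParams interface for the exact shape.

        import { Lm } from "@locallm/api"; // import path is an assumption

        // Assumed LmParams fields; adjust to the actual interface.
        const lm = new Lm({
          providerType: "koboldcpp",          // an LmProviderType value (assumed)
          serverUrl: "http://localhost:5001", // base URL of the provider server
          onToken: (t: string) => process.stdout.write(t), // streaming token callback
        });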

Properties

abort: (() => Promise<void>)
api: {
    addHeader: ((key: string, val: string) => void);
    csrfToken: (() => null | string);
    del: (<T>(uri: string, verbose?: boolean) => Promise<ApiResponse<T>>);
    get: (<T>(uri: string, verbose?: boolean) => Promise<ApiResponse<T>>);
    hasCsrfCookie: (() => boolean);
    onResponse: ((hook: OnResponseHook) => void);
    patch: (<T>(uri: string, payload: any[] | Record<string, any>, verbose?: boolean) => Promise<ApiResponse<T>>);
    post: (<T>(uri: string, payload: any[] | Record<string, any> | FormData, multipart?: boolean, verbose?: boolean) => Promise<ApiResponse<T>>);
    postSse: (<T>(uri: string, payload: any[] | Record<string, any> | FormData, onChunk: ((payload: T) => void), abortController: AbortController, parseJson?: boolean, multipart?: boolean, verbose?: boolean, debug?: boolean) => Promise<void>);
    put: (<T>(uri: string, payload: any[] | Record<string, any>, verbose?: boolean) => Promise<ApiResponse<T>>);
    removeHeader: ((key: string) => void);
    setCsrfToken: ((token: string) => void);
    setCsrfTokenFromCookie: ((verbose?: boolean) => boolean);
}
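
The api property is a small typed HTTP client, presumably bound to the instance's serverUrl. A usage sketch; the endpoint path below is purely illustrative, not a documented route.

    // `lm` is an Lm instance created as in the constructor example above.
    // Hypothetical endpoint, used only to illustrate the typed helpers.
    const res = await lm.api.get<Record<string, any>>("/api/extra/version");
    console.log(res);

    // Add or remove a header applied to subsequent requests.
    lm.api.addHeader("Authorization", `Bearer ${lm.apiKey}`);
    lm.api.removeHeader("Authorization");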
apiKey: string
defaults?: LmDefaults
infer: ((prompt: string, params: InferenceParams, parseJson?: boolean, parseJsonFunc?: ((data: string) => Record<string, any>)) => Promise<InferenceResult>)
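
A plain inference sketch; the InferenceParams fields used here (temperature, max_tokens) are assumptions about that interface.

    // `lm` is an Lm instance created as in the constructor example above.
    const result = await lm.infer(
      "List three prime numbers as a JSON array.",
      { temperature: 0.2, max_tokens: 128 }, // assumed InferenceParams fields
      true,                                  // parseJson: try to parse the model output as JSON
    );
    console.log(result); // InferenceResult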
info: (() => Promise<Record<string, any>>)
loadModel: ((name: string, ctx?: number, threads?: number, gpu_layers?: number) => Promise<void>)
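
A loadModel sketch; the model name and numeric values are placeholders.

    // `lm` is an Lm instance created as in the constructor example above.
    await lm.loadModel("mistral-7b-instruct", 8192 /* ctx */, 4 /* threads */, 35 /* gpu_layers */);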
model: ModelConf = ...
models: ModelConf[] = ...
modelsInfo: (() => Promise<void>)
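
A sketch assuming modelsInfo() refreshes the local model list from the provider; this behaviour is inferred from the signatures, not documented here.

    // `lm` is an Lm instance created as in the constructor example above.
    await lm.modelsInfo();   // assumption: populates `models` from the provider
    console.log(lm.models);  // ModelConf[]
    console.log(lm.model);   // the currently selected ModelConf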
name: string
onEndEmit?: ((result: InferenceResult) => void)
onError?: ((err: string) => void)
onStartEmit?: ((data: IngestionStats) => void)
onToken?: ((t: string) => void)
provider: LmProvider
providerType: LmProviderType
serverUrl: string
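
A streaming and cancellation sketch. Assigning the callbacks after construction and the InferenceParams field shown are assumptions; the timeout value is illustrative.

    // `lm` is an Lm instance created as in the constructor example above.
    lm.onToken = (t) => process.stdout.write(t); // called for each streamed token
    lm.onError = (err) => console.error(err);

    // Start an inference and cancel it after two seconds.
    const run = lm.infer("Write a short poem about the sea.", { temperature: 0.8 }); // assumed field
    setTimeout(() => { void lm.abort(); }, 2000);
    try {
      await run;
    } catch {
      // the pending inference may reject once aborted (assumption)
    }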