Represents a Language Model (LM) which acts as a high-level interface to various underlying LM providers.

abort
abort: (() => Promise<void>)
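A minimal sketch of cancelling an in-flight request with abort, assuming an already-constructed instance named lm of this class; the temperature field passed as InferenceParams and the idea that the pending infer promise settles once aborted are assumptions, not documented behavior.

// Start an inference without awaiting it, then cancel it after five seconds.
const run = lm.infer("Write a long story about a lighthouse.", { temperature: 0.8 }); // field name is an assumption
setTimeout(() => {
  lm.abort().then(() => console.log("inference aborted"));
}, 5000);
await run; // assumed to settle once the request has been aborted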
api
api: {
    addHeader: ((key: string, val: string) => void);
    csrfToken: (() => null | string);
    del: (<T_4>(uri: string, verbose?: boolean) => Promise<ApiResponse<T_4>>);
    get: (<T>(uri: string, verbose?: boolean) => Promise<ApiResponse<T>>);
    hasCsrfCookie: (() => boolean);
    onResponse: ((hook: OnResponseHook) => void);
    patch: (<T_3>(uri: string, payload: any[] | Record<string, any>, verbose?: boolean) => Promise<ApiResponse<T_3>>);
    post: (<T_1>(uri: string, payload: any[] | Record<string, any> | FormData, multipart?: boolean, verbose?: boolean) => Promise<ApiResponse<T_1>>);
    postSse: (<T_5>(uri: string, payload: any[] | Record<string, any> | FormData, onChunk: ((payload: T_5) => void), abortController: AbortController, parseJson?: boolean, multipart?: boolean, verbose?: boolean, debug?: boolean) => Promise<void>);
    put: (<T_2>(uri: string, payload: any[] | Record<string, any>, verbose?: boolean) => Promise<ApiResponse<T_2>>);
    removeHeader: ((key: string) => void);
    setCsrfToken: ((token: string) => void);
    setCsrfTokenFromCookie: ((verbose?: boolean) => boolean);
}
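The api object is the low-level HTTP client the class uses to talk to the provider server, handy for endpoints that have no dedicated wrapper. A small sketch, assuming an instance lm; the "/v1/models" path and the Authorization header are placeholders, and the fields of ApiResponse are not listed in this reference.

// Send a custom header and hit a provider endpoint directly.
lm.api.addHeader("Authorization", "Bearer my-token");
const res = await lm.api.get<Record<string, any>>("/v1/models"); // placeholder path
console.log(res); // an ApiResponse<Record<string, any>>; inspect its shape for your provider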
Optional
defaults
defaults?: LmDefaults
infer
infer: ((prompt: string, params: InferenceParams, parseJson?: boolean, parseJsonFunc?: ((data: string) => Record<string, any>)) => Promise<InferenceResult>)
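A minimal sketch of a blocking completion with infer, assuming an instance lm whose model is already loaded; the temperature and max_tokens field names are assumptions about InferenceParams, and the parseJson flag is shown with its assumed meaning of parsing JSON out of the raw completion.

// Plain text completion.
const result = await lm.infer(
  "List three uses for a Raspberry Pi.",
  { temperature: 0.2, max_tokens: 256 }, // field names are assumptions
);
console.log(result); // an InferenceResult; its exact fields are not listed here

// Ask for the output to be parsed as JSON (assumed semantics of the third argument).
const parsed = await lm.infer('Return {"ok": true} as JSON.', { temperature: 0 }, true);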
info
info: (() => Promise<Record<string, any>>)
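info returns whatever metadata the active provider reports; the keys of the record depend on the provider. A one-line sketch, assuming an instance lm:

const serverInfo = await lm.info();
console.log(Object.keys(serverInfo)); // provider-specific keys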
loadModel
loadModel: ((name: string, ctx?: number, threads?: number, gpu_layers?: number) => Promise<void>)
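A small sketch of loading a model before inference, assuming an instance lm; the model name is a placeholder, and the idea that the model property reflects the loaded model afterwards is an assumption.

// Load a model with an 8192-token context and 32 GPU layers, leaving threads at the default.
await lm.loadModel("mistral-7b-instruct.Q4_K_M.gguf", 8192, undefined, 32); // placeholder name
console.log(lm.model); // assumed to describe the model that was just loaded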
model
model: ModelConf = ...
models
models: ModelConf[] = ...
modelsInfo
modelsInfo: (() => Promise<void>)
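modelsInfo resolves to void, so it is assumed to populate the models array in place. A sketch, assuming an instance lm; the name field read from ModelConf is an assumption.

await lm.modelsInfo();
for (const m of lm.models) {
  console.log(m.name); // assumed ModelConf field
}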
Optional
onEndEmit
onEndEmit?: ((result: InferenceResult) => void)
Optional
onError
onError?: ((err: string) => void)
Optional
onStartEmit
onStartEmit?: ((data: IngestionStats) => void)
Optional
onToken
onToken?: ((t: string) => void)
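Judging from their signatures, the four optional callbacks stream progress around an inference: onStartEmit receives IngestionStats when generation begins, onToken receives each generated token, onEndEmit receives the final InferenceResult, and onError receives an error message. A sketch that assigns them after construction, assuming an instance lm and that late assignment is supported since these are plain optional properties; the temperature field is again an assumption.

lm.onStartEmit = (stats: IngestionStats) => console.log("prompt ingested", stats);
lm.onToken = (t: string) => process.stdout.write(t);
lm.onEndEmit = (result: InferenceResult) => console.log("\ndone", result);
lm.onError = (err: string) => console.error("inference failed:", err);
await lm.infer("Tell me a short joke.", { temperature: 0.5 });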
provider
provider: LmProvider
providerType
providerType: LmProviderType
serverUrl
serverUrl: string
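The provider is selected through providerType and reached at serverUrl. The constructor is not part of this excerpt, so the following construction sketch is only an assumption built from the properties above; the Lm class name, the @locallm/api package name, the options-object shape, and the "llamacpp" provider string are all assumptions.

import { Lm } from "@locallm/api"; // package and class names are assumptions

const lm = new Lm({
  providerType: "llamacpp",           // assumed LmProviderType value
  serverUrl: "http://localhost:8080",
  onToken: (t) => process.stdout.write(t),
});
console.log(lm.providerType, lm.serverUrl);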