LlmInferenceOptions constructor
LlmInferenceOptions({
  required LlmInferenceBaseOptions baseOptions,
  int maxTokens = 1024,
  int randomSeed = 1,
  int topK = 1,
  double? topP,
  double temperature = 1.0,
  JSInt32Array? supportedLoraRanks,
  String? loraPath,
})
Implementation
external factory LlmInferenceOptions({
  required LlmInferenceBaseOptions baseOptions,
  int maxTokens = 1024,
  int randomSeed = 1,
  int topK = 1,
  double? topP,
  double temperature = 1.0,
  JSInt32Array? supportedLoraRanks,
  String? loraPath,
});
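Below is a minimal usage sketch of constructing these options. It assumes the options classes are imported from the package that declares them (shown here as package:mediapipe_genai/mediapipe_genai.dart), that LlmInferenceBaseOptions takes a modelAssetPath named parameter, and that the asset paths are placeholders; the JSInt32Array value is produced from an Int32List via the toJS extension in dart:js_interop.

import 'dart:js_interop';
import 'dart:typed_data';

// Assumed import; use whatever package actually declares these classes.
import 'package:mediapipe_genai/mediapipe_genai.dart';

void main() {
  // Base options pointing at the on-device model file; the modelAssetPath
  // parameter name and the file name are illustrative assumptions.
  final baseOptions = LlmInferenceBaseOptions(
    modelAssetPath: 'assets/gemma-2b-it-gpu-int4.bin',
  );

  final options = LlmInferenceOptions(
    baseOptions: baseOptions,
    maxTokens: 512,      // upper bound on tokens handled per request
    topK: 40,            // sample from the 40 most likely tokens
    temperature: 0.8,    // < 1.0 makes sampling more conservative
    randomSeed: 42,      // fixed seed for reproducible sampling
    // Optional LoRA configuration; ranks are passed as a JS typed array.
    supportedLoraRanks: Int32List.fromList([4, 8]).toJS,
    loraPath: 'assets/lora_weights.bin',
  );

  // `options` can then be passed to the LLM inference task when it is created.
}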