createChat method

Future<InferenceChat> createChat({
  double temperature = .8,
  int randomSeed = 1,
  int topK = 1,
  double? topP,
  int tokenBuffer = 256,
  String? loraPath,
  bool? supportImage,
  List<Tool> tools = const [],
  bool? supportsFunctionCalls,
  bool isThinking = false,
  ModelType? modelType,
})

override

Implementation
/// Builds a new [InferenceChat], stores it in [chat], runs its session
/// initialization, and returns it.
///
/// The sampling parameters ([temperature], [randomSeed], [topK], [topP])
/// and [loraPath] are forwarded to [createSession] each time the chat
/// (re)creates its underlying session. Nullable flags fall back to
/// conservative defaults (`false`), and [modelType] falls back to this
/// instance's configured model type.
@override
Future<InferenceChat> createChat({
  double temperature = .8,
  int randomSeed = 1,
  int topK = 1,
  double? topP,
  int tokenBuffer = 256,
  String? loraPath,
  bool? supportImage,
  List<Tool> tools = const [],
  bool? supportsFunctionCalls,
  bool isThinking = false,
  ModelType? modelType,
}) async {
  // Resolve the nullable flag once; it drives both the session's vision
  // modality and the chat's image support.
  final visionEnabled = supportImage ?? false;

  final newChat = InferenceChat(
    sessionCreator: () => createSession(
      temperature: temperature,
      randomSeed: randomSeed,
      topK: topK,
      topP: topP,
      loraPath: loraPath,
      enableVisionModality: visionEnabled,
    ),
    maxTokens: maxTokens,
    tokenBuffer: tokenBuffer,
    supportImage: visionEnabled,
    supportsFunctionCalls: supportsFunctionCalls ?? false,
    tools: tools,
    modelType: modelType ?? this.modelType,
    isThinking: isThinking,
    fileType: fileType,
  );

  // Publish the chat on the instance before initializing, matching the
  // original field-then-init ordering.
  chat = newChat;
  await newChat.initSession();
  return newChat;
}