createChat method
Future<InferenceChat> createChat({
  double temperature = .8,
  int randomSeed = 1,
  int topK = 1,
  double? topP,
  int tokenBuffer = 256,
  String? loraPath,
  bool? supportImage,
  List<Tool> tools = const [],
  bool? supportsFunctionCalls,
  bool isThinking = false,
  ModelType? modelType,
})
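Example

A minimal usage sketch. The `model` variable standing in for the object that exposes createChat is an assumption about the surrounding API; the named parameters and their defaults come from the signature above.

// `model` is assumed to be an already-initialized inference model
// exposing this createChat method.
final chat = await model.createChat(
  temperature: 0.7, // sampling temperature (default .8)
  topK: 40,         // top-k sampling cutoff (default 1, i.e. greedy)
  supportImage: true, // enable the vision modality if the model supports it
);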
Implementation
Future<InferenceChat> createChat({
  double temperature = .8,
  int randomSeed = 1,
  int topK = 1,
  double? topP,
  int tokenBuffer = 256,
  String? loraPath,
  bool? supportImage,
  List<Tool> tools = const [],
  bool? supportsFunctionCalls,
  bool isThinking = false, // Enables thinking-mode output handling
  ModelType? modelType, // Optional model type; defaults to ModelType.gemmaIt
}) async {
  chat = InferenceChat(
    sessionCreator: () => createSession(
      temperature: temperature,
      randomSeed: randomSeed,
      topK: topK,
      topP: topP,
      loraPath: loraPath,
      enableVisionModality: supportImage ?? false,
    ),
    maxTokens: maxTokens,
    tokenBuffer: tokenBuffer,
    supportImage: supportImage ?? false,
    supportsFunctionCalls: supportsFunctionCalls ?? false,
    tools: tools,
    isThinking: isThinking, // Forward the thinking-mode flag to the chat
    modelType: modelType ?? ModelType.gemmaIt, // Use the provided model type or default
    fileType: fileType, // Pass the model's fileType through to the chat
  );
  await chat!.initSession();
  return chat!;
}
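Example: function calling

A hedged sketch of enabling tools. The Tool fields shown here (name, description, a JSON-schema-style parameters map) are assumptions made for illustration; check the Tool class documentation for its actual shape. Note that supportsFunctionCalls defaults to false when left null.

// Hypothetical tool definition; field names are assumptions.
final weatherTool = Tool(
  name: 'get_weather',
  description: 'Returns the current weather for a given city',
  parameters: {
    'type': 'object',
    'properties': {
      'city': {'type': 'string'},
    },
    'required': ['city'],
  },
);

// `model` is assumed as in the earlier example.
final chat = await model.createChat(
  tools: [weatherTool],
  supportsFunctionCalls: true, // opt in; the null default disables it
);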