createChat method

Future<InferenceChat> createChat({
  double temperature = .8,
  int randomSeed = 1,
  int topK = 1,
  double? topP,
  int tokenBuffer = 256,
  String? loraPath,
  bool? supportImage,
  List<Tool> tools = const [],
  bool? supportsFunctionCalls,
  bool isThinking = false,
  ModelType? modelType,
})

Implementation

/// Creates and initializes an [InferenceChat] bound to this model.
///
/// Sampling is configured by [temperature], [randomSeed], [topK] and [topP];
/// these are forwarded to [createSession] each time the chat (re)creates its
/// session. [tokenBuffer] is passed through to the chat's token accounting.
/// [supportImage] and [supportsFunctionCalls] default to `false` when null.
/// [modelType] falls back to [ModelType.gemmaIt] when not provided.
///
/// The created chat is stored in [chat] and returned after its session has
/// been initialized.
Future<InferenceChat> createChat({
  double temperature = .8,
  int randomSeed = 1,
  int topK = 1,
  double? topP,
  int tokenBuffer = 256,
  String? loraPath,
  bool? supportImage,
  List<Tool> tools = const [],
  bool? supportsFunctionCalls,
  bool isThinking = false,
  ModelType? modelType,
}) async {
  // Resolve the nullable flags once so both usages below stay in sync.
  final visionEnabled = supportImage ?? false;
  final functionCallsEnabled = supportsFunctionCalls ?? false;

  final newChat = InferenceChat(
    // Deferred so the chat can rebuild a fresh session with these options.
    sessionCreator: () => createSession(
      temperature: temperature,
      randomSeed: randomSeed,
      topK: topK,
      topP: topP,
      loraPath: loraPath,
      enableVisionModality: visionEnabled,
    ),
    maxTokens: maxTokens,
    tokenBuffer: tokenBuffer,
    supportImage: visionEnabled,
    supportsFunctionCalls: functionCallsEnabled,
    tools: tools,
    isThinking: isThinking,
    modelType: modelType ?? ModelType.gemmaIt, // Default model flavor.
    fileType: fileType, // NOTE(review): inherited from the owning model — confirm.
  );

  chat = newChat;
  await newChat.initSession();
  return newChat;
}