getResponseAsync method
Implementation

The web implementation joins the accumulated query chunks into a single prompt, passes it to the JS llmInference object's generateResponse call, and forwards each partial result through a StreamController. The stream is closed, and the controller discarded, once the engine reports that generation is complete.
@override
Stream<String> getResponseAsync() {
  _controller = StreamController<String>();

  // Concatenate the accumulated query chunks into a single prompt string.
  final String fullPrompt = _queryChunks.join("");

  // Ask the underlying JS inference engine for a response, forwarding each
  // partial result to the stream as it arrives.
  llmInference.generateResponse(
    fullPrompt.toJS,
    ((JSString partialJs, JSAny completeRaw) {
      // parseBool() normalizes the JS completion flag into a Dart bool.
      final complete = completeRaw.parseBool();
      final partial = partialJs.toDart;
      _controller?.add(partial);
      if (complete) {
        // Don't add the response back to _queryChunks; InferenceChat handles that.
        _controller?.close();
        _controller = null;
      }
    }).toJS,
  );

  return _controller!.stream;
}
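
For context, here is a minimal sketch of how the returned stream might be consumed; the collectResponse helper and the way the stream is obtained are illustrative assumptions, not part of the package's API:

import 'dart:async';

// Illustrative helper (not part of the package): collects the partial
// chunks emitted by getResponseAsync() into the full reply.
Future<String> collectResponse(Stream<String> responseStream) async {
  final buffer = StringBuffer();
  await for (final partial in responseStream) {
    // Each event is one partial chunk of the model's reply.
    buffer.write(partial);
  }
  // The stream closes when the engine signals completion, so the loop
  // exits once the reply is fully assembled.
  return buffer.toString();
}

Because the controller is created per call and nulled out on completion, each invocation of getResponseAsync() yields a fresh single-subscription stream; callers should not reuse a stream across requests.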