Fix issue with "continue" feature on llama.cpp endpoints (#898)
Browse files
src/lib/server/endpoints/llamacpp/endpointLlamacpp.ts
CHANGED
@@ -110,6 +110,7 @@ export function endpointLlamacpp(
 110 			};
 111 			if (data.stop) {
 112 				stop = true;
 113 +				output.token.special = true;
 114 				reader?.cancel();
 115 			}
 116 			yield output;