Skip to content
Snippets Groups Projects
Unverified Commit c80bf331 authored by Alex Yang's avatar Alex Yang Committed by GitHub
Browse files

fix: response.raw should be null (#705)

parent 7940d249
No related branches found
No related tags found
No related merge requests found
...@@ -56,10 +56,11 @@ ...@@ -56,10 +56,11 @@
{ {
"id": "de46b84e-7345-430f-b8fa-423354b630c9", "id": "de46b84e-7345-430f-b8fa-423354b630c9",
"response": { "response": {
"raw": {}, "raw": null,
"message": { "message": {
"content": "Hello! How can I assist you today?", "content": "Hello! How can I assist you today?",
"role": "assistant" "role": "assistant",
"options": {}
} }
} }
} }
......
...@@ -41,6 +41,7 @@ export abstract class BaseLLM< ...@@ -41,6 +41,7 @@ export abstract class BaseLLM<
}); });
return streamConverter(stream, (chunk) => { return streamConverter(stream, (chunk) => {
return { return {
raw: null,
text: chunk.delta, text: chunk.delta,
}; };
}); });
......
...@@ -17,8 +17,9 @@ const messageAccessor = (data: any): ChatResponseChunk => { ...@@ -17,8 +17,9 @@ const messageAccessor = (data: any): ChatResponseChunk => {
delta: data.message.content, delta: data.message.content,
}; };
}; };
const completionAccessor = (data: any): CompletionResponse => { const completionAccessor = (data: any): CompletionResponse => {
return { text: data.response }; return { text: data.response, raw: null };
}; };
// https://github.com/jmorganca/ollama // https://github.com/jmorganca/ollama
......
...@@ -126,8 +126,10 @@ export interface ChatResponse< ...@@ -126,8 +126,10 @@ export interface ChatResponse<
message: ChatMessage<AdditionalMessageOptions>; message: ChatMessage<AdditionalMessageOptions>;
/** /**
* Raw response from the LLM * Raw response from the LLM
*
* It's possible that this is `null` if the LLM responds with an iterable of chunks
*/ */
raw: object; raw: object | null;
} }
export type ChatResponseChunk< export type ChatResponseChunk<
...@@ -148,7 +150,12 @@ export type ChatResponseChunk< ...@@ -148,7 +150,12 @@ export type ChatResponseChunk<
export interface CompletionResponse { export interface CompletionResponse {
text: string; text: string;
raw?: Record<string, any>; /**
* Raw response from the LLM
*
* It's possible that this is `null` if the LLM responds with an iterable of chunks
*/
raw: object | null;
} }
export type LLMMetadata = { export type LLMMetadata = {
......
...@@ -84,10 +84,11 @@ export function wrapLLMEvent( ...@@ -84,10 +84,11 @@ export function wrapLLMEvent(
}; };
response[Symbol.asyncIterator] = async function* () { response[Symbol.asyncIterator] = async function* () {
const finalResponse: ChatResponse = { const finalResponse: ChatResponse = {
raw: response, raw: null,
message: { message: {
content: "", content: "",
role: "assistant", role: "assistant",
options: {},
}, },
}; };
let firstOne = false; let firstOne = false;
...@@ -98,6 +99,12 @@ export function wrapLLMEvent( ...@@ -98,6 +99,12 @@ export function wrapLLMEvent(
} else { } else {
finalResponse.message.content += chunk.delta; finalResponse.message.content += chunk.delta;
} }
if (chunk.options) {
finalResponse.message.options = {
...finalResponse.message.options,
...chunk.options,
};
}
yield chunk; yield chunk;
} }
snapshot(() => { snapshot(() => {
......
0% Loading or .
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or to comment