
Commit 81e88fd

🥅 Catch SSE error (huggingface#169)
1 parent 8403bdb commit 81e88fd

4 files changed (+44 / -1 lines changed)

packages/inference/src/tasks/custom/streamingRequest.ts

Lines changed: 5 additions & 1 deletion
@@ -65,7 +65,11 @@ export async function* streamingRequest<T>(
 		onChunk(value);
 		for (const event of events) {
 			if (event.data.length > 0) {
-				yield JSON.parse(event.data) as T;
+				const data = JSON.parse(event.data);
+				if (typeof data === "object" && data !== null && "error" in data) {
+					throw new Error(data.error);
+				}
+				yield data as T;
 			}
 		}
 		events = [];
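The change above stops yielding every parsed SSE payload as-is and first checks it for an error field. A minimal standalone sketch of that check (parseEvent is a hypothetical helper for illustration, not the library source), fed with the error payload recorded in the new tapes.json entry below:

// Standalone illustration of the new check, not library code.
// The Inference API reports validation errors as a regular SSE "data:" event
// on an HTTP 200 response, so the parsed JSON has to be inspected.
function parseEvent<T>(raw: string): T {
	const data = JSON.parse(raw);
	if (typeof data === "object" && data !== null && "error" in data) {
		// Previously this object would have been yielded as if it were a token
		// chunk; now it surfaces as a thrown Error the caller can catch.
		throw new Error(data.error);
	}
	return data as T;
}

// The payload recorded in tapes.json below:
parseEvent(
	'{"error":"Input validation error: `truncate` must be strictly positive and less than 1000. Given: 1024","error_type":"validation"}'
);
// -> throws Error: Input validation error: `truncate` must be strictly positive and less than 1000. Given: 1024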

packages/inference/src/tasks/nlp/textGeneration.ts

Lines changed: 4 additions & 0 deletions
@@ -44,6 +44,10 @@ export type TextGenerationArgs = BaseArgs & {
 		 * (Default: None). Float to define the tokens that are within the sample operation of text generation. Add tokens in the sample for more probable to least probable until the sum of the probabilities is greater than top_p.
 		 */
 		top_p?: number;
+		/**
+		 * (Default: None). Integer. The maximum number of tokens from the input.
+		 */
+		truncate?: number;
 	};
 };
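The new truncate option is passed through parameters like the other text-generation settings. A usage sketch under stated assumptions (the access token is a placeholder, the model name is the one used in the new test below, and the accepted range depends on the deployed model):

import { HfInference } from "@huggingface/inference";

const hf = new HfInference("hf_xxx"); // placeholder access token

const output = await hf.textGeneration({
	model: "OpenAssistant/oasst-sft-4-pythia-12b-epoch-3.5",
	inputs: "Write a short story about a robot that becomes sentient.",
	parameters: {
		// Per the doc comment added above: caps the number of tokens taken from
		// the input. Out-of-range values are rejected by the API, as the new
		// test and tape below show.
		truncate: 512,
	},
});

console.log(output.generated_text);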

packages/inference/test/HfInference.spec.ts

Lines changed: 14 additions & 0 deletions
@@ -216,6 +216,20 @@ describe.concurrent(
 			}
 		});
 
+		it("textGenerationStream - catch error", async () => {
+			const response = hf.textGenerationStream({
+				model: "OpenAssistant/oasst-sft-4-pythia-12b-epoch-3.5",
+				inputs: "Write a short story about a robot that becomes sentient and takes over the world.",
+				parameters: {
+					truncate: 1024,
+				},
+			});
+
+			await expect(response.next()).rejects.toThrow(
+				"Input validation error: `truncate` must be strictly positive and less than 1000. Given: 1024"
+			);
+		});
+
 		it("tokenClassification", async () => {
 			expect(
 				await hf.tokenClassification({
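The test asserts the rejection through response.next(); in application code the same error surfaces while iterating the stream. A consumer-side sketch (assuming an hf client instance as in the test, and chunks shaped like the library's TextGenerationStreamOutput with token.text):

try {
	for await (const chunk of hf.textGenerationStream({
		model: "OpenAssistant/oasst-sft-4-pythia-12b-epoch-3.5",
		inputs: "Write a short story about a robot that becomes sentient and takes over the world.",
		parameters: { truncate: 1024 },
	})) {
		console.log(chunk.token.text);
	}
} catch (error) {
	// With this commit, an SSE error event aborts the iterator with a thrown
	// Error instead of yielding the raw error object as if it were a chunk.
	console.error("Stream failed:", error);
}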

packages/inference/test/tapes.json

Lines changed: 21 additions & 0 deletions
@@ -628,5 +628,26 @@
 				"vary": "Accept-Encoding, Origin, Access-Control-Request-Method, Access-Control-Request-Headers"
 			}
 		}
+	},
+	"1841a1a3806a917b0b01a7e066c425d4bd8f2da39168ed144061299c9a277b34": {
+		"url": "https://api-inference.huggingface.co/models/OpenAssistant/oasst-sft-4-pythia-12b-epoch-3.5",
+		"init": {
+			"headers": {
+				"Content-Type": "application/json"
+			},
+			"method": "POST",
+			"body": "{\"inputs\":\"Write a short story about a robot that becomes sentient and takes over the world.\",\"parameters\":{\"truncate\":1024},\"stream\":true,\"options\":{}}"
+		},
+		"response": {
+			"body": "data:{\"error\":\"Input validation error: `truncate` must be strictly positive and less than 1000. Given: 1024\",\"error_type\":\"validation\"}\n\n",
+			"status": 200,
+			"statusText": "OK",
+			"headers": {
+				"access-control-allow-credentials": "true",
+				"connection": "keep-alive",
+				"content-type": "text/event-stream",
+				"vary": "Origin, Access-Control-Request-Method, Access-Control-Request-Headers"
+			}
+		}
 	}
 }
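Note that the recorded response carries HTTP status 200 with content-type text/event-stream: the validation error exists only inside the SSE data payload, which is why streamingRequest.ts now inspects the parsed JSON rather than relying on the response status.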
