format
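This is apparently a formatting-only pass: each hunk below removes and re-adds the same visible line, so the actual change is whitespace or indentation that does not survive text extraction.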
src/lib/components/InferencePlayground/InferencePlayground.svelte
CHANGED
@@ -144,7 +144,7 @@
 				conversation.model,
 				requestMessages,
 				conversation.config.temperature,
-				conversation.config.maxTokens
+				conversation.config.maxTokens
 			);
 			// check if the user did not abort the request
 			if (waitForNonStreaming) {
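For orientation, a sketch of how this call site presumably reads in full. Only the argument list, the closing `);`, the comment, and the `if (waitForNonStreaming)` header appear in the hunk; the enclosing function name `submitNonStreaming`, the `newMessage` variable, and the body of the `if` are guesses, not part of the diff:

// Hypothetical surrounding code; everything outside the visible hunk
// lines is an assumption for illustration.
async function submitNonStreaming() {
	const newMessage = await handleNonStreamingResponse(
		conversation.model,
		requestMessages,
		conversation.config.temperature,
		conversation.config.maxTokens
	);
	// check if the user did not abort the request
	if (waitForNonStreaming) {
		// Assumed: append the completed message to the conversation.
		conversation.messages = [...conversation.messages, newMessage];
	}
}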
src/lib/components/InferencePlayground/inferencePlaygroundUtils.ts
CHANGED
@@ -28,7 +28,7 @@ export async function handleStreamingResponse(
 				model: model,
 				messages: messages,
 				temperature: temperature,
-				max_tokens: maxTokens
+				max_tokens: maxTokens
 			},
 			{ signal: abortController.signal }
 		)) {
@@ -51,13 +51,13 @@ export async function handleNonStreamingResponse(
 	model: string,
 	messages: ChatCompletionInputMessage[],
 	temperature: number,
-	maxTokens: number
+	maxTokens: number
 ): Promise<ChatCompletionInputMessage> {
 	const response = await hf.chatCompletion({
 		model: model,
 		messages: messages,
 		temperature: temperature,
-		max_tokens: maxTokens
+		max_tokens: maxTokens
 	});
 
 	if (response.choices && response.choices.length > 0) {
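Assembled from the two hunks above, a sketch of the full non-streaming helper. The hunks show the signature through the `if` header; the `hf` client construction, the imports, and everything after the visible `if` line (the return and the error fallback) are assumptions, not shown in the diff:

import { HfInference } from "@huggingface/inference";
import type { ChatCompletionInputMessage } from "@huggingface/tasks";

// Assumed: the module creates a client somewhere above; token handling
// is not visible in the diff.
const hf = new HfInference();

export async function handleNonStreamingResponse(
	model: string,
	messages: ChatCompletionInputMessage[],
	temperature: number,
	maxTokens: number
): Promise<ChatCompletionInputMessage> {
	// chatCompletion expects the snake_case `max_tokens` field, hence the
	// camelCase-to-snake_case mapping at this boundary.
	const response = await hf.chatCompletion({
		model: model,
		messages: messages,
		temperature: temperature,
		max_tokens: maxTokens
	});

	if (response.choices && response.choices.length > 0) {
		// Assumed continuation: return the first choice's message.
		return response.choices[0].message;
	}
	// Assumed fallback; the diff ends at the `if` header above.
	throw new Error("No response from the model");
}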