Andrew committed on
Commit c4408b8 · 1 Parent(s): 00549b9

(feat) Pass user API key through router endpoints

Files changed (1):
  src/lib/server/router/endpoint.ts +9 -4
src/lib/server/router/endpoint.ts CHANGED

@@ -5,7 +5,7 @@ import type {
 	TextGenerationStreamOutputSimplified,
 } from "../endpoints/endpoints";
 import endpoints from "../endpoints/endpoints";
-import type { ProcessedModel } from "../models";
+import type { ProcessedModel, EndpointOptions } from "../models";
 import { config } from "$lib/server/config";
 import { logger } from "$lib/server/logger";
 import { archSelectRoute } from "./arch";
@@ -35,7 +35,10 @@ function stripReasoningFromMessage(message: EndpointMessage): EndpointMessage {
  * Create an Endpoint that performs route selection via Arch and then forwards
  * to the selected model (with fallbacks) using the OpenAI-compatible endpoint.
  */
-export async function makeRouterEndpoint(routerModel: ProcessedModel): Promise<Endpoint> {
+export async function makeRouterEndpoint(
+	routerModel: ProcessedModel,
+	options?: EndpointOptions
+): Promise<Endpoint> {
 	return async function routerEndpoint(params: EndpointParameters) {
 		const routes = await getRoutes();
 		const sanitizedMessages = params.messages.map(stripReasoningFromMessage);
@@ -69,10 +72,12 @@ export async function makeRouterEndpoint(routerModel: ProcessedModel): Promise<Endpoint> {
 		} as ProcessedModel;
 	}

+	const defaultApiKey = config.OPENAI_API_KEY || config.HF_TOKEN || "sk-";
+
 	return endpoints.openai({
 		type: "openai",
 		baseURL: (config.OPENAI_BASE_URL || "https://router.huggingface.co/v1").replace(/\/$/, ""),
-		apiKey: config.OPENAI_API_KEY || config.HF_TOKEN || "sk-",
+		apiKey: options?.apiKey ?? defaultApiKey,
 		model: modelForCall,
 		// Ensure streaming path is used
 		streamingSupported: true,
@@ -133,7 +138,7 @@ export async function makeRouterEndpoint(routerModel: ProcessedModel): Promise<Endpoint> {
 		}
 	}

-	const { routeName } = await archSelectRoute(sanitizedMessages);
+	const { routeName } = await archSelectRoute(sanitizedMessages, { apiKey: options?.apiKey });

 	const fallbackModel = config.LLM_ROUTER_FALLBACK_MODEL || routerModel.id;
 	const { candidates } = resolveRouteModels(routeName, routes, fallbackModel);
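
For context, a minimal sketch of how a caller might use the new parameter, assuming EndpointOptions is shaped like { apiKey?: string } (the diff only shows it being imported from "../models" and read via options?.apiKey); the endpointForUser wrapper is hypothetical and not part of this commit:

import type { Endpoint } from "../endpoints/endpoints";
import type { ProcessedModel } from "../models";
import { makeRouterEndpoint } from "./endpoint";

// Hypothetical call site: thread the requesting user's own API key through the
// router so both Arch route selection and the forwarded OpenAI-compatible call
// authenticate as that user. When userApiKey is undefined, options?.apiKey is
// also undefined and the endpoint falls back to the server-wide default
// (config.OPENAI_API_KEY || config.HF_TOKEN || "sk-").
async function endpointForUser(
	routerModel: ProcessedModel,
	userApiKey?: string
): Promise<Endpoint> {
	return makeRouterEndpoint(routerModel, { apiKey: userApiKey });
}

Making options optional keeps existing single-argument call sites compiling, and the options?.apiKey ?? defaultApiKey fallback preserves the previous behavior whenever no user key is supplied.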