# inference.py
from typing import List, Dict, Optional
from hf_client import get_inference_client
from models import find_model


def chat_completion(
    model_id: str,
    messages: List[Dict[str, str]],
    provider: Optional[str] = None,
    max_tokens: int = 4096
) -> str:
    """
    Send a chat completion request to the appropriate inference provider.
    Args:
        model_id: The model identifier to use.
        messages: A list of OpenAI-style {'role','content'} messages.
        provider: Optional override for provider; uses model default if None.
        max_tokens: Maximum tokens to generate.
    Returns:
        The assistant's response content.
    """
    # resolve default provider from registry if needed
    if provider is None:
        meta = find_model(model_id)
        provider = meta.default_provider if meta else "auto"
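    # the returned client is assumed to expose an OpenAI-compatible
    # chat.completions interface (see the create() calls below)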
    client = get_inference_client(model_id, provider)
    resp = client.chat.completions.create(
        model=model_id,
        messages=messages,
        max_tokens=max_tokens
    )
    return resp.choices[0].message.content


def stream_chat_completion(
    model_id: str,
    messages: List[Dict[str, str]],
    provider: Optional[str] = None,
    max_tokens: int = 4096
):
    """
    Generator for streaming chat completions.
    Yields partial message chunks as strings.
    """
    if provider is None:
        meta = find_model(model_id)
        provider = meta.default_provider if meta else "auto"
    client = get_inference_client(model_id, provider)
    stream = client.chat.completions.create(
        model=model_id,
        messages=messages,
        max_tokens=max_tokens,
        stream=True
    )
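    # each streamed chunk carries an incremental delta; content may be None
    # on role-only or keep-alive chunks, so guard before yielding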
    for chunk in stream:
        delta = getattr(chunk.choices[0].delta, "content", None)
        if delta:
            yield delta
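

if __name__ == "__main__":
    # Minimal usage sketch for both helpers. The model id below is a
    # hypothetical example, not one this registry is known to contain;
    # substitute any id that find_model() recognizes.
    demo_model = "meta-llama/Llama-3.1-8B-Instruct"
    demo_messages = [
        {"role": "system", "content": "You are a concise assistant."},
        {"role": "user", "content": "Summarize what an inference provider is."},
    ]

    # blocking call: returns the full assistant reply as one string
    print(chat_completion(demo_model, demo_messages))

    # streaming call: print partial chunks as they arrive
    for piece in stream_chat_completion(demo_model, demo_messages):
        print(piece, end="", flush=True)
    print()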