Skip to content

Commit 1e65517

Browse files
author
Olivier Chafik
committed
openai: update after merge
1 parent 7675ac6 commit 1e65517

File tree

3 files changed

+14
-13
lines changed

3 files changed

+14
-13
lines changed

examples/openai/api.py

Lines changed: 2 additions & 2 deletions
@@ -29,7 +29,7 @@ class Tool(BaseModel):

 class ResponseFormat(BaseModel):
     type: Literal["json_object"]
-    schema: Optional[Json[Any]] = None  # type: ignore
+    schema: Optional[dict[str, Any]] = None  # type: ignore

 class LlamaCppParams(BaseModel):
     n_predict: Optional[int] = None
@@ -67,7 +67,7 @@ class ChatCompletionRequest(LlamaCppParams):
 class Choice(BaseModel):
     index: int
     message: Message
-    logprobs: Optional[Json[Any]] = None
+    logprobs: Optional[dict[str, Any]] = None
     finish_reason: Union[Literal["stop"], Literal["tool_calls"]]

 class Usage(BaseModel):

examples/openai/gguf_kvs.py

Lines changed: 4 additions & 5 deletions
@@ -8,13 +8,12 @@

 class GGUFKeyValues:
     def __init__(self, model: Path):
-        reader = GGUFReader(model.as_posix())
-        self.fields = reader.fields
+        self.reader = GGUFReader(model.as_posix())
     def __getitem__(self, key: str):
         if '{arch}' in key:
             key = key.replace('{arch}', self[Keys.General.ARCHITECTURE])
-        return self.fields[key].read()
+        return self.reader.read_field(self.reader.fields[key])
     def __contains__(self, key: str):
-        return key in self.fields
+        return key in self.reader.fields
     def keys(self):
-        return self.fields.keys()
+        return self.reader.fields.keys()

examples/openai/server.py

Lines changed: 8 additions & 6 deletions
@@ -37,12 +37,8 @@ def main(
 ):
     import uvicorn

-    if endpoint:
-        sys.stderr.write(f"# WARNING: Unsure which model we're talking to, fetching its chat template from HuggingFace tokenizer of {template_hf_model_id_fallback}\n")
-        assert template_hf_model_id_fallback, "template_hf_model_id_fallback is required when using an endpoint"
-        chat_template = ChatTemplate.from_huggingface(template_hf_model_id_fallback)
-
-    else:
+    chat_template = None
+    if model:
         metadata = GGUFKeyValues(Path(model))

         if not context_length:
@@ -58,6 +54,12 @@ def main(
         if verbose:
             sys.stderr.write(f"# CHAT TEMPLATE:\n\n{chat_template}\n\n")

+    if endpoint and not chat_template:
+        sys.stderr.write(f"# WARNING: Unsure which model we're talking to, fetching its chat template from HuggingFace tokenizer of {template_hf_model_id_fallback}\n")
+        assert template_hf_model_id_fallback or chat_template, "template_hf_model_id_fallback is required when using an endpoint without a model"
+        chat_template = ChatTemplate.from_huggingface(template_hf_model_id_fallback)
+
+
     else:
         if verbose:
             sys.stderr.write(f"# Starting C++ server with model {model} on {server_host}:{server_port}\n")
         cmd = [

0 commit comments

Comments
 (0)