Commit e46cec3 · committed by ndc8 · 1 parent: 68f41f4
Commit message: update

Files changed: backend_service.py (+1, -22)
backend_service.py CHANGED

@@ -481,28 +481,7 @@ async def list_models():
     return ModelsResponse(data=models)
 
 
-        #
-        response_text = response_text.strip() if response_text else "No response generated."
-
-        # Create OpenAI-compatible response
-        response = ChatCompletionResponse(
-            id=f"chatcmpl-{int(time.time())}",
-            created=int(time.time()),
-            model=request.model,
-            choices=[
-                ChatCompletionChoice(
-                    index=0,
-                    message=ChatMessage(role="assistant", content=response_text),
-                    finish_reason="stop"
-                )
-            ]
-        )
-
-        return response
-
-    except Exception as e:
-        logger.error(f"Error in chat completion: {e}")
-        raise HTTPException(status_code=500, detail=f"Internal server error: {str(e)}")
+# ...existing code...
 
 @app.post("/api/response")
 @app.post("/v1/chat/completions", response_model=ChatCompletionResponse)
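
The removed block assembled an OpenAI-compatible chat completion payload from ChatCompletionResponse, ChatCompletionChoice, and ChatMessage, whose definitions are not part of this diff. Below is a minimal sketch of how those models and a consolidated handler behind the two decorators visible in the context lines could look. The field names and the payload construction mirror the removed lines; the request shape (ChatCompletionRequest), the handler name (chat_completions), and the placeholder response_text are assumptions for illustration, not the actual backend_service.py code.

# Sketch only: field names mirror the diff; the request model, handler name,
# and placeholder generation step are assumptions about backend_service.py.
import logging
import time
from typing import List

from fastapi import FastAPI, HTTPException
from pydantic import BaseModel

logger = logging.getLogger(__name__)
app = FastAPI()


class ChatMessage(BaseModel):
    role: str
    content: str


class ChatCompletionChoice(BaseModel):
    index: int
    message: ChatMessage
    finish_reason: str


class ChatCompletionResponse(BaseModel):
    id: str
    created: int
    model: str
    choices: List[ChatCompletionChoice]


class ChatCompletionRequest(BaseModel):  # hypothetical request shape
    model: str
    messages: List[ChatMessage]


@app.post("/api/response")
@app.post("/v1/chat/completions", response_model=ChatCompletionResponse)
async def chat_completions(request: ChatCompletionRequest) -> ChatCompletionResponse:
    try:
        # Placeholder for the real generation step in backend_service.py
        response_text = "No response generated."
        # Assemble the OpenAI-compatible payload the same way the removed lines did
        return ChatCompletionResponse(
            id=f"chatcmpl-{int(time.time())}",
            created=int(time.time()),
            model=request.model,
            choices=[
                ChatCompletionChoice(
                    index=0,
                    message=ChatMessage(role="assistant", content=response_text),
                    finish_reason="stop",
                )
            ],
        )
    except Exception as e:
        logger.error(f"Error in chat completion: {e}")
        raise HTTPException(status_code=500, detail=f"Internal server error: {str(e)}")

Stacking the two @app.post decorators, as the unchanged context lines suggest, registers the same handler under both /api/response and the OpenAI-style /v1/chat/completions path, so the orphaned response-building code removed by this commit is no longer needed after list_models().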