From: peidaqi Date: Tue, 28 Jan 2025 23:03:42 +0000 (-0700) Subject: server : Fixed wrong function name in llamacpp server unit test (#11473) X-Git-Tag: upstream/0.0.4631~53 X-Git-Url: https://git.djapps.eu/?a=commitdiff_plain;h=cf8cc856d7d02165bd08593b4757e1256a62d501;p=pkg%2Fggml%2Fsources%2Fllama.cpp server : Fixed wrong function name in llamacpp server unit test (#11473) The test_completion_stream_with_openai_library() function actually runs with stream=False by default, and test_completion_with_openai_library() runs with stream=True --- diff --git a/examples/server/tests/unit/test_completion.py b/examples/server/tests/unit/test_completion.py index c1fc1246..0ed5b99b 100644 --- a/examples/server/tests/unit/test_completion.py +++ b/examples/server/tests/unit/test_completion.py @@ -87,7 +87,7 @@ def test_completion_stream_vs_non_stream(): assert content_stream == res_non_stream.body["content"] -def test_completion_stream_with_openai_library(): +def test_completion_with_openai_library(): global server server.start() client = OpenAI(api_key="dummy", base_url=f"http://{server.server_host}:{server.server_port}/v1") @@ -102,7 +102,7 @@ def test_completion_stream_with_openai_library(): assert match_regex("(going|bed)+", res.choices[0].text) -def test_completion_with_openai_library(): +def test_completion_stream_with_openai_library(): global server server.start() client = OpenAI(api_key="dummy", base_url=f"http://{server.server_host}:{server.server_port}/v1")