From: Emmanuel Schmidbauer
Date: Mon, 8 Jul 2024 11:24:58 +0000 (-0400)
Subject: server : add inference path to make OAI API compatible (#2270)
X-Git-Tag: upstream/1.7.4~604
X-Git-Url: https://git.djapps.eu/?a=commitdiff_plain;h=bec9836849feaf6e514eab22c0688a49e0f04ce7;p=pkg%2Fggml%2Fsources%2Fwhisper.cpp

server : add inference path to make OAI API compatible (#2270)
---

diff --git a/examples/server/server.cpp b/examples/server/server.cpp
index 10aae9c0..02921763 100644
--- a/examples/server/server.cpp
+++ b/examples/server/server.cpp
@@ -34,6 +34,7 @@ struct server_params
     std::string hostname       = "127.0.0.1";
     std::string public_path    = "examples/server/public";
     std::string request_path   = "";
+    std::string inference_path = "/inference";

     int32_t port         = 8080;
     int32_t read_timeout = 600;
@@ -132,6 +133,7 @@ void whisper_print_usage(int /*argc*/, char ** argv, const whisper_params & para
     fprintf(stderr, "  --port PORT,           [%-7d] Port number for the server\n", sparams.port);
     fprintf(stderr, "  --public PATH,         [%-7s] Path to the public folder\n", sparams.public_path.c_str());
     fprintf(stderr, "  --request-path PATH,   [%-7s] Request path for all requests\n", sparams.request_path.c_str());
+    fprintf(stderr, "  --inference-path PATH, [%-7s] Inference path for all requests\n", sparams.inference_path.c_str());
     fprintf(stderr, "  --convert,             [%-7s] Convert audio to WAV, requires ffmpeg on the server", sparams.ffmpeg_converter ? "true" : "false");
     fprintf(stderr, "\n");
 }
@@ -182,6 +184,7 @@ bool whisper_params_parse(int argc, char ** argv, whisper_params & params, serve
     else if (arg == "--host")           { sparams.hostname       = argv[++i]; }
     else if (arg == "--public")         { sparams.public_path    = argv[++i]; }
     else if (arg == "--request-path")   { sparams.request_path   = argv[++i]; }
+    else if (arg == "--inference-path") { sparams.inference_path = argv[++i]; }
     else if (arg == "--convert")        { sparams.ffmpeg_converter = true; }
     else {
         fprintf(stderr, "error: unknown argument: %s\n", arg.c_str());
@@ -644,10 +647,10 @@ int main(int argc, char ** argv) {
         return false;
     });

-    svr.Options(sparams.request_path + "/inference", [&](const Request &, Response &){
+    svr.Options(sparams.request_path + sparams.inference_path, [&](const Request &, Response &){
     });

-    svr.Post(sparams.request_path + "/inference", [&](const Request &req, Response &res){
+    svr.Post(sparams.request_path + sparams.inference_path, [&](const Request &req, Response &res){
         // acquire whisper model mutex lock
         std::lock_guard<std::mutex> lock(whisper_mutex);
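
For illustration only (not part of the commit), a minimal client sketch of how the new flag might be used: start the server with the inference route remapped to the OpenAI-style path, e.g. "./server -m models/ggml-base.en.bin --inference-path /v1/audio/transcriptions", then POST multipart audio to that path. The sketch assumes default host/port, an empty --request-path, a placeholder audio file samples/jfk.wav, and uses cpp-httplib (the same header the server example builds on); the form fields mirror the existing /inference handler.

// sketch: POST an audio file to the remapped inference path of the whisper.cpp server
#include <fstream>
#include <iostream>
#include <sstream>
#include <string>

#include "httplib.h" // cpp-httplib, bundled with the server example

int main() {
    // read the audio file into memory (path is a placeholder; 16 kHz WAV unless --convert is enabled)
    std::ifstream audio("samples/jfk.wav", std::ios::binary);
    std::stringstream buf;
    buf << audio.rdbuf();

    httplib::Client cli("127.0.0.1", 8080);

    // multipart form fields as accepted by the existing /inference handler
    httplib::MultipartFormDataItems items = {
        { "file",            buf.str(), "jfk.wav", "audio/wav" },
        { "response_format", "json",    "",        "" },
    };

    // the path must match --inference-path, prefixed by --request-path if one is set
    auto res = cli.Post("/v1/audio/transcriptions", items);
    if (res && res->status == 200) {
        std::cout << res->body << std::endl; // JSON body with the transcribed text
    } else {
        std::cerr << "request failed" << std::endl;
        return 1;
    }
    return 0;
}

The point of making the path configurable, per the commit subject, is OAI API compatibility: clients that already post to /v1/audio/transcriptions can target the whisper.cpp server directly instead of relying on a reverse proxy to rewrite the route.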