fprintf(stderr, "Reverse prompt: '%s'\n", antiprompt.c_str());
}
}
+
+ if (!params.input_prefix.empty()) {
+ fprintf(stderr, "Input prefix: '%s'\n", params.input_prefix.c_str());
+ }
}
fprintf(stderr, "sampling parameters: temp = %f, top_k = %d, top_p = %f, repeat_last_n = %i, repeat_penalty = %f\n", params.temp, params.top_k, params.top_p, params.repeat_last_n, params.repeat_penalty);
fprintf(stderr, "\n\n");
}
std::string buffer;
+    // Seed the input buffer with the user-supplied prefix and echo it,
+    // so the prompt shown to the user matches what will be tokenized.
+    if (!params.input_prefix.empty()) {
+        buffer += params.input_prefix;
+        // Never pass user-controlled text as the printf format string:
+        // a '%' in --in-prefix would trigger undefined behavior (CWE-134).
+        printf("%s", buffer.c_str());
+    }
+
std::string line;
bool another_line = true;
do {
exit(0);
} else if (arg == "--random-prompt") {
params.random_prompt = true;
+    } else if (arg == "--in-prefix") {
+        // Guard against a missing value: if --in-prefix is the last argument,
+        // argv[++i] is the argv[argc] null terminator and constructing a
+        // std::string from it is undefined behavior.
+        if (i + 1 >= argc) {
+            fprintf(stderr, "error: --in-prefix requires an argument\n");
+            gpt_print_usage(argc, argv, params);
+            exit(1);
+        }
+        params.input_prefix = argv[++i];
} else {
fprintf(stderr, "error: unknown argument: %s\n", arg.c_str());
gpt_print_usage(argc, argv, params);
fprintf(stderr, " -p PROMPT, --prompt PROMPT\n");
fprintf(stderr, " prompt to start generation with (default: empty)\n");
fprintf(stderr, " --random-prompt start with a randomized prompt.\n");
+ fprintf(stderr, " --in-prefix STRING string to prefix user inputs with (default: empty)\n");
fprintf(stderr, " -f FNAME, --file FNAME\n");
fprintf(stderr, " prompt file to start generation.\n");
fprintf(stderr, " -n N, --n_predict N number of tokens to predict (default: %d)\n", params.n_predict);
std::string model = "models/lamma-7B/ggml-model.bin"; // model path
std::string prompt = "";
+ std::string input_prefix = ""; // string to prefix user inputs with
std::vector<std::string> antiprompt; // string upon seeing which more user input is prompted