main : allow empty --prompt-cache file (ggerganov#5176)
* allow empty --prompt-cache file

This allows the user to supply an empty --prompt-cache file created with std::tmpnam(), std::tmpfile(), Python's tempfile.NamedTemporaryFile(), and similar create-empty-file APIs.
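For illustration, a minimal sketch of that workflow (the file handling, model path, and prompt below are hypothetical, not part of this commit): create a zero-byte file up front, then pass its path as --prompt-cache.

    // create_empty_cache.cpp - hypothetical demo, build with: g++ -std=c++11 create_empty_cache.cpp
    #include <cstdio>
    #include <fstream>

    int main() {
        char name[L_tmpnam];
        if (std::tmpnam(name) == nullptr) { // racy by design; mkstemp() is safer in practice
            return 1;
        }
        std::ofstream create(name); // opening for write creates the zero-byte file
        if (!create) {
            return 1;
        }
        // The empty file is now valid input, e.g.:
        //   ./main -m model.gguf --prompt-cache <name> -p "hello"
        std::printf("empty prompt cache at: %s\n", name);
        return 0;
    }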

I switched from the C fopen API to the C++ filesystem API to get around the fact that, to the best of my knowledge, C has no portable way to get the size of a file larger than LONG_MAX (std::ftell() returns long), with std::ifstream as a fallback for C++ < 17.
(The project currently targets C++11, it seems, so file_exists() and file_size() can be removed when we upgrade to C++17.)
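For reference, a sketch of what the two helpers introduced below might collapse to once the project is on C++17 (an assumption about a future cleanup, not code from this commit); std::filesystem reports sizes as std::uintmax_t, which sidesteps the long/LONG_MAX ceiling of std::ftell():

    // Hypothetical C++17 versions of the helpers added in this commit.
    #include <filesystem>
    #include <string>

    static bool file_exists(const std::string &path) {
        return std::filesystem::exists(path);
    }

    static bool file_is_empty(const std::string &path) {
        // throws std::filesystem::filesystem_error if the path does not exist
        return std::filesystem::is_empty(path);
    }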

* formatting

(requested in code review)

* remove c++17, file_is_empty
divinity76 authored Jan 30, 2024
1 parent 5589921 commit 8134169
28 changes: 18 additions & 10 deletions examples/main/main.cpp
@@ -39,6 +39,17 @@ static std::ostringstream * g_output_ss;
 static std::vector<llama_token> * g_output_tokens;
 static bool is_interacting = false;
 
+static bool file_exists(const std::string &path) {
+    std::ifstream f(path.c_str());
+    return f.good();
+}
+
+static bool file_is_empty(const std::string &path) {
+    std::ifstream f;
+    f.exceptions(std::ifstream::failbit | std::ifstream::badbit);
+    f.open(path.c_str(), std::ios::in | std::ios::binary | std::ios::ate);
+    return f.tellg() == 0;
+}
+
 static void write_logfile(
     const llama_context * ctx, const gpt_params & params, const llama_model * model,
@@ -215,12 +226,12 @@ int main(int argc, char ** argv) {

     if (!path_session.empty()) {
         LOG_TEE("%s: attempting to load saved session from '%s'\n", __func__, path_session.c_str());
-
-        // fopen to check for existing session
-        FILE * fp = std::fopen(path_session.c_str(), "rb");
-        if (fp != NULL) {
-            std::fclose(fp);
-
+        if (!file_exists(path_session)) {
+            LOG_TEE("%s: session file does not exist, will create.\n", __func__);
+        } else if (file_is_empty(path_session)) {
+            LOG_TEE("%s: The session file is empty. A new session will be initialized.\n", __func__);
+        } else {
+            // The file exists and is not empty
             session_tokens.resize(n_ctx);
             size_t n_token_count_out = 0;
             if (!llama_load_session_file(ctx, path_session.c_str(), session_tokens.data(), session_tokens.capacity(), &n_token_count_out)) {
@@ -229,10 +240,7 @@
             }
             session_tokens.resize(n_token_count_out);
             llama_set_rng_seed(ctx, params.seed);
-
-            LOG_TEE("%s: loaded a session with prompt size of %d tokens\n", __func__, (int) session_tokens.size());
-        } else {
-            LOG_TEE("%s: session file does not exist, will create\n", __func__);
+            LOG_TEE("%s: loaded a session with prompt size of %d tokens\n", __func__, (int)session_tokens.size());
         }
     }

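As a quick sanity check, here is a standalone sketch (hypothetical, not part of the commit) that exercises the two helpers on a scratch file. Note that main.cpp deliberately calls file_exists() first: file_is_empty() enables stream exceptions, so it would throw on a missing file.

    // helpers_demo.cpp - hypothetical demo, build with: g++ -std=c++11 helpers_demo.cpp
    #include <cstdio>
    #include <fstream>
    #include <string>

    static bool file_exists(const std::string &path) {
        std::ifstream f(path.c_str());
        return f.good();
    }

    static bool file_is_empty(const std::string &path) {
        std::ifstream f;
        f.exceptions(std::ifstream::failbit | std::ifstream::badbit);
        f.open(path.c_str(), std::ios::in | std::ios::binary | std::ios::ate);
        return f.tellg() == 0;
    }

    int main() {
        const std::string path = "scratch_session.bin"; // hypothetical scratch file

        std::printf("exists before creation: %d\n", file_exists(path) ? 1 : 0); // expect 0

        { std::ofstream create(path.c_str(), std::ios::binary); } // creates a zero-byte file
        std::printf("exists: %d, empty: %d\n",
                    file_exists(path) ? 1 : 0, file_is_empty(path) ? 1 : 0);   // expect 1, 1

        { std::ofstream write(path.c_str(), std::ios::binary); write << "data"; }
        std::printf("empty after write: %d\n", file_is_empty(path) ? 1 : 0);   // expect 0

        std::remove(path.c_str());
        return 0;
    }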
