This repository was archived by the owner on Jul 4, 2025. It is now read-only.

Commit 26b7fb9

Merge pull request #332 from janhq/331-feat-only-enable-log-when-there-is-log-folder
feat: add condition to not emit log file if there is no path to log f…
2 parents 74b85dc + c070a21 commit 26b7fb9

2 files changed: +7 −5 lines


controllers/llamaCPP.cc

Lines changed: 5 additions & 4 deletions
@@ -452,10 +452,11 @@ bool llamaCPP::loadModelImpl(const Json::Value &jsonBody) {
   this->pre_prompt = jsonBody.get("pre_prompt", "").asString();
   this->repeat_last_n = jsonBody.get("repeat_last_n", 32).asInt();
 
-  // Set folder for llama log
-  std::string llama_log_folder =
-      jsonBody.get("llama_log_folder", "log/").asString();
-  log_set_target(llama_log_folder + "llama.log");
+  if (!jsonBody["llama_log_folder"].isNull()) {
+    log_enable();
+    std::string llama_log_folder = jsonBody["llama_log_folder"].asString();
+    log_set_target(llama_log_folder + "llama.log");
+  } // Set folder for llama log
 }
 #ifdef GGML_USE_CUBLAS
   LOG_INFO << "Setting up GGML CUBLAS PARAMS";
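
For illustration, here is a minimal, self-contained sketch of the behaviour this hunk introduces. It is not repository code: configureLlamaLog() is a hypothetical helper, and the two log functions are stand-ins for llama.cpp's log_enable()/log_set_target(). With the check in place, file logging is only turned on when the request body actually carries a "llama_log_folder" field; otherwise the constructor's log_disable() default is left untouched and no llama.log is emitted.

// Illustrative sketch only; names below are hypothetical stand-ins.
#include <json/json.h>
#include <iostream>
#include <string>

static void log_enable() { std::cout << "file logging enabled\n"; }
static void log_set_target(const std::string &path) {
  std::cout << "log target set to " << path << "\n";
}

static void configureLlamaLog(const Json::Value &jsonBody) {
  // Only touch the logger when the request supplies a folder; otherwise the
  // constructor's log_disable() default stays in effect and no file is written.
  if (!jsonBody["llama_log_folder"].isNull()) {
    log_enable();
    std::string llama_log_folder = jsonBody["llama_log_folder"].asString();
    log_set_target(llama_log_folder + "llama.log");
  }
}

int main() {
  Json::Value withFolder;
  withFolder["llama_log_folder"] = "/tmp/nitro/";  // hypothetical example path
  configureLlamaLog(withFolder);    // enables logging, targets /tmp/nitro/llama.log

  Json::Value withoutFolder;        // request body without "llama_log_folder"
  configureLlamaLog(withoutFolder); // does nothing; no log file is created
  return 0;
}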

controllers/llamaCPP.h

Lines changed: 2 additions & 1 deletion
@@ -5,6 +5,7 @@
 #endif
 
 #pragma once
+#define LOG_TARGET stdout
 
 #include "log.h"
 #include "utils/nitro_utils.h"
@@ -2486,7 +2487,7 @@ class llamaCPP : public drogon::HttpController<llamaCPP> {
 public:
  llamaCPP() {
    // Some default values for now below
-    log_enable();  // Disable the log to file feature, reduce bloat for
+    log_disable(); // Disable the log to file feature, reduce bloat for
                   // target
                   // system ()
    std::vector<std::string> llama_models =
