Skip to content
This repository was archived by the owner on Jul 4, 2025. It is now read-only.

Commit 942ee09

Browse files
authored
Update llamaCPP.cc
1 parent ac5dd44 commit 942ee09

File tree

1 file changed

+6
-4
lines changed

1 file changed

+6
-4
lines changed

controllers/llamaCPP.cc

Lines changed: 6 additions & 4 deletions
Original file line numberDiff line numberDiff line change
@@ -160,10 +160,7 @@ void llamaCPP::chatCompletion(
160160

161161
const auto &jsonBody = req->getJsonObject();
162162
std::string formatted_output = pre_prompt;
163-
#ifdef DEBUG
164-
LOG_INFO << "Current completion text";
165-
LOG_INFO << formatted_output ;
166-
#endif
163+
167164
json data;
168165
json stopWords;
169166
// To set default value
@@ -209,6 +206,11 @@ void llamaCPP::chatCompletion(
209206

210207
bool is_streamed = data["stream"];
211208

209+
#ifdef DEBUG
210+
LOG_INFO << "Current completion text";
211+
LOG_INFO << formatted_output ;
212+
#endif
213+
212214
const int task_id = llama.request_completion(data, false, false);
213215
LOG_INFO << "Resolved request for task_id:" << task_id;
214216

0 commit comments

Comments (0)