This repository was archived by the owner on Jul 4, 2025. It is now read-only.

Commit bcc524d

Correct formatting for prompt template

1 parent 942ee09 · commit bcc524d

1 file changed: 2 additions, 3 deletions


controllers/llamaCPP.cc (2 additions, 3 deletions)
@@ -191,7 +191,7 @@ void llamaCPP::chatCompletion(
       role = input_role;
     }
     std::string content = message["content"].asString();
-    formatted_output += role + content + "\n";
+    formatted_output += role + content;
   }
   formatted_output += ai_prompt;

@@ -205,12 +205,11 @@ void llamaCPP::chatCompletion(
   }

   bool is_streamed = data["stream"];
-
+  // Enable full message debugging
 #ifdef DEBUG
   LOG_INFO << "Current completion text";
   LOG_INFO << formatted_output ;
 #endif
-
   const int task_id = llama.request_completion(data, false, false);
   LOG_INFO << "Resolved request for task_id:" << task_id;
