
Commit e5a973f

move private methods to private

1 parent dcd9acd

2 files changed: +6, -4 lines changed

controllers/llamaCPP.cc

Lines changed: 4 additions & 4 deletions
@@ -22,8 +22,8 @@ std::shared_ptr<inferenceState> create_inference_state(llamaCPP *instance) {
 // --------------------------------------------

 // Function to check if the model is loaded
-void check_model_loaded(
-    llama_server_context &llama, const HttpRequestPtr &req,
+void llamaCPP::checkModelLoaded(
+    const HttpRequestPtr &req,
     std::function<void(const HttpResponsePtr &)> &callback) {
   if (!llama.model_loaded_external) {
     Json::Value jsonResp;
@@ -152,7 +152,7 @@ void llamaCPP::chatCompletion(
     std::function<void(const HttpResponsePtr &)> &&callback) {

   // Check if model is loaded
-  check_model_loaded(llama, req, callback);
+  checkModelLoaded(req, callback);

   const auto &jsonBody = req->getJsonObject();
   std::string formatted_output = pre_prompt;
@@ -405,7 +405,7 @@ void llamaCPP::chatCompletion(
 void llamaCPP::embedding(
     const HttpRequestPtr &req,
     std::function<void(const HttpResponsePtr &)> &&callback) {
-  check_model_loaded(llama, req, callback);
+  checkModelLoaded(req, callback);

   auto state = create_inference_state(this);

controllers/llamaCPP.h

Lines changed: 2 additions & 0 deletions
@@ -2573,5 +2573,7 @@ class llamaCPP : public drogon::HttpController<llamaCPP> {
   void warmupModel();
   void backgroundTask();
   void stopBackgroundTask();
+  void checkModelLoaded(const HttpRequestPtr &req,
+                        std::function<void(const HttpResponsePtr &)> &callback);
 };
 }; // namespace inferences
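
The call sites drop the llama argument because, as a private member of the llamaCPP controller, checkModelLoaded can read the controller's own llama_server_context directly instead of receiving it as a parameter. Below is a minimal, self-contained sketch of that pattern; the stand-in types, the bool return value, and the string callback are simplifications for illustration only, not the actual code in this repository (the method in the diff returns void and uses Drogon's HttpRequestPtr and HttpResponsePtr).

// Sketch only: hypothetical stand-in types, not the real controller code.
#include <functional>
#include <iostream>
#include <string>

struct llama_server_context {
  bool model_loaded_external = false;  // flag the guard checks (as in the diff)
};

using Callback = std::function<void(const std::string &)>;

class llamaCPP {
 public:
  void loadModel() { llama.model_loaded_external = true; }

  void chatCompletion(const std::string &req, const Callback &callback) {
    // Before the commit: check_model_loaded(llama, req, callback);
    // After the commit: the member function reaches this->llama itself.
    if (!checkModelLoaded(callback)) return;
    callback("completion for: " + req);
  }

 private:
  // Member version of the guard: no llama_server_context parameter needed.
  bool checkModelLoaded(const Callback &callback) const {
    if (!llama.model_loaded_external) {
      callback("error: model has not been loaded yet");
      return false;
    }
    return true;
  }

  llama_server_context llama;  // the controller owns the context
};

int main() {
  llamaCPP controller;
  controller.chatCompletion(
      "hello", [](const std::string &r) { std::cout << r << '\n'; });
  controller.loadModel();
  controller.chatCompletion(
      "hello", [](const std::string &r) { std::cout << r << '\n'; });
  return 0;
}

Built as-is, the first chatCompletion call trips the guard and reports that no model is loaded; after loadModel() the second call goes through.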
