Skip to content
This repository was archived by the owner on Jul 4, 2025. It is now read-only.

Commit dcd9acd

Browse files
committed
move private methods to private
1 parent 8f6d281 commit dcd9acd

File tree

1 file changed

+5
-9
lines changed

1 file changed

+5
-9
lines changed

controllers/llamaCPP.h

Lines changed: 5 additions & 9 deletions
Original file line numberDiff line numberDiff line change
@@ -2548,18 +2548,9 @@ class llamaCPP : public drogon::HttpController<llamaCPP> {
25482548
std::function<void(const HttpResponsePtr &)> &&callback);
25492549
void unloadModel(const HttpRequestPtr &req,
25502550
std::function<void(const HttpResponsePtr &)> &&callback);
2551-
25522551
void modelStatus(const HttpRequestPtr &req,
25532552
std::function<void(const HttpResponsePtr &)> &&callback);
25542553

2555-
bool loadModelImpl(const Json::Value &jsonBody);
2556-
2557-
void warmupModel();
2558-
2559-
void backgroundTask();
2560-
2561-
void stopBackgroundTask();
2562-
25632554
private:
25642555
llama_server_context llama;
25652556
// std::atomic<bool> model_loaded = false;
@@ -2577,5 +2568,10 @@ class llamaCPP : public drogon::HttpController<llamaCPP> {
25772568
std::atomic<bool> single_queue_is_busy; // This value only used under the
25782569
// condition n_parallel is 1
25792570
std::string grammar_file_content;
2571+
2572+
bool loadModelImpl(const Json::Value &jsonBody);
2573+
void warmupModel();
2574+
void backgroundTask();
2575+
void stopBackgroundTask();
25802576
};
25812577
}; // namespace inferences

0 commit comments

Comments (0)