@@ -101,49 +101,49 @@ int main(int argc, char* argv[]) {
 
   RemoveBinaryTempFileIfExists();
 
-  std::thread t1([]() {
-    // TODO: namh currently we only check for llamacpp. Need to add support for other engines
-    auto should_check_for_latest_llamacpp_version = true;
-    auto now = std::chrono::system_clock::now();
-
-    // read the yaml to see the last time we checked for an update
-    auto config = file_manager_utils::GetCortexConfig();
-    if (config.checkedForLlamacppUpdateAt != 0) {
-      // if a day has passed, then we should check again
-      auto last_check =
-          std::chrono::system_clock::time_point(
-              std::chrono::milliseconds(config.checkedForLlamacppUpdateAt)) +
-          std::chrono::hours(24);
-      should_check_for_latest_llamacpp_version = now > last_check;
-      CTL_DBG("should_check_for_latest_llamacpp_version: "
-              << should_check_for_latest_llamacpp_version);
-    }
+  auto should_check_for_latest_llamacpp_version = true;
+  auto now = std::chrono::system_clock::now();
+
+  // read the yaml to see the last time we checked for an update
+  auto config = file_manager_utils::GetCortexConfig();
+  if (config.checkedForLlamacppUpdateAt != 0) {
+    // if a day has passed, then we should check again
+    auto last_check =
+        std::chrono::system_clock::time_point(
+            std::chrono::milliseconds(config.checkedForLlamacppUpdateAt)) +
+        std::chrono::hours(24);
+    should_check_for_latest_llamacpp_version = now > last_check;
+  }
 
-    auto get_latest_version = []() -> cpp::result<std::string, std::string> {
-      try {
-        auto res = github_release_utils::GetReleaseByVersion(
-            "janhq", "cortex.llamacpp", "latest");
-        if (res.has_error()) {
-          CTL_ERR("Failed to get latest llama.cpp version: " << res.error());
+  if (should_check_for_latest_llamacpp_version) {
+    std::thread t1([]() {
+      auto config = file_manager_utils::GetCortexConfig();
+      // TODO: namh currently we only check for llamacpp. Need to add support for other engines
+      auto get_latest_version = []() -> cpp::result<std::string, std::string> {
+        try {
+          auto res = github_release_utils::GetReleaseByVersion(
+              "janhq", "cortex.llamacpp", "latest");
+          if (res.has_error()) {
+            CTL_ERR("Failed to get latest llama.cpp version: " << res.error());
+            return cpp::fail("Failed to get latest llama.cpp version: " +
+                             res.error());
+          }
+          CTL_INF("Latest llamacpp version: " << res->tag_name);
+          return res->tag_name;
+        } catch (const std::exception& e) {
+          CTL_ERR("Failed to get latest llama.cpp version: " << e.what());
           return cpp::fail("Failed to get latest llama.cpp version: " +
-                           res.error());
+                           std::string(e.what()));
         }
-        CTL_INF("Latest llamacpp version: " << res->tag_name);
-        return res->tag_name;
-      } catch (const std::exception& e) {
-        CTL_ERR("Failed to get latest llama.cpp version: " << e.what());
-        return cpp::fail("Failed to get latest llama.cpp version: " +
-                         std::string(e.what()));
-      }
-    };
+      };
 
-    if (should_check_for_latest_llamacpp_version) {
       auto res = get_latest_version();
       if (res.has_error()) {
        CTL_ERR("Failed to get latest llama.cpp version: " << res.error());
        return;
      }
 
+      auto now = std::chrono::system_clock::now();
       CTL_DBG("latest llama.cpp version: " << res.value());
       config.checkedForLlamacppUpdateAt =
           std::chrono::duration_cast<std::chrono::milliseconds>(
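
The gating logic in this hunk persists the last check time in the cortex
config as a millisecond Unix timestamp (checkedForLlamacppUpdateAt, with 0
meaning "never checked") and allows a new check only once 24 hours have
elapsed. A minimal standalone sketch of that staleness test, using only
std::chrono; the field semantics come from the diff, while the function name
is illustrative:

    #include <chrono>
    #include <cstdint>

    // True when `checked_at_ms` (milliseconds since the Unix epoch, 0 for
    // "never checked") is at least 24 hours in the past.
    bool ShouldCheckForUpdate(std::int64_t checked_at_ms) {
      using namespace std::chrono;
      if (checked_at_ms == 0)
        return true;  // first run: always check
      auto last_check = system_clock::time_point(milliseconds(checked_at_ms));
      return system_clock::now() > last_check + hours(24);
    }
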
@@ -159,22 +159,9 @@ int main(int argc, char* argv[]) {
         CTL_INF("Updated config file with latest llama.cpp version: "
                 << res.value());
       }
-    }
-
-    CTL_DBG("latest llama.cpp version: " << config.latestLlamacppRelease);
-    CTL_DBG("llamacpp version: " << config.llamacppVersion);
-    if (config.llamacppVersion.empty()) {
-      return;
-    }
-
-    if (config.latestLlamacppRelease != config.llamacppVersion) {
-      CLI_LOG(
-          "New llama.cpp version available: " << config.latestLlamacppRelease);
-      CLI_LOG("To update, run: " << commands::GetCortexBinary()
-                                 << " engines update llama-cpp\n");
-    }
-  });
-  t1.detach();
+    });
+    t1.detach();
+  }
 
   trantor::FileLogger async_file_logger;
   SetupLogger(async_file_logger, verbose);
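
The shape of the refactor is the part worth noting: the staleness decision
now runs synchronously in main(), and the detached worker thread is spawned
only when a network check is actually due, rather than on every startup. A
compressed sketch of that spawn-gating pattern; FetchLatestTag and
SaveLastChecked are hypothetical stand-ins, not the project's helpers (the
real code goes through github_release_utils and the yaml config):

    #include <chrono>
    #include <cstdint>
    #include <iostream>
    #include <string>
    #include <thread>

    // Hypothetical stand-ins for the release lookup and the config writer.
    std::string FetchLatestTag() {
      return "v0.1.0";  // placeholder for the blocking GitHub API call
    }
    void SaveLastChecked(std::int64_t epoch_ms) {
      std::cout << "checkedForLlamacppUpdateAt = " << epoch_ms << "\n";
    }

    void MaybeCheckForUpdate(bool should_check) {
      if (!should_check)
        return;  // nothing is due, so no thread is created at all
      std::thread worker([]() {
        auto tag = FetchLatestTag();
        auto now_ms = std::chrono::duration_cast<std::chrono::milliseconds>(
                          std::chrono::system_clock::now().time_since_epoch())
                          .count();
        SaveLastChecked(now_ms);  // stamp the check time after it succeeds
        std::cout << "latest tag: " << tag << "\n";
      });
      // Fire-and-forget, matching t1.detach() in the diff; the process must
      // outlive the worker for its effects to land.
      worker.detach();
    }

Gating before the spawn keeps the cheap chrono comparison on the main thread
and avoids paying for a thread on startups where no check is due.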