forked from janhq/cortex.cpp
-
Notifications
You must be signed in to change notification settings - Fork 0
Expand file tree
/
Copy pathmain.cc
More file actions
221 lines (198 loc) · 7.42 KB
/
main.cc
File metadata and controls
221 lines (198 loc) · 7.42 KB
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
125
126
127
128
129
130
131
132
133
134
135
136
137
138
139
140
141
142
143
144
145
146
147
148
149
150
151
152
153
154
155
156
157
158
159
160
161
162
163
164
165
166
167
168
169
170
171
172
173
174
175
176
177
178
179
180
181
182
183
184
185
186
187
188
189
190
191
192
193
194
195
196
197
198
199
200
201
202
203
204
205
206
207
208
209
210
211
212
213
214
215
216
217
218
219
220
221
#include <chrono>
#include <cstring>
#include <filesystem>
#include <iostream>
#include <memory>
#include <thread>

#include "command_line_parser.h"
#include "commands/cortex_upd_cmd.h"
#include "services/download_service.h"
#include "utils/archive_utils.h"
#include "utils/cortex_utils.h"
#include "utils/file_logger.h"
#include "utils/file_manager_utils.h"
#include "utils/logging_utils.h"
#include "utils/system_info_utils.h"
#if defined(__APPLE__) && defined(__MACH__)
#include <libgen.h> // for dirname()
#include <mach-o/dyld.h>
#include <sys/types.h>
#elif defined(__linux__)
#include <libgen.h> // for dirname()
#include <signal.h>
#include <sys/types.h>
#include <unistd.h> // for readlink()
#elif defined(_WIN32)
#include <windows.h>
#undef max
#else
#error "Unsupported platform!"
#endif
#include <codecvt>
#include <locale>
// Deletes the leftover "cortex_temp" binary (created by a previous
// self-update) from the executable's folder, if one exists. Failures are
// reported to stderr because this runs before the file logger is set up.
void RemoveBinaryTempFileIfExists() {
  const auto temp_path =
      file_manager_utils::GetExecutableFolderContainerPath() / "cortex_temp";
  if (!std::filesystem::exists(temp_path)) {
    return;
  }
  try {
    std::filesystem::remove(temp_path);
  } catch (const std::exception& e) {
    std::cerr << e.what() << '\n';
  }
}
// Routes trantor log output into a rolling CLI log file, unless the user
// asked for --verbose (in which case logs stay on the console and this is a
// no-op). The caller must keep `async_logger` alive for the whole program
// (it is captured by reference in the global output callbacks).
void SetupLogger(trantor::FileLogger& async_logger, bool verbose) {
  if (verbose) {
    return;
  }

  const auto config = file_manager_utils::GetCortexConfig();

  // Make sure the logs directory exists before the logger opens its file.
  std::filesystem::create_directories(
#if defined(_WIN32)
      std::filesystem::u8path(config.logFolderPath) /
#else
      std::filesystem::path(config.logFolderPath) /
#endif
      std::filesystem::path(cortex_utils::logs_folder));

  // No u8path needed here: trantor handles path encoding itself.
  const auto log_file = (std::filesystem::path(config.logFolderPath) /
                         std::filesystem::path(cortex_utils::logs_cli_base_name))
                            .string();
  async_logger.setFileName(log_file);
  async_logger.setMaxLines(config.maxLogLines);  // Keep last 100000 lines
  async_logger.startLogging();

  trantor::Logger::setOutputFunction(
      [&async_logger](const char* msg, const uint64_t len) {
        async_logger.output_(msg, len);
      },
      [&async_logger]() { async_logger.flush(); });
}
void InstallServer() {
#if !defined(_WIN32)
if (getuid()) {
CLI_LOG("Error: Not root user. Please run with sudo.");
return;
}
#endif
auto cuc = commands::CortexUpdCmd(std::make_shared<DownloadService>());
cuc.Exec({}, true /*force*/);
}
// CLI entry point: validates the platform, parses a handful of pre-parse
// flags, ensures the config file exists, kicks off a background llama.cpp
// version check (at most once per 24h), sets up logging, and finally hands
// off to CommandLineParser.
int main(int argc, char* argv[]) {
  // Stop the program if the system is not supported.
  auto system_info = system_info_utils::GetSystemInfo();
  if (system_info->arch == system_info_utils::kUnsupported ||
      system_info->os == system_info_utils::kUnsupported) {
    CTL_ERR("Unsupported OS or architecture: " << system_info->os << ", "
                                               << system_info->arch);
    return 1;
  }

  // NOTE(review): there is no matching curl_global_cleanup(); adding one
  // here would be unsafe while the detached update-check thread below may
  // still be using curl — confirm shutdown ordering before changing this.
  curl_global_init(CURL_GLOBAL_DEFAULT);

  bool should_install_server = false;
  bool verbose = false;
  // Start at i = 1: argv[0] is the program name, and the "update --server"
  // branch reads argv[i - 1]. The previous loop started at 0, which made
  // argv[i - 1] an out-of-bounds read on the first iteration.
  for (int i = 1; i < argc; i++) {
    if (strcmp(argv[i], "--config_file_path") == 0) {
      // Guard i + 1 < argc: if the flag is the last argument, argv[i + 1]
      // is the terminating null pointer and assigning it to a std::string
      // is undefined behavior.
      if (i + 1 < argc) {
        file_manager_utils::cortex_config_file_path = argv[i + 1];
      }
    } else if (strcmp(argv[i], "--data_folder_path") == 0) {
      if (i + 1 < argc) {
        file_manager_utils::cortex_data_folder_path = argv[i + 1];
      }
    } else if ((strcmp(argv[i], "--server") == 0) &&
               (strcmp(argv[i - 1], "update") == 0)) {
      should_install_server = true;
    } else if (strcmp(argv[i], "--verbose") == 0) {
      verbose = true;
    }
  }

  {
    auto result = file_manager_utils::CreateConfigFileIfNotExist();
    if (result.has_error()) {
      CTL_ERR("Error creating config file: " << result.error());
    }
    namespace fmu = file_manager_utils;
    // Override data folder path if it is configured and changed.
    if (!fmu::cortex_data_folder_path.empty()) {
      auto cfg = file_manager_utils::GetCortexConfig();
      if (cfg.dataFolderPath != fmu::cortex_data_folder_path ||
          cfg.logFolderPath != fmu::cortex_data_folder_path) {
        cfg.dataFolderPath = fmu::cortex_data_folder_path;
        cfg.logFolderPath = fmu::cortex_data_folder_path;
        auto config_path = file_manager_utils::GetConfigurationPath();
        auto result =
            config_yaml_utils::CortexConfigMgr::GetInstance().DumpYamlConfig(
                cfg, config_path.string());
        if (result.has_error()) {
          CTL_ERR("Error update " << config_path.string() << result.error());
        }
      }
    }
  }

  // Clean up any leftover binary from a previous self-update.
  RemoveBinaryTempFileIfExists();

  // Decide whether to check GitHub for a newer cortex.llamacpp release:
  // check at most once every 24 hours, based on the timestamp (ms since
  // epoch) persisted in the config file.
  auto should_check_for_latest_llamacpp_version = true;
  auto now = std::chrono::system_clock::now();
  auto config = file_manager_utils::GetCortexConfig();
  if (config.checkedForLlamacppUpdateAt != 0) {
    auto last_check =
        std::chrono::system_clock::time_point(
            std::chrono::milliseconds(config.checkedForLlamacppUpdateAt)) +
        std::chrono::hours(24);
    should_check_for_latest_llamacpp_version = now > last_check;
  }

  if (should_check_for_latest_llamacpp_version) {
    // Fire-and-forget background check; failures are logged and ignored.
    // NOTE(review): this detached thread re-reads and rewrites the config
    // file while the main thread may also touch it — presumably
    // CortexConfigMgr serializes writes; verify.
    std::thread t1([]() {
      // TODO: namh current we only check for llamacpp. Need to add support for other engine
      auto get_latest_version = []() -> cpp::result<std::string, std::string> {
        try {
          auto res = github_release_utils::GetReleaseByVersion(
              "janhq", "cortex.llamacpp", "latest");
          if (res.has_error()) {
            CTL_ERR("Failed to get latest llama.cpp version: " << res.error());
            return cpp::fail("Failed to get latest llama.cpp version: " +
                             res.error());
          }
          CTL_INF("Latest llamacpp version: " << res->tag_name);
          return res->tag_name;
        } catch (const std::exception& e) {
          CTL_ERR("Failed to get latest llama.cpp version: " << e.what());
          return cpp::fail("Failed to get latest llama.cpp version: " +
                           std::string(e.what()));
        }
      };
      auto res = get_latest_version();
      if (res.has_error()) {
        CTL_ERR("Failed to get latest llama.cpp version: " << res.error());
        return;
      }
      auto now = std::chrono::system_clock::now();
      CTL_DBG("latest llama.cpp version: " << res.value());
      // Persist both the check timestamp and the discovered release tag.
      auto config = file_manager_utils::GetCortexConfig();
      config.checkedForLlamacppUpdateAt =
          std::chrono::duration_cast<std::chrono::milliseconds>(
              now.time_since_epoch())
              .count();
      config.latestLlamacppRelease = res.value();
      auto upd_config_res =
          config_yaml_utils::CortexConfigMgr::GetInstance().DumpYamlConfig(
              config, file_manager_utils::GetConfigurationPath().string());
      if (upd_config_res.has_error()) {
        CTL_ERR("Failed to update config file: " << upd_config_res.error());
      } else {
        CTL_INF("Updated config file with latest llama.cpp version: "
                << res.value());
      }
    });
    t1.detach();
  }

  // static so the logger outlives main's scope; SetupLogger installs
  // callbacks that capture it by reference.
  static trantor::FileLogger async_file_logger;
  SetupLogger(async_file_logger, verbose);

  if (should_install_server) {
    InstallServer();
    return 0;
  }

  // Check if server binary exists; if not, tell the user how to install it.
  auto exe = commands::GetCortexServerBinary();
  auto server_binary_path =
      file_manager_utils::GetExecutableFolderContainerPath() / exe;
  if (!std::filesystem::exists(server_binary_path)) {
    std::cout << CORTEX_CPP_VERSION
              << " requires server binary, to install server, run: "
              << commands::GetRole() << commands::GetCortexBinary()
              << " update --server" << std::endl;
    return 0;
  }

  // Hand off to the real command-line parser.
  CommandLineParser clp;
  clp.SetupCommand(argc, argv);
  return 0;
}