Mirror of https://github.com/davidallendj/gdpm.git (synced 2025-12-20 03:27:02 -07:00)
Implemented parallel downloads through the CURL multi interface and added a purge command
- Updated `README.md` file with examples
- Fixed error messages showing no message or the wrong message
- Changed the prompt message when installing, removing, etc.
- Changed how `http::request` works
- Added `http::multi` class for parallel downloads
- Removed separate `concepts.hpp` file

TODO: Fix ZIP not extracting after running the `install` command
parent: 766eabd5b2
commit: 807aa8e5b2
21 changed files with 1158 additions and 758 deletions
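The commit message describes switching downloads to libcurl's multi interface. For orientation, here is a minimal, self-contained sketch of that pattern using plain libcurl (not gdpm's wrapper classes; the URLs, paths, and connection limit are placeholders): several easy handles are attached to one multi handle, and a single loop drives all transfers until none remain.

```cpp
#include <curl/curl.h>
#include <cstdio>
#include <string>
#include <utility>
#include <vector>

// Sketch only: parallel downloads with the libcurl multi interface.
// Each URL gets its own easy handle writing to its own FILE*; one loop
// drives every transfer concurrently.
int download_all(const std::vector<std::pair<std::string, std::string>>& jobs){
	curl_global_init(CURL_GLOBAL_ALL);
	CURLM *cm = curl_multi_init();
	curl_multi_setopt(cm, CURLMOPT_MAXCONNECTS, 8L);

	std::vector<FILE*> files;
	for(const auto& [url, path] : jobs){
		FILE *fp = std::fopen(path.c_str(), "wb");
		if(!fp) continue;                       // skip jobs whose output file cannot be opened
		files.push_back(fp);
		CURL *h = curl_easy_init();
		curl_easy_setopt(h, CURLOPT_URL, url.c_str());
		curl_easy_setopt(h, CURLOPT_FOLLOWLOCATION, 1L);
		curl_easy_setopt(h, CURLOPT_WRITEDATA, fp); // default write callback fwrite()s here
		curl_multi_add_handle(cm, h);
	}

	int still_running = 0;
	do{
		curl_multi_perform(cm, &still_running);

		int msgs_left = 0;
		CURLMsg *msg = nullptr;
		while((msg = curl_multi_info_read(cm, &msgs_left))){
			if(msg->msg == CURLMSG_DONE){
				// One transfer finished; detach and release its easy handle.
				curl_multi_remove_handle(cm, msg->easy_handle);
				curl_easy_cleanup(msg->easy_handle);
			}
		}
		if(still_running)
			curl_multi_wait(cm, nullptr, 0, 1000, nullptr);
	}while(still_running);

	for(FILE *fp : files) std::fclose(fp);
	curl_multi_cleanup(cm);
	curl_global_cleanup();
	return 0;
}
```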
@@ -29,15 +29,15 @@ namespace gdpm::cache{
namespace fs = std::filesystem;
fs::path dir_path = fs::path(params.cache_path).parent_path();
if(!fs::exists(dir_path)){
log::info("Creating cache directories...{}", params.cache_path);
log::debug("Creating cache directories...{}", params.cache_path);
fs::create_directories(dir_path);
}

int rc = sqlite3_open(params.cache_path.c_str(), &db);
if(rc != SQLITE_OK){
error error(constants::error::SQLITE_ERR,
error error(ec::SQLITE_ERR,
std::format(
"create_package_database.sqlite3_open(): {}",
"cache::create_package_database::sqlite3_open(): {}",
sqlite3_errmsg(db)
)
);

@@ -70,8 +70,8 @@ namespace gdpm::cache{
rc = sqlite3_exec(db, sql.c_str(), nullptr, nullptr, &errmsg);
if(rc != SQLITE_OK){
// log::error("Failed to fetch data: {}\n", sqlite3_errmsg(db));
error error(constants::error::SQLITE_ERR, std::format(
"create_package_database.sqlite3_exec(): {}",
error error(ec::SQLITE_ERR, std::format(
"cache::create_package_database::sqlite3_exec(): {}",
errmsg
));
sqlite3_free(errmsg);

@@ -101,8 +101,8 @@ namespace gdpm::cache{
// log::println("{}", sql);
int rc = sqlite3_open(params.cache_path.c_str(), &db);
if(rc != SQLITE_OK){
error error(constants::error::SQLITE_ERR, std::format(
"insert_package_info.sqlite3_open(): {}",
error error(ec::SQLITE_ERR, std::format(
"cache::insert_package_info::sqlite3_open(): {}",
sqlite3_errmsg(db)
));
sqlite3_close(db);

@@ -110,10 +110,9 @@ namespace gdpm::cache{
}
rc = sqlite3_exec(db, sql.c_str(), nullptr, nullptr, &errmsg);
if(rc != SQLITE_OK){
error error(rc, std::format(
"insert_package_info.sqlite3_exec(): {}", errmsg
));
log::error(error);
error error = log::error_rc(ec::SQLITE_ERR,
std::format("cache::insert_package_info::sqlite3_exec(): {}", errmsg)
);
sqlite3_free(errmsg);
sqlite3_close(db);
return error;

@@ -135,8 +134,6 @@ namespace gdpm::cache{
string sql{"BEGIN TRANSACTION;\n"};

auto callback = [](void *data, int argc, char **argv, char **colnames){
// log::error("{}", (const char*)data);
// p_data *_data = (p_data*)data;
package::info_list *_p_vector = (package::info_list*) data;
package::info p{
.asset_id = std::stoul(argv[1]),

@@ -163,8 +160,8 @@ namespace gdpm::cache{
int rc = sqlite3_open(params.cache_path.c_str(), &db);
if(rc != SQLITE_OK){
error error(constants::error::SQLITE_ERR, std::format(
"get_package_info_by_id.sqlite3_open(): {}", sqlite3_errmsg(db)
error error(ec::SQLITE_ERR, std::format(
"cache::get_package_info_by_id::sqlite3_open(): {}", sqlite3_errmsg(db)
));
sqlite3_close(db);
return result_t(package::info_list(), error);

@@ -177,7 +174,7 @@ namespace gdpm::cache{
rc = sqlite3_exec(db, sql.c_str(), callback, (void*)&p_vector, &errmsg);
if(rc != SQLITE_OK){
error error(constants::error::SQLITE_ERR, std::format(
"get_package_info_by_id.sqlite3_exec(): {}", errmsg
"cache::get_package_info_by_id::sqlite3_exec(): {}", errmsg
));
sqlite3_free(errmsg);
sqlite3_close(db);

@@ -231,8 +228,8 @@ namespace gdpm::cache{
int rc = sqlite3_open(params.cache_path.c_str(), &db);
if(rc != SQLITE_OK){
error error(constants::error::SQLITE_ERR, std::format(
"get_package_info_by_title.sqlite3_open(): {}", sqlite3_errmsg(db)
error error(ec::SQLITE_ERR, std::format(
"cache::get_package_info_by_title::sqlite3_open(): {}", sqlite3_errmsg(db)
));
sqlite3_close(db);
return result_t(package::info_list(), error);

@@ -246,8 +243,8 @@ namespace gdpm::cache{
// log::println(sql);
rc = sqlite3_exec(db, sql.c_str(), callback, (void*)&p_vector, &errmsg);
if(rc != SQLITE_OK){
error error(constants::error::SQLITE_ERR, std::format(
"get_package_info_by_title.sqlite3_exec(): {}", errmsg
error error(ec::SQLITE_ERR, std::format(
"cache::get_package_info_by_title::sqlite3_exec(): {}", errmsg
));
sqlite3_free(errmsg);
sqlite3_close(db);

@@ -292,8 +289,8 @@ namespace gdpm::cache{
int rc = sqlite3_open(params.cache_path.c_str(), &db);
if(rc != SQLITE_OK){
error error(constants::error::SQLITE_ERR, std::format(
"get_installed_packages.sqlite3_open(): {}", sqlite3_errmsg(db)
error error(ec::SQLITE_ERR, std::format(
"cache::get_installed_packages::sqlite3_open(): {}", sqlite3_errmsg(db)
));
sqlite3_close(db);
return result_t(package::info_list(), error);

@@ -302,8 +299,8 @@ namespace gdpm::cache{
sql += "SELECT * FROM " + params.table_name + " WHERE is_installed=1; COMMIT;";
rc = sqlite3_exec(db, sql.c_str(), callback, (void*)&p_vector, &errmsg);
if(rc != SQLITE_OK){
error error(rc, std::format(
"get_installed_packages.sqlite3_exec(): {}", errmsg
error error(ec::SQLITE_ERR, std::format(
"cache::get_installed_packages::sqlite3_exec(): {}", errmsg
));
log::error(error);
sqlite3_free(errmsg);

@@ -326,8 +323,8 @@ namespace gdpm::cache{
int rc = sqlite3_open(params.cache_path.c_str(), &db);
if(rc != SQLITE_OK){
error error(
constants::error::SQLITE_ERR, std::format(
"update_package_info.sqlite3_open(): {}", sqlite3_errmsg(db)
ec::SQLITE_ERR, std::format(
"cache::update_package_info::sqlite3_open(): {}", sqlite3_errmsg(db)
));
sqlite3_close(db);
return error;

@@ -359,8 +356,8 @@ namespace gdpm::cache{
}
rc = sqlite3_exec(db, sql.c_str(), nullptr, nullptr, &errmsg);
if(rc != SQLITE_OK){
error error(constants::error::SQLITE_ERR, std::format(
"update_package_info.sqlite3_exec(): {}\n\t{}", errmsg, sql
error error(ec::SQLITE_ERR, std::format(
"cache::update_package_info::sqlite3_exec(): {}\n\t{}", errmsg, sql
));
sqlite3_free(errmsg);
sqlite3_close(db);

@@ -382,8 +379,8 @@ namespace gdpm::cache{
int rc = sqlite3_open(params.cache_path.c_str(), &db);
if(rc != SQLITE_OK){
error error(constants::error::SQLITE_ERR, std::format(
"delete_packages.sqlite3_open(): {}", sqlite3_errmsg(db)
error error(ec::SQLITE_ERR, std::format(
"cache::delete_packages::sqlite3_open(): {}", sqlite3_errmsg(db)
));
sqlite3_close(db);
return error;

@@ -395,8 +392,8 @@ namespace gdpm::cache{
}
rc = sqlite3_exec(db, sql.c_str(), nullptr, nullptr, &errmsg);
if(rc != SQLITE_OK){
error error(constants::error::SQLITE_ERR, std::format(
"delete_packages.sqlite3_exec(): {}", errmsg
error error(ec::SQLITE_ERR, std::format(
"cache::delete_packages::sqlite3_exec(): {}", errmsg
));
sqlite3_free(errmsg);
sqlite3_close(db);

@@ -418,8 +415,8 @@ namespace gdpm::cache{
int rc = sqlite3_open(params.cache_path.c_str(), &db);
if(rc != SQLITE_OK){
error error(constants::error::SQLITE_ERR, std::format(
"delete_packages.sqlite3_open(): {}", errmsg
error error(ec::SQLITE_ERR, std::format(
"cache::delete_packages::sqlite3_open(): {}", errmsg
));
sqlite3_close(db);
return error;

@@ -431,8 +428,8 @@ namespace gdpm::cache{
}
rc = sqlite3_exec(db, sql.c_str(), nullptr, nullptr, &errmsg);
if(rc != SQLITE_OK){
error error(constants::error::SQLITE_ERR, std::format(
"delete_packages.sqlite3_exec(): {}", errmsg
error error(ec::SQLITE_ERR, std::format(
"cache::delete_packages::sqlite3_exec(): {}", errmsg
));
sqlite3_free(errmsg);
sqlite3_close(db);

@@ -451,8 +448,8 @@ namespace gdpm::cache{
int rc = sqlite3_open(params.cache_path.c_str(), &db);
if(rc != SQLITE_OK){
error error(constants::error::SQLITE_ERR, std::format(
"drop_package_database.sqlite3_open(): {}", sqlite3_errmsg(db)
error error(ec::SQLITE_ERR, std::format(
"cache::drop_package_database::sqlite3_open(): {}", sqlite3_errmsg(db)
));
sqlite3_close(db);
return error;

@@ -460,8 +457,8 @@ namespace gdpm::cache{
rc = sqlite3_exec(db, sql.c_str(), nullptr, nullptr, &errmsg);
if(rc != SQLITE_OK){
error error(constants::error::SQLITE_ERR, std::format(
"drop_package_database.sqlite3_exec(): {}", errmsg
error error(ec::SQLITE_ERR, std::format(
"cache::drop_package_database::sqlite3_exec(): {}", errmsg
));
sqlite3_free(errmsg);
sqlite3_close(db);
@@ -106,18 +106,15 @@ namespace gdpm::config{
ParseErrorCode status = doc.Parse(contents.c_str()).GetParseError();

if(!doc.IsObject()){
error error(
constants::error::FILE_NOT_FOUND,
return log::error_rc(
ec::FILE_NOT_FOUND,
"Could not load config file."
);
log::error(error);
return error;
}

error error = validate(doc);
if(error()){
log::error(error);
return error;
if(error.has_occurred()){
return log::error_rc(error);
}

/* Make sure contents were read correctly. */

@@ -138,12 +135,10 @@ namespace gdpm::config{
);
}
} else {
gdpm::error error(
constants::error::INVALID_KEY,
return log::error_rc(
ec::INVALID_KEY,
"Could not read key `remote_sources`."
);
log::error(error);
return error;
}
}
auto _get_value_string = [](Document& doc, const char *property){
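The config changes above follow the same error-reporting migration used throughout this commit: instead of constructing an error, logging it, and returning it in three statements, the code now logs and returns in one call. Roughly, as an illustrative before/after using only the names visible in the diff:

```cpp
// Before: construct, log, then return.
error error(constants::error::FILE_NOT_FOUND, "Could not load config file.");
log::error(error);
return error;

// After: log::error_rc() logs the error and returns it in a single expression.
return log::error_rc(ec::FILE_NOT_FOUND, "Could not load config file.");
```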
396	src/http.cpp

@@ -2,10 +2,14 @@
#include "http.hpp"
#include "utils.hpp"
#include "log.hpp"
#include "error.hpp"
#include <curl/curl.h>
#include <curl/easy.h>
#include <curl/multi.h>
#include <memory>
#include <stdio.h>
#include <chrono>
#include <type_traits>

namespace gdpm::http{

@@ -15,114 +19,70 @@ namespace gdpm::http{
curl = curl_easy_init();
}

context::~context(){
curl_global_cleanup();
curl_easy_cleanup(curl);
curl_global_cleanup();
}

CURL* const context::get_curl() const{
return curl;
}

string context::url_escape(const string &url){
return curl_easy_escape(curl, url.c_str(), url.size());;
}

response context::request_get(
const string& url,
const http::request_params& params

response context::request(
const string& url,
const http::request& params
){
CURLcode res;
utils::memory_buffer buf = utils::make_buffer();
utils::memory_buffer data = utils::make_buffer();
response r;

#if (GDPM_DELAY_HTTP_REQUESTS == 1)
using namespace std::chrono_literals;
utils::delay();
#endif

// curl_global_init(CURL_GLOBAL_ALL);
// curl = curl_easy_init();
if(curl){
utils::memory_buffer *data;
curl_slist *list = _add_headers(curl, params.headers);
curl_easy_setopt(curl, CURLOPT_URL, url.c_str());
// curl_easy_setopt(curl, CURLOPT_POSTFIELDS, "name=daniel&project=curl");
curl_easy_setopt(curl, CURLOPT_CUSTOMREQUEST, "GET");
curl_easy_setopt(curl, CURLOPT_WRITEDATA, (void*)&buf);
curl_easy_setopt(curl, CURLOPT_WRITEFUNCTION, utils::curl::write_to_buffer);
curl_easy_setopt(curl, CURLOPT_NOPROGRESS, false);
curl_easy_setopt(curl, CURLOPT_XFERINFODATA, &data);
curl_easy_setopt(curl, CURLOPT_XFERINFOFUNCTION, utils::curl::show_progress);
curl_easy_setopt(curl, CURLOPT_USERAGENT, constants::UserAgent.c_str());
curl_easy_setopt(curl, CURLOPT_TIMEOUT_MS, params.timeout);
res = curl_easy_perform(curl);
curl_slist_free_all(list);
curl_easy_getinfo(curl, CURLINFO_RESPONSE_CODE, &r.code);
if(res != CURLE_OK && params.verbose > 0)
log::error("_make_request.curl_easy_perform(): {}", curl_easy_strerror(res));
curl_easy_cleanup(curl);
}

r.body = buf.addr;
utils::free_buffer(buf);
// curl_global_cleanup();
return r;
}

response context::request_post(
const string& url,
const http::request_params& params
){
// CURL *curl = nullptr;
CURLcode res;
utils::memory_buffer buf = utils::make_buffer();
response r;

#if (GDPM_DELAY_HTTP_REQUESTS == 1)
using namespace std::chrono_literals;
utils::delay();
#endif
string h;
std::for_each(
params.headers.begin(),
params.headers.end(),
[&h](const string_pair& kv){
h += kv.first + "=" + kv.second + "&";
curl_slist *list = add_headers(curl, params.headers);
if(params.method == method::POST){
string h;
std::for_each(
params.headers.begin(),
params.headers.end(),
[&h](const string_pair& kv){
h += kv.first + "=" + kv.second + "&";
}
);
h.pop_back();
h = url_escape(h);
curl_easy_setopt(curl, CURLOPT_POSTFIELDSIZE, h.size());
curl_easy_setopt(curl, CURLOPT_POSTFIELDS, h.c_str());
}
else if(params.method == method::GET){
curl_easy_setopt(curl, CURLOPT_CUSTOMREQUEST, "GET");
}
);
h.pop_back();
h = url_escape(h);

// const char *post_fields = "";
// curl_global_init(CURL_GLOBAL_ALL);
// curl = curl_easy_init();
if(curl){
utils::memory_buffer *data;
curl_slist *list = _add_headers(curl, params.headers);
curl_easy_setopt(curl, CURLOPT_URL, url.c_str());
// curl_easy_setopt(curl, CURLOPT_POSTFIELDS, "name=daniel&project=curl");
curl_easy_setopt(curl, CURLOPT_POSTFIELDSIZE, h.size());
curl_easy_setopt(curl, CURLOPT_POSTFIELDS, h.c_str());
curl_easy_setopt(curl, CURLOPT_WRITEDATA, (void*)&buf);
curl_easy_setopt(curl, CURLOPT_WRITEFUNCTION, utils::curl::write_to_buffer);
curl_easy_setopt(curl, CURLOPT_WRITEFUNCTION, write_to_buffer);
curl_easy_setopt(curl, CURLOPT_NOPROGRESS, false);
curl_easy_setopt(curl, CURLOPT_XFERINFODATA, &data);
curl_easy_setopt(curl, CURLOPT_XFERINFOFUNCTION, utils::curl::show_progress);
curl_easy_setopt(curl, CURLOPT_XFERINFODATA, (void*)&data);
curl_easy_setopt(curl, CURLOPT_XFERINFOFUNCTION, show_download_progress);
curl_easy_setopt(curl, CURLOPT_USERAGENT, constants::UserAgent.c_str());
curl_easy_setopt(curl, CURLOPT_TIMEOUT_MS, params.timeout);
res = curl_easy_perform(curl);
curl_slist_free_all(list);
curl_easy_getinfo(curl, CURLINFO_RESPONSE_CODE, &r.code);
if(res != CURLE_OK && params.verbose > 0)
log::error("_make_request.curl_easy_perform(): {}", curl_easy_strerror(res));
log::error(ec::LIBCURL_ERR,
std::format("http::context::request::curl_easy_perform(): {}", curl_easy_strerror(res))
);
curl_easy_cleanup(curl);
}

r.body = buf.addr;
utils::free_buffer(buf);
// curl_global_cleanup();
return r;
}

@@ -130,7 +90,7 @@ namespace gdpm::http{
response context::download_file(
const string& url,
const string& storage_path,
const http::request_params& params
const http::request& params
){
// CURL *curl = nullptr;
CURLcode res;

@@ -141,34 +101,19 @@ namespace gdpm::http{
using namespace std::chrono_literals;
utils::delay();
#endif

// curl_global_init(CURL_GLOBAL_ALL);
// curl = curl_easy_init();
if(curl){
fp = fopen(storage_path.c_str(), "wb");
// if(!config.username.empty() && !config.password.empty()){
// std::string curlopt_userpwd{config.username + ":" + config.password};
// curl_easy_setopt(curl, CURLOPT_USERPWD, curlopt_userpwd.c_str());
// }

// /* Switch on full protocol/debug output while testing and disable
// * progress meter by setting to 0L */
// if(config.verbose){
// curl_easy_setopt(curl, CURLOPT_VERBOSE, 1L);
// curl_easy_setopt(curl, CURLOPT_NOPROGRESS, 0L);
// }
utils::memory_buffer *data;
curl_slist *list = _add_headers(curl, params.headers);
curl_slist *list = add_headers(curl, params.headers);
curl_easy_setopt(curl, CURLOPT_URL, url.c_str());
// curl_easy_setopt(curl, CURLOPT_USERPWD, "user:pass");
curl_easy_setopt(curl, CURLOPT_FAILONERROR, true);
curl_easy_setopt(curl, CURLOPT_HEADER, 0);
curl_easy_setopt(curl, CURLOPT_FOLLOWLOCATION, true);
curl_easy_setopt(curl, CURLOPT_WRITEDATA, fp);
curl_easy_setopt(curl, CURLOPT_WRITEFUNCTION, utils::curl::write_to_stream);
curl_easy_setopt(curl, CURLOPT_WRITEFUNCTION, write_to_stream);
curl_easy_setopt(curl, CURLOPT_NOPROGRESS, false);
curl_easy_setopt(curl, CURLOPT_XFERINFODATA, &data);
curl_easy_setopt(curl, CURLOPT_XFERINFOFUNCTION, utils::curl::show_progress);
curl_easy_setopt(curl, CURLOPT_XFERINFOFUNCTION, show_download_progress);
curl_easy_setopt(curl, CURLOPT_USERAGENT, constants::UserAgent.c_str());
curl_easy_setopt(curl, CURLOPT_TIMEOUT_MS, params.timeout);
res = curl_easy_perform(curl);

@@ -177,16 +122,16 @@ namespace gdpm::http{
/* Get response code, process error, save data, and close file. */
curl_easy_getinfo(curl, CURLINFO_RESPONSE_CODE, &r.code);
if(res != CURLE_OK && params.verbose > 0){
log::error("download_file.curl_easy_perform() failed: {}", curl_easy_strerror(res));
log::error(ec::LIBCURL_ERR,
std::format("http::context::download_file::curl_easy_perform() failed: {}", curl_easy_strerror(res))
);
}
fclose(fp);
}
// curl_global_cleanup();
return r;
}

long context::get_download_size(const string& url){
// CURL *curl = curl_easy_init();
CURLcode res;
if(curl){
curl_easy_setopt(curl, CURLOPT_URL, url.c_str());

@@ -221,7 +166,185 @@ namespace gdpm::http{
}

curl_slist* context::_add_headers(
multi::multi(long max_allowed_transfers){
curl_global_init(CURL_GLOBAL_ALL);
if(max_allowed_transfers > 1)
cm = curl_multi_init();
curl_multi_setopt(cm, CURLMOPT_MAXCONNECTS, (long)max_allowed_transfers);
}

multi::~multi(){
if(cm != nullptr)
curl_multi_cleanup(cm);
curl_global_cleanup();
}

string multi::url_escape(const string &url){
return curl_easy_escape(cm, url.c_str(), url.size());;
}

ptr<transfers> multi::make_requests(
const string_list& urls,
const http::request& params
){
if(cm == nullptr){
log::error(error(PRECONDITION_FAILED,
"http::multi::make_downloads(): multi client not initialized.")
);
return std::make_unique<transfers>();
}
if(urls.size() <= 0){
log::warn("No requests to make.");
return std::make_unique<transfers>();
}
ptr<transfers> ts = std::make_unique<transfers>();
for(const auto& url : urls){
transfer t;
if(t.curl){
curl_slist *list = add_headers(t.curl, params.headers);
if(params.method == method::POST){
string h;
std::for_each(
params.headers.begin(),
params.headers.end(),
[&h](const string_pair& kv){
h += kv.first + "=" + kv.second + "&";
}
);
h.pop_back();
h = url_escape(h);
curl_easy_setopt(t.curl, CURLOPT_POSTFIELDSIZE, h.size());
curl_easy_setopt(t.curl, CURLOPT_POSTFIELDS, h.c_str());
}
else if(params.method == method::GET){
curl_easy_setopt(t.curl, CURLOPT_CUSTOMREQUEST, "GET");
}
curl_easy_setopt(t.curl, CURLOPT_URL, url.c_str());
curl_easy_setopt(t.curl, CURLOPT_WRITEDATA, (void*)&t.data);
curl_easy_setopt(t.curl, CURLOPT_WRITEFUNCTION, write_to_buffer);
curl_easy_setopt(t.curl, CURLOPT_NOPROGRESS, false);
curl_easy_setopt(t.curl, CURLOPT_XFERINFODATA, &t.data);
curl_easy_setopt(t.curl, CURLOPT_XFERINFOFUNCTION, show_download_progress);
curl_easy_setopt(t.curl, CURLOPT_USERAGENT, constants::UserAgent.c_str());
curl_easy_setopt(t.curl, CURLOPT_TIMEOUT_MS, params.timeout);
}
}

return ts;
}

ptr<transfers> multi::make_downloads(
const string_list& urls,
const string_list& storage_paths,
const http::request& params
){
if(cm == nullptr){
log::error(error(ec::PRECONDITION_FAILED,
"http::multi::make_downloads(): multi client not initialized.")
);
return std::make_unique<transfers>();
}
if(urls.size() != storage_paths.size()){
log::error(error(ec::ASSERTION_FAILED,
"http::context::make_downloads(): urls.size() != storage_paths.size()"
));
}
ptr<transfers> ts = std::make_unique<transfers>();
for(size_t i = 0; i < urls.size(); i++){
const string& url = urls.at(i);
const string& storage_path = storage_paths.at(i);
response r;
transfer t;
t.id = i;

if(t.curl){
t.fp = fopen(storage_path.c_str(), "wb");
curl_slist *list = add_headers(t.curl, params.headers);
curl_easy_setopt(t.curl, CURLOPT_URL, url.c_str());
// curl_easy_setopt(t.curl, CURLOPT_PRIVATE, url.c_str());
curl_easy_setopt(t.curl, CURLOPT_FAILONERROR, true);
curl_easy_setopt(t.curl, CURLOPT_HEADER, 0);
curl_easy_setopt(t.curl, CURLOPT_FOLLOWLOCATION, true);
curl_easy_setopt(t.curl, CURLOPT_WRITEDATA, t.fp);
curl_easy_setopt(t.curl, CURLOPT_WRITEFUNCTION, write_to_stream);
curl_easy_setopt(t.curl, CURLOPT_NOPROGRESS, false);
curl_easy_setopt(t.curl, CURLOPT_XFERINFODATA, &t.data);
curl_easy_setopt(t.curl, CURLOPT_XFERINFOFUNCTION, show_download_progress);
curl_easy_setopt(t.curl, CURLOPT_USERAGENT, constants::UserAgent.c_str());
curl_easy_setopt(t.curl, CURLOPT_TIMEOUT_MS, params.timeout);
cres = curl_multi_add_handle(cm, t.curl);
curl_slist_free_all(list);
if(cres != CURLM_OK){
log::error(ec::LIBCURL_ERR,
std::format("http::context::make_downloads(): {}", curl_multi_strerror(cres))
);
}
ts->emplace_back(std::move(t));
/* NOTE: Should the file pointer be closed here? */
}
}

return ts;
}

ptr<responses> multi::execute(
ptr<transfers> transfers,
size_t timeout
){
if(cm == nullptr){
log::error(error(PRECONDITION_FAILED,
"http::multi::execute(): multi client not initialized")
);
return std::make_unique<responses>();
}
if(transfers->empty()){
log::debug("http::multi::execute(): no transfers found");
return std::make_unique<responses>();
}
size_t transfers_left = transfers->size();
ptr<responses> responses = std::make_unique<http::responses>(transfers->size());
do{
int still_alive = 1;
cres = curl_multi_perform(cm, &still_alive);

while((cmessage = curl_multi_info_read(cm, &messages_left))){
if(cmessage->msg == CURLMSG_DONE){
char *url = nullptr;
transfer& t = transfers->at(transfers_left-1);
response& r = responses->at(transfers_left-1);
t.curl = cmessage->easy_handle;
curl_easy_getinfo(cmessage->easy_handle, CURLINFO_EFFECTIVE_URL, &url);
curl_easy_getinfo(cmessage->easy_handle, CURLINFO_RESPONSE_CODE, &r.code);
if((int)cmessage->data.result != CURLM_OK){
log::error(error(ec::LIBCURL_ERR,
std::format("http::context::execute({}): {} <url: {}>", (int)cmessage->data.result, curl_easy_strerror(cmessage->data.result), url))
);
}
curl_multi_remove_handle(cm, t.curl);
curl_easy_cleanup(t.curl);
if(t.fp) fclose(t.fp);
transfers->pop_back();
transfers_left -= 1;
}
else{
log::error(error(ec::LIBCURL_ERR,
std::format("http::context::execute(): {}", (int)cmessage->msg))
);
}
}
if(transfers_left)
curl_multi_wait(cm, NULL, 0, timeout, NULL);
}while(transfers_left);
return responses;
}

curl_slist* add_headers(
CURL *curl,
const headers_t& headers
){

@@ -235,4 +358,83 @@ namespace gdpm::http{
}
return list;
}

size_t write_to_buffer(
char *contents,
size_t size,
size_t nmemb,
void *userdata
){
size_t realsize = size * nmemb;
utils::memory_buffer *m = (utils::memory_buffer*)userdata;

m->addr = (char*)realloc(m->addr, m->size + realsize + 1);
if(m->addr == nullptr){
/* Out of memory */
log::error("Could not allocate memory (realloc returned NULL).");
return 0;
}

memcpy(&(m->addr[m->size]), contents, realsize);
m->size += realsize;
m->addr[m->size] = 0;

return realsize;
}

size_t write_to_stream(
char *ptr,
size_t size,
size_t nmemb,
void *userdata
){
if(nmemb == 0)
return 0;

return fwrite(ptr, size, nmemb, (FILE*)userdata);
}

int show_download_progress(
void *ptr,
curl_off_t total_download,
curl_off_t current_downloaded,
curl_off_t total_upload,
curl_off_t current_upload
){
if(current_downloaded >= total_download)
return 0;
using namespace indicators;
show_console_cursor(false);
// if(total_download != 0){
// // double percent = std::floor((current_downloaded / (total_download)) * 100);
// bar.set_option(option::MaxProgress{total_download});
// // bar.set_option(option::HideBarWhenComplete{false});
// bar.set_progress(current_downloaded);
// bar.set_option(option::PostfixText{
// utils::convert_size(current_downloaded) + " / " +
// utils::convert_size(total_download)
// });
// if(bar.is_completed()){
// bar.set_option(option::PrefixText{"Download completed."});
// bar.mark_as_completed();
// }
// } else {
// if(bar_unknown.is_completed()){
// bar_unknown.set_option(option::PrefixText{"Download completed."});
// bar_unknown.mark_as_completed();
// } else {
// bar.tick();
// bar_unknown.set_option(
// option::PostfixText(std::format("{}", utils::convert_size(current_downloaded)))
// );

// }
// }
show_console_cursor(true);
utils::memory_buffer *m = (utils::memory_buffer*)ptr;
return 0;
}
}
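For orientation, this is roughly how the new `http::multi` class added above is meant to be driven, based on the `make_downloads()`/`execute()` calls visible later in this diff in `package::install()`. The helper name, the transfer count of 4, and the use of the default request/timeout arguments are illustrative assumptions, and the snippet relies on gdpm's own headers and types rather than being standalone:

```cpp
// Illustrative only: mirrors the make_downloads()/execute() usage in package::install().
gdpm::error download_in_parallel(
	const gdpm::string_list& urls,
	const gdpm::string_list& paths
){
	using namespace gdpm;
	http::multi http(4);                                   // up to 4 concurrent transfers (assumed)
	ptr<http::transfers> transfers = http.make_downloads(urls, paths);
	ptr<http::responses> responses = http.execute(std::move(transfers));
	for(const auto& r : *responses){
		if(r.code != http::OK)
			return error(ec::HTTP_RESPONSE_ERR, std::format("HTTP error: {}", r.code));
	}
	return error();
}
```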
587
src/package.cpp
587
src/package.cpp
|
|
@ -40,97 +40,110 @@ namespace gdpm::package{
|
|||
|
||||
*/
|
||||
|
||||
/* Synchronize database information and then try to get data again from
|
||||
cache if possible. */
|
||||
if(config.enable_sync){
|
||||
result_t result = fetch(config, package_titles);
|
||||
error error = result.get_error();
|
||||
if(error.has_occurred()){
|
||||
return log::error_rc(ec::UNKNOWN, "package::install(): could not synchronize database.");
|
||||
}
|
||||
}
|
||||
/* Append files from --file option */
|
||||
read_file_inputs(package_titles, params.input_files);
|
||||
|
||||
result_t result = cache::get_package_info_by_title(package_titles);
|
||||
package::info_list p_cache = result.unwrap_unsafe();
|
||||
|
||||
/* Synchronize database information and then try to get data again from
|
||||
cache if possible. */
|
||||
if(config.enable_sync || p_cache.empty()){
|
||||
result_t result = synchronize_database(config, package_titles);
|
||||
p_cache = result.unwrap_unsafe();
|
||||
}
|
||||
|
||||
/* Match queried package titles with those found in cache. */
|
||||
package::info_list p_found = find_cached_packages(package_titles);
|
||||
|
||||
// if(p_found.size() == package_titles.size()){
|
||||
// log::info("Found all packages stored in local cache.");
|
||||
// }
|
||||
|
||||
/* Found nothing to install so there's nothing to do at this point. */
|
||||
if(p_found.empty()){
|
||||
error error(
|
||||
constants::error::NOT_FOUND,
|
||||
"No packages found to install."
|
||||
if(p_cache.empty()){
|
||||
return log::error_rc(
|
||||
ec::NOT_FOUND, /* TODO: change to PACKAGE_NOT_FOUND */
|
||||
"package::install(): no packages found to install."
|
||||
);
|
||||
log::error(error);
|
||||
return error;
|
||||
}
|
||||
|
||||
log::println("Packages to install: ");
|
||||
for(const auto& p : p_found){
|
||||
string output((p.is_installed) ? p.title + " (reinstall)" : p.title);
|
||||
log::print(" {} ", (p.is_installed) ? p.title + " (reinstall)" : p.title);
|
||||
/* Show packages to install */
|
||||
{
|
||||
using namespace tabulate;
|
||||
Table table;
|
||||
table.format()
|
||||
.border_top("")
|
||||
.border_bottom("")
|
||||
.border_left("")
|
||||
.border_right("")
|
||||
.corner("")
|
||||
.padding_top(0)
|
||||
.padding_bottom(0);
|
||||
table.add_row({"Title", "Author", "Category", "Version", "Godot", "Last Modified", "Installed?"});
|
||||
table[0].format()
|
||||
.font_style({FontStyle::underline, FontStyle::bold});
|
||||
for(const auto& p : p_cache){
|
||||
table.add_row({p.title, p.author, p.category, p.version, p.godot_version, p.modify_date, (p.is_installed) ? "✔️": "❌"});
|
||||
size_t index = table.size() - 1;
|
||||
table[index][3].format().font_align(FontAlign::center);
|
||||
table[index][4].format().font_align(FontAlign::center);
|
||||
table[index][6].format().font_align(FontAlign::center);
|
||||
|
||||
// string output(p.title + GDPM_COLOR_CYAN " v" + p.version + GDPM_COLOR_RESET);
|
||||
// output += GDPM_COLOR_BLUE " last updated: " + p.modify_date + GDPM_COLOR_RESET;
|
||||
// output += (p.is_installed) ? GDPM_COLOR_LIGHT_CYAN " (reinstall)" : "";
|
||||
// output += GDPM_COLOR_RESET;
|
||||
// log::print(" {}\n", output);
|
||||
}
|
||||
table.print(std::cout);
|
||||
log::println("");
|
||||
}
|
||||
log::println("");
|
||||
|
||||
/* Skip prompt if set in config */
|
||||
if(!config.skip_prompt){
|
||||
if(!utils::prompt_user_yn("Do you want to install these packages? (Y/n)"))
|
||||
return error();
|
||||
}
|
||||
|
||||
/* Check if provided param is in remote sources*/
|
||||
/* Check if provided remote param is in remote sources */
|
||||
if(!config.remote_sources.contains(params.remote_source)){
|
||||
error error(
|
||||
constants::error::NOT_FOUND,
|
||||
"Remote source not found in config."
|
||||
return log::error_rc(
|
||||
ec::NOT_FOUND, /* TODO: change to REMOTE_NOT_FOUND */
|
||||
"package::install(): remote source not found in config."
|
||||
);
|
||||
log::error(error);
|
||||
return error;
|
||||
}
|
||||
|
||||
/* Try and obtain all requested packages. */
|
||||
std::vector<string_pair> dir_pairs;
|
||||
task_list tasks;
|
||||
// task_list tasks;
|
||||
/* Retrieve necessary asset data if it was found already in cache */
|
||||
std::vector<string_pair> target_extract_dirs;
|
||||
rest_api::request_params rest_api_params = rest_api::make_from_config(config);
|
||||
for(auto& p : p_found){ // TODO: Execute each in parallel using coroutines??
|
||||
|
||||
/* Check if a remote source was provided. If not, then try to get packages
|
||||
in global storage location only. */
|
||||
|
||||
log::info("Fetching asset data for \"{}\"...", p.title);
|
||||
package::title_list p_download_urls;
|
||||
package::path_list p_storage_paths;
|
||||
for(auto& p : p_cache){
|
||||
log::info_n("Fetching asset data for \"{}\"...", p.title);
|
||||
string url{config.remote_sources.at(params.remote_source) + rest_api::endpoints::GET_AssetId};
|
||||
string package_dir, tmp_dir, tmp_zip;
|
||||
string package_dir = config.packages_dir + "/" + p.title;
|
||||
string tmp_dir = config.tmp_dir + "/" + p.title;
|
||||
string tmp_zip = tmp_dir + ".zip";
|
||||
|
||||
/* Retrieve necessary asset data if it was found already in cache */
|
||||
Document doc;
|
||||
bool is_missing_data = p.download_url.empty() || p.category.empty() || p.description.empty() || p.support_level.empty();
|
||||
if(is_missing_data){
|
||||
bool is_data_missing = p.download_url.empty() || p.category.empty() || p.description.empty() || p.support_level.empty();
|
||||
if(is_data_missing){
|
||||
doc = rest_api::get_asset(url, p.asset_id, rest_api_params);
|
||||
if(doc.HasParseError() || doc.IsNull()){
|
||||
return log::error_rc(error(
|
||||
constants::error::JSON_ERR,
|
||||
std::format("Error parsing JSON: {}", GetParseError_En(doc.GetParseError()))
|
||||
));
|
||||
return log::error_rc(
|
||||
ec::JSON_ERR,
|
||||
std::format("package::install(): error parsing JSON: {}",
|
||||
GetParseError_En(doc.GetParseError()))
|
||||
);
|
||||
}
|
||||
p.category = doc["category"].GetString();
|
||||
p.description = doc["description"].GetString();
|
||||
p.support_level = doc["support_level"].GetString();
|
||||
p.download_url = doc["download_url"].GetString();
|
||||
p.download_hash = doc["download_hash"].GetString();
|
||||
log::println("Done");
|
||||
}
|
||||
else{
|
||||
log::info("Found asset data found for \"{}\"", p.title);
|
||||
log::println("");
|
||||
log::info("Found asset data for \"{}\".", p.title);
|
||||
}
|
||||
|
||||
/* Set directory and temp paths for storage */
|
||||
package_dir = config.packages_dir + "/" + p.title;
|
||||
tmp_dir = config.tmp_dir + "/" + p.title;
|
||||
tmp_zip = tmp_dir + ".zip";
|
||||
|
||||
|
||||
/* Make directories for packages if they don't exist to keep everything organized */
|
||||
if(!std::filesystem::exists(config.tmp_dir))
|
||||
std::filesystem::create_directories(config.tmp_dir);
|
||||
|
|
@ -145,60 +158,83 @@ namespace gdpm::package{
|
|||
OStreamWrapper osw(ofs);
|
||||
PrettyWriter<OStreamWrapper> writer(osw);
|
||||
doc.Accept(writer);
|
||||
target_extract_dirs.emplace_back(string_pair(tmp_zip, package_dir + "/"));
|
||||
|
||||
/* Check if we already have a stored temporary file before attempting to download */
|
||||
if(std::filesystem::exists(tmp_zip) && std::filesystem::is_regular_file(tmp_zip)){
|
||||
log::info("Found cached package. Skipping download.", p.title);
|
||||
log::info("Found cached package for \"{}\".", p.title);
|
||||
}
|
||||
else{
|
||||
/* Download all the package files and place them in tmp directory. */
|
||||
log::info_n("Downloading \"{}\"...", p.title);
|
||||
http::context http;
|
||||
http::response response = http.download_file(p.download_url, tmp_zip);
|
||||
if(response.code == http::OK){
|
||||
log::println("Done.");
|
||||
}else{
|
||||
return log::error_rc(error(
|
||||
constants::error::HTTP_RESPONSE_ERR,
|
||||
std::format("HTTP Error: {}", response.code)
|
||||
));
|
||||
}
|
||||
p_download_urls.emplace_back(p.download_url);
|
||||
p_storage_paths.emplace_back(tmp_zip);
|
||||
}
|
||||
|
||||
dir_pairs.emplace_back(string_pair(tmp_zip, package_dir + "/"));
|
||||
|
||||
p.is_installed = true;
|
||||
p.install_path = package_dir;
|
||||
|
||||
/* Extract all the downloaded packages to their appropriate directory location. */
|
||||
for(const auto& p : dir_pairs){
|
||||
int ec = utils::extract_zip(p.first.c_str(), p.second.c_str());
|
||||
if(ec){
|
||||
log::error_rc(error(
|
||||
constants::error::LIBZIP_ERR,
|
||||
std::format("libzip returned an error code {}", ec)
|
||||
));
|
||||
}
|
||||
}
|
||||
|
||||
/* Update the cache data with information from */
|
||||
log::info_n("Updating local asset data...");
|
||||
error error = cache::update_package_info(p_found);
|
||||
if(error.has_occurred()){
|
||||
string prefix = std::format(log::get_error_prefix(), utils::timestamp());
|
||||
log::println(GDPM_COLOR_LOG_ERROR"\n{}{}" GDPM_COLOR_RESET, prefix, error.get_message());
|
||||
return error;
|
||||
}
|
||||
if(config.clean_temporary){
|
||||
clean_temporary(config, package_titles);
|
||||
}
|
||||
|
||||
log::println("Done.");
|
||||
// })
|
||||
// );
|
||||
}
|
||||
|
||||
return error();
|
||||
/* Make sure the number of urls matches storage locations */
|
||||
if(p_download_urls.size() != p_storage_paths.size()){
|
||||
return log::error_rc(error(ec::ASSERTION_FAILED,
|
||||
"package::install(): p_left.size() != p_storage.size()"
|
||||
));
|
||||
}
|
||||
|
||||
/* Attempt to download ZIPs in parallel */
|
||||
if(config.jobs > 1){
|
||||
http::multi http(config.jobs);
|
||||
ptr<http::transfers> transfers = http.make_downloads(p_download_urls, p_storage_paths);
|
||||
ptr<http::responses> responses = http.execute(std::move(transfers));
|
||||
|
||||
/* Check for HTTP response errors */
|
||||
for(const auto& r : *responses){
|
||||
if(r.code != http::OK){
|
||||
log::error(error(ec::HTTP_RESPONSE_ERR,
|
||||
std::format("HTTP error: {}", r.code)
|
||||
));
|
||||
}
|
||||
}
|
||||
}
|
||||
else{
|
||||
http::context http;
|
||||
for(size_t i = 0; i < p_download_urls.size(); i++){
|
||||
http::response r = http.download_file(
|
||||
p_download_urls[i],
|
||||
p_storage_paths[i]
|
||||
);
|
||||
if(r.code != http::OK){
|
||||
log::error(error(ec::HTTP_RESPONSE_ERR,
|
||||
std::format("HTTP error: {}", r.code)
|
||||
));
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
/* Extract all the downloaded packages to their appropriate directory location. */
|
||||
for(const auto& p : target_extract_dirs){
|
||||
error error = utils::extract_zip(p.first.c_str(), p.second.c_str());
|
||||
if(error.has_occurred()){
|
||||
return error;
|
||||
}
|
||||
log::println("Done.");
|
||||
}
|
||||
|
||||
/* Update the cache data */
|
||||
for(auto& p : p_cache){
|
||||
p.is_installed = true;
|
||||
p.install_path = config.packages_dir + "/" + p.title;
|
||||
}
|
||||
|
||||
log::info_n("Updating local asset data...");
|
||||
error error = cache::update_package_info(p_cache);
|
||||
if(error.has_occurred()){
|
||||
string prefix = std::format(log::get_error_prefix(), utils::timestamp());
|
||||
log::println(GDPM_COLOR_LOG_ERROR"\n{}{}" GDPM_COLOR_RESET, prefix, error.get_message());
|
||||
return error;
|
||||
}
|
||||
if(config.clean_temporary){
|
||||
clean(config, package_titles);
|
||||
}
|
||||
log::println("Done.");
|
||||
|
||||
return error;
|
||||
}
|
||||
|
||||
|
||||
|
|
@ -233,18 +269,16 @@ namespace gdpm::package{
|
|||
|
||||
/* Check if provided param is in remote sources*/
|
||||
if(!config.remote_sources.contains(params.remote_source)){
|
||||
error error(
|
||||
constants::error::NOT_FOUND,
|
||||
return log::error_rc(ec::NOT_FOUND,
|
||||
"Remote source not found in config."
|
||||
);
|
||||
log::error(error);
|
||||
return error;
|
||||
}
|
||||
|
||||
/* Install the other packages from remte source. */
|
||||
std::vector<string_pair> dir_pairs;
|
||||
task_list tasks;
|
||||
rest_api::request_params rest_api_params = rest_api::make_from_config(config);
|
||||
package::info_list p_left;
|
||||
for(auto& p : p_found){ // TODO: Execute each in parallel using coroutines??
|
||||
|
||||
/* Check if a remote source was provided. If not, then try to get packages
|
||||
|
|
@ -299,20 +333,36 @@ namespace gdpm::package{
|
|||
if(std::filesystem::exists(tmp_zip) && std::filesystem::is_regular_file(tmp_zip)){
|
||||
log::info("Found cached package. Skipping download.", p.title);
|
||||
}
|
||||
else{
|
||||
/* Download all the package files and place them in tmp directory. */
|
||||
log::info_n("Downloading \"{}\"...", p.title);
|
||||
http::context http;
|
||||
http::response response = http.download_file(p.download_url, tmp_zip);
|
||||
if(response.code == http::OK){
|
||||
log::println("Done.");
|
||||
}else{
|
||||
return log::error_rc(error(
|
||||
constants::error::HTTP_RESPONSE_ERR,
|
||||
std::format("HTTP Error: {}", response.code)
|
||||
));
|
||||
}
|
||||
else {
|
||||
p_left.emplace_back(p);
|
||||
}
|
||||
} // for loop
|
||||
|
||||
/* Get the packages not found in cache and download */
|
||||
string_list urls;
|
||||
for(const auto& p : p_left){
|
||||
urls.emplace_back(p.download_url);
|
||||
}
|
||||
http::multi http;
|
||||
ptr<http::transfers> transfers = http.make_requests(urls);
|
||||
ptr<http::responses> responses = http.execute(std::move(transfers));
|
||||
|
||||
for(const auto& response : *responses){
|
||||
if(response.code == http::OK){
|
||||
log::println("Done.");
|
||||
}else{
|
||||
return log::error_rc(error(
|
||||
constants::error::HTTP_RESPONSE_ERR,
|
||||
std::format("HTTP Error: {}", response.code)
|
||||
));
|
||||
}
|
||||
}
|
||||
|
||||
/* Extract all packages and update cache database */
|
||||
for(auto& p : p_found){
|
||||
string package_dir = std::filesystem::current_path().string() + "/" + p.title;//config.packages_dir + "/" + p.title;
|
||||
string tmp_dir = std::filesystem::current_path().string() + "/" + p.title + ".tmp";
|
||||
string tmp_zip = tmp_dir + ".zip";
|
||||
|
||||
dir_pairs.emplace_back(string_pair(tmp_zip, package_dir + "/"));
|
||||
|
||||
|
|
@ -321,13 +371,7 @@ namespace gdpm::package{
|
|||
|
||||
/* Extract all the downloaded packages to their appropriate directory location. */
|
||||
for(const auto& p : dir_pairs){
|
||||
int ec = utils::extract_zip(p.first.c_str(), p.second.c_str());
|
||||
if(ec){
|
||||
log::error_rc(error(
|
||||
constants::error::LIBZIP_ERR,
|
||||
std::format("libzip returned an error code {}", ec)
|
||||
));
|
||||
}
|
||||
error error = utils::extract_zip(p.first.c_str(), p.second.c_str());
|
||||
}
|
||||
|
||||
/* Remove temporary download archive */
|
||||
|
|
@ -357,12 +401,9 @@ namespace gdpm::package{
|
|||
result_t result = cache::get_package_info_by_title(package_titles);
|
||||
package::info_list p_cache = result.unwrap_unsafe();
|
||||
if(p_cache.empty()){
|
||||
error error(
|
||||
constants::error::NOT_FOUND,
|
||||
return log::error_rc(ec::NOT_FOUND,
|
||||
"Could not find any packages to remove."
|
||||
);
|
||||
log::error(error);
|
||||
return error;
|
||||
}
|
||||
|
||||
/* Count number packages in cache flagged as is_installed. If there are none, then there's nothing to do. */
|
||||
|
|
@ -372,19 +413,36 @@ namespace gdpm::package{
|
|||
});
|
||||
|
||||
if(p_count == 0){
|
||||
error error(
|
||||
constants::error::NOT_FOUND,
|
||||
return log::error_rc(ec::NOT_FOUND,
|
||||
"No packages to remove."
|
||||
);
|
||||
log::error(error);
|
||||
return error;
|
||||
}
|
||||
|
||||
log::println("Packages to remove:");
|
||||
for(const auto& p : p_cache)
|
||||
if(p.is_installed)
|
||||
log::print(" {} ", p.title);
|
||||
log::println("");
|
||||
{
|
||||
using namespace tabulate;
|
||||
Table table;
|
||||
table.format()
|
||||
.border_top("")
|
||||
.border_bottom("")
|
||||
.border_left("")
|
||||
.border_right("")
|
||||
.corner("")
|
||||
.padding_top(0)
|
||||
.padding_bottom(0);
|
||||
table.add_row({"Title", "Author", "Category", "Version", "Godot", "Last Modified", "Installed?"});
|
||||
table[0].format()
|
||||
.font_style({FontStyle::underline, FontStyle::bold});
|
||||
for(const auto& p : p_cache){
|
||||
table.add_row({p.title, p.author, p.category, p.version, p.godot_version, p.modify_date, (p.is_installed) ? "✔️": "❌"});
|
||||
// string output(p.title + GDPM_COLOR_CYAN " v" + p.version + GDPM_COLOR_RESET);
|
||||
// output += GDPM_COLOR_BLUE " last updated: " + p.modify_date + GDPM_COLOR_RESET;
|
||||
// output += (p.is_installed) ? GDPM_COLOR_LIGHT_CYAN " (reinstall)" : "";
|
||||
// output += GDPM_COLOR_RESET;
|
||||
// log::print(" {}\n", output);
|
||||
}
|
||||
table.print(std::cout);
|
||||
log::println("");
|
||||
}
|
||||
|
||||
if(!config.skip_prompt){
|
||||
if(!utils::prompt_user_yn("Do you want to remove these packages? (Y/n)"))
|
||||
|
|
@ -428,13 +486,13 @@ namespace gdpm::package{
|
|||
}
|
||||
log::println("Done.");
|
||||
if(config.clean_temporary){
|
||||
clean_temporary(config, package_titles);
|
||||
clean(config, package_titles);
|
||||
}
|
||||
log::info_n("Updating local asset data...");
|
||||
{
|
||||
error error = cache::update_package_info(p_cache);
|
||||
if(error.has_occurred()){
|
||||
log::error("\n{}", error.get_message());
|
||||
log::error("\nsqlite: {}", error.get_message());
|
||||
return error;
|
||||
}
|
||||
}
|
||||
|
|
@ -473,9 +531,9 @@ namespace gdpm::package{
|
|||
string url{constants::HostUrl + rest_api::endpoints::GET_AssetId};
|
||||
Document doc = rest_api::get_assets_list(url, rest_api_params);
|
||||
if(doc.IsNull()){
|
||||
constexpr const char *message = "Could not get response from server. Aborting.";
|
||||
log::error(message);
|
||||
return error(constants::error::HOST_UNREACHABLE, message);
|
||||
return log::error_rc(ec::HOST_UNREACHABLE,
|
||||
"package::update(): could not get response from server. Aborting."
|
||||
);
|
||||
}
|
||||
return error();
|
||||
}
|
||||
|
|
@ -568,10 +626,9 @@ namespace gdpm::package{
|
|||
remote::print_repositories(config);
|
||||
}
|
||||
else{
|
||||
log::error(error(
|
||||
constants::error::UNKNOWN_COMMAND,
|
||||
"Unrecognized subcommand. Try either 'packages' or 'remote' instead."
|
||||
));
|
||||
log::error_rc(ec::UNKNOWN_COMMAND,
|
||||
"package::list(): unrecognized subcommand...try either 'packages' or 'remote' instead."
|
||||
);
|
||||
}
|
||||
return error();
|
||||
}
|
||||
|
|
@ -598,9 +655,9 @@ namespace gdpm::package{
|
|||
|
||||
}
|
||||
else {
|
||||
constexpr const char *message = "File or directory exists!";
|
||||
log::error(message);
|
||||
return error(constants::error::FILE_EXISTS, message);
|
||||
return log::error_rc(ec::FILE_EXISTS,
|
||||
"package::export_to(): file or directory exists!"
|
||||
);
|
||||
}
|
||||
}
|
||||
std::ofstream of(path);
|
||||
|
|
@ -613,6 +670,45 @@ namespace gdpm::package{
|
|||
}
|
||||
|
||||
|
||||
error clean(
|
||||
const config::context& config,
|
||||
const title_list& package_titles
|
||||
){
|
||||
if(package_titles.empty()){
|
||||
if(!config.skip_prompt){
|
||||
if(!utils::prompt_user_yn("Are you sure you want to clean all temporary files? (Y/n)")){
|
||||
return error();
|
||||
}
|
||||
}
|
||||
std::filesystem::remove_all(config.tmp_dir);
|
||||
return error();
|
||||
}
|
||||
|
||||
/* Find the path of each packages is_installed then delete temporaries */
|
||||
log::info_n("Cleaning temporary files...");
|
||||
for(const auto& p_title : package_titles){
|
||||
string tmp_zip = config.tmp_dir + "/" + p_title + ".zip";
|
||||
if(config.verbose > 0)
|
||||
log::info("Removed '{}'", tmp_zip);
|
||||
std::filesystem::remove_all(tmp_zip);
|
||||
}
|
||||
log::println("Done.");
|
||||
return error();
|
||||
}
|
||||
|
||||
|
||||
error purge(const config::context& config){
|
||||
if(!config.skip_prompt){
|
||||
if(!utils::prompt_user_yn("Are you sure you want to purge all installed packages? (Y/n)")){
|
||||
return error();
|
||||
}
|
||||
}
|
||||
/* Remove all packages installed in global location */
|
||||
std::filesystem::remove_all(config.packages_dir);
|
||||
return cache::drop_package_database();
|
||||
}
|
||||
|
||||
|
||||
error link(
|
||||
const config::context& config,
|
||||
const title_list& package_titles,
|
||||
|
|
@ -622,7 +718,7 @@ namespace gdpm::package{
|
|||
|
||||
if(params.paths.empty()){
|
||||
return log::error_rc(error(
|
||||
constants::error::PATH_NOT_DEFINED,
|
||||
constants::error::MALFORMED_PATH,
|
||||
"Path is required"
|
||||
));
|
||||
}
|
||||
|
|
@ -685,7 +781,7 @@ namespace gdpm::package{
|
|||
|
||||
if(params.paths.empty()){
|
||||
return log::error_rc(error(
|
||||
constants::error::PATH_NOT_DEFINED,
|
||||
constants::error::MALFORMED_PATH,
|
||||
"Path is required"
|
||||
));
|
||||
}
|
||||
|
|
@ -739,6 +835,88 @@ namespace gdpm::package{
|
|||
return error();
|
||||
}
|
||||
|
||||
|
||||
result_t<info_list> fetch(
|
||||
const config::context& config,
|
||||
const title_list& package_titles
|
||||
){
|
||||
using namespace rapidjson;
|
||||
|
||||
rest_api::request_params rest_api_params = rest_api::make_from_config(config);
|
||||
rest_api_params.page = 0;
|
||||
int page = 0;
|
||||
int page_length = 0;
|
||||
int total_items = 0;
|
||||
int items_left = 0;
|
||||
// int total_pages = 0;
|
||||
|
||||
log::info_n("Sychronizing database...");
|
||||
do{
|
||||
/* Make the GET request to get page data and store it in the local
|
||||
package database. Also, check to see if we need to keep going. */
|
||||
string url{constants::HostUrl + rest_api::endpoints::GET_Asset};
|
||||
Document doc = rest_api::get_assets_list(url, rest_api_params);
|
||||
rest_api_params.page += 1;
|
||||
|
||||
if(doc.IsNull()){
|
||||
log::println("");
|
||||
return result_t(info_list(), log::error_rc(
|
||||
ec::EMPTY_RESPONSE,
|
||||
"Could not get response from server. Aborting."
|
||||
));
|
||||
}
|
||||
|
||||
/* Need to know how many pages left to get and how many we get per
|
||||
request. */
|
||||
page = doc["page"].GetInt();
|
||||
page_length = doc["page_length"].GetInt();
|
||||
// total_pages = doc["pages"].GetInt();
|
||||
total_items = doc["total_items"].GetInt();
|
||||
items_left = total_items - (page + 1) * page_length;
|
||||
|
||||
// log::info("page: {}, page length: {}, total pages: {}, total items: {}, items left: {}", page, page_length, total_pages, total_items, items_left);
|
||||
|
||||
if(page == 0){
|
||||
error error;
|
||||
error = cache::drop_package_database();
|
||||
error = cache::create_package_database();
|
||||
}
|
||||
|
||||
info_list packages;
|
||||
for(const auto& o : doc["result"].GetArray()){
|
||||
// log::println("=======================");
|
||||
info p{
|
||||
.asset_id = std::stoul(o["asset_id"].GetString()),
|
||||
.title = o["title"].GetString(),
|
||||
.author = o["author"].GetString(),
|
||||
.author_id = std::stoul(o["author_id"].GetString()),
|
||||
.version = o["version"].GetString(),
|
||||
.godot_version = o["godot_version"].GetString(),
|
||||
.cost = o["cost"].GetString(),
|
||||
.modify_date = o["modify_date"].GetString(),
|
||||
.category = o["category"].GetString(),
|
||||
.remote_source = url
|
||||
};
|
||||
packages.emplace_back(p);
|
||||
}
|
||||
error error = cache::insert_package_info(packages);
|
||||
if (error.has_occurred()){
|
||||
log::error(error);
|
||||
/* FIXME: Should this stop here or keep going? */
|
||||
}
|
||||
/* Make the same request again to get the rest of the needed data
|
||||
using the same request, but with a different page, then update
|
||||
variables as needed. */
|
||||
|
||||
|
||||
} while(items_left > 0);
|
||||
|
||||
log::println("Done.");
|
||||
|
||||
return cache::get_package_info_by_title(package_titles);
|
||||
}
|
||||
|
||||
|
||||
void print_list(const info_list& packages){
|
||||
for(const auto& p : packages){
|
||||
log::println(
|
||||
|
|
@ -856,26 +1034,6 @@ namespace gdpm::package{
|
|||
}
|
||||
|
||||
|
||||
void clean_temporary(
|
||||
const config::context& config,
|
||||
const title_list& package_titles
|
||||
){
|
||||
if(package_titles.empty()){
|
||||
log::info("No temporary files found to clean.");
|
||||
std::filesystem::remove_all(config.tmp_dir);
|
||||
}
|
||||
/* Find the path of each packages is_installed then delete temporaries */
|
||||
log::info_n("Cleaning temporary files...");
|
||||
for(const auto& p_title : package_titles){
|
||||
string tmp_zip = config.tmp_dir + "/" + p_title + ".zip";
|
||||
if(config.verbose > 0)
|
||||
log::info("Removed '{}'", tmp_zip);
|
||||
std::filesystem::remove_all(tmp_zip);
|
||||
}
|
||||
log::println("Done.");
|
||||
}
|
||||
|
||||
|
||||
template <typename T>
|
||||
auto set_if_key_exists(
|
||||
const var_opts& o,
|
||||
|
|
@ -886,89 +1044,6 @@ namespace gdpm::package{
|
|||
}
|
||||
|
||||
|
||||
result_t<info_list> synchronize_database(
|
||||
const config::context& config,
|
||||
const title_list& package_titles
|
||||
){
|
||||
using namespace rapidjson;
|
||||
|
||||
rest_api::request_params rest_api_params = rest_api::make_from_config(config);
|
||||
rest_api_params.page = 0;
|
||||
int page = 0;
|
||||
int page_length = 0;
|
||||
int total_items = 0;
|
||||
int items_left = 0;
|
||||
// int total_pages = 0;
|
||||
|
||||
log::info("Sychronizing database...");
|
||||
do{
|
||||
/* Make the GET request to get page data and store it in the local
|
||||
package database. Also, check to see if we need to keep going. */
|
||||
std::string url{constants::HostUrl};
|
||||
url += rest_api::endpoints::GET_Asset;
|
||||
Document doc = rest_api::get_assets_list(url, rest_api_params);
|
||||
rest_api_params.page += 1;
|
||||
|
||||
if(doc.IsNull()){
|
||||
error error(
|
||||
constants::error::EMPTY_RESPONSE,
|
||||
"Could not get response from server. Aborting."
|
||||
);
|
||||
log::error(error);
|
||||
return result_t(info_list(), error);
|
||||
}
|
||||
|
||||
/* Need to know how many pages left to get and how many we get per
|
||||
request. */
|
||||
page = doc["page"].GetInt();
|
||||
page_length = doc["page_length"].GetInt();
|
||||
// total_pages = doc["pages"].GetInt();
|
||||
total_items = doc["total_items"].GetInt();
|
||||
items_left = total_items - (page + 1) * page_length;
|
||||
|
||||
// log::info("page: {}, page length: {}, total pages: {}, total items: {}, items left: {}", page, page_length, total_pages, total_items, items_left);
|
||||
|
||||
if(page == 0){
|
||||
error error;
|
||||
error = cache::drop_package_database();
|
||||
error = cache::create_package_database();
|
||||
}
|
||||
|
||||
info_list packages;
|
||||
for(const auto& o : doc["result"].GetArray()){
|
||||
// log::println("=======================");
|
||||
info p{
|
||||
.asset_id = std::stoul(o["asset_id"].GetString()),
|
||||
.title = o["title"].GetString(),
|
||||
.author = o["author"].GetString(),
|
||||
.author_id = std::stoul(o["author_id"].GetString()),
|
||||
.version = o["version"].GetString(),
|
||||
.godot_version = o["godot_version"].GetString(),
|
||||
.cost = o["cost"].GetString(),
|
||||
.modify_date = o["modify_date"].GetString(),
|
||||
.category = o["category"].GetString(),
|
||||
.remote_source = url
|
||||
};
|
||||
packages.emplace_back(p);
|
||||
}
|
||||
error error = cache::insert_package_info(packages);
|
||||
if (error.has_occurred()){
|
||||
log::error(error);
|
||||
/* FIXME: Should this stop here or keep going? */
|
||||
}
|
||||
/* Make the same request again to get the rest of the needed data
|
||||
using the same request, but with a different page, then update
|
||||
variables as needed. */
|
||||
|
||||
|
||||
} while(items_left > 0);
|
||||
|
||||
log::println("Done.");
|
||||
|
||||
return cache::get_package_info_by_title(package_titles);
|
||||
}
|
||||
|
||||
|
||||
result_t<info_list> resolve_dependencies(
|
||||
const config::context& config,
|
||||
const title_list& package_titles
|
||||
|
|
|
|||
|
|
@@ -111,6 +111,7 @@ namespace gdpm::package_manager{
ArgumentParser update_command("update");
ArgumentParser search_command("search");
ArgumentParser export_command("export");
ArgumentParser purge_command("purge");
ArgumentParser list_command("list");
ArgumentParser link_command("link");
ArgumentParser clone_command("clone");

@@ -150,16 +151,16 @@ namespace gdpm::package_manager{
.implicit_value(true)
.default_value(false)
.nargs(0);
install_command.add_argument("--disable-sync")
install_command.add_argument("--sync")
.help("enable syncing with remote before installing")
.implicit_value(true)
.default_value(false)
.nargs(0);
install_command.add_argument("--disable-cache")
.default_value(true)
.nargs(nargs_pattern::any);
install_command.add_argument("--cache")
.help("disable caching asset data")
.implicit_value(true)
.default_value(false)
.nargs(0);
.nargs(nargs_pattern::any);
install_command.add_argument("--remote")
.help("set the remote to use")
.nargs(1);
@@ -200,17 +201,25 @@ namespace gdpm::package_manager{
remove_command.add_description("remove package(s)");
remove_command.add_argument("packages")
.nargs(nargs_pattern::any);
remove_command.add_argument("--clean");
remove_command.add_argument("--clean")
.help("clean temporary files")
.implicit_value(true)
.default_value(false)
.nargs(0);
remove_command.add_argument("-y", "--skip-prompt");
remove_command.add_argument("-f", "--file")
.help("set the file(s) to read as input")
.append()
.nargs(nargs_pattern::at_least_one);
.nargs(1);

update_command.add_description("update package(s)");
update_command.add_argument("packages")
.nargs(nargs_pattern::any);
update_command.add_argument("--clean");
update_command.add_argument("--clean")
.help("clean temporary files")
.implicit_value(true)
.default_value(false)
.nargs(0);
update_command.add_argument("--remote");
update_command.add_argument("-f", "--file")
.help("set the file(s) to read as input")

@@ -301,8 +310,19 @@ namespace gdpm::package_manager{
clean_command.add_description("clean package(s) temporary files");
clean_command.add_argument("packages")
.help("package(s) to clean")
.required()
.nargs(nargs_pattern::at_least_one);
.nargs(nargs_pattern::any);
clean_command.add_argument("-y", "--skip-prompt")
.help("skip the yes/no prompt")
.implicit_value(true)
.default_value(false)
.nargs(0);

purge_command.add_description("purge cache database");
purge_command.add_argument("-y", "--skip-prompt")
.help("skip the yes/no prompt")
.implicit_value(true)
.default_value(false)
.nargs(0);

fetch_command.add_description("fetch and sync asset data");
fetch_command.add_argument("remote")
@@ -362,6 +382,7 @@ namespace gdpm::package_manager{
program.add_subparser(update_command);
program.add_subparser(search_command);
program.add_subparser(export_command);
program.add_subparser(purge_command);
program.add_subparser(list_command);
program.add_subparser(link_command);
program.add_subparser(clone_command);

@@ -377,25 +398,41 @@ namespace gdpm::package_manager{
program.parse_args(argc, argv);
// program.parse_known_args(argc, argv);
} catch(const std::runtime_error& e){
return log::error_rc(error(
constants::error::ARGPARSE_ERROR,
e.what())
);
return log::error_rc(ec::ARGPARSE_ERROR, e.what());
}

if(program.is_subcommand_used(install_command)){
action = action_e::install;
if(install_command.is_used("packages"))
package_titles = install_command.get<string_list>("packages");
// if(install_command.is_used("packages"))
// package_titles = install_command.get<string_list>("packages");
package_titles = get_values_from_parser(install_command);
set_if_used(install_command, config.rest_api_params.godot_version, "godot-version");
set_if_used(install_command, config.clean_temporary, "clean");
set_if_used(install_command, config.enable_sync, "disable-sync");
set_if_used(install_command, config.enable_cache, "disable-cache");
// set_if_used(install_command, config.enable_sync, "disable-sync");
// set_if_used(install_command, config.enable_cache, "disable-cache");
set_if_used(install_command, params.remote_source, "remote");
set_if_used(install_command, config.jobs, "jobs");
set_if_used(install_command, config.skip_prompt, "skip-prompt");
set_if_used(install_command, params.input_files, "file");
set_if_used(install_command, config.timeout, "timeout");
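/* The --sync and --cache flags below accept an optional value: an empty value,
"enable", or "true" turns the feature on; "disable" or "false" turns it off. */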
if(install_command.is_used("sync")){
string sync = install_command.get<string>("sync");
if(!sync.compare("enable") || !sync.compare("true") || sync.empty()){
config.enable_sync = true;
}
else if(!sync.compare("disable") || !sync.compare("false")){
config.enable_sync = false;
}
}
if(install_command.is_used("cache")){
string cache = install_command.get<string>("cache");
if(!cache.compare("enable") || !cache.compare("true") || cache.empty()){
config.enable_cache = true;
}
else if(!cache.compare("disable") || !cache.compare("false")){
config.enable_cache = false;
}
}
}
else if(program.is_subcommand_used(get_command)){
action = action_e::get;
@@ -437,6 +474,15 @@ namespace gdpm::package_manager{
action = action_e::p_export;
params.paths = export_command.get<string_list>("paths");
}
else if(program.is_subcommand_used(clean_command)){
action = action_e::clean;
package_titles = get_values_from_parser(clean_command);
set_if_used(clean_command, config.skip_prompt, "skip-prompt");
}
else if(program.is_subcommand_used(purge_command)){
action = action_e::purge;
set_if_used(purge_command, config.skip_prompt, "skip-prompt");
}
else if(program.is_subcommand_used(list_command)){
action = action_e::list;
if(list_command.is_used("show"))

@@ -471,10 +517,6 @@ namespace gdpm::package_manager{
params.paths = clone_command.get<string_list>("path");
}
}
else if(program.is_subcommand_used(clean_command)){
action = action_e::clean;
package_titles = get_values_from_parser(clean_command);
}
else if(program.is_subcommand_used(config_command)){
if(config_command.is_used("style")){
string style = config_command.get<string>("style");

@@ -550,15 +592,16 @@ namespace gdpm::package_manager{
case action_e::update: package::update(config, package_titles, params); break;
case action_e::search: package::search(config, package_titles, params); break;
case action_e::p_export: package::export_to(params.paths); break;
case action_e::purge: package::purge(config); break;
case action_e::list: package::list(config, params); break;
/* ...opts are the paths here */
case action_e::link: package::link(config, package_titles, params); break;
case action_e::clone: package::clone(config, package_titles, params); break;
case action_e::clean: package::clean_temporary(config, package_titles); break;
case action_e::clean: package::clean(config, package_titles); break;
case action_e::config_get: config::print_properties(config, params.args); break;
case action_e::config_set: config::set_property(config, params.args[0], params.args[1]); break;
case action_e::fetch: package::synchronize_database(config, package_titles); break;
case action_e::sync: package::synchronize_database(config, package_titles); break;
case action_e::fetch: package::fetch(config, package_titles); break;
case action_e::sync: package::fetch(config, package_titles); break;
case action_e::remote_list: remote::print_repositories(config); break;
case action_e::remote_add: remote::add_repository(config, params.args); break;
case action_e::remote_remove: remote::remove_respositories(config, params.args); break;

118 src/rest_api.cpp
@@ -119,7 +119,10 @@ namespace gdpm::rest_api{
return request_url;
}

error print_params(const request_params& params, const string& filter){
error print_params(
const request_params& params,
const string& filter
){
log::println("params: \n"
"\ttype: {}\n"
"\tcategory: {}\n"

@@ -147,7 +150,7 @@ namespace gdpm::rest_api{
Document doc = rest_api::get_assets_list(request_url, rest_api_params);
if(doc.IsNull()){
return log::error_rc(error(
constants::error::HOST_UNREACHABLE,
ec::HOST_UNREACHABLE,
"Could not fetch metadata. Aborting."
));
}

@@ -166,9 +169,9 @@ namespace gdpm::rest_api{
http::context http;
string request_url{url};
request_url += to_string(type);
http::response r = http.request_get(url);
http::response r = http.request(url);
if(verbose > 0)
log::info("URL: {}", url);
log::info("rest_api::configure::url: {}", url);
return _parse_json(r.body);
}

@@ -179,15 +182,15 @@ namespace gdpm::rest_api{
const string& filter
){
http::context http;
http::request_params http_params;
http_params.headers.insert(http::header("Accept", "*/*"));
http_params.headers.insert(http::header("Accept-Encoding", "application/gzip"));
http_params.headers.insert(http::header("Content-Encoding", "application/gzip"));
http_params.headers.insert(http::header("Connection", "keep-alive"));
string request_url = _prepare_request(url, c, http.url_escape(filter));
http::response r = http.request_get(request_url, http_params);
http::request params;
params.headers.insert(http::header("Accept", "*/*"));
params.headers.insert(http::header("Accept-Encoding", "application/gzip"));
params.headers.insert(http::header("Content-Encoding", "application/gzip"));
params.headers.insert(http::header("Connection", "keep-alive"));
string prepared_url = _prepare_request(url, c, http.url_escape(filter));
http::response r = http.request(prepared_url, params);
if(c.verbose >= log::INFO)
log::info("rest_api::get_asset_list()::URL: {}", request_url);
log::info("rest_api::get_asset_list()::url: {}", prepared_url);
return _parse_json(r.body, c.verbose);
}

@@ -200,16 +203,18 @@ namespace gdpm::rest_api{
){
/* Set up HTTP request */
http::context http;
http::request_params http_params;
http_params.headers.insert(http::header("Accept", "*/*"));
http_params.headers.insert(http::header("Accept-Encoding", "application/gzip"));
http_params.headers.insert(http::header("Content-Encoding", "application/gzip"));
http_params.headers.insert(http::header("Connection", "keep-alive"));
string request_url = utils::replace_all(_prepare_request(url, api_params, http.url_escape(filter)), "{id}", std::to_string(asset_id));
http::response r = http.request_get(request_url.c_str(), http_params);
http::request params;
params.headers.insert(http::header("Accept", "*/*"));
params.headers.insert(http::header("Accept-Encoding", "application/gzip"));
params.headers.insert(http::header("Content-Encoding", "application/gzip"));
params.headers.insert(http::header("Connection", "keep-alive"));
string prepared_url = utils::replace_all(
_prepare_request(url, api_params,
http.url_escape(filter)
), "{id}", std::to_string(asset_id));
http::response r = http.request(prepared_url, params);
if(api_params.verbose >= log::INFO)
log::info("get_asset().URL: {}", request_url);

log::info("rest_api::get_asset()::url: {}", prepared_url);
return _parse_json(r.body);
}

@@ -228,27 +233,56 @@ namespace gdpm::rest_api{
return false;
}

namespace multi{
json::documents get_assets(
const string_list& urls,
id_list asset_ids,
const request_params& api_params,
const string_list& filters
){
if(urls.size() != asset_ids.size() || urls.size() != filters.size()){
log::error(ec::ASSERTION_FAILED,
"multi::get_assets(): urls.size() != filters.size()"
);
}
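/* The three input lists are expected to line up index-by-index: urls[i] is
requested with filters[i] and has its "{id}" placeholder filled from asset_ids[i]. */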
http::multi http;
http::request params;
json::documents docs;
params.headers.insert(http::header("Accept", "*/*"));
params.headers.insert(http::header("Accept-Encoding", "application/gzip"));
params.headers.insert(http::header("Content-Encoding", "application/gzip"));
params.headers.insert(http::header("Connection", "keep-alive"));
string_list prepared_urls = {};

/* Prepare the URLs for the request_multi() call */
for(size_t i = 0; i < urls.size(); i++){
const string& url = urls.at(i);
const string& filter = filters.at(i);
int asset_id = asset_ids.at(i);
string prepared_url = utils::replace_all(
_prepare_request(url, api_params, http.url_escape(filter)),
"{id}", std::to_string(asset_id));
prepared_urls.emplace_back(prepared_url);
if(api_params.verbose >= log::INFO)
log::info("get_assets(i={})::url: {}", i, prepared_url);
}

/* Parse JSON string into objects */
ptr<http::transfers> transfers = http.make_requests(prepared_urls, params);
ptr<http::responses> responses = http.execute(std::move(transfers));
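/* The CURL multi interface drives all queued transfers concurrently; each
completed response body is then parsed into a JSON document below. */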
for(const auto& response : *responses){
docs.emplace_back(_parse_json(response.body));
}
return docs;
}
}
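/* Illustrative call only (the surrounding variable names are assumed):

// auto docs = rest_api::multi::get_assets(
// 	{url_a, url_b},            // asset endpoint URLs containing "{id}"
// 	{asset_id_a, asset_id_b},  // one asset id per URL
// 	api_params,
// 	{"", ""});                 // one filter per URL

Each returned document corresponds to one asset request. */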

namespace edits{

void edit_asset(){

}

void get_asset_edit(int asset_id){

}

string review_asset_edit(int asset_id){
return string();
}

string accept_asset_edit(int asset_id){
return string();
}

string reject_asset_edit(int asset_id){
return string();
}

void edit_asset(){}
void get_asset_edit(int asset_id){}
string review_asset_edit(int asset_id){ return string(); }
string accept_asset_edit(int asset_id){ return string(); }
string reject_asset_edit(int asset_id){ return string(); }
} // namespace edits
}

301 src/utils.cpp
@@ -2,11 +2,9 @@
#include "utils.hpp"
#include "config.hpp"
#include "constants.hpp"
#include "error.hpp"
#include "log.hpp"
#include "indicators/indeterminate_progress_bar.hpp"
#include "indicators/dynamic_progress.hpp"
#include "indicators/progress_bar.hpp"
#include "indicators/block_progress_bar.hpp"

#include "csv2/reader.hpp"

@@ -30,38 +28,13 @@

namespace gdpm::utils{

using namespace indicators;
BlockProgressBar bar {
option::BarWidth{50},
// option::Start{"["},
// option::Fill{"="},
// option::Lead{">"},
// option::Remainder{" "},
// option::End{"]"},
option::PrefixText{"Downloading file "},
option::PostfixText{""},
option::ForegroundColor{Color::green},
option::FontStyles{std::vector<FontStyle>{FontStyle::bold}},
};
// option::ShowElapsedTime{true},
// option::ShowRemainingTime{true},
IndeterminateProgressBar bar_unknown {
option::BarWidth{50},
option::Start{"["},
option::Fill{"."},
option::Lead{"<==>"},
option::PrefixText{"Downloading file "},
option::End{"]"},
option::PostfixText{""},
option::ForegroundColor{Color::green},
option::FontStyles{std::vector<FontStyle>{FontStyle::bold}},
};

bool to_bool(const std::string& s){
bool to_bool(const string& s){
return to_lower(s) == "true";
}

std::vector<std::string> split_lines(const std::string& contents){
std::vector<string> split_lines(const string& contents){
using namespace csv2;
csv2::Reader<
delimiter<'\n'>,

@@ -69,7 +42,7 @@ namespace gdpm::utils{
first_row_is_header<false>,
trim_policy::trim_whitespace
> csv;
std::vector<std::string> lines;
std::vector<string> lines;
if(csv.parse(contents)){
for(const auto& row : csv){
for(const auto& cell : row){

@@ -82,13 +55,13 @@ namespace gdpm::utils{

#if (GDPM_READFILE_IMPL == 0)
std::string readfile(const std::string& path){
string readfile(const string& path){
constexpr auto read_size = std::size_t{4096};
auto stream = std::ifstream{path.data()};
stream.exceptions(std::ios_base::badbit);

auto out = std::string{};
auto buf = std::string(read_size, '\0');
auto out = string{};
auto buf = string(read_size, '\0');
while (stream.read(& buf[0], read_size)) {
out.append(buf, 0, stream.gcount());
}

@@ -97,15 +70,15 @@ namespace gdpm::utils{
}
#elif(GDPM_READFILE_IMPL == 1)

std::string readfile(const std::string& path){
string readfile(const string& path){
std::ifstream ifs(path);
return std::string(
return string(
(std::istreambuf_iterator<char>(ifs)),
(std::istreambuf_iterator<char>())
);
}
#elif(GDPM_READFILE_IMPL == 2)
std::string readfile(const std::string& path){
string readfile(const string& path){
std::ifstream ifs(path);
std::stringstream buffer;
buffer << ifs.rdbuf();

@@ -113,43 +86,43 @@ namespace gdpm::utils{
}
#endif

std::string to_lower(const std::string& s){
std::string copy = s;
string to_lower(const string& s){
string copy = s;
std::transform(copy.begin(), copy.end(), copy.begin(), tolower);
return copy;
}

std::string trim(const std::string& s){
string trim(const string& s){
return trim_right(trim_left(s));
}

std::string trim_left(const std::string& s){
string trim_left(const string& s){
return trim_left(s, constants::WHITESPACE);
}

std::string trim_left(
const std::string& s,
const std::string& ref
string trim_left(
const string& s,
const string& ref
){
size_t start = s.find_first_not_of(ref);
return (start == std::string::npos) ? "" : s.substr(start);
return (start == string::npos) ? "" : s.substr(start);
}

std::string trim_right(const std::string& s){
string trim_right(const string& s){
return trim_right(s, constants::WHITESPACE);
}

std::string trim_right(
const std::string& s,
const std::string& ref
string trim_right(
const string& s,
const string& ref
){
size_t end = s.find_last_not_of(ref);
return (end == std::string::npos) ? "" : s.substr(0, end + 1);
return (end == string::npos) ? "" : s.substr(0, end + 1);
}

std::vector<std::string> parse_lines(const std::string &s){
std::string line;
std::vector<std::string> result;
std::vector<string> parse_lines(const string &s){
string line;
std::vector<string> result;
std::stringstream ss(s);
while(std::getline(ss, line)){
result.emplace_back(line);

@@ -157,26 +130,26 @@ namespace gdpm::utils{
return result;
}

std::string replace_first(
const std::string &s,
const std::string &from,
const std::string &to
string replace_first(
const string &s,
const string &from,
const string &to
){
std::string copy = s; // make string copy
string copy = s; // make string copy
size_t pos = copy.find(from);
if(pos == std::string::npos)
if(pos == string::npos)
return copy;
return copy.replace(pos, from.length(), to);
}

std::string replace_all(
const std::string& s,
const std::string& from,
const std::string& to
string replace_all(
const string& s,
const string& from,
const string& to
){
std::string copy = s; // make string copy
string copy = s; // make string copy
size_t pos = 0;
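/* Scan forward for each occurrence of `from`, replace it in place, and advance
past the inserted text so a replacement containing `from` is not matched again. */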
while((pos = copy.find(from, pos)) != std::string::npos){
while((pos = copy.find(from, pos)) != string::npos){
copy.replace(pos, from.length(), to);
pos += to.length();
}

@@ -184,45 +157,47 @@ namespace gdpm::utils{
}

/* Ref: https://gist.github.com/mobius/1759816 */
int extract_zip(
error extract_zip(
const char *archive,
const char *dest,
int verbose
){
constexpr const char *prog = "gpdm";
struct zip *zip;
constexpr int SIZE = 1024;
struct zip *za;
struct zip_file *zf;
struct zip_stat sb;
char buf[100];
char buf[SIZE];
int err;
int i, len, fd;
zip_uint64_t sum;

// log::info_n("Extracting package contents to '{}'...", dest);
log::info_n("Extracting package contents...");
if((zip = zip_open(archive, 0, &err)) == nullptr){
std::filesystem::path path(archive);
log::info_n("Extracting \"{}\" archive...", path.filename().string());
if((za = zip_open(path.c_str(), ZIP_RDONLY, &err)) == NULL){
zip_error_to_str(buf, sizeof(buf), err, errno);
log::error("{}: can't open zip archive {}: {}", prog, archive, buf);
return 1;
log::println("");
return log::error_rc(error(
ec::LIBZIP_ERR,
std::format("utils::extract_zip(): can't open zip archive \"{}\": {}", path.filename().string(), buf))
);
}

for(i = 0; i < zip_get_num_entries(zip, 0); i++){
if(zip_stat_index(zip, i, 0, &sb) == 0){
for(i = 0; i < zip_get_num_entries(za, 0); i++){
if(zip_stat_index(za, i, 0, &sb) == 0){
len = strlen(sb.name);
if(verbose > 1){
log::print("{}, ", sb.name);
log::println("size: {}, ", sb.size);
log::println("utils::extract_zip(): {}, size: {}", sb.name, sb.size);
}
std::string path{dest};
string path{dest};
path += sb.name;
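/* Entries whose names end in '/' are directories; they are created directly
instead of being opened and written out as files. */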
if(sb.name[len-1] == '/'){
// safe_create_dir(sb.name);
std::filesystem::create_directory(path);
} else {
zf = zip_fopen_index(zip, i, 0);
zf = zip_fopen_index(za, i, 0);
if(!zf){
log::error("extract_zip: zip_fopen_index() failed.");
return 100;
return log::error_rc(error(ec::LIBZIP_ERR,
"utils::extract_zip(): zip_fopen_index() failed.")
);
}
#ifdef _WIN32
fd = open(sb.name, O_RDWR | O_TRUNC | O_CREAT | O_BINARY, 0644);

@@ -230,16 +205,19 @@ namespace gdpm::utils{
fd = open(path.c_str(), O_RDWR | O_TRUNC | O_CREAT, 0644);
#endif
if(fd < 0){
log::error("extract_zip: open() failed. (path: {}, fd={})", path, fd);
return 101;
return log::error_rc(error(ec::LIBZIP_ERR,
std::format("utils::extract_zip(): open() failed. (path: {}, fd={})", path, fd))
);
}

sum = 0;
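/* Read the entry in fixed-size chunks until the number of bytes written
matches the uncompressed size reported by zip_stat. */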
while(sum != sb.size){
len = zip_fread(zf, buf, 100);
if(len < 0){
log::error("extract_zip: zip_fread() returned len < 0 (len={})", len);
return 102;
return log::error_rc(error(
ec::LIBZIP_ERR,
std::format("utils::extract_zip(): zip_fread() returned len < 0 (len={})", len))
);
}
write(fd, buf, len);
sum += len;

@@ -252,24 +230,23 @@ namespace gdpm::utils{
}
}

if(zip_close(zip) == -1){
log::error("{}: can't close zip archive '{}'", prog, archive);
return 1;
if(zip_close(za) == -1){
return log::error_rc(error(ec::LIBZIP_ERR,
std::format("utils::extract_zip: can't close zip archive '{}'", archive))
);
}
log::println("Done.");
return 0;
return error();
}

std::string prompt_user(const char *message){
string prompt_user(const char *message){
log::print("{} ", message);
std::string input;
// std::cin >> input;
string input;
getline(std::cin, input);
return input;
}

bool prompt_user_yn(const char *message){
std::string input{""};
string input{""};
while( input != "y" && input != "n" ){
input = to_lower(utils::prompt_user(message));
bool is_default = (input == "\0" || input == "\n" || input == "\r\n" || input.empty());

@@ -287,15 +264,15 @@ namespace gdpm::utils{
// sleep_until(system_clock::now() + millis);
}

std::string join(
const std::vector<std::string>& target,
const std::string& delimiter
string join(
const std::vector<string>& target,
const string& delimiter
){
std::string o;
string o;
std::for_each(
target.begin(),
target.end(),
[&o, &delimiter](const std::string& s){
[&o, &delimiter](const string& s){
o += s + delimiter;
}
);

@@ -303,16 +280,16 @@ namespace gdpm::utils{
return o;
}

std::string join(
const std::unordered_map<std::string, std::string>& target,
const std::string& prefix,
const std::string& delimiter
string join(
const std::unordered_map<string, string>& target,
const string& prefix,
const string& delimiter
){
std::string o;
string o;
std::for_each(
target.begin(),
target.end(),
[&o, &prefix, &delimiter](const std::pair<std::string, std::string>& p){
[&o, &prefix, &delimiter](const std::pair<string, string>& p){
o += prefix + p.first + ": " + p.second + delimiter;
}
);

@@ -320,13 +297,13 @@ namespace gdpm::utils{
return o;
}

std::string convert_size(long size){
string convert_size(long size){
int digit = 0;
while(size > 1000){
size /= 1000;
digit += 1;
}
std::string s = std::to_string(size);
string s = std::to_string(size);
switch(digit){
case 0: return s + " B";
case 1: return s + " KB";

@@ -338,93 +315,13 @@ namespace gdpm::utils{
return std::to_string(size);
}


namespace curl {
size_t write_to_buffer(
char *contents,
size_t size,
size_t nmemb,
void *userdata
){
size_t realsize = size * nmemb;
struct memory_buffer *m = (struct memory_buffer*)userdata;
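/* libcurl write-callback contract: the callback must return the number of bytes
it actually handled; returning 0 below signals an error and aborts the transfer. */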

m->addr = (char*)realloc(m->addr, m->size + realsize + 1);
if(m->addr == nullptr){
/* Out of memory */
fprintf(stderr, "Not enough memory (realloc returned NULL)\n");
return 0;
}

memcpy(&(m->addr[m->size]), contents, realsize);
m->size += realsize;
m->addr[m->size] = 0;

return realsize;
}

size_t write_to_stream(
char *ptr,
size_t size,
size_t nmemb,
void *userdata
){
if(nmemb == 0)
return 0;

return fwrite(ptr, size, nmemb, (FILE*)userdata);
}

int show_progress(
void *ptr,
curl_off_t total_download,
curl_off_t current_downloaded,
curl_off_t total_upload,
curl_off_t current_upload
){

if(current_downloaded >= total_download)
return 0;
using namespace indicators;
show_console_cursor(false);
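/* When the server reports a total size, drive the determinate BlockProgressBar;
otherwise fall back to the indeterminate bar and only show the bytes received so far. */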
if(total_download != 0){
// double percent = std::floor((current_downloaded / (total_download)) * 100);
bar.set_option(option::MaxProgress{total_download});
// bar.set_option(option::HideBarWhenComplete{false});
bar.set_progress(current_downloaded);
bar.set_option(option::PostfixText{
convert_size(current_downloaded) + " / " +
convert_size(total_download)
});
if(bar.is_completed()){
bar.set_option(option::PrefixText{"Download completed."});
bar.mark_as_completed();
}
} else {
if(bar_unknown.is_completed()){
bar_unknown.set_option(option::PrefixText{"Download completed."});
bar_unknown.mark_as_completed();
} else {
bar.tick();
bar_unknown.set_option(
option::PostfixText(std::format("{}", convert_size(current_downloaded)))
);

}
}
show_console_cursor(true);
memory_buffer *m = (memory_buffer*)ptr;
return 0;
}
}

namespace json {
std::string from_array(
const std::set<std::string>& a,
const std::string& prefix
string from_array(
const std::set<string>& a,
const string& prefix
){
std::string o{"["};
for(const std::string& src : a)
string o{"["};
for(const string& src : a)
o += prefix + "\t\"" + src + "\",";
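/* Drop the trailing comma so the emitted JSON array stays valid. */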
if(o.back() == ',')
o.pop_back();

@@ -433,14 +330,14 @@ namespace gdpm::utils{
};


std::string from_object(
const std::unordered_map<std::string, std::string>& m,
const std::string& prefix,
const std::string& spaces
string from_object(
const std::unordered_map<string, string>& m,
const string& prefix,
const string& spaces
){
std::string o{"{"};
string o{"{"};
std::for_each(m.begin(), m.end(),
[&o, &prefix, &spaces](const std::pair<std::string, std::string>& p){
[&o, &prefix, &spaces](const std::pair<string, string>& p){
o += std::format("{}\t\"{}\":{}\"{}\",", prefix, p.first, spaces, p.second);
}
);