mirror of
https://github.com/davidallendj/gdpm.git
synced 2025-12-20 03:27:02 -07:00
Major refactor and API changes
- Updated `.gitignore` file - Updated `CMakeLists.txt` to build static exectuable - Changed some `Doxyfile` configurations to build more robust and complete documentation (WIP) - Changed how `remote` works to better reflect `git`'s API (WIP) - Changed how error handling works - Improved `bin/compile.sh` script - Improved `bin/lines.sh` script (kinda) - Removed some instances of `fmt` in favor of `std` string functions - Restructed style for better readibility
This commit is contained in:
parent
ba23299777
commit
5a73651ad1
29 changed files with 1836 additions and 1140 deletions
284
src/cache.cpp
284
src/cache.cpp
|
|
@ -2,35 +2,44 @@
|
|||
#include "cache.hpp"
|
||||
#include "log.hpp"
|
||||
#include "constants.hpp"
|
||||
#include "package_manager.hpp"
|
||||
#include "package.hpp"
|
||||
#include "utils.hpp"
|
||||
#include "result.hpp"
|
||||
#include <filesystem>
|
||||
#include <string>
|
||||
#include <format>
|
||||
#include <tuple>
|
||||
|
||||
|
||||
namespace gdpm::cache{
|
||||
int create_package_database(const std::string& cache_path, const std::string& table_name){
|
||||
error create_package_database(bool overwrite, const params& params){
|
||||
sqlite3 *db;
|
||||
sqlite3_stmt *res;
|
||||
char *errmsg;
|
||||
|
||||
/* Check and make sure directory is created before attempting to open */
|
||||
namespace fs = std::filesystem;
|
||||
fs::path dir_path = fs::path(cache_path).parent_path();
|
||||
fs::path dir_path = fs::path(params.cache_path).parent_path();
|
||||
if(!fs::exists(dir_path)){
|
||||
log::info("Creating cache directories...{}", cache_path);
|
||||
log::info("Creating cache directories...{}", params.cache_path);
|
||||
fs::create_directories(dir_path);
|
||||
}
|
||||
|
||||
int rc = sqlite3_open(cache_path.c_str(), &db);
|
||||
int rc = sqlite3_open(params.cache_path.c_str(), &db);
|
||||
if(rc != SQLITE_OK){
|
||||
log::error("create_package_database.sqlite3_open(): {}", sqlite3_errmsg(db));
|
||||
error error(rc,
|
||||
std::format(
|
||||
"create_package_database.sqlite3_open(): {}",
|
||||
sqlite3_errmsg(db)
|
||||
)
|
||||
);
|
||||
log::error(error);
|
||||
sqlite3_close(db);
|
||||
return rc;
|
||||
return error;
|
||||
}
|
||||
|
||||
std::string sql = "CREATE TABLE IF NOT EXISTS " +
|
||||
table_name + "("
|
||||
string sql = "CREATE TABLE IF NOT EXISTS " +
|
||||
params.table_name + "("
|
||||
"id INTEGER PRIMARY KEY AUTOINCREMENT,"
|
||||
"asset_id INT NOT NULL,"
|
||||
"type INT NOT NULL,"
|
||||
|
|
@ -54,60 +63,71 @@ namespace gdpm::cache{
|
|||
rc = sqlite3_exec(db, sql.c_str(), nullptr, nullptr, &errmsg);
|
||||
if(rc != SQLITE_OK){
|
||||
// log::error("Failed to fetch data: {}\n", sqlite3_errmsg(db));
|
||||
log::error("create_package_database.sqlite3_exec(): {}", errmsg);
|
||||
error error(rc, std::format(
|
||||
"create_package_database.sqlite3_exec(): {}",
|
||||
errmsg
|
||||
));
|
||||
log::error(error);
|
||||
sqlite3_free(errmsg);
|
||||
sqlite3_close(db);
|
||||
return rc;
|
||||
return error;
|
||||
}
|
||||
sqlite3_close(db);
|
||||
return 0;
|
||||
return error();
|
||||
}
|
||||
|
||||
|
||||
int insert_package_info(const std::vector<package_info>& packages, const std::string& cache_path, const std::string& table_name){
|
||||
error insert_package_info(const package::info_list& packages, const params& params){
|
||||
sqlite3 *db;
|
||||
sqlite3_stmt *res;
|
||||
char *errmsg = nullptr;
|
||||
|
||||
/* Prepare values to use in sql statement */
|
||||
std::string sql{"BEGIN TRANSACTION; "};
|
||||
string sql{"BEGIN TRANSACTION; "};
|
||||
for(const auto& p : packages){
|
||||
sql += "INSERT INTO " + table_name + " (" GDPM_PACKAGE_CACHE_COLNAMES ") ";
|
||||
sql += "VALUES (" + to_values(p) + "); ";
|
||||
sql += "INSERT INTO " + params.table_name + " (" GDPM_PACKAGE_CACHE_COLNAMES ") ";
|
||||
sql += "VALUES (" + to_values(p).unwrap_unsafe() + "); ";
|
||||
}
|
||||
sql += "COMMIT;";
|
||||
// log::println("{}", sql);
|
||||
int rc = sqlite3_open(cache_path.c_str(), &db);
|
||||
int rc = sqlite3_open(params.cache_path.c_str(), &db);
|
||||
if(rc != SQLITE_OK){
|
||||
log::error("insert_package_info.sqlite3_open(): {}", sqlite3_errmsg(db));
|
||||
error error(rc, std::format(
|
||||
"insert_package_info.sqlite3_open(): {}",
|
||||
sqlite3_errmsg(db)
|
||||
));
|
||||
log::error(error);
|
||||
sqlite3_close(db);
|
||||
return rc;
|
||||
return error;
|
||||
}
|
||||
rc = sqlite3_exec(db, sql.c_str(), nullptr, nullptr, &errmsg);
|
||||
if(rc != SQLITE_OK){
|
||||
log::error("insert_package_info.sqlite3_exec(): {}", errmsg);
|
||||
error error(rc, std::format(
|
||||
"insert_package_info.sqlite3_exec(): {}", errmsg
|
||||
));
|
||||
log::error(error);
|
||||
sqlite3_free(errmsg);
|
||||
sqlite3_close(db);
|
||||
return rc;
|
||||
return error;
|
||||
}
|
||||
sqlite3_close(db);
|
||||
return 0;
|
||||
return error();
|
||||
}
|
||||
|
||||
|
||||
std::vector<package_info> get_package_info_by_id(const std::vector<size_t>& package_ids, const std::string& cache_path, const std::string& table_name){
|
||||
result_t<package::info_list> get_package_info_by_id(const package::id_list& package_ids, const params& params){
|
||||
sqlite3 *db;
|
||||
sqlite3_stmt *res;
|
||||
char *errmsg = nullptr;
|
||||
size_t p_size = 0;
|
||||
std::vector<package_info> p_vector;
|
||||
std::string sql{"BEGIN TRANSACTION;\n"};
|
||||
package::info_list p_vector;
|
||||
string sql{"BEGIN TRANSACTION;\n"};
|
||||
|
||||
auto callback = [](void *data, int argc, char **argv, char **colnames){
|
||||
// log::error("{}", (const char*)data);
|
||||
// p_data *_data = (p_data*)data;
|
||||
std::vector<package_info> *_p_vector = (std::vector<package_info>*) data;
|
||||
package_info p{
|
||||
package::info_list *_p_vector = (package::info_list*) data;
|
||||
package::info p{
|
||||
.asset_id = std::stoul(argv[1]),
|
||||
.type = argv[2],
|
||||
.title = argv[3],
|
||||
|
|
@ -130,41 +150,47 @@ namespace gdpm::cache{
|
|||
return 0;
|
||||
};
|
||||
|
||||
int rc = sqlite3_open(cache_path.c_str(), &db);
|
||||
int rc = sqlite3_open(params.cache_path.c_str(), &db);
|
||||
if(rc != SQLITE_OK){
|
||||
log::error("get_package_info_by_id.sqlite3_open(): {}", sqlite3_errmsg(db));
|
||||
error error(rc, std::format(
|
||||
"get_package_info_by_id.sqlite3_open(): {}", sqlite3_errmsg(db)
|
||||
));
|
||||
log::error(error);
|
||||
sqlite3_close(db);
|
||||
return {};
|
||||
return result_t(package::info_list(), error);
|
||||
}
|
||||
|
||||
for(const auto& p_id : package_ids){
|
||||
sql += "SELECT * FROM " + table_name + " WHERE asset_id=" + fmt::to_string(p_id)+ ";\n";
|
||||
sql += "SELECT * FROM " + params.table_name + " WHERE asset_id=" + std::to_string(p_id)+ ";\n";
|
||||
}
|
||||
sql += "COMMIT;\n";
|
||||
rc = sqlite3_exec(db, sql.c_str(), callback, (void*)&p_vector, &errmsg);
|
||||
if(rc != SQLITE_OK){
|
||||
error error(rc, std::format(
|
||||
"get_package_info_by_id.sqlite3_exec(): {}", errmsg
|
||||
));
|
||||
log::error("get_package_info_by_id.sqlite3_exec(): {}", errmsg);
|
||||
sqlite3_free(errmsg);
|
||||
sqlite3_close(db);
|
||||
return {};
|
||||
return result_t(package::info_list(), error);
|
||||
}
|
||||
sqlite3_close(db);
|
||||
return p_vector;
|
||||
return result_t(p_vector, error());
|
||||
}
|
||||
|
||||
|
||||
std::vector<package_info> get_package_info_by_title(const std::vector<std::string>& package_titles, const std::string& cache_path, const std::string& table_name){
|
||||
result_t<package::info_list> get_package_info_by_title(const package::title_list& package_titles, const params& params){
|
||||
sqlite3 *db;
|
||||
sqlite3_stmt *res;
|
||||
char *errmsg = nullptr;
|
||||
std::vector<package_info> p_vector;
|
||||
package::info_list p_vector;
|
||||
|
||||
auto callback = [](void *data, int argc, char **argv, char **colnames){
|
||||
if(argc <= 0)
|
||||
return 1;
|
||||
std::vector<package_info> *_p_vector = (std::vector<package_info>*)data;
|
||||
package::info_list *_p_vector = (package::info_list*)data;
|
||||
// log::println("get_package_info_by_title.callback.argv: \n\t{}\n\t{}\n\t{}\n\t{}\n\t{}", argv[0], argv[1], argv[2],argv[3], argv[4]);
|
||||
package_info p{
|
||||
package::info p{
|
||||
.asset_id = std::stoul(argv[1]),
|
||||
.type = argv[2],
|
||||
.title = argv[3],
|
||||
|
|
@ -188,44 +214,50 @@ namespace gdpm::cache{
|
|||
};
|
||||
|
||||
/* Check to make sure the directory is there before attempting to open */
|
||||
if(!std::filesystem::exists(cache_path))
|
||||
std::filesystem::create_directories(cache_path);
|
||||
if(!std::filesystem::exists(params.cache_path))
|
||||
std::filesystem::create_directories(params.cache_path);
|
||||
|
||||
int rc = sqlite3_open(cache_path.c_str(), &db);
|
||||
int rc = sqlite3_open(params.cache_path.c_str(), &db);
|
||||
if(rc != SQLITE_OK){
|
||||
log::error("get_package_info_by_title.sqlite3_open(): {}", sqlite3_errmsg(db));
|
||||
error error(rc, std::format(
|
||||
"get_package_info_by_title.sqlite3_open(): {}", sqlite3_errmsg(db)
|
||||
));
|
||||
log::error(error);
|
||||
sqlite3_close(db);
|
||||
return {};
|
||||
return result_t(package::info_list(), error);
|
||||
}
|
||||
|
||||
std::string sql{"BEGIN TRANSACTION;"};
|
||||
string sql{"BEGIN TRANSACTION;"};
|
||||
for(const auto& p_title : package_titles){
|
||||
sql += "SELECT * FROM " + table_name + " WHERE title='" + p_title + "';";
|
||||
sql += "SELECT * FROM " + params.table_name + " WHERE title='" + p_title + "';";
|
||||
}
|
||||
sql += "COMMIT;";
|
||||
// log::println(sql);
|
||||
rc = sqlite3_exec(db, sql.c_str(), callback, (void*)&p_vector, &errmsg);
|
||||
if(rc != SQLITE_OK){
|
||||
log::error("get_package_info_by_title.sqlite3_exec(): {}", errmsg);
|
||||
error error(rc, std::format(
|
||||
"get_package_info_by_title.sqlite3_exec(): {}", errmsg
|
||||
));
|
||||
log::error(error);
|
||||
sqlite3_free(errmsg);
|
||||
sqlite3_close(db);
|
||||
return {};
|
||||
return result_t(package::info_list(), error);
|
||||
}
|
||||
sqlite3_close(db);
|
||||
return p_vector;
|
||||
return result_t(p_vector, error());
|
||||
}
|
||||
|
||||
|
||||
std::vector<package_info> get_installed_packages(const std::string& cache_path, const std::string& table_name){
|
||||
result_t<package::info_list> get_installed_packages(const params& params){
|
||||
sqlite3 *db;
|
||||
sqlite3_stmt *res;
|
||||
char *errmsg = nullptr;
|
||||
std::vector<package_info> p_vector;
|
||||
std::string sql{"BEGIN TRANSACTION;"};
|
||||
package::info_list p_vector;
|
||||
string sql{"BEGIN TRANSACTION;"};
|
||||
|
||||
auto callback = [](void *data, int argc, char **argv, char **colnames){
|
||||
std::vector<package_info> *_p_vector = (std::vector<package_info>*) data;
|
||||
package_info p{
|
||||
package::info_list *_p_vector = (package::info_list*) data;
|
||||
package::info p{
|
||||
.asset_id = std::stoul(argv[1]),
|
||||
.type = argv[2],
|
||||
.title = argv[3],
|
||||
|
|
@ -248,46 +280,55 @@ namespace gdpm::cache{
|
|||
return 0;
|
||||
};
|
||||
|
||||
int rc = sqlite3_open(cache_path.c_str(), &db);
|
||||
int rc = sqlite3_open(params.cache_path.c_str(), &db);
|
||||
if(rc != SQLITE_OK){
|
||||
log::error("get_installed_packages.sqlite3_open(): {}", sqlite3_errmsg(db));
|
||||
error error(rc, std::format(
|
||||
"get_installed_packages.sqlite3_open(): {}", sqlite3_errmsg(db)
|
||||
));
|
||||
log::error(error);
|
||||
sqlite3_close(db);
|
||||
return {};
|
||||
return result_t(package::info_list(), error);
|
||||
}
|
||||
|
||||
sql += "SELECT * FROM " + table_name + " WHERE is_installed=1; COMMIT;";
|
||||
sql += "SELECT * FROM " + params.table_name + " WHERE is_installed=1; COMMIT;";
|
||||
rc = sqlite3_exec(db, sql.c_str(), callback, (void*)&p_vector, &errmsg);
|
||||
if(rc != SQLITE_OK){
|
||||
log::error("get_installed_packages.sqlite3_exec(): {}", errmsg);
|
||||
error error(rc, std::format(
|
||||
"get_installed_packages.sqlite3_exec(): {}", errmsg
|
||||
));
|
||||
log::error(error);
|
||||
sqlite3_free(errmsg);
|
||||
sqlite3_close(db);
|
||||
return {};
|
||||
return result_t(package::info_list(), error);
|
||||
}
|
||||
sqlite3_close(db);
|
||||
return p_vector;
|
||||
return result_t(p_vector, error());
|
||||
}
|
||||
|
||||
|
||||
int update_package_info(const std::vector<package_info>& packages, const std::string& cache_path, const std::string& table_name){
|
||||
error update_package_info(const package::info_list& packages, const params& params){
|
||||
sqlite3 *db;
|
||||
sqlite3_stmt *res;
|
||||
char *errmsg = nullptr;
|
||||
|
||||
int rc = sqlite3_open(cache_path.c_str(), &db);
|
||||
int rc = sqlite3_open(params.cache_path.c_str(), &db);
|
||||
if(rc != SQLITE_OK){
|
||||
log::error("update_package_info.sqlite3_open(): {}", sqlite3_errmsg(db));
|
||||
error error(rc, std::format(
|
||||
"update_package_info.sqlite3_open(): {}", sqlite3_errmsg(db)
|
||||
));
|
||||
log::error(error);
|
||||
sqlite3_close(db);
|
||||
return rc;
|
||||
return error;
|
||||
}
|
||||
|
||||
std::string sql;
|
||||
string sql;
|
||||
for(const auto& p : packages){
|
||||
sql += "UPDATE " + table_name + " SET "
|
||||
" asset_id=" + fmt::to_string(p.asset_id) + ", "
|
||||
sql += "UPDATE " + params.table_name + " SET "
|
||||
" asset_id=" + std::to_string(p.asset_id) + ", "
|
||||
" type='" + p.type + "', "
|
||||
" title='" + p.title + "', "
|
||||
" author='" + p.author + "', " +
|
||||
" author_id=" + fmt::to_string(p.author_id) + ", "
|
||||
" author_id=" + std::to_string(p.author_id) + ", "
|
||||
" version='" + p.version + "', " +
|
||||
" godot_version='" + p.godot_version + "', " +
|
||||
" cost='" + p.cost + "', " +
|
||||
|
|
@ -298,115 +339,137 @@ namespace gdpm::cache{
|
|||
" remote_source='" + p.remote_source + "', " +
|
||||
" download_url='" + p.download_url + "', " +
|
||||
" download_hash='" + p.download_hash + "', " +
|
||||
" is_installed=" + fmt::to_string(p.is_installed) + ", "
|
||||
" is_installed=" + std::to_string(p.is_installed) + ", "
|
||||
" install_path='" + p.install_path + "'"
|
||||
// " dependencies='" + p.dependencies + "'"
|
||||
" WHERE title='" + p.title + "' AND asset_id=" + fmt::to_string(p.asset_id)
|
||||
" WHERE title='" + p.title + "' AND asset_id=" + std::to_string(p.asset_id)
|
||||
+ ";\n";
|
||||
}
|
||||
rc = sqlite3_exec(db, sql.c_str(), nullptr, nullptr, &errmsg);
|
||||
if(rc != SQLITE_OK){
|
||||
log::error("update_package_info.sqlite3_exec(): {}", errmsg);
|
||||
error error(rc, std::format(
|
||||
"update_package_info.sqlite3_exec(): {}", errmsg
|
||||
));
|
||||
log::error(error);
|
||||
sqlite3_free(errmsg);
|
||||
sqlite3_close(db);
|
||||
return rc;
|
||||
return error;
|
||||
}
|
||||
sqlite3_close(db);
|
||||
return 0;
|
||||
return error();
|
||||
}
|
||||
|
||||
|
||||
int delete_packages(const std::vector<std::string>& package_titles, const std::string& cache_path, const std::string& table_name){
|
||||
error delete_packages(const package::title_list& package_titles, const params& params){
|
||||
sqlite3 *db;
|
||||
sqlite3_stmt *res;
|
||||
char *errmsg = nullptr;
|
||||
std::string sql;
|
||||
string sql;
|
||||
|
||||
int rc = sqlite3_open(cache_path.c_str(), &db);
|
||||
int rc = sqlite3_open(params.cache_path.c_str(), &db);
|
||||
if(rc != SQLITE_OK){
|
||||
log::error("delete_packages.sqlite3_open(): {}", sqlite3_errmsg(db));
|
||||
error error(rc, std::format(
|
||||
"delete_packages.sqlite3_open(): {}", sqlite3_errmsg(db)
|
||||
));
|
||||
log::error(error);
|
||||
sqlite3_close(db);
|
||||
return rc;
|
||||
return error;
|
||||
}
|
||||
|
||||
for(const auto& p_title : package_titles){
|
||||
sql += "DELETE FROM " + table_name + " WHERE title='"
|
||||
sql += "DELETE FROM " + params.table_name + " WHERE title='"
|
||||
+ p_title + "';\n";
|
||||
}
|
||||
rc = sqlite3_exec(db, sql.c_str(), nullptr, nullptr, &errmsg);
|
||||
if(rc != SQLITE_OK){
|
||||
log::error("delete_packages.sqlite3_exec(): {}", errmsg);
|
||||
error error(rc, std::format(
|
||||
"delete_packages.sqlite3_exec(): {}", errmsg
|
||||
));
|
||||
log::error(error);
|
||||
sqlite3_free(errmsg);
|
||||
sqlite3_close(db);
|
||||
return rc;
|
||||
return error;
|
||||
}
|
||||
sqlite3_close(db);
|
||||
return 0;
|
||||
return error();
|
||||
}
|
||||
|
||||
|
||||
int delete_packages(const std::vector<size_t>& package_ids, const std::string& cache_path, const std::string& table_name){
|
||||
error delete_packages(const package::id_list& package_ids, const params& params){
|
||||
sqlite3 *db;
|
||||
sqlite3_stmt *res;
|
||||
char *errmsg = nullptr;
|
||||
std::string sql;
|
||||
string sql;
|
||||
|
||||
int rc = sqlite3_open(cache_path.c_str(), &db);
|
||||
int rc = sqlite3_open(params.cache_path.c_str(), &db);
|
||||
if(rc != SQLITE_OK){
|
||||
log::error("delete_packages.sqlite3_open(): {}", errmsg);
|
||||
error error(rc, std::format(
|
||||
"delete_packages.sqlite3_open(): {}", errmsg
|
||||
));
|
||||
log::error(error);
|
||||
sqlite3_close(db);
|
||||
return rc;
|
||||
return error;
|
||||
}
|
||||
|
||||
for(const auto& p_id : package_ids){
|
||||
sql += "DELETE FROM " + table_name + " WHERE asset_id="
|
||||
+ fmt::to_string(p_id) + ";\n";
|
||||
sql += "DELETE FROM " + params.table_name + " WHERE asset_id="
|
||||
+ std::to_string(p_id) + ";\n";
|
||||
}
|
||||
rc = sqlite3_exec(db, sql.c_str(), nullptr, nullptr, &errmsg);
|
||||
if(rc != SQLITE_OK){
|
||||
log::error("delete_packages.sqlite3_exec(): {}", errmsg);
|
||||
error error(rc, std::format(
|
||||
"delete_packages.sqlite3_exec(): {}", errmsg
|
||||
));
|
||||
log::error(error);
|
||||
sqlite3_free(errmsg);
|
||||
sqlite3_close(db);
|
||||
return rc;
|
||||
return error;
|
||||
}
|
||||
sqlite3_close(db);
|
||||
return 0;
|
||||
return error();
|
||||
}
|
||||
|
||||
|
||||
int drop_package_database(const std::string& cache_path, const std::string& table_name){
|
||||
error drop_package_database(const params& params){
|
||||
sqlite3 *db;
|
||||
sqlite3_stmt *res;
|
||||
char *errmsg = nullptr;
|
||||
std::string sql{"DROP TABLE IF EXISTS " + table_name + ";\n"};
|
||||
string sql{"DROP TABLE IF EXISTS " + params.table_name + ";\n"};
|
||||
|
||||
int rc = sqlite3_open(cache_path.c_str(), &db);
|
||||
int rc = sqlite3_open(params.cache_path.c_str(), &db);
|
||||
if(rc != SQLITE_OK){
|
||||
log::error("drop_package_database.sqlite3_open(): {}", sqlite3_errmsg(db));
|
||||
error error(rc, std::format(
|
||||
"drop_package_database.sqlite3_open(): {}", sqlite3_errmsg(db)
|
||||
));
|
||||
log::error(error);
|
||||
sqlite3_close(db);
|
||||
return rc;
|
||||
return error;
|
||||
}
|
||||
|
||||
rc = sqlite3_exec(db, sql.c_str(), nullptr, nullptr, &errmsg);
|
||||
if(rc != SQLITE_OK){
|
||||
log::error("drop_package_database.sqlite3_exec(): {}", errmsg);
|
||||
error error(rc, std::format(
|
||||
"drop_package_database.sqlite3_exec(): {}", errmsg
|
||||
));
|
||||
log::error(error);
|
||||
sqlite3_free(errmsg);
|
||||
sqlite3_close(db);
|
||||
return rc;
|
||||
return error;
|
||||
}
|
||||
|
||||
return 0;
|
||||
return error();
|
||||
}
|
||||
|
||||
std::string to_values(const package_info& p){
|
||||
std::string p_values{};
|
||||
std::string p_title = p.title; /* need copy for utils::replace_all() */
|
||||
|
||||
p_values += fmt::to_string(p.asset_id) + ", ";
|
||||
result_t<string> to_values(const package::info& p){
|
||||
string p_values{};
|
||||
string p_title = p.title; /* need copy for utils::replace_all() */
|
||||
|
||||
p_values += std::to_string(p.asset_id) + ", ";
|
||||
p_values += "'" + p.type + "', ";
|
||||
p_values += "'" + utils::replace_all(p_title, "'", "''") + "', ";
|
||||
p_values += "'" + p.author + "', ";
|
||||
p_values += fmt::to_string(p.author_id) + ", ";
|
||||
p_values += std::to_string(p.author_id) + ", ";
|
||||
p_values += "'" + p.version + "', ";
|
||||
p_values += "'" + p.godot_version + "', ";
|
||||
p_values += "'" + p.cost + "', ";
|
||||
|
|
@ -417,16 +480,17 @@ namespace gdpm::cache{
|
|||
p_values += "'" + p.remote_source + "', ";
|
||||
p_values += "'" + p.download_url + "', ";
|
||||
p_values += "'" + p.download_hash + "', ";
|
||||
p_values += fmt::to_string(p.is_installed) + ", ";
|
||||
p_values += std::to_string(p.is_installed) + ", ";
|
||||
p_values += "'" + p.install_path + "'";
|
||||
return p_values;
|
||||
return result_t(p_values, error());
|
||||
}
|
||||
|
||||
std::string to_values(const std::vector<package_info>& packages){
|
||||
std::string o;
|
||||
|
||||
result_t<string> to_values(const package::info_list& packages){
|
||||
string o;
|
||||
for(const auto& p : packages)
|
||||
o += to_values(p);
|
||||
return o;
|
||||
o += to_values(p).unwrap_unsafe();
|
||||
return result_t(o, error());
|
||||
}
|
||||
|
||||
}
|
||||
123
src/config.cpp
123
src/config.cpp
|
|
@ -1,7 +1,9 @@
|
|||
#include "config.hpp"
|
||||
#include "error.hpp"
|
||||
#include "log.hpp"
|
||||
#include "utils.hpp"
|
||||
#include "constants.hpp"
|
||||
#include "error.hpp"
|
||||
|
||||
// RapidJSON
|
||||
#include <rapidjson/ostreamwrapper.h>
|
||||
|
|
@ -23,6 +25,8 @@
|
|||
#include <fstream>
|
||||
#include <ios>
|
||||
#include <memory>
|
||||
#include <set>
|
||||
#include <unordered_map>
|
||||
|
||||
|
||||
namespace gdpm::config{
|
||||
|
|
@ -38,8 +42,19 @@ namespace gdpm::config{
|
|||
return o;
|
||||
};
|
||||
|
||||
auto _build_json_object = [](const string_map& m){
|
||||
string o{"{"};
|
||||
std::for_each(m.begin(), m.end(), [&o](const string_pair& p){
|
||||
o += std::format("\n\"{}\": \"{}\",", p.first, p.second);
|
||||
});
|
||||
if(o.back() == ',')
|
||||
o.pop_back();
|
||||
o += "}";
|
||||
return o;
|
||||
};
|
||||
|
||||
/* Build a JSON string to pass to document */
|
||||
std::string json{
|
||||
string json{
|
||||
"{\"username\":\"" + params.username + "\","
|
||||
+ "\"password\":\"" + params.password + "\","
|
||||
+ "\"path\":\"" + params.path + "\","
|
||||
|
|
@ -47,7 +62,7 @@ namespace gdpm::config{
|
|||
+ "\"godot_version\":\"" + params.godot_version + "\","
|
||||
+ "\"packages_dir\":\"" + params.packages_dir + "\","
|
||||
+ "\"tmp_dir\":\"" + params.tmp_dir + "\","
|
||||
+ "\"remote_sources\":" + _build_json_array(params.remote_sources) + ","
|
||||
+ "\"remote_sources\":" + _build_json_object(params.remote_sources) + ","
|
||||
+ "\"threads\":" + fmt::to_string(params.threads) + ","
|
||||
+ "\"timeout\":" + fmt::to_string(params.timeout) + ","
|
||||
+ "\"enable_sync\":" + fmt::to_string(params.enable_sync) + ","
|
||||
|
|
@ -58,17 +73,19 @@ namespace gdpm::config{
|
|||
}
|
||||
|
||||
|
||||
gdpm::error load(std::filesystem::path path, context& config, int verbose){
|
||||
error load(
|
||||
std::filesystem::path path,
|
||||
context& config,
|
||||
int verbose
|
||||
){
|
||||
std::fstream file;
|
||||
gdpm::error error;
|
||||
file.open(path, std::ios::in);
|
||||
if(!file){
|
||||
if(verbose){
|
||||
if(verbose)
|
||||
log::info("No configuration file found. Creating a new one.");
|
||||
config = make_context();
|
||||
save(config.path, config, verbose);
|
||||
}
|
||||
return error;
|
||||
config = make_context();
|
||||
save(config.path, config, verbose);
|
||||
return error();
|
||||
}
|
||||
else if(file.is_open()){
|
||||
/*
|
||||
|
|
@ -79,24 +96,30 @@ namespace gdpm::config{
|
|||
using namespace rapidjson;
|
||||
|
||||
/* Read JSON from config, parse, and check document. Must make sure that program does not crash here and use default config instead! */
|
||||
std::string contents, line;
|
||||
string contents, line;
|
||||
while(std::getline(file, line))
|
||||
contents += line + "\n";
|
||||
|
||||
if(verbose > 0)
|
||||
log::info("Load config...\n{}", contents.c_str());
|
||||
log::info("Loading configuration file...\n{}", contents.c_str());
|
||||
|
||||
Document doc;
|
||||
ParseErrorCode status = doc.Parse(contents.c_str()).GetParseError();
|
||||
|
||||
if(!doc.IsObject()){
|
||||
log::error("Could not load config file.");
|
||||
error error(
|
||||
constants::error::FILE_NOT_FOUND,
|
||||
"Could not load config file."
|
||||
);
|
||||
log::error(error);
|
||||
return error;
|
||||
}
|
||||
|
||||
assert(doc.IsObject());
|
||||
assert(doc.HasMember("remote_sources"));
|
||||
assert(doc["remote_sources"].IsArray());
|
||||
error error = validate(doc);
|
||||
if(error()){
|
||||
log::error(error);
|
||||
return error;
|
||||
}
|
||||
|
||||
/* Make sure contents were read correctly. */
|
||||
// if(!status){
|
||||
|
|
@ -105,16 +128,23 @@ namespace gdpm::config{
|
|||
// return context();
|
||||
// }
|
||||
|
||||
/* Must check if keys exists first, then populate _config_params. */
|
||||
/* Must check if keys exists first, then populate `_config_params`. */
|
||||
if(doc.HasMember("remote_sources")){
|
||||
if(doc["remote_sources"].IsArray()){
|
||||
const Value& srcs = doc["remote_sources"];
|
||||
for(auto& src : srcs.GetArray()){
|
||||
for(auto& src : srcs.GetObject()){
|
||||
// config.remote_sources.push_back(src.GetString());
|
||||
config.remote_sources.insert(src.GetString());
|
||||
config.remote_sources.insert(
|
||||
std::pair(src.name.GetString(), src.value.GetString())
|
||||
);
|
||||
}
|
||||
} else{
|
||||
log::error("Malformed sources found.");
|
||||
} else {
|
||||
gdpm::error error(
|
||||
constants::error::INVALID_KEY,
|
||||
"Could not read key `remote_sources`."
|
||||
);
|
||||
log::error(error);
|
||||
return error;
|
||||
}
|
||||
}
|
||||
auto _get_value_string = [](Document& doc, const char *property){
|
||||
|
|
@ -135,23 +165,27 @@ namespace gdpm::config{
|
|||
config.path = _get_value_string(doc, "path");
|
||||
config.token = _get_value_string(doc, "token");
|
||||
config.godot_version = _get_value_string(doc, "godot_version");
|
||||
config.packages_dir = _get_value_string(doc, "packages_dir");
|
||||
config.tmp_dir = _get_value_string(doc, "tmp_dir");
|
||||
config.packages_dir = _get_value_string(doc, "packages_dir");
|
||||
config.tmp_dir = _get_value_string(doc, "tmp_dir");
|
||||
config.threads = _get_value_int(doc, "threads");
|
||||
config.enable_sync = _get_value_int(doc, "enable_sync");
|
||||
config.enable_file_logging = _get_value_int(doc, "enable_file_logging");
|
||||
}
|
||||
return error;
|
||||
return error();
|
||||
}
|
||||
|
||||
|
||||
gdpm::error save(std::filesystem::path path, const context& config, int verbose){
|
||||
error save(
|
||||
std::filesystem::path path,
|
||||
const context& config,
|
||||
int verbose
|
||||
){
|
||||
using namespace rapidjson;
|
||||
|
||||
/* Build a JSON string to pass to document */
|
||||
std::string json = to_json(config);
|
||||
string json = to_json(config);
|
||||
if(verbose > 0)
|
||||
log::info("Save config...\n{}", json.c_str());
|
||||
log::info("Saving configuration file...\n{}", json.c_str());
|
||||
|
||||
/* Dump JSON config to file */
|
||||
Document doc;
|
||||
|
|
@ -166,14 +200,28 @@ namespace gdpm::config{
|
|||
}
|
||||
|
||||
|
||||
context make_context(const std::string& username, const std::string& password, const std::string& path, const std::string& token, const std::string& godot_version, const std::string& packages_dir, const std::string& tmp_dir, const std::set<std::string>& remote_sources, size_t threads, size_t timeout, bool enable_sync, bool enable_file_logging, int verbose){
|
||||
context make_context(
|
||||
const string& username,
|
||||
const string& password,
|
||||
const string& path,
|
||||
const string& token,
|
||||
const string& godot_version,
|
||||
const string& packages_dir,
|
||||
const string& tmp_dir,
|
||||
const string_map& remote_sources,
|
||||
size_t threads,
|
||||
size_t timeout,
|
||||
bool enable_sync,
|
||||
bool enable_file_logging,
|
||||
int verbose
|
||||
){
|
||||
context config {
|
||||
.username = username,
|
||||
.password = password,
|
||||
.path = path,
|
||||
.token = token,
|
||||
.godot_version = godot_version,
|
||||
.packages_dir = (packages_dir.empty()) ? std::string(getenv("HOME")) + ".gdpm" : packages_dir,
|
||||
.packages_dir = (packages_dir.empty()) ? string(getenv("HOME")) + ".gdpm" : packages_dir,
|
||||
.tmp_dir = tmp_dir,
|
||||
.remote_sources = remote_sources,
|
||||
.threads = threads,
|
||||
|
|
@ -185,4 +233,23 @@ namespace gdpm::config{
|
|||
return config;
|
||||
}
|
||||
|
||||
|
||||
error validate(const rapidjson::Document& doc){
|
||||
error error(constants::error::INVALID_CONFIG, "");
|
||||
if(!doc.IsObject()){
|
||||
error.set_message("Document is not a JSON object.");
|
||||
return error;
|
||||
}
|
||||
if(!doc.HasMember("remote_sources")){
|
||||
error.set_message("Could not find `remote_sources` in config.");
|
||||
return error;
|
||||
}
|
||||
if(!doc["remote_sources"].IsObject()){
|
||||
error.set_message("Key `remote_sources` is not a JSON object.");
|
||||
return error;
|
||||
}
|
||||
error.set_code(constants::error::NONE);
|
||||
return error;
|
||||
}
|
||||
|
||||
}
|
||||
41
src/http.cpp
41
src/http.cpp
|
|
@ -8,7 +8,20 @@
|
|||
|
||||
|
||||
namespace gdpm::http{
|
||||
response request_get(const std::string& url, size_t timeout, int verbose){
|
||||
|
||||
string url_escape(const string &url){
|
||||
CURL *curl = nullptr;
|
||||
curl_global_init(CURL_GLOBAL_ALL);
|
||||
char *escaped_url = curl_easy_escape(curl, url.c_str(), url.size());
|
||||
std::string url_copy = escaped_url;
|
||||
curl_global_cleanup();
|
||||
return escaped_url;
|
||||
}
|
||||
|
||||
response request_get(
|
||||
const string& url,
|
||||
const http::params& params
|
||||
){
|
||||
CURL *curl = nullptr;
|
||||
CURLcode res;
|
||||
utils::memory_buffer buf = utils::make_buffer();
|
||||
|
|
@ -28,10 +41,10 @@ namespace gdpm::http{
|
|||
curl_easy_setopt(curl, CURLOPT_WRITEDATA, (void*)&buf);
|
||||
curl_easy_setopt(curl, CURLOPT_WRITEFUNCTION, utils::curl_write_to_buffer);
|
||||
curl_easy_setopt(curl, CURLOPT_USERAGENT, constants::UserAgent.c_str());
|
||||
curl_easy_setopt(curl, CURLOPT_TIMEOUT_MS, timeout);
|
||||
curl_easy_setopt(curl, CURLOPT_TIMEOUT_MS, params.timeout);
|
||||
res = curl_easy_perform(curl);
|
||||
curl_easy_getinfo(curl, CURLINFO_RESPONSE_CODE, &r.code);
|
||||
if(res != CURLE_OK && verbose > 0)
|
||||
if(res != CURLE_OK && params.verbose > 0)
|
||||
log::error("_make_request.curl_easy_perform(): {}", curl_easy_strerror(res));
|
||||
curl_easy_cleanup(curl);
|
||||
}
|
||||
|
|
@ -42,7 +55,12 @@ namespace gdpm::http{
|
|||
return r;
|
||||
}
|
||||
|
||||
response request_post(const std::string& url, const char *post_fields, size_t timeout, int verbose){
|
||||
|
||||
response request_post(
|
||||
const string& url,
|
||||
const char *post_fields,
|
||||
const http::params& params
|
||||
){
|
||||
CURL *curl = nullptr;
|
||||
CURLcode res;
|
||||
utils::memory_buffer buf = utils::make_buffer();
|
||||
|
|
@ -62,10 +80,10 @@ namespace gdpm::http{
|
|||
curl_easy_setopt(curl, CURLOPT_WRITEDATA, (void*)&buf);
|
||||
curl_easy_setopt(curl, CURLOPT_WRITEFUNCTION, utils::curl_write_to_buffer);
|
||||
curl_easy_setopt(curl, CURLOPT_USERAGENT, constants::UserAgent.c_str());
|
||||
curl_easy_setopt(curl, CURLOPT_TIMEOUT_MS, timeout);
|
||||
curl_easy_setopt(curl, CURLOPT_TIMEOUT_MS, params.timeout);
|
||||
res = curl_easy_perform(curl);
|
||||
curl_easy_getinfo(curl, CURLINFO_RESPONSE_CODE, &r.code);
|
||||
if(res != CURLE_OK && verbose > 0)
|
||||
if(res != CURLE_OK && params.verbose > 0)
|
||||
log::error("_make_request.curl_easy_perform(): {}", curl_easy_strerror(res));
|
||||
curl_easy_cleanup(curl);
|
||||
}
|
||||
|
|
@ -76,7 +94,12 @@ namespace gdpm::http{
|
|||
return r;
|
||||
}
|
||||
|
||||
response download_file(const std::string& url, const std::string& storage_path, size_t timeout, int verbose){
|
||||
|
||||
response download_file(
|
||||
const string& url,
|
||||
const string& storage_path,
|
||||
const http::params& params
|
||||
){
|
||||
CURL *curl = nullptr;
|
||||
CURLcode res;
|
||||
response r;
|
||||
|
|
@ -110,10 +133,10 @@ namespace gdpm::http{
|
|||
curl_easy_setopt(curl, CURLOPT_WRITEDATA, fp);
|
||||
curl_easy_setopt(curl, CURLOPT_WRITEFUNCTION, utils::curl_write_to_stream);
|
||||
curl_easy_setopt(curl, CURLOPT_USERAGENT, constants::UserAgent.c_str());
|
||||
curl_easy_setopt(curl, CURLOPT_TIMEOUT_MS, timeout);
|
||||
curl_easy_setopt(curl, CURLOPT_TIMEOUT_MS, params.timeout);
|
||||
res = curl_easy_perform(curl);
|
||||
curl_easy_getinfo(curl, CURLINFO_RESPONSE_CODE, &r.code);
|
||||
if(res != CURLE_OK && verbose > 0){
|
||||
if(res != CURLE_OK && params.verbose > 0){
|
||||
log::error("download_file.curl_easy_perform() failed: {}", curl_easy_strerror(res));
|
||||
}
|
||||
fclose(fp);
|
||||
|
|
|
|||
|
|
@ -4,11 +4,14 @@
|
|||
#include "log.hpp"
|
||||
#include "config.hpp"
|
||||
#include "package_manager.hpp"
|
||||
#include "result.hpp"
|
||||
|
||||
|
||||
int main(int argc, char **argv){
|
||||
gdpm::package_manager::initialize(argc, argv);
|
||||
gdpm::package_manager::execute();
|
||||
gdpm::package_manager::finalize();
|
||||
using namespace gdpm;
|
||||
result_t <package_manager::exec_args> r_input = package_manager::initialize(argc, argv);
|
||||
package_manager::exec_args input = r_input.unwrap_unsafe();
|
||||
package_manager::execute(input.args, input.opts);
|
||||
package_manager::finalize();
|
||||
return 0;
|
||||
}
|
||||
763
src/package.cpp
Normal file
763
src/package.cpp
Normal file
|
|
@ -0,0 +1,763 @@
|
|||
|
||||
#include "package.hpp"
|
||||
#include "error.hpp"
|
||||
#include "rest_api.hpp"
|
||||
#include "config.hpp"
|
||||
#include "cache.hpp"
|
||||
#include "http.hpp"
|
||||
#include "remote.hpp"
|
||||
#include <future>
|
||||
#include <rapidjson/ostreamwrapper.h>
|
||||
#include <rapidjson/prettywriter.h>
|
||||
|
||||
namespace gdpm::package{
|
||||
|
||||
error install(
|
||||
const config::context& config,
|
||||
const package::title_list& package_titles,
|
||||
const package::params& params
|
||||
){
|
||||
using namespace rapidjson;
|
||||
|
||||
/* TODO: Need a way to use remote sources from config until none left */
|
||||
|
||||
/* Check if the package data is already stored in cache. If it is, there
|
||||
is no need to do a lookup to synchronize the local database since we
|
||||
have all the information we need to fetch the asset data. */
|
||||
result_t result = cache::get_package_info_by_title(package_titles);
|
||||
package::info_list p_found = {};
|
||||
package::info_list p_cache = result.unwrap_unsafe();
|
||||
|
||||
/* Synchronize database information and then try to get data again from
|
||||
cache if possible. */
|
||||
if(config.enable_sync){
|
||||
if(p_cache.empty()){
|
||||
result_t result = synchronize_database(config, package_titles);
|
||||
p_cache = result.unwrap_unsafe();
|
||||
}
|
||||
}
|
||||
|
||||
for(const auto& p_title : package_titles){
|
||||
auto found = std::find_if(
|
||||
p_cache.begin(),
|
||||
p_cache.end(),
|
||||
[&p_title](const package::info& p){
|
||||
return p.title == p_title;
|
||||
}
|
||||
);
|
||||
if(found != p_cache.end()){
|
||||
p_found.emplace_back(*found);
|
||||
}
|
||||
}
|
||||
|
||||
/* Found nothing to install so there's nothing to do at this point. */
|
||||
if(p_found.empty()){
|
||||
constexpr const char *message = "No packages found to install.";
|
||||
log::error(message);
|
||||
return error(constants::error::NOT_FOUND, message);
|
||||
}
|
||||
|
||||
log::println("Packages to install: ");
|
||||
for(const auto& p : p_found){
|
||||
std::string output((p.is_installed) ? p.title + " (reinstall)" : p.title);
|
||||
log::print(" {} ", (p.is_installed) ? p.title + " (reinstall)" : p.title);
|
||||
}
|
||||
log::println("");
|
||||
|
||||
if(!params.skip_prompt){
|
||||
if(!utils::prompt_user_yn("Do you want to install these packages? (y/n)"))
|
||||
return error();
|
||||
}
|
||||
|
||||
/* Try and obtain all requested packages. */
|
||||
using ss_pair = std::pair<std::string, std::string>;
|
||||
std::vector<ss_pair> dir_pairs;
|
||||
std::vector<std::future<error>> tasks;
|
||||
rest_api::context rest_api_params = rest_api::make_from_config(config);
|
||||
for(auto& p : p_found){ // TODO: Execute each in parallel using coroutines??
|
||||
|
||||
/* Check if a remote source was provided. If not, then try to get packages
|
||||
in global storage location only. */
|
||||
|
||||
log::info("Fetching asset data for \"{}\"...", p.title);
|
||||
std::string url{config.remote_sources.at(params.remote_source) + rest_api::endpoints::GET_AssetId};
|
||||
std::string package_dir, tmp_dir, tmp_zip;
|
||||
|
||||
/* Retrieve necessary asset data if it was found already in cache */
|
||||
Document doc;
|
||||
bool is_valid = p.download_url.empty() || p.category.empty() || p.description.empty() || p.support_level.empty();
|
||||
if(is_valid){
|
||||
doc = rest_api::get_asset(url, p.asset_id, rest_api_params);
|
||||
if(doc.HasParseError() || doc.IsNull()){
|
||||
constexpr const char *message = "\nError parsing HTTP response.";
|
||||
log::error(message);
|
||||
return error(doc.GetParseError(), message);
|
||||
}
|
||||
p.category = doc["category"].GetString();
|
||||
p.description = doc["description"].GetString();
|
||||
p.support_level = doc["support_level"].GetString();
|
||||
p.download_url = doc["download_url"].GetString();
|
||||
p.download_hash = doc["download_hash"].GetString();
|
||||
}
|
||||
else{
|
||||
log::error("Not a valid package.");
|
||||
/* Package for in cache so no remote request. Still need to populate RapidJson::Document to write to package.json.
|
||||
NOTE: This may not be necessary at all!
|
||||
*/
|
||||
// doc["asset_id"].SetUint64(p.asset_id
|
||||
// doc["type"].SetString(p.type, doc.GetAllocator());
|
||||
// doc["title"].SetString(p.title, doc.GetAllocator());
|
||||
// doc["author"].SetString(p.author, doc.GetAllocator());
|
||||
// doc["author_id"].SetUint64(p.author_id);
|
||||
// doc["version"].SetString(p.version, doc.GetAllocator());
|
||||
// doc["category"].SetString(p.category, doc.GetAllocator());
|
||||
// doc["godot_version"].SetString(p.godot_version, doc.GetAllocator());
|
||||
// doc["cost"].SetString(p.cost, doc.GetAllocator());
|
||||
// doc["description"].SetString(p.description, doc.GetAllocator());
|
||||
// doc["support_level"].SetString(p.support_level, doc.GetAllocator());
|
||||
// doc["download_url"].SetString(p.download_url, doc.GetAllocator());
|
||||
// doc["download_hash"].SetString(p.download_hash, doc.GetAllocator;
|
||||
}
|
||||
|
||||
/* Set directory and temp paths for storage */
|
||||
package_dir = config.packages_dir + "/" + p.title;
|
||||
tmp_dir = config.tmp_dir + "/" + p.title;
|
||||
tmp_zip = tmp_dir + ".zip";
|
||||
|
||||
/* Make directories for packages if they don't exist to keep everything organized */
|
||||
if(!std::filesystem::exists(config.tmp_dir))
|
||||
std::filesystem::create_directories(config.tmp_dir);
|
||||
if(!std::filesystem::exists(config.packages_dir))
|
||||
std::filesystem::create_directories(config.packages_dir);
|
||||
|
||||
/* Dump asset information for lookup into JSON in package directory */
|
||||
if(!std::filesystem::exists(package_dir))
|
||||
std::filesystem::create_directory(package_dir);
|
||||
|
||||
std::ofstream ofs(package_dir + "/asset.json");
|
||||
OStreamWrapper osw(ofs);
|
||||
PrettyWriter<OStreamWrapper> writer(osw);
|
||||
doc.Accept(writer);
|
||||
|
||||
/* Check if we already have a stored temporary file before attempting to download */
|
||||
if(std::filesystem::exists(tmp_zip) && std::filesystem::is_regular_file(tmp_zip)){
|
||||
log::println("Found cached package. Skipping download.", p.title);
|
||||
}
|
||||
else{
|
||||
/* Download all the package files and place them in tmp directory. */
|
||||
log::info_n("Downloading \"{}\"...", p.title);
|
||||
std::string download_url = p.download_url;// doc["download_url"].GetString();
|
||||
std::string title = p.title;// doc["title"].GetString();
|
||||
http::response response = http::download_file(download_url, tmp_zip);
|
||||
if(response.code == http::OK){
|
||||
log::println("Done.");
|
||||
}else{
|
||||
error error(
|
||||
constants::error::HTTP_RESPONSE_ERROR,
|
||||
std::format("HTTP Error: {}", response.code)
|
||||
);
|
||||
log::error(error);
|
||||
return error;
|
||||
}
|
||||
}
|
||||
|
||||
dir_pairs.emplace_back(ss_pair(tmp_zip, package_dir + "/"));
|
||||
|
||||
p.is_installed = true;
|
||||
p.install_path = package_dir;
|
||||
|
||||
/* Extract all the downloaded packages to their appropriate directory location. */
|
||||
for(const auto& p : dir_pairs)
|
||||
utils::extract_zip(p.first.c_str(), p.second.c_str());
|
||||
|
||||
/* Update the cache data with information from */
|
||||
log::info_n("Updating local asset data...");
|
||||
cache::update_package_info(p_found);
|
||||
log::println("done.");
|
||||
// })
|
||||
// );
|
||||
}
|
||||
|
||||
return error();
|
||||
}
|
||||
|
||||
|
||||
error remove(
|
||||
const config::context& config,
|
||||
const string_list& package_titles,
|
||||
const package::params& params
|
||||
){
|
||||
using namespace rapidjson;
|
||||
using namespace std::filesystem;
|
||||
|
||||
/* Find the packages to remove if they're is_installed and show them to the user */
|
||||
result_t result = cache::get_package_info_by_title(package_titles);
|
||||
std::vector<package::info> p_cache = result.unwrap_unsafe();
|
||||
if(p_cache.empty()){
|
||||
error error(
|
||||
constants::error::NOT_FOUND,
|
||||
"\nCould not find any packages to remove."
|
||||
);
|
||||
log::error(error);
|
||||
return error;
|
||||
}
|
||||
|
||||
/* Count number packages in cache flagged as is_installed. If there are none, then there's nothing to do. */
|
||||
size_t p_count = 0;
|
||||
std::for_each(p_cache.begin(), p_cache.end(), [&p_count](const package::info& p){
|
||||
p_count += (p.is_installed) ? 1 : 0;
|
||||
});
|
||||
|
||||
if(p_count == 0){
|
||||
error error(
|
||||
constants::error::NOT_FOUND,
|
||||
"\nNo packages to remove."
|
||||
);
|
||||
log::error(error);
|
||||
return error;
|
||||
}
|
||||
|
||||
log::println("Packages to remove:");
|
||||
for(const auto& p : p_cache)
|
||||
if(p.is_installed)
|
||||
log::print(" {} ", p.title);
|
||||
log::println("");
|
||||
|
||||
if(!params.skip_prompt){
|
||||
if(!utils::prompt_user_yn("Do you want to remove these packages? (y/n)"))
|
||||
return error();
|
||||
}
|
||||
|
||||
log::info_n("Removing packages...");
|
||||
for(auto& p : p_cache){
|
||||
const std::filesystem::path path{config.packages_dir};
|
||||
std::filesystem::remove_all(config.packages_dir + "/" + p.title);
|
||||
if(config.verbose > 0){
|
||||
log::debug("package directory: {}", path.string());
|
||||
}
|
||||
|
||||
/* Traverse the package directory */
|
||||
// for(const auto& entry : recursive_directory_iterator(path)){
|
||||
// if(entry.is_directory()){
|
||||
// }
|
||||
// else if(entry.is_regular_file()){
|
||||
// std::string filename = entry.path().filename().string();
|
||||
// std::string pkg_path = entry.path().lexically_normal().string();
|
||||
|
||||
// // pkg_path = utils::replace_all(pkg_path, " ", "\\ ");
|
||||
// if(filename == "package.json"){
|
||||
// std::string contents = utils::readfile(pkg_path);
|
||||
// Document doc;
|
||||
// if(config.verbose > 0){
|
||||
// log::debug("package path: {}", pkg_path);
|
||||
// log::debug("contents: \n{}", contents);
|
||||
// }
|
||||
// doc.Parse(contents.c_str());
|
||||
// if(doc.IsNull()){
|
||||
// log::println("");
|
||||
// log::error("Could not remove packages. Parsing 'package.json' returned NULL.");
|
||||
// return;
|
||||
// }
|
||||
// }
|
||||
// }
|
||||
// }
|
||||
p.is_installed = false;
|
||||
}
|
||||
log::println("Done.");
|
||||
log::info_n("Updating local asset data...");
|
||||
cache::update_package_info(p_cache);
|
||||
log::println("done.");
|
||||
|
||||
return error();
|
||||
}
|
||||
|
||||
|
||||
/**
|
||||
Removes all local packages.
|
||||
*/
|
||||
error remove_all(
|
||||
const config::context& config,
|
||||
const package::params& params
|
||||
){
|
||||
/* Get the list of all packages to remove then remove */
|
||||
result_t r_installed = cache::get_installed_packages();
|
||||
package::info_list p_installed = r_installed.unwrap_unsafe();
|
||||
result_t r_titles = get_package_titles(p_installed);
|
||||
package::title_list p_titles = r_titles.unwrap_unsafe();
|
||||
return remove(config, p_titles, params);
|
||||
}
|
||||
|
||||
|
||||
error update(
|
||||
const config::context& config,
|
||||
const package::title_list& package_titles,
|
||||
const package::params& params
|
||||
){
|
||||
using namespace rapidjson;
|
||||
|
||||
/* If no package titles provided, update everything and then exit */
|
||||
rest_api::context rest_api_params = rest_api::make_from_config(config);
|
||||
if(package_titles.empty()){
|
||||
std::string url{constants::HostUrl};
|
||||
url += rest_api::endpoints::GET_AssetId;
|
||||
Document doc = rest_api::get_assets_list(url, rest_api_params);
|
||||
if(doc.IsNull()){
|
||||
constexpr const char *message = "Could not get response from server. Aborting.";
|
||||
log::error(message);
|
||||
return error(constants::error::HOST_UNREACHABLE, message);
|
||||
}
|
||||
return error();
|
||||
}
|
||||
|
||||
/* Fetch remote asset data and compare to see if there are package updates */
|
||||
std::vector<std::string> p_updates = {};
|
||||
result_t r_cache = cache::get_package_info_by_title(package_titles);
|
||||
package::info_list p_cache = r_cache.unwrap_unsafe();
|
||||
|
||||
log::println("Packages to update: ");
|
||||
for(const auto& p_title : p_updates)
|
||||
log::print(" {} ", p_title);
|
||||
log::println("");
|
||||
|
||||
/* Check version information to see if packages need updates */
|
||||
for(const auto& p : p_cache){
|
||||
std::string url{constants::HostUrl};
|
||||
url += rest_api::endpoints::GET_AssetId;
|
||||
Document doc = rest_api::get_asset(url, p.asset_id);
|
||||
std::string remote_version = doc["version"].GetString();
|
||||
if(p.version != remote_version){
|
||||
p_updates.emplace_back(p.title);
|
||||
}
|
||||
}
|
||||
|
||||
if(!params.skip_prompt){
|
||||
if(!utils::prompt_user_yn("Do you want to update the following packages? (y/n)"))
|
||||
return error();
|
||||
}
|
||||
|
||||
{
|
||||
error error;
|
||||
error = remove(config, p_updates);
|
||||
error = install(config, p_updates, params);
|
||||
}
|
||||
return error();
|
||||
}
|
||||
|
||||
|
||||
error search(
|
||||
const config::context& config,
|
||||
const package::title_list &package_titles,
|
||||
const package::params& params
|
||||
){
|
||||
result_t r_cache = cache::get_package_info_by_title(package_titles);
|
||||
std::vector<package::info> p_cache = r_cache.unwrap_unsafe();
|
||||
|
||||
if(!p_cache.empty() && !config.enable_sync){
|
||||
print_list(p_cache);
|
||||
return error();
|
||||
}
|
||||
|
||||
rest_api::context rest_api_params = rest_api::make_from_config(config);
|
||||
for(const auto& p_title : package_titles){
|
||||
using namespace rapidjson;
|
||||
|
||||
rest_api_params.filter = http::url_escape(p_title);
|
||||
rest_api_params.verbose = config.verbose;
|
||||
rest_api_params.godot_version = config.godot_version;
|
||||
rest_api_params.max_results = 200;
|
||||
|
||||
std::string request_url{constants::HostUrl};
|
||||
request_url += rest_api::endpoints::GET_Asset;
|
||||
Document doc = rest_api::get_assets_list(request_url, rest_api_params);
|
||||
if(doc.IsNull()){
|
||||
error error(
|
||||
constants::error::HOST_UNREACHABLE,
|
||||
"Could not fetch metadata."
|
||||
);
|
||||
log::error(error);
|
||||
return error;
|
||||
}
|
||||
|
||||
log::info("{} package(s) found...", doc["total_items"].GetInt());
|
||||
print_list(doc);
|
||||
}
|
||||
return error();
|
||||
}
|
||||
|
||||
|
||||
error list(
|
||||
const config::context& config,
|
||||
const args_t& args,
|
||||
const opts_t& opts
|
||||
){
|
||||
using namespace rapidjson;
|
||||
using namespace std::filesystem;
|
||||
|
||||
if(opts.empty() || opts.contains("packages")){
|
||||
result_t r_installed = cache::get_installed_packages();
|
||||
info_list p_installed = r_installed.unwrap_unsafe();
|
||||
if(!p_installed.empty()){
|
||||
log::println("Installed packages: ");
|
||||
print_list(p_installed);
|
||||
}
|
||||
}
|
||||
else if(opts.contains("remote")){
|
||||
remote::print_repositories(config);
|
||||
}
|
||||
else{
|
||||
error error(
|
||||
constants::error::UNKNOWN_COMMAND,
|
||||
"Unrecognized subcommand. Try either 'packages' or 'remote' instead."
|
||||
|
||||
);
|
||||
log::error(error);
|
||||
}
|
||||
return error();
|
||||
}
|
||||
|
||||
|
||||
error export_to(const string_list& paths){
|
||||
/* Get all installed package information for export */
|
||||
result_t r_installed = cache::get_installed_packages();
|
||||
info_list p_installed = r_installed.unwrap_unsafe();
|
||||
|
||||
result_t r_titles = get_package_titles(p_installed);
|
||||
title_list p_titles = r_titles.unwrap_unsafe();
|
||||
|
||||
/* Build string of contents with one package title per line */
|
||||
string output{};
|
||||
std::for_each(p_titles.begin(), p_titles.end(), [&output](const string& p){
|
||||
output += p + "\n";
|
||||
});
|
||||
|
||||
/* Write contents of installed packages in reusable format */
|
||||
for(const auto& path : paths ){
|
||||
std::ofstream of(path);
|
||||
if(std::filesystem::exists(path)){
|
||||
constexpr const char *message = "File or directory exists!";
|
||||
log::error(message);
|
||||
of.close();
|
||||
return error(constants::error::FILE_EXISTS, message);
|
||||
}
|
||||
log::println("writing contents to file");
|
||||
of << output;
|
||||
of.close();
|
||||
}
|
||||
|
||||
return error();
|
||||
}
|
||||
|
||||
|
||||
error link(
|
||||
const config::context& config,
|
||||
const title_list& package_titles,
|
||||
const opts_t& opts
|
||||
){
|
||||
using namespace std::filesystem;
|
||||
|
||||
path_list paths = {};
|
||||
if(opts.contains("path")){
|
||||
paths = opts.at("path");
|
||||
}
|
||||
|
||||
if(paths.empty()){
|
||||
error error(
|
||||
constants::error::PATH_NOT_DEFINED,
|
||||
"No path set. Use '--path' option to set a path."
|
||||
);
|
||||
log::error(error);
|
||||
return error;
|
||||
}
|
||||
result_t r_cache = cache::get_package_info_by_title(package_titles);
|
||||
info_list p_found = {};
|
||||
info_list p_cache = r_cache.unwrap_unsafe();
|
||||
if(p_cache.empty()){
|
||||
error error(
|
||||
constants::error::NOT_FOUND,
|
||||
"Could not find any packages to link."
|
||||
);
|
||||
log::error(error);
|
||||
return error;
|
||||
}
|
||||
|
||||
for(const auto& p_title : package_titles){
|
||||
auto found = std::find_if(p_cache.begin(), p_cache.end(), [&p_title](const package::info& p){ return p.title == p_title; });
|
||||
if(found != p_cache.end()){
|
||||
p_found.emplace_back(*found);
|
||||
}
|
||||
}
|
||||
|
||||
if(p_found.empty()){
|
||||
error error(
|
||||
constants::error::NO_PACKAGE_FOUND,
|
||||
"No packages found to link."
|
||||
);
|
||||
log::error(error);
|
||||
return error;
|
||||
}
|
||||
|
||||
/* Get the storage paths for all packages to create symlinks */
|
||||
const path package_dir{config.packages_dir};
|
||||
for(const auto& p : p_found){
|
||||
for(const auto& path : paths){
|
||||
log::info_n("Creating symlink for \"{}\" package to '{}'...", p.title, path + "/" + p.title);
|
||||
// std::filesystem::path target{config.packages_dir + "/" + p.title};
|
||||
std::filesystem::path target = {current_path().string() + "/" + config.packages_dir + "/" + p.title};
|
||||
std::filesystem::path symlink_path{path + "/" + p.title};
|
||||
if(!std::filesystem::exists(symlink_path.string()))
|
||||
std::filesystem::create_directories(path + "/");
|
||||
std::error_code ec;
|
||||
std::filesystem::create_directory_symlink(target, symlink_path, ec);
|
||||
if(ec){
|
||||
error error(
|
||||
constants::error::STD_ERROR,
|
||||
std::format("Could not create symlink: {}", ec.message())
|
||||
);
|
||||
log::error(error);
|
||||
}
|
||||
log::println("Done.");
|
||||
}
|
||||
}
|
||||
return error();
|
||||
}
|
||||
|
||||
|
||||
error clone(
|
||||
const config::context& config,
|
||||
const title_list& package_titles,
|
||||
const opts_t& paths
|
||||
){
|
||||
using namespace std::filesystem;
|
||||
|
||||
if(paths.empty()){
|
||||
error error(
|
||||
constants::error::PATH_NOT_DEFINED,
|
||||
"No path set. Use '--path' option to set a path."
|
||||
);
|
||||
log::error(error);
|
||||
return error;
|
||||
}
|
||||
|
||||
result_t r_cache = cache::get_package_info_by_title(package_titles);
|
||||
package::info_list p_found = {};
|
||||
package::info_list p_cache = r_cache.unwrap_unsafe();
|
||||
if(p_cache.empty()){
|
||||
error error(
|
||||
constants::error::NO_PACKAGE_FOUND,
|
||||
"Could not find any packages to clone."
|
||||
);
|
||||
log::error(error);
|
||||
return error;
|
||||
}
|
||||
|
||||
for(const auto& p_title : package_titles){
|
||||
auto found = std::find_if(p_cache.begin(), p_cache.end(), [&p_title](const package::info& p){ return p.title == p_title; });
|
||||
if(found != p_cache.end()){
|
||||
p_found.emplace_back(*found);
|
||||
}
|
||||
}
|
||||
|
||||
if(p_found.empty()){
|
||||
error error(
|
||||
constants::error::NO_PACKAGE_FOUND,
|
||||
"No packages found to clone."
|
||||
);
|
||||
log::error(error);
|
||||
return error;
|
||||
}
|
||||
|
||||
/* Get the storage paths for all packages to create clones */
|
||||
const path package_dir{config.packages_dir};
|
||||
for(const auto& p : p_found){
|
||||
for(const auto& path_list : paths){
|
||||
for(const auto& path : path_list.second){
|
||||
log::info("Cloning \"{}\" package to {}", p.title, path + "/" + p.title);
|
||||
std::filesystem::path from{config.packages_dir + "/" + p.title};
|
||||
std::filesystem::path to{path + "/" + p.title};
|
||||
if(!std::filesystem::exists(to.string()))
|
||||
std::filesystem::create_directories(to);
|
||||
|
||||
/* TODO: Add an option to force overwriting (i.e. --overwrite) */
|
||||
std::filesystem::copy(from, to, copy_options::update_existing | copy_options::recursive);
|
||||
}
|
||||
}
|
||||
}
|
||||
return error();
|
||||
}
|
||||
|
||||
|
||||
void print_list(const info_list& packages){
|
||||
for(const auto& p : packages){
|
||||
log::println("{}/{}/{} {} id={}\n\tGodot {}, {}, {}, Last Modified: {}",
|
||||
p.support_level,
|
||||
p.author,
|
||||
p.title,
|
||||
p.version,
|
||||
p.asset_id,
|
||||
p.godot_version,
|
||||
p.cost,
|
||||
p.category,
|
||||
p.modify_date
|
||||
);
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
void print_list(const rapidjson::Document& json){
|
||||
for(const auto& o : json["result"].GetArray()){
|
||||
log::println("{}/{}/{} {} id={}\n\tGodot {}, {}, {}, Last Modified: {}",
|
||||
o["support_level"] .GetString(),
|
||||
o["author"] .GetString(),
|
||||
o["title"] .GetString(),
|
||||
o["version_string"] .GetString(),
|
||||
o["asset_id"] .GetString(),
|
||||
o["godot_version"] .GetString(),
|
||||
o["cost"] .GetString(),
|
||||
o["category"] .GetString(),
|
||||
o["modify_date"] .GetString()
|
||||
);
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
result_t<info_list> get_package_info(const title_list& package_titles){
|
||||
return cache::get_package_info_by_title(package_titles);
|
||||
}
|
||||
|
||||
|
||||
result_t<title_list> get_package_titles(const info_list &packages){
|
||||
title_list package_titles;
|
||||
std::for_each(packages.begin(), packages.end(), [&package_titles](const package::info& p){
|
||||
package_titles.emplace_back(p.title);
|
||||
});
|
||||
return result_t(package_titles, error());
|
||||
}
|
||||
|
||||
|
||||
void clean_temporary(
|
||||
const config::context& config,
|
||||
const title_list& package_titles
|
||||
){
|
||||
if(package_titles.empty()){
|
||||
log::info("Cleaned all temporary files.");
|
||||
std::filesystem::remove_all(config.tmp_dir);
|
||||
}
|
||||
/* Find the path of each packages is_installed then delete temporaries */
|
||||
log::info_n("Cleaning temporary files...");
|
||||
for(const auto& p_title : package_titles){
|
||||
string tmp_zip = config.tmp_dir + "/" + p_title + ".zip";
|
||||
if(config.verbose > 0)
|
||||
log::info("Removed '{}'", tmp_zip);
|
||||
std::filesystem::remove_all(tmp_zip);
|
||||
}
|
||||
log::println("Done.");
|
||||
}


	result_t<info_list> synchronize_database(
		const config::context& config,
		const title_list& package_titles
	){
		using namespace rapidjson;

		rest_api::context rest_api_params = rest_api::make_from_config(config);
		rest_api_params.page = 0;
		int page = 0;
		int page_length = 0;
		// int total_pages = 0;
		int total_items = 0;
		int items_left = 0;

		log::info("Synchronizing database...");
		do{
			/* Make the GET request to fetch one page of data and store it in
			the local package database. Also check whether we need to keep
			going. */
			std::string url{constants::HostUrl};
			url += rest_api::endpoints::GET_Asset;
			Document doc = rest_api::get_assets_list(url, rest_api_params);
			rest_api_params.page += 1;

			if(doc.IsNull()){
				error error(
					constants::error::EMPTY_RESPONSE,
					"Could not get response from server. Aborting."
				);
				log::error(error);
				return result_t(info_list(), error);
			}

			/* Need to know how many pages are left to fetch and how many items
			we get per request. */
			page = doc["page"].GetInt();
			page_length = doc["page_length"].GetInt();
			// total_pages = doc["pages"].GetInt();
			total_items = doc["total_items"].GetInt();
			items_left = total_items - (page + 1) * page_length;
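
			/* Worked example (hypothetical numbers): with total_items = 250 and
			   page_length = 100, items_left is 150 after page 0, 50 after page 1,
			   and -50 after page 2, so the loop makes three requests in total. */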

			// log::info("page: {}, page length: {}, total pages: {}, total items: {}, items left: {}", page, page_length, total_pages, total_items, items_left);

			if(page == 0){
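				/* First page: rebuild the local package cache before inserting
				   fresh rows for this synchronization pass. */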
				error error;
				error = cache::drop_package_database();
				error = cache::create_package_database();
			}

			info_list packages;
			for(const auto& o : doc["result"].GetArray()){
				// log::println("=======================");
				info p{
					.asset_id = std::stoul(o["asset_id"].GetString()),
					.title = o["title"].GetString(),
					.author = o["author"].GetString(),
					.author_id = std::stoul(o["author_id"].GetString()),
					.version = o["version"].GetString(),
					.godot_version = o["godot_version"].GetString(),
					.cost = o["cost"].GetString(),
					.modify_date = o["modify_date"].GetString(),
					.category = o["category"].GetString(),
					.remote_source = url
				};
				packages.emplace_back(p);
			}
			error error = cache::insert_package_info(packages);
			if (error.has_occurred()){
				log::error(error);
				/* FIXME: Should this stop here or keep going? */
			}
			/* Repeat the request with the next page to fetch the remaining
			data, updating the counters above as needed. */

		} while(items_left > 0);

		log::println("Done.");

		return cache::get_package_info_by_title(package_titles);
	}


	result_t<info_list> resolve_dependencies(
		const config::context& config,
		const title_list& package_titles
	){
		result_t r_cache = cache::get_package_info_by_title(package_titles);
		info_list p_cache = r_cache.unwrap_unsafe();
		info_list p_deps = {};

		/* Build a graph of everything to check, then install in order */
		for(const auto& p : p_cache){
			if(p.dependencies.empty())
				continue;

			/* Check if a dependency has its own dependencies. If so, resolve those first. */
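			/* Note: this recursion assumes the dependency graph is acyclic;
			   there is no guard against circular dependencies yet. */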
			for(const auto& d : p.dependencies){
				result_t r_temp = resolve_dependencies(config, {d.title});
				info_list temp = r_temp.unwrap_unsafe();
				utils::move_if_not(temp, p_deps, [](const info& p){ return true; });
			}
		}

		return result_t(p_deps, error());
	}
}
File diff suppressed because it is too large

103
src/remote.cpp
Normal file

@@ -0,0 +1,103 @@

#include "remote.hpp"
|
||||
#include "error.hpp"
|
||||
#include "log.hpp"
|
||||
#include "types.hpp"
|
||||
#include <readline/readline.h>
|
||||
|
||||
namespace gdpm::remote{
|
||||
error _handle_remote(
|
||||
config::context& config,
|
||||
const args_t& args,
|
||||
const opts_t& opts
|
||||
){
|
||||
log::println("_handle_remote");
|
||||
for(const auto& arg : args){
|
||||
log::println("arg: {}", arg);
|
||||
}
|
||||
for(const auto& opt : opts){
|
||||
log::println("opt: {}:{}", opt.first, utils::join(opt.second));
|
||||
}
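
		/* Expected usage, based on the dispatch below:
		       gdpm remote add <name> <url>
		       gdpm remote remove <name> [<name>...]
		       gdpm remote list
		*/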

		/* Check if enough arguments are supplied */
		size_t argc = args.size();
		if (argc < 1){
			print_repositories(config);
			return error();
		}

		/* Check which sub-command is supplied. `argv` excludes the sub-command
		itself, so for `add` the repository name and URL are argv[0] and argv[1]. */
		string sub_command = args.front();
		args_t argv(args.begin()+1, args.end());
		if(sub_command == "add"){
			if(argv.size() < 2){
				error error(
					constants::error::INVALID_ARGS,
					"Invalid number of args"
				);
				log::error(error);
				return error;
			}
			string name = argv[0];
			string url = argv[1];
			add_repositories(config, {{name, url}});
		}
		else if (sub_command == "remove") 	remove_respositories(config, argv);
		// else if (sub_command == "set") 	set_repositories(config::context &context, const repository_map &repos)
		else if (sub_command == "list") 	print_repositories(config);
		else{
			error error(
				constants::error::UNKNOWN,
				"Unknown sub-command. Try 'gdpm help remote' for options."
			);
			log::error(error);
			return error;
		}
		return error();
	}


	void set_repositories(
		config::context& config,
		const repository_map &repos
	){
		config.remote_sources = repos;
	}


	void add_repositories(
		config::context& config,
		const repository_map &repos
	){
		std::for_each(repos.begin(), repos.end(),
			[&config](const string_pair& p){
				config.remote_sources.insert(p);
			}
		);
	}


	void remove_respositories(
		config::context& config,
		const repo_names& names
	){
		std::for_each(names.begin(), names.end(), [&config](const string& repo){
			config.remote_sources.erase(repo);
		});
	}


	void move_respository(
		config::context& config,
		int old_position,
		int new_position
	){

	}

	void print_repositories(const config::context& config){
		log::println("Remote sources:");
		const auto &rs = config.remote_sources;
		std::for_each(rs.begin(), rs.end(), [](const string_pair& p){
			log::println("\t{}: {}", p.first, p.second);
		});
	}
}
113
src/rest_api.cpp

@@ -15,16 +15,12 @@
#include <curlpp/Exception.hpp>

namespace gdpm::rest_api{
	bool register_account(const std::string& username, const std::string& password, const std::string& email){
		return false;
	}

	bool login(const std::string& username, const std::string& password){
		return false;
	}

	bool logout(){
		return false;

	context make_from_config(const config::context& config){
		context params = make_context();
		params.godot_version = config.godot_version;
		params.verbose = config.verbose;
		return params;
	}

	context make_context(type_e type, int category, support_e support, const std::string& filter, const std::string& user, const std::string& godot_version, int max_results, int page, sort_e sort, bool reverse, int verbose){

@@ -44,7 +40,29 @@ namespace gdpm::rest_api{
		return params;
	}

	rapidjson::Document _parse_json(const std::string& r, int verbose){
	bool register_account(
		const string& username,
		const string& password,
		const string& email
	){
		return false;
	}

	bool login(
		const string& username,
		const string& password
	){
		return false;
	}

	bool logout(){
		return false;
	}

	rapidjson::Document _parse_json(
		const string& r,
		int verbose
	){
		using namespace rapidjson;
		Document d;
		d.Parse(r.c_str());

@@ -58,7 +76,7 @@
		return d;
	}

	std::string to_string(type_e type){
	string to_string(type_e type){
		std::string _s{"type="};
		switch(type){
			case any: _s += "any"; break;

@@ -68,8 +86,8 @@
		return _s;
	}

	std::string to_string(support_e support){
		std::string _s{"support="};
	string to_string(support_e support){
		string _s{"support="};
		switch(support){
			case all: _s += "official+community+testing"; break;
			case official: _s += "official"; break;

@@ -79,8 +97,8 @@
		return _s;
	}

	std::string to_string(sort_e sort){
		std::string _s{"sort="};
	string to_string(sort_e sort){
		string _s{"sort="};
		switch(sort){
			case none: _s += ""; break;
			case rating: _s += "rating"; break;

@@ -91,16 +109,19 @@
		return _s;
	}

	std::string _prepare_request(const std::string &url, const context &c){
		std::string request_url{url};
	string _prepare_request(
		const string &url,
		const context &c
	){
		string request_url{url};
		request_url += to_string(c.type);
		request_url += (c.category <= 0) ? "&category=" : "&category="+fmt::to_string(c.category);
		request_url += (c.category <= 0) ? "&category=" : "&category="+std::to_string(c.category);
		request_url += "&" + to_string(c.support);
		request_url += "&" + to_string(c.sort);
		request_url += (!c.filter.empty()) ? "&filter="+c.filter : "";
		request_url += (!c.godot_version.empty()) ? "&godot_version="+c.godot_version : "";
		request_url += "&max_results=" + fmt::to_string(c.max_results);
		request_url += "&page=" + fmt::to_string(c.page);
		request_url += "&max_results=" + std::to_string(c.max_results);
		request_url += "&page=" + std::to_string(c.page);
		request_url += (c.reverse) ? "&reverse" : "";
		return request_url;
	}
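	/* Example of an assembled query (hypothetical values; assumes the base URL
	   already carries its own separator, e.g. a trailing '?'):
	   ...type=any&category=&support=official+community+testing&sort=rating&max_results=100&page=0
	*/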

@@ -122,8 +143,12 @@
		);
	}

	rapidjson::Document configure(const std::string& url, type_e type, int verbose){
		std::string request_url{url};
	rapidjson::Document configure(
		const string& url,
		type_e type,
		int verbose
	){
		string request_url{url};
		request_url += to_string(type);
		http::response r = http::request_get(url);
		if(verbose > 0)

@@ -131,7 +156,20 @@
		return _parse_json(r.body);
	}

	rapidjson::Document get_assets_list(const std::string& url, type_e type, int category, support_e support, const std::string& filter,const std::string& user, const std::string& godot_version, int max_results, int page, sort_e sort, bool reverse, int verbose){
	rapidjson::Document get_assets_list(
		const string& url,
		type_e type,
		int category,
		support_e support,
		const string& filter,
		const string& user,
		const string& godot_version,
		int max_results,
		int page,
		sort_e sort,
		bool reverse,
		int verbose
	){
		context c{
			.type = type,
			.category = category,

@@ -148,16 +186,23 @@
		return get_assets_list(url, c);
	}

	rapidjson::Document get_assets_list(const std::string& url, const context& c){
		std::string request_url = _prepare_request(url, c);
	rapidjson::Document get_assets_list(
		const string& url,
		const context& c
	){
		string request_url = _prepare_request(url, c);
		http::response r = http::request_get(request_url);
		if(c.verbose > 0)
			log::info("URL: {}", request_url);
		return _parse_json(r.body, c.verbose);
	}

	rapidjson::Document get_asset(const std::string& url, int asset_id, const context& params){
		std::string request_url = _prepare_request(url, params);
	rapidjson::Document get_asset(
		const string& url,
		int asset_id,
		const context& params
	){
		string request_url = _prepare_request(url, params);
		utils::replace_all(request_url, "{id}", std::to_string(asset_id));
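		/* The endpoint URL is assumed to contain an "{id}" placeholder, which
		   is substituted with the requested asset id here. */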
		http::response r = http::request_get(request_url.c_str());
		if(params.verbose > 0)

@@ -187,16 +232,16 @@

	}

	std::string review_asset_edit(int asset_id){
		return std::string();
	string review_asset_edit(int asset_id){
		return string();
	}

	std::string accept_asset_edit(int asset_id){
		return std::string();
	string accept_asset_edit(int asset_id){
		return string();
	}

	std::string reject_asset_edit(int asset_id){
		return std::string();
	string reject_asset_edit(int asset_id){
		return string();
	}

	} // namespace edits

@@ -66,14 +66,22 @@ namespace gdpm::utils{
		return result;
	}

	std::string replace_first(std::string &s, const std::string &from, const std::string &to){
	std::string replace_first(
		std::string &s,
		const std::string &from,
		const std::string &to
	){
		size_t pos = s.find(from);
		if(pos == std::string::npos)
			return s;
		return s.replace(pos, from.length(), to);
	}

	std::string replace_all(std::string& s, const std::string& from, const std::string& to){
	std::string replace_all(
		std::string& s,
		const std::string& from,
		const std::string& to
	){
		size_t pos = 0;
		while((pos = s.find(from, pos)) != std::string::npos){
			/* Replace only the matched substring, then continue searching. */
			s.replace(pos, from.length(), to);

@@ -83,7 +91,11 @@
	}

	/* Ref: https://gist.github.com/mobius/1759816 */
	int extract_zip(const char *archive, const char *dest, int verbose){
	int extract_zip(
		const char *archive,
		const char *dest,
		int verbose
	){
		const char *prog = "gdpm";
		struct zip *za;
		struct zip_file *zf;

@@ -179,4 +191,15 @@
		sleep_for(millis);
		// sleep_until(system_clock::now() + millis);
	}

	std::string join(
		const std::vector<std::string>& target,
		const std::string& delimiter
	){
		std::string o;
		std::for_each(target.begin(), target.end(), [&o, &delimiter](const std::string& s){
			o += s + delimiter;
		});
		return o;
	}
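	/* Usage note: join({"a", "b"}, ", ") yields "a, b, "; the delimiter is
	   also appended after the final element. */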
} // namespace gdpm::utils