Fixed issue where assets would not install due to directory not being created

David Allen 2022-07-30 09:34:21 -05:00
parent 2fd55de71e
commit 060d19bda4
10 changed files with 193 additions and 137 deletions
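The fix ensures the cache directory exists before sqlite3_open() is called, since SQLite does not create missing parent directories itself. A minimal sketch of that pattern, assuming the path names the SQLite database file so its parent directory is what must exist (the helper and parameter names are illustrative, not gdpm's):

// Sketch only: create missing parent directories before opening the database.
// open_cache_db and db_path are illustrative names, not taken from gdpm.
#include <filesystem>
#include <string>
#include <sqlite3.h>

int open_cache_db(const std::string& db_path, sqlite3** out_db){
    namespace fs = std::filesystem;
    const fs::path parent = fs::path(db_path).parent_path();
    if(!parent.empty() && !fs::exists(parent))
        fs::create_directories(parent);           // no-op if it already exists
    return sqlite3_open(db_path.c_str(), out_db); // SQLITE_OK on success
}

In the hunks below, create_package_database() and get_package_info_by_title() perform this existence check against cache_path before opening the database.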


@@ -4,24 +4,31 @@
#include "constants.hpp"
#include "package_manager.hpp"
#include "utils.hpp"
#include <filesystem>
#include <string>
namespace gdpm::cache{
int create_package_database(){
int create_package_database(const std::string& cache_path, const std::string& table_name){
sqlite3 *db;
sqlite3_stmt *res;
char *errmsg;
int rc = sqlite3_open(GDPM_PACKAGE_CACHE_PATH, &db);
/* Check and make sure directory is created before attempting to open */
if(!std::filesystem::exists(cache_path)){
log::info("Creating cache directories...{}", cache_path);
std::filesystem::create_directories(cache_path);
}
int rc = sqlite3_open(cache_path.c_str(), &db);
if(rc != SQLITE_OK){
log::error("create_package_database.sqlite3_open(): {}", sqlite3_errmsg(db));
sqlite3_close(db);
return rc;
}
constexpr const char *sql = "CREATE TABLE IF NOT EXISTS "
GDPM_PACKAGE_CACHE_TABLENAME "("
std::string sql = "CREATE TABLE IF NOT EXISTS " +
table_name + "("
"id INTEGER PRIMARY KEY AUTOINCREMENT,"
"asset_id INT NOT NULL,"
"type INT NOT NULL,"
@@ -42,7 +49,7 @@ namespace gdpm::cache{
"install_path TEXT NOT NULL);";
// rc = sqlite3_prepare_v2(db, "SELECT", -1, &res, 0);
rc = sqlite3_exec(db, sql, nullptr, nullptr, &errmsg);
rc = sqlite3_exec(db, sql.c_str(), nullptr, nullptr, &errmsg);
if(rc != SQLITE_OK){
// log::error("Failed to fetch data: {}\n", sqlite3_errmsg(db));
log::error("create_package_database.sqlite3_exec(): {}", errmsg);
@@ -55,7 +62,7 @@ namespace gdpm::cache{
}
int insert_package_info(const std::vector<package_info>& packages){
int insert_package_info(const std::vector<package_info>& packages, const std::string& cache_path, const std::string& table_name){
sqlite3 *db;
sqlite3_stmt *res;
char *errmsg = nullptr;
@@ -63,12 +70,12 @@ namespace gdpm::cache{
/* Prepare values to use in sql statement */
std::string sql{"BEGIN TRANSACTION; "};
for(const auto& p : packages){
sql += "INSERT INTO " GDPM_PACKAGE_CACHE_TABLENAME " (" GDPM_PACKAGE_CACHE_COLNAMES ") ";
sql += "INSERT INTO " + table_name + " (" GDPM_PACKAGE_CACHE_COLNAMES ") ";
sql += "VALUES (" + to_values(p) + "); ";
}
sql += "COMMIT;";
// log::println("{}", sql);
int rc = sqlite3_open(GDPM_PACKAGE_CACHE_PATH, &db);
int rc = sqlite3_open(cache_path.c_str(), &db);
if(rc != SQLITE_OK){
log::error("insert_package_info.sqlite3_open(): {}", sqlite3_errmsg(db));
sqlite3_close(db);
@@ -86,7 +93,7 @@ namespace gdpm::cache{
}
std::vector<package_info> get_package_info_by_id(const std::vector<size_t>& package_ids){
std::vector<package_info> get_package_info_by_id(const std::vector<size_t>& package_ids, const std::string& cache_path, const std::string& table_name){
sqlite3 *db;
sqlite3_stmt *res;
char *errmsg = nullptr;
@@ -121,7 +128,7 @@ namespace gdpm::cache{
return 0;
};
int rc = sqlite3_open(GDPM_PACKAGE_CACHE_PATH, &db);
int rc = sqlite3_open(cache_path.c_str(), &db);
if(rc != SQLITE_OK){
log::error("get_package_info_by_id.sqlite3_open(): {}", sqlite3_errmsg(db));
sqlite3_close(db);
@@ -129,7 +136,7 @@ namespace gdpm::cache{
}
for(const auto& p_id : package_ids){
sql += "SELECT * FROM " GDPM_PACKAGE_CACHE_TABLENAME " WHERE asset_id=" + fmt::to_string(p_id)+ ";\n";
sql += "SELECT * FROM " + table_name + " WHERE asset_id=" + fmt::to_string(p_id)+ ";\n";
}
sql += "COMMIT;\n";
rc = sqlite3_exec(db, sql.c_str(), callback, (void*)&p_vector, &errmsg);
@@ -144,7 +151,7 @@ namespace gdpm::cache{
}
std::vector<package_info> get_package_info_by_title(const std::vector<std::string>& package_titles){
std::vector<package_info> get_package_info_by_title(const std::vector<std::string>& package_titles, const std::string& cache_path, const std::string& table_name){
sqlite3 *db;
sqlite3_stmt *res;
char *errmsg = nullptr;
@@ -178,7 +185,11 @@ namespace gdpm::cache{
return 0;
};
int rc = sqlite3_open(GDPM_PACKAGE_CACHE_PATH, &db);
/* Check to make sure the directory is there before attempting to open */
if(!std::filesystem::exists(cache_path))
std::filesystem::create_directories(cache_path);
int rc = sqlite3_open(cache_path.c_str(), &db);
if(rc != SQLITE_OK){
log::error("get_package_info_by_title.sqlite3_open(): {}", sqlite3_errmsg(db));
sqlite3_close(db);
@@ -187,7 +198,7 @@ namespace gdpm::cache{
std::string sql{"BEGIN TRANSACTION;"};
for(const auto& p_title : package_titles){
sql += "SELECT * FROM " GDPM_PACKAGE_CACHE_TABLENAME " WHERE title='" + p_title + "';";
sql += "SELECT * FROM " + table_name + " WHERE title='" + p_title + "';";
}
sql += "COMMIT;";
// log::println(sql);
@@ -203,7 +214,7 @@ namespace gdpm::cache{
}
std::vector<package_info> get_installed_packages(){
std::vector<package_info> get_installed_packages(const std::string& cache_path, const std::string& table_name){
sqlite3 *db;
sqlite3_stmt *res;
char *errmsg = nullptr;
@@ -235,14 +246,14 @@ namespace gdpm::cache{
return 0;
};
int rc = sqlite3_open(GDPM_PACKAGE_CACHE_PATH, &db);
int rc = sqlite3_open(cache_path.c_str(), &db);
if(rc != SQLITE_OK){
log::error("get_installed_packages.sqlite3_open(): {}", sqlite3_errmsg(db));
sqlite3_close(db);
return {};
}
sql += "SELECT * FROM " GDPM_PACKAGE_CACHE_TABLENAME " WHERE is_installed=1; COMMIT;";
sql += "SELECT * FROM " + table_name + " WHERE is_installed=1; COMMIT;";
rc = sqlite3_exec(db, sql.c_str(), callback, (void*)&p_vector, &errmsg);
if(rc != SQLITE_OK){
log::error("get_installed_packages.sqlite3_exec(): {}", errmsg);
@@ -255,12 +266,12 @@ namespace gdpm::cache{
}
int update_package_info(const std::vector<package_info>& packages){
int update_package_info(const std::vector<package_info>& packages, const std::string& cache_path, const std::string& table_name){
sqlite3 *db;
sqlite3_stmt *res;
char *errmsg = nullptr;
int rc = sqlite3_open(GDPM_PACKAGE_CACHE_PATH, &db);
int rc = sqlite3_open(cache_path.c_str(), &db);
if(rc != SQLITE_OK){
log::error("update_package_info.sqlite3_open(): {}", sqlite3_errmsg(db));
sqlite3_close(db);
@@ -269,7 +280,7 @@ namespace gdpm::cache{
std::string sql;
for(const auto& p : packages){
sql += "UPDATE " GDPM_PACKAGE_CACHE_TABLENAME " SET "
sql += "UPDATE " + table_name + " SET "
" asset_id=" + fmt::to_string(p.asset_id) + ", "
" type='" + p.type + "', "
" title='" + p.title + "', "
@@ -303,13 +314,13 @@ namespace gdpm::cache{
}
int delete_packages(const std::vector<std::string>& package_titles){
int delete_packages(const std::vector<std::string>& package_titles, const std::string& cache_path, const std::string& table_name){
sqlite3 *db;
sqlite3_stmt *res;
char *errmsg = nullptr;
std::string sql;
int rc = sqlite3_open(GDPM_PACKAGE_CACHE_PATH, &db);
int rc = sqlite3_open(cache_path.c_str(), &db);
if(rc != SQLITE_OK){
log::error("delete_packages.sqlite3_open(): {}", sqlite3_errmsg(db));
sqlite3_close(db);
@@ -317,7 +328,7 @@ namespace gdpm::cache{
}
for(const auto& p_title : package_titles){
sql += "DELETE FROM " GDPM_PACKAGE_CACHE_PATH " WHERE title='"
sql += "DELETE FROM " + table_name + " WHERE title='"
+ p_title + "';\n";
}
rc = sqlite3_exec(db, sql.c_str(), nullptr, nullptr, &errmsg);
@@ -332,13 +343,13 @@ namespace gdpm::cache{
}
int delete_packages(const std::vector<size_t>& package_ids){
int delete_packages(const std::vector<size_t>& package_ids, const std::string& cache_path, const std::string& table_name){
sqlite3 *db;
sqlite3_stmt *res;
char *errmsg = nullptr;
std::string sql;
int rc = sqlite3_open(GDPM_PACKAGE_CACHE_PATH, &db);
int rc = sqlite3_open(cache_path.c_str(), &db);
if(rc != SQLITE_OK){
log::error("delete_packages.sqlite3_open(): {}", errmsg);
sqlite3_close(db);
@@ -346,7 +357,7 @@ namespace gdpm::cache{
}
for(const auto& p_id : package_ids){
sql += "DELETE FROM " GDPM_PACKAGE_CACHE_PATH " WHERE asset_id="
sql += "DELETE FROM " + table_name + " WHERE asset_id="
+ fmt::to_string(p_id) + ";\n";
}
rc = sqlite3_exec(db, sql.c_str(), nullptr, nullptr, &errmsg);
@@ -361,13 +372,13 @@ namespace gdpm::cache{
}
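The hunks above splice package titles and ids directly into the SQL text. As a hedged alternative, SQLite's bound parameters handle quoting for the values (the table name still has to be concatenated, since identifiers cannot be bound); the function below is an illustrative sketch, not gdpm code:

// Sketch: query by title with a bound parameter instead of string
// concatenation. Assumes an already-open sqlite3* and a table with a
// "title" column, as suggested by the diff; error handling is minimal.
#include <string>
#include <vector>
#include <sqlite3.h>

std::vector<std::string> titles_matching(sqlite3* db, const std::string& table,
                                         const std::string& title){
    std::vector<std::string> rows;
    const std::string sql = "SELECT title FROM " + table + " WHERE title = ?1;";
    sqlite3_stmt* stmt = nullptr;
    if(sqlite3_prepare_v2(db, sql.c_str(), -1, &stmt, nullptr) != SQLITE_OK)
        return rows;
    sqlite3_bind_text(stmt, 1, title.c_str(), -1, SQLITE_TRANSIENT);
    while(sqlite3_step(stmt) == SQLITE_ROW){
        const unsigned char* text = sqlite3_column_text(stmt, 0);
        if(text)
            rows.emplace_back(reinterpret_cast<const char*>(text));
    }
    sqlite3_finalize(stmt);
    return rows;
}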
int drop_package_database(){
int drop_package_database(const std::string& cache_path, const std::string& table_name){
sqlite3 *db;
sqlite3_stmt *res;
char *errmsg = nullptr;
std::string sql{"DROP TABLE IF EXISTS " GDPM_PACKAGE_CACHE_TABLENAME ";\n"};
std::string sql{"DROP TABLE IF EXISTS " + table_name + ";\n"};
int rc = sqlite3_open(GDPM_PACKAGE_CACHE_PATH, &db);
int rc = sqlite3_open(cache_path.c_str(), &db);
if(rc != SQLITE_OK){
log::error("drop_package_database.sqlite3_open(): {}", sqlite3_errmsg(db));
sqlite3_close(db);


@@ -28,7 +28,7 @@
namespace gdpm::config{
config_context config;
std::string to_json(const config_context& params){
auto _build_json_array = [](std::vector<std::string> a){
auto _build_json_array = [](std::set<std::string> a){
std::string o{"["};
for(const std::string& src : a)
o += "\"" + src + "\",";
@@ -104,7 +104,8 @@ namespace gdpm::config{
if(doc["remote_sources"].IsArray()){
const Value& srcs = doc["remote_sources"];
for(auto& src : srcs.GetArray()){
config.remote_sources.emplace_back(src.GetString());
// config.remote_sources.push_back(src.GetString());
config.remote_sources.insert(src.GetString());
}
} else{
log::error("Malformed sources found.");
@@ -157,7 +158,7 @@ namespace gdpm::config{
return 0;
}
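remote_sources is now a std::set<std::string>, so the loader above inserts each parsed URL instead of pushing it onto a vector. A self-contained sketch of that parse with rapidjson, under the assumption that the config JSON holds a top-level "remote_sources" array as the hunk above suggests:

// Sketch: reading "remote_sources" from JSON into a std::set, mirroring the
// vector -> set change above. The key name comes from the diff; everything
// else is illustrative.
#include <set>
#include <string>
#include <rapidjson/document.h>

std::set<std::string> parse_remote_sources(const char* json){
    std::set<std::string> sources;
    rapidjson::Document doc;
    doc.Parse(json);
    if(doc.HasParseError() || !doc.HasMember("remote_sources") || !doc["remote_sources"].IsArray())
        return sources;
    for(const auto& src : doc["remote_sources"].GetArray())
        if(src.IsString())
            sources.insert(src.GetString()); // duplicates collapse to one entry
    return sources;
}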
config_context make_config(const std::string& username, const std::string& password, const std::string& path, const std::string& token, const std::string& godot_version, const std::string& packages_dir, const std::string& tmp_dir, const std::vector<std::string>& remote_sources, size_t threads, size_t timeout, bool enable_sync, bool enable_file_logging, int verbose){
config_context make_config(const std::string& username, const std::string& password, const std::string& path, const std::string& token, const std::string& godot_version, const std::string& packages_dir, const std::string& tmp_dir, const std::set<std::string>& remote_sources, size_t threads, size_t timeout, bool enable_sync, bool enable_file_logging, int verbose){
config_context config {
.username = username,
.password = password,


@@ -85,6 +85,9 @@ namespace gdpm::package_manager{
void install_packages(const std::vector<std::string>& package_titles){
using namespace rapidjson;
params.verbose = config.verbose;
/* TODO: Need a way to use remote sources from config until none are left */
/* Check if the package data is already stored in cache. If it is, there
is no need to do a lookup to synchronize the local database since we
@@ -101,7 +104,6 @@ namespace gdpm::package_manager{
}
}
// FIXME: This does not correctly mark the returned package as is_installed
for(const auto& p_title : package_titles){
auto found = std::find_if(p_cache.begin(), p_cache.end(), [&p_title](const package_info& p){ return p.title == p_title; });
if(found != p_cache.end()){
@@ -129,87 +131,92 @@ namespace gdpm::package_manager{
std::vector<ss_pair> dir_pairs;
for(auto& p : p_found){
log::info_n("Fetching asset data for \"{}\"...", p.title);
std::string url{constants::HostUrl}, package_dir, tmp_dir, tmp_zip;
url += rest_api::endpoints::GET_AssetId;
/* TODO: Try fetching the data with all available remote sources until retrieved */
for(const auto& remote_url : config.remote_sources){
std::string url{remote_url}, package_dir, tmp_dir, tmp_zip;
url += rest_api::endpoints::GET_AssetId;
/* Retrieve necessary asset data if it was found already in cache */
Document doc;
// log::debug("download_url: {}\ncategory: {}\ndescription: {}\nsupport_level: {}", p.download_url, p.category, p.description, p.support_level);
if(p.download_url.empty() || p.category.empty() || p.description.empty() || p.support_level.empty()){
params.verbose = config.verbose;
doc = rest_api::get_asset(url, p.asset_id, params);
if(doc.HasParseError() || doc.IsNull()){
log::println("");
log::error("Error parsing HTTP response. (error code: {})", doc.GetParseError());
return;
/* Retrieve necessary asset data if it was found already in cache */
Document doc;
// log::debug("download_url: {}\ncategory: {}\ndescription: {}\nsupport_level: {}", p.download_url, p.category, p.description, p.support_level);
if(p.download_url.empty() || p.category.empty() || p.description.empty() || p.support_level.empty()){
params.verbose = config.verbose;
doc = rest_api::get_asset(url, p.asset_id, params);
if(doc.HasParseError() || doc.IsNull()){
log::println("");
log::error("Error parsing HTTP response. (error code: {})", doc.GetParseError());
return;
}
p.category = doc["category"].GetString();
p.description = doc["description"].GetString();
p.support_level = doc["support_level"].GetString();
p.download_url = doc["download_url"].GetString();
p.download_hash = doc["download_hash"].GetString();
}
p.category = doc["category"].GetString();
p.description = doc["description"].GetString();
p.support_level = doc["support_level"].GetString();
p.download_url = doc["download_url"].GetString();
p.download_hash = doc["download_hash"].GetString();
}
else{
/* Package found in cache, so no remote request is needed. Still need to populate the rapidjson::Document to write package.json.
NOTE: This may not be necessary at all!
*/
// doc["asset_id"].SetUint64(p.asset_id
// doc["type"].SetString(p.type, doc.GetAllocator());
// doc["title"].SetString(p.title, doc.GetAllocator());
// doc["author"].SetString(p.author, doc.GetAllocator());
// doc["author_id"].SetUint64(p.author_id);
// doc["version"].SetString(p.version, doc.GetAllocator());
// doc["category"].SetString(p.category, doc.GetAllocator());
// doc["godot_version"].SetString(p.godot_version, doc.GetAllocator());
// doc["cost"].SetString(p.cost, doc.GetAllocator());
// doc["description"].SetString(p.description, doc.GetAllocator());
// doc["support_level"].SetString(p.support_level, doc.GetAllocator());
// doc["download_url"].SetString(p.download_url, doc.GetAllocator());
// doc["download_hash"].SetString(p.download_hash, doc.GetAllocator;
}
/* Set directory and temp paths for storage */
package_dir = config.packages_dir + "/" + p.title;
tmp_dir = config.tmp_dir + "/" + p.title;
tmp_zip = tmp_dir + ".zip";
/* Make directories for packages if they don't exist to keep everything organized */
if(!std::filesystem::exists(config.tmp_dir))
std::filesystem::create_directories(config.tmp_dir);
if(!std::filesystem::exists(config.packages_dir))
std::filesystem::create_directories(config.packages_dir);
/* Dump asset information for lookup into JSON in package directory */
if(!std::filesystem::exists(package_dir))
std::filesystem::create_directory(package_dir);
std::ofstream ofs(package_dir + "/package.json");
OStreamWrapper osw(ofs);
PrettyWriter<OStreamWrapper> writer(osw);
doc.Accept(writer);
/* Check if we already have a stored temporary file before attempting to download */
if(std::filesystem::exists(tmp_zip) && std::filesystem::is_regular_file(tmp_zip)){
log::println("Found cached package. Skipping download.", p.title);
}
else{
/* Download all the package files and place them in tmp directory. */
log::print("Downloading \"{}\"...", p.title);
std::string download_url = p.download_url;// doc["download_url"].GetString();
std::string title = p.title;// doc["title"].GetString();
http::response response = http::download_file(download_url, tmp_zip);
if(response.code == 200){
log::println("Done.");
}else{
log::error("Something went wrong...(code {})", response.code);
return;
else{
/* Package found in cache, so no remote request is needed. Still need to populate the rapidjson::Document to write package.json.
NOTE: This may not be necessary at all!
*/
// doc["asset_id"].SetUint64(p.asset_id
// doc["type"].SetString(p.type, doc.GetAllocator());
// doc["title"].SetString(p.title, doc.GetAllocator());
// doc["author"].SetString(p.author, doc.GetAllocator());
// doc["author_id"].SetUint64(p.author_id);
// doc["version"].SetString(p.version, doc.GetAllocator());
// doc["category"].SetString(p.category, doc.GetAllocator());
// doc["godot_version"].SetString(p.godot_version, doc.GetAllocator());
// doc["cost"].SetString(p.cost, doc.GetAllocator());
// doc["description"].SetString(p.description, doc.GetAllocator());
// doc["support_level"].SetString(p.support_level, doc.GetAllocator());
// doc["download_url"].SetString(p.download_url, doc.GetAllocator());
// doc["download_hash"].SetString(p.download_hash, doc.GetAllocator;
}
/* Set directory and temp paths for storage */
package_dir = config.packages_dir + "/" + p.title;
tmp_dir = config.tmp_dir + "/" + p.title;
tmp_zip = tmp_dir + ".zip";
/* Make directories for packages if they don't exist to keep everything organized */
if(!std::filesystem::exists(config.tmp_dir))
std::filesystem::create_directories(config.tmp_dir);
if(!std::filesystem::exists(config.packages_dir))
std::filesystem::create_directories(config.packages_dir);
/* Dump asset information for lookup into JSON in package directory */
if(!std::filesystem::exists(package_dir))
std::filesystem::create_directory(package_dir);
std::ofstream ofs(package_dir + "/package.json");
OStreamWrapper osw(ofs);
PrettyWriter<OStreamWrapper> writer(osw);
doc.Accept(writer);
/* Check if we already have a stored temporary file before attempting to download */
if(std::filesystem::exists(tmp_zip) && std::filesystem::is_regular_file(tmp_zip)){
log::println("Found cached package. Skipping download.", p.title);
}
else{
/* Download all the package files and place them in tmp directory. */
log::print("Downloading \"{}\"...", p.title);
std::string download_url = p.download_url;// doc["download_url"].GetString();
std::string title = p.title;// doc["title"].GetString();
http::response response = http::download_file(download_url, tmp_zip);
if(response.code == 200){
log::println("Done.");
}else{
log::error("Something went wrong...(code {})", response.code);
return;
}
}
dir_pairs.emplace_back(ss_pair(tmp_zip, package_dir + "/"));
p.is_installed = true;
p.install_path = package_dir;
}
dir_pairs.emplace_back(ss_pair(tmp_zip, package_dir + "/"));
p.is_installed = true;
p.install_path = package_dir;
}
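The TODOs above describe walking config.remote_sources and trying each URL until one of them yields the asset. A hedged sketch of that fallback loop, with fetch_from() stubbed in place of the real rest_api::get_asset call so the snippet stands on its own:

// Sketch: try each configured remote until one answers. fetch_from() is a
// hypothetical stand-in for the real REST call; here it is stubbed so the
// sketch compiles, with an empty string meaning "this remote failed".
#include <set>
#include <string>

static std::string fetch_from(const std::string& /*remote*/, size_t /*asset_id*/){
    return {}; // placeholder: pretend the remote did not answer
}

std::string fetch_with_fallback(const std::set<std::string>& remotes, size_t asset_id){
    for(const auto& remote : remotes){
        std::string body = fetch_from(remote, asset_id);
        if(!body.empty())
            return body; // first remote that answers wins
    }
    return {};           // every remote failed
}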
/* Extract all the downloaded packages to their appropriate directory location. */
@@ -512,18 +519,19 @@ namespace gdpm::package_manager{
void add_remote_repository(const std::string& repository, ssize_t offset){
auto& s = config.remote_sources;
auto iter = (offset > 0) ? s.begin() + offset : s.end() - offset;
config.remote_sources.insert(iter, repository);
// auto iter = (offset > 0) ? s.begin() + offset : s.end() - offset;
// config.remote_sources.insert(iter, repository);
config.remote_sources.insert(repository);
}
void delete_remote_repository(const std::string& repository){
auto& s = config.remote_sources;
std::erase(s, repository);
(void)std::remove_if(s.begin(), s.end(), [&repository](const std::string& rs){
return repository == rs;
});
s.erase(repository);
// std::erase(s, repository);
// (void)std::remove_if(s.begin(), s.end(), [&repository](const std::string& rs){
// return repository == rs;
// });
}
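With a std::set there is no positional insert, which is why the offset-based iterator arithmetic above is commented out; insert() silently ignores duplicates and erase() takes the value directly. A minimal standalone sketch of those semantics (the URL is a placeholder, not a gdpm default):

// Sketch: the std::set behaviour the rewritten helpers rely on.
#include <cassert>
#include <set>
#include <string>

int main(){
    std::set<std::string> remotes;
    remotes.insert("https://example.com/asset-library");
    remotes.insert("https://example.com/asset-library"); // duplicate: ignored
    assert(remotes.size() == 1);
    remotes.erase("https://example.com/asset-library");  // erase by value, no iterator needed
    assert(remotes.empty());
    return 0;
}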
@@ -594,7 +602,7 @@ namespace gdpm::package_manager{
("set-temporary-directory", "Set the local temporary storage location.", cxxopts::value<std::string>())
("timeout", "Set the amount of time to wait for a response.", cxxopts::value<size_t>())
("no-sync", "Disable synchronizing with remote.", cxxopts::value<bool>()->implicit_value("true")->default_value("false"))
("y,yes", "Bypass yes/no prompt for installing or removing packages.")
("y,no-prompt", "Bypass yes/no prompt for installing or removing packages.")
("v,verbose", "Show verbose output.", cxxopts::value<int>()->implicit_value("1")->default_value("0"), "0-5")
;
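Renaming the long option from yes to no-prompt means any existing result.count("yes") checks elsewhere would need the new name. A standalone sketch of reading the renamed flag with cxxopts; only the -y/--no-prompt option text comes from this diff, the rest is illustrative:

// Sketch: parsing the renamed flag. Only "y,no-prompt" and its description
// are taken from the diff; the program name and this main() are made up.
#include <cxxopts.hpp>

int main(int argc, char** argv){
    cxxopts::Options options("gdpm", "example parser for the renamed flag");
    options.add_options()
        ("y,no-prompt", "Bypass yes/no prompt for installing or removing packages.");
    auto result = options.parse(argc, argv);
    const bool skip_prompt = result.count("no-prompt") > 0; // set by -y or --no-prompt
    return skip_prompt ? 0 : 1;
}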
@@ -615,7 +623,8 @@ namespace gdpm::package_manager{
}
if(result.count("add-remote")){
std::string repo = result["add-remote"].as<std::string>();
config.remote_sources.emplace_back(repo);
// config.remote_sources.emplace_back(repo);
config.remote_sources.insert(repo);
}
if(result.count("delete-remote")){
std::string repo = result["delete-remote"].as<std::string>();

src/version.cpp (new file, 16 lines added)

@@ -0,0 +1,16 @@
#include "version.hpp"
namespace gdpm::version{
std::string to_string(const version_context& context){
return std::string();
}
version_context to_version(const std::string& version){
version_context v;
return v;
}
}
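The new to_string() and to_version() are stubs that return empty values. One possible shape, sketched under the assumption that version_context carries major/minor/patch integers; version.hpp is not part of this diff, so the struct layout below is hypothetical:

// Sketch only: the fields of version_context are assumed, not taken from the
// diff, which does not include version.hpp.
#include <cstdio>
#include <string>

struct version_context {  // hypothetical layout
    int major = 0;
    int minor = 0;
    int patch = 0;
};

std::string to_string(const version_context& v){
    return std::to_string(v.major) + "." + std::to_string(v.minor) + "." + std::to_string(v.patch);
}

version_context to_version(const std::string& s){
    version_context v;
    std::sscanf(s.c_str(), "%d.%d.%d", &v.major, &v.minor, &v.patch);
    return v;
}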