Skip to content

Commit

Permalink
Fix caching on systems with bad system time
Browse files Browse the repository at this point in the history
  • Loading branch information
mon committed Apr 8, 2024
1 parent 930d966 commit 69af440
Show file tree
Hide file tree
Showing 6 changed files with 75 additions and 50 deletions.
2 changes: 1 addition & 1 deletion meson.build
Original file line number Diff line number Diff line change
@@ -1,4 +1,4 @@
project('layeredfs', 'c', 'cpp', version: '3.2',
project('layeredfs', 'c', 'cpp', version: '3.3',
default_options: [
'cpp_std=c++17',
'buildtype=release',
Expand Down
2 changes: 1 addition & 1 deletion src/avs.h
Original file line number Diff line number Diff line change
Expand Up @@ -205,7 +205,7 @@ X(int, property_query_size, property_t prop) \
X(void, property_destroy, property_t prop) \
/* md5sum (sha1 if needed) */ \
X(mdigest_p, mdigest_create, mdigest_algs_t algorithm) \
X(void, mdigest_update, mdigest_p digest, const char* data, int size) \
X(void, mdigest_update, mdigest_p digest, const void* data, int size) \
X(void, mdigest_finish, mdigest_p digest, uint8_t* hash, int size) \
X(void, mdigest_destroy, mdigest_p digest) \
/* compression */ \
Expand Down
32 changes: 9 additions & 23 deletions src/hook.cpp
Original file line number Diff line number Diff line change
Expand Up @@ -220,34 +220,24 @@ void handle_texbin(HookFile &file) {
return;
}

auto starting = file.get_path_to_open();
string out = CACHE_FOLDER "/" + file.norm_path;
auto out_hashed = out + ".hashed";
auto cache_hasher = CacheHasher(out_hashed);

uint8_t hash[MD5_LEN];
hash_filenames(pngs_list, hash);

uint8_t cache_hash[MD5_LEN] = {0};
FILE* cache_hashfile;
cache_hashfile = fopen(out_hashed.c_str(), "rb");
if (cache_hashfile) {
fread(cache_hash, 1, sizeof(cache_hash), cache_hashfile);
fclose(cache_hashfile);
cache_hasher.add(starting);
for (auto &path : pngs_list) {
cache_hasher.add(path);
}
cache_hasher.finish();

auto time_out = file_time(out.c_str());
auto newest = file_time(file.get_path_to_open().c_str());
for (auto &path : pngs_list)
newest = std::max(newest, file_time(path.c_str()));
// no need to merge - timestamps all up to date, dll not newer, files haven't been deleted
if(time_out >= newest && time_out >= dll_time && memcmp(hash, cache_hash, sizeof(hash)) == 0) {
if(cache_hasher.matches()) {
file.mod_path = out;
log_misc("texbin cache up to date, skip");
return;
}
// log_verbose("Regenerating cache");
// log_verbose(" time_out >= newest == %d", time_out >= newest);
// log_verbose(" time_out >= dll_time == %d", time_out >= dll_time);
// log_verbose(" memcmp(hash, cache_hash, sizeof(hash)) == 0 == %d", memcmp(hash, cache_hash, sizeof(hash)) == 0);
log_verbose("Regenerating cache");

Texbin texbin;
auto _orig_data = file.load_to_vec();
Expand Down Expand Up @@ -284,11 +274,7 @@ void handle_texbin(HookFile &file) {
return;
}

cache_hashfile = fopen(out_hashed.c_str(), "wb");
if (cache_hashfile) {
fwrite(hash, 1, sizeof(hash), cache_hashfile);
fclose(cache_hashfile);
}
cache_hasher.commit();
file.mod_path = out;

log_misc("Texbin generation took %d ms", time() - start);
Expand Down
26 changes: 7 additions & 19 deletions src/imagefs.cpp
Original file line number Diff line number Diff line change
Expand Up @@ -458,24 +458,16 @@ void merge_xmls(HookFile &file) {
auto starting = file.get_path_to_open();
out = CACHE_FOLDER "/" + file.norm_path;
auto out_hashed = out + ".hashed";
auto cache_hasher = CacheHasher(out_hashed);

uint8_t hash[MD5_LEN];
hash_filenames(to_merge, hash);

uint8_t cache_hash[MD5_LEN] = {0};
FILE* cache_hashfile = fopen(out_hashed.c_str(), "rb");
if (cache_hashfile) {
fread(cache_hash, 1, sizeof(cache_hash), cache_hashfile);
fclose(cache_hashfile);
cache_hasher.add(starting); // don't forget to take the input into account
for (auto &path : to_merge) {
cache_hasher.add(path);
}
cache_hasher.finish();

auto time_out = file_time(out.c_str());
// don't forget to take the input into account
auto newest = file_time(starting.c_str());
for (auto &path : to_merge)
newest = std::max(newest, file_time(path.c_str()));
// no need to merge - timestamps all up to date, dll not newer, files haven't been deleted
if(time_out >= newest && time_out >= dll_time && memcmp(hash, cache_hash, sizeof(hash)) == 0) {
if(cache_hasher.matches()) {
file.mod_path = out;
return;
}
Expand Down Expand Up @@ -511,11 +503,7 @@ void merge_xmls(HookFile &file) {
}

rapidxml_dump_to_file(out, merged_xml);
cache_hashfile = fopen(out_hashed.c_str(), "wb");
if (cache_hashfile) {
fwrite(hash, 1, sizeof(hash), cache_hashfile);
fclose(cache_hashfile);
}
cache_hasher.commit();
file.mod_path = out;

log_misc("Merge took %d ms", time() - start);
Expand Down
40 changes: 35 additions & 5 deletions src/utils.cpp
Original file line number Diff line number Diff line change
Expand Up @@ -6,6 +6,7 @@
#include "utils.hpp"
#include "log.hpp"
#include "avs.h"
#include "hook.h"

char* snprintf_auto(const char* fmt, ...) {
va_list argList;
Expand Down Expand Up @@ -198,13 +199,42 @@ string basename_without_extension(string const & path) {
return p > 0 && p != string::npos ? basename.substr(0, p) : basename;
}

void hash_filenames(std::vector<std::string> &filenames, uint8_t hash[MD5_LEN]) {
auto digest = mdigest_create(MD5);
// Starts a cache-validity hash for the output guarded by `hash_file`, and
// loads the previously committed hash (if any) so `matches()` can compare
// the fresh hash against it.
CacheHasher::CacheHasher(std::string hash_file): hash_file(hash_file) {
    digest = mdigest_create(MD5);

    // always hash the DLL time so updating the hook invalidates every cache
    mdigest_update(digest, &dll_time, sizeof(dll_time));

    auto cache_hashfile = fopen(hash_file.c_str(), "rb");
    if (cache_hashfile) {
        // check the read actually produced a full hash: on a short/failed
        // read, re-zero the buffer so matches() fails and the cache is rebuilt
        if (fread(existing_hash, 1, MD5_LEN, cache_hashfile) != MD5_LEN) {
            for (auto &byte : existing_hash)
                byte = 0;
        }
        fclose(cache_hashfile);
    }
}

mdigest_finish(digest, hash, MD5_LEN);
// Releases the AVS md5 digest handle created in the constructor.
CacheHasher::~CacheHasher() {
    mdigest_destroy(digest);
}

// Folds one dependency into the cache key. Both the file's name and its
// last-modified timestamp contribute, so an edit or a rename/deletion of
// any input invalidates the cached output.
void CacheHasher::add(std::string &path) {
    const auto name_len = (int)path.length();
    mdigest_update(digest, path.c_str(), name_len);

    const auto mtime = file_time(path.c_str());
    mdigest_update(digest, &mtime, sizeof(mtime));
}

// Completes the digest, storing the result in `new_hash` for use by
// `matches`/`commit`. Call once, after all `add` calls.
void CacheHasher::finish() {
    mdigest_finish(digest, new_hash, MD5_LEN);
}

// True when the freshly computed hash equals the one loaded from disk,
// i.e. the cached output is still valid. Only meaningful after `finish`.
bool CacheHasher::matches() {
    return memcmp(new_hash, existing_hash, sizeof(new_hash)) == 0;
}

// Writes the freshly computed hash to disk. Should be called after `finish`,
// once the cached output itself has been successfully regenerated.
void CacheHasher::commit() {
    auto cache_hashfile = fopen(hash_file.c_str(), "wb");
    if (!cache_hashfile) {
        // best-effort: no hashfile just forces a rebuild on the next run
        return;
    }
    auto written = fwrite(new_hash, 1, sizeof(new_hash), cache_hashfile);
    fclose(cache_hashfile);
    if (written != sizeof(new_hash)) {
        // don't leave a truncated hashfile behind - delete it so the next
        // run sees "no hash" instead of garbage that can never match
        remove(hash_file.c_str());
    }
}
23 changes: 22 additions & 1 deletion src/utils.hpp
Original file line number Diff line number Diff line change
Expand Up @@ -28,7 +28,28 @@ std::vector<std::string> folders_in_folder(const char* root);
uint64_t file_time(const char* path);
LONG time(void);
std::string basename_without_extension(std::string const & path);
void hash_filenames(std::vector<std::string> &filenames, uint8_t hash[MD5_LEN]);

// Hashes the names and timestamps of input files into a rebuilt output.
// Invalidates on DLL timestamp change, input timestamp change, or input change
// Hashes the names and timestamps of input files into a rebuilt output.
// Invalidates on DLL timestamp change, input timestamp change, or input change
class CacheHasher {
    public:
        CacheHasher(std::string hash_file);
        ~CacheHasher();
        // the destructor frees `digest`, so copying would double-destroy the
        // same mdigest handle - forbid copies (direct-init still works)
        CacheHasher(const CacheHasher&) = delete;
        CacheHasher& operator=(const CacheHasher&) = delete;
        // add a path and its timestamp to the hash. Should not be called after `finish`
        void add(std::string &path);
        // complete the hashing op
        void finish();
        // check if the hashfile matches
        bool matches();
        // write out an updated hashfile. Should be called after `finish`
        void commit();

    private:
        std::string hash_file;
        mdigest_p digest;
        uint8_t existing_hash[MD5_LEN] = {0};
        uint8_t new_hash[MD5_LEN] = {0};
};

struct CaseInsensitiveCompare {
bool operator() (const std::string& a, const std::string& b) const {
Expand Down

0 comments on commit 69af440

Please sign in to comment.