// LightlessClient/LightlessSync/FileCache/FileCacheManager.cs
using K4os.Compression.LZ4.Legacy;
using LightlessSync.Interop.Ipc;
using LightlessSync.LightlessConfiguration;
using LightlessSync.Services.Mediator;
using LightlessSync.Utils;
using Microsoft.Extensions.Hosting;
using Microsoft.Extensions.Logging;
using System.Collections.Concurrent;
using System.Globalization;
using System.Text;
namespace LightlessSync.FileCache;
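// Maintains the local file cache database: maps file hashes to entries keyed by their
// prefixed paths ({penumbra}/{cache}), persists them to FileCache.csv, and validates
// entries against the files on disk.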
public sealed class FileCacheManager : IHostedService
{
public const string CachePrefix = "{cache}";
public const string CsvSplit = "|";
public const string PenumbraPrefix = "{penumbra}";
private readonly LightlessConfigService _configService;
private readonly LightlessMediator _lightlessMediator;
private readonly string _csvPath;
private readonly ConcurrentDictionary<string, ConcurrentDictionary<string, FileCacheEntity>> _fileCaches = new(StringComparer.Ordinal);
private readonly ConcurrentDictionary<string, FileCacheEntity> _fileCachesByPrefixedPath = new(StringComparer.OrdinalIgnoreCase);
private readonly SemaphoreSlim _getCachesByPathsSemaphore = new(1, 1);
private readonly Lock _fileWriteLock = new();
private readonly IpcManager _ipcManager;
private readonly ILogger<FileCacheManager> _logger;
public string CacheFolder => _configService.Current.CacheFolder;
public FileCacheManager(ILogger<FileCacheManager> logger, IpcManager ipcManager, LightlessConfigService configService, LightlessMediator lightlessMediator)
{
_logger = logger;
_ipcManager = ipcManager;
_configService = configService;
_lightlessMediator = lightlessMediator;
_csvPath = Path.Combine(configService.ConfigurationDirectory, "FileCache.csv");
}
private string CsvBakPath => _csvPath + ".bak";
private static string NormalizeSeparators(string path)
{
return path.Replace("/", "\\", StringComparison.Ordinal)
.Replace("\\\\", "\\", StringComparison.Ordinal);
}
private static string NormalizePrefixedPathKey(string prefixedPath)
{
if (string.IsNullOrEmpty(prefixedPath))
{
return string.Empty;
}
return NormalizeSeparators(prefixedPath).ToLowerInvariant();
}
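// Converts an absolute path to its prefixed, lower-cased lookup key by substituting the
// Penumbra mod directory and the cache folder with their prefix tokens.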
private string NormalizeToPrefixedPath(string path)
{
if (string.IsNullOrEmpty(path)) return string.Empty;
var normalized = NormalizeSeparators(path);
if (normalized.StartsWith(CachePrefix, StringComparison.OrdinalIgnoreCase) ||
normalized.StartsWith(PenumbraPrefix, StringComparison.OrdinalIgnoreCase))
{
return NormalizePrefixedPathKey(normalized);
}
var penumbraDir = _ipcManager.Penumbra.ModDirectory;
if (!string.IsNullOrEmpty(penumbraDir))
{
var normalizedPenumbra = NormalizeSeparators(penumbraDir);
var replacement = normalizedPenumbra.EndsWith("\\", StringComparison.Ordinal)
? PenumbraPrefix + "\\"
: PenumbraPrefix;
normalized = normalized.Replace(normalizedPenumbra, replacement, StringComparison.OrdinalIgnoreCase);
}
var cacheFolder = _configService.Current.CacheFolder;
if (!string.IsNullOrEmpty(cacheFolder))
{
var normalizedCache = NormalizeSeparators(cacheFolder);
var replacement = normalizedCache.EndsWith("\\", StringComparison.Ordinal)
? CachePrefix + "\\"
: CachePrefix;
normalized = normalized.Replace(normalizedCache, replacement, StringComparison.OrdinalIgnoreCase);
}
return NormalizePrefixedPathKey(normalized);
}
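// Registers a file located in the cache folder under the {cache} prefix; returns null if it does not exist.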
public FileCacheEntity? CreateCacheEntry(string path)
{
FileInfo fi = new(path);
if (!fi.Exists) return null;
_logger.LogTrace("Creating cache entry for {path}", path);
return CreateFileEntity(_configService.Current.CacheFolder.ToLowerInvariant(), CachePrefix, fi);
}
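// Registers a file located in the Penumbra mod directory under the {penumbra} prefix; returns null if it does not exist.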
public FileCacheEntity? CreateFileEntry(string path)
{
FileInfo fi = new(path);
if (!fi.Exists) return null;
_logger.LogTrace("Creating file entry for {path}", path);
return CreateFileEntity(_ipcManager.Penumbra.ModDirectory!.ToLowerInvariant(), PenumbraPrefix, fi);
}
private FileCacheEntity? CreateFileEntity(string directory, string prefix, FileInfo fi)
{
var fullName = fi.FullName.ToLowerInvariant();
if (!fullName.Contains(directory, StringComparison.Ordinal)) return null;
string prefixedPath = fullName.Replace(directory, prefix + "\\", StringComparison.Ordinal).Replace("\\\\", "\\", StringComparison.Ordinal);
return CreateFileCacheEntity(fi, prefixedPath);
}
public List<FileCacheEntity> GetAllFileCaches() => _fileCaches.Values.SelectMany(v => v.Values.Where(e => e != null)).ToList();
public List<FileCacheEntity> GetAllFileCachesByHash(string hash, bool ignoreCacheEntries = false, bool validate = true)
{
List<FileCacheEntity> output = [];
if (_fileCaches.TryGetValue(hash, out var fileCacheEntities))
{
foreach (var fileCache in fileCacheEntities.Values.Where(c => !ignoreCacheEntries || !c.IsCacheEntry).ToList())
{
if (!validate)
{
output.Add(fileCache);
}
else
{
var validated = GetValidatedFileCache(fileCache);
if (validated != null)
{
output.Add(validated);
}
}
}
}
return output;
}
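// Re-hashes every cache entry, removes and deletes files whose hash or presence no longer
// matches, and returns the broken entries. File scanning is halted via the mediator while this runs.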
public Task<List<FileCacheEntity>> ValidateLocalIntegrity(IProgress<(int, int, FileCacheEntity)> progress, CancellationToken cancellationToken)
{
_lightlessMediator.Publish(new HaltScanMessage(nameof(ValidateLocalIntegrity)));
_logger.LogInformation("Validating local storage");
var cacheEntries = _fileCaches.Values.SelectMany(v => v.Values.Where(e => e != null)).Where(v => v.IsCacheEntry).ToList();
List<FileCacheEntity> brokenEntities = [];
int i = 0;
foreach (var fileCache in cacheEntries)
{
if (cancellationToken.IsCancellationRequested) break;
_logger.LogInformation("Validating {file}", fileCache.ResolvedFilepath);
progress.Report((i, cacheEntries.Count, fileCache));
i++;
if (!File.Exists(fileCache.ResolvedFilepath))
{
brokenEntities.Add(fileCache);
continue;
}
try
{
var computedHash = Crypto.GetFileHash(fileCache.ResolvedFilepath);
if (!string.Equals(computedHash, fileCache.Hash, StringComparison.Ordinal))
{
_logger.LogInformation("Failed to validate {file}, got hash {computedHash}, expected hash {hash}", fileCache.ResolvedFilepath, computedHash, fileCache.Hash);
brokenEntities.Add(fileCache);
}
}
catch (Exception e)
{
_logger.LogWarning(e, "Error during validation of {file}", fileCache.ResolvedFilepath);
brokenEntities.Add(fileCache);
}
}
foreach (var brokenEntity in brokenEntities)
{
RemoveHashedFile(brokenEntity.Hash, brokenEntity.PrefixedFilePath);
try
{
File.Delete(brokenEntity.ResolvedFilepath);
}
catch (Exception ex)
{
_logger.LogWarning(ex, "Could not delete {file}", brokenEntity.ResolvedFilepath);
}
}
_lightlessMediator.Publish(new ResumeScanMessage(nameof(ValidateLocalIntegrity)));
return Task.FromResult(brokenEntities);
}
public string GetCacheFilePath(string hash, string extension)
{
return Path.Combine(_configService.Current.CacheFolder, hash + "." + extension);
}
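// Reads the file backing the given hash and returns its LZ4-HC compressed contents for upload.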
public async Task<(string, byte[])> GetCompressedFileData(string fileHash, CancellationToken uploadToken)
{
var fileCache = GetFileCacheByHash(fileHash)!.ResolvedFilepath;
return (fileHash, LZ4Wrapper.WrapHC(await File.ReadAllBytesAsync(fileCache, uploadToken).ConfigureAwait(false), 0,
(int)new FileInfo(fileCache).Length));
}
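// Returns a validated entry for the hash, preferring Penumbra-backed entries over cache entries.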
public FileCacheEntity? GetFileCacheByHash(string hash)
{
if (_fileCaches.TryGetValue(hash, out var entries))
{
var item = entries.Values
.OrderBy(p => p.PrefixedFilePath.Contains(PenumbraPrefix, StringComparison.Ordinal) ? 0 : 1)
.FirstOrDefault();
if (item != null)
{
return GetValidatedFileCache(item);
}
}
return null;
}
private FileCacheEntity? GetFileCacheByPath(string path)
{
var normalizedPrefixedPath = NormalizeToPrefixedPath(path);
if (string.IsNullOrEmpty(normalizedPrefixedPath))
{
return null;
}
if (_fileCachesByPrefixedPath.TryGetValue(normalizedPrefixedPath, out var entry))
{
return GetValidatedFileCache(entry);
}
_logger.LogDebug("Found no entries for {path}", normalizedPrefixedPath);
if (normalizedPrefixedPath.Contains(CachePrefix, StringComparison.Ordinal))
{
return CreateCacheEntry(path);
}
return CreateFileEntry(path) ?? CreateCacheEntry(path);
}
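// Resolves a batch of absolute paths to entries, creating entries for paths not yet known.
// Serialized through a semaphore so only one batch lookup runs at a time.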
public Dictionary<string, FileCacheEntity?> GetFileCachesByPaths(string[] paths)
{
_getCachesByPathsSemaphore.Wait();
try
{
var result = new Dictionary<string, FileCacheEntity?>(StringComparer.OrdinalIgnoreCase);
var seenNormalized = new HashSet<string>(StringComparer.OrdinalIgnoreCase);
foreach (var originalPath in paths)
{
if (string.IsNullOrEmpty(originalPath))
{
result[originalPath] = null;
continue;
}
var normalized = NormalizeToPrefixedPath(originalPath);
if (seenNormalized.Add(normalized))
{
if (!string.IsNullOrEmpty(normalized))
{
_logger.LogDebug("Normalized path {cleaned}", normalized);
}
}
else if (!string.IsNullOrEmpty(normalized))
{
_logger.LogWarning("Duplicate normalized path detected: {cleaned}", normalized);
}
if (_fileCachesByPrefixedPath.TryGetValue(normalized, out var entity))
{
result[originalPath] = GetValidatedFileCache(entity);
continue;
}
FileCacheEntity? created = null;
if (normalized.Contains(CachePrefix, StringComparison.Ordinal))
{
created = CreateCacheEntry(originalPath);
}
else if (normalized.Contains(PenumbraPrefix, StringComparison.Ordinal))
{
created = CreateFileEntry(originalPath);
}
else
{
created = CreateFileEntry(originalPath) ?? CreateCacheEntry(originalPath);
}
result[originalPath] = created;
}
return result;
}
finally
{
_getCachesByPathsSemaphore.Release();
}
}
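// Removes an entry from both the per-hash and the per-path index; drops the hash bucket once it is empty.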
public void RemoveHashedFile(string hash, string prefixedFilePath)
{
var normalizedPath = NormalizePrefixedPathKey(prefixedFilePath);
if (_fileCaches.TryGetValue(hash, out var caches))
{
_logger.LogTrace("Removing from DB: {hash} => {path}", hash, prefixedFilePath);
if (caches.TryRemove(normalizedPath, out var removedEntity))
{
_logger.LogTrace("Removed from DB: {hash} => {path}", hash, removedEntity.PrefixedFilePath);
}
if (caches.IsEmpty)
{
_fileCaches.TryRemove(hash, out _);
}
}
_fileCachesByPrefixedPath.TryRemove(normalizedPath, out _);
}
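// Re-registers an entry, optionally recomputing its size, hash and last-write ticks from disk first.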
public void UpdateHashedFile(FileCacheEntity fileCache, bool computeProperties = true)
{
_logger.LogTrace("Updating hash for {path}", fileCache.ResolvedFilepath);
var oldHash = fileCache.Hash;
var prefixedPath = fileCache.PrefixedFilePath;
if (computeProperties)
{
var fi = new FileInfo(fileCache.ResolvedFilepath);
fileCache.Size = fi.Length;
fileCache.CompressedSize = null;
fileCache.Hash = Crypto.GetFileHash(fileCache.ResolvedFilepath);
fileCache.LastModifiedDateTicks = fi.LastWriteTimeUtc.Ticks.ToString(CultureInfo.InvariantCulture);
}
RemoveHashedFile(oldHash, prefixedPath);
AddHashedFile(fileCache);
}
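// Reports whether an entry is still valid, needs its hash updated, or should be deleted,
// based on the file's existence and last-write time.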
public (FileState State, FileCacheEntity FileCache) ValidateFileCacheEntity(FileCacheEntity fileCache)
{
fileCache = ReplacePathPrefixes(fileCache);
FileInfo fi = new(fileCache.ResolvedFilepath);
if (!fi.Exists)
{
return (FileState.RequireDeletion, fileCache);
}
if (!string.Equals(fi.LastWriteTimeUtc.Ticks.ToString(CultureInfo.InvariantCulture), fileCache.LastModifiedDateTicks, StringComparison.Ordinal))
{
return (FileState.RequireUpdate, fileCache);
}
return (FileState.Valid, fileCache);
}
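// Rewrites the full CSV, keeping a .bak copy of the previous file; if the write fails,
// the new contents are written to the .bak file instead.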
public void WriteOutFullCsv()
{
lock (_fileWriteLock)
{
StringBuilder sb = new();
foreach (var entry in _fileCaches.Values.SelectMany(k => k.Values).OrderBy(f => f.PrefixedFilePath, StringComparer.OrdinalIgnoreCase))
{
sb.AppendLine(entry.CsvEntry);
}
if (File.Exists(_csvPath))
{
File.Copy(_csvPath, CsvBakPath, overwrite: true);
}
try
{
File.WriteAllText(_csvPath, sb.ToString());
File.Delete(CsvBakPath);
}
catch
{
File.WriteAllText(CsvBakPath, sb.ToString());
}
}
}
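// Renames a hash-named cache file to include its extension and re-registers the entry;
// on failure the original entry is restored.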
internal FileCacheEntity MigrateFileHashToExtension(FileCacheEntity fileCache, string ext)
{
try
{
RemoveHashedFile(fileCache.Hash, fileCache.PrefixedFilePath);
var extensionPath = fileCache.ResolvedFilepath.ToUpper(CultureInfo.InvariantCulture) + "." + ext;
File.Move(fileCache.ResolvedFilepath, extensionPath, overwrite: true);
var newHashedEntity = new FileCacheEntity(fileCache.Hash, fileCache.PrefixedFilePath + "." + ext, DateTime.UtcNow.Ticks.ToString(CultureInfo.InvariantCulture));
newHashedEntity.SetResolvedFilePath(extensionPath);
AddHashedFile(newHashedEntity);
_logger.LogTrace("Migrated from {oldPath} to {newPath}", fileCache.ResolvedFilepath, newHashedEntity.ResolvedFilepath);
return newHashedEntity;
}
catch (Exception ex)
{
AddHashedFile(fileCache);
_logger.LogWarning(ex, "Failed to migrate entity {entity}", fileCache.PrefixedFilePath);
return fileCache;
}
}
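// Indexes an entry both by hash and by its normalized prefixed path.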
private void AddHashedFile(FileCacheEntity fileCache)
{
var normalizedPath = NormalizePrefixedPathKey(fileCache.PrefixedFilePath);
var entries = _fileCaches.GetOrAdd(fileCache.Hash, _ => new ConcurrentDictionary<string, FileCacheEntity>(StringComparer.OrdinalIgnoreCase));
entries[normalizedPath] = fileCache;
_fileCachesByPrefixedPath[normalizedPath] = fileCache;
}
private FileCacheEntity? CreateFileCacheEntity(FileInfo fileInfo, string prefixedPath, string? hash = null)
{
hash ??= Crypto.GetFileHash(fileInfo.FullName);
var entity = new FileCacheEntity(hash, prefixedPath, fileInfo.LastWriteTimeUtc.Ticks.ToString(CultureInfo.InvariantCulture), fileInfo.Length);
entity = ReplacePathPrefixes(entity);
AddHashedFile(entity);
lock (_fileWriteLock)
{
File.AppendAllLines(_csvPath, new[] { entity.CsvEntry });
}
var result = GetFileCacheByPath(fileInfo.FullName);
_logger.LogTrace("Creating cache entity for {name} success: {success}", fileInfo.FullName, (result != null));
return result;
}
private FileCacheEntity? GetValidatedFileCache(FileCacheEntity fileCache)
{
var resultingFileCache = ReplacePathPrefixes(fileCache);
//_logger.LogTrace("Validating {path}", fileCache.PrefixedFilePath);
resultingFileCache = Validate(resultingFileCache);
return resultingFileCache;
}
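// Expands the {penumbra} or {cache} prefix in an entry's path to the actual directory on disk.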
private FileCacheEntity ReplacePathPrefixes(FileCacheEntity fileCache)
{
if (fileCache.PrefixedFilePath.StartsWith(PenumbraPrefix, StringComparison.OrdinalIgnoreCase))
{
fileCache.SetResolvedFilePath(fileCache.PrefixedFilePath.Replace(PenumbraPrefix, _ipcManager.Penumbra.ModDirectory, StringComparison.Ordinal));
}
else if (fileCache.PrefixedFilePath.StartsWith(CachePrefix, StringComparison.OrdinalIgnoreCase))
{
fileCache.SetResolvedFilePath(fileCache.PrefixedFilePath.Replace(CachePrefix, _configService.Current.CacheFolder, StringComparison.Ordinal));
}
return fileCache;
}
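// Drops entries whose resolved file is missing and refreshes entries whose last-write time has changed.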
private FileCacheEntity? Validate(FileCacheEntity fileCache)
{
if (string.IsNullOrWhiteSpace(fileCache.ResolvedFilepath))
{
_logger.LogWarning("FileCacheEntity has empty ResolvedFilepath for hash {hash}, prefixed path {prefixed}", fileCache.Hash, fileCache.PrefixedFilePath);
RemoveHashedFile(fileCache.Hash, fileCache.PrefixedFilePath);
return null;
}
var file = new FileInfo(fileCache.ResolvedFilepath);
if (!file.Exists)
{
RemoveHashedFile(fileCache.Hash, fileCache.PrefixedFilePath);
return null;
}
if (!string.Equals(file.LastWriteTimeUtc.Ticks.ToString(CultureInfo.InvariantCulture), fileCache.LastModifiedDateTicks, StringComparison.Ordinal))
{
UpdateHashedFile(fileCache);
}
return fileCache;
}
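// Restores the CSV from its .bak if a previous write was interrupted, then loads all entries,
// skipping duplicate and malformed lines; the CSV is rewritten if any lines were dropped.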
public Task StartAsync(CancellationToken cancellationToken)
{
_logger.LogInformation("Starting FileCacheManager");
lock (_fileWriteLock)
{
try
{
_logger.LogInformation("Checking for {bakPath}", CsvBakPath);
if (File.Exists(CsvBakPath))
{
_logger.LogInformation("{bakPath} found, moving to {csvPath}", CsvBakPath, _csvPath);
File.Move(CsvBakPath, _csvPath, overwrite: true);
}
}
catch (Exception ex)
{
_logger.LogWarning(ex, "Failed to move BAK to ORG, deleting BAK");
try
{
if (File.Exists(CsvBakPath))
File.Delete(CsvBakPath);
}
catch (Exception ex1)
{
_logger.LogWarning(ex1, "Could not delete bak file");
}
}
}
if (File.Exists(_csvPath))
{
if (!_ipcManager.Penumbra.APIAvailable || string.IsNullOrEmpty(_ipcManager.Penumbra.ModDirectory))
{
_lightlessMediator.Publish(new NotificationMessage("Penumbra not connected",
"Could not load local file cache data. Penumbra is not connected or not properly set up. Please enable and/or configure Penumbra properly to use Lightless. After, reload Lightless in the Plugin installer.",
LightlessConfiguration.Models.NotificationType.Error));
}
_logger.LogInformation("{csvPath} found, parsing", _csvPath);
bool success = false;
string[] entries = [];
int attempts = 0;
while (!success && attempts < 10)
{
try
{
_logger.LogInformation("Attempting to read {csvPath}", _csvPath);
entries = File.ReadAllLines(_csvPath);
success = true;
}
catch (Exception ex)
{
attempts++;
_logger.LogWarning(ex, "Could not open {file}, trying again", _csvPath);
// A fire-and-forget Task.Delay would not pause between retries; block on it so the retry loop actually backs off.
Task.Delay(100, cancellationToken).GetAwaiter().GetResult();
}
}
if (!entries.Any())
{
_logger.LogWarning("Could not load entries from {path}, continuing with empty file cache", _csvPath);
}
_logger.LogInformation("Found {amount} files in {path}", entries.Length, _csvPath);
Dictionary<string, bool> processedFiles = new(StringComparer.OrdinalIgnoreCase);
foreach (var entry in entries)
{
var splittedEntry = entry.Split(CsvSplit, StringSplitOptions.None);
try
{
var hash = splittedEntry[0];
if (hash.Length != 40) throw new InvalidOperationException("Expected Hash length of 40, received " + hash.Length);
var path = splittedEntry[1];
var time = splittedEntry[2];
if (processedFiles.ContainsKey(path))
{
_logger.LogWarning("Already processed {file}, ignoring", path);
continue;
}
processedFiles.Add(path, value: true);
long size = -1;
long compressed = -1;
if (splittedEntry.Length > 3)
{
if (long.TryParse(splittedEntry[3], CultureInfo.InvariantCulture, out long result))
{
size = result;
}
if (splittedEntry.Length > 4 && long.TryParse(splittedEntry[4], CultureInfo.InvariantCulture, out long resultCompressed))
{
compressed = resultCompressed;
}
}
AddHashedFile(ReplacePathPrefixes(new FileCacheEntity(hash, path, time, size, compressed)));
}
catch (Exception ex)
{
_logger.LogWarning(ex, "Failed to initialize entry {entry}, ignoring", entry);
}
}
if (processedFiles.Count != entries.Length)
{
WriteOutFullCsv();
}
}
_logger.LogInformation("Started FileCacheManager");
return Task.CompletedTask;
}
public Task StopAsync(CancellationToken cancellationToken)
{
WriteOutFullCsv();
return Task.CompletedTask;
}
}