using K4os.Compression.LZ4.Legacy;
using LightlessSync.Interop.Ipc;
using LightlessSync.LightlessConfiguration;
using LightlessSync.Services.Mediator;
using LightlessSync.Utils;
using Microsoft.Extensions.Hosting;
using Microsoft.Extensions.Logging;
using System.Collections.Concurrent;
using System.Globalization;
using System.Text;

namespace LightlessSync.FileCache;

public sealed class FileCacheManager : IHostedService
{
    public const string CachePrefix = "{cache}";
    public const string CsvSplit = "|";
    public const string PenumbraPrefix = "{penumbra}";
    private const int FileCacheVersion = 1;
    private const string FileCacheVersionHeaderPrefix = "#lightless-file-cache-version:";
    private readonly LightlessConfigService _configService;
    private readonly LightlessMediator _lightlessMediator;
    private readonly string _csvPath;
    private readonly ConcurrentDictionary<string, ConcurrentDictionary<string, FileCacheEntity>> _fileCaches = new(StringComparer.Ordinal);
    private readonly ConcurrentDictionary<string, FileCacheEntity> _fileCachesByPrefixedPath = new(StringComparer.OrdinalIgnoreCase);
    private readonly SemaphoreSlim _getCachesByPathsSemaphore = new(1, 1);
    private readonly Lock _fileWriteLock = new();
    private readonly IpcManager _ipcManager;
    private readonly ILogger<FileCacheManager> _logger;
    private bool _csvHeaderEnsured;
    public string CacheFolder => _configService.Current.CacheFolder;

    public FileCacheManager(ILogger<FileCacheManager> logger, IpcManager ipcManager, LightlessConfigService configService, LightlessMediator lightlessMediator)
    {
        _logger = logger;
        _ipcManager = ipcManager;
        _configService = configService;
        _lightlessMediator = lightlessMediator;
        _csvPath = Path.Combine(configService.ConfigurationDirectory, "FileCache.csv");
    }

    private string CsvBakPath => _csvPath + ".bak";

    private static string NormalizeSeparators(string path)
    {
        return path.Replace("/", "\\", StringComparison.Ordinal)
            .Replace("\\\\", "\\", StringComparison.Ordinal);
    }

    private static string NormalizePrefixedPathKey(string prefixedPath)
    {
        if (string.IsNullOrEmpty(prefixedPath))
        {
            return string.Empty;
        }

        return NormalizeSeparators(prefixedPath).ToLowerInvariant();
    }
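
    /// <summary>
    /// Attempts to express <paramref name="path"/> relative to <paramref name="baseDirectory"/> using the given
    /// placeholder prefix (e.g. "{penumbra}" or "{cache}"). Returns false when the path does not live under the
    /// base directory; <paramref name="matchedLength"/> reports how many characters of the base were matched so
    /// callers can prefer the longest match.
    /// </summary>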
    private static bool TryBuildPrefixedPath(string path, string? baseDirectory, string prefix, out string prefixedPath, out int matchedLength)
    {
        prefixedPath = string.Empty;
        matchedLength = 0;

        if (string.IsNullOrEmpty(path) || string.IsNullOrEmpty(baseDirectory))
        {
            return false;
        }

        var normalizedPath = NormalizeSeparators(path).ToLowerInvariant();
        var normalizedBase = NormalizeSeparators(baseDirectory).TrimEnd('\\').ToLowerInvariant();

        if (!normalizedPath.StartsWith(normalizedBase, StringComparison.Ordinal))
        {
            return false;
        }

        if (normalizedPath.Length > normalizedBase.Length)
        {
            if (normalizedPath[normalizedBase.Length] != '\\')
            {
                return false;
            }

            prefixedPath = prefix + normalizedPath.Substring(normalizedBase.Length);
        }
        else
        {
            prefixedPath = prefix;
        }

        prefixedPath = prefixedPath.Replace("\\\\", "\\", StringComparison.Ordinal);
        matchedLength = normalizedBase.Length;
        return true;
    }

    private static string BuildVersionHeader() => $"{FileCacheVersionHeaderPrefix}{FileCacheVersion}";

    private static bool TryParseVersionHeader(string? line, out int version)
    {
        version = 0;
        if (string.IsNullOrWhiteSpace(line))
        {
            return false;
        }

        if (!line.StartsWith(FileCacheVersionHeaderPrefix, StringComparison.OrdinalIgnoreCase))
        {
            return false;
        }

        var versionSpan = line.AsSpan(FileCacheVersionHeaderPrefix.Length);
        return int.TryParse(versionSpan, NumberStyles.Integer, CultureInfo.InvariantCulture, out version);
    }
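
    /// <summary>
    /// Normalizes an absolute or already-prefixed path into the lower-cased prefixed key used by the
    /// in-memory dictionaries. When both the Penumbra mod directory and the cache folder match, the
    /// longer (more specific) base directory wins.
    /// </summary>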
    private string NormalizeToPrefixedPath(string path)
    {
        if (string.IsNullOrEmpty(path)) return string.Empty;

        var normalized = NormalizeSeparators(path);

        if (normalized.StartsWith(CachePrefix, StringComparison.OrdinalIgnoreCase) ||
            normalized.StartsWith(PenumbraPrefix, StringComparison.OrdinalIgnoreCase))
        {
            return NormalizePrefixedPathKey(normalized);
        }

        string? chosenPrefixed = null;
        var chosenLength = -1;

        if (TryBuildPrefixedPath(normalized, _ipcManager.Penumbra.ModDirectory, PenumbraPrefix, out var penumbraPrefixed, out var penumbraMatch))
        {
            chosenPrefixed = penumbraPrefixed;
            chosenLength = penumbraMatch;
        }

        if (TryBuildPrefixedPath(normalized, _configService.Current.CacheFolder, CachePrefix, out var cachePrefixed, out var cacheMatch))
        {
            if (cacheMatch > chosenLength)
            {
                chosenPrefixed = cachePrefixed;
                chosenLength = cacheMatch;
            }
        }

        return NormalizePrefixedPathKey(chosenPrefixed ?? normalized);
    }
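
    /// <summary>
    /// Creates a file cache entity for a file inside the local cache folder ("{cache}" prefix).
    /// Returns null when the file does not exist or no cache folder is configured.
    /// </summary>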
    public FileCacheEntity? CreateCacheEntry(string path)
    {
        FileInfo fi = new(path);
        if (!fi.Exists) return null;
        _logger.LogTrace("Creating cache entry for {path}", path);
        var cacheFolder = _configService.Current.CacheFolder;
        if (string.IsNullOrEmpty(cacheFolder)) return null;
        return CreateFileEntity(cacheFolder, CachePrefix, fi);
    }

    public FileCacheEntity? CreateFileEntry(string path)
    {
        FileInfo fi = new(path);
        if (!fi.Exists) return null;
        _logger.LogTrace("Creating file entry for {path}", path);
        var modDirectory = _ipcManager.Penumbra.ModDirectory;
        if (string.IsNullOrEmpty(modDirectory)) return null;
        return CreateFileEntity(modDirectory, PenumbraPrefix, fi);
    }

    private FileCacheEntity? CreateFileEntity(string directory, string prefix, FileInfo fi)
    {
        if (!TryBuildPrefixedPath(fi.FullName, directory, prefix, out var prefixedPath, out _))
        {
            return null;
        }

        return CreateFileCacheEntity(fi, prefixedPath);
    }

    public List<FileCacheEntity> GetAllFileCaches() => _fileCaches.Values.SelectMany(v => v.Values.Where(e => e != null)).ToList();

    public List<FileCacheEntity> GetAllFileCachesByHash(string hash, bool ignoreCacheEntries = false, bool validate = true)
    {
        List<FileCacheEntity> output = [];
        if (_fileCaches.TryGetValue(hash, out var fileCacheEntities))
        {
            foreach (var fileCache in fileCacheEntities.Values.Where(c => !ignoreCacheEntries || !c.IsCacheEntry).ToList())
            {
                if (!validate)
                {
                    output.Add(fileCache);
                }
                else
                {
                    var validated = GetValidatedFileCache(fileCache);
                    if (validated != null)
                    {
                        output.Add(validated);
                    }
                }
            }
        }

        return output;
    }
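
    /// <summary>
    /// Re-hashes every cache entry on disk in parallel, removes entries whose file is missing or whose
    /// content no longer matches the stored hash, and deletes the offending files. Scanning is halted
    /// via the mediator while validation runs and resumed afterwards.
    /// </summary>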
    public async Task<List<FileCacheEntity>> ValidateLocalIntegrity(IProgress<(int completed, int total, FileCacheEntity current)> progress, CancellationToken cancellationToken)
    {
        _lightlessMediator.Publish(new HaltScanMessage(nameof(ValidateLocalIntegrity)));
        _logger.LogInformation("Validating local storage");

        var cacheEntries = _fileCaches.Values
            .SelectMany(v => v.Values)
            .Where(v => v.IsCacheEntry)
            .ToList();

        int total = cacheEntries.Count;
        int processed = 0;
        var brokenEntities = new ConcurrentBag<FileCacheEntity>();

        _logger.LogInformation("Checking {count} cache entries...", total);

        await Parallel.ForEachAsync(cacheEntries, new ParallelOptions
        {
            MaxDegreeOfParallelism = Environment.ProcessorCount,
            CancellationToken = cancellationToken
        },
        async (fileCache, token) =>
        {
            try
            {
                int current = Interlocked.Increment(ref processed);
                if (current % 10 == 0)
                    progress.Report((current, total, fileCache));

                if (!File.Exists(fileCache.ResolvedFilepath))
                {
                    brokenEntities.Add(fileCache);
                    return;
                }

                string computedHash;
                try
                {
                    computedHash = await Crypto.GetFileHashAsync(fileCache.ResolvedFilepath, token).ConfigureAwait(false);
                }
                catch (Exception ex)
                {
                    _logger.LogWarning(ex, "Error hashing {file}", fileCache.ResolvedFilepath);
                    brokenEntities.Add(fileCache);
                    return;
                }

                if (!string.Equals(computedHash, fileCache.Hash, StringComparison.Ordinal))
                {
                    _logger.LogInformation(
                        "Hash mismatch: {file} (got {computedHash}, expected {expected})",
                        fileCache.ResolvedFilepath, computedHash, fileCache.Hash);

                    brokenEntities.Add(fileCache);
                }
            }
            catch (OperationCanceledException)
            {
                _logger.LogError("Validation got cancelled for {file}", fileCache.ResolvedFilepath);
            }
            catch (Exception ex)
            {
                _logger.LogError(ex, "Unexpected error validating {file}", fileCache.ResolvedFilepath);
                brokenEntities.Add(fileCache);
            }
        }).ConfigureAwait(false);

        foreach (var brokenEntity in brokenEntities)
        {
            RemoveHashedFile(brokenEntity.Hash, brokenEntity.PrefixedFilePath);

            try
            {
                File.Delete(brokenEntity.ResolvedFilepath);
            }
            catch (Exception ex)
            {
                _logger.LogWarning(ex, "Failed to delete invalid cache file {file}", brokenEntity.ResolvedFilepath);
            }
        }

        _lightlessMediator.Publish(new ResumeScanMessage(nameof(ValidateLocalIntegrity)));
        _logger.LogInformation("Validation complete. Found {count} invalid entries.", brokenEntities.Count);

        return [.. brokenEntities];
    }

    public string GetCacheFilePath(string hash, string extension)
    {
        return Path.Combine(_configService.Current.CacheFolder, hash + "." + extension);
    }
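
    /// <summary>
    /// Reads the file backing <paramref name="fileHash"/> and returns the hash together with its
    /// LZ4-HC compressed contents, ready for upload.
    /// </summary>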
    public async Task<(string, byte[])> GetCompressedFileData(string fileHash, CancellationToken uploadToken)
    {
        var fileCache = GetFileCacheByHash(fileHash)!.ResolvedFilepath;
        return (fileHash, LZ4Wrapper.WrapHC(await File.ReadAllBytesAsync(fileCache, uploadToken).ConfigureAwait(false), 0,
            (int)new FileInfo(fileCache).Length));
    }

    public FileCacheEntity? GetFileCacheByHash(string hash)
    {
        if (_fileCaches.TryGetValue(hash, out var entries))
        {
            var item = entries.Values
                .OrderBy(p => p.PrefixedFilePath.Contains(PenumbraPrefix, StringComparison.Ordinal) ? 0 : 1)
                .FirstOrDefault();
            if (item != null)
            {
                return GetValidatedFileCache(item);
            }
        }
        return null;
    }

    private FileCacheEntity? GetFileCacheByPath(string path)
    {
        var normalizedPrefixedPath = NormalizeToPrefixedPath(path);
        if (string.IsNullOrEmpty(normalizedPrefixedPath))
        {
            return null;
        }

        if (_fileCachesByPrefixedPath.TryGetValue(normalizedPrefixedPath, out var entry))
        {
            return GetValidatedFileCache(entry);
        }

        _logger.LogDebug("Found no entries for {path}", normalizedPrefixedPath);

        if (normalizedPrefixedPath.Contains(CachePrefix, StringComparison.Ordinal))
        {
            return CreateCacheEntry(path);
        }

        return CreateFileEntry(path) ?? CreateCacheEntry(path);
    }
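
    /// <summary>
    /// Resolves a batch of paths to file cache entities in one pass, creating entries for paths that are
    /// not yet known. Serialized via a semaphore so concurrent callers do not create duplicate entries
    /// for the same path.
    /// </summary>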
    public Dictionary<string, FileCacheEntity?> GetFileCachesByPaths(string[] paths)
    {
        _getCachesByPathsSemaphore.Wait();

        try
        {
            var result = new Dictionary<string, FileCacheEntity?>(StringComparer.OrdinalIgnoreCase);
            var seenNormalized = new HashSet<string>(StringComparer.OrdinalIgnoreCase);

            foreach (var originalPath in paths)
            {
                if (string.IsNullOrEmpty(originalPath))
                {
                    result[originalPath] = null;
                    continue;
                }

                var normalized = NormalizeToPrefixedPath(originalPath);
                if (seenNormalized.Add(normalized))
                {
                    if (!string.IsNullOrEmpty(normalized))
                    {
                        _logger.LogDebug("Normalized path {cleaned}", normalized);
                    }
                }
                else if (!string.IsNullOrEmpty(normalized))
                {
                    _logger.LogWarning("Duplicate normalized path detected: {cleaned}", normalized);
                }

                if (_fileCachesByPrefixedPath.TryGetValue(normalized, out var entity))
                {
                    result[originalPath] = GetValidatedFileCache(entity);
                    continue;
                }

                FileCacheEntity? created = null;

                if (normalized.Contains(CachePrefix, StringComparison.Ordinal))
                {
                    created = CreateCacheEntry(originalPath);
                }
                else if (normalized.Contains(PenumbraPrefix, StringComparison.Ordinal))
                {
                    created = CreateFileEntry(originalPath);
                }
                else
                {
                    created = CreateFileEntry(originalPath) ?? CreateCacheEntry(originalPath);
                }

                result[originalPath] = created;
            }

            return result;
        }
        finally
        {
            _getCachesByPathsSemaphore.Release();
        }
    }
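
    /// <summary>
    /// Removes a single hash/path pairing from both lookup dictionaries, dropping the hash bucket
    /// entirely once its last path is gone.
    /// </summary>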
    public void RemoveHashedFile(string hash, string prefixedFilePath)
    {
        var normalizedPath = NormalizePrefixedPathKey(prefixedFilePath);

        if (_fileCaches.TryGetValue(hash, out var caches))
        {
            _logger.LogTrace("Removing from DB: {hash} => {path}", hash, prefixedFilePath);

            if (caches.TryRemove(normalizedPath, out var removedEntity))
            {
                _logger.LogTrace("Removed from DB: {hash} => {path}", hash, removedEntity.PrefixedFilePath);
            }

            if (caches.IsEmpty)
            {
                _fileCaches.TryRemove(hash, out _);
            }
        }

        _fileCachesByPrefixedPath.TryRemove(normalizedPath, out _);
    }

    public void UpdateHashedFile(FileCacheEntity fileCache, bool computeProperties = true)
    {
        _logger.LogTrace("Updating hash for {path}", fileCache.ResolvedFilepath);
        var oldHash = fileCache.Hash;
        var prefixedPath = fileCache.PrefixedFilePath;
        if (computeProperties)
        {
            var fi = new FileInfo(fileCache.ResolvedFilepath);
            fileCache.Size = fi.Length;
            fileCache.CompressedSize = null;
            fileCache.Hash = Crypto.GetFileHash(fileCache.ResolvedFilepath);
            fileCache.LastModifiedDateTicks = fi.LastWriteTimeUtc.Ticks.ToString(CultureInfo.InvariantCulture);
        }
        RemoveHashedFile(oldHash, prefixedPath);
        AddHashedFile(fileCache);
    }

    public (FileState State, FileCacheEntity FileCache) ValidateFileCacheEntity(FileCacheEntity fileCache)
    {
        fileCache = ReplacePathPrefixes(fileCache);
        FileInfo fi = new(fileCache.ResolvedFilepath);
        if (!fi.Exists)
        {
            return (FileState.RequireDeletion, fileCache);
        }
        if (!string.Equals(fi.LastWriteTimeUtc.Ticks.ToString(CultureInfo.InvariantCulture), fileCache.LastModifiedDateTicks, StringComparison.Ordinal))
        {
            return (FileState.RequireUpdate, fileCache);
        }

        return (FileState.Valid, fileCache);
    }
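
    /// <summary>
    /// Serializes the full in-memory cache to FileCache.csv under the write lock. The previous file is
    /// kept as a .bak copy until the new content has been written; if writing fails, the content is
    /// written to the .bak path instead so it can be recovered on the next start.
    /// </summary>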
    public void WriteOutFullCsv()
    {
        lock (_fileWriteLock)
        {
            StringBuilder sb = new();
            sb.AppendLine(BuildVersionHeader());
            foreach (var entry in _fileCaches.Values.SelectMany(k => k.Values).OrderBy(f => f.PrefixedFilePath, StringComparer.OrdinalIgnoreCase))
            {
                sb.AppendLine(entry.CsvEntry);
            }

            if (File.Exists(_csvPath))
            {
                File.Copy(_csvPath, CsvBakPath, overwrite: true);
            }

            try
            {
                File.WriteAllText(_csvPath, sb.ToString());
                File.Delete(CsvBakPath);
            }
            catch
            {
                File.WriteAllText(CsvBakPath, sb.ToString());
            }
        }
    }
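
    /// <summary>
    /// Ensures the CSV on disk starts with the current version header, rewriting the file with a fresh
    /// header when it is missing or outdated. Callers are expected to hold <see cref="_fileWriteLock"/>.
    /// </summary>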
    private void EnsureCsvHeaderLocked()
    {
        if (!File.Exists(_csvPath))
        {
            return;
        }

        string[] existingLines = File.ReadAllLines(_csvPath);
        if (existingLines.Length > 0 && TryParseVersionHeader(existingLines[0], out var existingVersion) && existingVersion == FileCacheVersion)
        {
            _csvHeaderEnsured = true;
            return;
        }

        StringBuilder rebuilt = new();
        rebuilt.AppendLine(BuildVersionHeader());
        foreach (var line in existingLines)
        {
            if (TryParseVersionHeader(line, out _))
            {
                continue;
            }

            if (!string.IsNullOrEmpty(line))
            {
                rebuilt.AppendLine(line);
            }
        }

        File.WriteAllText(_csvPath, rebuilt.ToString());
        _csvHeaderEnsured = true;
    }

    private void EnsureCsvHeaderLockedCached()
    {
        if (_csvHeaderEnsured)
        {
            return;
        }

        EnsureCsvHeaderLocked();
        _csvHeaderEnsured = true;
    }

    private void BackupUnsupportedCache(string suffix)
    {
        var sanitizedSuffix = string.IsNullOrWhiteSpace(suffix) ? "unsupported" : $"{suffix}.unsupported";
        var backupPath = _csvPath + "." + sanitizedSuffix;

        try
        {
            File.Move(_csvPath, backupPath, overwrite: true);
            _logger.LogWarning("Backed up unsupported file cache to {path}", backupPath);
        }
        catch (Exception ex)
        {
            _logger.LogWarning(ex, "Failed to back up unsupported file cache to {path}", backupPath);
        }
    }
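
    /// <summary>
    /// Renames a cache file on disk to carry the given extension and re-registers it under the new
    /// prefixed path. On failure the original entity is restored in the lookup tables.
    /// </summary>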
    internal FileCacheEntity MigrateFileHashToExtension(FileCacheEntity fileCache, string ext)
    {
        try
        {
            RemoveHashedFile(fileCache.Hash, fileCache.PrefixedFilePath);
            var extensionPath = fileCache.ResolvedFilepath.ToUpper(CultureInfo.InvariantCulture) + "." + ext;
            File.Move(fileCache.ResolvedFilepath, extensionPath, overwrite: true);
            var newHashedEntity = new FileCacheEntity(fileCache.Hash, fileCache.PrefixedFilePath + "." + ext, DateTime.UtcNow.Ticks.ToString(CultureInfo.InvariantCulture));
            newHashedEntity.SetResolvedFilePath(extensionPath);
            AddHashedFile(newHashedEntity);
            _logger.LogTrace("Migrated from {oldPath} to {newPath}", fileCache.ResolvedFilepath, newHashedEntity.ResolvedFilepath);
            return newHashedEntity;
        }
        catch (Exception ex)
        {
            AddHashedFile(fileCache);
            _logger.LogWarning(ex, "Failed to migrate entity {entity}", fileCache.PrefixedFilePath);
            return fileCache;
        }
    }
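
    /// <summary>
    /// Registers an entity in both lookup dictionaries, keyed by hash and by normalized prefixed path.
    /// </summary>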
    private void AddHashedFile(FileCacheEntity fileCache)
    {
        var normalizedPath = NormalizePrefixedPathKey(fileCache.PrefixedFilePath);
        var entries = _fileCaches.GetOrAdd(fileCache.Hash, _ => new ConcurrentDictionary<string, FileCacheEntity>(StringComparer.OrdinalIgnoreCase));

        entries[normalizedPath] = fileCache;
        _fileCachesByPrefixedPath[normalizedPath] = fileCache;
    }

    private FileCacheEntity? CreateFileCacheEntity(FileInfo fileInfo, string prefixedPath, string? hash = null)
    {
        hash ??= Crypto.GetFileHash(fileInfo.FullName);
        var entity = new FileCacheEntity(hash, prefixedPath, fileInfo.LastWriteTimeUtc.Ticks.ToString(CultureInfo.InvariantCulture), fileInfo.Length);
        entity = ReplacePathPrefixes(entity);
        AddHashedFile(entity);
        lock (_fileWriteLock)
        {
            if (!File.Exists(_csvPath))
            {
                File.WriteAllLines(_csvPath, new[] { BuildVersionHeader(), entity.CsvEntry });
                _csvHeaderEnsured = true;
            }
            else
            {
                EnsureCsvHeaderLockedCached();
                File.AppendAllLines(_csvPath, new[] { entity.CsvEntry });
            }
        }
        var result = GetFileCacheByPath(fileInfo.FullName);
        _logger.LogTrace("Creating cache entity for {name} success: {success}", fileInfo.FullName, (result != null));
        return result;
    }

    private FileCacheEntity? GetValidatedFileCache(FileCacheEntity fileCache)
    {
        var resultingFileCache = ReplacePathPrefixes(fileCache);
        //_logger.LogTrace("Validating {path}", fileCache.PrefixedFilePath);
        resultingFileCache = Validate(resultingFileCache);
        return resultingFileCache;
    }

    private FileCacheEntity ReplacePathPrefixes(FileCacheEntity fileCache)
    {
        if (fileCache.PrefixedFilePath.StartsWith(PenumbraPrefix, StringComparison.OrdinalIgnoreCase))
        {
            fileCache.SetResolvedFilePath(fileCache.PrefixedFilePath.Replace(PenumbraPrefix, _ipcManager.Penumbra.ModDirectory, StringComparison.Ordinal));
        }
        else if (fileCache.PrefixedFilePath.StartsWith(CachePrefix, StringComparison.OrdinalIgnoreCase))
        {
            fileCache.SetResolvedFilePath(fileCache.PrefixedFilePath.Replace(CachePrefix, _configService.Current.CacheFolder, StringComparison.Ordinal));
        }

        return fileCache;
    }

    private FileCacheEntity? Validate(FileCacheEntity fileCache)
    {
        if (string.IsNullOrWhiteSpace(fileCache.ResolvedFilepath))
        {
            _logger.LogWarning("FileCacheEntity has empty ResolvedFilepath for hash {hash}, prefixed path {prefixed}", fileCache.Hash, fileCache.PrefixedFilePath);
            RemoveHashedFile(fileCache.Hash, fileCache.PrefixedFilePath);
            return null;
        }
        var file = new FileInfo(fileCache.ResolvedFilepath);
        if (!file.Exists)
        {
            RemoveHashedFile(fileCache.Hash, fileCache.PrefixedFilePath);
            return null;
        }

        if (!string.Equals(file.LastWriteTimeUtc.Ticks.ToString(CultureInfo.InvariantCulture), fileCache.LastModifiedDateTicks, StringComparison.Ordinal))
        {
            UpdateHashedFile(fileCache);
        }

        return fileCache;
    }
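
    /// <summary>
    /// Restores a leftover .bak file if the previous write was interrupted, then loads FileCache.csv,
    /// validates its version header, and populates the in-memory lookups. Incompatible caches are
    /// backed up and skipped; the CSV is rewritten when the header or any entry needs repair.
    /// </summary>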
    public Task StartAsync(CancellationToken cancellationToken)
    {
        _logger.LogInformation("Starting FileCacheManager");

        lock (_fileWriteLock)
        {
            try
            {
                _logger.LogInformation("Checking for {bakPath}", CsvBakPath);

                if (File.Exists(CsvBakPath))
                {
                    _logger.LogInformation("{bakPath} found, moving to {csvPath}", CsvBakPath, _csvPath);

                    File.Move(CsvBakPath, _csvPath, overwrite: true);
                }
            }
            catch (Exception ex)
            {
                _logger.LogWarning(ex, "Failed to move BAK to ORG, deleting BAK");
                try
                {
                    if (File.Exists(CsvBakPath))
                        File.Delete(CsvBakPath);
                }
                catch (Exception ex1)
                {
                    _logger.LogWarning(ex1, "Could not delete bak file");
                }
            }
        }

        if (File.Exists(_csvPath))
        {
            if (!_ipcManager.Penumbra.APIAvailable || string.IsNullOrEmpty(_ipcManager.Penumbra.ModDirectory))
            {
                _lightlessMediator.Publish(new NotificationMessage("Penumbra not connected",
                    "Could not load local file cache data. Penumbra is not connected or not properly set up. Please enable and/or configure Penumbra properly to use Lightless. Afterwards, reload Lightless in the Plugin installer.",
                    LightlessConfiguration.Models.NotificationType.Error));
            }

            _logger.LogInformation("{csvPath} found, parsing", _csvPath);

            bool success = false;
            string[] entries = [];
            int attempts = 0;
            while (!success && attempts < 10)
            {
                try
                {
                    _logger.LogInformation("Attempting to read {csvPath}", _csvPath);
                    entries = File.ReadAllLines(_csvPath);
                    success = true;
                }
                catch (Exception ex)
                {
                    attempts++;
                    _logger.LogWarning(ex, "Could not open {file}, trying again", _csvPath);
                    // wait briefly before the next attempt; StartAsync runs synchronously, so block here
                    Task.Delay(100, cancellationToken).GetAwaiter().GetResult();
                }
            }

            if (!entries.Any())
            {
                _logger.LogWarning("Could not load entries from {path}, continuing with empty file cache", _csvPath);
            }

            bool rewriteRequired = false;
            bool parseEntries = entries.Length > 0;
            int startIndex = 0;

            if (entries.Length > 0)
            {
                var headerLine = entries[0];
                var hasHeader = !string.IsNullOrEmpty(headerLine) &&
                                headerLine.StartsWith(FileCacheVersionHeaderPrefix, StringComparison.OrdinalIgnoreCase);

                if (hasHeader)
                {
                    if (!TryParseVersionHeader(headerLine, out var parsedVersion))
                    {
                        _logger.LogWarning("Failed to parse file cache version header \"{header}\". Backing up existing cache.", headerLine);
                        BackupUnsupportedCache("invalid-version");
                        parseEntries = false;
                        rewriteRequired = true;
                        entries = Array.Empty<string>();
                    }
                    else if (parsedVersion != FileCacheVersion)
                    {
                        _logger.LogWarning("Unsupported file cache version {version} detected (expected {expected}). Backing up existing cache.", parsedVersion, FileCacheVersion);
                        BackupUnsupportedCache($"v{parsedVersion}");
                        parseEntries = false;
                        rewriteRequired = true;
                        entries = Array.Empty<string>();
                    }
                    else
                    {
                        startIndex = 1;
                    }
                }
                else
                {
                    _logger.LogInformation("File cache missing version header, scheduling rewrite.");
                    rewriteRequired = true;
                }
            }

            var totalEntries = Math.Max(0, entries.Length - startIndex);
            Dictionary<string, bool> processedFiles = new(StringComparer.OrdinalIgnoreCase);

            if (parseEntries && totalEntries > 0)
            {
                _logger.LogInformation("Found {amount} files in {path}", totalEntries, _csvPath);

                for (var index = startIndex; index < entries.Length; index++)
                {
                    var entry = entries[index];
                    if (string.IsNullOrWhiteSpace(entry))
                    {
                        continue;
                    }

                    var splittedEntry = entry.Split(CsvSplit, StringSplitOptions.None);
                    try
                    {
                        var hash = splittedEntry[0];
                        if (hash.Length != 40)
                            throw new InvalidOperationException("Expected Hash length of 40, received " + hash.Length);
                        var path = splittedEntry[1];
                        var time = splittedEntry[2];

                        if (processedFiles.ContainsKey(path))
                        {
                            _logger.LogWarning("Already processed {file}, ignoring", path);
                            continue;
                        }

                        processedFiles.Add(path, value: true);

                        long size = -1;
                        long compressed = -1;
                        if (splittedEntry.Length > 3)
                        {
                            if (long.TryParse(splittedEntry[3], CultureInfo.InvariantCulture, out long result))
                            {
                                size = result;
                            }
                            if (splittedEntry.Length > 4 &&
                                long.TryParse(splittedEntry[4], CultureInfo.InvariantCulture, out long resultCompressed))
                            {
                                compressed = resultCompressed;
                            }
                        }
                        AddHashedFile(ReplacePathPrefixes(new FileCacheEntity(hash, path, time, size, compressed)));
                    }
                    catch (Exception ex)
                    {
                        _logger.LogWarning(ex, "Failed to initialize entry {entry}, ignoring", entry);
                    }
                }

                if (processedFiles.Count != totalEntries)
                {
                    rewriteRequired = true;
                }
            }
            else if (!parseEntries && entries.Length > 0)
            {
                _logger.LogInformation("Skipping existing file cache entries due to incompatible version.");
            }

            if (rewriteRequired)
            {
                WriteOutFullCsv();
            }
        }

        _logger.LogInformation("Started FileCacheManager");

        return Task.CompletedTask;
    }

    public Task StopAsync(CancellationToken cancellationToken)
    {
        WriteOutFullCsv();
        return Task.CompletedTask;
    }
}