2.0.0 (#92)

2.0.0 Changes:

- Reworked the shell finder UI with compact and list views; profile tags are now shown directly in the listing, and moderators can broadcast their syncshell to give it more visibility.
- Reworked the user list in the syncshell admin screen: the filter is now always visible, the table layout has been replaced with a dedicated list, and clicking a name lets you copy the UID, note, or alias.
- Reworked the download bars and download box with a more modern look and removed the jitter, so they no longer vibrate around.
- Chat has been added to the top menu and can be used in the current zone or in syncshells.
- The pairing system has been revamped: pausing and unpausing is faster, and people should load in faster as well.
- Moved to the internal object table for faster load times; people should load in more quickly.
- The compactor now runs multi-threaded instead of single-threaded, which should speed up compacting files (a rough sketch of the approach follows this list).
- The nameplate service has been reworked so it no longer uses the nameplate handler.
- Files can be resized when downloading to reduce load on users if they aren't compressed (this can be toggled to resize all files).
- Penumbra collections are now only created when people are visible, reducing boot-up load when you have many syncshells in your list.
- Lightfinder plates no longer use nameplates and are drawn as an overlay instead.
- The main UI has been updated with a gradient and now glows on hover.
- Reworked the profile UI for syncshells and users to be more user-friendly, with more customizable items.
- Reworked the settings UI to look more modern.
- Performance should be better thanks to new systems that dispose of collections when no longer needed, plus better caching of items.
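
A rough sketch of the multi-threaded compactor approach mentioned above: bounded parallel compaction over the cache folder, skipping files that are busy. This is illustrative only and uses a hypothetical `CompactFile` helper standing in for the platform-specific NTFS/btrfs compression step; it is not the plugin's actual implementation (the real one is in the FileCompactor diff further down).

```csharp
// Illustrative sketch only (hypothetical helper names, not Lightless Sync's actual code):
// compact every file in a cache folder with a bounded degree of parallelism,
// skipping files that are currently being read or written.
using System;
using System.Collections.Concurrent;
using System.IO;
using System.Threading;
using System.Threading.Tasks;

static class ParallelCompactionSketch
{
    public static void CompactAll(string cacheFolder, CancellationToken token)
    {
        var files = Directory.GetFiles(cacheFolder);
        var pending = new ConcurrentDictionary<string, byte>(StringComparer.OrdinalIgnoreCase);
        int done = 0;

        var options = new ParallelOptions
        {
            // roughly half the cores, clamped to a sane range
            MaxDegreeOfParallelism = Math.Clamp(Environment.ProcessorCount / 2, 1, 8),
            CancellationToken = token,
        };

        Parallel.ForEach(files, options, file =>
        {
            if (!pending.TryAdd(file, 0)) return; // another worker already has this file
            try
            {
                CompactFile(file); // placeholder for the NTFS/btrfs compression call
            }
            catch (IOException)
            {
                // file is locked or busy; skip it instead of failing the whole pass
            }
            finally
            {
                pending.TryRemove(file, out _);
                int n = Interlocked.Increment(ref done);
                Console.WriteLine($"compacted {n}/{files.Length}");
            }
        });
    }

    private static void CompactFile(string path)
    {
        // platform-specific compression would go here
    }
}
```

The actual implementation in the diff below additionally gates the work behind semaphores and reports progress through the existing `Progress` property.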

Co-authored-by: defnotken <itsdefnotken@gmail.com>
Co-authored-by: azyges <aaaaaa@aaa.aaa>
Co-authored-by: choco <choco@patat.nl>
Co-authored-by: cake <admin@cakeandbanana.nl>
Co-authored-by: Minmoose <KennethBohr@outlook.com>
Reviewed-on: #92
2025-12-21 17:19:34 +00:00
parent 906f401940
commit 835a0a637d
191 changed files with 32636 additions and 8841 deletions


@@ -6,6 +6,7 @@ using LightlessSync.Utils;
using Microsoft.Extensions.Logging;
using System.Collections.Concurrent;
using System.Collections.Immutable;
using System.IO;
namespace LightlessSync.FileCache;
@@ -21,6 +22,7 @@ public sealed class CacheMonitor : DisposableMediatorSubscriberBase
private CancellationTokenSource _scanCancellationTokenSource = new();
private readonly CancellationTokenSource _periodicCalculationTokenSource = new();
public static readonly IImmutableList<string> AllowedFileExtensions = [".mdl", ".tex", ".mtrl", ".tmb", ".pap", ".avfx", ".atex", ".sklb", ".eid", ".phyb", ".pbd", ".scd", ".skp", ".shpk", ".kdb"];
private static readonly HashSet<string> AllowedFileExtensionSet = new(AllowedFileExtensions, StringComparer.OrdinalIgnoreCase);
public CacheMonitor(ILogger<CacheMonitor> logger, IpcManager ipcManager, LightlessConfigService configService,
FileCacheManager fileDbManager, LightlessMediator mediator, PerformanceCollectorService performanceCollector, DalamudUtilService dalamudUtil,
@@ -72,7 +74,7 @@ public sealed class CacheMonitor : DisposableMediatorSubscriberBase
{
while (_dalamudUtil.IsOnFrameworkThread && !token.IsCancellationRequested)
{
await Task.Delay(1).ConfigureAwait(false);
await Task.Delay(1, token).ConfigureAwait(false);
}
RecalculateFileCacheSize(token);
@@ -101,8 +103,8 @@ public sealed class CacheMonitor : DisposableMediatorSubscriberBase
}
record WatcherChange(WatcherChangeTypes ChangeType, string? OldPath = null);
private readonly Dictionary<string, WatcherChange> _watcherChanges = new Dictionary<string, WatcherChange>(StringComparer.OrdinalIgnoreCase);
private readonly Dictionary<string, WatcherChange> _lightlessChanges = new Dictionary<string, WatcherChange>(StringComparer.OrdinalIgnoreCase);
private readonly Dictionary<string, WatcherChange> _watcherChanges = new(StringComparer.OrdinalIgnoreCase);
private readonly Dictionary<string, WatcherChange> _lightlessChanges = new(StringComparer.OrdinalIgnoreCase);
public void StopMonitoring()
{
@@ -128,7 +130,7 @@ public sealed class CacheMonitor : DisposableMediatorSubscriberBase
}
var fsType = FileSystemHelper.GetFilesystemType(_configService.Current.CacheFolder, _dalamudUtil.IsWine);
if (fsType == FileSystemHelper.FilesystemType.NTFS)
if (fsType == FileSystemHelper.FilesystemType.NTFS && !_dalamudUtil.IsWine)
{
StorageisNTFS = true;
Logger.LogInformation("Lightless Storage is on NTFS drive: {isNtfs}", StorageisNTFS);
@@ -163,7 +165,7 @@ public sealed class CacheMonitor : DisposableMediatorSubscriberBase
{
Logger.LogTrace("Lightless FSW: FileChanged: {change} => {path}", e.ChangeType, e.FullPath);
if (!AllowedFileExtensions.Any(ext => e.FullPath.EndsWith(ext, StringComparison.OrdinalIgnoreCase))) return;
if (!HasAllowedExtension(e.FullPath)) return;
lock (_watcherChanges)
{
@@ -207,7 +209,7 @@ public sealed class CacheMonitor : DisposableMediatorSubscriberBase
private void Fs_Changed(object sender, FileSystemEventArgs e)
{
if (Directory.Exists(e.FullPath)) return;
if (!AllowedFileExtensions.Any(ext => e.FullPath.EndsWith(ext, StringComparison.OrdinalIgnoreCase))) return;
if (!HasAllowedExtension(e.FullPath)) return;
if (e.ChangeType is not (WatcherChangeTypes.Changed or WatcherChangeTypes.Deleted or WatcherChangeTypes.Created))
return;
@@ -231,7 +233,7 @@ public sealed class CacheMonitor : DisposableMediatorSubscriberBase
{
foreach (var file in directoryFiles)
{
if (!AllowedFileExtensions.Any(ext => file.EndsWith(ext, StringComparison.OrdinalIgnoreCase))) continue;
if (!HasAllowedExtension(file)) continue;
var oldPath = file.Replace(e.FullPath, e.OldFullPath, StringComparison.OrdinalIgnoreCase);
_watcherChanges.Remove(oldPath);
@@ -243,7 +245,7 @@ public sealed class CacheMonitor : DisposableMediatorSubscriberBase
}
else
{
if (!AllowedFileExtensions.Any(ext => e.FullPath.EndsWith(ext, StringComparison.OrdinalIgnoreCase))) return;
if (!HasAllowedExtension(e.FullPath)) return;
lock (_watcherChanges)
{
@@ -259,9 +261,21 @@ public sealed class CacheMonitor : DisposableMediatorSubscriberBase
private CancellationTokenSource _penumbraFswCts = new();
private CancellationTokenSource _lightlessFswCts = new();
public FileSystemWatcher? PenumbraWatcher { get; private set; }
public FileSystemWatcher? LightlessWatcher { get; private set; }
private static bool HasAllowedExtension(string path)
{
if (string.IsNullOrEmpty(path))
{
return false;
}
var extension = Path.GetExtension(path);
return !string.IsNullOrEmpty(extension) && AllowedFileExtensionSet.Contains(extension);
}
private async Task LightlessWatcherExecution()
{
_lightlessFswCts = _lightlessFswCts.CancelRecreate();
@@ -469,6 +483,52 @@ public sealed class CacheMonitor : DisposableMediatorSubscriberBase
FileCacheSize = totalSize;
if (Directory.Exists(_configService.Current.CacheFolder + "/downscaled"))
{
var filesDownscaled = Directory.EnumerateFiles(_configService.Current.CacheFolder + "/downscaled").Select(f => new FileInfo(f)).OrderBy(f => f.LastAccessTime).ToList();
long totalSizeDownscaled = 0;
foreach (var f in filesDownscaled)
{
token.ThrowIfCancellationRequested();
try
{
long size = 0;
if (!isWine)
{
try
{
size = _fileCompactor.GetFileSizeOnDisk(f);
}
catch (Exception ex)
{
Logger.LogTrace(ex, "GetFileSizeOnDisk failed for {file}, using fallback length", f.FullName);
size = f.Length;
}
}
else
{
size = f.Length;
}
totalSizeDownscaled += size;
}
catch (Exception ex)
{
Logger.LogTrace(ex, "Error getting size for {file}", f.FullName);
}
}
FileCacheSize = (totalSize + totalSizeDownscaled);
}
else
{
FileCacheSize = totalSize;
}
var maxCacheInBytes = (long)(_configService.Current.MaxLocalCacheInGiB * 1024d * 1024d * 1024d);
if (FileCacheSize < maxCacheInBytes)
return;
@@ -510,12 +570,19 @@ public sealed class CacheMonitor : DisposableMediatorSubscriberBase
protected override void Dispose(bool disposing)
{
base.Dispose(disposing);
_scanCancellationTokenSource?.Cancel();
// Disposing of file system watchers
PenumbraWatcher?.Dispose();
LightlessWatcher?.Dispose();
// Disposing of cancellation token sources
_scanCancellationTokenSource?.CancelDispose();
_scanCancellationTokenSource?.Dispose();
_penumbraFswCts?.CancelDispose();
_penumbraFswCts?.Dispose();
_lightlessFswCts?.CancelDispose();
_lightlessFswCts?.Dispose();
_periodicCalculationTokenSource?.CancelDispose();
_periodicCalculationTokenSource?.Dispose();
}
private void FullFileScan(CancellationToken ct)
@@ -552,7 +619,7 @@ public sealed class CacheMonitor : DisposableMediatorSubscriberBase
[
.. Directory.GetFiles(folder, "*.*", SearchOption.AllDirectories)
.AsParallel()
.Where(f => AllowedFileExtensions.Any(e => f.EndsWith(e, StringComparison.OrdinalIgnoreCase))
.Where(f => HasAllowedExtension(f)
&& !f.Contains(@"\bg\", StringComparison.OrdinalIgnoreCase)
&& !f.Contains(@"\bgcommon\", StringComparison.OrdinalIgnoreCase)
&& !f.Contains(@"\ui\", StringComparison.OrdinalIgnoreCase)),
@@ -593,7 +660,7 @@ public sealed class CacheMonitor : DisposableMediatorSubscriberBase
List<FileCacheEntity> entitiesToRemove = [];
List<FileCacheEntity> entitiesToUpdate = [];
object sync = new();
Lock sync = new();
Thread[] workerThreads = new Thread[threadCount];
ConcurrentQueue<FileCacheEntity> fileCaches = new(_fileDbManager.GetAllFileCaches());


@@ -18,6 +18,7 @@ public sealed class FileCacheManager : IHostedService
public const string PenumbraPrefix = "{penumbra}";
private const int FileCacheVersion = 1;
private const string FileCacheVersionHeaderPrefix = "#lightless-file-cache-version:";
private readonly SemaphoreSlim _fileWriteSemaphore = new(1, 1);
private readonly LightlessConfigService _configService;
private readonly LightlessMediator _lightlessMediator;
private readonly string _csvPath;
@@ -41,11 +42,8 @@ public sealed class FileCacheManager : IHostedService
private string CsvBakPath => _csvPath + ".bak";
private static string NormalizeSeparators(string path)
{
return path.Replace("/", "\\", StringComparison.Ordinal)
private static string NormalizeSeparators(string path) => path.Replace("/", "\\", StringComparison.Ordinal)
.Replace("\\\\", "\\", StringComparison.Ordinal);
}
private static string NormalizePrefixedPathKey(string prefixedPath)
{
@@ -134,13 +132,9 @@ public sealed class FileCacheManager : IHostedService
chosenLength = penumbraMatch;
}
if (TryBuildPrefixedPath(normalized, _configService.Current.CacheFolder, CachePrefix, out var cachePrefixed, out var cacheMatch))
if (TryBuildPrefixedPath(normalized, _configService.Current.CacheFolder, CachePrefix, out var cachePrefixed, out var cacheMatch) && cacheMatch > chosenLength)
{
if (cacheMatch > chosenLength)
{
chosenPrefixed = cachePrefixed;
chosenLength = cacheMatch;
}
chosenPrefixed = cachePrefixed;
}
return NormalizePrefixedPathKey(chosenPrefixed ?? normalized);
@@ -176,27 +170,53 @@ public sealed class FileCacheManager : IHostedService
return CreateFileCacheEntity(fi, prefixedPath);
}
public List<FileCacheEntity> GetAllFileCaches() => _fileCaches.Values.SelectMany(v => v.Values.Where(e => e != null)).ToList();
public List<FileCacheEntity> GetAllFileCaches() => [.. _fileCaches.Values.SelectMany(v => v.Values.Where(e => e != null))];
public List<FileCacheEntity> GetAllFileCachesByHash(string hash, bool ignoreCacheEntries = false, bool validate = true)
{
List<FileCacheEntity> output = [];
if (_fileCaches.TryGetValue(hash, out var fileCacheEntities))
var output = new List<FileCacheEntity>();
if (!_fileCaches.TryGetValue(hash, out var fileCacheEntities))
return output;
foreach (var fileCache in fileCacheEntities.Values
.Where(c => !ignoreCacheEntries || !c.IsCacheEntry))
{
foreach (var fileCache in fileCacheEntities.Values.Where(c => !ignoreCacheEntries || !c.IsCacheEntry).ToList())
if (!validate)
{
if (!validate)
{
output.Add(fileCache);
}
else
{
var validated = GetValidatedFileCache(fileCache);
if (validated != null)
{
output.Add(validated);
}
}
output.Add(fileCache);
continue;
}
var validated = GetValidatedFileCache(fileCache);
if (validated != null)
output.Add(validated);
}
return output;
}
public async Task<List<FileCacheEntity>> GetAllFileCachesByHashAsync(string hash, bool ignoreCacheEntries = false, bool validate = true,CancellationToken token = default)
{
var output = new List<FileCacheEntity>();
if (!_fileCaches.TryGetValue(hash, out var fileCacheEntities))
return output;
foreach (var fileCache in fileCacheEntities.Values.Where(c => !ignoreCacheEntries || !c.IsCacheEntry))
{
token.ThrowIfCancellationRequested();
if (!validate)
{
output.Add(fileCache);
}
else
{
var validated = await GetValidatedFileCacheAsync(fileCache, token).ConfigureAwait(false);
if (validated != null)
output.Add(validated);
}
}
@@ -238,10 +258,11 @@ public sealed class FileCacheManager : IHostedService
return;
}
var algo = Crypto.DetectAlgo(fileCache.Hash);
string computedHash;
try
{
computedHash = await Crypto.GetFileHashAsync(fileCache.ResolvedFilepath, token).ConfigureAwait(false);
computedHash = await Crypto.ComputeFileHashAsync(fileCache.ResolvedFilepath, Crypto.HashAlgo.Sha1, token).ConfigureAwait(false);
}
catch (Exception ex)
{
@@ -253,8 +274,8 @@ public sealed class FileCacheManager : IHostedService
if (!string.Equals(computedHash, fileCache.Hash, StringComparison.Ordinal))
{
_logger.LogInformation(
"Hash mismatch: {file} (got {computedHash}, expected {expected})",
fileCache.ResolvedFilepath, computedHash, fileCache.Hash);
"Hash mismatch: {file} (got {computedHash}, expected {expected} : hash {hash})",
fileCache.ResolvedFilepath, computedHash, fileCache.Hash, algo);
brokenEntities.Add(fileCache);
}
@@ -434,7 +455,7 @@ public sealed class FileCacheManager : IHostedService
var fi = new FileInfo(fileCache.ResolvedFilepath);
fileCache.Size = fi.Length;
fileCache.CompressedSize = null;
fileCache.Hash = Crypto.GetFileHash(fileCache.ResolvedFilepath);
fileCache.Hash = Crypto.ComputeFileHash(fileCache.ResolvedFilepath, Crypto.HashAlgo.Sha1);
fileCache.LastModifiedDateTicks = fi.LastWriteTimeUtc.Ticks.ToString(CultureInfo.InvariantCulture);
}
RemoveHashedFile(oldHash, prefixedPath);
@@ -485,6 +506,44 @@ public sealed class FileCacheManager : IHostedService
}
}
public async Task WriteOutFullCsvAsync(CancellationToken cancellationToken = default)
{
await _fileWriteSemaphore.WaitAsync(cancellationToken).ConfigureAwait(false);
try
{
var sb = new StringBuilder();
sb.AppendLine(BuildVersionHeader());
foreach (var entry in _fileCaches.Values
.SelectMany(k => k.Values)
.OrderBy(f => f.PrefixedFilePath, StringComparer.OrdinalIgnoreCase))
{
sb.AppendLine(entry.CsvEntry);
}
if (File.Exists(_csvPath))
{
File.Copy(_csvPath, CsvBakPath, overwrite: true);
}
try
{
await File.WriteAllTextAsync(_csvPath, sb.ToString(), cancellationToken).ConfigureAwait(false);
File.Delete(CsvBakPath);
}
catch
{
await File.WriteAllTextAsync(CsvBakPath, sb.ToString(), cancellationToken).ConfigureAwait(false);
}
}
finally
{
_fileWriteSemaphore.Release();
}
}
private void EnsureCsvHeaderLocked()
{
if (!File.Exists(_csvPath))
@@ -577,7 +636,7 @@ public sealed class FileCacheManager : IHostedService
private FileCacheEntity? CreateFileCacheEntity(FileInfo fileInfo, string prefixedPath, string? hash = null)
{
hash ??= Crypto.GetFileHash(fileInfo.FullName);
hash ??= Crypto.ComputeFileHash(fileInfo.FullName, Crypto.HashAlgo.Sha1);
var entity = new FileCacheEntity(hash, prefixedPath, fileInfo.LastWriteTimeUtc.Ticks.ToString(CultureInfo.InvariantCulture), fileInfo.Length);
entity = ReplacePathPrefixes(entity);
AddHashedFile(entity);
@@ -585,13 +644,13 @@ public sealed class FileCacheManager : IHostedService
{
if (!File.Exists(_csvPath))
{
File.WriteAllLines(_csvPath, new[] { BuildVersionHeader(), entity.CsvEntry });
File.WriteAllLines(_csvPath, [BuildVersionHeader(), entity.CsvEntry]);
_csvHeaderEnsured = true;
}
else
{
EnsureCsvHeaderLockedCached();
File.AppendAllLines(_csvPath, new[] { entity.CsvEntry });
File.AppendAllLines(_csvPath, [entity.CsvEntry]);
}
}
var result = GetFileCacheByPath(fileInfo.FullName);
@@ -602,11 +661,17 @@ public sealed class FileCacheManager : IHostedService
private FileCacheEntity? GetValidatedFileCache(FileCacheEntity fileCache)
{
var resultingFileCache = ReplacePathPrefixes(fileCache);
//_logger.LogTrace("Validating {path}", fileCache.PrefixedFilePath);
resultingFileCache = Validate(resultingFileCache);
return resultingFileCache;
}
private async Task<FileCacheEntity?> GetValidatedFileCacheAsync(FileCacheEntity fileCache, CancellationToken token = default)
{
var resultingFileCache = ReplacePathPrefixes(fileCache);
resultingFileCache = await ValidateAsync(resultingFileCache, token).ConfigureAwait(false);
return resultingFileCache;
}
private FileCacheEntity ReplacePathPrefixes(FileCacheEntity fileCache)
{
if (fileCache.PrefixedFilePath.StartsWith(PenumbraPrefix, StringComparison.OrdinalIgnoreCase))
@@ -629,6 +694,7 @@ public sealed class FileCacheManager : IHostedService
RemoveHashedFile(fileCache.Hash, fileCache.PrefixedFilePath);
return null;
}
var file = new FileInfo(fileCache.ResolvedFilepath);
if (!file.Exists)
{
@@ -636,7 +702,8 @@ public sealed class FileCacheManager : IHostedService
return null;
}
if (!string.Equals(file.LastWriteTimeUtc.Ticks.ToString(CultureInfo.InvariantCulture), fileCache.LastModifiedDateTicks, StringComparison.Ordinal))
var lastWriteTicks = file.LastWriteTimeUtc.Ticks.ToString(CultureInfo.InvariantCulture);
if (!string.Equals(lastWriteTicks, fileCache.LastModifiedDateTicks, StringComparison.Ordinal))
{
UpdateHashedFile(fileCache);
}
@@ -644,7 +711,34 @@ public sealed class FileCacheManager : IHostedService
return fileCache;
}
public Task StartAsync(CancellationToken cancellationToken)
private async Task<FileCacheEntity?> ValidateAsync(FileCacheEntity fileCache, CancellationToken token)
{
if (string.IsNullOrWhiteSpace(fileCache.ResolvedFilepath))
{
_logger.LogWarning("FileCacheEntity has empty ResolvedFilepath for hash {hash}, prefixed path {prefixed}", fileCache.Hash, fileCache.PrefixedFilePath);
RemoveHashedFile(fileCache.Hash, fileCache.PrefixedFilePath);
return null;
}
return await Task.Run(() =>
{
var file = new FileInfo(fileCache.ResolvedFilepath);
if (!file.Exists)
{
RemoveHashedFile(fileCache.Hash, fileCache.PrefixedFilePath);
return null;
}
if (!string.Equals(file.LastWriteTimeUtc.Ticks.ToString(CultureInfo.InvariantCulture), fileCache.LastModifiedDateTicks, StringComparison.Ordinal))
{
UpdateHashedFile(fileCache);
}
return fileCache;
}, token).ConfigureAwait(false);
}
public async Task StartAsync(CancellationToken cancellationToken)
{
_logger.LogInformation("Starting FileCacheManager");
@@ -695,14 +789,14 @@ public sealed class FileCacheManager : IHostedService
try
{
_logger.LogInformation("Attempting to read {csvPath}", _csvPath);
entries = File.ReadAllLines(_csvPath);
entries = await File.ReadAllLinesAsync(_csvPath, cancellationToken).ConfigureAwait(false);
success = true;
}
catch (Exception ex)
{
attempts++;
_logger.LogWarning(ex, "Could not open {file}, trying again", _csvPath);
Task.Delay(100, cancellationToken);
await Task.Delay(100, cancellationToken).ConfigureAwait(false);
}
}
@@ -729,7 +823,7 @@ public sealed class FileCacheManager : IHostedService
BackupUnsupportedCache("invalid-version");
parseEntries = false;
rewriteRequired = true;
entries = Array.Empty<string>();
entries = [];
}
else if (parsedVersion != FileCacheVersion)
{
@@ -737,7 +831,7 @@ public sealed class FileCacheManager : IHostedService
BackupUnsupportedCache($"v{parsedVersion}");
parseEntries = false;
rewriteRequired = true;
entries = Array.Empty<string>();
entries = [];
}
else
{
@@ -817,18 +911,18 @@ public sealed class FileCacheManager : IHostedService
if (rewriteRequired)
{
WriteOutFullCsv();
await WriteOutFullCsvAsync(cancellationToken).ConfigureAwait(false);
}
}
_logger.LogInformation("Started FileCacheManager");
return Task.CompletedTask;
_lightlessMediator.Publish(new FileCacheInitializedMessage());
await Task.CompletedTask.ConfigureAwait(false);
}
public Task StopAsync(CancellationToken cancellationToken)
public async Task StopAsync(CancellationToken cancellationToken)
{
WriteOutFullCsv();
return Task.CompletedTask;
await WriteOutFullCsvAsync(cancellationToken).ConfigureAwait(false);
await Task.CompletedTask.ConfigureAwait(false);
}
}


@@ -4,6 +4,7 @@ using LightlessSync.Services.Compactor;
using Microsoft.Extensions.Logging;
using Microsoft.Win32.SafeHandles;
using System.Collections.Concurrent;
using System.ComponentModel;
using System.Diagnostics;
using System.Runtime.InteropServices;
using System.Threading.Channels;
@@ -11,7 +12,7 @@ using static LightlessSync.Utils.FileSystemHelper;
namespace LightlessSync.FileCache;
public sealed class FileCompactor : IDisposable
public sealed partial class FileCompactor : IDisposable
{
public const uint FSCTL_DELETE_EXTERNAL_BACKING = 0x90314U;
public const ulong WOF_PROVIDER_FILE = 2UL;
@@ -29,23 +30,26 @@ public sealed class FileCompactor : IDisposable
private readonly SemaphoreSlim _globalGate;
//Limit btrfs gate on half of threads given to compactor.
private static readonly SemaphoreSlim _btrfsGate = new(4, 4);
private readonly SemaphoreSlim _btrfsGate;
private readonly BatchFilefragService _fragBatch;
private readonly WOF_FILE_COMPRESSION_INFO_V1 _efInfo = new()
private readonly bool _isWindows;
private readonly int _workerCount;
private readonly WofFileCompressionInfoV1 _efInfo = new()
{
Algorithm = (int)CompressionAlgorithm.XPRESS8K,
Flags = 0
};
[StructLayout(LayoutKind.Sequential, Pack = 1)]
private struct WOF_FILE_COMPRESSION_INFO_V1
private struct WofFileCompressionInfoV1
{
public int Algorithm;
public ulong Flags;
}
private enum CompressionAlgorithm
{
NO_COMPRESSION = -2,
LZNT1 = -1,
@@ -61,6 +65,7 @@ public sealed class FileCompactor : IDisposable
_logger = logger;
_lightlessConfigService = lightlessConfigService;
_dalamudUtilService = dalamudUtilService;
_isWindows = OperatingSystem.IsWindows();
_compactionQueue = Channel.CreateUnbounded<string>(new UnboundedChannelOptions
{
@@ -68,29 +73,36 @@ public sealed class FileCompactor : IDisposable
SingleWriter = false
});
//Amount of threads given for the compactor
int workers = Math.Clamp(Math.Min(Environment.ProcessorCount / 2, 4), 1, 8);
//Setup gates for the threads and setup worker count
_globalGate = new SemaphoreSlim(workers, workers);
int workerCount = Math.Max(workers * 2, workers);
_btrfsGate = new SemaphoreSlim(workers / 2, workers / 2);
_workerCount = Math.Max(workers * 2, workers);
for (int i = 0; i < workerCount; i++)
//Setup workers on the queue
for (int i = 0; i < _workerCount; i++)
{
int workerId = i;
_workers.Add(Task.Factory.StartNew(
() => ProcessQueueWorkerAsync(_compactionCts.Token),
() => ProcessQueueWorkerAsync(workerId, _compactionCts.Token),
_compactionCts.Token,
TaskCreationOptions.LongRunning,
TaskScheduler.Default).Unwrap());
}
//Uses an batching service for the filefrag command on Linux
_fragBatch = new BatchFilefragService(
useShell: _dalamudUtilService.IsWine,
log: _logger,
batchSize: 64,
flushMs: 25,
flushMs: 25,
runShell: RunProcessShell
);
_logger.LogInformation("FileCompactor started with {workers} workers", workerCount);
_logger.LogInformation("FileCompactor started with {workers} workers", _workerCount);
}
public bool MassCompactRunning { get; private set; }
@@ -100,37 +112,91 @@ public sealed class FileCompactor : IDisposable
/// Compact the storage of the Cache Folder
/// </summary>
/// <param name="compress">Used to check if files needs to be compressed</param>
public void CompactStorage(bool compress)
public void CompactStorage(bool compress, int? maxDegree = null)
{
MassCompactRunning = true;
try
{
var allFiles = Directory.EnumerateFiles(_lightlessConfigService.Current.CacheFolder).ToList();
int total = allFiles.Count;
int current = 0;
foreach (var file in allFiles)
var folder = _lightlessConfigService.Current.CacheFolder;
if (string.IsNullOrWhiteSpace(folder) || !Directory.Exists(folder))
{
current++;
Progress = $"{current}/{total}";
if (_logger.IsEnabled(LogLevel.Warning))
_logger.LogWarning("Filecompacator couldnt find your Cache folder: {folder}", folder);
Progress = "0/0";
return;
}
var files = Directory.EnumerateFiles(folder).ToArray();
var total = files.Length;
Progress = $"0/{total}";
if (total == 0) return;
var degree = maxDegree ?? Math.Clamp(Environment.ProcessorCount / 2, 1, 8);
var done = 0;
int workerCounter = -1;
var po = new ParallelOptions
{
MaxDegreeOfParallelism = degree,
CancellationToken = _compactionCts.Token
};
Parallel.ForEach(files, po, localInit: () => Interlocked.Increment(ref workerCounter), body: (file, state, workerId) =>
{
_globalGate.WaitAsync(po.CancellationToken).GetAwaiter().GetResult();
if (!_pendingCompactions.TryAdd(file, 0))
return -1;
try
{
// Compress or decompress files
if (compress)
CompactFile(file);
else
DecompressFile(file);
try
{
if (compress)
{
if (_lightlessConfigService.Current.UseCompactor)
CompactFile(file, workerId);
}
else
{
DecompressFile(file, workerId);
}
}
catch (IOException ioEx)
{
_logger.LogDebug(ioEx, "[W{worker}] File being read/written, skipping file: {file}", workerId, file);
}
catch (OperationCanceledException)
{
throw;
}
catch (Exception ex)
{
_logger.LogWarning(ex, "[W{worker}] Error processing file: {file}", workerId, file);
}
finally
{
var n = Interlocked.Increment(ref done);
Progress = $"{n}/{total}";
}
}
catch (IOException ioEx)
finally
{
_logger.LogDebug(ioEx, "File {file} locked or busy, skipping", file);
_pendingCompactions.TryRemove(file, out _);
_globalGate.Release();
}
catch (Exception ex)
{
_logger.LogWarning(ex, "Error compacting/decompressing file {file}", file);
}
}
return workerId;
},
localFinally: _ =>
{
//Ignore local finally for now
});
}
catch (OperationCanceledException ex)
{
_logger.LogDebug(ex, "Mass compaction call got cancelled, shutting off compactor.");
}
finally
{
@@ -139,6 +205,7 @@ public sealed class FileCompactor : IDisposable
}
}
/// <summary>
/// Write all bytes into a directory async
/// </summary>
@@ -197,24 +264,20 @@ public sealed class FileCompactor : IDisposable
{
try
{
bool isWindowsProc = RuntimeInformation.IsOSPlatform(OSPlatform.Windows);
var (_, linuxPath) = ResolvePathsForBtrfs(fileInfo.FullName);
var (ok, output, err, code) =
isWindowsProc
_isWindows
? RunProcessShell($"stat -c='%b' {QuoteSingle(linuxPath)}", workingDir: null, 10000)
: RunProcessDirect("stat", ["-c='%b'", linuxPath], workingDir: null, 10000);
if (ok && long.TryParse(output.Trim(), out long blocks))
return (false, blocks * 512L); // st_blocks are always 512B units
_logger.LogDebug("Btrfs size probe failed for {linux} (stat {code}, err {err}). Falling back to Length.", linuxPath, code, err);
return (false, fileInfo.Length);
return (flowControl: false, value: fileInfo.Length);
}
catch (Exception ex)
{
_logger.LogDebug(ex, "Failed Btrfs size probe for {file}, using Length", fileInfo.FullName);
return (false, fileInfo.Length);
if (_logger.IsEnabled(LogLevel.Debug))
_logger.LogDebug(ex, "Failed Btrfs size probe for {file}, using Length", fileInfo.FullName);
return (flowControl: true, value: fileInfo.Length);
}
}
@@ -228,34 +291,48 @@ public sealed class FileCompactor : IDisposable
try
{
var blockSize = GetBlockSizeForPath(fileInfo.FullName, _logger, _dalamudUtilService.IsWine);
var losize = GetCompressedFileSizeW(fileInfo.FullName, out uint hosize);
var size = (long)hosize << 32 | losize;
return (flowControl: false, value: ((size + blockSize - 1) / blockSize) * blockSize);
if (blockSize <= 0)
throw new InvalidOperationException($"Invalid block size {blockSize} for {fileInfo.FullName}");
uint lo = GetCompressedFileSizeW(fileInfo.FullName, out uint hi);
if (lo == 0xFFFFFFFF)
{
int err = Marshal.GetLastWin32Error();
if (err != 0)
throw new Win32Exception(err);
}
long size = ((long)hi << 32) | lo;
long rounded = ((size + blockSize - 1) / blockSize) * blockSize;
return (flowControl: false, value: rounded);
}
catch (Exception ex)
{
_logger.LogDebug(ex, "Failed stat size for {file}, fallback to Length", fileInfo.FullName);
return (flowControl: true, value: default);
}
return (flowControl: true, value: default);
}
/// <summary>
/// Compressing the given path with BTRFS or NTFS file system.
/// </summary>
/// <param name="path">Path of the decompressed/normal file</param>
private void CompactFile(string filePath)
/// <param name="filePath">Path of the decompressed/normal file</param>
/// <param name="workerId">Worker/Process Id</param>
private void CompactFile(string filePath, int workerId)
{
var fi = new FileInfo(filePath);
if (!fi.Exists)
{
_logger.LogTrace("Skip compaction: missing {file}", filePath);
if (_logger.IsEnabled(LogLevel.Trace))
_logger.LogTrace("[W{worker}] Skip compaction: missing {file}", workerId, filePath);
return;
}
var fsType = GetFilesystemType(filePath, _dalamudUtilService.IsWine);
var oldSize = fi.Length;
int blockSize = GetBlockSizeForPath(fi.FullName, _logger, _dalamudUtilService.IsWine);
int blockSize = (int)(GetFileSizeOnDisk(fi) / 512);
// We skipping small files (128KiB) as they slow down the system a lot for BTRFS. as BTRFS has a different blocksize it requires an different calculation.
long minSizeBytes = fsType == FilesystemType.Btrfs
@@ -264,7 +341,8 @@ public sealed class FileCompactor : IDisposable
if (oldSize < minSizeBytes)
{
_logger.LogTrace("Skip compaction: {file} ({size} B) < threshold ({th} B)", filePath, oldSize, minSizeBytes);
if (_logger.IsEnabled(LogLevel.Trace))
_logger.LogTrace("[W{worker}] Skip compaction: {file} ({size} B) < threshold ({th} B)", workerId, filePath, oldSize, minSizeBytes);
return;
}
@@ -272,20 +350,20 @@ public sealed class FileCompactor : IDisposable
{
if (!IsWOFCompactedFile(filePath))
{
_logger.LogDebug("NTFS compaction XPRESS8K: {file}", filePath);
if (WOFCompressFile(filePath))
{
var newSize = GetFileSizeOnDisk(fi);
_logger.LogDebug("NTFS compressed {file} {old} -> {new}", filePath, oldSize, newSize);
_logger.LogDebug("[W{worker}] NTFS compressed XPRESS8K {file} {old} -> {new}", workerId, filePath, oldSize, newSize);
}
else
{
_logger.LogWarning("NTFS compression failed or unavailable for {file}", filePath);
_logger.LogWarning("[W{worker}] NTFS compression failed or unavailable for {file}", workerId, filePath);
}
}
else
{
_logger.LogTrace("Already NTFS-compressed: {file}", filePath);
if (_logger.IsEnabled(LogLevel.Trace))
_logger.LogTrace("[W{worker}] Already NTFS-compressed with XPRESS8K: {file}", workerId, filePath);
}
return;
}
@@ -294,41 +372,43 @@ public sealed class FileCompactor : IDisposable
{
if (!IsBtrfsCompressedFile(filePath))
{
_logger.LogDebug("Btrfs compression zstd: {file}", filePath);
if (BtrfsCompressFile(filePath))
{
var newSize = GetFileSizeOnDisk(fi);
_logger.LogDebug("Btrfs compressed {file} {old} -> {new}", filePath, oldSize, newSize);
_logger.LogDebug("[W{worker}] Btrfs compressed clzo {file} {old} -> {new}", workerId, filePath, oldSize, newSize);
}
else
{
_logger.LogWarning("Btrfs compression failed or unavailable for {file}", filePath);
_logger.LogWarning("[W{worker}] Btrfs compression failed or unavailable for {file}", workerId, filePath);
}
}
else
{
_logger.LogTrace("Already Btrfs-compressed: {file}", filePath);
if (_logger.IsEnabled(LogLevel.Trace))
_logger.LogTrace("[W{worker}] Already Btrfs-compressed with clzo: {file}", workerId, filePath);
}
return;
}
_logger.LogTrace("Skip compact: unsupported FS for {file}", filePath);
if (_logger.IsEnabled(LogLevel.Trace))
_logger.LogTrace("[W{worker}] Skip compact: unsupported FS for {file}", workerId, filePath);
}
/// <summary>
/// Decompressing the given path with BTRFS file system or NTFS file system.
/// </summary>
/// <param name="path">Path of the compressed file</param>
private void DecompressFile(string path)
/// <param name="filePath">Path of the decompressed/normal file</param>
/// <param name="workerId">Worker/Process Id</param>
private void DecompressFile(string filePath, int workerId)
{
_logger.LogDebug("Decompress request: {file}", path);
var fsType = GetFilesystemType(path, _dalamudUtilService.IsWine);
_logger.LogDebug("[W{worker}] Decompress request: {file}", workerId, filePath);
var fsType = GetFilesystemType(filePath, _dalamudUtilService.IsWine);
if (fsType == FilesystemType.NTFS && !_dalamudUtilService.IsWine)
{
try
{
bool flowControl = DecompressWOFFile(path);
bool flowControl = DecompressWOFFile(filePath, workerId);
if (!flowControl)
{
return;
@@ -336,7 +416,7 @@ public sealed class FileCompactor : IDisposable
}
catch (Exception ex)
{
_logger.LogWarning(ex, "NTFS decompress error {file}", path);
_logger.LogWarning(ex, "[W{worker}] NTFS decompress error {file}", workerId, filePath);
}
}
@@ -344,7 +424,7 @@ public sealed class FileCompactor : IDisposable
{
try
{
bool flowControl = DecompressBtrfsFile(path);
bool flowControl = DecompressBtrfsFile(filePath);
if (!flowControl)
{
return;
@@ -352,7 +432,7 @@ public sealed class FileCompactor : IDisposable
}
catch (Exception ex)
{
_logger.LogWarning(ex, "Btrfs decompress error {file}", path);
_logger.LogWarning(ex, "[W{worker}] Btrfs decompress error {file}", workerId, filePath);
}
}
}
@@ -372,51 +452,48 @@ public sealed class FileCompactor : IDisposable
string linuxPath = isWine ? ToLinuxPathIfWine(path, isWine) : path;
var opts = GetMountOptionsForPath(linuxPath);
bool hasCompress = opts.Contains("compress", StringComparison.OrdinalIgnoreCase);
bool hasCompressForce = opts.Contains("compress-force", StringComparison.OrdinalIgnoreCase);
if (!string.IsNullOrEmpty(opts))
_logger.LogTrace("Mount opts for {file}: {opts}", linuxPath, opts);
if (hasCompressForce)
var probe = RunProcessShell("command -v btrfs || which btrfs", timeoutMs: 5000);
var _btrfsAvailable = probe.ok && !string.IsNullOrWhiteSpace(probe.stdout);
if (!_btrfsAvailable)
_logger.LogWarning("btrfs cli not found in path. Compression will be skipped.");
var prop = isWine
? RunProcessShell($"btrfs property set -- {QuoteSingle(linuxPath)} compression none", timeoutMs: 15000)
: RunProcessDirect("btrfs", ["property", "set", "--", linuxPath, "compression", "none"], "/", 15000);
if (prop.ok) _logger.LogTrace("Set per-file 'compression none' on {file}", linuxPath);
else _logger.LogTrace("btrfs property set failed for {file} (exit {code}): {err}", linuxPath, prop.exitCode, prop.stderr);
var defrag = isWine
? RunProcessShell($"btrfs filesystem defragment -f -- {QuoteSingle(linuxPath)}", timeoutMs: 60000)
: RunProcessDirect("btrfs", ["filesystem", "defragment", "-f", "--", linuxPath], "/", 60000);
if (!defrag.ok)
{
_logger.LogWarning("Cannot safely decompress {file}: mount options contains compress-force ({opts}).", linuxPath, opts);
_logger.LogWarning("btrfs defragment (decompress) failed for {file} (exit {code}): {err}",
linuxPath, defrag.exitCode, defrag.stderr);
return false;
}
if (hasCompress)
{
var setCmd = $"btrfs property set -- {QuoteDouble(linuxPath)} compression none";
var (okSet, _, errSet, codeSet) = isWine
? RunProcessShell(setCmd)
: RunProcessDirect("btrfs", ["property", "set", "--", linuxPath, "compression", "none"]);
if (!okSet)
{
_logger.LogWarning("Failed to set 'compression none' on {file}, please check drive options (exit code is: {code}): {err}", linuxPath, codeSet, errSet);
return false;
}
_logger.LogTrace("Set per-file 'compression none' on {file}", linuxPath);
}
if (!IsBtrfsCompressedFile(linuxPath))
{
_logger.LogTrace("{file} is not compressed, skipping decompression completely", linuxPath);
return true;
}
var (ok, stdout, stderr, code) = isWine
? RunProcessShell($"btrfs filesystem defragment -- {QuoteDouble(linuxPath)}")
: RunProcessDirect("btrfs", ["filesystem", "defragment", "--", linuxPath]);
if (!ok)
{
_logger.LogWarning("btrfs defragment (decompress) failed for {file} (exit code is: {code}): {stderr}",
linuxPath, code, stderr);
return false;
}
if (!string.IsNullOrWhiteSpace(stdout))
_logger.LogTrace("btrfs defragment output for {file}: {out}", linuxPath, stdout.Trim());
if (!string.IsNullOrWhiteSpace(defrag.stdout))
_logger.LogTrace("btrfs defragment output for {file}: {out}", linuxPath, defrag.stdout.Trim());
_logger.LogInformation("Decompressed (rewritten uncompressed) Btrfs file: {file}", linuxPath);
try
{
if (_fragBatch != null)
{
var compressed = _fragBatch.IsCompressedAsync(linuxPath, _compactionCts.Token).GetAwaiter().GetResult();
if (compressed)
_logger.LogTrace("Post-check: {file} still shows 'compressed' flag (may be stale).", linuxPath);
}
}
catch { /* ignore verification noisy */ }
return true;
}
catch (Exception ex)
@@ -432,18 +509,18 @@ public sealed class FileCompactor : IDisposable
/// </summary>
/// <param name="path">Path of the compressed file</param>
/// <returns>Decompressing state</returns>
private bool DecompressWOFFile(string path)
private bool DecompressWOFFile(string path, int workerID)
{
//Check if its already been compressed
if (TryIsWofExternal(path, out bool isExternal, out int algo))
{
if (!isExternal)
{
_logger.LogTrace("Already decompressed file: {file}", path);
_logger.LogTrace("[W{worker}] Already decompressed file: {file}", workerID, path);
return true;
}
var compressString = ((CompressionAlgorithm)algo).ToString();
_logger.LogTrace("WOF compression (algo={algo}) detected for {file}", compressString, path);
_logger.LogTrace("[W{worker}] WOF compression (algo={algo}) detected for {file}", workerID, compressString, path);
}
//This will attempt to start WOF thread.
@@ -457,15 +534,15 @@ public sealed class FileCompactor : IDisposable
// 342 error code means its been decompressed after the control, we handle it as it succesfully been decompressed.
if (err == 342)
{
_logger.LogTrace("Successfully decompressed NTFS file {file}", path);
_logger.LogTrace("[W{worker}] Successfully decompressed NTFS file {file}", workerID, path);
return true;
}
_logger.LogWarning("DeviceIoControl failed for {file} with Win32 error {err}", path, err);
_logger.LogWarning("[W{worker}] DeviceIoControl failed for {file} with Win32 error {err}", workerID, path, err);
return false;
}
_logger.LogTrace("Successfully decompressed NTFS file {file}", path);
_logger.LogTrace("[W{worker}] Successfully decompressed NTFS file {file}", workerID, path);
return true;
});
}
@@ -478,6 +555,7 @@ public sealed class FileCompactor : IDisposable
/// <returns>Converted path to be used in Linux</returns>
private string ToLinuxPathIfWine(string path, bool isWine, bool preferShell = true)
{
//Return if not wine
if (!isWine || !IsProbablyWine())
return path;
@@ -539,7 +617,7 @@ public sealed class FileCompactor : IDisposable
/// <returns>Compessing state</returns>
private bool WOFCompressFile(string path)
{
int size = Marshal.SizeOf<WOF_FILE_COMPRESSION_INFO_V1>();
int size = Marshal.SizeOf<WofFileCompressionInfoV1>();
IntPtr efInfoPtr = Marshal.AllocHGlobal(size);
try
@@ -592,7 +670,7 @@ public sealed class FileCompactor : IDisposable
{
try
{
uint buf = (uint)Marshal.SizeOf<WOF_FILE_COMPRESSION_INFO_V1>();
uint buf = (uint)Marshal.SizeOf<WofFileCompressionInfoV1>();
int result = WofIsExternalFile(filePath, out int isExternal, out _, out var info, ref buf);
if (result != 0 || isExternal == 0)
return false;
@@ -621,7 +699,7 @@ public sealed class FileCompactor : IDisposable
algorithm = 0;
try
{
uint buf = (uint)Marshal.SizeOf<WOF_FILE_COMPRESSION_INFO_V1>();
uint buf = (uint)Marshal.SizeOf<WofFileCompressionInfoV1>();
int hr = WofIsExternalFile(path, out int ext, out _, out var info, ref buf);
if (hr == 0 && ext != 0)
{
@@ -630,13 +708,13 @@ public sealed class FileCompactor : IDisposable
}
return true;
}
catch (DllNotFoundException)
{
return false;
}
catch (EntryPointNotFoundException)
{
return false;
}
}
@@ -651,8 +729,7 @@ public sealed class FileCompactor : IDisposable
{
try
{
bool windowsProc = RuntimeInformation.IsOSPlatform(OSPlatform.Windows);
string linuxPath = windowsProc ? ResolveLinuxPathForWine(path) : path;
string linuxPath = _isWindows ? ResolveLinuxPathForWine(path) : path;
var task = _fragBatch.IsCompressedAsync(linuxPath, _compactionCts.Token);
@@ -685,7 +762,6 @@ public sealed class FileCompactor : IDisposable
try
{
var (winPath, linuxPath) = ResolvePathsForBtrfs(path);
bool isWindowsProc = RuntimeInformation.IsOSPlatform(OSPlatform.Windows);
if (IsBtrfsCompressedFile(linuxPath))
{
@@ -699,8 +775,13 @@ public sealed class FileCompactor : IDisposable
return false;
}
var probe = RunProcessShell("command -v btrfs || which btrfs", timeoutMs: 5000);
var _btrfsAvailable = probe.ok && !string.IsNullOrWhiteSpace(probe.stdout);
if (!_btrfsAvailable)
_logger.LogWarning("btrfs cli not found in path. Compression will be skipped.");
(bool ok, string stdout, string stderr, int code) =
isWindowsProc
_isWindows
? RunProcessShell($"btrfs filesystem defragment -clzo -- {QuoteSingle(linuxPath)}")
: RunProcessDirect("btrfs", ["filesystem", "defragment", "-clzo", "--", linuxPath]);
@@ -783,9 +864,10 @@ public sealed class FileCompactor : IDisposable
RedirectStandardOutput = true,
RedirectStandardError = true,
UseShellExecute = false,
CreateNoWindow = true
CreateNoWindow = true,
WorkingDirectory = workingDir ?? "/",
};
if (!string.IsNullOrEmpty(workingDir)) psi.WorkingDirectory = workingDir;
foreach (var a in args) psi.ArgumentList.Add(a);
EnsureUnixPathEnv(psi);
@@ -799,8 +881,18 @@ public sealed class FileCompactor : IDisposable
}
int code;
try { code = proc.ExitCode; } catch { code = -1; }
return (code == 0, so2, se2, code);
try { code = proc.ExitCode; }
catch { code = -1; }
bool ok = code == 0;
if (!ok && code == -1 &&
string.IsNullOrWhiteSpace(se2) && !string.IsNullOrWhiteSpace(so2))
{
ok = true;
}
return (ok, so2, se2, code);
}
/// <summary>
@@ -811,15 +903,14 @@ public sealed class FileCompactor : IDisposable
/// <returns>State of the process, output of the process and error with exit code</returns>
private (bool ok, string stdout, string stderr, int exitCode) RunProcessShell(string command, string? workingDir = null, int timeoutMs = 60000)
{
var psi = new ProcessStartInfo("/bin/bash")
{
RedirectStandardOutput = true,
RedirectStandardError = true,
UseShellExecute = false,
CreateNoWindow = true
CreateNoWindow = true,
WorkingDirectory = workingDir ?? "/",
};
if (!string.IsNullOrEmpty(workingDir)) psi.WorkingDirectory = workingDir;
// Use a Login shell so PATH includes /usr/sbin etc. AKA -lc for login shell
psi.ArgumentList.Add("-lc");
@@ -836,65 +927,72 @@ public sealed class FileCompactor : IDisposable
}
int code;
try { code = proc.ExitCode; } catch { code = -1; }
return (code == 0, so2, se2, code);
try { code = proc.ExitCode; }
catch { code = -1; }
bool ok = code == 0;
if (!ok && code == -1 && string.IsNullOrWhiteSpace(se2) && !string.IsNullOrWhiteSpace(so2))
{
ok = true;
}
return (ok, so2, se2, code);
}
/// <summary>
/// Checking the process result for shell or direct processes
/// </summary>
/// <param name="proc">Process</param>
/// <param name="timeoutMs">How long when timeout is gotten</param>
/// <param name="timeoutMs">How long when timeout goes over threshold</param>
/// <param name="token">Cancellation Token</param>
/// <returns>Multiple variables</returns>
private (bool success, string testy, string testi) CheckProcessResult(Process proc, int timeoutMs, CancellationToken token)
private (bool success, string output, string errorCode) CheckProcessResult(Process proc, int timeoutMs, CancellationToken token)
{
var outTask = proc.StandardOutput.ReadToEndAsync(token);
var errTask = proc.StandardError.ReadToEndAsync(token);
var bothTasks = Task.WhenAll(outTask, errTask);
//On wine, we dont wanna use waitforexit as it will be always broken and giving an error.
if (_dalamudUtilService.IsWine)
{
var finished = Task.WhenAny(bothTasks, Task.Delay(timeoutMs, token)).GetAwaiter().GetResult();
if (finished != bothTasks)
{
try
{
proc.Kill(entireProcessTree: true);
Task.WaitAll([outTask, errTask], 1000, token);
}
catch
{
// ignore this
}
var so = outTask.IsCompleted ? outTask.Result : "";
var se = errTask.IsCompleted ? errTask.Result : "timeout";
return (false, so, se);
}
var finished = Task.WhenAny(bothTasks, Task.Delay(timeoutMs, token)).GetAwaiter().GetResult();
var stderr = errTask.Result;
var ok = string.IsNullOrWhiteSpace(stderr);
return (ok, outTask.Result, stderr);
if (token.IsCancellationRequested)
return KillProcess(proc, outTask, errTask, token);
if (finished != bothTasks)
return KillProcess(proc, outTask, errTask, token);
bool isWine = _dalamudUtilService?.IsWine ?? false;
if (!isWine)
{
try { proc.WaitForExit(); } catch { /* ignore quirks */ }
}
else
{
var sw = Stopwatch.StartNew();
while (!proc.HasExited && sw.ElapsedMilliseconds < 75)
Thread.Sleep(5);
}
// On linux, we can use it as we please
if (!proc.WaitForExit(timeoutMs))
{
try
{
proc.Kill(entireProcessTree: true);
Task.WaitAll([outTask, errTask], 1000, token);
}
catch
{
// ignore this
}
return (false, outTask.IsCompleted ? outTask.Result : "", "timeout");
}
var stdout = outTask.Status == TaskStatus.RanToCompletion ? outTask.Result : "";
var stderr = errTask.Status == TaskStatus.RanToCompletion ? errTask.Result : "";
Task.WaitAll(outTask, errTask);
return (true, outTask.Result, errTask.Result);
int code = -1;
try { if (proc.HasExited) code = proc.ExitCode; } catch { /* Wine may still throw */ }
bool ok = code == 0 || (isWine && string.IsNullOrWhiteSpace(stderr));
return (ok, stdout, stderr);
static (bool success, string output, string errorCode) KillProcess(
Process proc, Task<string> outTask, Task<string> errTask, CancellationToken token)
{
try { proc.Kill(entireProcessTree: true); } catch { /* ignore */ }
try { Task.WaitAll([outTask, errTask], 1000, token); } catch { /* ignore */ }
var so = outTask.IsCompleted ? outTask.Result : "";
var se = errTask.IsCompleted ? errTask.Result : "canceled/timeout";
return (false, so, se);
}
}
/// <summary>
@@ -954,10 +1052,10 @@ public sealed class FileCompactor : IDisposable
}
/// <summary>
/// Process the queue with, meant for a worker/thread
/// Process the queue, meant for a worker/thread
/// </summary>
/// <param name="token">Cancellation token for the worker whenever it needs to be stopped</param>
private async Task ProcessQueueWorkerAsync(CancellationToken token)
private async Task ProcessQueueWorkerAsync(int workerId, CancellationToken token)
{
try
{
@@ -973,7 +1071,7 @@ public sealed class FileCompactor : IDisposable
try
{
if (_lightlessConfigService.Current.UseCompactor && File.Exists(filePath))
CompactFile(filePath);
CompactFile(filePath, workerId);
}
finally
{
@@ -992,8 +1090,8 @@ public sealed class FileCompactor : IDisposable
}
}
}
catch (OperationCanceledException)
{
// Shutting down worker, this exception is expected
}
}
@@ -1005,7 +1103,7 @@ public sealed class FileCompactor : IDisposable
/// <returns>Linux path to be used in Linux</returns>
private string ResolveLinuxPathForWine(string windowsPath)
{
var (ok, outp, _, _) = RunProcessShell($"winepath -u {QuoteSingle(windowsPath)}", null, 5000);
var (ok, outp, _, _) = RunProcessShell($"winepath -u {QuoteSingle(windowsPath)}", workingDir: null, 5000);
if (ok && !string.IsNullOrWhiteSpace(outp)) return outp.Trim();
return ToLinuxPathIfWine(windowsPath, isWine: true);
}
@@ -1029,9 +1127,7 @@ public sealed class FileCompactor : IDisposable
/// <returns></returns>
private (string windowsPath, string linuxPath) ResolvePathsForBtrfs(string path)
{
bool isWindowsProc = RuntimeInformation.IsOSPlatform(OSPlatform.Windows);
if (!isWindowsProc)
if (!_isWindows)
return (path, path);
var (ok, outp, _, _) = RunProcessShell($"winepath -u {QuoteSingle(path)}", workingDir: null, 5000);
@@ -1050,7 +1146,7 @@ public sealed class FileCompactor : IDisposable
{
try
{
if (RuntimeInformation.IsOSPlatform(OSPlatform.Windows))
if (_isWindows)
{
using var _ = new FileStream(winePath, FileMode.Open, FileAccess.Read, FileShare.ReadWrite);
}
@@ -1060,7 +1156,11 @@ public sealed class FileCompactor : IDisposable
}
return true;
}
catch { return false; }
catch (Exception ex)
{
_logger.LogTrace(ex, "Probe open failed for {file} (linux={linux})", winePath, linuxPath);
return false;
}
}
/// <summary>
@@ -1085,17 +1185,18 @@ public sealed class FileCompactor : IDisposable
}
[DllImport("kernel32.dll", SetLastError = true)]
private static extern bool DeviceIoControl(SafeFileHandle hDevice, uint dwIoControlCode, IntPtr lpInBuffer, uint nInBufferSize, IntPtr lpOutBuffer, uint nOutBufferSize, out uint lpBytesReturned, IntPtr lpOverlapped);
[LibraryImport("kernel32.dll", SetLastError = true)]
private static partial uint GetCompressedFileSizeW([MarshalAs(UnmanagedType.LPWStr)] string lpFileName, out uint lpFileSizeHigh);
[DllImport("kernel32.dll")]
private static extern uint GetCompressedFileSizeW([In, MarshalAs(UnmanagedType.LPWStr)] string lpFileName, [Out, MarshalAs(UnmanagedType.U4)] out uint lpFileSizeHigh);
[LibraryImport("kernel32.dll", SetLastError = true)]
[return: MarshalAs(UnmanagedType.Bool)]
private static partial bool DeviceIoControl(SafeFileHandle hDevice, uint dwIoControlCode, IntPtr lpInBuffer, uint nInBufferSize, IntPtr lpOutBuffer, uint nOutBufferSize, out uint lpBytesReturned, IntPtr lpOverlapped);
[DllImport("WofUtil.dll")]
private static extern int WofIsExternalFile([MarshalAs(UnmanagedType.LPWStr)] string Filepath, out int IsExternalFile, out uint Provider, out WOF_FILE_COMPRESSION_INFO_V1 Info, ref uint BufferLength);
[LibraryImport("WofUtil.dll")]
private static partial int WofIsExternalFile([MarshalAs(UnmanagedType.LPWStr)] string Filepath, out int IsExternalFile, out uint Provider, out WofFileCompressionInfoV1 Info, ref uint BufferLength);
[DllImport("WofUtil.dll", SetLastError = true)]
private static extern int WofSetFileDataLocation(SafeFileHandle FileHandle, ulong Provider, IntPtr ExternalFileInfo, ulong Length);
[LibraryImport("WofUtil.dll")]
private static partial int WofSetFileDataLocation(SafeFileHandle FileHandle, ulong Provider, IntPtr ExternalFileInfo, ulong Length);
private static string QuoteSingle(string s) => "'" + s.Replace("'", "'\\''", StringComparison.Ordinal) + "'";
@@ -1103,7 +1204,11 @@ public sealed class FileCompactor : IDisposable
public void Dispose()
{
//Cleanup of gates and frag service
_fragBatch?.Dispose();
_btrfsGate?.Dispose();
_globalGate?.Dispose();
_compactionQueue.Writer.TryComplete();
_compactionCts.Cancel();
@@ -1111,8 +1216,8 @@ public sealed class FileCompactor : IDisposable
{
Task.WaitAll([.. _workers.Where(t => t != null)], TimeSpan.FromSeconds(5));
}
catch
{
// Ignore this catch on the dispose
}
finally


@@ -5,4 +5,5 @@ public enum FileState
Valid,
RequireUpdate,
RequireDeletion,
RequireRehash
}


@@ -3,11 +3,17 @@ using LightlessSync.LightlessConfiguration;
using LightlessSync.LightlessConfiguration.Configurations;
using LightlessSync.PlayerData.Data;
using LightlessSync.PlayerData.Handlers;
using LightlessSync.PlayerData.Factories;
using LightlessSync.Services;
using LightlessSync.Services.ActorTracking;
using LightlessSync.Services.Mediator;
using LightlessSync.Utils;
using Microsoft.Extensions.Logging;
using System.Collections.Concurrent;
using System.Collections.Generic;
using System.Threading.Tasks;
using System.Linq;
using DalamudObjectKind = Dalamud.Game.ClientState.Objects.Enums.ObjectKind;
namespace LightlessSync.FileCache;
@@ -17,21 +23,29 @@ public sealed class TransientResourceManager : DisposableMediatorSubscriberBase
private readonly HashSet<string> _cachedHandledPaths = new(StringComparer.Ordinal);
private readonly TransientConfigService _configurationService;
private readonly DalamudUtilService _dalamudUtil;
private readonly ActorObjectService _actorObjectService;
private readonly GameObjectHandlerFactory _gameObjectHandlerFactory;
private readonly object _ownedHandlerLock = new();
private readonly string[] _handledFileTypes = ["tmb", "pap", "avfx", "atex", "sklb", "eid", "phyb", "scd", "skp", "shpk", "kdb"];
private readonly string[] _handledRecordingFileTypes = ["tex", "mdl", "mtrl"];
private readonly HashSet<GameObjectHandler> _playerRelatedPointers = [];
private ConcurrentDictionary<IntPtr, ObjectKind> _cachedFrameAddresses = [];
private readonly Dictionary<nint, GameObjectHandler> _ownedHandlers = new();
private ConcurrentDictionary<nint, ObjectKind> _cachedFrameAddresses = new();
private ConcurrentDictionary<ObjectKind, HashSet<string>>? _semiTransientResources = null;
private uint _lastClassJobId = uint.MaxValue;
public bool IsTransientRecording { get; private set; } = false;
public TransientResourceManager(ILogger<TransientResourceManager> logger, TransientConfigService configurationService,
DalamudUtilService dalamudUtil, LightlessMediator mediator) : base(logger, mediator)
DalamudUtilService dalamudUtil, LightlessMediator mediator, ActorObjectService actorObjectService, GameObjectHandlerFactory gameObjectHandlerFactory) : base(logger, mediator)
{
_configurationService = configurationService;
_dalamudUtil = dalamudUtil;
_actorObjectService = actorObjectService;
_gameObjectHandlerFactory = gameObjectHandlerFactory;
Mediator.Subscribe<PenumbraResourceLoadMessage>(this, Manager_PenumbraResourceLoadEvent);
Mediator.Subscribe<ActorTrackedMessage>(this, msg => HandleActorTracked(msg.Descriptor));
Mediator.Subscribe<ActorUntrackedMessage>(this, msg => HandleActorUntracked(msg.Descriptor));
Mediator.Subscribe<PenumbraModSettingChangedMessage>(this, (_) => Manager_PenumbraModSettingChanged());
Mediator.Subscribe<PriorityFrameworkUpdateMessage>(this, (_) => DalamudUtil_FrameworkUpdate());
Mediator.Subscribe<GameObjectHandlerCreatedMessage>(this, (msg) =>
@@ -44,6 +58,11 @@ public sealed class TransientResourceManager : DisposableMediatorSubscriberBase
if (!msg.OwnedObject) return;
_playerRelatedPointers.Remove(msg.GameObjectHandler);
});
foreach (var descriptor in _actorObjectService.PlayerDescriptors)
{
HandleActorTracked(descriptor);
}
}
private TransientConfig.TransientPlayerConfig PlayerConfig
@@ -123,12 +142,21 @@ public sealed class TransientResourceManager : DisposableMediatorSubscriberBase
return;
}
var transientResources = resources.ToList();
Logger.LogDebug("Persisting {count} transient resources", transientResources.Count);
List<string> newlyAddedGamePaths = resources.Except(semiTransientResources, StringComparer.Ordinal).ToList();
foreach (var gamePath in transientResources)
List<string> transientResources;
lock (resources)
{
semiTransientResources.Add(gamePath);
transientResources = resources.ToList();
}
Logger.LogDebug("Persisting {count} transient resources", transientResources.Count);
List<string> newlyAddedGamePaths;
lock (semiTransientResources)
{
newlyAddedGamePaths = transientResources.Except(semiTransientResources, StringComparer.Ordinal).ToList();
foreach (var gamePath in transientResources)
{
semiTransientResources.Add(gamePath);
}
}
bool saveConfig = false;
@@ -161,7 +189,10 @@ public sealed class TransientResourceManager : DisposableMediatorSubscriberBase
_configurationService.Save();
}
TransientResources[objectKind].Clear();
lock (resources)
{
resources.Clear();
}
}
public void RemoveTransientResource(ObjectKind objectKind, string path)
@@ -241,16 +272,46 @@ public sealed class TransientResourceManager : DisposableMediatorSubscriberBase
TransientResources.Clear();
SemiTransientResources.Clear();
lock (_ownedHandlerLock)
{
foreach (var handler in _ownedHandlers.Values)
{
handler.Dispose();
}
_ownedHandlers.Clear();
}
}
private void DalamudUtil_FrameworkUpdate()
{
_cachedFrameAddresses = new(_playerRelatedPointers.Where(k => k.Address != nint.Zero).ToDictionary(c => c.Address, c => c.ObjectKind));
lock (_cacheAdditionLock)
{
_cachedHandledPaths.Clear();
}
var activeDescriptors = new Dictionary<nint, ObjectKind>();
foreach (var descriptor in _actorObjectService.PlayerDescriptors)
{
if (TryResolveObjectKind(descriptor, out var resolvedKind))
{
activeDescriptors[descriptor.Address] = resolvedKind;
}
}
foreach (var address in _cachedFrameAddresses.Keys.ToList())
{
if (!activeDescriptors.ContainsKey(address))
{
_cachedFrameAddresses.TryRemove(address, out _);
}
}
foreach (var descriptor in activeDescriptors)
{
_cachedFrameAddresses[descriptor.Key] = descriptor.Value;
}
if (_lastClassJobId != _dalamudUtil.ClassJobId)
{
_lastClassJobId = _dalamudUtil.ClassJobId;
@@ -259,16 +320,15 @@ public sealed class TransientResourceManager : DisposableMediatorSubscriberBase
value?.Clear();
}
// reload config for current new classjob
PlayerConfig.JobSpecificCache.TryGetValue(_dalamudUtil.ClassJobId, out var jobSpecificData);
SemiTransientResources[ObjectKind.Player] = PlayerConfig.GlobalPersistentCache.Concat(jobSpecificData ?? []).ToHashSet(StringComparer.OrdinalIgnoreCase);
PlayerConfig.JobSpecificPetCache.TryGetValue(_dalamudUtil.ClassJobId, out var petSpecificData);
SemiTransientResources[ObjectKind.Pet] = [.. petSpecificData ?? []];
}
foreach (var kind in Enum.GetValues(typeof(ObjectKind)))
foreach (var kind in Enum.GetValues(typeof(ObjectKind)).Cast<ObjectKind>())
{
if (!_cachedFrameAddresses.Any(k => k.Value == (ObjectKind)kind) && TransientResources.Remove((ObjectKind)kind, out _))
if (!_cachedFrameAddresses.Any(k => k.Value == kind) && TransientResources.Remove(kind, out _))
{
Logger.LogDebug("Object not present anymore: {kind}", kind.ToString());
}
@@ -292,6 +352,119 @@ public sealed class TransientResourceManager : DisposableMediatorSubscriberBase
_semiTransientResources = null;
}
private static bool TryResolveObjectKind(ActorObjectService.ActorDescriptor descriptor, out ObjectKind resolvedKind)
{
if (descriptor.OwnedKind is ObjectKind ownedKind)
{
resolvedKind = ownedKind;
return true;
}
if (descriptor.ObjectKind == DalamudObjectKind.Player)
{
resolvedKind = ObjectKind.Player;
return true;
}
resolvedKind = default;
return false;
}
private void HandleActorTracked(ActorObjectService.ActorDescriptor descriptor)
{
if (descriptor.IsInGpose)
return;
if (!TryResolveObjectKind(descriptor, out var resolvedKind))
return;
if (Logger.IsEnabled(LogLevel.Debug))
{
Logger.LogDebug("ActorObject tracked: {kind} addr={address:X} name={name}", resolvedKind, descriptor.Address, descriptor.Name);
}
_cachedFrameAddresses[descriptor.Address] = resolvedKind;
if (descriptor.OwnedKind is not ObjectKind ownedKind)
return;
lock (_ownedHandlerLock)
{
if (_ownedHandlers.ContainsKey(descriptor.Address))
return;
_ = CreateOwnedHandlerAsync(descriptor, ownedKind);
}
}
private void HandleActorUntracked(ActorObjectService.ActorDescriptor descriptor)
{
if (Logger.IsEnabled(LogLevel.Debug))
{
var kindLabel = descriptor.OwnedKind?.ToString()
?? (descriptor.ObjectKind == DalamudObjectKind.Player ? ObjectKind.Player.ToString() : "<none>");
Logger.LogDebug("ActorObject untracked: addr={address:X} name={name} kind={kind}", descriptor.Address, descriptor.Name, kindLabel);
}
_cachedFrameAddresses.TryRemove(descriptor.Address, out _);
if (descriptor.OwnedKind is not ObjectKind)
return;
lock (_ownedHandlerLock)
{
if (_ownedHandlers.Remove(descriptor.Address, out var handler))
{
handler.Dispose();
}
}
}
private async Task CreateOwnedHandlerAsync(ActorObjectService.ActorDescriptor descriptor, ObjectKind kind)
{
try
{
var handler = await _gameObjectHandlerFactory.Create(
kind,
() =>
{
if (!string.IsNullOrEmpty(descriptor.HashedContentId) &&
_actorObjectService.TryGetValidatedActorByHash(descriptor.HashedContentId, out var current) &&
current.OwnedKind == kind)
{
return current.Address;
}
return descriptor.Address;
},
true).ConfigureAwait(false);
if (handler.Address == IntPtr.Zero)
{
handler.Dispose();
return;
}
lock (_ownedHandlerLock)
{
if (!_cachedFrameAddresses.ContainsKey(descriptor.Address))
{
Logger.LogDebug("ActorObject handler discarded (stale): addr={address:X}", descriptor.Address);
handler.Dispose();
return;
}
_ownedHandlers[descriptor.Address] = handler;
}
Logger.LogDebug("ActorObject handler created: {kind} addr={address:X}", kind, descriptor.Address);
}
catch (Exception ex)
{
Logger.LogError(ex, "Failed to create owned handler for {kind} at {address:X}", kind, descriptor.Address);
}
}
private void Manager_PenumbraResourceLoadEvent(PenumbraResourceLoadMessage msg)
{
var gamePath = msg.GamePath.ToLowerInvariant();
@@ -383,21 +556,30 @@ public sealed class TransientResourceManager : DisposableMediatorSubscriberBase
private void SendTransients(nint gameObject, ObjectKind objectKind)
{
_sendTransientCts.Cancel();
_sendTransientCts = new();
var token = _sendTransientCts.Token;
_ = Task.Run(async () =>
{
_sendTransientCts?.Cancel();
_sendTransientCts?.Dispose();
_sendTransientCts = new();
var token = _sendTransientCts.Token;
await Task.Delay(TimeSpan.FromSeconds(5), token).ConfigureAwait(false);
foreach (var kvp in TransientResources)
try
{
await Task.Delay(TimeSpan.FromSeconds(5), token).ConfigureAwait(false);
if (TransientResources.TryGetValue(objectKind, out var values) && values.Any())
{
Logger.LogTrace("Sending Transients for {kind}", objectKind);
Mediator.Publish(new TransientResourceChangedMessage(gameObject));
}
}
catch (TaskCanceledException)
{
}
catch (System.OperationCanceledException)
{
}
});
}