Compare commits

..

2 Commits

16 changed files with 315 additions and 559 deletions

View File

@@ -92,7 +92,7 @@ public sealed class PenumbraTexture : PenumbraBase
{
token.ThrowIfCancellationRequested();
logger.LogInformation("Converting texture {Input} -> {Output} ({Target})", job.InputFile, job.OutputFile, job.TargetType);
logger.LogDebug("Converting texture {Input} -> {Output} ({Target})", job.InputFile, job.OutputFile, job.TargetType);
var convertTask = _convertTextureFile.Invoke(job.InputFile, job.OutputFile, job.TargetType, job.IncludeMipMaps);
await convertTask.ConfigureAwait(false);

View File

@@ -1,17 +1,11 @@
using System.Globalization;
using System.Text.Json;
using System.Text.Json.Nodes;
using LightlessSync.LightlessConfiguration.Configurations;
using LightlessSync.LightlessConfiguration.Models;
using LightlessSync.WebAPI;
using LightlessSync.WebAPI;
using Microsoft.Extensions.Hosting;
using Microsoft.Extensions.Logging;
namespace LightlessSync.LightlessConfiguration;
public class ConfigurationMigrator(ILogger<ConfigurationMigrator> logger, TransientConfigService transientConfigService,
ServerConfigService serverConfigService, TempCollectionConfigService tempCollectionConfigService,
LightlessConfigService lightlessConfigService) : IHostedService
ServerConfigService serverConfigService) : IHostedService
{
private readonly ILogger<ConfigurationMigrator> _logger = logger;
@@ -57,8 +51,6 @@ public class ConfigurationMigrator(ILogger<ConfigurationMigrator> logger, Transi
serverConfigService.Current.Version = 2;
serverConfigService.Save();
}
MigrateTempCollectionConfig(tempCollectionConfigService, lightlessConfigService);
}
public Task StartAsync(CancellationToken cancellationToken)
@@ -71,273 +63,4 @@ public class ConfigurationMigrator(ILogger<ConfigurationMigrator> logger, Transi
{
return Task.CompletedTask;
}
private void MigrateTempCollectionConfig(TempCollectionConfigService tempCollectionConfigService, LightlessConfigService lightlessConfigService)
{
var now = DateTime.UtcNow;
TempCollectionConfig tempConfig = tempCollectionConfigService.Current;
var tempChanged = false;
var tempNeedsSave = false;
if (TryReadTempCollectionData(lightlessConfigService.ConfigurationPath, out var root, out var ids, out var entries))
{
tempChanged |= MergeTempCollectionData(tempConfig, ids, entries, now);
var removed = root.Remove("OrphanableTempCollections");
removed |= root.Remove("OrphanableTempCollectionEntries");
if (removed)
{
try
{
string updatedJson = root.ToJsonString(new JsonSerializerOptions
{
WriteIndented = true
});
File.WriteAllText(lightlessConfigService.ConfigurationPath, updatedJson);
lightlessConfigService.UpdateLastWriteTime();
}
catch (Exception ex)
{
_logger.LogWarning(ex, "Failed to rewrite {config} after temp collection migration", lightlessConfigService.ConfigurationPath);
return;
}
}
if (ids.Count > 0 || entries.Count > 0)
{
_logger.LogInformation("Migrated {ids} temp collection ids and {entries} entries to {configName}",
ids.Count, entries.Count, tempCollectionConfigService.ConfigurationName);
}
}
if (TryReadTempCollectionData(tempCollectionConfigService.ConfigurationPath, out var tempRoot, out var tempIds, out var tempEntries))
{
tempChanged |= MergeTempCollectionData(tempConfig, tempIds, tempEntries, now);
if (tempRoot.Remove("OrphanableTempCollections"))
{
tempNeedsSave = true;
}
}
if (tempChanged || tempNeedsSave)
{
tempCollectionConfigService.Save();
}
}
private bool TryReadTempCollectionData(string configPath, out JsonObject root, out HashSet<Guid> ids, out List<OrphanableTempCollectionEntry> entries)
{
root = new JsonObject();
ids = [];
entries = [];
if (!File.Exists(configPath))
{
return false;
}
try
{
root = JsonNode.Parse(File.ReadAllText(configPath)) as JsonObject ?? new JsonObject();
}
catch (Exception ex)
{
_logger.LogWarning(ex, "Failed to read temp collection data from {config}", configPath);
return false;
}
root.TryGetPropertyValue("OrphanableTempCollections", out JsonNode? idsNode);
root.TryGetPropertyValue("OrphanableTempCollectionEntries", out JsonNode? entriesNode);
if (idsNode == null && entriesNode == null)
{
return false;
}
ids = ParseGuidSet(idsNode);
entries = ParseEntries(entriesNode);
return true;
}
private static HashSet<Guid> ParseGuidSet(JsonNode? node)
{
HashSet<Guid> ids = [];
if (node is not JsonArray array)
{
return ids;
}
foreach (JsonNode? item in array)
{
Guid id = ParseGuid(item);
if (id != Guid.Empty)
{
ids.Add(id);
}
}
return ids;
}
private static List<OrphanableTempCollectionEntry> ParseEntries(JsonNode? node)
{
List<OrphanableTempCollectionEntry> entries = [];
if (node is not JsonArray array)
{
return entries;
}
foreach (JsonNode? item in array)
{
if (item is not JsonObject obj)
{
continue;
}
Guid id = ParseGuid(obj["Id"]);
if (id == Guid.Empty)
{
continue;
}
DateTime registeredAtUtc = DateTime.MinValue;
if (TryParseDateTime(obj["RegisteredAtUtc"], out DateTime parsed))
{
registeredAtUtc = parsed;
}
entries.Add(new OrphanableTempCollectionEntry
{
Id = id,
RegisteredAtUtc = registeredAtUtc
});
}
return entries;
}
private static Guid ParseGuid(JsonNode? node)
{
if (node is JsonValue value)
{
if (value.TryGetValue<string>(out string? stringValue) && Guid.TryParse(stringValue, out Guid parsed))
{
return parsed;
}
}
return Guid.Empty;
}
private static bool TryParseDateTime(JsonNode? node, out DateTime value)
{
value = DateTime.MinValue;
if (node is not JsonValue val)
{
return false;
}
if (val.TryGetValue<DateTime>(out DateTime direct))
{
value = direct;
return true;
}
if (val.TryGetValue<string>(out string? stringValue)
&& DateTime.TryParse(stringValue, CultureInfo.InvariantCulture, DateTimeStyles.RoundtripKind, out DateTime parsed))
{
value = parsed;
return true;
}
return false;
}
private static bool MergeTempCollectionData(TempCollectionConfig config, HashSet<Guid> ids, List<OrphanableTempCollectionEntry> entries, DateTime now)
{
bool changed = false;
Dictionary<Guid, OrphanableTempCollectionEntry> entryLookup = new();
for (var i = config.OrphanableTempCollectionEntries.Count - 1; i >= 0; i--)
{
var entry = config.OrphanableTempCollectionEntries[i];
if (entry.Id == Guid.Empty)
{
config.OrphanableTempCollectionEntries.RemoveAt(i);
changed = true;
continue;
}
if (entryLookup.TryGetValue(entry.Id, out var existing))
{
if (entry.RegisteredAtUtc != DateTime.MinValue
&& (existing.RegisteredAtUtc == DateTime.MinValue || entry.RegisteredAtUtc < existing.RegisteredAtUtc))
{
existing.RegisteredAtUtc = entry.RegisteredAtUtc;
changed = true;
}
config.OrphanableTempCollectionEntries.RemoveAt(i);
changed = true;
continue;
}
entryLookup[entry.Id] = entry;
}
foreach (OrphanableTempCollectionEntry entry in entries)
{
if (entry.Id == Guid.Empty)
{
continue;
}
if (!entryLookup.TryGetValue(entry.Id, out OrphanableTempCollectionEntry? existing))
{
var added = new OrphanableTempCollectionEntry
{
Id = entry.Id,
RegisteredAtUtc = entry.RegisteredAtUtc
};
config.OrphanableTempCollectionEntries.Add(added);
entryLookup[entry.Id] = added;
changed = true;
continue;
}
if (entry.RegisteredAtUtc != DateTime.MinValue
&& (existing.RegisteredAtUtc == DateTime.MinValue || entry.RegisteredAtUtc < existing.RegisteredAtUtc))
{
existing.RegisteredAtUtc = entry.RegisteredAtUtc;
changed = true;
}
}
foreach (Guid id in ids)
{
if (id == Guid.Empty)
{
continue;
}
if (!entryLookup.TryGetValue(id, out OrphanableTempCollectionEntry? existing))
{
var added = new OrphanableTempCollectionEntry
{
Id = id,
RegisteredAtUtc = now
};
config.OrphanableTempCollectionEntries.Add(added);
entryLookup[id] = added;
changed = true;
continue;
}
if (existing.RegisteredAtUtc == DateTime.MinValue)
{
existing.RegisteredAtUtc = now;
changed = true;
}
}
return changed;
}
}
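For reference, a minimal sketch of the legacy JSON shape the removed migration consumed; the two property names and the Id/RegisteredAtUtc fields come from the parsing code above, everything else is illustrative:

// Illustrative legacy payload; the GUID and timestamp are placeholders.
var legacyJson = """
{
  "OrphanableTempCollections": [ "00000000-0000-0000-0000-000000000001" ],
  "OrphanableTempCollectionEntries": [
    { "Id": "00000000-0000-0000-0000-000000000001", "RegisteredAtUtc": "2024-01-01T00:00:00Z" }
  ]
}
""";
// TryReadTempCollectionData parsed a JsonObject like this, merged both arrays
// into TempCollectionConfig, then removed the two properties from the file.
var root = JsonNode.Parse(legacyJson) as JsonObject;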

View File

@@ -72,41 +72,37 @@ public class ConfigurationSaveService : IHostedService
{
_logger.LogTrace("Saving {configName}", config.ConfigurationName);
var configDir = config.ConfigurationPath.Replace(config.ConfigurationName, string.Empty);
var isTempCollections = string.Equals(config.ConfigurationName, TempCollectionConfigService.ConfigName, StringComparison.OrdinalIgnoreCase);
if (!isTempCollections)
try
{
try
{
var configBackupFolder = Path.Join(configDir, BackupFolder);
if (!Directory.Exists(configBackupFolder))
Directory.CreateDirectory(configBackupFolder);
var configBackupFolder = Path.Join(configDir, BackupFolder);
if (!Directory.Exists(configBackupFolder))
Directory.CreateDirectory(configBackupFolder);
var configNameSplit = config.ConfigurationName.Split(".");
var existingConfigs = Directory.EnumerateFiles(
configBackupFolder,
configNameSplit[0] + "*")
.Select(c => new FileInfo(c))
.OrderByDescending(c => c.LastWriteTime).ToList();
if (existingConfigs.Skip(10).Any())
var configNameSplit = config.ConfigurationName.Split(".");
var existingConfigs = Directory.EnumerateFiles(
configBackupFolder,
configNameSplit[0] + "*")
.Select(c => new FileInfo(c))
.OrderByDescending(c => c.LastWriteTime).ToList();
if (existingConfigs.Skip(10).Any())
{
foreach (var oldBak in existingConfigs.Skip(10).ToList())
{
foreach (var oldBak in existingConfigs.Skip(10).ToList())
{
oldBak.Delete();
}
oldBak.Delete();
}
}
string backupPath = Path.Combine(configBackupFolder, configNameSplit[0] + "." + DateTime.Now.ToString("yyyyMMddHHmmss") + "." + configNameSplit[1]);
_logger.LogTrace("Backing up current config to {backupPath}", backupPath);
File.Copy(config.ConfigurationPath, backupPath, overwrite: true);
FileInfo fi = new(backupPath);
fi.LastWriteTimeUtc = DateTime.UtcNow;
}
catch (Exception ex)
{
// ignore if file cannot be backed up
_logger.LogWarning(ex, "Could not create backup for {config}", config.ConfigurationPath);
}
string backupPath = Path.Combine(configBackupFolder, configNameSplit[0] + "." + DateTime.Now.ToString("yyyyMMddHHmmss") + "." + configNameSplit[1]);
_logger.LogTrace("Backing up current config to {backupPath}", backupPath);
File.Copy(config.ConfigurationPath, backupPath, overwrite: true);
FileInfo fi = new(backupPath);
fi.LastWriteTimeUtc = DateTime.UtcNow;
}
catch (Exception ex)
{
// ignore if file cannot be backed up
_logger.LogWarning(ex, "Could not create backup for {config}", config.ConfigurationPath);
}
var temp = config.ConfigurationPath + ".tmp";
@@ -114,7 +110,7 @@ public class ConfigurationSaveService : IHostedService
{
await File.WriteAllTextAsync(temp, JsonSerializer.Serialize(config.Current, typeof(T), new JsonSerializerOptions()
{
WriteIndented = !isTempCollections
WriteIndented = true
})).ConfigureAwait(false);
File.Move(temp, config.ConfigurationPath, true);
config.UpdateLastWriteTime();
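The save path above keeps its write-temp-then-move pattern; a standalone sketch of the same idea, with illustrative names:

using System.Text.Json;

static void SaveAtomically<T>(string path, T value)
{
    var temp = path + ".tmp";
    // Serialize the full payload to a sibling file first...
    File.WriteAllText(temp, JsonSerializer.Serialize(value, new JsonSerializerOptions { WriteIndented = true }));
    // ...then swap it in with a single overwriting move, so an interrupted save
    // never leaves a truncated config on disk.
    File.Move(temp, path, overwrite: true);
}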

View File

@@ -160,6 +160,8 @@ public class LightlessConfig : ILightlessConfiguration
public string? SelectedFinderSyncshell { get; set; } = null;
public string LastSeenVersion { get; set; } = string.Empty;
public bool EnableParticleEffects { get; set; } = true;
public HashSet<Guid> OrphanableTempCollections { get; set; } = [];
public List<OrphanableTempCollectionEntry> OrphanableTempCollectionEntries { get; set; } = [];
public AnimationValidationMode AnimationValidationMode { get; set; } = AnimationValidationMode.Unsafe;
public bool AnimationAllowOneBasedShift { get; set; } = false;
public bool AnimationAllowNeighborIndexTolerance { get; set; } = false;
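The two new properties reference OrphanableTempCollectionEntry, whose definition is not part of this diff; judging from how the janitor reads and mutates it (Id plus RegisteredAtUtc), it presumably looks roughly like this sketch:

// Hypothetical shape inferred from usage elsewhere in the diff; the real type
// lives in LightlessSync.LightlessConfiguration.Models.
public class OrphanableTempCollectionEntry
{
    public Guid Id { get; set; } = Guid.Empty;
    public DateTime RegisteredAtUtc { get; set; } = DateTime.MinValue;
}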

View File

@@ -1,10 +0,0 @@
using LightlessSync.LightlessConfiguration.Models;
namespace LightlessSync.LightlessConfiguration.Configurations;
[Serializable]
public sealed class TempCollectionConfig : ILightlessConfiguration
{
public int Version { get; set; } = 1;
public List<OrphanableTempCollectionEntry> OrphanableTempCollectionEntries { get; set; } = [];
}

View File

@@ -1,12 +0,0 @@
using LightlessSync.LightlessConfiguration.Configurations;
namespace LightlessSync.LightlessConfiguration;
public sealed class TempCollectionConfigService : ConfigurationServiceBase<TempCollectionConfig>
{
public const string ConfigName = "tempcollections.json";
public TempCollectionConfigService(string configDir) : base(configDir) { }
public override string ConfigurationName => ConfigName;
}

View File

@@ -125,6 +125,8 @@ public sealed class Plugin : IDalamudPlugin
services.AddSingleton<FileTransferOrchestrator>();
services.AddSingleton<LightlessPlugin>();
services.AddSingleton<LightlessProfileManager>();
services.AddSingleton<TextureProcessingQueue>();
services.AddSingleton<ModelProcessingQueue>();
services.AddSingleton<TextureCompressionService>();
services.AddSingleton<TextureDownscaleService>();
services.AddSingleton<ModelDecimationService>();
@@ -429,7 +431,6 @@ public sealed class Plugin : IDalamudPlugin
LightlessSync.UI.Style.MainStyle.Init(cfg, theme);
return cfg;
});
services.AddSingleton(sp => new TempCollectionConfigService(configDir));
services.AddSingleton(sp => new ServerConfigService(configDir));
services.AddSingleton(sp => new NotesConfigService(configDir));
services.AddSingleton(sp => new PairTagConfigService(configDir));
@@ -443,7 +444,6 @@ public sealed class Plugin : IDalamudPlugin
services.AddSingleton<IConfigService<ILightlessConfiguration>>(sp => sp.GetRequiredService<LightlessConfigService>());
services.AddSingleton<IConfigService<ILightlessConfiguration>>(sp => sp.GetRequiredService<UiThemeConfigService>());
services.AddSingleton<IConfigService<ILightlessConfiguration>>(sp => sp.GetRequiredService<ChatConfigService>());
services.AddSingleton<IConfigService<ILightlessConfiguration>>(sp => sp.GetRequiredService<TempCollectionConfigService>());
services.AddSingleton<IConfigService<ILightlessConfiguration>>(sp => sp.GetRequiredService<ServerConfigService>());
services.AddSingleton<IConfigService<ILightlessConfiguration>>(sp => sp.GetRequiredService<NotesConfigService>());
services.AddSingleton<IConfigService<ILightlessConfiguration>>(sp => sp.GetRequiredService<PairTagConfigService>());

View File

@@ -0,0 +1,93 @@
using Microsoft.Extensions.Logging;
using System.Collections.Concurrent;
namespace LightlessSync.Services;
public sealed class AssetProcessingQueue : IDisposable
{
private readonly BlockingCollection<WorkItem> _queue = new();
private readonly Thread _worker;
private readonly ILogger _logger;
private bool _disposed;
public AssetProcessingQueue(ILogger logger, string name)
{
_logger = logger;
_worker = new Thread(Run)
{
IsBackground = true,
Name = string.IsNullOrWhiteSpace(name) ? "LightlessSync.AssetProcessing" : name
};
_worker.Start();
}
public Task Enqueue(Func<CancellationToken, Task> work, CancellationToken token = default)
{
if (work is null)
{
throw new ArgumentNullException(nameof(work));
}
var completion = new TaskCompletionSource<object?>(TaskCreationOptions.RunContinuationsAsynchronously);
if (token.IsCancellationRequested)
{
completion.TrySetCanceled(token);
return completion.Task;
}
if (_queue.IsAddingCompleted || _disposed)
{
completion.TrySetException(new ObjectDisposedException(nameof(AssetProcessingQueue)));
return completion.Task;
}
_queue.Add(new WorkItem(work, token, completion));
return completion.Task;
}
private void Run()
{
foreach (var item in _queue.GetConsumingEnumerable())
{
if (item.Token.IsCancellationRequested)
{
item.Completion.TrySetCanceled(item.Token);
continue;
}
try
{
item.Work(item.Token).GetAwaiter().GetResult();
item.Completion.TrySetResult(null);
}
catch (OperationCanceledException ex)
{
var token = ex.CancellationToken.IsCancellationRequested ? ex.CancellationToken : item.Token;
item.Completion.TrySetCanceled(token);
}
catch (Exception ex)
{
_logger.LogWarning(ex, "Asset processing job failed.");
item.Completion.TrySetException(ex);
}
}
}
public void Dispose()
{
if (_disposed)
{
return;
}
_disposed = true;
_queue.CompleteAdding();
_worker.Join(TimeSpan.FromSeconds(2));
_queue.Dispose();
}
private readonly record struct WorkItem(
Func<CancellationToken, Task> Work,
CancellationToken Token,
TaskCompletionSource<object?> Completion);
}
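A usage sketch for the new queue: jobs run one at a time on the dedicated background thread, and the returned Task completes, faults, or cancels together with the job (caller-side names are illustrative):

// "logger" is assumed to come from DI; any ILogger works.
var queue = new AssetProcessingQueue(logger, "LightlessSync.ExampleProcessing");
Task job = queue.Enqueue(async token =>
{
    token.ThrowIfCancellationRequested();
    await Task.Delay(100, token); // stand-in for real texture/model work
});
await job;       // surfaces the job's exception or cancellation to the caller
queue.Dispose(); // completes adding and gives the worker up to 2s to finish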

View File

@@ -1343,11 +1343,22 @@ internal static class MdlDecimator
return false;
}
return ModelDecimationFilters.IsBodyMaterial(mdl.Materials[mesh.MaterialIndex]);
return IsBodyMaterial(mdl.Materials[mesh.MaterialIndex]);
}
private static bool IsBodyMaterial(string materialPath)
=> ModelDecimationFilters.IsBodyMaterial(materialPath);
{
if (string.IsNullOrWhiteSpace(materialPath))
{
return false;
}
var normalized = materialPath.Replace('\\', '/').ToLowerInvariant();
var nameStart = normalized.LastIndexOf('/');
var fileName = nameStart >= 0 ? normalized[(nameStart + 1)..] : normalized;
return fileName.Contains("_bibo", StringComparison.Ordinal)
|| fileName.EndsWith("_a.mtrl", StringComparison.Ordinal);
}
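// Sketch of what the inlined check above matches, with made-up material paths:
//   ".../mt_c0201b0001_a.mtrl"     -> true  (file name ends with "_a.mtrl")
//   ".../mt_c0201b0101_bibo.mtrl"  -> true  (file name contains "_bibo")
//   ".../mt_c0201e0000_top_b.mtrl" -> false
// This is broader than the removed ModelDecimationFilters.BodyMaterials list,
// which matched only the specific b0001/b0101 file names.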
private sealed class BodyCollisionData
{
@@ -2104,6 +2115,16 @@ internal static class MdlDecimator
}
}
if (boneWeights != null
&& blendWeightEncoding == BlendWeightEncoding.Default
&& format.BlendWeightsElement is { } blendWeightsElement
&& (MdlFile.VertexType)blendWeightsElement.Type == MdlFile.VertexType.UShort4
&& ShouldTreatWeightsAsByteNormalized(boneWeights))
{
RescaleUShortAsByteWeights(boneWeights);
blendWeightEncoding = BlendWeightEncoding.UShortAsByte;
}
decoded = new DecodedMeshData(positions, normals, tangents, tangents2, colors, boneWeights, uvChannels, positionWs, normalWs, blendWeightEncoding);
return true;
}
@@ -3413,6 +3434,44 @@ internal static class MdlDecimator
return ToUShortNormalized(normalized);
}
private static bool ShouldTreatWeightsAsByteNormalized(BoneWeight[] weights)
{
const float maxByteUnorm = byte.MaxValue / (float)ushort.MaxValue;
var maxWeight = 0f;
for (var i = 0; i < weights.Length; i++)
{
var weight = weights[i];
maxWeight = Math.Max(maxWeight, weight.weight0);
maxWeight = Math.Max(maxWeight, weight.weight1);
maxWeight = Math.Max(maxWeight, weight.weight2);
maxWeight = Math.Max(maxWeight, weight.weight3);
if (maxWeight > maxByteUnorm)
{
return false;
}
}
return maxWeight > 0f;
}
private static void RescaleUShortAsByteWeights(BoneWeight[] weights)
{
var scale = ushort.MaxValue / (float)byte.MaxValue;
for (var i = 0; i < weights.Length; i++)
{
var weight = weights[i];
weights[i] = new BoneWeight(
weight.index0,
weight.index1,
weight.index2,
weight.index3,
weight.weight0 * scale,
weight.weight1 * scale,
weight.weight2 * scale,
weight.weight3 * scale);
}
}
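// Worked example for the two helpers above: a weight stored byte-normalised as 128
// but decoded as UShort4 reads back as 128 / 65535 ≈ 0.00195, below the
// maxByteUnorm threshold of 255 / 65535 ≈ 0.00389, so ShouldTreatWeightsAsByteNormalized
// flags the set and RescaleUShortAsByteWeights multiplies by 65535 / 255 = 257,
// restoring 128 * 257 / 65535 ≈ 0.502, i.e. the intended ~0.5 weight.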
private static void NormalizeWeights(float[] weights)
{
var sum = weights.Sum();

View File

@@ -1,132 +0,0 @@
namespace LightlessSync.Services.ModelDecimation;
internal static class ModelDecimationFilters
{
// MODELS ONLY HERE, NOT MATERIALS
internal static readonly string[] HairPaths =
[
"/hair/",
"hir.mdl",
];
internal static readonly string[] ClothingPaths =
[
"chara/equipment/",
"/equipment/",
"met.mdl",
"top.mdl",
"glv.mdl",
"dwn.mdl",
"sho.mdl",
];
internal static readonly string[] AccessoryPaths =
[
"/accessory/",
"chara/accessory/",
"ear.mdl",
"nek.mdl",
"wrs.mdl",
"ril.mdl",
"rir.mdl",
];
internal static readonly string[] BodyPaths =
[
"/body/",
"chara/equipment/e0000/model/",
"chara/equipment/e9903/model/",
"chara/equipment/e9903/model/",
"chara/equipment/e0279/model/",
];
internal static readonly string[] FaceHeadPaths =
[
"/face/",
"/obj/face/",
"/head/",
"fac.mdl",
];
internal static readonly string[] TailOrEarPaths =
[
"/tail/",
"/obj/tail/",
"/zear/",
"/obj/zear/",
"til.mdl",
"zer.mdl",
];
// BODY MATERIALS ONLY, NOT MESHES
internal static readonly string[] BodyMaterials =
[
"b0001_bibo.mtrl",
"b0101_bibo.mtrl",
"b0001_a.mtrl",
"b0001_b.mtrl",
"b0101_a.mtrl",
"b0101_b.mtrl",
];
internal static string NormalizePath(string path)
=> path.Replace('\\', '/').ToLowerInvariant();
internal static bool IsHairPath(string normalizedPath)
=> ContainsAny(normalizedPath, HairPaths);
internal static bool IsClothingPath(string normalizedPath)
=> ContainsAny(normalizedPath, ClothingPaths);
internal static bool IsAccessoryPath(string normalizedPath)
=> ContainsAny(normalizedPath, AccessoryPaths);
internal static bool IsBodyPath(string normalizedPath)
=> ContainsAny(normalizedPath, BodyPaths);
internal static bool IsFaceHeadPath(string normalizedPath)
=> ContainsAny(normalizedPath, FaceHeadPaths);
internal static bool IsTailOrEarPath(string normalizedPath)
=> ContainsAny(normalizedPath, TailOrEarPaths);
internal static bool ContainsAny(string normalizedPath, IReadOnlyList<string> markers)
{
for (var i = 0; i < markers.Count; i++)
{
if (normalizedPath.Contains(markers[i], StringComparison.Ordinal))
{
return true;
}
}
return false;
}
internal static bool IsBodyMaterial(string materialPath)
{
if (string.IsNullOrWhiteSpace(materialPath))
{
return false;
}
var normalized = NormalizePath(materialPath);
var nameStart = normalized.LastIndexOf('/');
var fileName = nameStart >= 0 ? normalized[(nameStart + 1)..] : normalized;
foreach (var marker in BodyMaterials)
{
if (fileName.Contains(marker, StringComparison.Ordinal))
{
return true;
}
}
return false;
}
}

View File

@@ -1,6 +1,7 @@
using LightlessSync.FileCache;
using LightlessSync.LightlessConfiguration;
using LightlessSync.LightlessConfiguration.Configurations;
using LightlessSync.Services;
using LightlessSync.Utils;
using Microsoft.Extensions.Logging;
using System.Collections.Concurrent;
@@ -19,6 +20,7 @@ public sealed class ModelDecimationService
private readonly FileCacheManager _fileCacheManager;
private readonly PlayerPerformanceConfigService _performanceConfigService;
private readonly XivDataStorageService _xivDataStorageService;
private readonly ModelProcessingQueue _processingQueue;
private readonly SemaphoreSlim _decimationSemaphore = new(MaxConcurrentJobs);
private readonly TaskRegistry<string> _decimationDeduplicator = new();
@@ -30,13 +32,15 @@ public sealed class ModelDecimationService
LightlessConfigService configService,
FileCacheManager fileCacheManager,
PlayerPerformanceConfigService performanceConfigService,
XivDataStorageService xivDataStorageService)
XivDataStorageService xivDataStorageService,
ModelProcessingQueue processingQueue)
{
_logger = logger;
_configService = configService;
_fileCacheManager = fileCacheManager;
_performanceConfigService = performanceConfigService;
_xivDataStorageService = xivDataStorageService;
_processingQueue = processingQueue;
}
public void ScheduleDecimation(string hash, string filePath, string? gamePath = null)
@@ -53,9 +57,9 @@ public sealed class ModelDecimationService
_logger.LogDebug("Queued model decimation for {Hash}", hash);
_decimationDeduplicator.GetOrStart(hash, async () =>
_decimationDeduplicator.GetOrStart(hash, () => _processingQueue.Enqueue(async token =>
{
await _decimationSemaphore.WaitAsync().ConfigureAwait(false);
await _decimationSemaphore.WaitAsync(token).ConfigureAwait(false);
try
{
await DecimateInternalAsync(hash, filePath).ConfigureAwait(false);
@@ -69,7 +73,7 @@ public sealed class ModelDecimationService
{
_decimationSemaphore.Release();
}
});
}, CancellationToken.None));
}
public void ScheduleBatchDecimation(string hash, string filePath, ModelDecimationSettings settings)
@@ -89,9 +93,9 @@ public sealed class ModelDecimationService
_logger.LogInformation("Queued batch model decimation for {Hash}", hash);
_decimationDeduplicator.GetOrStart(hash, async () =>
_decimationDeduplicator.GetOrStart(hash, () => _processingQueue.Enqueue(async token =>
{
await _decimationSemaphore.WaitAsync().ConfigureAwait(false);
await _decimationSemaphore.WaitAsync(token).ConfigureAwait(false);
try
{
await DecimateInternalAsync(hash, filePath, settings, allowExisting: false, destinationOverride: filePath, registerDecimatedPath: false).ConfigureAwait(false);
@@ -105,7 +109,7 @@ public sealed class ModelDecimationService
{
_decimationSemaphore.Release();
}
});
}, CancellationToken.None));
}
public bool ShouldScheduleDecimation(string hash, string filePath, string? gamePath = null)
@@ -348,40 +352,46 @@ public sealed class ModelDecimationService
return true;
}
var normalized = ModelDecimationFilters.NormalizePath(gamePath);
if (ModelDecimationFilters.IsHairPath(normalized))
var normalized = NormalizeGamePath(gamePath);
if (normalized.Contains("/hair/", StringComparison.Ordinal))
{
return false;
}
if (ModelDecimationFilters.IsClothingPath(normalized))
if (normalized.Contains("/chara/equipment/", StringComparison.Ordinal))
{
return _performanceConfigService.Current.ModelDecimationAllowClothing;
}
if (ModelDecimationFilters.IsAccessoryPath(normalized))
if (normalized.Contains("/chara/accessory/", StringComparison.Ordinal))
{
return _performanceConfigService.Current.ModelDecimationAllowAccessories;
}
if (ModelDecimationFilters.IsBodyPath(normalized))
if (normalized.Contains("/chara/human/", StringComparison.Ordinal))
{
return _performanceConfigService.Current.ModelDecimationAllowBody;
}
if (normalized.Contains("/body/", StringComparison.Ordinal))
{
return _performanceConfigService.Current.ModelDecimationAllowBody;
}
if (ModelDecimationFilters.IsFaceHeadPath(normalized))
{
return _performanceConfigService.Current.ModelDecimationAllowFaceHead;
}
if (normalized.Contains("/face/", StringComparison.Ordinal) || normalized.Contains("/head/", StringComparison.Ordinal))
{
return _performanceConfigService.Current.ModelDecimationAllowFaceHead;
}
if (ModelDecimationFilters.IsTailOrEarPath(normalized))
{
return _performanceConfigService.Current.ModelDecimationAllowTail;
if (normalized.Contains("/tail/", StringComparison.Ordinal))
{
return _performanceConfigService.Current.ModelDecimationAllowTail;
}
}
return true;
}
private static string NormalizeGamePath(string path)
=> path.Replace('\\', '/').ToLowerInvariant();
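// Worked example for the inlined gating above, using a made-up path:
//   NormalizeGamePath(@"chara\human\c0201\obj\body\b0001\model\c0201b0001_top.mdl")
//     -> "chara/human/c0201/obj/body/b0001/model/c0201b0001_top.mdl"
// The result contains "/body/", so decimation follows ModelDecimationAllowBody.
// Unlike the removed ModelDecimationFilters, bare file-name markers such as
// "top.mdl" or "ear.mdl" are no longer matched on their own.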
private bool TryGetDecimationSettings(out ModelDecimationSettings settings)
{
settings = new ModelDecimationSettings(

View File

@@ -0,0 +1,19 @@
using Microsoft.Extensions.Logging;
namespace LightlessSync.Services;
public sealed class ModelProcessingQueue : IDisposable
{
private readonly AssetProcessingQueue _queue;
public ModelProcessingQueue(ILogger<ModelProcessingQueue> logger)
{
_queue = new AssetProcessingQueue(logger, "LightlessSync.ModelProcessing");
}
public Task Enqueue(Func<CancellationToken, Task> work, CancellationToken token = default)
=> _queue.Enqueue(work, token);
public void Dispose()
=> _queue.Dispose();
}
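This wrapper and the TextureProcessingQueue further down exist mainly so DI can hand model and texture work to separate single-threaded queues; a minimal wiring sketch (illustrative only, the plugin gets both from the AddSingleton registrations in Plugin.cs above):

using Microsoft.Extensions.Logging.Abstractions;

using var modelQueue = new ModelProcessingQueue(NullLogger<ModelProcessingQueue>.Instance);
using var textureQueue = new TextureProcessingQueue(NullLogger<TextureProcessingQueue>.Instance);

// Each wrapper owns its own AssetProcessingQueue worker thread, so a long
// model decimation job cannot starve texture downscaling (and vice versa).
await Task.WhenAll(
    modelQueue.Enqueue(async token => await Task.Delay(10, token)),
    textureQueue.Enqueue(async token => await Task.Delay(10, token)));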

View File

@@ -10,18 +10,15 @@ namespace LightlessSync.Services;
public sealed class PenumbraTempCollectionJanitor : DisposableMediatorSubscriberBase
{
private readonly IpcManager _ipc;
private readonly TempCollectionConfigService _config;
private readonly CancellationTokenSource _cleanupCts = new();
private readonly LightlessConfigService _config;
private int _ran;
private const int CleanupBatchSize = 50;
private static readonly TimeSpan CleanupBatchDelay = TimeSpan.FromMilliseconds(50);
private static readonly TimeSpan OrphanCleanupDelay = TimeSpan.FromDays(1);
public PenumbraTempCollectionJanitor(
ILogger<PenumbraTempCollectionJanitor> logger,
LightlessMediator mediator,
IpcManager ipc,
TempCollectionConfigService config) : base(logger, mediator)
LightlessConfigService config) : base(logger, mediator)
{
_ipc = ipc;
_config = config;
@@ -34,6 +31,10 @@ public sealed class PenumbraTempCollectionJanitor : DisposableMediatorSubscriber
if (id == Guid.Empty) return;
var changed = false;
var config = _config.Current;
if (config.OrphanableTempCollections.Add(id))
{
changed = true;
}
var now = DateTime.UtcNow;
var existing = config.OrphanableTempCollectionEntries.FirstOrDefault(entry => entry.Id == id);
@@ -62,7 +63,8 @@ public sealed class PenumbraTempCollectionJanitor : DisposableMediatorSubscriber
{
if (id == Guid.Empty) return;
var config = _config.Current;
var changed = RemoveEntry(config.OrphanableTempCollectionEntries, id) > 0;
var changed = config.OrphanableTempCollections.Remove(id);
changed |= RemoveEntry(config.OrphanableTempCollectionEntries, id) > 0;
if (changed)
{
_config.Save();
@@ -77,31 +79,14 @@ public sealed class PenumbraTempCollectionJanitor : DisposableMediatorSubscriber
if (!_ipc.Penumbra.APIAvailable)
return;
_ = Task.Run(async () =>
{
try
{
await CleanupOrphansOnBootAsync(_cleanupCts.Token).ConfigureAwait(false);
}
catch (OperationCanceledException)
{
}
catch (Exception ex)
{
Logger.LogError(ex, "Error cleaning orphaned temp collections");
}
});
}
private async Task CleanupOrphansOnBootAsync(CancellationToken token)
{
var config = _config.Current;
var ids = config.OrphanableTempCollections;
var entries = config.OrphanableTempCollectionEntries;
if (entries.Count == 0)
if (ids.Count == 0 && entries.Count == 0)
return;
var now = DateTime.UtcNow;
var changed = EnsureEntryTimes(entries, now);
var changed = EnsureEntries(ids, entries, now);
var cutoff = now - OrphanCleanupDelay;
var expired = entries
.Where(entry => entry.Id != Guid.Empty && entry.RegisteredAtUtc != DateTime.MinValue && entry.RegisteredAtUtc <= cutoff)
@@ -120,47 +105,25 @@ public sealed class PenumbraTempCollectionJanitor : DisposableMediatorSubscriber
var appId = Guid.NewGuid();
Logger.LogInformation("Cleaning up {count} orphaned Lightless temp collections older than {delay}", expired.Count, OrphanCleanupDelay);
List<Guid> removedIds = [];
foreach (var id in expired)
{
if (token.IsCancellationRequested)
{
break;
}
try
{
await _ipc.Penumbra.RemoveTemporaryCollectionAsync(Logger, appId, id).ConfigureAwait(false);
_ipc.Penumbra.RemoveTemporaryCollectionAsync(Logger, appId, id)
.GetAwaiter().GetResult();
}
catch (Exception ex)
{
Logger.LogDebug(ex, "Failed removing orphaned temp collection {id}", id);
}
removedIds.Add(id);
if (removedIds.Count % CleanupBatchSize == 0)
{
try
{
await Task.Delay(CleanupBatchDelay, token).ConfigureAwait(false);
}
catch (OperationCanceledException)
{
break;
}
}
}
if (removedIds.Count == 0)
foreach (var id in expired)
{
if (changed)
{
_config.Save();
}
return;
ids.Remove(id);
}
foreach (var id in removedIds)
foreach (var id in expired)
{
RemoveEntry(entries, id);
}
@@ -168,17 +131,6 @@ public sealed class PenumbraTempCollectionJanitor : DisposableMediatorSubscriber
_config.Save();
}
protected override void Dispose(bool disposing)
{
if (disposing)
{
_cleanupCts.Cancel();
_cleanupCts.Dispose();
}
base.Dispose(disposing);
}
private static int RemoveEntry(List<OrphanableTempCollectionEntry> entries, Guid id)
{
var removed = 0;
@@ -196,9 +148,29 @@ public sealed class PenumbraTempCollectionJanitor : DisposableMediatorSubscriber
return removed;
}
private static bool EnsureEntryTimes(List<OrphanableTempCollectionEntry> entries, DateTime now)
private static bool EnsureEntries(HashSet<Guid> ids, List<OrphanableTempCollectionEntry> entries, DateTime now)
{
var changed = false;
foreach (var id in ids)
{
if (id == Guid.Empty)
{
continue;
}
if (entries.Any(entry => entry.Id == id))
{
continue;
}
entries.Add(new OrphanableTempCollectionEntry
{
Id = id,
RegisteredAtUtc = now
});
changed = true;
}
foreach (var entry in entries)
{
if (entry.Id == Guid.Empty || entry.RegisteredAtUtc != DateTime.MinValue)

View File

@@ -8,6 +8,7 @@ using System.Threading;
using OtterTex;
using OtterImage = OtterTex.Image;
using LightlessSync.LightlessConfiguration;
using LightlessSync.Services;
using LightlessSync.Utils;
using LightlessSync.FileCache;
using Microsoft.Extensions.Logging;
@@ -33,6 +34,7 @@ public sealed class TextureDownscaleService
private readonly PlayerPerformanceConfigService _playerPerformanceConfigService;
private readonly FileCacheManager _fileCacheManager;
private readonly TextureCompressionService _textureCompressionService;
private readonly TextureProcessingQueue _processingQueue;
private readonly TaskRegistry<string> _downscaleDeduplicator = new();
private readonly ConcurrentDictionary<string, string> _downscaledPaths = new(StringComparer.OrdinalIgnoreCase);
@@ -73,13 +75,15 @@ public sealed class TextureDownscaleService
LightlessConfigService configService,
PlayerPerformanceConfigService playerPerformanceConfigService,
FileCacheManager fileCacheManager,
TextureCompressionService textureCompressionService)
TextureCompressionService textureCompressionService,
TextureProcessingQueue processingQueue)
{
_logger = logger;
_configService = configService;
_playerPerformanceConfigService = playerPerformanceConfigService;
_fileCacheManager = fileCacheManager;
_textureCompressionService = textureCompressionService;
_processingQueue = processingQueue;
}
public void ScheduleDownscale(string hash, string filePath, TextureMapKind mapKind)
@@ -90,7 +94,7 @@ public sealed class TextureDownscaleService
if (!filePath.EndsWith(".tex", StringComparison.OrdinalIgnoreCase)) return;
if (_downscaleDeduplicator.TryGetExisting(hash, out _)) return;
_downscaleDeduplicator.GetOrStart(hash, async () =>
_downscaleDeduplicator.GetOrStart(hash, () => _processingQueue.Enqueue(async token =>
{
TextureMapKind mapKind;
try
@@ -104,7 +108,7 @@ public sealed class TextureDownscaleService
}
await DownscaleInternalAsync(hash, filePath, mapKind).ConfigureAwait(false);
});
}, CancellationToken.None));
}
public bool ShouldScheduleDownscale(string filePath)
@@ -382,6 +386,12 @@ public sealed class TextureDownscaleService
{
var isCompressed = sourceFormat.IsCompressed();
var targetFormat = isCompressed ? sourceFormat : DXGIFormat.B8G8R8A8UNorm;
_logger.LogDebug(
"Downscale convert target {TargetFormat} (source {SourceFormat}, compressed {IsCompressed}, penumbraFallback {PenumbraFallback})",
targetFormat,
sourceFormat,
isCompressed,
attemptPenumbraFallback);
try
{
result = source.Convert(targetFormat);
@@ -433,6 +443,7 @@ public sealed class TextureDownscaleService
{
try
{
_logger.LogDebug("Downscale Penumbra re-encode target {Target} for {Hash}.", target, hash);
using var uncompressed = resizedScratch.Convert(DXGIFormat.B8G8R8A8UNorm);
TexFileHelper.Save(destination, uncompressed);
}

View File

@@ -0,0 +1,19 @@
using Microsoft.Extensions.Logging;
namespace LightlessSync.Services;
public sealed class TextureProcessingQueue : IDisposable
{
private readonly AssetProcessingQueue _queue;
public TextureProcessingQueue(ILogger<TextureProcessingQueue> logger)
{
_queue = new AssetProcessingQueue(logger, "LightlessSync.TextureProcessing");
}
public Task Enqueue(Func<CancellationToken, Task> work, CancellationToken token = default)
=> _queue.Enqueue(work, token);
public void Dispose()
=> _queue.Dispose();
}

View File

@@ -56,6 +56,7 @@ public class DataAnalysisUi : WindowMediatorSubscriberBase
private readonly ModelDecimationService _modelDecimationService;
private readonly TextureCompressionService _textureCompressionService;
private readonly TextureMetadataHelper _textureMetadataHelper;
private readonly TextureProcessingQueue _processingQueue;
private readonly List<TextureRow> _textureRows = new();
private readonly Dictionary<string, TextureCompressionTarget> _textureSelections = new(StringComparer.OrdinalIgnoreCase);
@@ -137,7 +138,8 @@ public class DataAnalysisUi : WindowMediatorSubscriberBase
LightlessConfigService configService,
PlayerPerformanceConfigService playerPerformanceConfig, TransientResourceManager transientResourceManager,
TransientConfigService transientConfigService, ModelDecimationService modelDecimationService,
TextureCompressionService textureCompressionService, TextureMetadataHelper textureMetadataHelper)
TextureCompressionService textureCompressionService, TextureMetadataHelper textureMetadataHelper,
TextureProcessingQueue processingQueue)
: base(logger, mediator, "Lightless Character Data Analysis", performanceCollectorService)
{
_characterAnalyzer = characterAnalyzer;
@@ -150,6 +152,7 @@ public class DataAnalysisUi : WindowMediatorSubscriberBase
_modelDecimationService = modelDecimationService;
_textureCompressionService = textureCompressionService;
_textureMetadataHelper = textureMetadataHelper;
_processingQueue = processingQueue;
Mediator.Subscribe<CharacterDataAnalyzedMessage>(this, (_) =>
{
_hasUpdate = true;
@@ -3716,7 +3719,10 @@ public class DataAnalysisUi : WindowMediatorSubscriberBase
_conversionCurrentFileProgress = 0;
_conversionFailed = false;
_conversionTask = RunTextureConversionAsync(requests, _conversionCancellationTokenSource.Token);
var conversionToken = _conversionCancellationTokenSource.Token;
_conversionTask = _processingQueue.Enqueue(
queueToken => RunTextureConversionAsync(requests, queueToken),
conversionToken);
_showModal = true;
}