sigma update

2026-01-16 11:00:58 +09:00
parent 59ed03a825
commit 96123d00a2
51 changed files with 6640 additions and 1382 deletions


@@ -2,6 +2,7 @@ using LightlessSync.Interop.Ipc;
using LightlessSync.FileCache;
using Microsoft.Extensions.Logging;
using Penumbra.Api.Enums;
using System.Globalization;
namespace LightlessSync.Services.TextureCompression;
@@ -27,7 +28,9 @@ public sealed class TextureCompressionService
public async Task ConvertTexturesAsync(
IReadOnlyList<TextureCompressionRequest> requests,
IProgress<TextureConversionProgress>? progress,
-CancellationToken token)
+CancellationToken token,
+bool requestRedraw = true,
+bool includeMipMaps = true)
{
if (requests.Count == 0)
{
@@ -48,7 +51,7 @@ public sealed class TextureCompressionService
continue;
}
-await RunPenumbraConversionAsync(request, textureType, total, completed, progress, token).ConfigureAwait(false);
+await RunPenumbraConversionAsync(request, textureType, total, completed, progress, token, requestRedraw, includeMipMaps).ConfigureAwait(false);
completed++;
}
@@ -65,14 +68,16 @@ public sealed class TextureCompressionService
int total,
int completedBefore,
IProgress<TextureConversionProgress>? progress,
-CancellationToken token)
+CancellationToken token,
+bool requestRedraw,
+bool includeMipMaps)
{
var primaryPath = request.PrimaryFilePath;
var displayJob = new TextureConversionJob(
primaryPath,
primaryPath,
targetType,
-IncludeMipMaps: true,
+IncludeMipMaps: includeMipMaps,
request.DuplicateFilePaths);
var backupPath = CreateBackupCopy(primaryPath);
@@ -83,7 +88,7 @@ public sealed class TextureCompressionService
try
{
WaitForAccess(primaryPath);
-await _ipcManager.Penumbra.ConvertTextureFiles(_logger, new[] { conversionJob }, null, token).ConfigureAwait(false);
+await _ipcManager.Penumbra.ConvertTextureFiles(_logger, new[] { conversionJob }, null, token, requestRedraw).ConfigureAwait(false);
if (!IsValidConversionResult(displayJob.OutputFile))
{
@@ -128,19 +133,46 @@ public sealed class TextureCompressionService
var cacheEntries = _fileCacheManager.GetFileCachesByPaths(paths.ToArray());
foreach (var path in paths)
{
+var hasExpectedHash = TryGetExpectedHashFromPath(path, out var expectedHash);
if (!cacheEntries.TryGetValue(path, out var entry) || entry is null)
{
-entry = _fileCacheManager.CreateFileEntry(path);
+if (hasExpectedHash)
+{
+entry = _fileCacheManager.CreateCacheEntryWithKnownHash(path, expectedHash);
+}
+entry ??= _fileCacheManager.CreateFileEntry(path);
if (entry is null)
{
_logger.LogWarning("Unable to locate cache entry for {Path}; skipping hash refresh", path);
continue;
}
}
+else if (hasExpectedHash && entry.IsCacheEntry && !string.Equals(entry.Hash, expectedHash, StringComparison.OrdinalIgnoreCase))
+{
+_logger.LogDebug("Fixing cache hash mismatch for {Path}: {Current} -> {Expected}", path, entry.Hash, expectedHash);
+_fileCacheManager.RemoveHashedFile(entry.Hash, entry.PrefixedFilePath, removeDerivedFiles: false);
+var corrected = _fileCacheManager.CreateCacheEntryWithKnownHash(path, expectedHash);
+if (corrected is not null)
+{
+entry = corrected;
+}
+}
try
{
-_fileCacheManager.UpdateHashedFile(entry);
+if (entry.IsCacheEntry)
+{
+var info = new FileInfo(path);
+entry.Size = info.Length;
+entry.CompressedSize = null;
+entry.LastModifiedDateTicks = info.LastWriteTimeUtc.Ticks.ToString(CultureInfo.InvariantCulture);
+_fileCacheManager.UpdateHashedFile(entry, computeProperties: false);
+}
+else
+{
+_fileCacheManager.UpdateHashedFile(entry);
+}
}
catch (Exception ex)
{
@@ -149,6 +181,35 @@ public sealed class TextureCompressionService
}
}
private static bool TryGetExpectedHashFromPath(string path, out string hash)
{
hash = Path.GetFileNameWithoutExtension(path);
if (string.IsNullOrWhiteSpace(hash))
{
return false;
}
if (hash.Length is not (40 or 64))
{
return false;
}
for (var i = 0; i < hash.Length; i++)
{
var c = hash[i];
var isHex = (c >= '0' && c <= '9')
|| (c >= 'a' && c <= 'f')
|| (c >= 'A' && c <= 'F');
if (!isHex)
{
return false;
}
}
hash = hash.ToUpperInvariant();
return true;
}
private static readonly string WorkingDirectory =
Path.Combine(Path.GetTempPath(), "LightlessSync.TextureCompression");
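
TryGetExpectedHashFromPath leans on the convention that cache files here are named by their content hash (the downscaled textures later in this commit are written as {hash}.tex, for instance), so a 40- or 64-character hex file name can be treated as the expected hash and normalised to upper case. A standalone sketch of the same check, with made-up file names:

// --- illustrative sketch, not part of the diff ---
// Mirrors the new helper: accept only 40- or 64-character hex file names.
// Assumes: using System.IO;
static bool LooksLikeHashFileName(string path, out string hash)
{
    hash = Path.GetFileNameWithoutExtension(path);
    if (string.IsNullOrWhiteSpace(hash) || hash.Length is not (40 or 64))
        return false;
    foreach (var c in hash)
    {
        if (c is not ((>= '0' and <= '9') or (>= 'a' and <= 'f') or (>= 'A' and <= 'F')))
            return false;
    }
    hash = hash.ToUpperInvariant();
    return true;
}

// LooksLikeHashFileName(@"cache\8843d7f92416211de9ebb963ff4ce28125932878.tex", out _) -> true
// LooksLikeHashFileName(@"mods\chara_body_n.tex", out _)                              -> false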


@@ -4,9 +4,11 @@ using System.Buffers.Binary;
using System.Globalization;
using System.IO;
using System.Runtime.InteropServices;
using System.Threading;
using OtterTex;
using OtterImage = OtterTex.Image;
using LightlessSync.LightlessConfiguration;
using LightlessSync.Utils;
using LightlessSync.FileCache;
using Microsoft.Extensions.Logging;
using Lumina.Data.Files;
@@ -30,10 +32,12 @@ public sealed class TextureDownscaleService
private readonly LightlessConfigService _configService;
private readonly PlayerPerformanceConfigService _playerPerformanceConfigService;
private readonly FileCacheManager _fileCacheManager;
+private readonly TextureCompressionService _textureCompressionService;
-private readonly ConcurrentDictionary<string, Task> _activeJobs = new(StringComparer.OrdinalIgnoreCase);
+private readonly TaskRegistry<string> _downscaleDeduplicator = new();
private readonly ConcurrentDictionary<string, string> _downscaledPaths = new(StringComparer.OrdinalIgnoreCase);
private readonly SemaphoreSlim _downscaleSemaphore = new(4);
+private readonly SemaphoreSlim _compressionSemaphore = new(1);
private static readonly IReadOnlyDictionary<int, TextureCompressionTarget> BlockCompressedFormatMap =
new Dictionary<int, TextureCompressionTarget>
{
@@ -68,12 +72,14 @@ public sealed class TextureDownscaleService
ILogger<TextureDownscaleService> logger,
LightlessConfigService configService,
PlayerPerformanceConfigService playerPerformanceConfigService,
-FileCacheManager fileCacheManager)
+FileCacheManager fileCacheManager,
+TextureCompressionService textureCompressionService)
{
_logger = logger;
_configService = configService;
_playerPerformanceConfigService = playerPerformanceConfigService;
_fileCacheManager = fileCacheManager;
+_textureCompressionService = textureCompressionService;
}
public void ScheduleDownscale(string hash, string filePath, TextureMapKind mapKind)
@@ -82,9 +88,9 @@ public sealed class TextureDownscaleService
public void ScheduleDownscale(string hash, string filePath, Func<TextureMapKind> mapKindFactory)
{
if (!filePath.EndsWith(".tex", StringComparison.OrdinalIgnoreCase)) return;
-if (_activeJobs.ContainsKey(hash)) return;
+if (_downscaleDeduplicator.TryGetExisting(hash, out _)) return;
-_activeJobs[hash] = Task.Run(async () =>
+_downscaleDeduplicator.GetOrStart(hash, async () =>
{
TextureMapKind mapKind;
try
@@ -98,7 +104,7 @@ public sealed class TextureDownscaleService
}
await DownscaleInternalAsync(hash, filePath, mapKind).ConfigureAwait(false);
-}, CancellationToken.None);
+});
}
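
TaskRegistry<string> (presumably pulled in via the new using LightlessSync.Utils; its implementation is not part of this diff) replaces the hand-rolled _activeJobs dictionary. From its call sites, GetOrStart runs the work once per key and TryGetExisting exposes the in-flight task, which is why the manual TryRemove in the finally block further down can be dropped. A minimal sketch of a registry with that shape, assuming it cleans up after itself; the real type may differ:

// --- illustrative sketch, not part of the diff ---
using System;
using System.Collections.Generic;
using System.Threading.Tasks;

public sealed class TaskRegistrySketch<TKey> where TKey : notnull
{
    private readonly Dictionary<TKey, Task> _tasks = new();
    private readonly object _gate = new();

    public bool TryGetExisting(TKey key, out Task task)
    {
        lock (_gate) return _tasks.TryGetValue(key, out task!);
    }

    public Task GetOrStart(TKey key, Func<Task> factory)
    {
        lock (_gate)
        {
            if (_tasks.TryGetValue(key, out var existing)) return existing;
            var task = Task.Run(async () =>
            {
                try { await factory().ConfigureAwait(false); }
                finally { lock (_gate) _tasks.Remove(key); } // self-cleanup replaces the old finally-block removal
            });
            _tasks[key] = task;
            return task;
        }
    }
}

The lock keeps registration and self-removal ordered even when the task finishes immediately, which is what makes the explicit _activeJobs.TryRemove in DownscaleInternalAsync unnecessary.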
public bool ShouldScheduleDownscale(string filePath)
@@ -107,7 +113,9 @@ public sealed class TextureDownscaleService
return false;
var performanceConfig = _playerPerformanceConfigService.Current;
-return performanceConfig.EnableNonIndexTextureMipTrim || performanceConfig.EnableIndexTextureDownscale;
+return performanceConfig.EnableNonIndexTextureMipTrim
+|| performanceConfig.EnableIndexTextureDownscale
+|| performanceConfig.EnableUncompressedTextureCompression;
}
public string GetPreferredPath(string hash, string originalPath)
@@ -144,7 +152,7 @@ public sealed class TextureDownscaleService
continue;
}
-if (_activeJobs.TryGetValue(hash, out var job))
+if (_downscaleDeduplicator.TryGetExisting(hash, out var job))
{
pending.Add(job);
}
@@ -182,10 +190,18 @@ public sealed class TextureDownscaleService
targetMaxDimension = ResolveTargetMaxDimension();
onlyDownscaleUncompressed = performanceConfig.OnlyDownscaleUncompressedTextures;
if (onlyDownscaleUncompressed && !headerInfo.HasValue)
{
_downscaledPaths[hash] = sourcePath;
_logger.LogTrace("Skipping downscale for texture {Hash}; format unknown and only-uncompressed enabled.", hash);
return;
}
destination = Path.Combine(GetDownscaledDirectory(), $"{hash}.tex");
if (File.Exists(destination))
{
RegisterDownscaledTexture(hash, sourcePath, destination);
await TryAutoCompressAsync(hash, destination, mapKind, null).ConfigureAwait(false);
return;
}
@@ -196,6 +212,7 @@ public sealed class TextureDownscaleService
if (performanceConfig.EnableNonIndexTextureMipTrim
&& await TryDropTopMipAsync(hash, sourcePath, destination, targetMaxDimension, onlyDownscaleUncompressed, headerInfo).ConfigureAwait(false))
{
await TryAutoCompressAsync(hash, destination, mapKind, null).ConfigureAwait(false);
return;
}
@@ -206,6 +223,7 @@ public sealed class TextureDownscaleService
_downscaledPaths[hash] = sourcePath;
_logger.LogTrace("Skipping downscale for non-index texture {Hash}; no mip reduction required.", hash);
await TryAutoCompressAsync(hash, sourcePath, mapKind, headerInfo).ConfigureAwait(false);
return;
}
@@ -213,6 +231,7 @@ public sealed class TextureDownscaleService
{
_downscaledPaths[hash] = sourcePath;
_logger.LogTrace("Skipping downscale for index texture {Hash}; feature disabled.", hash);
await TryAutoCompressAsync(hash, sourcePath, mapKind, headerInfo).ConfigureAwait(false);
return;
}
@@ -222,6 +241,7 @@ public sealed class TextureDownscaleService
{
_downscaledPaths[hash] = sourcePath;
_logger.LogTrace("Skipping downscale for index texture {Hash}; header dimensions {Width}x{Height} within target.", hash, headerValue.Width, headerValue.Height);
await TryAutoCompressAsync(hash, sourcePath, mapKind, headerInfo).ConfigureAwait(false);
return;
}
@@ -229,10 +249,12 @@ public sealed class TextureDownscaleService
{
_downscaledPaths[hash] = sourcePath;
_logger.LogTrace("Skipping downscale for index texture {Hash}; block compressed format {Format}.", hash, headerInfo.Value.Format);
await TryAutoCompressAsync(hash, sourcePath, mapKind, headerInfo).ConfigureAwait(false);
return;
}
using var sourceScratch = TexFileHelper.Load(sourcePath);
var sourceFormat = sourceScratch.Meta.Format;
using var rgbaScratch = sourceScratch.GetRGBA(out var rgbaInfo).ThrowIfError(rgbaInfo);
var bytesPerPixel = rgbaInfo.Meta.Format.BitsPerPixel() / 8;
@@ -248,16 +270,39 @@ public sealed class TextureDownscaleService
{
_downscaledPaths[hash] = sourcePath;
_logger.LogTrace("Skipping downscale for index texture {Hash}; already within bounds.", hash);
+await TryAutoCompressAsync(hash, sourcePath, mapKind, headerInfo).ConfigureAwait(false);
return;
}
using var resized = IndexDownscaler.Downscale(originalImage, targetSize.width, targetSize.height, BlockMultiple);
+var canReencodeWithPenumbra = TryResolveCompressionTarget(headerInfo, sourceFormat, out var compressionTarget);
using var resizedScratch = CreateScratchImage(resized, targetSize.width, targetSize.height);
-using var finalScratch = resizedScratch.Convert(DXGIFormat.B8G8R8A8UNorm);
+if (!TryConvertForSave(resizedScratch, sourceFormat, out var finalScratch, canReencodeWithPenumbra))
+{
+if (canReencodeWithPenumbra
+&& await TryReencodeWithPenumbraAsync(hash, sourcePath, destination, resizedScratch, compressionTarget).ConfigureAwait(false))
+{
+await TryAutoCompressAsync(hash, destination, mapKind, null).ConfigureAwait(false);
+return;
+}
-TexFileHelper.Save(destination, finalScratch);
-RegisterDownscaledTexture(hash, sourcePath, destination);
+_downscaledPaths[hash] = sourcePath;
+_logger.LogTrace(
+"Skipping downscale for index texture {Hash}; failed to re-encode to {Format}.",
+hash,
+sourceFormat);
+await TryAutoCompressAsync(hash, sourcePath, mapKind, headerInfo).ConfigureAwait(false);
+return;
+}
+using (finalScratch)
+{
+TexFileHelper.Save(destination, finalScratch);
+RegisterDownscaledTexture(hash, sourcePath, destination);
+}
+await TryAutoCompressAsync(hash, destination, mapKind, null).ConfigureAwait(false);
}
catch (Exception ex)
{
@@ -277,7 +322,6 @@ public sealed class TextureDownscaleService
finally
{
_downscaleSemaphore.Release();
-_activeJobs.TryRemove(hash, out _);
}
}
@@ -330,6 +374,157 @@ public sealed class TextureDownscaleService
}
}
private bool TryConvertForSave(
ScratchImage source,
DXGIFormat sourceFormat,
out ScratchImage result,
bool attemptPenumbraFallback)
{
var isCompressed = sourceFormat.IsCompressed();
var targetFormat = isCompressed ? sourceFormat : DXGIFormat.B8G8R8A8UNorm;
try
{
result = source.Convert(targetFormat);
return true;
}
catch (Exception ex)
{
var compressedFallback = attemptPenumbraFallback
? " Attempting Penumbra re-encode."
: " Skipping downscale.";
_logger.LogWarning(
ex,
"Failed to convert downscaled texture to {Format}.{Fallback}",
targetFormat,
isCompressed ? compressedFallback : " Falling back to B8G8R8A8.");
if (isCompressed)
{
result = default!;
return false;
}
result = source.Convert(DXGIFormat.B8G8R8A8UNorm);
return true;
}
}
private bool TryResolveCompressionTarget(TexHeaderInfo? headerInfo, DXGIFormat sourceFormat, out TextureCompressionTarget target)
{
if (headerInfo is { } info && TryGetCompressionTarget(info.Format, out target))
{
return _textureCompressionService.IsTargetSelectable(target);
}
if (sourceFormat.IsCompressed() && BlockCompressedFormatMap.TryGetValue((int)sourceFormat, out target))
{
return _textureCompressionService.IsTargetSelectable(target);
}
target = default;
return false;
}
private async Task<bool> TryReencodeWithPenumbraAsync(
string hash,
string sourcePath,
string destination,
ScratchImage resizedScratch,
TextureCompressionTarget target)
{
try
{
using var uncompressed = resizedScratch.Convert(DXGIFormat.B8G8R8A8UNorm);
TexFileHelper.Save(destination, uncompressed);
}
catch (Exception ex)
{
_logger.LogWarning(ex, "Failed to save uncompressed downscaled texture for {Hash}. Skipping downscale.", hash);
TryDelete(destination);
return false;
}
await _compressionSemaphore.WaitAsync().ConfigureAwait(false);
try
{
var request = new TextureCompressionRequest(destination, Array.Empty<string>(), target);
await _textureCompressionService
.ConvertTexturesAsync(new[] { request }, null, CancellationToken.None, requestRedraw: false)
.ConfigureAwait(false);
}
catch (Exception ex)
{
_logger.LogWarning(ex, "Failed to re-encode downscaled texture {Hash} to {Target}. Skipping downscale.", hash, target);
TryDelete(destination);
return false;
}
finally
{
_compressionSemaphore.Release();
}
RegisterDownscaledTexture(hash, sourcePath, destination);
_logger.LogDebug("Downscaled texture {Hash} -> {Path} (re-encoded via Penumbra).", hash, destination);
return true;
}
private async Task TryAutoCompressAsync(string hash, string texturePath, TextureMapKind mapKind, TexHeaderInfo? headerInfo)
{
var performanceConfig = _playerPerformanceConfigService.Current;
if (!performanceConfig.EnableUncompressedTextureCompression)
{
return;
}
if (string.IsNullOrEmpty(texturePath) || !File.Exists(texturePath))
{
return;
}
var info = headerInfo ?? (TryReadTexHeader(texturePath, out var header) ? header : (TexHeaderInfo?)null);
if (!info.HasValue)
{
_logger.LogTrace("Skipping auto-compress for texture {Hash}; unable to read header.", hash);
return;
}
if (IsBlockCompressedFormat(info.Value.Format))
{
_logger.LogTrace("Skipping auto-compress for texture {Hash}; already block-compressed.", hash);
return;
}
var suggestion = TextureMetadataHelper.GetSuggestedTarget(info.Value.Format.ToString(), mapKind, texturePath);
if (suggestion is null)
{
return;
}
var target = _textureCompressionService.NormalizeTarget(suggestion.Value.Target);
if (!_textureCompressionService.IsTargetSelectable(target))
{
_logger.LogTrace("Skipping auto-compress for texture {Hash}; target {Target} not supported.", hash, target);
return;
}
await _compressionSemaphore.WaitAsync().ConfigureAwait(false);
try
{
var includeMipMaps = !performanceConfig.SkipUncompressedTextureCompressionMipMaps;
var request = new TextureCompressionRequest(texturePath, Array.Empty<string>(), target);
await _textureCompressionService
.ConvertTexturesAsync(new[] { request }, null, CancellationToken.None, requestRedraw: false, includeMipMaps: includeMipMaps)
.ConfigureAwait(false);
}
catch (Exception ex)
{
_logger.LogWarning(ex, "Auto-compress failed for texture {Hash} ({Path})", hash, texturePath);
}
finally
{
_compressionSemaphore.Release();
}
}
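
For reference, these are the performance-config flags the new code consults and the path each one gates. The snippet only restates what the diff reads from PlayerPerformanceConfigService.Current; the config type is assumed to expose them as plain bool properties:

// --- illustrative sketch, not part of the diff ---
var perf = _playerPerformanceConfigService.Current;

// ShouldScheduleDownscale now also schedules work purely for auto-compression:
var schedule = perf.EnableNonIndexTextureMipTrim
            || perf.EnableIndexTextureDownscale
            || perf.EnableUncompressedTextureCompression;

// TryAutoCompressAsync is gated by this flag alone:
var autoCompress = perf.EnableUncompressedTextureCompression;

// Mip maps are regenerated during auto-compression unless explicitly skipped:
var includeMipMaps = !perf.SkipUncompressedTextureCompressionMipMaps;

// With only-uncompressed enabled and an unreadable header, downscaling now bails out early:
var skipUnknownFormats = perf.OnlyDownscaleUncompressedTextures;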
private static bool IsIndexMap(TextureMapKind kind)
=> kind is TextureMapKind.Mask
or TextureMapKind.Index;