Testing PAP handling changes.
@@ -27,6 +27,7 @@ public sealed class FileCacheManager : IHostedService
private readonly ConcurrentDictionary<string, ConcurrentDictionary<string, FileCacheEntity>> _fileCaches = new(StringComparer.Ordinal);
private readonly ConcurrentDictionary<string, FileCacheEntity> _fileCachesByPrefixedPath = new(StringComparer.OrdinalIgnoreCase);
private readonly SemaphoreSlim _getCachesByPathsSemaphore = new(1, 1);
private readonly SemaphoreSlim _evictSemaphore = new(1, 1);
private readonly Lock _fileWriteLock = new();
private readonly IpcManager _ipcManager;
private readonly ILogger<FileCacheManager> _logger;
@@ -226,13 +227,23 @@ public sealed class FileCacheManager : IHostedService
var compressed = LZ4Wrapper.WrapHC(raw, 0, raw.Length);

var tmpPath = compressedPath + ".tmp";
await File.WriteAllBytesAsync(tmpPath, compressed, token).ConfigureAwait(false);
File.Move(tmpPath, compressedPath, overwrite: true);
try
{
await File.WriteAllBytesAsync(tmpPath, compressed, token).ConfigureAwait(false);
File.Move(tmpPath, compressedPath, overwrite: true);
}
finally
{
try { if (File.Exists(tmpPath)) File.Delete(tmpPath); } catch { /* ignore */ }
}

var compressedSize = compressed.LongLength;
var compressedSize = new FileInfo(compressedPath).Length;
SetSizeInfo(hash, originalSize, compressedSize);
UpdateEntitiesSizes(hash, originalSize, compressedSize);

var maxBytes = GiBToBytes(_configService.Current.MaxLocalCacheInGiB);
await EnforceCacheLimitAsync(maxBytes, token).ConfigureAwait(false);

return compressed;
}
finally
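For context, the temp-file-then-move pattern introduced above can be reduced to a small helper; this is an illustrative sketch only (the helper name is an assumption, not part of the commit):

// Illustrative sketch of the atomic-write pattern used above (not part of the commit).
// Write to a sibling ".tmp" file first, then move over the destination, so a cancelled
// or failed write never leaves a truncated cache file behind.
private static async Task WriteFileAtomicallyAsync(string destinationPath, byte[] data, CancellationToken token)
{
    var tmpPath = destinationPath + ".tmp";
    try
    {
        await File.WriteAllBytesAsync(tmpPath, data, token).ConfigureAwait(false);
        File.Move(tmpPath, destinationPath, overwrite: true);
    }
    finally
    {
        // Best-effort cleanup of the temp file if the write or move failed.
        try { if (File.Exists(tmpPath)) File.Delete(tmpPath); } catch { /* ignore */ }
    }
}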
@@ -877,6 +888,83 @@ public sealed class FileCacheManager : IHostedService
|
||||
}, token).ConfigureAwait(false);
|
||||
}
|
||||
|
||||
private async Task EnforceCacheLimitAsync(long maxBytes, CancellationToken token)
|
||||
{
|
||||
if (string.IsNullOrWhiteSpace(CacheFolder) || maxBytes <= 0) return;
|
||||
|
||||
await _evictSemaphore.WaitAsync(token).ConfigureAwait(false);
|
||||
try
|
||||
{
|
||||
Directory.CreateDirectory(CacheFolder);
|
||||
|
||||
foreach (var tmp in Directory.EnumerateFiles(CacheFolder, "*" + _compressedCacheExtension + ".tmp"))
|
||||
{
|
||||
try { File.Delete(tmp); } catch { /* ignore */ }
|
||||
}
|
||||
|
||||
var files = Directory.EnumerateFiles(CacheFolder, "*" + _compressedCacheExtension, SearchOption.TopDirectoryOnly)
|
||||
.Select(p => new FileInfo(p))
|
||||
.Where(fi => fi.Exists)
|
||||
.OrderBy(fi => fi.LastWriteTimeUtc)
|
||||
.ToList();
|
||||
|
||||
long total = files.Sum(f => f.Length);
|
||||
if (total <= maxBytes) return;
|
||||
|
||||
foreach (var fi in files)
|
||||
{
|
||||
token.ThrowIfCancellationRequested();
|
||||
if (total <= maxBytes) break;
|
||||
|
||||
var hash = Path.GetFileNameWithoutExtension(fi.Name);
|
||||
|
||||
try
|
||||
{
|
||||
var len = fi.Length;
|
||||
fi.Delete();
|
||||
total -= len;
|
||||
_sizeCache.TryRemove(hash, out _);
|
||||
}
|
||||
catch (Exception ex)
|
||||
{
|
||||
_logger.LogWarning(ex, "Failed to evict cache file {file}", fi.FullName);
|
||||
}
|
||||
}
|
||||
}
|
||||
finally
|
||||
{
|
||||
_evictSemaphore.Release();
|
||||
}
|
||||
}
|
||||
|
||||
private static long GiBToBytes(double gib)
|
||||
{
|
||||
if (double.IsNaN(gib) || double.IsInfinity(gib) || gib <= 0)
|
||||
return 0;
|
||||
|
||||
var bytes = gib * 1024d * 1024d * 1024d;
|
||||
|
||||
if (bytes >= long.MaxValue) return long.MaxValue;
|
||||
|
||||
return (long)Math.Round(bytes, MidpointRounding.AwayFromZero);
|
||||
}
|
||||
|
||||
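A quick worked example of the conversion above (values are illustrative, not from the commit):

// Illustrative behaviour of GiBToBytes:
// GiBToBytes(20)   == 21_474_836_480   (20 * 1024^3)
// GiBToBytes(0.5)  == 536_870_912
// GiBToBytes(0)    == 0                (non-positive, NaN and infinity all map to 0)
// GiBToBytes(1e10) == long.MaxValue    (clamped when the product exceeds the long range)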
private void CleanupOrphanCompressedCache()
{
if (string.IsNullOrWhiteSpace(CacheFolder) || !Directory.Exists(CacheFolder))
return;

foreach (var path in Directory.EnumerateFiles(CacheFolder, "*" + _compressedCacheExtension))
{
var hash = Path.GetFileNameWithoutExtension(path);
if (!_fileCaches.ContainsKey(hash))
{
try { File.Delete(path); }
catch (Exception ex) { _logger.LogWarning(ex, "Failed deleting orphan {file}", path); }
}
}
}

public async Task StartAsync(CancellationToken cancellationToken)
{
_logger.LogInformation("Starting FileCacheManager");
@@ -1060,6 +1148,8 @@ public sealed class FileCacheManager : IHostedService
{
await WriteOutFullCsvAsync(cancellationToken).ConfigureAwait(false);
}

CleanupOrphanCompressedCache();
}

_logger.LogInformation("Started FileCacheManager");
@@ -21,6 +21,7 @@ public class PlayerDataFactory
private readonly XivDataAnalyzer _modelAnalyzer;
private readonly LightlessMediator _lightlessMediator;
private readonly TransientResourceManager _transientResourceManager;
private static readonly SemaphoreSlim _papParseLimiter = new(1, 1);

public PlayerDataFactory(ILogger<PlayerDataFactory> logger, DalamudUtilService dalamudUtil, IpcManager ipcManager,
TransientResourceManager transientResourceManager, FileCacheManager fileReplacementFactory,
@@ -121,7 +122,6 @@ public class PlayerDataFactory
_logger.LogDebug("Building character data for {obj}", playerRelatedObject);
var logDebug = _logger.IsEnabled(LogLevel.Debug);

// wait until chara is not drawing and present so nothing spontaneously explodes
await _dalamudUtil.WaitWhileCharacterIsDrawing(_logger, playerRelatedObject, Guid.NewGuid(), 30000, ct: ct).ConfigureAwait(false);
int totalWaitTime = 10000;
while (!await _dalamudUtil.IsObjectPresentAsync(await _dalamudUtil.CreateGameObjectAsync(playerRelatedObject.Address).ConfigureAwait(false)).ConfigureAwait(false) && totalWaitTime > 0)
@@ -135,7 +135,6 @@ public class PlayerDataFactory

DateTime start = DateTime.UtcNow;

// penumbra call, it's currently broken
Dictionary<string, HashSet<string>>? resolvedPaths;

resolvedPaths = (await _ipcManager.Penumbra.GetCharacterData(_logger, playerRelatedObject).ConfigureAwait(false));
@@ -144,8 +143,7 @@ public class PlayerDataFactory
ct.ThrowIfCancellationRequested();

fragment.FileReplacements =
new HashSet<FileReplacement>(resolvedPaths.Select(c => new FileReplacement([.. c.Value], c.Key)), FileReplacementComparer.Instance)
.Where(p => p.HasFileReplacement).ToHashSet();
[.. new HashSet<FileReplacement>(resolvedPaths.Select(c => new FileReplacement([.. c.Value], c.Key)), FileReplacementComparer.Instance).Where(p => p.HasFileReplacement)];
fragment.FileReplacements.RemoveWhere(c => c.GamePaths.Any(g => !CacheMonitor.AllowedFileExtensions.Any(e => g.EndsWith(e, StringComparison.OrdinalIgnoreCase))));

ct.ThrowIfCancellationRequested();
@@ -169,8 +167,6 @@ public class PlayerDataFactory

await _transientResourceManager.WaitForRecording(ct).ConfigureAwait(false);

// if it's pet then it's summoner, if it's summoner we actually want to keep all filereplacements alive at all times
// or we get into redraw city for every change and nothing works properly
if (objectKind == ObjectKind.Pet)
{
foreach (var item in fragment.FileReplacements.Where(i => i.HasFileReplacement).SelectMany(p => p.GamePaths))
@@ -189,10 +185,8 @@ public class PlayerDataFactory

_logger.LogDebug("Handling transient update for {obj}", playerRelatedObject);

// remove all potentially gathered paths from the transient resource manager that are resolved through static resolving
_transientResourceManager.ClearTransientPaths(objectKind, fragment.FileReplacements.SelectMany(c => c.GamePaths).ToList());
_transientResourceManager.ClearTransientPaths(objectKind, [.. fragment.FileReplacements.SelectMany(c => c.GamePaths)]);

// get all remaining paths and resolve them
var transientPaths = ManageSemiTransientData(objectKind);
var resolvedTransientPaths = await GetFileReplacementsFromPaths(playerRelatedObject, transientPaths, new HashSet<string>(StringComparer.Ordinal)).ConfigureAwait(false);

@@ -213,12 +207,10 @@ public class PlayerDataFactory
}
}

// clean up all semi transient resources that don't have any file replacement (aka null resolve)
_transientResourceManager.CleanUpSemiTransientResources(objectKind, [.. fragment.FileReplacements]);

ct.ThrowIfCancellationRequested();

// make sure we only return data that actually has file replacements
fragment.FileReplacements = new HashSet<FileReplacement>(fragment.FileReplacements.Where(v => v.HasFileReplacement).OrderBy(v => v.ResolvedPath, StringComparer.Ordinal), FileReplacementComparer.Instance);

// gather up data from ipc
@@ -270,13 +262,17 @@ public class PlayerDataFactory

Dictionary<string, List<ushort>>? boneIndices = null;
var hasPapFiles = false;

if (objectKind == ObjectKind.Player)
{
hasPapFiles = fragment.FileReplacements.Any(f =>
!f.IsFileSwap && f.GamePaths.First().EndsWith("pap", StringComparison.OrdinalIgnoreCase));

if (hasPapFiles)
{
boneIndices = await _dalamudUtil.RunOnFrameworkThread(() => _modelAnalyzer.GetSkeletonBoneIndices(playerRelatedObject)).ConfigureAwait(false);
boneIndices = await _dalamudUtil
.RunOnFrameworkThread(() => _modelAnalyzer.GetSkeletonBoneIndices(playerRelatedObject))
.ConfigureAwait(false);
}
}

@@ -284,9 +280,16 @@ public class PlayerDataFactory
{
try
{
#if DEBUG
if (hasPapFiles && boneIndices != null)
{
_modelAnalyzer.DumpLocalSkeletonIndices(playerRelatedObject);
}
#endif
if (hasPapFiles)
{
await VerifyPlayerAnimationBones(boneIndices, (fragment as CharacterDataFragmentPlayer)!, ct).ConfigureAwait(false);
await VerifyPlayerAnimationBones(boneIndices, (fragment as CharacterDataFragmentPlayer)!, ct)
.ConfigureAwait(false);
}
}
catch (OperationCanceledException e)
@@ -305,74 +308,174 @@ public class PlayerDataFactory
return fragment;
}

private async Task VerifyPlayerAnimationBones(Dictionary<string, List<ushort>>? boneIndices, CharacterDataFragmentPlayer fragment, CancellationToken ct)
private async Task VerifyPlayerAnimationBones(
Dictionary<string, List<ushort>>? playerBoneIndices,
CharacterDataFragmentPlayer fragment,
CancellationToken ct)
{
if (boneIndices == null) return;
if (playerBoneIndices == null || playerBoneIndices.Count == 0)
return;

var playerBoneSets = new Dictionary<string, HashSet<ushort>>(StringComparer.OrdinalIgnoreCase);

foreach (var (rawLocalKey, indices) in playerBoneIndices)
{
if (indices == null || indices.Count == 0)
continue;
var key = XivDataAnalyzer.CanonicalizeSkeletonKey(rawLocalKey);
if (string.IsNullOrEmpty(key))
continue;

if (!playerBoneSets.TryGetValue(key, out var set))
playerBoneSets[key] = set = new HashSet<ushort>();

foreach (var idx in indices)
set.Add(idx);
}

if (playerBoneSets.Count == 0)
return;

if (_logger.IsEnabled(LogLevel.Debug))
{
foreach (var kvp in boneIndices)
foreach (var kvp in playerBoneSets)
{
_logger.LogDebug("Found {skellyname} ({idx} bone indices) on player: {bones}", kvp.Key, kvp.Value.Any() ? kvp.Value.Max() : 0, string.Join(',', kvp.Value));
_logger.LogDebug(
"Found local skeleton bucket '{bucket}' ({count} indices, max {max})",
kvp.Key,
kvp.Value.Count,
kvp.Value.Count > 0 ? kvp.Value.Max() : 0);
}
}

var maxPlayerBoneIndex = boneIndices.SelectMany(kvp => kvp.Value).DefaultIfEmpty().Max();
if (maxPlayerBoneIndex <= 0) return;
var papFiles = fragment.FileReplacements
.Where(f => !f.IsFileSwap && f.GamePaths.First().EndsWith("pap", StringComparison.OrdinalIgnoreCase))
.ToList();

if (papFiles.Count == 0)
return;

var papGroupsByHash = papFiles
.Where(f => !string.IsNullOrEmpty(f.Hash))
.GroupBy(f => f.Hash, StringComparer.OrdinalIgnoreCase)
.ToList();

int noValidationFailed = 0;
foreach (var file in fragment.FileReplacements.Where(f => !f.IsFileSwap && f.GamePaths.First().EndsWith("pap", StringComparison.OrdinalIgnoreCase)).ToList())

static ushort MaxIndex(List<ushort> list)
{
if (list == null || list.Count == 0) return 0;
ushort max = 0;
for (int i = 0; i < list.Count; i++)
if (list[i] > max) max = list[i];
return max;
}

static bool ShouldIgnorePap(Dictionary<string, List<ushort>> pap)
{
foreach (var kv in pap)
{
if (kv.Value == null || kv.Value.Count == 0)
continue;

if (MaxIndex(kv.Value) > 105)
return false;
}
return true;
}

foreach (var group in papGroupsByHash)
{
ct.ThrowIfCancellationRequested();

var skeletonIndices = await _dalamudUtil.RunOnFrameworkThread(() => _modelAnalyzer.GetBoneIndicesFromPap(file.Hash)).ConfigureAwait(false);
bool validationFailed = false;
if (skeletonIndices != null)
var hash = group.Key;

Dictionary<string, List<ushort>>? papSkeletonIndices;

await _papParseLimiter.WaitAsync(ct).ConfigureAwait(false);
try
{
// 105 is the maximum vanilla skellington spoopy bone index
if (skeletonIndices.All(k => k.Value.Max() <= 105))
{
_logger.LogTrace("All indices of {path} are <= 105, ignoring", file.ResolvedPath);
papSkeletonIndices = await Task.Run(() => _modelAnalyzer.GetBoneIndicesFromPap(hash), ct)
.ConfigureAwait(false);
}
finally
{
_papParseLimiter.Release();
}

if (papSkeletonIndices == null || papSkeletonIndices.Count == 0)
continue;

if (ShouldIgnorePap(papSkeletonIndices))
{
_logger.LogTrace("All indices of PAP hash {hash} are <= 105, ignoring", hash);
continue;
}

bool invalid = false;
string? reason = null;

foreach (var (rawPapName, usedIndices) in papSkeletonIndices)
{
var papKey = XivDataAnalyzer.CanonicalizeSkeletonKey(rawPapName);
if (string.IsNullOrEmpty(papKey))
continue;

if (!playerBoneSets.TryGetValue(papKey, out var available))
{
invalid = true;
reason = $"Missing skeleton bucket '{papKey}' (raw '{rawPapName}') on local player.";
break;
}

_logger.LogDebug("Verifying bone indices for {path}, found {x} skeletons", file.ResolvedPath, skeletonIndices.Count);

foreach (var boneCount in skeletonIndices)
for (int i = 0; i < usedIndices.Count; i++)
{
var maxAnimationIndex = boneCount.Value.DefaultIfEmpty().Max();
if (maxAnimationIndex > maxPlayerBoneIndex)
var idx = usedIndices[i];
if (!available.Contains(idx))
{
_logger.LogWarning("Found more bone indices on the animation {path} skeleton {skl} (max indice {idx}) than on any player related skeleton (max indice {idx2})",
file.ResolvedPath, boneCount.Key, maxAnimationIndex, maxPlayerBoneIndex);
validationFailed = true;
invalid = true;
reason = $"Skeleton '{papKey}' missing bone index {idx} (raw '{rawPapName}').";
break;
}
}

if (invalid)
break;
}

if (validationFailed)
if (!invalid)
continue;

noValidationFailed++;

_logger.LogWarning(
"Animation PAP hash {hash} is not compatible with local skeletons; dropping all mappings for this hash. Reason: {reason}",
hash,
reason);

foreach (var file in group.ToList())
{
noValidationFailed++;
_logger.LogDebug("Removing {file} from sent file replacements and transient data", file.ResolvedPath);
fragment.FileReplacements.Remove(file);
foreach (var gamePath in file.GamePaths)
{
_transientResourceManager.RemoveTransientResource(ObjectKind.Player, gamePath);
}
}

foreach (var gamePath in file.GamePaths)
_transientResourceManager.RemoveTransientResource(ObjectKind.Player, gamePath);
}
}

if (noValidationFailed > 0)
{
_lightlessMediator.Publish(new NotificationMessage("Invalid Skeleton Setup",
$"Your client is attempting to send {noValidationFailed} animation files with invalid bone data. Those animation files have been removed from your sent data. " +
$"Verify that you are using the correct skeleton for those animation files (Check /xllog for more information).",
NotificationType.Warning, TimeSpan.FromSeconds(10)));
_lightlessMediator.Publish(new NotificationMessage(
"Invalid Skeleton Setup",
$"Your client is attempting to send {noValidationFailed} animation file groups with bone indices not present on your current skeleton. " +
"Those animation files have been removed from your sent data. Verify that you are using the correct skeleton for those animations " +
"(Check /xllog for more information).",
NotificationType.Warning,
TimeSpan.FromSeconds(10)));
}
}


private async Task<IReadOnlyDictionary<string, string[]>> GetFileReplacementsFromPaths(GameObjectHandler handler, HashSet<string> forwardResolve, HashSet<string> reverseResolve)
{
var forwardPaths = forwardResolve.ToArray();
@@ -22,6 +22,7 @@ using Microsoft.Extensions.Logging;
using DalamudObjectKind = Dalamud.Game.ClientState.Objects.Enums.ObjectKind;
using ObjectKind = LightlessSync.API.Data.Enum.ObjectKind;
using FileReplacementDataComparer = LightlessSync.PlayerData.Data.FileReplacementDataComparer;
using LightlessSync.LightlessConfiguration.Models;

namespace LightlessSync.PlayerData.Pairs;

@@ -46,6 +47,7 @@ internal sealed class PairHandlerAdapter : DisposableMediatorSubscriberBase, IPa
private readonly TextureDownscaleService _textureDownscaleService;
private readonly PairStateCache _pairStateCache;
private readonly PairPerformanceMetricsCache _performanceMetricsCache;
private readonly XivDataAnalyzer _modelAnalyzer;
private readonly PenumbraTempCollectionJanitor _tempCollectionJanitor;
private readonly PairManager _pairManager;
private CancellationTokenSource? _applicationCancellationTokenSource;
@@ -90,6 +92,7 @@ internal sealed class PairHandlerAdapter : DisposableMediatorSubscriberBase, IPa
".avfx",
".scd"
};
private readonly ConcurrentDictionary<string, byte> _blockedPapHashes = new(StringComparer.OrdinalIgnoreCase);
private DateTime? _invisibleSinceUtc;
private DateTime? _visibilityEvictionDueAtUtc;
private DateTime _nextActorLookupUtc = DateTime.MinValue;
@@ -184,7 +187,8 @@ internal sealed class PairHandlerAdapter : DisposableMediatorSubscriberBase, IPa
TextureDownscaleService textureDownscaleService,
PairStateCache pairStateCache,
PairPerformanceMetricsCache performanceMetricsCache,
PenumbraTempCollectionJanitor tempCollectionJanitor) : base(logger, mediator)
PenumbraTempCollectionJanitor tempCollectionJanitor,
XivDataAnalyzer modelAnalyzer) : base(logger, mediator)
{
_pairManager = pairManager;
Ident = ident;
@@ -203,6 +207,7 @@ internal sealed class PairHandlerAdapter : DisposableMediatorSubscriberBase, IPa
_pairStateCache = pairStateCache;
_performanceMetricsCache = performanceMetricsCache;
_tempCollectionJanitor = tempCollectionJanitor;
_modelAnalyzer = modelAnalyzer;
}

public void Initialize()
@@ -1669,11 +1674,36 @@ internal sealed class PairHandlerAdapter : DisposableMediatorSubscriberBase, IPa
return;
}

SplitPapMappings(moddedPaths, out var withoutPap, out var papOnly);

await _ipcManager.Penumbra.AssignTemporaryCollectionAsync(Logger, penumbraCollection, objIndex.Value).ConfigureAwait(false);

await _ipcManager.Penumbra.SetTemporaryModsAsync(Logger, _applicationId, penumbraCollection,
moddedPaths.ToDictionary(k => k.Key.GamePath, k => k.Value, StringComparer.Ordinal)).ConfigureAwait(false);
_lastAppliedModdedPaths = new Dictionary<(string GamePath, string? Hash), string>(moddedPaths, moddedPaths.Comparer);
await _ipcManager.Penumbra.SetTemporaryModsAsync(
Logger, _applicationId, penumbraCollection,
withoutPap.ToDictionary(k => k.Key.GamePath, k => k.Value, StringComparer.Ordinal))
.ConfigureAwait(false);

await _ipcManager.Penumbra.RedrawAsync(Logger, handlerForApply, _applicationId, token).ConfigureAwait(false);
if (handlerForApply.Address != nint.Zero)
await _actorObjectService.WaitForFullyLoadedAsync(handlerForApply.Address, token).ConfigureAwait(false);

var removedPap = await StripIncompatiblePapAsync(handlerForApply, charaData, papOnly, token).ConfigureAwait(false);
if (removedPap > 0)
{
Logger.LogTrace("[{applicationId}] Removed {removedPap} incompatible PAP mappings found for {handler}", _applicationId, removedPap, GetLogIdentifier());
}

var merged = new Dictionary<(string GamePath, string? Hash), string>(withoutPap, withoutPap.Comparer);
foreach (var kv in papOnly)
merged[kv.Key] = kv.Value;

await _ipcManager.Penumbra.SetTemporaryModsAsync(
Logger, _applicationId, penumbraCollection,
merged.ToDictionary(k => k.Key.GamePath, k => k.Value, StringComparer.Ordinal))
.ConfigureAwait(false);

_lastAppliedModdedPaths = new Dictionary<(string GamePath, string? Hash), string>(merged, merged.Comparer);

LastAppliedDataBytes = -1;
foreach (var path in moddedPaths.Values.Distinct(StringComparer.OrdinalIgnoreCase).Select(v => new FileInfo(v)).Where(p => p.Exists))
{
@@ -1983,9 +2013,17 @@ internal sealed class PairHandlerAdapter : DisposableMediatorSubscriberBase, IPa

foreach (var gamePath in item.GamePaths)
{
if (gamePath.EndsWith(".pap", StringComparison.OrdinalIgnoreCase)
&& !string.IsNullOrEmpty(item.Hash)
&& _blockedPapHashes.ContainsKey(item.Hash))
{
continue;
}

var preferredPath = skipDownscaleForPair
? fileCache.ResolvedFilepath
: _textureDownscaleService.GetPreferredPath(item.Hash, fileCache.ResolvedFilepath);

outputDict[(gamePath, item.Hash)] = preferredPath;
}
}
@@ -2295,7 +2333,7 @@ internal sealed class PairHandlerAdapter : DisposableMediatorSubscriberBase, IPa
HandleVisibilityLoss(logChange: false);
}

private bool TryResolveDescriptorHash(ActorObjectService.ActorDescriptor descriptor, out string hashedCid)
private static bool TryResolveDescriptorHash(ActorObjectService.ActorDescriptor descriptor, out string hashedCid)
{
hashedCid = descriptor.HashedContentId ?? string.Empty;
if (!string.IsNullOrEmpty(hashedCid))
@@ -2308,6 +2346,194 @@ internal sealed class PairHandlerAdapter : DisposableMediatorSubscriberBase, IPa
return !string.IsNullOrEmpty(hashedCid);
}

private static bool ContainsIndexCompat(HashSet<ushort> available, ushort idx)
{
if (available.Contains(idx)) return true;

if (idx > 0 && available.Contains((ushort)(idx - 1))) return true;
if (idx < ushort.MaxValue && available.Contains((ushort)(idx + 1))) return true;

return false;
}
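A quick illustration of the ±1 tolerance above (illustrative values only, not from the commit):

// Illustrative behaviour of ContainsIndexCompat:
// available = { 10, 11, 12 }
// ContainsIndexCompat(available, 11) -> true  (exact match)
// ContainsIndexCompat(available, 13) -> true  (13 - 1 = 12 is present)
// ContainsIndexCompat(available, 15) -> false (neither 14, 15 nor 16 is present)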
private static bool IsPapCompatible(
IReadOnlyDictionary<string, HashSet<ushort>> localBoneSets,
IReadOnlyDictionary<string, List<ushort>> papBoneIndices,
out string reason)
{
var groups = papBoneIndices
.Select(kvp => new
{
Raw = kvp.Key,
Key = XivDataAnalyzer.CanonicalizeSkeletonKey(kvp.Key),
Indices = kvp.Value
})
.Where(x => !string.IsNullOrEmpty(x.Key) && x.Indices is { Count: > 0 })
.GroupBy(x => x.Key, StringComparer.OrdinalIgnoreCase)
.ToList();

if (groups.Count == 0)
{
reason = "No bindings found in the PAP";
return false;
}

var relevant = groups.Where(g => localBoneSets.ContainsKey(g.Key)).ToList();

if (relevant.Count == 0)
{
var papKeys = string.Join(", ", groups.Select(g => g.Key).Distinct(StringComparer.OrdinalIgnoreCase));
var localKeys = string.Join(", ", localBoneSets.Keys);
reason = $"No matching skeleton bucket between PAP [{papKeys}] and local [{localKeys}].";
return false;
}

foreach (var g in relevant)
{
var available = localBoneSets[g.Key];

bool anyVariantOk = false;
foreach (var variant in g)
{
bool ok = true;
foreach (var idx in variant.Indices)
{
if (!ContainsIndexCompat(available, idx))
{
ok = false;
break;
}
}

if (ok)
{
anyVariantOk = true;
break;
}
}

if (!anyVariantOk)
{
var first = g.First();
ushort? missing = null;
foreach (var idx in first.Indices)
{
if (!ContainsIndexCompat(available, idx))
{
missing = idx;
break;
}
}

reason = missing.HasValue
? $"Skeleton '{g.Key}' missing bone index {missing.Value}. (raw '{first.Raw}')"
: $"Skeleton '{g.Key}' missing required bone indices. (raw '{first.Raw}')";
return false;
}
}

reason = string.Empty;
return true;
}
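How the two helpers above fit together can be sketched as follows; the dictionary contents are invented for illustration and are not taken from the commit:

// Illustrative use of IsPapCompatible (values are made up for the example):
var localBoneSets = new Dictionary<string, HashSet<ushort>>(StringComparer.OrdinalIgnoreCase)
{
    ["c0101"] = new HashSet<ushort> { 0, 1, 2, 3, 4, 5 },
};

var papBoneIndices = new Dictionary<string, List<ushort>>(StringComparer.OrdinalIgnoreCase)
{
    // Canonicalizes to "c0101"; index 7 is not (and is not adjacent to) a local bone index.
    ["c0101"] = new List<ushort> { 1, 2, 7 },
};

// Returns false with a reason such as "Skeleton 'c0101' missing bone index 7. (raw 'c0101')".
var compatible = IsPapCompatible(localBoneSets, papBoneIndices, out var reason);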
private static void SplitPapMappings(
Dictionary<(string GamePath, string? Hash), string> moddedPaths,
out Dictionary<(string GamePath, string? Hash), string> withoutPap,
out Dictionary<(string GamePath, string? Hash), string> papOnly)
{
withoutPap = new(moddedPaths.Comparer);
papOnly = new(moddedPaths.Comparer);

foreach (var kv in moddedPaths)
{
var gamePath = kv.Key.GamePath;
if (gamePath.EndsWith(".pap", StringComparison.OrdinalIgnoreCase))
papOnly[kv.Key] = kv.Value;
else
withoutPap[kv.Key] = kv.Value;
}
}

private async Task<int> StripIncompatiblePapAsync(
GameObjectHandler handlerForApply,
CharacterData charaData,
Dictionary<(string GamePath, string? Hash), string> papOnly,
CancellationToken token)
{
if (papOnly.Count == 0)
return 0;

var boneIndices = await _dalamudUtil.RunOnFrameworkThread(
() => _modelAnalyzer.GetSkeletonBoneIndices(handlerForApply))
.ConfigureAwait(false);

if (boneIndices == null || boneIndices.Count == 0)
return papOnly.Count;

var localBoneSets = new Dictionary<string, HashSet<ushort>>(StringComparer.OrdinalIgnoreCase);

foreach (var (rawKey, list) in boneIndices)
{
var key = XivDataAnalyzer.CanonicalizeSkeletonKey(rawKey);
if (string.IsNullOrEmpty(key) || list is null || list.Count == 0)
continue;

if (!localBoneSets.TryGetValue(key, out var set))
localBoneSets[key] = set = new HashSet<ushort>();

foreach (var v in list)
set.Add(v);
}

int removed = 0;

foreach (var hash in papOnly.Keys.Select(k => k.Hash).Where(h => !string.IsNullOrEmpty(h)).Distinct(StringComparer.OrdinalIgnoreCase).ToList())
{
token.ThrowIfCancellationRequested();

var papIndices = await _dalamudUtil.RunOnFrameworkThread(
() => _modelAnalyzer.GetBoneIndicesFromPap(hash!))
.ConfigureAwait(false);

if (papIndices == null || papIndices.Count == 0)
continue;

if (papIndices.All(k => k.Value.DefaultIfEmpty().Max() <= 105))
continue;

if (!IsPapCompatible(localBoneSets, papIndices, out var reason))
{
var keysToRemove = papOnly.Keys.Where(k => string.Equals(k.Hash, hash, StringComparison.OrdinalIgnoreCase)).ToList();
foreach (var k in keysToRemove)
papOnly.Remove(k);

removed += keysToRemove.Count;
if (hash == null)
continue;

if (_blockedPapHashes.TryAdd(hash, 0))
{
Logger.LogWarning("Blocked remote object PAP (hash {hash}) for {handler}: {reason}", hash, GetLogIdentifier(), reason);
}

if (charaData.FileReplacements.TryGetValue(ObjectKind.Player, out var list))
{
list.RemoveAll(r => string.Equals(r.Hash, hash, StringComparison.OrdinalIgnoreCase)
&& r.GamePaths.Any(p => p.EndsWith(".pap", StringComparison.OrdinalIgnoreCase)));
}
}
}

var nullHashKeys = papOnly.Keys.Where(k => string.IsNullOrEmpty(k.Hash)).ToList();
foreach (var k in nullHashKeys)
{
papOnly.Remove(k);
removed++;
}

return removed;
}

private async Task ApplyCustomizeAsync(nint address, string customizeData, ObjectKind kind)
{
_customizeIds[kind] = await _ipcManager.CustomizePlus.SetBodyScaleAsync(address, customizeData).ConfigureAwait(false);
@@ -32,6 +32,7 @@ internal sealed class PairHandlerAdapterFactory : IPairHandlerAdapterFactory
private readonly PairStateCache _pairStateCache;
private readonly PairPerformanceMetricsCache _pairPerformanceMetricsCache;
private readonly PenumbraTempCollectionJanitor _tempCollectionJanitor;
private readonly XivDataAnalyzer _modelAnalyzer;

public PairHandlerAdapterFactory(
ILoggerFactory loggerFactory,
@@ -50,7 +51,8 @@ internal sealed class PairHandlerAdapterFactory : IPairHandlerAdapterFactory
TextureDownscaleService textureDownscaleService,
PairStateCache pairStateCache,
PairPerformanceMetricsCache pairPerformanceMetricsCache,
PenumbraTempCollectionJanitor tempCollectionJanitor)
PenumbraTempCollectionJanitor tempCollectionJanitor,
XivDataAnalyzer modelAnalyzer)
{
_loggerFactory = loggerFactory;
_mediator = mediator;
@@ -69,6 +71,7 @@ internal sealed class PairHandlerAdapterFactory : IPairHandlerAdapterFactory
_pairStateCache = pairStateCache;
_pairPerformanceMetricsCache = pairPerformanceMetricsCache;
_tempCollectionJanitor = tempCollectionJanitor;
_modelAnalyzer = modelAnalyzer;
}

public IPairHandlerAdapter Create(string ident)
@@ -95,6 +98,7 @@ internal sealed class PairHandlerAdapterFactory : IPairHandlerAdapterFactory
_textureDownscaleService,
_pairStateCache,
_pairPerformanceMetricsCache,
_tempCollectionJanitor);
_tempCollectionJanitor,
_modelAnalyzer);
}
}

@@ -9,6 +9,7 @@ using LightlessSync.LightlessConfiguration;
using LightlessSync.PlayerData.Handlers;
using Microsoft.Extensions.Logging;
using System.Runtime.InteropServices;
using System.Text.RegularExpressions;

namespace LightlessSync.Services;
@@ -29,67 +30,140 @@ public sealed class XivDataAnalyzer

public unsafe Dictionary<string, List<ushort>>? GetSkeletonBoneIndices(GameObjectHandler handler)
{
if (handler.Address == nint.Zero) return null;
var chara = (CharacterBase*)(((Character*)handler.Address)->GameObject.DrawObject);
if (chara->GetModelType() != CharacterBase.ModelType.Human) return null;
var resHandles = chara->Skeleton->SkeletonResourceHandles;
Dictionary<string, List<ushort>> outputIndices = [];
if (handler is null || handler.Address == nint.Zero)
return null;

Dictionary<string, HashSet<ushort>> sets = new(StringComparer.OrdinalIgnoreCase);

try
{
for (int i = 0; i < chara->Skeleton->PartialSkeletonCount; i++)
var drawObject = ((Character*)handler.Address)->GameObject.DrawObject;
if (drawObject == null)
return null;

var chara = (CharacterBase*)drawObject;
if (chara->GetModelType() != CharacterBase.ModelType.Human)
return null;

var skeleton = chara->Skeleton;
if (skeleton == null)
return null;

var resHandles = skeleton->SkeletonResourceHandles;
var partialCount = skeleton->PartialSkeletonCount;
if (partialCount <= 0)
return null;

for (int i = 0; i < partialCount; i++)
{
var handle = *(resHandles + i);
_logger.LogTrace("Iterating over SkeletonResourceHandle #{i}:{x}", i, ((nint)handle).ToString("X"));
if ((nint)handle == nint.Zero) continue;
var curBones = handle->BoneCount;
// this is unrealistic, the filename shouldn't ever be that long
if (handle->FileName.Length > 1024) continue;
var skeletonName = handle->FileName.ToString();
if (string.IsNullOrEmpty(skeletonName)) continue;
outputIndices[skeletonName] = [];
for (ushort boneIdx = 0; boneIdx < curBones; boneIdx++)
if ((nint)handle == nint.Zero)
continue;

if (handle->FileName.Length > 1024)
continue;

var rawName = handle->FileName.ToString();
if (string.IsNullOrWhiteSpace(rawName))
continue;

var skeletonKey = CanonicalizeSkeletonKey(rawName);
if (string.IsNullOrEmpty(skeletonKey))
continue;

var boneCount = handle->BoneCount;
if (boneCount == 0)
continue;

var havokSkel = handle->HavokSkeleton;
if ((nint)havokSkel == nint.Zero)
continue;

if (!sets.TryGetValue(skeletonKey, out var set))
{
var boneName = handle->HavokSkeleton->Bones[boneIdx].Name.String;
if (boneName == null) continue;
outputIndices[skeletonName].Add((ushort)(boneIdx + 1));
set = [];
sets[skeletonKey] = set;
}

uint maxExclusive = boneCount;
uint ushortExclusive = (uint)ushort.MaxValue + 1u;
if (maxExclusive > ushortExclusive)
maxExclusive = ushortExclusive;

for (uint boneIdx = 0; boneIdx < maxExclusive; boneIdx++)
{
var name = havokSkel->Bones[boneIdx].Name.String;
if (name == null)
continue;

set.Add((ushort)boneIdx);
}

_logger.LogTrace("Local skeleton raw file='{raw}', key='{key}', boneCount={count}",
rawName, skeletonKey, boneCount);
}
}
catch (Exception ex)
{
_logger.LogWarning(ex, "Could not process skeleton data");
return null;
}

return (outputIndices.Count != 0 && outputIndices.Values.All(u => u.Count > 0)) ? outputIndices : null;
if (sets.Count == 0)
return null;

var output = new Dictionary<string, List<ushort>>(sets.Count, StringComparer.OrdinalIgnoreCase);
foreach (var (key, set) in sets)
{
if (set.Count == 0)
continue;

var list = set.ToList();
list.Sort();
output[key] = list;
}

return (output.Count != 0 && output.Values.All(v => v.Count > 0)) ? output : null;
}
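The dictionary returned above maps canonical skeleton buckets to sorted, de-duplicated bone indices; an invented example of its shape:

// Illustrative shape of a GetSkeletonBoneIndices result (values are invented):
// {
//   ["c0101"]    = [0, 1, 2, ..., 104],
//   ["skeleton"] = [0, 1, 2, ..., 52]
// }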
public unsafe Dictionary<string, List<ushort>>? GetBoneIndicesFromPap(string hash)
{
if (_configService.Current.BonesDictionary.TryGetValue(hash, out var bones)) return bones;
if (_configService.Current.BonesDictionary.TryGetValue(hash, out var cached))
return cached;

var cacheEntity = _fileCacheManager.GetFileCacheByHash(hash);
if (cacheEntity == null) return null;
if (cacheEntity == null || string.IsNullOrEmpty(cacheEntity.ResolvedFilepath) || !File.Exists(cacheEntity.ResolvedFilepath))
return null;

using BinaryReader reader = new(File.Open(cacheEntity.ResolvedFilepath, FileMode.Open, FileAccess.Read, FileShare.Read));
using var fs = File.Open(cacheEntity.ResolvedFilepath, FileMode.Open, FileAccess.Read, FileShare.Read);
using var reader = new BinaryReader(fs);

// most of this shit is from vfxeditor, surely nothing will change in the pap format :copium:
// most of this is from vfxeditor
reader.ReadInt32(); // ignore
reader.ReadInt32(); // ignore
reader.ReadInt16(); // read 2 (num animations)
reader.ReadInt16(); // read 2 (modelid)
var type = reader.ReadByte();// read 1 (type)
if (type != 0) return null; // it's not human, just ignore it, whatever
reader.ReadInt16(); // num animations
reader.ReadInt16(); // modelid
var type = reader.ReadByte(); // type
if (type != 0)
return null; // not human

reader.ReadByte(); // read 1 (variant)
reader.ReadByte(); // variant
reader.ReadInt32(); // ignore
var havokPosition = reader.ReadInt32();
var footerPosition = reader.ReadInt32();

if (havokPosition <= 0 || footerPosition <= havokPosition || footerPosition > fs.Length)
return null;

var havokDataSize = footerPosition - havokPosition;
reader.BaseStream.Position = havokPosition;

var havokData = reader.ReadBytes(havokDataSize);
if (havokData.Length <= 8) return null; // no havok data
if (havokData.Length <= 8)
return null;

var output = new Dictionary<string, List<ushort>>(StringComparer.OrdinalIgnoreCase);

var tempHavokDataPath = Path.Combine(Path.GetTempPath(), Path.GetRandomFileName()) + ".hkx";
var tempHavokDataPathAnsi = Marshal.StringToHGlobalAnsi(tempHavokDataPath);
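For reference, the header fields read above map onto the start of a PAP file roughly as follows; field names are taken from the comments in the code, and this is a sketch rather than a verified specification:

// Rough layout implied by the reads above (sketch only, not a verified .pap spec):
//   int32   ignored
//   int32   ignored
//   int16   number of animations
//   int16   model id
//   byte    type            (0 = human; anything else is skipped above)
//   byte    variant
//   int32   ignored
//   int32   havokPosition   (offset of the embedded Havok blob)
//   int32   footerPosition  (end of the Havok blob; size = footerPosition - havokPosition)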
@@ -102,54 +176,150 @@ public sealed class XivDataAnalyzer
loadoptions->ClassNameRegistry = hkBuiltinTypeRegistry.Instance()->GetClassNameRegistry();
loadoptions->Flags = new hkFlags<hkSerializeUtil.LoadOptionBits, int>
{
Storage = (int)(hkSerializeUtil.LoadOptionBits.Default)
Storage = (int)hkSerializeUtil.LoadOptionBits.Default
};

var resource = hkSerializeUtil.LoadFromFile((byte*)tempHavokDataPathAnsi, null, loadoptions);
if (resource == null)
{
throw new InvalidOperationException("Resource was null after loading");
}

var rootLevelName = @"hkRootLevelContainer"u8;
fixed (byte* n1 = rootLevelName)
{
var container = (hkRootLevelContainer*)resource->GetContentsPointer(n1, hkBuiltinTypeRegistry.Instance()->GetTypeInfoRegistry());
if (container == null)
return null;

var animationName = @"hkaAnimationContainer"u8;
fixed (byte* n2 = animationName)
{
var animContainer = (hkaAnimationContainer*)container->findObjectByName(n2, null);
if (animContainer == null)
return null;

for (int i = 0; i < animContainer->Bindings.Length; i++)
{
var binding = animContainer->Bindings[i].ptr;
if (binding == null)
continue;

var rawSkel = binding->OriginalSkeletonName.String;
var skeletonKey = CanonicalizeSkeletonKey(rawSkel);
if (string.IsNullOrEmpty(skeletonKey))
continue;

var boneTransform = binding->TransformTrackToBoneIndices;
string name = binding->OriginalSkeletonName.String! + "_" + i;
output[name] = [];
if (boneTransform.Length <= 0)
continue;

if (!output.TryGetValue(skeletonKey, out var list))
{
list = new List<ushort>(boneTransform.Length);
output[skeletonKey] = list;
}

for (int boneIdx = 0; boneIdx < boneTransform.Length; boneIdx++)
{
output[name].Add((ushort)boneTransform[boneIdx]);
list.Add((ushort)boneTransform[boneIdx]);
}
output[name].Sort();
}

}
}

foreach (var key in output.Keys.ToList())
{
output[key] = [.. output[key]
.Distinct()
.Order()];
}
}
catch (Exception ex)
{
_logger.LogWarning(ex, "Could not load havok file in {path}", tempHavokDataPath);
return null;
}
finally
{
Marshal.FreeHGlobal(tempHavokDataPathAnsi);
File.Delete(tempHavokDataPath);
try { File.Delete(tempHavokDataPath); } catch { /* ignore */ }
}

_configService.Current.BonesDictionary[hash] = output;
_configService.Save();

return output;
}

private static readonly Regex _bucketPathRegex =
new(@"(?i)(?:^|/)(?<bucket>c\d{4})(?:/|$)", RegexOptions.Compiled);

private static readonly Regex _bucketSklRegex =
new(@"(?i)\bskl_(?<bucket>c\d{4})[a-z]\d{4}\b", RegexOptions.Compiled);

private static readonly Regex _bucketLooseRegex =
new(@"(?i)(?<![a-z0-9])(?<bucket>c\d{4})(?!\d)", RegexOptions.Compiled);

public static string CanonicalizeSkeletonKey(string? raw)
{
if (string.IsNullOrWhiteSpace(raw))
return string.Empty;

var s = raw.Replace('\\', '/').Trim();

var underscore = s.LastIndexOf('_');
if (underscore > 0 && underscore + 1 < s.Length && char.IsDigit(s[underscore + 1]))
s = s[..underscore];

if (s.StartsWith("skeleton", StringComparison.OrdinalIgnoreCase))
return "skeleton";

var m = _bucketPathRegex.Match(s);
if (m.Success)
return m.Groups["bucket"].Value.ToLowerInvariant();

m = _bucketSklRegex.Match(s);
if (m.Success)
return m.Groups["bucket"].Value.ToLowerInvariant();

m = _bucketLooseRegex.Match(s);
if (m.Success)
return m.Groups["bucket"].Value.ToLowerInvariant();

return string.Empty;
}
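Some illustrative inputs and the buckets the three regexes above would produce (example strings are invented, not taken from game data):

// Illustrative CanonicalizeSkeletonKey results (inputs are invented examples):
// "chara/human/c0101/skeleton/base/b0001/skl_c0101b0001.sklb" -> "c0101"    (path rule)
// "skl_c1101b0001"                                            -> "c1101"    (skl_ rule)
// "c0201_extra"                                               -> "c0201"    (loose rule)
// "skeleton_00123"                                            -> "skeleton"
// "somethingelse"                                             -> ""         (no bucket found)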
public void DumpLocalSkeletonIndices(GameObjectHandler handler, string? filter = null)
{
var skels = GetSkeletonBoneIndices(handler);
if (skels == null)
{
_logger.LogTrace("DumpLocalSkeletonIndices: local skeleton indices are null or not found");
return;
}

var keys = skels.Keys
.Order(StringComparer.OrdinalIgnoreCase)
.ToArray();

_logger.LogTrace("Local skeleton indices found ({count}): {keys}",
keys.Length,
string.Join(", ", keys));

if (!string.IsNullOrWhiteSpace(filter))
{
var hits = keys.Where(k =>
k.Equals(filter, StringComparison.OrdinalIgnoreCase) ||
k.StartsWith(filter + "_", StringComparison.OrdinalIgnoreCase) ||
filter.StartsWith(k + "_", StringComparison.OrdinalIgnoreCase) ||
k.Contains(filter, StringComparison.OrdinalIgnoreCase))
.ToArray();

_logger.LogTrace("Matches found for '{filter}': {hits}",
filter,
hits.Length == 0 ? "<none>" : string.Join(", ", hits));
}
}

public async Task<long> GetTrianglesByHash(string hash)
{
if (_configService.Current.TriangleDictionary.TryGetValue(hash, out var cachedTris) && cachedTris > 0)
@@ -167,7 +167,7 @@ public class DownloadUi : WindowMediatorSubscriberBase
List<KeyValuePair<GameObjectHandler, Dictionary<string, FileDownloadStatus>>> transfers;
try
{
transfers = _currentDownloads.ToList();
transfers = [.. _currentDownloads];
}
catch (ArgumentException)
{

@@ -404,76 +404,32 @@ public partial class FileDownloadManager : DisposableMediatorSubscriberBase

private async Task WaitForDownloadReady(List<DownloadFileTransfer> downloadFileTransfer, Guid requestId, CancellationToken downloadCt)
{
bool alreadyCancelled = false;
try
while (true)
{
CancellationTokenSource localTimeoutCts = new();
localTimeoutCts.CancelAfter(TimeSpan.FromSeconds(5));
CancellationTokenSource composite = CancellationTokenSource.CreateLinkedTokenSource(downloadCt, localTimeoutCts.Token);
downloadCt.ThrowIfCancellationRequested();

while (!_orchestrator.IsDownloadReady(requestId))
if (_orchestrator.IsDownloadReady(requestId))
break;

using var resp = await _orchestrator.SendRequestAsync(
HttpMethod.Get,
LightlessFiles.RequestCheckQueueFullPath(downloadFileTransfer[0].DownloadUri, requestId),
downloadFileTransfer.Select(t => t.Hash).ToList(),
downloadCt).ConfigureAwait(false);

resp.EnsureSuccessStatusCode();

var body = (await resp.Content.ReadAsStringAsync(downloadCt).ConfigureAwait(false)).Trim();
if (string.Equals(body, "true", StringComparison.OrdinalIgnoreCase) ||
body.Contains("\"ready\":true", StringComparison.OrdinalIgnoreCase))
{
try
{
await Task.Delay(250, composite.Token).ConfigureAwait(false);
}
catch (TaskCanceledException)
{
if (downloadCt.IsCancellationRequested) throw;

var req = await _orchestrator.SendRequestAsync(
HttpMethod.Get,
LightlessFiles.RequestCheckQueueFullPath(downloadFileTransfer[0].DownloadUri, requestId),
downloadFileTransfer.Select(c => c.Hash).ToList(),
downloadCt).ConfigureAwait(false);

req.EnsureSuccessStatusCode();

localTimeoutCts.Dispose();
composite.Dispose();

localTimeoutCts = new();
localTimeoutCts.CancelAfter(TimeSpan.FromSeconds(5));
composite = CancellationTokenSource.CreateLinkedTokenSource(downloadCt, localTimeoutCts.Token);
}
break;
}

localTimeoutCts.Dispose();
composite.Dispose();

Logger.LogDebug("Download {requestId} ready", requestId);
await Task.Delay(250, downloadCt).ConfigureAwait(false);
}
catch (TaskCanceledException)
{
try
{
await _orchestrator.SendRequestAsync(HttpMethod.Get, LightlessFiles.RequestCancelFullPath(downloadFileTransfer[0].DownloadUri, requestId))
.ConfigureAwait(false);
alreadyCancelled = true;
}
catch
{
// ignore
}

throw;
}
finally
{
if (downloadCt.IsCancellationRequested && !alreadyCancelled)
{
try
{
await _orchestrator.SendRequestAsync(HttpMethod.Get, LightlessFiles.RequestCancelFullPath(downloadFileTransfer[0].DownloadUri, requestId))
.ConfigureAwait(false);
}
catch
{
// ignore
}
}
_orchestrator.ClearDownloadRequest(requestId);
}
_orchestrator.ClearDownloadRequest(requestId);
}

private async Task DownloadQueuedBlockFileAsync(
@@ -532,11 +488,9 @@ public partial class FileDownloadManager : DisposableMediatorSubscriberBase

try
{
// sanity check length
if (fileLengthBytes < 0 || fileLengthBytes > int.MaxValue)
throw new InvalidDataException($"Invalid block entry length: {fileLengthBytes}");

// safe cast after check
var len = checked((int)fileLengthBytes);

if (!replacementLookup.TryGetValue(fileHash, out var repl))
@@ -546,11 +500,9 @@ public partial class FileDownloadManager : DisposableMediatorSubscriberBase
continue;
}

// decompress
var filePath = _fileDbManager.GetCacheFilePath(fileHash, repl.Extension);
Logger.LogTrace("{dlName}: Decompressing {file}:{len} => {dest}", downloadLabel, fileHash, fileLengthBytes, filePath);

// read compressed data
var compressed = new byte[len];
await ReadExactlyAsync(fileBlockStream, compressed.AsMemory(0, len), ct).ConfigureAwait(false);
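The ReadExactlyAsync helper called above is not shown in this diff; a minimal sketch of what such a helper typically does (looping until the buffer is filled or the stream ends) might look like this, with the signature being an assumption:

// Sketch of a ReadExactlyAsync-style helper (assumed signature, not shown in this diff).
private static async Task ReadExactlyAsync(Stream stream, Memory<byte> buffer, CancellationToken ct)
{
    int offset = 0;
    while (offset < buffer.Length)
    {
        var read = await stream.ReadAsync(buffer.Slice(offset), ct).ConfigureAwait(false);
        if (read == 0)
            throw new EndOfStreamException($"Stream ended after {offset} of {buffer.Length} bytes.");
        offset += read;
    }
}

On .NET 7 and later, Stream.ReadExactlyAsync is available directly, so a helper like this could simply delegate to it.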
@@ -563,20 +515,17 @@ public partial class FileDownloadManager : DisposableMediatorSubscriberBase

MungeBuffer(compressed);

// limit concurrent decompressions
await _decompressGate.WaitAsync(ct).ConfigureAwait(false);
try
{
var sw = System.Diagnostics.Stopwatch.StartNew();

// decompress
var decompressed = LZ4Wrapper.Unwrap(compressed);

Logger.LogTrace("{dlName}: Unwrap {fileHash} took {ms}ms (compressed {c} bytes, decompressed {d} bytes)",
downloadLabel, fileHash, sw.ElapsedMilliseconds, compressed.Length, decompressed?.Length ?? -1);

// write to file
await _fileCompactor.WriteAllBytesAsync(filePath, decompressed, ct).ConfigureAwait(false);
await _fileCompactor.WriteAllBytesAsync(filePath, bytes: decompressed, ct).ConfigureAwait(false);
PersistFileToStorage(fileHash, filePath, repl.GamePath, skipDownscale);
}
finally
@@ -793,7 +742,6 @@ public partial class FileDownloadManager : DisposableMediatorSubscriberBase

try
{
// download (with slot)
var progress = CreateInlineProgress(bytes => AddTransferredBytes(statusKey, bytes));

// Download slot held on get
@@ -974,14 +922,12 @@ public partial class FileDownloadManager : DisposableMediatorSubscriberBase
if (!_orchestrator.IsInitialized)
throw new InvalidOperationException("FileTransferManager is not initialized");

// batch request
var response = await _orchestrator.SendRequestAsync(
HttpMethod.Get,
LightlessFiles.ServerFilesGetSizesFullPath(_orchestrator.FilesCdnUri!),
hashes,
ct).ConfigureAwait(false);

// ensure success
return await response.Content.ReadFromJsonAsync<List<DownloadFileDto>>(cancellationToken: ct).ConfigureAwait(false) ?? [];
}