// LightlessClient/LightlessSync/Services/XivDataAnalyzer.cs
using FFXIVClientStructs.FFXIV.Client.Game.Character;
using FFXIVClientStructs.FFXIV.Client.Graphics.Scene;
using FFXIVClientStructs.Havok.Common.Serialize.Resource;
using FFXIVClientStructs.Havok.Animation;
using FFXIVClientStructs.Havok.Common.Base.Types;
using FFXIVClientStructs.Havok.Common.Serialize.Util;
using LightlessSync.FileCache;
using LightlessSync.Interop.GameModel;
using LightlessSync.LightlessConfiguration;
using LightlessSync.PlayerData.Factories;
using LightlessSync.PlayerData.Handlers;
using Microsoft.Extensions.Logging;
using System.Collections.Concurrent;
using System.Runtime.InteropServices;
using System.Text.RegularExpressions;
namespace LightlessSync.Services;
public sealed partial class XivDataAnalyzer
{
private readonly ILogger<XivDataAnalyzer> _logger;
private readonly FileCacheManager _fileCacheManager;
private readonly XivDataStorageService _configService;
private readonly List<string> _failedCalculatedTris = [];
private readonly List<string> _failedCalculatedEffectiveTris = [];
public XivDataAnalyzer(ILogger<XivDataAnalyzer> logger, FileCacheManager fileCacheManager,
XivDataStorageService configService)
{
_logger = logger;
_fileCacheManager = fileCacheManager;
_configService = configService;
}
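// Collects, per canonical skeleton key, the bone indices exposed by the actor's live partial skeletons.
// Returns null when the draw object is missing, the model is not Human, or no usable bones are found.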
public unsafe Dictionary<string, List<ushort>>? GetSkeletonBoneIndices(GameObjectHandler handler)
{
if (handler is null || handler.Address == nint.Zero)
return null;
Dictionary<string, HashSet<ushort>> sets = new(StringComparer.OrdinalIgnoreCase);
try
{
var drawObject = ((Character*)handler.Address)->GameObject.DrawObject;
if (drawObject == null)
return null;
var chara = (CharacterBase*)drawObject;
if (chara->GetModelType() != CharacterBase.ModelType.Human)
return null;
var skeleton = chara->Skeleton;
if (skeleton == null)
return null;
var resHandles = skeleton->SkeletonResourceHandles;
var partialCount = skeleton->PartialSkeletonCount;
if (partialCount <= 0)
return null;
for (int i = 0; i < partialCount; i++)
{
var handle = *(resHandles + i);
if ((nint)handle == nint.Zero)
continue;
if (handle->FileName.Length > 1024)
continue;
var rawName = handle->FileName.ToString();
if (string.IsNullOrWhiteSpace(rawName))
continue;
var skeletonKey = CanonicalizeSkeletonKey(rawName);
if (string.IsNullOrEmpty(skeletonKey))
continue;
var boneCount = handle->BoneCount;
if (boneCount == 0)
continue;
var havokSkel = handle->HavokSkeleton;
if ((nint)havokSkel == nint.Zero)
continue;
if (!sets.TryGetValue(skeletonKey, out var set))
{
set = [];
sets[skeletonKey] = set;
}
uint maxExclusive = boneCount;
uint ushortExclusive = (uint)ushort.MaxValue + 1u;
if (maxExclusive > ushortExclusive)
maxExclusive = ushortExclusive;
for (uint boneIdx = 0; boneIdx < maxExclusive; boneIdx++)
{
var name = havokSkel->Bones[boneIdx].Name.String;
if (name == null)
continue;
set.Add((ushort)boneIdx);
}
_logger.LogTrace("Local skeleton raw file='{raw}', key='{key}', boneCount={count}",
rawName, skeletonKey, boneCount);
}
}
catch (Exception ex)
{
_logger.LogWarning(ex, "Could not process skeleton data");
return null;
}
if (sets.Count == 0)
return null;
var output = new Dictionary<string, List<ushort>>(sets.Count, StringComparer.OrdinalIgnoreCase);
foreach (var (key, set) in sets)
{
if (set.Count == 0)
continue;
var list = set.ToList();
list.Sort();
output[key] = list;
}
return (output.Count != 0 && output.Values.All(v => v.Count > 0)) ? output : null;
}
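// Reads a .pap animation file by hash, extracts the embedded Havok block and returns the bone indices
// its animation bindings drive, grouped by canonical skeleton key. Results are cached in BonesDictionary.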
public unsafe Dictionary<string, List<ushort>>? GetBoneIndicesFromPap(string hash, bool persistToConfig = true)
{
if (string.IsNullOrWhiteSpace(hash))
return null;
if (_configService.Current.BonesDictionary.TryGetValue(hash, out var cached) && cached is not null)
return cached;
var cacheEntity = _fileCacheManager.GetFileCacheByHash(hash);
if (cacheEntity == null || string.IsNullOrEmpty(cacheEntity.ResolvedFilepath) || !File.Exists(cacheEntity.ResolvedFilepath))
return null;
using var fs = File.Open(cacheEntity.ResolvedFilepath, FileMode.Open, FileAccess.Read, FileShare.Read);
using var reader = new BinaryReader(fs);
// PAP header (mostly from vfxeditor)
try
{
_ = reader.ReadInt32(); // magic
_ = reader.ReadInt32(); // version
var numAnimations = reader.ReadInt16(); // num animations
var modelId = reader.ReadInt16(); // modelid
if (numAnimations < 0 || numAnimations > 1000)
{
_logger.LogWarning("PAP file {hash} has invalid animation count {count}, skipping", hash, numAnimations);
return null;
}
var type = reader.ReadByte(); // type
if (type != 0)
return null; // not human
_ = reader.ReadByte(); // variant
_ = reader.ReadInt32(); // info offset (not needed here)
var havokPosition = reader.ReadInt32();
var footerPosition = reader.ReadInt32();
if (havokPosition <= 0 || footerPosition <= havokPosition ||
footerPosition > fs.Length || havokPosition >= fs.Length)
{
_logger.LogWarning("PAP file {hash} has invalid offsets (havok={havok}, footer={footer}, length={length})",
hash, havokPosition, footerPosition, fs.Length);
return null;
}
var havokDataSizeLong = (long)footerPosition - havokPosition;
if (havokDataSizeLong <= 8 || havokDataSizeLong > int.MaxValue)
{
_logger.LogWarning("PAP file {hash} has invalid Havok data size {size}", hash, havokDataSizeLong);
return null;
}
var havokDataSize = (int)havokDataSizeLong;
reader.BaseStream.Position = havokPosition;
var havokData = reader.ReadBytes(havokDataSize);
if (havokData.Length != havokDataSize)
return null;
var tempSets = new Dictionary<string, HashSet<ushort>>(StringComparer.OrdinalIgnoreCase);
var tempHavokDataPath = Path.Combine(Path.GetTempPath(), $"lightless_{Guid.NewGuid():N}.hkx");
try
{
var tempDir = Path.GetDirectoryName(tempHavokDataPath);
if (!Directory.Exists(tempDir))
{
_logger.LogWarning("Temp directory {dir} doesn't exist", tempDir);
return null;
}
// Write the file with explicit error handling
try
{
File.WriteAllBytes(tempHavokDataPath, havokData);
}
catch (Exception writeEx)
{
_logger.LogError(writeEx, "Failed to write temporary Havok file to {path}", tempHavokDataPath);
return null;
}
if (!File.Exists(tempHavokDataPath))
{
_logger.LogWarning("Temporary havok file was not created at {path}", tempHavokDataPath);
return null;
}
var writtenFileInfo = new FileInfo(tempHavokDataPath);
if (writtenFileInfo.Length != havokData.Length)
{
_logger.LogWarning("Written temp file size mismatch: expected {expected}, got {actual}",
havokData.Length, writtenFileInfo.Length);
try { File.Delete(tempHavokDataPath); } catch { }
return null;
}
Thread.Sleep(10); // stabilize file system
try
{
using var testStream = File.OpenRead(tempHavokDataPath);
if (testStream.Length != havokData.Length)
{
_logger.LogWarning("File verification failed: length mismatch after write");
try { File.Delete(tempHavokDataPath); } catch { }
return null;
}
}
catch (Exception readEx)
{
_logger.LogError(readEx, "Cannot read back temporary file at {path}", tempHavokDataPath);
try { File.Delete(tempHavokDataPath); } catch { }
return null;
}
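// hkSerializeUtil.LoadFromFile expects a null-terminated path, so the Havok block is loaded back
// from the verified temp file written above rather than from the in-memory buffer.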
var pathBytes = System.Text.Encoding.ASCII.GetBytes(tempHavokDataPath + "\0");
hkSerializeUtil.LoadOptions loadOptions = default;
loadOptions.TypeInfoRegistry = hkBuiltinTypeRegistry.Instance()->GetTypeInfoRegistry();
loadOptions.ClassNameRegistry = hkBuiltinTypeRegistry.Instance()->GetClassNameRegistry();
loadOptions.Flags = new hkFlags<hkSerializeUtil.LoadOptionBits, int>
{
Storage = (int)hkSerializeUtil.LoadOptionBits.Default
};
fixed (byte* pPath = pathBytes)
{
var resource = hkSerializeUtil.LoadFromFile(pPath, errorResult: null, &loadOptions);
if (resource == null)
return null;
var rootLevelName = @"hkRootLevelContainer"u8;
fixed (byte* n1 = rootLevelName)
{
var container = (hkRootLevelContainer*)resource->GetContentsPointer(n1, hkBuiltinTypeRegistry.Instance()->GetTypeInfoRegistry());
if (container == null)
return null;
var animationName = @"hkaAnimationContainer"u8;
fixed (byte* n2 = animationName)
{
var animContainer = (hkaAnimationContainer*)container->findObjectByName(n2, null);
if (animContainer == null)
return null;
for (int i = 0; i < animContainer->Bindings.Length; i++)
{
var binding = animContainer->Bindings[i].ptr;
if (binding == null)
continue;
var rawSkel = binding->OriginalSkeletonName.String;
var skeletonKey = CanonicalizeSkeletonKey(rawSkel);
if (string.IsNullOrEmpty(skeletonKey) || string.Equals(skeletonKey, "skeleton", StringComparison.OrdinalIgnoreCase))
skeletonKey = "__any__";
var boneTransform = binding->TransformTrackToBoneIndices;
if (boneTransform.Length <= 0)
continue;
if (!tempSets.TryGetValue(skeletonKey, out var set))
{
set = [];
tempSets[skeletonKey] = set;
}
for (int boneIdx = 0; boneIdx < boneTransform.Length; boneIdx++)
{
var v = boneTransform[boneIdx];
if (v < 0) continue;
set.Add((ushort)v);
}
}
}
}
}
}
catch (Exception ex)
{
_logger.LogWarning(ex, "Could not load havok file in {path}", tempHavokDataPath);
return null;
}
finally
{
int retryCount = 3;
while (retryCount > 0 && File.Exists(tempHavokDataPath))
{
try
{
File.Delete(tempHavokDataPath);
break;
}
catch (IOException ex)
{
retryCount--;
if (retryCount == 0)
{
_logger.LogDebug(ex, "Failed to delete temporary havok file after retries: {path}", tempHavokDataPath);
}
else
{
Thread.Sleep(50);
}
}
catch (Exception ex)
{
_logger.LogDebug(ex, "Unexpected error deleting temporary havok file: {path}", tempHavokDataPath);
break;
}
}
}
if (tempSets.Count == 0)
{
_logger.LogDebug("No bone sets found in PAP file (hash={hash})", hash);
return null;
}
var output = new Dictionary<string, List<ushort>>(tempSets.Count, StringComparer.OrdinalIgnoreCase);
foreach (var (key, set) in tempSets)
{
if (set.Count == 0) continue;
var list = set.ToList();
list.Sort();
output[key] = list;
}
if (output.Count == 0)
return null;
_configService.Current.BonesDictionary[hash] = output;
if (persistToConfig)
_configService.Save();
return output;
}
catch (Exception ex)
{
_logger.LogError(ex, "Outer exception reading PAP file (hash={hash})", hash);
return null;
}
}
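// Best-effort probe that a native pointer can be dereferenced without faulting.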
private static bool IsValidPointer(IntPtr ptr)
{
if (ptr == IntPtr.Zero)
return false;
try
{
_ = Marshal.ReadByte(ptr);
return true;
}
catch
{
return false;
}
}
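// Reduces a skeleton resource name to a canonical bucket key: anything after the last underscore is
// trimmed when it starts with a digit, names starting with "skeleton" collapse to "skeleton", and
// otherwise the c#### identifier is extracted (e.g. "/c0101/" in a path or "skl_c0101b0001" -> "c0101").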
public static string CanonicalizeSkeletonKey(string? raw)
{
if (string.IsNullOrWhiteSpace(raw))
return string.Empty;
var s = raw.Replace('\\', '/').Trim();
var underscore = s.LastIndexOf('_');
if (underscore > 0 && underscore + 1 < s.Length && char.IsDigit(s[underscore + 1]))
s = s[..underscore];
if (s.StartsWith("skeleton", StringComparison.OrdinalIgnoreCase))
return "skeleton";
var m = _bucketPathRegex.Match(s);
if (m.Success)
return m.Groups["bucket"].Value.ToLowerInvariant();
m = _bucketSklRegex.Match(s);
if (m.Success)
return m.Groups["bucket"].Value.ToLowerInvariant();
m = _bucketLooseRegex.Match(s);
if (m.Success)
return m.Groups["bucket"].Value.ToLowerInvariant();
return string.Empty;
}
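// Checks whether a PAP bone index is satisfied by the available local indices, optionally also accepting
// a one-based-to-zero-based shift and/or the direct neighbors of each candidate index.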
public static bool ContainsIndexCompat(
HashSet<ushort> available,
ushort idx,
bool papLikelyOneBased,
bool allowOneBasedShift,
bool allowNeighborTolerance)
{
Span<ushort> candidates = stackalloc ushort[2];
int count = 0;
candidates[count++] = idx;
if (allowOneBasedShift && papLikelyOneBased && idx > 0)
candidates[count++] = (ushort)(idx - 1);
for (int i = 0; i < count; i++)
{
var c = candidates[i];
if (available.Contains(c))
return true;
if (allowNeighborTolerance)
{
if (c > 0 && available.Contains((ushort)(c - 1)))
return true;
if (c < ushort.MaxValue && available.Contains((ushort)(c + 1)))
return true;
}
}
return false;
}
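// Validates a PAP's bone bindings against the local actor's skeleton buckets. Generic ("__any__") buckets
// may match any local skeleton; named buckets must be present on the actor and cover every bound index.
// Unsafe mode skips validation entirely.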
public static bool IsPapCompatible(
IReadOnlyDictionary<string, HashSet<ushort>> localBoneSets,
IReadOnlyDictionary<string, List<ushort>> papBoneIndices,
AnimationValidationMode mode,
bool allowOneBasedShift,
bool allowNeighborTolerance,
out string reason)
{
reason = string.Empty;
if (mode == AnimationValidationMode.Unsafe)
return true;
var papByBucket = new Dictionary<string, List<ushort>>(StringComparer.OrdinalIgnoreCase);
foreach (var (rawKey, list) in papBoneIndices)
{
var key = CanonicalizeSkeletonKey(rawKey);
if (string.IsNullOrEmpty(key))
continue;
if (string.Equals(key, "skeleton", StringComparison.OrdinalIgnoreCase))
key = "__any__";
if (!papByBucket.TryGetValue(key, out var acc))
papByBucket[key] = acc = [];
if (list is { Count: > 0 })
acc.AddRange(list);
}
foreach (var k in papByBucket.Keys.ToList())
papByBucket[k] = papByBucket[k].Distinct().ToList();
if (papByBucket.Count == 0)
{
reason = "No skeleton bucket bindings found in the PAP";
return false;
}
static bool AllIndicesOk(
HashSet<ushort> available,
List<ushort> indices,
bool papLikelyOneBased,
bool allowOneBasedShift,
bool allowNeighborTolerance,
out ushort missing)
{
foreach (var idx in indices)
{
if (!ContainsIndexCompat(available, idx, papLikelyOneBased, allowOneBasedShift, allowNeighborTolerance))
{
missing = idx;
return false;
}
}
missing = 0;
return true;
}
foreach (var (bucket, indices) in papByBucket)
{
if (indices.Count == 0)
continue;
bool has0 = false, has1 = false;
ushort min = ushort.MaxValue;
foreach (var v in indices)
{
if (v == 0) has0 = true;
if (v == 1) has1 = true;
if (v < min) min = v;
}
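// Heuristic: indices starting at 1 with no index 0 suggest a one-based track-to-bone mapping.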
bool papLikelyOneBased = allowOneBasedShift && (min == 1) && has1 && !has0;
if (string.Equals(bucket, "__any__", StringComparison.OrdinalIgnoreCase))
{
foreach (var (lk, ls) in localBoneSets)
{
if (AllIndicesOk(ls, indices, papLikelyOneBased, allowOneBasedShift, allowNeighborTolerance, out _))
goto nextBucket;
}
reason = $"No compatible local skeleton bucket for generic PAP skeleton '{bucket}'. Local buckets: {string.Join(", ", localBoneSets.Keys)}";
return false;
}
if (!localBoneSets.TryGetValue(bucket, out var available))
{
reason = $"Missing skeleton bucket '{bucket}' on local actor.";
return false;
}
if (!AllIndicesOk(available, indices, papLikelyOneBased, allowOneBasedShift, allowNeighborTolerance, out var missing))
{
reason = $"No compatible local skeleton for PAP '{bucket}': missing bone index {missing}.";
return false;
}
nextBucket:
;
}
return true;
}
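// Trace-logs the local skeleton buckets, optionally highlighting keys that match the given filter.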
public void DumpLocalSkeletonIndices(GameObjectHandler handler, string? filter = null)
{
var skels = GetSkeletonBoneIndices(handler);
if (skels == null)
{
_logger.LogTrace("DumpLocalSkeletonIndices: local skeleton indices are null or not found");
return;
}
var keys = skels.Keys
.Order(StringComparer.OrdinalIgnoreCase)
.ToArray();
_logger.LogTrace("Local skeleton indices found ({count}): {keys}",
keys.Length,
string.Join(", ", keys));
if (!string.IsNullOrWhiteSpace(filter))
{
var hits = keys.Where(k =>
k.Equals(filter, StringComparison.OrdinalIgnoreCase) ||
k.StartsWith(filter + "_", StringComparison.OrdinalIgnoreCase) ||
filter.StartsWith(k + "_", StringComparison.OrdinalIgnoreCase) ||
k.Contains(filter, StringComparison.OrdinalIgnoreCase))
.ToArray();
_logger.LogTrace("Matches found for '{filter}': {hits}",
filter,
hits.Length == 0 ? "<none>" : string.Join(", ", hits));
}
}
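// Triangle counts are computed once per model hash and cached in the config; hashes that fail to parse
// are remembered in-memory so they are not retried on every call.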
public Task<long> GetTrianglesByHash(string hash)
{
if (_configService.Current.TriangleDictionary.TryGetValue(hash, out var cachedTris) && cachedTris > 0)
return Task.FromResult(cachedTris);
if (_failedCalculatedTris.Contains(hash, StringComparer.Ordinal))
return Task.FromResult(0L);
var path = _fileCacheManager.GetFileCacheByHash(hash);
if (path == null || !path.ResolvedFilepath.EndsWith(".mdl", StringComparison.OrdinalIgnoreCase))
return Task.FromResult(0L);
return Task.FromResult(CalculateTrianglesFromPath(hash, path.ResolvedFilepath, _configService.Current.TriangleDictionary, _failedCalculatedTris));
}
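// Variant of GetTrianglesByHash that works from an explicit, already-resolved file path (the "effective"
// model) and keeps its results in a separate cache.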
public Task<long> GetEffectiveTrianglesByHash(string hash, string filePath)
{
if (_configService.Current.EffectiveTriangleDictionary.TryGetValue(hash, out var cachedTris) && cachedTris > 0)
return Task.FromResult(cachedTris);
if (_failedCalculatedEffectiveTris.Contains(hash, StringComparer.Ordinal))
return Task.FromResult(0L);
if (string.IsNullOrEmpty(filePath)
|| !filePath.EndsWith(".mdl", StringComparison.OrdinalIgnoreCase)
|| !File.Exists(filePath))
{
return Task.FromResult(0L);
}
return Task.FromResult(CalculateTrianglesFromPath(hash, filePath, _configService.Current.EffectiveTriangleDictionary, _failedCalculatedEffectiveTris));
}
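// Parses the .mdl file and derives the triangle count (index count / 3) from the first LOD whose meshes
// report indices. Both successful and failed results are written to the given cache and persisted.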
private long CalculateTrianglesFromPath(
string hash,
string filePath,
ConcurrentDictionary<string, long> cache,
List<string> failedList)
{
try
{
_logger.LogDebug("Detected Model File {path}, calculating Tris", filePath);
var file = new MdlFile(filePath);
if (file.LodCount <= 0)
{
failedList.Add(hash);
cache[hash] = 0;
_configService.Save();
return 0;
}
long tris = 0;
foreach (var lod in file.Lods)
{
try
{
var meshIdx = lod.MeshIndex;
var meshCnt = lod.MeshCount;
tris = file.Meshes.Skip(meshIdx).Take(meshCnt).Sum(p => p.IndexCount) / 3;
}
catch (Exception ex)
{
_logger.LogDebug(ex, "Could not load lod mesh {mesh} from path {path}", lod.MeshIndex, filePath);
continue;
}
if (tris > 0)
{
_logger.LogDebug("TriAnalysis: {filePath} => {tris} triangles", filePath, tris);
cache[hash] = tris;
_configService.Save();
break;
}
}
return tris;
}
catch (Exception e)
{
failedList.Add(hash);
cache[hash] = 0;
_configService.Save();
_logger.LogWarning(e, "Could not parse file {file}", filePath);
return 0;
}
}
// Regexes for canonicalizing skeleton keys
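// _bucketPathRegex matches a "c####" path segment (e.g. ".../c0101/...").
// _bucketSklRegex matches sklb-style names such as "skl_c0101b0001".
// _bucketLooseRegex matches any standalone "c####" token as a last resort.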
private static readonly Regex _bucketPathRegex =
BucketRegex();
private static readonly Regex _bucketSklRegex =
SklRegex();
private static readonly Regex _bucketLooseRegex =
LooseBucketRegex();
[GeneratedRegex(@"(?i)(?:^|/)(?<bucket>c\d{4})(?:/|$)", RegexOptions.Compiled, "en-NL")]
private static partial Regex BucketRegex();
[GeneratedRegex(@"(?i)\bskl_(?<bucket>c\d{4})[a-z]\d{4}\b", RegexOptions.Compiled, "en-NL")]
private static partial Regex SklRegex();
[GeneratedRegex(@"(?i)(?<![a-z0-9])(?<bucket>c\d{4})(?!\d)", RegexOptions.Compiled, "en-NL")]
private static partial Regex LooseBucketRegex();
}