Compare commits

..

18 Commits

Author  SHA1  Message  Date
cake  3654365f2a  bump version  2026-01-06 14:45:23 +01:00
    Tag and Release Lightless / tag-and-release (push) successful in 2m6s
cake  9b256dd185  Merge branch '2.0.3' into dev  2026-01-06 14:45:02 +01:00
cake  d8b9e9cf19  Splitting havok tasks.  2026-01-06 14:27:01 +01:00
cake  ad34d88336  Merged I18N and latest 2.0.3 changes  2026-01-06 13:54:04 +01:00
9167bb1afd  i18n init (#135)  2026-01-06 12:51:29 +00:00
    shouldnt break anything?
    Co-authored-by: Tsubasahane <wozaiha@gmail.com>
    Reviewed-on: #135
    Co-authored-by: Tsubasa <tsubasa@noreply.git.lightless-sync.org>
    Co-committed-by: Tsubasa <tsubasa@noreply.git.lightless-sync.org>
cake  5161c6bad3  Attempt fix on crash.  2026-01-06 13:50:58 +01:00
defnotken  223ade39cb  another push  2026-01-05 20:48:24 -06:00
    Tag and Release Lightless / tag-and-release (push) successful in 2m13s
defnotken  5aca9e70b2  Merge branch '2.0.3' into dev  2026-01-05 20:47:38 -06:00
defnotken  ce28799db3  More checks for animations and bones.  2026-01-05 20:46:14 -06:00
defnotken  92772cf334  dev push  2026-01-05 20:21:26 -06:00
    Tag and Release Lightless / tag-and-release (push) successful in 2m12s
defnotken  0395e81a9f  Merge branch '2.0.3' into dev  2026-01-05 20:17:12 -06:00
defnotken  7734a7bf7e  dev build  2026-01-05 17:42:21 -06:00
    Tag and Release Lightless / tag-and-release (push) successful in 2m8s
defnotken  db2d19bb1e  Merge branch '2.0.3' into dev  2026-01-05 17:41:48 -06:00
defnotken  ab305a249c  more checks  2026-01-05 15:48:54 -06:00
    Tag and Release Lightless / tag-and-release (push) successful in 2m7s
defnotken  9d104a9dd8  Merge branch '2.0.3' into dev  2026-01-05 15:42:15 -06:00
defnotken  bcd3bd5ca2  add more checks  2026-01-05 15:08:26 -06:00
    Tag and Release Lightless / tag-and-release (push) successful in 2m8s
defnotken  c1829a9837  Merge branch '2.0.3' into dev  2026-01-05 14:48:47 -06:00
    Tag and Release Lightless / tag-and-release (push) successful in 2m14s
defnotken  cca23f6e05  Building Dev  2026-01-05 10:50:25 -06:00
    Tag and Release Lightless / tag-and-release (push) successful in 2m27s

7 changed files with 252 additions and 369 deletions

@@ -3,7 +3,7 @@
<PropertyGroup>
<Authors></Authors>
<Company></Company>
<Version>2.0.3</Version>
<Version>2.0.2.76</Version>
<Description></Description>
<Copyright></Copyright>
<PackageProjectUrl>https://github.com/Light-Public-Syncshells/LightlessClient</PackageProjectUrl>

@@ -566,20 +566,20 @@ public class PlayerDataFactory
await _papParseLimiter.WaitAsync(ct).ConfigureAwait(false);
try
{
try
var cacheEntity = _fileCacheManager.GetFileCacheByHash(hash);
var papPath = cacheEntity?.ResolvedFilepath;
if (!string.IsNullOrEmpty(papPath) && File.Exists(papPath))
{
papIndices = await Task.Run(() => _modelAnalyzer.GetBoneIndicesFromPap(hash, persistToConfig: false), ct)
var havokBytes = await Task.Run(() => XivDataAnalyzer.ReadHavokBytesFromPap(papPath), ct)
.ConfigureAwait(false);
}
catch (SEHException ex)
{
_logger.LogError(ex, "SEH exception while parsing PAP file (hash={hash}, path={path}). Error code: 0x{code:X}. Skipping this animation.", hash, papPathSummary, ex.ErrorCode);
continue;
}
catch (Exception ex)
{
_logger.LogError(ex, "Unexpected error parsing PAP file (hash={hash}, path={path}). Skipping this animation.", hash, papPathSummary);
continue;
if (havokBytes is { Length: > 8 })
{
papIndices = await _dalamudUtil.RunOnFrameworkThread(
() => _modelAnalyzer.ParseHavokBytesOnFrameworkThread(havokBytes, hash, persistToConfig: false))
.ConfigureAwait(false);
}
}
}
finally
@@ -590,20 +590,6 @@ public class PlayerDataFactory
if (papIndices == null || papIndices.Count == 0)
continue;
bool hasValidIndices = false;
try
{
hasValidIndices = papIndices.All(k => k.Value != null && k.Value.DefaultIfEmpty().Max() <= 105);
}
catch (Exception ex)
{
_logger.LogWarning(ex, "Error validating bone indices for PAP (hash={hash}, path={path}). Skipping.", hash, papPathSummary);
continue;
}
if (hasValidIndices)
continue;
if (_logger.IsEnabled(LogLevel.Debug))
{
try
@@ -703,8 +689,8 @@ public class PlayerDataFactory
return new Dictionary<string, string[]>(StringComparer.OrdinalIgnoreCase).AsReadOnly();
}
var forwardPathsLower = forwardPaths.Length == 0 ? Array.Empty<string>() : forwardPaths.Select(p => p.ToLowerInvariant()).ToArray();
var reversePathsLower = reversePaths.Length == 0 ? Array.Empty<string>() : reversePaths.Select(p => p.ToLowerInvariant()).ToArray();
var forwardPathsLower = forwardPaths.Length == 0 ? [] : forwardPaths.Select(p => p.ToLowerInvariant()).ToArray();
var reversePathsLower = reversePaths.Length == 0 ? [] : reversePaths.Select(p => p.ToLowerInvariant()).ToArray();
Dictionary<string, List<string>> resolvedPaths = new(forwardPaths.Length + reversePaths.Length, StringComparer.Ordinal);
if (handler.ObjectKind != ObjectKind.Player)
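
The PlayerDataFactory hunk above is where the "Splitting havok tasks." commit lands: the single blocking call to GetBoneIndicesFromPap (with its SEHException/Exception handling) becomes an off-thread file read via XivDataAnalyzer.ReadHavokBytesFromPap followed by a framework-thread parse via ParseHavokBytesOnFrameworkThread, serialized through _papParseLimiter. A minimal sketch of that calling pattern; the framework-thread dispatcher and the analyzer are reduced to hypothetical delegates here and are not the project's actual API surface:

using System;
using System.Collections.Generic;
using System.IO;
using System.Threading;
using System.Threading.Tasks;

static class PapParseSketch
{
    // One Havok parse at a time, mirroring the diff's _papParseLimiter.
    private static readonly SemaphoreSlim PapParseLimiter = new(1, 1);

    // readHavokBytes stands in for XivDataAnalyzer.ReadHavokBytesFromPap,
    // runOnFrameworkThread for _dalamudUtil.RunOnFrameworkThread, and
    // parseHavokBytes for _modelAnalyzer.ParseHavokBytesOnFrameworkThread.
    public static async Task<Dictionary<string, List<ushort>>?> GetPapBoneIndicesAsync(
        string papPath,
        Func<string, byte[]?> readHavokBytes,
        Func<Func<Dictionary<string, List<ushort>>?>, Task<Dictionary<string, List<ushort>>?>> runOnFrameworkThread,
        Func<byte[], Dictionary<string, List<ushort>>?> parseHavokBytes,
        CancellationToken ct)
    {
        if (string.IsNullOrEmpty(papPath) || !File.Exists(papPath))
            return null;

        // Step 1: slice the embedded Havok blob out of the PAP on a worker thread (plain file I/O).
        var havokBytes = await Task.Run(() => readHavokBytes(papPath), ct).ConfigureAwait(false);
        if (havokBytes is not { Length: > 8 })
            return null;

        // Step 2: parse on the game's framework thread, one parse at a time.
        await PapParseLimiter.WaitAsync(ct).ConfigureAwait(false);
        try
        {
            return await runOnFrameworkThread(() => parseHavokBytes(havokBytes)).ConfigureAwait(false);
        }
        finally
        {
            PapParseLimiter.Release();
        }
    }
}

The two call sites in this compare scope the semaphore slightly differently: PlayerDataFactory holds its _papParseLimiter across both steps, while PairHandlerAdapter (next file) takes its own limiter only around the framework-thread parse; the sketch follows the narrower scope.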

@@ -121,6 +121,7 @@ internal sealed class PairHandlerAdapter : DisposableMediatorSubscriberBase, IPa
private DateTime _nextActorLookupUtc = DateTime.MinValue;
private static readonly TimeSpan ActorLookupInterval = TimeSpan.FromSeconds(1);
private static readonly SemaphoreSlim ActorInitializationLimiter = new(1, 1);
private static readonly SemaphoreSlim _papParseLimiter = new(1, 1);
private const int FullyLoadedTimeoutMsPlayer = 30000;
private const int FullyLoadedTimeoutMsOther = 5000;
private readonly object _actorInitializationGate = new();
@@ -2910,13 +2911,13 @@ internal sealed class PairHandlerAdapter : DisposableMediatorSubscriberBase, IPa
var mode = _configService.Current.AnimationValidationMode;
var allowBasedShift = _configService.Current.AnimationAllowOneBasedShift;
var allownNightIndex = _configService.Current.AnimationAllowNeighborIndexTolerance;
var allowNeighborIndex = _configService.Current.AnimationAllowNeighborIndexTolerance;
if (mode == AnimationValidationMode.Unsafe || papOnly.Count == 0)
return 0;
var boneIndices = await _dalamudUtil.RunOnFrameworkThread(
() => _modelAnalyzer.GetSkeletonBoneIndices(handlerForApply))
() => _modelAnalyzer.GetSkeletonBoneIndices(handlerForApply))
.ConfigureAwait(false);
if (boneIndices == null || boneIndices.Count == 0)
@@ -2930,47 +2931,86 @@ internal sealed class PairHandlerAdapter : DisposableMediatorSubscriberBase, IPa
foreach (var (rawKey, list) in boneIndices)
{
var key = XivDataAnalyzer.CanonicalizeSkeletonKey(rawKey);
if (string.IsNullOrEmpty(key)) continue;
if (string.IsNullOrEmpty(key) || list == null || list.Count == 0)
continue;
if (!localBoneSets.TryGetValue(key, out var set))
localBoneSets[key] = set = [];
localBoneSets[key] = set = new HashSet<ushort>();
foreach (var v in list)
set.Add(v);
}
if (localBoneSets.Count == 0)
{
var removedCount = papOnly.Count;
papOnly.Clear();
return removedCount;
}
int removed = 0;
foreach (var hash in papOnly.Keys.Select(k => k.Hash).Where(h => !string.IsNullOrEmpty(h)).Distinct(StringComparer.OrdinalIgnoreCase).ToList())
var groups = papOnly
.Where(kvp => !string.IsNullOrEmpty(kvp.Key.Hash))
.GroupBy(kvp => kvp.Key.Hash!, StringComparer.OrdinalIgnoreCase)
.ToList();
foreach (var grp in groups)
{
token.ThrowIfCancellationRequested();
var papIndices = await _dalamudUtil.RunOnFrameworkThread(
() => _modelAnalyzer.GetBoneIndicesFromPap(hash!))
.ConfigureAwait(false);
var hash = grp.Key;
var papPath = grp.Select(x => x.Value)
.FirstOrDefault(p => !string.IsNullOrEmpty(p) && File.Exists(p));
if (string.IsNullOrEmpty(papPath))
continue;
var havokBytes = await Task.Run(() => XivDataAnalyzer.ReadHavokBytesFromPap(papPath), token)
.ConfigureAwait(false);
if (havokBytes is not { Length: > 8 })
continue;
Dictionary<string, List<ushort>>? papIndices;
await _papParseLimiter.WaitAsync(token).ConfigureAwait(false);
try
{
papIndices = await _dalamudUtil.RunOnFrameworkThread(
() => _modelAnalyzer.ParseHavokBytesOnFrameworkThread(havokBytes, hash, persistToConfig: false))
.ConfigureAwait(false);
}
finally
{
_papParseLimiter.Release();
}
if (papIndices == null || papIndices.Count == 0)
continue;
if (papIndices.All(k => k.Value.DefaultIfEmpty().Max() <= 105))
if (papIndices.All(k => k.Value == null || k.Value.Count == 0 || k.Value.Max() <= 105))
continue;
if (XivDataAnalyzer.IsPapCompatible(localBoneSets, papIndices, mode, allowBasedShift, allownNightIndex, out var reason))
if (XivDataAnalyzer.IsPapCompatible(localBoneSets, papIndices, mode, allowBasedShift, allowNeighborIndex, out var reason))
continue;
var keysToRemove = papOnly.Keys.Where(k => string.Equals(k.Hash, hash, StringComparison.OrdinalIgnoreCase)).ToList();
var keysToRemove = grp.Select(x => x.Key).ToList();
foreach (var k in keysToRemove)
papOnly.Remove(k);
removed += keysToRemove.Count;
if (_blockedPapHashes.TryAdd(hash!, 0))
Logger.LogWarning("Blocked remote object PAP (hash {hash}) for {handler}: {reason}", hash, GetLogIdentifier(), reason);
if (_blockedPapHashes.TryAdd(hash, 0))
Logger.LogWarning("Blocked remote object PAP {papPath} (hash {hash}) for {handler}: {reason}",
papPath, hash, GetLogIdentifier(), reason);
if (charaData.FileReplacements.TryGetValue(ObjectKind.Player, out var list))
{
list.RemoveAll(r => string.Equals(r.Hash, hash, StringComparison.OrdinalIgnoreCase)
&& r.GamePaths.Any(p => p.EndsWith(".pap", StringComparison.OrdinalIgnoreCase)));
list.RemoveAll(r =>
string.Equals(r.Hash, hash, StringComparison.OrdinalIgnoreCase) &&
r.GamePaths.Any(p => p.EndsWith(".pap", StringComparison.OrdinalIgnoreCase)));
}
}
@@ -2984,6 +3024,7 @@ internal sealed class PairHandlerAdapter : DisposableMediatorSubscriberBase, IPa
return removed;
}
private async Task ApplyCustomizeAsync(nint address, string customizeData, ObjectKind kind)
{
_customizeIds[kind] = await _ipcManager.CustomizePlus.SetBodyScaleAsync(address, customizeData).ConfigureAwait(false);
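
In the PairHandlerAdapter hunk, the per-hash loop no longer re-resolves each hash through the file cache and GetBoneIndicesFromPap. It groups the pending papOnly entries by hash (case-insensitively), picks any entry whose resolved path still exists on disk, runs the split read/parse shown above, and removes every key in a group that fails IsPapCompatible. A self-contained sketch of that group-and-remove shape, with a hypothetical key record and a caller-supplied compatibility check standing in for the real types:

using System;
using System.Collections.Generic;
using System.IO;
using System.Linq;

// Hypothetical stand-in for the real dictionary key, which exposes the file hash.
public sealed record PapKey(string? Hash, string GamePath);

public static class PapFilterSketch
{
    // Removes every papOnly entry whose hash fails the supplied check and
    // returns the number of removed entries, mirroring the loop in the diff.
    public static int RemoveIncompatible(
        Dictionary<PapKey, string> papOnly,          // key -> resolved file path
        Func<string, string, bool> isCompatible)     // (hash, papPath) -> keep?
    {
        int removed = 0;

        var groups = papOnly
            .Where(kvp => !string.IsNullOrEmpty(kvp.Key.Hash))
            .GroupBy(kvp => kvp.Key.Hash!, StringComparer.OrdinalIgnoreCase)
            .ToList();

        foreach (var grp in groups)
        {
            // One representative path per hash is enough to inspect the PAP.
            var papPath = grp.Select(x => x.Value)
                .FirstOrDefault(p => !string.IsNullOrEmpty(p) && File.Exists(p));
            if (string.IsNullOrEmpty(papPath) || isCompatible(grp.Key, papPath))
                continue;

            // Incompatible: drop every entry that shares this hash.
            var keysToRemove = grp.Select(x => x.Key).ToList();
            foreach (var k in keysToRemove)
                papOnly.Remove(k);
            removed += keysToRemove.Count;
        }

        return removed;
    }
}

The real method also records blocked hashes in _blockedPapHashes and strips matching .pap replacements from charaData.FileReplacements; those side effects are omitted from the sketch.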

@@ -1,6 +1,7 @@
//------------------------------------------------------------------------------
// <auto-generated>
// This code was generated by a tool.
// Runtime Version:4.0.30319.42000
//
// Changes to this file may cause incorrect behavior and will be lost if
// the code is regenerated.
@@ -18,7 +19,7 @@ namespace LightlessSync.Resources {
// class via a tool like ResGen or Visual Studio.
// To add or remove a member, edit your .ResX file then rerun ResGen
// with the /str option, or rebuild your VS project.
[global::System.CodeDom.Compiler.GeneratedCodeAttribute("System.Resources.Tools.StronglyTypedResourceBuilder", "4.0.0.0")]
[global::System.CodeDom.Compiler.GeneratedCodeAttribute("System.Resources.Tools.StronglyTypedResourceBuilder", "18.0.0.0")]
[global::System.Diagnostics.DebuggerNonUserCodeAttribute()]
[global::System.Runtime.CompilerServices.CompilerGeneratedAttribute()]
public class Resources {

@@ -93,7 +93,7 @@ public class DalamudUtilService : IHostedService, IMediatorSubscriber
{
return gameData.GetExcelSheet<Lumina.Excel.Sheets.World>(clientLanguage)!
.Where(w => !w.Name.IsEmpty && w.DataCenter.RowId != 0 && (w.IsPublic || char.IsUpper(w.Name.ToString()[0])
|| w is { RowId: > 1000, UserType: 101 or 201 }))
|| w is { RowId: > 1000, Region: 101 or 201 }))
.ToDictionary(w => (ushort)w.RowId, w => w.Name.ToString());
});
JobData = new(() =>
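
The DalamudUtilService change is a one-property rename inside the world filter: the pattern w is { RowId: > 1000, UserType: 101 or 201 } becomes w is { RowId: > 1000, Region: 101 or 201 }, with the rest of the predicate untouched. Restated as a standalone predicate over a hypothetical row type so the filter is easier to read; the property names mirror the diff, not a verified Lumina.Excel.Sheets.World surface:

// Hypothetical stand-in for Lumina.Excel.Sheets.World, reduced to the fields the filter touches.
public sealed record WorldRow(uint RowId, string Name, uint DataCenterRowId, bool IsPublic, int Region);

public static class WorldFilterSketch
{
    // Mirrors the Where(...) clause in the diff: a named world on a data center that is
    // public, or has a capitalized name, or has RowId > 1000 with Region 101 or 201.
    public static bool IsListable(WorldRow w) =>
        !string.IsNullOrEmpty(w.Name)
        && w.DataCenterRowId != 0
        && (w.IsPublic
            || char.IsUpper(w.Name[0])
            || (w.RowId > 1000 && w.Region is 101 or 201));
}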

@@ -3,6 +3,7 @@ using FFXIVClientStructs.FFXIV.Client.Graphics.Scene;
using FFXIVClientStructs.Havok.Common.Serialize.Resource;
using FFXIVClientStructs.Havok.Animation;
using FFXIVClientStructs.Havok.Common.Base.Types;
using FFXIVClientStructs.Havok.Common.Serialize.Resource;
using FFXIVClientStructs.Havok.Common.Serialize.Util;
using LightlessSync.FileCache;
using LightlessSync.Interop.GameModel;
@@ -10,6 +11,7 @@ using LightlessSync.LightlessConfiguration;
using LightlessSync.PlayerData.Factories;
using LightlessSync.PlayerData.Handlers;
using Microsoft.Extensions.Logging;
using OtterGui.Text.EndObjects;
using System.Collections.Concurrent;
using System.Runtime.InteropServices;
using System.Text.RegularExpressions;
@@ -130,315 +132,135 @@ public sealed partial class XivDataAnalyzer
return (output.Count != 0 && output.Values.All(v => v.Count > 0)) ? output : null;
}
public unsafe Dictionary<string, List<ushort>>? GetBoneIndicesFromPap(string hash, bool persistToConfig = true)
public static byte[]? ReadHavokBytesFromPap(string papPath)
{
if (string.IsNullOrWhiteSpace(hash))
return null;
if (_configService.Current.BonesDictionary.TryGetValue(hash, out var cached) && cached is not null)
return cached;
var cacheEntity = _fileCacheManager.GetFileCacheByHash(hash);
if (cacheEntity == null || string.IsNullOrEmpty(cacheEntity.ResolvedFilepath) || !File.Exists(cacheEntity.ResolvedFilepath))
return null;
using var fs = File.Open(cacheEntity.ResolvedFilepath, FileMode.Open, FileAccess.Read, FileShare.Read);
using var fs = File.Open(papPath, FileMode.Open, FileAccess.Read, FileShare.Read);
using var reader = new BinaryReader(fs);
// PAP header (mostly from vfxeditor)
try
{
_ = reader.ReadInt32(); // ignore
_ = reader.ReadInt32(); // ignore
var numAnimations = reader.ReadInt16(); // num animations
var modelId = reader.ReadInt16(); // modelid
_ = reader.ReadInt32();
_ = reader.ReadInt32();
_ = reader.ReadInt16();
_ = reader.ReadInt16();
if (numAnimations < 0 || numAnimations > 1000)
{
_logger.LogWarning("PAP file {hash} has invalid animation count {count}, skipping", hash, numAnimations);
return null;
}
var type = reader.ReadByte();
if (type != 0) return null;
var type = reader.ReadByte(); // type
if (type != 0)
return null; // not human
_ = reader.ReadByte();
_ = reader.ReadInt32();
_ = reader.ReadByte(); // variant
_ = reader.ReadInt32(); // ignore
var havokPosition = reader.ReadInt32();
var footerPosition = reader.ReadInt32();
var havokPosition = reader.ReadInt32();
var footerPosition = reader.ReadInt32();
if (havokPosition <= 0 || footerPosition <= havokPosition ||
footerPosition > fs.Length || havokPosition >= fs.Length)
{
_logger.LogWarning("PAP file {hash} has invalid offsets (havok={havok}, footer={footer}, length={length})",
hash, havokPosition, footerPosition, fs.Length);
return null;
}
var havokDataSizeLong = (long)footerPosition - havokPosition;
if (havokDataSizeLong <= 8 || havokDataSizeLong > int.MaxValue)
{
_logger.LogWarning("PAP file {hash} has invalid Havok data size {size}", hash, havokDataSizeLong);
return null;
}
var havokDataSize = (int)havokDataSizeLong;
reader.BaseStream.Position = havokPosition;
var havokData = new byte[havokDataSize];
var bytesRead = reader.Read(havokData, 0, havokDataSize);
if (bytesRead != havokDataSize)
{
_logger.LogWarning("PAP file {hash}: Expected to read {expected} bytes but got {actual}",
hash, havokDataSize, bytesRead);
return null;
}
if (havokData.Length < 8)
return null;
var tempSets = new Dictionary<string, HashSet<ushort>>(StringComparer.OrdinalIgnoreCase);
var tempFileName = $"lightless_pap_{Guid.NewGuid():N}_{hash.Substring(0, Math.Min(8, hash.Length))}.hkx";
var tempHavokDataPath = Path.Combine(Path.GetTempPath(), tempFileName);
IntPtr tempHavokDataPathAnsi = IntPtr.Zero;
try
{
var tempDir = Path.GetDirectoryName(tempHavokDataPath);
if (!Directory.Exists(tempDir))
{
_logger.LogWarning("Temp directory {dir} doesn't exist", tempDir);
return null;
}
File.WriteAllBytes(tempHavokDataPath, havokData);
if (!File.Exists(tempHavokDataPath))
{
_logger.LogWarning("Temporary havok file was not created at {path}", tempHavokDataPath);
return null;
}
var writtenFileInfo = new FileInfo(tempHavokDataPath);
if (writtenFileInfo.Length != havokData.Length)
{
_logger.LogWarning("Written temp file size mismatch: expected {expected}, got {actual}",
havokData.Length, writtenFileInfo.Length);
File.Delete(tempHavokDataPath);
return null;
}
tempHavokDataPathAnsi = Marshal.StringToHGlobalAnsi(tempHavokDataPath);
var loadoptions = stackalloc hkSerializeUtil.LoadOptions[1];
loadoptions->TypeInfoRegistry = hkBuiltinTypeRegistry.Instance()->GetTypeInfoRegistry();
loadoptions->ClassNameRegistry = hkBuiltinTypeRegistry.Instance()->GetClassNameRegistry();
loadoptions->Flags = new hkFlags<hkSerializeUtil.LoadOptionBits, int>
{
Storage = (int)hkSerializeUtil.LoadOptionBits.Default
};
hkResource* resource = null;
try
{
resource = hkSerializeUtil.LoadFromFile((byte*)tempHavokDataPathAnsi, null, loadoptions);
}
catch (SEHException ex)
{
_logger.LogError(ex, "SEH exception loading Havok file from {path} (hash={hash}). Native error code: 0x{code:X}",
tempHavokDataPath, hash, ex.ErrorCode);
return null;
}
if (resource == null)
{
_logger.LogDebug("Havok resource was null after loading from {path} (hash={hash})", tempHavokDataPath, hash);
return null;
}
if ((nint)resource == nint.Zero || !IsValidPointer((IntPtr)resource))
{
_logger.LogDebug("Havok resource pointer is invalid (hash={hash})", hash);
return null;
}
var rootLevelName = @"hkRootLevelContainer"u8;
fixed (byte* n1 = rootLevelName)
{
var container = (hkRootLevelContainer*)resource->GetContentsPointer(n1, hkBuiltinTypeRegistry.Instance()->GetTypeInfoRegistry());
if (container == null)
{
_logger.LogDebug("hkRootLevelContainer is null (hash={hash})", hash);
return null;
}
if ((nint)container == nint.Zero || !IsValidPointer((IntPtr)container))
{
_logger.LogDebug("hkRootLevelContainer pointer is invalid (hash={hash})", hash);
return null;
}
var animationName = @"hkaAnimationContainer"u8;
fixed (byte* n2 = animationName)
{
var animContainer = (hkaAnimationContainer*)container->findObjectByName(n2, null);
if (animContainer == null)
{
_logger.LogDebug("hkaAnimationContainer is null (hash={hash})", hash);
return null;
}
if ((nint)animContainer == nint.Zero || !IsValidPointer((IntPtr)animContainer))
{
_logger.LogDebug("hkaAnimationContainer pointer is invalid (hash={hash})", hash);
return null;
}
if (animContainer->Bindings.Length < 0 || animContainer->Bindings.Length > 10000)
{
_logger.LogDebug("Invalid bindings count {count} (hash={hash})", animContainer->Bindings.Length, hash);
return null;
}
for (int i = 0; i < animContainer->Bindings.Length; i++)
{
var binding = animContainer->Bindings[i].ptr;
if (binding == null)
continue;
if ((nint)binding == nint.Zero || !IsValidPointer((IntPtr)binding))
{
_logger.LogDebug("Skipping invalid binding at index {index} (hash={hash})", i, hash);
continue;
}
var rawSkel = binding->OriginalSkeletonName.String;
var skeletonKey = CanonicalizeSkeletonKey(rawSkel);
if (string.IsNullOrEmpty(skeletonKey))
continue;
var boneTransform = binding->TransformTrackToBoneIndices;
if (boneTransform.Length <= 0 || boneTransform.Length > 10000)
{
_logger.LogDebug("Invalid bone transform length {length} for skeleton {skel} (hash={hash})",
boneTransform.Length, skeletonKey, hash);
continue;
}
if (!tempSets.TryGetValue(skeletonKey, out var set))
{
set = [];
tempSets[skeletonKey] = set;
}
for (int boneIdx = 0; boneIdx < boneTransform.Length; boneIdx++)
{
var v = boneTransform[boneIdx];
if (v < 0 || v > ushort.MaxValue)
continue;
set.Add((ushort)v);
}
}
}
}
}
catch (SEHException ex)
{
_logger.LogError(ex, "SEH exception processing PAP file {hash} from {path}. Error code: 0x{code:X}",
hash, tempHavokDataPath, ex.ErrorCode);
return null;
}
catch (Exception ex)
{
_logger.LogError(ex, "Managed exception loading havok file {hash} from {path}", hash, tempHavokDataPath);
return null;
}
finally
{
if (tempHavokDataPathAnsi != IntPtr.Zero)
Marshal.FreeHGlobal(tempHavokDataPathAnsi);
int retryCount = 3;
while (retryCount > 0 && File.Exists(tempHavokDataPath))
{
try
{
File.Delete(tempHavokDataPath);
break;
}
catch (IOException ex)
{
retryCount--;
if (retryCount == 0)
{
_logger.LogDebug(ex, "Failed to delete temporary havok file after retries: {path}", tempHavokDataPath);
}
else
{
Thread.Sleep(50);
}
}
catch (Exception ex)
{
_logger.LogDebug(ex, "Unexpected error deleting temporary havok file: {path}", tempHavokDataPath);
break;
}
}
}
if (tempSets.Count == 0)
{
_logger.LogDebug("No bone sets found in PAP file (hash={hash})", hash);
return null;
}
var output = new Dictionary<string, List<ushort>>(tempSets.Count, StringComparer.OrdinalIgnoreCase);
foreach (var (key, set) in tempSets)
{
if (set.Count == 0) continue;
var list = set.ToList();
list.Sort();
output[key] = list;
}
if (output.Count == 0)
return null;
_configService.Current.BonesDictionary[hash] = output;
if (persistToConfig)
_configService.Save();
return output;
}
catch (Exception ex)
{
_logger.LogError(ex, "Outer exception reading PAP file (hash={hash})", hash);
if (havokPosition <= 0 || footerPosition <= havokPosition || footerPosition > fs.Length)
return null;
}
var sizeLong = (long)footerPosition - havokPosition;
if (sizeLong <= 8 || sizeLong > int.MaxValue)
return null;
var size = (int)sizeLong;
fs.Position = havokPosition;
var bytes = reader.ReadBytes(size);
return bytes.Length > 8 ? bytes : null;
}
private static bool IsValidPointer(IntPtr ptr)
public unsafe Dictionary<string, List<ushort>>? ParseHavokBytesOnFrameworkThread(
byte[] havokData,
string hash,
bool persistToConfig)
{
if (ptr == IntPtr.Zero)
return false;
var tempSets = new Dictionary<string, HashSet<ushort>>(StringComparer.OrdinalIgnoreCase);
var tempHkxPath = Path.Combine(Path.GetTempPath(), $"lightless_{Guid.NewGuid():N}.hkx");
IntPtr pathAnsi = IntPtr.Zero;
try
{
_ = Marshal.ReadByte(ptr);
return true;
}
catch
{
return false;
}
}
File.WriteAllBytes(tempHkxPath, havokData);
pathAnsi = Marshal.StringToHGlobalAnsi(tempHkxPath);
hkSerializeUtil.LoadOptions loadOptions = default;
loadOptions.TypeInfoRegistry = hkBuiltinTypeRegistry.Instance()->GetTypeInfoRegistry();
loadOptions.ClassNameRegistry = hkBuiltinTypeRegistry.Instance()->GetClassNameRegistry();
loadOptions.Flags = new hkFlags<hkSerializeUtil.LoadOptionBits, int>
{
Storage = (int)hkSerializeUtil.LoadOptionBits.Default
};
hkSerializeUtil.LoadOptions* pOpts = &loadOptions;
var resource = hkSerializeUtil.LoadFromFile((byte*)pathAnsi, errorResult: null, pOpts);
if (resource == null)
return null;
var rootLevelName = @"hkRootLevelContainer"u8;
fixed (byte* n1 = rootLevelName)
{
var container = (hkRootLevelContainer*)resource->GetContentsPointer(
n1, hkBuiltinTypeRegistry.Instance()->GetTypeInfoRegistry());
if (container == null) return null;
var animationName = @"hkaAnimationContainer"u8;
fixed (byte* n2 = animationName)
{
var animContainer = (hkaAnimationContainer*)container->findObjectByName(n2, null);
if (animContainer == null) return null;
for (int i = 0; i < animContainer->Bindings.Length; i++)
{
var binding = animContainer->Bindings[i].ptr;
if (binding == null) continue;
var rawSkel = binding->OriginalSkeletonName.String;
var skeletonKey = CanonicalizeSkeletonKey(rawSkel);
if (string.IsNullOrEmpty(skeletonKey)) continue;
var boneTransform = binding->TransformTrackToBoneIndices;
if (boneTransform.Length <= 0) continue;
if (!tempSets.TryGetValue(skeletonKey, out var set))
tempSets[skeletonKey] = set = [];
for (int boneIdx = 0; boneIdx < boneTransform.Length; boneIdx++)
{
var v = boneTransform[boneIdx];
if (v < 0) continue;
set.Add((ushort)v);
}
}
}
}
}
finally
{
if (pathAnsi != IntPtr.Zero)
Marshal.FreeHGlobal(pathAnsi);
try { if (File.Exists(tempHkxPath)) File.Delete(tempHkxPath); }
catch { /* ignore */ }
}
if (tempSets.Count == 0) return null;
var output = new Dictionary<string, List<ushort>>(tempSets.Count, StringComparer.OrdinalIgnoreCase);
foreach (var (key, set) in tempSets)
{
if (set.Count == 0) continue;
var list = set.ToList();
list.Sort();
output[key] = list;
}
if (output.Count == 0) return null;
_configService.Current.BonesDictionary[hash] = output;
if (persistToConfig) _configService.Save();
return output;
}
public static string CanonicalizeSkeletonKey(string? raw)
{
@@ -517,41 +339,56 @@ public sealed partial class XivDataAnalyzer
if (mode == AnimationValidationMode.Unsafe)
return true;
var papBuckets = papBoneIndices.Keys
.Select(CanonicalizeSkeletonKey)
.Where(k => !string.IsNullOrEmpty(k))
.Distinct(StringComparer.OrdinalIgnoreCase)
.ToList();
var papByBucket = new Dictionary<string, List<ushort>>(StringComparer.OrdinalIgnoreCase);
if (papBuckets.Count == 0)
foreach (var (rawKey, list) in papBoneIndices)
{
var key = CanonicalizeSkeletonKey(rawKey);
if (string.IsNullOrEmpty(key))
continue;
if (string.Equals(key, "skeleton", StringComparison.OrdinalIgnoreCase))
key = "__any__";
if (!papByBucket.TryGetValue(key, out var acc))
papByBucket[key] = acc = [];
if (list is { Count: > 0 })
acc.AddRange(list);
}
foreach (var k in papByBucket.Keys.ToList())
papByBucket[k] = papByBucket[k].Distinct().ToList();
if (papByBucket.Count == 0)
{
reason = "No skeleton bucket bindings found in the PAP";
return false;
}
if (mode == AnimationValidationMode.Safe)
static bool AllIndicesOk(
HashSet<ushort> available,
List<ushort> indices,
bool papLikelyOneBased,
bool allowOneBasedShift,
bool allowNeighborTolerance,
out ushort missing)
{
if (papBuckets.Any(b => localBoneSets.ContainsKey(b)))
return true;
reason = $"No matching skeleton bucket between PAP [{string.Join(", ", papBuckets)}] and local [{string.Join(", ", localBoneSets.Keys.Order())}].";
return false;
}
foreach (var bucket in papBuckets)
{
if (!localBoneSets.TryGetValue(bucket, out var available))
foreach (var idx in indices)
{
reason = $"Missing skeleton bucket '{bucket}' on local actor.";
return false;
if (!ContainsIndexCompat(available, idx, papLikelyOneBased, allowOneBasedShift, allowNeighborTolerance))
{
missing = idx;
return false;
}
}
var indices = papBoneIndices
.Where(kvp => string.Equals(CanonicalizeSkeletonKey(kvp.Key), bucket, StringComparison.OrdinalIgnoreCase))
.SelectMany(kvp => kvp.Value ?? Enumerable.Empty<ushort>())
.Distinct()
.ToList();
missing = 0;
return true;
}
foreach (var (bucket, indices) in papByBucket)
{
if (indices.Count == 0)
continue;
@@ -565,14 +402,32 @@ public sealed partial class XivDataAnalyzer
}
bool papLikelyOneBased = allowOneBasedShift && (min == 1) && has1 && !has0;
foreach (var idx in indices)
if (string.Equals(bucket, "__any__", StringComparison.OrdinalIgnoreCase))
{
if (!ContainsIndexCompat(available, idx, papLikelyOneBased, allowOneBasedShift, allowNeighborTolerance))
foreach (var (lk, ls) in localBoneSets)
{
reason = $"No compatible local skeleton for PAP '{bucket}': missing bone index {idx}.";
return false;
if (AllIndicesOk(ls, indices, papLikelyOneBased, allowOneBasedShift, allowNeighborTolerance, out _))
goto nextBucket;
}
reason = $"No compatible local skeleton bucket for generic PAP skeleton '{bucket}'. Local buckets: {string.Join(", ", localBoneSets.Keys)}";
return false;
}
if (!localBoneSets.TryGetValue(bucket, out var available))
{
reason = $"Missing skeleton bucket '{bucket}' on local actor.";
return false;
}
if (!AllIndicesOk(available, indices, papLikelyOneBased, allowOneBasedShift, allowNeighborTolerance, out var missing))
{
reason = $"No compatible local skeleton for PAP '{bucket}': missing bone index {missing}.";
return false;
}
nextBucket:
;
}
return true;
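
The XivDataAnalyzer rewrite reads most clearly as two new pieces: a static ReadHavokBytesFromPap that slices the embedded Havok blob out of the PAP container with no temp files or logging, and ParseHavokBytesOnFrameworkThread, which writes those bytes to a temporary .hkx, loads it through hkSerializeUtil on the framework thread, collects bone indices per canonical skeleton key, and caches the result in BonesDictionary. A consolidated sketch of the reader, assembled from the added lines above; the header field meanings follow the inline comments in the diff and should be read as assumptions rather than a verified PAP specification:

using System.IO;

public static class PapReaderSketch
{
    // Returns the raw Havok segment of a .pap file, or null if the header does not
    // look like a human animation or the offsets are implausible.
    public static byte[]? ReadHavokBytesFromPap(string papPath)
    {
        using var fs = File.Open(papPath, FileMode.Open, FileAccess.Read, FileShare.Read);
        using var reader = new BinaryReader(fs);

        _ = reader.ReadInt32();               // ignored
        _ = reader.ReadInt32();               // ignored
        _ = reader.ReadInt16();               // animation count
        _ = reader.ReadInt16();               // model id

        var type = reader.ReadByte();         // type (0 = human)
        if (type != 0)
            return null;                      // not a human skeleton

        _ = reader.ReadByte();                // variant
        _ = reader.ReadInt32();               // ignored

        var havokPosition = reader.ReadInt32();
        var footerPosition = reader.ReadInt32();

        if (havokPosition <= 0 || footerPosition <= havokPosition || footerPosition > fs.Length)
            return null;

        var sizeLong = (long)footerPosition - havokPosition;
        if (sizeLong <= 8 || sizeLong > int.MaxValue)
            return null;

        fs.Position = havokPosition;
        var bytes = reader.ReadBytes((int)sizeLong);
        return bytes.Length > 8 ? bytes : null;
    }
}

The final hunks rework IsPapCompatible along the same lines: PAP skeleton keys are canonicalized into buckets, the generic "skeleton" key is mapped to an "__any__" bucket that may match any local skeleton set, and a local AllIndicesOk helper reports the first missing bone index so the rejection reason names the exact index rather than failing opaquely.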