From 30717ba200fdedbd4cb6725187ba7e6b8f8e8abc Mon Sep 17 00:00:00 2001
From: cake
Date: Mon, 5 Jan 2026 00:45:14 +0000
Subject: [PATCH] Merged Cake and Abel branches into 2.0.3 (#131)

Co-authored-by: azyges
Co-authored-by: cake
Co-authored-by: defnotken
Reviewed-on: https://git.lightless-sync.org/Lightless-Sync/LightlessClient/pulls/131
---
 LightlessSync/FileCache/CacheMonitor.cs | 215 ++-
 LightlessSync/FileCache/FileCacheManager.cs | 158 +-
 .../FileCache/TransientResourceManager.cs | 90 +-
 .../Interop/BlockedCharacterHandler.cs | 24 +-
 LightlessSync/Interop/Ipc/IpcManager.cs | 13 +-
 .../Configurations/LightlessConfig.cs | 6 +
 .../Configurations/PlayerPerformanceConfig.cs | 11 +
 .../Configurations/XivDataStorageConfig.cs | 1 +
 LightlessSync/LightlessPlugin.cs | 15 +-
 .../Factories/AnimationValidationMode.cs | 9 +
 .../Factories/FileDownloadManagerFactory.cs | 5 +
 .../PlayerData/Factories/PlayerDataFactory.cs | 674 ++++---
 .../Pairs/IPairPerformanceSubject.cs | 1 +
 LightlessSync/PlayerData/Pairs/Pair.cs | 1 +
 .../PlayerData/Pairs/PairCoordinator.Users.cs | 1 +
 .../PlayerData/Pairs/PairHandlerAdapter.cs | 873 ++++++++--
 .../Pairs/PairHandlerAdapterFactory.cs | 27 +-
 .../PlayerData/Pairs/PairHandlerRegistry.cs | 2 +-
 LightlessSync/PlayerData/Pairs/PairLedger.cs | 3 +-
 .../Pairs/PairPerformanceMetricsCache.cs | 3 +-
 .../Pairs/VisibleUserDataDistributor.cs | 18 +-
 LightlessSync/Plugin.cs | 7 +-
 .../ActorTracking/ActorObjectService.cs | 102 +-
 LightlessSync/Services/CharacterAnalyzer.cs | 2 +-
 LightlessSync/Services/DalamudUtilService.cs | 40 +-
 LightlessSync/Services/Mediator/Messages.cs | 5 +-
 .../Services/ModelDecimation/MdlDecimator.cs | 1462 ++++++++++++++++
 .../ModelDecimation/ModelDecimationService.cs | 381 ++++
 .../Services/PlayerPerformanceService.cs | 44 +-
 .../TextureDownscaleService.cs | 25 +-
 LightlessSync/Services/XivDataAnalyzer.cs | 470 ++++-
 .../Algorithms/DecimationAlgorithm.cs | 169 ++
 .../FastQuadricMeshSimplification.cs | 1549 +++++++++++++++++
 .../ThirdParty/MeshDecimator/BoneWeight.cs | 249 +++
 .../Collections/ResizableArray.cs | 179 ++
 .../MeshDecimator/Collections/UVChannels.cs | 79 +
 .../ThirdParty/MeshDecimator/LICENSE.md | 21 +
 .../MeshDecimator/Math/MathHelper.cs | 286 +++
 .../MeshDecimator/Math/SymmetricMatrix.cs | 303 ++++
 .../ThirdParty/MeshDecimator/Math/Vector2.cs | 425 +++++
 .../ThirdParty/MeshDecimator/Math/Vector2d.cs | 425 +++++
 .../ThirdParty/MeshDecimator/Math/Vector2i.cs | 348 ++++
 .../ThirdParty/MeshDecimator/Math/Vector3.cs | 494 ++++++
 .../ThirdParty/MeshDecimator/Math/Vector3d.cs | 481 +++++
 .../ThirdParty/MeshDecimator/Math/Vector3i.cs | 368 ++++
 .../ThirdParty/MeshDecimator/Math/Vector4.cs | 467 +++++
 .../ThirdParty/MeshDecimator/Math/Vector4d.cs | 467 +++++
 .../ThirdParty/MeshDecimator/Math/Vector4i.cs | 388 +++++
 .../ThirdParty/MeshDecimator/Mesh.cs | 955 ++++++++++
 .../MeshDecimator/MeshDecimation.cs | 180 ++
 LightlessSync/UI/CompactUI.cs | 5 +
 LightlessSync/UI/Components/DrawFolderTag.cs | 1 +
 LightlessSync/UI/Components/DrawUserPair.cs | 16 +-
 LightlessSync/UI/DataAnalysisUi.cs | 559 +++++-
 LightlessSync/UI/DownloadUi.cs | 82 +-
 LightlessSync/UI/DrawEntityFactory.cs | 1 +
 LightlessSync/UI/DtrEntry.cs | 13 +-
 LightlessSync/UI/Handlers/IdDisplayHandler.cs | 4 +-
 LightlessSync/UI/Models/PairUiEntry.cs | 1 +
 .../UI/Models/TextureFormatSortMode.cs | 8 +
 .../UI/Models/VisiblePairSortMode.cs | 1 +
 LightlessSync/UI/SettingsUi.cs | 379 +++-
 LightlessSync/UI/ZoneChatUi.cs | 14 +-
 LightlessSync/Utils/VariousExtensions.cs | 8 +-
 .../WebAPI/Files/FileDownloadManager.cs | 383 ++--
 .../WebAPI/Files/Models/DownloadStatus.cs | 3 +-
 .../WebAPI/Files/Models/FileDownloadStatus.cs | 50 +-
 67 files changed, 13247 insertions(+), 802 deletions(-)
 create mode 100644 LightlessSync/PlayerData/Factories/AnimationValidationMode.cs
 create mode 100644 LightlessSync/Services/ModelDecimation/MdlDecimator.cs
 create mode 100644 LightlessSync/Services/ModelDecimation/ModelDecimationService.cs
 create mode 100644 LightlessSync/ThirdParty/MeshDecimator/Algorithms/DecimationAlgorithm.cs
 create mode 100644 LightlessSync/ThirdParty/MeshDecimator/Algorithms/FastQuadricMeshSimplification.cs
 create mode 100644 LightlessSync/ThirdParty/MeshDecimator/BoneWeight.cs
 create mode 100644 LightlessSync/ThirdParty/MeshDecimator/Collections/ResizableArray.cs
 create mode 100644 LightlessSync/ThirdParty/MeshDecimator/Collections/UVChannels.cs
 create mode 100644 LightlessSync/ThirdParty/MeshDecimator/LICENSE.md
 create mode 100644 LightlessSync/ThirdParty/MeshDecimator/Math/MathHelper.cs
 create mode 100644 LightlessSync/ThirdParty/MeshDecimator/Math/SymmetricMatrix.cs
 create mode 100644 LightlessSync/ThirdParty/MeshDecimator/Math/Vector2.cs
 create mode 100644 LightlessSync/ThirdParty/MeshDecimator/Math/Vector2d.cs
 create mode 100644 LightlessSync/ThirdParty/MeshDecimator/Math/Vector2i.cs
 create mode 100644 LightlessSync/ThirdParty/MeshDecimator/Math/Vector3.cs
 create mode 100644 LightlessSync/ThirdParty/MeshDecimator/Math/Vector3d.cs
 create mode 100644 LightlessSync/ThirdParty/MeshDecimator/Math/Vector3i.cs
 create mode 100644 LightlessSync/ThirdParty/MeshDecimator/Math/Vector4.cs
 create mode 100644 LightlessSync/ThirdParty/MeshDecimator/Math/Vector4d.cs
 create mode 100644 LightlessSync/ThirdParty/MeshDecimator/Math/Vector4i.cs
 create mode 100644 LightlessSync/ThirdParty/MeshDecimator/Mesh.cs
 create mode 100644 LightlessSync/ThirdParty/MeshDecimator/MeshDecimation.cs
 create mode 100644 LightlessSync/UI/Models/TextureFormatSortMode.cs
diff --git a/LightlessSync/FileCache/CacheMonitor.cs b/LightlessSync/FileCache/CacheMonitor.cs
index fde9b6d..165a58c 100644
--- a/LightlessSync/FileCache/CacheMonitor.cs
+++ b/LightlessSync/FileCache/CacheMonitor.cs
@@ -103,6 +103,7 @@ public sealed class CacheMonitor : DisposableMediatorSubscriberBase } record WatcherChange(WatcherChangeTypes ChangeType, string?
OldPath = null); + private readonly record struct CacheEvictionCandidate(string FullPath, long Size, DateTime LastAccessTime); private readonly Dictionary _watcherChanges = new(StringComparer.OrdinalIgnoreCase); private readonly Dictionary _lightlessChanges = new(StringComparer.OrdinalIgnoreCase); @@ -441,116 +442,40 @@ public sealed class CacheMonitor : DisposableMediatorSubscriberBase Logger.LogWarning(ex, "Could not determine drive size for storage folder {folder}", _configService.Current.CacheFolder); } - var files = Directory.EnumerateFiles(_configService.Current.CacheFolder) - .Select(f => new FileInfo(f)) - .OrderBy(f => f.LastAccessTime) - .ToList(); - + var cacheFolder = _configService.Current.CacheFolder; + var candidates = new List(); long totalSize = 0; - - foreach (var f in files) - { - token.ThrowIfCancellationRequested(); - - try - { - long size = 0; - - if (!isWine) - { - try - { - size = _fileCompactor.GetFileSizeOnDisk(f); - } - catch (Exception ex) - { - Logger.LogTrace(ex, "GetFileSizeOnDisk failed for {file}, using fallback length", f.FullName); - size = f.Length; - } - } - else - { - size = f.Length; - } - - totalSize += size; - } - catch (Exception ex) - { - Logger.LogTrace(ex, "Error getting size for {file}", f.FullName); - } - } + totalSize += AddFolderCandidates(cacheFolder, candidates, token, isWine); + totalSize += AddFolderCandidates(Path.Combine(cacheFolder, "downscaled"), candidates, token, isWine); + totalSize += AddFolderCandidates(Path.Combine(cacheFolder, "decimated"), candidates, token, isWine); FileCacheSize = totalSize; - if (Directory.Exists(_configService.Current.CacheFolder + "/downscaled")) - { - var filesDownscaled = Directory.EnumerateFiles(_configService.Current.CacheFolder + "/downscaled").Select(f => new FileInfo(f)).OrderBy(f => f.LastAccessTime).ToList(); - - long totalSizeDownscaled = 0; - - foreach (var f in filesDownscaled) - { - token.ThrowIfCancellationRequested(); - - try - { - long size = 0; - - if (!isWine) - { - try - { - size = _fileCompactor.GetFileSizeOnDisk(f); - } - catch (Exception ex) - { - Logger.LogTrace(ex, "GetFileSizeOnDisk failed for {file}, using fallback length", f.FullName); - size = f.Length; - } - } - else - { - size = f.Length; - } - - totalSizeDownscaled += size; - } - catch (Exception ex) - { - Logger.LogTrace(ex, "Error getting size for {file}", f.FullName); - } - } - - FileCacheSize = (totalSize + totalSizeDownscaled); - } - else - { - FileCacheSize = totalSize; - } - var maxCacheInBytes = (long)(_configService.Current.MaxLocalCacheInGiB * 1024d * 1024d * 1024d); if (FileCacheSize < maxCacheInBytes) return; var maxCacheBuffer = maxCacheInBytes * 0.05d; - while (FileCacheSize > maxCacheInBytes - (long)maxCacheBuffer && files.Count > 0) + candidates.Sort(static (a, b) => a.LastAccessTime.CompareTo(b.LastAccessTime)); + + var evictionTarget = maxCacheInBytes - (long)maxCacheBuffer; + var index = 0; + while (FileCacheSize > evictionTarget && index < candidates.Count) { - var oldestFile = files[0]; + var oldestFile = candidates[index]; try { - long fileSize = oldestFile.Length; - File.Delete(oldestFile.FullName); - FileCacheSize -= fileSize; + EvictCacheCandidate(oldestFile, cacheFolder); + FileCacheSize -= oldestFile.Size; } catch (Exception ex) { - Logger.LogTrace(ex, "Failed to delete old file {file}", oldestFile.FullName); + Logger.LogTrace(ex, "Failed to delete old file {file}", oldestFile.FullPath); } - files.RemoveAt(0); + index++; } } @@ -559,6 +484,114 @@ public sealed class CacheMonitor : 
DisposableMediatorSubscriberBase HaltScanLocks.Clear(); } + private long AddFolderCandidates(string directory, List candidates, CancellationToken token, bool isWine) + { + if (!Directory.Exists(directory)) + { + return 0; + } + + long totalSize = 0; + foreach (var path in Directory.EnumerateFiles(directory)) + { + token.ThrowIfCancellationRequested(); + + try + { + var file = new FileInfo(path); + var size = GetFileSizeOnDisk(file, isWine); + totalSize += size; + candidates.Add(new CacheEvictionCandidate(file.FullName, size, file.LastAccessTime)); + } + catch (Exception ex) + { + Logger.LogTrace(ex, "Error getting size for {file}", path); + } + } + + return totalSize; + } + + private long GetFileSizeOnDisk(FileInfo file, bool isWine) + { + if (isWine) + { + return file.Length; + } + + try + { + return _fileCompactor.GetFileSizeOnDisk(file); + } + catch (Exception ex) + { + Logger.LogTrace(ex, "GetFileSizeOnDisk failed for {file}, using fallback length", file.FullName); + return file.Length; + } + } + + private void EvictCacheCandidate(CacheEvictionCandidate candidate, string cacheFolder) + { + if (TryGetCacheHashAndPrefixedPath(candidate.FullPath, cacheFolder, out var hash, out var prefixedPath)) + { + _fileDbManager.RemoveHashedFile(hash, prefixedPath); + } + + try + { + if (File.Exists(candidate.FullPath)) + { + File.Delete(candidate.FullPath); + } + } + catch (Exception ex) + { + Logger.LogTrace(ex, "Failed to delete old file {file}", candidate.FullPath); + } + } + + private static bool TryGetCacheHashAndPrefixedPath(string filePath, string cacheFolder, out string hash, out string prefixedPath) + { + hash = string.Empty; + prefixedPath = string.Empty; + + if (string.IsNullOrEmpty(cacheFolder)) + { + return false; + } + + var fileName = Path.GetFileNameWithoutExtension(filePath); + if (string.IsNullOrEmpty(fileName) || !IsSha1Hash(fileName)) + { + return false; + } + + var relative = Path.GetRelativePath(cacheFolder, filePath) + .Replace(Path.AltDirectorySeparatorChar, Path.DirectorySeparatorChar); + var sanitizedRelative = relative.TrimStart(Path.DirectorySeparatorChar); + prefixedPath = Path.Combine(FileCacheManager.CachePrefix, sanitizedRelative); + hash = fileName; + return true; + } + + private static bool IsSha1Hash(string value) + { + if (value.Length != 40) + { + return false; + } + + foreach (var ch in value) + { + if (!Uri.IsHexDigit(ch)) + { + return false; + } + } + + return true; + } + public void ResumeScan(string source) { if (!HaltScanLocks.ContainsKey(source)) HaltScanLocks[source] = 0; diff --git a/LightlessSync/FileCache/FileCacheManager.cs b/LightlessSync/FileCache/FileCacheManager.cs index b0becf3..b98b441 100644 --- a/LightlessSync/FileCache/FileCacheManager.cs +++ b/LightlessSync/FileCache/FileCacheManager.cs @@ -27,6 +27,7 @@ public sealed class FileCacheManager : IHostedService private readonly ConcurrentDictionary> _fileCaches = new(StringComparer.Ordinal); private readonly ConcurrentDictionary _fileCachesByPrefixedPath = new(StringComparer.OrdinalIgnoreCase); private readonly SemaphoreSlim _getCachesByPathsSemaphore = new(1, 1); + private readonly SemaphoreSlim _evictSemaphore = new(1, 1); private readonly Lock _fileWriteLock = new(); private readonly IpcManager _ipcManager; private readonly ILogger _logger; @@ -226,13 +227,23 @@ public sealed class FileCacheManager : IHostedService var compressed = LZ4Wrapper.WrapHC(raw, 0, raw.Length); var tmpPath = compressedPath + ".tmp"; - await File.WriteAllBytesAsync(tmpPath, compressed, token).ConfigureAwait(false); - 
File.Move(tmpPath, compressedPath, overwrite: true); + try + { + await File.WriteAllBytesAsync(tmpPath, compressed, token).ConfigureAwait(false); + File.Move(tmpPath, compressedPath, overwrite: true); + } + finally + { + try { if (File.Exists(tmpPath)) File.Delete(tmpPath); } catch { /* ignore */ } + } - var compressedSize = compressed.LongLength; + var compressedSize = new FileInfo(compressedPath).Length; SetSizeInfo(hash, originalSize, compressedSize); UpdateEntitiesSizes(hash, originalSize, compressedSize); + var maxBytes = GiBToBytes(_configService.Current.MaxLocalCacheInGiB); + await EnforceCacheLimitAsync(maxBytes, token).ConfigureAwait(false); + return compressed; } finally @@ -280,6 +291,26 @@ public sealed class FileCacheManager : IHostedService return CreateFileEntity(cacheFolder, CachePrefix, fi); } + public FileCacheEntity? CreateCacheEntryWithKnownHash(string path, string hash) + { + if (string.IsNullOrWhiteSpace(hash)) + { + return CreateCacheEntry(path); + } + + FileInfo fi = new(path); + if (!fi.Exists) return null; + _logger.LogTrace("Creating cache entry for {path} using provided hash", path); + var cacheFolder = _configService.Current.CacheFolder; + if (string.IsNullOrEmpty(cacheFolder)) return null; + if (!TryBuildPrefixedPath(fi.FullName, cacheFolder, CachePrefix, out var prefixedPath, out _)) + { + return null; + } + + return CreateFileCacheEntity(fi, prefixedPath, hash); + } + public FileCacheEntity? CreateFileEntry(string path) { FileInfo fi = new(path); @@ -562,9 +593,10 @@ public sealed class FileCacheManager : IHostedService } } - public void RemoveHashedFile(string hash, string prefixedFilePath) + public void RemoveHashedFile(string hash, string prefixedFilePath, bool removeDerivedFiles = true) { var normalizedPath = NormalizePrefixedPathKey(prefixedFilePath); + var removedHash = false; if (_fileCaches.TryGetValue(hash, out var caches)) { @@ -577,11 +609,16 @@ public sealed class FileCacheManager : IHostedService if (caches.IsEmpty) { - _fileCaches.TryRemove(hash, out _); + removedHash = _fileCaches.TryRemove(hash, out _); } } _fileCachesByPrefixedPath.TryRemove(normalizedPath, out _); + + if (removeDerivedFiles && removedHash) + { + RemoveDerivedCacheFiles(hash); + } } public void UpdateHashedFile(FileCacheEntity fileCache, bool computeProperties = true) @@ -597,7 +634,8 @@ public sealed class FileCacheManager : IHostedService fileCache.Hash = Crypto.ComputeFileHash(fileCache.ResolvedFilepath, Crypto.HashAlgo.Sha1); fileCache.LastModifiedDateTicks = fi.LastWriteTimeUtc.Ticks.ToString(CultureInfo.InvariantCulture); } - RemoveHashedFile(oldHash, prefixedPath); + var removeDerivedFiles = !string.Equals(oldHash, fileCache.Hash, StringComparison.OrdinalIgnoreCase); + RemoveHashedFile(oldHash, prefixedPath, removeDerivedFiles); AddHashedFile(fileCache); } @@ -747,7 +785,7 @@ public sealed class FileCacheManager : IHostedService { try { - RemoveHashedFile(fileCache.Hash, fileCache.PrefixedFilePath); + RemoveHashedFile(fileCache.Hash, fileCache.PrefixedFilePath, removeDerivedFiles: false); var extensionPath = fileCache.ResolvedFilepath.ToUpper(CultureInfo.InvariantCulture) + "." + ext; File.Move(fileCache.ResolvedFilepath, extensionPath, overwrite: true); var newHashedEntity = new FileCacheEntity(fileCache.Hash, fileCache.PrefixedFilePath + "." 
+ ext, DateTime.UtcNow.Ticks.ToString(CultureInfo.InvariantCulture)); @@ -764,6 +802,33 @@ public sealed class FileCacheManager : IHostedService } } + private void RemoveDerivedCacheFiles(string hash) + { + var cacheFolder = _configService.Current.CacheFolder; + if (string.IsNullOrWhiteSpace(cacheFolder)) + { + return; + } + + TryDeleteDerivedCacheFile(Path.Combine(cacheFolder, "downscaled", $"{hash}.tex")); + TryDeleteDerivedCacheFile(Path.Combine(cacheFolder, "decimated", $"{hash}.mdl")); + } + + private void TryDeleteDerivedCacheFile(string path) + { + try + { + if (File.Exists(path)) + { + File.Delete(path); + } + } + catch (Exception ex) + { + _logger.LogTrace(ex, "Failed to delete derived cache file {path}", path); + } + } + private void AddHashedFile(FileCacheEntity fileCache) { var normalizedPath = NormalizePrefixedPathKey(fileCache.PrefixedFilePath); @@ -877,6 +942,83 @@ public sealed class FileCacheManager : IHostedService }, token).ConfigureAwait(false); } + private async Task EnforceCacheLimitAsync(long maxBytes, CancellationToken token) + { + if (string.IsNullOrWhiteSpace(CacheFolder) || maxBytes <= 0) return; + + await _evictSemaphore.WaitAsync(token).ConfigureAwait(false); + try + { + Directory.CreateDirectory(CacheFolder); + + foreach (var tmp in Directory.EnumerateFiles(CacheFolder, "*" + _compressedCacheExtension + ".tmp")) + { + try { File.Delete(tmp); } catch { /* ignore */ } + } + + var files = Directory.EnumerateFiles(CacheFolder, "*" + _compressedCacheExtension, SearchOption.TopDirectoryOnly) + .Select(p => new FileInfo(p)) + .Where(fi => fi.Exists) + .OrderBy(fi => fi.LastWriteTimeUtc) + .ToList(); + + long total = files.Sum(f => f.Length); + if (total <= maxBytes) return; + + foreach (var fi in files) + { + token.ThrowIfCancellationRequested(); + if (total <= maxBytes) break; + + var hash = Path.GetFileNameWithoutExtension(fi.Name); + + try + { + var len = fi.Length; + fi.Delete(); + total -= len; + _sizeCache.TryRemove(hash, out _); + } + catch (Exception ex) + { + _logger.LogWarning(ex, "Failed to evict cache file {file}", fi.FullName); + } + } + } + finally + { + _evictSemaphore.Release(); + } + } + + private static long GiBToBytes(double gib) + { + if (double.IsNaN(gib) || double.IsInfinity(gib) || gib <= 0) + return 0; + + var bytes = gib * 1024d * 1024d * 1024d; + + if (bytes >= long.MaxValue) return long.MaxValue; + + return (long)Math.Round(bytes, MidpointRounding.AwayFromZero); + } + + private void CleanupOrphanCompressedCache() + { + if (string.IsNullOrWhiteSpace(CacheFolder) || !Directory.Exists(CacheFolder)) + return; + + foreach (var path in Directory.EnumerateFiles(CacheFolder, "*" + _compressedCacheExtension)) + { + var hash = Path.GetFileNameWithoutExtension(path); + if (!_fileCaches.ContainsKey(hash)) + { + try { File.Delete(path); } + catch (Exception ex) { _logger.LogWarning(ex, "Failed deleting orphan {file}", path); } + } + } + } + public async Task StartAsync(CancellationToken cancellationToken) { _logger.LogInformation("Starting FileCacheManager"); @@ -1060,6 +1202,8 @@ public sealed class FileCacheManager : IHostedService { await WriteOutFullCsvAsync(cancellationToken).ConfigureAwait(false); } + + CleanupOrphanCompressedCache(); } _logger.LogInformation("Started FileCacheManager"); diff --git a/LightlessSync/FileCache/TransientResourceManager.cs b/LightlessSync/FileCache/TransientResourceManager.cs index a8b467e..11073dc 100644 --- a/LightlessSync/FileCache/TransientResourceManager.cs +++ b/LightlessSync/FileCache/TransientResourceManager.cs 
@@ -297,7 +297,7 @@ public sealed class TransientResourceManager : DisposableMediatorSubscriberBase private void DalamudUtil_FrameworkUpdate() { - RefreshPlayerRelatedAddressMap(); + _ = Task.Run(() => RefreshPlayerRelatedAddressMap()); lock (_cacheAdditionLock) { @@ -306,20 +306,64 @@ public sealed class TransientResourceManager : DisposableMediatorSubscriberBase if (_lastClassJobId != _dalamudUtil.ClassJobId) { - _lastClassJobId = _dalamudUtil.ClassJobId; - if (SemiTransientResources.TryGetValue(ObjectKind.Pet, out HashSet? value)) - { - value?.Clear(); - } - - PlayerConfig.JobSpecificCache.TryGetValue(_dalamudUtil.ClassJobId, out var jobSpecificData); - SemiTransientResources[ObjectKind.Player] = PlayerConfig.GlobalPersistentCache.Concat(jobSpecificData ?? []).ToHashSet(StringComparer.OrdinalIgnoreCase); - PlayerConfig.JobSpecificPetCache.TryGetValue(_dalamudUtil.ClassJobId, out var petSpecificData); - SemiTransientResources[ObjectKind.Pet] = new HashSet( - petSpecificData ?? [], - StringComparer.OrdinalIgnoreCase); + UpdateClassJobCache(); } + CleanupAbsentObjects(); + } + + private void RefreshPlayerRelatedAddressMap() + { + var tempMap = new ConcurrentDictionary(); + var updatedFrameAddresses = new ConcurrentDictionary(); + + lock (_playerRelatedLock) + { + foreach (var handler in _playerRelatedPointers) + { + var address = (nint)handler.Address; + if (address != nint.Zero) + { + tempMap[address] = handler; + updatedFrameAddresses[address] = handler.ObjectKind; + } + } + } + + _playerRelatedByAddress.Clear(); + foreach (var kvp in tempMap) + { + _playerRelatedByAddress[kvp.Key] = kvp.Value; + } + + _cachedFrameAddresses.Clear(); + foreach (var kvp in updatedFrameAddresses) + { + _cachedFrameAddresses[kvp.Key] = kvp.Value; + } + } + + private void UpdateClassJobCache() + { + _lastClassJobId = _dalamudUtil.ClassJobId; + if (SemiTransientResources.TryGetValue(ObjectKind.Pet, out HashSet? value)) + { + value?.Clear(); + } + + PlayerConfig.JobSpecificCache.TryGetValue(_dalamudUtil.ClassJobId, out var jobSpecificData); + SemiTransientResources[ObjectKind.Player] = PlayerConfig.GlobalPersistentCache + .Concat(jobSpecificData ?? []) + .ToHashSet(StringComparer.OrdinalIgnoreCase); + + PlayerConfig.JobSpecificPetCache.TryGetValue(_dalamudUtil.ClassJobId, out var petSpecificData); + SemiTransientResources[ObjectKind.Pet] = new HashSet( + petSpecificData ?? 
[], + StringComparer.OrdinalIgnoreCase); + } + + private void CleanupAbsentObjects() + { foreach (var kind in Enum.GetValues(typeof(ObjectKind)).Cast()) { if (!_cachedFrameAddresses.Any(k => k.Value == kind) && TransientResources.Remove(kind, out _)) @@ -349,26 +393,6 @@ public sealed class TransientResourceManager : DisposableMediatorSubscriberBase _semiTransientResources = null; } - private void RefreshPlayerRelatedAddressMap() - { - _playerRelatedByAddress.Clear(); - var updatedFrameAddresses = new ConcurrentDictionary(); - lock (_playerRelatedLock) - { - foreach (var handler in _playerRelatedPointers) - { - var address = (nint)handler.Address; - if (address != nint.Zero) - { - _playerRelatedByAddress[address] = handler; - updatedFrameAddresses[address] = handler.ObjectKind; - } - } - } - - _cachedFrameAddresses = updatedFrameAddresses; - } - private void HandleActorTracked(ActorObjectService.ActorDescriptor descriptor) { if (descriptor.IsInGpose) diff --git a/LightlessSync/Interop/BlockedCharacterHandler.cs b/LightlessSync/Interop/BlockedCharacterHandler.cs index 0ad3c80..675bf3b 100644 --- a/LightlessSync/Interop/BlockedCharacterHandler.cs +++ b/LightlessSync/Interop/BlockedCharacterHandler.cs @@ -1,4 +1,5 @@ using Dalamud.Plugin.Services; +using Dalamud.Game.ClientState.Objects.SubKinds; using FFXIVClientStructs.FFXIV.Client.Game.Character; using FFXIVClientStructs.FFXIV.Client.UI.Info; using Microsoft.Extensions.Logging; @@ -11,24 +12,35 @@ public unsafe class BlockedCharacterHandler private readonly Dictionary _blockedCharacterCache = new(); private readonly ILogger _logger; + private readonly IObjectTable _objectTable; - public BlockedCharacterHandler(ILogger logger, IGameInteropProvider gameInteropProvider) + public BlockedCharacterHandler(ILogger logger, IGameInteropProvider gameInteropProvider, IObjectTable objectTable) { gameInteropProvider.InitializeFromAttributes(this); _logger = logger; + _objectTable = objectTable; } - private static CharaData GetIdsFromPlayerPointer(nint ptr) + private CharaData? 
TryGetIdsFromPlayerPointer(nint ptr, ushort objectIndex) { - if (ptr == nint.Zero) return new(0, 0); - var castChar = ((BattleChara*)ptr); + if (ptr == nint.Zero || objectIndex >= 200) + return null; + + var obj = _objectTable[objectIndex]; + if (obj is not IPlayerCharacter player || player.Address != ptr) + return null; + + var castChar = (BattleChara*)player.Address; return new(castChar->Character.AccountId, castChar->Character.ContentId); } - public bool IsCharacterBlocked(nint ptr, out bool firstTime) + public bool IsCharacterBlocked(nint ptr, ushort objectIndex, out bool firstTime) { firstTime = false; - var combined = GetIdsFromPlayerPointer(ptr); + var combined = TryGetIdsFromPlayerPointer(ptr, objectIndex); + if (combined == null) + return false; + if (_blockedCharacterCache.TryGetValue(combined, out var isBlocked)) return isBlocked; diff --git a/LightlessSync/Interop/Ipc/IpcManager.cs b/LightlessSync/Interop/Ipc/IpcManager.cs index f77b084..5e95413 100644 --- a/LightlessSync/Interop/Ipc/IpcManager.cs +++ b/LightlessSync/Interop/Ipc/IpcManager.cs @@ -5,6 +5,8 @@ namespace LightlessSync.Interop.Ipc; public sealed partial class IpcManager : DisposableMediatorSubscriberBase { + private bool _wasInitialized; + public IpcManager(ILogger logger, LightlessMediator mediator, IpcCallerPenumbra penumbraIpc, IpcCallerGlamourer glamourerIpc, IpcCallerCustomize customizeIpc, IpcCallerHeels heelsIpc, IpcCallerHonorific honorificIpc, IpcCallerMoodles moodlesIpc, IpcCallerPetNames ipcCallerPetNames, IpcCallerBrio ipcCallerBrio, @@ -20,7 +22,8 @@ public sealed partial class IpcManager : DisposableMediatorSubscriberBase Brio = ipcCallerBrio; Lifestream = ipcCallerLifestream; - if (Initialized) + _wasInitialized = Initialized; + if (_wasInitialized) { Mediator.Publish(new PenumbraInitializedMessage()); } @@ -60,6 +63,14 @@ public sealed partial class IpcManager : DisposableMediatorSubscriberBase Moodles.CheckAPI(); PetNames.CheckAPI(); Brio.CheckAPI(); + + var initialized = Initialized; + if (initialized && !_wasInitialized) + { + Mediator.Publish(new PenumbraInitializedMessage()); + } + + _wasInitialized = initialized; Lifestream.CheckAPI(); } } \ No newline at end of file diff --git a/LightlessSync/LightlessConfiguration/Configurations/LightlessConfig.cs b/LightlessSync/LightlessConfiguration/Configurations/LightlessConfig.cs index 27c2856..8f1a3de 100644 --- a/LightlessSync/LightlessConfiguration/Configurations/LightlessConfig.cs +++ b/LightlessSync/LightlessConfiguration/Configurations/LightlessConfig.cs @@ -4,6 +4,7 @@ using LightlessSync.LightlessConfiguration.Models; using LightlessSync.UI; using LightlessSync.UI.Models; using Microsoft.Extensions.Logging; +using LightlessSync.PlayerData.Factories; namespace LightlessSync.LightlessConfiguration.Configurations; @@ -51,6 +52,7 @@ public class LightlessConfig : ILightlessConfiguration public bool PreferNotesOverNamesForVisible { get; set; } = false; public VisiblePairSortMode VisiblePairSortMode { get; set; } = VisiblePairSortMode.Alphabetical; public OnlinePairSortMode OnlinePairSortMode { get; set; } = OnlinePairSortMode.Alphabetical; + public TextureFormatSortMode TextureFormatSortMode { get; set; } = TextureFormatSortMode.None; public float ProfileDelay { get; set; } = 1.5f; public bool ProfilePopoutRight { get; set; } = false; public bool ProfilesAllowNsfw { get; set; } = false; @@ -157,4 +159,8 @@ public class LightlessConfig : ILightlessConfiguration public string LastSeenVersion { get; set; } = string.Empty; public bool 
EnableParticleEffects { get; set; } = true; public HashSet OrphanableTempCollections { get; set; } = []; + public AnimationValidationMode AnimationValidationMode { get; set; } = AnimationValidationMode.Safe; + public bool AnimationAllowOneBasedShift { get; set; } = true; + + public bool AnimationAllowNeighborIndexTolerance { get; set; } = false; } diff --git a/LightlessSync/LightlessConfiguration/Configurations/PlayerPerformanceConfig.cs b/LightlessSync/LightlessConfiguration/Configurations/PlayerPerformanceConfig.cs index 7da9ac2..462a63f 100644 --- a/LightlessSync/LightlessConfiguration/Configurations/PlayerPerformanceConfig.cs +++ b/LightlessSync/LightlessConfiguration/Configurations/PlayerPerformanceConfig.cs @@ -22,4 +22,15 @@ public class PlayerPerformanceConfig : ILightlessConfiguration public int TextureDownscaleMaxDimension { get; set; } = 2048; public bool OnlyDownscaleUncompressedTextures { get; set; } = true; public bool KeepOriginalTextureFiles { get; set; } = false; + public bool SkipTextureDownscaleForPreferredPairs { get; set; } = true; + public bool EnableModelDecimation { get; set; } = false; + public int ModelDecimationTriangleThreshold { get; set; } = 20_000; + public double ModelDecimationTargetRatio { get; set; } = 0.8; + public bool KeepOriginalModelFiles { get; set; } = true; + public bool SkipModelDecimationForPreferredPairs { get; set; } = true; + public bool ModelDecimationAllowBody { get; set; } = false; + public bool ModelDecimationAllowFaceHead { get; set; } = false; + public bool ModelDecimationAllowTail { get; set; } = false; + public bool ModelDecimationAllowClothing { get; set; } = true; + public bool ModelDecimationAllowAccessories { get; set; } = true; } \ No newline at end of file diff --git a/LightlessSync/LightlessConfiguration/Configurations/XivDataStorageConfig.cs b/LightlessSync/LightlessConfiguration/Configurations/XivDataStorageConfig.cs index 8444ae8..ce7990a 100644 --- a/LightlessSync/LightlessConfiguration/Configurations/XivDataStorageConfig.cs +++ b/LightlessSync/LightlessConfiguration/Configurations/XivDataStorageConfig.cs @@ -5,6 +5,7 @@ namespace LightlessSync.LightlessConfiguration.Configurations; public class XivDataStorageConfig : ILightlessConfiguration { public ConcurrentDictionary TriangleDictionary { get; set; } = new(StringComparer.OrdinalIgnoreCase); + public ConcurrentDictionary EffectiveTriangleDictionary { get; set; } = new(StringComparer.OrdinalIgnoreCase); public ConcurrentDictionary>> BonesDictionary { get; set; } = new(StringComparer.OrdinalIgnoreCase); public int Version { get; set; } = 0; } \ No newline at end of file diff --git a/LightlessSync/LightlessPlugin.cs b/LightlessSync/LightlessPlugin.cs index fe7e9a4..e82235f 100644 --- a/LightlessSync/LightlessPlugin.cs +++ b/LightlessSync/LightlessPlugin.cs @@ -74,6 +74,7 @@ public class LightlessPlugin : MediatorSubscriberBase, IHostedService private readonly DalamudUtilService _dalamudUtil; private readonly LightlessConfigService _lightlessConfigService; private readonly ServerConfigurationManager _serverConfigurationManager; + private readonly PairHandlerRegistry _pairHandlerRegistry; private readonly IServiceScopeFactory _serviceScopeFactory; private IServiceScope? _runtimeServiceScope; private Task? 
_launchTask = null; @@ -81,11 +82,13 @@ public class LightlessPlugin : MediatorSubscriberBase, IHostedService public LightlessPlugin(ILogger logger, LightlessConfigService lightlessConfigService, ServerConfigurationManager serverConfigurationManager, DalamudUtilService dalamudUtil, + PairHandlerRegistry pairHandlerRegistry, IServiceScopeFactory serviceScopeFactory, LightlessMediator mediator) : base(logger, mediator) { _lightlessConfigService = lightlessConfigService; _serverConfigurationManager = serverConfigurationManager; _dalamudUtil = dalamudUtil; + _pairHandlerRegistry = pairHandlerRegistry; _serviceScopeFactory = serviceScopeFactory; } @@ -108,12 +111,20 @@ public class LightlessPlugin : MediatorSubscriberBase, IHostedService public Task StopAsync(CancellationToken cancellationToken) { + Logger.LogDebug("Halting LightlessPlugin"); + try + { + _pairHandlerRegistry.ResetAllHandlers(); + } + catch (Exception ex) + { + Logger.LogWarning(ex, "Failed to reset pair handlers on shutdown"); + } + UnsubscribeAll(); DalamudUtilOnLogOut(); - Logger.LogDebug("Halting LightlessPlugin"); - return Task.CompletedTask; } diff --git a/LightlessSync/PlayerData/Factories/AnimationValidationMode.cs b/LightlessSync/PlayerData/Factories/AnimationValidationMode.cs new file mode 100644 index 0000000..ca73117 --- /dev/null +++ b/LightlessSync/PlayerData/Factories/AnimationValidationMode.cs @@ -0,0 +1,9 @@ +namespace LightlessSync.PlayerData.Factories +{ + public enum AnimationValidationMode + { + Unsafe = 0, + Safe = 1, + Safest = 2, + } +} diff --git a/LightlessSync/PlayerData/Factories/FileDownloadManagerFactory.cs b/LightlessSync/PlayerData/Factories/FileDownloadManagerFactory.cs index e3697cf..211a6fc 100644 --- a/LightlessSync/PlayerData/Factories/FileDownloadManagerFactory.cs +++ b/LightlessSync/PlayerData/Factories/FileDownloadManagerFactory.cs @@ -1,6 +1,7 @@ using LightlessSync.FileCache; using LightlessSync.LightlessConfiguration; using LightlessSync.Services.Mediator; +using LightlessSync.Services.ModelDecimation; using LightlessSync.Services.TextureCompression; using LightlessSync.WebAPI.Files; using Microsoft.Extensions.Logging; @@ -16,6 +17,7 @@ public class FileDownloadManagerFactory private readonly FileCompactor _fileCompactor; private readonly LightlessConfigService _configService; private readonly TextureDownscaleService _textureDownscaleService; + private readonly ModelDecimationService _modelDecimationService; private readonly TextureMetadataHelper _textureMetadataHelper; public FileDownloadManagerFactory( @@ -26,6 +28,7 @@ public class FileDownloadManagerFactory FileCompactor fileCompactor, LightlessConfigService configService, TextureDownscaleService textureDownscaleService, + ModelDecimationService modelDecimationService, TextureMetadataHelper textureMetadataHelper) { _loggerFactory = loggerFactory; @@ -35,6 +38,7 @@ public class FileDownloadManagerFactory _fileCompactor = fileCompactor; _configService = configService; _textureDownscaleService = textureDownscaleService; + _modelDecimationService = modelDecimationService; _textureMetadataHelper = textureMetadataHelper; } @@ -48,6 +52,7 @@ public class FileDownloadManagerFactory _fileCompactor, _configService, _textureDownscaleService, + _modelDecimationService, _textureMetadataHelper); } } diff --git a/LightlessSync/PlayerData/Factories/PlayerDataFactory.cs b/LightlessSync/PlayerData/Factories/PlayerDataFactory.cs index 9ecfcc3..5c5b580 100644 --- a/LightlessSync/PlayerData/Factories/PlayerDataFactory.cs +++ 
b/LightlessSync/PlayerData/Factories/PlayerDataFactory.cs @@ -2,12 +2,15 @@ using LightlessSync.API.Data.Enum; using LightlessSync.FileCache; using LightlessSync.Interop.Ipc; +using LightlessSync.LightlessConfiguration; using LightlessSync.LightlessConfiguration.Models; using LightlessSync.PlayerData.Data; using LightlessSync.PlayerData.Handlers; using LightlessSync.Services; using LightlessSync.Services.Mediator; using Microsoft.Extensions.Logging; +using System.Collections.Concurrent; +using System.Diagnostics; namespace LightlessSync.PlayerData.Factories; @@ -18,13 +21,34 @@ public class PlayerDataFactory private readonly IpcManager _ipcManager; private readonly ILogger _logger; private readonly PerformanceCollectorService _performanceCollector; + private readonly LightlessConfigService _configService; private readonly XivDataAnalyzer _modelAnalyzer; private readonly LightlessMediator _lightlessMediator; private readonly TransientResourceManager _transientResourceManager; + private static readonly SemaphoreSlim _papParseLimiter = new(1, 1); - public PlayerDataFactory(ILogger logger, DalamudUtilService dalamudUtil, IpcManager ipcManager, - TransientResourceManager transientResourceManager, FileCacheManager fileReplacementFactory, - PerformanceCollectorService performanceCollector, XivDataAnalyzer modelAnalyzer, LightlessMediator lightlessMediator) + // Transient resolved entries threshold + private const int _maxTransientResolvedEntries = 1000; + + // Character build caches + private readonly ConcurrentDictionary> _characterBuildInflight = new(); + private readonly ConcurrentDictionary _characterBuildCache = new(); + + // Time out thresholds + private static readonly TimeSpan _characterCacheTtl = TimeSpan.FromMilliseconds(750); + private static readonly TimeSpan _softReturnIfBusyAfter = TimeSpan.FromMilliseconds(250); + private static readonly TimeSpan _hardBuildTimeout = TimeSpan.FromSeconds(30); + + public PlayerDataFactory( + ILogger logger, + DalamudUtilService dalamudUtil, + IpcManager ipcManager, + TransientResourceManager transientResourceManager, + FileCacheManager fileReplacementFactory, + PerformanceCollectorService performanceCollector, + XivDataAnalyzer modelAnalyzer, + LightlessMediator lightlessMediator, + LightlessConfigService configService) { _logger = logger; _dalamudUtil = dalamudUtil; @@ -34,15 +58,15 @@ public class PlayerDataFactory _performanceCollector = performanceCollector; _modelAnalyzer = modelAnalyzer; _lightlessMediator = lightlessMediator; + _configService = configService; _logger.LogTrace("Creating {this}", nameof(PlayerDataFactory)); } + private sealed record CacheEntry(CharacterDataFragment Fragment, DateTime CreatedUtc); public async Task BuildCharacterData(GameObjectHandler playerRelatedObject, CancellationToken token) { if (!_ipcManager.Initialized) - { throw new InvalidOperationException("Penumbra or Glamourer is not connected"); - } if (playerRelatedObject == null) return null; @@ -67,16 +91,17 @@ public class PlayerDataFactory if (pointerIsZero) { - _logger.LogTrace("Pointer was zero for {objectKind}", playerRelatedObject.ObjectKind); + _logger.LogTrace("Pointer was zero for {objectKind}; couldn't build character", playerRelatedObject.ObjectKind); return null; } try { - return await _performanceCollector.LogPerformance(this, $"CreateCharacterData>{playerRelatedObject.ObjectKind}", async () => - { - return await CreateCharacterData(playerRelatedObject, token).ConfigureAwait(false); - }).ConfigureAwait(true); + return await 
_performanceCollector.LogPerformance( + this, + $"CreateCharacterData>{playerRelatedObject.ObjectKind}", + async () => await CreateCharacterData(playerRelatedObject, token).ConfigureAwait(false) + ).ConfigureAwait(false); } catch (OperationCanceledException) { @@ -92,17 +117,14 @@ public class PlayerDataFactory } private async Task CheckForNullDrawObject(IntPtr playerPointer) - { - return await _dalamudUtil.RunOnFrameworkThread(() => CheckForNullDrawObjectUnsafe(playerPointer)).ConfigureAwait(false); - } + => await _dalamudUtil.RunOnFrameworkThread(() => CheckForNullDrawObjectUnsafe(playerPointer)).ConfigureAwait(false); - private unsafe bool CheckForNullDrawObjectUnsafe(IntPtr playerPointer) + private unsafe static bool CheckForNullDrawObjectUnsafe(IntPtr playerPointer) { if (playerPointer == IntPtr.Zero) return true; var character = (Character*)playerPointer; - if (character == null) return true; @@ -113,93 +135,177 @@ public class PlayerDataFactory return gameObject->DrawObject == null; } - private async Task CreateCharacterData(GameObjectHandler playerRelatedObject, CancellationToken ct) + private static bool IsCacheFresh(CacheEntry entry) + => (DateTime.UtcNow - entry.CreatedUtc) <= _characterCacheTtl; + + private Task CreateCharacterData(GameObjectHandler playerRelatedObject, CancellationToken ct) + => CreateCharacterDataCoalesced(playerRelatedObject, ct); + + private async Task CreateCharacterDataCoalesced(GameObjectHandler obj, CancellationToken ct) { - var objectKind = playerRelatedObject.ObjectKind; - CharacterDataFragment fragment = objectKind == ObjectKind.Player ? new CharacterDataFragmentPlayer() : new(); + var key = obj.Address; - _logger.LogDebug("Building character data for {obj}", playerRelatedObject); - var logDebug = _logger.IsEnabled(LogLevel.Debug); + if (_characterBuildCache.TryGetValue(key, out var cached) && IsCacheFresh(cached) && !_characterBuildInflight.ContainsKey(key)) + return cached.Fragment; - // wait until chara is not drawing and present so nothing spontaneously explodes - await _dalamudUtil.WaitWhileCharacterIsDrawing(_logger, playerRelatedObject, Guid.NewGuid(), 30000, ct: ct).ConfigureAwait(false); - int totalWaitTime = 10000; - while (!await _dalamudUtil.IsObjectPresentAsync(await _dalamudUtil.CreateGameObjectAsync(playerRelatedObject.Address).ConfigureAwait(false)).ConfigureAwait(false) && totalWaitTime > 0) + var buildTask = _characterBuildInflight.GetOrAdd(key, _ => BuildAndCacheAsync(obj, key)); + + if (_characterBuildCache.TryGetValue(key, out cached)) { - _logger.LogTrace("Character is null but it shouldn't be, waiting"); - await Task.Delay(50, ct).ConfigureAwait(false); - totalWaitTime -= 50; + var completed = await Task.WhenAny(buildTask, Task.Delay(_softReturnIfBusyAfter, ct)).ConfigureAwait(false); + if (completed != buildTask && (DateTime.UtcNow - cached.CreatedUtc) <= TimeSpan.FromSeconds(5)) + { + return cached.Fragment; + } } - ct.ThrowIfCancellationRequested(); + return await WithCancellation(buildTask, ct).ConfigureAwait(false); + } - DateTime start = DateTime.UtcNow; - - // penumbra call, it's currently broken - Dictionary>? 
resolvedPaths; - - resolvedPaths = (await _ipcManager.Penumbra.GetCharacterData(_logger, playerRelatedObject).ConfigureAwait(false)); - if (resolvedPaths == null) throw new InvalidOperationException("Penumbra returned null data"); - - ct.ThrowIfCancellationRequested(); + private async Task BuildAndCacheAsync(GameObjectHandler obj, nint key) + { + try + { + using var cts = new CancellationTokenSource(_hardBuildTimeout); + var fragment = await CreateCharacterDataInternal(obj, cts.Token).ConfigureAwait(false); fragment.FileReplacements = new HashSet(resolvedPaths.Select(c => new FileReplacement([.. c.Value], c.Key)), FileReplacementComparer.Instance) .Where(p => p.HasFileReplacement).ToHashSet(); - fragment.FileReplacements.RemoveWhere(c => c.GamePaths.Any(g => !CacheMonitor.AllowedFileExtensions.Any(e => g.EndsWith(e, StringComparison.OrdinalIgnoreCase)))); + var allowedExtensions = CacheMonitor.AllowedFileExtensions; + fragment.FileReplacements.RemoveWhere(c => c.GamePaths.Any(g => !allowedExtensions.Any(e => g.EndsWith(e, StringComparison.OrdinalIgnoreCase)))); + _characterBuildCache[key] = new CacheEntry(fragment, DateTime.UtcNow); + PruneCharacterCacheIfNeeded(); + return fragment; + } + finally + { + _characterBuildInflight.TryRemove(key, out _); + } + } + + private void PruneCharacterCacheIfNeeded() + { + if (_characterBuildCache.Count < 2048) return; + + var cutoff = DateTime.UtcNow - TimeSpan.FromSeconds(10); + foreach (var kv in _characterBuildCache) + { + if (kv.Value.CreatedUtc < cutoff) + _characterBuildCache.TryRemove(kv.Key, out _); + } + } + + private static async Task WithCancellation(Task task, CancellationToken ct) + => await task.WaitAsync(ct).ConfigureAwait(false); + + private async Task CreateCharacterDataInternal(GameObjectHandler playerRelatedObject, CancellationToken ct) + { + var objectKind = playerRelatedObject.ObjectKind; + CharacterDataFragment fragment = objectKind == ObjectKind.Player ? new CharacterDataFragmentPlayer() : new(); + + var logDebug = _logger.IsEnabled(LogLevel.Debug); + var sw = Stopwatch.StartNew(); + + _logger.LogDebug("Building character data for {obj}", playerRelatedObject); + + await EnsureObjectPresentAsync(playerRelatedObject, ct).ConfigureAwait(false); ct.ThrowIfCancellationRequested(); + var waitRecordingTask = _transientResourceManager.WaitForRecording(ct); + + await _dalamudUtil.WaitWhileCharacterIsDrawing(_logger, playerRelatedObject, Guid.NewGuid(), 30000, ct: ct) + .ConfigureAwait(false); + + // get all remaining paths and resolve them + var transientPaths = ManageSemiTransientData(objectKind); + var resolvedTransientPaths = transientPaths.Count == 0 + ? new Dictionary(StringComparer.OrdinalIgnoreCase).AsReadOnly() + : await GetFileReplacementsFromPaths(playerRelatedObject, transientPaths, new HashSet(StringComparer.Ordinal)).ConfigureAwait(false); + ct.ThrowIfCancellationRequested(); + + if (await CheckForNullDrawObject(playerRelatedObject.Address).ConfigureAwait(false)) + throw new InvalidOperationException("DrawObject became null during build (actor despawned)"); + + Task getGlamourerData = _ipcManager.Glamourer.GetCharacterCustomizationAsync(playerRelatedObject.Address); + Task getCustomizeData = _ipcManager.CustomizePlus.GetScaleAsync(playerRelatedObject.Address); + Task? getMoodlesData = null; + Task? getHeelsOffset = null; + Task? 
getHonorificTitle = null; + + if (objectKind == ObjectKind.Player) + { + getHeelsOffset = _ipcManager.Heels.GetOffsetAsync(); + getHonorificTitle = _ipcManager.Honorific.GetTitle(); + getMoodlesData = _ipcManager.Moodles.GetStatusAsync(playerRelatedObject.Address); + } + + var resolvedPaths = await _ipcManager.Penumbra.GetCharacterData(_logger, playerRelatedObject).ConfigureAwait(false) ?? throw new InvalidOperationException("Penumbra returned null data; couldn't proceed with character"); + ct.ThrowIfCancellationRequested(); + + var staticBuildTask = Task.Run(() => BuildStaticReplacements(resolvedPaths), ct); + + fragment.FileReplacements = await staticBuildTask.ConfigureAwait(false); + if (logDebug) { _logger.LogDebug("== Static Replacements =="); - foreach (var replacement in fragment.FileReplacements.Where(i => i.HasFileReplacement).OrderBy(i => i.GamePaths.First(), StringComparer.OrdinalIgnoreCase)) + foreach (var replacement in fragment.FileReplacements + .Where(i => i.HasFileReplacement) + .OrderBy(i => i.GamePaths.First(), StringComparer.OrdinalIgnoreCase)) { _logger.LogDebug("=> {repl}", replacement); ct.ThrowIfCancellationRequested(); } } - else + + var staticReplacements = new HashSet(fragment.FileReplacements, FileReplacementComparer.Instance); + + var transientTask = ResolveTransientReplacementsAsync( + playerRelatedObject, + objectKind, + staticReplacements, + waitRecordingTask, + ct); + + fragment.GlamourerString = await getGlamourerData.ConfigureAwait(false); + _logger.LogDebug("Glamourer is now: {data}", fragment.GlamourerString); + + var customizeScale = await getCustomizeData.ConfigureAwait(false); + fragment.CustomizePlusScale = customizeScale ?? string.Empty; + _logger.LogDebug("Customize is now: {data}", fragment.CustomizePlusScale); + + if (objectKind == ObjectKind.Player) { - foreach (var replacement in fragment.FileReplacements.Where(i => i.HasFileReplacement)) - { - ct.ThrowIfCancellationRequested(); - } - } + CharacterDataFragmentPlayer? playerFragment = fragment as CharacterDataFragmentPlayer ?? throw new InvalidOperationException("Failed to cast CharacterDataFragment to Player variant"); - await _transientResourceManager.WaitForRecording(ct).ConfigureAwait(false); + playerFragment.ManipulationString = _ipcManager.Penumbra.GetMetaManipulations(); + playerFragment.HonorificData = await getHonorificTitle!.ConfigureAwait(false); + _logger.LogDebug("Honorific is now: {data}", playerFragment!.HonorificData); - // if it's pet then it's summoner, if it's summoner we actually want to keep all filereplacements alive at all times - // or we get into redraw city for every change and nothing works properly - if (objectKind == ObjectKind.Pet) - { - foreach (var item in fragment.FileReplacements.Where(i => i.HasFileReplacement).SelectMany(p => p.GamePaths)) - { - if (_transientResourceManager.AddTransientResource(objectKind, item)) - { - _logger.LogDebug("Marking static {item} for Pet as transient", item); - } - } + playerFragment.PetNamesData = _ipcManager.PetNames.GetLocalNames(); + _logger.LogDebug("Pet Nicknames is now: {petnames}", playerFragment!.PetNamesData); - _logger.LogTrace("Clearing {count} Static Replacements for Pet", fragment.FileReplacements.Count); - fragment.FileReplacements.Clear(); + playerFragment.HeelsData = await getHeelsOffset!.ConfigureAwait(false); + _logger.LogDebug("Heels is now: {heels}", playerFragment!.HeelsData); + + playerFragment.MoodlesData = (await getMoodlesData!.ConfigureAwait(false)) ?? 
string.Empty; + _logger.LogDebug("Moodles is now: {moodles}", playerFragment!.MoodlesData); } ct.ThrowIfCancellationRequested(); - _logger.LogDebug("Handling transient update for {obj}", playerRelatedObject); - - // remove all potentially gathered paths from the transient resource manager that are resolved through static resolving - _transientResourceManager.ClearTransientPaths(objectKind, fragment.FileReplacements.SelectMany(c => c.GamePaths).ToList()); - - // get all remaining paths and resolve them - var transientPaths = ManageSemiTransientData(objectKind); - var resolvedTransientPaths = await GetFileReplacementsFromPaths(playerRelatedObject, transientPaths, new HashSet(StringComparer.Ordinal)).ConfigureAwait(false); + var (resolvedTransientPaths, clearedForPet) = await transientTask.ConfigureAwait(false); + if (clearedForPet != null) + fragment.FileReplacements.Clear(); if (logDebug) { _logger.LogDebug("== Transient Replacements =="); - foreach (var replacement in resolvedTransientPaths.Select(c => new FileReplacement([.. c.Value], c.Key)).OrderBy(f => f.ResolvedPath, StringComparer.Ordinal)) + foreach (var replacement in resolvedTransientPaths + .Select(c => new FileReplacement([.. c.Value], c.Key)) + .OrderBy(f => f.ResolvedPath, StringComparer.Ordinal)) { _logger.LogDebug("=> {repl}", replacement); fragment.FileReplacements.Add(replacement); @@ -208,85 +314,64 @@ public class PlayerDataFactory else { foreach (var replacement in resolvedTransientPaths.Select(c => new FileReplacement([.. c.Value], c.Key))) - { fragment.FileReplacements.Add(replacement); - } } - // clean up all semi transient resources that don't have any file replacement (aka null resolve) _transientResourceManager.CleanUpSemiTransientResources(objectKind, [.. fragment.FileReplacements]); - ct.ThrowIfCancellationRequested(); - - // make sure we only return data that actually has file replacements - fragment.FileReplacements = new HashSet(fragment.FileReplacements.Where(v => v.HasFileReplacement).OrderBy(v => v.ResolvedPath, StringComparer.Ordinal), FileReplacementComparer.Instance); - - // gather up data from ipc - Task getHeelsOffset = _ipcManager.Heels.GetOffsetAsync(); - Task getGlamourerData = _ipcManager.Glamourer.GetCharacterCustomizationAsync(playerRelatedObject.Address); - Task getCustomizeData = _ipcManager.CustomizePlus.GetScaleAsync(playerRelatedObject.Address); - Task getHonorificTitle = _ipcManager.Honorific.GetTitle(); - fragment.GlamourerString = await getGlamourerData.ConfigureAwait(false); - _logger.LogDebug("Glamourer is now: {data}", fragment.GlamourerString); - var customizeScale = await getCustomizeData.ConfigureAwait(false); - fragment.CustomizePlusScale = customizeScale ?? string.Empty; - _logger.LogDebug("Customize is now: {data}", fragment.CustomizePlusScale); - - if (objectKind == ObjectKind.Player) - { - var playerFragment = (fragment as CharacterDataFragmentPlayer)!; - playerFragment.ManipulationString = _ipcManager.Penumbra.GetMetaManipulations(); - - playerFragment!.HonorificData = await getHonorificTitle.ConfigureAwait(false); - _logger.LogDebug("Honorific is now: {data}", playerFragment!.HonorificData); - - playerFragment!.HeelsData = await getHeelsOffset.ConfigureAwait(false); - _logger.LogDebug("Heels is now: {heels}", playerFragment!.HeelsData); - - playerFragment!.MoodlesData = await _ipcManager.Moodles.GetStatusAsync(playerRelatedObject.Address).ConfigureAwait(false) ?? 
string.Empty; - _logger.LogDebug("Moodles is now: {moodles}", playerFragment!.MoodlesData); - - playerFragment!.PetNamesData = _ipcManager.PetNames.GetLocalNames(); - _logger.LogDebug("Pet Nicknames is now: {petnames}", playerFragment!.PetNamesData); - } + fragment.FileReplacements = new HashSet( + fragment.FileReplacements + .Where(v => v.HasFileReplacement) + .OrderBy(v => v.ResolvedPath, StringComparer.Ordinal), + FileReplacementComparer.Instance); ct.ThrowIfCancellationRequested(); var toCompute = fragment.FileReplacements.Where(f => !f.IsFileSwap).ToArray(); _logger.LogDebug("Getting Hashes for {amount} Files", toCompute.Length); - var computedPaths = _fileCacheManager.GetFileCachesByPaths(toCompute.Select(c => c.ResolvedPath).ToArray()); - foreach (var file in toCompute) + + await Task.Run(() => { - ct.ThrowIfCancellationRequested(); - file.Hash = computedPaths[file.ResolvedPath]?.Hash ?? string.Empty; - } + var computedPaths = _fileCacheManager.GetFileCachesByPaths([.. toCompute.Select(c => c.ResolvedPath)]); + foreach (var file in toCompute) + { + ct.ThrowIfCancellationRequested(); + file.Hash = computedPaths[file.ResolvedPath]?.Hash ?? string.Empty; + } + }, ct).ConfigureAwait(false); + var removed = fragment.FileReplacements.RemoveWhere(f => !f.IsFileSwap && string.IsNullOrEmpty(f.Hash)); if (removed > 0) - { _logger.LogDebug("Removed {amount} of invalid files", removed); - } ct.ThrowIfCancellationRequested(); Dictionary>? boneIndices = null; var hasPapFiles = false; - if (objectKind == ObjectKind.Player) - { - hasPapFiles = fragment.FileReplacements.Any(f => - !f.IsFileSwap && f.GamePaths.First().EndsWith("pap", StringComparison.OrdinalIgnoreCase)); - if (hasPapFiles) - { - boneIndices = await _dalamudUtil.RunOnFrameworkThread(() => _modelAnalyzer.GetSkeletonBoneIndices(playerRelatedObject)).ConfigureAwait(false); - } - } if (objectKind == ObjectKind.Player) { + hasPapFiles = fragment.FileReplacements.Any(f => + !f.IsFileSwap && f.GamePaths.Any(p => p.EndsWith(".pap", StringComparison.OrdinalIgnoreCase))); + + if (hasPapFiles) + { + boneIndices = await _dalamudUtil + .RunOnFrameworkThread(() => _modelAnalyzer.GetSkeletonBoneIndices(playerRelatedObject)) + .ConfigureAwait(false); + } + try { +#if DEBUG + if (hasPapFiles && boneIndices != null) + _modelAnalyzer.DumpLocalSkeletonIndices(playerRelatedObject); +#endif + if (hasPapFiles) { - await VerifyPlayerAnimationBones(boneIndices, (fragment as CharacterDataFragmentPlayer)!, ct).ConfigureAwait(false); + await VerifyPlayerAnimationBones(boneIndices, (CharacterDataFragmentPlayer)fragment, ct) + .ConfigureAwait(false); } } catch (OperationCanceledException e) @@ -300,105 +385,277 @@ public class PlayerDataFactory } } - _logger.LogInformation("Building character data for {obj} took {time}ms", objectKind, TimeSpan.FromTicks(DateTime.UtcNow.Ticks - start.Ticks).TotalMilliseconds); + _logger.LogInformation("Building character data for {obj} took {time}ms", + objectKind, sw.Elapsed.TotalMilliseconds); return fragment; } - private async Task VerifyPlayerAnimationBones(Dictionary>? boneIndices, CharacterDataFragmentPlayer fragment, CancellationToken ct) + private async Task EnsureObjectPresentAsync(GameObjectHandler handler, CancellationToken ct) { - if (boneIndices == null) return; - - if (_logger.IsEnabled(LogLevel.Debug)) - { - foreach (var kvp in boneIndices) - { - _logger.LogDebug("Found {skellyname} ({idx} bone indices) on player: {bones}", kvp.Key, kvp.Value.Any() ? 
kvp.Value.Max() : 0, string.Join(',', kvp.Value)); - } - } - - var maxPlayerBoneIndex = boneIndices.SelectMany(kvp => kvp.Value).DefaultIfEmpty().Max(); - if (maxPlayerBoneIndex <= 0) return; - - int noValidationFailed = 0; - foreach (var file in fragment.FileReplacements.Where(f => !f.IsFileSwap && f.GamePaths.First().EndsWith("pap", StringComparison.OrdinalIgnoreCase)).ToList()) + var remaining = 10000; + while (remaining > 0) { ct.ThrowIfCancellationRequested(); - var skeletonIndices = await _dalamudUtil.RunOnFrameworkThread(() => _modelAnalyzer.GetBoneIndicesFromPap(file.Hash)).ConfigureAwait(false); - bool validationFailed = false; - if (skeletonIndices != null) + var obj = await _dalamudUtil.CreateGameObjectAsync(handler.Address).ConfigureAwait(false); + if (await _dalamudUtil.IsObjectPresentAsync(obj).ConfigureAwait(false)) + return; + + _logger.LogTrace("Character is null but it shouldn't be, waiting"); + await Task.Delay(50, ct).ConfigureAwait(false); + remaining -= 50; + } + } + + private static HashSet BuildStaticReplacements(Dictionary> resolvedPaths) + { + var set = new HashSet(FileReplacementComparer.Instance); + + foreach (var kvp in resolvedPaths) + { + var fr = new FileReplacement([.. kvp.Value], kvp.Key); + if (!fr.HasFileReplacement) continue; + + var allAllowed = fr.GamePaths.All(g => + CacheMonitor.AllowedFileExtensions.Any(e => g.EndsWith(e, StringComparison.OrdinalIgnoreCase))); + + if (!allAllowed) continue; + + set.Add(fr); + } + + return set; + } + + private async Task<(IReadOnlyDictionary ResolvedPaths, HashSet? ClearedReplacements)> + ResolveTransientReplacementsAsync( + GameObjectHandler obj, + ObjectKind objectKind, + HashSet staticReplacements, + Task waitRecordingTask, + CancellationToken ct) + { + await waitRecordingTask.ConfigureAwait(false); + + HashSet? clearedReplacements = null; + + if (objectKind == ObjectKind.Pet) + { + foreach (var item in staticReplacements.Where(i => i.HasFileReplacement).SelectMany(p => p.GamePaths)) { - // 105 is the maximum vanilla skellington spoopy bone index - if (skeletonIndices.All(k => k.Value.Max() <= 105)) - { - _logger.LogTrace("All indices of {path} are <= 105, ignoring", file.ResolvedPath); - continue; - } + if (_transientResourceManager.AddTransientResource(objectKind, item)) + _logger.LogDebug("Marking static {item} for Pet as transient", item); + } - _logger.LogDebug("Verifying bone indices for {path}, found {x} skeletons", file.ResolvedPath, skeletonIndices.Count); + _logger.LogTrace("Clearing {count} Static Replacements for Pet", staticReplacements.Count); + clearedReplacements = staticReplacements; + } - foreach (var boneCount in skeletonIndices) - { - var maxAnimationIndex = boneCount.Value.DefaultIfEmpty().Max(); - if (maxAnimationIndex > maxPlayerBoneIndex) + ct.ThrowIfCancellationRequested(); + + _transientResourceManager.ClearTransientPaths(objectKind, [.. 
staticReplacements.SelectMany(c => c.GamePaths)]); + + var transientPaths = ManageSemiTransientData(objectKind); + if (transientPaths.Count == 0) + return (new Dictionary(StringComparer.Ordinal), clearedReplacements); + + var resolved = await GetFileReplacementsFromPaths(obj, transientPaths, new HashSet(StringComparer.Ordinal)) + .ConfigureAwait(false); + + if (_maxTransientResolvedEntries > 0 && resolved.Count > _maxTransientResolvedEntries) + { + _logger.LogWarning("Transient entries ({resolved}) are above the threshold {max}; Please consider disable some mods (VFX have heavy load) to reduce transient load", + resolved.Count, + _maxTransientResolvedEntries); + } + + return (resolved, clearedReplacements); + } + + + private async Task VerifyPlayerAnimationBones( + Dictionary>? playerBoneIndices, + CharacterDataFragmentPlayer fragment, + CancellationToken ct) + { + var mode = _configService.Current.AnimationValidationMode; + var allowBasedShift = _configService.Current.AnimationAllowOneBasedShift; + var allownNightIndex = _configService.Current.AnimationAllowNeighborIndexTolerance; + + if (mode == AnimationValidationMode.Unsafe) + return; + + if (playerBoneIndices == null || playerBoneIndices.Count == 0) + return; + + var localBoneSets = new Dictionary>(StringComparer.OrdinalIgnoreCase); + + foreach (var (rawLocalKey, indices) in playerBoneIndices) + { + if (indices is not { Count: > 0 }) + continue; + + var key = XivDataAnalyzer.CanonicalizeSkeletonKey(rawLocalKey); + if (string.IsNullOrEmpty(key)) + continue; + + if (!localBoneSets.TryGetValue(key, out var set)) + localBoneSets[key] = set = []; + + foreach (var idx in indices) + set.Add(idx); + } + + if (localBoneSets.Count == 0) + return; + + if (_logger.IsEnabled(LogLevel.Debug)) + { + _logger.LogDebug("SEND local buckets: {b}", + string.Join(", ", localBoneSets.Keys.Order(StringComparer.Ordinal))); + + foreach (var kvp in localBoneSets.OrderBy(k => k.Key, StringComparer.OrdinalIgnoreCase)) + { + var min = kvp.Value.Count > 0 ? kvp.Value.Min() : 0; + var max = kvp.Value.Count > 0 ? kvp.Value.Max() : 0; + _logger.LogDebug("Local bucket {bucket}: count={count} min={min} max={max}", + kvp.Key, kvp.Value.Count, min, max); + } + } + + var papGroups = fragment.FileReplacements + .Where(f => !f.IsFileSwap + && !string.IsNullOrEmpty(f.Hash) + && f.GamePaths is { Count: > 0 } + && f.GamePaths.Any(p => p.EndsWith(".pap", StringComparison.OrdinalIgnoreCase))) + .GroupBy(f => f.Hash!, StringComparer.OrdinalIgnoreCase) + .ToList(); + + int noValidationFailed = 0; + + foreach (var g in papGroups) + { + ct.ThrowIfCancellationRequested(); + + var hash = g.Key; + + Dictionary>? 
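// Illustrative sketch (not part of the patch) of how VerifyPlayerAnimationBones above folds
// raw skeleton keys into canonical buckets of bone indices. The Canonicalize helper here is a
// placeholder; the real key rules live in XivDataAnalyzer.CanonicalizeSkeletonKey.
using System;
using System.Collections.Generic;

static class BoneBucketSketch
{
    // Placeholder canonicalizer so equivalent skeleton names share one bucket.
    static string Canonicalize(string rawKey) => rawKey.Trim().ToLowerInvariant();

    public static Dictionary<string, HashSet<int>> BuildBuckets(Dictionary<string, List<int>> raw)
    {
        var buckets = new Dictionary<string, HashSet<int>>(StringComparer.OrdinalIgnoreCase);
        foreach (var (rawKey, indices) in raw)
        {
            if (indices is not { Count: > 0 }) continue;

            var key = Canonicalize(rawKey);
            if (string.IsNullOrEmpty(key)) continue;

            if (!buckets.TryGetValue(key, out var set))
                buckets[key] = set = new HashSet<int>();

            foreach (var idx in indices)
                set.Add(idx);
        }
        return buckets;
    }
}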
papIndices = null; + + await _papParseLimiter.WaitAsync(ct).ConfigureAwait(false); + try + { + papIndices = await Task.Run(() => _modelAnalyzer.GetBoneIndicesFromPap(hash), ct) + .ConfigureAwait(false); + } + finally + { + _papParseLimiter.Release(); + } + + if (papIndices == null || papIndices.Count == 0) + continue; + + if (papIndices.All(k => k.Value.DefaultIfEmpty().Max() <= 105)) + continue; + + if (_logger.IsEnabled(LogLevel.Debug)) + { + var papBuckets = papIndices + .Select(kvp => new { - _logger.LogWarning("Found more bone indices on the animation {path} skeleton {skl} (max indice {idx}) than on any player related skeleton (max indice {idx2})", - file.ResolvedPath, boneCount.Key, maxAnimationIndex, maxPlayerBoneIndex); - validationFailed = true; - break; - } - } + Raw = kvp.Key, + Key = XivDataAnalyzer.CanonicalizeSkeletonKey(kvp.Key), + Indices = kvp.Value + }) + .Where(x => x.Indices is { Count: > 0 }) + .GroupBy(x => string.IsNullOrEmpty(x.Key) ? x.Raw : x.Key!, StringComparer.OrdinalIgnoreCase) + .Select(grp => + { + var all = grp.SelectMany(v => v.Indices).ToList(); + var min = all.Count > 0 ? all.Min() : 0; + var max = all.Count > 0 ? all.Max() : 0; + var raws = string.Join(',', grp.Select(v => v.Raw).Distinct(StringComparer.OrdinalIgnoreCase)); + return $"{grp.Key}(min={min},max={max},raw=[{raws}])"; + }) + .ToList(); + + _logger.LogDebug("SEND pap buckets for hash={hash}: {b}", + hash, + string.Join(" | ", papBuckets)); } - if (validationFailed) - { - noValidationFailed++; - _logger.LogDebug("Removing {file} from sent file replacements and transient data", file.ResolvedPath); - fragment.FileReplacements.Remove(file); - foreach (var gamePath in file.GamePaths) - { - _transientResourceManager.RemoveTransientResource(ObjectKind.Player, gamePath); - } - } + if (XivDataAnalyzer.IsPapCompatible(localBoneSets, papIndices, mode, allowBasedShift, allownNightIndex, out var reason)) + continue; + noValidationFailed++; + + _logger.LogWarning( + "Animation PAP hash {hash} is not compatible with local skeletons; dropping all mappings for this hash. Reason: {reason}", + hash, + reason); + + var removedGamePaths = fragment.FileReplacements + .Where(fr => !fr.IsFileSwap + && string.Equals(fr.Hash, hash, StringComparison.OrdinalIgnoreCase) + && fr.GamePaths.Any(p => p.EndsWith(".pap", StringComparison.OrdinalIgnoreCase))) + .SelectMany(fr => fr.GamePaths.Where(p => p.EndsWith(".pap", StringComparison.OrdinalIgnoreCase))) + .Distinct(StringComparer.OrdinalIgnoreCase) + .ToList(); + + fragment.FileReplacements.RemoveWhere(fr => + !fr.IsFileSwap + && string.Equals(fr.Hash, hash, StringComparison.OrdinalIgnoreCase) + && fr.GamePaths.Any(p => p.EndsWith(".pap", StringComparison.OrdinalIgnoreCase))); + + foreach (var gp in removedGamePaths) + _transientResourceManager.RemoveTransientResource(ObjectKind.Player, gp); } if (noValidationFailed > 0) { - _lightlessMediator.Publish(new NotificationMessage("Invalid Skeleton Setup", - $"Your client is attempting to send {noValidationFailed} animation files with invalid bone data. Those animation files have been removed from your sent data. " + - $"Verify that you are using the correct skeleton for those animation files (Check /xllog for more information).", - NotificationType.Warning, TimeSpan.FromSeconds(10))); + _lightlessMediator.Publish(new NotificationMessage( + "Invalid Skeleton Setup", + $"Your client is attempting to send {noValidationFailed} animation files that don't match your current skeleton validation mode ({mode}). 
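// Illustrative sketch (not part of the patch) of the throttled PAP parsing above: a
// SemaphoreSlim caps how many parses run concurrently while the work itself runs on the
// thread pool. The limit of 2 is an assumption; the real _papParseLimiter is configured
// elsewhere, and the parse delegate stands in for XivDataAnalyzer.GetBoneIndicesFromPap.
using System;
using System.Collections.Generic;
using System.Threading;
using System.Threading.Tasks;

static class PapThrottleSketch
{
    static readonly SemaphoreSlim ParseLimiter = new(2, 2);

    public static async Task<Dictionary<string, List<int>>?> ParseThrottledAsync(
        Func<Dictionary<string, List<int>>?> parse, CancellationToken ct)
    {
        await ParseLimiter.WaitAsync(ct).ConfigureAwait(false);
        try
        {
            // CPU-bound parsing runs off the caller while the slot is held.
            return await Task.Run(parse, ct).ConfigureAwait(false);
        }
        finally
        {
            ParseLimiter.Release();
        }
    }
}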
" + + "Please adjust your skeleton/mods or change the validation mode if this is unexpected. " + + "Those animation files have been removed from your sent (player) data. (Check /xllog for details).", + NotificationType.Warning, + TimeSpan.FromSeconds(10))); } } - private async Task> GetFileReplacementsFromPaths(GameObjectHandler handler, HashSet forwardResolve, HashSet reverseResolve) + + private async Task> GetFileReplacementsFromPaths( + GameObjectHandler handler, + HashSet forwardResolve, + HashSet reverseResolve) { var forwardPaths = forwardResolve.ToArray(); var reversePaths = reverseResolve.ToArray(); - Dictionary> resolvedPaths = new(StringComparer.Ordinal); + if (forwardPaths.Length == 0 && reversePaths.Length == 0) + { + return new Dictionary(StringComparer.OrdinalIgnoreCase).AsReadOnly(); + } + + var forwardPathsLower = forwardPaths.Length == 0 ? Array.Empty() : forwardPaths.Select(p => p.ToLowerInvariant()).ToArray(); + var reversePathsLower = reversePaths.Length == 0 ? Array.Empty() : reversePaths.Select(p => p.ToLowerInvariant()).ToArray(); + + Dictionary> resolvedPaths = new(forwardPaths.Length + reversePaths.Length, StringComparer.Ordinal); if (handler.ObjectKind != ObjectKind.Player) { var (objectIndex, forwardResolved, reverseResolved) = await _dalamudUtil.RunOnFrameworkThread(() => { var idx = handler.GetGameObject()?.ObjectIndex; if (!idx.HasValue) - { return ((int?)null, Array.Empty(), Array.Empty()); - } var resolvedForward = new string[forwardPaths.Length]; for (int i = 0; i < forwardPaths.Length; i++) - { resolvedForward[i] = _ipcManager.Penumbra.ResolveGameObjectPath(forwardPaths[i], idx.Value); - } var resolvedReverse = new string[reversePaths.Length][]; for (int i = 0; i < reversePaths.Length; i++) - { resolvedReverse[i] = _ipcManager.Penumbra.ReverseResolveGameObjectPath(reversePaths[i], idx.Value); - } return (idx, resolvedForward, resolvedReverse); }).ConfigureAwait(false); @@ -409,14 +666,10 @@ public class PlayerDataFactory { var filePath = forwardResolved[i]?.ToLowerInvariant(); if (string.IsNullOrEmpty(filePath)) - { continue; - } if (resolvedPaths.TryGetValue(filePath, out var list)) - { list.Add(forwardPaths[i].ToLowerInvariant()); - } else { resolvedPaths[filePath] = [forwardPaths[i].ToLowerInvariant()]; @@ -425,15 +678,16 @@ public class PlayerDataFactory for (int i = 0; i < reversePaths.Length; i++) { - var filePath = reversePaths[i].ToLowerInvariant(); + var filePath = reversePathsLower[i]; + var reverseResolvedLower = new string[reverseResolved[i].Length]; + for (var j = 0; j < reverseResolvedLower.Length; j++) + { + reverseResolvedLower[j] = reverseResolved[i][j].ToLowerInvariant(); + } if (resolvedPaths.TryGetValue(filePath, out var list)) - { list.AddRange(reverseResolved[i].Select(c => c.ToLowerInvariant())); - } else - { - resolvedPaths[filePath] = new List(reverseResolved[i].Select(c => c.ToLowerInvariant()).ToList()); - } + resolvedPaths[filePath] = [.. 
reverseResolved[i].Select(c => c.ToLowerInvariant()).ToList()]; } return resolvedPaths.ToDictionary(k => k.Key, k => k.Value.ToArray(), StringComparer.OrdinalIgnoreCase).AsReadOnly(); @@ -441,30 +695,28 @@ public class PlayerDataFactory } var (forward, reverse) = await _ipcManager.Penumbra.ResolvePathsAsync(forwardPaths, reversePaths).ConfigureAwait(false); + for (int i = 0; i < forwardPaths.Length; i++) { var filePath = forward[i].ToLowerInvariant(); if (resolvedPaths.TryGetValue(filePath, out var list)) - { list.Add(forwardPaths[i].ToLowerInvariant()); - } else - { resolvedPaths[filePath] = [forwardPaths[i].ToLowerInvariant()]; - } } for (int i = 0; i < reversePaths.Length; i++) { - var filePath = reversePaths[i].ToLowerInvariant(); + var filePath = reversePathsLower[i]; + var reverseResolvedLower = new string[reverse[i].Length]; + for (var j = 0; j < reverseResolvedLower.Length; j++) + { + reverseResolvedLower[j] = reverse[i][j].ToLowerInvariant(); + } if (resolvedPaths.TryGetValue(filePath, out var list)) - { list.AddRange(reverse[i].Select(c => c.ToLowerInvariant())); - } else - { - resolvedPaths[filePath] = new List(reverse[i].Select(c => c.ToLowerInvariant()).ToList()); - } + resolvedPaths[filePath] = [.. reverse[i].Select(c => c.ToLowerInvariant()).ToList()]; } return resolvedPaths.ToDictionary(k => k.Key, k => k.Value.ToArray(), StringComparer.OrdinalIgnoreCase).AsReadOnly(); @@ -475,11 +727,29 @@ public class PlayerDataFactory _transientResourceManager.PersistTransientResources(objectKind); HashSet pathsToResolve = new(StringComparer.Ordinal); - foreach (var path in _transientResourceManager.GetSemiTransientResources(objectKind).Where(path => !string.IsNullOrEmpty(path))) + + int scanned = 0, skippedEmpty = 0, skippedVfx = 0; + + foreach (var path in _transientResourceManager.GetSemiTransientResources(objectKind)) { + scanned++; + + if (string.IsNullOrEmpty(path)) + { + skippedEmpty++; + continue; + } + pathsToResolve.Add(path); } + if (_logger.IsEnabled(LogLevel.Debug)) + { + _logger.LogDebug( + "ManageSemiTransientData({kind}): scanned={scanned}, added={added}, skippedEmpty={skippedEmpty}, skippedVfx={skippedVfx}", + objectKind, scanned, pathsToResolve.Count, skippedEmpty, skippedVfx); + } + return pathsToResolve; } -} \ No newline at end of file +} diff --git a/LightlessSync/PlayerData/Pairs/IPairPerformanceSubject.cs b/LightlessSync/PlayerData/Pairs/IPairPerformanceSubject.cs index cd62f98..b6355a8 100644 --- a/LightlessSync/PlayerData/Pairs/IPairPerformanceSubject.cs +++ b/LightlessSync/PlayerData/Pairs/IPairPerformanceSubject.cs @@ -16,4 +16,5 @@ public interface IPairPerformanceSubject long LastAppliedApproximateVRAMBytes { get; set; } long LastAppliedApproximateEffectiveVRAMBytes { get; set; } long LastAppliedDataTris { get; set; } + long LastAppliedApproximateEffectiveTris { get; set; } } diff --git a/LightlessSync/PlayerData/Pairs/Pair.cs b/LightlessSync/PlayerData/Pairs/Pair.cs index 2a85cd3..e95b7fe 100644 --- a/LightlessSync/PlayerData/Pairs/Pair.cs +++ b/LightlessSync/PlayerData/Pairs/Pair.cs @@ -69,6 +69,7 @@ public class Pair public string? PlayerName => TryGetHandler()?.PlayerName ?? UserPair.User.AliasOrUID; public long LastAppliedDataBytes => TryGetHandler()?.LastAppliedDataBytes ?? -1; public long LastAppliedDataTris => TryGetHandler()?.LastAppliedDataTris ?? -1; + public long LastAppliedApproximateEffectiveTris => TryGetHandler()?.LastAppliedApproximateEffectiveTris ?? 
-1; public long LastAppliedApproximateVRAMBytes => TryGetHandler()?.LastAppliedApproximateVRAMBytes ?? -1; public long LastAppliedApproximateEffectiveVRAMBytes => TryGetHandler()?.LastAppliedApproximateEffectiveVRAMBytes ?? -1; public string Ident => TryGetHandler()?.Ident ?? TryGetConnection()?.Ident ?? string.Empty; diff --git a/LightlessSync/PlayerData/Pairs/PairCoordinator.Users.cs b/LightlessSync/PlayerData/Pairs/PairCoordinator.Users.cs index 0891035..713333e 100644 --- a/LightlessSync/PlayerData/Pairs/PairCoordinator.Users.cs +++ b/LightlessSync/PlayerData/Pairs/PairCoordinator.Users.cs @@ -125,6 +125,7 @@ public sealed partial class PairCoordinator } } + _mediator.Publish(new PairOnlineMessage(new PairUniqueIdentifier(dto.User.UID))); PublishPairDataChanged(); } diff --git a/LightlessSync/PlayerData/Pairs/PairHandlerAdapter.cs b/LightlessSync/PlayerData/Pairs/PairHandlerAdapter.cs index b0f2710..82f4749 100644 --- a/LightlessSync/PlayerData/Pairs/PairHandlerAdapter.cs +++ b/LightlessSync/PlayerData/Pairs/PairHandlerAdapter.cs @@ -1,16 +1,19 @@ using System.Collections.Concurrent; using System.Diagnostics; +using Dalamud.Plugin.Services; using LightlessSync.API.Data; using LightlessSync.API.Data.Enum; using LightlessSync.API.Data.Extensions; using LightlessSync.FileCache; using LightlessSync.Interop.Ipc; +using LightlessSync.LightlessConfiguration; using LightlessSync.PlayerData.Factories; using LightlessSync.PlayerData.Handlers; using LightlessSync.Services; using LightlessSync.Services.ActorTracking; using LightlessSync.Services.Events; using LightlessSync.Services.Mediator; +using LightlessSync.Services.ModelDecimation; using LightlessSync.Services.PairProcessing; using LightlessSync.Services.ServerConfiguration; using LightlessSync.Services.TextureCompression; @@ -22,6 +25,7 @@ using Microsoft.Extensions.Logging; using DalamudObjectKind = Dalamud.Game.ClientState.Objects.Enums.ObjectKind; using ObjectKind = LightlessSync.API.Data.Enum.ObjectKind; using FileReplacementDataComparer = LightlessSync.PlayerData.Data.FileReplacementDataComparer; +using LightlessSync.LightlessConfiguration; namespace LightlessSync.PlayerData.Pairs; @@ -36,6 +40,7 @@ internal sealed class PairHandlerAdapter : DisposableMediatorSubscriberBase, IPa private readonly ActorObjectService _actorObjectService; private readonly FileDownloadManager _downloadManager; private readonly FileCacheManager _fileDbManager; + private readonly PlayerPerformanceConfigService _playerPerformanceConfigService; private readonly GameObjectHandlerFactory _gameObjectHandlerFactory; private readonly IpcManager _ipcManager; private readonly IHostApplicationLifetime _lifetime; @@ -44,10 +49,14 @@ internal sealed class PairHandlerAdapter : DisposableMediatorSubscriberBase, IPa private readonly ServerConfigurationManager _serverConfigManager; private readonly PluginWarningNotificationService _pluginWarningNotificationManager; private readonly TextureDownscaleService _textureDownscaleService; + private readonly ModelDecimationService _modelDecimationService; private readonly PairStateCache _pairStateCache; private readonly PairPerformanceMetricsCache _performanceMetricsCache; + private readonly XivDataAnalyzer _modelAnalyzer; private readonly PenumbraTempCollectionJanitor _tempCollectionJanitor; + private readonly LightlessConfigService _configService; private readonly PairManager _pairManager; + private readonly IFramework _framework; private CancellationTokenSource? 
_applicationCancellationTokenSource; private Guid _applicationId; private Task? _applicationTask; @@ -66,6 +75,7 @@ internal sealed class PairHandlerAdapter : DisposableMediatorSubscriberBase, IPa private int _lastMissingNonCriticalMods; private int _lastMissingForbiddenMods; private bool _lastMissingCachedFiles; + private string? _lastSuccessfulDataHash; private bool _isVisible; private Guid _penumbraCollection; private readonly object _collectionGate = new(); @@ -82,6 +92,13 @@ internal sealed class PairHandlerAdapter : DisposableMediatorSubscriberBase, IPa private readonly object _visibilityGraceGate = new(); private CancellationTokenSource? _visibilityGraceCts; private static readonly TimeSpan VisibilityEvictionGrace = TimeSpan.FromMinutes(1); + private readonly object _ownedRetryGate = new(); + private readonly Dictionary> _pendingOwnedChanges = new(); + private CancellationTokenSource? _ownedRetryCts; + private Task _ownedRetryTask = Task.CompletedTask; + private static readonly TimeSpan OwnedRetryInitialDelay = TimeSpan.FromSeconds(1); + private static readonly TimeSpan OwnedRetryMaxDelay = TimeSpan.FromSeconds(10); + private static readonly TimeSpan OwnedRetryStaleDataGrace = TimeSpan.FromMinutes(5); private static readonly HashSet NonPriorityModExtensions = new(StringComparer.OrdinalIgnoreCase) { ".tmb", @@ -90,15 +107,24 @@ internal sealed class PairHandlerAdapter : DisposableMediatorSubscriberBase, IPa ".avfx", ".scd" }; + + private readonly ConcurrentDictionary _blockedPapHashes = new(StringComparer.OrdinalIgnoreCase); + private readonly ConcurrentDictionary _dumpedRemoteSkeletonForHash = new(StringComparer.OrdinalIgnoreCase); + private DateTime? _invisibleSinceUtc; private DateTime? _visibilityEvictionDueAtUtc; private DateTime _nextActorLookupUtc = DateTime.MinValue; private static readonly TimeSpan ActorLookupInterval = TimeSpan.FromSeconds(1); private static readonly SemaphoreSlim ActorInitializationLimiter = new(1, 1); + private const int FullyLoadedTimeoutMsPlayer = 30000; + private const int FullyLoadedTimeoutMsOther = 5000; private readonly object _actorInitializationGate = new(); private ActorObjectService.ActorDescriptor? _pendingActorDescriptor; private bool _actorInitializationInProgress; private bool _frameworkUpdateSubscribed; + private nint _lastKnownAddress = nint.Zero; + private ushort _lastKnownObjectIndex = ushort.MaxValue; + private string? _lastKnownName; public DateTime? InvisibleSinceUtc => _invisibleSinceUtc; public DateTime? VisibilityEvictionDueAtUtc => _visibilityEvictionDueAtUtc; @@ -147,6 +173,7 @@ internal sealed class PairHandlerAdapter : DisposableMediatorSubscriberBase, IPa public long LastAppliedDataBytes { get; private set; } public long LastAppliedDataTris { get; set; } = -1; + public long LastAppliedApproximateEffectiveTris { get; set; } = -1; public long LastAppliedApproximateVRAMBytes { get; set; } = -1; public long LastAppliedApproximateEffectiveVRAMBytes { get; set; } = -1; public CharacterData? 
LastReceivedCharacterData { get; private set; } @@ -175,16 +202,21 @@ internal sealed class PairHandlerAdapter : DisposableMediatorSubscriberBase, IPa FileDownloadManager transferManager, PluginWarningNotificationService pluginWarningNotificationManager, DalamudUtilService dalamudUtil, + IFramework framework, ActorObjectService actorObjectService, IHostApplicationLifetime lifetime, FileCacheManager fileDbManager, + PlayerPerformanceConfigService playerPerformanceConfigService, PlayerPerformanceService playerPerformanceService, PairProcessingLimiter pairProcessingLimiter, ServerConfigurationManager serverConfigManager, TextureDownscaleService textureDownscaleService, + ModelDecimationService modelDecimationService, PairStateCache pairStateCache, PairPerformanceMetricsCache performanceMetricsCache, - PenumbraTempCollectionJanitor tempCollectionJanitor) : base(logger, mediator) + PenumbraTempCollectionJanitor tempCollectionJanitor, + XivDataAnalyzer modelAnalyzer, + LightlessConfigService configService) : base(logger, mediator) { _pairManager = pairManager; Ident = ident; @@ -193,16 +225,21 @@ internal sealed class PairHandlerAdapter : DisposableMediatorSubscriberBase, IPa _downloadManager = transferManager; _pluginWarningNotificationManager = pluginWarningNotificationManager; _dalamudUtil = dalamudUtil; + _framework = framework; _actorObjectService = actorObjectService; _lifetime = lifetime; _fileDbManager = fileDbManager; + _playerPerformanceConfigService = playerPerformanceConfigService; _playerPerformanceService = playerPerformanceService; _pairProcessingLimiter = pairProcessingLimiter; _serverConfigManager = serverConfigManager; _textureDownscaleService = textureDownscaleService; + _modelDecimationService = modelDecimationService; _pairStateCache = pairStateCache; _performanceMetricsCache = performanceMetricsCache; _tempCollectionJanitor = tempCollectionJanitor; + _modelAnalyzer = modelAnalyzer; + _configService = configService; } public void Initialize() @@ -225,7 +262,7 @@ internal sealed class PairHandlerAdapter : DisposableMediatorSubscriberBase, IPa return; } - if (LastAppliedDataBytes < 0 || LastAppliedDataTris < 0 + if (LastAppliedDataBytes < 0 || LastAppliedDataTris < 0 || LastAppliedApproximateEffectiveTris < 0 || LastAppliedApproximateVRAMBytes < 0 || LastAppliedApproximateEffectiveVRAMBytes < 0) { _forceApplyMods = true; @@ -432,7 +469,7 @@ internal sealed class PairHandlerAdapter : DisposableMediatorSubscriberBase, IPa } } - private void ResetPenumbraCollection(bool releaseFromPenumbra = true, string? reason = null) + private void ResetPenumbraCollection(bool releaseFromPenumbra = true, string? reason = null, bool awaitIpc = true) { Guid toRelease = Guid.Empty; bool hadCollection = false; @@ -466,16 +503,33 @@ internal sealed class PairHandlerAdapter : DisposableMediatorSubscriberBase, IPa return; } - try + var applicationId = Guid.NewGuid(); + if (awaitIpc) { - var applicationId = Guid.NewGuid(); - Logger.LogTrace("[{applicationId}] Removing temp collection {CollectionId} for {handler} ({reason})", applicationId, toRelease, GetLogIdentifier(), reason ?? "Cleanup"); - _ipcManager.Penumbra.RemoveTemporaryCollectionAsync(Logger, applicationId, toRelease).GetAwaiter().GetResult(); + try + { + Logger.LogTrace("[{applicationId}] Removing temp collection {CollectionId} for {handler} ({reason})", applicationId, toRelease, GetLogIdentifier(), reason ?? 
"Cleanup"); + _ipcManager.Penumbra.RemoveTemporaryCollectionAsync(Logger, applicationId, toRelease).GetAwaiter().GetResult(); + } + catch (Exception ex) + { + Logger.LogDebug(ex, "Failed to remove temporary Penumbra collection for {handler}", GetLogIdentifier()); + } + return; } - catch (Exception ex) + + _ = Task.Run(async () => { - Logger.LogDebug(ex, "Failed to remove temporary Penumbra collection for {handler}", GetLogIdentifier()); - } + try + { + Logger.LogTrace("[{applicationId}] Removing temp collection {CollectionId} for {handler} ({reason})", applicationId, toRelease, GetLogIdentifier(), reason ?? "Cleanup"); + await _ipcManager.Penumbra.RemoveTemporaryCollectionAsync(Logger, applicationId, toRelease).ConfigureAwait(false); + } + catch (Exception ex) + { + Logger.LogDebug(ex, "Failed to remove temporary Penumbra collection for {handler}", GetLogIdentifier()); + } + }); } private bool AnyPair(Func predicate) @@ -483,11 +537,31 @@ internal sealed class PairHandlerAdapter : DisposableMediatorSubscriberBase, IPa return GetCurrentPairs().Any(predicate); } - private bool ShouldSkipDownscale() + private bool IsPreferredDirectPair() { return GetCurrentPairs().Any(p => p.IsDirectlyPaired && p.SelfToOtherPermissions.IsSticky()); } + private bool ShouldSkipDownscale() + { + if (!_playerPerformanceConfigService.Current.SkipTextureDownscaleForPreferredPairs) + { + return false; + } + + return IsPreferredDirectPair(); + } + + private bool ShouldSkipDecimation() + { + if (!_playerPerformanceConfigService.Current.SkipModelDecimationForPreferredPairs) + { + return false; + } + + return IsPreferredDirectPair(); + } + private bool IsPaused() { var pairs = GetCurrentPairs(); @@ -545,6 +619,7 @@ internal sealed class PairHandlerAdapter : DisposableMediatorSubscriberBase, IPa _forceApplyMods = true; LastAppliedDataBytes = -1; LastAppliedDataTris = -1; + LastAppliedApproximateEffectiveTris = -1; LastAppliedApproximateVRAMBytes = -1; LastAppliedApproximateEffectiveVRAMBytes = -1; } @@ -559,9 +634,11 @@ internal sealed class PairHandlerAdapter : DisposableMediatorSubscriberBase, IPa } var hasMissingCachedFiles = HasMissingCachedFiles(LastReceivedCharacterData); + var missingStarted = !_lastMissingCachedFiles && hasMissingCachedFiles; var missingResolved = _lastMissingCachedFiles && !hasMissingCachedFiles; _lastMissingCachedFiles = hasMissingCachedFiles; - var shouldForce = forced || missingResolved; + var shouldForce = forced || missingStarted || missingResolved; + var forceApplyCustomization = forced; if (IsPaused()) { @@ -569,25 +646,46 @@ internal sealed class PairHandlerAdapter : DisposableMediatorSubscriberBase, IPa return; } - if (shouldForce) - { - _forceApplyMods = true; - _forceFullReapply = true; - LastAppliedDataBytes = -1; - LastAppliedDataTris = -1; - LastAppliedApproximateVRAMBytes = -1; - LastAppliedApproximateEffectiveVRAMBytes = -1; - } - - var sanitized = CloneAndSanitizeLastReceived(out _); + var sanitized = CloneAndSanitizeLastReceived(out var dataHash); if (sanitized is null) { Logger.LogTrace("Sanitized data null for {Ident}", Ident); return; } + var dataApplied = !string.IsNullOrEmpty(dataHash) + && string.Equals(dataHash, _lastSuccessfulDataHash ?? 
string.Empty, StringComparison.Ordinal); + var needsApply = !dataApplied; + var modFilesChanged = PlayerModFilesChanged(sanitized, _cachedData); + var shouldForceMods = shouldForce || modFilesChanged; + forceApplyCustomization = forced || needsApply; + var suppressForcedModRedraw = !forced && hasMissingCachedFiles && dataApplied; + + if (shouldForceMods) + { + _forceApplyMods = true; + _forceFullReapply = true; + LastAppliedDataBytes = -1; + LastAppliedDataTris = -1; + LastAppliedApproximateEffectiveTris = -1; + LastAppliedApproximateVRAMBytes = -1; + LastAppliedApproximateEffectiveVRAMBytes = -1; + } _pairStateCache.Store(Ident, sanitized); + if (!IsVisible && !_pauseRequested) + { + if (_charaHandler is not null && _charaHandler.Address == nint.Zero) + { + _charaHandler.Refresh(); + } + + if (PlayerCharacter != nint.Zero) + { + IsVisible = true; + } + } + if (!IsVisible) { Logger.LogTrace("Handler for {Ident} not visible, caching sanitized data for later", Ident); @@ -596,7 +694,7 @@ internal sealed class PairHandlerAdapter : DisposableMediatorSubscriberBase, IPa return; } - ApplyCharacterData(Guid.NewGuid(), sanitized, shouldForce); + ApplyCharacterData(Guid.NewGuid(), sanitized, forceApplyCustomization, suppressForcedModRedraw); } public bool FetchPerformanceMetricsFromCache() @@ -668,6 +766,7 @@ internal sealed class PairHandlerAdapter : DisposableMediatorSubscriberBase, IPa private void ApplyCachedMetrics(PairPerformanceMetrics metrics) { LastAppliedDataTris = metrics.TriangleCount; + LastAppliedApproximateEffectiveTris = metrics.ApproximateEffectiveTris; LastAppliedApproximateVRAMBytes = metrics.ApproximateVramBytes; LastAppliedApproximateEffectiveVRAMBytes = metrics.ApproximateEffectiveVramBytes; } @@ -675,6 +774,7 @@ internal sealed class PairHandlerAdapter : DisposableMediatorSubscriberBase, IPa private void StorePerformanceMetrics(CharacterData charaData) { if (LastAppliedDataTris < 0 + || LastAppliedApproximateEffectiveTris < 0 || LastAppliedApproximateVRAMBytes < 0 || LastAppliedApproximateEffectiveVRAMBytes < 0) { @@ -690,7 +790,7 @@ internal sealed class PairHandlerAdapter : DisposableMediatorSubscriberBase, IPa _performanceMetricsCache.StoreMetrics( Ident, dataHash, - new PairPerformanceMetrics(LastAppliedDataTris, LastAppliedApproximateVRAMBytes, LastAppliedApproximateEffectiveVRAMBytes)); + new PairPerformanceMetrics(LastAppliedDataTris, LastAppliedApproximateVRAMBytes, LastAppliedApproximateEffectiveVRAMBytes, LastAppliedApproximateEffectiveTris)); } private bool HasMissingCachedFiles(CharacterData characterData) @@ -906,7 +1006,7 @@ internal sealed class PairHandlerAdapter : DisposableMediatorSubscriberBase, IPa SetUploading(false); } - public void ApplyCharacterData(Guid applicationBase, CharacterData characterData, bool forceApplyCustomization = false) + public void ApplyCharacterData(Guid applicationBase, CharacterData characterData, bool forceApplyCustomization = false, bool suppressForcedModRedraw = false) { _lastApplyAttemptAt = DateTime.UtcNow; ClearFailureState(); @@ -1000,7 +1100,8 @@ internal sealed class PairHandlerAdapter : DisposableMediatorSubscriberBase, IPa Mediator.Publish(new EventMessage(new Event(PlayerName, user, nameof(PairHandlerAdapter), EventSeverity.Informational, "Applying Character Data"))); - var charaDataToUpdate = characterData.CheckUpdatedData(applicationBase, _cachedData?.DeepClone() ?? 
new(), Logger, this, forceApplyCustomization, _forceApplyMods); + var charaDataToUpdate = characterData.CheckUpdatedData(applicationBase, _cachedData?.DeepClone() ?? new(), Logger, this, + forceApplyCustomization, _forceApplyMods, suppressForcedModRedraw); if (handlerReady && _forceApplyMods) { @@ -1021,7 +1122,7 @@ internal sealed class PairHandlerAdapter : DisposableMediatorSubscriberBase, IPa Logger.LogDebug("[BASE-{appbase}] Downloading and applying character for {name}", applicationBase, GetPrimaryAliasOrUidSafe()); var forceFullReapply = _forceFullReapply - || LastAppliedApproximateVRAMBytes < 0 || LastAppliedDataTris < 0; + || LastAppliedApproximateVRAMBytes < 0 || LastAppliedDataTris < 0 || LastAppliedApproximateEffectiveTris < 0; DownloadAndApplyCharacter(applicationBase, characterData.DeepClone(), charaDataToUpdate, forceFullReapply); } @@ -1097,12 +1198,183 @@ internal sealed class PairHandlerAdapter : DisposableMediatorSubscriberBase, IPa }, CancellationToken.None); } + private void ScheduleOwnedObjectRetry(ObjectKind kind, HashSet changes) + { + if (kind == ObjectKind.Player || changes.Count == 0) + { + return; + } + + lock (_ownedRetryGate) + { + _pendingOwnedChanges[kind] = new HashSet(changes); + if (!_ownedRetryTask.IsCompleted) + { + return; + } + + _ownedRetryCts = _ownedRetryCts?.CancelRecreate() ?? new CancellationTokenSource(); + var token = _ownedRetryCts.Token; + _ownedRetryTask = Task.Run(() => OwnedObjectRetryLoopAsync(token), CancellationToken.None); + } + } + + private void ClearOwnedObjectRetry(ObjectKind kind) + { + lock (_ownedRetryGate) + { + if (!_pendingOwnedChanges.Remove(kind)) + { + return; + } + } + } + + private void ClearAllOwnedObjectRetries() + { + lock (_ownedRetryGate) + { + _pendingOwnedChanges.Clear(); + } + } + + private bool IsOwnedRetryDataStale() + { + if (!_lastDataReceivedAt.HasValue) + { + return true; + } + + return DateTime.UtcNow - _lastDataReceivedAt.Value > OwnedRetryStaleDataGrace; + } + + private async Task OwnedObjectRetryLoopAsync(CancellationToken token) + { + var delay = OwnedRetryInitialDelay; + try + { + while (!token.IsCancellationRequested) + { + if (IsOwnedRetryDataStale()) + { + ClearAllOwnedObjectRetries(); + return; + } + + Dictionary> pending; + lock (_ownedRetryGate) + { + if (_pendingOwnedChanges.Count == 0) + { + return; + } + + pending = _pendingOwnedChanges.ToDictionary(kvp => kvp.Key, kvp => new HashSet(kvp.Value)); + } + + if (!IsVisible || IsPaused() || !CanApplyNow() || PlayerCharacter == nint.Zero || _charaHandler is null) + { + await Task.Delay(delay, token).ConfigureAwait(false); + delay = IncreaseRetryDelay(delay); + continue; + } + + if ((_applicationTask?.IsCompleted ?? true) == false || (_pairDownloadTask?.IsCompleted ?? 
true) == false) + { + await Task.Delay(delay, token).ConfigureAwait(false); + delay = IncreaseRetryDelay(delay); + continue; + } + + var sanitized = CloneAndSanitizeLastReceived(out _); + if (sanitized is null) + { + await Task.Delay(delay, token).ConfigureAwait(false); + delay = IncreaseRetryDelay(delay); + continue; + } + + bool anyApplied = false; + foreach (var entry in pending) + { + if (!HasAppearanceDataForKind(sanitized, entry.Key)) + { + ClearOwnedObjectRetry(entry.Key); + continue; + } + + var applied = await ApplyCustomizationDataAsync(Guid.NewGuid(), entry, sanitized, token).ConfigureAwait(false); + if (applied) + { + ClearOwnedObjectRetry(entry.Key); + anyApplied = true; + } + } + + if (!anyApplied) + { + await Task.Delay(delay, token).ConfigureAwait(false); + delay = IncreaseRetryDelay(delay); + } + else + { + delay = OwnedRetryInitialDelay; + } + } + } + catch (OperationCanceledException) + { + // ignore + } + catch (Exception ex) + { + Logger.LogDebug(ex, "Owned object retry task failed for {handler}", GetLogIdentifier()); + } + } + + private static TimeSpan IncreaseRetryDelay(TimeSpan delay) + { + var nextMs = Math.Min(delay.TotalMilliseconds * 2, OwnedRetryMaxDelay.TotalMilliseconds); + return TimeSpan.FromMilliseconds(nextMs); + } + + private static bool HasAppearanceDataForKind(CharacterData data, ObjectKind kind) + { + if (data.FileReplacements.TryGetValue(kind, out var replacements) && replacements.Count > 0) + { + return true; + } + + if (data.GlamourerData.TryGetValue(kind, out var glamourer) && !string.IsNullOrEmpty(glamourer)) + { + return true; + } + + if (data.CustomizePlusData.TryGetValue(kind, out var customize) && !string.IsNullOrEmpty(customize)) + { + return true; + } + + return false; + } + protected override void Dispose(bool disposing) { base.Dispose(disposing); SetUploading(false); var name = PlayerName; + if (!string.IsNullOrEmpty(name)) + { + _lastKnownName = name; + } + + var currentAddress = PlayerCharacter; + if (currentAddress != nint.Zero) + { + _lastKnownAddress = currentAddress; + } + var user = GetPrimaryUserDataSafe(); var alias = GetPrimaryAliasOrUidSafe(); Logger.LogDebug("Disposing {name} ({user})", name, alias); @@ -1113,6 +1385,9 @@ internal sealed class PairHandlerAdapter : DisposableMediatorSubscriberBase, IPa _applicationCancellationTokenSource = null; _downloadCancellationTokenSource?.CancelDispose(); _downloadCancellationTokenSource = null; + ClearAllOwnedObjectRetries(); + _ownedRetryCts?.CancelDispose(); + _ownedRetryCts = null; _downloadManager.Dispose(); _charaHandler?.Dispose(); CancelVisibilityGraceTask(); @@ -1125,43 +1400,62 @@ internal sealed class PairHandlerAdapter : DisposableMediatorSubscriberBase, IPa Mediator.Publish(new EventMessage(new Event(name, user, nameof(PairHandlerAdapter), EventSeverity.Informational, "Disposing User"))); } - if (_lifetime.ApplicationStopping.IsCancellationRequested) return; - - if (_dalamudUtil is { IsZoning: false, IsInCutscene: false } && !string.IsNullOrEmpty(name)) + if (IsFrameworkUnloading()) { - Logger.LogTrace("[{applicationId}] Restoring state for {name} ({user})", applicationId, name, alias); - Logger.LogDebug("[{applicationId}] Removing Temp Collection for {name} ({user})", applicationId, name, alias); - ResetPenumbraCollection(reason: nameof(Dispose)); - if (!IsVisible) + Logger.LogWarning("Framework is unloading, skipping disposal for {name} ({user})", name, alias); + return; + } + + var isStopping = _lifetime.ApplicationStopping.IsCancellationRequested; + if (isStopping) + { + 
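// Illustrative sketch (not part of the patch) of the backoff used by the owned-object retry
// loop above: the wait doubles after each unsuccessful pass and is capped at a maximum, and
// the caller resets to the initial delay after a successful apply.
using System;

static class BackoffSketch
{
    static readonly TimeSpan Initial = TimeSpan.FromSeconds(1);
    static readonly TimeSpan Max = TimeSpan.FromSeconds(10);

    public static TimeSpan Next(TimeSpan current)
        => TimeSpan.FromMilliseconds(Math.Min(current.TotalMilliseconds * 2, Max.TotalMilliseconds));

    public static void Demo()
    {
        var delay = Initial;
        for (var attempt = 0; attempt < 6; attempt++)
        {
            Console.WriteLine($"attempt {attempt}: wait {delay.TotalSeconds}s");
            delay = Next(delay); // 1s, 2s, 4s, 8s, 10s, 10s
        }
    }
}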
ResetPenumbraCollection(reason: "DisposeStopping", awaitIpc: false); + ScheduleSafeRevertOnDisposal(applicationId, name, alias); + return; + } + + var canCleanup = !string.IsNullOrEmpty(name) + && _dalamudUtil.IsLoggedIn + && !_dalamudUtil.IsZoning + && !_dalamudUtil.IsInCutscene; + + if (!canCleanup) + { + return; + } + + Logger.LogTrace("[{applicationId}] Restoring state for {name} ({user})", applicationId, name, alias); + Logger.LogDebug("[{applicationId}] Removing Temp Collection for {name} ({user})", applicationId, name, alias); + ResetPenumbraCollection(reason: nameof(Dispose)); + if (!IsVisible) + { + Logger.LogDebug("[{applicationId}] Restoring Glamourer for {name} ({user})", applicationId, name, alias); + _ipcManager.Glamourer.RevertByNameAsync(Logger, name, applicationId).GetAwaiter().GetResult(); + } + else + { + using var cts = new CancellationTokenSource(); + cts.CancelAfter(TimeSpan.FromSeconds(60)); + + var effectiveCachedData = _cachedData ?? _pairStateCache.TryLoad(Ident); + if (effectiveCachedData is not null) { - Logger.LogDebug("[{applicationId}] Restoring Glamourer for {name} ({user})", applicationId, name, alias); - _ipcManager.Glamourer.RevertByNameAsync(Logger, name, applicationId).GetAwaiter().GetResult(); + _cachedData = effectiveCachedData; } - else + + Logger.LogInformation("[{applicationId}] CachedData is null {isNull}, contains things: {contains}", + applicationId, _cachedData == null, _cachedData?.FileReplacements.Any() ?? false); + + foreach (KeyValuePair> item in _cachedData?.FileReplacements ?? []) { - using var cts = new CancellationTokenSource(); - cts.CancelAfter(TimeSpan.FromSeconds(60)); - - var effectiveCachedData = _cachedData ?? _pairStateCache.TryLoad(Ident); - if (effectiveCachedData is not null) + try { - _cachedData = effectiveCachedData; + RevertCustomizationDataAsync(item.Key, name, applicationId, cts.Token).GetAwaiter().GetResult(); } - - Logger.LogInformation("[{applicationId}] CachedData is null {isNull}, contains things: {contains}", - applicationId, _cachedData == null, _cachedData?.FileReplacements.Any() ?? false); - - foreach (KeyValuePair> item in _cachedData?.FileReplacements ?? []) + catch (InvalidOperationException ex) { - try - { - RevertCustomizationDataAsync(item.Key, name, applicationId, cts.Token).GetAwaiter().GetResult(); - } - catch (InvalidOperationException ex) - { - Logger.LogWarning(ex, "Failed disposing player (not present anymore?)"); - break; - } + Logger.LogWarning(ex, "Failed disposing player (not present anymore?)"); + break; } } } @@ -1174,6 +1468,7 @@ internal sealed class PairHandlerAdapter : DisposableMediatorSubscriberBase, IPa { PlayerName = null; _cachedData = null; + _lastSuccessfulDataHash = null; _lastAppliedModdedPaths = null; _needsCollectionRebuild = false; _performanceMetricsCache.Clear(Ident); @@ -1181,9 +1476,145 @@ internal sealed class PairHandlerAdapter : DisposableMediatorSubscriberBase, IPa } } - private async Task ApplyCustomizationDataAsync(Guid applicationId, KeyValuePair> changes, CharacterData charaData, CancellationToken token) + private bool IsFrameworkUnloading() { - if (PlayerCharacter == nint.Zero) return; + try + { + var prop = _framework.GetType().GetProperty("IsFrameworkUnloading"); + if (prop?.PropertyType == typeof(bool)) + { + return (bool)prop.GetValue(_framework)!; + } + } + catch + { + // ignore + } + + return false; + } + + private void ScheduleSafeRevertOnDisposal(Guid applicationId, string? name, string alias) + { + var cleanupName = !string.IsNullOrEmpty(name) ? 
name : _lastKnownName; + var cleanupAddress = _lastKnownAddress != nint.Zero + ? _lastKnownAddress + : _dalamudUtil.GetPlayerCharacterFromCachedTableByIdent(Ident); + var cleanupObjectIndex = _lastKnownObjectIndex; + var cleanupIdent = Ident; + var customizeIds = _customizeIds.Values.Where(id => id.HasValue) + .Select(id => id!.Value) + .Distinct() + .ToList(); + + if (string.IsNullOrEmpty(cleanupName) + && cleanupAddress == nint.Zero + && cleanupObjectIndex == ushort.MaxValue + && customizeIds.Count == 0) + { + return; + } + + _ = Task.Run(() => SafeRevertOnDisposalAsync( + applicationId, + cleanupName, + cleanupAddress, + cleanupObjectIndex, + cleanupIdent, + customizeIds, + alias)); + } + + private async Task SafeRevertOnDisposalAsync( + Guid applicationId, + string? cleanupName, + nint cleanupAddress, + ushort cleanupObjectIndex, + string cleanupIdent, + IReadOnlyList customizeIds, + string alias) + { + try + { + if (IsFrameworkUnloading()) + { + return; + } + + if (!string.IsNullOrEmpty(cleanupName) && _ipcManager.Glamourer.APIAvailable) + { + Logger.LogDebug("[{applicationId}] Restoring Glamourer for {name} ({user})", applicationId, cleanupName, alias); + await _ipcManager.Glamourer.RevertByNameAsync(Logger, cleanupName, applicationId).ConfigureAwait(false); + } + + if (_ipcManager.CustomizePlus.APIAvailable && customizeIds.Count > 0) + { + foreach (var customizeId in customizeIds) + { + await _ipcManager.CustomizePlus.RevertByIdAsync(customizeId).ConfigureAwait(false); + } + } + + var address = cleanupAddress; + if (address == nint.Zero && cleanupObjectIndex != ushort.MaxValue) + { + address = await _dalamudUtil.RunOnFrameworkThread(() => + { + var obj = _dalamudUtil.GetCharacterFromObjectTableByIndex(cleanupObjectIndex); + if (obj is not Dalamud.Game.ClientState.Objects.SubKinds.IPlayerCharacter player) + { + return nint.Zero; + } + + if (!DalamudUtilService.TryGetHashedCID(player, out var hash) + || !string.Equals(hash, cleanupIdent, StringComparison.Ordinal)) + { + return nint.Zero; + } + + return player.Address; + }).ConfigureAwait(false); + } + + if (address == nint.Zero) + { + return; + } + + if (_ipcManager.CustomizePlus.APIAvailable) + { + await _ipcManager.CustomizePlus.RevertAsync(address).ConfigureAwait(false); + } + + if (_ipcManager.Heels.APIAvailable) + { + await _ipcManager.Heels.RestoreOffsetForPlayerAsync(address).ConfigureAwait(false); + } + + if (_ipcManager.Honorific.APIAvailable) + { + await _ipcManager.Honorific.ClearTitleAsync(address).ConfigureAwait(false); + } + + if (_ipcManager.Moodles.APIAvailable) + { + await _ipcManager.Moodles.RevertStatusAsync(address).ConfigureAwait(false); + } + + if (_ipcManager.PetNames.APIAvailable) + { + await _ipcManager.PetNames.ClearPlayerData(address).ConfigureAwait(false); + } + } + catch (Exception ex) + { + Logger.LogDebug(ex, "Failed shutdown cleanup for {name}", cleanupName ?? 
cleanupIdent); + } + } + + private async Task ApplyCustomizationDataAsync(Guid applicationId, KeyValuePair> changes, CharacterData charaData, CancellationToken token) + { + if (PlayerCharacter == nint.Zero) return false; var ptr = PlayerCharacter; var handler = changes.Key switch @@ -1199,14 +1630,29 @@ internal sealed class PairHandlerAdapter : DisposableMediatorSubscriberBase, IPa { if (handler.Address == nint.Zero) { - return; + return false; } Logger.LogDebug("[{applicationId}] Applying Customization Data for {handler}", applicationId, handler); - await _dalamudUtil.WaitWhileCharacterIsDrawing(Logger, handler, applicationId, 30000, token).ConfigureAwait(false); + await handler.IsBeingDrawnRunOnFrameworkAsync().ConfigureAwait(false); + if (handler.ObjectKind != ObjectKind.Player + && handler.CurrentDrawCondition == GameObjectHandler.DrawCondition.DrawObjectZero) + { + Logger.LogDebug("[{applicationId}] Skipping customization apply for {handler}, draw object not available", applicationId, handler); + return false; + } + + var drawTimeoutMs = handler.ObjectKind == ObjectKind.Player ? 30000 : 5000; + var fullyLoadedTimeoutMs = handler.ObjectKind == ObjectKind.Player ? FullyLoadedTimeoutMsPlayer : FullyLoadedTimeoutMsOther; + await _dalamudUtil.WaitWhileCharacterIsDrawing(Logger, handler, applicationId, drawTimeoutMs, token).ConfigureAwait(false); if (handler.Address != nint.Zero) { - await _actorObjectService.WaitForFullyLoadedAsync(handler.Address, token).ConfigureAwait(false); + var fullyLoaded = await _actorObjectService.WaitForFullyLoadedAsync(handler.Address, token, fullyLoadedTimeoutMs).ConfigureAwait(false); + if (!fullyLoaded) + { + Logger.LogDebug("[{applicationId}] Timed out waiting for {handler} to fully load, skipping customization apply", applicationId, handler); + return false; + } } token.ThrowIfCancellationRequested(); @@ -1270,6 +1716,8 @@ internal sealed class PairHandlerAdapter : DisposableMediatorSubscriberBase, IPa { await _ipcManager.Penumbra.RedrawAsync(Logger, handler, applicationId, token).ConfigureAwait(false); } + + return true; } finally { @@ -1429,6 +1877,7 @@ internal sealed class PairHandlerAdapter : DisposableMediatorSubscriberBase, IPa try { bool skipDownscaleForPair = ShouldSkipDownscale(); + bool skipDecimationForPair = ShouldSkipDecimation(); var user = GetPrimaryUserData(); Dictionary<(string GamePath, string? 
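// Illustrative sketch (not part of the patch) of the idea behind the per-kind load wait in
// ApplyCustomizationDataAsync above: players get a long timeout, owned objects a short one,
// and a timeout reports false instead of throwing so the caller can schedule a retry. The
// real service takes an explicit timeout argument; this sketch approximates that with a
// linked CancellationTokenSource, and waitForLoad is a stand-in for the actor-load wait.
using System;
using System.Threading;
using System.Threading.Tasks;

static class LoadWaitSketch
{
    public static async Task<bool> WaitFullyLoadedAsync(
        bool isPlayer, Func<CancellationToken, Task<bool>> waitForLoad, CancellationToken ct)
    {
        var timeoutMs = isPlayer ? 30_000 : 5_000;
        using var timeout = CancellationTokenSource.CreateLinkedTokenSource(ct);
        timeout.CancelAfter(timeoutMs);

        try
        {
            return await waitForLoad(timeout.Token).ConfigureAwait(false);
        }
        catch (OperationCanceledException) when (!ct.IsCancellationRequested)
        {
            return false; // timed out rather than externally cancelled
        }
    }
}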
Hash), string> moddedPaths; List missingReplacements = []; @@ -1467,7 +1916,7 @@ internal sealed class PairHandlerAdapter : DisposableMediatorSubscriberBase, IPa } var handlerForDownload = _charaHandler; - _pairDownloadTask = Task.Run(async () => await _downloadManager.DownloadFiles(handlerForDownload, toDownloadReplacements, downloadToken, skipDownscaleForPair).ConfigureAwait(false)); + _pairDownloadTask = Task.Run(async () => await _downloadManager.DownloadFiles(handlerForDownload, toDownloadReplacements, downloadToken, skipDownscaleForPair, skipDecimationForPair).ConfigureAwait(false)); await _pairDownloadTask.ConfigureAwait(false); @@ -1492,6 +1941,20 @@ internal sealed class PairHandlerAdapter : DisposableMediatorSubscriberBase, IPa } } + if (!skipDecimationForPair) + { + var downloadedModelHashes = toDownloadReplacements + .Where(static replacement => replacement.GamePaths.Any(static path => path.EndsWith(".mdl", StringComparison.OrdinalIgnoreCase))) + .Select(static replacement => replacement.Hash) + .Distinct(StringComparer.OrdinalIgnoreCase) + .ToList(); + + if (downloadedModelHashes.Count > 0) + { + await _modelDecimationService.WaitForPendingJobsAsync(downloadedModelHashes, downloadToken).ConfigureAwait(false); + } + } + toDownloadReplacements = TryCalculateModdedDictionary(applicationBase, charaData, out moddedPaths, downloadToken); missingReplacements = toDownloadReplacements; @@ -1577,37 +2040,25 @@ internal sealed class PairHandlerAdapter : DisposableMediatorSubscriberBase, IPa RecordFailure("Handler not available for application", "HandlerUnavailable"); return; } - _applicationCancellationTokenSource = _applicationCancellationTokenSource.CancelRecreate() ?? new CancellationTokenSource(); - if (_applicationTask != null && !_applicationTask.IsCompleted) + var appToken = _applicationCancellationTokenSource?.Token; + while ((!_applicationTask?.IsCompleted ?? false) + && !downloadToken.IsCancellationRequested + && (!appToken?.IsCancellationRequested ?? false)) { - Logger.LogDebug("[BASE-{appBase}] Cancelling current data application (Id: {id}) for player ({handler})", applicationBase, _applicationId, PlayerName); - - var timeoutCts = new CancellationTokenSource(TimeSpan.FromSeconds(5)); - var combinedCts = CancellationTokenSource.CreateLinkedTokenSource(downloadToken, timeoutCts.Token); - - try - { - await _applicationTask.WaitAsync(combinedCts.Token).ConfigureAwait(false); - } - catch (OperationCanceledException) - { - Logger.LogWarning("[BASE-{appBase}] Timeout waiting for application task {id} to complete, proceeding anyway", applicationBase, _applicationId); - } - finally - { - timeoutCts.Dispose(); - combinedCts.Dispose(); - } + Logger.LogDebug("[BASE-{appBase}] Waiting for current data application (Id: {id}) for player ({handler}) to finish", + applicationBase, _applicationId, PlayerName); + await Task.Delay(250).ConfigureAwait(false); } - if (downloadToken.IsCancellationRequested) + if (downloadToken.IsCancellationRequested || (appToken?.IsCancellationRequested ?? false)) { _forceFullReapply = true; RecordFailure("Application cancelled", "Cancellation"); return; } + _applicationCancellationTokenSource = _applicationCancellationTokenSource.CancelRecreate() ?? 
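// Illustrative sketch (not part of the patch) of the decimation wait added above: collect the
// distinct hashes of downloaded .mdl replacements, then wait for their background jobs before
// recalculating the modded dictionary. waitForJobs stands in for the decimation service's
// pending-job wait; the tuple shape here is a simplification of FileReplacementData.
using System;
using System.Collections.Generic;
using System.Linq;
using System.Threading;
using System.Threading.Tasks;

static class DecimationWaitSketch
{
    public static async Task WaitForDownloadedModelsAsync(
        IEnumerable<(string Hash, string[] GamePaths)> downloads,
        Func<IReadOnlyList<string>, CancellationToken, Task> waitForJobs,
        CancellationToken ct)
    {
        var modelHashes = downloads
            .Where(d => d.GamePaths.Any(p => p.EndsWith(".mdl", StringComparison.OrdinalIgnoreCase)))
            .Select(d => d.Hash)
            .Distinct(StringComparer.OrdinalIgnoreCase)
            .ToList();

        if (modelHashes.Count > 0)
            await waitForJobs(modelHashes, ct).ConfigureAwait(false);
    }
}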
new CancellationTokenSource(); var token = _applicationCancellationTokenSource.Token; _applicationTask = ApplyCharacterDataAsync(applicationBase, handlerForApply, charaData, updatedData, updateModdedPaths, updateManip, moddedPaths, wantsModApply, pendingModReapply, token); @@ -1630,7 +2081,17 @@ internal sealed class PairHandlerAdapter : DisposableMediatorSubscriberBase, IPa await _dalamudUtil.WaitWhileCharacterIsDrawing(Logger, handlerForApply, _applicationId, 30000, token).ConfigureAwait(false); if (handlerForApply.Address != nint.Zero) { - await _actorObjectService.WaitForFullyLoadedAsync(handlerForApply.Address, token).ConfigureAwait(false); + var fullyLoaded = await _actorObjectService.WaitForFullyLoadedAsync(handlerForApply.Address, token, FullyLoadedTimeoutMsPlayer).ConfigureAwait(false); + if (!fullyLoaded) + { + Logger.LogDebug("[BASE-{applicationId}] Timed out waiting for {handler} to fully load, caching data for later application", + applicationBase, GetLogIdentifier()); + _cachedData = charaData; + _pairStateCache.Store(Ident, charaData); + _forceFullReapply = true; + RecordFailure("Actor not fully loaded within timeout", "FullyLoadedTimeout"); + return; + } } token.ThrowIfCancellationRequested(); @@ -1669,11 +2130,36 @@ internal sealed class PairHandlerAdapter : DisposableMediatorSubscriberBase, IPa return; } + SplitPapMappings(moddedPaths, out var withoutPap, out var papOnly); + await _ipcManager.Penumbra.AssignTemporaryCollectionAsync(Logger, penumbraCollection, objIndex.Value).ConfigureAwait(false); - await _ipcManager.Penumbra.SetTemporaryModsAsync(Logger, _applicationId, penumbraCollection, - moddedPaths.ToDictionary(k => k.Key.GamePath, k => k.Value, StringComparer.Ordinal)).ConfigureAwait(false); - _lastAppliedModdedPaths = new Dictionary<(string GamePath, string? Hash), string>(moddedPaths, moddedPaths.Comparer); + await _ipcManager.Penumbra.SetTemporaryModsAsync( + Logger, _applicationId, penumbraCollection, + withoutPap.ToDictionary(k => k.Key.GamePath, k => k.Value, StringComparer.Ordinal)) + .ConfigureAwait(false); + + await _ipcManager.Penumbra.RedrawAsync(Logger, handlerForApply, _applicationId, token).ConfigureAwait(false); + if (handlerForApply.Address != nint.Zero) + await _actorObjectService.WaitForFullyLoadedAsync(handlerForApply.Address, token).ConfigureAwait(false); + + var removedPap = await StripIncompatiblePapAsync(handlerForApply, charaData, papOnly, token).ConfigureAwait(false); + if (removedPap > 0) + { + Logger.LogTrace("[{applicationId}] Removed {removedPap} incompatible PAP mappings found for {handler}", _applicationId, removedPap, GetLogIdentifier()); + } + + var merged = new Dictionary<(string GamePath, string? Hash), string>(withoutPap, withoutPap.Comparer); + foreach (var kv in papOnly) + merged[kv.Key] = kv.Value; + + await _ipcManager.Penumbra.SetTemporaryModsAsync( + Logger, _applicationId, penumbraCollection, + merged.ToDictionary(k => k.Key.GamePath, k => k.Value, StringComparer.Ordinal)) + .ConfigureAwait(false); + + _lastAppliedModdedPaths = new Dictionary<(string GamePath, string? 
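// Illustrative sketch (not part of the patch) of the two-phase mod application above: the
// modded-path map is split into .pap and non-.pap entries so everything else is applied and
// redrawn first, the animation set is validated, and the surviving .pap entries are merged
// back for the second SetTemporaryMods call. The validation step itself is omitted here.
using System;
using System.Collections.Generic;

static class PapSplitSketch
{
    public static Dictionary<(string GamePath, string? Hash), string> SplitAndMerge(
        Dictionary<(string GamePath, string? Hash), string> moddedPaths)
    {
        var withoutPap = new Dictionary<(string, string?), string>(moddedPaths.Comparer);
        var papOnly = new Dictionary<(string, string?), string>(moddedPaths.Comparer);

        foreach (var kv in moddedPaths)
        {
            if (kv.Key.GamePath.EndsWith(".pap", StringComparison.OrdinalIgnoreCase))
                papOnly[kv.Key] = kv.Value;
            else
                withoutPap[kv.Key] = kv.Value;
        }

        // First pass would apply withoutPap here; papOnly is filtered, then merged back in.
        var merged = new Dictionary<(string, string?), string>(withoutPap, withoutPap.Comparer);
        foreach (var kv in papOnly)
            merged[kv.Key] = kv.Value;

        return merged;
    }
}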
Hash), string>(merged, merged.Comparer); + LastAppliedDataBytes = -1; foreach (var path in moddedPaths.Values.Distinct(StringComparer.OrdinalIgnoreCase).Select(v => new FileInfo(v)).Where(p => p.Exists)) { @@ -1692,7 +2178,15 @@ internal sealed class PairHandlerAdapter : DisposableMediatorSubscriberBase, IPa foreach (var kind in updatedData) { - await ApplyCustomizationDataAsync(_applicationId, kind, charaData, token).ConfigureAwait(false); + var applied = await ApplyCustomizationDataAsync(_applicationId, kind, charaData, token).ConfigureAwait(false); + if (applied) + { + ClearOwnedObjectRetry(kind.Key); + } + else if (kind.Key != ObjectKind.Player) + { + ScheduleOwnedObjectRetry(kind.Key, kind.Value); + } token.ThrowIfCancellationRequested(); } @@ -1706,17 +2200,19 @@ internal sealed class PairHandlerAdapter : DisposableMediatorSubscriberBase, IPa _needsCollectionRebuild = false; if (LastAppliedApproximateVRAMBytes < 0 || LastAppliedApproximateEffectiveVRAMBytes < 0) { - _playerPerformanceService.ComputeAndAutoPauseOnVRAMUsageThresholds(this, charaData, new List()); - } - if (LastAppliedDataTris < 0) - { - await _playerPerformanceService.CheckTriangleUsageThresholds(this, charaData).ConfigureAwait(false); - } + _playerPerformanceService.ComputeAndAutoPauseOnVRAMUsageThresholds(this, charaData, new List()); + } - StorePerformanceMetrics(charaData); - _lastSuccessfulApplyAt = DateTime.UtcNow; - ClearFailureState(); - Logger.LogDebug("[{applicationId}] Application finished", _applicationId); + if (LastAppliedDataTris < 0 || LastAppliedApproximateEffectiveTris < 0) + { + await _playerPerformanceService.CheckTriangleUsageThresholds(this, charaData).ConfigureAwait(false); + } + + StorePerformanceMetrics(charaData); + _lastSuccessfulDataHash = GetDataHashSafe(charaData); + _lastSuccessfulApplyAt = DateTime.UtcNow; + ClearFailureState(); + Logger.LogDebug("[{applicationId}] Application finished", _applicationId); } catch (OperationCanceledException) { @@ -1732,19 +2228,19 @@ internal sealed class PairHandlerAdapter : DisposableMediatorSubscriberBase, IPa { IsVisible = false; _forceApplyMods = true; - _cachedData = charaData; - _pairStateCache.Store(Ident, charaData); - _forceFullReapply = true; - Logger.LogDebug("[{applicationId}] Cancelled, player turned null during application", _applicationId); + _cachedData = charaData; + _pairStateCache.Store(Ident, charaData); + _forceFullReapply = true; + Logger.LogDebug("[{applicationId}] Cancelled, player turned null during application", _applicationId); + } + else + { + Logger.LogWarning(ex, "[{applicationId}] Cancelled", _applicationId); + _forceFullReapply = true; + } + RecordFailure($"Application failed: {ex.Message}", "Exception"); } - else - { - Logger.LogWarning(ex, "[{applicationId}] Cancelled", _applicationId); - _forceFullReapply = true; - } - RecordFailure($"Application failed: {ex.Message}", "Exception"); } -} private void FrameworkUpdate() { @@ -1827,6 +2323,7 @@ internal sealed class PairHandlerAdapter : DisposableMediatorSubscriberBase, IPa { IsVisible = false; _charaHandler?.Invalidate(); + ClearAllOwnedObjectRetries(); _downloadCancellationTokenSource?.CancelDispose(); _downloadCancellationTokenSource = null; if (logChange) @@ -1839,6 +2336,7 @@ internal sealed class PairHandlerAdapter : DisposableMediatorSubscriberBase, IPa { PlayerName = name; _charaHandler = _gameObjectHandlerFactory.Create(ObjectKind.Player, () => _dalamudUtil.GetPlayerCharacterFromCachedTableByIdent(Ident), isWatched: false).GetAwaiter().GetResult(); + 
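// Illustrative sketch (not part of the patch) of the applied-bytes accumulation in the hunk
// above: the on-disk sizes of the distinct resolved files are summed after the mods are set,
// feeding the pair's LastAppliedDataBytes metric.
using System;
using System.Collections.Generic;
using System.IO;
using System.Linq;

static class AppliedBytesSketch
{
    public static long SumAppliedBytes(IEnumerable<string> resolvedFilePaths)
        => resolvedFilePaths
            .Distinct(StringComparer.OrdinalIgnoreCase)
            .Select(p => new FileInfo(p))
            .Where(f => f.Exists)
            .Sum(f => f.Length);
}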
UpdateLastKnownActor(_charaHandler.Address, name); var user = GetPrimaryUserData(); if (!string.IsNullOrEmpty(user.UID)) @@ -1953,6 +2451,7 @@ internal sealed class PairHandlerAdapter : DisposableMediatorSubscriberBase, IPa ConcurrentDictionary<(string GamePath, string? Hash), string> outputDict = new(); bool hasMigrationChanges = false; bool skipDownscaleForPair = ShouldSkipDownscale(); + bool skipDecimationForPair = ShouldSkipDecimation(); try { @@ -1978,14 +2477,37 @@ internal sealed class PairHandlerAdapter : DisposableMediatorSubscriberBase, IPa if (string.IsNullOrEmpty(new FileInfo(fileCache.ResolvedFilepath).Extension)) { hasMigrationChanges = true; - fileCache = _fileDbManager.MigrateFileHashToExtension(fileCache, item.GamePaths[0].Split(".")[^1]); + var anyGamePath = item.GamePaths.FirstOrDefault(); + + if (!string.IsNullOrEmpty(anyGamePath)) + { + var ext = Path.GetExtension(anyGamePath); + var extNoDot = ext.StartsWith('.') ? ext[1..] : ext; + + if (!string.IsNullOrEmpty(extNoDot)) + { + hasMigrationChanges = true; + fileCache = _fileDbManager.MigrateFileHashToExtension(fileCache, extNoDot); + } + } } foreach (var gamePath in item.GamePaths) { + var mode = _configService.Current.AnimationValidationMode; + + if (mode != AnimationValidationMode.Unsafe + && gamePath.EndsWith(".pap", StringComparison.OrdinalIgnoreCase) + && !string.IsNullOrEmpty(item.Hash) + && _blockedPapHashes.ContainsKey(item.Hash)) + { + continue; + } + var preferredPath = skipDownscaleForPair ? fileCache.ResolvedFilepath : _textureDownscaleService.GetPreferredPath(item.Hash, fileCache.ResolvedFilepath); + outputDict[(gamePath, item.Hash)] = preferredPath; } } @@ -2127,6 +2649,7 @@ internal sealed class PairHandlerAdapter : DisposableMediatorSubscriberBase, IPa _cachedData = null; LastAppliedDataBytes = -1; LastAppliedDataTris = -1; + LastAppliedApproximateEffectiveTris = -1; LastAppliedApproximateVRAMBytes = -1; LastAppliedApproximateEffectiveVRAMBytes = -1; } @@ -2185,6 +2708,7 @@ internal sealed class PairHandlerAdapter : DisposableMediatorSubscriberBase, IPa if (descriptor.Address == nint.Zero) return; + UpdateLastKnownActor(descriptor); RefreshTrackedHandler(descriptor); QueueActorInitialization(descriptor); } @@ -2295,7 +2819,7 @@ internal sealed class PairHandlerAdapter : DisposableMediatorSubscriberBase, IPa HandleVisibilityLoss(logChange: false); } - private bool TryResolveDescriptorHash(ActorObjectService.ActorDescriptor descriptor, out string hashedCid) + private static bool TryResolveDescriptorHash(ActorObjectService.ActorDescriptor descriptor, out string hashedCid) { hashedCid = descriptor.HashedContentId ?? string.Empty; if (!string.IsNullOrEmpty(hashedCid)) @@ -2308,6 +2832,129 @@ internal sealed class PairHandlerAdapter : DisposableMediatorSubscriberBase, IPa return !string.IsNullOrEmpty(hashedCid); } + private void UpdateLastKnownActor(ActorObjectService.ActorDescriptor descriptor) + { + _lastKnownAddress = descriptor.Address; + _lastKnownObjectIndex = descriptor.ObjectIndex; + if (!string.IsNullOrEmpty(descriptor.Name)) + { + _lastKnownName = descriptor.Name; + } + } + + private void UpdateLastKnownActor(nint address, string? name) + { + if (address != nint.Zero) + { + _lastKnownAddress = address; + } + + if (!string.IsNullOrEmpty(name)) + { + _lastKnownName = name; + } + } + + private static void SplitPapMappings( + Dictionary<(string GamePath, string? Hash), string> moddedPaths, + out Dictionary<(string GamePath, string? Hash), string> withoutPap, + out Dictionary<(string GamePath, string? 
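// Illustrative sketch (not part of the patch) of the extension migration in
// TryCalculateModdedDictionary above: when a cached file has no extension, the extension is
// derived from the first game path via Path.GetExtension instead of the earlier
// Split(".")[^1] approach, and migration is skipped when nothing usable is found.
using System;
using System.Collections.Generic;
using System.IO;
using System.Linq;

static class ExtensionMigrationSketch
{
    public static string? GetExtensionForMigration(IReadOnlyList<string> gamePaths)
    {
        var anyGamePath = gamePaths.FirstOrDefault();
        if (string.IsNullOrEmpty(anyGamePath)) return null;

        var ext = Path.GetExtension(anyGamePath);           // e.g. ".mdl"
        var extNoDot = ext.StartsWith('.') ? ext[1..] : ext; // strip the leading dot
        return string.IsNullOrEmpty(extNoDot) ? null : extNoDot;
    }
}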
Hash), string> papOnly)
+    {
+        withoutPap = new(moddedPaths.Comparer);
+        papOnly = new(moddedPaths.Comparer);
+
+        foreach (var kv in moddedPaths)
+        {
+            var gamePath = kv.Key.GamePath;
+            if (gamePath.EndsWith(".pap", StringComparison.OrdinalIgnoreCase))
+                papOnly[kv.Key] = kv.Value;
+            else
+                withoutPap[kv.Key] = kv.Value;
+        }
+    }
+
+    private async Task<int> StripIncompatiblePapAsync(
+        GameObjectHandler handlerForApply,
+        CharacterData charaData,
+        Dictionary<(string GamePath, string? Hash), string> papOnly,
+        CancellationToken token)
+    {
+        var mode = _configService.Current.AnimationValidationMode;
+        var allowOneBasedShift = _configService.Current.AnimationAllowOneBasedShift;
+        var allowNeighborIndexTolerance = _configService.Current.AnimationAllowNeighborIndexTolerance;
+
+        if (mode == AnimationValidationMode.Unsafe || papOnly.Count == 0)
+            return 0;
+
+        var boneIndices = await _dalamudUtil.RunOnFrameworkThread(
+            () => _modelAnalyzer.GetSkeletonBoneIndices(handlerForApply))
+            .ConfigureAwait(false);
+
+        if (boneIndices == null || boneIndices.Count == 0)
+        {
+            var removedCount = papOnly.Count;
+            papOnly.Clear();
+            return removedCount;
+        }
+
+        var localBoneSets = new Dictionary>(StringComparer.OrdinalIgnoreCase);
+        foreach (var (rawKey, list) in boneIndices)
+        {
+            var key = XivDataAnalyzer.CanonicalizeSkeletonKey(rawKey);
+            if (string.IsNullOrEmpty(key)) continue;
+
+            if (!localBoneSets.TryGetValue(key, out var set))
+                localBoneSets[key] = set = [];
+
+            foreach (var v in list)
+                set.Add(v);
+        }
+
+        int removed = 0;
+
+        foreach (var hash in papOnly.Keys.Select(k => k.Hash).Where(h => !string.IsNullOrEmpty(h)).Distinct(StringComparer.OrdinalIgnoreCase).ToList())
+        {
+            token.ThrowIfCancellationRequested();
+
+            var papIndices = await _dalamudUtil.RunOnFrameworkThread(
+                () => _modelAnalyzer.GetBoneIndicesFromPap(hash!))
+                .ConfigureAwait(false);
+
+            if (papIndices == null || papIndices.Count == 0)
+                continue;
+
+            if (papIndices.All(k => k.Value.DefaultIfEmpty().Max() <= 105))
+                continue;
+
+            if (XivDataAnalyzer.IsPapCompatible(localBoneSets, papIndices, mode, allowOneBasedShift, allowNeighborIndexTolerance, out var reason))
+                continue;
+
+            var keysToRemove = papOnly.Keys.Where(k => string.Equals(k.Hash, hash, StringComparison.OrdinalIgnoreCase)).ToList();
+            foreach (var k in keysToRemove)
+                papOnly.Remove(k);
+
+            removed += keysToRemove.Count;
+
+            if (_blockedPapHashes.TryAdd(hash!, 0))
+                Logger.LogWarning("Blocked remote object PAP (hash {hash}) for {handler}: {reason}", hash, GetLogIdentifier(), reason);
+
+            if (charaData.FileReplacements.TryGetValue(ObjectKind.Player, out var list))
+            {
+                list.RemoveAll(r => string.Equals(r.Hash, hash, StringComparison.OrdinalIgnoreCase)
+                    && r.GamePaths.Any(p => p.EndsWith(".pap", StringComparison.OrdinalIgnoreCase)));
+            }
+        }
+
+        var nullHashKeys = papOnly.Keys.Where(k => string.IsNullOrEmpty(k.Hash)).ToList();
+        foreach (var k in nullHashKeys)
+        {
+            papOnly.Remove(k);
+            removed++;
+        }
+
+        return removed;
+    }
+
     private async Task ApplyCustomizeAsync(nint address, string customizeData, ObjectKind kind)
     {
         _customizeIds[kind] = await _ipcManager.CustomizePlus.SetBodyScaleAsync(address, customizeData).ConfigureAwait(false);
diff --git a/LightlessSync/PlayerData/Pairs/PairHandlerAdapterFactory.cs b/LightlessSync/PlayerData/Pairs/PairHandlerAdapterFactory.cs
index 5169820..47336eb 100644
--- a/LightlessSync/PlayerData/Pairs/PairHandlerAdapterFactory.cs
+++ b/LightlessSync/PlayerData/Pairs/PairHandlerAdapterFactory.cs
@@ -1,12 +1,15 @@
 using LightlessSync.FileCache;
 using
LightlessSync.Interop.Ipc; +using LightlessSync.LightlessConfiguration; using LightlessSync.PlayerData.Factories; using LightlessSync.Services; using LightlessSync.Services.ActorTracking; using LightlessSync.Services.Mediator; +using LightlessSync.Services.ModelDecimation; using LightlessSync.Services.PairProcessing; using LightlessSync.Services.ServerConfiguration; using LightlessSync.Services.TextureCompression; +using Dalamud.Plugin.Services; using Microsoft.Extensions.DependencyInjection; using Microsoft.Extensions.Hosting; using Microsoft.Extensions.Logging; @@ -25,13 +28,18 @@ internal sealed class PairHandlerAdapterFactory : IPairHandlerAdapterFactory private readonly IServiceProvider _serviceProvider; private readonly IHostApplicationLifetime _lifetime; private readonly FileCacheManager _fileCacheManager; + private readonly PlayerPerformanceConfigService _playerPerformanceConfigService; private readonly PlayerPerformanceService _playerPerformanceService; private readonly PairProcessingLimiter _pairProcessingLimiter; private readonly ServerConfigurationManager _serverConfigManager; private readonly TextureDownscaleService _textureDownscaleService; + private readonly ModelDecimationService _modelDecimationService; private readonly PairStateCache _pairStateCache; private readonly PairPerformanceMetricsCache _pairPerformanceMetricsCache; private readonly PenumbraTempCollectionJanitor _tempCollectionJanitor; + private readonly LightlessConfigService _configService; + private readonly XivDataAnalyzer _modelAnalyzer; + private readonly IFramework _framework; public PairHandlerAdapterFactory( ILoggerFactory loggerFactory, @@ -42,15 +50,20 @@ internal sealed class PairHandlerAdapterFactory : IPairHandlerAdapterFactory FileDownloadManagerFactory fileDownloadManagerFactory, PluginWarningNotificationService pluginWarningNotificationManager, IServiceProvider serviceProvider, + IFramework framework, IHostApplicationLifetime lifetime, FileCacheManager fileCacheManager, + PlayerPerformanceConfigService playerPerformanceConfigService, PlayerPerformanceService playerPerformanceService, PairProcessingLimiter pairProcessingLimiter, ServerConfigurationManager serverConfigManager, TextureDownscaleService textureDownscaleService, + ModelDecimationService modelDecimationService, PairStateCache pairStateCache, PairPerformanceMetricsCache pairPerformanceMetricsCache, - PenumbraTempCollectionJanitor tempCollectionJanitor) + PenumbraTempCollectionJanitor tempCollectionJanitor, + XivDataAnalyzer modelAnalyzer, + LightlessConfigService configService) { _loggerFactory = loggerFactory; _mediator = mediator; @@ -60,15 +73,20 @@ internal sealed class PairHandlerAdapterFactory : IPairHandlerAdapterFactory _fileDownloadManagerFactory = fileDownloadManagerFactory; _pluginWarningNotificationManager = pluginWarningNotificationManager; _serviceProvider = serviceProvider; + _framework = framework; _lifetime = lifetime; _fileCacheManager = fileCacheManager; + _playerPerformanceConfigService = playerPerformanceConfigService; _playerPerformanceService = playerPerformanceService; _pairProcessingLimiter = pairProcessingLimiter; _serverConfigManager = serverConfigManager; _textureDownscaleService = textureDownscaleService; + _modelDecimationService = modelDecimationService; _pairStateCache = pairStateCache; _pairPerformanceMetricsCache = pairPerformanceMetricsCache; _tempCollectionJanitor = tempCollectionJanitor; + _modelAnalyzer = modelAnalyzer; + _configService = configService; } public IPairHandlerAdapter Create(string 
ident) @@ -86,15 +104,20 @@ internal sealed class PairHandlerAdapterFactory : IPairHandlerAdapterFactory downloadManager, _pluginWarningNotificationManager, dalamudUtilService, + _framework, actorObjectService, _lifetime, _fileCacheManager, + _playerPerformanceConfigService, _playerPerformanceService, _pairProcessingLimiter, _serverConfigManager, _textureDownscaleService, + _modelDecimationService, _pairStateCache, _pairPerformanceMetricsCache, - _tempCollectionJanitor); + _tempCollectionJanitor, + _modelAnalyzer, + _configService); } } diff --git a/LightlessSync/PlayerData/Pairs/PairHandlerRegistry.cs b/LightlessSync/PlayerData/Pairs/PairHandlerRegistry.cs index ec05ee7..881c35c 100644 --- a/LightlessSync/PlayerData/Pairs/PairHandlerRegistry.cs +++ b/LightlessSync/PlayerData/Pairs/PairHandlerRegistry.cs @@ -89,7 +89,7 @@ public sealed class PairHandlerRegistry : IDisposable } if (handler.LastReceivedCharacterData is not null && - (handler.LastAppliedApproximateVRAMBytes < 0 || handler.LastAppliedDataTris < 0)) + (handler.LastAppliedApproximateVRAMBytes < 0 || handler.LastAppliedDataTris < 0 || handler.LastAppliedApproximateEffectiveTris < 0)) { handler.ApplyLastReceivedData(forced: true); } diff --git a/LightlessSync/PlayerData/Pairs/PairLedger.cs b/LightlessSync/PlayerData/Pairs/PairLedger.cs index b151e1f..fdb226e 100644 --- a/LightlessSync/PlayerData/Pairs/PairLedger.cs +++ b/LightlessSync/PlayerData/Pairs/PairLedger.cs @@ -258,7 +258,8 @@ public sealed class PairLedger : DisposableMediatorSubscriberBase if (handler.LastAppliedApproximateVRAMBytes >= 0 && handler.LastAppliedDataTris >= 0 - && handler.LastAppliedApproximateEffectiveVRAMBytes >= 0) + && handler.LastAppliedApproximateEffectiveVRAMBytes >= 0 + && handler.LastAppliedApproximateEffectiveTris >= 0) { continue; } diff --git a/LightlessSync/PlayerData/Pairs/PairPerformanceMetricsCache.cs b/LightlessSync/PlayerData/Pairs/PairPerformanceMetricsCache.cs index 110d845..5d83cee 100644 --- a/LightlessSync/PlayerData/Pairs/PairPerformanceMetricsCache.cs +++ b/LightlessSync/PlayerData/Pairs/PairPerformanceMetricsCache.cs @@ -5,7 +5,8 @@ namespace LightlessSync.PlayerData.Pairs; public readonly record struct PairPerformanceMetrics( long TriangleCount, long ApproximateVramBytes, - long ApproximateEffectiveVramBytes); + long ApproximateEffectiveVramBytes, + long ApproximateEffectiveTris); /// /// caches performance metrics keyed by pair ident diff --git a/LightlessSync/PlayerData/Pairs/VisibleUserDataDistributor.cs b/LightlessSync/PlayerData/Pairs/VisibleUserDataDistributor.cs index f71080a..35bf3ed 100644 --- a/LightlessSync/PlayerData/Pairs/VisibleUserDataDistributor.cs +++ b/LightlessSync/PlayerData/Pairs/VisibleUserDataDistributor.cs @@ -50,6 +50,7 @@ public class VisibleUserDataDistributor : DisposableMediatorSubscriberBase }); Mediator.Subscribe(this, (_) => PushToAllVisibleUsers()); + Mediator.Subscribe(this, (msg) => HandlePairOnline(msg.PairIdent)); Mediator.Subscribe(this, (_) => { _fileTransferManager.CancelUpload(); @@ -111,6 +112,20 @@ public class VisibleUserDataDistributor : DisposableMediatorSubscriberBase _ = PushCharacterDataAsync(forced); } + private void HandlePairOnline(PairUniqueIdentifier pairIdent) + { + if (!_apiController.IsConnected || !_pairLedger.IsPairVisible(pairIdent)) + { + return; + } + + if (_pairLedger.GetHandler(pairIdent)?.UserData is { } user) + { + _usersToPushDataTo.Add(user); + PushCharacterData(forced: true); + } + } + private async Task PushCharacterDataAsync(bool forced = false) { await 
_pushLock.WaitAsync(_runtimeCts.Token).ConfigureAwait(false); @@ -152,5 +167,6 @@ public class VisibleUserDataDistributor : DisposableMediatorSubscriberBase } } - private List GetVisibleUsers() => [.. _pairLedger.GetVisiblePairs().Select(connection => connection.User)]; + private List GetVisibleUsers() + => [.. _pairLedger.GetVisiblePairs().Where(connection => connection.IsOnline).Select(connection => connection.User)]; } diff --git a/LightlessSync/Plugin.cs b/LightlessSync/Plugin.cs index 9e80ff7..88382c6 100644 --- a/LightlessSync/Plugin.cs +++ b/LightlessSync/Plugin.cs @@ -40,6 +40,7 @@ using System.Reflection; using OtterTex; using LightlessSync.Services.LightFinder; using LightlessSync.Services.PairProcessing; +using LightlessSync.Services.ModelDecimation; using LightlessSync.UI.Models; namespace LightlessSync; @@ -105,6 +106,7 @@ public sealed class Plugin : IDalamudPlugin services.AddSingleton(new WindowSystem("LightlessSync")); services.AddSingleton(); services.AddSingleton(new Dalamud.Localization("LightlessSync.Localization.", string.Empty, useEmbedded: true)); + services.AddSingleton(framework); services.AddSingleton(gameGui); services.AddSingleton(gameInteropProvider); services.AddSingleton(addonLifecycle); @@ -121,10 +123,12 @@ public sealed class Plugin : IDalamudPlugin services.AddSingleton(); services.AddSingleton(); services.AddSingleton(); + services.AddSingleton(); services.AddSingleton(); services.AddSingleton(); services.AddSingleton(); services.AddSingleton(); + services.AddSingleton(); services.AddSingleton(); services.AddSingleton(); services.AddSingleton(); @@ -177,7 +181,8 @@ public sealed class Plugin : IDalamudPlugin services.AddSingleton(sp => new BlockedCharacterHandler( sp.GetRequiredService>(), - gameInteropProvider)); + gameInteropProvider, + objectTable)); services.AddSingleton(sp => new IpcProvider( sp.GetRequiredService>(), diff --git a/LightlessSync/Services/ActorTracking/ActorObjectService.cs b/LightlessSync/Services/ActorTracking/ActorObjectService.cs index 28c5533..e443496 100644 --- a/LightlessSync/Services/ActorTracking/ActorObjectService.cs +++ b/LightlessSync/Services/ActorTracking/ActorObjectService.cs @@ -6,6 +6,7 @@ using FFXIVClientStructs.Interop; using FFXIVClientStructs.FFXIV.Client.Game.Character; using FFXIVClientStructs.FFXIV.Client.Game.Object; using FFXIVClientStructs.FFXIV.Client.Graphics.Scene; +using LightlessSync.PlayerData.Handlers; using LightlessSync.Services.Mediator; using Microsoft.Extensions.Hosting; using Microsoft.Extensions.Logging; @@ -16,7 +17,7 @@ using LightlessObjectKind = LightlessSync.API.Data.Enum.ObjectKind; namespace LightlessSync.Services.ActorTracking; -public sealed class ActorObjectService : IHostedService, IDisposable +public sealed class ActorObjectService : IHostedService, IDisposable, IMediatorSubscriber { public readonly record struct ActorDescriptor( string Name, @@ -36,6 +37,8 @@ public sealed class ActorObjectService : IHostedService, IDisposable private readonly IClientState _clientState; private readonly ICondition _condition; private readonly LightlessMediator _mediator; + private readonly object _playerRelatedHandlerLock = new(); + private readonly HashSet _playerRelatedHandlers = []; private readonly ConcurrentDictionary _activePlayers = new(); private readonly ConcurrentDictionary _gposePlayers = new(); @@ -71,6 +74,25 @@ public sealed class ActorObjectService : IHostedService, IDisposable _clientState = clientState; _condition = condition; _mediator = mediator; + + _mediator.Subscribe(this, 
(msg) => + { + if (!msg.OwnedObject) return; + lock (_playerRelatedHandlerLock) + { + _playerRelatedHandlers.Add(msg.GameObjectHandler); + } + RefreshTrackedActors(force: true); + }); + _mediator.Subscribe(this, (msg) => + { + if (!msg.OwnedObject) return; + lock (_playerRelatedHandlerLock) + { + _playerRelatedHandlers.Remove(msg.GameObjectHandler); + } + RefreshTrackedActors(force: true); + }); } private bool IsZoning => _condition[ConditionFlag.BetweenAreas] || _condition[ConditionFlag.BetweenAreas51]; @@ -84,6 +106,7 @@ public sealed class ActorObjectService : IHostedService, IDisposable public IReadOnlyList PlayerDescriptors => Snapshot.PlayerDescriptors; public IReadOnlyList OwnedDescriptors => Snapshot.OwnedDescriptors; public IReadOnlyList GposeDescriptors => CurrentGposeSnapshot.GposeDescriptors; + public LightlessMediator Mediator => _mediator; public bool TryGetActorByHash(string hash, out ActorDescriptor descriptor) => _actorsByHash.TryGetValue(hash, out descriptor); public bool TryGetValidatedActorByHash(string hash, out ActorDescriptor descriptor) @@ -213,18 +236,25 @@ public sealed class ActorObjectService : IHostedService, IDisposable return false; } - public async Task WaitForFullyLoadedAsync(nint address, CancellationToken cancellationToken = default) + public async Task WaitForFullyLoadedAsync(nint address, CancellationToken cancellationToken = default, int timeOutMs = 30000) { if (address == nint.Zero) throw new ArgumentException("Address cannot be zero.", nameof(address)); + var timeoutAt = timeOutMs > 0 ? Environment.TickCount64 + timeOutMs : long.MaxValue; while (true) { cancellationToken.ThrowIfCancellationRequested(); - var isLoaded = await _framework.RunOnFrameworkThread(() => IsObjectFullyLoaded(address)).ConfigureAwait(false); - if (!IsZoning && isLoaded) - return; + var loadState = await _framework.RunOnFrameworkThread(() => GetObjectLoadState(address)).ConfigureAwait(false); + if (!loadState.IsValid) + return false; + + if (!IsZoning && loadState.IsLoaded) + return true; + + if (Environment.TickCount64 >= timeoutAt) + return false; await Task.Delay(100, cancellationToken).ConfigureAwait(false); } @@ -317,6 +347,11 @@ public sealed class ActorObjectService : IHostedService, IDisposable _actorsByHash.Clear(); _actorsByName.Clear(); _pendingHashResolutions.Clear(); + _mediator.UnsubscribeAll(this); + lock (_playerRelatedHandlerLock) + { + _playerRelatedHandlers.Clear(); + } Volatile.Write(ref _snapshot, ActorSnapshot.Empty); Volatile.Write(ref _gposeSnapshot, GposeSnapshot.Empty); return Task.CompletedTask; @@ -493,7 +528,9 @@ public sealed class ActorObjectService : IHostedService, IDisposable if (objectKind is DalamudObjectKind.MountType or DalamudObjectKind.Companion) { var expectedMinionOrMount = GetMinionOrMountAddress(localPlayerAddress, localEntityId); - if (expectedMinionOrMount != nint.Zero && (nint)gameObject == expectedMinionOrMount) + if (expectedMinionOrMount != nint.Zero + && (nint)gameObject == expectedMinionOrMount + && IsPlayerRelatedOwnedAddress(expectedMinionOrMount, LightlessObjectKind.MinionOrMount)) { var resolvedOwner = ownerId != 0 ? 
ownerId : localEntityId; return (LightlessObjectKind.MinionOrMount, resolvedOwner); @@ -507,16 +544,37 @@ public sealed class ActorObjectService : IHostedService, IDisposable return (null, ownerId); var expectedPet = GetPetAddress(localPlayerAddress, localEntityId); - if (expectedPet != nint.Zero && (nint)gameObject == expectedPet) + if (expectedPet != nint.Zero + && (nint)gameObject == expectedPet + && IsPlayerRelatedOwnedAddress(expectedPet, LightlessObjectKind.Pet)) return (LightlessObjectKind.Pet, ownerId); var expectedCompanion = GetCompanionAddress(localPlayerAddress, localEntityId); - if (expectedCompanion != nint.Zero && (nint)gameObject == expectedCompanion) + if (expectedCompanion != nint.Zero + && (nint)gameObject == expectedCompanion + && IsPlayerRelatedOwnedAddress(expectedCompanion, LightlessObjectKind.Companion)) return (LightlessObjectKind.Companion, ownerId); return (null, ownerId); } + private bool IsPlayerRelatedOwnedAddress(nint address, LightlessObjectKind expectedKind) + { + if (address == nint.Zero) + return false; + + lock (_playerRelatedHandlerLock) + { + foreach (var handler in _playerRelatedHandlers) + { + if (handler.Address == address && handler.ObjectKind == expectedKind) + return true; + } + } + + return false; + } + private unsafe nint GetMinionOrMountAddress(nint localPlayerAddress, uint ownerEntityId) { if (localPlayerAddress == nint.Zero) @@ -524,20 +582,20 @@ public sealed class ActorObjectService : IHostedService, IDisposable var playerObject = (GameObject*)localPlayerAddress; var candidateAddress = _objectTable.GetObjectAddress(playerObject->ObjectIndex + 1); + if (ownerEntityId == 0) + return nint.Zero; + if (candidateAddress != nint.Zero) { var candidate = (GameObject*)candidateAddress; var candidateKind = (DalamudObjectKind)candidate->ObjectKind; if (candidateKind is DalamudObjectKind.MountType or DalamudObjectKind.Companion) { - if (ownerEntityId == 0 || ResolveOwnerId(candidate) == ownerEntityId) + if (ResolveOwnerId(candidate) == ownerEntityId) return candidateAddress; } } - if (ownerEntityId == 0) - return candidateAddress; - foreach (var obj in _objectTable) { if (obj is null || obj.Address == nint.Zero || obj.Address == localPlayerAddress) @@ -551,7 +609,7 @@ public sealed class ActorObjectService : IHostedService, IDisposable return obj.Address; } - return candidateAddress; + return nint.Zero; } private unsafe nint GetPetAddress(nint localPlayerAddress, uint ownerEntityId) @@ -1022,6 +1080,7 @@ public sealed class ActorObjectService : IHostedService, IDisposable public void Dispose() { DisposeHooks(); + _mediator.UnsubscribeAll(this); GC.SuppressFinalize(this); } @@ -1143,6 +1202,18 @@ public sealed class ActorObjectService : IHostedService, IDisposable return results; } + private LoadState GetObjectLoadState(nint address) + { + if (address == nint.Zero) + return LoadState.Invalid; + + var obj = _objectTable.CreateObjectReference(address); + if (obj is null || obj.Address != address) + return LoadState.Invalid; + + return new LoadState(true, IsObjectFullyLoaded(address)); + } + private static unsafe bool IsObjectFullyLoaded(nint address) { if (address == nint.Zero) @@ -1169,6 +1240,11 @@ public sealed class ActorObjectService : IHostedService, IDisposable return true; } + private readonly record struct LoadState(bool IsValid, bool IsLoaded) + { + public static LoadState Invalid => new(false, false); + } + private sealed record OwnedObjectSnapshot( IReadOnlyList RenderedPlayers, IReadOnlyList RenderedCompanions, diff --git 
a/LightlessSync/Services/CharacterAnalyzer.cs b/LightlessSync/Services/CharacterAnalyzer.cs index 959ece3..58388ae 100644 --- a/LightlessSync/Services/CharacterAnalyzer.cs +++ b/LightlessSync/Services/CharacterAnalyzer.cs @@ -28,7 +28,7 @@ public sealed class CharacterAnalyzer : MediatorSubscriberBase, IDisposable { _baseAnalysisCts = _baseAnalysisCts.CancelRecreate(); var token = _baseAnalysisCts.Token; - _ = BaseAnalysis(msg.CharacterData, token); + _ = Task.Run(async () => await BaseAnalysis(msg.CharacterData, token).ConfigureAwait(false), token); }); _fileCacheManager = fileCacheManager; _xivDataAnalyzer = modelAnalyzer; diff --git a/LightlessSync/Services/DalamudUtilService.cs b/LightlessSync/Services/DalamudUtilService.cs index 71bdace..e399a9d 100644 --- a/LightlessSync/Services/DalamudUtilService.cs +++ b/LightlessSync/Services/DalamudUtilService.cs @@ -22,8 +22,10 @@ using LightlessSync.Utils; using Lumina.Excel.Sheets; using Microsoft.Extensions.Hosting; using Microsoft.Extensions.Logging; +using System.Diagnostics; using System.Numerics; using System.Runtime.CompilerServices; +using System.Runtime.InteropServices; using System.Text; using BattleNpcSubKind = FFXIVClientStructs.FFXIV.Client.Game.Object.BattleNpcSubKind; using DalamudObjectKind = Dalamud.Game.ClientState.Objects.Enums.ObjectKind; @@ -843,31 +845,41 @@ public class DalamudUtilService : IHostedService, IMediatorSubscriber return Task.CompletedTask; } - public async Task WaitWhileCharacterIsDrawing(ILogger logger, GameObjectHandler handler, Guid redrawId, int timeOut = 5000, CancellationToken? ct = null) + public async Task WaitWhileCharacterIsDrawing( + ILogger logger, + GameObjectHandler handler, + Guid redrawId, + int timeOut = 5000, + CancellationToken? ct = null) { if (!_clientState.IsLoggedIn) return; - if (ct == null) - ct = CancellationToken.None; + var token = ct ?? 
CancellationToken.None; + + const int tick = 250; + const int initialSettle = 50; + + var sw = Stopwatch.StartNew(); - const int tick = 250; - int curWaitTime = 0; try { logger.LogTrace("[{redrawId}] Starting wait for {handler} to draw", redrawId, handler); - await Task.Delay(tick, ct.Value).ConfigureAwait(true); - curWaitTime += tick; - while ((!ct.Value.IsCancellationRequested) - && curWaitTime < timeOut - && await handler.IsBeingDrawnRunOnFrameworkAsync().ConfigureAwait(false)) // 0b100000000000 is "still rendering" or something + await Task.Delay(initialSettle, token).ConfigureAwait(false); + + while (!token.IsCancellationRequested + && sw.ElapsedMilliseconds < timeOut + && await handler.IsBeingDrawnRunOnFrameworkAsync().ConfigureAwait(false)) { logger.LogTrace("[{redrawId}] Waiting for {handler} to finish drawing", redrawId, handler); - curWaitTime += tick; - await Task.Delay(tick, ct.Value).ConfigureAwait(true); + await Task.Delay(tick, token).ConfigureAwait(false); } - logger.LogTrace("[{redrawId}] Finished drawing after {curWaitTime}ms", redrawId, curWaitTime); + logger.LogTrace("[{redrawId}] Finished drawing after {ms}ms", redrawId, sw.ElapsedMilliseconds); + } + catch (OperationCanceledException) + { + // ignore } catch (AccessViolationException ex) { @@ -1032,7 +1044,7 @@ public class DalamudUtilService : IHostedService, IMediatorSubscriber if (actor.ObjectIndex >= 200) continue; - if (_blockedCharacterHandler.IsCharacterBlocked(playerAddress, out bool firstTime) && firstTime) + if (_blockedCharacterHandler.IsCharacterBlocked(playerAddress, actor.ObjectIndex, out bool firstTime) && firstTime) { _logger.LogTrace("Skipping character {addr}, blocked/muted", playerAddress.ToString("X")); continue; diff --git a/LightlessSync/Services/Mediator/Messages.cs b/LightlessSync/Services/Mediator/Messages.cs index ae74b78..e6db9e7 100644 --- a/LightlessSync/Services/Mediator/Messages.cs +++ b/LightlessSync/Services/Mediator/Messages.cs @@ -73,7 +73,7 @@ public record HubClosedMessage(Exception? 
Exception) : SameThreadMessage;
 public record ResumeScanMessage(string Source) : MessageBase;
 public record FileCacheInitializedMessage : MessageBase;
 public record DownloadReadyMessage(Guid RequestId) : MessageBase;
-public record DownloadStartedMessage(GameObjectHandler DownloadId, Dictionary DownloadStatus) : MessageBase;
+public record DownloadStartedMessage(GameObjectHandler DownloadId, IReadOnlyDictionary DownloadStatus) : MessageBase;
 public record DownloadFinishedMessage(GameObjectHandler DownloadId) : MessageBase;
 public record UiToggleMessage(Type UiType) : MessageBase;
 public record PlayerUploadingMessage(GameObjectHandler Handler, bool IsUploading) : MessageBase;
@@ -104,6 +104,7 @@ public record PairUiUpdatedMessage(PairUiSnapshot Snapshot) : MessageBase;
 public record CensusUpdateMessage(byte Gender, byte RaceId, byte TribeId) : MessageBase;
 public record TargetPairMessage(Pair Pair) : MessageBase;
 public record PairFocusCharacterMessage(Pair Pair) : SameThreadMessage;
+public record PairOnlineMessage(PairUniqueIdentifier PairIdent) : MessageBase;
 public record CombatStartMessage : MessageBase;
 public record CombatEndMessage : MessageBase;
 public record PerformanceStartMessage : MessageBase;
@@ -138,4 +139,4 @@ public record OpenUserProfileMessage(UserData User) : MessageBase;
 public record LocationSharingMessage(UserData User, LocationInfo LocationInfo, DateTimeOffset ExpireAt) : MessageBase;
 public record MapChangedMessage(uint MapId) : MessageBase;
 #pragma warning restore S2094
-#pragma warning restore MA0048 // File name must match type name
\ No newline at end of file
+#pragma warning restore MA0048 // File name must match type name
diff --git a/LightlessSync/Services/ModelDecimation/MdlDecimator.cs b/LightlessSync/Services/ModelDecimation/MdlDecimator.cs
new file mode 100644
index 0000000..a7af13f
--- /dev/null
+++ b/LightlessSync/Services/ModelDecimation/MdlDecimator.cs
@@ -0,0 +1,1462 @@
+using Lumina.Data.Parsing;
+using Lumina.Extensions;
+using MeshDecimator;
+using MeshDecimator.Algorithms;
+using MeshDecimator.Math;
+using Microsoft.Extensions.Logging;
+using Penumbra.GameData.Files.ModelStructs;
+using System.Buffers.Binary;
+using MdlFile = Penumbra.GameData.Files.MdlFile;
+using MsLogger = Microsoft.Extensions.Logging.ILogger;
+
+namespace LightlessSync.Services.ModelDecimation;
+
+internal static class MdlDecimator
+{
+    private const int MaxStreams = 3;
+    private const int ReadRetryCount = 8;
+    private const int ReadRetryDelayMs = 250;
+
+    private static readonly HashSet<MdlFile.VertexUsage> SupportedUsages =
+    [
+        MdlFile.VertexUsage.Position,
+        MdlFile.VertexUsage.Normal,
+        MdlFile.VertexUsage.Tangent1,
+        MdlFile.VertexUsage.UV,
+        MdlFile.VertexUsage.Color,
+        MdlFile.VertexUsage.BlendWeights,
+        MdlFile.VertexUsage.BlendIndices,
+    ];
+
+    private static readonly HashSet<MdlFile.VertexType> SupportedTypes =
+    [
+        MdlFile.VertexType.Single2,
+        MdlFile.VertexType.Single3,
+        MdlFile.VertexType.Single4,
+        MdlFile.VertexType.Half2,
+        MdlFile.VertexType.Half4,
+        MdlFile.VertexType.UByte4,
+        MdlFile.VertexType.NByte4,
+    ];
+
+    public static bool TryDecimate(string sourcePath, string destinationPath, int triangleThreshold, double targetRatio, MsLogger logger)
+    {
+        try
+        {
+            if (!TryReadModelBytes(sourcePath, logger, out var data))
+            {
+                logger.LogInformation("Skipping model decimation; source file locked or unreadable: {Path}", sourcePath);
+                return false;
+            }
+            var mdl = new MdlFile(data);
+            if (!mdl.Valid)
+            {
+                logger.LogInformation("Skipping model decimation; invalid mdl: {Path}", sourcePath);
+                return
false; + } + + if (mdl.LodCount != 1) + { + logger.LogInformation("Skipping model decimation; unsupported LOD count for {Path}", sourcePath); + return false; + } + + if (HasShapeData(mdl)) + { + logger.LogInformation("Skipping model decimation; shape/morph data present for {Path}", sourcePath); + return false; + } + + const int lodIndex = 0; + var lod = mdl.Lods[lodIndex]; + var meshes = mdl.Meshes.ToArray(); + if (meshes.Length == 0) + { + logger.LogInformation("Skipping model decimation; no meshes for {Path}", sourcePath); + return false; + } + + if (lod.MeshCount == 0) + { + logger.LogInformation("Skipping model decimation; no meshes for {Path}", sourcePath); + return false; + } + + var lodMeshStart = (int)lod.MeshIndex; + var lodMeshEnd = lodMeshStart + lod.MeshCount; + if (lodMeshStart < 0 || lodMeshEnd > meshes.Length) + { + logger.LogInformation("Skipping model decimation; invalid LOD mesh range for {Path}", sourcePath); + return false; + } + + var anyDecimated = false; + var newSubMeshes = new List(mdl.SubMeshes.Length); + var newVertexBuffer = new List(mdl.VertexBufferSize[lodIndex] > 0 ? (int)mdl.VertexBufferSize[lodIndex] : 0); + var newIndexBuffer = new List(mdl.IndexBufferSize[lodIndex] > 0 ? (int)(mdl.IndexBufferSize[lodIndex] / sizeof(ushort)) : 0); + var subMeshCursor = 0; + DecimationAlgorithm? decimationAlgorithm = null; + int? decimationUvChannelCount = null; + + for (var meshIndex = 0; meshIndex < meshes.Length; meshIndex++) + { + var mesh = meshes[meshIndex]; + var meshSubMeshes = mdl.SubMeshes + .Skip(mesh.SubMeshIndex) + .Take(mesh.SubMeshCount) + .ToArray(); + + var meshIndexBase = newIndexBuffer.Count; + var vertexBufferBase = newVertexBuffer.Count; + + MeshStruct updatedMesh; + MdlStructs.SubmeshStruct[] updatedSubMeshes; + byte[][] vertexStreams; + int[] indices; + bool decimated; + + if (meshIndex >= lodMeshStart && meshIndex < lodMeshEnd + && TryProcessMesh(mdl, lodIndex, meshIndex, mesh, meshSubMeshes, triangleThreshold, targetRatio, + out updatedMesh, + out updatedSubMeshes, + out vertexStreams, + out indices, + out decimated, + ref decimationAlgorithm, + ref decimationUvChannelCount, + logger)) + { + updatedSubMeshes = OffsetSubMeshes(updatedSubMeshes, meshIndexBase); + } + else + { + if (meshIndex >= lodMeshStart && meshIndex < lodMeshEnd) + { + logger.LogDebug("Skipping decimation for mesh {MeshIndex} in {Path}", meshIndex, sourcePath); + } + + updatedMesh = mesh; + updatedSubMeshes = CopySubMeshes(meshSubMeshes, meshIndexBase, mesh.StartIndex); + vertexStreams = CopyVertexStreams(mdl, lodIndex, mesh); + indices = ReadIndices(mdl, lodIndex, mesh); + decimated = false; + } + + anyDecimated |= decimated; + + var vertexCount = updatedMesh.VertexCount; + var streamSizes = new int[MaxStreams]; + for (var stream = 0; stream < MaxStreams; stream++) + { + var stride = updatedMesh.VertexBufferStride(stream); + if (stride > 0 && vertexCount > 0) + { + streamSizes[stream] = stride * vertexCount; + } + } + + updatedMesh.VertexBufferOffset1 = (uint)vertexBufferBase; + updatedMesh.VertexBufferOffset2 = (uint)(vertexBufferBase + streamSizes[0]); + updatedMesh.VertexBufferOffset3 = (uint)(vertexBufferBase + streamSizes[0] + streamSizes[1]); + + newVertexBuffer.AddRange(vertexStreams[0]); + newVertexBuffer.AddRange(vertexStreams[1]); + newVertexBuffer.AddRange(vertexStreams[2]); + + updatedMesh.StartIndex = (uint)meshIndexBase; + updatedMesh.SubMeshIndex = (ushort)subMeshCursor; + updatedMesh.SubMeshCount = (ushort)updatedSubMeshes.Length; + updatedMesh.IndexCount = 
(uint)indices.Length; + + meshes[meshIndex] = updatedMesh; + newSubMeshes.AddRange(updatedSubMeshes); + subMeshCursor += updatedSubMeshes.Length; + newIndexBuffer.AddRange(indices.Select(static i => (ushort)i)); + } + + if (!anyDecimated) + { + logger.LogInformation("Skipping model decimation; no eligible meshes for {Path}", sourcePath); + return false; + } + + var indexBytes = BuildIndexBytes(newIndexBuffer); + + mdl.Meshes = meshes; + mdl.SubMeshes = [.. newSubMeshes]; + mdl.VertexOffset[lodIndex] = 0; + mdl.IndexOffset[lodIndex] = (uint)newVertexBuffer.Count; + mdl.VertexBufferSize[lodIndex] = (uint)newVertexBuffer.Count; + mdl.IndexBufferSize[lodIndex] = (uint)indexBytes.Length; + + mdl.Lods[lodIndex] = mdl.Lods[lodIndex] with + { + VertexDataOffset = 0, + VertexBufferSize = (uint)newVertexBuffer.Count, + IndexDataOffset = (uint)newVertexBuffer.Count, + IndexBufferSize = (uint)indexBytes.Length, + }; + + for (var clearIndex = 1; clearIndex < mdl.VertexOffset.Length; clearIndex++) + { + mdl.VertexOffset[clearIndex] = 0; + mdl.IndexOffset[clearIndex] = 0; + mdl.VertexBufferSize[clearIndex] = 0; + mdl.IndexBufferSize[clearIndex] = 0; + + if (clearIndex < mdl.Lods.Length) + { + mdl.Lods[clearIndex] = mdl.Lods[clearIndex] with + { + VertexDataOffset = 0, + VertexBufferSize = 0, + IndexDataOffset = 0, + IndexBufferSize = 0, + }; + } + } + + mdl.RemainingData = [.. newVertexBuffer, .. indexBytes]; + + var outputData = mdl.Write(); + Directory.CreateDirectory(Path.GetDirectoryName(destinationPath)!); + var tempPath = destinationPath + ".tmp"; + File.WriteAllBytes(tempPath, outputData); + File.Move(tempPath, destinationPath, overwrite: true); + return true; + } + catch (Exception ex) + { + logger.LogWarning(ex, "Failed to decimate model {Path}", sourcePath); + return false; + } + } + + private static bool TryReadModelBytes(string sourcePath, MsLogger logger, out byte[] data) + { + Exception? 
lastError = null; + for (var attempt = 0; attempt < ReadRetryCount; attempt++) + { + try + { + data = ReadAllBytesShared(sourcePath); + return true; + } + catch (IOException ex) + { + lastError = ex; + } + catch (UnauthorizedAccessException ex) + { + lastError = ex; + } + + if (attempt < ReadRetryCount - 1) + { + Thread.Sleep(ReadRetryDelayMs); + } + } + + if (lastError != null) + { + logger.LogDebug(lastError, "Failed to read model for decimation after {Attempts} attempts: {Path}", ReadRetryCount, sourcePath); + } + + data = []; + return false; + } + + private static byte[] ReadAllBytesShared(string sourcePath) + { + using var stream = new FileStream(sourcePath, FileMode.Open, FileAccess.Read, FileShare.ReadWrite | FileShare.Delete); + var length = stream.Length; + if (length <= 0) + { + throw new IOException("Model file length is zero."); + } + + if (length > int.MaxValue) + { + throw new IOException("Model file too large."); + } + + var buffer = new byte[(int)length]; + var totalRead = 0; + while (totalRead < buffer.Length) + { + var read = stream.Read(buffer, totalRead, buffer.Length - totalRead); + if (read == 0) + { + break; + } + + totalRead += read; + } + + if (totalRead != buffer.Length || stream.Length != length) + { + throw new IOException("Model file length changed during read."); + } + + return buffer; + } + + private static bool TryProcessMesh( + MdlFile mdl, + int lodIndex, + int meshIndex, + MeshStruct mesh, + MdlStructs.SubmeshStruct[] meshSubMeshes, + int triangleThreshold, + double targetRatio, + out MeshStruct updatedMesh, + out MdlStructs.SubmeshStruct[] updatedSubMeshes, + out byte[][] vertexStreams, + out int[] indices, + out bool decimated, + ref DecimationAlgorithm? decimationAlgorithm, + ref int? decimationUvChannelCount, + MsLogger logger) + { + updatedMesh = mesh; + updatedSubMeshes = []; + vertexStreams = [[], [], []]; + indices = []; + decimated = false; + + if (mesh.VertexCount == 0 || mesh.IndexCount == 0) + { + return false; + } + + if (meshSubMeshes.Length == 0) + { + return false; + } + + var triangleCount = (int)(mesh.IndexCount / 3); + if (triangleCount < triangleThreshold) + { + return false; + } + + if (!TryBuildVertexFormat(mdl.VertexDeclarations[meshIndex], out var format, out var reason)) + { + logger.LogDebug("Mesh {MeshIndex} vertex format unsupported: {Reason}", meshIndex, reason); + return false; + } + + if (!TryDecodeMeshData(mdl, lodIndex, mesh, format, meshSubMeshes, out var decoded, out var subMeshIndices, out var decodeReason)) + { + logger.LogDebug("Mesh {MeshIndex} decode failed: {Reason}", meshIndex, decodeReason); + return false; + } + + var targetTriangles = (int)Math.Floor(triangleCount * targetRatio); + if (targetTriangles < 1 || targetTriangles >= triangleCount) + { + return false; + } + + var meshDecimatorMesh = BuildMesh(decoded, subMeshIndices); + var algorithm = GetOrCreateAlgorithm(format, ref decimationAlgorithm, ref decimationUvChannelCount, logger); + algorithm.Initialize(meshDecimatorMesh); + algorithm.DecimateMesh(targetTriangles); + var decimatedMesh = algorithm.ToMesh(); + + if (decimatedMesh.SubMeshCount != meshSubMeshes.Length) + { + logger.LogDebug("Mesh {MeshIndex} submesh count changed after decimation", meshIndex); + return false; + } + + if (!TryEncodeMeshData(decimatedMesh, format, mesh, meshSubMeshes, out updatedMesh, out updatedSubMeshes, out vertexStreams, out indices, out var encodeReason)) + { + logger.LogDebug("Mesh {MeshIndex} encode failed: {Reason}", meshIndex, encodeReason); + return false; + } + + 
decimated = true; + return true; + } + + private static DecimationAlgorithm GetOrCreateAlgorithm( + VertexFormat format, + ref DecimationAlgorithm? decimationAlgorithm, + ref int? decimationUvChannelCount, + MsLogger logger) + { + var uvChannelCount = format.UvChannelCount; + if (decimationAlgorithm == null || decimationUvChannelCount != uvChannelCount) + { + decimationAlgorithm = MeshDecimation.CreateAlgorithm(Algorithm.Default); + decimationAlgorithm.Logger = logger; + decimationUvChannelCount = uvChannelCount; + } + + return decimationAlgorithm; + } + + private static Mesh BuildMesh(DecodedMeshData decoded, int[][] subMeshIndices) + { + var mesh = new Mesh(decoded.Positions, subMeshIndices); + if (decoded.Normals != null) + { + mesh.Normals = decoded.Normals; + } + + if (decoded.Tangents != null) + { + mesh.Tangents = decoded.Tangents; + } + + if (decoded.Colors != null) + { + mesh.Colors = decoded.Colors; + } + + if (decoded.BoneWeights != null) + { + mesh.BoneWeights = decoded.BoneWeights; + } + + if (decoded.UvChannels != null) + { + for (var channel = 0; channel < decoded.UvChannels.Length; channel++) + { + mesh.SetUVs(channel, decoded.UvChannels[channel]); + } + } + + return mesh; + } + + private static bool TryDecodeMeshData( + MdlFile mdl, + int lodIndex, + MeshStruct mesh, + VertexFormat format, + MdlStructs.SubmeshStruct[] meshSubMeshes, + out DecodedMeshData decoded, + out int[][] subMeshIndices, + out string? reason) + { + decoded = default!; + subMeshIndices = []; + reason = null; + + if (!TryBuildSubMeshIndices(mdl, lodIndex, mesh, meshSubMeshes, out subMeshIndices, out reason)) + { + return false; + } + + var vertexCount = mesh.VertexCount; + var positions = new Vector3d[vertexCount]; + Vector3[]? normals = format.HasNormals ? new Vector3[vertexCount] : null; + Vector4[]? tangents = format.HasTangents ? new Vector4[vertexCount] : null; + Vector4[]? colors = format.HasColors ? new Vector4[vertexCount] : null; + BoneWeight[]? boneWeights = format.HasSkinning ? new BoneWeight[vertexCount] : null; + + Vector2[][]? uvChannels = null; + if (format.UvChannelCount > 0) + { + uvChannels = new Vector2[format.UvChannelCount][]; + for (var channel = 0; channel < format.UvChannelCount; channel++) + { + uvChannels[channel] = new Vector2[vertexCount]; + } + } + + var streams = new BinaryReader[MaxStreams]; + for (var streamIndex = 0; streamIndex < MaxStreams; streamIndex++) + { + streams[streamIndex] = new BinaryReader(new MemoryStream(mdl.RemainingData)); + streams[streamIndex].BaseStream.Position = mdl.VertexOffset[lodIndex] + mesh.VertexBufferOffset(streamIndex); + } + + var uvLookup = format.UvElements.ToDictionary(static element => ElementKey.From(element.Element), static element => element); + for (var vertexIndex = 0; vertexIndex < vertexCount; vertexIndex++) + { + byte[]? indices = null; + float[]? 
weights = null; + + foreach (var element in format.SortedElements) + { + var usage = (MdlFile.VertexUsage)element.Usage; + var type = (MdlFile.VertexType)element.Type; + var stream = streams[element.Stream]; + + switch (usage) + { + case MdlFile.VertexUsage.Position: + positions[vertexIndex] = ReadPosition(type, stream); + break; + case MdlFile.VertexUsage.Normal when normals != null: + normals[vertexIndex] = ReadNormal(type, stream); + break; + case MdlFile.VertexUsage.Tangent1 when tangents != null: + tangents[vertexIndex] = ReadTangent(type, stream); + break; + case MdlFile.VertexUsage.Color when colors != null: + colors[vertexIndex] = ReadColor(type, stream); + break; + case MdlFile.VertexUsage.BlendIndices: + indices = ReadIndices(type, stream); + break; + case MdlFile.VertexUsage.BlendWeights: + weights = ReadWeights(type, stream); + break; + case MdlFile.VertexUsage.UV when uvChannels != null: + if (!uvLookup.TryGetValue(ElementKey.From(element), out var uvElement)) + { + reason = "UV mapping missing."; + return false; + } + ReadUv(type, stream, uvElement, uvChannels, vertexIndex); + break; + default: + if (usage == MdlFile.VertexUsage.Normal || usage == MdlFile.VertexUsage.Tangent1 + || usage == MdlFile.VertexUsage.Color) + { + _ = ReadAndDiscard(type, stream); + } + break; + } + } + + if (boneWeights != null) + { + if (indices == null || weights == null || indices.Length != 4 || weights.Length != 4) + { + reason = "Missing or invalid skinning data."; + return false; + } + + NormalizeWeights(weights); + boneWeights[vertexIndex] = new BoneWeight(indices[0], indices[1], indices[2], indices[3], weights[0], weights[1], weights[2], weights[3]); + } + } + + decoded = new DecodedMeshData(positions, normals, tangents, colors, boneWeights, uvChannels); + return true; + } + + private static bool TryEncodeMeshData( + Mesh decimatedMesh, + VertexFormat format, + MeshStruct originalMesh, + MdlStructs.SubmeshStruct[] originalSubMeshes, + out MeshStruct updatedMesh, + out MdlStructs.SubmeshStruct[] updatedSubMeshes, + out byte[][] vertexStreams, + out int[] indices, + out string? 
reason) + { + updatedMesh = originalMesh; + updatedSubMeshes = []; + vertexStreams = [[], [], []]; + indices = []; + reason = null; + + var vertexCount = decimatedMesh.Vertices.Length; + if (vertexCount > ushort.MaxValue) + { + reason = "Vertex count exceeds ushort range."; + return false; + } + + var normals = decimatedMesh.Normals; + var tangents = decimatedMesh.Tangents; + var colors = decimatedMesh.Colors; + var boneWeights = decimatedMesh.BoneWeights; + + if (format.HasNormals && normals == null) + { + reason = "Missing normals after decimation."; + return false; + } + + if (format.HasTangents && tangents == null) + { + reason = "Missing tangents after decimation."; + return false; + } + + if (format.HasColors && colors == null) + { + reason = "Missing colors after decimation."; + return false; + } + + if (format.HasSkinning && boneWeights == null) + { + reason = "Missing bone weights after decimation."; + return false; + } + + var uvChannels = Array.Empty(); + if (format.UvChannelCount > 0) + { + uvChannels = new Vector2[format.UvChannelCount][]; + for (var channel = 0; channel < format.UvChannelCount; channel++) + { + if (decimatedMesh.GetUVDimension(channel) != 2) + { + reason = "Unsupported UV dimension after decimation."; + return false; + } + uvChannels[channel] = decimatedMesh.GetUVs2D(channel); + } + } + + var streamBuffers = new byte[MaxStreams][]; + for (var stream = 0; stream < MaxStreams; stream++) + { + var stride = originalMesh.VertexBufferStride(stream); + if (stride == 0 || vertexCount == 0) + { + streamBuffers[stream] = []; + continue; + } + + streamBuffers[stream] = new byte[stride * vertexCount]; + } + + var uvLookup = format.UvElements.ToDictionary(static element => ElementKey.From(element.Element), static element => element); + + foreach (var element in format.SortedElements) + { + var stride = originalMesh.VertexBufferStride(element.Stream); + if (stride == 0) + { + continue; + } + + var elementSize = GetElementSize((MdlFile.VertexType)element.Type); + if (element.Offset + elementSize > stride) + { + reason = "Vertex element stride overflow."; + return false; + } + } + + for (var vertexIndex = 0; vertexIndex < vertexCount; vertexIndex++) + { + foreach (var element in format.SortedElements) + { + var usage = (MdlFile.VertexUsage)element.Usage; + var type = (MdlFile.VertexType)element.Type; + var stream = element.Stream; + var stride = originalMesh.VertexBufferStride(stream); + if (stride == 0) + { + continue; + } + + var baseOffset = vertexIndex * stride + element.Offset; + var target = streamBuffers[stream].AsSpan(baseOffset, GetElementSize(type)); + + switch (usage) + { + case MdlFile.VertexUsage.Position: + WritePosition(type, decimatedMesh.Vertices[vertexIndex], target); + break; + case MdlFile.VertexUsage.Normal when normals != null: + WriteNormal(type, normals[vertexIndex], target); + break; + case MdlFile.VertexUsage.Tangent1 when tangents != null: + WriteTangent(type, tangents[vertexIndex], target); + break; + case MdlFile.VertexUsage.Color when colors != null: + WriteColor(type, colors[vertexIndex], target); + break; + case MdlFile.VertexUsage.BlendIndices when boneWeights != null: + WriteBlendIndices(type, boneWeights[vertexIndex], target); + break; + case MdlFile.VertexUsage.BlendWeights when boneWeights != null: + WriteBlendWeights(type, boneWeights[vertexIndex], target); + break; + case MdlFile.VertexUsage.UV when format.UvChannelCount > 0: + if (!uvLookup.TryGetValue(ElementKey.From(element), out var uvElement)) + { + reason = "UV mapping missing."; 
+ return false; + } + WriteUv(type, uvElement, uvChannels, vertexIndex, target); + break; + } + } + } + + updatedMesh.VertexCount = (ushort)vertexCount; + + var newSubMeshes = new List(originalSubMeshes.Length); + var indexList = new List(); + + for (var subMeshIndex = 0; subMeshIndex < originalSubMeshes.Length; subMeshIndex++) + { + var subMeshIndices = decimatedMesh.GetIndices(subMeshIndex); + if (subMeshIndices.Any(index => index < 0 || index >= vertexCount)) + { + reason = "Decimated indices out of range."; + return false; + } + + var offset = indexList.Count; + indexList.AddRange(subMeshIndices); + + var updatedSubMesh = originalSubMeshes[subMeshIndex] with + { + IndexOffset = (uint)offset, + IndexCount = (uint)subMeshIndices.Length, + }; + newSubMeshes.Add(updatedSubMesh); + } + + updatedSubMeshes = newSubMeshes.ToArray(); + indices = indexList.ToArray(); + vertexStreams = streamBuffers; + return true; + } + + private static bool TryBuildSubMeshIndices( + MdlFile mdl, + int lodIndex, + MeshStruct mesh, + MdlStructs.SubmeshStruct[] meshSubMeshes, + out int[][] subMeshIndices, + out string? reason) + { + reason = null; + subMeshIndices = new int[meshSubMeshes.Length][]; + var meshIndices = ReadIndices(mdl, lodIndex, mesh); + + for (var subMeshIndex = 0; subMeshIndex < meshSubMeshes.Length; subMeshIndex++) + { + var subMesh = meshSubMeshes[subMeshIndex]; + if (subMesh.IndexCount == 0) + { + subMeshIndices[subMeshIndex] = []; + continue; + } + + var relativeOffset = (int)(subMesh.IndexOffset - mesh.StartIndex); + if (relativeOffset < 0 || relativeOffset + subMesh.IndexCount > meshIndices.Length) + { + reason = "Submesh index range out of bounds."; + return false; + } + + var slice = meshIndices.Skip(relativeOffset).Take((int)subMesh.IndexCount).Select(static i => (int)i).ToArray(); + subMeshIndices[subMeshIndex] = slice; + } + + return true; + } + + private static byte[] BuildIndexBytes(List indices) + { + var indexBytes = new byte[indices.Count * sizeof(ushort)]; + for (var i = 0; i < indices.Count; i++) + { + BinaryPrimitives.WriteUInt16LittleEndian(indexBytes.AsSpan(i * 2, 2), indices[i]); + } + + return indexBytes; + } + + private static int[] ReadIndices(MdlFile mdl, int lodIndex, MeshStruct mesh) + { + using var reader = new BinaryReader(new MemoryStream(mdl.RemainingData)); + reader.BaseStream.Position = mdl.IndexOffset[lodIndex] + mesh.StartIndex * sizeof(ushort); + var values = reader.ReadStructuresAsArray((int)mesh.IndexCount); + return values.Select(static i => (int)i).ToArray(); + } + + private static byte[][] CopyVertexStreams(MdlFile mdl, int lodIndex, MeshStruct mesh) + { + var streams = new byte[MaxStreams][]; + for (var stream = 0; stream < MaxStreams; stream++) + { + var stride = mesh.VertexBufferStride(stream); + if (stride == 0 || mesh.VertexCount == 0) + { + streams[stream] = []; + continue; + } + + var size = stride * mesh.VertexCount; + var offset = mdl.VertexOffset[lodIndex] + mesh.VertexBufferOffset(stream); + streams[stream] = mdl.RemainingData.AsSpan((int)offset, size).ToArray(); + } + + return streams; + } + + private static MdlStructs.SubmeshStruct[] CopySubMeshes(MdlStructs.SubmeshStruct[] source, int newMeshIndexBase, uint meshStartIndex) + { + var result = new MdlStructs.SubmeshStruct[source.Length]; + for (var i = 0; i < source.Length; i++) + { + var relativeOffset = (int)(source[i].IndexOffset - meshStartIndex); + result[i] = source[i] with + { + IndexOffset = (uint)(newMeshIndexBase + relativeOffset), + }; + } + + return result; + } + + private static 
MdlStructs.SubmeshStruct[] OffsetSubMeshes(MdlStructs.SubmeshStruct[] source, int meshIndexBase) + { + var result = new MdlStructs.SubmeshStruct[source.Length]; + for (var i = 0; i < source.Length; i++) + { + result[i] = source[i] with + { + IndexOffset = (uint)(meshIndexBase + source[i].IndexOffset), + }; + } + + return result; + } + + private static bool TryBuildVertexFormat(MdlStructs.VertexDeclarationStruct declaration, out VertexFormat format, out string? reason) + { + reason = null; + format = default!; + + var elements = declaration.VertexElements; + foreach (var element in elements) + { + if (element.Stream >= MaxStreams) + { + reason = "Vertex stream index out of range."; + return false; + } + + var usage = (MdlFile.VertexUsage)element.Usage; + var type = (MdlFile.VertexType)element.Type; + + if (!SupportedUsages.Contains(usage)) + { + reason = $"Unsupported usage {usage}."; + return false; + } + + if (!SupportedTypes.Contains(type)) + { + reason = $"Unsupported vertex type {type}."; + return false; + } + } + + var positionElements = elements.Where(static e => (MdlFile.VertexUsage)e.Usage == MdlFile.VertexUsage.Position).ToArray(); + if (positionElements.Length != 1) + { + reason = "Expected single position element."; + return false; + } + + var positionType = (MdlFile.VertexType)positionElements[0].Type; + if (positionType != MdlFile.VertexType.Single3 && positionType != MdlFile.VertexType.Single4) + { + reason = "Unsupported position element type."; + return false; + } + + var normalElements = elements.Where(static e => (MdlFile.VertexUsage)e.Usage == MdlFile.VertexUsage.Normal).ToArray(); + if (normalElements.Length > 1) + { + reason = "Multiple normal elements unsupported."; + return false; + } + + if (normalElements.Length == 1) + { + var normalType = (MdlFile.VertexType)normalElements[0].Type; + if (normalType != MdlFile.VertexType.Single3 && normalType != MdlFile.VertexType.Single4 && normalType != MdlFile.VertexType.NByte4) + { + reason = "Unsupported normal element type."; + return false; + } + } + + var tangentElements = elements.Where(static e => (MdlFile.VertexUsage)e.Usage == MdlFile.VertexUsage.Tangent1).ToArray(); + if (tangentElements.Length > 1) + { + reason = "Multiple tangent elements unsupported."; + return false; + } + + if (tangentElements.Length == 1) + { + var tangentType = (MdlFile.VertexType)tangentElements[0].Type; + if (tangentType != MdlFile.VertexType.Single4 && tangentType != MdlFile.VertexType.NByte4) + { + reason = "Unsupported tangent element type."; + return false; + } + } + + var colorElements = elements.Where(static e => (MdlFile.VertexUsage)e.Usage == MdlFile.VertexUsage.Color).ToArray(); + if (colorElements.Length > 1) + { + reason = "Multiple color elements unsupported."; + return false; + } + + MdlStructs.VertexElement? 
colorElement = null; + if (colorElements.Length == 1) + { + var colorType = (MdlFile.VertexType)colorElements[0].Type; + if (colorType != MdlFile.VertexType.UByte4 && colorType != MdlFile.VertexType.NByte4 && colorType != MdlFile.VertexType.Single4) + { + reason = "Unsupported color element type."; + return false; + } + + colorElement = colorElements[0]; + } + + var blendIndicesElements = elements.Where(static e => (MdlFile.VertexUsage)e.Usage == MdlFile.VertexUsage.BlendIndices).ToArray(); + var blendWeightsElements = elements.Where(static e => (MdlFile.VertexUsage)e.Usage == MdlFile.VertexUsage.BlendWeights).ToArray(); + if (blendIndicesElements.Length != blendWeightsElements.Length) + { + reason = "Blend indices/weights mismatch."; + return false; + } + + if (blendIndicesElements.Length > 1 || blendWeightsElements.Length > 1) + { + reason = "Multiple blend elements unsupported."; + return false; + } + + if (blendIndicesElements.Length == 1) + { + var indexType = (MdlFile.VertexType)blendIndicesElements[0].Type; + if (indexType != MdlFile.VertexType.UByte4) + { + reason = "Unsupported blend index type."; + return false; + } + + var weightType = (MdlFile.VertexType)blendWeightsElements[0].Type; + if (weightType != MdlFile.VertexType.UByte4 && weightType != MdlFile.VertexType.NByte4 && weightType != MdlFile.VertexType.Single4) + { + reason = "Unsupported blend weight type."; + return false; + } + } + + if (!TryBuildUvElements(elements, out var uvElements, out var uvChannelCount, out reason)) + { + return false; + } + + var sortedElements = elements.OrderBy(static element => element.Offset).ToList(); + format = new VertexFormat( + sortedElements, + normalElements.Length == 1 ? normalElements[0] : (MdlStructs.VertexElement?)null, + tangentElements.Length == 1 ? tangentElements[0] : (MdlStructs.VertexElement?)null, + colorElement, + blendIndicesElements.Length == 1 ? blendIndicesElements[0] : (MdlStructs.VertexElement?)null, + blendWeightsElements.Length == 1 ? blendWeightsElements[0] : (MdlStructs.VertexElement?)null, + uvElements, + uvChannelCount); + return true; + } + + private static bool TryBuildUvElements( + IReadOnlyList elements, + out List uvElements, + out int uvChannelCount, + out string? 
reason) + { + uvElements = []; + uvChannelCount = 0; + reason = null; + + var uvList = elements + .Where(static e => (MdlFile.VertexUsage)e.Usage == MdlFile.VertexUsage.UV) + .OrderBy(static e => e.UsageIndex) + .ToList(); + + foreach (var element in uvList) + { + var type = (MdlFile.VertexType)element.Type; + if (type == MdlFile.VertexType.Half2 || type == MdlFile.VertexType.Single2) + { + if (uvChannelCount + 1 > Mesh.UVChannelCount) + { + reason = "Too many UV channels."; + return false; + } + + uvElements.Add(new UvElementPacking(element, uvChannelCount, null)); + uvChannelCount += 1; + } + else if (type == MdlFile.VertexType.Half4 || type == MdlFile.VertexType.Single4) + { + if (uvChannelCount + 2 > Mesh.UVChannelCount) + { + reason = "Too many UV channels."; + return false; + } + + uvElements.Add(new UvElementPacking(element, uvChannelCount, uvChannelCount + 1)); + uvChannelCount += 2; + } + else + { + reason = "Unsupported UV type."; + return false; + } + } + + return true; + } + + private static bool HasShapeData(MdlFile mdl) + => mdl.Shapes.Length > 0 + || mdl.ShapeMeshes.Length > 0 + || mdl.ShapeValues.Length > 0 + || mdl.NeckMorphs.Length > 0; + + private static Vector3d ReadPosition(MdlFile.VertexType type, BinaryReader reader) + { + switch (type) + { + case MdlFile.VertexType.Single3: + return new Vector3d(reader.ReadSingle(), reader.ReadSingle(), reader.ReadSingle()); + case MdlFile.VertexType.Single4: + var x = reader.ReadSingle(); + var y = reader.ReadSingle(); + var z = reader.ReadSingle(); + _ = reader.ReadSingle(); + return new Vector3d(x, y, z); + default: + throw new InvalidOperationException($"Unsupported position type {type}"); + } + } + + private static Vector3 ReadNormal(MdlFile.VertexType type, BinaryReader reader) + { + switch (type) + { + case MdlFile.VertexType.Single3: + return new Vector3(reader.ReadSingle(), reader.ReadSingle(), reader.ReadSingle()); + case MdlFile.VertexType.Single4: + var x = reader.ReadSingle(); + var y = reader.ReadSingle(); + var z = reader.ReadSingle(); + _ = reader.ReadSingle(); + return new Vector3(x, y, z); + case MdlFile.VertexType.NByte4: + return ReadNByte4(reader).ToVector3(); + default: + throw new InvalidOperationException($"Unsupported normal type {type}"); + } + } + + private static Vector4 ReadTangent(MdlFile.VertexType type, BinaryReader reader) + { + return type switch + { + MdlFile.VertexType.Single4 => new Vector4(reader.ReadSingle(), reader.ReadSingle(), reader.ReadSingle(), reader.ReadSingle()), + MdlFile.VertexType.NByte4 => ReadNByte4(reader), + _ => throw new InvalidOperationException($"Unsupported tangent type {type}"), + }; + } + + private static Vector4 ReadColor(MdlFile.VertexType type, BinaryReader reader) + { + return type switch + { + MdlFile.VertexType.UByte4 => ReadUByte4(reader), + MdlFile.VertexType.NByte4 => ReadUByte4(reader), + MdlFile.VertexType.Single4 => new Vector4(reader.ReadSingle(), reader.ReadSingle(), reader.ReadSingle(), reader.ReadSingle()), + _ => throw new InvalidOperationException($"Unsupported color type {type}"), + }; + } + + private static void ReadUv(MdlFile.VertexType type, BinaryReader reader, UvElementPacking mapping, Vector2[][] uvChannels, int vertexIndex) + { + if (type == MdlFile.VertexType.Half2 || type == MdlFile.VertexType.Single2) + { + var uv = type == MdlFile.VertexType.Half2 + ? 
new Vector2(ReadHalf(reader), ReadHalf(reader)) + : new Vector2(reader.ReadSingle(), reader.ReadSingle()); + + uvChannels[mapping.FirstChannel][vertexIndex] = uv; + return; + } + + if (type == MdlFile.VertexType.Half4 || type == MdlFile.VertexType.Single4) + { + var uv = type == MdlFile.VertexType.Half4 + ? new Vector4(ReadHalf(reader), ReadHalf(reader), ReadHalf(reader), ReadHalf(reader)) + : new Vector4(reader.ReadSingle(), reader.ReadSingle(), reader.ReadSingle(), reader.ReadSingle()); + + uvChannels[mapping.FirstChannel][vertexIndex] = new Vector2(uv.x, uv.y); + if (mapping.SecondChannel.HasValue) + { + uvChannels[mapping.SecondChannel.Value][vertexIndex] = new Vector2(uv.z, uv.w); + } + } + } + + private static byte[] ReadIndices(MdlFile.VertexType type, BinaryReader reader) + { + return type switch + { + MdlFile.VertexType.UByte4 => new[] { reader.ReadByte(), reader.ReadByte(), reader.ReadByte(), reader.ReadByte() }, + _ => throw new InvalidOperationException($"Unsupported indices type {type}"), + }; + } + + private static float[] ReadWeights(MdlFile.VertexType type, BinaryReader reader) + { + return type switch + { + MdlFile.VertexType.UByte4 => ReadUByte4(reader).ToFloatArray(), + MdlFile.VertexType.NByte4 => ReadUByte4(reader).ToFloatArray(), + MdlFile.VertexType.Single4 => new[] { reader.ReadSingle(), reader.ReadSingle(), reader.ReadSingle(), reader.ReadSingle() }, + _ => throw new InvalidOperationException($"Unsupported weights type {type}"), + }; + } + + private static Vector4 ReadUByte4(BinaryReader reader) + { + return new Vector4( + reader.ReadByte() / 255f, + reader.ReadByte() / 255f, + reader.ReadByte() / 255f, + reader.ReadByte() / 255f); + } + + private static Vector4 ReadNByte4(BinaryReader reader) + { + var value = ReadUByte4(reader); + return (value * 2f) - new Vector4(1f, 1f, 1f, 1f); + } + + private static Vector4 ReadAndDiscard(MdlFile.VertexType type, BinaryReader reader) + { + return type switch + { + MdlFile.VertexType.Single2 => new Vector4(reader.ReadSingle(), reader.ReadSingle(), 0, 0), + MdlFile.VertexType.Single3 => new Vector4(reader.ReadSingle(), reader.ReadSingle(), reader.ReadSingle(), 0), + MdlFile.VertexType.Single4 => new Vector4(reader.ReadSingle(), reader.ReadSingle(), reader.ReadSingle(), reader.ReadSingle()), + MdlFile.VertexType.Half2 => new Vector4(ReadHalf(reader), ReadHalf(reader), 0, 0), + MdlFile.VertexType.Half4 => new Vector4(ReadHalf(reader), ReadHalf(reader), ReadHalf(reader), ReadHalf(reader)), + MdlFile.VertexType.UByte4 => ReadUByte4(reader), + MdlFile.VertexType.NByte4 => ReadUByte4(reader), + _ => Vector4.zero, + }; + } + + private static void WritePosition(MdlFile.VertexType type, Vector3d value, Span target) + { + WriteVector3(type, new Vector3((float)value.x, (float)value.y, (float)value.z), target); + } + + private static void WriteNormal(MdlFile.VertexType type, Vector3 value, Span target) + { + WriteVector3(type, value, target, normalized: type == MdlFile.VertexType.NByte4); + } + + private static void WriteTangent(MdlFile.VertexType type, Vector4 value, Span target) + { + if (type == MdlFile.VertexType.NByte4) + { + WriteNByte4(value, target); + return; + } + + WriteVector4(type, value, target); + } + + private static void WriteColor(MdlFile.VertexType type, Vector4 value, Span target) + { + if (type == MdlFile.VertexType.Single4) + { + WriteVector4(type, value, target); + return; + } + + WriteUByte4(value, target); + } + + private static void WriteBlendIndices(MdlFile.VertexType type, BoneWeight weights, Span target) + { + 
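// Blend indices are emitted only for UByte4 element streams; each bone index is clamped to the 0-255 byte range before writing. + 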
if (type != MdlFile.VertexType.UByte4) + { + return; + } + + target[0] = (byte)Math.Clamp(weights.boneIndex0, 0, 255); + target[1] = (byte)Math.Clamp(weights.boneIndex1, 0, 255); + target[2] = (byte)Math.Clamp(weights.boneIndex2, 0, 255); + target[3] = (byte)Math.Clamp(weights.boneIndex3, 0, 255); + } + + private static void WriteBlendWeights(MdlFile.VertexType type, BoneWeight weights, Span target) + { + if (type != MdlFile.VertexType.UByte4 && type != MdlFile.VertexType.NByte4) + { + if (type == MdlFile.VertexType.Single4) + { + BinaryPrimitives.WriteSingleLittleEndian(target[..4], weights.boneWeight0); + BinaryPrimitives.WriteSingleLittleEndian(target.Slice(4, 4), weights.boneWeight1); + BinaryPrimitives.WriteSingleLittleEndian(target.Slice(8, 4), weights.boneWeight2); + BinaryPrimitives.WriteSingleLittleEndian(target.Slice(12, 4), weights.boneWeight3); + } + return; + } + + var w0 = Clamp01(weights.boneWeight0); + var w1 = Clamp01(weights.boneWeight1); + var w2 = Clamp01(weights.boneWeight2); + var w3 = Clamp01(weights.boneWeight3); + NormalizeWeights(ref w0, ref w1, ref w2, ref w3); + + target[0] = ToByte(w0); + target[1] = ToByte(w1); + target[2] = ToByte(w2); + target[3] = ToByte(w3); + } + + private static void WriteUv(MdlFile.VertexType type, UvElementPacking mapping, Vector2[][] uvChannels, int vertexIndex, Span target) + { + if (type == MdlFile.VertexType.Half2 || type == MdlFile.VertexType.Single2) + { + var uv = uvChannels[mapping.FirstChannel][vertexIndex]; + WriteVector2(type, uv, target); + return; + } + + if (type == MdlFile.VertexType.Half4 || type == MdlFile.VertexType.Single4) + { + var uv0 = uvChannels[mapping.FirstChannel][vertexIndex]; + var uv1 = mapping.SecondChannel.HasValue + ? uvChannels[mapping.SecondChannel.Value][vertexIndex] + : Vector2.zero; + WriteVector4(type, new Vector4(uv0.x, uv0.y, uv1.x, uv1.y), target); + } + } + + private static void WriteVector2(MdlFile.VertexType type, Vector2 value, Span target) + { + if (type == MdlFile.VertexType.Single2) + { + BinaryPrimitives.WriteSingleLittleEndian(target[..4], value.x); + BinaryPrimitives.WriteSingleLittleEndian(target.Slice(4, 4), value.y); + return; + } + + if (type == MdlFile.VertexType.Half2) + { + WriteHalf(target[..2], value.x); + WriteHalf(target.Slice(2, 2), value.y); + } + } + + private static void WriteVector3(MdlFile.VertexType type, Vector3 value, Span target, bool normalized = false) + { + if (type == MdlFile.VertexType.Single3) + { + BinaryPrimitives.WriteSingleLittleEndian(target[..4], value.x); + BinaryPrimitives.WriteSingleLittleEndian(target.Slice(4, 4), value.y); + BinaryPrimitives.WriteSingleLittleEndian(target.Slice(8, 4), value.z); + return; + } + + if (type == MdlFile.VertexType.Single4) + { + BinaryPrimitives.WriteSingleLittleEndian(target[..4], value.x); + BinaryPrimitives.WriteSingleLittleEndian(target.Slice(4, 4), value.y); + BinaryPrimitives.WriteSingleLittleEndian(target.Slice(8, 4), value.z); + BinaryPrimitives.WriteSingleLittleEndian(target.Slice(12, 4), 1f); + return; + } + + if (type == MdlFile.VertexType.NByte4 && normalized) + { + WriteNByte4(new Vector4(value.x, value.y, value.z, 0f), target); + } + } + + private static void WriteVector4(MdlFile.VertexType type, Vector4 value, Span target) + { + if (type == MdlFile.VertexType.Single4) + { + BinaryPrimitives.WriteSingleLittleEndian(target[..4], value.x); + BinaryPrimitives.WriteSingleLittleEndian(target.Slice(4, 4), value.y); + BinaryPrimitives.WriteSingleLittleEndian(target.Slice(8, 4), value.z); + 
BinaryPrimitives.WriteSingleLittleEndian(target.Slice(12, 4), value.w); + return; + } + + if (type == MdlFile.VertexType.Half4) + { + WriteHalf(target[..2], value.x); + WriteHalf(target.Slice(2, 2), value.y); + WriteHalf(target.Slice(4, 2), value.z); + WriteHalf(target.Slice(6, 2), value.w); + return; + } + } + + private static void WriteUByte4(Vector4 value, Span target) + { + target[0] = ToByte(Clamp01(value.x)); + target[1] = ToByte(Clamp01(value.y)); + target[2] = ToByte(Clamp01(value.z)); + target[3] = ToByte(Clamp01(value.w)); + } + + private static void WriteNByte4(Vector4 value, Span target) + { + var normalized = (value * 0.5f) + new Vector4(0.5f); + WriteUByte4(normalized, target); + } + + private static void WriteHalf(Span target, float value) + { + var half = (Half)value; + BinaryPrimitives.WriteUInt16LittleEndian(target, BitConverter.HalfToUInt16Bits(half)); + } + + private static float ReadHalf(BinaryReader reader) + => (float)BitConverter.UInt16BitsToHalf(reader.ReadUInt16()); + + private static float Clamp01(float value) + => Math.Clamp(value, 0f, 1f); + + private static byte ToByte(float value) + => (byte)Math.Clamp((int)Math.Round(value * 255f), 0, 255); + + private static void NormalizeWeights(float[] weights) + { + var sum = weights.Sum(); + if (sum <= float.Epsilon) + { + return; + } + + for (var i = 0; i < weights.Length; i++) + { + weights[i] /= sum; + } + } + + private static void NormalizeWeights(ref float w0, ref float w1, ref float w2, ref float w3) + { + var sum = w0 + w1 + w2 + w3; + if (sum <= float.Epsilon) + { + return; + } + + w0 /= sum; + w1 /= sum; + w2 /= sum; + w3 /= sum; + } + + private static int GetElementSize(MdlFile.VertexType type) + => type switch + { + MdlFile.VertexType.Single2 => 8, + MdlFile.VertexType.Single3 => 12, + MdlFile.VertexType.Single4 => 16, + MdlFile.VertexType.Half2 => 4, + MdlFile.VertexType.Half4 => 8, + MdlFile.VertexType.UByte4 => 4, + MdlFile.VertexType.NByte4 => 4, + _ => throw new InvalidOperationException($"Unsupported vertex type {type}"), + }; + + private readonly record struct ElementKey(byte Stream, byte Offset, byte Type, byte Usage, byte UsageIndex) + { + public static ElementKey From(MdlStructs.VertexElement element) + => new(element.Stream, element.Offset, element.Type, element.Usage, element.UsageIndex); + } + + private sealed class VertexFormat + { + public VertexFormat( + List sortedElements, + MdlStructs.VertexElement? normalElement, + MdlStructs.VertexElement? tangentElement, + MdlStructs.VertexElement? colorElement, + MdlStructs.VertexElement? blendIndicesElement, + MdlStructs.VertexElement? blendWeightsElement, + List uvElements, + int uvChannelCount) + { + SortedElements = sortedElements; + NormalElement = normalElement; + TangentElement = tangentElement; + ColorElement = colorElement; + BlendIndicesElement = blendIndicesElement; + BlendWeightsElement = blendWeightsElement; + UvElements = uvElements; + UvChannelCount = uvChannelCount; + } + + public List SortedElements { get; } + public MdlStructs.VertexElement? NormalElement { get; } + public MdlStructs.VertexElement? TangentElement { get; } + public MdlStructs.VertexElement? ColorElement { get; } + public MdlStructs.VertexElement? BlendIndicesElement { get; } + public MdlStructs.VertexElement? 
BlendWeightsElement { get; } + public List UvElements { get; } + public int UvChannelCount { get; } + + public bool HasNormals => NormalElement.HasValue; + public bool HasTangents => TangentElement.HasValue; + public bool HasColors => ColorElement.HasValue; + public bool HasSkinning => BlendIndicesElement.HasValue && BlendWeightsElement.HasValue; + } + + private readonly record struct UvElementPacking(MdlStructs.VertexElement Element, int FirstChannel, int? SecondChannel); + + private sealed class DecodedMeshData + { + public DecodedMeshData( + Vector3d[] positions, + Vector3[]? normals, + Vector4[]? tangents, + Vector4[]? colors, + BoneWeight[]? boneWeights, + Vector2[][]? uvChannels) + { + Positions = positions; + Normals = normals; + Tangents = tangents; + Colors = colors; + BoneWeights = boneWeights; + UvChannels = uvChannels; + } + + public Vector3d[] Positions { get; } + public Vector3[]? Normals { get; } + public Vector4[]? Tangents { get; } + public Vector4[]? Colors { get; } + public BoneWeight[]? BoneWeights { get; } + public Vector2[][]? UvChannels { get; } + } +} + +internal static class MeshDecimatorVectorExtensions +{ + public static Vector3 ToVector3(this Vector4 value) + => new(value.x, value.y, value.z); + + public static float[] ToFloatArray(this Vector4 value) + => [value.x, value.y, value.z, value.w]; +} diff --git a/LightlessSync/Services/ModelDecimation/ModelDecimationService.cs b/LightlessSync/Services/ModelDecimation/ModelDecimationService.cs new file mode 100644 index 0000000..f666805 --- /dev/null +++ b/LightlessSync/Services/ModelDecimation/ModelDecimationService.cs @@ -0,0 +1,381 @@ +using LightlessSync.FileCache; +using LightlessSync.LightlessConfiguration; +using Microsoft.Extensions.Logging; +using System.Collections.Concurrent; +using System.Globalization; + +namespace LightlessSync.Services.ModelDecimation; + +public sealed class ModelDecimationService +{ + private const int MaxConcurrentJobs = 1; + private const double MinTargetRatio = 0.01; + private const double MaxTargetRatio = 0.99; + + private readonly ILogger _logger; + private readonly LightlessConfigService _configService; + private readonly FileCacheManager _fileCacheManager; + private readonly PlayerPerformanceConfigService _performanceConfigService; + private readonly XivDataStorageService _xivDataStorageService; + private readonly SemaphoreSlim _decimationSemaphore = new(MaxConcurrentJobs); + + private readonly ConcurrentDictionary _activeJobs = new(StringComparer.OrdinalIgnoreCase); + private readonly ConcurrentDictionary _decimatedPaths = new(StringComparer.OrdinalIgnoreCase); + private readonly ConcurrentDictionary _failedHashes = new(StringComparer.OrdinalIgnoreCase); + + public ModelDecimationService( + ILogger logger, + LightlessConfigService configService, + FileCacheManager fileCacheManager, + PlayerPerformanceConfigService performanceConfigService, + XivDataStorageService xivDataStorageService) + { + _logger = logger; + _configService = configService; + _fileCacheManager = fileCacheManager; + _performanceConfigService = performanceConfigService; + _xivDataStorageService = xivDataStorageService; + } + + public void ScheduleDecimation(string hash, string filePath, string? 
gamePath = null) + { + if (!ShouldScheduleDecimation(hash, filePath, gamePath)) + { + return; + } + + if (_decimatedPaths.ContainsKey(hash) || _failedHashes.ContainsKey(hash) || _activeJobs.ContainsKey(hash)) + { + return; + } + + _logger.LogInformation("Queued model decimation for {Hash}", hash); + + _activeJobs[hash] = Task.Run(async () => + { + await _decimationSemaphore.WaitAsync().ConfigureAwait(false); + try + { + await DecimateInternalAsync(hash, filePath).ConfigureAwait(false); + } + catch (Exception ex) + { + _failedHashes[hash] = 1; + _logger.LogWarning(ex, "Model decimation failed for {Hash}", hash); + } + finally + { + _decimationSemaphore.Release(); + _activeJobs.TryRemove(hash, out _); + } + }, CancellationToken.None); + } + + public bool ShouldScheduleDecimation(string hash, string filePath, string? gamePath = null) + => IsDecimationEnabled() + && filePath.EndsWith(".mdl", StringComparison.OrdinalIgnoreCase) + && IsDecimationAllowed(gamePath) + && !ShouldSkipByTriangleCache(hash); + + public string GetPreferredPath(string hash, string originalPath) + { + if (!IsDecimationEnabled()) + { + return originalPath; + } + + if (_decimatedPaths.TryGetValue(hash, out var existing) && File.Exists(existing)) + { + return existing; + } + + var resolved = GetExistingDecimatedPath(hash); + if (!string.IsNullOrEmpty(resolved)) + { + _decimatedPaths[hash] = resolved; + return resolved; + } + + return originalPath; + } + + public Task WaitForPendingJobsAsync(IEnumerable? hashes, CancellationToken token) + { + if (hashes is null) + { + return Task.CompletedTask; + } + + var pending = new List(); + var seen = new HashSet(StringComparer.OrdinalIgnoreCase); + + foreach (var hash in hashes) + { + if (string.IsNullOrEmpty(hash) || !seen.Add(hash)) + { + continue; + } + + if (_activeJobs.TryGetValue(hash, out var job)) + { + pending.Add(job); + } + } + + if (pending.Count == 0) + { + return Task.CompletedTask; + } + + return Task.WhenAll(pending).WaitAsync(token); + } + + private Task DecimateInternalAsync(string hash, string sourcePath) + { + if (!File.Exists(sourcePath)) + { + _failedHashes[hash] = 1; + _logger.LogWarning("Cannot decimate model {Hash}; source path missing: {Path}", hash, sourcePath); + return Task.CompletedTask; + } + + if (!TryGetDecimationSettings(out var triangleThreshold, out var targetRatio)) + { + _logger.LogInformation("Model decimation disabled or invalid settings for {Hash}", hash); + return Task.CompletedTask; + } + + _logger.LogInformation("Starting model decimation for {Hash} (threshold {Threshold}, ratio {Ratio:0.##})", hash, triangleThreshold, targetRatio); + + var destination = Path.Combine(GetDecimatedDirectory(), $"{hash}.mdl"); + if (File.Exists(destination)) + { + RegisterDecimatedModel(hash, sourcePath, destination); + return Task.CompletedTask; + } + + if (!MdlDecimator.TryDecimate(sourcePath, destination, triangleThreshold, targetRatio, _logger)) + { + _failedHashes[hash] = 1; + _logger.LogInformation("Model decimation skipped for {Hash}", hash); + return Task.CompletedTask; + } + + RegisterDecimatedModel(hash, sourcePath, destination); + _logger.LogInformation("Decimated model {Hash} -> {Path}", hash, destination); + return Task.CompletedTask; + } + + private void RegisterDecimatedModel(string hash, string sourcePath, string destination) + { + _decimatedPaths[hash] = destination; + + var performanceConfig = _performanceConfigService.Current; + if (performanceConfig.KeepOriginalModelFiles) + { + return; + } + + if (string.Equals(sourcePath, destination, 
StringComparison.OrdinalIgnoreCase)) + { + return; + } + + if (!TryReplaceCacheEntryWithDecimated(hash, sourcePath, destination)) + { + return; + } + + TryDelete(sourcePath); + } + + private bool TryReplaceCacheEntryWithDecimated(string hash, string sourcePath, string destination) + { + try + { + var cacheEntry = _fileCacheManager.GetFileCacheByHash(hash); + if (cacheEntry is null || !cacheEntry.IsCacheEntry) + { + return File.Exists(sourcePath) ? false : true; + } + + var cacheFolder = _configService.Current.CacheFolder; + if (string.IsNullOrEmpty(cacheFolder)) + { + return false; + } + + if (!destination.StartsWith(cacheFolder, StringComparison.OrdinalIgnoreCase)) + { + return false; + } + + var info = new FileInfo(destination); + if (!info.Exists) + { + return false; + } + + var relative = Path.GetRelativePath(cacheFolder, destination) + .Replace(Path.AltDirectorySeparatorChar, Path.DirectorySeparatorChar); + var sanitizedRelative = relative.TrimStart(Path.DirectorySeparatorChar); + var prefixed = Path.Combine(FileCacheManager.CachePrefix, sanitizedRelative); + + var replacement = new FileCacheEntity( + hash, + prefixed, + info.LastWriteTimeUtc.Ticks.ToString(CultureInfo.InvariantCulture), + info.Length, + cacheEntry.CompressedSize); + replacement.SetResolvedFilePath(destination); + + if (!string.Equals(cacheEntry.PrefixedFilePath, prefixed, StringComparison.OrdinalIgnoreCase)) + { + _fileCacheManager.RemoveHashedFile(cacheEntry.Hash, cacheEntry.PrefixedFilePath, removeDerivedFiles: false); + } + + _fileCacheManager.UpdateHashedFile(replacement, computeProperties: false); + _fileCacheManager.WriteOutFullCsv(); + + _logger.LogTrace("Replaced cache entry for model {Hash} to decimated path {Path}", hash, destination); + return true; + } + catch (Exception ex) + { + _logger.LogTrace(ex, "Failed to replace cache entry for model {Hash}", hash); + return false; + } + } + + private bool IsDecimationEnabled() + => _performanceConfigService.Current.EnableModelDecimation; + + private bool ShouldSkipByTriangleCache(string hash) + { + if (string.IsNullOrEmpty(hash)) + { + return false; + } + + if (!_xivDataStorageService.Current.TriangleDictionary.TryGetValue(hash, out var cachedTris) || cachedTris <= 0) + { + return false; + } + + var threshold = Math.Max(0, _performanceConfigService.Current.ModelDecimationTriangleThreshold); + return threshold > 0 && cachedTris < threshold; + } + + private bool IsDecimationAllowed(string? 
gamePath) + { + if (string.IsNullOrWhiteSpace(gamePath)) + { + return true; + } + + var normalized = NormalizeGamePath(gamePath); + if (normalized.Contains("/hair/", StringComparison.Ordinal)) + { + return false; + } + + if (normalized.Contains("/chara/equipment/", StringComparison.Ordinal)) + { + return _performanceConfigService.Current.ModelDecimationAllowClothing; + } + + if (normalized.Contains("/chara/accessory/", StringComparison.Ordinal)) + { + return _performanceConfigService.Current.ModelDecimationAllowAccessories; + } + + if (normalized.Contains("/chara/human/", StringComparison.Ordinal)) + { + if (normalized.Contains("/body/", StringComparison.Ordinal)) + { + return _performanceConfigService.Current.ModelDecimationAllowBody; + } + + if (normalized.Contains("/face/", StringComparison.Ordinal) || normalized.Contains("/head/", StringComparison.Ordinal)) + { + return _performanceConfigService.Current.ModelDecimationAllowFaceHead; + } + + if (normalized.Contains("/tail/", StringComparison.Ordinal)) + { + return _performanceConfigService.Current.ModelDecimationAllowTail; + } + } + + return true; + } + + private static string NormalizeGamePath(string path) + => path.Replace('\\', '/').ToLowerInvariant(); + + private bool TryGetDecimationSettings(out int triangleThreshold, out double targetRatio) + { + triangleThreshold = 15_000; + targetRatio = 0.8; + + var config = _performanceConfigService.Current; + if (!config.EnableModelDecimation) + { + return false; + } + + triangleThreshold = Math.Max(0, config.ModelDecimationTriangleThreshold); + targetRatio = config.ModelDecimationTargetRatio; + if (double.IsNaN(targetRatio) || double.IsInfinity(targetRatio)) + { + return false; + } + + targetRatio = Math.Clamp(targetRatio, MinTargetRatio, MaxTargetRatio); + return true; + } + + private string? GetExistingDecimatedPath(string hash) + { + var candidate = Path.Combine(GetDecimatedDirectory(), $"{hash}.mdl"); + return File.Exists(candidate) ? candidate : null; + } + + private string GetDecimatedDirectory() + { + var directory = Path.Combine(_configService.Current.CacheFolder, "decimated"); + if (!Directory.Exists(directory)) + { + try + { + Directory.CreateDirectory(directory); + } + catch (Exception ex) + { + _logger.LogTrace(ex, "Failed to create decimated directory {Directory}", directory); + } + } + + return directory; + } + + private static void TryDelete(string? 
path) + { + if (string.IsNullOrEmpty(path)) + { + return; + } + + try + { + if (File.Exists(path)) + { + File.Delete(path); + } + } + catch + { + // ignored + } + } +} diff --git a/LightlessSync/Services/PlayerPerformanceService.cs b/LightlessSync/Services/PlayerPerformanceService.cs index e77ccd7..5fa0049 100644 --- a/LightlessSync/Services/PlayerPerformanceService.cs +++ b/LightlessSync/Services/PlayerPerformanceService.cs @@ -4,6 +4,7 @@ using LightlessSync.LightlessConfiguration; using LightlessSync.PlayerData.Pairs; using LightlessSync.Services.Events; using LightlessSync.Services.Mediator; +using LightlessSync.Services.ModelDecimation; using LightlessSync.Services.TextureCompression; using LightlessSync.UI; using LightlessSync.WebAPI.Files.Models; @@ -18,12 +19,14 @@ public class PlayerPerformanceService private readonly ILogger _logger; private readonly LightlessMediator _mediator; private readonly PlayerPerformanceConfigService _playerPerformanceConfigService; + private readonly ModelDecimationService _modelDecimationService; private readonly TextureDownscaleService _textureDownscaleService; private readonly Dictionary _warnedForPlayers = new(StringComparer.Ordinal); public PlayerPerformanceService(ILogger logger, LightlessMediator mediator, PlayerPerformanceConfigService playerPerformanceConfigService, FileCacheManager fileCacheManager, - XivDataAnalyzer xivDataAnalyzer, TextureDownscaleService textureDownscaleService) + XivDataAnalyzer xivDataAnalyzer, TextureDownscaleService textureDownscaleService, + ModelDecimationService modelDecimationService) { _logger = logger; _mediator = mediator; @@ -31,6 +34,7 @@ public class PlayerPerformanceService _fileCacheManager = fileCacheManager; _xivDataAnalyzer = xivDataAnalyzer; _textureDownscaleService = textureDownscaleService; + _modelDecimationService = modelDecimationService; } public async Task CheckBothThresholds(IPairPerformanceSubject pairHandler, CharacterData charaData) @@ -111,10 +115,12 @@ public class PlayerPerformanceService var config = _playerPerformanceConfigService.Current; long triUsage = 0; + long effectiveTriUsage = 0; if (!charaData.FileReplacements.TryGetValue(API.Data.Enum.ObjectKind.Player, out List? 
playerReplacements)) { pairHandler.LastAppliedDataTris = 0; + pairHandler.LastAppliedApproximateEffectiveTris = 0; return true; } @@ -123,14 +129,40 @@ public class PlayerPerformanceService .Distinct(StringComparer.OrdinalIgnoreCase) .ToList(); + var skipDecimation = config.SkipModelDecimationForPreferredPairs && pairHandler.IsDirectlyPaired && pairHandler.HasStickyPermissions; + foreach (var hash in moddedModelHashes) { - triUsage += await _xivDataAnalyzer.GetTrianglesByHash(hash).ConfigureAwait(false); + var tris = await _xivDataAnalyzer.GetTrianglesByHash(hash).ConfigureAwait(false); + triUsage += tris; + + long effectiveTris = tris; + var fileEntry = _fileCacheManager.GetFileCacheByHash(hash); + if (fileEntry != null) + { + var preferredPath = fileEntry.ResolvedFilepath; + if (!skipDecimation) + { + preferredPath = _modelDecimationService.GetPreferredPath(hash, fileEntry.ResolvedFilepath); + } + + if (!string.Equals(preferredPath, fileEntry.ResolvedFilepath, StringComparison.OrdinalIgnoreCase)) + { + var decimatedTris = await _xivDataAnalyzer.GetEffectiveTrianglesByHash(hash, preferredPath).ConfigureAwait(false); + if (decimatedTris > 0) + { + effectiveTris = decimatedTris; + } + } + } + + effectiveTriUsage += effectiveTris; } pairHandler.LastAppliedDataTris = triUsage; + pairHandler.LastAppliedApproximateEffectiveTris = effectiveTriUsage; - _logger.LogDebug("Calculated VRAM usage for {p}", pairHandler); + _logger.LogDebug("Calculated triangle usage for {p}", pairHandler); // no warning of any kind on ignored pairs if (config.UIDsToIgnore @@ -167,7 +199,9 @@ public class PlayerPerformanceService public bool ComputeAndAutoPauseOnVRAMUsageThresholds(IPairPerformanceSubject pairHandler, CharacterData charaData, List toDownloadFiles) { var config = _playerPerformanceConfigService.Current; - bool skipDownscale = pairHandler.IsDirectlyPaired && pairHandler.HasStickyPermissions; + bool skipDownscale = config.SkipTextureDownscaleForPreferredPairs + && pairHandler.IsDirectlyPaired + && pairHandler.HasStickyPermissions; long vramUsage = 0; long effectiveVramUsage = 0; @@ -274,4 +308,4 @@ public class PlayerPerformanceService private static bool CheckForThreshold(bool thresholdEnabled, long threshold, long value, bool checkForPrefPerm, bool isPrefPerm) => thresholdEnabled && threshold > 0 && threshold < value && ((checkForPrefPerm && isPrefPerm) || !isPrefPerm); -} \ No newline at end of file +} diff --git a/LightlessSync/Services/TextureCompression/TextureDownscaleService.cs b/LightlessSync/Services/TextureCompression/TextureDownscaleService.cs index 7a09ae7..6fa6f92 100644 --- a/LightlessSync/Services/TextureCompression/TextureDownscaleService.cs +++ b/LightlessSync/Services/TextureCompression/TextureDownscaleService.cs @@ -77,16 +77,39 @@ public sealed class TextureDownscaleService } public void ScheduleDownscale(string hash, string filePath, TextureMapKind mapKind) + => ScheduleDownscale(hash, filePath, () => mapKind); + + public void ScheduleDownscale(string hash, string filePath, Func mapKindFactory) { if (!filePath.EndsWith(".tex", StringComparison.OrdinalIgnoreCase)) return; if (_activeJobs.ContainsKey(hash)) return; _activeJobs[hash] = Task.Run(async () => { + TextureMapKind mapKind; + try + { + mapKind = mapKindFactory(); + } + catch (Exception ex) + { + _logger.LogWarning(ex, "Failed to determine texture map kind for {Hash}; skipping downscale", hash); + return; + } + await DownscaleInternalAsync(hash, filePath, mapKind).ConfigureAwait(false); }, CancellationToken.None); } + public 
bool ShouldScheduleDownscale(string filePath) + { + if (!filePath.EndsWith(".tex", StringComparison.OrdinalIgnoreCase)) + return false; + + var performanceConfig = _playerPerformanceConfigService.Current; + return performanceConfig.EnableNonIndexTextureMipTrim || performanceConfig.EnableIndexTextureDownscale; + } + public string GetPreferredPath(string hash, string originalPath) { if (_downscaledPaths.TryGetValue(hash, out var existing) && File.Exists(existing)) @@ -655,7 +678,7 @@ public sealed class TextureDownscaleService if (!string.Equals(cacheEntry.PrefixedFilePath, prefixed, StringComparison.OrdinalIgnoreCase)) { - _fileCacheManager.RemoveHashedFile(cacheEntry.Hash, cacheEntry.PrefixedFilePath); + _fileCacheManager.RemoveHashedFile(cacheEntry.Hash, cacheEntry.PrefixedFilePath, removeDerivedFiles: false); } _fileCacheManager.UpdateHashedFile(replacement, computeProperties: false); diff --git a/LightlessSync/Services/XivDataAnalyzer.cs b/LightlessSync/Services/XivDataAnalyzer.cs index 9d32883..997df16 100644 --- a/LightlessSync/Services/XivDataAnalyzer.cs +++ b/LightlessSync/Services/XivDataAnalyzer.cs @@ -6,18 +6,22 @@ using FFXIVClientStructs.Havok.Common.Serialize.Util; using LightlessSync.FileCache; using LightlessSync.Interop.GameModel; using LightlessSync.LightlessConfiguration; +using LightlessSync.PlayerData.Factories; using LightlessSync.PlayerData.Handlers; using Microsoft.Extensions.Logging; +using System.Collections.Concurrent; using System.Runtime.InteropServices; +using System.Text.RegularExpressions; namespace LightlessSync.Services; -public sealed class XivDataAnalyzer +public sealed partial class XivDataAnalyzer { private readonly ILogger _logger; private readonly FileCacheManager _fileCacheManager; private readonly XivDataStorageService _configService; private readonly List _failedCalculatedTris = []; + private readonly List _failedCalculatedEffectiveTris = []; public XivDataAnalyzer(ILogger logger, FileCacheManager fileCacheManager, XivDataStorageService configService) @@ -29,127 +33,441 @@ public sealed class XivDataAnalyzer public unsafe Dictionary>? 
GetSkeletonBoneIndices(GameObjectHandler handler) { - if (handler.Address == nint.Zero) return null; - var chara = (CharacterBase*)(((Character*)handler.Address)->GameObject.DrawObject); - if (chara->GetModelType() != CharacterBase.ModelType.Human) return null; - var resHandles = chara->Skeleton->SkeletonResourceHandles; - Dictionary> outputIndices = []; + if (handler is null || handler.Address == nint.Zero) + return null; + + Dictionary> sets = new(StringComparer.OrdinalIgnoreCase); + try { - for (int i = 0; i < chara->Skeleton->PartialSkeletonCount; i++) + var drawObject = ((Character*)handler.Address)->GameObject.DrawObject; + if (drawObject == null) + return null; + + var chara = (CharacterBase*)drawObject; + if (chara->GetModelType() != CharacterBase.ModelType.Human) + return null; + + var skeleton = chara->Skeleton; + if (skeleton == null) + return null; + + var resHandles = skeleton->SkeletonResourceHandles; + var partialCount = skeleton->PartialSkeletonCount; + if (partialCount <= 0) + return null; + + for (int i = 0; i < partialCount; i++) { var handle = *(resHandles + i); - _logger.LogTrace("Iterating over SkeletonResourceHandle #{i}:{x}", i, ((nint)handle).ToString("X")); - if ((nint)handle == nint.Zero) continue; - var curBones = handle->BoneCount; - // this is unrealistic, the filename shouldn't ever be that long - if (handle->FileName.Length > 1024) continue; - var skeletonName = handle->FileName.ToString(); - if (string.IsNullOrEmpty(skeletonName)) continue; - outputIndices[skeletonName] = []; - for (ushort boneIdx = 0; boneIdx < curBones; boneIdx++) + if ((nint)handle == nint.Zero) + continue; + + if (handle->FileName.Length > 1024) + continue; + + var rawName = handle->FileName.ToString(); + if (string.IsNullOrWhiteSpace(rawName)) + continue; + + var skeletonKey = CanonicalizeSkeletonKey(rawName); + if (string.IsNullOrEmpty(skeletonKey)) + continue; + + var boneCount = handle->BoneCount; + if (boneCount == 0) + continue; + + var havokSkel = handle->HavokSkeleton; + if ((nint)havokSkel == nint.Zero) + continue; + + if (!sets.TryGetValue(skeletonKey, out var set)) { - var boneName = handle->HavokSkeleton->Bones[boneIdx].Name.String; - if (boneName == null) continue; - outputIndices[skeletonName].Add((ushort)(boneIdx + 1)); + set = []; + sets[skeletonKey] = set; } + + uint maxExclusive = boneCount; + uint ushortExclusive = (uint)ushort.MaxValue + 1u; + if (maxExclusive > ushortExclusive) + maxExclusive = ushortExclusive; + + for (uint boneIdx = 0; boneIdx < maxExclusive; boneIdx++) + { + var name = havokSkel->Bones[boneIdx].Name.String; + if (name == null) + continue; + + set.Add((ushort)boneIdx); + } + + _logger.LogTrace("Local skeleton raw file='{raw}', key='{key}', boneCount={count}", + rawName, skeletonKey, boneCount); } } catch (Exception ex) { _logger.LogWarning(ex, "Could not process skeleton data"); + return null; } - return (outputIndices.Count != 0 && outputIndices.Values.All(u => u.Count > 0)) ? outputIndices : null; + if (sets.Count == 0) + return null; + + var output = new Dictionary>(sets.Count, StringComparer.OrdinalIgnoreCase); + foreach (var (key, set) in sets) + { + if (set.Count == 0) + continue; + + var list = set.ToList(); + list.Sort(); + output[key] = list; + } + + return (output.Count != 0 && output.Values.All(v => v.Count > 0)) ? output : null; } - public unsafe Dictionary>? GetBoneIndicesFromPap(string hash) + public unsafe Dictionary>? 
GetBoneIndicesFromPap(string hash, bool persistToConfig = true) { - if (_configService.Current.BonesDictionary.TryGetValue(hash, out var bones)) return bones; + if (string.IsNullOrWhiteSpace(hash)) + return null; + + if (_configService.Current.BonesDictionary.TryGetValue(hash, out var cached) && cached is not null) + return cached; var cacheEntity = _fileCacheManager.GetFileCacheByHash(hash); - if (cacheEntity == null) return null; + if (cacheEntity == null || string.IsNullOrEmpty(cacheEntity.ResolvedFilepath) || !File.Exists(cacheEntity.ResolvedFilepath)) + return null; - using BinaryReader reader = new(File.Open(cacheEntity.ResolvedFilepath, FileMode.Open, FileAccess.Read, FileShare.Read)); + using var fs = File.Open(cacheEntity.ResolvedFilepath, FileMode.Open, FileAccess.Read, FileShare.Read); + using var reader = new BinaryReader(fs); - // most of this shit is from vfxeditor, surely nothing will change in the pap format :copium: - reader.ReadInt32(); // ignore - reader.ReadInt32(); // ignore - reader.ReadInt16(); // read 2 (num animations) - reader.ReadInt16(); // read 2 (modelid) - var type = reader.ReadByte();// read 1 (type) - if (type != 0) return null; // it's not human, just ignore it, whatever + // PAP header (mostly from vfxeditor) + _ = reader.ReadInt32(); // ignore + _ = reader.ReadInt32(); // ignore + _ = reader.ReadInt16(); // num animations + _ = reader.ReadInt16(); // modelid + + var type = reader.ReadByte(); // type + if (type != 0) + return null; // not human + + _ = reader.ReadByte(); // variant + _ = reader.ReadInt32(); // ignore - reader.ReadByte(); // read 1 (variant) - reader.ReadInt32(); // ignore var havokPosition = reader.ReadInt32(); var footerPosition = reader.ReadInt32(); - var havokDataSize = footerPosition - havokPosition; + + // sanity checks + if (havokPosition <= 0 || footerPosition <= havokPosition || footerPosition > fs.Length) + return null; + + var havokDataSizeLong = (long)footerPosition - havokPosition; + if (havokDataSizeLong <= 8 || havokDataSizeLong > int.MaxValue) + return null; + + var havokDataSize = (int)havokDataSizeLong; + reader.BaseStream.Position = havokPosition; var havokData = reader.ReadBytes(havokDataSize); - if (havokData.Length <= 8) return null; // no havok data + if (havokData.Length <= 8) + return null; - var output = new Dictionary>(StringComparer.OrdinalIgnoreCase); - var tempHavokDataPath = Path.Combine(Path.GetTempPath(), Path.GetRandomFileName()) + ".hkx"; - var tempHavokDataPathAnsi = Marshal.StringToHGlobalAnsi(tempHavokDataPath); + var tempSets = new Dictionary>(StringComparer.OrdinalIgnoreCase); + + var tempHavokDataPath = Path.Combine(Path.GetTempPath(), $"lightless_{Guid.NewGuid():N}.hkx"); + IntPtr tempHavokDataPathAnsi = IntPtr.Zero; try { File.WriteAllBytes(tempHavokDataPath, havokData); + if (!File.Exists(tempHavokDataPath)) + { + _logger.LogTrace("Temporary havok file did not exist when attempting to load: {path}", tempHavokDataPath); + return null; + } + + tempHavokDataPathAnsi = Marshal.StringToHGlobalAnsi(tempHavokDataPath); + var loadoptions = stackalloc hkSerializeUtil.LoadOptions[1]; loadoptions->TypeInfoRegistry = hkBuiltinTypeRegistry.Instance()->GetTypeInfoRegistry(); loadoptions->ClassNameRegistry = hkBuiltinTypeRegistry.Instance()->GetClassNameRegistry(); loadoptions->Flags = new hkFlags { - Storage = (int)(hkSerializeUtil.LoadOptionBits.Default) + Storage = (int)hkSerializeUtil.LoadOptionBits.Default }; var resource = hkSerializeUtil.LoadFromFile((byte*)tempHavokDataPathAnsi, null, loadoptions); if 
(resource == null) { - throw new InvalidOperationException("Resource was null after loading"); + _logger.LogWarning("Havok resource was null after loading from {path}", tempHavokDataPath); + return null; } var rootLevelName = @"hkRootLevelContainer"u8; fixed (byte* n1 = rootLevelName) { var container = (hkRootLevelContainer*)resource->GetContentsPointer(n1, hkBuiltinTypeRegistry.Instance()->GetTypeInfoRegistry()); + if (container == null) + return null; + var animationName = @"hkaAnimationContainer"u8; fixed (byte* n2 = animationName) { var animContainer = (hkaAnimationContainer*)container->findObjectByName(n2, null); + if (animContainer == null) + return null; + for (int i = 0; i < animContainer->Bindings.Length; i++) { var binding = animContainer->Bindings[i].ptr; + if (binding == null) + continue; + + var rawSkel = binding->OriginalSkeletonName.String; + var skeletonKey = CanonicalizeSkeletonKey(rawSkel); + if (string.IsNullOrEmpty(skeletonKey)) + continue; + var boneTransform = binding->TransformTrackToBoneIndices; - string name = binding->OriginalSkeletonName.String! + "_" + i; - output[name] = []; + if (boneTransform.Length <= 0) + continue; + + if (!tempSets.TryGetValue(skeletonKey, out var set)) + { + set = []; + tempSets[skeletonKey] = set; + } + for (int boneIdx = 0; boneIdx < boneTransform.Length; boneIdx++) { - output[name].Add((ushort)boneTransform[boneIdx]); + var v = boneTransform[boneIdx]; + if (v < 0) continue; + set.Add((ushort)v); } - output[name].Sort(); } - } } } catch (Exception ex) { _logger.LogWarning(ex, "Could not load havok file in {path}", tempHavokDataPath); + return null; } finally { - Marshal.FreeHGlobal(tempHavokDataPathAnsi); - File.Delete(tempHavokDataPath); + if (tempHavokDataPathAnsi != IntPtr.Zero) + Marshal.FreeHGlobal(tempHavokDataPathAnsi); + + try + { + if (File.Exists(tempHavokDataPath)) + File.Delete(tempHavokDataPath); + } + catch (Exception ex) + { + _logger.LogTrace(ex, "Could not delete temporary havok file: {path}", tempHavokDataPath); + } } + if (tempSets.Count == 0) + return null; + + var output = new Dictionary>(tempSets.Count, StringComparer.OrdinalIgnoreCase); + foreach (var (key, set) in tempSets) + { + if (set.Count == 0) continue; + + var list = set.ToList(); + list.Sort(); + output[key] = list; + } + + if (output.Count == 0) + return null; + _configService.Current.BonesDictionary[hash] = output; - _configService.Save(); + + if (persistToConfig) + _configService.Save(); + return output; } + + public static string CanonicalizeSkeletonKey(string? 
raw) + { + if (string.IsNullOrWhiteSpace(raw)) + return string.Empty; + + var s = raw.Replace('\\', '/').Trim(); + + var underscore = s.LastIndexOf('_'); + if (underscore > 0 && underscore + 1 < s.Length && char.IsDigit(s[underscore + 1])) + s = s[..underscore]; + + if (s.StartsWith("skeleton", StringComparison.OrdinalIgnoreCase)) + return "skeleton"; + + var m = _bucketPathRegex.Match(s); + if (m.Success) + return m.Groups["bucket"].Value.ToLowerInvariant(); + + m = _bucketSklRegex.Match(s); + if (m.Success) + return m.Groups["bucket"].Value.ToLowerInvariant(); + + m = _bucketLooseRegex.Match(s); + if (m.Success) + return m.Groups["bucket"].Value.ToLowerInvariant(); + + return string.Empty; + } + + public static bool ContainsIndexCompat( + HashSet available, + ushort idx, + bool papLikelyOneBased, + bool allowOneBasedShift, + bool allowNeighborTolerance) + { + Span candidates = stackalloc ushort[2]; + int count = 0; + + candidates[count++] = idx; + + if (allowOneBasedShift && papLikelyOneBased && idx > 0) + candidates[count++] = (ushort)(idx - 1); + + for (int i = 0; i < count; i++) + { + var c = candidates[i]; + + if (available.Contains(c)) + return true; + + if (allowNeighborTolerance) + { + if (c > 0 && available.Contains((ushort)(c - 1))) + return true; + + if (c < ushort.MaxValue && available.Contains((ushort)(c + 1))) + return true; + } + } + + return false; + } + + public static bool IsPapCompatible( + IReadOnlyDictionary> localBoneSets, + IReadOnlyDictionary> papBoneIndices, + AnimationValidationMode mode, + bool allowOneBasedShift, + bool allowNeighborTolerance, + out string reason) + { + reason = string.Empty; + + if (mode == AnimationValidationMode.Unsafe) + return true; + + var papBuckets = papBoneIndices.Keys + .Select(CanonicalizeSkeletonKey) + .Where(k => !string.IsNullOrEmpty(k)) + .Distinct(StringComparer.OrdinalIgnoreCase) + .ToList(); + + if (papBuckets.Count == 0) + { + reason = "No skeleton bucket bindings found in the PAP"; + return false; + } + + if (mode == AnimationValidationMode.Safe) + { + if (papBuckets.Any(b => localBoneSets.ContainsKey(b))) + return true; + + reason = $"No matching skeleton bucket between PAP [{string.Join(", ", papBuckets)}] and local [{string.Join(", ", localBoneSets.Keys.Order())}]."; + return false; + } + + foreach (var bucket in papBuckets) + { + if (!localBoneSets.TryGetValue(bucket, out var available)) + { + reason = $"Missing skeleton bucket '{bucket}' on local actor."; + return false; + } + + var indices = papBoneIndices + .Where(kvp => string.Equals(CanonicalizeSkeletonKey(kvp.Key), bucket, StringComparison.OrdinalIgnoreCase)) + .SelectMany(kvp => kvp.Value ?? Enumerable.Empty()) + .Distinct() + .ToList(); + + if (indices.Count == 0) + continue; + + bool has0 = false, has1 = false; + ushort min = ushort.MaxValue; + foreach (var v in indices) + { + if (v == 0) has0 = true; + if (v == 1) has1 = true; + if (v < min) min = v; + } + bool papLikelyOneBased = allowOneBasedShift && (min == 1) && has1 && !has0; + + foreach (var idx in indices) + { + if (!ContainsIndexCompat(available, idx, papLikelyOneBased, allowOneBasedShift, allowNeighborTolerance)) + { + reason = $"No compatible local skeleton for PAP '{bucket}': missing bone index {idx}."; + return false; + } + } + } + + return true; + } + + public void DumpLocalSkeletonIndices(GameObjectHandler handler, string? 
filter = null) + { + var skels = GetSkeletonBoneIndices(handler); + if (skels == null) + { + _logger.LogTrace("DumpLocalSkeletonIndices: local skeleton indices are null or not found"); + return; + } + + var keys = skels.Keys + .Order(StringComparer.OrdinalIgnoreCase) + .ToArray(); + + _logger.LogTrace("Local skeleton indices found ({count}): {keys}", + keys.Length, + string.Join(", ", keys)); + + if (!string.IsNullOrWhiteSpace(filter)) + { + var hits = keys.Where(k => + k.Equals(filter, StringComparison.OrdinalIgnoreCase) || + k.StartsWith(filter + "_", StringComparison.OrdinalIgnoreCase) || + filter.StartsWith(k + "_", StringComparison.OrdinalIgnoreCase) || + k.Contains(filter, StringComparison.OrdinalIgnoreCase)) + .ToArray(); + + _logger.LogTrace("Matches found for '{filter}': {hits}", + filter, + hits.Length == 0 ? "" : string.Join(", ", hits)); + } + } + public async Task GetTrianglesByHash(string hash) { if (_configService.Current.TriangleDictionary.TryGetValue(hash, out var cachedTris) && cachedTris > 0) @@ -162,16 +480,41 @@ public sealed class XivDataAnalyzer if (path == null || !path.ResolvedFilepath.EndsWith(".mdl", StringComparison.OrdinalIgnoreCase)) return 0; - var filePath = path.ResolvedFilepath; + return CalculateTrianglesFromPath(hash, path.ResolvedFilepath, _configService.Current.TriangleDictionary, _failedCalculatedTris); + } + public async Task GetEffectiveTrianglesByHash(string hash, string filePath) + { + if (_configService.Current.EffectiveTriangleDictionary.TryGetValue(hash, out var cachedTris) && cachedTris > 0) + return cachedTris; + + if (_failedCalculatedEffectiveTris.Contains(hash, StringComparer.Ordinal)) + return 0; + + if (string.IsNullOrEmpty(filePath) + || !filePath.EndsWith(".mdl", StringComparison.OrdinalIgnoreCase) + || !File.Exists(filePath)) + { + return 0; + } + + return CalculateTrianglesFromPath(hash, filePath, _configService.Current.EffectiveTriangleDictionary, _failedCalculatedEffectiveTris); + } + + private long CalculateTrianglesFromPath( + string hash, + string filePath, + ConcurrentDictionary cache, + List failedList) + { try { _logger.LogDebug("Detected Model File {path}, calculating Tris", filePath); var file = new MdlFile(filePath); if (file.LodCount <= 0) { - _failedCalculatedTris.Add(hash); - _configService.Current.TriangleDictionary[hash] = 0; + failedList.Add(hash); + cache[hash] = 0; _configService.Save(); return 0; } @@ -195,7 +538,7 @@ public sealed class XivDataAnalyzer if (tris > 0) { _logger.LogDebug("TriAnalysis: {filePath} => {tris} triangles", filePath, tris); - _configService.Current.TriangleDictionary[hash] = tris; + cache[hash] = tris; _configService.Save(); break; } @@ -205,11 +548,30 @@ public sealed class XivDataAnalyzer } catch (Exception e) { - _failedCalculatedTris.Add(hash); - _configService.Current.TriangleDictionary[hash] = 0; + failedList.Add(hash); + cache[hash] = 0; _configService.Save(); _logger.LogWarning(e, "Could not parse file {file}", filePath); return 0; } } + + // Regexes for canonicalizing skeleton keys + private static readonly Regex _bucketPathRegex = + BucketRegex(); + + private static readonly Regex _bucketSklRegex = + SklRegex(); + + private static readonly Regex _bucketLooseRegex = + LooseBucketRegex(); + + [GeneratedRegex(@"(?i)(?:^|/)(?c\d{4})(?:/|$)", RegexOptions.Compiled, "en-NL")] + private static partial Regex BucketRegex(); + + [GeneratedRegex(@"(?i)\bskl_(?c\d{4})[a-z]\d{4}\b", RegexOptions.Compiled, "en-NL")] + private static partial Regex SklRegex(); + + 
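// Bucket example: a skeleton path such as "chara/human/c0101/skeleton/base/b0001/skl_c0101b0001.sklb" + // canonicalizes to "c0101" (the "bucket" group); the loose pattern below only serves as a fallback for + // inputs that match neither the path form nor the skl_ file-name form. + 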
[GeneratedRegex(@"(?i)(?c\d{4})(?!\d)", RegexOptions.Compiled, "en-NL")] + private static partial Regex LooseBucketRegex(); } diff --git a/LightlessSync/ThirdParty/MeshDecimator/Algorithms/DecimationAlgorithm.cs b/LightlessSync/ThirdParty/MeshDecimator/Algorithms/DecimationAlgorithm.cs new file mode 100644 index 0000000..723eef6 --- /dev/null +++ b/LightlessSync/ThirdParty/MeshDecimator/Algorithms/DecimationAlgorithm.cs @@ -0,0 +1,169 @@ +#region License +/* +MIT License + +Copyright(c) 2017-2018 Mattias Edlund + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all +copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +SOFTWARE. +*/ +#endregion + +using System; +using Microsoft.Extensions.Logging; + +namespace MeshDecimator.Algorithms +{ + /// + /// A decimation algorithm. + /// + public abstract class DecimationAlgorithm + { + #region Delegates + /// + /// A callback for decimation status reports. + /// + /// The current iteration, starting at zero. + /// The original count of triangles. + /// The current count of triangles. + /// The target count of triangles. + public delegate void StatusReportCallback(int iteration, int originalTris, int currentTris, int targetTris); + #endregion + + #region Fields + private bool preserveBorders = false; + private int maxVertexCount = 0; + private bool verbose = false; + + private StatusReportCallback statusReportInvoker = null; + #endregion + + #region Properties + /// + /// Gets or sets if borders should be kept. + /// Default value: false + /// + [Obsolete("Use the 'DecimationAlgorithm.PreserveBorders' property instead.", false)] + public bool KeepBorders + { + get { return preserveBorders; } + set { preserveBorders = value; } + } + + /// + /// Gets or sets if borders should be preserved. + /// Default value: false + /// + public bool PreserveBorders + { + get { return preserveBorders; } + set { preserveBorders = value; } + } + + /// + /// Gets or sets if linked vertices should be kept. + /// Default value: false + /// + [Obsolete("This feature has been removed, for more details why please read the readme.", true)] + public bool KeepLinkedVertices + { + get { return false; } + set { } + } + + /// + /// Gets or sets the maximum vertex count. Set to zero for no limitation. + /// Default value: 0 (no limitation) + /// + public int MaxVertexCount + { + get { return maxVertexCount; } + set { maxVertexCount = Math.MathHelper.Max(value, 0); } + } + + /// + /// Gets or sets if verbose information should be printed in the console. 
+ /// Default value: false + /// + public bool Verbose + { + get { return verbose; } + set { verbose = value; } + } + + /// + /// Gets or sets the logger used for diagnostics. + /// + public ILogger? Logger { get; set; } + #endregion + + #region Events + /// + /// An event for status reports for this algorithm. + /// + public event StatusReportCallback StatusReport + { + add { statusReportInvoker += value; } + remove { statusReportInvoker -= value; } + } + #endregion + + #region Protected Methods + /// + /// Reports the current status of the decimation. + /// + /// The current iteration, starting at zero. + /// The original count of triangles. + /// The current count of triangles. + /// The target count of triangles. + protected void ReportStatus(int iteration, int originalTris, int currentTris, int targetTris) + { + var statusReportInvoker = this.statusReportInvoker; + if (statusReportInvoker != null) + { + statusReportInvoker.Invoke(iteration, originalTris, currentTris, targetTris); + } + } + #endregion + + #region Public Methods + /// + /// Initializes the algorithm with the original mesh. + /// + /// The mesh. + public abstract void Initialize(Mesh mesh); + + /// + /// Decimates the mesh. + /// + /// The target triangle count. + public abstract void DecimateMesh(int targetTrisCount); + + /// + /// Decimates the mesh without losing any quality. + /// + public abstract void DecimateMeshLossless(); + + /// + /// Returns the resulting mesh. + /// + /// The resulting mesh. + public abstract Mesh ToMesh(); + #endregion + } +} diff --git a/LightlessSync/ThirdParty/MeshDecimator/Algorithms/FastQuadricMeshSimplification.cs b/LightlessSync/ThirdParty/MeshDecimator/Algorithms/FastQuadricMeshSimplification.cs new file mode 100644 index 0000000..fe22c85 --- /dev/null +++ b/LightlessSync/ThirdParty/MeshDecimator/Algorithms/FastQuadricMeshSimplification.cs @@ -0,0 +1,1549 @@ +#region License +/* +MIT License + +Copyright(c) 2017-2018 Mattias Edlund + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all +copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +SOFTWARE. 
+*/ +#endregion + +#region Original License +///////////////////////////////////////////// +// +// Mesh Simplification Tutorial +// +// (C) by Sven Forstmann in 2014 +// +// License : MIT +// http://opensource.org/licenses/MIT +// +//https://github.com/sp4cerat/Fast-Quadric-Mesh-Simplification +#endregion + +using System; +using System.Collections.Generic; +using MeshDecimator.Collections; +using MeshDecimator.Math; +using Microsoft.Extensions.Logging; + +namespace MeshDecimator.Algorithms +{ + /// + /// The fast quadric mesh simplification algorithm. + /// + public sealed class FastQuadricMeshSimplification : DecimationAlgorithm + { + #region Consts + private const double DoubleEpsilon = 1.0E-3; + #endregion + + #region Classes + #region Triangle + private struct Triangle + { + #region Fields + public int v0; + public int v1; + public int v2; + public int subMeshIndex; + + public int va0; + public int va1; + public int va2; + + public double err0; + public double err1; + public double err2; + public double err3; + + public bool deleted; + public bool dirty; + public Vector3d n; + #endregion + + #region Properties + public int this[int index] + { + get + { + return (index == 0 ? v0 : (index == 1 ? v1 : v2)); + } + set + { + switch (index) + { + case 0: + v0 = value; + break; + case 1: + v1 = value; + break; + case 2: + v2 = value; + break; + default: + throw new IndexOutOfRangeException(); + } + } + } + #endregion + + #region Constructor + public Triangle(int v0, int v1, int v2, int subMeshIndex) + { + this.v0 = v0; + this.v1 = v1; + this.v2 = v2; + this.subMeshIndex = subMeshIndex; + + this.va0 = v0; + this.va1 = v1; + this.va2 = v2; + + err0 = err1 = err2 = err3 = 0; + deleted = dirty = false; + n = new Vector3d(); + } + #endregion + + #region Public Methods + public void GetAttributeIndices(int[] attributeIndices) + { + attributeIndices[0] = va0; + attributeIndices[1] = va1; + attributeIndices[2] = va2; + } + + public void SetAttributeIndex(int index, int value) + { + switch (index) + { + case 0: + va0 = value; + break; + case 1: + va1 = value; + break; + case 2: + va2 = value; + break; + default: + throw new IndexOutOfRangeException(); + } + } + + public void GetErrors(double[] err) + { + err[0] = err0; + err[1] = err1; + err[2] = err2; + } + #endregion + } + #endregion + + #region Vertex + private struct Vertex + { + public Vector3d p; + public int tstart; + public int tcount; + public SymmetricMatrix q; + public bool border; + public bool seam; + public bool foldover; + + public Vertex(Vector3d p) + { + this.p = p; + this.tstart = 0; + this.tcount = 0; + this.q = new SymmetricMatrix(); + this.border = true; + this.seam = false; + this.foldover = false; + } + } + #endregion + + #region Ref + private struct Ref + { + public int tid; + public int tvertex; + + public void Set(int tid, int tvertex) + { + this.tid = tid; + this.tvertex = tvertex; + } + } + #endregion + + #region Border Vertex + private struct BorderVertex + { + public int index; + public int hash; + + public BorderVertex(int index, int hash) + { + this.index = index; + this.hash = hash; + } + } + #endregion + + #region Border Vertex Comparer + private class BorderVertexComparer : IComparer + { + public static readonly BorderVertexComparer instance = new BorderVertexComparer(); + + public int Compare(BorderVertex x, BorderVertex y) + { + return x.hash.CompareTo(y.hash); + } + } + #endregion + #endregion + + #region Fields + private bool preserveSeams = false; + private bool preserveFoldovers = false; + private bool 
enableSmartLink = true; + private int maxIterationCount = 100; + private double agressiveness = 7.0; + private double vertexLinkDistanceSqr = double.Epsilon; + + private int subMeshCount = 0; + private ResizableArray triangles = null; + private ResizableArray vertices = null; + private ResizableArray refs = null; + + private ResizableArray vertNormals = null; + private ResizableArray vertTangents = null; + private UVChannels vertUV2D = null; + private UVChannels vertUV3D = null; + private UVChannels vertUV4D = null; + private ResizableArray vertColors = null; + private ResizableArray vertBoneWeights = null; + + private int remainingVertices = 0; + + // Pre-allocated buffers + private double[] errArr = new double[3]; + private int[] attributeIndexArr = new int[3]; + #endregion + + #region Properties + /// + /// Gets or sets if seams should be preserved. + /// Default value: false + /// + public bool PreserveSeams + { + get { return preserveSeams; } + set { preserveSeams = value; } + } + + /// + /// Gets or sets if foldovers should be preserved. + /// Default value: false + /// + public bool PreserveFoldovers + { + get { return preserveFoldovers; } + set { preserveFoldovers = value; } + } + + /// + /// Gets or sets if a feature for smarter vertex linking should be enabled, reducing artifacts in the + /// decimated result at the cost of a slightly more expensive initialization by treating vertices at + /// the same position as the same vertex while separating the attributes. + /// Default value: true + /// + public bool EnableSmartLink + { + get { return enableSmartLink; } + set { enableSmartLink = value; } + } + + /// + /// Gets or sets the maximum iteration count. Higher number is more expensive but can bring you closer to your target quality. + /// Sometimes a lower maximum count might be desired in order to lower the performance cost. + /// Default value: 100 + /// + public int MaxIterationCount + { + get { return maxIterationCount; } + set { maxIterationCount = value; } + } + + /// + /// Gets or sets the agressiveness of this algorithm. Higher number equals higher quality, but more expensive to run. + /// Default value: 7.0 + /// + public double Agressiveness + { + get { return agressiveness; } + set { agressiveness = value; } + } + + /// + /// Gets or sets the maximum squared distance between two vertices in order to link them. + /// Note that this value is only used if EnableSmartLink is true. + /// Default value: double.Epsilon + /// + public double VertexLinkDistanceSqr + { + get { return vertexLinkDistanceSqr; } + set { vertexLinkDistanceSqr = value; } + } + #endregion + + #region Constructor + /// + /// Creates a new fast quadric mesh simplification algorithm. 
+ /// + public FastQuadricMeshSimplification() + { + triangles = new ResizableArray(0); + vertices = new ResizableArray(0); + refs = new ResizableArray(0); + } + #endregion + + #region Private Methods + #region Initialize Vertex Attribute + private ResizableArray InitializeVertexAttribute(T[] attributeValues, string attributeName) + { + if (attributeValues != null && attributeValues.Length == vertices.Length) + { + var newArray = new ResizableArray(attributeValues.Length, attributeValues.Length); + var newArrayData = newArray.Data; + Array.Copy(attributeValues, 0, newArrayData, 0, attributeValues.Length); + return newArray; + } + else if (attributeValues != null && attributeValues.Length > 0) + { + Logger?.LogError( + "Failed to set vertex attribute '{Attribute}' with {ActualLength} length of array, when {ExpectedLength} was needed.", + attributeName, + attributeValues.Length, + vertices.Length); + } + return null; + } + #endregion + + #region Calculate Error + private double VertexError(ref SymmetricMatrix q, double x, double y, double z) + { + return q.m0*x*x + 2*q.m1*x*y + 2*q.m2*x*z + 2*q.m3*x + q.m4*y*y + + 2*q.m5*y*z + 2*q.m6*y + q.m7*z*z + 2*q.m8*z + q.m9; + } + + private double CalculateError(ref Vertex vert0, ref Vertex vert1, out Vector3d result, out int resultIndex) + { + // compute interpolated vertex + SymmetricMatrix q = (vert0.q + vert1.q); + bool border = (vert0.border & vert1.border); + double error = 0.0; + double det = q.Determinant1(); + if (det != 0.0 && !border) + { + // q_delta is invertible + result = new Vector3d( + -1.0 / det * q.Determinant2(), // vx = A41/det(q_delta) + 1.0 / det * q.Determinant3(), // vy = A42/det(q_delta) + -1.0 / det * q.Determinant4()); // vz = A43/det(q_delta) + error = VertexError(ref q, result.x, result.y, result.z); + resultIndex = 2; + } + else + { + // det = 0 -> try to find best result + Vector3d p1 = vert0.p; + Vector3d p2 = vert1.p; + Vector3d p3 = (p1 + p2) * 0.5f; + double error1 = VertexError(ref q, p1.x, p1.y, p1.z); + double error2 = VertexError(ref q, p2.x, p2.y, p2.z); + double error3 = VertexError(ref q, p3.x, p3.y, p3.z); + error = MathHelper.Min(error1, error2, error3); + if (error == error3) + { + result = p3; + resultIndex = 2; + } + else if (error == error2) + { + result = p2; + resultIndex = 1; + } + else if (error == error1) + { + result = p1; + resultIndex = 0; + } + else + { + result = p3; + resultIndex = 2; + } + } + return error; + } + #endregion + + #region Flipped + /// + /// Check if a triangle flips when this edge is removed + /// + private bool Flipped(ref Vector3d p, int i0, int i1, ref Vertex v0, bool[] deleted) + { + int tcount = v0.tcount; + var refs = this.refs.Data; + var triangles = this.triangles.Data; + var vertices = this.vertices.Data; + for (int k = 0; k < tcount; k++) + { + Ref r = refs[v0.tstart + k]; + if (triangles[r.tid].deleted) + continue; + + int s = r.tvertex; + int id1 = triangles[r.tid][(s + 1) % 3]; + int id2 = triangles[r.tid][(s + 2) % 3]; + if (id1 == i1 || id2 == i1) + { + deleted[k] = true; + continue; + } + + Vector3d d1 = vertices[id1].p - p; + d1.Normalize(); + Vector3d d2 = vertices[id2].p - p; + d2.Normalize(); + double dot = Vector3d.Dot(ref d1, ref d2); + if (System.Math.Abs(dot) > 0.999) + return true; + + Vector3d n; + Vector3d.Cross(ref d1, ref d2, out n); + n.Normalize(); + deleted[k] = false; + dot = Vector3d.Dot(ref n, ref triangles[r.tid].n); + if (dot < 0.2) + return true; + } + + return false; + } + #endregion + + #region Update Triangles + /// + /// Update 
triangle connections and edge error after a edge is collapsed. + /// + private void UpdateTriangles(int i0, int ia0, ref Vertex v, ResizableArray deleted, ref int deletedTriangles) + { + Vector3d p; + int pIndex; + int tcount = v.tcount; + var triangles = this.triangles.Data; + var vertices = this.vertices.Data; + for (int k = 0; k < tcount; k++) + { + Ref r = refs[v.tstart + k]; + int tid = r.tid; + Triangle t = triangles[tid]; + if (t.deleted) + continue; + + if (deleted[k]) + { + triangles[tid].deleted = true; + ++deletedTriangles; + continue; + } + + t[r.tvertex] = i0; + if (ia0 != -1) + { + t.SetAttributeIndex(r.tvertex, ia0); + } + + t.dirty = true; + t.err0 = CalculateError(ref vertices[t.v0], ref vertices[t.v1], out p, out pIndex); + t.err1 = CalculateError(ref vertices[t.v1], ref vertices[t.v2], out p, out pIndex); + t.err2 = CalculateError(ref vertices[t.v2], ref vertices[t.v0], out p, out pIndex); + t.err3 = MathHelper.Min(t.err0, t.err1, t.err2); + triangles[tid] = t; + refs.Add(r); + } + } + #endregion + + #region Move/Merge Vertex Attributes + private void MoveVertexAttributes(int i0, int i1) + { + if (vertNormals != null) + { + vertNormals[i0] = vertNormals[i1]; + } + if (vertTangents != null) + { + vertTangents[i0] = vertTangents[i1]; + } + if (vertUV2D != null) + { + for (int i = 0; i < Mesh.UVChannelCount; i++) + { + var vertUV = vertUV2D[i]; + if (vertUV != null) + { + vertUV[i0] = vertUV[i1]; + } + } + } + if (vertUV3D != null) + { + for (int i = 0; i < Mesh.UVChannelCount; i++) + { + var vertUV = vertUV3D[i]; + if (vertUV != null) + { + vertUV[i0] = vertUV[i1]; + } + } + } + if (vertUV4D != null) + { + for (int i = 0; i < Mesh.UVChannelCount; i++) + { + var vertUV = vertUV4D[i]; + if (vertUV != null) + { + vertUV[i0] = vertUV[i1]; + } + } + } + if (vertColors != null) + { + vertColors[i0] = vertColors[i1]; + } + if (vertBoneWeights != null) + { + vertBoneWeights[i0] = vertBoneWeights[i1]; + } + } + + private void MergeVertexAttributes(int i0, int i1) + { + if (vertNormals != null) + { + vertNormals[i0] = (vertNormals[i0] + vertNormals[i1]) * 0.5f; + } + if (vertTangents != null) + { + vertTangents[i0] = (vertTangents[i0] + vertTangents[i1]) * 0.5f; + } + if (vertUV2D != null) + { + for (int i = 0; i < Mesh.UVChannelCount; i++) + { + var vertUV = vertUV2D[i]; + if (vertUV != null) + { + vertUV[i0] = (vertUV[i0] + vertUV[i1]) * 0.5f; + } + } + } + if (vertUV3D != null) + { + for (int i = 0; i < Mesh.UVChannelCount; i++) + { + var vertUV = vertUV3D[i]; + if (vertUV != null) + { + vertUV[i0] = (vertUV[i0] + vertUV[i1]) * 0.5f; + } + } + } + if (vertUV4D != null) + { + for (int i = 0; i < Mesh.UVChannelCount; i++) + { + var vertUV = vertUV4D[i]; + if (vertUV != null) + { + vertUV[i0] = (vertUV[i0] + vertUV[i1]) * 0.5f; + } + } + } + if (vertColors != null) + { + vertColors[i0] = (vertColors[i0] + vertColors[i1]) * 0.5f; + } + + // TODO: Do we have to blend bone weights at all or can we just keep them as it is in this scenario? 
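                // For the TODO above: the BoneWeight struct added elsewhere in this patch exposes a static
                // Merge(ref a, ref b) helper that folds b's non-zero influences into a and renormalizes.
                // If blending were ever wanted here, a sketch could look like the following; this is purely
                // illustrative and not what MergeVertexAttributes currently does:
                //
                //     if (vertBoneWeights != null)
                //     {
                //         var blended = vertBoneWeights[i0];
                //         var other = vertBoneWeights[i1];
                //         BoneWeight.Merge(ref blended, ref other);
                //         vertBoneWeights[i0] = blended;
                //     }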
+ } + #endregion + + #region Are UVs The Same + private bool AreUVsTheSame(int channel, int indexA, int indexB) + { + if (vertUV2D != null) + { + var vertUV = vertUV2D[channel]; + if (vertUV != null) + { + var uvA = vertUV[indexA]; + var uvB = vertUV[indexB]; + return uvA == uvB; + } + } + + if (vertUV3D != null) + { + var vertUV = vertUV3D[channel]; + if (vertUV != null) + { + var uvA = vertUV[indexA]; + var uvB = vertUV[indexB]; + return uvA == uvB; + } + } + + if (vertUV4D != null) + { + var vertUV = vertUV4D[channel]; + if (vertUV != null) + { + var uvA = vertUV[indexA]; + var uvB = vertUV[indexB]; + return uvA == uvB; + } + } + + return false; + } + #endregion + + #region Remove Vertex Pass + /// + /// Remove vertices and mark deleted triangles + /// + private void RemoveVertexPass(int startTrisCount, int targetTrisCount, double threshold, ResizableArray deleted0, ResizableArray deleted1, ref int deletedTris) + { + var triangles = this.triangles.Data; + int triangleCount = this.triangles.Length; + var vertices = this.vertices.Data; + + bool preserveBorders = base.PreserveBorders; + int maxVertexCount = base.MaxVertexCount; + if (maxVertexCount <= 0) + maxVertexCount = int.MaxValue; + + Vector3d p; + int pIndex; + for (int tid = 0; tid < triangleCount; tid++) + { + if (triangles[tid].dirty || triangles[tid].deleted || triangles[tid].err3 > threshold) + continue; + + triangles[tid].GetErrors(errArr); + triangles[tid].GetAttributeIndices(attributeIndexArr); + for (int edgeIndex = 0; edgeIndex < 3; edgeIndex++) + { + if (errArr[edgeIndex] > threshold) + continue; + + int nextEdgeIndex = ((edgeIndex + 1) % 3); + int i0 = triangles[tid][edgeIndex]; + int i1 = triangles[tid][nextEdgeIndex]; + + // Border check + if (vertices[i0].border != vertices[i1].border) + continue; + // Seam check + else if (vertices[i0].seam != vertices[i1].seam) + continue; + // Foldover check + else if (vertices[i0].foldover != vertices[i1].foldover) + continue; + // If borders should be preserved + else if (preserveBorders && vertices[i0].border) + continue; + // If seams should be preserved + else if (preserveSeams && vertices[i0].seam) + continue; + // If foldovers should be preserved + else if (preserveFoldovers && vertices[i0].foldover) + continue; + + // Compute vertex to collapse to + CalculateError(ref vertices[i0], ref vertices[i1], out p, out pIndex); + deleted0.Resize(vertices[i0].tcount); // normals temporarily + deleted1.Resize(vertices[i1].tcount); // normals temporarily + + // Don't remove if flipped + if (Flipped(ref p, i0, i1, ref vertices[i0], deleted0.Data)) + continue; + if (Flipped(ref p, i1, i0, ref vertices[i1], deleted1.Data)) + continue; + + int ia0 = attributeIndexArr[edgeIndex]; + + // Not flipped, so remove edge + vertices[i0].p = p; + vertices[i0].q += vertices[i1].q; + + if (pIndex == 1) + { + // Move vertex attributes from ia1 to ia0 + int ia1 = attributeIndexArr[nextEdgeIndex]; + MoveVertexAttributes(ia0, ia1); + } + else if (pIndex == 2) + { + // Merge vertex attributes ia0 and ia1 into ia0 + int ia1 = attributeIndexArr[nextEdgeIndex]; + MergeVertexAttributes(ia0, ia1); + } + + if (vertices[i0].seam) + { + ia0 = -1; + } + + int tstart = refs.Length; + UpdateTriangles(i0, ia0, ref vertices[i0], deleted0, ref deletedTris); + UpdateTriangles(i0, ia0, ref vertices[i1], deleted1, ref deletedTris); + + int tcount = refs.Length - tstart; + if (tcount <= vertices[i0].tcount) + { + // save ram + if (tcount > 0) + { + var refsArr = refs.Data; + Array.Copy(refsArr, tstart, refsArr, 
vertices[i0].tstart, tcount); + } + } + else + { + // append + vertices[i0].tstart = tstart; + } + + vertices[i0].tcount = tcount; + --remainingVertices; + break; + } + + // Check if we are already done + if ((startTrisCount - deletedTris) <= targetTrisCount && remainingVertices < maxVertexCount) + break; + } + } + #endregion + + #region Update Mesh + /// + /// Compact triangles, compute edge error and build reference list. + /// + /// The iteration index. + private void UpdateMesh(int iteration) + { + var triangles = this.triangles.Data; + var vertices = this.vertices.Data; + + int triangleCount = this.triangles.Length; + int vertexCount = this.vertices.Length; + if (iteration > 0) // compact triangles + { + int dst = 0; + for (int i = 0; i < triangleCount; i++) + { + if (!triangles[i].deleted) + { + if (dst != i) + { + triangles[dst] = triangles[i]; + } + dst++; + } + } + this.triangles.Resize(dst); + triangles = this.triangles.Data; + triangleCount = dst; + } + + UpdateReferences(); + + // Identify boundary : vertices[].border=0,1 + if (iteration == 0) + { + var refs = this.refs.Data; + + var vcount = new List(8); + var vids = new List(8); + int vsize = 0; + for (int i = 0; i < vertexCount; i++) + { + vertices[i].border = false; + vertices[i].seam = false; + vertices[i].foldover = false; + } + + int ofs; + int id; + int borderVertexCount = 0; + double borderMinX = double.MaxValue; + double borderMaxX = double.MinValue; + for (int i = 0; i < vertexCount; i++) + { + int tstart = vertices[i].tstart; + int tcount = vertices[i].tcount; + vcount.Clear(); + vids.Clear(); + vsize = 0; + + for (int j = 0; j < tcount; j++) + { + int tid = refs[tstart + j].tid; + for (int k = 0; k < 3; k++) + { + ofs = 0; + id = triangles[tid][k]; + while (ofs < vsize) + { + if (vids[ofs] == id) + break; + + ++ofs; + } + + if (ofs == vsize) + { + vcount.Add(1); + vids.Add(id); + ++vsize; + } + else + { + ++vcount[ofs]; + } + } + } + + for (int j = 0; j < vsize; j++) + { + if (vcount[j] == 1) + { + id = vids[j]; + vertices[id].border = true; + ++borderVertexCount; + + if (enableSmartLink) + { + if (vertices[id].p.x < borderMinX) + { + borderMinX = vertices[id].p.x; + } + if (vertices[id].p.x > borderMaxX) + { + borderMaxX = vertices[id].p.x; + } + } + } + } + } + + if (enableSmartLink) + { + // First find all border vertices + var borderVertices = new BorderVertex[borderVertexCount]; + int borderIndexCount = 0; + double borderAreaWidth = borderMaxX - borderMinX; + for (int i = 0; i < vertexCount; i++) + { + if (vertices[i].border) + { + int vertexHash = (int)(((((vertices[i].p.x - borderMinX) / borderAreaWidth) * 2.0) - 1.0) * int.MaxValue); + borderVertices[borderIndexCount] = new BorderVertex(i, vertexHash); + ++borderIndexCount; + } + } + + // Sort the border vertices by hash + Array.Sort(borderVertices, 0, borderIndexCount, BorderVertexComparer.instance); + + // Calculate the maximum hash distance based on the maximum vertex link distance + double vertexLinkDistance = System.Math.Sqrt(vertexLinkDistanceSqr); + int hashMaxDistance = System.Math.Max((int)((vertexLinkDistance / borderAreaWidth) * int.MaxValue), 1); + + // Then find identical border vertices and bind them together as one + for (int i = 0; i < borderIndexCount; i++) + { + int myIndex = borderVertices[i].index; + if (myIndex == -1) + continue; + + var myPoint = vertices[myIndex].p; + for (int j = i + 1; j < borderIndexCount; j++) + { + int otherIndex = borderVertices[j].index; + if (otherIndex == -1) + continue; + else if ((borderVertices[j].hash - 
borderVertices[i].hash) > hashMaxDistance) // There is no point to continue beyond this point + break; + + var otherPoint = vertices[otherIndex].p; + var sqrX = ((myPoint.x - otherPoint.x) * (myPoint.x - otherPoint.x)); + var sqrY = ((myPoint.y - otherPoint.y) * (myPoint.y - otherPoint.y)); + var sqrZ = ((myPoint.z - otherPoint.z) * (myPoint.z - otherPoint.z)); + var sqrMagnitude = sqrX + sqrY + sqrZ; + + if (sqrMagnitude <= vertexLinkDistanceSqr) + { + borderVertices[j].index = -1; // NOTE: This makes sure that the "other" vertex is not processed again + vertices[myIndex].border = false; + vertices[otherIndex].border = false; + + if (AreUVsTheSame(0, myIndex, otherIndex)) + { + vertices[myIndex].foldover = true; + vertices[otherIndex].foldover = true; + } + else + { + vertices[myIndex].seam = true; + vertices[otherIndex].seam = true; + } + + int otherTriangleCount = vertices[otherIndex].tcount; + int otherTriangleStart = vertices[otherIndex].tstart; + for (int k = 0; k < otherTriangleCount; k++) + { + var r = refs[otherTriangleStart + k]; + triangles[r.tid][r.tvertex] = myIndex; + } + } + } + } + + // Update the references again + UpdateReferences(); + } + + // Init Quadrics by Plane & Edge Errors + // + // required at the beginning ( iteration == 0 ) + // recomputing during the simplification is not required, + // but mostly improves the result for closed meshes + for (int i = 0; i < vertexCount; i++) + { + vertices[i].q = new SymmetricMatrix(); + } + + int v0, v1, v2; + Vector3d n, p0, p1, p2, p10, p20, dummy; + int dummy2; + SymmetricMatrix sm; + for (int i = 0; i < triangleCount; i++) + { + v0 = triangles[i].v0; + v1 = triangles[i].v1; + v2 = triangles[i].v2; + + p0 = vertices[v0].p; + p1 = vertices[v1].p; + p2 = vertices[v2].p; + p10 = p1 - p0; + p20 = p2 - p0; + Vector3d.Cross(ref p10, ref p20, out n); + n.Normalize(); + triangles[i].n = n; + + sm = new SymmetricMatrix(n.x, n.y, n.z, -Vector3d.Dot(ref n, ref p0)); + vertices[v0].q += sm; + vertices[v1].q += sm; + vertices[v2].q += sm; + } + + for (int i = 0; i < triangleCount; i++) + { + // Calc Edge Error + var triangle = triangles[i]; + triangles[i].err0 = CalculateError(ref vertices[triangle.v0], ref vertices[triangle.v1], out dummy, out dummy2); + triangles[i].err1 = CalculateError(ref vertices[triangle.v1], ref vertices[triangle.v2], out dummy, out dummy2); + triangles[i].err2 = CalculateError(ref vertices[triangle.v2], ref vertices[triangle.v0], out dummy, out dummy2); + triangles[i].err3 = MathHelper.Min(triangles[i].err0, triangles[i].err1, triangles[i].err2); + } + } + } + #endregion + + #region Update References + private void UpdateReferences() + { + int triangleCount = this.triangles.Length; + int vertexCount = this.vertices.Length; + var triangles = this.triangles.Data; + var vertices = this.vertices.Data; + + // Init Reference ID list + for (int i = 0; i < vertexCount; i++) + { + vertices[i].tstart = 0; + vertices[i].tcount = 0; + } + + for (int i = 0; i < triangleCount; i++) + { + ++vertices[triangles[i].v0].tcount; + ++vertices[triangles[i].v1].tcount; + ++vertices[triangles[i].v2].tcount; + } + + int tstart = 0; + remainingVertices = 0; + for (int i = 0; i < vertexCount; i++) + { + vertices[i].tstart = tstart; + if (vertices[i].tcount > 0) + { + tstart += vertices[i].tcount; + vertices[i].tcount = 0; + ++remainingVertices; + } + } + + // Write References + this.refs.Resize(tstart); + var refs = this.refs.Data; + for (int i = 0; i < triangleCount; i++) + { + int v0 = triangles[i].v0; + int v1 = triangles[i].v1; + int 
v2 = triangles[i].v2; + int start0 = vertices[v0].tstart; + int count0 = vertices[v0].tcount; + int start1 = vertices[v1].tstart; + int count1 = vertices[v1].tcount; + int start2 = vertices[v2].tstart; + int count2 = vertices[v2].tcount; + + refs[start0 + count0].Set(i, 0); + refs[start1 + count1].Set(i, 1); + refs[start2 + count2].Set(i, 2); + + ++vertices[v0].tcount; + ++vertices[v1].tcount; + ++vertices[v2].tcount; + } + } + #endregion + + #region Compact Mesh + /// + /// Finally compact mesh before exiting. + /// + private void CompactMesh() + { + int dst = 0; + var vertices = this.vertices.Data; + int vertexCount = this.vertices.Length; + for (int i = 0; i < vertexCount; i++) + { + vertices[i].tcount = 0; + } + + var vertNormals = (this.vertNormals != null ? this.vertNormals.Data : null); + var vertTangents = (this.vertTangents != null ? this.vertTangents.Data : null); + var vertUV2D = (this.vertUV2D != null ? this.vertUV2D.Data : null); + var vertUV3D = (this.vertUV3D != null ? this.vertUV3D.Data : null); + var vertUV4D = (this.vertUV4D != null ? this.vertUV4D.Data : null); + var vertColors = (this.vertColors != null ? this.vertColors.Data : null); + var vertBoneWeights = (this.vertBoneWeights != null ? this.vertBoneWeights.Data : null); + + var triangles = this.triangles.Data; + int triangleCount = this.triangles.Length; + for (int i = 0; i < triangleCount; i++) + { + var triangle = triangles[i]; + if (!triangle.deleted) + { + if (triangle.va0 != triangle.v0) + { + int iDest = triangle.va0; + int iSrc = triangle.v0; + vertices[iDest].p = vertices[iSrc].p; + if (vertBoneWeights != null) + { + vertBoneWeights[iDest] = vertBoneWeights[iSrc]; + } + triangle.v0 = triangle.va0; + } + if (triangle.va1 != triangle.v1) + { + int iDest = triangle.va1; + int iSrc = triangle.v1; + vertices[iDest].p = vertices[iSrc].p; + if (vertBoneWeights != null) + { + vertBoneWeights[iDest] = vertBoneWeights[iSrc]; + } + triangle.v1 = triangle.va1; + } + if (triangle.va2 != triangle.v2) + { + int iDest = triangle.va2; + int iSrc = triangle.v2; + vertices[iDest].p = vertices[iSrc].p; + if (vertBoneWeights != null) + { + vertBoneWeights[iDest] = vertBoneWeights[iSrc]; + } + triangle.v2 = triangle.va2; + } + + triangles[dst++] = triangle; + + vertices[triangle.v0].tcount = 1; + vertices[triangle.v1].tcount = 1; + vertices[triangle.v2].tcount = 1; + } + } + + triangleCount = dst; + this.triangles.Resize(triangleCount); + triangles = this.triangles.Data; + + dst = 0; + for (int i = 0; i < vertexCount; i++) + { + var vert = vertices[i]; + if (vert.tcount > 0) + { + vert.tstart = dst; + vertices[i] = vert; + + if (dst != i) + { + vertices[dst].p = vert.p; + if (vertNormals != null) vertNormals[dst] = vertNormals[i]; + if (vertTangents != null) vertTangents[dst] = vertTangents[i]; + if (vertUV2D != null) + { + for (int j = 0; j < Mesh.UVChannelCount; j++) + { + var vertUV = vertUV2D[j]; + if (vertUV != null) + { + vertUV[dst] = vertUV[i]; + } + } + } + if (vertUV3D != null) + { + for (int j = 0; j < Mesh.UVChannelCount; j++) + { + var vertUV = vertUV3D[j]; + if (vertUV != null) + { + vertUV[dst] = vertUV[i]; + } + } + } + if (vertUV4D != null) + { + for (int j = 0; j < Mesh.UVChannelCount; j++) + { + var vertUV = vertUV4D[j]; + if (vertUV != null) + { + vertUV[dst] = vertUV[i]; + } + } + } + if (vertColors != null) vertColors[dst] = vertColors[i]; + if (vertBoneWeights != null) vertBoneWeights[dst] = vertBoneWeights[i]; + } + ++dst; + } + } + + for (int i = 0; i < triangleCount; i++) + { + var triangle = 
triangles[i]; + triangle.v0 = vertices[triangle.v0].tstart; + triangle.v1 = vertices[triangle.v1].tstart; + triangle.v2 = vertices[triangle.v2].tstart; + triangles[i] = triangle; + } + + vertexCount = dst; + this.vertices.Resize(vertexCount); + if (vertNormals != null) this.vertNormals.Resize(vertexCount, true); + if (vertTangents != null) this.vertTangents.Resize(vertexCount, true); + if (vertUV2D != null) this.vertUV2D.Resize(vertexCount, true); + if (vertUV3D != null) this.vertUV3D.Resize(vertexCount, true); + if (vertUV4D != null) this.vertUV4D.Resize(vertexCount, true); + if (vertColors != null) this.vertColors.Resize(vertexCount, true); + if (vertBoneWeights != null) this.vertBoneWeights.Resize(vertexCount, true); + } + #endregion + #endregion + + #region Public Methods + #region Initialize + /// + /// Initializes the algorithm with the original mesh. + /// + /// The mesh. + public override void Initialize(Mesh mesh) + { + if (mesh == null) + throw new ArgumentNullException("mesh"); + + int meshSubMeshCount = mesh.SubMeshCount; + int meshTriangleCount = mesh.TriangleCount; + var meshVertices = mesh.Vertices; + var meshNormals = mesh.Normals; + var meshTangents = mesh.Tangents; + var meshColors = mesh.Colors; + var meshBoneWeights = mesh.BoneWeights; + subMeshCount = meshSubMeshCount; + + vertices.Resize(meshVertices.Length); + var vertArr = vertices.Data; + for (int i = 0; i < meshVertices.Length; i++) + { + vertArr[i] = new Vertex(meshVertices[i]); + } + + triangles.Resize(meshTriangleCount); + var trisArr = triangles.Data; + int triangleIndex = 0; + for (int subMeshIndex = 0; subMeshIndex < meshSubMeshCount; subMeshIndex++) + { + int[] subMeshIndices = mesh.GetIndices(subMeshIndex); + int subMeshTriangleCount = subMeshIndices.Length / 3; + for (int i = 0; i < subMeshTriangleCount; i++) + { + int offset = i * 3; + int v0 = subMeshIndices[offset]; + int v1 = subMeshIndices[offset + 1]; + int v2 = subMeshIndices[offset + 2]; + trisArr[triangleIndex++] = new Triangle(v0, v1, v2, subMeshIndex); + } + } + + vertNormals = InitializeVertexAttribute(meshNormals, "normals"); + vertTangents = InitializeVertexAttribute(meshTangents, "tangents"); + vertColors = InitializeVertexAttribute(meshColors, "colors"); + vertBoneWeights = InitializeVertexAttribute(meshBoneWeights, "boneWeights"); + + for (int i = 0; i < Mesh.UVChannelCount; i++) + { + int uvDim = mesh.GetUVDimension(i); + string uvAttributeName = string.Format("uv{0}", i); + if (uvDim == 2) + { + if (vertUV2D == null) + vertUV2D = new UVChannels(); + + var uvs = mesh.GetUVs2D(i); + vertUV2D[i] = InitializeVertexAttribute(uvs, uvAttributeName); + } + else if (uvDim == 3) + { + if (vertUV3D == null) + vertUV3D = new UVChannels(); + + var uvs = mesh.GetUVs3D(i); + vertUV3D[i] = InitializeVertexAttribute(uvs, uvAttributeName); + } + else if (uvDim == 4) + { + if (vertUV4D == null) + vertUV4D = new UVChannels(); + + var uvs = mesh.GetUVs4D(i); + vertUV4D[i] = InitializeVertexAttribute(uvs, uvAttributeName); + } + } + } + #endregion + + #region Decimate Mesh + /// + /// Decimates the mesh. + /// + /// The target triangle count. 
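One contract worth noting from the Initialize path above: optional vertex attributes (normals, tangents, colors, bone weights and each UV channel) are kept only when their array length equals the vertex count; InitializeVertexAttribute logs an error and drops the channel otherwise, so the decimated mesh comes back without it. A short caller-side sketch, with hypothetical array names:

// positions: Vector3d[], indices: int[][], normals: Vector3d[], uvs: Vector2[] -- hypothetical caller data.
var mesh = new Mesh(positions, indices);
mesh.Normals = normals;      // keep normals.Length equal to positions.Length or the channel is dropped
mesh.SetUVs(0, uvs);         // the same rule applies to every UV channel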
+ public override void DecimateMesh(int targetTrisCount) + { + if (targetTrisCount < 0) + throw new ArgumentOutOfRangeException("targetTrisCount"); + + int deletedTris = 0; + ResizableArray deleted0 = new ResizableArray(20); + ResizableArray deleted1 = new ResizableArray(20); + var triangles = this.triangles.Data; + int triangleCount = this.triangles.Length; + int startTrisCount = triangleCount; + var vertices = this.vertices.Data; + + int maxVertexCount = base.MaxVertexCount; + if (maxVertexCount <= 0) + maxVertexCount = int.MaxValue; + + for (int iteration = 0; iteration < maxIterationCount; iteration++) + { + ReportStatus(iteration, startTrisCount, (startTrisCount - deletedTris), targetTrisCount); + if ((startTrisCount - deletedTris) <= targetTrisCount && remainingVertices < maxVertexCount) + break; + + // Update mesh once in a while + if ((iteration % 5) == 0) + { + UpdateMesh(iteration); + triangles = this.triangles.Data; + triangleCount = this.triangles.Length; + vertices = this.vertices.Data; + } + + // Clear dirty flag + for (int i = 0; i < triangleCount; i++) + { + triangles[i].dirty = false; + } + + // All triangles with edges below the threshold will be removed + // + // The following numbers works well for most models. + // If it does not, try to adjust the 3 parameters + double threshold = 0.000000001 * System.Math.Pow(iteration + 3, agressiveness); + + if (Verbose && (iteration % 5) == 0) + { + Logger?.LogTrace( + "Iteration {Iteration} - triangles {Triangles} threshold {Threshold}", + iteration, + (startTrisCount - deletedTris), + threshold); + } + + // Remove vertices & mark deleted triangles + RemoveVertexPass(startTrisCount, targetTrisCount, threshold, deleted0, deleted1, ref deletedTris); + } + + CompactMesh(); + } + #endregion + + #region Decimate Mesh Lossless + /// + /// Decimates the mesh without losing any quality. + /// + public override void DecimateMeshLossless() + { + int deletedTris = 0; + ResizableArray deleted0 = new ResizableArray(0); + ResizableArray deleted1 = new ResizableArray(0); + var triangles = this.triangles.Data; + int triangleCount = this.triangles.Length; + int startTrisCount = triangleCount; + var vertices = this.vertices.Data; + + ReportStatus(0, startTrisCount, startTrisCount, -1); + for (int iteration = 0; iteration < 9999; iteration++) + { + // Update mesh constantly + UpdateMesh(iteration); + triangles = this.triangles.Data; + triangleCount = this.triangles.Length; + vertices = this.vertices.Data; + + ReportStatus(iteration, startTrisCount, triangleCount, -1); + + // Clear dirty flag + for (int i = 0; i < triangleCount; i++) + { + triangles[i].dirty = false; + } + + // All triangles with edges below the threshold will be removed + // + // The following numbers works well for most models. + // If it does not, try to adjust the 3 parameters + double threshold = DoubleEpsilon; + + if (Verbose) + { + Logger?.LogTrace("Lossless iteration {Iteration}", iteration); + } + + // Remove vertices & mark deleted triangles + RemoveVertexPass(startTrisCount, 0, threshold, deleted0, deleted1, ref deletedTris); + + if (deletedTris <= 0) + break; + + deletedTris = 0; + } + + CompactMesh(); + } + #endregion + + #region To Mesh + /// + /// Returns the resulting mesh. + /// + /// The resulting mesh. 
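DecimateMesh above grows its collapse threshold as 1e-9 * (iteration + 3)^Agressiveness, so with the default aggressiveness of 7.0 the cutoff climbs from roughly 2.2e-6 at iteration 0 to about 2.1e-3 by iteration 5 and past 3.4 by iteration 20, while DecimateMeshLossless instead holds the threshold at the fixed DoubleEpsilon (1e-3) and stops once an iteration deletes nothing. A tiny stand-alone loop to inspect that ramp:

// Reproduces the threshold schedule used in DecimateMesh for the default aggressiveness of 7.0.
const double aggressiveness = 7.0;
foreach (int iteration in new[] { 0, 5, 10, 20 })
{
    double threshold = 0.000000001 * System.Math.Pow(iteration + 3, aggressiveness);
    System.Console.WriteLine($"iteration {iteration}: threshold {threshold:E3}");
}
// Prints approximately 2.187E-006, 2.097E-003, 6.275E-002 and 3.405E+000.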
+ public override Mesh ToMesh() + { + int vertexCount = this.vertices.Length; + int triangleCount = this.triangles.Length; + var vertices = new Vector3d[vertexCount]; + var indices = new int[subMeshCount][]; + + var vertArr = this.vertices.Data; + for (int i = 0; i < vertexCount; i++) + { + vertices[i] = vertArr[i].p; + } + + // First get the sub-mesh offsets + var triArr = this.triangles.Data; + int[] subMeshOffsets = new int[subMeshCount]; + int lastSubMeshOffset = -1; + for (int i = 0; i < triangleCount; i++) + { + var triangle = triArr[i]; + if (triangle.subMeshIndex != lastSubMeshOffset) + { + for (int j = lastSubMeshOffset + 1; j < triangle.subMeshIndex; j++) + { + subMeshOffsets[j] = i; + } + subMeshOffsets[triangle.subMeshIndex] = i; + lastSubMeshOffset = triangle.subMeshIndex; + } + } + for (int i = lastSubMeshOffset + 1; i < subMeshCount; i++) + { + subMeshOffsets[i] = triangleCount; + } + + // Then setup the sub-meshes + for (int subMeshIndex = 0; subMeshIndex < subMeshCount; subMeshIndex++) + { + int startOffset = subMeshOffsets[subMeshIndex]; + if (startOffset < triangleCount) + { + int endOffset = ((subMeshIndex + 1) < subMeshCount ? subMeshOffsets[subMeshIndex + 1] : triangleCount); + int subMeshTriangleCount = endOffset - startOffset; + if (subMeshTriangleCount < 0) subMeshTriangleCount = 0; + int[] subMeshIndices = new int[subMeshTriangleCount * 3]; + + for (int triangleIndex = startOffset; triangleIndex < endOffset; triangleIndex++) + { + var triangle = triArr[triangleIndex]; + int offset = (triangleIndex - startOffset) * 3; + subMeshIndices[offset] = triangle.v0; + subMeshIndices[offset + 1] = triangle.v1; + subMeshIndices[offset + 2] = triangle.v2; + } + + indices[subMeshIndex] = subMeshIndices; + } + else + { + // This mesh doesn't have any triangles left + indices[subMeshIndex] = new int[0]; + } + } + + Mesh newMesh = new Mesh(vertices, indices); + + if (vertNormals != null) + { + newMesh.Normals = vertNormals.Data; + } + if (vertTangents != null) + { + newMesh.Tangents = vertTangents.Data; + } + if (vertColors != null) + { + newMesh.Colors = vertColors.Data; + } + if (vertBoneWeights != null) + { + newMesh.BoneWeights = vertBoneWeights.Data; + } + + if (vertUV2D != null) + { + for (int i = 0; i < Mesh.UVChannelCount; i++) + { + if (vertUV2D[i] != null) + { + var uvSet = vertUV2D[i].Data; + newMesh.SetUVs(i, uvSet); + } + } + } + + if (vertUV3D != null) + { + for (int i = 0; i < Mesh.UVChannelCount; i++) + { + if (vertUV3D[i] != null) + { + var uvSet = vertUV3D[i].Data; + newMesh.SetUVs(i, uvSet); + } + } + } + + if (vertUV4D != null) + { + for (int i = 0; i < Mesh.UVChannelCount; i++) + { + if (vertUV4D[i] != null) + { + var uvSet = vertUV4D[i].Data; + newMesh.SetUVs(i, uvSet); + } + } + } + + return newMesh; + } + #endregion + #endregion + } +} diff --git a/LightlessSync/ThirdParty/MeshDecimator/BoneWeight.cs b/LightlessSync/ThirdParty/MeshDecimator/BoneWeight.cs new file mode 100644 index 0000000..6501468 --- /dev/null +++ b/LightlessSync/ThirdParty/MeshDecimator/BoneWeight.cs @@ -0,0 +1,249 @@ +#region License +/* +MIT License + +Copyright(c) 2017-2018 Mattias Edlund + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, 
subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all +copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +SOFTWARE. +*/ +#endregion + +using System; +using MeshDecimator.Math; + +namespace MeshDecimator +{ + /// + /// A bone weight. + /// + public struct BoneWeight : IEquatable + { + #region Fields + /// + /// The first bone index. + /// + public int boneIndex0; + /// + /// The second bone index. + /// + public int boneIndex1; + /// + /// The third bone index. + /// + public int boneIndex2; + /// + /// The fourth bone index. + /// + public int boneIndex3; + + /// + /// The first bone weight. + /// + public float boneWeight0; + /// + /// The second bone weight. + /// + public float boneWeight1; + /// + /// The third bone weight. + /// + public float boneWeight2; + /// + /// The fourth bone weight. + /// + public float boneWeight3; + #endregion + + #region Constructor + /// + /// Creates a new bone weight. + /// + /// The first bone index. + /// The second bone index. + /// The third bone index. + /// The fourth bone index. + /// The first bone weight. + /// The second bone weight. + /// The third bone weight. + /// The fourth bone weight. + public BoneWeight(int boneIndex0, int boneIndex1, int boneIndex2, int boneIndex3, float boneWeight0, float boneWeight1, float boneWeight2, float boneWeight3) + { + this.boneIndex0 = boneIndex0; + this.boneIndex1 = boneIndex1; + this.boneIndex2 = boneIndex2; + this.boneIndex3 = boneIndex3; + + this.boneWeight0 = boneWeight0; + this.boneWeight1 = boneWeight1; + this.boneWeight2 = boneWeight2; + this.boneWeight3 = boneWeight3; + } + #endregion + + #region Operators + /// + /// Returns if two bone weights equals eachother. + /// + /// The left hand side bone weight. + /// The right hand side bone weight. + /// If equals. + public static bool operator ==(BoneWeight lhs, BoneWeight rhs) + { + return (lhs.boneIndex0 == rhs.boneIndex0 && lhs.boneIndex1 == rhs.boneIndex1 && lhs.boneIndex2 == rhs.boneIndex2 && lhs.boneIndex3 == rhs.boneIndex3 && + new Vector4(lhs.boneWeight0, lhs.boneWeight1, lhs.boneWeight2, lhs.boneWeight3) == new Vector4(rhs.boneWeight0, rhs.boneWeight1, rhs.boneWeight2, rhs.boneWeight3)); + } + + /// + /// Returns if two bone weights don't equal eachother. + /// + /// The left hand side bone weight. + /// The right hand side bone weight. + /// If not equals. 
+ public static bool operator !=(BoneWeight lhs, BoneWeight rhs) + { + return !(lhs == rhs); + } + #endregion + + #region Private Methods + private void MergeBoneWeight(int boneIndex, float weight) + { + if (boneIndex == boneIndex0) + { + boneWeight0 = (boneWeight0 + weight) * 0.5f; + } + else if (boneIndex == boneIndex1) + { + boneWeight1 = (boneWeight1 + weight) * 0.5f; + } + else if (boneIndex == boneIndex2) + { + boneWeight2 = (boneWeight2 + weight) * 0.5f; + } + else if (boneIndex == boneIndex3) + { + boneWeight3 = (boneWeight3 + weight) * 0.5f; + } + else if(boneWeight0 == 0f) + { + boneIndex0 = boneIndex; + boneWeight0 = weight; + } + else if (boneWeight1 == 0f) + { + boneIndex1 = boneIndex; + boneWeight1 = weight; + } + else if (boneWeight2 == 0f) + { + boneIndex2 = boneIndex; + boneWeight2 = weight; + } + else if (boneWeight3 == 0f) + { + boneIndex3 = boneIndex; + boneWeight3 = weight; + } + Normalize(); + } + + private void Normalize() + { + float mag = (float)System.Math.Sqrt(boneWeight0 * boneWeight0 + boneWeight1 * boneWeight1 + boneWeight2 * boneWeight2 + boneWeight3 * boneWeight3); + if (mag > float.Epsilon) + { + boneWeight0 /= mag; + boneWeight1 /= mag; + boneWeight2 /= mag; + boneWeight3 /= mag; + } + else + { + boneWeight0 = boneWeight1 = boneWeight2 = boneWeight3 = 0f; + } + } + #endregion + + #region Public Methods + #region Object + /// + /// Returns a hash code for this vector. + /// + /// The hash code. + public override int GetHashCode() + { + return boneIndex0.GetHashCode() ^ boneIndex1.GetHashCode() << 2 ^ boneIndex2.GetHashCode() >> 2 ^ boneIndex3.GetHashCode() >> + 1 ^ boneWeight0.GetHashCode() << 5 ^ boneWeight1.GetHashCode() << 4 ^ boneWeight2.GetHashCode() >> 4 ^ boneWeight3.GetHashCode() >> 3; + } + + /// + /// Returns if this bone weight is equal to another object. + /// + /// The other object to compare to. + /// If equals. + public override bool Equals(object obj) + { + if (!(obj is BoneWeight)) + { + return false; + } + BoneWeight other = (BoneWeight)obj; + return (boneIndex0 == other.boneIndex0 && boneIndex1 == other.boneIndex1 && boneIndex2 == other.boneIndex2 && boneIndex3 == other.boneIndex3 && + boneWeight0 == other.boneWeight0 && boneWeight1 == other.boneWeight1 && boneWeight2 == other.boneWeight2 && boneWeight3 == other.boneWeight3); + } + + /// + /// Returns if this bone weight is equal to another one. + /// + /// The other bone weight to compare to. + /// If equals. + public bool Equals(BoneWeight other) + { + return (boneIndex0 == other.boneIndex0 && boneIndex1 == other.boneIndex1 && boneIndex2 == other.boneIndex2 && boneIndex3 == other.boneIndex3 && + boneWeight0 == other.boneWeight0 && boneWeight1 == other.boneWeight1 && boneWeight2 == other.boneWeight2 && boneWeight3 == other.boneWeight3); + } + + /// + /// Returns a nicely formatted string for this bone weight. + /// + /// The string. + public override string ToString() + { + return string.Format("({0}:{4:F1}, {1}:{5:F1}, {2}:{6:F1}, {3}:{7:F1})", + boneIndex0, boneIndex1, boneIndex2, boneIndex3, boneWeight0, boneWeight1, boneWeight2, boneWeight3); + } + #endregion + + #region Static + /// + /// Merges two bone weights and stores the merged result in the first parameter. + /// + /// The first bone weight, also stores result. + /// The second bone weight. 
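Note that Normalize above rescales by the Euclidean magnitude of the four weights rather than by their sum, so a merged BoneWeight has unit length but its weights no longer add up to one; merging full weight on bone 0 with full weight on bone 1, for example, yields roughly 0.707 on each (sum of about 1.414). A self-contained check, assuming the MeshDecimator namespace is imported, followed by a hypothetical sum-renormalization for pipelines that expect weights summing to one:

var a = new BoneWeight(0, 0, 0, 0, 1f, 0f, 0f, 0f);
var b = new BoneWeight(1, 0, 0, 0, 1f, 0f, 0f, 0f);
BoneWeight.Merge(ref a, ref b);
// a now carries ~0.707 on bone 0 and ~0.707 on bone 1: unit Euclidean length, weight sum ~1.414.

// Hypothetical extra pass, not something this file performs:
float sum = a.boneWeight0 + a.boneWeight1 + a.boneWeight2 + a.boneWeight3;
if (sum > float.Epsilon)
{
    a.boneWeight0 /= sum; a.boneWeight1 /= sum; a.boneWeight2 /= sum; a.boneWeight3 /= sum;
}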
+ public static void Merge(ref BoneWeight a, ref BoneWeight b) + { + if (b.boneWeight0 > 0f) a.MergeBoneWeight(b.boneIndex0, b.boneWeight0); + if (b.boneWeight1 > 0f) a.MergeBoneWeight(b.boneIndex1, b.boneWeight1); + if (b.boneWeight2 > 0f) a.MergeBoneWeight(b.boneIndex2, b.boneWeight2); + if (b.boneWeight3 > 0f) a.MergeBoneWeight(b.boneIndex3, b.boneWeight3); + } + #endregion + #endregion + } +} \ No newline at end of file diff --git a/LightlessSync/ThirdParty/MeshDecimator/Collections/ResizableArray.cs b/LightlessSync/ThirdParty/MeshDecimator/Collections/ResizableArray.cs new file mode 100644 index 0000000..2c69814 --- /dev/null +++ b/LightlessSync/ThirdParty/MeshDecimator/Collections/ResizableArray.cs @@ -0,0 +1,179 @@ +#region License +/* +MIT License + +Copyright(c) 2017-2018 Mattias Edlund + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all +copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +SOFTWARE. +*/ +#endregion + +using System; + +namespace MeshDecimator.Collections +{ + /// + /// A resizable array. + /// + /// The item type. + internal sealed class ResizableArray + { + #region Fields + private T[] items = null; + private int length = 0; + + private static T[] emptyArr = new T[0]; + #endregion + + #region Properties + /// + /// Gets the length of this array. + /// + public int Length + { + get { return length; } + } + + /// + /// Gets the internal data buffer for this array. + /// + public T[] Data + { + get { return items; } + } + + /// + /// Gets or sets the element value at a specific index. + /// + /// The element index. + /// The element value. + public T this[int index] + { + get { return items[index]; } + set { items[index] = value; } + } + #endregion + + #region Constructor + /// + /// Creates a new resizable array. + /// + /// The initial array capacity. + public ResizableArray(int capacity) + : this(capacity, 0) + { + + } + + /// + /// Creates a new resizable array. + /// + /// The initial array capacity. + /// The initial length of the array. + public ResizableArray(int capacity, int length) + { + if (capacity < 0) + throw new ArgumentOutOfRangeException("capacity"); + else if (length < 0 || length > capacity) + throw new ArgumentOutOfRangeException("length"); + + if (capacity > 0) + items = new T[capacity]; + else + items = emptyArr; + + this.length = length; + } + #endregion + + #region Private Methods + private void IncreaseCapacity(int capacity) + { + T[] newItems = new T[capacity]; + Array.Copy(items, 0, newItems, 0, System.Math.Min(length, capacity)); + items = newItems; + } + #endregion + + #region Public Methods + /// + /// Clears this array. 
+ /// + public void Clear() + { + Array.Clear(items, 0, length); + length = 0; + } + + /// + /// Resizes this array. + /// + /// The new length. + /// If exess memory should be trimmed. + public void Resize(int length, bool trimExess = false) + { + if (length < 0) + throw new ArgumentOutOfRangeException("capacity"); + + if (length > items.Length) + { + IncreaseCapacity(length); + } + else if (length < this.length) + { + //Array.Clear(items, capacity, length - capacity); + } + + this.length = length; + + if (trimExess) + { + TrimExcess(); + } + } + + /// + /// Trims any excess memory for this array. + /// + public void TrimExcess() + { + if (items.Length == length) // Nothing to do + return; + + T[] newItems = new T[length]; + Array.Copy(items, 0, newItems, 0, length); + items = newItems; + } + + /// + /// Adds a new item to the end of this array. + /// + /// The new item. + public void Add(T item) + { + if (length >= items.Length) + { + IncreaseCapacity(items.Length << 1); + } + + items[length++] = item; + } + #endregion + } +} \ No newline at end of file diff --git a/LightlessSync/ThirdParty/MeshDecimator/Collections/UVChannels.cs b/LightlessSync/ThirdParty/MeshDecimator/Collections/UVChannels.cs new file mode 100644 index 0000000..073728a --- /dev/null +++ b/LightlessSync/ThirdParty/MeshDecimator/Collections/UVChannels.cs @@ -0,0 +1,79 @@ +using System; + +namespace MeshDecimator.Collections +{ + /// + /// A collection of UV channels. + /// + /// The UV vector type. + internal sealed class UVChannels + { + #region Fields + private ResizableArray[] channels = null; + private TVec[][] channelsData = null; + #endregion + + #region Properties + /// + /// Gets the channel collection data. + /// + public TVec[][] Data + { + get + { + for (int i = 0; i < Mesh.UVChannelCount; i++) + { + if (channels[i] != null) + { + channelsData[i] = channels[i].Data; + } + else + { + channelsData[i] = null; + } + } + return channelsData; + } + } + + /// + /// Gets or sets a specific channel by index. + /// + /// The channel index. + public ResizableArray this[int index] + { + get { return channels[index]; } + set { channels[index] = value; } + } + #endregion + + #region Constructor + /// + /// Creates a new collection of UV channels. + /// + public UVChannels() + { + channels = new ResizableArray[Mesh.UVChannelCount]; + channelsData = new TVec[Mesh.UVChannelCount][]; + } + #endregion + + #region Public Methods + /// + /// Resizes all channels at once. + /// + /// The new capacity. + /// If exess memory should be trimmed. 
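One edge case in the ResizableArray shown above: Add grows the buffer with items.Length << 1, so an array created with capacity 0 (as the triangle, vertex and ref arrays in FastQuadricMeshSimplification are) would double to another zero-length buffer and the following write would index past it. Every such array appears to be Resize()d before Add is reached in this patch, so the path looks unreachable in practice, but flooring the growth is a cheap guard; a sketch of that variant, not the code as shipped:

public void Add(T item)
{
    if (length >= items.Length)
    {
        // Floor the doubling at 1 so a zero-capacity array can still grow (sketch/assumption).
        IncreaseCapacity(System.Math.Max(items.Length << 1, 1));
    }
    items[length++] = item;
}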
+ public void Resize(int capacity, bool trimExess = false) + { + for (int i = 0; i < Mesh.UVChannelCount; i++) + { + if (channels[i] != null) + { + channels[i].Resize(capacity, trimExess); + } + } + } + #endregion + } +} diff --git a/LightlessSync/ThirdParty/MeshDecimator/LICENSE.md b/LightlessSync/ThirdParty/MeshDecimator/LICENSE.md new file mode 100644 index 0000000..1f1f192 --- /dev/null +++ b/LightlessSync/ThirdParty/MeshDecimator/LICENSE.md @@ -0,0 +1,21 @@ +MIT License + +Copyright (c) 2017-2018 Mattias Edlund + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all +copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +SOFTWARE. diff --git a/LightlessSync/ThirdParty/MeshDecimator/Math/MathHelper.cs b/LightlessSync/ThirdParty/MeshDecimator/Math/MathHelper.cs new file mode 100644 index 0000000..b530d3d --- /dev/null +++ b/LightlessSync/ThirdParty/MeshDecimator/Math/MathHelper.cs @@ -0,0 +1,286 @@ +#region License +/* +MIT License + +Copyright(c) 2017-2018 Mattias Edlund + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all +copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +SOFTWARE. +*/ +#endregion + +using System; + +namespace MeshDecimator.Math +{ + /// + /// Math helpers. + /// + public static class MathHelper + { + #region Consts + /// + /// The Pi constant. + /// + public const float PI = 3.14159274f; + + /// + /// The Pi constant. + /// + public const double PId = 3.1415926535897932384626433832795; + + /// + /// Degrees to radian constant. + /// + public const float Deg2Rad = PI / 180f; + + /// + /// Degrees to radian constant. + /// + public const double Deg2Radd = PId / 180.0; + + /// + /// Radians to degrees constant. 
+ /// + public const float Rad2Deg = 180f / PI; + + /// + /// Radians to degrees constant. + /// + public const double Rad2Degd = 180.0 / PId; + #endregion + + #region Min + /// + /// Returns the minimum of two values. + /// + /// The first value. + /// The second value. + /// The minimum value. + public static int Min(int val1, int val2) + { + return (val1 < val2 ? val1 : val2); + } + + /// + /// Returns the minimum of three values. + /// + /// The first value. + /// The second value. + /// The third value. + /// The minimum value. + public static int Min(int val1, int val2, int val3) + { + return (val1 < val2 ? (val1 < val3 ? val1 : val3) : (val2 < val3 ? val2 : val3)); + } + + /// + /// Returns the minimum of two values. + /// + /// The first value. + /// The second value. + /// The minimum value. + public static float Min(float val1, float val2) + { + return (val1 < val2 ? val1 : val2); + } + + /// + /// Returns the minimum of three values. + /// + /// The first value. + /// The second value. + /// The third value. + /// The minimum value. + public static float Min(float val1, float val2, float val3) + { + return (val1 < val2 ? (val1 < val3 ? val1 : val3) : (val2 < val3 ? val2 : val3)); + } + + /// + /// Returns the minimum of two values. + /// + /// The first value. + /// The second value. + /// The minimum value. + public static double Min(double val1, double val2) + { + return (val1 < val2 ? val1 : val2); + } + + /// + /// Returns the minimum of three values. + /// + /// The first value. + /// The second value. + /// The third value. + /// The minimum value. + public static double Min(double val1, double val2, double val3) + { + return (val1 < val2 ? (val1 < val3 ? val1 : val3) : (val2 < val3 ? val2 : val3)); + } + #endregion + + #region Max + /// + /// Returns the maximum of two values. + /// + /// The first value. + /// The second value. + /// The maximum value. + public static int Max(int val1, int val2) + { + return (val1 > val2 ? val1 : val2); + } + + /// + /// Returns the maximum of three values. + /// + /// The first value. + /// The second value. + /// The third value. + /// The maximum value. + public static int Max(int val1, int val2, int val3) + { + return (val1 > val2 ? (val1 > val3 ? val1 : val3) : (val2 > val3 ? val2 : val3)); + } + + /// + /// Returns the maximum of two values. + /// + /// The first value. + /// The second value. + /// The maximum value. + public static float Max(float val1, float val2) + { + return (val1 > val2 ? val1 : val2); + } + + /// + /// Returns the maximum of three values. + /// + /// The first value. + /// The second value. + /// The third value. + /// The maximum value. + public static float Max(float val1, float val2, float val3) + { + return (val1 > val2 ? (val1 > val3 ? val1 : val3) : (val2 > val3 ? val2 : val3)); + } + + /// + /// Returns the maximum of two values. + /// + /// The first value. + /// The second value. + /// The maximum value. + public static double Max(double val1, double val2) + { + return (val1 > val2 ? val1 : val2); + } + + /// + /// Returns the maximum of three values. + /// + /// The first value. + /// The second value. + /// The third value. + /// The maximum value. + public static double Max(double val1, double val2, double val3) + { + return (val1 > val2 ? (val1 > val3 ? val1 : val3) : (val2 > val3 ? val2 : val3)); + } + #endregion + + #region Clamping + /// + /// Clamps a value between a minimum and a maximum value. + /// + /// The value to clamp. + /// The minimum value. + /// The maximum value. 
+ /// The clamped value. + public static float Clamp(float value, float min, float max) + { + return (value >= min ? (value <= max ? value : max) : min); + } + + /// + /// Clamps a value between a minimum and a maximum value. + /// + /// The value to clamp. + /// The minimum value. + /// The maximum value. + /// The clamped value. + public static double Clamp(double value, double min, double max) + { + return (value >= min ? (value <= max ? value : max) : min); + } + + /// + /// Clamps the value between 0 and 1. + /// + /// The value to clamp. + /// The clamped value. + public static float Clamp01(float value) + { + return (value > 0f ? (value < 1f ? value : 1f) : 0f); + } + + /// + /// Clamps the value between 0 and 1. + /// + /// The value to clamp. + /// The clamped value. + public static double Clamp01(double value) + { + return (value > 0.0 ? (value < 1.0 ? value : 1.0) : 0.0); + } + #endregion + + #region Triangle Area + /// + /// Calculates the area of a triangle. + /// + /// The first point. + /// The second point. + /// The third point. + /// The triangle area. + public static float TriangleArea(ref Vector3 p0, ref Vector3 p1, ref Vector3 p2) + { + var dx = p1 - p0; + var dy = p2 - p0; + return dx.Magnitude * ((float)System.Math.Sin(Vector3.Angle(ref dx, ref dy) * Deg2Rad) * dy.Magnitude) * 0.5f; + } + + /// + /// Calculates the area of a triangle. + /// + /// The first point. + /// The second point. + /// The third point. + /// The triangle area. + public static double TriangleArea(ref Vector3d p0, ref Vector3d p1, ref Vector3d p2) + { + var dx = p1 - p0; + var dy = p2 - p0; + return dx.Magnitude * (System.Math.Sin(Vector3d.Angle(ref dx, ref dy) * Deg2Radd) * dy.Magnitude) * 0.5f; + } + #endregion + } +} \ No newline at end of file diff --git a/LightlessSync/ThirdParty/MeshDecimator/Math/SymmetricMatrix.cs b/LightlessSync/ThirdParty/MeshDecimator/Math/SymmetricMatrix.cs new file mode 100644 index 0000000..3daa4e7 --- /dev/null +++ b/LightlessSync/ThirdParty/MeshDecimator/Math/SymmetricMatrix.cs @@ -0,0 +1,303 @@ +#region License +/* +MIT License + +Copyright(c) 2017-2018 Mattias Edlund + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all +copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +SOFTWARE. +*/ +#endregion + +using System; + +namespace MeshDecimator.Math +{ + /// + /// A symmetric matrix. + /// + public struct SymmetricMatrix + { + #region Fields + /// + /// The m11 component. + /// + public double m0; + /// + /// The m12 component. + /// + public double m1; + /// + /// The m13 component. + /// + public double m2; + /// + /// The m14 component. 
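The two TriangleArea overloads above evaluate |dx| * |dy| * sin(angle) * 0.5, which is exactly half the magnitude of the cross product of the two edge vectors; a quick equivalence check with the Vector3d helpers from this same directory (sample points are arbitrary, MeshDecimator.Math assumed imported):

var p0 = new Vector3d(0, 0, 0);
var p1 = new Vector3d(2, 0, 0);
var p2 = new Vector3d(0, 3, 0);

double viaAngle = MathHelper.TriangleArea(ref p0, ref p1, ref p2);   // 0.5 * |dx| * |dy| * sin(angle)

Vector3d dx = p1 - p0;
Vector3d dy = p2 - p0;
Vector3d.Cross(ref dx, ref dy, out Vector3d cross);
double viaCross = 0.5 * cross.Magnitude;                             // both give 3.0 for this right triangle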
+ /// + public double m3; + /// + /// The m22 component. + /// + public double m4; + /// + /// The m23 component. + /// + public double m5; + /// + /// The m24 component. + /// + public double m6; + /// + /// The m33 component. + /// + public double m7; + /// + /// The m34 component. + /// + public double m8; + /// + /// The m44 component. + /// + public double m9; + #endregion + + #region Properties + /// + /// Gets the component value with a specific index. + /// + /// The component index. + /// The value. + public double this[int index] + { + get + { + switch (index) + { + case 0: + return m0; + case 1: + return m1; + case 2: + return m2; + case 3: + return m3; + case 4: + return m4; + case 5: + return m5; + case 6: + return m6; + case 7: + return m7; + case 8: + return m8; + case 9: + return m9; + default: + throw new IndexOutOfRangeException(); + } + } + } + #endregion + + #region Constructor + /// + /// Creates a symmetric matrix with a value in each component. + /// + /// The component value. + public SymmetricMatrix(double c) + { + this.m0 = c; + this.m1 = c; + this.m2 = c; + this.m3 = c; + this.m4 = c; + this.m5 = c; + this.m6 = c; + this.m7 = c; + this.m8 = c; + this.m9 = c; + } + + /// + /// Creates a symmetric matrix. + /// + /// The m11 component. + /// The m12 component. + /// The m13 component. + /// The m14 component. + /// The m22 component. + /// The m23 component. + /// The m24 component. + /// The m33 component. + /// The m34 component. + /// The m44 component. + public SymmetricMatrix(double m0, double m1, double m2, double m3, + double m4, double m5, double m6, double m7, double m8, double m9) + { + this.m0 = m0; + this.m1 = m1; + this.m2 = m2; + this.m3 = m3; + this.m4 = m4; + this.m5 = m5; + this.m6 = m6; + this.m7 = m7; + this.m8 = m8; + this.m9 = m9; + } + + /// + /// Creates a symmetric matrix from a plane. + /// + /// The plane x-component. + /// The plane y-component + /// The plane z-component + /// The plane w-component + public SymmetricMatrix(double a, double b, double c, double d) + { + this.m0 = a * a; + this.m1 = a * b; + this.m2 = a * c; + this.m3 = a * d; + + this.m4 = b * b; + this.m5 = b * c; + this.m6 = b * d; + + this.m7 = c * c; + this.m8 = c * d; + + this.m9 = d * d; + } + #endregion + + #region Operators + /// + /// Adds two matrixes together. + /// + /// The left hand side. + /// The right hand side. + /// The resulting matrix. 
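The (a, b, c, d) constructor above stores the outer product of a plane's coefficients, i.e. the fundamental error quadric of quadric-error-metric simplification: with a unit normal, evaluating the quadratic form at a point gives the squared distance to that plane, which is exactly what VertexError in FastQuadricMeshSimplification computes and what UpdateMesh accumulates per face. A worked check with an arbitrary plane and point:

// Plane y = 2, written as (a, b, c, d) = (0, 1, 0, -2) with a unit normal.
var q = new SymmetricMatrix(0, 1, 0, -2);
double x = 5, y = 7, z = -1;

// The same quadratic form that VertexError expands:
double error = q.m0 * x * x + 2 * q.m1 * x * y + 2 * q.m2 * x * z + 2 * q.m3 * x
             + q.m4 * y * y + 2 * q.m5 * y * z + 2 * q.m6 * y
             + q.m7 * z * z + 2 * q.m8 * z + q.m9;

double signedDistance = 0 * x + 1 * y + 0 * z - 2;   // 5
// error == signedDistance * signedDistance == 25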
+ public static SymmetricMatrix operator +(SymmetricMatrix a, SymmetricMatrix b) + { + return new SymmetricMatrix( + a.m0 + b.m0, a.m1 + b.m1, a.m2 + b.m2, a.m3 + b.m3, + a.m4 + b.m4, a.m5 + b.m5, a.m6 + b.m6, + a.m7 + b.m7, a.m8 + b.m8, + a.m9 + b.m9 + ); + } + #endregion + + #region Internal Methods + /// + /// Determinant(0, 1, 2, 1, 4, 5, 2, 5, 7) + /// + /// + internal double Determinant1() + { + double det = + m0 * m4 * m7 + + m2 * m1 * m5 + + m1 * m5 * m2 - + m2 * m4 * m2 - + m0 * m5 * m5 - + m1 * m1 * m7; + return det; + } + + /// + /// Determinant(1, 2, 3, 4, 5, 6, 5, 7, 8) + /// + /// + internal double Determinant2() + { + double det = + m1 * m5 * m8 + + m3 * m4 * m7 + + m2 * m6 * m5 - + m3 * m5 * m5 - + m1 * m6 * m7 - + m2 * m4 * m8; + return det; + } + + /// + /// Determinant(0, 2, 3, 1, 5, 6, 2, 7, 8) + /// + /// + internal double Determinant3() + { + double det = + m0 * m5 * m8 + + m3 * m1 * m7 + + m2 * m6 * m2 - + m3 * m5 * m2 - + m0 * m6 * m7 - + m2 * m1 * m8; + return det; + } + + /// + /// Determinant(0, 1, 3, 1, 4, 6, 2, 5, 8) + /// + /// + internal double Determinant4() + { + double det = + m0 * m4 * m8 + + m3 * m1 * m5 + + m1 * m6 * m2 - + m3 * m4 * m2 - + m0 * m6 * m5 - + m1 * m1 * m8; + return det; + } + #endregion + + #region Public Methods + /// + /// Computes the determinant of this matrix. + /// + /// The a11 index. + /// The a12 index. + /// The a13 index. + /// The a21 index. + /// The a22 index. + /// The a23 index. + /// The a31 index. + /// The a32 index. + /// The a33 index. + /// The determinant value. + public double Determinant(int a11, int a12, int a13, + int a21, int a22, int a23, + int a31, int a32, int a33) + { + double det = + this[a11] * this[a22] * this[a33] + + this[a13] * this[a21] * this[a32] + + this[a12] * this[a23] * this[a31] - + this[a13] * this[a22] * this[a31] - + this[a11] * this[a23] * this[a32] - + this[a12] * this[a21] * this[a33]; + return det; + } + #endregion + } +} \ No newline at end of file diff --git a/LightlessSync/ThirdParty/MeshDecimator/Math/Vector2.cs b/LightlessSync/ThirdParty/MeshDecimator/Math/Vector2.cs new file mode 100644 index 0000000..68f06f4 --- /dev/null +++ b/LightlessSync/ThirdParty/MeshDecimator/Math/Vector2.cs @@ -0,0 +1,425 @@ +#region License +/* +MIT License + +Copyright(c) 2017-2018 Mattias Edlund + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all +copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +SOFTWARE. +*/ +#endregion + +using System; +using System.Globalization; + +namespace MeshDecimator.Math +{ + /// + /// A single precision 2D vector. 
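The internal Determinant1 through Determinant4 above are the cofactor determinants CalculateError uses to invert the combined edge quadric and solve for the position of minimum error; the same solve can be written against the public Determinant overload using the index triplets quoted in their doc comments. A self-contained check (MeshDecimator.Math assumed imported): summing the quadrics of the planes x = 0, y = 2 and z = 1 gives an invertible quadric whose minimiser is their intersection (0, 2, 1).

var q = new SymmetricMatrix(1, 0, 0, 0)
      + new SymmetricMatrix(0, 1, 0, -2)
      + new SymmetricMatrix(0, 0, 1, -1);

double det = q.Determinant(0, 1, 2, 1, 4, 5, 2, 5, 7);            // same value as Determinant1()
if (det != 0.0)                                                   // the det test in CalculateError's non-border branch
{
    var optimal = new Vector3d(
        -1.0 / det * q.Determinant(1, 2, 3, 4, 5, 6, 5, 7, 8),    // Determinant2()
         1.0 / det * q.Determinant(0, 2, 3, 1, 5, 6, 2, 7, 8),    // Determinant3()
        -1.0 / det * q.Determinant(0, 1, 3, 1, 4, 6, 2, 5, 8));   // Determinant4()
    // optimal == (0, 2, 1): the contraction target CalculateError returns when the quadric is invertible.
}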
+ /// + public struct Vector2 : IEquatable + { + #region Static Read-Only + /// + /// The zero vector. + /// + public static readonly Vector2 zero = new Vector2(0, 0); + #endregion + + #region Consts + /// + /// The vector epsilon. + /// + public const float Epsilon = 9.99999944E-11f; + #endregion + + #region Fields + /// + /// The x component. + /// + public float x; + /// + /// The y component. + /// + public float y; + #endregion + + #region Properties + /// + /// Gets the magnitude of this vector. + /// + public float Magnitude + { + get { return (float)System.Math.Sqrt(x * x + y * y); } + } + + /// + /// Gets the squared magnitude of this vector. + /// + public float MagnitudeSqr + { + get { return (x * x + y * y); } + } + + /// + /// Gets a normalized vector from this vector. + /// + public Vector2 Normalized + { + get + { + Vector2 result; + Normalize(ref this, out result); + return result; + } + } + + /// + /// Gets or sets a specific component by index in this vector. + /// + /// The component index. + public float this[int index] + { + get + { + switch (index) + { + case 0: + return x; + case 1: + return y; + default: + throw new IndexOutOfRangeException("Invalid Vector2 index!"); + } + } + set + { + switch (index) + { + case 0: + x = value; + break; + case 1: + y = value; + break; + default: + throw new IndexOutOfRangeException("Invalid Vector2 index!"); + } + } + } + #endregion + + #region Constructor + /// + /// Creates a new vector with one value for all components. + /// + /// The value. + public Vector2(float value) + { + this.x = value; + this.y = value; + } + + /// + /// Creates a new vector. + /// + /// The x value. + /// The y value. + public Vector2(float x, float y) + { + this.x = x; + this.y = y; + } + #endregion + + #region Operators + /// + /// Adds two vectors. + /// + /// The first vector. + /// The second vector. + /// The resulting vector. + public static Vector2 operator +(Vector2 a, Vector2 b) + { + return new Vector2(a.x + b.x, a.y + b.y); + } + + /// + /// Subtracts two vectors. + /// + /// The first vector. + /// The second vector. + /// The resulting vector. + public static Vector2 operator -(Vector2 a, Vector2 b) + { + return new Vector2(a.x - b.x, a.y - b.y); + } + + /// + /// Scales the vector uniformly. + /// + /// The vector. + /// The scaling value. + /// The resulting vector. + public static Vector2 operator *(Vector2 a, float d) + { + return new Vector2(a.x * d, a.y * d); + } + + /// + /// Scales the vector uniformly. + /// + /// The scaling value. + /// The vector. + /// The resulting vector. + public static Vector2 operator *(float d, Vector2 a) + { + return new Vector2(a.x * d, a.y * d); + } + + /// + /// Divides the vector with a float. + /// + /// The vector. + /// The dividing float value. + /// The resulting vector. + public static Vector2 operator /(Vector2 a, float d) + { + return new Vector2(a.x / d, a.y / d); + } + + /// + /// Subtracts the vector from a zero vector. + /// + /// The vector. + /// The resulting vector. + public static Vector2 operator -(Vector2 a) + { + return new Vector2(-a.x, -a.y); + } + + /// + /// Returns if two vectors equals eachother. + /// + /// The left hand side vector. + /// The right hand side vector. + /// If equals. + public static bool operator ==(Vector2 lhs, Vector2 rhs) + { + return (lhs - rhs).MagnitudeSqr < Epsilon; + } + + /// + /// Returns if two vectors don't equal eachother. + /// + /// The left hand side vector. + /// The right hand side vector. + /// If not equals. 
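Note the equality semantics above: operator== compares the squared distance against Epsilon, so it is an approximate comparison, while Equals is exact component-wise comparison, and the two can disagree for nearly identical vectors. A minimal sketch (illustrative class and values only):

using MeshDecimator.Math;

static class Vector2EqualitySketch
{
    static void Demo()
    {
        Vector2 a = new Vector2(1.0f, 0.0f);
        Vector2 b = new Vector2(1.0f + 1e-6f, 0.0f);

        bool approx = a == b;      // true: squared distance ~1e-12 is below Epsilon (~1e-10)
        bool exact  = a.Equals(b); // false: component-wise comparison sees the difference
    }
}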
+ public static bool operator !=(Vector2 lhs, Vector2 rhs) + { + return (lhs - rhs).MagnitudeSqr >= Epsilon; + } + + /// + /// Explicitly converts from a double-precision vector into a single-precision vector. + /// + /// The double-precision vector. + public static explicit operator Vector2(Vector2d v) + { + return new Vector2((float)v.x, (float)v.y); + } + + /// + /// Implicitly converts from an integer vector into a single-precision vector. + /// + /// The integer vector. + public static implicit operator Vector2(Vector2i v) + { + return new Vector2(v.x, v.y); + } + #endregion + + #region Public Methods + #region Instance + /// + /// Set x and y components of an existing vector. + /// + /// The x value. + /// The y value. + public void Set(float x, float y) + { + this.x = x; + this.y = y; + } + + /// + /// Multiplies with another vector component-wise. + /// + /// The vector to multiply with. + public void Scale(ref Vector2 scale) + { + x *= scale.x; + y *= scale.y; + } + + /// + /// Normalizes this vector. + /// + public void Normalize() + { + float mag = this.Magnitude; + if (mag > Epsilon) + { + x /= mag; + y /= mag; + } + else + { + x = y = 0; + } + } + + /// + /// Clamps this vector between a specific range. + /// + /// The minimum component value. + /// The maximum component value. + public void Clamp(float min, float max) + { + if (x < min) x = min; + else if (x > max) x = max; + + if (y < min) y = min; + else if (y > max) y = max; + } + #endregion + + #region Object + /// + /// Returns a hash code for this vector. + /// + /// The hash code. + public override int GetHashCode() + { + return x.GetHashCode() ^ y.GetHashCode() << 2; + } + + /// + /// Returns if this vector is equal to another one. + /// + /// The other vector to compare to. + /// If equals. + public override bool Equals(object other) + { + if (!(other is Vector2)) + { + return false; + } + Vector2 vector = (Vector2)other; + return (x == vector.x && y == vector.y); + } + + /// + /// Returns if this vector is equal to another one. + /// + /// The other vector to compare to. + /// If equals. + public bool Equals(Vector2 other) + { + return (x == other.x && y == other.y); + } + + /// + /// Returns a nicely formatted string for this vector. + /// + /// The string. + public override string ToString() + { + return string.Format("({0}, {1})", + x.ToString("F1", CultureInfo.InvariantCulture), + y.ToString("F1", CultureInfo.InvariantCulture)); + } + + /// + /// Returns a nicely formatted string for this vector. + /// + /// The float format. + /// The string. + public string ToString(string format) + { + return string.Format("({0}, {1})", + x.ToString(format, CultureInfo.InvariantCulture), + y.ToString(format, CultureInfo.InvariantCulture)); + } + #endregion + + #region Static + /// + /// Dot Product of two vectors. + /// + /// The left hand side vector. + /// The right hand side vector. + public static float Dot(ref Vector2 lhs, ref Vector2 rhs) + { + return lhs.x * rhs.x + lhs.y * rhs.y; + } + + /// + /// Performs a linear interpolation between two vectors. + /// + /// The vector to interpolate from. + /// The vector to interpolate to. + /// The time fraction. + /// The resulting vector. + public static void Lerp(ref Vector2 a, ref Vector2 b, float t, out Vector2 result) + { + result = new Vector2(a.x + (b.x - a.x) * t, a.y + (b.y - a.y) * t); + } + + /// + /// Multiplies two vectors component-wise. + /// + /// The first vector. + /// The second vector. + /// The resulting vector. 
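The static helpers above (Dot, Lerp, Normalize, Scale) take ref/out parameters to avoid copying the structs, so callers must pass addressable locals rather than inline expressions. A minimal sketch, assuming only the members shown above:

using MeshDecimator.Math;

static class Vector2RefApiSketch
{
    static void Demo()
    {
        Vector2 a = new Vector2(1f, 0f);
        Vector2 b = new Vector2(0f, 1f);

        float dot = Vector2.Dot(ref a, ref b);       // 0: perpendicular axes

        Vector2 mid;
        Vector2.Lerp(ref a, ref b, 0.5f, out mid);   // (0.5, 0.5): halfway between a and b

        // A value such as new Vector2(...) cannot be passed directly to a ref parameter;
        // it has to be stored in a variable first.
    }
}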
+ public static void Scale(ref Vector2 a, ref Vector2 b, out Vector2 result) + { + result = new Vector2(a.x * b.x, a.y * b.y); + } + + /// + /// Normalizes a vector. + /// + /// The vector to normalize. + /// The resulting normalized vector. + public static void Normalize(ref Vector2 value, out Vector2 result) + { + float mag = value.Magnitude; + if (mag > Epsilon) + { + result = new Vector2(value.x / mag, value.y / mag); + } + else + { + result = Vector2.zero; + } + } + #endregion + #endregion + } +} \ No newline at end of file diff --git a/LightlessSync/ThirdParty/MeshDecimator/Math/Vector2d.cs b/LightlessSync/ThirdParty/MeshDecimator/Math/Vector2d.cs new file mode 100644 index 0000000..72f62aa --- /dev/null +++ b/LightlessSync/ThirdParty/MeshDecimator/Math/Vector2d.cs @@ -0,0 +1,425 @@ +#region License +/* +MIT License + +Copyright(c) 2017-2018 Mattias Edlund + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all +copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +SOFTWARE. +*/ +#endregion + +using System; +using System.Globalization; + +namespace MeshDecimator.Math +{ + /// + /// A double precision 2D vector. + /// + public struct Vector2d : IEquatable + { + #region Static Read-Only + /// + /// The zero vector. + /// + public static readonly Vector2d zero = new Vector2d(0, 0); + #endregion + + #region Consts + /// + /// The vector epsilon. + /// + public const double Epsilon = double.Epsilon; + #endregion + + #region Fields + /// + /// The x component. + /// + public double x; + /// + /// The y component. + /// + public double y; + #endregion + + #region Properties + /// + /// Gets the magnitude of this vector. + /// + public double Magnitude + { + get { return System.Math.Sqrt(x * x + y * y); } + } + + /// + /// Gets the squared magnitude of this vector. + /// + public double MagnitudeSqr + { + get { return (x * x + y * y); } + } + + /// + /// Gets a normalized vector from this vector. + /// + public Vector2d Normalized + { + get + { + Vector2d result; + Normalize(ref this, out result); + return result; + } + } + + /// + /// Gets or sets a specific component by index in this vector. + /// + /// The component index. 
+ public double this[int index] + { + get + { + switch (index) + { + case 0: + return x; + case 1: + return y; + default: + throw new IndexOutOfRangeException("Invalid Vector2d index!"); + } + } + set + { + switch (index) + { + case 0: + x = value; + break; + case 1: + y = value; + break; + default: + throw new IndexOutOfRangeException("Invalid Vector2d index!"); + } + } + } + #endregion + + #region Constructor + /// + /// Creates a new vector with one value for all components. + /// + /// The value. + public Vector2d(double value) + { + this.x = value; + this.y = value; + } + + /// + /// Creates a new vector. + /// + /// The x value. + /// The y value. + public Vector2d(double x, double y) + { + this.x = x; + this.y = y; + } + #endregion + + #region Operators + /// + /// Adds two vectors. + /// + /// The first vector. + /// The second vector. + /// The resulting vector. + public static Vector2d operator +(Vector2d a, Vector2d b) + { + return new Vector2d(a.x + b.x, a.y + b.y); + } + + /// + /// Subtracts two vectors. + /// + /// The first vector. + /// The second vector. + /// The resulting vector. + public static Vector2d operator -(Vector2d a, Vector2d b) + { + return new Vector2d(a.x - b.x, a.y - b.y); + } + + /// + /// Scales the vector uniformly. + /// + /// The vector. + /// The scaling value. + /// The resulting vector. + public static Vector2d operator *(Vector2d a, double d) + { + return new Vector2d(a.x * d, a.y * d); + } + + /// + /// Scales the vector uniformly. + /// + /// The scaling value. + /// The vector. + /// The resulting vector. + public static Vector2d operator *(double d, Vector2d a) + { + return new Vector2d(a.x * d, a.y * d); + } + + /// + /// Divides the vector with a float. + /// + /// The vector. + /// The dividing float value. + /// The resulting vector. + public static Vector2d operator /(Vector2d a, double d) + { + return new Vector2d(a.x / d, a.y / d); + } + + /// + /// Subtracts the vector from a zero vector. + /// + /// The vector. + /// The resulting vector. + public static Vector2d operator -(Vector2d a) + { + return new Vector2d(-a.x, -a.y); + } + + /// + /// Returns if two vectors equals eachother. + /// + /// The left hand side vector. + /// The right hand side vector. + /// If equals. + public static bool operator ==(Vector2d lhs, Vector2d rhs) + { + return (lhs - rhs).MagnitudeSqr < Epsilon; + } + + /// + /// Returns if two vectors don't equal eachother. + /// + /// The left hand side vector. + /// The right hand side vector. + /// If not equals. + public static bool operator !=(Vector2d lhs, Vector2d rhs) + { + return (lhs - rhs).MagnitudeSqr >= Epsilon; + } + + /// + /// Implicitly converts from a single-precision vector into a double-precision vector. + /// + /// The single-precision vector. + public static implicit operator Vector2d(Vector2 v) + { + return new Vector2d(v.x, v.y); + } + + /// + /// Implicitly converts from an integer vector into a double-precision vector. + /// + /// The integer vector. + public static implicit operator Vector2d(Vector2i v) + { + return new Vector2d(v.x, v.y); + } + #endregion + + #region Public Methods + #region Instance + /// + /// Set x and y components of an existing vector. + /// + /// The x value. + /// The y value. + public void Set(double x, double y) + { + this.x = x; + this.y = y; + } + + /// + /// Multiplies with another vector component-wise. + /// + /// The vector to multiply with. + public void Scale(ref Vector2d scale) + { + x *= scale.x; + y *= scale.y; + } + + /// + /// Normalizes this vector. 
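One behavioural difference against the single-precision type is worth flagging: here Epsilon is double.Epsilon, the smallest positive subnormal double (~4.9e-324), so for values of ordinary magnitude operator== behaves as an effectively exact comparison rather than a tolerance check. A minimal sketch (illustrative values only):

using MeshDecimator.Math;

static class Vector2dEqualitySketch
{
    static void Demo()
    {
        Vector2d a = new Vector2d(1.0, 0.0);
        Vector2d b = new Vector2d(1.0 + 1e-12, 0.0);

        bool equal = a == b;  // false: (1e-12)^2 = 1e-24 is still far above double.Epsilon
    }
}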
+ /// + public void Normalize() + { + double mag = this.Magnitude; + if (mag > Epsilon) + { + x /= mag; + y /= mag; + } + else + { + x = y = 0; + } + } + + /// + /// Clamps this vector between a specific range. + /// + /// The minimum component value. + /// The maximum component value. + public void Clamp(double min, double max) + { + if (x < min) x = min; + else if (x > max) x = max; + + if (y < min) y = min; + else if (y > max) y = max; + } + #endregion + + #region Object + /// + /// Returns a hash code for this vector. + /// + /// The hash code. + public override int GetHashCode() + { + return x.GetHashCode() ^ y.GetHashCode() << 2; + } + + /// + /// Returns if this vector is equal to another one. + /// + /// The other vector to compare to. + /// If equals. + public override bool Equals(object other) + { + if (!(other is Vector2d)) + { + return false; + } + Vector2d vector = (Vector2d)other; + return (x == vector.x && y == vector.y); + } + + /// + /// Returns if this vector is equal to another one. + /// + /// The other vector to compare to. + /// If equals. + public bool Equals(Vector2d other) + { + return (x == other.x && y == other.y); + } + + /// + /// Returns a nicely formatted string for this vector. + /// + /// The string. + public override string ToString() + { + return string.Format("({0}, {1})", + x.ToString("F1", CultureInfo.InvariantCulture), + y.ToString("F1", CultureInfo.InvariantCulture)); + } + + /// + /// Returns a nicely formatted string for this vector. + /// + /// The float format. + /// The string. + public string ToString(string format) + { + return string.Format("({0}, {1})", + x.ToString(format, CultureInfo.InvariantCulture), + y.ToString(format, CultureInfo.InvariantCulture)); + } + #endregion + + #region Static + /// + /// Dot Product of two vectors. + /// + /// The left hand side vector. + /// The right hand side vector. + public static double Dot(ref Vector2d lhs, ref Vector2d rhs) + { + return lhs.x * rhs.x + lhs.y * rhs.y; + } + + /// + /// Performs a linear interpolation between two vectors. + /// + /// The vector to interpolate from. + /// The vector to interpolate to. + /// The time fraction. + /// The resulting vector. + public static void Lerp(ref Vector2d a, ref Vector2d b, double t, out Vector2d result) + { + result = new Vector2d(a.x + (b.x - a.x) * t, a.y + (b.y - a.y) * t); + } + + /// + /// Multiplies two vectors component-wise. + /// + /// The first vector. + /// The second vector. + /// The resulting vector. + public static void Scale(ref Vector2d a, ref Vector2d b, out Vector2d result) + { + result = new Vector2d(a.x * b.x, a.y * b.y); + } + + /// + /// Normalizes a vector. + /// + /// The vector to normalize. + /// The resulting normalized vector. 
+ public static void Normalize(ref Vector2d value, out Vector2d result) + { + double mag = value.Magnitude; + if (mag > Epsilon) + { + result = new Vector2d(value.x / mag, value.y / mag); + } + else + { + result = Vector2d.zero; + } + } + #endregion + #endregion + } +} \ No newline at end of file diff --git a/LightlessSync/ThirdParty/MeshDecimator/Math/Vector2i.cs b/LightlessSync/ThirdParty/MeshDecimator/Math/Vector2i.cs new file mode 100644 index 0000000..20b808b --- /dev/null +++ b/LightlessSync/ThirdParty/MeshDecimator/Math/Vector2i.cs @@ -0,0 +1,348 @@ +#region License +/* +MIT License + +Copyright(c) 2017-2018 Mattias Edlund + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all +copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +SOFTWARE. +*/ +#endregion + +using System; +using System.Globalization; + +namespace MeshDecimator.Math +{ + /// + /// A 2D integer vector. + /// + public struct Vector2i : IEquatable + { + #region Static Read-Only + /// + /// The zero vector. + /// + public static readonly Vector2i zero = new Vector2i(0, 0); + #endregion + + #region Fields + /// + /// The x component. + /// + public int x; + /// + /// The y component. + /// + public int y; + #endregion + + #region Properties + /// + /// Gets the magnitude of this vector. + /// + public int Magnitude + { + get { return (int)System.Math.Sqrt(x * x + y * y); } + } + + /// + /// Gets the squared magnitude of this vector. + /// + public int MagnitudeSqr + { + get { return (x * x + y * y); } + } + + /// + /// Gets or sets a specific component by index in this vector. + /// + /// The component index. + public int this[int index] + { + get + { + switch (index) + { + case 0: + return x; + case 1: + return y; + default: + throw new IndexOutOfRangeException("Invalid Vector2i index!"); + } + } + set + { + switch (index) + { + case 0: + x = value; + break; + case 1: + y = value; + break; + default: + throw new IndexOutOfRangeException("Invalid Vector2i index!"); + } + } + } + #endregion + + #region Constructor + /// + /// Creates a new vector with one value for all components. + /// + /// The value. + public Vector2i(int value) + { + this.x = value; + this.y = value; + } + + /// + /// Creates a new vector. + /// + /// The x value. + /// The y value. + public Vector2i(int x, int y) + { + this.x = x; + this.y = y; + } + #endregion + + #region Operators + /// + /// Adds two vectors. + /// + /// The first vector. + /// The second vector. + /// The resulting vector. + public static Vector2i operator +(Vector2i a, Vector2i b) + { + return new Vector2i(a.x + b.x, a.y + b.y); + } + + /// + /// Subtracts two vectors. 
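Since the integer vector's Magnitude casts Math.Sqrt to int, it truncates toward zero; MagnitudeSqr is the lossless choice when only length comparisons are needed. A minimal sketch (illustrative values only):

using MeshDecimator.Math;

static class Vector2iMagnitudeSketch
{
    static void Demo()
    {
        Vector2i a = new Vector2i(3, 4);
        Vector2i b = new Vector2i(1, 1);

        int magA = a.Magnitude;     // 5: sqrt(25) is exact
        int magB = b.Magnitude;     // 1: sqrt(2) ~ 1.414 truncated by the (int) cast
        int sqrB = b.MagnitudeSqr;  // 2: exact, preferred for comparing lengths
    }
}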
+ /// + /// The first vector. + /// The second vector. + /// The resulting vector. + public static Vector2i operator -(Vector2i a, Vector2i b) + { + return new Vector2i(a.x - b.x, a.y - b.y); + } + + /// + /// Scales the vector uniformly. + /// + /// The vector. + /// The scaling value. + /// The resulting vector. + public static Vector2i operator *(Vector2i a, int d) + { + return new Vector2i(a.x * d, a.y * d); + } + + /// + /// Scales the vector uniformly. + /// + /// The scaling value. + /// The vector. + /// The resulting vector. + public static Vector2i operator *(int d, Vector2i a) + { + return new Vector2i(a.x * d, a.y * d); + } + + /// + /// Divides the vector with a float. + /// + /// The vector. + /// The dividing float value. + /// The resulting vector. + public static Vector2i operator /(Vector2i a, int d) + { + return new Vector2i(a.x / d, a.y / d); + } + + /// + /// Subtracts the vector from a zero vector. + /// + /// The vector. + /// The resulting vector. + public static Vector2i operator -(Vector2i a) + { + return new Vector2i(-a.x, -a.y); + } + + /// + /// Returns if two vectors equals eachother. + /// + /// The left hand side vector. + /// The right hand side vector. + /// If equals. + public static bool operator ==(Vector2i lhs, Vector2i rhs) + { + return (lhs.x == rhs.x && lhs.y == rhs.y); + } + + /// + /// Returns if two vectors don't equal eachother. + /// + /// The left hand side vector. + /// The right hand side vector. + /// If not equals. + public static bool operator !=(Vector2i lhs, Vector2i rhs) + { + return (lhs.x != rhs.x || lhs.y != rhs.y); + } + + /// + /// Explicitly converts from a single-precision vector into an integer vector. + /// + /// The single-precision vector. + public static explicit operator Vector2i(Vector2 v) + { + return new Vector2i((int)v.x, (int)v.y); + } + + /// + /// Explicitly converts from a double-precision vector into an integer vector. + /// + /// The double-precision vector. + public static explicit operator Vector2i(Vector2d v) + { + return new Vector2i((int)v.x, (int)v.y); + } + #endregion + + #region Public Methods + #region Instance + /// + /// Set x and y components of an existing vector. + /// + /// The x value. + /// The y value. + public void Set(int x, int y) + { + this.x = x; + this.y = y; + } + + /// + /// Multiplies with another vector component-wise. + /// + /// The vector to multiply with. + public void Scale(ref Vector2i scale) + { + x *= scale.x; + y *= scale.y; + } + + /// + /// Clamps this vector between a specific range. + /// + /// The minimum component value. + /// The maximum component value. + public void Clamp(int min, int max) + { + if (x < min) x = min; + else if (x > max) x = max; + + if (y < min) y = min; + else if (y > max) y = max; + } + #endregion + + #region Object + /// + /// Returns a hash code for this vector. + /// + /// The hash code. + public override int GetHashCode() + { + return x.GetHashCode() ^ y.GetHashCode() << 2; + } + + /// + /// Returns if this vector is equal to another one. + /// + /// The other vector to compare to. + /// If equals. + public override bool Equals(object other) + { + if (!(other is Vector2i)) + { + return false; + } + Vector2i vector = (Vector2i)other; + return (x == vector.x && y == vector.y); + } + + /// + /// Returns if this vector is equal to another one. + /// + /// The other vector to compare to. + /// If equals. 
+ public bool Equals(Vector2i other) + { + return (x == other.x && y == other.y); + } + + /// + /// Returns a nicely formatted string for this vector. + /// + /// The string. + public override string ToString() + { + return string.Format("({0}, {1})", + x.ToString(CultureInfo.InvariantCulture), + y.ToString(CultureInfo.InvariantCulture)); + } + + /// + /// Returns a nicely formatted string for this vector. + /// + /// The integer format. + /// The string. + public string ToString(string format) + { + return string.Format("({0}, {1})", + x.ToString(format, CultureInfo.InvariantCulture), + y.ToString(format, CultureInfo.InvariantCulture)); + } + #endregion + + #region Static + /// + /// Multiplies two vectors component-wise. + /// + /// The first vector. + /// The second vector. + /// The resulting vector. + public static void Scale(ref Vector2i a, ref Vector2i b, out Vector2i result) + { + result = new Vector2i(a.x * b.x, a.y * b.y); + } + #endregion + #endregion + } +} \ No newline at end of file diff --git a/LightlessSync/ThirdParty/MeshDecimator/Math/Vector3.cs b/LightlessSync/ThirdParty/MeshDecimator/Math/Vector3.cs new file mode 100644 index 0000000..4c91aa5 --- /dev/null +++ b/LightlessSync/ThirdParty/MeshDecimator/Math/Vector3.cs @@ -0,0 +1,494 @@ +#region License +/* +MIT License + +Copyright(c) 2017-2018 Mattias Edlund + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all +copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +SOFTWARE. +*/ +#endregion + +using System; +using System.Globalization; + +namespace MeshDecimator.Math +{ + /// + /// A single precision 3D vector. + /// + public struct Vector3 : IEquatable + { + #region Static Read-Only + /// + /// The zero vector. + /// + public static readonly Vector3 zero = new Vector3(0, 0, 0); + #endregion + + #region Consts + /// + /// The vector epsilon. + /// + public const float Epsilon = 9.99999944E-11f; + #endregion + + #region Fields + /// + /// The x component. + /// + public float x; + /// + /// The y component. + /// + public float y; + /// + /// The z component. + /// + public float z; + #endregion + + #region Properties + /// + /// Gets the magnitude of this vector. + /// + public float Magnitude + { + get { return (float)System.Math.Sqrt(x * x + y * y + z * z); } + } + + /// + /// Gets the squared magnitude of this vector. + /// + public float MagnitudeSqr + { + get { return (x * x + y * y + z * z); } + } + + /// + /// Gets a normalized vector from this vector. 
+ /// + public Vector3 Normalized + { + get + { + Vector3 result; + Normalize(ref this, out result); + return result; + } + } + + /// + /// Gets or sets a specific component by index in this vector. + /// + /// The component index. + public float this[int index] + { + get + { + switch (index) + { + case 0: + return x; + case 1: + return y; + case 2: + return z; + default: + throw new IndexOutOfRangeException("Invalid Vector3 index!"); + } + } + set + { + switch (index) + { + case 0: + x = value; + break; + case 1: + y = value; + break; + case 2: + z = value; + break; + default: + throw new IndexOutOfRangeException("Invalid Vector3 index!"); + } + } + } + #endregion + + #region Constructor + /// + /// Creates a new vector with one value for all components. + /// + /// The value. + public Vector3(float value) + { + this.x = value; + this.y = value; + this.z = value; + } + + /// + /// Creates a new vector. + /// + /// The x value. + /// The y value. + /// The z value. + public Vector3(float x, float y, float z) + { + this.x = x; + this.y = y; + this.z = z; + } + + /// + /// Creates a new vector from a double precision vector. + /// + /// The double precision vector. + public Vector3(Vector3d vector) + { + this.x = (float)vector.x; + this.y = (float)vector.y; + this.z = (float)vector.z; + } + #endregion + + #region Operators + /// + /// Adds two vectors. + /// + /// The first vector. + /// The second vector. + /// The resulting vector. + public static Vector3 operator +(Vector3 a, Vector3 b) + { + return new Vector3(a.x + b.x, a.y + b.y, a.z + b.z); + } + + /// + /// Subtracts two vectors. + /// + /// The first vector. + /// The second vector. + /// The resulting vector. + public static Vector3 operator -(Vector3 a, Vector3 b) + { + return new Vector3(a.x - b.x, a.y - b.y, a.z - b.z); + } + + /// + /// Scales the vector uniformly. + /// + /// The vector. + /// The scaling value. + /// The resulting vector. + public static Vector3 operator *(Vector3 a, float d) + { + return new Vector3(a.x * d, a.y * d, a.z * d); + } + + /// + /// Scales the vector uniformly. + /// + /// The scaling value. + /// The vector. + /// The resulting vector. + public static Vector3 operator *(float d, Vector3 a) + { + return new Vector3(a.x * d, a.y * d, a.z * d); + } + + /// + /// Divides the vector with a float. + /// + /// The vector. + /// The dividing float value. + /// The resulting vector. + public static Vector3 operator /(Vector3 a, float d) + { + return new Vector3(a.x / d, a.y / d, a.z / d); + } + + /// + /// Subtracts the vector from a zero vector. + /// + /// The vector. + /// The resulting vector. + public static Vector3 operator -(Vector3 a) + { + return new Vector3(-a.x, -a.y, -a.z); + } + + /// + /// Returns if two vectors equals eachother. + /// + /// The left hand side vector. + /// The right hand side vector. + /// If equals. + public static bool operator ==(Vector3 lhs, Vector3 rhs) + { + return (lhs - rhs).MagnitudeSqr < Epsilon; + } + + /// + /// Returns if two vectors don't equal eachother. + /// + /// The left hand side vector. + /// The right hand side vector. + /// If not equals. + public static bool operator !=(Vector3 lhs, Vector3 rhs) + { + return (lhs - rhs).MagnitudeSqr >= Epsilon; + } + + /// + /// Explicitly converts from a double-precision vector into a single-precision vector. + /// + /// The double-precision vector. 
+ public static explicit operator Vector3(Vector3d v) + { + return new Vector3((float)v.x, (float)v.y, (float)v.z); + } + + /// + /// Implicitly converts from an integer vector into a single-precision vector. + /// + /// The integer vector. + public static implicit operator Vector3(Vector3i v) + { + return new Vector3(v.x, v.y, v.z); + } + #endregion + + #region Public Methods + #region Instance + /// + /// Set x, y and z components of an existing vector. + /// + /// The x value. + /// The y value. + /// The z value. + public void Set(float x, float y, float z) + { + this.x = x; + this.y = y; + this.z = z; + } + + /// + /// Multiplies with another vector component-wise. + /// + /// The vector to multiply with. + public void Scale(ref Vector3 scale) + { + x *= scale.x; + y *= scale.y; + z *= scale.z; + } + + /// + /// Normalizes this vector. + /// + public void Normalize() + { + float mag = this.Magnitude; + if (mag > Epsilon) + { + x /= mag; + y /= mag; + z /= mag; + } + else + { + x = y = z = 0; + } + } + + /// + /// Clamps this vector between a specific range. + /// + /// The minimum component value. + /// The maximum component value. + public void Clamp(float min, float max) + { + if (x < min) x = min; + else if (x > max) x = max; + + if (y < min) y = min; + else if (y > max) y = max; + + if (z < min) z = min; + else if (z > max) z = max; + } + #endregion + + #region Object + /// + /// Returns a hash code for this vector. + /// + /// The hash code. + public override int GetHashCode() + { + return x.GetHashCode() ^ y.GetHashCode() << 2 ^ z.GetHashCode() >> 2; + } + + /// + /// Returns if this vector is equal to another one. + /// + /// The other vector to compare to. + /// If equals. + public override bool Equals(object other) + { + if (!(other is Vector3)) + { + return false; + } + Vector3 vector = (Vector3)other; + return (x == vector.x && y == vector.y && z == vector.z); + } + + /// + /// Returns if this vector is equal to another one. + /// + /// The other vector to compare to. + /// If equals. + public bool Equals(Vector3 other) + { + return (x == other.x && y == other.y && z == other.z); + } + + /// + /// Returns a nicely formatted string for this vector. + /// + /// The string. + public override string ToString() + { + return string.Format("({0}, {1}, {2})", + x.ToString("F1", CultureInfo.InvariantCulture), + y.ToString("F1", CultureInfo.InvariantCulture), + z.ToString("F1", CultureInfo.InvariantCulture)); + } + + /// + /// Returns a nicely formatted string for this vector. + /// + /// The float format. + /// The string. + public string ToString(string format) + { + return string.Format("({0}, {1}, {2})", + x.ToString(format, CultureInfo.InvariantCulture), + y.ToString(format, CultureInfo.InvariantCulture), + z.ToString(format, CultureInfo.InvariantCulture)); + } + #endregion + + #region Static + /// + /// Dot Product of two vectors. + /// + /// The left hand side vector. + /// The right hand side vector. + public static float Dot(ref Vector3 lhs, ref Vector3 rhs) + { + return lhs.x * rhs.x + lhs.y * rhs.y + lhs.z * rhs.z; + } + + /// + /// Cross Product of two vectors. + /// + /// The left hand side vector. + /// The right hand side vector. + /// The resulting vector. + public static void Cross(ref Vector3 lhs, ref Vector3 rhs, out Vector3 result) + { + result = new Vector3(lhs.y * rhs.z - lhs.z * rhs.y, lhs.z * rhs.x - lhs.x * rhs.z, lhs.x * rhs.y - lhs.y * rhs.x); + } + + /// + /// Calculates the angle between two vectors. + /// + /// The from vector. + /// The to vector. 
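A minimal sketch of the ref-based Dot/Cross helpers above; the cross product follows the usual right-handed convention. The class below is illustrative only:

using MeshDecimator.Math;

static class Vector3CrossSketch
{
    static void Demo()
    {
        Vector3 xAxis = new Vector3(1f, 0f, 0f);
        Vector3 yAxis = new Vector3(0f, 1f, 0f);

        Vector3 zAxis;
        Vector3.Cross(ref xAxis, ref yAxis, out zAxis);   // (0, 0, 1): right-handed x cross y

        float dot = Vector3.Dot(ref xAxis, ref yAxis);    // 0: the axes are perpendicular
    }
}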
+ /// The angle. + public static float Angle(ref Vector3 from, ref Vector3 to) + { + Vector3 fromNormalized = from.Normalized; + Vector3 toNormalized = to.Normalized; + return (float)System.Math.Acos(MathHelper.Clamp(Vector3.Dot(ref fromNormalized, ref toNormalized), -1f, 1f)) * MathHelper.Rad2Deg; + } + + /// + /// Performs a linear interpolation between two vectors. + /// + /// The vector to interpolate from. + /// The vector to interpolate to. + /// The time fraction. + /// The resulting vector. + public static void Lerp(ref Vector3 a, ref Vector3 b, float t, out Vector3 result) + { + result = new Vector3(a.x + (b.x - a.x) * t, a.y + (b.y - a.y) * t, a.z + (b.z - a.z) * t); + } + + /// + /// Multiplies two vectors component-wise. + /// + /// The first vector. + /// The second vector. + /// The resulting vector. + public static void Scale(ref Vector3 a, ref Vector3 b, out Vector3 result) + { + result = new Vector3(a.x * b.x, a.y * b.y, a.z * b.z); + } + + /// + /// Normalizes a vector. + /// + /// The vector to normalize. + /// The resulting normalized vector. + public static void Normalize(ref Vector3 value, out Vector3 result) + { + float mag = value.Magnitude; + if (mag > Epsilon) + { + result = new Vector3(value.x / mag, value.y / mag, value.z / mag); + } + else + { + result = Vector3.zero; + } + } + + /// + /// Normalizes both vectors and makes them orthogonal to each other. + /// + /// The normal vector. + /// The tangent. + public static void OrthoNormalize(ref Vector3 normal, ref Vector3 tangent) + { + normal.Normalize(); + Vector3 proj = normal * Vector3.Dot(ref tangent, ref normal); + tangent -= proj; + tangent.Normalize(); + } + #endregion + #endregion + } +} \ No newline at end of file diff --git a/LightlessSync/ThirdParty/MeshDecimator/Math/Vector3d.cs b/LightlessSync/ThirdParty/MeshDecimator/Math/Vector3d.cs new file mode 100644 index 0000000..11ebed1 --- /dev/null +++ b/LightlessSync/ThirdParty/MeshDecimator/Math/Vector3d.cs @@ -0,0 +1,481 @@ +#region License +/* +MIT License + +Copyright(c) 2017-2018 Mattias Edlund + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all +copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +SOFTWARE. +*/ +#endregion + +using System; +using System.Globalization; + +namespace MeshDecimator.Math +{ + /// + /// A double precision 3D vector. + /// + public struct Vector3d : IEquatable + { + #region Static Read-Only + /// + /// The zero vector. + /// + public static readonly Vector3d zero = new Vector3d(0, 0, 0); + #endregion + + #region Consts + /// + /// The vector epsilon. 
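OrthoNormalize above is a single Gram-Schmidt step: it normalizes the normal, subtracts the normal component from the tangent, and renormalizes the tangent, modifying both arguments in place. A minimal sketch (illustrative values only; Angle returns degrees, not radians):

using MeshDecimator.Math;

static class OrthoNormalizeSketch
{
    static void Demo()
    {
        Vector3 normal  = new Vector3(0f, 2f, 0f);   // not unit length yet
        Vector3 tangent = new Vector3(1f, 1f, 0f);   // not orthogonal to normal yet

        Vector3.OrthoNormalize(ref normal, ref tangent);
        // normal  -> (0, 1, 0)
        // tangent -> (1, 0, 0): the y component was projected out, then renormalized

        float angle = Vector3.Angle(ref normal, ref tangent);  // 90
    }
}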
+ /// + public const double Epsilon = double.Epsilon; + #endregion + + #region Fields + /// + /// The x component. + /// + public double x; + /// + /// The y component. + /// + public double y; + /// + /// The z component. + /// + public double z; + #endregion + + #region Properties + /// + /// Gets the magnitude of this vector. + /// + public double Magnitude + { + get { return System.Math.Sqrt(x * x + y * y + z * z); } + } + + /// + /// Gets the squared magnitude of this vector. + /// + public double MagnitudeSqr + { + get { return (x * x + y * y + z * z); } + } + + /// + /// Gets a normalized vector from this vector. + /// + public Vector3d Normalized + { + get + { + Vector3d result; + Normalize(ref this, out result); + return result; + } + } + + /// + /// Gets or sets a specific component by index in this vector. + /// + /// The component index. + public double this[int index] + { + get + { + switch (index) + { + case 0: + return x; + case 1: + return y; + case 2: + return z; + default: + throw new IndexOutOfRangeException("Invalid Vector3d index!"); + } + } + set + { + switch (index) + { + case 0: + x = value; + break; + case 1: + y = value; + break; + case 2: + z = value; + break; + default: + throw new IndexOutOfRangeException("Invalid Vector3d index!"); + } + } + } + #endregion + + #region Constructor + /// + /// Creates a new vector with one value for all components. + /// + /// The value. + public Vector3d(double value) + { + this.x = value; + this.y = value; + this.z = value; + } + + /// + /// Creates a new vector. + /// + /// The x value. + /// The y value. + /// The z value. + public Vector3d(double x, double y, double z) + { + this.x = x; + this.y = y; + this.z = z; + } + + /// + /// Creates a new vector from a single precision vector. + /// + /// The single precision vector. + public Vector3d(Vector3 vector) + { + this.x = vector.x; + this.y = vector.y; + this.z = vector.z; + } + #endregion + + #region Operators + /// + /// Adds two vectors. + /// + /// The first vector. + /// The second vector. + /// The resulting vector. + public static Vector3d operator +(Vector3d a, Vector3d b) + { + return new Vector3d(a.x + b.x, a.y + b.y, a.z + b.z); + } + + /// + /// Subtracts two vectors. + /// + /// The first vector. + /// The second vector. + /// The resulting vector. + public static Vector3d operator -(Vector3d a, Vector3d b) + { + return new Vector3d(a.x - b.x, a.y - b.y, a.z - b.z); + } + + /// + /// Scales the vector uniformly. + /// + /// The vector. + /// The scaling value. + /// The resulting vector. + public static Vector3d operator *(Vector3d a, double d) + { + return new Vector3d(a.x * d, a.y * d, a.z * d); + } + + /// + /// Scales the vector uniformly. + /// + /// The scaling value. + /// The vector. + /// The resulting vector. + public static Vector3d operator *(double d, Vector3d a) + { + return new Vector3d(a.x * d, a.y * d, a.z * d); + } + + /// + /// Divides the vector with a float. + /// + /// The vector. + /// The dividing float value. + /// The resulting vector. + public static Vector3d operator /(Vector3d a, double d) + { + return new Vector3d(a.x / d, a.y / d, a.z / d); + } + + /// + /// Subtracts the vector from a zero vector. + /// + /// The vector. + /// The resulting vector. + public static Vector3d operator -(Vector3d a) + { + return new Vector3d(-a.x, -a.y, -a.z); + } + + /// + /// Returns if two vectors equals eachother. + /// + /// The left hand side vector. + /// The right hand side vector. + /// If equals. 
+ public static bool operator ==(Vector3d lhs, Vector3d rhs) + { + return (lhs - rhs).MagnitudeSqr < Epsilon; + } + + /// + /// Returns if two vectors don't equal eachother. + /// + /// The left hand side vector. + /// The right hand side vector. + /// If not equals. + public static bool operator !=(Vector3d lhs, Vector3d rhs) + { + return (lhs - rhs).MagnitudeSqr >= Epsilon; + } + + /// + /// Implicitly converts from a single-precision vector into a double-precision vector. + /// + /// The single-precision vector. + public static implicit operator Vector3d(Vector3 v) + { + return new Vector3d(v.x, v.y, v.z); + } + + /// + /// Implicitly converts from an integer vector into a double-precision vector. + /// + /// The integer vector. + public static implicit operator Vector3d(Vector3i v) + { + return new Vector3d(v.x, v.y, v.z); + } + #endregion + + #region Public Methods + #region Instance + /// + /// Set x, y and z components of an existing vector. + /// + /// The x value. + /// The y value. + /// The z value. + public void Set(double x, double y, double z) + { + this.x = x; + this.y = y; + this.z = z; + } + + /// + /// Multiplies with another vector component-wise. + /// + /// The vector to multiply with. + public void Scale(ref Vector3d scale) + { + x *= scale.x; + y *= scale.y; + z *= scale.z; + } + + /// + /// Normalizes this vector. + /// + public void Normalize() + { + double mag = this.Magnitude; + if (mag > Epsilon) + { + x /= mag; + y /= mag; + z /= mag; + } + else + { + x = y = z = 0; + } + } + + /// + /// Clamps this vector between a specific range. + /// + /// The minimum component value. + /// The maximum component value. + public void Clamp(double min, double max) + { + if (x < min) x = min; + else if (x > max) x = max; + + if (y < min) y = min; + else if (y > max) y = max; + + if (z < min) z = min; + else if (z > max) z = max; + } + #endregion + + #region Object + /// + /// Returns a hash code for this vector. + /// + /// The hash code. + public override int GetHashCode() + { + return x.GetHashCode() ^ y.GetHashCode() << 2 ^ z.GetHashCode() >> 2; + } + + /// + /// Returns if this vector is equal to another one. + /// + /// The other vector to compare to. + /// If equals. + public override bool Equals(object other) + { + if (!(other is Vector3d)) + { + return false; + } + Vector3d vector = (Vector3d)other; + return (x == vector.x && y == vector.y && z == vector.z); + } + + /// + /// Returns if this vector is equal to another one. + /// + /// The other vector to compare to. + /// If equals. + public bool Equals(Vector3d other) + { + return (x == other.x && y == other.y && z == other.z); + } + + /// + /// Returns a nicely formatted string for this vector. + /// + /// The string. + public override string ToString() + { + return string.Format("({0}, {1}, {2})", + x.ToString("F1", CultureInfo.InvariantCulture), + y.ToString("F1", CultureInfo.InvariantCulture), + z.ToString("F1", CultureInfo.InvariantCulture)); + } + + /// + /// Returns a nicely formatted string for this vector. + /// + /// The float format. + /// The string. + public string ToString(string format) + { + return string.Format("({0}, {1}, {2})", + x.ToString(format, CultureInfo.InvariantCulture), + y.ToString(format, CultureInfo.InvariantCulture), + z.ToString(format, CultureInfo.InvariantCulture)); + } + #endregion + + #region Static + /// + /// Dot Product of two vectors. + /// + /// The left hand side vector. + /// The right hand side vector. 
+ public static double Dot(ref Vector3d lhs, ref Vector3d rhs) + { + return lhs.x * rhs.x + lhs.y * rhs.y + lhs.z * rhs.z; + } + + /// + /// Cross Product of two vectors. + /// + /// The left hand side vector. + /// The right hand side vector. + /// The resulting vector. + public static void Cross(ref Vector3d lhs, ref Vector3d rhs, out Vector3d result) + { + result = new Vector3d(lhs.y * rhs.z - lhs.z * rhs.y, lhs.z * rhs.x - lhs.x * rhs.z, lhs.x * rhs.y - lhs.y * rhs.x); + } + + /// + /// Calculates the angle between two vectors. + /// + /// The from vector. + /// The to vector. + /// The angle. + public static double Angle(ref Vector3d from, ref Vector3d to) + { + Vector3d fromNormalized = from.Normalized; + Vector3d toNormalized = to.Normalized; + return System.Math.Acos(MathHelper.Clamp(Vector3d.Dot(ref fromNormalized, ref toNormalized), -1.0, 1.0)) * MathHelper.Rad2Degd; + } + + /// + /// Performs a linear interpolation between two vectors. + /// + /// The vector to interpolate from. + /// The vector to interpolate to. + /// The time fraction. + /// The resulting vector. + public static void Lerp(ref Vector3d a, ref Vector3d b, double t, out Vector3d result) + { + result = new Vector3d(a.x + (b.x - a.x) * t, a.y + (b.y - a.y) * t, a.z + (b.z - a.z) * t); + } + + /// + /// Multiplies two vectors component-wise. + /// + /// The first vector. + /// The second vector. + /// The resulting vector. + public static void Scale(ref Vector3d a, ref Vector3d b, out Vector3d result) + { + result = new Vector3d(a.x * b.x, a.y * b.y, a.z * b.z); + } + + /// + /// Normalizes a vector. + /// + /// The vector to normalize. + /// The resulting normalized vector. + public static void Normalize(ref Vector3d value, out Vector3d result) + { + double mag = value.Magnitude; + if (mag > Epsilon) + { + result = new Vector3d(value.x / mag, value.y / mag, value.z / mag); + } + else + { + result = Vector3d.zero; + } + } + #endregion + #endregion + } +} \ No newline at end of file diff --git a/LightlessSync/ThirdParty/MeshDecimator/Math/Vector3i.cs b/LightlessSync/ThirdParty/MeshDecimator/Math/Vector3i.cs new file mode 100644 index 0000000..d36d6d1 --- /dev/null +++ b/LightlessSync/ThirdParty/MeshDecimator/Math/Vector3i.cs @@ -0,0 +1,368 @@ +#region License +/* +MIT License + +Copyright(c) 2017-2018 Mattias Edlund + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all +copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +SOFTWARE. +*/ +#endregion + +using System; +using System.Globalization; + +namespace MeshDecimator.Math +{ + /// + /// A 3D integer vector. 
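The double-precision Cross/Normalize pair above is what a quadric-based simplifier typically uses to derive a triangle's plane before feeding it into SymmetricMatrix. A minimal sketch; the PlaneQuadric helper and the triangle vertices are made up for illustration and are not part of this file:

using MeshDecimator.Math;

static class TrianglePlaneSketch
{
    // Builds the error quadric of the plane through p0, p1, p2 (ax + by + cz + d = 0, unit normal).
    static SymmetricMatrix PlaneQuadric(Vector3d p0, Vector3d p1, Vector3d p2)
    {
        Vector3d e1 = p1 - p0;
        Vector3d e2 = p2 - p0;

        Vector3d normal;
        Vector3d.Cross(ref e1, ref e2, out normal);
        normal.Normalize();

        double d = -Vector3d.Dot(ref normal, ref p0);
        return new SymmetricMatrix(normal.x, normal.y, normal.z, d);
    }

    static void Demo()
    {
        // Unit triangle in the z = 0 plane: its quadric penalizes movement along z.
        SymmetricMatrix q = PlaneQuadric(
            new Vector3d(0, 0, 0),
            new Vector3d(1, 0, 0),
            new Vector3d(0, 1, 0));
    }
}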
+ /// + public struct Vector3i : IEquatable + { + #region Static Read-Only + /// + /// The zero vector. + /// + public static readonly Vector3i zero = new Vector3i(0, 0, 0); + #endregion + + #region Fields + /// + /// The x component. + /// + public int x; + /// + /// The y component. + /// + public int y; + /// + /// The z component. + /// + public int z; + #endregion + + #region Properties + /// + /// Gets the magnitude of this vector. + /// + public int Magnitude + { + get { return (int)System.Math.Sqrt(x * x + y * y + z * z); } + } + + /// + /// Gets the squared magnitude of this vector. + /// + public int MagnitudeSqr + { + get { return (x * x + y * y + z * z); } + } + + /// + /// Gets or sets a specific component by index in this vector. + /// + /// The component index. + public int this[int index] + { + get + { + switch (index) + { + case 0: + return x; + case 1: + return y; + case 2: + return z; + default: + throw new IndexOutOfRangeException("Invalid Vector3i index!"); + } + } + set + { + switch (index) + { + case 0: + x = value; + break; + case 1: + y = value; + break; + case 2: + z = value; + break; + default: + throw new IndexOutOfRangeException("Invalid Vector3i index!"); + } + } + } + #endregion + + #region Constructor + /// + /// Creates a new vector with one value for all components. + /// + /// The value. + public Vector3i(int value) + { + this.x = value; + this.y = value; + this.z = value; + } + + /// + /// Creates a new vector. + /// + /// The x value. + /// The y value. + /// The z value. + public Vector3i(int x, int y, int z) + { + this.x = x; + this.y = y; + this.z = z; + } + #endregion + + #region Operators + /// + /// Adds two vectors. + /// + /// The first vector. + /// The second vector. + /// The resulting vector. + public static Vector3i operator +(Vector3i a, Vector3i b) + { + return new Vector3i(a.x + b.x, a.y + b.y, a.z + b.z); + } + + /// + /// Subtracts two vectors. + /// + /// The first vector. + /// The second vector. + /// The resulting vector. + public static Vector3i operator -(Vector3i a, Vector3i b) + { + return new Vector3i(a.x - b.x, a.y - b.y, a.z - b.z); + } + + /// + /// Scales the vector uniformly. + /// + /// The vector. + /// The scaling value. + /// The resulting vector. + public static Vector3i operator *(Vector3i a, int d) + { + return new Vector3i(a.x * d, a.y * d, a.z * d); + } + + /// + /// Scales the vector uniformly. + /// + /// The scaling value. + /// The vector. + /// The resulting vector. + public static Vector3i operator *(int d, Vector3i a) + { + return new Vector3i(a.x * d, a.y * d, a.z * d); + } + + /// + /// Divides the vector with a float. + /// + /// The vector. + /// The dividing float value. + /// The resulting vector. + public static Vector3i operator /(Vector3i a, int d) + { + return new Vector3i(a.x / d, a.y / d, a.z / d); + } + + /// + /// Subtracts the vector from a zero vector. + /// + /// The vector. + /// The resulting vector. + public static Vector3i operator -(Vector3i a) + { + return new Vector3i(-a.x, -a.y, -a.z); + } + + /// + /// Returns if two vectors equals eachother. + /// + /// The left hand side vector. + /// The right hand side vector. + /// If equals. + public static bool operator ==(Vector3i lhs, Vector3i rhs) + { + return (lhs.x == rhs.x && lhs.y == rhs.y && lhs.z == rhs.z); + } + + /// + /// Returns if two vectors don't equal eachother. + /// + /// The left hand side vector. + /// The right hand side vector. + /// If not equals. 
+ public static bool operator !=(Vector3i lhs, Vector3i rhs) + { + return (lhs.x != rhs.x || lhs.y != rhs.y || lhs.z != rhs.z); + } + + /// + /// Explicitly converts from a single-precision vector into an integer vector. + /// + /// The single-precision vector. + public static implicit operator Vector3i(Vector3 v) + { + return new Vector3i((int)v.x, (int)v.y, (int)v.z); + } + + /// + /// Explicitly converts from a double-precision vector into an integer vector. + /// + /// The double-precision vector. + public static explicit operator Vector3i(Vector3d v) + { + return new Vector3i((int)v.x, (int)v.y, (int)v.z); + } + #endregion + + #region Public Methods + #region Instance + /// + /// Set x, y and z components of an existing vector. + /// + /// The x value. + /// The y value. + /// The z value. + public void Set(int x, int y, int z) + { + this.x = x; + this.y = y; + this.z = z; + } + + /// + /// Multiplies with another vector component-wise. + /// + /// The vector to multiply with. + public void Scale(ref Vector3i scale) + { + x *= scale.x; + y *= scale.y; + z *= scale.z; + } + + /// + /// Clamps this vector between a specific range. + /// + /// The minimum component value. + /// The maximum component value. + public void Clamp(int min, int max) + { + if (x < min) x = min; + else if (x > max) x = max; + + if (y < min) y = min; + else if (y > max) y = max; + + if (z < min) z = min; + else if (z > max) z = max; + } + #endregion + + #region Object + /// + /// Returns a hash code for this vector. + /// + /// The hash code. + public override int GetHashCode() + { + return x.GetHashCode() ^ y.GetHashCode() << 2 ^ z.GetHashCode() >> 2; + } + + /// + /// Returns if this vector is equal to another one. + /// + /// The other vector to compare to. + /// If equals. + public override bool Equals(object other) + { + if (!(other is Vector3i)) + { + return false; + } + Vector3i vector = (Vector3i)other; + return (x == vector.x && y == vector.y && z == vector.z); + } + + /// + /// Returns if this vector is equal to another one. + /// + /// The other vector to compare to. + /// If equals. + public bool Equals(Vector3i other) + { + return (x == other.x && y == other.y && z == other.z); + } + + /// + /// Returns a nicely formatted string for this vector. + /// + /// The string. + public override string ToString() + { + return string.Format("({0}, {1}, {2})", + x.ToString(CultureInfo.InvariantCulture), + y.ToString(CultureInfo.InvariantCulture), + z.ToString(CultureInfo.InvariantCulture)); + } + + /// + /// Returns a nicely formatted string for this vector. + /// + /// The integer format. + /// The string. + public string ToString(string format) + { + return string.Format("({0}, {1}, {2})", + x.ToString(format, CultureInfo.InvariantCulture), + y.ToString(format, CultureInfo.InvariantCulture), + z.ToString(format, CultureInfo.InvariantCulture)); + } + #endregion + + #region Static + /// + /// Multiplies two vectors component-wise. + /// + /// The first vector. + /// The second vector. + /// The resulting vector. 
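Note that, unlike Vector2i, the conversion from Vector3 here is declared implicit even though its summary says it converts explicitly; either way the cast truncates each component toward zero rather than rounding. A minimal sketch (illustrative values only):

using MeshDecimator.Math;

static class Vector3iConversionSketch
{
    static void Demo()
    {
        Vector3 f = new Vector3(1.9f, -1.9f, 0.5f);

        Vector3i a = f;                                       // implicit: (1, -1, 0), fractions dropped toward zero
        Vector3i b = (Vector3i)new Vector3d(2.7, 0.0, -0.3);  // explicit from double: (2, 0, 0)
    }
}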
+ public static void Scale(ref Vector3i a, ref Vector3i b, out Vector3i result) + { + result = new Vector3i(a.x * b.x, a.y * b.y, a.z * b.z); + } + #endregion + #endregion + } +} \ No newline at end of file diff --git a/LightlessSync/ThirdParty/MeshDecimator/Math/Vector4.cs b/LightlessSync/ThirdParty/MeshDecimator/Math/Vector4.cs new file mode 100644 index 0000000..bf1d655 --- /dev/null +++ b/LightlessSync/ThirdParty/MeshDecimator/Math/Vector4.cs @@ -0,0 +1,467 @@ +#region License +/* +MIT License + +Copyright(c) 2017-2018 Mattias Edlund + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all +copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +SOFTWARE. +*/ +#endregion + +using System; +using System.Globalization; + +namespace MeshDecimator.Math +{ + /// + /// A single precision 4D vector. + /// + public struct Vector4 : IEquatable + { + #region Static Read-Only + /// + /// The zero vector. + /// + public static readonly Vector4 zero = new Vector4(0, 0, 0, 0); + #endregion + + #region Consts + /// + /// The vector epsilon. + /// + public const float Epsilon = 9.99999944E-11f; + #endregion + + #region Fields + /// + /// The x component. + /// + public float x; + /// + /// The y component. + /// + public float y; + /// + /// The z component. + /// + public float z; + /// + /// The w component. + /// + public float w; + #endregion + + #region Properties + /// + /// Gets the magnitude of this vector. + /// + public float Magnitude + { + get { return (float)System.Math.Sqrt(x * x + y * y + z * z + w * w); } + } + + /// + /// Gets the squared magnitude of this vector. + /// + public float MagnitudeSqr + { + get { return (x * x + y * y + z * z + w * w); } + } + + /// + /// Gets a normalized vector from this vector. + /// + public Vector4 Normalized + { + get + { + Vector4 result; + Normalize(ref this, out result); + return result; + } + } + + /// + /// Gets or sets a specific component by index in this vector. + /// + /// The component index. + public float this[int index] + { + get + { + switch (index) + { + case 0: + return x; + case 1: + return y; + case 2: + return z; + case 3: + return w; + default: + throw new IndexOutOfRangeException("Invalid Vector4 index!"); + } + } + set + { + switch (index) + { + case 0: + x = value; + break; + case 1: + y = value; + break; + case 2: + z = value; + break; + case 3: + w = value; + break; + default: + throw new IndexOutOfRangeException("Invalid Vector4 index!"); + } + } + } + #endregion + + #region Constructor + /// + /// Creates a new vector with one value for all components. + /// + /// The value. 
+ public Vector4(float value) + { + this.x = value; + this.y = value; + this.z = value; + this.w = value; + } + + /// + /// Creates a new vector. + /// + /// The x value. + /// The y value. + /// The z value. + /// The w value. + public Vector4(float x, float y, float z, float w) + { + this.x = x; + this.y = y; + this.z = z; + this.w = w; + } + #endregion + + #region Operators + /// + /// Adds two vectors. + /// + /// The first vector. + /// The second vector. + /// The resulting vector. + public static Vector4 operator +(Vector4 a, Vector4 b) + { + return new Vector4(a.x + b.x, a.y + b.y, a.z + b.z, a.w + b.w); + } + + /// + /// Subtracts two vectors. + /// + /// The first vector. + /// The second vector. + /// The resulting vector. + public static Vector4 operator -(Vector4 a, Vector4 b) + { + return new Vector4(a.x - b.x, a.y - b.y, a.z - b.z, a.w - b.w); + } + + /// + /// Scales the vector uniformly. + /// + /// The vector. + /// The scaling value. + /// The resulting vector. + public static Vector4 operator *(Vector4 a, float d) + { + return new Vector4(a.x * d, a.y * d, a.z * d, a.w * d); + } + + /// + /// Scales the vector uniformly. + /// + /// The scaling value. + /// The vector. + /// The resulting vector. + public static Vector4 operator *(float d, Vector4 a) + { + return new Vector4(a.x * d, a.y * d, a.z * d, a.w * d); + } + + /// + /// Divides the vector with a float. + /// + /// The vector. + /// The dividing float value. + /// The resulting vector. + public static Vector4 operator /(Vector4 a, float d) + { + return new Vector4(a.x / d, a.y / d, a.z / d, a.w / d); + } + + /// + /// Subtracts the vector from a zero vector. + /// + /// The vector. + /// The resulting vector. + public static Vector4 operator -(Vector4 a) + { + return new Vector4(-a.x, -a.y, -a.z, -a.w); + } + + /// + /// Returns if two vectors equals eachother. + /// + /// The left hand side vector. + /// The right hand side vector. + /// If equals. + public static bool operator ==(Vector4 lhs, Vector4 rhs) + { + return (lhs - rhs).MagnitudeSqr < Epsilon; + } + + /// + /// Returns if two vectors don't equal eachother. + /// + /// The left hand side vector. + /// The right hand side vector. + /// If not equals. + public static bool operator !=(Vector4 lhs, Vector4 rhs) + { + return (lhs - rhs).MagnitudeSqr >= Epsilon; + } + + /// + /// Explicitly converts from a double-precision vector into a single-precision vector. + /// + /// The double-precision vector. + public static explicit operator Vector4(Vector4d v) + { + return new Vector4((float)v.x, (float)v.y, (float)v.z, (float)v.w); + } + + /// + /// Implicitly converts from an integer vector into a single-precision vector. + /// + /// The integer vector. + public static implicit operator Vector4(Vector4i v) + { + return new Vector4(v.x, v.y, v.z, v.w); + } + #endregion + + #region Public Methods + #region Instance + /// + /// Set x, y and z components of an existing vector. + /// + /// The x value. + /// The y value. + /// The z value. + /// The w value. + public void Set(float x, float y, float z, float w) + { + this.x = x; + this.y = y; + this.z = z; + this.w = w; + } + + /// + /// Multiplies with another vector component-wise. + /// + /// The vector to multiply with. + public void Scale(ref Vector4 scale) + { + x *= scale.x; + y *= scale.y; + z *= scale.z; + w *= scale.w; + } + + /// + /// Normalizes this vector. 
+ /// + public void Normalize() + { + float mag = this.Magnitude; + if (mag > Epsilon) + { + x /= mag; + y /= mag; + z /= mag; + w /= mag; + } + else + { + x = y = z = w = 0; + } + } + + /// + /// Clamps this vector between a specific range. + /// + /// The minimum component value. + /// The maximum component value. + public void Clamp(float min, float max) + { + if (x < min) x = min; + else if (x > max) x = max; + + if (y < min) y = min; + else if (y > max) y = max; + + if (z < min) z = min; + else if (z > max) z = max; + + if (w < min) w = min; + else if (w > max) w = max; + } + #endregion + + #region Object + /// + /// Returns a hash code for this vector. + /// + /// The hash code. + public override int GetHashCode() + { + return x.GetHashCode() ^ y.GetHashCode() << 2 ^ z.GetHashCode() >> 2 ^ w.GetHashCode() >> 1; + } + + /// + /// Returns if this vector is equal to another one. + /// + /// The other vector to compare to. + /// If equals. + public override bool Equals(object other) + { + if (!(other is Vector4)) + { + return false; + } + Vector4 vector = (Vector4)other; + return (x == vector.x && y == vector.y && z == vector.z && w == vector.w); + } + + /// + /// Returns if this vector is equal to another one. + /// + /// The other vector to compare to. + /// If equals. + public bool Equals(Vector4 other) + { + return (x == other.x && y == other.y && z == other.z && w == other.w); + } + + /// + /// Returns a nicely formatted string for this vector. + /// + /// The string. + public override string ToString() + { + return string.Format("({0}, {1}, {2}, {3})", + x.ToString("F1", CultureInfo.InvariantCulture), + y.ToString("F1", CultureInfo.InvariantCulture), + z.ToString("F1", CultureInfo.InvariantCulture), + w.ToString("F1", CultureInfo.InvariantCulture)); + } + + /// + /// Returns a nicely formatted string for this vector. + /// + /// The float format. + /// The string. + public string ToString(string format) + { + return string.Format("({0}, {1}, {2}, {3})", + x.ToString(format, CultureInfo.InvariantCulture), + y.ToString(format, CultureInfo.InvariantCulture), + z.ToString(format, CultureInfo.InvariantCulture), + w.ToString(format, CultureInfo.InvariantCulture)); + } + #endregion + + #region Static + /// + /// Dot Product of two vectors. + /// + /// The left hand side vector. + /// The right hand side vector. + public static float Dot(ref Vector4 lhs, ref Vector4 rhs) + { + return lhs.x * rhs.x + lhs.y * rhs.y + lhs.z * rhs.z + lhs.w * rhs.w; + } + + /// + /// Performs a linear interpolation between two vectors. + /// + /// The vector to interpolate from. + /// The vector to interpolate to. + /// The time fraction. + /// The resulting vector. + public static void Lerp(ref Vector4 a, ref Vector4 b, float t, out Vector4 result) + { + result = new Vector4(a.x + (b.x - a.x) * t, a.y + (b.y - a.y) * t, a.z + (b.z - a.z) * t, a.w + (b.w - a.w) * t); + } + + /// + /// Multiplies two vectors component-wise. + /// + /// The first vector. + /// The second vector. + /// The resulting vector. + public static void Scale(ref Vector4 a, ref Vector4 b, out Vector4 result) + { + result = new Vector4(a.x * b.x, a.y * b.y, a.z * b.z, a.w * b.w); + } + + /// + /// Normalizes a vector. + /// + /// The vector to normalize. + /// The resulting normalized vector. 
+ public static void Normalize(ref Vector4 value, out Vector4 result) + { + float mag = value.Magnitude; + if (mag > Epsilon) + { + result = new Vector4(value.x / mag, value.y / mag, value.z / mag, value.w / mag); + } + else + { + result = Vector4.zero; + } + } + #endregion + #endregion + } +} \ No newline at end of file diff --git a/LightlessSync/ThirdParty/MeshDecimator/Math/Vector4d.cs b/LightlessSync/ThirdParty/MeshDecimator/Math/Vector4d.cs new file mode 100644 index 0000000..c984c08 --- /dev/null +++ b/LightlessSync/ThirdParty/MeshDecimator/Math/Vector4d.cs @@ -0,0 +1,467 @@ +#region License +/* +MIT License + +Copyright(c) 2017-2018 Mattias Edlund + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all +copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +SOFTWARE. +*/ +#endregion + +using System; +using System.Globalization; + +namespace MeshDecimator.Math +{ + /// + /// A double precision 4D vector. + /// + public struct Vector4d : IEquatable + { + #region Static Read-Only + /// + /// The zero vector. + /// + public static readonly Vector4d zero = new Vector4d(0, 0, 0, 0); + #endregion + + #region Consts + /// + /// The vector epsilon. + /// + public const double Epsilon = double.Epsilon; + #endregion + + #region Fields + /// + /// The x component. + /// + public double x; + /// + /// The y component. + /// + public double y; + /// + /// The z component. + /// + public double z; + /// + /// The w component. + /// + public double w; + #endregion + + #region Properties + /// + /// Gets the magnitude of this vector. + /// + public double Magnitude + { + get { return System.Math.Sqrt(x * x + y * y + z * z + w * w); } + } + + /// + /// Gets the squared magnitude of this vector. + /// + public double MagnitudeSqr + { + get { return (x * x + y * y + z * z + w * w); } + } + + /// + /// Gets a normalized vector from this vector. + /// + public Vector4d Normalized + { + get + { + Vector4d result; + Normalize(ref this, out result); + return result; + } + } + + /// + /// Gets or sets a specific component by index in this vector. + /// + /// The component index. 
+ public double this[int index] + { + get + { + switch (index) + { + case 0: + return x; + case 1: + return y; + case 2: + return z; + case 3: + return w; + default: + throw new IndexOutOfRangeException("Invalid Vector4d index!"); + } + } + set + { + switch (index) + { + case 0: + x = value; + break; + case 1: + y = value; + break; + case 2: + z = value; + break; + case 3: + w = value; + break; + default: + throw new IndexOutOfRangeException("Invalid Vector4d index!"); + } + } + } + #endregion + + #region Constructor + /// + /// Creates a new vector with one value for all components. + /// + /// The value. + public Vector4d(double value) + { + this.x = value; + this.y = value; + this.z = value; + this.w = value; + } + + /// + /// Creates a new vector. + /// + /// The x value. + /// The y value. + /// The z value. + /// The w value. + public Vector4d(double x, double y, double z, double w) + { + this.x = x; + this.y = y; + this.z = z; + this.w = w; + } + #endregion + + #region Operators + /// + /// Adds two vectors. + /// + /// The first vector. + /// The second vector. + /// The resulting vector. + public static Vector4d operator +(Vector4d a, Vector4d b) + { + return new Vector4d(a.x + b.x, a.y + b.y, a.z + b.z, a.w + b.w); + } + + /// + /// Subtracts two vectors. + /// + /// The first vector. + /// The second vector. + /// The resulting vector. + public static Vector4d operator -(Vector4d a, Vector4d b) + { + return new Vector4d(a.x - b.x, a.y - b.y, a.z - b.z, a.w - b.w); + } + + /// + /// Scales the vector uniformly. + /// + /// The vector. + /// The scaling value. + /// The resulting vector. + public static Vector4d operator *(Vector4d a, double d) + { + return new Vector4d(a.x * d, a.y * d, a.z * d, a.w * d); + } + + /// + /// Scales the vector uniformly. + /// + /// The scaling value. + /// The vector. + /// The resulting vector. + public static Vector4d operator *(double d, Vector4d a) + { + return new Vector4d(a.x * d, a.y * d, a.z * d, a.w * d); + } + + /// + /// Divides the vector with a float. + /// + /// The vector. + /// The dividing float value. + /// The resulting vector. + public static Vector4d operator /(Vector4d a, double d) + { + return new Vector4d(a.x / d, a.y / d, a.z / d, a.w / d); + } + + /// + /// Subtracts the vector from a zero vector. + /// + /// The vector. + /// The resulting vector. + public static Vector4d operator -(Vector4d a) + { + return new Vector4d(-a.x, -a.y, -a.z, -a.w); + } + + /// + /// Returns if two vectors equals eachother. + /// + /// The left hand side vector. + /// The right hand side vector. + /// If equals. + public static bool operator ==(Vector4d lhs, Vector4d rhs) + { + return (lhs - rhs).MagnitudeSqr < Epsilon; + } + + /// + /// Returns if two vectors don't equal eachother. + /// + /// The left hand side vector. + /// The right hand side vector. + /// If not equals. + public static bool operator !=(Vector4d lhs, Vector4d rhs) + { + return (lhs - rhs).MagnitudeSqr >= Epsilon; + } + + /// + /// Implicitly converts from a single-precision vector into a double-precision vector. + /// + /// The single-precision vector. + public static implicit operator Vector4d(Vector4 v) + { + return new Vector4d(v.x, v.y, v.z, v.w); + } + + /// + /// Implicitly converts from an integer vector into a double-precision vector. + /// + /// The integer vector. 
+ public static implicit operator Vector4d(Vector4i v) + { + return new Vector4d(v.x, v.y, v.z, v.w); + } + #endregion + + #region Public Methods + #region Instance + /// + /// Set x, y and z components of an existing vector. + /// + /// The x value. + /// The y value. + /// The z value. + /// The w value. + public void Set(double x, double y, double z, double w) + { + this.x = x; + this.y = y; + this.z = z; + this.w = w; + } + + /// + /// Multiplies with another vector component-wise. + /// + /// The vector to multiply with. + public void Scale(ref Vector4d scale) + { + x *= scale.x; + y *= scale.y; + z *= scale.z; + w *= scale.w; + } + + /// + /// Normalizes this vector. + /// + public void Normalize() + { + double mag = this.Magnitude; + if (mag > Epsilon) + { + x /= mag; + y /= mag; + z /= mag; + w /= mag; + } + else + { + x = y = z = w = 0; + } + } + + /// + /// Clamps this vector between a specific range. + /// + /// The minimum component value. + /// The maximum component value. + public void Clamp(double min, double max) + { + if (x < min) x = min; + else if (x > max) x = max; + + if (y < min) y = min; + else if (y > max) y = max; + + if (z < min) z = min; + else if (z > max) z = max; + + if (w < min) w = min; + else if (w > max) w = max; + } + #endregion + + #region Object + /// + /// Returns a hash code for this vector. + /// + /// The hash code. + public override int GetHashCode() + { + return x.GetHashCode() ^ y.GetHashCode() << 2 ^ z.GetHashCode() >> 2 ^ w.GetHashCode() >> 1; + } + + /// + /// Returns if this vector is equal to another one. + /// + /// The other vector to compare to. + /// If equals. + public override bool Equals(object other) + { + if (!(other is Vector4d)) + { + return false; + } + Vector4d vector = (Vector4d)other; + return (x == vector.x && y == vector.y && z == vector.z && w == vector.w); + } + + /// + /// Returns if this vector is equal to another one. + /// + /// The other vector to compare to. + /// If equals. + public bool Equals(Vector4d other) + { + return (x == other.x && y == other.y && z == other.z && w == other.w); + } + + /// + /// Returns a nicely formatted string for this vector. + /// + /// The string. + public override string ToString() + { + return string.Format("({0}, {1}, {2}, {3})", + x.ToString("F1", CultureInfo.InvariantCulture), + y.ToString("F1", CultureInfo.InvariantCulture), + z.ToString("F1", CultureInfo.InvariantCulture), + w.ToString("F1", CultureInfo.InvariantCulture)); + } + + /// + /// Returns a nicely formatted string for this vector. + /// + /// The float format. + /// The string. + public string ToString(string format) + { + return string.Format("({0}, {1}, {2}, {3})", + x.ToString(format, CultureInfo.InvariantCulture), + y.ToString(format, CultureInfo.InvariantCulture), + z.ToString(format, CultureInfo.InvariantCulture), + w.ToString(format, CultureInfo.InvariantCulture)); + } + #endregion + + #region Static + /// + /// Dot Product of two vectors. + /// + /// The left hand side vector. + /// The right hand side vector. + public static double Dot(ref Vector4d lhs, ref Vector4d rhs) + { + return lhs.x * rhs.x + lhs.y * rhs.y + lhs.z * rhs.z + lhs.w * rhs.w; + } + + /// + /// Performs a linear interpolation between two vectors. + /// + /// The vector to interpolate from. + /// The vector to interpolate to. + /// The time fraction. + /// The resulting vector. 
+ public static void Lerp(ref Vector4d a, ref Vector4d b, double t, out Vector4d result) + { + result = new Vector4d(a.x + (b.x - a.x) * t, a.y + (b.y - a.y) * t, a.z + (b.z - a.z) * t, a.w + (b.w - a.w) * t); + } + + /// + /// Multiplies two vectors component-wise. + /// + /// The first vector. + /// The second vector. + /// The resulting vector. + public static void Scale(ref Vector4d a, ref Vector4d b, out Vector4d result) + { + result = new Vector4d(a.x * b.x, a.y * b.y, a.z * b.z, a.w * b.w); + } + + /// + /// Normalizes a vector. + /// + /// The vector to normalize. + /// The resulting normalized vector. + public static void Normalize(ref Vector4d value, out Vector4d result) + { + double mag = value.Magnitude; + if (mag > Epsilon) + { + result = new Vector4d(value.x / mag, value.y / mag, value.z / mag, value.w / mag); + } + else + { + result = Vector4d.zero; + } + } + #endregion + #endregion + } +} \ No newline at end of file diff --git a/LightlessSync/ThirdParty/MeshDecimator/Math/Vector4i.cs b/LightlessSync/ThirdParty/MeshDecimator/Math/Vector4i.cs new file mode 100644 index 0000000..cc52459 --- /dev/null +++ b/LightlessSync/ThirdParty/MeshDecimator/Math/Vector4i.cs @@ -0,0 +1,388 @@ +#region License +/* +MIT License + +Copyright(c) 2017-2018 Mattias Edlund + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all +copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +SOFTWARE. +*/ +#endregion + +using System; +using System.Globalization; + +namespace MeshDecimator.Math +{ + /// + /// A 4D integer vector. + /// + public struct Vector4i : IEquatable + { + #region Static Read-Only + /// + /// The zero vector. + /// + public static readonly Vector4i zero = new Vector4i(0, 0, 0, 0); + #endregion + + #region Fields + /// + /// The x component. + /// + public int x; + /// + /// The y component. + /// + public int y; + /// + /// The z component. + /// + public int z; + /// + /// The w component. + /// + public int w; + #endregion + + #region Properties + /// + /// Gets the magnitude of this vector. + /// + public int Magnitude + { + get { return (int)System.Math.Sqrt(x * x + y * y + z * z + w * w); } + } + + /// + /// Gets the squared magnitude of this vector. + /// + public int MagnitudeSqr + { + get { return (x * x + y * y + z * z + w * w); } + } + + /// + /// Gets or sets a specific component by index in this vector. + /// + /// The component index. 
+ public int this[int index] + { + get + { + switch (index) + { + case 0: + return x; + case 1: + return y; + case 2: + return z; + case 3: + return w; + default: + throw new IndexOutOfRangeException("Invalid Vector4i index!"); + } + } + set + { + switch (index) + { + case 0: + x = value; + break; + case 1: + y = value; + break; + case 2: + z = value; + break; + case 3: + w = value; + break; + default: + throw new IndexOutOfRangeException("Invalid Vector4i index!"); + } + } + } + #endregion + + #region Constructor + /// + /// Creates a new vector with one value for all components. + /// + /// The value. + public Vector4i(int value) + { + this.x = value; + this.y = value; + this.z = value; + this.w = value; + } + + /// + /// Creates a new vector. + /// + /// The x value. + /// The y value. + /// The z value. + /// The w value. + public Vector4i(int x, int y, int z, int w) + { + this.x = x; + this.y = y; + this.z = z; + this.w = w; + } + #endregion + + #region Operators + /// + /// Adds two vectors. + /// + /// The first vector. + /// The second vector. + /// The resulting vector. + public static Vector4i operator +(Vector4i a, Vector4i b) + { + return new Vector4i(a.x + b.x, a.y + b.y, a.z + b.z, a.w + b.w); + } + + /// + /// Subtracts two vectors. + /// + /// The first vector. + /// The second vector. + /// The resulting vector. + public static Vector4i operator -(Vector4i a, Vector4i b) + { + return new Vector4i(a.x - b.x, a.y - b.y, a.z - b.z, a.w - b.w); + } + + /// + /// Scales the vector uniformly. + /// + /// The vector. + /// The scaling value. + /// The resulting vector. + public static Vector4i operator *(Vector4i a, int d) + { + return new Vector4i(a.x * d, a.y * d, a.z * d, a.w * d); + } + + /// + /// Scales the vector uniformly. + /// + /// The scaling value. + /// The vector. + /// The resulting vector. + public static Vector4i operator *(int d, Vector4i a) + { + return new Vector4i(a.x * d, a.y * d, a.z * d, a.w * d); + } + + /// + /// Divides the vector with a float. + /// + /// The vector. + /// The dividing float value. + /// The resulting vector. + public static Vector4i operator /(Vector4i a, int d) + { + return new Vector4i(a.x / d, a.y / d, a.z / d, a.w / d); + } + + /// + /// Subtracts the vector from a zero vector. + /// + /// The vector. + /// The resulting vector. + public static Vector4i operator -(Vector4i a) + { + return new Vector4i(-a.x, -a.y, -a.z, -a.w); + } + + /// + /// Returns if two vectors equals eachother. + /// + /// The left hand side vector. + /// The right hand side vector. + /// If equals. + public static bool operator ==(Vector4i lhs, Vector4i rhs) + { + return (lhs.x == rhs.x && lhs.y == rhs.y && lhs.z == rhs.z && lhs.w == rhs.w); + } + + /// + /// Returns if two vectors don't equal eachother. + /// + /// The left hand side vector. + /// The right hand side vector. + /// If not equals. + public static bool operator !=(Vector4i lhs, Vector4i rhs) + { + return (lhs.x != rhs.x || lhs.y != rhs.y || lhs.z != rhs.z || lhs.w != rhs.w); + } + + /// + /// Explicitly converts from a single-precision vector into an integer vector. + /// + /// The single-precision vector. + public static explicit operator Vector4i(Vector4 v) + { + return new Vector4i((int)v.x, (int)v.y, (int)v.z, (int)v.w); + } + + /// + /// Explicitly converts from a double-precision vector into an integer vector. + /// + /// The double-precision vector. 
+ public static explicit operator Vector4i(Vector4d v) + { + return new Vector4i((int)v.x, (int)v.y, (int)v.z, (int)v.w); + } + #endregion + + #region Public Methods + #region Instance + /// + /// Set x, y and z components of an existing vector. + /// + /// The x value. + /// The y value. + /// The z value. + /// The w value. + public void Set(int x, int y, int z, int w) + { + this.x = x; + this.y = y; + this.z = z; + this.w = w; + } + + /// + /// Multiplies with another vector component-wise. + /// + /// The vector to multiply with. + public void Scale(ref Vector4i scale) + { + x *= scale.x; + y *= scale.y; + z *= scale.z; + w *= scale.w; + } + + /// + /// Clamps this vector between a specific range. + /// + /// The minimum component value. + /// The maximum component value. + public void Clamp(int min, int max) + { + if (x < min) x = min; + else if (x > max) x = max; + + if (y < min) y = min; + else if (y > max) y = max; + + if (z < min) z = min; + else if (z > max) z = max; + + if (w < min) w = min; + else if (w > max) w = max; + } + #endregion + + #region Object + /// + /// Returns a hash code for this vector. + /// + /// The hash code. + public override int GetHashCode() + { + return x.GetHashCode() ^ y.GetHashCode() << 2 ^ z.GetHashCode() >> 2 ^ w.GetHashCode() >> 1; + } + + /// + /// Returns if this vector is equal to another one. + /// + /// The other vector to compare to. + /// If equals. + public override bool Equals(object other) + { + if (!(other is Vector4i)) + { + return false; + } + Vector4i vector = (Vector4i)other; + return (x == vector.x && y == vector.y && z == vector.z && w == vector.w); + } + + /// + /// Returns if this vector is equal to another one. + /// + /// The other vector to compare to. + /// If equals. + public bool Equals(Vector4i other) + { + return (x == other.x && y == other.y && z == other.z && w == other.w); + } + + /// + /// Returns a nicely formatted string for this vector. + /// + /// The string. + public override string ToString() + { + return string.Format("({0}, {1}, {2}, {3})", + x.ToString(CultureInfo.InvariantCulture), + y.ToString(CultureInfo.InvariantCulture), + z.ToString(CultureInfo.InvariantCulture), + w.ToString(CultureInfo.InvariantCulture)); + } + + /// + /// Returns a nicely formatted string for this vector. + /// + /// The integer format. + /// The string. + public string ToString(string format) + { + return string.Format("({0}, {1}, {2}, {3})", + x.ToString(format, CultureInfo.InvariantCulture), + y.ToString(format, CultureInfo.InvariantCulture), + z.ToString(format, CultureInfo.InvariantCulture), + w.ToString(format, CultureInfo.InvariantCulture)); + } + #endregion + + #region Static + /// + /// Multiplies two vectors component-wise. + /// + /// The first vector. + /// The second vector. + /// The resulting vector. 
+ public static void Scale(ref Vector4i a, ref Vector4i b, out Vector4i result) + { + result = new Vector4i(a.x * b.x, a.y * b.y, a.z * b.z, a.w * b.w); + } + #endregion + #endregion + } +} \ No newline at end of file diff --git a/LightlessSync/ThirdParty/MeshDecimator/Mesh.cs b/LightlessSync/ThirdParty/MeshDecimator/Mesh.cs new file mode 100644 index 0000000..2e38821 --- /dev/null +++ b/LightlessSync/ThirdParty/MeshDecimator/Mesh.cs @@ -0,0 +1,955 @@ +#region License +/* +MIT License + +Copyright(c) 2017-2018 Mattias Edlund + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all +copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +SOFTWARE. +*/ +#endregion + +using System; +using System.Collections.Generic; +using MeshDecimator.Math; + +namespace MeshDecimator +{ + /// + /// A mesh. + /// + public sealed class Mesh + { + #region Consts + /// + /// The count of supported UV channels. + /// + public const int UVChannelCount = 4; + #endregion + + #region Fields + private Vector3d[] vertices = null; + private int[][] indices = null; + private Vector3[] normals = null; + private Vector4[] tangents = null; + private Vector2[][] uvs2D = null; + private Vector3[][] uvs3D = null; + private Vector4[][] uvs4D = null; + private Vector4[] colors = null; + private BoneWeight[] boneWeights = null; + + private static readonly int[] emptyIndices = new int[0]; + #endregion + + #region Properties + /// + /// Gets the count of vertices of this mesh. + /// + public int VertexCount + { + get { return vertices.Length; } + } + + /// + /// Gets or sets the count of submeshes in this mesh. + /// + public int SubMeshCount + { + get { return indices.Length; } + set + { + if (value <= 0) + throw new ArgumentOutOfRangeException("value"); + + int[][] newIndices = new int[value][]; + Array.Copy(indices, 0, newIndices, 0, MathHelper.Min(indices.Length, newIndices.Length)); + indices = newIndices; + } + } + + /// + /// Gets the total count of triangles in this mesh. + /// + public int TriangleCount + { + get + { + int triangleCount = 0; + for (int i = 0; i < indices.Length; i++) + { + if (indices[i] != null) + { + triangleCount += indices[i].Length / 3; + } + } + return triangleCount; + } + } + + /// + /// Gets or sets the vertices for this mesh. Note that this resets all other vertex attributes. + /// + public Vector3d[] Vertices + { + get { return vertices; } + set + { + if (value == null) + throw new ArgumentNullException("value"); + + vertices = value; + ClearVertexAttributes(); + } + } + + /// + /// Gets or sets the combined indices for this mesh. Once set, the sub-mesh count gets set to 1. 
+ /// + public int[] Indices + { + get + { + if (indices.Length == 1) + { + return indices[0] ?? emptyIndices; + } + else + { + List indexList = new List(TriangleCount * 3); + for (int i = 0; i < indices.Length; i++) + { + if (indices[i] != null) + { + indexList.AddRange(indices[i]); + } + } + return indexList.ToArray(); + } + } + set + { + if (value == null) + throw new ArgumentNullException("value"); + else if ((value.Length % 3) != 0) + throw new ArgumentException("The index count must be multiple by 3.", "value"); + + SubMeshCount = 1; + SetIndices(0, value); + } + } + + /// + /// Gets or sets the normals for this mesh. + /// + public Vector3[] Normals + { + get { return normals; } + set + { + if (value != null && value.Length != vertices.Length) + throw new ArgumentException(string.Format("The vertex normals must be as many as the vertices. Assigned: {0} Require: {1}", value.Length, vertices.Length)); + + normals = value; + } + } + + /// + /// Gets or sets the tangents for this mesh. + /// + public Vector4[] Tangents + { + get { return tangents; } + set + { + if (value != null && value.Length != vertices.Length) + throw new ArgumentException(string.Format("The vertex tangents must be as many as the vertices. Assigned: {0} Require: {1}", value.Length, vertices.Length)); + + tangents = value; + } + } + + /// + /// Gets or sets the first UV set for this mesh. + /// + public Vector2[] UV1 + { + get { return GetUVs2D(0); } + set { SetUVs(0, value); } + } + + /// + /// Gets or sets the second UV set for this mesh. + /// + public Vector2[] UV2 + { + get { return GetUVs2D(1); } + set { SetUVs(1, value); } + } + + /// + /// Gets or sets the third UV set for this mesh. + /// + public Vector2[] UV3 + { + get { return GetUVs2D(2); } + set { SetUVs(2, value); } + } + + /// + /// Gets or sets the fourth UV set for this mesh. + /// + public Vector2[] UV4 + { + get { return GetUVs2D(3); } + set { SetUVs(3, value); } + } + + /// + /// Gets or sets the vertex colors for this mesh. + /// + public Vector4[] Colors + { + get { return colors; } + set + { + if (value != null && value.Length != vertices.Length) + throw new ArgumentException(string.Format("The vertex colors must be as many as the vertices. Assigned: {0} Require: {1}", value.Length, vertices.Length)); + + colors = value; + } + } + + /// + /// Gets or sets the vertex bone weights for this mesh. + /// + public BoneWeight[] BoneWeights + { + get { return boneWeights; } + set + { + if (value != null && value.Length != vertices.Length) + throw new ArgumentException(string.Format("The vertex bone weights must be as many as the vertices. Assigned: {0} Require: {1}", value.Length, vertices.Length)); + + boneWeights = value; + } + } + #endregion + + #region Constructor + /// + /// Creates a new mesh. + /// + /// The mesh vertices. + /// The mesh indices. + public Mesh(Vector3d[] vertices, int[] indices) + { + if (vertices == null) + throw new ArgumentNullException("vertices"); + else if (indices == null) + throw new ArgumentNullException("indices"); + else if ((indices.Length % 3) != 0) + throw new ArgumentException("The index count must be multiple by 3.", "indices"); + + this.vertices = vertices; + this.indices = new int[1][]; + this.indices[0] = indices; + } + + /// + /// Creates a new mesh. + /// + /// The mesh vertices. + /// The mesh indices. 
+ public Mesh(Vector3d[] vertices, int[][] indices) + { + if (vertices == null) + throw new ArgumentNullException("vertices"); + else if (indices == null) + throw new ArgumentNullException("indices"); + + for (int i = 0; i < indices.Length; i++) + { + if (indices[i] != null && (indices[i].Length % 3) != 0) + throw new ArgumentException(string.Format("The index count must be multiple by 3 at sub-mesh index {0}.", i), "indices"); + } + + this.vertices = vertices; + this.indices = indices; + } + #endregion + + #region Private Methods + private void ClearVertexAttributes() + { + normals = null; + tangents = null; + uvs2D = null; + uvs3D = null; + uvs4D = null; + colors = null; + boneWeights = null; + } + #endregion + + #region Public Methods + #region Recalculate Normals + /// + /// Recalculates the normals for this mesh smoothly. + /// + public void RecalculateNormals() + { + int vertexCount = vertices.Length; + Vector3[] normals = new Vector3[vertexCount]; + + int subMeshCount = this.indices.Length; + for (int subMeshIndex = 0; subMeshIndex < subMeshCount; subMeshIndex++) + { + int[] indices = this.indices[subMeshIndex]; + if (indices == null) + continue; + + int indexCount = indices.Length; + for (int i = 0; i < indexCount; i += 3) + { + int i0 = indices[i]; + int i1 = indices[i + 1]; + int i2 = indices[i + 2]; + + var v0 = (Vector3)vertices[i0]; + var v1 = (Vector3)vertices[i1]; + var v2 = (Vector3)vertices[i2]; + + var nx = v1 - v0; + var ny = v2 - v0; + Vector3 normal; + Vector3.Cross(ref nx, ref ny, out normal); + normal.Normalize(); + + normals[i0] += normal; + normals[i1] += normal; + normals[i2] += normal; + } + } + + for (int i = 0; i < vertexCount; i++) + { + normals[i].Normalize(); + } + + this.normals = normals; + } + #endregion + + #region Recalculate Tangents + /// + /// Recalculates the tangents for this mesh. + /// + public void RecalculateTangents() + { + // Make sure we have the normals first + if (normals == null) + return; + + // Also make sure that we have the first UV set + bool uvIs2D = (uvs2D != null && uvs2D[0] != null); + bool uvIs3D = (uvs3D != null && uvs3D[0] != null); + bool uvIs4D = (uvs4D != null && uvs4D[0] != null); + if (!uvIs2D && !uvIs3D && !uvIs4D) + return; + + int vertexCount = vertices.Length; + + var tangents = new Vector4[vertexCount]; + var tan1 = new Vector3[vertexCount]; + var tan2 = new Vector3[vertexCount]; + + Vector2[] uv2D = (uvIs2D ? uvs2D[0] : null); + Vector3[] uv3D = (uvIs3D ? uvs3D[0] : null); + Vector4[] uv4D = (uvIs4D ? 
uvs4D[0] : null); + + int subMeshCount = this.indices.Length; + for (int subMeshIndex = 0; subMeshIndex < subMeshCount; subMeshIndex++) + { + int[] indices = this.indices[subMeshIndex]; + if (indices == null) + continue; + + int indexCount = indices.Length; + for (int i = 0; i < indexCount; i += 3) + { + int i0 = indices[i]; + int i1 = indices[i + 1]; + int i2 = indices[i + 2]; + + var v0 = vertices[i0]; + var v1 = vertices[i1]; + var v2 = vertices[i2]; + + float s1, s2, t1, t2; + if (uvIs2D) + { + var w0 = uv2D[i0]; + var w1 = uv2D[i1]; + var w2 = uv2D[i2]; + s1 = w1.x - w0.x; + s2 = w2.x - w0.x; + t1 = w1.y - w0.y; + t2 = w2.y - w0.y; + } + else if (uvIs3D) + { + var w0 = uv3D[i0]; + var w1 = uv3D[i1]; + var w2 = uv3D[i2]; + s1 = w1.x - w0.x; + s2 = w2.x - w0.x; + t1 = w1.y - w0.y; + t2 = w2.y - w0.y; + } + else + { + var w0 = uv4D[i0]; + var w1 = uv4D[i1]; + var w2 = uv4D[i2]; + s1 = w1.x - w0.x; + s2 = w2.x - w0.x; + t1 = w1.y - w0.y; + t2 = w2.y - w0.y; + } + + + float x1 = (float)(v1.x - v0.x); + float x2 = (float)(v2.x - v0.x); + float y1 = (float)(v1.y - v0.y); + float y2 = (float)(v2.y - v0.y); + float z1 = (float)(v1.z - v0.z); + float z2 = (float)(v2.z - v0.z); + float r = 1f / (s1 * t2 - s2 * t1); + + var sdir = new Vector3((t2 * x1 - t1 * x2) * r, (t2 * y1 - t1 * y2) * r, (t2 * z1 - t1 * z2) * r); + var tdir = new Vector3((s1 * x2 - s2 * x1) * r, (s1 * y2 - s2 * y1) * r, (s1 * z2 - s2 * z1) * r); + + tan1[i0] += sdir; + tan1[i1] += sdir; + tan1[i2] += sdir; + tan2[i0] += tdir; + tan2[i1] += tdir; + tan2[i2] += tdir; + } + } + + for (int i = 0; i < vertexCount; i++) + { + var n = normals[i]; + var t = tan1[i]; + + var tmp = (t - n * Vector3.Dot(ref n, ref t)); + tmp.Normalize(); + + Vector3 c; + Vector3.Cross(ref n, ref t, out c); + float dot = Vector3.Dot(ref c, ref tan2[i]); + float w = (dot < 0f ? -1f : 1f); + tangents[i] = new Vector4(tmp.x, tmp.y, tmp.z, w); + } + + this.tangents = tangents; + } + #endregion + + #region Triangles + /// + /// Returns the count of triangles for a specific sub-mesh in this mesh. + /// + /// The sub-mesh index. + /// The triangle count. + public int GetTriangleCount(int subMeshIndex) + { + if (subMeshIndex < 0 || subMeshIndex >= indices.Length) + throw new IndexOutOfRangeException(); + + return indices[subMeshIndex].Length / 3; + } + + /// + /// Returns the triangle indices of a specific sub-mesh in this mesh. + /// + /// The sub-mesh index. + /// The triangle indices. + public int[] GetIndices(int subMeshIndex) + { + if (subMeshIndex < 0 || subMeshIndex >= indices.Length) + throw new IndexOutOfRangeException(); + + return indices[subMeshIndex] ?? emptyIndices; + } + + /// + /// Returns the triangle indices for all sub-meshes in this mesh. + /// + /// The sub-mesh triangle indices. + public int[][] GetSubMeshIndices() + { + var subMeshIndices = new int[indices.Length][]; + for (int subMeshIndex = 0; subMeshIndex < indices.Length; subMeshIndex++) + { + subMeshIndices[subMeshIndex] = indices[subMeshIndex] ?? emptyIndices; + } + return subMeshIndices; + } + + /// + /// Sets the triangle indices of a specific sub-mesh in this mesh. + /// + /// The sub-mesh index. + /// The triangle indices. 
+ public void SetIndices(int subMeshIndex, int[] indices) + { + if (subMeshIndex < 0 || subMeshIndex >= this.indices.Length) + throw new IndexOutOfRangeException(); + else if (indices == null) + throw new ArgumentNullException("indices"); + else if ((indices.Length % 3) != 0) + throw new ArgumentException("The index count must be multiple by 3.", "indices"); + + this.indices[subMeshIndex] = indices; + } + #endregion + + #region UV Sets + #region Getting + /// + /// Returns the UV dimension for a specific channel. + /// + /// + /// The UV dimension count. + public int GetUVDimension(int channel) + { + if (channel < 0 || channel >= UVChannelCount) + throw new ArgumentOutOfRangeException("channel"); + + if (uvs2D != null && uvs2D[channel] != null) + { + return 2; + } + else if (uvs3D != null && uvs3D[channel] != null) + { + return 3; + } + else if (uvs4D != null && uvs4D[channel] != null) + { + return 4; + } + else + { + return 0; + } + } + + /// + /// Returns the UVs (2D) from a specific channel. + /// + /// The channel index. + /// The UVs. + public Vector2[] GetUVs2D(int channel) + { + if (channel < 0 || channel >= UVChannelCount) + throw new ArgumentOutOfRangeException("channel"); + + if (uvs2D != null && uvs2D[channel] != null) + { + return uvs2D[channel]; + } + else + { + return null; + } + } + + /// + /// Returns the UVs (3D) from a specific channel. + /// + /// The channel index. + /// The UVs. + public Vector3[] GetUVs3D(int channel) + { + if (channel < 0 || channel >= UVChannelCount) + throw new ArgumentOutOfRangeException("channel"); + + if (uvs3D != null && uvs3D[channel] != null) + { + return uvs3D[channel]; + } + else + { + return null; + } + } + + /// + /// Returns the UVs (4D) from a specific channel. + /// + /// The channel index. + /// The UVs. + public Vector4[] GetUVs4D(int channel) + { + if (channel < 0 || channel >= UVChannelCount) + throw new ArgumentOutOfRangeException("channel"); + + if (uvs4D != null && uvs4D[channel] != null) + { + return uvs4D[channel]; + } + else + { + return null; + } + } + + /// + /// Returns the UVs (2D) from a specific channel. + /// + /// The channel index. + /// The UVs. + public void GetUVs(int channel, List uvs) + { + if (channel < 0 || channel >= UVChannelCount) + throw new ArgumentOutOfRangeException("channel"); + else if (uvs == null) + throw new ArgumentNullException("uvs"); + + uvs.Clear(); + if (uvs2D != null && uvs2D[channel] != null) + { + var uvData = uvs2D[channel]; + if (uvData != null) + { + uvs.AddRange(uvData); + } + } + } + + /// + /// Returns the UVs (3D) from a specific channel. + /// + /// The channel index. + /// The UVs. + public void GetUVs(int channel, List uvs) + { + if (channel < 0 || channel >= UVChannelCount) + throw new ArgumentOutOfRangeException("channel"); + else if (uvs == null) + throw new ArgumentNullException("uvs"); + + uvs.Clear(); + if (uvs3D != null && uvs3D[channel] != null) + { + var uvData = uvs3D[channel]; + if (uvData != null) + { + uvs.AddRange(uvData); + } + } + } + + /// + /// Returns the UVs (4D) from a specific channel. + /// + /// The channel index. + /// The UVs. 
+ public void GetUVs(int channel, List uvs) + { + if (channel < 0 || channel >= UVChannelCount) + throw new ArgumentOutOfRangeException("channel"); + else if (uvs == null) + throw new ArgumentNullException("uvs"); + + uvs.Clear(); + if (uvs4D != null && uvs4D[channel] != null) + { + var uvData = uvs4D[channel]; + if (uvData != null) + { + uvs.AddRange(uvData); + } + } + } + #endregion + + #region Setting + /// + /// Sets the UVs (2D) for a specific channel. + /// + /// The channel index. + /// The UVs. + public void SetUVs(int channel, Vector2[] uvs) + { + if (channel < 0 || channel >= UVChannelCount) + throw new ArgumentOutOfRangeException("channel"); + + if (uvs != null && uvs.Length > 0) + { + if (uvs.Length != vertices.Length) + throw new ArgumentException(string.Format("The vertex UVs must be as many as the vertices. Assigned: {0} Require: {1}", uvs.Length, vertices.Length)); + + if (uvs2D == null) + uvs2D = new Vector2[UVChannelCount][]; + + int uvCount = uvs.Length; + var uvSet = new Vector2[uvCount]; + uvs2D[channel] = uvSet; + uvs.CopyTo(uvSet, 0); + } + else + { + if (uvs2D != null) + { + uvs2D[channel] = null; + } + } + + if (uvs3D != null) + { + uvs3D[channel] = null; + } + if (uvs4D != null) + { + uvs4D[channel] = null; + } + } + + /// + /// Sets the UVs (3D) for a specific channel. + /// + /// The channel index. + /// The UVs. + public void SetUVs(int channel, Vector3[] uvs) + { + if (channel < 0 || channel >= UVChannelCount) + throw new ArgumentOutOfRangeException("channel"); + + if (uvs != null && uvs.Length > 0) + { + int uvCount = uvs.Length; + if (uvCount != vertices.Length) + throw new ArgumentException(string.Format("The vertex UVs must be as many as the vertices. Assigned: {0} Require: {1}", uvCount, vertices.Length), "uvs"); + + if (uvs3D == null) + uvs3D = new Vector3[UVChannelCount][]; + + var uvSet = new Vector3[uvCount]; + uvs3D[channel] = uvSet; + uvs.CopyTo(uvSet, 0); + } + else + { + if (uvs3D != null) + { + uvs3D[channel] = null; + } + } + + if (uvs2D != null) + { + uvs2D[channel] = null; + } + if (uvs4D != null) + { + uvs4D[channel] = null; + } + } + + /// + /// Sets the UVs (4D) for a specific channel. + /// + /// The channel index. + /// The UVs. + public void SetUVs(int channel, Vector4[] uvs) + { + if (channel < 0 || channel >= UVChannelCount) + throw new ArgumentOutOfRangeException("channel"); + + if (uvs != null && uvs.Length > 0) + { + int uvCount = uvs.Length; + if (uvCount != vertices.Length) + throw new ArgumentException(string.Format("The vertex UVs must be as many as the vertices. Assigned: {0} Require: {1}", uvCount, vertices.Length), "uvs"); + + if (uvs4D == null) + uvs4D = new Vector4[UVChannelCount][]; + + var uvSet = new Vector4[uvCount]; + uvs4D[channel] = uvSet; + uvs.CopyTo(uvSet, 0); + } + else + { + if (uvs4D != null) + { + uvs4D[channel] = null; + } + } + + if (uvs2D != null) + { + uvs2D[channel] = null; + } + if (uvs3D != null) + { + uvs3D[channel] = null; + } + } + + /// + /// Sets the UVs (2D) for a specific channel. + /// + /// The channel index. + /// The UVs. + public void SetUVs(int channel, List uvs) + { + if (channel < 0 || channel >= UVChannelCount) + throw new ArgumentOutOfRangeException("channel"); + + if (uvs != null && uvs.Count > 0) + { + int uvCount = uvs.Count; + if (uvCount != vertices.Length) + throw new ArgumentException(string.Format("The vertex UVs must be as many as the vertices. 
Assigned: {0} Require: {1}", uvCount, vertices.Length), "uvs"); + + if (uvs2D == null) + uvs2D = new Vector2[UVChannelCount][]; + + var uvSet = new Vector2[uvCount]; + uvs2D[channel] = uvSet; + uvs.CopyTo(uvSet, 0); + } + else + { + if (uvs2D != null) + { + uvs2D[channel] = null; + } + } + + if (uvs3D != null) + { + uvs3D[channel] = null; + } + if (uvs4D != null) + { + uvs4D[channel] = null; + } + } + + /// + /// Sets the UVs (3D) for a specific channel. + /// + /// The channel index. + /// The UVs. + public void SetUVs(int channel, List uvs) + { + if (channel < 0 || channel >= UVChannelCount) + throw new ArgumentOutOfRangeException("channel"); + + if (uvs != null && uvs.Count > 0) + { + int uvCount = uvs.Count; + if (uvCount != vertices.Length) + throw new ArgumentException(string.Format("The vertex UVs must be as many as the vertices. Assigned: {0} Require: {1}", uvCount, vertices.Length), "uvs"); + + if (uvs3D == null) + uvs3D = new Vector3[UVChannelCount][]; + + var uvSet = new Vector3[uvCount]; + uvs3D[channel] = uvSet; + uvs.CopyTo(uvSet, 0); + } + else + { + if (uvs3D != null) + { + uvs3D[channel] = null; + } + } + + if (uvs2D != null) + { + uvs2D[channel] = null; + } + if (uvs4D != null) + { + uvs4D[channel] = null; + } + } + + /// + /// Sets the UVs (4D) for a specific channel. + /// + /// The channel index. + /// The UVs. + public void SetUVs(int channel, List uvs) + { + if (channel < 0 || channel >= UVChannelCount) + throw new ArgumentOutOfRangeException("channel"); + + if (uvs != null && uvs.Count > 0) + { + int uvCount = uvs.Count; + if (uvCount != vertices.Length) + throw new ArgumentException(string.Format("The vertex UVs must be as many as the vertices. Assigned: {0} Require: {1}", uvCount, vertices.Length), "uvs"); + + if (uvs4D == null) + uvs4D = new Vector4[UVChannelCount][]; + + var uvSet = new Vector4[uvCount]; + uvs4D[channel] = uvSet; + uvs.CopyTo(uvSet, 0); + } + else + { + if (uvs4D != null) + { + uvs4D[channel] = null; + } + } + + if (uvs2D != null) + { + uvs2D[channel] = null; + } + if (uvs3D != null) + { + uvs3D[channel] = null; + } + } + #endregion + #endregion + + #region To String + /// + /// Returns the text-representation of this mesh. + /// + /// The text-representation. + public override string ToString() + { + return string.Format("Vertices: {0}", vertices.Length); + } + #endregion + #endregion + } +} \ No newline at end of file diff --git a/LightlessSync/ThirdParty/MeshDecimator/MeshDecimation.cs b/LightlessSync/ThirdParty/MeshDecimator/MeshDecimation.cs new file mode 100644 index 0000000..cb13fe8 --- /dev/null +++ b/LightlessSync/ThirdParty/MeshDecimator/MeshDecimation.cs @@ -0,0 +1,180 @@ +#region License +/* +MIT License + +Copyright(c) 2017-2018 Mattias Edlund + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all +copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. 
IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +SOFTWARE. +*/ +#endregion + +using System; +using MeshDecimator.Algorithms; + +namespace MeshDecimator +{ + #region Algorithm + /// + /// The decimation algorithms. + /// + public enum Algorithm + { + /// + /// The default algorithm. + /// + Default, + /// + /// The fast quadric mesh simplification algorithm. + /// + FastQuadricMesh + } + #endregion + + /// + /// The mesh decimation API. + /// + public static class MeshDecimation + { + #region Public Methods + #region Create Algorithm + /// + /// Creates a specific decimation algorithm. + /// + /// The desired algorithm. + /// The decimation algorithm. + public static DecimationAlgorithm CreateAlgorithm(Algorithm algorithm) + { + DecimationAlgorithm alg = null; + + switch (algorithm) + { + case Algorithm.Default: + case Algorithm.FastQuadricMesh: + alg = new FastQuadricMeshSimplification(); + break; + default: + throw new ArgumentException("The specified algorithm is not supported.", "algorithm"); + } + + return alg; + } + #endregion + + #region Decimate Mesh + /// + /// Decimates a mesh. + /// + /// The mesh to decimate. + /// The target triangle count. + /// The decimated mesh. + public static Mesh DecimateMesh(Mesh mesh, int targetTriangleCount) + { + return DecimateMesh(Algorithm.Default, mesh, targetTriangleCount); + } + + /// + /// Decimates a mesh. + /// + /// The desired algorithm. + /// The mesh to decimate. + /// The target triangle count. + /// The decimated mesh. + public static Mesh DecimateMesh(Algorithm algorithm, Mesh mesh, int targetTriangleCount) + { + if (mesh == null) + throw new ArgumentNullException("mesh"); + + var decimationAlgorithm = CreateAlgorithm(algorithm); + return DecimateMesh(decimationAlgorithm, mesh, targetTriangleCount); + } + + /// + /// Decimates a mesh. + /// + /// The decimation algorithm. + /// The mesh to decimate. + /// The target triangle count. + /// The decimated mesh. + public static Mesh DecimateMesh(DecimationAlgorithm algorithm, Mesh mesh, int targetTriangleCount) + { + if (algorithm == null) + throw new ArgumentNullException("algorithm"); + else if (mesh == null) + throw new ArgumentNullException("mesh"); + + int currentTriangleCount = mesh.TriangleCount; + if (targetTriangleCount > currentTriangleCount) + targetTriangleCount = currentTriangleCount; + else if (targetTriangleCount < 0) + targetTriangleCount = 0; + + algorithm.Initialize(mesh); + algorithm.DecimateMesh(targetTriangleCount); + return algorithm.ToMesh(); + } + #endregion + + #region Decimate Mesh Lossless + /// + /// Decimates a mesh without losing any quality. + /// + /// The mesh to decimate. + /// The decimated mesh. + public static Mesh DecimateMeshLossless(Mesh mesh) + { + return DecimateMeshLossless(Algorithm.Default, mesh); + } + + /// + /// Decimates a mesh without losing any quality. + /// + /// The desired algorithm. + /// The mesh to decimate. + /// The decimated mesh. + public static Mesh DecimateMeshLossless(Algorithm algorithm, Mesh mesh) + { + if (mesh == null) + throw new ArgumentNullException("mesh"); + + var decimationAlgorithm = CreateAlgorithm(algorithm); + return DecimateMeshLossless(decimationAlgorithm, mesh); + } + + /// + /// Decimates a mesh without losing any quality. + /// + /// The decimation algorithm. + /// The mesh to decimate. 
+ /// The decimated mesh. + public static Mesh DecimateMeshLossless(DecimationAlgorithm algorithm, Mesh mesh) + { + if (algorithm == null) + throw new ArgumentNullException("algorithm"); + else if (mesh == null) + throw new ArgumentNullException("mesh"); + + int currentTriangleCount = mesh.TriangleCount; + algorithm.Initialize(mesh); + algorithm.DecimateMeshLossless(); + return algorithm.ToMesh(); + } + #endregion + #endregion + } +} \ No newline at end of file diff --git a/LightlessSync/UI/CompactUI.cs b/LightlessSync/UI/CompactUI.cs index 79fbc88..43f0c0b 100644 --- a/LightlessSync/UI/CompactUI.cs +++ b/LightlessSync/UI/CompactUI.cs @@ -52,6 +52,10 @@ public class CompactUi : WindowMediatorSubscriberBase private readonly LightlessConfigService _configService; private readonly LightlessMediator _lightlessMediator; private readonly PairLedger _pairLedger; + private readonly ConcurrentDictionary> _currentDownloads = new(); + private readonly DrawEntityFactory _drawEntityFactory; + private readonly FileUploadManager _fileTransferManager; + private readonly PlayerPerformanceConfigService _playerPerformanceConfig; private readonly PairUiService _pairUiService; private readonly PlayerPerformanceConfigService _playerPerformanceConfig; private readonly ServerConfigurationManager _serverManager; @@ -991,6 +995,7 @@ public class CompactUi : WindowMediatorSubscriberBase VisiblePairSortMode.VramUsage => SortVisibleByMetric(entryList, e => e.LastAppliedApproximateVramBytes), VisiblePairSortMode.EffectiveVramUsage => SortVisibleByMetric(entryList, e => e.LastAppliedApproximateEffectiveVramBytes), VisiblePairSortMode.TriangleCount => SortVisibleByMetric(entryList, e => e.LastAppliedDataTris), + VisiblePairSortMode.EffectiveTriangleCount => SortVisibleByMetric(entryList, e => e.LastAppliedApproximateEffectiveTris), VisiblePairSortMode.Alphabetical => [.. entryList.OrderBy(e => AlphabeticalSortKey(e), StringComparer.OrdinalIgnoreCase)], VisiblePairSortMode.PreferredDirectPairs => SortVisibleByPreferred(entryList), _ => SortEntries(entryList), diff --git a/LightlessSync/UI/Components/DrawFolderTag.cs b/LightlessSync/UI/Components/DrawFolderTag.cs index b91617a..0870a0d 100644 --- a/LightlessSync/UI/Components/DrawFolderTag.cs +++ b/LightlessSync/UI/Components/DrawFolderTag.cs @@ -326,6 +326,7 @@ public class DrawFolderTag : DrawFolderBase VisiblePairSortMode.VramUsage => "VRAM usage (descending)", VisiblePairSortMode.EffectiveVramUsage => "Effective VRAM usage (descending)", VisiblePairSortMode.TriangleCount => "Triangle count (descending)", + VisiblePairSortMode.EffectiveTriangleCount => "Effective triangle count (descending)", VisiblePairSortMode.PreferredDirectPairs => "Preferred permissions & Direct pairs", _ => "Default", }; diff --git a/LightlessSync/UI/Components/DrawUserPair.cs b/LightlessSync/UI/Components/DrawUserPair.cs index c8725e2..5524226 100644 --- a/LightlessSync/UI/Components/DrawUserPair.cs +++ b/LightlessSync/UI/Components/DrawUserPair.cs @@ -429,6 +429,7 @@ public class DrawUserPair _pair.LastAppliedApproximateVRAMBytes, _pair.LastAppliedApproximateEffectiveVRAMBytes, _pair.LastAppliedDataTris, + _pair.LastAppliedApproximateEffectiveTris, _pair.IsPaired, groupDisplays is null ? ImmutableArray.Empty : ImmutableArray.CreateRange(groupDisplays)); @@ -444,6 +445,8 @@ public class DrawUserPair private static string BuildTooltip(in TooltipSnapshot snapshot) { var builder = new StringBuilder(256); + static string FormatTriangles(long count) => + count > 1000 ? 
(count / 1000d).ToString("0.0'k'") : count.ToString(); if (snapshot.IsPaused) { @@ -510,9 +513,13 @@ public class DrawUserPair { builder.Append(Environment.NewLine); builder.Append("Approx. Triangle Count (excl. Vanilla): "); - builder.Append(snapshot.LastAppliedDataTris > 1000 - ? (snapshot.LastAppliedDataTris / 1000d).ToString("0.0'k'") - : snapshot.LastAppliedDataTris); + builder.Append(FormatTriangles(snapshot.LastAppliedDataTris)); + if (snapshot.LastAppliedApproximateEffectiveTris >= 0) + { + builder.Append(" (Effective: "); + builder.Append(FormatTriangles(snapshot.LastAppliedApproximateEffectiveTris)); + builder.Append(')'); + } } } @@ -544,11 +551,12 @@ public class DrawUserPair long LastAppliedApproximateVRAMBytes, long LastAppliedApproximateEffectiveVRAMBytes, long LastAppliedDataTris, + long LastAppliedApproximateEffectiveTris, bool IsPaired, ImmutableArray GroupDisplays) { public static TooltipSnapshot Empty { get; } = - new(false, false, false, IndividualPairStatus.None, string.Empty, string.Empty, -1, -1, -1, -1, false, ImmutableArray.Empty); + new(false, false, false, IndividualPairStatus.None, string.Empty, string.Empty, -1, -1, -1, -1, -1, false, ImmutableArray.Empty); } private void DrawPairedClientMenu() diff --git a/LightlessSync/UI/DataAnalysisUi.cs b/LightlessSync/UI/DataAnalysisUi.cs index a3061a7..e0bfcb1 100644 --- a/LightlessSync/UI/DataAnalysisUi.cs +++ b/LightlessSync/UI/DataAnalysisUi.cs @@ -11,6 +11,7 @@ using LightlessSync.LightlessConfiguration; using LightlessSync.Services; using LightlessSync.Services.Mediator; using LightlessSync.Services.TextureCompression; +using LightlessSync.UI.Models; using LightlessSync.Utils; using Microsoft.Extensions.Logging; using OtterTex; @@ -34,12 +35,15 @@ public class DataAnalysisUi : WindowMediatorSubscriberBase private const float TextureDetailSplitterWidth = 12f; private const float TextureDetailSplitterCollapsedWidth = 18f; private const float SelectedFilePanelLogicalHeight = 90f; + private const float TextureHoverPreviewDelaySeconds = 1.75f; + private const float TextureHoverPreviewSize = 350f; private static readonly Vector4 SelectedTextureRowTextColor = new(0f, 0f, 0f, 1f); private readonly CharacterAnalyzer _characterAnalyzer; private readonly Progress _conversionProgress = new(); private readonly IpcManager _ipcManager; private readonly UiSharedService _uiSharedService; + private readonly LightlessConfigService _configService; private readonly PlayerPerformanceConfigService _playerPerformanceConfig; private readonly TransientResourceManager _transientResourceManager; private readonly TransientConfigService _transientConfigService; @@ -77,6 +81,7 @@ public class DataAnalysisUi : WindowMediatorSubscriberBase private string _selectedJobEntry = string.Empty; private string _filterGamePath = string.Empty; private string _filterFilePath = string.Empty; + private string _textureHoverKey = string.Empty; private int _conversionCurrentFileProgress = 0; private int _conversionTotalJobs; @@ -87,6 +92,11 @@ public class DataAnalysisUi : WindowMediatorSubscriberBase private bool _textureRowsDirty = true; private bool _textureDetailCollapsed = false; private bool _conversionFailed; + private double _textureHoverStartTime = 0; +#if DEBUG + private bool _debugCompressionModalOpen = false; + private TextureConversionProgress? 
_debugConversionProgress; +#endif private bool _showAlreadyAddedTransients = false; private bool _acknowledgeReview = false; @@ -98,10 +108,12 @@ public class DataAnalysisUi : WindowMediatorSubscriberBase private TextureUsageCategory? _textureCategoryFilter = null; private TextureMapKind? _textureMapFilter = null; private TextureCompressionTarget? _textureTargetFilter = null; + private TextureFormatSortMode _textureFormatSortMode = TextureFormatSortMode.None; public DataAnalysisUi(ILogger logger, LightlessMediator mediator, CharacterAnalyzer characterAnalyzer, IpcManager ipcManager, PerformanceCollectorService performanceCollectorService, UiSharedService uiSharedService, + LightlessConfigService configService, PlayerPerformanceConfigService playerPerformanceConfig, TransientResourceManager transientResourceManager, TransientConfigService transientConfigService, TextureCompressionService textureCompressionService, TextureMetadataHelper textureMetadataHelper) @@ -110,6 +122,7 @@ public class DataAnalysisUi : WindowMediatorSubscriberBase _characterAnalyzer = characterAnalyzer; _ipcManager = ipcManager; _uiSharedService = uiSharedService; + _configService = configService; _playerPerformanceConfig = playerPerformanceConfig; _transientResourceManager = transientResourceManager; _transientConfigService = transientConfigService; @@ -135,21 +148,30 @@ public class DataAnalysisUi : WindowMediatorSubscriberBase private void HandleConversionModal() { - if (_conversionTask == null) + bool hasConversion = _conversionTask != null; +#if DEBUG + bool showDebug = _debugCompressionModalOpen && !hasConversion; +#else + const bool showDebug = false; +#endif + if (!hasConversion && !showDebug) { return; } - if (_conversionTask.IsCompleted) + if (hasConversion && _conversionTask!.IsCompleted) { ResetConversionModalState(); - return; + if (!showDebug) + { + return; + } } _showModal = true; - if (ImGui.BeginPopupModal("Texture Compression in Progress", ImGuiWindowFlags.AlwaysAutoResize)) + if (ImGui.BeginPopupModal("Texture Compression in Progress", UiSharedService.PopupWindowFlags)) { - DrawConversionModalContent(); + DrawConversionModalContent(showDebug); ImGui.EndPopup(); } else @@ -164,31 +186,190 @@ public class DataAnalysisUi : WindowMediatorSubscriberBase } } - private void DrawConversionModalContent() + private void DrawConversionModalContent(bool isDebugPreview) { - var progress = _lastConversionProgress; + var scale = ImGuiHelpers.GlobalScale; + TextureConversionProgress? progress; +#if DEBUG + progress = isDebugPreview ? _debugConversionProgress : _lastConversionProgress; +#else + progress = _lastConversionProgress; +#endif var total = progress?.Total ?? Math.Max(_conversionTotalJobs, 1); var completed = progress != null - ? Math.Min(progress.Completed + 1, total) - : _conversionCurrentFileProgress; - var currentLabel = !string.IsNullOrEmpty(_conversionCurrentFileName) - ? _conversionCurrentFileName - : "Preparing..."; + ? Math.Clamp(progress.Completed + 1, 0, total) + : Math.Clamp(_conversionCurrentFileProgress, 0, total); + var percent = total > 0 ? Math.Clamp(completed / (float)total, 0f, 1f) : 0f; - ImGui.TextUnformatted($"Compressing textures ({completed}/{total})"); - UiSharedService.TextWrapped("Current file: " + currentLabel); + var job = progress?.CurrentJob; + var inputPath = job?.InputFile ?? string.Empty; + var targetLabel = job != null ? job.TargetType.ToString() : "Unknown"; + var currentLabel = !string.IsNullOrEmpty(inputPath) + ? 
Path.GetFileName(inputPath) + : !string.IsNullOrEmpty(_conversionCurrentFileName) ? _conversionCurrentFileName : "Preparing..."; + var mapKind = !string.IsNullOrEmpty(inputPath) + ? _textureMetadataHelper.DetermineMapKind(inputPath) + : TextureMapKind.Unknown; - if (_conversionFailed) + var accent = UIColors.Get("LightlessPurple"); + var accentBg = new Vector4(accent.X, accent.Y, accent.Z, 0.18f); + var accentBorder = new Vector4(accent.X, accent.Y, accent.Z, 0.4f); + var headerHeight = MathF.Max(ImGui.GetTextLineHeightWithSpacing() * 2.6f, 46f * scale); + + using (ImRaii.PushStyle(ImGuiStyleVar.ChildRounding, 6f * scale)) + using (ImRaii.PushStyle(ImGuiStyleVar.ChildBorderSize, MathF.Max(1f, ImGui.GetStyle().ChildBorderSize))) + using (ImRaii.PushStyle(ImGuiStyleVar.WindowPadding, new Vector2(12f * scale, 6f * scale))) + using (ImRaii.PushStyle(ImGuiStyleVar.ItemSpacing, new Vector2(12f * scale, 2f * scale))) + using (ImRaii.PushColor(ImGuiCol.ChildBg, UiSharedService.Color(accentBg))) + using (ImRaii.PushColor(ImGuiCol.Border, UiSharedService.Color(accentBorder))) + using (var header = ImRaii.Child("compressionHeader", new Vector2(-1f, headerHeight), true, ImGuiWindowFlags.NoScrollbar | ImGuiWindowFlags.NoScrollWithMouse)) { - UiSharedService.ColorText("Conversion encountered errors. Please review the log for details.", ImGuiColors.DalamudRed); + if (header) + { + if (ImGui.BeginTable("compressionHeaderTable", 2, + ImGuiTableFlags.SizingStretchProp | ImGuiTableFlags.NoBordersInBody | ImGuiTableFlags.NoHostExtendX)) + { + ImGui.TableNextRow(); + ImGui.TableNextColumn(); + DrawCompressionTitle(accent, scale); + + var statusText = isDebugPreview ? "Preview mode" : "Working..."; + var statusColor = isDebugPreview ? UIColors.Get("LightlessYellow") : ImGuiColors.DalamudGrey; + UiSharedService.ColorText(statusText, statusColor); + + ImGui.TableNextColumn(); + var progressText = $"{completed}/{total}"; + var percentText = $"{percent * 100f:0}%"; + var summaryText = $"{progressText} ({percentText})"; + var summaryWidth = ImGui.CalcTextSize(summaryText).X; + ImGui.SetCursorPosX(ImGui.GetCursorPosX() + MathF.Max(0f, ImGui.GetColumnWidth() - summaryWidth)); + UiSharedService.ColorText(summaryText, ImGuiColors.DalamudGrey); + + ImGui.EndTable(); + } + } } - if (_uiSharedService.IconTextButton(FontAwesomeIcon.StopCircle, "Cancel conversion")) + ImGuiHelpers.ScaledDummy(6); + + using (ImRaii.PushStyle(ImGuiStyleVar.FrameRounding, 4f * scale)) + using (ImRaii.PushStyle(ImGuiStyleVar.FramePadding, new Vector2(0f, 4f * scale))) + using (ImRaii.PushColor(ImGuiCol.FrameBg, UiSharedService.Color(new Vector4(0.15f, 0.15f, 0.18f, 1f)))) + using (ImRaii.PushColor(ImGuiCol.PlotHistogram, UiSharedService.Color(accent))) { - _conversionCancellationTokenSource.Cancel(); + ImGui.ProgressBar(percent, new Vector2(-1f, 0f), $"{percent * 100f:0}%"); } - UiSharedService.SetScaledWindowSize(520); + ImGuiHelpers.ScaledDummy(6); + + var infoAccent = UIColors.Get("LightlessBlue"); + var infoBg = new Vector4(infoAccent.X, infoAccent.Y, infoAccent.Z, 0.12f); + var infoBorder = new Vector4(infoAccent.X, infoAccent.Y, infoAccent.Z, 0.32f); + const int detailRows = 3; + var detailHeight = MathF.Max(ImGui.GetTextLineHeightWithSpacing() * (detailRows + 1.2f), 72f * scale); + + using (ImRaii.PushStyle(ImGuiStyleVar.ChildRounding, 5f * scale)) + using (ImRaii.PushStyle(ImGuiStyleVar.ChildBorderSize, MathF.Max(1f, ImGui.GetStyle().ChildBorderSize))) + using (ImRaii.PushStyle(ImGuiStyleVar.WindowPadding, new Vector2(10f * scale, 6f * 
scale))) + using (ImRaii.PushColor(ImGuiCol.ChildBg, UiSharedService.Color(infoBg))) + using (ImRaii.PushColor(ImGuiCol.Border, UiSharedService.Color(infoBorder))) + using (var details = ImRaii.Child("compressionDetail", new Vector2(-1f, detailHeight), true, ImGuiWindowFlags.NoScrollbar | ImGuiWindowFlags.NoScrollWithMouse)) + { + if (details) + { + if (ImGui.BeginTable("compressionDetailTable", 2, + ImGuiTableFlags.SizingFixedFit | ImGuiTableFlags.NoBordersInBody | ImGuiTableFlags.PadOuterX)) + { + DrawDetailRow("Current file", currentLabel, inputPath); + DrawDetailRow("Target format", targetLabel, null); + DrawDetailRow("Map type", mapKind.ToString(), null); + ImGui.EndTable(); + } + } + } + + if (_conversionFailed && !isDebugPreview) + { + ImGuiHelpers.ScaledDummy(4); + _uiSharedService.IconText(FontAwesomeIcon.ExclamationTriangle, ImGuiColors.DalamudRed); + ImGui.SameLine(0f, 6f * scale); + UiSharedService.TextWrapped("Conversion encountered errors. Please review the log for details.", color: ImGuiColors.DalamudRed); + } + + ImGuiHelpers.ScaledDummy(6); + if (!isDebugPreview) + { + if (_uiSharedService.IconTextButton(FontAwesomeIcon.StopCircle, "Cancel conversion")) + { + _conversionCancellationTokenSource.Cancel(); + } + } + else + { +#if DEBUG + if (_uiSharedService.IconTextButton(FontAwesomeIcon.Times, "Close preview")) + { + CloseDebugCompressionModal(); + } +#endif + } + + UiSharedService.SetScaledWindowSize(600); + + void DrawDetailRow(string label, string value, string? tooltip) + { + ImGui.TableNextRow(); + ImGui.TableNextColumn(); + using (ImRaii.PushColor(ImGuiCol.Text, ImGuiColors.DalamudGrey)) + { + ImGui.TextUnformatted(label); + } + ImGui.TableNextColumn(); + ImGui.TextUnformatted(value); + if (!string.IsNullOrEmpty(tooltip)) + { + UiSharedService.AttachToolTip(tooltip); + } + } + + void DrawCompressionTitle(Vector4 iconColor, float localScale) + { + const string title = "Texture Compression"; + var spacing = 6f * localScale; + + var iconText = FontAwesomeIcon.CompressArrowsAlt.ToIconString(); + Vector2 iconSize; + using (_uiSharedService.IconFont.Push()) + { + iconSize = ImGui.CalcTextSize(iconText); + } + + Vector2 titleSize; + using (_uiSharedService.MediumFont.Push()) + { + titleSize = ImGui.CalcTextSize(title); + } + + var lineHeight = MathF.Max(iconSize.Y, titleSize.Y); + var iconOffsetY = (lineHeight - iconSize.Y) / 2f; + var textOffsetY = (lineHeight - titleSize.Y) / 2f; + + var start = ImGui.GetCursorScreenPos(); + var drawList = ImGui.GetWindowDrawList(); + + using (_uiSharedService.IconFont.Push()) + { + drawList.AddText(new Vector2(start.X, start.Y + iconOffsetY), UiSharedService.Color(iconColor), iconText); + } + + using (_uiSharedService.MediumFont.Push()) + { + var textPos = new Vector2(start.X + iconSize.X + spacing, start.Y + textOffsetY); + drawList.AddText(textPos, ImGui.GetColorU32(ImGuiCol.Text), title); + } + + ImGui.Dummy(new Vector2(iconSize.X + spacing + titleSize.X, lineHeight)); + } } private void ResetConversionModalState() @@ -202,6 +383,41 @@ public class DataAnalysisUi : WindowMediatorSubscriberBase _conversionTotalJobs = 0; } +#if DEBUG + private void OpenCompressionDebugModal() + { + if (_conversionTask != null && !_conversionTask.IsCompleted) + { + return; + } + + _debugCompressionModalOpen = true; + _debugConversionProgress = new TextureConversionProgress( + Completed: 3, + Total: 10, + CurrentJob: new TextureConversionJob( + @"C:\Lightless\Mods\Textures\example_diffuse.tex", + @"C:\Lightless\Mods\Textures\example_diffuse_bc7.tex", + 
Penumbra.Api.Enums.TextureType.Bc7Tex)); + _showModal = true; + _modalOpen = false; + } + + private void ResetDebugCompressionModalState() + { + _debugCompressionModalOpen = false; + _debugConversionProgress = null; + } + + private void CloseDebugCompressionModal() + { + ResetDebugCompressionModalState(); + _showModal = false; + _modalOpen = false; + ImGui.CloseCurrentPopup(); + } +#endif + private void RefreshAnalysisCache() { if (!_hasUpdate) @@ -757,6 +973,16 @@ public class DataAnalysisUi : WindowMediatorSubscriberBase ResetTextureFilters(); InvalidateTextureRows(); _conversionFailed = false; +#if DEBUG + ResetDebugCompressionModalState(); +#endif + var savedFormatSort = _configService.Current.TextureFormatSortMode; + if (!Enum.IsDefined(typeof(TextureFormatSortMode), savedFormatSort)) + { + savedFormatSort = TextureFormatSortMode.None; + } + + SetTextureFormatSortMode(savedFormatSort, persist: false); } protected override void Dispose(bool disposing) @@ -1955,6 +2181,17 @@ public class DataAnalysisUi : WindowMediatorSubscriberBase { InvalidateTextureRows(); } +#if DEBUG + ImGui.SameLine(); + using (ImRaii.Disabled(conversionRunning || !UiSharedService.CtrlPressed())) + { + if (_uiSharedService.IconTextButton(FontAwesomeIcon.Cog, "Preview popup (debug)", 200f * scale)) + { + OpenCompressionDebugModal(); + } + } + UiSharedService.AttachToolTip("Hold CTRL to open the compression popup preview."); +#endif TextureRow? lastSelected = null; using (var table = ImRaii.Table("textureDataTable", 9, @@ -1973,26 +2210,56 @@ public class DataAnalysisUi : WindowMediatorSubscriberBase ImGui.TableSetupColumn("Original", ImGuiTableColumnFlags.PreferSortDescending); ImGui.TableSetupColumn("Compressed", ImGuiTableColumnFlags.PreferSortDescending); ImGui.TableSetupScrollFreeze(0, 1); - ImGui.TableHeadersRow(); + DrawTextureTableHeaderRow(); var targets = _textureCompressionService.SelectableTargets; IEnumerable orderedRows = rows; var sortSpecs = ImGui.TableGetSortSpecs(); + var sizeSortColumn = -1; + var sizeSortDirection = ImGuiSortDirection.Ascending; if (sortSpecs.SpecsCount > 0) { var spec = sortSpecs.Specs[0]; - orderedRows = spec.ColumnIndex switch + if (spec.ColumnIndex is 7 or 8) { - 7 => spec.SortDirection == ImGuiSortDirection.Ascending - ? rows.OrderBy(r => r.OriginalSize) - : rows.OrderByDescending(r => r.OriginalSize), - 8 => spec.SortDirection == ImGuiSortDirection.Ascending - ? rows.OrderBy(r => r.CompressedSize) - : rows.OrderByDescending(r => r.CompressedSize), - _ => rows - }; + sizeSortColumn = spec.ColumnIndex; + sizeSortDirection = spec.SortDirection; + } + } + var hasSizeSort = sizeSortColumn != -1; + var indexedRows = rows.Select((row, idx) => (row, idx)); + + if (_textureFormatSortMode != TextureFormatSortMode.None) + { + bool compressedFirst = _textureFormatSortMode == TextureFormatSortMode.CompressedFirst; + int GroupKey(TextureRow row) => row.IsAlreadyCompressed == compressedFirst ? 0 : 1; + long SizeKey(TextureRow row) => sizeSortColumn == 7 ? row.OriginalSize : row.CompressedSize; + + var ordered = indexedRows.OrderBy(pair => GroupKey(pair.row)); + if (hasSizeSort) + { + ordered = sizeSortDirection == ImGuiSortDirection.Ascending + ? ordered.ThenBy(pair => SizeKey(pair.row)) + : ordered.ThenByDescending(pair => SizeKey(pair.row)); + } + + orderedRows = ordered + .ThenBy(pair => pair.idx) + .Select(pair => pair.row); + } + else if (hasSizeSort) + { + long SizeKey(TextureRow row) => sizeSortColumn == 7 ? 
row.OriginalSize : row.CompressedSize; + + orderedRows = sizeSortDirection == ImGuiSortDirection.Ascending + ? indexedRows.OrderBy(pair => SizeKey(pair.row)).ThenBy(pair => pair.idx).Select(pair => pair.row) + : indexedRows.OrderByDescending(pair => SizeKey(pair.row)).ThenBy(pair => pair.idx).Select(pair => pair.row); + } + + if (sortSpecs.SpecsCount > 0) + { sortSpecs.SpecsDirty = false; } @@ -2034,6 +2301,79 @@ public class DataAnalysisUi : WindowMediatorSubscriberBase } } } + + private void DrawTextureTableHeaderRow() + { + ImGui.TableNextRow(ImGuiTableRowFlags.Headers); + + DrawHeaderCell(0, "##select"); + DrawHeaderCell(1, "Texture"); + DrawHeaderCell(2, "Slot"); + DrawHeaderCell(3, "Map"); + DrawFormatHeaderCell(); + DrawHeaderCell(5, "Recommended"); + DrawHeaderCell(6, "Target"); + DrawHeaderCell(7, "Original"); + DrawHeaderCell(8, "Compressed"); + } + + private static void DrawHeaderCell(int columnIndex, string label) + { + ImGui.TableSetColumnIndex(columnIndex); + ImGui.TableHeader(label); + } + + private void DrawFormatHeaderCell() + { + ImGui.TableSetColumnIndex(4); + ImGui.TableHeader(GetFormatHeaderLabel()); + + if (ImGui.IsItemClicked(ImGuiMouseButton.Left)) + { + CycleTextureFormatSortMode(); + } + + if (ImGui.IsItemHovered()) + { + ImGui.SetTooltip("Click to cycle sort: normal, compressed first, uncompressed first."); + } + } + + private string GetFormatHeaderLabel() + => _textureFormatSortMode switch + { + TextureFormatSortMode.CompressedFirst => "Format (C)##formatHeader", + TextureFormatSortMode.UncompressedFirst => "Format (U)##formatHeader", + _ => "Format##formatHeader" + }; + + private void SetTextureFormatSortMode(TextureFormatSortMode mode, bool persist = true) + { + if (_textureFormatSortMode == mode) + { + return; + } + + _textureFormatSortMode = mode; + if (persist) + { + _configService.Current.TextureFormatSortMode = mode; + _configService.Save(); + } + } + + private void CycleTextureFormatSortMode() + { + var nextMode = _textureFormatSortMode switch + { + TextureFormatSortMode.None => TextureFormatSortMode.CompressedFirst, + TextureFormatSortMode.CompressedFirst => TextureFormatSortMode.UncompressedFirst, + _ => TextureFormatSortMode.None + }; + + SetTextureFormatSortMode(nextMode); + } + private void StartTextureConversion() { if (_conversionTask != null && !_conversionTask.IsCompleted) @@ -2335,11 +2675,30 @@ public class DataAnalysisUi : WindowMediatorSubscriberBase { if (_texturePreviews.TryGetValue(key, out var state)) { + var loadTask = state.LoadTask; + if (loadTask is { IsCompleted: false }) + { + _ = loadTask.ContinueWith(_ => + { + state.Texture?.Dispose(); + }, TaskScheduler.Default); + } + state.Texture?.Dispose(); _texturePreviews.Remove(key); } } + private void ClearHoverPreview(TextureRow row) + { + if (string.Equals(_selectedTextureKey, row.Key, StringComparison.Ordinal)) + { + return; + } + + ResetPreview(row.Key); + } + private TextureResolutionInfo? 
GetTextureResolution(TextureRow row) { if (_textureResolutionCache.TryGetValue(row.Key, out var cached)) @@ -2440,7 +2799,7 @@ public class DataAnalysisUi : WindowMediatorSubscriberBase UiSharedService.AttachToolTip("Already stored in a compressed format; additional compression is disabled."); } - DrawSelectableColumn(isSelected, () => + var nameHovered = DrawSelectableColumn(isSelected, () => { var selectableLabel = $"{row.DisplayName}##texName{index}"; if (ImGui.Selectable(selectableLabel, isSelected)) @@ -2448,20 +2807,20 @@ public class DataAnalysisUi : WindowMediatorSubscriberBase _selectedTextureKey = isSelected ? string.Empty : key; } - return () => UiSharedService.AttachToolTip($"{row.PrimaryFilePath}{UiSharedService.TooltipSeparator}{string.Join(Environment.NewLine, row.GamePaths)}"); + return null; }); - DrawSelectableColumn(isSelected, () => + _ = DrawSelectableColumn(isSelected, () => { ImGui.TextUnformatted(row.Slot); return null; }); - DrawSelectableColumn(isSelected, () => + _ = DrawSelectableColumn(isSelected, () => { ImGui.TextUnformatted(row.MapKind.ToString()); return null; }); - DrawSelectableColumn(isSelected, () => + _ = DrawSelectableColumn(isSelected, () => { Action? tooltipAction = null; ImGui.TextUnformatted(row.Format); @@ -2475,7 +2834,7 @@ public class DataAnalysisUi : WindowMediatorSubscriberBase return tooltipAction; }); - DrawSelectableColumn(isSelected, () => + _ = DrawSelectableColumn(isSelected, () => { if (row.SuggestedTarget.HasValue) { @@ -2537,19 +2896,21 @@ public class DataAnalysisUi : WindowMediatorSubscriberBase UiSharedService.AttachToolTip("This texture is already compressed and cannot be processed again."); } - DrawSelectableColumn(isSelected, () => + _ = DrawSelectableColumn(isSelected, () => { ImGui.TextUnformatted(UiSharedService.ByteToString(row.OriginalSize)); return null; }); - DrawSelectableColumn(isSelected, () => + _ = DrawSelectableColumn(isSelected, () => { ImGui.TextUnformatted(UiSharedService.ByteToString(row.CompressedSize)); return null; }); + + DrawTextureRowHoverTooltip(row, nameHovered); } - private static void DrawSelectableColumn(bool isSelected, Func draw) + private static bool DrawSelectableColumn(bool isSelected, Func draw) { ImGui.TableNextColumn(); if (isSelected) @@ -2558,6 +2919,7 @@ public class DataAnalysisUi : WindowMediatorSubscriberBase } var after = draw(); + var hovered = ImGui.IsItemHovered(ImGuiHoveredFlags.AllowWhenDisabled | ImGuiHoveredFlags.AllowWhenBlockedByActiveItem); if (isSelected) { @@ -2565,6 +2927,127 @@ public class DataAnalysisUi : WindowMediatorSubscriberBase } after?.Invoke(); + return hovered; + } + + private void DrawTextureRowHoverTooltip(TextureRow row, bool isHovered) + { + if (!isHovered) + { + if (string.Equals(_textureHoverKey, row.Key, StringComparison.Ordinal)) + { + _textureHoverKey = string.Empty; + _textureHoverStartTime = 0; + ClearHoverPreview(row); + } + return; + } + + var now = ImGui.GetTime(); + if (!string.Equals(_textureHoverKey, row.Key, StringComparison.Ordinal)) + { + _textureHoverKey = row.Key; + _textureHoverStartTime = now; + } + + var elapsed = now - _textureHoverStartTime; + if (elapsed < TextureHoverPreviewDelaySeconds) + { + var progress = (float)Math.Clamp(elapsed / TextureHoverPreviewDelaySeconds, 0f, 1f); + DrawTextureRowTextTooltip(row, progress); + return; + } + + DrawTextureRowPreviewTooltip(row); + } + + private void DrawTextureRowTextTooltip(TextureRow row, float progress) + { + ImGui.BeginTooltip(); + ImGui.SetWindowFontScale(1f); + 
DrawTextureRowTooltipBody(row); + ImGuiHelpers.ScaledDummy(4); + DrawTextureHoverProgressBar(progress, GetTooltipContentWidth()); + ImGui.EndTooltip(); + } + + private void DrawTextureRowPreviewTooltip(TextureRow row) + { + ImGui.BeginTooltip(); + ImGui.SetWindowFontScale(1f); + + DrawTextureRowTooltipBody(row); + ImGuiHelpers.ScaledDummy(4); + + var previewSize = new Vector2(TextureHoverPreviewSize * ImGuiHelpers.GlobalScale); + var (previewTexture, previewLoading, previewError) = GetTexturePreview(row); + if (previewTexture != null) + { + ImGui.Image(previewTexture.Handle, previewSize); + } + else + { + using (ImRaii.Child("textureHoverPreview", previewSize, true)) + { + UiSharedService.TextWrapped(previewLoading ? "Loading preview..." : previewError ?? "Preview unavailable."); + } + } + ImGui.EndTooltip(); + } + + private static void DrawTextureRowTooltipBody(TextureRow row) + { + var text = row.GamePaths.Count > 0 + ? $"{row.PrimaryFilePath}{UiSharedService.TooltipSeparator}{string.Join(Environment.NewLine, row.GamePaths)}" + : row.PrimaryFilePath; + + var wrapWidth = GetTextureHoverTooltipWidth(); + ImGui.PushTextWrapPos(ImGui.GetCursorPosX() + wrapWidth); + if (text.Contains(UiSharedService.TooltipSeparator, StringComparison.Ordinal)) + { + var splitText = text.Split(UiSharedService.TooltipSeparator, StringSplitOptions.RemoveEmptyEntries); + for (int i = 0; i < splitText.Length; i++) + { + ImGui.TextUnformatted(splitText[i]); + if (i != splitText.Length - 1) + { + ImGui.Separator(); + } + } + } + else + { + ImGui.TextUnformatted(text); + } + ImGui.PopTextWrapPos(); + } + + private static void DrawTextureHoverProgressBar(float progress, float width) + { + var scale = ImGuiHelpers.GlobalScale; + var barHeight = 4f * scale; + var barWidth = width > 0f ? 
width : -1f; + using (ImRaii.PushStyle(ImGuiStyleVar.FrameRounding, 3f * scale)) + using (ImRaii.PushStyle(ImGuiStyleVar.FramePadding, Vector2.Zero)) + using (ImRaii.PushColor(ImGuiCol.PlotHistogram, UiSharedService.Color(UIColors.Get("LightlessPurple")))) + { + ImGui.ProgressBar(progress, new Vector2(barWidth, barHeight), string.Empty); + } + } + + private static float GetTextureHoverTooltipWidth() + => ImGui.GetFontSize() * 35f; + + private static float GetTooltipContentWidth() + { + var min = ImGui.GetWindowContentRegionMin(); + var max = ImGui.GetWindowContentRegionMax(); + var width = max.X - min.X; + if (width <= 0f) + { + width = ImGui.GetContentRegionAvail().X; + } + return width; } private static void ApplyTextureRowBackground(TextureRow row, bool isSelected) diff --git a/LightlessSync/UI/DownloadUi.cs b/LightlessSync/UI/DownloadUi.cs index 2d9cdc1..7ed5629 100644 --- a/LightlessSync/UI/DownloadUi.cs +++ b/LightlessSync/UI/DownloadUi.cs @@ -17,7 +17,7 @@ namespace LightlessSync.UI; public class DownloadUi : WindowMediatorSubscriberBase { private readonly LightlessConfigService _configService; - private readonly ConcurrentDictionary> _currentDownloads = new(); + private readonly ConcurrentDictionary> _currentDownloads = new(); private readonly DalamudUtilService _dalamudUtilService; private readonly FileUploadManager _fileTransferManager; private readonly UiSharedService _uiShared; @@ -25,6 +25,8 @@ public class DownloadUi : WindowMediatorSubscriberBase private readonly ConcurrentDictionary _uploadingPlayers = new(); private readonly Dictionary _smoothed = []; private readonly Dictionary _downloadSpeeds = []; + private readonly Dictionary _downloadInitialTotals = []; + private byte _transferBoxTransparency = 100; private bool _notificationDismissed = true; @@ -66,6 +68,10 @@ public class DownloadUi : WindowMediatorSubscriberBase Mediator.Subscribe(this, (msg) => { _currentDownloads[msg.DownloadId] = msg.DownloadStatus; + // Capture initial totals when download starts + var totalFiles = msg.DownloadStatus.Values.Sum(s => s.TotalFiles); + var totalBytes = msg.DownloadStatus.Values.Sum(s => s.TotalBytes); + _downloadInitialTotals[msg.DownloadId] = (totalFiles, totalBytes); _notificationDismissed = false; }); Mediator.Subscribe(this, (msg) => @@ -164,10 +170,10 @@ public class DownloadUi : WindowMediatorSubscriberBase const float rounding = 6f; var shadowOffset = new Vector2(2, 2); - List>> transfers; + List>> transfers; try { - transfers = _currentDownloads.ToList(); + transfers = [.. 
_currentDownloads]; } catch (ArgumentException) { @@ -206,12 +212,16 @@ public class DownloadUi : WindowMediatorSubscriberBase var dlQueue = 0; var dlProg = 0; var dlDecomp = 0; + var dlComplete = 0; foreach (var entry in transfer.Value) { var fileStatus = entry.Value; switch (fileStatus.DownloadStatus) { + case DownloadStatus.Initializing: + dlQueue++; + break; case DownloadStatus.WaitingForSlot: dlSlot++; break; @@ -224,15 +234,20 @@ public class DownloadUi : WindowMediatorSubscriberBase case DownloadStatus.Decompressing: dlDecomp++; break; + case DownloadStatus.Completed: + dlComplete++; + break; } } + var isAllComplete = dlComplete > 0 && dlProg == 0 && dlDecomp == 0 && dlQueue == 0 && dlSlot == 0; + string statusText; if (dlProg > 0) { statusText = "Downloading"; } - else if (dlDecomp > 0 || (totalBytes > 0 && transferredBytes >= totalBytes)) + else if (dlDecomp > 0) { statusText = "Decompressing"; } @@ -244,6 +259,10 @@ public class DownloadUi : WindowMediatorSubscriberBase { statusText = "Waiting for slot"; } + else if (isAllComplete) + { + statusText = "Completed"; + } else { statusText = "Waiting"; @@ -309,7 +328,7 @@ public class DownloadUi : WindowMediatorSubscriberBase fillPercent = transferredBytes / (double)totalBytes; showFill = true; } - else if (dlDecomp > 0 || transferredBytes >= totalBytes) + else if (dlDecomp > 0 || dlComplete > 0 || transferredBytes >= totalBytes) { fillPercent = 1.0; showFill = true; @@ -341,10 +360,14 @@ public class DownloadUi : WindowMediatorSubscriberBase downloadText = $"{statusText} {UiSharedService.ByteToString(transferredBytes, addSuffix: false)}/{UiSharedService.ByteToString(totalBytes)}"; } - else if ((dlDecomp > 0 || transferredBytes >= totalBytes) && hasValidSize) + else if (dlDecomp > 0) { downloadText = "Decompressing"; } + else if (isAllComplete) + { + downloadText = "Completed"; + } else { // Waiting states @@ -417,6 +440,7 @@ public class DownloadUi : WindowMediatorSubscriberBase var totalDlQueue = 0; var totalDlProg = 0; var totalDlDecomp = 0; + var totalDlComplete = 0; var perPlayer = new List<( string Name, @@ -428,16 +452,21 @@ public class DownloadUi : WindowMediatorSubscriberBase int DlSlot, int DlQueue, int DlProg, - int DlDecomp)>(); + int DlDecomp, + int DlComplete)>(); foreach (var transfer in _currentDownloads) { var handler = transfer.Key; var statuses = transfer.Value.Values; - var playerTotalFiles = statuses.Sum(s => s.TotalFiles); - var playerTransferredFiles = statuses.Sum(s => s.TransferredFiles); - var playerTotalBytes = statuses.Sum(s => s.TotalBytes); + var (playerTotalFiles, playerTotalBytes) = _downloadInitialTotals.TryGetValue(handler, out var totals) + ? 
totals + : (statuses.Sum(s => s.TotalFiles), statuses.Sum(s => s.TotalBytes)); + + var playerTransferredFiles = statuses.Count(s => + s.DownloadStatus == DownloadStatus.Decompressing || + s.TransferredBytes >= s.TotalBytes); var playerTransferredBytes = statuses.Sum(s => s.TransferredBytes); totalFiles += playerTotalFiles; @@ -450,12 +479,17 @@ public class DownloadUi : WindowMediatorSubscriberBase var playerDlQueue = 0; var playerDlProg = 0; var playerDlDecomp = 0; + var playerDlComplete = 0; foreach (var entry in transfer.Value) { var fileStatus = entry.Value; switch (fileStatus.DownloadStatus) { + case DownloadStatus.Initializing: + playerDlQueue++; + totalDlQueue++; + break; case DownloadStatus.WaitingForSlot: playerDlSlot++; totalDlSlot++; @@ -472,6 +506,10 @@ public class DownloadUi : WindowMediatorSubscriberBase playerDlDecomp++; totalDlDecomp++; break; + case DownloadStatus.Completed: + playerDlComplete++; + totalDlComplete++; + break; } } @@ -497,7 +535,8 @@ public class DownloadUi : WindowMediatorSubscriberBase playerDlSlot, playerDlQueue, playerDlProg, - playerDlDecomp + playerDlDecomp, + playerDlComplete )); } @@ -521,7 +560,7 @@ public class DownloadUi : WindowMediatorSubscriberBase // Overall texts var headerText = - $"Downloading {transferredFiles}/{totalFiles} files [W:{totalDlSlot}/Q:{totalDlQueue}/P:{totalDlProg}/D:{totalDlDecomp}]"; + $"Downloading {transferredFiles}/{totalFiles} files [W:{totalDlSlot}/Q:{totalDlQueue}/P:{totalDlProg}/D:{totalDlDecomp}/C:{totalDlComplete}]"; var bytesText = $"{UiSharedService.ByteToString(transferredBytes, addSuffix: false)}/{UiSharedService.ByteToString(totalBytes)}"; @@ -544,7 +583,7 @@ public class DownloadUi : WindowMediatorSubscriberBase foreach (var p in perPlayer) { var line = - $"{p.Name} [W:{p.DlSlot}/Q:{p.DlQueue}/P:{p.DlProg}/D:{p.DlDecomp}] {p.TransferredFiles}/{p.TotalFiles}"; + $"{p.Name} [W:{p.DlSlot}/Q:{p.DlQueue}/P:{p.DlProg}/D:{p.DlDecomp}/C:{p.DlComplete}] {p.TransferredFiles}/{p.TotalFiles}"; var lineSize = ImGui.CalcTextSize(line); if (lineSize.X > contentWidth) @@ -662,7 +701,7 @@ public class DownloadUi : WindowMediatorSubscriberBase && p.TransferredBytes > 0; var labelLine = - $"{p.Name} [W:{p.DlSlot}/Q:{p.DlQueue}/P:{p.DlProg}/D:{p.DlDecomp}] {p.TransferredFiles}/{p.TotalFiles}"; + $"{p.Name} [W:{p.DlSlot}/Q:{p.DlQueue}/P:{p.DlProg}/D:{p.DlDecomp}/C:{p.DlComplete}] {p.TransferredFiles}/{p.TotalFiles}"; if (!showBar) { @@ -721,13 +760,18 @@ public class DownloadUi : WindowMediatorSubscriberBase // Text inside bar: downloading vs decompressing string barText; - var isDecompressing = p.DlDecomp > 0 && p.TransferredBytes >= p.TotalBytes && p.TotalBytes > 0; + var isDecompressing = p.DlDecomp > 0; + var isAllComplete = p.DlComplete > 0 && p.DlProg == 0 && p.DlDecomp == 0 && p.DlQueue == 0 && p.DlSlot == 0; if (isDecompressing) { // Keep bar full, static text showing decompressing barText = "Decompressing..."; } + else if (isAllComplete) + { + barText = "Completed"; + } else { var bytesInside = @@ -808,6 +852,7 @@ public class DownloadUi : WindowMediatorSubscriberBase var dlQueue = 0; var dlProg = 0; var dlDecomp = 0; + var dlComplete = 0; long totalBytes = 0; long transferredBytes = 0; @@ -817,22 +862,29 @@ public class DownloadUi : WindowMediatorSubscriberBase var fileStatus = entry.Value; switch (fileStatus.DownloadStatus) { + case DownloadStatus.Initializing: dlQueue++; break; case DownloadStatus.WaitingForSlot: dlSlot++; break; case DownloadStatus.WaitingForQueue: dlQueue++; break; case DownloadStatus.Downloading: 
dlProg++; break; case DownloadStatus.Decompressing: dlDecomp++; break; + case DownloadStatus.Completed: dlComplete++; break; } totalBytes += fileStatus.TotalBytes; transferredBytes += fileStatus.TransferredBytes; } var progress = totalBytes > 0 ? (float)transferredBytes / totalBytes : 0f; + if (dlComplete > 0 && dlProg == 0 && dlDecomp == 0 && dlQueue == 0 && dlSlot == 0) + { + progress = 1f; + } string status; if (dlDecomp > 0) status = "decompressing"; else if (dlProg > 0) status = "downloading"; else if (dlQueue > 0) status = "queued"; else if (dlSlot > 0) status = "waiting"; + else if (dlComplete > 0) status = "completed"; else status = "completed"; downloadStatus.Add((item.Key.Name, progress, status)); diff --git a/LightlessSync/UI/DrawEntityFactory.cs b/LightlessSync/UI/DrawEntityFactory.cs index e7bcc87..08f81b6 100644 --- a/LightlessSync/UI/DrawEntityFactory.cs +++ b/LightlessSync/UI/DrawEntityFactory.cs @@ -217,6 +217,7 @@ public class DrawEntityFactory entry.PairStatus, handler?.LastAppliedDataBytes ?? -1, handler?.LastAppliedDataTris ?? -1, + handler?.LastAppliedApproximateEffectiveTris ?? -1, handler?.LastAppliedApproximateVRAMBytes ?? -1, handler?.LastAppliedApproximateEffectiveVRAMBytes ?? -1, handler); diff --git a/LightlessSync/UI/DtrEntry.cs b/LightlessSync/UI/DtrEntry.cs index ae94d5e..5aa69eb 100644 --- a/LightlessSync/UI/DtrEntry.cs +++ b/LightlessSync/UI/DtrEntry.cs @@ -103,10 +103,19 @@ public sealed class DtrEntry : IDisposable, IHostedService public async Task StopAsync(CancellationToken cancellationToken) { - await _cancellationTokenSource.CancelAsync().ConfigureAwait(false); + _cancellationTokenSource.Cancel(); + + if (_dalamudUtilService.IsOnFrameworkThread) + { + _logger.LogDebug("Skipping Lightfinder DTR wait on framework thread during shutdown."); + _cancellationTokenSource.Dispose(); + return; + } + try { - await _runTask!.ConfigureAwait(false); + if (_runTask != null) + await _runTask.ConfigureAwait(false); } catch (OperationCanceledException) { diff --git a/LightlessSync/UI/Handlers/IdDisplayHandler.cs b/LightlessSync/UI/Handlers/IdDisplayHandler.cs index 74a6571..46ebe7d 100644 --- a/LightlessSync/UI/Handlers/IdDisplayHandler.cs +++ b/LightlessSync/UI/Handlers/IdDisplayHandler.cs @@ -415,7 +415,9 @@ public class IdDisplayHandler var vramBytes = pair.LastAppliedApproximateEffectiveVRAMBytes >= 0 ? pair.LastAppliedApproximateEffectiveVRAMBytes : pair.LastAppliedApproximateVRAMBytes; - var triangleCount = pair.LastAppliedDataTris; + var triangleCount = pair.LastAppliedApproximateEffectiveTris >= 0 + ? pair.LastAppliedApproximateEffectiveTris + : pair.LastAppliedDataTris; if (vramBytes < 0 && triangleCount < 0) { return null; diff --git a/LightlessSync/UI/Models/PairUiEntry.cs b/LightlessSync/UI/Models/PairUiEntry.cs index c25b6fd..fcda8ec 100644 --- a/LightlessSync/UI/Models/PairUiEntry.cs +++ b/LightlessSync/UI/Models/PairUiEntry.cs @@ -21,6 +21,7 @@ public sealed record PairUiEntry( IndividualPairStatus? PairStatus, long LastAppliedDataBytes, long LastAppliedDataTris, + long LastAppliedApproximateEffectiveTris, long LastAppliedApproximateVramBytes, long LastAppliedApproximateEffectiveVramBytes, IPairHandlerAdapter? 
Handler) diff --git a/LightlessSync/UI/Models/TextureFormatSortMode.cs b/LightlessSync/UI/Models/TextureFormatSortMode.cs new file mode 100644 index 0000000..165e10d --- /dev/null +++ b/LightlessSync/UI/Models/TextureFormatSortMode.cs @@ -0,0 +1,8 @@ +namespace LightlessSync.UI.Models; + +public enum TextureFormatSortMode +{ + None = 0, + CompressedFirst = 1, + UncompressedFirst = 2 +} diff --git a/LightlessSync/UI/Models/VisiblePairSortMode.cs b/LightlessSync/UI/Models/VisiblePairSortMode.cs index ec133b9..615ac9f 100644 --- a/LightlessSync/UI/Models/VisiblePairSortMode.cs +++ b/LightlessSync/UI/Models/VisiblePairSortMode.cs @@ -7,4 +7,5 @@ public enum VisiblePairSortMode EffectiveVramUsage = 2, TriangleCount = 3, PreferredDirectPairs = 4, + EffectiveTriangleCount = 5, } diff --git a/LightlessSync/UI/SettingsUi.cs b/LightlessSync/UI/SettingsUi.cs index d6b435f..9c2f1ef 100644 --- a/LightlessSync/UI/SettingsUi.cs +++ b/LightlessSync/UI/SettingsUi.cs @@ -15,6 +15,7 @@ using LightlessSync.Interop.Ipc; using LightlessSync.LightlessConfiguration; using LightlessSync.LightlessConfiguration.Configurations; using LightlessSync.LightlessConfiguration.Models; +using LightlessSync.PlayerData.Factories; using LightlessSync.PlayerData.Handlers; using LightlessSync.PlayerData.Pairs; using LightlessSync.Services; @@ -41,6 +42,7 @@ using System.Globalization; using System.Net.Http.Headers; using System.Net.Http.Json; using System.Numerics; +using System.Runtime.InteropServices; using System.Text; using System.Text.Json; @@ -52,7 +54,7 @@ public class SettingsUi : WindowMediatorSubscriberBase private readonly CacheMonitor _cacheMonitor; private readonly LightlessConfigService _configService; private readonly UiThemeConfigService _themeConfigService; - private readonly ConcurrentDictionary> _currentDownloads = new(); + private readonly ConcurrentDictionary> _currentDownloads = new(); private readonly DalamudUtilService _dalamudUtilService; private readonly HttpClient _httpClient; private readonly FileCacheManager _fileCacheManager; @@ -108,8 +110,8 @@ public class SettingsUi : WindowMediatorSubscriberBase }; private readonly UiSharedService.TabOption[] _transferTabOptions = new UiSharedService.TabOption[2]; private readonly List> _serverTabOptions = new(4); - private readonly string[] _generalTreeNavOrder = new[] - { + private readonly string[] _generalTreeNavOrder = + [ "Import & Export", "Popup & Auto Fill", "Behavior", @@ -119,7 +121,8 @@ public class SettingsUi : WindowMediatorSubscriberBase "Colors", "Server Info Bar", "Nameplate", - }; + "Animation & Bones" + ]; private static readonly HashSet _generalNavSeparatorAfter = new(StringComparer.Ordinal) { "Popup & Auto Fill", @@ -581,6 +584,94 @@ public class SettingsUi : WindowMediatorSubscriberBase } } + private void DrawTriangleDecimationCounters() + { + HashSet trackedPairs = new(); + + var snapshot = _pairUiService.GetSnapshot(); + + foreach (var pair in snapshot.DirectPairs) + { + trackedPairs.Add(pair); + } + + foreach (var group in snapshot.GroupPairs.Values) + { + foreach (var pair in group) + { + trackedPairs.Add(pair); + } + } + + long totalOriginalTris = 0; + long totalEffectiveTris = 0; + var hasData = false; + + foreach (var pair in trackedPairs) + { + if (!pair.IsVisible) + continue; + + var original = pair.LastAppliedDataTris; + var effective = pair.LastAppliedApproximateEffectiveTris; + + if (original >= 0) + { + hasData = true; + totalOriginalTris += original; + } + + if (effective >= 0) + { + hasData = true; + totalEffectiveTris += 
effective; + } + } + + if (!hasData) + { + ImGui.TextDisabled("Triangle usage has not been calculated yet."); + return; + } + + var savedTris = Math.Max(0L, totalOriginalTris - totalEffectiveTris); + var originalText = FormatTriangleCount(totalOriginalTris); + var effectiveText = FormatTriangleCount(totalEffectiveTris); + var savedText = FormatTriangleCount(savedTris); + + ImGui.TextUnformatted($"Total triangle usage (original): {originalText}"); + ImGui.TextUnformatted($"Total triangle usage (effective): {effectiveText}"); + + if (savedTris > 0) + { + UiSharedService.ColorText($"Triangles saved by decimation: {savedText}", UIColors.Get("LightlessGreen")); + } + else + { + ImGui.TextUnformatted($"Triangles saved by decimation: {savedText}"); + } + + static string FormatTriangleCount(long triangleCount) + { + if (triangleCount < 0) + { + return "n/a"; + } + + if (triangleCount >= 1_000_000) + { + return FormattableString.Invariant($"{triangleCount / 1_000_000d:0.#}m tris"); + } + + if (triangleCount >= 1_000) + { + return FormattableString.Invariant($"{triangleCount / 1_000d:0.#}k tris"); + } + + return $"{triangleCount} tris"; + } + } + private void DrawThemeVectorRow(MainStyle.StyleVector2Option option) { ImGui.TableNextRow(); @@ -870,10 +961,11 @@ public class SettingsUi : WindowMediatorSubscriberBase _uiShared.DrawHelpText( $"The download window will show the current progress of outstanding downloads.{Environment.NewLine}{Environment.NewLine}" + - $"What do W/Q/P/D stand for?{Environment.NewLine}W = Waiting for Slot (see Maximum Parallel Downloads){Environment.NewLine}" + + $"What do W/Q/P/D/C stand for?{Environment.NewLine}W = Waiting for Slot (see Maximum Parallel Downloads){Environment.NewLine}" + $"Q = Queued on Server, waiting for queue ready signal{Environment.NewLine}" + $"P = Processing download (aka downloading){Environment.NewLine}" + - $"D = Decompressing download"); + $"D = Decompressing download{Environment.NewLine}" + + $"C = Completed download"); if (!_configService.Current.ShowTransferWindow) ImGui.BeginDisabled(); ImGui.Indent(); @@ -1148,7 +1240,7 @@ public class SettingsUi : WindowMediatorSubscriberBase private async Task?> RunSpeedTest(List servers, CancellationToken token) { - List speedTestResults = new(); + List speedTestResults = []; foreach (var server in servers) { HttpResponseMessage? result = null; @@ -1533,6 +1625,7 @@ public class SettingsUi : WindowMediatorSubscriberBase DrawPairPropertyRow("Approx. VRAM", FormatBytes(pair.LastAppliedApproximateVRAMBytes)); DrawPairPropertyRow("Effective VRAM", FormatBytes(pair.LastAppliedApproximateEffectiveVRAMBytes)); DrawPairPropertyRow("Last Triangles", pair.LastAppliedDataTris < 0 ? "n/a" : pair.LastAppliedDataTris.ToString(CultureInfo.InvariantCulture)); + DrawPairPropertyRow("Effective Triangles", pair.LastAppliedApproximateEffectiveTris < 0 ? 
"n/a" : pair.LastAppliedApproximateEffectiveTris.ToString(CultureInfo.InvariantCulture)); ImGui.EndTable(); } @@ -1964,14 +2057,25 @@ public class SettingsUi : WindowMediatorSubscriberBase { using (ImRaii.PushIndent(20f)) { - if (_validationTask.IsCompleted) + if (_validationTask.IsCompletedSuccessfully) { UiSharedService.TextWrapped( $"The storage validation has completed and removed {_validationTask.Result.Count} invalid files from storage."); } + else if (_validationTask.IsCanceled) + { + UiSharedService.ColorTextWrapped( + "Storage validation was cancelled.", + UIColors.Get("LightlessYellow")); + } + else if (_validationTask.IsFaulted) + { + UiSharedService.ColorTextWrapped( + "Storage validation failed with an error.", + UIColors.Get("DimRed")); + } else { - UiSharedService.TextWrapped( $"Storage validation is running: {_currentProgress.Item1}/{_currentProgress.Item2}"); if (_currentProgress.Item3 != null) @@ -3127,10 +3231,102 @@ public class SettingsUi : WindowMediatorSubscriberBase } ImGui.Separator(); + ImGui.Dummy(new Vector2(10)); + _uiShared.BigText("Animation"); + + using (var animationTree = BeginGeneralTree("Animation & Bones", UIColors.Get("LightlessPurple"))) + { + if (animationTree.Visible) + { + ImGui.TextUnformatted("Animation Options"); + + var modes = new[] + { + AnimationValidationMode.Unsafe, + AnimationValidationMode.Safe, + AnimationValidationMode.Safest, + }; + + var labels = new[] + { + "Unsafe", + "Safe (Race)", + "Safest (Race + Bones)", + }; + + var tooltips = new[] + { + "No validation. Fastest, but may allow incompatible animations (riskier).", + "Validates skeleton race + modded skeleton check (recommended).", + "Requires matching skeleton race + bone compatibility (strictest).", + }; + + + var currentMode = _configService.Current.AnimationValidationMode; + int selectedIndex = Array.IndexOf(modes, currentMode); + if (selectedIndex < 0) selectedIndex = 1; + + ImGui.SetNextItemWidth(140 * ImGuiHelpers.GlobalScale); + + bool open = ImGui.BeginCombo("Animation validation", labels[selectedIndex]); + + if (ImGui.IsItemHovered()) + ImGui.SetTooltip(tooltips[selectedIndex]); + + if (open) + { + for (int i = 0; i < modes.Length; i++) + { + bool isSelected = (i == selectedIndex); + + if (ImGui.Selectable(labels[i], isSelected)) + { + selectedIndex = i; + _configService.Current.AnimationValidationMode = modes[i]; + _configService.Save(); + } + + if (ImGui.IsItemHovered()) + ImGui.SetTooltip(tooltips[i]); + + if (isSelected) + ImGui.SetItemDefaultFocus(); + } + + ImGui.EndCombo(); + } + + UiSharedService.ColoredSeparator(UIColors.Get("LightlessPurple"), 1.5f); + + var cfg = _configService.Current; + + bool oneBased = cfg.AnimationAllowOneBasedShift; + if (ImGui.Checkbox("Treat 1-based PAP indices as compatible", ref oneBased)) + { + cfg.AnimationAllowOneBasedShift = oneBased; + _configService.Save(); + } + if (ImGui.IsItemHovered()) + ImGui.SetTooltip("Fixes off-by-one PAPs (one bone differance in bones and PAP). Can also increase crashing, toggle off if alot of crashing is happening"); + + bool neighbor = cfg.AnimationAllowNeighborIndexTolerance; + if (ImGui.Checkbox("Allow 1+- bone index tolerance", ref neighbor)) + { + cfg.AnimationAllowNeighborIndexTolerance = neighbor; + _configService.Save(); + } + if (ImGui.IsItemHovered()) + ImGui.SetTooltip("Looser matching on bone matching. 
Can reduce false blocks happening but also reduces safety and more prone to crashing."); + + ImGui.TreePop(); + animationTree.MarkContentEnd(); + } + } ImGui.EndChild(); ImGui.EndGroup(); + ImGui.Separator(); generalSelune.DrawHighlightOnly(ImGui.GetIO().DeltaTime); } } @@ -3220,6 +3416,7 @@ public class SettingsUi : WindowMediatorSubscriberBase return 1f - (elapsed / GeneralTreeHighlightDuration); } + [StructLayout(LayoutKind.Auto)] private struct GeneralTreeScope : IDisposable { private readonly bool _visible; @@ -3527,7 +3724,7 @@ public class SettingsUi : WindowMediatorSubscriberBase _uiShared.DrawHelpText("Controls whether Lightless reduces index textures that exceed the size limit."); var dimensionOptions = new[] { 512, 1024, 2048, 4096 }; - var optionLabels = dimensionOptions.Select(static value => value.ToString()).ToArray(); + var optionLabels = dimensionOptions.Select(selector: static value => value.ToString()).ToArray(); var currentDimension = textureConfig.TextureDownscaleMaxDimension; var selectedIndex = Array.IndexOf(dimensionOptions, currentDimension); if (selectedIndex < 0) @@ -3553,6 +3750,14 @@ public class SettingsUi : WindowMediatorSubscriberBase ImGui.SameLine(); _uiShared.DrawNoteLine("! ", UIColors.Get("LightlessYellow"), new SeStringUtils.RichTextEntry("If disabled, saved + effective VRAM usage information will not work.", UIColors.Get("LightlessYellow"))); + var skipPreferredDownscale = textureConfig.SkipTextureDownscaleForPreferredPairs; + if (ImGui.Checkbox("Skip downscale for preferred/direct pairs", ref skipPreferredDownscale)) + { + textureConfig.SkipTextureDownscaleForPreferredPairs = skipPreferredDownscale; + _playerPerformanceConfigService.Save(); + } + _uiShared.DrawHelpText("When enabled, textures for direct pairs with preferred permissions are left untouched."); + if (!textureConfig.EnableNonIndexTextureMipTrim && !textureConfig.EnableIndexTextureDownscale) { UiSharedService.ColorTextWrapped("Both trimming and downscale are disabled. Lightless will keep original textures regardless of size.", UIColors.Get("DimRed")); @@ -3580,6 +3785,160 @@ public class SettingsUi : WindowMediatorSubscriberBase ImGui.TreePop(); } + ImGui.Separator(); + + if (_uiShared.MediumTreeNode("Model Optimization", UIColors.Get("DimRed"))) + { + _uiShared.MediumText("Warning", UIColors.Get("DimRed")); + _uiShared.DrawNoteLine("! ", UIColors.Get("DimRed"), + new SeStringUtils.RichTextEntry("Model decimation is a "), + new SeStringUtils.RichTextEntry("destructive", UIColors.Get("DimRed"), true), + new SeStringUtils.RichTextEntry(" process and may cause broken or incorrect character appearances.")); + + + _uiShared.DrawNoteLine("! ", UIColors.Get("DimRed"), + new SeStringUtils.RichTextEntry("This feature is encouraged to help "), + new SeStringUtils.RichTextEntry("lower-end systems with limited VRAM", UIColors.Get("LightlessYellow"), true), + new SeStringUtils.RichTextEntry(" and for use in "), + new SeStringUtils.RichTextEntry("performance-critical scenarios", UIColors.Get("LightlessYellow"), true), + new SeStringUtils.RichTextEntry(".")); + + _uiShared.DrawNoteLine("! ", UIColors.Get("DimRed"), + new SeStringUtils.RichTextEntry("Runtime decimation "), + new SeStringUtils.RichTextEntry("MAY", UIColors.Get("DimRed"), true), + new SeStringUtils.RichTextEntry(" cause higher load on the system when processing downloads.")); + + _uiShared.DrawNoteLine("!!! 
", UIColors.Get("DimRed"), + new SeStringUtils.RichTextEntry("When enabled, we cannot provide support for appearance issues caused by this setting!", UIColors.Get("DimRed"), true)); + + ImGui.Dummy(new Vector2(15)); + + _uiShared.DrawNoteLine("! ", UIColors.Get("LightlessGreen"), + new SeStringUtils.RichTextEntry("If a mesh exceeds the "), + new SeStringUtils.RichTextEntry("triangle threshold", UIColors.Get("LightlessGreen"), true), + new SeStringUtils.RichTextEntry(", it will be decimated automatically to the set "), + new SeStringUtils.RichTextEntry("target triangle ratio", UIColors.Get("LightlessGreen"), true), + new SeStringUtils.RichTextEntry(". This will reduce quality of the mesh or may break it's intended structure.")); + + + var performanceConfig = _playerPerformanceConfigService.Current; + var enableDecimation = performanceConfig.EnableModelDecimation; + if (ImGui.Checkbox("Enable model decimation", ref enableDecimation)) + { + performanceConfig.EnableModelDecimation = enableDecimation; + _playerPerformanceConfigService.Save(); + } + _uiShared.DrawHelpText("When enabled, Lightless generates a decimated copy of given model after download."); + + var keepOriginalModels = performanceConfig.KeepOriginalModelFiles; + if (ImGui.Checkbox("Keep original model files", ref keepOriginalModels)) + { + performanceConfig.KeepOriginalModelFiles = keepOriginalModels; + _playerPerformanceConfigService.Save(); + } + _uiShared.DrawHelpText("When disabled, Lightless removes the original model after a decimated copy is created."); + ImGui.SameLine(); + _uiShared.DrawNoteLine("! ", UIColors.Get("LightlessYellow"), new SeStringUtils.RichTextEntry("If disabled, saved + effective triangle usage information will not work.", UIColors.Get("LightlessYellow"))); + + var skipPreferredDecimation = performanceConfig.SkipModelDecimationForPreferredPairs; + if (ImGui.Checkbox("Skip decimation for preferred/direct pairs", ref skipPreferredDecimation)) + { + performanceConfig.SkipModelDecimationForPreferredPairs = skipPreferredDecimation; + _playerPerformanceConfigService.Save(); + } + _uiShared.DrawHelpText("When enabled, models for direct pairs with preferred permissions are left untouched."); + + var triangleThreshold = performanceConfig.ModelDecimationTriangleThreshold; + ImGui.SetNextItemWidth(300 * ImGuiHelpers.GlobalScale); + if (ImGui.SliderInt("Decimate models above", ref triangleThreshold, 8_000, 100_000)) + { + performanceConfig.ModelDecimationTriangleThreshold = Math.Clamp(triangleThreshold, 8_000, 100_000); + _playerPerformanceConfigService.Save(); + } + ImGui.SameLine(); + ImGui.Text("triangles"); + _uiShared.DrawHelpText($"Models below this triangle count are left untouched.{UiSharedService.TooltipSeparator}Default: 50,000"); + + var targetPercent = (float)(performanceConfig.ModelDecimationTargetRatio * 100.0); + var clampedPercent = Math.Clamp(targetPercent, 60f, 99f); + if (Math.Abs(clampedPercent - targetPercent) > float.Epsilon) + { + performanceConfig.ModelDecimationTargetRatio = clampedPercent / 100.0; + _playerPerformanceConfigService.Save(); + targetPercent = clampedPercent; + } + ImGui.SetNextItemWidth(300 * ImGuiHelpers.GlobalScale); + if (ImGui.SliderFloat("Target triangle ratio", ref targetPercent, 60f, 99f, "%.0f%%")) + { + performanceConfig.ModelDecimationTargetRatio = Math.Clamp(targetPercent / 100f, 0.6f, 0.99f); + _playerPerformanceConfigService.Save(); + } + _uiShared.DrawHelpText($"Target ratio relative to original triangle count (80% keeps 80% of 
triangles).{UiSharedService.TooltipSeparator}Default: 80%");
+
+ ImGui.Dummy(new Vector2(15));
+ ImGui.TextUnformatted("Decimation targets");
+ _uiShared.DrawHelpText("Hair mods are always excluded from decimation.");
+
+ _uiShared.DrawNoteLine("! ", UIColors.Get("LightlessGreen"),
+ new SeStringUtils.RichTextEntry("Automatic decimation will only target the selected "),
+ new SeStringUtils.RichTextEntry("decimation targets", UIColors.Get("LightlessGreen"), true),
+ new SeStringUtils.RichTextEntry("."));
+
+ _uiShared.DrawNoteLine("! ", UIColors.Get("LightlessYellow"),
+ new SeStringUtils.RichTextEntry("It is advised not to decimate any body-related meshes, which include: "),
+ new SeStringUtils.RichTextEntry("facial mods + sculpts, chest, legs, hands and feet", UIColors.Get("LightlessYellow"), true),
+ new SeStringUtils.RichTextEntry("."));
+
+ _uiShared.DrawNoteLine("!!! ", UIColors.Get("DimRed"),
+ new SeStringUtils.RichTextEntry("Remember, automatic decimation is not perfect and can ruin meshes, especially hair mods.", UIColors.Get("DimRed"), true));
+
+ var allowBody = performanceConfig.ModelDecimationAllowBody;
+ if (ImGui.Checkbox("Body", ref allowBody))
+ {
+ performanceConfig.ModelDecimationAllowBody = allowBody;
+ _playerPerformanceConfigService.Save();
+ }
+
+ var allowFaceHead = performanceConfig.ModelDecimationAllowFaceHead;
+ if (ImGui.Checkbox("Face/head", ref allowFaceHead))
+ {
+ performanceConfig.ModelDecimationAllowFaceHead = allowFaceHead;
+ _playerPerformanceConfigService.Save();
+ }
+
+ var allowTail = performanceConfig.ModelDecimationAllowTail;
+ if (ImGui.Checkbox("Tails/Ears", ref allowTail))
+ {
+ performanceConfig.ModelDecimationAllowTail = allowTail;
+ _playerPerformanceConfigService.Save();
+ }
+
+ var allowClothing = performanceConfig.ModelDecimationAllowClothing;
+ if (ImGui.Checkbox("Clothing (body/legs/shoes/gloves/hats)", ref allowClothing))
+ {
+ performanceConfig.ModelDecimationAllowClothing = allowClothing;
+ _playerPerformanceConfigService.Save();
+ }
+
+ var allowAccessories = performanceConfig.ModelDecimationAllowAccessories;
+ if (ImGui.Checkbox("Accessories (earring/rings/bracelet/necklace)", ref allowAccessories))
+ {
+ performanceConfig.ModelDecimationAllowAccessories = allowAccessories;
+ _playerPerformanceConfigService.Save();
+ }
+
+ ImGui.Dummy(new Vector2(5));
+
+ UiSharedService.ColoredSeparator(UIColors.Get("LightlessGrey"), 3f);
+
+ ImGui.Dummy(new Vector2(5));
+ DrawTriangleDecimationCounters();
+ ImGui.Dummy(new Vector2(5));
+
+ UiSharedService.ColoredSeparator(UIColors.Get("DimRed"), 1.5f);
+ ImGui.TreePop();
+ }
+
 ImGui.Separator();
 ImGui.Dummy(new Vector2(10));
diff --git a/LightlessSync/UI/ZoneChatUi.cs b/LightlessSync/UI/ZoneChatUi.cs
index cb6dae8..a03ceab 100644
--- a/LightlessSync/UI/ZoneChatUi.cs
+++ b/LightlessSync/UI/ZoneChatUi.cs
@@ -205,10 +205,8 @@ public sealed class ZoneChatUi : WindowMediatorSubscriberBase
 private void ApplyUiVisibilitySettings()
 {
- var config = _chatConfigService.Current;
 _uiBuilder.DisableUserUiHide = true;
- _uiBuilder.DisableCutsceneUiHide = config.ShowInCutscenes;
- _uiBuilder.DisableGposeUiHide = config.ShowInGpose;
+ _uiBuilder.DisableCutsceneUiHide = true;
 }
 private bool ShouldHide()
 {
@@ -220,6 +218,16 @@ public sealed class ZoneChatUi : WindowMediatorSubscriberBase
 return true;
 }
+ if (!config.ShowInGpose && _dalamudUtilService.IsInGpose)
+ {
+ return true;
+ }
+
+ if (!config.ShowInCutscenes && _dalamudUtilService.IsInCutscene)
+ {
+ return true;
+ }
+
 if (config.HideInCombat && 
_dalamudUtilService.IsInCombat) { return true; diff --git a/LightlessSync/Utils/VariousExtensions.cs b/LightlessSync/Utils/VariousExtensions.cs index 0020bc9..d250279 100644 --- a/LightlessSync/Utils/VariousExtensions.cs +++ b/LightlessSync/Utils/VariousExtensions.cs @@ -57,7 +57,8 @@ public static class VariousExtensions } public static Dictionary> CheckUpdatedData(this CharacterData newData, Guid applicationBase, - CharacterData? oldData, ILogger logger, IPairPerformanceSubject cachedPlayer, bool forceApplyCustomization, bool forceApplyMods) + CharacterData? oldData, ILogger logger, IPairPerformanceSubject cachedPlayer, bool forceApplyCustomization, bool forceApplyMods, + bool suppressForcedRedrawOnForcedModApply = false) { oldData ??= new(); @@ -78,6 +79,7 @@ public static class VariousExtensions bool hasNewAndOldFileReplacements = newFileReplacements != null && existingFileReplacements != null; bool hasNewAndOldGlamourerData = newGlamourerData != null && existingGlamourerData != null; + var forceRedrawOnForcedApply = forceApplyMods && !suppressForcedRedrawOnForcedModApply; if (hasNewButNotOldFileReplacements || hasOldButNotNewFileReplacements || hasNewButNotOldGlamourerData || hasOldButNotNewGlamourerData) { @@ -100,7 +102,7 @@ public static class VariousExtensions { logger.LogDebug("[BASE-{appBase}] Updating {object}/{kind} (FileReplacements not equal) => {change}", applicationBase, cachedPlayer, objectKind, PlayerChanges.ModFiles); charaDataToUpdate[objectKind].Add(PlayerChanges.ModFiles); - if (forceApplyMods || objectKind != ObjectKind.Player) + if (objectKind != ObjectKind.Player || forceRedrawOnForcedApply) { charaDataToUpdate[objectKind].Add(PlayerChanges.ForcedRedraw); } @@ -167,7 +169,7 @@ public static class VariousExtensions if (objectKind != ObjectKind.Player) continue; bool manipDataDifferent = !string.Equals(oldData.ManipulationData, newData.ManipulationData, StringComparison.Ordinal); - if (manipDataDifferent || forceApplyMods) + if (manipDataDifferent || forceRedrawOnForcedApply) { logger.LogDebug("[BASE-{appBase}] Updating {object}/{kind} (Diff manip data) => {change}", applicationBase, cachedPlayer, objectKind, PlayerChanges.ModManip); charaDataToUpdate[objectKind].Add(PlayerChanges.ModManip); diff --git a/LightlessSync/WebAPI/Files/FileDownloadManager.cs b/LightlessSync/WebAPI/Files/FileDownloadManager.cs index 8aa2b0b..2013b50 100644 --- a/LightlessSync/WebAPI/Files/FileDownloadManager.cs +++ b/LightlessSync/WebAPI/Files/FileDownloadManager.cs @@ -6,6 +6,7 @@ using LightlessSync.FileCache; using LightlessSync.LightlessConfiguration; using LightlessSync.PlayerData.Handlers; using LightlessSync.Services.Mediator; +using LightlessSync.Services.ModelDecimation; using LightlessSync.Services.TextureCompression; using LightlessSync.WebAPI.Files.Models; using Microsoft.Extensions.Logging; @@ -17,19 +18,21 @@ namespace LightlessSync.WebAPI.Files; public partial class FileDownloadManager : DisposableMediatorSubscriberBase { - private readonly Dictionary _downloadStatus; - private readonly object _downloadStatusLock = new(); + private readonly ConcurrentDictionary _downloadStatus; private readonly FileCompactor _fileCompactor; private readonly FileCacheManager _fileDbManager; private readonly FileTransferOrchestrator _orchestrator; private readonly LightlessConfigService _configService; private readonly TextureDownscaleService _textureDownscaleService; + private readonly ModelDecimationService _modelDecimationService; private readonly TextureMetadataHelper 
_textureMetadataHelper; private readonly ConcurrentDictionary _activeDownloadStreams; private readonly SemaphoreSlim _decompressGate = new(Math.Max(1, Environment.ProcessorCount / 2), Math.Max(1, Environment.ProcessorCount / 2)); + + private readonly ConcurrentQueue _deferredCompressionQueue = new(); private volatile bool _disableDirectDownloads; private int _consecutiveDirectDownloadFailures; @@ -43,14 +46,16 @@ public partial class FileDownloadManager : DisposableMediatorSubscriberBase FileCompactor fileCompactor, LightlessConfigService configService, TextureDownscaleService textureDownscaleService, + ModelDecimationService modelDecimationService, TextureMetadataHelper textureMetadataHelper) : base(logger, mediator) { - _downloadStatus = new Dictionary(StringComparer.Ordinal); + _downloadStatus = new ConcurrentDictionary(StringComparer.Ordinal); _orchestrator = orchestrator; _fileDbManager = fileCacheManager; _fileCompactor = fileCompactor; _configService = configService; _textureDownscaleService = textureDownscaleService; + _modelDecimationService = modelDecimationService; _textureMetadataHelper = textureMetadataHelper; _activeDownloadStreams = new(); _lastConfigDirectDownloadsState = _configService.Current.EnableDirectDownloads; @@ -84,19 +89,16 @@ public partial class FileDownloadManager : DisposableMediatorSubscriberBase public void ClearDownload() { CurrentDownloads.Clear(); - lock (_downloadStatusLock) - { - _downloadStatus.Clear(); - } + _downloadStatus.Clear(); CurrentOwnerToken = null; } - public async Task DownloadFiles(GameObjectHandler? gameObject, List fileReplacementDto, CancellationToken ct, bool skipDownscale = false) + public async Task DownloadFiles(GameObjectHandler? gameObject, List fileReplacementDto, CancellationToken ct, bool skipDownscale = false, bool skipDecimation = false) { Mediator.Publish(new HaltScanMessage(nameof(DownloadFiles))); try { - await DownloadFilesInternal(gameObject, fileReplacementDto, ct, skipDownscale).ConfigureAwait(false); + await DownloadFilesInternal(gameObject, fileReplacementDto, ct, skipDownscale, skipDecimation).ConfigureAwait(false); } catch { @@ -154,29 +156,20 @@ public partial class FileDownloadManager : DisposableMediatorSubscriberBase private void SetStatus(string key, DownloadStatus status) { - lock (_downloadStatusLock) - { - if (_downloadStatus.TryGetValue(key, out var st)) - st.DownloadStatus = status; - } + if (_downloadStatus.TryGetValue(key, out var st)) + st.DownloadStatus = status; } private void AddTransferredBytes(string key, long delta) { - lock (_downloadStatusLock) - { - if (_downloadStatus.TryGetValue(key, out var st)) - st.TransferredBytes += delta; - } + if (_downloadStatus.TryGetValue(key, out var st)) + st.AddTransferredBytes(delta); } private void MarkTransferredFiles(string key, int files) { - lock (_downloadStatusLock) - { - if (_downloadStatus.TryGetValue(key, out var st)) - st.TransferredFiles = files; - } + if (_downloadStatus.TryGetValue(key, out var st)) + st.SetTransferredFiles(files); } private static byte MungeByte(int byteOrEof) @@ -404,76 +397,32 @@ public partial class FileDownloadManager : DisposableMediatorSubscriberBase private async Task WaitForDownloadReady(List downloadFileTransfer, Guid requestId, CancellationToken downloadCt) { - bool alreadyCancelled = false; - try + while (true) { - CancellationTokenSource localTimeoutCts = new(); - localTimeoutCts.CancelAfter(TimeSpan.FromSeconds(5)); - CancellationTokenSource composite = CancellationTokenSource.CreateLinkedTokenSource(downloadCt, 
localTimeoutCts.Token); + downloadCt.ThrowIfCancellationRequested(); - while (!_orchestrator.IsDownloadReady(requestId)) + if (_orchestrator.IsDownloadReady(requestId)) + break; + + using var resp = await _orchestrator.SendRequestAsync( + HttpMethod.Get, + LightlessFiles.RequestCheckQueueFullPath(downloadFileTransfer[0].DownloadUri, requestId), + downloadFileTransfer.Select(t => t.Hash).ToList(), + downloadCt).ConfigureAwait(false); + + resp.EnsureSuccessStatusCode(); + + var body = (await resp.Content.ReadAsStringAsync(downloadCt).ConfigureAwait(false)).Trim(); + if (string.Equals(body, "true", StringComparison.OrdinalIgnoreCase) || + body.Contains("\"ready\":true", StringComparison.OrdinalIgnoreCase)) { - try - { - await Task.Delay(250, composite.Token).ConfigureAwait(false); - } - catch (TaskCanceledException) - { - if (downloadCt.IsCancellationRequested) throw; - - var req = await _orchestrator.SendRequestAsync( - HttpMethod.Get, - LightlessFiles.RequestCheckQueueFullPath(downloadFileTransfer[0].DownloadUri, requestId), - downloadFileTransfer.Select(c => c.Hash).ToList(), - downloadCt).ConfigureAwait(false); - - req.EnsureSuccessStatusCode(); - - localTimeoutCts.Dispose(); - composite.Dispose(); - - localTimeoutCts = new(); - localTimeoutCts.CancelAfter(TimeSpan.FromSeconds(5)); - composite = CancellationTokenSource.CreateLinkedTokenSource(downloadCt, localTimeoutCts.Token); - } + break; } - localTimeoutCts.Dispose(); - composite.Dispose(); - - Logger.LogDebug("Download {requestId} ready", requestId); + await Task.Delay(250, downloadCt).ConfigureAwait(false); } - catch (TaskCanceledException) - { - try - { - await _orchestrator.SendRequestAsync(HttpMethod.Get, LightlessFiles.RequestCancelFullPath(downloadFileTransfer[0].DownloadUri, requestId)) - .ConfigureAwait(false); - alreadyCancelled = true; - } - catch - { - // ignore - } - throw; - } - finally - { - if (downloadCt.IsCancellationRequested && !alreadyCancelled) - { - try - { - await _orchestrator.SendRequestAsync(HttpMethod.Get, LightlessFiles.RequestCancelFullPath(downloadFileTransfer[0].DownloadUri, requestId)) - .ConfigureAwait(false); - } - catch - { - // ignore - } - } - _orchestrator.ClearDownloadRequest(requestId); - } + _orchestrator.ClearDownloadRequest(requestId); } private async Task DownloadQueuedBlockFileAsync( @@ -502,21 +451,15 @@ public partial class FileDownloadManager : DisposableMediatorSubscriberBase } } - private void RemoveStatus(string key) - { - lock (_downloadStatusLock) - { - _downloadStatus.Remove(key); - } - } - private async Task DecompressBlockFileAsync( string downloadStatusKey, string blockFilePath, Dictionary replacementLookup, + IReadOnlyDictionary rawSizeLookup, string downloadLabel, CancellationToken ct, - bool skipDownscale) + bool skipDownscale, + bool skipDecimation) { SetStatus(downloadStatusKey, DownloadStatus.Decompressing); MarkTransferredFiles(downloadStatusKey, 1); @@ -532,29 +475,33 @@ public partial class FileDownloadManager : DisposableMediatorSubscriberBase try { - // sanity check length if (fileLengthBytes < 0 || fileLengthBytes > int.MaxValue) throw new InvalidDataException($"Invalid block entry length: {fileLengthBytes}"); - // safe cast after check var len = checked((int)fileLengthBytes); if (!replacementLookup.TryGetValue(fileHash, out var repl)) { Logger.LogWarning("{dlName}: No replacement mapping for {fileHash}", downloadLabel, fileHash); - fileBlockStream.Seek(len, SeekOrigin.Current); + // still need to skip bytes: + var skip = checked((int)fileLengthBytes); + 
fileBlockStream.Position += skip; continue; } - // decompress var filePath = _fileDbManager.GetCacheFilePath(fileHash, repl.Extension); Logger.LogTrace("{dlName}: Decompressing {file}:{len} => {dest}", downloadLabel, fileHash, fileLengthBytes, filePath); - // read compressed data var compressed = new byte[len]; + await ReadExactlyAsync(fileBlockStream, compressed.AsMemory(0, len), ct).ConfigureAwait(false); - if (len == 0) + MungeBuffer(compressed); + var decompressed = LZ4Wrapper.Unwrap(compressed); + + if (rawSizeLookup.TryGetValue(fileHash, out var expectedRawSize) + && expectedRawSize > 0 + && decompressed.LongLength != expectedRawSize) { await _fileCompactor.WriteAllBytesAsync(filePath, Array.Empty(), ct).ConfigureAwait(false); PersistFileToStorage(fileHash, filePath, repl.GamePath, skipDownscale); @@ -563,21 +510,24 @@ public partial class FileDownloadManager : DisposableMediatorSubscriberBase MungeBuffer(compressed); - // limit concurrent decompressions await _decompressGate.WaitAsync(ct).ConfigureAwait(false); try { - var sw = System.Diagnostics.Stopwatch.StartNew(); + // offload CPU-intensive decompression to threadpool to free up worker + await Task.Run(async () => + { + var sw = System.Diagnostics.Stopwatch.StartNew(); - // decompress - var decompressed = LZ4Wrapper.Unwrap(compressed); + // decompress + var decompressed = LZ4Wrapper.Unwrap(compressed); - Logger.LogTrace("{dlName}: Unwrap {fileHash} took {ms}ms (compressed {c} bytes, decompressed {d} bytes)", - downloadLabel, fileHash, sw.ElapsedMilliseconds, compressed.Length, decompressed?.Length ?? -1); + Logger.LogTrace("{dlName}: Unwrap {fileHash} took {ms}ms (compressed {c} bytes, decompressed {d} bytes)", + downloadLabel, fileHash, sw.ElapsedMilliseconds, compressed.Length, decompressed?.Length ?? -1); - // write to file - await _fileCompactor.WriteAllBytesAsync(filePath, decompressed, ct).ConfigureAwait(false); - PersistFileToStorage(fileHash, filePath, repl.GamePath, skipDownscale); + // write to file without compacting during download + await File.WriteAllBytesAsync(filePath, decompressed, ct).ConfigureAwait(false); + PersistFileToStorage(fileHash, filePath, repl.GamePath, skipDownscale); + }, ct).ConfigureAwait(false); } finally { @@ -594,6 +544,8 @@ public partial class FileDownloadManager : DisposableMediatorSubscriberBase } } } + + SetStatus(downloadStatusKey, DownloadStatus.Completed); } catch (EndOfStreamException) { @@ -603,10 +555,6 @@ public partial class FileDownloadManager : DisposableMediatorSubscriberBase { Logger.LogError(ex, "{dlName}: Error during block file read", downloadLabel); } - finally - { - RemoveStatus(downloadStatusKey); - } } public async Task> InitiateDownloadList( @@ -644,21 +592,25 @@ public partial class FileDownloadManager : DisposableMediatorSubscriberBase .. await FilesGetSizes(hashes, ct).ConfigureAwait(false), ]; + Logger.LogDebug("Files with size 0 or less: {files}", + string.Join(", ", downloadFileInfoFromService.Where(f => f.Size <= 0).Select(f => f.Hash))); + foreach (var dto in downloadFileInfoFromService.Where(c => c.IsForbidden)) { if (!_orchestrator.ForbiddenTransfers.Exists(f => string.Equals(f.Hash, dto.Hash, StringComparison.Ordinal))) _orchestrator.ForbiddenTransfers.Add(new DownloadFileTransfer(dto)); } - CurrentDownloads = [.. 
downloadFileInfoFromService + CurrentDownloads = downloadFileInfoFromService .Distinct() .Select(d => new DownloadFileTransfer(d)) - .Where(d => d.CanBeTransferred)]; + .Where(d => d.CanBeTransferred) + .ToList(); return CurrentDownloads; } - private sealed record BatchChunk(string Key, List Items); + private sealed record BatchChunk(string HostKey, string StatusKey, List Items); private static IEnumerable> ChunkList(List items, int chunkSize) { @@ -666,7 +618,7 @@ public partial class FileDownloadManager : DisposableMediatorSubscriberBase yield return items.GetRange(i, Math.Min(chunkSize, items.Count - i)); } - private async Task DownloadFilesInternal(GameObjectHandler? gameObjectHandler, List fileReplacement, CancellationToken ct, bool skipDownscale) + private async Task DownloadFilesInternal(GameObjectHandler? gameObjectHandler, List fileReplacement, CancellationToken ct, bool skipDownscale, bool skipDecimation) { var objectName = gameObjectHandler?.Name ?? "Unknown"; @@ -684,6 +636,20 @@ public partial class FileDownloadManager : DisposableMediatorSubscriberBase var allowDirectDownloads = ShouldUseDirectDownloads(); var replacementLookup = BuildReplacementLookup(fileReplacement); + var rawSizeLookup = new Dictionary(StringComparer.OrdinalIgnoreCase); + + foreach (var download in CurrentDownloads) + { + if (string.IsNullOrWhiteSpace(download.Hash)) + { + continue; + } + + if (!rawSizeLookup.TryGetValue(download.Hash, out var existing) || existing <= 0) + { + rawSizeLookup[download.Hash] = download.TotalRaw; + } + } var directDownloads = new List(); var batchDownloads = new List(); @@ -708,39 +674,36 @@ public partial class FileDownloadManager : DisposableMediatorSubscriberBase var chunkSize = (int)Math.Ceiling(list.Count / (double)chunkCount); return ChunkList(list, chunkSize) - .Select(chunk => new BatchChunk(g.Key, chunk)); + .Select((chunk, index) => new BatchChunk(g.Key, $"{g.Key}#{index + 1}", chunk)); }) .ToArray(); // init statuses - lock (_downloadStatusLock) + _downloadStatus.Clear(); + + // direct downloads and batch downloads tracked separately + foreach (var d in directDownloads) { - _downloadStatus.Clear(); - - // direct downloads and batch downloads tracked separately - foreach (var d in directDownloads) + _downloadStatus[d.DirectDownloadUrl!] = new FileDownloadStatus { - _downloadStatus[d.DirectDownloadUrl!] 
= new FileDownloadStatus - { - DownloadStatus = DownloadStatus.Initializing, - TotalBytes = d.Total, - TotalFiles = 1, - TransferredBytes = 0, - TransferredFiles = 0 - }; - } + DownloadStatus = DownloadStatus.WaitingForSlot, + TotalBytes = d.Total, + TotalFiles = 1, + TransferredBytes = 0, + TransferredFiles = 0 + }; + } - foreach (var g in batchChunks.GroupBy(c => c.Key, StringComparer.Ordinal)) + foreach (var chunk in batchChunks) + { + _downloadStatus[chunk.StatusKey] = new FileDownloadStatus { - _downloadStatus[g.Key] = new FileDownloadStatus - { - DownloadStatus = DownloadStatus.Initializing, - TotalBytes = g.SelectMany(x => x.Items).Sum(x => x.Total), - TotalFiles = 1, - TransferredBytes = 0, - TransferredFiles = 0 - }; - } + DownloadStatus = DownloadStatus.WaitingForQueue, + TotalBytes = chunk.Items.Sum(x => x.Total), + TotalFiles = 1, + TransferredBytes = 0, + TransferredFiles = 0 + }; } if (directDownloads.Count > 0 || batchChunks.Length > 0) @@ -752,30 +715,47 @@ public partial class FileDownloadManager : DisposableMediatorSubscriberBase if (gameObjectHandler is not null) Mediator.Publish(new DownloadStartedMessage(gameObjectHandler, _downloadStatus)); + // work based on cpu count and slots + var coreCount = Environment.ProcessorCount; + var baseWorkers = Math.Min(slots, coreCount); + + // only add buffer if decompression has capacity AND we have cores to spare + var availableDecompressSlots = _decompressGate.CurrentCount; + var extraWorkers = (availableDecompressSlots > 0 && coreCount >= 6) ? 2 : 0; + // allow some extra workers so downloads can continue while earlier items decompress. - var workerDop = Math.Clamp(slots * 2, 2, 16); + var workerDop = Math.Clamp(baseWorkers + extraWorkers, 2, coreCount); // batch downloads Task batchTask = batchChunks.Length == 0 ? Task.CompletedTask : Parallel.ForEachAsync(batchChunks, new ParallelOptions { MaxDegreeOfParallelism = workerDop, CancellationToken = ct }, - async (chunk, token) => await ProcessBatchChunkAsync(chunk, replacementLookup, token, skipDownscale).ConfigureAwait(false)); + async (chunk, token) => await ProcessBatchChunkAsync(chunk, replacementLookup, rawSizeLookup, token, skipDownscale, skipDecimation).ConfigureAwait(false)); // direct downloads Task directTask = directDownloads.Count == 0 ? 
Task.CompletedTask : Parallel.ForEachAsync(directDownloads, new ParallelOptions { MaxDegreeOfParallelism = workerDop, CancellationToken = ct }, - async (d, token) => await ProcessDirectAsync(d, replacementLookup, token, skipDownscale).ConfigureAwait(false)); + async (d, token) => await ProcessDirectAsync(d, replacementLookup, rawSizeLookup, token, skipDownscale, skipDecimation).ConfigureAwait(false)); await Task.WhenAll(batchTask, directTask).ConfigureAwait(false); + // process deferred compressions after all downloads complete + await ProcessDeferredCompressionsAsync(ct).ConfigureAwait(false); + Logger.LogDebug("Download end: {id}", objectName); ClearDownload(); } - private async Task ProcessBatchChunkAsync(BatchChunk chunk, Dictionary replacementLookup, CancellationToken ct, bool skipDownscale) + private async Task ProcessBatchChunkAsync( + BatchChunk chunk, + Dictionary replacementLookup, + IReadOnlyDictionary rawSizeLookup, + CancellationToken ct, + bool skipDownscale, + bool skipDecimation) { - var statusKey = chunk.Key; + var statusKey = chunk.StatusKey; // enqueue (no slot) SetStatus(statusKey, DownloadStatus.WaitingForQueue); @@ -793,7 +773,6 @@ public partial class FileDownloadManager : DisposableMediatorSubscriberBase try { - // download (with slot) var progress = CreateInlineProgress(bytes => AddTransferredBytes(statusKey, bytes)); // Download slot held on get @@ -803,10 +782,11 @@ public partial class FileDownloadManager : DisposableMediatorSubscriberBase if (!File.Exists(blockFile)) { Logger.LogWarning("{dlName}: Block file missing before extraction, skipping", fi.Name); + SetStatus(statusKey, DownloadStatus.Completed); return; } - await DecompressBlockFileAsync(statusKey, blockFile, replacementLookup, fi.Name, ct, skipDownscale).ConfigureAwait(false); + await DecompressBlockFileAsync(statusKey, blockFile, replacementLookup, rawSizeLookup, fi.Name, ct, skipDownscale, skipDecimation).ConfigureAwait(false); } catch (OperationCanceledException) { @@ -823,7 +803,13 @@ public partial class FileDownloadManager : DisposableMediatorSubscriberBase } } - private async Task ProcessDirectAsync(DownloadFileTransfer directDownload, Dictionary replacementLookup, CancellationToken ct, bool skipDownscale) + private async Task ProcessDirectAsync( + DownloadFileTransfer directDownload, + Dictionary replacementLookup, + IReadOnlyDictionary rawSizeLookup, + CancellationToken ct, + bool skipDownscale, + bool skipDecimation) { var progress = CreateInlineProgress(bytes => { @@ -833,7 +819,7 @@ public partial class FileDownloadManager : DisposableMediatorSubscriberBase if (!ShouldUseDirectDownloads() || string.IsNullOrEmpty(directDownload.DirectDownloadUrl)) { - await ProcessDirectAsQueuedFallbackAsync(directDownload, replacementLookup, progress, ct, skipDownscale).ConfigureAwait(false); + await ProcessDirectAsQueuedFallbackAsync(directDownload, replacementLookup, rawSizeLookup, progress, ct, skipDownscale, skipDecimation).ConfigureAwait(false); return; } @@ -861,6 +847,7 @@ public partial class FileDownloadManager : DisposableMediatorSubscriberBase if (!replacementLookup.TryGetValue(directDownload.Hash, out var repl)) { Logger.LogWarning("{hash}: No replacement data found for direct download.", directDownload.Hash); + SetStatus(directDownload.DirectDownloadUrl!, DownloadStatus.Completed); return; } @@ -873,13 +860,18 @@ public partial class FileDownloadManager : DisposableMediatorSubscriberBase byte[] compressedBytes = await File.ReadAllBytesAsync(tempFilename, ct).ConfigureAwait(false); var 
decompressedBytes = LZ4Wrapper.Unwrap(compressedBytes); + if (directDownload.TotalRaw > 0 && decompressedBytes.LongLength != directDownload.TotalRaw) + { + throw new InvalidDataException( + $"{directDownload.Hash}: Decompressed size mismatch (expected {directDownload.TotalRaw}, got {decompressedBytes.LongLength})"); + } + await _fileCompactor.WriteAllBytesAsync(finalFilename, decompressedBytes, ct).ConfigureAwait(false); - PersistFileToStorage(directDownload.Hash, finalFilename, repl.GamePath, skipDownscale); + PersistFileToStorage(directDownload.Hash, finalFilename, repl.GamePath, skipDownscale, skipDecimation); MarkTransferredFiles(directDownload.DirectDownloadUrl!, 1); + SetStatus(directDownload.DirectDownloadUrl!, DownloadStatus.Completed); Logger.LogDebug("Finished direct download of {hash}.", directDownload.Hash); - - RemoveStatus(directDownload.DirectDownloadUrl!); } catch (OperationCanceledException ex) { @@ -902,7 +894,7 @@ public partial class FileDownloadManager : DisposableMediatorSubscriberBase try { - await ProcessDirectAsQueuedFallbackAsync(directDownload, replacementLookup, progress, ct, skipDownscale).ConfigureAwait(false); + await ProcessDirectAsQueuedFallbackAsync(directDownload, replacementLookup, rawSizeLookup, progress, ct, skipDownscale, skipDecimation).ConfigureAwait(false); if (!expectedDirectDownloadFailure && failureCount >= 3 && !_disableDirectDownloads) { @@ -929,9 +921,11 @@ public partial class FileDownloadManager : DisposableMediatorSubscriberBase private async Task ProcessDirectAsQueuedFallbackAsync( DownloadFileTransfer directDownload, Dictionary replacementLookup, + IReadOnlyDictionary rawSizeLookup, IProgress progress, CancellationToken ct, - bool skipDownscale) + bool skipDownscale, + bool skipDecimation) { if (string.IsNullOrEmpty(directDownload.DirectDownloadUrl)) throw new InvalidOperationException("Direct download fallback requested without a direct download URL."); @@ -956,7 +950,7 @@ public partial class FileDownloadManager : DisposableMediatorSubscriberBase if (!File.Exists(blockFile)) throw new FileNotFoundException("Block file missing after direct download fallback.", blockFile); - await DecompressBlockFileAsync(statusKey, blockFile, replacementLookup, $"fallback-{directDownload.Hash}", ct, skipDownscale) + await DecompressBlockFileAsync(statusKey, blockFile, replacementLookup, rawSizeLookup, $"fallback-{directDownload.Hash}", ct, skipDownscale, skipDecimation) .ConfigureAwait(false); } finally @@ -974,18 +968,16 @@ public partial class FileDownloadManager : DisposableMediatorSubscriberBase if (!_orchestrator.IsInitialized) throw new InvalidOperationException("FileTransferManager is not initialized"); - // batch request var response = await _orchestrator.SendRequestAsync( HttpMethod.Get, LightlessFiles.ServerFilesGetSizesFullPath(_orchestrator.FilesCdnUri!), hashes, ct).ConfigureAwait(false); - // ensure success return await response.Content.ReadFromJsonAsync>(cancellationToken: ct).ConfigureAwait(false) ?? 
[]; } - private void PersistFileToStorage(string fileHash, string filePath, string gamePath, bool skipDownscale) + private void PersistFileToStorage(string fileHash, string filePath, string gamePath, bool skipDownscale, bool skipDecimation) { var fi = new FileInfo(filePath); @@ -1001,13 +993,26 @@ public partial class FileDownloadManager : DisposableMediatorSubscriberBase fi.LastAccessTime = DateTime.Today; fi.LastWriteTime = RandomDayInThePast().Invoke(); + // queue file for deferred compression instead of compressing immediately + if (_configService.Current.UseCompactor) + _deferredCompressionQueue.Enqueue(filePath); + try { - var entry = _fileDbManager.CreateCacheEntry(filePath); - var mapKind = _textureMetadataHelper.DetermineMapKind(gamePath, filePath); + var entry = _fileDbManager.CreateCacheEntryWithKnownHash(filePath, fileHash); - if (!skipDownscale) - _textureDownscaleService.ScheduleDownscale(fileHash, filePath, mapKind); + if (!skipDownscale && _textureDownscaleService.ShouldScheduleDownscale(filePath)) + { + _textureDownscaleService.ScheduleDownscale( + fileHash, + filePath, + () => _textureMetadataHelper.DetermineMapKind(gamePath, filePath)); + } + + if (!skipDecimation && _modelDecimationService.ShouldScheduleDecimation(fileHash, filePath, gamePath)) + { + _modelDecimationService.ScheduleDecimation(fileHash, filePath, gamePath); + } if (entry != null && !string.Equals(entry.Hash, fileHash, StringComparison.OrdinalIgnoreCase)) { @@ -1026,6 +1031,52 @@ public partial class FileDownloadManager : DisposableMediatorSubscriberBase private static IProgress CreateInlineProgress(Action callback) => new InlineProgress(callback); + private async Task ProcessDeferredCompressionsAsync(CancellationToken ct) + { + if (_deferredCompressionQueue.IsEmpty) + return; + + var filesToCompress = new List(); + while (_deferredCompressionQueue.TryDequeue(out var filePath)) + { + if (File.Exists(filePath)) + filesToCompress.Add(filePath); + } + + if (filesToCompress.Count == 0) + return; + + Logger.LogDebug("Starting deferred compression of {count} files", filesToCompress.Count); + + var compressionWorkers = Math.Clamp(Environment.ProcessorCount / 4, 2, 4); + + await Parallel.ForEachAsync(filesToCompress, + new ParallelOptions + { + MaxDegreeOfParallelism = compressionWorkers, + CancellationToken = ct + }, + async (filePath, token) => + { + try + { + await Task.Yield(); + if (_configService.Current.UseCompactor && File.Exists(filePath)) + { + var bytes = await File.ReadAllBytesAsync(filePath, token).ConfigureAwait(false); + await _fileCompactor.WriteAllBytesAsync(filePath, bytes, token).ConfigureAwait(false); + Logger.LogTrace("Compressed file: {filePath}", filePath); + } + } + catch (Exception ex) + { + Logger.LogWarning(ex, "Failed to compress file: {filePath}", filePath); + } + }).ConfigureAwait(false); + + Logger.LogDebug("Completed deferred compression of {count} files", filesToCompress.Count); + } + private sealed class InlineProgress : IProgress { private readonly Action _callback; diff --git a/LightlessSync/WebAPI/Files/Models/DownloadStatus.cs b/LightlessSync/WebAPI/Files/Models/DownloadStatus.cs index 6e10a73..3e210c8 100644 --- a/LightlessSync/WebAPI/Files/Models/DownloadStatus.cs +++ b/LightlessSync/WebAPI/Files/Models/DownloadStatus.cs @@ -6,5 +6,6 @@ public enum DownloadStatus WaitingForSlot, WaitingForQueue, Downloading, - Decompressing + Decompressing, + Completed } \ No newline at end of file diff --git a/LightlessSync/WebAPI/Files/Models/FileDownloadStatus.cs 
b/LightlessSync/WebAPI/Files/Models/FileDownloadStatus.cs index 9340278..aa0aed0 100644 --- a/LightlessSync/WebAPI/Files/Models/FileDownloadStatus.cs +++ b/LightlessSync/WebAPI/Files/Models/FileDownloadStatus.cs @@ -1,10 +1,46 @@ -namespace LightlessSync.WebAPI.Files.Models; +using System.Threading; + +namespace LightlessSync.WebAPI.Files.Models; public class FileDownloadStatus { - public DownloadStatus DownloadStatus { get; set; } - public long TotalBytes { get; set; } - public int TotalFiles { get; set; } - public long TransferredBytes { get; set; } - public int TransferredFiles { get; set; } -} \ No newline at end of file + private int _downloadStatus; + private long _totalBytes; + private int _totalFiles; + private long _transferredBytes; + private int _transferredFiles; + + public DownloadStatus DownloadStatus + { + get => (DownloadStatus)Volatile.Read(ref _downloadStatus); + set => Volatile.Write(ref _downloadStatus, (int)value); + } + + public long TotalBytes + { + get => Interlocked.Read(ref _totalBytes); + set => Interlocked.Exchange(ref _totalBytes, value); + } + + public int TotalFiles + { + get => Volatile.Read(ref _totalFiles); + set => Volatile.Write(ref _totalFiles, value); + } + + public long TransferredBytes + { + get => Interlocked.Read(ref _transferredBytes); + set => Interlocked.Exchange(ref _transferredBytes, value); + } + + public int TransferredFiles + { + get => Volatile.Read(ref _transferredFiles); + set => Volatile.Write(ref _transferredFiles, value); + } + + public void AddTransferredBytes(long delta) => Interlocked.Add(ref _transferredBytes, delta); + + public void SetTransferredFiles(int files) => Volatile.Write(ref _transferredFiles, files); +}
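
For context on the CheckUpdatedData change in LightlessSync/Utils/VariousExtensions.cs above, the following is a minimal sketch of the new redraw gating only; the helper RedrawGatingSketch.NeedsForcedRedraw and its bool parameters are illustrative stand-ins, and only the boolean expression mirrors the diff.

    // Sketch: a forced mod apply now forces a redraw of the player object only when the new
    // suppressForcedRedrawOnForcedModApply flag is unset; non-player objects keep their
    // unconditional forced redraw.
    public static class RedrawGatingSketch
    {
        public static bool NeedsForcedRedraw(bool isPlayerObject, bool forceApplyMods, bool suppressForcedRedrawOnForcedModApply)
        {
            var forceRedrawOnForcedApply = forceApplyMods && !suppressForcedRedrawOnForcedModApply;
            return !isPlayerObject || forceRedrawOnForcedApply;
        }
    }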
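
The rewritten WaitForDownloadReady in FileDownloadManager.cs replaces the nested five-second timeout token sources with a flat polling loop. A minimal sketch of that shape follows, assuming an IQueueClient stand-in for the orchestrator and collapsing the RequestCheckQueue HTTP round trip into a bool result.

    using System;
    using System.Threading;
    using System.Threading.Tasks;

    public interface IQueueClient
    {
        bool IsDownloadReady(Guid requestId);
        Task<bool> CheckQueueAsync(Guid requestId, CancellationToken ct); // stand-in for the queue-check request
    }

    public static class DownloadReadiness
    {
        public static async Task WaitForReadyAsync(IQueueClient client, Guid requestId, CancellationToken ct)
        {
            while (true)
            {
                ct.ThrowIfCancellationRequested();

                // The local readiness flag set by the orchestrator ends the wait immediately.
                if (client.IsDownloadReady(requestId))
                    break;

                // Otherwise ask the server queue directly; a positive answer also ends the wait.
                if (await client.CheckQueueAsync(requestId, ct).ConfigureAwait(false))
                    break;

                await Task.Delay(250, ct).ConfigureAwait(false);
            }
            // The real method clears the download request on the orchestrator after the loop.
        }
    }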
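
Both download paths above compare the decompressed length against the raw size reported by the server. A minimal sketch of that check, mirroring the direct-download variant that throws on mismatch; the decompress parameter is a placeholder standing in for LZ4Wrapper.Unwrap.

    using System;
    using System.Collections.Generic;
    using System.IO;

    public static class SizeValidation
    {
        public static byte[] DecompressAndValidate(
            string hash,
            byte[] compressed,
            IReadOnlyDictionary<string, long> rawSizeLookup,
            Func<byte[], byte[]> decompress)
        {
            var decompressed = decompress(compressed);

            // A known raw size that does not match the decompressed length indicates a corrupt transfer.
            if (rawSizeLookup.TryGetValue(hash, out var expectedRawSize)
                && expectedRawSize > 0
                && decompressed.LongLength != expectedRawSize)
            {
                throw new InvalidDataException(
                    $"{hash}: Decompressed size mismatch (expected {expectedRawSize}, got {decompressed.LongLength})");
            }

            return decompressed;
        }
    }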
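
The download worker sizing in DownloadFilesInternal is a small calculation: take the smaller of the server-granted slots and the core count, add a two-worker buffer only when the decompression gate still has capacity and the machine has at least six cores, then clamp between 2 and the core count. A standalone sketch of that arithmetic (method and parameter names are illustrative):

    using System;

    public static class WorkerSizing
    {
        public static int ComputeWorkerDop(int slots, int coreCount, int availableDecompressSlots)
        {
            var baseWorkers = Math.Min(slots, coreCount);
            var extraWorkers = (availableDecompressSlots > 0 && coreCount >= 6) ? 2 : 0;
            return Math.Clamp(baseWorkers + extraWorkers, 2, coreCount);
        }

        public static void Main()
        {
            // 4 slots on an 8-core machine with free decompression capacity => 4 + 2 = 6 workers.
            Console.WriteLine(ComputeWorkerDop(slots: 4, coreCount: 8, availableDecompressSlots: 2));
        }
    }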
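
ProcessDeferredCompressionsAsync drains the queue that PersistFileToStorage fills: files are written uncompressed while downloads are in flight, and compaction runs afterwards with a small, clamped degree of parallelism. A minimal sketch of that pattern, with compactAsync standing in for FileCompactor.WriteAllBytesAsync:

    using System;
    using System.Collections.Concurrent;
    using System.Collections.Generic;
    using System.IO;
    using System.Threading;
    using System.Threading.Tasks;

    public sealed class DeferredCompactionQueue
    {
        private readonly ConcurrentQueue<string> _queue = new();

        // Called while downloads are running, instead of compacting each file immediately.
        public void Enqueue(string filePath) => _queue.Enqueue(filePath);

        // Called once after all downloads complete.
        public async Task ProcessAsync(Func<string, CancellationToken, Task> compactAsync, CancellationToken ct)
        {
            var files = new List<string>();
            while (_queue.TryDequeue(out var path))
            {
                if (File.Exists(path))
                    files.Add(path);
            }

            if (files.Count == 0)
                return;

            // Keep compaction light: roughly a quarter of the cores, but between 2 and 4 workers.
            var workers = Math.Clamp(Environment.ProcessorCount / 4, 2, 4);

            await Parallel.ForEachAsync(files,
                new ParallelOptions { MaxDegreeOfParallelism = workers, CancellationToken = ct },
                async (path, token) => { await compactAsync(path, token).ConfigureAwait(false); })
                .ConfigureAwait(false);
        }
    }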
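
The FileDownloadStatus rewrite at the end of this patch makes every counter safe to mutate from the parallel download workers while the UI reads it. A minimal sketch of the same pattern together with the ConcurrentDictionary status map; the driver code is illustrative and only the Volatile/Interlocked accessors mirror the diff.

    using System;
    using System.Collections.Concurrent;
    using System.Linq;
    using System.Threading;
    using System.Threading.Tasks;

    public enum DownloadStatus { Initializing, WaitingForSlot, WaitingForQueue, Downloading, Decompressing, Completed }

    public sealed class FileDownloadStatus
    {
        private int _status;
        private long _transferredBytes;

        public long TotalBytes { get; init; }

        public DownloadStatus Status
        {
            get => (DownloadStatus)Volatile.Read(ref _status);
            set => Volatile.Write(ref _status, (int)value);
        }

        public long TransferredBytes => Interlocked.Read(ref _transferredBytes);

        public void AddTransferredBytes(long delta) => Interlocked.Add(ref _transferredBytes, delta);
    }

    public static class StatusDemo
    {
        public static async Task Main()
        {
            var statuses = new ConcurrentDictionary<string, FileDownloadStatus>(StringComparer.Ordinal);
            var st = new FileDownloadStatus { TotalBytes = 1_000_000, Status = DownloadStatus.Downloading };
            statuses["host-a#1"] = st;

            // Four workers report progress for the same chunk concurrently; no torn reads, no lock.
            await Task.WhenAll(Enumerable.Range(0, 4).Select(_ => Task.Run(() =>
            {
                for (var i = 0; i < 1_000; i++)
                    st.AddTransferredBytes(250);
            })));

            st.Status = DownloadStatus.Completed;
            Console.WriteLine($"{st.TransferredBytes} / {st.TotalBytes} bytes");
        }
    }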