diff --git a/LightlessCompactor/FileCache/CompactorInterfaces.cs b/LightlessCompactor/FileCache/CompactorInterfaces.cs new file mode 100644 index 0000000..59fc255 --- /dev/null +++ b/LightlessCompactor/FileCache/CompactorInterfaces.cs @@ -0,0 +1,18 @@ +namespace LightlessSync.FileCache; + +public interface ICompactorContext +{ + bool UseCompactor { get; } + string CacheFolder { get; } + bool IsWine { get; } +} + +public interface ICompactionExecutor +{ + bool TryCompact(string filePath); +} + +public sealed class NoopCompactionExecutor : ICompactionExecutor +{ + public bool TryCompact(string filePath) => false; +} diff --git a/LightlessSync/FileCache/FileCompactor.cs b/LightlessCompactor/FileCache/FileCompactor.cs similarity index 94% rename from LightlessSync/FileCache/FileCompactor.cs rename to LightlessCompactor/FileCache/FileCompactor.cs index 771f558..cc3b46c 100644 --- a/LightlessSync/FileCache/FileCompactor.cs +++ b/LightlessCompactor/FileCache/FileCompactor.cs @@ -1,6 +1,4 @@ -using LightlessSync.LightlessConfiguration; -using LightlessSync.Services; -using LightlessSync.Services.Compactor; +using LightlessSync.Services.Compactor; using Microsoft.Extensions.Logging; using Microsoft.Win32.SafeHandles; using System.Collections.Concurrent; @@ -20,8 +18,8 @@ public sealed partial class FileCompactor : IDisposable private readonly ConcurrentDictionary _pendingCompactions; private readonly ILogger _logger; - private readonly LightlessConfigService _lightlessConfigService; - private readonly DalamudUtilService _dalamudUtilService; + private readonly ICompactorContext _context; + private readonly ICompactionExecutor _compactionExecutor; private readonly Channel _compactionQueue; private readonly CancellationTokenSource _compactionCts = new(); @@ -59,12 +57,12 @@ public sealed partial class FileCompactor : IDisposable XPRESS16K = 3 } - public FileCompactor(ILogger logger, LightlessConfigService lightlessConfigService, DalamudUtilService dalamudUtilService) + public 
FileCompactor(ILogger logger, ICompactorContext context, ICompactionExecutor compactionExecutor) { _pendingCompactions = new(StringComparer.OrdinalIgnoreCase); - _logger = logger; - _lightlessConfigService = lightlessConfigService; - _dalamudUtilService = dalamudUtilService; + _logger = logger ?? throw new ArgumentNullException(nameof(logger)); + _context = context ?? throw new ArgumentNullException(nameof(context)); + _compactionExecutor = compactionExecutor ?? throw new ArgumentNullException(nameof(compactionExecutor)); _isWindows = OperatingSystem.IsWindows(); _compactionQueue = Channel.CreateUnbounded(new UnboundedChannelOptions @@ -94,7 +92,7 @@ public sealed partial class FileCompactor : IDisposable //Uses an batching service for the filefrag command on Linux _fragBatch = new BatchFilefragService( - useShell: _dalamudUtilService.IsWine, + useShell: _context.IsWine, log: _logger, batchSize: 64, flushMs: 25, @@ -118,7 +116,7 @@ public sealed partial class FileCompactor : IDisposable try { - var folder = _lightlessConfigService.Current.CacheFolder; + var folder = _context.CacheFolder; if (string.IsNullOrWhiteSpace(folder) || !Directory.Exists(folder)) { if (_logger.IsEnabled(LogLevel.Warning)) @@ -127,7 +125,7 @@ public sealed partial class FileCompactor : IDisposable return; } - var files = Directory.EnumerateFiles(folder).ToArray(); + var files = Directory.EnumerateFiles(folder, "*", SearchOption.AllDirectories).ToArray(); var total = files.Length; Progress = $"0/{total}"; if (total == 0) return; @@ -155,7 +153,7 @@ public sealed partial class FileCompactor : IDisposable { if (compress) { - if (_lightlessConfigService.Current.UseCompactor) + if (_context.UseCompactor) CompactFile(file, workerId); } else @@ -221,19 +219,52 @@ public sealed partial class FileCompactor : IDisposable await File.WriteAllBytesAsync(filePath, bytes, token).ConfigureAwait(false); - if (_lightlessConfigService.Current.UseCompactor) + if (_context.UseCompactor) 
EnqueueCompaction(filePath); } + /// + /// Notify the compactor that a file was written directly (streamed) so it can enqueue compaction. + /// + public void NotifyFileWritten(string filePath) + { + EnqueueCompaction(filePath); + } + + public bool TryCompactFile(string filePath) + { + if (string.IsNullOrWhiteSpace(filePath)) + return false; + + if (!_context.UseCompactor || !File.Exists(filePath)) + return false; + + try + { + CompactFile(filePath, workerId: -1); + return true; + } + catch (IOException ioEx) + { + _logger.LogDebug(ioEx, "File being read/written, skipping file: {file}", filePath); + } + catch (Exception ex) + { + _logger.LogWarning(ex, "Error compacting file: {file}", filePath); + } + + return false; + } + /// /// Gets the File size for an BTRFS or NTFS file system for the given FileInfo /// /// Amount of blocks used in the disk public long GetFileSizeOnDisk(FileInfo fileInfo) { - var fsType = GetFilesystemType(fileInfo.FullName, _dalamudUtilService.IsWine); + var fsType = GetFilesystemType(fileInfo.FullName, _context.IsWine); - if (fsType == FilesystemType.NTFS && !_dalamudUtilService.IsWine) + if (fsType == FilesystemType.NTFS && !_context.IsWine) { (bool flowControl, long value) = GetFileSizeNTFS(fileInfo); if (!flowControl) @@ -290,7 +321,7 @@ public sealed partial class FileCompactor : IDisposable { try { - var blockSize = GetBlockSizeForPath(fileInfo.FullName, _logger, _dalamudUtilService.IsWine); + var blockSize = GetBlockSizeForPath(fileInfo.FullName, _logger, _context.IsWine); if (blockSize <= 0) throw new InvalidOperationException($"Invalid block size {blockSize} for {fileInfo.FullName}"); @@ -330,7 +361,7 @@ public sealed partial class FileCompactor : IDisposable return; } - var fsType = GetFilesystemType(filePath, _dalamudUtilService.IsWine); + var fsType = GetFilesystemType(filePath, _context.IsWine); var oldSize = fi.Length; int blockSize = (int)(GetFileSizeOnDisk(fi) / 512); @@ -346,7 +377,7 @@ public sealed partial class 
FileCompactor : IDisposable return; } - if (fsType == FilesystemType.NTFS && !_dalamudUtilService.IsWine) + if (fsType == FilesystemType.NTFS && !_context.IsWine) { if (!IsWOFCompactedFile(filePath)) { @@ -402,9 +433,9 @@ public sealed partial class FileCompactor : IDisposable private void DecompressFile(string filePath, int workerId) { _logger.LogDebug("[W{worker}] Decompress request: {file}", workerId, filePath); - var fsType = GetFilesystemType(filePath, _dalamudUtilService.IsWine); + var fsType = GetFilesystemType(filePath, _context.IsWine); - if (fsType == FilesystemType.NTFS && !_dalamudUtilService.IsWine) + if (fsType == FilesystemType.NTFS && !_context.IsWine) { try { @@ -448,7 +479,7 @@ public sealed partial class FileCompactor : IDisposable { try { - bool isWine = _dalamudUtilService?.IsWine ?? false; + bool isWine = _context.IsWine; string linuxPath = isWine ? ToLinuxPathIfWine(path, isWine) : path; var opts = GetMountOptionsForPath(linuxPath); @@ -961,7 +992,7 @@ public sealed partial class FileCompactor : IDisposable if (finished != bothTasks) return KillProcess(proc, outTask, errTask, token); - bool isWine = _dalamudUtilService?.IsWine ?? false; + bool isWine = _context.IsWine; if (!isWine) { try { proc.WaitForExit(); } catch { /* ignore quirks */ } @@ -1005,7 +1036,7 @@ public sealed partial class FileCompactor : IDisposable if (string.IsNullOrWhiteSpace(filePath)) return; - if (!_lightlessConfigService.Current.UseCompactor) + if (!_context.UseCompactor) return; if (!File.Exists(filePath)) @@ -1017,7 +1048,7 @@ public sealed partial class FileCompactor : IDisposable bool enqueued = false; try { - bool isWine = _dalamudUtilService?.IsWine ?? false; + bool isWine = _context.IsWine; var fsType = GetFilesystemType(filePath, isWine); // If under Wine, we should skip NTFS because its not Windows but might return NTFS. 
@@ -1070,8 +1101,11 @@ public sealed partial class FileCompactor : IDisposable try { - if (_lightlessConfigService.Current.UseCompactor && File.Exists(filePath)) - CompactFile(filePath, workerId); + if (_context.UseCompactor && File.Exists(filePath)) + { + if (!_compactionExecutor.TryCompact(filePath)) + CompactFile(filePath, workerId); + } } finally { diff --git a/LightlessCompactor/LightlessCompactor.csproj b/LightlessCompactor/LightlessCompactor.csproj new file mode 100644 index 0000000..419cd5c --- /dev/null +++ b/LightlessCompactor/LightlessCompactor.csproj @@ -0,0 +1,15 @@ + + + + net10.0 + latest + enable + enable + true + + + + + + + diff --git a/LightlessSync/Services/Compactor/BatchFileFragService.cs b/LightlessCompactor/Services/Compactor/BatchFileFragService.cs similarity index 100% rename from LightlessSync/Services/Compactor/BatchFileFragService.cs rename to LightlessCompactor/Services/Compactor/BatchFileFragService.cs diff --git a/LightlessSync/Utils/FileSystemHelper.cs b/LightlessCompactor/Utils/FileSystemHelper.cs similarity index 100% rename from LightlessSync/Utils/FileSystemHelper.cs rename to LightlessCompactor/Utils/FileSystemHelper.cs diff --git a/LightlessCompactorWorker/LightlessCompactorWorker.csproj b/LightlessCompactorWorker/LightlessCompactorWorker.csproj new file mode 100644 index 0000000..e943619 --- /dev/null +++ b/LightlessCompactorWorker/LightlessCompactorWorker.csproj @@ -0,0 +1,19 @@ + + + + WinExe + net10.0 + latest + enable + enable + + + + + + + + + + + diff --git a/LightlessCompactorWorker/Program.cs b/LightlessCompactorWorker/Program.cs new file mode 100644 index 0000000..26f09d1 --- /dev/null +++ b/LightlessCompactorWorker/Program.cs @@ -0,0 +1,270 @@ +using LightlessSync.FileCache; +using Microsoft.Extensions.Logging; +using System.Diagnostics; +using System.IO.Pipes; +using System.Text.Json; + +internal sealed class WorkerCompactorContext : ICompactorContext +{ + public WorkerCompactorContext(string cacheFolder, bool 
isWine) + { + CacheFolder = cacheFolder; + IsWine = isWine; + } + + public bool UseCompactor => true; + public string CacheFolder { get; } + public bool IsWine { get; } +} + +internal sealed class WorkerOptions +{ + public string? FilePath { get; init; } + public bool IsWine { get; init; } + public string CacheFolder { get; init; } = string.Empty; + public LogLevel LogLevel { get; init; } = LogLevel.Information; + public string PipeName { get; init; } = "LightlessCompactor"; + public int? ParentProcessId { get; init; } +} + +internal static class Program +{ + public static async Task Main(string[] args) + { + var options = ParseOptions(args, out var error); + if (options is null) + { + Console.Error.WriteLine(error ?? "Invalid arguments."); + Console.Error.WriteLine("Usage: LightlessCompactorWorker --file [--wine] [--cache-folder ] [--verbose]"); + Console.Error.WriteLine(" or: LightlessCompactorWorker --pipe [--wine] [--parent ] [--verbose]"); + return 2; + } + + TrySetLowPriority(); + + using var loggerFactory = LoggerFactory.Create(builder => + { + builder.SetMinimumLevel(options.LogLevel); + builder.AddSimpleConsole(o => + { + o.SingleLine = true; + o.TimestampFormat = "HH:mm:ss.fff "; + }); + }); + + var logger = loggerFactory.CreateLogger(); + var context = new WorkerCompactorContext(options.CacheFolder, options.IsWine); + + using var compactor = new FileCompactor(logger, context, new NoopCompactionExecutor()); + + if (!string.IsNullOrWhiteSpace(options.FilePath)) + { + var success = compactor.TryCompactFile(options.FilePath!); + return success ? 
0 : 1; + } + + var serverLogger = loggerFactory.CreateLogger("CompactorWorker"); + return await RunServerAsync(compactor, options, serverLogger).ConfigureAwait(false); + } + + private static async Task RunServerAsync(FileCompactor compactor, WorkerOptions options, ILogger serverLogger) + { + using var cts = new CancellationTokenSource(); + var token = cts.Token; + + if (options.ParentProcessId.HasValue) + { + _ = Task.Run(() => MonitorParent(options.ParentProcessId.Value, cts)); + } + + serverLogger.LogInformation("Compactor worker listening on pipe {pipe}", options.PipeName); + + try + { + while (!token.IsCancellationRequested) + { + var server = new NamedPipeServerStream( + options.PipeName, + PipeDirection.InOut, + NamedPipeServerStream.MaxAllowedServerInstances, + PipeTransmissionMode.Byte, + PipeOptions.Asynchronous); + + try + { + await server.WaitForConnectionAsync(token).ConfigureAwait(false); + } + catch + { + server.Dispose(); + throw; + } + + _ = Task.Run(() => HandleClientAsync(server, compactor, cts)); + } + } + catch (OperationCanceledException) + { + // shutdown requested + } + catch (Exception ex) + { + serverLogger.LogWarning(ex, "Compactor worker terminated unexpectedly."); + return 1; + } + + return 0; + } + + private static async Task HandleClientAsync(NamedPipeServerStream pipe, FileCompactor compactor, CancellationTokenSource shutdownCts) + { + await using var _ = pipe; + using var reader = new StreamReader(pipe); + using var writer = new StreamWriter(pipe) { AutoFlush = true }; + + var line = await reader.ReadLineAsync().ConfigureAwait(false); + if (string.IsNullOrWhiteSpace(line)) + return; + + CompactorRequest? request = null; + try + { + request = JsonSerializer.Deserialize(line); + } + catch + { + // ignore + } + + CompactorResponse response; + if (request is null) + { + response = new CompactorResponse { Success = false, Error = "Invalid request." 
}; + } + else if (string.Equals(request.Type, "shutdown", StringComparison.OrdinalIgnoreCase)) + { + shutdownCts.Cancel(); + response = new CompactorResponse { Success = true }; + } + else if (string.Equals(request.Type, "compact", StringComparison.OrdinalIgnoreCase)) + { + var success = compactor.TryCompactFile(request.Path ?? string.Empty); + response = new CompactorResponse { Success = success }; + } + else + { + response = new CompactorResponse { Success = false, Error = "Unknown request type." }; + } + + await writer.WriteLineAsync(JsonSerializer.Serialize(response)).ConfigureAwait(false); + } + + private static void MonitorParent(int parentPid, CancellationTokenSource shutdownCts) + { + try + { + var parent = Process.GetProcessById(parentPid); + parent.WaitForExit(); + } + catch + { + // parent missing + } + finally + { + shutdownCts.Cancel(); + } + } + + private static WorkerOptions? ParseOptions(string[] args, out string? error) + { + string? filePath = null; + bool isWine = false; + string cacheFolder = string.Empty; + var logLevel = LogLevel.Information; + string pipeName = "LightlessCompactor"; + int? 
parentPid = null; + + for (int i = 0; i < args.Length; i++) + { + var arg = args[i]; + switch (arg) + { + case "--file": + if (i + 1 >= args.Length) + { + error = "Missing value for --file."; + return null; + } + filePath = args[++i]; + break; + case "--cache-folder": + if (i + 1 >= args.Length) + { + error = "Missing value for --cache-folder."; + return null; + } + cacheFolder = args[++i]; + break; + case "--pipe": + if (i + 1 >= args.Length) + { + error = "Missing value for --pipe."; + return null; + } + pipeName = args[++i]; + break; + case "--parent": + if (i + 1 >= args.Length || !int.TryParse(args[++i], out var pid)) + { + error = "Invalid value for --parent."; + return null; + } + parentPid = pid; + break; + case "--wine": + isWine = true; + break; + case "--verbose": + logLevel = LogLevel.Trace; + break; + } + } + + error = null; + return new WorkerOptions + { + FilePath = filePath, + IsWine = isWine, + CacheFolder = cacheFolder, + LogLevel = logLevel, + PipeName = pipeName, + ParentProcessId = parentPid + }; + } + + private static void TrySetLowPriority() + { + try + { + if (OperatingSystem.IsWindows()) + Process.GetCurrentProcess().PriorityClass = ProcessPriorityClass.BelowNormal; + } + catch + { + // ignore + } + } + + private sealed class CompactorRequest + { + public string Type { get; init; } = "compact"; + public string? Path { get; init; } + } + + private sealed class CompactorResponse + { + public bool Success { get; init; } + public string? 
Error { get; init; } + } +} diff --git a/LightlessSync.sln b/LightlessSync.sln index 55bddfd..f69eb4b 100644 --- a/LightlessSync.sln +++ b/LightlessSync.sln @@ -22,6 +22,10 @@ Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "OtterGui", "OtterGui\OtterG EndProject Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "Pictomancy", "ffxiv_pictomancy\Pictomancy\Pictomancy.csproj", "{825F17D8-2704-24F6-DF8B-2542AC92C765}" EndProject +Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "LightlessCompactor", "LightlessCompactor\LightlessCompactor.csproj", "{01F31917-9F1E-426D-BDAE-17268CBF9523}" +EndProject +Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "LightlessCompactorWorker", "LightlessCompactorWorker\LightlessCompactorWorker.csproj", "{72BE3664-CD0E-4DA4-B040-91338A2798E0}" +EndProject Global GlobalSection(SolutionConfigurationPlatforms) = preSolution Debug|Any CPU = Debug|Any CPU @@ -116,6 +120,30 @@ Global {825F17D8-2704-24F6-DF8B-2542AC92C765}.Release|x64.Build.0 = Release|x64 {825F17D8-2704-24F6-DF8B-2542AC92C765}.Release|x86.ActiveCfg = Release|x64 {825F17D8-2704-24F6-DF8B-2542AC92C765}.Release|x86.Build.0 = Release|x64 + {01F31917-9F1E-426D-BDAE-17268CBF9523}.Debug|Any CPU.ActiveCfg = Debug|Any CPU + {01F31917-9F1E-426D-BDAE-17268CBF9523}.Debug|Any CPU.Build.0 = Debug|Any CPU + {01F31917-9F1E-426D-BDAE-17268CBF9523}.Debug|x64.ActiveCfg = Debug|Any CPU + {01F31917-9F1E-426D-BDAE-17268CBF9523}.Debug|x64.Build.0 = Debug|Any CPU + {01F31917-9F1E-426D-BDAE-17268CBF9523}.Debug|x86.ActiveCfg = Debug|Any CPU + {01F31917-9F1E-426D-BDAE-17268CBF9523}.Debug|x86.Build.0 = Debug|Any CPU + {01F31917-9F1E-426D-BDAE-17268CBF9523}.Release|Any CPU.ActiveCfg = Release|Any CPU + {01F31917-9F1E-426D-BDAE-17268CBF9523}.Release|Any CPU.Build.0 = Release|Any CPU + {01F31917-9F1E-426D-BDAE-17268CBF9523}.Release|x64.ActiveCfg = Release|Any CPU + {01F31917-9F1E-426D-BDAE-17268CBF9523}.Release|x64.Build.0 = Release|Any CPU + 
{01F31917-9F1E-426D-BDAE-17268CBF9523}.Release|x86.ActiveCfg = Release|Any CPU + {01F31917-9F1E-426D-BDAE-17268CBF9523}.Release|x86.Build.0 = Release|Any CPU + {72BE3664-CD0E-4DA4-B040-91338A2798E0}.Debug|Any CPU.ActiveCfg = Debug|Any CPU + {72BE3664-CD0E-4DA4-B040-91338A2798E0}.Debug|Any CPU.Build.0 = Debug|Any CPU + {72BE3664-CD0E-4DA4-B040-91338A2798E0}.Debug|x64.ActiveCfg = Debug|Any CPU + {72BE3664-CD0E-4DA4-B040-91338A2798E0}.Debug|x64.Build.0 = Debug|Any CPU + {72BE3664-CD0E-4DA4-B040-91338A2798E0}.Debug|x86.ActiveCfg = Debug|Any CPU + {72BE3664-CD0E-4DA4-B040-91338A2798E0}.Debug|x86.Build.0 = Debug|Any CPU + {72BE3664-CD0E-4DA4-B040-91338A2798E0}.Release|Any CPU.ActiveCfg = Release|Any CPU + {72BE3664-CD0E-4DA4-B040-91338A2798E0}.Release|Any CPU.Build.0 = Release|Any CPU + {72BE3664-CD0E-4DA4-B040-91338A2798E0}.Release|x64.ActiveCfg = Release|Any CPU + {72BE3664-CD0E-4DA4-B040-91338A2798E0}.Release|x64.Build.0 = Release|Any CPU + {72BE3664-CD0E-4DA4-B040-91338A2798E0}.Release|x86.ActiveCfg = Release|Any CPU + {72BE3664-CD0E-4DA4-B040-91338A2798E0}.Release|x86.Build.0 = Release|Any CPU EndGlobalSection GlobalSection(SolutionProperties) = preSolution HideSolutionNode = FALSE diff --git a/LightlessSync/FileCache/ExternalCompactionExecutor.cs b/LightlessSync/FileCache/ExternalCompactionExecutor.cs new file mode 100644 index 0000000..85c5a64 --- /dev/null +++ b/LightlessSync/FileCache/ExternalCompactionExecutor.cs @@ -0,0 +1,241 @@ +using Microsoft.Extensions.Logging; +using System.Diagnostics; +using System.IO.Pipes; +using System.Text.Json; + +namespace LightlessSync.FileCache; + +internal sealed class ExternalCompactionExecutor : ICompactionExecutor, IDisposable +{ + private readonly ILogger _logger; + private readonly ICompactorContext _context; + private readonly TimeSpan _timeout = TimeSpan.FromMinutes(5); + private readonly string _pipeName; + private Process? 
_workerProcess; + private bool _disposed; + private readonly object _sync = new(); + + public ExternalCompactionExecutor(ILogger logger, ICompactorContext context) + { + _logger = logger; + _context = context; + _pipeName = $"LightlessCompactor-{Environment.ProcessId}"; + } + + public bool TryCompact(string filePath) + { + if (string.IsNullOrWhiteSpace(filePath) || !File.Exists(filePath)) + return false; + + if (!EnsureWorkerRunning()) + return false; + + try + { + var request = new CompactorRequest + { + Type = "compact", + Path = filePath + }; + + return SendRequest(request, out var response) && response?.Success == true; + } + catch (Exception ex) + { + _logger.LogWarning(ex, "External compactor failed for {file}", filePath); + return false; + } + } + + public void Dispose() + { + if (_disposed) + return; + + _disposed = true; + + try + { + SendRequest(new CompactorRequest { Type = "shutdown" }, out _); + } + catch + { + // ignore + } + + lock (_sync) + { + if (_workerProcess is null) + return; + + TryKill(_workerProcess); + _workerProcess.Dispose(); + _workerProcess = null; + } + } + + private bool EnsureWorkerRunning() + { + lock (_sync) + { + if (_workerProcess is { HasExited: false }) + return true; + + _workerProcess?.Dispose(); + _workerProcess = null; + + var workerPath = ResolveWorkerPath(); + if (string.IsNullOrEmpty(workerPath)) + return false; + + var args = BuildArguments(); + var startInfo = new ProcessStartInfo + { + FileName = workerPath, + Arguments = args, + CreateNoWindow = true, + UseShellExecute = false, + RedirectStandardOutput = true, + RedirectStandardError = true + }; + + var process = new Process { StartInfo = startInfo }; + if (!process.Start()) + return false; + + TrySetLowPriority(process); + _ = DrainAsync(process.StandardOutput, "stdout"); + _ = DrainAsync(process.StandardError, "stderr"); + + _workerProcess = process; + return true; + } + } + + private bool SendRequest(CompactorRequest request, out CompactorResponse? 
response) + { + response = null; + using var pipe = new NamedPipeClientStream(".", _pipeName, PipeDirection.InOut, PipeOptions.Asynchronous); + + try + { + pipe.Connect((int)_timeout.TotalMilliseconds); + } + catch (Exception ex) + { + _logger.LogDebug(ex, "Compactor pipe connection failed."); + return false; + } + + using var writer = new StreamWriter(pipe) { AutoFlush = true }; + using var reader = new StreamReader(pipe); + + var payload = JsonSerializer.Serialize(request); + writer.WriteLine(payload); + + var readTask = reader.ReadLineAsync(); + if (!readTask.Wait(_timeout)) + { + _logger.LogWarning("Compactor pipe timed out waiting for response."); + return false; + } + + var line = readTask.Result; + if (string.IsNullOrWhiteSpace(line)) + return false; + + try + { + response = JsonSerializer.Deserialize(line); + return response is not null; + } + catch (Exception ex) + { + _logger.LogDebug(ex, "Failed to parse compactor response."); + return false; + } + } + + private string? ResolveWorkerPath() + { + var baseDir = AppContext.BaseDirectory; + var exeName = OperatingSystem.IsWindows() || _context.IsWine + ? "LightlessCompactorWorker.exe" + : "LightlessCompactorWorker"; + var path = Path.Combine(baseDir, exeName); + return File.Exists(path) ? 
path : null; + } + + private string BuildArguments() + { + var args = new List { "--pipe", Quote(_pipeName), "--parent", Environment.ProcessId.ToString() }; + if (_context.IsWine) + args.Add("--wine"); + return string.Join(' ', args); + } + + private static string Quote(string value) + { + if (string.IsNullOrEmpty(value)) + return "\"\""; + + if (!value.Contains('"', StringComparison.Ordinal)) + return "\"" + value + "\""; + + return "\"" + value.Replace("\"", "\\\"", StringComparison.Ordinal) + "\""; + } + + private static void TrySetLowPriority(Process process) + { + try + { + if (OperatingSystem.IsWindows()) + process.PriorityClass = ProcessPriorityClass.BelowNormal; + } + catch + { + // ignore + } + } + + private async Task DrainAsync(StreamReader reader, string label) + { + try + { + string? line; + while ((line = await reader.ReadLineAsync().ConfigureAwait(false)) != null) + { + if (_logger.IsEnabled(LogLevel.Trace)) + _logger.LogTrace("Compactor {label}: {line}", label, line); + } + } + catch + { + // ignore + } + } + + private static void TryKill(Process process) + { + try + { + process.Kill(entireProcessTree: true); + } + catch + { + // ignore + } + } + + private sealed class CompactorRequest + { + public string Type { get; init; } = "compact"; + public string? Path { get; init; } + } + + private sealed class CompactorResponse + { + public bool Success { get; init; } + public string? 
Error { get; init; } + } +} diff --git a/LightlessSync/FileCache/FileCacheManager.cs b/LightlessSync/FileCache/FileCacheManager.cs index b98b441..886f8cc 100644 --- a/LightlessSync/FileCache/FileCacheManager.cs +++ b/LightlessSync/FileCache/FileCacheManager.cs @@ -115,6 +115,35 @@ public sealed class FileCacheManager : IHostedService return true; } + private static bool TryGetHashFromFileName(FileInfo fileInfo, out string hash) + { + hash = Path.GetFileNameWithoutExtension(fileInfo.Name); + if (string.IsNullOrWhiteSpace(hash)) + { + return false; + } + + if (hash.Length is not (40 or 64)) + { + return false; + } + + for (var i = 0; i < hash.Length; i++) + { + var c = hash[i]; + var isHex = (c >= '0' && c <= '9') + || (c >= 'a' && c <= 'f') + || (c >= 'A' && c <= 'F'); + if (!isHex) + { + return false; + } + } + + hash = hash.ToUpperInvariant(); + return true; + } + private static string BuildVersionHeader() => $"{FileCacheVersionHeaderPrefix}{FileCacheVersion}"; private static bool TryParseVersionHeader(string? 
line, out int version) @@ -288,6 +317,11 @@ public sealed class FileCacheManager : IHostedService _logger.LogTrace("Creating cache entry for {path}", path); var cacheFolder = _configService.Current.CacheFolder; if (string.IsNullOrEmpty(cacheFolder)) return null; + if (TryGetHashFromFileName(fi, out var hash)) + { + return CreateCacheEntryWithKnownHash(fi.FullName, hash); + } + return CreateFileEntity(cacheFolder, CachePrefix, fi); } diff --git a/LightlessSync/FileCache/PluginCompactorContext.cs b/LightlessSync/FileCache/PluginCompactorContext.cs new file mode 100644 index 0000000..c466a94 --- /dev/null +++ b/LightlessSync/FileCache/PluginCompactorContext.cs @@ -0,0 +1,20 @@ +using LightlessSync.LightlessConfiguration; +using LightlessSync.Services; + +namespace LightlessSync.FileCache; + +internal sealed class PluginCompactorContext : ICompactorContext +{ + private readonly LightlessConfigService _configService; + private readonly DalamudUtilService _dalamudUtilService; + + public PluginCompactorContext(LightlessConfigService configService, DalamudUtilService dalamudUtilService) + { + _configService = configService; + _dalamudUtilService = dalamudUtilService; + } + + public bool UseCompactor => _configService.Current.UseCompactor; + public string CacheFolder => _configService.Current.CacheFolder; + public bool IsWine => _dalamudUtilService.IsWine; +} diff --git a/LightlessSync/FileCache/TransientResourceManager.cs b/LightlessSync/FileCache/TransientResourceManager.cs index 11073dc..1397159 100644 --- a/LightlessSync/FileCache/TransientResourceManager.cs +++ b/LightlessSync/FileCache/TransientResourceManager.cs @@ -25,7 +25,6 @@ public sealed class TransientResourceManager : DisposableMediatorSubscriberBase private readonly object _ownedHandlerLock = new(); private readonly string[] _handledFileTypes = ["tmb", "pap", "avfx", "atex", "sklb", "eid", "phyb", "scd", "skp", "shpk", "kdb"]; private readonly string[] _handledRecordingFileTypes = ["tex", "mdl", "mtrl"]; - 
private readonly string[] _handledFileTypesWithRecording; private readonly HashSet _playerRelatedPointers = []; private readonly object _playerRelatedLock = new(); private readonly ConcurrentDictionary _playerRelatedByAddress = new(); @@ -42,8 +41,6 @@ public sealed class TransientResourceManager : DisposableMediatorSubscriberBase _dalamudUtil = dalamudUtil; _actorObjectService = actorObjectService; _gameObjectHandlerFactory = gameObjectHandlerFactory; - _handledFileTypesWithRecording = _handledRecordingFileTypes.Concat(_handledFileTypes).ToArray(); - Mediator.Subscribe(this, Manager_PenumbraResourceLoadEvent); Mediator.Subscribe(this, msg => HandleActorTracked(msg.Descriptor)); Mediator.Subscribe(this, msg => HandleActorUntracked(msg.Descriptor)); @@ -523,46 +520,51 @@ public sealed class TransientResourceManager : DisposableMediatorSubscriberBase private void Manager_PenumbraResourceLoadEvent(PenumbraResourceLoadMessage msg) { + var gamePath = msg.GamePath.ToLowerInvariant(); var gameObjectAddress = msg.GameObject; - if (!_cachedFrameAddresses.TryGetValue(gameObjectAddress, out var objectKind)) - { - if (_actorObjectService.TryGetOwnedKind(gameObjectAddress, out var ownedKind)) - { - objectKind = ownedKind; - } - else - { - return; - } - } - - var gamePath = NormalizeGamePath(msg.GamePath); - if (string.IsNullOrEmpty(gamePath)) - { - return; - } + var filePath = msg.FilePath; // ignore files already processed this frame + if (_cachedHandledPaths.Contains(gamePath)) return; + lock (_cacheAdditionLock) { - if (!_cachedHandledPaths.Add(gamePath)) - { - return; - } + _cachedHandledPaths.Add(gamePath); + } + + // replace individual mtrl stuff + if (filePath.StartsWith("|", StringComparison.OrdinalIgnoreCase)) + { + filePath = filePath.Split("|")[2]; + } + // replace filepath + filePath = filePath.ToLowerInvariant().Replace("\\", "/", StringComparison.OrdinalIgnoreCase); + + // ignore files that are the same + var replacedGamePath = 
gamePath.ToLowerInvariant().Replace("\\", "/", StringComparison.OrdinalIgnoreCase); + if (string.Equals(filePath, replacedGamePath, StringComparison.OrdinalIgnoreCase)) + { + return; } // ignore files to not handle - var handledTypes = IsTransientRecording ? _handledFileTypesWithRecording : _handledFileTypes; - if (!HasHandledFileType(gamePath, handledTypes)) + var handledTypes = IsTransientRecording ? _handledRecordingFileTypes.Concat(_handledFileTypes) : _handledFileTypes; + if (!handledTypes.Any(type => gamePath.EndsWith(type, StringComparison.OrdinalIgnoreCase))) { + lock (_cacheAdditionLock) + { + _cachedHandledPaths.Add(gamePath); + } return; } - var filePath = NormalizeFilePath(msg.FilePath); - - // ignore files that are the same - if (string.Equals(filePath, gamePath, StringComparison.Ordinal)) + // ignore files not belonging to anything player related + if (!_cachedFrameAddresses.TryGetValue(gameObjectAddress, out var objectKind)) { + lock (_cacheAdditionLock) + { + _cachedHandledPaths.Add(gamePath); + } return; } @@ -577,12 +579,13 @@ public sealed class TransientResourceManager : DisposableMediatorSubscriberBase _playerRelatedByAddress.TryGetValue(gameObjectAddress, out var owner); bool alreadyTransient = false; - bool transientContains = transientResources.Contains(gamePath); - bool semiTransientContains = SemiTransientResources.Values.Any(value => value.Contains(gamePath)); + bool transientContains = transientResources.Contains(replacedGamePath); + bool semiTransientContains = SemiTransientResources.SelectMany(k => k.Value) + .Any(f => string.Equals(f, gamePath, StringComparison.OrdinalIgnoreCase)); if (transientContains || semiTransientContains) { if (!IsTransientRecording) - Logger.LogTrace("Not adding {replacedPath} => {filePath}, Reason: Transient: {contains}, SemiTransient: {contains2}", gamePath, filePath, + Logger.LogTrace("Not adding {replacedPath} => {filePath}, Reason: Transient: {contains}, SemiTransient: {contains2}", replacedGamePath, 
filePath, transientContains, semiTransientContains); alreadyTransient = true; } @@ -590,10 +593,10 @@ public sealed class TransientResourceManager : DisposableMediatorSubscriberBase { if (!IsTransientRecording) { - bool isAdded = transientResources.Add(gamePath); + bool isAdded = transientResources.Add(replacedGamePath); if (isAdded) { - Logger.LogDebug("Adding {replacedGamePath} for {gameObject} ({filePath})", gamePath, owner?.ToString() ?? gameObjectAddress.ToString("X"), filePath); + Logger.LogDebug("Adding {replacedGamePath} for {gameObject} ({filePath})", replacedGamePath, owner?.ToString() ?? gameObjectAddress.ToString("X"), filePath); SendTransients(gameObjectAddress, objectKind); } } @@ -601,7 +604,7 @@ public sealed class TransientResourceManager : DisposableMediatorSubscriberBase if (owner != null && IsTransientRecording) { - _recordedTransients.Add(new TransientRecord(owner, gamePath, filePath, alreadyTransient) { AddTransient = !alreadyTransient }); + _recordedTransients.Add(new TransientRecord(owner, replacedGamePath, filePath, alreadyTransient) { AddTransient = !alreadyTransient }); } } @@ -700,4 +703,4 @@ public sealed class TransientResourceManager : DisposableMediatorSubscriberBase { public bool AddTransient { get; set; } } -} \ No newline at end of file +} diff --git a/LightlessSync/Interop/Ipc/IpcCallerPenumbra.cs b/LightlessSync/Interop/Ipc/IpcCallerPenumbra.cs index e077eab..7ce5aff 100644 --- a/LightlessSync/Interop/Ipc/IpcCallerPenumbra.cs +++ b/LightlessSync/Interop/Ipc/IpcCallerPenumbra.cs @@ -4,7 +4,6 @@ using LightlessSync.Interop.Ipc.Penumbra; using LightlessSync.LightlessConfiguration.Models; using LightlessSync.PlayerData.Handlers; using LightlessSync.Services; -using LightlessSync.Services.ActorTracking; using LightlessSync.Services.Mediator; using Microsoft.Extensions.Logging; using Penumbra.Api.Enums; @@ -36,8 +35,7 @@ public sealed class IpcCallerPenumbra : IpcServiceBase IDalamudPluginInterface pluginInterface, DalamudUtilService 
dalamudUtil, LightlessMediator mediator, - RedrawManager redrawManager, - ActorObjectService actorObjectService) : base(logger, mediator, pluginInterface, PenumbraDescriptor) + RedrawManager redrawManager) : base(logger, mediator, pluginInterface, PenumbraDescriptor) { _penumbraEnabled = new GetEnabledState(pluginInterface); _penumbraGetModDirectory = new GetModDirectory(pluginInterface); @@ -46,7 +44,7 @@ public sealed class IpcCallerPenumbra : IpcServiceBase _penumbraModSettingChanged = ModSettingChanged.Subscriber(pluginInterface, HandlePenumbraModSettingChanged); _collections = RegisterInterop(new PenumbraCollections(logger, pluginInterface, dalamudUtil, mediator)); - _resources = RegisterInterop(new PenumbraResource(logger, pluginInterface, dalamudUtil, mediator, actorObjectService)); + _resources = RegisterInterop(new PenumbraResource(logger, pluginInterface, dalamudUtil, mediator)); _redraw = RegisterInterop(new PenumbraRedraw(logger, pluginInterface, dalamudUtil, mediator, redrawManager)); _textures = RegisterInterop(new PenumbraTexture(logger, pluginInterface, dalamudUtil, mediator, _redraw)); @@ -104,8 +102,11 @@ public sealed class IpcCallerPenumbra : IpcServiceBase public Task RedrawAsync(ILogger logger, GameObjectHandler handler, Guid applicationId, CancellationToken token) => _redraw.RedrawAsync(logger, handler, applicationId, token); - public Task ConvertTextureFiles(ILogger logger, IReadOnlyList jobs, IProgress? progress, CancellationToken token) - => _textures.ConvertTextureFilesAsync(logger, jobs, progress, token); + public void RequestImmediateRedraw(int objectIndex, RedrawType redrawType) + => _redraw.RequestImmediateRedraw(objectIndex, redrawType); + + public Task ConvertTextureFiles(ILogger logger, IReadOnlyList jobs, IProgress? 
progress, CancellationToken token, bool requestRedraw = true) + => _textures.ConvertTextureFilesAsync(logger, jobs, progress, token, requestRedraw); public Task ConvertTextureFileDirectAsync(TextureConversionJob job, CancellationToken token) => _textures.ConvertTextureFileDirectAsync(job, token); diff --git a/LightlessSync/Interop/Ipc/Penumbra/PenumbraCollections.cs b/LightlessSync/Interop/Ipc/Penumbra/PenumbraCollections.cs index c095471..c3e497a 100644 --- a/LightlessSync/Interop/Ipc/Penumbra/PenumbraCollections.cs +++ b/LightlessSync/Interop/Ipc/Penumbra/PenumbraCollections.cs @@ -1,10 +1,8 @@ -using System.Collections.Concurrent; using Dalamud.Plugin; using LightlessSync.Interop.Ipc.Framework; using LightlessSync.Services; using LightlessSync.Services.Mediator; using Microsoft.Extensions.Logging; -using Penumbra.Api.Enums; using Penumbra.Api.IpcSubscribers; namespace LightlessSync.Interop.Ipc.Penumbra; @@ -16,10 +14,6 @@ public sealed class PenumbraCollections : PenumbraBase private readonly DeleteTemporaryCollection _removeTemporaryCollection; private readonly AddTemporaryMod _addTemporaryMod; private readonly RemoveTemporaryMod _removeTemporaryMod; - private readonly GetCollections _getCollections; - private readonly ConcurrentDictionary _activeTemporaryCollections = new(); - - private int _cleanupScheduled; public PenumbraCollections( ILogger logger, @@ -32,7 +26,6 @@ public sealed class PenumbraCollections : PenumbraBase _removeTemporaryCollection = new DeleteTemporaryCollection(pluginInterface); _addTemporaryMod = new AddTemporaryMod(pluginInterface); _removeTemporaryMod = new RemoveTemporaryMod(pluginInterface); - _getCollections = new GetCollections(pluginInterface); } public override string Name => "Penumbra.Collections"; @@ -62,16 +55,11 @@ public sealed class PenumbraCollections : PenumbraBase var (collectionId, collectionName) = await DalamudUtil.RunOnFrameworkThread(() => { var name = $"Lightless_{uid}"; - 
_createNamedTemporaryCollection.Invoke(name, name, out var tempCollectionId); - logger.LogTrace("Creating Temp Collection {CollectionName}, GUID: {CollectionId}", name, tempCollectionId); + var createResult = _createNamedTemporaryCollection.Invoke(name, name, out var tempCollectionId); + logger.LogTrace("Creating Temp Collection {CollectionName}, GUID: {CollectionId}, Result: {Result}", name, tempCollectionId, createResult); return (tempCollectionId, name); }).ConfigureAwait(false); - if (collectionId != Guid.Empty) - { - _activeTemporaryCollections[collectionId] = collectionName; - } - return collectionId; } @@ -89,7 +77,6 @@ public sealed class PenumbraCollections : PenumbraBase logger.LogTrace("[{ApplicationId}] RemoveTemporaryCollection: {Result}", applicationId, result); }).ConfigureAwait(false); - _activeTemporaryCollections.TryRemove(collectionId, out _); } public async Task SetTemporaryModsAsync(ILogger logger, Guid applicationId, Guid collectionId, Dictionary modPaths) @@ -131,67 +118,5 @@ public sealed class PenumbraCollections : PenumbraBase protected override void HandleStateChange(IpcConnectionState previous, IpcConnectionState current) { - if (current == IpcConnectionState.Available) - { - ScheduleCleanup(); - } - else if (previous == IpcConnectionState.Available && current != IpcConnectionState.Available) - { - Interlocked.Exchange(ref _cleanupScheduled, 0); - } } - - private void ScheduleCleanup() - { - if (Interlocked.Exchange(ref _cleanupScheduled, 1) != 0) - { - return; - } - - _ = Task.Run(CleanupTemporaryCollectionsAsync); - } - - private async Task CleanupTemporaryCollectionsAsync() - { - if (!IsAvailable) - { - return; - } - - try - { - var collections = await DalamudUtil.RunOnFrameworkThread(() => _getCollections.Invoke()).ConfigureAwait(false); - foreach (var (collectionId, name) in collections) - { - if (!IsLightlessCollectionName(name) || _activeTemporaryCollections.ContainsKey(collectionId)) - { - continue; - } - - 
Logger.LogDebug("Cleaning up stale temporary collection {CollectionName} ({CollectionId})", name, collectionId); - var deleteResult = await DalamudUtil.RunOnFrameworkThread(() => - { - var result = (PenumbraApiEc)_removeTemporaryCollection.Invoke(collectionId); - Logger.LogTrace("Cleanup RemoveTemporaryCollection result for {CollectionName} ({CollectionId}): {Result}", name, collectionId, result); - return result; - }).ConfigureAwait(false); - - if (deleteResult == PenumbraApiEc.Success) - { - _activeTemporaryCollections.TryRemove(collectionId, out _); - } - else - { - Logger.LogDebug("Skipped removing temporary collection {CollectionName} ({CollectionId}). Result: {Result}", name, collectionId, deleteResult); - } - } - } - catch (Exception ex) - { - Logger.LogWarning(ex, "Failed to clean up Penumbra temporary collections"); - } - } - - private static bool IsLightlessCollectionName(string? name) - => !string.IsNullOrEmpty(name) && name.StartsWith("Lightless_", StringComparison.Ordinal); } diff --git a/LightlessSync/Interop/Ipc/Penumbra/PenumbraResource.cs b/LightlessSync/Interop/Ipc/Penumbra/PenumbraResource.cs index 73da7cc..9ca2df0 100644 --- a/LightlessSync/Interop/Ipc/Penumbra/PenumbraResource.cs +++ b/LightlessSync/Interop/Ipc/Penumbra/PenumbraResource.cs @@ -2,9 +2,10 @@ using Dalamud.Plugin; using LightlessSync.Interop.Ipc.Framework; using LightlessSync.PlayerData.Handlers; using LightlessSync.Services; -using LightlessSync.Services.ActorTracking; using LightlessSync.Services.Mediator; using Microsoft.Extensions.Logging; +using System.Diagnostics; +using System.Globalization; using Penumbra.Api.Helpers; using Penumbra.Api.IpcSubscribers; @@ -12,7 +13,6 @@ namespace LightlessSync.Interop.Ipc.Penumbra; public sealed class PenumbraResource : PenumbraBase { - private readonly ActorObjectService _actorObjectService; private readonly GetGameObjectResourcePaths _gameObjectResourcePaths; private readonly ResolveGameObjectPath _resolveGameObjectPath; private readonly 
ReverseResolveGameObjectPath _reverseResolveGameObjectPath; @@ -24,10 +24,8 @@ public sealed class PenumbraResource : PenumbraBase ILogger logger, IDalamudPluginInterface pluginInterface, DalamudUtilService dalamudUtil, - LightlessMediator mediator, - ActorObjectService actorObjectService) : base(logger, pluginInterface, dalamudUtil, mediator) + LightlessMediator mediator) : base(logger, pluginInterface, dalamudUtil, mediator) { - _actorObjectService = actorObjectService; _gameObjectResourcePaths = new GetGameObjectResourcePaths(pluginInterface); _resolveGameObjectPath = new ResolveGameObjectPath(pluginInterface); _reverseResolveGameObjectPath = new ReverseResolveGameObjectPath(pluginInterface); @@ -45,17 +43,33 @@ public sealed class PenumbraResource : PenumbraBase return null; } - return await DalamudUtil.RunOnFrameworkThread(() => + var requestId = Guid.NewGuid(); + var totalTimer = Stopwatch.StartNew(); + logger.LogTrace("[{requestId}] Requesting Penumbra.GetGameObjectResourcePaths for {handler}", requestId, handler); + + var result = await DalamudUtil.RunOnFrameworkThread(() => { - logger.LogTrace("Calling On IPC: Penumbra.GetGameObjectResourcePaths"); var idx = handler.GetGameObject()?.ObjectIndex; if (idx == null) { + logger.LogTrace("[{requestId}] GetGameObjectResourcePaths aborted (missing object index) for {handler}", requestId, handler); return null; } - return _gameObjectResourcePaths.Invoke(idx.Value)[0]; + logger.LogTrace("[{requestId}] Invoking Penumbra.GetGameObjectResourcePaths for index {index}", requestId, idx.Value); + var invokeTimer = Stopwatch.StartNew(); + var data = _gameObjectResourcePaths.Invoke(idx.Value)[0]; + invokeTimer.Stop(); + logger.LogTrace("[{requestId}] Penumbra.GetGameObjectResourcePaths returned {count} entries in {elapsedMs}ms", + requestId, data?.Count ?? 
0, invokeTimer.ElapsedMilliseconds); + return data; }).ConfigureAwait(false); + + totalTimer.Stop(); + logger.LogTrace("[{requestId}] Penumbra.GetGameObjectResourcePaths finished in {elapsedMs}ms (null: {isNull})", + requestId, totalTimer.ElapsedMilliseconds, result is null); + + return result; } public string GetMetaManipulations() @@ -79,22 +93,10 @@ public sealed class PenumbraResource : PenumbraBase private void HandleResourceLoaded(nint ptr, string gamePath, string resolvedPath) { - if (ptr == nint.Zero) + if (ptr != nint.Zero && string.Compare(gamePath, resolvedPath, ignoreCase: true, CultureInfo.InvariantCulture) != 0) { - return; + Mediator.Publish(new PenumbraResourceLoadMessage(ptr, gamePath, resolvedPath)); } - - if (!_actorObjectService.TryGetOwnedKind(ptr, out _)) - { - return; - } - - if (string.Compare(gamePath, resolvedPath, StringComparison.OrdinalIgnoreCase) == 0) - { - return; - } - - Mediator.Publish(new PenumbraResourceLoadMessage(ptr, gamePath, resolvedPath)); } protected override void HandleStateChange(IpcConnectionState previous, IpcConnectionState current) diff --git a/LightlessSync/Interop/Ipc/Penumbra/PenumbraTexture.cs b/LightlessSync/Interop/Ipc/Penumbra/PenumbraTexture.cs index e12fd7b..453d211 100644 --- a/LightlessSync/Interop/Ipc/Penumbra/PenumbraTexture.cs +++ b/LightlessSync/Interop/Ipc/Penumbra/PenumbraTexture.cs @@ -26,7 +26,7 @@ public sealed class PenumbraTexture : PenumbraBase public override string Name => "Penumbra.Textures"; - public async Task ConvertTextureFilesAsync(ILogger logger, IReadOnlyList jobs, IProgress? progress, CancellationToken token) + public async Task ConvertTextureFilesAsync(ILogger logger, IReadOnlyList jobs, IProgress? 
progress, CancellationToken token, bool requestRedraw) { if (!IsAvailable || jobs.Count == 0) { @@ -57,7 +57,7 @@ public sealed class PenumbraTexture : PenumbraBase Mediator.Publish(new ResumeScanMessage(nameof(ConvertTextureFilesAsync))); } - if (completedJobs > 0 && !token.IsCancellationRequested) + if (requestRedraw && completedJobs > 0 && !token.IsCancellationRequested) { await DalamudUtil.RunOnFrameworkThread(async () => { diff --git a/LightlessSync/LightlessConfiguration/Configurations/ChatConfig.cs b/LightlessSync/LightlessConfiguration/Configurations/ChatConfig.cs index 5532d78..48db57e 100644 --- a/LightlessSync/LightlessConfiguration/Configurations/ChatConfig.cs +++ b/LightlessSync/LightlessConfiguration/Configurations/ChatConfig.cs @@ -12,6 +12,9 @@ public sealed class ChatConfig : ILightlessConfiguration public bool ShowMessageTimestamps { get; set; } = true; public bool ShowNotesInSyncshellChat { get; set; } = true; public bool EnableAnimatedEmotes { get; set; } = true; + public float EmoteScale { get; set; } = 1.5f; + public bool EnableMentionNotifications { get; set; } = true; + public bool AutoOpenChatOnNewMessage { get; set; } = false; public float ChatWindowOpacity { get; set; } = .97f; public bool FadeWhenUnfocused { get; set; } = false; public float UnfocusedWindowOpacity { get; set; } = 0.6f; @@ -23,6 +26,9 @@ public sealed class ChatConfig : ILightlessConfiguration public bool ShowWhenUiHidden { get; set; } = true; public bool ShowInCutscenes { get; set; } = true; public bool ShowInGpose { get; set; } = true; + public bool PersistSyncshellHistory { get; set; } = false; public List ChannelOrder { get; set; } = new(); + public Dictionary HiddenChannels { get; set; } = new(StringComparer.Ordinal); + public Dictionary SyncshellChannelHistory { get; set; } = new(StringComparer.Ordinal); public Dictionary PreferNotesForChannels { get; set; } = new(StringComparer.Ordinal); } diff --git 
a/LightlessSync/LightlessConfiguration/Configurations/LightlessConfig.cs b/LightlessSync/LightlessConfiguration/Configurations/LightlessConfig.cs index 9e92b63..e3b2862 100644 --- a/LightlessSync/LightlessConfiguration/Configurations/LightlessConfig.cs +++ b/LightlessSync/LightlessConfiguration/Configurations/LightlessConfig.cs @@ -32,6 +32,8 @@ public class LightlessConfig : ILightlessConfiguration public DtrEntry.Colors DtrColorsLightfinderUnavailable { get; set; } = new(Foreground: 0x000000u, Glow: 0x000000u); public LightfinderDtrDisplayMode LightfinderDtrDisplayMode { get; set; } = LightfinderDtrDisplayMode.PendingPairRequests; public bool UseLightlessRedesign { get; set; } = true; + public bool ShowUiWhenUiHidden { get; set; } = true; + public bool ShowUiInGpose { get; set; } = true; public bool EnableRightClickMenus { get; set; } = true; public NotificationLocation ErrorNotification { get; set; } = NotificationLocation.Both; public string ExportFolder { get; set; } = string.Empty; diff --git a/LightlessSync/LightlessConfiguration/Configurations/ModelDecimationSettings.cs b/LightlessSync/LightlessConfiguration/Configurations/ModelDecimationSettings.cs new file mode 100644 index 0000000..eb910f0 --- /dev/null +++ b/LightlessSync/LightlessConfiguration/Configurations/ModelDecimationSettings.cs @@ -0,0 +1,156 @@ +namespace LightlessSync.LightlessConfiguration.Configurations; + +public static class ModelDecimationDefaults +{ + public const bool EnableAutoDecimation = false; + public const int TriangleThreshold = 15_000; + public const double TargetRatio = 0.8; + public const bool NormalizeTangents = true; + public const bool AvoidBodyIntersection = true; + + /// Default triangle threshold for batch decimation (0 = no threshold). + public const int BatchTriangleThreshold = 0; + + /// Default target triangle ratio for batch decimation. + public const double BatchTargetRatio = 0.8; + + /// Default tangent normalization toggle for batch decimation. 
+ public const bool BatchNormalizeTangents = true; + + /// Default body collision guard toggle for batch decimation. + public const bool BatchAvoidBodyIntersection = true; + + /// Default display for the batch decimation warning overlay. + public const bool ShowBatchDecimationWarning = true; + + public const bool KeepOriginalModelFiles = true; + public const bool SkipPreferredPairs = true; + public const bool AllowBody = false; + public const bool AllowFaceHead = false; + public const bool AllowTail = false; + public const bool AllowClothing = true; + public const bool AllowAccessories = true; +} + +public sealed class ModelDecimationAdvancedSettings +{ + /// Minimum triangles per connected component before skipping decimation. + public const int DefaultMinComponentTriangles = 6; + + /// Average-edge multiplier used to cap collapses. + public const float DefaultMaxCollapseEdgeLengthFactor = 1.25f; + + /// Maximum normal deviation (degrees) allowed for a collapse. + public const float DefaultNormalSimilarityThresholdDegrees = 60f; + + /// Minimum bone-weight overlap required to allow a collapse. + public const float DefaultBoneWeightSimilarityThreshold = 0.85f; + + /// UV similarity threshold to protect seams. + public const float DefaultUvSimilarityThreshold = 0.02f; + + /// UV seam cosine threshold for blocking seam collapses. + public const float DefaultUvSeamAngleCos = 0.99f; + + /// Whether to block UV seam vertices from collapsing. + public const bool DefaultBlockUvSeamVertices = true; + + /// Whether to allow collapses on boundary edges. + public const bool DefaultAllowBoundaryCollapses = false; + + /// Body collision distance factor for the primary pass. + public const float DefaultBodyCollisionDistanceFactor = 0.75f; + + /// Body collision distance factor for the relaxed fallback pass. + public const float DefaultBodyCollisionNoOpDistanceFactor = 0.25f; + + /// Relax multiplier applied when the mesh is close to the body. 
+ public const float DefaultBodyCollisionAdaptiveRelaxFactor = 1.0f; + + /// Ratio of near-body vertices required to trigger relaxation. + public const float DefaultBodyCollisionAdaptiveNearRatio = 0.4f; + + /// UV threshold for relaxed body-collision mode. + public const float DefaultBodyCollisionAdaptiveUvThreshold = 0.08f; + + /// UV seam cosine threshold for relaxed body-collision mode. + public const float DefaultBodyCollisionNoOpUvSeamAngleCos = 0.98f; + + /// Expansion factor for protected vertices near the body. + public const float DefaultBodyCollisionProtectionFactor = 1.5f; + + /// Minimum ratio used when decimating the body proxy. + public const float DefaultBodyProxyTargetRatioMin = 0.85f; + + /// Inflation applied to body collision distances. + public const float DefaultBodyCollisionProxyInflate = 0.0005f; + + /// Body collision penetration factor used during collapse checks. + public const float DefaultBodyCollisionPenetrationFactor = 0.75f; + + /// Minimum body collision distance threshold. + public const float DefaultMinBodyCollisionDistance = 0.0001f; + + /// Minimum cell size for body collision spatial hashing. + public const float DefaultMinBodyCollisionCellSize = 0.0001f; + + /// Minimum triangles per connected component before skipping decimation. + public int MinComponentTriangles { get; set; } = DefaultMinComponentTriangles; + + /// Average-edge multiplier used to cap collapses. + public float MaxCollapseEdgeLengthFactor { get; set; } = DefaultMaxCollapseEdgeLengthFactor; + + /// Maximum normal deviation (degrees) allowed for a collapse. + public float NormalSimilarityThresholdDegrees { get; set; } = DefaultNormalSimilarityThresholdDegrees; + + /// Minimum bone-weight overlap required to allow a collapse. + public float BoneWeightSimilarityThreshold { get; set; } = DefaultBoneWeightSimilarityThreshold; + + /// UV similarity threshold to protect seams. 
+ public float UvSimilarityThreshold { get; set; } = DefaultUvSimilarityThreshold; + + /// UV seam cosine threshold for blocking seam collapses. + public float UvSeamAngleCos { get; set; } = DefaultUvSeamAngleCos; + + /// Whether to block UV seam vertices from collapsing. + public bool BlockUvSeamVertices { get; set; } = DefaultBlockUvSeamVertices; + + /// Whether to allow collapses on boundary edges. + public bool AllowBoundaryCollapses { get; set; } = DefaultAllowBoundaryCollapses; + + /// Body collision distance factor for the primary pass. + public float BodyCollisionDistanceFactor { get; set; } = DefaultBodyCollisionDistanceFactor; + + /// Body collision distance factor for the relaxed fallback pass. + public float BodyCollisionNoOpDistanceFactor { get; set; } = DefaultBodyCollisionNoOpDistanceFactor; + + /// Relax multiplier applied when the mesh is close to the body. + public float BodyCollisionAdaptiveRelaxFactor { get; set; } = DefaultBodyCollisionAdaptiveRelaxFactor; + + /// Ratio of near-body vertices required to trigger relaxation. + public float BodyCollisionAdaptiveNearRatio { get; set; } = DefaultBodyCollisionAdaptiveNearRatio; + + /// UV threshold for relaxed body-collision mode. + public float BodyCollisionAdaptiveUvThreshold { get; set; } = DefaultBodyCollisionAdaptiveUvThreshold; + + /// UV seam cosine threshold for relaxed body-collision mode. + public float BodyCollisionNoOpUvSeamAngleCos { get; set; } = DefaultBodyCollisionNoOpUvSeamAngleCos; + + /// Expansion factor for protected vertices near the body. + public float BodyCollisionProtectionFactor { get; set; } = DefaultBodyCollisionProtectionFactor; + + /// Minimum ratio used when decimating the body proxy. + public float BodyProxyTargetRatioMin { get; set; } = DefaultBodyProxyTargetRatioMin; + + /// Inflation applied to body collision distances. 
+ public float BodyCollisionProxyInflate { get; set; } = DefaultBodyCollisionProxyInflate; + + /// Body collision penetration factor used during collapse checks. + public float BodyCollisionPenetrationFactor { get; set; } = DefaultBodyCollisionPenetrationFactor; + + /// Minimum body collision distance threshold. + public float MinBodyCollisionDistance { get; set; } = DefaultMinBodyCollisionDistance; + + /// Minimum cell size for body collision spatial hashing. + public float MinBodyCollisionCellSize { get; set; } = DefaultMinBodyCollisionCellSize; +} diff --git a/LightlessSync/LightlessConfiguration/Configurations/PlayerPerformanceConfig.cs b/LightlessSync/LightlessConfiguration/Configurations/PlayerPerformanceConfig.cs index 462a63f..98dbc58 100644 --- a/LightlessSync/LightlessConfiguration/Configurations/PlayerPerformanceConfig.cs +++ b/LightlessSync/LightlessConfiguration/Configurations/PlayerPerformanceConfig.cs @@ -21,16 +21,26 @@ public class PlayerPerformanceConfig : ILightlessConfiguration public bool EnableIndexTextureDownscale { get; set; } = false; public int TextureDownscaleMaxDimension { get; set; } = 2048; public bool OnlyDownscaleUncompressedTextures { get; set; } = true; + public bool EnableUncompressedTextureCompression { get; set; } = false; + public bool SkipUncompressedTextureCompressionMipMaps { get; set; } = false; public bool KeepOriginalTextureFiles { get; set; } = false; public bool SkipTextureDownscaleForPreferredPairs { get; set; } = true; - public bool EnableModelDecimation { get; set; } = false; - public int ModelDecimationTriangleThreshold { get; set; } = 20_000; - public double ModelDecimationTargetRatio { get; set; } = 0.8; - public bool KeepOriginalModelFiles { get; set; } = true; - public bool SkipModelDecimationForPreferredPairs { get; set; } = true; - public bool ModelDecimationAllowBody { get; set; } = false; - public bool ModelDecimationAllowFaceHead { get; set; } = false; - public bool ModelDecimationAllowTail { get; set; } = 
false; - public bool ModelDecimationAllowClothing { get; set; } = true; - public bool ModelDecimationAllowAccessories { get; set; } = true; + public bool EnableModelDecimation { get; set; } = ModelDecimationDefaults.EnableAutoDecimation; + public int ModelDecimationTriangleThreshold { get; set; } = ModelDecimationDefaults.TriangleThreshold; + public double ModelDecimationTargetRatio { get; set; } = ModelDecimationDefaults.TargetRatio; + public bool ModelDecimationNormalizeTangents { get; set; } = ModelDecimationDefaults.NormalizeTangents; + public bool ModelDecimationAvoidBodyIntersection { get; set; } = ModelDecimationDefaults.AvoidBodyIntersection; + public ModelDecimationAdvancedSettings ModelDecimationAdvanced { get; set; } = new(); + public int BatchModelDecimationTriangleThreshold { get; set; } = ModelDecimationDefaults.BatchTriangleThreshold; + public double BatchModelDecimationTargetRatio { get; set; } = ModelDecimationDefaults.BatchTargetRatio; + public bool BatchModelDecimationNormalizeTangents { get; set; } = ModelDecimationDefaults.BatchNormalizeTangents; + public bool BatchModelDecimationAvoidBodyIntersection { get; set; } = ModelDecimationDefaults.BatchAvoidBodyIntersection; + public bool ShowBatchModelDecimationWarning { get; set; } = ModelDecimationDefaults.ShowBatchDecimationWarning; + public bool KeepOriginalModelFiles { get; set; } = ModelDecimationDefaults.KeepOriginalModelFiles; + public bool SkipModelDecimationForPreferredPairs { get; set; } = ModelDecimationDefaults.SkipPreferredPairs; + public bool ModelDecimationAllowBody { get; set; } = ModelDecimationDefaults.AllowBody; + public bool ModelDecimationAllowFaceHead { get; set; } = ModelDecimationDefaults.AllowFaceHead; + public bool ModelDecimationAllowTail { get; set; } = ModelDecimationDefaults.AllowTail; + public bool ModelDecimationAllowClothing { get; set; } = ModelDecimationDefaults.AllowClothing; + public bool ModelDecimationAllowAccessories { get; set; } = 
ModelDecimationDefaults.AllowAccessories; } \ No newline at end of file diff --git a/LightlessSync/LightlessConfiguration/Models/OrphanableTempCollectionEntry.cs b/LightlessSync/LightlessConfiguration/Models/OrphanableTempCollectionEntry.cs new file mode 100644 index 0000000..2288018 --- /dev/null +++ b/LightlessSync/LightlessConfiguration/Models/OrphanableTempCollectionEntry.cs @@ -0,0 +1,7 @@ +namespace LightlessSync.LightlessConfiguration.Models; + +public sealed class OrphanableTempCollectionEntry +{ + public Guid Id { get; set; } + public DateTime RegisteredAtUtc { get; set; } = DateTime.MinValue; +} diff --git a/LightlessSync/LightlessSync.csproj b/LightlessSync/LightlessSync.csproj index b0b7b8e..f2ab377 100644 --- a/LightlessSync/LightlessSync.csproj +++ b/LightlessSync/LightlessSync.csproj @@ -85,6 +85,8 @@ + + @@ -108,5 +110,13 @@ + + + + + + + + diff --git a/LightlessSync/PlayerData/Factories/FileDownloadManagerFactory.cs b/LightlessSync/PlayerData/Factories/FileDownloadManagerFactory.cs index 211a6fc..feb6d41 100644 --- a/LightlessSync/PlayerData/Factories/FileDownloadManagerFactory.cs +++ b/LightlessSync/PlayerData/Factories/FileDownloadManagerFactory.cs @@ -19,6 +19,7 @@ public class FileDownloadManagerFactory private readonly TextureDownscaleService _textureDownscaleService; private readonly ModelDecimationService _modelDecimationService; private readonly TextureMetadataHelper _textureMetadataHelper; + private readonly FileDownloadDeduplicator _downloadDeduplicator; public FileDownloadManagerFactory( ILoggerFactory loggerFactory, @@ -29,7 +30,8 @@ public class FileDownloadManagerFactory LightlessConfigService configService, TextureDownscaleService textureDownscaleService, ModelDecimationService modelDecimationService, - TextureMetadataHelper textureMetadataHelper) + TextureMetadataHelper textureMetadataHelper, + FileDownloadDeduplicator downloadDeduplicator) { _loggerFactory = loggerFactory; _lightlessMediator = lightlessMediator; @@ -40,6 +42,7 @@ 
public class FileDownloadManagerFactory _textureDownscaleService = textureDownscaleService; _modelDecimationService = modelDecimationService; _textureMetadataHelper = textureMetadataHelper; + _downloadDeduplicator = downloadDeduplicator; } public FileDownloadManager Create() @@ -53,6 +56,7 @@ public class FileDownloadManagerFactory _configService, _textureDownscaleService, _modelDecimationService, - _textureMetadataHelper); + _textureMetadataHelper, + _downloadDeduplicator); } } diff --git a/LightlessSync/PlayerData/Factories/PlayerDataFactory.cs b/LightlessSync/PlayerData/Factories/PlayerDataFactory.cs index e8f3459..9fda2bd 100644 --- a/LightlessSync/PlayerData/Factories/PlayerDataFactory.cs +++ b/LightlessSync/PlayerData/Factories/PlayerDataFactory.cs @@ -9,10 +9,10 @@ using LightlessSync.PlayerData.Data; using LightlessSync.PlayerData.Handlers; using LightlessSync.Services; using LightlessSync.Services.Mediator; +using LightlessSync.Utils; using Microsoft.Extensions.Logging; using System.Collections.Concurrent; using System.Diagnostics; -using System.Runtime.ExceptionServices; using System.Runtime.InteropServices; namespace LightlessSync.PlayerData.Factories; @@ -34,7 +34,7 @@ public class PlayerDataFactory private const int _maxTransientResolvedEntries = 1000; // Character build caches - private readonly ConcurrentDictionary> _characterBuildInflight = new(); + private readonly TaskRegistry _characterBuildInflight = new(); private readonly ConcurrentDictionary _characterBuildCache = new(); // Time out thresholds @@ -170,10 +170,10 @@ public class PlayerDataFactory { var key = obj.Address; - if (_characterBuildCache.TryGetValue(key, out var cached) && IsCacheFresh(cached) && !_characterBuildInflight.ContainsKey(key)) + if (_characterBuildCache.TryGetValue(key, out CacheEntry cached) && IsCacheFresh(cached) && !_characterBuildInflight.TryGetExisting(key, out _)) return cached.Fragment; - var buildTask = _characterBuildInflight.GetOrAdd(key, _ => 
BuildAndCacheAsync(obj, key)); + Task buildTask = _characterBuildInflight.GetOrStart(key, () => BuildAndCacheAsync(obj, key)); if (_characterBuildCache.TryGetValue(key, out cached)) { @@ -189,20 +189,13 @@ public class PlayerDataFactory private async Task BuildAndCacheAsync(GameObjectHandler obj, nint key) { - try - { - using var cts = new CancellationTokenSource(_hardBuildTimeout); - var fragment = await CreateCharacterDataInternal(obj, cts.Token).ConfigureAwait(false); + using var cts = new CancellationTokenSource(_hardBuildTimeout); + CharacterDataFragment fragment = await CreateCharacterDataInternal(obj, cts.Token).ConfigureAwait(false); - _characterBuildCache[key] = new CacheEntry(fragment, DateTime.UtcNow); - PruneCharacterCacheIfNeeded(); + _characterBuildCache[key] = new CacheEntry(fragment, DateTime.UtcNow); + PruneCharacterCacheIfNeeded(); - return fragment; - } - finally - { - _characterBuildInflight.TryRemove(key, out _); - } + return fragment; } private void PruneCharacterCacheIfNeeded() @@ -257,7 +250,28 @@ public class PlayerDataFactory getMoodlesData = _ipcManager.Moodles.GetStatusAsync(playerRelatedObject.Address); } - var resolvedPaths = await _ipcManager.Penumbra.GetCharacterData(_logger, playerRelatedObject).ConfigureAwait(false) ?? throw new InvalidOperationException("Penumbra returned null data; couldn't proceed with character"); + Guid penumbraRequestId = Guid.Empty; + Stopwatch? penumbraSw = null; + if (logDebug) + { + penumbraRequestId = Guid.NewGuid(); + penumbraSw = Stopwatch.StartNew(); + _logger.LogDebug("Penumbra GetCharacterData start {id} for {obj}", penumbraRequestId, playerRelatedObject); + } + + var resolvedPaths = await _ipcManager.Penumbra.GetCharacterData(_logger, playerRelatedObject).ConfigureAwait(false); + + if (logDebug) + { + penumbraSw!.Stop(); + _logger.LogDebug("Penumbra GetCharacterData done {id} in {elapsedMs}ms (count={count})", + penumbraRequestId, + penumbraSw.ElapsedMilliseconds, + resolvedPaths?.Count ?? 
-1); + } + + if (resolvedPaths == null) + throw new InvalidOperationException("Penumbra returned null data; couldn't proceed with character"); ct.ThrowIfCancellationRequested(); var staticBuildTask = Task.Run(() => BuildStaticReplacements(resolvedPaths), ct); @@ -476,7 +490,7 @@ public class PlayerDataFactory if (transientPaths.Count == 0) return (new Dictionary(StringComparer.Ordinal), clearedReplacements); - var resolved = await GetFileReplacementsFromPaths(obj, transientPaths, new HashSet(StringComparer.Ordinal)) + var resolved = await GetFileReplacementsFromPaths(transientPaths, new HashSet(StringComparer.Ordinal)) .ConfigureAwait(false); if (_maxTransientResolvedEntries > 0 && resolved.Count > _maxTransientResolvedEntries) @@ -678,7 +692,6 @@ public class PlayerDataFactory private async Task> GetFileReplacementsFromPaths( - GameObjectHandler handler, HashSet forwardResolve, HashSet reverseResolve) { @@ -693,59 +706,6 @@ public class PlayerDataFactory var reversePathsLower = reversePaths.Length == 0 ? 
[] : reversePaths.Select(p => p.ToLowerInvariant()).ToArray(); Dictionary> resolvedPaths = new(forwardPaths.Length + reversePaths.Length, StringComparer.Ordinal); - if (handler.ObjectKind != ObjectKind.Player) - { - var (objectIndex, forwardResolved, reverseResolved) = await _dalamudUtil.RunOnFrameworkThread(() => - { - var idx = handler.GetGameObject()?.ObjectIndex; - if (!idx.HasValue) - return ((int?)null, Array.Empty(), Array.Empty()); - - var resolvedForward = new string[forwardPaths.Length]; - for (int i = 0; i < forwardPaths.Length; i++) - resolvedForward[i] = _ipcManager.Penumbra.ResolveGameObjectPath(forwardPaths[i], idx.Value); - - var resolvedReverse = new string[reversePaths.Length][]; - for (int i = 0; i < reversePaths.Length; i++) - resolvedReverse[i] = _ipcManager.Penumbra.ReverseResolveGameObjectPath(reversePaths[i], idx.Value); - - return (idx, resolvedForward, resolvedReverse); - }).ConfigureAwait(false); - - if (objectIndex.HasValue) - { - for (int i = 0; i < forwardPaths.Length; i++) - { - var filePath = forwardResolved[i]?.ToLowerInvariant(); - if (string.IsNullOrEmpty(filePath)) - continue; - - if (resolvedPaths.TryGetValue(filePath, out var list)) - list.Add(forwardPaths[i].ToLowerInvariant()); - else - { - resolvedPaths[filePath] = [forwardPathsLower[i]]; - } - } - - for (int i = 0; i < reversePaths.Length; i++) - { - var filePath = reversePathsLower[i]; - var reverseResolvedLower = new string[reverseResolved[i].Length]; - for (var j = 0; j < reverseResolvedLower.Length; j++) - { - reverseResolvedLower[j] = reverseResolved[i][j].ToLowerInvariant(); - } - if (resolvedPaths.TryGetValue(filePath, out var list)) - list.AddRange(reverseResolved[i].Select(c => c.ToLowerInvariant())); - else - resolvedPaths[filePath] = [.. 
reverseResolved[i].Select(c => c.ToLowerInvariant()).ToList()]; - } - - return resolvedPaths.ToDictionary(k => k.Key, k => k.Value.ToArray(), StringComparer.OrdinalIgnoreCase).AsReadOnly(); - } - } - var (forward, reverse) = await _ipcManager.Penumbra.ResolvePathsAsync(forwardPaths, reversePaths).ConfigureAwait(false); for (int i = 0; i < forwardPaths.Length; i++) diff --git a/LightlessSync/PlayerData/Pairs/IPairHandlerAdapter.cs b/LightlessSync/PlayerData/Pairs/IPairHandlerAdapter.cs index 0566491..8375ed3 100644 --- a/LightlessSync/PlayerData/Pairs/IPairHandlerAdapter.cs +++ b/LightlessSync/PlayerData/Pairs/IPairHandlerAdapter.cs @@ -1,43 +1,42 @@ - using LightlessSync.API.Data; +using LightlessSync.API.Data; - namespace LightlessSync.PlayerData.Pairs; +namespace LightlessSync.PlayerData.Pairs; - /// - /// orchestrates the lifecycle of a paired character - /// - public interface IPairHandlerAdapter : IDisposable, IPairPerformanceSubject - { - new string Ident { get; } - bool Initialized { get; } - bool IsVisible { get; } - bool ScheduledForDeletion { get; set; } - CharacterData? LastReceivedCharacterData { get; } - long LastAppliedDataBytes { get; } - new string? PlayerName { get; } - string PlayerNameHash { get; } - uint PlayerCharacterId { get; } - DateTime? LastDataReceivedAt { get; } - DateTime? LastApplyAttemptAt { get; } - DateTime? LastSuccessfulApplyAt { get; } - string? LastFailureReason { get; } - IReadOnlyList LastBlockingConditions { get; } - bool IsApplying { get; } - bool IsDownloading { get; } - int PendingDownloadCount { get; } - int ForbiddenDownloadCount { get; } - bool PendingModReapply { get; } - bool ModApplyDeferred { get; } - int MissingCriticalMods { get; } - int MissingNonCriticalMods { get; } - int MissingForbiddenMods { get; } - DateTime? InvisibleSinceUtc { get; } - DateTime? 
VisibilityEvictionDueAtUtc { get; } +/// +/// orchestrates the lifecycle of a paired character +/// +public interface IPairHandlerAdapter : IDisposable, IPairPerformanceSubject +{ + new string Ident { get; } + bool Initialized { get; } + bool IsVisible { get; } + bool ScheduledForDeletion { get; set; } + CharacterData? LastReceivedCharacterData { get; } + long LastAppliedDataBytes { get; } + new string? PlayerName { get; } + string PlayerNameHash { get; } + uint PlayerCharacterId { get; } + DateTime? LastDataReceivedAt { get; } + DateTime? LastApplyAttemptAt { get; } + DateTime? LastSuccessfulApplyAt { get; } + string? LastFailureReason { get; } + IReadOnlyList LastBlockingConditions { get; } + bool IsApplying { get; } + bool IsDownloading { get; } + int PendingDownloadCount { get; } + int ForbiddenDownloadCount { get; } + bool PendingModReapply { get; } + bool ModApplyDeferred { get; } + int MissingCriticalMods { get; } + int MissingNonCriticalMods { get; } + int MissingForbiddenMods { get; } void Initialize(); - void ApplyData(CharacterData data); - void ApplyLastReceivedData(bool forced = false); - bool FetchPerformanceMetricsFromCache(); - void LoadCachedCharacterData(CharacterData data); - void SetUploading(bool uploading); - void SetPaused(bool paused); - } + void ApplyData(CharacterData data); + void ApplyLastReceivedData(bool forced = false); + Task EnsurePerformanceMetricsAsync(CancellationToken cancellationToken); + bool FetchPerformanceMetricsFromCache(); + void LoadCachedCharacterData(CharacterData data); + void SetUploading(bool uploading); + void SetPaused(bool paused); +} diff --git a/LightlessSync/PlayerData/Pairs/Pair.cs b/LightlessSync/PlayerData/Pairs/Pair.cs index e95b7fe..9d794f8 100644 --- a/LightlessSync/PlayerData/Pairs/Pair.cs +++ b/LightlessSync/PlayerData/Pairs/Pair.cs @@ -217,12 +217,6 @@ public class Pair if (handler is null) return PairDebugInfo.Empty; - var now = DateTime.UtcNow; - var dueAt = handler.VisibilityEvictionDueAtUtc; - var 
remainingSeconds = dueAt.HasValue - ? Math.Max(0, (dueAt.Value - now).TotalSeconds) - : (double?)null; - return new PairDebugInfo( true, handler.Initialized, @@ -231,9 +225,6 @@ public class Pair handler.LastDataReceivedAt, handler.LastApplyAttemptAt, handler.LastSuccessfulApplyAt, - handler.InvisibleSinceUtc, - handler.VisibilityEvictionDueAtUtc, - remainingSeconds, handler.LastFailureReason, handler.LastBlockingConditions, handler.IsApplying, diff --git a/LightlessSync/PlayerData/Pairs/PairCoordinator.Users.cs b/LightlessSync/PlayerData/Pairs/PairCoordinator.Users.cs index 713333e..6a5cb45 100644 --- a/LightlessSync/PlayerData/Pairs/PairCoordinator.Users.cs +++ b/LightlessSync/PlayerData/Pairs/PairCoordinator.Users.cs @@ -137,7 +137,7 @@ public sealed partial class PairCoordinator _pendingCharacterData.TryRemove(user.UID, out _); if (registrationResult.Value.CharacterIdent is not null) { - _ = _handlerRegistry.DeregisterOfflinePair(registrationResult.Value); + _ = _handlerRegistry.DeregisterOfflinePair(registrationResult.Value, forceDisposal: true); } _mediator.Publish(new ClearProfileUserDataMessage(user)); diff --git a/LightlessSync/PlayerData/Pairs/PairDebugInfo.cs b/LightlessSync/PlayerData/Pairs/PairDebugInfo.cs index 60abf35..820c687 100644 --- a/LightlessSync/PlayerData/Pairs/PairDebugInfo.cs +++ b/LightlessSync/PlayerData/Pairs/PairDebugInfo.cs @@ -8,9 +8,6 @@ public sealed record PairDebugInfo( DateTime? LastDataReceivedAt, DateTime? LastApplyAttemptAt, DateTime? LastSuccessfulApplyAt, - DateTime? InvisibleSinceUtc, - DateTime? VisibilityEvictionDueAtUtc, - double? VisibilityEvictionRemainingSeconds, string? 
LastFailureReason, IReadOnlyList BlockingConditions, bool IsApplying, @@ -32,9 +29,6 @@ public sealed record PairDebugInfo( null, null, null, - null, - null, - null, Array.Empty(), false, false, diff --git a/LightlessSync/PlayerData/Pairs/PairHandlerAdapter.cs b/LightlessSync/PlayerData/Pairs/PairHandlerAdapter.cs index eded176..829b290 100644 --- a/LightlessSync/PlayerData/Pairs/PairHandlerAdapter.cs +++ b/LightlessSync/PlayerData/Pairs/PairHandlerAdapter.cs @@ -24,7 +24,6 @@ using Microsoft.Extensions.Hosting; using Microsoft.Extensions.Logging; using DalamudObjectKind = Dalamud.Game.ClientState.Objects.Enums.ObjectKind; using ObjectKind = LightlessSync.API.Data.Enum.ObjectKind; -using FileReplacementDataComparer = LightlessSync.PlayerData.Data.FileReplacementDataComparer; namespace LightlessSync.PlayerData.Pairs; @@ -54,43 +53,39 @@ internal sealed class PairHandlerAdapter : DisposableMediatorSubscriberBase, IPa private readonly XivDataAnalyzer _modelAnalyzer; private readonly PenumbraTempCollectionJanitor _tempCollectionJanitor; private readonly LightlessConfigService _configService; + private readonly SemaphoreSlim _metricsComputeGate = new(1, 1); private readonly PairManager _pairManager; private readonly IFramework _framework; private CancellationTokenSource? _applicationCancellationTokenSource; private Guid _applicationId; private Task? _applicationTask; private CharacterData? _cachedData = null; + private CharacterData? _lastAppliedData = null; private GameObjectHandler? _charaHandler; private readonly Dictionary _customizeIds = []; private CombatData? _dataReceivedInDowntime; private CancellationTokenSource? _downloadCancellationTokenSource; private bool _forceApplyMods = false; - private bool _forceFullReapply; - private Dictionary<(string GamePath, string? Hash), string>? 
_lastAppliedModdedPaths; - private bool _needsCollectionRebuild; private bool _pendingModReapply; private bool _lastModApplyDeferred; private int _lastMissingCriticalMods; private int _lastMissingNonCriticalMods; private int _lastMissingForbiddenMods; - private bool _lastMissingCachedFiles; - private string? _lastSuccessfulDataHash; private bool _isVisible; private Guid _penumbraCollection; private readonly object _collectionGate = new(); + private Task? _penumbraCollectionTask; private bool _redrawOnNextApplication = false; private readonly object _initializationGate = new(); private readonly object _pauseLock = new(); private Task _pauseTransitionTask = Task.CompletedTask; private bool _pauseRequested; + private bool _wasRevertedOnPause; private DateTime? _lastDataReceivedAt; private DateTime? _lastApplyAttemptAt; private DateTime? _lastSuccessfulApplyAt; private string? _lastFailureReason; private IReadOnlyList _lastBlockingConditions = Array.Empty(); - private readonly object _visibilityGraceGate = new(); - private CancellationTokenSource? _visibilityGraceCts; - private static readonly TimeSpan VisibilityEvictionGrace = TimeSpan.FromMinutes(1); private readonly object _ownedRetryGate = new(); private readonly Dictionary> _pendingOwnedChanges = new(); private CancellationTokenSource? _ownedRetryCts; @@ -116,8 +111,6 @@ internal sealed class PairHandlerAdapter : DisposableMediatorSubscriberBase, IPa private bool _lastAllowNeighborTolerance; private readonly ConcurrentDictionary _dumpedRemoteSkeletonForHash = new(StringComparer.OrdinalIgnoreCase); - private DateTime? _invisibleSinceUtc; - private DateTime? 
_visibilityEvictionDueAtUtc; private DateTime _nextActorLookupUtc = DateTime.MinValue; private static readonly TimeSpan ActorLookupInterval = TimeSpan.FromSeconds(1); private static readonly SemaphoreSlim ActorInitializationLimiter = new(1, 1); @@ -132,8 +125,6 @@ internal sealed class PairHandlerAdapter : DisposableMediatorSubscriberBase, IPa private ushort _lastKnownObjectIndex = ushort.MaxValue; private string? _lastKnownName; - public DateTime? InvisibleSinceUtc => _invisibleSinceUtc; - public DateTime? VisibilityEvictionDueAtUtc => _visibilityEvictionDueAtUtc; public string Ident { get; } public bool Initialized { get; private set; } public bool ScheduledForDeletion { get; set; } @@ -150,23 +141,6 @@ internal sealed class PairHandlerAdapter : DisposableMediatorSubscriberBase, IPa if (!_isVisible) { DisableSync(); - - _invisibleSinceUtc = DateTime.UtcNow; - _visibilityEvictionDueAtUtc = _invisibleSinceUtc.Value.Add(VisibilityEvictionGrace); - - StartVisibilityGraceTask(); - } - else - { - CancelVisibilityGraceTask(); - - _invisibleSinceUtc = null; - _visibilityEvictionDueAtUtc = null; - - ScheduledForDeletion = false; - - if (_charaHandler is not null && _charaHandler.Address != nint.Zero) - _ = EnsurePenumbraCollection(); } var user = GetPrimaryUserData(); @@ -194,8 +168,8 @@ internal sealed class PairHandlerAdapter : DisposableMediatorSubscriberBase, IPa public string? 
LastFailureReason => _lastFailureReason; public IReadOnlyList LastBlockingConditions => _lastBlockingConditions; public bool IsApplying => _applicationTask is { IsCompleted: false }; - public bool IsDownloading => _downloadManager.IsDownloading; - public int PendingDownloadCount => _downloadManager.CurrentDownloads.Count; + public bool IsDownloading => _downloadManager.IsDownloadingFor(_charaHandler); + public int PendingDownloadCount => _downloadManager.GetPendingDownloadCount(_charaHandler); public int ForbiddenDownloadCount => _downloadManager.ForbiddenTransfers.Count; public PairHandlerAdapter( @@ -246,6 +220,8 @@ internal sealed class PairHandlerAdapter : DisposableMediatorSubscriberBase, IPa _tempCollectionJanitor = tempCollectionJanitor; _modelAnalyzer = modelAnalyzer; _configService = configService; + + _ = EnsurePenumbraCollectionAsync(); } public void Initialize() @@ -282,13 +258,14 @@ internal sealed class PairHandlerAdapter : DisposableMediatorSubscriberBase, IPa } Mediator.Subscribe(this, _ => { + LogDownloadCancellation("zone switch start"); _downloadCancellationTokenSource?.CancelDispose(); _charaHandler?.Invalidate(); IsVisible = false; }); - Mediator.Subscribe(this, _ => + Mediator.Subscribe(this, __ => { - ResetPenumbraCollection(releaseFromPenumbra: false, reason: "PenumbraInitialized"); + _ = EnsurePenumbraCollectionAsync(); if (!IsVisible && _charaHandler is not null) { PlayerName = string.Empty; @@ -297,7 +274,6 @@ internal sealed class PairHandlerAdapter : DisposableMediatorSubscriberBase, IPa } EnableSync(); }); - Mediator.Subscribe(this, _ => ResetPenumbraCollection(releaseFromPenumbra: false, reason: "PenumbraDisposed")); Mediator.Subscribe(this, msg => { if (msg.GameObjectHandler == _charaHandler) @@ -324,23 +300,6 @@ internal sealed class PairHandlerAdapter : DisposableMediatorSubscriberBase, IPa return; } - if (_pendingModReapply && IsVisible) - { - if (LastReceivedCharacterData is not null) - { - Logger.LogDebug("Downloads finished for 
{handler}, reapplying pending mod data", GetLogIdentifier()); - ApplyLastReceivedData(forced: true); - return; - } - - if (_cachedData is not null) - { - Logger.LogDebug("Downloads finished for {handler}, reapplying pending mod data from cache", GetLogIdentifier()); - ApplyCharacterData(Guid.NewGuid(), _cachedData, forceApplyCustomization: true); - return; - } - } - TryApplyQueuedData(); }); @@ -429,32 +388,65 @@ internal sealed class PairHandlerAdapter : DisposableMediatorSubscriberBase, IPa return string.Equals(alias, Ident, StringComparison.Ordinal) ? alias : $"{alias} ({Ident})"; } - private Guid EnsurePenumbraCollection() + private void LogDownloadCancellation(string reason, Guid? applicationBase = null) { - if (!IsVisible) + if (_downloadCancellationTokenSource is null) { - return Guid.Empty; + return; } + var inFlight = _pairDownloadTask is { IsCompleted: false }; + if (inFlight) + { + if (applicationBase.HasValue) + { + Logger.LogDebug("[BASE-{appBase}] {handler}: Cancelling in-flight download ({reason})", + applicationBase.Value, GetLogIdentifier(), reason); + } + else + { + Logger.LogDebug("{handler}: Cancelling in-flight download ({reason})", + GetLogIdentifier(), reason); + } + } + else + { + if (applicationBase.HasValue) + { + Logger.LogDebug("[BASE-{appBase}] {handler}: Cancelling download token ({reason}, in-flight={inFlight})", + applicationBase.Value, GetLogIdentifier(), reason, inFlight); + } + else + { + Logger.LogDebug("{handler}: Cancelling download token ({reason}, in-flight={inFlight})", + GetLogIdentifier(), reason, inFlight); + } + } + } + + private Task EnsurePenumbraCollectionAsync() + { if (_penumbraCollection != Guid.Empty) { - return _penumbraCollection; + return Task.FromResult(_penumbraCollection); } lock (_collectionGate) { if (_penumbraCollection != Guid.Empty) { - return _penumbraCollection; + return Task.FromResult(_penumbraCollection); } - var cached = _pairStateCache.TryGetTemporaryCollection(Ident); - if (cached.HasValue && 
cached.Value != Guid.Empty) - { - _penumbraCollection = cached.Value; - return _penumbraCollection; - } + _penumbraCollectionTask ??= Task.Run(CreatePenumbraCollectionAsync); + return _penumbraCollectionTask; + } + } + private async Task CreatePenumbraCollectionAsync() + { + try + { if (!_ipcManager.Penumbra.APIAvailable) { return Guid.Empty; @@ -462,16 +454,28 @@ internal sealed class PairHandlerAdapter : DisposableMediatorSubscriberBase, IPa var user = GetPrimaryUserDataSafe(); var uid = !string.IsNullOrEmpty(user.UID) ? user.UID : Ident; - var created = _ipcManager.Penumbra.CreateTemporaryCollectionAsync(Logger, uid) - .ConfigureAwait(false).GetAwaiter().GetResult(); - if (created != Guid.Empty) + var collection = await _ipcManager.Penumbra.CreateTemporaryCollectionAsync(Logger, uid).ConfigureAwait(false); + if (collection != Guid.Empty) { - _penumbraCollection = created; - _pairStateCache.StoreTemporaryCollection(Ident, created); - _tempCollectionJanitor.Register(created); + _tempCollectionJanitor.Register(collection); } - return _penumbraCollection; + lock (_collectionGate) + { + if (_penumbraCollection == Guid.Empty && collection != Guid.Empty) + { + _penumbraCollection = collection; + } + } + + return collection; + } + finally + { + lock (_collectionGate) + { + _penumbraCollectionTask = null; + } } } @@ -489,18 +493,8 @@ internal sealed class PairHandlerAdapter : DisposableMediatorSubscriberBase, IPa } } - var cached = _pairStateCache.ClearTemporaryCollection(Ident); - if (cached.HasValue && cached.Value != Guid.Empty) - { - toRelease = cached.Value; - hadCollection = true; - } - if (hadCollection) { - _needsCollectionRebuild = true; - _forceFullReapply = true; - _forceApplyMods = true; _tempCollectionJanitor.Unregister(toRelease); } @@ -639,43 +633,18 @@ internal sealed class PairHandlerAdapter : DisposableMediatorSubscriberBase, IPa return; } - var hasMissingCachedFiles = HasMissingCachedFiles(LastReceivedCharacterData); - var missingStarted = 
!_lastMissingCachedFiles && hasMissingCachedFiles; - var missingResolved = _lastMissingCachedFiles && !hasMissingCachedFiles; - _lastMissingCachedFiles = hasMissingCachedFiles; - var shouldForce = forced || missingStarted || missingResolved; - var forceApplyCustomization = forced; - if (IsPaused()) { Logger.LogTrace("Permissions paused for {Ident}, skipping reapply", Ident); return; } - var sanitized = CloneAndSanitizeLastReceived(out var dataHash); + var sanitized = CloneAndSanitizeLastReceived(out _); if (sanitized is null) { Logger.LogTrace("Sanitized data null for {Ident}", Ident); return; } - var dataApplied = !string.IsNullOrEmpty(dataHash) - && string.Equals(dataHash, _lastSuccessfulDataHash ?? string.Empty, StringComparison.Ordinal); - var needsApply = !dataApplied; - var modFilesChanged = PlayerModFilesChanged(sanitized, _cachedData); - var shouldForceMods = shouldForce || modFilesChanged; - forceApplyCustomization = forced || needsApply; - var suppressForcedModRedraw = !forced && hasMissingCachedFiles && dataApplied; - - if (shouldForceMods) - { - _forceApplyMods = true; - _forceFullReapply = true; - LastAppliedDataBytes = -1; - LastAppliedDataTris = -1; - LastAppliedApproximateEffectiveTris = -1; - LastAppliedApproximateVRAMBytes = -1; - LastAppliedApproximateEffectiveVRAMBytes = -1; - } _pairStateCache.Store(Ident, sanitized); @@ -696,11 +665,10 @@ internal sealed class PairHandlerAdapter : DisposableMediatorSubscriberBase, IPa { Logger.LogTrace("Handler for {Ident} not visible, caching sanitized data for later", Ident); _cachedData = sanitized; - _forceFullReapply = true; return; } - ApplyCharacterData(Guid.NewGuid(), sanitized, forceApplyCustomization, suppressForcedModRedraw); + ApplyCharacterData(Guid.NewGuid(), sanitized, forceApplyCustomization: forced); } public bool FetchPerformanceMetricsFromCache() @@ -722,6 +690,74 @@ internal sealed class PairHandlerAdapter : DisposableMediatorSubscriberBase, IPa return true; } + public async Task 
EnsurePerformanceMetricsAsync(CancellationToken cancellationToken) + { + EnsureInitialized(); + + if (LastReceivedCharacterData is null || IsApplying) + { + return; + } + + if (LastAppliedApproximateVRAMBytes >= 0 + && LastAppliedDataTris >= 0 + && LastAppliedApproximateEffectiveVRAMBytes >= 0 + && LastAppliedApproximateEffectiveTris >= 0) + { + return; + } + + await _metricsComputeGate.WaitAsync(cancellationToken).ConfigureAwait(false); + try + { + cancellationToken.ThrowIfCancellationRequested(); + + if (LastReceivedCharacterData is null) + { + return; + } + + if (LastAppliedApproximateVRAMBytes >= 0 + && LastAppliedDataTris >= 0 + && LastAppliedApproximateEffectiveVRAMBytes >= 0 + && LastAppliedApproximateEffectiveTris >= 0) + { + return; + } + + var sanitized = CloneAndSanitizeLastReceived(out var dataHash); + if (sanitized is null) + { + return; + } + + if (!string.IsNullOrEmpty(dataHash) && TryApplyCachedMetrics(dataHash)) + { + _cachedData = sanitized; + _pairStateCache.Store(Ident, sanitized); + return; + } + + if (LastAppliedApproximateVRAMBytes < 0 || LastAppliedApproximateEffectiveVRAMBytes < 0) + { + _playerPerformanceService.ComputeAndAutoPauseOnVRAMUsageThresholds(this, sanitized, []); + } + + if (LastAppliedDataTris < 0 || LastAppliedApproximateEffectiveTris < 0) + { + await _playerPerformanceService.CheckTriangleUsageThresholds(this, sanitized).ConfigureAwait(false); + } + + StorePerformanceMetrics(sanitized); + _cachedData = sanitized; + _pairStateCache.Store(Ident, sanitized); + } + finally + { + _metricsComputeGate.Release(); + } + } + private CharacterData? CloneAndSanitizeLastReceived(out string? 
dataHash) { dataHash = null; @@ -799,54 +835,6 @@ internal sealed class PairHandlerAdapter : DisposableMediatorSubscriberBase, IPa new PairPerformanceMetrics(LastAppliedDataTris, LastAppliedApproximateVRAMBytes, LastAppliedApproximateEffectiveVRAMBytes, LastAppliedApproximateEffectiveTris)); } - private bool HasMissingCachedFiles(CharacterData characterData) - { - try - { - HashSet inspectedHashes = new(StringComparer.OrdinalIgnoreCase); - foreach (var replacements in characterData.FileReplacements.Values) - { - foreach (var replacement in replacements) - { - if (!string.IsNullOrEmpty(replacement.FileSwapPath)) - { - if (Path.IsPathRooted(replacement.FileSwapPath) && !File.Exists(replacement.FileSwapPath)) - { - Logger.LogTrace("Missing file swap path {Path} detected for {Handler}", replacement.FileSwapPath, GetLogIdentifier()); - return true; - } - continue; - } - - if (string.IsNullOrEmpty(replacement.Hash) || !inspectedHashes.Add(replacement.Hash)) - { - continue; - } - - var cacheEntry = _fileDbManager.GetFileCacheByHash(replacement.Hash); - if (cacheEntry is null) - { - Logger.LogTrace("Missing cached file {Hash} detected for {Handler}", replacement.Hash, GetLogIdentifier()); - return true; - } - - if (!File.Exists(cacheEntry.ResolvedFilepath)) - { - Logger.LogTrace("Cached file {Hash} missing on disk for {Handler}, removing cache entry", replacement.Hash, GetLogIdentifier()); - _fileDbManager.RemoveHashedFile(cacheEntry.Hash, cacheEntry.PrefixedFilePath); - return true; - } - } - } - } - catch (Exception ex) - { - Logger.LogDebug(ex, "Failed to determine cache availability for {Handler}", GetLogIdentifier()); - } - - return false; - } - private CharacterData? RemoveNotSyncedFiles(CharacterData? 
data) { Logger.LogTrace("Removing not synced files for {Ident}", Ident); @@ -899,28 +887,43 @@ internal sealed class PairHandlerAdapter : DisposableMediatorSubscriberBase, IPa return data; } - private bool HasValidCachedModdedPaths() - { - if (_lastAppliedModdedPaths is null || _lastAppliedModdedPaths.Count == 0) - { - return false; - } + private bool IsForbiddenHash(string hash) + => _downloadManager.ForbiddenTransfers.Exists(f => string.Equals(f.Hash, hash, StringComparison.Ordinal)); - foreach (var entry in _lastAppliedModdedPaths) + private bool HasMissingFiles(CharacterData data) + { + var seen = new HashSet(StringComparer.OrdinalIgnoreCase); + foreach (var replacement in data.FileReplacements.SelectMany(k => k.Value)) { - if (string.IsNullOrEmpty(entry.Value) || !File.Exists(entry.Value)) + if (!string.IsNullOrEmpty(replacement.FileSwapPath)) { - Logger.LogDebug("Cached file path {path} missing for {handler}, forcing recalculation", entry.Value ?? "empty", GetLogIdentifier()); - return false; + continue; + } + + var hash = replacement.Hash; + if (string.IsNullOrWhiteSpace(hash) || !seen.Add(hash)) + { + continue; + } + + var fileCache = _fileDbManager.GetFileCacheByHash(hash); + if (fileCache is null || !File.Exists(fileCache.ResolvedFilepath)) + { + if (fileCache is not null) + { + _fileDbManager.RemoveHashedFile(fileCache.Hash, fileCache.PrefixedFilePath); + } + + if (!IsForbiddenHash(hash)) + { + return true; + } } } - return true; + return false; } - private bool IsForbiddenHash(string hash) - => _downloadManager.ForbiddenTransfers.Exists(f => string.Equals(f.Hash, hash, StringComparison.Ordinal)); - private static bool IsNonPriorityModPath(string? 
gamePath) { if (string.IsNullOrEmpty(gamePath)) @@ -1082,31 +1085,46 @@ internal sealed class PairHandlerAdapter : DisposableMediatorSubscriberBase, IPa "Cannot apply character data: Receiving Player is in an invalid state, deferring application"))); Logger.LogDebug("[BASE-{appBase}] Received data but player was in invalid state, charaHandlerIsNull: {charaIsNull}, playerPointerIsNull: {ptrIsNull}", applicationBase, _charaHandler == null, PlayerCharacter == IntPtr.Zero); - var hasDiffMods = characterData.CheckUpdatedData(applicationBase, _cachedData, Logger, + var hasDiffMods = characterData.CheckUpdatedData(applicationBase, _lastAppliedData, Logger, this, forceApplyCustomization, forceApplyMods: false) .Any(p => p.Value.Contains(PlayerChanges.ModManip) || p.Value.Contains(PlayerChanges.ModFiles)); - _forceApplyMods = hasDiffMods || _forceApplyMods || _cachedData == null; + _forceApplyMods = hasDiffMods || _forceApplyMods || _lastAppliedData == null; + _pendingModReapply = true; _cachedData = characterData; - _forceFullReapply = true; Logger.LogDebug("[BASE-{appBase}] Setting data: {hash}, forceApplyMods: {force}", applicationBase, _cachedData.DataHash.Value, _forceApplyMods); + return; } SetUploading(false); Logger.LogDebug("[BASE-{appbase}] Applying data for {player}, forceApplyCustomization: {forced}, forceApplyMods: {forceMods}", applicationBase, GetLogIdentifier(), forceApplyCustomization, _forceApplyMods); - Logger.LogDebug("[BASE-{appbase}] Hash for data is {newHash}, current cache hash is {oldHash}", applicationBase, characterData.DataHash.Value, _cachedData?.DataHash.Value ?? "NODATA"); + Logger.LogDebug("[BASE-{appbase}] Hash for data is {newHash}, last applied hash is {oldHash}", applicationBase, characterData.DataHash.Value, _lastAppliedData?.DataHash.Value ?? "NODATA"); - if (handlerReady - && string.Equals(characterData.DataHash.Value, _cachedData?.DataHash.Value ?? 
string.Empty, StringComparison.Ordinal) - && !forceApplyCustomization && !_forceApplyMods) + var hasMissingFiles = false; + if (string.Equals(characterData.DataHash.Value, _lastAppliedData?.DataHash.Value ?? string.Empty, StringComparison.Ordinal) + && !forceApplyCustomization + && !_forceApplyMods + && !_pendingModReapply) { - return; + hasMissingFiles = HasMissingFiles(characterData); + if (!hasMissingFiles) + { + return; + } } + _pendingModReapply = false; + _lastModApplyDeferred = false; + _lastMissingCriticalMods = 0; + _lastMissingNonCriticalMods = 0; + _lastMissingForbiddenMods = 0; + Mediator.Publish(new EventMessage(new Event(PlayerName, user, nameof(PairHandlerAdapter), EventSeverity.Informational, "Applying Character Data"))); - var charaDataToUpdate = characterData.CheckUpdatedData(applicationBase, _cachedData?.DeepClone() ?? new(), Logger, this, + _forceApplyMods |= forceApplyCustomization || hasMissingFiles; + + var charaDataToUpdate = characterData.CheckUpdatedData(applicationBase, _lastAppliedData?.DeepClone() ?? 
new(), Logger, this, forceApplyCustomization, _forceApplyMods, suppressForcedModRedraw); if (handlerReady && _forceApplyMods) @@ -1126,11 +1144,7 @@ internal sealed class PairHandlerAdapter : DisposableMediatorSubscriberBase, IPa } Logger.LogDebug("[BASE-{appbase}] Downloading and applying character for {name}", applicationBase, GetPrimaryAliasOrUidSafe()); - - var forceFullReapply = _forceFullReapply - || LastAppliedApproximateVRAMBytes < 0 || LastAppliedDataTris < 0 || LastAppliedApproximateEffectiveTris < 0; - - DownloadAndApplyCharacter(applicationBase, characterData.DeepClone(), charaDataToUpdate, forceFullReapply); + DownloadAndApplyCharacter(applicationBase, characterData.DeepClone(), charaDataToUpdate); } public override string ToString() @@ -1164,46 +1178,6 @@ internal sealed class PairHandlerAdapter : DisposableMediatorSubscriberBase, IPa } } - private void CancelVisibilityGraceTask() - { - lock (_visibilityGraceGate) - { - _visibilityGraceCts?.CancelDispose(); - _visibilityGraceCts = null; - } - } - - private void StartVisibilityGraceTask() - { - CancellationToken token; - lock (_visibilityGraceGate) - { - _visibilityGraceCts = _visibilityGraceCts?.CancelRecreate() ?? 
new CancellationTokenSource(); - token = _visibilityGraceCts.Token; - } - - _visibilityGraceTask = Task.Run(async () => - { - try - { - await Task.Delay(VisibilityEvictionGrace, token).ConfigureAwait(false); - token.ThrowIfCancellationRequested(); - if (IsVisible) return; - - ScheduledForDeletion = true; - ResetPenumbraCollection(reason: "VisibilityLostTimeout"); - } - catch (OperationCanceledException) - { - // operation cancelled, do nothing - } - catch (Exception ex) - { - Logger.LogDebug(ex, "Visibility grace task failed for {handler}", GetLogIdentifier()); - } - }, CancellationToken.None); - } - private void ScheduleOwnedObjectRetry(ObjectKind kind, HashSet changes) { if (kind == ObjectKind.Player || changes.Count == 0) @@ -1389,6 +1363,7 @@ internal sealed class PairHandlerAdapter : DisposableMediatorSubscriberBase, IPa Guid applicationId = Guid.NewGuid(); _applicationCancellationTokenSource?.CancelDispose(); _applicationCancellationTokenSource = null; + LogDownloadCancellation("dispose"); _downloadCancellationTokenSource?.CancelDispose(); _downloadCancellationTokenSource = null; ClearAllOwnedObjectRetries(); @@ -1396,10 +1371,7 @@ internal sealed class PairHandlerAdapter : DisposableMediatorSubscriberBase, IPa _ownedRetryCts = null; _downloadManager.Dispose(); _charaHandler?.Dispose(); - CancelVisibilityGraceTask(); _charaHandler = null; - _invisibleSinceUtc = null; - _visibilityEvictionDueAtUtc = null; if (!string.IsNullOrEmpty(name)) { @@ -1415,7 +1387,6 @@ internal sealed class PairHandlerAdapter : DisposableMediatorSubscriberBase, IPa var isStopping = _lifetime.ApplicationStopping.IsCancellationRequested; if (isStopping) { - ResetPenumbraCollection(reason: "DisposeStopping", awaitIpc: false); ScheduleSafeRevertOnDisposal(applicationId, name, alias); return; } @@ -1474,9 +1445,8 @@ internal sealed class PairHandlerAdapter : DisposableMediatorSubscriberBase, IPa { PlayerName = null; _cachedData = null; - _lastSuccessfulDataHash = null; - 
_lastAppliedModdedPaths = null; - _needsCollectionRebuild = false; + _lastAppliedData = null; + LastReceivedCharacterData = null; _performanceMetricsCache.Clear(Ident); Logger.LogDebug("Disposing {name} complete", name); } @@ -1797,87 +1767,28 @@ internal sealed class PairHandlerAdapter : DisposableMediatorSubscriberBase, IPa return result; } - private static bool PlayerModFilesChanged(CharacterData newData, CharacterData? previousData) - { - return !FileReplacementListsEqual( - TryGetFileReplacementList(newData, ObjectKind.Player), - TryGetFileReplacementList(previousData, ObjectKind.Player)); - } - - private static IReadOnlyCollection? TryGetFileReplacementList(CharacterData? data, ObjectKind objectKind) - { - if (data is null) - { - return null; - } - - return data.FileReplacements.TryGetValue(objectKind, out var list) ? list : null; - } - - private static bool FileReplacementListsEqual(IReadOnlyCollection? left, IReadOnlyCollection? right) - { - if (left is null || left.Count == 0) - { - return right is null || right.Count == 0; - } - - if (right is null || right.Count == 0) - { - return false; - } - - var comparer = FileReplacementDataComparer.Instance; - return !left.Except(right, comparer).Any() && !right.Except(left, comparer).Any(); - } - - private void DownloadAndApplyCharacter(Guid applicationBase, CharacterData charaData, Dictionary> updatedData, bool forceFullReapply) + private void DownloadAndApplyCharacter(Guid applicationBase, CharacterData charaData, Dictionary> updatedData) { if (!updatedData.Any()) { - if (forceFullReapply) - { - updatedData = BuildFullChangeSet(charaData); - } - - if (!updatedData.Any()) - { - Logger.LogDebug("[BASE-{appBase}] Nothing to update for {obj}", applicationBase, GetLogIdentifier()); - _forceFullReapply = false; - return; - } + Logger.LogDebug("[BASE-{appBase}] Nothing to update for {obj}", applicationBase, GetLogIdentifier()); + return; } var updateModdedPaths = updatedData.Values.Any(v => v.Any(p => p == 
PlayerChanges.ModFiles)); var updateManip = updatedData.Values.Any(v => v.Any(p => p == PlayerChanges.ModManip)); - var needsCollectionRebuild = _needsCollectionRebuild; - var reuseCachedModdedPaths = !updateModdedPaths && needsCollectionRebuild && _lastAppliedModdedPaths is not null; - updateModdedPaths = updateModdedPaths || needsCollectionRebuild; - updateManip = updateManip || needsCollectionRebuild; - Dictionary<(string GamePath, string? Hash), string>? cachedModdedPaths = null; - if (reuseCachedModdedPaths) - { - if (HasValidCachedModdedPaths()) - { - cachedModdedPaths = _lastAppliedModdedPaths; - } - else - { - Logger.LogDebug("{handler}: Cached files missing, recalculating mappings", GetLogIdentifier()); - _lastAppliedModdedPaths = null; - } - } + LogDownloadCancellation("new download request", applicationBase); _downloadCancellationTokenSource = _downloadCancellationTokenSource?.CancelRecreate() ?? new CancellationTokenSource(); var downloadToken = _downloadCancellationTokenSource.Token; - _ = DownloadAndApplyCharacterAsync(applicationBase, charaData, updatedData, updateModdedPaths, updateManip, cachedModdedPaths, downloadToken) + _ = DownloadAndApplyCharacterAsync(applicationBase, charaData, updatedData, updateModdedPaths, updateManip, downloadToken) .ConfigureAwait(false); } private Task? _pairDownloadTask; - private Task _visibilityGraceTask; private async Task DownloadAndApplyCharacterAsync(Guid applicationBase, CharacterData charaData, Dictionary> updatedData, - bool updateModdedPaths, bool updateManip, Dictionary<(string GamePath, string? Hash), string>? 
cachedModdedPaths, CancellationToken downloadToken) + bool updateModdedPaths, bool updateManip, CancellationToken downloadToken) { var concurrencyLease = await _pairProcessingLimiter.AcquireAsync(downloadToken).ConfigureAwait(false); try @@ -1885,154 +1796,97 @@ internal sealed class PairHandlerAdapter : DisposableMediatorSubscriberBase, IPa bool skipDownscaleForPair = ShouldSkipDownscale(); bool skipDecimationForPair = ShouldSkipDecimation(); var user = GetPrimaryUserData(); - Dictionary<(string GamePath, string? Hash), string> moddedPaths; - List missingReplacements = []; + Dictionary<(string GamePath, string? Hash), string> moddedPaths = []; if (updateModdedPaths) { - if (cachedModdedPaths is not null) - { - moddedPaths = new Dictionary<(string GamePath, string? Hash), string>(cachedModdedPaths, cachedModdedPaths.Comparer); - } - else - { - int attempts = 0; - List toDownloadReplacements = TryCalculateModdedDictionary(applicationBase, charaData, out moddedPaths, downloadToken); - missingReplacements = toDownloadReplacements; + int attempts = 0; + List toDownloadReplacements = TryCalculateModdedDictionary(applicationBase, charaData, out moddedPaths, downloadToken); - while (toDownloadReplacements.Count > 0 && attempts++ <= 10 && !downloadToken.IsCancellationRequested) + while (toDownloadReplacements.Count > 0 && attempts++ <= 10 && !downloadToken.IsCancellationRequested) + { + if (_pairDownloadTask != null && !_pairDownloadTask.IsCompleted) { - if (_pairDownloadTask != null && !_pairDownloadTask.IsCompleted) - { - Logger.LogDebug("[BASE-{appBase}] Finishing prior running download task for player {name}, {kind}", applicationBase, PlayerName, updatedData); - await _pairDownloadTask.ConfigureAwait(false); - } - - Logger.LogDebug("[BASE-{appBase}] Downloading missing files for player {name}, {kind}", applicationBase, PlayerName, updatedData); - - Mediator.Publish(new EventMessage(new Event(PlayerName, user, nameof(PairHandlerAdapter), EventSeverity.Informational, - 
$"Starting download for {toDownloadReplacements.Count} files"))); - var toDownloadFiles = await _downloadManager.InitiateDownloadList(_charaHandler!, toDownloadReplacements, downloadToken).ConfigureAwait(false); - - if (!_playerPerformanceService.ComputeAndAutoPauseOnVRAMUsageThresholds(this, charaData, toDownloadFiles)) - { - RecordFailure("Auto pause triggered by VRAM usage thresholds", "VRAMThreshold"); - _downloadManager.ClearDownload(); - return; - } - - var handlerForDownload = _charaHandler; - _pairDownloadTask = Task.Run(async () => await _downloadManager.DownloadFiles(handlerForDownload, toDownloadReplacements, downloadToken, skipDownscaleForPair, skipDecimationForPair).ConfigureAwait(false)); - + Logger.LogDebug("[BASE-{appBase}] Finishing prior running download task for player {name}, {kind}", applicationBase, PlayerName, updatedData); await _pairDownloadTask.ConfigureAwait(false); - - if (downloadToken.IsCancellationRequested) - { - Logger.LogTrace("[BASE-{appBase}] Detected cancellation", applicationBase); - RecordFailure("Download cancelled", "Cancellation"); - return; - } - - if (!skipDownscaleForPair) - { - var downloadedTextureHashes = toDownloadReplacements - .Where(static replacement => replacement.GamePaths.Any(static path => path.EndsWith(".tex", StringComparison.OrdinalIgnoreCase))) - .Select(static replacement => replacement.Hash) - .Distinct(StringComparer.OrdinalIgnoreCase) - .ToList(); - - if (downloadedTextureHashes.Count > 0) - { - await _textureDownscaleService.WaitForPendingJobsAsync(downloadedTextureHashes, downloadToken).ConfigureAwait(false); - } - } - - if (!skipDecimationForPair) - { - var downloadedModelHashes = toDownloadReplacements - .Where(static replacement => replacement.GamePaths.Any(static path => path.EndsWith(".mdl", StringComparison.OrdinalIgnoreCase))) - .Select(static replacement => replacement.Hash) - .Distinct(StringComparer.OrdinalIgnoreCase) - .ToList(); - - if (downloadedModelHashes.Count > 0) - { - await 
_modelDecimationService.WaitForPendingJobsAsync(downloadedModelHashes, downloadToken).ConfigureAwait(false); - } - } - - toDownloadReplacements = TryCalculateModdedDictionary(applicationBase, charaData, out moddedPaths, downloadToken); - missingReplacements = toDownloadReplacements; - - if (toDownloadReplacements.TrueForAll(c => _downloadManager.ForbiddenTransfers.Exists(f => string.Equals(f.Hash, c.Hash, StringComparison.Ordinal)))) - { - break; - } - - await Task.Delay(TimeSpan.FromSeconds(2), downloadToken).ConfigureAwait(false); } - if (!await _playerPerformanceService.CheckBothThresholds(this, charaData).ConfigureAwait(false)) + Logger.LogDebug("[BASE-{appBase}] Downloading missing files for player {name}, {kind}", applicationBase, PlayerName, updatedData); + + Mediator.Publish(new EventMessage(new Event(PlayerName, user, nameof(PairHandlerAdapter), EventSeverity.Informational, + $"Starting download for {toDownloadReplacements.Count} files"))); + var toDownloadFiles = await _downloadManager.InitiateDownloadList(_charaHandler!, toDownloadReplacements, downloadToken).ConfigureAwait(false); + + if (!_playerPerformanceService.ComputeAndAutoPauseOnVRAMUsageThresholds(this, charaData, toDownloadFiles)) { - RecordFailure("Auto pause triggered by performance thresholds", "PerformanceThreshold"); + RecordFailure("Auto pause triggered by VRAM usage thresholds", "VRAMThreshold"); + _downloadManager.ClearDownload(); return; } + + var handlerForDownload = _charaHandler; + _pairDownloadTask = _downloadManager.DownloadFiles(handlerForDownload, toDownloadReplacements, toDownloadFiles, downloadToken, skipDownscaleForPair, skipDecimationForPair); + + await _pairDownloadTask.ConfigureAwait(false); + + if (downloadToken.IsCancellationRequested) + { + Logger.LogTrace("[BASE-{appBase}] Detected cancellation", applicationBase); + _pendingModReapply = true; + RecordFailure("Download cancelled", "Cancellation"); + return; + } + + if (!skipDownscaleForPair) + { + var 
downloadedTextureHashes = toDownloadReplacements + .Where(static replacement => replacement.GamePaths.Any(static path => path.EndsWith(".tex", StringComparison.OrdinalIgnoreCase))) + .Select(static replacement => replacement.Hash) + .Distinct(StringComparer.OrdinalIgnoreCase) + .ToList(); + + if (downloadedTextureHashes.Count > 0) + { + await _textureDownscaleService.WaitForPendingJobsAsync(downloadedTextureHashes, downloadToken).ConfigureAwait(false); + } + } + + if (!skipDecimationForPair) + { + var downloadedModelHashes = toDownloadReplacements + .Where(static replacement => replacement.GamePaths.Any(static path => path.EndsWith(".mdl", StringComparison.OrdinalIgnoreCase))) + .Select(static replacement => replacement.Hash) + .Distinct(StringComparer.OrdinalIgnoreCase) + .ToList(); + + if (downloadedModelHashes.Count > 0) + { + await _modelDecimationService.WaitForPendingJobsAsync(downloadedModelHashes, downloadToken).ConfigureAwait(false); + } + } + + toDownloadReplacements = TryCalculateModdedDictionary(applicationBase, charaData, out moddedPaths, downloadToken); + + if (toDownloadReplacements.TrueForAll(c => _downloadManager.ForbiddenTransfers.Exists(f => string.Equals(f.Hash, c.Hash, StringComparison.Ordinal)))) + { + break; + } + + await Task.Delay(TimeSpan.FromSeconds(2), downloadToken).ConfigureAwait(false); } - } - else - { - moddedPaths = cachedModdedPaths is not null - ? new Dictionary<(string GamePath, string? 
Hash), string>(cachedModdedPaths, cachedModdedPaths.Comparer) - : []; - } - var wantsModApply = updateModdedPaths || updateManip; - var pendingModReapply = false; - var deferModApply = false; - - if (wantsModApply && missingReplacements.Count > 0) - { - CountMissingReplacements(missingReplacements, out var missingCritical, out var missingNonCritical, out var missingForbidden); - _lastMissingCriticalMods = missingCritical; - _lastMissingNonCriticalMods = missingNonCritical; - _lastMissingForbiddenMods = missingForbidden; - - var hasCriticalMissing = missingCritical > 0; - var hasNonCriticalMissing = missingNonCritical > 0; - var hasDownloadableMissing = missingReplacements.Any(replacement => !IsForbiddenHash(replacement.Hash)); - var hasDownloadableCriticalMissing = hasCriticalMissing - && missingReplacements.Any(replacement => !IsForbiddenHash(replacement.Hash) && IsCriticalModReplacement(replacement)); - - pendingModReapply = hasDownloadableMissing; - _lastModApplyDeferred = false; - - if (hasDownloadableCriticalMissing) + if (!await _playerPerformanceService.CheckBothThresholds(this, charaData).ConfigureAwait(false)) { - deferModApply = true; - _lastModApplyDeferred = true; - Logger.LogDebug("[BASE-{appBase}] Critical mod files missing for {handler}, deferring mod apply ({count} missing)", - applicationBase, GetLogIdentifier(), missingReplacements.Count); + RecordFailure("Auto pause triggered by performance thresholds", "PerformanceThreshold"); + return; } - else if (hasNonCriticalMissing && hasDownloadableMissing) - { - Logger.LogDebug("[BASE-{appBase}] Non-critical mod files missing for {handler}, applying partial mods and reapplying after downloads ({count} missing)", - applicationBase, GetLogIdentifier(), missingReplacements.Count); - } - } - else - { - _lastMissingCriticalMods = 0; - _lastMissingNonCriticalMods = 0; - _lastMissingForbiddenMods = 0; - _lastModApplyDeferred = false; } - if (deferModApply) - { - updateModdedPaths = false; - updateManip = false; 
- RemoveModApplyChanges(updatedData); - } + _pendingModReapply = false; + _lastMissingCriticalMods = 0; + _lastMissingNonCriticalMods = 0; + _lastMissingForbiddenMods = 0; + _lastModApplyDeferred = false; downloadToken.ThrowIfCancellationRequested(); @@ -2040,9 +1894,9 @@ internal sealed class PairHandlerAdapter : DisposableMediatorSubscriberBase, IPa if (handlerForApply is null || handlerForApply.Address == nint.Zero) { Logger.LogDebug("[BASE-{appBase}] Handler not available for {player}, cached data for later application", applicationBase, GetLogIdentifier()); + _pendingModReapply = true; _cachedData = charaData; _pairStateCache.Store(Ident, charaData); - _forceFullReapply = true; RecordFailure("Handler not available for application", "HandlerUnavailable"); return; } @@ -2059,7 +1913,6 @@ internal sealed class PairHandlerAdapter : DisposableMediatorSubscriberBase, IPa if (downloadToken.IsCancellationRequested || (appToken?.IsCancellationRequested ?? false)) { - _forceFullReapply = true; RecordFailure("Application cancelled", "Cancellation"); return; } @@ -2067,7 +1920,12 @@ internal sealed class PairHandlerAdapter : DisposableMediatorSubscriberBase, IPa _applicationCancellationTokenSource = _applicationCancellationTokenSource.CancelRecreate() ?? 
new CancellationTokenSource(); var token = _applicationCancellationTokenSource.Token; - _applicationTask = ApplyCharacterDataAsync(applicationBase, handlerForApply, charaData, updatedData, updateModdedPaths, updateManip, moddedPaths, wantsModApply, pendingModReapply, token); + _applicationTask = ApplyCharacterDataAsync(applicationBase, handlerForApply, charaData, updatedData, updateModdedPaths, updateManip, moddedPaths, token); + } + catch (OperationCanceledException) when (downloadToken.IsCancellationRequested) + { + _pendingModReapply = true; + RecordFailure("Download cancelled", "Cancellation"); } finally { @@ -2076,7 +1934,7 @@ internal sealed class PairHandlerAdapter : DisposableMediatorSubscriberBase, IPa } private async Task ApplyCharacterDataAsync(Guid applicationBase, GameObjectHandler handlerForApply, CharacterData charaData, Dictionary> updatedData, bool updateModdedPaths, bool updateManip, - Dictionary<(string GamePath, string? Hash), string> moddedPaths, bool wantsModApply, bool pendingModReapply, CancellationToken token) + Dictionary<(string GamePath, string? 
Hash), string> moddedPaths, CancellationToken token) { try { @@ -2092,9 +1950,9 @@ internal sealed class PairHandlerAdapter : DisposableMediatorSubscriberBase, IPa { Logger.LogDebug("[BASE-{applicationId}] Timed out waiting for {handler} to fully load, caching data for later application", applicationBase, GetLogIdentifier()); + _pendingModReapply = true; _cachedData = charaData; _pairStateCache.Store(Ident, charaData); - _forceFullReapply = true; RecordFailure("Actor not fully loaded within timeout", "FullyLoadedTimeout"); return; } @@ -2105,13 +1963,13 @@ internal sealed class PairHandlerAdapter : DisposableMediatorSubscriberBase, IPa Guid penumbraCollection = Guid.Empty; if (updateModdedPaths || updateManip) { - penumbraCollection = EnsurePenumbraCollection(); + penumbraCollection = await EnsurePenumbraCollectionAsync().ConfigureAwait(false); if (penumbraCollection == Guid.Empty) { Logger.LogTrace("[BASE-{applicationId}] Penumbra collection unavailable for {handler}, caching data for later application", applicationBase, GetLogIdentifier()); + _pendingModReapply = true; _cachedData = charaData; _pairStateCache.Store(Ident, charaData); - _forceFullReapply = true; RecordFailure("Penumbra collection unavailable", "PenumbraUnavailable"); return; } @@ -2119,7 +1977,6 @@ internal sealed class PairHandlerAdapter : DisposableMediatorSubscriberBase, IPa if (updateModdedPaths) { - // ensure collection is set var objIndex = await _dalamudUtil.RunOnFrameworkThread(() => { var gameObject = handlerForApply.GetGameObject(); @@ -2129,43 +1986,41 @@ internal sealed class PairHandlerAdapter : DisposableMediatorSubscriberBase, IPa if (!objIndex.HasValue) { Logger.LogDebug("[BASE-{applicationId}] GameObject not available for {handler}, caching data for later application", applicationBase, GetLogIdentifier()); + _pendingModReapply = true; _cachedData = charaData; _pairStateCache.Store(Ident, charaData); - _forceFullReapply = true; RecordFailure("Game object not available for 
application", "GameObjectUnavailable"); return; } - SplitPapMappings(moddedPaths, out var withoutPap, out var papOnly); - await _ipcManager.Penumbra.AssignTemporaryCollectionAsync(Logger, penumbraCollection, objIndex.Value).ConfigureAwait(false); + SplitPapMappings(moddedPaths, out var withoutPap, out var papOnly); + var hasPap = papOnly.Count > 0; + await _ipcManager.Penumbra.SetTemporaryModsAsync( Logger, _applicationId, penumbraCollection, withoutPap.ToDictionary(k => k.Key.GamePath, k => k.Value, StringComparer.Ordinal)) .ConfigureAwait(false); - await _ipcManager.Penumbra.RedrawAsync(Logger, handlerForApply, _applicationId, token).ConfigureAwait(false); - if (handlerForApply.Address != nint.Zero) - await _actorObjectService.WaitForFullyLoadedAsync(handlerForApply.Address, token).ConfigureAwait(false); - - var removedPap = await StripIncompatiblePapAsync(handlerForApply, charaData, papOnly, token).ConfigureAwait(false); - if (removedPap > 0) + if (hasPap) { - Logger.LogTrace("[{applicationId}] Removed {removedPap} incompatible PAP mappings found for {handler}", _applicationId, removedPap, GetLogIdentifier()); + var removedPap = await StripIncompatiblePapAsync(handlerForApply, charaData, papOnly, token).ConfigureAwait(false); + if (removedPap > 0) + { + Logger.LogTrace("[{applicationId}] Removed {removedPap} incompatible PAP mappings found for {handler}", _applicationId, removedPap, GetLogIdentifier()); + } + + var merged = new Dictionary<(string GamePath, string? Hash), string>(withoutPap, withoutPap.Comparer); + foreach (var kv in papOnly) + merged[kv.Key] = kv.Value; + + await _ipcManager.Penumbra.SetTemporaryModsAsync( + Logger, _applicationId, penumbraCollection, + merged.ToDictionary(k => k.Key.GamePath, k => k.Value, StringComparer.Ordinal)) + .ConfigureAwait(false); } - var merged = new Dictionary<(string GamePath, string? 
Hash), string>(withoutPap, withoutPap.Comparer); - foreach (var kv in papOnly) - merged[kv.Key] = kv.Value; - - await _ipcManager.Penumbra.SetTemporaryModsAsync( - Logger, _applicationId, penumbraCollection, - merged.ToDictionary(k => k.Key.GamePath, k => k.Value, StringComparer.Ordinal)) - .ConfigureAwait(false); - - _lastAppliedModdedPaths = new Dictionary<(string GamePath, string? Hash), string>(merged, merged.Comparer); - LastAppliedDataBytes = -1; foreach (var path in moddedPaths.Values.Distinct(StringComparer.OrdinalIgnoreCase).Select(v => new FileInfo(v)).Where(p => p.Exists)) { @@ -2197,13 +2052,8 @@ internal sealed class PairHandlerAdapter : DisposableMediatorSubscriberBase, IPa } _cachedData = charaData; + _lastAppliedData = charaData; _pairStateCache.Store(Ident, charaData); - if (wantsModApply) - { - _pendingModReapply = pendingModReapply; - } - _forceFullReapply = _pendingModReapply; - _needsCollectionRebuild = false; if (LastAppliedApproximateVRAMBytes < 0 || LastAppliedApproximateEffectiveVRAMBytes < 0) { _playerPerformanceService.ComputeAndAutoPauseOnVRAMUsageThresholds(this, charaData, new List()); @@ -2215,34 +2065,31 @@ internal sealed class PairHandlerAdapter : DisposableMediatorSubscriberBase, IPa } StorePerformanceMetrics(charaData); - _lastSuccessfulDataHash = GetDataHashSafe(charaData); _lastSuccessfulApplyAt = DateTime.UtcNow; ClearFailureState(); Logger.LogDebug("[{applicationId}] Application finished", _applicationId); - } - catch (OperationCanceledException) - { - Logger.LogDebug("[{applicationId}] Application cancelled for {handler}", _applicationId, GetLogIdentifier()); - _cachedData = charaData; - _pairStateCache.Store(Ident, charaData); - _forceFullReapply = true; - RecordFailure("Application cancelled", "Cancellation"); - } - catch (Exception ex) - { - if (ex is AggregateException aggr && aggr.InnerExceptions.Any(e => e is ArgumentNullException)) + } + catch (OperationCanceledException) { - IsVisible = false; + 
Logger.LogDebug("[{applicationId}] Application cancelled for {handler}", _applicationId, GetLogIdentifier()); + _pendingModReapply = true; + _cachedData = charaData; + _pairStateCache.Store(Ident, charaData); + RecordFailure("Application cancelled", "Cancellation"); + } + catch (Exception ex) + { + if (ex is AggregateException aggr && aggr.InnerExceptions.Any(e => e is ArgumentNullException)) + { + IsVisible = false; _forceApplyMods = true; _cachedData = charaData; _pairStateCache.Store(Ident, charaData); - _forceFullReapply = true; Logger.LogDebug("[{applicationId}] Cancelled, player turned null during application", _applicationId); } else { Logger.LogWarning(ex, "[{applicationId}] Cancelled", _applicationId); - _forceFullReapply = true; } RecordFailure($"Application failed: {ex.Message}", "Exception"); } @@ -2286,7 +2133,6 @@ internal sealed class PairHandlerAdapter : DisposableMediatorSubscriberBase, IPa { try { - _forceFullReapply = true; ApplyCharacterData(appData, cachedData!, forceApplyCustomization: true); } catch (Exception ex) @@ -2303,7 +2149,6 @@ internal sealed class PairHandlerAdapter : DisposableMediatorSubscriberBase, IPa { try { - _forceFullReapply = true; ApplyLastReceivedData(forced: true); } catch (Exception ex) @@ -2323,6 +2168,25 @@ internal sealed class PairHandlerAdapter : DisposableMediatorSubscriberBase, IPa } TryApplyQueuedData(); + + if (_pendingModReapply && IsVisible && !IsApplying && LastReceivedCharacterData is not null && CanApplyNow()) + { + var now = DateTime.UtcNow; + if (!_lastApplyAttemptAt.HasValue || now - _lastApplyAttemptAt.Value > TimeSpan.FromSeconds(5)) + { + _ = Task.Run(() => + { + try + { + ApplyLastReceivedData(forced: true); + } + catch (Exception ex) + { + Logger.LogError(ex, "Failed to reapply pending data for {handler}", GetLogIdentifier()); + } + }); + } + } } private void HandleVisibilityLoss(bool logChange) @@ -2330,6 +2194,7 @@ internal sealed class PairHandlerAdapter : DisposableMediatorSubscriberBase, IPa 
IsVisible = false; _charaHandler?.Invalidate(); ClearAllOwnedObjectRetries(); + LogDownloadCancellation("visibility lost"); _downloadCancellationTokenSource?.CancelDispose(); _downloadCancellationTokenSource = null; if (logChange) @@ -2367,6 +2232,29 @@ internal sealed class PairHandlerAdapter : DisposableMediatorSubscriberBase, IPa _ = ReapplyPetNamesAsync(petNamesData!); }); + + var handlerForAssign = _charaHandler; + _ = Task.Run(async () => + { + if (handlerForAssign is null) + { + return; + } + + var penumbraCollection = await EnsurePenumbraCollectionAsync().ConfigureAwait(false); + if (penumbraCollection == Guid.Empty) + { + return; + } + + var objIndex = await _dalamudUtil.RunOnFrameworkThread(() => handlerForAssign.GetGameObject()?.ObjectIndex) + .ConfigureAwait(false); + if (objIndex.HasValue) + { + await _ipcManager.Penumbra.AssignTemporaryCollectionAsync(Logger, penumbraCollection, objIndex.Value) + .ConfigureAwait(false); + } + }); } private async Task ReapplyHonorificAsync(string honorificData) @@ -2472,6 +2360,11 @@ internal sealed class PairHandlerAdapter : DisposableMediatorSubscriberBase, IPa (item) => { token.ThrowIfCancellationRequested(); + if (string.IsNullOrWhiteSpace(item.Hash)) + { + Logger.LogTrace("[BASE-{appBase}] Skipping replacement with empty hash for paths: {paths}", applicationBase, string.Join(", ", item.GamePaths)); + return; + } var fileCache = _fileDbManager.GetFileCacheByHash(item.Hash); if (fileCache is not null && !File.Exists(fileCache.ResolvedFilepath)) { @@ -2512,9 +2405,16 @@ internal sealed class PairHandlerAdapter : DisposableMediatorSubscriberBase, IPa continue; } - var preferredPath = skipDownscaleForPair - ? 
fileCache.ResolvedFilepath - : _textureDownscaleService.GetPreferredPath(item.Hash, fileCache.ResolvedFilepath); + var preferredPath = fileCache.ResolvedFilepath; + if (!skipDownscaleForPair && gamePath.EndsWith(".tex", StringComparison.OrdinalIgnoreCase)) + { + preferredPath = _textureDownscaleService.GetPreferredPath(item.Hash, preferredPath); + } + + if (!skipDecimationForPair && gamePath.EndsWith(".mdl", StringComparison.OrdinalIgnoreCase)) + { + preferredPath = _modelDecimationService.GetPreferredPath(item.Hash, preferredPath); + } outputDict[(gamePath, item.Hash)] = preferredPath; } @@ -2558,6 +2458,7 @@ internal sealed class PairHandlerAdapter : DisposableMediatorSubscriberBase, IPa { Logger.LogDebug("Pausing handler {handler}", GetLogIdentifier()); DisableSync(); + _wasRevertedOnPause = false; if (_charaHandler is null || _charaHandler.Address == nint.Zero) { @@ -2566,7 +2467,7 @@ internal sealed class PairHandlerAdapter : DisposableMediatorSubscriberBase, IPa } var applicationId = Guid.NewGuid(); - await RevertToRestoredAsync(applicationId).ConfigureAwait(false); + _wasRevertedOnPause = await RevertToRestoredAsync(applicationId).ConfigureAwait(false); IsVisible = false; } catch (Exception ex) @@ -2590,9 +2491,12 @@ internal sealed class PairHandlerAdapter : DisposableMediatorSubscriberBase, IPa IsVisible = true; } + var forceApply = _wasRevertedOnPause; + _wasRevertedOnPause = false; + if (LastReceivedCharacterData is not null) { - ApplyLastReceivedData(forced: true); + ApplyLastReceivedData(forced: forceApply); } } catch (Exception ex) @@ -2601,29 +2505,40 @@ internal sealed class PairHandlerAdapter : DisposableMediatorSubscriberBase, IPa } } - private async Task RevertToRestoredAsync(Guid applicationId) + private async Task RevertToRestoredAsync(Guid applicationId) { - if (_charaHandler is null || _charaHandler.Address == nint.Zero) + var handler = _charaHandler; + if (handler is null || handler.Address == nint.Zero) { - return; + return false; } try { - 
var gameObject = await _dalamudUtil.RunOnFrameworkThread(() => _charaHandler.GetGameObject()).ConfigureAwait(false); + var reverted = false; + var gameObject = await _dalamudUtil.RunOnFrameworkThread(() => + { + if (handler.Address == nint.Zero) + { + return null; + } + + return handler.GetGameObject(); + }).ConfigureAwait(false); if (gameObject is not Dalamud.Game.ClientState.Objects.Types.ICharacter character) { - return; + return false; } if (_ipcManager.Penumbra.APIAvailable) { - var penumbraCollection = EnsurePenumbraCollection(); + var penumbraCollection = await EnsurePenumbraCollectionAsync().ConfigureAwait(false); if (penumbraCollection != Guid.Empty) { await _ipcManager.Penumbra.AssignTemporaryCollectionAsync(Logger, penumbraCollection, character.ObjectIndex).ConfigureAwait(false); await _ipcManager.Penumbra.SetTemporaryModsAsync(Logger, applicationId, penumbraCollection, new Dictionary(StringComparer.Ordinal)).ConfigureAwait(false); await _ipcManager.Penumbra.SetManipulationDataAsync(Logger, applicationId, penumbraCollection, string.Empty).ConfigureAwait(false); + reverted = true; } } @@ -2646,29 +2561,28 @@ internal sealed class PairHandlerAdapter : DisposableMediatorSubscriberBase, IPa if (string.IsNullOrEmpty(characterName)) { Logger.LogWarning("[{applicationId}] Failed to determine character name for {handler} while reverting", applicationId, GetLogIdentifier()); - return; + return reverted; } foreach (var kind in kinds) { await RevertCustomizationDataAsync(kind, characterName, applicationId, CancellationToken.None).ConfigureAwait(false); + reverted = true; } - _cachedData = null; - LastAppliedDataBytes = -1; - LastAppliedDataTris = -1; - LastAppliedApproximateEffectiveTris = -1; - LastAppliedApproximateVRAMBytes = -1; - LastAppliedApproximateEffectiveVRAMBytes = -1; + return reverted; } catch (Exception ex) { Logger.LogWarning(ex, "Failed to revert handler {handler} during pause", GetLogIdentifier()); } + + return false; } private void DisableSync() 
{ + LogDownloadCancellation("sync disabled"); _downloadCancellationTokenSource = _downloadCancellationTokenSource?.CancelRecreate(); _applicationCancellationTokenSource = _applicationCancellationTokenSource?.CancelRecreate(); } @@ -2676,6 +2590,11 @@ internal sealed class PairHandlerAdapter : DisposableMediatorSubscriberBase, IPa private void EnableSync() { TryApplyQueuedData(); + + if (_pendingModReapply && LastReceivedCharacterData is not null && !IsApplying && CanApplyNow()) + { + ApplyLastReceivedData(forced: true); + } } private void TryApplyQueuedData() @@ -2699,10 +2618,10 @@ internal sealed class PairHandlerAdapter : DisposableMediatorSubscriberBase, IPa ApplyCharacterData(pending.ApplicationId, pending.CharacterData, pending.Forced); } catch (Exception ex) - { - Logger.LogError(ex, "Failed applying queued data for {handler}", GetLogIdentifier()); - } - }); + { + Logger.LogError(ex, "Failed applying queued data for {handler}", GetLogIdentifier()); + } + }); } private void HandleActorTracked(ActorObjectService.ActorDescriptor descriptor) @@ -2827,7 +2746,7 @@ internal sealed class PairHandlerAdapter : DisposableMediatorSubscriberBase, IPa HandleVisibilityLoss(logChange: false); } - private static bool TryResolveDescriptorHash(ActorObjectService.ActorDescriptor descriptor, out string hashedCid) + private bool TryResolveDescriptorHash(ActorObjectService.ActorDescriptor descriptor, out string hashedCid) { hashedCid = descriptor.HashedContentId ?? 
string.Empty; if (!string.IsNullOrEmpty(hashedCid)) @@ -2836,8 +2755,8 @@ internal sealed class PairHandlerAdapter : DisposableMediatorSubscriberBase, IPa if (descriptor.ObjectKind != DalamudObjectKind.Player || descriptor.Address == nint.Zero) return false; - hashedCid = DalamudUtilService.GetHashedCIDFromPlayerPointer(descriptor.Address); - return !string.IsNullOrEmpty(hashedCid); + return _dalamudUtil.TryGetHashedCIDFromAddress(descriptor.Address, out hashedCid) + && !string.IsNullOrEmpty(hashedCid); } private void UpdateLastKnownActor(ActorObjectService.ActorDescriptor descriptor) diff --git a/LightlessSync/PlayerData/Pairs/PairHandlerRegistry.cs b/LightlessSync/PlayerData/Pairs/PairHandlerRegistry.cs index 881c35c..9ffcd1f 100644 --- a/LightlessSync/PlayerData/Pairs/PairHandlerRegistry.cs +++ b/LightlessSync/PlayerData/Pairs/PairHandlerRegistry.cs @@ -136,6 +136,7 @@ public sealed class PairHandlerRegistry : IDisposable if (TryFinalizeHandlerRemoval(handler)) { handler.Dispose(); + _pairStateCache.Clear(registration.CharacterIdent); } } else if (shouldScheduleRemoval && handler is not null) @@ -356,6 +357,7 @@ public sealed class PairHandlerRegistry : IDisposable finally { _pairPerformanceMetricsCache.Clear(handler.Ident); + _pairStateCache.Clear(handler.Ident); } } } @@ -377,6 +379,7 @@ public sealed class PairHandlerRegistry : IDisposable { handler.Dispose(); _pairPerformanceMetricsCache.Clear(handler.Ident); + _pairStateCache.Clear(handler.Ident); } } @@ -401,6 +404,7 @@ public sealed class PairHandlerRegistry : IDisposable if (TryFinalizeHandlerRemoval(handler)) { handler.Dispose(); + _pairStateCache.Clear(handler.Ident); } } diff --git a/LightlessSync/PlayerData/Pairs/PairLedger.cs b/LightlessSync/PlayerData/Pairs/PairLedger.cs index fdb226e..2fe2205 100644 --- a/LightlessSync/PlayerData/Pairs/PairLedger.cs +++ b/LightlessSync/PlayerData/Pairs/PairLedger.cs @@ -271,7 +271,20 @@ public sealed class PairLedger : DisposableMediatorSubscriberBase try { - 
handler.ApplyLastReceivedData(forced: true); + _ = Task.Run(async () => + { + try + { + await handler.EnsurePerformanceMetricsAsync(CancellationToken.None).ConfigureAwait(false); + } + catch (Exception ex) + { + if (_logger.IsEnabled(LogLevel.Debug)) + { + _logger.LogDebug(ex, "Failed to ensure performance metrics for {Ident}", handler.Ident); + } + } + }); } catch (Exception ex) { diff --git a/LightlessSync/PlayerData/Pairs/PairManager.cs b/LightlessSync/PlayerData/Pairs/PairManager.cs index eb70a54..0a18a9d 100644 --- a/LightlessSync/PlayerData/Pairs/PairManager.cs +++ b/LightlessSync/PlayerData/Pairs/PairManager.cs @@ -160,8 +160,9 @@ public sealed class PairManager return PairOperationResult.Fail($"Pair {user.UID} not found."); } + var ident = connection.Ident; connection.SetOffline(); - return PairOperationResult.Ok(new PairRegistration(new PairUniqueIdentifier(user.UID), connection.Ident)); + return PairOperationResult.Ok(new PairRegistration(new PairUniqueIdentifier(user.UID), ident)); } } @@ -530,6 +531,7 @@ public sealed class PairManager return null; } + var ident = connection.Ident; if (connection.IsOnline) { connection.SetOffline(); @@ -542,7 +544,7 @@ public sealed class PairManager shell.Users.Remove(userId); } - return new PairRegistration(new PairUniqueIdentifier(userId), connection.Ident); + return new PairRegistration(new PairUniqueIdentifier(userId), ident); } public static PairConnection CreateFromFullData(UserFullPairDto dto) diff --git a/LightlessSync/PlayerData/Pairs/PairModels.cs b/LightlessSync/PlayerData/Pairs/PairModels.cs index 9f34ab2..3b37ce2 100644 --- a/LightlessSync/PlayerData/Pairs/PairModels.cs +++ b/LightlessSync/PlayerData/Pairs/PairModels.cs @@ -76,6 +76,7 @@ public sealed class PairConnection public void SetOffline() { IsOnline = false; + Ident = null; } public void UpdatePermissions(UserPermissions own, UserPermissions other) diff --git a/LightlessSync/Plugin.cs b/LightlessSync/Plugin.cs index 4e1ed4e..f14aeda 100644 --- 
a/LightlessSync/Plugin.cs +++ b/LightlessSync/Plugin.cs @@ -129,12 +129,15 @@ public sealed class Plugin : IDalamudPlugin services.AddSingleton(); services.AddSingleton(); services.AddSingleton(); + services.AddSingleton(); services.AddSingleton(); services.AddSingleton(); services.AddSingleton(); services.AddSingleton(); services.AddSingleton(); services.AddSingleton(); + services.AddSingleton(); + services.AddSingleton(); services.AddSingleton(); services.AddSingleton(); services.AddSingleton(); @@ -331,8 +334,7 @@ public sealed class Plugin : IDalamudPlugin pluginInterface, sp.GetRequiredService(), sp.GetRequiredService(), - sp.GetRequiredService(), - sp.GetRequiredService())); + sp.GetRequiredService())); services.AddSingleton(sp => new IpcCallerGlamourer( sp.GetRequiredService>(), @@ -516,6 +518,7 @@ public sealed class Plugin : IDalamudPlugin sp.GetRequiredService>(), pluginInterface.UiBuilder, sp.GetRequiredService(), + sp.GetRequiredService(), sp.GetRequiredService(), sp.GetServices(), sp.GetRequiredService(), diff --git a/LightlessSync/Services/ActorTracking/ActorObjectService.cs b/LightlessSync/Services/ActorTracking/ActorObjectService.cs index e443496..bb9ce7a 100644 --- a/LightlessSync/Services/ActorTracking/ActorObjectService.cs +++ b/LightlessSync/Services/ActorTracking/ActorObjectService.cs @@ -93,6 +93,7 @@ public sealed class ActorObjectService : IHostedService, IDisposable, IMediatorS } RefreshTrackedActors(force: true); }); + _mediator.Subscribe(this, _ => ClearTrackingState()); } private bool IsZoning => _condition[ConditionFlag.BetweenAreas] || _condition[ConditionFlag.BetweenAreas51]; @@ -342,18 +343,8 @@ public sealed class ActorObjectService : IHostedService, IDisposable, IMediatorS public Task StopAsync(CancellationToken cancellationToken) { DisposeHooks(); - _activePlayers.Clear(); - _gposePlayers.Clear(); - _actorsByHash.Clear(); - _actorsByName.Clear(); - _pendingHashResolutions.Clear(); + ClearTrackingState(); 
_mediator.UnsubscribeAll(this); - lock (_playerRelatedHandlerLock) - { - _playerRelatedHandlers.Clear(); - } - Volatile.Write(ref _snapshot, ActorSnapshot.Empty); - Volatile.Write(ref _gposeSnapshot, GposeSnapshot.Empty); return Task.CompletedTask; } @@ -580,36 +571,19 @@ public sealed class ActorObjectService : IHostedService, IDisposable, IMediatorS if (localPlayerAddress == nint.Zero) return nint.Zero; - var playerObject = (GameObject*)localPlayerAddress; - var candidateAddress = _objectTable.GetObjectAddress(playerObject->ObjectIndex + 1); if (ownerEntityId == 0) return nint.Zero; - if (candidateAddress != nint.Zero) - { - var candidate = (GameObject*)candidateAddress; - var candidateKind = (DalamudObjectKind)candidate->ObjectKind; - if (candidateKind is DalamudObjectKind.MountType or DalamudObjectKind.Companion) - { - if (ResolveOwnerId(candidate) == ownerEntityId) - return candidateAddress; - } - } + var playerObject = (GameObject*)localPlayerAddress; + var candidateAddress = _objectTable.GetObjectAddress(playerObject->ObjectIndex + 1); + if (candidateAddress == nint.Zero) + return nint.Zero; - foreach (var obj in _objectTable) - { - if (obj is null || obj.Address == nint.Zero || obj.Address == localPlayerAddress) - continue; - - if (obj.ObjectKind is not (DalamudObjectKind.MountType or DalamudObjectKind.Companion)) - continue; - - var candidate = (GameObject*)obj.Address; - if (ResolveOwnerId(candidate) == ownerEntityId) - return obj.Address; - } - - return nint.Zero; + var candidate = (GameObject*)candidateAddress; + var candidateKind = (DalamudObjectKind)candidate->ObjectKind; + return candidateKind is DalamudObjectKind.MountType or DalamudObjectKind.Companion + ? 
candidateAddress + : nint.Zero; } private unsafe nint GetPetAddress(nint localPlayerAddress, uint ownerEntityId) @@ -629,22 +603,6 @@ public sealed class ActorObjectService : IHostedService, IDisposable, IMediatorS } } - foreach (var obj in _objectTable) - { - if (obj is null || obj.Address == nint.Zero || obj.Address == localPlayerAddress) - continue; - - if (obj.ObjectKind != DalamudObjectKind.BattleNpc) - continue; - - var candidate = (GameObject*)obj.Address; - if (candidate->BattleNpcSubKind != BattleNpcSubKind.Pet) - continue; - - if (ResolveOwnerId(candidate) == ownerEntityId) - return obj.Address; - } - return nint.Zero; } @@ -664,23 +622,6 @@ public sealed class ActorObjectService : IHostedService, IDisposable, IMediatorS return candidate; } } - - foreach (var obj in _objectTable) - { - if (obj is null || obj.Address == nint.Zero || obj.Address == localPlayerAddress) - continue; - - if (obj.ObjectKind != DalamudObjectKind.BattleNpc) - continue; - - var candidate = (GameObject*)obj.Address; - if (candidate->BattleNpcSubKind != BattleNpcSubKind.Buddy) - continue; - - if (ResolveOwnerId(candidate) == ownerEntityId) - return obj.Address; - } - return nint.Zero; } @@ -1077,6 +1018,22 @@ public sealed class ActorObjectService : IHostedService, IDisposable, IMediatorS } } + private void ClearTrackingState() + { + _activePlayers.Clear(); + _gposePlayers.Clear(); + _actorsByHash.Clear(); + _actorsByName.Clear(); + _pendingHashResolutions.Clear(); + lock (_playerRelatedHandlerLock) + { + _playerRelatedHandlers.Clear(); + } + Volatile.Write(ref _snapshot, ActorSnapshot.Empty); + Volatile.Write(ref _gposeSnapshot, GposeSnapshot.Empty); + _nextRefreshAllowed = DateTime.MinValue; + } + public void Dispose() { DisposeHooks(); diff --git a/LightlessSync/Services/CharacterAnalyzer.cs b/LightlessSync/Services/CharacterAnalyzer.cs index 58388ae..34bb47d 100644 --- a/LightlessSync/Services/CharacterAnalyzer.cs +++ b/LightlessSync/Services/CharacterAnalyzer.cs @@ -106,7 +106,7 
@@ public sealed class CharacterAnalyzer : MediatorSubscriberBase, IDisposable _baseAnalysisCts.Dispose(); } - public async Task UpdateFileEntriesAsync(IEnumerable filePaths, CancellationToken token) + public async Task UpdateFileEntriesAsync(IEnumerable filePaths, CancellationToken token, bool force = false) { var normalized = new HashSet( filePaths.Where(path => !string.IsNullOrWhiteSpace(path)), @@ -115,6 +115,8 @@ public sealed class CharacterAnalyzer : MediatorSubscriberBase, IDisposable { return; } + + var updated = false; foreach (var objectEntries in LastAnalysis.Values) { foreach (var entry in objectEntries.Values) @@ -124,9 +126,26 @@ public sealed class CharacterAnalyzer : MediatorSubscriberBase, IDisposable continue; } token.ThrowIfCancellationRequested(); - await entry.ComputeSizes(_fileCacheManager, token).ConfigureAwait(false); + await entry.ComputeSizes(_fileCacheManager, token, force).ConfigureAwait(false); + + if (string.Equals(entry.FileType, "mdl", StringComparison.OrdinalIgnoreCase)) + { + var sourcePath = entry.FilePaths.FirstOrDefault(path => !string.IsNullOrWhiteSpace(path)); + if (!string.IsNullOrWhiteSpace(sourcePath)) + { + entry.UpdateTriangles(_xivDataAnalyzer.RefreshTrianglesForPath(entry.Hash, sourcePath)); + } + } + + updated = true; } } + + if (updated) + { + RecalculateSummary(); + Mediator.Publish(new CharacterDataAnalyzedMessage()); + } } private async Task BaseAnalysis(CharacterData charaData, CancellationToken token) @@ -311,6 +330,10 @@ public sealed class CharacterAnalyzer : MediatorSubscriberBase, IDisposable var original = new FileInfo(path).Length; var compressedLen = await fileCacheManager.GetCompressedSizeAsync(Hash, token).ConfigureAwait(false); + if (compressedLen <= 0 && !string.Equals(FileType, "tex", StringComparison.OrdinalIgnoreCase)) + { + compressedLen = original; + } fileCacheManager.SetSizeInfo(Hash, original, compressedLen); FileCacheManager.ApplySizesToEntries(CacheEntries, original, compressedLen); @@ 
-326,6 +349,7 @@ public sealed class CharacterAnalyzer : MediatorSubscriberBase, IDisposable private Lazy? _format; public void RefreshFormat() => _format = CreateFormatValue(); + public void UpdateTriangles(long triangles) => Triangles = triangles; private Lazy CreateFormatValue() => new(() => diff --git a/LightlessSync/Services/Chat/ZoneChatService.cs b/LightlessSync/Services/Chat/ZoneChatService.cs index 54dd2d9..67ae117 100644 --- a/LightlessSync/Services/Chat/ZoneChatService.cs +++ b/LightlessSync/Services/Chat/ZoneChatService.cs @@ -8,18 +8,26 @@ using Microsoft.Extensions.Hosting; using Microsoft.Extensions.Logging; using LightlessSync.UI.Services; using LightlessSync.LightlessConfiguration; +using LightlessSync.LightlessConfiguration.Models; +using System.Text.Json; +using System.Text.Json.Serialization; namespace LightlessSync.Services.Chat; public sealed class ZoneChatService : DisposableMediatorSubscriberBase, IHostedService { - private const int MaxMessageHistory = 150; + private const int MaxMessageHistory = 200; internal const int MaxOutgoingLength = 200; private const int MaxUnreadCount = 999; private const string ZoneUnavailableMessage = "Zone chat is only available in major cities."; private const string ZoneChannelKey = "zone"; private const int MaxReportReasonLength = 100; private const int MaxReportContextLength = 1000; + private static readonly JsonSerializerOptions PersistedHistorySerializerOptions = new() + { + PropertyNameCaseInsensitive = true, + DefaultIgnoreCondition = JsonIgnoreCondition.WhenWritingNull + }; private readonly ApiController _apiController; private readonly DalamudUtilService _dalamudUtilService; @@ -376,6 +384,7 @@ public sealed class ZoneChatService : DisposableMediatorSubscriberBase, IHostedS public Task StartAsync(CancellationToken cancellationToken) { + LoadPersistedSyncshellHistory(); Mediator.Subscribe(this, _ => HandleLogin()); Mediator.Subscribe(this, _ => HandleLogout()); Mediator.Subscribe(this, _ => 
ScheduleZonePresenceUpdate()); @@ -1000,11 +1009,22 @@ public sealed class ZoneChatService : DisposableMediatorSubscriberBase, IHostedS private void OnChatMessageReceived(ChatMessageDto dto) { - var descriptor = dto.Channel.WithNormalizedCustomKey(); - var key = descriptor.Type == ChatChannelType.Zone ? ZoneChannelKey : BuildChannelKey(descriptor); - var fromSelf = IsMessageFromSelf(dto, key); - var message = BuildMessage(dto, fromSelf); + ChatChannelDescriptor descriptor = dto.Channel.WithNormalizedCustomKey(); + string key = descriptor.Type == ChatChannelType.Zone ? ZoneChannelKey : BuildChannelKey(descriptor); + bool fromSelf = IsMessageFromSelf(dto, key); + ChatMessageEntry message = BuildMessage(dto, fromSelf); + bool mentionNotificationsEnabled = _chatConfigService.Current.EnableMentionNotifications; + bool notifyMention = mentionNotificationsEnabled + && !fromSelf + && descriptor.Type == ChatChannelType.Group + && TryGetSelfMentionToken(dto.Message, out _); + + string? mentionChannelName = null; + string? mentionSenderName = null; bool publishChannelList = false; + bool shouldPersistHistory = _chatConfigService.Current.PersistSyncshellHistory; + List? persistedMessages = null; + string? 
persistedChannelKey = null; using (_sync.EnterScope()) { @@ -1042,6 +1062,12 @@ public sealed class ZoneChatService : DisposableMediatorSubscriberBase, IHostedS state.Messages.RemoveAt(0); } + if (notifyMention) + { + mentionChannelName = state.DisplayName; + mentionSenderName = message.DisplayName; + } + if (string.Equals(_activeChannelKey, key, StringComparison.Ordinal)) { state.HasUnread = false; @@ -1058,10 +1084,29 @@ public sealed class ZoneChatService : DisposableMediatorSubscriberBase, IHostedS } MarkChannelsSnapshotDirtyLocked(); + + if (shouldPersistHistory && state.Type == ChatChannelType.Group) + { + persistedChannelKey = state.Key; + persistedMessages = BuildPersistedHistoryLocked(state); + } } Mediator.Publish(new ChatChannelMessageAdded(key, message)); + if (persistedMessages is not null && persistedChannelKey is not null) + { + PersistSyncshellHistory(persistedChannelKey, persistedMessages); + } + + if (notifyMention) + { + string channelName = mentionChannelName ?? "Syncshell"; + string senderName = mentionSenderName ?? 
"Someone"; + string notificationText = $"You were mentioned by {senderName} in {channelName}."; + Mediator.Publish(new NotificationMessage("Syncshell mention", notificationText, NotificationType.Info)); + } + if (publishChannelList) { using (_sync.EnterScope()) @@ -1108,6 +1153,113 @@ public sealed class ZoneChatService : DisposableMediatorSubscriberBase, IHostedS return false; } + private bool TryGetSelfMentionToken(string message, out string matchedToken) + { + matchedToken = string.Empty; + if (string.IsNullOrWhiteSpace(message)) + { + return false; + } + + HashSet tokens = BuildSelfMentionTokens(); + if (tokens.Count == 0) + { + return false; + } + + return TryFindMentionToken(message, tokens, out matchedToken); + } + + private HashSet BuildSelfMentionTokens() + { + HashSet tokens = new(StringComparer.OrdinalIgnoreCase); + string uid = _apiController.UID; + if (IsValidMentionToken(uid)) + { + tokens.Add(uid); + } + + string displayName = _apiController.DisplayName; + if (IsValidMentionToken(displayName)) + { + tokens.Add(displayName); + } + + return tokens; + } + + private static bool IsValidMentionToken(string value) + { + if (string.IsNullOrWhiteSpace(value)) + { + return false; + } + + for (int i = 0; i < value.Length; i++) + { + if (!IsMentionChar(value[i])) + { + return false; + } + } + + return true; + } + + private static bool TryFindMentionToken(string message, IReadOnlyCollection tokens, out string matchedToken) + { + matchedToken = string.Empty; + if (tokens.Count == 0 || string.IsNullOrEmpty(message)) + { + return false; + } + + int index = 0; + while (index < message.Length) + { + if (message[index] != '@') + { + index++; + continue; + } + + if (index > 0 && IsMentionChar(message[index - 1])) + { + index++; + continue; + } + + int start = index + 1; + int end = start; + while (end < message.Length && IsMentionChar(message[end])) + { + end++; + } + + if (end == start) + { + index++; + continue; + } + + string token = message.Substring(start, end - 
start); + if (tokens.Contains(token)) + { + matchedToken = token; + return true; + } + + index = end; + } + + return false; + } + + private static bool IsMentionChar(char value) + { + return char.IsLetterOrDigit(value) || value == '_' || value == '-' || value == '\''; + } + private ChatMessageEntry BuildMessage(ChatMessageDto dto, bool fromSelf) { var displayName = ResolveDisplayName(dto, fromSelf); @@ -1364,6 +1516,313 @@ public sealed class ZoneChatService : DisposableMediatorSubscriberBase, IHostedS return 0; } + private void LoadPersistedSyncshellHistory() + { + if (!_chatConfigService.Current.PersistSyncshellHistory) + { + return; + } + + Dictionary persisted = _chatConfigService.Current.SyncshellChannelHistory; + if (persisted.Count == 0) + { + return; + } + + List invalidKeys = new(); + foreach (KeyValuePair entry in persisted) + { + if (string.IsNullOrWhiteSpace(entry.Key) || string.IsNullOrWhiteSpace(entry.Value)) + { + invalidKeys.Add(entry.Key); + continue; + } + + if (!TryDecodePersistedHistory(entry.Value, out List persistedMessages)) + { + invalidKeys.Add(entry.Key); + continue; + } + + if (persistedMessages.Count == 0) + { + invalidKeys.Add(entry.Key); + continue; + } + + if (persistedMessages.Count > MaxMessageHistory) + { + int startIndex = Math.Max(0, persistedMessages.Count - MaxMessageHistory); + persistedMessages = persistedMessages.GetRange(startIndex, persistedMessages.Count - startIndex); + } + + List restoredMessages = new(persistedMessages.Count); + foreach (PersistedChatMessage persistedMessage in persistedMessages) + { + if (!TryBuildRestoredMessage(entry.Key, persistedMessage, out ChatMessageEntry restoredMessage)) + { + continue; + } + + restoredMessages.Add(restoredMessage); + } + + if (restoredMessages.Count == 0) + { + invalidKeys.Add(entry.Key); + continue; + } + + using (_sync.EnterScope()) + { + _messageHistoryCache[entry.Key] = restoredMessages; + } + } + + if (invalidKeys.Count > 0) + { + foreach (string key in invalidKeys) + { 
+ persisted.Remove(key); + } + + _chatConfigService.Save(); + } + } + + private List BuildPersistedHistoryLocked(ChatChannelState state) + { + int startIndex = Math.Max(0, state.Messages.Count - MaxMessageHistory); + List persistedMessages = new(state.Messages.Count - startIndex); + for (int i = startIndex; i < state.Messages.Count; i++) + { + ChatMessageEntry entry = state.Messages[i]; + if (entry.Payload is not { } payload) + { + continue; + } + + persistedMessages.Add(new PersistedChatMessage( + payload.Message, + entry.DisplayName, + entry.FromSelf, + entry.ReceivedAtUtc, + payload.SentAtUtc)); + } + + return persistedMessages; + } + + private void PersistSyncshellHistory(string channelKey, List persistedMessages) + { + if (!_chatConfigService.Current.PersistSyncshellHistory) + { + return; + } + + Dictionary persisted = _chatConfigService.Current.SyncshellChannelHistory; + if (persistedMessages.Count == 0) + { + if (persisted.Remove(channelKey)) + { + _chatConfigService.Save(); + } + + return; + } + + string? base64 = EncodePersistedMessages(persistedMessages); + if (string.IsNullOrWhiteSpace(base64)) + { + if (persisted.Remove(channelKey)) + { + _chatConfigService.Save(); + } + + return; + } + + persisted[channelKey] = base64; + _chatConfigService.Save(); + } + + private static string? EncodePersistedMessages(List persistedMessages) + { + if (persistedMessages.Count == 0) + { + return null; + } + + byte[] jsonBytes = JsonSerializer.SerializeToUtf8Bytes(persistedMessages, PersistedHistorySerializerOptions); + return Convert.ToBase64String(jsonBytes); + } + + private static bool TryDecodePersistedHistory(string base64, out List persistedMessages) + { + persistedMessages = new List(); + if (string.IsNullOrWhiteSpace(base64)) + { + return false; + } + + try + { + byte[] jsonBytes = Convert.FromBase64String(base64); + List? 
decoded = JsonSerializer.Deserialize>(jsonBytes, PersistedHistorySerializerOptions); + if (decoded is null) + { + return false; + } + + persistedMessages = decoded; + return true; + } + catch + { + return false; + } + } + + private static bool TryBuildRestoredMessage(string channelKey, PersistedChatMessage persistedMessage, out ChatMessageEntry restoredMessage) + { + restoredMessage = default; + string messageText = persistedMessage.Message; + DateTime sentAtUtc = persistedMessage.SentAtUtc; + if (string.IsNullOrWhiteSpace(messageText) && persistedMessage.LegacyPayload is { } legacy) + { + messageText = legacy.Message; + sentAtUtc = legacy.SentAtUtc; + } + + if (string.IsNullOrWhiteSpace(messageText)) + { + return false; + } + + ChatChannelDescriptor descriptor = BuildDescriptorFromChannelKey(channelKey); + ChatSenderDescriptor sender = new ChatSenderDescriptor( + ChatSenderKind.Anonymous, + string.Empty, + null, + null, + null, + false); + + ChatMessageDto payload = new ChatMessageDto(descriptor, sender, messageText, sentAtUtc, string.Empty); + restoredMessage = new ChatMessageEntry(payload, persistedMessage.DisplayName, persistedMessage.FromSelf, persistedMessage.ReceivedAtUtc); + return true; + } + + private static ChatChannelDescriptor BuildDescriptorFromChannelKey(string channelKey) + { + if (string.Equals(channelKey, ZoneChannelKey, StringComparison.Ordinal)) + { + return new ChatChannelDescriptor { Type = ChatChannelType.Zone }; + } + + int separatorIndex = channelKey.IndexOf(':', StringComparison.Ordinal); + if (separatorIndex <= 0 || separatorIndex >= channelKey.Length - 1) + { + return new ChatChannelDescriptor { Type = ChatChannelType.Group }; + } + + string typeValue = channelKey[..separatorIndex]; + if (!int.TryParse(typeValue, out int parsedType)) + { + return new ChatChannelDescriptor { Type = ChatChannelType.Group }; + } + + string customKey = channelKey[(separatorIndex + 1)..]; + ChatChannelType channelType = parsedType switch + { + 
(int)ChatChannelType.Zone => ChatChannelType.Zone, + (int)ChatChannelType.Group => ChatChannelType.Group, + _ => ChatChannelType.Group + }; + + return new ChatChannelDescriptor + { + Type = channelType, + CustomKey = customKey + }; + } + + public void ClearPersistedSyncshellHistory(bool clearLoadedMessages) + { + bool shouldPublish = false; + bool saveConfig = false; + + using (_sync.EnterScope()) + { + Dictionary> cache = _messageHistoryCache; + if (cache.Count > 0) + { + List keysToRemove = new(); + foreach (string key in cache.Keys) + { + if (!string.Equals(key, ZoneChannelKey, StringComparison.Ordinal)) + { + keysToRemove.Add(key); + } + } + + foreach (string key in keysToRemove) + { + cache.Remove(key); + } + + if (keysToRemove.Count > 0) + { + shouldPublish = true; + } + } + + if (clearLoadedMessages) + { + foreach (ChatChannelState state in _channels.Values) + { + if (state.Type != ChatChannelType.Group) + { + continue; + } + + if (state.Messages.Count == 0 && state.UnreadCount == 0 && !state.HasUnread) + { + continue; + } + + state.Messages.Clear(); + state.HasUnread = false; + state.UnreadCount = 0; + _lastReadCounts[state.Key] = 0; + shouldPublish = true; + } + } + + Dictionary persisted = _chatConfigService.Current.SyncshellChannelHistory; + if (persisted.Count > 0) + { + persisted.Clear(); + saveConfig = true; + } + + if (shouldPublish) + { + MarkChannelsSnapshotDirtyLocked(); + } + } + + if (saveConfig) + { + _chatConfigService.Save(); + } + + if (shouldPublish) + { + PublishChannelListChanged(); + } + } + private sealed class ChatChannelState { public ChatChannelState(string key, ChatChannelType type, string displayName, ChatChannelDescriptor descriptor) @@ -1400,4 +1859,12 @@ public sealed class ZoneChatService : DisposableMediatorSubscriberBase, IHostedS bool IsOwner); private readonly record struct PendingSelfMessage(string ChannelKey, string Message); + + public sealed record PersistedChatMessage( + string Message = "", + string DisplayName = "", 
+ bool FromSelf = false, + DateTime ReceivedAtUtc = default, + DateTime SentAtUtc = default, + [property: JsonPropertyName("Payload")] ChatMessageDto? LegacyPayload = null); } diff --git a/LightlessSync/Services/DalamudUtilService.cs b/LightlessSync/Services/DalamudUtilService.cs index 6f0869d..d7a814a 100644 --- a/LightlessSync/Services/DalamudUtilService.cs +++ b/LightlessSync/Services/DalamudUtilService.cs @@ -22,10 +22,8 @@ using LightlessSync.Utils; using Lumina.Excel.Sheets; using Microsoft.Extensions.Hosting; using Microsoft.Extensions.Logging; -using System.Diagnostics; using System.Numerics; using System.Runtime.CompilerServices; -using System.Runtime.InteropServices; using System.Text; using BattleNpcSubKind = FFXIVClientStructs.FFXIV.Client.Game.Object.BattleNpcSubKind; using DalamudObjectKind = Dalamud.Game.ClientState.Objects.Enums.ObjectKind; @@ -229,6 +227,28 @@ public class DalamudUtilService : IHostedService, IMediatorSubscriber _ = RunOnFrameworkThread(ReleaseFocusUnsafe); } + public void TargetPlayerByAddress(nint address) + { + if (address == nint.Zero) return; + if (_clientState.IsPvP) return; + + _ = RunOnFrameworkThread(() => + { + var gameObject = CreateGameObject(address); + if (gameObject is null) return; + + var useFocusTarget = _configService.Current.UseFocusTarget; + if (useFocusTarget) + { + _targetManager.FocusTarget = gameObject; + } + else + { + _targetManager.Target = gameObject; + } + }); + } + private void FocusPairUnsafe(nint address, PairUniqueIdentifier pairIdent) { var target = CreateGameObject(address); @@ -404,38 +424,7 @@ public class DalamudUtilService : IHostedService, IMediatorSubscriber if (playerPointer == IntPtr.Zero) return IntPtr.Zero; var playerAddress = playerPointer.Value; - var ownerEntityId = ((Character*)playerAddress)->EntityId; - var candidateAddress = _objectTable.GetObjectAddress(((GameObject*)playerAddress)->ObjectIndex + 1); - if (ownerEntityId == 0) return candidateAddress; - - if (playerAddress == 
_actorObjectService.LocalPlayerAddress) - { - var localOwned = _actorObjectService.LocalMinionOrMountAddress; - if (localOwned != nint.Zero) - { - return localOwned; - } - } - - if (candidateAddress != nint.Zero) - { - var candidate = (GameObject*)candidateAddress; - var candidateKind = (DalamudObjectKind)candidate->ObjectKind; - if ((candidateKind == DalamudObjectKind.MountType || candidateKind == DalamudObjectKind.Companion) - && ResolveOwnerId(candidate) == ownerEntityId) - { - return candidateAddress; - } - } - - var ownedObject = FindOwnedObject(ownerEntityId, playerAddress, static kind => - kind == DalamudObjectKind.MountType || kind == DalamudObjectKind.Companion); - if (ownedObject != nint.Zero) - { - return ownedObject; - } - - return candidateAddress; + return _objectTable.GetObjectAddress(((GameObject*)playerAddress)->ObjectIndex + 1); } public async Task GetMinionOrMountAsync(IntPtr? playerPointer = null) @@ -465,7 +454,7 @@ public class DalamudUtilService : IHostedService, IMediatorSubscriber } } - return FindOwnedPet(ownerEntityId, ownerAddress); + return IntPtr.Zero; } public async Task GetPetAsync(IntPtr? 
playerPointer = null) @@ -473,69 +462,6 @@ public class DalamudUtilService : IHostedService, IMediatorSubscriber return await RunOnFrameworkThread(() => GetPetPtr(playerPointer)).ConfigureAwait(false); } - private unsafe nint FindOwnedObject(uint ownerEntityId, nint ownerAddress, Func matchesKind) - { - if (ownerEntityId == 0) - { - return nint.Zero; - } - - foreach (var obj in _objectTable) - { - if (obj is null || obj.Address == nint.Zero || obj.Address == ownerAddress) - { - continue; - } - - if (!matchesKind(obj.ObjectKind)) - { - continue; - } - - var candidate = (GameObject*)obj.Address; - if (ResolveOwnerId(candidate) == ownerEntityId) - { - return obj.Address; - } - } - - return nint.Zero; - } - - private unsafe nint FindOwnedPet(uint ownerEntityId, nint ownerAddress) - { - if (ownerEntityId == 0) - { - return nint.Zero; - } - - foreach (var obj in _objectTable) - { - if (obj is null || obj.Address == nint.Zero || obj.Address == ownerAddress) - { - continue; - } - - if (obj.ObjectKind != DalamudObjectKind.BattleNpc) - { - continue; - } - - var candidate = (GameObject*)obj.Address; - if (candidate->BattleNpcSubKind != BattleNpcSubKind.Pet) - { - continue; - } - - if (ResolveOwnerId(candidate) == ownerEntityId) - { - return obj.Address; - } - } - - return nint.Zero; - } - private static unsafe bool IsPetMatch(GameObject* candidate, uint ownerEntityId) { if (candidate == null) @@ -634,6 +560,37 @@ public class DalamudUtilService : IHostedService, IMediatorSubscriber return true; } + public bool TryGetHashedCIDFromAddress(nint address, out string hashedCid) + { + hashedCid = string.Empty; + if (address == nint.Zero) + return false; + + if (_framework.IsInFrameworkUpdateThread) + { + return TryGetHashedCIDFromAddressInternal(address, out hashedCid); + } + + var result = _framework.RunOnFrameworkThread(() => + { + var success = TryGetHashedCIDFromAddressInternal(address, out var resolved); + return (success, resolved); + }).GetAwaiter().GetResult(); + + hashedCid 
= result.resolved; + return result.success; + } + + private bool TryGetHashedCIDFromAddressInternal(nint address, out string hashedCid) + { + hashedCid = string.Empty; + var player = _objectTable.CreateObjectReference(address) as IPlayerCharacter; + if (player == null || player.Address != address) + return false; + + return TryGetHashedCID(player, out hashedCid); + } + public unsafe static string GetHashedCIDFromPlayerPointer(nint ptr) { return ((BattleChara*)ptr)->Character.ContentId.ToString().GetHash256(); @@ -816,9 +773,12 @@ public class DalamudUtilService : IHostedService, IMediatorSubscriber { _logger.LogInformation("Starting DalamudUtilService"); _framework.Update += FrameworkOnUpdate; - if (IsLoggedIn) + _clientState.Login += OnClientLogin; + _clientState.Logout += OnClientLogout; + + if (_clientState.IsLoggedIn) { - _classJobId = _objectTable.LocalPlayer!.ClassJob.RowId; + OnClientLogin(); } _logger.LogInformation("Started DalamudUtilService"); @@ -831,6 +791,8 @@ public class DalamudUtilService : IHostedService, IMediatorSubscriber Mediator.UnsubscribeAll(this); _framework.Update -= FrameworkOnUpdate; + _clientState.Login -= OnClientLogin; + _clientState.Logout -= OnClientLogout; if (_FocusPairIdent.HasValue) { if (_framework.IsInFrameworkUpdateThread) @@ -845,43 +807,72 @@ public class DalamudUtilService : IHostedService, IMediatorSubscriber return Task.CompletedTask; } - public async Task WaitWhileCharacterIsDrawing( - ILogger logger, - GameObjectHandler handler, - Guid redrawId, - int timeOut = 5000, - CancellationToken? 
ct = null) + private void OnClientLogin() + { + if (IsLoggedIn) + return; + + _ = RunOnFrameworkThread(() => + { + if (IsLoggedIn) + return; + + var localPlayer = _objectTable.LocalPlayer; + IsLoggedIn = true; + _lastZone = _clientState.TerritoryType; + if (localPlayer != null) + { + _lastWorldId = (ushort)localPlayer.CurrentWorld.RowId; + _classJobId = localPlayer.ClassJob.RowId; + } + + _cid = RebuildCID(); + Mediator.Publish(new DalamudLoginMessage()); + }); + } + + private void OnClientLogout(int type, int code) + { + if (!IsLoggedIn) + return; + _ = RunOnFrameworkThread(() => + { + if (!IsLoggedIn) + return; + + IsLoggedIn = false; + _lastWorldId = 0; + Mediator.Publish(new DalamudLogoutMessage()); + }); + } + + public async Task WaitWhileCharacterIsDrawing(ILogger logger, GameObjectHandler handler, Guid redrawId, int timeOut = 5000, CancellationToken? ct = null) { if (!_clientState.IsLoggedIn) return; - var token = ct ?? CancellationToken.None; + if (ct == null) + ct = CancellationToken.None; const int tick = 250; - const int initialSettle = 50; - - var sw = Stopwatch.StartNew(); - + int curWaitTime = 0; try { logger.LogTrace("[{redrawId}] Starting wait for {handler} to draw", redrawId, handler); + await Task.Delay(tick, ct.Value).ConfigureAwait(true); + curWaitTime += tick; - await Task.Delay(initialSettle, token).ConfigureAwait(false); - - while (!token.IsCancellationRequested - && sw.ElapsedMilliseconds < timeOut - && await handler.IsBeingDrawnRunOnFrameworkAsync().ConfigureAwait(false)) + while ((!ct.Value.IsCancellationRequested) + && curWaitTime < timeOut + && await handler.IsBeingDrawnRunOnFrameworkAsync().ConfigureAwait(false)) // RenderFlags value 0b100000000000 marks the object as still being rendered (see CheckCharacterForDrawing) { logger.LogTrace("[{redrawId}] Waiting for {handler} to finish drawing", redrawId, handler); - await Task.Delay(tick, token).ConfigureAwait(false); + curWaitTime += tick; + await Task.Delay(tick, ct.Value).ConfigureAwait(true); } - logger.LogTrace("[{redrawId}] Finished
drawing after {ms}ms", redrawId, sw.ElapsedMilliseconds); + logger.LogTrace("[{redrawId}] Finished drawing after {curWaitTime}ms", redrawId, curWaitTime); } - catch (OperationCanceledException) - { - // ignore - } - catch (Exception ex) + catch (AccessViolationException ex) { logger.LogWarning(ex, "Error accessing {handler}, object does not exist anymore?", handler); } @@ -931,109 +922,37 @@ public class DalamudUtilService : IHostedService, IMediatorSubscriber return WorldData.Value.TryGetValue(worldId, out var worldName) ? worldName : null; } - public void TargetPlayerByAddress(nint address) - { - if (address == nint.Zero) return; - if (_clientState.IsPvP) return; - - _ = RunOnFrameworkThread(() => - { - var gameObject = CreateGameObject(address); - if (gameObject is null) return; - - var useFocusTarget = _configService.Current.UseFocusTarget; - if (useFocusTarget) - { - _targetManager.FocusTarget = gameObject; - } - else - { - _targetManager.Target = gameObject; - } - }); - } - - [DllImport("kernel32.dll", SetLastError = true)] - private static extern bool IsBadReadPtr(IntPtr ptr, UIntPtr size); - - private static bool IsValidPointer(nint ptr, int size = 8) - { - if (ptr == nint.Zero) - return false; - - try - { - if (!Util.IsWine()) - { - return !IsBadReadPtr(ptr, (UIntPtr)size); - } - return ptr != nint.Zero && (ptr % IntPtr.Size) == 0; - } - catch - { - return false; - } - } - private unsafe void CheckCharacterForDrawing(nint address, string characterName) { - if (address == nint.Zero) - return; - - if (!IsValidPointer(address)) - { - _logger.LogDebug("Invalid pointer for character {name} at {addr}", characterName, address.ToString("X")); - return; - } - var gameObj = (GameObject*)address; - - if (gameObj == null) - return; - - if (!_objectTable.Any(o => o?.Address == address)) - { - _logger.LogDebug("Character {name} at {addr} no longer in object table", characterName, address.ToString("X")); - return; - } - - if (gameObj->ObjectKind == 0) - return; - var 
drawObj = gameObj->DrawObject; bool isDrawing = false; bool isDrawingChanged = false; - - if ((nint)drawObj != IntPtr.Zero && IsValidPointer((nint)drawObj)) + if ((nint)drawObj != IntPtr.Zero) { isDrawing = gameObj->RenderFlags == (VisibilityFlags)0b100000000000; - if (!isDrawing) { - var charBase = (CharacterBase*)drawObj; - if (charBase != null && IsValidPointer((nint)charBase)) + isDrawing = ((CharacterBase*)drawObj)->HasModelInSlotLoaded != 0; + if (!isDrawing) { - isDrawing = charBase->HasModelInSlotLoaded != 0; - if (!isDrawing) + isDrawing = ((CharacterBase*)drawObj)->HasModelFilesInSlotLoaded != 0; + if (isDrawing && !string.Equals(_lastGlobalBlockPlayer, characterName, StringComparison.Ordinal) + && !string.Equals(_lastGlobalBlockReason, "HasModelFilesInSlotLoaded", StringComparison.Ordinal)) { - isDrawing = charBase->HasModelFilesInSlotLoaded != 0; - if (isDrawing && !string.Equals(_lastGlobalBlockPlayer, characterName, StringComparison.Ordinal) - && !string.Equals(_lastGlobalBlockReason, "HasModelFilesInSlotLoaded", StringComparison.Ordinal)) - { - _lastGlobalBlockPlayer = characterName; - _lastGlobalBlockReason = "HasModelFilesInSlotLoaded"; - isDrawingChanged = true; - } + _lastGlobalBlockPlayer = characterName; + _lastGlobalBlockReason = "HasModelFilesInSlotLoaded"; + isDrawingChanged = true; } - else + } + else + { + if (!string.Equals(_lastGlobalBlockPlayer, characterName, StringComparison.Ordinal) + && !string.Equals(_lastGlobalBlockReason, "HasModelInSlotLoaded", StringComparison.Ordinal)) { - if (!string.Equals(_lastGlobalBlockPlayer, characterName, StringComparison.Ordinal) - && !string.Equals(_lastGlobalBlockReason, "HasModelInSlotLoaded", StringComparison.Ordinal)) - { - _lastGlobalBlockPlayer = characterName; - _lastGlobalBlockReason = "HasModelInSlotLoaded"; - isDrawingChanged = true; - } + _lastGlobalBlockPlayer = characterName; + _lastGlobalBlockReason = "HasModelInSlotLoaded"; + isDrawingChanged = true; } } } @@ -1064,21 +983,39 @@ public 
class DalamudUtilService : IHostedService, IMediatorSubscriber private unsafe void FrameworkOnUpdateInternal() { - if (!_clientState.IsLoggedIn || _objectTable.LocalPlayer == null) - { - return; - } - - if ((_objectTable.LocalPlayer?.IsDead ?? false) && _condition[ConditionFlag.BoundByDuty]) + var localPlayer = _objectTable.LocalPlayer; + if ((localPlayer?.IsDead ?? false) && _condition[ConditionFlag.BoundByDuty]) { return; } bool isNormalFrameworkUpdate = DateTime.UtcNow < _delayedFrameworkUpdateCheck.AddSeconds(1); + var clientLoggedIn = _clientState.IsLoggedIn; _performanceCollector.LogPerformance(this, $"FrameworkOnUpdateInternal+{(isNormalFrameworkUpdate ? "Regular" : "Delayed")}", () => { IsAnythingDrawing = false; + + if (!isNormalFrameworkUpdate) + { + if (_gameConfig != null + && _gameConfig.TryGet(Dalamud.Game.Config.SystemConfigOption.LodType_DX11, out bool lodEnabled)) + { + IsLodEnabled = lodEnabled; + } + + if (IsInCombat || IsPerforming || IsInInstance) + Mediator.Publish(new FrameworkUpdateMessage()); + + Mediator.Publish(new DelayedFrameworkUpdateMessage()); + + _delayedFrameworkUpdateCheck = DateTime.UtcNow; + } + + if (!clientLoggedIn) + { + return; + } _performanceCollector.LogPerformance(this, $"TrackedActorsToState", () => { @@ -1087,40 +1024,46 @@ public class DalamudUtilService : IHostedService, IMediatorSubscriber _actorObjectService.RefreshTrackedActors(); } - var playerDescriptors = _actorObjectService.PlayerDescriptors; - var descriptorCount = playerDescriptors.Count; - - for (var i = 0; i < descriptorCount; i++) + if (_clientState.IsLoggedIn && localPlayer != null) { - if (i >= playerDescriptors.Count) - break; - - var actor = playerDescriptors[i]; - - var playerAddress = actor.Address; - if (playerAddress == nint.Zero || !IsValidPointer(playerAddress)) - continue; - - if (actor.ObjectIndex >= 200) - continue; - - if (_blockedCharacterHandler.IsCharacterBlocked(playerAddress, actor.ObjectIndex, out bool firstTime) && firstTime) + var 
playerDescriptors = _actorObjectService.PlayerDescriptors; + for (var i = 0; i < playerDescriptors.Count; i++) { - _logger.LogTrace("Skipping character {addr}, blocked/muted", playerAddress.ToString("X")); - continue; - } + var actor = playerDescriptors[i]; - if (!IsAnythingDrawing) - { - if (!_objectTable.Any(o => o?.Address == playerAddress)) + var playerAddress = actor.Address; + if (playerAddress == nint.Zero) + continue; + + if (actor.ObjectIndex >= 200) + continue; + + var obj = _objectTable[actor.ObjectIndex]; + if (obj is not IPlayerCharacter player || player.Address != playerAddress) + continue; + + if (_blockedCharacterHandler.IsCharacterBlocked(playerAddress, actor.ObjectIndex, out bool firstTime) && firstTime) { + _logger.LogTrace("Skipping character {addr}, blocked/muted", playerAddress.ToString("X")); continue; } - CheckCharacterForDrawing(playerAddress, actor.Name); + if (!IsAnythingDrawing) + { + var charaName = player.Name.TextValue; + if (string.IsNullOrEmpty(charaName)) + { + charaName = actor.Name; + } - if (IsAnythingDrawing) + CheckCharacterForDrawing(playerAddress, charaName); + if (IsAnythingDrawing) + break; + } + else + { break; + } } } }); @@ -1246,7 +1189,6 @@ public class DalamudUtilService : IHostedService, IMediatorSubscriber } - var localPlayer = _objectTable.LocalPlayer; if (localPlayer != null) { _classJobId = localPlayer.ClassJob.RowId; @@ -1268,39 +1210,6 @@ public class DalamudUtilService : IHostedService, IMediatorSubscriber Mediator.Publish(new FrameworkUpdateMessage()); Mediator.Publish(new PriorityFrameworkUpdateMessage()); - - if (isNormalFrameworkUpdate) - return; - - if (localPlayer != null && !IsLoggedIn) - { - _logger.LogDebug("Logged in"); - IsLoggedIn = true; - _lastZone = _clientState.TerritoryType; - _lastWorldId = (ushort)localPlayer.CurrentWorld.RowId; - _cid = RebuildCID(); - Mediator.Publish(new DalamudLoginMessage()); - } - else if (localPlayer == null && IsLoggedIn) - { - _logger.LogDebug("Logged out"); - 
IsLoggedIn = false; - _lastWorldId = 0; - Mediator.Publish(new DalamudLogoutMessage()); - } - - if (_gameConfig != null - && _gameConfig.TryGet(Dalamud.Game.Config.SystemConfigOption.LodType_DX11, out bool lodEnabled)) - { - IsLodEnabled = lodEnabled; - } - - if (IsInCombat || IsPerforming || IsInInstance) - Mediator.Publish(new FrameworkUpdateMessage()); - - Mediator.Publish(new DelayedFrameworkUpdateMessage()); - - _delayedFrameworkUpdateCheck = DateTime.UtcNow; }); } @@ -1330,4 +1239,4 @@ public class DalamudUtilService : IHostedService, IMediatorSubscriber onExit(); } } -} \ No newline at end of file +} diff --git a/LightlessSync/Services/Mediator/Messages.cs b/LightlessSync/Services/Mediator/Messages.cs index e6db9e7..f3cbd75 100644 --- a/LightlessSync/Services/Mediator/Messages.cs +++ b/LightlessSync/Services/Mediator/Messages.cs @@ -21,6 +21,12 @@ public record SwitchToIntroUiMessage : MessageBase; public record SwitchToMainUiMessage : MessageBase; public record OpenSettingsUiMessage : MessageBase; public record OpenLightfinderSettingsMessage : MessageBase; +public enum PerformanceSettingsSection +{ + TextureOptimization, + ModelOptimization, +} +public record OpenPerformanceSettingsMessage(PerformanceSettingsSection Section) : MessageBase; public record DalamudLoginMessage : MessageBase; public record DalamudLogoutMessage : MessageBase; public record ActorTrackedMessage(ActorObjectService.ActorDescriptor Descriptor) : SameThreadMessage; diff --git a/LightlessSync/Services/ModelDecimation/MdlDecimator.cs b/LightlessSync/Services/ModelDecimation/MdlDecimator.cs index a7af13f..55b511c 100644 --- a/LightlessSync/Services/ModelDecimation/MdlDecimator.cs +++ b/LightlessSync/Services/ModelDecimation/MdlDecimator.cs @@ -1,19 +1,24 @@ +using LightlessSync.LightlessConfiguration.Configurations; using Lumina.Data.Parsing; using Lumina.Extensions; -using MeshDecimator; -using MeshDecimator.Algorithms; -using MeshDecimator.Math; using Microsoft.Extensions.Logging; 
+using Nano = Nanomesh; using Penumbra.GameData.Files.ModelStructs; using System.Buffers.Binary; +using BoneWeight = Nanomesh.BoneWeight; using MdlFile = Penumbra.GameData.Files.MdlFile; using MsLogger = Microsoft.Extensions.Logging.ILogger; +using Vector2 = Nanomesh.Vector2F; +using Vector3 = Nanomesh.Vector3F; +using Vector3d = Nanomesh.Vector3; +using Vector4 = Nanomesh.Vector4F; namespace LightlessSync.Services.ModelDecimation; - + // Lightless-specific MDL decimation implementation; not intended for reuse by other sync services. internal static class MdlDecimator { private const int MaxStreams = 3; + private const int MaxUvChannels = 4; private const int ReadRetryCount = 8; private const int ReadRetryDelayMs = 250; @@ -22,6 +27,7 @@ internal static class MdlDecimator MdlFile.VertexUsage.Position, MdlFile.VertexUsage.Normal, MdlFile.VertexUsage.Tangent1, + MdlFile.VertexUsage.Tangent2, MdlFile.VertexUsage.UV, MdlFile.VertexUsage.Color, MdlFile.VertexUsage.BlendWeights, @@ -30,6 +36,7 @@ internal static class MdlDecimator private static readonly HashSet SupportedTypes = [ + MdlFile.VertexType.Single1, MdlFile.VertexType.Single2, MdlFile.VertexType.Single3, MdlFile.VertexType.Single4, @@ -37,33 +44,40 @@ internal static class MdlDecimator MdlFile.VertexType.Half2, MdlFile.VertexType.Half4, MdlFile.VertexType.UByte4, MdlFile.VertexType.NByte4, + MdlFile.VertexType.Short2, + MdlFile.VertexType.Short4, + MdlFile.VertexType.NShort2, + MdlFile.VertexType.NShort4, + MdlFile.VertexType.UShort2, + MdlFile.VertexType.UShort4, ]; - public static bool TryDecimate(string sourcePath, string destinationPath, int triangleThreshold, double targetRatio, MsLogger logger) + public static bool TryDecimate(string sourcePath, string destinationPath, ModelDecimationSettings settings, MsLogger logger) { try { + var tuning = settings.Advanced; if (!TryReadModelBytes(sourcePath, logger, out var data)) { - logger.LogInformation("Skipping model decimation; source file locked or unreadable: {Path}", sourcePath); +
logger.LogDebug("Skipping model decimation; source file locked or unreadable: {Path}", sourcePath); return false; } var mdl = new MdlFile(data); if (!mdl.Valid) { - logger.LogInformation("Skipping model decimation; invalid mdl: {Path}", sourcePath); + logger.LogDebug("Skipping model decimation; invalid mdl: {Path}", sourcePath); return false; } if (mdl.LodCount != 1) { - logger.LogInformation("Skipping model decimation; unsupported LOD count for {Path}", sourcePath); + logger.LogDebug("Skipping model decimation; unsupported LOD count for {Path}", sourcePath); return false; } if (HasShapeData(mdl)) { - logger.LogInformation("Skipping model decimation; shape/morph data present for {Path}", sourcePath); + logger.LogDebug("Skipping model decimation; shape/morph data present for {Path}", sourcePath); return false; } @@ -72,13 +86,13 @@ internal static class MdlDecimator var meshes = mdl.Meshes.ToArray(); if (meshes.Length == 0) { - logger.LogInformation("Skipping model decimation; no meshes for {Path}", sourcePath); + logger.LogDebug("Skipping model decimation; no meshes for {Path}", sourcePath); return false; } if (lod.MeshCount == 0) { - logger.LogInformation("Skipping model decimation; no meshes for {Path}", sourcePath); + logger.LogDebug("Skipping model decimation; no meshes for {Path}", sourcePath); return false; } @@ -86,17 +100,34 @@ internal static class MdlDecimator var lodMeshEnd = lodMeshStart + lod.MeshCount; if (lodMeshStart < 0 || lodMeshEnd > meshes.Length) { - logger.LogInformation("Skipping model decimation; invalid LOD mesh range for {Path}", sourcePath); + logger.LogDebug("Skipping model decimation; invalid LOD mesh range for {Path}", sourcePath); return false; } + Dictionary bodyMeshOverrides = []; + BodyCollisionData? 
bodyCollision = null; + if (settings.AvoidBodyIntersection) + { + if (!TryBuildBodyCollisionData( + mdl, + lodIndex, + lodMeshStart, + lodMeshEnd, + settings, + tuning, + out bodyCollision, + out bodyMeshOverrides, + logger)) + { + bodyCollision = null; + } + } + var anyDecimated = false; var newSubMeshes = new List(mdl.SubMeshes.Length); var newVertexBuffer = new List(mdl.VertexBufferSize[lodIndex] > 0 ? (int)mdl.VertexBufferSize[lodIndex] : 0); var newIndexBuffer = new List(mdl.IndexBufferSize[lodIndex] > 0 ? (int)(mdl.IndexBufferSize[lodIndex] / sizeof(ushort)) : 0); var subMeshCursor = 0; - DecimationAlgorithm? decimationAlgorithm = null; - int? decimationUvChannelCount = null; for (var meshIndex = 0; meshIndex < meshes.Length; meshIndex++) { @@ -115,15 +146,22 @@ internal static class MdlDecimator int[] indices; bool decimated; - if (meshIndex >= lodMeshStart && meshIndex < lodMeshEnd - && TryProcessMesh(mdl, lodIndex, meshIndex, mesh, meshSubMeshes, triangleThreshold, targetRatio, + if (bodyMeshOverrides.TryGetValue(meshIndex, out var bodyOverride)) + { + updatedMesh = bodyOverride.Mesh; + updatedSubMeshes = bodyOverride.SubMeshes; + vertexStreams = bodyOverride.VertexStreams; + indices = bodyOverride.Indices; + decimated = bodyOverride.Decimated; + updatedSubMeshes = OffsetSubMeshes(updatedSubMeshes, meshIndexBase); + } + else if (meshIndex >= lodMeshStart && meshIndex < lodMeshEnd + && TryProcessMesh(mdl, lodIndex, meshIndex, mesh, meshSubMeshes, settings, tuning, bodyCollision, out updatedMesh, out updatedSubMeshes, out vertexStreams, out indices, out decimated, - ref decimationAlgorithm, - ref decimationUvChannelCount, logger)) { updatedSubMeshes = OffsetSubMeshes(updatedSubMeshes, meshIndexBase); @@ -176,7 +214,7 @@ internal static class MdlDecimator if (!anyDecimated) { - logger.LogInformation("Skipping model decimation; no eligible meshes for {Path}", sourcePath); + logger.LogDebug("Skipping model decimation; no eligible meshes for {Path}", 
sourcePath); return false; } @@ -307,15 +345,14 @@ internal static class MdlDecimator int meshIndex, MeshStruct mesh, MdlStructs.SubmeshStruct[] meshSubMeshes, - int triangleThreshold, - double targetRatio, + ModelDecimationSettings settings, + ModelDecimationAdvancedSettings tuning, + BodyCollisionData? bodyCollision, out MeshStruct updatedMesh, out MdlStructs.SubmeshStruct[] updatedSubMeshes, out byte[][] vertexStreams, out int[] indices, out bool decimated, - ref DecimationAlgorithm? decimationAlgorithm, - ref int? decimationUvChannelCount, MsLogger logger) { updatedMesh = mesh; @@ -335,7 +372,7 @@ internal static class MdlDecimator } var triangleCount = (int)(mesh.IndexCount / 3); - if (triangleCount < triangleThreshold) + if (triangleCount < settings.TriangleThreshold) { return false; } @@ -352,25 +389,66 @@ internal static class MdlDecimator return false; } - var targetTriangles = (int)Math.Floor(triangleCount * targetRatio); + var targetTriangles = (int)Math.Floor(triangleCount * settings.TargetRatio); if (targetTriangles < 1 || targetTriangles >= triangleCount) { + logger.LogDebug( + "Mesh {MeshIndex} decimation target invalid ({Target} vs {Triangles})", + meshIndex, + targetTriangles, + triangleCount); return false; } - var meshDecimatorMesh = BuildMesh(decoded, subMeshIndices); - var algorithm = GetOrCreateAlgorithm(format, ref decimationAlgorithm, ref decimationUvChannelCount, logger); - algorithm.Initialize(meshDecimatorMesh); - algorithm.DecimateMesh(targetTriangles); - var decimatedMesh = algorithm.ToMesh(); + var collisionData = bodyCollision; + if (collisionData != null && IsBodyMesh(mdl, mesh)) + { + collisionData = null; + } - if (decimatedMesh.SubMeshCount != meshSubMeshes.Length) + if (!TryDecimateWithNanomesh(decoded, subMeshIndices, format, targetTriangles, tuning, collisionData, out var decimatedData, out var decimatedSubMeshIndices, out var decimationStats, out var decimationReason)) + { + logger.LogDebug("Mesh {MeshIndex} decimation failed: 
{Reason}", meshIndex, decimationReason); + return false; + } + + if (decimatedSubMeshIndices.Length != meshSubMeshes.Length) { logger.LogDebug("Mesh {MeshIndex} submesh count changed after decimation", meshIndex); return false; } - if (!TryEncodeMeshData(decimatedMesh, format, mesh, meshSubMeshes, out updatedMesh, out updatedSubMeshes, out vertexStreams, out indices, out var encodeReason)) + var decimatedTriangles = 0; + for (var subMeshIndex = 0; subMeshIndex < decimatedSubMeshIndices.Length; subMeshIndex++) + { + decimatedTriangles += decimatedSubMeshIndices[subMeshIndex].Length / 3; + } + + if (decimatedTriangles <= 0 || decimatedTriangles >= triangleCount) + { + logger.LogDebug( + "Mesh {MeshIndex} decimation produced no reduction (before {Before}, after {After}, components {Components}, eligible {Eligible}, min {Min}, max {Max}, avg {Avg:0.##}, eval {Evaluated}, collapsed {Collapsed}, reject bone {RejectBone}, body {RejectBody}, topo {RejectTopo}, invert {RejectInvert} (deg {RejectDeg}, area {RejectArea}, flip {RejectFlip})", + meshIndex, + triangleCount, + decimatedTriangles, + decimationStats.TotalComponents, + decimationStats.EligibleComponents, + decimationStats.MinTriangles, + decimationStats.MaxTriangles, + decimationStats.AvgTriangles, + decimationStats.EvaluatedEdges, + decimationStats.CollapsedEdges, + decimationStats.RejectedBoneWeights, + decimationStats.RejectedBodyCollision, + decimationStats.RejectedTopology, + decimationStats.RejectedInversion, + decimationStats.RejectedDegenerate, + decimationStats.RejectedArea, + decimationStats.RejectedFlip); + return false; + } + + if (!TryEncodeMeshData(decimatedData, decimatedSubMeshIndices, format, mesh, meshSubMeshes, settings.NormalizeTangents, out updatedMesh, out updatedSubMeshes, out vertexStreams, out indices, out var encodeReason)) { logger.LogDebug("Mesh {MeshIndex} encode failed: {Reason}", meshIndex, encodeReason); return false; @@ -380,55 +458,1533 @@ internal static class MdlDecimator return 
true; } - private static DecimationAlgorithm GetOrCreateAlgorithm( + private static bool TryDecimateWithNanomesh( + DecodedMeshData decoded, + int[][] subMeshIndices, VertexFormat format, - ref DecimationAlgorithm? decimationAlgorithm, - ref int? decimationUvChannelCount, - MsLogger logger) + int targetTriangles, + ModelDecimationAdvancedSettings tuning, + BodyCollisionData? bodyCollision, + out DecodedMeshData decimated, + out int[][] decimatedSubMeshIndices, + out ComponentStats componentStats, + out string? reason) { - var uvChannelCount = format.UvChannelCount; - if (decimationAlgorithm == null || decimationUvChannelCount != uvChannelCount) + decimated = default!; + decimatedSubMeshIndices = []; + componentStats = default; + reason = null; + + var totalTriangles = 0; + for (var i = 0; i < subMeshIndices.Length; i++) { - decimationAlgorithm = MeshDecimation.CreateAlgorithm(Algorithm.Default); - decimationAlgorithm.Logger = logger; - decimationUvChannelCount = uvChannelCount; + totalTriangles += subMeshIndices[i].Length / 3; } - return decimationAlgorithm; - } - - private static Mesh BuildMesh(DecodedMeshData decoded, int[][] subMeshIndices) - { - var mesh = new Mesh(decoded.Positions, subMeshIndices); - if (decoded.Normals != null) + if (totalTriangles <= 0) { - mesh.Normals = decoded.Normals; + reason = "No triangles to decimate."; + return false; } - if (decoded.Tangents != null) + var targetRatio = Math.Clamp(targetTriangles / (float)totalTriangles, 0f, 1f); + var outputSubMeshes = new List[subMeshIndices.Length]; + for (var i = 0; i < outputSubMeshes.Length; i++) { - mesh.Tangents = decoded.Tangents; + outputSubMeshes[i] = new List(); } - if (decoded.Colors != null) + var positions = new List(); + var normals = format.HasNormals ? new List() : null; + var tangents = format.HasTangent1 ? new List() : null; + var tangents2 = format.HasTangent2 ? new List() : null; + var colors = format.HasColors ? new List() : null; + var boneWeights = format.HasSkinning ? 
new List() : null; + var positionWs = format.HasPositionW ? new List() : null; + var normalWs = format.HasNormalW ? new List() : null; + List[]? uvChannels = null; + if (format.UvChannelCount > 0) { - mesh.Colors = decoded.Colors; - } - - if (decoded.BoneWeights != null) - { - mesh.BoneWeights = decoded.BoneWeights; - } - - if (decoded.UvChannels != null) - { - for (var channel = 0; channel < decoded.UvChannels.Length; channel++) + uvChannels = new List[format.UvChannelCount]; + for (var channel = 0; channel < format.UvChannelCount; channel++) { - mesh.SetUVs(channel, decoded.UvChannels[channel]); + uvChannels[channel] = new List(); } } - return mesh; + var componentCount = 0; + var eligibleCount = 0; + var minComponentTriangles = int.MaxValue; + var maxComponentTriangles = 0; + var totalComponentTriangles = 0; + var evaluatedEdges = 0; + var collapsedEdges = 0; + var rejectedBoneWeights = 0; + var rejectedTopology = 0; + var rejectedInversion = 0; + var rejectedDegenerate = 0; + var rejectedArea = 0; + var rejectedFlip = 0; + var rejectedBodyCollision = 0; + + for (var subMeshIndex = 0; subMeshIndex < subMeshIndices.Length; subMeshIndex++) + { + var indices = subMeshIndices[subMeshIndex]; + if (indices.Length == 0) + { + continue; + } + + var components = BuildComponentsForSubMesh(indices); + foreach (var componentIndices in components) + { + if (componentIndices.Length == 0) + { + continue; + } + + var componentTriangles = componentIndices.Length / 3; + if (componentTriangles == 0) + { + continue; + } + + var componentTarget = ComputeComponentTarget(componentTriangles, targetRatio, tuning.MinComponentTriangles); + componentCount++; + totalComponentTriangles += componentTriangles; + minComponentTriangles = Math.Min(minComponentTriangles, componentTriangles); + maxComponentTriangles = Math.Max(maxComponentTriangles, componentTriangles); + if (componentTarget < componentTriangles) + { + eligibleCount++; + } + + if (!TryBuildComponentDecoded(decoded, format, 
componentIndices, out var componentDecoded, out var componentLocalIndices, out reason)) + { + return false; + } + + DecodedMeshData componentDecimated = componentDecoded; + var componentDecimatedIndices = componentLocalIndices; + + if (componentTarget < componentTriangles) + { + if (TryDecimateComponent(componentDecoded, format, componentLocalIndices, componentTarget, decoded.BlendWeightEncoding, tuning, bodyCollision, out var decimatedComponent, out var decimatedComponentIndices, out var decimatorStats, out _)) + { + componentDecimated = decimatedComponent; + componentDecimatedIndices = decimatedComponentIndices; + evaluatedEdges += decimatorStats.EvaluatedEdges; + collapsedEdges += decimatorStats.CollapsedEdges; + rejectedBoneWeights += decimatorStats.RejectedBoneWeights; + rejectedTopology += decimatorStats.RejectedTopology; + rejectedInversion += decimatorStats.RejectedInversion; + rejectedDegenerate += decimatorStats.RejectedDegenerate; + rejectedArea += decimatorStats.RejectedArea; + rejectedFlip += decimatorStats.RejectedFlip; + rejectedBodyCollision += decimatorStats.RejectedBodyCollision; + } + } + + if (!AppendComponentData( + componentDecimated, + componentDecimatedIndices, + format, + positions, + normals, + tangents, + tangents2, + colors, + boneWeights, + uvChannels, + positionWs, + normalWs, + outputSubMeshes[subMeshIndex], + out reason)) + { + return false; + } + } + } + + if (positions.Count > ushort.MaxValue) + { + reason = "Decimated mesh exceeds vertex limit."; + return false; + } + + componentStats = BuildComponentStats( + componentCount, + eligibleCount, + minComponentTriangles, + maxComponentTriangles, + totalComponentTriangles, + evaluatedEdges, + collapsedEdges, + rejectedBoneWeights, + rejectedTopology, + rejectedInversion, + rejectedDegenerate, + rejectedArea, + rejectedFlip, + rejectedBodyCollision); + + decimated = new DecodedMeshData( + positions.ToArray(), + normals?.ToArray(), + tangents?.ToArray(), + tangents2?.ToArray(), + 
colors?.ToArray(), + boneWeights?.ToArray(), + uvChannels?.Select(channel => channel.ToArray()).ToArray(), + positionWs?.ToArray(), + normalWs?.ToArray(), + decoded.BlendWeightEncoding); + + decimatedSubMeshIndices = outputSubMeshes.Select(list => list.ToArray()).ToArray(); + return true; + } + + private static ComponentStats BuildComponentStats( + int componentCount, + int eligibleCount, + int minTriangles, + int maxTriangles, + int totalTriangles, + int evaluatedEdges, + int collapsedEdges, + int rejectedBoneWeights, + int rejectedTopology, + int rejectedInversion, + int rejectedDegenerate, + int rejectedArea, + int rejectedFlip, + int rejectedBodyCollision) + { + if (componentCount <= 0) + { + return new ComponentStats(0, 0, 0, 0, 0d, 0, 0, 0, 0, 0, 0, 0, 0, 0); + } + + var average = totalTriangles / (double)componentCount; + return new ComponentStats( + componentCount, + eligibleCount, + minTriangles == int.MaxValue ? 0 : minTriangles, + maxTriangles, + average, + evaluatedEdges, + collapsedEdges, + rejectedBoneWeights, + rejectedTopology, + rejectedInversion, + rejectedDegenerate, + rejectedArea, + rejectedFlip, + rejectedBodyCollision); + } + + private readonly record struct ComponentStats( + int TotalComponents, + int EligibleComponents, + int MinTriangles, + int MaxTriangles, + double AvgTriangles, + int EvaluatedEdges, + int CollapsedEdges, + int RejectedBoneWeights, + int RejectedTopology, + int RejectedInversion, + int RejectedDegenerate, + int RejectedArea, + int RejectedFlip, + int RejectedBodyCollision); + + private static int ComputeComponentTarget(int componentTriangles, float targetRatio, int minComponentTriangles) + { + var minTriangles = Math.Max(1, minComponentTriangles); + if (componentTriangles <= minTriangles) + { + return componentTriangles; + } + + var target = (int)MathF.Round(componentTriangles * targetRatio); + target = Math.Max(1, target); + return Math.Min(componentTriangles, Math.Max(minTriangles, target)); + } + + private static List 
BuildComponentsForSubMesh(int[] indices) + { + var components = new List(); + if (indices.Length == 0) + { + return components; + } + + var triangleCount = indices.Length / 3; + if (triangleCount <= 1) + { + components.Add(indices); + return components; + } + + var parent = new int[triangleCount]; + var rank = new byte[triangleCount]; + for (var i = 0; i < triangleCount; i++) + { + parent[i] = i; + } + + var vertexToTriangle = new Dictionary(); + for (var tri = 0; tri < triangleCount; tri++) + { + var baseIndex = tri * 3; + for (var v = 0; v < 3; v++) + { + var vertexIndex = indices[baseIndex + v]; + if (vertexToTriangle.TryGetValue(vertexIndex, out var existing)) + { + Union(parent, rank, tri, existing); + } + else + { + vertexToTriangle[vertexIndex] = tri; + } + } + } + + var componentMap = new Dictionary>(); + for (var tri = 0; tri < triangleCount; tri++) + { + var root = Find(parent, tri); + if (!componentMap.TryGetValue(root, out var list)) + { + list = []; + componentMap[root] = list; + } + + list.Add(tri); + } + + foreach (var component in componentMap.Values) + { + var slice = new int[component.Count * 3]; + var cursor = 0; + foreach (var tri in component) + { + Array.Copy(indices, tri * 3, slice, cursor, 3); + cursor += 3; + } + + components.Add(slice); + } + + return components; + } + + private static bool TryBuildComponentDecoded( + DecodedMeshData decoded, + VertexFormat format, + int[] componentIndices, + out DecodedMeshData componentDecoded, + out int[] componentLocalIndices, + out string? reason) + { + componentDecoded = default!; + componentLocalIndices = []; + reason = null; + + if (componentIndices.Length == 0) + { + reason = "Component has no indices."; + return false; + } + + var vertexMap = new Dictionary(); + var positions = new List(); + var normals = format.HasNormals ? new List() : null; + var tangents = format.HasTangent1 ? new List() : null; + var tangents2 = format.HasTangent2 ? new List() : null; + var colors = format.HasColors ? 
new List() : null; + var boneWeights = format.HasSkinning ? new List() : null; + var positionWs = format.HasPositionW ? new List() : null; + var normalWs = format.HasNormalW ? new List() : null; + List[]? uvChannels = null; + if (format.UvChannelCount > 0) + { + uvChannels = new List[format.UvChannelCount]; + for (var channel = 0; channel < format.UvChannelCount; channel++) + { + uvChannels[channel] = new List(); + } + } + + componentLocalIndices = new int[componentIndices.Length]; + for (var i = 0; i < componentIndices.Length; i++) + { + var globalIndex = componentIndices[i]; + if (globalIndex < 0 || globalIndex >= decoded.Positions.Length) + { + reason = "Component vertex index out of bounds."; + return false; + } + + if (!vertexMap.TryGetValue(globalIndex, out var localIndex)) + { + localIndex = positions.Count; + vertexMap[globalIndex] = localIndex; + positions.Add(decoded.Positions[globalIndex]); + + if (normals != null) + { + normals.Add(decoded.Normals != null ? decoded.Normals[globalIndex] : default); + } + if (tangents != null) + { + tangents.Add(decoded.Tangents != null ? decoded.Tangents[globalIndex] : default); + } + if (tangents2 != null) + { + tangents2.Add(decoded.Tangents2 != null ? decoded.Tangents2[globalIndex] : default); + } + if (colors != null) + { + colors.Add(decoded.Colors != null ? decoded.Colors[globalIndex] : default); + } + if (boneWeights != null) + { + boneWeights.Add(decoded.BoneWeights != null ? decoded.BoneWeights[globalIndex] : default); + } + if (positionWs != null) + { + positionWs.Add(decoded.PositionWs != null ? decoded.PositionWs[globalIndex] : 0f); + } + if (normalWs != null) + { + normalWs.Add(decoded.NormalWs != null ? decoded.NormalWs[globalIndex] : 0f); + } + if (uvChannels != null) + { + for (var channel = 0; channel < uvChannels.Length; channel++) + { + var source = decoded.UvChannels != null && channel < decoded.UvChannels.Length + ? decoded.UvChannels[channel] + : null; + uvChannels[channel].Add(source != null ? 
source[globalIndex] : default); + } + } + } + + componentLocalIndices[i] = localIndex; + } + + componentDecoded = new DecodedMeshData( + positions.ToArray(), + normals?.ToArray(), + tangents?.ToArray(), + tangents2?.ToArray(), + colors?.ToArray(), + boneWeights?.ToArray(), + uvChannels?.Select(channel => channel.ToArray()).ToArray(), + positionWs?.ToArray(), + normalWs?.ToArray(), + decoded.BlendWeightEncoding); + + return true; + } + + private static bool TryDecimateComponent( + DecodedMeshData componentDecoded, + VertexFormat format, + int[] componentIndices, + int targetTriangles, + BlendWeightEncoding blendWeightEncoding, + ModelDecimationAdvancedSettings tuning, + BodyCollisionData? bodyCollision, + out DecodedMeshData decimated, + out int[] decimatedIndices, + out Nano.DecimationStats decimatorStats, + out string? reason) + { + decimated = default!; + decimatedIndices = []; + decimatorStats = default; + reason = null; + + var componentTriangles = componentIndices.Length / 3; + var avgEdgeLength = ComputeAverageEdgeLength(componentDecoded.Positions, componentIndices); + bool RunDecimation( + float bodyCollisionDistanceFactor, + bool allowProtectedVertices, + bool expandProtectedVertices, + bool allowProtectedVerticesWhenRelaxed, + bool forceRelaxTopology, + bool blockUvSeamVertices, + float? uvSeamAngleCosOverride, + out DecodedMeshData runDecimated, + out int[] runDecimatedIndices, + out Nano.DecimationStats runDecimatorStats, + out string? 
runReason) + { + runDecimated = default!; + runDecimatedIndices = []; + runDecimatorStats = default; + runReason = null; + + if (!TryBuildNanomeshMesh(componentDecoded, [componentIndices], format, out var sharedMesh, out runReason)) + { + return false; + } + + if (avgEdgeLength > 0f && tuning.MaxCollapseEdgeLengthFactor > 0f) + { + Nano.DecimateModifier.LimitCollapseEdgeLength = true; + Nano.DecimateModifier.MaxCollapseEdgeLength = avgEdgeLength * tuning.MaxCollapseEdgeLengthFactor; + } + else + { + Nano.DecimateModifier.LimitCollapseEdgeLength = false; + Nano.DecimateModifier.MaxCollapseEdgeLength = float.PositiveInfinity; + } + + var relaxTopology = forceRelaxTopology; + var decimator = new Nano.DecimateModifier(); + if (bodyCollision != null) + { + var threshold = MathF.Max(avgEdgeLength * bodyCollisionDistanceFactor + tuning.BodyCollisionProxyInflate, tuning.MinBodyCollisionDistance); + var bodyDistanceSq = bodyCollision.ComputeDistanceSq(componentDecoded.Positions, threshold); + if (bodyDistanceSq != null) + { + var thresholdSq = threshold * threshold; + var protectionThreshold = MathF.Max(threshold * tuning.BodyCollisionProtectionFactor, threshold); + var protectionThresholdSq = protectionThreshold * protectionThreshold; + var protectedDistanceSq = allowProtectedVertices + ? 
bodyCollision.ComputeDistanceSq(componentDecoded.Positions, protectionThreshold) + : null; + var relaxedBodyGuard = forceRelaxTopology; + if (!forceRelaxTopology && IsNearBodyDominant(bodyDistanceSq, thresholdSq, componentDecoded.Positions.Length, tuning.BodyCollisionAdaptiveNearRatio)) + { + threshold = MathF.Max(threshold * tuning.BodyCollisionAdaptiveRelaxFactor, tuning.MinBodyCollisionDistance); + thresholdSq = threshold * threshold; + relaxedBodyGuard = true; + relaxTopology = true; + + protectionThreshold = MathF.Max(threshold * tuning.BodyCollisionProtectionFactor, threshold); + protectionThresholdSq = protectionThreshold * protectionThreshold; + if (allowProtectedVertices) + { + protectedDistanceSq = bodyCollision.ComputeDistanceSq(componentDecoded.Positions, protectionThreshold); + } + } + + decimator.SetBodyCollision(bodyDistanceSq, thresholdSq, point => bodyCollision.DistanceSq(point, thresholdSq)); + if (allowProtectedVertices && (!relaxedBodyGuard || allowProtectedVerticesWhenRelaxed)) + { + var useExpandedProtection = expandProtectedVertices && !relaxTopology; + var protectedVertices = protectedDistanceSq != null + ? 
BuildProtectedVertices(componentDecoded.Positions.Length, componentIndices, protectedDistanceSq, protectionThresholdSq, useExpandedProtection) + : null; + if (protectedVertices != null) + { + decimator.SetProtectedVertices(protectedVertices); + } + } + } + } + + if (relaxTopology) + { + sharedMesh.attributeDefinitions = [new Nano.AttributeDefinition(Nano.AttributeType.Normals, 0d, 0)]; + } + + var connectedMesh = sharedMesh.ToConnectedMesh(); + Nano.DecimateModifier.CollapseToEndpointsOnly = true; + var previousNormalSimilarity = Nano.DecimateModifier.NormalSimilarityThresholdDegrees; + var previousBoneWeightSimilarity = Nano.DecimateModifier.BoneWeightSimilarityThreshold; + var previousBodyPenetration = Nano.DecimateModifier.BodyCollisionPenetrationFactor; + var previousUvThreshold = Nano.DecimateModifier.UvSimilarityThreshold; + var previousAllowBoundary = Nano.DecimateModifier.AllowBoundaryCollapses; + var previousBlockUvSeamVertices = Nano.DecimateModifier.BlockUvSeamVertices; + var previousUvSeamAngleCos = Nano.DecimateModifier.UvSeamAngleCos; + try + { + Nano.DecimateModifier.NormalSimilarityThresholdDegrees = tuning.NormalSimilarityThresholdDegrees; + Nano.DecimateModifier.BoneWeightSimilarityThreshold = tuning.BoneWeightSimilarityThreshold; + Nano.DecimateModifier.BodyCollisionPenetrationFactor = tuning.BodyCollisionPenetrationFactor; + Nano.DecimateModifier.UvSimilarityThreshold = tuning.UvSimilarityThreshold; + Nano.DecimateModifier.AllowBoundaryCollapses = tuning.AllowBoundaryCollapses; + Nano.DecimateModifier.BlockUvSeamVertices = blockUvSeamVertices && tuning.BlockUvSeamVertices; + Nano.DecimateModifier.UvSeamAngleCos = tuning.UvSeamAngleCos; + + if (relaxTopology) + { + Nano.DecimateModifier.UvSimilarityThreshold = tuning.BodyCollisionAdaptiveUvThreshold; + Nano.DecimateModifier.AllowBoundaryCollapses = false; + } + + if (uvSeamAngleCosOverride.HasValue) + { + Nano.DecimateModifier.UvSeamAngleCos = uvSeamAngleCosOverride.Value; + } + + 
decimator.Initialize(connectedMesh); + decimator.DecimateToPolycount(targetTriangles); + runDecimatorStats = decimator.GetStats(); + } + finally + { + Nano.DecimateModifier.NormalSimilarityThresholdDegrees = previousNormalSimilarity; + Nano.DecimateModifier.BoneWeightSimilarityThreshold = previousBoneWeightSimilarity; + Nano.DecimateModifier.BodyCollisionPenetrationFactor = previousBodyPenetration; + Nano.DecimateModifier.UvSimilarityThreshold = previousUvThreshold; + Nano.DecimateModifier.AllowBoundaryCollapses = previousAllowBoundary; + Nano.DecimateModifier.BlockUvSeamVertices = previousBlockUvSeamVertices; + Nano.DecimateModifier.UvSeamAngleCos = previousUvSeamAngleCos; + } + + var decimatedShared = connectedMesh.ToSharedMesh(); + if (!TryConvertNanomeshMesh(decimatedShared, format, 1, blendWeightEncoding, out runDecimated, out var subMeshes, out runReason)) + { + return false; + } + + if (subMeshes.Length > 0) + { + runDecimatedIndices = subMeshes[0]; + } + + return true; + } + + if (!RunDecimation( + tuning.BodyCollisionDistanceFactor, + allowProtectedVertices: true, + expandProtectedVertices: true, + allowProtectedVerticesWhenRelaxed: true, + forceRelaxTopology: false, + blockUvSeamVertices: true, + uvSeamAngleCosOverride: null, + out decimated, + out decimatedIndices, + out decimatorStats, + out reason)) + { + return false; + } + + if (decimatorStats.CollapsedEdges == 0 && targetTriangles < componentTriangles && bodyCollision != null) + { + if (RunDecimation( + tuning.BodyCollisionNoOpDistanceFactor, + allowProtectedVertices: true, + expandProtectedVertices: false, + allowProtectedVerticesWhenRelaxed: true, + forceRelaxTopology: true, + blockUvSeamVertices: false, + uvSeamAngleCosOverride: tuning.BodyCollisionNoOpUvSeamAngleCos, + out var fallbackDecimated, + out var fallbackDecimatedIndices, + out var fallbackStats, + out _)) + { + var fallbackTriangles = fallbackDecimatedIndices.Length / 3; + if (fallbackStats.CollapsedEdges > 0 && fallbackTriangles > 0 
&& fallbackTriangles < componentTriangles) + { + decimated = fallbackDecimated; + decimatedIndices = fallbackDecimatedIndices; + decimatorStats = fallbackStats; + } + } + } + + return true; + } + + private static float ComputeAverageEdgeLength(Vector3d[] positions, int[] indices) + { + if (positions.Length == 0 || indices.Length < 3) + { + return 0f; + } + + double sum = 0d; + int count = 0; + for (var i = 0; i + 2 < indices.Length; i += 3) + { + var i0 = indices[i]; + var i1 = indices[i + 1]; + var i2 = indices[i + 2]; + if ((uint)i0 >= positions.Length || (uint)i1 >= positions.Length || (uint)i2 >= positions.Length) + { + continue; + } + + sum += Vector3d.Distance(positions[i0], positions[i1]); + sum += Vector3d.Distance(positions[i1], positions[i2]); + sum += Vector3d.Distance(positions[i2], positions[i0]); + count += 3; + } + + return count > 0 ? (float)(sum / count) : 0f; + } + + private static bool[]? BuildProtectedVertices(int vertexCount, int[] indices, float[] distanceSq, float thresholdSq, bool expand) + { + if (vertexCount <= 0 || distanceSq.Length == 0) + { + return null; + } + + var seed = new bool[vertexCount]; + var seedCount = 0; + var limit = Math.Min(vertexCount, distanceSq.Length); + for (var i = 0; i < limit; i++) + { + if (distanceSq[i] <= thresholdSq) + { + seed[i] = true; + seedCount++; + } + } + + if (seedCount == 0) + { + return null; + } + + if (!expand || indices.Length < 3) + { + return seed; + } + + var expanded = (bool[])seed.Clone(); + for (var i = 0; i + 2 < indices.Length; i += 3) + { + var a = indices[i]; + var b = indices[i + 1]; + var c = indices[i + 2]; + if ((uint)a >= vertexCount || (uint)b >= vertexCount || (uint)c >= vertexCount) + { + continue; + } + + if (seed[a] || seed[b] || seed[c]) + { + expanded[a] = true; + expanded[b] = true; + expanded[c] = true; + } + } + + return expanded; + } + + private static bool IsNearBodyDominant(float[] distanceSq, float thresholdSq, int vertexCount, float adaptiveNearRatio) + { + if 
(vertexCount <= 0 || distanceSq.Length == 0 || thresholdSq <= 0f) + { + return false; + } + + var limit = Math.Min(vertexCount, distanceSq.Length); + var nearCount = 0; + for (var i = 0; i < limit; i++) + { + if (distanceSq[i] <= thresholdSq) + { + nearCount++; + } + } + + return nearCount >= limit * adaptiveNearRatio; + } + + private sealed record PreprocessedMeshOutput( + MeshStruct Mesh, + MdlStructs.SubmeshStruct[] SubMeshes, + byte[][] VertexStreams, + int[] Indices, + bool Decimated); + + private static bool TryBuildBodyCollisionData( + MdlFile mdl, + int lodIndex, + int lodMeshStart, + int lodMeshEnd, + ModelDecimationSettings settings, + ModelDecimationAdvancedSettings tuning, + out BodyCollisionData? bodyCollision, + out Dictionary bodyMeshOverrides, + MsLogger logger) + { + bodyCollision = null; + bodyMeshOverrides = []; + + var meshCount = Math.Max(0, lodMeshEnd - lodMeshStart); + logger.LogDebug("Body collision: scanning {MeshCount} meshes, {MaterialCount} materials", meshCount, mdl.Materials.Length); + + if (mdl.Materials.Length == 0) + { + logger.LogDebug("Body collision: no materials found, skipping body collision."); + return false; + } + + var materialList = string.Join(", ", mdl.Materials); + logger.LogDebug("Body collision: model materials = {Materials}", materialList); + + var proxyTargetRatio = Math.Clamp(Math.Max(settings.TargetRatio, tuning.BodyProxyTargetRatioMin), 0d, 1d); + var bodyPositions = new List(); + var bodyIndices = new List(); + var foundBody = false; + + for (var meshIndex = lodMeshStart; meshIndex < lodMeshEnd; meshIndex++) + { + var mesh = mdl.Meshes[meshIndex]; + var material = mesh.MaterialIndex < mdl.Materials.Length + ? 
mdl.Materials[mesh.MaterialIndex] + : "(missing material)"; + var isBody = IsBodyMaterial(material); + logger.LogDebug("Body collision: mesh {MeshIndex} material {Material} body {IsBody}", meshIndex, material, isBody); + + if (!isBody) + { + continue; + } + + foundBody = true; + var meshSubMeshes = mdl.SubMeshes + .Skip(mesh.SubMeshIndex) + .Take(mesh.SubMeshCount) + .ToArray(); + + if (!TryBuildVertexFormat(mdl.VertexDeclarations[meshIndex], out var format, out var formatReason)) + { + logger.LogDebug("Body mesh {MeshIndex} vertex format unsupported: {Reason}", meshIndex, formatReason); + continue; + } + + if (!TryDecodeMeshData(mdl, lodIndex, mesh, format, meshSubMeshes, out var decoded, out var subMeshIndices, out var decodeReason)) + { + logger.LogDebug("Body mesh {MeshIndex} decode failed: {Reason}", meshIndex, decodeReason); + continue; + } + + var triangleCount = (int)(mesh.IndexCount / 3); + var updatedMesh = mesh; + var updatedSubMeshes = CopySubMeshes(meshSubMeshes, 0, mesh.StartIndex); + var vertexStreams = CopyVertexStreams(mdl, lodIndex, mesh); + var indices = ReadIndices(mdl, lodIndex, mesh); + var decimated = false; + + var collisionDecoded = decoded; + var collisionSubMeshIndices = subMeshIndices; + + if (triangleCount >= settings.TriangleThreshold) + { + var targetTriangles = (int)Math.Floor(triangleCount * proxyTargetRatio); + if (targetTriangles >= 1 && targetTriangles < triangleCount) + { + if (TryDecimateWithNanomesh(decoded, subMeshIndices, format, targetTriangles, tuning, null, out var decimatedData, out var decimatedSubMeshIndices, out _, out var decimationReason)) + { + if (TryEncodeMeshData(decimatedData, decimatedSubMeshIndices, format, mesh, meshSubMeshes, settings.NormalizeTangents, out updatedMesh, out updatedSubMeshes, out vertexStreams, out indices, out var encodeReason)) + { + decimated = true; + collisionDecoded = decimatedData; + collisionSubMeshIndices = decimatedSubMeshIndices; + } + else + { + logger.LogDebug("Body mesh 
{MeshIndex} encode failed: {Reason}", meshIndex, encodeReason); + } + } + else + { + logger.LogDebug("Body mesh {MeshIndex} decimation failed: {Reason}", meshIndex, decimationReason); + } + } + } + + bodyMeshOverrides[meshIndex] = new PreprocessedMeshOutput(updatedMesh, updatedSubMeshes, vertexStreams, indices, decimated); + + var baseIndex = bodyPositions.Count; + bodyPositions.AddRange(collisionDecoded.Positions); + foreach (var subMesh in collisionSubMeshIndices) + { + for (var i = 0; i < subMesh.Length; i++) + { + bodyIndices.Add(subMesh[i] + baseIndex); + } + } + } + + if (!foundBody) + { + logger.LogDebug("Body collision: no body meshes matched filter."); + return false; + } + + if (bodyPositions.Count == 0 || bodyIndices.Count == 0) + { + logger.LogDebug("Body collision enabled but no body vertices were collected."); + return false; + } + + var positionArray = bodyPositions.ToArray(); + var indexArray = bodyIndices.ToArray(); + var avgEdgeLength = ComputeAverageEdgeLength(positionArray, indexArray); + var cellSize = MathF.Max(avgEdgeLength, tuning.MinBodyCollisionCellSize); + bodyCollision = new BodyCollisionData(positionArray, indexArray, cellSize, tuning.MinBodyCollisionCellSize); + return true; + } + + private static bool IsBodyMesh(MdlFile mdl, MeshStruct mesh) + { + if (mesh.MaterialIndex >= mdl.Materials.Length) + { + return false; + } + + return IsBodyMaterial(mdl.Materials[mesh.MaterialIndex]); + } + + private static bool IsBodyMaterial(string materialPath) + { + if (string.IsNullOrWhiteSpace(materialPath)) + { + return false; + } + + var normalized = materialPath.Replace('\\', '/').ToLowerInvariant(); + var nameStart = normalized.LastIndexOf('/'); + var fileName = nameStart >= 0 ? normalized[(nameStart + 1)..] 
: normalized; + return fileName.Contains("_bibo", StringComparison.Ordinal) + || fileName.EndsWith("_a.mtrl", StringComparison.Ordinal); + } + + private sealed class BodyCollisionData + { + private readonly Vector3d[] _positions; + private readonly BodyTriangle[] _triangles; + private readonly Dictionary> _triangleCells; + private readonly float _cellSize; + private readonly float _cellSizeInv; + + public BodyCollisionData(Vector3d[] positions, int[] indices, float cellSize, float minCellSize) + { + _positions = positions; + _cellSize = cellSize > 0f ? cellSize : minCellSize; + _cellSizeInv = 1f / _cellSize; + + var triangles = new List(); + for (var i = 0; i + 2 < indices.Length; i += 3) + { + var a = indices[i]; + var b = indices[i + 1]; + var c = indices[i + 2]; + if ((uint)a >= _positions.Length || (uint)b >= _positions.Length || (uint)c >= _positions.Length) + { + continue; + } + + var p0 = _positions[a]; + var p1 = _positions[b]; + var p2 = _positions[c]; + var min = Vector3d.Min(p0, Vector3d.Min(p1, p2)); + var max = Vector3d.Max(p0, Vector3d.Max(p1, p2)); + triangles.Add(new BodyTriangle(a, b, c, min, max)); + } + + _triangles = triangles.ToArray(); + _triangleCells = new Dictionary>(); + + for (var triIndex = 0; triIndex < _triangles.Length; triIndex++) + { + var tri = _triangles[triIndex]; + var minCell = ToCell(tri.Min); + var maxCell = ToCell(tri.Max); + for (var x = minCell.X; x <= maxCell.X; x++) + { + for (var y = minCell.Y; y <= maxCell.Y; y++) + { + for (var z = minCell.Z; z <= maxCell.Z; z++) + { + var key = new CellKey(x, y, z); + if (!_triangleCells.TryGetValue(key, out var list)) + { + list = []; + _triangleCells[key] = list; + } + + list.Add(triIndex); + } + } + } + } + } + + public float[]? 
ComputeDistanceSq(Vector3d[] queryPositions, float maxDistance) + { + if (_positions.Length == 0 || queryPositions.Length == 0 || maxDistance <= 0f || _triangles.Length == 0 || _triangleCells.Count == 0) + { + return null; + } + + var result = new float[queryPositions.Length]; + var maxDistanceSq = maxDistance * maxDistance; + var radius = Math.Max(1, (int)MathF.Ceiling(maxDistance / _cellSize)); + + for (var i = 0; i < queryPositions.Length; i++) + { + var cell = ToCell(queryPositions[i]); + double minSq = double.PositiveInfinity; + var found = false; + + for (var x = -radius; x <= radius && !found; x++) + { + for (var y = -radius; y <= radius && !found; y++) + { + for (var z = -radius; z <= radius; z++) + { + var key = new CellKey(cell.X + x, cell.Y + y, cell.Z + z); + if (!_triangleCells.TryGetValue(key, out var list)) + { + continue; + } + + for (var idx = 0; idx < list.Count; idx++) + { + var tri = _triangles[list[idx]]; + var sq = PointTriangleDistanceSq(queryPositions[i], _positions[tri.A], _positions[tri.B], _positions[tri.C]); + if (sq < minSq) + { + minSq = sq; + } + + if (minSq <= maxDistanceSq) + { + found = true; + break; + } + } + } + } + } + + result[i] = minSq < double.PositiveInfinity ? 
(float)minSq : float.PositiveInfinity; + } + + return result; + } + + public float DistanceSq(in Vector3d point, float maxDistanceSq) + { + if (_positions.Length == 0 || _triangles.Length == 0 || _triangleCells.Count == 0) + { + return float.PositiveInfinity; + } + + if (maxDistanceSq <= 0f) + { + return float.PositiveInfinity; + } + + var maxDistance = MathF.Sqrt(maxDistanceSq); + var radius = Math.Max(1, (int)MathF.Ceiling(maxDistance / _cellSize)); + var cell = ToCell(point); + double minSq = double.PositiveInfinity; + + for (var x = -radius; x <= radius; x++) + { + for (var y = -radius; y <= radius; y++) + { + for (var z = -radius; z <= radius; z++) + { + var key = new CellKey(cell.X + x, cell.Y + y, cell.Z + z); + if (!_triangleCells.TryGetValue(key, out var list)) + { + continue; + } + + for (var idx = 0; idx < list.Count; idx++) + { + var tri = _triangles[list[idx]]; + var sq = PointTriangleDistanceSq(point, _positions[tri.A], _positions[tri.B], _positions[tri.C]); + if (sq < minSq) + { + minSq = sq; + } + + if (minSq <= maxDistanceSq) + { + return (float)minSq; + } + } + } + } + } + + return minSq < double.PositiveInfinity ? 
(float)minSq : float.PositiveInfinity; + } + + private CellKey ToCell(in Vector3d position) + => new( + (int)Math.Floor(position.x * _cellSizeInv), + (int)Math.Floor(position.y * _cellSizeInv), + (int)Math.Floor(position.z * _cellSizeInv)); + } + + private readonly record struct BodyTriangle(int A, int B, int C, Vector3d Min, Vector3d Max); + + private readonly record struct CellKey(int X, int Y, int Z); + + private static double PointTriangleDistanceSq(in Vector3d p, in Vector3d a, in Vector3d b, in Vector3d c) + { + var ab = b - a; + var ac = c - a; + var ap = p - a; + var d1 = Vector3d.Dot(ab, ap); + var d2 = Vector3d.Dot(ac, ap); + if (d1 <= 0d && d2 <= 0d) + { + return (p - a).LengthSquared; + } + + var bp = p - b; + var d3 = Vector3d.Dot(ab, bp); + var d4 = Vector3d.Dot(ac, bp); + if (d3 >= 0d && d4 <= d3) + { + return (p - b).LengthSquared; + } + + var vc = d1 * d4 - d3 * d2; + if (vc <= 0d && d1 >= 0d && d3 <= 0d) + { + var v = d1 / (d1 - d3); + var proj = a + ab * v; + return (p - proj).LengthSquared; + } + + var cp = p - c; + var d5 = Vector3d.Dot(ab, cp); + var d6 = Vector3d.Dot(ac, cp); + if (d6 >= 0d && d5 <= d6) + { + return (p - c).LengthSquared; + } + + var vb = d5 * d2 - d1 * d6; + if (vb <= 0d && d2 >= 0d && d6 <= 0d) + { + var w = d2 / (d2 - d6); + var proj = a + ac * w; + return (p - proj).LengthSquared; + } + + var va = d3 * d6 - d5 * d4; + if (va <= 0d && (d4 - d3) >= 0d && (d5 - d6) >= 0d) + { + var w = (d4 - d3) / ((d4 - d3) + (d5 - d6)); + var proj = b + (c - b) * w; + return (p - proj).LengthSquared; + } + + var denom = 1d / (va + vb + vc); + var v2 = vb * denom; + var w2 = vc * denom; + var projPoint = a + ab * v2 + ac * w2; + return (p - projPoint).LengthSquared; + } + + private static bool AppendComponentData( + DecodedMeshData component, + int[] componentIndices, + VertexFormat format, + List positions, + List? normals, + List? tangents, + List? tangents2, + List? colors, + List? boneWeights, + List[]? uvChannels, + List? 
positionWs, + List? normalWs, + List outputIndices, + out string? reason) + { + reason = null; + + if (component.Positions.Length == 0 || componentIndices.Length == 0) + { + return true; + } + + var baseIndex = positions.Count; + positions.AddRange(component.Positions); + + if (normals != null && component.Normals != null) + { + normals.AddRange(component.Normals); + } + if (tangents != null && component.Tangents != null) + { + tangents.AddRange(component.Tangents); + } + if (tangents2 != null && component.Tangents2 != null) + { + tangents2.AddRange(component.Tangents2); + } + if (colors != null && component.Colors != null) + { + colors.AddRange(component.Colors); + } + if (boneWeights != null && component.BoneWeights != null) + { + boneWeights.AddRange(component.BoneWeights); + } + if (positionWs != null && component.PositionWs != null) + { + positionWs.AddRange(component.PositionWs); + } + if (normalWs != null && component.NormalWs != null) + { + normalWs.AddRange(component.NormalWs); + } + if (uvChannels != null && component.UvChannels != null) + { + if (uvChannels.Length != component.UvChannels.Length) + { + reason = "UV channel mismatch while merging components."; + return false; + } + + for (var channel = 0; channel < uvChannels.Length; channel++) + { + uvChannels[channel].AddRange(component.UvChannels[channel]); + } + } + + for (var i = 0; i < componentIndices.Length; i++) + { + outputIndices.Add(componentIndices[i] + baseIndex); + } + + return true; + } + + private static int Find(int[] parent, int value) + { + var root = value; + while (parent[root] != root) + { + root = parent[root]; + } + + while (parent[value] != value) + { + var next = parent[value]; + parent[value] = root; + value = next; + } + + return root; + } + + private static void Union(int[] parent, byte[] rank, int a, int b) + { + var rootA = Find(parent, a); + var rootB = Find(parent, b); + if (rootA == rootB) + { + return; + } + + if (rank[rootA] < rank[rootB]) + { + parent[rootA] = rootB; + 
return; + } + + parent[rootB] = rootA; + if (rank[rootA] == rank[rootB]) + { + rank[rootA]++; + } + } + + private static bool TryBuildNanomeshMesh( + DecodedMeshData decoded, + int[][] subMeshIndices, + VertexFormat format, + out Nano.SharedMesh sharedMesh, + out string? reason) + { + sharedMesh = default!; + reason = null; + + var vertexCount = decoded.Positions.Length; + if (vertexCount == 0) + { + reason = "No vertices to decimate."; + return false; + } + + if (subMeshIndices.Length == 0) + { + reason = "No submesh indices."; + return false; + } + + var positions = decoded.Positions; + + var totalIndexCount = 0; + for (var i = 0; i < subMeshIndices.Length; i++) + { + totalIndexCount += subMeshIndices[i].Length; + } + + var triangles = new int[totalIndexCount]; + var groups = new Nano.Group[subMeshIndices.Length]; + var cursor = 0; + for (var i = 0; i < subMeshIndices.Length; i++) + { + var subMesh = subMeshIndices[i]; + if (subMesh.Length > 0) + { + Array.Copy(subMesh, 0, triangles, cursor, subMesh.Length); + } + groups[i] = new Nano.Group { firstIndex = cursor, indexCount = subMesh.Length }; + cursor += subMesh.Length; + } + + var flags = BuildFfxivAttributeFlags(format); + var attributes = new Nano.MetaAttributeList(vertexCount); + + for (var i = 0; i < vertexCount; i++) + { + var attr = new Nano.FfxivVertexAttribute( + flags, + format.HasNormals && decoded.Normals != null ? decoded.Normals[i] : default, + format.HasTangent1 && decoded.Tangents != null ? decoded.Tangents[i] : default, + format.HasTangent2 && decoded.Tangents2 != null ? decoded.Tangents2[i] : default, + format.UvChannelCount > 0 && decoded.UvChannels != null ? decoded.UvChannels[0][i] : default, + format.UvChannelCount > 1 && decoded.UvChannels != null ? decoded.UvChannels[1][i] : default, + format.UvChannelCount > 2 && decoded.UvChannels != null ? decoded.UvChannels[2][i] : default, + format.UvChannelCount > 3 && decoded.UvChannels != null ? 
decoded.UvChannels[3][i] : default, + format.HasColors && decoded.Colors != null ? decoded.Colors[i] : default, + format.HasSkinning && decoded.BoneWeights != null ? decoded.BoneWeights[i] : default, + format.HasPositionW && decoded.PositionWs != null ? decoded.PositionWs[i] : 0f, + format.HasNormalW && decoded.NormalWs != null ? decoded.NormalWs[i] : 0f); + + attributes[i] = new Nano.MetaAttribute(attr); + } + + sharedMesh = new Nano.SharedMesh + { + positions = positions, + triangles = triangles, + groups = groups, + attributes = attributes, + attributeDefinitions = [new Nano.AttributeDefinition(Nano.AttributeType.Normals, Nano.ConnectedMesh.EdgeBorderPenalty, 0)], + }; + + return true; + } + + private static bool TryConvertNanomeshMesh( + Nano.SharedMesh decimatedShared, + VertexFormat format, + int expectedSubMeshCount, + BlendWeightEncoding blendWeightEncoding, + out DecodedMeshData decimated, + out int[][] decimatedSubMeshIndices, + out string? reason) + { + decimated = default!; + decimatedSubMeshIndices = []; + reason = null; + + if (decimatedShared.triangles == null || decimatedShared.triangles.Length == 0) + { + reason = "No triangles after decimation."; + return false; + } + + var groups = decimatedShared.groups; + var triangles = decimatedShared.triangles; + int[][] subMeshIndices; + + if (groups != null && groups.Length == expectedSubMeshCount) + { + subMeshIndices = new int[groups.Length][]; + for (var i = 0; i < groups.Length; i++) + { + var group = groups[i]; + if (group.firstIndex < 0 || group.indexCount < 0 || group.firstIndex + group.indexCount > triangles.Length) + { + reason = "Invalid submesh group range after decimation."; + return false; + } + + var slice = new int[group.indexCount]; + if (group.indexCount > 0) + { + Array.Copy(triangles, group.firstIndex, slice, 0, group.indexCount); + } + subMeshIndices[i] = slice; + } + } + else if (expectedSubMeshCount == 1) + { + subMeshIndices = [triangles]; + } + else + { + reason = "Submesh group 
count mismatch after decimation."; + return false; + } + + var vertexCount = decimatedShared.positions.Length; + var positions = decimatedShared.positions; + + var attrList = decimatedShared.attributes as Nano.MetaAttributeList; + if (attrList == null) + { + reason = "Missing vertex attributes after decimation."; + return false; + } + + Vector3[]? normals = format.HasNormals ? new Vector3[vertexCount] : null; + Vector4[]? tangents = format.HasTangent1 ? new Vector4[vertexCount] : null; + Vector4[]? tangents2 = format.HasTangent2 ? new Vector4[vertexCount] : null; + Vector4[]? colors = format.HasColors ? new Vector4[vertexCount] : null; + BoneWeight[]? boneWeights = format.HasSkinning ? new BoneWeight[vertexCount] : null; + float[]? positionWs = format.HasPositionW ? new float[vertexCount] : null; + float[]? normalWs = format.HasNormalW ? new float[vertexCount] : null; + + Vector2[][]? uvChannels = null; + if (format.UvChannelCount > 0) + { + uvChannels = new Vector2[format.UvChannelCount][]; + for (var channel = 0; channel < format.UvChannelCount; channel++) + { + uvChannels[channel] = new Vector2[vertexCount]; + } + } + + for (var i = 0; i < vertexCount; i++) + { + var attr = (Nano.MetaAttribute)attrList[i]; + var data = attr.attr0; + + if (normals != null) + { + normals[i] = data.normal; + } + + if (tangents != null) + { + tangents[i] = data.tangent1; + } + + if (tangents2 != null) + { + tangents2[i] = data.tangent2; + } + + if (colors != null) + { + colors[i] = data.color; + } + + if (boneWeights != null) + { + boneWeights[i] = data.boneWeight; + } + + if (positionWs != null) + { + positionWs[i] = data.positionW; + } + + if (normalWs != null) + { + normalWs[i] = data.normalW; + } + + if (uvChannels != null) + { + if (uvChannels.Length > 0) + { + uvChannels[0][i] = data.uv0; + } + if (uvChannels.Length > 1) + { + uvChannels[1][i] = data.uv1; + } + if (uvChannels.Length > 2) + { + uvChannels[2][i] = data.uv2; + } + if (uvChannels.Length > 3) + { + uvChannels[3][i] 
= data.uv3; + } + } + } + + decimated = new DecodedMeshData(positions, normals, tangents, tangents2, colors, boneWeights, uvChannels, positionWs, normalWs, blendWeightEncoding); + decimatedSubMeshIndices = subMeshIndices; + return true; + } + + private static Nano.FfxivAttributeFlags BuildFfxivAttributeFlags(VertexFormat format) + { + var flags = Nano.FfxivAttributeFlags.None; + if (format.HasNormals) + { + flags |= Nano.FfxivAttributeFlags.Normal; + } + if (format.HasTangent1) + { + flags |= Nano.FfxivAttributeFlags.Tangent1; + } + if (format.HasTangent2) + { + flags |= Nano.FfxivAttributeFlags.Tangent2; + } + if (format.HasColors) + { + flags |= Nano.FfxivAttributeFlags.Color; + } + if (format.HasSkinning) + { + flags |= Nano.FfxivAttributeFlags.BoneWeights; + } + if (format.HasPositionW) + { + flags |= Nano.FfxivAttributeFlags.PositionW; + } + if (format.HasNormalW) + { + flags |= Nano.FfxivAttributeFlags.NormalW; + } + if (format.UvChannelCount > 0) + { + flags |= Nano.FfxivAttributeFlags.Uv0; + } + if (format.UvChannelCount > 1) + { + flags |= Nano.FfxivAttributeFlags.Uv1; + } + if (format.UvChannelCount > 2) + { + flags |= Nano.FfxivAttributeFlags.Uv2; + } + if (format.UvChannelCount > 3) + { + flags |= Nano.FfxivAttributeFlags.Uv3; + } + return flags; } private static bool TryDecodeMeshData( @@ -453,9 +2009,12 @@ internal static class MdlDecimator var vertexCount = mesh.VertexCount; var positions = new Vector3d[vertexCount]; Vector3[]? normals = format.HasNormals ? new Vector3[vertexCount] : null; - Vector4[]? tangents = format.HasTangents ? new Vector4[vertexCount] : null; + Vector4[]? tangents = format.HasTangent1 ? new Vector4[vertexCount] : null; + Vector4[]? tangents2 = format.HasTangent2 ? new Vector4[vertexCount] : null; Vector4[]? colors = format.HasColors ? new Vector4[vertexCount] : null; BoneWeight[]? boneWeights = format.HasSkinning ? new BoneWeight[vertexCount] : null; + float[]? positionWs = format.HasPositionW ? 
new float[vertexCount] : null; + float[]? normalWs = format.HasNormalW ? new float[vertexCount] : null; Vector2[][]? uvChannels = null; if (format.UvChannelCount > 0) @@ -467,6 +2026,8 @@ internal static class MdlDecimator } } + var blendWeightEncoding = DetectBlendWeightEncoding(mdl, lodIndex, mesh, format); + var streams = new BinaryReader[MaxStreams]; for (var streamIndex = 0; streamIndex < MaxStreams; streamIndex++) { @@ -477,7 +2038,7 @@ internal static class MdlDecimator var uvLookup = format.UvElements.ToDictionary(static element => ElementKey.From(element.Element), static element => element); for (var vertexIndex = 0; vertexIndex < vertexCount; vertexIndex++) { - byte[]? indices = null; + int[]? indices = null; float[]? weights = null; foreach (var element in format.SortedElements) @@ -489,14 +2050,31 @@ internal static class MdlDecimator switch (usage) { case MdlFile.VertexUsage.Position: - positions[vertexIndex] = ReadPosition(type, stream); + if (type == MdlFile.VertexType.Single4 && positionWs != null) + { + positions[vertexIndex] = ReadPositionWithW(stream, out positionWs[vertexIndex]); + } + else + { + positions[vertexIndex] = ReadPosition(type, stream); + } break; case MdlFile.VertexUsage.Normal when normals != null: - normals[vertexIndex] = ReadNormal(type, stream); + if (type == MdlFile.VertexType.Single4 && normalWs != null) + { + normals[vertexIndex] = ReadNormalWithW(stream, out normalWs[vertexIndex]); + } + else + { + normals[vertexIndex] = ReadNormal(type, stream); + } break; case MdlFile.VertexUsage.Tangent1 when tangents != null: tangents[vertexIndex] = ReadTangent(type, stream); break; + case MdlFile.VertexUsage.Tangent2 when tangents2 != null: + tangents2[vertexIndex] = ReadTangent(type, stream); + break; case MdlFile.VertexUsage.Color when colors != null: colors[vertexIndex] = ReadColor(type, stream); break; @@ -504,7 +2082,7 @@ internal static class MdlDecimator indices = ReadIndices(type, stream); break; case 
MdlFile.VertexUsage.BlendWeights: - weights = ReadWeights(type, stream); + weights = ReadWeights(type, stream, blendWeightEncoding); break; case MdlFile.VertexUsage.UV when uvChannels != null: if (!uvLookup.TryGetValue(ElementKey.From(element), out var uvElement)) @@ -516,6 +2094,7 @@ internal static class MdlDecimator break; default: if (usage == MdlFile.VertexUsage.Normal || usage == MdlFile.VertexUsage.Tangent1 + || usage == MdlFile.VertexUsage.Tangent2 || usage == MdlFile.VertexUsage.Color) { _ = ReadAndDiscard(type, stream); @@ -532,20 +2111,21 @@ internal static class MdlDecimator return false; } - NormalizeWeights(weights); boneWeights[vertexIndex] = new BoneWeight(indices[0], indices[1], indices[2], indices[3], weights[0], weights[1], weights[2], weights[3]); } } - decoded = new DecodedMeshData(positions, normals, tangents, colors, boneWeights, uvChannels); + decoded = new DecodedMeshData(positions, normals, tangents, tangents2, colors, boneWeights, uvChannels, positionWs, normalWs, blendWeightEncoding); return true; } private static bool TryEncodeMeshData( - Mesh decimatedMesh, + DecodedMeshData decimated, + int[][] decimatedSubMeshIndices, VertexFormat format, MeshStruct originalMesh, MdlStructs.SubmeshStruct[] originalSubMeshes, + bool normalizeTangents, out MeshStruct updatedMesh, out MdlStructs.SubmeshStruct[] updatedSubMeshes, out byte[][] vertexStreams, @@ -558,17 +2138,26 @@ internal static class MdlDecimator indices = []; reason = null; - var vertexCount = decimatedMesh.Vertices.Length; + if (decimatedSubMeshIndices.Length != originalSubMeshes.Length) + { + reason = "Decimated submesh count mismatch."; + return false; + } + + var vertexCount = decimated.Positions.Length; if (vertexCount > ushort.MaxValue) { reason = "Vertex count exceeds ushort range."; return false; } - var normals = decimatedMesh.Normals; - var tangents = decimatedMesh.Tangents; - var colors = decimatedMesh.Colors; - var boneWeights = decimatedMesh.BoneWeights; + var normals = 
decimated.Normals; + var tangents = decimated.Tangents; + var tangents2 = decimated.Tangents2; + var colors = decimated.Colors; + var boneWeights = decimated.BoneWeights; + var positionWs = decimated.PositionWs; + var normalWs = decimated.NormalWs; if (format.HasNormals && normals == null) { @@ -576,12 +2165,24 @@ internal static class MdlDecimator return false; } - if (format.HasTangents && tangents == null) + if (format.HasTangent1 && tangents == null) { - reason = "Missing tangents after decimation."; + reason = "Missing tangent1 after decimation."; return false; } + if (format.HasTangent2 && tangents2 == null) + { + reason = "Missing tangent2 after decimation."; + return false; + } + + if (normalizeTangents) + { + NormalizeTangents(tangents, clampW: true); + NormalizeTangents(tangents2, clampW: true); + } + if (format.HasColors && colors == null) { reason = "Missing colors after decimation."; @@ -594,19 +2195,28 @@ internal static class MdlDecimator return false; } + if (format.HasPositionW && positionWs == null) + { + reason = "Missing position W after decimation."; + return false; + } + + if (format.HasNormalW && normalWs == null) + { + reason = "Missing normal W after decimation."; + return false; + } + var uvChannels = Array.Empty(); if (format.UvChannelCount > 0) { - uvChannels = new Vector2[format.UvChannelCount][]; - for (var channel = 0; channel < format.UvChannelCount; channel++) + if (decimated.UvChannels == null || decimated.UvChannels.Length < format.UvChannelCount) { - if (decimatedMesh.GetUVDimension(channel) != 2) - { - reason = "Unsupported UV dimension after decimation."; - return false; - } - uvChannels[channel] = decimatedMesh.GetUVs2D(channel); + reason = "Missing UV channels after decimation."; + return false; } + + uvChannels = decimated.UvChannels; } var streamBuffers = new byte[MaxStreams][]; @@ -659,14 +2269,17 @@ internal static class MdlDecimator switch (usage) { case MdlFile.VertexUsage.Position: - WritePosition(type, 
decimatedMesh.Vertices[vertexIndex], target); + WritePosition(type, decimated.Positions[vertexIndex], target, positionWs != null ? positionWs[vertexIndex] : null); break; case MdlFile.VertexUsage.Normal when normals != null: - WriteNormal(type, normals[vertexIndex], target); + WriteNormal(type, normals[vertexIndex], target, normalWs != null ? normalWs[vertexIndex] : null); break; case MdlFile.VertexUsage.Tangent1 when tangents != null: WriteTangent(type, tangents[vertexIndex], target); break; + case MdlFile.VertexUsage.Tangent2 when tangents2 != null: + WriteTangent(type, tangents2[vertexIndex], target); + break; case MdlFile.VertexUsage.Color when colors != null: WriteColor(type, colors[vertexIndex], target); break; @@ -674,7 +2287,7 @@ internal static class MdlDecimator WriteBlendIndices(type, boneWeights[vertexIndex], target); break; case MdlFile.VertexUsage.BlendWeights when boneWeights != null: - WriteBlendWeights(type, boneWeights[vertexIndex], target); + WriteBlendWeights(type, boneWeights[vertexIndex], decimated.BlendWeightEncoding, target); break; case MdlFile.VertexUsage.UV when format.UvChannelCount > 0: if (!uvLookup.TryGetValue(ElementKey.From(element), out var uvElement)) @@ -695,7 +2308,7 @@ internal static class MdlDecimator for (var subMeshIndex = 0; subMeshIndex < originalSubMeshes.Length; subMeshIndex++) { - var subMeshIndices = decimatedMesh.GetIndices(subMeshIndex); + var subMeshIndices = decimatedSubMeshIndices[subMeshIndex]; if (subMeshIndices.Any(index => index < 0 || index >= vertexCount)) { reason = "Decimated indices out of range."; @@ -876,26 +2489,50 @@ internal static class MdlDecimator if (normalElements.Length == 1) { var normalType = (MdlFile.VertexType)normalElements[0].Type; - if (normalType != MdlFile.VertexType.Single3 && normalType != MdlFile.VertexType.Single4 && normalType != MdlFile.VertexType.NByte4) + if (normalType != MdlFile.VertexType.Single3 + && normalType != MdlFile.VertexType.Single4 + && normalType != 
MdlFile.VertexType.NByte4 + && normalType != MdlFile.VertexType.NShort4) { reason = "Unsupported normal element type."; return false; } } - var tangentElements = elements.Where(static e => (MdlFile.VertexUsage)e.Usage == MdlFile.VertexUsage.Tangent1).ToArray(); - if (tangentElements.Length > 1) + var tangent1Elements = elements.Where(static e => (MdlFile.VertexUsage)e.Usage == MdlFile.VertexUsage.Tangent1).ToArray(); + if (tangent1Elements.Length > 1) { - reason = "Multiple tangent elements unsupported."; + reason = "Multiple tangent1 elements unsupported."; return false; } - if (tangentElements.Length == 1) + if (tangent1Elements.Length == 1) { - var tangentType = (MdlFile.VertexType)tangentElements[0].Type; - if (tangentType != MdlFile.VertexType.Single4 && tangentType != MdlFile.VertexType.NByte4) + var tangentType = (MdlFile.VertexType)tangent1Elements[0].Type; + if (tangentType != MdlFile.VertexType.Single4 + && tangentType != MdlFile.VertexType.NByte4 + && tangentType != MdlFile.VertexType.NShort4) { - reason = "Unsupported tangent element type."; + reason = "Unsupported tangent1 element type."; + return false; + } + } + + var tangent2Elements = elements.Where(static e => (MdlFile.VertexUsage)e.Usage == MdlFile.VertexUsage.Tangent2).ToArray(); + if (tangent2Elements.Length > 1) + { + reason = "Multiple tangent2 elements unsupported."; + return false; + } + + if (tangent2Elements.Length == 1) + { + var tangentType = (MdlFile.VertexType)tangent2Elements[0].Type; + if (tangentType != MdlFile.VertexType.Single4 + && tangentType != MdlFile.VertexType.NByte4 + && tangentType != MdlFile.VertexType.NShort4) + { + reason = "Unsupported tangent2 element type."; return false; } } @@ -911,7 +2548,12 @@ internal static class MdlDecimator if (colorElements.Length == 1) { var colorType = (MdlFile.VertexType)colorElements[0].Type; - if (colorType != MdlFile.VertexType.UByte4 && colorType != MdlFile.VertexType.NByte4 && colorType != MdlFile.VertexType.Single4) + if (colorType 
!= MdlFile.VertexType.UByte4 + && colorType != MdlFile.VertexType.NByte4 + && colorType != MdlFile.VertexType.Single4 + && colorType != MdlFile.VertexType.Short4 + && colorType != MdlFile.VertexType.NShort4 + && colorType != MdlFile.VertexType.UShort4) { reason = "Unsupported color element type."; return false; @@ -937,14 +2579,18 @@ internal static class MdlDecimator if (blendIndicesElements.Length == 1) { var indexType = (MdlFile.VertexType)blendIndicesElements[0].Type; - if (indexType != MdlFile.VertexType.UByte4) + if (indexType != MdlFile.VertexType.UByte4 && indexType != MdlFile.VertexType.UShort4) { reason = "Unsupported blend index type."; return false; } var weightType = (MdlFile.VertexType)blendWeightsElements[0].Type; - if (weightType != MdlFile.VertexType.UByte4 && weightType != MdlFile.VertexType.NByte4 && weightType != MdlFile.VertexType.Single4) + if (weightType != MdlFile.VertexType.UByte4 + && weightType != MdlFile.VertexType.NByte4 + && weightType != MdlFile.VertexType.Single4 + && weightType != MdlFile.VertexType.UShort4 + && weightType != MdlFile.VertexType.NShort4) { reason = "Unsupported blend weight type."; return false; @@ -956,11 +2602,14 @@ internal static class MdlDecimator return false; } + var positionElement = positionElements[0]; var sortedElements = elements.OrderBy(static element => element.Offset).ToList(); format = new VertexFormat( sortedElements, + positionElement, normalElements.Length == 1 ? normalElements[0] : (MdlStructs.VertexElement?)null, - tangentElements.Length == 1 ? tangentElements[0] : (MdlStructs.VertexElement?)null, + tangent1Elements.Length == 1 ? tangent1Elements[0] : (MdlStructs.VertexElement?)null, + tangent2Elements.Length == 1 ? tangent2Elements[0] : (MdlStructs.VertexElement?)null, colorElement, blendIndicesElements.Length == 1 ? blendIndicesElements[0] : (MdlStructs.VertexElement?)null, blendWeightsElements.Length == 1 ? 
blendWeightsElements[0] : (MdlStructs.VertexElement?)null, @@ -987,9 +2636,14 @@ internal static class MdlDecimator foreach (var element in uvList) { var type = (MdlFile.VertexType)element.Type; - if (type == MdlFile.VertexType.Half2 || type == MdlFile.VertexType.Single2) + if (type == MdlFile.VertexType.Half2 + || type == MdlFile.VertexType.Single2 + || type == MdlFile.VertexType.Short2 + || type == MdlFile.VertexType.NShort2 + || type == MdlFile.VertexType.UShort2 + || type == MdlFile.VertexType.Single1) { - if (uvChannelCount + 1 > Mesh.UVChannelCount) + if (uvChannelCount + 1 > MaxUvChannels) { reason = "Too many UV channels."; return false; @@ -998,9 +2652,13 @@ internal static class MdlDecimator uvElements.Add(new UvElementPacking(element, uvChannelCount, null)); uvChannelCount += 1; } - else if (type == MdlFile.VertexType.Half4 || type == MdlFile.VertexType.Single4) + else if (type == MdlFile.VertexType.Half4 + || type == MdlFile.VertexType.Single4 + || type == MdlFile.VertexType.Short4 + || type == MdlFile.VertexType.NShort4 + || type == MdlFile.VertexType.UShort4) { - if (uvChannelCount + 2 > Mesh.UVChannelCount) + if (uvChannelCount + 2 > MaxUvChannels) { reason = "Too many UV channels."; return false; @@ -1042,6 +2700,15 @@ internal static class MdlDecimator } } + private static Vector3d ReadPositionWithW(BinaryReader reader, out float w) + { + var x = reader.ReadSingle(); + var y = reader.ReadSingle(); + var z = reader.ReadSingle(); + w = reader.ReadSingle(); + return new Vector3d(x, y, z); + } + private static Vector3 ReadNormal(MdlFile.VertexType type, BinaryReader reader) { switch (type) @@ -1056,17 +2723,29 @@ internal static class MdlDecimator return new Vector3(x, y, z); case MdlFile.VertexType.NByte4: return ReadNByte4(reader).ToVector3(); + case MdlFile.VertexType.NShort4: + return ReadNShort4(reader).ToVector3(); default: throw new InvalidOperationException($"Unsupported normal type {type}"); } } + private static Vector3 
ReadNormalWithW(BinaryReader reader, out float w) + { + var x = reader.ReadSingle(); + var y = reader.ReadSingle(); + var z = reader.ReadSingle(); + w = reader.ReadSingle(); + return new Vector3(x, y, z); + } + private static Vector4 ReadTangent(MdlFile.VertexType type, BinaryReader reader) { return type switch { MdlFile.VertexType.Single4 => new Vector4(reader.ReadSingle(), reader.ReadSingle(), reader.ReadSingle(), reader.ReadSingle()), MdlFile.VertexType.NByte4 => ReadNByte4(reader), + MdlFile.VertexType.NShort4 => ReadNShort4(reader), _ => throw new InvalidOperationException($"Unsupported tangent type {type}"), }; } @@ -1078,27 +2757,83 @@ internal static class MdlDecimator MdlFile.VertexType.UByte4 => ReadUByte4(reader), MdlFile.VertexType.NByte4 => ReadUByte4(reader), MdlFile.VertexType.Single4 => new Vector4(reader.ReadSingle(), reader.ReadSingle(), reader.ReadSingle(), reader.ReadSingle()), + MdlFile.VertexType.Short4 => ReadShort4(reader), + MdlFile.VertexType.NShort4 => ReadUShort4Normalized(reader), + MdlFile.VertexType.UShort4 => ReadUShort4Normalized(reader), _ => throw new InvalidOperationException($"Unsupported color type {type}"), }; } + private static void NormalizeTangents(Vector4[]? tangents, bool clampW) + { + if (tangents == null) + { + return; + } + + for (var i = 0; i < tangents.Length; i++) + { + var tangent = tangents[i]; + var x = tangent.x; + var y = tangent.y; + var z = tangent.z; + var w = tangent.w; + var length = MathF.Sqrt((x * x) + (y * y) + (z * z)); + if (length > 1e-6f) + { + x /= length; + y /= length; + z /= length; + } + + if (clampW) + { + w = w >= 0f ? 
1f : -1f; + } + + tangents[i] = new Vector4(x, y, z, w); + } + } + private static void ReadUv(MdlFile.VertexType type, BinaryReader reader, UvElementPacking mapping, Vector2[][] uvChannels, int vertexIndex) { - if (type == MdlFile.VertexType.Half2 || type == MdlFile.VertexType.Single2) + if (type == MdlFile.VertexType.Half2 + || type == MdlFile.VertexType.Single2 + || type == MdlFile.VertexType.Short2 + || type == MdlFile.VertexType.NShort2 + || type == MdlFile.VertexType.UShort2 + || type == MdlFile.VertexType.Single1) { - var uv = type == MdlFile.VertexType.Half2 - ? new Vector2(ReadHalf(reader), ReadHalf(reader)) - : new Vector2(reader.ReadSingle(), reader.ReadSingle()); + var uv = type switch + { + MdlFile.VertexType.Half2 => new Vector2(ReadHalf(reader), ReadHalf(reader)), + MdlFile.VertexType.Single2 => new Vector2(reader.ReadSingle(), reader.ReadSingle()), + MdlFile.VertexType.Short2 => ReadShort2(reader), + MdlFile.VertexType.NShort2 => ReadUShort2Normalized(reader), + MdlFile.VertexType.UShort2 => ReadUShort2Normalized(reader), + MdlFile.VertexType.Single1 => new Vector2(reader.ReadSingle(), 0f), + _ => Vector2.Zero, + }; uvChannels[mapping.FirstChannel][vertexIndex] = uv; return; } - if (type == MdlFile.VertexType.Half4 || type == MdlFile.VertexType.Single4) + if (type == MdlFile.VertexType.Half4 + || type == MdlFile.VertexType.Single4 + || type == MdlFile.VertexType.Short4 + || type == MdlFile.VertexType.NShort4 + || type == MdlFile.VertexType.UShort4) { - var uv = type == MdlFile.VertexType.Half4 - ? 
new Vector4(ReadHalf(reader), ReadHalf(reader), ReadHalf(reader), ReadHalf(reader)) - : new Vector4(reader.ReadSingle(), reader.ReadSingle(), reader.ReadSingle(), reader.ReadSingle()); + var uv = type switch + { + MdlFile.VertexType.Half4 => new Vector4(ReadHalf(reader), ReadHalf(reader), ReadHalf(reader), ReadHalf(reader)), + MdlFile.VertexType.Single4 => new Vector4(reader.ReadSingle(), reader.ReadSingle(), reader.ReadSingle(), reader.ReadSingle()), + MdlFile.VertexType.Short4 => ReadShort4(reader), + MdlFile.VertexType.NShort4 => ReadUShort4Normalized(reader), + MdlFile.VertexType.UShort4 => ReadUShort4Normalized(reader), + _ => new Vector4(0f, 0f, 0f, 0f), + }; uvChannels[mapping.FirstChannel][vertexIndex] = new Vector2(uv.x, uv.y); if (mapping.SecondChannel.HasValue) @@ -1108,26 +2843,40 @@ internal static class MdlDecimator } } - private static byte[] ReadIndices(MdlFile.VertexType type, BinaryReader reader) + private static int[] ReadIndices(MdlFile.VertexType type, BinaryReader reader) { return type switch { - MdlFile.VertexType.UByte4 => new[] { reader.ReadByte(), reader.ReadByte(), reader.ReadByte(), reader.ReadByte() }, + MdlFile.VertexType.UByte4 => new[] { (int)reader.ReadByte(), (int)reader.ReadByte(), (int)reader.ReadByte(), (int)reader.ReadByte() }, + MdlFile.VertexType.UShort4 => new[] { (int)reader.ReadUInt16(), (int)reader.ReadUInt16(), (int)reader.ReadUInt16(), (int)reader.ReadUInt16() }, _ => throw new InvalidOperationException($"Unsupported indices type {type}"), }; } - private static float[] ReadWeights(MdlFile.VertexType type, BinaryReader reader) + private static float[] ReadWeights(MdlFile.VertexType type, BinaryReader reader, BlendWeightEncoding encoding) { return type switch { MdlFile.VertexType.UByte4 => ReadUByte4(reader).ToFloatArray(), MdlFile.VertexType.NByte4 => ReadUByte4(reader).ToFloatArray(), MdlFile.VertexType.Single4 => new[] { reader.ReadSingle(), reader.ReadSingle(), reader.ReadSingle(), reader.ReadSingle() }, + 
MdlFile.VertexType.NShort4 => ReadUShort4Normalized(reader).ToFloatArray(), + MdlFile.VertexType.UShort4 => encoding == BlendWeightEncoding.UShortAsByte + ? ReadUShort4AsByte(reader) + : ReadUShort4Normalized(reader).ToFloatArray(), _ => throw new InvalidOperationException($"Unsupported weights type {type}"), }; } + private static float[] ReadUShort4AsByte(BinaryReader reader) + { + var w0 = reader.ReadUInt16(); + var w1 = reader.ReadUInt16(); + var w2 = reader.ReadUInt16(); + var w3 = reader.ReadUInt16(); + return new[] { w0 / 255f, w1 / 255f, w2 / 255f, w3 / 255f }; + } + private static Vector4 ReadUByte4(BinaryReader reader) { return new Vector4( @@ -1143,29 +2892,98 @@ internal static class MdlDecimator return (value * 2f) - new Vector4(1f, 1f, 1f, 1f); } - private static Vector4 ReadAndDiscard(MdlFile.VertexType type, BinaryReader reader) + private static Vector2 ReadShort2(BinaryReader reader) + => new(reader.ReadInt16(), reader.ReadInt16()); + + private static Vector4 ReadShort4(BinaryReader reader) + => new(reader.ReadInt16(), reader.ReadInt16(), reader.ReadInt16(), reader.ReadInt16()); + + /* these really don't have a use currently, we don't need to read raw unnormalized ushorts :3 + private static Vector2 ReadUShort2(BinaryReader reader) + => new(reader.ReadUInt16(), reader.ReadUInt16()); + + private static Vector4 ReadUShort4(BinaryReader reader) + => new(reader.ReadUInt16(), reader.ReadUInt16(), reader.ReadUInt16(), reader.ReadUInt16()); + */ + + private static Vector2 ReadUShort2Normalized(BinaryReader reader) + => new(reader.ReadUInt16() / (float)ushort.MaxValue, reader.ReadUInt16() / (float)ushort.MaxValue); + + private static Vector4 ReadUShort4Normalized(BinaryReader reader) + => new(reader.ReadUInt16() / (float)ushort.MaxValue, reader.ReadUInt16() / (float)ushort.MaxValue, reader.ReadUInt16() / (float)ushort.MaxValue, reader.ReadUInt16() / (float)ushort.MaxValue); + + private static Vector4 ReadNShort4(BinaryReader reader) { - return type switch - 
{ - MdlFile.VertexType.Single2 => new Vector4(reader.ReadSingle(), reader.ReadSingle(), 0, 0), - MdlFile.VertexType.Single3 => new Vector4(reader.ReadSingle(), reader.ReadSingle(), reader.ReadSingle(), 0), - MdlFile.VertexType.Single4 => new Vector4(reader.ReadSingle(), reader.ReadSingle(), reader.ReadSingle(), reader.ReadSingle()), - MdlFile.VertexType.Half2 => new Vector4(ReadHalf(reader), ReadHalf(reader), 0, 0), - MdlFile.VertexType.Half4 => new Vector4(ReadHalf(reader), ReadHalf(reader), ReadHalf(reader), ReadHalf(reader)), - MdlFile.VertexType.UByte4 => ReadUByte4(reader), - MdlFile.VertexType.NByte4 => ReadUByte4(reader), - _ => Vector4.zero, - }; + var value = ReadUShort4Normalized(reader); + return (value * 2f) - new Vector4(1f, 1f, 1f, 1f); } - private static void WritePosition(MdlFile.VertexType type, Vector3d value, Span target) + private static Vector4 ReadAndDiscard(MdlFile.VertexType type, BinaryReader reader) { + switch (type) + { + case MdlFile.VertexType.Single1: + return new Vector4(reader.ReadSingle(), 0, 0, 0); + case MdlFile.VertexType.Single2: + return new Vector4(reader.ReadSingle(), reader.ReadSingle(), 0, 0); + case MdlFile.VertexType.Single3: + return new Vector4(reader.ReadSingle(), reader.ReadSingle(), reader.ReadSingle(), 0); + case MdlFile.VertexType.Single4: + return new Vector4(reader.ReadSingle(), reader.ReadSingle(), reader.ReadSingle(), reader.ReadSingle()); + case MdlFile.VertexType.Half2: + return new Vector4(ReadHalf(reader), ReadHalf(reader), 0, 0); + case MdlFile.VertexType.Half4: + return new Vector4(ReadHalf(reader), ReadHalf(reader), ReadHalf(reader), ReadHalf(reader)); + case MdlFile.VertexType.UByte4: + return ReadUByte4(reader); + case MdlFile.VertexType.NByte4: + return ReadUByte4(reader); + case MdlFile.VertexType.Short2: + { + var value = ReadShort2(reader); + return new Vector4(value.x, value.y, 0, 0); + } + case MdlFile.VertexType.Short4: + return ReadShort4(reader); + case MdlFile.VertexType.NShort2: + { + var 
value = ReadUShort2Normalized(reader); + return new Vector4(value.x, value.y, 0, 0); + } + case MdlFile.VertexType.NShort4: + return ReadUShort4Normalized(reader); + case MdlFile.VertexType.UShort2: + { + var value = ReadUShort2Normalized(reader); + return new Vector4(value.x, value.y, 0, 0); + } + case MdlFile.VertexType.UShort4: + return ReadUShort4Normalized(reader); + default: + return new Vector4(0f, 0f, 0f, 0f); + } + } + + private static void WritePosition(MdlFile.VertexType type, Vector3d value, Span target, float? wOverride = null) + { + if (type == MdlFile.VertexType.Single4 && wOverride.HasValue) + { + WriteVector4(type, new Vector4((float)value.x, (float)value.y, (float)value.z, wOverride.Value), target); + return; + } + WriteVector3(type, new Vector3((float)value.x, (float)value.y, (float)value.z), target); } - private static void WriteNormal(MdlFile.VertexType type, Vector3 value, Span target) + private static void WriteNormal(MdlFile.VertexType type, Vector3 value, Span target, float? 
wOverride = null) { - WriteVector3(type, value, target, normalized: type == MdlFile.VertexType.NByte4); + if (type == MdlFile.VertexType.Single4 && wOverride.HasValue) + { + WriteVector4(type, new Vector4(value.x, value.y, value.z, wOverride.Value), target); + return; + } + + WriteVector3(type, value, target, normalized: type == MdlFile.VertexType.NByte4 || type == MdlFile.VertexType.NShort4); } private static void WriteTangent(MdlFile.VertexType type, Vector4 value, Span target) @@ -1176,12 +2994,21 @@ internal static class MdlDecimator return; } + if (type == MdlFile.VertexType.NShort4) + { + WriteNShort4(value, target); + return; + } + WriteVector4(type, value, target); } private static void WriteColor(MdlFile.VertexType type, Vector4 value, Span target) { - if (type == MdlFile.VertexType.Single4) + if (type == MdlFile.VertexType.Single4 + || type == MdlFile.VertexType.Short4 + || type == MdlFile.VertexType.NShort4 + || type == MdlFile.VertexType.UShort4) { WriteVector4(type, value, target); return; @@ -1192,64 +3019,111 @@ internal static class MdlDecimator private static void WriteBlendIndices(MdlFile.VertexType type, BoneWeight weights, Span target) { - if (type != MdlFile.VertexType.UByte4) + if (type == MdlFile.VertexType.UByte4) { + target[0] = (byte)Math.Clamp(weights.index0, 0, 255); + target[1] = (byte)Math.Clamp(weights.index1, 0, 255); + target[2] = (byte)Math.Clamp(weights.index2, 0, 255); + target[3] = (byte)Math.Clamp(weights.index3, 0, 255); return; } - target[0] = (byte)Math.Clamp(weights.boneIndex0, 0, 255); - target[1] = (byte)Math.Clamp(weights.boneIndex1, 0, 255); - target[2] = (byte)Math.Clamp(weights.boneIndex2, 0, 255); - target[3] = (byte)Math.Clamp(weights.boneIndex3, 0, 255); + if (type == MdlFile.VertexType.UShort4) + { + BinaryPrimitives.WriteUInt16LittleEndian(target[..2], ToUShort(weights.index0)); + BinaryPrimitives.WriteUInt16LittleEndian(target.Slice(2, 2), ToUShort(weights.index1)); + 
BinaryPrimitives.WriteUInt16LittleEndian(target.Slice(4, 2), ToUShort(weights.index2)); + BinaryPrimitives.WriteUInt16LittleEndian(target.Slice(6, 2), ToUShort(weights.index3)); + } } - private static void WriteBlendWeights(MdlFile.VertexType type, BoneWeight weights, Span target) + private static void WriteBlendWeights(MdlFile.VertexType type, BoneWeight weights, BlendWeightEncoding encoding, Span target) { - if (type != MdlFile.VertexType.UByte4 && type != MdlFile.VertexType.NByte4) + if (type == MdlFile.VertexType.Single4) { - if (type == MdlFile.VertexType.Single4) - { - BinaryPrimitives.WriteSingleLittleEndian(target[..4], weights.boneWeight0); - BinaryPrimitives.WriteSingleLittleEndian(target.Slice(4, 4), weights.boneWeight1); - BinaryPrimitives.WriteSingleLittleEndian(target.Slice(8, 4), weights.boneWeight2); - BinaryPrimitives.WriteSingleLittleEndian(target.Slice(12, 4), weights.boneWeight3); - } + BinaryPrimitives.WriteSingleLittleEndian(target[..4], weights.weight0); + BinaryPrimitives.WriteSingleLittleEndian(target.Slice(4, 4), weights.weight1); + BinaryPrimitives.WriteSingleLittleEndian(target.Slice(8, 4), weights.weight2); + BinaryPrimitives.WriteSingleLittleEndian(target.Slice(12, 4), weights.weight3); return; } - var w0 = Clamp01(weights.boneWeight0); - var w1 = Clamp01(weights.boneWeight1); - var w2 = Clamp01(weights.boneWeight2); - var w3 = Clamp01(weights.boneWeight3); - NormalizeWeights(ref w0, ref w1, ref w2, ref w3); + if (type != MdlFile.VertexType.UByte4 + && type != MdlFile.VertexType.NByte4 + && type != MdlFile.VertexType.UShort4 + && type != MdlFile.VertexType.NShort4) + { + return; + } - target[0] = ToByte(w0); - target[1] = ToByte(w1); - target[2] = ToByte(w2); - target[3] = ToByte(w3); + var w0 = Clamp01(weights.weight0); + var w1 = Clamp01(weights.weight1); + var w2 = Clamp01(weights.weight2); + var w3 = Clamp01(weights.weight3); + + if (type == MdlFile.VertexType.UShort4 && encoding == BlendWeightEncoding.UShortAsByte) + { + 
WriteUShort4AsByte(w0, w1, w2, w3, target); + return; + } + + if (type == MdlFile.VertexType.UShort4 || type == MdlFile.VertexType.NShort4) + { + BinaryPrimitives.WriteUInt16LittleEndian(target[..2], ToUShortNormalized(w0)); + BinaryPrimitives.WriteUInt16LittleEndian(target.Slice(2, 2), ToUShortNormalized(w1)); + BinaryPrimitives.WriteUInt16LittleEndian(target.Slice(4, 2), ToUShortNormalized(w2)); + BinaryPrimitives.WriteUInt16LittleEndian(target.Slice(6, 2), ToUShortNormalized(w3)); + return; + } + + WriteByteWeights(w0, w1, w2, w3, target); + } + + private static void WriteUShort4AsByte(float w0, float w1, float w2, float w3, Span target) + { + QuantizeByteWeights(w0, w1, w2, w3, out var b0, out var b1, out var b2, out var b3); + BinaryPrimitives.WriteUInt16LittleEndian(target[..2], (ushort)b0); + BinaryPrimitives.WriteUInt16LittleEndian(target.Slice(2, 2), (ushort)b1); + BinaryPrimitives.WriteUInt16LittleEndian(target.Slice(4, 2), (ushort)b2); + BinaryPrimitives.WriteUInt16LittleEndian(target.Slice(6, 2), (ushort)b3); } private static void WriteUv(MdlFile.VertexType type, UvElementPacking mapping, Vector2[][] uvChannels, int vertexIndex, Span target) { - if (type == MdlFile.VertexType.Half2 || type == MdlFile.VertexType.Single2) + if (type == MdlFile.VertexType.Half2 + || type == MdlFile.VertexType.Single2 + || type == MdlFile.VertexType.Short2 + || type == MdlFile.VertexType.NShort2 + || type == MdlFile.VertexType.UShort2 + || type == MdlFile.VertexType.Single1) { var uv = uvChannels[mapping.FirstChannel][vertexIndex]; WriteVector2(type, uv, target); return; } - if (type == MdlFile.VertexType.Half4 || type == MdlFile.VertexType.Single4) + if (type == MdlFile.VertexType.Half4 + || type == MdlFile.VertexType.Single4 + || type == MdlFile.VertexType.Short4 + || type == MdlFile.VertexType.NShort4 + || type == MdlFile.VertexType.UShort4) { var uv0 = uvChannels[mapping.FirstChannel][vertexIndex]; var uv1 = mapping.SecondChannel.HasValue ? 
uvChannels[mapping.SecondChannel.Value][vertexIndex] - : Vector2.zero; + : Vector2.Zero; WriteVector4(type, new Vector4(uv0.x, uv0.y, uv1.x, uv1.y), target); } } private static void WriteVector2(MdlFile.VertexType type, Vector2 value, Span target) { + if (type == MdlFile.VertexType.Single1) + { + BinaryPrimitives.WriteSingleLittleEndian(target[..4], value.x); + return; + } + if (type == MdlFile.VertexType.Single2) { BinaryPrimitives.WriteSingleLittleEndian(target[..4], value.x); @@ -1261,6 +3135,24 @@ internal static class MdlDecimator { WriteHalf(target[..2], value.x); WriteHalf(target.Slice(2, 2), value.y); + return; + } + + if (type == MdlFile.VertexType.Short2) + { + WriteShort2(value, target); + return; + } + + if (type == MdlFile.VertexType.NShort2) + { + WriteUShort2Normalized(value, target); + return; + } + + if (type == MdlFile.VertexType.UShort2) + { + WriteUShort2Normalized(value, target); } } @@ -1286,6 +3178,12 @@ internal static class MdlDecimator if (type == MdlFile.VertexType.NByte4 && normalized) { WriteNByte4(new Vector4(value.x, value.y, value.z, 0f), target); + return; + } + + if (type == MdlFile.VertexType.NShort4 && normalized) + { + WriteNShort4(new Vector4(value.x, value.y, value.z, 0f), target); } } @@ -1308,6 +3206,23 @@ internal static class MdlDecimator WriteHalf(target.Slice(6, 2), value.w); return; } + + if (type == MdlFile.VertexType.Short4) + { + WriteShort4(value, target); + return; + } + + if (type == MdlFile.VertexType.NShort4) + { + WriteUShort4Normalized(value, target); + return; + } + + if (type == MdlFile.VertexType.UShort4) + { + WriteUShort4Normalized(value, target); + } } private static void WriteUByte4(Vector4 value, Span target) @@ -1320,10 +3235,62 @@ internal static class MdlDecimator private static void WriteNByte4(Vector4 value, Span target) { - var normalized = (value * 0.5f) + new Vector4(0.5f); + var normalized = (value * 0.5f) + new Vector4(0.5f, 0.5f, 0.5f, 0.5f); WriteUByte4(normalized, target); } + private 
static void WriteShort2(Vector2 value, Span target) + { + BinaryPrimitives.WriteInt16LittleEndian(target[..2], ToShort(value.x)); + BinaryPrimitives.WriteInt16LittleEndian(target.Slice(2, 2), ToShort(value.y)); + } + + private static void WriteShort4(Vector4 value, Span target) + { + BinaryPrimitives.WriteInt16LittleEndian(target[..2], ToShort(value.x)); + BinaryPrimitives.WriteInt16LittleEndian(target.Slice(2, 2), ToShort(value.y)); + BinaryPrimitives.WriteInt16LittleEndian(target.Slice(4, 2), ToShort(value.z)); + BinaryPrimitives.WriteInt16LittleEndian(target.Slice(6, 2), ToShort(value.w)); + } + + /* same thing as read here, we don't need to write currently either + private static void WriteUShort2(Vector2 value, Span target) + { + BinaryPrimitives.WriteUInt16LittleEndian(target[..2], ToUShort(value.x)); + BinaryPrimitives.WriteUInt16LittleEndian(target.Slice(2, 2), ToUShort(value.y)); + } + + private static void WriteUShort4(Vector4 value, Span target) + { + BinaryPrimitives.WriteUInt16LittleEndian(target[..2], ToUShort(value.x)); + BinaryPrimitives.WriteUInt16LittleEndian(target.Slice(2, 2), ToUShort(value.y)); + BinaryPrimitives.WriteUInt16LittleEndian(target.Slice(4, 2), ToUShort(value.z)); + BinaryPrimitives.WriteUInt16LittleEndian(target.Slice(6, 2), ToUShort(value.w)); + } + */ + + private static void WriteUShort2Normalized(Vector2 value, Span target) + { + BinaryPrimitives.WriteUInt16LittleEndian(target[..2], ToUShortNormalized(value.x)); + BinaryPrimitives.WriteUInt16LittleEndian(target.Slice(2, 2), ToUShortNormalized(value.y)); + } + + private static void WriteUShort4Normalized(Vector4 value, Span target) + { + BinaryPrimitives.WriteUInt16LittleEndian(target[..2], ToUShortNormalized(value.x)); + BinaryPrimitives.WriteUInt16LittleEndian(target.Slice(2, 2), ToUShortNormalized(value.y)); + BinaryPrimitives.WriteUInt16LittleEndian(target.Slice(4, 2), ToUShortNormalized(value.z)); + BinaryPrimitives.WriteUInt16LittleEndian(target.Slice(6, 2), 
ToUShortNormalized(value.w)); + } + + private static void WriteNShort4(Vector4 value, Span target) + { + BinaryPrimitives.WriteUInt16LittleEndian(target[..2], ToUShortSnorm(value.x)); + BinaryPrimitives.WriteUInt16LittleEndian(target.Slice(2, 2), ToUShortSnorm(value.y)); + BinaryPrimitives.WriteUInt16LittleEndian(target.Slice(4, 2), ToUShortSnorm(value.z)); + BinaryPrimitives.WriteUInt16LittleEndian(target.Slice(6, 2), ToUShortSnorm(value.w)); + } + private static void WriteHalf(Span target, float value) { var half = (Half)value; @@ -1336,9 +3303,127 @@ internal static class MdlDecimator private static float Clamp01(float value) => Math.Clamp(value, 0f, 1f); + private static float ClampMinusOneToOne(float value) + => Math.Clamp(value, -1f, 1f); + private static byte ToByte(float value) => (byte)Math.Clamp((int)Math.Round(value * 255f), 0, 255); + private static void WriteByteWeights(float w0, float w1, float w2, float w3, Span target) + { + QuantizeByteWeights(w0, w1, w2, w3, out var b0, out var b1, out var b2, out var b3); + target[0] = (byte)b0; + target[1] = (byte)b1; + target[2] = (byte)b2; + target[3] = (byte)b3; + } + + private static void QuantizeByteWeights(float w0, float w1, float w2, float w3, out int b0, out int b1, out int b2, out int b3) + { + var sum = w0 + w1 + w2 + w3; + if (sum <= 1e-6f) + { + w0 = 1f; + w1 = 0f; + w2 = 0f; + w3 = 0f; + sum = 1f; + } + + var targetSum = (int)MathF.Round(sum * 255f); + if (sum > 0f && targetSum == 0) + { + targetSum = 1; + } + + targetSum = Math.Clamp(targetSum, 0, 255); + if (targetSum == 0) + { + b0 = 0; + b1 = 0; + b2 = 0; + b3 = 0; + return; + } + + var scale = targetSum / sum; + var scaled0 = w0 * scale; + var scaled1 = w1 * scale; + var scaled2 = w2 * scale; + var scaled3 = w3 * scale; + + b0 = (int)MathF.Floor(scaled0); + b1 = (int)MathF.Floor(scaled1); + b2 = (int)MathF.Floor(scaled2); + b3 = (int)MathF.Floor(scaled3); + + var remainder = targetSum - (b0 + b1 + b2 + b3); + if (remainder > 0) + { + Span 
fractions = stackalloc float[4]; + fractions[0] = scaled0 - b0; + fractions[1] = scaled1 - b1; + fractions[2] = scaled2 - b2; + fractions[3] = scaled3 - b3; + + Span order = stackalloc int[4] { 0, 1, 2, 3 }; + for (var i = 0; i < order.Length - 1; i++) + { + for (var j = i + 1; j < order.Length; j++) + { + if (fractions[order[j]] > fractions[order[i]]) + { + (order[i], order[j]) = (order[j], order[i]); + } + } + } + + for (var i = 0; i < remainder && i < order.Length; i++) + { + switch (order[i]) + { + case 0: + b0++; + break; + case 1: + b1++; + break; + case 2: + b2++; + break; + case 3: + b3++; + break; + } + } + } + + b0 = Math.Clamp(b0, 0, 255); + b1 = Math.Clamp(b1, 0, 255); + b2 = Math.Clamp(b2, 0, 255); + b3 = Math.Clamp(b3, 0, 255); + } + + private static short ToShort(float value) + => (short)Math.Clamp((int)Math.Round(value), short.MinValue, short.MaxValue); + + private static ushort ToUShort(int value) + => (ushort)Math.Clamp(value, ushort.MinValue, ushort.MaxValue); + + /* + private static ushort ToUShort(float value) + => (ushort)Math.Clamp((int)Math.Round(value), ushort.MinValue, ushort.MaxValue); + */ + + private static ushort ToUShortNormalized(float value) + => (ushort)Math.Clamp((int)Math.Round(Clamp01(value) * ushort.MaxValue), ushort.MinValue, ushort.MaxValue); + + private static ushort ToUShortSnorm(float value) + { + var normalized = (ClampMinusOneToOne(value) * 0.5f) + 0.5f; + return ToUShortNormalized(normalized); + } + private static void NormalizeWeights(float[] weights) { var sum = weights.Sum(); @@ -1367,9 +3452,54 @@ internal static class MdlDecimator w3 /= sum; } + private static BlendWeightEncoding DetectBlendWeightEncoding(MdlFile mdl, int lodIndex, MeshStruct mesh, VertexFormat format) + { + if (!format.BlendWeightsElement.HasValue) + { + return BlendWeightEncoding.Default; + } + + var blendWeightsElement = format.BlendWeightsElement.Value; + if ((MdlFile.VertexType)blendWeightsElement.Type != MdlFile.VertexType.UShort4) + { + 
return BlendWeightEncoding.Default; + } + + var stride = mesh.VertexBufferStride(blendWeightsElement.Stream); + if (stride == 0 || mesh.VertexCount == 0) + { + return BlendWeightEncoding.Default; + } + + var elementSize = GetElementSize(MdlFile.VertexType.UShort4); + var baseOffset = (int)(mdl.VertexOffset[lodIndex] + mesh.VertexBufferOffset(blendWeightsElement.Stream)); + var data = mdl.RemainingData.AsSpan(); + + for (var vertexIndex = 0; vertexIndex < mesh.VertexCount; vertexIndex++) + { + var offset = baseOffset + (vertexIndex * stride) + blendWeightsElement.Offset; + if (offset < 0 || offset + elementSize > data.Length) + { + return BlendWeightEncoding.Default; + } + + var w0 = BinaryPrimitives.ReadUInt16LittleEndian(data.Slice(offset, 2)); + var w1 = BinaryPrimitives.ReadUInt16LittleEndian(data.Slice(offset + 2, 2)); + var w2 = BinaryPrimitives.ReadUInt16LittleEndian(data.Slice(offset + 4, 2)); + var w3 = BinaryPrimitives.ReadUInt16LittleEndian(data.Slice(offset + 6, 2)); + if (w0 > byte.MaxValue || w1 > byte.MaxValue || w2 > byte.MaxValue || w3 > byte.MaxValue) + { + return BlendWeightEncoding.Default; + } + } + + return BlendWeightEncoding.UShortAsByte; + } + private static int GetElementSize(MdlFile.VertexType type) => type switch { + MdlFile.VertexType.Single1 => 4, MdlFile.VertexType.Single2 => 8, MdlFile.VertexType.Single3 => 12, MdlFile.VertexType.Single4 => 16, @@ -1377,9 +3507,21 @@ internal static class MdlDecimator MdlFile.VertexType.Half4 => 8, MdlFile.VertexType.UByte4 => 4, MdlFile.VertexType.NByte4 => 4, + MdlFile.VertexType.Short2 => 4, + MdlFile.VertexType.Short4 => 8, + MdlFile.VertexType.NShort2 => 4, + MdlFile.VertexType.NShort4 => 8, + MdlFile.VertexType.UShort2 => 4, + MdlFile.VertexType.UShort4 => 8, _ => throw new InvalidOperationException($"Unsupported vertex type {type}"), }; + private enum BlendWeightEncoding + { + Default, + UShortAsByte, + } + private readonly record struct ElementKey(byte Stream, byte Offset, byte Type, byte 
Usage, byte UsageIndex) { public static ElementKey From(MdlStructs.VertexElement element) @@ -1390,8 +3532,10 @@ internal static class MdlDecimator { public VertexFormat( List sortedElements, + MdlStructs.VertexElement positionElement, MdlStructs.VertexElement? normalElement, - MdlStructs.VertexElement? tangentElement, + MdlStructs.VertexElement? tangent1Element, + MdlStructs.VertexElement? tangent2Element, MdlStructs.VertexElement? colorElement, MdlStructs.VertexElement? blendIndicesElement, MdlStructs.VertexElement? blendWeightsElement, @@ -1399,8 +3543,10 @@ internal static class MdlDecimator int uvChannelCount) { SortedElements = sortedElements; + PositionElement = positionElement; NormalElement = normalElement; - TangentElement = tangentElement; + Tangent1Element = tangent1Element; + Tangent2Element = tangent2Element; ColorElement = colorElement; BlendIndicesElement = blendIndicesElement; BlendWeightsElement = blendWeightsElement; @@ -1409,8 +3555,10 @@ internal static class MdlDecimator } public List SortedElements { get; } + public MdlStructs.VertexElement PositionElement { get; } public MdlStructs.VertexElement? NormalElement { get; } - public MdlStructs.VertexElement? TangentElement { get; } + public MdlStructs.VertexElement? Tangent1Element { get; } + public MdlStructs.VertexElement? Tangent2Element { get; } public MdlStructs.VertexElement? ColorElement { get; } public MdlStructs.VertexElement? BlendIndicesElement { get; } public MdlStructs.VertexElement? 
BlendWeightsElement { get; } @@ -1418,9 +3566,12 @@ internal static class MdlDecimator public int UvChannelCount { get; } public bool HasNormals => NormalElement.HasValue; - public bool HasTangents => TangentElement.HasValue; + public bool HasTangent1 => Tangent1Element.HasValue; + public bool HasTangent2 => Tangent2Element.HasValue; public bool HasColors => ColorElement.HasValue; public bool HasSkinning => BlendIndicesElement.HasValue && BlendWeightsElement.HasValue; + public bool HasPositionW => (MdlFile.VertexType)PositionElement.Type == MdlFile.VertexType.Single4; + public bool HasNormalW => NormalElement.HasValue && (MdlFile.VertexType)NormalElement.Value.Type == MdlFile.VertexType.Single4; } private readonly record struct UvElementPacking(MdlStructs.VertexElement Element, int FirstChannel, int? SecondChannel); @@ -1431,28 +3582,40 @@ internal static class MdlDecimator Vector3d[] positions, Vector3[]? normals, Vector4[]? tangents, + Vector4[]? tangents2, Vector4[]? colors, BoneWeight[]? boneWeights, - Vector2[][]? uvChannels) + Vector2[][]? uvChannels, + float[]? positionWs, + float[]? normalWs, + BlendWeightEncoding blendWeightEncoding) { Positions = positions; Normals = normals; Tangents = tangents; + Tangents2 = tangents2; Colors = colors; BoneWeights = boneWeights; UvChannels = uvChannels; + PositionWs = positionWs; + NormalWs = normalWs; + BlendWeightEncoding = blendWeightEncoding; } public Vector3d[] Positions { get; } public Vector3[]? Normals { get; } public Vector4[]? Tangents { get; } + public Vector4[]? Tangents2 { get; } public Vector4[]? Colors { get; } public BoneWeight[]? BoneWeights { get; } public Vector2[][]? UvChannels { get; } + public float[]? PositionWs { get; } + public float[]? 
NormalWs { get; } + public BlendWeightEncoding BlendWeightEncoding { get; } } } -internal static class MeshDecimatorVectorExtensions +internal static class NanomeshVectorExtensions { public static Vector3 ToVector3(this Vector4 value) => new(value.x, value.y, value.z); diff --git a/LightlessSync/Services/ModelDecimation/ModelDecimationService.cs b/LightlessSync/Services/ModelDecimation/ModelDecimationService.cs index f666805..00406f6 100644 --- a/LightlessSync/Services/ModelDecimation/ModelDecimationService.cs +++ b/LightlessSync/Services/ModelDecimation/ModelDecimationService.cs @@ -1,5 +1,7 @@ using LightlessSync.FileCache; using LightlessSync.LightlessConfiguration; +using LightlessSync.LightlessConfiguration.Configurations; +using LightlessSync.Utils; using Microsoft.Extensions.Logging; using System.Collections.Concurrent; using System.Globalization; @@ -19,7 +21,7 @@ public sealed class ModelDecimationService private readonly XivDataStorageService _xivDataStorageService; private readonly SemaphoreSlim _decimationSemaphore = new(MaxConcurrentJobs); - private readonly ConcurrentDictionary _activeJobs = new(StringComparer.OrdinalIgnoreCase); + private readonly TaskRegistry _decimationDeduplicator = new(); private readonly ConcurrentDictionary _decimatedPaths = new(StringComparer.OrdinalIgnoreCase); private readonly ConcurrentDictionary _failedHashes = new(StringComparer.OrdinalIgnoreCase); @@ -44,14 +46,14 @@ public sealed class ModelDecimationService return; } - if (_decimatedPaths.ContainsKey(hash) || _failedHashes.ContainsKey(hash) || _activeJobs.ContainsKey(hash)) + if (_decimatedPaths.ContainsKey(hash) || _failedHashes.ContainsKey(hash) || _decimationDeduplicator.TryGetExisting(hash, out _)) { return; } - _logger.LogInformation("Queued model decimation for {Hash}", hash); + _logger.LogDebug("Queued model decimation for {Hash}", hash); - _activeJobs[hash] = Task.Run(async () => + _decimationDeduplicator.GetOrStart(hash, async () => { await 
_decimationSemaphore.WaitAsync().ConfigureAwait(false); try @@ -66,16 +68,54 @@ public sealed class ModelDecimationService finally { _decimationSemaphore.Release(); - _activeJobs.TryRemove(hash, out _); } - }, CancellationToken.None); + }); + } + + public void ScheduleBatchDecimation(string hash, string filePath, ModelDecimationSettings settings) + { + if (!ShouldScheduleBatchDecimation(hash, filePath, settings)) + { + return; + } + + if (_decimationDeduplicator.TryGetExisting(hash, out _)) + { + return; + } + + _failedHashes.TryRemove(hash, out _); + _decimatedPaths.TryRemove(hash, out _); + + _logger.LogInformation("Queued batch model decimation for {Hash}", hash); + + _decimationDeduplicator.GetOrStart(hash, async () => + { + await _decimationSemaphore.WaitAsync().ConfigureAwait(false); + try + { + await DecimateInternalAsync(hash, filePath, settings, allowExisting: false, destinationOverride: filePath, registerDecimatedPath: false).ConfigureAwait(false); + } + catch (Exception ex) + { + _failedHashes[hash] = 1; + _logger.LogWarning(ex, "Batch model decimation failed for {Hash}", hash); + } + finally + { + _decimationSemaphore.Release(); + } + }); } public bool ShouldScheduleDecimation(string hash, string filePath, string? 
gamePath = null) - => IsDecimationEnabled() + { + var threshold = Math.Max(0, _performanceConfigService.Current.ModelDecimationTriangleThreshold); + return IsDecimationEnabled() && filePath.EndsWith(".mdl", StringComparison.OrdinalIgnoreCase) && IsDecimationAllowed(gamePath) - && !ShouldSkipByTriangleCache(hash); + && !ShouldSkipByTriangleCache(hash, threshold); + } public string GetPreferredPath(string hash, string originalPath) { @@ -116,7 +156,7 @@ public sealed class ModelDecimationService continue; } - if (_activeJobs.TryGetValue(hash, out var job)) + if (_decimationDeduplicator.TryGetExisting(hash, out var job)) { pending.Add(job); } @@ -131,6 +171,23 @@ public sealed class ModelDecimationService } private Task DecimateInternalAsync(string hash, string sourcePath) + { + if (!TryGetDecimationSettings(out var settings)) + { + _logger.LogDebug("Model decimation disabled or invalid settings for {Hash}", hash); + return Task.CompletedTask; + } + + return DecimateInternalAsync(hash, sourcePath, settings, allowExisting: true); + } + + private Task DecimateInternalAsync( + string hash, + string sourcePath, + ModelDecimationSettings settings, + bool allowExisting, + string? 
destinationOverride = null, + bool registerDecimatedPath = true) { if (!File.Exists(sourcePath)) { @@ -139,30 +196,48 @@ public sealed class ModelDecimationService return Task.CompletedTask; } - if (!TryGetDecimationSettings(out var triangleThreshold, out var targetRatio)) + if (!TryNormalizeSettings(settings, out var normalized)) { - _logger.LogInformation("Model decimation disabled or invalid settings for {Hash}", hash); + _logger.LogDebug("Model decimation skipped for {Hash}; invalid settings.", hash); return Task.CompletedTask; } - _logger.LogInformation("Starting model decimation for {Hash} (threshold {Threshold}, ratio {Ratio:0.##})", hash, triangleThreshold, targetRatio); + _logger.LogDebug( + "Starting model decimation for {Hash} (threshold {Threshold}, ratio {Ratio:0.##}, normalize tangents {NormalizeTangents}, avoid body intersection {AvoidBodyIntersection})", + hash, + normalized.TriangleThreshold, + normalized.TargetRatio, + normalized.NormalizeTangents, + normalized.AvoidBodyIntersection); - var destination = Path.Combine(GetDecimatedDirectory(), $"{hash}.mdl"); - if (File.Exists(destination)) + var destination = destinationOverride ?? 
Path.Combine(GetDecimatedDirectory(), $"{hash}.mdl"); + var inPlace = string.Equals(destination, sourcePath, StringComparison.OrdinalIgnoreCase); + if (!inPlace && File.Exists(destination)) { - RegisterDecimatedModel(hash, sourcePath, destination); - return Task.CompletedTask; + if (allowExisting) + { + if (registerDecimatedPath) + { + RegisterDecimatedModel(hash, sourcePath, destination); + } + return Task.CompletedTask; + } + + TryDelete(destination); } - if (!MdlDecimator.TryDecimate(sourcePath, destination, triangleThreshold, targetRatio, _logger)) + if (!MdlDecimator.TryDecimate(sourcePath, destination, normalized, _logger)) { _failedHashes[hash] = 1; - _logger.LogInformation("Model decimation skipped for {Hash}", hash); + _logger.LogDebug("Model decimation skipped for {Hash}", hash); return Task.CompletedTask; } - RegisterDecimatedModel(hash, sourcePath, destination); - _logger.LogInformation("Decimated model {Hash} -> {Path}", hash, destination); + if (registerDecimatedPath) + { + RegisterDecimatedModel(hash, sourcePath, destination); + } + _logger.LogDebug("Decimated model {Hash} -> {Path}", hash, destination); return Task.CompletedTask; } @@ -250,7 +325,7 @@ public sealed class ModelDecimationService private bool IsDecimationEnabled() => _performanceConfigService.Current.EnableModelDecimation; - private bool ShouldSkipByTriangleCache(string hash) + private bool ShouldSkipByTriangleCache(string hash, int triangleThreshold) { if (string.IsNullOrEmpty(hash)) { @@ -262,7 +337,7 @@ public sealed class ModelDecimationService return false; } - var threshold = Math.Max(0, _performanceConfigService.Current.ModelDecimationTriangleThreshold); + var threshold = Math.Max(0, triangleThreshold); return threshold > 0 && cachedTris < threshold; } @@ -313,10 +388,14 @@ public sealed class ModelDecimationService private static string NormalizeGamePath(string path) => path.Replace('\\', '/').ToLowerInvariant(); - private bool TryGetDecimationSettings(out int 
triangleThreshold, out double targetRatio) + private bool TryGetDecimationSettings(out ModelDecimationSettings settings) { - triangleThreshold = 15_000; - targetRatio = 0.8; + settings = new ModelDecimationSettings( + ModelDecimationDefaults.TriangleThreshold, + ModelDecimationDefaults.TargetRatio, + ModelDecimationDefaults.NormalizeTangents, + ModelDecimationDefaults.AvoidBodyIntersection, + new ModelDecimationAdvancedSettings()); var config = _performanceConfigService.Current; if (!config.EnableModelDecimation) @@ -324,14 +403,86 @@ public sealed class ModelDecimationService return false; } - triangleThreshold = Math.Max(0, config.ModelDecimationTriangleThreshold); - targetRatio = config.ModelDecimationTargetRatio; - if (double.IsNaN(targetRatio) || double.IsInfinity(targetRatio)) + var advanced = NormalizeAdvancedSettings(config.ModelDecimationAdvanced); + settings = new ModelDecimationSettings( + Math.Max(0, config.ModelDecimationTriangleThreshold), + config.ModelDecimationTargetRatio, + config.ModelDecimationNormalizeTangents, + config.ModelDecimationAvoidBodyIntersection, + advanced); + + return TryNormalizeSettings(settings, out settings); + } + + private static bool TryNormalizeSettings(ModelDecimationSettings settings, out ModelDecimationSettings normalized) + { + var ratio = settings.TargetRatio; + if (double.IsNaN(ratio) || double.IsInfinity(ratio)) + { + normalized = default; + return false; + } + + ratio = Math.Clamp(ratio, MinTargetRatio, MaxTargetRatio); + var advanced = NormalizeAdvancedSettings(settings.Advanced); + normalized = new ModelDecimationSettings( + Math.Max(0, settings.TriangleThreshold), + ratio, + settings.NormalizeTangents, + settings.AvoidBodyIntersection, + advanced); + return true; + } + + private static ModelDecimationAdvancedSettings NormalizeAdvancedSettings(ModelDecimationAdvancedSettings? settings) + { + var source = settings ?? 
new ModelDecimationAdvancedSettings(); + return new ModelDecimationAdvancedSettings + { + MinComponentTriangles = Math.Clamp(source.MinComponentTriangles, 0, 1000), + MaxCollapseEdgeLengthFactor = ClampFloat(source.MaxCollapseEdgeLengthFactor, 0.1f, 10f, ModelDecimationAdvancedSettings.DefaultMaxCollapseEdgeLengthFactor), + NormalSimilarityThresholdDegrees = ClampFloat(source.NormalSimilarityThresholdDegrees, 0f, 180f, ModelDecimationAdvancedSettings.DefaultNormalSimilarityThresholdDegrees), + BoneWeightSimilarityThreshold = ClampFloat(source.BoneWeightSimilarityThreshold, 0f, 1f, ModelDecimationAdvancedSettings.DefaultBoneWeightSimilarityThreshold), + UvSimilarityThreshold = ClampFloat(source.UvSimilarityThreshold, 0f, 1f, ModelDecimationAdvancedSettings.DefaultUvSimilarityThreshold), + UvSeamAngleCos = ClampFloat(source.UvSeamAngleCos, -1f, 1f, ModelDecimationAdvancedSettings.DefaultUvSeamAngleCos), + BlockUvSeamVertices = source.BlockUvSeamVertices, + AllowBoundaryCollapses = source.AllowBoundaryCollapses, + BodyCollisionDistanceFactor = ClampFloat(source.BodyCollisionDistanceFactor, 0f, 10f, ModelDecimationAdvancedSettings.DefaultBodyCollisionDistanceFactor), + BodyCollisionNoOpDistanceFactor = ClampFloat(source.BodyCollisionNoOpDistanceFactor, 0f, 10f, ModelDecimationAdvancedSettings.DefaultBodyCollisionNoOpDistanceFactor), + BodyCollisionAdaptiveRelaxFactor = ClampFloat(source.BodyCollisionAdaptiveRelaxFactor, 0f, 10f, ModelDecimationAdvancedSettings.DefaultBodyCollisionAdaptiveRelaxFactor), + BodyCollisionAdaptiveNearRatio = ClampFloat(source.BodyCollisionAdaptiveNearRatio, 0f, 1f, ModelDecimationAdvancedSettings.DefaultBodyCollisionAdaptiveNearRatio), + BodyCollisionAdaptiveUvThreshold = ClampFloat(source.BodyCollisionAdaptiveUvThreshold, 0f, 1f, ModelDecimationAdvancedSettings.DefaultBodyCollisionAdaptiveUvThreshold), + BodyCollisionNoOpUvSeamAngleCos = ClampFloat(source.BodyCollisionNoOpUvSeamAngleCos, -1f, 1f, 
ModelDecimationAdvancedSettings.DefaultBodyCollisionNoOpUvSeamAngleCos), + BodyCollisionProtectionFactor = ClampFloat(source.BodyCollisionProtectionFactor, 0f, 10f, ModelDecimationAdvancedSettings.DefaultBodyCollisionProtectionFactor), + BodyProxyTargetRatioMin = ClampFloat(source.BodyProxyTargetRatioMin, 0f, 1f, ModelDecimationAdvancedSettings.DefaultBodyProxyTargetRatioMin), + BodyCollisionProxyInflate = ClampFloat(source.BodyCollisionProxyInflate, 0f, 0.1f, ModelDecimationAdvancedSettings.DefaultBodyCollisionProxyInflate), + BodyCollisionPenetrationFactor = ClampFloat(source.BodyCollisionPenetrationFactor, 0f, 1f, ModelDecimationAdvancedSettings.DefaultBodyCollisionPenetrationFactor), + MinBodyCollisionDistance = ClampFloat(source.MinBodyCollisionDistance, 1e-6f, 1f, ModelDecimationAdvancedSettings.DefaultMinBodyCollisionDistance), + MinBodyCollisionCellSize = ClampFloat(source.MinBodyCollisionCellSize, 1e-6f, 1f, ModelDecimationAdvancedSettings.DefaultMinBodyCollisionCellSize), + }; + } + + private static float ClampFloat(float value, float min, float max, float fallback) + { + if (float.IsNaN(value) || float.IsInfinity(value)) + { + return fallback; + } + + return Math.Clamp(value, min, max); + } + + private bool ShouldScheduleBatchDecimation(string hash, string filePath, ModelDecimationSettings settings) + { + if (string.IsNullOrWhiteSpace(filePath) || !filePath.EndsWith(".mdl", StringComparison.OrdinalIgnoreCase)) { return false; } - targetRatio = Math.Clamp(targetRatio, MinTargetRatio, MaxTargetRatio); + if (!TryNormalizeSettings(settings, out _)) + { + return false; + } return true; } diff --git a/LightlessSync/Services/ModelDecimation/ModelDecimationSettings.cs b/LightlessSync/Services/ModelDecimation/ModelDecimationSettings.cs new file mode 100644 index 0000000..4b5adc2 --- /dev/null +++ b/LightlessSync/Services/ModelDecimation/ModelDecimationSettings.cs @@ -0,0 +1,10 @@ +using LightlessSync.LightlessConfiguration.Configurations; + +namespace 
LightlessSync.Services.ModelDecimation; + +public readonly record struct ModelDecimationSettings( + int TriangleThreshold, + double TargetRatio, + bool NormalizeTangents, + bool AvoidBodyIntersection, + ModelDecimationAdvancedSettings Advanced); diff --git a/LightlessSync/Services/PenumbraTempCollectionJanitor.cs b/LightlessSync/Services/PenumbraTempCollectionJanitor.cs index 03fb53b..87d37ac 100644 --- a/LightlessSync/Services/PenumbraTempCollectionJanitor.cs +++ b/LightlessSync/Services/PenumbraTempCollectionJanitor.cs @@ -1,4 +1,6 @@ -using LightlessSync.Interop.Ipc; +using System.Linq; +using LightlessSync.Interop.Ipc; +using LightlessSync.LightlessConfiguration.Models; using LightlessSync.LightlessConfiguration; using LightlessSync.Services.Mediator; using Microsoft.Extensions.Logging; @@ -10,6 +12,7 @@ public sealed class PenumbraTempCollectionJanitor : DisposableMediatorSubscriber private readonly IpcManager _ipc; private readonly LightlessConfigService _config; private int _ran; + private static readonly TimeSpan OrphanCleanupDelay = TimeSpan.FromDays(1); public PenumbraTempCollectionJanitor( ILogger logger, @@ -26,15 +29,46 @@ public sealed class PenumbraTempCollectionJanitor : DisposableMediatorSubscriber public void Register(Guid id) { if (id == Guid.Empty) return; - if (_config.Current.OrphanableTempCollections.Add(id)) + var changed = false; + var config = _config.Current; + if (config.OrphanableTempCollections.Add(id)) + { + changed = true; + } + + var now = DateTime.UtcNow; + var existing = config.OrphanableTempCollectionEntries.FirstOrDefault(entry => entry.Id == id); + if (existing is null) + { + config.OrphanableTempCollectionEntries.Add(new OrphanableTempCollectionEntry + { + Id = id, + RegisteredAtUtc = now + }); + changed = true; + } + else if (existing.RegisteredAtUtc == DateTime.MinValue) + { + existing.RegisteredAtUtc = now; + changed = true; + } + + if (changed) + { _config.Save(); + } } public void Unregister(Guid id) { if (id == 
Guid.Empty) return; - if (_config.Current.OrphanableTempCollections.Remove(id)) + var config = _config.Current; + var changed = config.OrphanableTempCollections.Remove(id); + changed |= RemoveEntry(config.OrphanableTempCollectionEntries, id) > 0; + if (changed) + { _config.Save(); + } } private void CleanupOrphansOnBoot() @@ -45,14 +79,33 @@ public sealed class PenumbraTempCollectionJanitor : DisposableMediatorSubscriber if (!_ipc.Penumbra.APIAvailable) return; - var ids = _config.Current.OrphanableTempCollections.ToArray(); - if (ids.Length == 0) + var config = _config.Current; + var ids = config.OrphanableTempCollections; + var entries = config.OrphanableTempCollectionEntries; + if (ids.Count == 0 && entries.Count == 0) return; - var appId = Guid.NewGuid(); - Logger.LogInformation("Cleaning up {count} orphaned Lightless temp collections found in configuration", ids.Length); + var now = DateTime.UtcNow; + var changed = EnsureEntries(ids, entries, now); + var cutoff = now - OrphanCleanupDelay; + var expired = entries + .Where(entry => entry.Id != Guid.Empty && entry.RegisteredAtUtc != DateTime.MinValue && entry.RegisteredAtUtc <= cutoff) + .Select(entry => entry.Id) + .Distinct() + .ToList(); + if (expired.Count == 0) + { + if (changed) + { + _config.Save(); + } + return; + } - foreach (var id in ids) + var appId = Guid.NewGuid(); + Logger.LogInformation("Cleaning up {count} orphaned Lightless temp collections older than {delay}", expired.Count, OrphanCleanupDelay); + + foreach (var id in expired) { try { @@ -65,7 +118,70 @@ public sealed class PenumbraTempCollectionJanitor : DisposableMediatorSubscriber } } - _config.Current.OrphanableTempCollections.Clear(); + foreach (var id in expired) + { + ids.Remove(id); + } + + foreach (var id in expired) + { + RemoveEntry(entries, id); + } + _config.Save(); } -} \ No newline at end of file + + private static int RemoveEntry(List entries, Guid id) + { + var removed = 0; + for (var i = entries.Count - 1; i >= 0; i--) + { + 
if (entries[i].Id != id) + { + continue; + } + + entries.RemoveAt(i); + removed++; + } + + return removed; + } + + private static bool EnsureEntries(HashSet ids, List entries, DateTime now) + { + var changed = false; + foreach (var id in ids) + { + if (id == Guid.Empty) + { + continue; + } + + if (entries.Any(entry => entry.Id == id)) + { + continue; + } + + entries.Add(new OrphanableTempCollectionEntry + { + Id = id, + RegisteredAtUtc = now + }); + changed = true; + } + + foreach (var entry in entries) + { + if (entry.Id == Guid.Empty || entry.RegisteredAtUtc != DateTime.MinValue) + { + continue; + } + + entry.RegisteredAtUtc = now; + changed = true; + } + + return changed; + } +} diff --git a/LightlessSync/Services/PerformanceCollectorService.cs b/LightlessSync/Services/PerformanceCollectorService.cs index 75fe736..5bec813 100644 --- a/LightlessSync/Services/PerformanceCollectorService.cs +++ b/LightlessSync/Services/PerformanceCollectorService.cs @@ -131,7 +131,10 @@ public sealed class PerformanceCollectorService : IHostedService DrawSeparator(sb, longestCounterName); } - var pastEntries = limitBySeconds > 0 ? entry.Value.Where(e => e.Item1.AddMinutes(limitBySeconds / 60.0d) >= TimeOnly.FromDateTime(DateTime.Now)).ToList() : [.. entry.Value]; + var snapshot = entry.Value.Snapshot(); + var pastEntries = limitBySeconds > 0 + ? 
snapshot.Where(e => e.Item1.AddMinutes(limitBySeconds / 60.0d) >= TimeOnly.FromDateTime(DateTime.Now)).ToList() + : snapshot; if (pastEntries.Any()) { @@ -189,7 +192,11 @@ public sealed class PerformanceCollectorService : IHostedService { try { - var last = entries.Value.ToList()[^1]; + if (!entries.Value.TryGetLast(out var last)) + { + continue; + } + if (last.Item1.AddMinutes(10) < TimeOnly.FromDateTime(DateTime.Now) && !PerformanceCounters.TryRemove(entries.Key, out _)) { _logger.LogDebug("Could not remove performance counter {counter}", entries.Key); diff --git a/LightlessSync/Services/TextureCompression/TextureCompressionService.cs b/LightlessSync/Services/TextureCompression/TextureCompressionService.cs index c31539f..81b3c52 100644 --- a/LightlessSync/Services/TextureCompression/TextureCompressionService.cs +++ b/LightlessSync/Services/TextureCompression/TextureCompressionService.cs @@ -2,6 +2,7 @@ using LightlessSync.Interop.Ipc; using LightlessSync.FileCache; using Microsoft.Extensions.Logging; using Penumbra.Api.Enums; +using System.Globalization; namespace LightlessSync.Services.TextureCompression; @@ -27,7 +28,9 @@ public sealed class TextureCompressionService public async Task ConvertTexturesAsync( IReadOnlyList requests, IProgress? progress, - CancellationToken token) + CancellationToken token, + bool requestRedraw = true, + bool includeMipMaps = true) { if (requests.Count == 0) { @@ -48,7 +51,7 @@ public sealed class TextureCompressionService continue; } - await RunPenumbraConversionAsync(request, textureType, total, completed, progress, token).ConfigureAwait(false); + await RunPenumbraConversionAsync(request, textureType, total, completed, progress, token, requestRedraw, includeMipMaps).ConfigureAwait(false); completed++; } @@ -65,14 +68,16 @@ public sealed class TextureCompressionService int total, int completedBefore, IProgress? 
progress, - CancellationToken token) + CancellationToken token, + bool requestRedraw, + bool includeMipMaps) { var primaryPath = request.PrimaryFilePath; var displayJob = new TextureConversionJob( primaryPath, primaryPath, targetType, - IncludeMipMaps: true, + IncludeMipMaps: includeMipMaps, request.DuplicateFilePaths); var backupPath = CreateBackupCopy(primaryPath); @@ -83,7 +88,7 @@ public sealed class TextureCompressionService try { WaitForAccess(primaryPath); - await _ipcManager.Penumbra.ConvertTextureFiles(_logger, new[] { conversionJob }, null, token).ConfigureAwait(false); + await _ipcManager.Penumbra.ConvertTextureFiles(_logger, new[] { conversionJob }, null, token, requestRedraw).ConfigureAwait(false); if (!IsValidConversionResult(displayJob.OutputFile)) { @@ -128,19 +133,46 @@ public sealed class TextureCompressionService var cacheEntries = _fileCacheManager.GetFileCachesByPaths(paths.ToArray()); foreach (var path in paths) { + var hasExpectedHash = TryGetExpectedHashFromPath(path, out var expectedHash); if (!cacheEntries.TryGetValue(path, out var entry) || entry is null) { - entry = _fileCacheManager.CreateFileEntry(path); + if (hasExpectedHash) + { + entry = _fileCacheManager.CreateCacheEntryWithKnownHash(path, expectedHash); + } + + entry ??= _fileCacheManager.CreateFileEntry(path); if (entry is null) { _logger.LogWarning("Unable to locate cache entry for {Path}; skipping hash refresh", path); continue; } } + else if (hasExpectedHash && entry.IsCacheEntry && !string.Equals(entry.Hash, expectedHash, StringComparison.OrdinalIgnoreCase)) + { + _logger.LogDebug("Fixing cache hash mismatch for {Path}: {Current} -> {Expected}", path, entry.Hash, expectedHash); + _fileCacheManager.RemoveHashedFile(entry.Hash, entry.PrefixedFilePath, removeDerivedFiles: false); + var corrected = _fileCacheManager.CreateCacheEntryWithKnownHash(path, expectedHash); + if (corrected is not null) + { + entry = corrected; + } + } try { - _fileCacheManager.UpdateHashedFile(entry); + 
if (entry.IsCacheEntry) + { + var info = new FileInfo(path); + entry.Size = info.Length; + entry.CompressedSize = null; + entry.LastModifiedDateTicks = info.LastWriteTimeUtc.Ticks.ToString(CultureInfo.InvariantCulture); + _fileCacheManager.UpdateHashedFile(entry, computeProperties: false); + } + else + { + _fileCacheManager.UpdateHashedFile(entry); + } } catch (Exception ex) { @@ -149,6 +181,35 @@ public sealed class TextureCompressionService } } + private static bool TryGetExpectedHashFromPath(string path, out string hash) + { + hash = Path.GetFileNameWithoutExtension(path); + if (string.IsNullOrWhiteSpace(hash)) + { + return false; + } + + if (hash.Length is not (40 or 64)) + { + return false; + } + + for (var i = 0; i < hash.Length; i++) + { + var c = hash[i]; + var isHex = (c >= '0' && c <= '9') + || (c >= 'a' && c <= 'f') + || (c >= 'A' && c <= 'F'); + if (!isHex) + { + return false; + } + } + + hash = hash.ToUpperInvariant(); + return true; + } + private static readonly string WorkingDirectory = Path.Combine(Path.GetTempPath(), "LightlessSync.TextureCompression"); diff --git a/LightlessSync/Services/TextureCompression/TextureDownscaleService.cs b/LightlessSync/Services/TextureCompression/TextureDownscaleService.cs index 6fa6f92..b5d677c 100644 --- a/LightlessSync/Services/TextureCompression/TextureDownscaleService.cs +++ b/LightlessSync/Services/TextureCompression/TextureDownscaleService.cs @@ -4,9 +4,11 @@ using System.Buffers.Binary; using System.Globalization; using System.IO; using System.Runtime.InteropServices; +using System.Threading; using OtterTex; using OtterImage = OtterTex.Image; using LightlessSync.LightlessConfiguration; +using LightlessSync.Utils; using LightlessSync.FileCache; using Microsoft.Extensions.Logging; using Lumina.Data.Files; @@ -30,10 +32,12 @@ public sealed class TextureDownscaleService private readonly LightlessConfigService _configService; private readonly PlayerPerformanceConfigService _playerPerformanceConfigService; private 
readonly FileCacheManager _fileCacheManager; + private readonly TextureCompressionService _textureCompressionService; - private readonly ConcurrentDictionary _activeJobs = new(StringComparer.OrdinalIgnoreCase); + private readonly TaskRegistry _downscaleDeduplicator = new(); private readonly ConcurrentDictionary _downscaledPaths = new(StringComparer.OrdinalIgnoreCase); private readonly SemaphoreSlim _downscaleSemaphore = new(4); + private readonly SemaphoreSlim _compressionSemaphore = new(1); private static readonly IReadOnlyDictionary BlockCompressedFormatMap = new Dictionary { @@ -68,12 +72,14 @@ public sealed class TextureDownscaleService ILogger logger, LightlessConfigService configService, PlayerPerformanceConfigService playerPerformanceConfigService, - FileCacheManager fileCacheManager) + FileCacheManager fileCacheManager, + TextureCompressionService textureCompressionService) { _logger = logger; _configService = configService; _playerPerformanceConfigService = playerPerformanceConfigService; _fileCacheManager = fileCacheManager; + _textureCompressionService = textureCompressionService; } public void ScheduleDownscale(string hash, string filePath, TextureMapKind mapKind) @@ -82,9 +88,9 @@ public sealed class TextureDownscaleService public void ScheduleDownscale(string hash, string filePath, Func mapKindFactory) { if (!filePath.EndsWith(".tex", StringComparison.OrdinalIgnoreCase)) return; - if (_activeJobs.ContainsKey(hash)) return; + if (_downscaleDeduplicator.TryGetExisting(hash, out _)) return; - _activeJobs[hash] = Task.Run(async () => + _downscaleDeduplicator.GetOrStart(hash, async () => { TextureMapKind mapKind; try @@ -98,7 +104,7 @@ public sealed class TextureDownscaleService } await DownscaleInternalAsync(hash, filePath, mapKind).ConfigureAwait(false); - }, CancellationToken.None); + }); } public bool ShouldScheduleDownscale(string filePath) @@ -107,7 +113,9 @@ public sealed class TextureDownscaleService return false; var performanceConfig = 
_playerPerformanceConfigService.Current; - return performanceConfig.EnableNonIndexTextureMipTrim || performanceConfig.EnableIndexTextureDownscale; + return performanceConfig.EnableNonIndexTextureMipTrim + || performanceConfig.EnableIndexTextureDownscale + || performanceConfig.EnableUncompressedTextureCompression; } public string GetPreferredPath(string hash, string originalPath) @@ -144,7 +152,7 @@ public sealed class TextureDownscaleService continue; } - if (_activeJobs.TryGetValue(hash, out var job)) + if (_downscaleDeduplicator.TryGetExisting(hash, out var job)) { pending.Add(job); } @@ -182,10 +190,18 @@ public sealed class TextureDownscaleService targetMaxDimension = ResolveTargetMaxDimension(); onlyDownscaleUncompressed = performanceConfig.OnlyDownscaleUncompressedTextures; + if (onlyDownscaleUncompressed && !headerInfo.HasValue) + { + _downscaledPaths[hash] = sourcePath; + _logger.LogTrace("Skipping downscale for texture {Hash}; format unknown and only-uncompressed enabled.", hash); + return; + } + destination = Path.Combine(GetDownscaledDirectory(), $"{hash}.tex"); if (File.Exists(destination)) { RegisterDownscaledTexture(hash, sourcePath, destination); + await TryAutoCompressAsync(hash, destination, mapKind, null).ConfigureAwait(false); return; } @@ -196,6 +212,7 @@ public sealed class TextureDownscaleService if (performanceConfig.EnableNonIndexTextureMipTrim && await TryDropTopMipAsync(hash, sourcePath, destination, targetMaxDimension, onlyDownscaleUncompressed, headerInfo).ConfigureAwait(false)) { + await TryAutoCompressAsync(hash, destination, mapKind, null).ConfigureAwait(false); return; } @@ -206,6 +223,7 @@ public sealed class TextureDownscaleService _downscaledPaths[hash] = sourcePath; _logger.LogTrace("Skipping downscale for non-index texture {Hash}; no mip reduction required.", hash); + await TryAutoCompressAsync(hash, sourcePath, mapKind, headerInfo).ConfigureAwait(false); return; } @@ -213,6 +231,7 @@ public sealed class TextureDownscaleService 
{ _downscaledPaths[hash] = sourcePath; _logger.LogTrace("Skipping downscale for index texture {Hash}; feature disabled.", hash); + await TryAutoCompressAsync(hash, sourcePath, mapKind, headerInfo).ConfigureAwait(false); return; } @@ -222,6 +241,7 @@ public sealed class TextureDownscaleService { _downscaledPaths[hash] = sourcePath; _logger.LogTrace("Skipping downscale for index texture {Hash}; header dimensions {Width}x{Height} within target.", hash, headerValue.Width, headerValue.Height); + await TryAutoCompressAsync(hash, sourcePath, mapKind, headerInfo).ConfigureAwait(false); return; } @@ -229,10 +249,12 @@ public sealed class TextureDownscaleService { _downscaledPaths[hash] = sourcePath; _logger.LogTrace("Skipping downscale for index texture {Hash}; block compressed format {Format}.", hash, headerInfo.Value.Format); + await TryAutoCompressAsync(hash, sourcePath, mapKind, headerInfo).ConfigureAwait(false); return; } using var sourceScratch = TexFileHelper.Load(sourcePath); + var sourceFormat = sourceScratch.Meta.Format; using var rgbaScratch = sourceScratch.GetRGBA(out var rgbaInfo).ThrowIfError(rgbaInfo); var bytesPerPixel = rgbaInfo.Meta.Format.BitsPerPixel() / 8; @@ -248,16 +270,39 @@ public sealed class TextureDownscaleService { _downscaledPaths[hash] = sourcePath; _logger.LogTrace("Skipping downscale for index texture {Hash}; already within bounds.", hash); + await TryAutoCompressAsync(hash, sourcePath, mapKind, headerInfo).ConfigureAwait(false); return; } using var resized = IndexDownscaler.Downscale(originalImage, targetSize.width, targetSize.height, BlockMultiple); + var canReencodeWithPenumbra = TryResolveCompressionTarget(headerInfo, sourceFormat, out var compressionTarget); using var resizedScratch = CreateScratchImage(resized, targetSize.width, targetSize.height); - using var finalScratch = resizedScratch.Convert(DXGIFormat.B8G8R8A8UNorm); + if (!TryConvertForSave(resizedScratch, sourceFormat, out var finalScratch, canReencodeWithPenumbra)) + { + if 
(canReencodeWithPenumbra + && await TryReencodeWithPenumbraAsync(hash, sourcePath, destination, resizedScratch, compressionTarget).ConfigureAwait(false)) + { + await TryAutoCompressAsync(hash, destination, mapKind, null).ConfigureAwait(false); + return; + } - TexFileHelper.Save(destination, finalScratch); - RegisterDownscaledTexture(hash, sourcePath, destination); + _downscaledPaths[hash] = sourcePath; + _logger.LogTrace( + "Skipping downscale for index texture {Hash}; failed to re-encode to {Format}.", + hash, + sourceFormat); + await TryAutoCompressAsync(hash, sourcePath, mapKind, headerInfo).ConfigureAwait(false); + return; + } + + using (finalScratch) + { + TexFileHelper.Save(destination, finalScratch); + RegisterDownscaledTexture(hash, sourcePath, destination); + } + + await TryAutoCompressAsync(hash, destination, mapKind, null).ConfigureAwait(false); } catch (Exception ex) { @@ -277,7 +322,6 @@ public sealed class TextureDownscaleService finally { _downscaleSemaphore.Release(); - _activeJobs.TryRemove(hash, out _); } } @@ -330,6 +374,157 @@ public sealed class TextureDownscaleService } } + private bool TryConvertForSave( + ScratchImage source, + DXGIFormat sourceFormat, + out ScratchImage result, + bool attemptPenumbraFallback) + { + var isCompressed = sourceFormat.IsCompressed(); + var targetFormat = isCompressed ? sourceFormat : DXGIFormat.B8G8R8A8UNorm; + try + { + result = source.Convert(targetFormat); + return true; + } + catch (Exception ex) + { + var compressedFallback = attemptPenumbraFallback + ? " Attempting Penumbra re-encode." + : " Skipping downscale."; + _logger.LogWarning( + ex, + "Failed to convert downscaled texture to {Format}.{Fallback}", + targetFormat, + isCompressed ? compressedFallback : " Falling back to B8G8R8A8."); + if (isCompressed) + { + result = default!; + return false; + } + + result = source.Convert(DXGIFormat.B8G8R8A8UNorm); + return true; + } + } + + private bool TryResolveCompressionTarget(TexHeaderInfo? 
headerInfo, DXGIFormat sourceFormat, out TextureCompressionTarget target) + { + if (headerInfo is { } info && TryGetCompressionTarget(info.Format, out target)) + { + return _textureCompressionService.IsTargetSelectable(target); + } + + if (sourceFormat.IsCompressed() && BlockCompressedFormatMap.TryGetValue((int)sourceFormat, out target)) + { + return _textureCompressionService.IsTargetSelectable(target); + } + + target = default; + return false; + } + + private async Task TryReencodeWithPenumbraAsync( + string hash, + string sourcePath, + string destination, + ScratchImage resizedScratch, + TextureCompressionTarget target) + { + try + { + using var uncompressed = resizedScratch.Convert(DXGIFormat.B8G8R8A8UNorm); + TexFileHelper.Save(destination, uncompressed); + } + catch (Exception ex) + { + _logger.LogWarning(ex, "Failed to save uncompressed downscaled texture for {Hash}. Skipping downscale.", hash); + TryDelete(destination); + return false; + } + + await _compressionSemaphore.WaitAsync().ConfigureAwait(false); + try + { + var request = new TextureCompressionRequest(destination, Array.Empty(), target); + await _textureCompressionService + .ConvertTexturesAsync(new[] { request }, null, CancellationToken.None, requestRedraw: false) + .ConfigureAwait(false); + } + catch (Exception ex) + { + _logger.LogWarning(ex, "Failed to re-encode downscaled texture {Hash} to {Target}. Skipping downscale.", hash, target); + TryDelete(destination); + return false; + } + finally + { + _compressionSemaphore.Release(); + } + + RegisterDownscaledTexture(hash, sourcePath, destination); + _logger.LogDebug("Downscaled texture {Hash} -> {Path} (re-encoded via Penumbra).", hash, destination); + return true; + } + + private async Task TryAutoCompressAsync(string hash, string texturePath, TextureMapKind mapKind, TexHeaderInfo? 
headerInfo) + { + var performanceConfig = _playerPerformanceConfigService.Current; + if (!performanceConfig.EnableUncompressedTextureCompression) + { + return; + } + + if (string.IsNullOrEmpty(texturePath) || !File.Exists(texturePath)) + { + return; + } + + var info = headerInfo ?? (TryReadTexHeader(texturePath, out var header) ? header : (TexHeaderInfo?)null); + if (!info.HasValue) + { + _logger.LogTrace("Skipping auto-compress for texture {Hash}; unable to read header.", hash); + return; + } + + if (IsBlockCompressedFormat(info.Value.Format)) + { + _logger.LogTrace("Skipping auto-compress for texture {Hash}; already block-compressed.", hash); + return; + } + + var suggestion = TextureMetadataHelper.GetSuggestedTarget(info.Value.Format.ToString(), mapKind, texturePath); + if (suggestion is null) + { + return; + } + + var target = _textureCompressionService.NormalizeTarget(suggestion.Value.Target); + if (!_textureCompressionService.IsTargetSelectable(target)) + { + _logger.LogTrace("Skipping auto-compress for texture {Hash}; target {Target} not supported.", hash, target); + return; + } + + await _compressionSemaphore.WaitAsync().ConfigureAwait(false); + try + { + var includeMipMaps = !performanceConfig.SkipUncompressedTextureCompressionMipMaps; + var request = new TextureCompressionRequest(texturePath, Array.Empty(), target); + await _textureCompressionService + .ConvertTexturesAsync(new[] { request }, null, CancellationToken.None, requestRedraw: false, includeMipMaps: includeMipMaps) + .ConfigureAwait(false); + } + catch (Exception ex) + { + _logger.LogWarning(ex, "Auto-compress failed for texture {Hash} ({Path})", hash, texturePath); + } + finally + { + _compressionSemaphore.Release(); + } + } + private static bool IsIndexMap(TextureMapKind kind) => kind is TextureMapKind.Mask or TextureMapKind.Index; diff --git a/LightlessSync/Services/UiService.cs b/LightlessSync/Services/UiService.cs index 16f0f4f..7cc9f9b 100644 --- a/LightlessSync/Services/UiService.cs +++ 
b/LightlessSync/Services/UiService.cs @@ -13,16 +13,20 @@ namespace LightlessSync.Services; public sealed class UiService : DisposableMediatorSubscriberBase { private readonly List _createdWindows = []; + private readonly List _registeredWindows = []; + private readonly HashSet _uiHiddenWindows = []; private readonly IUiBuilder _uiBuilder; private readonly FileDialogManager _fileDialogManager; private readonly ILogger _logger; private readonly LightlessConfigService _lightlessConfigService; + private readonly DalamudUtilService _dalamudUtilService; private readonly WindowSystem _windowSystem; private readonly UiFactory _uiFactory; private readonly PairFactory _pairFactory; + private bool _uiHideActive; public UiService(ILogger logger, IUiBuilder uiBuilder, - LightlessConfigService lightlessConfigService, WindowSystem windowSystem, + LightlessConfigService lightlessConfigService, DalamudUtilService dalamudUtilService, WindowSystem windowSystem, IEnumerable windows, UiFactory uiFactory, FileDialogManager fileDialogManager, LightlessMediator lightlessMediator, PairFactory pairFactory) : base(logger, lightlessMediator) @@ -31,6 +35,7 @@ public sealed class UiService : DisposableMediatorSubscriberBase _logger.LogTrace("Creating {type}", GetType().Name); _uiBuilder = uiBuilder; _lightlessConfigService = lightlessConfigService; + _dalamudUtilService = dalamudUtilService; _windowSystem = windowSystem; _uiFactory = uiFactory; _pairFactory = pairFactory; @@ -43,6 +48,7 @@ public sealed class UiService : DisposableMediatorSubscriberBase foreach (var window in windows) { + _registeredWindows.Add(window); _windowSystem.AddWindow(window); } @@ -176,6 +182,8 @@ public sealed class UiService : DisposableMediatorSubscriberBase { _windowSystem.RemoveWindow(msg.Window); _createdWindows.Remove(msg.Window); + _registeredWindows.Remove(msg.Window); + _uiHiddenWindows.Remove(msg.Window); msg.Window.Dispose(); }); } @@ -219,12 +227,72 @@ public sealed class UiService : 
DisposableMediatorSubscriberBase MainStyle.PushStyle(); try { + var hideOtherUi = ShouldHideOtherUi(); + UpdateUiHideState(hideOtherUi); _windowSystem.Draw(); - _fileDialogManager.Draw(); + if (!hideOtherUi) + _fileDialogManager.Draw(); } finally { MainStyle.PopStyle(); } } -} \ No newline at end of file + + private bool ShouldHideOtherUi() + { + var config = _lightlessConfigService.Current; + if (!config.ShowUiWhenUiHidden && _dalamudUtilService.IsGameUiHidden) + return true; + + if (!config.ShowUiInGpose && _dalamudUtilService.IsInGpose) + return true; + + return false; + } + + private void UpdateUiHideState(bool hideOtherUi) + { + if (!hideOtherUi) + { + if (_uiHideActive) + { + foreach (var window in _uiHiddenWindows) + { + window.IsOpen = true; + } + + _uiHiddenWindows.Clear(); + _uiHideActive = false; + } + + return; + } + + _uiHideActive = true; + foreach (var window in EnumerateManagedWindows()) + { + if (window is ZoneChatUi) + continue; + + if (!window.IsOpen) + continue; + + _uiHiddenWindows.Add(window); + window.IsOpen = false; + } + } + + private IEnumerable EnumerateManagedWindows() + { + foreach (var window in _registeredWindows) + { + yield return window; + } + + foreach (var window in _createdWindows) + { + yield return window; + } + } +} diff --git a/LightlessSync/Services/XivDataAnalyzer.cs b/LightlessSync/Services/XivDataAnalyzer.cs index c15ac5c..b18f3f4 100644 --- a/LightlessSync/Services/XivDataAnalyzer.cs +++ b/LightlessSync/Services/XivDataAnalyzer.cs @@ -480,6 +480,20 @@ public sealed partial class XivDataAnalyzer return CalculateTrianglesFromPath(hash, path.ResolvedFilepath, _configService.Current.TriangleDictionary, _failedCalculatedTris); } + public long RefreshTrianglesForPath(string hash, string filePath) + { + if (string.IsNullOrEmpty(filePath) + || !filePath.EndsWith(".mdl", StringComparison.OrdinalIgnoreCase) + || !File.Exists(filePath)) + { + return 0; + } + + _failedCalculatedTris.RemoveAll(entry => entry.Equals(hash, 
StringComparison.Ordinal)); + _configService.Current.TriangleDictionary.TryRemove(hash, out _); + return CalculateTrianglesFromPath(hash, filePath, _configService.Current.TriangleDictionary, _failedCalculatedTris); + } + public async Task GetEffectiveTrianglesByHash(string hash, string filePath) { if (_configService.Current.EffectiveTriangleDictionary.TryGetValue(hash, out var cachedTris) && cachedTris > 0) diff --git a/LightlessSync/ThirdParty/MeshDecimator/Algorithms/DecimationAlgorithm.cs b/LightlessSync/ThirdParty/MeshDecimator/Algorithms/DecimationAlgorithm.cs deleted file mode 100644 index 723eef6..0000000 --- a/LightlessSync/ThirdParty/MeshDecimator/Algorithms/DecimationAlgorithm.cs +++ /dev/null @@ -1,169 +0,0 @@ -#region License -/* -MIT License - -Copyright(c) 2017-2018 Mattias Edlund - -Permission is hereby granted, free of charge, to any person obtaining a copy -of this software and associated documentation files (the "Software"), to deal -in the Software without restriction, including without limitation the rights -to use, copy, modify, merge, publish, distribute, sublicense, and/or sell -copies of the Software, and to permit persons to whom the Software is -furnished to do so, subject to the following conditions: - -The above copyright notice and this permission notice shall be included in all -copies or substantial portions of the Software. - -THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR -IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, -FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE -AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER -LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, -OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE -SOFTWARE. -*/ -#endregion - -using System; -using Microsoft.Extensions.Logging; - -namespace MeshDecimator.Algorithms -{ - /// - /// A decimation algorithm. 
- /// - public abstract class DecimationAlgorithm - { - #region Delegates - /// - /// A callback for decimation status reports. - /// - /// The current iteration, starting at zero. - /// The original count of triangles. - /// The current count of triangles. - /// The target count of triangles. - public delegate void StatusReportCallback(int iteration, int originalTris, int currentTris, int targetTris); - #endregion - - #region Fields - private bool preserveBorders = false; - private int maxVertexCount = 0; - private bool verbose = false; - - private StatusReportCallback statusReportInvoker = null; - #endregion - - #region Properties - /// - /// Gets or sets if borders should be kept. - /// Default value: false - /// - [Obsolete("Use the 'DecimationAlgorithm.PreserveBorders' property instead.", false)] - public bool KeepBorders - { - get { return preserveBorders; } - set { preserveBorders = value; } - } - - /// - /// Gets or sets if borders should be preserved. - /// Default value: false - /// - public bool PreserveBorders - { - get { return preserveBorders; } - set { preserveBorders = value; } - } - - /// - /// Gets or sets if linked vertices should be kept. - /// Default value: false - /// - [Obsolete("This feature has been removed, for more details why please read the readme.", true)] - public bool KeepLinkedVertices - { - get { return false; } - set { } - } - - /// - /// Gets or sets the maximum vertex count. Set to zero for no limitation. - /// Default value: 0 (no limitation) - /// - public int MaxVertexCount - { - get { return maxVertexCount; } - set { maxVertexCount = Math.MathHelper.Max(value, 0); } - } - - /// - /// Gets or sets if verbose information should be printed in the console. - /// Default value: false - /// - public bool Verbose - { - get { return verbose; } - set { verbose = value; } - } - - /// - /// Gets or sets the logger used for diagnostics. - /// - public ILogger? 
Logger { get; set; } - #endregion - - #region Events - /// - /// An event for status reports for this algorithm. - /// - public event StatusReportCallback StatusReport - { - add { statusReportInvoker += value; } - remove { statusReportInvoker -= value; } - } - #endregion - - #region Protected Methods - /// - /// Reports the current status of the decimation. - /// - /// The current iteration, starting at zero. - /// The original count of triangles. - /// The current count of triangles. - /// The target count of triangles. - protected void ReportStatus(int iteration, int originalTris, int currentTris, int targetTris) - { - var statusReportInvoker = this.statusReportInvoker; - if (statusReportInvoker != null) - { - statusReportInvoker.Invoke(iteration, originalTris, currentTris, targetTris); - } - } - #endregion - - #region Public Methods - /// - /// Initializes the algorithm with the original mesh. - /// - /// The mesh. - public abstract void Initialize(Mesh mesh); - - /// - /// Decimates the mesh. - /// - /// The target triangle count. - public abstract void DecimateMesh(int targetTrisCount); - - /// - /// Decimates the mesh without losing any quality. - /// - public abstract void DecimateMeshLossless(); - - /// - /// Returns the resulting mesh. - /// - /// The resulting mesh. 
- public abstract Mesh ToMesh(); - #endregion - } -} diff --git a/LightlessSync/ThirdParty/MeshDecimator/Algorithms/FastQuadricMeshSimplification.cs b/LightlessSync/ThirdParty/MeshDecimator/Algorithms/FastQuadricMeshSimplification.cs deleted file mode 100644 index fe22c85..0000000 --- a/LightlessSync/ThirdParty/MeshDecimator/Algorithms/FastQuadricMeshSimplification.cs +++ /dev/null @@ -1,1549 +0,0 @@ -#region License -/* -MIT License - -Copyright(c) 2017-2018 Mattias Edlund - -Permission is hereby granted, free of charge, to any person obtaining a copy -of this software and associated documentation files (the "Software"), to deal -in the Software without restriction, including without limitation the rights -to use, copy, modify, merge, publish, distribute, sublicense, and/or sell -copies of the Software, and to permit persons to whom the Software is -furnished to do so, subject to the following conditions: - -The above copyright notice and this permission notice shall be included in all -copies or substantial portions of the Software. - -THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR -IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, -FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE -AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER -LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, -OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE -SOFTWARE. 
-*/ -#endregion - -#region Original License -///////////////////////////////////////////// -// -// Mesh Simplification Tutorial -// -// (C) by Sven Forstmann in 2014 -// -// License : MIT -// http://opensource.org/licenses/MIT -// -//https://github.com/sp4cerat/Fast-Quadric-Mesh-Simplification -#endregion - -using System; -using System.Collections.Generic; -using MeshDecimator.Collections; -using MeshDecimator.Math; -using Microsoft.Extensions.Logging; - -namespace MeshDecimator.Algorithms -{ - /// - /// The fast quadric mesh simplification algorithm. - /// - public sealed class FastQuadricMeshSimplification : DecimationAlgorithm - { - #region Consts - private const double DoubleEpsilon = 1.0E-3; - #endregion - - #region Classes - #region Triangle - private struct Triangle - { - #region Fields - public int v0; - public int v1; - public int v2; - public int subMeshIndex; - - public int va0; - public int va1; - public int va2; - - public double err0; - public double err1; - public double err2; - public double err3; - - public bool deleted; - public bool dirty; - public Vector3d n; - #endregion - - #region Properties - public int this[int index] - { - get - { - return (index == 0 ? v0 : (index == 1 ? 
v1 : v2)); - } - set - { - switch (index) - { - case 0: - v0 = value; - break; - case 1: - v1 = value; - break; - case 2: - v2 = value; - break; - default: - throw new IndexOutOfRangeException(); - } - } - } - #endregion - - #region Constructor - public Triangle(int v0, int v1, int v2, int subMeshIndex) - { - this.v0 = v0; - this.v1 = v1; - this.v2 = v2; - this.subMeshIndex = subMeshIndex; - - this.va0 = v0; - this.va1 = v1; - this.va2 = v2; - - err0 = err1 = err2 = err3 = 0; - deleted = dirty = false; - n = new Vector3d(); - } - #endregion - - #region Public Methods - public void GetAttributeIndices(int[] attributeIndices) - { - attributeIndices[0] = va0; - attributeIndices[1] = va1; - attributeIndices[2] = va2; - } - - public void SetAttributeIndex(int index, int value) - { - switch (index) - { - case 0: - va0 = value; - break; - case 1: - va1 = value; - break; - case 2: - va2 = value; - break; - default: - throw new IndexOutOfRangeException(); - } - } - - public void GetErrors(double[] err) - { - err[0] = err0; - err[1] = err1; - err[2] = err2; - } - #endregion - } - #endregion - - #region Vertex - private struct Vertex - { - public Vector3d p; - public int tstart; - public int tcount; - public SymmetricMatrix q; - public bool border; - public bool seam; - public bool foldover; - - public Vertex(Vector3d p) - { - this.p = p; - this.tstart = 0; - this.tcount = 0; - this.q = new SymmetricMatrix(); - this.border = true; - this.seam = false; - this.foldover = false; - } - } - #endregion - - #region Ref - private struct Ref - { - public int tid; - public int tvertex; - - public void Set(int tid, int tvertex) - { - this.tid = tid; - this.tvertex = tvertex; - } - } - #endregion - - #region Border Vertex - private struct BorderVertex - { - public int index; - public int hash; - - public BorderVertex(int index, int hash) - { - this.index = index; - this.hash = hash; - } - } - #endregion - - #region Border Vertex Comparer - private class BorderVertexComparer : IComparer - 
{ - public static readonly BorderVertexComparer instance = new BorderVertexComparer(); - - public int Compare(BorderVertex x, BorderVertex y) - { - return x.hash.CompareTo(y.hash); - } - } - #endregion - #endregion - - #region Fields - private bool preserveSeams = false; - private bool preserveFoldovers = false; - private bool enableSmartLink = true; - private int maxIterationCount = 100; - private double agressiveness = 7.0; - private double vertexLinkDistanceSqr = double.Epsilon; - - private int subMeshCount = 0; - private ResizableArray triangles = null; - private ResizableArray vertices = null; - private ResizableArray refs = null; - - private ResizableArray vertNormals = null; - private ResizableArray vertTangents = null; - private UVChannels vertUV2D = null; - private UVChannels vertUV3D = null; - private UVChannels vertUV4D = null; - private ResizableArray vertColors = null; - private ResizableArray vertBoneWeights = null; - - private int remainingVertices = 0; - - // Pre-allocated buffers - private double[] errArr = new double[3]; - private int[] attributeIndexArr = new int[3]; - #endregion - - #region Properties - /// - /// Gets or sets if seams should be preserved. - /// Default value: false - /// - public bool PreserveSeams - { - get { return preserveSeams; } - set { preserveSeams = value; } - } - - /// - /// Gets or sets if foldovers should be preserved. - /// Default value: false - /// - public bool PreserveFoldovers - { - get { return preserveFoldovers; } - set { preserveFoldovers = value; } - } - - /// - /// Gets or sets if a feature for smarter vertex linking should be enabled, reducing artifacts in the - /// decimated result at the cost of a slightly more expensive initialization by treating vertices at - /// the same position as the same vertex while separating the attributes. 
- /// Default value: true - /// - public bool EnableSmartLink - { - get { return enableSmartLink; } - set { enableSmartLink = value; } - } - - /// - /// Gets or sets the maximum iteration count. Higher number is more expensive but can bring you closer to your target quality. - /// Sometimes a lower maximum count might be desired in order to lower the performance cost. - /// Default value: 100 - /// - public int MaxIterationCount - { - get { return maxIterationCount; } - set { maxIterationCount = value; } - } - - /// - /// Gets or sets the agressiveness of this algorithm. Higher number equals higher quality, but more expensive to run. - /// Default value: 7.0 - /// - public double Agressiveness - { - get { return agressiveness; } - set { agressiveness = value; } - } - - /// - /// Gets or sets the maximum squared distance between two vertices in order to link them. - /// Note that this value is only used if EnableSmartLink is true. - /// Default value: double.Epsilon - /// - public double VertexLinkDistanceSqr - { - get { return vertexLinkDistanceSqr; } - set { vertexLinkDistanceSqr = value; } - } - #endregion - - #region Constructor - /// - /// Creates a new fast quadric mesh simplification algorithm. 
- /// - public FastQuadricMeshSimplification() - { - triangles = new ResizableArray(0); - vertices = new ResizableArray(0); - refs = new ResizableArray(0); - } - #endregion - - #region Private Methods - #region Initialize Vertex Attribute - private ResizableArray InitializeVertexAttribute(T[] attributeValues, string attributeName) - { - if (attributeValues != null && attributeValues.Length == vertices.Length) - { - var newArray = new ResizableArray(attributeValues.Length, attributeValues.Length); - var newArrayData = newArray.Data; - Array.Copy(attributeValues, 0, newArrayData, 0, attributeValues.Length); - return newArray; - } - else if (attributeValues != null && attributeValues.Length > 0) - { - Logger?.LogError( - "Failed to set vertex attribute '{Attribute}' with {ActualLength} length of array, when {ExpectedLength} was needed.", - attributeName, - attributeValues.Length, - vertices.Length); - } - return null; - } - #endregion - - #region Calculate Error - private double VertexError(ref SymmetricMatrix q, double x, double y, double z) - { - return q.m0*x*x + 2*q.m1*x*y + 2*q.m2*x*z + 2*q.m3*x + q.m4*y*y - + 2*q.m5*y*z + 2*q.m6*y + q.m7*z*z + 2*q.m8*z + q.m9; - } - - private double CalculateError(ref Vertex vert0, ref Vertex vert1, out Vector3d result, out int resultIndex) - { - // compute interpolated vertex - SymmetricMatrix q = (vert0.q + vert1.q); - bool border = (vert0.border & vert1.border); - double error = 0.0; - double det = q.Determinant1(); - if (det != 0.0 && !border) - { - // q_delta is invertible - result = new Vector3d( - -1.0 / det * q.Determinant2(), // vx = A41/det(q_delta) - 1.0 / det * q.Determinant3(), // vy = A42/det(q_delta) - -1.0 / det * q.Determinant4()); // vz = A43/det(q_delta) - error = VertexError(ref q, result.x, result.y, result.z); - resultIndex = 2; - } - else - { - // det = 0 -> try to find best result - Vector3d p1 = vert0.p; - Vector3d p2 = vert1.p; - Vector3d p3 = (p1 + p2) * 0.5f; - double error1 = VertexError(ref q, p1.x, 
p1.y, p1.z); - double error2 = VertexError(ref q, p2.x, p2.y, p2.z); - double error3 = VertexError(ref q, p3.x, p3.y, p3.z); - error = MathHelper.Min(error1, error2, error3); - if (error == error3) - { - result = p3; - resultIndex = 2; - } - else if (error == error2) - { - result = p2; - resultIndex = 1; - } - else if (error == error1) - { - result = p1; - resultIndex = 0; - } - else - { - result = p3; - resultIndex = 2; - } - } - return error; - } - #endregion - - #region Flipped - /// - /// Check if a triangle flips when this edge is removed - /// - private bool Flipped(ref Vector3d p, int i0, int i1, ref Vertex v0, bool[] deleted) - { - int tcount = v0.tcount; - var refs = this.refs.Data; - var triangles = this.triangles.Data; - var vertices = this.vertices.Data; - for (int k = 0; k < tcount; k++) - { - Ref r = refs[v0.tstart + k]; - if (triangles[r.tid].deleted) - continue; - - int s = r.tvertex; - int id1 = triangles[r.tid][(s + 1) % 3]; - int id2 = triangles[r.tid][(s + 2) % 3]; - if (id1 == i1 || id2 == i1) - { - deleted[k] = true; - continue; - } - - Vector3d d1 = vertices[id1].p - p; - d1.Normalize(); - Vector3d d2 = vertices[id2].p - p; - d2.Normalize(); - double dot = Vector3d.Dot(ref d1, ref d2); - if (System.Math.Abs(dot) > 0.999) - return true; - - Vector3d n; - Vector3d.Cross(ref d1, ref d2, out n); - n.Normalize(); - deleted[k] = false; - dot = Vector3d.Dot(ref n, ref triangles[r.tid].n); - if (dot < 0.2) - return true; - } - - return false; - } - #endregion - - #region Update Triangles - /// - /// Update triangle connections and edge error after a edge is collapsed. 
- /// - private void UpdateTriangles(int i0, int ia0, ref Vertex v, ResizableArray deleted, ref int deletedTriangles) - { - Vector3d p; - int pIndex; - int tcount = v.tcount; - var triangles = this.triangles.Data; - var vertices = this.vertices.Data; - for (int k = 0; k < tcount; k++) - { - Ref r = refs[v.tstart + k]; - int tid = r.tid; - Triangle t = triangles[tid]; - if (t.deleted) - continue; - - if (deleted[k]) - { - triangles[tid].deleted = true; - ++deletedTriangles; - continue; - } - - t[r.tvertex] = i0; - if (ia0 != -1) - { - t.SetAttributeIndex(r.tvertex, ia0); - } - - t.dirty = true; - t.err0 = CalculateError(ref vertices[t.v0], ref vertices[t.v1], out p, out pIndex); - t.err1 = CalculateError(ref vertices[t.v1], ref vertices[t.v2], out p, out pIndex); - t.err2 = CalculateError(ref vertices[t.v2], ref vertices[t.v0], out p, out pIndex); - t.err3 = MathHelper.Min(t.err0, t.err1, t.err2); - triangles[tid] = t; - refs.Add(r); - } - } - #endregion - - #region Move/Merge Vertex Attributes - private void MoveVertexAttributes(int i0, int i1) - { - if (vertNormals != null) - { - vertNormals[i0] = vertNormals[i1]; - } - if (vertTangents != null) - { - vertTangents[i0] = vertTangents[i1]; - } - if (vertUV2D != null) - { - for (int i = 0; i < Mesh.UVChannelCount; i++) - { - var vertUV = vertUV2D[i]; - if (vertUV != null) - { - vertUV[i0] = vertUV[i1]; - } - } - } - if (vertUV3D != null) - { - for (int i = 0; i < Mesh.UVChannelCount; i++) - { - var vertUV = vertUV3D[i]; - if (vertUV != null) - { - vertUV[i0] = vertUV[i1]; - } - } - } - if (vertUV4D != null) - { - for (int i = 0; i < Mesh.UVChannelCount; i++) - { - var vertUV = vertUV4D[i]; - if (vertUV != null) - { - vertUV[i0] = vertUV[i1]; - } - } - } - if (vertColors != null) - { - vertColors[i0] = vertColors[i1]; - } - if (vertBoneWeights != null) - { - vertBoneWeights[i0] = vertBoneWeights[i1]; - } - } - - private void MergeVertexAttributes(int i0, int i1) - { - if (vertNormals != null) - { - vertNormals[i0] = 
(vertNormals[i0] + vertNormals[i1]) * 0.5f; - } - if (vertTangents != null) - { - vertTangents[i0] = (vertTangents[i0] + vertTangents[i1]) * 0.5f; - } - if (vertUV2D != null) - { - for (int i = 0; i < Mesh.UVChannelCount; i++) - { - var vertUV = vertUV2D[i]; - if (vertUV != null) - { - vertUV[i0] = (vertUV[i0] + vertUV[i1]) * 0.5f; - } - } - } - if (vertUV3D != null) - { - for (int i = 0; i < Mesh.UVChannelCount; i++) - { - var vertUV = vertUV3D[i]; - if (vertUV != null) - { - vertUV[i0] = (vertUV[i0] + vertUV[i1]) * 0.5f; - } - } - } - if (vertUV4D != null) - { - for (int i = 0; i < Mesh.UVChannelCount; i++) - { - var vertUV = vertUV4D[i]; - if (vertUV != null) - { - vertUV[i0] = (vertUV[i0] + vertUV[i1]) * 0.5f; - } - } - } - if (vertColors != null) - { - vertColors[i0] = (vertColors[i0] + vertColors[i1]) * 0.5f; - } - - // TODO: Do we have to blend bone weights at all or can we just keep them as it is in this scenario? - } - #endregion - - #region Are UVs The Same - private bool AreUVsTheSame(int channel, int indexA, int indexB) - { - if (vertUV2D != null) - { - var vertUV = vertUV2D[channel]; - if (vertUV != null) - { - var uvA = vertUV[indexA]; - var uvB = vertUV[indexB]; - return uvA == uvB; - } - } - - if (vertUV3D != null) - { - var vertUV = vertUV3D[channel]; - if (vertUV != null) - { - var uvA = vertUV[indexA]; - var uvB = vertUV[indexB]; - return uvA == uvB; - } - } - - if (vertUV4D != null) - { - var vertUV = vertUV4D[channel]; - if (vertUV != null) - { - var uvA = vertUV[indexA]; - var uvB = vertUV[indexB]; - return uvA == uvB; - } - } - - return false; - } - #endregion - - #region Remove Vertex Pass - /// - /// Remove vertices and mark deleted triangles - /// - private void RemoveVertexPass(int startTrisCount, int targetTrisCount, double threshold, ResizableArray deleted0, ResizableArray deleted1, ref int deletedTris) - { - var triangles = this.triangles.Data; - int triangleCount = this.triangles.Length; - var vertices = this.vertices.Data; - - bool 
preserveBorders = base.PreserveBorders; - int maxVertexCount = base.MaxVertexCount; - if (maxVertexCount <= 0) - maxVertexCount = int.MaxValue; - - Vector3d p; - int pIndex; - for (int tid = 0; tid < triangleCount; tid++) - { - if (triangles[tid].dirty || triangles[tid].deleted || triangles[tid].err3 > threshold) - continue; - - triangles[tid].GetErrors(errArr); - triangles[tid].GetAttributeIndices(attributeIndexArr); - for (int edgeIndex = 0; edgeIndex < 3; edgeIndex++) - { - if (errArr[edgeIndex] > threshold) - continue; - - int nextEdgeIndex = ((edgeIndex + 1) % 3); - int i0 = triangles[tid][edgeIndex]; - int i1 = triangles[tid][nextEdgeIndex]; - - // Border check - if (vertices[i0].border != vertices[i1].border) - continue; - // Seam check - else if (vertices[i0].seam != vertices[i1].seam) - continue; - // Foldover check - else if (vertices[i0].foldover != vertices[i1].foldover) - continue; - // If borders should be preserved - else if (preserveBorders && vertices[i0].border) - continue; - // If seams should be preserved - else if (preserveSeams && vertices[i0].seam) - continue; - // If foldovers should be preserved - else if (preserveFoldovers && vertices[i0].foldover) - continue; - - // Compute vertex to collapse to - CalculateError(ref vertices[i0], ref vertices[i1], out p, out pIndex); - deleted0.Resize(vertices[i0].tcount); // normals temporarily - deleted1.Resize(vertices[i1].tcount); // normals temporarily - - // Don't remove if flipped - if (Flipped(ref p, i0, i1, ref vertices[i0], deleted0.Data)) - continue; - if (Flipped(ref p, i1, i0, ref vertices[i1], deleted1.Data)) - continue; - - int ia0 = attributeIndexArr[edgeIndex]; - - // Not flipped, so remove edge - vertices[i0].p = p; - vertices[i0].q += vertices[i1].q; - - if (pIndex == 1) - { - // Move vertex attributes from ia1 to ia0 - int ia1 = attributeIndexArr[nextEdgeIndex]; - MoveVertexAttributes(ia0, ia1); - } - else if (pIndex == 2) - { - // Merge vertex attributes ia0 and ia1 into ia0 - int ia1 
= attributeIndexArr[nextEdgeIndex]; - MergeVertexAttributes(ia0, ia1); - } - - if (vertices[i0].seam) - { - ia0 = -1; - } - - int tstart = refs.Length; - UpdateTriangles(i0, ia0, ref vertices[i0], deleted0, ref deletedTris); - UpdateTriangles(i0, ia0, ref vertices[i1], deleted1, ref deletedTris); - - int tcount = refs.Length - tstart; - if (tcount <= vertices[i0].tcount) - { - // save ram - if (tcount > 0) - { - var refsArr = refs.Data; - Array.Copy(refsArr, tstart, refsArr, vertices[i0].tstart, tcount); - } - } - else - { - // append - vertices[i0].tstart = tstart; - } - - vertices[i0].tcount = tcount; - --remainingVertices; - break; - } - - // Check if we are already done - if ((startTrisCount - deletedTris) <= targetTrisCount && remainingVertices < maxVertexCount) - break; - } - } - #endregion - - #region Update Mesh - /// - /// Compact triangles, compute edge error and build reference list. - /// - /// The iteration index. - private void UpdateMesh(int iteration) - { - var triangles = this.triangles.Data; - var vertices = this.vertices.Data; - - int triangleCount = this.triangles.Length; - int vertexCount = this.vertices.Length; - if (iteration > 0) // compact triangles - { - int dst = 0; - for (int i = 0; i < triangleCount; i++) - { - if (!triangles[i].deleted) - { - if (dst != i) - { - triangles[dst] = triangles[i]; - } - dst++; - } - } - this.triangles.Resize(dst); - triangles = this.triangles.Data; - triangleCount = dst; - } - - UpdateReferences(); - - // Identify boundary : vertices[].border=0,1 - if (iteration == 0) - { - var refs = this.refs.Data; - - var vcount = new List(8); - var vids = new List(8); - int vsize = 0; - for (int i = 0; i < vertexCount; i++) - { - vertices[i].border = false; - vertices[i].seam = false; - vertices[i].foldover = false; - } - - int ofs; - int id; - int borderVertexCount = 0; - double borderMinX = double.MaxValue; - double borderMaxX = double.MinValue; - for (int i = 0; i < vertexCount; i++) - { - int tstart = 
vertices[i].tstart; - int tcount = vertices[i].tcount; - vcount.Clear(); - vids.Clear(); - vsize = 0; - - for (int j = 0; j < tcount; j++) - { - int tid = refs[tstart + j].tid; - for (int k = 0; k < 3; k++) - { - ofs = 0; - id = triangles[tid][k]; - while (ofs < vsize) - { - if (vids[ofs] == id) - break; - - ++ofs; - } - - if (ofs == vsize) - { - vcount.Add(1); - vids.Add(id); - ++vsize; - } - else - { - ++vcount[ofs]; - } - } - } - - for (int j = 0; j < vsize; j++) - { - if (vcount[j] == 1) - { - id = vids[j]; - vertices[id].border = true; - ++borderVertexCount; - - if (enableSmartLink) - { - if (vertices[id].p.x < borderMinX) - { - borderMinX = vertices[id].p.x; - } - if (vertices[id].p.x > borderMaxX) - { - borderMaxX = vertices[id].p.x; - } - } - } - } - } - - if (enableSmartLink) - { - // First find all border vertices - var borderVertices = new BorderVertex[borderVertexCount]; - int borderIndexCount = 0; - double borderAreaWidth = borderMaxX - borderMinX; - for (int i = 0; i < vertexCount; i++) - { - if (vertices[i].border) - { - int vertexHash = (int)(((((vertices[i].p.x - borderMinX) / borderAreaWidth) * 2.0) - 1.0) * int.MaxValue); - borderVertices[borderIndexCount] = new BorderVertex(i, vertexHash); - ++borderIndexCount; - } - } - - // Sort the border vertices by hash - Array.Sort(borderVertices, 0, borderIndexCount, BorderVertexComparer.instance); - - // Calculate the maximum hash distance based on the maximum vertex link distance - double vertexLinkDistance = System.Math.Sqrt(vertexLinkDistanceSqr); - int hashMaxDistance = System.Math.Max((int)((vertexLinkDistance / borderAreaWidth) * int.MaxValue), 1); - - // Then find identical border vertices and bind them together as one - for (int i = 0; i < borderIndexCount; i++) - { - int myIndex = borderVertices[i].index; - if (myIndex == -1) - continue; - - var myPoint = vertices[myIndex].p; - for (int j = i + 1; j < borderIndexCount; j++) - { - int otherIndex = borderVertices[j].index; - if (otherIndex == -1) 
- continue; - else if ((borderVertices[j].hash - borderVertices[i].hash) > hashMaxDistance) // There is no point to continue beyond this point - break; - - var otherPoint = vertices[otherIndex].p; - var sqrX = ((myPoint.x - otherPoint.x) * (myPoint.x - otherPoint.x)); - var sqrY = ((myPoint.y - otherPoint.y) * (myPoint.y - otherPoint.y)); - var sqrZ = ((myPoint.z - otherPoint.z) * (myPoint.z - otherPoint.z)); - var sqrMagnitude = sqrX + sqrY + sqrZ; - - if (sqrMagnitude <= vertexLinkDistanceSqr) - { - borderVertices[j].index = -1; // NOTE: This makes sure that the "other" vertex is not processed again - vertices[myIndex].border = false; - vertices[otherIndex].border = false; - - if (AreUVsTheSame(0, myIndex, otherIndex)) - { - vertices[myIndex].foldover = true; - vertices[otherIndex].foldover = true; - } - else - { - vertices[myIndex].seam = true; - vertices[otherIndex].seam = true; - } - - int otherTriangleCount = vertices[otherIndex].tcount; - int otherTriangleStart = vertices[otherIndex].tstart; - for (int k = 0; k < otherTriangleCount; k++) - { - var r = refs[otherTriangleStart + k]; - triangles[r.tid][r.tvertex] = myIndex; - } - } - } - } - - // Update the references again - UpdateReferences(); - } - - // Init Quadrics by Plane & Edge Errors - // - // required at the beginning ( iteration == 0 ) - // recomputing during the simplification is not required, - // but mostly improves the result for closed meshes - for (int i = 0; i < vertexCount; i++) - { - vertices[i].q = new SymmetricMatrix(); - } - - int v0, v1, v2; - Vector3d n, p0, p1, p2, p10, p20, dummy; - int dummy2; - SymmetricMatrix sm; - for (int i = 0; i < triangleCount; i++) - { - v0 = triangles[i].v0; - v1 = triangles[i].v1; - v2 = triangles[i].v2; - - p0 = vertices[v0].p; - p1 = vertices[v1].p; - p2 = vertices[v2].p; - p10 = p1 - p0; - p20 = p2 - p0; - Vector3d.Cross(ref p10, ref p20, out n); - n.Normalize(); - triangles[i].n = n; - - sm = new SymmetricMatrix(n.x, n.y, n.z, -Vector3d.Dot(ref n, ref 
p0)); - vertices[v0].q += sm; - vertices[v1].q += sm; - vertices[v2].q += sm; - } - - for (int i = 0; i < triangleCount; i++) - { - // Calc Edge Error - var triangle = triangles[i]; - triangles[i].err0 = CalculateError(ref vertices[triangle.v0], ref vertices[triangle.v1], out dummy, out dummy2); - triangles[i].err1 = CalculateError(ref vertices[triangle.v1], ref vertices[triangle.v2], out dummy, out dummy2); - triangles[i].err2 = CalculateError(ref vertices[triangle.v2], ref vertices[triangle.v0], out dummy, out dummy2); - triangles[i].err3 = MathHelper.Min(triangles[i].err0, triangles[i].err1, triangles[i].err2); - } - } - } - #endregion - - #region Update References - private void UpdateReferences() - { - int triangleCount = this.triangles.Length; - int vertexCount = this.vertices.Length; - var triangles = this.triangles.Data; - var vertices = this.vertices.Data; - - // Init Reference ID list - for (int i = 0; i < vertexCount; i++) - { - vertices[i].tstart = 0; - vertices[i].tcount = 0; - } - - for (int i = 0; i < triangleCount; i++) - { - ++vertices[triangles[i].v0].tcount; - ++vertices[triangles[i].v1].tcount; - ++vertices[triangles[i].v2].tcount; - } - - int tstart = 0; - remainingVertices = 0; - for (int i = 0; i < vertexCount; i++) - { - vertices[i].tstart = tstart; - if (vertices[i].tcount > 0) - { - tstart += vertices[i].tcount; - vertices[i].tcount = 0; - ++remainingVertices; - } - } - - // Write References - this.refs.Resize(tstart); - var refs = this.refs.Data; - for (int i = 0; i < triangleCount; i++) - { - int v0 = triangles[i].v0; - int v1 = triangles[i].v1; - int v2 = triangles[i].v2; - int start0 = vertices[v0].tstart; - int count0 = vertices[v0].tcount; - int start1 = vertices[v1].tstart; - int count1 = vertices[v1].tcount; - int start2 = vertices[v2].tstart; - int count2 = vertices[v2].tcount; - - refs[start0 + count0].Set(i, 0); - refs[start1 + count1].Set(i, 1); - refs[start2 + count2].Set(i, 2); - - ++vertices[v0].tcount; - 
++vertices[v1].tcount; - ++vertices[v2].tcount; - } - } - #endregion - - #region Compact Mesh - /// - /// Finally compact mesh before exiting. - /// - private void CompactMesh() - { - int dst = 0; - var vertices = this.vertices.Data; - int vertexCount = this.vertices.Length; - for (int i = 0; i < vertexCount; i++) - { - vertices[i].tcount = 0; - } - - var vertNormals = (this.vertNormals != null ? this.vertNormals.Data : null); - var vertTangents = (this.vertTangents != null ? this.vertTangents.Data : null); - var vertUV2D = (this.vertUV2D != null ? this.vertUV2D.Data : null); - var vertUV3D = (this.vertUV3D != null ? this.vertUV3D.Data : null); - var vertUV4D = (this.vertUV4D != null ? this.vertUV4D.Data : null); - var vertColors = (this.vertColors != null ? this.vertColors.Data : null); - var vertBoneWeights = (this.vertBoneWeights != null ? this.vertBoneWeights.Data : null); - - var triangles = this.triangles.Data; - int triangleCount = this.triangles.Length; - for (int i = 0; i < triangleCount; i++) - { - var triangle = triangles[i]; - if (!triangle.deleted) - { - if (triangle.va0 != triangle.v0) - { - int iDest = triangle.va0; - int iSrc = triangle.v0; - vertices[iDest].p = vertices[iSrc].p; - if (vertBoneWeights != null) - { - vertBoneWeights[iDest] = vertBoneWeights[iSrc]; - } - triangle.v0 = triangle.va0; - } - if (triangle.va1 != triangle.v1) - { - int iDest = triangle.va1; - int iSrc = triangle.v1; - vertices[iDest].p = vertices[iSrc].p; - if (vertBoneWeights != null) - { - vertBoneWeights[iDest] = vertBoneWeights[iSrc]; - } - triangle.v1 = triangle.va1; - } - if (triangle.va2 != triangle.v2) - { - int iDest = triangle.va2; - int iSrc = triangle.v2; - vertices[iDest].p = vertices[iSrc].p; - if (vertBoneWeights != null) - { - vertBoneWeights[iDest] = vertBoneWeights[iSrc]; - } - triangle.v2 = triangle.va2; - } - - triangles[dst++] = triangle; - - vertices[triangle.v0].tcount = 1; - vertices[triangle.v1].tcount = 1; - vertices[triangle.v2].tcount = 1; - } - 
} - - triangleCount = dst; - this.triangles.Resize(triangleCount); - triangles = this.triangles.Data; - - dst = 0; - for (int i = 0; i < vertexCount; i++) - { - var vert = vertices[i]; - if (vert.tcount > 0) - { - vert.tstart = dst; - vertices[i] = vert; - - if (dst != i) - { - vertices[dst].p = vert.p; - if (vertNormals != null) vertNormals[dst] = vertNormals[i]; - if (vertTangents != null) vertTangents[dst] = vertTangents[i]; - if (vertUV2D != null) - { - for (int j = 0; j < Mesh.UVChannelCount; j++) - { - var vertUV = vertUV2D[j]; - if (vertUV != null) - { - vertUV[dst] = vertUV[i]; - } - } - } - if (vertUV3D != null) - { - for (int j = 0; j < Mesh.UVChannelCount; j++) - { - var vertUV = vertUV3D[j]; - if (vertUV != null) - { - vertUV[dst] = vertUV[i]; - } - } - } - if (vertUV4D != null) - { - for (int j = 0; j < Mesh.UVChannelCount; j++) - { - var vertUV = vertUV4D[j]; - if (vertUV != null) - { - vertUV[dst] = vertUV[i]; - } - } - } - if (vertColors != null) vertColors[dst] = vertColors[i]; - if (vertBoneWeights != null) vertBoneWeights[dst] = vertBoneWeights[i]; - } - ++dst; - } - } - - for (int i = 0; i < triangleCount; i++) - { - var triangle = triangles[i]; - triangle.v0 = vertices[triangle.v0].tstart; - triangle.v1 = vertices[triangle.v1].tstart; - triangle.v2 = vertices[triangle.v2].tstart; - triangles[i] = triangle; - } - - vertexCount = dst; - this.vertices.Resize(vertexCount); - if (vertNormals != null) this.vertNormals.Resize(vertexCount, true); - if (vertTangents != null) this.vertTangents.Resize(vertexCount, true); - if (vertUV2D != null) this.vertUV2D.Resize(vertexCount, true); - if (vertUV3D != null) this.vertUV3D.Resize(vertexCount, true); - if (vertUV4D != null) this.vertUV4D.Resize(vertexCount, true); - if (vertColors != null) this.vertColors.Resize(vertexCount, true); - if (vertBoneWeights != null) this.vertBoneWeights.Resize(vertexCount, true); - } - #endregion - #endregion - - #region Public Methods - #region Initialize - /// - /// 
Initializes the algorithm with the original mesh. - /// - /// The mesh. - public override void Initialize(Mesh mesh) - { - if (mesh == null) - throw new ArgumentNullException("mesh"); - - int meshSubMeshCount = mesh.SubMeshCount; - int meshTriangleCount = mesh.TriangleCount; - var meshVertices = mesh.Vertices; - var meshNormals = mesh.Normals; - var meshTangents = mesh.Tangents; - var meshColors = mesh.Colors; - var meshBoneWeights = mesh.BoneWeights; - subMeshCount = meshSubMeshCount; - - vertices.Resize(meshVertices.Length); - var vertArr = vertices.Data; - for (int i = 0; i < meshVertices.Length; i++) - { - vertArr[i] = new Vertex(meshVertices[i]); - } - - triangles.Resize(meshTriangleCount); - var trisArr = triangles.Data; - int triangleIndex = 0; - for (int subMeshIndex = 0; subMeshIndex < meshSubMeshCount; subMeshIndex++) - { - int[] subMeshIndices = mesh.GetIndices(subMeshIndex); - int subMeshTriangleCount = subMeshIndices.Length / 3; - for (int i = 0; i < subMeshTriangleCount; i++) - { - int offset = i * 3; - int v0 = subMeshIndices[offset]; - int v1 = subMeshIndices[offset + 1]; - int v2 = subMeshIndices[offset + 2]; - trisArr[triangleIndex++] = new Triangle(v0, v1, v2, subMeshIndex); - } - } - - vertNormals = InitializeVertexAttribute(meshNormals, "normals"); - vertTangents = InitializeVertexAttribute(meshTangents, "tangents"); - vertColors = InitializeVertexAttribute(meshColors, "colors"); - vertBoneWeights = InitializeVertexAttribute(meshBoneWeights, "boneWeights"); - - for (int i = 0; i < Mesh.UVChannelCount; i++) - { - int uvDim = mesh.GetUVDimension(i); - string uvAttributeName = string.Format("uv{0}", i); - if (uvDim == 2) - { - if (vertUV2D == null) - vertUV2D = new UVChannels(); - - var uvs = mesh.GetUVs2D(i); - vertUV2D[i] = InitializeVertexAttribute(uvs, uvAttributeName); - } - else if (uvDim == 3) - { - if (vertUV3D == null) - vertUV3D = new UVChannels(); - - var uvs = mesh.GetUVs3D(i); - vertUV3D[i] = InitializeVertexAttribute(uvs, 
uvAttributeName); - } - else if (uvDim == 4) - { - if (vertUV4D == null) - vertUV4D = new UVChannels(); - - var uvs = mesh.GetUVs4D(i); - vertUV4D[i] = InitializeVertexAttribute(uvs, uvAttributeName); - } - } - } - #endregion - - #region Decimate Mesh - /// - /// Decimates the mesh. - /// - /// The target triangle count. - public override void DecimateMesh(int targetTrisCount) - { - if (targetTrisCount < 0) - throw new ArgumentOutOfRangeException("targetTrisCount"); - - int deletedTris = 0; - ResizableArray deleted0 = new ResizableArray(20); - ResizableArray deleted1 = new ResizableArray(20); - var triangles = this.triangles.Data; - int triangleCount = this.triangles.Length; - int startTrisCount = triangleCount; - var vertices = this.vertices.Data; - - int maxVertexCount = base.MaxVertexCount; - if (maxVertexCount <= 0) - maxVertexCount = int.MaxValue; - - for (int iteration = 0; iteration < maxIterationCount; iteration++) - { - ReportStatus(iteration, startTrisCount, (startTrisCount - deletedTris), targetTrisCount); - if ((startTrisCount - deletedTris) <= targetTrisCount && remainingVertices < maxVertexCount) - break; - - // Update mesh once in a while - if ((iteration % 5) == 0) - { - UpdateMesh(iteration); - triangles = this.triangles.Data; - triangleCount = this.triangles.Length; - vertices = this.vertices.Data; - } - - // Clear dirty flag - for (int i = 0; i < triangleCount; i++) - { - triangles[i].dirty = false; - } - - // All triangles with edges below the threshold will be removed - // - // The following numbers works well for most models. 
- // If it does not, try to adjust the 3 parameters - double threshold = 0.000000001 * System.Math.Pow(iteration + 3, agressiveness); - - if (Verbose && (iteration % 5) == 0) - { - Logger?.LogTrace( - "Iteration {Iteration} - triangles {Triangles} threshold {Threshold}", - iteration, - (startTrisCount - deletedTris), - threshold); - } - - // Remove vertices & mark deleted triangles - RemoveVertexPass(startTrisCount, targetTrisCount, threshold, deleted0, deleted1, ref deletedTris); - } - - CompactMesh(); - } - #endregion - - #region Decimate Mesh Lossless - /// - /// Decimates the mesh without losing any quality. - /// - public override void DecimateMeshLossless() - { - int deletedTris = 0; - ResizableArray deleted0 = new ResizableArray(0); - ResizableArray deleted1 = new ResizableArray(0); - var triangles = this.triangles.Data; - int triangleCount = this.triangles.Length; - int startTrisCount = triangleCount; - var vertices = this.vertices.Data; - - ReportStatus(0, startTrisCount, startTrisCount, -1); - for (int iteration = 0; iteration < 9999; iteration++) - { - // Update mesh constantly - UpdateMesh(iteration); - triangles = this.triangles.Data; - triangleCount = this.triangles.Length; - vertices = this.vertices.Data; - - ReportStatus(iteration, startTrisCount, triangleCount, -1); - - // Clear dirty flag - for (int i = 0; i < triangleCount; i++) - { - triangles[i].dirty = false; - } - - // All triangles with edges below the threshold will be removed - // - // The following numbers works well for most models. 
- // If it does not, try to adjust the 3 parameters - double threshold = DoubleEpsilon; - - if (Verbose) - { - Logger?.LogTrace("Lossless iteration {Iteration}", iteration); - } - - // Remove vertices & mark deleted triangles - RemoveVertexPass(startTrisCount, 0, threshold, deleted0, deleted1, ref deletedTris); - - if (deletedTris <= 0) - break; - - deletedTris = 0; - } - - CompactMesh(); - } - #endregion - - #region To Mesh - /// - /// Returns the resulting mesh. - /// - /// The resulting mesh. - public override Mesh ToMesh() - { - int vertexCount = this.vertices.Length; - int triangleCount = this.triangles.Length; - var vertices = new Vector3d[vertexCount]; - var indices = new int[subMeshCount][]; - - var vertArr = this.vertices.Data; - for (int i = 0; i < vertexCount; i++) - { - vertices[i] = vertArr[i].p; - } - - // First get the sub-mesh offsets - var triArr = this.triangles.Data; - int[] subMeshOffsets = new int[subMeshCount]; - int lastSubMeshOffset = -1; - for (int i = 0; i < triangleCount; i++) - { - var triangle = triArr[i]; - if (triangle.subMeshIndex != lastSubMeshOffset) - { - for (int j = lastSubMeshOffset + 1; j < triangle.subMeshIndex; j++) - { - subMeshOffsets[j] = i; - } - subMeshOffsets[triangle.subMeshIndex] = i; - lastSubMeshOffset = triangle.subMeshIndex; - } - } - for (int i = lastSubMeshOffset + 1; i < subMeshCount; i++) - { - subMeshOffsets[i] = triangleCount; - } - - // Then setup the sub-meshes - for (int subMeshIndex = 0; subMeshIndex < subMeshCount; subMeshIndex++) - { - int startOffset = subMeshOffsets[subMeshIndex]; - if (startOffset < triangleCount) - { - int endOffset = ((subMeshIndex + 1) < subMeshCount ? 
subMeshOffsets[subMeshIndex + 1] : triangleCount); - int subMeshTriangleCount = endOffset - startOffset; - if (subMeshTriangleCount < 0) subMeshTriangleCount = 0; - int[] subMeshIndices = new int[subMeshTriangleCount * 3]; - - for (int triangleIndex = startOffset; triangleIndex < endOffset; triangleIndex++) - { - var triangle = triArr[triangleIndex]; - int offset = (triangleIndex - startOffset) * 3; - subMeshIndices[offset] = triangle.v0; - subMeshIndices[offset + 1] = triangle.v1; - subMeshIndices[offset + 2] = triangle.v2; - } - - indices[subMeshIndex] = subMeshIndices; - } - else - { - // This mesh doesn't have any triangles left - indices[subMeshIndex] = new int[0]; - } - } - - Mesh newMesh = new Mesh(vertices, indices); - - if (vertNormals != null) - { - newMesh.Normals = vertNormals.Data; - } - if (vertTangents != null) - { - newMesh.Tangents = vertTangents.Data; - } - if (vertColors != null) - { - newMesh.Colors = vertColors.Data; - } - if (vertBoneWeights != null) - { - newMesh.BoneWeights = vertBoneWeights.Data; - } - - if (vertUV2D != null) - { - for (int i = 0; i < Mesh.UVChannelCount; i++) - { - if (vertUV2D[i] != null) - { - var uvSet = vertUV2D[i].Data; - newMesh.SetUVs(i, uvSet); - } - } - } - - if (vertUV3D != null) - { - for (int i = 0; i < Mesh.UVChannelCount; i++) - { - if (vertUV3D[i] != null) - { - var uvSet = vertUV3D[i].Data; - newMesh.SetUVs(i, uvSet); - } - } - } - - if (vertUV4D != null) - { - for (int i = 0; i < Mesh.UVChannelCount; i++) - { - if (vertUV4D[i] != null) - { - var uvSet = vertUV4D[i].Data; - newMesh.SetUVs(i, uvSet); - } - } - } - - return newMesh; - } - #endregion - #endregion - } -} diff --git a/LightlessSync/ThirdParty/MeshDecimator/BoneWeight.cs b/LightlessSync/ThirdParty/MeshDecimator/BoneWeight.cs deleted file mode 100644 index 6501468..0000000 --- a/LightlessSync/ThirdParty/MeshDecimator/BoneWeight.cs +++ /dev/null @@ -1,249 +0,0 @@ -#region License -/* -MIT License - -Copyright(c) 2017-2018 Mattias Edlund - 
-Permission is hereby granted, free of charge, to any person obtaining a copy -of this software and associated documentation files (the "Software"), to deal -in the Software without restriction, including without limitation the rights -to use, copy, modify, merge, publish, distribute, sublicense, and/or sell -copies of the Software, and to permit persons to whom the Software is -furnished to do so, subject to the following conditions: - -The above copyright notice and this permission notice shall be included in all -copies or substantial portions of the Software. - -THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR -IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, -FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE -AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER -LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, -OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE -SOFTWARE. -*/ -#endregion - -using System; -using MeshDecimator.Math; - -namespace MeshDecimator -{ - /// - /// A bone weight. - /// - public struct BoneWeight : IEquatable - { - #region Fields - /// - /// The first bone index. - /// - public int boneIndex0; - /// - /// The second bone index. - /// - public int boneIndex1; - /// - /// The third bone index. - /// - public int boneIndex2; - /// - /// The fourth bone index. - /// - public int boneIndex3; - - /// - /// The first bone weight. - /// - public float boneWeight0; - /// - /// The second bone weight. - /// - public float boneWeight1; - /// - /// The third bone weight. - /// - public float boneWeight2; - /// - /// The fourth bone weight. - /// - public float boneWeight3; - #endregion - - #region Constructor - /// - /// Creates a new bone weight. - /// - /// The first bone index. - /// The second bone index. - /// The third bone index. - /// The fourth bone index. - /// The first bone weight. 
- /// The second bone weight. - /// The third bone weight. - /// The fourth bone weight. - public BoneWeight(int boneIndex0, int boneIndex1, int boneIndex2, int boneIndex3, float boneWeight0, float boneWeight1, float boneWeight2, float boneWeight3) - { - this.boneIndex0 = boneIndex0; - this.boneIndex1 = boneIndex1; - this.boneIndex2 = boneIndex2; - this.boneIndex3 = boneIndex3; - - this.boneWeight0 = boneWeight0; - this.boneWeight1 = boneWeight1; - this.boneWeight2 = boneWeight2; - this.boneWeight3 = boneWeight3; - } - #endregion - - #region Operators - /// - /// Returns if two bone weights equals eachother. - /// - /// The left hand side bone weight. - /// The right hand side bone weight. - /// If equals. - public static bool operator ==(BoneWeight lhs, BoneWeight rhs) - { - return (lhs.boneIndex0 == rhs.boneIndex0 && lhs.boneIndex1 == rhs.boneIndex1 && lhs.boneIndex2 == rhs.boneIndex2 && lhs.boneIndex3 == rhs.boneIndex3 && - new Vector4(lhs.boneWeight0, lhs.boneWeight1, lhs.boneWeight2, lhs.boneWeight3) == new Vector4(rhs.boneWeight0, rhs.boneWeight1, rhs.boneWeight2, rhs.boneWeight3)); - } - - /// - /// Returns if two bone weights don't equal eachother. - /// - /// The left hand side bone weight. - /// The right hand side bone weight. - /// If not equals. 
- public static bool operator !=(BoneWeight lhs, BoneWeight rhs) - { - return !(lhs == rhs); - } - #endregion - - #region Private Methods - private void MergeBoneWeight(int boneIndex, float weight) - { - if (boneIndex == boneIndex0) - { - boneWeight0 = (boneWeight0 + weight) * 0.5f; - } - else if (boneIndex == boneIndex1) - { - boneWeight1 = (boneWeight1 + weight) * 0.5f; - } - else if (boneIndex == boneIndex2) - { - boneWeight2 = (boneWeight2 + weight) * 0.5f; - } - else if (boneIndex == boneIndex3) - { - boneWeight3 = (boneWeight3 + weight) * 0.5f; - } - else if(boneWeight0 == 0f) - { - boneIndex0 = boneIndex; - boneWeight0 = weight; - } - else if (boneWeight1 == 0f) - { - boneIndex1 = boneIndex; - boneWeight1 = weight; - } - else if (boneWeight2 == 0f) - { - boneIndex2 = boneIndex; - boneWeight2 = weight; - } - else if (boneWeight3 == 0f) - { - boneIndex3 = boneIndex; - boneWeight3 = weight; - } - Normalize(); - } - - private void Normalize() - { - float mag = (float)System.Math.Sqrt(boneWeight0 * boneWeight0 + boneWeight1 * boneWeight1 + boneWeight2 * boneWeight2 + boneWeight3 * boneWeight3); - if (mag > float.Epsilon) - { - boneWeight0 /= mag; - boneWeight1 /= mag; - boneWeight2 /= mag; - boneWeight3 /= mag; - } - else - { - boneWeight0 = boneWeight1 = boneWeight2 = boneWeight3 = 0f; - } - } - #endregion - - #region Public Methods - #region Object - /// - /// Returns a hash code for this vector. - /// - /// The hash code. - public override int GetHashCode() - { - return boneIndex0.GetHashCode() ^ boneIndex1.GetHashCode() << 2 ^ boneIndex2.GetHashCode() >> 2 ^ boneIndex3.GetHashCode() >> - 1 ^ boneWeight0.GetHashCode() << 5 ^ boneWeight1.GetHashCode() << 4 ^ boneWeight2.GetHashCode() >> 4 ^ boneWeight3.GetHashCode() >> 3; - } - - /// - /// Returns if this bone weight is equal to another object. - /// - /// The other object to compare to. - /// If equals. 
- public override bool Equals(object obj) - { - if (!(obj is BoneWeight)) - { - return false; - } - BoneWeight other = (BoneWeight)obj; - return (boneIndex0 == other.boneIndex0 && boneIndex1 == other.boneIndex1 && boneIndex2 == other.boneIndex2 && boneIndex3 == other.boneIndex3 && - boneWeight0 == other.boneWeight0 && boneWeight1 == other.boneWeight1 && boneWeight2 == other.boneWeight2 && boneWeight3 == other.boneWeight3); - } - - /// - /// Returns if this bone weight is equal to another one. - /// - /// The other bone weight to compare to. - /// If equals. - public bool Equals(BoneWeight other) - { - return (boneIndex0 == other.boneIndex0 && boneIndex1 == other.boneIndex1 && boneIndex2 == other.boneIndex2 && boneIndex3 == other.boneIndex3 && - boneWeight0 == other.boneWeight0 && boneWeight1 == other.boneWeight1 && boneWeight2 == other.boneWeight2 && boneWeight3 == other.boneWeight3); - } - - /// - /// Returns a nicely formatted string for this bone weight. - /// - /// The string. - public override string ToString() - { - return string.Format("({0}:{4:F1}, {1}:{5:F1}, {2}:{6:F1}, {3}:{7:F1})", - boneIndex0, boneIndex1, boneIndex2, boneIndex3, boneWeight0, boneWeight1, boneWeight2, boneWeight3); - } - #endregion - - #region Static - /// - /// Merges two bone weights and stores the merged result in the first parameter. - /// - /// The first bone weight, also stores result. - /// The second bone weight. 
- public static void Merge(ref BoneWeight a, ref BoneWeight b) - { - if (b.boneWeight0 > 0f) a.MergeBoneWeight(b.boneIndex0, b.boneWeight0); - if (b.boneWeight1 > 0f) a.MergeBoneWeight(b.boneIndex1, b.boneWeight1); - if (b.boneWeight2 > 0f) a.MergeBoneWeight(b.boneIndex2, b.boneWeight2); - if (b.boneWeight3 > 0f) a.MergeBoneWeight(b.boneIndex3, b.boneWeight3); - } - #endregion - #endregion - } -} \ No newline at end of file diff --git a/LightlessSync/ThirdParty/MeshDecimator/Collections/ResizableArray.cs b/LightlessSync/ThirdParty/MeshDecimator/Collections/ResizableArray.cs deleted file mode 100644 index 2c69814..0000000 --- a/LightlessSync/ThirdParty/MeshDecimator/Collections/ResizableArray.cs +++ /dev/null @@ -1,179 +0,0 @@ -#region License -/* -MIT License - -Copyright(c) 2017-2018 Mattias Edlund - -Permission is hereby granted, free of charge, to any person obtaining a copy -of this software and associated documentation files (the "Software"), to deal -in the Software without restriction, including without limitation the rights -to use, copy, modify, merge, publish, distribute, sublicense, and/or sell -copies of the Software, and to permit persons to whom the Software is -furnished to do so, subject to the following conditions: - -The above copyright notice and this permission notice shall be included in all -copies or substantial portions of the Software. - -THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR -IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, -FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE -AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER -LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, -OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE -SOFTWARE. -*/ -#endregion - -using System; - -namespace MeshDecimator.Collections -{ - /// - /// A resizable array. - /// - /// The item type. 
- internal sealed class ResizableArray - { - #region Fields - private T[] items = null; - private int length = 0; - - private static T[] emptyArr = new T[0]; - #endregion - - #region Properties - /// - /// Gets the length of this array. - /// - public int Length - { - get { return length; } - } - - /// - /// Gets the internal data buffer for this array. - /// - public T[] Data - { - get { return items; } - } - - /// - /// Gets or sets the element value at a specific index. - /// - /// The element index. - /// The element value. - public T this[int index] - { - get { return items[index]; } - set { items[index] = value; } - } - #endregion - - #region Constructor - /// - /// Creates a new resizable array. - /// - /// The initial array capacity. - public ResizableArray(int capacity) - : this(capacity, 0) - { - - } - - /// - /// Creates a new resizable array. - /// - /// The initial array capacity. - /// The initial length of the array. - public ResizableArray(int capacity, int length) - { - if (capacity < 0) - throw new ArgumentOutOfRangeException("capacity"); - else if (length < 0 || length > capacity) - throw new ArgumentOutOfRangeException("length"); - - if (capacity > 0) - items = new T[capacity]; - else - items = emptyArr; - - this.length = length; - } - #endregion - - #region Private Methods - private void IncreaseCapacity(int capacity) - { - T[] newItems = new T[capacity]; - Array.Copy(items, 0, newItems, 0, System.Math.Min(length, capacity)); - items = newItems; - } - #endregion - - #region Public Methods - /// - /// Clears this array. - /// - public void Clear() - { - Array.Clear(items, 0, length); - length = 0; - } - - /// - /// Resizes this array. - /// - /// The new length. - /// If exess memory should be trimmed. 
- public void Resize(int length, bool trimExess = false) - { - if (length < 0) - throw new ArgumentOutOfRangeException("capacity"); - - if (length > items.Length) - { - IncreaseCapacity(length); - } - else if (length < this.length) - { - //Array.Clear(items, capacity, length - capacity); - } - - this.length = length; - - if (trimExess) - { - TrimExcess(); - } - } - - /// - /// Trims any excess memory for this array. - /// - public void TrimExcess() - { - if (items.Length == length) // Nothing to do - return; - - T[] newItems = new T[length]; - Array.Copy(items, 0, newItems, 0, length); - items = newItems; - } - - /// - /// Adds a new item to the end of this array. - /// - /// The new item. - public void Add(T item) - { - if (length >= items.Length) - { - IncreaseCapacity(items.Length << 1); - } - - items[length++] = item; - } - #endregion - } -} \ No newline at end of file diff --git a/LightlessSync/ThirdParty/MeshDecimator/Collections/UVChannels.cs b/LightlessSync/ThirdParty/MeshDecimator/Collections/UVChannels.cs deleted file mode 100644 index 073728a..0000000 --- a/LightlessSync/ThirdParty/MeshDecimator/Collections/UVChannels.cs +++ /dev/null @@ -1,79 +0,0 @@ -using System; - -namespace MeshDecimator.Collections -{ - /// - /// A collection of UV channels. - /// - /// The UV vector type. - internal sealed class UVChannels - { - #region Fields - private ResizableArray[] channels = null; - private TVec[][] channelsData = null; - #endregion - - #region Properties - /// - /// Gets the channel collection data. - /// - public TVec[][] Data - { - get - { - for (int i = 0; i < Mesh.UVChannelCount; i++) - { - if (channels[i] != null) - { - channelsData[i] = channels[i].Data; - } - else - { - channelsData[i] = null; - } - } - return channelsData; - } - } - - /// - /// Gets or sets a specific channel by index. - /// - /// The channel index. 
- public ResizableArray this[int index] - { - get { return channels[index]; } - set { channels[index] = value; } - } - #endregion - - #region Constructor - /// - /// Creates a new collection of UV channels. - /// - public UVChannels() - { - channels = new ResizableArray[Mesh.UVChannelCount]; - channelsData = new TVec[Mesh.UVChannelCount][]; - } - #endregion - - #region Public Methods - /// - /// Resizes all channels at once. - /// - /// The new capacity. - /// If exess memory should be trimmed. - public void Resize(int capacity, bool trimExess = false) - { - for (int i = 0; i < Mesh.UVChannelCount; i++) - { - if (channels[i] != null) - { - channels[i].Resize(capacity, trimExess); - } - } - } - #endregion - } -} diff --git a/LightlessSync/ThirdParty/MeshDecimator/LICENSE.md b/LightlessSync/ThirdParty/MeshDecimator/LICENSE.md deleted file mode 100644 index 1f1f192..0000000 --- a/LightlessSync/ThirdParty/MeshDecimator/LICENSE.md +++ /dev/null @@ -1,21 +0,0 @@ -MIT License - -Copyright (c) 2017-2018 Mattias Edlund - -Permission is hereby granted, free of charge, to any person obtaining a copy -of this software and associated documentation files (the "Software"), to deal -in the Software without restriction, including without limitation the rights -to use, copy, modify, merge, publish, distribute, sublicense, and/or sell -copies of the Software, and to permit persons to whom the Software is -furnished to do so, subject to the following conditions: - -The above copyright notice and this permission notice shall be included in all -copies or substantial portions of the Software. - -THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR -IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, -FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. 
IN NO EVENT SHALL THE -AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER -LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, -OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE -SOFTWARE. diff --git a/LightlessSync/ThirdParty/MeshDecimator/Math/MathHelper.cs b/LightlessSync/ThirdParty/MeshDecimator/Math/MathHelper.cs deleted file mode 100644 index b530d3d..0000000 --- a/LightlessSync/ThirdParty/MeshDecimator/Math/MathHelper.cs +++ /dev/null @@ -1,286 +0,0 @@ -#region License -/* -MIT License - -Copyright(c) 2017-2018 Mattias Edlund - -Permission is hereby granted, free of charge, to any person obtaining a copy -of this software and associated documentation files (the "Software"), to deal -in the Software without restriction, including without limitation the rights -to use, copy, modify, merge, publish, distribute, sublicense, and/or sell -copies of the Software, and to permit persons to whom the Software is -furnished to do so, subject to the following conditions: - -The above copyright notice and this permission notice shall be included in all -copies or substantial portions of the Software. - -THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR -IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, -FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE -AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER -LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, -OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE -SOFTWARE. -*/ -#endregion - -using System; - -namespace MeshDecimator.Math -{ - /// - /// Math helpers. - /// - public static class MathHelper - { - #region Consts - /// - /// The Pi constant. - /// - public const float PI = 3.14159274f; - - /// - /// The Pi constant. 
- /// - public const double PId = 3.1415926535897932384626433832795; - - /// - /// Degrees to radian constant. - /// - public const float Deg2Rad = PI / 180f; - - /// - /// Degrees to radian constant. - /// - public const double Deg2Radd = PId / 180.0; - - /// - /// Radians to degrees constant. - /// - public const float Rad2Deg = 180f / PI; - - /// - /// Radians to degrees constant. - /// - public const double Rad2Degd = 180.0 / PId; - #endregion - - #region Min - /// - /// Returns the minimum of two values. - /// - /// The first value. - /// The second value. - /// The minimum value. - public static int Min(int val1, int val2) - { - return (val1 < val2 ? val1 : val2); - } - - /// - /// Returns the minimum of three values. - /// - /// The first value. - /// The second value. - /// The third value. - /// The minimum value. - public static int Min(int val1, int val2, int val3) - { - return (val1 < val2 ? (val1 < val3 ? val1 : val3) : (val2 < val3 ? val2 : val3)); - } - - /// - /// Returns the minimum of two values. - /// - /// The first value. - /// The second value. - /// The minimum value. - public static float Min(float val1, float val2) - { - return (val1 < val2 ? val1 : val2); - } - - /// - /// Returns the minimum of three values. - /// - /// The first value. - /// The second value. - /// The third value. - /// The minimum value. - public static float Min(float val1, float val2, float val3) - { - return (val1 < val2 ? (val1 < val3 ? val1 : val3) : (val2 < val3 ? val2 : val3)); - } - - /// - /// Returns the minimum of two values. - /// - /// The first value. - /// The second value. - /// The minimum value. - public static double Min(double val1, double val2) - { - return (val1 < val2 ? val1 : val2); - } - - /// - /// Returns the minimum of three values. - /// - /// The first value. - /// The second value. - /// The third value. - /// The minimum value. - public static double Min(double val1, double val2, double val3) - { - return (val1 < val2 ? (val1 < val3 ? 
val1 : val3) : (val2 < val3 ? val2 : val3)); - } - #endregion - - #region Max - /// - /// Returns the maximum of two values. - /// - /// The first value. - /// The second value. - /// The maximum value. - public static int Max(int val1, int val2) - { - return (val1 > val2 ? val1 : val2); - } - - /// - /// Returns the maximum of three values. - /// - /// The first value. - /// The second value. - /// The third value. - /// The maximum value. - public static int Max(int val1, int val2, int val3) - { - return (val1 > val2 ? (val1 > val3 ? val1 : val3) : (val2 > val3 ? val2 : val3)); - } - - /// - /// Returns the maximum of two values. - /// - /// The first value. - /// The second value. - /// The maximum value. - public static float Max(float val1, float val2) - { - return (val1 > val2 ? val1 : val2); - } - - /// - /// Returns the maximum of three values. - /// - /// The first value. - /// The second value. - /// The third value. - /// The maximum value. - public static float Max(float val1, float val2, float val3) - { - return (val1 > val2 ? (val1 > val3 ? val1 : val3) : (val2 > val3 ? val2 : val3)); - } - - /// - /// Returns the maximum of two values. - /// - /// The first value. - /// The second value. - /// The maximum value. - public static double Max(double val1, double val2) - { - return (val1 > val2 ? val1 : val2); - } - - /// - /// Returns the maximum of three values. - /// - /// The first value. - /// The second value. - /// The third value. - /// The maximum value. - public static double Max(double val1, double val2, double val3) - { - return (val1 > val2 ? (val1 > val3 ? val1 : val3) : (val2 > val3 ? val2 : val3)); - } - #endregion - - #region Clamping - /// - /// Clamps a value between a minimum and a maximum value. - /// - /// The value to clamp. - /// The minimum value. - /// The maximum value. - /// The clamped value. - public static float Clamp(float value, float min, float max) - { - return (value >= min ? (value <= max ? 
value : max) : min); - } - - /// - /// Clamps a value between a minimum and a maximum value. - /// - /// The value to clamp. - /// The minimum value. - /// The maximum value. - /// The clamped value. - public static double Clamp(double value, double min, double max) - { - return (value >= min ? (value <= max ? value : max) : min); - } - - /// - /// Clamps the value between 0 and 1. - /// - /// The value to clamp. - /// The clamped value. - public static float Clamp01(float value) - { - return (value > 0f ? (value < 1f ? value : 1f) : 0f); - } - - /// - /// Clamps the value between 0 and 1. - /// - /// The value to clamp. - /// The clamped value. - public static double Clamp01(double value) - { - return (value > 0.0 ? (value < 1.0 ? value : 1.0) : 0.0); - } - #endregion - - #region Triangle Area - /// - /// Calculates the area of a triangle. - /// - /// The first point. - /// The second point. - /// The third point. - /// The triangle area. - public static float TriangleArea(ref Vector3 p0, ref Vector3 p1, ref Vector3 p2) - { - var dx = p1 - p0; - var dy = p2 - p0; - return dx.Magnitude * ((float)System.Math.Sin(Vector3.Angle(ref dx, ref dy) * Deg2Rad) * dy.Magnitude) * 0.5f; - } - - /// - /// Calculates the area of a triangle. - /// - /// The first point. - /// The second point. - /// The third point. - /// The triangle area. 
- public static double TriangleArea(ref Vector3d p0, ref Vector3d p1, ref Vector3d p2) - { - var dx = p1 - p0; - var dy = p2 - p0; - return dx.Magnitude * (System.Math.Sin(Vector3d.Angle(ref dx, ref dy) * Deg2Radd) * dy.Magnitude) * 0.5f; - } - #endregion - } -} \ No newline at end of file diff --git a/LightlessSync/ThirdParty/MeshDecimator/Math/SymmetricMatrix.cs b/LightlessSync/ThirdParty/MeshDecimator/Math/SymmetricMatrix.cs deleted file mode 100644 index 3daa4e7..0000000 --- a/LightlessSync/ThirdParty/MeshDecimator/Math/SymmetricMatrix.cs +++ /dev/null @@ -1,303 +0,0 @@ -#region License -/* -MIT License - -Copyright(c) 2017-2018 Mattias Edlund - -Permission is hereby granted, free of charge, to any person obtaining a copy -of this software and associated documentation files (the "Software"), to deal -in the Software without restriction, including without limitation the rights -to use, copy, modify, merge, publish, distribute, sublicense, and/or sell -copies of the Software, and to permit persons to whom the Software is -furnished to do so, subject to the following conditions: - -The above copyright notice and this permission notice shall be included in all -copies or substantial portions of the Software. - -THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR -IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, -FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE -AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER -LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, -OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE -SOFTWARE. -*/ -#endregion - -using System; - -namespace MeshDecimator.Math -{ - /// - /// A symmetric matrix. - /// - public struct SymmetricMatrix - { - #region Fields - /// - /// The m11 component. - /// - public double m0; - /// - /// The m12 component. 
- /// - public double m1; - /// - /// The m13 component. - /// - public double m2; - /// - /// The m14 component. - /// - public double m3; - /// - /// The m22 component. - /// - public double m4; - /// - /// The m23 component. - /// - public double m5; - /// - /// The m24 component. - /// - public double m6; - /// - /// The m33 component. - /// - public double m7; - /// - /// The m34 component. - /// - public double m8; - /// - /// The m44 component. - /// - public double m9; - #endregion - - #region Properties - /// - /// Gets the component value with a specific index. - /// - /// The component index. - /// The value. - public double this[int index] - { - get - { - switch (index) - { - case 0: - return m0; - case 1: - return m1; - case 2: - return m2; - case 3: - return m3; - case 4: - return m4; - case 5: - return m5; - case 6: - return m6; - case 7: - return m7; - case 8: - return m8; - case 9: - return m9; - default: - throw new IndexOutOfRangeException(); - } - } - } - #endregion - - #region Constructor - /// - /// Creates a symmetric matrix with a value in each component. - /// - /// The component value. - public SymmetricMatrix(double c) - { - this.m0 = c; - this.m1 = c; - this.m2 = c; - this.m3 = c; - this.m4 = c; - this.m5 = c; - this.m6 = c; - this.m7 = c; - this.m8 = c; - this.m9 = c; - } - - /// - /// Creates a symmetric matrix. - /// - /// The m11 component. - /// The m12 component. - /// The m13 component. - /// The m14 component. - /// The m22 component. - /// The m23 component. - /// The m24 component. - /// The m33 component. - /// The m34 component. - /// The m44 component. - public SymmetricMatrix(double m0, double m1, double m2, double m3, - double m4, double m5, double m6, double m7, double m8, double m9) - { - this.m0 = m0; - this.m1 = m1; - this.m2 = m2; - this.m3 = m3; - this.m4 = m4; - this.m5 = m5; - this.m6 = m6; - this.m7 = m7; - this.m8 = m8; - this.m9 = m9; - } - - /// - /// Creates a symmetric matrix from a plane. 
- /// - /// The plane x-component. - /// The plane y-component - /// The plane z-component - /// The plane w-component - public SymmetricMatrix(double a, double b, double c, double d) - { - this.m0 = a * a; - this.m1 = a * b; - this.m2 = a * c; - this.m3 = a * d; - - this.m4 = b * b; - this.m5 = b * c; - this.m6 = b * d; - - this.m7 = c * c; - this.m8 = c * d; - - this.m9 = d * d; - } - #endregion - - #region Operators - /// - /// Adds two matrixes together. - /// - /// The left hand side. - /// The right hand side. - /// The resulting matrix. - public static SymmetricMatrix operator +(SymmetricMatrix a, SymmetricMatrix b) - { - return new SymmetricMatrix( - a.m0 + b.m0, a.m1 + b.m1, a.m2 + b.m2, a.m3 + b.m3, - a.m4 + b.m4, a.m5 + b.m5, a.m6 + b.m6, - a.m7 + b.m7, a.m8 + b.m8, - a.m9 + b.m9 - ); - } - #endregion - - #region Internal Methods - /// - /// Determinant(0, 1, 2, 1, 4, 5, 2, 5, 7) - /// - /// - internal double Determinant1() - { - double det = - m0 * m4 * m7 + - m2 * m1 * m5 + - m1 * m5 * m2 - - m2 * m4 * m2 - - m0 * m5 * m5 - - m1 * m1 * m7; - return det; - } - - /// - /// Determinant(1, 2, 3, 4, 5, 6, 5, 7, 8) - /// - /// - internal double Determinant2() - { - double det = - m1 * m5 * m8 + - m3 * m4 * m7 + - m2 * m6 * m5 - - m3 * m5 * m5 - - m1 * m6 * m7 - - m2 * m4 * m8; - return det; - } - - /// - /// Determinant(0, 2, 3, 1, 5, 6, 2, 7, 8) - /// - /// - internal double Determinant3() - { - double det = - m0 * m5 * m8 + - m3 * m1 * m7 + - m2 * m6 * m2 - - m3 * m5 * m2 - - m0 * m6 * m7 - - m2 * m1 * m8; - return det; - } - - /// - /// Determinant(0, 1, 3, 1, 4, 6, 2, 5, 8) - /// - /// - internal double Determinant4() - { - double det = - m0 * m4 * m8 + - m3 * m1 * m5 + - m1 * m6 * m2 - - m3 * m4 * m2 - - m0 * m6 * m5 - - m1 * m1 * m8; - return det; - } - #endregion - - #region Public Methods - /// - /// Computes the determinant of this matrix. - /// - /// The a11 index. - /// The a12 index. - /// The a13 index. - /// The a21 index. - /// The a22 index. 
- /// The a23 index. - /// The a31 index. - /// The a32 index. - /// The a33 index. - /// The determinant value. - public double Determinant(int a11, int a12, int a13, - int a21, int a22, int a23, - int a31, int a32, int a33) - { - double det = - this[a11] * this[a22] * this[a33] + - this[a13] * this[a21] * this[a32] + - this[a12] * this[a23] * this[a31] - - this[a13] * this[a22] * this[a31] - - this[a11] * this[a23] * this[a32] - - this[a12] * this[a21] * this[a33]; - return det; - } - #endregion - } -} \ No newline at end of file diff --git a/LightlessSync/ThirdParty/MeshDecimator/Math/Vector2.cs b/LightlessSync/ThirdParty/MeshDecimator/Math/Vector2.cs deleted file mode 100644 index 68f06f4..0000000 --- a/LightlessSync/ThirdParty/MeshDecimator/Math/Vector2.cs +++ /dev/null @@ -1,425 +0,0 @@ -#region License -/* -MIT License - -Copyright(c) 2017-2018 Mattias Edlund - -Permission is hereby granted, free of charge, to any person obtaining a copy -of this software and associated documentation files (the "Software"), to deal -in the Software without restriction, including without limitation the rights -to use, copy, modify, merge, publish, distribute, sublicense, and/or sell -copies of the Software, and to permit persons to whom the Software is -furnished to do so, subject to the following conditions: - -The above copyright notice and this permission notice shall be included in all -copies or substantial portions of the Software. - -THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR -IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, -FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE -AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER -LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, -OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE -SOFTWARE. 
-*/ -#endregion - -using System; -using System.Globalization; - -namespace MeshDecimator.Math -{ - /// - /// A single precision 2D vector. - /// - public struct Vector2 : IEquatable - { - #region Static Read-Only - /// - /// The zero vector. - /// - public static readonly Vector2 zero = new Vector2(0, 0); - #endregion - - #region Consts - /// - /// The vector epsilon. - /// - public const float Epsilon = 9.99999944E-11f; - #endregion - - #region Fields - /// - /// The x component. - /// - public float x; - /// - /// The y component. - /// - public float y; - #endregion - - #region Properties - /// - /// Gets the magnitude of this vector. - /// - public float Magnitude - { - get { return (float)System.Math.Sqrt(x * x + y * y); } - } - - /// - /// Gets the squared magnitude of this vector. - /// - public float MagnitudeSqr - { - get { return (x * x + y * y); } - } - - /// - /// Gets a normalized vector from this vector. - /// - public Vector2 Normalized - { - get - { - Vector2 result; - Normalize(ref this, out result); - return result; - } - } - - /// - /// Gets or sets a specific component by index in this vector. - /// - /// The component index. - public float this[int index] - { - get - { - switch (index) - { - case 0: - return x; - case 1: - return y; - default: - throw new IndexOutOfRangeException("Invalid Vector2 index!"); - } - } - set - { - switch (index) - { - case 0: - x = value; - break; - case 1: - y = value; - break; - default: - throw new IndexOutOfRangeException("Invalid Vector2 index!"); - } - } - } - #endregion - - #region Constructor - /// - /// Creates a new vector with one value for all components. - /// - /// The value. - public Vector2(float value) - { - this.x = value; - this.y = value; - } - - /// - /// Creates a new vector. - /// - /// The x value. - /// The y value. - public Vector2(float x, float y) - { - this.x = x; - this.y = y; - } - #endregion - - #region Operators - /// - /// Adds two vectors. - /// - /// The first vector. 
- /// The second vector. - /// The resulting vector. - public static Vector2 operator +(Vector2 a, Vector2 b) - { - return new Vector2(a.x + b.x, a.y + b.y); - } - - /// - /// Subtracts two vectors. - /// - /// The first vector. - /// The second vector. - /// The resulting vector. - public static Vector2 operator -(Vector2 a, Vector2 b) - { - return new Vector2(a.x - b.x, a.y - b.y); - } - - /// - /// Scales the vector uniformly. - /// - /// The vector. - /// The scaling value. - /// The resulting vector. - public static Vector2 operator *(Vector2 a, float d) - { - return new Vector2(a.x * d, a.y * d); - } - - /// - /// Scales the vector uniformly. - /// - /// The scaling value. - /// The vector. - /// The resulting vector. - public static Vector2 operator *(float d, Vector2 a) - { - return new Vector2(a.x * d, a.y * d); - } - - /// - /// Divides the vector with a float. - /// - /// The vector. - /// The dividing float value. - /// The resulting vector. - public static Vector2 operator /(Vector2 a, float d) - { - return new Vector2(a.x / d, a.y / d); - } - - /// - /// Subtracts the vector from a zero vector. - /// - /// The vector. - /// The resulting vector. - public static Vector2 operator -(Vector2 a) - { - return new Vector2(-a.x, -a.y); - } - - /// - /// Returns if two vectors equals eachother. - /// - /// The left hand side vector. - /// The right hand side vector. - /// If equals. - public static bool operator ==(Vector2 lhs, Vector2 rhs) - { - return (lhs - rhs).MagnitudeSqr < Epsilon; - } - - /// - /// Returns if two vectors don't equal eachother. - /// - /// The left hand side vector. - /// The right hand side vector. - /// If not equals. - public static bool operator !=(Vector2 lhs, Vector2 rhs) - { - return (lhs - rhs).MagnitudeSqr >= Epsilon; - } - - /// - /// Explicitly converts from a double-precision vector into a single-precision vector. - /// - /// The double-precision vector. 
- public static explicit operator Vector2(Vector2d v) - { - return new Vector2((float)v.x, (float)v.y); - } - - /// - /// Implicitly converts from an integer vector into a single-precision vector. - /// - /// The integer vector. - public static implicit operator Vector2(Vector2i v) - { - return new Vector2(v.x, v.y); - } - #endregion - - #region Public Methods - #region Instance - /// - /// Set x and y components of an existing vector. - /// - /// The x value. - /// The y value. - public void Set(float x, float y) - { - this.x = x; - this.y = y; - } - - /// - /// Multiplies with another vector component-wise. - /// - /// The vector to multiply with. - public void Scale(ref Vector2 scale) - { - x *= scale.x; - y *= scale.y; - } - - /// - /// Normalizes this vector. - /// - public void Normalize() - { - float mag = this.Magnitude; - if (mag > Epsilon) - { - x /= mag; - y /= mag; - } - else - { - x = y = 0; - } - } - - /// - /// Clamps this vector between a specific range. - /// - /// The minimum component value. - /// The maximum component value. - public void Clamp(float min, float max) - { - if (x < min) x = min; - else if (x > max) x = max; - - if (y < min) y = min; - else if (y > max) y = max; - } - #endregion - - #region Object - /// - /// Returns a hash code for this vector. - /// - /// The hash code. - public override int GetHashCode() - { - return x.GetHashCode() ^ y.GetHashCode() << 2; - } - - /// - /// Returns if this vector is equal to another one. - /// - /// The other vector to compare to. - /// If equals. - public override bool Equals(object other) - { - if (!(other is Vector2)) - { - return false; - } - Vector2 vector = (Vector2)other; - return (x == vector.x && y == vector.y); - } - - /// - /// Returns if this vector is equal to another one. - /// - /// The other vector to compare to. - /// If equals. - public bool Equals(Vector2 other) - { - return (x == other.x && y == other.y); - } - - /// - /// Returns a nicely formatted string for this vector. 
- /// - /// The string. - public override string ToString() - { - return string.Format("({0}, {1})", - x.ToString("F1", CultureInfo.InvariantCulture), - y.ToString("F1", CultureInfo.InvariantCulture)); - } - - /// - /// Returns a nicely formatted string for this vector. - /// - /// The float format. - /// The string. - public string ToString(string format) - { - return string.Format("({0}, {1})", - x.ToString(format, CultureInfo.InvariantCulture), - y.ToString(format, CultureInfo.InvariantCulture)); - } - #endregion - - #region Static - /// - /// Dot Product of two vectors. - /// - /// The left hand side vector. - /// The right hand side vector. - public static float Dot(ref Vector2 lhs, ref Vector2 rhs) - { - return lhs.x * rhs.x + lhs.y * rhs.y; - } - - /// - /// Performs a linear interpolation between two vectors. - /// - /// The vector to interpolate from. - /// The vector to interpolate to. - /// The time fraction. - /// The resulting vector. - public static void Lerp(ref Vector2 a, ref Vector2 b, float t, out Vector2 result) - { - result = new Vector2(a.x + (b.x - a.x) * t, a.y + (b.y - a.y) * t); - } - - /// - /// Multiplies two vectors component-wise. - /// - /// The first vector. - /// The second vector. - /// The resulting vector. - public static void Scale(ref Vector2 a, ref Vector2 b, out Vector2 result) - { - result = new Vector2(a.x * b.x, a.y * b.y); - } - - /// - /// Normalizes a vector. - /// - /// The vector to normalize. - /// The resulting normalized vector. 
- public static void Normalize(ref Vector2 value, out Vector2 result) - { - float mag = value.Magnitude; - if (mag > Epsilon) - { - result = new Vector2(value.x / mag, value.y / mag); - } - else - { - result = Vector2.zero; - } - } - #endregion - #endregion - } -} \ No newline at end of file diff --git a/LightlessSync/ThirdParty/MeshDecimator/Math/Vector2d.cs b/LightlessSync/ThirdParty/MeshDecimator/Math/Vector2d.cs deleted file mode 100644 index 72f62aa..0000000 --- a/LightlessSync/ThirdParty/MeshDecimator/Math/Vector2d.cs +++ /dev/null @@ -1,425 +0,0 @@ -#region License -/* -MIT License - -Copyright(c) 2017-2018 Mattias Edlund - -Permission is hereby granted, free of charge, to any person obtaining a copy -of this software and associated documentation files (the "Software"), to deal -in the Software without restriction, including without limitation the rights -to use, copy, modify, merge, publish, distribute, sublicense, and/or sell -copies of the Software, and to permit persons to whom the Software is -furnished to do so, subject to the following conditions: - -The above copyright notice and this permission notice shall be included in all -copies or substantial portions of the Software. - -THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR -IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, -FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE -AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER -LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, -OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE -SOFTWARE. -*/ -#endregion - -using System; -using System.Globalization; - -namespace MeshDecimator.Math -{ - /// - /// A double precision 2D vector. - /// - public struct Vector2d : IEquatable - { - #region Static Read-Only - /// - /// The zero vector. 
- /// - public static readonly Vector2d zero = new Vector2d(0, 0); - #endregion - - #region Consts - /// - /// The vector epsilon. - /// - public const double Epsilon = double.Epsilon; - #endregion - - #region Fields - /// - /// The x component. - /// - public double x; - /// - /// The y component. - /// - public double y; - #endregion - - #region Properties - /// - /// Gets the magnitude of this vector. - /// - public double Magnitude - { - get { return System.Math.Sqrt(x * x + y * y); } - } - - /// - /// Gets the squared magnitude of this vector. - /// - public double MagnitudeSqr - { - get { return (x * x + y * y); } - } - - /// - /// Gets a normalized vector from this vector. - /// - public Vector2d Normalized - { - get - { - Vector2d result; - Normalize(ref this, out result); - return result; - } - } - - /// - /// Gets or sets a specific component by index in this vector. - /// - /// The component index. - public double this[int index] - { - get - { - switch (index) - { - case 0: - return x; - case 1: - return y; - default: - throw new IndexOutOfRangeException("Invalid Vector2d index!"); - } - } - set - { - switch (index) - { - case 0: - x = value; - break; - case 1: - y = value; - break; - default: - throw new IndexOutOfRangeException("Invalid Vector2d index!"); - } - } - } - #endregion - - #region Constructor - /// - /// Creates a new vector with one value for all components. - /// - /// The value. - public Vector2d(double value) - { - this.x = value; - this.y = value; - } - - /// - /// Creates a new vector. - /// - /// The x value. - /// The y value. - public Vector2d(double x, double y) - { - this.x = x; - this.y = y; - } - #endregion - - #region Operators - /// - /// Adds two vectors. - /// - /// The first vector. - /// The second vector. - /// The resulting vector. - public static Vector2d operator +(Vector2d a, Vector2d b) - { - return new Vector2d(a.x + b.x, a.y + b.y); - } - - /// - /// Subtracts two vectors. - /// - /// The first vector. 
- /// The second vector. - /// The resulting vector. - public static Vector2d operator -(Vector2d a, Vector2d b) - { - return new Vector2d(a.x - b.x, a.y - b.y); - } - - /// - /// Scales the vector uniformly. - /// - /// The vector. - /// The scaling value. - /// The resulting vector. - public static Vector2d operator *(Vector2d a, double d) - { - return new Vector2d(a.x * d, a.y * d); - } - - /// - /// Scales the vector uniformly. - /// - /// The scaling value. - /// The vector. - /// The resulting vector. - public static Vector2d operator *(double d, Vector2d a) - { - return new Vector2d(a.x * d, a.y * d); - } - - /// - /// Divides the vector with a float. - /// - /// The vector. - /// The dividing float value. - /// The resulting vector. - public static Vector2d operator /(Vector2d a, double d) - { - return new Vector2d(a.x / d, a.y / d); - } - - /// - /// Subtracts the vector from a zero vector. - /// - /// The vector. - /// The resulting vector. - public static Vector2d operator -(Vector2d a) - { - return new Vector2d(-a.x, -a.y); - } - - /// - /// Returns if two vectors equals eachother. - /// - /// The left hand side vector. - /// The right hand side vector. - /// If equals. - public static bool operator ==(Vector2d lhs, Vector2d rhs) - { - return (lhs - rhs).MagnitudeSqr < Epsilon; - } - - /// - /// Returns if two vectors don't equal eachother. - /// - /// The left hand side vector. - /// The right hand side vector. - /// If not equals. - public static bool operator !=(Vector2d lhs, Vector2d rhs) - { - return (lhs - rhs).MagnitudeSqr >= Epsilon; - } - - /// - /// Implicitly converts from a single-precision vector into a double-precision vector. - /// - /// The single-precision vector. - public static implicit operator Vector2d(Vector2 v) - { - return new Vector2d(v.x, v.y); - } - - /// - /// Implicitly converts from an integer vector into a double-precision vector. - /// - /// The integer vector. 
- public static implicit operator Vector2d(Vector2i v) - { - return new Vector2d(v.x, v.y); - } - #endregion - - #region Public Methods - #region Instance - /// - /// Set x and y components of an existing vector. - /// - /// The x value. - /// The y value. - public void Set(double x, double y) - { - this.x = x; - this.y = y; - } - - /// - /// Multiplies with another vector component-wise. - /// - /// The vector to multiply with. - public void Scale(ref Vector2d scale) - { - x *= scale.x; - y *= scale.y; - } - - /// - /// Normalizes this vector. - /// - public void Normalize() - { - double mag = this.Magnitude; - if (mag > Epsilon) - { - x /= mag; - y /= mag; - } - else - { - x = y = 0; - } - } - - /// - /// Clamps this vector between a specific range. - /// - /// The minimum component value. - /// The maximum component value. - public void Clamp(double min, double max) - { - if (x < min) x = min; - else if (x > max) x = max; - - if (y < min) y = min; - else if (y > max) y = max; - } - #endregion - - #region Object - /// - /// Returns a hash code for this vector. - /// - /// The hash code. - public override int GetHashCode() - { - return x.GetHashCode() ^ y.GetHashCode() << 2; - } - - /// - /// Returns if this vector is equal to another one. - /// - /// The other vector to compare to. - /// If equals. - public override bool Equals(object other) - { - if (!(other is Vector2d)) - { - return false; - } - Vector2d vector = (Vector2d)other; - return (x == vector.x && y == vector.y); - } - - /// - /// Returns if this vector is equal to another one. - /// - /// The other vector to compare to. - /// If equals. - public bool Equals(Vector2d other) - { - return (x == other.x && y == other.y); - } - - /// - /// Returns a nicely formatted string for this vector. - /// - /// The string. 
- public override string ToString() - { - return string.Format("({0}, {1})", - x.ToString("F1", CultureInfo.InvariantCulture), - y.ToString("F1", CultureInfo.InvariantCulture)); - } - - /// - /// Returns a nicely formatted string for this vector. - /// - /// The float format. - /// The string. - public string ToString(string format) - { - return string.Format("({0}, {1})", - x.ToString(format, CultureInfo.InvariantCulture), - y.ToString(format, CultureInfo.InvariantCulture)); - } - #endregion - - #region Static - /// - /// Dot Product of two vectors. - /// - /// The left hand side vector. - /// The right hand side vector. - public static double Dot(ref Vector2d lhs, ref Vector2d rhs) - { - return lhs.x * rhs.x + lhs.y * rhs.y; - } - - /// - /// Performs a linear interpolation between two vectors. - /// - /// The vector to interpolate from. - /// The vector to interpolate to. - /// The time fraction. - /// The resulting vector. - public static void Lerp(ref Vector2d a, ref Vector2d b, double t, out Vector2d result) - { - result = new Vector2d(a.x + (b.x - a.x) * t, a.y + (b.y - a.y) * t); - } - - /// - /// Multiplies two vectors component-wise. - /// - /// The first vector. - /// The second vector. - /// The resulting vector. - public static void Scale(ref Vector2d a, ref Vector2d b, out Vector2d result) - { - result = new Vector2d(a.x * b.x, a.y * b.y); - } - - /// - /// Normalizes a vector. - /// - /// The vector to normalize. - /// The resulting normalized vector. 
- public static void Normalize(ref Vector2d value, out Vector2d result) - { - double mag = value.Magnitude; - if (mag > Epsilon) - { - result = new Vector2d(value.x / mag, value.y / mag); - } - else - { - result = Vector2d.zero; - } - } - #endregion - #endregion - } -} \ No newline at end of file diff --git a/LightlessSync/ThirdParty/MeshDecimator/Math/Vector2i.cs b/LightlessSync/ThirdParty/MeshDecimator/Math/Vector2i.cs deleted file mode 100644 index 20b808b..0000000 --- a/LightlessSync/ThirdParty/MeshDecimator/Math/Vector2i.cs +++ /dev/null @@ -1,348 +0,0 @@ -#region License -/* -MIT License - -Copyright(c) 2017-2018 Mattias Edlund - -Permission is hereby granted, free of charge, to any person obtaining a copy -of this software and associated documentation files (the "Software"), to deal -in the Software without restriction, including without limitation the rights -to use, copy, modify, merge, publish, distribute, sublicense, and/or sell -copies of the Software, and to permit persons to whom the Software is -furnished to do so, subject to the following conditions: - -The above copyright notice and this permission notice shall be included in all -copies or substantial portions of the Software. - -THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR -IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, -FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE -AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER -LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, -OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE -SOFTWARE. -*/ -#endregion - -using System; -using System.Globalization; - -namespace MeshDecimator.Math -{ - /// - /// A 2D integer vector. - /// - public struct Vector2i : IEquatable - { - #region Static Read-Only - /// - /// The zero vector. 
- /// - public static readonly Vector2i zero = new Vector2i(0, 0); - #endregion - - #region Fields - /// - /// The x component. - /// - public int x; - /// - /// The y component. - /// - public int y; - #endregion - - #region Properties - /// - /// Gets the magnitude of this vector. - /// - public int Magnitude - { - get { return (int)System.Math.Sqrt(x * x + y * y); } - } - - /// - /// Gets the squared magnitude of this vector. - /// - public int MagnitudeSqr - { - get { return (x * x + y * y); } - } - - /// - /// Gets or sets a specific component by index in this vector. - /// - /// The component index. - public int this[int index] - { - get - { - switch (index) - { - case 0: - return x; - case 1: - return y; - default: - throw new IndexOutOfRangeException("Invalid Vector2i index!"); - } - } - set - { - switch (index) - { - case 0: - x = value; - break; - case 1: - y = value; - break; - default: - throw new IndexOutOfRangeException("Invalid Vector2i index!"); - } - } - } - #endregion - - #region Constructor - /// - /// Creates a new vector with one value for all components. - /// - /// The value. - public Vector2i(int value) - { - this.x = value; - this.y = value; - } - - /// - /// Creates a new vector. - /// - /// The x value. - /// The y value. - public Vector2i(int x, int y) - { - this.x = x; - this.y = y; - } - #endregion - - #region Operators - /// - /// Adds two vectors. - /// - /// The first vector. - /// The second vector. - /// The resulting vector. - public static Vector2i operator +(Vector2i a, Vector2i b) - { - return new Vector2i(a.x + b.x, a.y + b.y); - } - - /// - /// Subtracts two vectors. - /// - /// The first vector. - /// The second vector. - /// The resulting vector. - public static Vector2i operator -(Vector2i a, Vector2i b) - { - return new Vector2i(a.x - b.x, a.y - b.y); - } - - /// - /// Scales the vector uniformly. - /// - /// The vector. - /// The scaling value. - /// The resulting vector. 
- public static Vector2i operator *(Vector2i a, int d) - { - return new Vector2i(a.x * d, a.y * d); - } - - /// - /// Scales the vector uniformly. - /// - /// The scaling value. - /// The vector. - /// The resulting vector. - public static Vector2i operator *(int d, Vector2i a) - { - return new Vector2i(a.x * d, a.y * d); - } - - /// - /// Divides the vector with a float. - /// - /// The vector. - /// The dividing float value. - /// The resulting vector. - public static Vector2i operator /(Vector2i a, int d) - { - return new Vector2i(a.x / d, a.y / d); - } - - /// - /// Subtracts the vector from a zero vector. - /// - /// The vector. - /// The resulting vector. - public static Vector2i operator -(Vector2i a) - { - return new Vector2i(-a.x, -a.y); - } - - /// - /// Returns if two vectors equals eachother. - /// - /// The left hand side vector. - /// The right hand side vector. - /// If equals. - public static bool operator ==(Vector2i lhs, Vector2i rhs) - { - return (lhs.x == rhs.x && lhs.y == rhs.y); - } - - /// - /// Returns if two vectors don't equal eachother. - /// - /// The left hand side vector. - /// The right hand side vector. - /// If not equals. - public static bool operator !=(Vector2i lhs, Vector2i rhs) - { - return (lhs.x != rhs.x || lhs.y != rhs.y); - } - - /// - /// Explicitly converts from a single-precision vector into an integer vector. - /// - /// The single-precision vector. - public static explicit operator Vector2i(Vector2 v) - { - return new Vector2i((int)v.x, (int)v.y); - } - - /// - /// Explicitly converts from a double-precision vector into an integer vector. - /// - /// The double-precision vector. - public static explicit operator Vector2i(Vector2d v) - { - return new Vector2i((int)v.x, (int)v.y); - } - #endregion - - #region Public Methods - #region Instance - /// - /// Set x and y components of an existing vector. - /// - /// The x value. - /// The y value. 
- public void Set(int x, int y) - { - this.x = x; - this.y = y; - } - - /// - /// Multiplies with another vector component-wise. - /// - /// The vector to multiply with. - public void Scale(ref Vector2i scale) - { - x *= scale.x; - y *= scale.y; - } - - /// - /// Clamps this vector between a specific range. - /// - /// The minimum component value. - /// The maximum component value. - public void Clamp(int min, int max) - { - if (x < min) x = min; - else if (x > max) x = max; - - if (y < min) y = min; - else if (y > max) y = max; - } - #endregion - - #region Object - /// - /// Returns a hash code for this vector. - /// - /// The hash code. - public override int GetHashCode() - { - return x.GetHashCode() ^ y.GetHashCode() << 2; - } - - /// - /// Returns if this vector is equal to another one. - /// - /// The other vector to compare to. - /// If equals. - public override bool Equals(object other) - { - if (!(other is Vector2i)) - { - return false; - } - Vector2i vector = (Vector2i)other; - return (x == vector.x && y == vector.y); - } - - /// - /// Returns if this vector is equal to another one. - /// - /// The other vector to compare to. - /// If equals. - public bool Equals(Vector2i other) - { - return (x == other.x && y == other.y); - } - - /// - /// Returns a nicely formatted string for this vector. - /// - /// The string. - public override string ToString() - { - return string.Format("({0}, {1})", - x.ToString(CultureInfo.InvariantCulture), - y.ToString(CultureInfo.InvariantCulture)); - } - - /// - /// Returns a nicely formatted string for this vector. - /// - /// The integer format. - /// The string. - public string ToString(string format) - { - return string.Format("({0}, {1})", - x.ToString(format, CultureInfo.InvariantCulture), - y.ToString(format, CultureInfo.InvariantCulture)); - } - #endregion - - #region Static - /// - /// Multiplies two vectors component-wise. - /// - /// The first vector. - /// The second vector. - /// The resulting vector. 
- public static void Scale(ref Vector2i a, ref Vector2i b, out Vector2i result) - { - result = new Vector2i(a.x * b.x, a.y * b.y); - } - #endregion - #endregion - } -} \ No newline at end of file diff --git a/LightlessSync/ThirdParty/MeshDecimator/Math/Vector3.cs b/LightlessSync/ThirdParty/MeshDecimator/Math/Vector3.cs deleted file mode 100644 index 4c91aa5..0000000 --- a/LightlessSync/ThirdParty/MeshDecimator/Math/Vector3.cs +++ /dev/null @@ -1,494 +0,0 @@ -#region License -/* -MIT License - -Copyright(c) 2017-2018 Mattias Edlund - -Permission is hereby granted, free of charge, to any person obtaining a copy -of this software and associated documentation files (the "Software"), to deal -in the Software without restriction, including without limitation the rights -to use, copy, modify, merge, publish, distribute, sublicense, and/or sell -copies of the Software, and to permit persons to whom the Software is -furnished to do so, subject to the following conditions: - -The above copyright notice and this permission notice shall be included in all -copies or substantial portions of the Software. - -THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR -IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, -FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE -AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER -LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, -OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE -SOFTWARE. -*/ -#endregion - -using System; -using System.Globalization; - -namespace MeshDecimator.Math -{ - /// - /// A single precision 3D vector. - /// - public struct Vector3 : IEquatable - { - #region Static Read-Only - /// - /// The zero vector. - /// - public static readonly Vector3 zero = new Vector3(0, 0, 0); - #endregion - - #region Consts - /// - /// The vector epsilon. 
- /// - public const float Epsilon = 9.99999944E-11f; - #endregion - - #region Fields - /// - /// The x component. - /// - public float x; - /// - /// The y component. - /// - public float y; - /// - /// The z component. - /// - public float z; - #endregion - - #region Properties - /// - /// Gets the magnitude of this vector. - /// - public float Magnitude - { - get { return (float)System.Math.Sqrt(x * x + y * y + z * z); } - } - - /// - /// Gets the squared magnitude of this vector. - /// - public float MagnitudeSqr - { - get { return (x * x + y * y + z * z); } - } - - /// - /// Gets a normalized vector from this vector. - /// - public Vector3 Normalized - { - get - { - Vector3 result; - Normalize(ref this, out result); - return result; - } - } - - /// - /// Gets or sets a specific component by index in this vector. - /// - /// The component index. - public float this[int index] - { - get - { - switch (index) - { - case 0: - return x; - case 1: - return y; - case 2: - return z; - default: - throw new IndexOutOfRangeException("Invalid Vector3 index!"); - } - } - set - { - switch (index) - { - case 0: - x = value; - break; - case 1: - y = value; - break; - case 2: - z = value; - break; - default: - throw new IndexOutOfRangeException("Invalid Vector3 index!"); - } - } - } - #endregion - - #region Constructor - /// - /// Creates a new vector with one value for all components. - /// - /// The value. - public Vector3(float value) - { - this.x = value; - this.y = value; - this.z = value; - } - - /// - /// Creates a new vector. - /// - /// The x value. - /// The y value. - /// The z value. - public Vector3(float x, float y, float z) - { - this.x = x; - this.y = y; - this.z = z; - } - - /// - /// Creates a new vector from a double precision vector. - /// - /// The double precision vector. 
- public Vector3(Vector3d vector) - { - this.x = (float)vector.x; - this.y = (float)vector.y; - this.z = (float)vector.z; - } - #endregion - - #region Operators - /// - /// Adds two vectors. - /// - /// The first vector. - /// The second vector. - /// The resulting vector. - public static Vector3 operator +(Vector3 a, Vector3 b) - { - return new Vector3(a.x + b.x, a.y + b.y, a.z + b.z); - } - - /// - /// Subtracts two vectors. - /// - /// The first vector. - /// The second vector. - /// The resulting vector. - public static Vector3 operator -(Vector3 a, Vector3 b) - { - return new Vector3(a.x - b.x, a.y - b.y, a.z - b.z); - } - - /// - /// Scales the vector uniformly. - /// - /// The vector. - /// The scaling value. - /// The resulting vector. - public static Vector3 operator *(Vector3 a, float d) - { - return new Vector3(a.x * d, a.y * d, a.z * d); - } - - /// - /// Scales the vector uniformly. - /// - /// The scaling value. - /// The vector. - /// The resulting vector. - public static Vector3 operator *(float d, Vector3 a) - { - return new Vector3(a.x * d, a.y * d, a.z * d); - } - - /// - /// Divides the vector with a float. - /// - /// The vector. - /// The dividing float value. - /// The resulting vector. - public static Vector3 operator /(Vector3 a, float d) - { - return new Vector3(a.x / d, a.y / d, a.z / d); - } - - /// - /// Subtracts the vector from a zero vector. - /// - /// The vector. - /// The resulting vector. - public static Vector3 operator -(Vector3 a) - { - return new Vector3(-a.x, -a.y, -a.z); - } - - /// - /// Returns if two vectors equals eachother. - /// - /// The left hand side vector. - /// The right hand side vector. - /// If equals. - public static bool operator ==(Vector3 lhs, Vector3 rhs) - { - return (lhs - rhs).MagnitudeSqr < Epsilon; - } - - /// - /// Returns if two vectors don't equal eachother. - /// - /// The left hand side vector. - /// The right hand side vector. - /// If not equals. 
- public static bool operator !=(Vector3 lhs, Vector3 rhs) - { - return (lhs - rhs).MagnitudeSqr >= Epsilon; - } - - /// - /// Explicitly converts from a double-precision vector into a single-precision vector. - /// - /// The double-precision vector. - public static explicit operator Vector3(Vector3d v) - { - return new Vector3((float)v.x, (float)v.y, (float)v.z); - } - - /// - /// Implicitly converts from an integer vector into a single-precision vector. - /// - /// The integer vector. - public static implicit operator Vector3(Vector3i v) - { - return new Vector3(v.x, v.y, v.z); - } - #endregion - - #region Public Methods - #region Instance - /// - /// Set x, y and z components of an existing vector. - /// - /// The x value. - /// The y value. - /// The z value. - public void Set(float x, float y, float z) - { - this.x = x; - this.y = y; - this.z = z; - } - - /// - /// Multiplies with another vector component-wise. - /// - /// The vector to multiply with. - public void Scale(ref Vector3 scale) - { - x *= scale.x; - y *= scale.y; - z *= scale.z; - } - - /// - /// Normalizes this vector. - /// - public void Normalize() - { - float mag = this.Magnitude; - if (mag > Epsilon) - { - x /= mag; - y /= mag; - z /= mag; - } - else - { - x = y = z = 0; - } - } - - /// - /// Clamps this vector between a specific range. - /// - /// The minimum component value. - /// The maximum component value. - public void Clamp(float min, float max) - { - if (x < min) x = min; - else if (x > max) x = max; - - if (y < min) y = min; - else if (y > max) y = max; - - if (z < min) z = min; - else if (z > max) z = max; - } - #endregion - - #region Object - /// - /// Returns a hash code for this vector. - /// - /// The hash code. - public override int GetHashCode() - { - return x.GetHashCode() ^ y.GetHashCode() << 2 ^ z.GetHashCode() >> 2; - } - - /// - /// Returns if this vector is equal to another one. - /// - /// The other vector to compare to. - /// If equals. 
- public override bool Equals(object other) - { - if (!(other is Vector3)) - { - return false; - } - Vector3 vector = (Vector3)other; - return (x == vector.x && y == vector.y && z == vector.z); - } - - /// - /// Returns if this vector is equal to another one. - /// - /// The other vector to compare to. - /// If equals. - public bool Equals(Vector3 other) - { - return (x == other.x && y == other.y && z == other.z); - } - - /// - /// Returns a nicely formatted string for this vector. - /// - /// The string. - public override string ToString() - { - return string.Format("({0}, {1}, {2})", - x.ToString("F1", CultureInfo.InvariantCulture), - y.ToString("F1", CultureInfo.InvariantCulture), - z.ToString("F1", CultureInfo.InvariantCulture)); - } - - /// - /// Returns a nicely formatted string for this vector. - /// - /// The float format. - /// The string. - public string ToString(string format) - { - return string.Format("({0}, {1}, {2})", - x.ToString(format, CultureInfo.InvariantCulture), - y.ToString(format, CultureInfo.InvariantCulture), - z.ToString(format, CultureInfo.InvariantCulture)); - } - #endregion - - #region Static - /// - /// Dot Product of two vectors. - /// - /// The left hand side vector. - /// The right hand side vector. - public static float Dot(ref Vector3 lhs, ref Vector3 rhs) - { - return lhs.x * rhs.x + lhs.y * rhs.y + lhs.z * rhs.z; - } - - /// - /// Cross Product of two vectors. - /// - /// The left hand side vector. - /// The right hand side vector. - /// The resulting vector. - public static void Cross(ref Vector3 lhs, ref Vector3 rhs, out Vector3 result) - { - result = new Vector3(lhs.y * rhs.z - lhs.z * rhs.y, lhs.z * rhs.x - lhs.x * rhs.z, lhs.x * rhs.y - lhs.y * rhs.x); - } - - /// - /// Calculates the angle between two vectors. - /// - /// The from vector. - /// The to vector. - /// The angle. 
- public static float Angle(ref Vector3 from, ref Vector3 to) - { - Vector3 fromNormalized = from.Normalized; - Vector3 toNormalized = to.Normalized; - return (float)System.Math.Acos(MathHelper.Clamp(Vector3.Dot(ref fromNormalized, ref toNormalized), -1f, 1f)) * MathHelper.Rad2Deg; - } - - /// - /// Performs a linear interpolation between two vectors. - /// - /// The vector to interpolate from. - /// The vector to interpolate to. - /// The time fraction. - /// The resulting vector. - public static void Lerp(ref Vector3 a, ref Vector3 b, float t, out Vector3 result) - { - result = new Vector3(a.x + (b.x - a.x) * t, a.y + (b.y - a.y) * t, a.z + (b.z - a.z) * t); - } - - /// - /// Multiplies two vectors component-wise. - /// - /// The first vector. - /// The second vector. - /// The resulting vector. - public static void Scale(ref Vector3 a, ref Vector3 b, out Vector3 result) - { - result = new Vector3(a.x * b.x, a.y * b.y, a.z * b.z); - } - - /// - /// Normalizes a vector. - /// - /// The vector to normalize. - /// The resulting normalized vector. - public static void Normalize(ref Vector3 value, out Vector3 result) - { - float mag = value.Magnitude; - if (mag > Epsilon) - { - result = new Vector3(value.x / mag, value.y / mag, value.z / mag); - } - else - { - result = Vector3.zero; - } - } - - /// - /// Normalizes both vectors and makes them orthogonal to each other. - /// - /// The normal vector. - /// The tangent. 
- public static void OrthoNormalize(ref Vector3 normal, ref Vector3 tangent) - { - normal.Normalize(); - Vector3 proj = normal * Vector3.Dot(ref tangent, ref normal); - tangent -= proj; - tangent.Normalize(); - } - #endregion - #endregion - } -} \ No newline at end of file diff --git a/LightlessSync/ThirdParty/MeshDecimator/Math/Vector3d.cs b/LightlessSync/ThirdParty/MeshDecimator/Math/Vector3d.cs deleted file mode 100644 index 11ebed1..0000000 --- a/LightlessSync/ThirdParty/MeshDecimator/Math/Vector3d.cs +++ /dev/null @@ -1,481 +0,0 @@ -#region License -/* -MIT License - -Copyright(c) 2017-2018 Mattias Edlund - -Permission is hereby granted, free of charge, to any person obtaining a copy -of this software and associated documentation files (the "Software"), to deal -in the Software without restriction, including without limitation the rights -to use, copy, modify, merge, publish, distribute, sublicense, and/or sell -copies of the Software, and to permit persons to whom the Software is -furnished to do so, subject to the following conditions: - -The above copyright notice and this permission notice shall be included in all -copies or substantial portions of the Software. - -THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR -IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, -FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE -AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER -LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, -OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE -SOFTWARE. -*/ -#endregion - -using System; -using System.Globalization; - -namespace MeshDecimator.Math -{ - /// - /// A double precision 3D vector. - /// - public struct Vector3d : IEquatable - { - #region Static Read-Only - /// - /// The zero vector. 
- /// - public static readonly Vector3d zero = new Vector3d(0, 0, 0); - #endregion - - #region Consts - /// - /// The vector epsilon. - /// - public const double Epsilon = double.Epsilon; - #endregion - - #region Fields - /// - /// The x component. - /// - public double x; - /// - /// The y component. - /// - public double y; - /// - /// The z component. - /// - public double z; - #endregion - - #region Properties - /// - /// Gets the magnitude of this vector. - /// - public double Magnitude - { - get { return System.Math.Sqrt(x * x + y * y + z * z); } - } - - /// - /// Gets the squared magnitude of this vector. - /// - public double MagnitudeSqr - { - get { return (x * x + y * y + z * z); } - } - - /// - /// Gets a normalized vector from this vector. - /// - public Vector3d Normalized - { - get - { - Vector3d result; - Normalize(ref this, out result); - return result; - } - } - - /// - /// Gets or sets a specific component by index in this vector. - /// - /// The component index. - public double this[int index] - { - get - { - switch (index) - { - case 0: - return x; - case 1: - return y; - case 2: - return z; - default: - throw new IndexOutOfRangeException("Invalid Vector3d index!"); - } - } - set - { - switch (index) - { - case 0: - x = value; - break; - case 1: - y = value; - break; - case 2: - z = value; - break; - default: - throw new IndexOutOfRangeException("Invalid Vector3d index!"); - } - } - } - #endregion - - #region Constructor - /// - /// Creates a new vector with one value for all components. - /// - /// The value. - public Vector3d(double value) - { - this.x = value; - this.y = value; - this.z = value; - } - - /// - /// Creates a new vector. - /// - /// The x value. - /// The y value. - /// The z value. - public Vector3d(double x, double y, double z) - { - this.x = x; - this.y = y; - this.z = z; - } - - /// - /// Creates a new vector from a single precision vector. - /// - /// The single precision vector. 
- public Vector3d(Vector3 vector) - { - this.x = vector.x; - this.y = vector.y; - this.z = vector.z; - } - #endregion - - #region Operators - /// - /// Adds two vectors. - /// - /// The first vector. - /// The second vector. - /// The resulting vector. - public static Vector3d operator +(Vector3d a, Vector3d b) - { - return new Vector3d(a.x + b.x, a.y + b.y, a.z + b.z); - } - - /// - /// Subtracts two vectors. - /// - /// The first vector. - /// The second vector. - /// The resulting vector. - public static Vector3d operator -(Vector3d a, Vector3d b) - { - return new Vector3d(a.x - b.x, a.y - b.y, a.z - b.z); - } - - /// - /// Scales the vector uniformly. - /// - /// The vector. - /// The scaling value. - /// The resulting vector. - public static Vector3d operator *(Vector3d a, double d) - { - return new Vector3d(a.x * d, a.y * d, a.z * d); - } - - /// - /// Scales the vector uniformly. - /// - /// The scaling value. - /// The vector. - /// The resulting vector. - public static Vector3d operator *(double d, Vector3d a) - { - return new Vector3d(a.x * d, a.y * d, a.z * d); - } - - /// - /// Divides the vector with a float. - /// - /// The vector. - /// The dividing float value. - /// The resulting vector. - public static Vector3d operator /(Vector3d a, double d) - { - return new Vector3d(a.x / d, a.y / d, a.z / d); - } - - /// - /// Subtracts the vector from a zero vector. - /// - /// The vector. - /// The resulting vector. - public static Vector3d operator -(Vector3d a) - { - return new Vector3d(-a.x, -a.y, -a.z); - } - - /// - /// Returns if two vectors equals eachother. - /// - /// The left hand side vector. - /// The right hand side vector. - /// If equals. - public static bool operator ==(Vector3d lhs, Vector3d rhs) - { - return (lhs - rhs).MagnitudeSqr < Epsilon; - } - - /// - /// Returns if two vectors don't equal eachother. - /// - /// The left hand side vector. - /// The right hand side vector. - /// If not equals. 
- public static bool operator !=(Vector3d lhs, Vector3d rhs) - { - return (lhs - rhs).MagnitudeSqr >= Epsilon; - } - - /// - /// Implicitly converts from a single-precision vector into a double-precision vector. - /// - /// The single-precision vector. - public static implicit operator Vector3d(Vector3 v) - { - return new Vector3d(v.x, v.y, v.z); - } - - /// - /// Implicitly converts from an integer vector into a double-precision vector. - /// - /// The integer vector. - public static implicit operator Vector3d(Vector3i v) - { - return new Vector3d(v.x, v.y, v.z); - } - #endregion - - #region Public Methods - #region Instance - /// - /// Set x, y and z components of an existing vector. - /// - /// The x value. - /// The y value. - /// The z value. - public void Set(double x, double y, double z) - { - this.x = x; - this.y = y; - this.z = z; - } - - /// - /// Multiplies with another vector component-wise. - /// - /// The vector to multiply with. - public void Scale(ref Vector3d scale) - { - x *= scale.x; - y *= scale.y; - z *= scale.z; - } - - /// - /// Normalizes this vector. - /// - public void Normalize() - { - double mag = this.Magnitude; - if (mag > Epsilon) - { - x /= mag; - y /= mag; - z /= mag; - } - else - { - x = y = z = 0; - } - } - - /// - /// Clamps this vector between a specific range. - /// - /// The minimum component value. - /// The maximum component value. - public void Clamp(double min, double max) - { - if (x < min) x = min; - else if (x > max) x = max; - - if (y < min) y = min; - else if (y > max) y = max; - - if (z < min) z = min; - else if (z > max) z = max; - } - #endregion - - #region Object - /// - /// Returns a hash code for this vector. - /// - /// The hash code. - public override int GetHashCode() - { - return x.GetHashCode() ^ y.GetHashCode() << 2 ^ z.GetHashCode() >> 2; - } - - /// - /// Returns if this vector is equal to another one. - /// - /// The other vector to compare to. - /// If equals. 
- public override bool Equals(object other) - { - if (!(other is Vector3d)) - { - return false; - } - Vector3d vector = (Vector3d)other; - return (x == vector.x && y == vector.y && z == vector.z); - } - - /// - /// Returns if this vector is equal to another one. - /// - /// The other vector to compare to. - /// If equals. - public bool Equals(Vector3d other) - { - return (x == other.x && y == other.y && z == other.z); - } - - /// - /// Returns a nicely formatted string for this vector. - /// - /// The string. - public override string ToString() - { - return string.Format("({0}, {1}, {2})", - x.ToString("F1", CultureInfo.InvariantCulture), - y.ToString("F1", CultureInfo.InvariantCulture), - z.ToString("F1", CultureInfo.InvariantCulture)); - } - - /// - /// Returns a nicely formatted string for this vector. - /// - /// The float format. - /// The string. - public string ToString(string format) - { - return string.Format("({0}, {1}, {2})", - x.ToString(format, CultureInfo.InvariantCulture), - y.ToString(format, CultureInfo.InvariantCulture), - z.ToString(format, CultureInfo.InvariantCulture)); - } - #endregion - - #region Static - /// - /// Dot Product of two vectors. - /// - /// The left hand side vector. - /// The right hand side vector. - public static double Dot(ref Vector3d lhs, ref Vector3d rhs) - { - return lhs.x * rhs.x + lhs.y * rhs.y + lhs.z * rhs.z; - } - - /// - /// Cross Product of two vectors. - /// - /// The left hand side vector. - /// The right hand side vector. - /// The resulting vector. - public static void Cross(ref Vector3d lhs, ref Vector3d rhs, out Vector3d result) - { - result = new Vector3d(lhs.y * rhs.z - lhs.z * rhs.y, lhs.z * rhs.x - lhs.x * rhs.z, lhs.x * rhs.y - lhs.y * rhs.x); - } - - /// - /// Calculates the angle between two vectors. - /// - /// The from vector. - /// The to vector. - /// The angle. 
- public static double Angle(ref Vector3d from, ref Vector3d to) - { - Vector3d fromNormalized = from.Normalized; - Vector3d toNormalized = to.Normalized; - return System.Math.Acos(MathHelper.Clamp(Vector3d.Dot(ref fromNormalized, ref toNormalized), -1.0, 1.0)) * MathHelper.Rad2Degd; - } - - /// - /// Performs a linear interpolation between two vectors. - /// - /// The vector to interpolate from. - /// The vector to interpolate to. - /// The time fraction. - /// The resulting vector. - public static void Lerp(ref Vector3d a, ref Vector3d b, double t, out Vector3d result) - { - result = new Vector3d(a.x + (b.x - a.x) * t, a.y + (b.y - a.y) * t, a.z + (b.z - a.z) * t); - } - - /// - /// Multiplies two vectors component-wise. - /// - /// The first vector. - /// The second vector. - /// The resulting vector. - public static void Scale(ref Vector3d a, ref Vector3d b, out Vector3d result) - { - result = new Vector3d(a.x * b.x, a.y * b.y, a.z * b.z); - } - - /// - /// Normalizes a vector. - /// - /// The vector to normalize. - /// The resulting normalized vector. 
- public static void Normalize(ref Vector3d value, out Vector3d result) - { - double mag = value.Magnitude; - if (mag > Epsilon) - { - result = new Vector3d(value.x / mag, value.y / mag, value.z / mag); - } - else - { - result = Vector3d.zero; - } - } - #endregion - #endregion - } -} \ No newline at end of file diff --git a/LightlessSync/ThirdParty/MeshDecimator/Math/Vector3i.cs b/LightlessSync/ThirdParty/MeshDecimator/Math/Vector3i.cs deleted file mode 100644 index d36d6d1..0000000 --- a/LightlessSync/ThirdParty/MeshDecimator/Math/Vector3i.cs +++ /dev/null @@ -1,368 +0,0 @@ -#region License -/* -MIT License - -Copyright(c) 2017-2018 Mattias Edlund - -Permission is hereby granted, free of charge, to any person obtaining a copy -of this software and associated documentation files (the "Software"), to deal -in the Software without restriction, including without limitation the rights -to use, copy, modify, merge, publish, distribute, sublicense, and/or sell -copies of the Software, and to permit persons to whom the Software is -furnished to do so, subject to the following conditions: - -The above copyright notice and this permission notice shall be included in all -copies or substantial portions of the Software. - -THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR -IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, -FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE -AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER -LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, -OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE -SOFTWARE. -*/ -#endregion - -using System; -using System.Globalization; - -namespace MeshDecimator.Math -{ - /// - /// A 3D integer vector. - /// - public struct Vector3i : IEquatable - { - #region Static Read-Only - /// - /// The zero vector. 
- /// - public static readonly Vector3i zero = new Vector3i(0, 0, 0); - #endregion - - #region Fields - /// - /// The x component. - /// - public int x; - /// - /// The y component. - /// - public int y; - /// - /// The z component. - /// - public int z; - #endregion - - #region Properties - /// - /// Gets the magnitude of this vector. - /// - public int Magnitude - { - get { return (int)System.Math.Sqrt(x * x + y * y + z * z); } - } - - /// - /// Gets the squared magnitude of this vector. - /// - public int MagnitudeSqr - { - get { return (x * x + y * y + z * z); } - } - - /// - /// Gets or sets a specific component by index in this vector. - /// - /// The component index. - public int this[int index] - { - get - { - switch (index) - { - case 0: - return x; - case 1: - return y; - case 2: - return z; - default: - throw new IndexOutOfRangeException("Invalid Vector3i index!"); - } - } - set - { - switch (index) - { - case 0: - x = value; - break; - case 1: - y = value; - break; - case 2: - z = value; - break; - default: - throw new IndexOutOfRangeException("Invalid Vector3i index!"); - } - } - } - #endregion - - #region Constructor - /// - /// Creates a new vector with one value for all components. - /// - /// The value. - public Vector3i(int value) - { - this.x = value; - this.y = value; - this.z = value; - } - - /// - /// Creates a new vector. - /// - /// The x value. - /// The y value. - /// The z value. - public Vector3i(int x, int y, int z) - { - this.x = x; - this.y = y; - this.z = z; - } - #endregion - - #region Operators - /// - /// Adds two vectors. - /// - /// The first vector. - /// The second vector. - /// The resulting vector. - public static Vector3i operator +(Vector3i a, Vector3i b) - { - return new Vector3i(a.x + b.x, a.y + b.y, a.z + b.z); - } - - /// - /// Subtracts two vectors. - /// - /// The first vector. - /// The second vector. - /// The resulting vector. 
- public static Vector3i operator -(Vector3i a, Vector3i b) - { - return new Vector3i(a.x - b.x, a.y - b.y, a.z - b.z); - } - - /// - /// Scales the vector uniformly. - /// - /// The vector. - /// The scaling value. - /// The resulting vector. - public static Vector3i operator *(Vector3i a, int d) - { - return new Vector3i(a.x * d, a.y * d, a.z * d); - } - - /// - /// Scales the vector uniformly. - /// - /// The scaling value. - /// The vector. - /// The resulting vector. - public static Vector3i operator *(int d, Vector3i a) - { - return new Vector3i(a.x * d, a.y * d, a.z * d); - } - - /// - /// Divides the vector with a float. - /// - /// The vector. - /// The dividing float value. - /// The resulting vector. - public static Vector3i operator /(Vector3i a, int d) - { - return new Vector3i(a.x / d, a.y / d, a.z / d); - } - - /// - /// Subtracts the vector from a zero vector. - /// - /// The vector. - /// The resulting vector. - public static Vector3i operator -(Vector3i a) - { - return new Vector3i(-a.x, -a.y, -a.z); - } - - /// - /// Returns if two vectors equals eachother. - /// - /// The left hand side vector. - /// The right hand side vector. - /// If equals. - public static bool operator ==(Vector3i lhs, Vector3i rhs) - { - return (lhs.x == rhs.x && lhs.y == rhs.y && lhs.z == rhs.z); - } - - /// - /// Returns if two vectors don't equal eachother. - /// - /// The left hand side vector. - /// The right hand side vector. - /// If not equals. - public static bool operator !=(Vector3i lhs, Vector3i rhs) - { - return (lhs.x != rhs.x || lhs.y != rhs.y || lhs.z != rhs.z); - } - - /// - /// Explicitly converts from a single-precision vector into an integer vector. - /// - /// The single-precision vector. - public static implicit operator Vector3i(Vector3 v) - { - return new Vector3i((int)v.x, (int)v.y, (int)v.z); - } - - /// - /// Explicitly converts from a double-precision vector into an integer vector. - /// - /// The double-precision vector. 
- public static explicit operator Vector3i(Vector3d v) - { - return new Vector3i((int)v.x, (int)v.y, (int)v.z); - } - #endregion - - #region Public Methods - #region Instance - /// - /// Set x, y and z components of an existing vector. - /// - /// The x value. - /// The y value. - /// The z value. - public void Set(int x, int y, int z) - { - this.x = x; - this.y = y; - this.z = z; - } - - /// - /// Multiplies with another vector component-wise. - /// - /// The vector to multiply with. - public void Scale(ref Vector3i scale) - { - x *= scale.x; - y *= scale.y; - z *= scale.z; - } - - /// - /// Clamps this vector between a specific range. - /// - /// The minimum component value. - /// The maximum component value. - public void Clamp(int min, int max) - { - if (x < min) x = min; - else if (x > max) x = max; - - if (y < min) y = min; - else if (y > max) y = max; - - if (z < min) z = min; - else if (z > max) z = max; - } - #endregion - - #region Object - /// - /// Returns a hash code for this vector. - /// - /// The hash code. - public override int GetHashCode() - { - return x.GetHashCode() ^ y.GetHashCode() << 2 ^ z.GetHashCode() >> 2; - } - - /// - /// Returns if this vector is equal to another one. - /// - /// The other vector to compare to. - /// If equals. - public override bool Equals(object other) - { - if (!(other is Vector3i)) - { - return false; - } - Vector3i vector = (Vector3i)other; - return (x == vector.x && y == vector.y && z == vector.z); - } - - /// - /// Returns if this vector is equal to another one. - /// - /// The other vector to compare to. - /// If equals. - public bool Equals(Vector3i other) - { - return (x == other.x && y == other.y && z == other.z); - } - - /// - /// Returns a nicely formatted string for this vector. - /// - /// The string. 
- public override string ToString() - { - return string.Format("({0}, {1}, {2})", - x.ToString(CultureInfo.InvariantCulture), - y.ToString(CultureInfo.InvariantCulture), - z.ToString(CultureInfo.InvariantCulture)); - } - - /// - /// Returns a nicely formatted string for this vector. - /// - /// The integer format. - /// The string. - public string ToString(string format) - { - return string.Format("({0}, {1}, {2})", - x.ToString(format, CultureInfo.InvariantCulture), - y.ToString(format, CultureInfo.InvariantCulture), - z.ToString(format, CultureInfo.InvariantCulture)); - } - #endregion - - #region Static - /// - /// Multiplies two vectors component-wise. - /// - /// The first vector. - /// The second vector. - /// The resulting vector. - public static void Scale(ref Vector3i a, ref Vector3i b, out Vector3i result) - { - result = new Vector3i(a.x * b.x, a.y * b.y, a.z * b.z); - } - #endregion - #endregion - } -} \ No newline at end of file diff --git a/LightlessSync/ThirdParty/MeshDecimator/Math/Vector4.cs b/LightlessSync/ThirdParty/MeshDecimator/Math/Vector4.cs deleted file mode 100644 index bf1d655..0000000 --- a/LightlessSync/ThirdParty/MeshDecimator/Math/Vector4.cs +++ /dev/null @@ -1,467 +0,0 @@ -#region License -/* -MIT License - -Copyright(c) 2017-2018 Mattias Edlund - -Permission is hereby granted, free of charge, to any person obtaining a copy -of this software and associated documentation files (the "Software"), to deal -in the Software without restriction, including without limitation the rights -to use, copy, modify, merge, publish, distribute, sublicense, and/or sell -copies of the Software, and to permit persons to whom the Software is -furnished to do so, subject to the following conditions: - -The above copyright notice and this permission notice shall be included in all -copies or substantial portions of the Software. 
- -THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR -IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, -FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE -AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER -LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, -OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE -SOFTWARE. -*/ -#endregion - -using System; -using System.Globalization; - -namespace MeshDecimator.Math -{ - /// - /// A single precision 4D vector. - /// - public struct Vector4 : IEquatable - { - #region Static Read-Only - /// - /// The zero vector. - /// - public static readonly Vector4 zero = new Vector4(0, 0, 0, 0); - #endregion - - #region Consts - /// - /// The vector epsilon. - /// - public const float Epsilon = 9.99999944E-11f; - #endregion - - #region Fields - /// - /// The x component. - /// - public float x; - /// - /// The y component. - /// - public float y; - /// - /// The z component. - /// - public float z; - /// - /// The w component. - /// - public float w; - #endregion - - #region Properties - /// - /// Gets the magnitude of this vector. - /// - public float Magnitude - { - get { return (float)System.Math.Sqrt(x * x + y * y + z * z + w * w); } - } - - /// - /// Gets the squared magnitude of this vector. - /// - public float MagnitudeSqr - { - get { return (x * x + y * y + z * z + w * w); } - } - - /// - /// Gets a normalized vector from this vector. - /// - public Vector4 Normalized - { - get - { - Vector4 result; - Normalize(ref this, out result); - return result; - } - } - - /// - /// Gets or sets a specific component by index in this vector. - /// - /// The component index. 
- public float this[int index] - { - get - { - switch (index) - { - case 0: - return x; - case 1: - return y; - case 2: - return z; - case 3: - return w; - default: - throw new IndexOutOfRangeException("Invalid Vector4 index!"); - } - } - set - { - switch (index) - { - case 0: - x = value; - break; - case 1: - y = value; - break; - case 2: - z = value; - break; - case 3: - w = value; - break; - default: - throw new IndexOutOfRangeException("Invalid Vector4 index!"); - } - } - } - #endregion - - #region Constructor - /// - /// Creates a new vector with one value for all components. - /// - /// The value. - public Vector4(float value) - { - this.x = value; - this.y = value; - this.z = value; - this.w = value; - } - - /// - /// Creates a new vector. - /// - /// The x value. - /// The y value. - /// The z value. - /// The w value. - public Vector4(float x, float y, float z, float w) - { - this.x = x; - this.y = y; - this.z = z; - this.w = w; - } - #endregion - - #region Operators - /// - /// Adds two vectors. - /// - /// The first vector. - /// The second vector. - /// The resulting vector. - public static Vector4 operator +(Vector4 a, Vector4 b) - { - return new Vector4(a.x + b.x, a.y + b.y, a.z + b.z, a.w + b.w); - } - - /// - /// Subtracts two vectors. - /// - /// The first vector. - /// The second vector. - /// The resulting vector. - public static Vector4 operator -(Vector4 a, Vector4 b) - { - return new Vector4(a.x - b.x, a.y - b.y, a.z - b.z, a.w - b.w); - } - - /// - /// Scales the vector uniformly. - /// - /// The vector. - /// The scaling value. - /// The resulting vector. - public static Vector4 operator *(Vector4 a, float d) - { - return new Vector4(a.x * d, a.y * d, a.z * d, a.w * d); - } - - /// - /// Scales the vector uniformly. - /// - /// The scaling value. - /// The vector. - /// The resulting vector. 
- public static Vector4 operator *(float d, Vector4 a) - { - return new Vector4(a.x * d, a.y * d, a.z * d, a.w * d); - } - - /// - /// Divides the vector with a float. - /// - /// The vector. - /// The dividing float value. - /// The resulting vector. - public static Vector4 operator /(Vector4 a, float d) - { - return new Vector4(a.x / d, a.y / d, a.z / d, a.w / d); - } - - /// - /// Subtracts the vector from a zero vector. - /// - /// The vector. - /// The resulting vector. - public static Vector4 operator -(Vector4 a) - { - return new Vector4(-a.x, -a.y, -a.z, -a.w); - } - - /// - /// Returns if two vectors equals eachother. - /// - /// The left hand side vector. - /// The right hand side vector. - /// If equals. - public static bool operator ==(Vector4 lhs, Vector4 rhs) - { - return (lhs - rhs).MagnitudeSqr < Epsilon; - } - - /// - /// Returns if two vectors don't equal eachother. - /// - /// The left hand side vector. - /// The right hand side vector. - /// If not equals. - public static bool operator !=(Vector4 lhs, Vector4 rhs) - { - return (lhs - rhs).MagnitudeSqr >= Epsilon; - } - - /// - /// Explicitly converts from a double-precision vector into a single-precision vector. - /// - /// The double-precision vector. - public static explicit operator Vector4(Vector4d v) - { - return new Vector4((float)v.x, (float)v.y, (float)v.z, (float)v.w); - } - - /// - /// Implicitly converts from an integer vector into a single-precision vector. - /// - /// The integer vector. - public static implicit operator Vector4(Vector4i v) - { - return new Vector4(v.x, v.y, v.z, v.w); - } - #endregion - - #region Public Methods - #region Instance - /// - /// Set x, y and z components of an existing vector. - /// - /// The x value. - /// The y value. - /// The z value. - /// The w value. - public void Set(float x, float y, float z, float w) - { - this.x = x; - this.y = y; - this.z = z; - this.w = w; - } - - /// - /// Multiplies with another vector component-wise. 
- /// - /// The vector to multiply with. - public void Scale(ref Vector4 scale) - { - x *= scale.x; - y *= scale.y; - z *= scale.z; - w *= scale.w; - } - - /// - /// Normalizes this vector. - /// - public void Normalize() - { - float mag = this.Magnitude; - if (mag > Epsilon) - { - x /= mag; - y /= mag; - z /= mag; - w /= mag; - } - else - { - x = y = z = w = 0; - } - } - - /// - /// Clamps this vector between a specific range. - /// - /// The minimum component value. - /// The maximum component value. - public void Clamp(float min, float max) - { - if (x < min) x = min; - else if (x > max) x = max; - - if (y < min) y = min; - else if (y > max) y = max; - - if (z < min) z = min; - else if (z > max) z = max; - - if (w < min) w = min; - else if (w > max) w = max; - } - #endregion - - #region Object - /// - /// Returns a hash code for this vector. - /// - /// The hash code. - public override int GetHashCode() - { - return x.GetHashCode() ^ y.GetHashCode() << 2 ^ z.GetHashCode() >> 2 ^ w.GetHashCode() >> 1; - } - - /// - /// Returns if this vector is equal to another one. - /// - /// The other vector to compare to. - /// If equals. - public override bool Equals(object other) - { - if (!(other is Vector4)) - { - return false; - } - Vector4 vector = (Vector4)other; - return (x == vector.x && y == vector.y && z == vector.z && w == vector.w); - } - - /// - /// Returns if this vector is equal to another one. - /// - /// The other vector to compare to. - /// If equals. - public bool Equals(Vector4 other) - { - return (x == other.x && y == other.y && z == other.z && w == other.w); - } - - /// - /// Returns a nicely formatted string for this vector. - /// - /// The string. 
- public override string ToString() - { - return string.Format("({0}, {1}, {2}, {3})", - x.ToString("F1", CultureInfo.InvariantCulture), - y.ToString("F1", CultureInfo.InvariantCulture), - z.ToString("F1", CultureInfo.InvariantCulture), - w.ToString("F1", CultureInfo.InvariantCulture)); - } - - /// - /// Returns a nicely formatted string for this vector. - /// - /// The float format. - /// The string. - public string ToString(string format) - { - return string.Format("({0}, {1}, {2}, {3})", - x.ToString(format, CultureInfo.InvariantCulture), - y.ToString(format, CultureInfo.InvariantCulture), - z.ToString(format, CultureInfo.InvariantCulture), - w.ToString(format, CultureInfo.InvariantCulture)); - } - #endregion - - #region Static - /// - /// Dot Product of two vectors. - /// - /// The left hand side vector. - /// The right hand side vector. - public static float Dot(ref Vector4 lhs, ref Vector4 rhs) - { - return lhs.x * rhs.x + lhs.y * rhs.y + lhs.z * rhs.z + lhs.w * rhs.w; - } - - /// - /// Performs a linear interpolation between two vectors. - /// - /// The vector to interpolate from. - /// The vector to interpolate to. - /// The time fraction. - /// The resulting vector. - public static void Lerp(ref Vector4 a, ref Vector4 b, float t, out Vector4 result) - { - result = new Vector4(a.x + (b.x - a.x) * t, a.y + (b.y - a.y) * t, a.z + (b.z - a.z) * t, a.w + (b.w - a.w) * t); - } - - /// - /// Multiplies two vectors component-wise. - /// - /// The first vector. - /// The second vector. - /// The resulting vector. - public static void Scale(ref Vector4 a, ref Vector4 b, out Vector4 result) - { - result = new Vector4(a.x * b.x, a.y * b.y, a.z * b.z, a.w * b.w); - } - - /// - /// Normalizes a vector. - /// - /// The vector to normalize. - /// The resulting normalized vector. 
- public static void Normalize(ref Vector4 value, out Vector4 result) - { - float mag = value.Magnitude; - if (mag > Epsilon) - { - result = new Vector4(value.x / mag, value.y / mag, value.z / mag, value.w / mag); - } - else - { - result = Vector4.zero; - } - } - #endregion - #endregion - } -} \ No newline at end of file diff --git a/LightlessSync/ThirdParty/MeshDecimator/Math/Vector4d.cs b/LightlessSync/ThirdParty/MeshDecimator/Math/Vector4d.cs deleted file mode 100644 index c984c08..0000000 --- a/LightlessSync/ThirdParty/MeshDecimator/Math/Vector4d.cs +++ /dev/null @@ -1,467 +0,0 @@ -#region License -/* -MIT License - -Copyright(c) 2017-2018 Mattias Edlund - -Permission is hereby granted, free of charge, to any person obtaining a copy -of this software and associated documentation files (the "Software"), to deal -in the Software without restriction, including without limitation the rights -to use, copy, modify, merge, publish, distribute, sublicense, and/or sell -copies of the Software, and to permit persons to whom the Software is -furnished to do so, subject to the following conditions: - -The above copyright notice and this permission notice shall be included in all -copies or substantial portions of the Software. - -THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR -IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, -FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE -AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER -LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, -OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE -SOFTWARE. -*/ -#endregion - -using System; -using System.Globalization; - -namespace MeshDecimator.Math -{ - /// - /// A double precision 4D vector. - /// - public struct Vector4d : IEquatable - { - #region Static Read-Only - /// - /// The zero vector. 
- /// - public static readonly Vector4d zero = new Vector4d(0, 0, 0, 0); - #endregion - - #region Consts - /// - /// The vector epsilon. - /// - public const double Epsilon = double.Epsilon; - #endregion - - #region Fields - /// - /// The x component. - /// - public double x; - /// - /// The y component. - /// - public double y; - /// - /// The z component. - /// - public double z; - /// - /// The w component. - /// - public double w; - #endregion - - #region Properties - /// - /// Gets the magnitude of this vector. - /// - public double Magnitude - { - get { return System.Math.Sqrt(x * x + y * y + z * z + w * w); } - } - - /// - /// Gets the squared magnitude of this vector. - /// - public double MagnitudeSqr - { - get { return (x * x + y * y + z * z + w * w); } - } - - /// - /// Gets a normalized vector from this vector. - /// - public Vector4d Normalized - { - get - { - Vector4d result; - Normalize(ref this, out result); - return result; - } - } - - /// - /// Gets or sets a specific component by index in this vector. - /// - /// The component index. - public double this[int index] - { - get - { - switch (index) - { - case 0: - return x; - case 1: - return y; - case 2: - return z; - case 3: - return w; - default: - throw new IndexOutOfRangeException("Invalid Vector4d index!"); - } - } - set - { - switch (index) - { - case 0: - x = value; - break; - case 1: - y = value; - break; - case 2: - z = value; - break; - case 3: - w = value; - break; - default: - throw new IndexOutOfRangeException("Invalid Vector4d index!"); - } - } - } - #endregion - - #region Constructor - /// - /// Creates a new vector with one value for all components. - /// - /// The value. - public Vector4d(double value) - { - this.x = value; - this.y = value; - this.z = value; - this.w = value; - } - - /// - /// Creates a new vector. - /// - /// The x value. - /// The y value. - /// The z value. - /// The w value. 
- public Vector4d(double x, double y, double z, double w) - { - this.x = x; - this.y = y; - this.z = z; - this.w = w; - } - #endregion - - #region Operators - /// - /// Adds two vectors. - /// - /// The first vector. - /// The second vector. - /// The resulting vector. - public static Vector4d operator +(Vector4d a, Vector4d b) - { - return new Vector4d(a.x + b.x, a.y + b.y, a.z + b.z, a.w + b.w); - } - - /// - /// Subtracts two vectors. - /// - /// The first vector. - /// The second vector. - /// The resulting vector. - public static Vector4d operator -(Vector4d a, Vector4d b) - { - return new Vector4d(a.x - b.x, a.y - b.y, a.z - b.z, a.w - b.w); - } - - /// - /// Scales the vector uniformly. - /// - /// The vector. - /// The scaling value. - /// The resulting vector. - public static Vector4d operator *(Vector4d a, double d) - { - return new Vector4d(a.x * d, a.y * d, a.z * d, a.w * d); - } - - /// - /// Scales the vector uniformly. - /// - /// The scaling value. - /// The vector. - /// The resulting vector. - public static Vector4d operator *(double d, Vector4d a) - { - return new Vector4d(a.x * d, a.y * d, a.z * d, a.w * d); - } - - /// - /// Divides the vector with a float. - /// - /// The vector. - /// The dividing float value. - /// The resulting vector. - public static Vector4d operator /(Vector4d a, double d) - { - return new Vector4d(a.x / d, a.y / d, a.z / d, a.w / d); - } - - /// - /// Subtracts the vector from a zero vector. - /// - /// The vector. - /// The resulting vector. - public static Vector4d operator -(Vector4d a) - { - return new Vector4d(-a.x, -a.y, -a.z, -a.w); - } - - /// - /// Returns if two vectors equals eachother. - /// - /// The left hand side vector. - /// The right hand side vector. - /// If equals. - public static bool operator ==(Vector4d lhs, Vector4d rhs) - { - return (lhs - rhs).MagnitudeSqr < Epsilon; - } - - /// - /// Returns if two vectors don't equal eachother. - /// - /// The left hand side vector. 
- /// The right hand side vector. - /// If not equals. - public static bool operator !=(Vector4d lhs, Vector4d rhs) - { - return (lhs - rhs).MagnitudeSqr >= Epsilon; - } - - /// - /// Implicitly converts from a single-precision vector into a double-precision vector. - /// - /// The single-precision vector. - public static implicit operator Vector4d(Vector4 v) - { - return new Vector4d(v.x, v.y, v.z, v.w); - } - - /// - /// Implicitly converts from an integer vector into a double-precision vector. - /// - /// The integer vector. - public static implicit operator Vector4d(Vector4i v) - { - return new Vector4d(v.x, v.y, v.z, v.w); - } - #endregion - - #region Public Methods - #region Instance - /// - /// Set x, y and z components of an existing vector. - /// - /// The x value. - /// The y value. - /// The z value. - /// The w value. - public void Set(double x, double y, double z, double w) - { - this.x = x; - this.y = y; - this.z = z; - this.w = w; - } - - /// - /// Multiplies with another vector component-wise. - /// - /// The vector to multiply with. - public void Scale(ref Vector4d scale) - { - x *= scale.x; - y *= scale.y; - z *= scale.z; - w *= scale.w; - } - - /// - /// Normalizes this vector. - /// - public void Normalize() - { - double mag = this.Magnitude; - if (mag > Epsilon) - { - x /= mag; - y /= mag; - z /= mag; - w /= mag; - } - else - { - x = y = z = w = 0; - } - } - - /// - /// Clamps this vector between a specific range. - /// - /// The minimum component value. - /// The maximum component value. - public void Clamp(double min, double max) - { - if (x < min) x = min; - else if (x > max) x = max; - - if (y < min) y = min; - else if (y > max) y = max; - - if (z < min) z = min; - else if (z > max) z = max; - - if (w < min) w = min; - else if (w > max) w = max; - } - #endregion - - #region Object - /// - /// Returns a hash code for this vector. - /// - /// The hash code. 
- public override int GetHashCode() - { - return x.GetHashCode() ^ y.GetHashCode() << 2 ^ z.GetHashCode() >> 2 ^ w.GetHashCode() >> 1; - } - - /// - /// Returns if this vector is equal to another one. - /// - /// The other vector to compare to. - /// If equals. - public override bool Equals(object other) - { - if (!(other is Vector4d)) - { - return false; - } - Vector4d vector = (Vector4d)other; - return (x == vector.x && y == vector.y && z == vector.z && w == vector.w); - } - - /// - /// Returns if this vector is equal to another one. - /// - /// The other vector to compare to. - /// If equals. - public bool Equals(Vector4d other) - { - return (x == other.x && y == other.y && z == other.z && w == other.w); - } - - /// - /// Returns a nicely formatted string for this vector. - /// - /// The string. - public override string ToString() - { - return string.Format("({0}, {1}, {2}, {3})", - x.ToString("F1", CultureInfo.InvariantCulture), - y.ToString("F1", CultureInfo.InvariantCulture), - z.ToString("F1", CultureInfo.InvariantCulture), - w.ToString("F1", CultureInfo.InvariantCulture)); - } - - /// - /// Returns a nicely formatted string for this vector. - /// - /// The float format. - /// The string. - public string ToString(string format) - { - return string.Format("({0}, {1}, {2}, {3})", - x.ToString(format, CultureInfo.InvariantCulture), - y.ToString(format, CultureInfo.InvariantCulture), - z.ToString(format, CultureInfo.InvariantCulture), - w.ToString(format, CultureInfo.InvariantCulture)); - } - #endregion - - #region Static - /// - /// Dot Product of two vectors. - /// - /// The left hand side vector. - /// The right hand side vector. - public static double Dot(ref Vector4d lhs, ref Vector4d rhs) - { - return lhs.x * rhs.x + lhs.y * rhs.y + lhs.z * rhs.z + lhs.w * rhs.w; - } - - /// - /// Performs a linear interpolation between two vectors. - /// - /// The vector to interpolate from. - /// The vector to interpolate to. - /// The time fraction. 
- /// The resulting vector. - public static void Lerp(ref Vector4d a, ref Vector4d b, double t, out Vector4d result) - { - result = new Vector4d(a.x + (b.x - a.x) * t, a.y + (b.y - a.y) * t, a.z + (b.z - a.z) * t, a.w + (b.w - a.w) * t); - } - - /// - /// Multiplies two vectors component-wise. - /// - /// The first vector. - /// The second vector. - /// The resulting vector. - public static void Scale(ref Vector4d a, ref Vector4d b, out Vector4d result) - { - result = new Vector4d(a.x * b.x, a.y * b.y, a.z * b.z, a.w * b.w); - } - - /// - /// Normalizes a vector. - /// - /// The vector to normalize. - /// The resulting normalized vector. - public static void Normalize(ref Vector4d value, out Vector4d result) - { - double mag = value.Magnitude; - if (mag > Epsilon) - { - result = new Vector4d(value.x / mag, value.y / mag, value.z / mag, value.w / mag); - } - else - { - result = Vector4d.zero; - } - } - #endregion - #endregion - } -} \ No newline at end of file diff --git a/LightlessSync/ThirdParty/MeshDecimator/Math/Vector4i.cs b/LightlessSync/ThirdParty/MeshDecimator/Math/Vector4i.cs deleted file mode 100644 index cc52459..0000000 --- a/LightlessSync/ThirdParty/MeshDecimator/Math/Vector4i.cs +++ /dev/null @@ -1,388 +0,0 @@ -#region License -/* -MIT License - -Copyright(c) 2017-2018 Mattias Edlund - -Permission is hereby granted, free of charge, to any person obtaining a copy -of this software and associated documentation files (the "Software"), to deal -in the Software without restriction, including without limitation the rights -to use, copy, modify, merge, publish, distribute, sublicense, and/or sell -copies of the Software, and to permit persons to whom the Software is -furnished to do so, subject to the following conditions: - -The above copyright notice and this permission notice shall be included in all -copies or substantial portions of the Software. 
- -THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR -IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, -FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE -AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER -LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, -OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE -SOFTWARE. -*/ -#endregion - -using System; -using System.Globalization; - -namespace MeshDecimator.Math -{ - /// - /// A 4D integer vector. - /// - public struct Vector4i : IEquatable - { - #region Static Read-Only - /// - /// The zero vector. - /// - public static readonly Vector4i zero = new Vector4i(0, 0, 0, 0); - #endregion - - #region Fields - /// - /// The x component. - /// - public int x; - /// - /// The y component. - /// - public int y; - /// - /// The z component. - /// - public int z; - /// - /// The w component. - /// - public int w; - #endregion - - #region Properties - /// - /// Gets the magnitude of this vector. - /// - public int Magnitude - { - get { return (int)System.Math.Sqrt(x * x + y * y + z * z + w * w); } - } - - /// - /// Gets the squared magnitude of this vector. - /// - public int MagnitudeSqr - { - get { return (x * x + y * y + z * z + w * w); } - } - - /// - /// Gets or sets a specific component by index in this vector. - /// - /// The component index. 
- public int this[int index] - { - get - { - switch (index) - { - case 0: - return x; - case 1: - return y; - case 2: - return z; - case 3: - return w; - default: - throw new IndexOutOfRangeException("Invalid Vector4i index!"); - } - } - set - { - switch (index) - { - case 0: - x = value; - break; - case 1: - y = value; - break; - case 2: - z = value; - break; - case 3: - w = value; - break; - default: - throw new IndexOutOfRangeException("Invalid Vector4i index!"); - } - } - } - #endregion - - #region Constructor - /// - /// Creates a new vector with one value for all components. - /// - /// The value. - public Vector4i(int value) - { - this.x = value; - this.y = value; - this.z = value; - this.w = value; - } - - /// - /// Creates a new vector. - /// - /// The x value. - /// The y value. - /// The z value. - /// The w value. - public Vector4i(int x, int y, int z, int w) - { - this.x = x; - this.y = y; - this.z = z; - this.w = w; - } - #endregion - - #region Operators - /// - /// Adds two vectors. - /// - /// The first vector. - /// The second vector. - /// The resulting vector. - public static Vector4i operator +(Vector4i a, Vector4i b) - { - return new Vector4i(a.x + b.x, a.y + b.y, a.z + b.z, a.w + b.w); - } - - /// - /// Subtracts two vectors. - /// - /// The first vector. - /// The second vector. - /// The resulting vector. - public static Vector4i operator -(Vector4i a, Vector4i b) - { - return new Vector4i(a.x - b.x, a.y - b.y, a.z - b.z, a.w - b.w); - } - - /// - /// Scales the vector uniformly. - /// - /// The vector. - /// The scaling value. - /// The resulting vector. - public static Vector4i operator *(Vector4i a, int d) - { - return new Vector4i(a.x * d, a.y * d, a.z * d, a.w * d); - } - - /// - /// Scales the vector uniformly. - /// - /// The scaling value. - /// The vector. - /// The resulting vector. 
- public static Vector4i operator *(int d, Vector4i a) - { - return new Vector4i(a.x * d, a.y * d, a.z * d, a.w * d); - } - - /// - /// Divides the vector with a float. - /// - /// The vector. - /// The dividing float value. - /// The resulting vector. - public static Vector4i operator /(Vector4i a, int d) - { - return new Vector4i(a.x / d, a.y / d, a.z / d, a.w / d); - } - - /// - /// Subtracts the vector from a zero vector. - /// - /// The vector. - /// The resulting vector. - public static Vector4i operator -(Vector4i a) - { - return new Vector4i(-a.x, -a.y, -a.z, -a.w); - } - - /// - /// Returns if two vectors equals eachother. - /// - /// The left hand side vector. - /// The right hand side vector. - /// If equals. - public static bool operator ==(Vector4i lhs, Vector4i rhs) - { - return (lhs.x == rhs.x && lhs.y == rhs.y && lhs.z == rhs.z && lhs.w == rhs.w); - } - - /// - /// Returns if two vectors don't equal eachother. - /// - /// The left hand side vector. - /// The right hand side vector. - /// If not equals. - public static bool operator !=(Vector4i lhs, Vector4i rhs) - { - return (lhs.x != rhs.x || lhs.y != rhs.y || lhs.z != rhs.z || lhs.w != rhs.w); - } - - /// - /// Explicitly converts from a single-precision vector into an integer vector. - /// - /// The single-precision vector. - public static explicit operator Vector4i(Vector4 v) - { - return new Vector4i((int)v.x, (int)v.y, (int)v.z, (int)v.w); - } - - /// - /// Explicitly converts from a double-precision vector into an integer vector. - /// - /// The double-precision vector. - public static explicit operator Vector4i(Vector4d v) - { - return new Vector4i((int)v.x, (int)v.y, (int)v.z, (int)v.w); - } - #endregion - - #region Public Methods - #region Instance - /// - /// Set x, y and z components of an existing vector. - /// - /// The x value. - /// The y value. - /// The z value. - /// The w value. 
- public void Set(int x, int y, int z, int w) - { - this.x = x; - this.y = y; - this.z = z; - this.w = w; - } - - /// - /// Multiplies with another vector component-wise. - /// - /// The vector to multiply with. - public void Scale(ref Vector4i scale) - { - x *= scale.x; - y *= scale.y; - z *= scale.z; - w *= scale.w; - } - - /// - /// Clamps this vector between a specific range. - /// - /// The minimum component value. - /// The maximum component value. - public void Clamp(int min, int max) - { - if (x < min) x = min; - else if (x > max) x = max; - - if (y < min) y = min; - else if (y > max) y = max; - - if (z < min) z = min; - else if (z > max) z = max; - - if (w < min) w = min; - else if (w > max) w = max; - } - #endregion - - #region Object - /// - /// Returns a hash code for this vector. - /// - /// The hash code. - public override int GetHashCode() - { - return x.GetHashCode() ^ y.GetHashCode() << 2 ^ z.GetHashCode() >> 2 ^ w.GetHashCode() >> 1; - } - - /// - /// Returns if this vector is equal to another one. - /// - /// The other vector to compare to. - /// If equals. - public override bool Equals(object other) - { - if (!(other is Vector4i)) - { - return false; - } - Vector4i vector = (Vector4i)other; - return (x == vector.x && y == vector.y && z == vector.z && w == vector.w); - } - - /// - /// Returns if this vector is equal to another one. - /// - /// The other vector to compare to. - /// If equals. - public bool Equals(Vector4i other) - { - return (x == other.x && y == other.y && z == other.z && w == other.w); - } - - /// - /// Returns a nicely formatted string for this vector. - /// - /// The string. - public override string ToString() - { - return string.Format("({0}, {1}, {2}, {3})", - x.ToString(CultureInfo.InvariantCulture), - y.ToString(CultureInfo.InvariantCulture), - z.ToString(CultureInfo.InvariantCulture), - w.ToString(CultureInfo.InvariantCulture)); - } - - /// - /// Returns a nicely formatted string for this vector. 
- /// - /// The integer format. - /// The string. - public string ToString(string format) - { - return string.Format("({0}, {1}, {2}, {3})", - x.ToString(format, CultureInfo.InvariantCulture), - y.ToString(format, CultureInfo.InvariantCulture), - z.ToString(format, CultureInfo.InvariantCulture), - w.ToString(format, CultureInfo.InvariantCulture)); - } - #endregion - - #region Static - /// - /// Multiplies two vectors component-wise. - /// - /// The first vector. - /// The second vector. - /// The resulting vector. - public static void Scale(ref Vector4i a, ref Vector4i b, out Vector4i result) - { - result = new Vector4i(a.x * b.x, a.y * b.y, a.z * b.z, a.w * b.w); - } - #endregion - #endregion - } -} \ No newline at end of file diff --git a/LightlessSync/ThirdParty/MeshDecimator/Mesh.cs b/LightlessSync/ThirdParty/MeshDecimator/Mesh.cs deleted file mode 100644 index 2e38821..0000000 --- a/LightlessSync/ThirdParty/MeshDecimator/Mesh.cs +++ /dev/null @@ -1,955 +0,0 @@ -#region License -/* -MIT License - -Copyright(c) 2017-2018 Mattias Edlund - -Permission is hereby granted, free of charge, to any person obtaining a copy -of this software and associated documentation files (the "Software"), to deal -in the Software without restriction, including without limitation the rights -to use, copy, modify, merge, publish, distribute, sublicense, and/or sell -copies of the Software, and to permit persons to whom the Software is -furnished to do so, subject to the following conditions: - -The above copyright notice and this permission notice shall be included in all -copies or substantial portions of the Software. - -THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR -IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, -FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. 
IN NO EVENT SHALL THE -AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER -LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, -OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE -SOFTWARE. -*/ -#endregion - -using System; -using System.Collections.Generic; -using MeshDecimator.Math; - -namespace MeshDecimator -{ - /// - /// A mesh. - /// - public sealed class Mesh - { - #region Consts - /// - /// The count of supported UV channels. - /// - public const int UVChannelCount = 4; - #endregion - - #region Fields - private Vector3d[] vertices = null; - private int[][] indices = null; - private Vector3[] normals = null; - private Vector4[] tangents = null; - private Vector2[][] uvs2D = null; - private Vector3[][] uvs3D = null; - private Vector4[][] uvs4D = null; - private Vector4[] colors = null; - private BoneWeight[] boneWeights = null; - - private static readonly int[] emptyIndices = new int[0]; - #endregion - - #region Properties - /// - /// Gets the count of vertices of this mesh. - /// - public int VertexCount - { - get { return vertices.Length; } - } - - /// - /// Gets or sets the count of submeshes in this mesh. - /// - public int SubMeshCount - { - get { return indices.Length; } - set - { - if (value <= 0) - throw new ArgumentOutOfRangeException("value"); - - int[][] newIndices = new int[value][]; - Array.Copy(indices, 0, newIndices, 0, MathHelper.Min(indices.Length, newIndices.Length)); - indices = newIndices; - } - } - - /// - /// Gets the total count of triangles in this mesh. - /// - public int TriangleCount - { - get - { - int triangleCount = 0; - for (int i = 0; i < indices.Length; i++) - { - if (indices[i] != null) - { - triangleCount += indices[i].Length / 3; - } - } - return triangleCount; - } - } - - /// - /// Gets or sets the vertices for this mesh. Note that this resets all other vertex attributes. 
- /// - public Vector3d[] Vertices - { - get { return vertices; } - set - { - if (value == null) - throw new ArgumentNullException("value"); - - vertices = value; - ClearVertexAttributes(); - } - } - - /// - /// Gets or sets the combined indices for this mesh. Once set, the sub-mesh count gets set to 1. - /// - public int[] Indices - { - get - { - if (indices.Length == 1) - { - return indices[0] ?? emptyIndices; - } - else - { - List indexList = new List(TriangleCount * 3); - for (int i = 0; i < indices.Length; i++) - { - if (indices[i] != null) - { - indexList.AddRange(indices[i]); - } - } - return indexList.ToArray(); - } - } - set - { - if (value == null) - throw new ArgumentNullException("value"); - else if ((value.Length % 3) != 0) - throw new ArgumentException("The index count must be multiple by 3.", "value"); - - SubMeshCount = 1; - SetIndices(0, value); - } - } - - /// - /// Gets or sets the normals for this mesh. - /// - public Vector3[] Normals - { - get { return normals; } - set - { - if (value != null && value.Length != vertices.Length) - throw new ArgumentException(string.Format("The vertex normals must be as many as the vertices. Assigned: {0} Require: {1}", value.Length, vertices.Length)); - - normals = value; - } - } - - /// - /// Gets or sets the tangents for this mesh. - /// - public Vector4[] Tangents - { - get { return tangents; } - set - { - if (value != null && value.Length != vertices.Length) - throw new ArgumentException(string.Format("The vertex tangents must be as many as the vertices. Assigned: {0} Require: {1}", value.Length, vertices.Length)); - - tangents = value; - } - } - - /// - /// Gets or sets the first UV set for this mesh. - /// - public Vector2[] UV1 - { - get { return GetUVs2D(0); } - set { SetUVs(0, value); } - } - - /// - /// Gets or sets the second UV set for this mesh. - /// - public Vector2[] UV2 - { - get { return GetUVs2D(1); } - set { SetUVs(1, value); } - } - - /// - /// Gets or sets the third UV set for this mesh. 
- /// - public Vector2[] UV3 - { - get { return GetUVs2D(2); } - set { SetUVs(2, value); } - } - - /// - /// Gets or sets the fourth UV set for this mesh. - /// - public Vector2[] UV4 - { - get { return GetUVs2D(3); } - set { SetUVs(3, value); } - } - - /// - /// Gets or sets the vertex colors for this mesh. - /// - public Vector4[] Colors - { - get { return colors; } - set - { - if (value != null && value.Length != vertices.Length) - throw new ArgumentException(string.Format("The vertex colors must be as many as the vertices. Assigned: {0} Require: {1}", value.Length, vertices.Length)); - - colors = value; - } - } - - /// - /// Gets or sets the vertex bone weights for this mesh. - /// - public BoneWeight[] BoneWeights - { - get { return boneWeights; } - set - { - if (value != null && value.Length != vertices.Length) - throw new ArgumentException(string.Format("The vertex bone weights must be as many as the vertices. Assigned: {0} Require: {1}", value.Length, vertices.Length)); - - boneWeights = value; - } - } - #endregion - - #region Constructor - /// - /// Creates a new mesh. - /// - /// The mesh vertices. - /// The mesh indices. - public Mesh(Vector3d[] vertices, int[] indices) - { - if (vertices == null) - throw new ArgumentNullException("vertices"); - else if (indices == null) - throw new ArgumentNullException("indices"); - else if ((indices.Length % 3) != 0) - throw new ArgumentException("The index count must be multiple by 3.", "indices"); - - this.vertices = vertices; - this.indices = new int[1][]; - this.indices[0] = indices; - } - - /// - /// Creates a new mesh. - /// - /// The mesh vertices. - /// The mesh indices. 
- public Mesh(Vector3d[] vertices, int[][] indices) - { - if (vertices == null) - throw new ArgumentNullException("vertices"); - else if (indices == null) - throw new ArgumentNullException("indices"); - - for (int i = 0; i < indices.Length; i++) - { - if (indices[i] != null && (indices[i].Length % 3) != 0) - throw new ArgumentException(string.Format("The index count must be multiple by 3 at sub-mesh index {0}.", i), "indices"); - } - - this.vertices = vertices; - this.indices = indices; - } - #endregion - - #region Private Methods - private void ClearVertexAttributes() - { - normals = null; - tangents = null; - uvs2D = null; - uvs3D = null; - uvs4D = null; - colors = null; - boneWeights = null; - } - #endregion - - #region Public Methods - #region Recalculate Normals - /// - /// Recalculates the normals for this mesh smoothly. - /// - public void RecalculateNormals() - { - int vertexCount = vertices.Length; - Vector3[] normals = new Vector3[vertexCount]; - - int subMeshCount = this.indices.Length; - for (int subMeshIndex = 0; subMeshIndex < subMeshCount; subMeshIndex++) - { - int[] indices = this.indices[subMeshIndex]; - if (indices == null) - continue; - - int indexCount = indices.Length; - for (int i = 0; i < indexCount; i += 3) - { - int i0 = indices[i]; - int i1 = indices[i + 1]; - int i2 = indices[i + 2]; - - var v0 = (Vector3)vertices[i0]; - var v1 = (Vector3)vertices[i1]; - var v2 = (Vector3)vertices[i2]; - - var nx = v1 - v0; - var ny = v2 - v0; - Vector3 normal; - Vector3.Cross(ref nx, ref ny, out normal); - normal.Normalize(); - - normals[i0] += normal; - normals[i1] += normal; - normals[i2] += normal; - } - } - - for (int i = 0; i < vertexCount; i++) - { - normals[i].Normalize(); - } - - this.normals = normals; - } - #endregion - - #region Recalculate Tangents - /// - /// Recalculates the tangents for this mesh. 
- /// - public void RecalculateTangents() - { - // Make sure we have the normals first - if (normals == null) - return; - - // Also make sure that we have the first UV set - bool uvIs2D = (uvs2D != null && uvs2D[0] != null); - bool uvIs3D = (uvs3D != null && uvs3D[0] != null); - bool uvIs4D = (uvs4D != null && uvs4D[0] != null); - if (!uvIs2D && !uvIs3D && !uvIs4D) - return; - - int vertexCount = vertices.Length; - - var tangents = new Vector4[vertexCount]; - var tan1 = new Vector3[vertexCount]; - var tan2 = new Vector3[vertexCount]; - - Vector2[] uv2D = (uvIs2D ? uvs2D[0] : null); - Vector3[] uv3D = (uvIs3D ? uvs3D[0] : null); - Vector4[] uv4D = (uvIs4D ? uvs4D[0] : null); - - int subMeshCount = this.indices.Length; - for (int subMeshIndex = 0; subMeshIndex < subMeshCount; subMeshIndex++) - { - int[] indices = this.indices[subMeshIndex]; - if (indices == null) - continue; - - int indexCount = indices.Length; - for (int i = 0; i < indexCount; i += 3) - { - int i0 = indices[i]; - int i1 = indices[i + 1]; - int i2 = indices[i + 2]; - - var v0 = vertices[i0]; - var v1 = vertices[i1]; - var v2 = vertices[i2]; - - float s1, s2, t1, t2; - if (uvIs2D) - { - var w0 = uv2D[i0]; - var w1 = uv2D[i1]; - var w2 = uv2D[i2]; - s1 = w1.x - w0.x; - s2 = w2.x - w0.x; - t1 = w1.y - w0.y; - t2 = w2.y - w0.y; - } - else if (uvIs3D) - { - var w0 = uv3D[i0]; - var w1 = uv3D[i1]; - var w2 = uv3D[i2]; - s1 = w1.x - w0.x; - s2 = w2.x - w0.x; - t1 = w1.y - w0.y; - t2 = w2.y - w0.y; - } - else - { - var w0 = uv4D[i0]; - var w1 = uv4D[i1]; - var w2 = uv4D[i2]; - s1 = w1.x - w0.x; - s2 = w2.x - w0.x; - t1 = w1.y - w0.y; - t2 = w2.y - w0.y; - } - - - float x1 = (float)(v1.x - v0.x); - float x2 = (float)(v2.x - v0.x); - float y1 = (float)(v1.y - v0.y); - float y2 = (float)(v2.y - v0.y); - float z1 = (float)(v1.z - v0.z); - float z2 = (float)(v2.z - v0.z); - float r = 1f / (s1 * t2 - s2 * t1); - - var sdir = new Vector3((t2 * x1 - t1 * x2) * r, (t2 * y1 - t1 * y2) * r, (t2 * z1 - t1 * z2) * r); - 
var tdir = new Vector3((s1 * x2 - s2 * x1) * r, (s1 * y2 - s2 * y1) * r, (s1 * z2 - s2 * z1) * r); - - tan1[i0] += sdir; - tan1[i1] += sdir; - tan1[i2] += sdir; - tan2[i0] += tdir; - tan2[i1] += tdir; - tan2[i2] += tdir; - } - } - - for (int i = 0; i < vertexCount; i++) - { - var n = normals[i]; - var t = tan1[i]; - - var tmp = (t - n * Vector3.Dot(ref n, ref t)); - tmp.Normalize(); - - Vector3 c; - Vector3.Cross(ref n, ref t, out c); - float dot = Vector3.Dot(ref c, ref tan2[i]); - float w = (dot < 0f ? -1f : 1f); - tangents[i] = new Vector4(tmp.x, tmp.y, tmp.z, w); - } - - this.tangents = tangents; - } - #endregion - - #region Triangles - /// - /// Returns the count of triangles for a specific sub-mesh in this mesh. - /// - /// The sub-mesh index. - /// The triangle count. - public int GetTriangleCount(int subMeshIndex) - { - if (subMeshIndex < 0 || subMeshIndex >= indices.Length) - throw new IndexOutOfRangeException(); - - return indices[subMeshIndex].Length / 3; - } - - /// - /// Returns the triangle indices of a specific sub-mesh in this mesh. - /// - /// The sub-mesh index. - /// The triangle indices. - public int[] GetIndices(int subMeshIndex) - { - if (subMeshIndex < 0 || subMeshIndex >= indices.Length) - throw new IndexOutOfRangeException(); - - return indices[subMeshIndex] ?? emptyIndices; - } - - /// - /// Returns the triangle indices for all sub-meshes in this mesh. - /// - /// The sub-mesh triangle indices. - public int[][] GetSubMeshIndices() - { - var subMeshIndices = new int[indices.Length][]; - for (int subMeshIndex = 0; subMeshIndex < indices.Length; subMeshIndex++) - { - subMeshIndices[subMeshIndex] = indices[subMeshIndex] ?? emptyIndices; - } - return subMeshIndices; - } - - /// - /// Sets the triangle indices of a specific sub-mesh in this mesh. - /// - /// The sub-mesh index. - /// The triangle indices. 
- public void SetIndices(int subMeshIndex, int[] indices) - { - if (subMeshIndex < 0 || subMeshIndex >= this.indices.Length) - throw new IndexOutOfRangeException(); - else if (indices == null) - throw new ArgumentNullException("indices"); - else if ((indices.Length % 3) != 0) - throw new ArgumentException("The index count must be multiple by 3.", "indices"); - - this.indices[subMeshIndex] = indices; - } - #endregion - - #region UV Sets - #region Getting - /// - /// Returns the UV dimension for a specific channel. - /// - /// - /// The UV dimension count. - public int GetUVDimension(int channel) - { - if (channel < 0 || channel >= UVChannelCount) - throw new ArgumentOutOfRangeException("channel"); - - if (uvs2D != null && uvs2D[channel] != null) - { - return 2; - } - else if (uvs3D != null && uvs3D[channel] != null) - { - return 3; - } - else if (uvs4D != null && uvs4D[channel] != null) - { - return 4; - } - else - { - return 0; - } - } - - /// - /// Returns the UVs (2D) from a specific channel. - /// - /// The channel index. - /// The UVs. - public Vector2[] GetUVs2D(int channel) - { - if (channel < 0 || channel >= UVChannelCount) - throw new ArgumentOutOfRangeException("channel"); - - if (uvs2D != null && uvs2D[channel] != null) - { - return uvs2D[channel]; - } - else - { - return null; - } - } - - /// - /// Returns the UVs (3D) from a specific channel. - /// - /// The channel index. - /// The UVs. - public Vector3[] GetUVs3D(int channel) - { - if (channel < 0 || channel >= UVChannelCount) - throw new ArgumentOutOfRangeException("channel"); - - if (uvs3D != null && uvs3D[channel] != null) - { - return uvs3D[channel]; - } - else - { - return null; - } - } - - /// - /// Returns the UVs (4D) from a specific channel. - /// - /// The channel index. - /// The UVs. 
- public Vector4[] GetUVs4D(int channel) - { - if (channel < 0 || channel >= UVChannelCount) - throw new ArgumentOutOfRangeException("channel"); - - if (uvs4D != null && uvs4D[channel] != null) - { - return uvs4D[channel]; - } - else - { - return null; - } - } - - /// - /// Returns the UVs (2D) from a specific channel. - /// - /// The channel index. - /// The UVs. - public void GetUVs(int channel, List uvs) - { - if (channel < 0 || channel >= UVChannelCount) - throw new ArgumentOutOfRangeException("channel"); - else if (uvs == null) - throw new ArgumentNullException("uvs"); - - uvs.Clear(); - if (uvs2D != null && uvs2D[channel] != null) - { - var uvData = uvs2D[channel]; - if (uvData != null) - { - uvs.AddRange(uvData); - } - } - } - - /// - /// Returns the UVs (3D) from a specific channel. - /// - /// The channel index. - /// The UVs. - public void GetUVs(int channel, List uvs) - { - if (channel < 0 || channel >= UVChannelCount) - throw new ArgumentOutOfRangeException("channel"); - else if (uvs == null) - throw new ArgumentNullException("uvs"); - - uvs.Clear(); - if (uvs3D != null && uvs3D[channel] != null) - { - var uvData = uvs3D[channel]; - if (uvData != null) - { - uvs.AddRange(uvData); - } - } - } - - /// - /// Returns the UVs (4D) from a specific channel. - /// - /// The channel index. - /// The UVs. - public void GetUVs(int channel, List uvs) - { - if (channel < 0 || channel >= UVChannelCount) - throw new ArgumentOutOfRangeException("channel"); - else if (uvs == null) - throw new ArgumentNullException("uvs"); - - uvs.Clear(); - if (uvs4D != null && uvs4D[channel] != null) - { - var uvData = uvs4D[channel]; - if (uvData != null) - { - uvs.AddRange(uvData); - } - } - } - #endregion - - #region Setting - /// - /// Sets the UVs (2D) for a specific channel. - /// - /// The channel index. - /// The UVs. 
- public void SetUVs(int channel, Vector2[] uvs) - { - if (channel < 0 || channel >= UVChannelCount) - throw new ArgumentOutOfRangeException("channel"); - - if (uvs != null && uvs.Length > 0) - { - if (uvs.Length != vertices.Length) - throw new ArgumentException(string.Format("The vertex UVs must be as many as the vertices. Assigned: {0} Require: {1}", uvs.Length, vertices.Length)); - - if (uvs2D == null) - uvs2D = new Vector2[UVChannelCount][]; - - int uvCount = uvs.Length; - var uvSet = new Vector2[uvCount]; - uvs2D[channel] = uvSet; - uvs.CopyTo(uvSet, 0); - } - else - { - if (uvs2D != null) - { - uvs2D[channel] = null; - } - } - - if (uvs3D != null) - { - uvs3D[channel] = null; - } - if (uvs4D != null) - { - uvs4D[channel] = null; - } - } - - /// - /// Sets the UVs (3D) for a specific channel. - /// - /// The channel index. - /// The UVs. - public void SetUVs(int channel, Vector3[] uvs) - { - if (channel < 0 || channel >= UVChannelCount) - throw new ArgumentOutOfRangeException("channel"); - - if (uvs != null && uvs.Length > 0) - { - int uvCount = uvs.Length; - if (uvCount != vertices.Length) - throw new ArgumentException(string.Format("The vertex UVs must be as many as the vertices. Assigned: {0} Require: {1}", uvCount, vertices.Length), "uvs"); - - if (uvs3D == null) - uvs3D = new Vector3[UVChannelCount][]; - - var uvSet = new Vector3[uvCount]; - uvs3D[channel] = uvSet; - uvs.CopyTo(uvSet, 0); - } - else - { - if (uvs3D != null) - { - uvs3D[channel] = null; - } - } - - if (uvs2D != null) - { - uvs2D[channel] = null; - } - if (uvs4D != null) - { - uvs4D[channel] = null; - } - } - - /// - /// Sets the UVs (4D) for a specific channel. - /// - /// The channel index. - /// The UVs. 
- public void SetUVs(int channel, Vector4[] uvs) - { - if (channel < 0 || channel >= UVChannelCount) - throw new ArgumentOutOfRangeException("channel"); - - if (uvs != null && uvs.Length > 0) - { - int uvCount = uvs.Length; - if (uvCount != vertices.Length) - throw new ArgumentException(string.Format("The vertex UVs must be as many as the vertices. Assigned: {0} Require: {1}", uvCount, vertices.Length), "uvs"); - - if (uvs4D == null) - uvs4D = new Vector4[UVChannelCount][]; - - var uvSet = new Vector4[uvCount]; - uvs4D[channel] = uvSet; - uvs.CopyTo(uvSet, 0); - } - else - { - if (uvs4D != null) - { - uvs4D[channel] = null; - } - } - - if (uvs2D != null) - { - uvs2D[channel] = null; - } - if (uvs3D != null) - { - uvs3D[channel] = null; - } - } - - /// - /// Sets the UVs (2D) for a specific channel. - /// - /// The channel index. - /// The UVs. - public void SetUVs(int channel, List uvs) - { - if (channel < 0 || channel >= UVChannelCount) - throw new ArgumentOutOfRangeException("channel"); - - if (uvs != null && uvs.Count > 0) - { - int uvCount = uvs.Count; - if (uvCount != vertices.Length) - throw new ArgumentException(string.Format("The vertex UVs must be as many as the vertices. Assigned: {0} Require: {1}", uvCount, vertices.Length), "uvs"); - - if (uvs2D == null) - uvs2D = new Vector2[UVChannelCount][]; - - var uvSet = new Vector2[uvCount]; - uvs2D[channel] = uvSet; - uvs.CopyTo(uvSet, 0); - } - else - { - if (uvs2D != null) - { - uvs2D[channel] = null; - } - } - - if (uvs3D != null) - { - uvs3D[channel] = null; - } - if (uvs4D != null) - { - uvs4D[channel] = null; - } - } - - /// - /// Sets the UVs (3D) for a specific channel. - /// - /// The channel index. - /// The UVs. 
- public void SetUVs(int channel, List uvs) - { - if (channel < 0 || channel >= UVChannelCount) - throw new ArgumentOutOfRangeException("channel"); - - if (uvs != null && uvs.Count > 0) - { - int uvCount = uvs.Count; - if (uvCount != vertices.Length) - throw new ArgumentException(string.Format("The vertex UVs must be as many as the vertices. Assigned: {0} Require: {1}", uvCount, vertices.Length), "uvs"); - - if (uvs3D == null) - uvs3D = new Vector3[UVChannelCount][]; - - var uvSet = new Vector3[uvCount]; - uvs3D[channel] = uvSet; - uvs.CopyTo(uvSet, 0); - } - else - { - if (uvs3D != null) - { - uvs3D[channel] = null; - } - } - - if (uvs2D != null) - { - uvs2D[channel] = null; - } - if (uvs4D != null) - { - uvs4D[channel] = null; - } - } - - /// - /// Sets the UVs (4D) for a specific channel. - /// - /// The channel index. - /// The UVs. - public void SetUVs(int channel, List uvs) - { - if (channel < 0 || channel >= UVChannelCount) - throw new ArgumentOutOfRangeException("channel"); - - if (uvs != null && uvs.Count > 0) - { - int uvCount = uvs.Count; - if (uvCount != vertices.Length) - throw new ArgumentException(string.Format("The vertex UVs must be as many as the vertices. Assigned: {0} Require: {1}", uvCount, vertices.Length), "uvs"); - - if (uvs4D == null) - uvs4D = new Vector4[UVChannelCount][]; - - var uvSet = new Vector4[uvCount]; - uvs4D[channel] = uvSet; - uvs.CopyTo(uvSet, 0); - } - else - { - if (uvs4D != null) - { - uvs4D[channel] = null; - } - } - - if (uvs2D != null) - { - uvs2D[channel] = null; - } - if (uvs3D != null) - { - uvs3D[channel] = null; - } - } - #endregion - #endregion - - #region To String - /// - /// Returns the text-representation of this mesh. - /// - /// The text-representation. 
- public override string ToString() - { - return string.Format("Vertices: {0}", vertices.Length); - } - #endregion - #endregion - } -} \ No newline at end of file diff --git a/LightlessSync/ThirdParty/MeshDecimator/MeshDecimation.cs b/LightlessSync/ThirdParty/MeshDecimator/MeshDecimation.cs deleted file mode 100644 index cb13fe8..0000000 --- a/LightlessSync/ThirdParty/MeshDecimator/MeshDecimation.cs +++ /dev/null @@ -1,180 +0,0 @@ -#region License -/* -MIT License - -Copyright(c) 2017-2018 Mattias Edlund - -Permission is hereby granted, free of charge, to any person obtaining a copy -of this software and associated documentation files (the "Software"), to deal -in the Software without restriction, including without limitation the rights -to use, copy, modify, merge, publish, distribute, sublicense, and/or sell -copies of the Software, and to permit persons to whom the Software is -furnished to do so, subject to the following conditions: - -The above copyright notice and this permission notice shall be included in all -copies or substantial portions of the Software. - -THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR -IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, -FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE -AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER -LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, -OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE -SOFTWARE. -*/ -#endregion - -using System; -using MeshDecimator.Algorithms; - -namespace MeshDecimator -{ - #region Algorithm - /// - /// The decimation algorithms. - /// - public enum Algorithm - { - /// - /// The default algorithm. - /// - Default, - /// - /// The fast quadric mesh simplification algorithm. - /// - FastQuadricMesh - } - #endregion - - /// - /// The mesh decimation API. 
- /// - public static class MeshDecimation - { - #region Public Methods - #region Create Algorithm - /// - /// Creates a specific decimation algorithm. - /// - /// The desired algorithm. - /// The decimation algorithm. - public static DecimationAlgorithm CreateAlgorithm(Algorithm algorithm) - { - DecimationAlgorithm alg = null; - - switch (algorithm) - { - case Algorithm.Default: - case Algorithm.FastQuadricMesh: - alg = new FastQuadricMeshSimplification(); - break; - default: - throw new ArgumentException("The specified algorithm is not supported.", "algorithm"); - } - - return alg; - } - #endregion - - #region Decimate Mesh - /// - /// Decimates a mesh. - /// - /// The mesh to decimate. - /// The target triangle count. - /// The decimated mesh. - public static Mesh DecimateMesh(Mesh mesh, int targetTriangleCount) - { - return DecimateMesh(Algorithm.Default, mesh, targetTriangleCount); - } - - /// - /// Decimates a mesh. - /// - /// The desired algorithm. - /// The mesh to decimate. - /// The target triangle count. - /// The decimated mesh. - public static Mesh DecimateMesh(Algorithm algorithm, Mesh mesh, int targetTriangleCount) - { - if (mesh == null) - throw new ArgumentNullException("mesh"); - - var decimationAlgorithm = CreateAlgorithm(algorithm); - return DecimateMesh(decimationAlgorithm, mesh, targetTriangleCount); - } - - /// - /// Decimates a mesh. - /// - /// The decimation algorithm. - /// The mesh to decimate. - /// The target triangle count. - /// The decimated mesh. 
- public static Mesh DecimateMesh(DecimationAlgorithm algorithm, Mesh mesh, int targetTriangleCount) - { - if (algorithm == null) - throw new ArgumentNullException("algorithm"); - else if (mesh == null) - throw new ArgumentNullException("mesh"); - - int currentTriangleCount = mesh.TriangleCount; - if (targetTriangleCount > currentTriangleCount) - targetTriangleCount = currentTriangleCount; - else if (targetTriangleCount < 0) - targetTriangleCount = 0; - - algorithm.Initialize(mesh); - algorithm.DecimateMesh(targetTriangleCount); - return algorithm.ToMesh(); - } - #endregion - - #region Decimate Mesh Lossless - /// - /// Decimates a mesh without losing any quality. - /// - /// The mesh to decimate. - /// The decimated mesh. - public static Mesh DecimateMeshLossless(Mesh mesh) - { - return DecimateMeshLossless(Algorithm.Default, mesh); - } - - /// - /// Decimates a mesh without losing any quality. - /// - /// The desired algorithm. - /// The mesh to decimate. - /// The decimated mesh. - public static Mesh DecimateMeshLossless(Algorithm algorithm, Mesh mesh) - { - if (mesh == null) - throw new ArgumentNullException("mesh"); - - var decimationAlgorithm = CreateAlgorithm(algorithm); - return DecimateMeshLossless(decimationAlgorithm, mesh); - } - - /// - /// Decimates a mesh without losing any quality. - /// - /// The decimation algorithm. - /// The mesh to decimate. - /// The decimated mesh. 
- public static Mesh DecimateMeshLossless(DecimationAlgorithm algorithm, Mesh mesh) - { - if (algorithm == null) - throw new ArgumentNullException("algorithm"); - else if (mesh == null) - throw new ArgumentNullException("mesh"); - - int currentTriangleCount = mesh.TriangleCount; - algorithm.Initialize(mesh); - algorithm.DecimateMeshLossless(); - return algorithm.ToMesh(); - } - #endregion - #endregion - } -} \ No newline at end of file diff --git a/LightlessSync/ThirdParty/Nanomesh/Algo/Decimation/Decimate.cs b/LightlessSync/ThirdParty/Nanomesh/Algo/Decimation/Decimate.cs new file mode 100644 index 0000000..a69485b --- /dev/null +++ b/LightlessSync/ThirdParty/Nanomesh/Algo/Decimation/Decimate.cs @@ -0,0 +1,1325 @@ +using System; +using System.Collections.Generic; +using System.Diagnostics; +using System.Linq; + +namespace Nanomesh +{ + public partial class DecimateModifier + { + // Heuristics + internal static bool UpdateFarNeighbors = false; + internal static bool UpdateMinsOnCollapse = true; + internal static float MergeNormalsThresholdDegrees = 90; + internal static float NormalSimilarityThresholdDegrees = 60; + internal static float CollapseToMidpointPenalty = 0.4716252f; + internal static bool CollapseToEndpointsOnly = false; + internal static float UvSimilarityThreshold = 0.02f; + internal static float UvSeamAngleCos = 0.99f; + internal static bool BlockUvSeamVertices = true; + internal static float BoneWeightSimilarityThreshold = 0.85f; + internal static bool LimitCollapseEdgeLength = false; + internal static float MaxCollapseEdgeLength = float.PositiveInfinity; + internal static bool AllowBoundaryCollapses = false; + internal static float BodyCollisionPenetrationFactor = 0.75f; + + // Constants + private const double _DeterminantEpsilon = 0.001f; + private const float _MinTriangleAreaRatio = 0.05f; + private const float _UvDirEpsilonSq = 1e-12f; + private const double _OFFSET_HARD = 1e6; + private const double _OFFSET_NOCOLLAPSE = 1e300; + + // Instance + 
// NOTE(review): this span was recovered from a whitespace-mangled patch in which all
// angle-bracket content was stripped. Generic type arguments below were reconstructed
// from usage (element types read/written, member access) — confirm against project sources.

// --- Working state -------------------------------------------------------

private ConnectedMesh _mesh;                         // mesh being decimated (set in Initialize)
private SymmetricMatrix[] _matrices;                 // per-position quadric error matrices
private FastHashSet<EdgeCollapse> _pairs;            // all candidate edge collapses
private LinkedHashSet<EdgeCollapse> _mins;           // cached subset of lowest-error candidates
private int _lastProgress = int.MinValue;            // last reported progress percentage
private int _initialTriangleCount;
private float _mergeNormalsThresholdCos = MathF.Cos(MergeNormalsThresholdDegrees * MathF.PI / 180f);
private float _normalSimilarityThresholdCos = MathF.Cos(NormalSimilarityThresholdDegrees * MathF.PI / 180f);

// Rejection / progress counters reported via GetStats().
private int _evaluatedEdges;
private int _collapsedEdges;
private int _rejectedBoneWeights;
private int _rejectedTopology;
private int _rejectedInversion;
private int _rejectedDegenerate;
private int _rejectedArea;
private int _rejectedFlip;
private int _rejectedBodyCollision;

// Optional body-collision data: per-position squared distance to a "body" mesh,
// a threshold, and an evaluator for arbitrary points.
private float[]? _bodyDistanceSq;
private float _bodyDistanceThresholdSq;
private Func<Vector3, float>? _bodyDistanceSqEvaluator;
private bool[]? _protectedVertices;                  // per-position flags; protected vertices are never collapsed

/// <summary>The mesh currently attached to this modifier.</summary>
public ConnectedMesh Mesh => _mesh;

/// <summary>Snapshot of the evaluation/rejection counters accumulated since the last <see cref="Initialize"/>.</summary>
public DecimationStats GetStats()
    => new DecimationStats(
        _evaluatedEdges,
        _collapsedEdges,
        _rejectedBoneWeights,
        _rejectedTopology,
        _rejectedInversion,
        _rejectedDegenerate,
        _rejectedArea,
        _rejectedFlip,
        _rejectedBodyCollision);

/// <summary>
/// Configures body-collision avoidance. <paramref name="bodyDistanceSq"/> is indexed by
/// position; NaN entries mean "no data". <paramref name="bodyDistanceSqEvaluator"/> evaluates
/// the squared distance for arbitrary points (used for midpoints/centroids).
/// </summary>
public void SetBodyCollision(float[]? bodyDistanceSq, float bodyDistanceThresholdSq, Func<Vector3, float>? bodyDistanceSqEvaluator = null)
{
    _bodyDistanceSq = bodyDistanceSq;
    _bodyDistanceThresholdSq = bodyDistanceThresholdSq;
    _bodyDistanceSqEvaluator = bodyDistanceSqEvaluator;
}

/// <summary>Marks positions that must never participate in a collapse (indexed by position; null disables).</summary>
public void SetProtectedVertices(bool[]? protectedVertices)
{
    _protectedVertices = protectedVertices;
}

/// <summary>
/// Attaches a mesh, resets statistics, builds the candidate edge set, computes per-position
/// quadrics and the initial error of every candidate. Must be called before any Decimate* call.
/// </summary>
public void Initialize(ConnectedMesh mesh)
{
    _mesh = mesh;
    ResetStats();

    _initialTriangleCount = mesh.FaceCount;

    _matrices = new SymmetricMatrix[mesh.positions.Length];
    _pairs = new FastHashSet<EdgeCollapse>();
    _mins = new LinkedHashSet<EdgeCollapse>();

    InitializePairs();

    for (int p = 0; p < _mesh.PositionToNode.Length; p++)
    {
        // -1 marks a removed/unused position.
        if (_mesh.PositionToNode[p] != -1)
            CalculateQuadric(p);
    }

    foreach (EdgeCollapse pair in _pairs)
    {
        CalculateError(pair);
    }
}

/// <summary>Collapses edges while the cheapest candidate stays at or below <paramref name="maximumError"/>.</summary>
public void DecimateToError(float maximumError)
{
    while (GetPairWithMinimumError().error <= maximumError && _pairs.Count > 0)
    {
        Iterate();
    }
}

/// <summary>Decimates until only <paramref name="targetTriangleRatio"/> (clamped to [0,1]) of the current triangles remain.</summary>
public void DecimateToRatio(float targetTriangleRatio)
{
    targetTriangleRatio = MathF.Clamp(targetTriangleRatio, 0f, 1f);
    DecimateToPolycount((int)MathF.Round(targetTriangleRatio * _mesh.FaceCount));
}

/// <summary>Removes <paramref name="polycount"/> triangles (i.e. decimates BY that amount, not TO it).</summary>
public void DecimatePolycount(int polycount)
{
    DecimateToPolycount((int)MathF.Round(_mesh.FaceCount - polycount));
}

/// <summary>Decimates until the face count drops to <paramref name="targetTriangleCount"/> or no candidate remains.</summary>
public void DecimateToPolycount(int targetTriangleCount)
{
    while (_mesh.FaceCount > targetTriangleCount && _pairs.Count > 0)
    {
        Iterate();

        // Progress is tracked in 10% steps; currently only recorded, not reported anywhere.
        int progress = (int)MathF.Round(100f * (_initialTriangleCount - _mesh.FaceCount) / (_initialTriangleCount - targetTriangleCount));
        if (progress >= _lastProgress + 10)
        {
            _lastProgress = progress;
        }
    }
}

/// <summary>
/// Performs one collapse step: pops non-collapsible candidates (error >= _OFFSET_NOCOLLAPSE)
/// off the queue, then collapses the cheapest remaining edge.
/// </summary>
public void Iterate()
{
    EdgeCollapse pair = GetPairWithMinimumError();
    while (pair != null && pair.error >= _OFFSET_NOCOLLAPSE)
    {
        _pairs.Remove(pair);
        _mins.Remove(pair);
        pair = GetPairWithMinimumError();
    }

    if (pair == null)
        return;

    Debug.Assert(_mesh.CheckEdge(_mesh.PositionToNode[pair.posA], _mesh.PositionToNode[pair.posB]));

    _pairs.Remove(pair);
    _mins.Remove(pair);

    CollapseEdge(pair);
}

/// <summary>Error of the cheapest remaining candidate, or +infinity when none is left.</summary>
public double GetMinimumError()
{
    return GetPairWithMinimumError()?.error ?? double.PositiveInfinity;
}

private EdgeCollapse GetPairWithMinimumError()
{
    // Lazily refill the cached min-set from the full candidate set.
    if (_mins.Count == 0)
        ComputeMins();

    LinkedHashSet<EdgeCollapse>.LinkedHashNode edge = _mins.First;

    return edge?.Value;
}

// Size of the cached min-set: 500, capped by the number of candidates.
// NOTE(review): MathF.Clamp with int arguments implies a project-local MathF shim
// (System.MathF has no Clamp) — confirm.
private int MinsCount => MathF.Clamp(500, 0, _pairs.Count);

private void ComputeMins()
{
    // EdgeCollapse.CompareTo orders by error, so this keeps the cheapest candidates.
    _mins = new LinkedHashSet<EdgeCollapse>(_pairs.OrderBy(x => x).Take(MinsCount));
}

/// <summary>Builds the initial candidate set: one EdgeCollapse per undirected edge of the mesh.</summary>
private void InitializePairs()
{
    _pairs.Clear();
    _mins.Clear();

    for (int p = 0; p < _mesh.PositionToNode.Length; p++)
    {
        int nodeIndex = _mesh.PositionToNode[p];
        if (nodeIndex < 0)
        {
            continue;
        }

        int sibling = nodeIndex;
        do
        {
            // The two relatives of a node are the other corners of its face;
            // together they form one edge of that face.
            int firstRelative = _mesh.nodes[sibling].relative;
            int secondRelative = _mesh.nodes[firstRelative].relative;

            EdgeCollapse pair = new EdgeCollapse(_mesh.nodes[firstRelative].position, _mesh.nodes[secondRelative].position);

            _pairs.Add(pair);

            Debug.Assert(_mesh.CheckEdge(_mesh.PositionToNode[pair.posA], _mesh.PositionToNode[pair.posB]));

        } while ((sibling = _mesh.nodes[sibling].sibling) != nodeIndex);
    }
}

/// <summary>Accumulates the quadric (sum of face-plane matrices) for every face around <paramref name="position"/>.</summary>
private void CalculateQuadric(int position)
{
    int nodeIndex = _mesh.PositionToNode[position];

    Debug.Assert(nodeIndex >= 0);
    Debug.Assert(!_mesh.nodes[nodeIndex].IsRemoved);

    SymmetricMatrix symmetricMatrix = new SymmetricMatrix();

    int sibling = nodeIndex;
    do
    {
        Debug.Assert(_mesh.CheckRelatives(sibling));

        Vector3 faceNormal = _mesh.GetFaceNormal(sibling);
        // Plane equation n·p + d = 0 → d = -n·p for any point p on the face.
        double dot = Vector3.Dot(-faceNormal, _mesh.positions[_mesh.nodes[sibling].position]);
        symmetricMatrix += new SymmetricMatrix(faceNormal.x, faceNormal.y, faceNormal.z, dot);

    } while ((sibling = _mesh.nodes[sibling].sibling) != nodeIndex);

    _matrices[position] = symmetricMatrix;
}

// Scratch sets reused across calls to avoid per-call allocations (not thread-safe).
private readonly HashSet<int> _adjacentEdges = new HashSet<int>();
private readonly HashSet<int> _adjacentEdgesA = new HashSet<int>();
private readonly HashSet<int> _adjacentEdgesB = new HashSet<int>();

/// <summary>
/// Positions adjacent to <paramref name="nodeIndex"/>, excluding the position of
/// <paramref name="nodeAvoid"/>. Returns the shared scratch set — consume before the next call.
/// </summary>
private IEnumerable<int> GetAdjacentPositions(int nodeIndex, int nodeAvoid)
{
    _adjacentEdges.Clear();

    int posToAvoid = _mesh.nodes[nodeAvoid].position;

    int sibling = nodeIndex;
    do
    {
        for (int relative = sibling; (relative = _mesh.nodes[relative].relative) != sibling;)
        {
            if (_mesh.nodes[relative].position != posToAvoid)
            {
                _adjacentEdges.Add(_mesh.nodes[relative].position);
            }
        }
    } while ((sibling = _mesh.nodes[sibling].sibling) != nodeIndex);

    return _adjacentEdges;
}

/// <summary>Same as <see cref="GetAdjacentPositions"/> but fills a caller-supplied set.</summary>
private void FillAdjacentPositions(int nodeIndex, int nodeAvoid, HashSet<int> output)
{
    output.Clear();

    int posToAvoid = _mesh.nodes[nodeAvoid].position;

    int sibling = nodeIndex;
    do
    {
        for (int relative = sibling; (relative = _mesh.nodes[relative].relative) != sibling;)
        {
            if (_mesh.nodes[relative].position != posToAvoid)
            {
                output.Add(_mesh.nodes[relative].position);
            }
        }
    } while ((sibling = _mesh.nodes[sibling].sibling) != nodeIndex);
}

/// <summary>Variant of <see cref="FillAdjacentPositions"/> that takes the excluded POSITION directly.</summary>
private void FillAdjacentPositionsByPos(int nodeIndex, int posToAvoid, HashSet<int> output)
{
    output.Clear();

    int sibling = nodeIndex;
    do
    {
        for (int relative = sibling; (relative = _mesh.nodes[relative].relative) != sibling;)
        {
            int pos = _mesh.nodes[relative].position;
            if (pos != posToAvoid)
            {
                output.Add(pos);
            }
        }
    } while ((sibling = _mesh.nodes[sibling].sibling) != nodeIndex);
}

// Topological weight of an edge, cached on the EdgeCollapse (-1 = not computed yet).
private double GetEdgeTopo(EdgeCollapse edge)
{
    if (edge.Weight == -1)
    {
        edge.SetWeight(_mesh.GetEdgeTopo(_mesh.PositionToNode[edge.posA], _mesh.PositionToNode[edge.posB]));
    }
    return edge.Weight;
}

public static bool UseEdgeLength = true;

/// <summary>
/// Computes the collapse error and target position for <paramref name="pair"/>.
/// Runs the rejection filters (bone weights, normals, UV seams, protected vertices,
/// topology, boundary, edge length, body collision); a rejected pair gets
/// error = _OFFSET_NOCOLLAPSE. Otherwise picks the cheapest of the quadric optimum O,
/// endpoints A/B and midpoint C.
/// </summary>
private void CalculateError(EdgeCollapse pair)
{
    Debug.Assert(_mesh.CheckEdge(_mesh.PositionToNode[pair.posA], _mesh.PositionToNode[pair.posB]));

    Vector3 posA = _mesh.positions[pair.posA];
    Vector3 posB = _mesh.positions[pair.posB];
    _evaluatedEdges++;

    if (ShouldBlockBoneWeightCollapse(pair.posA, pair.posB))
    {
        _rejectedBoneWeights++;
        pair.error = _OFFSET_NOCOLLAPSE;
        return;
    }
    if (ShouldBlockNormalCollapse(pair.posA, pair.posB))
    {
        _rejectedTopology++;
        pair.error = _OFFSET_NOCOLLAPSE;
        return;
    }
    if (ShouldBlockUvCollapse(pair.posA, pair.posB))
    {
        _rejectedTopology++;
        pair.error = _OFFSET_NOCOLLAPSE;
        return;
    }
    if (IsProtectedVertex(pair.posA) || IsProtectedVertex(pair.posB))
    {
        _rejectedBodyCollision++;
        pair.error = _OFFSET_NOCOLLAPSE;
        return;
    }

    var edgeTopo = GetEdgeTopo(pair);
    if (edgeTopo > 0d && !AllowBoundaryCollapses)
    {
        _rejectedTopology++;
        pair.error = _OFFSET_NOCOLLAPSE;
        return;
    }
    Vector3 posC = (posB + posA) / 2;

    int nodeA = _mesh.PositionToNode[pair.posA];
    int nodeB = _mesh.PositionToNode[pair.posB];
    if (!CollapsePreservesTopology(pair))
    {
        _rejectedTopology++;
        pair.error = _OFFSET_NOCOLLAPSE;
        return;
    }
    if (!AllowBoundaryCollapses && (IsBoundaryVertex(nodeA) || IsBoundaryVertex(nodeB)))
    {
        _rejectedTopology++;
        pair.error = _OFFSET_NOCOLLAPSE;
        return;
    }

    double errorCollapseToO;
    Vector3 posO = Vector3.PositiveInfinity;

    // If a node is smooth (no hard edge connected, no uv break or no border), we can compute a quadric error.
    // Otherwise, we add up linear errors for every non smooth source.
    // If both nodes of the edge are smooth, we can find the optimal position to collapse to by inverting the
    // quadric matrix, otherwise, we pick the best between A, B, and the position in the middle, C.

    SymmetricMatrix q = _matrices[pair.posA] + _matrices[pair.posB];
    double det = q.DeterminantXYZ();

    if (det > _DeterminantEpsilon || det < -_DeterminantEpsilon)
    {
        // Invertible quadric: solve for the optimal collapse position O (Cramer's rule).
        posO = new Vector3(
            -1d / det * q.DeterminantX(),
            +1d / det * q.DeterminantY(),
            -1d / det * q.DeterminantZ());
        errorCollapseToO = ComputeVertexError(q, posO.x, posO.y, posO.z);
    }
    else
    {
        errorCollapseToO = _OFFSET_NOCOLLAPSE;
    }

    double errorCollapseToA = ComputeVertexError(q, posA.x, posA.y, posA.z);
    double errorCollapseToB = ComputeVertexError(q, posB.x, posB.y, posB.z);
    double errorCollapseToC = ComputeVertexError(q, posC.x, posC.y, posC.z);

    int pA = _mesh.nodes[nodeA].position;
    int pB = _mesh.nodes[nodeB].position;

    // We multiply by edge length to stay agnostic of the quadric error scale.
    // Otherwise it becomes too scale dependent.
    double length = (posB - posA).Length;
    if (LimitCollapseEdgeLength && length > MaxCollapseEdgeLength)
    {
        _rejectedTopology++;
        pair.error = _OFFSET_NOCOLLAPSE;
        return;
    }

    // Add lineic penalties for edges around A that would be stretched by collapsing away from A.
    foreach (int pD in GetAdjacentPositions(nodeA, nodeB))
    {
        Vector3 posD = _mesh.positions[pD];
        EdgeCollapse edge = new EdgeCollapse(pA, pD);
        if (_pairs.TryGetValue(edge, out EdgeCollapse realEdge))
        {
            double weight = GetEdgeTopo(realEdge);
            errorCollapseToB += weight * length * ComputeLineicError(posB, posD, posA);
            errorCollapseToC += weight * length * ComputeLineicError(posC, posD, posA);
        }
    }

    // Same for edges around B.
    foreach (int pD in GetAdjacentPositions(nodeB, nodeA))
    {
        Vector3 posD = _mesh.positions[pD];
        EdgeCollapse edge = new EdgeCollapse(pB, pD);
        if (_pairs.TryGetValue(edge, out EdgeCollapse realEdge))
        {
            double weight = GetEdgeTopo(realEdge);
            errorCollapseToA += weight * length * ComputeLineicError(posA, posD, posB);
            errorCollapseToC += weight * length * ComputeLineicError(posC, posD, posB);
        }
    }

    errorCollapseToC *= CollapseToMidpointPenalty;

    if (CollapseToEndpointsOnly)
    {
        errorCollapseToO = _OFFSET_NOCOLLAPSE;
        errorCollapseToC = _OFFSET_NOCOLLAPSE;
    }

    // Endpoint-only mode with per-vertex body distances: forbid collapsing onto the
    // endpoint that sits closer to the body.
    if (CollapseToEndpointsOnly && _bodyDistanceSq != null && _bodyDistanceThresholdSq > 0f)
    {
        var hasA = TryGetBodyDistanceSq(pair.posA, out var distASq);
        var hasB = TryGetBodyDistanceSq(pair.posB, out var distBSq);
        var nearA = hasA && distASq <= _bodyDistanceThresholdSq;
        var nearB = hasB && distBSq <= _bodyDistanceThresholdSq;

        if (nearA && nearB)
        {
            if (distASq > distBSq)
            {
                errorCollapseToB = _OFFSET_NOCOLLAPSE;
            }
            else if (distBSq > distASq)
            {
                errorCollapseToA = _OFFSET_NOCOLLAPSE;
            }
            else
            {
                errorCollapseToA = _OFFSET_NOCOLLAPSE;
                errorCollapseToB = _OFFSET_NOCOLLAPSE;
            }
        }
        else
        {
            if (nearA)
            {
                errorCollapseToA = _OFFSET_NOCOLLAPSE;
            }

            if (nearB)
            {
                errorCollapseToB = _OFFSET_NOCOLLAPSE;
            }
        }

        if (hasA && hasB)
        {
            if (distASq > distBSq)
            {
                errorCollapseToB = _OFFSET_NOCOLLAPSE;
            }
            else if (distBSq > distASq)
            {
                errorCollapseToA = _OFFSET_NOCOLLAPSE;
            }
        }

        if (errorCollapseToA >= _OFFSET_NOCOLLAPSE && errorCollapseToB >= _OFFSET_NOCOLLAPSE)
        {
            _rejectedBodyCollision++;
            pair.error = _OFFSET_NOCOLLAPSE;
            return;
        }
    }

    if (!CollapseToEndpointsOnly && IsPointNearBody((posA + posB) * 0.5))
    {
        _rejectedBodyCollision++;
        pair.error = _OFFSET_NOCOLLAPSE;
        return;
    }

    MathUtils.SelectMin(
        errorCollapseToO, errorCollapseToA, errorCollapseToB, errorCollapseToC,
        posO, posA, posB, posC,
        out pair.error, out pair.result);

    pair.error = Math.Max(0d, pair.error);

    if (!CollapseWillInvert(pair))
    {
        pair.error = _OFFSET_NOCOLLAPSE;
    }

    // TODO : Make it insensitive to model scale
}

/// <summary>
/// Manifold check: a collapsible interior edge must share exactly 2 neighbors between
/// its two one-rings (>= 1 when boundary collapses are allowed; &gt; 2 means a pinched fan).
/// </summary>
private bool CollapsePreservesTopology(EdgeCollapse edge)
{
    int nodeIndexA = _mesh.PositionToNode[edge.posA];
    int nodeIndexB = _mesh.PositionToNode[edge.posB];
    if (nodeIndexA < 0 || nodeIndexB < 0)
    {
        return true;
    }

    FillAdjacentPositions(nodeIndexA, nodeIndexB, _adjacentEdgesA);
    FillAdjacentPositions(nodeIndexB, nodeIndexA, _adjacentEdgesB);

    int shared = 0;
    foreach (var neighbor in _adjacentEdgesA)
    {
        if (_adjacentEdgesB.Contains(neighbor))
        {
            shared++;
            if (shared > 2)
            {
                return false;
            }
        }
    }

    return AllowBoundaryCollapses ? shared >= 1 : shared == 2;
}

/// <summary>True when any edge incident to <paramref name="nodeIndex"/> carries the border penalty.</summary>
private bool IsBoundaryVertex(int nodeIndex)
{
    if (nodeIndex < 0)
    {
        return false;
    }

    int sibling = nodeIndex;
    do
    {
        for (int relative = sibling; (relative = _mesh.nodes[relative].relative) != sibling;)
        {
            if (_mesh.GetEdgeTopo(sibling, relative) >= ConnectedMesh.EdgeBorderPenalty)
            {
                return true;
            }
        }
    } while ((sibling = _mesh.nodes[sibling].sibling) != nodeIndex);

    return false;
}

/// <summary>
/// Blocks the collapse when the two positions carry bone weights but no attribute pair
/// across them agrees on the dominant bone with enough weight overlap.
/// </summary>
private bool ShouldBlockBoneWeightCollapse(int posA, int posB)
{
    if (_mesh.attributes is not MetaAttributeList attrList)
    {
        return false;
    }

    int nodeA = _mesh.PositionToNode[posA];
    int nodeB = _mesh.PositionToNode[posB];
    if (nodeA < 0 || nodeB < 0)
    {
        return false;
    }

    bool hasWeights = false;
    int siblingA = nodeA;
    do
    {
        var attrA = (MetaAttribute)attrList[_mesh.nodes[siblingA].attribute];
        if ((attrA.attr0.flags & FfxivAttributeFlags.BoneWeights) != 0)
        {
            hasWeights = true;
            int siblingB = nodeB;
            do
            {
                var attrB = (MetaAttribute)attrList[_mesh.nodes[siblingB].attribute];
                if ((attrB.attr0.flags & FfxivAttributeFlags.BoneWeights) != 0
                    && HasMatchingDominantBone(attrA.attr0.boneWeight, attrB.attr0.boneWeight)
                    && GetBoneWeightOverlapNormalized(attrA.attr0.boneWeight, attrB.attr0.boneWeight) >= BoneWeightSimilarityThreshold)
                {
                    // Found at least one compatible attribute pair: allow the collapse.
                    return false;
                }
            } while ((siblingB = _mesh.nodes[siblingB].sibling) != nodeB);
        }
    } while ((siblingA = _mesh.nodes[siblingA].sibling) != nodeA);

    return hasWeights;
}

/// <summary>
/// Blocks collapses that would damage UV seams: either one endpoint touches a seam
/// the edge itself is not part of, or the edge lies on a seam that bends too sharply.
/// </summary>
private bool ShouldBlockUvCollapse(int posA, int posB)
{
    if (_mesh.attributes is not MetaAttributeList attrList)
    {
        return false;
    }

    var attrA = ((MetaAttribute)attrList[posA]).attr0;
    var attrB = ((MetaAttribute)attrList[posB]).attr0;
    var flags = attrA.flags | attrB.flags;
    if ((flags & FfxivAttributeFlags.Uv0) == 0)
    {
        return false;
    }

    var isSeam = IsUvSeamEdge(attrA.uv0, attrB.uv0);
    if (!isSeam)
    {
        // Non-seam edge touching a seam vertex: collapsing would drag the seam.
        if (BlockUvSeamVertices && (HasUvSeamAtVertex(posA, posB, attrList, attrA) || HasUvSeamAtVertex(posB, posA, attrList, attrB)))
        {
            return true;
        }

        return false;
    }

    // Seam edge: only collapse along a (nearly) straight seam at both endpoints.
    if (!CheckUvSeamAngleAtVertex(posA, posB, attrList, attrA, attrB))
    {
        return true;
    }

    if (!CheckUvSeamAngleAtVertex(posB, posA, attrList, attrB, attrA))
    {
        return true;
    }

    return false;
}

/// <summary>Blocks the collapse when the endpoint normals diverge beyond the similarity threshold.</summary>
private bool ShouldBlockNormalCollapse(int posA, int posB)
{
    if (_mesh.attributes is not MetaAttributeList attrList)
    {
        return false;
    }

    var attrA = ((MetaAttribute)attrList[posA]).attr0;
    var attrB = ((MetaAttribute)attrList[posB]).attr0;
    if ((attrA.flags & FfxivAttributeFlags.Normal) == 0 || (attrB.flags & FfxivAttributeFlags.Normal) == 0)
    {
        return false;
    }

    var dot = Vector3F.Dot(attrA.normal, attrB.normal);
    return dot < _normalSimilarityThresholdCos;
}

private static float UvDistanceSq(in Vector2F a, in Vector2F b)
{
    var dx = a.x - b.x;
    var dy = a.y - b.y;
    return (dx * dx) + (dy * dy);
}

// An edge is a UV seam when its endpoint UVs differ by more than the similarity threshold.
private static bool IsUvSeamEdge(in Vector2F uvA, in Vector2F uvB)
{
    var thresholdSq = UvSimilarityThreshold * UvSimilarityThreshold;
    return UvDistanceSq(uvA, uvB) > thresholdSq;
}

/// <summary>True when any edge from <paramref name="posCenter"/> (excluding the one to <paramref name="posExclude"/>) is a UV seam.</summary>
private bool HasUvSeamAtVertex(int posCenter, int posExclude, MetaAttributeList attrList, in FfxivVertexAttribute attrCenter)
{
    int nodeCenter = _mesh.PositionToNode[posCenter];
    if (nodeCenter < 0)
    {
        return false;
    }

    FillAdjacentPositionsByPos(nodeCenter, posExclude, _adjacentEdges);
    foreach (int neighborPos in _adjacentEdges)
    {
        var attrNeighbor = ((MetaAttribute)attrList[neighborPos]).attr0;
        if (((attrNeighbor.flags | attrCenter.flags) & FfxivAttributeFlags.Uv0) == 0)
        {
            continue;
        }

        if (IsUvSeamEdge(attrCenter.uv0, attrNeighbor.uv0))
        {
            return true;
        }
    }

    return false;
}

/// <summary>
/// For a seam edge at <paramref name="posCenter"/>: the vertex must carry exactly one
/// other seam edge, and the two seam directions in UV space must be nearly parallel
/// (dot >= UvSeamAngleCos), i.e. the seam is straight through the vertex.
/// </summary>
private bool CheckUvSeamAngleAtVertex(int posCenter, int posOther, MetaAttributeList attrList, in FfxivVertexAttribute attrCenter, in FfxivVertexAttribute attrOther)
{
    int nodeCenter = _mesh.PositionToNode[posCenter];
    if (nodeCenter < 0)
    {
        return true;
    }

    FillAdjacentPositionsByPos(nodeCenter, posOther, _adjacentEdges);

    // Starts at 1: the collapsing edge itself is the first seam edge.
    int seamEdges = 1;
    int otherSeamPos = -1;

    foreach (int neighborPos in _adjacentEdges)
    {
        var attrNeighbor = ((MetaAttribute)attrList[neighborPos]).attr0;
        if (((attrNeighbor.flags | attrCenter.flags) & FfxivAttributeFlags.Uv0) == 0)
        {
            continue;
        }

        if (IsUvSeamEdge(attrCenter.uv0, attrNeighbor.uv0))
        {
            seamEdges++;
            otherSeamPos = neighborPos;
            if (seamEdges > 2)
            {
                // Seam junction (3+ seam edges meet here): never collapse.
                return false;
            }
        }
    }

    if (otherSeamPos < 0)
    {
        // Seam endpoint: nothing to bend.
        return true;
    }

    var attrOtherSeam = ((MetaAttribute)attrList[otherSeamPos]).attr0;
    if (!TryNormalizeUvDirection(attrCenter.uv0, attrOther.uv0, out var dir1)
        || !TryNormalizeUvDirection(attrCenter.uv0, attrOtherSeam.uv0, out var dir2))
    {
        return false;
    }

    var dot = (dir1.x * dir2.x) + (dir1.y * dir2.y);
    return dot >= UvSeamAngleCos;
}

private static bool TryNormalizeUvDirection(in Vector2F from, in Vector2F to, out Vector2F direction)
{
    var dx = to.x - from.x;
    var dy = to.y - from.y;
    var lenSq = (dx * dx) + (dy * dy);
    if (lenSq <= _UvDirEpsilonSq)
    {
        direction = default;
        return false;
    }

    var invLen = 1f / MathF.Sqrt(lenSq);
    direction = new Vector2F(dx * invLen, dy * invLen);
    return true;
}

/// <summary>Looks up the per-position body distance; false when absent, out of range, or NaN.</summary>
private bool TryGetBodyDistanceSq(int pos, out float distanceSq)
{
    distanceSq = float.NaN;
    if (_bodyDistanceSq == null)
    {
        return false;
    }

    if ((uint)pos >= (uint)_bodyDistanceSq.Length)
    {
        return false;
    }

    distanceSq = _bodyDistanceSq[pos];
    return !float.IsNaN(distanceSq);
}

// Overlap of two bone-weight sets, normalized by the larger total weight (0..1).
private static float GetBoneWeightOverlapNormalized(in BoneWeight a, in BoneWeight b)
{
    var overlap = GetBoneWeightOverlap(a, b);
    var sumA = GetBoneWeightSum(a);
    var sumB = GetBoneWeightSum(b);
    var denom = MathF.Max(sumA, sumB);
    if (denom <= 1e-6f)
    {
        // Both unweighted: treat as fully similar.
        return 1f;
    }

    return overlap / denom;
}

private static bool HasMatchingDominantBone(in BoneWeight a, in BoneWeight b)
{
    var dominantA = GetDominantBoneIndex(a);
    if (dominantA < 0)
    {
        return true;
    }

    var dominantB = GetDominantBoneIndex(b);
    if (dominantB < 0)
    {
        return true;
    }

    return dominantA == dominantB;
}

// Index of the heaviest bone influence, or -1 when all weights are zero.
private static int GetDominantBoneIndex(in BoneWeight weight)
{
    var max = weight.weight0;
    var index = weight.index0;

    if (weight.weight1 > max)
    {
        max = weight.weight1;
        index = weight.index1;
    }
    if (weight.weight2 > max)
    {
        max = weight.weight2;
        index = weight.index2;
    }
    if (weight.weight3 > max)
    {
        max = weight.weight3;
        index = weight.index3;
    }

    return max > 0f ? index : -1;
}

// Sum over shared bone indices of min(weight in a, weight in b).
private static float GetBoneWeightOverlap(in BoneWeight a, in BoneWeight b)
{
    float overlap = 0f;
    AddSharedWeight(a.index0, a.weight0, b, ref overlap);
    AddSharedWeight(a.index1, a.weight1, b, ref overlap);
    AddSharedWeight(a.index2, a.weight2, b, ref overlap);
    AddSharedWeight(a.index3, a.weight3, b, ref overlap);
    return overlap;
}

private static float GetBoneWeightSum(in BoneWeight weight)
    => weight.weight0 + weight.weight1 + weight.weight2 + weight.weight3;

private static void AddSharedWeight(int index, float weight, in BoneWeight other, ref float overlap)
{
    if (weight <= 0f)
    {
        return;
    }

    if (index == other.index0)
    {
        overlap += MathF.Min(weight, other.weight0);
    }
    else if (index == other.index1)
    {
        overlap += MathF.Min(weight, other.weight1);
    }
    else if (index == other.index2)
    {
        overlap += MathF.Min(weight, other.weight2);
    }
    else if (index == other.index3)
    {
        overlap += MathF.Min(weight, other.weight3);
    }
}

/// <summary>
/// Simulates the collapse of <paramref name="edge"/> to edge.result and rejects it when any
/// surviving triangle around either endpoint would become degenerate, shrink below the
/// minimum area ratio, flip its normal, or penetrate the body mesh.
/// Returns true when the collapse is safe.
/// </summary>
// TODO : Fix this (doesn't seem to work properly).
public bool CollapseWillInvert(EdgeCollapse edge)
{
    int nodeIndexA = _mesh.PositionToNode[edge.posA];
    int nodeIndexB = _mesh.PositionToNode[edge.posB];
    Vector3 positionA = _mesh.positions[edge.posA];
    Vector3 positionB = _mesh.positions[edge.posB];
    var minAreaRatioSq = _MinTriangleAreaRatio * _MinTriangleAreaRatio;

    // Triangles around A (those containing B disappear and are skipped).
    int sibling = nodeIndexA;
    do
    {
        int posC = _mesh.nodes[_mesh.nodes[sibling].relative].position;
        int posD = _mesh.nodes[_mesh.nodes[_mesh.nodes[sibling].relative].relative].position;

        if (posC == edge.posB || posD == edge.posB)
        {
            continue;
        }

        Vector3F edgeAC = _mesh.positions[posC] - positionA;
        Vector3F edgeAD = _mesh.positions[posD] - positionA;
        Vector3F edgeCD = _mesh.positions[posD] - _mesh.positions[posC];
        var normalBefore = Vector3F.Cross(edgeAC, edgeAD);

        Vector3F edgeRC = _mesh.positions[posC] - edge.result;
        Vector3F edgeRD = _mesh.positions[posD] - edge.result;
        var normalAfter = Vector3F.Cross(edgeRC, edgeRD);
        if (ShouldRejectBodyTriangle(edge.result, _mesh.positions[posC], _mesh.positions[posD]))
        {
            _rejectedBodyCollision++;
            return false;
        }
        if (IsDegenerateTriangle(edgeAC, edgeAD, edgeCD, normalBefore)
            || IsDegenerateTriangle(edgeRC, edgeRD, edgeCD, normalAfter))
        {
            _rejectedDegenerate++;
            _rejectedInversion++;
            return false;
        }
        if (normalAfter.SqrMagnitude < normalBefore.SqrMagnitude * minAreaRatioSq)
        {
            _rejectedArea++;
            _rejectedInversion++;
            return false;
        }

        var dot = Vector3F.Dot(normalBefore, normalAfter);
        if (dot <= 0f)
        {
            // Normal flipped: the triangle would fold over.
            _rejectedFlip++;
            _rejectedInversion++;
            return false;
        }
    } while ((sibling = _mesh.nodes[sibling].sibling) != nodeIndexA);

    // Same checks for triangles around B (those containing A disappear).
    sibling = nodeIndexB;
    do
    {
        int posC = _mesh.nodes[_mesh.nodes[sibling].relative].position;
        int posD = _mesh.nodes[_mesh.nodes[_mesh.nodes[sibling].relative].relative].position;

        if (posC == edge.posA || posD == edge.posA)
        {
            continue;
        }

        Vector3F edgeAC = _mesh.positions[posC] - positionB;
        Vector3F edgeAD = _mesh.positions[posD] - positionB;
        Vector3F edgeCD = _mesh.positions[posD] - _mesh.positions[posC];
        var normalBefore = Vector3F.Cross(edgeAC, edgeAD);

        Vector3F edgeRC = _mesh.positions[posC] - edge.result;
        Vector3F edgeRD = _mesh.positions[posD] - edge.result;
        var normalAfter = Vector3F.Cross(edgeRC, edgeRD);
        if (ShouldRejectBodyTriangle(edge.result, _mesh.positions[posC], _mesh.positions[posD]))
        {
            _rejectedBodyCollision++;
            return false;
        }
        if (IsDegenerateTriangle(edgeAC, edgeAD, edgeCD, normalBefore)
            || IsDegenerateTriangle(edgeRC, edgeRD, edgeCD, normalAfter))
        {
            _rejectedDegenerate++;
            _rejectedInversion++;
            return false;
        }
        if (normalAfter.SqrMagnitude < normalBefore.SqrMagnitude * minAreaRatioSq)
        {
            _rejectedArea++;
            _rejectedInversion++;
            return false;
        }

        var dot = Vector3F.Dot(normalBefore, normalAfter);
        if (dot <= 0f)
        {
            _rejectedFlip++;
            _rejectedInversion++;
            return false;
        }
    } while ((sibling = _mesh.nodes[sibling].sibling) != nodeIndexB);

    return true;
}

/// <summary>Distance from point X to the line through A and B (the "lineic" seam-stretch penalty).</summary>
private double ComputeLineicError(in Vector3 A, in Vector3 B, in Vector3 X)
{
    return Vector3.DistancePointLine(X, A, B);
}

/// <summary>Quadric error v·Q·v for homogeneous point (x, y, z, 1).</summary>
private double ComputeVertexError(in SymmetricMatrix q, double x, double y, double z)
{
    return q.m0 * x * x + 2 * q.m1 * x * y + 2 * q.m2 * x * z + 2 * q.m3 * x
         + q.m4 * y * y + 2 * q.m5 * y * z + 2 * q.m6 * y
         + q.m7 * z * z + 2 * q.m8 * z
         + q.m9;
}

/// <summary>
/// Interpolates the attributes of the faces that span the collapsing edge towards the
/// collapse target, weighted by where the target lies between A and B. Normals further
/// apart than the merge threshold are left untouched.
/// </summary>
private void InterpolateAttributes(EdgeCollapse pair)
{
    int posA = pair.posA;
    int posB = pair.posB;

    int nodeIndexA = _mesh.PositionToNode[posA];
    int nodeIndexB = _mesh.PositionToNode[posB];

    Vector3 positionA = _mesh.positions[posA];
    Vector3 positionB = _mesh.positions[posB];

    HashSet<int> procAttributes = new HashSet<int>();

    // ratio = |A→N| / (|A→N| + |B→N|): 0 keeps A's attributes, 1 keeps B's.
    Vector3 positionN = pair.result;
    double AN = Vector3.Magnitude(positionA - positionN);
    double BN = Vector3.Magnitude(positionB - positionN);
    double ratio = MathUtils.DivideSafe(AN, AN + BN);

    // TODO: interpolation issue (original note in French: "Probleme d'interpolation").

    int siblingOfA = nodeIndexA;
    do // Iterate over faces around A
    {
        int relativeOfA = siblingOfA;
        do // Circulate around face
        {
            if (_mesh.nodes[relativeOfA].position == posB)
            {
                // Only process each attribute once even if shared by several faces.
                if (!procAttributes.Add(_mesh.nodes[siblingOfA].attribute))
                    continue;

                if (!procAttributes.Add(_mesh.nodes[relativeOfA].attribute))
                    continue;

                if (_mesh.attributes != null && _mesh.attributeDefinitions.Length > 0)
                {
                    IMetaAttribute attributeA = _mesh.attributes[_mesh.nodes[siblingOfA].attribute];
                    IMetaAttribute attributeB = _mesh.attributes[_mesh.nodes[relativeOfA].attribute];

                    for (int i = 0; i < _mesh.attributeDefinitions.Length; i++)
                    {
                        if (_mesh.attributeDefinitions[i].type == AttributeType.Normals)
                        {
                            Vector3F normalA = attributeA.Get<Vector3F>(i);
                            Vector3F normalB = attributeB.Get<Vector3F>(i);

                            float dot = Vector3F.Dot(normalA, normalB);

                            if (dot < _mergeNormalsThresholdCos)
                            {
                                // Hard edge: do not blend these normals.
                                continue;
                            }
                        }

                        _mesh.attributes.Interpolate(i, _mesh.nodes[siblingOfA].attribute, _mesh.nodes[relativeOfA].attribute, ratio);
                    }
                }
            }
        } while ((relativeOfA = _mesh.nodes[relativeOfA].relative) != siblingOfA);

    } while ((siblingOfA = _mesh.nodes[siblingOfA].sibling) != nodeIndexA);
}

// Scratch map reused by MergeAttributes (value-equal attribute → canonical index).
private readonly Dictionary<IMetaAttribute, int> _uniqueAttributes = new Dictionary<IMetaAttribute, int>();

/// <summary>Deduplicates the attribute indices around a vertex so value-equal attributes share one index.</summary>
private void MergeAttributes(int nodeIndex)
{
    if (_mesh.attributeDefinitions.Length == 0)
        return;

    _uniqueAttributes.Clear();

    int sibling = nodeIndex;
    do
    {
        _uniqueAttributes.TryAdd(_mesh.attributes[_mesh.nodes[sibling].attribute], _mesh.nodes[sibling].attribute);
    } while ((sibling = _mesh.nodes[sibling].sibling) != nodeIndex);

    sibling = nodeIndex;
    do
    {
        _mesh.nodes[sibling].attribute = _uniqueAttributes[_mesh.attributes[_mesh.nodes[sibling].attribute]];
    } while ((sibling = _mesh.nodes[sibling].sibling) != nodeIndex);
}

private readonly HashSet<EdgeCollapse> _edgeToRefresh = new HashSet<EdgeCollapse>();

/// <summary>
/// Performs the collapse of <paramref name="pair"/>: removes all candidate edges around
/// both endpoints, interpolates attributes, collapses in the mesh, then rebuilds quadrics
/// and errors for the edges around the surviving vertex.
/// </summary>
private void CollapseEdge(EdgeCollapse pair)
{
    _collapsedEdges++;
    int nodeIndexA = _mesh.PositionToNode[pair.posA];
    int nodeIndexB = _mesh.PositionToNode[pair.posB];

    int posA = pair.posA;
    int posB = pair.posB;

    // Remove all edges around A
    int sibling = nodeIndexA;
    do
    {
        for (int relative = sibling; (relative = _mesh.nodes[relative].relative) != sibling;)
        {
            int posC = _mesh.nodes[relative].position;
            EdgeCollapse pairAC = new EdgeCollapse(posA, posC);
            // Todo : Optimization by only removing first pair (first edge)
            if (_pairs.Remove(pairAC))
            {
                _mins.Remove(pairAC);
            }
        }
    } while ((sibling = _mesh.nodes[sibling].sibling) != nodeIndexA);

    // Remove all edges around B
    sibling = nodeIndexB;
    do
    {
        for (int relative = sibling; (relative = _mesh.nodes[relative].relative) != sibling;)
        {
            int posC = _mesh.nodes[relative].position;
            EdgeCollapse pairBC = new EdgeCollapse(posB, posC);
            if (_pairs.Remove(pairBC))
            {
                _mins.Remove(pairBC);
            }
        }
    } while ((sibling = _mesh.nodes[sibling].sibling) != nodeIndexB);

    // Interpolates attributes
    InterpolateAttributes(pair);

    // Collapse edge
    int validNode = _mesh.CollapseEdge(nodeIndexA, nodeIndexB);

    // A disconnected triangle has been collapsed, there are no edges to register
    if (validNode < 0)
    {
        return;
    }

    posA = _mesh.nodes[validNode].position;

    _mesh.positions[posA] = pair.result;

    MergeAttributes(validNode);

    CalculateQuadric(posA);

    _edgeToRefresh.Clear();

    // Collect every edge incident to the surviving vertex (and, optionally,
    // the second ring when UpdateFarNeighbors is enabled).
    sibling = validNode;
    do
    {
        for (int relative = sibling; (relative = _mesh.nodes[relative].relative) != sibling;)
        {
            int posC = _mesh.nodes[relative].position;
            _edgeToRefresh.Add(new EdgeCollapse(posA, posC));

            if (UpdateFarNeighbors)
            {
                int sibling2 = relative;
                while ((sibling2 = _mesh.nodes[sibling2].sibling) != relative)
                {
                    int relative2 = sibling2;
                    while ((relative2 = _mesh.nodes[relative2].relative) != sibling2)
                    {
                        int posD = _mesh.nodes[relative2].position;
                        if (posD != posC)
                        {
                            _edgeToRefresh.Add(new EdgeCollapse(posC, posD));
                        }
                    }
                }
            }
        }
    } while ((sibling = _mesh.nodes[sibling].sibling) != validNode);

    // Re-register refreshed edges with cleared cached weights and fresh quadrics.
    foreach (EdgeCollapse edge in _edgeToRefresh)
    {
        CalculateQuadric(edge.posB);
        edge.SetWeight(-1);
        _pairs.Remove(edge);
        _pairs.Add(edge);
    }

    foreach (EdgeCollapse edge in _edgeToRefresh)
    {
        CalculateError(edge);
        _mins.Remove(edge);
        if (UpdateMinsOnCollapse)
        {
            _mins.AddMin(edge);
        }
    }
}

private void ResetStats()
{
    _evaluatedEdges = 0;
    _collapsedEdges = 0;
    _rejectedBoneWeights = 0;
    _rejectedTopology = 0;
    _rejectedInversion = 0;
    _rejectedDegenerate = 0;
    _rejectedArea = 0;
    _rejectedFlip = 0;
    _rejectedBodyCollision = 0;
}

/// <summary>True when the evaluator reports <paramref name="point"/> within the configured body-distance threshold.</summary>
private bool IsPointNearBody(in Vector3 point)
{
    if (_bodyDistanceSqEvaluator == null || _bodyDistanceThresholdSq <= 0f)
    {
        return false;
    }

    var sq = _bodyDistanceSqEvaluator(point);
    return !float.IsNaN(sq) && sq <= _bodyDistanceThresholdSq;
}
private bool IsPointNearBody(in Vector3 point, float thresholdSq) + { + if (_bodyDistanceSqEvaluator == null || thresholdSq <= 0f) + { + return false; + } + + var sq = _bodyDistanceSqEvaluator(point); + return !float.IsNaN(sq) && sq <= thresholdSq; + } + + private bool ShouldRejectBodyTriangle(in Vector3 a, in Vector3 b, in Vector3 c) + { + if (_bodyDistanceSqEvaluator == null || _bodyDistanceThresholdSq <= 0f) + { + return false; + } + + var centroid = (a + b + c) / 3d; + if (!CollapseToEndpointsOnly) + { + return IsPointNearBody(centroid); + } + + var penetrationFactor = MathF.Max(0f, BodyCollisionPenetrationFactor); + var penetrationThresholdSq = _bodyDistanceThresholdSq * penetrationFactor * penetrationFactor; + if (IsPointNearBody(centroid, penetrationThresholdSq)) + { + return true; + } + + var ab = (a + b) * 0.5; + var bc = (b + c) * 0.5; + var ca = (c + a) * 0.5; + return IsPointNearBody(ab, penetrationThresholdSq) + || IsPointNearBody(bc, penetrationThresholdSq) + || IsPointNearBody(ca, penetrationThresholdSq); + } + + private bool IsProtectedVertex(int pos) + { + if (_protectedVertices == null) + { + return false; + } + + return (uint)pos < (uint)_protectedVertices.Length && _protectedVertices[pos]; + } + + private static bool IsDegenerateTriangle(in Vector3F edge0, in Vector3F edge1, in Vector3F edge2, in Vector3F normal) + { + var maxEdgeSq = MathF.Max(edge0.SqrMagnitude, MathF.Max(edge1.SqrMagnitude, edge2.SqrMagnitude)); + if (maxEdgeSq <= 0f) + { + return true; + } + + var minNormalSq = (float)(_DeterminantEpsilon * _DeterminantEpsilon) * maxEdgeSq * maxEdgeSq; + return normal.SqrMagnitude <= minNormalSq; + } + } + + public readonly record struct DecimationStats( + int EvaluatedEdges, + int CollapsedEdges, + int RejectedBoneWeights, + int RejectedTopology, + int RejectedInversion, + int RejectedDegenerate, + int RejectedArea, + int RejectedFlip, + int RejectedBodyCollision); +} diff --git 
a/LightlessSync/ThirdParty/Nanomesh/Algo/Decimation/EdgeCollapse.cs b/LightlessSync/ThirdParty/Nanomesh/Algo/Decimation/EdgeCollapse.cs new file mode 100644 index 0000000..62cae64 --- /dev/null +++ b/LightlessSync/ThirdParty/Nanomesh/Algo/Decimation/EdgeCollapse.cs @@ -0,0 +1,88 @@ +using System; + +namespace Nanomesh +{ + public partial class DecimateModifier + { + public class EdgeCollapse : IComparable<EdgeCollapse>, IEquatable<EdgeCollapse> + { + public int posA; + public int posB; + public Vector3 result; + public double error; + + private double _weight = -1; + + public ref double Weight => ref _weight; + + public void SetWeight(double weight) + { + _weight = weight; + } + + public EdgeCollapse(int posA, int posB) + { + this.posA = posA; + this.posB = posB; + } + + public override int GetHashCode() + { + unchecked + { + return posA + posB; + } + } + + public override bool Equals(object obj) + { + return Equals((EdgeCollapse)obj); + } + + public bool Equals(EdgeCollapse pc) + { + if (ReferenceEquals(pc, null)) + return false; + + if (ReferenceEquals(this, pc)) + { + return true; + } + else + { + return (posA == pc.posA && posB == pc.posB) || (posA == pc.posB && posB == pc.posA); + } + } + + public int CompareTo(EdgeCollapse other) + { + return error > other.error ? 1 : error < other.error ? 
-1 : 0; + } + + public static bool operator >(EdgeCollapse x, EdgeCollapse y) + { + return x.error > y.error; + } + + public static bool operator >=(EdgeCollapse x, EdgeCollapse y) + { + return x.error >= y.error; + } + + public static bool operator <(EdgeCollapse x, EdgeCollapse y) + { + return x.error < y.error; + } + + public static bool operator <=(EdgeCollapse x, EdgeCollapse y) + { + return x.error <= y.error; + } + + public override string ToString() + { + return $"<A:{posA} B:{posB} error:{error}>"; + } + } + } +} diff --git a/LightlessSync/ThirdParty/Nanomesh/Algo/Decimation/EdgeComparer.cs b/LightlessSync/ThirdParty/Nanomesh/Algo/Decimation/EdgeComparer.cs new file mode 100644 index 0000000..4fb45e6 --- /dev/null +++ b/LightlessSync/ThirdParty/Nanomesh/Algo/Decimation/EdgeComparer.cs @@ -0,0 +1,15 @@ +using System.Collections.Generic; + +namespace Nanomesh +{ + public partial class DecimateModifier + { + private class EdgeComparer : IComparer<EdgeCollapse> + { + public int Compare(EdgeCollapse x, EdgeCollapse y) + { + return x.CompareTo(y); + } + } + } +} \ No newline at end of file diff --git a/LightlessSync/ThirdParty/Nanomesh/Algo/Decimation/SceneDecimator.cs b/LightlessSync/ThirdParty/Nanomesh/Algo/Decimation/SceneDecimator.cs new file mode 100644 index 0000000..c21cbc2 --- /dev/null +++ b/LightlessSync/ThirdParty/Nanomesh/Algo/Decimation/SceneDecimator.cs @@ -0,0 +1,72 @@ +using System.Collections.Generic; +using System.Linq; + +namespace Nanomesh +{ + public class SceneDecimator + { + private class ModifierAndOccurrences + { + public int occurrences = 1; + public DecimateModifier modifier = new DecimateModifier(); + } + + private Dictionary<ConnectedMesh, ModifierAndOccurrences> _modifiers; + + public void Initialize(IEnumerable<ConnectedMesh> meshes) + { + _modifiers = new Dictionary<ConnectedMesh, ModifierAndOccurrences>(); + + foreach (ConnectedMesh mesh in meshes) + { + ModifierAndOccurrences modifier; + if (_modifiers.ContainsKey(mesh)) + { + modifier = _modifiers[mesh]; + modifier.occurrences++; + } + else + { + _modifiers.Add(mesh, modifier = new ModifierAndOccurrences()); + 
//System.Console.WriteLine($"Faces:{mesh.FaceCount}"); + modifier.modifier.Initialize(mesh); + } + + _faceCount += mesh.FaceCount; + } + + _initalFaceCount = _faceCount; + } + + private int _faceCount; + private int _initalFaceCount; + + public void DecimateToRatio(float targetTriangleRatio) + { + targetTriangleRatio = MathF.Clamp(targetTriangleRatio, 0f, 1f); + DecimateToPolycount((int)MathF.Round(targetTriangleRatio * _initalFaceCount)); + } + + public void DecimatePolycount(int polycount) + { + DecimateToPolycount((int)MathF.Round(_initalFaceCount - polycount)); + } + + public void DecimateToPolycount(int targetTriangleCount) + { + //System.Console.WriteLine($"Faces:{_faceCount} Target:{targetTriangleCount}"); + while (_faceCount > targetTriangleCount) + { + KeyValuePair<ConnectedMesh, ModifierAndOccurrences> pair = _modifiers.OrderBy(x => x.Value.modifier.GetMinimumError()).First(); + + int facesBefore = pair.Key.FaceCount; + pair.Value.modifier.Iterate(); + + if (facesBefore == pair.Key.FaceCount) + break; // Exit ! + + _faceCount -= (facesBefore - pair.Key.FaceCount) * pair.Value.occurrences; + } + } + } +} \ No newline at end of file diff --git a/LightlessSync/ThirdParty/Nanomesh/Algo/NormalsCreator.cs b/LightlessSync/ThirdParty/Nanomesh/Algo/NormalsCreator.cs new file mode 100644 index 0000000..5c37321 --- /dev/null +++ b/LightlessSync/ThirdParty/Nanomesh/Algo/NormalsCreator.cs @@ -0,0 +1,76 @@ +using System; +using System.Collections.Generic; +using System.Diagnostics; + +namespace Nanomesh +{ + public class NormalsModifier + { + public struct PosAndAttribute : IEquatable<PosAndAttribute> + { + public int position; + public Attribute attribute; + + public override int GetHashCode() + { + return position.GetHashCode() ^ (attribute.GetHashCode() << 2); + } + + public bool Equals(PosAndAttribute other) + { + return position == other.position && attribute.Equals(other.attribute); + } + } + + public void Run(ConnectedMesh mesh, float smoothingAngle) + { + float cosineThreshold = MathF.Cos(smoothingAngle * MathF.PI / 
180f); + + int[] positionToNode = mesh.GetPositionToNode(); + + Dictionary attributeToIndex = new Dictionary(); + + for (int p = 0; p < positionToNode.Length; p++) + { + int nodeIndex = positionToNode[p]; + if (nodeIndex < 0) + { + continue; + } + + Debug.Assert(!mesh.nodes[nodeIndex].IsRemoved); + + int sibling1 = nodeIndex; + do + { + Vector3F sum = Vector3F.Zero; + + Vector3F normal1 = mesh.GetFaceNormal(sibling1); + + int sibling2 = nodeIndex; + do + { + Vector3F normal2 = mesh.GetFaceNormal(sibling2); + + float dot = Vector3F.Dot(normal1, normal2); + + if (dot >= cosineThreshold) + { + // Area and angle weighting (it gives better results) + sum += mesh.GetFaceArea(sibling2) * mesh.GetAngleRadians(sibling2) * normal2; + } + + } while ((sibling2 = mesh.nodes[sibling2].sibling) != nodeIndex); + + sum = sum.Normalized; + + + } while ((sibling1 = mesh.nodes[sibling1].sibling) != nodeIndex); + } + + // Assign new attributes + + // TODO : Fix + } + } +} \ No newline at end of file diff --git a/LightlessSync/ThirdParty/Nanomesh/Algo/NormalsFixer.cs b/LightlessSync/ThirdParty/Nanomesh/Algo/NormalsFixer.cs new file mode 100644 index 0000000..5e65476 --- /dev/null +++ b/LightlessSync/ThirdParty/Nanomesh/Algo/NormalsFixer.cs @@ -0,0 +1,17 @@ +namespace Nanomesh +{ + public class NormalsFixer + { + public void Start(ConnectedMesh mesh) + { + /* + for (int i = 0; i < mesh.attributes.Length; i++) + { + Attribute attribute = mesh.attributes[i]; + attribute.normal = attribute.normal.Normalized; + mesh.attributes[i] = attribute; + } + */ + } + } +} \ No newline at end of file diff --git a/LightlessSync/ThirdParty/Nanomesh/Algo/Triangulate.cs b/LightlessSync/ThirdParty/Nanomesh/Algo/Triangulate.cs new file mode 100644 index 0000000..8c69394 --- /dev/null +++ b/LightlessSync/ThirdParty/Nanomesh/Algo/Triangulate.cs @@ -0,0 +1,27 @@ +using System; + +namespace Nanomesh +{ + public class TriangulateModifier + { + public void Run(ConnectedMesh mesh) + { + for (int i = 0; i < 
mesh.nodes.Length; i++) + { + int edgeCount = 0; + int relative = i; + while ((relative = mesh.nodes[relative].relative) != i) // Circulate around face + { + edgeCount++; + } + + if (edgeCount > 2) + { + throw new Exception("Mesh has polygons of dimension 4 or greater"); + } + } + + // Todo : Implement + } + } +} \ No newline at end of file diff --git a/LightlessSync/ThirdParty/Nanomesh/Base/BoneWeight.cs b/LightlessSync/ThirdParty/Nanomesh/Base/BoneWeight.cs new file mode 100644 index 0000000..c784572 --- /dev/null +++ b/LightlessSync/ThirdParty/Nanomesh/Base/BoneWeight.cs @@ -0,0 +1,144 @@ +using System; +using System.Collections.Generic; +using System.Linq; + +namespace Nanomesh +{ + public readonly struct BoneWeight : IEquatable, IInterpolable + { + public readonly int index0; + public readonly int index1; + public readonly int index2; + public readonly int index3; + public readonly float weight0; + public readonly float weight1; + public readonly float weight2; + public readonly float weight3; + + public int GetIndex(int i) + { + switch (i) + { + case 0: return index0; + case 1: return index1; + case 2: return index2; + case 3: return index3; + default: return -1; + } + } + + public float GetWeight(int i) + { + switch (i) + { + case 0: return weight0; + case 1: return weight1; + case 2: return weight2; + case 3: return weight3; + default: return -1; + } + } + + public BoneWeight(int index0, int index1, int index2, int index3, float weight0, float weight1, float weight2, float weight3) + { + this.index0 = index0; + this.index1 = index1; + this.index2 = index2; + this.index3 = index3; + this.weight0 = weight0; + this.weight1 = weight1; + this.weight2 = weight2; + this.weight3 = weight3; + } + + public bool Equals(BoneWeight other) + { + return index0 == other.index0 + && index1 == other.index1 + && index2 == other.index2 + && index3 == other.index3 + && weight0 == other.weight0 + && weight1 == other.weight1 + && weight2 == other.weight2 + && weight3 == 
other.weight3; + } + + public override int GetHashCode() + { + unchecked + { + int hash = 17; + hash = hash * 31 + index0; + hash = hash * 31 + index1; + hash = hash * 31 + index2; + hash = hash * 31 + index3; + hash = hash * 31 + weight0.GetHashCode(); + hash = hash * 31 + weight1.GetHashCode(); + hash = hash * 31 + weight2.GetHashCode(); + hash = hash * 31 + weight3.GetHashCode(); + return hash; + } + } + + public unsafe BoneWeight Interpolate(BoneWeight other, double ratio) + { + BoneWeight boneWeightA = this; + BoneWeight boneWeightB = other; + + Dictionary newBoneWeight = new Dictionary(); + + // Map weights and indices + for (int i = 0; i < 4; i++) + { + newBoneWeight.TryAdd(boneWeightA.GetIndex(i), 0); + newBoneWeight.TryAdd(boneWeightB.GetIndex(i), 0); + newBoneWeight[boneWeightA.GetIndex(i)] += (float)((1 - ratio) * boneWeightA.GetWeight(i)); + newBoneWeight[boneWeightB.GetIndex(i)] += (float)(ratio * boneWeightB.GetWeight(i)); + } + + int* newIndices = stackalloc int[4]; + float* newWeights = stackalloc float[4]; + + // Order from biggest to smallest weight, and drop bones above 4th + float totalWeight = 0; + int k = 0; + foreach (KeyValuePair boneWeightN in newBoneWeight.OrderByDescending(x => x.Value)) + { + newIndices[k] = boneWeightN.Key; + newWeights[k] = boneWeightN.Value; + totalWeight += boneWeightN.Value; + if (k == 3) + break; + k++; + } + + var sumA = boneWeightA.weight0 + boneWeightA.weight1 + boneWeightA.weight2 + boneWeightA.weight3; + var sumB = boneWeightB.weight0 + boneWeightB.weight1 + boneWeightB.weight2 + boneWeightB.weight3; + var targetSum = (float)((1d - ratio) * sumA + ratio * sumB); + + // Normalize and re-scale to preserve original weight sum. 
+ if (totalWeight > 0f) + { + var scale = targetSum / totalWeight; + for (int j = 0; j < 4; j++) + { + newWeights[j] *= scale; + } + } + + return new BoneWeight( + newIndices[0], newIndices[1], newIndices[2], newIndices[3], + newWeights[0], newWeights[1], newWeights[2], newWeights[3]); + + //return new BoneWeight( + // ratio < 0.5f ? index0 : other.index0, + // ratio < 0.5f ? index1 : other.index1, + // ratio < 0.5f ? index2 : other.index2, + // ratio < 0.5f ? index3 : other.index3, + // (float)(ratio * weight0 + (1 - ratio) * other.weight0), + // (float)(ratio * weight1 + (1 - ratio) * other.weight1), + // (float)(ratio * weight2 + (1 - ratio) * other.weight2), + // (float)(ratio * weight3 + (1 - ratio) * other.weight3)); + } + } +} diff --git a/LightlessSync/ThirdParty/Nanomesh/Base/Color32.cs b/LightlessSync/ThirdParty/Nanomesh/Base/Color32.cs new file mode 100644 index 0000000..49a4216 --- /dev/null +++ b/LightlessSync/ThirdParty/Nanomesh/Base/Color32.cs @@ -0,0 +1,110 @@ +using System; +using System.Runtime.InteropServices; + +namespace Nanomesh +{ + [StructLayout(LayoutKind.Explicit)] + public readonly struct Color32 : IEquatable, IInterpolable + { + [FieldOffset(0)] + internal readonly int rgba; + + [FieldOffset(0)] + public readonly byte r; + + [FieldOffset(1)] + public readonly byte g; + + [FieldOffset(2)] + public readonly byte b; + + [FieldOffset(3)] + public readonly byte a; + + public Color32(byte r, byte g, byte b, byte a) + { + rgba = 0; + this.r = r; + this.g = g; + this.b = b; + this.a = a; + } + + public Color32(float r, float g, float b, float a) + { + rgba = 0; + this.r = (byte)MathF.Round(r); + this.g = (byte)MathF.Round(g); + this.b = (byte)MathF.Round(b); + this.a = (byte)MathF.Round(a); + } + + public Color32(double r, double g, double b, double a) + { + rgba = 0; + this.r = (byte)Math.Round(r); + this.g = (byte)Math.Round(g); + this.b = (byte)Math.Round(b); + this.a = (byte)Math.Round(a); + } + + public bool Equals(Color32 other) + { + 
return other.rgba == rgba; + } + + public Color32 Interpolate(Color32 other, double ratio) + { + return ratio * this + (1 - ratio) * other; + } + + /// + /// Adds two colors. + /// + /// + public static Color32 operator +(Color32 a, Color32 b) { return new Color32(a.r + b.r, a.g + b.g, a.b + b.b, a.a + b.a); } + + /// + /// Subtracts one color from another. + /// + /// + public static Color32 operator -(Color32 a, Color32 b) { return new Color32(1f * a.r - b.r, a.g - b.g, a.b - b.b, a.a - b.a); } + + /// + /// Multiplies one color by another. + /// + /// + public static Color32 operator *(Color32 a, Color32 b) { return new Color32(1f * a.r * b.r, 1f * a.g * b.g, 1f * a.b * b.b, 1f * a.a * b.a); } + + /// + /// Divides one color over another. + /// + /// + public static Color32 operator /(Color32 a, Color32 b) { return new Color32(1f * a.r / b.r, 1f * a.g / b.g, 1f * a.b / b.b, 1f * a.a / b.a); } + + + /// + /// Multiplies a color by a number. + /// + /// + /// + /// + public static Color32 operator *(Color32 a, float d) { return new Color32(d * a.r, d * a.g, d * a.b, d * a.a); } + + public static Color32 operator *(Color32 a, double d) { return new Color32(d * a.r, d * a.g, d * a.b, d * a.a); } + + /// + /// Multiplies a color by a number. + /// + /// + public static Color32 operator *(float d, Color32 a) { return new Color32(d * a.r, d * a.g, d * a.b, d * a.a); } + + public static Color32 operator *(double d, Color32 a) { return new Color32(d * a.r, d * a.g, d * a.b, d * a.a); } + + /// + /// Divides a color by a number. 
+ /// + /// + public static Color32 operator /(Color32 a, float d) { return new Color32(1f * a.r / d, 1f * a.g / d, 1f * a.b / d, 1f * a.a / d); } + } +} \ No newline at end of file diff --git a/LightlessSync/ThirdParty/Nanomesh/Base/FfxivVertexAttribute.cs b/LightlessSync/ThirdParty/Nanomesh/Base/FfxivVertexAttribute.cs new file mode 100644 index 0000000..f0bacb0 --- /dev/null +++ b/LightlessSync/ThirdParty/Nanomesh/Base/FfxivVertexAttribute.cs @@ -0,0 +1,347 @@ +using System; +using System.Runtime.InteropServices; + +namespace Nanomesh +{ + [Flags] + public enum FfxivAttributeFlags : uint + { + None = 0, + Normal = 1u << 0, + Tangent1 = 1u << 1, + Tangent2 = 1u << 2, + Color = 1u << 3, + BoneWeights = 1u << 4, + PositionW = 1u << 5, + NormalW = 1u << 6, + Uv0 = 1u << 7, + Uv1 = 1u << 8, + Uv2 = 1u << 9, + Uv3 = 1u << 10, + } + + [StructLayout(LayoutKind.Sequential)] + public readonly struct FfxivVertexAttribute : IEquatable, IInterpolable + { + public readonly Vector3F normal; + public readonly Vector4F tangent1; + public readonly Vector4F tangent2; + public readonly Vector2F uv0; + public readonly Vector2F uv1; + public readonly Vector2F uv2; + public readonly Vector2F uv3; + public readonly Vector4F color; + public readonly BoneWeight boneWeight; + public readonly float positionW; + public readonly float normalW; + public readonly FfxivAttributeFlags flags; + + public FfxivVertexAttribute( + FfxivAttributeFlags flags, + Vector3F normal, + Vector4F tangent1, + Vector4F tangent2, + Vector2F uv0, + Vector2F uv1, + Vector2F uv2, + Vector2F uv3, + Vector4F color, + BoneWeight boneWeight, + float positionW, + float normalW) + { + this.flags = flags; + this.normal = normal; + this.tangent1 = tangent1; + this.tangent2 = tangent2; + this.uv0 = uv0; + this.uv1 = uv1; + this.uv2 = uv2; + this.uv3 = uv3; + this.color = color; + this.boneWeight = boneWeight; + this.positionW = positionW; + this.normalW = normalW; + } + + public FfxivVertexAttribute 
Interpolate(FfxivVertexAttribute other, double ratio) + { + var t = (float)ratio; + var inv = 1f - t; + var combinedFlags = flags | other.flags; + + var normal = (combinedFlags & FfxivAttributeFlags.Normal) != 0 + ? NormalizeVector3(new Vector3F( + (this.normal.x * inv) + (other.normal.x * t), + (this.normal.y * inv) + (other.normal.y * t), + (this.normal.z * inv) + (other.normal.z * t))) + : default; + + var tangent1 = (combinedFlags & FfxivAttributeFlags.Tangent1) != 0 + ? BlendTangent(this.tangent1, other.tangent1, t) + : default; + + var tangent2 = (combinedFlags & FfxivAttributeFlags.Tangent2) != 0 + ? BlendTangent(this.tangent2, other.tangent2, t) + : default; + + var uv0 = (combinedFlags & FfxivAttributeFlags.Uv0) != 0 + ? Vector2F.LerpUnclamped(this.uv0, other.uv0, t) + : default; + + var uv1 = (combinedFlags & FfxivAttributeFlags.Uv1) != 0 + ? Vector2F.LerpUnclamped(this.uv1, other.uv1, t) + : default; + + var uv2 = (combinedFlags & FfxivAttributeFlags.Uv2) != 0 + ? Vector2F.LerpUnclamped(this.uv2, other.uv2, t) + : default; + + var uv3 = (combinedFlags & FfxivAttributeFlags.Uv3) != 0 + ? Vector2F.LerpUnclamped(this.uv3, other.uv3, t) + : default; + + var color = (combinedFlags & FfxivAttributeFlags.Color) != 0 + ? new Vector4F( + (this.color.x * inv) + (other.color.x * t), + (this.color.y * inv) + (other.color.y * t), + (this.color.z * inv) + (other.color.z * t), + (this.color.w * inv) + (other.color.w * t)) + : default; + + var boneWeight = (combinedFlags & FfxivAttributeFlags.BoneWeights) != 0 + ? BlendBoneWeights(this.boneWeight, other.boneWeight, t) + : default; + + var positionW = (combinedFlags & FfxivAttributeFlags.PositionW) != 0 + ? (this.positionW * inv) + (other.positionW * t) + : 0f; + + var normalW = (combinedFlags & FfxivAttributeFlags.NormalW) != 0 + ? 
(this.normalW * inv) + (other.normalW * t) + : 0f; + + return new FfxivVertexAttribute( + combinedFlags, + normal, + tangent1, + tangent2, + uv0, + uv1, + uv2, + uv3, + color, + boneWeight, + positionW, + normalW); + } + + public bool Equals(FfxivVertexAttribute other) + { + if (flags != other.flags) + { + return false; + } + + if ((flags & FfxivAttributeFlags.Normal) != 0 && !normal.Equals(other.normal)) + { + return false; + } + + if ((flags & FfxivAttributeFlags.Tangent1) != 0 && !tangent1.Equals(other.tangent1)) + { + return false; + } + + if ((flags & FfxivAttributeFlags.Tangent2) != 0 && !tangent2.Equals(other.tangent2)) + { + return false; + } + + if ((flags & FfxivAttributeFlags.Uv0) != 0 && !uv0.Equals(other.uv0)) + { + return false; + } + + if ((flags & FfxivAttributeFlags.Uv1) != 0 && !uv1.Equals(other.uv1)) + { + return false; + } + + if ((flags & FfxivAttributeFlags.Uv2) != 0 && !uv2.Equals(other.uv2)) + { + return false; + } + + if ((flags & FfxivAttributeFlags.Uv3) != 0 && !uv3.Equals(other.uv3)) + { + return false; + } + + if ((flags & FfxivAttributeFlags.Color) != 0 && !color.Equals(other.color)) + { + return false; + } + + if ((flags & FfxivAttributeFlags.BoneWeights) != 0 && !boneWeight.Equals(other.boneWeight)) + { + return false; + } + + if ((flags & FfxivAttributeFlags.PositionW) != 0 && positionW != other.positionW) + { + return false; + } + + if ((flags & FfxivAttributeFlags.NormalW) != 0 && normalW != other.normalW) + { + return false; + } + + return true; + } + + public override bool Equals(object? 
obj) + => obj is FfxivVertexAttribute other && Equals(other); + + public override int GetHashCode() + { + var hash = new HashCode(); + hash.Add(normal); + hash.Add(tangent1); + hash.Add(tangent2); + hash.Add(uv0); + hash.Add(uv1); + hash.Add(uv2); + hash.Add(uv3); + hash.Add(color); + hash.Add(boneWeight); + hash.Add(positionW); + hash.Add(normalW); + hash.Add(flags); + return hash.ToHashCode(); + } + + private static Vector3F NormalizeVector3(in Vector3F value) + { + var length = Vector3F.Magnitude(value); + return length > 0f ? value / length : value; + } + + private static Vector4F BlendTangent(in Vector4F a, in Vector4F b, float t) + { + var inv = 1f - t; + var blended = new Vector3F( + (a.x * inv) + (b.x * t), + (a.y * inv) + (b.y * t), + (a.z * inv) + (b.z * t)); + blended = NormalizeVector3(blended); + + var w = t >= 0.5f ? b.w : a.w; + if (w != 0f) + { + w = w >= 0f ? 1f : -1f; + } + + return new Vector4F(blended.x, blended.y, blended.z, w); + } + + private static BoneWeight BlendBoneWeights(in BoneWeight a, in BoneWeight b, float ratio) + { + Span indices = stackalloc int[8]; + Span weights = stackalloc float[8]; + var count = 0; + + static void AddWeight(Span indices, Span weights, ref int count, int index, float weight) + { + if (weight <= 0f) + { + return; + } + + for (var i = 0; i < count; i++) + { + if (indices[i] == index) + { + weights[i] += weight; + return; + } + } + + if (count < indices.Length) + { + indices[count] = index; + weights[count] = weight; + count++; + } + } + + var inv = 1f - ratio; + var sumA = a.weight0 + a.weight1 + a.weight2 + a.weight3; + var sumB = b.weight0 + b.weight1 + b.weight2 + b.weight3; + var targetSum = (sumA * inv) + (sumB * ratio); + AddWeight(indices, weights, ref count, a.index0, a.weight0 * inv); + AddWeight(indices, weights, ref count, a.index1, a.weight1 * inv); + AddWeight(indices, weights, ref count, a.index2, a.weight2 * inv); + AddWeight(indices, weights, ref count, a.index3, a.weight3 * inv); + 
AddWeight(indices, weights, ref count, b.index0, b.weight0 * ratio); + AddWeight(indices, weights, ref count, b.index1, b.weight1 * ratio); + AddWeight(indices, weights, ref count, b.index2, b.weight2 * ratio); + AddWeight(indices, weights, ref count, b.index3, b.weight3 * ratio); + + if (count == 0) + { + return a; + } + + Span<int> topIndices = stackalloc int[4]; + Span<float> topWeights = stackalloc float[4]; + for (var i = 0; i < 4; i++) + { + topIndices[i] = -1; + topWeights[i] = 0f; + } + + for (var i = 0; i < count; i++) + { + var weight = weights[i]; + var index = indices[i]; + for (var slot = 0; slot < 4; slot++) + { + if (weight > topWeights[slot]) + { + for (var shift = 3; shift > slot; shift--) + { + topWeights[shift] = topWeights[shift - 1]; + topIndices[shift] = topIndices[shift - 1]; + } + + topWeights[slot] = weight; + topIndices[slot] = index; + break; + } + } + } + + var sum = topWeights[0] + topWeights[1] + topWeights[2] + topWeights[3]; + if (sum > 0f) + { + var scale = targetSum > 0f ? targetSum / sum : 0f; + for (var i = 0; i < 4; i++) + { + topWeights[i] *= scale; + } + } + + return new BoneWeight( + topIndices[0] < 0 ? 0 : topIndices[0], + topIndices[1] < 0 ? 0 : topIndices[1], + topIndices[2] < 0 ? 0 : topIndices[2], + topIndices[3] < 0 ? 
0 : topIndices[3], + topWeights[0], + topWeights[1], + topWeights[2], + topWeights[3]); + } + } +} diff --git a/LightlessSync/ThirdParty/Nanomesh/Base/IInterpolable.cs b/LightlessSync/ThirdParty/Nanomesh/Base/IInterpolable.cs new file mode 100644 index 0000000..3118194 --- /dev/null +++ b/LightlessSync/ThirdParty/Nanomesh/Base/IInterpolable.cs @@ -0,0 +1,7 @@ +namespace Nanomesh +{ + public interface IInterpolable<T> + { + T Interpolate(T other, double ratio); + } +} \ No newline at end of file diff --git a/LightlessSync/ThirdParty/Nanomesh/Base/MathF.cs b/LightlessSync/ThirdParty/Nanomesh/Base/MathF.cs new file mode 100644 index 0000000..c1aef5e --- /dev/null +++ b/LightlessSync/ThirdParty/Nanomesh/Base/MathF.cs @@ -0,0 +1,356 @@ +using System; + +namespace Nanomesh +{ + public static partial class MathF + { + // Returns the sine of angle /f/ in radians. + public static float Sin(float f) { return (float)Math.Sin(f); } + + // Returns the cosine of angle /f/ in radians. + public static float Cos(float f) { return (float)Math.Cos(f); } + + // Returns the tangent of angle /f/ in radians. + public static float Tan(float f) { return (float)Math.Tan(f); } + + // Returns the arc-sine of /f/ - the angle in radians whose sine is /f/. + public static float Asin(float f) { return (float)Math.Asin(f); } + + // Returns the arc-cosine of /f/ - the angle in radians whose cosine is /f/. + public static float Acos(float f) { return (float)Math.Acos(f); } + + // Returns the arc-tangent of /f/ - the angle in radians whose tangent is /f/. + public static float Atan(float f) { return (float)Math.Atan(f); } + + // Returns the angle in radians whose ::ref::Tan is @@y/x@@. + public static float Atan2(float y, float x) { return (float)Math.Atan2(y, x); } + + // Returns square root of /f/. + public static float Sqrt(float f) { return (float)Math.Sqrt(f); } + + // Returns the absolute value of /f/. 
+ public static float Abs(float f) { return (float)Math.Abs(f); } + + // Returns the absolute value of /value/. + public static int Abs(int value) { return Math.Abs(value); } + + /// *listonly* + public static float Min(float a, float b) { return a < b ? a : b; } + // Returns the smallest of two or more values. + public static float Min(params float[] values) + { + int len = values.Length; + if (len == 0) + { + return 0; + } + + float m = values[0]; + for (int i = 1; i < len; i++) + { + if (values[i] < m) + { + m = values[i]; + } + } + return m; + } + + /// *listonly* + public static int Min(int a, int b) { return a < b ? a : b; } + // Returns the smallest of two or more values. + public static int Min(params int[] values) + { + int len = values.Length; + if (len == 0) + { + return 0; + } + + int m = values[0]; + for (int i = 1; i < len; i++) + { + if (values[i] < m) + { + m = values[i]; + } + } + return m; + } + + /// *listonly* + public static float Max(float a, float b) { return a > b ? a : b; } + // Returns largest of two or more values. + public static float Max(params float[] values) + { + int len = values.Length; + if (len == 0) + { + return 0; + } + + float m = values[0]; + for (int i = 1; i < len; i++) + { + if (values[i] > m) + { + m = values[i]; + } + } + return m; + } + + /// *listonly* + public static int Max(int a, int b) { return a > b ? a : b; } + // Returns the largest of two or more values. + public static int Max(params int[] values) + { + int len = values.Length; + if (len == 0) + { + return 0; + } + + int m = values[0]; + for (int i = 1; i < len; i++) + { + if (values[i] > m) + { + m = values[i]; + } + } + return m; + } + + // Returns /f/ raised to power /p/. + public static float Pow(float f, float p) { return (float)Math.Pow(f, p); } + + // Returns e raised to the specified power. + public static float Exp(float power) { return (float)Math.Exp(power); } + + // Returns the logarithm of a specified number in a specified base. 
+ public static float Log(float f, float p) { return (float)Math.Log(f, p); } + + // Returns the natural (base e) logarithm of a specified number. + public static float Log(float f) { return (float)Math.Log(f); } + + // Returns the base 10 logarithm of a specified number. + public static float Log10(float f) { return (float)Math.Log10(f); } + + // Returns the smallest integer greater to or equal to /f/. + public static float Ceil(float f) { return (float)Math.Ceiling(f); } + + // Returns the largest integer smaller to or equal to /f/. + public static float Floor(float f) { return (float)Math.Floor(f); } + + // Returns /f/ rounded to the nearest integer. + public static float Round(float f) { return (float)Math.Round(f); } + + // Returns the smallest integer greater to or equal to /f/. + public static int CeilToInt(float f) { return (int)Math.Ceiling(f); } + + // Returns the largest integer smaller to or equal to /f/. + public static int FloorToInt(float f) { return (int)Math.Floor(f); } + + // Returns /f/ rounded to the nearest integer. + public static int RoundToInt(float f) { return (int)Math.Round(f); } + + // Returns the sign of /f/. + public static float Sign(float f) { return f >= 0F ? 1F : -1F; } + + // The infamous ''3.14159265358979...'' value (RO). + public const float PI = (float)Math.PI; + + // A representation of positive infinity (RO). + public const float Infinity = float.PositiveInfinity; + + // A representation of negative infinity (RO). + public const float NegativeInfinity = float.NegativeInfinity; + + // Degrees-to-radians conversion constant (RO). + public const float Deg2Rad = PI * 2F / 360F; + + // Radians-to-degrees conversion constant (RO). + public const float Rad2Deg = 1F / Deg2Rad; + + // Clamps a value between a minimum float and maximum float value. 
+ public static double Clamp(double value, double min, double max) + { + if (value < min) + { + value = min; + } + else if (value > max) + { + value = max; + } + + return value; + } + + // Clamps a value between a minimum float and maximum float value. + public static float Clamp(float value, float min, float max) + { + if (value < min) + { + value = min; + } + else if (value > max) + { + value = max; + } + + return value; + } + + // Clamps value between min and max and returns value. + // Set the position of the transform to be that of the time + // but never less than 1 or more than 3 + // + public static int Clamp(int value, int min, int max) + { + if (value < min) + { + value = min; + } + else if (value > max) + { + value = max; + } + + return value; + } + + // Clamps value between 0 and 1 and returns value + public static float Clamp01(float value) + { + if (value < 0F) + { + return 0F; + } + else if (value > 1F) + { + return 1F; + } + else + { + return value; + } + } + + // Interpolates between /a/ and /b/ by /t/. /t/ is clamped between 0 and 1. + public static float Lerp(float a, float b, float t) + { + return a + (b - a) * Clamp01(t); + } + + // Interpolates between /a/ and /b/ by /t/ without clamping the interpolant. + public static float LerpUnclamped(float a, float b, float t) + { + return a + (b - a) * t; + } + + // Same as ::ref::Lerp but makes sure the values interpolate correctly when they wrap around 360 degrees. + public static float LerpAngle(float a, float b, float t) + { + float delta = Repeat((b - a), 360); + if (delta > 180) + { + delta -= 360; + } + + return a + delta * Clamp01(t); + } + + // Moves a value /current/ towards /target/. 
/// <summary>Moves <paramref name="current"/> towards <paramref name="target"/>, changing it by at most <paramref name="maxDelta"/>.</summary>
public static float MoveTowards(float current, float target, float maxDelta)
{
    float remaining = target - current;
    if (MathF.Abs(remaining) <= maxDelta)
    {
        return target;
    }
    return current + MathF.Sign(remaining) * maxDelta;
}

/// <summary>Same as <see cref="MoveTowards"/> but interpolates correctly across the 360-degree wrap-around.</summary>
public static float MoveTowardsAngle(float current, float target, float maxDelta)
{
    float deltaAngle = DeltaAngle(current, target);
    if (-maxDelta < deltaAngle && deltaAngle < maxDelta)
    {
        return target;
    }
    return MoveTowards(current, current + deltaAngle, maxDelta);
}

/// <summary>Interpolates between <paramref name="from"/> and <paramref name="to"/> with cubic smoothing at the limits.</summary>
public static float SmoothStep(float from, float to, float t)
{
    t = MathF.Clamp01(t);
    t = -2.0F * t * t * t + 3.0F * t * t;
    return to * t + from * (1F - t);
}

/// <summary>Applies a gamma curve to <paramref name="value"/> within [-absmax, absmax], preserving sign; values beyond absmax pass through (with sign restored).</summary>
public static float Gamma(float value, float absmax, float gamma)
{
    bool negative = value < 0F;
    float absval = Abs(value);
    if (absval > absmax)
    {
        return negative ? -absval : absval;
    }
    float result = Pow(absval / absmax, gamma) * absmax;
    return negative ? -result : result;
}

/// <summary>Loops <paramref name="t"/> so the result is never larger than <paramref name="length"/> and never smaller than 0.</summary>
public static float Repeat(float t, float length)
{
    return Clamp(t - MathF.Floor(t / length) * length, 0.0f, length);
}

/// <summary>Ping-pongs <paramref name="t"/> so the result oscillates between 0 and <paramref name="length"/>.</summary>
public static float PingPong(float t, float length)
{
    t = Repeat(t, length * 2F);
    return length - MathF.Abs(t - length);
}

/// <summary>Computes the Lerp parameter that produces <paramref name="value"/> between <paramref name="a"/> and <paramref name="b"/>, clamped to [0, 1]; returns 0 when a == b.</summary>
public static float InverseLerp(float a, float b, float value)
{
    return a != b ? Clamp01((value - a) / (b - a)) : 0.0f;
}
/// <summary>Calculates the shortest signed difference, in degrees, between two angles.</summary>
public static float DeltaAngle(float current, float target)
{
    float delta = MathF.Repeat(target - current, 360.0F);
    if (delta > 180.0F)
    {
        delta -= 360.0F;
    }
    return delta;
}

/// <summary>Draws a uniformly distributed non-negative long from <paramref name="r"/>.</summary>
internal static long RandomToLong(System.Random r)
{
    byte[] buffer = new byte[8];
    r.NextBytes(buffer);
    // Mask the sign bit so the result is always non-negative.
    return (long)(System.BitConverter.ToUInt64(buffer, 0) & long.MaxValue);
}
}
}

// ===== LightlessSync/ThirdParty/Nanomesh/Base/MathUtils.cs =====

namespace Nanomesh
{
    using System.Runtime.CompilerServices;

    /// <summary>Shared numeric helpers for the mesh-decimation code.</summary>
    public static class MathUtils
    {
        public const float EpsilonFloat = 1e-15f;
        // FIX: was '1e-40f' — a float literal (rounded through float subnormal precision) assigned
        // to a double constant; use a double literal so the constant holds the exact intended value.
        public const double EpsilonDouble = 1e-40;

        /// <summary>Divides, returning 0 instead of Inf/NaN when the denominator is within EpsilonFloat of zero.</summary>
        [MethodImpl(MethodImplOptions.AggressiveInlining)]
        public static float DivideSafe(float numerator, float denominator)
        {
            return (denominator > -EpsilonFloat && denominator < EpsilonFloat) ? 0f : numerator / denominator;
        }

        /// <summary>Divides, returning 0 instead of Inf/NaN when the denominator is within EpsilonDouble of zero.</summary>
        [MethodImpl(MethodImplOptions.AggressiveInlining)]
        public static double DivideSafe(double numerator, double denominator)
        {
            return (denominator > -EpsilonDouble && denominator < EpsilonDouble) ? 0d : numerator / denominator;
        }

        // NOTE(review): generic parameter reconstructed — the angle brackets were stripped in the
        // source this was recovered from; confirm the original signature was SelectMin<T>.
        /// <summary>Selects the smallest of three error values and its payload; ties prefer the later argument.</summary>
        public static void SelectMin<T>(double e1, double e2, double e3, in T v1, in T v2, in T v3, out double e, out T v)
        {
            if (e1 < e2)
            {
                if (e1 < e3)
                {
                    e = e1; v = v1;
                }
                else
                {
                    e = e3; v = v3;
                }
            }
            else
            {
                if (e2 < e3)
                {
                    e = e2; v = v2;
                }
                else
                {
                    e = e3; v = v3;
                }
            }
        }

        /// <summary>Selects the smallest of four error values and its payload; ties prefer the later argument.</summary>
        public static void SelectMin<T>(double e1, double e2, double e3, double e4, in T v1, in T v2, in T v3, in T v4, out double e, out T v)
        {
            if (e1 < e2)
            {
                if (e1 < e3)
                {
                    if (e1 < e4) { e = e1; v = v1; }
                    else { e = e4; v = v4; }
                }
                else
                {
                    if (e3 < e4) { e = e3; v = v3; }
                    else { e = e4; v = v4; }
                }
            }
            else
            {
                if (e2 < e3)
                {
                    if (e2 < e4) { e = e2; v = v2; }
                    else { e = e4; v = v4; }
                }
                else
                {
                    if (e3 < e4) { e = e3; v = v3; }
                    else { e = e4; v = v4; }
                }
            }
        }
    }
}

// ===== LightlessSync/ThirdParty/Nanomesh/Base/Profiling.cs =====

namespace Nanomesh
{
    using System;
    using System.Collections.Generic;
    using System.Diagnostics;

    /// <summary>Simple keyed stopwatch registry for coarse profiling.</summary>
    /// <remarks>NOTE(review): backed by a plain Dictionary — not thread-safe; confirm all callers profile from a single thread.</remarks>
    public static class Profiling
    {
        private static readonly Dictionary<string, Stopwatch> stopwatches = new Dictionary<string, Stopwatch>();

        /// <summary>Starts (or restarts) the timer registered under <paramref name="key"/>.</summary>
        public static void Start(string key)
        {
            // FIX: single indexer assignment replaces the previous ContainsKey + Add/assign double
            // lookup; behavior is identical (add when absent, restart when present).
            stopwatches[key] = Stopwatch.StartNew();
        }

        /// <summary>Stops the timer under <paramref name="key"/> and returns a human-readable "done in" message.</summary>
        public static string End(string key)
        {
            TimeSpan time = EndTimer(key);
            return $"{key} done in {time.ToString("mm':'ss':'fff")}";
        }

        /// <summary>Stops and removes the timer under <paramref name="key"/>; returns TimeSpan.MinValue when no such timer exists.</summary>
        private static TimeSpan EndTimer(string key)
        {
            // FIX: TryGetValue replaces the ContainsKey + indexer double lookup.
            if (!stopwatches.TryGetValue(key, out Stopwatch sw))
            {
                return TimeSpan.MinValue;
            }

            sw.Stop();
            stopwatches.Remove(key);
            return sw.Elapsed;
        }

        /// <summary>Runs <paramref name="toTime"/> once and returns its wall-clock duration.</summary>
        public static TimeSpan Time(Action toTime)
        {
            Stopwatch timer = Stopwatch.StartNew();
            toTime();
            timer.Stop();
            return timer.Elapsed;
        }
    }
}

// ===== LightlessSync/ThirdParty/Nanomesh/Base/Quaternion.cs =====

namespace Nanomesh
{
    using System;
    using System.Runtime.InteropServices;

    /// <summary>Double-precision quaternion (Unity-style API port).</summary>
    [StructLayout(LayoutKind.Sequential)]
    public partial struct Quaternion : IEquatable<Quaternion>
    {
        private const double radToDeg = 180.0 / Math.PI;
        private const double degToRad = Math.PI / 180.0;

        // Tolerance used by Normalize; should probably also be used in the 0 tests in LookRotation/Slerp.
        public const double kEpsilon = 1E-20;

        /// <summary>The vector (imaginary) part (x, y, z).</summary>
        public Vector3 xyz
        {
            set
            {
                x = value.x;
                y = value.y;
                z = value.z;
            }
            get => new Vector3(x, y, z);
        }

        public double x;
        public double y;
        public double z;
        public double w;

        /// <summary>Indexed component access: 0=x, 1=y, 2=z, 3=w.</summary>
        public double this[int index]
        {
            get
            {
                switch (index)
                {
                    case 0: return x;
                    case 1: return y;
                    case 2: return z;
                    case 3: return w;
                    default:
                        throw new IndexOutOfRangeException("Invalid Quaternion index: " + index + ", can use only 0,1,2,3");
                }
            }
            set
            {
                switch (index)
                {
                    case 0: x = value; break;
                    case 1: y = value; break;
                    case 2: z = value; break;
                    case 3: w = value; break;
                    default:
                        throw new IndexOutOfRangeException("Invalid Quaternion index: " + index + ", can use only 0,1,2,3");
                }
            }
        }

        /// <summary>The identity rotation (RO).</summary>
        public static Quaternion identity => new Quaternion(0, 0, 0, 1);

        /// <summary>Gets the length (magnitude) of the quaternion.</summary>
        public double Length => Math.Sqrt(x * x + y * y + z * z + w * w);
/// <summary>Gets the squared length of the quaternion (cheaper than <see cref="Length"/>).</summary>
public double LengthSquared => x * x + y * y + z * z + w * w;

/// <summary>Constructs a new quaternion from the given x, y, z, w components.</summary>
public Quaternion(double x, double y, double z, double w)
{
    this.x = x;
    this.y = y;
    this.z = z;
    this.w = w;
}

/// <summary>Constructs a new quaternion from a vector part <paramref name="v"/> and scalar part <paramref name="w"/>.</summary>
public Quaternion(Vector3 v, double w)
{
    x = v.x;
    y = v.y;
    z = v.z;
    this.w = w;
}

/// <summary>Replaces all four components of this quaternion.</summary>
public void Set(double new_x, double new_y, double new_z, double new_w)
{
    x = new_x;
    y = new_y;
    z = new_z;
    w = new_w;
}

/// <summary>Returns <paramref name="q"/> scaled to unit length, or <see cref="identity"/> when its magnitude is below kEpsilon.</summary>
public static Quaternion Normalize(Quaternion q)
{
    double mag = Math.Sqrt(Dot(q, q));
    if (mag < kEpsilon)
    {
        return Quaternion.identity;
    }
    return new Quaternion(q.x / mag, q.y / mag, q.z / mag, q.w / mag);
}

/// <summary>Scales this quaternion to unit length in place.</summary>
public void Normalize() => this = Normalize(this);

/// <summary>Returns the dot product of two quaternions.</summary>
public static double Dot(Quaternion a, Quaternion b) => a.x * b.x + a.y * b.y + a.z * b.z + a.w * b.w;
/// <summary>Creates a rotation of <paramref name="angle"/> degrees around <paramref name="axis"/>.</summary>
public static Quaternion AngleAxis(double angle, Vector3 axis) => AngleAxis(angle, ref axis);

private static Quaternion AngleAxis(double degrees, ref Vector3 axis)
{
    // A zero axis cannot define a rotation; fall back to identity.
    if (axis.LengthSquared == 0.0)
    {
        return identity;
    }

    Quaternion result = identity;
    double halfRadians = degrees * degToRad * 0.5;
    axis = axis.Normalized;
    axis = axis * Math.Sin(halfRadians);
    result.x = axis.x;
    result.y = axis.y;
    result.z = axis.z;
    result.w = Math.Cos(halfRadians);
    return Normalize(result);
}

/// <summary>Extracts the angle (in degrees) and the rotation axis of this quaternion.</summary>
public void ToAngleAxis(out double angle, out Vector3 axis)
{
    ToAxisAngleRad(this, out axis, out angle);
    angle *= radToDeg;
}

/// <summary>Creates a rotation which rotates from <paramref name="fromDirection"/> to <paramref name="toDirection"/>.</summary>
public static Quaternion FromToRotation(Vector3 fromDirection, Vector3 toDirection)
    => RotateTowards(LookRotation(fromDirection), LookRotation(toDirection), double.MaxValue);

/// <summary>Sets this quaternion to a rotation from <paramref name="fromDirection"/> to <paramref name="toDirection"/>.</summary>
public void SetFromToRotation(Vector3 fromDirection, Vector3 toDirection)
    => this = FromToRotation(fromDirection, toDirection);
/// <summary>Creates a rotation with the specified <paramref name="forward"/> and <paramref name="upwards"/> directions.</summary>
public static Quaternion LookRotation(Vector3 forward, Vector3 upwards) => LookRotation(ref forward, ref upwards);

/// <summary>Creates a rotation looking along <paramref name="forward"/> with the default up vector.</summary>
public static Quaternion LookRotation(Vector3 forward)
{
    // NOTE(review): this port uses +X (1,0,0) as the default up vector rather than Unity's +Y —
    // presumably intentional for this codebase; confirm before changing.
    Vector3 up = new Vector3(1, 0, 0);
    return LookRotation(ref forward, ref up);
}

private static Quaternion LookRotation(ref Vector3 forward, ref Vector3 up)
{
    // Build an orthonormal basis from forward/up, then convert the resulting rotation matrix to a
    // quaternion, branching on the largest diagonal element for numerical stability.
    forward = Vector3.Normalize(forward);
    Vector3 right = Vector3.Normalize(Vector3.Cross(up, forward));
    up = Vector3.Cross(forward, right);
    double m00 = right.x;
    double m01 = right.y;
    double m02 = right.z;
    double m10 = up.x;
    double m11 = up.y;
    double m12 = up.z;
    double m20 = forward.x;
    double m21 = forward.y;
    double m22 = forward.z;

    double trace = (m00 + m11) + m22;
    Quaternion quaternion = new Quaternion();
    if (trace > 0)
    {
        double s = Math.Sqrt(trace + 1);
        quaternion.w = s * 0.5;
        s = 0.5 / s;
        quaternion.x = (m12 - m21) * s;
        quaternion.y = (m20 - m02) * s;
        quaternion.z = (m01 - m10) * s;
        return quaternion;
    }
    if ((m00 >= m11) && (m00 >= m22))
    {
        double s = Math.Sqrt(((1 + m00) - m11) - m22);
        double inv = 0.5 / s;
        quaternion.x = 0.5 * s;
        quaternion.y = (m01 + m10) * inv;
        quaternion.z = (m02 + m20) * inv;
        quaternion.w = (m12 - m21) * inv;
        return quaternion;
    }
    if (m11 > m22)
    {
        double s = Math.Sqrt(((1 + m11) - m00) - m22);
        double inv = 0.5 / s;
        quaternion.x = (m10 + m01) * inv;
        quaternion.y = 0.5 * s;
        quaternion.z = (m21 + m12) * inv;
        quaternion.w = (m20 - m02) * inv;
        return quaternion;
    }
    double sz = Math.Sqrt(((1 + m22) - m00) - m11);
    double invZ = 0.5 / sz;
    quaternion.x = (m20 + m02) * invZ;
    quaternion.y = (m21 + m12) * invZ;
    quaternion.z = 0.5 * sz;
    quaternion.w = (m01 - m10) * invZ;
    return quaternion;
}

/// <summary>Sets this quaternion to look along <paramref name="view"/> with the default (+X) up vector.</summary>
public void SetLookRotation(Vector3 view)
{
    Vector3 up = new Vector3(1, 0, 0);
    SetLookRotation(view, up);
}
/// <summary>Sets this quaternion to a rotation with the specified <paramref name="view"/> (forward) and <paramref name="up"/> directions.</summary>
public void SetLookRotation(Vector3 view, Vector3 up)
{
    this = LookRotation(view, up);
}

/// <summary>Spherically interpolates between <paramref name="a"/> and <paramref name="b"/> by <paramref name="t"/>, clamped to [0, 1].</summary>
public static Quaternion Slerp(Quaternion a, Quaternion b, double t)
{
    return Slerp(ref a, ref b, t);
}

private static Quaternion Slerp(ref Quaternion a, ref Quaternion b, double t)
{
    if (t > 1)
    {
        t = 1;
    }
    if (t < 0)
    {
        t = 0;
    }
    return SlerpUnclamped(ref a, ref b, t);
}

/// <summary>Spherically interpolates between <paramref name="a"/> and <paramref name="b"/> by <paramref name="t"/> without clamping the interpolant.</summary>
public static Quaternion SlerpUnclamped(Quaternion a, Quaternion b, double t)
{
    return SlerpUnclamped(ref a, ref b, t);
}

private static Quaternion SlerpUnclamped(ref Quaternion a, ref Quaternion b, double t)
{
    // If either input is degenerate (zero length), return the other.
    if (a.LengthSquared == 0.0)
    {
        if (b.LengthSquared == 0.0)
        {
            return identity;
        }
        return b;
    }
    else if (b.LengthSquared == 0.0)
    {
        return a;
    }

    double cosHalfAngle = a.w * b.w + Vector3.Dot(a.xyz, b.xyz);

    if (cosHalfAngle >= 1.0 || cosHalfAngle <= -1.0)
    {
        // Angle is 0: the rotations are effectively identical; return one input.
        return a;
    }
    else if (cosHalfAngle < 0.0)
    {
        // Take the shorter arc by negating one endpoint (q and -q represent the same rotation).
        b.xyz = -b.xyz;
        b.w = -b.w;
        cosHalfAngle = -cosHalfAngle;
    }

    double blendA;
    double blendB;
    if (cosHalfAngle < 0.99)
    {
        // Proper slerp for sufficiently large angles.
        double halfAngle = Math.Acos(cosHalfAngle);
        double sinHalfAngle = Math.Sin(halfAngle);
        double oneOverSinHalfAngle = 1.0 / sinHalfAngle;
        blendA = Math.Sin(halfAngle * (1.0 - t)) * oneOverSinHalfAngle;
        blendB = Math.Sin(halfAngle * t) * oneOverSinHalfAngle;
    }
    else
    {
        // Nearly parallel: fall back to lerp to avoid dividing by a tiny sine.
        blendA = 1.0f - t;
        blendB = t;
    }

    Quaternion result = new Quaternion(blendA * a.xyz + blendB * b.xyz, blendA * a.w + blendB * b.w);
    return result.LengthSquared > 0.0 ? Normalize(result) : identity;
}

/// <summary>Interpolates between <paramref name="a"/> and <paramref name="b"/> by <paramref name="t"/> (clamped to [0, 1]) and normalizes the result.</summary>
public static Quaternion Lerp(Quaternion a, Quaternion b, double t)
{
    if (t > 1)
    {
        t = 1;
    }
    if (t < 0)
    {
        t = 0;
    }
    // TODO: delegates to slerp rather than a true normalized lerp; kept for backwards
    // compatibility with existing callers.
    return Slerp(ref a, ref b, t);
}

/// <summary>Interpolates between <paramref name="a"/> and <paramref name="b"/> by <paramref name="t"/> without clamping, normalizing the result.</summary>
public static Quaternion LerpUnclamped(Quaternion a, Quaternion b, double t)
{
    // FIX: previously routed through the clamped Slerp, which silently clamped t to [0, 1] and
    // contradicted both the method name and its documented contract; use SlerpUnclamped instead.
    return SlerpUnclamped(ref a, ref b, t);
}

/// <summary>Rotates <paramref name="from"/> towards <paramref name="to"/> by at most <paramref name="maxDegreesDelta"/> degrees.</summary>
public static Quaternion RotateTowards(Quaternion from, Quaternion to, double maxDegreesDelta)
{
    double angle = Angle(from, to);
    if (angle == 0)
    {
        return to;
    }
    double t = Math.Min(1, maxDegreesDelta / angle);
    return SlerpUnclamped(from, to, t);
}
/// <summary>Returns the inverse of <paramref name="rotation"/> (conjugate scaled by 1/length²); returns the input unchanged when its length is zero.</summary>
public static Quaternion Inverse(Quaternion rotation)
{
    double lengthSq = rotation.LengthSquared;
    if (lengthSq != 0.0)
    {
        double i = 1.0 / lengthSq;
        return new Quaternion(rotation.xyz * -i, rotation.w * i);
    }
    return rotation;
}

/// <summary>Returns the components formatted as "x, y, z, w".</summary>
public override string ToString() => $"{x}, {y}, {z}, {w}";

/// <summary>Returns the components formatted as "(x, y, z, w)", each using <paramref name="format"/>.</summary>
public string ToString(string format)
{
    return string.Format("({0}, {1}, {2}, {3})", x.ToString(format), y.ToString(format), z.ToString(format), w.ToString(format));
}

/// <summary>Returns the angle in degrees between two rotations <paramref name="a"/> and <paramref name="b"/>.</summary>
public static double Angle(Quaternion a, Quaternion b)
{
    double f = Dot(a, b);
    return Math.Acos(Math.Min(Math.Abs(f), 1)) * 2 * radToDeg;
}

/// <summary>Returns a rotation built from the given Euler angles, in degrees.</summary>
public static Quaternion Euler(double x, double y, double z) => FromEulerRad(new Vector3(x, y, z) * degToRad);
/// <summary>Returns a rotation built from the given Euler angles, in degrees.</summary>
public static Quaternion Euler(Vector3 euler) => FromEulerRad(euler * degToRad);

// Wraps an angle into the [0, 360] range.
private static double NormalizeAngle(double angle)
{
    while (angle > 360)
    {
        angle -= 360;
    }
    while (angle < 0)
    {
        angle += 360;
    }
    return angle;
}

// Builds a quaternion from Euler angles given in radians.
// NOTE(review): this port maps euler.x -> "yaw", euler.y -> "pitch", euler.z -> "roll" and combines
// them in a non-Unity component order — presumably deliberate in this port; confirm against callers.
private static Quaternion FromEulerRad(Vector3 euler)
{
    double yaw = euler.x;
    double pitch = euler.y;
    double roll = euler.z;
    double sinRollOver2 = Math.Sin(roll * 0.5);
    double cosRollOver2 = Math.Cos(roll * 0.5);
    double sinPitchOver2 = Math.Sin(pitch * 0.5);
    double cosPitchOver2 = Math.Cos(pitch * 0.5);
    double sinYawOver2 = Math.Sin(yaw * 0.5);
    double cosYawOver2 = Math.Cos(yaw * 0.5);
    Quaternion result;
    result.x = cosYawOver2 * cosPitchOver2 * cosRollOver2 + sinYawOver2 * sinPitchOver2 * sinRollOver2;
    result.y = cosYawOver2 * cosPitchOver2 * sinRollOver2 - sinYawOver2 * sinPitchOver2 * cosRollOver2;
    result.z = cosYawOver2 * sinPitchOver2 * cosRollOver2 + sinYawOver2 * cosPitchOver2 * sinRollOver2;
    result.w = sinYawOver2 * cosPitchOver2 * cosRollOver2 - cosYawOver2 * sinPitchOver2 * sinRollOver2;
    return result;
}

// Extracts the rotation axis and the angle (in radians) from q.
private static void ToAxisAngleRad(Quaternion q, out Vector3 axis, out double angle)
{
    if (Math.Abs(q.w) > 1.0)
    {
        q.Normalize();
    }

    angle = 2.0 * Math.Acos(q.w);
    double den = Math.Sqrt(1.0 - q.w * q.w);
    if (den > 0.0001)
    {
        axis = q.xyz / den;
    }
    else
    {
        // Angle is (near) zero: any normalized axis works.
        axis = new Vector3(1, 0, 0);
    }
}

public override int GetHashCode()
{
    return x.GetHashCode() ^ y.GetHashCode() << 2 ^ z.GetHashCode() >> 2 ^ w.GetHashCode() >> 1;
}

/// <summary>Exact component-wise equality against a boxed quaternion.</summary>
public override bool Equals(object other) => other is Quaternion q && Equals(q);

/// <summary>Exact component-wise equality (unlike ==, which compares by dot-product tolerance).</summary>
public bool Equals(Quaternion other)
    => x.Equals(other.x) && y.Equals(other.y) && z.Equals(other.z) && w.Equals(other.w);

/// <summary>Composes two rotations (Hamilton product).</summary>
public static Quaternion operator *(Quaternion lhs, Quaternion rhs)
{
    return new Quaternion(
        lhs.w * rhs.x + lhs.x * rhs.w + lhs.y * rhs.z - lhs.z * rhs.y,
        lhs.w * rhs.y + lhs.y * rhs.w + lhs.z * rhs.x - lhs.x * rhs.z,
        lhs.w * rhs.z + lhs.z * rhs.w + lhs.x * rhs.y - lhs.y * rhs.x,
        lhs.w * rhs.w - lhs.x * rhs.x - lhs.y * rhs.y - lhs.z * rhs.z);
}

/// <summary>Rotates <paramref name="point"/> by <paramref name="rotation"/>.</summary>
public static Vector3 operator *(Quaternion rotation, Vector3 point)
{
    double x2 = rotation.x * 2;
    double y2 = rotation.y * 2;
    double z2 = rotation.z * 2;
    double xx = rotation.x * x2;
    double yy = rotation.y * y2;
    double zz = rotation.z * z2;
    double xy = rotation.x * y2;
    double xz = rotation.x * z2;
    double yz = rotation.y * z2;
    double wx = rotation.w * x2;
    double wy = rotation.w * y2;
    double wz = rotation.w * z2;

    return new Vector3(
        (1 - (yy + zz)) * point.x + (xy - wz) * point.y + (xz + wy) * point.z,
        (xy + wz) * point.x + (1 - (xx + zz)) * point.y + (yz - wx) * point.z,
        (xz - wy) * point.x + (yz + wx) * point.y + (1 - (xx + yy)) * point.z);
}

/// <summary>Approximate equality: true when the rotations are nearly identical (dot &gt; 0.999999999).</summary>
public static bool operator ==(Quaternion lhs, Quaternion rhs) => Dot(lhs, rhs) > 0.999999999;

/// <summary>Approximate inequality; the exact complement of ==.</summary>
public static bool operator !=(Quaternion lhs, Quaternion rhs) => Dot(lhs, rhs) <= 0.999999999;
}
}
// ===== LightlessSync/ThirdParty/Nanomesh/Base/SymmetricMatrix.cs =====

namespace Nanomesh
{
    /// <summary>
    /// Symmetric 4x4 matrix stored as its 10 unique coefficients:
    ///   | m0 m1 m2 m3 |
    ///   | m1 m4 m5 m6 |
    ///   | m2 m5 m7 m8 |
    ///   | m3 m6 m8 m9 |
    /// Used as a plane-quadric accumulator in mesh decimation.
    /// </summary>
    public readonly struct SymmetricMatrix
    {
        public readonly double m0, m1, m2, m3, m4, m5, m6, m7, m8, m9;

        /// <summary>Builds the matrix directly from its 10 unique coefficients.</summary>
        public SymmetricMatrix(in double m0, in double m1, in double m2, in double m3, in double m4, in double m5, in double m6, in double m7, in double m8, in double m9)
        {
            this.m0 = m0;
            this.m1 = m1;
            this.m2 = m2;
            this.m3 = m3;
            this.m4 = m4;
            this.m5 = m5;
            this.m6 = m6;
            this.m7 = m7;
            this.m8 = m8;
            this.m9 = m9;
        }

        /// <summary>Builds the outer-product quadric of the plane (a, b, c, d).</summary>
        public SymmetricMatrix(in double a, in double b, in double c, in double d)
        {
            m0 = a * a; m1 = a * b; m2 = a * c; m3 = a * d;
            m4 = b * b; m5 = b * c; m6 = b * d;
            m7 = c * c; m8 = c * d;
            m9 = d * d;
        }

        /// <summary>Component-wise sum of two quadrics.</summary>
        public static SymmetricMatrix operator +(in SymmetricMatrix a, in SymmetricMatrix b)
            => new SymmetricMatrix(
                a.m0 + b.m0, a.m1 + b.m1, a.m2 + b.m2, a.m3 + b.m3,
                a.m4 + b.m4, a.m5 + b.m5, a.m6 + b.m6,
                a.m7 + b.m7, a.m8 + b.m8,
                a.m9 + b.m9);

        /// <summary>Determinant of the upper-left 3x3 block.</summary>
        public double DeterminantXYZ()
            => m0 * m4 * m7
             + m2 * m1 * m5
             + m1 * m5 * m2
             - m2 * m4 * m2
             - m0 * m5 * m5
             - m1 * m1 * m7;

        /// <summary>Determinant used for the x component of the optimal-position solve.</summary>
        public double DeterminantX()
            => m1 * m5 * m8
             + m3 * m4 * m7
             + m2 * m6 * m5
             - m3 * m5 * m5
             - m1 * m6 * m7
             - m2 * m4 * m8;

        /// <summary>Determinant used for the y component of the optimal-position solve.</summary>
        public double DeterminantY()
            => m0 * m5 * m8
             + m3 * m1 * m7
             + m2 * m6 * m2
             - m3 * m5 * m2
             - m0 * m6 * m7
             - m2 * m1 * m8;

        /// <summary>Determinant used for the z component of the optimal-position solve.</summary>
        public double DeterminantZ()
            => m0 * m4 * m8
             + m3 * m1 * m5
             + m1 * m6 * m2
             - m3 * m4 * m2
             - m0 * m6 * m5
             - m1 * m1 * m8;

        /// <summary>Debug listing of the 10 coefficients, grouped by row.</summary>
        public override string ToString()
        {
            return $"{m0} {m1} {m2} {m3}| {m4} {m5} {m6} | {m7} {m8} | {m9}";
        }
    }
}
// ===== LightlessSync/ThirdParty/Nanomesh/Base/TextUtils.cs =====

namespace Nanomesh
{
    using System.Globalization;
    using System.Runtime.CompilerServices;

    /// <summary>Culture-invariant string-to-number parsing helpers.</summary>
    public static class TextUtils
    {
        /// <summary>Parses <paramref name="text"/> as a double using the invariant culture.</summary>
        [MethodImpl(MethodImplOptions.AggressiveInlining)]
        public static double ToDouble(this string text) => double.Parse(text, CultureInfo.InvariantCulture);

        /// <summary>Parses <paramref name="text"/> as a float using the invariant culture.</summary>
        [MethodImpl(MethodImplOptions.AggressiveInlining)]
        public static float ToFloat(this string text) => float.Parse(text, CultureInfo.InvariantCulture);

        /// <summary>Parses <paramref name="text"/> as an int using the invariant culture.</summary>
        [MethodImpl(MethodImplOptions.AggressiveInlining)]
        public static int ToInt(this string text) => int.Parse(text, CultureInfo.InvariantCulture);
    }
}

// ===== LightlessSync/ThirdParty/Nanomesh/Base/Vector2.cs =====

namespace Nanomesh
{
    using System;

    /// <summary>Immutable double-precision 2D vector (Unity-style API).</summary>
    // NOTE(review): generic arguments reconstructed (angle brackets stripped in the recovered
    // source) — confirm the interfaces were IEquatable<Vector2> and IInterpolable<Vector2>.
    public readonly struct Vector2 : IEquatable<Vector2>, IInterpolable<Vector2>
    {
        public readonly double x;
        public readonly double y;

        /// <summary>Accesses the x (index 0) or y (index 1) component.</summary>
        public double this[int index]
        {
            get
            {
                switch (index)
                {
                    case 0: return x;
                    case 1: return y;
                    default:
                        throw new IndexOutOfRangeException("Invalid Vector2 index!");
                }
            }
        }

        /// <summary>Constructs a new vector with the given x and y components.</summary>
        public Vector2(double x, double y) { this.x = x; this.y = y; }
/// <summary>Linearly interpolates between <paramref name="a"/> and <paramref name="b"/> by <paramref name="t"/> clamped to [0, 1].</summary>
public static Vector2 Lerp(Vector2 a, Vector2 b, double t)
{
    t = MathF.Clamp(t, 0, 1);
    return new Vector2(a.x + (b.x - a.x) * t, a.y + (b.y - a.y) * t);
}

/// <summary>Linearly interpolates between <paramref name="a"/> and <paramref name="b"/> by an unclamped <paramref name="t"/>.</summary>
public static Vector2 LerpUnclamped(Vector2 a, Vector2 b, double t)
{
    return new Vector2(a.x + (b.x - a.x) * t, a.y + (b.y - a.y) * t);
}

/// <summary>Moves <paramref name="current"/> towards <paramref name="target"/> by at most <paramref name="maxDistanceDelta"/>.</summary>
public static Vector2 MoveTowards(Vector2 current, Vector2 target, double maxDistanceDelta)
{
    // Work on raw components to avoid intermediate vector values in this hot path.
    double dx = target.x - current.x;
    double dy = target.y - current.y;
    double sqDist = dx * dx + dy * dy;

    if (sqDist == 0 || (maxDistanceDelta >= 0 && sqDist <= maxDistanceDelta * maxDistanceDelta))
    {
        return target;
    }

    double dist = Math.Sqrt(sqDist);
    return new Vector2(current.x + dx / dist * maxDistanceDelta,
                       current.y + dy / dist * maxDistanceDelta);
}

/// <summary>Multiplies two vectors component-wise.</summary>
public static Vector2 Scale(Vector2 a, Vector2 b) => new Vector2(a.x * b.x, a.y * b.y);

/// <summary>Returns <paramref name="value"/> scaled to unit length, or <see cref="Zero"/> when its magnitude is at or below K_EPSILON.</summary>
public static Vector2 Normalize(in Vector2 value)
{
    double mag = Magnitude(in value);
    return mag > K_EPSILON ? value / mag : Zero;
}

/// <summary>Returns a normalized copy of this vector (the struct itself is immutable and is not modified).</summary>
public Vector2 Normalize() => Normalize(in this);

/// <summary>Returns the squared length of <paramref name="a"/>.</summary>
public static double SqrMagnitude(in Vector2 a) => a.x * a.x + a.y * a.y;
/// <summary>Returns the squared length of this vector (RO).</summary>
public double SqrMagnitude() => SqrMagnitude(in this);

/// <summary>Returns the length of <paramref name="vector"/>.</summary>
public static double Magnitude(in Vector2 vector) => Math.Sqrt(SqrMagnitude(in vector));

/// <summary>Returns the length of this vector.</summary>
public double Magnitude() => Magnitude(this);

// Allows Vector2 values to be used as hash-table keys.
public override int GetHashCode()
{
    return x.GetHashCode() ^ (y.GetHashCode() << 2);
}

// Required alongside GetHashCode for hash-table usage.
public override bool Equals(object other) => other is Vector2 v && Equals(v);

/// <summary>Exact component-wise equality (unlike ==, which uses an epsilon).</summary>
public bool Equals(Vector2 other) => x == other.x && y == other.y;

/// <summary>Reflects <paramref name="inDirection"/> off the plane with normal <paramref name="inNormal"/>.</summary>
public static Vector2 Reflect(Vector2 inDirection, Vector2 inNormal)
{
    double factor = -2F * Dot(inNormal, inDirection);
    return new Vector2(factor * inNormal.x + inDirection.x, factor * inNormal.y + inDirection.y);
}

/// <summary>Returns <paramref name="inDirection"/> rotated 90 degrees counter-clockwise.</summary>
public static Vector2 Perpendicular(Vector2 inDirection) => new Vector2(-inDirection.y, inDirection.x);

/// <summary>Returns the dot product of two vectors.</summary>
public static double Dot(Vector2 lhs, Vector2 rhs) => lhs.x * rhs.x + lhs.y * rhs.y;

/// <summary>Returns the unsigned angle in radians between <paramref name="from"/> and <paramref name="to"/>; 0 when either is (near) zero length.</summary>
public static double AngleRadians(Vector2 from, Vector2 to)
{
    // sqrt(a) * sqrt(b) == sqrt(a * b) for non-negative reals.
    double denominator = Math.Sqrt(from.SqrMagnitude() * to.SqrMagnitude());
    if (denominator < K_EPSILON_NORMAL_SQRT)
    {
        return 0F;
    }

    double dot = MathF.Clamp(Dot(from, to) / denominator, -1F, 1F);
    return Math.Acos(dot);
}

/// <summary>Returns the unsigned angle in degrees between <paramref name="from"/> and <paramref name="to"/>.</summary>
public static double AngleDegrees(Vector2 from, Vector2 to) => AngleRadians(from, to) / MathF.PI * 180f;
/// <summary>Returns the signed angle in degrees between <paramref name="from"/> and <paramref name="to"/>; always the smallest angle, with the sign taken from the 2D cross product.</summary>
public static double SignedAngle(Vector2 from, Vector2 to)
{
    double unsignedAngle = AngleDegrees(from, to);
    double sign = Math.Sign(from.x * to.y - from.y * to.x);
    return unsignedAngle * sign;
}

/// <summary>Returns the distance between <paramref name="a"/> and <paramref name="b"/>.</summary>
public static double Distance(Vector2 a, Vector2 b)
{
    double dx = a.x - b.x;
    double dy = a.y - b.y;
    return Math.Sqrt(dx * dx + dy * dy);
}

/// <summary>Returns a copy of <paramref name="vector"/> with its magnitude clamped to <paramref name="maxLength"/>.</summary>
public static Vector2 ClampMagnitude(Vector2 vector, double maxLength)
{
    double sqrMagnitude = vector.SqrMagnitude();
    if (sqrMagnitude > maxLength * maxLength)
    {
        double mag = Math.Sqrt(sqrMagnitude);
        // Named intermediates keep the normalization at plain double precision rather than a
        // possibly-wider intermediate, which could change results.
        double normalizedX = vector.x / mag;
        double normalizedY = vector.y / mag;
        return new Vector2(normalizedX * maxLength, normalizedY * maxLength);
    }
    return vector;
}

/// <summary>Returns the component-wise minimum of two vectors.</summary>
public static Vector2 Min(Vector2 lhs, Vector2 rhs) => new Vector2(Math.Min(lhs.x, rhs.x), Math.Min(lhs.y, rhs.y));

/// <summary>Returns the component-wise maximum of two vectors.</summary>
public static Vector2 Max(Vector2 lhs, Vector2 rhs) => new Vector2(Math.Max(lhs.x, rhs.x), Math.Max(lhs.y, rhs.y));

/// <summary>Blends this vector with <paramref name="other"/>: ratio 1 returns this vector, ratio 0 returns <paramref name="other"/>.</summary>
// NOTE(review): the weighting is inverted relative to the usual lerp convention (ratio weights
// *this*, not other) — confirm against IInterpolable's contract before changing.
public Vector2 Interpolate(Vector2 other, double ratio) => this * ratio + other * (1 - ratio);
/// <summary>Component-wise vector sum.</summary>
public static Vector2 operator +(Vector2 a, Vector2 b) => new Vector2(a.x + b.x, a.y + b.y);

/// <summary>Component-wise vector difference.</summary>
public static Vector2 operator -(Vector2 a, Vector2 b) => new Vector2(a.x - b.x, a.y - b.y);

/// <summary>Component-wise vector product.</summary>
public static Vector2 operator *(Vector2 a, Vector2 b) => new Vector2(a.x * b.x, a.y * b.y);

/// <summary>Component-wise vector quotient.</summary>
public static Vector2 operator /(Vector2 a, Vector2 b) => new Vector2(a.x / b.x, a.y / b.y);

/// <summary>Negates both components.</summary>
public static Vector2 operator -(Vector2 a) => new Vector2(-a.x, -a.y);

/// <summary>Scales the vector by <paramref name="d"/>.</summary>
public static Vector2 operator *(Vector2 a, double d) => new Vector2(a.x * d, a.y * d);

/// <summary>Scales the vector by <paramref name="d"/>.</summary>
public static Vector2 operator *(double d, Vector2 a) => new Vector2(a.x * d, a.y * d);

/// <summary>Divides both components by <paramref name="d"/>.</summary>
public static Vector2 operator /(Vector2 a, double d) => new Vector2(a.x / d, a.y / d);

/// <summary>Approximate equality: true when the vectors are within K_EPSILON of each other. Returns false in the presence of NaN values.</summary>
public static bool operator ==(Vector2 lhs, Vector2 rhs)
{
    double diffX = lhs.x - rhs.x;
    double diffY = lhs.y - rhs.y;
    return (diffX * diffX + diffY * diffY) < K_EPSILON * K_EPSILON;
}

/// <summary>Approximate inequality. Returns true in the presence of NaN values.</summary>
public static bool operator !=(Vector2 lhs, Vector2 rhs) => !(lhs == rhs);
/// <summary>Narrows a Vector3F to a Vector2, dropping z.</summary>
public static implicit operator Vector2(Vector3F v) => new Vector2(v.x, v.y);

/// <summary>Widens a Vector2 to a Vector3 with z = 0.</summary>
public static implicit operator Vector3(Vector2 v) => new Vector3(v.x, v.y, 0);

/// <summary>Narrows to single precision (lossy).</summary>
public static implicit operator Vector2F(Vector2 vec) => new Vector2F((float)vec.x, (float)vec.y);

/// <summary>Widens from single precision (always exact).</summary>
public static explicit operator Vector2(Vector2F vec) => new Vector2(vec.x, vec.y);

public static readonly Vector2 zeroVector = new Vector2(0F, 0F);
public static readonly Vector2 oneVector = new Vector2(1F, 1F);
public static readonly Vector2 upVector = new Vector2(0F, 1F);
public static readonly Vector2 downVector = new Vector2(0F, -1F);
public static readonly Vector2 leftVector = new Vector2(-1F, 0F);
public static readonly Vector2 rightVector = new Vector2(1F, 0F);
public static readonly Vector2 positiveInfinityVector = new Vector2(double.PositiveInfinity, double.PositiveInfinity);
public static readonly Vector2 negativeInfinityVector = new Vector2(double.NegativeInfinity, double.NegativeInfinity);

/// <summary>(0, 0).</summary>
public static Vector2 Zero => zeroVector;

/// <summary>(1, 1).</summary>
public static Vector2 One => oneVector;

/// <summary>(0, 1).</summary>
public static Vector2 Up => upVector;

/// <summary>(0, -1).</summary>
public static Vector2 Down => downVector;

/// <summary>(-1, 0).</summary>
public static Vector2 Left => leftVector;

/// <summary>(1, 0).</summary>
public static Vector2 Right => rightVector;

/// <summary>(+infinity, +infinity).</summary>
public static Vector2 PositiveInfinity => positiveInfinityVector;

/// <summary>(-infinity, -infinity).</summary>
public static Vector2 NegativeInfinity => negativeInfinityVector;

// Tolerance used by operator== and Normalize.
public const double K_EPSILON = 0.00001F;

// Threshold below which a squared magnitude is treated as zero in angle computations.
public const double K_EPSILON_NORMAL_SQRT = 1e-15f;
}
}

// ===== LightlessSync/ThirdParty/Nanomesh/Base/Vector2F.cs =====

namespace Nanomesh
{
    using System;

    /// <summary>Immutable single-precision 2D vector (Unity-style API).</summary>
    // NOTE(review): generic arguments reconstructed (stripped in the recovered source).
    public readonly struct Vector2F : IEquatable<Vector2F>,
IInterpolable<Vector2F>
    {
        public readonly float x;
        public readonly float y;

        /// <summary>Component access: index 0 is x, index 1 is y.</summary>
        public float this[int index]
        {
            get
            {
                return index switch
                {
                    0 => x,
                    1 => y,
                    _ => throw new IndexOutOfRangeException("Invalid Vector2 index!"),
                };
            }
        }

        /// <summary>Constructs a vector from the given components.</summary>
        public Vector2F(float x, float y)
        {
            this.x = x;
            this.y = y;
        }

        /// <summary>Linearly interpolates between <paramref name="a"/> and <paramref name="b"/>, clamping t to [0, 1].</summary>
        public static Vector2F Lerp(Vector2F a, Vector2F b, float t)
        {
            return LerpUnclamped(a, b, MathF.Clamp(t, 0, 1));
        }

        /// <summary>Linearly interpolates between <paramref name="a"/> and <paramref name="b"/> without clamping t.</summary>
        public static Vector2F LerpUnclamped(Vector2F a, Vector2F b, float t)
        {
            return new Vector2F(a.x + (b.x - a.x) * t, a.y + (b.y - a.y) * t);
        }

        /// <summary>Moves <paramref name="current"/> towards <paramref name="target"/> by at most <paramref name="maxDistanceDelta"/>.</summary>
        public static Vector2F MoveTowards(Vector2F current, Vector2F target, float maxDistanceDelta)
        {
            // Scalar arithmetic on the components; avoids intermediate vector values.
            float dx = target.x - current.x;
            float dy = target.y - current.y;
            float sqDist = dx * dx + dy * dy;

            // Already at the target, or close enough to reach in one step (a negative delta moves away).
            if (sqDist == 0 || (maxDistanceDelta >= 0 && sqDist <= maxDistanceDelta * maxDistanceDelta))
            {
                return target;
            }

            float dist = MathF.Sqrt(sqDist);
            return new Vector2F(current.x + dx / dist * maxDistanceDelta,
                                current.y + dy / dist * maxDistanceDelta);
        }

        /// <summary>Component-wise product of two vectors.</summary>
        public static Vector2F Scale(Vector2F a, Vector2F b) => new Vector2F(a.x * b.x, a.y * b.y);

        /// <summary>Returns <paramref name="value"/> scaled to unit length, or Zero when its magnitude is at or below K_EPSILON.</summary>
        public static Vector2F Normalize(in Vector2F value)
        {
            float len = Magnitude(in value);
            return len > K_EPSILON ? value / len : Zero;
        }

        /// <summary>Returns this vector scaled to unit length (Zero for near-zero vectors).</summary>
        public Vector2F Normalize() => Normalize(in this);

        /// <summary>Squared length of <paramref name="a"/>.</summary>
        public static float SqrMagnitude(in Vector2F a) => a.x * a.x + a.y * a.y;

        /// <summary>Squared length of this vector.</summary>
        public float SqrMagnitude() => SqrMagnitude(in this);

        /// <summary>Length of <paramref name="vector"/>.</summary>
        public static float Magnitude(in Vector2F vector) => (float)Math.Sqrt(SqrMagnitude(in vector));

        /// <summary>Length of this vector.</summary>
        public float Magnitude() => Magnitude(this);

        /// <summary>Hash from the raw component bits; note Equals uses the tolerance-based comparer instead.</summary>
        public override int GetHashCode() => x.GetHashCode() ^ (y.GetHashCode() << 2);

        public override bool Equals(object other) => other is Vector2F v && Equals(v);

        /// <summary>Tolerance-based equality, delegated to Vector2FComparer.Default.</summary>
        public bool Equals(Vector2F other) => Vector2FComparer.Default.Equals(this, other);

        /// <summary>Reflects <paramref name="inDirection"/> off the surface described by <paramref name="inNormal"/>.</summary>
        public static Vector2F Reflect(Vector2F inDirection, Vector2F inNormal)
        {
            float scale = -2F * Dot(inNormal, inDirection);
            return new Vector2F(scale * inNormal.x + inDirection.x, scale * inNormal.y + inDirection.y);
        }

        /// <summary>Rotates the vector 90 degrees counter-clockwise.</summary>
        public static Vector2F Perpendicular(Vector2F inDirection) => new Vector2F(-inDirection.y, inDirection.x);

        /// <summary>Dot product of two vectors.</summary>
        public static float Dot(Vector2F lhs, Vector2F rhs) => lhs.x * rhs.x + lhs.y * rhs.y;

        /// <summary>Unsigned angle in radians between <paramref name="from"/> and <paramref name="to"/> (0 for degenerate inputs).</summary>
        public static float AngleRadians(Vector2F from, Vector2F to)
        {
            // sqrt(a) * sqrt(b) == sqrt(a * b) for non-negative reals.
            float norm = MathF.Sqrt(from.SqrMagnitude() * to.SqrMagnitude());
            if (norm < K_EPSILON_NORMAL_SQRT)
            {
                return 0F;
            }

            return MathF.Acos(MathF.Clamp(Dot(from, to) / norm, -1F, 1F));
        }

        /// <summary>Unsigned angle in degrees between <paramref name="from"/> and <paramref name="to"/>.</summary>
        public static float AngleDegrees(Vector2F from, Vector2F to) => AngleRadians(from, to) / MathF.PI * 180f;

        /// <summary>Signed angle in degrees; sign comes from the 2D cross product of the operands.</summary>
        public static float SignedAngle(Vector2F from, Vector2F to)
        {
            float cross = from.x * to.y - from.y * to.x;
            return AngleDegrees(from, to) * MathF.Sign(cross);
        }

        /// <summary>Euclidean distance between <paramref name="a"/> and <paramref name="b"/>.</summary>
        public static float Distance(Vector2F a, Vector2F b)
        {
            float dx = a.x - b.x;
            float dy = a.y - b.y;
            return MathF.Sqrt(dx * dx + dy * dy);
        }

        /// <summary>Returns <paramref name="vector"/> with its length clamped to at most <paramref name="maxLength"/>.</summary>
        public static Vector2F ClampMagnitude(Vector2F vector, float maxLength)
        {
            float sqrMagnitude = vector.SqrMagnitude();
            if (sqrMagnitude > maxLength * maxLength)
            {
                float mag = MathF.Sqrt(sqrMagnitude);

                // Intermediate float locals pin the result at float precision; a wider
                // intermediate would change the computed value.
                float nx = vector.x / mag;
                float ny = vector.y / mag;
                return new Vector2F(nx * maxLength, ny * maxLength);
            }
            return vector;
        }

        /// <summary>Component-wise minimum.</summary>
        public static Vector2F Min(Vector2F lhs, Vector2F rhs) => new Vector2F(MathF.Min(lhs.x, rhs.x), MathF.Min(lhs.y, rhs.y));

        /// <summary>Component-wise maximum.</summary>
        public static Vector2F Max(Vector2F lhs, Vector2F rhs) => new Vector2F(MathF.Max(lhs.x, rhs.x), MathF.Max(lhs.y, rhs.y));

        /// <summary>
        /// Blend where ratio scales THIS vector: ratio == 1 yields this, ratio == 0 yields other.
        /// The arithmetic runs in double (via the Vector2F*double operator) and narrows back to float.
        /// </summary>
        public Vector2F Interpolate(Vector2F other, double ratio) => this * ratio + other * (1 - ratio);

        /// <summary>Component-wise addition.</summary>
        public static Vector2F operator +(Vector2F a, Vector2F b) => new Vector2F(a.x + b.x, a.y + b.y);

        /// <summary>Component-wise subtraction.</summary>
        public static Vector2F operator -(Vector2F a, Vector2F b) => new Vector2F(a.x - b.x, a.y - b.y);

        /// <summary>Component-wise multiplication.</summary>
        public static Vector2F operator *(Vector2F a, Vector2F b) => new Vector2F(a.x * b.x, a.y * b.y);

        /// <summary>Component-wise division (no zero guard; may produce infinities or NaN).</summary>
        public static Vector2F operator /(Vector2F a, Vector2F b) => new Vector2F(a.x / b.x, a.y / b.y);

        /// <summary>Negation.</summary>
        public static Vector2F operator -(Vector2F a) => new Vector2F(-a.x, -a.y);

        /// <summary>Uniform scale by a float.</summary>
        public static Vector2F operator *(Vector2F a, float d) => new Vector2F(a.x * d, a.y * d);

        /// <summary>Uniform scale by a double; widens to a double-precision Vector2.</summary>
        public static Vector2 operator *(Vector2F a, double d) => new Vector2(a.x * d, a.y * d);

        /// <summary>Uniform scale by a float.</summary>
        public static Vector2F operator *(float d, Vector2F a) => new Vector2F(a.x * d, a.y * d);

        /// <summary>Uniform scale by a double; widens to a double-precision Vector2.</summary>
        public static Vector2 operator *(double d, Vector2F a) => new Vector2(a.x * d, a.y * d);

        /// <summary>Uniform division by a float (no zero guard).</summary>
        public static Vector2F operator /(Vector2F a, float d) => new Vector2F(a.x / d, a.y / d);

        /// <summary>Approximate equality: squared distance below K_EPSILON^2. False when any component is NaN.</summary>
        public static bool operator ==(Vector2F lhs, Vector2F rhs)
        {
            float dx = lhs.x - rhs.x;
            float dy = lhs.y - rhs.y;
            return dx * dx + dy * dy < K_EPSILON * K_EPSILON;
        }

        /// <summary>Negation of the approximate equality above; true in the presence of NaN.</summary>
        public static bool operator !=(Vector2F lhs, Vector2F rhs) => !(lhs == rhs);

        /// <summary>Drops the z component of a Vector3F.</summary>
        public static implicit operator Vector2F(Vector3F v) => new Vector2F(v.x, v.y);

        /// <summary>Promotes to a Vector3 with z = 0.</summary>
        public static implicit operator Vector3(Vector2F v) => new Vector3(v.x, v.y, 0);

        public static readonly Vector2F zeroVector = new Vector2F(0F, 0F);
        public static readonly Vector2F oneVector = new Vector2F(1F, 1F);
        public static readonly Vector2F upVector = new Vector2F(0F, 1F);
        public static readonly Vector2F downVector = new Vector2F(0F, -1F);
        public static readonly Vector2F leftVector = new Vector2F(-1F, 0F);
        public static readonly Vector2F rightVector = new Vector2F(1F, 0F);
        public static readonly Vector2F positiveInfinityVector = new Vector2F(float.PositiveInfinity, float.PositiveInfinity);
        public static readonly Vector2F negativeInfinityVector = new Vector2F(float.NegativeInfinity, float.NegativeInfinity);

        public static Vector2F Zero => zeroVector;

        public static Vector2F One => oneVector;

        public static Vector2F Up => upVector;

        public static Vector2F Down => downVector;

        public static Vector2F Left => leftVector;

        public static Vector2F Right => rightVector;

        public static Vector2F PositiveInfinity => positiveInfinityVector;
+ + public static Vector2F NegativeInfinity => negativeInfinityVector; + + public const float K_EPSILON = 0.00001F; + + public const float K_EPSILON_NORMAL_SQRT = 1e-15f; + } +} diff --git a/LightlessSync/ThirdParty/Nanomesh/Base/Vector2FComparer.cs b/LightlessSync/ThirdParty/Nanomesh/Base/Vector2FComparer.cs new file mode 100644 index 0000000..2519aaf --- /dev/null +++ b/LightlessSync/ThirdParty/Nanomesh/Base/Vector2FComparer.cs @@ -0,0 +1,28 @@ +using System.Collections.Generic; + +namespace Nanomesh +{ + public class Vector2FComparer : IEqualityComparer + { + private static Vector2FComparer _instance; + public static Vector2FComparer Default => _instance ?? (_instance = new Vector2FComparer(0.0001f)); + + private readonly float _tolerance; + + public Vector2FComparer(float tolerance) + { + _tolerance = tolerance; + } + + public bool Equals(Vector2F x, Vector2F y) + { + return (int)(x.x / _tolerance) == (int)(y.x / _tolerance) + && (int)(x.y / _tolerance) == (int)(y.y / _tolerance); + } + + public int GetHashCode(Vector2F obj) + { + return (int)(obj.x / _tolerance) ^ ((int)(obj.y / _tolerance) << 2); + } + } +} \ No newline at end of file diff --git a/LightlessSync/ThirdParty/Nanomesh/Base/Vector3.cs b/LightlessSync/ThirdParty/Nanomesh/Base/Vector3.cs new file mode 100644 index 0000000..96f79f9 --- /dev/null +++ b/LightlessSync/ThirdParty/Nanomesh/Base/Vector3.cs @@ -0,0 +1,191 @@ +using System; + +namespace Nanomesh +{ + public readonly struct Vector3 : IEquatable, IInterpolable + { + public readonly double x; + public readonly double y; + public readonly double z; + + public Vector3(double x, double y, double z) + { + this.x = x; + this.y = y; + this.z = z; + } + + public Vector3(double x, double y) + { + this.x = x; + this.y = y; + z = 0.0; + } + + public double this[int index] + { + get + { + switch (index) + { + case 0: return x; + case 1: return y; + case 2: return z; + default: + throw new IndexOutOfRangeException("Invalid Vector3 index!"); + } + } + } + + 
public override int GetHashCode() + { + return x.GetHashCode() ^ (y.GetHashCode() << 2) ^ (z.GetHashCode() >> 2); + } + + public override bool Equals(object other) + { + if (!(other is Vector3)) + { + return false; + } + + return Equals((Vector3)other); + } + + public bool Equals(Vector3 other) + { + return x == other.x && y == other.y && z == other.z; + } + + public static Vector3 operator +(in Vector3 a, in Vector3 b) { return new Vector3(a.x + b.x, a.y + b.y, a.z + b.z); } + + public static Vector3 operator -(in Vector3 a, in Vector3 b) { return new Vector3(a.x - b.x, a.y - b.y, a.z - b.z); } + + public static Vector3 operator -(in Vector3 a) { return new Vector3(-a.x, -a.y, -a.z); } + + public static Vector3 operator *(in Vector3 a, double d) { return new Vector3(a.x * d, a.y * d, a.z * d); } + + public static Vector3 operator *(double d, in Vector3 a) { return new Vector3(a.x * d, a.y * d, a.z * d); } + + public static Vector3 operator /(in Vector3 a, double d) { return new Vector3(MathUtils.DivideSafe(a.x, d), MathUtils.DivideSafe(a.y, d), MathUtils.DivideSafe(a.z, d)); } + + public static bool operator ==(in Vector3 lhs, in Vector3 rhs) + { + double diff_x = lhs.x - rhs.x; + double diff_y = lhs.y - rhs.y; + double diff_z = lhs.z - rhs.z; + double sqrmag = diff_x * diff_x + diff_y * diff_y + diff_z * diff_z; + return sqrmag < MathUtils.EpsilonDouble; + } + + public static bool operator !=(in Vector3 lhs, in Vector3 rhs) + { + return !(lhs == rhs); + } + public static Vector3 Cross(in Vector3 lhs, in Vector3 rhs) + { + return new Vector3( + lhs.y * rhs.z - lhs.z * rhs.y, + lhs.z * rhs.x - lhs.x * rhs.z, + lhs.x * rhs.y - lhs.y * rhs.x); + } + + public static implicit operator Vector3F(Vector3 vec) + { + return new Vector3F((float)vec.x, (float)vec.y, (float)vec.z); + } + + public static explicit operator Vector3(Vector3F vec) + { + return new Vector3(vec.x, vec.y, vec.z); + } + + public static double Dot(in Vector3 lhs, in Vector3 rhs) + { + return lhs.x * 
rhs.x + lhs.y * rhs.y + lhs.z * rhs.z; + } + + public static Vector3 Normalize(in Vector3 value) + { + double mag = Magnitude(value); + return value / mag; + } + + public Vector3 Normalized => Vector3.Normalize(this); + + public static double Distance(in Vector3 a, in Vector3 b) + { + double diff_x = a.x - b.x; + double diff_y = a.y - b.y; + double diff_z = a.z - b.z; + return Math.Sqrt(diff_x * diff_x + diff_y * diff_y + diff_z * diff_z); + } + + public static double Magnitude(in Vector3 vector) + { + return Math.Sqrt(vector.x * vector.x + vector.y * vector.y + vector.z * vector.z); + } + + public static Vector3 ProjectPointOnLine(in Vector3 linePoint, in Vector3 lineVec, in Vector3 point) + { + Vector3 linePointToPoint = point - linePoint; + return linePoint + lineVec * Dot(linePointToPoint, lineVec); + } + + public static double DistancePointLine(in Vector3 point, in Vector3 lineStart, in Vector3 lineEnd) + { + return Magnitude(ProjectPointOnLine(lineStart, (lineEnd - lineStart).Normalized, point) - point); + } + + public double LengthSquared => x * x + y * y + z * z; + + public double Length => Math.Sqrt(x * x + y * y + z * z); + + public static Vector3 Min(in Vector3 lhs, in Vector3 rhs) + { + return new Vector3(Math.Min(lhs.x, rhs.x), Math.Min(lhs.y, rhs.y), Math.Min(lhs.z, rhs.z)); + } + + public static Vector3 Max(in Vector3 lhs, in Vector3 rhs) + { + return new Vector3(Math.Max(lhs.x, rhs.x), Math.Max(lhs.y, rhs.y), Math.Max(lhs.z, rhs.z)); + } + + public static readonly Vector3 zeroVector = new Vector3(0f, 0f, 0f); + public static readonly Vector3 oneVector = new Vector3(1f, 1f, 1f); + public static readonly Vector3 positiveInfinityVector = new Vector3(float.PositiveInfinity, float.PositiveInfinity, float.PositiveInfinity); + public static readonly Vector3 negativeInfinityVector = new Vector3(float.NegativeInfinity, float.NegativeInfinity, float.NegativeInfinity); + + public static Vector3 Zero => zeroVector; + + public static Vector3 One => oneVector; + 
+ public static Vector3 PositiveInfinity => positiveInfinityVector; + + public static Vector3 NegativeInfinity => negativeInfinityVector; + + public static double AngleRadians(in Vector3 from, in Vector3 to) + { + double denominator = Math.Sqrt(from.LengthSquared * to.LengthSquared); + if (denominator < 1e-15F) + { + return 0F; + } + + double dot = MathF.Clamp(Dot(from, to) / denominator, -1.0, 1.0); + return Math.Acos(dot); + } + + public static double AngleDegrees(in Vector3 from, in Vector3 to) + { + return AngleRadians(from, to) / Math.PI * 180d; + } + + public override string ToString() + { + return $"{x}, {y}, {z}"; + } + + public Vector3 Interpolate(Vector3 other, double ratio) => this * ratio + other * (1 - ratio); + } +} diff --git a/LightlessSync/ThirdParty/Nanomesh/Base/Vector3Comparer.cs b/LightlessSync/ThirdParty/Nanomesh/Base/Vector3Comparer.cs new file mode 100644 index 0000000..9dbf2fb --- /dev/null +++ b/LightlessSync/ThirdParty/Nanomesh/Base/Vector3Comparer.cs @@ -0,0 +1,26 @@ +using System.Collections.Generic; + +namespace Nanomesh +{ + public class Vector3Comparer : IEqualityComparer + { + private readonly double _tolerance; + + public Vector3Comparer(double tolerance) + { + _tolerance = tolerance; + } + + public bool Equals(Vector3 x, Vector3 y) + { + return (int)(x.x / _tolerance) == (int)(y.x / _tolerance) + && (int)(x.y / _tolerance) == (int)(y.y / _tolerance) + && (int)(x.z / _tolerance) == (int)(y.z / _tolerance); + } + + public int GetHashCode(Vector3 obj) + { + return (int)(obj.x / _tolerance) ^ ((int)(obj.y / _tolerance) << 2) ^ ((int)(obj.z / _tolerance) >> 2); + } + } +} \ No newline at end of file diff --git a/LightlessSync/ThirdParty/Nanomesh/Base/Vector3F.cs b/LightlessSync/ThirdParty/Nanomesh/Base/Vector3F.cs new file mode 100644 index 0000000..57b92bf --- /dev/null +++ b/LightlessSync/ThirdParty/Nanomesh/Base/Vector3F.cs @@ -0,0 +1,172 @@ +using System; + +namespace Nanomesh +{ + public readonly struct Vector3F : IEquatable, 
IInterpolable<Vector3F>
    {
        public readonly float x;
        public readonly float y;
        public readonly float z;

        public Vector3F(float x, float y, float z)
        {
            this.x = x;
            this.y = y;
            this.z = z;
        }

        /// <summary>2D constructor; z is set to 0.</summary>
        public Vector3F(float x, float y)
        {
            this.x = x;
            this.y = y;
            z = 0F;
        }

        /// <summary>Component access: index 0 is x, 1 is y, 2 is z.</summary>
        public float this[int index]
        {
            get
            {
                return index switch
                {
                    0 => x,
                    1 => y,
                    2 => z,
                    _ => throw new IndexOutOfRangeException("Invalid Vector3F index!"),
                };
            }
        }

        /// <summary>Hash of the tolerance buckets used by Vector3FComparer.Default (consistent with Equals).</summary>
        public override int GetHashCode() => Vector3FComparer.Default.GetHashCode(this);

        public override bool Equals(object other) => other is Vector3F v && Equals(v);

        /// <summary>Tolerance-based equality, delegated to Vector3FComparer.Default.</summary>
        public bool Equals(Vector3F other) => Vector3FComparer.Default.Equals(this, other);

        public static Vector3F operator +(in Vector3F a, in Vector3F b) => new Vector3F(a.x + b.x, a.y + b.y, a.z + b.z);

        public static Vector3F operator -(in Vector3F a, in Vector3F b) => new Vector3F(a.x - b.x, a.y - b.y, a.z - b.z);

        public static Vector3F operator -(in Vector3F a) => new Vector3F(-a.x, -a.y, -a.z);

        public static Vector3F operator *(in Vector3F a, float d) => new Vector3F(a.x * d, a.y * d, a.z * d);

        public static Vector3F operator *(float d, in Vector3F a) => new Vector3F(a.x * d, a.y * d, a.z * d);

        /// <summary>Scaling by a double widens to a double-precision Vector3.</summary>
        public static Vector3 operator *(double d, in Vector3F a) => new Vector3(a.x * d, a.y * d, a.z * d);

        /// <summary>Uniform division, routed through MathUtils.DivideSafe per component.</summary>
        public static Vector3F operator /(in Vector3F a, float d) => new Vector3F(MathUtils.DivideSafe(a.x, d), MathUtils.DivideSafe(a.y, d), MathUtils.DivideSafe(a.z, d));

        /// <summary>Approximate equality: squared distance below MathUtils.EpsilonFloat. False when NaN is involved.</summary>
        public static bool operator ==(in Vector3F lhs, in Vector3F rhs)
        {
            float dx = lhs.x - rhs.x;
            float dy = lhs.y - rhs.y;
            float dz = lhs.z - rhs.z;
            return dx * dx + dy * dy + dz * dz < MathUtils.EpsilonFloat;
        }

        public static bool operator !=(in Vector3F lhs, in Vector3F rhs) => !(lhs == rhs);

        /// <summary>Cross product.</summary>
        public static Vector3F Cross(in Vector3F lhs, in Vector3F rhs)
        {
            return new Vector3F(
                lhs.y * rhs.z - lhs.z * rhs.y,
                lhs.z * rhs.x - lhs.x * rhs.z,
                lhs.x * rhs.y - lhs.y * rhs.x);
        }

        /// <summary>Dot product.</summary>
        public static float Dot(in Vector3F lhs, in Vector3F rhs) => lhs.x * rhs.x + lhs.y * rhs.y + lhs.z * rhs.z;

        /// <summary>Unit-length copy (the division goes through DivideSafe via operator /).</summary>
        public static Vector3F Normalize(in Vector3F value) => value / Magnitude(value);

        public Vector3F Normalized => Normalize(this);

        /// <summary>Euclidean distance between two points.</summary>
        public static float Distance(in Vector3F a, in Vector3F b)
        {
            float dx = a.x - b.x;
            float dy = a.y - b.y;
            float dz = a.z - b.z;
            return MathF.Sqrt(dx * dx + dy * dy + dz * dz);
        }

        /// <summary>Length of <paramref name="vector"/>.</summary>
        public static float Magnitude(in Vector3F vector) => MathF.Sqrt(vector.x * vector.x + vector.y * vector.y + vector.z * vector.z);

        /// <summary>Squared length of this vector.</summary>
        public float SqrMagnitude => x * x + y * y + z * z;

        /// <summary>Component-wise minimum.</summary>
        public static Vector3F Min(in Vector3F lhs, in Vector3F rhs) => new Vector3F(MathF.Min(lhs.x, rhs.x), MathF.Min(lhs.y, rhs.y), MathF.Min(lhs.z, rhs.z));

        /// <summary>Component-wise maximum.</summary>
        public static Vector3F Max(in Vector3F lhs, in Vector3F rhs) => new Vector3F(MathF.Max(lhs.x, rhs.x), MathF.Max(lhs.y, rhs.y), MathF.Max(lhs.z, rhs.z));

        public static readonly Vector3F zeroVector = new Vector3F(0f, 0f, 0f);
        public static readonly Vector3F oneVector = new Vector3F(1f, 1f, 1f);
        public static readonly Vector3F positiveInfinityVector = new Vector3F(float.PositiveInfinity, float.PositiveInfinity, float.PositiveInfinity);
        public static readonly Vector3F negativeInfinityVector = new Vector3F(float.NegativeInfinity, float.NegativeInfinity, float.NegativeInfinity);

        public static Vector3F Zero => zeroVector;

        public
static Vector3F One => oneVector;

        public static Vector3F PositiveInfinity => positiveInfinityVector;

        public static Vector3F NegativeInfinity => negativeInfinityVector;

        /// <summary>Unsigned angle in radians between <paramref name="from"/> and <paramref name="to"/> (0 for degenerate inputs).</summary>
        public static float AngleRadians(in Vector3F from, in Vector3F to)
        {
            float denominator = MathF.Sqrt(from.SqrMagnitude * to.SqrMagnitude);
            if (denominator < 1e-15F)
            {
                return 0F;
            }

            float dot = MathF.Clamp(Dot(from, to) / denominator, -1F, 1F);
            return MathF.Acos(dot);
        }

        /// <summary>Unsigned angle in degrees.</summary>
        public static float AngleDegrees(in Vector3F from, in Vector3F to)
        {
            return AngleRadians(from, to) / MathF.PI * 180f;
        }

        public override string ToString()
        {
            return $"{x}, {y}, {z}";
        }

        /// <summary>
        /// Blend where ratio scales THIS vector, then the weighted sum is re-normalized via Normalized.
        /// Note this differs from Vector3.Interpolate, which does not normalize its result.
        /// </summary>
        public Vector3F Interpolate(Vector3F other, double ratio) => (ratio * this + (1 - ratio) * other).Normalized;
    }
}

namespace Nanomesh
{
    using System.Collections.Generic;

    /// <summary>
    /// Bucketed approximate equality for Vector3F (components quantized to multiples of the tolerance).
    /// Not transitive; values within the tolerance can still straddle a bucket boundary.
    /// </summary>
    public class Vector3FComparer : IEqualityComparer<Vector3F>
    {
        private static Vector3FComparer _instance;

        /// <summary>Shared comparer with a 0.001 tolerance. Lazy init is not synchronized; a racy first
        /// access may allocate twice, which is harmless.</summary>
        public static Vector3FComparer Default => _instance ??= new Vector3FComparer(0.001f);

        private readonly float _tolerance;

        public Vector3FComparer(float tolerance)
        {
            _tolerance = tolerance;
        }

        public bool Equals(Vector3F x, Vector3F y)
        {
            return (int)(x.x / _tolerance) == (int)(y.x / _tolerance)
                && (int)(x.y / _tolerance) == (int)(y.y / _tolerance)
                && (int)(x.z / _tolerance) == (int)(y.z / _tolerance);
        }

        public int GetHashCode(Vector3F obj)
        {
            return (int)(obj.x / _tolerance) ^ ((int)(obj.y / _tolerance) << 2) ^ ((int)(obj.z / _tolerance) >> 2);
        }
    }
}

namespace Nanomesh
{
    using System;

    /// <summary>Single-precision 4D vector; equality and hashing delegate to Vector4FComparer.Default.</summary>
    public readonly struct Vector4F : IEquatable<Vector4F>, IInterpolable<Vector4F>
    {
        public readonly float x;
        public readonly float y;
        public readonly float z;
        public readonly float w;

        public Vector4F(float x, float y, float z, float w)
        {
            this.x = x;
            this.y = y;
            this.z = z;
            this.w = w;
        }

        /// <summary>Component access: 0 is x, 1 is y, 2 is z, 3 is w.</summary>
        public float this[int index]
        {
            get
            {
                switch (index)
                {
                    case 0: return x;
                    case 1: return y;
                    case 2: return z;
                    case 3: return w;
                    default:
                        throw new IndexOutOfRangeException("Invalid Vector4F index!");
                }
            }
        }

        /// <summary>Hash of the tolerance buckets used by Vector4FComparer.Default (consistent with Equals).</summary>
        public override int GetHashCode()
        {
            return Vector4FComparer.Default.GetHashCode(this);
        }

        public override bool Equals(object other)
        {
            if (!(other is Vector4F))
            {
                return false;
            }

            return Equals((Vector4F)other);
        }

        /// <summary>Tolerance-based equality, delegated to Vector4FComparer.Default.</summary>
        public bool Equals(Vector4F other)
        {
            return Vector4FComparer.Default.Equals(this, other);
        }

        public static Vector4F operator +(in Vector4F a, in Vector4F b)
            => new(a.x + b.x, a.y + b.y, a.z + b.z, a.w + b.w);

        public static Vector4F operator -(in Vector4F a, in Vector4F b)
            => new(a.x - b.x, a.y - b.y, a.z - b.z, a.w - b.w);

        public
static Vector4F operator *(in Vector4F a, float d)
            => new(a.x * d, a.y * d, a.z * d, a.w * d);

        public static Vector4F operator *(float d, in Vector4F a)
            => new(a.x * d, a.y * d, a.z * d, a.w * d);

        /// <summary>Uniform division, routed through MathUtils.DivideSafe per component.</summary>
        public static Vector4F operator /(in Vector4F a, float d)
            => new(MathUtils.DivideSafe(a.x, d), MathUtils.DivideSafe(a.y, d), MathUtils.DivideSafe(a.z, d), MathUtils.DivideSafe(a.w, d));

        /// <summary>Tolerance-bucketed equality via Vector4FComparer.Default (not an epsilon-distance test).</summary>
        public static bool operator ==(in Vector4F lhs, in Vector4F rhs)
            => Vector4FComparer.Default.Equals(lhs, rhs);

        public static bool operator !=(in Vector4F lhs, in Vector4F rhs)
            => !Vector4FComparer.Default.Equals(lhs, rhs);

        /// <summary>Dot product.</summary>
        public static float Dot(in Vector4F lhs, in Vector4F rhs)
            => (lhs.x * rhs.x) + (lhs.y * rhs.y) + (lhs.z * rhs.z) + (lhs.w * rhs.w);

        /// <summary>
        /// Linear blend where ratio scales OTHER: ratio == 0 yields this, ratio == 1 yields other.
        /// NOTE(review): this is the opposite weighting of Vector2/Vector3.Interpolate (there ratio
        /// scales this). Kept as-is for compatibility; confirm the intended convention before unifying.
        /// </summary>
        public Vector4F Interpolate(Vector4F other, double ratio)
        {
            var t = (float)ratio;
            var inv = 1f - t;
            return new Vector4F(
                (x * inv) + (other.x * t),
                (y * inv) + (other.y * t),
                (z * inv) + (other.z * t),
                (w * inv) + (other.w * t));
        }
    }
}

namespace Nanomesh
{
    using System.Collections.Generic;

    /// <summary>
    /// Bucketed approximate equality for Vector4F (components quantized to multiples of the tolerance).
    /// Not transitive; values within the tolerance can still straddle a bucket boundary.
    /// </summary>
    public class Vector4FComparer : IEqualityComparer<Vector4F>
    {
        private static Vector4FComparer? _instance;

        /// <summary>Shared comparer with a 0.0001 tolerance (unsynchronized lazy init; a racy double
        /// allocation is harmless).</summary>
        public static Vector4FComparer Default => _instance ??= new Vector4FComparer(0.0001f);

        private readonly float _tolerance;

        public Vector4FComparer(float tolerance)
        {
            _tolerance = tolerance;
        }

        public bool Equals(Vector4F x, Vector4F y)
        {
            return (int)(x.x / _tolerance) == (int)(y.x / _tolerance)
                && (int)(x.y / _tolerance) == (int)(y.y / _tolerance)
                && (int)(x.z / _tolerance) == (int)(y.z / _tolerance)
                && (int)(x.w / _tolerance) == (int)(y.w / _tolerance);
        }

        public int GetHashCode(Vector4F obj)
        {
            return (int)(obj.x / _tolerance)
                ^ ((int)(obj.y / _tolerance) << 2)
                ^ ((int)(obj.z / _tolerance) >> 2)
                ^ ((int)(obj.w / _tolerance) << 1);
        }
    }
}

namespace Nanomesh
{
    using System.Collections.Generic;

    /// <summary>
    /// A vertex: a position index plus a per-vertex attribute list. This is a mutable struct holding a
    /// reference-type list, so copies share the same list, and default(VertexData) has a null
    /// attributes list that the members below do not guard against.
    /// </summary>
    public struct VertexData : IEquatable<VertexData>
    {
        public int position;
        public List<object> attributes; // TODO : This is not optimal regarding memory

        public VertexData(int pos)
        {
            position = pos;
            attributes = new List<object>();
        }

        /// <summary>Order-sensitive hash of the position and every attribute (consistent with Equals).</summary>
        public override int GetHashCode()
        {
            unchecked
            {
                int hash = 17;
                hash = hash * 31 + position;
                foreach (object attr in attributes)
                {
                    hash = hash * 31 + attr.GetHashCode();
                }
                return hash;
            }
        }

        // FIX: added so object.Equals agrees with IEquatable<VertexData>.Equals and GetHashCode.
        // The inherited ValueType.Equals would compare the attributes list by reference, diverging
        // from the element-wise comparison below.
        public override bool Equals(object obj) => obj is VertexData other && Equals(other);

        /// <summary>True when positions match and the attribute lists are element-wise equal, in order.</summary>
        public bool Equals(VertexData other)
        {
            if (!position.Equals(other.position))
                return false;

            if (attributes.Count != other.attributes.Count)
                return false;

            for (int i = 0; i < attributes.Count; i++)
            {
                if (!attributes[i].Equals(other.attributes[i]))
                    return false;
            }

            return true;
        }
    }
}
using System;
using System.Collections.Generic;

namespace Nanomesh
{
    /// <summary>Small collection helpers used across Nanomesh.</summary>
    public static class CollectionUtils
    {
        /// <summary>
        /// Copies the set's items into the caller-supplied <paramref name="array"/>, which is also
        /// returned. Only the first items.Count slots are written; remaining slots keep their values.
        /// </summary>
        /// <exception cref="ArgumentException">The array is too small to hold the set.</exception>
        public static T[] ToArray<T>(this HashSet<T> items, ref T[] array)
        {
            // FIX: fail fast with a clear message instead of an IndexOutOfRangeException mid-copy.
            if (array.Length < items.Count)
            {
                throw new ArgumentException("Destination array is smaller than the set.", nameof(array));
            }

            int i = 0;
            foreach (T item in items)
            {
                array[i++] = item;
            }

            return array;
        }

        /// <summary>Adds the pair only when <paramref name="key"/> is absent; returns true when added.
        /// (On frameworks that ship an instance Dictionary.TryAdd, dot-syntax calls bind to that
        /// instance method instead of this extension.)</summary>
        public static bool TryAdd<K, V>(this Dictionary<K, V> dictionary, K key, V value)
        {
            if (dictionary.ContainsKey(key))
            {
                return false;
            }

            dictionary.Add(key, value);
            return true;
        }

        /// <summary>Adds a lazily produced value only when <paramref name="key"/> is absent; the
        /// factory runs only on insertion. Returns true when added.</summary>
        public static bool TryAdd<K, V>(this Dictionary<K, V> dictionary, K key, Func<K, V> valueFactory)
        {
            if (dictionary.ContainsKey(key))
            {
                return false;
            }

            dictionary.Add(key, valueFactory(key));
            return true;
        }

        /// <summary>Returns the existing value for <paramref name="key"/>, or inserts and returns
        /// <paramref name="value"/>.</summary>
        public static V GetOrAdd<K, V>(this Dictionary<K, V> dictionary, K key, V value)
        {
            if (dictionary.TryGetValue(key, out V existingValue))
            {
                return existingValue;
            }

            dictionary.Add(key, value);
            return value;
        }
    }
}
IReadOnlyCollection, ISet + { + private const int MaxSlotsArraySize = int.MaxValue - 2; + + // this is the size of the non-hash array used to make small counts of items faster + private const int InitialArraySize = 8; + + // this is the # of initial nodes for the slots array after going into hashing after using the noHashArray + // this is 16 + 1; the + 1 is for the first node (node at index 0) which doesn't get used because 0 is the NullIndex + private const int InitialSlotsArraySize = 17; + + // this indicates end of chain if the nextIndex of a node has this value and also indicates no chain if a buckets array element has this value + private const int NullIndex = 0; + + // if a node's nextIndex = this value, then it is a blank node - this isn't a valid nextIndex when unmarked and also when marked (because we don't allow int.MaxValue items) + private const int BlankNextIndexIndicator = int.MaxValue; + + // use this instead of the negate negative logic when getting hashindex - this saves an if (hashindex < 0) which can be the source of bad branch prediction + private const int HighBitNotSet = unchecked(0b0111_1111_1111_1111_1111_1111_1111_1111); + + // The Mark... constants below are for marking, unmarking, and checking if an item is marked. + // This is usefull for some set operations. + + // doing an | (bitwise or) with this and the nextIndex marks the node, setting the bit back will give the original nextIndex value + private const int MarkNextIndexBitMask = unchecked((int)0b1000_0000_0000_0000_0000_0000_0000_0000); + + // doing an & (bitwise and) with this and the nextIndex sets it back to the original value (unmarks it) + private const int MarkNextIndexBitMaskInverted = ~MarkNextIndexBitMask; + + // FastHashSet doesn't allow using an item/node index as high as int.MaxValue. 
+ // There are 2 reasons for this: The first is that int.MaxValue is used as a special indicator + private const int LargestPrimeLessThanMaxInt = 2147483629; + + // these are primes above the .75 loadfactor of the power of 2 except from 30,000 through 80,000, where we conserve space to help with cache space + private static readonly int[] bucketsSizeArray = { 11, 23, 47, 89, 173, 347, 691, 1367, 2741, 5471, 10_937, 19_841/*16_411/*21_851*/, 40_241/*32_771/*43_711*/, 84_463/*65_537/*87_383*/, /*131_101*/174_767, + /*262_147*/349_529, 699_053, 1_398_107, 2_796_221, 5_592_407, 11_184_829, 22_369_661, 44_739_259, 89_478_503, 17_8956_983, 35_7913_951, 715_827_947, 143_1655_777, LargestPrimeLessThanMaxInt}; + + // the buckets array can be pre-allocated to a large size, but it's not good to use that entire size for hashing because of cache locality + // instead do at most 3 size steps (for 3 levels of cache) before using its actual allocated size + + // when an initial capacity is selected in the constructor or later, allocate the required space for the buckets array, but only use a subset of this space until the load factor is met + // limit the # of used elements to optimize for cpu caches + private static readonly int[] bucketsSizeArrayForCacheOptimization = { 3_371, 62_851, 701_819 }; + + private const double LoadFactorConst = .75; + + private int currentIndexIntoBucketsSizeArray; + + private int bucketsModSize; + +#if !Exclude_Check_For_Set_Modifications_In_Enumerator + private int incrementForEverySetModification; +#endif + + // resize the buckets array when the count reaches this value + private int resizeBucketsCountThreshold; + + private int count; + + private int nextBlankIndex; + + // this is needed because if items are removed, they get added into the blank list starting at nextBlankIndex, but we may want to TrimExcess capacity, so this is a quick way to see what the ExcessCapacity is + private int firstBlankAtEndIndex; + + private readonly IEqualityComparer 
comparer; + + // make the buckets size a primary number to make the mod function less predictable + private int[] buckets; + + private TNode[] slots; + +#if !Exclude_No_Hash_Array_Implementation + // used for small sets - when the count of items is small, it is usually faster to just use an array of the items and not do hashing at all (this can also use slightly less memory) + // There may be some cases where the sets can be very small, but there can be very many of these sets. This can be good for these cases. + private T[] noHashArray; +#endif + + internal enum FoundType + { + FoundFirstTime, + FoundNotFirstTime, + NotFound + } + + internal struct TNode + { + // the cached hash code of the item - this is so we don't have to call GetHashCode multiple times, also doubles as a nextIndex for blanks, since blank nodes don't need a hash code + public int hashOrNextIndexForBlanks; + + public int nextIndex; + + public T item; + + public TNode(T elem, int nextIndex, int hash) + { + item = elem; + + this.nextIndex = nextIndex; + + hashOrNextIndexForBlanks = hash; + } + } + + // 1 - same constructor params as HashSet + /// Initializes a new instance of the FastHashSet<>. + /// The element type of the FastHashSet. + public FastHashSet() + { + comparer = EqualityComparer.Default; + SetInitialCapacity(InitialArraySize); + } + + // 2 - same constructor params as HashSet + /// Initializes a new instance of the FastHashSet<>. + /// The element type of the FastHashSet. + /// The collection to initially add to the FastHashSet. + public FastHashSet(IEnumerable collection) + { + comparer = EqualityComparer.Default; + AddInitialEnumerable(collection); + } + + // 3 - same constructor params as HashSet + /// Initializes a new instance of the FastHashSet<>. + /// The element type of the FastHashSet. + /// The IEqualityComparer to use for determining equality of elements in the FastHashSet. + public FastHashSet(IEqualityComparer comparer) + { + this.comparer = comparer ?? 
EqualityComparer.Default; + SetInitialCapacity(InitialArraySize); + } + + // 4 - same constructor params as HashSet + /// Initializes a new instance of the FastHashSet<>. + /// The element type of the FastHashSet. + /// The initial capacity of the FastHashSet. + public FastHashSet(int capacity) + { + comparer = EqualityComparer.Default; + SetInitialCapacity(capacity); + } + + // 5 - same constructor params as HashSet + /// Initializes a new instance of the FastHashSet<>. + /// The element type of the FastHashSet + /// The collection to initially add to the FastHashSet. + /// The IEqualityComparer to use for determining equality of elements in the FastHashSet. + public FastHashSet(IEnumerable collection, IEqualityComparer comparer) + { + this.comparer = comparer ?? EqualityComparer.Default; + AddInitialEnumerable(collection); + } + + // 6 - same constructor params as HashSet + /// Initializes a new instance of the FastHashSet<>. + /// The element type of the set + /// The initial capacity of the FastHashSet. + /// The IEqualityComparer to use for determining equality of elements in the FastHashSet. + public FastHashSet(int capacity, IEqualityComparer comparer) + { + this.comparer = comparer ?? EqualityComparer.Default; + SetInitialCapacity(capacity); + } + + /// Initializes a new instance of the FastHashSet<>. + /// The element type of the FastHashSet + /// The collection to initially add to the FastHashSet. + /// True if the collection items are all unique. The collection items can be added more quickly if they are known to be unique. + /// The initial capacity of the FastHashSet. + /// The IEqualityComparer to use for determining equality of elements in the FastHashSet. +#if false // removed for now because it's probably not that useful and needs some changes to be correct + public FastHashSet(IEnumerable collection, bool areAllCollectionItemsDefinitelyUnique, int capacity, IEqualityComparer comparer = null) + { + this.comparer = comparer ?? 
EqualityComparer.Default; + SetInitialCapacity(capacity); + + if (areAllCollectionItemsDefinitelyUnique) + { + // this and the call below must deal correctly with an initial capacity already set + AddInitialUniqueValuesEnumerable(collection); + } + else + { + AddInitialEnumerable(collection); + } + } +#endif + + private void AddInitialUniqueValuesEnumerable(IEnumerable collection) + { + int itemsCount = 0; +#if !Exclude_No_Hash_Array_Implementation + if (IsHashing) + { +#endif + nextBlankIndex = 1; + foreach (T item in collection) + { + int hash = (comparer.GetHashCode(item) & HighBitNotSet); + int hashIndex = hash % bucketsModSize; + + int index = buckets[hashIndex]; + buckets[hashIndex] = nextBlankIndex; + + ref TNode t = ref slots[nextBlankIndex]; + + t.hashOrNextIndexForBlanks = hash; + t.nextIndex = index; + t.item = item; + + nextBlankIndex++; + itemsCount++; + } +#if !Exclude_No_Hash_Array_Implementation + } + else + { + foreach (T item in collection) + { + noHashArray[itemsCount++] = item; + } + } +#endif + count = itemsCount; + firstBlankAtEndIndex = nextBlankIndex; + } + + private void AddInitialEnumerableWithEnoughCapacity(IEnumerable collection) + { + // this assumes we are hashing + foreach (T item in collection) + { + int hash = (comparer.GetHashCode(item) & HighBitNotSet); + int hashIndex = hash % bucketsModSize; + + for (int index = buckets[hashIndex]; index != NullIndex;) + { + ref TNode t = ref slots[index]; + + if (t.hashOrNextIndexForBlanks == hash && comparer.Equals(t.item, item)) + { + goto Found; // item was found + } + + index = t.nextIndex; + } + + ref TNode tBlank = ref slots[nextBlankIndex]; + + tBlank.hashOrNextIndexForBlanks = hash; + tBlank.nextIndex = buckets[hashIndex]; + tBlank.item = item; + + buckets[hashIndex] = nextBlankIndex; + + nextBlankIndex++; + +#if !Exclude_Cache_Optimize_Resize + count++; + + if (count >= resizeBucketsCountThreshold) + { + ResizeBucketsArrayForward(GetNewBucketsArraySize()); + } +#endif + Found:; + } + 
firstBlankAtEndIndex = nextBlankIndex; +#if Exclude_Cache_Optimize_Resize + count = nextBlankIndex - 1; +#endif + } + + private void AddInitialEnumerable(IEnumerable collection) + { + FastHashSet fhset = collection as FastHashSet; + if (fhset != null && Equals(fhset.Comparer, Comparer)) + { + // a set with the same item comparer must have all items unique + // so Count will be the exact Count of the items added + // also don't have to check for equals of items + // and a FastHashSet has the additional advantage of not having to call GetHashCode() if it is hashing + // and it has access to the internal slots array so we don't have to use the foreach/enumerator + + int count = fhset.Count; + SetInitialCapacity(count); + +#if !Exclude_No_Hash_Array_Implementation + if (IsHashing) + { + if (fhset.IsHashing) + { +#endif + // this FastHashSet is hashing and collection is a FastHashSet (with equal comparer) and it is also hashing + + nextBlankIndex = 1; + int maxNodeIndex = fhset.slots.Length - 1; + if (fhset.firstBlankAtEndIndex <= maxNodeIndex) + { + maxNodeIndex = fhset.firstBlankAtEndIndex - 1; + } + + for (int i = 1; i <= maxNodeIndex; i++) + { + ref TNode t2 = ref fhset.slots[i]; + if (t2.nextIndex != BlankNextIndexIndicator) + { + int hash = t2.hashOrNextIndexForBlanks; + int hashIndex = hash % bucketsModSize; + + ref TNode t = ref slots[nextBlankIndex]; + + t.hashOrNextIndexForBlanks = hash; + t.nextIndex = buckets[hashIndex]; + t.item = t2.item; + + buckets[hashIndex] = nextBlankIndex; + + nextBlankIndex++; + } + } + this.count = count; + firstBlankAtEndIndex = nextBlankIndex; +#if !Exclude_No_Hash_Array_Implementation + } + else + { + // this FastHashSet is hashing and collection is a FastHashSet (with equal comparer) and it is NOT hashing + + nextBlankIndex = 1; + for (int i = 0; i < fhset.count; i++) + { + ref T item = ref noHashArray[i]; + + int hash = (comparer.GetHashCode(item) & HighBitNotSet); + int hashIndex = hash % bucketsModSize; + + ref TNode t = ref 
slots[nextBlankIndex]; + + t.hashOrNextIndexForBlanks = hash; + t.nextIndex = buckets[hashIndex]; + t.item = item; + + buckets[hashIndex] = nextBlankIndex; + + nextBlankIndex++; + } + } + } + else + { + // this FastHashSet is not hashing + + AddInitialUniqueValuesEnumerable(collection); + } +#endif + } + else + { + // collection is not a FastHashSet with equal comparer + + HashSet hset = collection as HashSet; + if (hset != null && Equals(hset.Comparer, Comparer)) + { + // a set with the same item comparer must have all items unique + // so Count will be the exact Count of the items added + // also don't have to check for equals of items + + int usedCount = hset.Count; + SetInitialCapacity(usedCount); + + AddInitialUniqueValuesEnumerable(collection); + } + else + { + ICollection coll = collection as ICollection; + if (coll != null) + { + SetInitialCapacity(coll.Count); +#if !Exclude_No_Hash_Array_Implementation + if (IsHashing) + { +#endif + // call SetInitialCapacity and then set the capacity back to get rid of the excess? 
+ + AddInitialEnumerableWithEnoughCapacity(collection); + + TrimExcess(); +#if !Exclude_No_Hash_Array_Implementation + } + else + { + foreach (T item in collection) + { + Add(item); + } + } +#endif + } + else + { + SetInitialCapacity(InitialArraySize); + + foreach (T item in collection) + { + Add(in item); + } + } + } + } + } + + private void SetInitialCapacity(int capacity) + { +#if !Exclude_No_Hash_Array_Implementation + if (capacity > InitialArraySize) + { +#endif + // skip using the array and go right into hashing + InitHashing(capacity); +#if !Exclude_No_Hash_Array_Implementation + } + else + { + CreateNoHashArray(); // don't set the capacity/size of the noHashArray + } +#endif + } + +#if !Exclude_No_Hash_Array_Implementation + // this function can be called to switch from using the noHashArray and start using the hashing arrays (slots and buckets) + // this function can also be called before noHashArray is even allocated in order to skip using the array and go right into hashing + private void SwitchToHashing(int capacityIncrease = -1) + { + InitHashing(capacityIncrease); + + if (noHashArray != null) + { + // i is the index into noHashArray + for (int i = 0; i < count; i++) + { + ref T item = ref noHashArray[i]; + + int hash = (comparer.GetHashCode(item) & HighBitNotSet); + int hashIndex = hash % bucketsModSize; + + ref TNode t = ref slots[nextBlankIndex]; + + t.hashOrNextIndexForBlanks = hash; + t.nextIndex = buckets[hashIndex]; + t.item = item; + + buckets[hashIndex] = nextBlankIndex; + + nextBlankIndex++; + } + noHashArray = null; // this array can now be garbage collected because it is no longer referenced + } + + firstBlankAtEndIndex = nextBlankIndex; + } +#endif + + private void InitHashing(int capacity = -1) + { + int newSlotsArraySize; + int newBucketsArraySize; + int newBucketsArrayModSize; + + bool setThresh = false; + if (capacity == -1) + { + newSlotsArraySize = InitialSlotsArraySize; + + newBucketsArraySize = bucketsSizeArray[0]; + if 
(newBucketsArraySize < newSlotsArraySize) + { + for (currentIndexIntoBucketsSizeArray = 1; currentIndexIntoBucketsSizeArray < bucketsSizeArray.Length; currentIndexIntoBucketsSizeArray++) + { + newBucketsArraySize = bucketsSizeArray[currentIndexIntoBucketsSizeArray]; + if (newBucketsArraySize >= newSlotsArraySize) + { + break; + } + } + } + newBucketsArrayModSize = newBucketsArraySize; + } + else + { + newSlotsArraySize = capacity + 1; // add 1 to accomodate blank first node (node at 0 index) + + newBucketsArraySize = FastHashSetUtil.GetEqualOrClosestHigherPrime((int)(newSlotsArraySize / LoadFactorConst)); + +#if !Exclude_Cache_Optimize_Resize + if (newBucketsArraySize > bucketsSizeArrayForCacheOptimization[0]) + { + newBucketsArrayModSize = bucketsSizeArrayForCacheOptimization[0]; + setThresh = true; + } + else +#endif + { + newBucketsArrayModSize = newBucketsArraySize; + } + } + + if (newSlotsArraySize == 0) + { + // this is an error, the int.MaxValue has been used for capacity and we require more - throw an Exception for this + // could try this with HashSet and see what exception it throws? 
+ throw new InvalidOperationException("Exceeded maximum number of items allowed for this container."); + } + + slots = new TNode[newSlotsArraySize]; // the slots array has an extra item as it's first item (0 index) that is for available items - the memory is wasted, but it simplifies things + buckets = new int[newBucketsArraySize]; // these will be initially set to 0, so make 0 the blank(available) value and reduce all indices by one to get to the actual index into the slots array + bucketsModSize = newBucketsArrayModSize; + + if (setThresh) + { + resizeBucketsCountThreshold = (int)(newBucketsArrayModSize * LoadFactorConst); + } + else + { + CalcUsedItemsLoadFactorThreshold(); + } + + nextBlankIndex = 1; // start at 1 because 0 is the blank item + + firstBlankAtEndIndex = nextBlankIndex; + } + +#if !Exclude_No_Hash_Array_Implementation + private void CreateNoHashArray() + { + noHashArray = new T[InitialArraySize]; + } +#endif + + private void CalcUsedItemsLoadFactorThreshold() + { + if (buckets != null) + { + if (buckets.Length == bucketsModSize) + { + resizeBucketsCountThreshold = slots.Length; // with this value, the buckets array should always resize after the slots array (in the same public function call) + } + else + { + // when buckets.Length > bucketsModSize, this means we want to more slowly increase the bucketsModSize to keep things in the L1-3 caches + resizeBucketsCountThreshold = (int)(bucketsModSize * LoadFactorConst); + } + } + } + + /// True if the FastHashSet if read-only. This is always false. This is only present to implement ICollection, it has no real value otherwise. + bool ICollection.IsReadOnly => false; + + /// Copies all elements of the FastHashSet<> into an array starting at arrayIndex. This implements ICollection.CopyTo(T[], Int32). + /// The destination array. + /// The starting array index to copy elements to. 
+ public void CopyTo(T[] array, int arrayIndex) + { + CopyTo(array, arrayIndex, count); + } + + /// Copies all elements of the FastHashSet<> into an array starting at the first array index. + /// The destination array. + public void CopyTo(T[] array) + { + CopyTo(array, 0, count); + } + + // not really sure how this can be useful because you never know exactly what elements you will get copied (unless you copy them all) + // it could easily vary for different implementations or if items were added in different order or if items were added removed and then added, instead of just added + /// Copies count number of elements of the FastHashSet<> into an array starting at arrayIndex. + /// The destination array. + /// The starting array index to copy elements to. + /// The number of elements to copy. + public void CopyTo(T[] array, int arrayIndex, int count) + { + if (array == null) + { + throw new ArgumentNullException(nameof(array), "Value cannot be null."); + } + + if (arrayIndex < 0) + { + throw new ArgumentOutOfRangeException(nameof(arrayIndex), "Non negative number is required."); + } + + if (count < 0) + { + throw new ArgumentOutOfRangeException(nameof(count), "Non negative number is required."); + } + + if (arrayIndex + count > array.Length) + { + throw new ArgumentException("Destination array is not long enough to copy all the items in the collection. 
Check array index and length."); + } + + if (count == 0) + { + return; + } + +#if !Exclude_No_Hash_Array_Implementation + if (IsHashing) + { +#endif + int pastNodeIndex = slots.Length; + if (firstBlankAtEndIndex < pastNodeIndex) + { + pastNodeIndex = firstBlankAtEndIndex; + } + + int cnt = 0; + for (int i = 1; i < pastNodeIndex; i++) + { + if (slots[i].nextIndex != BlankNextIndexIndicator) + { + array[arrayIndex++] = slots[i].item; + if (++cnt == count) + { + break; + } + } + } +#if !Exclude_No_Hash_Array_Implementation + } + else + { + int cnt = this.count; + if (cnt > count) + { + cnt = count; + } + + // for small arrays, I think the for loop below will actually be faster than Array.Copy because of the overhead of that function - could test this + //Array.Copy(noHashArray, 0, array, arrayIndex, cnt); + + for (int i = 0; i < cnt; i++) + { + array[arrayIndex++] = noHashArray[i]; + } + } +#endif + } + + /// + /// Gets the IEqualityComparer used to determine equality for items of this FastHashSet. + /// + public IEqualityComparer Comparer => + // if not set, return the default - this is what HashSet does + // even if it is set to null explicitly, it will still return the default + // this behavior is implmented in the constructor + comparer; + + /// + /// >Gets the number of items in this FastHashSet. + /// + public int Count => count; + + // this is the percent of used items to all items (used + blank/available) + // at which point any additional added items will + // first resize the buckets array to the next prime to avoid too many collisions and chains becoming too large + /// + /// Gets the fraction of 'used items count' divided by 'used items plus available/blank items count'. + /// The buckets array is resized when adding items and this fraction is reached, so this is the minimum LoadFactor for the buckets array. 
+ /// + public double LoadFactor => LoadFactorConst; + + // this is the capacity that can be trimmed with TrimExcessCapacity + // items that were removed from the hash arrays can't be trimmed by calling TrimExcessCapacity, only the blank items at the end + // items that were removed from the noHashArray can be trimmed by calling TrimExcessCapacity because the items after are moved to fill the blank space + /// + /// Gets the capacity that can be trimmed with TrimExcessCapacity. + /// + public int ExcessCapacity + { + get + { + int excessCapacity; +#if !Exclude_No_Hash_Array_Implementation + if (IsHashing) + { +#endif + excessCapacity = slots.Length - firstBlankAtEndIndex; +#if !Exclude_No_Hash_Array_Implementation + } + else + { + excessCapacity = noHashArray.Length - count; + } +#endif + return excessCapacity; + } + } + + /// + /// Gets the capacity of the FastHashSet, which is the number of elements that can be contained without resizing. + /// + public int Capacity + { + get + { +#if !Exclude_No_Hash_Array_Implementation + if (IsHashing) + { +#endif + return slots.Length - 1; // subtract 1 for blank node at 0 index +#if !Exclude_No_Hash_Array_Implementation + } + else + { + return noHashArray.Length; + } +#endif + } + } + + /// + /// Gets the size of the next capacity increase of the FastHashSet. + /// + public int NextCapacityIncreaseSize => GetNewSlotsArraySizeIncrease(out int oldSlotsArraySize); + + /// + /// Gets the count of items when the next capacity increase (resize) of the FastHashSet will happen. 
+ /// + public int NextCapacityIncreaseAtCount => resizeBucketsCountThreshold; + + public bool IsHashing => noHashArray == null; + + // the actual capacity at the end of this function may be more than specified + // (in the case when it was more before this function was called - nothing is trimmed by this function, or in the case that slighly more capacity was allocated by this function) + /// + /// Allocate enough space (or make sure existing space is enough) for capacity number of items to be stored in the FastHashSet without any further allocations. + /// + /// The capacity to ensure. + /// The actual capacity at the end of this function. + public int EnsureCapacity(int capacity) + { + // this function is only in .net core for HashSet as of 4/15/2019 +#if !Exclude_Check_For_Set_Modifications_In_Enumerator + incrementForEverySetModification++; +#endif + + int currentCapacity; + +#if !Exclude_No_Hash_Array_Implementation + if (IsHashing) + { +#endif + currentCapacity = slots.Length - count; +#if !Exclude_No_Hash_Array_Implementation + } + else + { + currentCapacity = noHashArray.Length - count; + } +#endif + + if (currentCapacity < capacity) + { + IncreaseCapacity(capacity - currentCapacity); + } + + // this should be the number where the next lowest number would force a resize of buckets array with the current loadfactor and the entire slots array is full + int calcedNewBucketsArraySize = (int)(slots.Length / LoadFactorConst) + 1; + + if (calcedNewBucketsArraySize < 0 && calcedNewBucketsArraySize > LargestPrimeLessThanMaxInt) + { + calcedNewBucketsArraySize = LargestPrimeLessThanMaxInt; + } + else + { + calcedNewBucketsArraySize = FastHashSetUtil.GetEqualOrClosestHigherPrime(calcedNewBucketsArraySize); + } + + if (buckets.Length < calcedNewBucketsArraySize) + { + // -1 means stop trying to increase the size based on the array of primes + // instead calc based on 2 * existing length and then get the next higher prime + currentIndexIntoBucketsSizeArray = -1; + + 
ResizeBucketsArrayForward(calcedNewBucketsArraySize); + } + + return slots.Length - count; + } + + // return true if bucketsModSize was set, false otherwise + private bool CheckForModSizeIncrease() + { + if (bucketsModSize < buckets.Length) + { + // instead of array, just have 3 constants + int partLength = (int)(buckets.Length * .75); + + int size0 = bucketsSizeArrayForCacheOptimization[0]; + int size1 = bucketsSizeArrayForCacheOptimization[1]; + if (bucketsModSize == size0) + { + if (size1 <= partLength) + { + bucketsModSize = size1; + return true; + } + else + { + bucketsModSize = buckets.Length; + return true; + } + } + else + { + int size2 = bucketsSizeArrayForCacheOptimization[2]; + if (bucketsModSize == size1) + { + if (size2 <= partLength) + { + bucketsModSize = size2; + return true; + } + else + { + bucketsModSize = buckets.Length; + return true; + } + } + else if (bucketsModSize == size2) + { + bucketsModSize = buckets.Length; + return true; + } + } + } + return false; + } + + private int GetNewSlotsArraySizeIncrease(out int oldArraySize) + { + if (slots != null) + { + oldArraySize = slots.Length; + } + else + { + oldArraySize = InitialSlotsArraySize; // this isn't the old array size, but it is the initial size we should start at + } + + int increaseInSize; + + if (oldArraySize == 1) + { + increaseInSize = InitialSlotsArraySize - 1; + } + else + { + increaseInSize = oldArraySize - 1; + } + + int maxIncreaseInSize = MaxSlotsArraySize - oldArraySize; + + if (increaseInSize > maxIncreaseInSize) + { + increaseInSize = maxIncreaseInSize; + } + return increaseInSize; + } + + // if the value returned gets used and that value is different than the current buckets.Length, then the calling code should increment currentIndexIntoSizeArray because this would now be the current + private int GetNewBucketsArraySize() + { + int newArraySize; + + if (currentIndexIntoBucketsSizeArray >= 0) + { + if (currentIndexIntoBucketsSizeArray + 1 < bucketsSizeArray.Length) + { + 
newArraySize = bucketsSizeArray[currentIndexIntoBucketsSizeArray + 1]; + } + else + { + newArraySize = buckets.Length; + } + } + else + { + // -1 means stop trying to increase the size based on the array of primes + // instead calc based on 2 * existing length and then get the next higher prime + newArraySize = buckets.Length; + if (newArraySize < int.MaxValue / 2) + { + newArraySize = FastHashSetUtil.GetEqualOrClosestHigherPrime(newArraySize + newArraySize); + } + else + { + newArraySize = LargestPrimeLessThanMaxInt; + } + } + + return newArraySize; + } + + // if hashing, increase the size of the slots array + // if not yet hashing, switch to hashing + private void IncreaseCapacity(int capacityIncrease = -1) + { + // this function might be a fair bit over overhead for resizing at small sizes (like 33 and 65) + // could try to reduce the overhead - there could just be a nextSlotsArraySize (don't need increase?), or nextSlotsArraySizeIncrease? + // then we don't have to call GetNewSlotsArraySizeIncrease at all? 
+ // could test the overhead by just replacing all of the code with +#if !Exclude_No_Hash_Array_Implementation + if (IsHashing) + { +#endif + int newSlotsArraySizeIncrease; + int oldSlotsArraySize; + + if (capacityIncrease == -1) + { + newSlotsArraySizeIncrease = GetNewSlotsArraySizeIncrease(out oldSlotsArraySize); + } + else + { + newSlotsArraySizeIncrease = capacityIncrease; + oldSlotsArraySize = slots.Length; + } + + if (newSlotsArraySizeIncrease <= 0) + { + throw new InvalidOperationException("Exceeded maximum number of items allowed for this container."); + } + + int newSlotsArraySize = oldSlotsArraySize + newSlotsArraySizeIncrease; + + TNode[] newSlotsArray = new TNode[newSlotsArraySize]; + Array.Copy(slots, 0, newSlotsArray, 0, slots.Length); // check the IL, I think Array.Resize and Array.Copy without the start param calls this, so avoid the overhead by calling directly + slots = newSlotsArray; + +#if !Exclude_No_Hash_Array_Implementation + } + else + { + SwitchToHashing(capacityIncrease); + } +#endif + } + + private TNode[] IncreaseCapacityNoCopy(int capacityIncrease = -1) + { +#if !Exclude_No_Hash_Array_Implementation + if (IsHashing) + { +#endif + int newSlotsrraySizeIncrease; + int oldSlotsArraySize; + + if (capacityIncrease == -1) + { + newSlotsrraySizeIncrease = GetNewSlotsArraySizeIncrease(out oldSlotsArraySize); + } + else + { + newSlotsrraySizeIncrease = capacityIncrease; + oldSlotsArraySize = slots.Length; + } + + if (newSlotsrraySizeIncrease <= 0) + { + throw new InvalidOperationException("Exceeded maximum number of items allowed for this container."); + } + + int newSlotsArraySize = oldSlotsArraySize + newSlotsrraySizeIncrease; + + TNode[] newSlotsArray = new TNode[newSlotsArraySize]; + return newSlotsArray; +#if !Exclude_No_Hash_Array_Implementation + } + else + { + SwitchToHashing(capacityIncrease); + return null; + } +#endif + } + + private void ResizeBucketsArrayForward(int newBucketsArraySize) + { + if (newBucketsArraySize == 
buckets.Length) + { + // this will still work if no increase in size - it just might be slower than if you could increase the buckets array size + } + else + { + if (!CheckForModSizeIncrease()) //??? clean this up, it isn't really good to do it this way - no need to call GetNewBucketsArraySize before calling this function + { + buckets = new int[newBucketsArraySize]; + bucketsModSize = newBucketsArraySize; + + if (currentIndexIntoBucketsSizeArray >= 0) + { + currentIndexIntoBucketsSizeArray++; // when the newBucketsArraySize gets used in the above code, point to the next avaialble size - ??? not sure this is the best place to increment this + } + } + else + { + Array.Clear(buckets, 0, bucketsModSize); + } + + CalcUsedItemsLoadFactorThreshold(); + + int bucketsArrayLength = buckets.Length; + + int pastNodeIndex = slots.Length; + if (firstBlankAtEndIndex < pastNodeIndex) + { + pastNodeIndex = firstBlankAtEndIndex; + } + + //??? for a loop where the end is array.Length, the compiler can skip any array bounds checking - can it do it for this code - it should be able to because pastIndex is no more than buckets.Length + if (firstBlankAtEndIndex == count + 1) + { + // this means there aren't any blank nodes + for (int i = 1; i < pastNodeIndex; i++) + { + ref TNode t = ref slots[i]; + + int hashIndex = t.hashOrNextIndexForBlanks % bucketsArrayLength; + t.nextIndex = buckets[hashIndex]; + + buckets[hashIndex] = i; + } + } + else + { + // this means there are some blank nodes + for (int i = 1; i < pastNodeIndex; i++) + { + ref TNode t = ref slots[i]; + if (t.nextIndex != BlankNextIndexIndicator) // skip blank nodes + { + int hashIndex = t.hashOrNextIndexForBlanks % bucketsArrayLength; + t.nextIndex = buckets[hashIndex]; + + buckets[hashIndex] = i; + } + } + } + } + } + + private void ResizeBucketsArrayForwardKeepMarks(int newBucketsArraySize) + { + if (newBucketsArraySize == buckets.Length) + { + // this will still work if no increase in size - it just might be slower than 
if you could increase the buckets array size + } + else + { + //??? what if there is a high percent of blank/unused items in the slots array before the firstBlankAtEndIndex (mabye because of lots of removes)? + // It would probably be faster to loop through the buckets array and then do chaining to find the used nodes - one problem with this is that you would have to find blank nodes - but they would be chained + // this probably isn't a very likely scenario + + if (!CheckForModSizeIncrease()) //??? clean this up, it isn't really good to do it this way - no need to call GetNewBucketsArraySize before calling this function + { + buckets = new int[newBucketsArraySize]; + bucketsModSize = newBucketsArraySize; + + if (currentIndexIntoBucketsSizeArray >= 0) + { + currentIndexIntoBucketsSizeArray++; // when the newBucketsArraySize gets used in the above code, point to the next avaialble size - ??? not sure this is the best place to increment this + } + } + + CalcUsedItemsLoadFactorThreshold(); + + int bucketsArrayLength = buckets.Length; + + int pastNodeIndex = slots.Length; + if (firstBlankAtEndIndex < pastNodeIndex) + { + pastNodeIndex = firstBlankAtEndIndex; + } + + //??? 
for a loop where the end is array.Length, the compiler can skip any array bounds checking - can it do it for this code - it should be able to because pastIndex is no more than buckets.Length + if (firstBlankAtEndIndex == count + 1) + { + // this means there aren't any blank nodes + for (int i = 1; i < pastNodeIndex; i++) + { + ref TNode t = ref slots[i]; + + int hashIndex = t.hashOrNextIndexForBlanks % bucketsArrayLength; + t.nextIndex = buckets[hashIndex] | (t.nextIndex & MarkNextIndexBitMask); + + buckets[hashIndex] = i; + } + } + else + { + // this means there are some blank nodes + for (int i = 1; i < pastNodeIndex; i++) + { + ref TNode t = ref slots[i]; + if (t.nextIndex != BlankNextIndexIndicator) // skip blank nodes + { + int hashIndex = t.hashOrNextIndexForBlanks % bucketsArrayLength; + t.nextIndex = buckets[hashIndex] | (t.nextIndex & MarkNextIndexBitMask); + + buckets[hashIndex] = i; + } + } + } + } + } + + /// + /// Removes all items from the FastHashSet, but does not do any trimming of the resulting unused memory. + /// To trim the unused memory, call TrimExcess. + /// + public void Clear() + { +#if !Exclude_Check_For_Set_Modifications_In_Enumerator + incrementForEverySetModification++; +#endif + +#if !Exclude_No_Hash_Array_Implementation + if (IsHashing) +#endif + { + firstBlankAtEndIndex = 1; + nextBlankIndex = 1; + Array.Clear(buckets, 0, buckets.Length); + } + + count = 0; + } + + // documentation states: + // You can use the TrimExcess method to minimize a HashSet object's memory overhead once it is known that no new elements will be added + // To completely clear a HashSet object and release all memory referenced by it, call this method after calling the Clear method. + /// + /// Trims excess capacity to minimize the FastHashSet's memory overhead. 
+ /// + public void TrimExcess() + { +#if !Exclude_Check_For_Set_Modifications_In_Enumerator + incrementForEverySetModification++; +#endif + +#if !Exclude_No_Hash_Array_Implementation + if (IsHashing) + { +#endif + if (slots.Length > firstBlankAtEndIndex && firstBlankAtEndIndex > 0) + { + Array.Resize(ref slots, firstBlankAtEndIndex); + // when firstBlankAtEndIndex == slots.Length, that means there are no blank at end items + } +#if !Exclude_No_Hash_Array_Implementation + } + else + { + if (noHashArray != null && noHashArray.Length > count && count > 0) + { + Array.Resize(ref noHashArray, count); + } + } +#endif + } + + // this is only present to implement ICollection - it has no real value otherwise because the Add method with bool return value already does this + /// + /// Implements the ICollection<T> Add method. If possible, use the FastHashSet Add method instead to avoid any slight overhead and return a bool that indicates if the item was added. + /// + /// The item to add. + void ICollection.Add(T item) + { + Add(in item); + } + + // we need 2 versions of Add, one with 'in' and one without 'in' because the one without 'in' is needed to implement the ISet Add method + // always keep the code for these 2 Add methods exactly the same + /// + /// Add an item to the FastHashSet using a read-only reference (in) parameter. Use this version of the Add method when item is a large value type to avoid copying large objects. + /// + /// The item to add. + /// True if the item was added, or false if the FastHashSet already contains the item. 
+ public bool Add(in T item) + { +#if !Exclude_Check_For_Set_Modifications_In_Enumerator + incrementForEverySetModification++; +#endif + +#if !Exclude_No_Hash_Array_Implementation + if (IsHashing) + { +#endif + + int hash = (comparer.GetHashCode(item) & HighBitNotSet); + int hashIndex = hash % bucketsModSize; + + for (int index = buckets[hashIndex]; index != NullIndex;) + { + ref TNode t = ref slots[index]; + + if (t.hashOrNextIndexForBlanks == hash && comparer.Equals(t.item, item)) + { + return false; // item was found, so return false to indicate it was not added + } + + index = t.nextIndex; + } + + if (nextBlankIndex >= slots.Length) + { + // there aren't any more blank nodes to add items, so we need to increase capacity + IncreaseCapacity(); + } + + int firstIndex = buckets[hashIndex]; + buckets[hashIndex] = nextBlankIndex; + + ref TNode tBlank = ref slots[nextBlankIndex]; + if (nextBlankIndex >= firstBlankAtEndIndex) + { + // the blank nodes starting at firstBlankAtEndIndex aren't chained + nextBlankIndex = ++firstBlankAtEndIndex; + } + else + { + // the blank nodes before firstBlankAtEndIndex are chained (the hashOrNextIndexForBlanks points to the next blank node) + nextBlankIndex = tBlank.hashOrNextIndexForBlanks; + } + + tBlank.hashOrNextIndexForBlanks = hash; + tBlank.nextIndex = firstIndex; + tBlank.item = item; + + count++; + + if (count >= resizeBucketsCountThreshold) + { + ResizeBucketsArrayForward(GetNewBucketsArraySize()); + } + + return true; +#if !Exclude_No_Hash_Array_Implementation + } + else + { + int i; + for (i = 0; i < count; i++) + { + if (comparer.Equals(item, noHashArray[i])) + { + return false; + } + } + + if (i == noHashArray.Length) + { + SwitchToHashing(); + + int hash = (comparer.GetHashCode(item) & HighBitNotSet); + int hashIndex = hash % bucketsModSize; + + ref TNode tBlank = ref slots[nextBlankIndex]; + + tBlank.hashOrNextIndexForBlanks = hash; + tBlank.nextIndex = buckets[hashIndex]; + tBlank.item = item; + + buckets[hashIndex] = 
nextBlankIndex; + + nextBlankIndex = ++firstBlankAtEndIndex; + + count++; + + return true; + } + else + { + // add to noHashArray + noHashArray[i] = item; + count++; + return true; + } + } +#endif + } + + /// + /// Add an item to the FastHashSet. + /// + /// The item to add. + /// True if the item was added, or false if the FastHashSet already contains the item. + public bool Add(T item) + { +#if !Exclude_Check_For_Set_Modifications_In_Enumerator + incrementForEverySetModification++; +#endif + +#if !Exclude_No_Hash_Array_Implementation + if (IsHashing) + { +#endif + + int hash = (comparer.GetHashCode(item) & HighBitNotSet); + int hashIndex = hash % bucketsModSize; + + for (int index = buckets[hashIndex]; index != NullIndex;) + { + ref TNode t = ref slots[index]; + + if (t.hashOrNextIndexForBlanks == hash && comparer.Equals(t.item, item)) + { + return false; // item was found, so return false to indicate it was not added + } + + index = t.nextIndex; + } + + if (nextBlankIndex >= slots.Length) + { + // there aren't any more blank nodes to add items, so we need to increase capacity + IncreaseCapacity(); + } + + int firstIndex = buckets[hashIndex]; + buckets[hashIndex] = nextBlankIndex; + + ref TNode tBlank = ref slots[nextBlankIndex]; + if (nextBlankIndex >= firstBlankAtEndIndex) + { + // the blank nodes starting at firstBlankAtEndIndex aren't chained + nextBlankIndex = ++firstBlankAtEndIndex; + } + else + { + // the blank nodes before firstBlankAtEndIndex are chained (the hashOrNextIndexForBlanks points to the next blank node) + nextBlankIndex = tBlank.hashOrNextIndexForBlanks; + } + + tBlank.hashOrNextIndexForBlanks = hash; + tBlank.nextIndex = firstIndex; + tBlank.item = item; + + count++; + + if (count >= resizeBucketsCountThreshold) + { + ResizeBucketsArrayForward(GetNewBucketsArraySize()); + } + + return true; +#if !Exclude_No_Hash_Array_Implementation + } + else + { + int i; + for (i = 0; i < count; i++) + { + if (comparer.Equals(item, noHashArray[i])) + { + 
return false; + } + } + + if (i == noHashArray.Length) + { + SwitchToHashing(); + + int hash = (comparer.GetHashCode(item) & HighBitNotSet); + int hashIndex = hash % bucketsModSize; + + ref TNode tBlank = ref slots[nextBlankIndex]; + + tBlank.hashOrNextIndexForBlanks = hash; + tBlank.nextIndex = buckets[hashIndex]; + tBlank.item = item; + + buckets[hashIndex] = nextBlankIndex; + + nextBlankIndex = ++firstBlankAtEndIndex; + + count++; + + return true; + } + else + { + // add to noHashArray + noHashArray[i] = item; + count++; + return true; + } + } +#endif + } + + // return the index in the slots array of the item that was added or found + private int AddToHashSetIfNotFound(in T item, int hash, out bool isFound) + { + // this assmes we are hashing + + int hashIndex = hash % bucketsModSize; + + for (int index = buckets[hashIndex]; index != NullIndex;) + { + ref TNode t = ref slots[index]; + + if (t.hashOrNextIndexForBlanks == hash && comparer.Equals(t.item, item)) + { + isFound = true; + return index; // item was found, so return the index of the found item + } + + index = t.nextIndex; + } + + if (nextBlankIndex >= slots.Length) + { + // there aren't any more blank nodes to add items, so we need to increase capacity + IncreaseCapacity(); + ResizeBucketsArrayForward(GetNewBucketsArraySize()); + + // fix things messed up by buckets array resize + hashIndex = hash % bucketsModSize; + } + + int firstIndex = buckets[hashIndex]; + buckets[hashIndex] = nextBlankIndex; + + int addedNodeIndex = nextBlankIndex; + ref TNode tBlank = ref slots[nextBlankIndex]; + if (nextBlankIndex >= firstBlankAtEndIndex) + { + // the blank nodes starting at firstBlankAtEndIndex aren't chained + nextBlankIndex = ++firstBlankAtEndIndex; + } + else + { + // the blank nodes before firstBlankAtEndIndex are chained (the hashOrNextIndexForBlanks points to the next blank node) + nextBlankIndex = tBlank.hashOrNextIndexForBlanks; + } + + tBlank.hashOrNextIndexForBlanks = hash; + tBlank.nextIndex = 
firstIndex; + tBlank.item = item; + + count++; + + isFound = false; + return addedNodeIndex; // item was not found, so return the index of the added item + } + + // return the node index that was added, or NullIndex if item was found + private int AddToHashSetIfNotFoundAndMark(in T item, int hash) + { + // this assumes we are hashing + + int hashIndex = hash % bucketsModSize; + + for (int index = buckets[hashIndex]; index != NullIndex;) + { + ref TNode t = ref slots[index]; + + if (t.hashOrNextIndexForBlanks == hash && comparer.Equals(t.item, item)) + { + return NullIndex; // item was found, so return NullIndex to indicate it was not added + } + + index = t.nextIndex & MarkNextIndexBitMaskInverted; + } + + if (nextBlankIndex >= slots.Length) + { + // there aren't any more blank nodes to add items, so we need to increase capacity + IncreaseCapacity(); + ResizeBucketsArrayForwardKeepMarks(GetNewBucketsArraySize()); + + // fix things messed up by buckets array resize + hashIndex = hash % bucketsModSize; + } + + int firstIndex = buckets[hashIndex]; + buckets[hashIndex] = nextBlankIndex; + + int addedNodeIndex = nextBlankIndex; + ref TNode tBlank = ref slots[nextBlankIndex]; + if (nextBlankIndex >= firstBlankAtEndIndex) + { + // the blank nodes starting at firstBlankAtEndIndex aren't chained + nextBlankIndex = ++firstBlankAtEndIndex; + } + else + { + // the blank nodes before firstBlankAtEndIndex are chained (the hashOrNextIndexForBlanks points to the next blank node) + nextBlankIndex = tBlank.hashOrNextIndexForBlanks; + } + + tBlank.hashOrNextIndexForBlanks = hash; + tBlank.nextIndex = firstIndex | MarkNextIndexBitMask; + tBlank.item = item; + + count++; + + return addedNodeIndex; // item was not found, so return the index of the added item + } + + // we need 2 versions of Contains, one with 'in' and one without 'in' because the one without 'in' is needed to implement the ICollection Contains method + // always keep the code for these 2 Contains methods exactly the 
same + /// + /// Return true if the item is contained in the FastHashSet, otherwise return false. Use this version of the Contains method when item is a large value type to avoid copying large objects. + /// + /// The item to search for in the FastHashSet. + /// True if found, false if not found. + public bool Contains(in T item) + { +#if !Exclude_No_Hash_Array_Implementation + if (IsHashing) + { +#endif + int hash = (comparer.GetHashCode(item) & HighBitNotSet); + int hashIndex = hash % bucketsModSize; + + for (int index = buckets[hashIndex]; index != NullIndex;) + { + ref TNode t = ref slots[index]; + + if (t.hashOrNextIndexForBlanks == hash && comparer.Equals(t.item, item)) + { + return true; // item was found, so return true + } + + index = t.nextIndex; + } + return false; +#if !Exclude_No_Hash_Array_Implementation + } + else + { + for (int i = 0; i < count; i++) + { + if (comparer.Equals(item, noHashArray[i])) + { + return true; // item was found, so return true + } + } + return false; + } +#endif + } + + // this implements Contains for ICollection + /// + /// Return true if the item is contained in the FastHashSet, otherwise return false. + /// + /// The item to search for in the FastHashSet. + /// True if found, false if not found. 
+ public bool Contains(T item) + { +#if !Exclude_No_Hash_Array_Implementation + if (IsHashing) + { +#endif + int hash = (comparer.GetHashCode(item) & HighBitNotSet); + int hashIndex = hash % bucketsModSize; + + for (int index = buckets[hashIndex]; index != NullIndex;) + { + ref TNode t = ref slots[index]; + + if (t.hashOrNextIndexForBlanks == hash && comparer.Equals(t.item, item)) + { + return true; // item was found, so return true + } + + index = t.nextIndex; + } + return false; +#if !Exclude_No_Hash_Array_Implementation + } + else + { + for (int i = 0; i < count; i++) + { + if (comparer.Equals(item, noHashArray[i])) + { + return true; // item was found, so return true + } + } + return false; + } +#endif + } + + /// + /// Removes the item from the FastHashSet if found and returns true if the item was found and removed. + /// + /// The item value to remove. + /// True if the item was removed, or false if the item was not contained in the FastHashSet. + public bool Remove(T item) + { +#if !Exclude_Check_For_Set_Modifications_In_Enumerator + incrementForEverySetModification++; +#endif + +#if !Exclude_No_Hash_Array_Implementation + if (IsHashing) + { +#endif + int hash = (comparer.GetHashCode(item) & HighBitNotSet); + int hashIndex = hash % bucketsModSize; + + int priorIndex = NullIndex; + + for (int index = buckets[hashIndex]; index != NullIndex;) + { + ref TNode t = ref slots[index]; + + if (t.hashOrNextIndexForBlanks == hash && comparer.Equals(t.item, item)) + { + // item was found, so remove it + + if (priorIndex == NullIndex) + { + buckets[hashIndex] = t.nextIndex; + } + else + { + slots[priorIndex].nextIndex = t.nextIndex; + } + + // add node to blank chain or to the blanks at the end (if possible) + if (index == firstBlankAtEndIndex - 1) + { + if (nextBlankIndex == firstBlankAtEndIndex) + { + nextBlankIndex--; + } + firstBlankAtEndIndex--; + } + else + { + t.hashOrNextIndexForBlanks = nextBlankIndex; + nextBlankIndex = index; + } + + t.nextIndex = 
BlankNextIndexIndicator; + + count--; + + return true; + } + + priorIndex = index; + + index = t.nextIndex; + } + return false; // item not found +#if !Exclude_No_Hash_Array_Implementation + } + else + { + for (int i = 0; i < count; i++) + { + if (comparer.Equals(item, noHashArray[i])) + { + // remove the item by moving all remaining items to fill over this one - this is probably faster than Array.CopyTo + for (int j = i + 1; j < count; j++, i++) + { + noHashArray[i] = noHashArray[j]; + } + count--; + return true; + } + } + return false; + } +#endif + } + + // this is a new public method not in HashSet + /// + /// Removes the item from the FastHashSet if found and also if the predicate param evaluates to true on the found item. + /// This is useful if there is something about the found item other than its equality value that can be used to determine if it should be removed. + /// + /// The item value to remove. + /// The predicate to evaluate on the found item. + /// True if the item was removed, or false if the item was not removed. 
+ public bool RemoveIf(in T item, Predicate removeIfPredIsTrue) + { + if (removeIfPredIsTrue == null) + { + throw new ArgumentNullException(nameof(removeIfPredIsTrue), "Value cannot be null."); + } + + // the following code is almost the same as the Remove(item) function except that it additionally invokes the removeIfPredIsTrue param to see if the item should be removed + +#if !Exclude_Check_For_Set_Modifications_In_Enumerator + incrementForEverySetModification++; +#endif + +#if !Exclude_No_Hash_Array_Implementation + if (IsHashing) + { +#endif + int hash = (comparer.GetHashCode(item) & HighBitNotSet); + int hashIndex = hash % bucketsModSize; + + int priorIndex = NullIndex; + + for (int index = buckets[hashIndex]; index != NullIndex;) + { + ref TNode t = ref slots[index]; + + if (t.hashOrNextIndexForBlanks == hash && comparer.Equals(t.item, item)) + { + if (removeIfPredIsTrue.Invoke(t.item)) + { + // item was found and predicate was true, so remove it + + if (priorIndex == NullIndex) + { + buckets[hashIndex] = t.nextIndex; + } + else + { + slots[priorIndex].nextIndex = t.nextIndex; + } + + // add node to blank chain or to the blanks at the end (if possible) + if (index == firstBlankAtEndIndex - 1) + { + if (nextBlankIndex == firstBlankAtEndIndex) + { + nextBlankIndex--; + } + firstBlankAtEndIndex--; + } + else + { + t.hashOrNextIndexForBlanks = nextBlankIndex; + nextBlankIndex = index; + } + + t.nextIndex = BlankNextIndexIndicator; + + count--; + + return true; + } + else + { + return false; + } + } + + priorIndex = index; + + index = t.nextIndex; + } + return false; // item not found +#if !Exclude_No_Hash_Array_Implementation + } + else + { + for (int i = 0; i < count; i++) + { + if (comparer.Equals(item, noHashArray[i])) + { + if (removeIfPredIsTrue.Invoke(noHashArray[i])) + { + // remove the item by moving all remaining items to fill over this one - this is probably faster than Array.CopyTo + for (int j = i + 1; j < count; j++, i++) + { + noHashArray[i] = 
noHashArray[j]; + } + count--; + return true; + } + else + { + return false; + } + } + } + return false; + } +#endif + } + + // this is a new public method not in HashSet + /// + /// Returns a ref to the element in the FastHashSet if found, or adds the item if not present in the FastHashSet and returns a ref to the added element. + /// The returned element reference should only be changed in ways that does not effect its GetHashCode value. + /// The returned element reference should only be used before any modifications to the FastHashSet (like Add or Remove) which may invalidate it. + /// + /// The item to be added or found. + /// Set to true if the item is found, or false if the added was not found and added. + /// Returns a ref to the found item or to the added item. + public ref T FindOrAdd(in T item, out bool isFound) + { +#if !Exclude_Check_For_Set_Modifications_In_Enumerator + incrementForEverySetModification++; +#endif + + isFound = false; +#if !Exclude_No_Hash_Array_Implementation + if (IsHashing) + { +#endif + int addedOrFoundItemIndex = AddToHashSetIfNotFound(in item, (comparer.GetHashCode(item) & HighBitNotSet), out isFound); + return ref slots[addedOrFoundItemIndex].item; +#if !Exclude_No_Hash_Array_Implementation + } + else + { + int i; + for (i = 0; i < count; i++) + { + if (comparer.Equals(item, noHashArray[i])) + { + isFound = true; + return ref noHashArray[i]; + } + } + + if (i == noHashArray.Length) + { + SwitchToHashing(); + return ref FindOrAdd(in item, out isFound); + } + else + { + // add to noHashArray and keep isAdded true + noHashArray[i] = item; + count++; + return ref noHashArray[i]; + } + } +#endif + } + + // this is a new public method not in HashSet + /// + /// Tries to find the element with the same value as item in the FastHashSet and, if found, it returns a ref to this found element. + /// This is similar to TryGetValue except it returns a ref to the actual element rather than creating copy of the element with an out parameter. 
+ /// This allows the actual element to be changed if it is a mutable value type. + /// The returned element reference should only be changed in ways that does not effect its GetHashCode value. + /// The returned element reference should only be used before any modifications to the FastHashSet (like Add or Remove) which may invalidate it. + /// + /// The item to be found. + /// Set to true if the item is found, or false if not found. + /// Returns a ref to the element if it is found and sets the isFound out parameter to true. If not found, it returns a ref to the first element available and sets the isFound out parameter to false. + public ref T Find(in T item, out bool isFound) + { + isFound = false; +#if !Exclude_No_Hash_Array_Implementation + if (IsHashing) + { +#endif + FindInSlotsArray(item, out int foundNodeIndex, out int priorNodeIndex, out int bucketsIndex); + if (foundNodeIndex != NullIndex) + { + isFound = true; + } + + return ref slots[foundNodeIndex].item; +#if !Exclude_No_Hash_Array_Implementation + } + else + { + int i; + for (i = 0; i < count; i++) + { + if (comparer.Equals(item, noHashArray[i])) + { + isFound = true; + return ref noHashArray[i]; + } + } + + // if item was not found, still need to return a ref to something, so return a ref to the first item in the array + return ref noHashArray[0]; + } +#endif + } + + // this is a new public method not in HashSet + /// + /// Tries to find the element with the same value as item in the FastHashSet and, if found,it returns a ref to this found element, except if it is also removed (which is determined by the removeIfPredIsTrue parameter). + /// The returned element reference should only be changed in ways that does not effect its GetHashCode value. + /// The returned element reference should only be used before any modifications to the FastHashSet (like Add or Remove) which may invalidate it. + /// + /// + /// The predicate to evaluate on the found item. 
+ /// Set to true if the item is found, or false if not found. + /// Set to true if the item is found and then removed, or false if not removed. + /// Returns a ref to the element if it is found (and not removed) and sets the isFound out parameter to true and the isRemoved out parameter to false. If removed, it returns a reference to the first available element. + public ref T FindAndRemoveIf(in T item, Predicate removeIfPredIsTrue, out bool isFound, out bool isRemoved) + { + if (removeIfPredIsTrue == null) + { + throw new ArgumentNullException(nameof(removeIfPredIsTrue), "Value cannot be null."); + } + +#if !Exclude_Check_For_Set_Modifications_In_Enumerator + incrementForEverySetModification++; +#endif + + isFound = false; + isRemoved = false; + +#if !Exclude_No_Hash_Array_Implementation + if (IsHashing) + { +#endif + FindInSlotsArray(item, out int foundNodeIndex, out int priorNodeIndex, out int bucketsIndex); + if (foundNodeIndex != NullIndex) + { + isFound = true; + ref TNode t = ref slots[foundNodeIndex]; + if (removeIfPredIsTrue.Invoke(t.item)) + { + if (priorNodeIndex == NullIndex) + { + buckets[bucketsIndex] = t.nextIndex; + } + else + { + slots[priorNodeIndex].nextIndex = t.nextIndex; + } + + // add node to blank chain or to the blanks at the end (if possible) + if (foundNodeIndex == firstBlankAtEndIndex - 1) + { + if (nextBlankIndex == firstBlankAtEndIndex) + { + nextBlankIndex--; + } + firstBlankAtEndIndex--; + } + else + { + t.hashOrNextIndexForBlanks = nextBlankIndex; + nextBlankIndex = foundNodeIndex; + } + + t.nextIndex = BlankNextIndexIndicator; + + count--; + + isRemoved = true; + + foundNodeIndex = NullIndex; + } + } + + return ref slots[foundNodeIndex].item; +#if !Exclude_No_Hash_Array_Implementation + } + else + { + int i; + for (i = 0; i < count; i++) + { + if (comparer.Equals(item, noHashArray[i])) + { + isFound = true; + if (removeIfPredIsTrue.Invoke(noHashArray[i])) + { + // remove the item by moving all remaining items to fill over this one 
- this is probably faster than Array.CopyTo + for (int j = i + 1; j < count; j++, i++) + { + noHashArray[i] = noHashArray[j]; + } + count--; + + isRemoved = true; + return ref noHashArray[0]; + } + else + { + return ref noHashArray[i]; + } + } + } + + // if item was not found, still need to return a ref to something, so return a ref to the first item in the array + return ref noHashArray[0]; + } +#endif + } + + // return index into slots array or 0 if not found + //??? to make things faster, could have a FindInSlotsArray that just returns foundNodeIndex and another version called FindWithPriorInSlotsArray that has the 3 out params + // first test to make sure this works as is + private void FindInSlotsArray(in T item, out int foundNodeIndex, out int priorNodeIndex, out int bucketsIndex) + { + foundNodeIndex = NullIndex; + priorNodeIndex = NullIndex; + + int hash = (comparer.GetHashCode(item) & HighBitNotSet); + int hashIndex = hash % bucketsModSize; + + bucketsIndex = hashIndex; + + int priorIndex = NullIndex; + + for (int index = buckets[hashIndex]; index != NullIndex;) + { + ref TNode t = ref slots[index]; + + if (t.hashOrNextIndexForBlanks == hash && comparer.Equals(t.item, item)) + { + foundNodeIndex = index; + priorNodeIndex = priorIndex; + return; // item was found + } + + priorIndex = index; + + index = t.nextIndex; + } + return; // item not found + } + + [MethodImpl(MethodImplOptions.AggressiveInlining)] + private bool FindInSlotsArray(in T item, int hash) + { + int hashIndex = hash % bucketsModSize; + + for (int index = buckets[hashIndex]; index != NullIndex;) + { + ref TNode t = ref slots[index]; + + if (t.hashOrNextIndexForBlanks == hash && comparer.Equals(t.item, item)) + { + return true; // item was found, so return true + } + + index = t.nextIndex; + } + return false; + } + +#if !Exclude_No_Hash_Array_Implementation + [MethodImpl(MethodImplOptions.AggressiveInlining)] + private bool FindInNoHashArray(in T item) + { + for (int i = 0; i < count; i++) + 
{ + if (comparer.Equals(item, noHashArray[i])) + { + return true; // item was found, so return true + } + } + return false; + } +#endif + + private void UnmarkAllNextIndexValues(int maxNodeIndex) + { + // must be hashing to be here + for (int i = 1; i <= maxNodeIndex; i++) + { + slots[i].nextIndex &= MarkNextIndexBitMaskInverted; + } + } + + // removeMarked = true, means remove the marked items and keep the unmarked items + // removeMarked = false, means remove the unmarked items and keep the marked items + private void UnmarkAllNextIndexValuesAndRemoveAnyMarkedOrUnmarked(bool removeMarked) + { + // must be hashing to be here + + // must traverse all of the chains instead of just looping through the slots array because going through the chains is the only way to set + // nodes within a chain to blank and still be able to remove the blank node from the chain + + int index; + int nextIndex; + int priorIndex; + int lastNonBlankIndex = firstBlankAtEndIndex - 1; + for (int i = 0; i < buckets.Length; i++) + { + priorIndex = NullIndex; // 0 means use buckets array + index = buckets[i]; + + while (index != NullIndex) + { + ref TNode t = ref slots[index]; + nextIndex = t.nextIndex; + bool isMarked = (nextIndex & MarkNextIndexBitMask) != 0; + if (isMarked) + { + // this node is marked, so unmark it + nextIndex &= MarkNextIndexBitMaskInverted; + t.nextIndex = nextIndex; + } + + if (removeMarked == isMarked) + { + // set this node to blank + + count--; + + // first try to set it to blank by adding it to the blank at end group + if (index == lastNonBlankIndex) + { + //??? 
does it make sense to attempt this because any already blank items before this will not get added + lastNonBlankIndex--; + if (nextBlankIndex == firstBlankAtEndIndex) + { + nextBlankIndex--; + } + firstBlankAtEndIndex--; + } + else + { + // add to the blank group + + t.nextIndex = BlankNextIndexIndicator; + + t.hashOrNextIndexForBlanks = nextBlankIndex; + nextBlankIndex = index; + } + + if (priorIndex == NullIndex) + { + buckets[i] = nextIndex; + } + else + { + slots[priorIndex].nextIndex = nextIndex; + } + + // keep priorIndex the same because we removed the node in the chain, so the priorIndex is still the same value + } + else + { + priorIndex = index; // node was not removed from the chain, so the priorIndex now points to the node that was not removed + } + + index = nextIndex; + } + } + } + + private FoundType FindInSlotsArrayAndMark(in T item, out int foundNodeIndex) + { + int hash = (comparer.GetHashCode(item) & HighBitNotSet); + int hashIndex = hash % bucketsModSize; + + int index = buckets[hashIndex]; + + if (index == NullIndex) + { + foundNodeIndex = NullIndex; + return FoundType.NotFound; + } + else + { + // item with same hashIndex already exists, so need to look in the chained list for an equal item (using Equals) + + int nextIndex; + while (true) + { + ref TNode t = ref slots[index]; + nextIndex = t.nextIndex; + + // check if hash codes are equal before calling Equals (which may take longer) items that are Equals must have the same hash code + if (t.hashOrNextIndexForBlanks == hash && comparer.Equals(t.item, item)) + { + foundNodeIndex = index; + if ((nextIndex & MarkNextIndexBitMask) == 0) + { + // not marked, so mark it + t.nextIndex |= MarkNextIndexBitMask; + + return FoundType.FoundFirstTime; + } + return FoundType.FoundNotFirstTime; + } + + nextIndex &= MarkNextIndexBitMaskInverted; + if (nextIndex == NullIndex) + { + foundNodeIndex = NullIndex; + return FoundType.NotFound; // not found + } + else + { + index = nextIndex; + } + } + } + } + + // 
this is a new public method not in HashSet + /// + /// Get the information about the size of chains in the FastHashSet. + /// The size of chains should be small to reduce traversing and comparing items. + /// This can indicate the effectiveness of the hash code creation method. + /// + /// Outputs the average node visits per chain. This is a single number that summarizes the average length of chains in terms of the average number of compares until an equal value is found (when the item is present). + /// A List of LevelAndCount items that gives the length of each chain in the FastHashSet. + public List GetChainLevelsCounts(out double avgNodeVisitsPerChain) + { + Dictionary itemsInChainToCountDict = new Dictionary(); + + // this function only makes sense when hashing + int chainCount = 0; + if (buckets != null) + { + for (int i = 0; i < buckets.Length; i++) + { + int index = buckets[i]; + if (index != NullIndex) + { + chainCount++; + int itemsInChain = 1; + + while (slots[index].nextIndex != NullIndex) + { + index = slots[index].nextIndex; + itemsInChain++; + } + + itemsInChainToCountDict.TryGetValue(itemsInChain, out int cnt); + cnt++; + itemsInChainToCountDict[itemsInChain] = cnt; + } + } + } + + double totalAvgNodeVisitsIfVisitingAllChains = 0; + List lst = new List(itemsInChainToCountDict.Count); + foreach (KeyValuePair keyVal in itemsInChainToCountDict) + { + lst.Add(new ChainLevelAndCount(keyVal.Key, keyVal.Value)); + if (keyVal.Key == 1) + { + totalAvgNodeVisitsIfVisitingAllChains += keyVal.Value; + } + else + { + totalAvgNodeVisitsIfVisitingAllChains += keyVal.Value * (keyVal.Key + 1.0) / 2.0; + } + } + + if (chainCount == 0) + { + avgNodeVisitsPerChain = 0; + } + else + { + avgNodeVisitsPerChain = totalAvgNodeVisitsIfVisitingAllChains / chainCount; + } + + lst.Sort(); + + return lst; + } + + // this is a new public method not in HashSet + /// + /// Reorders items in the same hash chain (items that have the same hash code or mod to the same index), so that 
they are adjacent in memory. + /// This gives better locality of reference for larger count of items, which can result in fewer cache misses. + /// + public void ReorderChainedNodesToBeAdjacent() + { + if (slots != null) + { + TNode[] newSlotsArray = new TNode[slots.Length]; + + // copy elements using the buckets array chains so there is better locality in the chains + int index; + int newIndex = 1; + for (int i = 0; i < buckets.Length; i++) + { + index = buckets[i]; + if (index != NullIndex) + { + buckets[i] = newIndex; + while (true) + { + ref TNode t = ref slots[index]; + ref TNode tNew = ref newSlotsArray[newIndex]; + index = t.nextIndex; + newIndex++; + + // copy + tNew.hashOrNextIndexForBlanks = t.hashOrNextIndexForBlanks; + tNew.item = t.item; + if (index == NullIndex) + { + tNew.nextIndex = NullIndex; + break; + } + tNew.nextIndex = newIndex; + } + } + } + + newIndex++; + nextBlankIndex = newIndex; + firstBlankAtEndIndex = newIndex; + slots = newSlotsArray; + } + } + + /// + /// Looks for equalValue and if found, returns a copy of the found value in actualValue and returns true. + /// + /// The item to look for. + /// The copy of the found value, if found, or the default value of the same type if not found. + /// True if equalValue is found, or false if not found. + public bool TryGetValue(T equalValue, out T actualValue) + { +#if !Exclude_No_Hash_Array_Implementation + if (IsHashing) + { +#endif + FindInSlotsArray(equalValue, out int foundNodeIndex, out int priorNodeIndex, out int bucketsIndex); + if (foundNodeIndex > 0) + { + actualValue = slots[foundNodeIndex].item; + return true; + } + + actualValue = default; + return false; +#if !Exclude_No_Hash_Array_Implementation + } + else + { + int i; + for (i = 0; i < count; i++) + { + if (comparer.Equals(equalValue, noHashArray[i])) + { + actualValue = noHashArray[i]; + return true; + } + } + + actualValue = default; + return false; + } +#endif + } + + /// + /// Adds all items in into this FastHashSet. 
This is similar to AddRange for other types of collections, but it is called UnionWith for ISets. + /// + /// The enumerable items to add (cannot be null). + public void UnionWith(IEnumerable other) + { + if (other == null) + { + throw new ArgumentNullException(nameof(other), "Value cannot be null."); + } + + // Note: HashSet doesn't seem to increment this unless it really changes something - like doing an Add(3) when 3 is already in the hashset doesn't increment, same as doing a UnionWith with an empty set as the param. +#if !Exclude_Check_For_Set_Modifications_In_Enumerator + incrementForEverySetModification++; +#endif + + if (other == this) + { + return; + } + + //??? maybe there is a faster way to add a bunch at one time - I copied the Add code below to make this faster + //foreach (T item in range) + //{ + // Add(item); + //} + + // do this with more code because it might get called in some high performance situations + +#if !Exclude_No_Hash_Array_Implementation + if (IsHashing) + { +#endif + foreach (T item in other) + { + AddToHashSetIfNotFound(in item, (comparer.GetHashCode(item) & HighBitNotSet), out bool isFound); + } +#if !Exclude_No_Hash_Array_Implementation + } + else + { + int i; + + foreach (T item in other) + { + //??? if it's easier for the jit compiler or il compiler to remove the array bounds checking then + // have i < noHashArray.Length and do the check for count within the loop with a break statement + for (i = 0; i < count; i++) + { + if (comparer.Equals(item, noHashArray[i])) + { + goto found; // break out of inner for loop + } + } + + // if here then item was not found + if (i == noHashArray.Length) + { + SwitchToHashing(); + AddToHashSetIfNotFound(in item, (comparer.GetHashCode(item) & HighBitNotSet), out bool isFound); + } + else + { + // add to noHashArray + noHashArray[i] = item; + count++; + } + + found:; + } + } +#endif + } + + /// + /// Removes all items in from the FastHashSet. + /// + /// The enumerable items (cannot be null). 
+ public void ExceptWith(IEnumerable other) + { + if (other == null) + { + throw new ArgumentNullException(nameof(other), "Value cannot be null."); + } + +#if !Exclude_Check_For_Set_Modifications_In_Enumerator + incrementForEverySetModification++; +#endif + if (other == this) + { + Clear(); + } + else + { + foreach (T item in other) + { + Remove(item); + } + } + } + + /// + /// Removes items from the FastHashSet so that the only remaining items are those contained in that also match an item in the FastHashSet. + /// + /// The enumerable items (cannot be null). + public void IntersectWith(IEnumerable other) + { + if (other == null) + { + throw new ArgumentNullException(nameof(other), "Value cannot be null."); + } + + if (other == this) + { + return; + } + +#if !Exclude_Check_For_Set_Modifications_In_Enumerator + incrementForEverySetModification++; +#endif + + // if hashing, find each item in the slots array and mark anything found, but remove from being found again + +#if !Exclude_No_Hash_Array_Implementation + if (IsHashing) + { +#endif + int foundItemCount = 0; // the count of found items in the hash - without double counting + foreach (T item in other) + { + FoundType foundType = FindInSlotsArrayAndMark(in item, out int foundIndex); + if (foundType == FoundType.FoundFirstTime) + { + foundItemCount++; + + if (foundItemCount == count) + { + break; + } + } + } + + if (foundItemCount == 0) + { + Clear(); + } + else + { + UnmarkAllNextIndexValuesAndRemoveAnyMarkedOrUnmarked(false); + } +#if !Exclude_No_Hash_Array_Implementation + } + else + { + // Note: we could actually do this faster by moving any found items to the front and keeping track of the found items + // with a single int index + // the problem with this method is it reorders items and even though that shouldn't matter in a set + // it might cause issues with code that incorrectly assumes order stays the same for operations like this + + // possibly a faster implementation would be to use the method above, 
but keep track of original order with an int array of the size of count (ex. item at 0 was originally 5, and also item at 5 was originally 0) + + // set the corresponding bit in this int if an item was found + // using a uint means the no hashing array cannot be more than 32 items + uint foundItemBits = 0; + + int i; + + int foundItemCount = 0; // the count of found items in the hash - without double counting + foreach (T item in other) + { + for (i = 0; i < count; i++) + { + if (comparer.Equals(item, noHashArray[i])) + { + uint mask = (1u << i); + if ((foundItemBits & mask) == 0) + { + foundItemBits |= mask; + foundItemCount++; + } + goto found; // break out of inner for loop + } + } + + found: + if (foundItemCount == count) + { + // all items in the set were found, so there is nothing to remove - the set isn't changed + return; + } + } + + if (foundItemCount == 0) + { + count = 0; // this is the equivalent of calling Clear + } + else + { + // remove any items that are unmarked (unfound) + // go backwards because this can be faster + for (i = count - 1; i >= 0; i--) + { + uint mask = (1u << i); + if ((foundItemBits & mask) == 0) + { + if (i < count - 1) + { + // a faster method if there are multiple unfound items in a row is to find the first used item (make i go backwards until the item is used and then increment i by 1) + // if there aren't multiple unused in a row, then this is a bit of a waste + + int j = i + 1; // j now points to the next item after the unfound one that we want to keep + + i--; + while (i >= 0) + { + uint mask2 = (1u << i); + if ((foundItemBits & mask2) != 0) + { + break; + } + i--; + } + i++; + + int k = i; + for (; j < count; j++, k++) + { + noHashArray[k] = noHashArray[j]; + } + } + + count--; + } + } + } + } +#endif + } + + // An empty set is a proper subset of any other collection. Therefore, this method returns true if the collection represented by the current HashSet object + // is empty unless the other parameter is also an empty set. 
+ // This method always returns false if Count is greater than or equal to the number of elements in other. + // If the collection represented by other is a HashSet collection with the same equality comparer as the current HashSet object, + // then this method is an O(n) operation. Otherwise, this method is an O(n + m) operation, where n is Count and m is the number of elements in other. + + /// + /// Returns true if this FastHashSet is a proper subset of . + /// + /// The enumerable items (cannot be null). + /// True if a proper subset of . + public bool IsProperSubsetOf(IEnumerable other) + { + if (other == null) + { + throw new ArgumentNullException(nameof(other), "Value cannot be null."); + } + + if (other == this) + { + return false; + } + + ICollection collection = other as ICollection; + if (collection != null) + { + if (count == 0 && collection.Count > 0) + { + return true; // by definition, an empty set is a proper subset of any non-empty collection + } + + if (count >= collection.Count) + { + return false; + } + } + else + { + if (count == 0) + { + foreach (T item in other) + { + return true; + } + return false; + } + } + +#if !Exclude_No_Hash_Array_Implementation + if (IsHashing) + { +#endif + int foundItemCount = 0; // the count of found items in the hash - without double counting + int maxFoundIndex = 0; + bool notFoundAtLeastOne = false; + foreach (T item in other) + { + FoundType foundType = FindInSlotsArrayAndMark(in item, out int foundIndex); + if (foundType == FoundType.FoundFirstTime) + { + foundItemCount++; + if (maxFoundIndex < foundIndex) + { + maxFoundIndex = foundIndex; + } + } + else if (foundType == FoundType.NotFound) + { + notFoundAtLeastOne = true; + } + + if (notFoundAtLeastOne && foundItemCount == count) + { + // true means all of the items in the set were found in other and at least one item in other was not found in the set + break; // will return true below after unmarking + } + } + + UnmarkAllNextIndexValues(maxFoundIndex); + + 
return notFoundAtLeastOne && foundItemCount == count; // true if all of the items in the set were found in other and at least one item in other was not found in the set +#if !Exclude_No_Hash_Array_Implementation + } + else + { + uint foundItemBits = 0; + + int foundItemCount = 0; // the count of found items in the hash - without double counting + bool notFoundAtLeastOne = false; + foreach (T item in other) + { + for (int i = 0; i < count; i++) + { + if (comparer.Equals(item, noHashArray[i])) + { + uint mask = (1u << i); + if ((foundItemBits & mask) == 0) + { + foundItemBits |= mask; + foundItemCount++; + } + goto found; // break out of inner for loop + } + } + + // if here then item was not found + notFoundAtLeastOne = true; + + found: + if (notFoundAtLeastOne && foundItemCount == count) + { + // true means all of the items in the set were found in other and at least one item in other was not found in the set + return true; + } + } + + return false; + } +#endif + } + + /// + /// Returns true if this FastHashSet is a subset of . + /// + /// The enumerable items (cannot be null). + /// True if a subset of . 
+ public bool IsSubsetOf(IEnumerable other) + { + if (other == null) + { + throw new ArgumentNullException(nameof(other), "Value cannot be null."); + } + + if (other == this) + { + return true; + } + + if (count == 0) + { + return true; // by definition, an empty set is a subset of any collection + } + + ICollection collection = other as ICollection; + if (collection != null) + { + if (count > collection.Count) + { + return false; + } + } + +#if !Exclude_No_Hash_Array_Implementation + if (IsHashing) + { +#endif + int foundItemCount = 0; // the count of found items in the hash - without double counting + int maxFoundIndex = 0; + foreach (T item in other) + { + FoundType foundType = FindInSlotsArrayAndMark(in item, out int foundIndex); + if (foundType == FoundType.FoundFirstTime) + { + foundItemCount++; + if (maxFoundIndex < foundIndex) + { + maxFoundIndex = foundIndex; + } + + if (foundItemCount == count) + { + break; + } + } + } + + UnmarkAllNextIndexValues(maxFoundIndex); + + return foundItemCount == count; // true if all of the items in the set were found in other +#if !Exclude_No_Hash_Array_Implementation + } + else + { + uint foundItemBits = 0; + + int foundItemCount = 0; // the count of found items in the hash - without double counting + foreach (T item in other) + { + for (int i = 0; i < count; i++) + { + if (comparer.Equals(item, noHashArray[i])) + { + uint mask = (1u << i); + if ((foundItemBits & mask) == 0) + { + foundItemBits |= mask; + foundItemCount++; + } + goto found; // break out of inner for loop + } + } + + found: + if (foundItemCount == count) + { + break; + } + } + + return foundItemCount == count; // true if all of the items in the set were found in other + } +#endif + } + + /// + /// Returns true if this FastHashSet is a proper superset of . + /// + /// The enumerable items (cannot be null). + /// True if a proper superset of . 
+ public bool IsProperSupersetOf(IEnumerable other) + { + if (other == null) + { + throw new ArgumentNullException(nameof(other), "Value cannot be null."); + } + + if (other == this) + { + return false; + } + + if (count == 0) + { + return false; // an empty set can never be a proper superset of anything (not even an empty collection) + } + + ICollection collection = other as ICollection; + if (collection != null) + { + if (collection.Count == 0) + { + return true; // by definition, an empty other means the set is a proper superset of it if the set has at least one value + } + } + else + { + foreach (T item in other) + { + goto someItemsInOther; + } + return true; + } + + someItemsInOther: + +#if !Exclude_No_Hash_Array_Implementation + if (IsHashing) + { +#endif + int foundItemCount = 0; // the count of found items in the hash - without double counting + int maxFoundIndex = 0; + foreach (T item in other) + { + FoundType foundType = FindInSlotsArrayAndMark(in item, out int foundIndex); + if (foundType == FoundType.FoundFirstTime) + { + foundItemCount++; + if (maxFoundIndex < foundIndex) + { + maxFoundIndex = foundIndex; + } + + if (foundItemCount == count) + { + break; + } + } + else if (foundType == FoundType.NotFound) + { + // any unfound item means this can't be a proper superset of + UnmarkAllNextIndexValues(maxFoundIndex); + return false; + } + } + + UnmarkAllNextIndexValues(maxFoundIndex); + + return foundItemCount < count; // true if all of the items in other were found in set and at least one item in set was not found in other +#if !Exclude_No_Hash_Array_Implementation + } + else + { + uint foundItemBits = 0; + + int foundItemCount = 0; // the count of found items in the hash - without double counting + foreach (T item in other) + { + for (int i = 0; i < count; i++) + { + if (comparer.Equals(item, noHashArray[i])) + { + uint mask = (1u << i); + if ((foundItemBits & mask) == 0) + { + foundItemBits |= mask; + foundItemCount++; + } + goto found; // break out of 
inner for loop + } + } + + // if here then item was not found + return false; + + found: + if (foundItemCount == count) + { + break; + } + } + + return foundItemCount < count; // true if all of the items in other were found in set and at least one item in set was not found in other + } +#endif + } + + /// + /// Returns true if this FastHashSet is a superset of . + /// + /// The enumerable items (cannot be null). + /// True if a superset of . + public bool IsSupersetOf(IEnumerable other) + { + if (other == null) + { + throw new ArgumentNullException(nameof(other), "Value cannot be null."); + } + + if (other == this) + { + return true; + } + + ICollection collection = other as ICollection; + if (collection != null) + { + if (collection.Count == 0) + { + return true; // by definition, an empty other means the set is a superset of it + } + } + else + { + foreach (T item in other) + { + goto someItemsInOther; + } + return true; + } + + someItemsInOther: + + if (count == 0) + { + return false; // an empty set can never be a proper superset of anything (except an empty collection - but an empty collection returns true above) + } + +#if !Exclude_No_Hash_Array_Implementation + if (IsHashing) + { +#endif + foreach (T item in other) + { + if (!FindInSlotsArray(in item, (comparer.GetHashCode(item) & HighBitNotSet))) + { + return false; + } + } + + return true; // true if all of the items in other were found in the set, false if at least one item in other was not found in the set +#if !Exclude_No_Hash_Array_Implementation + } + else + { + int i; + + foreach (T item in other) + { + for (i = 0; i < count; i++) + { + if (comparer.Equals(item, noHashArray[i])) + { + goto found; // break out of inner for loop + } + } + + // if here then item was not found + return false; + + found:; + + } + + return true; // true if all of the items in other were found in the set, false if at least one item in other was not found in the set + } +#endif + } + + /// + /// Returns true if this 
FastHashSet contains any items in . + /// + /// The enumerable items (cannot be null). + /// True if contains any items in . + public bool Overlaps(IEnumerable other) + { + if (other == null) + { + throw new ArgumentNullException(nameof(other), "Value cannot be null."); + } + + if (other == this) + { + return count > 0; // return false if there are no items when both sets are the same, otherwise return true when both sets are the same + } + + foreach (T item in other) + { + if (Contains(in item)) + { + return true; + } + } + return false; + } + + /// + /// Returns true if this FastHashSet contains exactly the same elements as . + /// + /// The enumerable items (cannot be null). + /// True if contains the same elements as . + public bool SetEquals(IEnumerable other) + { + if (other == null) + { + throw new ArgumentNullException(nameof(other), "Value cannot be null."); + } + + if (other == this) + { + return true; + } + + // if other is ICollection, then it has count + + ICollection c = other as ICollection; + + if (c != null) + { + if (c.Count < count) + { + return false; + } + + HashSet hset = other as HashSet; + if (hset != null && Equals(hset.Comparer, Comparer)) + { + if (hset.Count != count) + { + return false; + } + + foreach (T item in other) + { + if (!Contains(in item)) + { + return false; + } + } + return true; + } + + FastHashSet fhset = other as FastHashSet; + if (fhset != null && Equals(fhset.Comparer, Comparer)) + { + if (fhset.Count != count) + { + return false; + } + +#if !Exclude_No_Hash_Array_Implementation + if (IsHashing) + { +#endif + int pastNodeIndex = slots.Length; + if (firstBlankAtEndIndex < pastNodeIndex) + { + pastNodeIndex = firstBlankAtEndIndex; + } + +#if !Exclude_No_Hash_Array_Implementation + if (fhset.IsHashing) + { +#endif + for (int i = 1; i < pastNodeIndex; i++) + { + // could not do the blank check if we know there aren't any blanks - below code and in the loop in the else + // could do the check to see if there are any blanks 
first and then have 2 versions of this code, one with the check for blank and the other without it + if (slots[i].nextIndex != BlankNextIndexIndicator) // skip any blank nodes + { + if (!fhset.FindInSlotsArray(in slots[i].item, slots[i].hashOrNextIndexForBlanks)) + { + return false; + } + } + } +#if !Exclude_No_Hash_Array_Implementation + } + else + { + for (int i = 1; i < pastNodeIndex; i++) + { + if (slots[i].nextIndex != BlankNextIndexIndicator) // skip any blank nodes + { + if (!fhset.FindInNoHashArray(in slots[i].item)) + { + return false; + } + } + } + } + } + else + { + foreach (T item in other) + { + if (!FindInNoHashArray(in item)) + { + return false; + } + } + } + return true; +#endif + } + + } + + +#if !Exclude_No_Hash_Array_Implementation + if (IsHashing) + { +#endif + int foundItemCount = 0; // the count of found items in the hash - without double counting + int maxFoundIndex = 0; + foreach (T item in other) + { + FoundType foundType = FindInSlotsArrayAndMark(in item, out int foundIndex); + if (foundType == FoundType.FoundFirstTime) + { + foundItemCount++; + if (maxFoundIndex < foundIndex) + { + maxFoundIndex = foundIndex; + } + } + else if (foundType == FoundType.NotFound) + { + UnmarkAllNextIndexValues(maxFoundIndex); + return false; + } + } + + UnmarkAllNextIndexValues(maxFoundIndex); + + return foundItemCount == count; +#if !Exclude_No_Hash_Array_Implementation + } + else + { + uint foundItemBits = 0; + + int foundItemCount = 0; // the count of found items in the hash - without double counting + foreach (T item in other) + { + for (int i = 0; i < count; i++) + { + if (comparer.Equals(item, noHashArray[i])) + { + uint mask = (1u << i); + if ((foundItemBits & mask) == 0) + { + foundItemBits |= mask; + foundItemCount++; + } + goto found; // break out of inner for loop + } + } + // if here then item was not found + return false; + found:; + } + + return foundItemCount == count; + } +#endif + } + + // From the online document: Modifies the current 
HashSet object to contain only elements that are present either in that object or in the specified collection, but not both. + /// + /// Modifies the FastHashSet so that it contains only items in the FashHashSet or , but not both. + /// So items in that are also in the FastHashSet are removed, and items in that are not in the FastHashSet are added. + /// + /// The enumerable items (cannot be null). + public void SymmetricExceptWith(IEnumerable other) + { + if (other == null) + { + throw new ArgumentNullException(nameof(other), "Value cannot be null."); + } + + if (other == this) + { + Clear(); + } + +#if !Exclude_Check_For_Set_Modifications_In_Enumerator + incrementForEverySetModification++; +#endif + +#if !Exclude_No_Hash_Array_Implementation + if (!IsHashing) + { + // to make things easier for now, just switch to hashing if calling this function and deal with only one set of code + SwitchToHashing(); + } +#endif + + // for the first loop through other, add any unfound items and mark + int addedNodeIndex; + int maxAddedNodeIndex = NullIndex; + foreach (T item in other) + { + addedNodeIndex = AddToHashSetIfNotFoundAndMark(in item, (comparer.GetHashCode(item) & HighBitNotSet)); + if (addedNodeIndex > maxAddedNodeIndex) + { + maxAddedNodeIndex = addedNodeIndex; + } + } + + foreach (T item in other) + { + RemoveIfNotMarked(in item); + } + + UnmarkAllNextIndexValues(maxAddedNodeIndex); + } + + private void RemoveIfNotMarked(in T item) + { + // calling this function assumes we are hashing + int hash = (comparer.GetHashCode(item) & HighBitNotSet); + int hashIndex = hash % bucketsModSize; + + int priorIndex = NullIndex; + + for (int index = buckets[hashIndex]; index != NullIndex;) + { + ref TNode t = ref slots[index]; + + if (t.hashOrNextIndexForBlanks == hash && comparer.Equals(t.item, item)) + { + // item was found, so remove it if not marked + if ((t.nextIndex & MarkNextIndexBitMask) == 0) + { + if (priorIndex == NullIndex) + { + buckets[hashIndex] = t.nextIndex; + } + 
else + { + // if slots[priorIndex].nextIndex was marked, then keep it marked + // already know that t.nextIndex is not marked + slots[priorIndex].nextIndex = t.nextIndex | (slots[priorIndex].nextIndex & MarkNextIndexBitMask); + } + + // add node to blank chain or to the blanks at the end (if possible) + if (index == firstBlankAtEndIndex - 1) + { + if (nextBlankIndex == firstBlankAtEndIndex) + { + nextBlankIndex--; + } + firstBlankAtEndIndex--; + } + else + { + t.hashOrNextIndexForBlanks = nextBlankIndex; + nextBlankIndex = index; + } + + t.nextIndex = BlankNextIndexIndicator; + + count--; + + return; + } + } + + priorIndex = index; + + index = t.nextIndex & MarkNextIndexBitMaskInverted; + } + return; // item not found + } + + /// + /// Removes any items in the FastHashSet where the predicate is true for that item. + /// + /// The match predicate (cannot be null). + /// The number of items removed. + public int RemoveWhere(Predicate match) + { + if (match == null) + { + throw new ArgumentNullException(nameof(match), "Value cannot be null."); + } + +#if !Exclude_Check_For_Set_Modifications_In_Enumerator + incrementForEverySetModification++; +#endif + + int removeCount = 0; + +#if !Exclude_No_Hash_Array_Implementation + if (IsHashing) + { +#endif + // must traverse all of the chains instead of just looping through the slots array because going through the chains is the only way to set + // nodes within a chain to blank and still be able to remove the blank node from the chain + + int priorIndex; + int nextIndex; + for (int i = 0; i < buckets.Length; i++) + { + priorIndex = NullIndex; // 0 means use buckets array + + for (int index = buckets[i]; index != NullIndex;) + { + ref TNode t = ref slots[index]; + + nextIndex = t.nextIndex; + if (match.Invoke(t.item)) + { + // item was matched, so remove it + + if (priorIndex == NullIndex) + { + buckets[i] = nextIndex; + } + else + { + slots[priorIndex].nextIndex = nextIndex; + } + + // add node to blank chain or to the blanks 
at the end (if possible) + if (index == firstBlankAtEndIndex - 1) + { + if (nextBlankIndex == firstBlankAtEndIndex) + { + nextBlankIndex--; + } + firstBlankAtEndIndex--; + } + else + { + t.hashOrNextIndexForBlanks = nextBlankIndex; + nextBlankIndex = index; + } + + t.nextIndex = BlankNextIndexIndicator; + + count--; + removeCount++; + } + + priorIndex = index; + + index = nextIndex; + } + } +#if !Exclude_No_Hash_Array_Implementation + } + else + { + int i; + for (i = count - 1; i >= 0; i--) + { + if (match.Invoke(noHashArray[i])) + { + removeCount++; + + if (i < count - 1) + { + int j = i + 1; + int k = i; + for (; j < count; j++, k++) + { + noHashArray[k] = noHashArray[j]; + } + } + + count--; + } + } + } +#endif + + return removeCount; + } + + private class FastHashSetEqualityComparer : IEqualityComparer> + { + public bool Equals(FastHashSet x, FastHashSet y) + { + if (x == null && y == null) + { + return true; + } + + if (y == null) + { + return false; + } + + if (x != null) + { + return x.SetEquals(y); + } + else + { + return false; + } + } + + public int GetHashCode(FastHashSet set) + { + if (set == null) + { + // oddly the documentation for the IEqualityComparer.GetHashCode function says it will throw an ArgumentNullException if the param is null + return 0; // 0 seems to be what .NET framework uses when passing in null, so return the same thing to be consistent + } + else + { + unchecked + { + int hashCode = 0; +#if !Exclude_No_Hash_Array_Implementation + if (set.IsHashing) + { +#endif + int pastNodeIndex = set.slots.Length; + if (set.firstBlankAtEndIndex < pastNodeIndex) + { + pastNodeIndex = set.firstBlankAtEndIndex; + } + + for (int i = 1; i < pastNodeIndex; i++) + { + if (set.slots[i].nextIndex != 0) // nextIndex == 0 indicates a blank/available node + { + // maybe do ^= instead of add? - will this produce the same thing regardless of order? 
- if ^= maybe we don't need unchecked + // sum up the individual item hash codes - this way it won't matter what order the items are in, the same resulting hash code will be produced + hashCode += set.slots[i].hashOrNextIndexForBlanks; + } + } +#if !Exclude_No_Hash_Array_Implementation + } + else + { + for (int i = 0; i < set.count; i++) + { + // sum up the individual item hash codes - this way it won't matter what order the items are in, the same resulting hash code will be produced + hashCode += set.noHashArray[i].GetHashCode(); + } + } +#endif + return hashCode; + } + } + } + } + + /// + /// Creates and returns the IEqualityComparer for a FastHashSet which can be used to compare two FastHashSets based on their items being equal. + /// + /// An IEqualityComparer for a FastHashSet. + public static IEqualityComparer> CreateSetComparer() + { + return new FastHashSetEqualityComparer(); + } + + /// + /// Allows enumerating through items in the FastHashSet. Order is not guaranteed. + /// + /// The IEnumerator for the FastHashSet. + public IEnumerator GetEnumerator() + { + return new FastHashSetEnumerator(this); + } + + /// + /// Allows enumerating through items in the FastHashSet. Order is not guaranteed. + /// + /// The IEnumerator for the FastHashSet. + IEnumerator IEnumerable.GetEnumerator() + { + return new FastHashSetEnumerator(this); + } + + private class FastHashSetEnumerator : IEnumerator + { + private readonly FastHashSet set; + private int currentIndex = -1; + +#if !Exclude_Check_For_Is_Disposed_In_Enumerator + private bool isDisposed; +#endif + +#if !Exclude_Check_For_Set_Modifications_In_Enumerator + private readonly int incrementForEverySetModification; +#endif + + /// + /// Constructor for the FastHashSetEnumerator that takes a FastHashSet as a parameter. + /// + /// The FastHashSet to enumerate through. 
+ public FastHashSetEnumerator(FastHashSet set) + { + this.set = set; +#if !Exclude_No_Hash_Array_Implementation + if (set.IsHashing) + { +#endif + currentIndex = NullIndex; // 0 is the index before the first possible node (0 is the blank node) +#if !Exclude_No_Hash_Array_Implementation + } + else + { + currentIndex = -1; + } +#endif + +#if !Exclude_Check_For_Set_Modifications_In_Enumerator + incrementForEverySetModification = set.incrementForEverySetModification; +#endif + } + + /// + /// Moves to the next item for the FastHashSet enumerator. + /// + /// True if there was a next item, otherwise false. + public bool MoveNext() + { +#if !Exclude_Check_For_Is_Disposed_In_Enumerator + if (isDisposed) + { + // the only reason this code returns false when Disposed is called is to be compatable with HashSet + // if this level of compatibility isn't needed, then #define Exclude_Check_For_Is_Disposed_In_Enumerator to remove this check and makes the code slightly faster + return false; + } +#endif + +#if !Exclude_Check_For_Set_Modifications_In_Enumerator + if (incrementForEverySetModification != set.incrementForEverySetModification) + { + throw new InvalidOperationException("Collection was modified; enumeration operation may not execute."); + } +#endif + +#if !Exclude_No_Hash_Array_Implementation + if (set.IsHashing) + { +#endif + // it's easiest to just loop through the node array and skip any nodes that are blank + // rather than looping through the buckets array and following the nextIndex to the end of each bucket + + while (true) + { + currentIndex++; + if (currentIndex < set.firstBlankAtEndIndex) + { + if (set.slots[currentIndex].nextIndex != BlankNextIndexIndicator) + { + return true; + } + } + else + { + currentIndex = set.firstBlankAtEndIndex; + return false; + } + } +#if !Exclude_No_Hash_Array_Implementation + } + else + { + currentIndex++; + if (currentIndex < set.count) + { + return true; + } + else + { + currentIndex--; + return false; + } + } +#endif + } + + 
/// + /// Resets the FastHashSet enumerator. + /// + public void Reset() + { +#if !Exclude_Check_For_Set_Modifications_In_Enumerator + if (incrementForEverySetModification != set.incrementForEverySetModification) + { + throw new InvalidOperationException("Collection was modified; enumeration operation may not execute."); + } +#endif + +#if !Exclude_No_Hash_Array_Implementation + if (set.IsHashing) + { +#endif + currentIndex = NullIndex; // 0 is the index before the first possible node (0 is the blank node) +#if !Exclude_No_Hash_Array_Implementation + } + else + { + currentIndex = -1; + } +#endif + } + + /// + /// Implements the IDisposable.Dispose method for the FastHashSet enumerator. + /// + void IDisposable.Dispose() + { +#if !Exclude_Check_For_Is_Disposed_In_Enumerator + isDisposed = true; +#endif + } + + /// + /// Gets the current item for the FastHashSet enumerator. + /// + public T2 Current + { + get + { +#if !Exclude_No_Hash_Array_Implementation + if (set.IsHashing) + { +#endif + // it's easiest to just loop through the node array and skip any nodes with nextIndex = 0 + // rather than looping through the buckets array and following the nextIndex to the end of each bucket + + if (currentIndex > NullIndex && currentIndex < set.firstBlankAtEndIndex) + { + return set.slots[currentIndex].item; + } +#if !Exclude_No_Hash_Array_Implementation + } + else + { + if (currentIndex >= 0 && currentIndex < set.count) + { + return set.noHashArray[currentIndex]; + } + } +#endif + return default; + } + } + + /// + /// Gets a reference to the current item for the FastHashSet enumerator. 
+ /// + public ref T2 CurrentRef + { + get + { +#if !Exclude_No_Hash_Array_Implementation + if (set.IsHashing) + { +#endif + // it's easiest to just loop through the node array and skip any nodes with nextIndex = 0 + // rather than looping through the buckets array and following the nextIndex to the end of each bucket + + if (currentIndex > NullIndex && currentIndex < set.firstBlankAtEndIndex) + { + return ref set.slots[currentIndex].item; + } + else + { + // we can just return a ref to the 0 node's item instead of throwing an exception? - this should have a default item value + return ref set.slots[0].item; + } +#if !Exclude_No_Hash_Array_Implementation + } + else + { + if (currentIndex >= 0 && currentIndex < set.count) + { + return ref set.noHashArray[currentIndex]; + } + else + { + // we can just return a ref to the 0 node's item instead of throwing an exception? + return ref set.noHashArray[0]; + } + } +#endif + } + } + + /// + /// True if the current item is valid for the FastHashSet enumerator, otherwise false. + /// + public bool IsCurrentValid + { + get + { +#if !Exclude_No_Hash_Array_Implementation + if (set.IsHashing) + { +#endif + // it's easiest to just loop through the node array and skip any nodes with nextIndex = 0 + // rather than looping through the buckets array and following the nextIndex to the end of each bucket + + if (currentIndex > NullIndex && currentIndex < set.firstBlankAtEndIndex) + { + return true; + } +#if !Exclude_No_Hash_Array_Implementation + } + else + { + if (currentIndex >= 0 && currentIndex < set.count) + { + return true; + } + } +#endif + return false; + } + } + + /// + /// Gets the Current item for the FastHashSet enumerator. + /// + object IEnumerator.Current => Current; + } + + public static class FastHashSetUtil + { + /// + /// Return the prime number that is equal to n (if n is a prime number) or the closest prime number greather than n. + /// + /// The lowest number to start looking for a prime. 
+ /// The passed in n parameter value (if it is prime), or the next highest prime greater than n. + public static int GetEqualOrClosestHigherPrime(int n) + { + if (n >= LargestPrimeLessThanMaxInt) + { + // the next prime above this number is int.MaxValue, which we don't want to return that value because some indices increment one or two ints past this number and we don't want them to overflow + return LargestPrimeLessThanMaxInt; + } + + if ((n & 1) == 0) + { + n++; // make n odd + } + + bool found; + + do + { + found = true; + + int sqrt = (int)Math.Sqrt(n); + for (int i = 3; i <= sqrt; i += 2) + { + int div = n / i; + if (div * i == n) // dividing and multiplying might be faster than a single % (n % i) == 0 + { + found = false; + n += 2; + break; + } + } + } while (!found); + + return n; + } + } + } + + public struct ChainLevelAndCount : IComparable + { + public ChainLevelAndCount(int level, int count) + { + Level = level; + Count = count; + } + + public int Level; + public int Count; + + public int CompareTo(ChainLevelAndCount other) + { + return Level.CompareTo(other.Level); + } + } + +#if DEBUG + public static class DebugOutput + { + public static void OutputEnumerableItems(IEnumerable e, string enumerableName) + { + System.Diagnostics.Debug.WriteLine("---start items: " + enumerableName + "---"); + int count = 0; + foreach (T2 item in e) + { + System.Diagnostics.Debug.WriteLine(item.ToString()); + count++; + } + System.Diagnostics.Debug.WriteLine("---end items: " + enumerableName + "; count = " + count.ToString("N0") + "---"); + } + + public static void OutputSortedEnumerableItems(IEnumerable e, string enumerableName) + { + List lst = new List(e); + lst.Sort(); + System.Diagnostics.Debug.WriteLine("---start items (sorted): " + enumerableName + "---"); + int count = 0; + foreach (T2 item in lst) + { + System.Diagnostics.Debug.WriteLine(item.ToString()); + count++; + } + System.Diagnostics.Debug.WriteLine("---end items: " + enumerableName + "; count = " + 
count.ToString("N0") + "---"); + } + } +#endif +} diff --git a/LightlessSync/ThirdParty/Nanomesh/Collections/LinkedHashSet.cs b/LightlessSync/ThirdParty/Nanomesh/Collections/LinkedHashSet.cs new file mode 100644 index 0000000..a72f4d3 --- /dev/null +++ b/LightlessSync/ThirdParty/Nanomesh/Collections/LinkedHashSet.cs @@ -0,0 +1,565 @@ +using System; +using System.Collections; +using System.Collections.Generic; + +namespace Nanomesh +{ + public class LinkedHashSet : IReadOnlyCollection where T : IComparable + { + private readonly Dictionary> elements; + private LinkedHashNode first, last; + + /// + /// Initializes a new instance of the class. + /// + public LinkedHashSet() + { + elements = new Dictionary>(); + } + + /// + /// Initializes a new instance of the class. + /// + /// + public LinkedHashSet(IEnumerable initialValues) : this() + { + UnionWith(initialValues); + } + + public LinkedHashNode First => first; + + public LinkedHashNode Last => last; + + #region Implementation of IEnumerable + + /// + /// Returns an enumerator that iterates through the collection. + /// + /// + /// A that can be used to iterate through the collection. + /// + /// 1 + IEnumerator IEnumerable.GetEnumerator() + { + return GetEnumerator(); + } + + /// + /// Returns an enumerator that iterates through a collection. + /// + /// + /// An object that can be used to iterate through the collection. + /// + /// 2 + IEnumerator IEnumerable.GetEnumerator() + { + return GetEnumerator(); + } + + #endregion + + #region Implementation of ICollection + + /// + /// Gets the number of elements contained in the . + /// + /// + /// The number of elements contained in the . + /// + public int Count => elements.Count; + + /// + /// Removes all items from the . + /// + /// The is read-only. + public void Clear() + { + elements.Clear(); + first = null; + last = null; + } + + /// + /// Determines whether the contains a specific value. + /// + /// + /// true if is found in the ; otherwise, false. 
+ /// + /// The object to locate in the . + public bool Contains(T item) + { + return elements.ContainsKey(item); + } + + /// + /// Copies the elements of the to an , starting at a particular index. + /// + /// The one-dimensional that is the destination of the elements copied from . The must have zero-based indexing.The zero-based index in at which copying begins. is null. is less than 0. is multidimensional.-or-The number of elements in the source is greater than the available space from to the end of the destination .-or-Type cannot be cast automatically to the type of the destination . + public void CopyTo(T[] array, int arrayIndex) + { + int index = arrayIndex; + + foreach (T item in this) + { + array[index++] = item; + } + } + + /// + /// Removes the first occurrence of a specific object from the . + /// + /// + /// true if was successfully removed from the ; otherwise, false. This method also returns false if is not found in the original . + /// + /// The object to remove from the .The is read-only. + public bool Remove(T item) + { + if (elements.TryGetValue(item, out LinkedHashNode node)) + { + elements.Remove(item); + Unlink(node); + return true; + } + + return false; + } + + #endregion + + + #region Implementation of ISet + + /// + /// Modifies the current set so that it contains all elements that are present in either the current set or the specified collection. + /// + /// The collection to compare to the current set. is null. + public void UnionWith(IEnumerable other) + { + foreach (T item in other) + { + Add(item); + } + } + + /// + /// Modifies the current set so that it contains only elements that are also in a specified collection. + /// + /// The collection to compare to the current set. is null. 
+ public void IntersectWith(IEnumerable other) + { + ISet otherSet = AsSet(other); + + LinkedHashNode current = first; + while (current != null) + { + if (!otherSet.Contains(current.Value)) + { + elements.Remove(current.Value); + Unlink(current); + } + current = current.Next; + } + } + + /// + /// Removes all elements in the specified collection from the current set. + /// + /// The collection of items to remove from the set. is null. + public void ExceptWith(IEnumerable other) + { + foreach (T item in other) + { + Remove(item); + } + } + + /// + /// Modifies the current set so that it contains only elements that are present either in the current set or in the specified collection, but not both. + /// + /// The collection to compare to the current set. is null. + public void SymmetricExceptWith(IEnumerable other) + { + foreach (T item in other) + { + if (elements.TryGetValue(item, out LinkedHashNode node)) + { + elements.Remove(item); + Unlink(node); + } + else + { + Add(item); + } + } + } + + /// + /// Determines whether the current set is a superset of a specified collection. + /// + /// + /// true if the current set is a superset of ; otherwise, false. + /// + /// The collection to compare to the current set. is null. + public bool IsSupersetOf(IEnumerable other) + { + int numberOfOthers = CountOthers(other, out int numberOfOthersPresent); + + // All others must be present. + return numberOfOthersPresent == numberOfOthers; + } + + /// + /// Determines whether the current set is a correct superset of a specified collection. + /// + /// + /// true if the object is a correct superset of ; otherwise, false. + /// + /// The collection to compare to the current set. is null. + public bool IsProperSupersetOf(IEnumerable other) + { + int numberOfOthers = CountOthers(other, out int numberOfOthersPresent); + + // All others must be present, plus we need to have at least one additional item. 
+ return numberOfOthersPresent == numberOfOthers && numberOfOthers < Count; + } + + /// + /// Determines whether the current set and the specified collection contain the same elements. + /// + /// + /// true if the current set is equal to ; otherwise, false. + /// + /// The collection to compare to the current set. is null. + public bool SetEquals(IEnumerable other) + { + int numberOfOthers = CountOthers(other, out int numberOfOthersPresent); + + return numberOfOthers == Count && numberOfOthersPresent == Count; + } + + /// + /// Adds an element to the current set and returns a value to indicate if the element was successfully added. + /// + /// + /// true if the element is added to the set; false if the element is already in the set. + /// + /// The element to add to the set. + public bool Add(T item) + { + if (elements.ContainsKey(item)) + { + return false; + } + + LinkedHashNode node = new LinkedHashNode(item) { Previous = last }; + + if (first == null) + { + first = node; + } + + if (last != null) + { + last.Next = node; + } + + last = node; + + elements.Add(item, node); + + return true; + } + + public bool AddAfter(T item, LinkedHashNode itemInPlace) + { + if (elements.ContainsKey(item)) + { + return false; + } + + LinkedHashNode node = new LinkedHashNode(item) { Previous = itemInPlace }; + + if (itemInPlace.Next != null) + { + node.Next = itemInPlace.Next; + itemInPlace.Next.Previous = node; + } + else + { + last = node; + } + + itemInPlace.Next = node; + + elements.Add(item, node); + + return true; + } + + public bool PushAfter(T item, LinkedHashNode itemInPlace) + { + if (elements.ContainsKey(item)) + { + return false; + } + + LinkedHashNode node = Last; + Unlink(node); + elements.Remove(node.Value); + node.Value = item; + node.Next = null; + node.Previous = itemInPlace; + + if (itemInPlace.Next != null) + { + node.Next = itemInPlace.Next; + itemInPlace.Next.Previous = node; + } + else + { + last = node; + } + + itemInPlace.Next = node; + + 
elements.Add(item, node); + + return true; + } + + public bool AddBefore(T item, LinkedHashNode itemInPlace) + { + if (elements.ContainsKey(item)) + { + return false; + } + + LinkedHashNode node = new LinkedHashNode(item) { Next = itemInPlace }; + + if (itemInPlace.Previous != null) + { + node.Previous = itemInPlace.Previous; + itemInPlace.Previous.Next = node; + } + else + { + first = node; + } + + itemInPlace.Previous = node; + + elements.Add(item, node); + + return true; + } + + public bool PushBefore(T item, LinkedHashNode itemInPlace) + { + if (elements.ContainsKey(item)) + { + return false; + } + + LinkedHashNode node = Last; + Unlink(node); + elements.Remove(node.Value); + node.Value = item; + node.Previous = null; + node.Next = itemInPlace; + + if (itemInPlace.Previous != null) + { + node.Previous = itemInPlace.Previous; + itemInPlace.Previous.Next = node; + } + else + { + first = node; + } + + itemInPlace.Previous = node; + + elements.Add(item, node); + + return true; + } + + #endregion + + /// + /// Returns an enumerator that iterates through a collection. + /// + /// + /// An struct that can be used to iterate through the collection. + /// + public Enumerator GetEnumerator() + { + return new Enumerator(this); + } + + + /// + /// Count the elements in the given collection and determine both the total + /// count and how many of the elements that are present in the current set. + /// + private int CountOthers(IEnumerable items, out int numberOfOthersPresent) + { + numberOfOthersPresent = 0; + int numberOfOthers = 0; + + foreach (T item in items) + { + numberOfOthers++; + if (Contains(item)) + { + numberOfOthersPresent++; + } + } + return numberOfOthers; + } + + + /// + /// Cast the given collection to an ISet<T> if possible. If not, + /// return a new set containing the items. + /// + private static ISet AsSet(IEnumerable items) + { + return items as ISet ?? 
new HashSet(items); + } + + + /// + /// Unlink a node from the linked list by updating the node pointers in + /// its preceeding and subsequent node. Also update the _first and _last + /// pointers if necessary. + /// + private void Unlink(LinkedHashNode node) + { + if (node.Previous != null) + { + node.Previous.Next = node.Next; + } + + if (node.Next != null) + { + node.Next.Previous = node.Previous; + } + + if (ReferenceEquals(node, first)) + { + first = node.Next; + } + + if (ReferenceEquals(node, last)) + { + last = node.Previous; + } + } + + public class LinkedHashNode + { + public TElement Value; + public LinkedHashNode Next; + public LinkedHashNode Previous; + + public LinkedHashNode(TElement value) + { + Value = value; + } + + public override string ToString() + { + return Value.ToString(); + } + } + + public struct Enumerator : IEnumerator + { + private LinkedHashNode _node; + private T _current; + + internal Enumerator(LinkedHashSet set) + { + _current = default(T); + _node = set.first; + } + + /// + public bool MoveNext() + { + if (_node == null) + { + return false; + } + + _current = _node.Value; + _node = _node.Next; + return true; + } + + /// + public T Current => _current; + + /// + object IEnumerator.Current => Current; + + /// + void IEnumerator.Reset() + { + throw new NotSupportedException(); + } + + /// + public void Dispose() + { + } + } + + public void AddMin(T item) + { + LinkedHashNode current = Last; + while (current != null && item.CompareTo(current.Value) < 0) + { + current = current.Previous; + } + + if (current == Last) + { + return; + } + + if (current == null) + { + AddBefore(item, First); + } + else + { + AddAfter(item, current); + } + } + + public void PushMin(T item) + { + LinkedHashNode current = Last; + while (current != null && item.CompareTo(current.Value) < 0) + { + current = current.Previous; + } + + if (current == Last) + { + return; + } + + if (current == null) + { + PushBefore(item, First); + } + else + { + PushAfter(item, 
current); + } + } + } +} \ No newline at end of file diff --git a/LightlessSync/ThirdParty/Nanomesh/Collections/MaxHeap.cs b/LightlessSync/ThirdParty/Nanomesh/Collections/MaxHeap.cs new file mode 100644 index 0000000..9164ffa --- /dev/null +++ b/LightlessSync/ThirdParty/Nanomesh/Collections/MaxHeap.cs @@ -0,0 +1,86 @@ +using System; + +namespace Nanomesh +{ + public static class MaxHeap + { + public static T FindKthLargest(T[] nums, int k) where T : IComparable + { + Heap heap = new Heap(); + heap.Heapify(nums, nums.Length); + T data = default(T); + for (int i = 0; i < k; i++) + { + data = heap.RemoveMax(); + } + return data; + } + } + + public class Heap where T : IComparable + { + private T[] arr; + private int count; + private int size; + + public int GetLeftChild(int pos) + { + int l = 2 * pos + 1; + return l >= count ? -1 : l; + } + + public int GetRightChild(int pos) + { + int r = 2 * pos + 2; + return r >= count ? -1 : r; + } + + public void Heapify(T[] num, int n) + { + arr = new T[n]; + size = n; + for (int i = 0; i < n; i++) + { + arr[i] = num[i]; + } + + count = n; + + for (int i = (count - 1) / 2; i >= 0; i--) + { + PercolateDown(i); + } + } + public void PercolateDown(int pos) + { + int l = GetLeftChild(pos); + int r = GetRightChild(pos); + int max = pos; + if (l != -1 && arr[max].CompareTo(arr[l]) < 0) + { + max = l; + } + + if (r != -1 && arr[max].CompareTo(arr[r]) < 0) + { + max = r; + } + + if (max != pos) + { + T temp = arr[pos]; + arr[pos] = arr[max]; + arr[max] = temp; + PercolateDown(max); + } + } + public T RemoveMax() + { + T data = arr[0]; + arr[0] = arr[count - 1]; + count--; + PercolateDown(0); + return data; + } + } +} diff --git a/LightlessSync/ThirdParty/Nanomesh/Collections/MinHeap.cs b/LightlessSync/ThirdParty/Nanomesh/Collections/MinHeap.cs new file mode 100644 index 0000000..8b318c5 --- /dev/null +++ b/LightlessSync/ThirdParty/Nanomesh/Collections/MinHeap.cs @@ -0,0 +1,145 @@ +using System; +using System.Collections; +using 
System.Collections.Generic; +using System.Linq; +using System.Runtime.CompilerServices; + +namespace Nanomesh.Collections +{ + public class MinHeap : IEnumerable + { + private readonly List values; + private readonly IComparer comparer; + + public MinHeap(IEnumerable items, IComparer comparer) + { + values = new List(); + this.comparer = comparer; + values.Add(default(T)); + values.AddRange(items); + + for (int i = values.Count / 2; i >= 1; i--) + { + BubbleDown(i); + } + } + + public MinHeap(IEnumerable items) : this(items, Comparer.Default) { } + + public MinHeap(IComparer comparer) : this(new T[0], comparer) { } + + public MinHeap() : this(Comparer.Default) { } + + public int Count => values.Count - 1; + + public T Min => values[1]; + + /// + /// Extract the smallest element. + /// + /// + public T ExtractMin() + { + int count = Count; + + if (count == 0) + { + throw new InvalidOperationException("Heap is empty."); + } + + T min = Min; + values[1] = values[count]; + values.RemoveAt(count); + + if (values.Count > 1) + { + BubbleDown(1); + } + + return min; + } + + /// + /// Insert the value. 
+ /// + /// + /// + public void Add(T item) + { + values.Add(item); + BubbleUp(Count); + } + + private void BubbleUp(int index) + { + int parent = index / 2; + + while (index > 1 && CompareResult(parent, index) > 0) + { + Exchange(index, parent); + index = parent; + parent /= 2; + } + } + + private void BubbleDown(int index) + { + int min; + + while (true) + { + int left = index * 2; + int right = index * 2 + 1; + + if (left < values.Count && + CompareResult(left, index) < 0) + { + min = left; + } + else + { + min = index; + } + + if (right < values.Count && + CompareResult(right, min) < 0) + { + min = right; + } + + if (min != index) + { + Exchange(index, min); + index = min; + } + else + { + return; + } + } + } + + [MethodImpl(MethodImplOptions.AggressiveInlining)] + private int CompareResult(int index1, int index2) + { + return comparer.Compare(values[index1], values[index2]); + } + + private void Exchange(int index, int max) + { + T tmp = values[index]; + values[index] = values[max]; + values[max] = tmp; + } + + public IEnumerator GetEnumerator() + { + return values.Skip(1).GetEnumerator(); + } + + IEnumerator IEnumerable.GetEnumerator() + { + return GetEnumerator(); + } + } +} diff --git a/LightlessSync/ThirdParty/Nanomesh/Collections/OrderStatistics.cs b/LightlessSync/ThirdParty/Nanomesh/Collections/OrderStatistics.cs new file mode 100644 index 0000000..dbc3726 --- /dev/null +++ b/LightlessSync/ThirdParty/Nanomesh/Collections/OrderStatistics.cs @@ -0,0 +1,118 @@ +using System; + +namespace Nanomesh +{ + public static class OrderStatistics + { + private static T FindMedian(T[] arr, int i, int n) + { + if (i <= n) + { + Array.Sort(arr, i, n); // Sort the array + } + else + { + Array.Sort(arr, n, i); + } + + return arr[n / 2]; // Return middle element + } + + // Returns k'th smallest element + // in arr[l..r] in worst case + // linear time. 
ASSUMPTION: ALL + // ELEMENTS IN ARR[] ARE DISTINCT + public static T FindKthSmallest(T[] arr, int l, int r, int k) where T : IComparable + { + // If k is smaller than + // number of elements in array + if (k > 0 && k <= r - l + 1) + { + int n = r - l + 1; // Number of elements in arr[l..r] + + // Divide arr[] in groups of size 5, + // calculate median of every group + // and store it in median[] array. + int i; + + // There will be floor((n+4)/5) groups; + T[] median = new T[(n + 4) / 5]; + for (i = 0; i < n / 5; i++) + { + median[i] = FindMedian(arr, l + i * 5, 5); + } + + // For last group with less than 5 elements + if (i * 5 < n) + { + median[i] = FindMedian(arr, l + i * 5, n % 5); + i++; + } + + // Find median of all medians using recursive call. + // If median[] has only one element, then no need + // of recursive call + T medOfMed = (i == 1) ? median[i - 1] : FindKthSmallest(median, 0, i - 1, i / 2); + + // Partition the array around a random element and + // get position of pivot element in sorted array + int pos = Partition(arr, l, r, medOfMed); + + // If position is same as k + if (pos - l == k - 1) + { + return arr[pos]; + } + + if (pos - l > k - 1) // If position is more, recur for left + { + return FindKthSmallest(arr, l, pos - 1, k); + } + + // Else recur for right subarray + return FindKthSmallest(arr, pos + 1, r, k - pos + l - 1); + } + + // If k is more than number of elements in array + return default(T); + } + + private static void Swap(ref T[] arr, int i, int j) + { + T temp = arr[i]; + arr[i] = arr[j]; + arr[j] = temp; + } + + // It searches for x in arr[l..r], and + // partitions the array around x. 
+ private static int Partition(T[] arr, int l, int r, T x) where T : IComparable + { + // Search for x in arr[l..r] and move it to end + int i; + for (i = l; i < r; i++) + { + if (arr[i].CompareTo(x) == 0) + { + break; + } + } + + Swap(ref arr, i, r); + + // Standard partition algorithm + i = l; + for (int j = l; j <= r - 1; j++) + { + if (arr[j].CompareTo(x) <= 0) + { + Swap(ref arr, i, j); + i++; + } + } + Swap(ref arr, i, r); + return i; + } + + } +} diff --git a/LightlessSync/ThirdParty/Nanomesh/Mesh/Attributes/AttributeDefinition.cs b/LightlessSync/ThirdParty/Nanomesh/Mesh/Attributes/AttributeDefinition.cs new file mode 100644 index 0000000..3a60ca8 --- /dev/null +++ b/LightlessSync/ThirdParty/Nanomesh/Mesh/Attributes/AttributeDefinition.cs @@ -0,0 +1,30 @@ +namespace Nanomesh +{ + public struct AttributeDefinition + { + public double weight; + public AttributeType type; + public int id; + + public AttributeDefinition(AttributeType type) + { + this.weight = 1; + this.type = type; + this.id = 0; + } + + public AttributeDefinition(AttributeType type, double weight) + { + this.weight = weight; + this.type = type; + this.id = 0; + } + + public AttributeDefinition(AttributeType type, double weight, int id) + { + this.weight = weight; + this.type = type; + this.id = id; + } + } +} \ No newline at end of file diff --git a/LightlessSync/ThirdParty/Nanomesh/Mesh/Attributes/AttributeType.cs b/LightlessSync/ThirdParty/Nanomesh/Mesh/Attributes/AttributeType.cs new file mode 100644 index 0000000..24d596a --- /dev/null +++ b/LightlessSync/ThirdParty/Nanomesh/Mesh/Attributes/AttributeType.cs @@ -0,0 +1,39 @@ +using System; +using System.Collections.Generic; + +namespace Nanomesh +{ + public enum AttributeType + { + Normals, + UVs, + BoneWeights, + Colors, + } + + public static class AttributeUtils + { + public static MetaAttributeList CreateAttributesFromDefinitions(IList attributeDefinitions) + { + MetaAttributeList attributeList = new EmptyMetaAttributeList(0); + for (int 
i = 0; i < attributeDefinitions.Count; i++) + { + switch (attributeDefinitions[i].type) + { + case AttributeType.Normals: + attributeList = attributeList.AddAttributeType(); + break; + case AttributeType.UVs: + attributeList = attributeList.AddAttributeType(); + break; + case AttributeType.BoneWeights: + attributeList = attributeList.AddAttributeType(); + break; + default: + throw new NotImplementedException(); + } + } + return attributeList; + } + } +} \ No newline at end of file diff --git a/LightlessSync/ThirdParty/Nanomesh/Mesh/Attributes/MetaAttribute.cs b/LightlessSync/ThirdParty/Nanomesh/Mesh/Attributes/MetaAttribute.cs new file mode 100644 index 0000000..a08c16a --- /dev/null +++ b/LightlessSync/ThirdParty/Nanomesh/Mesh/Attributes/MetaAttribute.cs @@ -0,0 +1,406 @@ +using System; + +namespace Nanomesh +{ + public unsafe interface IMetaAttribute + { + IMetaAttribute Set(int index, K value) where K : unmanaged; + K Get(int index) where K : unmanaged; + } + + public unsafe struct MetaAttribute : IMetaAttribute + where T0 : unmanaged + { + public T0 attr0; + + public MetaAttribute(T0 attr0) + { + this.attr0 = attr0; + } + + public unsafe K Get(int index) where K : unmanaged + { + switch (index) + { + case 0: + fixed (T0* k = &attr0) + { + K* kk = (K*)k; + return kk[0]; + } + default: + throw new ArgumentOutOfRangeException(); + } + } + + public IMetaAttribute Set(int index, K value) where K : unmanaged + { + switch (index) + { + case 0: + fixed (T0* k = &attr0) + { + K* kk = (K*)k; + kk[0] = value; + return this; + } + default: + throw new ArgumentOutOfRangeException(); + } + } + + public override int GetHashCode() + { + return attr0.GetHashCode(); + } + + public override bool Equals(object obj) + { + return ((MetaAttribute)obj).attr0.Equals(attr0); + } + } + + public unsafe struct MetaAttribute : IMetaAttribute + where T0 : unmanaged + where T1 : unmanaged + { + public T0 attr0; + public T1 attr1; + + public MetaAttribute(T0 attr0, T1 attr1) + { + this.attr0 = 
attr0; + this.attr1 = attr1; + } + + public unsafe K Get(int index) where K : unmanaged + { + switch (index) + { + case 0: + fixed (T0* k = &attr0) + { + K* kk = (K*)k; + return kk[0]; + } + case 1: + fixed (T1* k = &attr1) + { + K* kk = (K*)k; + return kk[0]; + } + default: + throw new ArgumentOutOfRangeException(); + } + } + + public IMetaAttribute Set(int index, K value) where K : unmanaged + { + switch (index) + { + case 0: + fixed (T0* k = &attr0) + { + K* kk = (K*)k; + kk[0] = value; + return this; + } + case 1: + fixed (T1* k = &attr1) + { + K* kk = (K*)k; + kk[0] = value; + return this; + } + default: + throw new ArgumentOutOfRangeException(); + } + } + } + + public unsafe struct MetaAttribute : IMetaAttribute + where T0 : unmanaged + where T1 : unmanaged + where T2 : unmanaged + { + public T0 attr0; + public T1 attr1; + public T2 attr2; + + public MetaAttribute(T0 attr0, T1 attr1, T2 attr2) + { + this.attr0 = attr0; + this.attr1 = attr1; + this.attr2 = attr2; + } + + public unsafe K Get(int index) where K : unmanaged + { + switch (index) + { + case 0: + fixed (T0* k = &attr0) + { + K* kk = (K*)k; + return kk[0]; + } + case 1: + fixed (T1* k = &attr1) + { + K* kk = (K*)k; + return kk[0]; + } + case 2: + fixed (T2* k = &attr2) + { + K* kk = (K*)k; + return kk[0]; + } + default: + throw new ArgumentOutOfRangeException(); + } + } + + public IMetaAttribute Set(int index, K value) where K : unmanaged + { + switch (index) + { + case 0: + fixed (T0* k = &attr0) + { + K* kk = (K*)k; + kk[0] = value; + return this; + } + case 1: + fixed (T1* k = &attr1) + { + K* kk = (K*)k; + kk[0] = value; + return this; + } + case 2: + fixed (T2* k = &attr2) + { + K* kk = (K*)k; + kk[0] = value; + return this; + } + default: + throw new ArgumentOutOfRangeException(); + } + } + } + + public unsafe struct MetaAttribute : IMetaAttribute + where T0 : unmanaged + where T1 : unmanaged + where T2 : unmanaged + where T3 : unmanaged + { + public T0 attr0; + public T1 attr1; + public T2 
attr2; + public T3 attr3; + + public MetaAttribute(T0 attr0, T1 attr1, T2 attr2, T3 attr3) + { + this.attr0 = attr0; + this.attr1 = attr1; + this.attr2 = attr2; + this.attr3 = attr3; + } + + public unsafe K Get(int index) where K : unmanaged + { + switch (index) + { + case 0: + fixed (T0* k = &attr0) + { + K* kk = (K*)k; + return kk[0]; + } + case 1: + fixed (T1* k = &attr1) + { + K* kk = (K*)k; + return kk[0]; + } + case 2: + fixed (T2* k = &attr2) + { + K* kk = (K*)k; + return kk[0]; + } + case 3: + fixed (T3* k = &attr3) + { + K* kk = (K*)k; + return kk[0]; + } + default: + throw new ArgumentOutOfRangeException(); + } + + // Shorter idea but only C# 8.0: + //fixed (void* v = &this) + //{ + // byte* b = (byte*)v; + // b += Positions[index]; + // return ((K*)b)[0]; + //}; + } + + public IMetaAttribute Set(int index, K value) where K : unmanaged + { + switch (index) + { + case 0: + fixed (T0* k = &attr0) + { + K* kk = (K*)k; + kk[0] = value; + return this; + } + case 1: + fixed (T1* k = &attr1) + { + K* kk = (K*)k; + kk[0] = value; + return this; + } + case 2: + fixed (T2* k = &attr2) + { + K* kk = (K*)k; + kk[0] = value; + return this; + } + case 3: + fixed (T3* k = &attr3) + { + K* kk = (K*)k; + kk[0] = value; + return this; + } + default: + throw new ArgumentOutOfRangeException(); + } + } + } + + public unsafe struct MetaAttribute : IMetaAttribute + where T0 : unmanaged + where T1 : unmanaged + where T2 : unmanaged + where T3 : unmanaged + where T4 : unmanaged + { + public T0 attr0; + public T1 attr1; + public T2 attr2; + public T3 attr3; + public T4 attr4; + + public MetaAttribute(T0 attr0, T1 attr1, T2 attr2, T3 attr3, T4 attr4) + { + this.attr0 = attr0; + this.attr1 = attr1; + this.attr2 = attr2; + this.attr3 = attr3; + this.attr4 = attr4; + } + + public unsafe K Get(int index) where K : unmanaged + { + switch (index) + { + case 0: + fixed (T0* k = &attr0) + { + K* kk = (K*)k; + return kk[0]; + } + case 1: + fixed (T1* k = &attr1) + { + K* kk = (K*)k; + 
return kk[0]; + } + case 2: + fixed (T2* k = &attr2) + { + K* kk = (K*)k; + return kk[0]; + } + case 3: + fixed (T3* k = &attr3) + { + K* kk = (K*)k; + return kk[0]; + } + case 4: + fixed (T4* k = &attr4) + { + K* kk = (K*)k; + return kk[0]; + } + default: + throw new ArgumentOutOfRangeException(); + } + + // Shorter idea but only C# 8.0: + //fixed (void* v = &this) + //{ + // byte* b = (byte*)v; + // b += Positions[index]; + // return ((K*)b)[0]; + //}; + } + + public IMetaAttribute Set(int index, K value) where K : unmanaged + { + switch (index) + { + case 0: + fixed (T0* k = &attr0) + { + K* kk = (K*)k; + kk[0] = value; + return this; + } + case 1: + fixed (T1* k = &attr1) + { + K* kk = (K*)k; + kk[0] = value; + return this; + } + case 2: + fixed (T2* k = &attr2) + { + K* kk = (K*)k; + kk[0] = value; + return this; + } + case 3: + fixed (T3* k = &attr3) + { + K* kk = (K*)k; + kk[0] = value; + return this; + } + case 4: + fixed (T4* k = &attr4) + { + K* kk = (K*)k; + kk[0] = value; + return this; + } + default: + throw new ArgumentOutOfRangeException(); + } + } + } +} diff --git a/LightlessSync/ThirdParty/Nanomesh/Mesh/Attributes/MetaAttributeList.cs b/LightlessSync/ThirdParty/Nanomesh/Mesh/Attributes/MetaAttributeList.cs new file mode 100644 index 0000000..048969c --- /dev/null +++ b/LightlessSync/ThirdParty/Nanomesh/Mesh/Attributes/MetaAttributeList.cs @@ -0,0 +1,448 @@ +using System; + +namespace Nanomesh +{ + public abstract class MetaAttributeList + { + public abstract IMetaAttribute this[int index] + { + get; + set; + } + + public abstract int Count { get; } + + public abstract int CountPerAttribute { get; } + + public abstract MetaAttributeList CreateNew(int length); + + public abstract MetaAttributeList AddAttributeType() + where T : unmanaged, IInterpolable; + + public abstract bool Equals(int indexA, int indexB, int attribute); + + public abstract void Interpolate(int attribute, int indexA, int indexB, double ratio); + } + + public class 
EmptyMetaAttributeList : MetaAttributeList + { + private readonly int _length; + + public EmptyMetaAttributeList(int length) + { + _length = length; + } + + public override IMetaAttribute this[int index] + { + get => throw new System.Exception(); + set => throw new System.Exception(); + } + + public override MetaAttributeList CreateNew(int length) => new EmptyMetaAttributeList(length); + + public override unsafe bool Equals(int indexA, int indexB, int attribute) + { + return false; + } + + public override void Interpolate(int attribute, int indexA, int indexB, double ratio) + { + throw new System.Exception(); + } + + public override MetaAttributeList AddAttributeType() + { + return new MetaAttributeList(_length); + } + + public override int Count => 0; + + public override int CountPerAttribute => 0; + } + + public class MetaAttributeList : MetaAttributeList + where T0 : unmanaged, IInterpolable + { + private readonly MetaAttribute[] _attributes; + + public MetaAttributeList(int length) + { + _attributes = new MetaAttribute[length]; + } + + public override IMetaAttribute this[int index] + { + get => _attributes[index]; + set => _attributes[index] = (MetaAttribute)value; + } + + public void Set(MetaAttribute value, int index) + { + _attributes[index] = value; + } + + private void Get(MetaAttribute value, int index) + { + _attributes[index] = value; + } + + public override MetaAttributeList CreateNew(int length) => new MetaAttributeList(length); + + public override unsafe bool Equals(int indexA, int indexB, int attribute) + { + switch (attribute) + { + case 0: + return _attributes[indexA].Get(0).Equals(_attributes[indexB].Get(0)); + default: + throw new ArgumentOutOfRangeException(); + } + } + + public override void Interpolate(int attribute, int indexA, int indexB, double ratio) + { + _attributes[indexA].attr0 = _attributes[indexA].Get(0).Interpolate(_attributes[indexB].Get(0), ratio); + _attributes[indexB].attr0 = _attributes[indexA].attr0; + } + + public override 
MetaAttributeList AddAttributeType() + { + MetaAttributeList newAttributes = new MetaAttributeList(_attributes.Length); + for (int i = 0; i < Count; i++) + newAttributes.Set(new MetaAttribute(_attributes[i].attr0, default(T)), i); + return newAttributes; + } + + public override int Count => _attributes.Length; + + public override int CountPerAttribute => 1; + } + + public class MetaAttributeList : MetaAttributeList + where T0 : unmanaged, IInterpolable + where T1 : unmanaged, IInterpolable + { + private readonly MetaAttribute[] _attributes; + + public MetaAttributeList(int length) + { + _attributes = new MetaAttribute[length]; + } + + public override IMetaAttribute this[int index] + { + get => _attributes[index]; + set => _attributes[index] = (MetaAttribute)value; + } + + public void Set(MetaAttribute value, int index) + { + _attributes[index] = value; + } + + private void Get(MetaAttribute value, int index) + { + _attributes[index] = value; + } + + public override MetaAttributeList CreateNew(int length) => new MetaAttributeList(length); + + public override unsafe bool Equals(int indexA, int indexB, int attribute) + { + switch (attribute) + { + case 0: + return _attributes[indexA].Get(0).Equals(_attributes[indexB].Get(0)); + case 1: + return _attributes[indexA].Get(1).Equals(_attributes[indexB].Get(1)); + default: + throw new ArgumentOutOfRangeException(); + } + } + + public override void Interpolate(int attribute, int indexA, int indexB, double ratio) + { + switch (attribute) + { + case 0: + _attributes[indexA].attr0 = _attributes[indexA].Get(0).Interpolate(_attributes[indexB].Get(0), ratio); + _attributes[indexB].attr0 = _attributes[indexA].attr0; + break; + case 1: + _attributes[indexA].attr1 = _attributes[indexA].Get(1).Interpolate(_attributes[indexB].Get(1), ratio); + _attributes[indexB].attr1 = _attributes[indexA].attr1; + break; + default: + throw new ArgumentOutOfRangeException(); + } + } + + public override MetaAttributeList AddAttributeType() + { + 
MetaAttributeList newAttributes = new MetaAttributeList(_attributes.Length); + for (int i = 0; i < Count; i++) + newAttributes.Set(new MetaAttribute(_attributes[i].attr0, _attributes[i].attr1, default(T)), i); + return newAttributes; + } + + public override int Count => _attributes.Length; + + public override int CountPerAttribute => 2; + } + + public class MetaAttributeList : MetaAttributeList + where T0 : unmanaged, IInterpolable + where T1 : unmanaged, IInterpolable + where T2 : unmanaged, IInterpolable + { + private readonly MetaAttribute[] _attributes; + + public MetaAttributeList(int length) + { + _attributes = new MetaAttribute[length]; + } + + public override IMetaAttribute this[int index] + { + get => _attributes[index]; + set => _attributes[index] = (MetaAttribute)value; + } + + public void Set(MetaAttribute value, int index) + { + _attributes[index] = value; + } + + private void Get(MetaAttribute value, int index) + { + _attributes[index] = value; + } + + public override MetaAttributeList CreateNew(int length) => new MetaAttributeList(length); + + public override unsafe bool Equals(int indexA, int indexB, int attribute) + { + switch (attribute) + { + case 0: + return _attributes[indexA].Get(0).Equals(_attributes[indexB].Get(0)); + case 1: + return _attributes[indexA].Get(1).Equals(_attributes[indexB].Get(1)); + case 2: + return _attributes[indexA].Get(2).Equals(_attributes[indexB].Get(2)); + default: + throw new ArgumentOutOfRangeException(); + } + } + + public override void Interpolate(int attribute, int indexA, int indexB, double ratio) + { + switch (attribute) + { + case 0: + _attributes[indexA].attr0 = _attributes[indexA].Get(0).Interpolate(_attributes[indexB].Get(0), ratio); + _attributes[indexB].attr0 = _attributes[indexA].attr0; + break; + case 1: + _attributes[indexA].attr1 = _attributes[indexA].Get(1).Interpolate(_attributes[indexB].Get(1), ratio); + _attributes[indexB].attr1 = _attributes[indexA].attr1; + break; + case 2: + 
_attributes[indexA].attr2 = _attributes[indexA].Get(2).Interpolate(_attributes[indexB].Get(2), ratio); + _attributes[indexB].attr2 = _attributes[indexA].attr2; + break; + default: + throw new ArgumentOutOfRangeException(); + } + } + + public override MetaAttributeList AddAttributeType() + { + MetaAttributeList newAttributes = new MetaAttributeList(_attributes.Length); + for (int i = 0; i < Count; i++) + newAttributes.Set(new MetaAttribute(_attributes[i].attr0, _attributes[i].attr1, _attributes[i].attr2, default(T)), i); + return newAttributes; + } + + public override int Count => _attributes.Length; + + public override int CountPerAttribute => 3; + } + + public class MetaAttributeList : MetaAttributeList + where T0 : unmanaged, IInterpolable + where T1 : unmanaged, IInterpolable + where T2 : unmanaged, IInterpolable + where T3 : unmanaged, IInterpolable + { + private readonly MetaAttribute[] _attributes; + + public MetaAttributeList(int length) + { + _attributes = new MetaAttribute[length]; + } + + public override IMetaAttribute this[int index] + { + get => _attributes[index]; + set => _attributes[index] = (MetaAttribute)value; + } + + public void Set(MetaAttribute value, int index) + { + _attributes[index] = value; + } + + private void Get(MetaAttribute value, int index) + { + _attributes[index] = value; + } + + public override MetaAttributeList CreateNew(int length) => new MetaAttributeList(length); + + public override unsafe bool Equals(int indexA, int indexB, int attribute) + { + switch (attribute) + { + case 0: + return _attributes[indexA].Get(0).Equals(_attributes[indexB].Get(0)); + case 1: + return _attributes[indexA].Get(1).Equals(_attributes[indexB].Get(1)); + case 2: + return _attributes[indexA].Get(2).Equals(_attributes[indexB].Get(2)); + case 3: + return _attributes[indexA].Get(3).Equals(_attributes[indexB].Get(3)); + default: + throw new ArgumentOutOfRangeException(); + } + } + + public override void Interpolate(int attribute, int indexA, int indexB, 
double ratio) + { + switch (attribute) + { + case 0: + _attributes[indexA].attr0 = _attributes[indexA].Get(0).Interpolate(_attributes[indexB].Get(0), ratio); + _attributes[indexB].attr0 = _attributes[indexA].attr0; + break; + case 1: + _attributes[indexA].attr1 = _attributes[indexA].Get(1).Interpolate(_attributes[indexB].Get(1), ratio); + _attributes[indexB].attr1 = _attributes[indexA].attr1; + break; + case 2: + _attributes[indexA].attr2 = _attributes[indexA].Get(2).Interpolate(_attributes[indexB].Get(2), ratio); + _attributes[indexB].attr2 = _attributes[indexA].attr2; + break; + case 3: + _attributes[indexA].attr3 = _attributes[indexA].Get(3).Interpolate(_attributes[indexB].Get(3), ratio); + _attributes[indexB].attr3 = _attributes[indexA].attr3; + break; + default: + throw new ArgumentOutOfRangeException(); + } + } + + public override MetaAttributeList AddAttributeType() + { + MetaAttributeList newAttributes = new MetaAttributeList(_attributes.Length); + for (int i = 0; i < Count; i++) + newAttributes.Set(new MetaAttribute(_attributes[i].attr0, _attributes[i].attr1, _attributes[i].attr2, _attributes[i].attr3, default(T)), i); + return newAttributes; + } + + public override int Count => _attributes.Length; + + public override int CountPerAttribute => 4; + } + + public class MetaAttributeList : MetaAttributeList + where T0 : unmanaged, IInterpolable + where T1 : unmanaged, IInterpolable + where T2 : unmanaged, IInterpolable + where T3 : unmanaged, IInterpolable + where T4 : unmanaged, IInterpolable + { + private readonly MetaAttribute[] _attributes; + + public MetaAttributeList(int length) + { + _attributes = new MetaAttribute[length]; + } + + public override IMetaAttribute this[int index] + { + get => _attributes[index]; + set => _attributes[index] = (MetaAttribute)value; + } + + public void Set(MetaAttribute value, int index) + { + _attributes[index] = value; + } + + private void Get(MetaAttribute value, int index) + { + _attributes[index] = value; + } + + public 
override MetaAttributeList CreateNew(int length) => new MetaAttributeList(length); + + public override unsafe bool Equals(int indexA, int indexB, int attribute) + { + switch (attribute) + { + case 0: + return _attributes[indexA].Get(0).Equals(_attributes[indexB].Get(0)); + case 1: + return _attributes[indexA].Get(1).Equals(_attributes[indexB].Get(1)); + case 2: + return _attributes[indexA].Get(2).Equals(_attributes[indexB].Get(2)); + case 3: + return _attributes[indexA].Get(3).Equals(_attributes[indexB].Get(3)); + case 4: + return _attributes[indexA].Get(3).Equals(_attributes[indexB].Get(4)); + default: + throw new ArgumentOutOfRangeException(); + } + } + + public override void Interpolate(int attribute, int indexA, int indexB, double ratio) + { + switch (attribute) + { + case 0: + _attributes[indexA].attr0 = _attributes[indexA].Get(0).Interpolate(_attributes[indexB].Get(0), ratio); + _attributes[indexB].attr0 = _attributes[indexA].attr0; + break; + case 1: + _attributes[indexA].attr1 = _attributes[indexA].Get(1).Interpolate(_attributes[indexB].Get(1), ratio); + _attributes[indexB].attr1 = _attributes[indexA].attr1; + break; + case 2: + _attributes[indexA].attr2 = _attributes[indexA].Get(2).Interpolate(_attributes[indexB].Get(2), ratio); + _attributes[indexB].attr2 = _attributes[indexA].attr2; + break; + case 3: + _attributes[indexA].attr3 = _attributes[indexA].Get(3).Interpolate(_attributes[indexB].Get(3), ratio); + _attributes[indexB].attr3 = _attributes[indexA].attr3; + break; + case 4: + _attributes[indexA].attr4 = _attributes[indexA].Get(4).Interpolate(_attributes[indexB].Get(4), ratio); + _attributes[indexB].attr4 = _attributes[indexA].attr4; + break; + default: + throw new ArgumentOutOfRangeException(); + } + } + + public override MetaAttributeList AddAttributeType() + { + throw new NotImplementedException(); + } + + public override int Count => _attributes.Length; + + public override int CountPerAttribute => 5; + } +} \ No newline at end of file diff --git 
using System;
using System.Collections.Generic;
using System.Diagnostics;

namespace Nanomesh
{
    // Rough memory comparison, taking F = 2V:
    // Halfedge mesh            : V * sizeof(vertex) + 3F * sizeof(Halfedge) + F * sizeof(Face) = 16 * 0.5F + 3F * 20 + 4F = 72F
    // Connected mesh           : V * sizeof(Vector3) + 3F * sizeof(Node) + F * sizeof(Face)    = 12 * 0.5F + 3F * 12 + 12F = 54F (without attributes)
    // Connected mesh (no face) : V * sizeof(Vector3) + 3F * sizeof(Node)                       = 12 * 0.5F + 3F * 12 = 42F (without attributes)
    public partial class ConnectedMesh
    {
        // Todo : make these private (should only be modified from the inside)
        public Vector3[] positions;
        public MetaAttributeList attributes;
        public Node[] nodes;
        public Group[] groups;
        public AttributeDefinition[] attributeDefinitions;

        /// <summary>
        /// Lazily built map from position index to one live node at that position
        /// (-1 when no live node references the position). Invalidated by topology edits.
        /// </summary>
        public int[] PositionToNode => _positionToNode ??= GetPositionToNode();
        private int[] _positionToNode;

        internal int _faceCount;
        public int FaceCount => _faceCount;

        /// <summary>Two nodes are siblings when they share the same position index.</summary>
        public bool AreNodesSiblings(int nodeIndexA, int nodeIndexB)
        {
            return nodes[nodeIndexA].position == nodes[nodeIndexB].position;
        }

        /// <summary>Builds a fresh position→node map; positions with no live node map to -1.</summary>
        public int[] GetPositionToNode()
        {
            int[] positionToNode = new int[positions.Length];
            Array.Fill(positionToNode, -1);

            for (int i = 0; i < nodes.Length; i++)
            {
                if (!nodes[i].IsRemoved)
                {
                    positionToNode[nodes[i].position] = i;
                }
            }

            return positionToNode;
        }

        /// <summary>Number of edges of the face containing <paramref name="nodeIndex"/> (relatives + 1).</summary>
        public int GetEdgeCount(int nodeIndex)
        {
            return GetRelativesCount(nodeIndex) + 1;
        }

        /// <summary>Counts the other nodes in the relative (face) loop of <paramref name="nodeIndex"/>.</summary>
        public int GetRelativesCount(int nodeIndex)
        {
            int count = 0;
            int relative = nodeIndex;
            while ((relative = nodes[relative].relative) != nodeIndex)
            {
                count++;
            }
            return count;
        }

        /// <summary>Counts the other nodes in the sibling (same-position) loop of <paramref name="nodeIndex"/>.</summary>
        public int GetSiblingsCount(int nodeIndex)
        {
            int count = 0;
            int sibling = nodeIndex;
            while ((sibling = nodes[sibling].sibling) != nodeIndex)
            {
                count++;
            }
            return count;
        }

        /// <summary>
        /// Walks the sibling loop containing <paramref name="nodeIndex"/>, skipping removed
        /// nodes, and relinks the survivors into a closed loop sharing the first survivor's
        /// position. Returns the first surviving node, or -1 when every sibling was removed.
        /// </summary>
        public int ReconnectSiblings(int nodeIndex)
        {
            int sibling = nodeIndex;
            int lastValid = -1;
            int firstValid = -1;
            int position = -1;

            do
            {
                if (nodes[sibling].IsRemoved)
                {
                    continue; // 'continue' still advances: the while condition steps to the next sibling
                }

                if (firstValid == -1)
                {
                    firstValid = sibling;
                    position = nodes[sibling].position;
                }

                if (lastValid != -1)
                {
                    nodes[lastValid].sibling = sibling;
                    nodes[lastValid].position = position;
                }

                lastValid = sibling;
            }
            while ((sibling = nodes[sibling].sibling) != nodeIndex);

            if (lastValid == -1)
            {
                return -1; // All siblings were removed
            }

            // Close the loop
            nodes[lastValid].sibling = firstValid;
            nodes[lastValid].position = position;

            return firstValid;
        }

        /// <summary>
        /// Merges the sibling loops of A and B into a single closed loop at
        /// <paramref name="position"/>, skipping removed nodes. Returns the first
        /// surviving node, or -1 when both loops were fully removed.
        /// </summary>
        public int ReconnectSiblings(int nodeIndexA, int nodeIndexB, int position)
        {
            int lastValid = -1;
            int firstValid = -1;

            // Chains the survivors of one sibling loop onto the shared firstValid/lastValid state.
            void Relink(int start)
            {
                int sibling = start;
                do
                {
                    if (nodes[sibling].IsRemoved)
                    {
                        continue;
                    }

                    if (firstValid == -1)
                    {
                        firstValid = sibling;
                    }

                    if (lastValid != -1)
                    {
                        nodes[lastValid].sibling = sibling;
                        nodes[lastValid].position = position;
                    }

                    lastValid = sibling;
                }
                while ((sibling = nodes[sibling].sibling) != start);
            }

            Relink(nodeIndexA);
            Relink(nodeIndexB);

            if (lastValid == -1)
            {
                return -1; // All siblings were removed
            }

            // Close the loop
            nodes[lastValid].sibling = firstValid;
            nodes[lastValid].position = position;

            return firstValid;
        }

        /// <summary>
        /// Collapses edge A→B: removes the (triangular) faces touching both positions and
        /// merges B's sibling loop into A's. Returns a live node at A's position.
        /// </summary>
        public int CollapseEdge(int nodeIndexA, int nodeIndexB)
        {
            int posA = nodes[nodeIndexA].position;
            int posB = nodes[nodeIndexB].position;

            Debug.Assert(posA != posB, "A and B must have different positions");
            Debug.Assert(!nodes[nodeIndexA].IsRemoved);
            Debug.Assert(!nodes[nodeIndexB].IsRemoved);

            Debug.Assert(CheckRelatives(nodeIndexA), "A's relatives must be valid");
            Debug.Assert(CheckRelatives(nodeIndexB), "B's relatives must be valid");
            Debug.Assert(CheckSiblings(nodeIndexA), "A's siblings must be valid");
            Debug.Assert(CheckSiblings(nodeIndexB), "B's siblings must be valid");

            int siblingOfA = nodeIndexA;
            do // Iterates over faces around A
            {
                bool isFaceTouched = false;
                int faceEdgeCount = 0;
                int nodeIndexC = -1; // third corner of a touched triangle (neither A nor B)

                int relativeOfA = siblingOfA;
                do // Circulate in face
                {
                    int posC = nodes[relativeOfA].position;
                    if (posC == posB)
                    {
                        isFaceTouched = true;
                    }
                    else if (posC != posA)
                    {
                        nodeIndexC = relativeOfA;
                    }

                    faceEdgeCount++;
                } while ((relativeOfA = nodes[relativeOfA].relative) != siblingOfA);

                if (faceEdgeCount != 3)
                    throw new NotImplementedException(); // collapse only supports triangles

                if (isFaceTouched && faceEdgeCount == 3)
                {
                    // Remove face: mark its nodes removed and reconnect siblings around C
                    int posC = nodes[nodeIndexC].position;

                    relativeOfA = siblingOfA;
                    do
                    {
                        nodes[relativeOfA].MarkRemoved();

                    } while ((relativeOfA = nodes[relativeOfA].relative) != siblingOfA);

                    int validNodeAtC = ReconnectSiblings(nodeIndexC);

                    if (_positionToNode != null)
                    {
                        _positionToNode[posC] = validNodeAtC;
                    }

                    _faceCount--;
                }
            } while ((siblingOfA = nodes[siblingOfA].sibling) != nodeIndexA);

            int validNodeAtA = ReconnectSiblings(nodeIndexA, nodeIndexB, posA);

            if (_positionToNode != null)
            {
                _positionToNode[posA] = validNodeAtA;
                _positionToNode[posB] = -1; // B's position has no nodes anymore
            }

            return validNodeAtA;
        }

        /// <summary>
        /// Topological cost of collapsing edge A→B: accumulates attribute-seam weights and
        /// a large penalty for border / non-manifold edges (face count != 2).
        /// </summary>
        public double GetEdgeTopo(int nodeIndexA, int nodeIndexB)
        {
            if ((uint)nodeIndexA >= (uint)nodes.Length || (uint)nodeIndexB >= (uint)nodes.Length)
            {
                return EdgeBorderPenalty;
            }

            if (nodes[nodeIndexA].IsRemoved || nodes[nodeIndexB].IsRemoved)
            {
                return EdgeBorderPenalty;
            }

            int posB = nodes[nodeIndexB].position;

            int facesAttached = 0;

            int attrAtA = -1;
            int attrAtB = -1;

            double edgeWeight = 0;

            int siblingOfA = nodeIndexA;
            do
            {
                int relativeOfA = siblingOfA;
                while ((relativeOfA = nodes[relativeOfA].relative) != siblingOfA)
                {
                    int posC = nodes[relativeOfA].position;
                    if (posC == posB)
                    {
                        facesAttached++;

                        if (attributes != null)
                        {
                            // A differing attribute across two faces sharing this edge marks a
                            // seam (UV/normal split); penalize the collapse per attribute weight.
                            for (int i = 0; i < attributes.CountPerAttribute; i++)
                            {
                                if (attrAtB != -1 && !attributes.Equals(attrAtB, nodes[relativeOfA].attribute, i))
                                {
                                    edgeWeight += attributeDefinitions[i].weight;
                                }

                                if (attrAtA != -1 && !attributes.Equals(attrAtA, nodes[siblingOfA].attribute, i))
                                {
                                    edgeWeight += attributeDefinitions[i].weight;
                                }
                            }
                        }

                        attrAtB = nodes[relativeOfA].attribute;
                        attrAtA = nodes[siblingOfA].attribute;
                    }
                }
            } while ((siblingOfA = nodes[siblingOfA].sibling) != nodeIndexA);

            if (facesAttached != 2) // Border or non-manifold edge
            {
                edgeWeight += EdgeBorderPenalty;
            }

            return edgeWeight;
        }

        internal static double EdgeBorderPenalty = 1027.007;

        // TODO : Make it work with any polygon (other than triangle)
        /// <summary>Unit normal of the (triangular) face containing <paramref name="nodeIndex"/>.</summary>
        public Vector3 GetFaceNormal(int nodeIndex)
        {
            int posA = nodes[nodeIndex].position;
            int posB = nodes[nodes[nodeIndex].relative].position;
            int posC = nodes[nodes[nodes[nodeIndex].relative].relative].position;

            Vector3 normal = Vector3.Cross(
                positions[posB] - positions[posA],
                positions[posC] - positions[posA]);

            return normal.Normalized;
        }

        // TODO : Make it work with any polygon (other than triangle)
        /// <summary>Area of the (triangular) face containing <paramref name="nodeIndex"/>.</summary>
        public double GetFaceArea(int nodeIndex)
        {
            int posA = nodes[nodeIndex].position;
            int posB = nodes[nodes[nodeIndex].relative].position;
            int posC = nodes[nodes[nodes[nodeIndex].relative].relative].position;

            Vector3 normal = Vector3.Cross(
                positions[posB] - positions[posA],
                positions[posC] - positions[posA]);

            return 0.5 * normal.Length;
        }
+ public double GetAngleRadians(int nodeIndex) + { + int posA = nodes[nodeIndex].position; + int posB = nodes[nodes[nodeIndex].relative].position; + int posC = nodes[nodes[nodes[nodeIndex].relative].relative].position; + + return Vector3.AngleRadians( + positions[posB] - positions[posA], + positions[posC] - positions[posA]); + } + + public void Compact() + { + // Rebuild nodes array with only valid nodes + { + int validNodesCount = 0; + for (int i = 0; i < nodes.Length; i++) + if (!nodes[i].IsRemoved) + validNodesCount++; + + Node[] newNodes = new Node[validNodesCount]; + int k = 0; + Dictionary oldToNewNodeIndex = new Dictionary(); + for (int i = 0; i < nodes.Length; i++) + { + if (!nodes[i].IsRemoved) + { + newNodes[k] = nodes[i]; + oldToNewNodeIndex.Add(i, k); + k++; + } + } + for (int i = 0; i < newNodes.Length; i++) + { + newNodes[i].relative = oldToNewNodeIndex[newNodes[i].relative]; + newNodes[i].sibling = oldToNewNodeIndex[newNodes[i].sibling]; + } + nodes = newNodes; + } + + // Remap positions + { + Dictionary oldToNewPosIndex = new Dictionary(); + for (int i = 0; i < nodes.Length; i++) + { + if (!oldToNewPosIndex.ContainsKey(nodes[i].position)) + oldToNewPosIndex.Add(nodes[i].position, oldToNewPosIndex.Count); + + nodes[i].position = oldToNewPosIndex[nodes[i].position]; + } + Vector3[] newPositions = new Vector3[oldToNewPosIndex.Count]; + foreach (KeyValuePair oldToNewPos in oldToNewPosIndex) + { + newPositions[oldToNewPos.Value] = positions[oldToNewPos.Key]; + } + positions = newPositions; + } + + // Remap attributes + if (attributes != null) + { + Dictionary oldToNewAttrIndex = new Dictionary(); + for (int i = 0; i < nodes.Length; i++) + { + if (!oldToNewAttrIndex.ContainsKey(nodes[i].attribute)) + oldToNewAttrIndex.Add(nodes[i].attribute, oldToNewAttrIndex.Count); + + nodes[i].attribute = oldToNewAttrIndex[nodes[i].attribute]; + } + MetaAttributeList newAttributes = attributes.CreateNew(oldToNewAttrIndex.Count); + foreach (KeyValuePair oldToNewAttr in 
oldToNewAttrIndex) + { + newAttributes[oldToNewAttr.Value] = attributes[oldToNewAttr.Key]; + } + attributes = newAttributes; + } + + _positionToNode = null; // Invalid now + } + + public void MergePositions(double tolerance = 0.01) + { + Dictionary newPositions = new Dictionary(tolerance <= 0 ? null : new Vector3Comparer(tolerance)); + + for (int i = 0; i < positions.Length; i++) + { + newPositions.TryAdd(positions[i], newPositions.Count); + } + + for (int i = 0; i < nodes.Length; i++) + { + nodes[i].position = newPositions[positions[nodes[i].position]]; + } + + positions = new Vector3[newPositions.Count]; + foreach (KeyValuePair pair in newPositions) + { + positions[pair.Value] = pair.Key; + } + + newPositions = null; + + // Remapping siblings + Dictionary posToLastSibling = new Dictionary(); + + for (int i = 0; i < nodes.Length; i++) + { + if (posToLastSibling.ContainsKey(nodes[i].position)) + { + nodes[i].sibling = posToLastSibling[nodes[i].position]; + posToLastSibling[nodes[i].position] = i; + } + else + { + nodes[i].sibling = -1; + posToLastSibling.Add(nodes[i].position, i); + } + } + + for (int i = 0; i < nodes.Length; i++) + { + if (nodes[i].sibling < 0) + { + // Assign last sibling to close sibling loop + nodes[i].sibling = posToLastSibling[nodes[i].position]; + } + } + + _positionToNode = null; + + // Dereference faces that no longer exist + for (int i = 0; i < nodes.Length; i++) + { + if (nodes[i].IsRemoved) + { + continue; + } + + int lastPos = nodes[i].position; + int relative = i; + while ((relative = nodes[relative].relative) != i) // Circulate around face + { + int currPos = nodes[relative].position; + if (lastPos == currPos) + { + RemoveFace(relative); + break; + } + lastPos = currPos; + } + } + } + + public void MergeAttributes() + { + Dictionary _uniqueAttributes = new Dictionary(); + + for (int i = 0; i < nodes.Length; i++) + { + _uniqueAttributes.TryAdd(attributes[nodes[i].attribute], nodes[i].attribute); + } + + for (int i = 0; i < 
nodes.Length; i++) + { + nodes[i].attribute = _uniqueAttributes[attributes[nodes[i].attribute]]; + } + } + + public void RemoveFace(int nodeIndex) + { + int relative = nodeIndex; + do + { + nodes[relative].MarkRemoved(); + ReconnectSiblings(relative); + } while ((relative = nodes[relative].relative) != nodeIndex); + _faceCount--; + } + + public void Scale(double factor) + { + for (int i = 0; i < positions.Length; i++) + { + positions[i] = positions[i] * factor; + } + } + + public HashSet GetAllEdges() + { + HashSet edges = new HashSet(); + for (int p = 0; p < PositionToNode.Length; p++) + { + int nodeIndex = PositionToNode[p]; + if (nodeIndex < 0) + { + continue; + } + + int sibling = nodeIndex; + do + { + int firstRelative = nodes[sibling].relative; + int secondRelative = nodes[firstRelative].relative; + + Edge pair = new Edge(nodes[firstRelative].position, nodes[secondRelative].position); + + edges.Add(pair); + + } while ((sibling = nodes[sibling].sibling) != nodeIndex); + } + + return edges; + } + + public SharedMesh ToSharedMesh() + { + // Compating here is an issue if mesh is being decimated :/ + //Compact(); + + SharedMesh mesh = new SharedMesh(); + + List triangles = new List(); + HashSet browsedNodes = new HashSet(); + + Group[] newGroups = new Group[groups?.Length ?? 
0]; + mesh.groups = newGroups; + mesh.attributeDefinitions = attributeDefinitions; + + int currentGroup = 0; + int indicesInGroup = 0; + + Dictionary<(int, int), int> perVertexMap = new Dictionary<(int, int), int>(); + + for (int i = 0; i < nodes.Length; i++) + { + if (newGroups.Length > 0 && groups[currentGroup].firstIndex == i) + { + if (currentGroup > 0) + { + newGroups[currentGroup - 1].indexCount = indicesInGroup; + newGroups[currentGroup].firstIndex = indicesInGroup + newGroups[currentGroup - 1].firstIndex; + } + indicesInGroup = 0; + if (currentGroup < groups.Length - 1) + { + currentGroup++; + } + } + + if (nodes[i].IsRemoved) + { + continue; + } + + indicesInGroup++; + + if (browsedNodes.Contains(i)) + { + continue; + } + + // Only works if all elements are triangles + int relative = i; + do + { + if (browsedNodes.Add(relative) && !nodes[relative].IsRemoved) + { + (int position, int attribute) key = (nodes[relative].position, nodes[relative].attribute); + perVertexMap.TryAdd(key, perVertexMap.Count); + triangles.Add(perVertexMap[key]); + } + } while ((relative = nodes[relative].relative) != i); + } + + if (newGroups.Length > 0) + { + newGroups[currentGroup].indexCount = indicesInGroup; + } + + // Positions + mesh.positions = new Vector3[perVertexMap.Count]; + foreach (KeyValuePair<(int, int), int> mapping in perVertexMap) + { + mesh.positions[mapping.Value] = positions[mapping.Key.Item1]; + } + + // Attributes + if (attributes != null && attributeDefinitions.Length > 0) + { + mesh.attributes = attributes.CreateNew(perVertexMap.Count); + foreach (KeyValuePair<(int, int), int> mapping in perVertexMap) + { + mesh.attributes[mapping.Value] = attributes[mapping.Key.Item2]; + } + } + + mesh.triangles = triangles.ToArray(); + + return mesh; + } + } + + public struct Edge : IEquatable + { + public int posA; + public int posB; + + public Edge(int posA, int posB) + { + this.posA = posA; + this.posB = posB; + } + + public override int GetHashCode() + { + unchecked + 
{ + return posA + posB; + } + } + + public override bool Equals(object obj) + { + return Equals((Edge)obj); + } + + public bool Equals(Edge pc) + { + if (ReferenceEquals(this, pc)) + { + return true; + } + else + { + return (posA == pc.posA && posB == pc.posB) || (posA == pc.posB && posB == pc.posA); + } + } + + public static bool operator ==(Edge x, Edge y) + { + return x.Equals(y); + } + + public static bool operator !=(Edge x, Edge y) + { + return !x.Equals(y); + } + + public override string ToString() + { + return $""; + } + } +} diff --git a/LightlessSync/ThirdParty/Nanomesh/Mesh/ConnectedMesh/Debug.cs b/LightlessSync/ThirdParty/Nanomesh/Mesh/ConnectedMesh/Debug.cs new file mode 100644 index 0000000..818cb62 --- /dev/null +++ b/LightlessSync/ThirdParty/Nanomesh/Mesh/ConnectedMesh/Debug.cs @@ -0,0 +1,138 @@ +using System; +using System.Linq; + +namespace Nanomesh +{ + public partial class ConnectedMesh + { + internal string PrintSiblings(int nodeIndex) + { + int sibling = nodeIndex; + string text = string.Join(" > ", Enumerable.Range(0, 12).Select(x => + { + string res = sibling.ToString() + (nodes[sibling].IsRemoved ? "(x)" : $"({nodes[sibling].position})"); + sibling = nodes[sibling].sibling; + return res; + })); + return text + "..."; + } + + internal string PrintRelatives(int nodeIndex) + { + int relative = nodeIndex; + string text = string.Join(" > ", Enumerable.Range(0, 12).Select(x => + { + string res = relative.ToString() + (nodes[relative].IsRemoved ? 
"(x)" : $"({nodes[relative].position})"); + relative = nodes[relative].relative; + return res; + })); + return text + "..."; + } + + internal bool CheckEdge(int nodeIndexA, int nodeIndexB) + { + if (nodes[nodeIndexA].position == nodes[nodeIndexB].position) + { + throw new Exception("Positions must be different"); + } + + if (nodes[nodeIndexA].IsRemoved) + { + throw new Exception($"Node A is unreferenced {nodeIndexA}"); + } + + if (nodes[nodeIndexB].IsRemoved) + { + throw new Exception($"Node B is unreferenced {nodeIndexB}"); + } + + return true; + } + + internal bool CheckRelatives(int nodeIndex) + { + if (nodes[nodeIndex].IsRemoved) + { + throw new Exception($"Node {nodeIndex} is removed"); + } + + int relative = nodeIndex; + int edgecount = 0; + int prevPos = -2; + do + { + if (nodes[relative].position == prevPos) + { + throw new Exception($"Two relatives or more share the same position : {PrintRelatives(nodeIndex)}"); + } + + if (edgecount > 50) + { + throw new Exception($"Circularity relative violation : {PrintRelatives(nodeIndex)}"); + } + + if (nodes[relative].IsRemoved) + { + throw new Exception($"Node {nodeIndex} is connected to the deleted relative {relative}"); + } + + prevPos = nodes[relative].position; + edgecount++; + + } while ((relative = nodes[relative].relative) != nodeIndex); + + return true; + } + + internal bool CheckSiblings(int nodeIndex) + { + if (nodes[nodeIndex].IsRemoved) + { + throw new Exception($"Node {nodeIndex} is removed"); + } + + int sibling = nodeIndex; + int cardinality = 0; + do + { + if (cardinality > 1000) + { + //throw new Exception($"Node {i}'s cardinality is superior to 50. 
It is likely to be that face siblings are not circularily linked"); + throw new Exception($"Circularity sibling violation : {PrintSiblings(nodeIndex)}"); + } + + if (nodes[sibling].IsRemoved) + { + throw new Exception($"Node {nodeIndex} has a deleted sibling {sibling}"); + } + + cardinality++; + + } while ((sibling = nodes[sibling].sibling) != nodeIndex); + + return true; + } + + internal bool Check() + { + for (int nodeIndex = 0; nodeIndex < nodes.Length; nodeIndex++) + { + if (nodes[nodeIndex].IsRemoved) + { + continue; + } + + CheckRelatives(nodeIndex); + + CheckSiblings(nodeIndex); + + if (GetEdgeCount(nodeIndex) == 2) + { + throw new Exception($"Node {nodeIndex} is part of a polygon of degree 2"); + } + } + + return true; + } + } +} \ No newline at end of file diff --git a/LightlessSync/ThirdParty/Nanomesh/Mesh/ConnectedMesh/Node.cs b/LightlessSync/ThirdParty/Nanomesh/Mesh/ConnectedMesh/Node.cs new file mode 100644 index 0000000..1e92240 --- /dev/null +++ b/LightlessSync/ThirdParty/Nanomesh/Mesh/ConnectedMesh/Node.cs @@ -0,0 +1,25 @@ +namespace Nanomesh +{ + public partial class ConnectedMesh + { + public struct Node + { + public int position; + public int sibling; + public int relative; + public int attribute; + + public void MarkRemoved() + { + position = -10; + } + + public bool IsRemoved => position == -10; + + public override string ToString() + { + return $"sibl:{sibling} rela:{relative} posi:{position}"; + } + } + } +} \ No newline at end of file diff --git a/LightlessSync/ThirdParty/Nanomesh/Mesh/Group.cs b/LightlessSync/ThirdParty/Nanomesh/Mesh/Group.cs new file mode 100644 index 0000000..87dab9c --- /dev/null +++ b/LightlessSync/ThirdParty/Nanomesh/Mesh/Group.cs @@ -0,0 +1,8 @@ +namespace Nanomesh +{ + public struct Group + { + public int firstIndex; + public int indexCount; + } +} \ No newline at end of file diff --git a/LightlessSync/ThirdParty/Nanomesh/Mesh/SharedMesh.cs b/LightlessSync/ThirdParty/Nanomesh/Mesh/SharedMesh.cs new file mode 100644 
using System.Collections.Generic;
using System.Diagnostics;

namespace Nanomesh
{
    /// <summary>
    /// A shared mesh is a flattened representation of the triangle mesh.
    /// It does not carry connectivity information, but it is simple to create
    /// and is a rather lightweight mesh data structure.
    /// </summary>
    public class SharedMesh
    {
        public Vector3[] positions;
        public int[] triangles;
        public Group[] groups;
        public MetaAttributeList attributes;
        public AttributeDefinition[] attributeDefinitions;

        // Debug-only sanity hook; the length validation is currently disabled upstream,
        // so this is intentionally a no-op.
        [Conditional("DEBUG")]
        public void CheckLengths()
        {
            //if (attributes != null)
            //{
            //    foreach (var pair in attributes)
            //    {
            //        Debug.Assert(pair.Value.Length == vertices.Length, $"Attribute '{pair.Value}' must have as many elements as vertices");
            //    }
            //}
        }

        /// <summary>
        /// Builds a fully connected mesh: one node per triangle corner, relatives linked
        /// around each face, siblings linked around each shared position.
        /// </summary>
        public ConnectedMesh ToConnectedMesh()
        {
            CheckLengths();

            ConnectedMesh connectedMesh = new ConnectedMesh
            {
                groups = groups
            };

            connectedMesh.positions = positions;
            connectedMesh.attributes = attributes;
            connectedMesh.attributeDefinitions = attributeDefinitions;

            // Building relatives
            ConnectedMesh.Node[] nodes = new ConnectedMesh.Node[triangles.Length];
            Dictionary<int, List<int>> vertexToNodes = new Dictionary<int, List<int>>();

            // Single-lookup registration (previously ContainsKey + Add + indexer: three hash lookups).
            static void Register(Dictionary<int, List<int>> map, int position, int nodeIndex)
            {
                if (!map.TryGetValue(position, out List<int> list))
                {
                    list = new List<int>();
                    map.Add(position, list);
                }
                list.Add(nodeIndex);
            }

            for (int i = 0; i < triangles.Length; i += 3)
            {
                ConnectedMesh.Node A = new ConnectedMesh.Node();
                ConnectedMesh.Node B = new ConnectedMesh.Node();
                ConnectedMesh.Node C = new ConnectedMesh.Node();

                A.position = triangles[i];
                B.position = triangles[i + 1];
                C.position = triangles[i + 2];

                // Attributes start out aliased to the vertex index; MergeAttributes can dedupe later.
                A.attribute = triangles[i];
                B.attribute = triangles[i + 1];
                C.attribute = triangles[i + 2];

                A.relative = i + 1; // B
                B.relative = i + 2; // C
                C.relative = i;     // A

                Register(vertexToNodes, A.position, i);
                Register(vertexToNodes, B.position, i + 1);
                Register(vertexToNodes, C.position, i + 2);

                nodes[i] = A;
                nodes[i + 1] = B;
                nodes[i + 2] = C;

                connectedMesh._faceCount++;
            }

            // Building siblings: chain each position's nodes backwards, then close the loop.
            foreach (KeyValuePair<int, List<int>> pair in vertexToNodes)
            {
                int previousSibling = -1;
                int firstSibling = -1;
                foreach (int node in pair.Value)
                {
                    if (firstSibling != -1)
                    {
                        nodes[node].sibling = previousSibling;
                    }
                    else
                    {
                        firstSibling = node;
                    }
                    previousSibling = node;
                }
                nodes[firstSibling].sibling = previousSibling;
            }

            connectedMesh.nodes = nodes;

            Debug.Assert(connectedMesh.Check());

            return connectedMesh;
        }
    }
}
System.Collections.Immutable; using System.Globalization; using System.Numerics; using System.Reflection; -using System.Runtime.InteropServices; namespace LightlessSync.UI; @@ -71,6 +70,7 @@ public class CompactUi : WindowMediatorSubscriberBase private readonly SelectTagForSyncshellUi _selectTagForSyncshellUi; private readonly SeluneBrush _seluneBrush = new(); private readonly TopTabMenu _tabMenu; + private readonly OptimizationSummaryCard _optimizationSummaryCard; #endregion @@ -86,7 +86,8 @@ public class CompactUi : WindowMediatorSubscriberBase private int _pendingFocusFrame = -1; private Pair? _pendingFocusPair; private bool _showModalForUserAddition; - private float _transferPartHeight; + private float _footerPartHeight; + private bool _hasFooterPartHeight; private bool _wasOpen; private float _windowContentWidth; @@ -177,6 +178,7 @@ public class CompactUi : WindowMediatorSubscriberBase _characterAnalyzer = characterAnalyzer; _playerPerformanceConfig = playerPerformanceConfig; _lightlessMediator = mediator; + _optimizationSummaryCard = new OptimizationSummaryCard(_uiSharedService, _pairUiService, _playerPerformanceConfig, _fileTransferManager, _lightlessMediator); } #endregion @@ -262,12 +264,17 @@ public class CompactUi : WindowMediatorSubscriberBase using (ImRaii.PushId("global-topmenu")) _tabMenu.Draw(pairSnapshot); using (ImRaii.PushId("pairlist")) DrawPairs(); - var transfersTop = ImGui.GetCursorScreenPos().Y; - var gradientBottom = MathF.Max(gradientTop, transfersTop - style.ItemSpacing.Y - gradientInset); + var footerTop = ImGui.GetCursorScreenPos().Y; + var gradientBottom = MathF.Max(gradientTop, footerTop - style.ItemSpacing.Y - gradientInset); selune.DrawGradient(gradientTop, gradientBottom, ImGui.GetIO().DeltaTime); float pairlistEnd = ImGui.GetCursorPosY(); - using (ImRaii.PushId("transfers")) DrawTransfers(); - _transferPartHeight = ImGui.GetCursorPosY() - pairlistEnd - ImGui.GetTextLineHeight(); + bool drewFooter; + using 
(ImRaii.PushId("optimization-summary")) + { + drewFooter = _optimizationSummaryCard.Draw(_currentDownloads.Count); + } + _footerPartHeight = drewFooter ? ImGui.GetCursorPosY() - pairlistEnd : 0f; + _hasFooterPartHeight = true; using (ImRaii.PushId("group-pair-popup")) _selectPairsForGroupUi.Draw(pairSnapshot.DirectPairs); using (ImRaii.PushId("group-syncshell-popup")) _selectSyncshellForTagUi.Draw(pairSnapshot.Groups); using (ImRaii.PushId("group-pair-edit")) _renamePairTagUi.Draw(); @@ -330,10 +337,9 @@ public class CompactUi : WindowMediatorSubscriberBase private void DrawPairs() { - float ySize = Math.Abs(_transferPartHeight) < 0.0001f + float ySize = !_hasFooterPartHeight ? 1 - : (ImGui.GetWindowContentRegionMax().Y - ImGui.GetWindowContentRegionMin().Y - + ImGui.GetTextLineHeight() - ImGui.GetStyle().WindowPadding.Y - ImGui.GetStyle().WindowBorderSize) - _transferPartHeight - ImGui.GetCursorPosY(); + : MathF.Max(1f, ImGui.GetContentRegionAvail().Y - _footerPartHeight); if (ImGui.BeginChild("list", new Vector2(_windowContentWidth, ySize), border: false)) { @@ -346,101 +352,6 @@ public class CompactUi : WindowMediatorSubscriberBase ImGui.EndChild(); } - private void DrawTransfers() - { - var currentUploads = _fileTransferManager.GetCurrentUploadsSnapshot(); - ImGui.AlignTextToFramePadding(); - _uiSharedService.IconText(FontAwesomeIcon.Upload); - ImGui.SameLine(35 * ImGuiHelpers.GlobalScale); - - if (currentUploads.Count > 0) - { - int totalUploads = currentUploads.Count; - int doneUploads = 0; - long totalUploaded = 0; - long totalToUpload = 0; - - foreach (var upload in currentUploads) - { - if (upload.IsTransferred) - { - doneUploads++; - } - - totalUploaded += upload.Transferred; - totalToUpload += upload.Total; - } - - int activeUploads = totalUploads - doneUploads; - var uploadSlotLimit = Math.Clamp(_configService.Current.ParallelUploads, 1, 8); - - ImGui.TextUnformatted($"{doneUploads}/{totalUploads} (slots {activeUploads}/{uploadSlotLimit})"); - var 
uploadText = $"({UiSharedService.ByteToString(totalUploaded)}/{UiSharedService.ByteToString(totalToUpload)})"; - var textSize = ImGui.CalcTextSize(uploadText); - ImGui.SameLine(_windowContentWidth - textSize.X); - ImGui.AlignTextToFramePadding(); - ImGui.TextUnformatted(uploadText); - } - else - { - ImGui.AlignTextToFramePadding(); - ImGui.TextUnformatted("No uploads in progress"); - } - - var downloadSummary = GetDownloadSummary(); - ImGui.AlignTextToFramePadding(); - _uiSharedService.IconText(FontAwesomeIcon.Download); - ImGui.SameLine(35 * ImGuiHelpers.GlobalScale); - - if (downloadSummary.HasDownloads) - { - var totalDownloads = downloadSummary.TotalFiles; - var doneDownloads = downloadSummary.TransferredFiles; - var totalDownloaded = downloadSummary.TransferredBytes; - var totalToDownload = downloadSummary.TotalBytes; - - ImGui.TextUnformatted($"{doneDownloads}/{totalDownloads}"); - var downloadText = - $"({UiSharedService.ByteToString(totalDownloaded)}/{UiSharedService.ByteToString(totalToDownload)})"; - var textSize = ImGui.CalcTextSize(downloadText); - ImGui.SameLine(_windowContentWidth - textSize.X); - ImGui.AlignTextToFramePadding(); - ImGui.TextUnformatted(downloadText); - } - else - { - ImGui.AlignTextToFramePadding(); - ImGui.TextUnformatted("No downloads in progress"); - } - } - - - private DownloadSummary GetDownloadSummary() - { - long totalBytes = 0; - long transferredBytes = 0; - int totalFiles = 0; - int transferredFiles = 0; - - foreach (var kvp in _currentDownloads.ToArray()) - { - if (kvp.Value is not { Count: > 0 } statuses) - { - continue; - } - - foreach (var status in statuses.Values) - { - totalBytes += status.TotalBytes; - transferredBytes += status.TransferredBytes; - totalFiles += status.TotalFiles; - transferredFiles += status.TransferredFiles; - } - } - - return new DownloadSummary(totalFiles, transferredFiles, transferredBytes, totalBytes); - } - #endregion #region Header Drawing @@ -1147,13 +1058,4 @@ public class CompactUi : 
WindowMediatorSubscriberBase #endregion - #region Helper Types - - [StructLayout(LayoutKind.Auto)] - private readonly record struct DownloadSummary(int TotalFiles, int TransferredFiles, long TransferredBytes, long TotalBytes) - { - public bool HasDownloads => TotalFiles > 0 || TotalBytes > 0; - } - - #endregion } diff --git a/LightlessSync/UI/Components/DrawFolderBase.cs b/LightlessSync/UI/Components/DrawFolderBase.cs index 0532da9..39a1b44 100644 --- a/LightlessSync/UI/Components/DrawFolderBase.cs +++ b/LightlessSync/UI/Components/DrawFolderBase.cs @@ -39,7 +39,8 @@ public abstract class DrawFolderBase : IDrawFolder public void Draw() { - if (!RenderIfEmpty && !DrawPairs.Any()) return; + var drawPairCount = DrawPairs.Count; + if (!RenderIfEmpty && drawPairCount == 0) return; _suppressNextRowToggle = false; @@ -111,9 +112,9 @@ public abstract class DrawFolderBase : IDrawFolder if (_tagHandler.IsTagOpen(_id)) { using var indent = ImRaii.PushIndent(_uiSharedService.GetIconSize(FontAwesomeIcon.EllipsisV).X + ImGui.GetStyle().ItemSpacing.X, false); - if (DrawPairs.Any()) + if (drawPairCount > 0) { - using var clipper = ImUtf8.ListClipper(DrawPairs.Count, ImGui.GetFrameHeightWithSpacing()); + using var clipper = ImUtf8.ListClipper(drawPairCount, ImGui.GetFrameHeightWithSpacing()); while (clipper.Step()) { for (var i = clipper.DisplayStart; i < clipper.DisplayEnd; i++) diff --git a/LightlessSync/UI/Components/DrawGroupedGroupFolder.cs b/LightlessSync/UI/Components/DrawGroupedGroupFolder.cs index 72063f2..e13106d 100644 --- a/LightlessSync/UI/Components/DrawGroupedGroupFolder.cs +++ b/LightlessSync/UI/Components/DrawGroupedGroupFolder.cs @@ -22,13 +22,16 @@ public class DrawGroupedGroupFolder : IDrawFolder private readonly ApiController _apiController; private readonly SelectSyncshellForTagUi _selectSyncshellForTagUi; private readonly RenameSyncshellTagUi _renameSyncshellTagUi; + private readonly HashSet _onlinePairBuffer = new(StringComparer.Ordinal); + private 
IImmutableList? _drawPairsCache; + private int? _totalPairsCache; private bool _wasHovered = false; private float _menuWidth; private bool _rowClickArmed; - public IImmutableList DrawPairs => _groups.SelectMany(g => g.GroupDrawFolder.DrawPairs).ToImmutableList(); - public int OnlinePairs => _groups.SelectMany(g => g.GroupDrawFolder.DrawPairs).Where(g => g.Pair.IsOnline).DistinctBy(g => g.Pair.UserData.UID).Count(); - public int TotalPairs => _groups.Sum(g => g.GroupDrawFolder.TotalPairs); + public IImmutableList DrawPairs => _drawPairsCache ??= _groups.SelectMany(g => g.GroupDrawFolder.DrawPairs).ToImmutableList(); + public int OnlinePairs => CountOnlinePairs(DrawPairs); + public int TotalPairs => _totalPairsCache ??= _groups.Sum(g => g.GroupDrawFolder.TotalPairs); public DrawGroupedGroupFolder(IEnumerable groups, TagHandler tagHandler, ApiController apiController, UiSharedService uiSharedService, SelectSyncshellForTagUi selectSyncshellForTagUi, RenameSyncshellTagUi renameSyncshellTagUi, string tag) { @@ -50,6 +53,10 @@ public class DrawGroupedGroupFolder : IDrawFolder } using var id = ImRaii.PushId(_id); + var drawPairs = DrawPairs; + var onlinePairs = CountOnlinePairs(drawPairs); + var totalPairs = TotalPairs; + var hasPairs = drawPairs.Count > 0; var color = ImRaii.PushColor(ImGuiCol.ChildBg, ImGui.GetColorU32(ImGuiCol.FrameBgHovered), _wasHovered); var allowRowClick = string.IsNullOrEmpty(_tag); var suppressRowToggle = false; @@ -85,10 +92,10 @@ public class DrawGroupedGroupFolder : IDrawFolder { ImGui.SameLine(); ImGui.AlignTextToFramePadding(); - ImGui.TextUnformatted("[" + OnlinePairs.ToString() + "]"); + ImGui.TextUnformatted("[" + onlinePairs.ToString() + "]"); } - UiSharedService.AttachToolTip(OnlinePairs + " online in all of your joined syncshells" + Environment.NewLine + - TotalPairs + " pairs combined in all of your joined syncshells"); + UiSharedService.AttachToolTip(onlinePairs + " online in all of your joined syncshells" + Environment.NewLine + + 
totalPairs + " pairs combined in all of your joined syncshells"); ImGui.SameLine(); ImGui.AlignTextToFramePadding(); if (_tag != "") @@ -96,7 +103,7 @@ public class DrawGroupedGroupFolder : IDrawFolder ImGui.TextUnformatted(_tag); ImGui.SameLine(); - DrawPauseButton(); + DrawPauseButton(hasPairs); ImGui.SameLine(); DrawMenu(ref suppressRowToggle); } else @@ -104,7 +111,7 @@ public class DrawGroupedGroupFolder : IDrawFolder ImGui.TextUnformatted("All Syncshells"); ImGui.SameLine(); - DrawPauseButton(); + DrawPauseButton(hasPairs); } } color.Dispose(); @@ -151,9 +158,9 @@ public class DrawGroupedGroupFolder : IDrawFolder } } - protected void DrawPauseButton() + protected void DrawPauseButton(bool hasPairs) { - if (DrawPairs.Count > 0) + if (hasPairs) { var isPaused = _groups.Select(g => g.GroupFullInfo).All(g => g.GroupUserPermissions.IsPaused()); FontAwesomeIcon pauseIcon = isPaused ? FontAwesomeIcon.Play : FontAwesomeIcon.Pause; @@ -179,6 +186,27 @@ public class DrawGroupedGroupFolder : IDrawFolder } } + private int CountOnlinePairs(IImmutableList drawPairs) + { + if (drawPairs.Count == 0) + { + return 0; + } + + _onlinePairBuffer.Clear(); + foreach (var pair in drawPairs) + { + if (!pair.Pair.IsOnline) + { + continue; + } + + _onlinePairBuffer.Add(pair.Pair.UserData.UID); + } + + return _onlinePairBuffer.Count; + } + protected void ChangePauseStateGroups() { foreach(var group in _groups) diff --git a/LightlessSync/UI/Components/DrawUserPair.cs b/LightlessSync/UI/Components/DrawUserPair.cs index 5524226..3ee10ad 100644 --- a/LightlessSync/UI/Components/DrawUserPair.cs +++ b/LightlessSync/UI/Components/DrawUserPair.cs @@ -340,7 +340,10 @@ public class DrawUserPair ? 
FontAwesomeIcon.User : FontAwesomeIcon.Users); } - UiSharedService.AttachToolTip(GetUserTooltip()); + if (ImGui.IsItemHovered(ImGuiHoveredFlags.AllowWhenDisabled)) + { + UiSharedService.AttachToolTip(GetUserTooltip()); + } if (_performanceConfigService.Current.ShowPerformanceIndicator && !_performanceConfigService.Current.UIDsToIgnore @@ -354,22 +357,25 @@ public class DrawUserPair _uiSharedService.IconText(FontAwesomeIcon.ExclamationTriangle, UIColors.Get("LightlessYellow")); - string userWarningText = "WARNING: This user exceeds one or more of your defined thresholds:" + UiSharedService.TooltipSeparator; - bool shownVram = false; - if (_performanceConfigService.Current.VRAMSizeWarningThresholdMiB > 0 - && _performanceConfigService.Current.VRAMSizeWarningThresholdMiB * 1024 * 1024 < _pair.LastAppliedApproximateVRAMBytes) + if (ImGui.IsItemHovered(ImGuiHoveredFlags.AllowWhenDisabled)) { - shownVram = true; - userWarningText += $"Approx. VRAM Usage: Used: {UiSharedService.ByteToString(_pair.LastAppliedApproximateVRAMBytes)}, Threshold: {_performanceConfigService.Current.VRAMSizeWarningThresholdMiB} MiB"; - } - if (_performanceConfigService.Current.TrisWarningThresholdThousands > 0 - && _performanceConfigService.Current.TrisWarningThresholdThousands * 1024 < _pair.LastAppliedDataTris) - { - if (shownVram) userWarningText += Environment.NewLine; - userWarningText += $"Approx. Triangle count: Used: {_pair.LastAppliedDataTris}, Threshold: {_performanceConfigService.Current.TrisWarningThresholdThousands * 1000}"; - } + string userWarningText = "WARNING: This user exceeds one or more of your defined thresholds:" + UiSharedService.TooltipSeparator; + bool shownVram = false; + if (_performanceConfigService.Current.VRAMSizeWarningThresholdMiB > 0 + && _performanceConfigService.Current.VRAMSizeWarningThresholdMiB * 1024 * 1024 < _pair.LastAppliedApproximateVRAMBytes) + { + shownVram = true; + userWarningText += $"Approx. 
VRAM Usage: Used: {UiSharedService.ByteToString(_pair.LastAppliedApproximateVRAMBytes)}, Threshold: {_performanceConfigService.Current.VRAMSizeWarningThresholdMiB} MiB"; + } + if (_performanceConfigService.Current.TrisWarningThresholdThousands > 0 + && _performanceConfigService.Current.TrisWarningThresholdThousands * 1024 < _pair.LastAppliedDataTris) + { + if (shownVram) userWarningText += Environment.NewLine; + userWarningText += $"Approx. Triangle count: Used: {_pair.LastAppliedDataTris}, Threshold: {_performanceConfigService.Current.TrisWarningThresholdThousands * 1000}"; + } - UiSharedService.AttachToolTip(userWarningText); + UiSharedService.AttachToolTip(userWarningText); + } } ImGui.SameLine(); @@ -613,12 +619,15 @@ public class DrawUserPair perm.SetPaused(!perm.IsPaused()); _ = _apiController.UserSetPairPermissions(new(_pair.UserData, perm)); } - UiSharedService.AttachToolTip(!_pair.UserPair!.OwnPermissions.IsPaused() - ? ("Pause pairing with " + _pair.UserData.AliasOrUID - + (_pair.UserPair!.OwnPermissions.IsSticky() - ? string.Empty - : UiSharedService.TooltipSeparator + "Hold CTRL to enable preferred permissions while pausing." + Environment.NewLine + "This will leave this pair paused even if unpausing syncshells including this pair.")) - : "Resume pairing with " + _pair.UserData.AliasOrUID); + if (ImGui.IsItemHovered(ImGuiHoveredFlags.AllowWhenDisabled)) + { + UiSharedService.AttachToolTip(!_pair.UserPair!.OwnPermissions.IsPaused() + ? ("Pause pairing with " + _pair.UserData.AliasOrUID + + (_pair.UserPair!.OwnPermissions.IsSticky() + ? string.Empty + : UiSharedService.TooltipSeparator + "Hold CTRL to enable preferred permissions while pausing." 
+ Environment.NewLine + "This will leave this pair paused even if unpausing syncshells including this pair.")) + : "Resume pairing with " + _pair.UserData.AliasOrUID); + } if (_pair.IsPaired) { @@ -781,8 +790,11 @@ public class DrawUserPair currentRightSide -= (_uiSharedService.GetIconSize(FontAwesomeIcon.Running).X + (spacingX / 2f)); ImGui.SameLine(currentRightSide); _uiSharedService.IconText(FontAwesomeIcon.Running); - UiSharedService.AttachToolTip($"This user has shared {sharedData.Count} Character Data Sets with you." + UiSharedService.TooltipSeparator - + "Click to open the Character Data Hub and show the entries."); + if (ImGui.IsItemHovered(ImGuiHoveredFlags.AllowWhenDisabled)) + { + UiSharedService.AttachToolTip($"This user has shared {sharedData.Count} Character Data Sets with you." + UiSharedService.TooltipSeparator + + "Click to open the Character Data Hub and show the entries."); + } if (ImGui.IsItemClicked(ImGuiMouseButton.Left)) { _mediator.Publish(new OpenCharaDataHubWithFilterMessage(_pair.UserData)); diff --git a/LightlessSync/UI/Components/OptimizationSettingsPanel.cs b/LightlessSync/UI/Components/OptimizationSettingsPanel.cs new file mode 100644 index 0000000..d8b8bd1 --- /dev/null +++ b/LightlessSync/UI/Components/OptimizationSettingsPanel.cs @@ -0,0 +1,1064 @@ +using Dalamud.Bindings.ImGui; +using Dalamud.Interface; +using Dalamud.Interface.Utility; +using Dalamud.Interface.Utility.Raii; +using LightlessSync.LightlessConfiguration; +using LightlessSync.LightlessConfiguration.Configurations; +using LightlessSync.PlayerData.Pairs; +using LightlessSync.UI.Services; +using LightlessSync.Utils; +using System.Numerics; + +namespace LightlessSync.UI.Components; + +public enum OptimizationPanelSection +{ + Texture, + Model, +} + +public sealed class OptimizationSettingsPanel +{ + private readonly UiSharedService _uiSharedService; + private readonly PlayerPerformanceConfigService _performanceConfigService; + private readonly PairUiService 
_pairUiService; + + private const ImGuiTableFlags SettingsTableFlags = ImGuiTableFlags.SizingStretchProp + | ImGuiTableFlags.NoBordersInBody + | ImGuiTableFlags.PadOuterX; + + public OptimizationSettingsPanel( + UiSharedService uiSharedService, + PlayerPerformanceConfigService performanceConfigService, + PairUiService pairUiService) + { + _uiSharedService = uiSharedService; + _performanceConfigService = performanceConfigService; + _pairUiService = pairUiService; + } + + public void DrawSettingsTrees( + string textureLabel, + Vector4 textureColor, + string modelLabel, + Vector4 modelColor, + Func beginTree) + { + if (beginTree(textureLabel, textureColor)) + { + DrawTextureSection(showTitle: false); + UiSharedService.ColoredSeparator(textureColor, 1.5f); + ImGui.TreePop(); + } + + ImGui.Separator(); + + if (beginTree(modelLabel, modelColor)) + { + DrawModelSection(showTitle: false); + UiSharedService.ColoredSeparator(modelColor, 1.5f); + ImGui.TreePop(); + } + } + + public void DrawPopup(OptimizationPanelSection section) + { + switch (section) + { + case OptimizationPanelSection.Texture: + DrawTextureSection(showTitle: false); + break; + case OptimizationPanelSection.Model: + DrawModelSection(showTitle: false); + break; + } + } + + private void DrawTextureSection(bool showTitle) + { + var scale = ImGuiHelpers.GlobalScale; + DrawSectionIntro( + FontAwesomeIcon.Images, + UIColors.Get("LightlessYellow"), + "Texture Optimization", + "Reduce texture memory by trimming mip levels and downscaling oversized textures.", + showTitle); + + DrawCallout("texture-opt-warning", UIColors.Get("DimRed"), () => + { + _uiSharedService.MediumText("Warning", UIColors.Get("DimRed")); + _uiSharedService.DrawNoteLine("! 
", UIColors.Get("DimRed"), + new SeStringUtils.RichTextEntry("Texture compression and downscaling is potentially a "), + new SeStringUtils.RichTextEntry("destructive", UIColors.Get("DimRed"), true), + new SeStringUtils.RichTextEntry(" process and may cause broken or incorrect character appearances.")); + + _uiSharedService.DrawNoteLine("! ", UIColors.Get("DimRed"), + new SeStringUtils.RichTextEntry("This feature is encouraged to help "), + new SeStringUtils.RichTextEntry("lower-end systems with limited VRAM", UIColors.Get("LightlessYellow"), true), + new SeStringUtils.RichTextEntry(" and for use in "), + new SeStringUtils.RichTextEntry("performance-critical scenarios", UIColors.Get("LightlessYellow"), true), + new SeStringUtils.RichTextEntry(".")); + + _uiSharedService.DrawNoteLine("! ", UIColors.Get("DimRed"), + new SeStringUtils.RichTextEntry("Runtime downscaling "), + new SeStringUtils.RichTextEntry("MAY", UIColors.Get("DimRed"), true), + new SeStringUtils.RichTextEntry(" cause higher load on the system when processing downloads.")); + + _uiSharedService.DrawNoteLine("!!! ", UIColors.Get("DimRed"), + new SeStringUtils.RichTextEntry("When enabled, we cannot provide support for appearance issues caused by this setting!", UIColors.Get("DimRed"), true)); + }); + + DrawCallout("texture-opt-info", UIColors.Get("LightlessGrey"), () => + { + _uiSharedService.DrawNoteLine("i ", UIColors.Get("LightlessGrey"), + new SeStringUtils.RichTextEntry("Compression, downscale, and mip trimming only apply to "), + new SeStringUtils.RichTextEntry("newly downloaded pairs", UIColors.Get("LightlessYellow"), true), + new SeStringUtils.RichTextEntry(". 
Existing downloads are not reprocessed; re-download to apply.")); + }); + + ImGui.Dummy(new Vector2(0f, 2f * scale)); + DrawGroupHeader("Core Controls", UIColors.Get("LightlessYellow")); + + var textureConfig = _performanceConfigService.Current; + using (ImRaii.PushStyle(ImGuiStyleVar.CellPadding, new Vector2(6f * scale, 2f * scale))) + using (var table = ImRaii.Table("texture-opt-core", 3, SettingsTableFlags)) + { + if (table) + { + ImGui.TableSetupColumn("Label", ImGuiTableColumnFlags.WidthFixed, 220f * scale); + ImGui.TableSetupColumn("Control", ImGuiTableColumnFlags.WidthFixed, 180f * scale); + ImGui.TableSetupColumn("Description", ImGuiTableColumnFlags.WidthStretch); + + DrawControlRow("Trim mip levels", () => + { + var trimNonIndex = textureConfig.EnableNonIndexTextureMipTrim; + var accent = UIColors.Get("LightlessYellow"); + if (DrawAccentCheckbox("##texture-trim-mips", ref trimNonIndex, accent)) + { + textureConfig.EnableNonIndexTextureMipTrim = trimNonIndex; + _performanceConfigService.Save(); + } + }, "Removes high-resolution mip levels from oversized non-index textures.", UIColors.Get("LightlessYellow"), UIColors.Get("LightlessYellow")); + + DrawControlRow("Downscale index textures", () => + { + var downscaleIndex = textureConfig.EnableIndexTextureDownscale; + var accent = UIColors.Get("LightlessYellow"); + if (DrawAccentCheckbox("##texture-downscale-index", ref downscaleIndex, accent)) + { + textureConfig.EnableIndexTextureDownscale = downscaleIndex; + _performanceConfigService.Save(); + } + }, "Downscales oversized index textures to the configured dimension.", UIColors.Get("LightlessYellow"), UIColors.Get("LightlessYellow")); + + DrawControlRow("Max texture dimension", () => + { + var dimensionOptions = new[] { 512, 1024, 2048, 4096 }; + var optionLabels = dimensionOptions.Select(static value => value.ToString()).ToArray(); + var currentDimension = textureConfig.TextureDownscaleMaxDimension; + var selectedIndex = Array.IndexOf(dimensionOptions, 
currentDimension); + if (selectedIndex < 0) + { + selectedIndex = Array.IndexOf(dimensionOptions, 2048); + } + + ImGui.SetNextItemWidth(-1f); + if (ImGui.Combo("##texture-max-dimension", ref selectedIndex, optionLabels, optionLabels.Length)) + { + textureConfig.TextureDownscaleMaxDimension = dimensionOptions[selectedIndex]; + _performanceConfigService.Save(); + } + }, "Textures above this size are reduced to the limit. Default: 2048."); + } + } + + if (!textureConfig.EnableNonIndexTextureMipTrim + && !textureConfig.EnableIndexTextureDownscale + && !textureConfig.EnableUncompressedTextureCompression) + { + UiSharedService.ColorTextWrapped( + "Texture trimming, downscale, and compression are disabled. Lightless will keep original textures regardless of size.", + UIColors.Get("DimRed")); + } + + ImGui.Dummy(new Vector2(0f, 4f * scale)); + DrawGroupHeader("Behavior & Exceptions", UIColors.Get("LightlessYellow")); + + using (ImRaii.PushStyle(ImGuiStyleVar.CellPadding, new Vector2(6f * scale, 2f * scale))) + using (var table = ImRaii.Table("texture-opt-behavior", 3, SettingsTableFlags)) + { + if (table) + { + ImGui.TableSetupColumn("Label", ImGuiTableColumnFlags.WidthFixed, 220f * scale); + ImGui.TableSetupColumn("Control", ImGuiTableColumnFlags.WidthFixed, 180f * scale); + ImGui.TableSetupColumn("Description", ImGuiTableColumnFlags.WidthStretch); + + DrawControlRow("Only downscale uncompressed", () => + { + var onlyUncompressed = textureConfig.OnlyDownscaleUncompressedTextures; + if (ImGui.Checkbox("##texture-only-uncompressed", ref onlyUncompressed)) + { + textureConfig.OnlyDownscaleUncompressedTextures = onlyUncompressed; + _performanceConfigService.Save(); + } + }, "When disabled, block-compressed textures can be downscaled too."); + } + } + + ImGui.Dummy(new Vector2(0f, 2f * scale)); + DrawTextureCompressionCard(textureConfig); + + using (ImRaii.PushStyle(ImGuiStyleVar.CellPadding, new Vector2(6f * scale, 2f * scale))) + using (var table = 
ImRaii.Table("texture-opt-behavior-extra", 3, SettingsTableFlags)) + { + if (table) + { + ImGui.TableSetupColumn("Label", ImGuiTableColumnFlags.WidthFixed, 220f * scale); + ImGui.TableSetupColumn("Control", ImGuiTableColumnFlags.WidthFixed, 180f * scale); + ImGui.TableSetupColumn("Description", ImGuiTableColumnFlags.WidthStretch); + + DrawControlRow("Keep original texture files", () => + { + var keepOriginalTextures = textureConfig.KeepOriginalTextureFiles; + if (ImGui.Checkbox("##texture-keep-original", ref keepOriginalTextures)) + { + textureConfig.KeepOriginalTextureFiles = keepOriginalTextures; + _performanceConfigService.Save(); + } + }, "Keeps the original texture alongside the downscaled copy."); + + DrawControlRow("Skip preferred/direct pairs", () => + { + var skipPreferredDownscale = textureConfig.SkipTextureDownscaleForPreferredPairs; + if (ImGui.Checkbox("##texture-skip-preferred", ref skipPreferredDownscale)) + { + textureConfig.SkipTextureDownscaleForPreferredPairs = skipPreferredDownscale; + _performanceConfigService.Save(); + } + }, "Leaves textures untouched for preferred/direct pairs."); + } + } + + UiSharedService.ColorTextWrapped( + "Note: Disabling \"Keep original texture files\" prevents saved/effective VRAM usage information.", + UIColors.Get("LightlessYellow")); + + ImGui.Dummy(new Vector2(0f, 4f * scale)); + DrawSummaryPanel("Usage Summary", UIColors.Get("LightlessPurple"), DrawTextureDownscaleCounters); + } + + private void DrawModelSection(bool showTitle) + { + var scale = ImGuiHelpers.GlobalScale; + DrawSectionIntro( + FontAwesomeIcon.ProjectDiagram, + UIColors.Get("LightlessOrange"), + "Model Optimization", + "Reduce triangle counts by decimating models above a threshold.", + showTitle); + + DrawCallout("model-opt-warning", UIColors.Get("DimRed"), () => + { + _uiSharedService.MediumText("Warning", UIColors.Get("DimRed")); + _uiSharedService.DrawNoteLine("! 
", UIColors.Get("DimRed"), + new SeStringUtils.RichTextEntry("Model decimation is a "), + new SeStringUtils.RichTextEntry("destructive", UIColors.Get("DimRed"), true), + new SeStringUtils.RichTextEntry(" process and may cause broken or incorrect character appearances.")); + + _uiSharedService.DrawNoteLine("! ", UIColors.Get("DimRed"), + new SeStringUtils.RichTextEntry("This feature is encouraged to help "), + new SeStringUtils.RichTextEntry("lower-end systems with limited VRAM", UIColors.Get("LightlessYellow"), true), + new SeStringUtils.RichTextEntry(" and for use in "), + new SeStringUtils.RichTextEntry("performance-critical scenarios", UIColors.Get("LightlessYellow"), true), + new SeStringUtils.RichTextEntry(".")); + + _uiSharedService.DrawNoteLine("! ", UIColors.Get("DimRed"), + new SeStringUtils.RichTextEntry("Runtime decimation "), + new SeStringUtils.RichTextEntry("MAY", UIColors.Get("DimRed"), true), + new SeStringUtils.RichTextEntry(" cause higher load on the system when processing downloads.")); + + _uiSharedService.DrawNoteLine("!!! ", UIColors.Get("DimRed"), + new SeStringUtils.RichTextEntry("When enabled, we cannot provide support for appearance issues caused by this setting!", UIColors.Get("DimRed"), true)); + }); + + ImGui.Dummy(new Vector2(0f, 2f * scale)); + DrawCallout("model-opt-behavior", UIColors.Get("LightlessGreen"), () => + { + _uiSharedService.DrawNoteLine("! ", UIColors.Get("LightlessGreen"), + new SeStringUtils.RichTextEntry("Meshes above the "), + new SeStringUtils.RichTextEntry("triangle threshold", UIColors.Get("LightlessGreen"), true), + new SeStringUtils.RichTextEntry(" will be decimated to the "), + new SeStringUtils.RichTextEntry("target ratio", UIColors.Get("LightlessGreen"), true), + new SeStringUtils.RichTextEntry(". 
This can reduce quality or alter intended structure.")); + + _uiSharedService.DrawNoteLine("i ", UIColors.Get("LightlessGreen"), + new SeStringUtils.RichTextEntry("Decimation only applies to "), + new SeStringUtils.RichTextEntry("newly downloaded pairs", UIColors.Get("LightlessYellow"), true), + new SeStringUtils.RichTextEntry(". Existing downloads are not reprocessed; re-download to apply.")); + }); + + DrawGroupHeader("Core Controls", UIColors.Get("LightlessOrange")); + var performanceConfig = _performanceConfigService.Current; + DrawModelDecimationCard(performanceConfig); + + ImGui.Dummy(new Vector2(0f, 2f * scale)); + DrawGroupHeader("Behavior & Exceptions", UIColors.Get("LightlessOrange")); + + DrawModelBehaviorCard(performanceConfig); + + UiSharedService.ColorTextWrapped( + "Note: Disabling \"Keep original model files\" prevents saved/effective triangle usage information.", + UIColors.Get("LightlessYellow")); + + ImGui.Dummy(new Vector2(0f, 2f * scale)); + DrawGroupHeader("Decimation Targets", UIColors.Get("LightlessGrey"), "Hair mods are always excluded from decimation."); + + _uiSharedService.DrawNoteLine("! ", UIColors.Get("LightlessGreen"), + new SeStringUtils.RichTextEntry("Automatic decimation will only target the selected "), + new SeStringUtils.RichTextEntry("decimation targets", UIColors.Get("LightlessGreen"), true), + new SeStringUtils.RichTextEntry(".")); + + _uiSharedService.DrawNoteLine("! ", UIColors.Get("LightlessYellow"), + new SeStringUtils.RichTextEntry("It is advised to not decimate any body related meshes which includes: "), + new SeStringUtils.RichTextEntry("facial mods + sculpts, chest, legs, hands and feet", UIColors.Get("LightlessYellow"), true), + new SeStringUtils.RichTextEntry(".")); + + _uiSharedService.DrawNoteLine("!!! 
", UIColors.Get("DimRed"), + new SeStringUtils.RichTextEntry("Automatic decimation is not perfect and can cause meshes with bad topology to be worse.", UIColors.Get("DimRed"), true)); + + DrawTargetGrid(performanceConfig); + + ImGui.Dummy(new Vector2(0f, 4f * scale)); + DrawSummaryPanel("Usage Summary", UIColors.Get("LightlessPurple"), DrawTriangleDecimationCounters); + } + + private void DrawTargetGrid(PlayerPerformanceConfig config) + { + var scale = ImGuiHelpers.GlobalScale; + using (ImRaii.PushStyle(ImGuiStyleVar.CellPadding, new Vector2(6f * scale, 2f * scale))) + using (var table = ImRaii.Table("model-opt-targets", 3, SettingsTableFlags)) + { + if (!table) + { + return; + } + + ImGui.TableSetupColumn("Label", ImGuiTableColumnFlags.WidthFixed, 220f * scale); + ImGui.TableSetupColumn("Control", ImGuiTableColumnFlags.WidthFixed, 180f * scale); + ImGui.TableSetupColumn("Description", ImGuiTableColumnFlags.WidthStretch); + + const string bodyDesc = "Body meshes (torso, limbs)."; + DrawControlRow("Body", () => + { + var allowBody = config.ModelDecimationAllowBody; + if (ImGui.Checkbox("##model-target-body", ref allowBody)) + { + config.ModelDecimationAllowBody = allowBody; + _performanceConfigService.Save(); + } + if (ImGui.IsItemClicked(ImGuiMouseButton.Right)) + { + config.ModelDecimationAllowBody = ModelDecimationDefaults.AllowBody; + _performanceConfigService.Save(); + } + UiSharedService.AttachToolTip($"{bodyDesc}\nRight-click to reset to default ({(ModelDecimationDefaults.AllowBody ? 
"On" : "Off")})."); + }, bodyDesc); + + const string faceDesc = "Face and head meshes."; + DrawControlRow("Face/head", () => + { + var allowFaceHead = config.ModelDecimationAllowFaceHead; + if (ImGui.Checkbox("##model-target-facehead", ref allowFaceHead)) + { + config.ModelDecimationAllowFaceHead = allowFaceHead; + _performanceConfigService.Save(); + } + if (ImGui.IsItemClicked(ImGuiMouseButton.Right)) + { + config.ModelDecimationAllowFaceHead = ModelDecimationDefaults.AllowFaceHead; + _performanceConfigService.Save(); + } + UiSharedService.AttachToolTip($"{faceDesc}\nRight-click to reset to default ({(ModelDecimationDefaults.AllowFaceHead ? "On" : "Off")})."); + }, faceDesc); + + const string tailDesc = "Tail, ear, and similar appendages."; + DrawControlRow("Tails/Ears", () => + { + var allowTail = config.ModelDecimationAllowTail; + if (ImGui.Checkbox("##model-target-tail", ref allowTail)) + { + config.ModelDecimationAllowTail = allowTail; + _performanceConfigService.Save(); + } + if (ImGui.IsItemClicked(ImGuiMouseButton.Right)) + { + config.ModelDecimationAllowTail = ModelDecimationDefaults.AllowTail; + _performanceConfigService.Save(); + } + UiSharedService.AttachToolTip($"{tailDesc}\nRight-click to reset to default ({(ModelDecimationDefaults.AllowTail ? "On" : "Off")})."); + }, tailDesc); + + const string clothingDesc = "Outfits, shoes, gloves, hats."; + DrawControlRow("Clothing", () => + { + var allowClothing = config.ModelDecimationAllowClothing; + if (ImGui.Checkbox("##model-target-clothing", ref allowClothing)) + { + config.ModelDecimationAllowClothing = allowClothing; + _performanceConfigService.Save(); + } + if (ImGui.IsItemClicked(ImGuiMouseButton.Right)) + { + config.ModelDecimationAllowClothing = ModelDecimationDefaults.AllowClothing; + _performanceConfigService.Save(); + } + UiSharedService.AttachToolTip($"{clothingDesc}\nRight-click to reset to default ({(ModelDecimationDefaults.AllowClothing ? 
"On" : "Off")})."); + }, clothingDesc); + + const string accessoryDesc = "Jewelry and small add-ons."; + DrawControlRow("Accessories", () => + { + var allowAccessories = config.ModelDecimationAllowAccessories; + if (ImGui.Checkbox("##model-target-accessories", ref allowAccessories)) + { + config.ModelDecimationAllowAccessories = allowAccessories; + _performanceConfigService.Save(); + } + if (ImGui.IsItemClicked(ImGuiMouseButton.Right)) + { + config.ModelDecimationAllowAccessories = ModelDecimationDefaults.AllowAccessories; + _performanceConfigService.Save(); + } + UiSharedService.AttachToolTip($"{accessoryDesc}\nRight-click to reset to default ({(ModelDecimationDefaults.AllowAccessories ? "On" : "Off")})."); + }, accessoryDesc); + } + } + + private void DrawSectionIntro(FontAwesomeIcon icon, Vector4 color, string title, string subtitle, bool showTitle) + { + var scale = ImGuiHelpers.GlobalScale; + if (showTitle) + { + using (_uiSharedService.MediumFont.Push()) + { + _uiSharedService.IconText(icon, color); + ImGui.SameLine(0f, 6f * scale); + ImGui.TextColored(color, title); + } + + ImGui.TextColored(UIColors.Get("LightlessGrey"), subtitle); + } + else + { + _uiSharedService.IconText(icon, color); + ImGui.SameLine(0f, 6f * scale); + ImGui.TextColored(UIColors.Get("LightlessGrey"), subtitle); + } + + ImGui.Dummy(new Vector2(0f, 2f * scale)); + } + + private void DrawGroupHeader(string title, Vector4 color, string? 
helpText = null) + { + using var font = _uiSharedService.MediumFont.Push(); + ImGui.TextColored(color, title); + if (!string.IsNullOrWhiteSpace(helpText)) + { + _uiSharedService.DrawHelpText(helpText); + } + UiSharedService.ColoredSeparator(color, 1.2f); + } + + private void DrawCallout(string id, Vector4 color, Action content) + { + var scale = ImGuiHelpers.GlobalScale; + var bg = new Vector4(color.X, color.Y, color.Z, 0.08f); + var border = new Vector4(color.X, color.Y, color.Z, 0.25f); + DrawPanelBox(id, bg, border, 6f * scale, new Vector2(10f * scale, 6f * scale), content); + } + + private void DrawSummaryPanel(string title, Vector4 accent, Action content) + { + var scale = ImGuiHelpers.GlobalScale; + var bg = new Vector4(accent.X, accent.Y, accent.Z, 0.06f); + var border = new Vector4(accent.X, accent.Y, accent.Z, 0.2f); + DrawPanelBox($"summary-{title}", bg, border, 6f * scale, new Vector2(10f * scale, 6f * scale), () => + { + _uiSharedService.MediumText(title, accent); + content(); + }); + } + + private void DrawTextureCompressionCard(PlayerPerformanceConfig textureConfig) + { + var scale = ImGuiHelpers.GlobalScale; + var baseColor = UIColors.Get("LightlessGrey"); + var bg = new Vector4(baseColor.X, baseColor.Y, baseColor.Z, 0.12f); + var border = new Vector4(baseColor.X, baseColor.Y, baseColor.Z, 0.32f); + + DrawPanelBox("texture-compression-card", bg, border, 6f * scale, new Vector2(10f * scale, 6f * scale), () => + { + using (ImRaii.PushStyle(ImGuiStyleVar.CellPadding, new Vector2(6f * scale, 2f * scale))) + using (var table = ImRaii.Table("texture-opt-compress-card", 2, SettingsTableFlags)) + { + if (!table) + { + return; + } + + ImGui.TableSetupColumn("Label", ImGuiTableColumnFlags.WidthFixed, 220f * scale); + ImGui.TableSetupColumn("Control", ImGuiTableColumnFlags.WidthStretch); + + DrawInlineDescriptionRow("Compress uncompressed textures", () => + { + var autoCompress = textureConfig.EnableUncompressedTextureCompression; + if 
(UiSharedService.CheckboxWithBorder("##texture-auto-compress", ref autoCompress, baseColor)) + { + textureConfig.EnableUncompressedTextureCompression = autoCompress; + _performanceConfigService.Save(); + } + }, "Converts uncompressed textures to BC formats based on map type (heavy). Runs after downscale/mip trim.", + drawLabelSuffix: () => + { + _uiSharedService.IconText(FontAwesomeIcon.ExclamationTriangle, UIColors.Get("LightlessYellow")); + UiSharedService.AttachToolTip("This feature can be demanding and will increase character load times."); + }); + + DrawInlineDescriptionRow("Skip mipmaps for auto-compress", () => + { + var skipMipMaps = textureConfig.SkipUncompressedTextureCompressionMipMaps; + if (UiSharedService.CheckboxWithBorder("##texture-auto-compress-skip-mips", ref skipMipMaps, baseColor)) + { + textureConfig.SkipUncompressedTextureCompressionMipMaps = skipMipMaps; + _performanceConfigService.Save(); + } + }, "Skips mipmap generation to speed up compression, but can cause shimmering.", + disableControl: !textureConfig.EnableUncompressedTextureCompression); + } + }); + } + + private void DrawModelDecimationCard(PlayerPerformanceConfig performanceConfig) + { + var scale = ImGuiHelpers.GlobalScale; + var accent = UIColors.Get("LightlessOrange"); + var bg = new Vector4(accent.X, accent.Y, accent.Z, 0.12f); + var border = new Vector4(accent.X, accent.Y, accent.Z, 0.32f); + const string enableDesc = "Generates a decimated copy of models after download."; + const string thresholdDesc = "Models below this triangle count are left untouched. Default: 15,000."; + const string ratioDesc = "Ratio relative to original triangle count (80% keeps 80%). 
Default: 80%."; + + DrawPanelBox("model-decimation-card", bg, border, 6f * scale, new Vector2(10f * scale, 6f * scale), () => + { + using (ImRaii.PushStyle(ImGuiStyleVar.CellPadding, new Vector2(6f * scale, 2f * scale))) + using (var table = ImRaii.Table("model-opt-core-card", 2, SettingsTableFlags)) + { + if (!table) + { + return; + } + + ImGui.TableSetupColumn("Label", ImGuiTableColumnFlags.WidthFixed, 220f * scale); + ImGui.TableSetupColumn("Control", ImGuiTableColumnFlags.WidthStretch); + + DrawInlineDescriptionRow("Enable model decimation", () => + { + var enableDecimation = performanceConfig.EnableModelDecimation; + if (DrawAccentCheckbox("##enable-model-decimation", ref enableDecimation, accent)) + { + performanceConfig.EnableModelDecimation = enableDecimation; + _performanceConfigService.Save(); + } + if (ImGui.IsItemClicked(ImGuiMouseButton.Right)) + { + performanceConfig.EnableModelDecimation = ModelDecimationDefaults.EnableAutoDecimation; + _performanceConfigService.Save(); + } + UiSharedService.AttachToolTip($"{enableDesc}\nRight-click to reset to default ({(ModelDecimationDefaults.EnableAutoDecimation ? 
"On" : "Off")})."); + }, enableDesc); + + DrawInlineDescriptionRow("Decimate above (triangles)", () => + { + var triangleThreshold = performanceConfig.ModelDecimationTriangleThreshold; + ImGui.SetNextItemWidth(220f * scale); + if (ImGui.SliderInt("##model-decimation-threshold", ref triangleThreshold, 1_000, 100_000)) + { + performanceConfig.ModelDecimationTriangleThreshold = Math.Clamp(triangleThreshold, 1_000, 100_000); + _performanceConfigService.Save(); + } + if (ImGui.IsItemClicked(ImGuiMouseButton.Right)) + { + performanceConfig.ModelDecimationTriangleThreshold = ModelDecimationDefaults.TriangleThreshold; + _performanceConfigService.Save(); + } + UiSharedService.AttachToolTip($"{thresholdDesc}\nRight-click to reset to default ({ModelDecimationDefaults.TriangleThreshold:N0})."); + }, thresholdDesc); + + DrawInlineDescriptionRow("Target triangle ratio", () => + { + var targetPercent = (float)(performanceConfig.ModelDecimationTargetRatio * 100.0); + var clampedPercent = Math.Clamp(targetPercent, 60f, 99f); + if (Math.Abs(clampedPercent - targetPercent) > float.Epsilon) + { + performanceConfig.ModelDecimationTargetRatio = clampedPercent / 100.0; + _performanceConfigService.Save(); + targetPercent = clampedPercent; + } + + ImGui.SetNextItemWidth(220f * scale); + if (ImGui.SliderFloat("##model-decimation-target", ref targetPercent, 60f, 99f, "%.0f%%")) + { + performanceConfig.ModelDecimationTargetRatio = Math.Clamp(targetPercent / 100f, 0.6f, 0.99f); + _performanceConfigService.Save(); + } + if (ImGui.IsItemClicked(ImGuiMouseButton.Right)) + { + performanceConfig.ModelDecimationTargetRatio = ModelDecimationDefaults.TargetRatio; + _performanceConfigService.Save(); + } + UiSharedService.AttachToolTip($"{ratioDesc}\nRight-click to reset to default ({ModelDecimationDefaults.TargetRatio * 100:0}%)."); + }, ratioDesc); + } + }); + } + + private void DrawModelBehaviorCard(PlayerPerformanceConfig performanceConfig) + { + var scale = ImGuiHelpers.GlobalScale; + var baseColor 
= UIColors.Get("LightlessGrey"); + var bg = new Vector4(baseColor.X, baseColor.Y, baseColor.Z, 0.12f); + var border = new Vector4(baseColor.X, baseColor.Y, baseColor.Z, 0.32f); + const string normalizeDesc = "Normalizes tangents to reduce shading artifacts."; + const string avoidBodyDesc = "Uses body materials as a collision guard to reduce clothing clipping. Slower and may reduce decimation."; + const string keepOriginalDesc = "Keeps the original model alongside the decimated copy."; + const string skipPreferredDesc = "Leaves models untouched for preferred/direct pairs."; + + DrawPanelBox("model-behavior-card", bg, border, 6f * scale, new Vector2(10f * scale, 6f * scale), () => + { + using (ImRaii.PushStyle(ImGuiStyleVar.CellPadding, new Vector2(6f * scale, 2f * scale))) + using (var table = ImRaii.Table("model-opt-behavior-card", 2, SettingsTableFlags)) + { + if (!table) + { + return; + } + + ImGui.TableSetupColumn("Label", ImGuiTableColumnFlags.WidthFixed, 220f * scale); + ImGui.TableSetupColumn("Control", ImGuiTableColumnFlags.WidthStretch); + + DrawInlineDescriptionRow("Normalize tangents", () => + { + var normalizeTangents = performanceConfig.ModelDecimationNormalizeTangents; + if (UiSharedService.CheckboxWithBorder("##model-normalize-tangents", ref normalizeTangents, baseColor)) + { + performanceConfig.ModelDecimationNormalizeTangents = normalizeTangents; + _performanceConfigService.Save(); + } + if (ImGui.IsItemClicked(ImGuiMouseButton.Right)) + { + performanceConfig.ModelDecimationNormalizeTangents = ModelDecimationDefaults.NormalizeTangents; + _performanceConfigService.Save(); + } + UiSharedService.AttachToolTip($"{normalizeDesc}\nRight-click to reset to default ({(ModelDecimationDefaults.NormalizeTangents ? 
"On" : "Off")})."); + }, normalizeDesc); + + DrawInlineDescriptionRow("Avoid body intersection", () => + { + var avoidBodyIntersection = performanceConfig.ModelDecimationAvoidBodyIntersection; + if (UiSharedService.CheckboxWithBorder("##model-body-collision", ref avoidBodyIntersection, baseColor)) + { + performanceConfig.ModelDecimationAvoidBodyIntersection = avoidBodyIntersection; + _performanceConfigService.Save(); + } + if (ImGui.IsItemClicked(ImGuiMouseButton.Right)) + { + performanceConfig.ModelDecimationAvoidBodyIntersection = ModelDecimationDefaults.AvoidBodyIntersection; + _performanceConfigService.Save(); + } + UiSharedService.AttachToolTip($"{avoidBodyDesc}\nRight-click to reset to default ({(ModelDecimationDefaults.AvoidBodyIntersection ? "On" : "Off")})."); + }, avoidBodyDesc); + + DrawInlineDescriptionRow("Keep original model files", () => + { + var keepOriginalModels = performanceConfig.KeepOriginalModelFiles; + if (UiSharedService.CheckboxWithBorder("##model-keep-original", ref keepOriginalModels, baseColor)) + { + performanceConfig.KeepOriginalModelFiles = keepOriginalModels; + _performanceConfigService.Save(); + } + if (ImGui.IsItemClicked(ImGuiMouseButton.Right)) + { + performanceConfig.KeepOriginalModelFiles = ModelDecimationDefaults.KeepOriginalModelFiles; + _performanceConfigService.Save(); + } + UiSharedService.AttachToolTip($"{keepOriginalDesc}\nRight-click to reset to default ({(ModelDecimationDefaults.KeepOriginalModelFiles ? 
"On" : "Off")})."); + }, keepOriginalDesc); + + DrawInlineDescriptionRow("Skip preferred/direct pairs", () => + { + var skipPreferredDecimation = performanceConfig.SkipModelDecimationForPreferredPairs; + if (UiSharedService.CheckboxWithBorder("##model-skip-preferred", ref skipPreferredDecimation, baseColor)) + { + performanceConfig.SkipModelDecimationForPreferredPairs = skipPreferredDecimation; + _performanceConfigService.Save(); + } + if (ImGui.IsItemClicked(ImGuiMouseButton.Right)) + { + performanceConfig.SkipModelDecimationForPreferredPairs = ModelDecimationDefaults.SkipPreferredPairs; + _performanceConfigService.Save(); + } + UiSharedService.AttachToolTip($"{skipPreferredDesc}\nRight-click to reset to default ({(ModelDecimationDefaults.SkipPreferredPairs ? "On" : "Off")})."); + }, skipPreferredDesc); + } + }); + } + + private void DrawInlineDescriptionRow( + string label, + Action drawControl, + string description, + Action? drawLabelSuffix = null, + bool disableControl = false) + { + var scale = ImGuiHelpers.GlobalScale; + ImGui.TableNextRow(); + ImGui.TableSetColumnIndex(0); + ImGui.AlignTextToFramePadding(); + ImGui.TextUnformatted(label); + if (drawLabelSuffix != null) + { + ImGui.SameLine(0f, 4f * scale); + drawLabelSuffix(); + } + + ImGui.TableSetColumnIndex(1); + using (ImRaii.Disabled(disableControl)) + { + drawControl(); + } + + ImGui.SameLine(0f, 8f * scale); + using (ImRaii.PushColor(ImGuiCol.Text, UIColors.Get("LightlessGrey"))) + { + ImGui.PushTextWrapPos(ImGui.GetCursorPos().X + ImGui.GetContentRegionAvail().X); + ImGui.TextUnformatted(description); + ImGui.PopTextWrapPos(); + } + } + + private void DrawControlRow(string label, Action drawControl, string description, Vector4? labelColor = null, Vector4? cardAccent = null, Action? 
drawLabelSuffix = null) + { + var scale = ImGuiHelpers.GlobalScale; + if (!cardAccent.HasValue) + { + ImGui.TableNextRow(); + ImGui.TableSetColumnIndex(0); + ImGui.AlignTextToFramePadding(); + using var labelTint = ImRaii.PushColor(ImGuiCol.Text, labelColor ?? Vector4.Zero, labelColor.HasValue); + ImGui.TextUnformatted(label); + if (drawLabelSuffix != null) + { + ImGui.SameLine(0f, 4f * scale); + drawLabelSuffix(); + } + ImGui.TableSetColumnIndex(1); + drawControl(); + ImGui.TableSetColumnIndex(2); + using var color = ImRaii.PushColor(ImGuiCol.Text, UIColors.Get("LightlessGrey")); + ImGui.TextWrapped(description); + return; + } + + var padX = 6f * scale; + var padY = 3f * scale; + var rowGap = 4f * scale; + var accent = cardAccent.Value; + var drawList = ImGui.GetWindowDrawList(); + + ImGui.TableNextRow(); + ImGui.TableSetColumnIndex(0); + var col0Start = ImGui.GetCursorScreenPos(); + ImGui.TableSetColumnIndex(1); + var col1Start = ImGui.GetCursorScreenPos(); + ImGui.TableSetColumnIndex(2); + var col2Start = ImGui.GetCursorScreenPos(); + var col2Width = ImGui.GetContentRegionAvail().X; + + float descriptionHeight; + using (ImRaii.PushColor(ImGuiCol.Text, new Vector4(0f, 0f, 0f, 0f))) + { + ImGui.SetCursorScreenPos(col2Start); + ImGui.PushTextWrapPos(ImGui.GetCursorPos().X + col2Width); + ImGui.TextUnformatted(description); + ImGui.PopTextWrapPos(); + descriptionHeight = ImGui.GetItemRectSize().Y; + } + + var lineHeight = ImGui.GetTextLineHeight(); + var labelHeight = lineHeight; + var controlHeight = ImGui.GetFrameHeight(); + var contentHeight = MathF.Max(labelHeight, MathF.Max(controlHeight, descriptionHeight)); + var lineCount = Math.Max(1, (int)MathF.Round(descriptionHeight / MathF.Max(1f, lineHeight))); + var descOffset = lineCount > 1 ? 
lineHeight * 0.18f : 0f; + var cardTop = col0Start.Y; + var contentTop = cardTop + padY; + var cardHeight = contentHeight + (padY * 2f); + + var labelY = contentTop + (contentHeight - labelHeight) * 0.5f; + var controlY = contentTop + (contentHeight - controlHeight) * 0.5f; + var descY = contentTop + (contentHeight - descriptionHeight) * 0.5f - descOffset; + + drawList.ChannelsSplit(2); + drawList.ChannelsSetCurrent(1); + + ImGui.TableSetColumnIndex(0); + ImGui.SetCursorScreenPos(new Vector2(col0Start.X, labelY)); + using (ImRaii.PushColor(ImGuiCol.Text, labelColor ?? Vector4.Zero, labelColor.HasValue)) + { + ImGui.TextUnformatted(label); + if (drawLabelSuffix != null) + { + ImGui.SameLine(0f, 4f * scale); + drawLabelSuffix(); + } + } + + ImGui.TableSetColumnIndex(1); + ImGui.SetCursorScreenPos(new Vector2(col1Start.X, controlY)); + drawControl(); + + ImGui.TableSetColumnIndex(2); + ImGui.SetCursorScreenPos(new Vector2(col2Start.X, descY)); + using (ImRaii.PushColor(ImGuiCol.Text, UIColors.Get("LightlessGrey"))) + { + ImGui.PushTextWrapPos(ImGui.GetCursorPos().X + col2Width); + ImGui.TextUnformatted(description); + ImGui.PopTextWrapPos(); + } + + var rectMin = new Vector2(col0Start.X - padX, cardTop); + var rectMax = new Vector2(col2Start.X + col2Width + padX, cardTop + cardHeight); + var fill = new Vector4(accent.X, accent.Y, accent.Z, 0.07f); + var border = new Vector4(accent.X, accent.Y, accent.Z, 0.35f); + var rounding = MathF.Max(5f, ImGui.GetStyle().FrameRounding) * scale; + var borderThickness = MathF.Max(1f, ImGui.GetStyle().ChildBorderSize); + var clipMin = drawList.GetClipRectMin(); + var clipMax = drawList.GetClipRectMax(); + clipMin.X = MathF.Min(clipMin.X, rectMin.X); + clipMax.X = MathF.Max(clipMax.X, rectMax.X); + + drawList.ChannelsSetCurrent(0); + drawList.PushClipRect(clipMin, clipMax, false); + drawList.AddRectFilled(rectMin, rectMax, UiSharedService.Color(fill), rounding); + drawList.AddRect(rectMin, rectMax, UiSharedService.Color(border), 
rounding, ImDrawFlags.None, borderThickness); + drawList.PopClipRect(); + drawList.ChannelsMerge(); + + ImGui.TableSetColumnIndex(2); + ImGui.SetCursorScreenPos(new Vector2(col2Start.X, cardTop + cardHeight)); + ImGui.Dummy(new Vector2(0f, rowGap)); + } + + private static bool DrawAccentCheckbox(string id, ref bool value, Vector4 accent) + { + var frame = new Vector4(accent.X, accent.Y, accent.Z, 0.14f); + var frameHovered = new Vector4(accent.X, accent.Y, accent.Z, 0.22f); + var frameActive = new Vector4(accent.X, accent.Y, accent.Z, 0.3f); + bool changed; + using (ImRaii.PushColor(ImGuiCol.CheckMark, accent)) + using (ImRaii.PushColor(ImGuiCol.FrameBg, frame)) + using (ImRaii.PushColor(ImGuiCol.FrameBgHovered, frameHovered)) + using (ImRaii.PushColor(ImGuiCol.FrameBgActive, frameActive)) + { + changed = ImGui.Checkbox(id, ref value); + } + return changed; + } + + private static void DrawPanelBox(string id, Vector4 background, Vector4 border, float rounding, Vector2 padding, Action content) + { + using (ImRaii.PushId(id)) + { + var startPos = ImGui.GetCursorScreenPos(); + var availableWidth = ImGui.GetContentRegionAvail().X; + var drawList = ImGui.GetWindowDrawList(); + + drawList.ChannelsSplit(2); + drawList.ChannelsSetCurrent(1); + + using (ImRaii.Group()) + { + ImGui.Dummy(new Vector2(0f, padding.Y)); + ImGui.Indent(padding.X); + content(); + ImGui.Unindent(padding.X); + ImGui.Dummy(new Vector2(0f, padding.Y)); + } + + var rectMin = startPos; + var rectMax = new Vector2(startPos.X + availableWidth, ImGui.GetItemRectMax().Y); + var borderThickness = MathF.Max(1f, ImGui.GetStyle().ChildBorderSize); + + drawList.ChannelsSetCurrent(0); + drawList.AddRectFilled(rectMin, rectMax, UiSharedService.Color(background), rounding); + drawList.AddRect(rectMin, rectMax, UiSharedService.Color(border), rounding, ImDrawFlags.None, borderThickness); + drawList.ChannelsMerge(); + } + } + + private void DrawTextureDownscaleCounters() + { + HashSet trackedPairs = new(); + + var 
snapshot = _pairUiService.GetSnapshot(); + + foreach (var pair in snapshot.DirectPairs) + { + trackedPairs.Add(pair); + } + + foreach (var group in snapshot.GroupPairs.Values) + { + foreach (var pair in group) + { + trackedPairs.Add(pair); + } + } + + long totalOriginalBytes = 0; + long totalEffectiveBytes = 0; + var hasData = false; + + foreach (var pair in trackedPairs) + { + if (!pair.IsVisible) + continue; + + var original = pair.LastAppliedApproximateVRAMBytes; + var effective = pair.LastAppliedApproximateEffectiveVRAMBytes; + + if (original >= 0) + { + hasData = true; + totalOriginalBytes += original; + } + + if (effective >= 0) + { + hasData = true; + totalEffectiveBytes += effective; + } + } + + if (!hasData) + { + ImGui.TextDisabled("VRAM usage has not been calculated yet."); + return; + } + + var savedBytes = Math.Max(0L, totalOriginalBytes - totalEffectiveBytes); + var originalText = UiSharedService.ByteToString(totalOriginalBytes, addSuffix: true); + var effectiveText = UiSharedService.ByteToString(totalEffectiveBytes, addSuffix: true); + var savedText = UiSharedService.ByteToString(savedBytes, addSuffix: true); + + ImGui.TextUnformatted($"Total VRAM usage (original): {originalText}"); + ImGui.TextUnformatted($"Total VRAM usage (effective): {effectiveText}"); + + if (savedBytes > 0) + { + UiSharedService.ColorText($"VRAM saved by downscaling: {savedText}", UIColors.Get("LightlessGreen")); + } + else + { + ImGui.TextUnformatted($"VRAM saved by downscaling: {savedText}"); + } + } + + private void DrawTriangleDecimationCounters() + { + HashSet trackedPairs = new(); + + var snapshot = _pairUiService.GetSnapshot(); + + foreach (var pair in snapshot.DirectPairs) + { + trackedPairs.Add(pair); + } + + foreach (var group in snapshot.GroupPairs.Values) + { + foreach (var pair in group) + { + trackedPairs.Add(pair); + } + } + + long totalOriginalTris = 0; + long totalEffectiveTris = 0; + var hasData = false; + + foreach (var pair in trackedPairs) + { + if 
(!pair.IsVisible) + continue; + + var original = pair.LastAppliedDataTris; + var effective = pair.LastAppliedApproximateEffectiveTris; + + if (original >= 0) + { + hasData = true; + totalOriginalTris += original; + } + + if (effective >= 0) + { + hasData = true; + totalEffectiveTris += effective; + } + } + + if (!hasData) + { + ImGui.TextDisabled("Triangle usage has not been calculated yet."); + return; + } + + var savedTris = Math.Max(0L, totalOriginalTris - totalEffectiveTris); + var originalText = FormatTriangleCount(totalOriginalTris); + var effectiveText = FormatTriangleCount(totalEffectiveTris); + var savedText = FormatTriangleCount(savedTris); + + ImGui.TextUnformatted($"Total triangle usage (original): {originalText}"); + ImGui.TextUnformatted($"Total triangle usage (effective): {effectiveText}"); + + if (savedTris > 0) + { + UiSharedService.ColorText($"Triangles saved by decimation: {savedText}", UIColors.Get("LightlessGreen")); + } + else + { + ImGui.TextUnformatted($"Triangles saved by decimation: {savedText}"); + } + } + + private static string FormatTriangleCount(long triangleCount) + { + if (triangleCount < 0) + { + return "n/a"; + } + + if (triangleCount >= 1_000_000) + { + return FormattableString.Invariant($"{triangleCount / 1_000_000d:0.#}m tris"); + } + + if (triangleCount >= 1_000) + { + return FormattableString.Invariant($"{triangleCount / 1_000d:0.#}k tris"); + } + + return $"{triangleCount} tris"; + } +} diff --git a/LightlessSync/UI/Components/OptimizationSummaryCard.cs b/LightlessSync/UI/Components/OptimizationSummaryCard.cs new file mode 100644 index 0000000..3ff4b23 --- /dev/null +++ b/LightlessSync/UI/Components/OptimizationSummaryCard.cs @@ -0,0 +1,790 @@ +using Dalamud.Bindings.ImGui; +using Dalamud.Interface; +using Dalamud.Interface.Utility; +using Dalamud.Interface.Utility.Raii; +using LightlessSync.LightlessConfiguration; +using LightlessSync.LightlessConfiguration.Configurations; +using LightlessSync.PlayerData.Pairs; +using 
LightlessSync.Services.Mediator; +using LightlessSync.UI.Services; +using LightlessSync.UI.Style; +using LightlessSync.WebAPI.Files; +using System.Globalization; +using System.Numerics; +using System.Runtime.InteropServices; + +namespace LightlessSync.UI.Components; + +public sealed class OptimizationSummaryCard +{ + private readonly UiSharedService _uiSharedService; + private readonly PairUiService _pairUiService; + private readonly PlayerPerformanceConfigService _playerPerformanceConfig; + private readonly FileUploadManager _fileTransferManager; + private readonly LightlessMediator _lightlessMediator; + private readonly OptimizationSettingsPanel _optimizationSettingsPanel; + private readonly SeluneBrush _optimizationBrush = new(); + private const string OptimizationPopupId = "Optimization Settings##LightlessOptimization"; + private bool _optimizationPopupOpen; + private bool _optimizationPopupRequest; + private OptimizationPanelSection _optimizationPopupSection = OptimizationPanelSection.Texture; + + public OptimizationSummaryCard( + UiSharedService uiSharedService, + PairUiService pairUiService, + PlayerPerformanceConfigService playerPerformanceConfig, + FileUploadManager fileTransferManager, + LightlessMediator lightlessMediator) + { + _uiSharedService = uiSharedService; + _pairUiService = pairUiService; + _playerPerformanceConfig = playerPerformanceConfig; + _fileTransferManager = fileTransferManager; + _lightlessMediator = lightlessMediator; + _optimizationSettingsPanel = new OptimizationSettingsPanel(uiSharedService, playerPerformanceConfig, pairUiService); + } + + public bool Draw(int activeDownloads) + { + var totals = GetPerformanceTotals(); + var scale = ImGuiHelpers.GlobalScale; + var accent = UIColors.Get("LightlessPurple"); + var accentBg = new Vector4(accent.X, accent.Y, accent.Z, 0.04f); + var accentBorder = new Vector4(accent.X, accent.Y, accent.Z, 0.16f); + var summaryPadding = new Vector2(12f * scale, 6f * scale); + var summaryItemSpacing = new 
Vector2(12f * scale, 4f * scale); + var cellPadding = new Vector2(6f * scale, 2f * scale); + var lineHeight = ImGui.GetFrameHeight(); + var lineSpacing = summaryItemSpacing.Y; + var statsContentHeight = (lineHeight * 2f) + lineSpacing; + var summaryHeight = MathF.Max(56f * scale, statsContentHeight + (summaryPadding.Y * 2f) + (cellPadding.Y * 2f)); + var activeUploads = _fileTransferManager.GetCurrentUploadsSnapshot().Count(upload => !upload.IsTransferred); + + var textureButtonSize = _uiSharedService.GetIconButtonSize(FontAwesomeIcon.Images); + var modelButtonSize = _uiSharedService.GetIconButtonSize(FontAwesomeIcon.ProjectDiagram); + var buttonWidth = MathF.Max(textureButtonSize.X, modelButtonSize.X); + var performanceConfig = _playerPerformanceConfig.Current; + var textureStatus = GetTextureOptimizationStatus(performanceConfig); + var modelStatus = GetModelOptimizationStatus(performanceConfig); + var textureStatusVisual = GetOptimizationStatusVisual(textureStatus); + var modelStatusVisual = GetOptimizationStatusVisual(modelStatus); + var textureStatusLines = BuildTextureOptimizationStatusLines(performanceConfig); + var modelStatusLines = BuildModelOptimizationStatusLines(performanceConfig); + var statusIconSpacing = 6f * scale; + var statusIconWidth = MathF.Max(GetIconWidth(textureStatusVisual.Icon), GetIconWidth(modelStatusVisual.Icon)); + var buttonRowWidth = buttonWidth + statusIconWidth + statusIconSpacing; + var vramValue = totals.HasVramData + ? UiSharedService.ByteToString(totals.DisplayVramBytes, addSuffix: true) + : "n/a"; + var vramTooltip = BuildVramTooltipData(totals, UIColors.Get("LightlessBlue")); + var triangleValue = totals.HasTriangleData + ? 
FormatTriangleCount(totals.DisplayTriangleCount) + : "n/a"; + var triangleTooltip = BuildTriangleTooltipData(totals, UIColors.Get("LightlessPurple")); + + var windowPos = ImGui.GetWindowPos(); + var windowSize = ImGui.GetWindowSize(); + var footerTop = ImGui.GetCursorScreenPos().Y; + var gradientTop = MathF.Max(windowPos.Y, footerTop - (12f * scale)); + var gradientBottom = windowPos.Y + windowSize.Y; + var footerSettings = new SeluneGradientSettings + { + GradientColor = UIColors.Get("LightlessPurple"), + GradientPeakOpacity = 0.08f, + GradientPeakPosition = 0.18f, + BackgroundMode = SeluneGradientMode.Vertical, + }; + using var footerSelune = Selune.Begin(_optimizationBrush, ImGui.GetWindowDrawList(), windowPos, windowSize, footerSettings); + footerSelune.DrawGradient(gradientTop, gradientBottom, ImGui.GetIO().DeltaTime); + + using (ImRaii.PushStyle(ImGuiStyleVar.ChildRounding, 6f * scale)) + using (ImRaii.PushStyle(ImGuiStyleVar.ChildBorderSize, MathF.Max(1f, ImGui.GetStyle().ChildBorderSize))) + using (ImRaii.PushStyle(ImGuiStyleVar.WindowPadding, summaryPadding)) + using (ImRaii.PushStyle(ImGuiStyleVar.ItemSpacing, summaryItemSpacing)) + using (ImRaii.PushColor(ImGuiCol.ChildBg, UiSharedService.Color(accentBg))) + using (ImRaii.PushColor(ImGuiCol.Border, UiSharedService.Color(accentBorder))) + using (var child = ImRaii.Child("optimizationSummary", new Vector2(-1f, summaryHeight), true, ImGuiWindowFlags.NoScrollbar | ImGuiWindowFlags.NoScrollWithMouse)) + { + if (child) + { + using (ImRaii.PushStyle(ImGuiStyleVar.CellPadding, cellPadding)) + { + if (ImGui.BeginTable("optimizationSummaryTable", 2, ImGuiTableFlags.SizingStretchProp | ImGuiTableFlags.NoBordersInBody)) + { + ImGui.TableSetupColumn("Stats", ImGuiTableColumnFlags.WidthStretch, 1f); + ImGui.TableSetupColumn("Button", ImGuiTableColumnFlags.WidthFixed, buttonRowWidth + 12f * scale); + + ImGui.TableNextRow(); + ImGui.TableNextColumn(); + var availableHeight = summaryHeight - (summaryPadding.Y * 2f) - 
(cellPadding.Y * 2f); + var verticalPad = MathF.Max(0f, (availableHeight - statsContentHeight) * 0.5f); + using (ImRaii.PushStyle(ImGuiStyleVar.ItemSpacing, new Vector2(summaryItemSpacing.X, 0f))) + { + if (verticalPad > 0f) + { + ImGui.Dummy(new Vector2(0f, verticalPad)); + } + DrawOptimizationStatLine(FontAwesomeIcon.Memory, UIColors.Get("LightlessBlue"), "VRAM usage", vramValue, vramTooltip, scale); + if (lineSpacing > 0f) + { + ImGui.Dummy(new Vector2(0f, lineSpacing)); + } + DrawOptimizationStatLine(FontAwesomeIcon.ProjectDiagram, UIColors.Get("LightlessPurple"), "Triangles", triangleValue, triangleTooltip, scale); + if (verticalPad > 0f) + { + ImGui.Dummy(new Vector2(0f, verticalPad)); + } + } + + ImGui.TableNextColumn(); + var separatorX = ImGui.GetCursorScreenPos().X - cellPadding.X; + var separatorTop = ImGui.GetWindowPos().Y + summaryPadding.Y; + var separatorBottom = ImGui.GetWindowPos().Y + summaryHeight - summaryPadding.Y; + ImGui.GetWindowDrawList().AddLine( + new Vector2(separatorX, separatorTop), + new Vector2(separatorX, separatorBottom), + ImGui.ColorConvertFloat4ToU32(accentBorder), + MathF.Max(1f, 1f * scale)); + float cellWidth = ImGui.GetContentRegionAvail().X; + float offsetX = MathF.Max(0f, cellWidth - buttonRowWidth); + float alignedX = ImGui.GetCursorPosX() + offsetX; + + using (ImRaii.PushStyle(ImGuiStyleVar.FrameRounding, 6f * scale)) + using (ImRaii.PushColor(ImGuiCol.Button, ImGui.ColorConvertFloat4ToU32(new Vector4(0f, 0f, 0f, 0f)))) + { + var buttonBorderThickness = 10f * scale; + var buttonRounding = ImGui.GetStyle().FrameRounding; + + DrawOptimizationStatusButtonRow( + "Texture Optimization", + textureStatusVisual.Icon, + textureStatusVisual.Color, + textureStatusVisual.Label, + textureStatusLines, + FontAwesomeIcon.Images, + textureButtonSize, + "Texture Optimization", + activeUploads, + activeDownloads, + () => OpenOptimizationPopup(OptimizationPanelSection.Texture), + alignedX, + statusIconSpacing, + buttonBorderThickness, + 
buttonRounding); + + DrawOptimizationStatusButtonRow( + "Model Optimization", + modelStatusVisual.Icon, + modelStatusVisual.Color, + modelStatusVisual.Label, + modelStatusLines, + FontAwesomeIcon.ProjectDiagram, + modelButtonSize, + "Model Optimization", + activeUploads, + activeDownloads, + () => OpenOptimizationPopup(OptimizationPanelSection.Model), + alignedX, + statusIconSpacing, + buttonBorderThickness, + buttonRounding); + } + + ImGui.EndTable(); + } + } + } + } + + footerSelune.DrawHighlightOnly(gradientTop, gradientBottom, ImGui.GetIO().DeltaTime); + DrawOptimizationPopup(); + return true; + } + + private PerformanceTotals GetPerformanceTotals() + { + HashSet trackedPairs = new(); + + var snapshot = _pairUiService.GetSnapshot(); + + foreach (var pair in snapshot.DirectPairs) + { + trackedPairs.Add(pair); + } + + foreach (var group in snapshot.GroupPairs.Values) + { + foreach (var pair in group) + { + trackedPairs.Add(pair); + } + } + + long displayVramBytes = 0; + long originalVramBytes = 0; + long effectiveVramBytes = 0; + bool hasVramData = false; + bool hasOriginalVram = false; + bool hasEffectiveVram = false; + + long displayTriangles = 0; + long originalTriangles = 0; + long effectiveTriangles = 0; + bool hasTriangleData = false; + bool hasOriginalTriangles = false; + bool hasEffectiveTriangles = false; + + foreach (var pair in trackedPairs) + { + if (!pair.IsVisible) + { + continue; + } + + var originalVram = pair.LastAppliedApproximateVRAMBytes; + var effectiveVram = pair.LastAppliedApproximateEffectiveVRAMBytes; + + if (originalVram >= 0) + { + originalVramBytes += originalVram; + hasOriginalVram = true; + } + + if (effectiveVram >= 0) + { + effectiveVramBytes += effectiveVram; + hasEffectiveVram = true; + } + + if (effectiveVram >= 0) + { + displayVramBytes += effectiveVram; + hasVramData = true; + } + else if (originalVram >= 0) + { + displayVramBytes += originalVram; + hasVramData = true; + } + + var originalTris = pair.LastAppliedDataTris; + var 
effectiveTris = pair.LastAppliedApproximateEffectiveTris; + + if (originalTris >= 0) + { + originalTriangles += originalTris; + hasOriginalTriangles = true; + } + + if (effectiveTris >= 0) + { + effectiveTriangles += effectiveTris; + hasEffectiveTriangles = true; + } + + if (effectiveTris >= 0) + { + displayTriangles += effectiveTris; + hasTriangleData = true; + } + else if (originalTris >= 0) + { + displayTriangles += originalTris; + hasTriangleData = true; + } + } + + return new PerformanceTotals( + displayVramBytes, + originalVramBytes, + effectiveVramBytes, + displayTriangles, + originalTriangles, + effectiveTriangles, + hasVramData, + hasOriginalVram, + hasEffectiveVram, + hasTriangleData, + hasOriginalTriangles, + hasEffectiveTriangles); + } + + private void DrawOptimizationStatLine(FontAwesomeIcon icon, Vector4 iconColor, string label, string value, OptimizationStatTooltip? tooltip, float scale) + { + ImGui.AlignTextToFramePadding(); + _uiSharedService.IconText(icon, iconColor); + var hovered = ImGui.IsItemHovered(ImGuiHoveredFlags.AllowWhenBlockedByActiveItem); + ImGui.SameLine(0f, 6f * scale); + ImGui.TextUnformatted($"{label}: {value}"); + hovered |= ImGui.IsItemHovered(ImGuiHoveredFlags.AllowWhenBlockedByActiveItem); + if (hovered && tooltip.HasValue) + { + DrawOptimizationStatTooltip(tooltip.Value); + } + } + + private static OptimizationStatTooltip? 
BuildVramTooltipData(PerformanceTotals totals, Vector4 titleColor) + { + if (!totals.HasOriginalVram && !totals.HasEffectiveVram) + { + return null; + } + + var lines = new List(); + + if (totals.HasOriginalVram) + { + lines.Add(new OptimizationTooltipLine( + "Original", + UiSharedService.ByteToString(totals.OriginalVramBytes, addSuffix: true), + UIColors.Get("LightlessYellow"))); + } + + if (totals.HasEffectiveVram) + { + lines.Add(new OptimizationTooltipLine( + "Effective", + UiSharedService.ByteToString(totals.EffectiveVramBytes, addSuffix: true), + UIColors.Get("LightlessGreen"))); + } + + if (totals.HasOriginalVram && totals.HasEffectiveVram) + { + var savedBytes = Math.Max(0L, totals.OriginalVramBytes - totals.EffectiveVramBytes); + if (savedBytes > 0) + { + lines.Add(new OptimizationTooltipLine( + "Saved", + UiSharedService.ByteToString(savedBytes, addSuffix: true), + titleColor)); + } + } + + return new OptimizationStatTooltip( + "Total VRAM usage", + "Approximate texture memory across visible users.", + titleColor, + lines); + } + + private static OptimizationStatTooltip? 
BuildTriangleTooltipData(PerformanceTotals totals, Vector4 titleColor) + { + if (!totals.HasOriginalTriangles && !totals.HasEffectiveTriangles) + { + return null; + } + + var lines = new List(); + + if (totals.HasOriginalTriangles) + { + lines.Add(new OptimizationTooltipLine( + "Original", + $"{FormatTriangleCount(totals.OriginalTriangleCount)} tris", + UIColors.Get("LightlessYellow"))); + } + + if (totals.HasEffectiveTriangles) + { + lines.Add(new OptimizationTooltipLine( + "Effective", + $"{FormatTriangleCount(totals.EffectiveTriangleCount)} tris", + UIColors.Get("LightlessGreen"))); + } + + if (totals.HasOriginalTriangles && totals.HasEffectiveTriangles) + { + var savedTris = Math.Max(0L, totals.OriginalTriangleCount - totals.EffectiveTriangleCount); + if (savedTris > 0) + { + lines.Add(new OptimizationTooltipLine( + "Saved", + $"{FormatTriangleCount(savedTris)} tris", + titleColor)); + } + } + + return new OptimizationStatTooltip( + "Total triangles", + "Approximate triangle count across visible users.", + titleColor, + lines); + } + + private static string FormatTriangleCount(long triangleCount) + { + if (triangleCount < 0) + { + return "n/a"; + } + + if (triangleCount >= 1_000_000) + { + return FormattableString.Invariant($"{triangleCount / 1_000_000d:0.#}m"); + } + + if (triangleCount >= 1_000) + { + return FormattableString.Invariant($"{triangleCount / 1_000d:0.#}k"); + } + + return triangleCount.ToString(CultureInfo.InvariantCulture); + } + + private enum OptimizationStatus + { + Off, + Partial, + On, + } + + private static OptimizationStatus GetTextureOptimizationStatus(PlayerPerformanceConfig config) + { + bool trimEnabled = config.EnableNonIndexTextureMipTrim; + bool downscaleEnabled = config.EnableIndexTextureDownscale; + + if (!trimEnabled && !downscaleEnabled) + { + return OptimizationStatus.Off; + } + + return trimEnabled && downscaleEnabled + ? 
OptimizationStatus.On + : OptimizationStatus.Partial; + } + + private static OptimizationStatus GetModelOptimizationStatus(PlayerPerformanceConfig config) + { + if (!config.EnableModelDecimation) + { + return OptimizationStatus.Off; + } + + bool hasTargets = config.ModelDecimationAllowBody + || config.ModelDecimationAllowFaceHead + || config.ModelDecimationAllowTail + || config.ModelDecimationAllowClothing + || config.ModelDecimationAllowAccessories; + + return hasTargets + ? OptimizationStatus.On + : OptimizationStatus.Partial; + } + + private static (FontAwesomeIcon Icon, Vector4 Color, string Label) GetOptimizationStatusVisual(OptimizationStatus status) + { + return status switch + { + OptimizationStatus.On => (FontAwesomeIcon.Check, UIColors.Get("LightlessGreen"), "Enabled"), + OptimizationStatus.Partial => (FontAwesomeIcon.ExclamationTriangle, UIColors.Get("LightlessYellow"), "Partial"), + _ => (FontAwesomeIcon.Times, UIColors.Get("DimRed"), "Disabled"), + }; + } + + private static OptimizationTooltipLine[] BuildTextureOptimizationStatusLines(PlayerPerformanceConfig config) + { + return + [ + new OptimizationTooltipLine("Trim mip levels", FormatOnOff(config.EnableNonIndexTextureMipTrim), GetOnOffColor(config.EnableNonIndexTextureMipTrim)), + new OptimizationTooltipLine("Downscale index textures", FormatOnOff(config.EnableIndexTextureDownscale), GetOnOffColor(config.EnableIndexTextureDownscale)), + new OptimizationTooltipLine("Max dimension", config.TextureDownscaleMaxDimension.ToString(CultureInfo.InvariantCulture)), + new OptimizationTooltipLine("Only downscale uncompressed", FormatOnOff(config.OnlyDownscaleUncompressedTextures), GetOnOffColor(config.OnlyDownscaleUncompressedTextures)), + new OptimizationTooltipLine("Compress uncompressed textures", FormatOnOff(config.EnableUncompressedTextureCompression), GetOnOffColor(config.EnableUncompressedTextureCompression)), + new OptimizationTooltipLine("Skip auto-compress mipmaps", 
FormatOnOff(config.SkipUncompressedTextureCompressionMipMaps), GetOnOffColor(config.SkipUncompressedTextureCompressionMipMaps)), + new OptimizationTooltipLine("Keep original textures", FormatOnOff(config.KeepOriginalTextureFiles), GetOnOffColor(config.KeepOriginalTextureFiles)), + new OptimizationTooltipLine("Skip preferred pairs", FormatOnOff(config.SkipTextureDownscaleForPreferredPairs), GetOnOffColor(config.SkipTextureDownscaleForPreferredPairs)), + ]; + } + + private static OptimizationTooltipLine[] BuildModelOptimizationStatusLines(PlayerPerformanceConfig config) + { + var targets = new List(); + if (config.ModelDecimationAllowBody) + { + targets.Add("Body"); + } + + if (config.ModelDecimationAllowFaceHead) + { + targets.Add("Face/head"); + } + + if (config.ModelDecimationAllowTail) + { + targets.Add("Tails/Ears"); + } + + if (config.ModelDecimationAllowClothing) + { + targets.Add("Clothing"); + } + + if (config.ModelDecimationAllowAccessories) + { + targets.Add("Accessories"); + } + + var targetLabel = targets.Count > 0 ? string.Join(", ", targets) : "None"; + var targetColor = targets.Count > 0 ? 
UIColors.Get("LightlessGreen") : UIColors.Get("DimRed"); + var threshold = config.ModelDecimationTriangleThreshold.ToString("N0", CultureInfo.InvariantCulture); + var targetRatio = FormatPercent(config.ModelDecimationTargetRatio); + + return + [ + new OptimizationTooltipLine("Decimation enabled", FormatOnOff(config.EnableModelDecimation), GetOnOffColor(config.EnableModelDecimation)), + new OptimizationTooltipLine("Triangle threshold", threshold), + new OptimizationTooltipLine("Target ratio", targetRatio), + new OptimizationTooltipLine("Normalize tangents", FormatOnOff(config.ModelDecimationNormalizeTangents), GetOnOffColor(config.ModelDecimationNormalizeTangents)), + new OptimizationTooltipLine("Avoid body intersection", FormatOnOff(config.ModelDecimationAvoidBodyIntersection), GetOnOffColor(config.ModelDecimationAvoidBodyIntersection)), + new OptimizationTooltipLine("Keep original models", FormatOnOff(config.KeepOriginalModelFiles), GetOnOffColor(config.KeepOriginalModelFiles)), + new OptimizationTooltipLine("Skip preferred pairs", FormatOnOff(config.SkipModelDecimationForPreferredPairs), GetOnOffColor(config.SkipModelDecimationForPreferredPairs)), + new OptimizationTooltipLine("Targets", targetLabel, targetColor), + ]; + } + + private static string FormatOnOff(bool value) + => value ? "On" : "Off"; + + private static string FormatPercent(double value) + => FormattableString.Invariant($"{value * 100d:0.#}%"); + + private static Vector4 GetOnOffColor(bool value) + => value ? 
UIColors.Get("LightlessGreen") : UIColors.Get("DimRed"); + + private static float GetIconWidth(FontAwesomeIcon icon) + { + using var iconFont = ImRaii.PushFont(UiBuilder.IconFont); + return ImGui.CalcTextSize(icon.ToIconString()).X; + } + + private readonly record struct OptimizationStatTooltip(string Title, string Description, Vector4 TitleColor, IReadOnlyList Lines); + + private static void DrawOptimizationStatTooltip(OptimizationStatTooltip tooltip) + { + ImGui.BeginTooltip(); + ImGui.PushTextWrapPos(ImGui.GetFontSize() * 32f); + + ImGui.TextColored(tooltip.TitleColor, tooltip.Title); + ImGui.TextColored(UIColors.Get("LightlessGrey"), tooltip.Description); + + foreach (var line in tooltip.Lines) + { + ImGui.TextUnformatted($"{line.Label}:"); + ImGui.SameLine(); + if (line.ValueColor.HasValue) + { + ImGui.TextColored(line.ValueColor.Value, line.Value); + } + else + { + ImGui.TextUnformatted(line.Value); + } + } + + ImGui.PopTextWrapPos(); + ImGui.EndTooltip(); + } + + private static void DrawOptimizationButtonTooltip(string title, int activeUploads, int activeDownloads) + { + ImGui.BeginTooltip(); + ImGui.PushTextWrapPos(ImGui.GetFontSize() * 32f); + + ImGui.TextColored(UIColors.Get("LightlessPurple"), title); + ImGui.TextColored(UIColors.Get("LightlessGrey"), "Open optimization settings."); + + if (activeUploads > 0 || activeDownloads > 0) + { + ImGui.Separator(); + ImGui.TextUnformatted($"Active uploads: {activeUploads}"); + ImGui.TextUnformatted($"Active downloads: {activeDownloads}"); + } + + ImGui.PopTextWrapPos(); + ImGui.EndTooltip(); + } + + private readonly record struct OptimizationTooltipLine(string Label, string Value, Vector4? 
ValueColor = null); + + private static void DrawOptimizationStatusTooltip(string title, string statusLabel, Vector4 statusColor, IReadOnlyList lines) + { + ImGui.BeginTooltip(); + ImGui.PushTextWrapPos(ImGui.GetFontSize() * 32f); + + ImGui.TextColored(UIColors.Get("LightlessPurple"), title); + ImGui.TextUnformatted("Status:"); + ImGui.SameLine(); + ImGui.TextColored(statusColor, statusLabel); + + foreach (var line in lines) + { + ImGui.TextUnformatted($"{line.Label}:"); + ImGui.SameLine(); + if (line.ValueColor.HasValue) + { + ImGui.TextColored(line.ValueColor.Value, line.Value); + } + else + { + ImGui.TextUnformatted(line.Value); + } + } + + ImGui.PopTextWrapPos(); + ImGui.EndTooltip(); + } + + private void DrawOptimizationStatusButtonRow( + string statusTitle, + FontAwesomeIcon statusIcon, + Vector4 statusColor, + string statusLabel, + IReadOnlyList statusLines, + FontAwesomeIcon buttonIcon, + Vector2 buttonSize, + string tooltipTitle, + int activeUploads, + int activeDownloads, + Action openPopup, + float alignedX, + float iconSpacing, + float buttonBorderThickness, + float buttonRounding) + { + ImGui.SetCursorPosX(alignedX); + ImGui.AlignTextToFramePadding(); + _uiSharedService.IconText(statusIcon, statusColor); + if (ImGui.IsItemHovered(ImGuiHoveredFlags.AllowWhenBlockedByActiveItem)) + { + DrawOptimizationStatusTooltip(statusTitle, statusLabel, statusColor, statusLines); + } + + ImGui.SameLine(0f, iconSpacing); + using (ImRaii.PushFont(UiBuilder.IconFont)) + { + if (ImGui.Button(buttonIcon.ToIconString(), buttonSize)) + { + openPopup(); + } + } + + if (ImGui.IsItemHovered(ImGuiHoveredFlags.AllowWhenBlockedByActiveItem) || ImGui.IsItemActive()) + { + Selune.RegisterHighlight(ImGui.GetItemRectMin(), ImGui.GetItemRectMax(), SeluneHighlightMode.Both, true, buttonBorderThickness, exactSize: true, clipToElement: true, roundingOverride: buttonRounding); + } + + if (ImGui.IsItemHovered(ImGuiHoveredFlags.AllowWhenBlockedByActiveItem)) + { + 
DrawOptimizationButtonTooltip(tooltipTitle, activeUploads, activeDownloads); + } + } + + private void OpenOptimizationPopup(OptimizationPanelSection section) + { + _optimizationPopupSection = section; + _optimizationPopupOpen = true; + _optimizationPopupRequest = true; + } + + private void DrawOptimizationPopup() + { + if (!_optimizationPopupOpen) + { + return; + } + + if (_optimizationPopupRequest) + { + ImGui.OpenPopup(OptimizationPopupId); + _optimizationPopupRequest = false; + } + + var scale = ImGuiHelpers.GlobalScale; + ImGui.SetNextWindowSize(new Vector2(680f * scale, 640f * scale), ImGuiCond.Appearing); + + if (ImGui.BeginPopupModal(OptimizationPopupId, ref _optimizationPopupOpen, UiSharedService.PopupWindowFlags)) + { + DrawOptimizationPopupHeader(); + ImGui.Separator(); + ImGui.Dummy(new Vector2(0f, 4f * scale)); + using (var child = ImRaii.Child("optimization-popup-body", new Vector2(0f, 0f), false, ImGuiWindowFlags.AlwaysVerticalScrollbar)) + { + if (child) + { + _optimizationSettingsPanel.DrawPopup(_optimizationPopupSection); + } + } + + ImGui.EndPopup(); + } + } + + private void DrawOptimizationPopupHeader() + { + var scale = ImGuiHelpers.GlobalScale; + var (title, icon, color, section) = GetPopupHeaderData(_optimizationPopupSection); + var settingsButtonSize = _uiSharedService.GetIconButtonSize(FontAwesomeIcon.Cog); + using (var table = ImRaii.Table("optimization-popup-header", 2, ImGuiTableFlags.SizingStretchProp | ImGuiTableFlags.NoBordersInBody)) + { + if (!table) + { + return; + } + + ImGui.TableSetupColumn("Title", ImGuiTableColumnFlags.WidthStretch); + ImGui.TableSetupColumn("Settings", ImGuiTableColumnFlags.WidthFixed, settingsButtonSize.X); + + ImGui.TableNextRow(); + ImGui.TableNextColumn(); + using (_uiSharedService.MediumFont.Push()) + { + _uiSharedService.IconText(icon, color); + ImGui.SameLine(0f, 6f * scale); + ImGui.TextColored(color, title); + } + + ImGui.TableNextColumn(); + using (ImRaii.PushFont(UiBuilder.IconFont)) + { + if 
(ImGui.Button(FontAwesomeIcon.Cog.ToIconString(), settingsButtonSize)) + { + OpenOptimizationSettings(section); + } + } + + UiSharedService.AttachToolTip("Open this section in Settings."); + } + } + + private void OpenOptimizationSettings(OptimizationPanelSection section) + { + var target = section == OptimizationPanelSection.Texture + ? PerformanceSettingsSection.TextureOptimization + : PerformanceSettingsSection.ModelOptimization; + _lightlessMediator.Publish(new OpenPerformanceSettingsMessage(target)); + _optimizationPopupOpen = false; + ImGui.CloseCurrentPopup(); + } + + private static (string Title, FontAwesomeIcon Icon, Vector4 Color, OptimizationPanelSection Section) GetPopupHeaderData(OptimizationPanelSection section) + { + return section == OptimizationPanelSection.Texture + ? ("Texture Optimization", FontAwesomeIcon.Images, UIColors.Get("LightlessYellow"), OptimizationPanelSection.Texture) + : ("Model Optimization", FontAwesomeIcon.ProjectDiagram, UIColors.Get("LightlessOrange"), OptimizationPanelSection.Model); + } + + [StructLayout(LayoutKind.Auto)] + private readonly record struct PerformanceTotals( + long DisplayVramBytes, + long OriginalVramBytes, + long EffectiveVramBytes, + long DisplayTriangleCount, + long OriginalTriangleCount, + long EffectiveTriangleCount, + bool HasVramData, + bool HasOriginalVram, + bool HasEffectiveVram, + bool HasTriangleData, + bool HasOriginalTriangles, + bool HasEffectiveTriangles); +} diff --git a/LightlessSync/UI/DataAnalysisUi.cs b/LightlessSync/UI/DataAnalysisUi.cs index e0bfcb1..4ed7c30 100644 --- a/LightlessSync/UI/DataAnalysisUi.cs +++ b/LightlessSync/UI/DataAnalysisUi.cs @@ -8,13 +8,16 @@ using LightlessSync.API.Data.Enum; using LightlessSync.FileCache; using LightlessSync.Interop.Ipc; using LightlessSync.LightlessConfiguration; +using LightlessSync.LightlessConfiguration.Configurations; using LightlessSync.Services; using LightlessSync.Services.Mediator; +using LightlessSync.Services.ModelDecimation; using 
LightlessSync.Services.TextureCompression; using LightlessSync.UI.Models; using LightlessSync.Utils; using Microsoft.Extensions.Logging; using OtterTex; +using Penumbra.Api.Enums; using System.Buffers.Binary; using System.Globalization; using System.Numerics; @@ -34,9 +37,12 @@ public class DataAnalysisUi : WindowMediatorSubscriberBase private const float TextureFilterSplitterWidth = 8f; private const float TextureDetailSplitterWidth = 12f; private const float TextureDetailSplitterCollapsedWidth = 18f; + private const float ModelBatchSplitterHeight = 8f; private const float SelectedFilePanelLogicalHeight = 90f; private const float TextureHoverPreviewDelaySeconds = 1.75f; private const float TextureHoverPreviewSize = 350f; + private const float MinModelDetailPaneWidth = 520f; + private const float MaxModelDetailPaneWidth = 860f; private static readonly Vector4 SelectedTextureRowTextColor = new(0f, 0f, 0f, 1f); private readonly CharacterAnalyzer _characterAnalyzer; @@ -47,12 +53,14 @@ public class DataAnalysisUi : WindowMediatorSubscriberBase private readonly PlayerPerformanceConfigService _playerPerformanceConfig; private readonly TransientResourceManager _transientResourceManager; private readonly TransientConfigService _transientConfigService; + private readonly ModelDecimationService _modelDecimationService; private readonly TextureCompressionService _textureCompressionService; private readonly TextureMetadataHelper _textureMetadataHelper; private readonly List _textureRows = new(); private readonly Dictionary _textureSelections = new(StringComparer.OrdinalIgnoreCase); private readonly HashSet _selectedTextureKeys = new(StringComparer.OrdinalIgnoreCase); + private readonly HashSet _selectedModelKeys = new(StringComparer.OrdinalIgnoreCase); private readonly Dictionary _texturePreviews = new(StringComparer.OrdinalIgnoreCase); private readonly Dictionary _textureResolutionCache = new(StringComparer.OrdinalIgnoreCase); private readonly Dictionary 
_textureWorkspaceTabs = new(); @@ -61,20 +69,25 @@ public class DataAnalysisUi : WindowMediatorSubscriberBase private Dictionary>? _cachedAnalysis; private CancellationTokenSource _conversionCancellationTokenSource = new(); + private CancellationTokenSource _modelDecimationCts = new(); private CancellationTokenSource _transientRecordCts = new(); private Task? _conversionTask; + private Task? _modelDecimationTask; private TextureConversionProgress? _lastConversionProgress; private float _textureFilterPaneWidth = 320f; private float _textureDetailPaneWidth = 360f; private float _textureDetailHeight = 360f; private float _texturePreviewSize = 360f; + private float _modelDetailPaneWidth = 720f; + private float _modelBatchPanelHeight = 0f; private string _conversionCurrentFileName = string.Empty; private string _selectedFileTypeTab = string.Empty; private string _selectedHash = string.Empty; private string _textureSearch = string.Empty; + private string _modelSearch = string.Empty; private string _textureSlotFilter = "All"; private string _selectedTextureKey = string.Empty; private string _selectedStoredCharacter = string.Empty; @@ -85,6 +98,8 @@ public class DataAnalysisUi : WindowMediatorSubscriberBase private int _conversionCurrentFileProgress = 0; private int _conversionTotalJobs; + private int _modelDecimationCurrentProgress = 0; + private int _modelDecimationTotalJobs = 0; private bool _hasUpdate = false; private bool _modalOpen = false; @@ -92,6 +107,12 @@ public class DataAnalysisUi : WindowMediatorSubscriberBase private bool _textureRowsDirty = true; private bool _textureDetailCollapsed = false; private bool _conversionFailed; + private bool _modelDecimationFailed; + private bool _showModelBatchAdvancedSettings; + private bool _dismissedModelBatchWarning; + private bool _modelBatchWarningNeverShowPending; + private bool _modelBatchWarningPendingInitialized; + private string _modelDecimationCurrentHash = string.Empty; private double _textureHoverStartTime = 0; 
#if DEBUG private bool _debugCompressionModalOpen = false; @@ -115,8 +136,8 @@ public class DataAnalysisUi : WindowMediatorSubscriberBase PerformanceCollectorService performanceCollectorService, UiSharedService uiSharedService, LightlessConfigService configService, PlayerPerformanceConfigService playerPerformanceConfig, TransientResourceManager transientResourceManager, - TransientConfigService transientConfigService, TextureCompressionService textureCompressionService, - TextureMetadataHelper textureMetadataHelper) + TransientConfigService transientConfigService, ModelDecimationService modelDecimationService, + TextureCompressionService textureCompressionService, TextureMetadataHelper textureMetadataHelper) : base(logger, mediator, "Lightless Character Data Analysis", performanceCollectorService) { _characterAnalyzer = characterAnalyzer; @@ -126,6 +147,7 @@ public class DataAnalysisUi : WindowMediatorSubscriberBase _playerPerformanceConfig = playerPerformanceConfig; _transientResourceManager = transientResourceManager; _transientConfigService = transientConfigService; + _modelDecimationService = modelDecimationService; _textureCompressionService = textureCompressionService; _textureMetadataHelper = textureMetadataHelper; Mediator.Subscribe(this, (_) => @@ -428,6 +450,7 @@ public class DataAnalysisUi : WindowMediatorSubscriberBase _cachedAnalysis = CloneAnalysis(_characterAnalyzer.LastAnalysis); _hasUpdate = false; InvalidateTextureRows(); + PruneModelSelections(); } private void DrawContentTabs() @@ -943,8 +966,10 @@ public class DataAnalysisUi : WindowMediatorSubscriberBase _selectedFileTypeTab = string.Empty; } + var modelGroup = groupedfiles.FirstOrDefault(g => string.Equals(g.Key, "mdl", StringComparison.OrdinalIgnoreCase)); var otherFileGroups = groupedfiles - .Where(g => !string.Equals(g.Key, "tex", StringComparison.Ordinal)) + .Where(g => !string.Equals(g.Key, "tex", StringComparison.OrdinalIgnoreCase) + && !string.Equals(g.Key, "mdl", 
StringComparison.OrdinalIgnoreCase)) .ToList(); if (!string.IsNullOrEmpty(_selectedFileTypeTab) && @@ -958,7 +983,7 @@ public class DataAnalysisUi : WindowMediatorSubscriberBase _selectedFileTypeTab = otherFileGroups[0].Key; } - DrawTextureWorkspace(kvp.Key, otherFileGroups); + DrawTextureWorkspace(kvp.Key, modelGroup, otherFileGroups); } } } @@ -970,9 +995,11 @@ public class DataAnalysisUi : WindowMediatorSubscriberBase _selectedTextureKey = string.Empty; _selectedTextureKeys.Clear(); _textureSelections.Clear(); + _selectedModelKeys.Clear(); ResetTextureFilters(); InvalidateTextureRows(); _conversionFailed = false; + _modelDecimationFailed = false; #if DEBUG ResetDebugCompressionModalState(); #endif @@ -996,6 +1023,7 @@ public class DataAnalysisUi : WindowMediatorSubscriberBase _textureRowsBuildCts?.Cancel(); _textureRowsBuildCts?.Dispose(); _conversionProgress.ProgressChanged -= ConversionProgress_ProgressChanged; + _modelDecimationCts.CancelDispose(); } private void ConversionProgress_ProgressChanged(object? 
sender, TextureConversionProgress e) @@ -1097,6 +1125,22 @@ public class DataAnalysisUi : WindowMediatorSubscriberBase } } + private void PruneModelSelections() + { + if (_cachedAnalysis == null || _selectedModelKeys.Count == 0) + { + return; + } + + var validKeys = _cachedAnalysis.Values + .SelectMany(entries => entries.Values) + .Where(entry => string.Equals(entry.FileType, "mdl", StringComparison.OrdinalIgnoreCase)) + .Select(entry => entry.Hash) + .ToHashSet(StringComparer.OrdinalIgnoreCase); + + _selectedModelKeys.RemoveWhere(key => !validKeys.Contains(key)); + } + private TextureRowBuildResult BuildTextureRows( Dictionary> analysis, CancellationToken token) @@ -1390,6 +1434,7 @@ public class DataAnalysisUi : WindowMediatorSubscriberBase private enum TextureWorkspaceTab { Textures, + Models, OtherFiles } @@ -1445,14 +1490,19 @@ public class DataAnalysisUi : WindowMediatorSubscriberBase public ushort MipLevels { get; } } - private void DrawTextureWorkspace(ObjectKind objectKind, IReadOnlyList> otherFileGroups) + private void DrawTextureWorkspace( + ObjectKind objectKind, + IGrouping? 
modelGroup, + IReadOnlyList> otherFileGroups) { if (!_textureWorkspaceTabs.ContainsKey(objectKind)) { _textureWorkspaceTabs[objectKind] = TextureWorkspaceTab.Textures; } - if (otherFileGroups.Count == 0) + var hasModels = modelGroup != null; + var hasOther = otherFileGroups.Count > 0; + if (!hasModels && !hasOther) { _textureWorkspaceTabs[objectKind] = TextureWorkspaceTab.Textures; DrawTextureTabContent(objectKind); @@ -1473,8 +1523,22 @@ public class DataAnalysisUi : WindowMediatorSubscriberBase } } - using (var otherFilesTab = ImRaii.TabItem($"Other file types###other_{objectKind}")) + if (hasModels && modelGroup != null) { + using var modelsTab = ImRaii.TabItem($"Models###models_{objectKind}"); + if (modelsTab) + { + if (_textureWorkspaceTabs[objectKind] != TextureWorkspaceTab.Models) + { + _textureWorkspaceTabs[objectKind] = TextureWorkspaceTab.Models; + } + DrawModelWorkspace(modelGroup); + } + } + + if (hasOther) + { + using var otherFilesTab = ImRaii.TabItem($"Other file types###other_{objectKind}"); if (otherFilesTab) { if (_textureWorkspaceTabs[objectKind] != TextureWorkspaceTab.OtherFiles) @@ -1898,6 +1962,249 @@ public class DataAnalysisUi : WindowMediatorSubscriberBase } } + private static void DrawPanelBox(string id, Vector4 background, Vector4 border, float rounding, Vector2 padding, Action content) + { + using (ImRaii.PushId(id)) + { + var startPos = ImGui.GetCursorScreenPos(); + var availableWidth = ImGui.GetContentRegionAvail().X; + var drawList = ImGui.GetWindowDrawList(); + + drawList.ChannelsSplit(2); + drawList.ChannelsSetCurrent(1); + + using (ImRaii.Group()) + { + ImGui.Dummy(new Vector2(0f, padding.Y)); + ImGui.Indent(padding.X); + content(); + ImGui.Unindent(padding.X); + ImGui.Dummy(new Vector2(0f, padding.Y)); + } + + var rectMin = startPos; + var rectMax = new Vector2(startPos.X + availableWidth, ImGui.GetItemRectMax().Y); + var borderThickness = MathF.Max(1f, ImGui.GetStyle().ChildBorderSize); + + drawList.ChannelsSetCurrent(0); + 
drawList.AddRectFilled(rectMin, rectMax, UiSharedService.Color(background), rounding); + drawList.AddRect(rectMin, rectMax, UiSharedService.Color(border), rounding, ImDrawFlags.None, borderThickness); + drawList.ChannelsMerge(); + } + } + + private void DrawModelWorkspace(IGrouping modelGroup) + { + var scale = ImGuiHelpers.GlobalScale; + ImGuiHelpers.ScaledDummy(0); + var accent = UIColors.Get("LightlessBlue"); + var baseItemSpacing = ImGui.GetStyle().ItemSpacing; + var warningAccent = UIColors.Get("LightlessOrange"); + var config = _playerPerformanceConfig.Current; + var showWarning = !_dismissedModelBatchWarning && config.ShowBatchModelDecimationWarning; + var sectionAvail = ImGui.GetContentRegionAvail().Y; + var childHeight = MathF.Max(0f, sectionAvail - 2f * scale); + var warningRectValid = false; + + using (ImRaii.PushStyle(ImGuiStyleVar.ChildBorderSize, MathF.Max(1f, ImGui.GetStyle().ChildBorderSize))) + using (ImRaii.PushStyle(ImGuiStyleVar.ChildRounding, 6f * scale)) + using (ImRaii.PushStyle(ImGuiStyleVar.WindowPadding, new Vector2(4f * scale, 2f * scale))) + using (ImRaii.PushStyle(ImGuiStyleVar.ItemSpacing, new Vector2(12f * scale, 4f * scale))) + using (var child = ImRaii.Child("modelFiles", new Vector2(-1f, childHeight), false)) + { + if (child) + { + warningRectValid = true; + using (ImRaii.Disabled(showWarning)) + { + var originalTotal = modelGroup.Sum(c => c.OriginalSize); + var compressedTotal = modelGroup.Sum(c => c.CompressedSize); + var triangleTotal = modelGroup.Sum(c => c.Triangles); + + var availableWidth = ImGui.GetContentRegionAvail().X; + var splitSpacingX = 4f * scale; + var spacingX = splitSpacingX; + var minDetailWidth = MinModelDetailPaneWidth * scale; + var maxDetailWidth = Math.Min(MaxModelDetailPaneWidth * scale, Math.Max(minDetailWidth, availableWidth - (360f * scale) - spacingX)); + var minTableWidth = 360f * scale; + + var detailWidth = Math.Clamp(_modelDetailPaneWidth, minDetailWidth, maxDetailWidth); + var tableWidth = 
availableWidth - detailWidth - spacingX; + if (tableWidth < minTableWidth) + { + detailWidth = Math.Max(0f, availableWidth - minTableWidth - spacingX); + tableWidth = availableWidth - detailWidth - spacingX; + if (tableWidth <= 0f) + { + tableWidth = availableWidth; + detailWidth = 0f; + } + } + if (detailWidth > 0f) + { + _modelDetailPaneWidth = detailWidth; + } + + ImGui.BeginGroup(); + using (var leftChild = ImRaii.Child("modelMainPane", new Vector2(detailWidth > 0f ? tableWidth : -1f, 0f), false)) + { + if (leftChild) + { + var badgeBg = new Vector4(accent.X, accent.Y, accent.Z, 0.18f); + var badgeBorder = new Vector4(accent.X, accent.Y, accent.Z, 0.35f); + var summaryHeight = MathF.Max(ImGui.GetTextLineHeightWithSpacing() * 2.6f, 36f * scale); + var summaryWidth = MathF.Min(520f * scale, ImGui.GetContentRegionAvail().X); + using (ImRaii.PushStyle(ImGuiStyleVar.ChildRounding, 4f * scale)) + using (ImRaii.PushStyle(ImGuiStyleVar.ChildBorderSize, MathF.Max(1f, ImGui.GetStyle().ChildBorderSize))) + using (ImRaii.PushColor(ImGuiCol.ChildBg, UiSharedService.Color(badgeBg))) + using (ImRaii.PushColor(ImGuiCol.Border, UiSharedService.Color(badgeBorder))) + using (var summaryChild = ImRaii.Child("modelSummary", new Vector2(summaryWidth, summaryHeight), true, ImGuiWindowFlags.NoScrollbar | ImGuiWindowFlags.NoScrollWithMouse)) + { + if (summaryChild) + { + var infoColor = ImGuiColors.DalamudGrey; + var countColor = UIColors.Get("LightlessBlue"); + var actualColor = ImGuiColors.DalamudGrey; + var compressedColor = UIColors.Get("LightlessYellow2"); + var triColor = UIColors.Get("LightlessPurple"); + + using (ImRaii.PushStyle(ImGuiStyleVar.WindowPadding, new Vector2(10f * scale, 4f * scale))) + using (ImRaii.PushStyle(ImGuiStyleVar.ItemSpacing, new Vector2(12f * scale, 2f * scale))) + using (var summaryTable = ImRaii.Table("modelSummaryTable", 4, + ImGuiTableFlags.SizingFixedFit | ImGuiTableFlags.NoBordersInBody | ImGuiTableFlags.PadOuterX | ImGuiTableFlags.NoHostExtendX, + 
new Vector2(-1f, -1f))) + { + if (summaryTable) + { + ImGui.TableNextRow(); + DrawSummaryCell(FontAwesomeIcon.LayerGroup, countColor, + modelGroup.Count().ToString("N0", CultureInfo.InvariantCulture), + "Model files", infoColor, scale); + DrawSummaryCell(FontAwesomeIcon.FileArchive, actualColor, + UiSharedService.ByteToString(originalTotal), + "Actual size", infoColor, scale); + DrawSummaryCell(FontAwesomeIcon.CompressArrowsAlt, compressedColor, + UiSharedService.ByteToString(compressedTotal), + "Compressed size", infoColor, scale); + DrawSummaryCell(FontAwesomeIcon.ProjectDiagram, triColor, + triangleTotal.ToString("N0", CultureInfo.InvariantCulture), + "Triangles", infoColor, scale); + } + } + } + } + + if (_showModelBatchAdvancedSettings) + { + var splitterHeight = ModelBatchSplitterHeight * scale; + var minBatchHeight = 140f * scale; + var minTableHeight = 180f * scale; + var availableHeight = ImGui.GetContentRegionAvail().Y; + var decimationRunning = _modelDecimationTask != null && !_modelDecimationTask.IsCompleted; + var actionsHeight = ImGui.GetFrameHeightWithSpacing(); + if (decimationRunning) + { + actionsHeight += ImGui.GetFrameHeightWithSpacing(); + } + + var maxBatchHeight = Math.Max(minBatchHeight, availableHeight - minTableHeight - splitterHeight - actionsHeight); + if (_modelBatchPanelHeight <= 0f || _modelBatchPanelHeight > maxBatchHeight) + { + _modelBatchPanelHeight = Math.Min( + maxBatchHeight, + Math.Max(minBatchHeight, (availableHeight - actionsHeight) * 0.35f)); + } + + using (var batchChild = ImRaii.Child("modelBatchArea", new Vector2(-1f, _modelBatchPanelHeight), false)) + { + if (batchChild) + { + DrawModelBatchPanel(); + } + } + + DrawHorizontalResizeHandle("##modelBatchSplitter", ref _modelBatchPanelHeight, minBatchHeight, maxBatchHeight, out _); + + using (ImRaii.PushStyle(ImGuiStyleVar.ItemSpacing, baseItemSpacing)) + { + DrawModelBatchActions(); + } + } + else + { + using (ImRaii.PushStyle(ImGuiStyleVar.ItemSpacing, baseItemSpacing)) + 
{ + DrawModelBatchPanel(); + DrawModelBatchActions(); + } + } + + using (ImRaii.PushStyle(ImGuiStyleVar.WindowPadding, new Vector2(4f * scale, 4f * scale))) + using (ImRaii.PushStyle(ImGuiStyleVar.ItemSpacing, new Vector2(18f * scale, 4f * scale))) + using (ImRaii.PushStyle(ImGuiStyleVar.FramePadding, new Vector2(4f * scale, 3f * scale))) + using (ImRaii.PushStyle(ImGuiStyleVar.CellPadding, new Vector2(6f * scale, 3f * scale))) + { + DrawTable(modelGroup); + } + } + } + ImGui.EndGroup(); + + if (detailWidth > 0f) + { + var leftMin = ImGui.GetItemRectMin(); + var leftMax = ImGui.GetItemRectMax(); + var leftHeight = leftMax.Y - leftMin.Y; + var leftTopLocal = leftMin - ImGui.GetWindowPos(); + var maxDetailResize = Math.Min(MaxModelDetailPaneWidth * scale, Math.Max(minDetailWidth, availableWidth - minTableWidth - spacingX)); + using (ImRaii.PushStyle(ImGuiStyleVar.ItemSpacing, new Vector2(splitSpacingX, ImGui.GetStyle().ItemSpacing.Y))) + { + DrawVerticalResizeHandle( + "##modelDetailSplitter", + leftTopLocal.Y, + leftHeight, + ref _modelDetailPaneWidth, + minDetailWidth, + maxDetailResize, + out _, + invert: true, + splitterWidthOverride: TextureDetailSplitterWidth); + } + + ImGui.BeginGroup(); + using (ImRaii.PushStyle(ImGuiStyleVar.ChildRounding, 5f * scale)) + using (ImRaii.PushStyle(ImGuiStyleVar.ChildBorderSize, MathF.Max(1f, ImGui.GetStyle().ChildBorderSize))) + using (ImRaii.PushStyle(ImGuiStyleVar.WindowPadding, new Vector2(4f * scale, 4f * scale))) + using (ImRaii.PushStyle(ImGuiStyleVar.ItemSpacing, new Vector2(12f * scale, 4f * scale))) + using (var detailChild = ImRaii.Child("modelDetailPane", new Vector2(detailWidth, 0f), true)) + { + if (detailChild) + { + DrawModelDetailPane(modelGroup); + } + } + ImGui.EndGroup(); + } + } + } + } + + if (showWarning && warningRectValid) + { + if (!_modelBatchWarningPendingInitialized) + { + _modelBatchWarningNeverShowPending = !config.ShowBatchModelDecimationWarning; + _modelBatchWarningPendingInitialized = true; + } 
+ + DrawModelBatchWarningOverlay(ImGui.GetItemRectMin(), ImGui.GetItemRectMax(), config, warningAccent); + } + else + { + _modelBatchWarningPendingInitialized = false; + } + } + private void DrawOtherFileWorkspace(IReadOnlyList> otherFileGroups) { if (otherFileGroups.Count == 0) @@ -2019,6 +2326,1008 @@ public class DataAnalysisUi : WindowMediatorSubscriberBase DrawSelectedFileDetails(activeGroup); } + private void DrawModelBatchPanel() + { + var scale = ImGuiHelpers.GlobalScale; + var config = _playerPerformanceConfig.Current; + var accent = UIColors.Get("LightlessOrange"); + var panelBg = new Vector4(accent.X, accent.Y, accent.Z, 0.12f); + var panelBorder = new Vector4(accent.X, accent.Y, accent.Z, 0.35f); + + DrawPanelBox("model-batch-panel", panelBg, panelBorder, 6f * scale, new Vector2(10f * scale, 6f * scale), () => + { + using (ImRaii.PushStyle(ImGuiStyleVar.ItemSpacing, new Vector2(12f * scale, 4f * scale))) + { + _uiSharedService.IconText(FontAwesomeIcon.ProjectDiagram, accent); + ImGui.SameLine(0f, 6f * scale); + UiSharedService.ColorText("Batch decimation", accent); + } + + UiSharedService.TextWrapped("Mark models in the table to add them to the decimation queue. Settings here apply only to batch decimation."); + + if (_modelDecimationFailed) + { + UiSharedService.ColorTextWrapped("Model decimation failed. 
Check logs for details.", UIColors.Get("DimRed")); + } + + ImGuiHelpers.ScaledDummy(4); + + using (ImRaii.PushStyle(ImGuiStyleVar.CellPadding, new Vector2(6f * scale, 2f * scale))) + using (var settingsTable = ImRaii.Table("modelBatchSettings", 2, + ImGuiTableFlags.SizingFixedFit | ImGuiTableFlags.NoBordersInBody)) + { + if (settingsTable) + { + ImGui.TableSetupColumn("Label", ImGuiTableColumnFlags.WidthFixed, 220f * scale); + ImGui.TableSetupColumn("Control", ImGuiTableColumnFlags.WidthStretch); + + ImGui.TableNextRow(); + ImGui.TableSetColumnIndex(0); + ImGui.AlignTextToFramePadding(); + ImGui.TextUnformatted("Target triangle ratio"); + var defaultTargetPercent = (float)(ModelDecimationDefaults.BatchTargetRatio * 100.0); + UiSharedService.AttachToolTip($"Percentage of triangles to keep after decimation. Default: {defaultTargetPercent:0}%.\nRight-click to reset."); + + ImGui.TableSetColumnIndex(1); + var targetPercent = (float)(config.BatchModelDecimationTargetRatio * 100.0); + var clampedPercent = Math.Clamp(targetPercent, 1f, 99f); + if (Math.Abs(clampedPercent - targetPercent) > float.Epsilon) + { + config.BatchModelDecimationTargetRatio = clampedPercent / 100.0; + _playerPerformanceConfig.Save(); + targetPercent = clampedPercent; + } + + ImGui.SetNextItemWidth(220f * scale); + if (ImGui.SliderFloat("##batch-decimation-target", ref targetPercent, 1f, 99f, "%.0f%%")) + { + config.BatchModelDecimationTargetRatio = Math.Clamp(targetPercent / 100f, 0.01f, 0.99f); + _playerPerformanceConfig.Save(); + } + if (ImGui.IsItemClicked(ImGuiMouseButton.Right)) + { + config.BatchModelDecimationTargetRatio = ModelDecimationDefaults.BatchTargetRatio; + _playerPerformanceConfig.Save(); + } + + ImGui.TableNextRow(); + ImGui.TableSetColumnIndex(0); + ImGui.AlignTextToFramePadding(); + ImGui.TextUnformatted("Normalize tangents"); + UiSharedService.AttachToolTip($"Normalize tangent vectors after decimation. Default: {(ModelDecimationDefaults.BatchNormalizeTangents ? 
"On" : "Off")}.\nRight-click to reset."); + + ImGui.TableSetColumnIndex(1); + var normalizeTangents = config.BatchModelDecimationNormalizeTangents; + if (UiSharedService.CheckboxWithBorder("##batch-decimation-normalize", ref normalizeTangents, accent, 1.5f)) + { + config.BatchModelDecimationNormalizeTangents = normalizeTangents; + _playerPerformanceConfig.Save(); + } + if (ImGui.IsItemClicked(ImGuiMouseButton.Right)) + { + config.BatchModelDecimationNormalizeTangents = ModelDecimationDefaults.BatchNormalizeTangents; + _playerPerformanceConfig.Save(); + } + + ImGui.TableNextRow(); + ImGui.TableSetColumnIndex(0); + ImGui.AlignTextToFramePadding(); + ImGui.TextUnformatted("Avoid body intersection"); + UiSharedService.AttachToolTip($"Uses body materials as a collision guard to reduce clothing clipping. Slower and may reduce decimation. Default: {(ModelDecimationDefaults.BatchAvoidBodyIntersection ? "On" : "Off")}.\nRight-click to reset."); + + ImGui.TableSetColumnIndex(1); + var avoidBodyIntersection = config.BatchModelDecimationAvoidBodyIntersection; + if (UiSharedService.CheckboxWithBorder("##batch-decimation-body-collision", ref avoidBodyIntersection, accent, 1.5f)) + { + config.BatchModelDecimationAvoidBodyIntersection = avoidBodyIntersection; + _playerPerformanceConfig.Save(); + } + if (ImGui.IsItemClicked(ImGuiMouseButton.Right)) + { + config.BatchModelDecimationAvoidBodyIntersection = ModelDecimationDefaults.BatchAvoidBodyIntersection; + _playerPerformanceConfig.Save(); + } + } + } + + ImGuiHelpers.ScaledDummy(4); + var showAdvanced = _showModelBatchAdvancedSettings; + if (UiSharedService.CheckboxWithBorder("##batch-decimation-advanced-toggle", ref showAdvanced, accent, 1.5f)) + { + _showModelBatchAdvancedSettings = showAdvanced; + } + + ImGui.SameLine(0f, 6f * scale); + ImGui.TextUnformatted("Advanced settings"); + ImGuiHelpers.ScaledDummy(2); + UiSharedService.ColorTextWrapped("Applies to automatic and batch decimation.", UIColors.Get("LightlessGrey")); + + if 
(_showModelBatchAdvancedSettings) + { + ImGuiHelpers.ScaledDummy(4); + DrawModelBatchAdvancedSettings(config, accent); + } + + ImGuiHelpers.ScaledDummy(4); + }); + } + + private void DrawModelBatchWarningOverlay(Vector2 panelMin, Vector2 panelMax, PlayerPerformanceConfig config, Vector4 accent) + { + var scale = ImGuiHelpers.GlobalScale; + var overlaySize = panelMax - panelMin; + + if (overlaySize.X <= 0f || overlaySize.Y <= 0f) + { + return; + } + + var previousCursor = ImGui.GetCursorPos(); + var windowPos = ImGui.GetWindowPos(); + ImGui.SetCursorPos(panelMin - windowPos); + + var bgColor = ImGui.GetStyle().Colors[(int)ImGuiCol.WindowBg]; + bgColor.W = 0.9f; + + ImGui.PushStyleVar(ImGuiStyleVar.ChildRounding, 6f * scale); + ImGui.PushStyleVar(ImGuiStyleVar.ChildBorderSize, 0f); + ImGui.PushStyleColor(ImGuiCol.Border, Vector4.Zero); + ImGui.PushStyleColor(ImGuiCol.ChildBg, bgColor); + + var overlayFlags = ImGuiWindowFlags.NoScrollbar + | ImGuiWindowFlags.NoScrollWithMouse + | ImGuiWindowFlags.NoSavedSettings; + + if (ImGui.BeginChild("##model_decimation_warning_overlay", overlaySize, false, overlayFlags)) + { + var contentMin = ImGui.GetWindowContentRegionMin(); + var contentMax = ImGui.GetWindowContentRegionMax(); + var contentSize = contentMax - contentMin; + var text = "Model decimation is a destructive process but the algorithm was built with multiple safety features to avoid damage to the mesh and prevent clipping.\nIt is advised to back up your important mods or models/meshes before running decimation as it's not recoverable."; + var cardWidth = MathF.Min(520f * scale, contentSize.X - (32f * scale)); + cardWidth = MathF.Max(cardWidth, 320f * scale); + var cardPadding = new Vector2(12f * scale, 10f * scale); + var wrapWidth = cardWidth - (cardPadding.X * 2f); + var textSize = ImGui.CalcTextSize(text, false, wrapWidth); + var headerHeight = ImGui.GetTextLineHeightWithSpacing(); + var rowHeight = MathF.Max(ImGui.GetFrameHeight(), headerHeight); + var 
buttonHeight = ImGui.GetFrameHeight(); + var mediumGap = 6f * scale; + var headerGap = 4f * scale; + var cardHeight = (cardPadding.Y * 2f) + + headerHeight + + headerGap + + textSize.Y + + mediumGap + + rowHeight + + mediumGap + + buttonHeight; + + var cardMin = new Vector2( + contentMin.X + Math.Max(0f, (contentSize.X - cardWidth) * 0.5f), + contentMin.Y + Math.Max(0f, (contentSize.Y - cardHeight) * 0.5f)); + var cardMax = cardMin + new Vector2(cardWidth, cardHeight); + var cardMinScreen = ImGui.GetWindowPos() + cardMin; + var cardMaxScreen = ImGui.GetWindowPos() + cardMax; + + var drawList = ImGui.GetWindowDrawList(); + var cardBg = new Vector4(accent.X, accent.Y, accent.Z, 0.24f); + var cardBorder = new Vector4(accent.X, accent.Y, accent.Z, 0.6f); + drawList.AddRectFilled(cardMinScreen, cardMaxScreen, UiSharedService.Color(cardBg), 6f * scale); + drawList.AddRect(cardMinScreen, cardMaxScreen, UiSharedService.Color(cardBorder), 6f * scale); + + var baseX = cardMin.X + cardPadding.X; + var currentY = cardMin.Y + cardPadding.Y; + + ImGui.SetCursorPos(new Vector2(baseX, currentY)); + var warningColor = UIColors.Get("LightlessYellow"); + _uiSharedService.IconText(FontAwesomeIcon.ExclamationTriangle, warningColor); + ImGui.SameLine(0f, 6f * scale); + UiSharedService.ColorText("Model Decimation", warningColor); + + currentY += headerHeight + headerGap; + ImGui.SetCursorPos(new Vector2(baseX, currentY)); + ImGui.PushTextWrapPos(baseX + wrapWidth); + ImGui.TextUnformatted(text); + ImGui.PopTextWrapPos(); + + currentY += textSize.Y + mediumGap; + ImGui.SetCursorPos(new Vector2(baseX, currentY)); + + var neverShowAgain = _modelBatchWarningNeverShowPending; + if (UiSharedService.CheckboxWithBorder("##batch-decimation-warning-never", ref neverShowAgain, accent, 1.5f)) + { + _modelBatchWarningNeverShowPending = neverShowAgain; + } + ImGui.SameLine(0f, 6f * scale); + ImGui.AlignTextToFramePadding(); + ImGui.TextUnformatted("Never show again"); + + currentY += rowHeight + 
mediumGap; + var buttonWidth = 200f * scale; + var buttonX = cardMin.X + Math.Max(0f, (cardWidth - buttonWidth) * 0.5f); + ImGui.SetCursorPos(new Vector2(buttonX, currentY)); + if (_uiSharedService.IconTextButton(FontAwesomeIcon.Check, "I understand", buttonWidth, center: true)) + { + config.ShowBatchModelDecimationWarning = !_modelBatchWarningNeverShowPending; + _playerPerformanceConfig.Save(); + _dismissedModelBatchWarning = true; + } + } + + ImGui.EndChild(); + ImGui.PopStyleColor(2); + ImGui.PopStyleVar(2); + ImGui.SetCursorPos(previousCursor); + } + + private void DrawModelBatchAdvancedSettings(PlayerPerformanceConfig config, Vector4 accent) + { + var scale = ImGuiHelpers.GlobalScale; + var advanced = config.ModelDecimationAdvanced; + var labelWidth = 190f * scale; + var itemWidth = -1f; + + using (ImRaii.PushStyle(ImGuiStyleVar.CellPadding, new Vector2(6f * scale, 2f * scale))) + using (var table = ImRaii.Table("modelBatchAdvancedSettings", 4, ImGuiTableFlags.SizingStretchSame | ImGuiTableFlags.NoBordersInBody)) + { + if (!table) + { + return; + } + + ImGui.TableSetupColumn("LabelLeft", ImGuiTableColumnFlags.WidthFixed, labelWidth); + ImGui.TableSetupColumn("ControlLeft", ImGuiTableColumnFlags.WidthStretch); + ImGui.TableSetupColumn("LabelRight", ImGuiTableColumnFlags.WidthFixed, labelWidth); + ImGui.TableSetupColumn("ControlRight", ImGuiTableColumnFlags.WidthStretch); + + var triangleThreshold = config.BatchModelDecimationTriangleThreshold; + DrawBatchAdvancedCategoryRow( + "Component limits", + "Limits that decide which meshes or components are eligible for batch decimation.", + scale); + ImGui.TableNextRow(); + if (DrawBatchAdvancedIntCell( + "Triangle threshold", + "batch-adv-triangle-threshold", + triangleThreshold, + 0, + 100_000, + ModelDecimationDefaults.BatchTriangleThreshold, + "Skip meshes below this triangle count during batch decimation (0 disables).", + itemWidth, + out var triangleThresholdValue)) + { + 
config.BatchModelDecimationTriangleThreshold = triangleThresholdValue; + _playerPerformanceConfig.Save(); + } + + var minComponentTriangles = advanced.MinComponentTriangles; + if (DrawBatchAdvancedIntCell( + "Min component triangles", + "batch-adv-min-component", + minComponentTriangles, + 0, + 200, + ModelDecimationAdvancedSettings.DefaultMinComponentTriangles, + "Components at or below this triangle count are left untouched.", + itemWidth, + out var minComponentTrianglesValue)) + { + advanced.MinComponentTriangles = minComponentTrianglesValue; + _playerPerformanceConfig.Save(); + } + + var maxEdgeFactor = advanced.MaxCollapseEdgeLengthFactor; + DrawBatchAdvancedCategoryRow( + "Collapse guards", + "Quality and topology guards that block unsafe edge collapses.", + scale); + ImGui.TableNextRow(); + if (DrawBatchAdvancedFloatCell( + "Max edge length factor", + "batch-adv-max-edge", + maxEdgeFactor, + 0.1f, + 5f, + 0.01f, + "%.2f", + ModelDecimationAdvancedSettings.DefaultMaxCollapseEdgeLengthFactor, + "Caps collapses to (average edge length * factor).", + itemWidth, + out var maxEdgeFactorValue)) + { + advanced.MaxCollapseEdgeLengthFactor = maxEdgeFactorValue; + _playerPerformanceConfig.Save(); + } + + var normalSimilarity = advanced.NormalSimilarityThresholdDegrees; + if (DrawBatchAdvancedFloatCell( + "Normal similarity (deg)", + "batch-adv-normal-sim", + normalSimilarity, + 0f, + 180f, + 1f, + "%.0f", + ModelDecimationAdvancedSettings.DefaultNormalSimilarityThresholdDegrees, + "Block collapses that bend normals beyond this angle.", + itemWidth, + out var normalSimilarityValue)) + { + advanced.NormalSimilarityThresholdDegrees = normalSimilarityValue; + _playerPerformanceConfig.Save(); + } + + var boneWeightSimilarity = advanced.BoneWeightSimilarityThreshold; + ImGui.TableNextRow(); + if (DrawBatchAdvancedFloatCell( + "Bone weight similarity", + "batch-adv-bone-sim", + boneWeightSimilarity, + 0f, + 1f, + 0.01f, + "%.2f", + 
ModelDecimationAdvancedSettings.DefaultBoneWeightSimilarityThreshold, + "Requires this bone-weight overlap to allow a collapse.", + itemWidth, + out var boneWeightSimilarityValue)) + { + advanced.BoneWeightSimilarityThreshold = boneWeightSimilarityValue; + _playerPerformanceConfig.Save(); + } + + var uvSimilarity = advanced.UvSimilarityThreshold; + if (DrawBatchAdvancedFloatCell( + "UV similarity threshold", + "batch-adv-uv-sim", + uvSimilarity, + 0f, + 0.5f, + 0.005f, + "%.3f", + ModelDecimationAdvancedSettings.DefaultUvSimilarityThreshold, + "Blocks collapses when UVs diverge beyond this threshold.", + itemWidth, + out var uvSimilarityValue)) + { + advanced.UvSimilarityThreshold = uvSimilarityValue; + _playerPerformanceConfig.Save(); + } + + var uvSeamCos = advanced.UvSeamAngleCos; + ImGui.TableNextRow(); + if (DrawBatchAdvancedFloatCell( + "UV seam cosine", + "batch-adv-uv-seam-cos", + uvSeamCos, + -1f, + 1f, + 0.01f, + "%.2f", + ModelDecimationAdvancedSettings.DefaultUvSeamAngleCos, + "Cosine threshold for UV seam detection (higher is stricter).", + itemWidth, + out var uvSeamCosValue)) + { + advanced.UvSeamAngleCos = uvSeamCosValue; + _playerPerformanceConfig.Save(); + } + + var blockUvSeams = advanced.BlockUvSeamVertices; + if (DrawBatchAdvancedBoolCell( + "Block UV seam vertices", + "batch-adv-uv-block", + blockUvSeams, + ModelDecimationAdvancedSettings.DefaultBlockUvSeamVertices, + "Prevent collapses across UV seams.", + accent, + out var blockUvSeamsValue)) + { + advanced.BlockUvSeamVertices = blockUvSeamsValue; + _playerPerformanceConfig.Save(); + } + + var allowBoundary = advanced.AllowBoundaryCollapses; + ImGui.TableNextRow(); + if (DrawBatchAdvancedBoolCell( + "Allow boundary collapses", + "batch-adv-boundary", + allowBoundary, + ModelDecimationAdvancedSettings.DefaultAllowBoundaryCollapses, + "Allow collapses on mesh boundaries (can create holes).", + accent, + out var allowBoundaryValue)) + { + advanced.AllowBoundaryCollapses = allowBoundaryValue; + 
_playerPerformanceConfig.Save(); + } + + var bodyDistance = advanced.BodyCollisionDistanceFactor; + DrawBatchAdvancedEmptyCell(); + + var bodyNoOpDistance = advanced.BodyCollisionNoOpDistanceFactor; + DrawBatchAdvancedCategoryRow( + "Body collision", + "Controls how the body mesh is used as a collision guard to reduce clothing clipping.", + scale); + ImGui.TableNextRow(); + if (DrawBatchAdvancedFloatCell( + "Body collision distance", + "batch-adv-body-distance", + bodyDistance, + 0f, + 5f, + 0.01f, + "%.2f", + ModelDecimationAdvancedSettings.DefaultBodyCollisionDistanceFactor, + "Primary body collision distance factor.", + itemWidth, + out var bodyDistanceValue)) + { + advanced.BodyCollisionDistanceFactor = bodyDistanceValue; + _playerPerformanceConfig.Save(); + } + + if (DrawBatchAdvancedFloatCell( + "Body collision fallback distance", + "batch-adv-body-noop", + bodyNoOpDistance, + 0f, + 5f, + 0.01f, + "%.2f", + ModelDecimationAdvancedSettings.DefaultBodyCollisionNoOpDistanceFactor, + "Fallback body collision distance for relaxed pass.", + itemWidth, + out var bodyNoOpDistanceValue)) + { + advanced.BodyCollisionNoOpDistanceFactor = bodyNoOpDistanceValue; + _playerPerformanceConfig.Save(); + } + + var bodyRelax = advanced.BodyCollisionAdaptiveRelaxFactor; + var bodyNearRatio = advanced.BodyCollisionAdaptiveNearRatio; + ImGui.TableNextRow(); + if (DrawBatchAdvancedFloatCell( + "Body collision relax factor", + "batch-adv-body-relax", + bodyRelax, + 0f, + 5f, + 0.01f, + "%.2f", + ModelDecimationAdvancedSettings.DefaultBodyCollisionAdaptiveRelaxFactor, + "Multiplier applied when the mesh is near the body.", + itemWidth, + out var bodyRelaxValue)) + { + advanced.BodyCollisionAdaptiveRelaxFactor = bodyRelaxValue; + _playerPerformanceConfig.Save(); + } + + if (DrawBatchAdvancedFloatCell( + "Body collision near ratio", + "batch-adv-body-near", + bodyNearRatio, + 0f, + 1f, + 0.01f, + "%.2f", + ModelDecimationAdvancedSettings.DefaultBodyCollisionAdaptiveNearRatio, + 
"Fraction of vertices near the body required to relax.", + itemWidth, + out var bodyNearRatioValue)) + { + advanced.BodyCollisionAdaptiveNearRatio = bodyNearRatioValue; + _playerPerformanceConfig.Save(); + } + + var bodyUvRelax = advanced.BodyCollisionAdaptiveUvThreshold; + var bodyNoOpUvCos = advanced.BodyCollisionNoOpUvSeamAngleCos; + ImGui.TableNextRow(); + if (DrawBatchAdvancedFloatCell( + "Body collision UV relax", + "batch-adv-body-uv", + bodyUvRelax, + 0f, + 0.5f, + 0.005f, + "%.3f", + ModelDecimationAdvancedSettings.DefaultBodyCollisionAdaptiveUvThreshold, + "UV similarity threshold used in relaxed mode.", + itemWidth, + out var bodyUvRelaxValue)) + { + advanced.BodyCollisionAdaptiveUvThreshold = bodyUvRelaxValue; + _playerPerformanceConfig.Save(); + } + + if (DrawBatchAdvancedFloatCell( + "Body collision UV cosine", + "batch-adv-body-uv-cos", + bodyNoOpUvCos, + -1f, + 1f, + 0.01f, + "%.2f", + ModelDecimationAdvancedSettings.DefaultBodyCollisionNoOpUvSeamAngleCos, + "UV seam cosine used in relaxed mode.", + itemWidth, + out var bodyNoOpUvCosValue)) + { + advanced.BodyCollisionNoOpUvSeamAngleCos = bodyNoOpUvCosValue; + _playerPerformanceConfig.Save(); + } + + var bodyProtection = advanced.BodyCollisionProtectionFactor; + var bodyProxyMin = advanced.BodyProxyTargetRatioMin; + ImGui.TableNextRow(); + if (DrawBatchAdvancedFloatCell( + "Body collision protection", + "batch-adv-body-protect", + bodyProtection, + 0f, + 5f, + 0.01f, + "%.2f", + ModelDecimationAdvancedSettings.DefaultBodyCollisionProtectionFactor, + "Expansion factor for protected vertices near the body.", + itemWidth, + out var bodyProtectionValue)) + { + advanced.BodyCollisionProtectionFactor = bodyProtectionValue; + _playerPerformanceConfig.Save(); + } + + if (DrawBatchAdvancedFloatCell( + "Body proxy min ratio", + "batch-adv-body-proxy", + bodyProxyMin, + 0f, + 1f, + 0.01f, + "%.2f", + ModelDecimationAdvancedSettings.DefaultBodyProxyTargetRatioMin, + "Minimum target ratio when decimating the 
body proxy.", + itemWidth, + out var bodyProxyMinValue)) + { + advanced.BodyProxyTargetRatioMin = bodyProxyMinValue; + _playerPerformanceConfig.Save(); + } + + var bodyInflate = advanced.BodyCollisionProxyInflate; + var bodyPenetration = advanced.BodyCollisionPenetrationFactor; + ImGui.TableNextRow(); + if (DrawBatchAdvancedFloatCell( + "Body collision inflate", + "batch-adv-body-inflate", + bodyInflate, + 0f, + 0.01f, + 0.0001f, + "%.4f", + ModelDecimationAdvancedSettings.DefaultBodyCollisionProxyInflate, + "Inflate body collision distances by this offset.", + itemWidth, + out var bodyInflateValue)) + { + advanced.BodyCollisionProxyInflate = bodyInflateValue; + _playerPerformanceConfig.Save(); + } + + if (DrawBatchAdvancedFloatCell( + "Body penetration factor", + "batch-adv-body-penetration", + bodyPenetration, + 0f, + 1f, + 0.01f, + "%.2f", + ModelDecimationAdvancedSettings.DefaultBodyCollisionPenetrationFactor, + "Reject collapses that penetrate the body below this factor.", + itemWidth, + out var bodyPenetrationValue)) + { + advanced.BodyCollisionPenetrationFactor = bodyPenetrationValue; + _playerPerformanceConfig.Save(); + } + + var minBodyDistance = advanced.MinBodyCollisionDistance; + var minBodyCell = advanced.MinBodyCollisionCellSize; + ImGui.TableNextRow(); + if (DrawBatchAdvancedFloatCell( + "Min body collision distance", + "batch-adv-body-min-dist", + minBodyDistance, + 1e-6f, + 0.01f, + 0.00001f, + "%.6f", + ModelDecimationAdvancedSettings.DefaultMinBodyCollisionDistance, + "Lower bound for body collision distance.", + itemWidth, + out var minBodyDistanceValue)) + { + advanced.MinBodyCollisionDistance = minBodyDistanceValue; + _playerPerformanceConfig.Save(); + } + + if (DrawBatchAdvancedFloatCell( + "Min body collision cell size", + "batch-adv-body-min-cell", + minBodyCell, + 1e-6f, + 0.01f, + 0.00001f, + "%.6f", + ModelDecimationAdvancedSettings.DefaultMinBodyCollisionCellSize, + "Lower bound for the body collision grid size.", + itemWidth, + out var 
minBodyCellValue)) + { + advanced.MinBodyCollisionCellSize = minBodyCellValue; + _playerPerformanceConfig.Save(); + } + } + } + + private bool DrawBatchAdvancedIntCell( + string label, + string id, + int currentValue, + int minValue, + int maxValue, + int defaultValue, + string tooltip, + float itemWidth, + out int newValue) + { + ImGui.TableNextColumn(); + ImGui.AlignTextToFramePadding(); + ImGui.TextUnformatted(label); + UiSharedService.AttachToolTip($"{tooltip}\nDefault: {defaultValue:N0}. Right-click to reset."); + + ImGui.TableNextColumn(); + ImGui.SetNextItemWidth(itemWidth); + newValue = currentValue; + var changed = ImGui.DragInt($"##{id}", ref newValue, 1f, minValue, maxValue); + if (ImGui.IsItemClicked(ImGuiMouseButton.Right)) + { + newValue = defaultValue; + changed = true; + } + + return changed; + } + + private bool DrawBatchAdvancedFloatCell( + string label, + string id, + float currentValue, + float minValue, + float maxValue, + float speed, + string format, + float defaultValue, + string tooltip, + float itemWidth, + out float newValue) + { + ImGui.TableNextColumn(); + ImGui.AlignTextToFramePadding(); + ImGui.TextUnformatted(label); + var defaultText = defaultValue.ToString("0.#######", CultureInfo.InvariantCulture); + UiSharedService.AttachToolTip($"{tooltip}\nDefault: {defaultText}. 
Right-click to reset."); + + ImGui.TableNextColumn(); + ImGui.SetNextItemWidth(itemWidth); + newValue = currentValue; + var changed = ImGui.DragFloat($"##{id}", ref newValue, speed, minValue, maxValue, format); + if (ImGui.IsItemClicked(ImGuiMouseButton.Right)) + { + newValue = defaultValue; + changed = true; + } + + return changed; + } + + private bool DrawBatchAdvancedBoolCell( + string label, + string id, + bool currentValue, + bool defaultValue, + string tooltip, + Vector4 accent, + out bool newValue) + { + ImGui.TableNextColumn(); + ImGui.AlignTextToFramePadding(); + ImGui.TextUnformatted(label); + UiSharedService.AttachToolTip($"{tooltip}\nDefault: {(defaultValue ? "On" : "Off")}. Right-click to reset."); + + ImGui.TableNextColumn(); + newValue = currentValue; + var changed = UiSharedService.CheckboxWithBorder($"##{id}", ref newValue, accent, 1.5f); + if (ImGui.IsItemClicked(ImGuiMouseButton.Right)) + { + newValue = defaultValue; + changed = true; + } + + return changed; + } + + private static void DrawBatchAdvancedEmptyCell() + { + ImGui.TableNextColumn(); + ImGui.TableNextColumn(); + } + + private void DrawBatchAdvancedCategoryRow(string label, string tooltip, float scale) + { + ImGui.TableNextRow(); + ImGui.TableSetColumnIndex(0); + ImGui.AlignTextToFramePadding(); + ImGui.TextColored(UIColors.Get("LightlessBlue"), label); + _uiSharedService.DrawHelpText(tooltip); + ImGui.TableSetColumnIndex(1); + ImGui.Dummy(Vector2.Zero); + ImGui.TableSetColumnIndex(2); + ImGui.Dummy(Vector2.Zero); + ImGui.TableSetColumnIndex(3); + ImGui.Dummy(Vector2.Zero); + } + + private void DrawModelBatchActions() + { + var scale = ImGuiHelpers.GlobalScale; + PruneModelSelections(); + var selectionCount = _selectedModelKeys.Count; + var decimationRunning = _modelDecimationTask != null && !_modelDecimationTask.IsCompleted; + + using (ImRaii.Disabled(decimationRunning || selectionCount == 0)) + { + var label = selectionCount > 0 ? 
$"Decimate {selectionCount} selected" : "Decimate selected"; + if (_uiSharedService.IconTextButton(FontAwesomeIcon.ProjectDiagram, label, 220f * scale)) + { + StartModelDecimationBatch(); + } + } + + ImGui.SameLine(); + using (ImRaii.Disabled(decimationRunning || _selectedModelKeys.Count == 0)) + { + if (_uiSharedService.IconTextButton(FontAwesomeIcon.Broom, "Clear marks", 160f * scale)) + { + _selectedModelKeys.Clear(); + } + } + + if (decimationRunning) + { + ImGui.SameLine(); + if (_uiSharedService.IconTextButton(FontAwesomeIcon.StopCircle, "Cancel", 120f * scale)) + { + _modelDecimationCts.Cancel(); + } + } + + ImGui.SameLine(); + var searchWidth = 220f * scale; + var searchStartX = ImGui.GetCursorPosX(); + var searchAvail = ImGui.GetContentRegionAvail().X; + var searchX = searchStartX + Math.Max(0f, searchAvail - searchWidth); + ImGui.SetCursorPosX(searchX); + ImGui.SetNextItemWidth(searchWidth); + var search = _modelSearch; + if (ImGui.InputTextWithHint("##model-search", "Search models...", ref search, 128)) + { + _modelSearch = search; + } + UiSharedService.AttachToolTip("Filter model rows by name, hash, or path."); + + if (decimationRunning) + { + var total = Math.Max(_modelDecimationTotalJobs, 1); + var completed = Math.Clamp(_modelDecimationCurrentProgress, 0, total); + var progress = (float)completed / total; + var label = string.IsNullOrEmpty(_modelDecimationCurrentHash) + ? $"{completed}/{total}" + : $"{completed}/{total} • {_modelDecimationCurrentHash}"; + ImGui.ProgressBar(progress, new Vector2(-1f, 0f), label); + } + } + + private void DrawModelDetailPane(IGrouping modelGroup) + { + var scale = ImGuiHelpers.GlobalScale; + CharacterAnalyzer.FileDataEntry? 
selected = null; + if (!string.IsNullOrEmpty(_selectedHash)) + { + selected = modelGroup.FirstOrDefault(entry => string.Equals(entry.Hash, _selectedHash, StringComparison.Ordinal)); + } + + UiSharedService.ColorText("Model Details", UIColors.Get("LightlessPurple")); + if (selected != null) + { + var sourcePath = selected.FilePaths.FirstOrDefault(); + if (!string.IsNullOrWhiteSpace(sourcePath)) + { + ImGui.SameLine(); + ImGui.TextUnformatted(Path.GetFileName(sourcePath)); + UiSharedService.AttachToolTip("Source file: " + sourcePath); + } + } + ImGui.Separator(); + + if (selected == null) + { + UiSharedService.ColorText("Select a model to view details.", ImGuiColors.DalamudGrey); + return; + } + + using (ImRaii.Child("modelDetailInfo", new Vector2(-1f, 0f), true, ImGuiWindowFlags.AlwaysVerticalScrollbar)) + { + var labelColor = ImGuiColors.DalamudGrey; + using (ImRaii.PushStyle(ImGuiStyleVar.CellPadding, new Vector2(6f * scale, 2f * scale))) + { + var metaFlags = ImGuiTableFlags.SizingFixedFit | ImGuiTableFlags.NoBordersInBody | ImGuiTableFlags.PadOuterX; + if (ImGui.BeginTable("modelMetaOverview", 2, metaFlags)) + { + MetaRow(FontAwesomeIcon.Cube, "Object", _selectedObjectTab.ToString()); + MetaRow(FontAwesomeIcon.Fingerprint, "Hash", selected.Hash, UIColors.Get("LightlessBlue")); + + var pendingColor = selected.IsComputed ? (Vector4?)null : UIColors.Get("LightlessYellow"); + var triangleLabel = selected.IsComputed + ? selected.Triangles.ToString("N0", CultureInfo.InvariantCulture) + : "Pending"; + MetaRow(FontAwesomeIcon.ProjectDiagram, "Triangles", triangleLabel, pendingColor); + + var originalLabel = selected.IsComputed + ? UiSharedService.ByteToString(selected.OriginalSize) + : "Pending"; + var compressedLabel = selected.IsComputed + ? 
UiSharedService.ByteToString(selected.CompressedSize) + : "Pending"; + MetaRow(FontAwesomeIcon.Database, "Original", originalLabel, pendingColor); + MetaRow(FontAwesomeIcon.CompressArrowsAlt, "Compressed", compressedLabel, pendingColor); + + ImGui.EndTable(); + } + } + + ImGuiHelpers.ScaledDummy(4); + + if (selected.IsComputed) + { + var savedBytes = selected.OriginalSize - selected.CompressedSize; + var savedMagnitude = Math.Abs(savedBytes); + var savedColor = savedBytes > 0 ? UIColors.Get("LightlessGreen") + : savedBytes < 0 ? UIColors.Get("DimRed") + : ImGuiColors.DalamudGrey; + var savedLabel = savedBytes > 0 ? "Saved" : savedBytes < 0 ? "Over" : "Delta"; + var savedPercent = selected.OriginalSize > 0 && savedMagnitude > 0 + ? $"{savedMagnitude * 100d / selected.OriginalSize:0.#}%" + : null; + + using (ImRaii.PushStyle(ImGuiStyleVar.CellPadding, new Vector2(6f * scale, 2f * scale))) + { + var statFlags = ImGuiTableFlags.SizingFixedFit | ImGuiTableFlags.NoBordersInBody | ImGuiTableFlags.PadOuterX; + if (ImGui.BeginTable("modelSizeSummary", 3, statFlags)) + { + ImGui.TableNextRow(); + StatCell(FontAwesomeIcon.Database, ImGuiColors.DalamudGrey, UiSharedService.ByteToString(selected.OriginalSize), "Original"); + StatCell(FontAwesomeIcon.CompressArrowsAlt, UIColors.Get("LightlessYellow2"), UiSharedService.ByteToString(selected.CompressedSize), "Compressed"); + StatCell(FontAwesomeIcon.ChartLine, savedColor, savedMagnitude > 0 ? UiSharedService.ByteToString(savedMagnitude) : "No change", savedLabel, savedPercent, savedColor); + ImGui.EndTable(); + } + } + } + else + { + UiSharedService.ColorTextWrapped("Size and triangle data are still being computed.", UIColors.Get("LightlessYellow")); + } + + ImGuiHelpers.ScaledDummy(6); + DrawPathList("File Paths", selected.FilePaths, "No file paths recorded."); + DrawPathList("Game Paths", selected.GamePaths, "No game paths recorded."); + + void MetaRow(FontAwesomeIcon icon, string label, string value, Vector4? 
valueColor = null) + { + ImGui.TableNextRow(); + ImGui.TableNextColumn(); + _uiSharedService.IconText(icon, labelColor); + ImGui.SameLine(0f, 4f * scale); + using (ImRaii.PushColor(ImGuiCol.Text, labelColor)) + { + ImGui.TextUnformatted(label); + } + + ImGui.TableNextColumn(); + if (valueColor.HasValue) + { + using (ImRaii.PushColor(ImGuiCol.Text, valueColor.Value)) + { + ImGui.TextUnformatted(value); + } + } + else + { + ImGui.TextUnformatted(value); + } + } + + void StatCell(FontAwesomeIcon icon, Vector4 iconColor, string mainText, string caption, string? extra = null, Vector4? extraColor = null) + { + ImGui.TableNextColumn(); + _uiSharedService.IconText(icon, iconColor); + ImGui.SameLine(0f, 4f * scale); + using (ImRaii.PushColor(ImGuiCol.Text, iconColor)) + { + ImGui.TextUnformatted(mainText); + } + using (ImRaii.PushColor(ImGuiCol.Text, ImGuiColors.DalamudGrey)) + { + ImGui.TextUnformatted(caption); + } + if (!string.IsNullOrEmpty(extra)) + { + ImGui.SameLine(0f, 4f * scale); + using (ImRaii.PushColor(ImGuiCol.Text, extraColor ?? iconColor)) + { + ImGui.TextUnformatted(extra); + } + } + } + + void DrawPathList(string title, IReadOnlyList entries, string emptyMessage) + { + var count = entries.Count; + using var headerDefault = ImRaii.PushColor(ImGuiCol.Header, UiSharedService.Color(new Vector4(0.15f, 0.15f, 0.18f, 0.95f))); + using var headerHover = ImRaii.PushColor(ImGuiCol.HeaderHovered, UiSharedService.Color(new Vector4(0.2f, 0.2f, 0.25f, 1f))); + using var headerActive = ImRaii.PushColor(ImGuiCol.HeaderActive, UiSharedService.Color(new Vector4(0.25f, 0.25f, 0.3f, 1f))); + var label = $"{title} ({count})"; + if (!ImGui.CollapsingHeader(label, count == 0 ? 
ImGuiTreeNodeFlags.Leaf : ImGuiTreeNodeFlags.None)) + { + return; + } + + if (count == 0) + { + UiSharedService.ColorText(emptyMessage, ImGuiColors.DalamudGrey); + return; + } + + var tableFlags = ImGuiTableFlags.PadOuterX | ImGuiTableFlags.NoHostExtendX | ImGuiTableFlags.SizingFixedFit | ImGuiTableFlags.RowBg | ImGuiTableFlags.BordersInnerV | ImGuiTableFlags.BordersOuter; + if (ImGui.BeginTable($"{title}Table", 2, tableFlags)) + { + ImGui.TableSetupColumn("#", ImGuiTableColumnFlags.WidthFixed, 28f * scale); + ImGui.TableSetupColumn("Path", ImGuiTableColumnFlags.WidthStretch); + ImGui.TableHeadersRow(); + + for (int i = 0; i < entries.Count; i++) + { + ImGui.TableNextRow(); + ImGui.TableNextColumn(); + ImGui.TextUnformatted($"{i + 1}."); + + ImGui.TableNextColumn(); + var wrapPos = ImGui.GetCursorPosX() + ImGui.GetColumnWidth(); + ImGui.PushTextWrapPos(wrapPos); + ImGui.TextUnformatted(entries[i]); + ImGui.PopTextWrapPos(); + } + + ImGui.EndTable(); + } + } + } + } + private void DrawSelectedFileDetails(IGrouping? 
fileGroup) { var hasGroup = fileGroup != null; @@ -2411,6 +3720,39 @@ public class DataAnalysisUi : WindowMediatorSubscriberBase _showModal = true; } + private void StartModelDecimationBatch() + { + if (_modelDecimationTask != null && !_modelDecimationTask.IsCompleted) + { + return; + } + + if (_cachedAnalysis == null) + { + return; + } + + var selectedEntries = _cachedAnalysis.Values + .SelectMany(entries => entries.Values) + .Where(entry => string.Equals(entry.FileType, "mdl", StringComparison.OrdinalIgnoreCase)) + .Where(entry => _selectedModelKeys.Contains(entry.Hash)) + .ToList(); + + if (selectedEntries.Count == 0) + { + return; + } + + _modelDecimationCts = _modelDecimationCts.CancelRecreate(); + _modelDecimationTotalJobs = selectedEntries.Count; + _modelDecimationCurrentProgress = 0; + _modelDecimationCurrentHash = string.Empty; + _modelDecimationFailed = false; + + var settings = GetBatchDecimationSettings(); + _modelDecimationTask = RunModelDecimationAsync(selectedEntries, settings, _modelDecimationCts.Token); + } + private async Task RunTextureConversionAsync(List requests, CancellationToken token) { try @@ -2432,7 +3774,7 @@ public class DataAnalysisUi : WindowMediatorSubscriberBase { try { - await _characterAnalyzer.UpdateFileEntriesAsync(affectedPaths, token).ConfigureAwait(false); + await _characterAnalyzer.UpdateFileEntriesAsync(affectedPaths, token, force: true).ConfigureAwait(false); _hasUpdate = true; } catch (OperationCanceledException) @@ -2463,6 +3805,81 @@ public class DataAnalysisUi : WindowMediatorSubscriberBase } } + private async Task RunModelDecimationAsync( + List entries, + ModelDecimationSettings settings, + CancellationToken token) + { + var affectedPaths = new HashSet(StringComparer.OrdinalIgnoreCase); + try + { + var completed = 0; + foreach (var entry in entries) + { + token.ThrowIfCancellationRequested(); + + var sourcePath = entry.FilePaths.FirstOrDefault(); + _modelDecimationCurrentHash = string.IsNullOrWhiteSpace(sourcePath) + 
? entry.Hash + : Path.GetFileName(sourcePath); + _modelDecimationCurrentProgress = completed; + if (string.IsNullOrWhiteSpace(sourcePath)) + { + completed++; + continue; + } + + _modelDecimationService.ScheduleBatchDecimation(entry.Hash, sourcePath, settings); + await _modelDecimationService.WaitForPendingJobsAsync(new[] { entry.Hash }, token).ConfigureAwait(false); + + affectedPaths.Add(sourcePath); + completed++; + _modelDecimationCurrentProgress = completed; + } + + if (!token.IsCancellationRequested && affectedPaths.Count > 0) + { + await _characterAnalyzer.UpdateFileEntriesAsync(affectedPaths, token, force: true).ConfigureAwait(false); + _hasUpdate = true; + try + { + _ipcManager.Penumbra.RequestImmediateRedraw(0, RedrawType.Redraw); + } + catch (Exception redrawEx) + { + _logger.LogWarning(redrawEx, "Failed to request redraw after batch model decimation."); + } + } + } + catch (OperationCanceledException) + { + _logger.LogInformation("Model decimation batch was cancelled."); + } + catch (Exception ex) + { + _modelDecimationFailed = true; + _logger.LogError(ex, "Model decimation batch failed."); + } + finally + { + _modelDecimationCurrentHash = string.Empty; + _selectedModelKeys.Clear(); + } + } + + private ModelDecimationSettings GetBatchDecimationSettings() + { + var config = _playerPerformanceConfig.Current; + var ratio = Math.Clamp(config.BatchModelDecimationTargetRatio, 0.01, 0.99); + var advanced = config.ModelDecimationAdvanced; + return new ModelDecimationSettings( + Math.Max(0, config.BatchModelDecimationTriangleThreshold), + ratio, + config.BatchModelDecimationNormalizeTangents, + config.BatchModelDecimationAvoidBodyIntersection, + advanced); + } + private bool DrawVerticalResizeHandle( string id, float topY, @@ -2473,12 +3890,14 @@ public class DataAnalysisUi : WindowMediatorSubscriberBase out bool isDragging, bool invert = false, bool showToggle = false, - bool isCollapsed = false) + bool isCollapsed = false, + float? 
splitterWidthOverride = null) { var scale = ImGuiHelpers.GlobalScale; - var splitterWidth = (showToggle + var baseWidth = splitterWidthOverride ?? (showToggle ? (isCollapsed ? TextureDetailSplitterCollapsedWidth : TextureDetailSplitterWidth) - : TextureFilterSplitterWidth) * scale; + : TextureFilterSplitterWidth); + var splitterWidth = baseWidth * scale; ImGui.SameLine(); var cursor = ImGui.GetCursorPos(); var contentMin = ImGui.GetWindowContentRegionMin(); @@ -2577,6 +3996,55 @@ public class DataAnalysisUi : WindowMediatorSubscriberBase return toggleClicked; } + private void DrawHorizontalResizeHandle( + string id, + ref float topHeight, + float minHeight, + float maxHeight, + out bool isDragging, + bool invert = false, + float? splitterHeightOverride = null) + { + var scale = ImGuiHelpers.GlobalScale; + var baseHeight = splitterHeightOverride ?? ModelBatchSplitterHeight; + var splitterHeight = baseHeight * scale; + var width = ImGui.GetContentRegionAvail().X; + if (width <= 0f || splitterHeight <= 0f) + { + isDragging = false; + return; + } + + ImGui.InvisibleButton(id, new Vector2(width, splitterHeight)); + var drawList = ImGui.GetWindowDrawList(); + var rectMin = ImGui.GetItemRectMin(); + var rectMax = ImGui.GetItemRectMax(); + var windowPos = ImGui.GetWindowPos(); + var contentMin = ImGui.GetWindowContentRegionMin(); + var contentMax = ImGui.GetWindowContentRegionMax(); + var clipMin = windowPos + contentMin; + var clipMax = windowPos + contentMax; + drawList.PushClipRect(clipMin, clipMax, true); + + var hovered = ImGui.IsItemHovered(); + isDragging = ImGui.IsItemActive(); + var baseColor = UIColors.Get("ButtonDefault"); + var hoverColor = UIColors.Get("LightlessPurple"); + var activeColor = UIColors.Get("LightlessPurpleActive"); + var handleColor = isDragging ? activeColor : hovered ? 
hoverColor : baseColor; + var rounding = ImGui.GetStyle().FrameRounding; + drawList.AddRectFilled(rectMin, rectMax, UiSharedService.Color(handleColor), rounding); + drawList.AddRect(rectMin, rectMax, UiSharedService.Color(new Vector4(1f, 1f, 1f, 0.12f)), rounding); + drawList.PopClipRect(); + + if (isDragging) + { + var delta = ImGui.GetIO().MouseDelta.Y / scale; + topHeight += invert ? -delta : delta; + topHeight = Math.Clamp(topHeight, minHeight, maxHeight); + } + } + private (IDalamudTextureWrap? Texture, bool IsLoading, string? Error) GetTexturePreview(TextureRow row) { var key = row.Key; @@ -3370,24 +4838,33 @@ public class DataAnalysisUi : WindowMediatorSubscriberBase private void DrawTable(IGrouping fileGroup) { - var tableColumns = string.Equals(fileGroup.Key, "mdl", StringComparison.Ordinal) ? 6 : 5; + var isModel = string.Equals(fileGroup.Key, "mdl", StringComparison.OrdinalIgnoreCase); + var tableColumns = 5; var scale = ImGuiHelpers.GlobalScale; + var selectionAccent = UIColors.Get("LightlessOrange"); using var table = ImRaii.Table("Analysis", tableColumns, - ImGuiTableFlags.Sortable | ImGuiTableFlags.RowBg | ImGuiTableFlags.ScrollY | ImGuiTableFlags.SizingStretchProp | ImGuiTableFlags.BordersOuter | ImGuiTableFlags.BordersInnerV, + ImGuiTableFlags.Sortable | ImGuiTableFlags.RowBg | ImGuiTableFlags.ScrollY | ImGuiTableFlags.SizingStretchProp | ImGuiTableFlags.BordersOuter | ImGuiTableFlags.BordersInnerV | ImGuiTableFlags.PadOuterX | ImGuiTableFlags.NoHostExtendX, new Vector2(-1f, 0f)); if (!table.Success) { return; } - ImGui.TableSetupColumn("Hash"); - ImGui.TableSetupColumn("Filepaths"); - ImGui.TableSetupColumn("Gamepaths"); - ImGui.TableSetupColumn("Original Size"); - ImGui.TableSetupColumn("Compressed Size"); - if (string.Equals(fileGroup.Key, "mdl", StringComparison.Ordinal)) + if (isModel) { - ImGui.TableSetupColumn("Triangles"); + ImGui.TableSetupColumn("##select", ImGuiTableColumnFlags.WidthFixed | ImGuiTableColumnFlags.NoSort, 32f * scale); + 
ImGui.TableSetupColumn("Model", ImGuiTableColumnFlags.WidthFixed, 380f * scale); + ImGui.TableSetupColumn("Triangles", ImGuiTableColumnFlags.WidthFixed, 120f * scale); + ImGui.TableSetupColumn("Original", ImGuiTableColumnFlags.WidthFixed, 140f * scale); + ImGui.TableSetupColumn("Compressed", ImGuiTableColumnFlags.WidthFixed, 140f * scale); + } + else + { + ImGui.TableSetupColumn("Hash", ImGuiTableColumnFlags.WidthFixed, 320f * scale); + ImGui.TableSetupColumn("Original", ImGuiTableColumnFlags.WidthFixed, 140f * scale); + ImGui.TableSetupColumn("Compressed", ImGuiTableColumnFlags.WidthFixed, 140f * scale); + ImGui.TableSetupColumn("File paths", ImGuiTableColumnFlags.WidthFixed, 90f * scale); + ImGui.TableSetupColumn("Game paths", ImGuiTableColumnFlags.WidthFixed, 90f * scale); } ImGui.TableSetupScrollFreeze(0, 1); @@ -3399,73 +4876,192 @@ public class DataAnalysisUi : WindowMediatorSubscriberBase var spec = sortSpecs.Specs[0]; bool ascending = spec.SortDirection == ImGuiSortDirection.Ascending; - switch (spec.ColumnIndex) + var columnIndex = (int)spec.ColumnIndex; + if (isModel) { - case 0: - SortCachedAnalysis(_selectedObjectTab, pair => pair.Key, ascending, StringComparer.Ordinal); - break; - case 1: - SortCachedAnalysis(_selectedObjectTab, pair => pair.Value.FilePaths.Count, ascending); - break; - case 2: - SortCachedAnalysis(_selectedObjectTab, pair => pair.Value.GamePaths.Count, ascending); - break; - case 3: - SortCachedAnalysis(_selectedObjectTab, pair => pair.Value.OriginalSize, ascending); - break; - case 4: - SortCachedAnalysis(_selectedObjectTab, pair => pair.Value.CompressedSize, ascending); - break; - case 5 when string.Equals(fileGroup.Key, "mdl", StringComparison.Ordinal): + if (columnIndex == 0) + { + // checkbox column + } + else if (columnIndex == 1) + { + SortCachedAnalysis(_selectedObjectTab, pair => GetModelDisplayName(pair.Value), ascending, StringComparer.OrdinalIgnoreCase); + } + else if (columnIndex == 2) + { 
SortCachedAnalysis(_selectedObjectTab, pair => pair.Value.Triangles, ascending); - break; + } + else if (columnIndex == 3) + { + SortCachedAnalysis(_selectedObjectTab, pair => pair.Value.OriginalSize, ascending); + } + else if (columnIndex == 4) + { + SortCachedAnalysis(_selectedObjectTab, pair => pair.Value.CompressedSize, ascending); + } + } + else + { + if (columnIndex == 0) + { + SortCachedAnalysis(_selectedObjectTab, pair => pair.Key, ascending, StringComparer.Ordinal); + } + else if (columnIndex == 1) + { + SortCachedAnalysis(_selectedObjectTab, pair => pair.Value.OriginalSize, ascending); + } + else if (columnIndex == 2) + { + SortCachedAnalysis(_selectedObjectTab, pair => pair.Value.CompressedSize, ascending); + } + else if (columnIndex == 3) + { + SortCachedAnalysis(_selectedObjectTab, pair => pair.Value.FilePaths.Count, ascending); + } + else if (columnIndex == 4) + { + SortCachedAnalysis(_selectedObjectTab, pair => pair.Value.GamePaths.Count, ascending); + } } sortSpecs.SpecsDirty = false; } - foreach (var item in fileGroup) + IEnumerable entries = fileGroup; + if (isModel && !string.IsNullOrWhiteSpace(_modelSearch)) { - using var textColor = ImRaii.PushColor(ImGuiCol.Text, new Vector4(0, 0, 0, 1), string.Equals(item.Hash, _selectedHash, StringComparison.Ordinal)); - using var missingColor = ImRaii.PushColor(ImGuiCol.Text, new Vector4(1, 1, 1, 1), !item.IsComputed); - ImGui.TableNextColumn(); - if (!item.IsComputed) - { - var warning = UiSharedService.Color(UIColors.Get("DimRed")); - ImGui.TableSetBgColor(ImGuiTableBgTarget.RowBg1, warning); - ImGui.TableSetBgColor(ImGuiTableBgTarget.RowBg0, warning); - } - if (string.Equals(_selectedHash, item.Hash, StringComparison.Ordinal)) - { - var highlight = UiSharedService.Color(UIColors.Get("LightlessYellow")); - ImGui.TableSetBgColor(ImGuiTableBgTarget.RowBg1, highlight); - ImGui.TableSetBgColor(ImGuiTableBgTarget.RowBg0, highlight); - } - ImGui.TextUnformatted(item.Hash); - if (ImGui.IsItemClicked()) - { - 
_selectedHash = string.Equals(_selectedHash, item.Hash, StringComparison.Ordinal) - ? string.Empty - : item.Hash; - } + var term = _modelSearch.Trim(); + entries = fileGroup.Where(entry => + entry.Hash.Contains(term, StringComparison.OrdinalIgnoreCase) + || GetModelDisplayName(entry).Contains(term, StringComparison.OrdinalIgnoreCase) + || entry.FilePaths.Exists(path => path.Contains(term, StringComparison.OrdinalIgnoreCase)) + || entry.GamePaths.Exists(path => path.Contains(term, StringComparison.OrdinalIgnoreCase))); + } - ImGui.TableNextColumn(); - ImGui.TextUnformatted(item.FilePaths.Count.ToString()); - - ImGui.TableNextColumn(); - ImGui.TextUnformatted(item.GamePaths.Count.ToString()); - - ImGui.TableNextColumn(); - ImGui.TextUnformatted(UiSharedService.ByteToString(item.OriginalSize)); - - ImGui.TableNextColumn(); - ImGui.TextUnformatted(UiSharedService.ByteToString(item.CompressedSize)); - - if (string.Equals(fileGroup.Key, "mdl", StringComparison.Ordinal)) + foreach (var item in entries) + { + var isSelected = string.Equals(item.Hash, _selectedHash, StringComparison.Ordinal); + var defaultTextColor = ImGui.GetColorU32(ImGuiCol.Text); + if (isModel) { ImGui.TableNextColumn(); - ImGui.TextUnformatted(item.Triangles.ToString()); + var marked = _selectedModelKeys.Contains(item.Hash); + if (UiSharedService.CheckboxWithBorder($"##model-select-{item.Hash}", ref marked, selectionAccent, 1.5f)) + { + if (marked) + { + _selectedModelKeys.Add(item.Hash); + } + else + { + _selectedModelKeys.Remove(item.Hash); + } + } + + using (ImRaii.PushColor(ImGuiCol.Text, defaultTextColor)) + { + UiSharedService.AttachToolTip("Mark model for batch decimation."); + } + ImGui.TableNextColumn(); } + else + { + ImGui.TableNextColumn(); + } + + using var textColor = ImRaii.PushColor(ImGuiCol.Text, new Vector4(0, 0, 0, 1), isSelected); + using var missingColor = ImRaii.PushColor(ImGuiCol.Text, new Vector4(1, 1, 1, 1), !item.IsComputed); + if (isModel) + { + if (!item.IsComputed) + { + 
var warning = UiSharedService.Color(UIColors.Get("DimRed")); + ImGui.TableSetBgColor(ImGuiTableBgTarget.RowBg1, warning); + ImGui.TableSetBgColor(ImGuiTableBgTarget.RowBg0, warning); + } + if (isSelected) + { + var highlight = UiSharedService.Color(UIColors.Get("LightlessYellow")); + ImGui.TableSetBgColor(ImGuiTableBgTarget.RowBg1, highlight); + ImGui.TableSetBgColor(ImGuiTableBgTarget.RowBg0, highlight); + } + + var displayName = GetModelDisplayName(item); + ImGui.TextUnformatted(displayName); + using (ImRaii.PushColor(ImGuiCol.Text, defaultTextColor)) + { + UiSharedService.AttachToolTip($"Hash: {item.Hash}"); + } + if (ImGui.IsItemClicked()) + { + _selectedHash = isSelected ? string.Empty : item.Hash; + } + } + else + { + if (!item.IsComputed) + { + var warning = UiSharedService.Color(UIColors.Get("DimRed")); + ImGui.TableSetBgColor(ImGuiTableBgTarget.RowBg1, warning); + ImGui.TableSetBgColor(ImGuiTableBgTarget.RowBg0, warning); + } + if (isSelected) + { + var highlight = UiSharedService.Color(UIColors.Get("LightlessYellow")); + ImGui.TableSetBgColor(ImGuiTableBgTarget.RowBg1, highlight); + ImGui.TableSetBgColor(ImGuiTableBgTarget.RowBg0, highlight); + } + + ImGui.TextUnformatted(item.Hash); + if (ImGui.IsItemClicked()) + { + _selectedHash = isSelected ? string.Empty : item.Hash; + } + } + + if (isModel) + { + ImGui.TableNextColumn(); + ImGui.TextUnformatted(item.IsComputed + ? item.Triangles.ToString("N0", CultureInfo.InvariantCulture) + : "Pending"); + + ImGui.TableNextColumn(); + ImGui.TextUnformatted(item.IsComputed ? UiSharedService.ByteToString(item.OriginalSize) : "Pending"); + + ImGui.TableNextColumn(); + ImGui.TextUnformatted(item.IsComputed ? UiSharedService.ByteToString(item.CompressedSize) : "Pending"); + } + else + { + ImGui.TableNextColumn(); + ImGui.TextUnformatted(item.IsComputed ? UiSharedService.ByteToString(item.OriginalSize) : "Pending"); + + ImGui.TableNextColumn(); + ImGui.TextUnformatted(item.IsComputed ? 
UiSharedService.ByteToString(item.CompressedSize) : "Pending"); + + ImGui.TableNextColumn(); + ImGui.TextUnformatted(item.FilePaths.Count.ToString(CultureInfo.InvariantCulture)); + if (item.FilePaths.Count > 0) + { + UiSharedService.AttachToolTip(string.Join(Environment.NewLine, item.FilePaths)); + } + + ImGui.TableNextColumn(); + ImGui.TextUnformatted(item.GamePaths.Count.ToString(CultureInfo.InvariantCulture)); + if (item.GamePaths.Count > 0) + { + UiSharedService.AttachToolTip(string.Join(Environment.NewLine, item.GamePaths)); + } + } + } + + static string GetModelDisplayName(CharacterAnalyzer.FileDataEntry entry) + { + var sourcePath = entry.FilePaths.FirstOrDefault(); + return string.IsNullOrWhiteSpace(sourcePath) + ? entry.Hash + : Path.GetFileName(sourcePath); } } } diff --git a/LightlessSync/UI/DownloadUi.cs b/LightlessSync/UI/DownloadUi.cs index 6cc4bd1..e9a9c53 100644 --- a/LightlessSync/UI/DownloadUi.cs +++ b/LightlessSync/UI/DownloadUi.cs @@ -325,16 +325,13 @@ public class DownloadUi : WindowMediatorSubscriberBase if (hasValidSize) { - if (dlProg > 0) - { - fillPercent = transferredBytes / (double)totalBytes; - showFill = true; - } - else if (dlDecomp > 0 || dlComplete > 0 || transferredBytes >= totalBytes) + fillPercent = totalBytes > 0 ? 
transferredBytes / (double)totalBytes : 0.0; + if (isAllComplete && totalBytes > 0) { fillPercent = 1.0; - showFill = true; } + + showFill = transferredBytes > 0 || isAllComplete; } if (showFill) diff --git a/LightlessSync/UI/SettingsUi.cs b/LightlessSync/UI/SettingsUi.cs index 207455e..85474c1 100644 --- a/LightlessSync/UI/SettingsUi.cs +++ b/LightlessSync/UI/SettingsUi.cs @@ -25,6 +25,7 @@ using LightlessSync.Services.LightFinder; using LightlessSync.Services.Mediator; using LightlessSync.Services.PairProcessing; using LightlessSync.Services.ServerConfiguration; +using LightlessSync.UI.Components; using LightlessSync.UI.Models; using LightlessSync.UI.Services; using LightlessSync.UI.Style; @@ -66,6 +67,7 @@ public class SettingsUi : WindowMediatorSubscriberBase private readonly PairUiService _pairUiService; private readonly PerformanceCollectorService _performanceCollector; private readonly PlayerPerformanceConfigService _playerPerformanceConfigService; + private readonly OptimizationSettingsPanel _optimizationSettingsPanel; private readonly PairProcessingLimiter _pairProcessingLimiter; private readonly EventAggregator _eventAggregator; private readonly ServerConfigurationManager _serverConfigurationManager; @@ -133,6 +135,12 @@ public class SettingsUi : WindowMediatorSubscriberBase private readonly Dictionary _generalTreeHighlights = new(StringComparer.Ordinal); private const float GeneralTreeHighlightDuration = 1.5f; private readonly SeluneBrush _generalSeluneBrush = new(); + private string? _performanceScrollTarget = null; + private string? 
_performanceOpenTreeTarget = null; + private const string PerformanceWarningsLabel = "Warnings"; + private const string PerformanceAutoPauseLabel = "Auto Pause"; + private const string PerformanceTextureOptimizationLabel = "Texture Optimization"; + private const string PerformanceModelOptimizationLabel = "Model Optimization"; private static readonly (string Label, SeIconChar Icon)[] LightfinderIconPresets = new[] { @@ -208,6 +216,7 @@ public class SettingsUi : WindowMediatorSubscriberBase _httpClient = httpClient; _fileCompactor = fileCompactor; _uiShared = uiShared; + _optimizationSettingsPanel = new OptimizationSettingsPanel(_uiShared, _playerPerformanceConfigService, _pairUiService); _nameplateService = nameplateService; _actorObjectService = actorObjectService; _validationProgress = new Progress<(int, int, FileCacheEntity)>(v => _currentProgress = v); @@ -229,6 +238,11 @@ public class SettingsUi : WindowMediatorSubscriberBase _selectGeneralTabOnNextDraw = true; FocusGeneralTree("Lightfinder"); }); + Mediator.Subscribe(this, msg => + { + IsOpen = true; + FocusPerformanceSection(msg.Section); + }); Mediator.Subscribe(this, (_) => IsOpen = false); Mediator.Subscribe(this, (_) => UiSharedService_GposeStart()); Mediator.Subscribe(this, (_) => UiSharedService_GposeEnd()); @@ -516,162 +530,6 @@ public class SettingsUi : WindowMediatorSubscriberBase } } - private void DrawTextureDownscaleCounters() - { - HashSet trackedPairs = new(); - - var snapshot = _pairUiService.GetSnapshot(); - - foreach (var pair in snapshot.DirectPairs) - { - trackedPairs.Add(pair); - } - - foreach (var group in snapshot.GroupPairs.Values) - { - foreach (var pair in group) - { - trackedPairs.Add(pair); - } - } - - long totalOriginalBytes = 0; - long totalEffectiveBytes = 0; - var hasData = false; - - foreach (var pair in trackedPairs) - { - if (!pair.IsVisible) - continue; - - var original = pair.LastAppliedApproximateVRAMBytes; - var effective = pair.LastAppliedApproximateEffectiveVRAMBytes; - 
- if (original >= 0) - { - hasData = true; - totalOriginalBytes += original; - } - - if (effective >= 0) - { - hasData = true; - totalEffectiveBytes += effective; - } - } - - if (!hasData) - { - ImGui.TextDisabled("VRAM usage has not been calculated yet."); - return; - } - - var savedBytes = Math.Max(0L, totalOriginalBytes - totalEffectiveBytes); - var originalText = UiSharedService.ByteToString(totalOriginalBytes, addSuffix: true); - var effectiveText = UiSharedService.ByteToString(totalEffectiveBytes, addSuffix: true); - var savedText = UiSharedService.ByteToString(savedBytes, addSuffix: true); - - ImGui.TextUnformatted($"Total VRAM usage (original): {originalText}"); - ImGui.TextUnformatted($"Total VRAM usage (effective): {effectiveText}"); - - if (savedBytes > 0) - { - UiSharedService.ColorText($"VRAM saved by downscaling: {savedText}", UIColors.Get("LightlessGreen")); - } - else - { - ImGui.TextUnformatted($"VRAM saved by downscaling: {savedText}"); - } - } - - private void DrawTriangleDecimationCounters() - { - HashSet trackedPairs = new(); - - var snapshot = _pairUiService.GetSnapshot(); - - foreach (var pair in snapshot.DirectPairs) - { - trackedPairs.Add(pair); - } - - foreach (var group in snapshot.GroupPairs.Values) - { - foreach (var pair in group) - { - trackedPairs.Add(pair); - } - } - - long totalOriginalTris = 0; - long totalEffectiveTris = 0; - var hasData = false; - - foreach (var pair in trackedPairs) - { - if (!pair.IsVisible) - continue; - - var original = pair.LastAppliedDataTris; - var effective = pair.LastAppliedApproximateEffectiveTris; - - if (original >= 0) - { - hasData = true; - totalOriginalTris += original; - } - - if (effective >= 0) - { - hasData = true; - totalEffectiveTris += effective; - } - } - - if (!hasData) - { - ImGui.TextDisabled("Triangle usage has not been calculated yet."); - return; - } - - var savedTris = Math.Max(0L, totalOriginalTris - totalEffectiveTris); - var originalText = FormatTriangleCount(totalOriginalTris); 
- var effectiveText = FormatTriangleCount(totalEffectiveTris); - var savedText = FormatTriangleCount(savedTris); - - ImGui.TextUnformatted($"Total triangle usage (original): {originalText}"); - ImGui.TextUnformatted($"Total triangle usage (effective): {effectiveText}"); - - if (savedTris > 0) - { - UiSharedService.ColorText($"Triangles saved by decimation: {savedText}", UIColors.Get("LightlessGreen")); - } - else - { - ImGui.TextUnformatted($"Triangles saved by decimation: {savedText}"); - } - - static string FormatTriangleCount(long triangleCount) - { - if (triangleCount < 0) - { - return "n/a"; - } - - if (triangleCount >= 1_000_000) - { - return FormattableString.Invariant($"{triangleCount / 1_000_000d:0.#}m tris"); - } - - if (triangleCount >= 1_000) - { - return FormattableString.Invariant($"{triangleCount / 1_000d:0.#}k tris"); - } - - return $"{triangleCount} tris"; - } - } - private void DrawThemeVectorRow(MainStyle.StyleVector2Option option) { ImGui.TableNextRow(); @@ -1593,6 +1451,24 @@ public class SettingsUi : WindowMediatorSubscriberBase ImGui.SameLine(); ImGui.TextColored(statusColor, $"[{(pair.IsVisible ? "Visible" : pair.IsOnline ? "Online" : "Offline")}]"); + if (_uiShared.IconTextButton(FontAwesomeIcon.Copy, "Copy Pair Diagnostics##pairDebugCopy")) + { + ImGui.SetClipboardText(BuildPairDiagnosticsClipboard(pair, snapshot)); + } + + UiSharedService.AttachToolTip("Copies the current pair diagnostics to the clipboard."); + + ImGui.SameLine(); + if (_uiShared.IconTextButton(FontAwesomeIcon.Copy, "Copy Last Data JSON##pairDebugCopyLastData")) + { + var lastDataForClipboard = pair.LastReceivedCharacterData; + ImGui.SetClipboardText(lastDataForClipboard is null + ? "ERROR: No character data has been received for this pair." 
+ : JsonSerializer.Serialize(lastDataForClipboard, DebugJsonOptions)); + } + + UiSharedService.AttachToolTip("Copies the last received character data JSON to the clipboard."); + if (ImGui.BeginTable("##pairDebugProperties", 2, ImGuiTableFlags.SizingStretchProp)) { DrawPairPropertyRow("UID", pair.UserData.UID); @@ -1609,8 +1485,6 @@ public class SettingsUi : WindowMediatorSubscriberBase DrawPairPropertyRow("Has Handler", FormatBool(debugInfo.HasHandler)); DrawPairPropertyRow("Handler Initialized", FormatBool(debugInfo.HandlerInitialized)); DrawPairPropertyRow("Handler Visible", FormatBool(debugInfo.HandlerVisible)); - DrawPairPropertyRow("Last Time person rendered in", FormatTimestamp(debugInfo.InvisibleSinceUtc)); - DrawPairPropertyRow("Handler Timer Temp Collection removal", FormatCountdown(debugInfo.VisibilityEvictionRemainingSeconds)); DrawPairPropertyRow("Handler Scheduled For Deletion", FormatBool(debugInfo.HandlerScheduledForDeletion)); DrawPairPropertyRow("Note", pair.GetNote() ?? "(none)"); @@ -1722,6 +1596,139 @@ public class SettingsUi : WindowMediatorSubscriberBase DrawPairEventLog(pair); } + private string BuildPairDiagnosticsClipboard(Pair pair, PairUiSnapshot snapshot) + { + var debugInfo = pair.GetDebugInfo(); + StringBuilder sb = new(); + sb.AppendLine("Lightless Pair Diagnostics"); + sb.AppendLine($"Generated: {DateTime.Now.ToString("G", CultureInfo.CurrentCulture)}"); + sb.AppendLine(); + + sb.AppendLine("Pair"); + sb.AppendLine($"Alias/UID: {pair.UserData.AliasOrUID}"); + sb.AppendLine($"UID: {pair.UserData.UID}"); + sb.AppendLine($"Alias: {(string.IsNullOrEmpty(pair.UserData.Alias) ? "(none)" : pair.UserData.Alias)}"); + sb.AppendLine($"Player Name: {pair.PlayerName ?? "(not cached)"}"); + sb.AppendLine($"Handler Ident: {(string.IsNullOrEmpty(pair.Ident) ? 
"(not bound)" : pair.Ident)}"); + sb.AppendLine($"Character Id: {FormatCharacterId(pair.PlayerCharacterId)}"); + sb.AppendLine($"Direct Pair: {FormatBool(pair.IsDirectlyPaired)}"); + sb.AppendLine($"Individual Status: {pair.IndividualPairStatus}"); + sb.AppendLine($"Any Connection: {FormatBool(pair.HasAnyConnection())}"); + sb.AppendLine($"Paused: {FormatBool(pair.IsPaused)}"); + sb.AppendLine($"Visible: {FormatBool(pair.IsVisible)}"); + sb.AppendLine($"Online: {FormatBool(pair.IsOnline)}"); + sb.AppendLine($"Has Handler: {FormatBool(debugInfo.HasHandler)}"); + sb.AppendLine($"Handler Initialized: {FormatBool(debugInfo.HandlerInitialized)}"); + sb.AppendLine($"Handler Visible: {FormatBool(debugInfo.HandlerVisible)}"); + sb.AppendLine($"Handler Scheduled For Deletion: {FormatBool(debugInfo.HandlerScheduledForDeletion)}"); + sb.AppendLine($"Note: {pair.GetNote() ?? "(none)"}"); + + sb.AppendLine(); + sb.AppendLine("Applied Data"); + sb.AppendLine($"Last Data Size: {FormatBytes(pair.LastAppliedDataBytes)}"); + sb.AppendLine($"Approx. VRAM: {FormatBytes(pair.LastAppliedApproximateVRAMBytes)}"); + sb.AppendLine($"Effective VRAM: {FormatBytes(pair.LastAppliedApproximateEffectiveVRAMBytes)}"); + sb.AppendLine($"Last Triangles: {(pair.LastAppliedDataTris < 0 ? "n/a" : pair.LastAppliedDataTris.ToString(CultureInfo.InvariantCulture))}"); + sb.AppendLine($"Effective Triangles: {(pair.LastAppliedApproximateEffectiveTris < 0 ? "n/a" : pair.LastAppliedApproximateEffectiveTris.ToString(CultureInfo.InvariantCulture))}"); + + sb.AppendLine(); + sb.AppendLine("Last Received Character Data"); + var lastData = pair.LastReceivedCharacterData; + if (lastData is null) + { + sb.AppendLine("None"); + } + else + { + var fileReplacementCount = lastData.FileReplacements.Values.Sum(list => list?.Count ?? 0); + var totalGamePaths = lastData.FileReplacements.Values.Sum(list => list?.Sum(replacement => replacement.GamePaths.Length) ?? 
0); + sb.AppendLine($"File replacements: {fileReplacementCount} entries across {totalGamePaths} game paths."); + sb.AppendLine($"Customize+: {lastData.CustomizePlusData.Count}, Glamourer entries: {lastData.GlamourerData.Count}"); + sb.AppendLine($"Manipulation length: {lastData.ManipulationData.Length}, Heels set: {FormatBool(!string.IsNullOrEmpty(lastData.HeelsData))}"); + } + + sb.AppendLine(); + sb.AppendLine("Application Timeline"); + sb.AppendLine($"Last Data Received: {FormatTimestamp(debugInfo.LastDataReceivedAt)}"); + sb.AppendLine($"Last Apply Attempt: {FormatTimestamp(debugInfo.LastApplyAttemptAt)}"); + sb.AppendLine($"Last Successful Apply: {FormatTimestamp(debugInfo.LastSuccessfulApplyAt)}"); + + if (!string.IsNullOrEmpty(debugInfo.LastFailureReason)) + { + sb.AppendLine(); + sb.AppendLine($"Last failure: {debugInfo.LastFailureReason}"); + if (debugInfo.BlockingConditions.Count > 0) + { + sb.AppendLine("Blocking conditions:"); + foreach (var condition in debugInfo.BlockingConditions) + { + sb.AppendLine($"- {condition}"); + } + } + } + + sb.AppendLine(); + sb.AppendLine("Application & Download State"); + sb.AppendLine($"Applying Data: {FormatBool(debugInfo.IsApplying)}"); + sb.AppendLine($"Downloading: {FormatBool(debugInfo.IsDownloading)}"); + sb.AppendLine($"Pending Downloads: {debugInfo.PendingDownloadCount.ToString(CultureInfo.InvariantCulture)}"); + sb.AppendLine($"Forbidden Downloads: {debugInfo.ForbiddenDownloadCount.ToString(CultureInfo.InvariantCulture)}"); + sb.AppendLine($"Pending Mod Reapply: {FormatBool(debugInfo.PendingModReapply)}"); + sb.AppendLine($"Mod Apply Deferred: {FormatBool(debugInfo.ModApplyDeferred)}"); + sb.AppendLine($"Missing Critical Mods: {debugInfo.MissingCriticalMods.ToString(CultureInfo.InvariantCulture)}"); + sb.AppendLine($"Missing Non-Critical Mods: {debugInfo.MissingNonCriticalMods.ToString(CultureInfo.InvariantCulture)}"); + sb.AppendLine($"Missing Forbidden Mods: 
{debugInfo.MissingForbiddenMods.ToString(CultureInfo.InvariantCulture)}"); + + sb.AppendLine(); + sb.AppendLine("Syncshell Memberships"); + if (snapshot.PairsWithGroups.TryGetValue(pair, out var groups) && groups.Count > 0) + { + foreach (var group in groups.OrderBy(g => g.Group.AliasOrGID, StringComparer.OrdinalIgnoreCase)) + { + var flags = group.GroupPairUserInfos.TryGetValue(pair.UserData.UID, out var info) ? info : GroupPairUserInfo.None; + var flagLabel = flags switch + { + GroupPairUserInfo.None => string.Empty, + _ => $" ({string.Join(", ", GetGroupInfoFlags(flags))})" + }; + sb.AppendLine($"{group.Group.AliasOrGID} [{group.Group.GID}]{flagLabel}"); + } + } + else + { + sb.AppendLine("Not a member of any syncshells."); + } + + sb.AppendLine(); + sb.AppendLine("Pair DTO Snapshot"); + if (pair.UserPair is null) + { + sb.AppendLine("(unavailable)"); + } + else + { + sb.AppendLine(JsonSerializer.Serialize(pair.UserPair, DebugJsonOptions)); + } + + var relevantEvents = GetRelevantPairEvents(pair, 40); + sb.AppendLine(); + sb.AppendLine("Recent Events"); + if (relevantEvents.Count == 0) + { + sb.AppendLine("No recent events were logged for this pair."); + } + else + { + foreach (var ev in relevantEvents) + { + var timestamp = ev.EventTime.ToString("T", CultureInfo.CurrentCulture); + sb.AppendLine($"{timestamp} [{ev.EventSource}] {ev.EventSeverity}: {ev.Message}"); + } + } + + return sb.ToString(); + } + private static IEnumerable GetGroupInfoFlags(GroupPairUserInfo info) { if (info.HasFlag(GroupPairUserInfo.IsModerator)) @@ -1735,23 +1742,28 @@ public class SettingsUi : WindowMediatorSubscriberBase } } - private void DrawPairEventLog(Pair pair) + private List GetRelevantPairEvents(Pair pair, int maxEvents) { - ImGui.TextUnformatted("Recent Events"); var events = _eventAggregator.EventList.Value; var alias = pair.UserData.Alias; var aliasOrUid = pair.UserData.AliasOrUID; var rawUid = pair.UserData.UID; var playerName = pair.PlayerName; - var relevantEvents = 
events.Where(e => + return events.Where(e => EventMatchesIdentifier(e, rawUid) || EventMatchesIdentifier(e, aliasOrUid) || EventMatchesIdentifier(e, alias) || (!string.IsNullOrEmpty(playerName) && string.Equals(e.Character, playerName, StringComparison.OrdinalIgnoreCase))) .OrderByDescending(e => e.EventTime) - .Take(40) + .Take(maxEvents) .ToList(); + } + + private void DrawPairEventLog(Pair pair) + { + ImGui.TextUnformatted("Recent Events"); + var relevantEvents = GetRelevantPairEvents(pair, 40); if (relevantEvents.Count == 0) { @@ -2290,11 +2302,29 @@ public class SettingsUi : WindowMediatorSubscriberBase var syncshellOfflineSeparate = _configService.Current.ShowSyncshellOfflineUsersSeparately; var greenVisiblePair = _configService.Current.ShowVisiblePairsGreenEye; var enableParticleEffects = _configService.Current.EnableParticleEffects; + var showUiWhenUiHidden = _configService.Current.ShowUiWhenUiHidden; + var showUiInGpose = _configService.Current.ShowUiInGpose; using (var behaviorTree = BeginGeneralTree("Behavior", UIColors.Get("LightlessPurple"))) { if (behaviorTree.Visible) { + if (ImGui.Checkbox("Show Lightless windows when game UI is hidden", ref showUiWhenUiHidden)) + { + _configService.Current.ShowUiWhenUiHidden = showUiWhenUiHidden; + _configService.Save(); + } + + _uiShared.DrawHelpText("When disabled, Lightless windows (except chat) are hidden when the game UI is hidden."); + + if (ImGui.Checkbox("Show Lightless windows in group pose", ref showUiInGpose)) + { + _configService.Current.ShowUiInGpose = showUiInGpose; + _configService.Save(); + } + + _uiShared.DrawHelpText("When disabled, Lightless windows (except chat) are hidden while in group pose."); + if (ImGui.Checkbox("Enable Particle Effects", ref enableParticleEffects)) { _configService.Current.EnableParticleEffects = enableParticleEffects; @@ -3401,6 +3431,43 @@ public class SettingsUi : WindowMediatorSubscriberBase _generalTreeHighlights[label] = ImGui.GetTime(); } + private void 
FocusPerformanceSection(PerformanceSettingsSection section) + { + _selectGeneralTabOnNextDraw = false; + _selectedMainTab = MainSettingsTab.Performance; + var label = section switch + { + PerformanceSettingsSection.TextureOptimization => PerformanceTextureOptimizationLabel, + PerformanceSettingsSection.ModelOptimization => PerformanceModelOptimizationLabel, + _ => PerformanceTextureOptimizationLabel, + }; + _performanceOpenTreeTarget = label; + _performanceScrollTarget = label; + } + + private bool BeginPerformanceTree(string label, Vector4 color) + { + var shouldForceOpen = string.Equals(_performanceOpenTreeTarget, label, StringComparison.Ordinal); + if (shouldForceOpen) + { + ImGui.SetNextItemOpen(true, ImGuiCond.Always); + } + + var open = _uiShared.MediumTreeNode(label, color); + if (shouldForceOpen) + { + _performanceOpenTreeTarget = null; + } + + if (open && string.Equals(_performanceScrollTarget, label, StringComparison.Ordinal)) + { + ImGui.SetScrollHereY(0f); + _performanceScrollTarget = null; + } + + return open; + } + private float GetGeneralTreeHighlightAlpha(string label) { if (!_generalTreeHighlights.TryGetValue(label, out var startTime)) @@ -3490,7 +3557,7 @@ public class SettingsUi : WindowMediatorSubscriberBase bool showPerformanceIndicator = _playerPerformanceConfigService.Current.ShowPerformanceIndicator; - if (_uiShared.MediumTreeNode("Warnings", UIColors.Get("LightlessPurple"))) + if (BeginPerformanceTree(PerformanceWarningsLabel, UIColors.Get("LightlessPurple"))) { if (ImGui.Checkbox("Show performance indicator", ref showPerformanceIndicator)) { @@ -3586,7 +3653,7 @@ public class SettingsUi : WindowMediatorSubscriberBase bool autoPauseInCombat = _playerPerformanceConfigService.Current.PauseInCombat; bool autoPauseWhilePerforming = _playerPerformanceConfigService.Current.PauseWhilePerforming; - if (_uiShared.MediumTreeNode("Auto Pause", UIColors.Get("LightlessPurple"))) + if (BeginPerformanceTree(PerformanceAutoPauseLabel, 
UIColors.Get("LightlessPurple"))) { if (ImGui.Checkbox("Auto pause sync while combat", ref autoPauseInCombat)) { @@ -3683,261 +3750,12 @@ public class SettingsUi : WindowMediatorSubscriberBase ImGui.Separator(); - if (_uiShared.MediumTreeNode("Texture Optimization", UIColors.Get("LightlessYellow"))) - { - _uiShared.MediumText("Warning", UIColors.Get("DimRed")); - _uiShared.DrawNoteLine("! ", UIColors.Get("DimRed"), - new SeStringUtils.RichTextEntry("Texture compression and downscaling is potentially a "), - new SeStringUtils.RichTextEntry("destructive", UIColors.Get("DimRed"), true), - new SeStringUtils.RichTextEntry(" process and may cause broken or incorrect character appearances.")); - - _uiShared.DrawNoteLine("! ", UIColors.Get("DimRed"), - new SeStringUtils.RichTextEntry("This feature is encouraged to help "), - new SeStringUtils.RichTextEntry("lower-end systems with limited VRAM", UIColors.Get("LightlessYellow"), true), - new SeStringUtils.RichTextEntry(" and for use in "), - new SeStringUtils.RichTextEntry("performance-critical scenarios", UIColors.Get("LightlessYellow"), true), - new SeStringUtils.RichTextEntry(".")); - - _uiShared.DrawNoteLine("! ", UIColors.Get("DimRed"), - new SeStringUtils.RichTextEntry("Runtime downscaling "), - new SeStringUtils.RichTextEntry("MAY", UIColors.Get("DimRed"), true), - new SeStringUtils.RichTextEntry(" cause higher load on the system when processing downloads.")); - - _uiShared.DrawNoteLine("!!! 
", UIColors.Get("DimRed"), - new SeStringUtils.RichTextEntry("When enabled, we cannot provide support for appearance issues caused by this setting!", UIColors.Get("DimRed"), true)); - - var textureConfig = _playerPerformanceConfigService.Current; - var trimNonIndex = textureConfig.EnableNonIndexTextureMipTrim; - if (ImGui.Checkbox("Trim mip levels for textures", ref trimNonIndex)) - { - textureConfig.EnableNonIndexTextureMipTrim = trimNonIndex; - _playerPerformanceConfigService.Save(); - } - _uiShared.DrawHelpText("When enabled, Lightless will remove high-resolution mip levels from textures (not index) that exceed the size limit and are not compressed with any kind compression."); - - var downscaleIndex = textureConfig.EnableIndexTextureDownscale; - if (ImGui.Checkbox("Downscale index textures above limit", ref downscaleIndex)) - { - textureConfig.EnableIndexTextureDownscale = downscaleIndex; - _playerPerformanceConfigService.Save(); - } - _uiShared.DrawHelpText("Controls whether Lightless reduces index textures that exceed the size limit."); - - var dimensionOptions = new[] { 512, 1024, 2048, 4096 }; - var optionLabels = dimensionOptions.Select(selector: static value => value.ToString()).ToArray(); - var currentDimension = textureConfig.TextureDownscaleMaxDimension; - var selectedIndex = Array.IndexOf(dimensionOptions, currentDimension); - if (selectedIndex < 0) - { - selectedIndex = Array.IndexOf(dimensionOptions, 2048); - } - - ImGui.SetNextItemWidth(140 * ImGuiHelpers.GlobalScale); - if (ImGui.Combo("Maximum texture dimension", ref selectedIndex, optionLabels, optionLabels.Length)) - { - textureConfig.TextureDownscaleMaxDimension = dimensionOptions[selectedIndex]; - _playerPerformanceConfigService.Save(); - } - _uiShared.DrawHelpText($"Textures above this size will be reduced until their largest dimension is at or below the limit. 
Block-compressed textures are skipped when \"Only downscale uncompressed\" is enabled.{UiSharedService.TooltipSeparator}Default: 2048"); - - var keepOriginalTextures = textureConfig.KeepOriginalTextureFiles; - if (ImGui.Checkbox("Keep original texture files", ref keepOriginalTextures)) - { - textureConfig.KeepOriginalTextureFiles = keepOriginalTextures; - _playerPerformanceConfigService.Save(); - } - _uiShared.DrawHelpText("When disabled, Lightless removes the original texture after a downscaled copy is created."); - ImGui.SameLine(); - _uiShared.DrawNoteLine("! ", UIColors.Get("LightlessYellow"), new SeStringUtils.RichTextEntry("If disabled, saved + effective VRAM usage information will not work.", UIColors.Get("LightlessYellow"))); - - var skipPreferredDownscale = textureConfig.SkipTextureDownscaleForPreferredPairs; - if (ImGui.Checkbox("Skip downscale for preferred/direct pairs", ref skipPreferredDownscale)) - { - textureConfig.SkipTextureDownscaleForPreferredPairs = skipPreferredDownscale; - _playerPerformanceConfigService.Save(); - } - _uiShared.DrawHelpText("When enabled, textures for direct pairs with preferred permissions are left untouched."); - - if (!textureConfig.EnableNonIndexTextureMipTrim && !textureConfig.EnableIndexTextureDownscale) - { - UiSharedService.ColorTextWrapped("Both trimming and downscale are disabled. 
Lightless will keep original textures regardless of size.", UIColors.Get("DimRed")); - } - - ImGui.Dummy(new Vector2(5)); - - UiSharedService.ColoredSeparator(UIColors.Get("DimRed"), 3f); - var onlyUncompressed = textureConfig.OnlyDownscaleUncompressedTextures; - if (ImGui.Checkbox("Only downscale uncompressed textures", ref onlyUncompressed)) - { - textureConfig.OnlyDownscaleUncompressedTextures = onlyUncompressed; - _playerPerformanceConfigService.Save(); - } - _uiShared.DrawHelpText("If disabled, compressed textures will be targeted for downscaling too."); - UiSharedService.ColoredSeparator(UIColors.Get("DimRed"), 3f); - - ImGui.Dummy(new Vector2(5)); - - DrawTextureDownscaleCounters(); - - ImGui.Dummy(new Vector2(5)); - - UiSharedService.ColoredSeparator(UIColors.Get("LightlessYellow"), 1.5f); - ImGui.TreePop(); - } - - ImGui.Separator(); - - if (_uiShared.MediumTreeNode("Model Optimization", UIColors.Get("DimRed"))) - { - _uiShared.MediumText("Warning", UIColors.Get("DimRed")); - _uiShared.DrawNoteLine("! ", UIColors.Get("DimRed"), - new SeStringUtils.RichTextEntry("Model decimation is a "), - new SeStringUtils.RichTextEntry("destructive", UIColors.Get("DimRed"), true), - new SeStringUtils.RichTextEntry(" process and may cause broken or incorrect character appearances.")); - - - _uiShared.DrawNoteLine("! ", UIColors.Get("DimRed"), - new SeStringUtils.RichTextEntry("This feature is encouraged to help "), - new SeStringUtils.RichTextEntry("lower-end systems with limited VRAM", UIColors.Get("LightlessYellow"), true), - new SeStringUtils.RichTextEntry(" and for use in "), - new SeStringUtils.RichTextEntry("performance-critical scenarios", UIColors.Get("LightlessYellow"), true), - new SeStringUtils.RichTextEntry(".")); - - _uiShared.DrawNoteLine("! 
", UIColors.Get("DimRed"), - new SeStringUtils.RichTextEntry("Runtime decimation "), - new SeStringUtils.RichTextEntry("MAY", UIColors.Get("DimRed"), true), - new SeStringUtils.RichTextEntry(" cause higher load on the system when processing downloads.")); - - _uiShared.DrawNoteLine("!!! ", UIColors.Get("DimRed"), - new SeStringUtils.RichTextEntry("When enabled, we cannot provide support for appearance issues caused by this setting!", UIColors.Get("DimRed"), true)); - - ImGui.Dummy(new Vector2(15)); - - _uiShared.DrawNoteLine("! ", UIColors.Get("LightlessGreen"), - new SeStringUtils.RichTextEntry("If a mesh exceeds the "), - new SeStringUtils.RichTextEntry("triangle threshold", UIColors.Get("LightlessGreen"), true), - new SeStringUtils.RichTextEntry(", it will be decimated automatically to the set "), - new SeStringUtils.RichTextEntry("target triangle ratio", UIColors.Get("LightlessGreen"), true), - new SeStringUtils.RichTextEntry(". This will reduce quality of the mesh or may break it's intended structure.")); - - - var performanceConfig = _playerPerformanceConfigService.Current; - var enableDecimation = performanceConfig.EnableModelDecimation; - if (ImGui.Checkbox("Enable model decimation", ref enableDecimation)) - { - performanceConfig.EnableModelDecimation = enableDecimation; - _playerPerformanceConfigService.Save(); - } - _uiShared.DrawHelpText("When enabled, Lightless generates a decimated copy of given model after download."); - - var keepOriginalModels = performanceConfig.KeepOriginalModelFiles; - if (ImGui.Checkbox("Keep original model files", ref keepOriginalModels)) - { - performanceConfig.KeepOriginalModelFiles = keepOriginalModels; - _playerPerformanceConfigService.Save(); - } - _uiShared.DrawHelpText("When disabled, Lightless removes the original model after a decimated copy is created."); - ImGui.SameLine(); - _uiShared.DrawNoteLine("! 
", UIColors.Get("LightlessYellow"), new SeStringUtils.RichTextEntry("If disabled, saved + effective triangle usage information will not work.", UIColors.Get("LightlessYellow"))); - - var skipPreferredDecimation = performanceConfig.SkipModelDecimationForPreferredPairs; - if (ImGui.Checkbox("Skip decimation for preferred/direct pairs", ref skipPreferredDecimation)) - { - performanceConfig.SkipModelDecimationForPreferredPairs = skipPreferredDecimation; - _playerPerformanceConfigService.Save(); - } - _uiShared.DrawHelpText("When enabled, models for direct pairs with preferred permissions are left untouched."); - - var triangleThreshold = performanceConfig.ModelDecimationTriangleThreshold; - ImGui.SetNextItemWidth(300 * ImGuiHelpers.GlobalScale); - if (ImGui.SliderInt("Decimate models above", ref triangleThreshold, 8_000, 100_000)) - { - performanceConfig.ModelDecimationTriangleThreshold = Math.Clamp(triangleThreshold, 8_000, 100_000); - _playerPerformanceConfigService.Save(); - } - ImGui.SameLine(); - ImGui.Text("triangles"); - _uiShared.DrawHelpText($"Models below this triangle count are left untouched.{UiSharedService.TooltipSeparator}Default: 50,000"); - - var targetPercent = (float)(performanceConfig.ModelDecimationTargetRatio * 100.0); - var clampedPercent = Math.Clamp(targetPercent, 60f, 99f); - if (Math.Abs(clampedPercent - targetPercent) > float.Epsilon) - { - performanceConfig.ModelDecimationTargetRatio = clampedPercent / 100.0; - _playerPerformanceConfigService.Save(); - targetPercent = clampedPercent; - } - ImGui.SetNextItemWidth(300 * ImGuiHelpers.GlobalScale); - if (ImGui.SliderFloat("Target triangle ratio", ref targetPercent, 60f, 99f, "%.0f%%")) - { - performanceConfig.ModelDecimationTargetRatio = Math.Clamp(targetPercent / 100f, 0.6f, 0.99f); - _playerPerformanceConfigService.Save(); - } - _uiShared.DrawHelpText($"Target ratio relative to original triangle count (80% keeps 80% of triangles).{UiSharedService.TooltipSeparator}Default: 80%"); - - 
ImGui.Dummy(new Vector2(15)); - ImGui.TextUnformatted("Decimation targets"); - _uiShared.DrawHelpText("Hair mods are always excluded from decimation."); - - _uiShared.DrawNoteLine("! ", UIColors.Get("LightlessGreen"), - new SeStringUtils.RichTextEntry("Automatic decimation will only target the selected "), - new SeStringUtils.RichTextEntry("decimation targets", UIColors.Get("LightlessGreen"), true), - new SeStringUtils.RichTextEntry(".")); - - _uiShared.DrawNoteLine("! ", UIColors.Get("LightlessYellow"), - new SeStringUtils.RichTextEntry("It is advised to not decimate any body related meshes which includes: "), - new SeStringUtils.RichTextEntry("facial mods + sculpts, chest, legs, hands and feet", UIColors.Get("LightlessYellow"), true), - new SeStringUtils.RichTextEntry(".")); - - _uiShared.DrawNoteLine("!!! ", UIColors.Get("DimRed"), - new SeStringUtils.RichTextEntry("Remember, automatic decimation is not perfect and can cause meshes to be ruined, especially hair mods.", UIColors.Get("DimRed"), true)); - - var allowBody = performanceConfig.ModelDecimationAllowBody; - if (ImGui.Checkbox("Body", ref allowBody)) - { - performanceConfig.ModelDecimationAllowBody = allowBody; - _playerPerformanceConfigService.Save(); - } - - var allowFaceHead = performanceConfig.ModelDecimationAllowFaceHead; - if (ImGui.Checkbox("Face/head", ref allowFaceHead)) - { - performanceConfig.ModelDecimationAllowFaceHead = allowFaceHead; - _playerPerformanceConfigService.Save(); - } - - var allowTail = performanceConfig.ModelDecimationAllowTail; - if (ImGui.Checkbox("Tails/Ears", ref allowTail)) - { - performanceConfig.ModelDecimationAllowTail = allowTail; - _playerPerformanceConfigService.Save(); - } - - var allowClothing = performanceConfig.ModelDecimationAllowClothing; - if (ImGui.Checkbox("Clothing (body/legs/shoes/gloves/hats)", ref allowClothing)) - { - performanceConfig.ModelDecimationAllowClothing = allowClothing; - _playerPerformanceConfigService.Save(); - } - - var allowAccessories = 
performanceConfig.ModelDecimationAllowAccessories; - if (ImGui.Checkbox("Accessories (earring/rings/bracelet/necklace)", ref allowAccessories)) - { - performanceConfig.ModelDecimationAllowAccessories = allowAccessories; - _playerPerformanceConfigService.Save(); - } - - ImGui.Dummy(new Vector2(5)); - - UiSharedService.ColoredSeparator(UIColors.Get("LightlessGrey"), 3f); - - ImGui.Dummy(new Vector2(5)); - DrawTriangleDecimationCounters(); - ImGui.Dummy(new Vector2(5)); - - UiSharedService.ColoredSeparator(UIColors.Get("DimRed"), 1.5f); - ImGui.TreePop(); - } + _optimizationSettingsPanel.DrawSettingsTrees( + PerformanceTextureOptimizationLabel, + UIColors.Get("LightlessYellow"), + PerformanceModelOptimizationLabel, + UIColors.Get("LightlessOrange"), + BeginPerformanceTree); ImGui.Separator(); ImGui.Dummy(new Vector2(10)); diff --git a/LightlessSync/UI/Style/MainStyle.cs b/LightlessSync/UI/Style/MainStyle.cs index 53dd682..132dc2c 100644 --- a/LightlessSync/UI/Style/MainStyle.cs +++ b/LightlessSync/UI/Style/MainStyle.cs @@ -40,10 +40,10 @@ internal static class MainStyle new("color.frameBg", "Frame Background", () => Rgba(40, 40, 40, 255), ImGuiCol.FrameBg), new("color.frameBgHovered", "Frame Background (Hover)", () => Rgba(50, 50, 50, 100), ImGuiCol.FrameBgHovered), new("color.frameBgActive", "Frame Background (Active)", () => Rgba(30, 30, 30, 255), ImGuiCol.FrameBgActive), - new("color.titleBg", "Title Background", () => Rgba(22, 14, 41, 255), ImGuiCol.TitleBg), - new("color.titleBgActive", "Title Background (Active)", () => Rgba(22, 14, 41, 255), ImGuiCol.TitleBgActive), - new("color.titleBgCollapsed", "Title Background (Collapsed)", () => Rgba(22, 14, 41, 255), ImGuiCol.TitleBgCollapsed), - + new("color.titleBg", "Title Background", () => Rgba(24, 24, 24, 232), ImGuiCol.TitleBg), + new("color.titleBgActive", "Title Background (Active)", () => Rgba(30, 30, 30, 255), ImGuiCol.TitleBgActive), + new("color.titleBgCollapsed", "Title Background (Collapsed)", () => 
Rgba(27, 27, 27, 255), ImGuiCol.TitleBgCollapsed), + new("color.menuBarBg", "Menu Bar Background", () => Rgba(36, 36, 36, 255), ImGuiCol.MenuBarBg), new("color.scrollbarBg", "Scrollbar Background", () => Rgba(0, 0, 0, 0), ImGuiCol.ScrollbarBg), new("color.scrollbarGrab", "Scrollbar Grab", () => Rgba(62, 62, 62, 255), ImGuiCol.ScrollbarGrab), diff --git a/LightlessSync/UI/Style/Selune.cs b/LightlessSync/UI/Style/Selune.cs index f89a1f0..00843a8 100644 --- a/LightlessSync/UI/Style/Selune.cs +++ b/LightlessSync/UI/Style/Selune.cs @@ -29,6 +29,7 @@ public sealed class SeluneGradientSettings public Vector4 GradientColor { get; init; } = UIColors.Get("LightlessPurple"); public Vector4? HighlightColor { get; init; } public float GradientPeakOpacity { get; init; } = 0.07f; + public float GradientPeakPosition { get; init; } = 0.035f; public float HighlightPeakAlpha { get; init; } = 0.13f; public float HighlightEdgeAlpha { get; init; } = 0f; public float HighlightMidpoint { get; init; } = 0.45f; @@ -378,6 +379,7 @@ internal static class SeluneRenderer topColorVec, midColorVec, bottomColorVec, + settings, settings.BackgroundMode); } @@ -403,19 +405,21 @@ internal static class SeluneRenderer Vector4 topColorVec, Vector4 midColorVec, Vector4 bottomColorVec, + SeluneGradientSettings settings, SeluneGradientMode mode) { + var peakPosition = Math.Clamp(settings.GradientPeakPosition, 0.01f, 0.99f); switch (mode) { case SeluneGradientMode.Vertical: - DrawVerticalBackground(drawList, gradientLeft, gradientRight, clampedTopY, clampedBottomY, topColorVec, midColorVec, bottomColorVec); + DrawVerticalBackground(drawList, gradientLeft, gradientRight, clampedTopY, clampedBottomY, topColorVec, midColorVec, bottomColorVec, peakPosition); break; case SeluneGradientMode.Horizontal: - DrawHorizontalBackground(drawList, gradientLeft, gradientRight, clampedTopY, clampedBottomY, topColorVec, midColorVec, bottomColorVec); + DrawHorizontalBackground(drawList, gradientLeft, gradientRight, 
clampedTopY, clampedBottomY, topColorVec, midColorVec, bottomColorVec, peakPosition); break; case SeluneGradientMode.Both: - DrawVerticalBackground(drawList, gradientLeft, gradientRight, clampedTopY, clampedBottomY, topColorVec, midColorVec, bottomColorVec); - DrawHorizontalBackground(drawList, gradientLeft, gradientRight, clampedTopY, clampedBottomY, topColorVec, midColorVec, bottomColorVec); + DrawVerticalBackground(drawList, gradientLeft, gradientRight, clampedTopY, clampedBottomY, topColorVec, midColorVec, bottomColorVec, peakPosition); + DrawHorizontalBackground(drawList, gradientLeft, gradientRight, clampedTopY, clampedBottomY, topColorVec, midColorVec, bottomColorVec, peakPosition); break; } } @@ -428,13 +432,14 @@ internal static class SeluneRenderer float clampedBottomY, Vector4 topColorVec, Vector4 midColorVec, - Vector4 bottomColorVec) + Vector4 bottomColorVec, + float peakPosition) { var topColor = ImGui.ColorConvertFloat4ToU32(topColorVec); var midColor = ImGui.ColorConvertFloat4ToU32(midColorVec); var bottomColor = ImGui.ColorConvertFloat4ToU32(bottomColorVec); - var midY = clampedTopY + (clampedBottomY - clampedTopY) * 0.035f; + var midY = clampedTopY + (clampedBottomY - clampedTopY) * peakPosition; drawList.AddRectFilledMultiColor( new Vector2(gradientLeft, clampedTopY), new Vector2(gradientRight, midY), @@ -460,13 +465,14 @@ internal static class SeluneRenderer float clampedBottomY, Vector4 leftColorVec, Vector4 midColorVec, - Vector4 rightColorVec) + Vector4 rightColorVec, + float peakPosition) { var leftColor = ImGui.ColorConvertFloat4ToU32(leftColorVec); var midColor = ImGui.ColorConvertFloat4ToU32(midColorVec); var rightColor = ImGui.ColorConvertFloat4ToU32(rightColorVec); - var midX = gradientLeft + (gradientRight - gradientLeft) * 0.035f; + var midX = gradientLeft + (gradientRight - gradientLeft) * peakPosition; drawList.AddRectFilledMultiColor( new Vector2(gradientLeft, clampedTopY), new Vector2(midX, clampedBottomY), diff --git 
a/LightlessSync/UI/ZoneChatUi.cs b/LightlessSync/UI/ZoneChatUi.cs index 571b8ca..8f799de 100644 --- a/LightlessSync/UI/ZoneChatUi.cs +++ b/LightlessSync/UI/ZoneChatUi.cs @@ -1,5 +1,6 @@ using System.Globalization; using System.Numerics; +using System.Reflection; using LightlessSync.API.Data; using LightlessSync.API.Data.Extensions; using LightlessSync.API.Data.Enum; @@ -8,9 +9,11 @@ using Dalamud.Interface; using Dalamud.Interface.Colors; using Dalamud.Interface.Utility; using Dalamud.Interface.Utility.Raii; +using Dalamud.Interface.Windowing; using LightlessSync.API.Dto.Chat; using LightlessSync.API.Dto.Group; using LightlessSync.LightlessConfiguration; +using LightlessSync.LightlessConfiguration.Configurations; using LightlessSync.LightlessConfiguration.Models; using LightlessSync.Services; using LightlessSync.Services.Chat; @@ -38,6 +41,7 @@ public sealed class ZoneChatUi : WindowMediatorSubscriberBase private const string ReportPopupId = "Report Message##zone_chat_report_popup"; private const string ChannelDragPayloadId = "zone_chat_channel_drag"; private const string EmotePickerPopupId = "zone_chat_emote_picker"; + private const string MentionPopupId = "zone_chat_mention_popup"; private const int EmotePickerColumns = 10; private const float DefaultWindowOpacity = .97f; private const float DefaultUnfocusedWindowOpacity = 0.6f; @@ -45,11 +49,37 @@ public sealed class ZoneChatUi : WindowMediatorSubscriberBase private const float MaxWindowOpacity = 1f; private const float MinChatFontScale = 0.75f; private const float MaxChatFontScale = 1.5f; + private const float MinEmoteScale = 0.5f; + private const float MaxEmoteScale = 2.0f; private const float UnfocusedFadeOutSpeed = 0.22f; private const float FocusFadeInSpeed = 2.0f; private const int ReportReasonMaxLength = 500; private const int ReportContextMaxLength = 1000; private const int MaxChannelNoteTabLength = 25; + private const int MaxBadgeDisplay = 99; + private const int MaxMentionSuggestions = 8; + private 
const int CollapsedMessageCountDisplayCap = 999; + + private static readonly FieldInfo? FadeOutOriginField = typeof(Window).GetField("fadeOutOrigin", BindingFlags.Instance | BindingFlags.NonPublic); + private static readonly FieldInfo? FadeOutSizeField = typeof(Window).GetField("fadeOutSize", BindingFlags.Instance | BindingFlags.NonPublic); + + private enum ChatSettingsTab + { + General, + Messages, + Notifications, + Visibility, + Window + } + + private static readonly UiSharedService.TabOption[] ChatSettingsTabOptions = + [ + new UiSharedService.TabOption("General", ChatSettingsTab.General), + new UiSharedService.TabOption("Messages", ChatSettingsTab.Messages), + new UiSharedService.TabOption("Notifications", ChatSettingsTab.Notifications), + new UiSharedService.TabOption("Visibility", ChatSettingsTab.Visibility), + new UiSharedService.TabOption("Window", ChatSettingsTab.Window), + ]; private readonly UiSharedService _uiSharedService; private readonly ZoneChatService _zoneChatService; @@ -66,6 +96,9 @@ public sealed class ZoneChatUi : WindowMediatorSubscriberBase private readonly Dictionary _draftMessages = new(StringComparer.Ordinal); private readonly Dictionary> _pendingDraftClears = new(StringComparer.Ordinal); private readonly ImGuiWindowFlags _unpinnedWindowFlags; + private string? _activeInputChannelKey; + private int _pendingDraftCursorPos = -1; + private string? _pendingDraftCursorChannelKey; private float _currentWindowOpacity = DefaultWindowOpacity; private float _baseWindowOpacity = DefaultWindowOpacity; private bool _isWindowPinned; @@ -94,9 +127,19 @@ public sealed class ZoneChatUi : WindowMediatorSubscriberBase private string? _dragHoverKey; private bool _openEmotePicker; private string _emoteFilter = string.Empty; + private int _mentionSelectionIndex = -1; + private string? 
_mentionSelectionKey; private bool _HideStateActive; private bool _HideStateWasOpen; private bool _pushedStyle; + private ChatSettingsTab _selectedChatSettingsTab = ChatSettingsTab.General; + private bool _isWindowCollapsed; + private bool _wasWindowCollapsed; + private int _collapsedMessageCount; + private bool _forceExpandOnOpen; + private Vector2 _lastWindowPos; + private Vector2 _lastWindowSize; + private bool _hasWindowMetrics; public ZoneChatUi( ILogger logger, @@ -158,7 +201,7 @@ public sealed class ZoneChatUi : WindowMediatorSubscriberBase var config = _chatConfigService.Current; var baseOpacity = Math.Clamp(config.ChatWindowOpacity, MinWindowOpacity, MaxWindowOpacity); _baseWindowOpacity = baseOpacity; - ImGui.PushStyleVar(ImGuiStyleVar.WindowBorderSize, 0); + ImGui.PushStyleVar(ImGuiStyleVar.WindowBorderSize, 1f); _pushedStyle = true; if (config.FadeWhenUnfocused) @@ -245,11 +288,6 @@ public sealed class ZoneChatUi : WindowMediatorSubscriberBase var config = _chatConfigService.Current; var isFocused = ImGui.IsWindowFocused(ImGuiFocusedFlags.RootAndChildWindows); var isHovered = ImGui.IsWindowHovered(ImGuiHoveredFlags.RootAndChildWindows); - if (config.FadeWhenUnfocused && isHovered && !isFocused) - { - ImGui.SetWindowFocus(); - } - _isWindowFocused = config.FadeWhenUnfocused ? 
(isFocused || isHovered) : isFocused; var contentAlpha = 1f; @@ -263,14 +301,20 @@ public sealed class ZoneChatUi : WindowMediatorSubscriberBase var drawList = ImGui.GetWindowDrawList(); var windowPos = ImGui.GetWindowPos(); var windowSize = ImGui.GetWindowSize(); + _lastWindowPos = windowPos; + _lastWindowSize = windowSize; + _hasWindowMetrics = true; + UpdateCollapsedState(isCollapsed: false); using var selune = Selune.Begin(_seluneBrush, drawList, windowPos, windowSize); var childBgColor = ImGui.GetStyle().Colors[(int)ImGuiCol.ChildBg]; childBgColor.W *= _baseWindowOpacity; using var childBg = ImRaii.PushColor(ImGuiCol.ChildBg, childBgColor); DrawConnectionControls(); - var channels = _zoneChatService.GetChannelsSnapshot(); + IReadOnlyList channels = _zoneChatService.GetChannelsSnapshot(); + IReadOnlyList visibleChannels = GetVisibleChannels(channels); DrawReportPopup(); + CleanupDrafts(channels); if (channels.Count == 0) { @@ -278,12 +322,18 @@ public sealed class ZoneChatUi : WindowMediatorSubscriberBase ImGui.TextWrapped("No chat channels available."); ImGui.PopStyleColor(); } + else if (visibleChannels.Count == 0) + { + EnsureSelectedChannel(visibleChannels); + ImGui.PushStyleColor(ImGuiCol.Text, ImGuiColors.DalamudGrey3); + ImGui.TextWrapped("All chat channels are hidden. 
Open chat settings to show channels."); + ImGui.PopStyleColor(); + } else { - EnsureSelectedChannel(channels); - CleanupDrafts(channels); + EnsureSelectedChannel(visibleChannels); - DrawChannelButtons(channels); + DrawChannelButtons(visibleChannels); if (_selectedChannelKey is null) { @@ -291,10 +341,10 @@ public sealed class ZoneChatUi : WindowMediatorSubscriberBase return; } - var activeChannel = channels.FirstOrDefault(channel => string.Equals(channel.Key, _selectedChannelKey, StringComparison.Ordinal)); + ChatChannelSnapshot activeChannel = visibleChannels.FirstOrDefault(channel => string.Equals(channel.Key, _selectedChannelKey, StringComparison.Ordinal)); if (activeChannel.Equals(default(ChatChannelSnapshot))) { - activeChannel = channels[0]; + activeChannel = visibleChannels[0]; _selectedChannelKey = activeChannel.Key; } @@ -331,6 +381,136 @@ public sealed class ZoneChatUi : WindowMediatorSubscriberBase _titleBarStylePopCount = 3; } + private void DrawCollapsedMessageBadge(ImDrawListPtr drawList, Vector2 windowPos, Vector2 windowSize) + { + if (_collapsedMessageCount <= 0) + { + return; + } + + var style = ImGui.GetStyle(); + var titleBarHeight = ImGui.GetFontSize() + style.FramePadding.Y * 2f; + var scale = ImGuiHelpers.GlobalScale; + + var displayCount = _collapsedMessageCount > CollapsedMessageCountDisplayCap + ? $"{CollapsedMessageCountDisplayCap}+" + : _collapsedMessageCount.ToString(CultureInfo.InvariantCulture); + var padding = new Vector2(8f, 3f) * scale; + + var title = WindowName ?? 
string.Empty; + var titleSplitIndex = title.IndexOf("###", StringComparison.Ordinal); + if (titleSplitIndex >= 0) + { + title = title[..titleSplitIndex]; + } + var titleSize = ImGui.CalcTextSize(title); + var leftEdge = windowPos.X + style.FramePadding.X + titleSize.X + style.ItemInnerSpacing.X + 6f * scale; + + var buttonCount = GetTitleBarButtonCount(); + var buttonWidth = ImGui.GetFrameHeight(); + var buttonSpacing = style.ItemInnerSpacing.X; + var buttonArea = buttonCount > 0 + ? (buttonWidth * buttonCount) + (buttonSpacing * (buttonCount - 1)) + : 0f; + var rightEdge = windowPos.X + windowSize.X - style.FramePadding.X - buttonArea; + var availableWidth = rightEdge - leftEdge; + if (availableWidth <= 0f) + { + return; + } + + string label = $"New messages: {displayCount}"; + var textSize = ImGui.CalcTextSize(label); + var badgeSize = textSize + padding * 2f; + if (badgeSize.X > availableWidth) + { + label = $"New: {displayCount}"; + textSize = ImGui.CalcTextSize(label); + badgeSize = textSize + padding * 2f; + } + if (badgeSize.X > availableWidth) + { + label = displayCount; + textSize = ImGui.CalcTextSize(label); + badgeSize = textSize + padding * 2f; + } + if (badgeSize.X > availableWidth) + { + return; + } + + var posX = MathF.Max(leftEdge, rightEdge - badgeSize.X); + var posY = windowPos.Y + (titleBarHeight - badgeSize.Y) * 0.5f; + var badgeMin = new Vector2(posX, posY); + var badgeMax = badgeMin + badgeSize; + + var time = (float)ImGui.GetTime(); + var pulse = 0.6f + 0.2f * (1f + MathF.Sin(time * 2f)); + var baseColor = UIColors.Get("DimRed"); + var fillColor = new Vector4(baseColor.X, baseColor.Y, baseColor.Z, baseColor.W * pulse); + drawList.AddRectFilled(badgeMin, badgeMax, ImGui.ColorConvertFloat4ToU32(fillColor), 6f * scale); + drawList.AddText(badgeMin + padding, ImGui.ColorConvertFloat4ToU32(ImGuiColors.DalamudWhite), label); + } + + private int GetTitleBarButtonCount() + { + var count = 0; + if (!Flags.HasFlag(ImGuiWindowFlags.NoCollapse)) + { + 
count++; + } + + if (ShowCloseButton) + { + count++; + } + + if (AllowPinning || AllowClickthrough) + { + count++; + } + + count += TitleBarButtons?.Count ?? 0; + return count; + } + + private void UpdateCollapsedState(bool isCollapsed) + { + if (isCollapsed != _wasWindowCollapsed) + { + _collapsedMessageCount = 0; + _wasWindowCollapsed = isCollapsed; + } + + _isWindowCollapsed = isCollapsed; + } + + private bool TryUpdateWindowMetricsFromBase() + { + if (FadeOutOriginField is null || FadeOutSizeField is null) + { + return false; + } + + if (FadeOutOriginField.GetValue(this) is Vector2 pos && FadeOutSizeField.GetValue(this) is Vector2 size) + { + _lastWindowPos = pos; + _lastWindowSize = size; + _hasWindowMetrics = true; + return true; + } + + return false; + } + + private static bool IsLikelyCollapsed(Vector2 windowSize) + { + var style = ImGui.GetStyle(); + var titleHeight = ImGui.GetFontSize() + style.FramePadding.Y * 2f; + var threshold = titleHeight + style.WindowBorderSize * 2f + 2f * ImGuiHelpers.GlobalScale; + return windowSize.Y <= threshold; + } + private void DrawHeader(ChatChannelSnapshot channel) { var prefix = channel.Type == ChatChannelType.Zone ? "Zone" : "Syncshell"; @@ -418,6 +598,7 @@ public sealed class ZoneChatUi : WindowMediatorSubscriberBase var showTimestamps = _chatConfigService.Current.ShowMessageTimestamps; _chatEmoteService.EnsureGlobalEmotesLoaded(); PairUiSnapshot? pairSnapshot = null; + MentionHighlightData? 
mentionHighlightData = null; var itemSpacing = ImGui.GetStyle().ItemSpacing.X; if (channel.Messages.Count == 0) @@ -428,6 +609,12 @@ public sealed class ZoneChatUi : WindowMediatorSubscriberBase } else { + if (channel.Type == ChatChannelType.Group) + { + pairSnapshot ??= _pairUiService.GetSnapshot(); + mentionHighlightData = BuildMentionHighlightData(channel, pairSnapshot); + } + var messageCount = channel.Messages.Count; var contentMaxX = ImGui.GetWindowContentRegionMax().X; var cursorStartX = ImGui.GetCursorPosX(); @@ -437,7 +624,7 @@ public sealed class ZoneChatUi : WindowMediatorSubscriberBase for (var i = 0; i < messageCount; i++) { - var messageHeight = MeasureMessageHeight(channel, channel.Messages[i], showTimestamps, cursorStartX, contentMaxX, itemSpacing, ref pairSnapshot); + var messageHeight = MeasureMessageHeight(channel, channel.Messages[i], showTimestamps, cursorStartX, contentMaxX, itemSpacing, mentionHighlightData, ref pairSnapshot); if (messageHeight <= 0f) { messageHeight = lineHeightWithSpacing; @@ -511,6 +698,7 @@ public sealed class ZoneChatUi : WindowMediatorSubscriberBase ImGui.BeginGroup(); ImGui.PushStyleColor(ImGuiCol.Text, color); + var mentionContextOpen = false; if (showRoleIcons) { if (!string.IsNullOrEmpty(timestampText)) @@ -557,12 +745,12 @@ public sealed class ZoneChatUi : WindowMediatorSubscriberBase } var messageStartX = ImGui.GetCursorPosX(); - DrawChatMessageWithEmotes($"{message.DisplayName}: ", payload.Message, messageStartX); + mentionContextOpen = DrawChatMessageWithEmotes($"{message.DisplayName}: ", payload.Message, messageStartX, mentionHighlightData); } else { var messageStartX = ImGui.GetCursorPosX(); - DrawChatMessageWithEmotes($"{timestampText}{message.DisplayName}: ", payload.Message, messageStartX); + mentionContextOpen = DrawChatMessageWithEmotes($"{timestampText}{message.DisplayName}: ", payload.Message, messageStartX, mentionHighlightData); } ImGui.PopStyleColor(); ImGui.EndGroup(); @@ -570,7 +758,8 @@ public 
sealed class ZoneChatUi : WindowMediatorSubscriberBase ImGui.SetNextWindowSizeConstraints( new Vector2(190f * ImGuiHelpers.GlobalScale, 0f), new Vector2(float.MaxValue, float.MaxValue)); - if (ImGui.BeginPopupContextItem($"chat_msg_ctx##{channel.Key}_{i}")) + var messagePopupFlags = ImGuiPopupFlags.MouseButtonRight | ImGuiPopupFlags.NoOpenOverExistingPopup; + if (!mentionContextOpen && ImGui.BeginPopupContextItem($"chat_msg_ctx##{channel.Key}_{i}", messagePopupFlags)) { var contextLocalTimestamp = payload.SentAtUtc.ToLocalTime(); var contextTimestampText = contextLocalTimestamp.ToString("yyyy-MM-dd HH:mm:ss 'UTC'z", CultureInfo.InvariantCulture); @@ -619,12 +808,15 @@ public sealed class ZoneChatUi : WindowMediatorSubscriberBase } } - private void DrawChatMessageWithEmotes(string prefix, string message, float lineStartX) + private bool DrawChatMessageWithEmotes(string prefix, string message, float lineStartX, MentionHighlightData? mentionHighlightData) { - var segments = BuildChatSegments(prefix, message); + var segments = BuildChatSegments(prefix, message, mentionHighlightData); var firstOnLine = true; - var emoteSize = new Vector2(ImGui.GetTextLineHeight()); + var emoteSizeValue = ImGui.GetTextLineHeight() * GetEmoteScale(); + var emoteSize = new Vector2(emoteSizeValue); var remainingWidth = ImGui.GetContentRegionAvail().X; + var mentionIndex = 0; + var mentionContextOpen = false; foreach (var segment in segments) { @@ -674,13 +866,102 @@ public sealed class ZoneChatUi : WindowMediatorSubscriberBase } else { - ImGui.TextUnformatted(segment.Text); + if (segment.IsMention) + { + Vector4 mentionColor = segment.IsSelfMention + ? 
UIColors.Get("LightlessYellow") + : UIColors.Get("LightlessPurple"); + ImGui.PushStyleColor(ImGuiCol.Text, mentionColor); + ImGui.TextUnformatted(segment.Text); + ImGui.PopStyleColor(); + mentionContextOpen |= DrawMentionContextMenu(segment.Text, mentionHighlightData, mentionIndex++); + } + else + { + ImGui.TextUnformatted(segment.Text); + } } remainingWidth -= segmentWidth; firstOnLine = false; } + return mentionContextOpen; + } + + private bool DrawMentionContextMenu(string mentionText, MentionHighlightData? mentionHighlightData, int mentionIndex) + { + string token = mentionText; + if (!string.IsNullOrEmpty(token) && token[0] == '@') + { + token = token[1..]; + } + + MentionUserInfo? mentionInfo = null; + if (mentionHighlightData.HasValue + && !string.IsNullOrWhiteSpace(token) + && mentionHighlightData.Value.Users.TryGetValue(token, out var userInfo)) + { + mentionInfo = userInfo; + } + + string statusLabel = "Unknown"; + bool canViewProfile = false; + Action? viewProfileAction = null; + + if (mentionInfo.HasValue) + { + var info = mentionInfo.Value; + if (info.IsSelf) + { + statusLabel = "You"; + } + else if (info.Pair is not null) + { + statusLabel = info.Pair.IsOnline ? 
"Online" : "Offline"; + } + + if (info.Pair is not null) + { + canViewProfile = true; + viewProfileAction = () => Mediator.Publish(new ProfileOpenStandaloneMessage(info.Pair)); + } + else if (info.UserData is not null) + { + canViewProfile = true; + var userData = info.UserData; + viewProfileAction = () => RunContextAction(() => OpenStandardProfileAsync(userData)); + } + } + + var style = ImGui.GetStyle(); + var iconWidth = _uiSharedService.GetIconSize(FontAwesomeIcon.User).X; + var actionWidth = ImGui.CalcTextSize("View Profile").X + iconWidth + style.ItemSpacing.X; + var baseWidth = MathF.Max( + MathF.Max(ImGui.CalcTextSize(mentionText).X, ImGui.CalcTextSize(statusLabel).X), + actionWidth); + var targetWidth = (baseWidth + style.WindowPadding.X * 2f + style.FramePadding.X * 2f) * 1.5f; + ImGui.SetNextWindowSizeConstraints(new Vector2(targetWidth, 0f), new Vector2(float.MaxValue, float.MaxValue)); + + if (!ImGui.BeginPopupContextItem($"mention_ctx##{mentionIndex}")) + { + return false; + } + + ImGui.TextUnformatted(mentionText); + ImGui.Separator(); + ImGui.TextDisabled(statusLabel); + ImGui.Separator(); + + var profileAction = new ChatMessageContextAction( + FontAwesomeIcon.User, + "View Profile", + canViewProfile, + viewProfileAction ?? NoopContextAction); + DrawContextMenuAction(profileAction, 0); + + ImGui.EndPopup(); + return true; } private void DrawEmotePickerPopup(ref string draft, string channelKey) @@ -817,15 +1098,15 @@ public sealed class ZoneChatUi : WindowMediatorSubscriberBase } } - private List BuildChatSegments(string prefix, string message) + private List BuildChatSegments(string prefix, string message, MentionHighlightData? 
mentionHighlightData) { var segments = new List(Math.Max(16, message.Length / 4)); - AppendChatSegments(segments, prefix, allowEmotes: false); - AppendChatSegments(segments, message, allowEmotes: true); + AppendChatSegments(segments, prefix, allowEmotes: false, mentionHighlightData: null); + AppendChatSegments(segments, message, allowEmotes: true, mentionHighlightData); return segments; } - private void AppendChatSegments(List segments, string text, bool allowEmotes) + private void AppendChatSegments(List segments, string text, bool allowEmotes, MentionHighlightData? mentionHighlightData) { if (string.IsNullOrEmpty(text)) { @@ -867,6 +1148,23 @@ public sealed class ZoneChatUi : WindowMediatorSubscriberBase } var token = text[tokenStart..index]; + if (mentionHighlightData.HasValue + && TrySplitMentionToken(token, mentionHighlightData.Value, out var leadingMention, out var mentionText, out var trailingMention, out var isSelfMention)) + { + if (!string.IsNullOrEmpty(leadingMention)) + { + segments.Add(ChatSegment.FromText(leadingMention)); + } + + segments.Add(ChatSegment.Mention(mentionText, isSelfMention)); + + if (!string.IsNullOrEmpty(trailingMention)) + { + segments.Add(ChatSegment.FromText(trailingMention)); + } + + continue; + } if (allowEmotes && TrySplitToken(token, out var leading, out var core, out var trailing)) { if (_chatEmoteService.TryGetEmote(core, out var texture) && texture is not null) @@ -925,6 +1223,451 @@ public sealed class ZoneChatUi : WindowMediatorSubscriberBase return char.IsLetterOrDigit(value) || value == '_' || value == '-' || value == '!' 
|| value == '(' || value == ')'; } + private static bool TrySplitMentionToken(string token, MentionHighlightData mentionHighlightData, out string leading, out string mentionText, out string trailing, out bool isSelfMention) + { + leading = string.Empty; + mentionText = string.Empty; + trailing = string.Empty; + isSelfMention = false; + + if (string.IsNullOrEmpty(token) || mentionHighlightData.Tokens.Count == 0) + { + return false; + } + + for (int index = 0; index < token.Length; index++) + { + if (token[index] != '@') + { + continue; + } + + if (index > 0 && IsMentionChar(token[index - 1])) + { + continue; + } + + int start = index + 1; + int end = start; + while (end < token.Length && IsMentionChar(token[end])) + { + end++; + } + + if (end == start) + { + continue; + } + + string mentionToken = token[start..end]; + if (!mentionHighlightData.Tokens.TryGetValue(mentionToken, out bool matchedSelf)) + { + continue; + } + + leading = token[..index]; + mentionText = "@" + mentionToken; + trailing = token[end..]; + isSelfMention = matchedSelf; + return true; + } + + return false; + } + + private static bool IsMentionChar(char value) + { + return char.IsLetterOrDigit(value) || value == '_' || value == '-' || value == '\''; + } + + private static bool IsMentionToken(ReadOnlySpan token, bool allowEmpty) + { + if (token.Length == 0) + { + return allowEmpty; + } + + for (int i = 0; i < token.Length; i++) + { + if (!IsMentionChar(token[i])) + { + return false; + } + } + + return true; + } + + private static bool TryGetMentionQuery(string text, out MentionQuery mentionQuery) + { + mentionQuery = default; + if (string.IsNullOrEmpty(text)) + { + return false; + } + + int cursor = text.Length; + int index = cursor - 1; + while (index >= 0) + { + char current = text[index]; + if (current == '@') + { + if (index > 0 && IsMentionChar(text[index - 1])) + { + return false; + } + + ReadOnlySpan tokenSpan = text.AsSpan(index + 1, cursor - (index + 1)); + if (!IsMentionToken(tokenSpan, 
allowEmpty: true)) + { + return false; + } + + mentionQuery = new MentionQuery(index, cursor, tokenSpan.ToString()); + return true; + } + + if (char.IsWhiteSpace(current)) + { + return false; + } + + if (!IsMentionChar(current)) + { + return false; + } + + index--; + } + + return false; + } + + private static string? GetPreferredMentionToken(string uid, string? alias) + { + if (!string.IsNullOrWhiteSpace(alias) && IsMentionToken(alias.AsSpan(), allowEmpty: false)) + { + return alias; + } + + if (IsMentionToken(uid.AsSpan(), allowEmpty: false)) + { + return uid; + } + + return null; + } + + private static void AddMentionToken(Dictionary tokens, string token, bool isSelf) + { + if (tokens.TryGetValue(token, out bool existing)) + { + if (isSelf && !existing) + { + tokens[token] = true; + } + + return; + } + + tokens[token] = isSelf; + } + + private static void AddMentionUserToken( + Dictionary users, + HashSet ambiguousTokens, + string token, + MentionUserInfo info) + { + if (ambiguousTokens.Contains(token)) + { + return; + } + + if (users.TryGetValue(token, out var existing)) + { + if (!string.Equals(existing.Uid, info.Uid, StringComparison.Ordinal)) + { + users.Remove(token); + ambiguousTokens.Add(token); + } + + return; + } + + users[token] = info; + } + + private static void AddMentionData( + Dictionary tokens, + Dictionary users, + HashSet ambiguousTokens, + string uid, + string? alias, + bool isSelf, + Pair? pair, + UserData? 
userData) + { + if (string.IsNullOrWhiteSpace(uid)) + { + return; + } + + var info = new MentionUserInfo(uid, userData, pair, isSelf); + if (IsMentionToken(uid.AsSpan(), allowEmpty: false)) + { + AddMentionToken(tokens, uid, isSelf); + AddMentionUserToken(users, ambiguousTokens, uid, info); + } + + if (!string.IsNullOrWhiteSpace(alias) && IsMentionToken(alias.AsSpan(), allowEmpty: false)) + { + AddMentionToken(tokens, alias, isSelf); + AddMentionUserToken(users, ambiguousTokens, alias, info); + } + } + + private static IReadOnlyList GetPairsForGroup(PairUiSnapshot snapshot, string groupId, GroupFullInfoDto? groupInfo) + { + if (groupInfo is not null && snapshot.GroupPairs.TryGetValue(groupInfo, out IReadOnlyList groupPairs)) + { + return groupPairs; + } + + foreach (KeyValuePair> entry in snapshot.GroupPairs) + { + if (string.Equals(entry.Key.Group.GID, groupId, StringComparison.Ordinal)) + { + return entry.Value; + } + } + + return Array.Empty(); + } + + private void AddMentionCandidate(List candidates, HashSet seenTokens, string uid, string? alias, string? note, bool isSelf, bool includeSelf) + { + if (!includeSelf && isSelf) + { + return; + } + + string? token = GetPreferredMentionToken(uid, alias); + if (string.IsNullOrWhiteSpace(token)) + { + return; + } + + if (!seenTokens.Add(token)) + { + return; + } + + string displayName = !string.IsNullOrWhiteSpace(alias) ? alias : uid; + candidates.Add(new MentionCandidate(token, displayName, note, uid, isSelf)); + } + + private List BuildMentionCandidates(ChatChannelSnapshot channel, PairUiSnapshot snapshot, bool includeSelf) + { + List candidates = new(); + if (channel.Type != ChatChannelType.Group) + { + return candidates; + } + + string? groupId = channel.Descriptor.CustomKey; + if (string.IsNullOrWhiteSpace(groupId)) + { + return candidates; + } + + HashSet seenTokens = new(StringComparer.OrdinalIgnoreCase); + string selfUid = _apiController.UID; + + GroupFullInfoDto? 
groupInfo = null; + if (snapshot.GroupsByGid.TryGetValue(groupId, out GroupFullInfoDto found)) + { + groupInfo = found; + } + + if (groupInfo is not null) + { + bool ownerIsSelf = string.Equals(groupInfo.Owner.UID, selfUid, StringComparison.Ordinal); + string? ownerNote = _serverConfigurationManager.GetNoteForUid(groupInfo.Owner.UID); + AddMentionCandidate(candidates, seenTokens, groupInfo.Owner.UID, groupInfo.Owner.Alias, ownerNote, ownerIsSelf, includeSelf); + + IReadOnlyList groupPairs = GetPairsForGroup(snapshot, groupId, groupInfo); + foreach (Pair pair in groupPairs) + { + bool isSelf = string.Equals(pair.UserData.UID, selfUid, StringComparison.Ordinal); + string? note = pair.GetNote(); + AddMentionCandidate(candidates, seenTokens, pair.UserData.UID, pair.UserData.Alias, note, isSelf, includeSelf); + } + } + else + { + IReadOnlyList groupPairs = GetPairsForGroup(snapshot, groupId, null); + foreach (Pair pair in groupPairs) + { + bool isSelf = string.Equals(pair.UserData.UID, selfUid, StringComparison.Ordinal); + string? note = pair.GetNote(); + AddMentionCandidate(candidates, seenTokens, pair.UserData.UID, pair.UserData.Alias, note, isSelf, includeSelf); + } + } + + if (includeSelf) + { + string? note = _serverConfigurationManager.GetNoteForUid(selfUid); + AddMentionCandidate(candidates, seenTokens, selfUid, _apiController.DisplayName, note, isSelf: true, includeSelf: true); + } + + return candidates; + } + + private MentionHighlightData? BuildMentionHighlightData(ChatChannelSnapshot channel, PairUiSnapshot snapshot) + { + if (channel.Type != ChatChannelType.Group) + { + return null; + } + + string? 
groupId = channel.Descriptor.CustomKey; + if (string.IsNullOrWhiteSpace(groupId)) + { + return null; + } + + Dictionary tokens = new(StringComparer.OrdinalIgnoreCase); + Dictionary users = new(StringComparer.OrdinalIgnoreCase); + HashSet ambiguousTokens = new(StringComparer.OrdinalIgnoreCase); + string selfUid = _apiController.UID; + if (!string.IsNullOrWhiteSpace(selfUid)) + { + var selfData = new UserData(selfUid, _apiController.DisplayName); + snapshot.PairsByUid.TryGetValue(selfUid, out var selfPair); + AddMentionData(tokens, users, ambiguousTokens, selfUid, _apiController.DisplayName, true, selfPair, selfData); + } + + GroupFullInfoDto? groupInfo = null; + if (snapshot.GroupsByGid.TryGetValue(groupId, out GroupFullInfoDto found)) + { + groupInfo = found; + } + + if (groupInfo is not null) + { + bool ownerIsSelf = string.Equals(groupInfo.Owner.UID, selfUid, StringComparison.Ordinal); + var ownerUid = groupInfo.Owner.UID; + snapshot.PairsByUid.TryGetValue(ownerUid, out var ownerPair); + AddMentionData(tokens, users, ambiguousTokens, ownerUid, groupInfo.Owner.Alias, ownerIsSelf, ownerPair, groupInfo.Owner); + + IReadOnlyList groupPairs = GetPairsForGroup(snapshot, groupId, groupInfo); + foreach (Pair pair in groupPairs) + { + bool isSelf = string.Equals(pair.UserData.UID, selfUid, StringComparison.Ordinal); + AddMentionData(tokens, users, ambiguousTokens, pair.UserData.UID, pair.UserData.Alias, isSelf, pair, pair.UserData); + } + } + else + { + IReadOnlyList groupPairs = GetPairsForGroup(snapshot, groupId, null); + foreach (Pair pair in groupPairs) + { + bool isSelf = string.Equals(pair.UserData.UID, selfUid, StringComparison.Ordinal); + AddMentionData(tokens, users, ambiguousTokens, pair.UserData.UID, pair.UserData.Alias, isSelf, pair, pair.UserData); + } + } + + if (tokens.Count == 0) + { + return null; + } + + return new MentionHighlightData(tokens, users); + } + + private static List FilterMentionCandidates(IEnumerable candidates, string query) + { + string 
trimmed = query.Trim(); + IEnumerable filtered = candidates; + + if (trimmed.Length > 0) + { + filtered = filtered.Where(candidate => + candidate.Token.Contains(trimmed, StringComparison.OrdinalIgnoreCase) + || candidate.DisplayName.Contains(trimmed, StringComparison.OrdinalIgnoreCase) + || candidate.Uid.Contains(trimmed, StringComparison.OrdinalIgnoreCase) + || (!string.IsNullOrWhiteSpace(candidate.Note) && candidate.Note.Contains(trimmed, StringComparison.OrdinalIgnoreCase))); + } + + List result = filtered + .OrderBy(candidate => string.IsNullOrWhiteSpace(candidate.Note) ? candidate.DisplayName : candidate.Note, StringComparer.OrdinalIgnoreCase) + .ThenBy(candidate => candidate.DisplayName, StringComparer.OrdinalIgnoreCase) + .Take(MaxMentionSuggestions) + .ToList(); + return result; + } + + private static string BuildMentionLabel(MentionCandidate candidate) + { + string label = candidate.DisplayName; + if (!string.IsNullOrWhiteSpace(candidate.Note) && !string.Equals(candidate.Note, candidate.DisplayName, StringComparison.OrdinalIgnoreCase)) + { + label = $"{candidate.Note} ({label})"; + } + + if (!string.Equals(candidate.Token, candidate.DisplayName, StringComparison.OrdinalIgnoreCase)) + { + label = $"{label} [{candidate.Token}]"; + } + + return label; + } + + private static string ApplyMentionToDraft(string draft, MentionQuery mentionQuery, string token, int maxLength, out int cursorPos) + { + string before = mentionQuery.StartIndex > 0 ? draft[..mentionQuery.StartIndex] : string.Empty; + string after = mentionQuery.EndIndex < draft.Length ? draft[mentionQuery.EndIndex..] 
: string.Empty; + string mentionText = "@" + token; + + if (string.IsNullOrEmpty(after) || !char.IsWhiteSpace(after[0])) + { + mentionText += " "; + } + + string updated = before + mentionText + after; + if (updated.Length > maxLength) + { + updated = updated[..maxLength]; + } + + cursorPos = Math.Min(before.Length + mentionText.Length, updated.Length); + return updated; + } + + private unsafe int ChatInputCallback(ref ImGuiInputTextCallbackData data) + { + if (_pendingDraftCursorPos < 0) + { + return 0; + } + + if (!string.Equals(_pendingDraftCursorChannelKey, _activeInputChannelKey, StringComparison.Ordinal)) + { + return 0; + } + + int clampedCursor = Math.Clamp(_pendingDraftCursorPos, 0, data.BufTextLen); + data.CursorPos = clampedCursor; + data.SelectionStart = clampedCursor; + data.SelectionEnd = clampedCursor; + + _pendingDraftCursorPos = -1; + _pendingDraftCursorChannelKey = null; + return 0; + } + private float MeasureMessageHeight( ChatChannelSnapshot channel, ChatMessageEntry message, @@ -932,6 +1675,7 @@ public sealed class ZoneChatUi : WindowMediatorSubscriberBase float cursorStartX, float contentMaxX, float itemSpacing, + MentionHighlightData? mentionHighlightData, ref PairUiSnapshot? pairSnapshot) { if (message.IsSystem) @@ -988,29 +1732,32 @@ public sealed class ZoneChatUi : WindowMediatorSubscriberBase prefix = $"{timestampText}{message.DisplayName}: "; } - var lines = MeasureChatMessageLines(prefix, payload.Message, lineStartX, contentMaxX); - return Math.Max(1, lines) * ImGui.GetTextLineHeightWithSpacing(); + return MeasureChatMessageHeight(prefix, payload.Message, lineStartX, contentMaxX, mentionHighlightData); } - private int MeasureChatMessageLines(string prefix, string message, float lineStartX, float contentMaxX) + private float MeasureChatMessageHeight(string prefix, string message, float lineStartX, float contentMaxX, MentionHighlightData? 
mentionHighlightData) { - var segments = BuildChatSegments(prefix, message); + var segments = BuildChatSegments(prefix, message, mentionHighlightData); if (segments.Count == 0) { - return 1; + return ImGui.GetTextLineHeightWithSpacing(); } - var emoteWidth = ImGui.GetTextLineHeight(); + var baseLineHeight = ImGui.GetTextLineHeight(); + var emoteSize = baseLineHeight * GetEmoteScale(); + var spacingY = ImGui.GetStyle().ItemSpacing.Y; var availableWidth = Math.Max(1f, contentMaxX - lineStartX); var remainingWidth = availableWidth; var firstOnLine = true; - var lines = 1; + var lineHeight = baseLineHeight; + var totalHeight = 0f; foreach (var segment in segments) { if (segment.IsLineBreak) { - lines++; + totalHeight += lineHeight + spacingY; + lineHeight = baseLineHeight; firstOnLine = true; remainingWidth = availableWidth; continue; @@ -1021,12 +1768,13 @@ public sealed class ZoneChatUi : WindowMediatorSubscriberBase continue; } - var segmentWidth = segment.IsEmote ? emoteWidth : ImGui.CalcTextSize(segment.Text).X; + var segmentWidth = segment.IsEmote ? 
emoteSize : ImGui.CalcTextSize(segment.Text).X; if (!firstOnLine) { if (segmentWidth > remainingWidth) { - lines++; + totalHeight += lineHeight + spacingY; + lineHeight = baseLineHeight; firstOnLine = true; remainingWidth = availableWidth; if (segment.IsWhitespace) @@ -1036,11 +1784,17 @@ public sealed class ZoneChatUi : WindowMediatorSubscriberBase } } + if (segment.IsEmote) + { + lineHeight = MathF.Max(lineHeight, emoteSize); + } + remainingWidth -= segmentWidth; firstOnLine = false; } - return lines; + totalHeight += lineHeight + spacingY; + return totalHeight; } private float MeasureRolePrefixWidth(string timestampText, bool isOwner, bool isModerator, bool isPinned, float itemSpacing) @@ -1089,6 +1843,9 @@ public sealed class ZoneChatUi : WindowMediatorSubscriberBase return width; } + private float GetEmoteScale() + => Math.Clamp(_chatConfigService.Current.EmoteScale, MinEmoteScale, MaxEmoteScale); + private float MeasureIconWidth(FontAwesomeIcon icon) { using var font = _uiSharedService.IconFont.Push(); @@ -1179,11 +1936,17 @@ public sealed class ZoneChatUi : WindowMediatorSubscriberBase ImGui.EndTooltip(); } - private readonly record struct ChatSegment(string Text, IDalamudTextureWrap? Texture, string? EmoteName, bool IsEmote, bool IsWhitespace, bool IsLineBreak) + private readonly record struct MentionQuery(int StartIndex, int EndIndex, string Token); + private readonly record struct MentionCandidate(string Token, string DisplayName, string? Note, string Uid, bool IsSelf); + private readonly record struct MentionUserInfo(string Uid, UserData? UserData, Pair? Pair, bool IsSelf); + private readonly record struct MentionHighlightData(IReadOnlyDictionary Tokens, IReadOnlyDictionary Users); + + private readonly record struct ChatSegment(string Text, IDalamudTextureWrap? Texture, string? 
EmoteName, bool IsEmote, bool IsWhitespace, bool IsLineBreak, bool IsMention, bool IsSelfMention) { - public static ChatSegment FromText(string text, bool isWhitespace = false) => new(text, null, null, false, isWhitespace, false); - public static ChatSegment Emote(IDalamudTextureWrap texture, string name) => new(string.Empty, texture, name, true, false, false); - public static ChatSegment LineBreak() => new(string.Empty, null, null, false, false, true); + public static ChatSegment FromText(string text, bool isWhitespace = false) => new(text, null, null, false, isWhitespace, false, false, false); + public static ChatSegment Emote(IDalamudTextureWrap texture, string name) => new(string.Empty, texture, name, true, false, false, false, false); + public static ChatSegment LineBreak() => new(string.Empty, null, null, false, false, true, false, false); + public static ChatSegment Mention(string text, bool isSelfMention) => new(text, null, null, false, false, false, true, isSelfMention); } private void DrawInput(ChatChannelSnapshot channel) @@ -1207,18 +1970,152 @@ public sealed class ZoneChatUi : WindowMediatorSubscriberBase _refocusChatInput = false; _refocusChatInputKey = null; } - ImGui.InputText(inputId, ref draft, MaxMessageLength); - if (ImGui.IsItemActive()) + _activeInputChannelKey = channel.Key; + ImGui.InputText(inputId, ref draft, MaxMessageLength, ImGuiInputTextFlags.CallbackAlways, ChatInputCallback); + _activeInputChannelKey = null; + Vector2 inputMin = ImGui.GetItemRectMin(); + Vector2 inputMax = ImGui.GetItemRectMax(); + bool inputActive = ImGui.IsItemActive(); + if (inputActive) { var drawList = ImGui.GetWindowDrawList(); - var itemMin = ImGui.GetItemRectMin(); - var itemMax = ImGui.GetItemRectMax(); var highlight = UIColors.Get("LightlessPurple").WithAlpha(0.35f); var highlightU32 = ImGui.ColorConvertFloat4ToU32(highlight); - drawList.AddRect(itemMin, itemMax, highlightU32, style.FrameRounding, ImDrawFlags.None, Math.Max(1f, ImGuiHelpers.GlobalScale)); + 
drawList.AddRect(inputMin, inputMax, highlightU32, style.FrameRounding, ImDrawFlags.None, Math.Max(1f, ImGuiHelpers.GlobalScale)); } var enterPressed = ImGui.IsItemFocused() && (ImGui.IsKeyPressed(ImGuiKey.Enter) || ImGui.IsKeyPressed(ImGuiKey.KeypadEnter)); + bool mentionHandled = false; + bool showMentionPopup = false; + bool popupAlreadyOpen = ImGui.IsPopupOpen(MentionPopupId, ImGuiPopupFlags.AnyPopupLevel); + bool mentionContextActive = (inputActive || popupAlreadyOpen) && channel.Type == ChatChannelType.Group; + if (mentionContextActive) + { + if (TryGetMentionQuery(draft, out MentionQuery mentionQuery)) + { + PairUiSnapshot mentionSnapshot = _pairUiService.GetSnapshot(); + List mentionCandidates = BuildMentionCandidates(channel, mentionSnapshot, includeSelf: false); + List filteredCandidates = FilterMentionCandidates(mentionCandidates, mentionQuery.Token); + + if (filteredCandidates.Count > 0) + { + string mentionSelectionKey = $"{channel.Key}:{mentionQuery.Token}"; + if (!string.Equals(_mentionSelectionKey, mentionSelectionKey, StringComparison.Ordinal)) + { + _mentionSelectionKey = mentionSelectionKey; + _mentionSelectionIndex = 0; + } + else + { + _mentionSelectionIndex = Math.Clamp(_mentionSelectionIndex, 0, filteredCandidates.Count - 1); + } + + if (ImGui.IsKeyPressed(ImGuiKey.DownArrow)) + { + _mentionSelectionIndex = Math.Min(_mentionSelectionIndex + 1, filteredCandidates.Count - 1); + } + + if (ImGui.IsKeyPressed(ImGuiKey.UpArrow)) + { + _mentionSelectionIndex = Math.Max(_mentionSelectionIndex - 1, 0); + } + + if (enterPressed || ImGui.IsKeyPressed(ImGuiKey.Tab)) + { + int selectedIndex = Math.Clamp(_mentionSelectionIndex, 0, filteredCandidates.Count - 1); + MentionCandidate selected = filteredCandidates[selectedIndex]; + draft = ApplyMentionToDraft(draft, mentionQuery, selected.Token, MaxMessageLength, out int cursorPos); + _pendingDraftCursorPos = cursorPos; + _pendingDraftCursorChannelKey = channel.Key; + _refocusChatInput = true; + 
_refocusChatInputKey = channel.Key; + enterPressed = false; + mentionHandled = true; + } + + bool popupRequested = inputActive && !mentionHandled; + showMentionPopup = popupRequested || popupAlreadyOpen; + if (showMentionPopup) + { + float popupWidth = Math.Max(260f * ImGuiHelpers.GlobalScale, inputMax.X - inputMin.X); + ImGui.SetNextWindowPos(new Vector2(inputMin.X, inputMax.Y + style.ItemSpacing.Y), ImGuiCond.Always); + ImGui.SetNextWindowSizeConstraints(new Vector2(popupWidth, 0f), new Vector2(popupWidth, float.MaxValue)); + } + + if (popupRequested && !popupAlreadyOpen) + { + ImGui.OpenPopup(MentionPopupId); + } + + const ImGuiWindowFlags mentionPopupFlags = ImGuiWindowFlags.NoFocusOnAppearing | ImGuiWindowFlags.NoNavFocus | ImGuiWindowFlags.NoMove | ImGuiWindowFlags.NoSavedSettings; + if (showMentionPopup && ImGui.BeginPopup(MentionPopupId, mentionPopupFlags)) + { + float lineHeight = ImGui.GetTextLineHeightWithSpacing(); + int visibleEntries = Math.Min(3, filteredCandidates.Count); + float desiredHeight = lineHeight * visibleEntries; + using (ImRaii.Child("##mention_list", new Vector2(-1f, desiredHeight), true)) + { + for (int i = 0; i < filteredCandidates.Count; i++) + { + MentionCandidate candidate = filteredCandidates[i]; + string label = BuildMentionLabel(candidate); + bool isSelected = i == _mentionSelectionIndex; + if (ImGui.Selectable(label, isSelected)) + { + draft = ApplyMentionToDraft(draft, mentionQuery, candidate.Token, MaxMessageLength, out int cursorPos); + _pendingDraftCursorPos = cursorPos; + _pendingDraftCursorChannelKey = channel.Key; + _refocusChatInput = true; + _refocusChatInputKey = channel.Key; + enterPressed = false; + mentionHandled = true; + ImGui.CloseCurrentPopup(); + break; + } + + if (ImGui.IsItemHovered()) + { + _mentionSelectionIndex = i; + } + } + } + + ImGui.EndPopup(); + } + } + else + { + _mentionSelectionKey = null; + _mentionSelectionIndex = -1; + if (popupAlreadyOpen && ImGui.BeginPopup(MentionPopupId)) + { + 
ImGui.CloseCurrentPopup(); + ImGui.EndPopup(); + } + } + } + else + { + _mentionSelectionKey = null; + _mentionSelectionIndex = -1; + if (popupAlreadyOpen && ImGui.BeginPopup(MentionPopupId)) + { + ImGui.CloseCurrentPopup(); + ImGui.EndPopup(); + } + } + } + else + { + _mentionSelectionKey = null; + _mentionSelectionIndex = -1; + if (popupAlreadyOpen && ImGui.BeginPopup(MentionPopupId)) + { + ImGui.CloseCurrentPopup(); + ImGui.EndPopup(); + } + } + _draftMessages[channel.Key] = draft; ImGui.SameLine(); @@ -1586,6 +2483,25 @@ public sealed class ZoneChatUi : WindowMediatorSubscriberBase public override void PostDraw() { + if (_forceExpandOnOpen && IsOpen) + { + Collapsed = null; + _forceExpandOnOpen = false; + } + if (IsOpen) + { + var metricsUpdated = TryUpdateWindowMetricsFromBase(); + if (metricsUpdated) + { + var isCollapsed = IsLikelyCollapsed(_lastWindowSize); + UpdateCollapsedState(isCollapsed); + } + + if (_isWindowCollapsed && _collapsedMessageCount > 0 && _hasWindowMetrics) + { + DrawCollapsedMessageBadge(ImGui.GetForegroundDrawList(), _lastWindowPos, _lastWindowSize); + } + } if (_pushedStyle) { ImGui.PopStyleVar(1); @@ -1975,13 +2891,59 @@ public sealed class ZoneChatUi : WindowMediatorSubscriberBase }); } + private void HandleIncomingMessageForCollapsedBadge(ChatMessageEntry message) + { + if (!IsCountableIncomingMessage(message)) + { + return; + } + + var config = _chatConfigService.Current; + if (!IsOpen) + { + if (config.AutoOpenChatOnNewMessage && !ShouldHide()) + { + IsOpen = true; + Collapsed = false; + CollapsedCondition = ImGuiCond.Appearing; + _forceExpandOnOpen = true; + } + + return; + } + + if (_isWindowCollapsed) + { + if (_collapsedMessageCount < CollapsedMessageCountDisplayCap + 1) + { + _collapsedMessageCount++; + } + } + } + + private static bool IsCountableIncomingMessage(ChatMessageEntry message) + { + if (message.FromSelf || message.IsSystem) + { + return false; + } + + return message.Payload?.Message is { Length: > 0 }; + } + 
private void OnChatChannelMessageAdded(ChatChannelMessageAdded message) { - if (_selectedChannelKey is not null && string.Equals(_selectedChannelKey, message.ChannelKey, StringComparison.Ordinal)) + var channelHidden = IsChannelHidden(message.ChannelKey); + if (!channelHidden && _selectedChannelKey is not null && string.Equals(_selectedChannelKey, message.ChannelKey, StringComparison.Ordinal)) { _scrollToBottom = true; } + if (!channelHidden) + { + HandleIncomingMessageForCollapsedBadge(message.Message); + } + if (!message.Message.FromSelf || message.Message.Payload?.Message is not { Length: > 0 } payloadText) { return; @@ -2092,9 +3054,10 @@ public sealed class ZoneChatUi : WindowMediatorSubscriberBase if (_selectedChannelKey is not null && channels.Any(channel => string.Equals(channel.Key, _selectedChannelKey, StringComparison.Ordinal))) return; - _selectedChannelKey = channels.Count > 0 ? channels[0].Key : null; - if (_selectedChannelKey is not null) + string? nextKey = channels.Count > 0 ? 
channels[0].Key : null; + if (!string.Equals(_selectedChannelKey, nextKey, StringComparison.Ordinal)) { + _selectedChannelKey = nextKey; _zoneChatService.SetActiveChannel(_selectedChannelKey); _scrollToBottom = true; } @@ -2118,6 +3081,43 @@ public sealed class ZoneChatUi : WindowMediatorSubscriberBase } } + private IReadOnlyList GetVisibleChannels(IReadOnlyList channels) + { + Dictionary hiddenChannels = _chatConfigService.Current.HiddenChannels; + if (hiddenChannels.Count == 0) + { + return channels; + } + + List visibleChannels = new List(channels.Count); + foreach (var channel in channels) + { + if (!hiddenChannels.TryGetValue(channel.Key, out var isHidden) || !isHidden) + { + visibleChannels.Add(channel); + } + } + + return visibleChannels; + } + + private bool IsChannelHidden(string channelKey) + => _chatConfigService.Current.HiddenChannels.TryGetValue(channelKey, out var isHidden) && isHidden; + + private void SetChannelHidden(string channelKey, bool hidden) + { + if (hidden) + { + _chatConfigService.Current.HiddenChannels[channelKey] = true; + } + else + { + _chatConfigService.Current.HiddenChannels.Remove(channelKey); + } + + _chatConfigService.Save(); + } + private void DrawConnectionControls() { var hubState = _apiController.ServerState; @@ -2289,14 +3289,50 @@ public sealed class ZoneChatUi : WindowMediatorSubscriberBase private void DrawChatSettingsPopup() { const ImGuiWindowFlags popupFlags = ImGuiWindowFlags.AlwaysAutoResize | ImGuiWindowFlags.NoMove | ImGuiWindowFlags.NoSavedSettings; + var workSize = ImGui.GetMainViewport().WorkSize; + var minWidth = MathF.Min(420f * ImGuiHelpers.GlobalScale, workSize.X * 0.9f); + var minHeight = MathF.Min(360f * ImGuiHelpers.GlobalScale, workSize.Y * 0.85f); + var minSize = new Vector2(minWidth, minHeight); + var maxSize = new Vector2( + MathF.Max(minSize.X, workSize.X * 0.95f), + MathF.Max(minSize.Y, workSize.Y * 0.95f)); + ImGui.SetNextWindowSizeConstraints(minSize, maxSize); + ImGui.SetNextWindowSize(minSize, 
ImGuiCond.Appearing); if (!ImGui.BeginPopup(SettingsPopupId, popupFlags)) return; ImGui.TextUnformatted("Chat Settings"); ImGui.Separator(); + UiSharedService.Tab("ChatSettingsTabs", ChatSettingsTabOptions, ref _selectedChatSettingsTab); + ImGuiHelpers.ScaledDummy(5); + var chatConfig = _chatConfigService.Current; + switch (_selectedChatSettingsTab) + { + case ChatSettingsTab.General: + DrawChatSettingsGeneral(chatConfig); + break; + case ChatSettingsTab.Messages: + DrawChatSettingsMessages(chatConfig); + break; + case ChatSettingsTab.Notifications: + DrawChatSettingsNotifications(chatConfig); + break; + case ChatSettingsTab.Visibility: + DrawChatSettingsVisibility(chatConfig); + break; + case ChatSettingsTab.Window: + DrawChatSettingsWindow(chatConfig); + break; + } + + ImGui.EndPopup(); + } + + private void DrawChatSettingsGeneral(ChatConfig chatConfig) + { var autoEnable = chatConfig.AutoEnableChatOnLogin; if (ImGui.Checkbox("Auto-enable chat on login", ref autoEnable)) { @@ -2338,6 +3374,28 @@ public sealed class ZoneChatUi : WindowMediatorSubscriberBase { ImGui.SetTooltip("Toggles if the rules popup appears everytime the chat is opened for the first time."); } + } + + private void DrawChatSettingsMessages(ChatConfig chatConfig) + { + var fontScale = Math.Clamp(chatConfig.ChatFontScale, MinChatFontScale, MaxChatFontScale); + var fontScaleChanged = ImGui.SliderFloat("Message font scale", ref fontScale, MinChatFontScale, MaxChatFontScale, "%.2fx"); + var resetFontScale = ImGui.IsItemClicked(ImGuiMouseButton.Right); + if (resetFontScale) + { + fontScale = 1.0f; + fontScaleChanged = true; + } + + if (fontScaleChanged) + { + chatConfig.ChatFontScale = fontScale; + _chatConfigService.Save(); + } + if (ImGui.IsItemHovered()) + { + ImGui.SetTooltip("Adjust scale of chat message text.\nRight-click to reset to default."); + } var showTimestamps = chatConfig.ShowMessageTimestamps; if (ImGui.Checkbox("Show message timestamps", ref showTimestamps)) @@ -2372,8 +3430,116 @@ 
public sealed class ZoneChatUi : WindowMediatorSubscriberBase ImGui.SetTooltip("When disabled, emotes render as static images."); } + var emoteScale = Math.Clamp(chatConfig.EmoteScale, MinEmoteScale, MaxEmoteScale); + var emoteScaleChanged = ImGui.SliderFloat("Emote size", ref emoteScale, MinEmoteScale, MaxEmoteScale, "%.2fx"); + var resetEmoteScale = ImGui.IsItemClicked(ImGuiMouseButton.Right); + if (resetEmoteScale) + { + emoteScale = 1.0f; + emoteScaleChanged = true; + } + + if (emoteScaleChanged) + { + chatConfig.EmoteScale = emoteScale; + _chatConfigService.Save(); + } + if (ImGui.IsItemHovered()) + { + ImGui.SetTooltip("Scales emotes relative to text height.\nRight-click to reset to default."); + } + + ImGui.Separator(); + ImGui.TextUnformatted("History"); + ImGui.Separator(); + + bool persistHistory = chatConfig.PersistSyncshellHistory; + if (ImGui.Checkbox("Persist syncshell chat history", ref persistHistory)) + { + chatConfig.PersistSyncshellHistory = persistHistory; + _chatConfigService.Save(); + if (!persistHistory) + { + _zoneChatService.ClearPersistedSyncshellHistory(clearLoadedMessages: false); + } + } + if (ImGui.IsItemHovered()) + { + ImGui.SetTooltip("Stores the latest 200 syncshell messages on disk and restores them when chat loads.\nStored messages are considered stale and cannot be muted or reported."); + } + + bool hasPersistedHistory = chatConfig.SyncshellChannelHistory.Count > 0; + using (ImRaii.Disabled(!hasPersistedHistory || !UiSharedService.CtrlPressed())) + { + if (ImGui.Button("Clear saved syncshell history")) + { + _zoneChatService.ClearPersistedSyncshellHistory(clearLoadedMessages: true); + } + } + UiSharedService.AttachToolTip("Clears saved syncshell chat history and loaded cached messages." 
+ + UiSharedService.TooltipSeparator + "Hold CTRL to enable this button"); + } + + private void DrawChatSettingsNotifications(ChatConfig chatConfig) + { + var notifyMentions = chatConfig.EnableMentionNotifications; + if (ImGui.Checkbox("Notify on mentions", ref notifyMentions)) + { + chatConfig.EnableMentionNotifications = notifyMentions; + _chatConfigService.Save(); + } + if (ImGui.IsItemHovered()) + { + ImGui.SetTooltip("Show a notification when someone mentions you in syncshell chat."); + } + + var autoOpenOnMessage = chatConfig.AutoOpenChatOnNewMessage; + if (ImGui.Checkbox("Auto-open chat on new messages when closed", ref autoOpenOnMessage)) + { + chatConfig.AutoOpenChatOnNewMessage = autoOpenOnMessage; + _chatConfigService.Save(); + } + if (ImGui.IsItemHovered()) + { + ImGui.SetTooltip("Reopens the chat window when a new message arrives while it is closed."); + } + } + + private void DrawChatSettingsVisibility(ChatConfig chatConfig) + { + ImGui.TextUnformatted("Channel Visibility"); + ImGui.Separator(); + + IReadOnlyList channels = _zoneChatService.GetChannelsSnapshot(); + if (channels.Count == 0) + { + ImGui.TextDisabled("No chat channels available."); + } + else + { + ImGui.TextDisabled("Uncheck a channel to hide its tab."); + ImGui.TextDisabled("Hidden channels still receive messages."); + + float maxListHeight = 200f * ImGuiHelpers.GlobalScale; + float listHeight = Math.Min(maxListHeight, channels.Count * ImGui.GetFrameHeightWithSpacing() + ImGui.GetStyle().ItemSpacing.Y); + using var child = ImRaii.Child("chat_channel_visibility_list", new Vector2(0f, listHeight), true); + if (child) + { + foreach (var channel in channels) + { + bool isVisible = !IsChannelHidden(channel.Key); + string prefix = channel.Type == ChatChannelType.Zone ? 
"Zone" : "Syncshell"; + if (ImGui.Checkbox($"{prefix}: {channel.DisplayName}##{channel.Key}", ref isVisible)) + { + SetChannelHidden(channel.Key, !isVisible); + } + } + } + } + ImGui.Separator(); ImGui.TextUnformatted("Chat Visibility"); + ImGui.Separator(); var autoHideCombat = chatConfig.HideInCombat; if (ImGui.Checkbox("Hide in combat", ref autoHideCombat)) @@ -2434,28 +3600,10 @@ public sealed class ZoneChatUi : WindowMediatorSubscriberBase { ImGui.SetTooltip("Allow the chat window to remain visible in /gpose."); } + } - ImGui.Separator(); - - var fontScale = Math.Clamp(chatConfig.ChatFontScale, MinChatFontScale, MaxChatFontScale); - var fontScaleChanged = ImGui.SliderFloat("Message font scale", ref fontScale, MinChatFontScale, MaxChatFontScale, "%.2fx"); - var resetFontScale = ImGui.IsItemClicked(ImGuiMouseButton.Right); - if (resetFontScale) - { - fontScale = 1.0f; - fontScaleChanged = true; - } - - if (fontScaleChanged) - { - chatConfig.ChatFontScale = fontScale; - _chatConfigService.Save(); - } - if (ImGui.IsItemHovered()) - { - ImGui.SetTooltip("Adjust scale of chat message text.\nRight-click to reset to default."); - } - + private void DrawChatSettingsWindow(ChatConfig chatConfig) + { var windowOpacity = Math.Clamp(chatConfig.ChatWindowOpacity, MinWindowOpacity, MaxWindowOpacity); var opacityChanged = ImGui.SliderFloat("Window transparency", ref windowOpacity, MinWindowOpacity, MaxWindowOpacity, "%.2f"); var resetOpacity = ImGui.IsItemClicked(ImGuiMouseButton.Right); @@ -2484,7 +3632,7 @@ public sealed class ZoneChatUi : WindowMediatorSubscriberBase } if (ImGui.IsItemHovered()) { - ImGui.SetTooltip("When enabled, the chat window fades after it loses focus.\nHovering the window restores focus."); + ImGui.SetTooltip("When enabled, the chat window fades after it loses focus.\nHovering the window restores opacity."); } ImGui.BeginDisabled(!fadeUnfocused); @@ -2506,8 +3654,6 @@ public sealed class ZoneChatUi : WindowMediatorSubscriberBase 
ImGui.SetTooltip("Target transparency while the chat window is unfocused.\nRight-click to reset to default."); } ImGui.EndDisabled(); - - ImGui.EndPopup(); } private static float MoveTowards(float current, float target, float maxDelta) @@ -2542,27 +3688,49 @@ public sealed class ZoneChatUi : WindowMediatorSubscriberBase private unsafe void DrawChannelButtons(IReadOnlyList channels) { - var style = ImGui.GetStyle(); - var baseFramePadding = style.FramePadding; - var available = ImGui.GetContentRegionAvail().X; - var buttonHeight = ImGui.GetFrameHeight(); - var arrowWidth = buttonHeight; - var hasChannels = channels.Count > 0; - var scrollWidth = hasChannels ? Math.Max(0f, available - (arrowWidth * 2f + style.ItemSpacing.X * 2f)) : 0f; + ImGuiStylePtr style = ImGui.GetStyle(); + Vector2 baseFramePadding = style.FramePadding; + float available = ImGui.GetContentRegionAvail().X; + float buttonHeight = ImGui.GetFrameHeight(); + float arrowWidth = buttonHeight; + bool hasChannels = channels.Count > 0; + float scrollWidth = hasChannels ? Math.Max(0f, available - (arrowWidth * 2f + style.ItemSpacing.X * 2f)) : 0f; if (hasChannels) { - var minimumWidth = 120f * ImGuiHelpers.GlobalScale; + float minimumWidth = 120f * ImGuiHelpers.GlobalScale; scrollWidth = Math.Max(scrollWidth, minimumWidth); } - var scrollStep = scrollWidth > 0f ? scrollWidth * 0.9f : 120f; + float scrollStep = scrollWidth > 0f ? 
scrollWidth * 0.9f : 120f; + float badgeSpacing = 4f * ImGuiHelpers.GlobalScale; + Vector2 badgePadding = new Vector2(4f, 1.5f) * ImGuiHelpers.GlobalScale; + bool showScrollbar = false; + if (hasChannels) + { + float totalWidth = 0f; + bool firstWidth = true; + foreach (ChatChannelSnapshot channel in channels) + { + if (!firstWidth) + { + totalWidth += style.ItemSpacing.X; + } + + totalWidth += GetChannelTabWidth(channel, baseFramePadding, badgeSpacing, badgePadding); + firstWidth = false; + } + + showScrollbar = totalWidth > scrollWidth; + } + + float childHeight = buttonHeight + style.FramePadding.Y * 2f + (showScrollbar ? style.ScrollbarSize : 0f); if (!hasChannels) { _pendingChannelScroll = null; _channelScroll = 0f; _channelScrollMax = 0f; } - var prevScroll = hasChannels ? _channelScroll : 0f; - var prevMax = hasChannels ? _channelScrollMax : 0f; + float prevScroll = hasChannels ? _channelScroll : 0f; + float prevMax = hasChannels ? _channelScrollMax : 0f; float currentScroll = prevScroll; float maxScroll = prevMax; @@ -2587,7 +3755,6 @@ public sealed class ZoneChatUi : WindowMediatorSubscriberBase ImGui.SameLine(0f, style.ItemSpacing.X); - var childHeight = buttonHeight + style.FramePadding.Y * 2f + style.ScrollbarSize; var alignPushed = false; if (hasChannels) { @@ -2595,31 +3762,29 @@ public sealed class ZoneChatUi : WindowMediatorSubscriberBase alignPushed = true; } - const int MaxBadgeDisplay = 99; - using (var child = ImRaii.Child("channel_scroll", new Vector2(scrollWidth, childHeight), false, ImGuiWindowFlags.HorizontalScrollbar)) { if (child) { - var dragActive = _dragChannelKey is not null && ImGui.IsMouseDragging(ImGuiMouseButton.Left); - var hoveredTargetThisFrame = false; - var first = true; + bool dragActive = _dragChannelKey is not null && ImGui.IsMouseDragging(ImGuiMouseButton.Left); + bool hoveredTargetThisFrame = false; + bool first = true; foreach (var channel in channels) { if (!first) ImGui.SameLine(); - var isSelected = 
string.Equals(channel.Key, _selectedChannelKey, StringComparison.Ordinal); - var showBadge = !isSelected && channel.UnreadCount > 0; - var isZoneChannel = channel.Type == ChatChannelType.Zone; + bool isSelected = string.Equals(channel.Key, _selectedChannelKey, StringComparison.Ordinal); + bool showBadge = !isSelected && channel.UnreadCount > 0; + bool isZoneChannel = channel.Type == ChatChannelType.Zone; (string Text, Vector2 TextSize, float Width, float Height)? badgeMetrics = null; - var channelLabel = GetChannelTabLabel(channel); + string channelLabel = GetChannelTabLabel(channel); - var normal = isSelected ? UIColors.Get("LightlessPurpleDefault") : UIColors.Get("ButtonDefault"); - var hovered = isSelected + Vector4 normal = isSelected ? UIColors.Get("LightlessPurpleDefault") : UIColors.Get("ButtonDefault"); + Vector4 hovered = isSelected ? UIColors.Get("LightlessPurple").WithAlpha(0.9f) : UIColors.Get("ButtonDefault").WithAlpha(0.85f); - var active = isSelected + Vector4 active = isSelected ? UIColors.Get("LightlessPurpleDefault").WithAlpha(0.8f) : UIColors.Get("ButtonDefault").WithAlpha(0.75f); @@ -2629,20 +3794,18 @@ public sealed class ZoneChatUi : WindowMediatorSubscriberBase if (showBadge) { - var badgeSpacing = 4f * ImGuiHelpers.GlobalScale; - var badgePadding = new Vector2(4f, 1.5f) * ImGuiHelpers.GlobalScale; - var badgeText = channel.UnreadCount > MaxBadgeDisplay + string badgeText = channel.UnreadCount > MaxBadgeDisplay ? 
$"{MaxBadgeDisplay}+" : channel.UnreadCount.ToString(CultureInfo.InvariantCulture); - var badgeTextSize = ImGui.CalcTextSize(badgeText); - var badgeWidth = badgeTextSize.X + badgePadding.X * 2f; - var badgeHeight = badgeTextSize.Y + badgePadding.Y * 2f; - var customPadding = new Vector2(baseFramePadding.X + badgeWidth + badgeSpacing, baseFramePadding.Y); + Vector2 badgeTextSize = ImGui.CalcTextSize(badgeText); + float badgeWidth = badgeTextSize.X + badgePadding.X * 2f; + float badgeHeight = badgeTextSize.Y + badgePadding.Y * 2f; + Vector2 customPadding = new Vector2(baseFramePadding.X + badgeWidth + badgeSpacing, baseFramePadding.Y); ImGui.PushStyleVar(ImGuiStyleVar.FramePadding, customPadding); badgeMetrics = (badgeText, badgeTextSize, badgeWidth, badgeHeight); } - var clicked = ImGui.Button($"{channelLabel}##chat_channel_{channel.Key}"); + bool clicked = ImGui.Button($"{channelLabel}##chat_channel_{channel.Key}"); if (showBadge) { @@ -2678,12 +3841,12 @@ public sealed class ZoneChatUi : WindowMediatorSubscriberBase ImGui.EndDragDropSource(); } - var isDragTarget = false; + bool isDragTarget = false; if (ImGui.BeginDragDropTarget()) { - var acceptFlags = ImGuiDragDropFlags.AcceptBeforeDelivery | ImGuiDragDropFlags.AcceptNoDrawDefaultRect; - var payload = ImGui.AcceptDragDropPayload(ChannelDragPayloadId, acceptFlags); + ImGuiDragDropFlags acceptFlags = ImGuiDragDropFlags.AcceptBeforeDelivery | ImGuiDragDropFlags.AcceptNoDrawDefaultRect; + ImGuiPayloadPtr payload = ImGui.AcceptDragDropPayload(ChannelDragPayloadId, acceptFlags); if (!payload.IsNull && _dragChannelKey is { } draggedKey && !string.Equals(draggedKey, channel.Key, StringComparison.Ordinal)) { @@ -2698,7 +3861,7 @@ public sealed class ZoneChatUi : WindowMediatorSubscriberBase ImGui.EndDragDropTarget(); } - var isHoveredDuringDrag = dragActive + bool isHoveredDuringDrag = dragActive && ImGui.IsItemHovered(ImGuiHoveredFlags.AllowWhenBlockedByActiveItem | ImGuiHoveredFlags.AllowWhenOverlapped); if 
(!isDragTarget && isHoveredDuringDrag @@ -2712,14 +3875,14 @@ public sealed class ZoneChatUi : WindowMediatorSubscriberBase } } - var drawList = ImGui.GetWindowDrawList(); - var itemMin = ImGui.GetItemRectMin(); - var itemMax = ImGui.GetItemRectMax(); + ImDrawListPtr drawList = ImGui.GetWindowDrawList(); + Vector2 itemMin = ImGui.GetItemRectMin(); + Vector2 itemMax = ImGui.GetItemRectMax(); if (isHoveredDuringDrag) { - var highlight = UIColors.Get("LightlessPurple").WithAlpha(0.35f); - var highlightU32 = ImGui.ColorConvertFloat4ToU32(highlight); + Vector4 highlight = UIColors.Get("LightlessPurple").WithAlpha(0.35f); + uint highlightU32 = ImGui.ColorConvertFloat4ToU32(highlight); drawList.AddRectFilled(itemMin, itemMax, highlightU32, style.FrameRounding); drawList.AddRect(itemMin, itemMax, highlightU32, style.FrameRounding, ImDrawFlags.None, Math.Max(1f, ImGuiHelpers.GlobalScale)); } @@ -2731,23 +3894,23 @@ public sealed class ZoneChatUi : WindowMediatorSubscriberBase if (isZoneChannel) { - var borderColor = UIColors.Get("LightlessOrange"); - var borderColorU32 = ImGui.ColorConvertFloat4ToU32(borderColor); - var borderThickness = Math.Max(1f, ImGuiHelpers.GlobalScale); + Vector4 borderColor = UIColors.Get("LightlessOrange"); + uint borderColorU32 = ImGui.ColorConvertFloat4ToU32(borderColor); + float borderThickness = Math.Max(1f, ImGuiHelpers.GlobalScale); drawList.AddRect(itemMin, itemMax, borderColorU32, style.FrameRounding, ImDrawFlags.None, borderThickness); } if (showBadge && badgeMetrics is { } metrics) { - var buttonSizeY = itemMax.Y - itemMin.Y; - var badgeMin = new Vector2( + float buttonSizeY = itemMax.Y - itemMin.Y; + Vector2 badgeMin = new Vector2( itemMin.X + baseFramePadding.X, itemMin.Y + (buttonSizeY - metrics.Height) * 0.5f); - var badgeMax = badgeMin + new Vector2(metrics.Width, metrics.Height); - var badgeColor = UIColors.Get("DimRed"); - var badgeColorU32 = ImGui.ColorConvertFloat4ToU32(badgeColor); + Vector2 badgeMax = badgeMin + new 
Vector2(metrics.Width, metrics.Height); + Vector4 badgeColor = UIColors.Get("DimRed"); + uint badgeColorU32 = ImGui.ColorConvertFloat4ToU32(badgeColor); drawList.AddRectFilled(badgeMin, badgeMax, badgeColorU32, metrics.Height * 0.5f); - var textPos = new Vector2( + Vector2 textPos = new Vector2( badgeMin.X + (metrics.Width - metrics.TextSize.X) * 0.5f, badgeMin.Y + (metrics.Height - metrics.TextSize.Y) * 0.5f); drawList.AddText(textPos, ImGui.ColorConvertFloat4ToU32(ImGuiColors.DalamudWhite), metrics.Text); @@ -2810,6 +3973,26 @@ public sealed class ZoneChatUi : WindowMediatorSubscriberBase ImGui.SetCursorPosY(ImGui.GetCursorPosY() - style.ItemSpacing.Y * 0.3f); } + private float GetChannelTabWidth(ChatChannelSnapshot channel, Vector2 baseFramePadding, float badgeSpacing, Vector2 badgePadding) + { + string channelLabel = GetChannelTabLabel(channel); + float textWidth = ImGui.CalcTextSize(channelLabel).X; + bool isSelected = string.Equals(channel.Key, _selectedChannelKey, StringComparison.Ordinal); + bool showBadge = !isSelected && channel.UnreadCount > 0; + if (!showBadge) + { + return textWidth + baseFramePadding.X * 2f; + } + + string badgeText = channel.UnreadCount > MaxBadgeDisplay + ? 
$"{MaxBadgeDisplay}+" + : channel.UnreadCount.ToString(CultureInfo.InvariantCulture); + Vector2 badgeTextSize = ImGui.CalcTextSize(badgeText); + float badgeWidth = badgeTextSize.X + badgePadding.X * 2f; + float customPaddingX = baseFramePadding.X + badgeWidth + badgeSpacing; + return textWidth + customPaddingX * 2f; + } + private string GetChannelTabLabel(ChatChannelSnapshot channel) { if (channel.Type != ChatChannelType.Group) @@ -2845,27 +4028,33 @@ public sealed class ZoneChatUi : WindowMediatorSubscriberBase private bool ShouldShowChannelTabContextMenu(ChatChannelSnapshot channel) { - if (channel.Type != ChatChannelType.Group) - { - return false; - } - - if (_chatConfigService.Current.PreferNotesForChannels.TryGetValue(channel.Key, out var preferNote) && preferNote) - { - return true; - } - - var note = GetChannelNote(channel); - return !string.IsNullOrWhiteSpace(note); + return true; } private void DrawChannelTabContextMenu(ChatChannelSnapshot channel) { + if (ImGui.MenuItem("Hide Channel")) + { + SetChannelHidden(channel.Key, true); + if (string.Equals(_selectedChannelKey, channel.Key, StringComparison.Ordinal)) + { + _selectedChannelKey = null; + _zoneChatService.SetActiveChannel(null); + } + ImGui.CloseCurrentPopup(); + return; + } + if (ImGui.IsItemHovered()) + { + ImGui.SetTooltip("Unhide channels from Chat Settings -> Visibility."); + } + var preferNote = _chatConfigService.Current.PreferNotesForChannels.TryGetValue(channel.Key, out var value) && value; var note = GetChannelNote(channel); var hasNote = !string.IsNullOrWhiteSpace(note); if (preferNote || hasNote) { + ImGui.Separator(); var label = preferNote ? 
"Prefer Name Instead" : "Prefer Note Instead"; if (ImGui.MenuItem(label)) { diff --git a/LightlessSync/Utils/RollingList.cs b/LightlessSync/Utils/RollingList.cs index 4ddb22b..2528fe8 100644 --- a/LightlessSync/Utils/RollingList.cs +++ b/LightlessSync/Utils/RollingList.cs @@ -29,6 +29,29 @@ public class RollingList : IEnumerable } } + public bool TryGetLast(out T value) + { + lock (_addLock) + { + if (_list.Count == 0) + { + value = default!; + return false; + } + + value = _list.Last!.Value; + return true; + } + } + + public List Snapshot() + { + lock (_addLock) + { + return new List(_list); + } + } + public void Add(T value) { lock (_addLock) diff --git a/LightlessSync/Utils/TaskRegistry.cs b/LightlessSync/Utils/TaskRegistry.cs new file mode 100644 index 0000000..7104548 --- /dev/null +++ b/LightlessSync/Utils/TaskRegistry.cs @@ -0,0 +1,93 @@ +using System.Collections.Concurrent; + +namespace LightlessSync.Utils; + +public sealed class TaskRegistry where HandleType : notnull +{ + private readonly ConcurrentDictionary> _activeTasks = new(); + + public Task GetOrStart(HandleType handle, Func taskFactory) + => GetOrStartInternal(handle, taskFactory); + + public Task GetOrStart(HandleType handle, Func> taskFactory) + => GetOrStartInternal(handle, taskFactory); + + public bool TryGetExisting(HandleType handle, out Task task) + { + if (_activeTasks.TryGetValue(handle, out Lazy? 
entry)) + { + task = entry.Value; + if (!task.IsCompleted) + { + return true; + } + + _activeTasks.TryRemove(new KeyValuePair>(handle, entry)); + } + + task = Task.CompletedTask; + return false; + } + + private Task GetOrStartInternal(HandleType handle, Func taskFactory) + { + Lazy entry = _activeTasks.GetOrAdd(handle, _ => CreateEntry(handle, taskFactory)); + Task task = entry.Value; + if (task.IsCompleted) + { + _activeTasks.TryRemove(new KeyValuePair>(handle, entry)); + } + + return task; + } + + private Task GetOrStartInternal(HandleType handle, Func> taskFactory) + { + Lazy entry = _activeTasks.GetOrAdd(handle, _ => CreateEntry(handle, taskFactory)); + Task task = entry.Value; + if (task.IsCompleted) + { + _activeTasks.TryRemove(new KeyValuePair>(handle, entry)); + } + + return (Task)task; + } + + private Lazy CreateEntry(HandleType handle, Func taskFactory) + { + Lazy entry = null!; + entry = new Lazy(() => ExecuteAndRemove(handle, entry, taskFactory), LazyThreadSafetyMode.ExecutionAndPublication); + return entry; + } + + private Lazy CreateEntry(HandleType handle, Func> taskFactory) + { + Lazy entry = null!; + entry = new Lazy(() => ExecuteAndRemove(handle, entry, taskFactory), LazyThreadSafetyMode.ExecutionAndPublication); + return entry; + } + + private async Task ExecuteAndRemove(HandleType handle, Lazy entry, Func taskFactory) + { + try + { + await taskFactory().ConfigureAwait(false); + } + finally + { + _activeTasks.TryRemove(new KeyValuePair>(handle, entry)); + } + } + + private async Task ExecuteAndRemove(HandleType handle, Lazy entry, Func> taskFactory) + { + try + { + return await taskFactory().ConfigureAwait(false); + } + finally + { + _activeTasks.TryRemove(new KeyValuePair>(handle, entry)); + } + } +} diff --git a/LightlessSync/WebAPI/Files/FileDownloadDeduplicator.cs b/LightlessSync/WebAPI/Files/FileDownloadDeduplicator.cs new file mode 100644 index 0000000..42850a2 --- /dev/null +++ b/LightlessSync/WebAPI/Files/FileDownloadDeduplicator.cs @@ 
-0,0 +1,48 @@ +using System.Collections.Concurrent; + +namespace LightlessSync.WebAPI.Files; + +public readonly record struct DownloadClaim(bool IsOwner, Task Completion); + +public sealed class FileDownloadDeduplicator +{ + private readonly ConcurrentDictionary> _inFlight = + new(StringComparer.OrdinalIgnoreCase); + + public DownloadClaim Claim(string hash) + { + if (string.IsNullOrWhiteSpace(hash)) + { + return new DownloadClaim(false, Task.FromResult(true)); + } + + var tcs = new TaskCompletionSource(TaskCreationOptions.RunContinuationsAsynchronously); + var existing = _inFlight.GetOrAdd(hash, tcs); + var isOwner = ReferenceEquals(existing, tcs); + return new DownloadClaim(isOwner, existing.Task); + } + + public void Complete(string hash, bool success) + { + if (string.IsNullOrWhiteSpace(hash)) + { + return; + } + + if (_inFlight.TryRemove(hash, out var tcs)) + { + tcs.TrySetResult(success); + } + } + + public void CompleteAll(bool success) + { + foreach (var entry in _inFlight.ToArray()) + { + if (_inFlight.TryRemove(entry.Key, out var tcs)) + { + tcs.TrySetResult(success); + } + } + } +} diff --git a/LightlessSync/WebAPI/Files/FileDownloadManager.cs b/LightlessSync/WebAPI/Files/FileDownloadManager.cs index 97f8af7..2b51bf5 100644 --- a/LightlessSync/WebAPI/Files/FileDownloadManager.cs +++ b/LightlessSync/WebAPI/Files/FileDownloadManager.cs @@ -1,3 +1,4 @@ +using K4os.Compression.LZ4; using K4os.Compression.LZ4.Legacy; using LightlessSync.API.Data; using LightlessSync.API.Dto.Files; @@ -8,9 +9,13 @@ using LightlessSync.PlayerData.Handlers; using LightlessSync.Services.Mediator; using LightlessSync.Services.ModelDecimation; using LightlessSync.Services.TextureCompression; +using LightlessSync.Utils; using LightlessSync.WebAPI.Files.Models; using Microsoft.Extensions.Logging; +using System.Buffers; +using System.Buffers.Binary; using System.Collections.Concurrent; +using System.IO.MemoryMappedFiles; using System.Net; using System.Net.Http.Json; @@ -18,8 +23,6 @@ 
namespace LightlessSync.WebAPI.Files; public partial class FileDownloadManager : DisposableMediatorSubscriberBase { - private readonly ConcurrentDictionary _downloadStatus; - private readonly FileCompactor _fileCompactor; private readonly FileCacheManager _fileDbManager; private readonly FileTransferOrchestrator _orchestrator; @@ -27,12 +30,14 @@ public partial class FileDownloadManager : DisposableMediatorSubscriberBase private readonly TextureDownscaleService _textureDownscaleService; private readonly ModelDecimationService _modelDecimationService; private readonly TextureMetadataHelper _textureMetadataHelper; + private readonly FileDownloadDeduplicator _downloadDeduplicator; + private readonly ConcurrentDictionary _activeSessions = new(); + private readonly ConcurrentDictionary> _downloadQueues = new(); + private readonly TaskRegistry _downloadQueueWaiters = new(); private readonly ConcurrentDictionary _activeDownloadStreams; private readonly SemaphoreSlim _decompressGate = new(Math.Max(1, Environment.ProcessorCount / 2), Math.Max(1, Environment.ProcessorCount / 2)); - - private readonly ConcurrentQueue _deferredCompressionQueue = new(); private volatile bool _disableDirectDownloads; private int _consecutiveDirectDownloadFailures; @@ -47,9 +52,9 @@ public partial class FileDownloadManager : DisposableMediatorSubscriberBase LightlessConfigService configService, TextureDownscaleService textureDownscaleService, ModelDecimationService modelDecimationService, - TextureMetadataHelper textureMetadataHelper) : base(logger, mediator) + TextureMetadataHelper textureMetadataHelper, + FileDownloadDeduplicator downloadDeduplicator) : base(logger, mediator) { - _downloadStatus = new ConcurrentDictionary(StringComparer.Ordinal); _orchestrator = orchestrator; _fileDbManager = fileCacheManager; _fileCompactor = fileCompactor; @@ -57,6 +62,7 @@ public partial class FileDownloadManager : DisposableMediatorSubscriberBase _textureDownscaleService = textureDownscaleService; 
_modelDecimationService = modelDecimationService; _textureMetadataHelper = textureMetadataHelper; + _downloadDeduplicator = downloadDeduplicator; _activeDownloadStreams = new(); _lastConfigDirectDownloadsState = _configService.Current.EnableDirectDownloads; @@ -70,12 +76,46 @@ public partial class FileDownloadManager : DisposableMediatorSubscriberBase foreach (var stream in _activeDownloadStreams.Keys) stream.BandwidthLimit = newLimit; }); + + Mediator.Subscribe(this, _ => + { + Logger.LogDebug("Disconnected from server, clearing in-flight downloads"); + ClearDownload(); + _downloadDeduplicator.CompleteAll(false); + }); } - public List CurrentDownloads { get; private set; } = []; + public List CurrentDownloads => _activeSessions.Values.SelectMany(s => s.Downloads).ToList(); public List ForbiddenTransfers => _orchestrator.ForbiddenTransfers; - public Guid? CurrentOwnerToken { get; private set; } - public bool IsDownloading => CurrentDownloads.Count != 0; + public bool IsDownloading => !_activeSessions.IsEmpty || _downloadQueues.Any(kvp => !kvp.Value.IsEmpty); + + public bool IsDownloadingFor(GameObjectHandler? handler) + { + if (handler is null) + return false; + + return _activeSessions.ContainsKey(handler) + || (_downloadQueues.TryGetValue(handler, out var queue) && !queue.IsEmpty); + } + + public int GetPendingDownloadCount(GameObjectHandler? 
handler) + { + if (handler is null) + return 0; + + var count = 0; + + if (_activeSessions.TryGetValue(handler, out var session)) + count += session.Downloads.Count; + + if (_downloadQueues.TryGetValue(handler, out var queue)) + { + foreach (var request in queue) + count += request.Session.Downloads.Count; + } + + return count; + } private bool ShouldUseDirectDownloads() => _configService.Current.EnableDirectDownloads && !_disableDirectDownloads; @@ -88,26 +128,111 @@ public partial class FileDownloadManager : DisposableMediatorSubscriberBase public void ClearDownload() { - CurrentDownloads.Clear(); - _downloadStatus.Clear(); - CurrentOwnerToken = null; + foreach (var session in _activeSessions.Values.ToList()) + ClearDownload(session); + } + + private void ClearDownload(DownloadSession session) + { + foreach (var hash in session.OwnedDownloads.Keys.ToList()) + { + CompleteOwnedDownload(session, hash, false); + } + + session.Status.Clear(); + session.OwnedDownloads.Clear(); + session.Downloads.Clear(); + + if (session.Handler is not null) + _activeSessions.TryRemove(session.Handler, out _); } public async Task DownloadFiles(GameObjectHandler? gameObject, List fileReplacementDto, CancellationToken ct, bool skipDownscale = false, bool skipDecimation = false) { + var downloads = await InitiateDownloadList(gameObject, fileReplacementDto, ct).ConfigureAwait(false); + await DownloadFiles(gameObject, fileReplacementDto, downloads, ct, skipDownscale, skipDecimation).ConfigureAwait(false); + } + + public Task DownloadFiles(GameObjectHandler? 
gameObject, List fileReplacementDto, List downloads, CancellationToken ct, bool skipDownscale = false, bool skipDecimation = false) + { + var session = new DownloadSession(gameObject, downloads); + var completion = new TaskCompletionSource(TaskCreationOptions.RunContinuationsAsynchronously); + var request = new DownloadRequest(session, fileReplacementDto, ct, skipDownscale, skipDecimation, completion); + return EnqueueDownloadAsync(request); + } + + private Task EnqueueDownloadAsync(DownloadRequest request) + { + var handler = request.Session.Handler; + if (handler is null) + { + _ = ExecuteDownloadRequestAsync(request); + return request.Completion.Task; + } + + var queue = _downloadQueues.GetOrAdd(handler, _ => new ConcurrentQueue()); + queue.Enqueue(request); + + _downloadQueueWaiters.GetOrStart(handler, () => ProcessDownloadQueueAsync(handler)); + + return request.Completion.Task; + } + + private async Task ProcessDownloadQueueAsync(GameObjectHandler handler) + { + if (!_downloadQueues.TryGetValue(handler, out var queue)) + return; + + while (true) + { + while (queue.TryDequeue(out var request)) + { + await ExecuteDownloadRequestAsync(request).ConfigureAwait(false); + } + + await Task.Yield(); + if (queue.IsEmpty) + return; + } + } + + private async Task ExecuteDownloadRequestAsync(DownloadRequest request) + { + if (request.CancellationToken.IsCancellationRequested) + { + request.Completion.TrySetCanceled(request.CancellationToken); + return; + } + + var session = request.Session; + if (session.Handler is not null) + { + _activeSessions[session.Handler] = session; + } + Mediator.Publish(new HaltScanMessage(nameof(DownloadFiles))); try { - await DownloadFilesInternal(gameObject, fileReplacementDto, ct, skipDownscale, skipDecimation).ConfigureAwait(false); + await DownloadFilesInternal(session, request.Replacements, request.CancellationToken, request.SkipDownscale, request.SkipDecimation).ConfigureAwait(false); + request.Completion.TrySetResult(true); } - catch + 
catch (OperationCanceledException) when (request.CancellationToken.IsCancellationRequested) { - ClearDownload(); + ClearDownload(session); + request.Completion.TrySetCanceled(request.CancellationToken); + } + catch (Exception ex) + { + ClearDownload(session); + request.Completion.TrySetException(ex); } finally { - if (gameObject is not null) - Mediator.Publish(new DownloadFinishedMessage(gameObject)); + if (session.Handler is not null) + { + Mediator.Publish(new DownloadFinishedMessage(session.Handler)); + _activeSessions.TryRemove(session.Handler, out _); + } Mediator.Publish(new ResumeScanMessage(nameof(DownloadFiles))); } @@ -130,6 +255,30 @@ public partial class FileDownloadManager : DisposableMediatorSubscriberBase base.Dispose(disposing); } + private sealed class DownloadSession + { + public DownloadSession(GameObjectHandler? handler, List downloads) + { + Handler = handler; + ObjectName = handler?.Name ?? "Unknown"; + Downloads = downloads; + } + + public GameObjectHandler? Handler { get; } + public string ObjectName { get; } + public List Downloads { get; } + public ConcurrentDictionary Status { get; } = new(StringComparer.Ordinal); + public ConcurrentDictionary OwnedDownloads { get; } = new(StringComparer.OrdinalIgnoreCase); + } + + private sealed record DownloadRequest( + DownloadSession Session, + List Replacements, + CancellationToken CancellationToken, + bool SkipDownscale, + bool SkipDecimation, + TaskCompletionSource Completion); + private sealed class DownloadSlotLease : IAsyncDisposable { private readonly FileTransferOrchestrator _orch; @@ -154,24 +303,32 @@ public partial class FileDownloadManager : DisposableMediatorSubscriberBase return new DownloadSlotLease(_orchestrator); } - private void SetStatus(string key, DownloadStatus status) + private void SetStatus(DownloadSession session, string key, DownloadStatus status) { - if (_downloadStatus.TryGetValue(key, out var st)) + if (session.Status.TryGetValue(key, out var st)) st.DownloadStatus = 
status; } - private void AddTransferredBytes(string key, long delta) + private void AddTransferredBytes(DownloadSession session, string key, long delta) { - if (_downloadStatus.TryGetValue(key, out var st)) + if (session.Status.TryGetValue(key, out var st)) st.AddTransferredBytes(delta); } - private void MarkTransferredFiles(string key, int files) + private void MarkTransferredFiles(DownloadSession session, string key, int files) { - if (_downloadStatus.TryGetValue(key, out var st)) + if (session.Status.TryGetValue(key, out var st)) st.SetTransferredFiles(files); } + private void CompleteOwnedDownload(DownloadSession session, string hash, bool success) + { + if (session.OwnedDownloads.TryRemove(hash, out _)) + { + _downloadDeduplicator.Complete(hash, success); + } + } + private static byte MungeByte(int byteOrEof) { if (byteOrEof == -1) throw new EndOfStreamException(); @@ -218,6 +375,101 @@ public partial class FileDownloadManager : DisposableMediatorSubscriberBase } } + private static async Task CopyExactlyAsync(Stream source, Stream destination, long bytesToCopy, CancellationToken ct) + { + if (bytesToCopy <= 0) + return; + + var buffer = ArrayPool.Shared.Rent(81920); + try + { + long remaining = bytesToCopy; + while (remaining > 0) + { + int read = await source.ReadAsync(buffer.AsMemory(0, (int)Math.Min(buffer.Length, remaining)), ct).ConfigureAwait(false); + if (read == 0) throw new EndOfStreamException(); + await destination.WriteAsync(buffer.AsMemory(0, read), ct).ConfigureAwait(false); + remaining -= read; + } + } + finally + { + ArrayPool.Shared.Return(buffer); + } + } + + private async Task DecompressWrappedLz4ToFileAsync(string compressedPath, string outputPath, CancellationToken ct) + { + await using var input = new FileStream(compressedPath, FileMode.Open, FileAccess.Read, FileShare.Read, 81920, useAsync: true); + byte[] header = new byte[8]; + await ReadExactlyAsync(input, header, ct).ConfigureAwait(false); + + int outputLength = 
BinaryPrimitives.ReadInt32LittleEndian(header.AsSpan(0, 4)); + int inputLength = BinaryPrimitives.ReadInt32LittleEndian(header.AsSpan(4, 4)); + + if (outputLength < 0 || inputLength < 0) + throw new InvalidDataException("LZ4 header contained a negative length."); + + long remainingLength = input.Length - 8; + if (inputLength > remainingLength) + throw new InvalidDataException("LZ4 header length exceeds file size."); + + var dir = Path.GetDirectoryName(outputPath); + if (!string.IsNullOrEmpty(dir) && !Directory.Exists(dir)) + Directory.CreateDirectory(dir); + + if (outputLength == 0) + { + await using var emptyStream = new FileStream(outputPath, FileMode.Create, FileAccess.Write, FileShare.None, 4096, useAsync: true); + await emptyStream.FlushAsync(ct).ConfigureAwait(false); + return 0; + } + + if (inputLength >= outputLength) + { + await using var outputStream = new FileStream(outputPath, FileMode.Create, FileAccess.Write, FileShare.None, 81920, useAsync: true); + await CopyExactlyAsync(input, outputStream, inputLength, ct).ConfigureAwait(false); + await outputStream.FlushAsync(ct).ConfigureAwait(false); + return outputLength; + } + + await using var mappedOutputStream = new FileStream(outputPath, FileMode.Create, FileAccess.ReadWrite, FileShare.None, 4096, FileOptions.SequentialScan); + mappedOutputStream.SetLength(outputLength); + + using var inputMap = MemoryMappedFile.CreateFromFile(compressedPath, FileMode.Open, null, 0, MemoryMappedFileAccess.Read); + using var inputView = inputMap.CreateViewAccessor(8, inputLength, MemoryMappedFileAccess.Read); + using var outputMap = MemoryMappedFile.CreateFromFile(mappedOutputStream, null, outputLength, MemoryMappedFileAccess.ReadWrite, HandleInheritability.None, leaveOpen: true); + using var outputView = outputMap.CreateViewAccessor(0, outputLength, MemoryMappedFileAccess.Write); + + unsafe + { + byte* inputPtr = null; + byte* outputPtr = null; + try + { + inputView.SafeMemoryMappedViewHandle.AcquirePointer(ref inputPtr); 
+ outputView.SafeMemoryMappedViewHandle.AcquirePointer(ref outputPtr); + + inputPtr += inputView.PointerOffset; + outputPtr += outputView.PointerOffset; + + int decoded = LZ4Codec.Decode(inputPtr, inputLength, outputPtr, outputLength); + if (decoded != outputLength) + throw new InvalidDataException($"LZ4 decode length mismatch (expected {outputLength}, got {decoded})."); + } + finally + { + if (inputPtr != null) + inputView.SafeMemoryMappedViewHandle.ReleasePointer(); + if (outputPtr != null) + outputView.SafeMemoryMappedViewHandle.ReleasePointer(); + } + } + + outputView.Flush(); + return outputLength; + } + private static Dictionary BuildReplacementLookup(List fileReplacement) { var map = new Dictionary(StringComparer.OrdinalIgnoreCase); @@ -426,6 +678,7 @@ public partial class FileDownloadManager : DisposableMediatorSubscriberBase } private async Task DownloadQueuedBlockFileAsync( + DownloadSession session, string statusKey, Guid requestId, List transfers, @@ -437,14 +690,14 @@ public partial class FileDownloadManager : DisposableMediatorSubscriberBase requestId, transfers[0].DownloadUri, string.Join(", ", transfers.Select(c => c.Hash))); // Wait for ready WITHOUT holding a slot - SetStatus(statusKey, DownloadStatus.WaitingForQueue); + SetStatus(session, statusKey, DownloadStatus.WaitingForQueue); await WaitForDownloadReady(transfers, requestId, ct).ConfigureAwait(false); // Hold slot ONLY for the GET - SetStatus(statusKey, DownloadStatus.WaitingForSlot); + SetStatus(session, statusKey, DownloadStatus.WaitingForSlot); await using ((await AcquireSlotAsync(ct).ConfigureAwait(false)).ConfigureAwait(false)) { - SetStatus(statusKey, DownloadStatus.Downloading); + SetStatus(session, statusKey, DownloadStatus.Downloading); var requestUrl = LightlessFiles.CacheGetFullPath(transfers[0].DownloadUri, requestId); await DownloadFileThrottled(requestUrl, tempPath, progress, MungeBuffer, ct, withToken: true).ConfigureAwait(false); @@ -452,6 +705,7 @@ public partial class 
FileDownloadManager : DisposableMediatorSubscriberBase } private async Task DecompressBlockFileAsync( + DownloadSession session, string downloadStatusKey, string blockFilePath, Dictionary replacementLookup, @@ -461,8 +715,8 @@ public partial class FileDownloadManager : DisposableMediatorSubscriberBase bool skipDownscale, bool skipDecimation) { - SetStatus(downloadStatusKey, DownloadStatus.Decompressing); - MarkTransferredFiles(downloadStatusKey, 1); + SetStatus(session, downloadStatusKey, DownloadStatus.Decompressing); + MarkTransferredFiles(session, downloadStatusKey, 1); try { @@ -483,6 +737,7 @@ public partial class FileDownloadManager : DisposableMediatorSubscriberBase if (!replacementLookup.TryGetValue(fileHash, out var repl)) { Logger.LogWarning("{dlName}: No replacement mapping for {fileHash}", downloadLabel, fileHash); + CompleteOwnedDownload(session, fileHash, false); // still need to skip bytes: var skip = checked((int)fileLengthBytes); fileBlockStream.Position += skip; @@ -503,49 +758,29 @@ public partial class FileDownloadManager : DisposableMediatorSubscriberBase && expectedRawSize > 0 && decompressed.LongLength != expectedRawSize) { - await _fileCompactor.WriteAllBytesAsync(filePath, Array.Empty(), ct).ConfigureAwait(false); - PersistFileToStorage(fileHash, filePath, repl.GamePath, skipDownscale, skipDecimation); + Logger.LogWarning("{dlName}: Decompressed size mismatch for {fileHash} (expected {expected}, got {actual})", + downloadLabel, fileHash, expectedRawSize, decompressed.LongLength); + CompleteOwnedDownload(session, fileHash, false); continue; } - MungeBuffer(compressed); - - await _decompressGate.WaitAsync(ct).ConfigureAwait(false); - try - { - // offload CPU-intensive decompression to threadpool to free up worker - await Task.Run(async () => - { - var sw = System.Diagnostics.Stopwatch.StartNew(); - - // decompress - var decompressed = LZ4Wrapper.Unwrap(compressed); - - Logger.LogTrace("{dlName}: Unwrap {fileHash} took {ms}ms (compressed {c} 
bytes, decompressed {d} bytes)", - downloadLabel, fileHash, sw.ElapsedMilliseconds, compressed.Length, decompressed?.Length ?? -1); - - // write to file without compacting during download - await File.WriteAllBytesAsync(filePath, decompressed, ct).ConfigureAwait(false); - PersistFileToStorage(fileHash, filePath, repl.GamePath, skipDownscale, skipDecimation); - }, ct).ConfigureAwait(false); - } - finally - { - _decompressGate.Release(); - } + await _fileCompactor.WriteAllBytesAsync(filePath, decompressed, ct).ConfigureAwait(false); + PersistFileToStorage(session, fileHash, filePath, repl.GamePath, skipDownscale, skipDecimation); } catch (EndOfStreamException) { Logger.LogWarning("{dlName}: Failure to extract file {fileHash}, stream ended prematurely", downloadLabel, fileHash); + CompleteOwnedDownload(session, fileHash, false); } catch (Exception e) { Logger.LogWarning(e, "{dlName}: Error during decompression", downloadLabel); + CompleteOwnedDownload(session, fileHash, false); } } } - SetStatus(downloadStatusKey, DownloadStatus.Completed); + SetStatus(session, downloadStatusKey, DownloadStatus.Completed); } catch (EndOfStreamException) { @@ -563,15 +798,14 @@ public partial class FileDownloadManager : DisposableMediatorSubscriberBase CancellationToken ct, Guid? ownerToken = null) { - CurrentOwnerToken = ownerToken; + _ = ownerToken; var objectName = gameObjectHandler?.Name ?? 
"Unknown"; Logger.LogDebug("Download start: {id}", objectName); if (fileReplacement == null || fileReplacement.Count == 0) { Logger.LogDebug("{dlName}: No file replacements provided", objectName); - CurrentDownloads = []; - return CurrentDownloads; + return []; } var hashes = fileReplacement @@ -583,13 +817,32 @@ public partial class FileDownloadManager : DisposableMediatorSubscriberBase if (hashes.Count == 0) { Logger.LogDebug("{dlName}: No valid hashes to download", objectName); - CurrentDownloads = []; - return CurrentDownloads; + return []; + } + + var missingHashes = new List(hashes.Count); + foreach (var hash in hashes) + { + if (_fileDbManager.GetFileCacheByHash(hash) is null) + { + missingHashes.Add(hash); + } + } + + if (missingHashes.Count == 0) + { + Logger.LogDebug("{dlName}: All requested hashes already present in cache", objectName); + return []; + } + + if (missingHashes.Count < hashes.Count) + { + Logger.LogDebug("{dlName}: Skipping {count} hashes already present in cache", objectName, hashes.Count - missingHashes.Count); } List downloadFileInfoFromService = [ - .. await FilesGetSizes(hashes, ct).ConfigureAwait(false), + .. await FilesGetSizes(missingHashes, ct).ConfigureAwait(false), ]; Logger.LogDebug("Files with size 0 or less: {files}", @@ -601,13 +854,13 @@ public partial class FileDownloadManager : DisposableMediatorSubscriberBase _orchestrator.ForbiddenTransfers.Add(new DownloadFileTransfer(dto)); } - CurrentDownloads = downloadFileInfoFromService + var downloads = downloadFileInfoFromService .Distinct() .Select(d => new DownloadFileTransfer(d)) .Where(d => d.CanBeTransferred) .ToList(); - return CurrentDownloads; + return downloads; } private sealed record BatchChunk(string HostKey, string StatusKey, List Items); @@ -618,9 +871,9 @@ public partial class FileDownloadManager : DisposableMediatorSubscriberBase yield return items.GetRange(i, Math.Min(chunkSize, items.Count - i)); } - private async Task DownloadFilesInternal(GameObjectHandler? 
gameObjectHandler, List fileReplacement, CancellationToken ct, bool skipDownscale, bool skipDecimation) + private async Task DownloadFilesInternal(DownloadSession session, List fileReplacement, CancellationToken ct, bool skipDownscale, bool skipDecimation) { - var objectName = gameObjectHandler?.Name ?? "Unknown"; + var objectName = session.ObjectName; // config toggles var configAllowsDirect = _configService.Current.EnableDirectDownloads; @@ -638,7 +891,7 @@ public partial class FileDownloadManager : DisposableMediatorSubscriberBase var replacementLookup = BuildReplacementLookup(fileReplacement); var rawSizeLookup = new Dictionary(StringComparer.OrdinalIgnoreCase); - foreach (var download in CurrentDownloads) + foreach (var download in session.Downloads) { if (string.IsNullOrWhiteSpace(download.Hash)) { @@ -654,7 +907,7 @@ public partial class FileDownloadManager : DisposableMediatorSubscriberBase var directDownloads = new List(); var batchDownloads = new List(); - foreach (var download in CurrentDownloads) + foreach (var download in session.Downloads) { if (!string.IsNullOrEmpty(download.DirectDownloadUrl) && allowDirectDownloads) directDownloads.Add(download); @@ -662,6 +915,60 @@ public partial class FileDownloadManager : DisposableMediatorSubscriberBase batchDownloads.Add(download); } + session.OwnedDownloads.Clear(); + var waitingHashes = new HashSet(StringComparer.OrdinalIgnoreCase); + var waitTasks = new List>(); + var claims = new Dictionary(StringComparer.OrdinalIgnoreCase); + + DownloadClaim GetClaim(string hash) + { + if (!claims.TryGetValue(hash, out var claim)) + { + claim = _downloadDeduplicator.Claim(hash); + claims[hash] = claim; + } + + return claim; + } + + List FilterOwned(List downloads) + { + if (downloads.Count == 0) + { + return downloads; + } + + var owned = new List(downloads.Count); + foreach (var download in downloads) + { + if (string.IsNullOrWhiteSpace(download.Hash)) + { + continue; + } + + var claim = GetClaim(download.Hash); + if 
(claim.IsOwner) + { + session.OwnedDownloads.TryAdd(download.Hash, 0); + owned.Add(download); + } + else if (waitingHashes.Add(download.Hash)) + { + waitTasks.Add(claim.Completion); + } + } + + return owned; + } + + directDownloads = FilterOwned(directDownloads); + batchDownloads = FilterOwned(batchDownloads); + + if (waitTasks.Count > 0) + { + Logger.LogDebug("{dlName}: {count} files already downloading elsewhere; waiting for completion.", objectName, waitTasks.Count); + } + // Chunk per host so we can fill all slots var slots = Math.Max(1, _configService.Current.ParallelDownloads); @@ -679,12 +986,12 @@ public partial class FileDownloadManager : DisposableMediatorSubscriberBase .ToArray(); // init statuses - _downloadStatus.Clear(); + session.Status.Clear(); // direct downloads and batch downloads tracked separately foreach (var d in directDownloads) { - _downloadStatus[d.DirectDownloadUrl!] = new FileDownloadStatus + session.Status[d.DirectDownloadUrl!] = new FileDownloadStatus { DownloadStatus = DownloadStatus.WaitingForSlot, TotalBytes = d.Total, @@ -696,7 +1003,7 @@ public partial class FileDownloadManager : DisposableMediatorSubscriberBase foreach (var chunk in batchChunks) { - _downloadStatus[chunk.StatusKey] = new FileDownloadStatus + session.Status[chunk.StatusKey] = new FileDownloadStatus { DownloadStatus = DownloadStatus.WaitingForQueue, TotalBytes = chunk.Items.Sum(x => x.Total), @@ -712,8 +1019,8 @@ public partial class FileDownloadManager : DisposableMediatorSubscriberBase directDownloads.Count, batchDownloads.Count, batchChunks.Length); } - if (gameObjectHandler is not null) - Mediator.Publish(new DownloadStartedMessage(gameObjectHandler, _downloadStatus)); + if (session.Handler is not null) + Mediator.Publish(new DownloadStartedMessage(session.Handler, session.Status)); // work based on cpu count and slots var coreCount = Environment.ProcessorCount; @@ -724,33 +1031,53 @@ public partial class FileDownloadManager : DisposableMediatorSubscriberBase 
var extraWorkers = (availableDecompressSlots > 0 && coreCount >= 6) ? 2 : 0; // allow some extra workers so downloads can continue while earlier items decompress. - var workerDop = Math.Clamp(baseWorkers + extraWorkers, 2, coreCount); + var workerDop = Math.Clamp(slots * 2, 2, 16); + var decompressionTasks = new ConcurrentBag(); + using var decompressionLimiter = new SemaphoreSlim(CalculateDecompressionLimit(slots)); // batch downloads Task batchTask = batchChunks.Length == 0 ? Task.CompletedTask : Parallel.ForEachAsync(batchChunks, new ParallelOptions { MaxDegreeOfParallelism = workerDop, CancellationToken = ct }, - async (chunk, token) => await ProcessBatchChunkAsync(chunk, replacementLookup, rawSizeLookup, token, skipDownscale, skipDecimation).ConfigureAwait(false)); + async (chunk, token) => await ProcessBatchChunkAsync(session, chunk, replacementLookup, rawSizeLookup, decompressionTasks, decompressionLimiter, token, skipDownscale, skipDecimation).ConfigureAwait(false)); // direct downloads Task directTask = directDownloads.Count == 0 ? Task.CompletedTask : Parallel.ForEachAsync(directDownloads, new ParallelOptions { MaxDegreeOfParallelism = workerDop, CancellationToken = ct }, - async (d, token) => await ProcessDirectAsync(d, replacementLookup, rawSizeLookup, token, skipDownscale, skipDecimation).ConfigureAwait(false)); + async (d, token) => await ProcessDirectAsync(session, d, replacementLookup, rawSizeLookup, decompressionTasks, decompressionLimiter, token, skipDownscale, skipDecimation).ConfigureAwait(false)); - await Task.WhenAll(batchTask, directTask).ConfigureAwait(false); + Task dedupWaitTask = waitTasks.Count == 0 + ? 
Task.FromResult(Array.Empty()) + : Task.WhenAll(waitTasks); - // process deferred compressions after all downloads complete - await ProcessDeferredCompressionsAsync(ct).ConfigureAwait(false); + try + { + await Task.WhenAll(batchTask, directTask).ConfigureAwait(false); + } + finally + { + await WaitForAllTasksAsync(decompressionTasks).ConfigureAwait(false); + } + + var dedupResults = await dedupWaitTask.ConfigureAwait(false); + + if (waitTasks.Count > 0 && dedupResults.Any(r => !r)) + { + Logger.LogWarning("{dlName}: One or more shared downloads failed; missing files may remain.", objectName); + } Logger.LogDebug("Download end: {id}", objectName); - ClearDownload(); + ClearDownload(session); } private async Task ProcessBatchChunkAsync( + DownloadSession session, BatchChunk chunk, Dictionary replacementLookup, IReadOnlyDictionary rawSizeLookup, + ConcurrentBag decompressionTasks, + SemaphoreSlim decompressionLimiter, CancellationToken ct, bool skipDownscale, bool skipDecimation) @@ -758,7 +1085,7 @@ public partial class FileDownloadManager : DisposableMediatorSubscriberBase var statusKey = chunk.StatusKey; // enqueue (no slot) - SetStatus(statusKey, DownloadStatus.WaitingForQueue); + SetStatus(session, statusKey, DownloadStatus.WaitingForQueue); var requestIdResponse = await _orchestrator.SendRequestAsync( HttpMethod.Post, @@ -771,22 +1098,49 @@ public partial class FileDownloadManager : DisposableMediatorSubscriberBase var blockFile = _fileDbManager.GetCacheFilePath(requestId.ToString("N"), "blk"); var fi = new FileInfo(blockFile); + var decompressionQueued = false; + try { - var progress = CreateInlineProgress(bytes => AddTransferredBytes(statusKey, bytes)); + // download (with slot) + var progress = CreateInlineProgress(bytes => AddTransferredBytes(session, statusKey, bytes)); // Download slot held on get - await DownloadQueuedBlockFileAsync(statusKey, requestId, chunk.Items, blockFile, progress, ct).ConfigureAwait(false); + await 
DownloadQueuedBlockFileAsync(session, statusKey, requestId, chunk.Items, blockFile, progress, ct).ConfigureAwait(false); // decompress if file exists if (!File.Exists(blockFile)) { Logger.LogWarning("{dlName}: Block file missing before extraction, skipping", fi.Name); - SetStatus(statusKey, DownloadStatus.Completed); + SetStatus(session, statusKey, DownloadStatus.Completed); return; } + SetStatus(session, statusKey, DownloadStatus.Decompressing); - await DecompressBlockFileAsync(statusKey, blockFile, replacementLookup, rawSizeLookup, fi.Name, ct, skipDownscale, skipDecimation).ConfigureAwait(false); + EnqueueLimitedTask( + decompressionTasks, + decompressionLimiter, + async token => + { + try + { + await DecompressBlockFileAsync(session, statusKey, blockFile, replacementLookup, rawSizeLookup, fi.Name, token, skipDownscale, skipDecimation) + .ConfigureAwait(false); + } + finally + { + try { File.Delete(blockFile); } catch {} + foreach (var item in chunk.Items) + { + if (!string.IsNullOrWhiteSpace(item.Hash)) + { + CompleteOwnedDownload(session, item.Hash, false); + } + } + } + }, + ct); + decompressionQueued = true; } catch (OperationCanceledException) { @@ -795,18 +1149,31 @@ public partial class FileDownloadManager : DisposableMediatorSubscriberBase catch (Exception ex) { Logger.LogError(ex, "{dlName}: Error during batch chunk processing", fi.Name); - ClearDownload(); + ClearDownload(session); } finally { - try { File.Delete(blockFile); } catch { /* ignore */ } + if (!decompressionQueued) + { + try { File.Delete(blockFile); } catch { /* ignore */ } + foreach (var item in chunk.Items) + { + if (!string.IsNullOrWhiteSpace(item.Hash)) + { + CompleteOwnedDownload(session, item.Hash, false); + } + } + } } } private async Task ProcessDirectAsync( + DownloadSession session, DownloadFileTransfer directDownload, Dictionary replacementLookup, IReadOnlyDictionary rawSizeLookup, + ConcurrentBag decompressionTasks, + SemaphoreSlim decompressionLimiter, CancellationToken ct, 
bool skipDownscale, bool skipDecimation) @@ -814,25 +1181,35 @@ public partial class FileDownloadManager : DisposableMediatorSubscriberBase var progress = CreateInlineProgress(bytes => { if (!string.IsNullOrEmpty(directDownload.DirectDownloadUrl)) - AddTransferredBytes(directDownload.DirectDownloadUrl!, bytes); + AddTransferredBytes(session, directDownload.DirectDownloadUrl!, bytes); }); if (!ShouldUseDirectDownloads() || string.IsNullOrEmpty(directDownload.DirectDownloadUrl)) { - await ProcessDirectAsQueuedFallbackAsync(directDownload, replacementLookup, rawSizeLookup, progress, ct, skipDownscale, skipDecimation).ConfigureAwait(false); + try + { + await ProcessDirectAsQueuedFallbackAsync(session, directDownload, replacementLookup, rawSizeLookup, progress, ct, skipDownscale, skipDecimation, decompressionTasks, decompressionLimiter).ConfigureAwait(false); + } + catch (Exception ex) + { + Logger.LogError(ex, "{hash}: Error during direct download fallback.", directDownload.Hash); + CompleteOwnedDownload(session, directDownload.Hash, false); + throw; + } return; } var tempFilename = _fileDbManager.GetCacheFilePath(directDownload.Hash, "bin"); + var decompressionQueued = false; try { // Download slot held on get - SetStatus(directDownload.DirectDownloadUrl!, DownloadStatus.WaitingForSlot); + SetStatus(session, directDownload.DirectDownloadUrl!, DownloadStatus.WaitingForSlot); await using ((await AcquireSlotAsync(ct).ConfigureAwait(false)).ConfigureAwait(false)) { - SetStatus(directDownload.DirectDownloadUrl!, DownloadStatus.Downloading); + SetStatus(session, directDownload.DirectDownloadUrl!, DownloadStatus.Downloading); Logger.LogDebug("Beginning direct download of {hash} from {url}", directDownload.Hash, directDownload.DirectDownloadUrl); await DownloadFileThrottled(new Uri(directDownload.DirectDownloadUrl!), tempFilename, progress, callback: null, ct, withToken: false) @@ -841,13 +1218,11 @@ public partial class FileDownloadManager : DisposableMediatorSubscriberBase 
Interlocked.Exchange(ref _consecutiveDirectDownloadFailures, 0); - // Decompress/write - SetStatus(directDownload.DirectDownloadUrl!, DownloadStatus.Decompressing); - if (!replacementLookup.TryGetValue(directDownload.Hash, out var repl)) { Logger.LogWarning("{hash}: No replacement data found for direct download.", directDownload.Hash); - SetStatus(directDownload.DirectDownloadUrl!, DownloadStatus.Completed); + SetStatus(session, directDownload.DirectDownloadUrl!, DownloadStatus.Completed); + CompleteOwnedDownload(session, directDownload.Hash, false); return; } @@ -856,22 +1231,68 @@ public partial class FileDownloadManager : DisposableMediatorSubscriberBase Logger.LogDebug("Decompressing direct download {hash} from {compressedFile} to {finalFile}", directDownload.Hash, tempFilename, finalFilename); - // Read compressed bytes and decompress in memory - byte[] compressedBytes = await File.ReadAllBytesAsync(tempFilename, ct).ConfigureAwait(false); - var decompressedBytes = LZ4Wrapper.Unwrap(compressedBytes); + SetStatus(session, directDownload.DirectDownloadUrl!, DownloadStatus.Decompressing); + EnqueueLimitedTask( + decompressionTasks, + decompressionLimiter, + async token => + { + try + { + var decompressedLength = await DecompressWrappedLz4ToFileAsync(tempFilename, finalFilename, token).ConfigureAwait(false); - if (directDownload.TotalRaw > 0 && decompressedBytes.LongLength != directDownload.TotalRaw) - { - throw new InvalidDataException( - $"{directDownload.Hash}: Decompressed size mismatch (expected {directDownload.TotalRaw}, got {decompressedBytes.LongLength})"); - } + if (directDownload.TotalRaw > 0 && decompressedLength != directDownload.TotalRaw) + { + throw new InvalidDataException( + $"{directDownload.Hash}: Decompressed size mismatch (expected {directDownload.TotalRaw}, got {decompressedLength})"); + } - await _fileCompactor.WriteAllBytesAsync(finalFilename, decompressedBytes, ct).ConfigureAwait(false); - PersistFileToStorage(directDownload.Hash, 
finalFilename, repl.GamePath, skipDownscale, skipDecimation); + _fileCompactor.NotifyFileWritten(finalFilename); + PersistFileToStorage(session, directDownload.Hash, finalFilename, repl.GamePath, skipDownscale, skipDecimation); - MarkTransferredFiles(directDownload.DirectDownloadUrl!, 1); - SetStatus(directDownload.DirectDownloadUrl!, DownloadStatus.Completed); - Logger.LogDebug("Finished direct download of {hash}.", directDownload.Hash); + MarkTransferredFiles(session, directDownload.DirectDownloadUrl!, 1); + SetStatus(session, directDownload.DirectDownloadUrl!, DownloadStatus.Completed); + Logger.LogDebug("Finished direct download of {hash}.", directDownload.Hash); + } + catch (Exception ex) + { + var expectedDirectDownloadFailure = ex is InvalidDataException; + var failureCount = expectedDirectDownloadFailure ? 0 : Interlocked.Increment(ref _consecutiveDirectDownloadFailures); + + if (expectedDirectDownloadFailure) + Logger.LogInformation(ex, "{hash}: Direct download unavailable, attempting queued fallback.", directDownload.Hash); + else + Logger.LogWarning(ex, "{hash}: Direct download failed, attempting queued fallback.", directDownload.Hash); + + try + { + await ProcessDirectAsQueuedFallbackAsync(session, directDownload, replacementLookup, rawSizeLookup, progress, token, skipDownscale, skipDecimation, decompressionTasks, decompressionLimiter).ConfigureAwait(false); + + if (!expectedDirectDownloadFailure && failureCount >= 3 && !_disableDirectDownloads) + { + _disableDirectDownloads = true; + Logger.LogWarning("Disabling direct downloads for this session after {count} consecutive failures.", failureCount); + } + } + catch (Exception fallbackEx) + { + Logger.LogError(fallbackEx, "{hash}: Error during direct download fallback.", directDownload.Hash); + CompleteOwnedDownload(session, directDownload.Hash, false); + SetStatus(session, directDownload.DirectDownloadUrl!, DownloadStatus.Completed); + ClearDownload(session); + } + } + finally + { + try { 
File.Delete(tempFilename); } + catch + { + // ignore + } + } + }, + ct); + decompressionQueued = true; } catch (OperationCanceledException ex) { @@ -880,7 +1301,8 @@ public partial class FileDownloadManager : DisposableMediatorSubscriberBase else Logger.LogWarning(ex, "{hash}: Direct download cancelled unexpectedly.", directDownload.Hash); - ClearDownload(); + CompleteOwnedDownload(session, directDownload.Hash, false); + ClearDownload(session); } catch (Exception ex) { @@ -894,7 +1316,7 @@ public partial class FileDownloadManager : DisposableMediatorSubscriberBase try { - await ProcessDirectAsQueuedFallbackAsync(directDownload, replacementLookup, rawSizeLookup, progress, ct, skipDownscale, skipDecimation).ConfigureAwait(false); + await ProcessDirectAsQueuedFallbackAsync(session, directDownload, replacementLookup, rawSizeLookup, progress, ct, skipDownscale, skipDecimation, decompressionTasks, decompressionLimiter).ConfigureAwait(false); if (!expectedDirectDownloadFailure && failureCount >= 3 && !_disableDirectDownloads) { @@ -905,34 +1327,41 @@ public partial class FileDownloadManager : DisposableMediatorSubscriberBase catch (Exception fallbackEx) { Logger.LogError(fallbackEx, "{hash}: Error during direct download fallback.", directDownload.Hash); - ClearDownload(); + CompleteOwnedDownload(session, directDownload.Hash, false); + ClearDownload(session); } } finally { - try { File.Delete(tempFilename); } - catch + if (!decompressionQueued) { - // ignore + try { File.Delete(tempFilename); } + catch + { + // ignore + } } } } private async Task ProcessDirectAsQueuedFallbackAsync( + DownloadSession session, DownloadFileTransfer directDownload, Dictionary replacementLookup, IReadOnlyDictionary rawSizeLookup, IProgress progress, CancellationToken ct, bool skipDownscale, - bool skipDecimation) + bool skipDecimation, + ConcurrentBag decompressionTasks, + SemaphoreSlim decompressionLimiter) { if (string.IsNullOrEmpty(directDownload.DirectDownloadUrl)) throw new 
InvalidOperationException("Direct download fallback requested without a direct download URL."); var statusKey = directDownload.DirectDownloadUrl!; - SetStatus(statusKey, DownloadStatus.WaitingForQueue); + SetStatus(session, statusKey, DownloadStatus.WaitingForQueue); var requestIdResponse = await _orchestrator.SendRequestAsync( HttpMethod.Post, @@ -942,23 +1371,46 @@ public partial class FileDownloadManager : DisposableMediatorSubscriberBase var requestId = Guid.Parse((await requestIdResponse.Content.ReadAsStringAsync(ct).ConfigureAwait(false)).Trim('"')); var blockFile = _fileDbManager.GetCacheFilePath(requestId.ToString("N"), "blk"); + var fi = new FileInfo(blockFile); + var decompressionQueued = false; try { - await DownloadQueuedBlockFileAsync(statusKey, requestId, [directDownload], blockFile, progress, ct).ConfigureAwait(false); + await DownloadQueuedBlockFileAsync(session, statusKey, requestId, [directDownload], blockFile, progress, ct).ConfigureAwait(false); if (!File.Exists(blockFile)) - throw new FileNotFoundException("Block file missing after direct download fallback.", blockFile); + { + Logger.LogWarning("{dlName}: Block file missing before extraction, skipping", fi.Name); + SetStatus(session, statusKey, DownloadStatus.Completed); + return; + } - await DecompressBlockFileAsync(statusKey, blockFile, replacementLookup, rawSizeLookup, $"fallback-{directDownload.Hash}", ct, skipDownscale, skipDecimation) - .ConfigureAwait(false); + SetStatus(session, statusKey, DownloadStatus.Decompressing); + EnqueueLimitedTask( + decompressionTasks, + decompressionLimiter, + async token => + { + try + { + await DecompressBlockFileAsync(session, statusKey, blockFile, replacementLookup, rawSizeLookup, $"fallback-{directDownload.Hash}", token, skipDownscale, skipDecimation) + .ConfigureAwait(false); + } + finally + { + try { File.Delete(blockFile); } catch {} + CompleteOwnedDownload(session, directDownload.Hash, false); + } + }, + ct); + decompressionQueued = true; } finally 
{ - try { File.Delete(blockFile); } - catch + if (!decompressionQueued) { - // ignore + try { File.Delete(blockFile); } catch {} + CompleteOwnedDownload(session, directDownload.Hash, false); } } } @@ -977,9 +1429,10 @@ public partial class FileDownloadManager : DisposableMediatorSubscriberBase return await response.Content.ReadFromJsonAsync>(cancellationToken: ct).ConfigureAwait(false) ?? []; } - private void PersistFileToStorage(string fileHash, string filePath, string gamePath, bool skipDownscale, bool skipDecimation) + private bool PersistFileToStorage(DownloadSession session, string fileHash, string filePath, string gamePath, bool skipDownscale, bool skipDecimation) { var fi = new FileInfo(filePath); + var persisted = false; Func RandomDayInThePast() { @@ -993,13 +1446,13 @@ public partial class FileDownloadManager : DisposableMediatorSubscriberBase fi.LastAccessTime = DateTime.Today; fi.LastWriteTime = RandomDayInThePast().Invoke(); - // queue file for deferred compression instead of compressing immediately - if (_configService.Current.UseCompactor) - _deferredCompressionQueue.Enqueue(filePath); - try { var entry = _fileDbManager.CreateCacheEntryWithKnownHash(filePath, fileHash); + if (entry != null && string.Equals(entry.Hash, fileHash, StringComparison.OrdinalIgnoreCase)) + { + persisted = true; + } if (!skipDownscale && _textureDownscaleService.ShouldScheduleDownscale(filePath)) { @@ -1021,62 +1474,67 @@ public partial class FileDownloadManager : DisposableMediatorSubscriberBase File.Delete(filePath); _fileDbManager.RemoveHashedFile(entry.Hash, entry.PrefixedFilePath); + persisted = false; } } catch (Exception ex) { Logger.LogWarning(ex, "Error creating cache entry"); } + finally + { + CompleteOwnedDownload(session, fileHash, persisted); + } + + return persisted; + } + + private static int CalculateDecompressionLimit(int downloadSlots) + { + var cpuBound = Math.Max(1, Math.Min(Environment.ProcessorCount, 4)); + return Math.Clamp(downloadSlots, 1, cpuBound); 
+ } + + private static Task EnqueueLimitedTask( + ConcurrentBag tasks, + SemaphoreSlim limiter, + Func work, + CancellationToken ct) + { + var task = Task.Run(async () => + { + await limiter.WaitAsync(ct).ConfigureAwait(false); + try + { + await work(ct).ConfigureAwait(false); + } + finally + { + limiter.Release(); + } + }, ct); + + tasks.Add(task); + return task; + } + + private static async Task WaitForAllTasksAsync(ConcurrentBag tasks) + { + while (true) + { + var snapshot = tasks.ToArray(); + if (snapshot.Length == 0) + return; + + await Task.WhenAll(snapshot).ConfigureAwait(false); + + if (tasks.Count == snapshot.Length) + return; + } } private static IProgress CreateInlineProgress(Action callback) => new InlineProgress(callback); - private async Task ProcessDeferredCompressionsAsync(CancellationToken ct) - { - if (_deferredCompressionQueue.IsEmpty) - return; - - var filesToCompress = new List(); - while (_deferredCompressionQueue.TryDequeue(out var filePath)) - { - if (File.Exists(filePath)) - filesToCompress.Add(filePath); - } - - if (filesToCompress.Count == 0) - return; - - Logger.LogDebug("Starting deferred compression of {count} files", filesToCompress.Count); - - var compressionWorkers = Math.Clamp(Environment.ProcessorCount / 4, 2, 4); - - await Parallel.ForEachAsync(filesToCompress, - new ParallelOptions - { - MaxDegreeOfParallelism = compressionWorkers, - CancellationToken = ct - }, - async (filePath, token) => - { - try - { - await Task.Yield(); - if (_configService.Current.UseCompactor && File.Exists(filePath)) - { - var bytes = await File.ReadAllBytesAsync(filePath, token).ConfigureAwait(false); - await _fileCompactor.WriteAllBytesAsync(filePath, bytes, token).ConfigureAwait(false); - Logger.LogTrace("Compressed file: {filePath}", filePath); - } - } - catch (Exception ex) - { - Logger.LogWarning(ex, "Failed to compress file: {filePath}", filePath); - } - }).ConfigureAwait(false); - - Logger.LogDebug("Completed deferred compression of {count} 
files", filesToCompress.Count); - } - private sealed class InlineProgress : IProgress { private readonly Action _callback; diff --git a/LightlessSync/packages.lock.json b/LightlessSync/packages.lock.json index 45d7722..5de5367 100644 --- a/LightlessSync/packages.lock.json +++ b/LightlessSync/packages.lock.json @@ -617,6 +617,12 @@ "resolved": "10.0.1", "contentHash": "xfaHEHVDkMOOZR5S6ZGezD0+vekdH1Nx/9Ih8/rOqOGSOk1fxiN3u94bYkBW/wigj0Uw2Wt3vvRj9mtYdgwEjw==" }, + "lightlesscompactor": { + "type": "Project", + "dependencies": { + "Microsoft.Extensions.Logging.Abstractions": "[10.0.1, )" + } + }, "lightlesssync.api": { "type": "Project", "dependencies": {