LightlessClient/LightlessSync/Services/CharacterAnalyzer.cs
defnotken 72a62b7449
2.1.0 (#123)
# Patchnotes 2.1.0
The changes in this update are more than just "patches": with a new UI, a new feature, a bunch of bug fixes and improvements, and a new member on the dev team, we felt this warranted a minor version bump rather than a patch.

We would like to introduce @tsubasahane of MareCN to the team! We’re happy to work with them to bring Lightless and its features to the CN client, and to have another talented dev bringing features and ideas to us. Speaking of which:

# Location Sharing (Big shout out to @tsubasahane for bringing this feature)

- Are you TIRED of scrambling to find the address of the venue you're in to share with your friends? We are introducing Location Sharing: an optional feature where you can share your location with direct pairs temporarily (30 minutes, 1 hour, or 3 hours) or until you turn it off for them. That's up to you! (A small illustrative sketch of the idea follows this list.) [#125](<#125>)  [#49](<Lightless-Sync/LightlessServer#49>)
- To share your location with a pair, click the three dots beside the pair and choose a duration to share with them. [#125](<#125>)  [#49](<Lightless-Sync/LightlessServer#49>)
- To view the location of someone who's shared with you, simply hover over the globe icon! [#125](<#125>)  [#49](<Lightless-Sync/LightlessServer#49>)
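
To keep the notes concrete, here is a purely illustrative sketch of the idea behind a timed share. The record, its names, and the expiry logic are assumptions for illustration, not the actual Lightless client or server API: a share granted to a direct pair either expires after the chosen duration or stays active until you turn it off.

```csharp
using System;

// Hypothetical model for illustration only; not the Lightless implementation.
public sealed record LocationShare(string PairUid, DateTimeOffset GrantedAt, TimeSpan? Duration)
{
    // The durations offered in the pair menu: 30 minutes, 1 hour, 3 hours, or open-ended.
    public static readonly TimeSpan?[] OfferedDurations =
    [
        TimeSpan.FromMinutes(30),
        TimeSpan.FromHours(1),
        TimeSpan.FromHours(3),
        null // open-ended: stays active until you turn it off for that pair
    ];

    // A null duration never expires on its own; otherwise compare against the grant time.
    public bool IsActive(DateTimeOffset now)
        => Duration is null || now < GrantedAt + Duration.Value;
}
```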


# Model Optimization (Mesh Decimating)
 - This new option can automatically “simplify” incoming character meshes by reducing their triangle counts, which helps performance. You choose how strong the reduction is (the default/recommended strength is 80%). [#131](<#131>)
 - Decimation only kicks in when a mesh is above a certain triangle threshold, and only for item types that qualify and that you have selected (a rough sketch of this selection logic follows this list). [#131](<#131>)
 - Hair meshes are always excluded, since simplifying hair meshes is very prone to breaking.
 - You can find everything under Settings → Performance → Model Optimization. [#131](<#131>)
 - **IF YOU HAVE USED DECIMATION IN TESTING, PLEASE CLEAR YOUR CACHE**
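
As a rough sketch of the selection logic described above (illustrative only: the threshold value, the class and parameter names, and the reading of the strength as "fraction of triangles removed" are all assumptions, not the actual Lightless decimator):

```csharp
using System;

// Illustrative only; hypothetical names and threshold, not the Lightless implementation.
public static class DecimationSketch
{
    private const long TriangleThreshold = 50_000; // assumed cutoff; smaller meshes are left alone

    // Returns the target triangle count for a mesh, or the original count when it is skipped.
    public static long TargetTriangles(string slot, long triangles, bool userSelectedThisType, float reductionStrength = 0.80f)
    {
        if (!userSelectedThisType)
            return triangles; // only item types you selected are decimated

        // Hair is always excluded because simplifying hair meshes is prone to breaking.
        if (string.Equals(slot, "hair", StringComparison.OrdinalIgnoreCase))
            return triangles;

        // Decimation only kicks in above the triangle threshold.
        if (triangles <= TriangleThreshold)
            return triangles;

        // Assuming the strength is the fraction of triangles removed (default 80%).
        return (long)(triangles * (1f - reductionStrength));
    }
}
```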


# Animation (PAP) Validation (Safer animations)
 - Lightless now checks your current animations to see whether they work with your local skeleton/bone mod. If an animation matches, it’s included in what gets sent to other players. If it doesn’t, Lightless skips it and writes a warning to your log showing how many were skipped due to skeleton changes (a simplified sketch of this check follows this list). It defaults to Unsafe (off); turn it on if you experience crashes caused by other users. [#131](<#131>)
 - Lightless also does the same kind of check for incoming animation files, to make sure they match the body/skeleton they were sent with. [#131](<#131>)
 - Because these checks can sometimes be a little picky, you can adjust how strict they are in Settings → General → Animation & Bones to reduce false positives. [#131](<#131>)
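
As a simplified sketch of what such a check does (illustrative only: none of these names come from the Lightless code, and the .pap bone reader is stubbed out as a delegate):

```csharp
using System;
using System.Collections.Generic;
using System.Linq;
using Microsoft.Extensions.Logging;

// Illustrative only; not the actual Lightless validator.
public static class PapValidationSketch
{
    // Keeps only the animations whose animated bones all exist in the local skeleton,
    // and logs how many were skipped, mirroring the behaviour described above.
    public static List<string> FilterOutgoing(
        IEnumerable<string> papFiles,
        ISet<string> localSkeletonBones,
        Func<string, IReadOnlyCollection<string>> readAnimatedBones, // hypothetical .pap bone reader
        ILogger logger)
    {
        var kept = new List<string>();
        var skipped = 0;

        foreach (var pap in papFiles)
        {
            if (readAnimatedBones(pap).All(localSkeletonBones.Contains))
                kept.Add(pap);
            else
                skipped++; // bone set does not match the local skeleton/bone mod
        }

        if (skipped > 0)
            logger.LogWarning("Skipped {Skipped} animation(s) due to skeleton changes", skipped);

        return kept;
    }
}
```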

# UI Changes (Thanks to @kyuwu for UI Changes)
- The top part of the main screen has gotten a makeover. You can adjust the colors of the gradient in Lightless's Color settings. [#127](<#127>)


- The Settings window has been updated as well to make the new look more universal, and it uses the same color settings. [#127](<#127>)
- The particle effects of the gradient are toggleable in 'Settings → UI → Behavior'. [#127](<#127>)
- Instead of showing download/upload at the bottom of the main UI, it now shows VRAM usage and triangle counts, with their optimization options next to them. [#138](<#138>)

# LightFinder / ShellFinder
- The LightFinder and ShellFinder windows now follow our new design and use the same gradient color settings as the top of the main screen. [#127](<#127>)


Co-authored-by: defnotken <itsdefnotken@gmail.com>
Co-authored-by: azyges <aaaaaa@aaa.aaa>
Co-authored-by: cake <admin@cakeandbanana.nl>
Co-authored-by: Tsubasa <tsubasa@noreply.git.lightless-sync.org>
Co-authored-by: choco <choco@patat.nl>
Co-authored-by: celine <aaa@aaa.aaa>
Co-authored-by: celine <celine@noreply.git.lightless-sync.org>
Co-authored-by: Tsubasahane <wozaiha@gmail.com>
Co-authored-by: cake <cake@noreply.git.lightless-sync.org>
Reviewed-on: #123
2026-01-20 19:43:00 +00:00


using FFXIVClientStructs.FFXIV.Client.Graphics.Render;
using LightlessSync.API.Data;
using LightlessSync.API.Data.Enum;
using LightlessSync.FileCache;
using LightlessSync.Services.CharaData;
using LightlessSync.Services.CharaData.Models;
using LightlessSync.Services.Mediator;
using LightlessSync.UI;
using LightlessSync.Utils;
using Lumina.Data.Files;
using Microsoft.Extensions.Logging;
using System.Collections.Immutable;

namespace LightlessSync.Services;

public sealed class CharacterAnalyzer : MediatorSubscriberBase, IDisposable
{
    private readonly FileCacheManager _fileCacheManager;
    private readonly XivDataAnalyzer _xivDataAnalyzer;
    private CancellationTokenSource? _analysisCts;
    private CancellationTokenSource _baseAnalysisCts = new();
    private string _lastDataHash = string.Empty;
    private CharacterAnalysisSummary _latestSummary = CharacterAnalysisSummary.Empty;

    public CharacterAnalyzer(ILogger<CharacterAnalyzer> logger, LightlessMediator mediator, FileCacheManager fileCacheManager, XivDataAnalyzer modelAnalyzer)
        : base(logger, mediator)
    {
        // Kick off a fresh base analysis whenever new character data is created, cancelling any previous run.
        Mediator.Subscribe<CharacterDataCreatedMessage>(this, (msg) =>
        {
            _baseAnalysisCts = _baseAnalysisCts.CancelRecreate();
            var token = _baseAnalysisCts.Token;
            _ = Task.Run(async () => await BaseAnalysis(msg.CharacterData, token).ConfigureAwait(false), token);
        });
        _fileCacheManager = fileCacheManager;
        _xivDataAnalyzer = modelAnalyzer;
    }

    public int CurrentFile { get; internal set; }
    public bool IsAnalysisRunning => _analysisCts != null;
    public int TotalFiles { get; internal set; }
    internal Dictionary<ObjectKind, Dictionary<string, FileDataEntry>> LastAnalysis { get; } = [];
    public CharacterAnalysisSummary LatestSummary => _latestSummary;

    public void CancelAnalyze()
    {
        _analysisCts?.CancelDispose();
        _analysisCts = null;
    }

    public async Task ComputeAnalysis(bool print = true, bool recalculate = false)
    {
        Logger.LogDebug("=== Calculating Character Analysis ===");
        _analysisCts = _analysisCts?.CancelRecreate() ?? new();
        var cancelToken = _analysisCts.Token;
        var allFiles = LastAnalysis.SelectMany(v => v.Value.Select(d => d.Value)).ToList();
        var remaining = allFiles.Where(c => !c.IsComputed || recalculate).ToList();
        if (remaining.Count == 0)
            return;

        TotalFiles = remaining.Count;
        CurrentFile = 0;
        Logger.LogDebug("=== Computing {amount} remaining files ===", remaining.Count);

        // Halt the file scanner while sizes are computed; it is resumed in the finally block.
        Mediator.Publish(new HaltScanMessage(nameof(CharacterAnalyzer)));
        try
        {
            foreach (var file in remaining)
            {
                cancelToken.ThrowIfCancellationRequested();
                var path = file.FilePaths.FirstOrDefault() ?? "<unknown>";
                Logger.LogDebug("Computing file {file}", path);
                await file.ComputeSizes(_fileCacheManager, cancelToken).ConfigureAwait(false);
                CurrentFile++;
            }
            await _fileCacheManager.WriteOutFullCsvAsync(cancelToken).ConfigureAwait(false);
        }
        catch (OperationCanceledException)
        {
            Logger.LogInformation("File analysis cancelled");
            throw;
        }
        catch (Exception ex)
        {
            Logger.LogWarning(ex, "Failed to analyze files");
        }
        finally
        {
            Mediator.Publish(new ResumeScanMessage(nameof(CharacterAnalyzer)));
        }

        RecalculateSummary();
        Mediator.Publish(new CharacterDataAnalyzedMessage());
        _analysisCts.CancelDispose();
        _analysisCts = null;
        if (print) PrintAnalysis();
    }

    public void Dispose()
    {
        _analysisCts?.CancelDispose();
        _baseAnalysisCts.Dispose();
    }

    public async Task UpdateFileEntriesAsync(IEnumerable<string> filePaths, CancellationToken token, bool force = false)
    {
        // Recomputes sizes (and triangle counts for model files) for entries whose file paths match the given set.
        var normalized = new HashSet<string>(
            filePaths.Where(path => !string.IsNullOrWhiteSpace(path)),
            StringComparer.OrdinalIgnoreCase);
        if (normalized.Count == 0)
        {
            return;
        }

        var updated = false;
        foreach (var objectEntries in LastAnalysis.Values)
        {
            foreach (var entry in objectEntries.Values)
            {
                if (!entry.FilePaths.Exists(path => normalized.Contains(path)))
                {
                    continue;
                }

                token.ThrowIfCancellationRequested();
                await entry.ComputeSizes(_fileCacheManager, token, force).ConfigureAwait(false);
                if (string.Equals(entry.FileType, "mdl", StringComparison.OrdinalIgnoreCase))
                {
                    var sourcePath = entry.FilePaths.FirstOrDefault(path => !string.IsNullOrWhiteSpace(path));
                    if (!string.IsNullOrWhiteSpace(sourcePath))
                    {
                        entry.UpdateTriangles(_xivDataAnalyzer.RefreshTrianglesForPath(entry.Hash, sourcePath));
                    }
                }
                updated = true;
            }
        }

        if (updated)
        {
            RecalculateSummary();
            Mediator.Publish(new CharacterDataAnalyzedMessage());
        }
    }

    private async Task BaseAnalysis(CharacterData charaData, CancellationToken token)
    {
        // Skip if the character data hash is unchanged since the last analysis.
        if (string.Equals(charaData.DataHash.Value, _lastDataHash, StringComparison.Ordinal)) return;

        LastAnalysis.Clear();
        foreach (var obj in charaData.FileReplacements)
        {
            Dictionary<string, FileDataEntry> data = new(StringComparer.OrdinalIgnoreCase);
            foreach (var fileEntry in obj.Value)
            {
                token.ThrowIfCancellationRequested();
                var fileCacheEntries = (await _fileCacheManager
                    .GetAllFileCachesByHashAsync(fileEntry.Hash, ignoreCacheEntries: true, validate: false, token)
                    .ConfigureAwait(false))
                    .ToList();
                if (fileCacheEntries.Count == 0)
                    continue;

                var resolved = fileCacheEntries[0].ResolvedFilepath;
                var extWithDot = Path.GetExtension(resolved);
                var ext = string.IsNullOrEmpty(extWithDot) ? "unk?" : extWithDot.TrimStart('.');
                var tris = await _xivDataAnalyzer.GetTrianglesByHash(fileEntry.Hash).ConfigureAwait(false);
                var distinctFilePaths = fileCacheEntries
                    .Select(c => c.ResolvedFilepath)
                    .Where(p => !string.IsNullOrWhiteSpace(p))
                    .Distinct(StringComparer.OrdinalIgnoreCase)
                    .ToList();

                // Prefer sizes already stored on the cache entry, falling back to the cached size info by hash.
                long orig = 0, comp = 0;
                var first = fileCacheEntries[0];
                if (first.Size > 0) orig = first.Size.Value;
                if (first.CompressedSize > 0) comp = first.CompressedSize.Value;
                if (_fileCacheManager.TryGetSizeInfo(fileEntry.Hash, out var cached))
                {
                    if (orig <= 0 && cached.Original > 0) orig = cached.Original;
                    if (comp <= 0 && cached.Compressed > 0) comp = cached.Compressed;
                }

                data[fileEntry.Hash] = new FileDataEntry(
                    fileEntry.Hash,
                    ext,
                    [.. fileEntry.GamePaths],
                    distinctFilePaths,
                    orig,
                    comp,
                    tris,
                    fileCacheEntries);
            }
            LastAnalysis[obj.Key] = data;
        }

        RecalculateSummary();
        Mediator.Publish(new CharacterDataAnalyzedMessage());
        _lastDataHash = charaData.DataHash.Value;
    }

    private void RecalculateSummary()
    {
        var builder = ImmutableDictionary.CreateBuilder<ObjectKind, CharacterAnalysisObjectSummary>();
        foreach (var (objectKind, entries) in LastAnalysis)
        {
            long totalTriangles = 0;
            long texOriginalBytes = 0;
            long texCompressedBytes = 0;
            foreach (var entry in entries.Values)
            {
                totalTriangles += entry.Triangles;
                if (string.Equals(entry.FileType, "tex", StringComparison.OrdinalIgnoreCase))
                {
                    texOriginalBytes += entry.OriginalSize;
                    texCompressedBytes += entry.CompressedSize;
                }
            }
            builder[objectKind] = new CharacterAnalysisObjectSummary(entries.Count, totalTriangles, texOriginalBytes, texCompressedBytes);
        }
        _latestSummary = new CharacterAnalysisSummary(builder.ToImmutable());
    }

    private void PrintAnalysis()
    {
        if (LastAnalysis.Count == 0) return;
        foreach (var kvp in LastAnalysis)
        {
            int fileCounter = 1;
            int totalFiles = kvp.Value.Count;
            Logger.LogInformation("=== Analysis for {obj} ===", kvp.Key);
            foreach (var entry in kvp.Value.OrderBy(b => b.Value.GamePaths.OrderBy(p => p, StringComparer.Ordinal).First(), StringComparer.Ordinal))
            {
                Logger.LogInformation("File {x}/{y}: {hash}", fileCounter++, totalFiles, entry.Key);
                foreach (var path in entry.Value.GamePaths)
                {
                    Logger.LogInformation(" Game Path: {path}", path);
                }
                if (entry.Value.FilePaths.Count > 1) Logger.LogInformation(" Multiple fitting files detected for {key}", entry.Key);
                foreach (var filePath in entry.Value.FilePaths)
                {
                    Logger.LogInformation(" File Path: {path}", filePath);
                }
                Logger.LogInformation(" Size: {size}, Compressed: {compressed}", UiSharedService.ByteToString(entry.Value.OriginalSize),
                    UiSharedService.ByteToString(entry.Value.CompressedSize));
            }
        }
        foreach (var kvp in LastAnalysis)
        {
            Logger.LogInformation("=== Detailed summary by file type for {obj} ===", kvp.Key);
            foreach (var entry in kvp.Value.Select(v => v.Value).GroupBy(v => v.FileType, StringComparer.Ordinal))
            {
                Logger.LogInformation("{ext} files: {count}, size extracted: {size}, size compressed: {sizeComp}", entry.Key, entry.Count(),
                    UiSharedService.ByteToString(entry.Sum(v => v.OriginalSize)), UiSharedService.ByteToString(entry.Sum(v => v.CompressedSize)));
            }
            Logger.LogInformation("=== Total summary for {obj} ===", kvp.Key);
            Logger.LogInformation("Total files: {count}, size extracted: {size}, size compressed: {sizeComp}", kvp.Value.Count,
                UiSharedService.ByteToString(kvp.Value.Sum(v => v.Value.OriginalSize)), UiSharedService.ByteToString(kvp.Value.Sum(v => v.Value.CompressedSize)));
        }
        Logger.LogInformation("=== Total summary for all currently present objects ===");
        Logger.LogInformation("Total files: {count}, size extracted: {size}, size compressed: {sizeComp}",
            LastAnalysis.Values.Sum(v => v.Values.Count),
            UiSharedService.ByteToString(LastAnalysis.Values.Sum(c => c.Values.Sum(v => v.OriginalSize))),
            UiSharedService.ByteToString(LastAnalysis.Values.Sum(c => c.Values.Sum(v => v.CompressedSize))));
        Logger.LogInformation("IMPORTANT NOTES:\n\r- For Lightless up- and downloads only the compressed size is relevant.\n\r- An unusually high total files count beyond 200 and up will also increase your download time to others significantly.");
    }

    internal sealed class FileDataEntry
    {
        public string Hash { get; }
        public string FileType { get; }
        public List<string> GamePaths { get; }
        public List<string> FilePaths { get; }
        public long OriginalSize { get; private set; }
        public long CompressedSize { get; private set; }
        public long Triangles { get; private set; }
        public IReadOnlyList<FileCacheEntity> CacheEntries { get; }
        public bool IsComputed => OriginalSize > 0 && CompressedSize > 0;

        public FileDataEntry(
            string hash,
            string fileType,
            List<string> gamePaths,
            List<string> filePaths,
            long originalSize,
            long compressedSize,
            long triangles,
            IReadOnlyList<FileCacheEntity> cacheEntries)
        {
            Hash = hash;
            FileType = fileType;
            GamePaths = gamePaths;
            FilePaths = filePaths;
            OriginalSize = originalSize;
            CompressedSize = compressedSize;
            Triangles = triangles;
            CacheEntries = cacheEntries;
        }

        public async Task ComputeSizes(FileCacheManager fileCacheManager, CancellationToken token, bool force = false)
        {
            if (!force && IsComputed)
                return;
            if (FilePaths.Count == 0 || string.IsNullOrWhiteSpace(FilePaths[0]))
                return;
            var path = FilePaths[0];
            if (!File.Exists(path))
                return;

            var original = new FileInfo(path).Length;
            var compressedLen = await fileCacheManager.GetCompressedSizeAsync(Hash, token).ConfigureAwait(false);
            // Non-texture files with no known compressed size fall back to the original size.
            if (compressedLen <= 0 && !string.Equals(FileType, "tex", StringComparison.OrdinalIgnoreCase))
            {
                compressedLen = original;
            }
            fileCacheManager.SetSizeInfo(Hash, original, compressedLen);
            FileCacheManager.ApplySizesToEntries(CacheEntries, original, compressedLen);
            OriginalSize = original;
            CompressedSize = compressedLen;
            if (string.Equals(FileType, "tex", StringComparison.OrdinalIgnoreCase))
                RefreshFormat();
        }

        public Lazy<string> Format => _format ??= CreateFormatValue();
        private Lazy<string>? _format;

        public void RefreshFormat() => _format = CreateFormatValue();

        public void UpdateTriangles(long triangles) => Triangles = triangles;

        private Lazy<string> CreateFormatValue()
            => new(() =>
            {
                if (!string.Equals(FileType, "tex", StringComparison.OrdinalIgnoreCase))
                    return string.Empty;
                try
                {
                    // Reads the TextureFormat value stored at offset 4 of the .tex file.
                    using var stream = new FileStream(FilePaths[0], FileMode.Open, FileAccess.Read, FileShare.Read);
                    using var reader = new BinaryReader(stream);
                    reader.BaseStream.Position = 4;
                    var format = (TexFile.TextureFormat)reader.ReadInt32();
                    return format.ToString();
                }
                catch
                {
                    return "Unknown";
                }
            });
    }
}
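
For orientation, here is a minimal sketch of how a caller that already has this service injected might drive a recalculation, using only the public members shown above. The `AnalysisRunner` class itself is hypothetical and not part of the repository.

```csharp
using System.Threading.Tasks;
using LightlessSync.Services;

// Hypothetical consumer for illustration; in the real plugin CharacterAnalyzer comes from DI.
public sealed class AnalysisRunner(CharacterAnalyzer analyzer)
{
    public async Task RunAsync(bool recalculate = false)
    {
        if (analyzer.IsAnalysisRunning)
            return; // a computation is already in progress

        // Computes sizes for entries that are not yet computed (or all of them when recalculate
        // is true) and skips the detailed log printout.
        await analyzer.ComputeAnalysis(print: false, recalculate: recalculate).ConfigureAwait(false);

        // Progress is exposed via CurrentFile/TotalFiles while the computation runs;
        // the per-object totals are available afterwards through LatestSummary.
        var summary = analyzer.LatestSummary;
        _ = summary; // hand the summary to whatever UI or log output the caller uses
    }
}
```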