2.1.0 (#123)
All checks were successful
Tag and Release Lightless / tag-and-release (push) Successful in 2m9s
All checks were successful
Tag and Release Lightless / tag-and-release (push) Successful in 2m9s
# Patchnotes 2.1.0 The changes in this update are more than just "patches". With a new UI, a new feature, and a bunch of bug fixes, improvements and a new member on the dev team, we thought this was more of a minor update. We would like to introduce @tsubasahane of MareCN to the team! We’re happy to work with them to bring Lightless and its features to the CN client as well as having another talented dev bring features and ideas to us. Speaking of which: # Location Sharing (Big shout out to @tsubasahane for bringing this feature) - Are you TIRED of scrambling to find the address of the venue you're in to share with your friends? We are introducing Location Sharing! An optional feature where you can share your location with direct pairs temporarily [30 minutes, 1 hour, 3 hours] or until you turn it off for them. That's up to you! [#125](<#125>) [#49](<Lightless-Sync/LightlessServer#49>) - To share your location with a pair, click the three dots beside the pair and choose a duration to share with them. [#125](<#125>) [#49](<Lightless-Sync/LightlessServer#49>) - To view the location of someone who's shared with you, simply hover over the globe icon! [#125](<#125>) [#49](<Lightless-Sync/LightlessServer#49>) [1] # Model Optimization (Mesh Decimating) - This new option can automatically “simplify” incoming character meshes to help performance by reducing triangle counts. You choose how strong the reduction is (default/recommended is 80%). [#131](<#131>) - Decimation only kicks in when a mesh is above a certain triangle threshold, and only for the items that qualify for it and that you selected. [#131](<#131>) - Hair meshes are always excluded, since simplifying hair meshes is very prone to breaking. - You can find everything under Settings → Performance → Model Optimization. 
[#131](<#131>) + ** IF YOU HAVE USED DECIMATION IN TESTING, PLEASE CLEAR YOUR CACHE ❗ ** [2] # Animation (PAP) Validation (Safer animations) - Lightless now checks your current animations to see if they work with your local skeleton/bone mod. If an animation matches, it’s included in what gets sent to other players. If it doesn’t, Lightless will skip it and write a warning to your log showing how many were skipped due to skeleton changes. It defaults to Unsafe (off). Turn it on if you experience crashes from other users. [#131](<#131>) - Lightless also does the same kind of check for incoming animation files, to make sure they match the body/skeleton they were sent with. [#131](<#131>) - Because these checks can sometimes be a little picky, you can adjust how strict they are in Settings -> General -> Animation & Bones to reduce false positives. [#131](<#131>) # UI Changes (Thanks to @kyuwu for UI Changes) - The top part of the main screen has gotten a makeover. You can adjust the colors of the gradient in the Color settings of Lightless. [#127](<#127>) [3] - Settings have gotten some changes as well to make this change more universal, and will use the same color settings. [#127](<#127>) - The particle effects of the gradient are toggleable in 'Settings -> UI -> Behavior' [#127](<#127>) - Instead of showing download/upload on the bottom of the Main UI, it will show VRAM usage and triangles with their optimization options next to it [#138](<#138>) # LightFinder / ShellFinder - UI changes following our new design use the color codes for the gradient top, as the main screen does. 
[#127](<#127>) [4] Co-authored-by: defnotken <itsdefnotken@gmail.com> Co-authored-by: azyges <aaaaaa@aaa.aaa> Co-authored-by: cake <admin@cakeandbanana.nl> Co-authored-by: Tsubasa <tsubasa@noreply.git.lightless-sync.org> Co-authored-by: choco <choco@patat.nl> Co-authored-by: celine <aaa@aaa.aaa> Co-authored-by: celine <celine@noreply.git.lightless-sync.org> Co-authored-by: Tsubasahane <wozaiha@gmail.com> Co-authored-by: cake <cake@noreply.git.lightless-sync.org> Reviewed-on: #123
This commit was merged in pull request #123.
This commit is contained in:
@@ -27,6 +27,7 @@ public sealed class FileCacheManager : IHostedService
|
||||
private readonly ConcurrentDictionary<string, ConcurrentDictionary<string, FileCacheEntity>> _fileCaches = new(StringComparer.Ordinal);
|
||||
private readonly ConcurrentDictionary<string, FileCacheEntity> _fileCachesByPrefixedPath = new(StringComparer.OrdinalIgnoreCase);
|
||||
private readonly SemaphoreSlim _getCachesByPathsSemaphore = new(1, 1);
|
||||
private readonly SemaphoreSlim _evictSemaphore = new(1, 1);
|
||||
private readonly Lock _fileWriteLock = new();
|
||||
private readonly IpcManager _ipcManager;
|
||||
private readonly ILogger<FileCacheManager> _logger;
|
||||
@@ -114,6 +115,35 @@ public sealed class FileCacheManager : IHostedService
|
||||
return true;
|
||||
}
|
||||
|
||||
private static bool TryGetHashFromFileName(FileInfo fileInfo, out string hash)
|
||||
{
|
||||
hash = Path.GetFileNameWithoutExtension(fileInfo.Name);
|
||||
if (string.IsNullOrWhiteSpace(hash))
|
||||
{
|
||||
return false;
|
||||
}
|
||||
|
||||
if (hash.Length is not (40 or 64))
|
||||
{
|
||||
return false;
|
||||
}
|
||||
|
||||
for (var i = 0; i < hash.Length; i++)
|
||||
{
|
||||
var c = hash[i];
|
||||
var isHex = (c >= '0' && c <= '9')
|
||||
|| (c >= 'a' && c <= 'f')
|
||||
|| (c >= 'A' && c <= 'F');
|
||||
if (!isHex)
|
||||
{
|
||||
return false;
|
||||
}
|
||||
}
|
||||
|
||||
hash = hash.ToUpperInvariant();
|
||||
return true;
|
||||
}
|
||||
|
||||
private static string BuildVersionHeader() => $"{FileCacheVersionHeaderPrefix}{FileCacheVersion}";
|
||||
|
||||
private static bool TryParseVersionHeader(string? line, out int version)
|
||||
@@ -226,13 +256,23 @@ public sealed class FileCacheManager : IHostedService
|
||||
var compressed = LZ4Wrapper.WrapHC(raw, 0, raw.Length);
|
||||
|
||||
var tmpPath = compressedPath + ".tmp";
|
||||
await File.WriteAllBytesAsync(tmpPath, compressed, token).ConfigureAwait(false);
|
||||
File.Move(tmpPath, compressedPath, overwrite: true);
|
||||
try
|
||||
{
|
||||
await File.WriteAllBytesAsync(tmpPath, compressed, token).ConfigureAwait(false);
|
||||
File.Move(tmpPath, compressedPath, overwrite: true);
|
||||
}
|
||||
finally
|
||||
{
|
||||
try { if (File.Exists(tmpPath)) File.Delete(tmpPath); } catch { /* ignore */ }
|
||||
}
|
||||
|
||||
var compressedSize = compressed.LongLength;
|
||||
var compressedSize = new FileInfo(compressedPath).Length;
|
||||
SetSizeInfo(hash, originalSize, compressedSize);
|
||||
UpdateEntitiesSizes(hash, originalSize, compressedSize);
|
||||
|
||||
var maxBytes = GiBToBytes(_configService.Current.MaxLocalCacheInGiB);
|
||||
await EnforceCacheLimitAsync(maxBytes, token).ConfigureAwait(false);
|
||||
|
||||
return compressed;
|
||||
}
|
||||
finally
|
||||
@@ -277,9 +317,34 @@ public sealed class FileCacheManager : IHostedService
|
||||
_logger.LogTrace("Creating cache entry for {path}", path);
|
||||
var cacheFolder = _configService.Current.CacheFolder;
|
||||
if (string.IsNullOrEmpty(cacheFolder)) return null;
|
||||
if (TryGetHashFromFileName(fi, out var hash))
|
||||
{
|
||||
return CreateCacheEntryWithKnownHash(fi.FullName, hash);
|
||||
}
|
||||
|
||||
return CreateFileEntity(cacheFolder, CachePrefix, fi);
|
||||
}
|
||||
|
||||
public FileCacheEntity? CreateCacheEntryWithKnownHash(string path, string hash)
|
||||
{
|
||||
if (string.IsNullOrWhiteSpace(hash))
|
||||
{
|
||||
return CreateCacheEntry(path);
|
||||
}
|
||||
|
||||
FileInfo fi = new(path);
|
||||
if (!fi.Exists) return null;
|
||||
_logger.LogTrace("Creating cache entry for {path} using provided hash", path);
|
||||
var cacheFolder = _configService.Current.CacheFolder;
|
||||
if (string.IsNullOrEmpty(cacheFolder)) return null;
|
||||
if (!TryBuildPrefixedPath(fi.FullName, cacheFolder, CachePrefix, out var prefixedPath, out _))
|
||||
{
|
||||
return null;
|
||||
}
|
||||
|
||||
return CreateFileCacheEntity(fi, prefixedPath, hash);
|
||||
}
|
||||
|
||||
public FileCacheEntity? CreateFileEntry(string path)
|
||||
{
|
||||
FileInfo fi = new(path);
|
||||
@@ -562,9 +627,10 @@ public sealed class FileCacheManager : IHostedService
|
||||
}
|
||||
}
|
||||
|
||||
public void RemoveHashedFile(string hash, string prefixedFilePath)
|
||||
public void RemoveHashedFile(string hash, string prefixedFilePath, bool removeDerivedFiles = true)
|
||||
{
|
||||
var normalizedPath = NormalizePrefixedPathKey(prefixedFilePath);
|
||||
var removedHash = false;
|
||||
|
||||
if (_fileCaches.TryGetValue(hash, out var caches))
|
||||
{
|
||||
@@ -577,11 +643,16 @@ public sealed class FileCacheManager : IHostedService
|
||||
|
||||
if (caches.IsEmpty)
|
||||
{
|
||||
_fileCaches.TryRemove(hash, out _);
|
||||
removedHash = _fileCaches.TryRemove(hash, out _);
|
||||
}
|
||||
}
|
||||
|
||||
_fileCachesByPrefixedPath.TryRemove(normalizedPath, out _);
|
||||
|
||||
if (removeDerivedFiles && removedHash)
|
||||
{
|
||||
RemoveDerivedCacheFiles(hash);
|
||||
}
|
||||
}
|
||||
|
||||
public void UpdateHashedFile(FileCacheEntity fileCache, bool computeProperties = true)
|
||||
@@ -597,7 +668,8 @@ public sealed class FileCacheManager : IHostedService
|
||||
fileCache.Hash = Crypto.ComputeFileHash(fileCache.ResolvedFilepath, Crypto.HashAlgo.Sha1);
|
||||
fileCache.LastModifiedDateTicks = fi.LastWriteTimeUtc.Ticks.ToString(CultureInfo.InvariantCulture);
|
||||
}
|
||||
RemoveHashedFile(oldHash, prefixedPath);
|
||||
var removeDerivedFiles = !string.Equals(oldHash, fileCache.Hash, StringComparison.OrdinalIgnoreCase);
|
||||
RemoveHashedFile(oldHash, prefixedPath, removeDerivedFiles);
|
||||
AddHashedFile(fileCache);
|
||||
}
|
||||
|
||||
@@ -747,7 +819,7 @@ public sealed class FileCacheManager : IHostedService
|
||||
{
|
||||
try
|
||||
{
|
||||
RemoveHashedFile(fileCache.Hash, fileCache.PrefixedFilePath);
|
||||
RemoveHashedFile(fileCache.Hash, fileCache.PrefixedFilePath, removeDerivedFiles: false);
|
||||
var extensionPath = fileCache.ResolvedFilepath.ToUpper(CultureInfo.InvariantCulture) + "." + ext;
|
||||
File.Move(fileCache.ResolvedFilepath, extensionPath, overwrite: true);
|
||||
var newHashedEntity = new FileCacheEntity(fileCache.Hash, fileCache.PrefixedFilePath + "." + ext, DateTime.UtcNow.Ticks.ToString(CultureInfo.InvariantCulture));
|
||||
@@ -764,6 +836,33 @@ public sealed class FileCacheManager : IHostedService
|
||||
}
|
||||
}
|
||||
|
||||
private void RemoveDerivedCacheFiles(string hash)
|
||||
{
|
||||
var cacheFolder = _configService.Current.CacheFolder;
|
||||
if (string.IsNullOrWhiteSpace(cacheFolder))
|
||||
{
|
||||
return;
|
||||
}
|
||||
|
||||
TryDeleteDerivedCacheFile(Path.Combine(cacheFolder, "downscaled", $"{hash}.tex"));
|
||||
TryDeleteDerivedCacheFile(Path.Combine(cacheFolder, "decimated", $"{hash}.mdl"));
|
||||
}
|
||||
|
||||
private void TryDeleteDerivedCacheFile(string path)
|
||||
{
|
||||
try
|
||||
{
|
||||
if (File.Exists(path))
|
||||
{
|
||||
File.Delete(path);
|
||||
}
|
||||
}
|
||||
catch (Exception ex)
|
||||
{
|
||||
_logger.LogTrace(ex, "Failed to delete derived cache file {path}", path);
|
||||
}
|
||||
}
|
||||
|
||||
private void AddHashedFile(FileCacheEntity fileCache)
|
||||
{
|
||||
var normalizedPath = NormalizePrefixedPathKey(fileCache.PrefixedFilePath);
|
||||
@@ -877,6 +976,83 @@ public sealed class FileCacheManager : IHostedService
|
||||
}, token).ConfigureAwait(false);
|
||||
}
|
||||
|
||||
private async Task EnforceCacheLimitAsync(long maxBytes, CancellationToken token)
|
||||
{
|
||||
if (string.IsNullOrWhiteSpace(CacheFolder) || maxBytes <= 0) return;
|
||||
|
||||
await _evictSemaphore.WaitAsync(token).ConfigureAwait(false);
|
||||
try
|
||||
{
|
||||
Directory.CreateDirectory(CacheFolder);
|
||||
|
||||
foreach (var tmp in Directory.EnumerateFiles(CacheFolder, "*" + _compressedCacheExtension + ".tmp"))
|
||||
{
|
||||
try { File.Delete(tmp); } catch { /* ignore */ }
|
||||
}
|
||||
|
||||
var files = Directory.EnumerateFiles(CacheFolder, "*" + _compressedCacheExtension, SearchOption.TopDirectoryOnly)
|
||||
.Select(p => new FileInfo(p))
|
||||
.Where(fi => fi.Exists)
|
||||
.OrderBy(fi => fi.LastWriteTimeUtc)
|
||||
.ToList();
|
||||
|
||||
long total = files.Sum(f => f.Length);
|
||||
if (total <= maxBytes) return;
|
||||
|
||||
foreach (var fi in files)
|
||||
{
|
||||
token.ThrowIfCancellationRequested();
|
||||
if (total <= maxBytes) break;
|
||||
|
||||
var hash = Path.GetFileNameWithoutExtension(fi.Name);
|
||||
|
||||
try
|
||||
{
|
||||
var len = fi.Length;
|
||||
fi.Delete();
|
||||
total -= len;
|
||||
_sizeCache.TryRemove(hash, out _);
|
||||
}
|
||||
catch (Exception ex)
|
||||
{
|
||||
_logger.LogWarning(ex, "Failed to evict cache file {file}", fi.FullName);
|
||||
}
|
||||
}
|
||||
}
|
||||
finally
|
||||
{
|
||||
_evictSemaphore.Release();
|
||||
}
|
||||
}
|
||||
|
||||
private static long GiBToBytes(double gib)
|
||||
{
|
||||
if (double.IsNaN(gib) || double.IsInfinity(gib) || gib <= 0)
|
||||
return 0;
|
||||
|
||||
var bytes = gib * 1024d * 1024d * 1024d;
|
||||
|
||||
if (bytes >= long.MaxValue) return long.MaxValue;
|
||||
|
||||
return (long)Math.Round(bytes, MidpointRounding.AwayFromZero);
|
||||
}
|
||||
|
||||
private void CleanupOrphanCompressedCache()
|
||||
{
|
||||
if (string.IsNullOrWhiteSpace(CacheFolder) || !Directory.Exists(CacheFolder))
|
||||
return;
|
||||
|
||||
foreach (var path in Directory.EnumerateFiles(CacheFolder, "*" + _compressedCacheExtension))
|
||||
{
|
||||
var hash = Path.GetFileNameWithoutExtension(path);
|
||||
if (!_fileCaches.ContainsKey(hash))
|
||||
{
|
||||
try { File.Delete(path); }
|
||||
catch (Exception ex) { _logger.LogWarning(ex, "Failed deleting orphan {file}", path); }
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
public async Task StartAsync(CancellationToken cancellationToken)
|
||||
{
|
||||
_logger.LogInformation("Starting FileCacheManager");
|
||||
@@ -1060,6 +1236,8 @@ public sealed class FileCacheManager : IHostedService
|
||||
{
|
||||
await WriteOutFullCsvAsync(cancellationToken).ConfigureAwait(false);
|
||||
}
|
||||
|
||||
CleanupOrphanCompressedCache();
|
||||
}
|
||||
|
||||
_logger.LogInformation("Started FileCacheManager");
|
||||
|
||||
Reference in New Issue
Block a user