2025-10-06 03:47:24 +09:00
parent 39784a1fea
commit c35650438c
22 changed files with 1480 additions and 184 deletions

View File

@@ -1,4 +1,4 @@
using Dalamud.Utility;
using Dalamud.Utility;
using K4os.Compression.LZ4.Legacy;
using LightlessSync.API.Data;
using LightlessSync.API.Dto.Files;
@@ -8,6 +8,7 @@ using LightlessSync.PlayerData.Handlers;
using LightlessSync.Services.Mediator;
using LightlessSync.WebAPI.Files.Models;
using Microsoft.Extensions.Logging;
using System.Collections.Concurrent;
using System.Net;
using System.Net.Http.Json;
@@ -19,7 +20,7 @@ public partial class FileDownloadManager : DisposableMediatorSubscriberBase
private readonly FileCompactor _fileCompactor;
private readonly FileCacheManager _fileDbManager;
private readonly FileTransferOrchestrator _orchestrator;
private readonly List<ThrottledStream> _activeDownloadStreams;
private readonly ConcurrentDictionary<ThrottledStream, byte> _activeDownloadStreams;
public FileDownloadManager(ILogger<FileDownloadManager> logger, LightlessMediator mediator,
FileTransferOrchestrator orchestrator,
@@ -29,14 +30,14 @@ public partial class FileDownloadManager : DisposableMediatorSubscriberBase
_orchestrator = orchestrator;
_fileDbManager = fileCacheManager;
_fileCompactor = fileCompactor;
_activeDownloadStreams = [];
_activeDownloadStreams = new();
Mediator.Subscribe<DownloadLimitChangedMessage>(this, (msg) =>
{
if (!_activeDownloadStreams.Any()) return;
if (_activeDownloadStreams.IsEmpty) return;
var newLimit = _orchestrator.DownloadLimitPerSlot();
Logger.LogTrace("Setting new Download Speed Limit to {newLimit}", newLimit);
foreach (var stream in _activeDownloadStreams)
foreach (var stream in _activeDownloadStreams.Keys)
{
stream.BandwidthLimit = newLimit;
}
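
Note: the hunk above is the core of the download-side change. The `List<ThrottledStream>` becomes a `ConcurrentDictionary<ThrottledStream, byte>` used as a thread-safe set (the stream is the key, the `byte` value is a throwaway), so the bandwidth-limit broadcast can enumerate `.Keys` while downloads register and unregister streams concurrently, and `IsEmpty` replaces the `Any()` check. A minimal stand-alone sketch of that idiom, with illustrative names that are not part of the commit:

```csharp
using System;
using System.Collections.Concurrent;

// Minimal sketch of a ConcurrentDictionary used as a thread-safe set.
// ActiveSet<T> and its members are illustrative; the commit applies the same
// idiom directly to _activeDownloadStreams with ThrottledStream keys.
public sealed class ActiveSet<T> where T : notnull
{
    // The byte value is never read; only the key set matters.
    private readonly ConcurrentDictionary<T, byte> _items = new();

    public bool IsEmpty => _items.IsEmpty;

    // TryAdd/TryRemove are atomic, so concurrent registration and cleanup
    // cannot corrupt the collection the way List<T>.Add/Remove could.
    public bool Add(T item) => _items.TryAdd(item, 0);
    public bool Remove(T item) => _items.TryRemove(item, out _);

    // Keys yields a moment-in-time snapshot that is safe to enumerate while
    // other threads mutate the set (e.g. applying a new bandwidth limit).
    public void ForEach(Action<T> apply)
    {
        foreach (var item in _items.Keys)
        {
            apply(item);
        }
    }
}
```
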
@@ -47,7 +48,7 @@ public partial class FileDownloadManager : DisposableMediatorSubscriberBase
public List<FileTransfer> ForbiddenTransfers => _orchestrator.ForbiddenTransfers;
public bool IsDownloading => !CurrentDownloads.Any();
public bool IsDownloading => CurrentDownloads.Any();
public static void MungeBuffer(Span<byte> buffer)
{
@@ -84,7 +85,7 @@ public partial class FileDownloadManager : DisposableMediatorSubscriberBase
protected override void Dispose(bool disposing)
{
ClearDownload();
foreach (var stream in _activeDownloadStreams.ToList())
foreach (var stream in _activeDownloadStreams.Keys.ToList())
{
try
{
@@ -95,6 +96,10 @@ public partial class FileDownloadManager : DisposableMediatorSubscriberBase
// do nothing
//
}
finally
{
_activeDownloadStreams.TryRemove(stream, out _);
}
}
base.Dispose(disposing);
}
@@ -142,7 +147,14 @@ public partial class FileDownloadManager : DisposableMediatorSubscriberBase
await WaitForDownloadReady(fileTransfer, requestId, ct).ConfigureAwait(false);
_downloadStatus[downloadGroup].DownloadStatus = DownloadStatus.Downloading;
if (_downloadStatus.TryGetValue(downloadGroup, out var downloadStatus))
{
downloadStatus.DownloadStatus = DownloadStatus.Downloading;
}
else
{
Logger.LogWarning("Download status missing for {group} when starting download", downloadGroup);
}
const int maxRetries = 3;
int retryCount = 0;
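
Note: this hunk trades the indexer write `_downloadStatus[downloadGroup].DownloadStatus = ...` for a `TryGetValue` guard, so a status entry that was removed mid-flight (for example by a concurrent `ClearDownload`) logs a warning instead of throwing `KeyNotFoundException`. A small sketch of the guard pattern, using hypothetical stand-ins (`DownloadStatusBoard`, `DownloadStatusInfo`) rather than the commit's types:

```csharp
using System.Collections.Concurrent;
using Microsoft.Extensions.Logging;

// Hypothetical stand-in illustrating the TryGetValue guard used above.
public sealed class DownloadStatusBoard
{
    private readonly ConcurrentDictionary<string, DownloadStatusInfo> _statuses = new();
    private readonly ILogger _logger;

    public DownloadStatusBoard(ILogger logger) => _logger = logger;

    public void SetStatusSafe(string downloadGroup, string newState)
    {
        // Indexer access would throw KeyNotFoundException if the entry was
        // removed between scheduling the work and running it; the guard
        // degrades that to a log line.
        if (_statuses.TryGetValue(downloadGroup, out var status))
        {
            status.State = newState;
        }
        else
        {
            _logger.LogWarning("Download status missing for {group}", downloadGroup);
        }
    }
}

public sealed class DownloadStatusInfo
{
    public string State { get; set; } = "Initializing";
}
```
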
@@ -204,7 +216,7 @@ public partial class FileDownloadManager : DisposableMediatorSubscriberBase
stream = new(await response.Content.ReadAsStreamAsync(ct).ConfigureAwait(false), limit);
_activeDownloadStreams.Add(stream);
_activeDownloadStreams.TryAdd(stream, 0);
while ((bytesRead = await stream.ReadAsync(buffer, ct).ConfigureAwait(false)) > 0)
{
@@ -245,7 +257,7 @@ public partial class FileDownloadManager : DisposableMediatorSubscriberBase
{
if (stream != null)
{
_activeDownloadStreams.Remove(stream);
_activeDownloadStreams.TryRemove(stream, out _);
await stream.DisposeAsync().ConfigureAwait(false);
}
}
@@ -253,11 +265,28 @@ public partial class FileDownloadManager : DisposableMediatorSubscriberBase
public async Task<List<DownloadFileTransfer>> InitiateDownloadList(GameObjectHandler gameObjectHandler, List<FileReplacementData> fileReplacement, CancellationToken ct)
{
Logger.LogDebug("Download start: {id}", gameObjectHandler.Name);
var objectName = gameObjectHandler?.Name ?? "Unknown";
Logger.LogDebug("Download start: {id}", objectName);
if (fileReplacement == null || fileReplacement.Count == 0)
{
Logger.LogDebug("{dlName}: No file replacements provided", objectName);
CurrentDownloads = [];
return CurrentDownloads;
}
var hashes = fileReplacement.Where(f => f != null && !string.IsNullOrWhiteSpace(f.Hash)).Select(f => f.Hash).Distinct(StringComparer.Ordinal).ToList();
if (hashes.Count == 0)
{
Logger.LogDebug("{dlName}: No valid hashes to download", objectName);
CurrentDownloads = [];
return CurrentDownloads;
}
List<DownloadFileDto> downloadFileInfoFromService =
[
.. await FilesGetSizes(fileReplacement.Select(f => f.Hash).Distinct(StringComparer.Ordinal).ToList(), ct).ConfigureAwait(false),
.. await FilesGetSizes(hashes, ct).ConfigureAwait(false),
];
Logger.LogDebug("Files with size 0 or less: {files}", string.Join(", ", downloadFileInfoFromService.Where(f => f.Size <= 0).Select(f => f.Hash)));
@@ -315,15 +344,23 @@ public partial class FileDownloadManager : DisposableMediatorSubscriberBase
FileInfo fi = new(blockFile);
try
{
_downloadStatus[fileGroup.Key].DownloadStatus = DownloadStatus.WaitingForSlot;
if (!_downloadStatus.TryGetValue(fileGroup.Key, out var downloadStatus))
{
Logger.LogWarning("Download status missing for {group}, aborting", fileGroup.Key);
return;
}
downloadStatus.DownloadStatus = DownloadStatus.WaitingForSlot;
await _orchestrator.WaitForDownloadSlotAsync(token).ConfigureAwait(false);
_downloadStatus[fileGroup.Key].DownloadStatus = DownloadStatus.WaitingForQueue;
downloadStatus.DownloadStatus = DownloadStatus.WaitingForQueue;
Progress<long> progress = new((bytesDownloaded) =>
{
try
{
if (!_downloadStatus.TryGetValue(fileGroup.Key, out FileDownloadStatus? value)) return;
value.TransferredBytes += bytesDownloaded;
if (_downloadStatus.TryGetValue(fileGroup.Key, out FileDownloadStatus? value))
{
value.TransferredBytes += bytesDownloaded;
}
}
catch (Exception ex)
{
@@ -353,6 +390,12 @@ public partial class FileDownloadManager : DisposableMediatorSubscriberBase
status.TransferredFiles = 1;
status.DownloadStatus = DownloadStatus.Decompressing;
}
if (!File.Exists(blockFile))
{
Logger.LogWarning("{dlName}: Block file missing before extraction, skipping", fi.Name);
return;
}
fileBlockStream = File.OpenRead(blockFile);
while (fileBlockStream.Position < fileBlockStream.Length)
{
@@ -511,4 +554,6 @@ public partial class FileDownloadManager : DisposableMediatorSubscriberBase
_orchestrator.ClearDownloadRequest(requestId);
}
}
}
}
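
Note: several of the download hunks above (the throttled read loop and the `Progress<long>` callback that accumulates `TransferredBytes`) share one shape: read a chunk, write it, report its size. A self-contained sketch of that shape, assuming a plain `IProgress<long>` consumer; `CopyWithProgressAsync` is an illustrative helper, not a method from this commit:

```csharp
using System;
using System.IO;
using System.Threading;
using System.Threading.Tasks;

public static class ProgressCopy
{
    // Reads from a (possibly throttled) source stream and reports each chunk
    // size; the IProgress<long> consumer sums those into a transferred-bytes
    // counter, as the Progress<long> handler in the manager does.
    public static async Task CopyWithProgressAsync(
        Stream source, Stream destination, IProgress<long> progress, CancellationToken ct)
    {
        var buffer = new byte[65536];
        int bytesRead;
        while ((bytesRead = await source.ReadAsync(buffer, ct).ConfigureAwait(false)) > 0)
        {
            await destination.WriteAsync(buffer.AsMemory(0, bytesRead), ct).ConfigureAwait(false);
            progress.Report(bytesRead);   // non-blocking; the handler runs on its own context
        }
    }
}
```
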

View File

@@ -1,4 +1,4 @@
using LightlessSync.API.Data;
using LightlessSync.API.Data;
using LightlessSync.API.Dto.Files;
using LightlessSync.API.Routes;
using LightlessSync.FileCache;
@@ -10,6 +10,8 @@ using LightlessSync.WebAPI.Files.Models;
using Microsoft.Extensions.Logging;
using System.Net.Http.Headers;
using System.Net.Http.Json;
using System.Collections.Concurrent;
using System.Threading;
namespace LightlessSync.WebAPI.Files;
@@ -19,7 +21,9 @@ public sealed class FileUploadManager : DisposableMediatorSubscriberBase
private readonly LightlessConfigService _lightlessConfigService;
private readonly FileTransferOrchestrator _orchestrator;
private readonly ServerConfigurationManager _serverManager;
private readonly Dictionary<string, DateTime> _verifiedUploadedHashes = new(StringComparer.Ordinal);
private readonly ConcurrentDictionary<string, DateTime> _verifiedUploadedHashes = new(StringComparer.Ordinal);
private readonly object _currentUploadsLock = new();
private readonly Dictionary<string, FileTransfer> _currentUploadsByHash = new(StringComparer.Ordinal);
private CancellationTokenSource? _uploadCancellationTokenSource = new();
public FileUploadManager(ILogger<FileUploadManager> logger, LightlessMediator mediator,
@@ -40,17 +44,38 @@ public sealed class FileUploadManager : DisposableMediatorSubscriberBase
}
public List<FileTransfer> CurrentUploads { get; } = [];
public bool IsUploading => CurrentUploads.Count > 0;
public bool IsUploading
{
get
{
lock (_currentUploadsLock)
{
return CurrentUploads.Count > 0;
}
}
}
public List<FileTransfer> GetCurrentUploadsSnapshot()
{
lock (_currentUploadsLock)
{
return CurrentUploads.ToList();
}
}
public bool CancelUpload()
{
if (CurrentUploads.Any())
if (IsUploading)
{
Logger.LogDebug("Cancelling current upload");
_uploadCancellationTokenSource?.Cancel();
_uploadCancellationTokenSource?.Dispose();
_uploadCancellationTokenSource = null;
CurrentUploads.Clear();
lock (_currentUploadsLock)
{
CurrentUploads.Clear();
_currentUploadsByHash.Clear();
}
return true;
}
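
Note: on the upload side, the commit puts `CurrentUploads` and the new hash-keyed index (`_currentUploadsByHash`) behind one lock and hands out only copies via `GetCurrentUploadsSnapshot()`. A compact sketch of that lock-plus-snapshot idiom; `UploadTracker` and `UploadItem` are illustrative names, and `ReportProgress` shows why the index exists (it replaces the old `CurrentUploads.Single(...)` lookup further down):

```csharp
using System;
using System.Collections.Generic;
using System.Linq;

public sealed class UploadTracker
{
    private readonly object _gate = new();
    private readonly List<UploadItem> _uploads = new();
    private readonly Dictionary<string, UploadItem> _byHash = new(StringComparer.Ordinal);

    public bool IsUploading
    {
        get { lock (_gate) { return _uploads.Count > 0; } }
    }

    public void Track(UploadItem item)
    {
        lock (_gate)
        {
            _uploads.Add(item);
            _byHash[item.Hash] = item;   // O(1) lookup for progress updates
        }
    }

    public void ReportProgress(string hash, long uploadedBytes)
    {
        lock (_gate)
        {
            if (_byHash.TryGetValue(hash, out var item))
                item.Transferred = uploadedBytes;
        }
    }

    // Callers get a copy, so UI code can enumerate without holding the lock.
    public List<UploadItem> Snapshot()
    {
        lock (_gate) { return _uploads.ToList(); }
    }

    public void Clear()
    {
        lock (_gate) { _uploads.Clear(); _byHash.Clear(); }
    }
}

public sealed class UploadItem
{
    public UploadItem(string hash) => Hash = hash;
    public string Hash { get; }
    public long Transferred { get; set; }
}
```
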
@@ -83,22 +108,44 @@ public sealed class FileUploadManager : DisposableMediatorSubscriberBase
return [.. filesToUpload.Where(f => f.IsForbidden).Select(f => f.Hash)];
}
Task uploadTask = Task.CompletedTask;
var cancellationToken = ct ?? CancellationToken.None;
var parallelUploads = Math.Clamp(_lightlessConfigService.Current.ParallelUploads, 1, 8);
using SemaphoreSlim uploadSlots = new(parallelUploads, parallelUploads);
List<Task> uploadTasks = new();
int i = 1;
foreach (var file in filesToUpload)
{
progress.Report($"Uploading file {i++}/{filesToUpload.Count}. Please wait until the upload is completed.");
Logger.LogDebug("[{hash}] Compressing", file);
var data = await _fileDbManager.GetCompressedFileData(file.Hash, ct ?? CancellationToken.None).ConfigureAwait(false);
Logger.LogDebug("[{hash}] Starting upload for {filePath}", data.Item1, _fileDbManager.GetFileCacheByHash(data.Item1)!.ResolvedFilepath);
await uploadTask.ConfigureAwait(false);
uploadTask = UploadFile(data.Item2, file.Hash, postProgress: false, ct ?? CancellationToken.None);
(ct ?? CancellationToken.None).ThrowIfCancellationRequested();
uploadTasks.Add(UploadSingleFileAsync(file, uploadSlots, cancellationToken));
}
await uploadTask.ConfigureAwait(false);
await Task.WhenAll(uploadTasks).ConfigureAwait(false);
return [];
async Task UploadSingleFileAsync(UploadFileDto fileDto, SemaphoreSlim gate, CancellationToken token)
{
await gate.WaitAsync(token).ConfigureAwait(false);
try
{
token.ThrowIfCancellationRequested();
Logger.LogDebug("[{hash}] Compressing", fileDto.Hash);
var data = await _fileDbManager.GetCompressedFileData(fileDto.Hash, token).ConfigureAwait(false);
var cacheEntry = _fileDbManager.GetFileCacheByHash(data.Item1);
if (cacheEntry != null)
{
Logger.LogDebug("[{hash}] Starting upload for {filePath}", data.Item1, cacheEntry.ResolvedFilepath);
}
await UploadFile(data.Item2, fileDto.Hash, postProgress: false, token).ConfigureAwait(false);
}
finally
{
gate.Release();
}
}
}
public async Task<CharacterData> UploadFiles(CharacterData data, List<UserData> visiblePlayers)
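
Note: the rewritten loop above replaces the rolling `uploadTask` chain (one upload in flight at a time) with a fan-out gated by a `SemaphoreSlim`, so at most `ParallelUploads` files (clamped to 1..8) are compressed and uploaded concurrently, and `Task.WhenAll` observes every failure. A stand-alone sketch of that bounded-parallelism shape; `ProcessAllAsync` and its parameters are illustrative, not part of the commit:

```csharp
using System;
using System.Collections.Generic;
using System.Linq;
using System.Threading;
using System.Threading.Tasks;

public static class BoundedUploads
{
    public static async Task ProcessAllAsync(
        IReadOnlyList<string> hashes,
        Func<string, CancellationToken, Task> uploadOneAsync,
        int configuredParallelism,
        CancellationToken ct)
    {
        // Clamp the configured value the same way the commit does (1..8).
        var parallelism = Math.Clamp(configuredParallelism, 1, 8);
        using SemaphoreSlim gate = new(parallelism, parallelism);

        var tasks = hashes.Select(async hash =>
        {
            await gate.WaitAsync(ct).ConfigureAwait(false);
            try
            {
                ct.ThrowIfCancellationRequested();
                await uploadOneAsync(hash, ct).ConfigureAwait(false);
            }
            finally
            {
                gate.Release();   // always free the slot, even on failure
            }
        }).ToList();

        // Awaiting WhenAll rethrows the first failure after all workers complete.
        await Task.WhenAll(tasks).ConfigureAwait(false);
    }
}
```
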
@@ -167,7 +214,11 @@ public sealed class FileUploadManager : DisposableMediatorSubscriberBase
_uploadCancellationTokenSource?.Cancel();
_uploadCancellationTokenSource?.Dispose();
_uploadCancellationTokenSource = null;
CurrentUploads.Clear();
lock (_currentUploadsLock)
{
CurrentUploads.Clear();
_currentUploadsByHash.Clear();
}
_verifiedUploadedHashes.Clear();
}
@@ -211,7 +262,17 @@ public sealed class FileUploadManager : DisposableMediatorSubscriberBase
{
try
{
CurrentUploads.Single(f => string.Equals(f.Hash, fileHash, StringComparison.Ordinal)).Transferred = prog.Uploaded;
lock (_currentUploadsLock)
{
if (_currentUploadsByHash.TryGetValue(fileHash, out var transfer))
{
transfer.Transferred = prog.Uploaded;
}
else
{
Logger.LogDebug("[{hash}] Could not find upload transfer during progress update", fileHash);
}
}
}
catch (Exception ex)
{
@@ -240,10 +301,16 @@ public sealed class FileUploadManager : DisposableMediatorSubscriberBase
{
try
{
CurrentUploads.Add(new UploadFileTransfer(file)
var uploadTransfer = new UploadFileTransfer(file)
{
Total = new FileInfo(_fileDbManager.GetFileCacheByHash(file.Hash)!.ResolvedFilepath).Length,
});
};
lock (_currentUploadsLock)
{
CurrentUploads.Add(uploadTransfer);
_currentUploadsByHash[file.Hash] = uploadTransfer;
}
}
catch (Exception ex)
{
@@ -264,33 +331,75 @@ public sealed class FileUploadManager : DisposableMediatorSubscriberBase
_verifiedUploadedHashes[file.Hash] = DateTime.UtcNow;
}
var totalSize = CurrentUploads.Sum(c => c.Total);
long totalSize;
List<FileTransfer> pendingUploads;
lock (_currentUploadsLock)
{
totalSize = CurrentUploads.Sum(c => c.Total);
pendingUploads = CurrentUploads.Where(f => f.CanBeTransferred && !f.IsTransferred).ToList();
}
var parallelUploads = Math.Clamp(_lightlessConfigService.Current.ParallelUploads, 1, 8);
using SemaphoreSlim uploadSlots = new(parallelUploads, parallelUploads);
Logger.LogDebug("Compressing and uploading files");
Task uploadTask = Task.CompletedTask;
foreach (var file in CurrentUploads.Where(f => f.CanBeTransferred && !f.IsTransferred).ToList())
List<Task> uploadTasks = new();
foreach (var transfer in pendingUploads)
{
Logger.LogDebug("[{hash}] Compressing", file);
var data = await _fileDbManager.GetCompressedFileData(file.Hash, uploadToken).ConfigureAwait(false);
CurrentUploads.Single(e => string.Equals(e.Hash, data.Item1, StringComparison.Ordinal)).Total = data.Item2.Length;
Logger.LogDebug("[{hash}] Starting upload for {filePath}", data.Item1, _fileDbManager.GetFileCacheByHash(data.Item1)!.ResolvedFilepath);
await uploadTask.ConfigureAwait(false);
uploadTask = UploadFile(data.Item2, file.Hash, true, uploadToken);
uploadToken.ThrowIfCancellationRequested();
uploadTasks.Add(UploadPendingFileAsync(transfer, uploadSlots, uploadToken));
}
if (CurrentUploads.Any())
{
await uploadTask.ConfigureAwait(false);
await Task.WhenAll(uploadTasks).ConfigureAwait(false);
var compressedSize = CurrentUploads.Sum(c => c.Total);
Logger.LogDebug("Upload complete, compressed {size} to {compressed}", UiSharedService.ByteToString(totalSize), UiSharedService.ByteToString(compressedSize));
long compressedSize;
HashSet<string> uploadedHashes;
lock (_currentUploadsLock)
{
compressedSize = CurrentUploads.Sum(c => c.Total);
uploadedHashes = CurrentUploads.Select(u => u.Hash).ToHashSet(StringComparer.Ordinal);
}
foreach (var file in unverifiedUploadHashes.Where(c => !CurrentUploads.Exists(u => string.Equals(u.Hash, c, StringComparison.Ordinal))))
Logger.LogDebug("Upload complete, compressed {size} to {compressed}", UiSharedService.ByteToString(totalSize), UiSharedService.ByteToString(compressedSize));
foreach (var file in unverifiedUploadHashes.Where(c => !uploadedHashes.Contains(c)))
{
_verifiedUploadedHashes[file] = DateTime.UtcNow;
}
CurrentUploads.Clear();
lock (_currentUploadsLock)
{
CurrentUploads.Clear();
_currentUploadsByHash.Clear();
}
async Task UploadPendingFileAsync(FileTransfer transfer, SemaphoreSlim gate, CancellationToken token)
{
await gate.WaitAsync(token).ConfigureAwait(false);
try
{
token.ThrowIfCancellationRequested();
Logger.LogDebug("[{hash}] Compressing", transfer.Hash);
var data = await _fileDbManager.GetCompressedFileData(transfer.Hash, token).ConfigureAwait(false);
lock (_currentUploadsLock)
{
if (_currentUploadsByHash.TryGetValue(data.Item1, out var trackedUpload))
{
trackedUpload.Total = data.Item2.Length;
}
}
var cacheEntry = _fileDbManager.GetFileCacheByHash(data.Item1);
if (cacheEntry != null)
{
Logger.LogDebug("[{hash}] Starting upload for {filePath}", data.Item1, cacheEntry.ResolvedFilepath);
}
await UploadFile(data.Item2, transfer.Hash, true, token).ConfigureAwait(false);
}
finally
{
gate.Release();
}
}
}
}