LightlessClient/LightlessSync/WebAPI/Files/FileDownloadManager.cs

using K4os.Compression.LZ4.Legacy;
using LightlessSync.API.Data;
using LightlessSync.API.Dto.Files;
using LightlessSync.API.Routes;
using LightlessSync.FileCache;
using LightlessSync.LightlessConfiguration;
using LightlessSync.PlayerData.Handlers;
using LightlessSync.Services.Mediator;
using LightlessSync.Services.TextureCompression;
using LightlessSync.WebAPI.Files.Models;
using Microsoft.Extensions.Logging;
using System.Collections.Concurrent;
using System.Net;
using System.Net.Http.Json;
namespace LightlessSync.WebAPI.Files;
public partial class FileDownloadManager : DisposableMediatorSubscriberBase
{
private readonly Dictionary<string, FileDownloadStatus> _downloadStatus;
private readonly object _downloadStatusLock = new();
private readonly FileCompactor _fileCompactor;
private readonly FileCacheManager _fileDbManager;
private readonly FileTransferOrchestrator _orchestrator;
private readonly LightlessConfigService _configService;
private readonly TextureDownscaleService _textureDownscaleService;
private readonly TextureMetadataHelper _textureMetadataHelper;
private readonly ConcurrentDictionary<ThrottledStream, byte> _activeDownloadStreams;
private readonly SemaphoreSlim _decompressGate =
new(Math.Max(1, Environment.ProcessorCount / 2), Math.Max(1, Environment.ProcessorCount / 2));
private volatile bool _disableDirectDownloads;
private int _consecutiveDirectDownloadFailures;
private bool _lastConfigDirectDownloadsState;
public FileDownloadManager(
ILogger<FileDownloadManager> logger,
LightlessMediator mediator,
FileTransferOrchestrator orchestrator,
FileCacheManager fileCacheManager,
FileCompactor fileCompactor,
LightlessConfigService configService,
TextureDownscaleService textureDownscaleService,
TextureMetadataHelper textureMetadataHelper) : base(logger, mediator)
{
_downloadStatus = new Dictionary<string, FileDownloadStatus>(StringComparer.Ordinal);
_orchestrator = orchestrator;
_fileDbManager = fileCacheManager;
_fileCompactor = fileCompactor;
_configService = configService;
_textureDownscaleService = textureDownscaleService;
_textureMetadataHelper = textureMetadataHelper;
_activeDownloadStreams = new();
_lastConfigDirectDownloadsState = _configService.Current.EnableDirectDownloads;
Mediator.Subscribe<DownloadLimitChangedMessage>(this, _ =>
{
if (_activeDownloadStreams.IsEmpty) return;
var newLimit = _orchestrator.DownloadLimitPerSlot();
Logger.LogTrace("Setting new Download Speed Limit to {newLimit}", newLimit);
foreach (var stream in _activeDownloadStreams.Keys)
stream.BandwidthLimit = newLimit;
});
}
public List<DownloadFileTransfer> CurrentDownloads { get; private set; } = [];
public List<FileTransfer> ForbiddenTransfers => _orchestrator.ForbiddenTransfers;
public Guid? CurrentOwnerToken { get; private set; }
public bool IsDownloading => CurrentDownloads.Count != 0;
private bool ShouldUseDirectDownloads()
=> _configService.Current.EnableDirectDownloads && !_disableDirectDownloads;
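// "Munging" is a symmetric XOR of every byte with 42: applying it twice restores the original
// data. Queued block downloads pass this as the write callback, so the temporary .blk file on
// disk is stored XOR-obfuscated (apparently to avoid keeping plain payloads in the cache folder)
// and is de-munged again by MungeByte/MungeBuffer when the block file is read back.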
public static void MungeBuffer(Span<byte> buffer)
{
for (int i = 0; i < buffer.Length; ++i)
buffer[i] ^= 42;
}
public void ClearDownload()
{
CurrentDownloads.Clear();
lock (_downloadStatusLock)
{
_downloadStatus.Clear();
}
CurrentOwnerToken = null;
}
public async Task DownloadFiles(GameObjectHandler? gameObject, List<FileReplacementData> fileReplacementDto, CancellationToken ct, bool skipDownscale = false)
{
Mediator.Publish(new HaltScanMessage(nameof(DownloadFiles)));
try
{
await DownloadFilesInternal(gameObject, fileReplacementDto, ct, skipDownscale).ConfigureAwait(false);
}
catch
{
ClearDownload();
}
finally
{
if (gameObject is not null)
Mediator.Publish(new DownloadFinishedMessage(gameObject));
Mediator.Publish(new ResumeScanMessage(nameof(DownloadFiles)));
}
}
protected override void Dispose(bool disposing)
{
ClearDownload();
foreach (var stream in _activeDownloadStreams.Keys.ToList())
{
try { stream.Dispose(); }
catch
{
// ignore
}
finally { _activeDownloadStreams.TryRemove(stream, out _); }
}
base.Dispose(disposing);
}
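// A download "slot" is one unit of the orchestrator's global download concurrency budget. The
// lease below pairs WaitForDownloadSlotAsync with ReleaseDownloadSlot and guarantees the slot is
// released exactly once, even on exceptions, via the pattern used further down:
//   await using ((await AcquireSlotAsync(ct).ConfigureAwait(false)).ConfigureAwait(false)) { ... }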
private sealed class DownloadSlotLease : IAsyncDisposable
{
private readonly FileTransferOrchestrator _orch;
private bool _released;
public DownloadSlotLease(FileTransferOrchestrator orch) => _orch = orch;
public ValueTask DisposeAsync()
{
if (!_released)
{
_released = true;
_orch.ReleaseDownloadSlot();
}
return ValueTask.CompletedTask;
}
}
private async ValueTask<DownloadSlotLease> AcquireSlotAsync(CancellationToken ct)
{
await _orchestrator.WaitForDownloadSlotAsync(ct).ConfigureAwait(false);
return new DownloadSlotLease(_orchestrator);
}
private void SetStatus(string key, DownloadStatus status)
{
lock (_downloadStatusLock)
{
if (_downloadStatus.TryGetValue(key, out var st))
st.DownloadStatus = status;
}
}
private void AddTransferredBytes(string key, long delta)
{
lock (_downloadStatusLock)
{
if (_downloadStatus.TryGetValue(key, out var st))
st.TransferredBytes += delta;
}
}
private void MarkTransferredFiles(string key, int files)
{
lock (_downloadStatusLock)
{
if (_downloadStatus.TryGetValue(key, out var st))
st.TransferredFiles = files;
}
}
private static byte MungeByte(int byteOrEof)
{
if (byteOrEof == -1) throw new EndOfStreamException();
return (byte)(byteOrEof ^ 42);
}
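// A block file is a concatenation of entries, each starting with a munged ASCII header of the
// form #<hash>:<lengthInBytes># followed by <lengthInBytes> of (munged) LZ4-wrapped payload.
// ReadBlockFileHeader de-munges and parses one header, leaving the stream positioned at the
// start of that entry's payload.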
private static (string fileHash, long fileLengthBytes) ReadBlockFileHeader(FileStream fileBlockStream)
{
List<char> hashName = [];
List<char> fileLength = [];
var separator = (char)MungeByte(fileBlockStream.ReadByte());
if (separator != '#') throw new InvalidDataException("Data is invalid, first char is not #");
bool readHash = false;
while (true)
{
int readByte = fileBlockStream.ReadByte();
if (readByte == -1) throw new EndOfStreamException();
var readChar = (char)MungeByte(readByte);
if (readChar == ':')
{
readHash = true;
continue;
}
if (readChar == '#') break;
if (!readHash) hashName.Add(readChar);
else fileLength.Add(readChar);
}
return (string.Join("", hashName), long.Parse(string.Join("", fileLength)));
}
private static async Task ReadExactlyAsync(FileStream stream, Memory<byte> buffer, CancellationToken ct)
{
int offset = 0;
while (offset < buffer.Length)
{
int n = await stream.ReadAsync(buffer.Slice(offset), ct).ConfigureAwait(false);
if (n == 0) throw new EndOfStreamException();
offset += n;
}
}
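// Builds a case-insensitive hash -> (extension, first game path) lookup from the replacement
// list; entries without a usable extension fall back to "bin". The extension selects the cache
// file name and the game path feeds texture map-kind detection after extraction.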
private static Dictionary<string, (string Extension, string GamePath)> BuildReplacementLookup(List<FileReplacementData> fileReplacement)
{
var map = new Dictionary<string, (string Extension, string GamePath)>(StringComparer.OrdinalIgnoreCase);
foreach (var r in fileReplacement)
{
if (r == null || string.IsNullOrWhiteSpace(r.Hash)) continue;
if (map.ContainsKey(r.Hash)) continue;
var gamePath = r.GamePaths?.FirstOrDefault() ?? string.Empty;
string ext = "";
try
{
ext = Path.GetExtension(gamePath)?.TrimStart('.') ?? "";
}
catch
{
// ignore
}
if (string.IsNullOrWhiteSpace(ext))
ext = "bin";
map[r.Hash] = (ext, gamePath);
}
return map;
}
private delegate void DownloadDataCallback(Span<byte> data);
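// Streams one HTTP GET to disk through a ThrottledStream that is registered in
// _activeDownloadStreams, so a DownloadLimitChangedMessage can adjust the bandwidth limit
// mid-transfer. Transient failures (timeouts and requests that never produced a status code)
// are retried up to 3 times with a 2 second delay; 404/401 are rethrown as InvalidDataException
// so callers can fall back to the queued path. A partially written destination file is deleted
// on failure.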
private async Task DownloadFileThrottled(
Uri requestUrl,
string destinationFilename,
IProgress<long> progress,
DownloadDataCallback? callback,
CancellationToken ct,
bool withToken)
{
const int maxRetries = 3;
int retryCount = 0;
TimeSpan retryDelay = TimeSpan.FromSeconds(2);
HttpResponseMessage? response = null;
while (true)
{
try
{
Logger.LogDebug("Attempt {attempt} - Downloading {requestUrl}", retryCount + 1, requestUrl);
response = await _orchestrator.SendRequestAsync(
HttpMethod.Get,
requestUrl,
ct,
HttpCompletionOption.ResponseHeadersRead,
withToken)
.ConfigureAwait(false);
response.EnsureSuccessStatusCode();
break;
}
catch (HttpRequestException ex) when (ex.InnerException is TimeoutException || ex.StatusCode == null)
{
response?.Dispose();
retryCount++;
Logger.LogWarning(ex, "Timeout during download of {requestUrl}. Attempt {attempt} of {maxRetries}", requestUrl, retryCount, maxRetries);
if (retryCount >= maxRetries || ct.IsCancellationRequested)
{
Logger.LogError("Max retries reached or cancelled. Failing download for {requestUrl}", requestUrl);
throw;
}
await Task.Delay(retryDelay, ct).ConfigureAwait(false);
}
catch (TaskCanceledException ex) when (!ct.IsCancellationRequested)
{
response?.Dispose();
retryCount++;
Logger.LogWarning(ex, "Cancellation/timeout during download of {requestUrl}. Attempt {attempt} of {maxRetries}", requestUrl, retryCount, maxRetries);
if (retryCount >= maxRetries)
{
Logger.LogError("Max retries reached for {requestUrl} after TaskCanceledException", requestUrl);
throw;
}
await Task.Delay(retryDelay, ct).ConfigureAwait(false);
}
catch (OperationCanceledException) when (ct.IsCancellationRequested)
{
response?.Dispose();
throw;
}
catch (HttpRequestException ex)
{
response?.Dispose();
Logger.LogWarning(ex, "Error during download of {requestUrl}, HttpStatusCode: {code}", requestUrl, ex.StatusCode);
if (ex.StatusCode is HttpStatusCode.NotFound or HttpStatusCode.Unauthorized)
throw new InvalidDataException($"Http error {ex.StatusCode} (cancelled: {ct.IsCancellationRequested}): {requestUrl}", ex);
throw;
}
}
ThrottledStream? stream = null;
try
{
// Determine buffer size based on content length
var contentLen = response!.Content.Headers.ContentLength ?? 0;
var bufferSize = contentLen > 1024 * 1024 ? 65536 : 8192;
var buffer = new byte[bufferSize];
// Create destination file stream
var fileStream = new FileStream(
destinationFilename,
FileMode.Create,
FileAccess.Write,
FileShare.None,
bufferSize: 64 * 1024,
useAsync: true);
// Download with throttling
await using (fileStream.ConfigureAwait(false))
{
var limit = _orchestrator.DownloadLimitPerSlot();
Logger.LogTrace("Starting Download with a speed limit of {limit} to {destination}", limit, destinationFilename);
stream = new ThrottledStream(await response.Content.ReadAsStreamAsync(ct).ConfigureAwait(false), limit);
_activeDownloadStreams.TryAdd(stream, 0);
while (true)
{
ct.ThrowIfCancellationRequested();
int bytesRead = await stream.ReadAsync(buffer.AsMemory(0, buffer.Length), ct).ConfigureAwait(false);
if (bytesRead == 0) break;
callback?.Invoke(buffer.AsSpan(0, bytesRead));
await fileStream.WriteAsync(buffer.AsMemory(0, bytesRead), ct).ConfigureAwait(false);
progress.Report(bytesRead);
}
Logger.LogDebug("{requestUrl} downloaded to {destination}", requestUrl, destinationFilename);
}
}
catch
{
try
{
if (!string.IsNullOrEmpty(destinationFilename) && File.Exists(destinationFilename))
File.Delete(destinationFilename);
}
catch
{
// ignore
}
throw;
}
finally
{
if (stream != null)
{
_activeDownloadStreams.TryRemove(stream, out _);
await stream.DisposeAsync().ConfigureAwait(false);
}
response?.Dispose();
}
}
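// Polls the orchestrator every 250ms until the server marks the queued request as ready. If
// nothing happens for 5 seconds, the queue-check endpoint is pinged again to keep the request
// alive; on cancellation the server-side request is cancelled and the local request state is
// always cleared.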
private async Task WaitForDownloadReady(List<DownloadFileTransfer> downloadFileTransfer, Guid requestId, CancellationToken downloadCt)
{
bool alreadyCancelled = false;
try
{
CancellationTokenSource localTimeoutCts = new();
localTimeoutCts.CancelAfter(TimeSpan.FromSeconds(5));
CancellationTokenSource composite = CancellationTokenSource.CreateLinkedTokenSource(downloadCt, localTimeoutCts.Token);
while (!_orchestrator.IsDownloadReady(requestId))
{
try
{
await Task.Delay(250, composite.Token).ConfigureAwait(false);
}
catch (TaskCanceledException)
{
if (downloadCt.IsCancellationRequested) throw;
var req = await _orchestrator.SendRequestAsync(
HttpMethod.Get,
LightlessFiles.RequestCheckQueueFullPath(downloadFileTransfer[0].DownloadUri, requestId),
downloadFileTransfer.Select(c => c.Hash).ToList(),
downloadCt).ConfigureAwait(false);
req.EnsureSuccessStatusCode();
localTimeoutCts.Dispose();
composite.Dispose();
localTimeoutCts = new();
localTimeoutCts.CancelAfter(TimeSpan.FromSeconds(5));
composite = CancellationTokenSource.CreateLinkedTokenSource(downloadCt, localTimeoutCts.Token);
}
}
localTimeoutCts.Dispose();
composite.Dispose();
Logger.LogDebug("Download {requestId} ready", requestId);
}
catch (TaskCanceledException)
{
try
{
await _orchestrator.SendRequestAsync(HttpMethod.Get, LightlessFiles.RequestCancelFullPath(downloadFileTransfer[0].DownloadUri, requestId))
.ConfigureAwait(false);
alreadyCancelled = true;
}
catch
{
// ignore
}
throw;
}
finally
{
if (downloadCt.IsCancellationRequested && !alreadyCancelled)
{
try
{
await _orchestrator.SendRequestAsync(HttpMethod.Get, LightlessFiles.RequestCancelFullPath(downloadFileTransfer[0].DownloadUri, requestId))
.ConfigureAwait(false);
}
catch
{
// ignore
}
}
_orchestrator.ClearDownloadRequest(requestId);
}
}
private async Task DownloadQueuedBlockFileAsync(
string statusKey,
Guid requestId,
List<DownloadFileTransfer> transfers,
string tempPath,
IProgress<long> progress,
CancellationToken ct)
{
Logger.LogDebug("GUID {requestId} on server {uri} for files {files}",
requestId, transfers[0].DownloadUri, string.Join(", ", transfers.Select(c => c.Hash)));
// Wait for ready WITHOUT holding a slot
SetStatus(statusKey, DownloadStatus.WaitingForQueue);
await WaitForDownloadReady(transfers, requestId, ct).ConfigureAwait(false);
// Hold slot ONLY for the GET
SetStatus(statusKey, DownloadStatus.WaitingForSlot);
await using ((await AcquireSlotAsync(ct).ConfigureAwait(false)).ConfigureAwait(false))
{
SetStatus(statusKey, DownloadStatus.Downloading);
var requestUrl = LightlessFiles.CacheGetFullPath(transfers[0].DownloadUri, requestId);
await DownloadFileThrottled(requestUrl, tempPath, progress, MungeBuffer, ct, withToken: true).ConfigureAwait(false);
}
}
private void RemoveStatus(string key)
{
lock (_downloadStatusLock)
{
_downloadStatus.Remove(key);
}
}
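// Walks a downloaded block file entry by entry: de-munges header and payload, LZ4-unwraps the
// payload, writes it through the FileCompactor and persists it into the cache. Entries without
// a replacement mapping are skipped, and _decompressGate caps concurrent LZ4 work at roughly
// half the CPU count so extraction does not starve still-running downloads.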
private async Task DecompressBlockFileAsync(
string downloadStatusKey,
string blockFilePath,
Dictionary<string, (string Extension, string GamePath)> replacementLookup,
string downloadLabel,
CancellationToken ct,
bool skipDownscale)
{
SetStatus(downloadStatusKey, DownloadStatus.Decompressing);
MarkTransferredFiles(downloadStatusKey, 1);
try
{
var fileBlockStream = File.OpenRead(blockFilePath);
await using (fileBlockStream.ConfigureAwait(false))
{
while (fileBlockStream.Position < fileBlockStream.Length)
{
(string fileHash, long fileLengthBytes) = ReadBlockFileHeader(fileBlockStream);
try
{
// sanity check length
if (fileLengthBytes < 0 || fileLengthBytes > int.MaxValue)
throw new InvalidDataException($"Invalid block entry length: {fileLengthBytes}");
// safe cast after check
var len = checked((int)fileLengthBytes);
if (!replacementLookup.TryGetValue(fileHash, out var repl))
{
Logger.LogWarning("{dlName}: No replacement mapping for {fileHash}", downloadLabel, fileHash);
fileBlockStream.Seek(len, SeekOrigin.Current);
continue;
}
// decompress
var filePath = _fileDbManager.GetCacheFilePath(fileHash, repl.Extension);
Logger.LogTrace("{dlName}: Decompressing {file}:{len} => {dest}", downloadLabel, fileHash, fileLengthBytes, filePath);
// read compressed data
var compressed = new byte[len];
await ReadExactlyAsync(fileBlockStream, compressed.AsMemory(0, len), ct).ConfigureAwait(false);
if (len == 0)
{
await _fileCompactor.WriteAllBytesAsync(filePath, Array.Empty<byte>(), ct).ConfigureAwait(false);
PersistFileToStorage(fileHash, filePath, repl.GamePath, skipDownscale);
continue;
}
MungeBuffer(compressed);
// limit concurrent decompressions
await _decompressGate.WaitAsync(ct).ConfigureAwait(false);
try
{
var sw = System.Diagnostics.Stopwatch.StartNew();
// decompress
var decompressed = LZ4Wrapper.Unwrap(compressed);
Logger.LogTrace("{dlName}: Unwrap {fileHash} took {ms}ms (compressed {c} bytes, decompressed {d} bytes)",
downloadLabel, fileHash, sw.ElapsedMilliseconds, compressed.Length, decompressed?.Length ?? -1);
// write to file
await _fileCompactor.WriteAllBytesAsync(filePath, decompressed, ct).ConfigureAwait(false);
PersistFileToStorage(fileHash, filePath, repl.GamePath, skipDownscale);
}
finally
{
_decompressGate.Release();
}
}
catch (EndOfStreamException)
{
Logger.LogWarning("{dlName}: Failure to extract file {fileHash}, stream ended prematurely", downloadLabel, fileHash);
}
catch (Exception e)
{
Logger.LogWarning(e, "{dlName}: Error during decompression", downloadLabel);
}
}
}
}
catch (EndOfStreamException)
{
Logger.LogDebug("{dlName}: Failure to extract file header data, stream ended", downloadLabel);
}
catch (Exception ex)
{
Logger.LogError(ex, "{dlName}: Error during block file read", downloadLabel);
}
finally
{
RemoveStatus(downloadStatusKey);
}
}
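// Resolves the transfer list for a character: filters out empty hashes, asks the file service
// for size/availability metadata, records forbidden transfers, and keeps only entries that can
// actually be transferred. The result is stored in CurrentDownloads for DownloadFilesInternal.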
public async Task<List<DownloadFileTransfer>> InitiateDownloadList(
GameObjectHandler? gameObjectHandler,
List<FileReplacementData> fileReplacement,
CancellationToken ct,
Guid? ownerToken = null)
{
CurrentOwnerToken = ownerToken;
var objectName = gameObjectHandler?.Name ?? "Unknown";
Logger.LogDebug("Download start: {id}", objectName);
if (fileReplacement == null || fileReplacement.Count == 0)
{
Logger.LogDebug("{dlName}: No file replacements provided", objectName);
CurrentDownloads = [];
return CurrentDownloads;
}
var hashes = fileReplacement
.Where(f => f != null && !string.IsNullOrWhiteSpace(f.Hash))
.Select(f => f.Hash)
.Distinct(StringComparer.Ordinal)
.ToList();
if (hashes.Count == 0)
{
Logger.LogDebug("{dlName}: No valid hashes to download", objectName);
CurrentDownloads = [];
return CurrentDownloads;
}
List<DownloadFileDto> downloadFileInfoFromService =
[
.. await FilesGetSizes(hashes, ct).ConfigureAwait(false),
];
foreach (var dto in downloadFileInfoFromService.Where(c => c.IsForbidden))
{
if (!_orchestrator.ForbiddenTransfers.Exists(f => string.Equals(f.Hash, dto.Hash, StringComparison.Ordinal)))
_orchestrator.ForbiddenTransfers.Add(new DownloadFileTransfer(dto));
}
CurrentDownloads = [.. downloadFileInfoFromService
.Distinct()
.Select(d => new DownloadFileTransfer(d))
.Where(d => d.CanBeTransferred)];
return CurrentDownloads;
}
private sealed record BatchChunk(string Key, List<DownloadFileTransfer> Items);
private static IEnumerable<List<T>> ChunkList<T>(List<T> items, int chunkSize)
{
for (int i = 0; i < items.Count; i += chunkSize)
yield return items.GetRange(i, Math.Min(chunkSize, items.Count - i));
}
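// Splits CurrentDownloads into direct downloads (individual CDN URLs) and queued batch
// downloads, chunks the batch per host so up to ParallelDownloads chunks can run against each
// server, and drives both groups through Parallel.ForEachAsync with a worker degree of
// parallelism of clamp(ParallelDownloads * 2, 2, 16), letting later downloads proceed while
// earlier chunks are still decompressing. Flipping the direct-download config back on also
// resets the session-wide direct-download circuit breaker.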
private async Task DownloadFilesInternal(GameObjectHandler? gameObjectHandler, List<FileReplacementData> fileReplacement, CancellationToken ct, bool skipDownscale)
{
var objectName = gameObjectHandler?.Name ?? "Unknown";
// config toggles
var configAllowsDirect = _configService.Current.EnableDirectDownloads;
if (configAllowsDirect != _lastConfigDirectDownloadsState)
{
_lastConfigDirectDownloadsState = configAllowsDirect;
if (configAllowsDirect)
{
_disableDirectDownloads = false;
_consecutiveDirectDownloadFailures = 0;
}
}
var allowDirectDownloads = ShouldUseDirectDownloads();
var replacementLookup = BuildReplacementLookup(fileReplacement);
var directDownloads = new List<DownloadFileTransfer>();
var batchDownloads = new List<DownloadFileTransfer>();
foreach (var download in CurrentDownloads)
{
if (!string.IsNullOrEmpty(download.DirectDownloadUrl) && allowDirectDownloads)
directDownloads.Add(download);
else
batchDownloads.Add(download);
}
// Chunk per host so we can fill all slots
var slots = Math.Max(1, _configService.Current.ParallelDownloads);
var batchChunks = batchDownloads
.GroupBy(f => $"{f.DownloadUri.Host}:{f.DownloadUri.Port}", StringComparer.Ordinal)
.SelectMany(g =>
{
var list = g.ToList();
var chunkCount = Math.Min(slots, Math.Max(1, list.Count));
var chunkSize = (int)Math.Ceiling(list.Count / (double)chunkCount);
return ChunkList(list, chunkSize)
.Select(chunk => new BatchChunk(g.Key, chunk));
})
.ToArray();
// init statuses
lock (_downloadStatusLock)
{
_downloadStatus.Clear();
// direct downloads and batch downloads tracked separately
foreach (var d in directDownloads)
{
_downloadStatus[d.DirectDownloadUrl!] = new FileDownloadStatus
{
DownloadStatus = DownloadStatus.Initializing,
TotalBytes = d.Total,
TotalFiles = 1,
TransferredBytes = 0,
TransferredFiles = 0
};
}
foreach (var g in batchChunks.GroupBy(c => c.Key, StringComparer.Ordinal))
{
_downloadStatus[g.Key] = new FileDownloadStatus
{
DownloadStatus = DownloadStatus.Initializing,
TotalBytes = g.SelectMany(x => x.Items).Sum(x => x.Total),
TotalFiles = 1,
TransferredBytes = 0,
TransferredFiles = 0
};
}
}
if (directDownloads.Count > 0 || batchChunks.Length > 0)
{
Logger.LogInformation("Downloading {direct} files directly, and {batchtotal} queued in {chunks} chunks.",
directDownloads.Count, batchDownloads.Count, batchChunks.Length);
}
if (gameObjectHandler is not null)
Mediator.Publish(new DownloadStartedMessage(gameObjectHandler, _downloadStatus));
// allow some extra workers so downloads can continue while earlier items decompress.
var workerDop = Math.Clamp(slots * 2, 2, 16);
// batch downloads
Task batchTask = batchChunks.Length == 0
? Task.CompletedTask
: Parallel.ForEachAsync(batchChunks, new ParallelOptions { MaxDegreeOfParallelism = workerDop, CancellationToken = ct },
async (chunk, token) => await ProcessBatchChunkAsync(chunk, replacementLookup, token, skipDownscale).ConfigureAwait(false));
// direct downloads
Task directTask = directDownloads.Count == 0
? Task.CompletedTask
: Parallel.ForEachAsync(directDownloads, new ParallelOptions { MaxDegreeOfParallelism = workerDop, CancellationToken = ct },
async (d, token) => await ProcessDirectAsync(d, replacementLookup, token, skipDownscale).ConfigureAwait(false));
await Task.WhenAll(batchTask, directTask).ConfigureAwait(false);
Logger.LogDebug("Download end: {id}", objectName);
ClearDownload();
}
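// Handles one queued chunk: enqueues the hashes with the server, waits for the request to become
// ready, downloads the resulting block file while holding a download slot, extracts it, and
// finally deletes the temporary .blk file.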
private async Task ProcessBatchChunkAsync(BatchChunk chunk, Dictionary<string, (string Extension, string GamePath)> replacementLookup, CancellationToken ct, bool skipDownscale)
{
var statusKey = chunk.Key;
// enqueue (no slot)
SetStatus(statusKey, DownloadStatus.WaitingForQueue);
var requestIdResponse = await _orchestrator.SendRequestAsync(
HttpMethod.Post,
LightlessFiles.RequestEnqueueFullPath(chunk.Items[0].DownloadUri),
chunk.Items.Select(c => c.Hash),
ct).ConfigureAwait(false);
var requestId = Guid.Parse((await requestIdResponse.Content.ReadAsStringAsync(ct).ConfigureAwait(false)).Trim('"'));
var blockFile = _fileDbManager.GetCacheFilePath(requestId.ToString("N"), "blk");
var fi = new FileInfo(blockFile);
try
{
// download (with slot)
var progress = CreateInlineProgress(bytes => AddTransferredBytes(statusKey, bytes));
// Download slot held on get
await DownloadQueuedBlockFileAsync(statusKey, requestId, chunk.Items, blockFile, progress, ct).ConfigureAwait(false);
// decompress if file exists
if (!File.Exists(blockFile))
{
Logger.LogWarning("{dlName}: Block file missing before extraction, skipping", fi.Name);
return;
}
await DecompressBlockFileAsync(statusKey, blockFile, replacementLookup, fi.Name, ct, skipDownscale).ConfigureAwait(false);
}
catch (OperationCanceledException)
{
Logger.LogDebug("{dlName}: Detected cancellation of download, partially extracting files for {id}", fi.Name, requestId);
}
catch (Exception ex)
{
Logger.LogError(ex, "{dlName}: Error during batch chunk processing", fi.Name);
ClearDownload();
}
finally
{
try { File.Delete(blockFile); } catch { /* ignore */ }
}
}
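// Downloads a single file from its direct CDN URL (no token), then LZ4-unwraps it into the
// cache. Any failure falls back to the queued path; three consecutive unexpected failures
// disable direct downloads for the rest of the session, while expected failures such as a
// 404 (surfaced as InvalidDataException) do not count against that limit.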
private async Task ProcessDirectAsync(DownloadFileTransfer directDownload, Dictionary<string, (string Extension, string GamePath)> replacementLookup, CancellationToken ct, bool skipDownscale)
{
var progress = CreateInlineProgress(bytes =>
{
if (!string.IsNullOrEmpty(directDownload.DirectDownloadUrl))
AddTransferredBytes(directDownload.DirectDownloadUrl!, bytes);
});
if (!ShouldUseDirectDownloads() || string.IsNullOrEmpty(directDownload.DirectDownloadUrl))
{
await ProcessDirectAsQueuedFallbackAsync(directDownload, replacementLookup, progress, ct, skipDownscale).ConfigureAwait(false);
return;
}
var tempFilename = _fileDbManager.GetCacheFilePath(directDownload.Hash, "bin");
try
{
// Download slot held on get
SetStatus(directDownload.DirectDownloadUrl!, DownloadStatus.WaitingForSlot);
await using ((await AcquireSlotAsync(ct).ConfigureAwait(false)).ConfigureAwait(false))
{
SetStatus(directDownload.DirectDownloadUrl!, DownloadStatus.Downloading);
Logger.LogDebug("Beginning direct download of {hash} from {url}", directDownload.Hash, directDownload.DirectDownloadUrl);
await DownloadFileThrottled(new Uri(directDownload.DirectDownloadUrl!), tempFilename, progress, callback: null, ct, withToken: false)
.ConfigureAwait(false);
}
Interlocked.Exchange(ref _consecutiveDirectDownloadFailures, 0);
// Decompress/write
SetStatus(directDownload.DirectDownloadUrl!, DownloadStatus.Decompressing);
if (!replacementLookup.TryGetValue(directDownload.Hash, out var repl))
{
Logger.LogWarning("{hash}: No replacement data found for direct download.", directDownload.Hash);
return;
}
var finalFilename = _fileDbManager.GetCacheFilePath(directDownload.Hash, repl.Extension);
Logger.LogDebug("Decompressing direct download {hash} from {compressedFile} to {finalFile}",
directDownload.Hash, tempFilename, finalFilename);
// Read compressed bytes and decompress in memory
byte[] compressedBytes = await File.ReadAllBytesAsync(tempFilename, ct).ConfigureAwait(false);
var decompressedBytes = LZ4Wrapper.Unwrap(compressedBytes);
await _fileCompactor.WriteAllBytesAsync(finalFilename, decompressedBytes, ct).ConfigureAwait(false);
PersistFileToStorage(directDownload.Hash, finalFilename, repl.GamePath, skipDownscale);
MarkTransferredFiles(directDownload.DirectDownloadUrl!, 1);
Logger.LogDebug("Finished direct download of {hash}.", directDownload.Hash);
RemoveStatus(directDownload.DirectDownloadUrl!);
}
catch (OperationCanceledException ex)
{
if (ct.IsCancellationRequested)
Logger.LogDebug("{hash}: Direct download cancelled by caller, discarding file.", directDownload.Hash);
else
Logger.LogWarning(ex, "{hash}: Direct download cancelled unexpectedly.", directDownload.Hash);
ClearDownload();
}
catch (Exception ex)
{
var expectedDirectDownloadFailure = ex is InvalidDataException;
var failureCount = expectedDirectDownloadFailure ? 0 : Interlocked.Increment(ref _consecutiveDirectDownloadFailures);
if (expectedDirectDownloadFailure)
Logger.LogInformation(ex, "{hash}: Direct download unavailable, attempting queued fallback.", directDownload.Hash);
else
Logger.LogWarning(ex, "{hash}: Direct download failed, attempting queued fallback.", directDownload.Hash);
try
{
await ProcessDirectAsQueuedFallbackAsync(directDownload, replacementLookup, progress, ct, skipDownscale).ConfigureAwait(false);
if (!expectedDirectDownloadFailure && failureCount >= 3 && !_disableDirectDownloads)
{
_disableDirectDownloads = true;
Logger.LogWarning("Disabling direct downloads for this session after {count} consecutive failures.", failureCount);
}
}
catch (Exception fallbackEx)
{
Logger.LogError(fallbackEx, "{hash}: Error during direct download fallback.", directDownload.Hash);
ClearDownload();
}
}
finally
{
try { File.Delete(tempFilename); }
catch
{
// ignore
}
}
}
private async Task ProcessDirectAsQueuedFallbackAsync(
DownloadFileTransfer directDownload,
Dictionary<string, (string Extension, string GamePath)> replacementLookup,
IProgress<long> progress,
CancellationToken ct,
bool skipDownscale)
{
if (string.IsNullOrEmpty(directDownload.DirectDownloadUrl))
throw new InvalidOperationException("Direct download fallback requested without a direct download URL.");
var statusKey = directDownload.DirectDownloadUrl!;
SetStatus(statusKey, DownloadStatus.WaitingForQueue);
var requestIdResponse = await _orchestrator.SendRequestAsync(
HttpMethod.Post,
LightlessFiles.RequestEnqueueFullPath(directDownload.DownloadUri),
new[] { directDownload.Hash },
ct).ConfigureAwait(false);
var requestId = Guid.Parse((await requestIdResponse.Content.ReadAsStringAsync(ct).ConfigureAwait(false)).Trim('"'));
var blockFile = _fileDbManager.GetCacheFilePath(requestId.ToString("N"), "blk");
try
{
await DownloadQueuedBlockFileAsync(statusKey, requestId, [directDownload], blockFile, progress, ct).ConfigureAwait(false);
if (!File.Exists(blockFile))
throw new FileNotFoundException("Block file missing after direct download fallback.", blockFile);
await DecompressBlockFileAsync(statusKey, blockFile, replacementLookup, $"fallback-{directDownload.Hash}", ct, skipDownscale)
.ConfigureAwait(false);
}
finally
{
try { File.Delete(blockFile); }
catch
{
// ignore
}
}
}
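// Requests size and availability metadata (DownloadFileDto) for a batch of hashes from the
// configured file CDN in a single call; requires the orchestrator to be initialized.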
private async Task<List<DownloadFileDto>> FilesGetSizes(List<string> hashes, CancellationToken ct)
{
if (!_orchestrator.IsInitialized)
throw new InvalidOperationException("FileTransferManager is not initialized");
// batch request
var response = await _orchestrator.SendRequestAsync(
HttpMethod.Get,
LightlessFiles.ServerFilesGetSizesFullPath(_orchestrator.FilesCdnUri!),
hashes,
ct).ConfigureAwait(false);
// deserialize response; fall back to an empty list if the body is null
return await response.Content.ReadFromJsonAsync<List<DownloadFileDto>>(cancellationToken: ct).ConfigureAwait(false) ?? [];
}
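// Registers an extracted file with the cache database, schedules optional texture downscaling,
// and verifies the recomputed hash; on mismatch the file is deleted and the cache entry removed.
// Creation and last-write timestamps are randomized to a date since 1995, presumably so cache
// files do not reveal when they were actually downloaded.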
private void PersistFileToStorage(string fileHash, string filePath, string gamePath, bool skipDownscale)
{
var fi = new FileInfo(filePath);
Func<DateTime> RandomDayInThePast()
{
DateTime start = new(1995, 1, 1, 1, 1, 1, DateTimeKind.Local);
Random gen = new();
int range = (DateTime.Today - start).Days;
return () => start.AddDays(gen.Next(range));
}
fi.CreationTime = RandomDayInThePast().Invoke();
fi.LastAccessTime = DateTime.Today;
fi.LastWriteTime = RandomDayInThePast().Invoke();
try
{
var entry = _fileDbManager.CreateCacheEntry(filePath);
var mapKind = _textureMetadataHelper.DetermineMapKind(gamePath, filePath);
if (!skipDownscale)
_textureDownscaleService.ScheduleDownscale(fileHash, filePath, mapKind);
if (entry != null && !string.Equals(entry.Hash, fileHash, StringComparison.OrdinalIgnoreCase))
{
Logger.LogError("Hash mismatch after extracting, got {hash}, expected {expectedHash}, deleting file",
entry.Hash, fileHash);
File.Delete(filePath);
_fileDbManager.RemoveHashedFile(entry.Hash, entry.PrefixedFilePath);
}
}
catch (Exception ex)
{
Logger.LogWarning(ex, "Error creating cache entry");
}
}
private static IProgress<long> CreateInlineProgress(Action<long> callback) => new InlineProgress(callback);
private sealed class InlineProgress : IProgress<long>
{
private readonly Action<long> _callback;
public InlineProgress(Action<long> callback) => _callback = callback ?? throw new ArgumentNullException(nameof(callback));
public void Report(long value) => _callback(value);
}
}