Compare commits

...

7 Commits

28 changed files with 446 additions and 421 deletions

View File

@@ -10,164 +10,164 @@ using DHT.Server;
using DHT.Server.Data;
using DHT.Server.Service;

-namespace DHT.Desktop.Main.Pages {
+namespace DHT.Desktop.Main.Pages;

sealed class DebugPageModel {
	public string GenerateChannels { get; set; } = "0";
	public string GenerateUsers { get; set; } = "0";
	public string GenerateMessages { get; set; } = "0";

	private readonly Window window;
	private readonly State state;

	[Obsolete("Designer")]
	public DebugPageModel() : this(null!, State.Dummy) {}

	public DebugPageModel(Window window, State state) {
		this.window = window;
		this.state = state;
	}

	public async void OnClickAddRandomDataToDatabase() {
		if (!int.TryParse(GenerateChannels, out int channels) || channels < 1) {
			await Dialog.ShowOk(window, "Generate Random Data", "Amount of channels must be at least 1!");
			return;
		}

		if (!int.TryParse(GenerateUsers, out int users) || users < 1) {
			await Dialog.ShowOk(window, "Generate Random Data", "Amount of users must be at least 1!");
			return;
		}

		if (!int.TryParse(GenerateMessages, out int messages) || messages < 1) {
			await Dialog.ShowOk(window, "Generate Random Data", "Amount of messages must be at least 1!");
			return;
		}

		await ProgressDialog.Show(window, "Generating Random Data", async (_, callback) => await GenerateRandomData(channels, users, messages, callback));
	}

	private const int BatchSize = 500;

	private async Task GenerateRandomData(int channelCount, int userCount, int messageCount, IProgressCallback callback) {
		int batchCount = (messageCount + BatchSize - 1) / BatchSize;
		await callback.Update("Adding messages in batches of " + BatchSize, 0, batchCount);

		var rand = new Random();
		var server = new DHT.Server.Data.Server {
			Id = RandomId(rand),
			Name = RandomName("s"),
			Type = ServerType.Server,
		};

		var channels = Enumerable.Range(0, channelCount).Select(i => new Channel {
			Id = RandomId(rand),
			Server = server.Id,
			Name = RandomName("c"),
			ParentId = null,
			Position = i,
			Topic = RandomText(rand, 10),
			Nsfw = rand.Next(4) == 0,
		}).ToArray();

		var users = Enumerable.Range(0, userCount).Select(_ => new User {
			Id = RandomId(rand),
			Name = RandomName("u"),
			AvatarUrl = null,
			Discriminator = rand.Next(0, 9999).ToString(),
		}).ToArray();

		await state.Db.Users.Add(users);
		await state.Db.Servers.Add([server]);
		await state.Db.Channels.Add(channels);

		var now = DateTimeOffset.Now;
		int batchIndex = 0;

		while (messageCount > 0) {
			int hourOffset = batchIndex;

			var messages = Enumerable.Range(0, Math.Min(messageCount, BatchSize)).Select(i => {
				DateTimeOffset time = now.AddHours(hourOffset).AddMinutes(i * 60.0 / BatchSize);
				DateTimeOffset? edit = rand.Next(100) == 0 ? time.AddSeconds(rand.Next(1, 60)) : null;

				var timeMillis = time.ToUnixTimeMilliseconds();
				var editMillis = edit?.ToUnixTimeMilliseconds();

				return new Message {
					Id = (ulong) timeMillis,
					Sender = RandomBiasedIndex(rand, users).Id,
					Channel = RandomBiasedIndex(rand, channels).Id,
					Text = RandomText(rand, 100),
					Timestamp = timeMillis,
					EditTimestamp = editMillis,
					RepliedToId = null,
					Attachments = ImmutableList<Attachment>.Empty,
					Embeds = ImmutableList<Embed>.Empty,
					Reactions = ImmutableList<Reaction>.Empty,
				};
			}).ToArray();

			await state.Db.Messages.Add(messages);

			messageCount -= BatchSize;
			await callback.Update("Adding messages in batches of " + BatchSize, ++batchIndex, batchCount);
		}
	}

	private static ulong RandomId(Random rand) {
		ulong h = unchecked((ulong) rand.Next());
		ulong l = unchecked((ulong) rand.Next());
		return (h << 32) | l;
	}

	private static string RandomName(string prefix) {
		return prefix + "-" + ServerUtils.GenerateRandomToken(5);
	}

	private static T RandomBiasedIndex<T>(Random rand, T[] options) {
		return options[(int) Math.Floor(options.Length * rand.NextDouble() * rand.NextDouble())];
	}

	private static readonly string[] RandomWords = [
		"apple", "apricot", "artichoke", "arugula", "asparagus", "avocado",
		"banana", "bean", "beechnut", "beet", "blackberry", "blackcurrant", "blueberry", "boysenberry", "bramble", "broccoli",
		"cabbage", "cacao", "cantaloupe", "caper", "carambola", "carrot", "cauliflower", "celery", "chard", "cherry", "chokeberry", "citron", "clementine", "coconut", "corn", "crabapple", "cranberry", "cucumber", "currant",
		"daikon", "date", "dewberry", "durian",
		"edamame", "eggplant", "elderberry", "endive",
		"fig",
		"garlic", "ginger", "gooseberry", "grape", "grapefruit", "guava",
		"honeysuckle", "horseradish", "huckleberry",
		"jackfruit", "jicama",
		"kale", "kiwi", "kohlrabi", "kumquat",
		"leek", "lemon", "lentil", "lettuce", "lime",
		"mandarin", "mango", "mushroom", "myrtle",
		"nectarine", "nut",
		"olive", "okra", "onion", "orange",
		"papaya", "parsnip", "pawpaw", "peach", "pear", "pea", "pepper", "persimmon", "pineapple", "plum", "plantain", "pomegranate", "pomelo", "potato", "prune", "pumpkin",
		"quandong", "quinoa",
		"radicchio", "radish", "raisin", "raspberry", "redcurrant", "rhubarb", "rutabaga",
		"spinach", "strawberry", "squash",
		"tamarind", "tangerine", "tomatillo", "tomato", "turnip",
		"vanilla",
		"watercress", "watermelon",
		"yam",
		"zucchini"
	];

	private static string RandomText(Random rand, int maxWords) {
		int wordCount = 1 + (int) Math.Floor(maxWords * Math.Pow(rand.NextDouble(), 3));
		return string.Join(' ', Enumerable.Range(0, wordCount).Select(_ => RandomWords[rand.Next(RandomWords.Length)]));
	}
}
#else

-namespace DHT.Desktop.Main.Pages {
+namespace DHT.Desktop.Main.Pages;

sealed class DebugPageModel {
	public string GenerateChannels { get; set; } = "0";
	public string GenerateUsers { get; set; } = "0";
	public string GenerateMessages { get; set; } = "0";

	public void OnClickAddRandomDataToDatabase() {}
}
#endif

View File

@@ -1,3 +0,0 @@
namespace DHT.Server.Data;

public readonly record struct DownloadWithData(Download Download, byte[]? Data);

View File

@@ -26,7 +26,9 @@ public static class DatabaseExtensions {
		await target.Messages.Add(batchedMessages);
		
		await foreach (var download in source.Downloads.Get()) {
-			await target.Downloads.AddDownload(await source.Downloads.HydrateWithData(download));
+			if (download.Status != DownloadStatus.Success || !await source.Downloads.GetDownloadData(download.NormalizedUrl, stream => target.Downloads.AddDownload(download, stream))) {
+				await target.Downloads.AddDownload(download, stream: null);
+			}
		}
	}
}

View File

@@ -161,7 +161,7 @@ public static class LegacyArchiveImport {
		var messagesObj = data.HasKey(channelIdStr) ? data.RequireObject(channelIdStr, DataPath) : (JsonElement?) null;
		if (messagesObj == null) {
-			return Array.Empty<Message>();
+			return [];
		}
		
		return messagesObj.Value.EnumerateObject().Select(item => {

View File

@@ -1,10 +1,10 @@
using System;
using System.Collections.Generic;
+using System.IO;
using System.Linq;
using System.Reactive.Linq;
using System.Threading;
using System.Threading.Tasks;
-using DHT.Server.Data;
using DHT.Server.Data.Aggregations;
using DHT.Server.Data.Filters;
using DHT.Server.Download;
@@ -14,7 +14,7 @@ namespace DHT.Server.Database.Repositories;
public interface IDownloadRepository {
	IObservable<long> TotalCount { get; }
	
-	Task AddDownload(DownloadWithData item);
+	Task AddDownload(Data.Download item, Stream? stream);
	
	Task<long> Count(DownloadItemFilter filter, CancellationToken cancellationToken = default);
@@ -22,9 +22,9 @@ public interface IDownloadRepository {
	IAsyncEnumerable<Data.Download> Get();
	
-	Task<DownloadWithData> HydrateWithData(Data.Download download);
+	Task<bool> GetDownloadData(string normalizedUrl, Func<Stream, Task> dataProcessor);
	
-	Task<DownloadWithData?> GetSuccessfulDownloadWithData(string normalizedUrl);
+	Task<bool> GetSuccessfulDownloadWithData(string normalizedUrl, Func<Data.Download, Stream, Task> dataProcessor);
	
	IAsyncEnumerable<DownloadItem> PullPendingDownloadItems(int count, DownloadItemFilter filter, CancellationToken cancellationToken = default);
@@ -35,7 +35,7 @@ public interface IDownloadRepository {
	internal sealed class Dummy : IDownloadRepository {
		public IObservable<long> TotalCount { get; } = Observable.Return(0L);
		
-		public Task AddDownload(DownloadWithData item) {
+		public Task AddDownload(Data.Download item, Stream? stream) {
			return Task.CompletedTask;
		}
@@ -51,12 +51,12 @@ public interface IDownloadRepository {
			return AsyncEnumerable.Empty<Data.Download>();
		}
		
-		public Task<DownloadWithData> HydrateWithData(Data.Download download) {
-			return Task.FromResult(new DownloadWithData(download, Data: null));
+		public Task<bool> GetDownloadData(string normalizedUrl, Func<Stream, Task> dataProcessor) {
+			return Task.FromResult(false);
		}
		
-		public Task<DownloadWithData?> GetSuccessfulDownloadWithData(string normalizedUrl) {
-			return Task.FromResult<DownloadWithData?>(null);
+		public Task<bool> GetSuccessfulDownloadWithData(string normalizedUrl, Func<Data.Download, Stream, Task> dataProcessor) {
+			return Task.FromResult(false);
		}
		
		public IAsyncEnumerable<DownloadItem> PullPendingDownloadItems(int count, DownloadItemFilter filter, CancellationToken cancellationToken) {

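For context on the interface change above: callers no longer receive a DownloadWithData carrying a byte[]; they pass a callback that reads the blob as a Stream while the repository keeps the connection open. A hedged sketch of consuming GetDownloadData, assuming only the signatures shown in this diff (the ExportBlobToFile helper and the target path are illustrative, not part of the changeset):

using System.IO;
using System.Threading.Tasks;
using DHT.Server.Database.Repositories;

static class DownloadDataUsageSketch {
	// Illustrative helper: copies a stored blob to a file without buffering it in memory.
	// Returns false when the repository has no blob for the given normalized URL.
	public static Task<bool> ExportBlobToFile(IDownloadRepository downloads, string normalizedUrl, string targetPath) {
		return downloads.GetDownloadData(normalizedUrl, async stream => {
			await using var file = File.Create(targetPath);
			await stream.CopyToAsync(file);
		});
	}
}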
View File

@@ -19,9 +19,9 @@ sealed class SqliteChannelRepository : BaseSqliteRepository, IChannelRepository
	}
	
	public async Task Add(IReadOnlyList<Channel> channels) {
-		await using var conn = await pool.Take();
-		await using (var tx = await conn.BeginTransactionAsync()) {
+		await using (var conn = await pool.Take()) {
+			await conn.BeginTransactionAsync();
			await using var cmd = conn.Upsert("channels", [
				("id", SqliteType.Integer),
				("server", SqliteType.Integer),
@@ -43,7 +43,7 @@ sealed class SqliteChannelRepository : BaseSqliteRepository, IChannelRepository
				await cmd.ExecuteNonQueryAsync();
			}
			
-			await tx.CommitAsync();
+			await conn.CommitTransactionAsync();
		}
		
		UpdateTotalCount();

View File

@@ -1,5 +1,6 @@
using System;
using System.Collections.Generic;
+using System.IO;
using System.Runtime.CompilerServices;
using System.Threading;
using System.Threading.Tasks;
@@ -14,14 +15,8 @@ using Microsoft.Data.Sqlite;
namespace DHT.Server.Database.Sqlite.Repositories;

-sealed class SqliteDownloadRepository : BaseSqliteRepository, IDownloadRepository {
+sealed class SqliteDownloadRepository(SqliteConnectionPool pool) : BaseSqliteRepository(Log), IDownloadRepository {
	private static readonly Log Log = Log.ForType<SqliteDownloadRepository>();
	
-	private readonly SqliteConnectionPool pool;
-	
-	public SqliteDownloadRepository(SqliteConnectionPool pool) : base(Log) {
-		this.pool = pool;
-	}
-	
	internal sealed class NewDownloadCollector : IAsyncDisposable {
		private readonly SqliteDownloadRepository repository;
@@ -66,12 +61,10 @@ sealed class SqliteDownloadRepository : BaseSqliteRepository, IDownloadRepositor
		}
	}
	
-	public async Task AddDownload(DownloadWithData item) {
-		var (download, data) = item;
-		
+	public async Task AddDownload(Data.Download item, Stream? stream) {
		await using (var conn = await pool.Take()) {
-			var tx = await conn.BeginTransactionAsync();
+			await conn.BeginTransactionAsync();
			
			await using var metadataCmd = conn.Upsert("download_metadata", [
				("normalized_url", SqliteType.Text),
				("download_url", SqliteType.Text),
@@ -80,30 +73,37 @@ sealed class SqliteDownloadRepository : BaseSqliteRepository, IDownloadRepositor
				("size", SqliteType.Integer),
			]);
			
-			metadataCmd.Set(":normalized_url", download.NormalizedUrl);
-			metadataCmd.Set(":download_url", download.DownloadUrl);
-			metadataCmd.Set(":status", (int) download.Status);
-			metadataCmd.Set(":type", download.Type);
-			metadataCmd.Set(":size", download.Size);
+			metadataCmd.Set(":normalized_url", item.NormalizedUrl);
+			metadataCmd.Set(":download_url", item.DownloadUrl);
+			metadataCmd.Set(":status", (int) item.Status);
+			metadataCmd.Set(":type", item.Type);
+			metadataCmd.Set(":size", item.Size);
			await metadataCmd.ExecuteNonQueryAsync();
			
-			if (data == null) {
+			if (stream == null) {
				await using var deleteBlobCmd = conn.Command("DELETE FROM download_blobs WHERE normalized_url = :normalized_url");
-				deleteBlobCmd.AddAndSet(":normalized_url", SqliteType.Text, download.NormalizedUrl);
+				deleteBlobCmd.AddAndSet(":normalized_url", SqliteType.Text, item.NormalizedUrl);
				await deleteBlobCmd.ExecuteNonQueryAsync();
			}
			else {
-				await using var upsertBlobCmd = conn.Upsert("download_blobs", [
-					("normalized_url", SqliteType.Text),
-					("blob", SqliteType.Blob)
-				]);
-				upsertBlobCmd.Set(":normalized_url", download.NormalizedUrl);
-				upsertBlobCmd.Set(":blob", data);
-				await upsertBlobCmd.ExecuteNonQueryAsync();
+				await using var upsertBlobCmd = conn.Command(
+					"""
+					INSERT INTO download_blobs (normalized_url, blob)
+					VALUES (:normalized_url, ZEROBLOB(:blob_length))
+					ON CONFLICT (normalized_url) DO UPDATE SET blob = excluded.blob
+					RETURNING rowid
+					"""
+				);
+				upsertBlobCmd.AddAndSet(":normalized_url", SqliteType.Text, item.NormalizedUrl);
+				upsertBlobCmd.AddAndSet(":blob_length", SqliteType.Integer, item.Size);
+				long rowid = await upsertBlobCmd.ExecuteLongScalarAsync();
+				
+				await using var blob = new SqliteBlob(conn.InnerConnection, "download_blobs", "blob", rowid);
+				await stream.CopyToAsync(blob);
			}
			
-			await tx.CommitAsync();
+			await conn.CommitTransactionAsync();
		}
		
		UpdateTotalCount();
@@ -187,24 +187,35 @@ sealed class SqliteDownloadRepository : BaseSqliteRepository, IDownloadRepositor
		}
	}
	
-	public async Task<DownloadWithData> HydrateWithData(Data.Download download) {
+	public async Task<bool> GetDownloadData(string normalizedUrl, Func<Stream, Task> dataProcessor) {
		await using var conn = await pool.Take();
		
-		await using var cmd = conn.Command("SELECT blob FROM download_blobs WHERE normalized_url = :url");
-		cmd.AddAndSet(":url", SqliteType.Text, download.NormalizedUrl);
-		
-		await using var reader = await cmd.ExecuteReaderAsync();
-		var data = await reader.ReadAsync() && !reader.IsDBNull(0) ? (byte[]) reader["blob"] : null;
-		return new DownloadWithData(download, data);
+		await using var cmd = conn.Command("SELECT rowid FROM download_blobs WHERE normalized_url = :normalized_url");
+		cmd.AddAndSet(":normalized_url", SqliteType.Text, normalizedUrl);
+		
+		long rowid;
+		
+		await using (var reader = await cmd.ExecuteReaderAsync()) {
+			if (!await reader.ReadAsync()) {
+				return false;
+			}
+			
+			rowid = reader.GetInt64(0);
+		}
+		
+		await using (var blob = new SqliteBlob(conn.InnerConnection, "download_blobs", "blob", rowid, readOnly: true)) {
+			await dataProcessor(blob);
+		}
+		
+		return true;
	}
	
-	public async Task<DownloadWithData?> GetSuccessfulDownloadWithData(string normalizedUrl) {
+	public async Task<bool> GetSuccessfulDownloadWithData(string normalizedUrl, Func<Data.Download, Stream, Task> dataProcessor) {
		await using var conn = await pool.Take();
		
		await using var cmd = conn.Command(
			"""
-			SELECT dm.download_url, dm.type, db.blob FROM download_metadata dm
+			SELECT dm.download_url, dm.type, db.rowid FROM download_metadata dm
			JOIN download_blobs db ON dm.normalized_url = db.normalized_url
			WHERE dm.normalized_url = :normalized_url AND dm.status = :success IS NOT NULL
			"""
@@ -213,19 +224,25 @@ sealed class SqliteDownloadRepository : BaseSqliteRepository, IDownloadRepositor
		cmd.AddAndSet(":normalized_url", SqliteType.Text, normalizedUrl);
		cmd.AddAndSet(":success", SqliteType.Integer, (int) DownloadStatus.Success);
		
-		await using var reader = await cmd.ExecuteReaderAsync();
-		
-		if (!await reader.ReadAsync()) {
-			return null;
-		}
-		
-		var downloadUrl = reader.GetString(0);
-		var type = reader.IsDBNull(1) ? null : reader.GetString(1);
-		var data = (byte[]) reader[2];
-		var size = (ulong) data.LongLength;
-		var download = new Data.Download(normalizedUrl, downloadUrl, DownloadStatus.Success, type, size);
-		return new DownloadWithData(download, data);
+		string downloadUrl;
+		string? type;
+		long rowid;
+		
+		await using (var reader = await cmd.ExecuteReaderAsync()) {
+			if (!await reader.ReadAsync()) {
+				return false;
+			}
+			
+			downloadUrl = reader.GetString(0);
+			type = reader.IsDBNull(1) ? null : reader.GetString(1);
+			rowid = reader.GetInt64(2);
+		}
+		
+		await using (var blob = new SqliteBlob(conn.InnerConnection, "download_blobs", "blob", rowid, readOnly: true)) {
+			await dataProcessor(new Data.Download(normalizedUrl, downloadUrl, DownloadStatus.Success, type, (ulong) blob.Length), blob);
+		}
+		
+		return true;
	}
	
	public async IAsyncEnumerable<DownloadItem> PullPendingDownloadItems(int count, DownloadItemFilter filter, [EnumeratorCancellation] CancellationToken cancellationToken) {

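The ZEROBLOB/RETURNING/SqliteBlob pattern used above is plain Microsoft.Data.Sqlite and can be exercised in isolation. A minimal sketch against a throwaway table (the table name, column names and key parameter are made up for illustration, not taken from the changeset):

using System.IO;
using System.Threading.Tasks;
using Microsoft.Data.Sqlite;

static class SqliteBlobSketch {
	public static async Task UpsertBlob(SqliteConnection conn, string key, Stream source, long length) {
		await using var cmd = conn.CreateCommand();
		// Reserve space for the blob and get the rowid of the affected row back in one statement.
		cmd.CommandText = """
			INSERT INTO blobs (key, data)
			VALUES ($key, ZEROBLOB($length))
			ON CONFLICT (key) DO UPDATE SET data = excluded.data
			RETURNING rowid
			""";
		cmd.Parameters.AddWithValue("$key", key);
		cmd.Parameters.AddWithValue("$length", length);
		long rowid = (long) (await cmd.ExecuteScalarAsync())!;
		
		// Stream the payload into the reserved blob instead of materializing a byte[].
		await using var blob = new SqliteBlob(conn, "blobs", "data", rowid);
		await source.CopyToAsync(blob);
	}
}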
View File

@@ -39,7 +39,7 @@ sealed class SqliteMessageRepository : BaseSqliteRepository, IMessageRepository
		}
		
		await using (var conn = await pool.Take()) {
-			await using var tx = await conn.BeginTransactionAsync();
+			await conn.BeginTransactionAsync();
			
			await using var messageCmd = conn.Upsert("messages", [
				("message_id", SqliteType.Integer),
@@ -167,7 +167,7 @@
				}
			}
			
-			await tx.CommitAsync();
+			await conn.CommitTransactionAsync();
			downloadCollector.OnCommitted();
		}
@@ -183,11 +183,11 @@
		return await conn.ExecuteReaderAsync("SELECT COUNT(*) FROM messages" + filter.GenerateConditions().BuildWhereClause(), static reader => reader?.GetInt64(0) ?? 0L, cancellationToken);
	}
	
-	private sealed class MesageToManyCommand<T> : IAsyncDisposable {
+	private sealed class MessageToManyCommand<T> : IAsyncDisposable {
		private readonly SqliteCommand cmd;
		private readonly Func<SqliteDataReader, T> readItem;
		
-		public MesageToManyCommand(ISqliteConnection conn, string sql, Func<SqliteDataReader, T> readItem) {
+		public MessageToManyCommand(ISqliteConnection conn, string sql, Func<SqliteDataReader, T> readItem) {
			this.cmd = conn.Command(sql);
			this.cmd.Add(":message_id", SqliteType.Integer);
@@ -223,7 +223,7 @@
			WHERE message_id = :message_id
			""";
		
-		await using var attachmentCmd = new MesageToManyCommand<Attachment>(conn, AttachmentSql, static reader => new Attachment {
+		await using var attachmentCmd = new MessageToManyCommand<Attachment>(conn, AttachmentSql, static reader => new Attachment {
			Id = reader.GetUint64(0),
			Name = reader.GetString(1),
			Type = reader.IsDBNull(2) ? null : reader.GetString(2),
@@ -241,7 +241,7 @@
			WHERE message_id = :message_id
			""";
		
-		await using var embedCmd = new MesageToManyCommand<Embed>(conn, EmbedSql, static reader => new Embed {
+		await using var embedCmd = new MessageToManyCommand<Embed>(conn, EmbedSql, static reader => new Embed {
			Json = reader.GetString(0)
		});
@@ -252,7 +252,7 @@
			WHERE message_id = :message_id
			""";
		
-		await using var reactionsCmd = new MesageToManyCommand<Reaction>(conn, ReactionSql, static reader => new Reaction {
+		await using var reactionsCmd = new MessageToManyCommand<Reaction>(conn, ReactionSql, static reader => new Reaction {
			EmojiId = reader.IsDBNull(0) ? null : reader.GetUint64(0),
			EmojiName = reader.IsDBNull(1) ? null : reader.GetString(1),
			EmojiFlags = (EmojiFlags) reader.GetInt16(2),

View File

@@ -19,9 +19,9 @@ sealed class SqliteServerRepository : BaseSqliteRepository, IServerRepository {
	}
	
	public async Task Add(IReadOnlyList<Data.Server> servers) {
-		await using var conn = await pool.Take();
-		await using (var tx = await conn.BeginTransactionAsync()) {
+		await using (var conn = await pool.Take()) {
+			await conn.BeginTransactionAsync();
			await using var cmd = conn.Upsert("servers", [
				("id", SqliteType.Integer),
				("name", SqliteType.Text),
@@ -35,7 +35,7 @@ sealed class SqliteServerRepository : BaseSqliteRepository, IServerRepository {
				await cmd.ExecuteNonQueryAsync();
			}
			
-			await tx.CommitAsync();
+			await conn.CommitTransactionAsync();
		}
		
		UpdateTotalCount();

View File

@@ -23,7 +23,7 @@ sealed class SqliteUserRepository : BaseSqliteRepository, IUserRepository {
	public async Task Add(IReadOnlyList<User> users) {
		await using (var conn = await pool.Take()) {
-			await using var tx = await conn.BeginTransactionAsync();
+			await conn.BeginTransactionAsync();
			
			await using var cmd = conn.Upsert("users", [
				("id", SqliteType.Integer),
@@ -46,7 +46,7 @@ sealed class SqliteUserRepository : BaseSqliteRepository, IUserRepository {
				}
			}
			
-			await tx.CommitAsync();
+			await conn.CommitTransactionAsync();
			downloadCollector.OnCommitted();
		}

View File

@@ -1,5 +1,4 @@
using System.Collections.Generic;
-using System.Data.Common;
using System.Threading.Tasks;
using DHT.Server.Database.Sqlite.Utils;
using DHT.Server.Download;
@@ -23,7 +22,7 @@ sealed class SqliteSchemaUpgradeTo6 : ISchemaUpgrade {
		await conn.ExecuteAsync("ALTER TABLE attachments RENAME COLUMN url TO normalized_url");
		await conn.ExecuteAsync("ALTER TABLE downloads RENAME COLUMN url TO normalized_url");
	}
	
	private async Task NormalizeAttachmentUrls(ISqliteConnection conn, ISchemaUpgradeCallbacks.IProgressReporter reporter) {
		await reporter.SubWork("Preparing attachments...", 0, 0);
@@ -39,7 +38,7 @@ sealed class SqliteSchemaUpgradeTo6 : ISchemaUpgrade {
			}
		}
		
-		await using var tx = await conn.BeginTransactionAsync();
+		await conn.BeginTransactionAsync();
		
		int totalUrls = normalizedUrls.Count;
		int processedUrls = -1;
@@ -61,7 +60,7 @@ sealed class SqliteSchemaUpgradeTo6 : ISchemaUpgrade {
		await reporter.SubWork("Updating URLs...", totalUrls, totalUrls);
		
-		await tx.CommitAsync();
+		await conn.CommitTransactionAsync();
	}
	
	private async Task NormalizeDownloadUrls(ISqliteConnection conn, ISchemaUpgradeCallbacks.IProgressReporter reporter) {
@@ -84,26 +83,23 @@ sealed class SqliteSchemaUpgradeTo6 : ISchemaUpgrade {
		}
		
		await conn.ExecuteAsync("PRAGMA cache_size = -20000");
		
-		DbTransaction tx;
-		
-		await using (tx = await conn.BeginTransactionAsync()) {
-			await reporter.SubWork("Deleting duplicates...", 0, 0);
-			
-			await using (var deleteCmd = conn.Delete("downloads", ("url", SqliteType.Text))) {
-				foreach (var duplicateUrl in duplicateUrlsToDelete) {
-					deleteCmd.Set(":url", duplicateUrl);
-					await deleteCmd.ExecuteNonQueryAsync();
-				}
-			}
-			
-			await tx.CommitAsync();
-		}
+		await conn.BeginTransactionAsync();
+		
+		await reporter.SubWork("Deleting duplicates...", 0, 0);
+		
+		await using (var deleteCmd = conn.Delete("downloads", ("url", SqliteType.Text))) {
+			foreach (var duplicateUrl in duplicateUrlsToDelete) {
+				deleteCmd.Set(":url", duplicateUrl);
+				await deleteCmd.ExecuteNonQueryAsync();
+			}
+		}
+		
+		await conn.CommitTransactionAsync();
		
		int totalUrls = normalizedUrlsToOriginalUrls.Count;
		int processedUrls = -1;
		
-		tx = await conn.BeginTransactionAsync();
+		await conn.BeginTransactionAsync();
		
		await using (var updateCmd = conn.Command("UPDATE downloads SET download_url = :download_url, url = :normalized_url WHERE url = :download_url")) {
			updateCmd.Add(":normalized_url", SqliteType.Text);
@@ -115,11 +111,10 @@ sealed class SqliteSchemaUpgradeTo6 : ISchemaUpgrade {
				// Not proper way of dealing with transactions, but it avoids a long commit at the end.
				// Schema upgrades are already non-atomic anyways, so this doesn't make it worse.
-				await tx.CommitAsync();
-				await tx.DisposeAsync();
-				
-				tx = await conn.BeginTransactionAsync();
-				updateCmd.Transaction = (SqliteTransaction) tx;
+				await conn.CommitTransactionAsync();
+				
+				await conn.BeginTransactionAsync();
+				conn.AssignActiveTransaction(updateCmd);
			}
			
			updateCmd.Set(":normalized_url", normalizedUrl);
@@ -130,8 +125,7 @@ sealed class SqliteSchemaUpgradeTo6 : ISchemaUpgrade {
		await reporter.SubWork("Updating URLs...", totalUrls, totalUrls);
		
-		await tx.CommitAsync();
-		await tx.DisposeAsync();
+		await conn.CommitTransactionAsync();
		
		await conn.ExecuteAsync("PRAGMA cache_size = -2000");
	}

View File

@@ -11,56 +11,55 @@ sealed class SqliteSchemaUpgradeTo7 : ISchemaUpgrade {
	async Task ISchemaUpgrade.Run(ISqliteConnection conn, ISchemaUpgradeCallbacks.IProgressReporter reporter) {
		await reporter.MainWork("Applying schema changes...", 0, 6);
		await SqliteSchema.CreateDownloadTables(conn);
		
		await reporter.MainWork("Migrating download metadata...", 1, 6);
		await conn.ExecuteAsync("INSERT INTO download_metadata (normalized_url, download_url, status, size) SELECT normalized_url, download_url, status, size FROM downloads");
		
		await reporter.MainWork("Merging attachment metadata...", 2, 6);
		await conn.ExecuteAsync("UPDATE download_metadata SET type = (SELECT type FROM attachments WHERE download_metadata.normalized_url = attachments.normalized_url)");
		
		await reporter.MainWork("Migrating downloaded files...", 3, 6);
		await MigrateDownloadBlobsToNewTable(conn, reporter);
		
		await reporter.MainWork("Applying schema changes...", 4, 6);
		await conn.ExecuteAsync("DROP TABLE downloads");
		
		await reporter.MainWork("Discovering downloadable links...", 5, 6);
		await DiscoverDownloadableLinks(conn, reporter);
	}
	
	private async Task MigrateDownloadBlobsToNewTable(ISqliteConnection conn, ISchemaUpgradeCallbacks.IProgressReporter reporter) {
		await reporter.SubWork("Listing downloaded files...", 0, 0);
		
		var urlsToMigrate = await GetDownloadedFileUrls(conn);
		int totalFiles = urlsToMigrate.Count;
		int processedFiles = -1;
		
		await reporter.SubWork("Processing downloaded files...", 0, totalFiles);
		
-		var tx = await conn.BeginTransactionAsync();
+		await conn.BeginTransactionAsync();
		
		await using (var insertCmd = conn.Command("INSERT INTO download_blobs (normalized_url, blob) SELECT normalized_url, blob FROM downloads WHERE normalized_url = :normalized_url"))
		await using (var deleteCmd = conn.Command("DELETE FROM downloads WHERE normalized_url = :normalized_url")) {
			insertCmd.Add(":normalized_url", SqliteType.Text);
			deleteCmd.Add(":normalized_url", SqliteType.Text);
			
			foreach (var url in urlsToMigrate) {
				if (++processedFiles % 10 == 0) {
					await reporter.SubWork("Processing downloaded files...", processedFiles, totalFiles);
					
					// Not proper way of dealing with transactions, but it avoids a long commit at the end.
					// Schema upgrades are already non-atomic anyways, so this doesn't make it worse.
-					await tx.CommitAsync();
-					await tx.DisposeAsync();
-					
-					tx = await conn.BeginTransactionAsync();
-					insertCmd.Transaction = (SqliteTransaction) tx;
-					deleteCmd.Transaction = (SqliteTransaction) tx;
+					await conn.CommitTransactionAsync();
+					await conn.BeginTransactionAsync();
+					
+					conn.AssignActiveTransaction(insertCmd);
+					conn.AssignActiveTransaction(deleteCmd);
				}
				
				insertCmd.Set(":normalized_url", url);
				await insertCmd.ExecuteNonQueryAsync();
				
				deleteCmd.Set(":normalized_url", url);
				await deleteCmd.ExecuteNonQueryAsync();
			}
@@ -68,8 +67,7 @@ sealed class SqliteSchemaUpgradeTo7 : ISchemaUpgrade {
		
		await reporter.SubWork("Processing downloaded files...", totalFiles, totalFiles);
		
-		await tx.CommitAsync();
-		await tx.DisposeAsync();
+		await conn.CommitTransactionAsync();
	}
	
	private async Task<List<string>> GetDownloadedFileUrls(ISqliteConnection conn) {
@@ -110,46 +108,46 @@ sealed class SqliteSchemaUpgradeTo7 : ISchemaUpgrade {
			insertCmd.Set(":size", download.Size);
			await insertCmd.ExecuteNonQueryAsync();
		}
		
-		await using (var tx = await conn.BeginTransactionAsync()) {
+		await conn.BeginTransactionAsync();
		
		await using var insertCmd = conn.Command("INSERT OR IGNORE INTO download_metadata (normalized_url, download_url, status, type, size) VALUES (:normalized_url, :download_url, :status, :type, :size)");
		insertCmd.Add(":normalized_url", SqliteType.Text);
		insertCmd.Add(":download_url", SqliteType.Text);
		insertCmd.Add(":status", SqliteType.Integer);
		insertCmd.Add(":type", SqliteType.Text);
		insertCmd.Add(":size", SqliteType.Integer);
		
		await reporter.SubWork("Processing embeds...", 1, 4);
		
		await using (var embedCmd = conn.Command("SELECT json FROM embeds")) {
			await using var reader = await embedCmd.ExecuteReaderAsync();
			while (await reader.ReadAsync()) {
				await InsertDownload(insertCmd, await DownloadLinkExtractor.TryFromEmbedJson(reader.GetStream(0)));
			}
		}
		
		await reporter.SubWork("Processing users...", 2, 4);
		
		await using (var avatarCmd = conn.Command("SELECT id, avatar_url FROM users WHERE avatar_url IS NOT NULL")) {
			await using var reader = await avatarCmd.ExecuteReaderAsync();
			while (await reader.ReadAsync()) {
				await InsertDownload(insertCmd, DownloadLinkExtractor.FromUserAvatar(reader.GetUint64(0), reader.GetString(1)));
			}
		}
		
		await reporter.SubWork("Processing reactions...", 3, 4);
		
		await using (var avatarCmd = conn.Command("SELECT DISTINCT emoji_id, emoji_flags FROM reactions WHERE emoji_id IS NOT NULL")) {
			await using var reader = await avatarCmd.ExecuteReaderAsync();
			while (await reader.ReadAsync()) {
				await InsertDownload(insertCmd, DownloadLinkExtractor.FromEmoji(reader.GetUint64(0), (EmojiFlags) reader.GetInt16(1)));
			}
		}
		
-			await tx.CommitAsync();
-		}
+		await conn.CommitTransactionAsync();
	}
}

View File

@@ -1,8 +1,15 @@
using System;
+using System.Threading.Tasks;
using Microsoft.Data.Sqlite;

namespace DHT.Server.Database.Sqlite.Utils;

interface ISqliteConnection : IAsyncDisposable {
	SqliteConnection InnerConnection { get; }
	
+	Task BeginTransactionAsync();
+	Task CommitTransactionAsync();
+	Task RollbackTransactionAsync();
+	void AssignActiveTransaction(SqliteCommand command);
}

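A hedged sketch of how a caller might drive the new transaction methods on ISqliteConnection, based only on the signatures above and the helper extensions used elsewhere in this diff (the SQL statement and URL value are illustrative):

using System.Threading.Tasks;
using DHT.Server.Database.Sqlite.Utils;
using Microsoft.Data.Sqlite;

static class ConnectionTransactionSketch {
	public static async Task DeleteBlobInTransaction(ISqliteConnection conn, string normalizedUrl) {
		await conn.BeginTransactionAsync();
		try {
			await using var cmd = conn.Command("DELETE FROM download_blobs WHERE normalized_url = :normalized_url");
			cmd.AddAndSet(":normalized_url", SqliteType.Text, normalizedUrl);
			
			// Commands can be attached to the connection's current transaction when needed.
			conn.AssignActiveTransaction(cmd);
			await cmd.ExecuteNonQueryAsync();
			
			await conn.CommitTransactionAsync();
		} catch {
			await conn.RollbackTransactionAsync();
			throw;
		}
	}
}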
View File

@@ -1,5 +1,6 @@
using System;
using System.Collections.Generic;
+using System.Data.Common;
using System.Threading;
using System.Threading.Tasks;
using DHT.Utils.Collections;
@@ -73,17 +74,48 @@ sealed class SqliteConnectionPool : IAsyncDisposable {
		disposalTokenSource.Dispose();
	}
	
-	private sealed class PooledConnection : ISqliteConnection {
-		public SqliteConnection InnerConnection { get; }
-		
-		private readonly SqliteConnectionPool pool;
-		
-		public PooledConnection(SqliteConnectionPool pool, SqliteConnection conn) {
-			this.pool = pool;
-			this.InnerConnection = conn;
-		}
+	private sealed class PooledConnection(SqliteConnectionPool pool, SqliteConnection conn) : ISqliteConnection {
+		public SqliteConnection InnerConnection { get; } = conn;
+		
+		private DbTransaction? activeTransaction;
+		
+		public async Task BeginTransactionAsync() {
+			if (activeTransaction != null) {
+				throw new InvalidOperationException("A transaction is already active.");
+			}
+			
+			activeTransaction = await InnerConnection.BeginTransactionAsync();
+		}
+		
+		public async Task CommitTransactionAsync() {
+			if (activeTransaction == null) {
+				throw new InvalidOperationException("No active transaction to commit.");
+			}
+			
+			await activeTransaction.CommitAsync();
+			await activeTransaction.DisposeAsync();
+			activeTransaction = null;
+		}
+		
+		public async Task RollbackTransactionAsync() {
+			if (activeTransaction == null) {
+				throw new InvalidOperationException("No active transaction to rollback.");
+			}
+			
+			await activeTransaction.RollbackAsync();
+			await activeTransaction.DisposeAsync();
+			activeTransaction = null;
+		}
+		
+		public void AssignActiveTransaction(SqliteCommand command) {
+			command.Transaction = (SqliteTransaction?) activeTransaction;
+		}
		
		public async ValueTask DisposeAsync() {
+			if (activeTransaction != null) {
+				await RollbackTransactionAsync();
+			}
+			
			await pool.Return(this);
		}
	}

View File

@@ -1,5 +1,4 @@
using System;
-using System.Data.Common;
using System.Linq;
using System.Threading;
using System.Threading.Tasks;
@@ -9,10 +8,6 @@ using Microsoft.Data.Sqlite;
namespace DHT.Server.Database.Sqlite.Utils;

static class SqliteExtensions {
-	public static ValueTask<DbTransaction> BeginTransactionAsync(this ISqliteConnection conn) {
-		return conn.InnerConnection.BeginTransactionAsync();
-	}
-	
	public static SqliteCommand Command(this ISqliteConnection conn, [LanguageInjection("sql")] string sql) {
		var cmd = conn.InnerConnection.CreateCommand();
		cmd.CommandText = sql;
@@ -30,6 +25,10 @@ static class SqliteExtensions {
		return await reader.ReadAsync(cancellationToken) ? readFunction(reader) : readFunction(null);
	}
	
+	public static async Task<long> ExecuteLongScalarAsync(this SqliteCommand command) {
+		return (long) (await command.ExecuteScalarAsync())!;
+	}
+	
	public static SqliteCommand Insert(this ISqliteConnection conn, string tableName, (string Name, SqliteType Type)[] columns) {
		string columnNames = string.Join(',', columns.Select(static c => c.Name));

View File

@@ -10,13 +10,12 @@ public readonly struct DownloadItem {
	public string? Type { get; init; }
	public ulong? Size { get; init; }
	
-	internal DownloadWithData ToSuccess(byte[] data) {
-		var size = (ulong) Math.Max(data.LongLength, 0);
-		return new DownloadWithData(new Data.Download(NormalizedUrl, DownloadUrl, DownloadStatus.Success, Type, size), data);
+	internal Data.Download ToSuccess(long size) {
+		return new Data.Download(NormalizedUrl, DownloadUrl, DownloadStatus.Success, Type, (ulong) Math.Max(size, 0));
	}
	
-	internal DownloadWithData ToFailure(HttpStatusCode? statusCode = null) {
+	internal Data.Download ToFailure(HttpStatusCode? statusCode = null) {
		var status = statusCode.HasValue ? (DownloadStatus) (int) statusCode : DownloadStatus.GenericError;
-		return new DownloadWithData(new Data.Download(NormalizedUrl, DownloadUrl, status, Type, Size), Data: null);
+		return new Data.Download(NormalizedUrl, DownloadUrl, status, Type, Size);
	}
}

View File

@@ -67,28 +67,39 @@ sealed class DownloaderTask : IAsyncDisposable {
	private async Task RunDownloadTask(int taskIndex) {
		var log = Log.ForType<DownloaderTask>("Task " + taskIndex);
		
-		var client = new HttpClient();
+		var client = new HttpClient(new SocketsHttpHandler {
+			ConnectTimeout = TimeSpan.FromSeconds(30)
+		});
+		
+		client.Timeout = Timeout.InfiniteTimeSpan;
		client.DefaultRequestHeaders.UserAgent.ParseAdd(UserAgent);
-		client.Timeout = TimeSpan.FromSeconds(30);
		
		while (!cancellationToken.IsCancellationRequested) {
			var item = await downloadQueue.Reader.ReadAsync(cancellationToken);
			log.Debug("Downloading " + item.DownloadUrl + "...");
			
			try {
-				var downloadedBytes = await client.GetByteArrayAsync(item.DownloadUrl, cancellationToken);
-				await db.Downloads.AddDownload(item.ToSuccess(downloadedBytes));
+				var response = await client.SendAsync(new HttpRequestMessage(HttpMethod.Get, item.DownloadUrl), HttpCompletionOption.ResponseHeadersRead, cancellationToken);
+				response.EnsureSuccessStatusCode();
+				
+				if (response.Content.Headers.ContentLength is {} contentLength) {
+					await using var stream = await response.Content.ReadAsStreamAsync(cancellationToken);
+					await db.Downloads.AddDownload(item.ToSuccess(contentLength), stream);
+				}
+				else {
+					await db.Downloads.AddDownload(item.ToFailure(), stream: null);
+					log.Error("Download response has no content length: " + item.DownloadUrl);
+				}
			} catch (OperationCanceledException e) when (e.CancellationToken == cancellationToken) {
				// Ignore.
-			} catch (TaskCanceledException e) {
-				// HttpClient request timed out.
-				await db.Downloads.AddDownload(item.ToFailure());
-				log.Error(e.Message);
+			} catch (TaskCanceledException e) when (e.InnerException is TimeoutException) {
+				await db.Downloads.AddDownload(item.ToFailure(), stream: null);
+				log.Error("Download timed out: " + item.DownloadUrl);
			} catch (HttpRequestException e) {
-				await db.Downloads.AddDownload(item.ToFailure(e.StatusCode));
+				await db.Downloads.AddDownload(item.ToFailure(e.StatusCode), stream: null);
				log.Error(e);
			} catch (Exception e) {
-				await db.Downloads.AddDownload(item.ToFailure());
+				await db.Downloads.AddDownload(item.ToFailure(), stream: null);
				log.Error(e);
			} finally {
				try {

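The downloader change above splits the old 30-second total timeout into a 30-second connect timeout plus an unbounded, streamed transfer. The same HttpClient pattern in standalone form (the URL and output path are placeholders, not taken from the changeset):

using System;
using System.IO;
using System.Net.Http;
using System.Threading;
using System.Threading.Tasks;

static class StreamingDownloadSketch {
	public static async Task DownloadToFileAsync(string url, string targetPath, CancellationToken cancellationToken) {
		using var client = new HttpClient(new SocketsHttpHandler {
			ConnectTimeout = TimeSpan.FromSeconds(30) // Only bound the connection attempt...
		});
		client.Timeout = Timeout.InfiniteTimeSpan;    // ...not the whole transfer.
		
		// ResponseHeadersRead returns as soon as headers arrive, so the body can be streamed.
		using var response = await client.SendAsync(new HttpRequestMessage(HttpMethod.Get, url), HttpCompletionOption.ResponseHeadersRead, cancellationToken);
		response.EnsureSuccessStatusCode();
		
		await using var source = await response.Content.ReadAsStreamAsync(cancellationToken);
		await using var target = File.Create(targetPath);
		await source.CopyToAsync(target, cancellationToken);
	}
}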
View File

@@ -9,37 +9,37 @@ using Microsoft.AspNetCore.Http;
namespace DHT.Server.Endpoints;

-abstract class BaseEndpoint {
+abstract class BaseEndpoint(IDatabaseFile db) {
	private static readonly Log Log = Log.ForType<BaseEndpoint>();
	
-	protected IDatabaseFile Db { get; }
-	
-	protected BaseEndpoint(IDatabaseFile db) {
-		this.Db = db;
-	}
+	protected IDatabaseFile Db { get; } = db;
	
	public async Task Handle(HttpContext ctx) {
		var response = ctx.Response;
		
		try {
			response.StatusCode = (int) HttpStatusCode.OK;
-			var output = await Respond(ctx);
-			await output.WriteTo(response);
+			await Respond(ctx.Request, response);
		} catch (HttpException e) {
			Log.Error(e);
			response.StatusCode = (int) e.StatusCode;
-			await response.WriteAsync(e.Message);
+			
+			if (response.HasStarted) {
+				Log.Warn("Response has already started, cannot write status message: " + e.Message);
+			}
+			else {
+				await response.WriteAsync(e.Message);
+			}
		} catch (Exception e) {
			Log.Error(e);
			response.StatusCode = (int) HttpStatusCode.InternalServerError;
		}
	}
	
-	protected abstract Task<IHttpOutput> Respond(HttpContext ctx);
+	protected abstract Task Respond(HttpRequest request, HttpResponse response);
	
-	protected static async Task<JsonElement> ReadJson(HttpContext ctx) {
+	protected static async Task<JsonElement> ReadJson(HttpRequest request) {
		try {
-			return await ctx.Request.ReadFromJsonAsync(JsonElementContext.Default.JsonElement);
+			return await request.ReadFromJsonAsync(JsonElementContext.Default.JsonElement);
		} catch (JsonException) {
			throw new HttpException(HttpStatusCode.UnsupportedMediaType, "This endpoint only accepts JSON.");
		}

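With the new contract, an endpoint writes straight to HttpResponse instead of returning an IHttpOutput. A minimal, hypothetical subclass for illustration only (PingEndpoint does not exist in this changeset):

using System.Threading.Tasks;
using DHT.Server.Database;
using DHT.Utils.Http;
using Microsoft.AspNetCore.Http;

// Hypothetical endpoint used only to illustrate the new Respond(HttpRequest, HttpResponse) contract.
sealed class PingEndpoint(IDatabaseFile db) : BaseEndpoint(db) {
	protected override Task Respond(HttpRequest request, HttpResponse response) {
		return response.WriteTextAsync("pong");
	}
}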
View File

@@ -7,18 +7,13 @@ using Microsoft.AspNetCore.Http;
namespace DHT.Server.Endpoints;

-sealed class GetDownloadedFileEndpoint : BaseEndpoint {
-	public GetDownloadedFileEndpoint(IDatabaseFile db) : base(db) {}
-	
-	protected override async Task<IHttpOutput> Respond(HttpContext ctx) {
-		string url = WebUtility.UrlDecode((string) ctx.Request.RouteValues["url"]!);
+sealed class GetDownloadedFileEndpoint(IDatabaseFile db) : BaseEndpoint(db) {
+	protected override async Task Respond(HttpRequest request, HttpResponse response) {
+		string url = WebUtility.UrlDecode((string) request.RouteValues["url"]!);
		string normalizedUrl = DiscordCdn.NormalizeUrl(url);
		
-		if (await Db.Downloads.GetSuccessfulDownloadWithData(normalizedUrl) is { Download: {} download, Data: {} data }) {
-			return new HttpOutput.File(download.Type, data);
-		}
-		else {
-			return new HttpOutput.Redirect(url, permanent: false);
+		if (!await Db.Downloads.GetSuccessfulDownloadWithData(normalizedUrl, (download, stream) => response.WriteStreamAsync(download.Type, download.Size, stream))) {
+			response.Redirect(url, permanent: false);
		}
	}
}

View File

@@ -1,5 +1,5 @@
+using System.Net.Mime;
using System.Reflection;
-using System.Text;
using System.Threading.Tasks;
using System.Web;
using DHT.Server.Database;
@@ -10,25 +10,19 @@ using Microsoft.AspNetCore.Http;
namespace DHT.Server.Endpoints;

-sealed class GetTrackingScriptEndpoint : BaseEndpoint {
+sealed class GetTrackingScriptEndpoint(IDatabaseFile db, ServerParameters parameters) : BaseEndpoint(db) {
	private static ResourceLoader Resources { get; } = new (Assembly.GetExecutingAssembly());
	
-	private readonly ServerParameters serverParameters;
-	
-	public GetTrackingScriptEndpoint(IDatabaseFile db, ServerParameters parameters) : base(db) {
-		serverParameters = parameters;
-	}
-	
-	protected override async Task<IHttpOutput> Respond(HttpContext ctx) {
+	protected override async Task Respond(HttpRequest request, HttpResponse response) {
		string bootstrap = await Resources.ReadTextAsync("Tracker/bootstrap.js");
-		string script = bootstrap.Replace("= 0; /*[PORT]*/", "= " + serverParameters.Port + ";")
-			.Replace("/*[TOKEN]*/", HttpUtility.JavaScriptStringEncode(serverParameters.Token))
+		string script = bootstrap.Replace("= 0; /*[PORT]*/", "= " + parameters.Port + ";")
+			.Replace("/*[TOKEN]*/", HttpUtility.JavaScriptStringEncode(parameters.Token))
			.Replace("/*[IMPORTS]*/", await Resources.ReadJoinedAsync("Tracker/scripts/", '\n'))
			.Replace("/*[CSS-CONTROLLER]*/", await Resources.ReadTextAsync("Tracker/styles/controller.css"))
			.Replace("/*[CSS-SETTINGS]*/", await Resources.ReadTextAsync("Tracker/styles/settings.css"))
-			.Replace("/*[DEBUGGER]*/", ctx.Request.Query.ContainsKey("debug") ? "debugger;" : "");
+			.Replace("/*[DEBUGGER]*/", request.Query.ContainsKey("debug") ? "debugger;" : "");
		
-		ctx.Response.Headers.Append("X-DHT", "1");
-		return new HttpOutput.File("text/javascript", Encoding.UTF8.GetBytes(script));
+		response.Headers.Append("X-DHT", "1");
+		await response.WriteTextAsync(MediaTypeNames.Text.JavaScript, script);
	}
}

View File

@@ -8,18 +8,14 @@ using Microsoft.AspNetCore.Http;
namespace DHT.Server.Endpoints;

-sealed class TrackChannelEndpoint : BaseEndpoint {
-	public TrackChannelEndpoint(IDatabaseFile db) : base(db) {}
-	
-	protected override async Task<IHttpOutput> Respond(HttpContext ctx) {
-		var root = await ReadJson(ctx);
+sealed class TrackChannelEndpoint(IDatabaseFile db) : BaseEndpoint(db) {
+	protected override async Task Respond(HttpRequest request, HttpResponse response) {
+		var root = await ReadJson(request);
		var server = ReadServer(root.RequireObject("server"), "server");
		var channel = ReadChannel(root.RequireObject("channel"), "channel", server.Id);
		
		await Db.Servers.Add([server]);
		await Db.Channels.Add([channel]);
-		
-		return HttpOutput.None;
	}
	
	private static Data.Server ReadServer(JsonElement json, string path) => new () {

View File

@@ -15,14 +15,12 @@ using Microsoft.AspNetCore.Http;
namespace DHT.Server.Endpoints;

-sealed class TrackMessagesEndpoint : BaseEndpoint {
+sealed class TrackMessagesEndpoint(IDatabaseFile db) : BaseEndpoint(db) {
	private const string HasNewMessages = "1";
	private const string NoNewMessages = "0";
	
-	public TrackMessagesEndpoint(IDatabaseFile db) : base(db) {}
-	
-	protected override async Task<IHttpOutput> Respond(HttpContext ctx) {
-		var root = await ReadJson(ctx);
+	protected override async Task Respond(HttpRequest request, HttpResponse response) {
+		var root = await ReadJson(request);
		
		if (root.ValueKind != JsonValueKind.Array) {
			throw new HttpException(HttpStatusCode.BadRequest, "Expected root element to be an array.");
@@ -43,7 +41,7 @@ sealed class TrackMessagesEndpoint : BaseEndpoint {
		
		await Db.Messages.Add(messages);
		
-		return new HttpOutput.Text(anyNewMessages ? HasNewMessages : NoNewMessages);
+		await response.WriteTextAsync(anyNewMessages ? HasNewMessages : NoNewMessages);
	}
	
	private static Message ReadMessage(JsonElement json, string path) => new () {

View File

@@ -8,11 +8,9 @@ using Microsoft.AspNetCore.Http;
namespace DHT.Server.Endpoints;

-sealed class TrackUsersEndpoint : BaseEndpoint {
-	public TrackUsersEndpoint(IDatabaseFile db) : base(db) {}
-	
-	protected override async Task<IHttpOutput> Respond(HttpContext ctx) {
-		var root = await ReadJson(ctx);
+sealed class TrackUsersEndpoint(IDatabaseFile db) : BaseEndpoint(db) {
+	protected override async Task Respond(HttpRequest request, HttpResponse response) {
+		var root = await ReadJson(request);
		
		if (root.ValueKind != JsonValueKind.Array) {
			throw new HttpException(HttpStatusCode.BadRequest, "Expected root element to be an array.");
@@ -26,8 +24,6 @@ sealed class TrackUsersEndpoint : BaseEndpoint {
		}
		
		await Db.Users.Add(users);
-		
-		return HttpOutput.None;
	}
	
	private static User ReadUser(JsonElement json, string path) => new () {

View File

@@ -8,7 +8,7 @@ public static class LinqExtensions {
		HashSet<TKey>? seenKeys = null;
		
		foreach (var item in collection) {
-			seenKeys ??= new HashSet<TKey>();
+			seenKeys ??= [];
			
			if (seenKeys.Add(getKeyFromItem(item))) {
				yield return item;

View File

@@ -0,0 +1,33 @@
using System.IO;
using System.Net.Mime;
using System.Text;
using System.Threading.Tasks;
using Microsoft.AspNetCore.Http;

namespace DHT.Utils.Http;

public static class HttpExtensions {
	public static Task WriteTextAsync(this HttpResponse response, string text) {
		return WriteTextAsync(response, MediaTypeNames.Text.Plain, text);
	}
	
	public static async Task WriteTextAsync(this HttpResponse response, string contentType, string text) {
		response.ContentType = contentType;
		await response.StartAsync();
		await response.WriteAsync(text, Encoding.UTF8);
	}
	
	public static async Task WriteFileAsync(this HttpResponse response, string? contentType, byte[] bytes) {
		response.ContentType = contentType ?? string.Empty;
		response.ContentLength = bytes.Length;
		await response.StartAsync();
		await response.Body.WriteAsync(bytes);
	}
	
	public static async Task WriteStreamAsync(this HttpResponse response, string? contentType, ulong? contentLength, Stream source) {
		response.ContentType = contentType ?? string.Empty;
		response.ContentLength = (long?) contentLength;
		await response.StartAsync();
		await source.CopyToAsync(response.Body);
	}
}

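These helpers call StartAsync before writing the body, so the status code and any headers must be final before they are invoked. A small usage sketch (the payload is illustrative):

using System.Net.Mime;
using System.Text;
using System.Threading.Tasks;
using DHT.Utils.Http;
using Microsoft.AspNetCore.Http;

static class HttpExtensionsUsageSketch {
	public static Task WriteGreeting(HttpResponse response) {
		// Sets Content-Type and Content-Length, starts the response, then writes the body.
		return response.WriteFileAsync(MediaTypeNames.Text.Plain, Encoding.UTF8.GetBytes("hello"));
	}
}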
View File

@@ -1,35 +0,0 @@
using System.Text;
using System.Threading.Tasks;
using Microsoft.AspNetCore.Http;

namespace DHT.Utils.Http;

public static class HttpOutput {
	public static IHttpOutput None { get; } = new NoneImpl();
	
	private sealed class NoneImpl : IHttpOutput {
		public Task WriteTo(HttpResponse response) {
			return Task.CompletedTask;
		}
	}
	
	public sealed class Text(string text) : IHttpOutput {
		public Task WriteTo(HttpResponse response) {
			return response.WriteAsync(text, Encoding.UTF8);
		}
	}
	
	public sealed class File(string? contentType, byte[] bytes) : IHttpOutput {
		public async Task WriteTo(HttpResponse response) {
			response.ContentType = contentType ?? string.Empty;
			await response.Body.WriteAsync(bytes);
		}
	}
	
	public sealed class Redirect(string url, bool permanent) : IHttpOutput {
		public Task WriteTo(HttpResponse response) {
			response.Redirect(url, permanent);
			return Task.CompletedTask;
		}
	}
}

View File

@@ -1,8 +0,0 @@
using System.Threading.Tasks;
using Microsoft.AspNetCore.Http;

namespace DHT.Utils.Http;

public interface IHttpOutput {
	Task WriteTo(HttpResponse response);
}

View File

@@ -8,5 +8,5 @@ using DHT.Utils;
namespace DHT.Utils;

static class Version {
-	public const string Tag = "41.1.0.0";
+	public const string Tag = "41.2.0.0";
}