mirror of https://github.com/chylex/Discord-History-Tracker.git synced 2025-09-14 00:32:08 +02:00

7 Commits

26 changed files with 537 additions and 271 deletions

View File

@@ -1,9 +1,11 @@
using System;
using System.Collections.Generic;
using System.IO;
using System.Threading;
using System.Threading.Tasks;
using Avalonia.Controls;
using Avalonia.Platform.Storage;
using Avalonia.Threading;
using DHT.Desktop.Dialogs.File;
using DHT.Desktop.Dialogs.Message;
using DHT.Server.Database;
@@ -41,11 +43,16 @@ static class DatabaseGui {
});
}
public static async Task<IDatabaseFile?> TryOpenOrCreateDatabaseFromPath(string path, Window window, Func<Task<bool>> checkCanUpgradeDatabase) {
public static async Task<IDatabaseFile?> TryOpenOrCreateDatabaseFromPath(string path, Window window, ISchemaUpgradeCallbacks schemaUpgradeCallbacks) {
var prevSynchronizationContext = SynchronizationContext.Current;
SynchronizationContext.SetSynchronizationContext(new AvaloniaSynchronizationContext());
var taskScheduler = TaskScheduler.FromCurrentSynchronizationContext();
SynchronizationContext.SetSynchronizationContext(prevSynchronizationContext);
IDatabaseFile? file = null;
try {
file = await SqliteDatabaseFile.OpenOrCreate(path, checkCanUpgradeDatabase);
file = await SqliteDatabaseFile.OpenOrCreate(path, schemaUpgradeCallbacks, taskScheduler);
} catch (InvalidDatabaseVersionException ex) {
await Dialog.ShowOk(window, "Database Error", "Database '" + Path.GetFileName(path) + "' appears to be corrupted (invalid version: " + ex.Version + ").");
} catch (DatabaseTooNewException ex) {
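
The change above swaps in an AvaloniaSynchronizationContext just long enough to capture a TaskScheduler bound to the UI thread, then restores the previous context; the scheduler is later handed to SqliteDatabaseFile so statistics callbacks can be posted back to the UI thread regardless of where the open call happens. A minimal sketch of the same pattern as a standalone helper (the class and method names are illustrative, not part of this diff):

using System.Threading;
using System.Threading.Tasks;
using Avalonia.Threading;

static class UiTaskSchedulerHelper {
	// Installs an AvaloniaSynchronizationContext, captures a TaskScheduler bound
	// to it, then restores whatever context was active before the call.
	public static TaskScheduler CaptureUiTaskScheduler() {
		SynchronizationContext? previous = SynchronizationContext.Current;
		SynchronizationContext.SetSynchronizationContext(new AvaloniaSynchronizationContext());
		try {
			return TaskScheduler.FromCurrentSynchronizationContext();
		} finally {
			SynchronizationContext.SetSynchronizationContext(previous);
		}
	}
}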

View File

@@ -4,4 +4,5 @@ namespace DHT.Desktop.Dialogs.Progress;
interface IProgressCallback {
Task Update(string message, int finishedItems, int totalItems);
Task Hide();
}

View File

@@ -32,12 +32,18 @@
</Style>
</Window.Styles>
<StackPanel Margin="20">
<DockPanel>
<TextBlock DockPanel.Dock="Right" Text="{Binding Items}" Classes="items" />
<TextBlock DockPanel.Dock="Left" Text="{Binding Message}" />
</DockPanel>
<ProgressBar Value="{Binding Progress}" />
</StackPanel>
<ItemsRepeater ItemsSource="{Binding Items}" Margin="0 10">
<ItemsRepeater.ItemTemplate>
<DataTemplate>
<StackPanel Margin="20 10" IsHitTestVisible="{Binding IsVisible}" Opacity="{Binding Opacity}">
<DockPanel>
<TextBlock DockPanel.Dock="Right" Text="{Binding Items}" Classes="items" />
<TextBlock DockPanel.Dock="Left" Text="{Binding Message}" />
</DockPanel>
<ProgressBar Value="{Binding Progress}" />
</StackPanel>
</DataTemplate>
</ItemsRepeater.ItemTemplate>
</ItemsRepeater>
</Window>

View File

@@ -8,6 +8,7 @@ namespace DHT.Desktop.Dialogs.Progress;
[SuppressMessage("ReSharper", "MemberCanBeInternal")]
public sealed partial class ProgressDialog : Window {
private bool isFinished = false;
private Task progressTask = Task.CompletedTask;
public ProgressDialog() {
InitializeComponent();
@@ -15,7 +16,8 @@ public sealed partial class ProgressDialog : Window {
public void OnOpened(object? sender, EventArgs e) {
if (DataContext is ProgressDialogModel model) {
Task.Run(model.StartTask).ContinueWith(OnFinished, TaskScheduler.FromCurrentSynchronizationContext());
progressTask = Task.Run(model.StartTask);
progressTask.ContinueWith(OnFinished, TaskScheduler.FromCurrentSynchronizationContext());
}
}
@@ -27,4 +29,9 @@ public sealed partial class ProgressDialog : Window {
isFinished = true;
Close();
}
public async Task ShowProgressDialog(Window owner) {
await ShowDialog(owner);
await progressTask;
}
}
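
The new ShowProgressDialog helper awaits the dialog and then the stored progressTask, so an exception thrown inside the background task surfaces at the call site instead of being lost in the ContinueWith. A hedged usage sketch (the method name, owner window, and task body are placeholders, not code from this diff):

using System.Threading.Tasks;
using Avalonia.Controls;
using DHT.Desktop.Dialogs.Progress;

static class ProgressDialogUsageExample {
	public static async Task RunExampleTask(Window ownerWindow) {
		var dialog = new ProgressDialog {
			DataContext = new ProgressDialogModel(async callbacks => {
				for (int i = 0; i <= 10; i++) {
					await callbacks[0].Update("Working...", i, 10);
					await Task.Delay(100);
				}
			}) {
				Title = "Example Task"
			}
		};

		// Unlike ShowDialog alone, this also awaits the background task, so any
		// exception it throws propagates to the caller.
		await dialog.ShowProgressDialog(ownerWindow);
	}
}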

View File

@@ -1,4 +1,6 @@
using System;
using System.Collections.Generic;
using System.Linq;
using System.Threading.Tasks;
using Avalonia.Threading;
using DHT.Desktop.Common;
@@ -9,57 +11,43 @@ namespace DHT.Desktop.Dialogs.Progress;
sealed class ProgressDialogModel : BaseModel {
public string Title { get; init; } = "";
private string message = "";
public string Message {
get => message;
private set => Change(ref message, value);
}
private string items = "";
public string Items {
get => items;
private set => Change(ref items, value);
}
private int progress = 0;
public int Progress {
get => progress;
private set => Change(ref progress, value);
}
public IReadOnlyList<ProgressItem> Items { get; } = Array.Empty<ProgressItem>();
private readonly TaskRunner? task;
[Obsolete("Designer")]
public ProgressDialogModel() {}
public ProgressDialogModel(TaskRunner task) {
public ProgressDialogModel(TaskRunner task, int progressItems = 1) {
this.Items = Enumerable.Range(0, progressItems).Select(static _ => new ProgressItem()).ToArray();
this.task = task;
}
internal async Task StartTask() {
if (task != null) {
await task(new Callback(this));
await task(Items.Select(static item => new Callback(item)).ToArray());
}
}
public delegate Task TaskRunner(IProgressCallback callback);
public delegate Task TaskRunner(IReadOnlyList<IProgressCallback> callbacks);
private sealed class Callback : IProgressCallback {
private readonly ProgressDialogModel model;
private readonly ProgressItem item;
public Callback(ProgressDialogModel model) {
this.model = model;
public Callback(ProgressItem item) {
this.item = item;
}
async Task IProgressCallback.Update(string message, int finishedItems, int totalItems) {
public async Task Update(string message, int finishedItems, int totalItems) {
await Dispatcher.UIThread.InvokeAsync(() => {
model.Message = message;
model.Items = finishedItems.Format() + " / " + totalItems.Format();
model.Progress = 100 * finishedItems / totalItems;
item.Message = message;
item.Items = totalItems == 0 ? string.Empty : finishedItems.Format() + " / " + totalItems.Format();
item.Progress = totalItems == 0 ? 0 : 100 * finishedItems / totalItems;
});
}
public Task Hide() {
return Update(string.Empty, 0, 0);
}
}
}
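
Each ProgressItem now gets its own IProgressCallback, so one task can drive several progress rows independently. A hedged sketch of a task using two rows, one for overall progress and one for per-file detail (file names and counts are illustrative):

using System.Threading.Tasks;
using DHT.Desktop.Dialogs.Progress;

var model = new ProgressDialogModel(async callbacks => {
	IProgressCallback overall = callbacks[0];
	IProgressCallback detail = callbacks[1];

	string[] files = { "first.db", "second.db", "third.db" };
	for (int i = 0; i < files.Length; i++) {
		await overall.Update("Processing files...", i, files.Length);
		await detail.Update("Reading " + files[i], 0, 0);
		// ... per-file work ...
	}

	await overall.Update("Processing files...", files.Length, files.Length);
	await detail.Hide(); // clearing the message collapses the second row
}, progressItems: 2) {
	Title = "Example"
};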

View File

@@ -0,0 +1,41 @@
using DHT.Utils.Models;
namespace DHT.Desktop.Dialogs.Progress;
sealed class ProgressItem : BaseModel {
private bool isVisible = false;
public bool IsVisible {
get => isVisible;
private set {
Change(ref isVisible, value);
OnPropertyChanged(nameof(Opacity));
}
}
public double Opacity => IsVisible ? 1.0 : 0.0;
private string message = "";
public string Message {
get => message;
set {
Change(ref message, value);
IsVisible = !string.IsNullOrEmpty(value);
}
}
private string items = "";
public string Items {
get => items;
set => Change(ref items, value);
}
private int progress = 0;
public int Progress {
get => progress;
set => Change(ref progress, value);
}
}

View File

@@ -1,9 +1,9 @@
using System;
using System.Collections.Generic;
using System.Diagnostics.CodeAnalysis;
using System.IO;
using System.Runtime.InteropServices;
using System.Text.Json;
using System.Text.Json.Nodes;
using System.Threading.Tasks;
using DHT.Utils.Logging;
using static System.Environment.SpecialFolder;
@@ -47,12 +47,12 @@ static class DiscordAppSettings {
}
}
private static bool AreDevToolsEnabled(Dictionary<string, object?> json) {
return json.TryGetValue(JsonKeyDevTools, out var value) && value is JsonElement { ValueKind: JsonValueKind.True };
private static bool AreDevToolsEnabled(JsonObject json) {
return json.TryGetPropertyValue(JsonKeyDevTools, out var node) && node?.GetValueKind() == JsonValueKind.True;
}
public static async Task<SettingsJsonResult> ConfigureDevTools(bool enable) {
Dictionary<string, object?> json;
JsonObject json;
try {
json = await ReadSettingsJson();
@@ -109,13 +109,13 @@ static class DiscordAppSettings {
return SettingsJsonResult.Success;
}
private static async Task<Dictionary<string, object?>> ReadSettingsJson() {
private static async Task<JsonObject> ReadSettingsJson() {
await using var stream = new FileStream(JsonFilePath, FileMode.Open, FileAccess.Read, FileShare.Read);
return await JsonSerializer.DeserializeAsync<Dictionary<string, object?>?>(stream) ?? throw new JsonException();
return await JsonSerializer.DeserializeAsync(stream, DiscordAppSettingsJsonContext.Default.JsonObject) ?? throw new JsonException();
}
private static async Task WriteSettingsJson(Dictionary<string, object?> json) {
private static async Task WriteSettingsJson(JsonObject json) {
await using var stream = new FileStream(JsonFilePath, FileMode.Truncate, FileAccess.Write, FileShare.None);
await JsonSerializer.SerializeAsync(stream, json, new JsonSerializerOptions { WriteIndented = true });
await JsonSerializer.SerializeAsync(stream, json, DiscordAppSettingsJsonContext.Default.JsonObject);
}
}

View File

@@ -0,0 +1,8 @@
using System.Text.Json.Nodes;
using System.Text.Json.Serialization;
namespace DHT.Desktop.Discord;
[JsonSourceGenerationOptions(GenerationMode = JsonSourceGenerationMode.Default, WriteIndented = true)]
[JsonSerializable(typeof(JsonObject))]
sealed partial class DiscordAppSettingsJsonContext : JsonSerializerContext {}
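
With reflection-based JSON serialization disabled in the project file further down, serialization goes through source-generated contexts like this one. A hedged sketch of the round trip DiscordAppSettings now performs (the file path and key are placeholders, not the real Discord settings location):

using System.IO;
using System.Text.Json;
using System.Text.Json.Nodes;

JsonObject settings;
await using (var readStream = File.OpenRead("settings.json")) {
	settings = await JsonSerializer.DeserializeAsync(readStream, DiscordAppSettingsJsonContext.Default.JsonObject)
	           ?? throw new JsonException();
}

settings["exampleFlag"] = true; // placeholder key, not the real dev tools key

await using (var writeStream = File.Create("settings.json")) {
	// WriteIndented comes from the [JsonSourceGenerationOptions] attribute on the context.
	await JsonSerializer.SerializeAsync(writeStream, settings, DiscordAppSettingsJsonContext.Default.JsonObject);
}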

View File

@@ -17,6 +17,7 @@ using DHT.Desktop.Dialogs.TextBox;
using DHT.Server.Data;
using DHT.Server.Database;
using DHT.Server.Database.Import;
using DHT.Server.Database.Sqlite;
using DHT.Utils.Logging;
using DHT.Utils.Models;
@@ -77,31 +78,18 @@ sealed class DatabasePageModel : BaseModel {
}
ProgressDialog progressDialog = new ProgressDialog();
progressDialog.DataContext = new ProgressDialogModel(async callback => await MergeWithDatabaseFromPaths(Db, paths, progressDialog, callback)) {
progressDialog.DataContext = new ProgressDialogModel(async callbacks => await MergeWithDatabaseFromPaths(Db, paths, progressDialog, callbacks[0])) {
Title = "Database Merge"
};
await progressDialog.ShowDialog(window);
await progressDialog.ShowProgressDialog(window);
}
private static async Task MergeWithDatabaseFromPaths(IDatabaseFile target, string[] paths, ProgressDialog dialog, IProgressCallback callback) {
int total = paths.Length;
DialogResult.YesNo? upgradeResult = null;
async Task<bool> CheckCanUpgradeDatabase() {
upgradeResult ??= total > 1
? await DatabaseGui.ShowCanUpgradeMultipleDatabaseDialog(dialog)
: await DatabaseGui.ShowCanUpgradeDatabaseDialog(dialog);
return DialogResult.YesNo.Yes == upgradeResult;
}
var schemaUpgradeCallbacks = new SchemaUpgradeCallbacks(dialog, paths.Length);
await PerformImport(target, paths, dialog, callback, "Database Merge", "Database Error", "database file", async path => {
SynchronizationContext? prevSyncContext = SynchronizationContext.Current;
SynchronizationContext.SetSynchronizationContext(new AvaloniaSynchronizationContext());
IDatabaseFile? db = await DatabaseGui.TryOpenOrCreateDatabaseFromPath(path, dialog, CheckCanUpgradeDatabase);
SynchronizationContext.SetSynchronizationContext(prevSyncContext);
IDatabaseFile? db = await DatabaseGui.TryOpenOrCreateDatabaseFromPath(path, dialog, schemaUpgradeCallbacks);
if (db == null) {
return false;
@@ -116,6 +104,41 @@ sealed class DatabasePageModel : BaseModel {
});
}
private sealed class SchemaUpgradeCallbacks : ISchemaUpgradeCallbacks {
private readonly ProgressDialog dialog;
private readonly int total;
private bool? decision;
public SchemaUpgradeCallbacks(ProgressDialog dialog, int total) {
this.total = total;
this.dialog = dialog;
}
public async Task<bool> CanUpgrade() {
return decision ??= (total > 1
? await DatabaseGui.ShowCanUpgradeMultipleDatabaseDialog(dialog)
: await DatabaseGui.ShowCanUpgradeDatabaseDialog(dialog)) == DialogResult.YesNo.Yes;
}
public Task Start(int versionSteps, Func<ISchemaUpgradeCallbacks.IProgressReporter, Task> doUpgrade) {
return doUpgrade(new NullReporter());
}
private sealed class NullReporter : ISchemaUpgradeCallbacks.IProgressReporter {
public Task NextVersion() {
return Task.CompletedTask;
}
public Task MainWork(string message, int finishedItems, int totalItems) {
return Task.CompletedTask;
}
public Task SubWork(string message, int finishedItems, int totalItems) {
return Task.CompletedTask;
}
}
}
public async void ImportLegacyArchive() {
var paths = await window.StorageProvider.OpenFiles(new FilePickerOpenOptions {
Title = "Open Legacy DHT Archive",
@@ -128,11 +151,11 @@ sealed class DatabasePageModel : BaseModel {
}
ProgressDialog progressDialog = new ProgressDialog();
progressDialog.DataContext = new ProgressDialogModel(async callback => await ImportLegacyArchiveFromPaths(Db, paths, progressDialog, callback)) {
progressDialog.DataContext = new ProgressDialogModel(async callbacks => await ImportLegacyArchiveFromPaths(Db, paths, progressDialog, callbacks[0])) {
Title = "Legacy Archive Import"
};
await progressDialog.ShowDialog(window);
await progressDialog.ShowProgressDialog(window);
}
private static async Task ImportLegacyArchiveFromPaths(IDatabaseFile target, string[] paths, ProgressDialog dialog, IProgressCallback callback) {

View File

@@ -45,12 +45,12 @@ namespace DHT.Desktop.Main.Pages {
}
ProgressDialog progressDialog = new ProgressDialog {
DataContext = new ProgressDialogModel(async callback => await GenerateRandomData(channels, users, messages, callback)) {
DataContext = new ProgressDialogModel(async callbacks => await GenerateRandomData(channels, users, messages, callbacks[0])) {
Title = "Generating Random Data"
}
};
await progressDialog.ShowDialog(window);
await progressDialog.ShowProgressDialog(window);
}
private const int BatchSize = 500;

View File

@@ -1,10 +1,13 @@
using System;
using System.Collections.Generic;
using System.IO;
using System.Threading.Tasks;
using Avalonia.Controls;
using DHT.Desktop.Common;
using DHT.Desktop.Dialogs.Message;
using DHT.Desktop.Dialogs.Progress;
using DHT.Server.Database;
using DHT.Server.Database.Sqlite;
using DHT.Utils.Models;
namespace DHT.Desktop.Main.Screens;
@@ -39,14 +42,71 @@ sealed class WelcomeScreenModel : BaseModel, IDisposable {
}
dbFilePath = path;
Db = await DatabaseGui.TryOpenOrCreateDatabaseFromPath(path, window, CheckCanUpgradeDatabase);
Db = await DatabaseGui.TryOpenOrCreateDatabaseFromPath(path, window, new SchemaUpgradeCallbacks(window));
OnPropertyChanged(nameof(Db));
OnPropertyChanged(nameof(HasDatabase));
}
private async Task<bool> CheckCanUpgradeDatabase() {
return DialogResult.YesNo.Yes == await DatabaseGui.ShowCanUpgradeDatabaseDialog(window);
private sealed class SchemaUpgradeCallbacks : ISchemaUpgradeCallbacks {
private readonly Window window;
public SchemaUpgradeCallbacks(Window window) {
this.window = window;
}
public async Task<bool> CanUpgrade() {
return DialogResult.YesNo.Yes == await DatabaseGui.ShowCanUpgradeDatabaseDialog(window);
}
public async Task Start(int versionSteps, Func<ISchemaUpgradeCallbacks.IProgressReporter, Task> doUpgrade) {
async Task StartUpgrade(IReadOnlyList<IProgressCallback> callbacks) {
var reporter = new ProgressReporter(versionSteps, callbacks);
await reporter.NextVersion();
await Task.Delay(TimeSpan.FromMilliseconds(800));
await doUpgrade(reporter);
await Task.Delay(TimeSpan.FromMilliseconds(600));
}
await new ProgressDialog {
DataContext = new ProgressDialogModel(StartUpgrade, progressItems: 3) {
Title = "Upgrading Database"
}
}.ShowProgressDialog(window);
}
private sealed class ProgressReporter : ISchemaUpgradeCallbacks.IProgressReporter {
private readonly IReadOnlyList<IProgressCallback> callbacks;
private readonly int versionSteps;
private int versionProgress = 0;
public ProgressReporter(int versionSteps, IReadOnlyList<IProgressCallback> callbacks) {
this.callbacks = callbacks;
this.versionSteps = versionSteps;
}
public async Task NextVersion() {
await callbacks[0].Update("Upgrading schema version...", versionProgress++, versionSteps);
await HideChildren(0);
}
public async Task MainWork(string message, int finishedItems, int totalItems) {
await callbacks[1].Update(message, finishedItems, totalItems);
await HideChildren(1);
}
public async Task SubWork(string message, int finishedItems, int totalItems) {
await callbacks[2].Update(message, finishedItems, totalItems);
await HideChildren(2);
}
private async Task HideChildren(int parentIndex) {
for (int i = parentIndex + 1; i < callbacks.Count; i++) {
await callbacks[i].Hide();
}
}
}
}
public void CloseDatabase() {

View File

@@ -19,9 +19,21 @@
</PropertyGroup>
<PropertyGroup>
<SuppressTrimAnalysisWarnings>false</SuppressTrimAnalysisWarnings>
<PublishTrimmed>true</PublishTrimmed>
<TrimMode>partial</TrimMode>
<JsonSerializerIsReflectionEnabledByDefault>true</JsonSerializerIsReflectionEnabledByDefault>
<EnableUnsafeBinaryFormatterSerialization>false</EnableUnsafeBinaryFormatterSerialization>
<EnableUnsafeUTF7Encoding>false</EnableUnsafeUTF7Encoding>
<EventSourceSupport>false</EventSourceSupport>
<HttpActivityPropagationSupport>false</HttpActivityPropagationSupport>
<JsonSerializerIsReflectionEnabledByDefault>false</JsonSerializerIsReflectionEnabledByDefault>
</PropertyGroup>
<PropertyGroup>
<PublishSingleFile>true</PublishSingleFile>
<PublishReadyToRun>false</PublishReadyToRun>
<EnableCompressionInSingleFile>true</EnableCompressionInSingleFile>
<IncludeNativeLibrariesForSelfExtract>true</IncludeNativeLibrariesForSelfExtract>
</PropertyGroup>
<PropertyGroup Condition=" '$(Configuration)' == 'Release' ">

View File

@@ -0,0 +1,23 @@
using System.Text.Json.Serialization;
namespace DHT.Server.Database.Import;
sealed class DiscordEmbedLegacyJson {
public required string Url { get; init; }
public required string Type { get; init; }
public bool DhtLegacy { get; } = true;
[JsonIgnore(Condition = JsonIgnoreCondition.WhenWritingNull)]
public string? Title { get; init; }
[JsonIgnore(Condition = JsonIgnoreCondition.WhenWritingNull)]
public string? Description { get; init; }
[JsonIgnore(Condition = JsonIgnoreCondition.WhenWritingNull)]
public ImageJson? Image { get; init; }
public sealed class ImageJson {
public required string Url { get; init; }
}
}

View File

@@ -0,0 +1,7 @@
using System.Text.Json.Serialization;
namespace DHT.Server.Database.Import;
[JsonSourceGenerationOptions(PropertyNamingPolicy = JsonKnownNamingPolicy.SnakeCaseLower, GenerationMode = JsonSourceGenerationMode.Default)]
[JsonSerializable(typeof(DiscordEmbedLegacyJson))]
sealed partial class DiscordEmbedLegacyJsonContext : JsonSerializerContext {}

View File

@@ -21,7 +21,7 @@ public static class LegacyArchiveImport {
public static async Task<bool> Read(Stream stream, IDatabaseFile db, FakeSnowflake fakeSnowflake, Func<Data.Server[], Task<Dictionary<Data.Server, ulong>?>> askForServerIds) {
var perf = Log.Start();
var root = await JsonSerializer.DeserializeAsync<JsonElement>(stream);
var root = await JsonSerializer.DeserializeAsync(stream, JsonElementContext.Default.JsonElement);
try {
var meta = root.RequireObject("meta");
@@ -212,30 +212,17 @@ public static class LegacyArchiveImport {
return embedsArray.Where(static embedObj => embedObj.HasKey("url")).Select(embedObj => {
string url = embedObj.RequireString("url", path);
string type = embedObj.RequireString("type", path);
var embedJson = new Dictionary<string, object> {
{ "url", url },
{ "type", type },
{ "dht_legacy", true },
var embed = new DiscordEmbedLegacyJson {
Url = url,
Type = type,
Title = type == "rich" && embedObj.HasKey("t") ? embedObj.RequireString("t", path) : null,
Description = type == "rich" && embedObj.HasKey("d") ? embedObj.RequireString("d", path) : null,
Image = type == "image" ? new DiscordEmbedLegacyJson.ImageJson { Url = url } : null
};
if (type == "image") {
embedJson["image"] = new Dictionary<string, string> {
{ "url", url }
};
}
else if (type == "rich") {
if (embedObj.HasKey("t")) {
embedJson["title"] = embedObj.RequireString("t", path);
}
if (embedObj.HasKey("d")) {
embedJson["description"] = embedObj.RequireString("d", path);
}
}
return new Embed {
Json = JsonSerializer.Serialize(embedJson)
Json = JsonSerializer.Serialize(embed, DiscordEmbedLegacyJsonContext.Default.DiscordEmbedLegacyJson)
};
});
}

View File

@@ -0,0 +1,15 @@
using System;
using System.Threading.Tasks;
namespace DHT.Server.Database.Sqlite;
public interface ISchemaUpgradeCallbacks {
Task<bool> CanUpgrade();
Task Start(int versionSteps, Func<IProgressReporter, Task> doUpgrade);
public interface IProgressReporter {
Task NextVersion();
Task MainWork(string message, int finishedItems, int totalItems);
Task SubWork(string message, int finishedItems, int totalItems);
}
}
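
The new interface separates the upgrade decision (CanUpgrade) from progress reporting (Start hands a reporter to the upgrade routine). A minimal headless implementation might look like the sketch below; the class name is illustrative, and the reporter mirrors the NullReporter used on the database merge path above:

using System;
using System.Threading.Tasks;
using DHT.Server.Database.Sqlite;

sealed class SilentSchemaUpgradeCallbacks : ISchemaUpgradeCallbacks {
	// Always allows the upgrade and discards all progress updates.
	public Task<bool> CanUpgrade() {
		return Task.FromResult(true);
	}

	public Task Start(int versionSteps, Func<ISchemaUpgradeCallbacks.IProgressReporter, Task> doUpgrade) {
		return doUpgrade(new Reporter());
	}

	private sealed class Reporter : ISchemaUpgradeCallbacks.IProgressReporter {
		public Task NextVersion() => Task.CompletedTask;
		public Task MainWork(string message, int finishedItems, int totalItems) => Task.CompletedTask;
		public Task SubWork(string message, int finishedItems, int totalItems) => Task.CompletedTask;
	}
}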

View File

@@ -1,4 +1,3 @@
using System;
using System.Collections.Generic;
using System.Threading.Tasks;
using DHT.Server.Database.Exceptions;
@@ -20,12 +19,8 @@ sealed class Schema {
this.conn = conn;
}
private void Execute(string sql) {
conn.Command(sql).ExecuteNonQuery();
}
public async Task<bool> Setup(Func<Task<bool>> checkCanUpgradeSchemas) {
Execute(@"CREATE TABLE IF NOT EXISTS metadata (key TEXT PRIMARY KEY, value TEXT)");
public async Task<bool> Setup(ISchemaUpgradeCallbacks callbacks) {
conn.Execute(@"CREATE TABLE IF NOT EXISTS metadata (key TEXT PRIMARY KEY, value TEXT)");
var dbVersionStr = conn.SelectScalar("SELECT value FROM metadata WHERE key = 'version'");
if (dbVersionStr == null) {
@@ -38,131 +33,133 @@ sealed class Schema {
throw new DatabaseTooNewException(dbVersion);
}
else if (dbVersion < Version) {
var proceed = await checkCanUpgradeSchemas();
var proceed = await callbacks.CanUpgrade();
if (!proceed) {
return false;
}
UpgradeSchemas(dbVersion);
await callbacks.Start(Version - dbVersion, async reporter => await UpgradeSchemas(dbVersion, reporter));
}
return true;
}
private void InitializeSchemas() {
Execute("""
CREATE TABLE users (
id INTEGER PRIMARY KEY NOT NULL,
name TEXT NOT NULL,
avatar_url TEXT,
discriminator TEXT
)
""");
conn.Execute("""
CREATE TABLE users (
id INTEGER PRIMARY KEY NOT NULL,
name TEXT NOT NULL,
avatar_url TEXT,
discriminator TEXT
)
""");
Execute("""
CREATE TABLE servers (
id INTEGER PRIMARY KEY NOT NULL,
name TEXT NOT NULL,
type TEXT NOT NULL
)
""");
conn.Execute("""
CREATE TABLE servers (
id INTEGER PRIMARY KEY NOT NULL,
name TEXT NOT NULL,
type TEXT NOT NULL
)
""");
Execute("""
CREATE TABLE channels (
id INTEGER PRIMARY KEY NOT NULL,
server INTEGER NOT NULL,
name TEXT NOT NULL,
parent_id INTEGER,
position INTEGER,
topic TEXT,
nsfw INTEGER
)
""");
conn.Execute("""
CREATE TABLE channels (
id INTEGER PRIMARY KEY NOT NULL,
server INTEGER NOT NULL,
name TEXT NOT NULL,
parent_id INTEGER,
position INTEGER,
topic TEXT,
nsfw INTEGER
)
""");
Execute("""
CREATE TABLE messages (
message_id INTEGER PRIMARY KEY NOT NULL,
sender_id INTEGER NOT NULL,
channel_id INTEGER NOT NULL,
text TEXT NOT NULL,
timestamp INTEGER NOT NULL
)
""");
conn.Execute("""
CREATE TABLE messages (
message_id INTEGER PRIMARY KEY NOT NULL,
sender_id INTEGER NOT NULL,
channel_id INTEGER NOT NULL,
text TEXT NOT NULL,
timestamp INTEGER NOT NULL
)
""");
Execute("""
CREATE TABLE attachments (
message_id INTEGER NOT NULL,
attachment_id INTEGER NOT NULL PRIMARY KEY NOT NULL,
name TEXT NOT NULL,
type TEXT,
normalized_url TEXT NOT NULL,
download_url TEXT,
size INTEGER NOT NULL,
width INTEGER,
height INTEGER
)
""");
conn.Execute("""
CREATE TABLE attachments (
message_id INTEGER NOT NULL,
attachment_id INTEGER NOT NULL PRIMARY KEY NOT NULL,
name TEXT NOT NULL,
type TEXT,
normalized_url TEXT NOT NULL,
download_url TEXT,
size INTEGER NOT NULL,
width INTEGER,
height INTEGER
)
""");
Execute("""
CREATE TABLE embeds (
message_id INTEGER NOT NULL,
json TEXT NOT NULL
)
""");
conn.Execute("""
CREATE TABLE embeds (
message_id INTEGER NOT NULL,
json TEXT NOT NULL
)
""");
Execute("""
CREATE TABLE downloads (
normalized_url TEXT NOT NULL PRIMARY KEY,
download_url TEXT,
status INTEGER NOT NULL,
size INTEGER NOT NULL,
blob BLOB
)
""");
conn.Execute("""
CREATE TABLE downloads (
normalized_url TEXT NOT NULL PRIMARY KEY,
download_url TEXT,
status INTEGER NOT NULL,
size INTEGER NOT NULL,
blob BLOB
)
""");
Execute("""
CREATE TABLE reactions (
message_id INTEGER NOT NULL,
emoji_id INTEGER,
emoji_name TEXT,
emoji_flags INTEGER NOT NULL,
count INTEGER NOT NULL
)
""");
conn.Execute("""
CREATE TABLE reactions (
message_id INTEGER NOT NULL,
emoji_id INTEGER,
emoji_name TEXT,
emoji_flags INTEGER NOT NULL,
count INTEGER NOT NULL
)
""");
CreateMessageEditTimestampTable();
CreateMessageRepliedToTable();
Execute("CREATE INDEX attachments_message_ix ON attachments(message_id)");
Execute("CREATE INDEX embeds_message_ix ON embeds(message_id)");
Execute("CREATE INDEX reactions_message_ix ON reactions(message_id)");
conn.Execute("CREATE INDEX attachments_message_ix ON attachments(message_id)");
conn.Execute("CREATE INDEX embeds_message_ix ON embeds(message_id)");
conn.Execute("CREATE INDEX reactions_message_ix ON reactions(message_id)");
Execute("INSERT INTO metadata (key, value) VALUES ('version', " + Version + ")");
conn.Execute("INSERT INTO metadata (key, value) VALUES ('version', " + Version + ")");
}
private void CreateMessageEditTimestampTable() {
Execute("""
CREATE TABLE edit_timestamps (
message_id INTEGER PRIMARY KEY NOT NULL,
edit_timestamp INTEGER NOT NULL
)
""");
conn.Execute("""
CREATE TABLE edit_timestamps (
message_id INTEGER PRIMARY KEY NOT NULL,
edit_timestamp INTEGER NOT NULL
)
""");
}
private void CreateMessageRepliedToTable() {
Execute("""
CREATE TABLE replied_to (
message_id INTEGER PRIMARY KEY NOT NULL,
replied_to_id INTEGER NOT NULL
)
""");
conn.Execute("""
CREATE TABLE replied_to (
message_id INTEGER PRIMARY KEY NOT NULL,
replied_to_id INTEGER NOT NULL
)
""");
}
private void NormalizeAttachmentUrls() {
private async Task NormalizeAttachmentUrls(ISchemaUpgradeCallbacks.IProgressReporter reporter) {
await reporter.SubWork("Preparing attachments...", 0, 0);
var normalizedUrls = new Dictionary<long, string>();
using (var selectCmd = conn.Command("SELECT attachment_id, url FROM attachments")) {
using var reader = selectCmd.ExecuteReader();
await using (var selectCmd = conn.Command("SELECT attachment_id, url FROM attachments")) {
await using var reader = await selectCmd.ExecuteReaderAsync();
while (reader.Read()) {
var attachmentId = reader.GetInt64(0);
@@ -171,28 +168,39 @@ sealed class Schema {
}
}
using var tx = conn.BeginTransaction();
using (var updateCmd = conn.Command("UPDATE attachments SET download_url = url, url = :normalized_url WHERE attachment_id = :attachment_id")) {
await using var tx = conn.BeginTransaction();
int totalUrls = normalizedUrls.Count;
int processedUrls = -1;
await using (var updateCmd = conn.Command("UPDATE attachments SET download_url = url, url = :normalized_url WHERE attachment_id = :attachment_id")) {
updateCmd.Parameters.Add(":attachment_id", SqliteType.Integer);
updateCmd.Parameters.Add(":normalized_url", SqliteType.Text);
foreach (var (attachmentId, normalizedUrl) in normalizedUrls) {
if (++processedUrls % 1000 == 0) {
await reporter.SubWork("Updating URLs...", processedUrls, totalUrls);
}
updateCmd.Set(":attachment_id", attachmentId);
updateCmd.Set(":normalized_url", normalizedUrl);
updateCmd.ExecuteNonQuery();
}
}
tx.Commit();
await reporter.SubWork("Updating URLs...", totalUrls, totalUrls);
await tx.CommitAsync();
}
private void NormalizeDownloadUrls() {
private async Task NormalizeDownloadUrls(ISchemaUpgradeCallbacks.IProgressReporter reporter) {
await reporter.SubWork("Preparing downloads...", 0, 0);
var normalizedUrlsToOriginalUrls = new Dictionary<string, string>();
var duplicateUrlsToDelete = new HashSet<string>();
using (var selectCmd = conn.Command("SELECT url FROM downloads ORDER BY CASE WHEN status = 200 THEN 0 ELSE 1 END")) {
using var reader = selectCmd.ExecuteReader();
await using (var selectCmd = conn.Command("SELECT url FROM downloads ORDER BY CASE WHEN status = 200 THEN 0 ELSE 1 END")) {
await using var reader = await selectCmd.ExecuteReaderAsync();
while (reader.Read()) {
var originalUrl = reader.GetString(0);
@@ -204,96 +212,144 @@ sealed class Schema {
}
}
using var tx = conn.BeginTransaction();
conn.Execute("PRAGMA cache_size = -20000");
SqliteTransaction tx;
using (var deleteCmd = conn.Delete("downloads", ("url", SqliteType.Text))) {
foreach (var duplicateUrl in duplicateUrlsToDelete) {
deleteCmd.Set(":url", duplicateUrl);
deleteCmd.ExecuteNonQuery();
await using (tx = conn.BeginTransaction()) {
await reporter.SubWork("Deleting duplicates...", 0, 0);
await using (var deleteCmd = conn.Delete("downloads", ("url", SqliteType.Text))) {
foreach (var duplicateUrl in duplicateUrlsToDelete) {
deleteCmd.Set(":url", duplicateUrl);
deleteCmd.ExecuteNonQuery();
}
}
}
using (var updateCmd = conn.Command("UPDATE downloads SET download_url = :download_url, url = :normalized_url WHERE url = :download_url")) {
await tx.CommitAsync();
}
int totalUrls = normalizedUrlsToOriginalUrls.Count;
int processedUrls = -1;
tx = conn.BeginTransaction();
await using (var updateCmd = conn.Command("UPDATE downloads SET download_url = :download_url, url = :normalized_url WHERE url = :download_url")) {
updateCmd.Parameters.Add(":normalized_url", SqliteType.Text);
updateCmd.Parameters.Add(":download_url", SqliteType.Text);
foreach (var (normalizedUrl, downloadUrl) in normalizedUrlsToOriginalUrls) {
if (++processedUrls % 100 == 0) {
await reporter.SubWork("Updating URLs...", processedUrls, totalUrls);
// Not the proper way of dealing with transactions, but it avoids a long commit at the end.
// Schema upgrades are already non-atomic anyway, so this doesn't make it worse.
await tx.CommitAsync();
await tx.DisposeAsync();
tx = conn.BeginTransaction();
updateCmd.Transaction = tx;
}
updateCmd.Set(":normalized_url", normalizedUrl);
updateCmd.Set(":download_url", downloadUrl);
updateCmd.ExecuteNonQuery();
}
}
tx.Commit();
await reporter.SubWork("Updating URLs...", totalUrls, totalUrls);
await tx.CommitAsync();
await tx.DisposeAsync();
conn.Execute("PRAGMA cache_size = -2000");
}
private void UpgradeSchemas(int dbVersion) {
private async Task UpgradeSchemas(int dbVersion, ISchemaUpgradeCallbacks.IProgressReporter reporter) {
var perf = Log.Start("from version " + dbVersion);
Execute("UPDATE metadata SET value = " + Version + " WHERE key = 'version'");
conn.Execute("UPDATE metadata SET value = " + Version + " WHERE key = 'version'");
if (dbVersion <= 1) {
Execute("ALTER TABLE channels ADD parent_id INTEGER");
await reporter.MainWork("Applying schema changes...", 0, 1);
conn.Execute("ALTER TABLE channels ADD parent_id INTEGER");
perf.Step("Upgrade to version 2");
await reporter.NextVersion();
}
if (dbVersion <= 2) {
await reporter.MainWork("Applying schema changes...", 0, 1);
CreateMessageEditTimestampTable();
CreateMessageRepliedToTable();
Execute("""
INSERT INTO edit_timestamps (message_id, edit_timestamp)
SELECT message_id, edit_timestamp
FROM messages
WHERE edit_timestamp IS NOT NULL
""");
conn.Execute("""
INSERT INTO edit_timestamps (message_id, edit_timestamp)
SELECT message_id, edit_timestamp
FROM messages
WHERE edit_timestamp IS NOT NULL
""");
Execute("""
INSERT INTO replied_to (message_id, replied_to_id)
SELECT message_id, replied_to_id
FROM messages
WHERE replied_to_id IS NOT NULL
""");
conn.Execute("""
INSERT INTO replied_to (message_id, replied_to_id)
SELECT message_id, replied_to_id
FROM messages
WHERE replied_to_id IS NOT NULL
""");
Execute("ALTER TABLE messages DROP COLUMN replied_to_id");
Execute("ALTER TABLE messages DROP COLUMN edit_timestamp");
conn.Execute("ALTER TABLE messages DROP COLUMN replied_to_id");
conn.Execute("ALTER TABLE messages DROP COLUMN edit_timestamp");
perf.Step("Upgrade to version 3");
Execute("VACUUM");
await reporter.MainWork("Vacuuming the database...", 1, 1);
conn.Execute("VACUUM");
perf.Step("Vacuum");
await reporter.NextVersion();
}
if (dbVersion <= 3) {
Execute("""
CREATE TABLE downloads (
url TEXT NOT NULL PRIMARY KEY,
status INTEGER NOT NULL,
size INTEGER NOT NULL,
blob BLOB
)
""");
conn.Execute("""
CREATE TABLE downloads (
url TEXT NOT NULL PRIMARY KEY,
status INTEGER NOT NULL,
size INTEGER NOT NULL,
blob BLOB
)
""");
perf.Step("Upgrade to version 4");
await reporter.NextVersion();
}
if (dbVersion <= 4) {
Execute("ALTER TABLE attachments ADD width INTEGER");
Execute("ALTER TABLE attachments ADD height INTEGER");
await reporter.MainWork("Applying schema changes...", 0, 1);
conn.Execute("ALTER TABLE attachments ADD width INTEGER");
conn.Execute("ALTER TABLE attachments ADD height INTEGER");
perf.Step("Upgrade to version 5");
await reporter.NextVersion();
}
if (dbVersion <= 5) {
Execute("ALTER TABLE attachments ADD download_url TEXT");
Execute("ALTER TABLE downloads ADD download_url TEXT");
await reporter.MainWork("Applying schema changes...", 0, 3);
conn.Execute("ALTER TABLE attachments ADD download_url TEXT");
conn.Execute("ALTER TABLE downloads ADD download_url TEXT");
NormalizeAttachmentUrls();
NormalizeDownloadUrls();
await reporter.MainWork("Updating attachments...", 1, 3);
await NormalizeAttachmentUrls(reporter);
Execute("ALTER TABLE attachments RENAME COLUMN url TO normalized_url");
Execute("ALTER TABLE downloads RENAME COLUMN url TO normalized_url");
await reporter.MainWork("Updating downloads...", 2, 3);
await NormalizeDownloadUrls(reporter);
await reporter.MainWork("Applying schema changes...", 3, 3);
conn.Execute("ALTER TABLE attachments RENAME COLUMN url TO normalized_url");
conn.Execute("ALTER TABLE downloads RENAME COLUMN url TO normalized_url");
perf.Step("Upgrade to version 6");
await reporter.NextVersion();
}
perf.End();
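
NormalizeDownloadUrls above commits and reopens the transaction every 100 updates so no single commit grows too large, accepting that the upgrade is not atomic. The same pattern, reduced to a synchronous standalone sketch with placeholder table and column names:

using System.Collections.Generic;
using Microsoft.Data.Sqlite;

static class BatchedUpdateExample {
	// Commits every 100 rows to keep each transaction short; like the schema
	// upgrade itself, this is intentionally non-atomic.
	public static void UpdateInBatches(SqliteConnection conn, IReadOnlyDictionary<string, string> updates) {
		SqliteTransaction tx = conn.BeginTransaction();

		using var cmd = conn.CreateCommand();
		cmd.Transaction = tx;
		cmd.CommandText = "UPDATE items SET value = :value WHERE key = :key";
		cmd.Parameters.Add(":key", SqliteType.Text);
		cmd.Parameters.Add(":value", SqliteType.Text);

		int processed = 0;
		foreach (var (key, value) in updates) {
			if (processed++ % 100 == 0 && processed > 1) {
				tx.Commit();
				tx.Dispose();
				tx = conn.BeginTransaction();
				cmd.Transaction = tx;
			}

			cmd.Parameters[":key"].Value = key;
			cmd.Parameters[":value"].Value = value;
			cmd.ExecuteNonQuery();
		}

		tx.Commit();
		tx.Dispose();
	}
}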

View File

@@ -18,7 +18,7 @@ namespace DHT.Server.Database.Sqlite;
public sealed class SqliteDatabaseFile : IDatabaseFile {
private const int DefaultPoolSize = 5;
public static async Task<SqliteDatabaseFile?> OpenOrCreate(string path, Func<Task<bool>> checkCanUpgradeSchemas) {
public static async Task<SqliteDatabaseFile?> OpenOrCreate(string path, ISchemaUpgradeCallbacks schemaUpgradeCallbacks, TaskScheduler computeTaskResultScheduler) {
var connectionString = new SqliteConnectionStringBuilder {
DataSource = path,
Mode = SqliteOpenMode.ReadWriteCreate,
@@ -27,12 +27,16 @@ public sealed class SqliteDatabaseFile : IDatabaseFile {
var pool = new SqliteConnectionPool(connectionString, DefaultPoolSize);
bool wasOpened;
using (var conn = pool.Take()) {
wasOpened = await new Schema(conn).Setup(checkCanUpgradeSchemas);
try {
using var conn = pool.Take();
wasOpened = await new Schema(conn).Setup(schemaUpgradeCallbacks);
} catch (Exception) {
pool.Dispose();
throw;
}
if (wasOpened) {
return new SqliteDatabaseFile(path, pool);
return new SqliteDatabaseFile(path, pool, computeTaskResultScheduler);
}
else {
pool.Dispose();
@@ -49,13 +53,13 @@ public sealed class SqliteDatabaseFile : IDatabaseFile {
private readonly AsyncValueComputer<long>.Single totalAttachmentsComputer;
private readonly AsyncValueComputer<long>.Single totalDownloadsComputer;
private SqliteDatabaseFile(string path, SqliteConnectionPool pool) {
private SqliteDatabaseFile(string path, SqliteConnectionPool pool, TaskScheduler computeTaskResultScheduler) {
this.log = Log.ForType(typeof(SqliteDatabaseFile), System.IO.Path.GetFileName(path));
this.pool = pool;
this.totalMessagesComputer = AsyncValueComputer<long>.WithResultProcessor(UpdateMessageStatistics).WithOutdatedResults().BuildWithComputer(ComputeMessageStatistics);
this.totalAttachmentsComputer = AsyncValueComputer<long>.WithResultProcessor(UpdateAttachmentStatistics).WithOutdatedResults().BuildWithComputer(ComputeAttachmentStatistics);
this.totalDownloadsComputer = AsyncValueComputer<long>.WithResultProcessor(UpdateDownloadStatistics).WithOutdatedResults().BuildWithComputer(ComputeDownloadStatistics);
this.totalMessagesComputer = AsyncValueComputer<long>.WithResultProcessor(UpdateMessageStatistics, computeTaskResultScheduler).WithOutdatedResults().BuildWithComputer(ComputeMessageStatistics);
this.totalAttachmentsComputer = AsyncValueComputer<long>.WithResultProcessor(UpdateAttachmentStatistics, computeTaskResultScheduler).WithOutdatedResults().BuildWithComputer(ComputeAttachmentStatistics);
this.totalDownloadsComputer = AsyncValueComputer<long>.WithResultProcessor(UpdateDownloadStatistics, computeTaskResultScheduler).WithOutdatedResults().BuildWithComputer(ComputeDownloadStatistics);
this.Path = path;
this.Statistics = new DatabaseStatistics();

View File

@@ -5,14 +5,19 @@ using Microsoft.Data.Sqlite;
namespace DHT.Server.Database.Sqlite.Utils;
static class SqliteExtensions {
public static SqliteTransaction BeginTransaction(this ISqliteConnection conn) {
return conn.InnerConnection.BeginTransaction();
}
public static SqliteCommand Command(this ISqliteConnection conn, string sql) {
var cmd = conn.InnerConnection.CreateCommand();
cmd.CommandText = sql;
return cmd;
}
public static SqliteTransaction BeginTransaction(this ISqliteConnection conn) {
return conn.InnerConnection.BeginTransaction();
public static void Execute(this ISqliteConnection conn, string sql) {
using var cmd = conn.Command(sql);
cmd.ExecuteNonQuery();
}
public static object? SelectScalar(this ISqliteConnection conn, string sql) {
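
The relocated Command helper and the new Execute helper split responsibilities: Execute creates, runs, and disposes a one-off command, while Command hands the SqliteCommand to the caller to own. A hedged usage sketch (the table name is a placeholder, and ISqliteConnection is assumed to live in the same Utils namespace):

using DHT.Server.Database.Sqlite.Utils;

static class SqliteExtensionsUsageExample {
	// Illustrative only; not part of this diff.
	public static long CountExampleRows(ISqliteConnection conn) {
		// Execute wraps command creation, execution, and disposal.
		conn.Execute("CREATE TABLE IF NOT EXISTS example (id INTEGER PRIMARY KEY)");

		// Command returns a SqliteCommand that the caller must dispose.
		using var cmd = conn.Command("SELECT COUNT(*) FROM example");
		return (long) (cmd.ExecuteScalar() ?? 0L);
	}
}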

View File

@@ -60,6 +60,10 @@ abstract class BaseEndpoint {
protected abstract Task<IHttpOutput> Respond(HttpContext ctx);
protected static async Task<JsonElement> ReadJson(HttpContext ctx) {
return await ctx.Request.ReadFromJsonAsync<JsonElement?>() ?? throw new HttpException(HttpStatusCode.UnsupportedMediaType, "This endpoint only accepts JSON.");
try {
return await ctx.Request.ReadFromJsonAsync(JsonElementContext.Default.JsonElement);
} catch (JsonException) {
throw new HttpException(HttpStatusCode.UnsupportedMediaType, "This endpoint only accepts JSON.");
}
}
}

View File

@@ -17,6 +17,9 @@ using Microsoft.AspNetCore.Http;
namespace DHT.Server.Endpoints;
sealed class TrackMessagesEndpoint : BaseEndpoint {
private const string HasNewMessages = "1";
private const string NoNewMessages = "0";
public TrackMessagesEndpoint(IDatabaseFile db, ServerParameters parameters) : base(db, parameters) {}
protected override async Task<IHttpOutput> Respond(HttpContext ctx) {
@@ -41,7 +44,7 @@ sealed class TrackMessagesEndpoint : BaseEndpoint {
Db.AddMessages(messages);
return new HttpOutput.Json(anyNewMessages ? 1 : 0);
return new HttpOutput.Text(anyNewMessages ? HasNewMessages : NoNewMessages);
}
private static Message ReadMessage(JsonElement json, string path) => new() {

View File

@@ -1,3 +1,4 @@
using System.Text;
using System.Threading.Tasks;
using Microsoft.AspNetCore.Http;
@@ -12,15 +13,15 @@ public static class HttpOutput {
}
}
public sealed class Json : IHttpOutput {
private readonly object? obj;
public sealed class Text : IHttpOutput {
private readonly string text;
public Json(object? obj) {
this.obj = obj;
public Text(string text) {
this.text = text;
}
public Task WriteTo(HttpResponse response) {
return response.WriteAsJsonAsync(obj);
return response.WriteAsync(text, Encoding.UTF8);
}
}

View File

@@ -0,0 +1,8 @@
using System.Text.Json;
using System.Text.Json.Serialization;
namespace DHT.Utils.Http;
[JsonSourceGenerationOptions(PropertyNamingPolicy = JsonKnownNamingPolicy.CamelCase, GenerationMode = JsonSourceGenerationMode.Default)]
[JsonSerializable(typeof(JsonElement))]
public sealed partial class JsonElementContext : JsonSerializerContext {}

View File

@@ -8,5 +8,5 @@ using DHT.Utils;
namespace DHT.Utils;
static class Version {
public const string Tag = "39.0.0.0";
public const string Tag = "39.1.0.0";
}

View File

@@ -4,11 +4,11 @@ set list=win-x64 linux-x64 osx-x64
rmdir /S /Q bin
(for %%a in (%list%) do (
dotnet publish Desktop -c Release -r %%a -o ./bin/%%a -p:PublishSingleFile=true -p:IncludeNativeLibrariesForSelfExtract=true -p:PublishReadyToRun=false --self-contained true
dotnet publish Desktop -c Release -r %%a -o ./bin/%%a --self-contained true
powershell "Compress-Archive -Path ./bin/%%a/* -DestinationPath ./bin/%%a.zip -CompressionLevel Optimal"
))
dotnet publish Desktop -c Release -o ./bin/portable -p:PublishTrimmed=false --self-contained false
dotnet publish Desktop -c Release -o ./bin/portable -p:PublishSingleFile=false -p:PublishTrimmed=false --self-contained false
powershell "Compress-Archive -Path ./bin/portable/* -DestinationPath ./bin/portable.zip -CompressionLevel Optimal"
echo Done

View File

@@ -17,9 +17,9 @@ rm -rf "./bin"
configurations=(win-x64 linux-x64 osx-x64)
for cfg in ${configurations[@]}; do
dotnet publish Desktop -c Release -r "$cfg" -o "./bin/$cfg" -p:PublishSingleFile=true -p:IncludeNativeLibrariesForSelfExtract=true -p:PublishReadyToRun=false --self-contained true
dotnet publish Desktop -c Release -r "$cfg" -o "./bin/$cfg" --self-contained true
makezip "$cfg"
done
dotnet publish Desktop -c Release -o "./bin/portable" -p:PublishTrimmed=false --self-contained false
dotnet publish Desktop -c Release -o "./bin/portable" -p:PublishSingleFile=false -p:PublishTrimmed=false --self-contained false
makezip "portable"