Mirror of https://github.com/chylex/Discord-History-Tracker.git, synced 2024-11-25 05:42:45 +01:00

Compare commits

..

14 Commits

44 changed files with 577 additions and 507 deletions

View File

@ -13,10 +13,10 @@
<CssCodeStyleSettings>
<option name="HEX_COLOR_LOWER_CASE" value="true" />
</CssCodeStyleSettings>
<DB2CodeStyleSettings version="6">
<DB2CodeStyleSettings version="7">
<option name="USE_GENERIC_STYLE" value="true" />
</DB2CodeStyleSettings>
<DerbyCodeStyleSettings version="6">
<DerbyCodeStyleSettings version="7">
<option name="USE_GENERIC_STYLE" value="true" />
</DerbyCodeStyleSettings>
<GoCodeStyleSettings>
@ -26,10 +26,10 @@
<option name="WRAP_FUNC_PARAMS" value="5" />
<option name="WRAP_FUNC_RESULT" value="5" />
</GoCodeStyleSettings>
<H2CodeStyleSettings version="6">
<H2CodeStyleSettings version="7">
<option name="USE_GENERIC_STYLE" value="true" />
</H2CodeStyleSettings>
<HSQLCodeStyleSettings version="6">
<HSQLCodeStyleSettings version="7">
<option name="USE_GENERIC_STYLE" value="true" />
</HSQLCodeStyleSettings>
<HTMLCodeStyleSettings>
@ -85,10 +85,10 @@
<LessCodeStyleSettings>
<option name="HEX_COLOR_LOWER_CASE" value="true" />
</LessCodeStyleSettings>
<MSSQLCodeStyleSettings version="6">
<MSSQLCodeStyleSettings version="7">
<option name="USE_GENERIC_STYLE" value="true" />
</MSSQLCodeStyleSettings>
<MySQLCodeStyleSettings version="6">
<MySQLCodeStyleSettings version="7">
<option name="USE_GENERIC_STYLE" value="true" />
</MySQLCodeStyleSettings>
<Objective-C>
@ -98,7 +98,7 @@
<option name="SPACE_BEFORE_INIT_LIST" value="true" />
<option name="SPACE_AFTER_DICTIONARY_LITERAL_COLON" value="false" />
</Objective-C>
<OracleCodeStyleSettings version="6">
<OracleCodeStyleSettings version="7">
<option name="USE_GENERIC_STYLE" value="true" />
</OracleCodeStyleSettings>
<PHPCodeStyleSettings>
@ -116,7 +116,7 @@
<option name="NEW_LINE_AFTER_PHP_OPENING_TAG" value="true" />
<option name="SPACE_AROUND_ASSIGNMENT_IN_DECLARE" value="true" />
</PHPCodeStyleSettings>
<PostgresCodeStyleSettings version="6">
<PostgresCodeStyleSettings version="7">
<option name="USE_GENERIC_STYLE" value="true" />
</PostgresCodeStyleSettings>
<Properties>
@ -143,7 +143,7 @@
<option name="INDENT_WHEN_CASES" value="true" />
<option name="CHAIN_CALLS_ALIGNMENT" value="2" />
</Ruby>
<SQLiteCodeStyleSettings version="6">
<SQLiteCodeStyleSettings version="7">
<option name="USE_GENERIC_STYLE" value="true" />
</SQLiteCodeStyleSettings>
<ScssCodeStyleSettings>
@ -152,7 +152,7 @@
<Shell_Script>
<option name="KEEP_COLUMN_ALIGNMENT_PADDING" value="true" />
</Shell_Script>
<SqlCodeStyleSettings version="6">
<SqlCodeStyleSettings version="7">
<option name="KEYWORD_CASE" value="2" />
<option name="TYPE_CASE" value="2" />
<option name="CUSTOM_TYPE_CASE" value="2" />
@ -179,7 +179,7 @@
<option name="IMP_LOOP_LOOP_INDENT" value="false" />
<option name="EXPR_CASE_WHEN_WRAP" value="false" />
</SqlCodeStyleSettings>
<SybaseCodeStyleSettings version="6">
<SybaseCodeStyleSettings version="7">
<option name="USE_GENERIC_STYLE" value="true" />
</SybaseCodeStyleSettings>
<TypeScriptCodeStyleSettings version="0">

View File

@ -15,6 +15,7 @@ sealed class Arguments {
public string? DatabaseFile { get; }
public ushort? ServerPort { get; }
public string? ServerToken { get; }
public byte? ConcurrentDownloads { get; }
public Arguments(IReadOnlyList<string> args) {
for (int i = FirstArgument; i < args.Count; i++) {
@ -50,11 +51,11 @@ sealed class Arguments {
continue;
case "-port": {
if (ushort.TryParse(value, out var port)) {
ServerPort = port;
if (!ushort.TryParse(value, out var port)) {
Log.Warn("Invalid port number: " + value);
}
else {
Log.Warn("Invalid port number: " + value);
ServerPort = port;
}
continue;
@ -64,6 +65,20 @@ sealed class Arguments {
ServerToken = value;
continue;
case "-concurrentdownloads":
if (!ulong.TryParse(value, out var concurrentDownloads) || concurrentDownloads == 0) {
Log.Warn("Invalid concurrent downloads count: " + value);
}
else if (concurrentDownloads > 10) {
Log.Warn("Limiting concurrent downloads to 10");
ConcurrentDownloads = 10;
}
else {
ConcurrentDownloads = (byte) concurrentDownloads;
}
continue;
default:
Log.Warn("Unknown command line argument: " + key);
break;
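A minimal standalone sketch of the clamping rules the new -concurrentdownloads argument applies (illustrative only, not code from the diff): counts that fail to parse or equal zero are rejected with a warning, and anything above 10 is capped at 10.

static byte? ParseConcurrentDownloads(string value) {
	if (!ulong.TryParse(value, out var parsed) || parsed == 0) {
		return null; // rejected; the real code logs "Invalid concurrent downloads count"
	}
	// mirrors the "Limiting concurrent downloads to 10" branch
	return parsed > 10 ? (byte) 10 : (byte) parsed;
}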

View File

@ -30,6 +30,7 @@ sealed partial class MainWindowModel : ObservableObject, IAsyncDisposable {
private MainContentScreenModel? mainContentScreenModel;
private readonly Window window;
private readonly int? concurrentDownloads;
private State? state;
@ -73,6 +74,8 @@ sealed partial class MainWindowModel : ObservableObject, IAsyncDisposable {
if (args.ServerToken != null) {
ServerConfiguration.Token = args.ServerToken;
}
concurrentDownloads = args.ConcurrentDownloads;
}
private async void OnDatabaseSelected(object? sender, IDatabaseFile db) {
@ -80,7 +83,7 @@ sealed partial class MainWindowModel : ObservableObject, IAsyncDisposable {
await DisposeState();
state = new State(db);
state = new State(db, concurrentDownloads);
try {
await state.Server.Start(ServerConfiguration.Port, ServerConfiguration.Token);

View File

@ -10,164 +10,164 @@ using DHT.Server;
using DHT.Server.Data;
using DHT.Server.Service;
namespace DHT.Desktop.Main.Pages {
sealed class DebugPageModel {
public string GenerateChannels { get; set; } = "0";
public string GenerateUsers { get; set; } = "0";
public string GenerateMessages { get; set; } = "0";
namespace DHT.Desktop.Main.Pages;
private readonly Window window;
private readonly State state;
sealed class DebugPageModel {
public string GenerateChannels { get; set; } = "0";
public string GenerateUsers { get; set; } = "0";
public string GenerateMessages { get; set; } = "0";
[Obsolete("Designer")]
public DebugPageModel() : this(null!, State.Dummy) {}
private readonly Window window;
private readonly State state;
public DebugPageModel(Window window, State state) {
this.window = window;
this.state = state;
[Obsolete("Designer")]
public DebugPageModel() : this(null!, State.Dummy) {}
public DebugPageModel(Window window, State state) {
this.window = window;
this.state = state;
}
public async void OnClickAddRandomDataToDatabase() {
if (!int.TryParse(GenerateChannels, out int channels) || channels < 1) {
await Dialog.ShowOk(window, "Generate Random Data", "Amount of channels must be at least 1!");
return;
}
public async void OnClickAddRandomDataToDatabase() {
if (!int.TryParse(GenerateChannels, out int channels) || channels < 1) {
await Dialog.ShowOk(window, "Generate Random Data", "Amount of channels must be at least 1!");
return;
}
if (!int.TryParse(GenerateUsers, out int users) || users < 1) {
await Dialog.ShowOk(window, "Generate Random Data", "Amount of users must be at least 1!");
return;
}
if (!int.TryParse(GenerateMessages, out int messages) || messages < 1) {
await Dialog.ShowOk(window, "Generate Random Data", "Amount of messages must be at least 1!");
return;
}
await ProgressDialog.Show(window, "Generating Random Data", async (_, callback) => await GenerateRandomData(channels, users, messages, callback));
if (!int.TryParse(GenerateUsers, out int users) || users < 1) {
await Dialog.ShowOk(window, "Generate Random Data", "Amount of users must be at least 1!");
return;
}
private const int BatchSize = 500;
if (!int.TryParse(GenerateMessages, out int messages) || messages < 1) {
await Dialog.ShowOk(window, "Generate Random Data", "Amount of messages must be at least 1!");
return;
}
private async Task GenerateRandomData(int channelCount, int userCount, int messageCount, IProgressCallback callback) {
int batchCount = (messageCount + BatchSize - 1) / BatchSize;
await callback.Update("Adding messages in batches of " + BatchSize, 0, batchCount);
await ProgressDialog.Show(window, "Generating Random Data", async (_, callback) => await GenerateRandomData(channels, users, messages, callback));
}
var rand = new Random();
var server = new DHT.Server.Data.Server {
Id = RandomId(rand),
Name = RandomName("s"),
Type = ServerType.Server,
};
private const int BatchSize = 500;
var channels = Enumerable.Range(0, channelCount).Select(i => new Channel {
Id = RandomId(rand),
Server = server.Id,
Name = RandomName("c"),
ParentId = null,
Position = i,
Topic = RandomText(rand, 10),
Nsfw = rand.Next(4) == 0,
private async Task GenerateRandomData(int channelCount, int userCount, int messageCount, IProgressCallback callback) {
int batchCount = (messageCount + BatchSize - 1) / BatchSize;
await callback.Update("Adding messages in batches of " + BatchSize, 0, batchCount);
var rand = new Random();
var server = new DHT.Server.Data.Server {
Id = RandomId(rand),
Name = RandomName("s"),
Type = ServerType.Server,
};
var channels = Enumerable.Range(0, channelCount).Select(i => new Channel {
Id = RandomId(rand),
Server = server.Id,
Name = RandomName("c"),
ParentId = null,
Position = i,
Topic = RandomText(rand, 10),
Nsfw = rand.Next(4) == 0,
}).ToArray();
var users = Enumerable.Range(0, userCount).Select(_ => new User {
Id = RandomId(rand),
Name = RandomName("u"),
AvatarUrl = null,
Discriminator = rand.Next(0, 9999).ToString(),
}).ToArray();
await state.Db.Users.Add(users);
await state.Db.Servers.Add([server]);
await state.Db.Channels.Add(channels);
var now = DateTimeOffset.Now;
int batchIndex = 0;
while (messageCount > 0) {
int hourOffset = batchIndex;
var messages = Enumerable.Range(0, Math.Min(messageCount, BatchSize)).Select(i => {
DateTimeOffset time = now.AddHours(hourOffset).AddMinutes(i * 60.0 / BatchSize);
DateTimeOffset? edit = rand.Next(100) == 0 ? time.AddSeconds(rand.Next(1, 60)) : null;
var timeMillis = time.ToUnixTimeMilliseconds();
var editMillis = edit?.ToUnixTimeMilliseconds();
return new Message {
Id = (ulong) timeMillis,
Sender = RandomBiasedIndex(rand, users).Id,
Channel = RandomBiasedIndex(rand, channels).Id,
Text = RandomText(rand, 100),
Timestamp = timeMillis,
EditTimestamp = editMillis,
RepliedToId = null,
Attachments = ImmutableList<Attachment>.Empty,
Embeds = ImmutableList<Embed>.Empty,
Reactions = ImmutableList<Reaction>.Empty,
};
}).ToArray();
var users = Enumerable.Range(0, userCount).Select(_ => new User {
Id = RandomId(rand),
Name = RandomName("u"),
AvatarUrl = null,
Discriminator = rand.Next(0, 9999).ToString(),
}).ToArray();
await state.Db.Messages.Add(messages);
await state.Db.Users.Add(users);
await state.Db.Servers.Add([server]);
await state.Db.Channels.Add(channels);
var now = DateTimeOffset.Now;
int batchIndex = 0;
while (messageCount > 0) {
int hourOffset = batchIndex;
var messages = Enumerable.Range(0, Math.Min(messageCount, BatchSize)).Select(i => {
DateTimeOffset time = now.AddHours(hourOffset).AddMinutes(i * 60.0 / BatchSize);
DateTimeOffset? edit = rand.Next(100) == 0 ? time.AddSeconds(rand.Next(1, 60)) : null;
var timeMillis = time.ToUnixTimeMilliseconds();
var editMillis = edit?.ToUnixTimeMilliseconds();
return new Message {
Id = (ulong) timeMillis,
Sender = RandomBiasedIndex(rand, users).Id,
Channel = RandomBiasedIndex(rand, channels).Id,
Text = RandomText(rand, 100),
Timestamp = timeMillis,
EditTimestamp = editMillis,
RepliedToId = null,
Attachments = ImmutableList<Attachment>.Empty,
Embeds = ImmutableList<Embed>.Empty,
Reactions = ImmutableList<Reaction>.Empty,
};
}).ToArray();
await state.Db.Messages.Add(messages);
messageCount -= BatchSize;
await callback.Update("Adding messages in batches of " + BatchSize, ++batchIndex, batchCount);
}
messageCount -= BatchSize;
await callback.Update("Adding messages in batches of " + BatchSize, ++batchIndex, batchCount);
}
}
private static ulong RandomId(Random rand) {
ulong h = unchecked((ulong) rand.Next());
ulong l = unchecked((ulong) rand.Next());
return (h << 32) | l;
}
private static ulong RandomId(Random rand) {
ulong h = unchecked((ulong) rand.Next());
ulong l = unchecked((ulong) rand.Next());
return (h << 32) | l;
}
private static string RandomName(string prefix) {
return prefix + "-" + ServerUtils.GenerateRandomToken(5);
}
private static string RandomName(string prefix) {
return prefix + "-" + ServerUtils.GenerateRandomToken(5);
}
private static T RandomBiasedIndex<T>(Random rand, T[] options) {
return options[(int) Math.Floor(options.Length * rand.NextDouble() * rand.NextDouble())];
}
private static T RandomBiasedIndex<T>(Random rand, T[] options) {
return options[(int) Math.Floor(options.Length * rand.NextDouble() * rand.NextDouble())];
}
private static readonly string[] RandomWords = [
"apple", "apricot", "artichoke", "arugula", "asparagus", "avocado",
"banana", "bean", "beechnut", "beet", "blackberry", "blackcurrant", "blueberry", "boysenberry", "bramble", "broccoli",
"cabbage", "cacao", "cantaloupe", "caper", "carambola", "carrot", "cauliflower", "celery", "chard", "cherry", "chokeberry", "citron", "clementine", "coconut", "corn", "crabapple", "cranberry", "cucumber", "currant",
"daikon", "date", "dewberry", "durian",
"edamame", "eggplant", "elderberry", "endive",
"fig",
"garlic", "ginger", "gooseberry", "grape", "grapefruit", "guava",
"honeysuckle", "horseradish", "huckleberry",
"jackfruit", "jicama",
"kale", "kiwi", "kohlrabi", "kumquat",
"leek", "lemon", "lentil", "lettuce", "lime",
"mandarin", "mango", "mushroom", "myrtle",
"nectarine", "nut",
"olive", "okra", "onion", "orange",
"papaya", "parsnip", "pawpaw", "peach", "pear", "pea", "pepper", "persimmon", "pineapple", "plum", "plantain", "pomegranate", "pomelo", "potato", "prune", "pumpkin",
"quandong", "quinoa",
"radicchio", "radish", "raisin", "raspberry", "redcurrant", "rhubarb", "rutabaga",
"spinach", "strawberry", "squash",
"tamarind", "tangerine", "tomatillo", "tomato", "turnip",
"vanilla",
"watercress", "watermelon",
"yam",
"zucchini"
];
private static readonly string[] RandomWords = [
"apple", "apricot", "artichoke", "arugula", "asparagus", "avocado",
"banana", "bean", "beechnut", "beet", "blackberry", "blackcurrant", "blueberry", "boysenberry", "bramble", "broccoli",
"cabbage", "cacao", "cantaloupe", "caper", "carambola", "carrot", "cauliflower", "celery", "chard", "cherry", "chokeberry", "citron", "clementine", "coconut", "corn", "crabapple", "cranberry", "cucumber", "currant",
"daikon", "date", "dewberry", "durian",
"edamame", "eggplant", "elderberry", "endive",
"fig",
"garlic", "ginger", "gooseberry", "grape", "grapefruit", "guava",
"honeysuckle", "horseradish", "huckleberry",
"jackfruit", "jicama",
"kale", "kiwi", "kohlrabi", "kumquat",
"leek", "lemon", "lentil", "lettuce", "lime",
"mandarin", "mango", "mushroom", "myrtle",
"nectarine", "nut",
"olive", "okra", "onion", "orange",
"papaya", "parsnip", "pawpaw", "peach", "pear", "pea", "pepper", "persimmon", "pineapple", "plum", "plantain", "pomegranate", "pomelo", "potato", "prune", "pumpkin",
"quandong", "quinoa",
"radicchio", "radish", "raisin", "raspberry", "redcurrant", "rhubarb", "rutabaga",
"spinach", "strawberry", "squash",
"tamarind", "tangerine", "tomatillo", "tomato", "turnip",
"vanilla",
"watercress", "watermelon",
"yam",
"zucchini"
];
private static string RandomText(Random rand, int maxWords) {
int wordCount = 1 + (int) Math.Floor(maxWords * Math.Pow(rand.NextDouble(), 3));
return string.Join(' ', Enumerable.Range(0, wordCount).Select(_ => RandomWords[rand.Next(RandomWords.Length)]));
}
private static string RandomText(Random rand, int maxWords) {
int wordCount = 1 + (int) Math.Floor(maxWords * Math.Pow(rand.NextDouble(), 3));
return string.Join(' ', Enumerable.Range(0, wordCount).Select(_ => RandomWords[rand.Next(RandomWords.Length)]));
}
}
#else
namespace DHT.Desktop.Main.Pages {
sealed class DebugPageModel {
public string GenerateChannels { get; set; } = "0";
public string GenerateUsers { get; set; } = "0";
public string GenerateMessages { get; set; } = "0";
namespace DHT.Desktop.Main.Pages;
public void OnClickAddRandomDataToDatabase() {}
}
sealed class DebugPageModel {
public string GenerateChannels { get; set; } = "0";
public string GenerateUsers { get; set; } = "0";
public string GenerateMessages { get; set; } = "0";
public void OnClickAddRandomDataToDatabase() {}
}
#endif

View File

@ -60,7 +60,7 @@ sealed partial class ViewerPageModel : ObservableObject, IDisposable {
string serverUrl = "http://127.0.0.1:" + ServerConfiguration.Port;
string serverToken = ServerConfiguration.Token;
Process.Start(new ProcessStartInfo(serverUrl + "/viewer?token=" + HttpUtility.UrlEncode(serverToken)) {
Process.Start(new ProcessStartInfo(serverUrl + "/viewer/?token=" + HttpUtility.UrlEncode(serverToken)) {
UseShellExecute = true
});
} catch (Exception e) {

View File

@ -48,6 +48,16 @@ const STATE = (function() {
});
};
const getDate = function(date) {
if (date instanceof Date) {
return date;
}
else {
// noinspection JSUnresolvedReference
return date.toDate();
}
};
const trackingStateChangedListeners = [];
let isTracking = false;
@ -69,8 +79,8 @@ const STATE = (function() {
* @property {String} channel_id
* @property {DiscordUser} author
* @property {String} content
* @property {Timestamp} timestamp
* @property {Timestamp|null} editedTimestamp
* @property {Date} timestamp
* @property {Date|null} editedTimestamp
* @property {DiscordAttachment[]} attachments
* @property {Object[]} embeds
* @property {DiscordMessageReaction[]} [reactions]
@ -106,11 +116,6 @@ const STATE = (function() {
* @property {Boolean} animated
*/
/**
* @name Timestamp
* @property {Function} toDate
*/
return {
setup(port, token) {
serverPort = port;
@ -223,12 +228,12 @@ const STATE = (function() {
sender: msg.author.id,
channel: msg.channel_id,
text: msg.content,
timestamp: msg.timestamp.toDate().getTime()
timestamp: getDate(msg.timestamp).getTime()
};
if (msg.editedTimestamp !== null) {
// noinspection JSUnusedGlobalSymbols
obj.editTimestamp = msg.editedTimestamp.toDate().getTime();
obj.editTimestamp = getDate(msg.editedTimestamp).getTime();
}
if (msg.messageReference !== null) {

View File

@ -13,7 +13,6 @@
<link rel="stylesheet" href="styles/modal.css">
<script type="text/javascript">
window.DHT_SERVER_URL = location.protocol + "//" + location.host + location.pathname;
window.DHT_SERVER_TOKEN = new URLSearchParams(location.search).get("token");
</script>
<script type="module" src="scripts/bootstrap.mjs"></script>
@ -70,7 +69,9 @@
</div>
<div id="app">
<div id="channels"></div>
<div id="channels">
<div class="loading"></div>
</div>
<div id="messages"></div>
</div>

View File

@ -36,10 +36,25 @@ document.addEventListener("DOMContentLoaded", () => {
gui.scrollMessagesToTop();
});
try {
state.uploadFile(JSON.parse(window.DHT_EMBEDDED));
} catch (e) {
console.error(e);
alert("Could not parse embedded file, see console for details.");
async function loadData() {
try {
const response = await fetch("/get-viewer-data?token=" + encodeURIComponent(window.DHT_SERVER_TOKEN), {
method: "GET",
headers: {
"Content-Type": "application/json",
},
credentials: "omit",
redirect: "error",
});
const json = await response.json();
state.uploadFile(json);
} catch (e) {
console.error(e);
alert("Could not load data, see console for details.");
document.querySelector("#channels > div.loading").remove();
}
}
loadData();
});

View File

@ -41,17 +41,14 @@ export default (function() {
let templateReaction;
let templateReactionCustom;
const fileUrlProcessor = function(serverUrl, serverToken) {
if (typeof serverUrl === "string" && typeof serverToken === "string") {
return url => serverUrl + "/get-downloaded-file/" + encodeURIComponent(url) + "?token=" + encodeURIComponent(serverToken);
const fileUrlProcessor = function(serverToken) {
if (typeof serverToken === "string") {
return url => "/get-downloaded-file/" + encodeURIComponent(url) + "?token=" + encodeURIComponent(serverToken);
}
else {
return url => url;
}
}(
window["DHT_SERVER_URL"],
window["DHT_SERVER_TOKEN"]
);
}(window.DHT_SERVER_TOKEN);
const getEmoji = function(name, id, extension) {
const tag = ":" + name + ":";
@ -116,14 +113,14 @@ export default (function() {
return {
setup() {
templateChannelServer = new template([
"<div data-channel='{id}'>",
"<div class='channel' data-channel='{id}'>",
"<div class='info' title='{topic}'><strong class='name'>#{name}</strong>{nsfw}<span class='tag'>{msgcount}</span></div>",
"<span class='server'>{server.name} ({server.type})</span>",
"</div>"
].join(""));
templateChannelPrivate = new template([
"<div data-channel='{id}'>",
"<div class='channel' data-channel='{id}'>",
"<div class='info'><strong class='name'>{name}</strong><span class='tag'>{msgcount}</span></div>",
"<span class='server'>({server.type})</span>",
"</div>"
@ -164,12 +161,12 @@ export default (function() {
// noinspection HtmlUnknownTarget
templateEmbedImage = new template([
"<a href='{url}' class='embed thumbnail loading'><img src='{src}' alt='' onload='DISCORD.handleImageLoad(this)' onerror='DISCORD.handleImageLoadError(this)'></a><br>"
"<a href='{url}' class='embed thumbnail loading'><img src='{src}' alt='' onload='window.DISCORD.handleImageLoad(this)' onerror='window.DISCORD.handleImageLoadError(this)'></a><br>"
].join(""));
// noinspection HtmlUnknownTarget
templateEmbedImageWithSize = new template([
"<a href='{url}' class='embed thumbnail loading'><img src='{src}' width='{width}' height='{height}' alt='' onload='DISCORD.handleImageLoad(this)' onerror='DISCORD.handleImageLoadError(this)'></a><br>"
"<a href='{url}' class='embed thumbnail loading'><img src='{src}' width='{width}' height='{height}' alt='' onload='window.DISCORD.handleImageLoad(this)' onerror='window.DISCORD.handleImageLoadError(this)'></a><br>"
].join(""));
// noinspection HtmlUnknownTarget

View File

@ -3,18 +3,24 @@
min-width: 215px;
max-width: 300px;
overflow-y: auto;
color: #eee;
background-color: #1c1e22;
font-size: 15px;
}
#channels > div {
#channels > div.loading {
margin: 0 auto;
width: 150px;
height: 150px;
}
#channels > div.channel {
cursor: pointer;
padding: 10px 12px;
color: #eee;
font-size: 15px;
border-bottom: 1px solid #333333;
}
#channels > div:hover, #channels > div.active {
#channels > div.channel:hover, #channels > div.channel.active {
background-color: #282b30;
}

View File

@ -11,3 +11,20 @@ body {
display: flex;
flex-direction: row;
}
.loading {
position: relative;
--loading-backdrop: rgba(0, 0, 0, 0);
}
.loading::after {
content: "";
background: var(--loading-backdrop)
url("data:image/svg+xml,%3Csvg xmlns='http://www.w3.org/2000/svg' viewBox='0 0 300 300' preserveAspectRatio='xMidYMid'%3E %3Ccircle cx='150' cy='150' fill='none' stroke='%237983f5' stroke-width='8' r='42' stroke-dasharray='198 68'%3E %3CanimateTransform attributeName='transform' type='rotate' repeatCount='indefinite' dur='1.25s' values='0 150 150;360 150 150' keyTimes='0;1' /%3E %3C/circle%3E %3C/svg%3E")
no-repeat center center;
position: absolute;
top: 0;
left: 0;
width: 100%;
height: 100%;
}

View File

@ -107,23 +107,11 @@
}
.message .thumbnail {
position: relative;
--loading-backdrop: rgba(0, 0, 0, 0.75);
max-width: calc(100% - 20px);
max-height: 320px;
}
.message .thumbnail.loading::after {
content: "";
background: rgba(0, 0, 0, 0.75)
url("data:image/svg+xml,%3Csvg xmlns='http://www.w3.org/2000/svg' viewBox='0 0 300 300' preserveAspectRatio='xMidYMid'%3E %3Ccircle cx='150' cy='150' fill='none' stroke='%237983f5' stroke-width='8' r='42' stroke-dasharray='198 68'%3E %3CanimateTransform attributeName='transform' type='rotate' repeatCount='indefinite' dur='1.25s' values='0 150 150;360 150 150' keyTimes='0;1' /%3E %3C/circle%3E %3C/svg%3E")
no-repeat center center;
position: absolute;
top: 0;
left: 0;
width: 100%;
height: 100%;
}
.message .thumbnail img {
width: auto;
max-width: 100%;

View File

@ -1,3 +0,0 @@
namespace DHT.Server.Data;
public readonly record struct DownloadWithData(Download Download, byte[]? Data);

View File

@ -26,7 +26,9 @@ public static class DatabaseExtensions {
await target.Messages.Add(batchedMessages);
await foreach (var download in source.Downloads.Get()) {
await target.Downloads.AddDownload(await source.Downloads.HydrateWithData(download));
if (download.Status != DownloadStatus.Success || !await source.Downloads.GetDownloadData(download.NormalizedUrl, stream => target.Downloads.AddDownload(download, stream))) {
await target.Downloads.AddDownload(download, stream: null);
}
}
}
}

View File

@ -10,7 +10,7 @@ using DHT.Utils.Logging;
namespace DHT.Server.Database.Export;
public static class ViewerJsonExport {
static class ViewerJsonExport {
private static readonly Log Log = Log.ForType(typeof(ViewerJsonExport));
public static async Task Generate(Stream stream, IDatabaseFile db, MessageFilter? filter = null) {

View File

@ -161,7 +161,7 @@ public static class LegacyArchiveImport {
var messagesObj = data.HasKey(channelIdStr) ? data.RequireObject(channelIdStr, DataPath) : (JsonElement?) null;
if (messagesObj == null) {
return Array.Empty<Message>();
return [];
}
return messagesObj.Value.EnumerateObject().Select(item => {

View File

@ -1,10 +1,10 @@
using System;
using System.Collections.Generic;
using System.IO;
using System.Linq;
using System.Reactive.Linq;
using System.Threading;
using System.Threading.Tasks;
using DHT.Server.Data;
using DHT.Server.Data.Aggregations;
using DHT.Server.Data.Filters;
using DHT.Server.Download;
@ -14,7 +14,7 @@ namespace DHT.Server.Database.Repositories;
public interface IDownloadRepository {
IObservable<long> TotalCount { get; }
Task AddDownload(DownloadWithData item);
Task AddDownload(Data.Download item, Stream? stream);
Task<long> Count(DownloadItemFilter filter, CancellationToken cancellationToken = default);
@ -22,9 +22,9 @@ public interface IDownloadRepository {
IAsyncEnumerable<Data.Download> Get();
Task<DownloadWithData> HydrateWithData(Data.Download download);
Task<bool> GetDownloadData(string normalizedUrl, Func<Stream, Task> dataProcessor);
Task<DownloadWithData?> GetSuccessfulDownloadWithData(string normalizedUrl);
Task<bool> GetSuccessfulDownloadWithData(string normalizedUrl, Func<Data.Download, Stream, Task> dataProcessor);
IAsyncEnumerable<DownloadItem> PullPendingDownloadItems(int count, DownloadItemFilter filter, CancellationToken cancellationToken = default);
@ -35,7 +35,7 @@ public interface IDownloadRepository {
internal sealed class Dummy : IDownloadRepository {
public IObservable<long> TotalCount { get; } = Observable.Return(0L);
public Task AddDownload(DownloadWithData item) {
public Task AddDownload(Data.Download item, Stream? stream) {
return Task.CompletedTask;
}
@ -51,12 +51,12 @@ public interface IDownloadRepository {
return AsyncEnumerable.Empty<Data.Download>();
}
public Task<DownloadWithData> HydrateWithData(Data.Download download) {
return Task.FromResult(new DownloadWithData(download, Data: null));
public Task<bool> GetDownloadData(string normalizedUrl, Func<Stream, Task> dataProcessor) {
return Task.FromResult(false);
}
public Task<DownloadWithData?> GetSuccessfulDownloadWithData(string normalizedUrl) {
return Task.FromResult<DownloadWithData?>(null);
public Task<bool> GetSuccessfulDownloadWithData(string normalizedUrl, Func<Data.Download, Stream, Task> dataProcessor) {
return Task.FromResult(false);
}
public IAsyncEnumerable<DownloadItem> PullPendingDownloadItems(int count, DownloadItemFilter filter, CancellationToken cancellationToken) {

View File

@ -19,9 +19,9 @@ sealed class SqliteChannelRepository : BaseSqliteRepository, IChannelRepository
}
public async Task Add(IReadOnlyList<Channel> channels) {
await using var conn = await pool.Take();
await using (var conn = await pool.Take()) {
await conn.BeginTransactionAsync();
await using (var tx = await conn.BeginTransactionAsync()) {
await using var cmd = conn.Upsert("channels", [
("id", SqliteType.Integer),
("server", SqliteType.Integer),
@ -43,7 +43,7 @@ sealed class SqliteChannelRepository : BaseSqliteRepository, IChannelRepository
await cmd.ExecuteNonQueryAsync();
}
await tx.CommitAsync();
await conn.CommitTransactionAsync();
}
UpdateTotalCount();

View File

@ -1,5 +1,6 @@
using System;
using System.Collections.Generic;
using System.IO;
using System.Runtime.CompilerServices;
using System.Threading;
using System.Threading.Tasks;
@ -14,15 +15,9 @@ using Microsoft.Data.Sqlite;
namespace DHT.Server.Database.Sqlite.Repositories;
sealed class SqliteDownloadRepository : BaseSqliteRepository, IDownloadRepository {
sealed class SqliteDownloadRepository(SqliteConnectionPool pool) : BaseSqliteRepository(Log), IDownloadRepository {
private static readonly Log Log = Log.ForType<SqliteDownloadRepository>();
private readonly SqliteConnectionPool pool;
public SqliteDownloadRepository(SqliteConnectionPool pool) : base(Log) {
this.pool = pool;
}
internal sealed class NewDownloadCollector : IAsyncDisposable {
private readonly SqliteDownloadRepository repository;
private bool hasAdded = false;
@ -66,11 +61,9 @@ sealed class SqliteDownloadRepository : BaseSqliteRepository, IDownloadRepositor
}
}
public async Task AddDownload(DownloadWithData item) {
var (download, data) = item;
public async Task AddDownload(Data.Download item, Stream? stream) {
await using (var conn = await pool.Take()) {
var tx = await conn.BeginTransactionAsync();
await conn.BeginTransactionAsync();
await using var metadataCmd = conn.Upsert("download_metadata", [
("normalized_url", SqliteType.Text),
@ -80,30 +73,37 @@ sealed class SqliteDownloadRepository : BaseSqliteRepository, IDownloadRepositor
("size", SqliteType.Integer),
]);
metadataCmd.Set(":normalized_url", download.NormalizedUrl);
metadataCmd.Set(":download_url", download.DownloadUrl);
metadataCmd.Set(":status", (int) download.Status);
metadataCmd.Set(":type", download.Type);
metadataCmd.Set(":size", download.Size);
metadataCmd.Set(":normalized_url", item.NormalizedUrl);
metadataCmd.Set(":download_url", item.DownloadUrl);
metadataCmd.Set(":status", (int) item.Status);
metadataCmd.Set(":type", item.Type);
metadataCmd.Set(":size", item.Size);
await metadataCmd.ExecuteNonQueryAsync();
if (data == null) {
if (stream == null) {
await using var deleteBlobCmd = conn.Command("DELETE FROM download_blobs WHERE normalized_url = :normalized_url");
deleteBlobCmd.AddAndSet(":normalized_url", SqliteType.Text, download.NormalizedUrl);
deleteBlobCmd.AddAndSet(":normalized_url", SqliteType.Text, item.NormalizedUrl);
await deleteBlobCmd.ExecuteNonQueryAsync();
}
else {
await using var upsertBlobCmd = conn.Upsert("download_blobs", [
("normalized_url", SqliteType.Text),
("blob", SqliteType.Blob)
]);
await using var upsertBlobCmd = conn.Command(
"""
INSERT INTO download_blobs (normalized_url, blob)
VALUES (:normalized_url, ZEROBLOB(:blob_length))
ON CONFLICT (normalized_url) DO UPDATE SET blob = excluded.blob
RETURNING rowid
"""
);
upsertBlobCmd.Set(":normalized_url", download.NormalizedUrl);
upsertBlobCmd.Set(":blob", data);
await upsertBlobCmd.ExecuteNonQueryAsync();
upsertBlobCmd.AddAndSet(":normalized_url", SqliteType.Text, item.NormalizedUrl);
upsertBlobCmd.AddAndSet(":blob_length", SqliteType.Integer, item.Size);
long rowid = await upsertBlobCmd.ExecuteLongScalarAsync();
await using var blob = new SqliteBlob(conn.InnerConnection, "download_blobs", "blob", rowid);
await stream.CopyToAsync(blob);
}
await tx.CommitAsync();
await conn.CommitTransactionAsync();
}
UpdateTotalCount();
@ -187,24 +187,35 @@ sealed class SqliteDownloadRepository : BaseSqliteRepository, IDownloadRepositor
}
}
public async Task<DownloadWithData> HydrateWithData(Data.Download download) {
public async Task<bool> GetDownloadData(string normalizedUrl, Func<Stream, Task> dataProcessor) {
await using var conn = await pool.Take();
await using var cmd = conn.Command("SELECT blob FROM download_blobs WHERE normalized_url = :url");
cmd.AddAndSet(":url", SqliteType.Text, download.NormalizedUrl);
await using var cmd = conn.Command("SELECT rowid FROM download_blobs WHERE normalized_url = :normalized_url");
cmd.AddAndSet(":normalized_url", SqliteType.Text, normalizedUrl);
await using var reader = await cmd.ExecuteReaderAsync();
var data = await reader.ReadAsync() && !reader.IsDBNull(0) ? (byte[]) reader["blob"] : null;
long rowid;
return new DownloadWithData(download, data);
await using (var reader = await cmd.ExecuteReaderAsync()) {
if (!await reader.ReadAsync()) {
return false;
}
rowid = reader.GetInt64(0);
}
await using (var blob = new SqliteBlob(conn.InnerConnection, "download_blobs", "blob", rowid, readOnly: true)) {
await dataProcessor(blob);
}
return true;
}
public async Task<DownloadWithData?> GetSuccessfulDownloadWithData(string normalizedUrl) {
public async Task<bool> GetSuccessfulDownloadWithData(string normalizedUrl, Func<Data.Download, Stream, Task> dataProcessor) {
await using var conn = await pool.Take();
await using var cmd = conn.Command(
"""
SELECT dm.download_url, dm.type, db.blob FROM download_metadata dm
SELECT dm.download_url, dm.type, db.rowid FROM download_metadata dm
JOIN download_blobs db ON dm.normalized_url = db.normalized_url
WHERE dm.normalized_url = :normalized_url AND dm.status = :success IS NOT NULL
"""
@ -213,19 +224,25 @@ sealed class SqliteDownloadRepository : BaseSqliteRepository, IDownloadRepositor
cmd.AddAndSet(":normalized_url", SqliteType.Text, normalizedUrl);
cmd.AddAndSet(":success", SqliteType.Integer, (int) DownloadStatus.Success);
await using var reader = await cmd.ExecuteReaderAsync();
string downloadUrl;
string? type;
long rowid;
if (!await reader.ReadAsync()) {
return null;
await using (var reader = await cmd.ExecuteReaderAsync()) {
if (!await reader.ReadAsync()) {
return false;
}
downloadUrl = reader.GetString(0);
type = reader.IsDBNull(1) ? null : reader.GetString(1);
rowid = reader.GetInt64(2);
}
var downloadUrl = reader.GetString(0);
var type = reader.IsDBNull(1) ? null : reader.GetString(1);
var data = (byte[]) reader[2];
var size = (ulong) data.LongLength;
var download = new Data.Download(normalizedUrl, downloadUrl, DownloadStatus.Success, type, size);
await using (var blob = new SqliteBlob(conn.InnerConnection, "download_blobs", "blob", rowid, readOnly: true)) {
await dataProcessor(new Data.Download(normalizedUrl, downloadUrl, DownloadStatus.Success, type, (ulong) blob.Length), blob);
}
return new DownloadWithData(download, data);
return true;
}
public async IAsyncEnumerable<DownloadItem> PullPendingDownloadItems(int count, DownloadItemFilter filter, [EnumeratorCancellation] CancellationToken cancellationToken) {
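The AddDownload and read paths above replace whole byte[] round-trips with incremental blob I/O: a row is preallocated with ZEROBLOB, its rowid is captured via RETURNING, and the payload is streamed through Microsoft.Data.Sqlite's SqliteBlob. A self-contained sketch of the write side, assuming a hypothetical blobs(data) table rather than the repository's schema:

using System.IO;
using System.Threading.Tasks;
using Microsoft.Data.Sqlite;

static class BlobWriter {
	public static async Task WriteAsync(SqliteConnection conn, long length, Stream source) {
		// Reserve space with zeroblob() so the row exists before any data is written.
		await using var cmd = conn.CreateCommand();
		cmd.CommandText = "INSERT INTO blobs (data) VALUES (zeroblob(:len)) RETURNING rowid";
		cmd.Parameters.AddWithValue(":len", length);
		long rowid = (long) (await cmd.ExecuteScalarAsync())!;

		// Stream the payload directly into the reserved blob without buffering it in memory.
		await using var blob = new SqliteBlob(conn, "blobs", "data", rowid);
		await source.CopyToAsync(blob);
	}
}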

View File

@ -39,7 +39,7 @@ sealed class SqliteMessageRepository : BaseSqliteRepository, IMessageRepository
}
await using (var conn = await pool.Take()) {
await using var tx = await conn.BeginTransactionAsync();
await conn.BeginTransactionAsync();
await using var messageCmd = conn.Upsert("messages", [
("message_id", SqliteType.Integer),
@ -167,7 +167,7 @@ sealed class SqliteMessageRepository : BaseSqliteRepository, IMessageRepository
}
}
await tx.CommitAsync();
await conn.CommitTransactionAsync();
downloadCollector.OnCommitted();
}
@ -183,11 +183,11 @@ sealed class SqliteMessageRepository : BaseSqliteRepository, IMessageRepository
return await conn.ExecuteReaderAsync("SELECT COUNT(*) FROM messages" + filter.GenerateConditions().BuildWhereClause(), static reader => reader?.GetInt64(0) ?? 0L, cancellationToken);
}
private sealed class MesageToManyCommand<T> : IAsyncDisposable {
private sealed class MessageToManyCommand<T> : IAsyncDisposable {
private readonly SqliteCommand cmd;
private readonly Func<SqliteDataReader, T> readItem;
public MesageToManyCommand(ISqliteConnection conn, string sql, Func<SqliteDataReader, T> readItem) {
public MessageToManyCommand(ISqliteConnection conn, string sql, Func<SqliteDataReader, T> readItem) {
this.cmd = conn.Command(sql);
this.cmd.Add(":message_id", SqliteType.Integer);
@ -223,7 +223,7 @@ sealed class SqliteMessageRepository : BaseSqliteRepository, IMessageRepository
WHERE message_id = :message_id
""";
await using var attachmentCmd = new MesageToManyCommand<Attachment>(conn, AttachmentSql, static reader => new Attachment {
await using var attachmentCmd = new MessageToManyCommand<Attachment>(conn, AttachmentSql, static reader => new Attachment {
Id = reader.GetUint64(0),
Name = reader.GetString(1),
Type = reader.IsDBNull(2) ? null : reader.GetString(2),
@ -241,7 +241,7 @@ sealed class SqliteMessageRepository : BaseSqliteRepository, IMessageRepository
WHERE message_id = :message_id
""";
await using var embedCmd = new MesageToManyCommand<Embed>(conn, EmbedSql, static reader => new Embed {
await using var embedCmd = new MessageToManyCommand<Embed>(conn, EmbedSql, static reader => new Embed {
Json = reader.GetString(0)
});
@ -252,7 +252,7 @@ sealed class SqliteMessageRepository : BaseSqliteRepository, IMessageRepository
WHERE message_id = :message_id
""";
await using var reactionsCmd = new MesageToManyCommand<Reaction>(conn, ReactionSql, static reader => new Reaction {
await using var reactionsCmd = new MessageToManyCommand<Reaction>(conn, ReactionSql, static reader => new Reaction {
EmojiId = reader.IsDBNull(0) ? null : reader.GetUint64(0),
EmojiName = reader.IsDBNull(1) ? null : reader.GetString(1),
EmojiFlags = (EmojiFlags) reader.GetInt16(2),

View File

@ -19,9 +19,9 @@ sealed class SqliteServerRepository : BaseSqliteRepository, IServerRepository {
}
public async Task Add(IReadOnlyList<Data.Server> servers) {
await using var conn = await pool.Take();
await using (var conn = await pool.Take()) {
await conn.BeginTransactionAsync();
await using (var tx = await conn.BeginTransactionAsync()) {
await using var cmd = conn.Upsert("servers", [
("id", SqliteType.Integer),
("name", SqliteType.Text),
@ -35,7 +35,7 @@ sealed class SqliteServerRepository : BaseSqliteRepository, IServerRepository {
await cmd.ExecuteNonQueryAsync();
}
await tx.CommitAsync();
await conn.CommitTransactionAsync();
}
UpdateTotalCount();

View File

@ -23,7 +23,7 @@ sealed class SqliteUserRepository : BaseSqliteRepository, IUserRepository {
public async Task Add(IReadOnlyList<User> users) {
await using (var conn = await pool.Take()) {
await using var tx = await conn.BeginTransactionAsync();
await conn.BeginTransactionAsync();
await using var cmd = conn.Upsert("users", [
("id", SqliteType.Integer),
@ -46,7 +46,7 @@ sealed class SqliteUserRepository : BaseSqliteRepository, IUserRepository {
}
}
await tx.CommitAsync();
await conn.CommitTransactionAsync();
downloadCollector.OnCommitted();
}

View File

@ -1,5 +1,4 @@
using System.Collections.Generic;
using System.Data.Common;
using System.Threading.Tasks;
using DHT.Server.Database.Sqlite.Utils;
using DHT.Server.Download;
@ -39,7 +38,7 @@ sealed class SqliteSchemaUpgradeTo6 : ISchemaUpgrade {
}
}
await using var tx = await conn.BeginTransactionAsync();
await conn.BeginTransactionAsync();
int totalUrls = normalizedUrls.Count;
int processedUrls = -1;
@ -61,7 +60,7 @@ sealed class SqliteSchemaUpgradeTo6 : ISchemaUpgrade {
await reporter.SubWork("Updating URLs...", totalUrls, totalUrls);
await tx.CommitAsync();
await conn.CommitTransactionAsync();
}
private async Task NormalizeDownloadUrls(ISqliteConnection conn, ISchemaUpgradeCallbacks.IProgressReporter reporter) {
@ -84,26 +83,23 @@ sealed class SqliteSchemaUpgradeTo6 : ISchemaUpgrade {
}
await conn.ExecuteAsync("PRAGMA cache_size = -20000");
await conn.BeginTransactionAsync();
DbTransaction tx;
await reporter.SubWork("Deleting duplicates...", 0, 0);
await using (tx = await conn.BeginTransactionAsync()) {
await reporter.SubWork("Deleting duplicates...", 0, 0);
await using (var deleteCmd = conn.Delete("downloads", ("url", SqliteType.Text))) {
foreach (var duplicateUrl in duplicateUrlsToDelete) {
deleteCmd.Set(":url", duplicateUrl);
await deleteCmd.ExecuteNonQueryAsync();
}
await using (var deleteCmd = conn.Delete("downloads", ("url", SqliteType.Text))) {
foreach (var duplicateUrl in duplicateUrlsToDelete) {
deleteCmd.Set(":url", duplicateUrl);
await deleteCmd.ExecuteNonQueryAsync();
}
await tx.CommitAsync();
}
await conn.CommitTransactionAsync();
int totalUrls = normalizedUrlsToOriginalUrls.Count;
int processedUrls = -1;
tx = await conn.BeginTransactionAsync();
await conn.BeginTransactionAsync();
await using (var updateCmd = conn.Command("UPDATE downloads SET download_url = :download_url, url = :normalized_url WHERE url = :download_url")) {
updateCmd.Add(":normalized_url", SqliteType.Text);
@ -115,11 +111,10 @@ sealed class SqliteSchemaUpgradeTo6 : ISchemaUpgrade {
// Not proper way of dealing with transactions, but it avoids a long commit at the end.
// Schema upgrades are already non-atomic anyways, so this doesn't make it worse.
await tx.CommitAsync();
await tx.DisposeAsync();
await conn.CommitTransactionAsync();
tx = await conn.BeginTransactionAsync();
updateCmd.Transaction = (SqliteTransaction) tx;
await conn.BeginTransactionAsync();
conn.AssignActiveTransaction(updateCmd);
}
updateCmd.Set(":normalized_url", normalizedUrl);
@ -130,8 +125,7 @@ sealed class SqliteSchemaUpgradeTo6 : ISchemaUpgrade {
await reporter.SubWork("Updating URLs...", totalUrls, totalUrls);
await tx.CommitAsync();
await tx.DisposeAsync();
await conn.CommitTransactionAsync();
await conn.ExecuteAsync("PRAGMA cache_size = -2000");
}

View File

@ -37,7 +37,7 @@ sealed class SqliteSchemaUpgradeTo7 : ISchemaUpgrade {
await reporter.SubWork("Processing downloaded files...", 0, totalFiles);
var tx = await conn.BeginTransactionAsync();
await conn.BeginTransactionAsync();
await using (var insertCmd = conn.Command("INSERT INTO download_blobs (normalized_url, blob) SELECT normalized_url, blob FROM downloads WHERE normalized_url = :normalized_url"))
await using (var deleteCmd = conn.Command("DELETE FROM downloads WHERE normalized_url = :normalized_url")) {
@ -50,12 +50,11 @@ sealed class SqliteSchemaUpgradeTo7 : ISchemaUpgrade {
// Not proper way of dealing with transactions, but it avoids a long commit at the end.
// Schema upgrades are already non-atomic anyways, so this doesn't make it worse.
await tx.CommitAsync();
await tx.DisposeAsync();
await conn.CommitTransactionAsync();
tx = await conn.BeginTransactionAsync();
insertCmd.Transaction = (SqliteTransaction) tx;
deleteCmd.Transaction = (SqliteTransaction) tx;
await conn.BeginTransactionAsync();
conn.AssignActiveTransaction(insertCmd);
conn.AssignActiveTransaction(deleteCmd);
}
insertCmd.Set(":normalized_url", url);
@ -68,8 +67,7 @@ sealed class SqliteSchemaUpgradeTo7 : ISchemaUpgrade {
await reporter.SubWork("Processing downloaded files...", totalFiles, totalFiles);
await tx.CommitAsync();
await tx.DisposeAsync();
await conn.CommitTransactionAsync();
}
private async Task<List<string>> GetDownloadedFileUrls(ISqliteConnection conn) {
@ -111,45 +109,45 @@ sealed class SqliteSchemaUpgradeTo7 : ISchemaUpgrade {
await insertCmd.ExecuteNonQueryAsync();
}
await using (var tx = await conn.BeginTransactionAsync()) {
await using var insertCmd = conn.Command("INSERT OR IGNORE INTO download_metadata (normalized_url, download_url, status, type, size) VALUES (:normalized_url, :download_url, :status, :type, :size)");
insertCmd.Add(":normalized_url", SqliteType.Text);
insertCmd.Add(":download_url", SqliteType.Text);
insertCmd.Add(":status", SqliteType.Integer);
insertCmd.Add(":type", SqliteType.Text);
insertCmd.Add(":size", SqliteType.Integer);
await conn.BeginTransactionAsync();
await reporter.SubWork("Processing embeds...", 1, 4);
await using var insertCmd = conn.Command("INSERT OR IGNORE INTO download_metadata (normalized_url, download_url, status, type, size) VALUES (:normalized_url, :download_url, :status, :type, :size)");
insertCmd.Add(":normalized_url", SqliteType.Text);
insertCmd.Add(":download_url", SqliteType.Text);
insertCmd.Add(":status", SqliteType.Integer);
insertCmd.Add(":type", SqliteType.Text);
insertCmd.Add(":size", SqliteType.Integer);
await using (var embedCmd = conn.Command("SELECT json FROM embeds")) {
await using var reader = await embedCmd.ExecuteReaderAsync();
await reporter.SubWork("Processing embeds...", 1, 4);
while (await reader.ReadAsync()) {
await InsertDownload(insertCmd, await DownloadLinkExtractor.TryFromEmbedJson(reader.GetStream(0)));
}
await using (var embedCmd = conn.Command("SELECT json FROM embeds")) {
await using var reader = await embedCmd.ExecuteReaderAsync();
while (await reader.ReadAsync()) {
await InsertDownload(insertCmd, await DownloadLinkExtractor.TryFromEmbedJson(reader.GetStream(0)));
}
await reporter.SubWork("Processing users...", 2, 4);
await using (var avatarCmd = conn.Command("SELECT id, avatar_url FROM users WHERE avatar_url IS NOT NULL")) {
await using var reader = await avatarCmd.ExecuteReaderAsync();
while (await reader.ReadAsync()) {
await InsertDownload(insertCmd, DownloadLinkExtractor.FromUserAvatar(reader.GetUint64(0), reader.GetString(1)));
}
}
await reporter.SubWork("Processing reactions...", 3, 4);
await using (var avatarCmd = conn.Command("SELECT DISTINCT emoji_id, emoji_flags FROM reactions WHERE emoji_id IS NOT NULL")) {
await using var reader = await avatarCmd.ExecuteReaderAsync();
while (await reader.ReadAsync()) {
await InsertDownload(insertCmd, DownloadLinkExtractor.FromEmoji(reader.GetUint64(0), (EmojiFlags) reader.GetInt16(1)));
}
}
await tx.CommitAsync();
}
await reporter.SubWork("Processing users...", 2, 4);
await using (var avatarCmd = conn.Command("SELECT id, avatar_url FROM users WHERE avatar_url IS NOT NULL")) {
await using var reader = await avatarCmd.ExecuteReaderAsync();
while (await reader.ReadAsync()) {
await InsertDownload(insertCmd, DownloadLinkExtractor.FromUserAvatar(reader.GetUint64(0), reader.GetString(1)));
}
}
await reporter.SubWork("Processing reactions...", 3, 4);
await using (var avatarCmd = conn.Command("SELECT DISTINCT emoji_id, emoji_flags FROM reactions WHERE emoji_id IS NOT NULL")) {
await using var reader = await avatarCmd.ExecuteReaderAsync();
while (await reader.ReadAsync()) {
await InsertDownload(insertCmd, DownloadLinkExtractor.FromEmoji(reader.GetUint64(0), (EmojiFlags) reader.GetInt16(1)));
}
}
await conn.CommitTransactionAsync();
}
}
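Both schema upgrades above follow the same shape after the refactor: the connection owns the transaction, long migrations periodically commit and start a fresh transaction so the final commit stays short, and prepared commands are reattached with AssignActiveTransaction. A hedged sketch of that loop, assuming conn is an ISqliteConnection from the pool and work is a hypothetical list of (id, value) pairs:

await conn.BeginTransactionAsync();

await using (var updateCmd = conn.Command("UPDATE items SET value = :value WHERE id = :id")) {
	conn.AssignActiveTransaction(updateCmd);
	updateCmd.Add(":id", SqliteType.Integer);
	updateCmd.Add(":value", SqliteType.Text);

	int processed = 0;
	foreach (var (id, value) in work) {
		if (++processed % 1000 == 0) {
			// Commit progress so far and continue in a new transaction, as the upgrade code does.
			await conn.CommitTransactionAsync();
			await conn.BeginTransactionAsync();
			conn.AssignActiveTransaction(updateCmd);
		}

		updateCmd.Set(":id", id);
		updateCmd.Set(":value", value);
		await updateCmd.ExecuteNonQueryAsync();
	}
}

await conn.CommitTransactionAsync();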

View File

@ -1,8 +1,15 @@
using System;
using System.Threading.Tasks;
using Microsoft.Data.Sqlite;
namespace DHT.Server.Database.Sqlite.Utils;
interface ISqliteConnection : IAsyncDisposable {
SqliteConnection InnerConnection { get; }
Task BeginTransactionAsync();
Task CommitTransactionAsync();
Task RollbackTransactionAsync();
void AssignActiveTransaction(SqliteCommand command);
}
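A minimal sketch of how repositories are expected to use this interface after the refactor (the SQL is illustrative): the pooled connection tracks its own transaction, and disposing the connection rolls back anything left uncommitted.

await using (var conn = await pool.Take()) {
	await conn.BeginTransactionAsync();

	await using var cmd = conn.Command("DELETE FROM example_table");
	conn.AssignActiveTransaction(cmd);
	await cmd.ExecuteNonQueryAsync();

	// If an exception is thrown before this line, DisposeAsync rolls the transaction back.
	await conn.CommitTransactionAsync();
}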

View File

@ -1,5 +1,6 @@
using System;
using System.Collections.Generic;
using System.Data.Common;
using System.Threading;
using System.Threading.Tasks;
using DHT.Utils.Collections;
@ -73,17 +74,48 @@ sealed class SqliteConnectionPool : IAsyncDisposable {
disposalTokenSource.Dispose();
}
private sealed class PooledConnection : ISqliteConnection {
public SqliteConnection InnerConnection { get; }
private sealed class PooledConnection(SqliteConnectionPool pool, SqliteConnection conn) : ISqliteConnection {
public SqliteConnection InnerConnection { get; } = conn;
private readonly SqliteConnectionPool pool;
private DbTransaction? activeTransaction;
public PooledConnection(SqliteConnectionPool pool, SqliteConnection conn) {
this.pool = pool;
this.InnerConnection = conn;
public async Task BeginTransactionAsync() {
if (activeTransaction != null) {
throw new InvalidOperationException("A transaction is already active.");
}
activeTransaction = await InnerConnection.BeginTransactionAsync();
}
public async Task CommitTransactionAsync() {
if (activeTransaction == null) {
throw new InvalidOperationException("No active transaction to commit.");
}
await activeTransaction.CommitAsync();
await activeTransaction.DisposeAsync();
activeTransaction = null;
}
public async Task RollbackTransactionAsync() {
if (activeTransaction == null) {
throw new InvalidOperationException("No active transaction to rollback.");
}
await activeTransaction.RollbackAsync();
await activeTransaction.DisposeAsync();
activeTransaction = null;
}
public void AssignActiveTransaction(SqliteCommand command) {
command.Transaction = (SqliteTransaction?) activeTransaction;
}
public async ValueTask DisposeAsync() {
if (activeTransaction != null) {
await RollbackTransactionAsync();
}
await pool.Return(this);
}
}

View File

@ -1,5 +1,4 @@
using System;
using System.Data.Common;
using System.Linq;
using System.Threading;
using System.Threading.Tasks;
@ -9,10 +8,6 @@ using Microsoft.Data.Sqlite;
namespace DHT.Server.Database.Sqlite.Utils;
static class SqliteExtensions {
public static ValueTask<DbTransaction> BeginTransactionAsync(this ISqliteConnection conn) {
return conn.InnerConnection.BeginTransactionAsync();
}
public static SqliteCommand Command(this ISqliteConnection conn, [LanguageInjection("sql")] string sql) {
var cmd = conn.InnerConnection.CreateCommand();
cmd.CommandText = sql;
@ -31,6 +26,10 @@ static class SqliteExtensions {
return await reader.ReadAsync(cancellationToken) ? readFunction(reader) : readFunction(null);
}
public static async Task<long> ExecuteLongScalarAsync(this SqliteCommand command) {
return (long) (await command.ExecuteScalarAsync())!;
}
public static SqliteCommand Insert(this ISqliteConnection conn, string tableName, (string Name, SqliteType Type)[] columns) {
string columnNames = string.Join(',', columns.Select(static c => c.Name));
string columnParams = string.Join(',', columns.Select(static c => ':' + c.Name));

View File

@ -10,13 +10,12 @@ public readonly struct DownloadItem {
public string? Type { get; init; }
public ulong? Size { get; init; }
internal DownloadWithData ToSuccess(byte[] data) {
var size = (ulong) Math.Max(data.LongLength, 0);
return new DownloadWithData(new Data.Download(NormalizedUrl, DownloadUrl, DownloadStatus.Success, Type, size), data);
internal Data.Download ToSuccess(long size) {
return new Data.Download(NormalizedUrl, DownloadUrl, DownloadStatus.Success, Type, (ulong) Math.Max(size, 0));
}
internal DownloadWithData ToFailure(HttpStatusCode? statusCode = null) {
internal Data.Download ToFailure(HttpStatusCode? statusCode = null) {
var status = statusCode.HasValue ? (DownloadStatus) (int) statusCode : DownloadStatus.GenericError;
return new DownloadWithData(new Data.Download(NormalizedUrl, DownloadUrl, status, Type, Size), Data: null);
return new Data.Download(NormalizedUrl, DownloadUrl, status, Type, Size);
}
}

View File

@ -11,16 +11,18 @@ public sealed class Downloader {
public bool IsDownloading => current != null;
private readonly IDatabaseFile db;
private readonly int? concurrentDownloads;
private readonly SemaphoreSlim semaphore = new (1, 1);
internal Downloader(IDatabaseFile db) {
internal Downloader(IDatabaseFile db, int? concurrentDownloads) {
this.db = db;
this.concurrentDownloads = concurrentDownloads;
}
public async Task<IObservable<DownloadItem>> Start(DownloadItemFilter filter) {
await semaphore.WaitAsync();
try {
current ??= new DownloaderTask(db, filter);
current ??= new DownloaderTask(db, filter, concurrentDownloads);
return current.FinishedItems;
} finally {
semaphore.Release();

View File

@ -15,10 +15,14 @@ namespace DHT.Server.Download;
sealed class DownloaderTask : IAsyncDisposable {
private static readonly Log Log = Log.ForType<DownloaderTask>();
private const int DownloadTasks = 4;
private const int DefaultConcurrentDownloads = 4;
private const int QueueSize = 25;
private const string UserAgent = "Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/120.0.0.0 Safari/537.36";
private static int GetDownloadTaskCount(int? concurrentDownloads) {
return Math.Max(1, concurrentDownloads ?? DefaultConcurrentDownloads);
}
private readonly Channel<DownloadItem> downloadQueue = Channel.CreateBounded<DownloadItem>(new BoundedChannelOptions(QueueSize) {
SingleReader = false,
SingleWriter = true,
@ -38,12 +42,12 @@ sealed class DownloaderTask : IAsyncDisposable {
public IObservable<DownloadItem> FinishedItems => finishedItemPublisher;
internal DownloaderTask(IDatabaseFile db, DownloadItemFilter filter) {
internal DownloaderTask(IDatabaseFile db, DownloadItemFilter filter, int? concurrentDownloads) {
this.db = db;
this.filter = filter;
this.cancellationToken = cancellationTokenSource.Token;
this.queueWriterTask = Task.Run(RunQueueWriterTask);
this.downloadTasks = Enumerable.Range(1, DownloadTasks).Select(taskIndex => Task.Run(() => RunDownloadTask(taskIndex))).ToArray();
this.downloadTasks = Enumerable.Range(1, GetDownloadTaskCount(concurrentDownloads)).Select(taskIndex => Task.Run(() => RunDownloadTask(taskIndex))).ToArray();
}
private async Task RunQueueWriterTask() {
@ -63,24 +67,39 @@ sealed class DownloaderTask : IAsyncDisposable {
private async Task RunDownloadTask(int taskIndex) {
var log = Log.ForType<DownloaderTask>("Task " + taskIndex);
var client = new HttpClient();
var client = new HttpClient(new SocketsHttpHandler {
ConnectTimeout = TimeSpan.FromSeconds(30)
});
client.Timeout = Timeout.InfiniteTimeSpan;
client.DefaultRequestHeaders.UserAgent.ParseAdd(UserAgent);
client.Timeout = TimeSpan.FromSeconds(30);
while (!cancellationToken.IsCancellationRequested) {
var item = await downloadQueue.Reader.ReadAsync(cancellationToken);
log.Debug("Downloading " + item.DownloadUrl + "...");
try {
var downloadedBytes = await client.GetByteArrayAsync(item.DownloadUrl, cancellationToken);
await db.Downloads.AddDownload(item.ToSuccess(downloadedBytes));
} catch (OperationCanceledException) {
var response = await client.SendAsync(new HttpRequestMessage(HttpMethod.Get, item.DownloadUrl), HttpCompletionOption.ResponseHeadersRead, cancellationToken);
response.EnsureSuccessStatusCode();
if (response.Content.Headers.ContentLength is {} contentLength) {
await using var stream = await response.Content.ReadAsStreamAsync(cancellationToken);
await db.Downloads.AddDownload(item.ToSuccess(contentLength), stream);
}
else {
await db.Downloads.AddDownload(item.ToFailure(), stream: null);
log.Error("Download response has no content length: " + item.DownloadUrl);
}
} catch (OperationCanceledException e) when (e.CancellationToken == cancellationToken) {
// Ignore.
} catch (TaskCanceledException e) when (e.InnerException is TimeoutException) {
await db.Downloads.AddDownload(item.ToFailure(), stream: null);
log.Error("Download timed out: " + item.DownloadUrl);
} catch (HttpRequestException e) {
await db.Downloads.AddDownload(item.ToFailure(e.StatusCode));
await db.Downloads.AddDownload(item.ToFailure(e.StatusCode), stream: null);
log.Error(e);
} catch (Exception e) {
await db.Downloads.AddDownload(item.ToFailure());
await db.Downloads.AddDownload(item.ToFailure(), stream: null);
log.Error(e);
} finally {
try {
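Taken together, the download loop now streams responses instead of buffering them with GetByteArrayAsync, and it requires a Content-Length so the blob can be preallocated. A stripped-down sketch of the streaming part, with a hypothetical sink callback standing in for db.Downloads.AddDownload:

using System;
using System.IO;
using System.Net.Http;
using System.Threading;
using System.Threading.Tasks;

static async Task DownloadToSinkAsync(string url, Func<long, Stream, Task> sink, CancellationToken cancellationToken) {
	using var client = new HttpClient(new SocketsHttpHandler { ConnectTimeout = TimeSpan.FromSeconds(30) });
	client.Timeout = Timeout.InfiniteTimeSpan;

	using var response = await client.SendAsync(new HttpRequestMessage(HttpMethod.Get, url), HttpCompletionOption.ResponseHeadersRead, cancellationToken);
	response.EnsureSuccessStatusCode();

	// Without a Content-Length there is nothing to preallocate the blob with, so the real code records a failure instead.
	if (response.Content.Headers.ContentLength is {} contentLength) {
		await using var stream = await response.Content.ReadAsStreamAsync(cancellationToken);
		await sink(contentLength, stream);
	}
}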

View File

@ -9,37 +9,37 @@ using Microsoft.AspNetCore.Http;
namespace DHT.Server.Endpoints;
abstract class BaseEndpoint {
abstract class BaseEndpoint(IDatabaseFile db) {
private static readonly Log Log = Log.ForType<BaseEndpoint>();
protected IDatabaseFile Db { get; }
protected BaseEndpoint(IDatabaseFile db) {
this.Db = db;
}
protected IDatabaseFile Db { get; } = db;
public async Task Handle(HttpContext ctx) {
var response = ctx.Response;
try {
response.StatusCode = (int) HttpStatusCode.OK;
var output = await Respond(ctx);
await output.WriteTo(response);
await Respond(ctx.Request, response);
} catch (HttpException e) {
Log.Error(e);
response.StatusCode = (int) e.StatusCode;
await response.WriteAsync(e.Message);
if (response.HasStarted) {
Log.Warn("Response has already started, cannot write status message: " + e.Message);
}
else {
await response.WriteAsync(e.Message);
}
} catch (Exception e) {
Log.Error(e);
response.StatusCode = (int) HttpStatusCode.InternalServerError;
}
}
protected abstract Task<IHttpOutput> Respond(HttpContext ctx);
protected abstract Task Respond(HttpRequest request, HttpResponse response);
protected static async Task<JsonElement> ReadJson(HttpContext ctx) {
protected static async Task<JsonElement> ReadJson(HttpRequest request) {
try {
return await ctx.Request.ReadFromJsonAsync(JsonElementContext.Default.JsonElement);
return await request.ReadFromJsonAsync(JsonElementContext.Default.JsonElement);
} catch (JsonException) {
throw new HttpException(HttpStatusCode.UnsupportedMediaType, "This endpoint only accepts JSON.");
}
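Under the new contract, an endpoint writes directly to the response instead of returning an IHttpOutput. A hedged sketch of a minimal endpoint against the refactored base class (the endpoint itself is hypothetical; real ones such as TrackChannelEndpoint parse JSON via ReadJson(request)):

sealed class PingEndpoint(IDatabaseFile db) : BaseEndpoint(db) {
	protected override async Task Respond(HttpRequest request, HttpResponse response) {
		// Plain-text reply; BaseEndpoint.Handle has already set the 200 status code.
		await response.WriteAsync("pong");
	}
}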

View File

@ -7,18 +7,13 @@ using Microsoft.AspNetCore.Http;
namespace DHT.Server.Endpoints;
sealed class GetDownloadedFileEndpoint : BaseEndpoint {
public GetDownloadedFileEndpoint(IDatabaseFile db) : base(db) {}
protected override async Task<IHttpOutput> Respond(HttpContext ctx) {
string url = WebUtility.UrlDecode((string) ctx.Request.RouteValues["url"]!);
sealed class GetDownloadedFileEndpoint(IDatabaseFile db) : BaseEndpoint(db) {
protected override async Task Respond(HttpRequest request, HttpResponse response) {
string url = WebUtility.UrlDecode((string) request.RouteValues["url"]!);
string normalizedUrl = DiscordCdn.NormalizeUrl(url);
if (await Db.Downloads.GetSuccessfulDownloadWithData(normalizedUrl) is { Download: {} download, Data: {} data }) {
return new HttpOutput.File(download.Type, data);
}
else {
return new HttpOutput.Redirect(url, permanent: false);
if (!await Db.Downloads.GetSuccessfulDownloadWithData(normalizedUrl, (download, stream) => response.WriteStreamAsync(download.Type, download.Size, stream))) {
response.Redirect(url, permanent: false);
}
}
}
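GetSuccessfulDownloadWithData now reports via its return value whether a stored copy exists and hands the metadata plus an open stream to a callback, so the file is copied straight into the response body instead of being buffered first. A hedged sketch of that write-or-redirect shape; the lookup interface and metadata record below are illustrative stand-ins, not the actual DHT types:

using System;
using System.IO;
using System.Threading.Tasks;
using Microsoft.AspNetCore.Http;

// Illustrative stand-ins for the real Download record and Db.Downloads member.
sealed record DownloadMetadata(string? Type, ulong? Size);

interface IDownloadLookup {
	Task<bool> GetSuccessfulDownloadWithData(string normalizedUrl, Func<DownloadMetadata, Stream, Task> writer);
}

static class DownloadedFileSketch {
	public static async Task WriteOrRedirect(IDownloadLookup downloads, string url, HttpResponse response) {
		bool found = await downloads.GetSuccessfulDownloadWithData(url, (download, stream) => {
			response.ContentType = download.Type ?? string.Empty;
			response.ContentLength = (long?) download.Size;
			return stream.CopyToAsync(response.Body);
		});
		
		if (!found) {
			response.Redirect(url, permanent: false);
		}
	}
}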

View File

@ -1,4 +1,4 @@
using System.Text;
using System.Net.Mime;
using System.Threading.Tasks;
using System.Web;
using DHT.Server.Database;
@ -9,25 +9,17 @@ using Microsoft.AspNetCore.Http;
namespace DHT.Server.Endpoints;
sealed class GetTrackingScriptEndpoint : BaseEndpoint {
private readonly ServerParameters serverParameters;
private readonly ResourceLoader resources;
public GetTrackingScriptEndpoint(IDatabaseFile db, ServerParameters parameters, ResourceLoader resources) : base(db) {
this.serverParameters = parameters;
this.resources = resources;
}
protected override async Task<IHttpOutput> Respond(HttpContext ctx) {
sealed class GetTrackingScriptEndpoint(IDatabaseFile db, ServerParameters parameters, ResourceLoader resources) : BaseEndpoint(db) {
protected override async Task Respond(HttpRequest request, HttpResponse response) {
string bootstrap = await resources.ReadTextAsync("Tracker/bootstrap.js");
string script = bootstrap.Replace("= 0; /*[PORT]*/", "= " + serverParameters.Port + ";")
.Replace("/*[TOKEN]*/", HttpUtility.JavaScriptStringEncode(serverParameters.Token))
string script = bootstrap.Replace("= 0; /*[PORT]*/", "= " + parameters.Port + ";")
.Replace("/*[TOKEN]*/", HttpUtility.JavaScriptStringEncode(parameters.Token))
.Replace("/*[IMPORTS]*/", await resources.ReadJoinedAsync("Tracker/scripts/", '\n'))
.Replace("/*[CSS-CONTROLLER]*/", await resources.ReadTextAsync("Tracker/styles/controller.css"))
.Replace("/*[CSS-SETTINGS]*/", await resources.ReadTextAsync("Tracker/styles/settings.css"))
.Replace("/*[DEBUGGER]*/", ctx.Request.Query.ContainsKey("debug") ? "debugger;" : "");
.Replace("/*[DEBUGGER]*/", request.Query.ContainsKey("debug") ? "debugger;" : "");
ctx.Response.Headers.Append("X-DHT", "1");
return new HttpOutput.File("text/javascript", Encoding.UTF8.GetBytes(script));
response.Headers.Append("X-DHT", "1");
await response.WriteTextAsync(MediaTypeNames.Text.JavaScript, script);
}
}
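The endpoint remains a plain placeholder substitution over bootstrap.js; only the source of the port and token changed (the primary-constructor parameter instead of a stored field), and the result is now written through WriteTextAsync. A minimal sketch of the substitution step, using the placeholder names from the hunk above and omitting the resource-based import/CSS replacements:

using System.Web;

static class TrackingScriptSketch {
	// `bootstrap` is the text of Tracker/bootstrap.js; port and token come from ServerParameters.
	public static string Build(string bootstrap, ushort port, string token, bool debug) {
		return bootstrap.Replace("= 0; /*[PORT]*/", "= " + port + ";")
		                .Replace("/*[TOKEN]*/", HttpUtility.JavaScriptStringEncode(token))
		                .Replace("/*[DEBUGGER]*/", debug ? "debugger;" : "");
	}
}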

View File

@ -8,18 +8,14 @@ using Microsoft.AspNetCore.Http;
namespace DHT.Server.Endpoints;
sealed class TrackChannelEndpoint : BaseEndpoint {
public TrackChannelEndpoint(IDatabaseFile db) : base(db) {}
protected override async Task<IHttpOutput> Respond(HttpContext ctx) {
var root = await ReadJson(ctx);
sealed class TrackChannelEndpoint(IDatabaseFile db) : BaseEndpoint(db) {
protected override async Task Respond(HttpRequest request, HttpResponse response) {
var root = await ReadJson(request);
var server = ReadServer(root.RequireObject("server"), "server");
var channel = ReadChannel(root.RequireObject("channel"), "channel", server.Id);
await Db.Servers.Add([server]);
await Db.Channels.Add([channel]);
return HttpOutput.None;
}
private static Data.Server ReadServer(JsonElement json, string path) => new () {

View File

@ -15,14 +15,12 @@ using Microsoft.AspNetCore.Http;
namespace DHT.Server.Endpoints;
sealed class TrackMessagesEndpoint : BaseEndpoint {
sealed class TrackMessagesEndpoint(IDatabaseFile db) : BaseEndpoint(db) {
private const string HasNewMessages = "1";
private const string NoNewMessages = "0";
public TrackMessagesEndpoint(IDatabaseFile db) : base(db) {}
protected override async Task<IHttpOutput> Respond(HttpContext ctx) {
var root = await ReadJson(ctx);
protected override async Task Respond(HttpRequest request, HttpResponse response) {
var root = await ReadJson(request);
if (root.ValueKind != JsonValueKind.Array) {
throw new HttpException(HttpStatusCode.BadRequest, "Expected root element to be an array.");
@ -43,7 +41,7 @@ sealed class TrackMessagesEndpoint : BaseEndpoint {
await Db.Messages.Add(messages);
return new HttpOutput.Text(anyNewMessages ? HasNewMessages : NoNewMessages);
await response.WriteTextAsync(anyNewMessages ? HasNewMessages : NoNewMessages);
}
private static Message ReadMessage(JsonElement json, string path) => new () {
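The contract is unchanged: the body must be a JSON array, and the reply is a bare "1" or "0" telling the tracking script whether anything new was stored; only the write now goes through the response object directly. A simplified, self-contained sketch of that flow, assuming a persistence delegate in place of ReadMessage plus Db.Messages.Add:

using System;
using System.Text.Json;
using System.Threading.Tasks;
using Microsoft.AspNetCore.Http;

static class TrackMessagesSketch {
	public static async Task Handle(HttpRequest request, HttpResponse response, Func<JsonElement, Task<bool>> storeMessages) {
		JsonElement root = await request.ReadFromJsonAsync<JsonElement>();
		
		if (root.ValueKind != JsonValueKind.Array) {
			response.StatusCode = StatusCodes.Status400BadRequest;
			await response.WriteAsync("Expected root element to be an array.");
			return;
		}
		
		bool anyNewMessages = await storeMessages(root); // hypothetical persistence delegate
		await response.WriteAsync(anyNewMessages ? "1" : "0");
	}
}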

View File

@ -8,11 +8,9 @@ using Microsoft.AspNetCore.Http;
namespace DHT.Server.Endpoints;
sealed class TrackUsersEndpoint : BaseEndpoint {
public TrackUsersEndpoint(IDatabaseFile db) : base(db) {}
protected override async Task<IHttpOutput> Respond(HttpContext ctx) {
var root = await ReadJson(ctx);
sealed class TrackUsersEndpoint(IDatabaseFile db) : BaseEndpoint(db) {
protected override async Task Respond(HttpRequest request, HttpResponse response) {
var root = await ReadJson(request);
if (root.ValueKind != JsonValueKind.Array) {
throw new HttpException(HttpStatusCode.BadRequest, "Expected root element to be an array.");
@ -26,8 +24,6 @@ sealed class TrackUsersEndpoint : BaseEndpoint {
}
await Db.Users.Add(users);
return HttpOutput.None;
}
private static User ReadUser(JsonElement json, string path) => new () {

View File

@ -10,19 +10,14 @@ using Microsoft.AspNetCore.StaticFiles;
namespace DHT.Server.Endpoints;
sealed class ViewerEndpoint : BaseEndpoint {
sealed class ViewerEndpoint(IDatabaseFile db, ResourceLoader resources) : BaseEndpoint(db) {
private static readonly FileExtensionContentTypeProvider ContentTypeProvider = new ();
private readonly ResourceLoader resources;
private readonly Dictionary<string, byte[]?> cache = new ();
private readonly SemaphoreSlim cacheSemaphore = new (1);
public ViewerEndpoint(IDatabaseFile db, ResourceLoader resources) : base(db) {
this.resources = resources;
}
protected override async Task<IHttpOutput> Respond(HttpContext ctx) {
string path = (string?) ctx.Request.RouteValues["path"] ?? "index.html";
protected override async Task Respond(HttpRequest request, HttpResponse response) {
string path = (string?) request.RouteValues["path"] ?? "index.html";
string resourcePath = "Viewer/" + path;
byte[]? resourceBytes;
@ -41,7 +36,7 @@ sealed class ViewerEndpoint : BaseEndpoint {
}
else {
var contentType = ContentTypeProvider.TryGetContentType(path, out string? type) ? type : null;
return new HttpOutput.File(contentType, resourceBytes);
await response.WriteFileAsync(contentType, resourceBytes);
}
}
}
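The endpoint keeps its small in-memory cache of viewer resources, guarded by a SemaphoreSlim so concurrent first requests do not race to load the same file. A stripped-down sketch of that guarded lazy cache; the loader delegate stands in for the ResourceLoader call:

using System;
using System.Collections.Generic;
using System.Threading;
using System.Threading.Tasks;

sealed class ResourceCacheSketch {
	private readonly Dictionary<string, byte[]?> cache = new ();
	private readonly SemaphoreSlim cacheSemaphore = new (1);
	
	public async Task<byte[]?> GetOrLoadAsync(string path, Func<string, Task<byte[]?>> loadResourceAsync) {
		await cacheSemaphore.WaitAsync();
		try {
			if (!cache.TryGetValue(path, out byte[]? bytes)) {
				bytes = await loadResourceAsync(path);
				cache[path] = bytes; // null is cached too, so missing resources are not re-probed
			}
			
			return bytes;
		} finally {
			cacheSemaphore.Release();
		}
	}
}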

View File

@ -50,6 +50,7 @@ sealed class Startup {
app.UseRouting();
app.UseEndpoints(endpoints => {
endpoints.MapGet("/get-tracking-script", new GetTrackingScriptEndpoint(db, parameters, resources).Handle);
endpoints.MapGet("/get-viewer-data", new GetViewerDataEndpoint(db).Handle);
endpoints.MapGet("/get-downloaded-file/{url}", new GetDownloadedFileEndpoint(db).Handle);
endpoints.MapPost("/track-channel", new TrackChannelEndpoint(db).Handle);
endpoints.MapPost("/track-users", new TrackUsersEndpoint(db).Handle);

View File

@ -6,18 +6,12 @@ using DHT.Server.Service;
namespace DHT.Server;
public sealed class State : IAsyncDisposable {
public static State Dummy { get; } = new (DummyDatabaseFile.Instance);
public sealed class State(IDatabaseFile db, int? concurrentDownloads) : IAsyncDisposable {
public static State Dummy { get; } = new (DummyDatabaseFile.Instance, null);
public IDatabaseFile Db { get; }
public Downloader Downloader { get; }
public ServerManager Server { get; }
public State(IDatabaseFile db) {
Db = db;
Downloader = new Downloader(db);
Server = new ServerManager(db);
}
public IDatabaseFile Db { get; } = db;
public Downloader Downloader { get; } = new (db, concurrentDownloads);
public ServerManager Server { get; } = new (db);
public async ValueTask DisposeAsync() {
await Downloader.Stop();
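State now forwards the optional download concurrency to the Downloader through its primary constructor; Dummy passes null, which leaves the downloader at its default. A hedged usage sketch, assuming `db` is an already-opened IDatabaseFile from the existing database layer:

// Hedged usage sketch: `db` is assumed to be an already-opened IDatabaseFile.
await using var state = new State(db, concurrentDownloads: 4);
// state.Db, state.Downloader and state.Server are then wired to the same database.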

View File

@ -8,7 +8,7 @@ public static class LinqExtensions {
HashSet<TKey>? seenKeys = null;
foreach (var item in collection) {
seenKeys ??= new HashSet<TKey>();
seenKeys ??= [];
if (seenKeys.Add(getKeyFromItem(item))) {
yield return item;
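The only change here is syntactic: the collection expression `[]` allocates the same HashSet<TKey> lazily on the first iteration. A self-contained reconstruction of the pattern (the real method name is not visible in this hunk, so DistinctByKeySketch is a placeholder):

using System;
using System.Collections.Generic;

static class LinqSketch {
	public static IEnumerable<TItem> DistinctByKeySketch<TItem, TKey>(IEnumerable<TItem> collection, Func<TItem, TKey> getKeyFromItem) {
		HashSet<TKey>? seenKeys = null;
		
		foreach (var item in collection) {
			seenKeys ??= []; // equivalent to `??= new HashSet<TKey>()` under C# 12 collection expressions
			
			if (seenKeys.Add(getKeyFromItem(item))) {
				yield return item;
			}
		}
	}
}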

View File

@ -0,0 +1,33 @@
using System.IO;
using System.Net.Mime;
using System.Text;
using System.Threading.Tasks;
using Microsoft.AspNetCore.Http;
namespace DHT.Utils.Http;
public static class HttpExtensions {
public static Task WriteTextAsync(this HttpResponse response, string text) {
return WriteTextAsync(response, MediaTypeNames.Text.Plain, text);
}
public static async Task WriteTextAsync(this HttpResponse response, string contentType, string text) {
response.ContentType = contentType;
await response.StartAsync();
await response.WriteAsync(text, Encoding.UTF8);
}
public static async Task WriteFileAsync(this HttpResponse response, string? contentType, byte[] bytes) {
response.ContentType = contentType ?? string.Empty;
response.ContentLength = bytes.Length;
await response.StartAsync();
await response.Body.WriteAsync(bytes);
}
public static async Task WriteStreamAsync(this HttpResponse response, string? contentType, ulong? contentLength, Stream source) {
response.ContentType = contentType ?? string.Empty;
response.ContentLength = (long?) contentLength;
await response.StartAsync();
await source.CopyToAsync(response.Body);
}
}
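These three extensions replace the IHttpOutput hierarchy removed below: the caller sets the content type, optionally the length, and the bytes go straight to the response body. A hedged usage sketch; the payloads and file path are made up, and each response should go through exactly one of the writers:

using System.IO;
using System.Net.Mime;
using System.Threading.Tasks;
using DHT.Utils.Http;
using Microsoft.AspNetCore.Http;

static class HttpExtensionsUsageSketch {
	public static Task Text(HttpResponse response) {
		return response.WriteTextAsync("hello"); // defaults to text/plain
	}
	
	public static Task Buffered(HttpResponse response) {
		// Content-Length is taken from the array.
		return response.WriteFileAsync(MediaTypeNames.Application.Json, "{}"u8.ToArray());
	}
	
	public static async Task Streamed(HttpResponse response) {
		// Passing a null length lets the server fall back to chunked transfer encoding.
		await using var stream = File.OpenRead("example.bin");
		await response.WriteStreamAsync(MediaTypeNames.Application.Octet, contentLength: null, stream);
	}
}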

View File

@ -1,35 +0,0 @@
using System.Text;
using System.Threading.Tasks;
using Microsoft.AspNetCore.Http;
namespace DHT.Utils.Http;
public static class HttpOutput {
public static IHttpOutput None { get; } = new NoneImpl();
private sealed class NoneImpl : IHttpOutput {
public Task WriteTo(HttpResponse response) {
return Task.CompletedTask;
}
}
public sealed class Text(string text) : IHttpOutput {
public Task WriteTo(HttpResponse response) {
return response.WriteAsync(text, Encoding.UTF8);
}
}
public sealed class File(string? contentType, byte[] bytes) : IHttpOutput {
public async Task WriteTo(HttpResponse response) {
response.ContentType = contentType ?? string.Empty;
await response.Body.WriteAsync(bytes);
}
}
public sealed class Redirect(string url, bool permanent) : IHttpOutput {
public Task WriteTo(HttpResponse response) {
response.Redirect(url, permanent);
return Task.CompletedTask;
}
}
}

View File

@ -1,8 +0,0 @@
using System.Threading.Tasks;
using Microsoft.AspNetCore.Http;
namespace DHT.Utils.Http;
public interface IHttpOutput {
Task WriteTo(HttpResponse response);
}

View File

@ -8,5 +8,5 @@ using DHT.Utils;
namespace DHT.Utils;
static class Version {
public const string Tag = "41.0.0.0";
public const string Tag = "41.2.0.0";
}