feat: upgrade to .NET 6, refactor everything

This commit is contained in:
spiral
2021-11-26 21:10:56 -05:00
parent d28e99ba43
commit 1918c56937
314 changed files with 27954 additions and 27966 deletions

View File

@@ -1,160 +1,150 @@
using System;
using System.Collections.Generic;
using System.Threading.Tasks;
using Dapper;
using Newtonsoft.Json.Linq;
using Autofac;
using Dapper;
using Serilog;
namespace PluralKit.Core
namespace PluralKit.Core;
public partial class BulkImporter: IAsyncDisposable
{
public partial class BulkImporter: IAsyncDisposable
private readonly Dictionary<string, GroupId> _existingGroupHids = new();
private readonly Dictionary<string, GroupId> _existingGroupNames = new();
private readonly Dictionary<string, MemberId> _existingMemberHids = new();
private readonly Dictionary<string, MemberId> _existingMemberNames = new();
private readonly Dictionary<string, GroupId> _knownGroupIdentifiers = new();
private readonly Dictionary<string, MemberId> _knownMemberIdentifiers = new();
private readonly ImportResultNew _result = new();
private ILogger _logger { get; init; }
private ModelRepository _repo { get; init; }
private PKSystem _system { get; set; }
private IPKConnection _conn { get; init; }
private IPKTransaction _tx { get; init; }
private Func<string, Task> _confirmFunc { get; init; }
public async ValueTask DisposeAsync()
{
private ILogger _logger { get; init; }
private ModelRepository _repo { get; init; }
private PKSystem _system { get; set; }
private IPKConnection _conn { get; init; }
private IPKTransaction _tx { get; init; }
private Func<string, Task> _confirmFunc { get; init; }
private readonly Dictionary<string, MemberId> _existingMemberHids = new();
private readonly Dictionary<string, MemberId> _existingMemberNames = new();
private readonly Dictionary<string, MemberId> _knownMemberIdentifiers = new();
private readonly Dictionary<string, GroupId> _existingGroupHids = new();
private readonly Dictionary<string, GroupId> _existingGroupNames = new();
private readonly Dictionary<string, GroupId> _knownGroupIdentifiers = new();
private ImportResultNew _result = new();
internal static async Task<ImportResultNew> PerformImport(IPKConnection conn, IPKTransaction tx, ModelRepository repo, ILogger logger,
DispatchService dispatch, ulong userId, PKSystem? system, JObject importFile, Func<string, Task> confirmFunc)
// try rolling back the transaction
// this will throw if the transaction was committed, but that's fine
// so we just catch InvalidOperationException
try
{
await using var importer = new BulkImporter()
{
_logger = logger,
_repo = repo,
_system = system,
_conn = conn,
_tx = tx,
_confirmFunc = confirmFunc,
};
if (system == null)
{
system = await repo.CreateSystem(null, importer._conn);
await repo.AddAccount(system.Id, userId, importer._conn);
importer._result.CreatedSystem = system.Hid;
importer._system = system;
}
// Fetch all members in the system and log their names and hids
var members = await conn.QueryAsync<PKMember>("select id, hid, name from members where system = @System",
new { System = system.Id });
foreach (var m in members)
{
importer._existingMemberHids[m.Hid] = m.Id;
importer._existingMemberNames[m.Name] = m.Id;
}
// same as above for groups
var groups = await conn.QueryAsync<PKGroup>("select id, hid, name from groups where system = @System",
new { System = system.Id });
foreach (var g in groups)
{
importer._existingGroupHids[g.Hid] = g.Id;
importer._existingGroupNames[g.Name] = g.Id;
}
try
{
if (importFile.ContainsKey("tuppers"))
await importer.ImportTupperbox(importFile);
else if (importFile.ContainsKey("switches"))
await importer.ImportPluralKit(importFile);
else
throw new ImportException("File type is unknown.");
importer._result.Success = true;
await tx.CommitAsync();
_ = dispatch.Dispatch(system.Id, new UpdateDispatchData()
{
Event = DispatchEvent.SUCCESSFUL_IMPORT
});
}
catch (ImportException e)
{
importer._result.Success = false;
importer._result.Message = e.Message;
}
catch (ArgumentNullException)
{
importer._result.Success = false;
}
return importer._result;
}
private (MemberId?, bool) TryGetExistingMember(string hid, string name)
{
    // Resolve an imported member against the system's existing members.
    // A hid match is authoritative (second tuple element = true); a name
    // match is a weaker hit (false); no match returns (null, false).
    if (_existingMemberHids.TryGetValue(hid, out var hidMatch))
        return (hidMatch, true);

    if (_existingMemberNames.TryGetValue(name, out var nameMatch))
        return (nameMatch, false);

    return (null, false);
}
private (GroupId?, bool) TryGetExistingGroup(string hid, string name)
{
    // Resolve an imported group against the system's existing groups.
    // Hid match wins (flag = true), then name match (flag = false),
    // otherwise (null, false).
    if (_existingGroupHids.TryGetValue(hid, out var hidMatch))
        return (hidMatch, true);

    if (_existingGroupNames.TryGetValue(name, out var nameMatch))
        return (nameMatch, false);

    return (null, false);
}
private async Task AssertMemberLimitNotReached(int newMembers)
{
    // The per-system override, when set, takes precedence over the global cap.
    var memberLimit = _system.MemberLimitOverride ?? Limits.MaxMemberCount;
    var currentCount = await _repo.GetSystemMemberCount(_system.Id);
    var wouldExceed = currentCount + newMembers > memberLimit;
    if (wouldExceed)
        throw new ImportException($"Import would exceed the maximum number of members ({memberLimit}).");
}
private async Task AssertGroupLimitNotReached(int newGroups)
{
    // The per-system override, when set, takes precedence over the global cap.
    var limit = _system.GroupLimitOverride ?? Limits.MaxGroupCount;
    var currentCount = await _repo.GetSystemGroupCount(_system.Id);
    var wouldExceed = currentCount + newGroups > limit;
    if (wouldExceed)
        throw new ImportException($"Import would exceed the maximum number of groups ({limit}).");
}
public async ValueTask DisposeAsync()
{
    // Best-effort rollback on dispose. If the import succeeded, the
    // transaction was already committed and RollbackAsync throws
    // InvalidOperationException — that just means there is nothing
    // left to roll back, so we swallow it.
    try
    {
        await _tx.RollbackAsync();
    }
    catch (InvalidOperationException)
    {
        // Transaction was already committed; ignore.
    }
}
private class ImportException: Exception
{
public ImportException(string Message) : base(Message) { }
await _tx.RollbackAsync();
}
catch (InvalidOperationException) { }
}
public record ImportResultNew
/// <summary>
/// Runs a full import (PluralKit or Tupperbox export file) for the given user
/// inside the supplied transaction. If <paramref name="system"/> is null, a new
/// system is created and linked to <paramref name="userId"/> first.
/// On success the transaction is committed and a dispatch event is fired;
/// on failure the importer's DisposeAsync rolls the transaction back.
/// Returns a result summary either way (never throws for import-level errors).
/// </summary>
internal static async Task<ImportResultNew> PerformImport(IPKConnection conn, IPKTransaction tx,
ModelRepository repo, ILogger logger, DispatchService dispatch, ulong userId,
PKSystem? system, JObject importFile, Func<string, Task> confirmFunc)
{
// `await using` ensures DisposeAsync (best-effort rollback) runs even on failure.
await using var importer = new BulkImporter
{
_logger = logger,
_repo = repo,
_system = system,
_conn = conn,
_tx = tx,
_confirmFunc = confirmFunc
};
// No existing system for this account: create one and record its hid in the result.
if (system == null)
{
system = await repo.CreateSystem(null, importer._conn);
await repo.AddAccount(system.Id, userId, importer._conn);
importer._result.CreatedSystem = system.Hid;
importer._system = system;
}
// Pre-load existing members so the importers can match file entries by hid/name.
// Fetch all members in the system and log their names and hids
var members = await conn.QueryAsync<PKMember>("select id, hid, name from members where system = @System",
new { System = system.Id });
foreach (var m in members)
{
importer._existingMemberHids[m.Hid] = m.Id;
importer._existingMemberNames[m.Name] = m.Id;
}
// same as above for groups
var groups = await conn.QueryAsync<PKGroup>("select id, hid, name from groups where system = @System",
new { System = system.Id });
foreach (var g in groups)
{
importer._existingGroupHids[g.Hid] = g.Id;
importer._existingGroupNames[g.Name] = g.Id;
}
try
{
// Sniff the file format by its distinguishing top-level key:
// Tupperbox exports have "tuppers", PluralKit exports have "switches".
if (importFile.ContainsKey("tuppers"))
await importer.ImportTupperbox(importFile);
else if (importFile.ContainsKey("switches"))
await importer.ImportPluralKit(importFile);
else
throw new ImportException("File type is unknown.");
importer._result.Success = true;
await tx.CommitAsync();
// Fire-and-forget dispatch notification; result is deliberately discarded.
_ = dispatch.Dispatch(system.Id, new UpdateDispatchData { Event = DispatchEvent.SUCCESSFUL_IMPORT });
}
catch (ImportException e)
{
// Import-level validation failure: surface the message to the user via the result.
importer._result.Success = false;
importer._result.Message = e.Message;
}
catch (ArgumentNullException)
{
// NOTE(review): presumably a missing required field in the file surfaces as
// ArgumentNullException somewhere in parsing; swallowed so the user gets a
// generic failure rather than a crash — confirm, and consider setting Message.
importer._result.Success = false;
}
return importer._result;
}
private (MemberId?, bool) TryGetExistingMember(string hid, string name)
{
    // A hid match is authoritative (flag true); a name match is weaker (flag false).
    var foundByHid = _existingMemberHids.TryGetValue(hid, out var idByHid);
    if (foundByHid)
        return (idByHid, true);

    var foundByName = _existingMemberNames.TryGetValue(name, out var idByName);
    return foundByName ? (idByName, false) : ((MemberId?)null, false);
}
private (GroupId?, bool) TryGetExistingGroup(string hid, string name)
{
    // A hid match is authoritative (flag true); a name match is weaker (flag false).
    var foundByHid = _existingGroupHids.TryGetValue(hid, out var idByHid);
    if (foundByHid)
        return (idByHid, true);

    var foundByName = _existingGroupNames.TryGetValue(name, out var idByName);
    return foundByName ? (idByName, false) : ((GroupId?)null, false);
}
private async Task AssertMemberLimitNotReached(int newMembers)
{
    // Guard: importing `newMembers` fresh members must not push the system
    // past its member cap (per-system override, else the global default).
    var memberLimit = _system.MemberLimitOverride ?? Limits.MaxMemberCount;
    if (await _repo.GetSystemMemberCount(_system.Id) + newMembers > memberLimit)
        throw new ImportException($"Import would exceed the maximum number of members ({memberLimit}).");
}
private async Task AssertGroupLimitNotReached(int newGroups)
{
    // Guard: importing `newGroups` fresh groups must not push the system
    // past its group cap (per-system override, else the global default).
    var limit = _system.GroupLimitOverride ?? Limits.MaxGroupCount;
    if (await _repo.GetSystemGroupCount(_system.Id) + newGroups > limit)
        throw new ImportException($"Import would exceed the maximum number of groups ({limit}).");
}
/// <summary>
/// Thrown when an import file fails validation; the message is surfaced to the
/// user via <see cref="ImportResultNew.Message"/> rather than crashing the import.
/// </summary>
private class ImportException: Exception
{
    // Parameter renamed from PascalCase `Message` to conventional camelCase;
    // all visible call sites pass it positionally, so this is source-compatible.
    public ImportException(string message) : base(message) { }
}
}
/// <summary>
/// Outcome summary of a bulk import: counts of created/updated entities plus
/// success state and an optional user-facing error message.
/// Exposed as auto-properties rather than public mutable fields (CA1051);
/// access syntax for callers (e.g. `result.Added++`) is unchanged.
/// </summary>
public record ImportResultNew
{
    /// <summary>Number of members newly created by the import.</summary>
    public int Added { get; set; }

    /// <summary>Hid of the system created for this import, if one was created; otherwise null.</summary>
    public string? CreatedSystem { get; set; }

    /// <summary>User-facing error message when the import failed; null on success.</summary>
    public string? Message { get; set; }

    /// <summary>Number of existing members updated by the import.</summary>
    public int Modified { get; set; }

    /// <summary>Whether the import completed and was committed successfully.</summary>
    public bool Success { get; set; }
}

View File

@@ -1,8 +1,4 @@
using System;
using System.Collections.Generic;
using System.Collections.Immutable;
using System.Linq;
using System.Threading.Tasks;
using Dapper;
@@ -12,245 +8,254 @@ using NodaTime;
using NpgsqlTypes;
namespace PluralKit.Core
namespace PluralKit.Core;
public partial class BulkImporter
{
public partial class BulkImporter
private async Task<ImportResultNew> ImportPluralKit(JObject importFile)
{
private async Task<ImportResultNew> ImportPluralKit(JObject importFile)
var patch = SystemPatch.FromJSON(importFile);
patch.AssertIsValid();
if (patch.Errors.Count > 0)
{
var patch = SystemPatch.FromJSON(importFile);
var err = patch.Errors[0];
if (err is FieldTooLongError)
throw new ImportException($"Field {err.Key} in export file is too long "
+ $"({(err as FieldTooLongError).ActualLength} > {(err as FieldTooLongError).MaxLength}).");
if (err.Text != null)
throw new ImportException(err.Text);
throw new ImportException($"Field {err.Key} in export file is invalid.");
}
patch.AssertIsValid();
if (patch.Errors.Count > 0)
await _repo.UpdateSystem(_system.Id, patch, _conn);
var members = importFile.Value<JArray>("members");
var groups = importFile.Value<JArray>("groups");
var switches = importFile.Value<JArray>("switches");
var newMembers = members.Count(m =>
{
var (found, _) = TryGetExistingMember(m.Value<string>("id"), m.Value<string>("name"));
return found == null;
});
await AssertMemberLimitNotReached(newMembers);
if (groups != null)
{
var newGroups = groups.Count(g =>
{
var err = patch.Errors[0];
if (err is FieldTooLongError)
throw new ImportException($"Field {err.Key} in export file is too long "
+ $"({(err as FieldTooLongError).ActualLength} > {(err as FieldTooLongError).MaxLength}).");
else if (err.Text != null)
throw new ImportException(err.Text);
else
throw new ImportException($"Field {err.Key} in export file is invalid.");
}
await _repo.UpdateSystem(_system.Id, patch, _conn);
var members = importFile.Value<JArray>("members");
var groups = importFile.Value<JArray>("groups");
var switches = importFile.Value<JArray>("switches");
var newMembers = members.Count(m =>
{
var (found, _) = TryGetExistingMember(m.Value<string>("id"), m.Value<string>("name"));
var (found, _) = TryGetExistingGroup(g.Value<string>("id"), g.Value<string>("name"));
return found == null;
});
await AssertMemberLimitNotReached(newMembers);
if (groups != null)
{
var newGroups = groups.Count(g =>
{
var (found, _) = TryGetExistingGroup(g.Value<string>("id"), g.Value<string>("name"));
return found == null;
});
await AssertGroupLimitNotReached(newGroups);
}
foreach (JObject member in members)
await ImportMember(member);
if (groups != null)
foreach (JObject group in groups)
await ImportGroup(group);
if (switches.Any(sw => sw.Value<JArray>("members").Any(m => !_knownMemberIdentifiers.ContainsKey((string)m))))
throw new ImportException("One or more switches include members that haven't been imported.");
await ImportSwitches(switches);
return _result;
await AssertGroupLimitNotReached(newGroups);
}
private async Task ImportMember(JObject member)
foreach (JObject member in members)
await ImportMember(member);
if (groups != null)
foreach (JObject group in groups)
await ImportGroup(group);
if (switches.Any(sw =>
sw.Value<JArray>("members").Any(m => !_knownMemberIdentifiers.ContainsKey((string)m))))
throw new ImportException("One or more switches include members that haven't been imported.");
await ImportSwitches(switches);
return _result;
}
private async Task ImportMember(JObject member)
{
var id = member.Value<string>("id");
var name = member.Value<string>("name");
var (found, isHidExisting) = TryGetExistingMember(id, name);
var isNewMember = found == null;
var referenceName = isHidExisting ? id : name;
if (isNewMember)
_result.Added++;
else
_result.Modified++;
_logger.Debug(
"Importing member with identifier {FileId} to system {System} (is creating new member? {IsCreatingNewMember})",
referenceName, _system.Id, isNewMember
);
var patch = MemberPatch.FromJSON(member);
patch.AssertIsValid();
if (patch.Errors.Count > 0)
{
var id = member.Value<string>("id");
var name = member.Value<string>("name");
var (found, isHidExisting) = TryGetExistingMember(id, name);
var isNewMember = found == null;
var referenceName = isHidExisting ? id : name;
if (isNewMember)
_result.Added++;
else
_result.Modified++;
_logger.Debug(
"Importing member with identifier {FileId} to system {System} (is creating new member? {IsCreatingNewMember})",
referenceName, _system.Id, isNewMember
);
var patch = MemberPatch.FromJSON(member);
patch.AssertIsValid();
if (patch.Errors.Count > 0)
{
var err = patch.Errors[0];
if (err is FieldTooLongError)
throw new ImportException($"Field {err.Key} in member {name} is too long "
+ $"({(err as FieldTooLongError).ActualLength} > {(err as FieldTooLongError).MaxLength}).");
else if (err.Text != null)
throw new ImportException($"member {name}: {err.Text}");
else
throw new ImportException($"Field {err.Key} in member {name} is invalid.");
}
MemberId? memberId = found;
if (isNewMember)
{
var newMember = await _repo.CreateMember(_system.Id, patch.Name.Value, _conn);
memberId = newMember.Id;
}
_knownMemberIdentifiers[id] = memberId.Value;
await _repo.UpdateMember(memberId.Value, patch, _conn);
var err = patch.Errors[0];
if (err is FieldTooLongError)
throw new ImportException($"Field {err.Key} in member {name} is too long "
+ $"({(err as FieldTooLongError).ActualLength} > {(err as FieldTooLongError).MaxLength}).");
if (err.Text != null)
throw new ImportException($"member {name}: {err.Text}");
throw new ImportException($"Field {err.Key} in member {name} is invalid.");
}
private async Task ImportGroup(JObject group)
var memberId = found;
if (isNewMember)
{
var id = group.Value<string>("id");
var name = group.Value<string>("name");
var newMember = await _repo.CreateMember(_system.Id, patch.Name.Value, _conn);
memberId = newMember.Id;
}
var (found, isHidExisting) = TryGetExistingGroup(id, name);
var isNewGroup = found == null;
var referenceName = isHidExisting ? id : name;
_knownMemberIdentifiers[id] = memberId.Value;
_logger.Debug(
"Importing group with identifier {FileId} to system {System} (is creating new group? {IsCreatingNewGroup})",
referenceName, _system.Id, isNewGroup
);
await _repo.UpdateMember(memberId.Value, patch, _conn);
}
var patch = GroupPatch.FromJson(group);
private async Task ImportGroup(JObject group)
{
var id = group.Value<string>("id");
var name = group.Value<string>("name");
patch.AssertIsValid();
if (patch.Errors.Count > 0)
var (found, isHidExisting) = TryGetExistingGroup(id, name);
var isNewGroup = found == null;
var referenceName = isHidExisting ? id : name;
_logger.Debug(
"Importing group with identifier {FileId} to system {System} (is creating new group? {IsCreatingNewGroup})",
referenceName, _system.Id, isNewGroup
);
var patch = GroupPatch.FromJson(group);
patch.AssertIsValid();
if (patch.Errors.Count > 0)
{
var err = patch.Errors[0];
if (err is FieldTooLongError)
throw new ImportException($"Field {err.Key} in group {name} is too long "
+ $"({(err as FieldTooLongError).ActualLength} > {(err as FieldTooLongError).MaxLength}).");
if (err.Text != null)
throw new ImportException($"group {name}: {err.Text}");
throw new ImportException($"Field {err.Key} in group {name} is invalid.");
}
var groupId = found;
if (isNewGroup)
{
var newGroup = await _repo.CreateGroup(_system.Id, patch.Name.Value, _conn);
groupId = newGroup.Id;
}
_knownGroupIdentifiers[id] = groupId.Value;
await _repo.UpdateGroup(groupId.Value, patch, _conn);
var groupMembers = group.Value<JArray>("members");
var currentGroupMembers = (await _conn.QueryAsync<MemberId>(
"select member_id from group_members where group_id = @groupId",
new { groupId = groupId.Value }
)).ToList();
await using (var importer =
_conn.BeginBinaryImport("copy group_members (group_id, member_id) from stdin (format binary)"))
{
foreach (var memberIdentifier in groupMembers)
{
var err = patch.Errors[0];
if (err is FieldTooLongError)
throw new ImportException($"Field {err.Key} in group {name} is too long "
+ $"({(err as FieldTooLongError).ActualLength} > {(err as FieldTooLongError).MaxLength}).");
else if (err.Text != null)
throw new ImportException($"group {name}: {err.Text}");
else
throw new ImportException($"Field {err.Key} in group {name} is invalid.");
if (!_knownMemberIdentifiers.TryGetValue(memberIdentifier.ToString(), out var memberId))
throw new Exception(
$"Attempted to import group member with member identifier {memberIdentifier} but could not find a recently imported member with this id!");
if (currentGroupMembers.Contains(memberId))
continue;
await importer.StartRowAsync();
await importer.WriteAsync(groupId.Value.Value, NpgsqlDbType.Integer);
await importer.WriteAsync(memberId.Value, NpgsqlDbType.Integer);
}
GroupId? groupId = found;
await importer.CompleteAsync();
}
}
if (isNewGroup)
private async Task ImportSwitches(JArray switches)
{
var existingSwitches =
(await _conn.QueryAsync<PKSwitch>("select * from switches where system = @System",
new { System = _system.Id })).ToList();
var existingTimestamps = existingSwitches.Select(sw => sw.Timestamp).ToImmutableHashSet();
var lastSwitchId = existingSwitches.Count != 0
? existingSwitches.Select(sw => sw.Id).Max()
: (SwitchId?)null;
if (switches.Count > 10000)
throw new ImportException("Too many switches present in import file.");
// Import switch definitions
var importedSwitches = new Dictionary<Instant, JArray>();
await using (var importer =
_conn.BeginBinaryImport("copy switches (system, timestamp) from stdin (format binary)"))
{
foreach (var sw in switches)
{
var newGroup = await _repo.CreateGroup(_system.Id, patch.Name.Value, _conn);
groupId = newGroup.Id;
var timestampString = sw.Value<string>("timestamp");
var timestamp = DateTimeFormats.TimestampExportFormat.Parse(timestampString);
if (!timestamp.Success)
throw new ImportException($"Switch timestamp {timestampString} is not an valid timestamp.");
// Don't import duplicate switches
if (existingTimestamps.Contains(timestamp.Value)) continue;
// Otherwise, write to importer
await importer.StartRowAsync();
await importer.WriteAsync(_system.Id.Value, NpgsqlDbType.Integer);
await importer.WriteAsync(timestamp.Value, NpgsqlDbType.Timestamp);
var members = sw.Value<JArray>("members");
if (members.Count > Limits.MaxSwitchMemberCount)
throw new ImportException(
$"Switch with timestamp {timestampString} contains too many members ({members.Count} > 100).");
// Note that we've imported a switch with this timestamp
importedSwitches[timestamp.Value] = sw.Value<JArray>("members");
}
_knownGroupIdentifiers[id] = groupId.Value;
// Commit the import
await importer.CompleteAsync();
}
await _repo.UpdateGroup(groupId.Value, patch, _conn);
// Now, fetch all the switches we just added (so, now we get their IDs too)
// IDs are sequential, so any ID in this system, with a switch ID > the last max, will be one we just added
var justAddedSwitches = await _conn.QueryAsync<PKSwitch>(
"select * from switches where system = @System and id > @LastSwitchId",
new { System = _system.Id, LastSwitchId = lastSwitchId?.Value ?? -1 });
var groupMembers = group.Value<JArray>("members");
var currentGroupMembers = (await _conn.QueryAsync<MemberId>(
"select member_id from group_members where group_id = @groupId",
new { groupId = groupId.Value }
)).ToList();
await using (var importer = _conn.BeginBinaryImport("copy group_members (group_id, member_id) from stdin (format binary)"))
// Lastly, import the switch members
await using (var importer =
_conn.BeginBinaryImport("copy switch_members (switch, member) from stdin (format binary)"))
{
foreach (var justAddedSwitch in justAddedSwitches)
{
foreach (var memberIdentifier in groupMembers)
if (!importedSwitches.TryGetValue(justAddedSwitch.Timestamp, out var switchMembers))
throw new Exception(
$"Found 'just-added' switch (by ID) with timestamp {justAddedSwitch.Timestamp}, but this did not correspond to a timestamp we just added a switch entry of! :/");
// We still assume timestamps are unique and non-duplicate, so:
foreach (var memberIdentifier in switchMembers)
{
if (!_knownMemberIdentifiers.TryGetValue(memberIdentifier.ToString(), out var memberId))
throw new Exception($"Attempted to import group member with member identifier {memberIdentifier} but could not find a recently imported member with this id!");
if (currentGroupMembers.Contains(memberId))
continue;
if (!_knownMemberIdentifiers.TryGetValue((string)memberIdentifier, out var memberId))
throw new Exception(
$"Attempted to import switch with member identifier {memberIdentifier} but could not find an entry in the id map for this! :/");
await importer.StartRowAsync();
await importer.WriteAsync(groupId.Value.Value, NpgsqlDbType.Integer);
await importer.WriteAsync(justAddedSwitch.Id.Value, NpgsqlDbType.Integer);
await importer.WriteAsync(memberId.Value, NpgsqlDbType.Integer);
}
await importer.CompleteAsync();
}
}
private async Task ImportSwitches(JArray switches)
{
var existingSwitches = (await _conn.QueryAsync<PKSwitch>("select * from switches where system = @System", new { System = _system.Id })).ToList();
var existingTimestamps = existingSwitches.Select(sw => sw.Timestamp).ToImmutableHashSet();
var lastSwitchId = existingSwitches.Count != 0 ? existingSwitches.Select(sw => sw.Id).Max() : (SwitchId?)null;
if (switches.Count > 10000)
throw new ImportException($"Too many switches present in import file.");
// Import switch definitions
var importedSwitches = new Dictionary<Instant, JArray>();
await using (var importer = _conn.BeginBinaryImport("copy switches (system, timestamp) from stdin (format binary)"))
{
foreach (var sw in switches)
{
var timestampString = sw.Value<string>("timestamp");
var timestamp = DateTimeFormats.TimestampExportFormat.Parse(timestampString);
if (!timestamp.Success) throw new ImportException($"Switch timestamp {timestampString} is not an valid timestamp.");
// Don't import duplicate switches
if (existingTimestamps.Contains(timestamp.Value)) continue;
// Otherwise, write to importer
await importer.StartRowAsync();
await importer.WriteAsync(_system.Id.Value, NpgsqlDbType.Integer);
await importer.WriteAsync(timestamp.Value, NpgsqlDbType.Timestamp);
var members = sw.Value<JArray>("members");
if (members.Count > Limits.MaxSwitchMemberCount)
throw new ImportException($"Switch with timestamp {timestampString} contains too many members ({members.Count} > 100).");
// Note that we've imported a switch with this timestamp
importedSwitches[timestamp.Value] = sw.Value<JArray>("members");
}
// Commit the import
await importer.CompleteAsync();
}
// Now, fetch all the switches we just added (so, now we get their IDs too)
// IDs are sequential, so any ID in this system, with a switch ID > the last max, will be one we just added
var justAddedSwitches = await _conn.QueryAsync<PKSwitch>(
"select * from switches where system = @System and id > @LastSwitchId",
new { System = _system.Id, LastSwitchId = lastSwitchId?.Value ?? -1 });
// Lastly, import the switch members
await using (var importer = _conn.BeginBinaryImport("copy switch_members (switch, member) from stdin (format binary)"))
{
foreach (var justAddedSwitch in justAddedSwitches)
{
if (!importedSwitches.TryGetValue(justAddedSwitch.Timestamp, out var switchMembers))
throw new Exception($"Found 'just-added' switch (by ID) with timestamp {justAddedSwitch.Timestamp}, but this did not correspond to a timestamp we just added a switch entry of! :/");
// We still assume timestamps are unique and non-duplicate, so:
foreach (var memberIdentifier in switchMembers)
{
if (!_knownMemberIdentifiers.TryGetValue((string)memberIdentifier, out var memberId))
throw new Exception($"Attempted to import switch with member identifier {memberIdentifier} but could not find an entry in the id map for this! :/");
await importer.StartRowAsync();
await importer.WriteAsync(justAddedSwitch.Id.Value, NpgsqlDbType.Integer);
await importer.WriteAsync(memberId.Value, NpgsqlDbType.Integer);
}
}
await importer.CompleteAsync();
}
await importer.CompleteAsync();
}
}
}

View File

@@ -1,123 +1,124 @@
using System;
using System.Collections.Generic;
using System.Linq;
using System.Threading.Tasks;
using Newtonsoft.Json.Linq;
using NodaTime;
namespace PluralKit.Core
namespace PluralKit.Core;
public partial class BulkImporter
{
public partial class BulkImporter
private async Task<ImportResultNew> ImportTupperbox(JObject importFile)
{
private async Task<ImportResultNew> ImportTupperbox(JObject importFile)
var tuppers = importFile.Value<JArray>("tuppers");
var newMembers = tuppers.Count(t => !_existingMemberNames.TryGetValue("name", out var memberId));
await AssertMemberLimitNotReached(newMembers);
string lastSetTag = null;
var multipleTags = false;
var hasGroup = false;
foreach (JObject tupper in tuppers)
(lastSetTag, multipleTags, hasGroup) = await ImportTupper(tupper, lastSetTag);
if (multipleTags || hasGroup)
{
var tuppers = importFile.Value<JArray>("tuppers");
var newMembers = tuppers.Count(t => !_existingMemberNames.TryGetValue("name", out var memberId));
await AssertMemberLimitNotReached(newMembers);
var issueStr =
$"{Emojis.Warn} The following potential issues were detected converting your Tupperbox input file:";
if (hasGroup)
issueStr +=
"\n- PluralKit does not support member groups. Members will be imported without groups.";
if (multipleTags)
issueStr +=
"\n- PluralKit does not support per-member system tags. Since you had multiple members with distinct tags, those tags will be applied to the members' *display names*/nicknames instead.";
string lastSetTag = null;
bool multipleTags = false;
bool hasGroup = false;
foreach (JObject tupper in tuppers)
(lastSetTag, multipleTags, hasGroup) = await ImportTupper(tupper, lastSetTag);
if (multipleTags || hasGroup)
{
var issueStr =
$"{Emojis.Warn} The following potential issues were detected converting your Tupperbox input file:";
if (hasGroup)
issueStr +=
"\n- PluralKit does not support member groups. Members will be imported without groups.";
if (multipleTags)
issueStr +=
"\n- PluralKit does not support per-member system tags. Since you had multiple members with distinct tags, those tags will be applied to the members' *display names*/nicknames instead.";
await _confirmFunc(issueStr);
_result.Success = true;
}
return _result;
await _confirmFunc(issueStr);
_result.Success = true;
}
private async Task<(string lastSetTag, bool multipleTags, bool hasGroup)> ImportTupper(JObject tupper, string lastSetTag)
return _result;
}
private async Task<(string lastSetTag, bool multipleTags, bool hasGroup)> ImportTupper(
JObject tupper, string lastSetTag)
{
if (!tupper.ContainsKey("name") || tupper["name"].Type == JTokenType.Null)
throw new ImportException("Field 'name' cannot be null.");
var hasGroup = tupper.ContainsKey("group_id") && tupper["group_id"].Type != JTokenType.Null;
var multipleTags = false;
var name = tupper.Value<string>("name");
var patch = new MemberPatch();
patch.Name = name;
if (tupper.ContainsKey("avatar_url") && tupper["avatar_url"].Type != JTokenType.Null)
patch.AvatarUrl = tupper.Value<string>("avatar_url").NullIfEmpty();
if (tupper.ContainsKey("brackets"))
{
if (!tupper.ContainsKey("name") || tupper["name"].Type == JTokenType.Null)
throw new ImportException("Field 'name' cannot be null.");
var hasGroup = tupper.ContainsKey("group_id") && tupper["group_id"].Type != JTokenType.Null;
var multipleTags = false;
var name = tupper.Value<string>("name");
var patch = new MemberPatch();
patch.Name = name;
if (tupper.ContainsKey("avatar_url") && tupper["avatar_url"].Type != JTokenType.Null) patch.AvatarUrl = tupper.Value<string>("avatar_url").NullIfEmpty();
if (tupper.ContainsKey("brackets"))
{
var brackets = tupper.Value<JArray>("brackets");
if (brackets.Count % 2 != 0)
throw new ImportException($"Field 'brackets' in tupper {name} is invalid.");
var tags = new List<ProxyTag>();
for (var i = 0; i < brackets.Count / 2; i++)
tags.Add(new ProxyTag((string)brackets[i * 2], (string)brackets[i * 2 + 1]));
patch.ProxyTags = tags.ToArray();
}
// todo: && if is new member
if (tupper.ContainsKey("posts")) patch.MessageCount = tupper.Value<int>("posts");
if (tupper.ContainsKey("show_brackets")) patch.KeepProxy = tupper.Value<bool>("show_brackets");
if (tupper.ContainsKey("birthday") && tupper["birthday"].Type != JTokenType.Null)
{
var parsed = DateTimeFormats.TimestampExportFormat.Parse(tupper.Value<string>("birthday"));
if (!parsed.Success)
throw new ImportException($"Field 'birthday' in tupper {name} is invalid.");
patch.Birthday = LocalDate.FromDateTime(parsed.Value.ToDateTimeUtc());
}
if (tupper.ContainsKey("description")) patch.Description = tupper.Value<string>("description");
if (tupper.ContainsKey("nick")) patch.DisplayName = tupper.Value<string>("nick");
if (tupper.ContainsKey("tag") && tupper["tag"].Type != JTokenType.Null)
{
var tag = tupper.Value<string>("tag");
if (tag != lastSetTag)
{
lastSetTag = tag;
multipleTags = true;
}
patch.DisplayName = $"{name} {tag}";
}
patch.AssertIsValid();
if (patch.Errors.Count > 0)
{
var err = patch.Errors[0];
if (err is FieldTooLongError)
throw new ImportException($"Field {err.Key} in tupper {name} is too long "
+ $"({(err as FieldTooLongError).ActualLength} > {(err as FieldTooLongError).MaxLength}).");
else if (err.Text != null)
throw new ImportException($"tupper {name}: {err.Text}");
else
throw new ImportException($"Field {err.Key} in tupper {name} is invalid.");
}
var isNewMember = false;
if (!_existingMemberNames.TryGetValue(name, out var memberId))
{
var newMember = await _repo.CreateMember(_system.Id, name, _conn);
memberId = newMember.Id;
isNewMember = true;
_result.Added++;
}
else
_result.Modified++;
_logger.Debug("Importing member with identifier {FileId} to system {System} (is creating new member? {IsCreatingNewMember})",
name, _system.Id, isNewMember);
await _repo.UpdateMember(memberId, patch, _conn);
return (lastSetTag, multipleTags, hasGroup);
var brackets = tupper.Value<JArray>("brackets");
if (brackets.Count % 2 != 0)
throw new ImportException($"Field 'brackets' in tupper {name} is invalid.");
var tags = new List<ProxyTag>();
for (var i = 0; i < brackets.Count / 2; i++)
tags.Add(new ProxyTag((string)brackets[i * 2], (string)brackets[i * 2 + 1]));
patch.ProxyTags = tags.ToArray();
}
// todo: && if is new member
if (tupper.ContainsKey("posts")) patch.MessageCount = tupper.Value<int>("posts");
if (tupper.ContainsKey("show_brackets")) patch.KeepProxy = tupper.Value<bool>("show_brackets");
if (tupper.ContainsKey("birthday") && tupper["birthday"].Type != JTokenType.Null)
{
var parsed = DateTimeFormats.TimestampExportFormat.Parse(tupper.Value<string>("birthday"));
if (!parsed.Success)
throw new ImportException($"Field 'birthday' in tupper {name} is invalid.");
patch.Birthday = LocalDate.FromDateTime(parsed.Value.ToDateTimeUtc());
}
if (tupper.ContainsKey("description")) patch.Description = tupper.Value<string>("description");
if (tupper.ContainsKey("nick")) patch.DisplayName = tupper.Value<string>("nick");
if (tupper.ContainsKey("tag") && tupper["tag"].Type != JTokenType.Null)
{
var tag = tupper.Value<string>("tag");
if (tag != lastSetTag)
{
lastSetTag = tag;
multipleTags = true;
}
patch.DisplayName = $"{name} {tag}";
}
patch.AssertIsValid();
if (patch.Errors.Count > 0)
{
var err = patch.Errors[0];
if (err is FieldTooLongError)
throw new ImportException($"Field {err.Key} in tupper {name} is too long "
+ $"({(err as FieldTooLongError).ActualLength} > {(err as FieldTooLongError).MaxLength}).");
if (err.Text != null)
throw new ImportException($"tupper {name}: {err.Text}");
throw new ImportException($"Field {err.Key} in tupper {name} is invalid.");
}
var isNewMember = false;
if (!_existingMemberNames.TryGetValue(name, out var memberId))
{
var newMember = await _repo.CreateMember(_system.Id, name, _conn);
memberId = newMember.Id;
isNewMember = true;
_result.Added++;
}
else
{
_result.Modified++;
}
_logger.Debug(
"Importing member with identifier {FileId} to system {System} (is creating new member? {IsCreatingNewMember})",
name, _system.Id, isNewMember);
await _repo.UpdateMember(memberId, patch, _conn);
return (lastSetTag, multipleTags, hasGroup);
}
}

View File

@@ -1,32 +1,34 @@
using NodaTime;
using NodaTime.Text;
namespace PluralKit.Core
namespace PluralKit.Core;
public static class DateTimeFormats
{
public static class DateTimeFormats
public static IPattern<Instant> TimestampExportFormat = InstantPattern.ExtendedIso;
public static IPattern<LocalDate> DateExportFormat = LocalDatePattern.CreateWithInvariantCulture("yyyy-MM-dd");
// We create a composite pattern that only shows the two most significant things
// eg. if we have something with nonzero day component, we show <x>d <x>h, but if it's
// a smaller duration we may only bother with showing <x>h <x>m or <x>m <x>s
public static IPattern<Duration> DurationFormat = new CompositePatternBuilder<Duration>
{
public static IPattern<Instant> TimestampExportFormat = InstantPattern.ExtendedIso;
public static IPattern<LocalDate> DateExportFormat = LocalDatePattern.CreateWithInvariantCulture("yyyy-MM-dd");
{DurationPattern.CreateWithInvariantCulture("s's'"), d => true},
{DurationPattern.CreateWithInvariantCulture("m'm' s's'"), d => d.Minutes > 0},
{DurationPattern.CreateWithInvariantCulture("H'h' m'm'"), d => d.Hours > 0},
{DurationPattern.CreateWithInvariantCulture("D'd' h'h'"), d => d.Days > 0}
}.Build();
// We create a composite pattern that only shows the two most significant things
// eg. if we have something with nonzero day component, we show <x>d <x>h, but if it's
// a smaller duration we may only bother with showing <x>h <x>m or <x>m <x>s
public static IPattern<Duration> DurationFormat = new CompositePatternBuilder<Duration>
{
{DurationPattern.CreateWithInvariantCulture("s's'"), d => true},
{DurationPattern.CreateWithInvariantCulture("m'm' s's'"), d => d.Minutes > 0},
{DurationPattern.CreateWithInvariantCulture("H'h' m'm'"), d => d.Hours > 0},
{DurationPattern.CreateWithInvariantCulture("D'd' h'h'"), d => d.Days > 0}
}.Build();
public static IPattern<LocalDateTime> LocalDateTimeFormat =
LocalDateTimePattern.CreateWithInvariantCulture("yyyy-MM-dd HH:mm:ss");
public static IPattern<LocalDateTime> LocalDateTimeFormat = LocalDateTimePattern.CreateWithInvariantCulture("yyyy-MM-dd HH:mm:ss");
public static IPattern<ZonedDateTime> ZonedDateTimeFormat = ZonedDateTimePattern.CreateWithInvariantCulture("yyyy-MM-dd HH:mm:ss x", DateTimeZoneProviders.Tzdb);
public static IPattern<ZonedDateTime> ZonedDateTimeFormat =
ZonedDateTimePattern.CreateWithInvariantCulture("yyyy-MM-dd HH:mm:ss x", DateTimeZoneProviders.Tzdb);
public static string FormatExport(this Instant instant) => TimestampExportFormat.Format(instant);
public static string FormatExport(this LocalDate date) => DateExportFormat.Format(date);
public static string FormatZoned(this ZonedDateTime zdt) => ZonedDateTimeFormat.Format(zdt);
public static string FormatZoned(this Instant i, DateTimeZone zone) => i.InZone(zone).FormatZoned();
public static string FormatZoned(this Instant i, PKSystem sys) => i.FormatZoned(sys.Zone);
public static string FormatDuration(this Duration d) => DurationFormat.Format(d);
}
public static string FormatExport(this Instant instant) => TimestampExportFormat.Format(instant);
public static string FormatExport(this LocalDate date) => DateExportFormat.Format(date);
public static string FormatZoned(this ZonedDateTime zdt) => ZonedDateTimeFormat.Format(zdt);
public static string FormatZoned(this Instant i, DateTimeZone zone) => i.InZone(zone).FormatZoned();
public static string FormatZoned(this Instant i, PKSystem sys) => i.FormatZoned(sys.Zone);
public static string FormatDuration(this Duration d) => DurationFormat.Format(d);
}

View File

@@ -1,174 +1,168 @@
using System.Linq;
using System.Text.RegularExpressions;
using NodaTime;
using NodaTime.Text;
namespace PluralKit.Core
namespace PluralKit.Core;
public class DateUtils
{
public class DateUtils
public static Duration? ParsePeriod(string str)
{
public static Duration? ParsePeriod(string str)
var d = Duration.Zero;
foreach (Match match in Regex.Matches(str, "(\\d{1,6})(\\w)"))
{
Duration d = Duration.Zero;
var amount = int.Parse(match.Groups[1].Value);
var type = match.Groups[2].Value.ToLowerInvariant();
foreach (Match match in Regex.Matches(str, "(\\d{1,6})(\\w)"))
{
var amount = int.Parse(match.Groups[1].Value);
var type = match.Groups[2].Value.ToLowerInvariant();
if (type == "w") d += Duration.FromDays(7) * amount;
else if (type == "d") d += Duration.FromDays(1) * amount;
else if (type == "h") d += Duration.FromHours(1) * amount;
else if (type == "m") d += Duration.FromMinutes(1) * amount;
else if (type == "s") d += Duration.FromSeconds(1) * amount;
else return null;
}
if (d == Duration.Zero) return null;
return d;
if (type == "w") d += Duration.FromDays(7) * amount;
else if (type == "d") d += Duration.FromDays(1) * amount;
else if (type == "h") d += Duration.FromHours(1) * amount;
else if (type == "m") d += Duration.FromMinutes(1) * amount;
else if (type == "s") d += Duration.FromSeconds(1) * amount;
else return null;
}
public static LocalDate? ParseDate(string str, bool allowNullYear = false)
if (d == Duration.Zero) return null;
return d;
}
public static LocalDate? ParseDate(string str, bool allowNullYear = false)
{
// NodaTime can't parse constructs like "1st" and "2nd" so we quietly replace those away
// Gotta make sure to do the regex otherwise we'll catch things like the "st" in "August" too
str = Regex.Replace(str, "(\\d+)(st|nd|rd|th)", "$1");
var patterns = new[]
{
// NodaTime can't parse constructs like "1st" and "2nd" so we quietly replace those away
// Gotta make sure to do the regex otherwise we'll catch things like the "st" in "August" too
str = Regex.Replace(str, "(\\d+)(st|nd|rd|th)", "$1");
"MMM d yyyy", // Jan 1 2019
"MMM d, yyyy", // Jan 1, 2019
"MMMM d yyyy", // January 1 2019
"MMMM d, yyyy", // January 1, 2019
"yyyy-MM-dd", // 2019-01-01
"yyyy MM dd", // 2019 01 01
"yyyy/MM/dd" // 2019/01/01
}.ToList();
var patterns = new[]
if (allowNullYear)
patterns.AddRange(new[]
{
"MMM d yyyy", // Jan 1 2019
"MMM d, yyyy", // Jan 1, 2019
"MMMM d yyyy", // January 1 2019
"MMMM d, yyyy", // January 1, 2019
"yyyy-MM-dd", // 2019-01-01
"yyyy MM dd", // 2019 01 01
"yyyy/MM/dd" // 2019/01/01
}.ToList();
if (allowNullYear) patterns.AddRange(new[]
{
"MMM d", // Jan 1
"MMMM d", // January 1
"MM-dd", // 01-01
"MM dd", // 01 01
"MM/dd" // 01/01
"MMM d", // Jan 1
"MMMM d", // January 1
"MM-dd", // 01-01
"MM dd", // 01 01
"MM/dd" // 01/01
});
// Giving a template value so year will be parsed as 0004 if not present
// This means we can later disambiguate whether a null year was given
// We use the basis year 0004 (rather than, say, 0001) because 0004 is a leap year in the Gregorian calendar
// which means the date "Feb 29, 0004" is a valid date. 0001 is still accepted as a null year for legacy reasons.
// TODO: should we be using invariant culture here?
foreach (var pattern in patterns.Select(p => LocalDatePattern.CreateWithInvariantCulture(p).WithTemplateValue(new LocalDate(0004, 1, 1))))
{
var result = pattern.Parse(str);
if (result.Success) return result.Value;
}
return null;
}
public static ZonedDateTime? ParseDateTime(string str, bool nudgeToPast = false, DateTimeZone zone = null)
// Giving a template value so year will be parsed as 0004 if not present
// This means we can later disambiguate whether a null year was given
// We use the basis year 0004 (rather than, say, 0001) because 0004 is a leap year in the Gregorian calendar
// which means the date "Feb 29, 0004" is a valid date. 0001 is still accepted as a null year for legacy reasons.
// TODO: should we be using invariant culture here?
foreach (var pattern in patterns.Select(p =>
LocalDatePattern.CreateWithInvariantCulture(p).WithTemplateValue(new LocalDate(0004, 1, 1))))
{
if (zone == null) zone = DateTimeZone.Utc;
// Find the current timestamp in the given zone, find the (naive) midnight timestamp, then put that into the same zone (and make it naive again)
// Should yield a <current *local @ zone* date> 12:00:00 AM.
var now = SystemClock.Instance.GetCurrentInstant().InZone(zone).LocalDateTime;
var midnight = now.Date.AtMidnight();
// First we try to parse the string as a relative time using the period parser
var relResult = ParsePeriod(str);
if (relResult != null)
{
// if we can, we just subtract that amount from the
return now.InZoneLeniently(zone).Minus(relResult.Value);
}
var timePatterns = new[]
{
"H:mm", // 4:30
"HH:mm", // 23:30
"H:mm:ss", // 4:30:29
"HH:mm:ss", // 23:30:29
"h tt", // 2 PM
"htt", // 2PM
"h:mm tt", // 4:30 PM
"h:mmtt", // 4:30PM
"h:mm:ss tt", // 4:30:29 PM
"h:mm:sstt", // 4:30:29PM
"hh:mm tt", // 11:30 PM
"hh:mmtt", // 11:30PM
"hh:mm:ss tt", // 11:30:29 PM
"hh:mm:sstt" // 11:30:29PM
};
var datePatterns = new[]
{
"MMM d yyyy", // Jan 1 2019
"MMM d, yyyy", // Jan 1, 2019
"MMMM d yyyy", // January 1 2019
"MMMM d, yyyy", // January 1, 2019
"yyyy-MM-dd", // 2019-01-01
"yyyy MM dd", // 2019 01 01
"yyyy/MM/dd", // 2019/01/01
"MMM d", // Jan 1
"MMMM d", // January 1
"MM-dd", // 01-01
"MM dd", // 01 01
"MM/dd" // 01-01
};
// First, we try all the timestamps that only have a time
foreach (var timePattern in timePatterns)
{
var pat = LocalDateTimePattern.CreateWithInvariantCulture(timePattern).WithTemplateValue(midnight);
var result = pat.Parse(str);
if (result.Success)
{
// If we have a successful match and we need a time in the past, we try to shove a future-time a date before
// Example: "4:30 pm" at 3:30 pm likely refers to 4:30 pm the previous day
var val = result.Value;
// If we need to nudge, we just subtract a day. This only occurs when we're parsing specifically *just time*, so
// we know we won't nudge it by more than a day since we use today's midnight timestamp as a date template.
// Since this is a naive datetime, this ensures we're actually moving by one calendar day even if
// DST changes occur, since they'll be resolved later wrt. the right side of the boundary
if (val > now && nudgeToPast) val = val.PlusDays(-1);
return val.InZoneLeniently(zone);
}
}
// Then we try specific date+time combinations, both date first and time first, with and without commas
foreach (var timePattern in timePatterns)
{
foreach (var datePattern in datePatterns)
{
foreach (var patternStr in new[]
{
$"{timePattern}, {datePattern}", $"{datePattern}, {timePattern}",
$"{timePattern} {datePattern}", $"{datePattern} {timePattern}"
})
{
var pattern = LocalDateTimePattern.CreateWithInvariantCulture(patternStr).WithTemplateValue(midnight);
var res = pattern.Parse(str);
if (res.Success) return res.Value.InZoneLeniently(zone);
}
}
}
// Finally, just date patterns, still using midnight as the template
foreach (var datePattern in datePatterns)
{
var pat = LocalDateTimePattern.CreateWithInvariantCulture(datePattern).WithTemplateValue(midnight);
var res = pat.Parse(str);
if (res.Success) return res.Value.InZoneLeniently(zone);
}
// Still haven't parsed something, we just give up lmao
return null;
var result = pattern.Parse(str);
if (result.Success) return result.Value;
}
return null;
}
    /// <summary>
    /// Parses a fuzzy, human-written date/time string into a zoned timestamp.
    /// Tries, in order: a relative period (via <see cref="ParsePeriod"/>, interpreted as "that long ago"),
    /// a bare time-of-day, a combined date+time in several orderings, and finally a bare date.
    /// </summary>
    /// <param name="str">The user-supplied string to parse.</param>
    /// <param name="nudgeToPast">When parsing a *time-only* string that lands in the future,
    /// shift the result back one calendar day (eg. "4:30 pm" said at 3:30 pm means the previous 4:30 pm).</param>
    /// <param name="zone">Time zone the string is interpreted in; defaults to UTC when null.</param>
    /// <returns>The parsed timestamp in <paramref name="zone"/>, or null if nothing matched.</returns>
    public static ZonedDateTime? ParseDateTime(string str, bool nudgeToPast = false, DateTimeZone zone = null)
    {
        if (zone == null) zone = DateTimeZone.Utc;

        // Find the current timestamp in the given zone, find the (naive) midnight timestamp, then put that into the same zone (and make it naive again)
        // Should yield a <current *local @ zone* date> 12:00:00 AM.
        var now = SystemClock.Instance.GetCurrentInstant().InZone(zone).LocalDateTime;
        var midnight = now.Date.AtMidnight();

        // First we try to parse the string as a relative time using the period parser
        var relResult = ParsePeriod(str);
        if (relResult != null)
            // if we can, we just subtract that amount from the current time (ie. "<period> ago")
            return now.InZoneLeniently(zone).Minus(relResult.Value);

        // Time-only candidate formats; order matters, the first successful parse wins
        var timePatterns = new[]
        {
            "H:mm", // 4:30
            "HH:mm", // 23:30
            "H:mm:ss", // 4:30:29
            "HH:mm:ss", // 23:30:29
            "h tt", // 2 PM
            "htt", // 2PM
            "h:mm tt", // 4:30 PM
            "h:mmtt", // 4:30PM
            "h:mm:ss tt", // 4:30:29 PM
            "h:mm:sstt", // 4:30:29PM
            "hh:mm tt", // 11:30 PM
            "hh:mmtt", // 11:30PM
            "hh:mm:ss tt", // 11:30:29 PM
            "hh:mm:sstt" // 11:30:29PM
        };

        // Date-only candidate formats (with and without a year component)
        var datePatterns = new[]
        {
            "MMM d yyyy", // Jan 1 2019
            "MMM d, yyyy", // Jan 1, 2019
            "MMMM d yyyy", // January 1 2019
            "MMMM d, yyyy", // January 1, 2019
            "yyyy-MM-dd", // 2019-01-01
            "yyyy MM dd", // 2019 01 01
            "yyyy/MM/dd", // 2019/01/01
            "MMM d", // Jan 1
            "MMMM d", // January 1
            "MM-dd", // 01-01
            "MM dd", // 01 01
            "MM/dd" // 01/01
        };

        // First, we try all the timestamps that only have a time
        foreach (var timePattern in timePatterns)
        {
            var pat = LocalDateTimePattern.CreateWithInvariantCulture(timePattern).WithTemplateValue(midnight);
            var result = pat.Parse(str);
            if (result.Success)
            {
                // If we have a successful match and we need a time in the past, we try to shove a future-time a date before
                // Example: "4:30 pm" at 3:30 pm likely refers to 4:30 pm the previous day
                var val = result.Value;

                // If we need to nudge, we just subtract a day. This only occurs when we're parsing specifically *just time*, so
                // we know we won't nudge it by more than a day since we use today's midnight timestamp as a date template.
                // Since this is a naive datetime, this ensures we're actually moving by one calendar day even if
                // DST changes occur, since they'll be resolved later wrt. the right side of the boundary
                if (val > now && nudgeToPast) val = val.PlusDays(-1);
                return val.InZoneLeniently(zone);
            }
        }

        // Then we try specific date+time combinations, both date first and time first, with and without commas
        foreach (var timePattern in timePatterns)
        foreach (var datePattern in datePatterns)
        foreach (var patternStr in new[]
        {
            $"{timePattern}, {datePattern}", $"{datePattern}, {timePattern}",
            $"{timePattern} {datePattern}", $"{datePattern} {timePattern}"
        })
        {
            var pattern = LocalDateTimePattern.CreateWithInvariantCulture(patternStr).WithTemplateValue(midnight);
            var res = pattern.Parse(str);
            if (res.Success) return res.Value.InZoneLeniently(zone);
        }

        // Finally, just date patterns, still using midnight as the template
        foreach (var datePattern in datePatterns)
        {
            var pat = LocalDateTimePattern.CreateWithInvariantCulture(datePattern).WithTemplateValue(midnight);
            var res = pat.Parse(str);
            if (res.Success) return res.Value.InZoneLeniently(zone);
        }

        // Still haven't parsed something, we just give up lmao
        return null;
    }
}

View File

@@ -1,15 +1,14 @@
namespace PluralKit.Core
namespace PluralKit.Core;
public static class Emojis
{
public static class Emojis
{
public static readonly string Warn = "\u26A0";
public static readonly string Success = "\u2705";
public static readonly string Error = "\u274C";
public static readonly string Note = "\U0001f4dd";
public static readonly string ThumbsUp = "\U0001f44d";
public static readonly string RedQuestion = "\u2753";
public static readonly string Bell = "\U0001F514";
public static readonly string Image = "\U0001F5BC";
public static readonly string Paperclip = "\U0001F4CE";
}
public static readonly string Warn = "\u26A0";
public static readonly string Success = "\u2705";
public static readonly string Error = "\u274C";
public static readonly string Note = "\U0001f4dd";
public static readonly string ThumbsUp = "\U0001f44d";
public static readonly string RedQuestion = "\u2753";
public static readonly string Bell = "\U0001F514";
public static readonly string Image = "\U0001F5BC";
public static readonly string Paperclip = "\U0001F4CE";
}

View File

@@ -1,78 +1,75 @@
using System;
using System.Collections.Concurrent;
using System.Threading;
using System.Threading.Tasks;
using NodaTime;
namespace PluralKit.Core
namespace PluralKit.Core;
public class HandlerQueue<T>
{
public class HandlerQueue<T>
private readonly ConcurrentDictionary<long, HandlerEntry> _handlers = new();
private long _seq;
public async Task<T> WaitFor(Func<T, bool> predicate, Duration? timeout = null, CancellationToken ct = default)
{
private long _seq;
private readonly ConcurrentDictionary<long, HandlerEntry> _handlers = new();
var timeoutTask = Task.Delay(timeout?.ToTimeSpan() ?? TimeSpan.FromMilliseconds(-1), ct);
var tcs = new TaskCompletionSource<T>();
public async Task<T> WaitFor(Func<T, bool> predicate, Duration? timeout = null, CancellationToken ct = default)
ValueTask Handler(T e)
{
var timeoutTask = Task.Delay(timeout?.ToTimeSpan() ?? TimeSpan.FromMilliseconds(-1), ct);
var tcs = new TaskCompletionSource<T>();
ValueTask Handler(T e)
{
tcs.SetResult(e);
return default;
}
var entry = new HandlerEntry { Predicate = predicate, Handler = Handler };
_handlers[Interlocked.Increment(ref _seq)] = entry;
// Wait for either the event task or the timeout task
// If the timeout task finishes first, raise, otherwise pass event through
try
{
var theTask = await Task.WhenAny(timeoutTask, tcs.Task);
if (theTask == timeoutTask)
throw new TimeoutException();
}
finally
{
entry.Remove();
}
return await tcs.Task;
tcs.SetResult(e);
return default;
}
public async Task<bool> TryHandle(T evt)
{
// First pass to clean up dead handlers
foreach (var (k, entry) in _handlers)
if (!entry.Alive)
_handlers.TryRemove(k, out _);
var entry = new HandlerEntry { Predicate = predicate, Handler = Handler };
_handlers[Interlocked.Increment(ref _seq)] = entry;
// Now iterate and try handling until we find a good one
var now = SystemClock.Instance.GetCurrentInstant();
foreach (var (_, entry) in _handlers)
// Wait for either the event task or the timeout task
// If the timeout task finishes first, raise, otherwise pass event through
try
{
var theTask = await Task.WhenAny(timeoutTask, tcs.Task);
if (theTask == timeoutTask)
throw new TimeoutException();
}
finally
{
entry.Remove();
}
return await tcs.Task;
}
public async Task<bool> TryHandle(T evt)
{
// First pass to clean up dead handlers
foreach (var (k, entry) in _handlers)
if (!entry.Alive)
_handlers.TryRemove(k, out _);
// Now iterate and try handling until we find a good one
var now = SystemClock.Instance.GetCurrentInstant();
foreach (var (_, entry) in _handlers)
if (entry.Expiry < now)
{
if (entry.Expiry < now) entry.Alive = false;
else if (entry.Alive && entry.Predicate(evt))
{
await entry.Handler(evt);
entry.Alive = false;
return true;
}
entry.Alive = false;
}
else if (entry.Alive && entry.Predicate(evt))
{
await entry.Handler(evt);
entry.Alive = false;
return true;
}
return false;
}
return false;
}
public class HandlerEntry
{
internal Func<T, ValueTask> Handler;
internal Func<T, bool> Predicate;
internal bool Alive = true;
internal Instant Expiry = SystemClock.Instance.GetCurrentInstant() + Duration.FromMinutes(30);
public class HandlerEntry
{
internal bool Alive = true;
internal Instant Expiry = SystemClock.Instance.GetCurrentInstant() + Duration.FromMinutes(30);
internal Func<T, ValueTask> Handler;
internal Func<T, bool> Predicate;
public void Remove() => Alive = false;
}
public void Remove() => Alive = false;
}
}

View File

@@ -1,5 +1,3 @@
using System.IO;
using Microsoft.Extensions.Configuration;
using Newtonsoft.Json;
@@ -7,27 +5,27 @@ using Newtonsoft.Json;
using NodaTime;
using NodaTime.Serialization.JsonNet;
namespace PluralKit.Core
namespace PluralKit.Core;
public static class InitUtils
{
public static class InitUtils
public static void InitStatic()
{
public static void InitStatic()
{
Database.InitStatic();
}
Database.InitStatic();
}
public static IConfigurationBuilder BuildConfiguration(string[] args) => new ConfigurationBuilder()
.SetBasePath(Directory.GetCurrentDirectory())
.AddJsonFile("pluralkit.conf", true)
.AddEnvironmentVariables()
.AddCommandLine(args);
public static IConfigurationBuilder BuildConfiguration(string[] args) => new ConfigurationBuilder()
.SetBasePath(Directory.GetCurrentDirectory())
.AddJsonFile("pluralkit.conf", true)
.AddEnvironmentVariables()
.AddCommandLine(args);
public static JsonSerializerSettings BuildSerializerSettings() => new JsonSerializerSettings().BuildSerializerSettings();
public static JsonSerializerSettings BuildSerializerSettings() =>
new JsonSerializerSettings().BuildSerializerSettings();
public static JsonSerializerSettings BuildSerializerSettings(this JsonSerializerSettings settings)
{
settings.ConfigureForNodaTime(DateTimeZoneProviders.Tzdb);
return settings;
}
public static JsonSerializerSettings BuildSerializerSettings(this JsonSerializerSettings settings)
{
settings.ConfigureForNodaTime(DateTimeZoneProviders.Tzdb);
return settings;
}
}

View File

@@ -1,22 +1,24 @@
namespace PluralKit.Core
{
public static class Limits
{
public static readonly int MaxProxyNameLength = 80;
namespace PluralKit.Core;
public static readonly int MaxSystemNameLength = 100;
public static readonly int MaxSystemTagLength = MaxProxyNameLength - 1;
public static readonly int MaxMemberCount = 1000;
public static readonly int MaxGroupCount = 250;
public static int WarnThreshold(int limit) => limit - 50;
public static readonly int MaxDescriptionLength = 1000;
public static readonly int MaxProxyTagLength = 100;
public static readonly int MaxSwitchMemberCount = 150;
public static readonly int MaxMemberNameLength = 100; // Fair bit larger than MaxProxyNameLength for bookkeeping
public static readonly int MaxGroupNameLength = 100;
public static readonly int MaxPronounsLength = 100;
public static readonly int MaxUriLength = 256; // May need to be set higher, I know there are URLs longer than this in prod (they can rehost, I guess...)
public static readonly long AvatarFileSizeLimit = 1024 * 1024;
public static readonly int AvatarDimensionLimit = 1000;
}
public static class Limits
{
public static readonly int MaxProxyNameLength = 80;
public static readonly int MaxSystemNameLength = 100;
public static readonly int MaxSystemTagLength = MaxProxyNameLength - 1;
public static readonly int MaxMemberCount = 1000;
public static readonly int MaxGroupCount = 250;
public static int WarnThreshold(int limit) => limit - 50;
public static readonly int MaxDescriptionLength = 1000;
public static readonly int MaxProxyTagLength = 100;
public static readonly int MaxSwitchMemberCount = 150;
public static readonly int MaxMemberNameLength = 100; // Fair bit larger than MaxProxyNameLength for bookkeeping
public static readonly int MaxGroupNameLength = 100;
public static readonly int MaxPronounsLength = 100;
// May need to be set higher, I know there are URLs longer than this in prod (they can rehost, I guess...)
public static readonly int MaxUriLength = 256;
public static readonly long AvatarFileSizeLimit = 1024 * 1024;
public static readonly int AvatarDimensionLimit = 1000;
}

View File

@@ -1,33 +1,33 @@
using System;
using System.Text.RegularExpressions;
namespace PluralKit.Core
namespace PluralKit.Core;
public static class MiscUtils
{
public static class MiscUtils
// discord mediaproxy URLs used to be stored directly in the database, so now we cleanup image urls before using them outside of proxying
private static readonly Regex MediaProxyUrl =
new(
@"^https?://media.discordapp.net/attachments/(\d{17,19})/(\d{17,19})/([^/\\&\?]+)\.(png|jpg|jpeg|webp)(\?.*)?$");
private static readonly string DiscordCdnReplacement = "https://cdn.discordapp.com/attachments/$1/$2/$3.$4";
public static bool TryMatchUri(string input, out Uri uri)
{
public static bool TryMatchUri(string input, out Uri uri)
try
{
try
{
uri = new Uri(input);
if (!uri.IsAbsoluteUri || (uri.Scheme != "http" && uri.Scheme != "https"))
return false;
}
catch (UriFormatException)
{
uri = null;
uri = new Uri(input);
if (!uri.IsAbsoluteUri || uri.Scheme != "http" && uri.Scheme != "https")
return false;
}
return true;
}
// discord mediaproxy URLs used to be stored directly in the database, so now we cleanup image urls before using them outside of proxying
private static readonly Regex MediaProxyUrl = new Regex(@"^https?://media.discordapp.net/attachments/(\d{17,19})/(\d{17,19})/([^/\\&\?]+)\.(png|jpg|jpeg|webp)(\?.*)?$");
private static readonly string DiscordCdnReplacement = "https://cdn.discordapp.com/attachments/$1/$2/$3.$4";
public static string? TryGetCleanCdnUrl(this string? url)
catch (UriFormatException)
{
return url == null ? null : MediaProxyUrl.Replace(url, DiscordCdnReplacement);
uri = null;
return false;
}
return true;
}
public static string? TryGetCleanCdnUrl(this string? url) =>
url == null ? null : MediaProxyUrl.Replace(url, DiscordCdnReplacement);
}

View File

@@ -1,93 +1,88 @@
using System;
using System.Collections.Generic;
using System.Security.Cryptography;
using System.Text;
using System.Text.RegularExpressions;
namespace PluralKit.Core
namespace PluralKit.Core;
public static class StringUtils
{
public static class StringUtils
public static string GenerateToken()
{
public static string GenerateToken()
// Results in a 64-byte Base64 string (no padding)
var buf = RandomNumberGenerator.GetBytes(48);
return Convert.ToBase64String(buf);
}
public static bool IsLongerThan(this string str, int length)
{
if (str != null) return str.Length > length;
return false;
}
public static string ExtractCountryFlag(string flag)
{
if (flag.Length != 4) return null;
try
{
var buf = new byte[48]; // Results in a 64-byte Base64 string (no padding)
new RNGCryptoServiceProvider().GetBytes(buf);
return Convert.ToBase64String(buf);
var cp1 = char.ConvertToUtf32(flag, 0);
var cp2 = char.ConvertToUtf32(flag, 2);
if (cp1 < 0x1F1E6 || cp1 > 0x1F1FF) return null;
if (cp2 < 0x1F1E6 || cp2 > 0x1F1FF) return null;
return $"{(char)(cp1 - 0x1F1E6 + 'A')}{(char)(cp2 - 0x1F1E6 + 'A')}";
}
public static bool IsLongerThan(this string str, int length)
catch (ArgumentException)
{
if (str != null) return str.Length > length;
return false;
}
public static string ExtractCountryFlag(string flag)
{
if (flag.Length != 4) return null;
try
{
var cp1 = char.ConvertToUtf32(flag, 0);
var cp2 = char.ConvertToUtf32(flag, 2);
if (cp1 < 0x1F1E6 || cp1 > 0x1F1FF) return null;
if (cp2 < 0x1F1E6 || cp2 > 0x1F1FF) return null;
return $"{(char)(cp1 - 0x1F1E6 + 'A')}{(char)(cp2 - 0x1F1E6 + 'A')}";
}
catch (ArgumentException)
{
return null;
}
}
public static string NullIfEmpty(this string input)
{
if (input == null) return null;
if (input.Trim().Length == 0) return null;
return input;
}
public static bool EmptyOrNull(this string input)
{
if (input == null) return true;
if (input.Trim().Length == 0) return true;
return false;
}
public static string NormalizeLineEndSpacing(this string input)
{
// iOS has a weird issue on embeds rendering newlines when there are spaces *just before* it
// so we remove 'em all :)
return Regex.Replace(input, " *\n", "\n");
}
public static IReadOnlyList<string> JoinPages(IEnumerable<string> input, int characterLimit) =>
JoinPages(input, _ => characterLimit);
public static IReadOnlyList<string> JoinPages(IEnumerable<string> input, Func<int, int> characterLimitByPage)
{
var output = new List<string>();
var buf = new StringBuilder();
foreach (var s in input)
{
var limit = characterLimitByPage.Invoke(output.Count);
// Would adding this string put us over the limit?
// (note: don't roll over if the buffer's already empty; this means an individual section is above the character limit. todo: truncate, then?)
if (buf.Length > 0 && buf.Length + s.Length > limit)
{
// If so, "roll over" (before adding the string to the buffer)
output.Add(buf.ToString());
buf.Clear();
}
buf.Append(s);
}
// We most likely have something left over, so add that in too
if (buf.Length > 0)
output.Add(buf.ToString());
return output;
return null;
}
}
public static string NullIfEmpty(this string input)
{
if (input == null) return null;
if (input.Trim().Length == 0) return null;
return input;
}
public static bool EmptyOrNull(this string input)
{
if (input == null) return true;
if (input.Trim().Length == 0) return true;
return false;
}
public static string NormalizeLineEndSpacing(this string input) =>
// iOS has a weird issue on embeds rendering newlines when there are spaces *just before* it
// so we remove 'em all :)
Regex.Replace(input, " *\n", "\n");
public static IReadOnlyList<string> JoinPages(IEnumerable<string> input, int characterLimit) =>
JoinPages(input, _ => characterLimit);
public static IReadOnlyList<string> JoinPages(IEnumerable<string> input, Func<int, int> characterLimitByPage)
{
var output = new List<string>();
var buf = new StringBuilder();
foreach (var s in input)
{
var limit = characterLimitByPage.Invoke(output.Count);
// Would adding this string put us over the limit?
// (note: don't roll over if the buffer's already empty; this means an individual section is above the character limit. todo: truncate, then?)
if (buf.Length > 0 && buf.Length + s.Length > limit)
{
// If so, "roll over" (before adding the string to the buffer)
output.Add(buf.ToString());
buf.Clear();
}
buf.Append(s);
}
// We most likely have something left over, so add that in too
if (buf.Length > 0)
output.Add(buf.ToString());
return output;
}
}