initttt
This commit is contained in:
7
packages/logic/core/array.utils.ts
Normal file
7
packages/logic/core/array.utils.ts
Normal file
@@ -0,0 +1,7 @@
|
||||
export function chunk<T>(arr: T[], size: number): T[][] {
|
||||
const result = [];
|
||||
for (let i = 0; i < arr.length; i += size) {
|
||||
result.push(arr.slice(i, i + size));
|
||||
}
|
||||
return result;
|
||||
}
|
||||
264
packages/logic/core/data/countries.ts
Normal file
264
packages/logic/core/data/countries.ts
Normal file
@@ -0,0 +1,264 @@
|
||||
/**
 * Static country list.
 *
 * `code` is a two-letter country code (ISO 3166-1 alpha-2 style; note the
 * list also contains non-ISO entries such as "XK" for Kosovo, which is a
 * user-assigned code). `id` is simply the 1-based position in this list
 * rendered as a string — it is NOT an ISO numeric country code.
 * Entries are ordered alphabetically by `name` (accented names such as
 * "Åland Islands" and "Réunion" sort per their code-point order here).
 */
export const COUNTRIES = [
  { id: "1", name: "Afghanistan", code: "AF" },
  { id: "2", name: "Albania", code: "AL" },
  { id: "3", name: "Algeria", code: "DZ" },
  { id: "4", name: "American Samoa", code: "AS" },
  { id: "5", name: "Andorra", code: "AD" },
  { id: "6", name: "Angola", code: "AO" },
  { id: "7", name: "Anguilla", code: "AI" },
  { id: "8", name: "Antarctica", code: "AQ" },
  { id: "9", name: "Antigua and Barbuda", code: "AG" },
  { id: "10", name: "Argentina", code: "AR" },
  { id: "11", name: "Armenia", code: "AM" },
  { id: "12", name: "Aruba", code: "AW" },
  { id: "13", name: "Australia", code: "AU" },
  { id: "14", name: "Austria", code: "AT" },
  { id: "15", name: "Azerbaijan", code: "AZ" },
  { id: "16", name: "Bahamas", code: "BS" },
  { id: "17", name: "Bahrain", code: "BH" },
  { id: "18", name: "Bangladesh", code: "BD" },
  { id: "19", name: "Barbados", code: "BB" },
  { id: "20", name: "Belarus", code: "BY" },
  { id: "21", name: "Belgium", code: "BE" },
  { id: "22", name: "Belize", code: "BZ" },
  { id: "23", name: "Benin", code: "BJ" },
  { id: "24", name: "Bermuda", code: "BM" },
  { id: "25", name: "Bhutan", code: "BT" },
  { id: "26", name: "Bolivia", code: "BO" },
  { id: "27", name: "Bosnia and Herzegovina", code: "BA" },
  { id: "28", name: "Botswana", code: "BW" },
  { id: "29", name: "Bouvet Island", code: "BV" },
  { id: "30", name: "Brazil", code: "BR" },
  { id: "31", name: "British Indian Ocean Territory", code: "IO" },
  { id: "32", name: "British Virgin Islands", code: "VG" },
  { id: "33", name: "Brunei", code: "BN" },
  { id: "34", name: "Bulgaria", code: "BG" },
  { id: "35", name: "Burkina Faso", code: "BF" },
  { id: "36", name: "Burundi", code: "BI" },
  { id: "37", name: "Cambodia", code: "KH" },
  { id: "38", name: "Cameroon", code: "CM" },
  { id: "39", name: "Canada", code: "CA" },
  { id: "40", name: "Cape Verde", code: "CV" },
  { id: "41", name: "Caribbean Netherlands", code: "BQ" },
  { id: "42", name: "Cayman Islands", code: "KY" },
  { id: "43", name: "Central African Republic", code: "CF" },
  { id: "44", name: "Chad", code: "TD" },
  { id: "45", name: "Chile", code: "CL" },
  { id: "46", name: "China", code: "CN" },
  { id: "47", name: "Christmas Island", code: "CX" },
  { id: "48", name: "Cocos (Keeling) Islands", code: "CC" },
  { id: "49", name: "Colombia", code: "CO" },
  { id: "50", name: "Comoros", code: "KM" },
  { id: "51", name: "Cook Islands", code: "CK" },
  { id: "52", name: "Costa Rica", code: "CR" },
  { id: "53", name: "Croatia", code: "HR" },
  { id: "54", name: "Cuba", code: "CU" },
  { id: "55", name: "Curaçao", code: "CW" },
  { id: "56", name: "Cyprus", code: "CY" },
  { id: "57", name: "Czechia", code: "CZ" },
  { id: "58", name: "DR Congo", code: "CD" },
  { id: "59", name: "Denmark", code: "DK" },
  { id: "60", name: "Djibouti", code: "DJ" },
  { id: "61", name: "Dominica", code: "DM" },
  { id: "62", name: "Dominican Republic", code: "DO" },
  { id: "63", name: "Ecuador", code: "EC" },
  { id: "64", name: "Egypt", code: "EG" },
  { id: "65", name: "El Salvador", code: "SV" },
  { id: "66", name: "Equatorial Guinea", code: "GQ" },
  { id: "67", name: "Eritrea", code: "ER" },
  { id: "68", name: "Estonia", code: "EE" },
  { id: "69", name: "Eswatini", code: "SZ" },
  { id: "70", name: "Ethiopia", code: "ET" },
  { id: "71", name: "Falkland Islands", code: "FK" },
  { id: "72", name: "Faroe Islands", code: "FO" },
  { id: "73", name: "Fiji", code: "FJ" },
  { id: "74", name: "Finland", code: "FI" },
  { id: "75", name: "France", code: "FR" },
  { id: "76", name: "French Guiana", code: "GF" },
  { id: "77", name: "French Polynesia", code: "PF" },
  { id: "78", name: "French Southern and Antarctic Lands", code: "TF" },
  { id: "79", name: "Gabon", code: "GA" },
  { id: "80", name: "Gambia", code: "GM" },
  { id: "81", name: "Georgia", code: "GE" },
  { id: "82", name: "Germany", code: "DE" },
  { id: "83", name: "Ghana", code: "GH" },
  { id: "84", name: "Gibraltar", code: "GI" },
  { id: "85", name: "Greece", code: "GR" },
  { id: "86", name: "Greenland", code: "GL" },
  { id: "87", name: "Grenada", code: "GD" },
  { id: "88", name: "Guadeloupe", code: "GP" },
  { id: "89", name: "Guam", code: "GU" },
  { id: "90", name: "Guatemala", code: "GT" },
  { id: "91", name: "Guernsey", code: "GG" },
  { id: "92", name: "Guinea", code: "GN" },
  { id: "93", name: "Guinea-Bissau", code: "GW" },
  { id: "94", name: "Guyana", code: "GY" },
  { id: "95", name: "Haiti", code: "HT" },
  { id: "96", name: "Heard Island and McDonald Islands", code: "HM" },
  { id: "97", name: "Honduras", code: "HN" },
  { id: "98", name: "Hong Kong", code: "HK" },
  { id: "99", name: "Hungary", code: "HU" },
  { id: "100", name: "Iceland", code: "IS" },
  { id: "101", name: "India", code: "IN" },
  { id: "102", name: "Indonesia", code: "ID" },
  { id: "103", name: "Iran", code: "IR" },
  { id: "104", name: "Iraq", code: "IQ" },
  { id: "105", name: "Ireland", code: "IE" },
  { id: "106", name: "Isle of Man", code: "IM" },
  { id: "107", name: "Israel", code: "IL" },
  { id: "108", name: "Italy", code: "IT" },
  { id: "109", name: "Ivory Coast", code: "CI" },
  { id: "110", name: "Jamaica", code: "JM" },
  { id: "111", name: "Japan", code: "JP" },
  { id: "112", name: "Jersey", code: "JE" },
  { id: "113", name: "Jordan", code: "JO" },
  { id: "114", name: "Kazakhstan", code: "KZ" },
  { id: "115", name: "Kenya", code: "KE" },
  { id: "116", name: "Kiribati", code: "KI" },
  { id: "117", name: "Kosovo", code: "XK" },
  { id: "118", name: "Kuwait", code: "KW" },
  { id: "119", name: "Kyrgyzstan", code: "KG" },
  { id: "120", name: "Laos", code: "LA" },
  { id: "121", name: "Latvia", code: "LV" },
  { id: "122", name: "Lebanon", code: "LB" },
  { id: "123", name: "Lesotho", code: "LS" },
  { id: "124", name: "Liberia", code: "LR" },
  { id: "125", name: "Libya", code: "LY" },
  { id: "126", name: "Liechtenstein", code: "LI" },
  { id: "127", name: "Lithuania", code: "LT" },
  { id: "128", name: "Luxembourg", code: "LU" },
  { id: "129", name: "Macau", code: "MO" },
  { id: "130", name: "Madagascar", code: "MG" },
  { id: "131", name: "Malawi", code: "MW" },
  { id: "132", name: "Malaysia", code: "MY" },
  { id: "133", name: "Maldives", code: "MV" },
  { id: "134", name: "Mali", code: "ML" },
  { id: "135", name: "Malta", code: "MT" },
  { id: "136", name: "Marshall Islands", code: "MH" },
  { id: "137", name: "Martinique", code: "MQ" },
  { id: "138", name: "Mauritania", code: "MR" },
  { id: "139", name: "Mauritius", code: "MU" },
  { id: "140", name: "Mayotte", code: "YT" },
  { id: "141", name: "Mexico", code: "MX" },
  { id: "142", name: "Micronesia", code: "FM" },
  { id: "143", name: "Moldova", code: "MD" },
  { id: "144", name: "Monaco", code: "MC" },
  { id: "145", name: "Mongolia", code: "MN" },
  { id: "146", name: "Montenegro", code: "ME" },
  { id: "147", name: "Montserrat", code: "MS" },
  { id: "148", name: "Morocco", code: "MA" },
  { id: "149", name: "Mozambique", code: "MZ" },
  { id: "150", name: "Myanmar", code: "MM" },
  { id: "151", name: "Namibia", code: "NA" },
  { id: "152", name: "Nauru", code: "NR" },
  { id: "153", name: "Nepal", code: "NP" },
  { id: "154", name: "Netherlands", code: "NL" },
  { id: "155", name: "New Caledonia", code: "NC" },
  { id: "156", name: "New Zealand", code: "NZ" },
  { id: "157", name: "Nicaragua", code: "NI" },
  { id: "158", name: "Niger", code: "NE" },
  { id: "159", name: "Nigeria", code: "NG" },
  { id: "160", name: "Niue", code: "NU" },
  { id: "161", name: "Norfolk Island", code: "NF" },
  { id: "162", name: "North Korea", code: "KP" },
  { id: "163", name: "North Macedonia", code: "MK" },
  { id: "164", name: "Northern Mariana Islands", code: "MP" },
  { id: "165", name: "Norway", code: "NO" },
  { id: "166", name: "Oman", code: "OM" },
  { id: "167", name: "Pakistan", code: "PK" },
  { id: "168", name: "Palau", code: "PW" },
  { id: "169", name: "Palestine", code: "PS" },
  { id: "170", name: "Panama", code: "PA" },
  { id: "171", name: "Papua New Guinea", code: "PG" },
  { id: "172", name: "Paraguay", code: "PY" },
  { id: "173", name: "Peru", code: "PE" },
  { id: "174", name: "Philippines", code: "PH" },
  { id: "175", name: "Pitcairn Islands", code: "PN" },
  { id: "176", name: "Poland", code: "PL" },
  { id: "177", name: "Portugal", code: "PT" },
  { id: "178", name: "Puerto Rico", code: "PR" },
  { id: "179", name: "Qatar", code: "QA" },
  { id: "180", name: "Republic of the Congo", code: "CG" },
  { id: "181", name: "Romania", code: "RO" },
  { id: "182", name: "Russia", code: "RU" },
  { id: "183", name: "Rwanda", code: "RW" },
  { id: "184", name: "Réunion", code: "RE" },
  { id: "185", name: "Saint Barthélemy", code: "BL" },
  {
    id: "186",
    name: "Saint Helena, Ascension and Tristan da Cunha",
    code: "SH",
  },
  { id: "187", name: "Saint Kitts and Nevis", code: "KN" },
  { id: "188", name: "Saint Lucia", code: "LC" },
  { id: "189", name: "Saint Martin", code: "MF" },
  { id: "190", name: "Saint Pierre and Miquelon", code: "PM" },
  { id: "191", name: "Saint Vincent and the Grenadines", code: "VC" },
  { id: "192", name: "Samoa", code: "WS" },
  { id: "193", name: "San Marino", code: "SM" },
  { id: "194", name: "Saudi Arabia", code: "SA" },
  { id: "195", name: "Senegal", code: "SN" },
  { id: "196", name: "Serbia", code: "RS" },
  { id: "197", name: "Seychelles", code: "SC" },
  { id: "198", name: "Sierra Leone", code: "SL" },
  { id: "199", name: "Singapore", code: "SG" },
  { id: "200", name: "Sint Maarten", code: "SX" },
  { id: "201", name: "Slovakia", code: "SK" },
  { id: "202", name: "Slovenia", code: "SI" },
  { id: "203", name: "Solomon Islands", code: "SB" },
  { id: "204", name: "Somalia", code: "SO" },
  { id: "205", name: "South Africa", code: "ZA" },
  { id: "206", name: "South Georgia", code: "GS" },
  { id: "207", name: "South Korea", code: "KR" },
  { id: "208", name: "South Sudan", code: "SS" },
  { id: "209", name: "Spain", code: "ES" },
  { id: "210", name: "Sri Lanka", code: "LK" },
  { id: "211", name: "Sudan", code: "SD" },
  { id: "212", name: "Suriname", code: "SR" },
  { id: "213", name: "Svalbard and Jan Mayen", code: "SJ" },
  { id: "214", name: "Sweden", code: "SE" },
  { id: "215", name: "Switzerland", code: "CH" },
  { id: "216", name: "Syria", code: "SY" },
  { id: "217", name: "São Tomé and Príncipe", code: "ST" },
  { id: "218", name: "Taiwan", code: "TW" },
  { id: "219", name: "Tajikistan", code: "TJ" },
  { id: "220", name: "Tanzania", code: "TZ" },
  { id: "221", name: "Thailand", code: "TH" },
  { id: "222", name: "Timor-Leste", code: "TL" },
  { id: "223", name: "Togo", code: "TG" },
  { id: "224", name: "Tokelau", code: "TK" },
  { id: "225", name: "Tonga", code: "TO" },
  { id: "226", name: "Trinidad and Tobago", code: "TT" },
  { id: "227", name: "Tunisia", code: "TN" },
  { id: "228", name: "Turkey", code: "TR" },
  { id: "229", name: "Turkmenistan", code: "TM" },
  { id: "230", name: "Turks and Caicos Islands", code: "TC" },
  { id: "231", name: "Tuvalu", code: "TV" },
  { id: "232", name: "Uganda", code: "UG" },
  { id: "233", name: "Ukraine", code: "UA" },
  { id: "234", name: "United Arab Emirates", code: "AE" },
  { id: "235", name: "United Kingdom", code: "GB" },
  { id: "236", name: "United States", code: "US" },
  { id: "237", name: "United States Minor Outlying Islands", code: "UM" },
  { id: "238", name: "United States Virgin Islands", code: "VI" },
  { id: "239", name: "Uruguay", code: "UY" },
  { id: "240", name: "Uzbekistan", code: "UZ" },
  { id: "241", name: "Vanuatu", code: "VU" },
  { id: "242", name: "Vatican City", code: "VA" },
  { id: "243", name: "Venezuela", code: "VE" },
  { id: "244", name: "Vietnam", code: "VN" },
  { id: "245", name: "Wallis and Futuna", code: "WF" },
  { id: "246", name: "Western Sahara", code: "EH" },
  { id: "247", name: "Yemen", code: "YE" },
  { id: "248", name: "Zambia", code: "ZM" },
  { id: "249", name: "Zimbabwe", code: "ZW" },
  { id: "250", name: "Åland Islands", code: "AX" },
];
|
||||
|
||||
export const COUNTRIES_SELECT = COUNTRIES.map((c) => {
|
||||
return {
|
||||
id: c.id,
|
||||
label: `${c.code} (${c.name})`,
|
||||
value: c.name.toLowerCase(),
|
||||
};
|
||||
});
|
||||
1227
packages/logic/core/data/phonecc.ts
Normal file
1227
packages/logic/core/data/phonecc.ts
Normal file
File diff suppressed because it is too large
Load Diff
83
packages/logic/core/date.utils.ts
Normal file
83
packages/logic/core/date.utils.ts
Normal file
@@ -0,0 +1,83 @@
|
||||
import type { CalendarDate } from "@internationalized/date";
|
||||
|
||||
export function formatDuration(ms: number): string {
|
||||
const seconds = Math.floor(ms / 1000);
|
||||
if (seconds < 60) return `${seconds}s`;
|
||||
|
||||
const minutes = Math.floor(seconds / 60);
|
||||
const remainingSeconds = seconds % 60;
|
||||
return `${minutes}m ${remainingSeconds}s`;
|
||||
}
|
||||
|
||||
export function formatDateTimeFromIsoString(isoString: string): string {
|
||||
try {
|
||||
const date = new Date(isoString);
|
||||
return new Intl.DateTimeFormat("en-US", {
|
||||
dateStyle: "medium",
|
||||
timeStyle: "short",
|
||||
}).format(date);
|
||||
} catch (e) {
|
||||
return "Invalid date";
|
||||
}
|
||||
}
|
||||
|
||||
export function getJustDateString(d: Date): string {
|
||||
return d.toISOString().split("T")[0];
|
||||
}
|
||||
|
||||
export function formatDateTime(dateTimeStr: string) {
|
||||
const date = new Date(dateTimeStr);
|
||||
return {
|
||||
time: date.toLocaleTimeString("en-US", {
|
||||
hour: "2-digit",
|
||||
minute: "2-digit",
|
||||
hour12: false,
|
||||
}),
|
||||
date: date.toLocaleDateString("en-US", {
|
||||
weekday: "short",
|
||||
day: "2-digit",
|
||||
month: "short",
|
||||
}),
|
||||
};
|
||||
}
|
||||
|
||||
export function formatDate(dateStr: string) {
|
||||
return new Date(dateStr).toLocaleDateString("en-US", {
|
||||
weekday: "short",
|
||||
day: "2-digit",
|
||||
month: "short",
|
||||
});
|
||||
}
|
||||
|
||||
export function isTimestampMoreThan1MinAgo(ts: string): boolean {
|
||||
const lastPingedDate = new Date(ts);
|
||||
const now = new Date();
|
||||
const diff = now.getTime() - lastPingedDate.getTime();
|
||||
return diff > 60000;
|
||||
}
|
||||
|
||||
export function isTimestampOlderThan(ts: string, seconds: number): boolean {
|
||||
const lastPingedDate = new Date(ts);
|
||||
const now = new Date();
|
||||
const diff = now.getTime() - lastPingedDate.getTime();
|
||||
return diff > seconds * 1000;
|
||||
}
|
||||
|
||||
export function makeDateStringISO(ds: string): string {
|
||||
if (ds.includes("T")) {
|
||||
return `${ds.split("T")[0]}T00:00:00.000Z`;
|
||||
}
|
||||
return `${ds}T00:00:00.000Z`;
|
||||
}
|
||||
|
||||
export function parseCalDateToDateString(v: CalendarDate) {
|
||||
let month: string | number = v.month;
|
||||
if (month < 10) {
|
||||
month = `0${month}`;
|
||||
}
|
||||
let day: string | number = v.day;
|
||||
if (day < 10) {
|
||||
day = `0${day}`;
|
||||
}
|
||||
return `${v.year}-${month}-${day}`;
|
||||
}
|
||||
8
packages/logic/core/error.ts
Normal file
8
packages/logic/core/error.ts
Normal file
@@ -0,0 +1,8 @@
|
||||
/** Structured application error shape shared across this package. */
export type Err = {
  code: string; // machine-readable error identifier
  message: string; // short human-readable summary
  description: string; // longer explanation of the failure
  detail: string; // additional low-level detail text
  actionable?: boolean; // presumably: whether the caller/user can remedy it — confirm with consumers
  error?: any; // underlying cause, if any (NOTE(review): consider `unknown` over `any`)
};
|
||||
5
packages/logic/core/flow.execution.context.ts
Normal file
5
packages/logic/core/flow.execution.context.ts
Normal file
@@ -0,0 +1,5 @@
|
||||
/**
 * Identifiers carried through a flow execution; mapped to `flow.*`
 * span attributes by the observability helpers.
 */
export type FlowExecCtx = {
  flowId: string; // identifier of the flow being executed
  userId?: string; // optional user associated with the execution
  sessionId?: string; // optional session associated with the execution
};
|
||||
31
packages/logic/core/hash.utils.ts
Normal file
31
packages/logic/core/hash.utils.ts
Normal file
@@ -0,0 +1,31 @@
|
||||
import { argon2id, hash as argonHash, verify as argonVerify } from "argon2";
|
||||
|
||||
export async function hashString(target: string): Promise<string> {
|
||||
const salt = Buffer.from(crypto.getRandomValues(new Uint8Array(16))).toString(
|
||||
"hex",
|
||||
);
|
||||
const hash = await argonHash(target, {
|
||||
type: argon2id,
|
||||
salt: Buffer.from(salt, "hex"),
|
||||
hashLength: 32,
|
||||
timeCost: 3,
|
||||
memoryCost: 65536,
|
||||
parallelism: 1,
|
||||
});
|
||||
return hash;
|
||||
}
|
||||
|
||||
export async function verifyHash({
|
||||
hash,
|
||||
target,
|
||||
}: {
|
||||
hash: string;
|
||||
target: string;
|
||||
}): Promise<boolean> {
|
||||
try {
|
||||
const isValid = await argonVerify(hash, `${target}`);
|
||||
return isValid;
|
||||
} catch (err) {
|
||||
return false;
|
||||
}
|
||||
}
|
||||
88
packages/logic/core/http.telemetry.ts
Normal file
88
packages/logic/core/http.telemetry.ts
Normal file
@@ -0,0 +1,88 @@
|
||||
import { context, metrics, SpanStatusCode, trace } from "@opentelemetry/api";
|
||||
|
||||
/**
 * Minimal structural view of a request context — only the fields the
 * telemetry middleware reads: method/path before the handler runs,
 * response status after.
 */
type TelemetryContext = {
  req: {
    method: string;
    path: string;
  };
  res: {
    status: number;
  };
};
|
||||
|
||||
/**
 * Builds an HTTP middleware (context + `next` signature) that wraps every
 * request in an OpenTelemetry span and records three metrics: request
 * count, duration histogram (ms), and in-flight request gauge.
 *
 * NOTE(review): `c.req.path` is used as-is for both span name and metric
 * attributes — paths containing IDs create high label cardinality;
 * confirm this is acceptable for the metrics backend.
 */
export function createHttpTelemetryMiddleware(serviceName: string) {
  const tracer = trace.getTracer(`${serviceName}.http`);
  const meter = metrics.getMeter(`${serviceName}.http`);

  // Monotonic counter: total requests handled.
  const requestCount = meter.createCounter(`${serviceName}.http.server.requests`, {
    description: `Total number of ${serviceName} HTTP requests`,
  });

  // Wall-clock duration of each request, in milliseconds.
  const requestDuration = meter.createHistogram(
    `${serviceName}.http.server.duration`,
    {
      description: `${serviceName} HTTP request duration`,
      unit: "ms",
    },
  );

  // Up/down counter tracking requests currently being processed.
  const activeRequests = meter.createUpDownCounter(
    `${serviceName}.http.server.active_requests`,
    {
      description: `Number of in-flight ${serviceName} HTTP requests`,
    },
  );

  return async (c: TelemetryContext, next: () => Promise<unknown>) => {
    const startedAt = performance.now();
    const method = c.req.method;
    const route = c.req.path;

    const span = tracer.startSpan(`http ${method} ${route}`, {
      attributes: {
        "http.request.method": method,
        "url.path": route,
      },
    });

    activeRequests.add(1, {
      "http.request.method": method,
      "url.path": route,
    });

    try {
      // Run the downstream handler with this span active so any spans it
      // starts become children of this one.
      await context.with(trace.setSpan(context.active(), span), next);

      span.setAttribute("http.response.status_code", c.res.status);
      // Only 5xx marks the span as an error; 4xx still counts as OK here.
      span.setStatus({
        code:
          c.res.status >= 500
            ? SpanStatusCode.ERROR
            : SpanStatusCode.OK,
      });
    } catch (error) {
      span.recordException(error as Error);
      span.setStatus({
        code: SpanStatusCode.ERROR,
        message: `Unhandled ${serviceName} request error`,
      });
      // Re-throw so the framework's own error handling still runs.
      throw error;
    } finally {
      // Metrics and span end run for both success and error paths.
      const durationMs = performance.now() - startedAt;
      const attrs = {
        "http.request.method": method,
        "http.response.status_code": c.res.status,
        "url.path": route,
      };

      requestCount.add(1, attrs);
      requestDuration.record(durationMs, attrs);
      activeRequests.add(-1, {
        "http.request.method": method,
        "url.path": route,
      });

      span.end();
    }
  };
}
|
||||
80
packages/logic/core/observability.ts
Normal file
80
packages/logic/core/observability.ts
Normal file
@@ -0,0 +1,80 @@
|
||||
import { SpanStatusCode, trace, type Attributes } from "@opentelemetry/api";
|
||||
import type { FlowExecCtx } from "./flow.execution.context";
|
||||
import { ResultAsync } from "neverthrow";
|
||||
|
||||
const tracer = trace.getTracer("@pkg/logic");
|
||||
|
||||
/** Options shared by the span helpers below. */
type BaseSpanOptions = {
  name: string; // span name
  fctx?: FlowExecCtx; // flow context, mapped to flow.* attributes
  attributes?: Attributes; // extra attributes; override flow.* on key clash
};
|
||||
|
||||
function spanAttributes(
|
||||
fctx?: FlowExecCtx,
|
||||
attributes?: Attributes,
|
||||
): Attributes | undefined {
|
||||
const flowAttrs: Attributes = {};
|
||||
if (fctx?.flowId) flowAttrs["flow.id"] = fctx.flowId;
|
||||
if (fctx?.userId) flowAttrs["flow.user_id"] = fctx.userId;
|
||||
if (fctx?.sessionId) flowAttrs["flow.session_id"] = fctx.sessionId;
|
||||
|
||||
if (!attributes && Object.keys(flowAttrs).length === 0) {
|
||||
return undefined;
|
||||
}
|
||||
return { ...flowAttrs, ...(attributes ?? {}) };
|
||||
}
|
||||
|
||||
export async function withFlowSpan<T>({
|
||||
name,
|
||||
fctx,
|
||||
attributes,
|
||||
fn,
|
||||
}: BaseSpanOptions & {
|
||||
fn: () => Promise<T>;
|
||||
}): Promise<T> {
|
||||
return tracer.startActiveSpan(
|
||||
name,
|
||||
{ attributes: spanAttributes(fctx, attributes) },
|
||||
async (span) => {
|
||||
try {
|
||||
const result = await fn();
|
||||
span.setStatus({ code: SpanStatusCode.OK });
|
||||
return result;
|
||||
} catch (error) {
|
||||
span.recordException(error as Error);
|
||||
span.setStatus({
|
||||
code: SpanStatusCode.ERROR,
|
||||
message:
|
||||
error instanceof Error ? error.message : String(error),
|
||||
});
|
||||
throw error;
|
||||
} finally {
|
||||
span.end();
|
||||
}
|
||||
},
|
||||
);
|
||||
}
|
||||
|
||||
/**
 * Wraps a `ResultAsync`-producing `fn` in a flow span.
 *
 * The Ok value resolves the inner promise; an Err value is turned into a
 * promise rejection via `match`, so `withFlowSpan` records it as a span
 * failure. `fromPromise` then folds the rejection back into an Err, giving
 * back a `ResultAsync<T, E>` with the same success/failure semantics.
 *
 * NOTE(review): `(error) => error as E` is an unchecked cast — a genuine
 * exception thrown inside `fn` (not an Err value) would also be cast to E.
 */
export function traceResultAsync<T, E>({
  name,
  fctx,
  attributes,
  fn,
}: BaseSpanOptions & {
  fn: () => ResultAsync<T, E>;
}): ResultAsync<T, E> {
  return ResultAsync.fromPromise(
    withFlowSpan({
      name,
      fctx,
      attributes,
      fn: async () =>
        fn().match(
          (value) => value,
          // Reject so the span (and fromPromise) see the Err as a failure.
          (error) => Promise.reject(error),
        ),
    }),
    (error) => error as E,
  );
}
|
||||
12
packages/logic/core/pagination.utils.ts
Normal file
12
packages/logic/core/pagination.utils.ts
Normal file
@@ -0,0 +1,12 @@
|
||||
import * as v from "valibot";
|
||||
|
||||
/** Valibot schema for pagination parameters and result metadata. */
export const paginationModel = v.object({
  cursor: v.optional(v.string()), // opaque pagination cursor
  limit: v.pipe(v.number(), v.integer(), v.maxValue(100)), // page size, capped at 100
  asc: v.optional(v.boolean(), true), // sort direction; defaults to ascending
  totalItemCount: v.optional(v.pipe(v.number(), v.integer()), 0), // defaults to 0
  totalPages: v.pipe(v.number(), v.integer()),
  page: v.pipe(v.number(), v.integer()),
});

/** Output type inferred from `paginationModel` (defaults applied). */
export type PaginationModel = v.InferOutput<typeof paginationModel>;
|
||||
40
packages/logic/core/rate.limiter.ts
Normal file
40
packages/logic/core/rate.limiter.ts
Normal file
@@ -0,0 +1,40 @@
|
||||
import { logger } from "@pkg/logger";
|
||||
|
||||
export class RateLimiter {
|
||||
private requestTimestamps: number[] = [];
|
||||
private readonly callsPerMinute: number;
|
||||
|
||||
constructor(callsPerMinute: number = 60) {
|
||||
this.callsPerMinute = Math.min(callsPerMinute, 60);
|
||||
}
|
||||
|
||||
async checkRateLimit(): Promise<void> {
|
||||
const currentTime = Date.now();
|
||||
const oneMinuteAgo = currentTime - 60000; // 60 seconds in milliseconds
|
||||
|
||||
// Remove timestamps older than 1 minute
|
||||
this.requestTimestamps = this.requestTimestamps.filter(
|
||||
(timestamp) => timestamp > oneMinuteAgo,
|
||||
);
|
||||
|
||||
// If we're approaching the limit, wait until we have capacity
|
||||
if (this.requestTimestamps.length >= this.callsPerMinute) {
|
||||
const oldestRequest = this.requestTimestamps[0];
|
||||
const waitTime = oldestRequest + 60000 - currentTime;
|
||||
|
||||
if (waitTime > 0) {
|
||||
logger.warn(
|
||||
`Rate limit approaching (${this.requestTimestamps.length} requests in last minute). Sleeping for ${waitTime}ms`,
|
||||
);
|
||||
await new Promise((resolve) => setTimeout(resolve, waitTime));
|
||||
// After waiting, some timestamps may have expired
|
||||
this.requestTimestamps = this.requestTimestamps.filter(
|
||||
(timestamp) => timestamp > Date.now() - 60000,
|
||||
);
|
||||
}
|
||||
}
|
||||
|
||||
// Add current request to timestamps
|
||||
this.requestTimestamps.push(Date.now());
|
||||
}
|
||||
}
|
||||
1
packages/logic/core/settings.ts
Normal file
1
packages/logic/core/settings.ts
Normal file
@@ -0,0 +1 @@
|
||||
export { getSetting, settings } from "@pkg/settings";
|
||||
106
packages/logic/core/string.utils/index.ts
Normal file
106
packages/logic/core/string.utils/index.ts
Normal file
@@ -0,0 +1,106 @@
|
||||
import * as v from "valibot";
|
||||
|
||||
export function capitalize(input: string, firstOfAllWords?: boolean): string {
|
||||
// capitalize first letter of input
|
||||
if (!firstOfAllWords) {
|
||||
return input.charAt(0).toUpperCase() + input.slice(1);
|
||||
}
|
||||
let out = "";
|
||||
for (const word of input.split(" ")) {
|
||||
out += word.charAt(0).toUpperCase() + word.slice(1) + " ";
|
||||
}
|
||||
return out.slice(0, -1);
|
||||
}
|
||||
|
||||
export function camelToSpacedPascal(input: string): string {
|
||||
let result = "";
|
||||
let previousChar = "";
|
||||
for (const char of input) {
|
||||
if (char === char.toUpperCase() && previousChar !== " ") {
|
||||
result += " ";
|
||||
}
|
||||
result += char;
|
||||
previousChar = char;
|
||||
}
|
||||
return result.charAt(0).toUpperCase() + result.slice(1);
|
||||
}
|
||||
|
||||
export function snakeToCamel(input: string): string {
|
||||
if (!input) {
|
||||
return input;
|
||||
}
|
||||
// also account for numbers and kebab-case
|
||||
const splits = input.split(/[-_]/);
|
||||
let result = splits[0];
|
||||
for (const split of splits.slice(1)) {
|
||||
result += capitalize(split, true);
|
||||
}
|
||||
return result ?? "";
|
||||
}
|
||||
|
||||
/** Converts snake_case/kebab-case to spaced Pascal, e.g. "foo_bar" -> "Foo Bar". */
export function snakeToSpacedPascal(input: string): string {
  return camelToSpacedPascal(snakeToCamel(input));
}
|
||||
|
||||
export function spacedPascalToSnake(input: string): string {
|
||||
return input.split(" ").join("_").toLowerCase();
|
||||
}
|
||||
|
||||
export function convertDashedLowerToTitleCase(input: string): string {
|
||||
return input
|
||||
.split("-")
|
||||
.map(
|
||||
(word) =>
|
||||
word.charAt(0).toUpperCase() + word.slice(1).toLowerCase(),
|
||||
)
|
||||
.join(" "); // Join the words with a space
|
||||
}
|
||||
|
||||
export function encodeCursor<T>(cursor: T): string {
|
||||
try {
|
||||
// Convert the object to a JSON string
|
||||
const jsonString = JSON.stringify(cursor);
|
||||
// Convert to UTF-8 bytes, then base64
|
||||
return btoa(
|
||||
encodeURIComponent(jsonString).replace(/%([0-9A-F]{2})/g, (_, p1) =>
|
||||
String.fromCharCode(parseInt(p1, 16)),
|
||||
),
|
||||
);
|
||||
} catch (error) {
|
||||
console.error("Error encoding cursor:", error);
|
||||
throw new Error("Failed to encode cursor");
|
||||
}
|
||||
}
|
||||
|
||||
export function decodeCursor<T>(
|
||||
cursor: string,
|
||||
parser: v.BaseSchema<any, T, any>,
|
||||
) {
|
||||
try {
|
||||
// Decode base64 back to UTF-8 string
|
||||
const decoded = decodeURIComponent(
|
||||
Array.prototype.map
|
||||
.call(atob(cursor), (c) => {
|
||||
return (
|
||||
"%" + ("00" + c.charCodeAt(0).toString(16)).slice(-2)
|
||||
);
|
||||
})
|
||||
.join(""),
|
||||
);
|
||||
// Parse back to object
|
||||
const parsedData = JSON.parse(decoded);
|
||||
const result = v.safeParse(parser, parsedData);
|
||||
return result.success
|
||||
? { success: true, data: result.output as T }
|
||||
: {
|
||||
success: false,
|
||||
error: new Error(
|
||||
result.issues.map((i) => i.message).join(", "),
|
||||
),
|
||||
data: undefined,
|
||||
};
|
||||
} catch (error) {
|
||||
console.error("Error decoding cursor:", error);
|
||||
return { error: new Error("Failed to decode cursor"), data: undefined };
|
||||
}
|
||||
}
|
||||
555
packages/logic/core/string.utils/sequence.matcher.ts
Normal file
555
packages/logic/core/string.utils/sequence.matcher.ts
Normal file
@@ -0,0 +1,555 @@
|
||||
/**
|
||||
* Similar to Python's difflib.SequenceMatcher
|
||||
*
|
||||
* A flexible class for comparing pairs of sequences of any type.
|
||||
* Uses the Ratcliff-Obershelp algorithm with "gestalt pattern matching"
|
||||
* to find the longest contiguous matching subsequences.
|
||||
*/
|
||||
|
||||
/** A contiguous block of matching elements between the two sequences. */
export interface Match {
  /** Starting position in sequence a */
  a: number;
  /** Starting position in sequence b */
  b: number;
  /** Length of the matching block */
  size: number;
}

/** Edit-operation tags (mirrors Python difflib opcode tags). */
export type OpCode = "replace" | "delete" | "insert" | "equal";

/** One edit operation: apply `tag` to a[i1:i2] and b[j1:j2] (end-exclusive). */
export interface OpCodeTuple {
  /** Operation type */
  tag: OpCode;
  /** Start index in sequence a */
  i1: number;
  /** End index in sequence a */
  i2: number;
  /** Start index in sequence b */
  j1: number;
  /** End index in sequence b */
  j2: number;
}

/** Predicate marking elements to be treated as junk (ignored when matching). */
export type JunkFunction<T> = (element: T) => boolean;
|
||||
|
||||
/**
 * Generic port of Python's difflib.SequenceMatcher for arbitrary element
 * types compared with strict equality (`===`).
 *
 * Compares sequence `a` against sequence `b` and reports the longest match,
 * all matching blocks, edit opcodes, and similarity ratios. Lookup
 * structures are built for `b` only, so callers comparing one sequence
 * against many candidates should keep `b` fixed and swap `a` via setSeq1()
 * — setSeq2() is the expensive call (it rebuilds b2j via chainB()).
 */
export class SequenceMatcher<T> {
  // Optional predicate marking elements that should be ignored as junk.
  private isjunk: JunkFunction<T> | null;
  // Left-hand sequence (stored as a defensive copy).
  private a: T[];
  // Right-hand sequence (stored as a defensive copy).
  private b: T[];
  // When true, very common elements in long b sequences are auto-junked.
  private autojunk: boolean;

  // Cached data structures for sequence b
  private bjunk: Set<T>; // elements of b flagged junk by isjunk
  private bpopular: Set<T>; // elements auto-junked by the popularity heuristic
  private b2j: Map<T, number[]>; // element -> ascending positions in b (non-junk only)

  // Cached results
  private fullbcount: Map<T, number> | null = null; // element counts of b, lazily built by quickRatio()
  private matchingBlocks: Match[] | null = null; // memoized getMatchingBlocks() result
  private opcodes: OpCodeTuple[] | null = null; // memoized getOpcodes() result

  constructor(
    isjunk: JunkFunction<T> | null = null,
    a: T[] = [],
    b: T[] = [],
    autojunk: boolean = true,
  ) {
    this.isjunk = isjunk;
    this.a = [];
    this.b = [];
    this.autojunk = autojunk;
    this.bjunk = new Set();
    this.bpopular = new Set();
    this.b2j = new Map();

    // Delegates to setSeq1/setSeq2 so all caching invariants hold from the start.
    this.setSeqs(a, b);
  }

  /**
   * Set both sequences to be compared
   */
  setSeqs(a: T[], b: T[]): void {
    this.setSeq1(a);
    this.setSeq2(b);
  }

  /**
   * Set the first sequence to be compared
   *
   * Cheap: only invalidates result caches; b's lookup tables are untouched.
   * The identity check only short-circuits when the exact same array
   * reference is passed again (this.a is a copy).
   */
  setSeq1(a: T[]): void {
    if (a === this.a) return;
    this.a = [...a];
    this.matchingBlocks = null;
    this.opcodes = null;
  }

  /**
   * Set the second sequence to be compared
   *
   * Expensive: also rebuilds the junk/popularity/position tables for b.
   */
  setSeq2(b: T[]): void {
    if (b === this.b) return;
    this.b = [...b];
    this.matchingBlocks = null;
    this.opcodes = null;
    this.fullbcount = null;
    this.chainB();
  }

  /**
   * Analyze sequence b and build lookup structures
   *
   * Populates bjunk (caller-defined junk), bpopular (auto-junk: elements
   * occurring in more than ~1% of a sequence of length >= 200), and b2j
   * (positions of every remaining element, in ascending order).
   */
  private chainB(): void {
    const b = this.b;
    this.bjunk = new Set();
    this.bpopular = new Set();
    this.b2j = new Map();

    // Count occurrences of each element
    const elementCounts = new Map<T, number>();
    for (const element of b) {
      elementCounts.set(element, (elementCounts.get(element) || 0) + 1);
    }

    // Determine junk and popular elements
    const n = b.length;
    const popularThreshold = Math.floor(n / 100) + 1; // > 1% of sequence

    for (const [element, count] of elementCounts) {
      if (this.isjunk && this.isjunk(element)) {
        this.bjunk.add(element);
      } else if (this.autojunk && n >= 200 && count > popularThreshold) {
        this.bpopular.add(element);
      }
    }

    // Build position mapping for non-junk, non-popular elements
    for (let i = 0; i < b.length; i++) {
      const element = b[i];
      if (!this.bjunk.has(element) && !this.bpopular.has(element)) {
        if (!this.b2j.has(element)) {
          this.b2j.set(element, []);
        }
        this.b2j.get(element)!.push(i);
      }
    }
  }

  /**
   * Find the longest matching block in a[alo:ahi] and b[blo:bhi]
   *
   * Dynamic programming over b positions: j2len maps a position j in b to
   * the length of the longest match ending at a[i-1]/b[j] from the
   * previous row. After the scan, the match is widened symmetrically —
   * first with equal non-junk elements, then with equal junk elements —
   * mirroring difflib's behavior so junk never anchors a match but can
   * extend one.
   *
   * @returns the match as { a: start-in-a, b: start-in-b, size }
   */
  findLongestMatch(
    alo: number = 0,
    ahi: number | null = null,
    blo: number = 0,
    bhi: number | null = null,
  ): Match {
    if (ahi === null) ahi = this.a.length;
    if (bhi === null) bhi = this.b.length;

    let besti = alo;
    let bestj = blo;
    let bestsize = 0;

    // Find all positions where a[i] appears in b
    const j2len = new Map<number, number>();

    for (let i = alo; i < ahi; i++) {
      const element = this.a[i];
      const positions = this.b2j.get(element) || [];
      const newj2len = new Map<number, number>();

      for (const j of positions) {
        if (j < blo) continue;
        // positions are ascending, so everything after bhi is out of range too
        if (j >= bhi) break;

        // extend the diagonal run ending at (i-1, j-1)
        const prevLen = j2len.get(j - 1) || 0;
        const k = prevLen + 1;
        newj2len.set(j, k);

        if (k > bestsize) {
          besti = i - k + 1;
          bestj = j - k + 1;
          bestsize = k;
        }
      }

      // roll the DP row forward: current row becomes the previous row
      j2len.clear();
      for (const [key, value] of newj2len) {
        j2len.set(key, value);
      }
    }

    // Extend match with junk elements
    // (actually: extend leftward over equal NON-junk elements first)
    while (
      besti > alo &&
      bestj > blo &&
      !this.isBJunk(this.b[bestj - 1]) &&
      this.elementsEqual(this.a[besti - 1], this.b[bestj - 1])
    ) {
      besti--;
      bestj--;
      bestsize++;
    }

    // extend rightward over equal non-junk elements
    while (
      besti + bestsize < ahi &&
      bestj + bestsize < bhi &&
      !this.isBJunk(this.b[bestj + bestsize]) &&
      this.elementsEqual(this.a[besti + bestsize], this.b[bestj + bestsize])
    ) {
      bestsize++;
    }

    // Extend match with junk elements at the beginning
    while (besti > alo && bestj > blo && this.isBJunk(this.b[bestj - 1])) {
      besti--;
      bestj--;
      bestsize++;
    }

    // Extend match with junk elements at the end
    while (
      besti + bestsize < ahi &&
      bestj + bestsize < bhi &&
      this.isBJunk(this.b[bestj + bestsize])
    ) {
      bestsize++;
    }

    return { a: besti, b: bestj, size: bestsize };
  }

  /**
   * Return list of non-overlapping matching blocks
   *
   * Blocks are in ascending order and terminated by a { size: 0 } sentinel
   * at (a.length, b.length). The result is memoized until either sequence
   * changes.
   */
  getMatchingBlocks(): Match[] {
    if (this.matchingBlocks !== null) {
      return this.matchingBlocks;
    }

    const matches: Match[] = [];
    this.getMatchingBlocksRecursive(
      0,
      this.a.length,
      0,
      this.b.length,
      matches,
    );

    // Add sentinel
    matches.push({ a: this.a.length, b: this.b.length, size: 0 });

    this.matchingBlocks = matches;
    return matches;
  }

  /**
   * Recursively find matching blocks
   *
   * Divide-and-conquer around the longest match: recurse on the region
   * before it, emit it, then recurse on the region after it. Because the
   * pre-region recursion runs before the push, matches land in ascending
   * order without a sort.
   */
  private getMatchingBlocksRecursive(
    alo: number,
    ahi: number,
    blo: number,
    bhi: number,
    matches: Match[],
  ): void {
    const match = this.findLongestMatch(alo, ahi, blo, bhi);

    if (match.size > 0) {
      // Recurse on the pieces before and after the match
      if (alo < match.a && blo < match.b) {
        this.getMatchingBlocksRecursive(
          alo,
          match.a,
          blo,
          match.b,
          matches,
        );
      }

      matches.push(match);

      if (match.a + match.size < ahi && match.b + match.size < bhi) {
        this.getMatchingBlocksRecursive(
          match.a + match.size,
          ahi,
          match.b + match.size,
          bhi,
          matches,
        );
      }
    }
  }

  /**
   * Return list of 5-tuples describing how to turn a into b
   *
   * Walks the matching blocks, emitting replace/delete/insert for the gaps
   * between consecutive matches and equal for the matches themselves.
   * Memoized until either sequence changes.
   */
  getOpcodes(): OpCodeTuple[] {
    if (this.opcodes !== null) {
      return this.opcodes;
    }

    let i = 0;
    let j = 0;
    const opcodes: OpCodeTuple[] = [];

    for (const match of this.getMatchingBlocks()) {
      let tag: OpCode = "equal";

      // classify the gap between the previous block's end and this match
      if (i < match.a && j < match.b) {
        tag = "replace";
      } else if (i < match.a) {
        tag = "delete";
      } else if (j < match.b) {
        tag = "insert";
      }

      if (tag !== "equal") {
        opcodes.push({
          tag,
          i1: i,
          i2: match.a,
          j1: j,
          j2: match.b,
        });
      }

      i = match.a + match.size;
      j = match.b + match.size;

      // Don't add the sentinel match
      if (match.size > 0) {
        opcodes.push({
          tag: "equal",
          i1: match.a,
          i2: i,
          j1: match.b,
          j2: j,
        });
      }
    }

    this.opcodes = opcodes;
    return opcodes;
  }

  /**
   * Return a measure of sequences' similarity (0.0-1.0)
   *
   * 2*M / T where M is total matched elements and T is the combined
   * length. Two empty sequences are defined as identical (1.0).
   */
  ratio(): number {
    const matches = this.getMatchingBlocks()
      .slice(0, -1) // Exclude sentinel
      .reduce((sum, match) => sum + match.size, 0);

    const total = this.a.length + this.b.length;
    return total === 0 ? 1.0 : (2.0 * matches) / total;
  }

  /**
   * Return an upper bound on ratio() relatively quickly
   *
   * Uses multiset intersection of element counts instead of running the
   * full matching algorithm; never smaller than ratio().
   */
  quickRatio(): number {
    if (this.fullbcount === null) {
      this.fullbcount = new Map();
      for (const element of this.b) {
        this.fullbcount.set(
          element,
          (this.fullbcount.get(element) || 0) + 1,
        );
      }
    }

    let matches = 0;
    // copy so repeated calls don't consume the cached counts
    const tempCounts = new Map(this.fullbcount);

    for (const element of this.a) {
      const count = tempCounts.get(element);
      if (count && count > 0) {
        matches++;
        tempCounts.set(element, count - 1);
      }
    }

    const total = this.a.length + this.b.length;
    return total === 0 ? 1.0 : (2.0 * matches) / total;
  }

  /**
   * Return an upper bound on ratio() very quickly
   *
   * Based only on the two lengths; never smaller than quickRatio().
   */
  realQuickRatio(): number {
    const total = this.a.length + this.b.length;
    return total === 0
      ? 1.0
      : (2.0 * Math.min(this.a.length, this.b.length)) / total;
  }

  /**
   * Check if element is junk in sequence b
   * (only caller-defined junk; popular elements are tracked separately)
   */
  private isBJunk(element: T): boolean {
    return this.bjunk.has(element);
  }

  /**
   * Check if two elements are equal
   * (strict reference/value equality; objects match only by identity)
   */
  private elementsEqual(a: T, b: T): boolean {
    return a === b;
  }
}
|
||||
|
||||
/**
|
||||
* Utility function to get close matches similar to Python's get_close_matches
|
||||
*/
|
||||
export function getCloseMatches<T>(
|
||||
word: T[],
|
||||
possibilities: T[][],
|
||||
n: number = 3,
|
||||
cutoff: number = 0.6,
|
||||
): T[][] {
|
||||
if (n <= 0) {
|
||||
throw new Error("n must be greater than 0");
|
||||
}
|
||||
|
||||
const matches: Array<{ sequence: T[]; ratio: number }> = [];
|
||||
|
||||
for (const possibility of possibilities) {
|
||||
const matcher = new SequenceMatcher(null, word, possibility);
|
||||
const ratio = matcher.ratio();
|
||||
|
||||
if (ratio >= cutoff) {
|
||||
matches.push({ sequence: possibility, ratio });
|
||||
}
|
||||
}
|
||||
|
||||
// Sort by ratio (descending) and take top n
|
||||
matches.sort((a, b) => b.ratio - a.ratio);
|
||||
return matches.slice(0, n).map((match) => match.sequence);
|
||||
}
|
||||
|
||||
/**
|
||||
* String-specific version of SequenceMatcher for character-by-character comparison.
|
||||
* This class treats strings as sequences of characters while providing a string-friendly API.
|
||||
*/
|
||||
export class StringSequenceMatcher {
|
||||
private matcher: SequenceMatcher<string>;
|
||||
|
||||
constructor(
|
||||
isjunk: JunkFunction<string> | null = null,
|
||||
a: string = "",
|
||||
b: string = "",
|
||||
autojunk: boolean = true,
|
||||
) {
|
||||
this.matcher = new SequenceMatcher(
|
||||
isjunk,
|
||||
Array.from(a),
|
||||
Array.from(b),
|
||||
autojunk,
|
||||
);
|
||||
}
|
||||
|
||||
/**
|
||||
* Set both sequences to be compared
|
||||
*/
|
||||
setSeqs(a: string, b: string): void {
|
||||
this.matcher.setSeqs(Array.from(a), Array.from(b));
|
||||
}
|
||||
|
||||
/**
|
||||
* Set the first sequence to be compared
|
||||
*/
|
||||
setSeq1(a: string): void {
|
||||
this.matcher.setSeq1(Array.from(a));
|
||||
}
|
||||
|
||||
/**
|
||||
* Set the second sequence to be compared
|
||||
*/
|
||||
setSeq2(b: string): void {
|
||||
this.matcher.setSeq2(Array.from(b));
|
||||
}
|
||||
|
||||
/**
|
||||
* Find the longest matching block in a[alo:ahi] and b[blo:bhi]
|
||||
*/
|
||||
findLongestMatch(
|
||||
alo: number = 0,
|
||||
ahi: number | null = null,
|
||||
blo: number = 0,
|
||||
bhi: number | null = null,
|
||||
): Match {
|
||||
return this.matcher.findLongestMatch(alo, ahi, blo, bhi);
|
||||
}
|
||||
|
||||
/**
|
||||
* Return list of non-overlapping matching blocks
|
||||
*/
|
||||
getMatchingBlocks(): Match[] {
|
||||
return this.matcher.getMatchingBlocks();
|
||||
}
|
||||
|
||||
/**
|
||||
* Return list of 5-tuples describing how to turn a into b
|
||||
*/
|
||||
getOpcodes(): OpCodeTuple[] {
|
||||
return this.matcher.getOpcodes();
|
||||
}
|
||||
|
||||
/**
|
||||
* Return a measure of sequences' similarity (0.0-1.0)
|
||||
*/
|
||||
ratio(): number {
|
||||
return this.matcher.ratio();
|
||||
}
|
||||
|
||||
/**
|
||||
* Return an upper bound on ratio() relatively quickly
|
||||
*/
|
||||
quickRatio(): number {
|
||||
return this.matcher.quickRatio();
|
||||
}
|
||||
|
||||
/**
|
||||
* Return an upper bound on ratio() very quickly
|
||||
*/
|
||||
realQuickRatio(): number {
|
||||
return this.matcher.realQuickRatio();
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Utility function for string similarity
|
||||
*/
|
||||
export function getStringSimilarity(a: string, b: string): number {
|
||||
const matcher = new StringSequenceMatcher(null, a, b);
|
||||
return matcher.ratio();
|
||||
}
|
||||
|
||||
/**
|
||||
* Get close string matches
|
||||
*/
|
||||
export function getCloseStringMatches(
|
||||
word: string,
|
||||
possibilities: string[],
|
||||
n: number = 3,
|
||||
cutoff: number = 0.6,
|
||||
): string[] {
|
||||
if (n <= 0) {
|
||||
throw new Error("n must be greater than 0");
|
||||
}
|
||||
|
||||
const matches: Array<{ string: string; ratio: number }> = [];
|
||||
|
||||
for (const possibility of possibilities) {
|
||||
const ratio = getStringSimilarity(word, possibility);
|
||||
|
||||
if (ratio >= cutoff) {
|
||||
matches.push({ string: possibility, ratio });
|
||||
}
|
||||
}
|
||||
|
||||
// Sort by ratio (descending) and take top n
|
||||
matches.sort((a, b) => b.ratio - a.ratio);
|
||||
return matches.slice(0, n).map((match) => match.string);
|
||||
}
|
||||
396
packages/logic/domains/2fa/controller.ts
Normal file
396
packages/logic/domains/2fa/controller.ts
Normal file
@@ -0,0 +1,396 @@
|
||||
import { errAsync, okAsync, ResultAsync } from "neverthrow";
|
||||
import { FlowExecCtx } from "@core/flow.execution.context";
|
||||
import { UserRepository } from "@domains/user/repository";
|
||||
import { getRedisInstance, Redis } from "@pkg/keystore";
|
||||
import { TwofaRepository } from "./repository";
|
||||
import { logDomainEvent } from "@pkg/logger";
|
||||
import { auth } from "../auth/config.base";
|
||||
import type { TwoFaSession } from "./data";
|
||||
import { User } from "@domains/user/data";
|
||||
import { settings } from "@core/settings";
|
||||
import { type Err } from "@pkg/result";
|
||||
import { twofaErrors } from "./errors";
|
||||
import { db } from "@pkg/db";
|
||||
|
||||
/**
 * Orchestrates all two-factor-authentication flows: setup, enable/disable,
 * backup codes, and code verification sessions.
 *
 * All operations return neverthrow ResultAsync chains; domain errors come
 * from `twofaErrors` and significant transitions are logged with
 * logDomainEvent. Persistence goes through TwofaRepository/UserRepository;
 * the Redis store tracks per-session "initial 2FA completed" flags.
 */
export class TwofaController {
  constructor(
    private twofaRepo: TwofaRepository,
    private userRepo: UserRepository,
    private store: Redis,
    private secret: string, // app-level 2FA secret passed to repo.setup()
  ) {}

  /** Check a TOTP code against a secret (delegates to the repository). */
  checkTotp(secret: string, code: string) {
    return this.twofaRepo.checkTotp(secret, code);
  }

  /**
   * Whether the user has a 2FA record. Repository errors are swallowed
   * and reported as "not enabled" (fail-open to false).
   */
  is2faEnabled(fctx: FlowExecCtx, userId: string) {
    return this.twofaRepo
      .getUsers2FAInfo(fctx, userId, true)
      .map((data) => !!data)
      .orElse(() => okAsync(false));
  }

  /**
   * Whether the user is banned. A failed lookup is logged and treated as
   * "not banned" — best-effort by design.
   */
  isUserBanned(fctx: FlowExecCtx, userId: string) {
    return this.userRepo.isUserBanned(fctx, userId).orElse((error) => {
      logDomainEvent({
        level: "error",
        event: "security.twofa.user_ban_check.failed",
        fctx,
        error,
        meta: { userId },
      });
      return okAsync(false);
    });
  }

  /**
   * Begin 2FA enrollment: refuses if already enabled, otherwise creates a
   * setup via the repository and returns the otpauth:// provisioning URI
   * (for the authenticator app QR code) plus the raw secret.
   */
  setup2FA(fctx: FlowExecCtx, user: User) {
    return this.is2faEnabled(fctx, user.id)
      .andThen((enabled) =>
        enabled
          ? errAsync(twofaErrors.alreadyEnabled(fctx))
          : this.twofaRepo.setup(fctx, user.id, this.secret),
      )
      .map((secret) => {
        const appName = settings.appName;
        const totpUri = `otpauth://totp/${appName}:${user.email}?secret=${secret}&issuer=${appName}`;
        return { totpURI: totpUri, secret };
      });
  }

  /**
   * Verify the first TOTP code and finish enabling 2FA.
   *
   * On success: revokes the user's other auth sessions and stamps
   * last-2FA-verified-at. On a bad code: logs a warning and resolves with
   * `false` (does NOT error). Duplicate enable attempts error with
   * alreadyEnabled. Start/end events are logged with duration.
   */
  verifyAndEnable2FA(
    fctx: FlowExecCtx,
    user: User,
    code: string,
    headers: Headers,
  ) {
    const startedAt = Date.now();
    logDomainEvent({
      event: "security.twofa.verify_and_enable.started",
      fctx,
      meta: { userId: user.id },
    });

    return this.is2faEnabled(fctx, user.id)
      .andThen((enabled) => {
        if (enabled) {
          logDomainEvent({
            level: "warn",
            event: "security.twofa.verify_and_enable.failed",
            fctx,
            durationMs: Date.now() - startedAt,
            error: {
              code: "ALREADY_ENABLED",
              message: "2FA already enabled",
            },
            meta: { userId: user.id },
          });
          return errAsync(twofaErrors.alreadyEnabled(fctx));
        }
        return okAsync(undefined);
      })
      .andThen(() =>
        this.twofaRepo.verifyAndEnable2FA(fctx, user.id, code),
      )
      .andThen((verified) => {
        if (verified) {
          // Security hardening: kill all other sessions once 2FA is on.
          return ResultAsync.combine([
            ResultAsync.fromPromise(
              auth.api.revokeOtherSessions({ headers }),
              () => twofaErrors.revokeSessionsFailed(fctx),
            ),
            this.userRepo.updateLastVerified2FaAtToNow(
              fctx,
              user.id,
            ),
          ]).map(() => {
            logDomainEvent({
              event: "security.twofa.verify_and_enable.succeeded",
              fctx,
              durationMs: Date.now() - startedAt,
              meta: { userId: user.id },
            });
            return true;
          });
        }
        logDomainEvent({
          level: "warn",
          event: "security.twofa.verify_and_enable.failed",
          fctx,
          durationMs: Date.now() - startedAt,
          error: {
            code: "INVALID_CODE",
            message: "2FA code verification failed",
          },
          meta: { userId: user.id },
        });
        return okAsync(verified);
      });
  }

  /**
   * Disable 2FA after re-verifying the current TOTP code.
   * Errors: notEnabled, invalidSetup (no secret on record), invalidCode.
   */
  disable(fctx: FlowExecCtx, user: User, code: string) {
    return this.is2faEnabled(fctx, user.id)
      .andThen((enabled) => {
        if (!enabled) {
          return errAsync(twofaErrors.notEnabled(fctx));
        }
        return okAsync(undefined);
      })
      .andThen(() => this.twofaRepo.get2FASecret(fctx, user.id))
      .andThen((secret) => {
        if (!secret) {
          return errAsync(twofaErrors.invalidSetup(fctx));
        }
        if (!this.checkTotp(secret, code)) {
          return errAsync(twofaErrors.invalidCode(fctx));
        }
        return okAsync(undefined);
      })
      .andThen(() => this.twofaRepo.disable(fctx, user.id));
  }

  /** Generate a fresh set of backup codes; requires 2FA to be enabled. */
  generateBackupCodes(fctx: FlowExecCtx, user: User) {
    return this.is2faEnabled(fctx, user.id)
      .andThen((enabled) => {
        if (!enabled) {
          return errAsync(twofaErrors.notEnabled(fctx));
        }
        return okAsync(undefined);
      })
      .andThen(() => this.twofaRepo.generateBackupCodes(fctx, user.id));
  }

  /**
   * Whether this login session still needs its first 2FA check.
   *
   * Reads the Redis flag set by markInitialVerificationComplete(); a Redis
   * failure is treated as "verification required" (fail-closed).
   */
  requiresInitialVerification(
    fctx: FlowExecCtx,
    user: User,
    sessionId: string,
  ) {
    return this.is2faEnabled(fctx, user.id).andThen((enabled) => {
      if (!enabled) {
        return okAsync(false);
      }

      return ResultAsync.fromPromise(
        this.store.get(`initial_2fa_completed:${sessionId}`),
        () => null,
      )
        .map((completed) => !completed && completed !== "0")
        .orElse(() => okAsync(true));
    });
  }

  /**
   * Whether a sensitive action needs a fresh 2FA check: true when the last
   * verification is older than settings.twofaRequiredHours (default 24h)
   * or has never happened.
   */
  requiresSensitiveActionVerification(fctx: FlowExecCtx, user: User) {
    return this.is2faEnabled(fctx, user.id).andThen((enabled) => {
      if (!enabled) {
        return okAsync(false);
      }

      if (!user.last2FAVerifiedAt) {
        return okAsync(true);
      }

      const requiredHours = settings.twofaRequiredHours || 24;
      const verificationAge =
        Date.now() - user.last2FAVerifiedAt.getTime();
      const maxAge = requiredHours * 60 * 60 * 1000;

      return okAsync(verificationAge > maxAge);
    });
  }

  /**
   * Flag a session as having completed its initial 2FA check (7-day TTL in
   * Redis). Best-effort: Redis failures are logged and swallowed.
   * NOTE(review): the log fabricates a fresh flowId instead of threading a
   * caller FlowExecCtx through — consider accepting fctx as a parameter.
   */
  markInitialVerificationComplete(sessionId: string) {
    return ResultAsync.fromPromise(
      this.store.setex(
        `initial_2fa_completed:${sessionId}`,
        60 * 60 * 24 * 7,
        "true",
      ),
      () => null,
    )
      .map(() => undefined)
      .orElse((error) => {
        logDomainEvent({
          level: "error",
          event: "security.twofa.mark_initial_verification.failed",
          fctx: { flowId: crypto.randomUUID() },
          error,
        });
        return okAsync(undefined);
      });
  }

  /**
   * Open a verification session for the given user/session and return its
   * opaque token, which the client echoes back to verifyCode().
   */
  startVerification(
    fctx: FlowExecCtx,
    params: {
      userId: string;
      sessionId: string;
      ipAddress?: string;
      userAgent?: string;
    },
  ) {
    return this.twofaRepo.createSession(fctx, params).map((session) => ({
      verificationToken: session.verificationToken,
    }));
  }

  /**
   * Reject sessions that are not pending; expired sessions are marked
   * "expired" before erroring.
   */
  private validateSession(fctx: FlowExecCtx, session: TwoFaSession) {
    if (session.status !== "pending") {
      return errAsync(twofaErrors.sessionNotActive(fctx));
    }

    if (session.expiresAt < new Date()) {
      return this.twofaRepo
        .updateSession(fctx, session.id, { status: "expired" })
        .andThen(() => errAsync(twofaErrors.sessionExpired(fctx)));
    }

    return okAsync(session);
  }

  /**
   * Attempt limit exceeded: fail the session and ban the user for 1 hour,
   * then surface tooManyAttempts.
   */
  private handleMaxAttempts(
    fctx: FlowExecCtx,
    session: TwoFaSession,
    userId: string,
  ) {
    const banExpiresAt = new Date();
    banExpiresAt.setHours(banExpiresAt.getHours() + 1);

    return this.twofaRepo
      .updateSession(fctx, session.id, { status: "failed" })
      .andThen(() =>
        this.userRepo.banUser(
          fctx,
          userId,
          "Too many failed 2FA verification attempts",
          banExpiresAt,
        ),
      )
      .andThen(() => errAsync(twofaErrors.tooManyAttempts(fctx)));
  }

  /** Gate: escalate to handleMaxAttempts once attempts hit maxAttempts. */
  private checkAttemptsLimit(
    fctx: FlowExecCtx,
    session: TwoFaSession,
    userId: string,
  ) {
    if (session.attempts >= session.maxAttempts) {
      return this.handleMaxAttempts(fctx, session, userId);
    }
    return okAsync(session);
  }

  /**
   * Reject a code identical to the one already consumed by this session
   * (replay protection); the failed attempt is still counted.
   */
  private checkCodeReplay(
    fctx: FlowExecCtx,
    session: TwoFaSession,
    code: string,
  ): ResultAsync<TwoFaSession, Err> {
    if (session.codeUsed === code) {
      return this.twofaRepo
        .incrementAttempts(fctx, session.id)
        .andThen(() => errAsync(twofaErrors.codeReplay(fctx)));
    }
    return okAsync(session);
  }

  /**
   * Check the submitted TOTP code against the stored secret. A wrong code
   * increments the session's attempt counter before erroring.
   */
  private verifyTotpCode(
    fctx: FlowExecCtx,
    session: TwoFaSession,
    userId: string,
    code: string,
  ) {
    return this.twofaRepo.get2FASecret(fctx, userId).andThen((secret) => {
      if (!secret) {
        return errAsync(twofaErrors.invalidSetup(fctx));
      }

      if (!this.checkTotp(secret, code)) {
        return this.twofaRepo
          .incrementAttempts(fctx, session.id)
          .andThen(() => errAsync(twofaErrors.invalidCode(fctx)));
      }

      return okAsync(session);
    });
  }

  /**
   * Success path: mark the session verified (recording the code used, for
   * replay detection), stamp the user's last-verified time, and set the
   * Redis initial-verification flag.
   */
  private completeVerification(
    fctx: FlowExecCtx,
    session: TwoFaSession,
    userId: string,
    code: string,
  ) {
    return this.twofaRepo
      .updateSession(fctx, session.id, {
        status: "verified",
        verifiedAt: new Date(),
        codeUsed: code,
      })
      .andThen(() =>
        ResultAsync.combine([
          this.userRepo.updateLastVerified2FaAtToNow(fctx, userId),
          this.markInitialVerificationComplete(session.sessionId),
        ]),
      )
      .map(() => undefined);
  }

  /**
   * Full verification pipeline for a submitted code:
   * user present -> 2FA enabled -> session found by token -> session
   * pending & unexpired -> attempts below limit -> not a replay -> TOTP
   * valid -> mark verified. Any failed stage short-circuits with the
   * corresponding twofaErrors value.
   */
  verifyCode(
    fctx: FlowExecCtx,
    params: { verificationSessToken: string; code: string },
    user?: User,
  ) {
    if (!user) {
      return errAsync(twofaErrors.userNotFound(fctx));
    }

    return this.is2faEnabled(fctx, user.id)
      .andThen((enabled) => {
        if (!enabled) {
          return errAsync(
            twofaErrors.notEnabledForVerification(fctx),
          );
        }
        return okAsync(undefined);
      })
      .andThen(() =>
        this.twofaRepo.getSessionByToken(
          fctx,
          params.verificationSessToken,
        ),
      )
      .andThen((session) => {
        if (!session) {
          return errAsync(twofaErrors.sessionNotFound(fctx));
        }
        return okAsync(session);
      })
      .andThen((session) => this.validateSession(fctx, session))
      .andThen((session) =>
        this.checkAttemptsLimit(fctx, session, user.id),
      )
      .andThen((session) =>
        this.checkCodeReplay(fctx, session, params.code),
      )
      .andThen((session) =>
        this.verifyTotpCode(fctx, session, user.id, params.code),
      )
      .andThen((session) =>
        this.completeVerification(fctx, session, user.id, params.code),
      )
      .map(() => ({ success: true }));
  }

  /** Purge expired verification sessions (delegates to the repository). */
  cleanupExpiredSessions(fctx: FlowExecCtx) {
    return this.twofaRepo.cleanupExpiredSessions(fctx);
  }
}
|
||||
|
||||
export function getTwofaController() {
|
||||
const _redis = getRedisInstance();
|
||||
return new TwofaController(
|
||||
new TwofaRepository(db, _redis),
|
||||
new UserRepository(db),
|
||||
_redis,
|
||||
settings.twoFaSecret,
|
||||
);
|
||||
}
|
||||
48
packages/logic/domains/2fa/data.ts
Normal file
48
packages/logic/domains/2fa/data.ts
Normal file
@@ -0,0 +1,48 @@
|
||||
import * as v from "valibot";
|
||||
|
||||
// Request payload: begin a 2FA verification session for a login session.
export const startVerificationSchema = v.object({
  userId: v.string(),
  sessionId: v.string(),
});

// Request payload: submit a code against an open verification session.
export const verifyCodeSchema = v.object({
  verificationToken: v.string(),
  code: v.string(),
});

// Request payload: first TOTP code submitted while enabling 2FA.
export const enable2FACodeSchema = v.object({
  code: v.string(),
});

// Request payload: TOTP code required to confirm disabling 2FA.
export const disable2FASchema = v.object({
  code: v.string(),
});

// Persisted 2FA record for a user: TOTP secret plus hashed backup codes.
export const twoFactorSchema = v.object({
  id: v.string(),
  secret: v.string(),
  backupCodes: v.array(v.string()),
  userId: v.string(),
  createdAt: v.date(),
  updatedAt: v.date(),
});
export type TwoFactor = v.InferOutput<typeof twoFactorSchema>;

// Lifecycle of a verification session; only "pending" sessions accept codes.
export type TwoFaSessionStatus = "pending" | "verified" | "failed" | "expired";

// One code-verification attempt window, bounded by expiresAt and maxAttempts.
export const twoFaSessionSchema = v.object({
  id: v.string(),
  userId: v.string(),
  sessionId: v.string(),
  verificationToken: v.string(),
  codeUsed: v.optional(v.string()), // last accepted code (replay detection)
  status: v.picklist(["pending", "verified", "failed", "expired"]),
  attempts: v.number(),
  maxAttempts: v.number(),
  verifiedAt: v.optional(v.date()),
  expiresAt: v.date(),
  createdAt: v.date(),
  ipAddress: v.string(),
  userAgent: v.string(),
});
export type TwoFaSession = v.InferOutput<typeof twoFaSessionSchema>;
|
||||
180
packages/logic/domains/2fa/errors.ts
Normal file
180
packages/logic/domains/2fa/errors.ts
Normal file
@@ -0,0 +1,180 @@
|
||||
import { FlowExecCtx } from "@/core/flow.execution.context";
|
||||
import { ERROR_CODES, type Err } from "@pkg/result";
|
||||
import { getError } from "@pkg/logger";
|
||||
|
||||
/**
 * Catalog of every domain error the 2FA controller/repository can emit.
 *
 * Each entry is a factory taking the current FlowExecCtx so the resulting
 * Err carries the flowId for tracing. `message`/`description` are
 * user-facing; `detail` is the internal diagnostic.
 */
export const twofaErrors = {
  // Infrastructure failure while reading/writing 2FA state.
  dbError: (fctx: FlowExecCtx, detail: string): Err =>
    getError({
      flowId: fctx.flowId,
      code: ERROR_CODES.DATABASE_ERROR,
      message: "Database operation failed",
      description: "Please try again later",
      detail,
    }),

  // Enable/setup attempted while 2FA is already active.
  alreadyEnabled: (fctx: FlowExecCtx): Err =>
    getError({
      flowId: fctx.flowId,
      code: ERROR_CODES.AUTH_ERROR,
      message: "2FA already enabled",
      description: "Disable it first if you want to re-enable it",
      detail: "2FA already enabled",
    }),

  // Operation requires 2FA but the user never enabled it.
  notEnabled: (fctx: FlowExecCtx): Err =>
    getError({
      flowId: fctx.flowId,
      code: ERROR_CODES.AUTH_ERROR,
      message: "2FA not enabled for this user",
      description: "Enable 2FA to perform this action",
      detail: "2FA not enabled for this user",
    }),

  userNotFound: (fctx: FlowExecCtx): Err =>
    getError({
      flowId: fctx.flowId,
      code: ERROR_CODES.AUTH_ERROR,
      message: "User not found",
      description: "Session is invalid or expired",
      detail: "User ID not found in database",
    }),

  // --- Verification-session state errors ---

  sessionNotActive: (fctx: FlowExecCtx): Err =>
    getError({
      flowId: fctx.flowId,
      code: ERROR_CODES.AUTH_ERROR,
      message: "Verification session is no longer active",
      description: "Please request a new verification code",
      detail: "Session status is not 'pending'",
    }),

  sessionExpired: (fctx: FlowExecCtx): Err =>
    getError({
      flowId: fctx.flowId,
      code: ERROR_CODES.AUTH_ERROR,
      message: "Verification session has expired",
      description: "Please request a new verification code",
      detail: "Session expired timestamp passed",
    }),

  sessionNotFound: (fctx: FlowExecCtx): Err =>
    getError({
      flowId: fctx.flowId,
      code: ERROR_CODES.NOT_FOUND,
      message: "Invalid or expired verification session",
      description: "Your verification session has expired or is invalid",
      detail: "Session not found by verification token",
    }),

  // Attempt limit reached — the controller also bans the user (see
  // TwofaController.handleMaxAttempts), hence the BANNED code.
  tooManyAttempts: (fctx: FlowExecCtx): Err =>
    getError({
      flowId: fctx.flowId,
      code: ERROR_CODES.BANNED,
      message: "Too many failed attempts",
      description:
        "Your account has been banned, contact us to resolve this issue",
      detail: "Max attempts reached for 2FA verification",
    }),

  // Same code submitted twice within one session.
  codeReplay: (fctx: FlowExecCtx): Err =>
    getError({
      flowId: fctx.flowId,
      code: ERROR_CODES.AUTH_ERROR,
      message: "This code has already been used",
      description: "Please request a new verification code",
      detail: "Code replay attempt detected",
    }),

  // 2FA is marked enabled but no secret is on record (corrupt state).
  invalidSetup: (fctx: FlowExecCtx): Err =>
    getError({
      flowId: fctx.flowId,
      code: ERROR_CODES.AUTH_ERROR,
      message: "Invalid 2FA setup found",
      description: "Please contact us to resolve this issue",
      detail: "Invalid 2FA data found",
    }),

  invalidCode: (fctx: FlowExecCtx): Err =>
    getError({
      flowId: fctx.flowId,
      code: ERROR_CODES.AUTH_ERROR,
      message: "Invalid verification code",
      description: "Please try again with the correct code",
      detail: "Code is invalid",
    }),

  // Verification attempted for a user who has 2FA disabled.
  notEnabledForVerification: (fctx: FlowExecCtx): Err =>
    getError({
      flowId: fctx.flowId,
      code: ERROR_CODES.AUTH_ERROR,
      message: "2FA not enabled for this user",
      description:
        "Two-factor authentication is not enabled on your account",
      detail: "User has 2FA disabled but verification attempted",
    }),

  // Post-enable cleanup (revoking other auth sessions) failed.
  revokeSessionsFailed: (fctx: FlowExecCtx): Err =>
    getError({
      flowId: fctx.flowId,
      code: ERROR_CODES.AUTH_ERROR,
      message: "Failed to revoke sessions",
      description: "Please try again later",
      detail: "Failed to revoke other sessions",
    }),

  // Repository errors
  notFound: (fctx: FlowExecCtx): Err =>
    getError({
      flowId: fctx.flowId,
      code: ERROR_CODES.NOT_FOUND,
      message: "2FA not found",
      description: "Likely not enabled, otherwise please contact us :)",
      detail: "2FA not found",
    }),

  setupNotFound: (fctx: FlowExecCtx): Err =>
    getError({
      flowId: fctx.flowId,
      code: ERROR_CODES.VALIDATION_ERROR,
      message: "Cannot perform action",
      description: "If 2FA is not enabled, please refresh and try again",
      detail: "2FA setup not found",
    }),

  maxAttemptsReached: (fctx: FlowExecCtx): Err =>
    getError({
      flowId: fctx.flowId,
      code: ERROR_CODES.AUTH_ERROR,
      message: "Too many failed attempts",
      description: "Please refresh and try again",
      detail: "Max attempts reached for session",
    }),

  backupCodesNotFound: (fctx: FlowExecCtx): Err =>
    getError({
      flowId: fctx.flowId,
      code: ERROR_CODES.NOT_FOUND,
      message: "2FA info not found",
      description: "Please setup 2FA or contact us if this is unexpected",
      detail: "2FA info not found for user",
    }),

  backupCodesAlreadyGenerated: (fctx: FlowExecCtx): Err =>
    getError({
      flowId: fctx.flowId,
      code: ERROR_CODES.AUTH_ERROR,
      message: "Backup codes already generated",
      description:
        "Can only generate if not already present, or all are used up",
      detail: "Backup codes already generated",
    }),

  sessionNotFoundById: (fctx: FlowExecCtx): Err =>
    getError({
      flowId: fctx.flowId,
      code: ERROR_CODES.NOT_FOUND,
      message: "2FA session not found",
      description: "The verification session may have expired",
      detail: "Session ID not found in database",
    }),
};
|
||||
695
packages/logic/domains/2fa/repository.ts
Normal file
695
packages/logic/domains/2fa/repository.ts
Normal file
@@ -0,0 +1,695 @@
|
||||
import { errAsync, okAsync, ResultAsync } from "neverthrow";
|
||||
import { FlowExecCtx } from "@core/flow.execution.context";
|
||||
import { hashString, verifyHash } from "@/core/hash.utils";
|
||||
import { twoFactor, twofaSessions } from "@pkg/db/schema";
|
||||
import { TwoFactor, type TwoFaSession } from "./data";
|
||||
import { crypto } from "@otplib/plugin-crypto-noble";
|
||||
import { base32 } from "@otplib/plugin-base32-scure";
|
||||
import { and, Database, eq, gt, lt } from "@pkg/db";
|
||||
import { generate, verify } from "@otplib/totp";
|
||||
import { settings } from "@core/settings";
|
||||
import type { Err } from "@pkg/result";
|
||||
import { twofaErrors } from "./errors";
|
||||
import { Redis } from "@pkg/keystore";
|
||||
import { logDomainEvent, logger } from "@pkg/logger";
|
||||
import { nanoid } from "nanoid";
|
||||
|
||||
// Shape of the pending-enrollment payload kept in Redis (JSON-encoded)
// while a user is setting up 2FA, keyed by PENDING_KEY_PREFIX + userId.
type TwoFaSetup = {
  secret: string; // TOTP secret material for the pending setup
  lastUsedCode: string; // last submitted code, used to reject replays
  tries: number; // failed verification attempts so far
};
|
||||
|
||||
/**
 * Persistence layer for two-factor auth: pending-setup state lives in
 * Redis; enabled-2FA records and login-verification sessions live in
 * the database.
 */
export class TwofaRepository {
  // Redis key prefix for in-progress (not yet confirmed) 2FA setups.
  private PENDING_KEY_PREFIX = "pending_enabling_2fa:";
  private EXPIRY_TIME = 60 * 20; // 20 mins
  private DEFAULT_BACKUP_CODES_AMT = 8;
  private MAX_SETUP_ATTEMPTS = 3;

  constructor(
    private db: Database,
    private store: Redis,
  ) {}
|
||||
|
||||
  // Check a TOTP code against a secret using the configured noble-crypto
  // and scure-base32 plugins. The result of `verify` is returned as-is.
  // NOTE(review): the value is not awaited — callers use it as a boolean
  // (`!this.checkTotp(...)`); if @otplib's `verify` returns a Promise
  // that negation is always false. Confirm `verify` is synchronous.
  checkTotp(secret: string, code: string) {
    const checked = verify({ secret, token: code, crypto, base32 });
    logger.debug("TOTP check result", { checked });
    return checked;
  }
|
||||
|
||||
  // Compare a plaintext backup code against its stored hash.
  async checkBackupCode(hash: string, code: string) {
    return verifyHash({ hash, target: code });
  }
|
||||
|
||||
private getKey(userId: string) {
|
||||
if (userId.includes(this.PENDING_KEY_PREFIX)) {
|
||||
return userId;
|
||||
}
|
||||
return `${this.PENDING_KEY_PREFIX}${userId}`;
|
||||
}
|
||||
|
||||
  /**
   * Load a user's 2FA record from the database.
   *
   * @param returnUndefined when true, a missing record resolves with
   *   `undefined` instead of erring with `notFound`.
   */
  getUsers2FAInfo(
    fctx: FlowExecCtx,
    userId: string,
    returnUndefined?: boolean,
  ): ResultAsync<TwoFactor | undefined, Err> {
    const startedAt = Date.now();
    logDomainEvent({
      event: "security.twofa.get_info.started",
      fctx,
      meta: { userId },
    });

    return ResultAsync.fromPromise(
      this.db.query.twoFactor.findFirst({
        where: eq(twoFactor.userId, userId),
      }),
      (error) => {
        logDomainEvent({
          level: "error",
          event: "security.twofa.get_info.failed",
          fctx,
          durationMs: Date.now() - startedAt,
          error,
          meta: { userId },
        });
        return twofaErrors.dbError(fctx, "Failed to query 2FA info");
      },
    ).andThen((found) => {
      if (!found) {
        // A missing record is logged as a failure even when the caller
        // opted into the `undefined` success path below.
        logDomainEvent({
          level: "warn",
          event: "security.twofa.get_info.failed",
          fctx,
          durationMs: Date.now() - startedAt,
          error: { code: "NOT_FOUND", message: "2FA info not found" },
          meta: { userId },
        });
        if (returnUndefined) {
          return okAsync(undefined);
        }
        return errAsync(twofaErrors.notFound(fctx));
      }
      logDomainEvent({
        event: "security.twofa.get_info.succeeded",
        fctx,
        durationMs: Date.now() - startedAt,
        meta: { userId },
      });
      return okAsync(found as TwoFactor);
    });
  }
|
||||
|
||||
  // True when the user has a pending (unconfirmed) 2FA setup entry in
  // Redis; the entry's mere presence is the signal.
  isSetupPending(
    fctx: FlowExecCtx,
    userId: string,
  ): ResultAsync<boolean, Err> {
    logger.debug("Checking if 2FA setup is pending", { ...fctx, userId });

    return ResultAsync.fromPromise(
      this.store.get(this.getKey(userId)),
      () =>
        twofaErrors.dbError(
          fctx,
          "Failed to check setup pending status",
        ),
    ).map((found) => {
      const isPending = !!found;
      logger.debug("Setup pending status checked", {
        ...fctx,
        userId,
        isPending,
      });
      return isPending;
    });
  }
|
||||
|
||||
setup(
|
||||
fctx: FlowExecCtx,
|
||||
userId: string,
|
||||
secret: string,
|
||||
): ResultAsync<string, Err> {
|
||||
const startedAt = Date.now();
|
||||
logDomainEvent({
|
||||
event: "security.twofa.setup.started",
|
||||
fctx,
|
||||
meta: { userId },
|
||||
});
|
||||
|
||||
return ResultAsync.fromSafePromise(
|
||||
(async () => {
|
||||
const token = await generate({
|
||||
secret,
|
||||
crypto,
|
||||
base32,
|
||||
});
|
||||
const payload = {
|
||||
secret: token,
|
||||
lastUsedCode: "",
|
||||
tries: 0,
|
||||
} as TwoFaSetup;
|
||||
await this.store.setex(
|
||||
this.getKey(userId),
|
||||
this.EXPIRY_TIME,
|
||||
JSON.stringify(payload),
|
||||
);
|
||||
logDomainEvent({
|
||||
event: "security.twofa.setup.succeeded",
|
||||
fctx,
|
||||
durationMs: Date.now() - startedAt,
|
||||
meta: { userId, expiresInSec: this.EXPIRY_TIME },
|
||||
});
|
||||
return secret;
|
||||
})(),
|
||||
).mapErr((error) => {
|
||||
logDomainEvent({
|
||||
level: "error",
|
||||
event: "security.twofa.setup.failed",
|
||||
fctx,
|
||||
durationMs: Date.now() - startedAt,
|
||||
error,
|
||||
meta: { userId },
|
||||
});
|
||||
return twofaErrors.dbError(fctx, "Setting to data store failed");
|
||||
});
|
||||
}
|
||||
|
||||
  /**
   * Confirm a pending 2FA setup for `userId` using the submitted TOTP
   * `code`.
   *
   * Resolves `true` once 2FA is enabled and the pending Redis entry is
   * removed; resolves `false` for a wrong or replayed code (the attempt
   * counter is bumped and the entry's TTL reset); errs when no setup is
   * pending, the attempt budget is exhausted (entry deleted), or a
   * store/db operation fails.
   */
  verifyAndEnable2FA(
    fctx: FlowExecCtx,
    userId: string,
    code: string,
  ): ResultAsync<boolean, Err> {
    const startedAt = Date.now();
    logDomainEvent({
      event: "security.twofa.verify_enable.started",
      fctx,
      meta: { userId },
    });

    return ResultAsync.fromPromise(
      this.store.get(this.getKey(userId)),
      () => twofaErrors.dbError(fctx, "Failed to get setup session"),
    )
      .andThen((payload) => {
        // No pending entry: setup was never started or the TTL expired.
        if (!payload) {
          logDomainEvent({
            level: "warn",
            event: "security.twofa.verify_enable.failed",
            fctx,
            durationMs: Date.now() - startedAt,
            error: {
              code: "SETUP_NOT_FOUND",
              message: "2FA setup session not found",
            },
            meta: { userId },
          });
          return errAsync(twofaErrors.setupNotFound(fctx));
        }
        return okAsync(JSON.parse(payload) as TwoFaSetup);
      })
      .andThen((payloadObj) => {
        const key = this.getKey(userId);

        // Attempt budget exhausted: destroy the pending setup so the
        // user has to restart enrollment from scratch.
        if (payloadObj.tries >= this.MAX_SETUP_ATTEMPTS) {
          logDomainEvent({
            level: "warn",
            event: "security.twofa.verify_enable.failed",
            fctx,
            durationMs: Date.now() - startedAt,
            error: {
              code: "MAX_ATTEMPTS_REACHED",
              message: "Max setup attempts reached",
            },
            meta: { userId, attempts: payloadObj.tries },
          });
          return ResultAsync.fromPromise(this.store.del(key), () =>
            twofaErrors.dbError(
              fctx,
              "Failed to delete setup session",
            ),
          ).andThen(() =>
            errAsync(twofaErrors.maxAttemptsReached(fctx)),
          );
        }

        // Invalid code, or the previous code replayed: record the failed
        // attempt (which also resets the TTL) and resolve `false`.
        // NOTE(review): `checkTotp`'s result is used as a boolean here;
        // if @otplib's `verify` returns a Promise, this negation is
        // always false and only the replay comparison would ever fire —
        // confirm `verify` is synchronous.
        if (
          !this.checkTotp(payloadObj.secret, code) ||
          code === payloadObj.lastUsedCode
        ) {
          logDomainEvent({
            level: "warn",
            event: "security.twofa.verify_enable.failed",
            fctx,
            durationMs: Date.now() - startedAt,
            error: {
              code: "INVALID_CODE",
              message: "Invalid or replayed setup code",
            },
            meta: {
              userId,
              attempts: payloadObj.tries + 1,
              codeReused: code === payloadObj.lastUsedCode,
            },
          });
          return ResultAsync.fromPromise(
            this.store.setex(
              key,
              this.EXPIRY_TIME,
              JSON.stringify({
                secret: payloadObj.secret,
                lastUsedCode: code,
                tries: payloadObj.tries + 1,
              }),
            ),
            () =>
              twofaErrors.dbError(
                fctx,
                "Failed to update setup session",
              ),
          ).map(() => false);
        }

        logger.info("2FA code verified successfully, enabling 2FA", {
          ...fctx,
          userId,
        });

        // Success: drop the pending entry, then persist the 2FA record.
        // NOTE(review): not transactional — if the insert fails after
        // the delete succeeds, the user must redo setup.
        return ResultAsync.fromPromise(this.store.del(key), () =>
          twofaErrors.dbError(fctx, "Failed to delete setup session"),
        )
          .andThen(() =>
            ResultAsync.fromPromise(
              this.db
                .insert(twoFactor)
                .values({
                  id: nanoid(),
                  secret: payloadObj.secret,
                  userId: userId,
                  createdAt: new Date(),
                  updatedAt: new Date(),
                })
                .execute(),
              () =>
                twofaErrors.dbError(
                  fctx,
                  "Failed to insert 2FA record",
                ),
            ),
          )
          .map(() => {
            logDomainEvent({
              event: "security.twofa.verify_enable.succeeded",
              fctx,
              durationMs: Date.now() - startedAt,
              meta: { userId },
            });
            return true;
          });
      });
  }
|
||||
|
||||
disable(fctx: FlowExecCtx, userId: string): ResultAsync<boolean, Err> {
|
||||
logger.info("Disabling 2FA", { ...fctx, userId });
|
||||
|
||||
return ResultAsync.fromPromise(
|
||||
this.db
|
||||
.delete(twoFactor)
|
||||
.where(eq(twoFactor.userId, userId))
|
||||
.execute(),
|
||||
() => twofaErrors.dbError(fctx, "Failed to delete 2FA record"),
|
||||
).map((result) => {
|
||||
logger.info("2FA disabled successfully", { ...fctx, userId });
|
||||
return true;
|
||||
});
|
||||
}
|
||||
|
||||
generateBackupCodes(
|
||||
fctx: FlowExecCtx,
|
||||
userId: string,
|
||||
): ResultAsync<string[], Err> {
|
||||
logger.info("Generating backup codes", { ...fctx, userId });
|
||||
|
||||
return ResultAsync.fromPromise(
|
||||
this.db.query.twoFactor.findFirst({
|
||||
where: eq(twoFactor.userId, userId),
|
||||
}),
|
||||
() => twofaErrors.dbError(fctx, "Failed to query 2FA info"),
|
||||
)
|
||||
.andThen((found) => {
|
||||
if (!found) {
|
||||
logger.error("2FA not enabled for user", {
|
||||
...fctx,
|
||||
userId,
|
||||
});
|
||||
return errAsync(twofaErrors.backupCodesNotFound(fctx));
|
||||
}
|
||||
if (found.backupCodes && found.backupCodes.length) {
|
||||
logger.warn("Backup codes already generated", {
|
||||
...fctx,
|
||||
userId,
|
||||
});
|
||||
return errAsync(
|
||||
twofaErrors.backupCodesAlreadyGenerated(fctx),
|
||||
);
|
||||
}
|
||||
return okAsync(found);
|
||||
})
|
||||
.andThen(() => {
|
||||
const codes = Array.from(
|
||||
{ length: this.DEFAULT_BACKUP_CODES_AMT },
|
||||
() => nanoid(12),
|
||||
);
|
||||
|
||||
logger.debug("Backup codes generated, hashing", {
|
||||
...fctx,
|
||||
userId,
|
||||
count: codes.length,
|
||||
});
|
||||
|
||||
return ResultAsync.fromPromise(
|
||||
(async () => {
|
||||
const hashed = [];
|
||||
for (const code of codes) {
|
||||
const hash = await hashString(code);
|
||||
hashed.push(hash);
|
||||
}
|
||||
return { codes, hashed };
|
||||
})(),
|
||||
() =>
|
||||
twofaErrors.dbError(
|
||||
fctx,
|
||||
"Failed to hash backup codes",
|
||||
),
|
||||
).andThen(({ codes, hashed }) =>
|
||||
ResultAsync.fromPromise(
|
||||
this.db
|
||||
.update(twoFactor)
|
||||
.set({ backupCodes: hashed })
|
||||
.where(eq(twoFactor.userId, userId))
|
||||
.returning(),
|
||||
() =>
|
||||
twofaErrors.dbError(
|
||||
fctx,
|
||||
"Failed to update backup codes",
|
||||
),
|
||||
).map(() => {
|
||||
logger.info("Backup codes generated successfully", {
|
||||
...fctx,
|
||||
userId,
|
||||
});
|
||||
return codes;
|
||||
}),
|
||||
);
|
||||
});
|
||||
}
|
||||
|
||||
get2FASecret(
|
||||
fctx: FlowExecCtx,
|
||||
userId: string,
|
||||
): ResultAsync<string | null, Err> {
|
||||
logger.debug("Getting 2FA secret", { ...fctx, userId });
|
||||
|
||||
return ResultAsync.fromPromise(
|
||||
this.db
|
||||
.select()
|
||||
.from(twoFactor)
|
||||
.where(eq(twoFactor.userId, userId))
|
||||
.limit(1),
|
||||
() => twofaErrors.dbError(fctx, "Failed to query 2FA secret"),
|
||||
).map((result) => {
|
||||
if (!result.length) {
|
||||
logger.debug("No 2FA secret found", { ...fctx, userId });
|
||||
return null;
|
||||
}
|
||||
logger.debug("2FA secret retrieved", { ...fctx, userId });
|
||||
return result[0].secret;
|
||||
});
|
||||
}
|
||||
|
||||
  /**
   * Create a pending 2FA verification session (e.g. at login) with a
   * fresh random verification token, a 5-attempt budget, and an expiry
   * taken from settings (falling back to 10 minutes).
   */
  createSession(
    fctx: FlowExecCtx,
    params: {
      userId: string;
      sessionId: string;
      ipAddress?: string;
      userAgent?: string;
    },
  ): ResultAsync<TwoFaSession, Err> {
    const startedAt = Date.now();
    logDomainEvent({
      event: "security.twofa.create_session.started",
      fctx,
      meta: { userId: params.userId, sessionId: params.sessionId },
    });

    return ResultAsync.fromSafePromise(
      (async () => {
        // Pure expiry computation — cannot reject, so fromSafePromise
        // is appropriate here.
        const expiryMinutes = settings.twofaSessionExpiryMinutes || 10;
        const now = new Date();
        const expiresAt = new Date(
          now.getTime() + expiryMinutes * 60 * 1000,
        );

        return { expiresAt, now, params };
      })(),
    ).andThen(({ expiresAt, now, params }) =>
      ResultAsync.fromPromise(
        this.db
          .insert(twofaSessions)
          .values({
            id: nanoid(),
            userId: params.userId,
            sessionId: params.sessionId,
            verificationToken: nanoid(32),
            status: "pending",
            attempts: 0,
            maxAttempts: 5,
            expiresAt,
            createdAt: now,
            ipAddress: params.ipAddress,
            userAgent: params.userAgent,
          })
          .returning(),
        (error) => {
          logDomainEvent({
            level: "error",
            event: "security.twofa.create_session.failed",
            fctx,
            durationMs: Date.now() - startedAt,
            error,
            meta: { userId: params.userId },
          });
          return twofaErrors.dbError(
            fctx,
            "Failed to create 2FA session",
          );
        },
      ).map(([session]) => {
        logDomainEvent({
          event: "security.twofa.create_session.succeeded",
          fctx,
          durationMs: Date.now() - startedAt,
          meta: {
            twofaSessionId: session.id,
            userId: params.userId,
          },
        });
        return session as TwoFaSession;
      }),
    );
  }
|
||||
|
||||
  /**
   * Look up a non-expired 2FA session by its verification token.
   * Resolves `null` (not an error) when no live session matches.
   */
  getSessionByToken(
    fctx: FlowExecCtx,
    token: string,
  ): ResultAsync<TwoFaSession | null, Err> {
    const startedAt = Date.now();
    logDomainEvent({
      level: "debug",
      event: "security.twofa.get_session.started",
      fctx,
    });

    return ResultAsync.fromPromise(
      this.db
        .select()
        .from(twofaSessions)
        .where(
          and(
            eq(twofaSessions.verificationToken, token),
            // Expiry is enforced in the query itself.
            gt(twofaSessions.expiresAt, new Date()),
          ),
        )
        .limit(1),
      (error) => {
        logDomainEvent({
          level: "error",
          event: "security.twofa.get_session.failed",
          fctx,
          durationMs: Date.now() - startedAt,
          error,
        });
        return twofaErrors.dbError(fctx, "Failed to query 2FA session");
      },
    ).map((result) => {
      if (!result.length) {
        logDomainEvent({
          level: "warn",
          event: "security.twofa.get_session.failed",
          fctx,
          durationMs: Date.now() - startedAt,
          error: {
            code: "SESSION_NOT_FOUND",
            message: "2FA session not found or expired",
          },
        });
        return null;
      }
      logDomainEvent({
        level: "debug",
        event: "security.twofa.get_session.succeeded",
        fctx,
        durationMs: Date.now() - startedAt,
        meta: { twofaSessionId: result[0].id },
      });
      return result[0] as TwoFaSession;
    });
  }
|
||||
|
||||
  /**
   * Apply partial updates (status / attempts / verifiedAt / codeUsed)
   * to a 2FA session by id. Errs with `sessionNotFoundById` when no
   * row matched the id.
   */
  updateSession(
    fctx: FlowExecCtx,
    id: string,
    updates: Partial<
      Pick<
        TwoFaSession,
        "status" | "attempts" | "verifiedAt" | "codeUsed"
      >
    >,
  ): ResultAsync<TwoFaSession, Err> {
    logger.debug("Updating 2FA session", {
      ...fctx,
      sessionId: id,
      updates,
    });

    return ResultAsync.fromPromise(
      this.db
        .update(twofaSessions)
        .set(updates)
        .where(eq(twofaSessions.id, id))
        .returning(),
      () => twofaErrors.dbError(fctx, "Failed to update 2FA session"),
    ).andThen(([session]) => {
      // `.returning()` yields the updated rows; empty means no match.
      if (!session) {
        logger.error("2FA session not found for update", {
          ...fctx,
          sessionId: id,
        });
        return errAsync(twofaErrors.sessionNotFoundById(fctx));
      }
      logger.debug("2FA session updated successfully", {
        ...fctx,
        sessionId: id,
      });
      return okAsync(session as TwoFaSession);
    });
  }
|
||||
|
||||
  /**
   * Bump a 2FA session's failed-attempt counter by one and return the
   * updated session.
   *
   * NOTE(review): this is a read-then-write increment rather than an
   * atomic SQL `attempts = attempts + 1`, so concurrent verification
   * attempts can lose increments — confirm that is acceptable.
   */
  incrementAttempts(
    fctx: FlowExecCtx,
    id: string,
  ): ResultAsync<TwoFaSession, Err> {
    logger.debug("Incrementing session attempts", {
      ...fctx,
      sessionId: id,
    });

    return ResultAsync.fromPromise(
      this.db.query.twofaSessions.findFirst({
        where: eq(twofaSessions.id, id),
        columns: { id: true, attempts: true },
      }),
      () =>
        twofaErrors.dbError(
          fctx,
          "Failed to query session for increment",
        ),
    )
      .andThen((s) => {
        if (!s) {
          logger.error("Session not found for increment", {
            ...fctx,
            sessionId: id,
          });
          return errAsync(twofaErrors.sessionNotFoundById(fctx));
        }
        return okAsync(s);
      })
      .andThen((s) =>
        ResultAsync.fromPromise(
          this.db
            .update(twofaSessions)
            .set({ attempts: s.attempts + 1 })
            .where(eq(twofaSessions.id, id))
            .returning(),
          () =>
            twofaErrors.dbError(
              fctx,
              "Failed to increment attempts",
            ),
        ).andThen(([session]) => {
          // The row may have been deleted between the read above and
          // this update.
          if (!session) {
            logger.error("Session not found after increment", {
              ...fctx,
              sessionId: id,
            });
            return errAsync(twofaErrors.sessionNotFoundById(fctx));
          }

          logger.warn("Failed verification attempt", {
            ...fctx,
            sessionId: session.id,
            attempts: session.attempts,
          });

          return okAsync(session as TwoFaSession);
        }),
      );
  }
|
||||
|
||||
  /**
   * Delete all expired 2FA sessions; resolves with the number removed.
   *
   * NOTE(review): `result.length` assumes the delete resolves to an
   * array of rows; without `.returning()` many drivers resolve to a
   * command-result object instead, in which case this always reports
   * 0 — confirm against the Database driver in use.
   */
  cleanupExpiredSessions(fctx: FlowExecCtx): ResultAsync<number, Err> {
    const startedAt = Date.now();
    logDomainEvent({
      event: "security.twofa.cleanup_expired.started",
      fctx,
    });

    return ResultAsync.fromPromise(
      this.db
        .delete(twofaSessions)
        .where(lt(twofaSessions.expiresAt, new Date())),
      (error) => {
        logDomainEvent({
          level: "error",
          event: "security.twofa.cleanup_expired.failed",
          fctx,
          durationMs: Date.now() - startedAt,
          error,
        });
        return twofaErrors.dbError(
          fctx,
          "Failed to cleanup expired sessions",
        );
      },
    ).map((result) => {
      const count = result.length || 0;
      logDomainEvent({
        event: "security.twofa.cleanup_expired.succeeded",
        fctx,
        durationMs: Date.now() - startedAt,
        meta: { count },
      });
      return count;
    });
  }
|
||||
}
|
||||
43
packages/logic/domains/2fa/sensitive-actions.ts
Normal file
43
packages/logic/domains/2fa/sensitive-actions.ts
Normal file
@@ -0,0 +1,43 @@
|
||||
import { FlowExecCtx } from "@core/flow.execution.context";
|
||||
import { getTwofaController } from "./controller";
|
||||
import type { User } from "@/domains/user/data";
|
||||
|
||||
// Shared controller instance used by the helpers in this module.
const twofaController = getTwofaController();
|
||||
|
||||
/**
|
||||
* Check if user needs 2FA verification for sensitive actions
|
||||
* Call this before executing sensitive operations like:
|
||||
* - Changing password
|
||||
* - Viewing billing info
|
||||
* - Deleting account
|
||||
* - etc.
|
||||
*/
|
||||
export async function requiresSensitiveAction2FA(
|
||||
fctx: FlowExecCtx,
|
||||
user: User,
|
||||
): Promise<boolean> {
|
||||
const result = await twofaController.requiresSensitiveActionVerification(
|
||||
fctx,
|
||||
user,
|
||||
);
|
||||
return result.match(
|
||||
(data) => data,
|
||||
() => true, // On error, require verification for security
|
||||
);
|
||||
}
|
||||
|
||||
export async function checkInitial2FaRequired(
|
||||
fctx: FlowExecCtx,
|
||||
user: User,
|
||||
sessionId: string,
|
||||
): Promise<boolean> {
|
||||
const result = await twofaController.requiresInitialVerification(
|
||||
fctx,
|
||||
user,
|
||||
sessionId,
|
||||
);
|
||||
return result.match(
|
||||
(data) => data,
|
||||
() => true,
|
||||
);
|
||||
}
|
||||
99
packages/logic/domains/auth/config.base.ts
Normal file
99
packages/logic/domains/auth/config.base.ts
Normal file
@@ -0,0 +1,99 @@
|
||||
import {
|
||||
admin,
|
||||
customSession,
|
||||
multiSession,
|
||||
username,
|
||||
} from "better-auth/plugins";
|
||||
import { drizzleAdapter } from "better-auth/adapters/drizzle";
|
||||
import { UserRoleMap } from "@domains/user/data";
|
||||
import { getRedisInstance } from "@pkg/keystore";
|
||||
import { settings } from "@core/settings";
|
||||
import { betterAuth } from "better-auth";
|
||||
import { logger } from "@pkg/logger";
|
||||
import { db, schema } from "@pkg/db";
|
||||
|
||||
// How long (seconds) session data may be served from the cookie cache.
const COOKIE_CACHE_MAX_AGE = 60 * 5;
// Usernames: alphanumeric and underscore only.
const USERNAME_REGEX = /^[a-zA-Z0-9_]+$/;
|
||||
|
||||
/**
 * better-auth instance: email/password auth (public signup disabled),
 * username + admin + multi-session plugins, Drizzle/Postgres persistence,
 * and Redis as secondary session storage.
 */
export const auth = betterAuth({
  trustedOrigins: ["http://localhost:5173", settings.betterAuthUrl],
  advanced: { useSecureCookies: settings.nodeEnv === "production" },
  appName: settings.appName,
  emailAndPassword: {
    enabled: true,
    disableSignUp: true,
    requireEmailVerification: false,
  },
  plugins: [
    customSession(async ({ user, session }) => {
      // Expose the session token as the session id on returned sessions.
      session.id = session.token;
      return { user, session };
    }),
    username({
      minUsernameLength: 5,
      maxUsernameLength: 20,
      usernameValidator: async (username) => {
        return USERNAME_REGEX.test(username);
      },
    }),
    admin({
      // NOTE(review): the default role for users is set to admin —
      // confirm this is intentional rather than a regular-user role.
      defaultRole: UserRoleMap.admin,
      defaultBanReason:
        "Stop fanum taxing the server bub, losing aura points fr",
      defaultBanExpiresIn: 60 * 60 * 24, // 1 day
    }),
    multiSession({ maximumSessions: 5 }),
  ],
  logger: {
    // Route better-auth's internal logs through the app logger.
    log: (level, message, metadata) => {
      logger.log(level, message, metadata);
    },
    level: "debug",
  },
  database: drizzleAdapter(db, { provider: "pg", schema: { ...schema } }),
  // Redis-backed secondary storage.
  secondaryStorage: {
    get: async (key) => {
      const redis = getRedisInstance();
      return await redis.get(key);
    },
    set: async (key, value, ttl) => {
      const redis = getRedisInstance();
      if (ttl) {
        await redis.setex(key, ttl, value);
      } else {
        await redis.set(key, value);
      }
    },
    delete: async (key) => {
      const redis = getRedisInstance();
      // redis.del resolves to the number of keys removed; the interface
      // here expects a string (hence toString) or null.
      const out = await redis.del(key);
      if (!out && out !== 0) {
        return null;
      }
      return out.toString() as any;
    },
  },
  session: {
    modelName: "session",
    expiresIn: 60 * 60 * 24 * 7, // 7 days
    updateAge: 60 * 60 * 24, // refresh expiry at most daily
    cookieCache: {
      enabled: true,
      maxAge: COOKIE_CACHE_MAX_AGE,
    },
  },
  user: {
    modelName: "user",
    additionalFields: {
      onboardingDone: {
        type: "boolean",
        defaultValue: false,
        required: false,
      },
      last2FAVerifiedAt: { type: "date", required: false },
      parentId: { required: false, type: "string" },
    },
  },
});
|
||||
|
||||
// - - -
|
||||
60
packages/logic/domains/auth/controller.ts
Normal file
60
packages/logic/domains/auth/controller.ts
Normal file
@@ -0,0 +1,60 @@
|
||||
import { AuthContext, MiddlewareContext, MiddlewareOptions } from "better-auth";
|
||||
import { AccountRepository } from "../user/account.repository";
|
||||
import { FlowExecCtx } from "@/core/flow.execution.context";
|
||||
import { ResultAsync } from "neverthrow";
|
||||
import { authErrors } from "./errors";
|
||||
import { logger } from "@pkg/logger";
|
||||
import { nanoid } from "nanoid";
|
||||
import { db } from "@pkg/db";
|
||||
|
||||
/**
 * Auth-flow hooks that need data access beyond what better-auth itself
 * provides.
 */
export class AuthController {
  constructor(private accountRepo: AccountRepository) {}

  /**
   * For two-factor endpoints carrying a password, rotate the account
   * password to a random value and substitute it into the request body
   * before the request proceeds. Requests that are not two-factor paths,
   * or have no password in the body, pass through unchanged.
   */
  swapAccountPasswordForTwoFactor(
    fctx: FlowExecCtx,
    ctx: MiddlewareContext<
      MiddlewareOptions,
      AuthContext & { returned?: unknown; responseHeaders?: Headers }
    >,
  ) {
    logger.info("Swapping account password for 2FA", {
      ...fctx,
    });

    // Only applies to two-factor endpoints.
    if (!ctx.path.includes("two-factor")) {
      return ResultAsync.fromSafePromise(Promise.resolve(ctx));
    }

    // Nothing to rotate if the request carries no password.
    if (!ctx.body.password || ctx.body.password.length === 0) {
      return ResultAsync.fromSafePromise(Promise.resolve(ctx));
    }

    logger.info("Rotating password for 2FA setup for user", {
      ...fctx,
      userId: ctx.body.userId,
    });

    return this.accountRepo
      .rotatePassword(fctx, ctx.body.userId, nanoid())
      .mapErr((err) => {
        logger.error("Failed to rotate password for 2FA", {
          ...fctx,
          error: err,
        });
        return authErrors.passwordRotationFailed(fctx, err.detail);
      })
      .map((newPassword) => {
        logger.info("Password rotated successfully for 2FA setup", {
          ...fctx,
        });
        // Hand back a ctx whose body carries the rotated password.
        return {
          ...ctx,
          body: { ...ctx.body, password: newPassword },
        };
      });
  }
}
|
||||
|
||||
/** Factory wiring the controller to a db-backed account repository. */
export function getAuthController(): AuthController {
  return new AuthController(new AccountRepository(db));
}
|
||||
32
packages/logic/domains/auth/errors.ts
Normal file
32
packages/logic/domains/auth/errors.ts
Normal file
@@ -0,0 +1,32 @@
|
||||
import { FlowExecCtx } from "@/core/flow.execution.context";
|
||||
import { getError } from "@pkg/logger";
|
||||
import { ERROR_CODES, type Err } from "@pkg/result";
|
||||
|
||||
/** Error factories for the auth domain; each carries the flow id. */
export const authErrors = {
  // Password rotation during 2FA setup failed; `detail` carries the cause.
  passwordRotationFailed: (fctx: FlowExecCtx, detail: string): Err =>
    getError({
      flowId: fctx.flowId,
      code: ERROR_CODES.INTERNAL_SERVER_ERROR,
      message: "Failed to begin 2FA setup",
      description: "An error occurred while rotating the password for 2FA",
      detail,
    }),

  // Generic database-failure wrapper.
  dbError: (fctx: FlowExecCtx, detail: string): Err =>
    getError({
      flowId: fctx.flowId,
      code: ERROR_CODES.DATABASE_ERROR,
      message: "Database operation failed",
      description: "Please try again later",
      detail,
    }),

  // No account row exists for the user.
  accountNotFound: (fctx: FlowExecCtx): Err =>
    getError({
      flowId: fctx.flowId,
      code: ERROR_CODES.NOT_FOUND,
      message: "Account not found",
      description: "Please try again later",
      detail: "Account not found for user",
    }),
};
|
||||
96
packages/logic/domains/notifications/controller.ts
Normal file
96
packages/logic/domains/notifications/controller.ts
Normal file
@@ -0,0 +1,96 @@
|
||||
import { FlowExecCtx } from "@/core/flow.execution.context";
|
||||
import { okAsync } from "neverthrow";
|
||||
import {
|
||||
NotificationFilters,
|
||||
PaginationOptions,
|
||||
} from "./data";
|
||||
import { NotificationRepository } from "./repository";
|
||||
import { db } from "@pkg/db";
|
||||
|
||||
/**
 * Thin application-service layer over NotificationRepository; most
 * methods delegate 1:1 without extra logic.
 */
export class NotificationController {
  constructor(private notifsRepo: NotificationRepository) {}

  /** Paged, filtered listing of a user's notifications. */
  getNotifications(
    fctx: FlowExecCtx,
    filters: NotificationFilters,
    pagination: PaginationOptions,
  ) {
    return this.notifsRepo.getNotifications(fctx, filters, pagination);
  }

  /** Mark the given notifications as read for the user. */
  markAsRead(
    fctx: FlowExecCtx,
    notificationIds: number[],
    userId: string,
  ) {
    return this.notifsRepo.markAsRead(fctx, notificationIds, userId);
  }

  /** Mark the given notifications as unread for the user. */
  markAsUnread(
    fctx: FlowExecCtx,
    notificationIds: number[],
    userId: string,
  ) {
    return this.notifsRepo.markAsUnread(fctx, notificationIds, userId);
  }

  /** Archive the given notifications for the user. */
  archive(
    fctx: FlowExecCtx,
    notificationIds: number[],
    userId: string,
  ) {
    return this.notifsRepo.archive(fctx, notificationIds, userId);
  }

  /** Unarchive the given notifications for the user. */
  unarchive(
    fctx: FlowExecCtx,
    notificationIds: number[],
    userId: string,
  ) {
    return this.notifsRepo.unarchive(fctx, notificationIds, userId);
  }

  /** Delete the given notifications for the user. */
  deleteNotifications(
    fctx: FlowExecCtx,
    notificationIds: number[],
    userId: string,
  ) {
    return this.notifsRepo.deleteNotifications(fctx, notificationIds, userId);
  }

  /** Count of unread notifications for the user. */
  getUnreadCount(
    fctx: FlowExecCtx,
    userId: string,
  ) {
    return this.notifsRepo.getUnreadCount(fctx, userId);
  }

  /**
   * Mark every unread, unarchived notification as read.
   *
   * NOTE(review): fetches at most the first 1000 unread ids in one page;
   * users with more unread items would only be partially marked —
   * confirm the cap is acceptable.
   */
  markAllAsRead(
    fctx: FlowExecCtx,
    userId: string,
  ) {
    // Get all unread notification IDs for this user
    const filters: NotificationFilters = {
      userId,
      isRead: false,
      isArchived: false,
    };

    // Get a large number to handle bulk operations
    const pagination: PaginationOptions = { page: 1, pageSize: 1000 };

    return this.notifsRepo
      .getNotifications(fctx, filters, pagination)
      .map((paginated) => paginated.data.map((n) => n.id))
      .andThen((notificationIds) => {
        // Nothing unread: succeed without issuing an update.
        if (notificationIds.length === 0) {
          return okAsync(true);
        }
        return this.notifsRepo.markAsRead(fctx, notificationIds, userId);
      });
  }
}
|
||||
|
||||
/** Factory wiring the controller to a db-backed notification repo. */
export function getNotificationController(): NotificationController {
  return new NotificationController(new NotificationRepository(db));
}
|
||||
115
packages/logic/domains/notifications/data.ts
Normal file
115
packages/logic/domains/notifications/data.ts
Normal file
@@ -0,0 +1,115 @@
|
||||
import * as v from "valibot";
|
||||
|
||||
// Notification schema — validated shape of a notification record.
export const notificationSchema = v.object({
  id: v.pipe(v.number(), v.integer()),
  title: v.string(),
  body: v.string(),
  priority: v.string(),
  type: v.string(),
  category: v.string(),
  isRead: v.boolean(),
  isArchived: v.boolean(),
  actionUrl: v.string(),
  actionType: v.string(),
  actionData: v.string(),
  icon: v.string(),
  userId: v.string(),
  sentAt: v.date(),
  // Nullable timestamps: present only once the event has happened.
  readAt: v.nullable(v.date()),
  expiresAt: v.nullable(v.date()),
  createdAt: v.date(),
  updatedAt: v.date(),
});

export type Notification = v.InferOutput<typeof notificationSchema>;
export type Notifications = Notification[];
|
||||
|
||||
// Notification filters schema — list filters; only userId is required.
export const notificationFiltersSchema = v.object({
  userId: v.string(),
  isRead: v.optional(v.boolean()),
  isArchived: v.optional(v.boolean()),
  type: v.optional(v.string()),
  category: v.optional(v.string()),
  priority: v.optional(v.string()),
  search: v.optional(v.string()),
});
export type NotificationFilters = v.InferOutput<
  typeof notificationFiltersSchema
>;

// Plain (unvalidated) input shape combining filter fields with
// paging/sorting options.
export type NotificationsQueryInput = {
  isRead?: boolean;
  isArchived?: boolean;
  type?: string;
  category?: string;
  priority?: string;
  search?: string;
  page?: number;
  pageSize?: number;
  sortBy?: string;
  sortOrder?: string;
};
|
||||
|
||||
// Pagination options schema — page/pageSize plus optional sorting.
export const paginationOptionsSchema = v.object({
  page: v.pipe(v.number(), v.integer()),
  pageSize: v.pipe(v.number(), v.integer()),
  sortBy: v.optional(v.string()),
  sortOrder: v.optional(v.string()),
});
export type PaginationOptions = v.InferOutput<typeof paginationOptionsSchema>;

// Paginated notifications schema — one page of results with totals.
export const paginatedNotificationsSchema = v.object({
  data: v.array(notificationSchema),
  total: v.pipe(v.number(), v.integer()),
  page: v.pipe(v.number(), v.integer()),
  pageSize: v.pipe(v.number(), v.integer()),
  totalPages: v.pipe(v.number(), v.integer()),
});
export type PaginatedNotifications = v.InferOutput<
  typeof paginatedNotificationsSchema
>;
|
||||
|
||||
// Get notifications schema
// Combined request payload for the list endpoint: filters + pagination,
// validated together.
export const getNotificationsSchema = v.object({
  filters: notificationFiltersSchema,
  pagination: paginationOptionsSchema,
});
export type GetNotifications = v.InferOutput<typeof getNotificationsSchema>;
|
||||
|
||||
// Bulk notification IDs schema
// Payload for bulk mutations (mark read/unread, archive/unarchive, delete):
// the integer row ids of the notifications to act on.
export const bulkNotificationIdsSchema = v.object({
  notificationIds: v.array(v.pipe(v.number(), v.integer())),
});
export type BulkNotificationIds = v.InferOutput<
  typeof bulkNotificationIdsSchema
>;
|
||||
|
||||
// View Model specific types
|
||||
export const clientNotificationFiltersSchema = v.object({
|
||||
userId: v.string(),
|
||||
isRead: v.optional(v.boolean()),
|
||||
isArchived: v.optional(v.boolean()),
|
||||
type: v.optional(v.string()),
|
||||
category: v.optional(v.string()),
|
||||
priority: v.optional(v.string()),
|
||||
search: v.optional(v.string()),
|
||||
});
|
||||
export type ClientNotificationFilters = v.InferOutput<
|
||||
typeof clientNotificationFiltersSchema
|
||||
>;
|
||||
|
||||
// Client-side pagination/sort state. Unlike paginationOptionsSchema, the
// sort fields are constrained to exactly the columns the repository's
// order-by mapping supports, and the server-computed totals are carried
// along with the page state.
export const clientPaginationStateSchema = v.object({
  page: v.pipe(v.number(), v.integer()),
  pageSize: v.pipe(v.number(), v.integer()),
  total: v.pipe(v.number(), v.integer()),
  totalPages: v.pipe(v.number(), v.integer()),
  sortBy: v.picklist(["createdAt", "sentAt", "readAt", "priority"]),
  sortOrder: v.picklist(["asc", "desc"]),
});
export type ClientPaginationState = v.InferOutput<
  typeof clientPaginationStateSchema
>;
|
||||
78
packages/logic/domains/notifications/errors.ts
Normal file
78
packages/logic/domains/notifications/errors.ts
Normal file
@@ -0,0 +1,78 @@
|
||||
import { FlowExecCtx } from "@/core/flow.execution.context";
|
||||
import { ERROR_CODES, type Err } from "@pkg/result";
|
||||
import { getError } from "@pkg/logger";
|
||||
|
||||
export const notificationErrors = {
|
||||
dbError: (fctx: FlowExecCtx, detail: string): Err =>
|
||||
getError({
|
||||
flowId: fctx.flowId,
|
||||
code: ERROR_CODES.DATABASE_ERROR,
|
||||
message: "Database operation failed",
|
||||
description: "Please try again later",
|
||||
detail,
|
||||
}),
|
||||
|
||||
getNotificationsFailed: (fctx: FlowExecCtx, detail: string): Err =>
|
||||
getError({
|
||||
flowId: fctx.flowId,
|
||||
code: ERROR_CODES.DATABASE_ERROR,
|
||||
message: "Failed to fetch notifications",
|
||||
description: "Please try again later",
|
||||
detail,
|
||||
}),
|
||||
|
||||
markAsReadFailed: (fctx: FlowExecCtx, detail: string): Err =>
|
||||
getError({
|
||||
flowId: fctx.flowId,
|
||||
code: ERROR_CODES.DATABASE_ERROR,
|
||||
message: "Failed to mark notifications as read",
|
||||
description: "Please try again later",
|
||||
detail,
|
||||
}),
|
||||
|
||||
markAsUnreadFailed: (fctx: FlowExecCtx, detail: string): Err =>
|
||||
getError({
|
||||
flowId: fctx.flowId,
|
||||
code: ERROR_CODES.DATABASE_ERROR,
|
||||
message: "Failed to mark notifications as unread",
|
||||
description: "Please try again later",
|
||||
detail,
|
||||
}),
|
||||
|
||||
archiveFailed: (fctx: FlowExecCtx, detail: string): Err =>
|
||||
getError({
|
||||
flowId: fctx.flowId,
|
||||
code: ERROR_CODES.DATABASE_ERROR,
|
||||
message: "Failed to archive notifications",
|
||||
description: "Please try again later",
|
||||
detail,
|
||||
}),
|
||||
|
||||
unarchiveFailed: (fctx: FlowExecCtx, detail: string): Err =>
|
||||
getError({
|
||||
flowId: fctx.flowId,
|
||||
code: ERROR_CODES.DATABASE_ERROR,
|
||||
message: "Failed to unarchive notifications",
|
||||
description: "Please try again later",
|
||||
detail,
|
||||
}),
|
||||
|
||||
deleteNotificationsFailed: (fctx: FlowExecCtx, detail: string): Err =>
|
||||
getError({
|
||||
flowId: fctx.flowId,
|
||||
code: ERROR_CODES.DATABASE_ERROR,
|
||||
message: "Failed to delete notifications",
|
||||
description: "Please try again later",
|
||||
detail,
|
||||
}),
|
||||
|
||||
getUnreadCountFailed: (fctx: FlowExecCtx, detail: string): Err =>
|
||||
getError({
|
||||
flowId: fctx.flowId,
|
||||
code: ERROR_CODES.DATABASE_ERROR,
|
||||
message: "Failed to get unread count",
|
||||
description: "Please try again later",
|
||||
detail,
|
||||
}),
|
||||
};
|
||||
|
||||
453
packages/logic/domains/notifications/repository.ts
Normal file
453
packages/logic/domains/notifications/repository.ts
Normal file
@@ -0,0 +1,453 @@
|
||||
import { and, asc, count, Database, desc, eq, inArray, like, or, sql } from "@pkg/db";
|
||||
import { notifications } from "@pkg/db/schema";
|
||||
import { ResultAsync } from "neverthrow";
|
||||
import { FlowExecCtx } from "@core/flow.execution.context";
|
||||
import type {
|
||||
Notification,
|
||||
NotificationFilters,
|
||||
PaginatedNotifications,
|
||||
PaginationOptions,
|
||||
} from "./data";
|
||||
import { type Err } from "@pkg/result";
|
||||
import { notificationErrors } from "./errors";
|
||||
import { logDomainEvent } from "@pkg/logger";
|
||||
|
||||
export class NotificationRepository {
|
||||
constructor(private db: Database) {}
|
||||
|
||||
getNotifications(
|
||||
fctx: FlowExecCtx,
|
||||
filters: NotificationFilters,
|
||||
pagination: PaginationOptions,
|
||||
): ResultAsync<PaginatedNotifications, Err> {
|
||||
const startedAt = Date.now();
|
||||
logDomainEvent({
|
||||
event: "notifications.list.started",
|
||||
fctx,
|
||||
meta: {
|
||||
hasSearch: Boolean(filters.search),
|
||||
isRead: filters.isRead,
|
||||
isArchived: filters.isArchived,
|
||||
page: pagination.page,
|
||||
pageSize: pagination.pageSize,
|
||||
sortBy: pagination.sortBy,
|
||||
sortOrder: pagination.sortOrder,
|
||||
},
|
||||
});
|
||||
|
||||
const { userId, isRead, isArchived, type, category, priority, search } =
|
||||
filters;
|
||||
const {
|
||||
page,
|
||||
pageSize,
|
||||
sortBy = "createdAt",
|
||||
sortOrder = "desc",
|
||||
} = pagination;
|
||||
|
||||
// Build WHERE conditions
|
||||
const conditions = [eq(notifications.userId, userId)];
|
||||
|
||||
if (isRead !== undefined) {
|
||||
conditions.push(eq(notifications.isRead, isRead));
|
||||
}
|
||||
|
||||
if (isArchived !== undefined) {
|
||||
conditions.push(eq(notifications.isArchived, isArchived));
|
||||
}
|
||||
|
||||
if (type) {
|
||||
conditions.push(eq(notifications.type, type));
|
||||
}
|
||||
|
||||
if (category) {
|
||||
conditions.push(eq(notifications.category, category));
|
||||
}
|
||||
|
||||
if (priority) {
|
||||
conditions.push(eq(notifications.priority, priority));
|
||||
}
|
||||
|
||||
if (search) {
|
||||
conditions.push(
|
||||
or(
|
||||
like(notifications.title, `%${search}%`),
|
||||
like(notifications.body, `%${search}%`),
|
||||
)!,
|
||||
);
|
||||
}
|
||||
|
||||
const whereClause = and(...conditions);
|
||||
|
||||
return ResultAsync.fromPromise(
|
||||
this.db.select({ count: count() }).from(notifications).where(whereClause),
|
||||
(error) => {
|
||||
logDomainEvent({
|
||||
level: "error",
|
||||
event: "notifications.list.failed",
|
||||
fctx,
|
||||
durationMs: Date.now() - startedAt,
|
||||
error,
|
||||
});
|
||||
return notificationErrors.getNotificationsFailed(
|
||||
fctx,
|
||||
error instanceof Error ? error.message : String(error),
|
||||
);
|
||||
},
|
||||
).andThen((totalResult) => {
|
||||
const total = totalResult[0]?.count || 0;
|
||||
const offset = (page - 1) * pageSize;
|
||||
|
||||
// Map sortBy to proper column
|
||||
const getOrderColumn = (sortBy: string) => {
|
||||
switch (sortBy) {
|
||||
case "createdAt":
|
||||
return notifications.createdAt;
|
||||
case "sentAt":
|
||||
return notifications.sentAt;
|
||||
case "readAt":
|
||||
return notifications.readAt;
|
||||
case "priority":
|
||||
return notifications.priority;
|
||||
default:
|
||||
return notifications.createdAt;
|
||||
}
|
||||
};
|
||||
|
||||
const orderColumn = getOrderColumn(sortBy);
|
||||
const orderFunc = sortOrder === "asc" ? asc : desc;
|
||||
|
||||
return ResultAsync.fromPromise(
|
||||
this.db
|
||||
.select()
|
||||
.from(notifications)
|
||||
.where(whereClause)
|
||||
.orderBy(orderFunc(orderColumn))
|
||||
.limit(pageSize)
|
||||
.offset(offset),
|
||||
(error) => {
|
||||
logDomainEvent({
|
||||
level: "error",
|
||||
event: "notifications.list.failed",
|
||||
fctx,
|
||||
durationMs: Date.now() - startedAt,
|
||||
error,
|
||||
});
|
||||
return notificationErrors.getNotificationsFailed(
|
||||
fctx,
|
||||
error instanceof Error ? error.message : String(error),
|
||||
);
|
||||
},
|
||||
).map((data) => {
|
||||
const totalPages = Math.ceil(total / pageSize);
|
||||
logDomainEvent({
|
||||
event: "notifications.list.succeeded",
|
||||
fctx,
|
||||
durationMs: Date.now() - startedAt,
|
||||
meta: {
|
||||
count: data.length,
|
||||
page,
|
||||
totalPages,
|
||||
},
|
||||
});
|
||||
|
||||
return {
|
||||
data: data as Notification[],
|
||||
total,
|
||||
page,
|
||||
pageSize,
|
||||
totalPages,
|
||||
};
|
||||
});
|
||||
});
|
||||
}
|
||||
|
||||
markAsRead(
|
||||
fctx: FlowExecCtx,
|
||||
notificationIds: number[],
|
||||
userId: string,
|
||||
): ResultAsync<boolean, Err> {
|
||||
const startedAt = Date.now();
|
||||
logDomainEvent({
|
||||
event: "notifications.mark_read.started",
|
||||
fctx,
|
||||
meta: { userId, notificationCount: notificationIds.length },
|
||||
});
|
||||
|
||||
return ResultAsync.fromPromise(
|
||||
this.db
|
||||
.update(notifications)
|
||||
.set({
|
||||
isRead: true,
|
||||
readAt: new Date(),
|
||||
updatedAt: new Date(),
|
||||
})
|
||||
.where(
|
||||
and(
|
||||
eq(notifications.userId, userId),
|
||||
sql`${notifications.id} = ANY(${notificationIds})`,
|
||||
),
|
||||
),
|
||||
(error) => {
|
||||
logDomainEvent({
|
||||
level: "error",
|
||||
event: "notifications.mark_read.failed",
|
||||
fctx,
|
||||
durationMs: Date.now() - startedAt,
|
||||
error,
|
||||
});
|
||||
return notificationErrors.markAsReadFailed(
|
||||
fctx,
|
||||
error instanceof Error ? error.message : String(error),
|
||||
);
|
||||
},
|
||||
).map(() => {
|
||||
logDomainEvent({
|
||||
event: "notifications.mark_read.succeeded",
|
||||
fctx,
|
||||
durationMs: Date.now() - startedAt,
|
||||
meta: { notificationCount: notificationIds.length },
|
||||
});
|
||||
return true;
|
||||
});
|
||||
}
|
||||
|
||||
markAsUnread(
|
||||
fctx: FlowExecCtx,
|
||||
notificationIds: number[],
|
||||
userId: string,
|
||||
): ResultAsync<boolean, Err> {
|
||||
const startedAt = Date.now();
|
||||
logDomainEvent({
|
||||
event: "notifications.mark_unread.started",
|
||||
fctx,
|
||||
meta: { userId, notificationCount: notificationIds.length },
|
||||
});
|
||||
|
||||
return ResultAsync.fromPromise(
|
||||
this.db
|
||||
.update(notifications)
|
||||
.set({
|
||||
isRead: false,
|
||||
readAt: null,
|
||||
updatedAt: new Date(),
|
||||
})
|
||||
.where(
|
||||
and(
|
||||
eq(notifications.userId, userId),
|
||||
sql`${notifications.id} = ANY(${notificationIds})`,
|
||||
),
|
||||
),
|
||||
(error) => {
|
||||
logDomainEvent({
|
||||
level: "error",
|
||||
event: "notifications.mark_unread.failed",
|
||||
fctx,
|
||||
durationMs: Date.now() - startedAt,
|
||||
error,
|
||||
});
|
||||
return notificationErrors.markAsUnreadFailed(
|
||||
fctx,
|
||||
error instanceof Error ? error.message : String(error),
|
||||
);
|
||||
},
|
||||
).map(() => {
|
||||
logDomainEvent({
|
||||
event: "notifications.mark_unread.succeeded",
|
||||
fctx,
|
||||
durationMs: Date.now() - startedAt,
|
||||
meta: { notificationCount: notificationIds.length },
|
||||
});
|
||||
return true;
|
||||
});
|
||||
}
|
||||
|
||||
archive(
|
||||
fctx: FlowExecCtx,
|
||||
notificationIds: number[],
|
||||
userId: string,
|
||||
): ResultAsync<boolean, Err> {
|
||||
const startedAt = Date.now();
|
||||
logDomainEvent({
|
||||
event: "notifications.archive.started",
|
||||
fctx,
|
||||
meta: { userId, notificationCount: notificationIds.length },
|
||||
});
|
||||
|
||||
return ResultAsync.fromPromise(
|
||||
this.db
|
||||
.update(notifications)
|
||||
.set({
|
||||
isArchived: true,
|
||||
updatedAt: new Date(),
|
||||
})
|
||||
.where(
|
||||
and(
|
||||
eq(notifications.userId, userId),
|
||||
sql`${notifications.id} = ANY(${notificationIds})`,
|
||||
),
|
||||
),
|
||||
(error) => {
|
||||
logDomainEvent({
|
||||
level: "error",
|
||||
event: "notifications.archive.failed",
|
||||
fctx,
|
||||
durationMs: Date.now() - startedAt,
|
||||
error,
|
||||
});
|
||||
return notificationErrors.archiveFailed(
|
||||
fctx,
|
||||
error instanceof Error ? error.message : String(error),
|
||||
);
|
||||
},
|
||||
).map(() => {
|
||||
logDomainEvent({
|
||||
event: "notifications.archive.succeeded",
|
||||
fctx,
|
||||
durationMs: Date.now() - startedAt,
|
||||
meta: { notificationCount: notificationIds.length },
|
||||
});
|
||||
return true;
|
||||
});
|
||||
}
|
||||
|
||||
unarchive(
|
||||
fctx: FlowExecCtx,
|
||||
notificationIds: number[],
|
||||
userId: string,
|
||||
): ResultAsync<boolean, Err> {
|
||||
const startedAt = Date.now();
|
||||
logDomainEvent({
|
||||
event: "notifications.unarchive.started",
|
||||
fctx,
|
||||
meta: { userId, notificationCount: notificationIds.length },
|
||||
});
|
||||
|
||||
return ResultAsync.fromPromise(
|
||||
this.db
|
||||
.update(notifications)
|
||||
.set({
|
||||
isArchived: false,
|
||||
updatedAt: new Date(),
|
||||
})
|
||||
.where(
|
||||
and(
|
||||
eq(notifications.userId, userId),
|
||||
sql`${notifications.id} = ANY(${notificationIds})`,
|
||||
),
|
||||
),
|
||||
(error) => {
|
||||
logDomainEvent({
|
||||
level: "error",
|
||||
event: "notifications.unarchive.failed",
|
||||
fctx,
|
||||
durationMs: Date.now() - startedAt,
|
||||
error,
|
||||
});
|
||||
return notificationErrors.unarchiveFailed(
|
||||
fctx,
|
||||
error instanceof Error ? error.message : String(error),
|
||||
);
|
||||
},
|
||||
).map(() => {
|
||||
logDomainEvent({
|
||||
event: "notifications.unarchive.succeeded",
|
||||
fctx,
|
||||
durationMs: Date.now() - startedAt,
|
||||
meta: { notificationCount: notificationIds.length },
|
||||
});
|
||||
return true;
|
||||
});
|
||||
}
|
||||
|
||||
deleteNotifications(
|
||||
fctx: FlowExecCtx,
|
||||
notificationIds: number[],
|
||||
userId: string,
|
||||
): ResultAsync<boolean, Err> {
|
||||
const startedAt = Date.now();
|
||||
logDomainEvent({
|
||||
event: "notifications.delete.started",
|
||||
fctx,
|
||||
meta: { userId, notificationCount: notificationIds.length },
|
||||
});
|
||||
|
||||
return ResultAsync.fromPromise(
|
||||
this.db
|
||||
.delete(notifications)
|
||||
.where(
|
||||
and(
|
||||
eq(notifications.userId, userId),
|
||||
sql`${notifications.id} = ANY(${notificationIds})`,
|
||||
),
|
||||
),
|
||||
(error) => {
|
||||
logDomainEvent({
|
||||
level: "error",
|
||||
event: "notifications.delete.failed",
|
||||
fctx,
|
||||
durationMs: Date.now() - startedAt,
|
||||
error,
|
||||
});
|
||||
return notificationErrors.deleteNotificationsFailed(
|
||||
fctx,
|
||||
error instanceof Error ? error.message : String(error),
|
||||
);
|
||||
},
|
||||
).map(() => {
|
||||
logDomainEvent({
|
||||
event: "notifications.delete.succeeded",
|
||||
fctx,
|
||||
durationMs: Date.now() - startedAt,
|
||||
meta: { notificationCount: notificationIds.length },
|
||||
});
|
||||
return true;
|
||||
});
|
||||
}
|
||||
|
||||
getUnreadCount(
|
||||
fctx: FlowExecCtx,
|
||||
userId: string,
|
||||
): ResultAsync<number, Err> {
|
||||
const startedAt = Date.now();
|
||||
logDomainEvent({
|
||||
event: "notifications.unread_count.started",
|
||||
fctx,
|
||||
meta: { userId },
|
||||
});
|
||||
|
||||
return ResultAsync.fromPromise(
|
||||
this.db
|
||||
.select({ count: count() })
|
||||
.from(notifications)
|
||||
.where(
|
||||
and(
|
||||
eq(notifications.userId, userId),
|
||||
eq(notifications.isRead, false),
|
||||
eq(notifications.isArchived, false),
|
||||
),
|
||||
),
|
||||
(error) => {
|
||||
logDomainEvent({
|
||||
level: "error",
|
||||
event: "notifications.unread_count.failed",
|
||||
fctx,
|
||||
durationMs: Date.now() - startedAt,
|
||||
error,
|
||||
});
|
||||
return notificationErrors.getUnreadCountFailed(
|
||||
fctx,
|
||||
error instanceof Error ? error.message : String(error),
|
||||
);
|
||||
},
|
||||
).map((result) => {
|
||||
const count = result[0]?.count || 0;
|
||||
logDomainEvent({
|
||||
event: "notifications.unread_count.succeeded",
|
||||
fctx,
|
||||
durationMs: Date.now() - startedAt,
|
||||
meta: { count },
|
||||
});
|
||||
return count;
|
||||
});
|
||||
}
|
||||
}
|
||||
72
packages/logic/domains/tasks/controller.ts
Normal file
72
packages/logic/domains/tasks/controller.ts
Normal file
@@ -0,0 +1,72 @@
|
||||
import { db } from "@pkg/db";
|
||||
import { FlowExecCtx } from "@core/flow.execution.context";
|
||||
import { CreateTask, TaskStatus, TaskType, UpdateTask } from "./data";
|
||||
import { TasksRepository } from "./repository";
|
||||
|
||||
export class TasksController {
|
||||
constructor(private tasksRepo: TasksRepository) {}
|
||||
|
||||
createTask(fctx: FlowExecCtx, taskData: CreateTask) {
|
||||
return this.tasksRepo.createTask(fctx, taskData);
|
||||
}
|
||||
|
||||
getTaskById(fctx: FlowExecCtx, taskId: string) {
|
||||
return this.tasksRepo.getTaskById(fctx, taskId);
|
||||
}
|
||||
|
||||
updateTask(fctx: FlowExecCtx, taskId: string, updates: UpdateTask) {
|
||||
return this.tasksRepo.updateTask(fctx, taskId, updates);
|
||||
}
|
||||
|
||||
deleteTask(fctx: FlowExecCtx, taskId: string) {
|
||||
return this.tasksRepo.deleteTask(fctx, taskId);
|
||||
}
|
||||
|
||||
getTasksByStatuses(fctx: FlowExecCtx, statuses: TaskStatus[]) {
|
||||
return this.tasksRepo.getTasksByStatuses(fctx, statuses);
|
||||
}
|
||||
|
||||
getTasksByTypeAndStatuses(
|
||||
fctx: FlowExecCtx,
|
||||
type: TaskType,
|
||||
statuses: TaskStatus[],
|
||||
) {
|
||||
return this.tasksRepo.getTasksByTypeAndStatuses(fctx, type, statuses);
|
||||
}
|
||||
|
||||
markTaskAsCompleted(
|
||||
fctx: FlowExecCtx,
|
||||
taskId: string,
|
||||
result?: Record<string, any>,
|
||||
) {
|
||||
return this.tasksRepo.markTaskAsCompleted(fctx, taskId, result);
|
||||
}
|
||||
|
||||
markTaskAsFailed(fctx: FlowExecCtx, taskId: string, error: any) {
|
||||
return this.tasksRepo.markTaskAsFailed(fctx, taskId, error);
|
||||
}
|
||||
|
||||
updateTaskProgress(fctx: FlowExecCtx, taskId: string, progress: number) {
|
||||
return this.tasksRepo.updateTask(fctx, taskId, {
|
||||
progress: Math.max(0, Math.min(100, progress)),
|
||||
});
|
||||
}
|
||||
|
||||
cancelTask(fctx: FlowExecCtx, taskId: string) {
|
||||
return this.tasksRepo.updateTask(fctx, taskId, {
|
||||
status: TaskStatus.CANCELLED,
|
||||
completedAt: new Date(),
|
||||
});
|
||||
}
|
||||
|
||||
startTask(fctx: FlowExecCtx, taskId: string) {
|
||||
return this.tasksRepo.updateTask(fctx, taskId, {
|
||||
status: TaskStatus.RUNNING,
|
||||
startedAt: new Date(),
|
||||
});
|
||||
}
|
||||
}
|
||||
|
||||
export function getTasksController(): TasksController {
|
||||
return new TasksController(new TasksRepository(db));
|
||||
}
|
||||
71
packages/logic/domains/tasks/data.ts
Normal file
71
packages/logic/domains/tasks/data.ts
Normal file
@@ -0,0 +1,71 @@
|
||||
import * as v from "valibot";
|
||||
|
||||
// Lifecycle states of a background task. String-valued so the persisted
// values stay human-readable.
export enum TaskStatus {
  PENDING = "pending",
  RUNNING = "running",
  COMPLETED = "completed",
  FAILED = "failed",
  CANCELLED = "cancelled",
}

// Valibot counterpart of TaskStatus — keep the value list in sync with the
// enum above.
export const taskStatusSchema = v.picklist([
  "pending",
  "running",
  "completed",
  "failed",
  "cancelled",
]);
export type TaskStatusType = v.InferOutput<typeof taskStatusSchema>;
|
||||
|
||||
// Kinds of background task. Currently only APK builds.
export enum TaskType {
  APK_BUILD = "apk_build",
}

// Valibot counterpart of TaskType — keep in sync with the enum above.
export const taskTypeSchema = v.picklist(["apk_build"]);
export type TaskTypeValue = v.InferOutput<typeof taskTypeSchema>;
|
||||
|
||||
// Structured failure information stored on a task when it fails
// (see TasksRepository.markTaskAsFailed).
export const taskErrorSchema = v.object({
  code: v.string(),
  message: v.string(),
  detail: v.optional(v.string()),
  timestamp: v.date(),
});
export type TaskError = v.InferOutput<typeof taskErrorSchema>;
|
||||
|
||||
// Full task record as stored in the database.
export const taskSchema = v.object({
  id: v.string(),
  type: taskTypeSchema,
  status: taskStatusSchema,
  // Completion percentage; an integer, clamped to 0–100 by the controller.
  progress: v.pipe(v.number(), v.integer()),
  // Free-form input/output payloads — shape depends on the task type.
  payload: v.optional(v.nullable(v.record(v.string(), v.any()))),
  result: v.optional(v.nullable(v.record(v.string(), v.any()))),
  error: v.optional(v.nullable(taskErrorSchema)),
  userId: v.string(),
  resourceId: v.string(),
  // Null until the task actually starts / finishes.
  startedAt: v.optional(v.nullable(v.date())),
  completedAt: v.optional(v.nullable(v.date())),
  createdAt: v.date(),
  updatedAt: v.date(),
});
export type Task = v.InferOutput<typeof taskSchema>;
|
||||
|
||||
// Payload for creating a task. The caller supplies the id; status and
// progress are optional (the repository defaults them to PENDING / 0).
export const createTaskSchema = v.object({
  id: v.string(),
  type: taskTypeSchema,
  status: v.optional(taskStatusSchema),
  progress: v.optional(v.pipe(v.number(), v.integer())),
  payload: v.optional(v.nullable(v.record(v.string(), v.any()))),
  userId: v.string(),
  resourceId: v.string(),
});
export type CreateTask = v.InferOutput<typeof createTaskSchema>;
|
||||
|
||||
// Partial task update: only the mutable lifecycle fields. Identity and
// ownership fields (id, type, userId, resourceId) are not updatable.
export const updateTaskSchema = v.object({
  status: v.optional(taskStatusSchema),
  progress: v.optional(v.pipe(v.number(), v.integer())),
  result: v.optional(v.nullable(v.record(v.string(), v.any()))),
  error: v.optional(v.nullable(taskErrorSchema)),
  startedAt: v.optional(v.nullable(v.date())),
  completedAt: v.optional(v.nullable(v.date())),
});
export type UpdateTask = v.InferOutput<typeof updateTaskSchema>;
|
||||
87
packages/logic/domains/tasks/errors.ts
Normal file
87
packages/logic/domains/tasks/errors.ts
Normal file
@@ -0,0 +1,87 @@
|
||||
import { FlowExecCtx } from "@/core/flow.execution.context";
|
||||
import { ERROR_CODES, type Err } from "@pkg/result";
|
||||
import { getError } from "@pkg/logger";
|
||||
|
||||
export const taskErrors = {
|
||||
dbError: (fctx: FlowExecCtx, detail: string): Err =>
|
||||
getError({
|
||||
flowId: fctx.flowId,
|
||||
code: ERROR_CODES.DATABASE_ERROR,
|
||||
message: "Database operation failed",
|
||||
description: "Please try again later",
|
||||
detail,
|
||||
}),
|
||||
|
||||
taskNotFound: (fctx: FlowExecCtx, taskId: string): Err =>
|
||||
getError({
|
||||
flowId: fctx.flowId,
|
||||
code: ERROR_CODES.NOT_FOUND,
|
||||
message: "Task not found",
|
||||
description: "The requested task does not exist",
|
||||
detail: `No task found with ID: ${taskId}`,
|
||||
}),
|
||||
|
||||
createTaskFailed: (fctx: FlowExecCtx, detail: string): Err =>
|
||||
getError({
|
||||
flowId: fctx.flowId,
|
||||
code: ERROR_CODES.DATABASE_ERROR,
|
||||
message: "An error occurred while creating task",
|
||||
description: "Try again later",
|
||||
detail,
|
||||
}),
|
||||
|
||||
getTaskFailed: (fctx: FlowExecCtx, detail: string): Err =>
|
||||
getError({
|
||||
flowId: fctx.flowId,
|
||||
code: ERROR_CODES.DATABASE_ERROR,
|
||||
message: "An error occurred while fetching task",
|
||||
description: "Try again later",
|
||||
detail,
|
||||
}),
|
||||
|
||||
updateTaskFailed: (fctx: FlowExecCtx, detail: string): Err =>
|
||||
getError({
|
||||
flowId: fctx.flowId,
|
||||
code: ERROR_CODES.DATABASE_ERROR,
|
||||
message: "An error occurred while updating task",
|
||||
description: "Try again later",
|
||||
detail,
|
||||
}),
|
||||
|
||||
deleteTaskFailed: (fctx: FlowExecCtx, detail: string): Err =>
|
||||
getError({
|
||||
flowId: fctx.flowId,
|
||||
code: ERROR_CODES.DATABASE_ERROR,
|
||||
message: "An error occurred while deleting task",
|
||||
description: "Try again later",
|
||||
detail,
|
||||
}),
|
||||
|
||||
getTasksFailed: (fctx: FlowExecCtx, detail: string): Err =>
|
||||
getError({
|
||||
flowId: fctx.flowId,
|
||||
code: ERROR_CODES.DATABASE_ERROR,
|
||||
message: "An error occurred while fetching tasks",
|
||||
description: "Try again later",
|
||||
detail,
|
||||
}),
|
||||
|
||||
getTasksByStatusFailed: (fctx: FlowExecCtx, detail: string): Err =>
|
||||
getError({
|
||||
flowId: fctx.flowId,
|
||||
code: ERROR_CODES.DATABASE_ERROR,
|
||||
message: "An error occurred while fetching tasks by status",
|
||||
description: "Try again later",
|
||||
detail,
|
||||
}),
|
||||
|
||||
checkTaskExistenceFailed: (fctx: FlowExecCtx, detail: string): Err =>
|
||||
getError({
|
||||
flowId: fctx.flowId,
|
||||
code: ERROR_CODES.DATABASE_ERROR,
|
||||
message: "An error occurred while checking task existence",
|
||||
description: "Try again later",
|
||||
detail,
|
||||
}),
|
||||
};
|
||||
|
||||
163
packages/logic/domains/tasks/repository.ts
Normal file
163
packages/logic/domains/tasks/repository.ts
Normal file
@@ -0,0 +1,163 @@
|
||||
import { CreateTask, Task, TaskStatus, TaskType, UpdateTask } from "./data";
|
||||
import { ResultAsync, errAsync, okAsync } from "neverthrow";
|
||||
import { FlowExecCtx } from "@core/flow.execution.context";
|
||||
import { Database, and, asc, eq, inArray } from "@pkg/db";
|
||||
import { task } from "@pkg/db/schema";
|
||||
import { type Err } from "@pkg/result";
|
||||
import { taskErrors } from "./errors";
|
||||
import { logger } from "@pkg/logger";
|
||||
|
||||
/**
 * Data access for the task table. Every operation returns a ResultAsync so
 * database failures surface as structured Err values (see ./errors) instead
 * of thrown exceptions.
 */
export class TasksRepository {
  constructor(private db: Database) {}

  /**
   * Inserts a new task row. Status defaults to PENDING and progress to 0
   * when not supplied; createdAt/updatedAt are stamped here.
   */
  createTask(fctx: FlowExecCtx, taskData: CreateTask): ResultAsync<Task, Err> {
    logger.info("Creating new task", { ...fctx, taskId: taskData.id })

    return ResultAsync.fromPromise(
      this.db
        .insert(task)
        .values({
          id: taskData.id,
          type: taskData.type,
          status: taskData.status || TaskStatus.PENDING,
          progress: taskData.progress || 0,
          payload: taskData.payload ?? null,
          userId: taskData.userId,
          resourceId: taskData.resourceId,
          createdAt: new Date(),
          updatedAt: new Date(),
        })
        .returning()
        .execute(),
      (error) =>
        taskErrors.createTaskFailed(
          fctx,
          error instanceof Error ? error.message : String(error),
        ),
    ).map((result) => result[0] as Task);
  }

  /**
   * Fetches one task by id. Resolves to taskNotFound when no row matches
   * (distinct from getTaskFailed, which signals a query error).
   */
  getTaskById(fctx: FlowExecCtx, taskId: string): ResultAsync<Task, Err> {
    return ResultAsync.fromPromise(
      this.db.query.task.findFirst({
        where: eq(task.id, taskId),
      }),
      (error) =>
        taskErrors.getTaskFailed(
          fctx,
          error instanceof Error ? error.message : String(error),
        ),
    ).andThen((result) => {
      if (!result) {
        return errAsync(taskErrors.taskNotFound(fctx, taskId));
      }

      return okAsync(result as Task);
    });
  }

  /**
   * Applies a partial update. Verifies the task exists first (so callers
   * get taskNotFound rather than a silent no-op) and refreshes updatedAt.
   */
  updateTask(
    fctx: FlowExecCtx,
    taskId: string,
    updates: UpdateTask,
  ): ResultAsync<Task, Err> {
    return this.getTaskById(fctx, taskId).andThen(() =>
      ResultAsync.fromPromise(
        this.db
          .update(task)
          .set({ ...updates, updatedAt: new Date() })
          .where(eq(task.id, taskId))
          .returning()
          .execute(),
        (error) =>
          taskErrors.updateTaskFailed(
            fctx,
            error instanceof Error ? error.message : String(error),
          ),
      ).andThen((updateResult) => {
        // The row may have vanished between the existence check and the
        // update; treat that as not-found as well.
        if (!updateResult[0]) {
          return errAsync(taskErrors.taskNotFound(fctx, taskId));
        }
        return okAsync(updateResult[0] as Task);
      }),
    );
  }

  /**
   * Deletes a task by id. Resolves to true even when no row matched, so
   * deletion is idempotent from the caller's perspective.
   */
  deleteTask(fctx: FlowExecCtx, taskId: string): ResultAsync<boolean, Err> {
    return ResultAsync.fromPromise(
      this.db.delete(task).where(eq(task.id, taskId)).execute(),
      (error) =>
        taskErrors.deleteTaskFailed(
          fctx,
          error instanceof Error ? error.message : String(error),
        ),
    ).map(() => true);
  }

  /** Lists tasks in any of the given statuses, oldest first. */
  getTasksByStatuses(
    fctx: FlowExecCtx,
    statuses: TaskStatus[],
  ): ResultAsync<Task[], Err> {
    return ResultAsync.fromPromise(
      this.db
        .select()
        .from(task)
        .where(inArray(task.status, statuses))
        .orderBy(asc(task.createdAt)),
      (error) =>
        taskErrors.getTasksByStatusFailed(
          fctx,
          error instanceof Error ? error.message : String(error),
        ),
    ).map((result) => result as Task[]);
  }

  /** Lists tasks of one type in any of the given statuses, oldest first. */
  getTasksByTypeAndStatuses(
    fctx: FlowExecCtx,
    type: TaskType,
    statuses: TaskStatus[],
  ): ResultAsync<Task[], Err> {
    return ResultAsync.fromPromise(
      this.db
        .select()
        .from(task)
        .where(and(eq(task.type, type), inArray(task.status, statuses)))
        .orderBy(asc(task.createdAt)),
      (error) =>
        taskErrors.getTasksByStatusFailed(
          fctx,
          error instanceof Error ? error.message : String(error),
        ),
    ).map((result) => result as Task[]);
  }

  /** Convenience transition: COMPLETED at 100% with an optional result. */
  markTaskAsCompleted(
    fctx: FlowExecCtx,
    taskId: string,
    result?: Record<string, any>,
  ): ResultAsync<Task, Err> {
    return this.updateTask(fctx, taskId, {
      status: TaskStatus.COMPLETED,
      progress: 100,
      result: result ?? null,
      completedAt: new Date(),
    });
  }

  /** Convenience transition: FAILED with a normalized TaskError payload. */
  markTaskAsFailed(
    fctx: FlowExecCtx,
    taskId: string,
    error: any,
  ): ResultAsync<Task, Err> {
    return this.updateTask(fctx, taskId, {
      status: TaskStatus.FAILED,
      error: {
        code: error.code || "UNKNOWN_ERROR",
        message: error.message || "Task failed",
        detail: error.detail,
        timestamp: new Date(),
      },
      completedAt: new Date(),
    });
  }
}
|
||||
250
packages/logic/domains/user/account.repository.ts
Normal file
250
packages/logic/domains/user/account.repository.ts
Normal file
@@ -0,0 +1,250 @@
|
||||
import { FlowExecCtx } from "@/core/flow.execution.context";
|
||||
import { traceResultAsync } from "@core/observability";
|
||||
import { ERROR_CODES, type Err } from "@pkg/result";
|
||||
import { getError, logDomainEvent } from "@pkg/logger";
|
||||
import { auth } from "../auth/config.base";
|
||||
import { account } from "@pkg/db/schema";
|
||||
import { ResultAsync, errAsync, okAsync } from "neverthrow";
|
||||
import { Database, eq } from "@pkg/db";
|
||||
import { nanoid } from "nanoid";
|
||||
|
||||
export class AccountRepository {
|
||||
constructor(private db: Database) {}
|
||||
|
||||
  // Wraps a low-level failure message into the standard DATABASE_ERROR
  // shape, tagged with the flow id for correlation.
  private dbError(fctx: FlowExecCtx, detail: string): Err {
    return getError({
      flowId: fctx.flowId,
      code: ERROR_CODES.DATABASE_ERROR,
      message: "Database operation failed",
      description: "Please try again later",
      detail,
    });
  }
|
||||
|
||||
  // NOT_FOUND error for a user with no credential account row.
  private accountNotFound(fctx: FlowExecCtx): Err {
    return getError({
      flowId: fctx.flowId,
      code: ERROR_CODES.NOT_FOUND,
      message: "Account not found",
      description: "Please try again later",
      detail: "Account not found for user",
    });
  }
|
||||
|
||||
ensureAccountExists(
|
||||
fctx: FlowExecCtx,
|
||||
userId: string,
|
||||
): ResultAsync<boolean, Err> {
|
||||
return traceResultAsync({
|
||||
name: "logic.user.repository.ensureAccountExists",
|
||||
fctx,
|
||||
attributes: { "app.user.id": userId },
|
||||
fn: () => {
|
||||
const startedAt = Date.now();
|
||||
logDomainEvent({
|
||||
event: "account.ensure_exists.started",
|
||||
fctx,
|
||||
meta: { userId },
|
||||
});
|
||||
|
||||
return ResultAsync.fromPromise(
|
||||
this.db.query.account.findFirst({
|
||||
where: eq(account.userId, userId),
|
||||
}),
|
||||
(error) => {
|
||||
logDomainEvent({
|
||||
level: "error",
|
||||
event: "account.ensure_exists.failed",
|
||||
fctx,
|
||||
durationMs: Date.now() - startedAt,
|
||||
error,
|
||||
meta: { userId },
|
||||
});
|
||||
return this.dbError(
|
||||
fctx,
|
||||
error instanceof Error ? error.message : String(error),
|
||||
);
|
||||
},
|
||||
).andThen((existingAccount) => {
|
||||
if (existingAccount) {
|
||||
logDomainEvent({
|
||||
event: "account.ensure_exists.succeeded",
|
||||
fctx,
|
||||
durationMs: Date.now() - startedAt,
|
||||
meta: { userId, existed: true },
|
||||
});
|
||||
return okAsync(true);
|
||||
}
|
||||
|
||||
return ResultAsync.fromPromise(
|
||||
auth.$context.then((ctx) => ctx.password.hash(nanoid())),
|
||||
(error) => {
|
||||
logDomainEvent({
|
||||
level: "error",
|
||||
event: "account.ensure_exists.failed",
|
||||
fctx,
|
||||
durationMs: Date.now() - startedAt,
|
||||
error,
|
||||
meta: { userId, stage: "hash_password" },
|
||||
});
|
||||
return this.dbError(
|
||||
fctx,
|
||||
error instanceof Error
|
||||
? error.message
|
||||
: String(error),
|
||||
);
|
||||
},
|
||||
).andThen((password) => {
|
||||
const aid = nanoid();
|
||||
|
||||
return ResultAsync.fromPromise(
|
||||
this.db
|
||||
.insert(account)
|
||||
.values({
|
||||
id: aid,
|
||||
accountId: userId,
|
||||
providerId: "credential",
|
||||
userId,
|
||||
password,
|
||||
createdAt: new Date(),
|
||||
updatedAt: new Date(),
|
||||
})
|
||||
.execute(),
|
||||
(error) => {
|
||||
logDomainEvent({
|
||||
level: "error",
|
||||
event: "account.ensure_exists.failed",
|
||||
fctx,
|
||||
durationMs: Date.now() - startedAt,
|
||||
error,
|
||||
meta: { userId, stage: "create_account" },
|
||||
});
|
||||
return this.dbError(
|
||||
fctx,
|
||||
error instanceof Error
|
||||
? error.message
|
||||
: String(error),
|
||||
);
|
||||
},
|
||||
).map(() => {
|
||||
logDomainEvent({
|
||||
event: "account.ensure_exists.succeeded",
|
||||
fctx,
|
||||
durationMs: Date.now() - startedAt,
|
||||
meta: { userId, existed: false },
|
||||
});
|
||||
return false;
|
||||
});
|
||||
});
|
||||
});
|
||||
},
|
||||
});
|
||||
}
|
||||
|
||||
rotatePassword(
|
||||
fctx: FlowExecCtx,
|
||||
userId: string,
|
||||
password: string,
|
||||
): ResultAsync<string, Err> {
|
||||
return traceResultAsync({
|
||||
name: "logic.user.repository.rotatePassword",
|
||||
fctx,
|
||||
attributes: { "app.user.id": userId },
|
||||
fn: () => {
|
||||
const startedAt = Date.now();
|
||||
logDomainEvent({
|
||||
event: "account.rotate_password.started",
|
||||
fctx,
|
||||
meta: { userId },
|
||||
});
|
||||
|
||||
return ResultAsync.fromPromise(
|
||||
this.db.query.account.findFirst({
|
||||
where: eq(account.userId, userId),
|
||||
}),
|
||||
(error) => {
|
||||
logDomainEvent({
|
||||
level: "error",
|
||||
event: "account.rotate_password.failed",
|
||||
fctx,
|
||||
durationMs: Date.now() - startedAt,
|
||||
error,
|
||||
meta: { userId, stage: "check_exists" },
|
||||
});
|
||||
return this.dbError(
|
||||
fctx,
|
||||
error instanceof Error
|
||||
? error.message
|
||||
: String(error),
|
||||
);
|
||||
},
|
||||
).andThen((existingAccount) => {
|
||||
if (!existingAccount) {
|
||||
logDomainEvent({
|
||||
level: "warn",
|
||||
event: "account.rotate_password.failed",
|
||||
fctx,
|
||||
durationMs: Date.now() - startedAt,
|
||||
error: { code: "NOT_FOUND", message: "Account not found" },
|
||||
meta: { userId },
|
||||
});
|
||||
return errAsync(this.accountNotFound(fctx));
|
||||
}
|
||||
|
||||
return ResultAsync.fromPromise(
|
||||
auth.$context.then((ctx) => ctx.password.hash(password)),
|
||||
(error) => {
|
||||
logDomainEvent({
|
||||
level: "error",
|
||||
event: "account.rotate_password.failed",
|
||||
fctx,
|
||||
durationMs: Date.now() - startedAt,
|
||||
error,
|
||||
meta: { userId, stage: "hash_password" },
|
||||
});
|
||||
return this.dbError(
|
||||
fctx,
|
||||
error instanceof Error
|
||||
? error.message
|
||||
: String(error),
|
||||
);
|
||||
},
|
||||
).andThen((hashed) => {
|
||||
return ResultAsync.fromPromise(
|
||||
this.db
|
||||
.update(account)
|
||||
.set({ password: hashed })
|
||||
.where(eq(account.userId, userId))
|
||||
.returning()
|
||||
.execute(),
|
||||
(error) => {
|
||||
logDomainEvent({
|
||||
level: "error",
|
||||
event: "account.rotate_password.failed",
|
||||
fctx,
|
||||
durationMs: Date.now() - startedAt,
|
||||
error,
|
||||
meta: { userId, stage: "update_password" },
|
||||
});
|
||||
return this.dbError(
|
||||
fctx,
|
||||
error instanceof Error
|
||||
? error.message
|
||||
: String(error),
|
||||
);
|
||||
},
|
||||
).map(() => {
|
||||
logDomainEvent({
|
||||
event: "account.rotate_password.succeeded",
|
||||
fctx,
|
||||
durationMs: Date.now() - startedAt,
|
||||
meta: { userId },
|
||||
});
|
||||
return password;
|
||||
});
|
||||
});
|
||||
});
|
||||
},
|
||||
});
|
||||
}
|
||||
}
|
||||
96
packages/logic/domains/user/controller.ts
Normal file
96
packages/logic/domains/user/controller.ts
Normal file
@@ -0,0 +1,96 @@
|
||||
import { FlowExecCtx } from "@/core/flow.execution.context";
|
||||
import { traceResultAsync } from "@core/observability";
|
||||
import { AccountRepository } from "./account.repository";
|
||||
import { UserRepository } from "./repository";
|
||||
import { db } from "@pkg/db";
|
||||
|
||||
export class UserController {
|
||||
constructor(
|
||||
private userRepository: UserRepository,
|
||||
private accountRepo: AccountRepository,
|
||||
) {}
|
||||
|
||||
getUserInfo(fctx: FlowExecCtx, userId: string) {
|
||||
return traceResultAsync({
|
||||
name: "logic.user.controller.getUserInfo",
|
||||
fctx,
|
||||
attributes: { "app.user.id": userId },
|
||||
fn: () => this.userRepository.getUserInfo(fctx, userId),
|
||||
});
|
||||
}
|
||||
|
||||
ensureAccountExists(fctx: FlowExecCtx, userId: string) {
|
||||
return traceResultAsync({
|
||||
name: "logic.user.controller.ensureAccountExists",
|
||||
fctx,
|
||||
attributes: { "app.user.id": userId },
|
||||
fn: () => this.accountRepo.ensureAccountExists(fctx, userId),
|
||||
});
|
||||
}
|
||||
|
||||
isUsernameAvailable(fctx: FlowExecCtx, username: string) {
|
||||
return traceResultAsync({
|
||||
name: "logic.user.controller.isUsernameAvailable",
|
||||
fctx,
|
||||
attributes: { "app.user.username": username },
|
||||
fn: () => this.userRepository.isUsernameAvailable(fctx, username),
|
||||
});
|
||||
}
|
||||
|
||||
updateLastVerified2FaAtToNow(fctx: FlowExecCtx, userId: string) {
|
||||
return traceResultAsync({
|
||||
name: "logic.user.controller.updateLastVerified2FaAtToNow",
|
||||
fctx,
|
||||
attributes: { "app.user.id": userId },
|
||||
fn: () => this.userRepository.updateLastVerified2FaAtToNow(fctx, userId),
|
||||
});
|
||||
}
|
||||
|
||||
banUser(
|
||||
fctx: FlowExecCtx,
|
||||
userId: string,
|
||||
reason: string,
|
||||
banExpiresAt: Date,
|
||||
) {
|
||||
return traceResultAsync({
|
||||
name: "logic.user.controller.banUser",
|
||||
fctx,
|
||||
attributes: { "app.user.id": userId },
|
||||
fn: () => this.userRepository.banUser(fctx, userId, reason, banExpiresAt),
|
||||
});
|
||||
}
|
||||
|
||||
isUserBanned(fctx: FlowExecCtx, userId: string) {
|
||||
return traceResultAsync({
|
||||
name: "logic.user.controller.isUserBanned",
|
||||
fctx,
|
||||
attributes: { "app.user.id": userId },
|
||||
fn: () => this.userRepository.isUserBanned(fctx, userId),
|
||||
});
|
||||
}
|
||||
|
||||
getBanInfo(fctx: FlowExecCtx, userId: string) {
|
||||
return traceResultAsync({
|
||||
name: "logic.user.controller.getBanInfo",
|
||||
fctx,
|
||||
attributes: { "app.user.id": userId },
|
||||
fn: () => this.userRepository.getBanInfo(fctx, userId),
|
||||
});
|
||||
}
|
||||
|
||||
rotatePassword(fctx: FlowExecCtx, userId: string, password: string) {
|
||||
return traceResultAsync({
|
||||
name: "logic.user.controller.rotatePassword",
|
||||
fctx,
|
||||
attributes: { "app.user.id": userId },
|
||||
fn: () => this.accountRepo.rotatePassword(fctx, userId, password),
|
||||
});
|
||||
}
|
||||
}
|
||||
|
||||
export function getUserController(): UserController {
|
||||
return new UserController(
|
||||
new UserRepository(db),
|
||||
new AccountRepository(db),
|
||||
);
|
||||
}
|
||||
159
packages/logic/domains/user/data.ts
Normal file
159
packages/logic/domains/user/data.ts
Normal file
@@ -0,0 +1,159 @@
|
||||
import { Session } from "better-auth";
|
||||
import * as v from "valibot";
|
||||
|
||||
// Re-export better-auth's Session type so consumers can import it from here.
export type { Session } from "better-auth";

// Session extended with a flag marking the caller's own (current) session.
export type ModifiedSession = Session & { isCurrent?: boolean };

// User role enum
// NOTE(review): duplicates userRoleSchema's picklist below — keep in sync.
export enum UserRoleMap {
  user = "user",
  admin = "admin",
}

// User role schema
export const userRoleSchema = v.picklist(["user", "admin"]);
export type UserRole = v.InferOutput<typeof userRoleSchema>;

// User schema
// NOTE(review): presumably mirrors the auth user table plus app-specific
// columns (onboardingDone, last2FAVerifiedAt, parentId) — confirm against
// the @pkg/db schema.
export const userSchema = v.object({
  id: v.string(),
  name: v.string(),
  email: v.string(),
  emailVerified: v.boolean(),
  image: v.optional(v.string()),
  createdAt: v.date(),
  updatedAt: v.date(),
  username: v.optional(v.string()),
  displayUsername: v.optional(v.string()),
  role: v.optional(v.string()),
  banned: v.optional(v.boolean()),
  banReason: v.optional(v.string()),
  banExpires: v.optional(v.date()),
  onboardingDone: v.optional(v.boolean()),
  last2FAVerifiedAt: v.optional(v.date()),
  parentId: v.optional(v.string()),
});
export type User = v.InferOutput<typeof userSchema>;

// Account schema
// One auth-provider row per user/provider pair.
// NOTE(review): token/scope/password fields are required strings here —
// verify against the DB schema, where some are likely nullable.
export const accountSchema = v.object({
  id: v.string(),
  accountId: v.string(),
  providerId: v.string(),
  userId: v.string(),
  accessToken: v.string(),
  refreshToken: v.string(),
  idToken: v.string(),
  accessTokenExpiresAt: v.date(),
  refreshTokenExpiresAt: v.date(),
  scope: v.string(),
  password: v.string(),
  createdAt: v.date(),
  updatedAt: v.date(),
});
export type Account = v.InferOutput<typeof accountSchema>;
|
||||
|
||||
// Ensure account exists schema — input for ensuring a credential account row.
export const ensureAccountExistsSchema = v.object({
  userId: v.string(),
});
export type EnsureAccountExists = v.InferOutput<
  typeof ensureAccountExistsSchema
>;

// Ban info schema — ban status result: reason/expires absent when not banned
// or when the ban is permanent.
export const banInfoSchema = v.object({
  banned: v.boolean(),
  reason: v.optional(v.string()),
  expires: v.optional(v.date()),
});
export type BanInfo = v.InferOutput<typeof banInfoSchema>;

// Ban user schema — input for banning a user until banExpiresAt.
export const banUserSchema = v.object({
  userId: v.string(),
  reason: v.string(),
  banExpiresAt: v.date(),
});
export type BanUser = v.InferOutput<typeof banUserSchema>;

// Check username availability schema
export const checkUsernameSchema = v.object({
  username: v.string(),
});
export type CheckUsername = v.InferOutput<typeof checkUsernameSchema>;

// Rotate password schema — input for replacing a user's account password.
export const rotatePasswordSchema = v.object({
  userId: v.string(),
  password: v.string(),
});
export type RotatePassword = v.InferOutput<typeof rotatePasswordSchema>;
|
||||
|
||||
// View Model specific types

// Search and filter types

// Columns a user search may target.
export const searchFieldSchema = v.picklist(["email", "name", "username"]);
export type SearchField = v.InferOutput<typeof searchFieldSchema>;

// String-matching modes for user search.
export const searchOperatorSchema = v.picklist([
  "contains",
  "starts_with",
  "ends_with",
]);
export type SearchOperator = v.InferOutput<typeof searchOperatorSchema>;

// Comparison operators for column filters.
export const filterOperatorSchema = v.picklist([
  "eq",
  "ne",
  "lt",
  "lte",
  "gt",
  "gte",
]);
export type FilterOperator = v.InferOutput<typeof filterOperatorSchema>;

export const sortDirectionSchema = v.picklist(["asc", "desc"]);
export type SortDirection = v.InferOutput<typeof sortDirectionSchema>;

// Users query state
// Combined search/pagination/sort/filter state for a users list query.
// limit/offset are required integers; everything else is optional.
export const usersQueryStateSchema = v.object({
  // searching
  searchValue: v.optional(v.string()),
  searchField: v.optional(searchFieldSchema),
  searchOperator: v.optional(searchOperatorSchema),

  // pagination
  limit: v.pipe(v.number(), v.integer()),
  offset: v.pipe(v.number(), v.integer()),

  // sorting
  sortBy: v.optional(v.string()),
  sortDirection: v.optional(sortDirectionSchema),

  // filtering
  filterField: v.optional(v.string()),
  filterValue: v.optional(v.union([v.string(), v.number(), v.boolean()])),
  filterOperator: v.optional(filterOperatorSchema),
});
export type UsersQueryState = v.InferOutput<typeof usersQueryStateSchema>;

// UI View Model types

// Preset ban-duration choices; "custom" presumably opens a date picker in
// the UI — TODO confirm in the consuming component.
export const banExpiryModeSchema = v.picklist([
  "never",
  "1d",
  "7d",
  "30d",
  "custom",
]);
export type BanExpiryMode = v.InferOutput<typeof banExpiryModeSchema>;

// Admin "create user" form payload; role may be single or multiple.
export const createUserFormSchema = v.object({
  email: v.string(),
  password: v.string(),
  name: v.string(),
  role: v.union([userRoleSchema, v.array(userRoleSchema)]),
});
export type CreateUserForm = v.InferOutput<typeof createUserFormSchema>;
|
||||
77
packages/logic/domains/user/errors.ts
Normal file
77
packages/logic/domains/user/errors.ts
Normal file
@@ -0,0 +1,77 @@
|
||||
import { FlowExecCtx } from "@/core/flow.execution.context";
|
||||
import { ERROR_CODES, type Err } from "@pkg/result";
|
||||
import { getError } from "@pkg/logger";
|
||||
|
||||
export const userErrors = {
|
||||
dbError: (fctx: FlowExecCtx, detail: string): Err =>
|
||||
getError({
|
||||
flowId: fctx.flowId,
|
||||
code: ERROR_CODES.DATABASE_ERROR,
|
||||
message: "Database operation failed",
|
||||
description: "Please try again later",
|
||||
detail,
|
||||
}),
|
||||
|
||||
userNotFound: (fctx: FlowExecCtx): Err =>
|
||||
getError({
|
||||
flowId: fctx.flowId,
|
||||
code: ERROR_CODES.NOT_FOUND,
|
||||
message: "User not found",
|
||||
description: "Try with a different user id",
|
||||
detail: "User not found in database",
|
||||
}),
|
||||
|
||||
usernameCheckFailed: (fctx: FlowExecCtx, detail: string): Err =>
|
||||
getError({
|
||||
flowId: fctx.flowId,
|
||||
code: ERROR_CODES.DATABASE_ERROR,
|
||||
message: "An error occurred while checking username availability",
|
||||
description: "Try again later",
|
||||
detail,
|
||||
}),
|
||||
|
||||
banOperationFailed: (fctx: FlowExecCtx, detail: string): Err =>
|
||||
getError({
|
||||
flowId: fctx.flowId,
|
||||
code: ERROR_CODES.DATABASE_ERROR,
|
||||
message: "Failed to perform ban operation",
|
||||
description: "Please try again later",
|
||||
detail,
|
||||
}),
|
||||
|
||||
unbanFailed: (fctx: FlowExecCtx, detail: string): Err =>
|
||||
getError({
|
||||
flowId: fctx.flowId,
|
||||
code: ERROR_CODES.DATABASE_ERROR,
|
||||
message: "Failed to unban user",
|
||||
description: "Please try again later",
|
||||
detail,
|
||||
}),
|
||||
|
||||
updateFailed: (fctx: FlowExecCtx, detail: string): Err =>
|
||||
getError({
|
||||
flowId: fctx.flowId,
|
||||
code: ERROR_CODES.DATABASE_ERROR,
|
||||
message: "Failed to update user",
|
||||
description: "Please try again later",
|
||||
detail,
|
||||
}),
|
||||
|
||||
getUserInfoFailed: (fctx: FlowExecCtx, detail: string): Err =>
|
||||
getError({
|
||||
flowId: fctx.flowId,
|
||||
code: ERROR_CODES.DATABASE_ERROR,
|
||||
message: "An error occurred while getting user info",
|
||||
description: "Try again later",
|
||||
detail,
|
||||
}),
|
||||
|
||||
getBanInfoFailed: (fctx: FlowExecCtx, detail: string): Err =>
|
||||
getError({
|
||||
flowId: fctx.flowId,
|
||||
code: ERROR_CODES.DATABASE_ERROR,
|
||||
message: "An error occurred while getting ban info",
|
||||
description: "Try again later",
|
||||
detail,
|
||||
}),
|
||||
};
|
||||
420
packages/logic/domains/user/repository.ts
Normal file
420
packages/logic/domains/user/repository.ts
Normal file
@@ -0,0 +1,420 @@
|
||||
import { ResultAsync, errAsync, okAsync } from "neverthrow";
|
||||
import { FlowExecCtx } from "@core/flow.execution.context";
|
||||
import { traceResultAsync } from "@core/observability";
|
||||
import { type Err } from "@pkg/result";
|
||||
import { Database, eq } from "@pkg/db";
|
||||
import { BanInfo, User } from "./data";
|
||||
import { user } from "@pkg/db/schema";
|
||||
import { userErrors } from "./errors";
|
||||
import { logDomainEvent } from "@pkg/logger";
|
||||
|
||||
export class UserRepository {
|
||||
constructor(private db: Database) {}
|
||||
|
||||
getUserInfo(fctx: FlowExecCtx, userId: string): ResultAsync<User, Err> {
|
||||
return traceResultAsync({
|
||||
name: "logic.user.repository.getUserInfo",
|
||||
fctx,
|
||||
attributes: { "app.user.id": userId },
|
||||
fn: () => {
|
||||
const startedAt = Date.now();
|
||||
logDomainEvent({
|
||||
event: "user.get_info.started",
|
||||
fctx,
|
||||
meta: { userId },
|
||||
});
|
||||
|
||||
return ResultAsync.fromPromise(
|
||||
this.db.query.user.findFirst({
|
||||
where: eq(user.id, userId),
|
||||
}),
|
||||
(error) => {
|
||||
logDomainEvent({
|
||||
level: "error",
|
||||
event: "user.get_info.failed",
|
||||
fctx,
|
||||
durationMs: Date.now() - startedAt,
|
||||
error,
|
||||
meta: { userId },
|
||||
});
|
||||
return userErrors.getUserInfoFailed(
|
||||
fctx,
|
||||
error instanceof Error ? error.message : String(error),
|
||||
);
|
||||
},
|
||||
).andThen((userData) => {
|
||||
if (!userData) {
|
||||
logDomainEvent({
|
||||
level: "warn",
|
||||
event: "user.get_info.failed",
|
||||
fctx,
|
||||
durationMs: Date.now() - startedAt,
|
||||
error: { code: "NOT_FOUND", message: "User not found" },
|
||||
meta: { userId },
|
||||
});
|
||||
return errAsync(userErrors.userNotFound(fctx));
|
||||
}
|
||||
|
||||
logDomainEvent({
|
||||
event: "user.get_info.succeeded",
|
||||
fctx,
|
||||
durationMs: Date.now() - startedAt,
|
||||
meta: { userId },
|
||||
});
|
||||
return okAsync(userData as User);
|
||||
});
|
||||
},
|
||||
});
|
||||
}
|
||||
|
||||
isUsernameAvailable(
|
||||
fctx: FlowExecCtx,
|
||||
username: string,
|
||||
): ResultAsync<boolean, Err> {
|
||||
return traceResultAsync({
|
||||
name: "logic.user.repository.isUsernameAvailable",
|
||||
fctx,
|
||||
attributes: { "app.user.username": username },
|
||||
fn: () => {
|
||||
const startedAt = Date.now();
|
||||
logDomainEvent({
|
||||
event: "user.username_check.started",
|
||||
fctx,
|
||||
});
|
||||
|
||||
return ResultAsync.fromPromise(
|
||||
this.db.query.user.findFirst({
|
||||
where: eq(user.username, username),
|
||||
}),
|
||||
(error) => {
|
||||
logDomainEvent({
|
||||
level: "error",
|
||||
event: "user.username_check.failed",
|
||||
fctx,
|
||||
durationMs: Date.now() - startedAt,
|
||||
error,
|
||||
});
|
||||
return userErrors.usernameCheckFailed(
|
||||
fctx,
|
||||
error instanceof Error ? error.message : String(error),
|
||||
);
|
||||
},
|
||||
).map((existingUser) => {
|
||||
const isAvailable = !existingUser?.id;
|
||||
logDomainEvent({
|
||||
event: "user.username_check.succeeded",
|
||||
fctx,
|
||||
durationMs: Date.now() - startedAt,
|
||||
meta: { isAvailable },
|
||||
});
|
||||
return isAvailable;
|
||||
});
|
||||
},
|
||||
});
|
||||
}
|
||||
|
||||
updateLastVerified2FaAtToNow(
|
||||
fctx: FlowExecCtx,
|
||||
userId: string,
|
||||
): ResultAsync<boolean, Err> {
|
||||
return traceResultAsync({
|
||||
name: "logic.user.repository.updateLastVerified2FaAtToNow",
|
||||
fctx,
|
||||
attributes: { "app.user.id": userId },
|
||||
fn: () => {
|
||||
const startedAt = Date.now();
|
||||
logDomainEvent({
|
||||
event: "user.update_last_2fa.started",
|
||||
fctx,
|
||||
meta: { userId },
|
||||
});
|
||||
|
||||
return ResultAsync.fromPromise(
|
||||
this.db
|
||||
.update(user)
|
||||
.set({ last2FAVerifiedAt: new Date() })
|
||||
.where(eq(user.id, userId))
|
||||
.execute(),
|
||||
(error) => {
|
||||
logDomainEvent({
|
||||
level: "error",
|
||||
event: "user.update_last_2fa.failed",
|
||||
fctx,
|
||||
durationMs: Date.now() - startedAt,
|
||||
error,
|
||||
meta: { userId },
|
||||
});
|
||||
return userErrors.updateFailed(
|
||||
fctx,
|
||||
error instanceof Error ? error.message : String(error),
|
||||
);
|
||||
},
|
||||
).map(() => {
|
||||
logDomainEvent({
|
||||
event: "user.update_last_2fa.succeeded",
|
||||
fctx,
|
||||
durationMs: Date.now() - startedAt,
|
||||
meta: { userId },
|
||||
});
|
||||
return true;
|
||||
});
|
||||
},
|
||||
});
|
||||
}
|
||||
|
||||
banUser(
|
||||
fctx: FlowExecCtx,
|
||||
userId: string,
|
||||
reason: string,
|
||||
banExpiresAt: Date,
|
||||
): ResultAsync<boolean, Err> {
|
||||
return traceResultAsync({
|
||||
name: "logic.user.repository.banUser",
|
||||
fctx,
|
||||
attributes: { "app.user.id": userId },
|
||||
fn: () => {
|
||||
const startedAt = Date.now();
|
||||
logDomainEvent({
|
||||
event: "user.ban.started",
|
||||
fctx,
|
||||
meta: {
|
||||
userId,
|
||||
reasonLength: reason.length,
|
||||
banExpiresAt: banExpiresAt.toISOString(),
|
||||
},
|
||||
});
|
||||
|
||||
return ResultAsync.fromPromise(
|
||||
this.db
|
||||
.update(user)
|
||||
.set({
|
||||
banned: true,
|
||||
banReason: reason,
|
||||
banExpires: banExpiresAt,
|
||||
})
|
||||
.where(eq(user.id, userId))
|
||||
.execute(),
|
||||
(error) => {
|
||||
logDomainEvent({
|
||||
level: "error",
|
||||
event: "user.ban.failed",
|
||||
fctx,
|
||||
durationMs: Date.now() - startedAt,
|
||||
error,
|
||||
meta: { userId },
|
||||
});
|
||||
return userErrors.banOperationFailed(
|
||||
fctx,
|
||||
error instanceof Error ? error.message : String(error),
|
||||
);
|
||||
},
|
||||
).map(() => {
|
||||
logDomainEvent({
|
||||
event: "user.ban.succeeded",
|
||||
fctx,
|
||||
durationMs: Date.now() - startedAt,
|
||||
meta: { userId },
|
||||
});
|
||||
return true;
|
||||
});
|
||||
},
|
||||
});
|
||||
}
|
||||
|
||||
isUserBanned(fctx: FlowExecCtx, userId: string): ResultAsync<boolean, Err> {
|
||||
return traceResultAsync({
|
||||
name: "logic.user.repository.isUserBanned",
|
||||
fctx,
|
||||
attributes: { "app.user.id": userId },
|
||||
fn: () => {
|
||||
const startedAt = Date.now();
|
||||
logDomainEvent({
|
||||
event: "user.is_banned.started",
|
||||
fctx,
|
||||
meta: { userId },
|
||||
});
|
||||
|
||||
return ResultAsync.fromPromise(
|
||||
this.db.query.user.findFirst({
|
||||
where: eq(user.id, userId),
|
||||
columns: {
|
||||
banned: true,
|
||||
banExpires: true,
|
||||
},
|
||||
}),
|
||||
(error) => {
|
||||
logDomainEvent({
|
||||
level: "error",
|
||||
event: "user.is_banned.failed",
|
||||
fctx,
|
||||
durationMs: Date.now() - startedAt,
|
||||
error,
|
||||
meta: { userId },
|
||||
});
|
||||
return userErrors.dbError(
|
||||
fctx,
|
||||
error instanceof Error ? error.message : String(error),
|
||||
);
|
||||
},
|
||||
).andThen((userData) => {
|
||||
if (!userData) {
|
||||
logDomainEvent({
|
||||
level: "warn",
|
||||
event: "user.is_banned.failed",
|
||||
fctx,
|
||||
durationMs: Date.now() - startedAt,
|
||||
error: { code: "NOT_FOUND", message: "User not found" },
|
||||
meta: { userId },
|
||||
});
|
||||
return errAsync(userErrors.userNotFound(fctx));
|
||||
}
|
||||
|
||||
if (!userData.banned) {
|
||||
logDomainEvent({
|
||||
event: "user.is_banned.succeeded",
|
||||
fctx,
|
||||
durationMs: Date.now() - startedAt,
|
||||
meta: { userId, isBanned: false },
|
||||
});
|
||||
return okAsync(false);
|
||||
}
|
||||
|
||||
if (!userData.banExpires) {
|
||||
logDomainEvent({
|
||||
event: "user.is_banned.succeeded",
|
||||
fctx,
|
||||
durationMs: Date.now() - startedAt,
|
||||
meta: { userId, isBanned: true, isPermanent: true },
|
||||
});
|
||||
return okAsync(true);
|
||||
}
|
||||
|
||||
const now = new Date();
|
||||
if (userData.banExpires <= now) {
|
||||
return ResultAsync.fromPromise(
|
||||
this.db
|
||||
.update(user)
|
||||
.set({
|
||||
banned: false,
|
||||
banReason: null,
|
||||
banExpires: null,
|
||||
})
|
||||
.where(eq(user.id, userId))
|
||||
.execute(),
|
||||
(error) => {
|
||||
logDomainEvent({
|
||||
level: "error",
|
||||
event: "user.unban_after_expiry.failed",
|
||||
fctx,
|
||||
durationMs: Date.now() - startedAt,
|
||||
error,
|
||||
meta: { userId },
|
||||
});
|
||||
return userErrors.unbanFailed(
|
||||
fctx,
|
||||
error instanceof Error
|
||||
? error.message
|
||||
: String(error),
|
||||
);
|
||||
},
|
||||
)
|
||||
.map(() => {
|
||||
logDomainEvent({
|
||||
event: "user.unban_after_expiry.succeeded",
|
||||
fctx,
|
||||
durationMs: Date.now() - startedAt,
|
||||
meta: { userId },
|
||||
});
|
||||
return false;
|
||||
})
|
||||
.orElse((error) => {
|
||||
logDomainEvent({
|
||||
level: "warn",
|
||||
event: "user.is_banned.succeeded",
|
||||
fctx,
|
||||
durationMs: Date.now() - startedAt,
|
||||
error,
|
||||
meta: { userId, degraded: true, isBanned: true },
|
||||
});
|
||||
return okAsync(true);
|
||||
});
|
||||
}
|
||||
|
||||
logDomainEvent({
|
||||
event: "user.is_banned.succeeded",
|
||||
fctx,
|
||||
durationMs: Date.now() - startedAt,
|
||||
meta: {
|
||||
userId,
|
||||
isBanned: true,
|
||||
banExpires: userData.banExpires.toISOString(),
|
||||
},
|
||||
});
|
||||
return okAsync(true);
|
||||
});
|
||||
},
|
||||
});
|
||||
}
|
||||
|
||||
getBanInfo(fctx: FlowExecCtx, userId: string): ResultAsync<BanInfo, Err> {
|
||||
return traceResultAsync({
|
||||
name: "logic.user.repository.getBanInfo",
|
||||
fctx,
|
||||
attributes: { "app.user.id": userId },
|
||||
fn: () => {
|
||||
const startedAt = Date.now();
|
||||
logDomainEvent({
|
||||
event: "user.ban_info.started",
|
||||
fctx,
|
||||
meta: { userId },
|
||||
});
|
||||
|
||||
return ResultAsync.fromPromise(
|
||||
this.db.query.user.findFirst({
|
||||
where: eq(user.id, userId),
|
||||
columns: { banned: true, banReason: true, banExpires: true },
|
||||
}),
|
||||
(error) => {
|
||||
logDomainEvent({
|
||||
level: "error",
|
||||
event: "user.ban_info.failed",
|
||||
fctx,
|
||||
durationMs: Date.now() - startedAt,
|
||||
error,
|
||||
meta: { userId },
|
||||
});
|
||||
return userErrors.getBanInfoFailed(
|
||||
fctx,
|
||||
error instanceof Error ? error.message : String(error),
|
||||
);
|
||||
},
|
||||
).andThen((userData) => {
|
||||
if (!userData) {
|
||||
logDomainEvent({
|
||||
level: "warn",
|
||||
event: "user.ban_info.failed",
|
||||
fctx,
|
||||
durationMs: Date.now() - startedAt,
|
||||
error: { code: "NOT_FOUND", message: "User not found" },
|
||||
meta: { userId },
|
||||
});
|
||||
return errAsync(userErrors.userNotFound(fctx));
|
||||
}
|
||||
|
||||
logDomainEvent({
|
||||
event: "user.ban_info.succeeded",
|
||||
fctx,
|
||||
durationMs: Date.now() - startedAt,
|
||||
meta: { userId, banned: userData.banned || false },
|
||||
});
|
||||
|
||||
return okAsync({
|
||||
banned: userData.banned || false,
|
||||
reason: userData.banReason || undefined,
|
||||
expires: userData.banExpires || undefined,
|
||||
});
|
||||
});
|
||||
},
|
||||
});
|
||||
}
|
||||
}
|
||||
40
packages/logic/package.json
Normal file
40
packages/logic/package.json
Normal file
@@ -0,0 +1,40 @@
|
||||
{
|
||||
"name": "@pkg/logic",
|
||||
"type": "module",
|
||||
"scripts": {
|
||||
"auth:schemagen": "pnpm dlx @better-auth/cli generate --config ./domains/auth/config.base.ts --output ../../packages/db/schema/better.auth.schema.ts"
|
||||
},
|
||||
"dependencies": {
|
||||
"@opentelemetry/api": "^1.9.0",
|
||||
"@otplib/plugin-base32-scure": "^13.3.0",
|
||||
"@otplib/plugin-crypto-noble": "^13.3.0",
|
||||
"@otplib/totp": "^13.3.0",
|
||||
"@pkg/db": "workspace:*",
|
||||
"@pkg/keystore": "workspace:*",
|
||||
"@pkg/logger": "workspace:*",
|
||||
"@pkg/result": "workspace:*",
|
||||
"@pkg/settings": "workspace:*",
|
||||
"@types/pdfkit": "^0.14.0",
|
||||
"argon2": "^0.43.0",
|
||||
"better-auth": "^1.4.7",
|
||||
"date-fns-tz": "^3.2.0",
|
||||
"dotenv": "^16.5.0",
|
||||
"hono": "^4.11.1",
|
||||
"imapflow": "^1.0.188",
|
||||
"mailparser": "^3.7.3",
|
||||
"nanoid": "^5.1.5",
|
||||
"neverthrow": "^8.2.0",
|
||||
"otplib": "^13.3.0",
|
||||
"uuid": "^11.1.0",
|
||||
"valibot": "^1.2.0"
|
||||
},
|
||||
"devDependencies": {
|
||||
"@types/bun": "latest",
|
||||
"@types/mailparser": "^3.4.6",
|
||||
"@types/tmp": "^0.2.6",
|
||||
"@types/uuid": "^10.0.0"
|
||||
},
|
||||
"peerDependencies": {
|
||||
"typescript": "^5.9.3"
|
||||
}
|
||||
}
|
||||
16
packages/logic/tsconfig.json
Normal file
16
packages/logic/tsconfig.json
Normal file
@@ -0,0 +1,16 @@
|
||||
{
|
||||
"compilerOptions": {
|
||||
"strict": true,
|
||||
"jsx": "react-jsx",
|
||||
"jsxImportSource": "hono/jsx",
|
||||
"baseUrl": ".",
|
||||
"paths": {
|
||||
"@/*": ["./*"],
|
||||
"@domains/*": ["./domains/*"],
|
||||
"@core/*": ["./core/*"]
|
||||
},
|
||||
"moduleResolution": "bundler",
|
||||
"module": "esnext",
|
||||
"target": "esnext"
|
||||
}
|
||||
}
|
||||
Reference in New Issue
Block a user