198 files changed, 5790 insertions, 2350 deletions
diff --git a/.editorconfig b/.editorconfig
index beab7a9..92ff780 100644
--- a/.editorconfig
+++ b/.editorconfig
@@ -13,6 +13,13 @@ tab_width = 8
 [*.md]
 indent_size = 2
+max_line_length = 80
+
+[*.{sh,bash}]
+indent_size = 2
+
+[store/debian-dev/**/*.{sh,bash}]
+indent_size = 4
 
 [*.{html,css,js,ts}]
 indent_size = 2
@@ -20,8 +27,11 @@ indent_size = 2
 [*.{c,cpp,h}]
 indent_size = 2
 
-[*.{json,yaml,yml}]
+[*.{json,yaml,yml,yaml.template}]
 indent_size = 2
 
 [*.py]
 profile = black
+
+[*.lua]
+indent_size = 4
@@ -1,11 +1,4 @@
-# Hi! This is **crupest**
-
-Nice to meet you here! 🤗
-
-Working on [GNU/Hurd](https://www.gnu.org/software/hurd/index.html) [Debian](https://www.debian.org/ports/hurd/) now. ❤️
-
-I love everything in the world. Contact me via my email, *<crupest@crupest.life>*, or create an issue in any of my repos. I love talking to people a lot.
-
-> *Die Philosophen haben die Welt nur verschieden interpretiert, es kömmt aber darauf an, sie zu verändern.*
-(*The philosophers have only interpreted the world in various ways, the point is to change it.*)
-\- Marx, K. (1845). *Theses on Feuerbach*.
+Hi! This is **crupest**. Nice to meet you here! 🤗
+Feel free to contact me via my email address *<crupest@crupest.life>*,
+or just create an issue in any of my [GitHub](https://github.com/crupest)
+repos. I love talking with people a lot.
diff --git a/deno/.gitignore b/deno/.gitignore
new file mode 100644
index 0000000..327aef0
--- /dev/null
+++ b/deno/.gitignore
@@ -0,0 +1,3 @@
+out
+.env.local
+db.sqlite
diff --git a/deno/base/config.ts b/deno/base/config.ts
new file mode 100644
index 0000000..96cc869
--- /dev/null
+++ b/deno/base/config.ts
@@ -0,0 +1,95 @@
+import { StringUtils } from "./lib.ts";
+
+export interface ConfigDefinitionItem {
+  readonly description: string;
+  readonly default?: string;
+  readonly secret?: boolean;
+}
+
+interface ConfigMapItem extends ConfigDefinitionItem {
+  readonly env: string;
+  value?: string;
+}
+
+export type ConfigDefinition<K extends string = string> = Record<
+  K,
+  ConfigDefinitionItem
+>;
+type ConfigMap<K extends string = string> = Record<K, ConfigMapItem>;
+
+export class ConfigProvider<K extends string> {
+  readonly #prefix: string;
+  readonly #map: ConfigMap<K>;
+
+  constructor(prefix: string, ...definitions: Partial<ConfigDefinition<K>>[]) {
+    this.#prefix = prefix;
+
+    const map: ConfigMap = {};
+    for (const definition of definitions) {
+      for (const [key, def] of Object.entries(definition as ConfigDefinition)) {
+        map[key] = {
+          ...def,
+          env: `${this.#prefix}-${
+            StringUtils.camelCaseToKebabCase(key as string)
+          }`
+            .replaceAll("-", "_")
+            .toUpperCase(),
+        };
+      }
+    }
+    this.#map = map as ConfigMap<K>;
+  }
+
+  resolveFromEnv(options?: { keys?: K[] }) {
+    const keys = options?.keys ?? Object.keys(this.#map);
+    for (const key of keys) {
+      const { env, description, default: _default } = this.#map[key as K];
+      const value = Deno.env.get(env) ?? _default;
+      if (value == null) {
+        throw new Error(`Required env ${env} (${description}) is not set.`);
+      }
+      this.#map[key as K].value = value;
+    }
+  }
+
+  get(key: K): string {
+    if (!(key in this.#map)) {
+      throw new Error(`Unknown config key ${key as string}.`);
+    }
+    if (this.#map[key].value == null) {
+      this.resolveFromEnv({ keys: [key] });
+    }
+    return this.#map[key].value!;
+  }
+
+  set(key: K, value: string) {
+    if (!(key in this.#map)) {
+      throw new Error(`Unknown config key ${key as string}.`);
+    }
+    this.#map[key].value = value;
+  }
+
+  getInt(key: K): number {
+    return Number(this.get(key));
+  }
+
+  getList(key: K, separator: string = ","): string[] {
+    const value = this.get(key);
+    if (value.length === 0) return [];
+    return value.split(separator);
+  }
+
+  [Symbol.for("Deno.customInspect")]() {
+    const getValueString = (item: ConfigMapItem): string => {
+      if (item.value == null) return "(unresolved)";
+      if (item.secret === true) return "***";
+      return item.value;
+    };
+
+    return Object.entries(this.#map as ConfigMap)
+      .map(
+        ([key, item]) => `${key} [env: ${item.env}]: ${getValueString(item)}`,
+      )
+      .join("\n");
+  }
+}
diff --git a/deno/base/cron.ts b/deno/base/cron.ts
new file mode 100644
index 0000000..bf0a0be
--- /dev/null
+++ b/deno/base/cron.ts
@@ -0,0 +1,43 @@
+export type CronCallback = (task: CronTask) => Promise<void>;
+
+export interface CronTaskConfig {
+  readonly name: string;
+  readonly interval: number;
+  readonly callback: CronCallback;
+  readonly startNow?: boolean;
+}
+
+export class CronTask {
+  #timerTag: number | null = null;
+
+  constructor(public readonly config: CronTaskConfig) {
+    if (config.interval <= 0) {
+      throw new Error("Cron task interval must be positive.");
+    }
+
+    if (config.startNow === true) {
+      this.start();
+    }
+  }
+
+  get running(): boolean {
+    return this.#timerTag != null;
+  }
+
+  start() {
+    if (this.#timerTag == null) {
+      this.#timerTag = setInterval(
+        this.config.callback,
+        this.config.interval,
+        this,
+      );
+    }
+  }
+
+  stop() {
+    if (this.#timerTag != null) {
+      clearInterval(this.#timerTag);
+      this.#timerTag = null;
+    }
+  }
+}
diff --git a/deno/base/deno.json b/deno/base/deno.json
new file mode 100644
index 0000000..582f0f6
--- /dev/null
+++ b/deno/base/deno.json
@@ -0,0 +1,9 @@
+{
+  "name": "@crupest/base",
+  "version": "0.1.0",
+  "exports": {
+    ".": "./lib.ts",
+    "./config": "./config.ts",
+    "./cron": "./cron.ts"
+  }
+}
diff --git a/deno/base/lib.ts b/deno/base/lib.ts
new file mode 100644
index 0000000..3c69e0a
--- /dev/null
+++ b/deno/base/lib.ts
@@ -0,0 +1,30 @@
+function camelCaseToKebabCase(str: string): string {
+  return str.replace(/[A-Z]/g, (m) => "-" + m.toLowerCase());
+}
+
+function prependNonEmpty<T>(
+  object: T,
+  prefix: string = " ",
+): string {
+  if (object == null) return "";
+  const string = typeof object === "string" ? object : String(object);
+  return string.length === 0 ? "" : prefix + string;
+}
+
+export const StringUtils = Object.freeze({
+  camelCaseToKebabCase,
+  prependNonEmpty,
+});
+
+function toFileNameString(date: Date, dateOnly?: boolean): string {
+  const str = date.toISOString();
+  return dateOnly === true
+    ?
str.slice(0, str.indexOf("T")) + : str.replaceAll(/:|\./g, "-"); +} + +export const DateUtils = Object.freeze( + { + toFileNameString, + } as const, +); diff --git a/deno/deno.json b/deno/deno.json new file mode 100644 index 0000000..286451e --- /dev/null +++ b/deno/deno.json @@ -0,0 +1,18 @@ +{ + "workspace": ["./base", "./mail", "./tools"], + "tasks": { + "compile:mail": "deno task --cwd=mail compile" + }, + "imports": { + "@std/collections": "jsr:@std/collections@^1.1.1", + "@std/csv": "jsr:@std/csv@^1.0.6", + "@std/encoding": "jsr:@std/encoding@^1.0.10", + "@std/expect": "jsr:@std/expect@^1.0.16", + "@std/io": "jsr:@std/io@^0.225.2", + "@std/path": "jsr:@std/path@^1.1.0", + "@std/testing": "jsr:@std/testing@^1.0.13", + "@std/fs": "jsr:@std/fs@^1.0.18", + "yargs": "npm:yargs@^18.0.0", + "@types/yargs": "npm:@types/yargs@^17.0.33" + } +} diff --git a/deno/deno.lock b/deno/deno.lock new file mode 100644 index 0000000..bdc8c3f --- /dev/null +++ b/deno/deno.lock @@ -0,0 +1,1329 @@ +{ + "version": "5", + "specifiers": { + "jsr:@db/sqlite@0.12": "0.12.0", + "jsr:@denosaurs/plug@1": "1.1.0", + "jsr:@std/assert@0.217": "0.217.0", + "jsr:@std/assert@^1.0.13": "1.0.13", + "jsr:@std/async@^1.0.13": "1.0.13", + "jsr:@std/bytes@^1.0.5": "1.0.6", + "jsr:@std/collections@^1.1.1": "1.1.1", + "jsr:@std/csv@^1.0.6": "1.0.6", + "jsr:@std/data-structures@^1.0.8": "1.0.8", + "jsr:@std/encoding@1": "1.0.10", + "jsr:@std/encoding@^1.0.10": "1.0.10", + "jsr:@std/expect@^1.0.16": "1.0.16", + "jsr:@std/fmt@1": "1.0.8", + "jsr:@std/fs@1": "1.0.17", + "jsr:@std/fs@^1.0.17": "1.0.17", + "jsr:@std/fs@^1.0.18": "1.0.18", + "jsr:@std/internal@^1.0.6": "1.0.7", + "jsr:@std/internal@^1.0.7": "1.0.7", + "jsr:@std/internal@^1.0.8": "1.0.8", + "jsr:@std/io@~0.225.2": "0.225.2", + "jsr:@std/path@0.217": "0.217.0", + "jsr:@std/path@1": "1.1.0", + "jsr:@std/path@^1.0.9": "1.1.0", + "jsr:@std/path@^1.1.0": "1.1.0", + "jsr:@std/streams@^1.0.9": "1.0.9", + "jsr:@std/testing@^1.0.13": "1.0.13", + "npm:@aws-sdk/client-s3@^3.821.0": "3.824.0", + "npm:@aws-sdk/client-sesv2@^3.821.0": "3.824.0", + "npm:@hono/zod-validator@0.7": "0.7.0_hono@4.7.11_zod@3.25.51", + "npm:@smithy/fetch-http-handler@^5.0.4": "5.0.4", + "npm:@types/lodash@*": "4.17.17", + "npm:@types/mustache@*": "4.2.6", + "npm:@types/node@*": "22.15.15", + "npm:@types/yargs@*": "17.0.33", + "npm:@types/yargs@^17.0.33": "17.0.33", + "npm:email-addresses@5": "5.0.0", + "npm:hono@^4.7.11": "4.7.11", + "npm:kysely@~0.28.2": "0.28.2", + "npm:mustache@^4.2.0": "4.2.0", + "npm:yargs@18": "18.0.0", + "npm:zod@^3.25.48": "3.25.51" + }, + "jsr": { + "@db/sqlite@0.12.0": { + "integrity": "dd1ef7f621ad50fc1e073a1c3609c4470bd51edc0994139c5bf9851de7a6d85f", + "dependencies": [ + "jsr:@denosaurs/plug", + "jsr:@std/path@0.217" + ] + }, + "@denosaurs/plug@1.1.0": { + "integrity": "eb2f0b7546c7bca2000d8b0282c54d50d91cf6d75cb26a80df25a6de8c4bc044", + "dependencies": [ + "jsr:@std/encoding@1", + "jsr:@std/fmt", + "jsr:@std/fs@1", + "jsr:@std/path@1" + ] + }, + "@std/assert@0.217.0": { + "integrity": "c98e279362ca6982d5285c3b89517b757c1e3477ee9f14eb2fdf80a45aaa9642" + }, + "@std/assert@1.0.13": { + "integrity": "ae0d31e41919b12c656c742b22522c32fb26ed0cba32975cb0de2a273cb68b29", + "dependencies": [ + "jsr:@std/internal@^1.0.6" + ] + }, + "@std/async@1.0.13": { + "integrity": "1d76ca5d324aef249908f7f7fe0d39aaf53198e5420604a59ab5c035adc97c96" + }, + "@std/bytes@1.0.6": { + "integrity": "f6ac6adbd8ccd99314045f5703e23af0a68d7f7e58364b47d2c7f408aeb5820a" + }, + "@std/collections@1.1.1": { + 
"integrity": "eff6443fbd9d5a6697018fb39c5d13d5f662f0045f21392d640693d0008ab2af" + }, + "@std/csv@1.0.6": { + "integrity": "52ef0e62799a0028d278fa04762f17f9bd263fad9a8e7f98c14fbd371d62d9fd", + "dependencies": [ + "jsr:@std/streams" + ] + }, + "@std/data-structures@1.0.8": { + "integrity": "2fb7219247e044c8fcd51341788547575653c82ae2c759ff209e0263ba7d9b66" + }, + "@std/encoding@1.0.10": { + "integrity": "8783c6384a2d13abd5e9e87a7ae0520a30e9f56aeeaa3bdf910a3eaaf5c811a1" + }, + "@std/expect@1.0.16": { + "integrity": "ceeef6dda21f256a5f0f083fcc0eaca175428b523359a9b1d9b3a1df11cc7391", + "dependencies": [ + "jsr:@std/assert@^1.0.13", + "jsr:@std/internal@^1.0.7" + ] + }, + "@std/fmt@1.0.8": { + "integrity": "71e1fc498787e4434d213647a6e43e794af4fd393ef8f52062246e06f7e372b7" + }, + "@std/fs@1.0.17": { + "integrity": "1c00c632677c1158988ef7a004cb16137f870aafdb8163b9dce86ec652f3952b", + "dependencies": [ + "jsr:@std/path@^1.0.9" + ] + }, + "@std/fs@1.0.18": { + "integrity": "24bcad99eab1af4fde75e05da6e9ed0e0dce5edb71b7e34baacf86ffe3969f3a", + "dependencies": [ + "jsr:@std/path@^1.1.0" + ] + }, + "@std/internal@1.0.7": { + "integrity": "39eeb5265190a7bc5d5591c9ff019490bd1f2c3907c044a11b0d545796158a0f" + }, + "@std/internal@1.0.8": { + "integrity": "fc66e846d8d38a47cffd274d80d2ca3f0de71040f855783724bb6b87f60891f5" + }, + "@std/io@0.225.2": { + "integrity": "3c740cd4ee4c082e6cfc86458f47e2ab7cb353dc6234d5e9b1f91a2de5f4d6c7", + "dependencies": [ + "jsr:@std/bytes" + ] + }, + "@std/path@0.217.0": { + "integrity": "1217cc25534bca9a2f672d7fe7c6f356e4027df400c0e85c0ef3e4343bc67d11", + "dependencies": [ + "jsr:@std/assert@0.217" + ] + }, + "@std/path@1.1.0": { + "integrity": "ddc94f8e3c275627281cbc23341df6b8bcc874d70374f75fec2533521e3d6886" + }, + "@std/streams@1.0.9": { + "integrity": "a9d26b1988cdd7aa7b1f4b51e1c36c1557f3f252880fa6cc5b9f37078b1a5035", + "dependencies": [ + "jsr:@std/bytes" + ] + }, + "@std/testing@1.0.13": { + "integrity": "74418be16f627dfe996937ab0ffbdbda9c1f35534b78724658d981492f121e71", + "dependencies": [ + "jsr:@std/assert@^1.0.13", + "jsr:@std/async", + "jsr:@std/data-structures", + "jsr:@std/fs@^1.0.17", + "jsr:@std/internal@^1.0.8", + "jsr:@std/path@^1.1.0" + ] + } + }, + "npm": { + "@aws-crypto/crc32@5.2.0": { + "integrity": "sha512-nLbCWqQNgUiwwtFsen1AdzAtvuLRsQS8rYgMuxCrdKf9kOssamGLuPwyTY9wyYblNr9+1XM8v6zoDTPPSIeANg==", + "dependencies": [ + "@aws-crypto/util", + "@aws-sdk/types", + "tslib" + ] + }, + "@aws-crypto/crc32c@5.2.0": { + "integrity": "sha512-+iWb8qaHLYKrNvGRbiYRHSdKRWhto5XlZUEBwDjYNf+ly5SVYG6zEoYIdxvf5R3zyeP16w4PLBn3rH1xc74Rag==", + "dependencies": [ + "@aws-crypto/util", + "@aws-sdk/types", + "tslib" + ] + }, + "@aws-crypto/sha1-browser@5.2.0": { + "integrity": "sha512-OH6lveCFfcDjX4dbAvCFSYUjJZjDr/3XJ3xHtjn3Oj5b9RjojQo8npoLeA/bNwkOkrSQ0wgrHzXk4tDRxGKJeg==", + "dependencies": [ + "@aws-crypto/supports-web-crypto", + "@aws-crypto/util", + "@aws-sdk/types", + "@aws-sdk/util-locate-window", + "@smithy/util-utf8@2.3.0", + "tslib" + ] + }, + "@aws-crypto/sha256-browser@5.2.0": { + "integrity": "sha512-AXfN/lGotSQwu6HNcEsIASo7kWXZ5HYWvfOmSNKDsEqC4OashTp8alTmaz+F7TC2L083SFv5RdB+qU3Vs1kZqw==", + "dependencies": [ + "@aws-crypto/sha256-js", + "@aws-crypto/supports-web-crypto", + "@aws-crypto/util", + "@aws-sdk/types", + "@aws-sdk/util-locate-window", + "@smithy/util-utf8@2.3.0", + "tslib" + ] + }, + "@aws-crypto/sha256-js@5.2.0": { + "integrity": "sha512-FFQQyu7edu4ufvIZ+OadFpHHOt+eSTBaYaki44c+akjg7qZg9oOQeLlk77F6tSYqjDAFClrHJk9tMf0HdVyOvA==", + "dependencies": [ + 
"@aws-crypto/util", + "@aws-sdk/types", + "tslib" + ] + }, + "@aws-crypto/supports-web-crypto@5.2.0": { + "integrity": "sha512-iAvUotm021kM33eCdNfwIN//F77/IADDSs58i+MDaOqFrVjZo9bAal0NK7HurRuWLLpF1iLX7gbWrjHjeo+YFg==", + "dependencies": [ + "tslib" + ] + }, + "@aws-crypto/util@5.2.0": { + "integrity": "sha512-4RkU9EsI6ZpBve5fseQlGNUWKMa1RLPQ1dnjnQoe07ldfIzcsGb5hC5W0Dm7u423KWzawlrpbjXBrXCEv9zazQ==", + "dependencies": [ + "@aws-sdk/types", + "@smithy/util-utf8@2.3.0", + "tslib" + ] + }, + "@aws-sdk/client-s3@3.824.0": { + "integrity": "sha512-7neTQIdSVP/F4RTWG5T87LDpB955iQD6lxg9nJ00fdkIPczDcRtAEXow44NjF4fEdpQ1A9jokUtBSVE+GMXZ/A==", + "dependencies": [ + "@aws-crypto/sha1-browser", + "@aws-crypto/sha256-browser", + "@aws-crypto/sha256-js", + "@aws-sdk/core", + "@aws-sdk/credential-provider-node", + "@aws-sdk/middleware-bucket-endpoint", + "@aws-sdk/middleware-expect-continue", + "@aws-sdk/middleware-flexible-checksums", + "@aws-sdk/middleware-host-header", + "@aws-sdk/middleware-location-constraint", + "@aws-sdk/middleware-logger", + "@aws-sdk/middleware-recursion-detection", + "@aws-sdk/middleware-sdk-s3", + "@aws-sdk/middleware-ssec", + "@aws-sdk/middleware-user-agent", + "@aws-sdk/region-config-resolver", + "@aws-sdk/signature-v4-multi-region", + "@aws-sdk/types", + "@aws-sdk/util-endpoints", + "@aws-sdk/util-user-agent-browser", + "@aws-sdk/util-user-agent-node", + "@aws-sdk/xml-builder", + "@smithy/config-resolver", + "@smithy/core", + "@smithy/eventstream-serde-browser", + "@smithy/eventstream-serde-config-resolver", + "@smithy/eventstream-serde-node", + "@smithy/fetch-http-handler", + "@smithy/hash-blob-browser", + "@smithy/hash-node", + "@smithy/hash-stream-node", + "@smithy/invalid-dependency", + "@smithy/md5-js", + "@smithy/middleware-content-length", + "@smithy/middleware-endpoint", + "@smithy/middleware-retry", + "@smithy/middleware-serde", + "@smithy/middleware-stack", + "@smithy/node-config-provider", + "@smithy/node-http-handler", + "@smithy/protocol-http", + "@smithy/smithy-client", + "@smithy/types", + "@smithy/url-parser", + "@smithy/util-base64", + "@smithy/util-body-length-browser", + "@smithy/util-body-length-node", + "@smithy/util-defaults-mode-browser", + "@smithy/util-defaults-mode-node", + "@smithy/util-endpoints", + "@smithy/util-middleware", + "@smithy/util-retry", + "@smithy/util-stream", + "@smithy/util-utf8@4.0.0", + "@smithy/util-waiter", + "tslib" + ] + }, + "@aws-sdk/client-sesv2@3.824.0": { + "integrity": "sha512-WRssgE34ZTO0It5knqDsjp42CwyAC0RnPLHI1f8lOZIpAZjtTUFWuptKriDoycDXaGPkRcu1dcvrYqUskBXn1A==", + "dependencies": [ + "@aws-crypto/sha256-browser", + "@aws-crypto/sha256-js", + "@aws-sdk/core", + "@aws-sdk/credential-provider-node", + "@aws-sdk/middleware-host-header", + "@aws-sdk/middleware-logger", + "@aws-sdk/middleware-recursion-detection", + "@aws-sdk/middleware-user-agent", + "@aws-sdk/region-config-resolver", + "@aws-sdk/signature-v4-multi-region", + "@aws-sdk/types", + "@aws-sdk/util-endpoints", + "@aws-sdk/util-user-agent-browser", + "@aws-sdk/util-user-agent-node", + "@smithy/config-resolver", + "@smithy/core", + "@smithy/fetch-http-handler", + "@smithy/hash-node", + "@smithy/invalid-dependency", + "@smithy/middleware-content-length", + "@smithy/middleware-endpoint", + "@smithy/middleware-retry", + "@smithy/middleware-serde", + "@smithy/middleware-stack", + "@smithy/node-config-provider", + "@smithy/node-http-handler", + "@smithy/protocol-http", + "@smithy/smithy-client", + "@smithy/types", + "@smithy/url-parser", + "@smithy/util-base64", + 
"@smithy/util-body-length-browser", + "@smithy/util-body-length-node", + "@smithy/util-defaults-mode-browser", + "@smithy/util-defaults-mode-node", + "@smithy/util-endpoints", + "@smithy/util-middleware", + "@smithy/util-retry", + "@smithy/util-utf8@4.0.0", + "tslib" + ] + }, + "@aws-sdk/client-sso@3.823.0": { + "integrity": "sha512-dBWdsbyGw8rPfdCsZySNtTOGQK4EZ8lxB/CneSQWRBPHgQ+Ys88NXxImO8xfWO7Itt1eh8O7UDTZ9+smcvw2pw==", + "dependencies": [ + "@aws-crypto/sha256-browser", + "@aws-crypto/sha256-js", + "@aws-sdk/core", + "@aws-sdk/middleware-host-header", + "@aws-sdk/middleware-logger", + "@aws-sdk/middleware-recursion-detection", + "@aws-sdk/middleware-user-agent", + "@aws-sdk/region-config-resolver", + "@aws-sdk/types", + "@aws-sdk/util-endpoints", + "@aws-sdk/util-user-agent-browser", + "@aws-sdk/util-user-agent-node", + "@smithy/config-resolver", + "@smithy/core", + "@smithy/fetch-http-handler", + "@smithy/hash-node", + "@smithy/invalid-dependency", + "@smithy/middleware-content-length", + "@smithy/middleware-endpoint", + "@smithy/middleware-retry", + "@smithy/middleware-serde", + "@smithy/middleware-stack", + "@smithy/node-config-provider", + "@smithy/node-http-handler", + "@smithy/protocol-http", + "@smithy/smithy-client", + "@smithy/types", + "@smithy/url-parser", + "@smithy/util-base64", + "@smithy/util-body-length-browser", + "@smithy/util-body-length-node", + "@smithy/util-defaults-mode-browser", + "@smithy/util-defaults-mode-node", + "@smithy/util-endpoints", + "@smithy/util-middleware", + "@smithy/util-retry", + "@smithy/util-utf8@4.0.0", + "tslib" + ] + }, + "@aws-sdk/core@3.823.0": { + "integrity": "sha512-1Cf4w8J7wYexz0KU3zpaikHvldGXQEjFldHOhm0SBGRy7qfYNXecfJAamccF7RdgLxKGgkv5Pl9zX/Z/DcW9zg==", + "dependencies": [ + "@aws-sdk/types", + "@aws-sdk/xml-builder", + "@smithy/core", + "@smithy/node-config-provider", + "@smithy/property-provider", + "@smithy/protocol-http", + "@smithy/signature-v4", + "@smithy/smithy-client", + "@smithy/types", + "@smithy/util-base64", + "@smithy/util-body-length-browser", + "@smithy/util-middleware", + "@smithy/util-utf8@4.0.0", + "fast-xml-parser", + "tslib" + ] + }, + "@aws-sdk/credential-provider-env@3.823.0": { + "integrity": "sha512-AIrLLwumObge+U1klN4j5ToIozI+gE9NosENRyHe0GIIZgTLOG/8jxrMFVYFeNHs7RUtjDTxxewislhFyGxJ/w==", + "dependencies": [ + "@aws-sdk/core", + "@aws-sdk/types", + "@smithy/property-provider", + "@smithy/types", + "tslib" + ] + }, + "@aws-sdk/credential-provider-http@3.823.0": { + "integrity": "sha512-u4DXvB/J/o2bcvP1JP6n3ch7V3/NngmiJFPsM0hKUyRlLuWM37HEDEdjPRs3/uL/soTxrEhWKTA9//YVkvzI0w==", + "dependencies": [ + "@aws-sdk/core", + "@aws-sdk/types", + "@smithy/fetch-http-handler", + "@smithy/node-http-handler", + "@smithy/property-provider", + "@smithy/protocol-http", + "@smithy/smithy-client", + "@smithy/types", + "@smithy/util-stream", + "tslib" + ] + }, + "@aws-sdk/credential-provider-ini@3.823.0": { + "integrity": "sha512-C0o63qviK5yFvjH9zKWAnCUBkssJoQ1A1XAHe0IAQkurzoNBSmu9oVemqwnKKHA4H6QrmusaEERfL00yohIkJA==", + "dependencies": [ + "@aws-sdk/core", + "@aws-sdk/credential-provider-env", + "@aws-sdk/credential-provider-http", + "@aws-sdk/credential-provider-process", + "@aws-sdk/credential-provider-sso", + "@aws-sdk/credential-provider-web-identity", + "@aws-sdk/nested-clients", + "@aws-sdk/types", + "@smithy/credential-provider-imds", + "@smithy/property-provider", + "@smithy/shared-ini-file-loader", + "@smithy/types", + "tslib" + ] + }, + "@aws-sdk/credential-provider-node@3.823.0": { + "integrity": 
"sha512-nfSxXVuZ+2GJDpVFlflNfh55Yb4BtDsXLGNssXF5YU6UgSPsi8j2YkaE92Jv2s7dlUK07l0vRpLyPuXMaGeiRQ==", + "dependencies": [ + "@aws-sdk/credential-provider-env", + "@aws-sdk/credential-provider-http", + "@aws-sdk/credential-provider-ini", + "@aws-sdk/credential-provider-process", + "@aws-sdk/credential-provider-sso", + "@aws-sdk/credential-provider-web-identity", + "@aws-sdk/types", + "@smithy/credential-provider-imds", + "@smithy/property-provider", + "@smithy/shared-ini-file-loader", + "@smithy/types", + "tslib" + ] + }, + "@aws-sdk/credential-provider-process@3.823.0": { + "integrity": "sha512-U/A10/7zu2FbMFFVpIw95y0TZf+oYyrhZTBn9eL8zgWcrYRqxrxdqtPj/zMrfIfyIvQUhuJSENN4dx4tfpCMWQ==", + "dependencies": [ + "@aws-sdk/core", + "@aws-sdk/types", + "@smithy/property-provider", + "@smithy/shared-ini-file-loader", + "@smithy/types", + "tslib" + ] + }, + "@aws-sdk/credential-provider-sso@3.823.0": { + "integrity": "sha512-ff8IM80Wqz1V7VVMaMUqO2iR417jggfGWLPl8j2l7uCgwpEyop1ZZl5CFVYEwSupRBtwp+VlW1gTCk7ke56MUw==", + "dependencies": [ + "@aws-sdk/client-sso", + "@aws-sdk/core", + "@aws-sdk/token-providers", + "@aws-sdk/types", + "@smithy/property-provider", + "@smithy/shared-ini-file-loader", + "@smithy/types", + "tslib" + ] + }, + "@aws-sdk/credential-provider-web-identity@3.823.0": { + "integrity": "sha512-lzoZdJMQq9w7i4lXVka30cVBe/dZoUDZST8Xz/soEd73gg7RTKgG+0szL4xFWgdBDgcJDWLfZfJzlbyIVyAyOA==", + "dependencies": [ + "@aws-sdk/core", + "@aws-sdk/nested-clients", + "@aws-sdk/types", + "@smithy/property-provider", + "@smithy/types", + "tslib" + ] + }, + "@aws-sdk/middleware-bucket-endpoint@3.821.0": { + "integrity": "sha512-cebgeytKlWOgGczLo3BPvNY9XlzAzGZQANSysgJ2/8PSldmUpXRIF+GKPXDVhXeInWYHIfB8zZi3RqrPoXcNYQ==", + "dependencies": [ + "@aws-sdk/types", + "@aws-sdk/util-arn-parser", + "@smithy/node-config-provider", + "@smithy/protocol-http", + "@smithy/types", + "@smithy/util-config-provider", + "tslib" + ] + }, + "@aws-sdk/middleware-expect-continue@3.821.0": { + "integrity": "sha512-zAOoSZKe1njOrtynvK6ZORU57YGv5I7KP4+rwOvUN3ZhJbQ7QPf8gKtFUCYAPRMegaXCKF/ADPtDZBAmM+zZ9g==", + "dependencies": [ + "@aws-sdk/types", + "@smithy/protocol-http", + "@smithy/types", + "tslib" + ] + }, + "@aws-sdk/middleware-flexible-checksums@3.823.0": { + "integrity": "sha512-Elt6G1ryEEdkrppqbyJON0o2x4x9xKknimJtMLdfG1b4YfO9X+UB31pk4R2SHvMYfrJ+p8DE2jRAhvV4g/dwIQ==", + "dependencies": [ + "@aws-crypto/crc32", + "@aws-crypto/crc32c", + "@aws-crypto/util", + "@aws-sdk/core", + "@aws-sdk/types", + "@smithy/is-array-buffer@4.0.0", + "@smithy/node-config-provider", + "@smithy/protocol-http", + "@smithy/types", + "@smithy/util-middleware", + "@smithy/util-stream", + "@smithy/util-utf8@4.0.0", + "tslib" + ] + }, + "@aws-sdk/middleware-host-header@3.821.0": { + "integrity": "sha512-xSMR+sopSeWGx5/4pAGhhfMvGBHioVBbqGvDs6pG64xfNwM5vq5s5v6D04e2i+uSTj4qGa71dLUs5I0UzAK3sw==", + "dependencies": [ + "@aws-sdk/types", + "@smithy/protocol-http", + "@smithy/types", + "tslib" + ] + }, + "@aws-sdk/middleware-location-constraint@3.821.0": { + "integrity": "sha512-sKrm80k0t3R0on8aA/WhWFoMaAl4yvdk+riotmMElLUpcMcRXAd1+600uFVrxJqZdbrKQ0mjX0PjT68DlkYXLg==", + "dependencies": [ + "@aws-sdk/types", + "@smithy/types", + "tslib" + ] + }, + "@aws-sdk/middleware-logger@3.821.0": { + "integrity": "sha512-0cvI0ipf2tGx7fXYEEN5fBeZDz2RnHyb9xftSgUsEq7NBxjV0yTZfLJw6Za5rjE6snC80dRN8+bTNR1tuG89zA==", + "dependencies": [ + "@aws-sdk/types", + "@smithy/types", + "tslib" + ] + }, + "@aws-sdk/middleware-recursion-detection@3.821.0": { + "integrity": 
"sha512-efmaifbhBoqKG3bAoEfDdcM8hn1psF+4qa7ykWuYmfmah59JBeqHLfz5W9m9JoTwoKPkFcVLWZxnyZzAnVBOIg==", + "dependencies": [ + "@aws-sdk/types", + "@smithy/protocol-http", + "@smithy/types", + "tslib" + ] + }, + "@aws-sdk/middleware-sdk-s3@3.823.0": { + "integrity": "sha512-UV755wt2HDru8PbxLn2S0Fvwgdn9mYamexn31Q6wyUGQ6rkpjKNEzL+oNDGQQmDQAOcQO+nLubKFsCwtBM02fQ==", + "dependencies": [ + "@aws-sdk/core", + "@aws-sdk/types", + "@aws-sdk/util-arn-parser", + "@smithy/core", + "@smithy/node-config-provider", + "@smithy/protocol-http", + "@smithy/signature-v4", + "@smithy/smithy-client", + "@smithy/types", + "@smithy/util-config-provider", + "@smithy/util-middleware", + "@smithy/util-stream", + "@smithy/util-utf8@4.0.0", + "tslib" + ] + }, + "@aws-sdk/middleware-ssec@3.821.0": { + "integrity": "sha512-YYi1Hhr2AYiU/24cQc8HIB+SWbQo6FBkMYojVuz/zgrtkFmALxENGF/21OPg7f/QWd+eadZJRxCjmRwh5F2Cxg==", + "dependencies": [ + "@aws-sdk/types", + "@smithy/types", + "tslib" + ] + }, + "@aws-sdk/middleware-user-agent@3.823.0": { + "integrity": "sha512-TKRQK09ld1LrIPExC9rIDpqnMsWcv+eq8ABKFHVo8mDLTSuWx/IiQ4eCh9T5zDuEZcLY4nNYCSzXKqw6XKcMCA==", + "dependencies": [ + "@aws-sdk/core", + "@aws-sdk/types", + "@aws-sdk/util-endpoints", + "@smithy/core", + "@smithy/protocol-http", + "@smithy/types", + "tslib" + ] + }, + "@aws-sdk/nested-clients@3.823.0": { + "integrity": "sha512-/BcyOBubrJnd2gxlbbmNJR1w0Z3OVN/UE8Yz20e+ou+Mijjv7EbtVwmWvio1e3ZjphwdA8tVfPYZKwXmrvHKmQ==", + "dependencies": [ + "@aws-crypto/sha256-browser", + "@aws-crypto/sha256-js", + "@aws-sdk/core", + "@aws-sdk/middleware-host-header", + "@aws-sdk/middleware-logger", + "@aws-sdk/middleware-recursion-detection", + "@aws-sdk/middleware-user-agent", + "@aws-sdk/region-config-resolver", + "@aws-sdk/types", + "@aws-sdk/util-endpoints", + "@aws-sdk/util-user-agent-browser", + "@aws-sdk/util-user-agent-node", + "@smithy/config-resolver", + "@smithy/core", + "@smithy/fetch-http-handler", + "@smithy/hash-node", + "@smithy/invalid-dependency", + "@smithy/middleware-content-length", + "@smithy/middleware-endpoint", + "@smithy/middleware-retry", + "@smithy/middleware-serde", + "@smithy/middleware-stack", + "@smithy/node-config-provider", + "@smithy/node-http-handler", + "@smithy/protocol-http", + "@smithy/smithy-client", + "@smithy/types", + "@smithy/url-parser", + "@smithy/util-base64", + "@smithy/util-body-length-browser", + "@smithy/util-body-length-node", + "@smithy/util-defaults-mode-browser", + "@smithy/util-defaults-mode-node", + "@smithy/util-endpoints", + "@smithy/util-middleware", + "@smithy/util-retry", + "@smithy/util-utf8@4.0.0", + "tslib" + ] + }, + "@aws-sdk/region-config-resolver@3.821.0": { + "integrity": "sha512-t8og+lRCIIy5nlId0bScNpCkif8sc0LhmtaKsbm0ZPm3sCa/WhCbSZibjbZ28FNjVCV+p0D9RYZx0VDDbtWyjw==", + "dependencies": [ + "@aws-sdk/types", + "@smithy/node-config-provider", + "@smithy/types", + "@smithy/util-config-provider", + "@smithy/util-middleware", + "tslib" + ] + }, + "@aws-sdk/signature-v4-multi-region@3.824.0": { + "integrity": "sha512-HBjuWeN6Z1pvJjUvGXdMNLwEypKKB4km6zXj9jsbOOwP8NTL6J5rY+JmlX/mfBTmvzmI0kMu2bxlQ4ME2CIRbA==", + "dependencies": [ + "@aws-sdk/middleware-sdk-s3", + "@aws-sdk/types", + "@smithy/protocol-http", + "@smithy/signature-v4", + "@smithy/types", + "tslib" + ] + }, + "@aws-sdk/token-providers@3.823.0": { + "integrity": "sha512-vz6onCb/+g4y+owxGGPMEMdN789dTfBOgz/c9pFv0f01840w9Rrt46l+gjQlnXnx+0KG6wNeBIVhFdbCfV3HyQ==", + "dependencies": [ + "@aws-sdk/core", + "@aws-sdk/nested-clients", + "@aws-sdk/types", + 
"@smithy/property-provider", + "@smithy/shared-ini-file-loader", + "@smithy/types", + "tslib" + ] + }, + "@aws-sdk/types@3.821.0": { + "integrity": "sha512-Znroqdai1a90TlxGaJ+FK1lwC0fHpo97Xjsp5UKGR5JODYm7f9+/fF17ebO1KdoBr/Rm0UIFiF5VmI8ts9F1eA==", + "dependencies": [ + "@smithy/types", + "tslib" + ] + }, + "@aws-sdk/util-arn-parser@3.804.0": { + "integrity": "sha512-wmBJqn1DRXnZu3b4EkE6CWnoWMo1ZMvlfkqU5zPz67xx1GMaXlDCchFvKAXMjk4jn/L1O3tKnoFDNsoLV1kgNQ==", + "dependencies": [ + "tslib" + ] + }, + "@aws-sdk/util-endpoints@3.821.0": { + "integrity": "sha512-Uknt/zUZnLE76zaAAPEayOeF5/4IZ2puTFXvcSCWHsi9m3tqbb9UozlnlVqvCZLCRWfQryZQoG2W4XSS3qgk5A==", + "dependencies": [ + "@aws-sdk/types", + "@smithy/types", + "@smithy/util-endpoints", + "tslib" + ] + }, + "@aws-sdk/util-locate-window@3.804.0": { + "integrity": "sha512-zVoRfpmBVPodYlnMjgVjfGoEZagyRF5IPn3Uo6ZvOZp24chnW/FRstH7ESDHDDRga4z3V+ElUQHKpFDXWyBW5A==", + "dependencies": [ + "tslib" + ] + }, + "@aws-sdk/util-user-agent-browser@3.821.0": { + "integrity": "sha512-irWZHyM0Jr1xhC+38OuZ7JB6OXMLPZlj48thElpsO1ZSLRkLZx5+I7VV6k3sp2yZ7BYbKz/G2ojSv4wdm7XTLw==", + "dependencies": [ + "@aws-sdk/types", + "@smithy/types", + "bowser", + "tslib" + ] + }, + "@aws-sdk/util-user-agent-node@3.823.0": { + "integrity": "sha512-WvNeRz7HV3JLBVGTXW4Qr5QvvWY0vtggH5jW/NqHFH+ZEliVQaUIJ/HNLMpMoCSiu/DlpQAyAjRZXAptJ0oqbw==", + "dependencies": [ + "@aws-sdk/middleware-user-agent", + "@aws-sdk/types", + "@smithy/node-config-provider", + "@smithy/types", + "tslib" + ] + }, + "@aws-sdk/xml-builder@3.821.0": { + "integrity": "sha512-DIIotRnefVL6DiaHtO6/21DhJ4JZnnIwdNbpwiAhdt/AVbttcE4yw925gsjur0OGv5BTYXQXU3YnANBYnZjuQA==", + "dependencies": [ + "@smithy/types", + "tslib" + ] + }, + "@hono/zod-validator@0.7.0_hono@4.7.11_zod@3.25.51": { + "integrity": "sha512-qe2ZE6sHFE98dcUrbYMtS3bAV8hqcCOflykvZga2S7XhmNSZzT+dIz4OuMILsjLHkJw9JMn912/dB7dQOmuPvg==", + "dependencies": [ + "hono", + "zod" + ] + }, + "@smithy/abort-controller@4.0.4": { + "integrity": "sha512-gJnEjZMvigPDQWHrW3oPrFhQtkrgqBkyjj3pCIdF3A5M6vsZODG93KNlfJprv6bp4245bdT32fsHK4kkH3KYDA==", + "dependencies": [ + "@smithy/types", + "tslib" + ] + }, + "@smithy/chunked-blob-reader-native@4.0.0": { + "integrity": "sha512-R9wM2yPmfEMsUmlMlIgSzOyICs0x9uu7UTHoccMyt7BWw8shcGM8HqB355+BZCPBcySvbTYMs62EgEQkNxz2ig==", + "dependencies": [ + "@smithy/util-base64", + "tslib" + ] + }, + "@smithy/chunked-blob-reader@5.0.0": { + "integrity": "sha512-+sKqDBQqb036hh4NPaUiEkYFkTUGYzRsn3EuFhyfQfMy6oGHEUJDurLP9Ufb5dasr/XiAmPNMr6wa9afjQB+Gw==", + "dependencies": [ + "tslib" + ] + }, + "@smithy/config-resolver@4.1.4": { + "integrity": "sha512-prmU+rDddxHOH0oNcwemL+SwnzcG65sBF2yXRO7aeXIn/xTlq2pX7JLVbkBnVLowHLg4/OL4+jBmv9hVrVGS+w==", + "dependencies": [ + "@smithy/node-config-provider", + "@smithy/types", + "@smithy/util-config-provider", + "@smithy/util-middleware", + "tslib" + ] + }, + "@smithy/core@3.5.3": { + "integrity": "sha512-xa5byV9fEguZNofCclv6v9ra0FYh5FATQW/da7FQUVTic94DfrN/NvmKZjrMyzbpqfot9ZjBaO8U1UeTbmSLuA==", + "dependencies": [ + "@smithy/middleware-serde", + "@smithy/protocol-http", + "@smithy/types", + "@smithy/util-base64", + "@smithy/util-body-length-browser", + "@smithy/util-middleware", + "@smithy/util-stream", + "@smithy/util-utf8@4.0.0", + "tslib" + ] + }, + "@smithy/credential-provider-imds@4.0.6": { + "integrity": "sha512-hKMWcANhUiNbCJouYkZ9V3+/Qf9pteR1dnwgdyzR09R4ODEYx8BbUysHwRSyex4rZ9zapddZhLFTnT4ZijR4pw==", + "dependencies": [ + "@smithy/node-config-provider", + "@smithy/property-provider", + "@smithy/types", + 
"@smithy/url-parser", + "tslib" + ] + }, + "@smithy/eventstream-codec@4.0.4": { + "integrity": "sha512-7XoWfZqWb/QoR/rAU4VSi0mWnO2vu9/ltS6JZ5ZSZv0eovLVfDfu0/AX4ub33RsJTOth3TiFWSHS5YdztvFnig==", + "dependencies": [ + "@aws-crypto/crc32", + "@smithy/types", + "@smithy/util-hex-encoding", + "tslib" + ] + }, + "@smithy/eventstream-serde-browser@4.0.4": { + "integrity": "sha512-3fb/9SYaYqbpy/z/H3yIi0bYKyAa89y6xPmIqwr2vQiUT2St+avRt8UKwsWt9fEdEasc5d/V+QjrviRaX1JRFA==", + "dependencies": [ + "@smithy/eventstream-serde-universal", + "@smithy/types", + "tslib" + ] + }, + "@smithy/eventstream-serde-config-resolver@4.1.2": { + "integrity": "sha512-JGtambizrWP50xHgbzZI04IWU7LdI0nh/wGbqH3sJesYToMi2j/DcoElqyOcqEIG/D4tNyxgRuaqBXWE3zOFhQ==", + "dependencies": [ + "@smithy/types", + "tslib" + ] + }, + "@smithy/eventstream-serde-node@4.0.4": { + "integrity": "sha512-RD6UwNZ5zISpOWPuhVgRz60GkSIp0dy1fuZmj4RYmqLVRtejFqQ16WmfYDdoSoAjlp1LX+FnZo+/hkdmyyGZ1w==", + "dependencies": [ + "@smithy/eventstream-serde-universal", + "@smithy/types", + "tslib" + ] + }, + "@smithy/eventstream-serde-universal@4.0.4": { + "integrity": "sha512-UeJpOmLGhq1SLox79QWw/0n2PFX+oPRE1ZyRMxPIaFEfCqWaqpB7BU9C8kpPOGEhLF7AwEqfFbtwNxGy4ReENA==", + "dependencies": [ + "@smithy/eventstream-codec", + "@smithy/types", + "tslib" + ] + }, + "@smithy/fetch-http-handler@5.0.4": { + "integrity": "sha512-AMtBR5pHppYMVD7z7G+OlHHAcgAN7v0kVKEpHuTO4Gb199Gowh0taYi9oDStFeUhetkeP55JLSVlTW1n9rFtUw==", + "dependencies": [ + "@smithy/protocol-http", + "@smithy/querystring-builder", + "@smithy/types", + "@smithy/util-base64", + "tslib" + ] + }, + "@smithy/hash-blob-browser@4.0.4": { + "integrity": "sha512-WszRiACJiQV3QG6XMV44i5YWlkrlsM5Yxgz4jvsksuu7LDXA6wAtypfPajtNTadzpJy3KyJPoWehYpmZGKUFIQ==", + "dependencies": [ + "@smithy/chunked-blob-reader", + "@smithy/chunked-blob-reader-native", + "@smithy/types", + "tslib" + ] + }, + "@smithy/hash-node@4.0.4": { + "integrity": "sha512-qnbTPUhCVnCgBp4z4BUJUhOEkVwxiEi1cyFM+Zj6o+aY8OFGxUQleKWq8ltgp3dujuhXojIvJWdoqpm6dVO3lQ==", + "dependencies": [ + "@smithy/types", + "@smithy/util-buffer-from@4.0.0", + "@smithy/util-utf8@4.0.0", + "tslib" + ] + }, + "@smithy/hash-stream-node@4.0.4": { + "integrity": "sha512-wHo0d8GXyVmpmMh/qOR0R7Y46/G1y6OR8U+bSTB4ppEzRxd1xVAQ9xOE9hOc0bSjhz0ujCPAbfNLkLrpa6cevg==", + "dependencies": [ + "@smithy/types", + "@smithy/util-utf8@4.0.0", + "tslib" + ] + }, + "@smithy/invalid-dependency@4.0.4": { + "integrity": "sha512-bNYMi7WKTJHu0gn26wg8OscncTt1t2b8KcsZxvOv56XA6cyXtOAAAaNP7+m45xfppXfOatXF3Sb1MNsLUgVLTw==", + "dependencies": [ + "@smithy/types", + "tslib" + ] + }, + "@smithy/is-array-buffer@2.2.0": { + "integrity": "sha512-GGP3O9QFD24uGeAXYUjwSTXARoqpZykHadOmA8G5vfJPK0/DC67qa//0qvqrJzL1xc8WQWX7/yc7fwudjPHPhA==", + "dependencies": [ + "tslib" + ] + }, + "@smithy/is-array-buffer@4.0.0": { + "integrity": "sha512-saYhF8ZZNoJDTvJBEWgeBccCg+yvp1CX+ed12yORU3NilJScfc6gfch2oVb4QgxZrGUx3/ZJlb+c/dJbyupxlw==", + "dependencies": [ + "tslib" + ] + }, + "@smithy/md5-js@4.0.4": { + "integrity": "sha512-uGLBVqcOwrLvGh/v/jw423yWHq/ofUGK1W31M2TNspLQbUV1Va0F5kTxtirkoHawODAZcjXTSGi7JwbnPcDPJg==", + "dependencies": [ + "@smithy/types", + "@smithy/util-utf8@4.0.0", + "tslib" + ] + }, + "@smithy/middleware-content-length@4.0.4": { + "integrity": "sha512-F7gDyfI2BB1Kc+4M6rpuOLne5LOcEknH1n6UQB69qv+HucXBR1rkzXBnQTB2q46sFy1PM/zuSJOB532yc8bg3w==", + "dependencies": [ + "@smithy/protocol-http", + "@smithy/types", + "tslib" + ] + }, + "@smithy/middleware-endpoint@4.1.11": { + "integrity": 
"sha512-zDogwtRLzKl58lVS8wPcARevFZNBOOqnmzWWxVe9XiaXU2CADFjvJ9XfNibgkOWs08sxLuSr81NrpY4mgp9OwQ==", + "dependencies": [ + "@smithy/core", + "@smithy/middleware-serde", + "@smithy/node-config-provider", + "@smithy/shared-ini-file-loader", + "@smithy/types", + "@smithy/url-parser", + "@smithy/util-middleware", + "tslib" + ] + }, + "@smithy/middleware-retry@4.1.12": { + "integrity": "sha512-wvIH70c4e91NtRxdaLZF+mbLZ/HcC6yg7ySKUiufL6ESp6zJUSnJucZ309AvG9nqCFHSRB5I6T3Ez1Q9wCh0Ww==", + "dependencies": [ + "@smithy/node-config-provider", + "@smithy/protocol-http", + "@smithy/service-error-classification", + "@smithy/smithy-client", + "@smithy/types", + "@smithy/util-middleware", + "@smithy/util-retry", + "tslib", + "uuid" + ] + }, + "@smithy/middleware-serde@4.0.8": { + "integrity": "sha512-iSSl7HJoJaGyMIoNn2B7czghOVwJ9nD7TMvLhMWeSB5vt0TnEYyRRqPJu/TqW76WScaNvYYB8nRoiBHR9S1Ddw==", + "dependencies": [ + "@smithy/protocol-http", + "@smithy/types", + "tslib" + ] + }, + "@smithy/middleware-stack@4.0.4": { + "integrity": "sha512-kagK5ggDrBUCCzI93ft6DjteNSfY8Ulr83UtySog/h09lTIOAJ/xUSObutanlPT0nhoHAkpmW9V5K8oPyLh+QA==", + "dependencies": [ + "@smithy/types", + "tslib" + ] + }, + "@smithy/node-config-provider@4.1.3": { + "integrity": "sha512-HGHQr2s59qaU1lrVH6MbLlmOBxadtzTsoO4c+bF5asdgVik3I8o7JIOzoeqWc5MjVa+vD36/LWE0iXKpNqooRw==", + "dependencies": [ + "@smithy/property-provider", + "@smithy/shared-ini-file-loader", + "@smithy/types", + "tslib" + ] + }, + "@smithy/node-http-handler@4.0.6": { + "integrity": "sha512-NqbmSz7AW2rvw4kXhKGrYTiJVDHnMsFnX4i+/FzcZAfbOBauPYs2ekuECkSbtqaxETLLTu9Rl/ex6+I2BKErPA==", + "dependencies": [ + "@smithy/abort-controller", + "@smithy/protocol-http", + "@smithy/querystring-builder", + "@smithy/types", + "tslib" + ] + }, + "@smithy/property-provider@4.0.4": { + "integrity": "sha512-qHJ2sSgu4FqF4U/5UUp4DhXNmdTrgmoAai6oQiM+c5RZ/sbDwJ12qxB1M6FnP+Tn/ggkPZf9ccn4jqKSINaquw==", + "dependencies": [ + "@smithy/types", + "tslib" + ] + }, + "@smithy/protocol-http@5.1.2": { + "integrity": "sha512-rOG5cNLBXovxIrICSBm95dLqzfvxjEmuZx4KK3hWwPFHGdW3lxY0fZNXfv2zebfRO7sJZ5pKJYHScsqopeIWtQ==", + "dependencies": [ + "@smithy/types", + "tslib" + ] + }, + "@smithy/querystring-builder@4.0.4": { + "integrity": "sha512-SwREZcDnEYoh9tLNgMbpop+UTGq44Hl9tdj3rf+yeLcfH7+J8OXEBaMc2kDxtyRHu8BhSg9ADEx0gFHvpJgU8w==", + "dependencies": [ + "@smithy/types", + "@smithy/util-uri-escape", + "tslib" + ] + }, + "@smithy/querystring-parser@4.0.4": { + "integrity": "sha512-6yZf53i/qB8gRHH/l2ZwUG5xgkPgQF15/KxH0DdXMDHjesA9MeZje/853ifkSY0x4m5S+dfDZ+c4x439PF0M2w==", + "dependencies": [ + "@smithy/types", + "tslib" + ] + }, + "@smithy/service-error-classification@4.0.5": { + "integrity": "sha512-LvcfhrnCBvCmTee81pRlh1F39yTS/+kYleVeLCwNtkY8wtGg8V/ca9rbZZvYIl8OjlMtL6KIjaiL/lgVqHD2nA==", + "dependencies": [ + "@smithy/types" + ] + }, + "@smithy/shared-ini-file-loader@4.0.4": { + "integrity": "sha512-63X0260LoFBjrHifPDs+nM9tV0VMkOTl4JRMYNuKh/f5PauSjowTfvF3LogfkWdcPoxsA9UjqEOgjeYIbhb7Nw==", + "dependencies": [ + "@smithy/types", + "tslib" + ] + }, + "@smithy/signature-v4@5.1.2": { + "integrity": "sha512-d3+U/VpX7a60seHziWnVZOHuEgJlclufjkS6zhXvxcJgkJq4UWdH5eOBLzHRMx6gXjsdT9h6lfpmLzbrdupHgQ==", + "dependencies": [ + "@smithy/is-array-buffer@4.0.0", + "@smithy/protocol-http", + "@smithy/types", + "@smithy/util-hex-encoding", + "@smithy/util-middleware", + "@smithy/util-uri-escape", + "@smithy/util-utf8@4.0.0", + "tslib" + ] + }, + "@smithy/smithy-client@4.4.3": { + "integrity": 
"sha512-xxzNYgA0HD6ETCe5QJubsxP0hQH3QK3kbpJz3QrosBCuIWyEXLR/CO5hFb2OeawEKUxMNhz3a1nuJNN2np2RMA==", + "dependencies": [ + "@smithy/core", + "@smithy/middleware-endpoint", + "@smithy/middleware-stack", + "@smithy/protocol-http", + "@smithy/types", + "@smithy/util-stream", + "tslib" + ] + }, + "@smithy/types@4.3.1": { + "integrity": "sha512-UqKOQBL2x6+HWl3P+3QqFD4ncKq0I8Nuz9QItGv5WuKuMHuuwlhvqcZCoXGfc+P1QmfJE7VieykoYYmrOoFJxA==", + "dependencies": [ + "tslib" + ] + }, + "@smithy/url-parser@4.0.4": { + "integrity": "sha512-eMkc144MuN7B0TDA4U2fKs+BqczVbk3W+qIvcoCY6D1JY3hnAdCuhCZODC+GAeaxj0p6Jroz4+XMUn3PCxQQeQ==", + "dependencies": [ + "@smithy/querystring-parser", + "@smithy/types", + "tslib" + ] + }, + "@smithy/util-base64@4.0.0": { + "integrity": "sha512-CvHfCmO2mchox9kjrtzoHkWHxjHZzaFojLc8quxXY7WAAMAg43nuxwv95tATVgQFNDwd4M9S1qFzj40Ul41Kmg==", + "dependencies": [ + "@smithy/util-buffer-from@4.0.0", + "@smithy/util-utf8@4.0.0", + "tslib" + ] + }, + "@smithy/util-body-length-browser@4.0.0": { + "integrity": "sha512-sNi3DL0/k64/LO3A256M+m3CDdG6V7WKWHdAiBBMUN8S3hK3aMPhwnPik2A/a2ONN+9doY9UxaLfgqsIRg69QA==", + "dependencies": [ + "tslib" + ] + }, + "@smithy/util-body-length-node@4.0.0": { + "integrity": "sha512-q0iDP3VsZzqJyje8xJWEJCNIu3lktUGVoSy1KB0UWym2CL1siV3artm+u1DFYTLejpsrdGyCSWBdGNjJzfDPjg==", + "dependencies": [ + "tslib" + ] + }, + "@smithy/util-buffer-from@2.2.0": { + "integrity": "sha512-IJdWBbTcMQ6DA0gdNhh/BwrLkDR+ADW5Kr1aZmd4k3DIF6ezMV4R2NIAmT08wQJ3yUK82thHWmC/TnK/wpMMIA==", + "dependencies": [ + "@smithy/is-array-buffer@2.2.0", + "tslib" + ] + }, + "@smithy/util-buffer-from@4.0.0": { + "integrity": "sha512-9TOQ7781sZvddgO8nxueKi3+yGvkY35kotA0Y6BWRajAv8jjmigQ1sBwz0UX47pQMYXJPahSKEKYFgt+rXdcug==", + "dependencies": [ + "@smithy/is-array-buffer@4.0.0", + "tslib" + ] + }, + "@smithy/util-config-provider@4.0.0": { + "integrity": "sha512-L1RBVzLyfE8OXH+1hsJ8p+acNUSirQnWQ6/EgpchV88G6zGBTDPdXiiExei6Z1wR2RxYvxY/XLw6AMNCCt8H3w==", + "dependencies": [ + "tslib" + ] + }, + "@smithy/util-defaults-mode-browser@4.0.19": { + "integrity": "sha512-mvLMh87xSmQrV5XqnUYEPoiFFeEGYeAKIDDKdhE2ahqitm8OHM3aSvhqL6rrK6wm1brIk90JhxDf5lf2hbrLbQ==", + "dependencies": [ + "@smithy/property-provider", + "@smithy/smithy-client", + "@smithy/types", + "bowser", + "tslib" + ] + }, + "@smithy/util-defaults-mode-node@4.0.19": { + "integrity": "sha512-8tYnx+LUfj6m+zkUUIrIQJxPM1xVxfRBvoGHua7R/i6qAxOMjqR6CpEpDwKoIs1o0+hOjGvkKE23CafKL0vJ9w==", + "dependencies": [ + "@smithy/config-resolver", + "@smithy/credential-provider-imds", + "@smithy/node-config-provider", + "@smithy/property-provider", + "@smithy/smithy-client", + "@smithy/types", + "tslib" + ] + }, + "@smithy/util-endpoints@3.0.6": { + "integrity": "sha512-YARl3tFL3WgPuLzljRUnrS2ngLiUtkwhQtj8PAL13XZSyUiNLQxwG3fBBq3QXFqGFUXepIN73pINp3y8c2nBmA==", + "dependencies": [ + "@smithy/node-config-provider", + "@smithy/types", + "tslib" + ] + }, + "@smithy/util-hex-encoding@4.0.0": { + "integrity": "sha512-Yk5mLhHtfIgW2W2WQZWSg5kuMZCVbvhFmC7rV4IO2QqnZdbEFPmQnCcGMAX2z/8Qj3B9hYYNjZOhWym+RwhePw==", + "dependencies": [ + "tslib" + ] + }, + "@smithy/util-middleware@4.0.4": { + "integrity": "sha512-9MLKmkBmf4PRb0ONJikCbCwORACcil6gUWojwARCClT7RmLzF04hUR4WdRprIXal7XVyrddadYNfp2eF3nrvtQ==", + "dependencies": [ + "@smithy/types", + "tslib" + ] + }, + "@smithy/util-retry@4.0.5": { + "integrity": "sha512-V7MSjVDTlEt/plmOFBn1762Dyu5uqMrV2Pl2X0dYk4XvWfdWJNe9Bs5Bzb56wkCuiWjSfClVMGcsuKrGj7S/yg==", + "dependencies": [ + "@smithy/service-error-classification", + "@smithy/types", + "tslib" + ] + 
}, + "@smithy/util-stream@4.2.2": { + "integrity": "sha512-aI+GLi7MJoVxg24/3J1ipwLoYzgkB4kUfogZfnslcYlynj3xsQ0e7vk4TnTro9hhsS5PvX1mwmkRqqHQjwcU7w==", + "dependencies": [ + "@smithy/fetch-http-handler", + "@smithy/node-http-handler", + "@smithy/types", + "@smithy/util-base64", + "@smithy/util-buffer-from@4.0.0", + "@smithy/util-hex-encoding", + "@smithy/util-utf8@4.0.0", + "tslib" + ] + }, + "@smithy/util-uri-escape@4.0.0": { + "integrity": "sha512-77yfbCbQMtgtTylO9itEAdpPXSog3ZxMe09AEhm0dU0NLTalV70ghDZFR+Nfi1C60jnJoh/Re4090/DuZh2Omg==", + "dependencies": [ + "tslib" + ] + }, + "@smithy/util-utf8@2.3.0": { + "integrity": "sha512-R8Rdn8Hy72KKcebgLiv8jQcQkXoLMOGGv5uI1/k0l+snqkOzQ1R0ChUBCxWMlBsFMekWjq0wRudIweFs7sKT5A==", + "dependencies": [ + "@smithy/util-buffer-from@2.2.0", + "tslib" + ] + }, + "@smithy/util-utf8@4.0.0": { + "integrity": "sha512-b+zebfKCfRdgNJDknHCob3O7FpeYQN6ZG6YLExMcasDHsCXlsXCEuiPZeLnJLpwa5dvPetGlnGCiMHuLwGvFow==", + "dependencies": [ + "@smithy/util-buffer-from@4.0.0", + "tslib" + ] + }, + "@smithy/util-waiter@4.0.5": { + "integrity": "sha512-4QvC49HTteI1gfemu0I1syWovJgPvGn7CVUoN9ZFkdvr/cCFkrEL7qNCdx/2eICqDWEGnnr68oMdSIPCLAriSQ==", + "dependencies": [ + "@smithy/abort-controller", + "@smithy/types", + "tslib" + ] + }, + "@types/lodash@4.17.17": { + "integrity": "sha512-RRVJ+J3J+WmyOTqnz3PiBLA501eKwXl2noseKOrNo/6+XEHjTAxO4xHvxQB6QuNm+s4WRbn6rSiap8+EA+ykFQ==" + }, + "@types/mustache@4.2.6": { + "integrity": "sha512-t+8/QWTAhOFlrF1IVZqKnMRJi84EgkIK5Kh0p2JV4OLywUvCwJPFxbJAl7XAow7DVIHsF+xW9f1MVzg0L6Szjw==" + }, + "@types/node@22.15.15": { + "integrity": "sha512-R5muMcZob3/Jjchn5LcO8jdKwSCbzqmPB6ruBxMcf9kbxtniZHP327s6C37iOfuw8mbKK3cAQa7sEl7afLrQ8A==", + "dependencies": [ + "undici-types" + ] + }, + "@types/yargs-parser@21.0.3": { + "integrity": "sha512-I4q9QU9MQv4oEOz4tAHJtNz1cwuLxn2F3xcc2iV5WdqLPpUnj30aUuxt1mAxYTG+oe8CZMV/+6rU4S4gRDzqtQ==" + }, + "@types/yargs@17.0.33": { + "integrity": "sha512-WpxBCKWPLr4xSsHgz511rFJAM+wS28w2zEO1QDNY5zM/S8ok70NNfztH0xwhqKyaK0OHCbN98LDAZuy1ctxDkA==", + "dependencies": [ + "@types/yargs-parser" + ] + }, + "ansi-regex@6.1.0": { + "integrity": "sha512-7HSX4QQb4CspciLpVFwyRe79O3xsIZDDLER21kERQ71oaPodF8jL725AgJMFAYbooIqolJoRLuM81SpeUkpkvA==" + }, + "ansi-styles@6.2.1": { + "integrity": "sha512-bN798gFfQX+viw3R7yrGWRqnrN2oRkEkUjjl4JNn4E8GxxbjtG3FbrEIIY3l8/hrwUwIeCZvi4QuOTP4MErVug==" + }, + "bowser@2.11.0": { + "integrity": "sha512-AlcaJBi/pqqJBIQ8U9Mcpc9i8Aqxn88Skv5d+xBX006BY5u8N3mGLHa5Lgppa7L/HfwgwLgZ6NYs+Ag6uUmJRA==" + }, + "cliui@9.0.1": { + "integrity": "sha512-k7ndgKhwoQveBL+/1tqGJYNz097I7WOvwbmmU2AR5+magtbjPWQTS1C5vzGkBC8Ym8UWRzfKUzUUqFLypY4Q+w==", + "dependencies": [ + "string-width", + "strip-ansi", + "wrap-ansi" + ] + }, + "email-addresses@5.0.0": { + "integrity": "sha512-4OIPYlA6JXqtVn8zpHpGiI7vE6EQOAg16aGnDMIAlZVinnoZ8208tW1hAbjWydgN/4PLTT9q+O1K6AH/vALJGw==" + }, + "emoji-regex@10.4.0": { + "integrity": "sha512-EC+0oUMY1Rqm4O6LLrgjtYDvcVYTy7chDnM4Q7030tP4Kwj3u/pR6gP9ygnp2CJMK5Gq+9Q2oqmrFJAz01DXjw==" + }, + "escalade@3.2.0": { + "integrity": "sha512-WUj2qlxaQtO4g6Pq5c29GTcWGDyd8itL8zTlipgECz3JesAiiOKotd8JU6otB3PACgG6xkJUyVhboMS+bje/jA==" + }, + "fast-xml-parser@4.4.1": { + "integrity": "sha512-xkjOecfnKGkSsOwtZ5Pz7Us/T6mrbPQrq0nh+aCO5V9nk5NLWmasAHumTKjiPJPWANe+kAZ84Jc8ooJkzZ88Sw==", + "dependencies": [ + "strnum" + ], + "bin": true + }, + "get-caller-file@2.0.5": { + "integrity": "sha512-DyFP3BM/3YHTQOCUL/w0OZHR0lpKeGrxotcHWcqNEdnltqFwXVfhEBQ94eIo34AfQpo0rGki4cyIiftY06h2Fg==" + }, + "get-east-asian-width@1.3.0": { + "integrity": 
"sha512-vpeMIQKxczTD/0s2CdEWHcb0eeJe6TFjxb+J5xgX7hScxqrGuyjmv4c1D4A/gelKfyox0gJJwIHF+fLjeaM8kQ==" + }, + "hono@4.7.11": { + "integrity": "sha512-rv0JMwC0KALbbmwJDEnxvQCeJh+xbS3KEWW5PC9cMJ08Ur9xgatI0HmtgYZfOdOSOeYsp5LO2cOhdI8cLEbDEQ==" + }, + "kysely@0.28.2": { + "integrity": "sha512-4YAVLoF0Sf0UTqlhgQMFU9iQECdah7n+13ANkiuVfRvlK+uI0Etbgd7bVP36dKlG+NXWbhGua8vnGt+sdhvT7A==" + }, + "mustache@4.2.0": { + "integrity": "sha512-71ippSywq5Yb7/tVYyGbkBggbU8H3u5Rz56fH60jGFgr8uHwxs+aSKeqmluIVzM0m0kB7xQjKS6qPfd0b2ZoqQ==", + "bin": true + }, + "string-width@7.2.0": { + "integrity": "sha512-tsaTIkKW9b4N+AEj+SVA+WhJzV7/zMhcSu78mLKWSk7cXMOSHsBKFWUs0fWwq8QyK3MgJBQRX6Gbi4kYbdvGkQ==", + "dependencies": [ + "emoji-regex", + "get-east-asian-width", + "strip-ansi" + ] + }, + "strip-ansi@7.1.0": { + "integrity": "sha512-iq6eVVI64nQQTRYq2KtEg2d2uU7LElhTJwsH4YzIHZshxlgZms/wIc4VoDQTlG/IvVIrBKG06CrZnp0qv7hkcQ==", + "dependencies": [ + "ansi-regex" + ] + }, + "strnum@1.1.2": { + "integrity": "sha512-vrN+B7DBIoTTZjnPNewwhx6cBA/H+IS7rfW68n7XxC1y7uoiGQBxaKzqucGUgavX15dJgiGztLJ8vxuEzwqBdA==" + }, + "tslib@2.8.1": { + "integrity": "sha512-oJFu94HQb+KVduSUQL7wnpmqnfmLsOA/nAh6b6EH0wCEoK0/mPeXU6c3wKDV83MkOuHPRHtSXKKU99IBazS/2w==" + }, + "undici-types@6.21.0": { + "integrity": "sha512-iwDZqg0QAGrg9Rav5H4n0M64c3mkR59cJ6wQp+7C4nI0gsmExaedaYLNO44eT4AtBBwjbTiGPMlt2Md0T9H9JQ==" + }, + "uuid@9.0.1": { + "integrity": "sha512-b+1eJOlsR9K8HJpow9Ok3fiWOWSIcIzXodvv0rQjVoOVNpWMpxf1wZNpt4y9h10odCNrqnYp1OBzRktckBe3sA==", + "bin": true + }, + "wrap-ansi@9.0.0": { + "integrity": "sha512-G8ura3S+3Z2G+mkgNRq8dqaFZAuxfsxpBB8OCTGRTCtp+l/v9nbFNmCUP1BZMts3G1142MsZfn6eeUKrr4PD1Q==", + "dependencies": [ + "ansi-styles", + "string-width", + "strip-ansi" + ] + }, + "y18n@5.0.8": { + "integrity": "sha512-0pfFzegeDWJHJIAmTLRP2DwHjdF5s7jo9tuztdQxAhINCdvS+3nGINqPd00AphqJR/0LhANUS6/+7SCb98YOfA==" + }, + "yargs-parser@22.0.0": { + "integrity": "sha512-rwu/ClNdSMpkSrUb+d6BRsSkLUq1fmfsY6TOpYzTwvwkg1/NRG85KBy3kq++A8LKQwX6lsu+aWad+2khvuXrqw==" + }, + "yargs@18.0.0": { + "integrity": "sha512-4UEqdc2RYGHZc7Doyqkrqiln3p9X2DZVxaGbwhn2pi7MrRagKaOcIKe8L3OxYcbhXLgLFUS3zAYuQjKBQgmuNg==", + "dependencies": [ + "cliui", + "escalade", + "get-caller-file", + "string-width", + "y18n", + "yargs-parser" + ] + }, + "zod@3.25.51": { + "integrity": "sha512-TQSnBldh+XSGL+opiSIq0575wvDPqu09AqWe1F7JhUMKY+M91/aGlK4MhpVNO7MgYfHcVCB1ffwAUTJzllKJqg==" + } + }, + "workspace": { + "dependencies": [ + "jsr:@std/collections@^1.1.1", + "jsr:@std/csv@^1.0.6", + "jsr:@std/encoding@^1.0.10", + "jsr:@std/expect@^1.0.16", + "jsr:@std/fs@^1.0.18", + "jsr:@std/io@~0.225.2", + "jsr:@std/path@^1.1.0", + "jsr:@std/testing@^1.0.13", + "npm:@types/yargs@^17.0.33", + "npm:yargs@18" + ], + "members": { + "mail": { + "dependencies": [ + "jsr:@db/sqlite@0.12", + "npm:@aws-sdk/client-s3@^3.821.0", + "npm:@aws-sdk/client-sesv2@^3.821.0", + "npm:@hono/zod-validator@0.7", + "npm:@smithy/fetch-http-handler@^5.0.4", + "npm:email-addresses@5", + "npm:hono@^4.7.11", + "npm:kysely@~0.28.2", + "npm:zod@^3.25.48" + ] + }, + "tools": { + "dependencies": [ + "npm:mustache@^4.2.0" + ] + } + } + } +} diff --git a/deno/mail/app.ts b/deno/mail/app.ts new file mode 100644 index 0000000..332c430 --- /dev/null +++ b/deno/mail/app.ts @@ -0,0 +1,83 @@ +import { Hono } from "hono"; +import { logger as honoLogger } from "hono/logger"; + +import { + AliasRecipientMailHook, + FallbackRecipientHook, + MailDeliverer, + RecipientFromHeadersHook, +} from "./mail.ts"; +import { DovecotMailDeliverer } from "./dovecot.ts"; +import { 
DumbSmtpServer } from "./dumb-smtp-server.ts"; + +export function createInbound( + { + fallback, + mailDomain, + aliasFile, + ldaPath, + doveadmPath, + }: { + fallback: string[]; + mailDomain: string; + aliasFile: string; + ldaPath: string; + doveadmPath: string; + }, +) { + const deliverer = new DovecotMailDeliverer(ldaPath, doveadmPath); + deliverer.preHooks.push( + new RecipientFromHeadersHook(mailDomain), + new FallbackRecipientHook(new Set(fallback)), + new AliasRecipientMailHook(aliasFile), + ); + return deliverer; +} + +export function createHono(outbound: MailDeliverer, inbound: MailDeliverer) { + const hono = new Hono(); + + hono.onError((err, c) => { + console.error("Hono handler threw an uncaught error.", err); + return c.json({ message: "Server error, check its log." }, 500); + }); + hono.use(honoLogger()); + hono.post("/send/raw", async (context) => { + const body = await context.req.text(); + if (body.trim().length === 0) { + return context.json({ message: "Can't send an empty mail." }, 400); + } else { + const result = await outbound.deliverRaw(body); + return context.json({ + awsMessageId: result.awsMessageId, + }); + } + }); + hono.post("/receive/raw", async (context) => { + await inbound.deliverRaw(await context.req.text()); + return context.json({ message: "Done!" }); + }); + + return hono; +} + +export function createSmtp(outbound: MailDeliverer) { + return new DumbSmtpServer(outbound); +} + +export async function sendMail(port: number) { + const decoder = new TextDecoder(); + let text = ""; + for await (const chunk of Deno.stdin.readable) { + text += decoder.decode(chunk); + } + + const res = await fetch(`http://127.0.0.1:${port}/send/raw`, { + method: "post", + body: text, + }); + const fn = res.ok ? "info" : "error"; + console[fn](res); + console[fn](await res.text()); + if (!res.ok) Deno.exit(-1); +} diff --git a/deno/mail/aws/app.ts b/deno/mail/aws/app.ts new file mode 100644 index 0000000..6c7ede7 --- /dev/null +++ b/deno/mail/aws/app.ts @@ -0,0 +1,318 @@ +import { join } from "@std/path"; +import { z } from "zod"; +import { Hono } from "hono"; +import { zValidator } from "@hono/zod-validator"; +import { FetchHttpHandler } from "@smithy/fetch-http-handler"; +// @ts-types="npm:@types/yargs" +import yargs from "yargs"; + +import { ConfigDefinition, ConfigProvider } from "@crupest/base/config"; +import { CronTask } from "@crupest/base/cron"; + +import { DbService } from "../db.ts"; +import { createHono, createInbound, createSmtp, sendMail } from "../app.ts"; +import { DovecotMailDeliverer } from "../dovecot.ts"; +import { MailDeliverer } from "../mail.ts"; +import { + AwsMailMessageIdRewriteHook, + AwsMailMessageIdSaveHook, +} from "./mail.ts"; +import { AwsMailDeliverer } from "./deliver.ts"; +import { AwsMailFetcher, LiveMailNotFoundError } from "./fetch.ts"; + + +const PREFIX = "crupest-mail-server"; +const CONFIG_DEFINITIONS = { + dataPath: { + description: "Path to save app persistent data.", + default: ".", + }, + mailDomain: { + description: + "The part after `@` of an address. 
Used to determine local recipients.", + }, + httpHost: { + description: "Listening address for http server.", + default: "0.0.0.0", + }, + httpPort: { description: "Listening port for http server.", default: "2345" }, + smtpHost: { + description: "Listening address for dumb smtp server.", + default: "127.0.0.1", + }, + smtpPort: { + description: "Listening port for dumb smtp server.", + default: "2346", + }, + ldaPath: { + description: "full path of lda executable", + default: "/dovecot/libexec/dovecot/dovecot-lda", + }, + doveadmPath: { + description: "full path of doveadm executable", + default: "/dovecot/bin/doveadm", + }, + inboundFallback: { + description: "comma separated addresses used as fallback recipients", + default: "", + }, + awsInboundPath: { + description: "(random set) path for aws sns", + }, + awsInboundKey: { + description: "(random set) http header Authorization for aws sns", + }, + awsRegion: { + description: "aws region", + }, + awsUser: { + description: "aws access key id", + }, + awsPassword: { + description: "aws secret access key", + secret: true, + }, + awsMailBucket: { + description: "aws s3 bucket saving raw mails", + secret: true, + }, +} as const satisfies ConfigDefinition; + +function createAwsOptions({ + user, + password, + region, +}: { + user: string; + password: string; + region: string; +}) { + return { + credentials: () => + Promise.resolve({ + accessKeyId: user, + secretAccessKey: password, + }), + requestHandler: new FetchHttpHandler(), + region, + }; +} + +function createOutbound( + awsOptions: ReturnType<typeof createAwsOptions>, + db: DbService, + local?: DovecotMailDeliverer, +) { + const deliverer = new AwsMailDeliverer(awsOptions); + deliverer.preHooks.push( + new AwsMailMessageIdRewriteHook(db.messageIdToAws.bind(db)), + ); + deliverer.postHooks.push( + new AwsMailMessageIdSaveHook( + async (original, aws, context) => { + await db.addMessageIdMap({ message_id: original, aws_message_id: aws }); + void local?.saveNewSent(context.logTag, context.mail, original); + }, + ), + ); + return deliverer; +} + +function setupAwsHono( + hono: Hono, + options: { + path: string; + auth: string; + fetcher: AwsMailFetcher; + deliverer: MailDeliverer; + }, +) { + let counter = 1; + + hono.post( + `/${options.path}`, + async (ctx, next) => { + const auth = ctx.req.header("Authorization"); + if (auth !== options.auth) { + return ctx.json({ message: "Bad auth!" }, 403); + } + await next(); + }, + zValidator( + "json", + z.object({ + key: z.string(), + recipients: z.optional(z.array(z.string())), + }), + ), + async (ctx) => { + const { fetcher, deliverer } = options; + const { key, recipients } = ctx.req.valid("json"); + try { + await fetcher.deliverLiveMail( + `[inbound ${counter++}]`, + key, + deliverer, + recipients, + ); + } catch (e) { + if (e instanceof LiveMailNotFoundError) { + return ctx.json({ message: e.message }); + } + throw e; + } + return ctx.json({ message: "Done!" 
}); + }, + ); +} + +function createCron(fetcher: AwsMailFetcher, deliverer: MailDeliverer) { + return new CronTask({ + name: "live-mail-recycler", + interval: 6 * 3600 * 1000, + callback: () => { + return fetcher.recycleLiveMails(deliverer); + }, + startNow: true, + }); +} + +function createBaseServices() { + const config = new ConfigProvider(PREFIX, CONFIG_DEFINITIONS); + Deno.mkdirSync(config.get("dataPath"), { recursive: true }); + return { config }; +} + +function createAwsFetchOnlyServices() { + const services = createBaseServices(); + const { config } = services; + + const awsOptions = createAwsOptions({ + user: config.get("awsUser"), + password: config.get("awsPassword"), + region: config.get("awsRegion"), + }); + const fetcher = new AwsMailFetcher(awsOptions, config.get("awsMailBucket")); + + return { ...services, awsOptions, fetcher }; +} + +function createAwsRecycleOnlyServices() { + const services = createAwsFetchOnlyServices(); + const { config } = services; + + const inbound = createInbound({ + fallback: config.getList("inboundFallback"), + ldaPath: config.get("ldaPath"), + doveadmPath: config.get("doveadmPath"), + aliasFile: join(config.get("dataPath"), "aliases.csv"), + mailDomain: config.get("mailDomain"), + }); + + return { ...services, inbound }; +} + +function createAwsServices() { + const services = createAwsRecycleOnlyServices(); + const { config, awsOptions, inbound } = services; + + const dbService = new DbService(join(config.get("dataPath"), "db.sqlite")); + const outbound = createOutbound(awsOptions, dbService, inbound); + + return { ...services, dbService, outbound }; +} + +function createServerServices() { + const services = createAwsServices(); + const { config, outbound, inbound, fetcher } = services; + + const smtp = createSmtp(outbound); + const hono = createHono(outbound, inbound); + + setupAwsHono(hono, { + path: config.get("awsInboundPath"), + auth: config.get("awsInboundKey"), + fetcher, + deliverer: inbound, + }); + + return { ...services, smtp, hono }; +} + +async function serve(cron: boolean = false) { + const { config, fetcher, inbound, smtp, dbService, hono } = createServerServices(); + + await dbService.migrate(); + + smtp.serve({ + hostname: config.get("smtpHost"), + port: config.getInt("smtpPort"), + }); + Deno.serve( + { + hostname: config.get("httpHost"), + port: config.getInt("httpPort"), + }, + hono.fetch, + ); + + if (cron) { + createCron(fetcher, inbound); + } +} + +async function listLives() { + const { fetcher } = createAwsFetchOnlyServices(); + const liveMails = await fetcher.listLiveMails(); + console.info(`Total ${liveMails.length}:`); + if (liveMails.length !== 0) { + console.info(liveMails.join("\n")); + } +} + +async function recycleLives() { + const { fetcher, inbound } = createAwsRecycleOnlyServices(); + await fetcher.recycleLiveMails(inbound); +} + +if (import.meta.main) { + await yargs(Deno.args) + .scriptName("mail") + .command({ + command: "sendmail", + describe: "send mail via this server's endpoint", + handler: async (_argv) => { + const { config } = createBaseServices(); + await sendMail(config.getInt("httpPort")); + }, + }) + .command({ + command: "live", + describe: "work with live mails", + builder: (builder) => { + return builder + .command({ + command: "list", + describe: "list live mails", + handler: listLives, + }) + .command({ + command: "recycle", + describe: "recycle all live mails", + handler: recycleLives, + }) + .demandCommand(1, "One command must be specified."); + }, + handler: () => {}, + }) + .command({ + 
command: "serve", + describe: "start the http and smtp servers", + builder: (builder) => builder.option("real", { type: "boolean" }), + handler: (argv) => serve(argv.real), + }) + .demandCommand(1, "One command must be specified.") + .help() + .strict() + .parse(); +} diff --git a/deno/mail/aws/deliver.ts b/deno/mail/aws/deliver.ts new file mode 100644 index 0000000..0195369 --- /dev/null +++ b/deno/mail/aws/deliver.ts @@ -0,0 +1,63 @@ +import { + SendEmailCommand, + SESv2Client, + SESv2ClientConfig, +} from "@aws-sdk/client-sesv2"; + +import { Mail, MailDeliverContext, MailDeliverer } from "../mail.ts"; + +declare module "../mail.ts" { + interface MailDeliverResult { + awsMessageId?: string; + } +} + +export class AwsMailDeliverer extends MailDeliverer { + readonly name = "aws"; + readonly #aws; + readonly #ses; + + constructor(aws: SESv2ClientConfig) { + super(true); + this.#aws = aws; + this.#ses = new SESv2Client(aws); + } + + protected override async doDeliver( + mail: Mail, + context: MailDeliverContext, + ): Promise<void> { + try { + const sendCommand = new SendEmailCommand({ + Content: { + Raw: { Data: mail.toUtf8Bytes() }, + }, + }); + + console.info(context.logTag, "Calling aws send-email api..."); + const res = await this.#ses.send(sendCommand); + if (res.MessageId == null) { + console.warn( + context.logTag, + "AWS send-email returned null message id.", + ); + } else { + context.result.awsMessageId = + `${res.MessageId}@${this.#aws.region}.amazonses.com`; + } + + context.result.smtpMessage = + `AWS Message ID: ${context.result.awsMessageId}`; + context.result.recipients.set("*", { + kind: "success", + message: `Succeeded to call aws send-email api.`, + }); + } catch (cause) { + context.result.recipients.set("*", { + kind: "failure", + message: "A JS error was thrown when calling aws send-email." 
+ cause, + cause, + }); + } + } +} diff --git a/deno/mail/aws/fetch.ts b/deno/mail/aws/fetch.ts new file mode 100644 index 0000000..2154972 --- /dev/null +++ b/deno/mail/aws/fetch.ts @@ -0,0 +1,136 @@ +import { + CopyObjectCommand, + DeleteObjectCommand, + GetObjectCommand, + ListObjectsV2Command, + NoSuchKey, + S3Client, + S3ClientConfig, +} from "@aws-sdk/client-s3"; + +import { DateUtils } from "@crupest/base"; + +import { Mail } from "../mail.ts"; +import { MailDeliverer } from "../mail.ts"; + +export class LiveMailNotFoundError extends Error {} + +async function s3MoveObject( + client: S3Client, + bucket: string, + path: string, + newPath: string, +): Promise<void> { + const copyCommand = new CopyObjectCommand({ + Bucket: bucket, + Key: newPath, + CopySource: `${bucket}/${path}`, + }); + await client.send(copyCommand); + + const deleteCommand = new DeleteObjectCommand({ + Bucket: bucket, + Key: path, + }); + await client.send(deleteCommand); +} + +const AWS_SES_S3_SETUP_TAG = "AMAZON_SES_SETUP_NOTIFICATION"; + +export class AwsMailFetcher { + readonly #livePrefix = "mail/live/"; + readonly #archivePrefix = "mail/archive/"; + readonly #s3; + readonly #bucket; + + constructor(aws: S3ClientConfig, bucket: string) { + this.#s3 = new S3Client(aws); + this.#bucket = bucket; + } + + async listLiveMails(): Promise<string[]> { + const listCommand = new ListObjectsV2Command({ + Bucket: this.#bucket, + Prefix: this.#livePrefix, + }); + const res = await this.#s3.send(listCommand); + + if (res.Contents == null) { + console.warn("S3 API returned null Content."); + return []; + } + + const result: string[] = []; + for (const object of res.Contents) { + if (object.Key == null) { + console.warn("S3 API returned null Key."); + continue; + } + + if (object.Key.endsWith(AWS_SES_S3_SETUP_TAG)) continue; + + result.push(object.Key.slice(this.#livePrefix.length)); + } + return result; + } + + async deliverLiveMail( + logTag: string, + s3Key: string, + deliverer: MailDeliverer, + recipients?: string[], + ) { + console.info(logTag, `Fetching live mail ${s3Key}...`); + const mailPath = `${this.#livePrefix}${s3Key}`; + const command = new GetObjectCommand({ + Bucket: this.#bucket, + Key: mailPath, + }); + + let rawMail; + + try { + const res = await this.#s3.send(command); + if (res.Body == null) { + throw new Error("S3 API returns a null body."); + } + rawMail = await res.Body.transformToString(); + } catch (cause) { + if (cause instanceof NoSuchKey) { + const message = + `Live mail ${s3Key} is not found. Perhaps already delivered?`; + console.error(message, cause); + throw new LiveMailNotFoundError(message); + } + throw cause; + } + + const mail = new Mail(rawMail); + await deliverer.deliver({ mail, recipients }); + + const { date } = new Mail(rawMail).parsed; + const dateString = date != null + ? 
DateUtils.toFileNameString(date, true) + : "invalid-date"; + const newPath = `${this.#archivePrefix}${dateString}/${s3Key}`; + + console.info(logTag, `Archiving live mail ${s3Key} to ${newPath}...`); + await s3MoveObject(this.#s3, this.#bucket, mailPath, newPath); + + console.info(logTag, `Done deliver live mail ${s3Key}.`); + } + + async recycleLiveMails(deliverer: MailDeliverer) { + console.info("Begin to recycle live mails..."); + const mails = await this.listLiveMails(); + console.info(`Found ${mails.length} live mails`); + let counter = 1; + for (const s3Key of mails) { + await this.deliverLiveMail( + `[${counter++}/${mails.length}]`, + s3Key, + deliverer, + ); + } + } +} diff --git a/deno/mail/aws/mail.ts b/deno/mail/aws/mail.ts new file mode 100644 index 0000000..26f3ea0 --- /dev/null +++ b/deno/mail/aws/mail.ts @@ -0,0 +1,59 @@ +import { MailDeliverContext, MailDeliverHook } from "../mail.ts"; + +export class AwsMailMessageIdRewriteHook implements MailDeliverHook { + readonly #lookup; + + constructor(lookup: (origin: string) => Promise<string | null>) { + this.#lookup = lookup; + } + + async callback(context: MailDeliverContext): Promise<void> { + const addresses = context.mail.simpleFindAllAddresses(); + for (const address of addresses) { + const awsMessageId = await this.#lookup(address); + if (awsMessageId != null && awsMessageId.length !== 0) { + console.info( + context.logTag, + `Rewrite address-line string in mail: ${address} => ${awsMessageId}.`, + ); + context.mail.raw = context.mail.raw.replaceAll(address, awsMessageId); + } + } + } +} + +export class AwsMailMessageIdSaveHook implements MailDeliverHook { + readonly #record; + + constructor( + record: ( + original: string, + aws: string, + context: MailDeliverContext, + ) => Promise<void>, + ) { + this.#record = record; + } + + async callback(context: MailDeliverContext): Promise<void> { + const { messageId } = context.mail.parsed; + if (messageId == null) { + console.warn( + context.logTag, + "Original mail doesn't have message id, skip saving message id map.", + ); + return; + } + if (context.result.awsMessageId != null) { + console.info( + context.logTag, + `Save message id map: ${messageId} => ${context.result.awsMessageId}.`, + ); + context.mail.raw = context.mail.raw.replaceAll( + messageId, + context.result.awsMessageId, + ); + await this.#record(messageId, context.result.awsMessageId, context); + } + } +} diff --git a/deno/mail/db.test.ts b/deno/mail/db.test.ts new file mode 100644 index 0000000..60035c4 --- /dev/null +++ b/deno/mail/db.test.ts @@ -0,0 +1,23 @@ +import { describe, it } from "@std/testing/bdd"; +import { expect } from "@std/expect/expect"; + +import { DbService } from "./db.ts"; + +describe("DbService", () => { + const mockRow = { + message_id: "mock-message-id@mock.mock", + aws_message_id: "mock-aws-message-id@mock.mock", + }; + + it("works", async () => { + const db = new DbService(":memory:"); + await db.migrate(); + await db.addMessageIdMap(mockRow); + expect(await db.messageIdToAws(mockRow.message_id)).toBe( + mockRow.aws_message_id, + ); + expect(await db.messageIdFromAws(mockRow.aws_message_id)).toBe( + mockRow.message_id, + ); + }); +}); diff --git a/deno/mail/db.ts b/deno/mail/db.ts new file mode 100644 index 0000000..062700b --- /dev/null +++ b/deno/mail/db.ts @@ -0,0 +1,146 @@ +import { + Generated, + Insertable, + Kysely, + Migration, + Migrator, + SqliteDatabase, + SqliteDialect, + SqliteStatement, +} from "kysely"; +import * as sqlite from "@db/sqlite"; + +class SqliteStatementAdapter 
implements SqliteStatement { + constructor(public readonly stmt: sqlite.Statement) {} + + get reader(): boolean { + return this.stmt.columnNames().length >= 1; + } + + all(parameters: readonly unknown[]): unknown[] { + return this.stmt.all(...(parameters as sqlite.BindValue[])); + } + + iterate(parameters: readonly unknown[]): IterableIterator<unknown> { + return this.stmt.iter(...(parameters as sqlite.BindValue[])); + } + + run(parameters: readonly unknown[]): { + changes: number | bigint; + lastInsertRowid: number | bigint; + } { + const { db } = this.stmt; + const totalChangesBefore = db.totalChanges; + const changes = this.stmt.run(...(parameters as sqlite.BindValue[])); + return { + changes: totalChangesBefore === db.totalChanges ? 0 : changes, + lastInsertRowid: db.lastInsertRowId, + }; + } +} + +class SqliteDatabaseAdapter implements SqliteDatabase { + constructor(public readonly db: sqlite.Database) {} + + prepare(sql: string): SqliteStatementAdapter { + return new SqliteStatementAdapter(this.db.prepare(sql)); + } + + close(): void { + this.db.close(); + } +} + +export class DbError extends Error {} + +interface AwsMessageIdMapTable { + id: Generated<number>; + message_id: string; + aws_message_id: string; +} + +interface Database { + aws_message_id_map: AwsMessageIdMapTable; +} + +const migrations: Record<string, Migration> = { + "0001-init": { + // deno-lint-ignore no-explicit-any + async up(db: Kysely<any>): Promise<void> { + await db.schema + .createTable("aws_message_id_map") + .addColumn("id", "integer", (col) => col.primaryKey().autoIncrement()) + .addColumn("message_id", "text", (col) => col.notNull().unique()) + .addColumn("aws_message_id", "text", (col) => col.notNull().unique()) + .execute(); + + for (const column of ["message_id", "aws_message_id"]) { + await db.schema + .createIndex(`aws_message_id_map_${column}`) + .on("aws_message_id_map") + .column(column) + .execute(); + } + }, + + // deno-lint-ignore no-explicit-any + async down(db: Kysely<any>): Promise<void> { + await db.schema.dropTable("aws_message_id_map").execute(); + }, + }, +}; + +export class DbService { + #db; + #kysely; + #migrator; + + constructor(public readonly path: string) { + this.#db = new sqlite.Database(path); + this.#kysely = new Kysely<Database>({ + dialect: new SqliteDialect({ + database: new SqliteDatabaseAdapter(this.#db), + }), + }); + this.#migrator = new Migrator({ + db: this.#kysely, + provider: { + getMigrations(): Promise<Record<string, Migration>> { + return Promise.resolve(migrations); + }, + }, + }); + } + + async migrate(): Promise<void> { + await this.#migrator.migrateToLatest(); + } + + async addMessageIdMap( + mail: Insertable<AwsMessageIdMapTable>, + ): Promise<number> { + const inserted = await this.#kysely + .insertInto("aws_message_id_map") + .values(mail) + .executeTakeFirstOrThrow(); + return Number(inserted.insertId!); + } + + async messageIdToAws(messageId: string): Promise<string | null> { + const row = await this.#kysely + .selectFrom("aws_message_id_map") + .where("message_id", "=", messageId) + .select("aws_message_id") + .executeTakeFirst(); + return row?.aws_message_id ?? null; + } + + async messageIdFromAws(awsMessageId: string): Promise<string | null> { + const row = await this.#kysely + .selectFrom("aws_message_id_map") + .where("aws_message_id", "=", awsMessageId) + .select("message_id") + .executeTakeFirst(); + return row?.message_id ?? 
null; + } +} diff --git a/deno/mail/deno.json b/deno/mail/deno.json new file mode 100644 index 0000000..86a8999 --- /dev/null +++ b/deno/mail/deno.json @@ -0,0 +1,18 @@ +{ + "version": "0.1.0", + "tasks": { + "run": "deno run -A aws/app.ts", + "compile": "deno compile -o out/crupest-mail -A aws/app.ts" + }, + "imports": { + "@aws-sdk/client-s3": "npm:@aws-sdk/client-s3@^3.821.0", + "@aws-sdk/client-sesv2": "npm:@aws-sdk/client-sesv2@^3.821.0", + "@db/sqlite": "jsr:@db/sqlite@^0.12.0", + "@hono/zod-validator": "npm:@hono/zod-validator@^0.7.0", + "@smithy/fetch-http-handler": "npm:@smithy/fetch-http-handler@^5.0.4", + "email-addresses": "npm:email-addresses@^5.0.0", + "hono": "npm:hono@^4.7.11", + "kysely": "npm:kysely@^0.28.2", + "zod": "npm:zod@^3.25.48" + } +} diff --git a/deno/mail/dovecot.ts b/deno/mail/dovecot.ts new file mode 100644 index 0000000..c0d56a2 --- /dev/null +++ b/deno/mail/dovecot.ts @@ -0,0 +1,219 @@ +import { Mail, MailDeliverContext, MailDeliverer } from "./mail.ts"; + +// https://doc.dovecot.org/main/core/man/dovecot-lda.1.html +const ldaExitCodeMessageMap = new Map<number, string>(); +ldaExitCodeMessageMap.set(67, "recipient user not known"); +ldaExitCodeMessageMap.set(75, "temporary error"); + +type CommandResult = { + kind: "exit"; + status: Deno.CommandStatus; + logMessage: string; +} | { kind: "throw"; cause: unknown; logMessage: string }; + +async function runCommand( + bin: string, + options: { + logTag: string; + args: string[]; + stdin?: Uint8Array; + suppressStartLog?: boolean; + suppressResultLog?: boolean; + errorCodeMessageMap?: Map<number, string>; + }, +): Promise<CommandResult> { + const { logTag, args, stdin, suppressResultLog, errorCodeMessageMap } = + options; + + if (options.suppressResultLog !== true) { + console.info(logTag, `Run external command ${bin} ${args.join(" ")}`); + } + + try { + // Create and spawn process. + const command = new Deno.Command(bin, { + args, + stdin: stdin == null ? "null" : "piped", + }); + const process = command.spawn(); + + // Write stdin if any. + if (stdin != null) { + const writer = process.stdin.getWriter(); + await writer.write(stdin); + writer.close(); + } + + // Wait for process to exit. + const status = await process.status; + + // Build log message string. + let message = `External command exited with code ${status.code}`; + if (status.signal != null) message += ` (signal: ${status.signal})`; + if (errorCodeMessageMap != null && errorCodeMessageMap.has(status.code)) { + message += `, ${errorCodeMessageMap.get(status.code)}`; + } + message += "."; + if (suppressResultLog !== true) console.log(logTag, message); + + // Return result. 
+ return { + kind: "exit", + status, + logMessage: message, + }; + } catch (cause) { + const message = `A JS error was thrown when invoking external command:`; + if (suppressResultLog !== true) console.log(logTag, message); + return { kind: "throw", cause, logMessage: message + " " + cause }; + } +} + +export class DovecotMailDeliverer extends MailDeliverer { + readonly name = "dovecot"; + readonly #ldaPath; + readonly #doveadmPath; + + constructor( + ldaPath: string, + doveadmPath: string, + ) { + super(false); + this.#ldaPath = ldaPath; + this.#doveadmPath = doveadmPath; + } + + protected override async doDeliver( + mail: Mail, + context: MailDeliverContext, + ): Promise<void> { + const utf8Bytes = mail.toUtf8Bytes(); + + const recipients = [...context.recipients]; + + if (recipients.length === 0) { + throw new Error( + "Failed to deliver to dovecot, no recipients are specified.", + ); + } + + for (const recipient of recipients) { + const result = await runCommand( + this.#ldaPath, + { + logTag: context.logTag, + args: ["-d", recipient], + stdin: utf8Bytes, + suppressResultLog: true, + errorCodeMessageMap: ldaExitCodeMessageMap, + }, + ); + + if (result.kind === "exit" && result.status.success) { + context.result.recipients.set(recipient, { + kind: "success", + message: result.logMessage, + }); + } else { + context.result.recipients.set(recipient, { + kind: "failure", + message: result.logMessage, + }); + } + } + } + + #queryArgs(mailbox: string, messageId: string) { + return ["mailbox", mailbox, "header", "Message-ID", `<${messageId}>`]; + } + + async #deleteMail( + logTag: string, + user: string, + mailbox: string, + messageId: string, + noLog?: boolean, + ): Promise<void> { + await runCommand(this.#doveadmPath, { + logTag, + args: ["expunge", "-u", user, ...this.#queryArgs(mailbox, messageId)], + suppressStartLog: noLog, + suppressResultLog: noLog, + }); + } + + async #saveMail( + logTag: string, + user: string, + mailbox: string, + mail: Uint8Array, + ) { + await runCommand(this.#doveadmPath, { + logTag, + args: ["save", "-u", user, "-m", mailbox], + stdin: mail, + }); + } + + async #markAsRead( + logTag: string, + user: string, + mailbox: string, + messageId: string, + ) { + await runCommand(this.#doveadmPath, { + logTag, + args: [ + "flags", + "add", + "-u", + user, + "\\Seen", + ...this.#queryArgs(mailbox, messageId), + ], + }); + } + + async saveNewSent(logTag: string, mail: Mail, messageIdToDelete: string) { + console.info(logTag, "Save sent mail and delete ones with old message id."); + + // Try to get from and recipients from headers. + const { messageId, from, recipients } = mail.parsed; + + if (from == null) { + console.warn( + logTag, + "Failed to get sender (from) in headers, skip saving.", + ); + return; + } + + if (recipients.includes(from)) { + // So the mail should lie in the Inbox. 
+ console.info( + logTag, + "One recipient of the mail is the sender itself, skip saving.", + ); + return; + } + + await this.#saveMail(logTag, from, "Sent", mail.toUtf8Bytes()); + if (messageId != null) { + await this.#markAsRead(logTag, from, "Sent", messageId); + } else { + console.warn( + "Message id of the mail is not found, skip marking as read.", + ); + } + + console.info( + logTag, + "Schedule deletion of old mails (no logging) at 5,15,30,60 seconds later.", + ); + [5, 15, 30, 60].forEach((seconds) => + setTimeout(() => { + void this.#deleteMail(logTag, from, "Sent", messageIdToDelete, true); + }, 1000 * seconds) + ); + } +} diff --git a/deno/mail/dumb-smtp-server.ts b/deno/mail/dumb-smtp-server.ts new file mode 100644 index 0000000..70d5ec0 --- /dev/null +++ b/deno/mail/dumb-smtp-server.ts @@ -0,0 +1,129 @@ +import { MailDeliverer } from "./mail.ts"; + +const CRLF = "\r\n"; + +function createResponses(host: string, port: number | string) { + const serverName = `[${host}]:${port}`; + return { + serverName, + READY: `220 ${serverName} SMTP Ready`, + EHLO: `250 ${serverName}`, + MAIL: "250 2.1.0 Sender OK", + RCPT: "250 2.1.5 Recipient OK", + DATA: "354 Start mail input; end with <CRLF>.<CRLF>", + QUIT: `211 2.0.0 ${serverName} closing connection`, + ACTIVE_CLOSE: "421 4.7.0 Please open a new connection to send more emails", + INVALID: "500 5.5.1 Error: command not recognized", + } as const; +} + +export class DumbSmtpServer { + #deliverer; + + constructor(deliverer: MailDeliverer) { + this.#deliverer = deliverer; + } + + async #handleConnection( + logTag: string, + conn: Deno.Conn, + responses: ReturnType<typeof createResponses>, + ) { + using disposeStack = new DisposableStack(); + disposeStack.defer(() => { + console.info(logTag, "Close tcp connection."); + conn.close(); + }); + + console.info(logTag, "New tcp connection established."); + + const writer = conn.writable.getWriter(); + disposeStack.defer(() => writer.releaseLock()); + const reader = conn.readable.getReader(); + disposeStack.defer(() => reader.releaseLock()); + + const [decoder, encoder] = [new TextDecoder(), new TextEncoder()]; + const decode = (data: Uint8Array) => decoder.decode(data); + const send = async (s: string) => { + console.info(logTag, "Send line:", s); + await writer.write(encoder.encode(s + CRLF)); + }; + + let buffer: string = ""; + let rawMail: string | null = null; + + await send(responses["READY"]); + + while (true) { + const { value, done } = await reader.read(); + if (done) break; + + buffer += decode(value); + + while (true) { + const eolPos = buffer.indexOf(CRLF); + if (eolPos === -1) break; + + const line = buffer.slice(0, eolPos); + buffer = buffer.slice(eolPos + CRLF.length); + + if (rawMail == null) { + console.info(logTag, "Received line:", line); + const upperLine = line.toUpperCase(); + if (upperLine.startsWith("EHLO") || upperLine.startsWith("HELO")) { + await send(responses["EHLO"]); + } else if (upperLine.startsWith("MAIL FROM:")) { + await send(responses["MAIL"]); + } else if (upperLine.startsWith("RCPT TO:")) { + await send(responses["RCPT"]); + } else if (upperLine === "DATA") { + await send(responses["DATA"]); + console.info(logTag, "Begin to receive mail data..."); + rawMail = ""; + } else if (upperLine === "QUIT") { + await send(responses["QUIT"]); + return; + } else { + await send(responses["INVALID"]); + return; + } + } else { + if (line === ".") { + try { + console.info(logTag, "Mail data received, begin to relay..."); + const { smtpMessage } = await 
this.#deliverer.deliverRaw(rawMail); + await send(`250 2.6.0 ${smtpMessage}`); + rawMail = null; + } catch (err) { + console.error(logTag, "Relay failed.", err); + await send("554 5.3.0 Error: check server log"); + } + await send(responses["ACTIVE_CLOSE"]); + } else { + const dataLine = line.startsWith("..") ? line.slice(1) : line; + rawMail += dataLine + CRLF; + } + } + } + } + } + + async serve(options: { hostname: string; port: number }) { + const listener = Deno.listen(options); + const responses = createResponses(options.hostname, options.port); + console.info( + `Dumb SMTP server starts to listen on ${responses.serverName}.`, + ); + + let counter = 1; + + for await (const conn of listener) { + const logTag = `[outbound ${counter++}]`; + try { + await this.#handleConnection(logTag, conn, responses); + } catch (cause) { + console.error(logTag, "A JS error was thrown by handler:", cause); + } + } + } +} diff --git a/deno/mail/mail-parsing.ts b/deno/mail/mail-parsing.ts new file mode 100644 index 0000000..8e9697d --- /dev/null +++ b/deno/mail/mail-parsing.ts @@ -0,0 +1,144 @@ +import emailAddresses from "email-addresses"; + +class MailParsingError extends Error {} + +function parseHeaderSection(section: string) { + const headers = [] as [key: string, value: string][]; + + let field: string | null = null; + let lineNumber = 1; + + const handleField = () => { + if (field == null) return; + const sepPos = field.indexOf(":"); + if (sepPos === -1) { + throw new MailParsingError( + `Expect ':' in the header field line: ${field}`, + ); + } + headers.push([field.slice(0, sepPos).trim(), field.slice(sepPos + 1)]); + field = null; + }; + + for (const line of section.trimEnd().split(/\r?\n|\r/)) { + if (line.match(/^\s/)) { + if (field == null) { + throw new MailParsingError("Header section starts with a space."); + } + field += line; + } else { + handleField(); + field = line; + } + lineNumber += 1; + } + + handleField(); + + return headers; +} + +function findFirst(fields: readonly [string, string][], key: string) { + for (const [k, v] of fields) { + if (key.toLowerCase() === k.toLowerCase()) return v; + } + return undefined; +} + +function findMessageId(fields: readonly [string, string][]) { + const messageIdField = findFirst(fields, "message-id"); + if (messageIdField == null) return undefined; + + const match = messageIdField.match(/\<(.*?)\>/); + if (match != null) { + return match[1]; + } else { + console.warn(`Invalid syntax in header 'message-id': ${messageIdField}`); + return undefined; + } +} + +function findDate(fields: readonly [string, string][]) { + const dateField = findFirst(fields, "date"); + if (dateField == null) return undefined; + + const date = new Date(dateField); + if (isNaN(date.getTime())) { + console.warn(`Invalid date string in header 'date': ${dateField}`); + return undefined; + } + return date; +} + +function findFrom(fields: readonly [string, string][]) { + const fromField = findFirst(fields, "from"); + if (fromField == null) return undefined; + + const addr = emailAddresses.parseOneAddress(fromField); + return addr?.type === "mailbox" ? addr.address : undefined; +} + +function findRecipients(fields: readonly [string, string][]) { + const headers = ["to", "cc", "bcc", "x-original-to"]; + const recipients = new Set<string>(); + for (const [key, value] of fields) { + if (headers.includes(key.toLowerCase())) { + emailAddresses + .parseAddressList(value) + ?.flatMap((a) => (a.type === "mailbox" ? 
a : a.addresses)) + ?.forEach(({ address }) => recipients.add(address)); + } + } + return recipients; +} + +function parseSections(raw: string) { + const twoEolMatch = raw.match(/(\r?\n)(\r?\n)/); + if (twoEolMatch == null) { + throw new MailParsingError( + "No header/body section separator (2 successive EOLs) found.", + ); + } + + const [eol, sep] = [twoEolMatch[1], twoEolMatch[2]]; + + if (eol !== sep) { + console.warn("Different EOLs (\\r\\n, \\n) found."); + } + + return { + header: raw.slice(0, twoEolMatch.index!), + body: raw.slice(twoEolMatch.index! + eol.length + sep.length), + eol, + sep, + }; +} + +export type ParsedMail = Readonly<{ + header: string; + body: string; + sep: string; + eol: string; + headers: readonly [string, string][]; + messageId: string | undefined; + date: Date | undefined; + from: string | undefined; + recipients: readonly string[]; +}>; + +export function simpleParseMail(raw: string): ParsedMail { + const sections = Object.freeze(parseSections(raw)); + const headers = Object.freeze(parseHeaderSection(sections.header)); + const messageId = findMessageId(headers); + const date = findDate(headers); + const from = findFrom(headers); + const recipients = Object.freeze([...findRecipients(headers)]); + return Object.freeze({ + ...sections, + headers, + messageId, + date, + from, + recipients, + }); +} diff --git a/deno/mail/mail.test.ts b/deno/mail/mail.test.ts new file mode 100644 index 0000000..a8204be --- /dev/null +++ b/deno/mail/mail.test.ts @@ -0,0 +1,121 @@ +import { describe, it } from "@std/testing/bdd"; +import { expect, fn } from "@std/expect"; + +import { Mail, MailDeliverContext, MailDeliverer } from "./mail.ts"; + +const mockDate = "Fri, 02 May 2025 08:33:02 +0000"; +const mockMessageId = "mock-message-id@from.mock"; +const mockMessageId2 = "mock-message-id-2@from.mock"; +const mockFromAddress = "mock@from.mock"; +const mockCcAddress = "mock@cc.mock"; +const mockBodyStr = `This is body content. 
+Line 2 ${mockMessageId2} + +Line 4`; +const mockHeaders = [ + ["Content-Disposition", "inline"], + ["Content-Transfer-Encoding", "quoted-printable"], + ["MIME-Version", "1.0"], + ["X-Mailer", "MIME-tools 5.509 (Entity 5.509)"], + ["Content-Type", "text/plain; charset=utf-8"], + ["From", `"Mock From" <${mockFromAddress}>`], + [ + "To", + `"John \\"Big\\" Doe" <john@example.com>, "Alice (Work)" <alice+work@example.com>, + undisclosed-recipients:;, "Group: Team" <team@company.com>, + "Escaped, Name" <escape@test.com>, just@email.com, + "Comment (This is valid)" <comment@domain.net>, + "Odd @Chars" <weird!#$%'*+-/=?^_\`{|}~@char-test.com>, + "Non-ASCII 用户" <user@例子.中国>, + admin@[192.168.1.1]`, + ], + ["CC", `Mock CC <${mockCcAddress}>`], + ["Subject", "A very long mock\n subject"], + ["Message-ID", `<${mockMessageId}>`], + ["Date", mockDate], +]; +const mockHeaderStr = mockHeaders.map((h) => h[0] + ": " + h[1]).join("\n"); +const mockMailStr = mockHeaderStr + "\n\n" + mockBodyStr; +const mockCrlfMailStr = mockMailStr.replaceAll("\n", "\r\n"); +const mockToAddresses = [ + "john@example.com", + "alice+work@example.com", + "team@company.com", + "escape@test.com", + "just@email.com", + "comment@domain.net", + "weird!#$%'*+-/=?^_`{|}~@char-test.com", + "user@例子.中国", + "admin@[192.168.1.1]", +]; + +describe("Mail", () => { + it("simple parse", () => { + const { parsed } = new Mail(mockMailStr); + expect(parsed.header).toEqual(mockHeaderStr); + expect(parsed.body).toEqual(mockBodyStr); + expect(parsed.sep).toBe("\n"); + expect(parsed.eol).toBe("\n"); + }); + + it("simple parse crlf", () => { + const { parsed } = new Mail(mockCrlfMailStr); + expect(parsed.sep).toBe("\r\n"); + expect(parsed.eol).toBe("\r\n"); + }); + + it("simple parse date", () => { + expect( + new Mail(mockMailStr).parsed.date, + ).toEqual(new Date(mockDate)); + }); + + it("simple parse headers", () => { + expect( + new Mail(mockMailStr).parsed.headers, + ).toEqual(mockHeaders.map((h) => [h[0], " " + h[1].replaceAll("\n", "")])); + }); + + it("parse recipients", () => { + const mail = new Mail(mockMailStr); + expect([...mail.parsed.recipients]).toEqual([ + ...mockToAddresses, + mockCcAddress, + ]); + }); + + it("find all addresses", () => { + const mail = new Mail(mockMailStr); + expect(mail.simpleFindAllAddresses()).toEqual([ + "mock@from.mock", + "john@example.com", + "alice+work@example.com", + "team@company.com", + "escape@test.com", + "just@email.com", + "comment@domain.net", + "mock@cc.mock", + "mock-message-id@from.mock", + "mock-message-id-2@from.mock", + ]); + }); +}); + +describe("MailDeliverer", () => { + class MockMailDeliverer extends MailDeliverer { + name = "mock"; + override doDeliver = fn((_: Mail, ctx: MailDeliverContext) => { + ctx.result.recipients.set("*", { + kind: "success", + message: "success message", + }); + return Promise.resolve(); + }) as MailDeliverer["doDeliver"]; + } + const mockDeliverer = new MockMailDeliverer(false); + + it("deliver success", async () => { + await mockDeliverer.deliverRaw(mockMailStr); + expect(mockDeliverer.doDeliver).toHaveBeenCalledTimes(1); + }); +}); diff --git a/deno/mail/mail.ts b/deno/mail/mail.ts new file mode 100644 index 0000000..9cc591c --- /dev/null +++ b/deno/mail/mail.ts @@ -0,0 +1,234 @@ +import { encodeBase64 } from "@std/encoding/base64"; +import { parse } from "@std/csv/parse"; +import { simpleParseMail } from "./mail-parsing.ts"; + +export class Mail { + #raw; + #parsed; + + constructor(raw: string) { + this.#raw = raw; + this.#parsed = simpleParseMail(raw); + 
} + + get raw() { + return this.#raw; + } + + set raw(value) { + this.#raw = value; + this.#parsed = simpleParseMail(value); + } + + get parsed() { + return this.#parsed; + } + + toUtf8Bytes(): Uint8Array { + const utf8Encoder = new TextEncoder(); + return utf8Encoder.encode(this.raw); + } + + toBase64(): string { + return encodeBase64(this.raw); + } + + simpleFindAllAddresses(): string[] { + const re = /,?\<?([a-z0-9_'+\-\.]+\@[a-z0-9_'+\-\.]+)\>?,?/gi; + return [...this.raw.matchAll(re)].map((m) => m[1]); + } +} + +export interface MailDeliverRecipientResult { + kind: "success" | "failure"; + message?: string; + cause?: unknown; +} + +export class MailDeliverResult { + message?: string; + smtpMessage?: string; + recipients = new Map<string, MailDeliverRecipientResult>(); + constructor(public mail: Mail) {} + + get hasFailure() { + return this.recipients.values().some((v) => v.kind !== "success"); + } + + generateLogMessage(prefix: string) { + const lines = []; + if (this.message != null) lines.push(`${prefix} message: ${this.message}`); + if (this.smtpMessage != null) { + lines.push(`${prefix} smtpMessage: ${this.smtpMessage}`); + } + for (const [name, result] of this.recipients.entries()) { + const { kind, message } = result; + lines.push(`${prefix} (${name}): ${kind} ${message}`); + } + return lines.join("\n"); + } +} + +export class MailDeliverContext { + readonly recipients: Set<string> = new Set(); + readonly result; + + constructor(public logTag: string, public mail: Mail) { + this.result = new MailDeliverResult(this.mail); + } +} + +export interface MailDeliverHook { + callback(context: MailDeliverContext): Promise<void>; +} + +export abstract class MailDeliverer { + #counter = 1; + #last?: Promise<void>; + + abstract name: string; + preHooks: MailDeliverHook[] = []; + postHooks: MailDeliverHook[] = []; + + constructor(public sync: boolean) {} + + protected abstract doDeliver( + mail: Mail, + context: MailDeliverContext, + ): Promise<void>; + + async deliverRaw(rawMail: string) { + return await this.deliver({ mail: new Mail(rawMail) }); + } + + async #deliverCore(context: MailDeliverContext) { + for (const hook of this.preHooks) { + await hook.callback(context); + } + + await this.doDeliver(context.mail, context); + + for (const hook of this.postHooks) { + await hook.callback(context); + } + } + + async deliver(options: { + mail: Mail; + recipients?: string[]; + logTag?: string; + }): Promise<MailDeliverResult> { + const logTag = options.logTag ?? 
`[${this.name} ${this.#counter}]`; + this.#counter++; + + if (this.#last != null) { + console.info(logTag, "Wait for last delivering done..."); + await this.#last; + } + + const context = new MailDeliverContext( + logTag, + options.mail, + ); + options.recipients?.forEach((r) => context.recipients.add(r)); + + console.info(context.logTag, "Begin to deliver mail..."); + + const deliverPromise = this.#deliverCore(context); + + if (this.sync) { + this.#last = deliverPromise.then(() => {}, () => {}); + } + + await deliverPromise; + this.#last = undefined; + + console.info(context.logTag, "Deliver result:"); + console.info(context.result.generateLogMessage(context.logTag)); + + if (context.result.hasFailure) { + throw new Error("Failed to deliver to some recipients."); + } + + return context.result; + } +} + +export class RecipientFromHeadersHook implements MailDeliverHook { + constructor(public mailDomain: string) {} + + callback(context: MailDeliverContext) { + if (context.recipients.size !== 0) { + console.warn( + context.logTag, + "Recipients are already filled, skip inferring from headers.", + ); + } else { + [...context.mail.parsed.recipients].filter((r) => + r.endsWith("@" + this.mailDomain) + ).forEach((r) => context.recipients.add(r)); + + console.info( + context.logTag, + "Use recipients inferred from mail headers:", + [...context.recipients].join(", "), + ); + } + return Promise.resolve(); + } +} + +export class FallbackRecipientHook implements MailDeliverHook { + constructor(public fallback: Set<string> = new Set()) {} + + callback(context: MailDeliverContext) { + if (context.recipients.size === 0) { + console.info( + context.logTag, + "Use fallback recipients:" + [...this.fallback].join(", "), + ); + this.fallback.forEach((a) => context.recipients.add(a)); + } + return Promise.resolve(); + } +} + +export class AliasRecipientMailHook implements MailDeliverHook { + #aliasFile; + + constructor(aliasFile: string) { + this.#aliasFile = aliasFile; + } + + async #parseAliasFile(logTag: string): Promise<Map<string, string>> { + const result = new Map(); + if ((await Deno.stat(this.#aliasFile)).isFile) { + const text = await Deno.readTextFile(this.#aliasFile); + const csv = parse(text); + for (const [real, ...aliases] of csv) { + aliases.forEach((a) => result.set(a, real)); + } + } else { + console.warn( + logTag, + `Recipient alias file ${this.#aliasFile} is not found.`, + ); + } + return result; + } + + async callback(context: MailDeliverContext) { + const aliases = await this.#parseAliasFile(context.logTag); + for (const recipient of [...context.recipients]) { + const realRecipients = aliases.get(recipient); + if (realRecipients != null) { + console.info( + context.logTag, + `Recipient alias resolved: ${recipient} => ${realRecipients}.`, + ); + context.recipients.delete(recipient); + context.recipients.add(realRecipients); + } + } + } +} diff --git a/deno/tools/deno.json b/deno/tools/deno.json new file mode 100644 index 0000000..355046a --- /dev/null +++ b/deno/tools/deno.json @@ -0,0 +1,8 @@ +{ + "version": "0.1.0", + "tasks": { + }, + "imports": { + "mustache": "npm:mustache@^4.2.0" + } +} diff --git a/deno/tools/geosite.ts b/deno/tools/geosite.ts new file mode 100644 index 0000000..3aabec2 --- /dev/null +++ b/deno/tools/geosite.ts @@ -0,0 +1,161 @@ +const ATTR = "cn"; +const REPO_NAME = "domain-list-community"; +const URL = + "https://github.com/v2fly/domain-list-community/archive/refs/heads/master.zip"; +const SITES = [ + "github", + "google", + "youtube", + "twitter", + "facebook", 
+ "discord", + "reddit", + "twitch", + "quora", + "telegram", + "imgur", + "stackexchange", + "onedrive", + "duckduckgo", + "wikimedia", + "gitbook", + "gitlab", + "creativecommons", + "archive", + "matrix", + "tor", + "python", + "ruby", + "rust", + "nodejs", + "npmjs", + "qt", + "docker", + "v2ray", + "homebrew", + "bootstrap", + "heroku", + "vercel", + "ieee", + "sci-hub", + "libgen", +]; + +const prefixes = ["include", "domain", "keyword", "full", "regexp"] as const; + +interface Rule { + kind: (typeof prefixes)[number]; + value: string; + attrs: string[]; +} + +type FileProvider = (name: string) => string; + +function extract(starts: string[], provider: FileProvider): Rule[] { + function parseLine(line: string): Rule { + let kind = prefixes.find((p) => line.startsWith(p + ":")); + if (kind != null) { + line = line.slice(line.indexOf(":") + 1); + } else { + kind = "domain"; + } + const segs = line.split("@"); + return { + kind, + value: segs[0].trim(), + attrs: [...segs.slice(1)].map((s) => s.trim()), + }; + } + + function parse(text: string): Rule[] { + return text + .replaceAll("\c\n", "\n") + .split("\n") + .map((l) => l.trim()) + .filter((l) => l.length !== 0 && !l.startsWith("#")) + .map((l) => parseLine(l)); + } + + const visited = [] as string[]; + const rules = [] as Rule[]; + + function add(name: string) { + const text = provider(name); + for (const rule of parse(text)) { + if (rule.kind === "include") { + if (visited.includes(rule.value)) { + console.warn(`circular refs found: ${name} includes ${rule.value}.`); + continue; + } else { + visited.push(rule.value); + add(rule.value); + } + } else { + rules.push(rule); + } + } + } + + for (const start of starts) { + add(start); + } + + return rules; +} + +function toNewFormat(rules: Rule[], attr: string): [string, string] { + function toLine(rule: Rule) { + const prefixMap = { + domain: "DOMAIN-SUFFIX", + full: "DOMAIN", + keyword: "DOMAIN-KEYWORD", + regexp: "DOMAIN-REGEX", + } as const; + if (rule.kind === "include") { + throw new Error("Include rule not parsed."); + } + return `${prefixMap[rule.kind]},${rule.value}`; + } + + function toLines(rules: Rule[]) { + return rules.map((r) => toLine(r)).join("\n"); + } + + const has: Rule[] = []; + const notHas: Rule[] = []; + rules.forEach((r) => (r.attrs.includes(attr) ? 
has.push(r) : notHas.push(r))); + + return [toLines(has), toLines(notHas)]; +} + +if (import.meta.main) { + const tmpDir = Deno.makeTempDirSync({ prefix: "geosite-rules-" }); + console.log("Work dir is ", tmpDir); + const zipFilePath = tmpDir + "/repo.zip"; + const res = await fetch(URL); + if (!res.ok) { + throw new Error("Failed to download repo."); + } + Deno.writeFileSync(zipFilePath, await res.bytes()); + const unzip = new Deno.Command("unzip", { + args: ["-q", zipFilePath], + cwd: tmpDir, + }); + if (!(await unzip.spawn().status).success) { + throw new Error("Failed to unzip"); + } + + const dataDir = tmpDir + "/" + REPO_NAME + "-master/data"; + const provider = (name: string) => + Deno.readTextFileSync(dataDir + "/" + name); + + const rules = extract(SITES, provider); + const [has, notHas] = toNewFormat(rules, ATTR); + const resultDir = tmpDir + "/result"; + Deno.mkdirSync(resultDir); + const hasFile = resultDir + "/has-rule"; + const notHasFile = resultDir + "/not-has-rule"; + console.log("Write result to: " + hasFile + " , " + notHasFile); + Deno.writeTextFileSync(hasFile, has); + Deno.writeTextFileSync(notHasFile, notHas); +} diff --git a/deno/tools/main.ts b/deno/tools/main.ts new file mode 100644 index 0000000..897350c --- /dev/null +++ b/deno/tools/main.ts @@ -0,0 +1,14 @@ +import yargs, { DEMAND_COMMAND_MESSAGE } from "./yargs.ts"; +import vm from "./vm.ts"; +import service from "./service.ts"; + +if (import.meta.main) { + await yargs(Deno.args) + .scriptName("crupest") + .command(vm) + .command(service) + .demandCommand(1, DEMAND_COMMAND_MESSAGE) + .help() + .strict() + .parse(); +} diff --git a/deno/tools/service.ts b/deno/tools/service.ts new file mode 100644 index 0000000..bd4d22c --- /dev/null +++ b/deno/tools/service.ts @@ -0,0 +1,180 @@ +import { dirname, join, relative } from "@std/path"; +import { copySync, existsSync, walkSync } from "@std/fs"; +import { distinct } from "@std/collections"; +// @ts-types="npm:@types/mustache" +import Mustache from "mustache"; + +import { defineYargsModule, DEMAND_COMMAND_MESSAGE } from "./yargs.ts"; + +const MUSTACHE_RENDER_OPTIONS: Mustache.RenderOptions = { + tags: ["@@", "@@"], + escape: (value: unknown) => String(value), +}; + +function mustacheParse(template: string) { + return Mustache.parse(template, MUSTACHE_RENDER_OPTIONS.tags); +} + +function mustacheRender(template: string, view: Record<string, string>) { + return Mustache.render(template, view, {}, MUSTACHE_RENDER_OPTIONS); +} + +function getVariableKeysOfTemplate(template: string): string[] { + return distinct( + mustacheParse(template) + .filter((v) => v[0] === "name") + .map((v) => v[1]), + ); +} + +function loadTemplatedConfigFiles( + files: string[], +): Record<string, string> { + console.log("Scan config files ..."); + const config: Record<string, string> = {}; + for (const file of files) { + console.log(` from file ${file}`); + const text = Deno.readTextFileSync(file); + let lineNumber = 0; + for (const rawLine of text.split("\n")) { + lineNumber++; + const line = rawLine.trim(); + if (line.length === 0) continue; + if (line.startsWith("#")) continue; + const equalSymbolIndex = line.indexOf("="); + if (equalSymbolIndex === -1) { + throw new Error(`Line ${lineNumber} of ${file} is invalid.`); + } + const [key, valueText] = [ + line.slice(0, equalSymbolIndex).trim(), + line.slice(equalSymbolIndex + 1).trim(), + ]; + console.log(` (${key in config ? 
"override" : "new"}) ${key}`); + getVariableKeysOfTemplate(valueText).forEach((name) => { + if (!(name in config)) { + throw new Error( + `Variable ${name} is not defined yet, perhaps due to typos or wrong order.`, + ); + } + }); + config[key] = mustacheRender(valueText, config); + } + } + return config; +} + +const TEMPLATE_FILE_EXT = ".template"; + +class TemplateDir { + templates: { path: string; ext: string; text: string; vars: string[] }[] = []; + plains: { path: string }[] = []; + + constructor(public dir: string) { + console.log(`Scan template dir ${dir} ...`); + Array.from( + walkSync(dir, { includeDirs: false, followSymlinks: true }), + ).forEach(({ path }) => { + path = relative(this.dir, path); + if (path.endsWith(TEMPLATE_FILE_EXT)) { + console.log(` (template) ${path}`); + const text = Deno.readTextFileSync(join(dir, path)); + this.templates.push({ + path, + ext: TEMPLATE_FILE_EXT, + text, + vars: getVariableKeysOfTemplate(text), + }); + } else { + console.log(` (plain) ${path}`); + this.plains.push({ path }); + } + }); + } + + allNeededVars() { + return distinct(this.templates.flatMap((t) => t.vars)); + } + + generate(vars: Record<string, string>, generatedDir?: string) { + console.log( + `Generate to dir ${generatedDir ?? "[dry-run]"} ...`, + ); + + const undefinedVars = this.allNeededVars().filter((v) => !(v in vars)); + if (undefinedVars.length !== 0) { + throw new Error( + `Needed variables are not defined: ${undefinedVars.join(", ")}`, + ); + } + + if (generatedDir != null) { + if (existsSync(generatedDir)) { + console.log(` delete old generated dir`); + Deno.removeSync(generatedDir, { recursive: true }); + } + + for (const file of this.plains) { + const [source, destination] = [ + join(this.dir, file.path), + join(generatedDir, file.path), + ]; + console.log(` copy ${file.path}`); + Deno.mkdirSync(dirname(destination), { recursive: true }); + copySync(source, destination); + } + for (const file of this.templates) { + const path = file.path.slice(0, -file.ext.length); + const destination = join(generatedDir, path); + console.log(` generate ${path}`); + const rendered = mustacheRender(file.text, vars); + Deno.mkdirSync(dirname(destination), { recursive: true }); + Deno.writeTextFileSync(destination, rendered); + } + } + } +} + +export default defineYargsModule({ + command: "service", + aliases: ["sv"], + describe: "Manage services.", + builder: (builder) => { + return builder + .option("project-dir", { + type: "string", + }) + .demandOption("project-dir") + .command({ + command: "gen-tmpl", + describe: "Generate files from templates", + builder: (builder) => { + return builder + .option("dry-run", { + type: "boolean", + default: true, + }) + .strict(); + }, + handler: (argv) => { + const { projectDir, dryRun } = argv; + + const config = loadTemplatedConfigFiles( + [ + join(projectDir, "data/config"), + join(projectDir, "services/config.template"), + ], + ); + + new TemplateDir( + join(projectDir, "services/templates"), + ).generate( + config, + dryRun ? 
undefined : join(projectDir, "services/generated"), + ); + console.log("Done!"); + }, + }) + .demandCommand(1, DEMAND_COMMAND_MESSAGE); + }, + handler: () => {}, +}); diff --git a/deno/tools/vm.ts b/deno/tools/vm.ts new file mode 100644 index 0000000..17e8125 --- /dev/null +++ b/deno/tools/vm.ts @@ -0,0 +1,144 @@ +import os from "node:os"; +import { join } from "@std/path"; +import { defineYargsModule, DEMAND_COMMAND_MESSAGE } from "./yargs.ts"; + +type ArchAliasMap = { [name: string]: string[] }; +const arches = { + x86_64: ["x86_64", "amd64"], + i386: ["i386", "x86", "i686"], +} as const satisfies ArchAliasMap; +type Arch = keyof typeof arches; +type GeneralArch = (typeof arches)[Arch][number]; + +function normalizeArch(generalName: GeneralArch): Arch { + for (const [name, aliases] of Object.entries(arches as ArchAliasMap)) { + if (aliases.includes(generalName)) return name as Arch; + } + throw Error("Unknown architecture name."); +} + +interface GeneralVmSetup { + name?: string[]; + arch: GeneralArch; + disk: string; + sshForwardPort: number; + kvm?: boolean; +} + +interface VmSetup { + arch: Arch; + disk: string; + sshForwardPort: number; + kvm: boolean; +} + +const MY_VMS: GeneralVmSetup[] = [ + { + name: ["hurd", ...arches.i386.map((a) => `hurd-${a}`)], + arch: "i386", + disk: join(os.homedir(), "vms/hurd-i386.qcow2"), + sshForwardPort: 3222, + }, + { + name: [...arches.x86_64.map((a) => `hurd-${a}`)], + arch: "x86_64", + disk: join(os.homedir(), "vms/hurd-x86_64.qcow2"), + sshForwardPort: 3223, + }, +]; + +function normalizeVmSetup(generalSetup: GeneralVmSetup): VmSetup { + const { arch, disk, sshForwardPort, kvm } = generalSetup; + return { + arch: normalizeArch(arch), + disk, + sshForwardPort, + kvm: kvm ?? Deno.build.os === "linux", + }; +} + +function resolveVmSetup( + name: string, + vms: GeneralVmSetup[], +): VmSetup | undefined { + const setup = vms.find((vm) => vm.name?.includes(name)); + return setup == null ? undefined : normalizeVmSetup(setup); +} + +const qemuBinPrefix = "qemu-system" as const; + +const qemuBinSuffix = { + x86_64: "x86_64", + i386: "x86_64", +} as const; + +function getQemuBin(arch: Arch): string { + return `${qemuBinPrefix}-${qemuBinSuffix[arch]}`; +} + +function getLinuxHostArgs(kvm: boolean): string[] { + return kvm ? ["-enable-kvm"] : []; +} + +function getMachineArgs(arch: Arch): string[] { + const is64 = arch === "x86_64"; + const machineArgs = is64 ? ["-machine", "q35"] : []; + const memory = is64 ? 
8 : 4; + return [...machineArgs, "-m", `${memory}G`]; +} + +function getNetworkArgs(sshForwardPort: number): string[] { + return ["-net", "nic", "-net", `user,hostfwd=tcp::${sshForwardPort}-:22`]; +} + +function getDisplayArgs(): string[] { + return ["-vga", "vmware"]; +} + +function getDiskArgs(disk: string): string[] { + return ["-drive", `cache=writeback,file=${disk}`]; +} + +function createQemuArgs(setup: VmSetup): string[] { + const { arch, disk, sshForwardPort } = setup; + return [ + getQemuBin(arch), + ...getLinuxHostArgs(setup.kvm), + ...getMachineArgs(arch), + ...getDisplayArgs(), + ...getNetworkArgs(sshForwardPort), + ...getDiskArgs(disk), + ]; +} + +const gen = defineYargsModule({ + command: "gen <name>", + describe: "generate cli command to run the vm", + builder: (builder) => { + return builder + .positional("name", { + describe: "name of the vm to run", + type: "string", + }) + .demandOption("name") + .strict(); + }, + handler: (argv) => { + const vm = resolveVmSetup(argv.name, MY_VMS); + if (vm == null) { + console.error(`No vm called ${argv.name} is found.`); + Deno.exit(-1); + } + const cli = createQemuArgs(vm); + console.log(`${cli.join(" ")}`); + }, +}); + +export default defineYargsModule({ + command: "vm", + describe: "Manage (qemu) virtual machines.", + builder: (builder) => { + return builder.command(gen).demandCommand(1, DEMAND_COMMAND_MESSAGE); + }, + handler: () => {}, +}); diff --git a/deno/tools/yargs.ts b/deno/tools/yargs.ts new file mode 100644 index 0000000..eaa7803 --- /dev/null +++ b/deno/tools/yargs.ts @@ -0,0 +1,12 @@ +// @ts-types="npm:@types/yargs" +export { default } from "yargs"; +export * from "yargs"; + +import { CommandModule } from "yargs"; +export function defineYargsModule<T, U>( + module: CommandModule<T, U>, +): CommandModule<T, U> { + return module; +} + +export const DEMAND_COMMAND_MESSAGE = "No command is specified"; diff --git a/dictionary.txt b/dictionary.txt index 7688382..673d2b4 100644 --- a/dictionary.txt +++ b/dictionary.txt @@ -2,37 +2,71 @@ crupest Yuqian Yang +fxxking + +# general +aarch64 +esmtp +healthcheck -# self-hosted services -2fauth -rspamd certbot roundcube roundcubemail +gerrit gohugoio +pwsh +rclone +doveadm + +kmod +btrfs +chroot +nproc +zstd +cpio +pacman +fontconfig + +nspawn +tini +containerd +buildx +qcow2 +hostfwd + +# languages +pythonpath +denoland +kysely +insertable + +ustc +sourceware +sesv2 +amazonses +maileroo + +geodata +geoip +geosite +vmess +vnext -# general catppuccin macchiato -cheatsheet -aarch64 -pythonpath -gerrit -esmtp -tini -healthcheck # vim/nvim nvim neovide vimruntime - +termguicolors autobrief autopairs bashls bufhidden bufnr clangd +denols devicons exepath gitsigns @@ -40,13 +74,6 @@ lspconfig lualine luasnip -# unix -cpio -kmod -nproc -sourceware -zstd - # hurd gnumach settrans @@ -61,7 +88,6 @@ dquilt buildpackage quiltrc nocheck -chroot indep confdir createchroot @@ -69,13 +95,3 @@ sbuild sbuildrc schroot -# commercial -myqcloud - -# misc -geodata -geoip -geosite -vmess -vnext -ustc diff --git a/services/base-config b/services/base-config deleted file mode 100644 index ccd1e1b..0000000 --- a/services/base-config +++ /dev/null @@ -1,5 +0,0 @@ -CRUPEST_DOMAIN=crupest.life -CRUPEST_EMAIL=crupest@crupest.life -CRUPEST_GITHUB=https://github.com/crupest -CRUPEST_SERVICES_DIR=services -CRUPEST_DATA_DIR=data diff --git a/services/config.template b/services/config.template index 7b3d1dc..77be817 100644 --- a/services/config.template +++ b/services/config.template @@ -1,11 +1,13 @@ 
-CRUPEST_MAIL_SERVER_DOMAIN=mail.@@CRUPEST_DOMAIN@@ +CRUPEST_SERVICES_DIR=services +CRUPEST_DATA_DIR=data CRUPEST_ROOT_URL=https://@@CRUPEST_DOMAIN@@ +CRUPEST_MAIL_SERVER_DOMAIN=mail.@@CRUPEST_DOMAIN@@ CRUPEST_DOCKER_DIR=@@CRUPEST_SERVICES_DIR@@/docker -CRUPEST_SERVICES_STATE_DIR=@@CRUPEST_SERVICES_DIR@@/state CRUPEST_DATA_SECRET_DIR=@@CRUPEST_DATA_DIR@@/secret CRUPEST_DATA_CERTBOT_DIR=@@CRUPEST_DATA_DIR@@/certbot CRUPEST_DATA_GIT_DIR=@@CRUPEST_DATA_DIR@@/git -CRUPEST_DATA_MAILSERVER_DIR=@@CRUPEST_DATA_DIR@@/dms +CRUPEST_DATA_MAIL_SERVER_DIR=@@CRUPEST_DATA_DIR@@/mail-server CRUPEST_DATA_ROUNDCUBE_DIR=@@CRUPEST_DATA_DIR@@/roundcube CRUPEST_GENERATED_DIR=@@CRUPEST_SERVICES_DIR@@/generated -CRUPEST_GENERATED_NGINX_DIR=@@CRUPEST_GENERATED_DIR@@/nginx +CRUPEST_SSL_FULLCHAIN_FILE=@@CRUPEST_DATA_CERTBOT_DIR@@/certs/live/@@CRUPEST_DOMAIN@@/fullchain.pem +CRUPEST_SSL_PRIVATE_KEY_FILE=@@CRUPEST_DATA_CERTBOT_DIR@@/certs/live/@@CRUPEST_DOMAIN@@/privkey.pem diff --git a/services/docker/auto-backup/Dockerfile b/services/docker/auto-backup/Dockerfile index e376174..147be21 100644 --- a/services/docker/auto-backup/Dockerfile +++ b/services/docker/auto-backup/Dockerfile @@ -1,13 +1,12 @@ FROM debian RUN apt-get update && apt-get install -y \ - tini ca-certificates coreutils tar zstd \ + tini ca-certificates coreutils tar zstd rclone \ && rm -rf /var/lib/apt/lists/* ENV CRUPEST_AUTO_BACKUP_INIT_DELAY= ENV CRUPEST_AUTO_BACKUP_INTERVAL=1d -ADD --chmod=755 https://github.com/tencentyun/coscli/releases/download/v1.0.3/coscli-v1.0.3-linux-amd64 /app/coscli -ADD daemon.bash /app/ +ADD daemon.bash rclone.conf /app/ VOLUME [ "/data" ] diff --git a/services/docker/auto-backup/daemon.bash b/services/docker/auto-backup/daemon.bash index ff670e3..c82e2d0 100755 --- a/services/docker/auto-backup/daemon.bash +++ b/services/docker/auto-backup/daemon.bash @@ -15,21 +15,20 @@ success() { echo -e "\033[32mSuccess: " "$@" "\033[0m" } -if [[ -z "$CRUPEST_AUTO_BACKUP_INTERVAL" ]]; then - die "Backup interval not set, please set it!" -fi +[[ -n "$CRUPEST_AUTO_BACKUP_INTERVAL" ]] || die "Backup interval not set, please set it!" note "Checking secrets..." -[[ -n "$CRUPEST_AUTO_BACKUP_COS_ENDPOINT" ]] || die "COS endpoint not set!" -[[ -n "$CRUPEST_AUTO_BACKUP_COS_BUCKET" ]] || die "COS bucket not set!" -[[ -n "$CRUPEST_AUTO_BACKUP_COS_SECRET_ID" ]] || die "COS secret ID not set!" -[[ -n "$CRUPEST_AUTO_BACKUP_COS_SECRET_KEY" ]] || die "COS secret key not set!" +[[ -n "$RCLONE_S3_PROVIDER" ]] || die "S3 provider not set!" +[[ -n "$RCLONE_S3_ENDPOINT" ]] || die "S3 endpoint not set!" +[[ -n "$RCLONE_S3_ACCESS_KEY_ID" ]] || die "S3 AccessKey ID not set!" +[[ -n "$RCLONE_S3_SECRET_ACCESS_KEY" ]] || die "S3 AccessKey Secret not set!" +[[ -n "$CRUPEST_AUTO_BACKUP_S3_BUCKET" ]] || die "S3 bucket not set!" success "Secrets check passed." note "Checking tools..." tar --version zstd --version -/app/coscli --version +rclone --version success "Tools check passed." echo "Backup interval set to $CRUPEST_AUTO_BACKUP_INTERVAL..." @@ -57,13 +56,10 @@ function backup { du -h "$tmp_file" | cut -f1 | xargs echo "Size of $tmp_file:" des_file_name="$current_time.$backup_file_ext" - echo "Upload $des_file_name to COS..." + echo "Upload $des_file_name to S3..." 
- /app/coscli --init-skip \ - --secret-id "${CRUPEST_AUTO_BACKUP_COS_SECRET_ID}" \ - --secret-key "${CRUPEST_AUTO_BACKUP_COS_SECRET_KEY}" \ - --endpoint "${CRUPEST_AUTO_BACKUP_COS_ENDPOINT}" \ - cp "$tmp_file" "cos://${CRUPEST_AUTO_BACKUP_COS_BUCKET}/$des_file_name" + rclone --config=/app/rclone.conf copyto \ + "$tmp_file" "remote://${CRUPEST_AUTO_BACKUP_S3_BUCKET}/$des_file_name" echo "Remove tmp file..." rm "$tmp_file" diff --git a/services/docker/auto-backup/rclone.conf b/services/docker/auto-backup/rclone.conf new file mode 100644 index 0000000..0cf3b64 --- /dev/null +++ b/services/docker/auto-backup/rclone.conf @@ -0,0 +1,4 @@ +[remote] +type = s3 +env_auth = true +no_check_bucket = true diff --git a/services/docker/debian-dev/Dockerfile b/services/docker/debian-dev/Dockerfile deleted file mode 100644 index 8114c56..0000000 --- a/services/docker/debian-dev/Dockerfile +++ /dev/null @@ -1,24 +0,0 @@ -FROM debian:latest - -ARG USER=crupest -ARG IN_CHINA= - -ENV CRUPEST_DEBIAN_DEV_USER=${USER} -ENV CRUPEST_DEBIAN_DEV_IN_CHINA=${IN_CHINA} - -ADD bootstrap /bootstrap -RUN /bootstrap/setup.bash - -ENV LANG=en_US.utf8 -USER ${USER} -WORKDIR /home/${USER} - -RUN --mount=type=secret,id=code-server-password,required=true,env=CRUPEST_CODE_SERVER_PASSWORD \ - mkdir -p ${HOME}/.config/code-server && \ - echo -e "auth: password\nhashed-password: " >> ${HOME}/.config/code-server/config.yaml && \ - echo -n "$CRUPEST_CODE_SERVER_PASSWORD" | argon2 $(shuf -i 10000000-99999999 -n 1 --random-source /dev/urandom) -e >> ${HOME}/.config/code-server/config.yaml - -EXPOSE 4567 -VOLUME [ "/home/${USER}" ] - -CMD [ "tini", "--", "/usr/bin/code-server", "--bind-addr", "0.0.0.0:4567" ] diff --git a/services/docker/debian-dev/bootstrap/extra/setup-cmake.bash b/services/docker/debian-dev/bootstrap/extra/setup-cmake.bash deleted file mode 100755 index 76c1ae4..0000000 --- a/services/docker/debian-dev/bootstrap/extra/setup-cmake.bash +++ /dev/null @@ -1,9 +0,0 @@ -#! /usr/bin/env bash - -set -e - -CMAKE_VERSION=$(curl -s https://api.github.com/repos/Kitware/CMake/releases/latest | grep '"tag_name":' | sed -E 's/.*"v([^"]+)".*/\1/') -wget -O cmake-installer.sh https://github.com/Kitware/CMake/releases/download/v"$CMAKE_VERSION"/cmake-"$CMAKE_VERSION"-linux-x86_64.sh -chmod +x cmake-installer.sh -./cmake-installer.sh --skip-license --prefix=/usr -rm cmake-installer.sh diff --git a/services/docker/debian-dev/bootstrap/extra/setup-dotnet.bash b/services/docker/debian-dev/bootstrap/extra/setup-dotnet.bash deleted file mode 100755 index 0ef7743..0000000 --- a/services/docker/debian-dev/bootstrap/extra/setup-dotnet.bash +++ /dev/null @@ -1,10 +0,0 @@ -#! /usr/bin/env bash - -set -e - -wget https://packages.microsoft.com/config/debian/11/packages-microsoft-prod.deb -O packages-microsoft-prod.deb -dpkg -i packages-microsoft-prod.deb -rm packages-microsoft-prod.deb - -apt-get update -apt-get install -y dotnet-sdk-7.0 diff --git a/services/docker/debian-dev/bootstrap/extra/setup-llvm.bash b/services/docker/debian-dev/bootstrap/extra/setup-llvm.bash deleted file mode 100755 index 48dde86..0000000 --- a/services/docker/debian-dev/bootstrap/extra/setup-llvm.bash +++ /dev/null @@ -1,26 +0,0 @@ -#! /usr/bin/env bash - -set -e - -LLVM_VERSION=18 - -. 
/bootstrap/func.bash - -if is_true "$CRUPEST_DEBIAN_DEV_IN_CHINA"; then - base_url=https://mirrors.tuna.tsinghua.edu.cn/llvm-apt -else - base_url=https://apt.llvm.org -fi - -wget "$base_url/llvm.sh" -chmod +x llvm.sh -./llvm.sh $LLVM_VERSION all -m "$base_url" -rm llvm.sh - -update-alternatives --install /usr/bin/clang clang /usr/bin/clang-$LLVM_VERSION 100 \ - --slave /usr/bin/clang++ clang++ /usr/bin/clang++-$LLVM_VERSION \ - --slave /usr/bin/clangd clangd /usr/bin/clangd-$LLVM_VERSION \ - --slave /usr/bin/clang-format clang-format /usr/bin/clang-format-$LLVM_VERSION \ - --slave /usr/bin/clang-tidy clang-tidy /usr/bin/clang-tidy-$LLVM_VERSION \ - --slave /usr/bin/lldb lldb /usr/bin/lldb-$LLVM_VERSION \ - --slave /usr/bin/lld lld /usr/bin/lld-$LLVM_VERSION diff --git a/services/docker/debian-dev/bootstrap/home/.bashrc b/services/docker/debian-dev/bootstrap/home/.bashrc deleted file mode 100644 index 3646ee2..0000000 --- a/services/docker/debian-dev/bootstrap/home/.bashrc +++ /dev/null @@ -1,117 +0,0 @@ -# ~/.bashrc: executed by bash(1) for non-login shells. -# see /usr/share/doc/bash/examples/startup-files (in the package bash-doc) -# for examples - -# If not running interactively, don't do anything -case $- in - *i*) ;; - *) return;; -esac - -# don't put duplicate lines or lines starting with space in the history. -# See bash(1) for more options -HISTCONTROL=ignoreboth - -# append to the history file, don't overwrite it -shopt -s histappend - -# for setting history length see HISTSIZE and HISTFILESIZE in bash(1) -HISTSIZE=1000 -HISTFILESIZE=2000 - -# check the window size after each command and, if necessary, -# update the values of LINES and COLUMNS. -shopt -s checkwinsize - -# If set, the pattern "**" used in a pathname expansion context will -# match all files and zero or more directories and subdirectories. -#shopt -s globstar - -# make less more friendly for non-text input files, see lesspipe(1) -#[ -x /usr/bin/lesspipe ] && eval "$(SHELL=/bin/sh lesspipe)" - -# set variable identifying the chroot you work in (used in the prompt below) -if [ -z "${debian_chroot:-}" ] && [ -r /etc/debian_chroot ]; then - debian_chroot=$(cat /etc/debian_chroot) -fi - -# set a fancy prompt (non-color, unless we know we "want" color) -case "$TERM" in - xterm-color|*-256color) color_prompt=yes;; -esac - -# uncomment for a colored prompt, if the terminal has the capability; turned -# off by default to not distract the user: the focus in a terminal window -# should be on the output of commands, not on the prompt -#force_color_prompt=yes - -if [ -n "$force_color_prompt" ]; then - if [ -x /usr/bin/tput ] && tput setaf 1 >&/dev/null; then - # We have color support; assume it's compliant with Ecma-48 - # (ISO/IEC-6429). (Lack of such support is extremely rare, and such - # a case would tend to support setf rather than setaf.) 
- color_prompt=yes - else - color_prompt= - fi -fi - -if [ "$color_prompt" = yes ]; then - PS1='${debian_chroot:+($debian_chroot)}\[\033[01;32m\]\u@\h\[\033[00m\]:\[\033[01;34m\]\w\[\033[00m\]\$ ' -else - PS1='${debian_chroot:+($debian_chroot)}\u@\h:\w\$ ' -fi -unset color_prompt force_color_prompt - -# If this is an xterm set the title to user@host:dir -case "$TERM" in -xterm*|rxvt*) - PS1="\[\e]0;${debian_chroot:+($debian_chroot)}\u@\h: \w\a\]$PS1" - ;; -*) - ;; -esac - -# enable color support of ls and also add handy aliases -if [ -x /usr/bin/dircolors ]; then - test -r ~/.dircolors && eval "$(dircolors -b ~/.dircolors)" || eval "$(dircolors -b)" - alias ls='ls --color=auto' - #alias dir='dir --color=auto' - #alias vdir='vdir --color=auto' - - #alias grep='grep --color=auto' - #alias fgrep='fgrep --color=auto' - #alias egrep='egrep --color=auto' -fi - -# colored GCC warnings and errors -#export GCC_COLORS='error=01;31:warning=01;35:note=01;36:caret=01;32:locus=01:quote=01' - -# some more ls aliases -#alias ll='ls -l' -#alias la='ls -A' -#alias l='ls -CF' - -# Alias definitions. -# You may want to put all your additions into a separate file like -# ~/.bash_aliases, instead of adding them here directly. -# See /usr/share/doc/bash-doc/examples in the bash-doc package. - -if [ -f ~/.bash_aliases ]; then - . ~/.bash_aliases -fi - -# enable programmable completion features (you don't need to enable -# this, if it's already enabled in /etc/bash.bashrc and /etc/profile -# sources /etc/bash.bashrc). -if ! shopt -oq posix; then - if [ -f /usr/share/bash-completion/bash_completion ]; then - . /usr/share/bash-completion/bash_completion - elif [ -f /etc/bash_completion ]; then - . /etc/bash_completion - fi -fi - -alias dquilt="quilt --quiltrc=${HOME}/.quiltrc-dpkg" -. /usr/share/bash-completion/completions/quilt -complete -F _quilt_completion $_quilt_complete_opt dquilt diff --git a/services/docker/debian-dev/bootstrap/official.sources b/services/docker/debian-dev/bootstrap/official.sources deleted file mode 100644 index c9aa9a0..0000000 --- a/services/docker/debian-dev/bootstrap/official.sources +++ /dev/null @@ -1,23 +0,0 @@ -Types: deb -URIs: http://deb.debian.org/debian -Suites: bookworm bookworm-updates bookworm-backports -Components: main contrib non-free non-free-firmware -Signed-By: /usr/share/keyrings/debian-archive-keyring.gpg - -Types: deb-src -URIs: http://deb.debian.org/debian -Suites: bookworm bookworm-updates bookworm-backports -Components: main contrib non-free non-free-firmware -Signed-By: /usr/share/keyrings/debian-archive-keyring.gpg - -Types: deb -URIs: http://deb.debian.org/debian-security -Suites: bookworm-security -Components: main contrib non-free non-free-firmware -Signed-By: /usr/share/keyrings/debian-archive-keyring.gpg - -Types: deb-src -URIs: http://deb.debian.org/debian-security -Suites: bookworm-security -Components: main contrib non-free non-free-firmware -Signed-By: /usr/share/keyrings/debian-archive-keyring.gpg diff --git a/services/docker/debian-dev/bootstrap/setup-apt.bash b/services/docker/debian-dev/bootstrap/setup-apt.bash deleted file mode 100755 index 38cba05..0000000 --- a/services/docker/debian-dev/bootstrap/setup-apt.bash +++ /dev/null @@ -1,41 +0,0 @@ -#! /usr/bin/env bash -# shellcheck disable=1090,1091 - -set -e - -if [[ $EUID -ne 0 ]]; then - die "This script must be run as root." -fi - -script_dir=$(dirname "$0") - -old_one="/etc/apt/sources.list" -new_one="/etc/apt/sources.list.d/debian.sources" - -echo "Setup apt sources ..." 
- -echo "Backup old ones to .bak ..." -if [[ -f "$old_one" ]]; then - mv "$old_one" "$old_one.bak" -fi - -if [[ -f "$new_one" ]]; then - mv "$new_one" "$new_one.bak" -fi - -echo "Copy the new one ..." -cp "$script_dir/official.sources" "$new_one" - -if [[ -n "$CRUPEST_DEBIAN_DEV_IN_CHINA" ]]; then - echo "Replace with China mirror ..." - china_mirror="mirrors.ustc.edu.cn" - sed -i "s|deb.debian.org|${china_mirror}|" "$new_one" -fi - -echo "Try to use https ..." -apt-get update -apt-get install -y apt-transport-https ca-certificates - -sed -i 's|http://|https://|' "$new_one" - -echo "APT source setup done!" diff --git a/services/docker/debian-dev/bootstrap/setup.bash b/services/docker/debian-dev/bootstrap/setup.bash deleted file mode 100755 index 65aabbb..0000000 --- a/services/docker/debian-dev/bootstrap/setup.bash +++ /dev/null @@ -1,56 +0,0 @@ -#! /usr/bin/env bash -# shellcheck disable=1090,1091 - -set -e -o pipefail - -die() { - echo "$@" >&2 - exit 1 -} - -if [[ $EUID -ne 0 ]]; then - die "This script must be run as root." -fi - -script_dir=$(dirname "$0") - -os_release_file="/etc/os-release" -if [[ -f "$os_release_file" ]]; then - debian_version=$(. "$os_release_file"; echo "$VERSION_CODENAME") - if [[ "$debian_version" != "bookworm" ]]; then - die "This script can only be run on Debian Bookworm. But it is $debian_version" - fi -else - die "$os_release_file not found. Failed to get debian version." -fi - -script_dir=$(dirname "$0") - -export DEBIAN_FRONTEND=noninteractive - -echo "Begin to setup debian..." - -bash "$script_dir/setup-apt.bash" - -echo "Installing packages..." -apt-get update -apt-get install -y \ - tini locales procps sudo vim less man bash-completion curl wget \ - build-essential git devscripts debhelper quilt argon2 - -echo "Setting up locale..." -localedef -i en_US -c -f UTF-8 -A /usr/share/locale/locale.alias en_US.UTF-8 - -echo "Setting up sudo..." -sed -i.bak 's|%sudo[[:space:]]\+ALL=(ALL:ALL)[[:space:]]\+ALL|%sudo ALL=(ALL:ALL) NOPASSWD: ALL|' /etc/sudoers - -echo "Creating user $CRUPEST_DEBIAN_DEV_USER ..." -useradd -m -G sudo -s /usr/bin/bash "$CRUPEST_DEBIAN_DEV_USER" - -echo "Setting up code-server..." -curl -fsSL https://code-server.dev/install.sh | sh - -echo "Cleaning up apt source index..." -rm -rf /var/lib/apt/lists/* - -echo "Setup debian done." 
diff --git a/services/docker/git-server/app/cgit/private b/services/docker/git-server/app/cgit/private index a5710b2..1671ff6 100644 --- a/services/docker/git-server/app/cgit/private +++ b/services/docker/git-server/app/cgit/private @@ -1,5 +1,5 @@ cache-root=/var/cache/cgit/private include=/app/cgit/common strict-export=cgit-export -clone-url=$CRUPEST_ROOT_URL/git/private$CGIT_REPO_URL +clone-url=$CRUPEST_ROOT_URL/git/private/$CGIT_REPO_URL scan-path=/git/repos/private diff --git a/services/docker/git-server/app/cgit/public b/services/docker/git-server/app/cgit/public index d1b276d..630f241 100644 --- a/services/docker/git-server/app/cgit/public +++ b/services/docker/git-server/app/cgit/public @@ -1,5 +1,5 @@ cache-root=/var/cache/cgit/public root-readme=/git/README.md include=/app/cgit/common -clone-url=$CRUPEST_ROOT_URL/git$CGIT_REPO_URL +clone-url=$CRUPEST_ROOT_URL/git/$CGIT_REPO_URL scan-path=/git/repos/public diff --git a/services/docker/git-server/app/lighttpd/lighttpd.conf b/services/docker/git-server/app/lighttpd/lighttpd.conf index d6c30cd..cb08baa 100644 --- a/services/docker/git-server/app/lighttpd/lighttpd.conf +++ b/services/docker/git-server/app/lighttpd/lighttpd.conf @@ -53,12 +53,14 @@ $HTTP["url"] =^ "/git" { $HTTP["url"] =^ "/git/private" { setenv.add-environment = ( - "CGIT_CONFIG" => "/app/cgit/private" + "CGIT_CONFIG" => "/app/cgit/private", + "CRUPEST_ROOT_URL" => env.CRUPEST_ROOT_URL, ) } else { setenv.add-environment = ( - "CGIT_CONFIG" => "/app/cgit/public" + "CGIT_CONFIG" => "/app/cgit/public", + "CRUPEST_ROOT_URL" => env.CRUPEST_ROOT_URL, ) } diff --git a/services/docker/mail-server/Dockerfile b/services/docker/mail-server/Dockerfile new file mode 100644 index 0000000..8ac8792 --- /dev/null +++ b/services/docker/mail-server/Dockerfile @@ -0,0 +1,11 @@ +FROM denoland/deno AS deno-build +COPY --from=deno . 
/workdir/ +WORKDIR /workdir +RUN deno install +RUN deno task compile:mail + +FROM dovecot/dovecot:latest-root +COPY --from=deno-build /workdir/mail/out/crupest-mail /app/ +ADD dovecot.conf /etc/dovecot/dovecot.conf +ADD app/* /app/ +CMD ["/app/main.bash"] diff --git a/services/docker/mail-server/app/main.bash b/services/docker/mail-server/app/main.bash new file mode 100755 index 0000000..2dfc2ee --- /dev/null +++ b/services/docker/mail-server/app/main.bash @@ -0,0 +1,12 @@ +#!/usr/bin/bash + +set -e -o pipefail + +die() { + echo "$@" >&2 + exit 1 +} + +/app/crupest-mail serve --real & + +/dovecot/sbin/dovecot -F diff --git a/services/docker/mail-server/aws-lambda.js b/services/docker/mail-server/aws-lambda.js new file mode 100644 index 0000000..d240c1a --- /dev/null +++ b/services/docker/mail-server/aws-lambda.js @@ -0,0 +1,23 @@ +export const handler = async (event, context, callback) => { + const sesNotification = event.Records[0].ses; + console.log("SES Notification:\n", JSON.stringify(sesNotification, null, 2)); + + const res = await fetch( + `https://mail.crupest.life/${process.env.CRUPEST_MAIL_SERVER_AWS_INBOUND_PATH}`, + { + method: "POST", + headers: { + "content-type": "application/json", + "Authorization": process.env.CRUPEST_MAIL_SERVER_AWS_INBOUND_KEY, + }, + body: JSON.stringify({ + key: sesNotification.mail.messageId, + recipients: sesNotification.receipt.recipients, + }), + }, + ); + console.log(res); + console.log(res.text()); + + callback(null, { "disposition": "CONTINUE" }); +}; diff --git a/services/docker/mail-server/dovecot.conf b/services/docker/mail-server/dovecot.conf new file mode 100644 index 0000000..0c5ec30 --- /dev/null +++ b/services/docker/mail-server/dovecot.conf @@ -0,0 +1,197 @@ +dovecot_config_version = 2.4.1 +dovecot_storage_version = 2.4.0 + +base_dir = /run/dovecot +state_dir = /run/dovecot +log_path = /dev/stdout + +protocols = imap submission lmtp sieve +sendmail_path = /app/out/crupest-mail sendmail +submission_relay_host = 127.0.0.1 +submission_relay_port = 2346 +submission_relay_trusted = yes + +mail_driver = maildir +mail_home = /data/vmail/%{user | domain}/%{user | username} +mail_path = ~/mail +mail_log_events = delete undelete expunge save copy mailbox_create mailbox_delete mailbox_rename flag_change + +# Setup default mailboxes for inbox namespace +@mailbox_defaults = english + +namespace inbox { + mailbox Archive { + special_use = "\\Archive" + } +} + +mail_plugins { + fts = yes + fts_flatcurve = yes + mail_log = yes + notify = yes +} + +fts_autoindex = yes +fts_autoindex_max_recent_msgs = 999 +fts_search_add_missing = yes +language_filters = normalizer-icu snowball stopwords + +language_tokenizers = generic email-address +language_tokenizer_generic_algorithm = simple + +language en { + default = yes + filters = lowercase snowball english-possessive stopwords +} + +fts flatcurve { + substring_search = yes +} + +auth_mechanisms = plain login + +passdb passwd-file { + passwd_file_path = /data/userdb + default_password_scheme = SHA512-CRYPT +} + +userdb passwd-file { + passwd_file_path = /data/userdb + fields { + uid:default = vmail + gid:default = vmail + home:default = /data/vmail/%{user | domain}/%{user | username} + } +} + +ssl = yes +ssl_server { + cert_file = /etc/dovecot/ssl/tls.crt + key_file = /etc/dovecot/ssl/tls.key +} + +protocol imap { + mail_plugins { + imap_sieve = yes + imap_filter_sieve = yes + } +} + +protocol lmtp { + mail_plugins { + sieve = yes + } +} + +protocol lda { + mail_plugins { + sieve = yes + } +} + +service 
imap-login { + process_min_avail = 1 + client_limit = 100 +} + +service pop3-login { + process_min_avail = 1 + client_limit = 100 +} + +service submission-login { + process_min_avail = 1 + client_limit = 100 + + inet_listener submissions { + port = 465 + ssl = yes + } +} + +service managesieve-login { + process_min_avail = 1 + client_limit = 100 +} + +sieve_plugins = sieve_imapsieve sieve_extprograms + +event_exporter log { + format = json + time_format = rfc3339 +} + +# Add default backend metrics +@metric_defaults = backend + +# Log auth failures +metric auth_failures { + filter = event=auth_request_finished AND NOT success=yes + exporter = log +} + +metric imap_command { + filter = event=imap_command_finished + group_by cmd_name { + method discrete { + } + } + group_by tagged_reply_state { + method discrete { + } + } +} + +metric smtp_command { + filter = event=smtp_server_command_finished and protocol=submission + group_by cmd_name { + method discrete { + } + } + group_by status_code { + method discrete { + } + } + group_by duration { + method exponential { + base = 10 + min_magnitude = 1 + max_magnitude = 5 + } + } +} + +metric lmtp_command { + filter = event=smtp_server_command_finished and protocol=lmtp + group_by cmd_name { + method discrete { + } + } + group_by status_code { + method discrete { + } + } + group_by duration { + method exponential { + base = 10 + min_magnitude = 1 + max_magnitude = 5 + } + } +} + +# Add duration metrics for deliveries +metric mail_deliveries { + filter = event=mail_delivery_finished + group_by duration { + method exponential { + base = 10 + min_magnitude = 1 + max_magnitude = 5 + } + } +} + +!include_try vendor.d/*.conf +!include_try conf.d/*.conf diff --git a/services/docker/nginx/Dockerfile b/services/docker/nginx/Dockerfile index 77398cd..3169e00 100644 --- a/services/docker/nginx/Dockerfile +++ b/services/docker/nginx/Dockerfile @@ -6,6 +6,5 @@ FROM nginx:mainline RUN apt update && apt-get install -y tini certbot && rm -rf /var/lib/apt/lists/* ADD mail-robots.txt /srv/mail/robots.txt ADD certbot.bash nginx-wrapper.bash /app/ -COPY configs/. 
/etc/nginx/ COPY --from=build-www /project/public /srv/www CMD ["/usr/bin/tini", "--", "/app/nginx-wrapper.bash"] diff --git a/services/docker/nginx/configs/templates/code.conf.template b/services/docker/nginx/configs/templates/code.conf.template deleted file mode 100644 index aa70ebc..0000000 --- a/services/docker/nginx/configs/templates/code.conf.template +++ /dev/null @@ -1,6 +0,0 @@ -server { - server_name code.${CRUPEST_DOMAIN}; - include common/http-listen; - - include common/acme-challenge; -} diff --git a/services/docker/nginx/configs/templates/mail.conf.template b/services/docker/nginx/configs/templates/mail.conf.template deleted file mode 100644 index 7f5f215..0000000 --- a/services/docker/nginx/configs/templates/mail.conf.template +++ /dev/null @@ -1,29 +0,0 @@ -server { - server_name mail.${CRUPEST_DOMAIN}; - include common/https-listen; - - location = /robots.txt { - root /srv/mail; - } - - location / { - include common/proxy-common; - proxy_pass http://roundcubemail:80/; - } - - location /rspamd/ { - include common/proxy-common; - proxy_pass http://mailserver:11334/; - } - - client_max_body_size 5G; -} - - -server { - server_name mail.${CRUPEST_DOMAIN}; - include common/http-listen; - - include common/https-redirect; - include common/acme-challenge; -} diff --git a/services/docker/nginx/configs/templates/root.conf.template b/services/docker/nginx/configs/templates/root.conf.template deleted file mode 100644 index e3e93ad..0000000 --- a/services/docker/nginx/configs/templates/root.conf.template +++ /dev/null @@ -1,45 +0,0 @@ -server { - server_name ${CRUPEST_DOMAIN}; - include common/https-listen; - - location / { - root /srv/www; - } - - location /2fa/ { - include common/proxy-common; - proxy_pass http://2fauth:8000/; - } - - location /git/ { - include common/proxy-common; - client_max_body_size 5G; - proxy_pass http://git-server:3636; - } - - location = /github { - return 301 ${CRUPEST_GITHUB}; - } - - location = /github/ { - return 301 ${CRUPEST_GITHUB}; - } - - location /_${CRUPEST_V2RAY_PATH} { - if ($http_upgrade != "websocket") { - return 404; - } - - proxy_redirect off; - include common/proxy-common; - proxy_pass http://v2ray:10000; - } -} - -server { - server_name ${CRUPEST_DOMAIN}; - include common/http-listen; - - include common/https-redirect; - include common/acme-challenge; -} diff --git a/services/docker/nginx/configs/templates/timeline.conf.template b/services/docker/nginx/configs/templates/timeline.conf.template deleted file mode 100644 index a467594..0000000 --- a/services/docker/nginx/configs/templates/timeline.conf.template +++ /dev/null @@ -1,6 +0,0 @@ -server { - server_name timeline.${CRUPEST_DOMAIN}; - include common/http-listen; - - include common/acme-challenge; -} diff --git a/services/docker/nginx/nginx-wrapper.bash b/services/docker/nginx/nginx-wrapper.bash index c848287..a4a19ec 100755 --- a/services/docker/nginx/nginx-wrapper.bash +++ b/services/docker/nginx/nginx-wrapper.bash @@ -7,10 +7,6 @@ die() { exit 1 } -[[ -n "$CRUPEST_DOMAIN" ]] || die "CRUPEST_DOMAIN is not set. It is used as root domain." -[[ -n "$CRUPEST_GITHUB" ]] || die "CRUPEST_GITHUB is not set. It is used as GitHub redirection." -[[ -n "$CRUPEST_V2RAY_PATH" ]] || die "CRUPEST_V2RAY_PATH is not set. It is used as v2ray tunnel endpoint." 
- /app/certbot.bash & /docker-entrypoint.sh nginx "-g" "daemon off;" diff --git a/services/manage b/services/manage index 4589475..18b3d0f 100755 --- a/services/manage +++ b/services/manage @@ -2,23 +2,17 @@ set -e -python3 --version >/dev/null 2>&1 || ( - echo Error: failed to run Python with python3 --version. +deno --version >/dev/null 2>&1 || ( + echo "Error: failed to run deno --version." + echo "If deno is not installed, install it with:" + echo " curl -fsSL https://deno.land/install.sh | sh" exit 1 ) -script_dir="$(dirname "$0")" - -# shellcheck disable=SC2046 -export $(xargs <"${script_dir:?}/base-config") - -CRUPEST_PROJECT_DIR="$(realpath "$script_dir/..")" +CRUPEST_PROJECT_DIR="$(dirname "$0")/.." export CRUPEST_PROJECT_DIR -export PYTHONPATH="$CRUPEST_PROJECT_DIR/python:$PYTHONPATH" +echo "Detected project Dir: $CRUPEST_PROJECT_DIR" +echo -if [[ "$#" != "0" ]] && [[ "$1" == "gen-tmpl" ]]; then - python3 -m cru.service template generate "${@:2}" -else - python3 -m cru.service "$@" -fi +exec deno run -A "$CRUPEST_PROJECT_DIR/deno/tools/main.ts" service --project-dir "$CRUPEST_PROJECT_DIR" "$@" diff --git a/services/templates/disabled/docker-compose.yaml b/services/templates/disabled/docker-compose.yaml index 565ca49..0cd2256 100644 --- a/services/templates/disabled/docker-compose.yaml +++ b/services/templates/disabled/docker-compose.yaml @@ -1,22 +1,4 @@ services: - debian-dev: - pull_policy: build - build: - context: ./docker/debian-dev - dockerfile: Dockerfile - pull: true - args: - - USER=crupest - tags: - - "crupest/debian-dev:latest" - container_name: debian-dev - init: true - command: [ "/bootstrap/start/code-server.bash" ] - volumes: - - ./data/debian-dev:/data - - debian-dev-home:/home/crupest - restart: on-failure:3 - timeline: image: crupest/timeline:latest pull_policy: always @@ -27,6 +9,3 @@ services: - TIMELINE_DisableAutoBackup=true volumes: - ./data/timeline:/root/timeline - -volumes: - debian-dev-home: diff --git a/services/templates/disabled/nginx/code.conf.template b/services/templates/disabled/nginx/code.conf.template deleted file mode 100644 index 0abe042..0000000 --- a/services/templates/disabled/nginx/code.conf.template +++ /dev/null @@ -1,20 +0,0 @@ -server { - server_name code.@@CRUPEST_DOMAIN@@; - include common/https-listen; - - location / { - include common/proxy-common; - proxy_pass http://debian-dev:8080/; - } - - client_max_body_size 5G; -} - - -server { - server_name code.@@CRUPEST_DOMAIN@@; - include common/http-listen; - - include common/https-redirect; - include common/acme-challenge; -} diff --git a/services/templates/disabled/nginx/timeline.conf.template b/services/templates/disabled/nginx/timeline.conf.template index ce7341b..086c1f7 100644 --- a/services/templates/disabled/nginx/timeline.conf.template +++ b/services/templates/disabled/nginx/timeline.conf.template @@ -2,9 +2,9 @@ server { listen 443 ssl http2; listen [::]:443 ssl http2; server_name timeline.@@CRUPEST_DOMAIN@@; - + location / { - include common/reverse-proxy; + include conf.d/common/reverse-proxy; proxy_pass http://timeline:5000/; } @@ -16,6 +16,6 @@ server { listen [::]:80; server_name timeline.@@CRUPEST_DOMAIN@@; - include common/https-redirect; - include common/acme-challenge; + include conf.d/common/https-redirect; + include conf.d/common/acme-challenge; } diff --git a/services/templates/docker-compose.yaml.template b/services/templates/docker-compose.yaml.template index e133462..b81875b 100644 --- a/services/templates/docker-compose.yaml.template +++ 
b/services/templates/docker-compose.yaml.template @@ -3,9 +3,9 @@ services: nginx: pull_policy: build build: - context: ./@@CRUPEST_DOCKER_DIR@@/nginx + context: "./@@CRUPEST_DOCKER_DIR@@/nginx" additional_contexts: - - www=./www + - "www=./www" dockerfile: Dockerfile pull: true ports: @@ -13,102 +13,117 @@ services: - "443:443" - "443:443/udp" env_file: - - ./@@CRUPEST_GENERATED_DIR@@/envs/nginx.env - - ./@@CRUPEST_GENERATED_DIR@@/envs/v2ray-common.env + - "./@@CRUPEST_GENERATED_DIR@@/envs/v2ray-common.env" + - "./@@CRUPEST_GENERATED_DIR@@/envs/mail-server-common.env" volumes: + - "./@@CRUPEST_GENERATED_DIR@@/nginx:/etc/nginx/conf.d" - "./@@CRUPEST_DATA_CERTBOT_DIR@@/certs:/etc/letsencrypt" - "./@@CRUPEST_DATA_CERTBOT_DIR@@/data:/var/lib/letsencrypt" - - "./@@CRUPEST_DATA_CERTBOT_DIR@@/webroot:/srv/acme:ro" - "./@@CRUPEST_DATA_CERTBOT_DIR@@/webroot:/var/www/certbot" - restart: on-failure:3 + networks: + default: + ipv4_address: "172.21.5.2" + restart: "on-failure:3" - v2ray: + mail-server: pull_policy: build build: - context: ./@@CRUPEST_DOCKER_DIR@@/v2ray + context: "./@@CRUPEST_DOCKER_DIR@@/mail-server" + additional_contexts: + - "deno=./deno" dockerfile: Dockerfile pull: true - hostname: v2ray + container_name: mail-server + hostname: mail + domainname: "@@CRUPEST_DOMAIN@@" env_file: - - ./@@CRUPEST_GENERATED_DIR@@/envs/v2ray-common.env - - ./@@CRUPEST_GENERATED_DIR@@/envs/v2ray.env - restart: on-failure:3 - - auto-backup: - pull_policy: build - env_file: ./@@CRUPEST_GENERATED_DIR@@/envs/auto-backup.env - build: - context: ./@@CRUPEST_DOCKER_DIR@@/auto-backup - dockerfile: Dockerfile - pull: true - volumes: - - "./data:/data:ro" - - "./data/auto-backup:/data/auto-backup" - restart: on-failure:3 - - mailserver: - image: docker.io/mailserver/docker-mailserver:latest - pull_policy: always - container_name: mailserver - hostname: mail.@@CRUPEST_DOMAIN@@ - env_file: ./@@CRUPEST_GENERATED_DIR@@/envs/mailserver.env - # More information about the mail-server ports: - # https://docker-mailserver.github.io/docker-mailserver/edge/config/security/understanding-the-ports/ - # To avoid conflicts with yaml base-60 float, DO NOT remove the quotation marks. 
+ - "./@@CRUPEST_GENERATED_DIR@@/envs/mail-server-common.env" + - "./@@CRUPEST_GENERATED_DIR@@/envs/mail-server.env" ports: - - "25:25" # SMTP (explicit TLS => STARTTLS) - "143:143" # IMAP4 (explicit TLS => STARTTLS) - - "465:465" # ESMTP (implicit TLS) - - "587:587" # ESMTP (explicit TLS => STARTTLS) - "993:993" # IMAP4 (implicit TLS) + - "587:587" # ESMTP (explicit TLS => STARTTLS) + - "465:465" # ESMTP (implicit TLS) - "4190:4190" # manage sieve protocol volumes: - - ./@@CRUPEST_DATA_MAILSERVER_DIR@@/mail-data/:/var/mail/ - - ./@@CRUPEST_SERVICES_STATE_DIR@@/mail-state/:/var/mail-state/ - - ./@@CRUPEST_DATA_MAILSERVER_DIR@@/mail-logs/:/var/log/mail/ - - ./@@CRUPEST_DATA_MAILSERVER_DIR@@/config/:/tmp/docker-mailserver/ - - ./@@CRUPEST_DATA_CERTBOT_DIR@@/certs:/etc/letsencrypt - - /etc/localtime:/etc/localtime:ro - restart: on-failure:3 + - "./@@CRUPEST_DATA_MAIL_SERVER_DIR@@:/data" + - "./@@CRUPEST_SSL_FULLCHAIN_FILE@@:/etc/dovecot/ssl/tls.crt" + - "./@@CRUPEST_SSL_PRIVATE_KEY_FILE@@:/etc/dovecot/ssl/tls.key" + - "/etc/localtime:/etc/localtime:ro" + networks: + default: + ipv4_address: "172.21.5.3" + restart: "on-failure:3" stop_grace_period: 1m - healthcheck: - test: "ss --listening --tcp | grep -P 'LISTEN.+:smtp' || exit 1" - timeout: 3s - retries: 0 git-server: pull_policy: build build: - context: ./@@CRUPEST_DOCKER_DIR@@/git-server + context: "./@@CRUPEST_DOCKER_DIR@@/git-server" dockerfile: Dockerfile pull: true hostname: git-server environment: - - CRUPEST_ROOT_URL=@@CRUPEST_ROOT_URL@@ + - "CRUPEST_ROOT_URL=@@CRUPEST_ROOT_URL@@" volumes: - "./@@CRUPEST_DATA_GIT_DIR@@:/git" + networks: + default: + ipv4_address: "172.21.5.4" restart: on-failure:3 roundcubemail: - image: roundcube/roundcubemail:latest + image: "roundcube/roundcubemail" pull_policy: always hostname: roundcubemail - env_file: ./@@CRUPEST_GENERATED_DIR@@/envs/roundcubemail.env + env_file: + - "./@@CRUPEST_GENERATED_DIR@@/envs/roundcubemail.env" volumes: - - ./@@CRUPEST_DATA_SECRET_DIR@@/gnupg:/gnupg - - ./@@CRUPEST_DATA_ROUNDCUBE_DIR@@/www/html:/var/www/html - - ./@@CRUPEST_DATA_ROUNDCUBE_DIR@@/db:/var/roundcube/db - - ./@@CRUPEST_DATA_ROUNDCUBE_DIR@@/config:/var/roundcube/config - - roundcubemail-temp:/tmp/roundcube-temp + - "./@@CRUPEST_GENERATED_DIR@@/my-roundcube.inc.php:/var/roundcube/config/my-roundcube.inc.php" + - "./@@CRUPEST_DATA_SECRET_DIR@@/gnupg:/var/roundcube/enigma" + - "./@@CRUPEST_DATA_ROUNDCUBE_DIR@@/www/html:/var/www/html" + - "./@@CRUPEST_DATA_ROUNDCUBE_DIR@@/db:/var/roundcube/db" + - "roundcubemail-temp:/tmp/roundcube-temp" + networks: + default: + ipv4_address: "172.21.5.5" restart: on-failure:3 - 2fauth: - image: 2fauth/2fauth - pull_policy: always - hostname: 2fauth - env_file: ./@@CRUPEST_GENERATED_DIR@@/envs/2fauth.env + v2ray: + pull_policy: build + build: + context: "./@@CRUPEST_DOCKER_DIR@@/v2ray" + dockerfile: Dockerfile + pull: true + hostname: v2ray + env_file: + - "./@@CRUPEST_GENERATED_DIR@@/envs/v2ray-common.env" + - "./@@CRUPEST_GENERATED_DIR@@/envs/v2ray.env" + networks: + default: + ipv4_address: "172.21.5.6" + restart: "on-failure:3" + + auto-backup: + pull_policy: build + build: + context: "./@@CRUPEST_DOCKER_DIR@@/auto-backup" + dockerfile: Dockerfile + pull: true + env_file: + - "./@@CRUPEST_GENERATED_DIR@@/envs/auto-backup.env" volumes: - - ./data/2fauth:/2fauth + - "./data:/data:ro" + - "./data/auto-backup:/data/auto-backup" + restart: "on-failure:3" volumes: roundcubemail-temp: + +networks: + default: + enable_ipv6: false + ipam: + config: + - subnet: "172.21.5.0/24" + 
ip_range: "172.21.5.64/26" diff --git a/services/templates/envs/2fauth.env.template b/services/templates/envs/2fauth.env.template deleted file mode 100644 index de2ad3a..0000000 --- a/services/templates/envs/2fauth.env.template +++ /dev/null @@ -1,15 +0,0 @@ -APP_NAME=2FAuth-crupest -APP_TIMEZONE=UTC -SITE_OWNER=@@CRUPEST_EMAIL@@ -APP_KEY=@@CRUPEST_2FAUTH_APP_KEY@@ -APP_URL=@@CRUPEST_ROOT_URL@@/2fa -APP_SUBDIRECTORY=2fa -MAIL_MAILER=smtp -MAIL_HOST=@@CRUPEST_MAIL_SERVER_DOMAIN@@ -MAIL_PORT=465 -MAIL_USERNAME=@@CRUPEST_2FAUTH_MAIL_USERNAME@@ -MAIL_PASSWORD=@@CRUPEST_2FAUTH_MAIL_PASSWORD@@ -MAIL_ENCRYPTION=ssl -MAIL_FROM_NAME=2FAuth-crupest -MAIL_FROM_ADDRESS=@@CRUPEST_2FAUTH_MAIL_USERNAME@@ -TRUSTED_PROXIES=* diff --git a/services/templates/envs/auto-backup.env.template b/services/templates/envs/auto-backup.env.template index c2a6ba9..59d5fed 100644 --- a/services/templates/envs/auto-backup.env.template +++ b/services/templates/envs/auto-backup.env.template @@ -1,4 +1,5 @@ -CRUPEST_AUTO_BACKUP_COS_ENDPOINT=@@CRUPEST_AUTO_BACKUP_COS_ENDPOINT@@ -CRUPEST_AUTO_BACKUP_COS_BUCKET=@@CRUPEST_AUTO_BACKUP_COS_BUCKET@@ -CRUPEST_AUTO_BACKUP_COS_SECRET_ID=@@CRUPEST_AUTO_BACKUP_COS_SECRET_ID@@ -CRUPEST_AUTO_BACKUP_COS_SECRET_KEY=@@CRUPEST_AUTO_BACKUP_COS_SECRET_KEY@@ +RCLONE_S3_PROVIDER=@@CRUPEST_AUTO_BACKUP_S3_PROVIDER@@ +RCLONE_S3_ENDPOINT=@@CRUPEST_AUTO_BACKUP_S3_ENDPOINT@@ +RCLONE_S3_ACCESS_KEY_ID=@@CRUPEST_AUTO_BACKUP_S3_ACCESS_KEY_ID@@ +RCLONE_S3_SECRET_ACCESS_KEY=@@CRUPEST_AUTO_BACKUP_S3_ACCESS_KEY_SECRET@@ +CRUPEST_AUTO_BACKUP_S3_BUCKET=@@CRUPEST_AUTO_BACKUP_S3_BUCKET@@ diff --git a/services/templates/envs/mail-server-common.env.template b/services/templates/envs/mail-server-common.env.template new file mode 100644 index 0000000..0905af6 --- /dev/null +++ b/services/templates/envs/mail-server-common.env.template @@ -0,0 +1 @@ +CRUPEST_MAIL_SERVER_AWS_INBOUND_PATH=@@CRUPEST_MAIL_SERVER_AWS_INBOUND_PATH@@ diff --git a/services/templates/envs/mail-server.env.template b/services/templates/envs/mail-server.env.template new file mode 100644 index 0000000..9ad1c58 --- /dev/null +++ b/services/templates/envs/mail-server.env.template @@ -0,0 +1,8 @@ +CRUPEST_MAIL_SERVER_MAIL_DOMAIN=@@CRUPEST_DOMAIN@@ +CRUPEST_MAIL_SERVER_DATA_PATH=/data/crupest-mail/ +CRUPEST_MAIL_SERVER_INBOUND_FALLBACK=crupest@crupest.life +CRUPEST_MAIL_SERVER_AWS_INBOUND_KEY=@@CRUPEST_MAIL_SERVER_AWS_INBOUND_KEY@@ +CRUPEST_MAIL_SERVER_AWS_REGION=@@CRUPEST_MAIL_SERVER_AWS_REGION@@ +CRUPEST_MAIL_SERVER_AWS_USER=@@CRUPEST_MAIL_SERVER_AWS_USER@@ +CRUPEST_MAIL_SERVER_AWS_PASSWORD=@@CRUPEST_MAIL_SERVER_AWS_PASSWORD@@ +CRUPEST_MAIL_SERVER_AWS_MAIL_BUCKET=@@CRUPEST_MAIL_SERVER_AWS_MAIL_BUCKET@@ diff --git a/services/templates/envs/mailserver.env b/services/templates/envs/mailserver.env deleted file mode 100644 index 9b12dfe..0000000 --- a/services/templates/envs/mailserver.env +++ /dev/null @@ -1,661 +0,0 @@ -# ----------------------------------------------- -# --- Mailserver Environment Variables ---------- -# ----------------------------------------------- - -# DOCUMENTATION FOR THESE VARIABLES IS FOUND UNDER -# https://docker-mailserver.github.io/docker-mailserver/latest/config/environment/ - -# ----------------------------------------------- -# --- General Section --------------------------- -# ----------------------------------------------- - -# empty => uses the `hostname` command to get the mail server's canonical hostname -# => Specify a fully-qualified domainname to serve mail for. 
This is used for many of the config features so if you can't set your hostname (e.g. you're in a container platform that doesn't let you) specify it in this environment variable. -OVERRIDE_HOSTNAME= - -# REMOVED in version v11.0.0! Use LOG_LEVEL instead. -DMS_DEBUG=0 - -# Set the log level for DMS. -# This is mostly relevant for container startup scripts and change detection event feedback. -# -# Valid values (in order of increasing verbosity) are: `error`, `warn`, `info`, `debug` and `trace`. -# The default log level is `info`. -LOG_LEVEL=info - -# critical => Only show critical messages -# error => Only show erroneous output -# **warn** => Show warnings -# info => Normal informational output -# debug => Also show debug messages -SUPERVISOR_LOGLEVEL= - -# Support for deployment where these defaults are not compatible (eg: some NAS appliances): -# /var/mail vmail User ID (default: 5000) -DMS_VMAIL_UID= -# /var/mail vmail Group ID (default: 5000) -DMS_VMAIL_GID= - -# **empty** => use FILE -# LDAP => use LDAP authentication -# OIDC => use OIDC authentication (not yet implemented) -# FILE => use local files (this is used as the default) -ACCOUNT_PROVISIONER= - -# empty => postmaster@domain.com -# => Specify the postmaster address -POSTMASTER_ADDRESS= - -# Check for updates on container start and then once a day -# If an update is available, a mail is sent to POSTMASTER_ADDRESS -# 0 => Update check disabled -# 1 => Update check enabled -ENABLE_UPDATE_CHECK=1 - -# Customize the update check interval. -# Number + Suffix. Suffix must be 's' for seconds, 'm' for minutes, 'h' for hours or 'd' for days. -UPDATE_CHECK_INTERVAL=1d - -# Set different options for mynetworks option (can be overwrite in postfix-main.cf) -# **WARNING**: Adding the docker network's gateway to the list of trusted hosts, e.g. using the `network` or -# `connected-networks` option, can create an open relay -# https://github.com/docker-mailserver/docker-mailserver/issues/1405#issuecomment-590106498 -# The same can happen for rootless podman. To prevent this, set the value to "none" or configure slirp4netns -# https://github.com/docker-mailserver/docker-mailserver/issues/2377 -# -# none => Explicitly force authentication -# container => Container IP address only -# host => Add docker container network (ipv4 only) -# network => Add all docker container networks (ipv4 only) -# connected-networks => Add all connected docker networks (ipv4 only) -PERMIT_DOCKER=none - -# Set the timezone. If this variable is unset, the container runtime will try to detect the time using -# `/etc/localtime`, which you can alternatively mount into the container. The value of this variable -# must follow the pattern `AREA/ZONE`, i.e. of you want to use Germany's time zone, use `Europe/Berlin`. -# You can lookup all available timezones here: https://en.wikipedia.org/wiki/List_of_tz_database_time_zones#List -TZ= - -# In case you network interface differs from 'eth0', e.g. when you are using HostNetworking in Kubernetes, -# you can set NETWORK_INTERFACE to whatever interface you want. This interface will then be used. -# - **empty** => eth0 -NETWORK_INTERFACE= - -# empty => modern -# modern => Enables TLSv1.2 and modern ciphers only. (default) -# intermediate => Enables TLSv1, TLSv1.1 and TLSv1.2 and broad compatibility ciphers. -TLS_LEVEL= - -# Configures the handling of creating mails with forged sender addresses. -# -# **0** => (not recommended) Mail address spoofing allowed. 
Any logged in user may create email messages with a forged sender address (see also https://en.wikipedia.org/wiki/Email_spoofing). -# 1 => Mail spoofing denied. Each user may only send with his own or his alias addresses. Addresses with extension delimiters(http://www.postfix.org/postconf.5.html#recipient_delimiter) are not able to send messages. -SPOOF_PROTECTION= - -# Enables the Sender Rewriting Scheme. SRS is needed if your mail server acts as forwarder. See [postsrsd](https://github.com/roehling/postsrsd/blob/master/README.md#sender-rewriting-scheme-crash-course) for further explanation. -# - **0** => Disabled -# - 1 => Enabled -ENABLE_SRS=0 - -# Enables the OpenDKIM service. -# **1** => Enabled -# 0 => Disabled -ENABLE_OPENDKIM=0 - -# Enables the OpenDMARC service. -# **1** => Enabled -# 0 => Disabled -ENABLE_OPENDMARC=0 - - -# Enabled `policyd-spf` in Postfix's configuration. You will likely want to set this -# to `0` in case you're using Rspamd (`ENABLE_RSPAMD=1`). -# -# - 0 => Disabled -# - **1** => Enabled -ENABLE_POLICYD_SPF=0 - -# Enables POP3 service -# - **0** => Disabled -# - 1 => Enabled -ENABLE_POP3= - -# Enables IMAP service -# - 0 => Disabled -# - **1** => Enabled -ENABLE_IMAP=1 - -# Enables ClamAV, and anti-virus scanner. -# 1 => Enabled -# **0** => Disabled -ENABLE_CLAMAV=0 - -# Add the value of this ENV as a prefix to the mail subject when spam is detected. -# NOTE: This subject prefix may be redundant (by default spam is delivered to a junk folder). -# It provides value when your junk mail is stored alongside legitimate mail instead of a separate location (like with `SPAMASSASSIN_SPAM_TO_INBOX=1` or `MOVE_SPAM_TO_JUNK=0` or a POP3 only setup, without IMAP). -# NOTE: When not using Docker Compose, other CRI may not support quote-wrapping the value here to preserve any trailing white-space. -SPAM_SUBJECT= - -# Enables Rspamd -# **0** => Disabled -# 1 => Enabled -ENABLE_RSPAMD=1 - -# When `ENABLE_RSPAMD=1`, an internal Redis instance is enabled implicitly. -# This setting provides an opt-out to allow using an external instance instead. -# 0 => Disabled -# 1 => Enabled -ENABLE_RSPAMD_REDIS= - -# When enabled, -# -# 1. the "[autolearning][rspamd-autolearn]" feature is turned on; -# 2. the Bayes classifier will be trained when moving mails from or to the Junk folder (with the help of Sieve scripts). -# -# **0** => disabled -# 1 => enabled -RSPAMD_LEARN=0 - -# This settings controls whether checks should be performed on emails coming -# from authenticated users (i.e. most likely outgoing emails). The default value -# is `0` in order to align better with SpamAssassin. We recommend reading -# through https://rspamd.com/doc/tutorials/scanning_outbound.html though to -# decide for yourself whether you need and want this feature. -# -# Note that DKIM signing of e-mails will still happen. -RSPAMD_CHECK_AUTHENTICATED=0 - -# Controls whether the Rspamd Greylisting module is enabled. -# This module can further assist in avoiding spam emails by greylisting -# e-mails with a certain spam score. -# -# **0** => disabled -# 1 => enabled -RSPAMD_GREYLISTING=1 - -# Can be used to enable or disable the Hfilter group module. -# -# - 0 => Disabled -# - **1** => Enabled -RSPAMD_HFILTER=1 - -# Can be used to control the score when the HFILTER_HOSTNAME_UNKNOWN symbol applies. A higher score is more punishing. Setting it to 15 is equivalent to rejecting the email when the check fails. 
-# -# Default: 6 -RSPAMD_HFILTER_HOSTNAME_UNKNOWN_SCORE=6 - -# Can be used to enable or disable the (still experimental) neural module. -# -# - **0** => Disabled -# - 1 => Enabled -RSPAMD_NEURAL=0 - -# Amavis content filter (used for ClamAV & SpamAssassin) -# 0 => Disabled -# 1 => Enabled -ENABLE_AMAVIS=0 - -# -1/-2/-3 => Only show errors -# **0** => Show warnings -# 1/2 => Show default informational output -# 3/4/5 => log debug information (very verbose) -AMAVIS_LOGLEVEL=0 - -# This enables DNS block lists in Postscreen. -# Note: Emails will be rejected, if they don't pass the block list checks! -# **0** => DNS block lists are disabled -# 1 => DNS block lists are enabled -ENABLE_DNSBL=0 - -# If you enable Fail2Ban, don't forget to add the following lines to your `compose.yaml`: -# cap_add: -# - NET_ADMIN -# Otherwise, `nftables` won't be able to ban IPs. -ENABLE_FAIL2BAN=0 - -# Fail2Ban blocktype -# drop => drop packet (send NO reply) -# reject => reject packet (send ICMP unreachable) -FAIL2BAN_BLOCKTYPE=drop - -# 1 => Enables Managesieve on port 4190 -# empty => disables Managesieve -ENABLE_MANAGESIEVE=1 - -# **enforce** => Allow other tests to complete. Reject attempts to deliver mail with a 550 SMTP reply, and log the helo/sender/recipient information. Repeat this test the next time the client connects. -# drop => Drop the connection immediately with a 521 SMTP reply. Repeat this test the next time the client connects. -# ignore => Ignore the failure of this test. Allow other tests to complete. Repeat this test the next time the client connects. This option is useful for testing and collecting statistics without blocking mail. -POSTSCREEN_ACTION=enforce - -# empty => all daemons start -# 1 => only launch postfix smtp -SMTP_ONLY= - -# Please read [the SSL page in the documentation](https://docker-mailserver.github.io/docker-mailserver/latest/config/security/ssl) for more information. -# -# empty => SSL disabled -# letsencrypt => Enables Let's Encrypt certificates -# custom => Enables custom certificates -# manual => Let's you manually specify locations of your SSL certificates for non-standard cases -# self-signed => Enables self-signed certificates -SSL_TYPE=letsencrypt - -# These are only supported with `SSL_TYPE=manual`. -# Provide the path to your cert and key files that you've mounted access to within the container. -SSL_CERT_PATH= -SSL_KEY_PATH= -# Optional: A 2nd certificate can be supported as fallback (dual cert support), eg ECDSA with an RSA fallback. -# Useful for additional compatibility with older MTA and MUA (eg pre-2015). -SSL_ALT_CERT_PATH= -SSL_ALT_KEY_PATH= - -# Set how many days a virusmail will stay on the server before being deleted -# empty => 7 days -VIRUSMAILS_DELETE_DELAY= - -# Configure Postfix `virtual_transport` to deliver mail to a different LMTP client (default is a dovecot socket). -# Provide any valid URI. Examples: -# -# empty => `lmtp:unix:/var/run/dovecot/lmtp` (default, configured in Postfix main.cf) -# `lmtp:unix:private/dovecot-lmtp` (use socket) -# `lmtps:inet:<host>:<port>` (secure lmtp with starttls) -# `lmtp:<kopano-host>:2003` (use kopano as mailstore) -POSTFIX_DAGENT= - -# Set the mailbox size limit for all users. If set to zero, the size will be unlimited (default). Size is in bytes. 
-# -# empty => 0 -POSTFIX_MAILBOX_SIZE_LIMIT= - -# See https://docker-mailserver.github.io/docker-mailserver/latest/config/account-management/overview/#quotas -# 0 => Dovecot quota is disabled -# 1 => Dovecot quota is enabled -ENABLE_QUOTAS=1 - -# Set the message size limit for all users. If set to zero, the size will be unlimited (not recommended!). Size is in bytes. -# -# empty => 10240000 (~10 MB) -POSTFIX_MESSAGE_SIZE_LIMIT= - -# Mails larger than this limit won't be scanned. -# ClamAV must be enabled (ENABLE_CLAMAV=1) for this. -# -# empty => 25M (25 MB) -CLAMAV_MESSAGE_SIZE_LIMIT= - -# Enables regular pflogsumm mail reports. -# This is a new option. The old REPORT options are still supported for backwards compatibility. If this is not set and reports are enabled with the old options, logrotate will be used. -# -# not set => No report -# daily_cron => Daily report for the previous day -# logrotate => Full report based on the mail log when it is rotated -PFLOGSUMM_TRIGGER= - -# Recipient address for pflogsumm reports. -# -# not set => Use REPORT_RECIPIENT or POSTMASTER_ADDRESS -# => Specify the recipient address(es) -PFLOGSUMM_RECIPIENT= - -# Sender address (`FROM`) for pflogsumm reports if pflogsumm reports are enabled. -# -# not set => Use REPORT_SENDER -# => Specify the sender address -PFLOGSUMM_SENDER= - -# Interval for logwatch report. -# -# none => No report is generated -# daily => Send a daily report -# weekly => Send a report every week -LOGWATCH_INTERVAL= - -# Recipient address for logwatch reports if they are enabled. -# -# not set => Use REPORT_RECIPIENT or POSTMASTER_ADDRESS -# => Specify the recipient address(es) -LOGWATCH_RECIPIENT= - -# Sender address (`FROM`) for logwatch reports if logwatch reports are enabled. -# -# not set => Use REPORT_SENDER -# => Specify the sender address -LOGWATCH_SENDER= - -# Defines who receives reports if they are enabled. -# **empty** => ${POSTMASTER_ADDRESS} -# => Specify the recipient address -REPORT_RECIPIENT= - -# Defines who sends reports if they are enabled. -# **empty** => mailserver-report@${DOMAINNAME} -# => Specify the sender address -REPORT_SENDER= - -# Changes the interval in which log files are rotated -# **weekly** => Rotate log files weekly -# daily => Rotate log files daily -# monthly => Rotate log files monthly -# -# Note: This Variable actually controls logrotate inside the container -# and rotates the log files depending on this setting. The main log output is -# still available in its entirety via `docker logs mail` (Or your -# respective container name). If you want to control logrotation for -# the Docker-generated logfile see: -# https://docs.docker.com/config/containers/logging/configure/ -# -# Note: This variable can also determine the interval for Postfix's log summary reports, see [`PFLOGSUMM_TRIGGER`](#pflogsumm_trigger). -LOGROTATE_INTERVAL=weekly - -# Defines how many log files are kept by logrorate -LOGROTATE_COUNT=4 - - -# If enabled, employs `reject_unknown_client_hostname` to sender restrictions in Postfix's configuration. -# -# - **0** => Disabled -# - 1 => Enabled -POSTFIX_REJECT_UNKNOWN_CLIENT_HOSTNAME=0 - -# Choose TCP/IP protocols for postfix to use -# **all** => All possible protocols. -# ipv4 => Use only IPv4 traffic. Most likely you want this behind Docker. -# ipv6 => Use only IPv6 traffic. -# -# Note: More details at http://www.postfix.org/postconf.5.html#inet_protocols -POSTFIX_INET_PROTOCOLS=all - -# Enables MTA-STS support for outbound mail. 
-# More details: https://docker-mailserver.github.io/docker-mailserver/v13.3/config/best-practices/mta-sts/ -# - **0** ==> MTA-STS disabled -# - 1 => MTA-STS enabled -ENABLE_MTA_STS=0 - -# Choose TCP/IP protocols for dovecot to use -# **all** => Listen on all interfaces -# ipv4 => Listen only on IPv4 interfaces. Most likely you want this behind Docker. -# ipv6 => Listen only on IPv6 interfaces. -# -# Note: More information at https://dovecot.org/doc/dovecot-example.conf -DOVECOT_INET_PROTOCOLS=all - -# ----------------------------------------------- -# --- SpamAssassin Section ---------------------- -# ----------------------------------------------- - -ENABLE_SPAMASSASSIN=0 - -# KAM is a 3rd party SpamAssassin ruleset, provided by the McGrail Foundation. -# If SpamAssassin is enabled, KAM can be used in addition to the default ruleset. -# - **0** => KAM disabled -# - 1 => KAM enabled -# -# Note: only has an effect if `ENABLE_SPAMASSASSIN=1` -ENABLE_SPAMASSASSIN_KAM=0 - -# deliver spam messages to the inbox (tagged using SPAM_SUBJECT) -SPAMASSASSIN_SPAM_TO_INBOX=1 - -# spam messages will be moved in the Junk folder (SPAMASSASSIN_SPAM_TO_INBOX=1 required) -MOVE_SPAM_TO_JUNK=1 - -# spam messages will be marked as read -MARK_SPAM_AS_READ=0 - -# add 'spam info' headers at, or above this level -SA_TAG=2.0 - -# add 'spam detected' headers at, or above this level -SA_TAG2=6.31 - -# triggers spam evasive actions -SA_KILL=10.0 - -# ----------------------------------------------- -# --- Fetchmail Section ------------------------- -# ----------------------------------------------- - -ENABLE_FETCHMAIL=0 - -# The interval to fetch mail in seconds -FETCHMAIL_POLL=300 -# Use multiple fetchmail instances (1 per poll entry in fetchmail.cf) -# Supports multiple IMAP IDLE connections when a server is used across multiple poll entries -# https://otremba.net/wiki/Fetchmail_(Debian)#Immediate_Download_via_IMAP_IDLE -FETCHMAIL_PARALLEL=0 - -# Enable or disable `getmail`. -# -# - **0** => Disabled -# - 1 => Enabled -ENABLE_GETMAIL=0 - -# The number of minutes for the interval. Min: 1; Default: 5. -GETMAIL_POLL=5 - -# ----------------------------------------------- -# --- OAUTH2 Section ---------------------------- -# ----------------------------------------------- - -# empty => OAUTH2 authentication is disabled -# 1 => OAUTH2 authentication is enabled -ENABLE_OAUTH2= - -# Specify the user info endpoint URL of the oauth2 provider -# Example: https://oauth2.example.com/userinfo/ -OAUTH2_INTROSPECTION_URL= - -# ----------------------------------------------- -# --- LDAP Section ------------------------------ -# ----------------------------------------------- - -# A second container for the ldap service is necessary (i.e. https://hub.docker.com/r/bitnami/openldap/) - -# empty => no -# yes => LDAP over TLS enabled for Postfix -LDAP_START_TLS= - -# empty => mail.example.com -# Specify the `<dns-name>` / `<ip-address>` where the LDAP server is reachable via a URI like: `ldaps://mail.example.com`. -# Note: You must include the desired URI scheme (`ldap://`, `ldaps://`, `ldapi://`). -LDAP_SERVER_HOST= - -# empty => ou=people,dc=domain,dc=com -# => e.g. LDAP_SEARCH_BASE=dc=mydomain,dc=local -LDAP_SEARCH_BASE= - -# empty => cn=admin,dc=domain,dc=com -# => take a look at examples of SASL_LDAP_BIND_DN -LDAP_BIND_DN= - -# empty** => admin -# => Specify the password to bind against ldap -LDAP_BIND_PW= - -# e.g. `"(&(mail=%s)(mailEnabled=TRUE))"` -# => Specify how ldap should be asked for users -LDAP_QUERY_FILTER_USER= - -# e.g. 
`"(&(mailGroupMember=%s)(mailEnabled=TRUE))"` -# => Specify how ldap should be asked for groups -LDAP_QUERY_FILTER_GROUP= - -# e.g. `"(&(mailAlias=%s)(mailEnabled=TRUE))"` -# => Specify how ldap should be asked for aliases -LDAP_QUERY_FILTER_ALIAS= - -# e.g. `"(&(|(mail=*@%s)(mailalias=*@%s)(mailGroupMember=*@%s))(mailEnabled=TRUE))"` -# => Specify how ldap should be asked for domains -LDAP_QUERY_FILTER_DOMAIN= - -# ----------------------------------------------- -# --- Dovecot Section --------------------------- -# ----------------------------------------------- - -# empty => no -# yes => LDAP over TLS enabled for Dovecot -DOVECOT_TLS= - -# e.g. `"(&(objectClass=PostfixBookMailAccount)(uniqueIdentifier=%n))"` -DOVECOT_USER_FILTER= - -# e.g. `"(&(objectClass=PostfixBookMailAccount)(uniqueIdentifier=%n))"` -DOVECOT_PASS_FILTER= - -# Define the mailbox format to be used -# default is maildir, supported values are: sdbox, mdbox, maildir -DOVECOT_MAILBOX_FORMAT=maildir - -# empty => no -# yes => Allow bind authentication for LDAP -# https://wiki.dovecot.org/AuthDatabase/LDAP/AuthBinds -DOVECOT_AUTH_BIND= - -# ----------------------------------------------- -# --- Postgrey Section -------------------------- -# ----------------------------------------------- - -ENABLE_POSTGREY=0 -# greylist for N seconds -POSTGREY_DELAY=300 -# delete entries older than N days since the last time that they have been seen -POSTGREY_MAX_AGE=35 -# response when a mail is greylisted -POSTGREY_TEXT="Delayed by Postgrey" -# whitelist host after N successful deliveries (N=0 to disable whitelisting) -POSTGREY_AUTO_WHITELIST_CLIENTS=5 - -# ----------------------------------------------- -# --- SASL Section ------------------------------ -# ----------------------------------------------- - -ENABLE_SASLAUTHD=0 - -# empty => pam -# `ldap` => authenticate against ldap server -# `shadow` => authenticate against local user db -# `mysql` => authenticate against mysql db -# `rimap` => authenticate against imap server -# Note: can be a list of mechanisms like pam ldap shadow -SASLAUTHD_MECHANISMS= - -# empty => None -# e.g. with SASLAUTHD_MECHANISMS rimap you need to specify the ip-address/servername of the imap server ==> xxx.xxx.xxx.xxx -SASLAUTHD_MECH_OPTIONS= - -# empty => Use value of LDAP_SERVER_HOST -# Note: You must include the desired URI scheme (`ldap://`, `ldaps://`, `ldapi://`). -SASLAUTHD_LDAP_SERVER= - -# empty => Use value of LDAP_BIND_DN -# specify an object with privileges to search the directory tree -# e.g. active directory: SASLAUTHD_LDAP_BIND_DN=cn=Administrator,cn=Users,dc=mydomain,dc=net -# e.g. openldap: SASLAUTHD_LDAP_BIND_DN=cn=admin,dc=mydomain,dc=net -SASLAUTHD_LDAP_BIND_DN= - -# empty => Use value of LDAP_BIND_PW -SASLAUTHD_LDAP_PASSWORD= - -# empty => Use value of LDAP_SEARCH_BASE -# specify the search base -SASLAUTHD_LDAP_SEARCH_BASE= - -# empty => default filter `(&(uniqueIdentifier=%u)(mailEnabled=TRUE))` -# e.g. for active directory: `(&(sAMAccountName=%U)(objectClass=person))` -# e.g. for openldap: `(&(uid=%U)(objectClass=person))` -SASLAUTHD_LDAP_FILTER= - -# empty => no -# yes => LDAP over TLS enabled for SASL -# If set to yes, the protocol in SASLAUTHD_LDAP_SERVER must be ldap:// or missing. -SASLAUTHD_LDAP_START_TLS= - -# empty => no -# yes => Require and verify server certificate -# If yes you must/could specify SASLAUTHD_LDAP_TLS_CACERT_FILE or SASLAUTHD_LDAP_TLS_CACERT_DIR. -SASLAUTHD_LDAP_TLS_CHECK_PEER= - -# File containing CA (Certificate Authority) certificate(s). 
-# empty => Nothing is added to the configuration -# Any value => Fills the `ldap_tls_cacert_file` option -SASLAUTHD_LDAP_TLS_CACERT_FILE= - -# Path to directory with CA (Certificate Authority) certificates. -# empty => Nothing is added to the configuration -# Any value => Fills the `ldap_tls_cacert_dir` option -SASLAUTHD_LDAP_TLS_CACERT_DIR= - -# Specify what password attribute to use for password verification. -# empty => Nothing is added to the configuration but the documentation says it is `userPassword` by default. -# Any value => Fills the `ldap_password_attr` option -SASLAUTHD_LDAP_PASSWORD_ATTR= - -# empty => `bind` will be used as a default value -# `fastbind` => The fastbind method is used -# `custom` => The custom method uses userPassword attribute to verify the password -SASLAUTHD_LDAP_AUTH_METHOD= - -# Specify the authentication mechanism for SASL bind -# empty => Nothing is added to the configuration -# Any value => Fills the `ldap_mech` option -SASLAUTHD_LDAP_MECH= - -# ----------------------------------------------- -# --- SRS Section ------------------------------- -# ----------------------------------------------- - -# envelope_sender => Rewrite only envelope sender address (default) -# header_sender => Rewrite only header sender (not recommended) -# envelope_sender,header_sender => Rewrite both senders -# An email has an "envelope" sender (indicating the sending server) and a -# "header" sender (indicating who sent it). More strict SPF policies may require -# you to replace both instead of just the envelope sender. -SRS_SENDER_CLASSES=envelope_sender - -# empty => Envelope sender will be rewritten for all domains -# provide comma separated list of domains to exclude from rewriting -SRS_EXCLUDE_DOMAINS= - -# empty => generated when the image is built -# provide a secret to use in base64 -# you may specify multiple keys, comma separated. the first one is used for -# signing and the remaining will be used for verification. 
this is how you -# rotate and expire keys -SRS_SECRET= - -# ----------------------------------------------- -# --- Default Relay Host Section ---------------- -# ----------------------------------------------- - -# Setup relaying all mail through a default relay host -# -# Set a default host to relay all mail through (optionally include a port) -# Example: [mail.example.com]:587 -DEFAULT_RELAY_HOST= - -# ----------------------------------------------- -# --- Multi-Domain Relay Section ---------------- -# ----------------------------------------------- - -# Setup relaying for multiple domains based on the domain name of the sender -# optionally uses usernames and passwords in postfix-sasl-password.cf and relay host mappings in postfix-relaymap.cf -# -# Set a default host to relay mail through -# Example: mail.example.com -RELAY_HOST= - -# empty => 25 -# default port to relay mail -RELAY_PORT=25 - -# ----------------------------------------------- -# --- Relay Host Credentials Section ------------ -# ----------------------------------------------- - -# Configure a relay user and password to use with RELAY_HOST / DEFAULT_RELAY_HOST - -# empty => no default -RELAY_USER= - -# empty => no default -RELAY_PASSWORD= diff --git a/services/templates/envs/nginx.env.template b/services/templates/envs/nginx.env.template deleted file mode 100644 index 55143ab..0000000 --- a/services/templates/envs/nginx.env.template +++ /dev/null @@ -1,2 +0,0 @@ -CRUPEST_DOMAIN=@@CRUPEST_DOMAIN@@ -CRUPEST_GITHUB=@@CRUPEST_GITHUB@@ diff --git a/services/templates/my-roundcube.inc.php b/services/templates/my-roundcube.inc.php new file mode 100644 index 0000000..c07aff9 --- /dev/null +++ b/services/templates/my-roundcube.inc.php @@ -0,0 +1,3 @@ +<?php + +$config['managesieve_host'] = 'tls://%h'; diff --git a/services/docker/nginx/configs/common/acme-challenge b/services/templates/nginx/common/acme-challenge index 26054b8..8280cd8 100644 --- a/services/docker/nginx/configs/common/acme-challenge +++ b/services/templates/nginx/common/acme-challenge @@ -1,3 +1,3 @@ location /.well-known/acme-challenge { - root /srv/acme; + root /var/www/certbot; } diff --git a/services/docker/nginx/configs/common/http-listen b/services/templates/nginx/common/http-listen index 76cb18d..76cb18d 100644 --- a/services/docker/nginx/configs/common/http-listen +++ b/services/templates/nginx/common/http-listen diff --git a/services/docker/nginx/configs/common/https-listen b/services/templates/nginx/common/https-listen index db2f68e..db2f68e 100644 --- a/services/docker/nginx/configs/common/https-listen +++ b/services/templates/nginx/common/https-listen diff --git a/services/docker/nginx/configs/common/https-redirect b/services/templates/nginx/common/https-redirect index 56d095d..56d095d 100644 --- a/services/docker/nginx/configs/common/https-redirect +++ b/services/templates/nginx/common/https-redirect diff --git a/services/docker/nginx/configs/common/proxy-common b/services/templates/nginx/common/reverse-proxy index 4193548..4193548 100644 --- a/services/docker/nginx/configs/common/proxy-common +++ b/services/templates/nginx/common/reverse-proxy diff --git a/services/docker/nginx/configs/conf.d/default.conf b/services/templates/nginx/default.conf index 515942b..515942b 100644 --- a/services/docker/nginx/configs/conf.d/default.conf +++ b/services/templates/nginx/default.conf diff --git a/services/templates/nginx/mail.conf.template b/services/templates/nginx/mail.conf.template new file mode 100644 index 0000000..1c2a2ca --- /dev/null +++ 
b/services/templates/nginx/mail.conf.template
@@ -0,0 +1,29 @@
+server {
+    server_name mail.@@CRUPEST_DOMAIN@@;
+    include conf.d/common/https-listen;
+
+    location = /robots.txt {
+        root /srv/mail;
+    }
+
+    location = /@@CRUPEST_MAIL_SERVER_AWS_INBOUND_PATH@@ {
+        include conf.d/common/reverse-proxy;
+        proxy_pass http://mail-server:2345/@@CRUPEST_MAIL_SERVER_AWS_INBOUND_PATH@@;
+    }
+
+    location / {
+        include conf.d/common/reverse-proxy;
+        proxy_pass http://roundcubemail:80/;
+    }
+
+    client_max_body_size 5G;
+}
+
+
+server {
+    server_name mail.@@CRUPEST_DOMAIN@@;
+    include conf.d/common/http-listen;
+
+    include conf.d/common/https-redirect;
+    include conf.d/common/acme-challenge;
+}
diff --git a/services/templates/nginx/root.conf.template b/services/templates/nginx/root.conf.template
new file mode 100644
index 0000000..7a56982
--- /dev/null
+++ b/services/templates/nginx/root.conf.template
@@ -0,0 +1,40 @@
+server {
+    server_name @@CRUPEST_DOMAIN@@;
+    include conf.d/common/https-listen;
+
+    location / {
+        root /srv/www;
+    }
+
+    location /git/ {
+        include conf.d/common/reverse-proxy;
+        client_max_body_size 5G;
+        proxy_pass http://git-server:3636;
+    }
+
+    location = /github {
+        return 301 @@CRUPEST_GITHUB@@;
+    }
+
+    location = /github/ {
+        return 301 @@CRUPEST_GITHUB@@;
+    }
+
+    location /_${CRUPEST_V2RAY_PATH} {
+        if ($http_upgrade != "websocket") {
+            return 404;
+        }
+
+        proxy_redirect off;
+        include conf.d/common/reverse-proxy;
+        proxy_pass http://v2ray:10000;
+    }
+}
+
+server {
+    server_name @@CRUPEST_DOMAIN@@;
+    include conf.d/common/http-listen;
+
+    include conf.d/common/https-redirect;
+    include conf.d/common/acme-challenge;
+}
diff --git a/services/docker/nginx/configs/templates/ssl.conf.template b/services/templates/nginx/ssl.conf.template
index 54205f1..181a1af 100644
--- a/services/docker/nginx/configs/templates/ssl.conf.template
+++ b/services/templates/nginx/ssl.conf.template
@@ -4,8 +4,8 @@
 # the up-to-date file that you will need to refer to when manually updating
 # this file. Contents are based on https://ssl-config.mozilla.org
 
-ssl_certificate /etc/letsencrypt/live/${CRUPEST_DOMAIN}/fullchain.pem;
-ssl_certificate_key /etc/letsencrypt/live/${CRUPEST_DOMAIN}/privkey.pem;
+ssl_certificate /etc/letsencrypt/live/@@CRUPEST_DOMAIN@@/fullchain.pem;
+ssl_certificate_key /etc/letsencrypt/live/@@CRUPEST_DOMAIN@@/privkey.pem;
 
 ssl_session_cache shared:le_nginx_SSL:10m;
 ssl_session_timeout 1440m;
diff --git a/services/templates/nginx/timeline.conf.template b/services/templates/nginx/timeline.conf.template
new file mode 100644
index 0000000..3414510
--- /dev/null
+++ b/services/templates/nginx/timeline.conf.template
@@ -0,0 +1,6 @@
+server {
+    server_name timeline.@@CRUPEST_DOMAIN@@;
+    include conf.d/common/http-listen;
+
+    include conf.d/common/acme-challenge;
+}
diff --git a/services/docker/nginx/configs/conf.d/websocket.conf b/services/templates/nginx/websocket.conf
index 32af4c3..32af4c3 100644
--- a/services/docker/nginx/configs/conf.d/websocket.conf
+++ b/services/templates/nginx/websocket.conf
diff --git a/store/config/aichat/roles/blogger.md b/store/config/aichat/roles/blogger.md
new file mode 100644
index 0000000..b2ebb65
--- /dev/null
+++ b/store/config/aichat/roles/blogger.md
@@ -0,0 +1,4 @@
+model: deepseek:deepseek-chat
+
+---
+You are DeepSeek, an AI assistant specialized in English writing. Your task is to refine my writings and explain the changes to help me improve my English writing skills.
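The nginx templates above (mail.conf.template, root.conf.template, ssl.conf.template, timeline.conf.template) switch from nginx-envsubst-style `${NAME}` substitution to `@@NAME@@` placeholders, as the ssl.conf.template hunk shows; the likely benefit is that `@@NAME@@` can never be mistaken for one of nginx's own runtime `$variables`, so the files can be rendered by the repository's own tooling instead of the nginx image's envsubst step. The sketch below only illustrates that substitution idea; the function, output paths, and the use of `CRUPEST_`-prefixed environment variables are assumptions, not the project's actual generator.

```python
import os
import re
from pathlib import Path

# Hypothetical renderer for @@NAME@@ placeholders; the real project may do
# this differently (names, paths, and error handling here are assumptions).
PLACEHOLDER = re.compile(r"@@([A-Z0-9_]+)@@")

def render_template(text: str, variables: dict[str, str]) -> str:
    """Replace every @@NAME@@ with variables['NAME'], failing on unknown names."""
    def substitute(match: re.Match) -> str:
        name = match.group(1)
        if name not in variables:
            raise KeyError(f"template variable {name} is not defined")
        return variables[name]
    return PLACEHOLDER.sub(substitute, text)

if __name__ == "__main__":
    # Collect CRUPEST_* values from the environment (assumed variable source).
    variables = {k: v for k, v in os.environ.items() if k.startswith("CRUPEST_")}
    src = Path("services/templates/nginx/root.conf.template")
    out = Path("out/nginx/root.conf")  # hypothetical output location
    out.parent.mkdir(parents=True, exist_ok=True)
    out.write_text(render_template(src.read_text(), variables))
```

In this sketch an undefined placeholder fails loudly rather than being silently left in place, which is usually the safer behavior for generated server configuration.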
diff --git a/store/config/aichat/roles/coder.md b/store/config/aichat/roles/coder.md new file mode 100644 index 0000000..ae5458b --- /dev/null +++ b/store/config/aichat/roles/coder.md @@ -0,0 +1,5 @@ +model: deepseek:deepseek-chat +temperature: 0 + +--- +You are an AI programming assistant. Help users write, debug, and optimize code. Provide clear, well-commented examples, explain concepts simply, and suggest multiple solutions when possible. Prioritize best practices like readability, efficiency, and security. Ask clarifying questions if the request is unclear. diff --git a/store/config/etc/fonts/local.conf b/store/config/etc/fonts/local.conf new file mode 100644 index 0000000..a8dbe2b --- /dev/null +++ b/store/config/etc/fonts/local.conf @@ -0,0 +1,60 @@ +<?xml version="1.0"?> +<!DOCTYPE fontconfig SYSTEM "urn:fontconfig:fonts.dtd"> +<fontconfig> + + <alias> + <family>sans-serif</family> + <prefer> + <family>MiSans</family> + </prefer> + </alias> + + <alias> + <family>serif</family> + <prefer> + <family>MiSans</family> + </prefer> + </alias> + + <alias> + <family>monospace</family> + <prefer> + <family>Maple Mono</family> + </prefer> + </alias> + + <alias> + <family>MiSans</family> + <prefer> + <family>MiSans</family> + <family>Noto Color Emoji</family> + </prefer> + </alias> + + <alias> + <family>Maple Mono</family> + <prefer> + <family>Maple Mono</family> + <family>Maple Mono NF</family> + <family>Maple Mono NF CN</family> + <family>Noto Color Emoji</family> + </prefer> + </alias> + + <alias> + <family>Noto Sans</family> + <prefer> + <family>Noto Sans</family> + <family>Noto Sans CJK SC</family> + </prefer> + </alias> + + <alias> + <family>Noto Serif</family> + <prefer> + <family>Noto Serif</family> + <family>Noto Serif CJK SC</family> + </prefer> + </alias> + +</fontconfig>
\ No newline at end of file diff --git a/store/home/config/halloy/config.toml b/store/config/halloy/config.toml index 5d39f52..5d39f52 100644 --- a/store/home/config/halloy/config.toml +++ b/store/config/halloy/config.toml diff --git a/store/home/bash_profile b/store/config/home/bash_profile index b65f405..b65f405 100644 --- a/store/home/bash_profile +++ b/store/config/home/bash_profile diff --git a/store/config/home/bashrc b/store/config/home/bashrc new file mode 100644 index 0000000..e59b2a4 --- /dev/null +++ b/store/config/home/bashrc @@ -0,0 +1,13 @@ +set-proxy() { + export http_proxy="http://127.0.0.1:7897" + export https_proxy="http://127.0.0.1:7897" + export HTTP_PROXY="http://127.0.0.1:7897" + export HTTPS_PROXY="http://127.0.0.1:7897" +} + +unset-proxy() { + unset http_proxy + unset https_proxy + unset HTTP_PROXY + unset HTTPS_PROXY +} diff --git a/store/home/gitconfig b/store/config/home/gitconfig index b343ab3..4b88c56 100644 --- a/store/home/gitconfig +++ b/store/config/home/gitconfig @@ -8,3 +8,4 @@ helper = /usr/lib/git-core/git-credential-libsecret [alias] lg = log --decorate --oneline --graph + pap = push --all --prune diff --git a/store/config/mihomo/config.yaml b/store/config/mihomo/config.yaml new file mode 100644 index 0000000..c455409 --- /dev/null +++ b/store/config/mihomo/config.yaml @@ -0,0 +1,101 @@ +mixed-port: 7897 +mode: rule +log-level: info +external-controller: 127.0.0.1:9090 +profile: + store-selected: true + +external-ui: ui/metacubexd +external-ui-name: metacubexd +external-ui-url: https://github.com/MetaCubeX/metacubexd/archive/refs/heads/gh-pages.zip + +geox-url: + geoip: https://testingcf.jsdelivr.net/gh/MetaCubeX/meta-rules-dat@release/geoip.dat + geosite: https://testingcf.jsdelivr.net/gh/MetaCubeX/meta-rules-dat@release/geosite.dat + mmdb: https://testingcf.jsdelivr.net/gh/MetaCubeX/meta-rules-dat@release/country.mmdb + asn: https://testingcf.jsdelivr.net/gh/MetaCubeX/meta-rules-dat@release/GeoLite2-ASN.mmdb +geo-auto-update: true +geo-update-interval: 24 + +ipv6: false +unified-delay: true +global-client-fingerprint: edge + +dns: + enable: true + ipv6: true + default-nameserver: + - 223.5.5.5 + - 119.29.29.29 + nameserver: +# - 9.9.9.11 +# - tls://1.1.1.1 + - https://doh.pub/dns-query + - https://dns.alidns.com/dns-query + - 223.5.5.5 + - 119.29.29.29 + +sniffer: + enable: true + sniff: + HTTP: + ports: [80] + TLS: + ports: [443] + QUIC: + ports: [443] + skip-domain: + - "Mijia Cloud" + +rule-providers: + cn: + type: file + path: has-rule + behavior: classical + format: text + + non-cn: + type: file + path: not-has-rule + behavior: classical + format: text + + need: + type: file + path: need-rule + behavior: classical + format: text + +rules: + - RULE-SET,cn,DIRECT + - RULE-SET,non-cn,node-select + - RULE-SET,need,node-select +# - NOT,((GEOIP,CN)),node-select + - DOMAIN-SUFFIX,gnu.org,node-select + - DOMAIN-SUFFIX,nongnu.org,node-select + - DOMAIN-SUFFIX,ietf.org,node-select + - DOMAIN-SUFFIX,metacubex.one,node-select + - DOMAIN-SUFFIX,winehq.org,node-select + - DOMAIN-SUFFIX,freedesktop.org,node-select + - DOMAIN-SUFFIX,eff.org,node-select + - DOMAIN-SUFFIX,typescriptlang.org,node-select + - DOMAIN-SUFFIX,arxiv.org,node-select +# - MATCH,node-select + - MATCH,DIRECT + +proxy-groups: + - name: node-select + type: select + proxies: + - auto-select + use: + - money + + - name: auto-select + type: url-test + use: + - money + interval: 300 + url: https://www.gstatic.com/generate_204 + filter: 日本|新加坡|香港|台湾|美国 + expected-status: 204 diff --git 
a/store/config/mihomo/need-rule b/store/config/mihomo/need-rule
new file mode 100644
index 0000000..7ffcf49
--- /dev/null
+++ b/store/config/mihomo/need-rule
@@ -0,0 +1,4 @@
+IP-CIDR,185.199.108.153/32
+IP-CIDR,185.199.109.153/32
+IP-CIDR,185.199.110.153/32
+IP-CIDR,185.199.111.153/32
\ No newline at end of file diff --git a/store/config/nvim/init.lua b/store/config/nvim/init.lua new file mode 100644 index 0000000..668c00c --- /dev/null +++ b/store/config/nvim/init.lua @@ -0,0 +1,35 @@ +vim.crupest = {} + +local lazy_path = vim.fn.stdpath("data") .. "/lazy/lazy.nvim" +if not vim.uv.fs_stat(lazy_path) then + vim.fn.system({ + "git", + "clone", + "--filter=blob:none", + "https://github.com/folke/lazy.nvim.git", + "--branch=stable", -- latest stable release + lazy_path, + }) +end +vim.opt.rtp:prepend(lazy_path) +require("lazy").setup { + spec = { { import = "plugins" } } +} + +vim.cmd([[ + if has('nvim') && executable('nvr') + let $GIT_EDITOR = 'nvr -cc split --remote-wait' + endif +]]) + +if vim.g.neovide then + vim.opt.guifont = "Maple Mono"; + vim.g.neovide_normal_opacity = 0.95; + vim.g.neovide_input_ime = false; + vim.g.neovide_cursor_animate_in_insert_mode = false + vim.g.neovide_scroll_animation_far_lines = 0 + vim.g.neovide_input_macos_option_key_is_meta = 'only_left' + vim.cmd("colorscheme catppuccin-macchiato") +end + +require("setup").setup() diff --git a/store/config/nvim/lazy-lock.json b/store/config/nvim/lazy-lock.json new file mode 100644 index 0000000..cdb120a --- /dev/null +++ b/store/config/nvim/lazy-lock.json @@ -0,0 +1,19 @@ +{ + "catppuccin": { "branch": "main", "commit": "fa42eb5e26819ef58884257d5ae95dd0552b9a66" }, + "cmp-buffer": { "branch": "main", "commit": "b74fab3656eea9de20a9b8116afa3cfc4ec09657" }, + "cmp-nvim-lsp": { "branch": "main", "commit": "a8912b88ce488f411177fc8aed358b04dc246d7b" }, + "cmp-path": { "branch": "main", "commit": "c6635aae33a50d6010bf1aa756ac2398a2d54c32" }, + "gitsigns.nvim": { "branch": "main", "commit": "88205953bd748322b49b26e1dfb0389932520dc9" }, + "lazy.nvim": { "branch": "main", "commit": "6c3bda4aca61a13a9c63f1c1d1b16b9d3be90d7a" }, + "lualine.nvim": { "branch": "master", "commit": "a94fc68960665e54408fe37dcf573193c4ce82c9" }, + "neo-tree.nvim": { "branch": "v3.x", "commit": "f481de16a0eb59c985abac8985e3f2e2f75b4875" }, + "nui.nvim": { "branch": "main", "commit": "de740991c12411b663994b2860f1a4fd0937c130" }, + "nvim-autopairs": { "branch": "master", "commit": "4d74e75913832866aa7de35e4202463ddf6efd1b" }, + "nvim-cmp": { "branch": "main", "commit": "b5311ab3ed9c846b585c0c15b7559be131ec4be9" }, + "nvim-lint": { "branch": "master", "commit": "2b0039b8be9583704591a13129c600891ac2c596" }, + "nvim-lspconfig": { "branch": "master", "commit": "463b16bd6a347a129367a7fd00ebcdd9442d9a96" }, + "nvim-treesitter": { "branch": "master", "commit": "42fc28ba918343ebfd5565147a42a26580579482" }, + "nvim-web-devicons": { "branch": "master", "commit": "1fb58cca9aebbc4fd32b086cb413548ce132c127" }, + "plenary.nvim": { "branch": "master", "commit": "857c5ac632080dba10aae49dba902ce3abf91b35" }, + "telescope.nvim": { "branch": "master", "commit": "b4da76be54691e854d3e0e02c36b0245f945c2c7" } +} diff --git a/store/config/nvim/lua/plugins.lua b/store/config/nvim/lua/plugins.lua new file mode 100644 index 0000000..85de362 --- /dev/null +++ b/store/config/nvim/lua/plugins.lua @@ -0,0 +1,40 @@ +-- spellchecker: disable +return { + { + "catppuccin/nvim", + name = "catppuccin", + priority = 1000 + }, + { + "neovim/nvim-lspconfig" + }, + { + "nvim-treesitter/nvim-treesitter", + build = ":TSUpdate" + }, + { + "nvim-neo-tree/neo-tree.nvim", + branch = "v3.x", + dependencies = { + "nvim-lua/plenary.nvim", + "nvim-tree/nvim-web-devicons", -- not strictly required, but recommended + "MunifTanjim/nui.nvim", + -- {"3rd/image.nvim", opts = {}}, -- Optional 
image support in preview window: See `# Preview Mode` for more information + }, + }, + { + "nvim-lualine/lualine.nvim", + dependencies = { 'nvim-tree/nvim-web-devicons' } + }, + { + "nvim-telescope/telescope.nvim", + dependencies = { 'nvim-lua/plenary.nvim' } + }, + { "lewis6991/gitsigns.nvim" }, + { "hrsh7th/nvim-cmp" }, + { "hrsh7th/cmp-nvim-lsp" }, + { "hrsh7th/cmp-buffer" }, + { "hrsh7th/cmp-path" }, + { "windwp/nvim-autopairs" }, + { "mfussenegger/nvim-lint" }, +} diff --git a/store/config/nvim/lua/setup/init.lua b/store/config/nvim/lua/setup/init.lua new file mode 100644 index 0000000..bbce01c --- /dev/null +++ b/store/config/nvim/lua/setup/init.lua @@ -0,0 +1,45 @@ +local function close_float() + local wins = vim.api.nvim_list_wins() + for _, v in ipairs(wins) do + if vim.api.nvim_win_get_config(v).relative ~= '' then + vim.api.nvim_win_close(v, false) + end + end +end + +local function setup() + if vim.fn.has("win32") ~= 0 then + require("setup.win").setup() + end + + vim.opt.termguicolors = true; + vim.opt.fileformats = "unix,dos"; + vim.opt.number = true; + + vim.g.load_doxygen_syntax = true; + vim.g.doxygen_javadoc_autobrief = false; + + vim.keymap.set("n", "<c-tab>", "<cmd>bnext<cr>") + vim.keymap.set("n", "<c-s-tab>", "<cmd>bNext<cr>") + vim.keymap.set("n", "<esc>", close_float) + vim.keymap.set('t', '<A-n>', '<C-\\><C-n>') + vim.keymap.set('t', '<A-p>', function() + local register = vim.fn.input("Enter register: ") + if register == "" then + register = '"' + end + return '<C-\\><C-N>"' .. register .. 'pi' + end, { expr = true }) + + vim.cmd("autocmd FileType gitcommit,gitrebase,gitconfig set bufhidden=delete") + + vim.diagnostic.config({ virtual_text = true }) + vim.keymap.set("n", "grl", vim.diagnostic.open_float) + + require("setup.lsp").setup() + require("setup.plugins").setup() +end + +return { + setup = setup +} diff --git a/store/config/nvim/lua/setup/lsp.lua b/store/config/nvim/lua/setup/lsp.lua new file mode 100644 index 0000000..4216f1c --- /dev/null +++ b/store/config/nvim/lua/setup/lsp.lua @@ -0,0 +1,126 @@ +vim.lsp.config("*", { + capabilities = vim.tbl_extend("force", + vim.lsp.protocol.make_client_capabilities(), + require("cmp_nvim_lsp").default_capabilities() + ) +}) + +---@param ev vim.api.keyset.create_autocmd.callback_args +---@param name string +local function client_name_is(ev, name) + local client = vim.lsp.get_client_by_id(ev.data.client_id) + return client and client.name == name +end + +local function setup_clangd() + local clangd = "clangd" + local brew_clangd_path = "/usr/local/opt/llvm/bin/clangd" + + if vim.uv.fs_stat(brew_clangd_path) ~= nil then + clangd = brew_clangd_path + end + + vim.lsp.config("clangd", { cmd = { clangd } }) + + vim.api.nvim_create_autocmd("LspAttach", { + callback = function(ev) + if client_name_is(ev, "clangd") then + vim.keymap.set("n", "grs", "<cmd>ClangdSwitchSourceHeader<cr>", { + buffer = ev.buf + }) + end + end + }) + + vim.api.nvim_create_autocmd("LspDetach", { + callback = function(ev) + if client_name_is(ev, "clangd") then + vim.keymap.del("n", "grs", { buffer = ev.buf }) + end + end + }) +end + +local function setup_lua_ls() + vim.lsp.config("lua_ls", { + settings = { + Lua = { + runtime = { + version = "LuaJIT" + }, + diagnostics = { + globals = { "vim" }, + }, + workspace = { + library = { + [vim.fn.expand "$VIMRUNTIME/lua"] = true, + [vim.fn.expand "$VIMRUNTIME/lua/vim/lsp"] = true, + [vim.fn.stdpath "data" .. 
"/lazy/lazy.nvim/lua/lazy"] = true, + }, + }, + }, + }, + }) +end + +function vim.crupest.no_range_format() + print("Lsp doesn't support range formatting. Use gqa to format the whole document.") + return 0 +end + +local function setup_denols() + vim.lsp.config("denols", { + root_dir = function(bufnr, on_dir) + local deno_configs = vim.fs.find({ "deno.json", "deno.jsonc" }, { + path = vim.api.nvim_buf_get_name(bufnr), upward = true, limit = math.huge }) + if 0 ~= #deno_configs then + local deno_config = deno_configs[#deno_configs] + on_dir(vim.fs.dirname(deno_config)) + end + end, + }) + + vim.api.nvim_create_autocmd("LspAttach", { + callback = function(ev) + if client_name_is(ev, "denols") then + vim.api.nvim_set_option_value( + "formatexpr", + "v:lua.vim.crupest.no_range_format()", + { buf = ev.buf } + ) + end + end + }) + + vim.api.nvim_create_autocmd("LspDetach", { + callback = function(ev) + if client_name_is(ev, "denols") then + vim.api.nvim_set_option_value("formatexpr", "", { buf = ev.buf }) + end + end + }) +end + + +local function setup() + vim.api.nvim_create_autocmd("LspAttach", { + callback = function(ev) + vim.keymap.set("n", "gqa", vim.lsp.buf.format, { buffer = ev.buf }) + end + }) + + vim.api.nvim_create_autocmd("LspDetach", { + callback = function(ev) + vim.keymap.del("n", "gqa", { buffer = ev.buf }) + end + }) + + setup_clangd() + setup_lua_ls() + setup_denols() + vim.lsp.enable({ "clangd", "lua_ls", "denols" }) +end + +return { + setup = setup +} diff --git a/store/home/config/nvim/lua/crupest/nvim/plugins/cmp.lua b/store/config/nvim/lua/setup/plugins/cmp.lua index 2244443..be9f8ea 100644 --- a/store/home/config/nvim/lua/crupest/nvim/plugins/cmp.lua +++ b/store/config/nvim/lua/setup/plugins/cmp.lua @@ -1,28 +1,24 @@ local function setup() local cmp = require("cmp") - local luasnip = require("luasnip") cmp.setup { snippet = { expand = function(args) - luasnip.lsp_expand(args.body) + vim.snippet.expand(args.body) end, }, - window = { - completion = cmp.config.window.bordered(), - documentation = cmp.config.window.bordered(), - }, mapping = cmp.mapping.preset.insert({ ['<C-b>'] = cmp.mapping.scroll_docs(-4), ['<C-f>'] = cmp.mapping.scroll_docs(4), ['<C-j>'] = cmp.mapping.select_next_item({ behavior = cmp.SelectBehavior.Select }), ['<C-k>'] = cmp.mapping.select_prev_item({ behavior = cmp.SelectBehavior.Select }), + ['<C-e>'] = cmp.mapping.abort(), ['<C-y>'] = cmp.mapping.confirm({ select = true }), ['<CR>'] = cmp.mapping.confirm({ select = true }), }), sources = cmp.config.sources({ { name = 'nvim_lsp' }, - { name = 'luasnip' }, + { name = 'path' }, }, { { name = 'buffer' }, }) diff --git a/store/config/nvim/lua/setup/plugins/gitsigns.lua b/store/config/nvim/lua/setup/plugins/gitsigns.lua new file mode 100644 index 0000000..957c661 --- /dev/null +++ b/store/config/nvim/lua/setup/plugins/gitsigns.lua @@ -0,0 +1,40 @@ +local function setup() + local gitsigns = require('gitsigns') + + gitsigns.setup { + on_attach = function(bufnr) + local function map(mode, l, r, opts) + opts = opts or {} + opts.buffer = bufnr + vim.keymap.set(mode, l, r, opts) + end + + -- Navigation + map('n', ']c', function() + if vim.wo.diff then + vim.cmd.normal({ ']c', bang = true }) + else + gitsigns.nav_hunk('next') + end + end) + + map('n', '[c', function() + if vim.wo.diff then + vim.cmd.normal({ '[c', bang = true }) + else + gitsigns.nav_hunk('prev') + end + end) + + -- Actions + map('n', '<leader>gc', gitsigns.preview_hunk) + map('n', '<leader>gt', gitsigns.toggle_deleted) + map('n', '<leader>gd', 
gitsigns.diffthis) + map('n', '<leader>gb', function() gitsigns.blame_line { full = true } end) + end + } +end + +return { + setup = setup +} diff --git a/store/config/nvim/lua/setup/plugins/init.lua b/store/config/nvim/lua/setup/plugins/init.lua new file mode 100644 index 0000000..8f1346b --- /dev/null +++ b/store/config/nvim/lua/setup/plugins/init.lua @@ -0,0 +1,24 @@ +local function setup() + require('lualine').setup {} + require("neo-tree").setup { + filesystem = { + filtered_items = { + hide_dotfiles = false, + hide_gitignored = false, + hide_hidden = false, + } + } + } + + require("setup.plugins.telescope").setup() + require("setup.plugins.gitsigns").setup() + + require("setup.plugins.tree-sitter").setup() + require("setup.plugins.lint").setup() + require("setup.plugins.cmp").setup() + require("nvim-autopairs").setup {} +end + +return { + setup = setup +} diff --git a/store/config/nvim/lua/setup/plugins/lint.lua b/store/config/nvim/lua/setup/plugins/lint.lua new file mode 100644 index 0000000..d03f539 --- /dev/null +++ b/store/config/nvim/lua/setup/plugins/lint.lua @@ -0,0 +1,94 @@ +--- spellchecker: ignore markdownlintrc + +---@alias CruLinter { name: string, config_patterns: string[], filetypes: string[] | nil, fast: boolean } + +local cspell = { + name = "cspell", + config_patterns = { + ".cspell.json", + "cspell.json", + ".cSpell.json", + "cSpell.json", + "cspell.config.js", + "cspell.config.cjs", + "cspell.config.json", + "cspell.config.yaml", + "cspell.config.yml", + "cspell.yaml", + "cspell.yml", + }, + fast = true, +} + +local markdownlint = { + name = "markdownlint", + config_patterns = { + ".markdownlint.jsonc", + ".markdownlint.json", + ".markdownlint.yaml", + ".markdownlint.yml", + ".markdownlintrc", + }, + filetypes = { "markdown" }, + fast = true, +} + +local linters = { cspell = cspell, markdownlint = markdownlint } + +---@param linter CruLinter +---@param buf integer +---@return string | nil +local function find_config(linter, buf) + local files = vim.fs.find(linter.config_patterns, { + path = vim.api.nvim_buf_get_name(buf), upward = true }) + if #files ~= 0 then + return files[1]; + end + return nil +end + +vim.list_extend(require("lint.linters.markdownlint").args, { + "--config", + function() + return find_config(markdownlint, 0); + end +}) + +---@param linter CruLinter +---@param buf integer +function vim.crupest.lint(linter, buf) + if linter.filetypes then + local filetype = vim.api.nvim_get_option_value("filetype", { buf = buf }) + if not vim.list_contains(linter.filetypes, filetype) then + return + end + end + + if find_config(linter, buf) then + require("lint").try_lint(linter.name) + end +end + +function vim.crupest.lint_all(buf, fast) + for _, linter in pairs(linters) do + if not fast or linter.fast then + vim.crupest.lint(linter, buf) + end + end +end + +local function setup() + vim.api.nvim_create_autocmd( + { "BufReadPost", "InsertLeave", "TextChanged" }, + { + callback = function(opt) + if vim.api.nvim_get_option_value("buftype", { buf = opt.buf }) == "" then + vim.crupest.lint_all(opt.buf, true) + end + end + }) +end + +return { + setup = setup, +} diff --git a/store/config/nvim/lua/setup/plugins/telescope.lua b/store/config/nvim/lua/setup/plugins/telescope.lua new file mode 100644 index 0000000..69a69c0 --- /dev/null +++ b/store/config/nvim/lua/setup/plugins/telescope.lua @@ -0,0 +1,28 @@ +local function setup() + local builtin = require('telescope.builtin') + vim.keymap.set('n', '<leader>/', builtin.live_grep, {}) + vim.keymap.set('n', '<leader>fg', 
builtin.live_grep, {}) + vim.keymap.set('n', '<leader>ff', builtin.find_files, {}) + vim.keymap.set('n', '<leader>fb', builtin.buffers, {}) + vim.keymap.set('n', '<leader>fh', builtin.help_tags, {}) + vim.keymap.set('n', '<leader>fr', builtin.registers, {}) + vim.keymap.set('n', '<leader>fq', builtin.quickfixhistory, {}) + vim.keymap.set('n', '<leader>fm', builtin.marks, {}) + vim.keymap.set('n', '<leader>fd', builtin.diagnostics, {}) + vim.keymap.set('n', '<leader>fs', builtin.lsp_workspace_symbols, {}) + + local function all_files(opts) + opts = vim.tbl_extend('force', { + hidden = true, + no_ignore = true, + no_ignore_parent = true, + }, opts or {}) + builtin.find_files(opts) + end + + vim.keymap.set('n', '<leader>fa', all_files, {}) +end + +return { + setup = setup +} diff --git a/store/config/nvim/lua/setup/plugins/tree-sitter.lua b/store/config/nvim/lua/setup/plugins/tree-sitter.lua new file mode 100644 index 0000000..043f425 --- /dev/null +++ b/store/config/nvim/lua/setup/plugins/tree-sitter.lua @@ -0,0 +1,11 @@ +local function setup() + require'nvim-treesitter.configs'.setup { + highlight = { enable = true }, + incremental_selection = { enable = true }, + textobjects = { enable = true }, + } +end + +return { + setup = setup +} diff --git a/store/config/nvim/lua/setup/win.lua b/store/config/nvim/lua/setup/win.lua new file mode 100644 index 0000000..9aa979d --- /dev/null +++ b/store/config/nvim/lua/setup/win.lua @@ -0,0 +1,15 @@ +-- spellchecker: ignore shellcmdflag shellredir shellpipe shellquote shellxquote +local function setup() + vim.cmd([[ + let &shell = executable('pwsh') ? 'pwsh' : 'powershell' + let &shellcmdflag = '-NoLogo -ExecutionPolicy RemoteSigned -Command [Console]::InputEncoding=[Console]::OutputEncoding=[System.Text.UTF8Encoding]::new();$PSDefaultParameterValues[''Out-File:Encoding'']=''utf8'';Remove-Alias -Force -ErrorAction SilentlyContinue tee;' + let &shellredir = '2>&1 | %%{ "$_" } | Out-File %s; exit $LastExitCode' + let &shellpipe = '2>&1 | %%{ "$_" } | tee %s; exit $LastExitCode' + set shellquote= shellxquote= + ]]) + vim.opt.completeslash = 'slash' +end + +return { + setup = setup +} diff --git a/store/debian-dev/Dockerfile b/store/debian-dev/Dockerfile new file mode 100644 index 0000000..d5e25ba --- /dev/null +++ b/store/debian-dev/Dockerfile @@ -0,0 +1,22 @@ +ARG VERSION=latest +FROM debian:${VERSION} + +ARG USER= +ARG CHINA= + +ENV CRUPEST_DEBIAN_DEV_USER=${USER} +ENV CRUPEST_DEBIAN_DEV_CHINA=${CHINA} + +ADD setup /setup +RUN export DEBIAN_FRONTEND=noninteractive; \ + /setup/apt.bash && /setup/package.bash && \ + /setup/for-container.bash && \ + rm -rf /var/lib/apt/lists/* + + +ENV LANG=en_US.utf8 +USER ${USER} +WORKDIR /home/${USER} +RUN env DEBIAN_FRONTEND=noninteractive /setup/user.bash + +VOLUME [ "/home/${USER}" ] diff --git a/store/debian-dev/setup/apt.bash b/store/debian-dev/setup/apt.bash new file mode 100755 index 0000000..e841351 --- /dev/null +++ b/store/debian-dev/setup/apt.bash @@ -0,0 +1,33 @@ +#! 
/usr/bin/env bash
+
+set -e -o pipefail
+
+china_mirror="mirrors.ustc.edu.cn"
+try_files=("/etc/apt/sources.list" "/etc/apt/sources.list.d/debian.sources")
+files=()
+
+for try_file in "${try_files[@]}"; do
+    if [[ -f "$try_file" ]]; then
+        files+=("$try_file")
+    fi
+done
+
+for file in "${files[@]}"; do
+    echo "copy $file to $file.bak"
+    cp "$file" "$file.bak"
+done
+
+if [[ -n "$CRUPEST_DEBIAN_DEV_CHINA" ]]; then
+    echo "use China mirrors"
+    for file in "${files[@]}"; do
+        sed -i "s|deb.debian.org|${china_mirror}|g" "$file"
+    done
+fi
+
+echo "use https"
+apt-get update
+apt-get install -y apt-transport-https ca-certificates
+
+for file in "${files[@]}"; do
+    sed -i 's|http://|https://|g' "$file"
+done
diff --git a/store/debian-dev/setup/bashrc b/store/debian-dev/setup/bashrc
new file mode 100644
index 0000000..00c9d11
--- /dev/null
+++ b/store/debian-dev/setup/bashrc
@@ -0,0 +1,3 @@
+alias dquilt='quilt "--quiltrc=${HOME}/.quiltrc-dpkg"'
+. /usr/share/bash-completion/completions/quilt
+complete -F _quilt_completion $_quilt_complete_opt dquilt
diff --git a/store/debian-dev/setup/cmake.bash b/store/debian-dev/setup/cmake.bash
new file mode 100755
index 0000000..dd7307e
--- /dev/null
+++ b/store/debian-dev/setup/cmake.bash
@@ -0,0 +1,9 @@
+#! /usr/bin/env bash
+
+set -e -o pipefail
+
+CMAKE_VERSION=$(curl -s https://api.github.com/repos/Kitware/CMake/releases/latest | \
+    grep '"tag_name":' | sed -E 's/.*"v([^"]+)".*/\1/')
+
+curl -fsSL "https://github.com/Kitware/CMake/releases/download/v$CMAKE_VERSION/cmake-$CMAKE_VERSION-linux-x86_64.sh" | \
+    sh -s -- --skip-license --prefix=/usr
diff --git a/store/debian-dev/setup/code-server.bash b/store/debian-dev/setup/code-server.bash
new file mode 100755
index 0000000..1151dc2
--- /dev/null
+++ b/store/debian-dev/setup/code-server.bash
@@ -0,0 +1,17 @@
+#! /usr/bin/env bash
+
+set -e -o pipefail
+
+if [[ $# != 1 ]]; then
+    echo "Require exactly one argument, the password of the code server." >&2
+    exit 1
+fi
+
+curl -fsSL https://code-server.dev/install.sh | sh
+
+apt-get update && apt-get install -y argon2
+mkdir -p "${HOME}/.config/code-server"
+printf 'auth: password\nhashed-password: ' >> "${HOME}/.config/code-server/config.yaml"
+echo -n "$1" | \
+    argon2 "$(shuf -i 10000000-99999999 -n 1 --random-source /dev/urandom)" -e \
+    >> "${HOME}/.config/code-server/config.yaml"
diff --git a/store/debian-dev/setup/for-container.bash b/store/debian-dev/setup/for-container.bash
new file mode 100755
index 0000000..0aa47b0
--- /dev/null
+++ b/store/debian-dev/setup/for-container.bash
@@ -0,0 +1,14 @@
+#! /usr/bin/env bash
+
+set -e -o pipefail
+
+echo "set up locale"
+localedef -i en_US -c -f UTF-8 -A /usr/share/locale/locale.alias en_US.UTF-8
+
+echo "set up sudo"
+sed -i.bak 's|%sudo[[:space:]]\+ALL=(ALL:ALL)[[:space:]]\+ALL|%sudo ALL=(ALL:ALL) NOPASSWD: ALL|' /etc/sudoers
+
+if ! id "$CRUPEST_DEBIAN_DEV_USER" &>/dev/null; then
+    echo "create user $CRUPEST_DEBIAN_DEV_USER"
+    useradd -m -G sudo -s /usr/bin/bash "$CRUPEST_DEBIAN_DEV_USER"
+fi
diff --git a/store/debian-dev/setup/llvm.bash b/store/debian-dev/setup/llvm.bash
new file mode 100755
index 0000000..ca6d4bf
--- /dev/null
+++ b/store/debian-dev/setup/llvm.bash
@@ -0,0 +1,11 @@
+#!
/usr/bin/env bash + +set -e -o pipefail + +if [[ -n "$CRUPEST_DEBIAN_DEV_CHINA" ]]; then + base_url=https://mirrors.tuna.tsinghua.edu.cn/llvm-apt +else + base_url=https://apt.llvm.org +fi + +curl -fsSL "$base_url/llvm.sh" | sh -s -- all -m "$base_url" diff --git a/store/debian-dev/setup/package.bash b/store/debian-dev/setup/package.bash new file mode 100755 index 0000000..5ad7b7a --- /dev/null +++ b/store/debian-dev/setup/package.bash @@ -0,0 +1,10 @@ +#! /usr/bin/env bash + +set -e -o pipefail + +echo "install packages" +apt-get update +apt-get install -y \ + locales lsb-release software-properties-common \ + sudo procps bash-completion man less gnupg curl wget \ + vim build-essential git devscripts debhelper quilt diff --git a/services/docker/debian-dev/bootstrap/home/.quiltrc-dpkg b/store/debian-dev/setup/quiltrc-dpkg index e8fc3c5..e8fc3c5 100644 --- a/services/docker/debian-dev/bootstrap/home/.quiltrc-dpkg +++ b/store/debian-dev/setup/quiltrc-dpkg diff --git a/store/debian-dev/setup/user.bash b/store/debian-dev/setup/user.bash new file mode 100755 index 0000000..4e13804 --- /dev/null +++ b/store/debian-dev/setup/user.bash @@ -0,0 +1,11 @@ +#! /usr/bin/env bash + +set -e -o pipefail + +base_dir="$(dirname "$0")" +dot_files=("bashrc" "quiltrc-dpkg") + +for file in "${dot_files[@]}"; do + echo "copy $base_dir/$file $HOME/.$file" + cp "$base_dir/$file" "$HOME/.$file" +done diff --git a/store/home/bashrc b/store/home/bashrc deleted file mode 100644 index 7d31f23..0000000 --- a/store/home/bashrc +++ /dev/null @@ -1,8 +0,0 @@ -set-proxy() { - export http_proxy="http://127.0.0.1:7897" - export https_proxy="http://127.0.0.1:7897" -} - -unset-proxy() { - unset http_proxy https_proxy -} diff --git a/store/home/bin/neovide-listen b/store/home/bin/neovide-listen deleted file mode 100755 index 3bcc7da..0000000 --- a/store/home/bin/neovide-listen +++ /dev/null @@ -1,49 +0,0 @@ -#!/usr/bin/env bash - -export CRU_NVIM_SERVER=${CRU_NVIM_SERVER:-/tmp/nvimsocket} - -args=() - -self_built_nvim="$HOME/codes/neovim/build/bin/nvim" -self_built_nvim_runtime="$HOME/codes/neovim/runtime" -if [[ -z "$CRU_NVIM" ]] && [[ -e "$self_built_nvim" ]]; then - echo "Found self-built neovim at $CRU_NVIM" - CRU_NVIM="$self_built_nvim" - CRU_VIMRUNTIME="$self_built_nvim_runtime" -fi - -[[ -z "$CRU_NVIM" ]] || args=("${args[@]}" "--neovim-bin" "$CRU_NVIM") -export CRU_NVIM=${CRU_NVIM:-nvim} -[[ -z "$CRU_VIMRUNTIME" ]] || export VIMRUNTIME="$CRU_VIMRUNTIME" - -self_built_neovide="$HOME/codes/neovide/target/release/neovide" -if [[ -z "$CRU_NEOVIDE" ]] && [[ -e "$self_built_neovide" ]]; then - echo "Found self-build of neovide at $self_built_neovide" - CRU_NEOVIDE="$self_built_neovide" -fi -export CRU_NEOVIDE=${CRU_NEOVIDE:-neovide} - -listen_added=0 -for arg in "$@"; do - args=("${args[@]}" "$arg") - if [[ "$arg" == "--" ]]; then - args=("${args[@]}" "--listen" "$CRU_NVIM_SERVER") - listen_added=1 - fi -done - -if [[ $listen_added = 0 ]]; then - args=("${args[@]}" "--" "--listen" "$CRU_NVIM_SERVER") -fi - -if which nvr > /dev/null; then - echo "Found nvr, set VISUAL EDITOR GIT_EDITOR to nvr-wait" - export VISUAL="nvr-wait" - export EDITOR="$VISUAL" - export GIT_EDITOR="$VISUAL" -fi - -args=("$CRU_NEOVIDE" "${args[@]}") -echo "Full command is ${args[@]}" -exec "${args[@]}" - diff --git a/store/home/bin/nvr-wait b/store/home/bin/nvr-wait deleted file mode 100755 index 410d42d..0000000 --- a/store/home/bin/nvr-wait +++ /dev/null @@ -1,4 +0,0 @@ -#!/bin/sh - -export CRU_NVIM_SERVER=${CRU_NVIM_SERVER:-/tmp/nvimsocket} -exec nvr 
--nostart --servername "$CRU_NVIM_SERVER" -cc split --remote-wait "$@" diff --git a/store/home/config/nvim/.gitignore b/store/home/config/nvim/.gitignore deleted file mode 100644 index 722d5e7..0000000 --- a/store/home/config/nvim/.gitignore +++ /dev/null @@ -1 +0,0 @@ -.vscode diff --git a/store/home/config/nvim/init.lua b/store/home/config/nvim/init.lua deleted file mode 100644 index d9349e1..0000000 --- a/store/home/config/nvim/init.lua +++ /dev/null @@ -1,61 +0,0 @@ -if vim.g.neovide then - -- spellchecker: disable-next-line - vim.opt.guifont = "FiraCode Nerd Font"; - vim.g.neovide_normal_opacity = 0.95; - vim.g.neovide_input_ime = false; - vim.g.neovide_cursor_animate_in_insert_mode = false - vim.g.neovide_input_macos_option_key_is_meta = 'only_left' -end - -local is_win = vim.fn.has("win32") ~= 0 - --- spellchecker: disable -if is_win then - vim.cmd([[ - let &shell = executable('pwsh') ? 'pwsh' : 'powershell' - let &shellcmdflag = '-NoLogo -ExecutionPolicy RemoteSigned -Command [Console]::InputEncoding=[Console]::OutputEncoding=[System.Text.UTF8Encoding]::new();$PSDefaultParameterValues[''Out-File:Encoding'']=''utf8'';Remove-Alias -Force -ErrorAction SilentlyContinue tee;' - let &shellredir = '2>&1 | %%{ "$_" } | Out-File %s; exit $LastExitCode' - let &shellpipe = '2>&1 | %%{ "$_" } | tee %s; exit $LastExitCode' - set shellquote= shellxquote= - ]]) - vim.opt.completeslash = 'slash' -end --- spellchecker: enable - --- spellchecker: disable -vim.opt.termguicolors = true; -vim.opt.fileformats = "unix,dos"; -vim.opt.softtabstop = 4; -vim.opt.shiftwidth = 4; -vim.opt.expandtab = true; -vim.opt.wrap = false; -vim.opt.number = true; --- spellchecker: enable - -vim.g.load_doxygen_syntax = true; -vim.g.doxygen_javadoc_autobrief = false; - --- Init lazy.nvim -local lazy_path = vim.fn.stdpath("data") .. 
"/lazy/lazy.nvim" -if not vim.uv.fs_stat(lazy_path) then - vim.fn.system({ - "git", - "clone", - "--filter=blob:none", - "https://github.com/folke/lazy.nvim.git", - "--branch=stable", -- latest stable release - lazy_path, - }) -end -vim.opt.rtp:prepend(lazy_path) - --- Use lazy.nvim -require("lazy").setup("plugins") - -vim.cmd("colorscheme catppuccin-macchiato") - -require("crupest.nvim.lsp").setup() -require("crupest.nvim.plugins").setup() -require("crupest.nvim.keymap").setup() - -vim.cmd("autocmd FileType gitcommit,gitrebase,gitconfig set bufhidden=delete") diff --git a/store/home/config/nvim/lazy-lock.json b/store/home/config/nvim/lazy-lock.json deleted file mode 100644 index 76f55b8..0000000 --- a/store/home/config/nvim/lazy-lock.json +++ /dev/null @@ -1,20 +0,0 @@ -{ - "LuaSnip": { "branch": "master", "commit": "c9b9a22904c97d0eb69ccb9bab76037838326817" }, - "catppuccin": { "branch": "main", "commit": "5b5e3aef9ad7af84f463d17b5479f06b87d5c429" }, - "cmp-buffer": { "branch": "main", "commit": "3022dbc9166796b644a841a02de8dd1cc1d311fa" }, - "cmp-cmdline": { "branch": "main", "commit": "d250c63aa13ead745e3a40f61fdd3470efde3923" }, - "cmp-nvim-lsp": { "branch": "main", "commit": "99290b3ec1322070bcfb9e846450a46f6efa50f0" }, - "cmp-path": { "branch": "main", "commit": "91ff86cd9c29299a64f968ebb45846c485725f23" }, - "cmp_luasnip": { "branch": "master", "commit": "98d9cb5c2c38532bd9bdb481067b20fea8f32e90" }, - "gitsigns.nvim": { "branch": "main", "commit": "4c40357994f386e72be92a46f41fc1664c84c87d" }, - "lazy.nvim": { "branch": "main", "commit": "6c3bda4aca61a13a9c63f1c1d1b16b9d3be90d7a" }, - "lualine.nvim": { "branch": "master", "commit": "f4f791f67e70d378a754d02da068231d2352e5bc" }, - "nvim-autopairs": { "branch": "master", "commit": "68f0e5c3dab23261a945272032ee6700af86227a" }, - "nvim-cmp": { "branch": "main", "commit": "c27370703e798666486e3064b64d59eaf4bdc6d5" }, - "nvim-lint": { "branch": "master", "commit": "6e9dd545a1af204c4022a8fcd99727ea41ffdcc8" }, - "nvim-lspconfig": { "branch": "master", "commit": "84e0cd5a3c58e88ef706fdf4a1eed59ded1d3ce2" }, - "nvim-tree.lua": { "branch": "master", "commit": "c09ff35de503a41fa62465c6b4ae72d96e7a7ce4" }, - "nvim-web-devicons": { "branch": "master", "commit": "ab4cfee554e501f497bce0856788d43cf2eb93d7" }, - "plenary.nvim": { "branch": "master", "commit": "857c5ac632080dba10aae49dba902ce3abf91b35" }, - "telescope.nvim": { "branch": "master", "commit": "814f102cd1da3dc78c7d2f20f2ef3ed3cdf0e6e4" } -} diff --git a/store/home/config/nvim/lua/crupest/nvim/keymap.lua b/store/home/config/nvim/lua/crupest/nvim/keymap.lua deleted file mode 100644 index 624c04c..0000000 --- a/store/home/config/nvim/lua/crupest/nvim/keymap.lua +++ /dev/null @@ -1,9 +0,0 @@ -local function setup() - vim.keymap.set("n", "<c-tab>", "<cmd>bnext<cr>") - vim.keymap.set("n", "<c-s-tab>", "<cmd>bNext<cr>") - vim.keymap.set("n", "<esc>", require("crupest.utils.nvim").close_float) -end - -return { - setup = setup -} diff --git a/store/home/config/nvim/lua/crupest/nvim/lsp/c.lua b/store/home/config/nvim/lua/crupest/nvim/lsp/c.lua deleted file mode 100644 index 6080510..0000000 --- a/store/home/config/nvim/lua/crupest/nvim/lsp/c.lua +++ /dev/null @@ -1,25 +0,0 @@ -local lspconfig = require("lspconfig") - -local brew_clangd_path = "/usr/local/opt/llvm/bin/clangd" - -local function setup() - local clangd = "clangd" - - if vim.uv.fs_stat(brew_clangd_path) ~= nil then - clangd = brew_clangd_path - end - - -- setup lsp clangd - lspconfig.clangd.setup { - cmd = { clangd }, - on_attach = 
function(_, bufnr) - vim.keymap.set('n', 'grs', "<cmd>ClangdSwitchSourceHeader<cr>", { - buffer = bufnr - }) - end - } -end - -return { - setup = setup -} diff --git a/store/home/config/nvim/lua/crupest/nvim/lsp/init.lua b/store/home/config/nvim/lua/crupest/nvim/lsp/init.lua deleted file mode 100644 index ba11087..0000000 --- a/store/home/config/nvim/lua/crupest/nvim/lsp/init.lua +++ /dev/null @@ -1,25 +0,0 @@ -local lspconfig = require("lspconfig") -local cmp_nvim_lsp = require("cmp_nvim_lsp") -local cmp_default_caps = cmp_nvim_lsp.default_capabilities() - -local lspconfig_default_caps = lspconfig.util.default_config.capabilities - -lspconfig.util.default_config = vim.tbl_extend( - "force", - lspconfig.util.default_config, - { - capabilities = vim.tbl_extend("force", lspconfig_default_caps, cmp_default_caps), - autostart = false, - }) - -local function setup() - lspconfig.cmake.setup {} - lspconfig.bashls.setup {} - require("crupest.nvim.lsp.c").setup() - require("crupest.nvim.lsp.lua").setup() -end - - -return { - setup = setup -} diff --git a/store/home/config/nvim/lua/crupest/nvim/lsp/lua.lua b/store/home/config/nvim/lua/crupest/nvim/lsp/lua.lua deleted file mode 100644 index 93aa503..0000000 --- a/store/home/config/nvim/lua/crupest/nvim/lsp/lua.lua +++ /dev/null @@ -1,29 +0,0 @@ -local lspconfig = require("lspconfig") - -local function setup() - lspconfig.lua_ls.setup { - settings = { - Lua = { - runtime = { - version = "LuaJIT" - }, - diagnostics = { - globals = { "vim" }, - }, - workspace = { - library = { - [vim.fn.expand "$VIMRUNTIME/lua"] = true, - [vim.fn.expand "$VIMRUNTIME/lua/vim/lsp"] = true, - [vim.fn.stdpath "data" .. "/lazy/lazy.nvim/lua/lazy"] = true, - }, - maxPreload = 100000, - preloadFileSize = 10000, - }, - }, - }, - } -end - -return { - setup = setup -} diff --git a/store/home/config/nvim/lua/crupest/nvim/plugins/gitsigns.lua b/store/home/config/nvim/lua/crupest/nvim/plugins/gitsigns.lua deleted file mode 100644 index 220c91a..0000000 --- a/store/home/config/nvim/lua/crupest/nvim/plugins/gitsigns.lua +++ /dev/null @@ -1,51 +0,0 @@ -local function setup() - local gitsigns = require('gitsigns') - gitsigns.setup { - on_attach = function(bufnr) - local function map(mode, l, r, opts) - opts = opts or {} - opts.buffer = bufnr - vim.keymap.set(mode, l, r, opts) - end - - -- Navigation - map('n', ']c', function() - if vim.wo.diff then - vim.cmd.normal({ ']c', bang = true }) - else - gitsigns.nav_hunk('next') - end - end) - - map('n', '[c', function() - if vim.wo.diff then - vim.cmd.normal({ '[c', bang = true }) - else - gitsigns.nav_hunk('prev') - end - end) - - -- Actions - map('n', '<leader>hs', gitsigns.stage_hunk) - map('n', '<leader>hr', gitsigns.reset_hunk) - map('v', '<leader>hs', function() gitsigns.stage_hunk { vim.fn.line('.'), vim.fn.line('v') } end) - map('v', '<leader>hr', function() gitsigns.reset_hunk { vim.fn.line('.'), vim.fn.line('v') } end) - map('n', '<leader>hS', gitsigns.stage_buffer) - map('n', '<leader>hu', gitsigns.undo_stage_hunk) - map('n', '<leader>hR', gitsigns.reset_buffer) - map('n', '<leader>hp', gitsigns.preview_hunk) - map('n', '<leader>hb', function() gitsigns.blame_line { full = true } end) - map('n', '<leader>tb', gitsigns.toggle_current_line_blame) - map('n', '<leader>hd', gitsigns.diffthis) - map('n', '<leader>hD', function() gitsigns.diffthis('~') end) - map('n', '<leader>td', gitsigns.toggle_deleted) - - -- Text object - map({ 'o', 'x' }, 'ih', ':<C-U>Gitsigns select_hunk<CR>') - end - } -end - -return { - setup = setup -} diff 
--git a/store/home/config/nvim/lua/crupest/nvim/plugins/init.lua b/store/home/config/nvim/lua/crupest/nvim/plugins/init.lua deleted file mode 100644 index 24e0c2e..0000000 --- a/store/home/config/nvim/lua/crupest/nvim/plugins/init.lua +++ /dev/null @@ -1,12 +0,0 @@ -local function setup() - require("crupest.nvim.plugins.lint").setup() - require("crupest.nvim.plugins.snip").setup() - require("crupest.nvim.plugins.cmp").setup() - require("crupest.nvim.plugins.telescope").setup() - require("crupest.nvim.plugins.gitsigns").setup() - require("crupest.nvim.plugins.others").setup() -end - -return { - setup = setup -} diff --git a/store/home/config/nvim/lua/crupest/nvim/plugins/lint.lua b/store/home/config/nvim/lua/crupest/nvim/plugins/lint.lua deleted file mode 100644 index e2dff1b..0000000 --- a/store/home/config/nvim/lua/crupest/nvim/plugins/lint.lua +++ /dev/null @@ -1,85 +0,0 @@ -local lint = require("lint") - -local cspell = { - name = "cspell", - config_patterns = { - ".cspell.json", - "cspell.json", - ".cSpell.json", - "cSpell.json", - "cspell.config.js", - "cspell.config.cjs", - "cspell.config.json", - "cspell.config.yaml", - "cspell.config.yml", - "cspell.yaml", - "cspell.yml", - }, - fast = true, - initialized = false -} - -local linters = { cspell } - -local linter_names = vim.tbl_map(function(l) return l.name end, linters) - -local function cru_lint(linter, opt) - opt = opt or {} - - if not opt.buf then - opt.buf = 0 - end - - if 0 ~= #vim.fs.find(linter.config_patterns, { - path = vim.api.nvim_buf_get_name(opt.buf), upward = true }) then - if not linter.initialized then - vim.diagnostic.config({ virtual_text = true }, lint.get_namespace(linter.name)) - linter.initialized = true - end - lint.try_lint(linter.name) - end -end - -local function cru_lint_one(name, opt) - for _, linter in ipairs(linters) do - if linter.name == name then - cru_lint(linter, opt) - return - end - end - vim.notify("No linter named " .. name .. 
" is configured.", vim.log.levels.ERROR, {}) -end - -local function cru_lint_all(opt, fast) - for _, linter in ipairs(linters) do - if not fast or linter.fast then - cru_lint(linter, opt) - end - end -end - -local function cru_lint_all_fast(opt) - local buf = opt.buf - if vim.api.nvim_get_option_value("buftype", { buf = buf }) == "" then - cru_lint_all(opt, true) - end -end - -local function setup() - vim.api.nvim_create_autocmd({ "BufReadPost", "InsertLeave", "TextChanged" }, { callback = cru_lint_all_fast }) - - local function cru_lint_cmd(opt) - if #opt.args == 0 then - cru_lint_all(opt, false) - else - cru_lint_one(opt.args, opt) - end - end - - vim.api.nvim_create_user_command("CruLint", cru_lint_cmd, - { nargs = '?', complete = function() return linter_names end }) -end - -return { - setup = setup, -} diff --git a/store/home/config/nvim/lua/crupest/nvim/plugins/others.lua b/store/home/config/nvim/lua/crupest/nvim/plugins/others.lua deleted file mode 100644 index 2ef0d75..0000000 --- a/store/home/config/nvim/lua/crupest/nvim/plugins/others.lua +++ /dev/null @@ -1,9 +0,0 @@ -local function setup() - require('lualine').setup {} - require("nvim-tree").setup {} - require("nvim-autopairs").setup {} -end - -return { - setup = setup -} diff --git a/store/home/config/nvim/lua/crupest/nvim/plugins/snip.lua b/store/home/config/nvim/lua/crupest/nvim/plugins/snip.lua deleted file mode 100644 index 78ed2eb..0000000 --- a/store/home/config/nvim/lua/crupest/nvim/plugins/snip.lua +++ /dev/null @@ -1,75 +0,0 @@ ---- spellchecker: disable - -local luasnip = require("luasnip") - -local ls = luasnip --- some shorthands... -local s = ls.snippet -local sn = ls.snippet_node -local t = ls.text_node -local i = ls.insert_node -local f = ls.function_node -local c = ls.choice_node -local d = ls.dynamic_node -local r = ls.restore_node -local l = require("luasnip.extras").lambda -local rep = require("luasnip.extras").rep -local p = require("luasnip.extras").partial -local m = require("luasnip.extras").match -local n = require("luasnip.extras").nonempty -local dl = require("luasnip.extras").dynamic_lambda -local fmt = require("luasnip.extras.fmt").fmt -local fmta = require("luasnip.extras.fmt").fmta -local types = require("luasnip.util.types") -local conds = require("luasnip.extras.conditions") -local conds_expand = require("luasnip.extras.conditions.expand") - -local function copy(args) - return args[1] -end - -local function setup() - vim.keymap.set({ "i", "s" }, "<C-L>", function() luasnip.jump(1) end, { silent = true }) - vim.keymap.set({ "i", "s" }, "<C-J>", function() luasnip.jump(-1) end, { silent = true }) - - vim.keymap.set({ "i", "s" }, "<C-E>", function() - if luasnip.choice_active() then - luasnip.change_choice(1) - end - end, { silent = true }) - - luasnip.add_snippets("cpp", { - s("cs", { - i(1, "classname"), - t("::"), - f(copy, 1), - t("("), - i(0), - t(") { }") - }), - - s("ds", { - i(1, "classname"), - t("::~"), - f(copy, 1), - t("() { }") - }), - - s("csds", { - i(1, "classname"), - t("::"), - f(copy, 1), - t("("), - i(0), - t({ ") { }", "", "" }), - f(copy, 1), - t("::~"), - f(copy, 1), - t("() { }") - }), - }) -end - -return { - setup = setup, -} diff --git a/store/home/config/nvim/lua/crupest/nvim/plugins/telescope.lua b/store/home/config/nvim/lua/crupest/nvim/plugins/telescope.lua deleted file mode 100644 index d68b7f2..0000000 --- a/store/home/config/nvim/lua/crupest/nvim/plugins/telescope.lua +++ /dev/null @@ -1,11 +0,0 @@ -local function setup() - local builtin = 
require('telescope.builtin') - vim.keymap.set('n', '<leader>ff', builtin.find_files, {}) - vim.keymap.set('n', '<leader>fg', builtin.live_grep, {}) - vim.keymap.set('n', '<leader>fb', builtin.buffers, {}) - vim.keymap.set('n', '<leader>fh', builtin.help_tags, {}) -end - -return { - setup = setup -} diff --git a/store/home/config/nvim/lua/crupest/utils/find.lua b/store/home/config/nvim/lua/crupest/utils/find.lua deleted file mode 100644 index dd1f663..0000000 --- a/store/home/config/nvim/lua/crupest/utils/find.lua +++ /dev/null @@ -1,101 +0,0 @@ -local is_win = vim.fn.has("win32") ~= 0 - -local M = {} - -local windows_exe_ext = { "exe", "bat", "cmd", "ps1" } - ---- Find real path (with ext) for an executable. ---- @param dir string ---- @param name string | string[] ---- @return string | nil -function M.find_exe_file(dir, name) - if type(name) == "string" then - name = { name } - end - for _, n in ipairs(name) do - if vim.uv.fs_stat(vim.fs.joinpath(dir, n)) ~= nil then - return n - end - if is_win then - for _, ext in ipairs(windows_exe_ext) do - if vim.uv.fs_stat(vim.fs.joinpath(dir, n .. "." .. ext)) ~= nil then - return n .. "." .. ext - end - end - end - end - return nil -end - ---- Walk up until found an executable in node_modules. ---- @param path string ---- @param name string ---- @return string | nil exe_path Path to the executable. -function M.find_node_modules_exe(path, name) - local bin_dirs = vim.fs.find("node_modules/.bin", { path = path, upward = true, type = "directory" }) - if #bin_dirs == 0 then return nil end - local exe = M.find_exe_file(bin_dirs[1], name) - return exe and vim.fs.joinpath(bin_dirs[1], exe) -end - ---- Find executable in PATH. ---- @param name string ---- @return string | nil -function M.find_global_exe(name) - local exe = vim.fn.exepath(name) - if exe == "" then return nil end - return exe -end - ---- @alias ExePlace "node_modules" | "global" ---- @param path string ---- @param name string ---- @param places ExePlace[] ---- @return string | nil, ExePlace? -function M.find_exe(path, name, places) - for _, place in ipairs(places) do - if place == "node_modules" then - local r = M.find_node_modules_exe(path, name) - if r then return r, "node_modules" end - end - if place == "global" then - local r = M.find_global_exe(name) - if r then return r, "global" end - end - end - return nil, nil -end - ---- @alias FindExeForBufOpts { name: string, exe: string?, places: ExePlace[], config_files: string[]?, filetypes: string[]? } ---- @alias FindExeForBufResult { name: string, file: string, exe: string, exe_path: string, place: ExePlace, config_file: string?, filetype: string? 
} ---- @param buf number ---- @param opts FindExeForBufOpts ---- @return FindExeForBufResult | nil -function M.find_exe_for_buf(buf, opts) - local r = {} --- @type FindExeForBufResult - r.name = opts.name - r.file = vim.api.nvim_buf_get_name(buf) - r.exe = opts.exe or opts.name - - if opts.filetypes then - r.filetype = vim.api.nvim_get_option_value("filetype", { scope = "buffer", buf = buf }) - if not vim.tbl_contains(opts.filetypes, r.filetype) then return nil end - end - - if opts.config_files then - local config_file_list = vim.fs.find(opts.config_files, { path = r.file, upward = true }) - if #config_file_list == 0 then return nil end - r.config_file = config_file_list[1] - end - - local exe_path, place = M.find_exe(r.file, r.exe, opts.places) - if exe_path == nil then return nil end - r.exe_path = exe_path - - --- @cast place ExePlace - r.place = place - - return r -end - -return M diff --git a/store/home/config/nvim/lua/crupest/utils/nvim.lua b/store/home/config/nvim/lua/crupest/utils/nvim.lua deleted file mode 100644 index 4477ecc..0000000 --- a/store/home/config/nvim/lua/crupest/utils/nvim.lua +++ /dev/null @@ -1,12 +0,0 @@ -local M = {} - -function M.close_float() - local wins = vim.api.nvim_list_wins() - for _, v in ipairs(wins) do - if vim.api.nvim_win_get_config(v).relative ~= '' then - vim.api.nvim_win_close(v, false) - end - end -end - -return M diff --git a/store/home/config/nvim/lua/plugins.lua b/store/home/config/nvim/lua/plugins.lua deleted file mode 100644 index b689559..0000000 --- a/store/home/config/nvim/lua/plugins.lua +++ /dev/null @@ -1,30 +0,0 @@ --- spellchecker: disable -return { - { "catppuccin/nvim", name = "catppuccin", priority = 1000 }, - "neovim/nvim-lspconfig", - "L3MON4D3/LuaSnip", - "hrsh7th/nvim-cmp", - "hrsh7th/cmp-nvim-lsp", - "hrsh7th/cmp-buffer", - "hrsh7th/cmp-path", - "hrsh7th/cmp-cmdline", - "saadparwaiz1/cmp_luasnip", - { - "nvim-tree/nvim-tree.lua", - lazy = false, - dependencies = { - "nvim-tree/nvim-web-devicons", - }, - }, - { - "nvim-lualine/lualine.nvim", - dependencies = { 'nvim-tree/nvim-web-devicons' } - }, - { - "nvim-telescope/telescope.nvim", - dependencies = { 'nvim-lua/plenary.nvim' } - }, - "windwp/nvim-autopairs", - "mfussenegger/nvim-lint", - "lewis6991/gitsigns.nvim", -} diff --git a/store/misc/magic/extend-script.js b/store/misc/magic/extend-script.js deleted file mode 100644 index 519cee9..0000000 --- a/store/misc/magic/extend-script.js +++ /dev/null @@ -1,21 +0,0 @@ -// Define main function (script entry) - -function main(config, profileName) { - delete config.dns; - delete config.tun; - delete config.hosts; - - delete config["cfw-latency-timeout"] - delete config["cfw-latency-url"] - delete config["cfw-conn-break-strategy"] - - config["proxies"] = [config["crupest-proxy"], ...config["proxies"]] - delete config["crupest-proxy"] - - select_proxy = { name: "node-select", type: "select", proxies: ["auto-select", ...config.proxies.map(p => p.name)] } - auto_select_proxy = config["crupest-auto-select"] - config["proxy-groups"] = [ select_proxy, auto_select_proxy ] - delete config["crupest-auto-select"] - - return config; -} diff --git a/store/misc/magic/extend.yaml b/store/misc/magic/extend.yaml deleted file mode 100644 index 3006f08..0000000 --- a/store/misc/magic/extend.yaml +++ /dev/null @@ -1,65 +0,0 @@ -# Profile Enhancement Merge Template for Clash Verge - -profile: - store-selected: true - -rules: - - "GEOSITE,github,node-select" - - "GEOSITE,google,node-select" - - "GEOSITE,youtube,node-select" - - 
"GEOSITE,twitter,node-select" - - "GEOSITE,facebook,node-select" - - "GEOSITE,discord,node-select" - - "GEOSITE,reddit,node-select" - - "GEOSITE,twitch,node-select" - - "GEOSITE,quora,node-select" - - "GEOSITE,telegram,node-select" - - "GEOSITE,imgur,node-select" - - "GEOSITE,stackexchange,node-select" - - "GEOSITE,onedrive,node-select" - - - "GEOSITE,duckduckgo,node-select" - - "GEOSITE,wikimedia,node-select" - - "GEOSITE,gitbook,node-select" - - "GEOSITE,gitlab,node-select" - - "GEOSITE,creativecommons,node-select" - - "GEOSITE,archive,node-select" - - "GEOSITE,matrix,node-select" - - "GEOSITE,tor,node-select" - - - "GEOSITE,python,node-select" - - "GEOSITE,ruby,node-select" - - "GEOSITE,rust,node-select" - - "GEOSITE,nodejs,node-select" - - "GEOSITE,npmjs,node-select" - - "GEOSITE,qt,node-select" - - "GEOSITE,docker,node-select" - - "GEOSITE,v2ray,node-select" - - "GEOSITE,homebrew,node-select" - - "GEOSITE,bootstrap,node-select" - - - "GEOSITE,heroku,node-select" - - "GEOSITE,vercel,node-select" - - - "GEOSITE,ieee,node-select" - - "GEOSITE,sci-hub,node-select" - - "GEOSITE,libgen,node-select" - - - "DOMAIN-SUFFIX,gnu.org,node-select" - - "DOMAIN-SUFFIX,nongnu.org,node-select" - - "DOMAIN-SUFFIX,ietf.org,node-select" - - "DOMAIN-SUFFIX,packagist.org,node-select" - - "DOMAIN-SUFFIX,metacubex.one,node-select" - - "MATCH,DIRECT" - -crupest-proxy: - ... - -crupest-auto-select: - name: "auto-select" - type: url-test - interval: 1800 - include-all-proxies: true - url: 'https://www.gstatic.com/generate_204' - filter: "日本|新加坡|香港|台湾|美国" - expected-status: 204 diff --git a/python/.gitignore b/store/works/python/.gitignore index f5833b1..f5833b1 100644 --- a/python/.gitignore +++ b/store/works/python/.gitignore diff --git a/python/.python-version b/store/works/python/.python-version index 2c07333..2c07333 100644 --- a/python/.python-version +++ b/store/works/python/.python-version diff --git a/python/cru/__init__.py b/store/works/python/cru/__init__.py index 17799a9..17799a9 100644 --- a/python/cru/__init__.py +++ b/store/works/python/cru/__init__.py diff --git a/python/cru/_base.py b/store/works/python/cru/_base.py index 2599d8f..2599d8f 100644 --- a/python/cru/_base.py +++ b/store/works/python/cru/_base.py diff --git a/python/cru/_const.py b/store/works/python/cru/_const.py index 8246b35..8246b35 100644 --- a/python/cru/_const.py +++ b/store/works/python/cru/_const.py diff --git a/python/cru/_decorator.py b/store/works/python/cru/_decorator.py index 137fc05..137fc05 100644 --- a/python/cru/_decorator.py +++ b/store/works/python/cru/_decorator.py diff --git a/python/cru/_error.py b/store/works/python/cru/_error.py index e53c787..e53c787 100644 --- a/python/cru/_error.py +++ b/store/works/python/cru/_error.py diff --git a/python/cru/_event.py b/store/works/python/cru/_event.py index 51a794c..51a794c 100644 --- a/python/cru/_event.py +++ b/store/works/python/cru/_event.py diff --git a/python/cru/_func.py b/store/works/python/cru/_func.py index fc57802..fc57802 100644 --- a/python/cru/_func.py +++ b/store/works/python/cru/_func.py diff --git a/python/cru/_helper.py b/store/works/python/cru/_helper.py index 43baf46..43baf46 100644 --- a/python/cru/_helper.py +++ b/store/works/python/cru/_helper.py diff --git a/python/cru/_iter.py b/store/works/python/cru/_iter.py index f9683ca..f9683ca 100644 --- a/python/cru/_iter.py +++ b/store/works/python/cru/_iter.py diff --git a/python/cru/_type.py b/store/works/python/cru/_type.py index 1f81da3..1f81da3 100644 --- a/python/cru/_type.py +++ 
b/store/works/python/cru/_type.py diff --git a/python/cru/attr.py b/store/works/python/cru/attr.py index d4cc86a..d4cc86a 100644 --- a/python/cru/attr.py +++ b/store/works/python/cru/attr.py diff --git a/python/cru/config.py b/store/works/python/cru/config.py index 0f6f0d0..0f6f0d0 100644 --- a/python/cru/config.py +++ b/store/works/python/cru/config.py diff --git a/python/cru/list.py b/store/works/python/cru/list.py index 216a561..216a561 100644 --- a/python/cru/list.py +++ b/store/works/python/cru/list.py diff --git a/python/cru/parsing.py b/store/works/python/cru/parsing.py index 0e9239d..0e9239d 100644 --- a/python/cru/parsing.py +++ b/store/works/python/cru/parsing.py diff --git a/python/cru/service/__init__.py b/store/works/python/cru/service/__init__.py index e69de29..e69de29 100644 --- a/python/cru/service/__init__.py +++ b/store/works/python/cru/service/__init__.py diff --git a/python/cru/service/__main__.py b/store/works/python/cru/service/__main__.py index 2a0268b..2a0268b 100644 --- a/python/cru/service/__main__.py +++ b/store/works/python/cru/service/__main__.py diff --git a/python/cru/service/_app.py b/store/works/python/cru/service/_app.py index b4c6271..b4c6271 100644 --- a/python/cru/service/_app.py +++ b/store/works/python/cru/service/_app.py diff --git a/python/cru/service/_base.py b/store/works/python/cru/service/_base.py index e1eee70..e1eee70 100644 --- a/python/cru/service/_base.py +++ b/store/works/python/cru/service/_base.py diff --git a/python/cru/service/_gen_cmd.py b/store/works/python/cru/service/_gen_cmd.py index f51d65f..f51d65f 100644 --- a/python/cru/service/_gen_cmd.py +++ b/store/works/python/cru/service/_gen_cmd.py diff --git a/python/cru/service/_nginx.py b/store/works/python/cru/service/_nginx.py index 87cff6d..87cff6d 100644 --- a/python/cru/service/_nginx.py +++ b/store/works/python/cru/service/_nginx.py diff --git a/python/cru/service/_template.py b/store/works/python/cru/service/_template.py index 22c1d21..22c1d21 100644 --- a/python/cru/service/_template.py +++ b/store/works/python/cru/service/_template.py diff --git a/python/cru/system.py b/store/works/python/cru/system.py index f321717..f321717 100644 --- a/python/cru/system.py +++ b/store/works/python/cru/system.py diff --git a/python/cru/template.py b/store/works/python/cru/template.py index 3a70337..3a70337 100644 --- a/python/cru/template.py +++ b/store/works/python/cru/template.py diff --git a/python/cru/tool.py b/store/works/python/cru/tool.py index 377f5d7..377f5d7 100644 --- a/python/cru/tool.py +++ b/store/works/python/cru/tool.py diff --git a/python/cru/value.py b/store/works/python/cru/value.py index 9c03219..9c03219 100644 --- a/python/cru/value.py +++ b/store/works/python/cru/value.py diff --git a/python/poetry.lock b/store/works/python/poetry.lock index 4338200..4338200 100644 --- a/python/poetry.lock +++ b/store/works/python/poetry.lock diff --git a/python/pyproject.toml b/store/works/python/pyproject.toml index 28c753e..28c753e 100644 --- a/python/pyproject.toml +++ b/store/works/python/pyproject.toml diff --git a/store/works/solutions/leetcode/rust/Cargo.toml b/store/works/solutions/leetcode/rust/Cargo.toml index a87486e..c79b5fd 100644 --- a/store/works/solutions/leetcode/rust/Cargo.toml +++ b/store/works/solutions/leetcode/rust/Cargo.toml @@ -1,7 +1,7 @@ [package]
name = "crupest-leetcode"
version = "0.1.0"
-authors = ["杨宇千 <crupest@outlook.com>"]
+authors = ["crupest <crupest@outlook.com>"]
edition = "2018"
# See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html
diff --git a/www/.markdownlint.json b/www/.markdownlint.json new file mode 100644 index 0000000..12bc65e --- /dev/null +++ b/www/.markdownlint.json @@ -0,0 +1,5 @@ +{ + "$schema": "https://raw.githubusercontent.com/DavidAnson/markdownlint/main/schema/markdownlint-config-schema.json", + "default": true, + "MD013": false +} diff --git a/www/assets/res/css/article.css b/www/assets/res/css/article.css index 9629597..090231b 100644 --- a/www/assets/res/css/article.css +++ b/www/assets/res/css/article.css @@ -2,9 +2,6 @@ font-size: 0.95em; padding-inline: 0.5em; padding-block: 0.5em; - margin-block-end: 0.5em; - border-radius: 3px; - background-color: hsl(33, 100%, 98%); & > p { font-size: 0.9em; @@ -20,8 +17,8 @@ & > .date { font-size: small; + margin-top: 0.25em; float: right; - margin-inline-end: 1.5em; color: hsl(0, 0%, 25%); } @@ -40,3 +37,8 @@ html[data-theme="dark"] { } } } + +hr.article-preview-hr { + border: none; + border-top: 1.5px dashed currentColor; +} diff --git a/www/assets/res/css/base.css b/www/assets/res/css/base.css index c6d9347..77cd804 100644 --- a/www/assets/res/css/base.css +++ b/www/assets/res/css/base.css @@ -1,7 +1,12 @@ html { width: 100%; - --fg-color: black; - --bg-color: white; + --body-fg-color: unset; + --body-bg-color: unset; + --table-border-color: black; + --toast-fg-color: white; + --toast-bg-color: black; + --code-fg-color: var(--body-fg-color); + --code-bg-color: #eff1f5; } * { @@ -11,8 +16,14 @@ html { body { width: 100%; margin: 0; - color: var(--fg-color); - background-color: var(--bg-color); + color: var(--body-fg-color); + background-color: var(--body-bg-color); +} + +/* https://developer.mozilla.org/en-US/docs/Web/HTML/Reference/Elements/Heading_Elements#specifying_a_uniform_font_size_for_h1 */ +h1 { + margin-block: 0.67em; + font-size: 2em; } .mono { @@ -31,21 +42,19 @@ div.mono-container { } table { - --border-color: black; - border-collapse: collapse; &, :is(td,th) { - padding: 0.4em; - border: 1px solid var(--border-color); + padding: 0.2em 0.4em; + border: 1px solid var(--table-border-color); } } .toast { font-size: large; font-family: monospace; - color: var(--bg-color); - background-color: var(--fg-color); + color: var(--toast-fg-color); + background-color: var(--toast-bg-color); padding: 0.5em 0.3em; border-radius: 6px; @@ -57,29 +66,16 @@ table { } .chroma { - overflow-x: scroll; + overflow-x: auto; padding-left: 1px; padding-right: 4px; - border-radius: 3px; + border-radius: 6px; } nav { font-size: large; } -#slogan { - color: white; - width: 100%; - padding: 0.5em 1em; - text-align: center; - background-color: dodgerblue; - user-select: none; - - & > * { - user-select: text; - } -} - #main-article { position: relative; left: 50%; @@ -87,6 +83,13 @@ nav { max-width: 880px; padding: 0 1em; margin-top: 1em; + + code:not(.chroma code) { + border-radius: 4px; + color: var(--code-fg-color); + background-color: var(--code-bg-color); + padding-inline: 0.4em; + } } #license a { @@ -123,8 +126,12 @@ nav { } html[data-theme="dark"] { - --fg-color: white; - --bg-color: black; + --body-fg-color: white; + --body-bg-color: black; + --table-border-color: hsl(0, 0%, 25%); + --toast-fg-color: var(--body-bg-color); + --toast-bg-color: var(--body-fg-color); + --code-bg-color: #1e1e2e; & a:link { color:#34ffd9; @@ -133,21 +140,4 @@ html[data-theme="dark"] { & a:visited { color:#abcac4; } - - & #slogan { - color: var(--fg-color); - background-color: midnightblue; - } - - & :is(.chroma,.chroma *) { - background-color: hsl(0, 0%, 1.5%); - - & .n { - color: 
var(--fg-color); - } - } - - table { - --border-color: hsl(0, 0%, 25%); - } } diff --git a/www/assets/res/css/home.css b/www/assets/res/css/home.css index 1db357e..60c9ac7 100644 --- a/www/assets/res/css/home.css +++ b/www/assets/res/css/home.css @@ -1,15 +1,3 @@ -#slogan { - position: sticky; - z-index: 1; - top: 0; -} - -#title-name { - font-family: monospace; - color: var(--bg-color); - background-color: var(--fg-color); -} - #avatar { float: right; } @@ -76,4 +64,4 @@ html[data-theme="dark"] { & .friend-github { filter: invert(1); } -}
\ No newline at end of file +} diff --git a/www/assets/res/css/single.css b/www/assets/res/css/single.css index 70325d1..3d8ee0f 100644 --- a/www/assets/res/css/single.css +++ b/www/assets/res/css/single.css @@ -10,11 +10,3 @@ margin-inline-end: auto; } } - -.end-hr { - border: none; - width: 8em; - margin-block-start: 1.5em; - margin-inline-start: 0; - border-top: 1px dashed black; -} diff --git a/www/assets/res/css/todos.css b/www/assets/res/css/todos.css index e9a595d..f9aa23b 100644 --- a/www/assets/res/css/todos.css +++ b/www/assets/res/css/todos.css @@ -1,14 +1,17 @@ -.todo { - +h3.todo { &::before { - font-family: monospace; + font-size: small; } &.working::before { - content: "* "; + content: "(working) "; } &.done::before { - content: "✓ "; + content: "(done) "; + } + + &.give-up::before { + content: "(give up) "; } -}
\ No newline at end of file +} diff --git a/www/assets/res/js/color-scheme.ts b/www/assets/res/js/color-scheme.ts index db6a3aa..fb4accf 100644 --- a/www/assets/res/js/color-scheme.ts +++ b/www/assets/res/js/color-scheme.ts @@ -90,21 +90,3 @@ function next(): Scheme | null { } } } - -window.addEventListener("load", () => { - const slogon = document.getElementById("slogan")! - let clicks: number = 0 - - const reset = createResetTimer(() => { - clicks = 0 - }) - - slogon.addEventListener("click", () => { - reset() - clicks += 1 - if (clicks === 3) { - saveScheme(next()) - clicks = 0 - } - }) -}) diff --git a/www/config/_default/hugo.yaml b/www/config/_default/hugo.yaml index 2479f6e..289b0b4 100644 --- a/www/config/_default/hugo.yaml +++ b/www/config/_default/hugo.yaml @@ -18,6 +18,10 @@ frontmatter: markup: goldmark: + parser: + attribute: + block: true + title: true extensions: table: true highlight: @@ -32,4 +36,3 @@ params: minify: false sourceMap: "" - timeline: "https://timeline.crupest.life"
\ No newline at end of file diff --git a/www/content/hurd/_index.md b/www/content/hurd/_index.md deleted file mode 100644 index b4c727c..0000000 --- a/www/content/hurd/_index.md +++ /dev/null @@ -1,182 +0,0 @@ ---- -title: "Hurd" -date: 2025-03-03T15:34:41+08:00 -lastmod: 2025-03-03T23:28:46+08:00 -layout: single ---- - -{{< mono >}} - -[TODOS](/hurd/todos) - -{{< /mono >}} - -## links - -{{< mono >}} - -| name | link | -| --- | --- | -| kernel-list-archive | <https://lists.gnu.org/archive/html/bug-hurd/> | -| debian-list-archive | <https://lists.debian.org/debian-hurd/> | -| irc-archive | <https://logs.guix.gnu.org/hurd/> | -| kernel-home | <https://www.gnu.org/software/hurd/index.html> | -| debian-home | <https://www.debian.org/ports/hurd/> | - -{{< /mono >}} - -refs: - -{{< mono >}} - -| name | link | -| --- | --- | -| c | <https://en.cppreference.com/w/c> | -| posix latest | <https://pubs.opengroup.org/onlinepubs/9799919799/> | -| posix 2013 | <https://pubs.opengroup.org/onlinepubs/9699919799.2013edition/> | -| posix 2008 | <https://pubs.opengroup.org/onlinepubs/9699919799.2008edition/> | -| glibc | <https://sourceware.org/glibc/manual/2.41/html_mono/libc.html> | - -{{< /mono >}} - -## *_MAX patch - -TODO: Move to separate page. - -```c -#include <errno.h> -#include <stdlib.h> -#include <unistd.h> - -static inline char *xreadlink(const char *restrict path) { - char *buffer; - size_t allocated = 128; - ssize_t len; - - while (1) { - buffer = (char *)malloc(allocated); - if (!buffer) { - return NULL; - } - len = readlink(path, buffer, allocated); - if (len < (ssize_t)allocated) { - return buffer; - } - free(buffer); - if (len >= (ssize_t)allocated) { - allocated *= 2; - continue; - } - return NULL; - } -} - -static inline char *xgethostname() { - long max_host_name; - char *buffer; - - max_host_name = sysconf(_SC_HOST_NAME_MAX); - buffer = malloc(max_host_name + 1); - - if (gethostname(buffer, max_host_name + 1)) { - free(buffer); - return NULL; - } - - buffer[max_host_name] = '\0'; - return buffer; -} - -static inline char *xgetcwd() { - char *buffer; - size_t allocated = 128; - - while (1) { - buffer = (char *)malloc(allocated); - if (!buffer) { - return NULL; - } - getcwd(buffer, allocated); - if (buffer) - return buffer; - free(buffer); - if (errno == ERANGE) { - allocated *= 2; - continue; - } - return NULL; - } -} -``` - -## git repos - -{{< link-group >}} -hurd -cru: <https://crupest.life/git/cru-hurd/hurd.git> -upstream: <https://git.savannah.gnu.org/git/hurd/hurd.git> -debian: <https://salsa.debian.org/hurd-team/hurd> -{{< /link-group >}} - -{{< link-group >}} -gnumach -cru: <https://crupest.life/git/cru-hurd/gnumach.git> -upstream: <https://git.savannah.gnu.org/git/hurd/gnumach.git> -debian: <https://salsa.debian.org/hurd-team/gnumach> -{{< /link-group >}} - -{{< link-group >}} -mig -cru: <https://crupest.life/git/cru-hurd/mig.git> -upstream: <https://git.savannah.gnu.org/git/hurd/mig.git> -debian: <https://salsa.debian.org/hurd-team/mig> -{{< /link-group >}} - -{{< link-group >}} -glibc -cru: <https://crupest.life/git/cru-hurd/glibc.git> -upstream: <git://sourceware.org/git/glibc.git> -debian: <https://salsa.debian.org/glibc-team/glibc> -mirror: <https://mirrors.tuna.tsinghua.edu.cn/git/glibc.git> -{{< /link-group >}} - -{{< link-group >}} -web -cru: <https://crupest.life/git/cru-hurd/web.git> -upstream: <https://git.savannah.gnu.org/git/hurd/web.git> -{{< /link-group >}} - -## cheatsheet - -Start qemu - -```sh -qemu-system-x86_64 -enable-kvm -m 4G -net nic -net 
user,hostfwd=tcp::3222-:22 -vga vmware -drive cache=writeback,file=[...] -``` - -Configure/Setup network - -```sh -settrans -fgap /servers/socket/2 /hurd/pfinet -i /dev/eth0 -a 10.0.2.15 -g 10.0.2.2 -m 255.255.255.0 -fsysopts /servers/socket/2 /hurd/pfinet -i /dev/eth0 -a 10.0.2.15 -g 10.0.2.2 -m 255.255.255.0 -fsysopts /server/socket/2 -a 10.0.2.15 -g 10.0.2.2 -m 255.255.255.0 -``` - -Setup apt - -```sh -apt-get --allow-unauthenticated --allow-insecure-repositories update -apt-get --allow-unauthenticated upgrade -``` - -## mailing lists / irc - -{{< mono >}} - -| name | address | -| --- | --- | -| hurd | <bug-hurd@gnu.org> | -| debian | <debian-hurd@lists.debian.org> | -| irc | librechat #hurd | - -{{< /mono >}} diff --git a/www/content/notes/_index.md b/www/content/notes/_index.md new file mode 100644 index 0000000..3f96f73 --- /dev/null +++ b/www/content/notes/_index.md @@ -0,0 +1,16 @@ +--- +title: "Notes" +date: 2025-06-14T21:24:00+08:00 +lastmod: 2025-06-14T21:24:00+08:00 +layout: single +--- + +- [Cheat Sheet](/notes/cheat-sheet) + +- [Hurd](/notes/hurd) + + - [Cheat Sheet](/notes/hurd/cheat-sheet) + + - [Todos](/notes/hurd/todos) + + - [Useful Links](/notes/hurd/links) diff --git a/www/content/notes/cheat-sheet.md b/www/content/notes/cheat-sheet.md new file mode 100644 index 0000000..aba8e18 --- /dev/null +++ b/www/content/notes/cheat-sheet.md @@ -0,0 +1,132 @@ +--- +title: "Cheat Sheet" +date: 2025-04-01T23:09:53+08:00 +lastmod: 2025-06-12T01:09:39+08:00 +--- + +goto: [Hurd Cheat Sheet (in a separated page)](/notes/hurd/cheat-sheet) +{class="mono"} + +## GRUB + +Update GRUB after `grub` package is updated. Replace `/boot` with your mount +point of the EFI partition in `--efi-directory=/boot`. Replace `GRUB` with your +bootloader id in `--bootloader-id=GRUB`. + +```sh +grub-install --target=x86_64-efi --efi-directory=/boot --bootloader-id=GRUB +grub-mkconfig -o /boot/grub/grub.cfg +``` + +## (Private) My Service Infrastructure Management + +All commands should be run at the project root path. 
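+
+The `docker run` commands below mount data directories with relative paths
+like `./data/...`, which resolve against the current working directory, so
+they only line up when run from the project root. A quick sanity check (the
+path here is only an illustration, not the real project location):
+
+```sh
+cd /path/to/infra   # hypothetical checkout location of the project root
+ls data             # should list certbot, git, ...
+```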
+
+### Install Deno
+
+Script from <https://docs.deno.com/runtime/getting_started/installation/>
+
+```sh
+curl -fsSL https://deno.land/install.sh | sh
+```
+
+### Add Git Server User / Set Password
+
+```sh
+docker run -it --rm -v "./data/git/user-info:/user-info" httpd htpasswd /user-info [username]
+```
+
+### Certbot
+
+A complete command is `[prefix] [docker (based on challenge kind)] [command] [challenge] [domains] [test] [misc]`
+
+| part | for | segment |
+| :-: | :-: | --- |
+| prefix | * | `docker run -it --rm --name certbot -v "./data/certbot/certs:/etc/letsencrypt" -v "./data/certbot/data:/var/lib/letsencrypt" certbot/certbot` |
+| docker | challenge standalone | `-p "0.0.0.0:80:80"` |
+| docker | challenge nginx | `-v "./data/certbot/webroot:/var/www/certbot"` |
+| command | create/expand/shrink | `certonly` |
+| command | renew | `renew` |
+| challenge | standalone | `--standalone` |
+| challenge | nginx | `--webroot -w /var/www/certbot` |
+| domains | * | `[-d [domain]]...` |
+| test | * | `--test-cert --dry-run` |
+| misc | agree tos | `--agree-tos` |
+| misc | cert name | `--cert-name [name]` |
+| misc | email | `--email [email]` |
+
+For example, a **test** run of create/expand/shrink with the standalone server:
+
+```sh
+docker run -it --rm --name certbot \
+  -v "./data/certbot/certs:/etc/letsencrypt" \
+  -v "./data/certbot/data:/var/lib/letsencrypt" \
+  -p "0.0.0.0:80:80" \
+  certbot/certbot \
+  certonly \
+  --standalone \
+  --cert-name crupest.life \
+  -d crupest.life -d mail.crupest.life -d timeline.crupest.life \
+  --test-cert --dry-run
+```
+
+## System Setup
+
+### Debian Setup
+
+#### Setup SSL Certificates and Curl
+
+```sh
+apt-get update
+apt-get install ca-certificates curl
+install -m 0755 -d /etc/apt/keyrings
+```
+
+### Docker Setup
+
+#### Uninstall Packages Provided by the Stock Repo
+
+```bash
+for pkg in docker.io docker-doc docker-compose \
+    podman-docker containerd runc; do
+  apt-get remove $pkg;
+done
+```
+
+#### Install Docker's APT GPG Key
+
+Remember to [set up SSL certificates and curl](#setup-ssl-certificates-and-curl) first.
+
+```sh
+curl -fsSL https://download.docker.com/linux/debian/gpg \
+  -o /etc/apt/keyrings/docker.asc
+chmod a+r /etc/apt/keyrings/docker.asc
+```
+
+#### Add Docker Repos
+
+```bash
+echo "deb [arch=$(dpkg --print-architecture) signed-by=/etc/apt/keyrings/docker.asc] \
+  https://download.docker.com/linux/debian \
+  $(. /etc/os-release && echo "$VERSION_CODENAME") stable" | \
+  tee /etc/apt/sources.list.d/docker.list > /dev/null
+```
+
+#### Install Docker Packages
+
+```sh
+apt-get update
+apt-get install docker-ce docker-ce-cli containerd.io \
+  docker-buildx-plugin docker-compose-plugin
+```
+
+#### Start And Enable Docker
+
+Remember to log out and log back in for the user group change to take effect.
+
+```sh
+systemctl enable docker
+systemctl start docker
+groupadd -f docker
+usermod -aG docker $USER
+```
diff --git a/www/content/notes/hurd/_index.md b/www/content/notes/hurd/_index.md
new file mode 100644
index 0000000..8faf70b
--- /dev/null
+++ b/www/content/notes/hurd/_index.md
@@ -0,0 +1,15 @@
+---
+title: "Hurd"
+date: 2025-03-03T15:34:41+08:00
+lastmod: 2025-06-12T01:09:39+08:00
+layout: single
+---
+
+This is the gateway page for various notes about
+[GNU/Hurd](https://www.gnu.org/software/hurd/) written by me.
+
+- [Cheat Sheet](/notes/hurd/cheat-sheet)
+
+- [Todos](/notes/hurd/todos)
+
+- [Useful Links](/notes/hurd/links)
diff --git a/www/content/notes/hurd/cheat-sheet.md b/www/content/notes/hurd/cheat-sheet.md
new file mode 100644
index 0000000..6fe5ccd
--- /dev/null
+++ b/www/content/notes/hurd/cheat-sheet.md
@@ -0,0 +1,68 @@
+---
+title: "Hurd Cheat Sheet"
+date: 2025-06-12T00:59:16+08:00
+lastmod: 2025-06-14T20:34:06+08:00
+---
+
+## Mirrors
+
+The mirror has to be `debian-ports`, not `debian`, and many mirror sites do not
+provide it. The following is the Aliyun mirror:
+
+```txt
+/etc/apt/sources.list
+---
+deb https://mirrors.aliyun.com/debian-ports/ unstable main
+deb https://mirrors.aliyun.com/debian-ports/ unreleased main
+deb-src https://mirrors.aliyun.com/debian/ unstable main
+```
+
+The hurd-amd64 deb-src entry seems not to work.
+
+## Use QEMU Virtual Machine
+
+For i386, use
+
+```sh
+qemu-system-x86_64 -enable-kvm -m 4G \
+  -net nic -net user,hostfwd=tcp::3222-:22 \
+  -vga vmware -drive cache=writeback,file=[...]
+```
+
+For x86_64, use
+
+```sh
+qemu-system-x86_64 -enable-kvm -m 8G -machine q35 \
+  -net nic -net user,hostfwd=tcp::3223-:22 \
+  -vga vmware -drive cache=writeback,file=[...]
+```
+
+GRUB in the image seems to use a hard-coded `/dev/*` block-device path as the
+root partition in the kernel command line rather than a GUID, so if the hard
+disk bus is changed in QEMU and the path changes accordingly, the system can't
+boot.
+
+The QEMU CLI argument `-machine q35` enables AHCI and SATA, and is **required
+for the official x86_64 image to boot**. As for i386, I haven't checked yet.
+
+There is [a Deno script](https://github.com/crupest/crupest/blob/dev/deno/tools/manage-vm.ts)
+written by me to help define VMs and build their QEMU CLI arguments.
+
+## Inside Hurd
+
+Configure/Setup network
+
+```sh
+settrans -fgap /servers/socket/2 /hurd/pfinet \
+  -i /dev/eth0 -a 10.0.2.15 -g 10.0.2.2 -m 255.255.255.0
+fsysopts /servers/socket/2 /hurd/pfinet \
+  -i /dev/eth0 -a 10.0.2.15 -g 10.0.2.2 -m 255.255.255.0
+fsysopts /server/socket/2 -a 10.0.2.15 -g 10.0.2.2 -m 255.255.255.0
+```
+
+Setup apt
+
+```sh
+apt-get --allow-unauthenticated --allow-insecure-repositories update
+apt-get --allow-unauthenticated upgrade
+```
diff --git a/www/content/notes/hurd/links.md b/www/content/notes/hurd/links.md
new file mode 100644
index 0000000..1e966d4
--- /dev/null
+++ b/www/content/notes/hurd/links.md
@@ -0,0 +1,88 @@
+---
+title: "Hurd Useful Links"
+date: 2025-06-14T20:34:06+08:00
+lastmod: 2025-06-14T20:34:06+08:00
+---
+
+## links
+
+| name | link |
+| --- | --- |
+| kernel-list-archive | <https://lists.gnu.org/archive/html/bug-hurd/> |
+| debian-list-archive | <https://lists.debian.org/debian-hurd/> |
+| irc-archive | <https://logs.guix.gnu.org/hurd/> |
+| kernel-home | <https://www.gnu.org/software/hurd/index.html> |
+| debian-home | <https://www.debian.org/ports/hurd/> |
+
+refs:
+
+| name | link |
+| --- | --- |
+| c | <https://en.cppreference.com/w/c> |
+| posix latest | <https://pubs.opengroup.org/onlinepubs/9799919799/> |
+| posix 2013 | <https://pubs.opengroup.org/onlinepubs/9699919799.2013edition/> |
+| posix 2008 | <https://pubs.opengroup.org/onlinepubs/9699919799.2008edition/> |
+| glibc | <https://sourceware.org/glibc/manual/2.41/html_mono/libc.html> |
+
+## mailing lists / irc
+
+| name | address |
+| --- | --- |
+| hurd | <bug-hurd@gnu.org> |
+| debian | <debian-hurd@lists.debian.org> |
+| irc | librechat #hurd |
+
+## *_MAX patch
+
+See [this](posts/c-func-ext.md)
+
+## git repos
+
+Clone all
at once: + +```sh +# glibc is too big, so not clone here. +for repo in hurd gnumach mig web; do + if [ ! -d $repo ]; then + git clone "https://crupest.life/git/hurd/$repo.git" + pushd $repo + git remote add upstream "https://git.savannah.gnu.org/git/hurd/$repo.git" + popd + fi +done +``` + +{{< link-group >}} +hurd +cru: <https://crupest.life/git/hurd/hurd.git> +upstream: <https://git.savannah.gnu.org/git/hurd/hurd.git> +debian: <https://salsa.debian.org/hurd-team/hurd> +{{< /link-group >}} + +{{< link-group >}} +gnumach +cru: <https://crupest.life/git/hurd/gnumach.git> +upstream: <https://git.savannah.gnu.org/git/hurd/gnumach.git> +debian: <https://salsa.debian.org/hurd-team/gnumach> +{{< /link-group >}} + +{{< link-group >}} +mig +cru: <https://crupest.life/git/hurd/mig.git> +upstream: <https://git.savannah.gnu.org/git/hurd/mig.git> +debian: <https://salsa.debian.org/hurd-team/mig> +{{< /link-group >}} + +{{< link-group >}} +glibc +cru: <https://crupest.life/git/hurd/glibc.git> +upstream: <git://sourceware.org/git/glibc.git> +debian: <https://salsa.debian.org/glibc-team/glibc> +mirror: <https://mirrors.tuna.tsinghua.edu.cn/git/glibc.git> +{{< /link-group >}} + +{{< link-group >}} +web +cru: <https://crupest.life/git/hurd/web.git> +upstream: <https://git.savannah.gnu.org/git/hurd/web.git> +{{< /link-group >}} diff --git a/www/content/hurd/todos.md b/www/content/notes/hurd/todos.md index d95bfb9..2dbded3 100644 --- a/www/content/hurd/todos.md +++ b/www/content/notes/hurd/todos.md @@ -1,7 +1,7 @@ --- title: "Hurd Todos" date: 2025-03-03T21:22:35+08:00 -lastmod: 2025-03-03T23:28:46+08:00 +lastmod: 2025-06-14T20:34:06+08:00 params: css: - todos @@ -9,7 +9,11 @@ params: ## Porting -### {{< todo name=pam state=working >}} +### hurd-fs4 {class="todo working"} + +<https://salsa.debian.org/rust-team/debcargo-conf/-/merge_requests/872> + +### pam {class="todo give-up"} {{< link-group >}} git @@ -23,7 +27,7 @@ mail <https://lists.debian.org/debian-hurd/2025/02/msg00018.html> {{< /link-group >}} -### {{< todo name=abseil state=working >}} +### abseil {class="todo working"} {{< link-group >}} git @@ -34,11 +38,11 @@ debian: <https://salsa.debian.org/debian/abseil> {{< link-group >}} mail -<https://lists.debian.org/debian-hurd/2025/02/msg00011.html>\ +<https://lists.debian.org/debian-hurd/2025/02/msg00011.html> <https://lists.debian.org/debian-hurd/2025/02/msg00035.html> {{< /link-group >}} -### {{< todo name=libgav1 state=done >}} +### libgav1 {class="todo done"} {{< link-group >}} git diff --git a/www/content/posts/_index.html b/www/content/posts/_index.md index 76fa783..76fa783 100644 --- a/www/content/posts/_index.html +++ b/www/content/posts/_index.md diff --git a/www/content/posts/c-func-ext.md b/www/content/posts/c-func-ext.md new file mode 100644 index 0000000..1f5f822 --- /dev/null +++ b/www/content/posts/c-func-ext.md @@ -0,0 +1,101 @@ +--- +title: "Libc/POSIX Function \"Extensions\"" +date: 2025-03-04T13:40:33+08:00 +lastmod: 2025-03-04T13:40:33+08:00 +categories: coding +tags: + - c + - posix +--- + +(I've given up on this, at least for linux pam.) + +Recently, I’ve been working on porting some libraries to GNU/Hurd. Many (old) +libraries use [`*_MAX` constants on POSIX system +interfaces](https://pubs.opengroup.org/onlinepubs/9699919799.2008edition/nframe.html) +to calculate buffer sizes. However, the GNU/Hurd maintainers urge against the +blind use of them and refuse to define them in system headers. When old APIs are +gone, compatibility problems come. 
To make my life easier, I'll put some
+reusable code snippets here to help *fix `*_MAX` bugs*.
+
+<!--more-->
+
+```c
+#include <stdlib.h>
+#include <stdarg.h>
+#include <unistd.h>
+#include <stdio.h>
+#include <errno.h>
+
+static inline char *xreadlink(const char *restrict path) {
+  char *buffer;
+  size_t allocated = 128;
+  ssize_t len;
+
+  while (1) {
+    buffer = (char *)malloc(allocated);
+    if (!buffer) { return NULL; }
+    len = readlink(path, buffer, allocated);
+    /* readlink returns -1 on error and never NUL-terminates the result. */
+    if (len < 0) { free(buffer); return NULL; }
+    if (len < (ssize_t)allocated) { buffer[len] = '\0'; return buffer; }
+    free(buffer);
+    allocated *= 2;
+  }
+}
+
+static inline char *xgethostname(void) {
+  long max_host_name;
+  char *buffer;
+
+  max_host_name = sysconf(_SC_HOST_NAME_MAX);
+  buffer = malloc(max_host_name + 1);
+  if (!buffer) { return NULL; }
+
+  if (gethostname(buffer, max_host_name + 1)) {
+    free(buffer);
+    return NULL;
+  }
+
+  buffer[max_host_name] = '\0';
+  return buffer;
+}
+
+static inline char *xgetcwd(void) {
+  char *buffer;
+  size_t allocated = 128;
+
+  while (1) {
+    buffer = (char *)malloc(allocated);
+    if (!buffer) { return NULL; }
+    /* getcwd returns NULL and sets errno to ERANGE if the buffer is too small. */
+    if (getcwd(buffer, allocated)) { return buffer; }
+    free(buffer);
+    if (errno == ERANGE) { allocated *= 2; continue; }
+    return NULL;
+  }
+}
+
+static inline __attribute__((__format__(__printf__, 2, 3))) int
+xsprintf(char **buf_ptr, const char *restrict format, ...) {
+  char *buffer;
+  int ret;
+
+  va_list args, args_copy;
+  va_start(args, format);
+  /* The first vsnprintf pass consumes args, so keep a copy for the second pass. */
+  va_copy(args_copy, args);
+
+  /* Measure the required length, then format into an exactly-sized buffer. */
+  ret = vsnprintf(NULL, 0, format, args);
+  if (ret < 0) { goto out; }
+
+  buffer = malloc((size_t)ret + 1);
+  if (!buffer) { ret = -1; goto out; }
+
+  ret = vsnprintf(buffer, (size_t)ret + 1, format, args_copy);
+  if (ret < 0) { free(buffer); goto out; }
+
+  *buf_ptr = buffer;
+
+out:
+  va_end(args_copy);
+  va_end(args);
+  return ret;
+}
+```
diff --git a/www/content/posts/nspawn.md b/www/content/posts/nspawn.md
new file mode 100644
index 0000000..c6add7e
--- /dev/null
+++ b/www/content/posts/nspawn.md
@@ -0,0 +1,206 @@
+---
+title: "Use systemd-nspawn to Create a Development Sandbox"
+date: 2025-03-04T23:22:23+08:00
+lastmod: 2025-03-27T17:46:24+08:00
+---
+
+*systemd-nspawn* is a great tool for creating development sandboxes. Compared to
+other similar technologies, it's lightweight, flexible, and easy to use. In this
+post, I'll present a simple guide to using it.
+
+<!--more-->
+
+## Advantages
+
+I've been using traditional VMs and Docker for creating development
+environments. While both work fine, performance aside, they suffer from being
+overly isolated. Two big headaches for me are host network sharing in
+traditional VMs and the immutability of Docker container ports and mounts.
+
+*systemd-nspawn* is much more flexible. Every feature can be configured
+granularly and dynamically. For example, filesystem sharing can be configured to
+work like bind mounts, and network isolation can be disabled entirely, which
+exactly solves the two headaches mentioned above. Additionally, being part of
+*systemd*, it has the same excellent design as other *systemd* components.
+
+Debian has a similar powerful tool called *schroot*. It is the official tool for
+automatic package building. Unfortunately, it seems to be a tool specific to
+Debian.
+
+## Usage
+
+*systemd-nspawn* consists of two parts that work together to achieve its VM
+functionality:
+
+1. The program `systemd-nspawn`, which runs other programs in an isolated
+   environment with user-specified settings. Each running VM is essentially a
+   group of processes launched via `systemd-nspawn`.
+2.
Components for defining and managing VMs, possibly utilizing + `systemd-nspawn`. + +*systemd-nspawn* has a user interface similar to *systemd service*: + +- `[name].service` => `[name].nspawn`: Define VMs. + - Should be placed in `/etc/systemd/nspawn/`, where `machinectl` scans for VM + definitions. + - `[name]` serves as the VM's name. Use it to specify the VM when calling + `machinectl`. Note: You'd better use a valid hostname (avoid illegal + characters like `.`) to prevent weird errors. + - The options available roughly mirror `systemd-nspawn`'s CLI arguments, with + some adjustments to better fit VM semantics. + - Isolation-related options are usually prefixed with `Private` (e.g., + `PrivateUsers=`). +- `systemctl` => `machinectl`: Manage VMs. + - `enable`/`disable`: Set whether the VM starts automatically at system boot. + - `start`/`poweroff`/`reboot`/`terminate`/`kill`: Control the VM's running + state. + - `login`/`shell`: Do things inside the VM. + +I'll demonstrate how to create a Debian-based VM on Arch Linux as an example. +You should adjust the commands based on your own situation. + +### Create Root Filesystem + +The root filesystem of a distribution can be created using a special tool from +its package manager. For Debian-based distributions, it's `debootstrap`. If your +OS uses a different package manager ecosystem, the target distribution's one and +its keyrings (which might reside somewhere else) have to be installed first. + +```bash-session +# pacman -S debootstrap debian-archive-keyring ubuntu-keyring +``` + +Regular directories work perfectly as root filesystems, but other directory-like +things should work, too, such as `btrfs` subvolume. + +```bash-session +# btrfs subvolume create /var/lib/machines/[name] +``` + +Now, run `debootstrap` to create a minimal filesystem. Update the command with +the target distribution's codename and one of its mirrors you select. + +```bash-session +# debootstrap --include=dbus,libpam-systemd [codename] \ + /var/lib/machines/[name] [mirror] +``` + +At this point, the filesystem contains only the distribution's essential +packages, much like a base Docker image (e.g., `debian`), so you can customize +it in a similar way. + +### Configure and Customize + +I'll present my personal configuration here as a reference. You can create a new +one based on it or from scratch. + +1. Disable user isolation: `[Exec] PrivateUsers=no` +2. Disable network isolation: `[Network] Private=no` +3. Create a user with the same username, group name, UID and GIDs: should be + done inside the VM. +4. Only bind a subdirectory under *home*: `[Files] Bind=/home/[user]/[subdir]` +5. Set the hostname: `[Exec] Hostname=[hostname]` + +I disable user isolation because it's implemented using the kernel's user +namespace, which adds many inconveniences due to UID/GID mapping. + +So, the final `.nspawn` file is like: + +```systemd +/etc/systemd/nspawn/[name].nspawn +--- +[Exec] +PrivateUsers=no +Hostname=[hostname] + +[Files] +Bind=/home/[user]/[subdir] + +[Network] +Private=no +``` + +If `machinectl` can already start the VM, you can log in to customize it +further. Otherwise, you can use `systemd-nspawn` directly to enter the VM and +run commands inside it. `--resolv-conf=bind-host` binds the host's +`/etc/resolv.conf` file to make the network work. + +```bash-session +# systemd-nspawn --resolv-conf=bind-host -D /var/lib/machines/[name] +``` + +Now, inside the VM, you can do whatever you like. In my configuration, a correct +user must be created manually. 
+ +```bash-session +# apt install locales sudo nano vim less man bash-completion curl wget \ + build-essential git +# dpkg-reconfigure locales + +# useradd -m -G sudo -s /usr/bin/bash [user] +# passwd [user] +``` + +Some setup may need to be done manually, especially those usually handled by the +distribution's installer. + +1. Update `/etc/hostname` with the VM's real hostname. +2. Update `/etc/hosts`. + +```plain +/etc/hosts +--- +127.0.0.1 localhost [hostname] +::1 localhost ip6-localhost ip6-loopback +ff02::1 ip6-allnodes +ff02::2 ip6-allrouters +``` + +**Ubuntu 20.04 specific:** Due to [a bug in +systemd](https://github.com/systemd/systemd/issues/22234), the backport source +has to be added. + +```plain +/etc/apt/sources.list +--- +deb https://mirrors.ustc.edu.cn/ubuntu focal main restricted universe multiverse +deb https://mirrors.ustc.edu.cn/ubuntu/ focal-updates main restricted universe multiverse +deb https://mirrors.ustc.edu.cn/ubuntu/ focal-backports main restricted universe multiverse +deb https://mirrors.ustc.edu.cn/ubuntu/ focal-security main restricted universe multiverse +``` + +### Use + +The following command starts a new shell session for the specified user inside +the VM, where you can run commands and perform tasks. + +```bash-session +# machinectl shell [user]@[name] +``` + +Another way is to use `login` command to enter the *login console*. From there, +you can log in as a user to start a shell session. + +```bash-session +# machinectl login [name] +``` + +To exit a VM session (especially in the *login console*), press `CTRL+]` three +times quickly in a row. + +### Snapshot + +The easiest way to backup/snapshot a VM is to create an archive of the VM's +filesystem. You can use any archive tool you prefer, such as the simple `tar`. +If the VM's filesystem is a `btrfs` subvolume, native `btrfs` snapshots can be +used here. Before creating a snapshot, you should power off the VM to avoid +archiving runtime files. + +```bash-session +# machinectl poweroff [name] +# btrfs subvolume snapshot /var/lib/machines/[name] \ + /var/lib/machines/btrfs-snapshots/[name]/[snapshot-name] +``` + +`machinectl` also provides an *image* feature similar to Docker, though I've +never tried it. Feel free to explore it if you're interested! diff --git a/www/content/posts/pattern-in-cpp.md b/www/content/posts/pattern-in-cpp.md deleted file mode 100644 index be921fd..0000000 --- a/www/content/posts/pattern-in-cpp.md +++ /dev/null @@ -1,25 +0,0 @@ ---- -title: "Pattern in C++" -date: 2022-01-24T15:57:34+08:00 -categories: Coding -tags: - - C++ -description: The secret ingredient of secret ingredient soup is ... ---- - -> The secret ingredient of secret ingredient soup is ... -> Nothing! - -This is a script from Kung Fu Panda, which is one of my favorite film. - -People have been struggling for days to find a pattern in which they can code better. Especially those who use Java. They even wrote a book called "Programming Pattern" (or another name since I might forget the name) to show the common patterns. - -<!--more--> - -I don't mean patterns are bad. However there exists many people who strive to the patterns, stick to the patterns or even get stubborn to patterns. Maybe there exists a way to achieve their goals with some not that patterned code but they don't want to use them at all just because the code is not patterned. - -I love C++. And the pattern of C++ is exactly *NO PATTERN*. You write code in the way that they do the task most effectively and neatly. 
You don't need to apply a pattern to achieve your goal. You just write your code naturally with the way you are most comfortable with. - -Tools including programming language serve for people but not vice versa. Don't let tools constrain your hands. And when you find tools not convenient, just fix them or make a new one. Just like C++ is evolving all the time. So are all languages. - -The best is code is those that express meanings most clearly and achieve goals most effectively and do nothing with patterns. diff --git a/www/content/posts/use-paddleocr.md b/www/content/posts/use-paddleocr.md index f4eae32..806df41 100644 --- a/www/content/posts/use-paddleocr.md +++ b/www/content/posts/use-paddleocr.md @@ -2,10 +2,10 @@ title: "Use PaddleOCR" date: 2022-11-30T13:25:36+08:00 description: Simple steps to use PaddleOCR. -categories: Coding +categories: coding tags: - AI - - Python + - python - OCR --- diff --git a/www/content/todos.md b/www/content/todos.md deleted file mode 100644 index a790e24..0000000 --- a/www/content/todos.md +++ /dev/null @@ -1,7 +0,0 @@ ---- -title: "Todos" -date: 2025-03-03T15:34:53+08:00 -lastmod: 2025-03-03T23:28:46+08:00 ---- - -[Hurd](/hurd/todos) diff --git a/www/layouts/partials/css-res.html b/www/layouts/_partials/css-res.html index 6fabf67..6fabf67 100644 --- a/www/layouts/partials/css-res.html +++ b/www/layouts/_partials/css-res.html diff --git a/www/layouts/partials/css.html b/www/layouts/_partials/css.html index 12d3353..12d3353 100644 --- a/www/layouts/partials/css.html +++ b/www/layouts/_partials/css.html diff --git a/www/layouts/partials/date.html b/www/layouts/_partials/date.html index 9769e4e..9769e4e 100644 --- a/www/layouts/partials/date.html +++ b/www/layouts/_partials/date.html diff --git a/www/layouts/partials/highlight.html b/www/layouts/_partials/highlight.html index 28c510e..28c510e 100644 --- a/www/layouts/partials/highlight.html +++ b/www/layouts/_partials/highlight.html diff --git a/www/layouts/partials/js.html b/www/layouts/_partials/js.html index 16dafa4..16dafa4 100644 --- a/www/layouts/partials/js.html +++ b/www/layouts/_partials/js.html diff --git a/www/layouts/partials/nav.html b/www/layouts/_partials/nav.html index 42c9ad1..42c9ad1 100644 --- a/www/layouts/partials/nav.html +++ b/www/layouts/_partials/nav.html diff --git a/www/layouts/partials/preview/article.html b/www/layouts/_partials/preview/article.html index 6245434..6245434 100644 --- a/www/layouts/partials/preview/article.html +++ b/www/layouts/_partials/preview/article.html diff --git a/www/layouts/partials/preview/post.html b/www/layouts/_partials/preview/post.html index f0c6fb5..f0c6fb5 100644 --- a/www/layouts/partials/preview/post.html +++ b/www/layouts/_partials/preview/post.html diff --git a/www/layouts/_partials/preview/posts.html b/www/layouts/_partials/preview/posts.html new file mode 100644 index 0000000..291c4b2 --- /dev/null +++ b/www/layouts/_partials/preview/posts.html @@ -0,0 +1,13 @@ +{{ $h := .h }} +{{ $is_first := true}} +{{ range .pages }} + {{ if $is_first }} + {{ $is_first = false }} + {{ else }} + <hr class="article-preview-hr"> + {{ end }} + {{ partial "preview/post.html" (dict + "h" $h + "page" .) + }} +{{ end }}
\ No newline at end of file diff --git a/www/layouts/shortcodes/link-group.html b/www/layouts/_shortcodes/link-group.html index b16c2bc..b16c2bc 100644 --- a/www/layouts/shortcodes/link-group.html +++ b/www/layouts/_shortcodes/link-group.html diff --git a/www/layouts/_default/baseof.html b/www/layouts/baseof.html index 621aea1..b9cd715 100644 --- a/www/layouts/_default/baseof.html +++ b/www/layouts/baseof.html @@ -19,9 +19,6 @@ {{ end }} </head> <body> - <div id="slogan"> - <span>🙃The world is full of pain, but we can fix it with love!</span> - </div> {{ block "pre-article" .}} {{ end }} <article id="main-article"> @@ -29,6 +26,8 @@ {{ end }} <hr/> <footer class="mono-link"> + {{ block "footer" . }} + {{ end }} <p id="license"> <small>This work is licensed under <a rel="license noopener noreferrer" diff --git a/www/layouts/index.html b/www/layouts/home.html index 7b33a6a..38f7604 100644 --- a/www/layouts/index.html +++ b/www/layouts/home.html @@ -22,30 +22,30 @@ {{ define "content"}} <img id="avatar" src="/avatar.png" alt="My avatar" width="80" height="80" /> - <h1 id="title">Hello! This is <span id="title-name">crupest</span> !</h1> + <h1 id="title">Hello! This is <code>crupest</code> !</h1> <hr /> <section> <p>Welcome to my home page! Nice to meet you here! 🥰</p> - <p>If you have something interesting to share with me, feel free to email me at - <a rel="noopener noreferrer" href="mailto:crupest@crupest.life">crupest@crupest.life</a>. - You can also create an issue in any of my repos on GitHub to talk anything to me. - </p> + <p>Feel free to contact me via my email address <a href="mailto:crupest@crupest.life">crupest@crupest.life</a>, + or just create an issue in any of my <a rel="noopener noreferrer" href="https://github.com/crupest">GitHub</a> + repos. I love talking with people a lot.</p> <div id="links" class="mono-link"> - links: + goto: <ul> - <li><a href="{{ .RelPermalink }}">home</a></li> <li><a href="{{ absURL "/git/" }}">git</a></li> - {{ with .GetPage "/hurd" }} + {{ with .GetPage "/notes" }} + <li><a href="{{ .RelPermalink }}">notes</a></li> + {{ end }} + {{ with .GetPage "/notes/hurd" }} <li><a href="{{ .RelPermalink }}">hurd</a></li> {{ end }} - {{ with .GetPage "/todos" }} - <li><a href="{{ .RelPermalink }}">todos</a></li> + {{ with .GetPage "/notes/cheat-sheet" }} + <li><a href="{{ .RelPermalink }}">cheat-sheet</a></li> {{ end }} - <li><a rel="noopener noreferrer" href="https://github.com/crupest">github</a></li> - </ul> + </ul> </div> </section> - <hr> + <hr/> {{ with .GetPage "/posts" }} <section id="recent-posts"> <h2>Recent Posts <a class="mono-link" href="{{ .RelPermalink }}">(all)</a></h2> @@ -55,7 +55,7 @@ }} </section> {{ end }} - <hr> + <hr/> <section> <h2 id="friends">My Friends <small>(more links are being collected ...)</small></h2> <div id="friends-container"> @@ -75,18 +75,4 @@ {{ end }} </div> </section> - <hr> - <section> - <h2>Always Remember</h2> - <figure class="citation"> - <blockquote> - <p>Die Philosophen haben die Welt nur verschieden interpretiert, es kömmt aber darauf an, sie zu verändern.</p> - <p><small>Translated from German:</small> - The philosophers have only interpreted the world in various ways, the point is to change it.</p> - </blockquote> - <figcaption> - <cite>Karl Marx, Theses on Feuerbach (1845)</cite> - </figcaption> - </figure> - </section> -{{ end }}
\ No newline at end of file +{{ end }} diff --git a/www/layouts/_default/list.html b/www/layouts/list.html index c7c6cce..1d4ec56 100644 --- a/www/layouts/_default/list.html +++ b/www/layouts/list.html @@ -4,9 +4,11 @@ {{ define "content" }} {{ partial "nav.html" . }} - <h1>Posts</h1> + <h1>{{ .Title }}</h1> + <hr/> {{ partial "preview/posts.html" (dict - "h" "h3" - "pages" (.RegularPages.ByDate.Reverse.Limit 3)) + "h" "h3" + "pages" .RegularPages + ) }} {{ end }} diff --git a/www/layouts/partials/preview/posts.html b/www/layouts/partials/preview/posts.html deleted file mode 100644 index f2cb640..0000000 --- a/www/layouts/partials/preview/posts.html +++ /dev/null @@ -1,7 +0,0 @@ -{{ $h := .h }} -{{ range .pages }} - {{ partial "preview/post.html" (dict - "h" $h - "page" .) - }} -{{ end }}
\ No newline at end of file diff --git a/www/layouts/shortcodes/mono.html b/www/layouts/shortcodes/mono.html deleted file mode 100644 index ab183a5..0000000 --- a/www/layouts/shortcodes/mono.html +++ /dev/null @@ -1,3 +0,0 @@ -<div class="mono-container"> - {{ .Inner | .Page.RenderString }} -</div> diff --git a/www/layouts/shortcodes/todo.html b/www/layouts/shortcodes/todo.html deleted file mode 100644 index 1327b31..0000000 --- a/www/layouts/shortcodes/todo.html +++ /dev/null @@ -1 +0,0 @@ -<span class="todo {{ .Get "state" }}">{{ .Get "name" }}</span>
\ No newline at end of file diff --git a/www/layouts/_default/single.html b/www/layouts/single.html index cd0e9c5..33360b4 100644 --- a/www/layouts/_default/single.html +++ b/www/layouts/single.html @@ -18,6 +18,8 @@ {{ end}} </p> {{ .Content }} - <hr class="end-hr"/> - {{ partial "nav.html" . }} {{ end }} + +{{ define "footer" }} + {{ partial "nav.html" . }} +{{ end }}
\ No newline at end of file