248 files changed, 16210 insertions, 0 deletions
diff --git a/.editorconfig b/.editorconfig
new file mode 100644
index 0000000..d55cc4a
--- /dev/null
+++ b/.editorconfig
@@ -0,0 +1,7 @@
+root = true
+
+[*]
+end_of_line = lf
+
+[*.py]
+profile = black
diff --git a/.gitignore b/.gitignore
new file mode 100644
index 0000000..246ccc5
--- /dev/null
+++ b/.gitignore
@@ -0,0 +1,16 @@
+/data
+/log
+/tmp
+/backup
+/generated
+
+# old generated files
+/mailserver.env
+/docker-compose.yaml
+/crupest-api-config.json
+/nginx-config
+/v2ray-config.json
+/v2ray-client-config.json
+/forgejo.app.ini.init
+
+tools/docker-mailserver-setup.sh
diff --git a/.vscode/settings.json b/.vscode/settings.json
new file mode 100644
index 0000000..ff2a1bd
--- /dev/null
+++ b/.vscode/settings.json
@@ -0,0 +1,4 @@
+{
+  "python.analysis.typeCheckingMode": "basic",
+  "dotnet.defaultSolution": "disable"
+}
\ No newline at end of file
@@ -0,0 +1,21 @@
+MIT License
+
+Copyright (c) 2020 crupest
+
+Permission is hereby granted, free of charge, to any person obtaining a copy
+of this software and associated documentation files (the "Software"), to deal
+in the Software without restriction, including without limitation the rights
+to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
+copies of the Software, and to permit persons to whom the Software is
+furnished to do so, subject to the following conditions:
+
+The above copyright notice and this permission notice shall be included in all
+copies or substantial portions of the Software.
+
+THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
+AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
+OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
+SOFTWARE.
diff --git a/README.md b/README.md
new file mode 100644
index 0000000..61cc0c3
--- /dev/null
+++ b/README.md
@@ -0,0 +1,11 @@
+# Hi! This is **crupest**
+
+Nice to meet you here! 🤗
+
+Working on [GNU/Hurd](https://www.gnu.org/software/hurd/index.html) [Debian](https://www.debian.org/ports/hurd/) now. ❤️
+
+I love everything in the world. Contact me via my email, *<crupest@crupest.life>*, or create an issue in any of my repos. I love talking to people a lot.
+
+> *Die Philosophen haben die Welt nur verschieden interpretiert, es kömmt aber darauf an, sie zu verändern.*
+(*The philosophers have only interpreted the world in various ways, the point is to change it.*)
+\- Marx, K. (1845). *Theses on Feuerbach*.
diff --git a/assets/crupest-transparent.png b/assets/crupest-transparent.png
Binary files differ
new file mode 100755
index 0000000..d890d8d
--- /dev/null
+++ b/assets/crupest-transparent.png
diff --git a/configs/Microsoft.PowerShell_profile.ps1 b/configs/Microsoft.PowerShell_profile.ps1
new file mode 100644
index 0000000..aeced5f
--- /dev/null
+++ b/configs/Microsoft.PowerShell_profile.ps1
@@ -0,0 +1,35 @@
+function Use-VC {
+    param(
+        [Parameter()]
+        [ValidateSet('x64', 'x86')]
+        $Arch = 'x64'
+    )
+
+    if ($Arch -eq 'x86') {
+        $p = 'x86';
+    }
+    else {
+        $p = 'amd64'
+    }
+
+    cmd /c "`"$(vswhere.exe -format value -property installationPath)\VC\Auxiliary\Build\vcvars64.bat`" $p & set" |
+        ForEach-Object {
+            if ($_ -match '=') {
+                $v = $_ -split '='
+                Set-Item -Force -Path "ENV:\$($v[0])" -Value "$($v[1])"
+            }
+        }
+    Pop-Location
+    Write-Host "Visual Studio Command Prompt variables set."
-ForegroundColor Yellow +} + + +function Set-Proxy { + $env:http_proxy = "http://127.0.0.1:2080" + $env:https_proxy = "http://127.0.0.1:2080" +} + +function Reset-Proxy { + Remove-Item env:http_proxy + Remove-Item env:https_proxy +} diff --git a/configs/bruno/ComfyUI/Get Object Info.bru b/configs/bruno/ComfyUI/Get Object Info.bru new file mode 100644 index 0000000..d1a833c --- /dev/null +++ b/configs/bruno/ComfyUI/Get Object Info.bru @@ -0,0 +1,11 @@ +meta { + name: Get Object Info + type: http + seq: 4 +} + +get { + url: {{BASE_URL}}/object_info + body: none + auth: none +} diff --git a/configs/bruno/ComfyUI/Get Prompt History.bru b/configs/bruno/ComfyUI/Get Prompt History.bru new file mode 100644 index 0000000..2e26888 --- /dev/null +++ b/configs/bruno/ComfyUI/Get Prompt History.bru @@ -0,0 +1,15 @@ +meta { + name: Get Prompt History + type: http + seq: 6 +} + +get { + url: {{BASE_URL}}/history/{{prompt_id}} + body: none + auth: none +} + +vars:pre-request { + prompt_id: 7e345a55-21c4-4bdc-9b34-add561775144 +} diff --git a/configs/bruno/ComfyUI/Post Prompt.bru b/configs/bruno/ComfyUI/Post Prompt.bru new file mode 100644 index 0000000..09bf89a --- /dev/null +++ b/configs/bruno/ComfyUI/Post Prompt.bru @@ -0,0 +1,124 @@ +meta { + name: Post Prompt + type: http + seq: 5 +} + +post { + url: {{BASE_URL}}/prompt + body: json + auth: none +} + +body:json { + { + "client_id": "crupest", + "prompt": { + "3": { + "inputs": { + "seed": 156680208700286, + "steps": 20, + "cfg": 8, + "sampler_name": "euler", + "scheduler": "normal", + "denoise": 1, + "model": [ + "4", + 0 + ], + "positive": [ + "6", + 0 + ], + "negative": [ + "7", + 0 + ], + "latent_image": [ + "5", + 0 + ] + }, + "class_type": "KSampler", + "_meta": { + "title": "KSampler" + } + }, + "4": { + "inputs": { + "ckpt_name": "SUPIR/SUPIR-v0Q.ckpt" + }, + "class_type": "CheckpointLoaderSimple", + "_meta": { + "title": "Load Checkpoint" + } + }, + "5": { + "inputs": { + "width": 512, + "height": 512, + "batch_size": 1 + }, + "class_type": "EmptyLatentImage", + "_meta": { + "title": "Empty Latent Image" + } + }, + "6": { + "inputs": { + "text": "beautiful scenery nature glass bottle landscape, , purple galaxy bottle,", + "clip": [ + "4", + 1 + ] + }, + "class_type": "CLIPTextEncode", + "_meta": { + "title": "CLIP Text Encode (Prompt)" + } + }, + "7": { + "inputs": { + "text": "text, watermark", + "clip": [ + "4", + 1 + ] + }, + "class_type": "CLIPTextEncode", + "_meta": { + "title": "CLIP Text Encode (Prompt)" + } + }, + "8": { + "inputs": { + "samples": [ + "3", + 0 + ], + "vae": [ + "4", + 2 + ] + }, + "class_type": "VAEDecode", + "_meta": { + "title": "VAE Decode" + } + }, + "9": { + "inputs": { + "filename_prefix": "ComfyUI", + "images": [ + "8", + 0 + ] + }, + "class_type": "SaveImage", + "_meta": { + "title": "Save Image" + } + } + } + } +} diff --git a/configs/bruno/ComfyUI/Upload Image.bru b/configs/bruno/ComfyUI/Upload Image.bru new file mode 100644 index 0000000..92b4aeb --- /dev/null +++ b/configs/bruno/ComfyUI/Upload Image.bru @@ -0,0 +1,18 @@ +meta { + name: Upload Image + type: http + seq: 2 +} + +post { + url: {{BASE_URL}}/upload/image + body: multipartForm + auth: none +} + +body:multipart-form { + overwrite: true + type: input + subfolder: crupest-test + image: @file(/Users/crupest/codes/crupest/assets/crupest-transparent.png) +} diff --git a/configs/bruno/ComfyUI/View Image.bru b/configs/bruno/ComfyUI/View Image.bru new file mode 100644 index 0000000..395eccd --- /dev/null +++ b/configs/bruno/ComfyUI/View Image.bru @@ -0,0 
+1,19 @@
+meta {
+  name: View Image
+  type: http
+  seq: 1
+}
+
+get {
+  url: {{BASE_URL}}/view?filename=crupest-transparent.png&type=input&subfolder=crupest-test&preview=jpeg;90&channel=rgb
+  body: none
+  auth: none
+}
+
+query {
+  filename: crupest-transparent.png
+  type: input
+  subfolder: crupest-test
+  preview: jpeg;90
+  channel: rgb
+}
diff --git a/configs/bruno/ComfyUI/bruno.json b/configs/bruno/ComfyUI/bruno.json
new file mode 100644
index 0000000..ee35540
--- /dev/null
+++ b/configs/bruno/ComfyUI/bruno.json
@@ -0,0 +1,9 @@
+{
+  "version": "1",
+  "name": "ComfyUI",
+  "type": "collection",
+  "ignore": [
+    "node_modules",
+    ".git"
+  ]
+}
\ No newline at end of file
diff --git a/configs/bruno/ComfyUI/environments/ChimerAI ComfyUI Server.bru b/configs/bruno/ComfyUI/environments/ChimerAI ComfyUI Server.bru
new file mode 100644
index 0000000..480c8da
--- /dev/null
+++ b/configs/bruno/ComfyUI/environments/ChimerAI ComfyUI Server.bru
@@ -0,0 +1,3 @@
+vars:secret [
+  BASE_URL
+]
diff --git a/configs/crupest-winget.json b/configs/crupest-winget.json
new file mode 100644
index 0000000..df2e7d9
--- /dev/null
+++ b/configs/crupest-winget.json
@@ -0,0 +1,104 @@
+{
+ "$schema" : "https://aka.ms/winget-packages.schema.2.0.json",
+ "CreationDate" : "2024-10-31T18:34:15.174-00:00",
+ "Sources" :
+ [
+ {
+ "Packages" :
+ [
+ {
+ "PackageIdentifier" : "7zip.7zip"
+ },
+ {
+ "PackageIdentifier" : "Docker.DockerDesktop"
+ },
+ {
+ "PackageIdentifier" : "Git.Git"
+ },
+ {
+ "PackageIdentifier" : "Mozilla.Firefox"
+ },
+ {
+ "PackageIdentifier" : "Mozilla.Thunderbird"
+ },
+ {
+ "PackageIdentifier" : "VideoLAN.VLC"
+ },
+ {
+ "PackageIdentifier" : "vim.vim"
+ },
+ {
+ "PackageIdentifier" : "Neovim.Neovim"
+ },
+ {
+ "PackageIdentifier" : "OpenJS.NodeJS"
+ },
+ {
+ "PackageIdentifier" : "voidtools.Everything"
+ },
+ {
+ "PackageIdentifier" : "Neovide.Neovide"
+ },
+ {
+ "PackageIdentifier" : "Microsoft.PowerShell"
+ },
+ {
+ "PackageIdentifier" : "Kitware.CMake"
+ },
+ {
+ "PackageIdentifier" : "JetBrains.PyCharm.Community"
+ },
+ {
+ "PackageIdentifier" : "Tencent.QQ.NT"
+ },
+ {
+ "PackageIdentifier" : "Tencent.WeChat"
+ },
+ {
+ "PackageIdentifier" : "Python.Launcher"
+ },
+ {
+ "PackageIdentifier" : "NetEase.CloudMusic"
+ },
+ {
+ "PackageIdentifier" : "agalwood.Motrix"
+ },
+ {
+ "PackageIdentifier" : "BurntSushi.ripgrep.MSVC"
+ },
+ {
+ "PackageIdentifier" : "Microsoft.VisualStudio.Locator"
+ },
+ {
+ "PackageIdentifier" : "Ninja-build.Ninja"
+ },
+ {
+ "PackageIdentifier" : "Rufus.Rufus"
+ },
+ {
+ "PackageIdentifier" : "Rustlang.Rustup"
+ },
+ {
+ "PackageIdentifier" : "Python.Python.3.13"
+ },
+ {
+ "PackageIdentifier" : "Microsoft.PowerToys"
+ },
+ {
+ "PackageIdentifier" : "Microsoft.VisualStudioCode"
+ },
+ {
+ "PackageIdentifier" : "Microsoft.WinDbg"
+ }
+ ],
+ "SourceDetails" :
+ {
+ "Argument" : "https://cdn.winget.microsoft.com/cache",
+ "Identifier" : "Microsoft.Winget.Source_8wekyb3d8bbwe",
+ "Name" : "winget",
+ "Type" : "Microsoft.PreIndexed.Package"
+ }
+ }
+ ],
+ "WinGetVersion" : "1.9.2411-preview"
+}
\ No newline at end of file diff --git a/configs/magic/extend-script.js b/configs/magic/extend-script.js new file mode 100644 index 0000000..519cee9 --- /dev/null +++ b/configs/magic/extend-script.js @@ -0,0 +1,21 @@ +// Define main function (script entry) + +function main(config, profileName) { + delete config.dns; + delete config.tun; + delete config.hosts; + + delete config["cfw-latency-timeout"] + delete config["cfw-latency-url"] + delete config["cfw-conn-break-strategy"] + + config["proxies"] = [config["crupest-proxy"], ...config["proxies"]] + delete config["crupest-proxy"] + + select_proxy = { name: "node-select", type: "select", proxies: ["auto-select", ...config.proxies.map(p => p.name)] } + auto_select_proxy = config["crupest-auto-select"] + config["proxy-groups"] = [ select_proxy, auto_select_proxy ] + delete config["crupest-auto-select"] + + return config; +} diff --git a/configs/magic/extend.yaml b/configs/magic/extend.yaml new file mode 100644 index 0000000..3006f08 --- /dev/null +++ b/configs/magic/extend.yaml @@ -0,0 +1,65 @@ +# Profile Enhancement Merge Template for Clash Verge + +profile: + store-selected: true + +rules: + - "GEOSITE,github,node-select" + - "GEOSITE,google,node-select" + - "GEOSITE,youtube,node-select" + - "GEOSITE,twitter,node-select" + - "GEOSITE,facebook,node-select" + - "GEOSITE,discord,node-select" + - "GEOSITE,reddit,node-select" + - "GEOSITE,twitch,node-select" + - "GEOSITE,quora,node-select" + - "GEOSITE,telegram,node-select" + - "GEOSITE,imgur,node-select" + - "GEOSITE,stackexchange,node-select" + - "GEOSITE,onedrive,node-select" + + - "GEOSITE,duckduckgo,node-select" + - "GEOSITE,wikimedia,node-select" + - "GEOSITE,gitbook,node-select" + - "GEOSITE,gitlab,node-select" + - "GEOSITE,creativecommons,node-select" + - "GEOSITE,archive,node-select" + - "GEOSITE,matrix,node-select" + - "GEOSITE,tor,node-select" + + - "GEOSITE,python,node-select" + - "GEOSITE,ruby,node-select" + - "GEOSITE,rust,node-select" + - "GEOSITE,nodejs,node-select" + - "GEOSITE,npmjs,node-select" + - "GEOSITE,qt,node-select" + - "GEOSITE,docker,node-select" + - "GEOSITE,v2ray,node-select" + - "GEOSITE,homebrew,node-select" + - "GEOSITE,bootstrap,node-select" + + - "GEOSITE,heroku,node-select" + - "GEOSITE,vercel,node-select" + + - "GEOSITE,ieee,node-select" + - "GEOSITE,sci-hub,node-select" + - "GEOSITE,libgen,node-select" + + - "DOMAIN-SUFFIX,gnu.org,node-select" + - "DOMAIN-SUFFIX,nongnu.org,node-select" + - "DOMAIN-SUFFIX,ietf.org,node-select" + - "DOMAIN-SUFFIX,packagist.org,node-select" + - "DOMAIN-SUFFIX,metacubex.one,node-select" + - "MATCH,DIRECT" + +crupest-proxy: + ... + +crupest-auto-select: + name: "auto-select" + type: url-test + interval: 1800 + include-all-proxies: true + url: 'https://www.gstatic.com/generate_204' + filter: "日本|新加坡|香港|台湾|美国" + expected-status: 204 diff --git a/configs/nvim/config-root/.gitignore b/configs/nvim/config-root/.gitignore new file mode 100644 index 0000000..722d5e7 --- /dev/null +++ b/configs/nvim/config-root/.gitignore @@ -0,0 +1 @@ +.vscode diff --git a/configs/nvim/config-root/.luarc.json b/configs/nvim/config-root/.luarc.json new file mode 100644 index 0000000..f704d01 --- /dev/null +++ b/configs/nvim/config-root/.luarc.json @@ -0,0 +1,4 @@ +{ + "$schema": "https://raw.githubusercontent.com/LuaLS/vscode-lua/master/setting/schema.json", + "runtime.version": "LuaJIT" +}
\ No newline at end of file diff --git a/configs/nvim/config-root/cspell.yaml b/configs/nvim/config-root/cspell.yaml new file mode 100644 index 0000000..2a716e2 --- /dev/null +++ b/configs/nvim/config-root/cspell.yaml @@ -0,0 +1,13 @@ +dictionaryDefinitions: + - name: nvim-words + path: './nvim-words.txt' + addWords: true + +dictionaries: + - nvim-words + +words: + - crupest + +ignorePaths: + - lazy-lock.json diff --git a/configs/nvim/config-root/init.lua b/configs/nvim/config-root/init.lua new file mode 100644 index 0000000..9de0b2c --- /dev/null +++ b/configs/nvim/config-root/init.lua @@ -0,0 +1,63 @@ +if vim.g.neovide then + -- spellchecker: disable-next-line + vim.opt.guifont = "FiraCode Nerd Font"; + vim.g.neovide_window_blurred = true; + vim.g.neovide_transparency = 0.9; + vim.g.neovide_input_ime = false; + vim.g.neovide_cursor_animate_in_insert_mode = false + vim.g.neovide_cursor_vfx_mode = "pixiedust"; + vim.g.neovide_input_macos_option_key_is_meta = 'only_left' +end + +local is_win = vim.fn.has("win32") ~= 0 + +-- spellchecker: disable +if is_win then + vim.cmd([[ + let &shell = executable('pwsh') ? 'pwsh' : 'powershell' + let &shellcmdflag = '-NoLogo -ExecutionPolicy RemoteSigned -Command [Console]::InputEncoding=[Console]::OutputEncoding=[System.Text.UTF8Encoding]::new();$PSDefaultParameterValues[''Out-File:Encoding'']=''utf8'';Remove-Alias -Force -ErrorAction SilentlyContinue tee;' + let &shellredir = '2>&1 | %%{ "$_" } | Out-File %s; exit $LastExitCode' + let &shellpipe = '2>&1 | %%{ "$_" } | tee %s; exit $LastExitCode' + set shellquote= shellxquote= + ]]) + vim.opt.completeslash = 'slash' +end +-- spellchecker: enable + +-- spellchecker: disable +vim.opt.termguicolors = true; +vim.opt.fileformats = "unix,dos"; +vim.opt.softtabstop = 4; +vim.opt.shiftwidth = 4; +vim.opt.expandtab = true; +vim.opt.wrap = false; +vim.opt.number = true; +-- spellchecker: enable + +vim.g.load_doxygen_syntax = true; +vim.g.doxygen_javadoc_autobrief = false; + +-- Init lazy.nvim +local lazy_path = vim.fn.stdpath("data") .. 
"/lazy/lazy.nvim" +if not vim.uv.fs_stat(lazy_path) then + vim.fn.system({ + "git", + "clone", + "--filter=blob:none", + "https://github.com/folke/lazy.nvim.git", + "--branch=stable", -- latest stable release + lazy_path, + }) +end +vim.opt.rtp:prepend(lazy_path) + +-- Use lazy.nvim +require("lazy").setup("plugins") + +vim.cmd("colorscheme catppuccin-macchiato") + +require("crupest.nvim.lsp").setup() +require("crupest.nvim.plugins").setup() +require("crupest.nvim.keymap").setup() + +vim.cmd("autocmd FileType gitcommit,gitrebase,gitconfig set bufhidden=delete") diff --git a/configs/nvim/config-root/lazy-lock.json b/configs/nvim/config-root/lazy-lock.json new file mode 100644 index 0000000..3d08239 --- /dev/null +++ b/configs/nvim/config-root/lazy-lock.json @@ -0,0 +1,20 @@ +{ + "LuaSnip": { "branch": "master", "commit": "0f7bbce41ea152a94d12aea286f2ce98e63c0f58" }, + "catppuccin": { "branch": "main", "commit": "faf15ab0201b564b6368ffa47b56feefc92ce3f4" }, + "cmp-buffer": { "branch": "main", "commit": "3022dbc9166796b644a841a02de8dd1cc1d311fa" }, + "cmp-cmdline": { "branch": "main", "commit": "d250c63aa13ead745e3a40f61fdd3470efde3923" }, + "cmp-nvim-lsp": { "branch": "main", "commit": "39e2eda76828d88b773cc27a3f61d2ad782c922d" }, + "cmp-path": { "branch": "main", "commit": "91ff86cd9c29299a64f968ebb45846c485725f23" }, + "cmp_luasnip": { "branch": "master", "commit": "98d9cb5c2c38532bd9bdb481067b20fea8f32e90" }, + "gitsigns.nvim": { "branch": "main", "commit": "5f808b5e4fef30bd8aca1b803b4e555da07fc412" }, + "lazy.nvim": { "branch": "main", "commit": "56ead98e05bb37a4ec28930a54d836d033cf00f2" }, + "lualine.nvim": { "branch": "master", "commit": "2a5bae925481f999263d6f5ed8361baef8df4f83" }, + "nvim-autopairs": { "branch": "master", "commit": "b464658e9b880f463b9f7e6ccddd93fb0013f559" }, + "nvim-cmp": { "branch": "main", "commit": "ed31156aa2cc14e3bc066c59357cc91536a2bc01" }, + "nvim-lint": { "branch": "master", "commit": "6b46370d02cd001509a765591a3ffc481b538794" }, + "nvim-lspconfig": { "branch": "master", "commit": "4ae9796c4e95ca84ec77946a9f9089b8f1a3eec9" }, + "nvim-tree.lua": { "branch": "master", "commit": "ca7c4c33cac2ad66ec69d45e465379716ef0cc97" }, + "nvim-web-devicons": { "branch": "master", "commit": "edbe0a65cfacbbfff6a4a1e98ddd60c28c560509" }, + "plenary.nvim": { "branch": "master", "commit": "2d9b06177a975543726ce5c73fca176cedbffe9d" }, + "telescope.nvim": { "branch": "master", "commit": "85922dde3767e01d42a08e750a773effbffaea3e" } +} diff --git a/configs/nvim/config-root/lua/crupest/nvim/keymap.lua b/configs/nvim/config-root/lua/crupest/nvim/keymap.lua new file mode 100644 index 0000000..624c04c --- /dev/null +++ b/configs/nvim/config-root/lua/crupest/nvim/keymap.lua @@ -0,0 +1,9 @@ +local function setup() + vim.keymap.set("n", "<c-tab>", "<cmd>bnext<cr>") + vim.keymap.set("n", "<c-s-tab>", "<cmd>bNext<cr>") + vim.keymap.set("n", "<esc>", require("crupest.utils.nvim").close_float) +end + +return { + setup = setup +} diff --git a/configs/nvim/config-root/lua/crupest/nvim/lsp/c.lua b/configs/nvim/config-root/lua/crupest/nvim/lsp/c.lua new file mode 100644 index 0000000..bb1f6f7 --- /dev/null +++ b/configs/nvim/config-root/lua/crupest/nvim/lsp/c.lua @@ -0,0 +1,25 @@ +local lspconfig = require("lspconfig") + +local brew_clangd_path = "/usr/local/opt/llvm/bin/clangd" + +local function setup() + local clangd = "clangd" + + if vim.uv.fs_stat(brew_clangd_path) ~= nil then + clangd = brew_clangd_path + end + + -- setup lsp clangd + lspconfig.clangd.setup { + cmd = { clangd }, + on_attach 
= function(_, bufnr) + vim.keymap.set('n', '<space>s', "<cmd>ClangdSwitchSourceHeader<cr>", { + buffer = bufnr + }) + end + } +end + +return { + setup = setup +} diff --git a/configs/nvim/config-root/lua/crupest/nvim/lsp/init.lua b/configs/nvim/config-root/lua/crupest/nvim/lsp/init.lua new file mode 100644 index 0000000..0fd29a3 --- /dev/null +++ b/configs/nvim/config-root/lua/crupest/nvim/lsp/init.lua @@ -0,0 +1,50 @@ +local lspconfig = require("lspconfig") +local cmp_nvim_lsp = require("cmp_nvim_lsp") +local cmp_default_caps = cmp_nvim_lsp.default_capabilities() + +local lspconfig_default_caps = lspconfig.util.default_config.capabilities + +lspconfig.util.default_config = vim.tbl_extend( + "force", + lspconfig.util.default_config, + { + capabilities = vim.tbl_extend("force", lspconfig_default_caps, cmp_default_caps), + autostart = false, + }) + +local function setup() + lspconfig.cmake.setup {} + lspconfig.bashls.setup {} + require("crupest.nvim.lsp.c").setup() + require("crupest.nvim.lsp.lua").setup() + + -- Use LspAttach auto command to only map the following keys + -- after the language server attaches to the current buffer + vim.api.nvim_create_autocmd('LspAttach', { + group = vim.api.nvim_create_augroup('UserLspConfig', {}), + callback = function(ev) + -- Buffer local mappings. + -- See `:help vim.lsp.*` for documentation on any of the below functions + local opts = { buffer = ev.buf } + vim.keymap.set('n', 'gD', vim.lsp.buf.declaration, opts) + vim.keymap.set('n', 'gd', vim.lsp.buf.definition, opts) + vim.keymap.set('n', 'gi', vim.lsp.buf.implementation, opts) + vim.keymap.set('n', '<C-k>', vim.lsp.buf.signature_help, opts) + vim.keymap.set('n', '<space>wa', vim.lsp.buf.add_workspace_folder, opts) + vim.keymap.set('n', '<space>wr', vim.lsp.buf.remove_workspace_folder, opts) + vim.keymap.set('n', '<space>wl', function() + print(vim.inspect(vim.lsp.buf.list_workspace_folders())) + end, opts) + vim.keymap.set('n', '<space>D', vim.lsp.buf.type_definition, opts) + vim.keymap.set('n', '<space>rn', vim.lsp.buf.rename, opts) + vim.keymap.set({ 'n', 'v' }, '<space>ca', vim.lsp.buf.code_action, opts) + vim.keymap.set('n', 'gr', vim.lsp.buf.references, opts) + vim.keymap.set('n', '<space>f', vim.lsp.buf.format, opts) + end, + }) +end + + +return { + setup = setup +} diff --git a/configs/nvim/config-root/lua/crupest/nvim/lsp/lua.lua b/configs/nvim/config-root/lua/crupest/nvim/lsp/lua.lua new file mode 100644 index 0000000..93aa503 --- /dev/null +++ b/configs/nvim/config-root/lua/crupest/nvim/lsp/lua.lua @@ -0,0 +1,29 @@ +local lspconfig = require("lspconfig") + +local function setup() + lspconfig.lua_ls.setup { + settings = { + Lua = { + runtime = { + version = "LuaJIT" + }, + diagnostics = { + globals = { "vim" }, + }, + workspace = { + library = { + [vim.fn.expand "$VIMRUNTIME/lua"] = true, + [vim.fn.expand "$VIMRUNTIME/lua/vim/lsp"] = true, + [vim.fn.stdpath "data" .. 
"/lazy/lazy.nvim/lua/lazy"] = true, + }, + maxPreload = 100000, + preloadFileSize = 10000, + }, + }, + }, + } +end + +return { + setup = setup +} diff --git a/configs/nvim/config-root/lua/crupest/nvim/plugins/cmp.lua b/configs/nvim/config-root/lua/crupest/nvim/plugins/cmp.lua new file mode 100644 index 0000000..9b1d876 --- /dev/null +++ b/configs/nvim/config-root/lua/crupest/nvim/plugins/cmp.lua @@ -0,0 +1,31 @@ +local function setup() + local cmp = require("cmp") + local luasnip = require("luasnip") + + cmp.setup { + snippet = { + expand = function(args) + luasnip.lsp_expand(args.body) + end, + }, + window = { + }, + mapping = cmp.mapping.preset.insert({ + ['<C-b>'] = cmp.mapping.scroll_docs(-4), + ['<C-f>'] = cmp.mapping.scroll_docs(4), + ['<C-Space>'] = cmp.mapping.complete(), + ['<C-e>'] = cmp.mapping.abort(), + ['<CR>'] = cmp.mapping.confirm({ select = true }), -- Accept currently selected item. Set `select` to `false` to only confirm explicitly selected items. + }), + sources = cmp.config.sources({ + { name = 'nvim_lsp' }, + { name = 'luasnip' }, + }, { + { name = 'buffer' }, + }) + } +end + +return { + setup = setup +} diff --git a/configs/nvim/config-root/lua/crupest/nvim/plugins/gitsigns.lua b/configs/nvim/config-root/lua/crupest/nvim/plugins/gitsigns.lua new file mode 100644 index 0000000..220c91a --- /dev/null +++ b/configs/nvim/config-root/lua/crupest/nvim/plugins/gitsigns.lua @@ -0,0 +1,51 @@ +local function setup() + local gitsigns = require('gitsigns') + gitsigns.setup { + on_attach = function(bufnr) + local function map(mode, l, r, opts) + opts = opts or {} + opts.buffer = bufnr + vim.keymap.set(mode, l, r, opts) + end + + -- Navigation + map('n', ']c', function() + if vim.wo.diff then + vim.cmd.normal({ ']c', bang = true }) + else + gitsigns.nav_hunk('next') + end + end) + + map('n', '[c', function() + if vim.wo.diff then + vim.cmd.normal({ '[c', bang = true }) + else + gitsigns.nav_hunk('prev') + end + end) + + -- Actions + map('n', '<leader>hs', gitsigns.stage_hunk) + map('n', '<leader>hr', gitsigns.reset_hunk) + map('v', '<leader>hs', function() gitsigns.stage_hunk { vim.fn.line('.'), vim.fn.line('v') } end) + map('v', '<leader>hr', function() gitsigns.reset_hunk { vim.fn.line('.'), vim.fn.line('v') } end) + map('n', '<leader>hS', gitsigns.stage_buffer) + map('n', '<leader>hu', gitsigns.undo_stage_hunk) + map('n', '<leader>hR', gitsigns.reset_buffer) + map('n', '<leader>hp', gitsigns.preview_hunk) + map('n', '<leader>hb', function() gitsigns.blame_line { full = true } end) + map('n', '<leader>tb', gitsigns.toggle_current_line_blame) + map('n', '<leader>hd', gitsigns.diffthis) + map('n', '<leader>hD', function() gitsigns.diffthis('~') end) + map('n', '<leader>td', gitsigns.toggle_deleted) + + -- Text object + map({ 'o', 'x' }, 'ih', ':<C-U>Gitsigns select_hunk<CR>') + end + } +end + +return { + setup = setup +} diff --git a/configs/nvim/config-root/lua/crupest/nvim/plugins/init.lua b/configs/nvim/config-root/lua/crupest/nvim/plugins/init.lua new file mode 100644 index 0000000..24e0c2e --- /dev/null +++ b/configs/nvim/config-root/lua/crupest/nvim/plugins/init.lua @@ -0,0 +1,12 @@ +local function setup() + require("crupest.nvim.plugins.lint").setup() + require("crupest.nvim.plugins.snip").setup() + require("crupest.nvim.plugins.cmp").setup() + require("crupest.nvim.plugins.telescope").setup() + require("crupest.nvim.plugins.gitsigns").setup() + require("crupest.nvim.plugins.others").setup() +end + +return { + setup = setup +} diff --git 
a/configs/nvim/config-root/lua/crupest/nvim/plugins/lint.lua b/configs/nvim/config-root/lua/crupest/nvim/plugins/lint.lua new file mode 100644 index 0000000..5e348d6 --- /dev/null +++ b/configs/nvim/config-root/lua/crupest/nvim/plugins/lint.lua @@ -0,0 +1,82 @@ +local lint = require("lint") + +local find = require('crupest.utils.find') +local is_win = vim.fn.has("win32") ~= 0 + +local cspell_config_patterns = { + ".cspell.json", + "cspell.json", + ".cSpell.json", + "cSpell.json", + "cspell.config.js", + "cspell.config.cjs", + "cspell.config.json", + "cspell.config.yaml", + "cspell.config.yml", + "cspell.yaml", + "cspell.yml", +} + +--- @type FindExeForBufOpts[] +local my_linters = { + { + name = "cspell", + places = { "node_modules", "global" }, + config_files = cspell_config_patterns, + }, +} + +local function run(opt) + if not opt then + opt = {} + end + + if not opt.buf then + opt.buf = 0 + end + + local linters = {} + + for _, l in ipairs(my_linters) do + local linter = find.find_exe_for_buf(opt.buf, l) + if linter then table.insert(linters, linter) end + end + + + local linter_names = {} + + for _, linter in ipairs(linters) do + table.insert(linter_names, linter.name) + require('lint.linters.' .. linter.name).cmd = linter.exe_path + end + + lint.try_lint(linter_names) +end + +local function setup() + if is_win then + for _, l in ipairs(my_linters) do + local name = l.name + local linter = require('lint.linters.' .. name) + if linter.cmd == 'cmd.exe' then + linter.cmd = linter.args[2] + end + table.remove(linter.args, 1) + table.remove(linter.args, 1) + end + end + + vim.api.nvim_create_autocmd({ "BufWritePost" }, { + callback = function(opt) + run({ + buf = opt.buffer + }) + end, + }) + + vim.keymap.set('n', '<leader>lr', run) +end + +return { + setup = setup, +} diff --git a/configs/nvim/config-root/lua/crupest/nvim/plugins/others.lua b/configs/nvim/config-root/lua/crupest/nvim/plugins/others.lua new file mode 100644 index 0000000..2ef0d75 --- /dev/null +++ b/configs/nvim/config-root/lua/crupest/nvim/plugins/others.lua @@ -0,0 +1,9 @@ +local function setup() + require('lualine').setup {} + require("nvim-tree").setup {} + require("nvim-autopairs").setup {} +end + +return { + setup = setup +} diff --git a/configs/nvim/config-root/lua/crupest/nvim/plugins/snip.lua b/configs/nvim/config-root/lua/crupest/nvim/plugins/snip.lua new file mode 100644 index 0000000..78ed2eb --- /dev/null +++ b/configs/nvim/config-root/lua/crupest/nvim/plugins/snip.lua @@ -0,0 +1,75 @@ +--- spellchecker: disable + +local luasnip = require("luasnip") + +local ls = luasnip +-- some shorthands... 
+local s = ls.snippet +local sn = ls.snippet_node +local t = ls.text_node +local i = ls.insert_node +local f = ls.function_node +local c = ls.choice_node +local d = ls.dynamic_node +local r = ls.restore_node +local l = require("luasnip.extras").lambda +local rep = require("luasnip.extras").rep +local p = require("luasnip.extras").partial +local m = require("luasnip.extras").match +local n = require("luasnip.extras").nonempty +local dl = require("luasnip.extras").dynamic_lambda +local fmt = require("luasnip.extras.fmt").fmt +local fmta = require("luasnip.extras.fmt").fmta +local types = require("luasnip.util.types") +local conds = require("luasnip.extras.conditions") +local conds_expand = require("luasnip.extras.conditions.expand") + +local function copy(args) + return args[1] +end + +local function setup() + vim.keymap.set({ "i", "s" }, "<C-L>", function() luasnip.jump(1) end, { silent = true }) + vim.keymap.set({ "i", "s" }, "<C-J>", function() luasnip.jump(-1) end, { silent = true }) + + vim.keymap.set({ "i", "s" }, "<C-E>", function() + if luasnip.choice_active() then + luasnip.change_choice(1) + end + end, { silent = true }) + + luasnip.add_snippets("cpp", { + s("cs", { + i(1, "classname"), + t("::"), + f(copy, 1), + t("("), + i(0), + t(") { }") + }), + + s("ds", { + i(1, "classname"), + t("::~"), + f(copy, 1), + t("() { }") + }), + + s("csds", { + i(1, "classname"), + t("::"), + f(copy, 1), + t("("), + i(0), + t({ ") { }", "", "" }), + f(copy, 1), + t("::~"), + f(copy, 1), + t("() { }") + }), + }) +end + +return { + setup = setup, +} diff --git a/configs/nvim/config-root/lua/crupest/nvim/plugins/telescope.lua b/configs/nvim/config-root/lua/crupest/nvim/plugins/telescope.lua new file mode 100644 index 0000000..d68b7f2 --- /dev/null +++ b/configs/nvim/config-root/lua/crupest/nvim/plugins/telescope.lua @@ -0,0 +1,11 @@ +local function setup() + local builtin = require('telescope.builtin') + vim.keymap.set('n', '<leader>ff', builtin.find_files, {}) + vim.keymap.set('n', '<leader>fg', builtin.live_grep, {}) + vim.keymap.set('n', '<leader>fb', builtin.buffers, {}) + vim.keymap.set('n', '<leader>fh', builtin.help_tags, {}) +end + +return { + setup = setup +} diff --git a/configs/nvim/config-root/lua/crupest/utils/find.lua b/configs/nvim/config-root/lua/crupest/utils/find.lua new file mode 100644 index 0000000..dd1f663 --- /dev/null +++ b/configs/nvim/config-root/lua/crupest/utils/find.lua @@ -0,0 +1,101 @@ +local is_win = vim.fn.has("win32") ~= 0 + +local M = {} + +local windows_exe_ext = { "exe", "bat", "cmd", "ps1" } + +--- Find real path (with ext) for an executable. +--- @param dir string +--- @param name string | string[] +--- @return string | nil +function M.find_exe_file(dir, name) + if type(name) == "string" then + name = { name } + end + for _, n in ipairs(name) do + if vim.uv.fs_stat(vim.fs.joinpath(dir, n)) ~= nil then + return n + end + if is_win then + for _, ext in ipairs(windows_exe_ext) do + if vim.uv.fs_stat(vim.fs.joinpath(dir, n .. "." .. ext)) ~= nil then + return n .. "." .. ext + end + end + end + end + return nil +end + +--- Walk up until found an executable in node_modules. +--- @param path string +--- @param name string +--- @return string | nil exe_path Path to the executable. 
+function M.find_node_modules_exe(path, name) + local bin_dirs = vim.fs.find("node_modules/.bin", { path = path, upward = true, type = "directory" }) + if #bin_dirs == 0 then return nil end + local exe = M.find_exe_file(bin_dirs[1], name) + return exe and vim.fs.joinpath(bin_dirs[1], exe) +end + +--- Find executable in PATH. +--- @param name string +--- @return string | nil +function M.find_global_exe(name) + local exe = vim.fn.exepath(name) + if exe == "" then return nil end + return exe +end + +--- @alias ExePlace "node_modules" | "global" +--- @param path string +--- @param name string +--- @param places ExePlace[] +--- @return string | nil, ExePlace? +function M.find_exe(path, name, places) + for _, place in ipairs(places) do + if place == "node_modules" then + local r = M.find_node_modules_exe(path, name) + if r then return r, "node_modules" end + end + if place == "global" then + local r = M.find_global_exe(name) + if r then return r, "global" end + end + end + return nil, nil +end + +--- @alias FindExeForBufOpts { name: string, exe: string?, places: ExePlace[], config_files: string[]?, filetypes: string[]? } +--- @alias FindExeForBufResult { name: string, file: string, exe: string, exe_path: string, place: ExePlace, config_file: string?, filetype: string? } +--- @param buf number +--- @param opts FindExeForBufOpts +--- @return FindExeForBufResult | nil +function M.find_exe_for_buf(buf, opts) + local r = {} --- @type FindExeForBufResult + r.name = opts.name + r.file = vim.api.nvim_buf_get_name(buf) + r.exe = opts.exe or opts.name + + if opts.filetypes then + r.filetype = vim.api.nvim_get_option_value("filetype", { scope = "buffer", buf = buf }) + if not vim.tbl_contains(opts.filetypes, r.filetype) then return nil end + end + + if opts.config_files then + local config_file_list = vim.fs.find(opts.config_files, { path = r.file, upward = true }) + if #config_file_list == 0 then return nil end + r.config_file = config_file_list[1] + end + + local exe_path, place = M.find_exe(r.file, r.exe, opts.places) + if exe_path == nil then return nil end + r.exe_path = exe_path + + --- @cast place ExePlace + r.place = place + + return r +end + +return M diff --git a/configs/nvim/config-root/lua/crupest/utils/nvim.lua b/configs/nvim/config-root/lua/crupest/utils/nvim.lua new file mode 100644 index 0000000..4477ecc --- /dev/null +++ b/configs/nvim/config-root/lua/crupest/utils/nvim.lua @@ -0,0 +1,12 @@ +local M = {} + +function M.close_float() + local wins = vim.api.nvim_list_wins() + for _, v in ipairs(wins) do + if vim.api.nvim_win_get_config(v).relative ~= '' then + vim.api.nvim_win_close(v, false) + end + end +end + +return M diff --git a/configs/nvim/config-root/lua/plugins.lua b/configs/nvim/config-root/lua/plugins.lua new file mode 100644 index 0000000..b689559 --- /dev/null +++ b/configs/nvim/config-root/lua/plugins.lua @@ -0,0 +1,30 @@ +-- spellchecker: disable +return { + { "catppuccin/nvim", name = "catppuccin", priority = 1000 }, + "neovim/nvim-lspconfig", + "L3MON4D3/LuaSnip", + "hrsh7th/nvim-cmp", + "hrsh7th/cmp-nvim-lsp", + "hrsh7th/cmp-buffer", + "hrsh7th/cmp-path", + "hrsh7th/cmp-cmdline", + "saadparwaiz1/cmp_luasnip", + { + "nvim-tree/nvim-tree.lua", + lazy = false, + dependencies = { + "nvim-tree/nvim-web-devicons", + }, + }, + { + "nvim-lualine/lualine.nvim", + dependencies = { 'nvim-tree/nvim-web-devicons' } + }, + { + "nvim-telescope/telescope.nvim", + dependencies = { 'nvim-lua/plenary.nvim' } + }, + "windwp/nvim-autopairs", + "mfussenegger/nvim-lint", + 
"lewis6991/gitsigns.nvim", +} diff --git a/configs/nvim/config-root/nvim-words.txt b/configs/nvim/config-root/nvim-words.txt new file mode 100644 index 0000000..564904f --- /dev/null +++ b/configs/nvim/config-root/nvim-words.txt @@ -0,0 +1,27 @@ +nvim +vimruntime +bnext +bufhidden +exepath +stdpath +augroup +autocmd +autobrief +autopairs +keymap +joinpath +bufnr + +neovide +pixiedust + +lspconfig +clangd +bashls + +catppuccin +macchiato +diffthis +gitsigns +lualine +luasnip diff --git a/configs/nvim/copy-nvim-config b/configs/nvim/copy-nvim-config new file mode 100755 index 0000000..92863a9 --- /dev/null +++ b/configs/nvim/copy-nvim-config @@ -0,0 +1,6 @@ +#!/usr/bin/env bash + +MYDIR="$(dirname "$(realpath "$0")")/config-root" + +mkdir -p ${MYDIR} +cp -R ~/.config/nvim/. ${MYDIR} diff --git a/configs/nvim/homebrew-packages b/configs/nvim/homebrew-packages new file mode 100644 index 0000000..03a95bf --- /dev/null +++ b/configs/nvim/homebrew-packages @@ -0,0 +1,6 @@ +llvm +cmake-language-server +bash-language-server +shellcheck +shfmt +lua-language-server diff --git a/crupest-words.txt b/crupest-words.txt new file mode 100644 index 0000000..d5e637c --- /dev/null +++ b/crupest-words.txt @@ -0,0 +1,51 @@ +# me +crupest +yuqian + +# self-hosted services +2fauth +forgejo +rspamd +certbot + +# general +cheatsheet +aarch64 +pythonpath +gerrit + +# unix +cpio +kmod +nproc +sourceware + +# hurd +gnumach +settrans +fsysopts +pfinet + +# debian +debhelper +debootstrap +devscripts +dquilt +buildpackage +quiltrc +nocheck +chroot +indep +confdir +createchroot +sbuild +sbuildrc +schroot + +# misc +geodata +geoip +geosite +vmess +vnext +ustc diff --git a/cspell.yaml b/cspell.yaml new file mode 100644 index 0000000..5d6d72b --- /dev/null +++ b/cspell.yaml @@ -0,0 +1,31 @@ +version: "0.2" +language: en +dictionaries: + - crupest-words + - en_US + - companies + - softwareTerms + - misc + - node + - typescript + - python + - html + - css + - cpp + - csharp + - latex + - bash + - fonts + - filetypes + - npm + +dictionaryDefinitions: + - name: crupest-words + path: './crupest-words.txt' + +patterns: + - name: libs + pattern: /lib.+/g + +ignoreRegExpList: + - libs diff --git a/docker/auto-backup/.dockerignore b/docker/auto-backup/.dockerignore new file mode 100644 index 0000000..7a09751 --- /dev/null +++ b/docker/auto-backup/.dockerignore @@ -0,0 +1,2 @@ +AutoBackup/bin +AutoBackup/obj diff --git a/docker/auto-backup/AutoBackup/.dockerignore b/docker/auto-backup/AutoBackup/.dockerignore new file mode 100644 index 0000000..7de5508 --- /dev/null +++ b/docker/auto-backup/AutoBackup/.dockerignore @@ -0,0 +1,2 @@ +obj +bin diff --git a/docker/auto-backup/AutoBackup/.gitignore b/docker/auto-backup/AutoBackup/.gitignore new file mode 100644 index 0000000..7de5508 --- /dev/null +++ b/docker/auto-backup/AutoBackup/.gitignore @@ -0,0 +1,2 @@ +obj +bin diff --git a/docker/auto-backup/AutoBackup/AutoBackup.csproj b/docker/auto-backup/AutoBackup/AutoBackup.csproj new file mode 100644 index 0000000..694035b --- /dev/null +++ b/docker/auto-backup/AutoBackup/AutoBackup.csproj @@ -0,0 +1,10 @@ +<Project Sdk="Microsoft.NET.Sdk"> + + <PropertyGroup> + <OutputType>Exe</OutputType> + <TargetFramework>net9.0</TargetFramework> + <ImplicitUsings>enable</ImplicitUsings> + <Nullable>enable</Nullable> + </PropertyGroup> + +</Project> diff --git a/docker/auto-backup/AutoBackup/Program.cs b/docker/auto-backup/AutoBackup/Program.cs new file mode 100644 index 0000000..c2e7a0d --- /dev/null +++ b/docker/auto-backup/AutoBackup/Program.cs @@ 
-0,0 +1,121 @@
+// Read args to determine what file to upload
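+// Usage: dotnet AutoBackup.dll [<file-to-upload> [<destination-key>]]; both arguments are optional.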
+
+const string DefaultUploadFilePath = "/tmp/data.tar.xz";
+string uploadFilePath = DefaultUploadFilePath;
+string? uploadDestinationPath = null;
+if (args.Length == 0)
+{
+ Console.WriteLine("You don't specify the file to upload, will upload /tmp/data.tar.xz by default.");
+ Console.WriteLine("You don't specify the destination to upload, will use timestamp with proper file extension.");
+}
+else if (args.Length == 1)
+{
+ if (args[0].Length == 0)
+ {
+ Console.Error.WriteLine("File to upload can't be empty string.");
+ Environment.Exit(2);
+ }
+ uploadFilePath = args[0];
+ Console.WriteLine("You don't specify the destination to upload, will use timestamp with proper file extension.");
+}
+else if (args.Length == 2)
+{
+ if (args[0].Length == 0)
+ {
+ Console.Error.WriteLine("File to upload can't be empty string.");
+ Environment.Exit(2);
+ }
+
+ if (args[1].Length == 0)
+ {
+ Console.Error.WriteLine("Destination to upload can't be empty string.");
+ Environment.Exit(2);
+ }
+
+ uploadFilePath = args[0];
+ uploadDestinationPath = args[1];
+}
+else
+{
+ // Write to stderr
+ Console.Error.WriteLine("You can only specify one optional file and one optional destination to upload.");
+ Environment.Exit(2);
+}
+
+// Check the upload exists
+if (!File.Exists(uploadFilePath))
+{
+ Console.Error.WriteLine($"The file {uploadFilePath} doesn't exist.");
+ Environment.Exit(3);
+}
+
+// Check the upload file is not a directory
+if (File.GetAttributes(uploadFilePath).HasFlag(FileAttributes.Directory))
+{
+ Console.Error.WriteLine($"The file {uploadFilePath} is a directory.");
+ Environment.Exit(4);
+}
+
+// Check the upload file is not bigger than 5G
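+// (The COS helper's PutObject also rejects streams larger than 5GB, so oversized archives are caught early here.)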
+if (new FileInfo(uploadFilePath).Length > 5L * 1024L * 1024L * 1024L)
+{
+ Console.Error.WriteLine($"The file {uploadFilePath} is bigger than 5G, which is not support now.");
+ Environment.Exit(5);
+}
+
+// Get config from environment variables
+var configNameList = new List<string>{
+ "CRUPEST_AUTO_BACKUP_COS_SECRET_ID",
+ "CRUPEST_AUTO_BACKUP_COS_SECRET_KEY",
+ "CRUPEST_AUTO_BACKUP_COS_REGION",
+ "CRUPEST_AUTO_BACKUP_BUCKET_NAME"
+};
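+// Read each variable below and exit with a clear error if any one is missing.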
+
+var config = new Dictionary<string, string>();
+foreach (var configName in configNameList)
+{
+ var configValue = Environment.GetEnvironmentVariable(configName);
+ if (configValue is null)
+ {
+ Console.Error.WriteLine($"Environment variable {configName} is required.");
+ Environment.Exit(5);
+ }
+ config.Add(configName, configValue);
+}
+
+var region = config["CRUPEST_AUTO_BACKUP_COS_REGION"];
+var secretId = config["CRUPEST_AUTO_BACKUP_COS_SECRET_ID"];
+var secretKey = config["CRUPEST_AUTO_BACKUP_COS_SECRET_KEY"];
+var bucketName = config["CRUPEST_AUTO_BACKUP_BUCKET_NAME"];
+
+var credentials = new TencentCloudCOSHelper.Credentials(secretId, secretKey);
+
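+// If no destination was given, derive one from the current timestamp plus the source file's extension.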
+if (uploadDestinationPath is null)
+{
+ var uploadFileName = Path.GetFileName(uploadFilePath);
+ var firstDotPosition = uploadFileName.IndexOf('.');
+ uploadDestinationPath = DateTime.Now.ToString("s");
+ if (firstDotPosition != -1)
+ {
+ uploadDestinationPath += uploadFileName.Substring(firstDotPosition); // keep the leading dot of the extension
+ }
+}
+
+Console.WriteLine($"Upload file source: {uploadFilePath}");
+Console.WriteLine($"Upload COS region: {config["CRUPEST_AUTO_BACKUP_COS_REGION"]}");
+Console.WriteLine($"Upload bucket name: {config["CRUPEST_AUTO_BACKUP_BUCKET_NAME"]}");
+Console.WriteLine($"Upload file destination: {uploadDestinationPath}");
+
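+// Open the file as a read-only stream; the COS helper computes its Content-MD5 and signs the PUT request before uploading.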
+await using var fileStream = new FileStream(uploadFilePath, FileMode.Open, FileAccess.Read);
+
+// Upload the object
+try
+{
+ await TencentCloudCOSHelper.PutObject(credentials, region, bucketName, uploadDestinationPath, fileStream);
+ Console.WriteLine("Upload completed!");
+}
+catch (Exception e)
+{
+ Console.Error.WriteLine("Exception: " + e);
+ Environment.Exit(6);
+}
diff --git a/docker/auto-backup/AutoBackup/TencentCloudCOS.cs b/docker/auto-backup/AutoBackup/TencentCloudCOS.cs new file mode 100644 index 0000000..28d032c --- /dev/null +++ b/docker/auto-backup/AutoBackup/TencentCloudCOS.cs @@ -0,0 +1,211 @@ +using System.Net; +using System.Security.Cryptography; +using System.Text; + + +public static class TencentCloudCOSHelper +{ + public class Credentials + { + public Credentials(string secretId, string secretKey) + { + SecretId = secretId; + SecretKey = secretKey; + } + + public string SecretId { get; } + public string SecretKey { get; } + } + + public class RequestInfo + { + public RequestInfo(string method, string urlPathname, IEnumerable<KeyValuePair<string, string>> parameters, IEnumerable<KeyValuePair<string, string>> headers) + { + Method = method; + UrlPathname = urlPathname; + Parameters = new Dictionary<string, string>(parameters); + Headers = new Dictionary<string, string>(headers); + } + + public string Method { get; } + public string UrlPathname { get; } + public IReadOnlyDictionary<string, string> Parameters { get; } + public IReadOnlyDictionary<string, string> Headers { get; } + } + + public class TimeDuration + { + public TimeDuration(DateTimeOffset start, DateTimeOffset end) + { + if (start > end) + { + throw new ArgumentException("Start time must be earlier than end time."); + } + + Start = start; + End = end; + } + + public DateTimeOffset Start { get; } + public DateTimeOffset End { get; } + } + + public static string GenerateSign(Credentials credentials, RequestInfo request, TimeDuration signValidTime) + { + List<(string key, string value)> Transform(IEnumerable<KeyValuePair<string, string>> raw) + { + if (raw == null) + return new List<(string key, string value)>(); + + var sorted = raw.Select(p => (key: WebUtility.UrlEncode(p.Key.ToLower()), value: WebUtility.UrlEncode(p.Value))).ToList(); + sorted.Sort((left, right) => string.CompareOrdinal(left.key, right.key)); + return sorted; + } + + var transformedParameters = Transform(request.Parameters); + var transformedHeaders = Transform(request.Headers); + + + const string signAlgorithm = "sha1"; + + static string ByteArrayToString(byte[] bytes) + { + return BitConverter.ToString(bytes).Replace("-", "").ToLower(); + } + + var keyTime = $"{signValidTime.Start.ToUnixTimeSeconds().ToString()};{signValidTime.End.ToUnixTimeSeconds().ToString()}"; + using HMACSHA1 hmac = new HMACSHA1(Encoding.ASCII.GetBytes(credentials.SecretKey)); + var signKey = ByteArrayToString(hmac.ComputeHash(Encoding.UTF8.GetBytes(keyTime))); + + static string Join(IEnumerable<(string key, string value)> raw) + { + return string.Join('&', raw.Select(p => string.Concat(p.key, "=", p.value))); + } + + var httpParameters = Join(transformedParameters); + var urlParamList = string.Join(';', transformedParameters.Select(p => p.key)); + var httpHeaders = Join(transformedHeaders); + var headerList = string.Join(';', transformedHeaders.Select(h => h.key)); + + var httpString = new StringBuilder() + .Append(request.Method.ToLower()).Append('\n') + .Append(request.UrlPathname).Append('\n') + .Append(httpParameters).Append('\n') + .Append(httpHeaders).Append('\n') + .ToString(); + + using var sha1 = SHA1.Create(); + string Sha1(string data) + { + var result = sha1.ComputeHash(Encoding.UTF8.GetBytes(data)); + return ByteArrayToString(result); + } + + var stringToSign = new StringBuilder() + .Append(signAlgorithm).Append('\n') + .Append(keyTime).Append('\n') + .Append(Sha1(httpString)).Append('\n') + .ToString(); + + hmac.Key = 
Encoding.UTF8.GetBytes(signKey); + var signature = ByteArrayToString(hmac.ComputeHash( + Encoding.UTF8.GetBytes(stringToSign))); + + + List<(string, string)> result = new List<(string, string)>(); + result.Add(("q-sign-algorithm", signAlgorithm)); + result.Add(("q-ak", credentials.SecretId)); + result.Add(("q-sign-time", keyTime)); + result.Add(("q-key-time", keyTime)); + result.Add(("q-header-list", headerList)); + result.Add(("q-url-param-list", urlParamList)); + result.Add(("q-signature", signature)); + return Join(result); + } + + private static string GetHost(string bucket, string region) + { + return $"{bucket}.cos.{region}.myqcloud.com"; + } + + public static async Task<bool> IsObjectExists(Credentials credentials, string region, string bucket, string key) + { + var host = GetHost(bucket, region); + var encodedKey = WebUtility.UrlEncode(key); + + using var request = new HttpRequestMessage(); + request.Method = HttpMethod.Head; + request.RequestUri = new Uri($"https://{host}/{encodedKey}"); + request.Headers.Host = host; + request.Headers.Date = DateTimeOffset.Now; + request.Headers.TryAddWithoutValidation("Authorization", GenerateSign(credentials, new RequestInfo( + "head", "/" + key, new Dictionary<string, string>(), + new Dictionary<string, string> + { + ["Host"] = host + } + ), new TimeDuration(DateTimeOffset.Now, DateTimeOffset.Now.AddMinutes(5)))); + + using var client = new HttpClient(); + using var response = await client.SendAsync(request); + + if (response.IsSuccessStatusCode) + return true; + if (response.StatusCode == HttpStatusCode.NotFound) + return false; + + throw new Exception($"Unknown response code. {response.ToString()}"); + } + + public static async Task PutObject(Credentials credentials, string region, string bucket, string key, Stream dataStream) + { + if (!dataStream.CanSeek) + { + throw new ArgumentException("Data stream must be seekable."); + } + + if (dataStream.Seek(0, SeekOrigin.End) > 5L * 1024L * 1024L * 1024L) + { + throw new ArgumentException("Data stream must be smaller than 5GB."); + } + + dataStream.Seek(0, SeekOrigin.Begin); + + var host = GetHost(bucket, region); + var encodedKey = WebUtility.UrlEncode(key); + using var md5Handler = MD5.Create(); + var md5 = Convert.ToBase64String(await md5Handler.ComputeHashAsync(dataStream)); + + dataStream.Seek(0, SeekOrigin.Begin); + + const string kContentMD5HeaderName = "Content-MD5"; + + using var httpRequest = new HttpRequestMessage() + { + Method = HttpMethod.Put, + RequestUri = new Uri($"https://{host}/{encodedKey}") + }; + httpRequest.Headers.Host = host; + httpRequest.Headers.Date = DateTimeOffset.Now; + + using var httpContent = new StreamContent(dataStream); + httpContent.Headers.Add(kContentMD5HeaderName, md5); + httpRequest.Content = httpContent; + + var signedHeaders = new Dictionary<string, string> + { + ["Host"] = host, + [kContentMD5HeaderName] = md5 + }; + + httpRequest.Headers.TryAddWithoutValidation("Authorization", GenerateSign(credentials, new RequestInfo( + "put", "/" + key, new Dictionary<string, string>(), signedHeaders + ), new TimeDuration(DateTimeOffset.Now, DateTimeOffset.Now.AddMinutes(10)))); + + using var client = new HttpClient(); + using var response = await client.SendAsync(httpRequest); + + if (!response.IsSuccessStatusCode) + throw new Exception($"Not success status code: {response.StatusCode}\n{await response.Content.ReadAsStringAsync()}"); + } +}
\ No newline at end of file diff --git a/docker/auto-backup/Dockerfile b/docker/auto-backup/Dockerfile new file mode 100644 index 0000000..c7ff4fc --- /dev/null +++ b/docker/auto-backup/Dockerfile @@ -0,0 +1,24 @@ +FROM mcr.microsoft.com/dotnet/sdk:9.0-alpine AS build +COPY AutoBackup /AutoBackup +WORKDIR /AutoBackup +RUN dotnet publish AutoBackup.csproj --configuration Release --output ./publish/ -r linux-x64 --self-contained false + +FROM mcr.microsoft.com/dotnet/runtime:9.0-alpine +RUN apk add --no-cache tini coreutils bash tar xz +ARG CRUPEST_AUTO_BACKUP_INIT_DELAY=0 +ARG CRUPEST_AUTO_BACKUP_INTERVAL=1d +ARG CRUPEST_AUTO_BACKUP_COS_SECRET_ID +ARG CRUPEST_AUTO_BACKUP_COS_SECRET_KEY +ARG CRUPEST_AUTO_BACKUP_COS_REGION +ARG CRUPEST_AUTO_BACKUP_BUCKET_NAME +ENV CRUPEST_AUTO_BACKUP_INIT_DELAY=${CRUPEST_AUTO_BACKUP_INIT_DELAY} +ENV CRUPEST_AUTO_BACKUP_INTERVAL=${CRUPEST_AUTO_BACKUP_INTERVAL} +ENV CRUPEST_AUTO_BACKUP_COS_SECRET_ID=${CRUPEST_AUTO_BACKUP_COS_SECRET_ID} +ENV CRUPEST_AUTO_BACKUP_COS_SECRET_KEY=${CRUPEST_AUTO_BACKUP_COS_SECRET_KEY} +ENV CRUPEST_AUTO_BACKUP_COS_REGION=${CRUPEST_AUTO_BACKUP_COS_REGION} +ENV CRUPEST_AUTO_BACKUP_BUCKET_NAME=${CRUPEST_AUTO_BACKUP_BUCKET_NAME} +VOLUME [ "/data" ] +COPY daemon.bash /daemon.bash +COPY --from=build /AutoBackup/publish /AutoBackup +ENTRYPOINT ["tini", "--"] +CMD [ "/daemon.bash" ] diff --git a/docker/auto-backup/daemon.bash b/docker/auto-backup/daemon.bash new file mode 100755 index 0000000..a4dd5dc --- /dev/null +++ b/docker/auto-backup/daemon.bash @@ -0,0 +1,57 @@ +#!/usr/bin/env bash + +set -e + +# Check I'm root. +if [[ $EUID -ne 0 ]]; then + echo "This script must be run as root" 1>&2 + exit 1 +fi + + +# Check xz, tar and coscmd +xz --version +tar --version + +function backup { + # Output "Begin backup..." in yellow and restore default + echo -e "\e[0;103m\e[K\e[1mBegin backup..." "\e[0m" + + # Get current time and convert it to YYYY-MM-DDTHH:MM:SSZ + current_time=$(date +%Y-%m-%dT%H:%M:%SZ) + echo "Current time: $current_time" + + echo "Create tar.xz for data..." + + # tar and xz /data to tmp + tar -cJf /tmp/data.tar.xz -C / data + + # Output /tmp/data.tar.xz size + du -h /tmp/data.tar.xz | cut -f1 | xargs echo "Size of data.tar.xz:" + + destination="${current_time}.tar.xz" + + # upload to remote + dotnet /AutoBackup/AutoBackup.dll /tmp/data.tar.xz "$destination" + + echo "Remove tmp file..." + # remove tmp + rm /tmp/data.tar.xz + + echo "$destination" >> /data/backup.log + + # echo "Backup finished!" in green and restore default + echo -e "\e[0;102m\e[K\e[1mFinish backup!\e[0m" +} + +echo "Initial delay: $CRUPEST_AUTO_BACKUP_INIT_DELAY" +sleep "$CRUPEST_AUTO_BACKUP_INIT_DELAY" + +# forever loop +while true; do + backup + + # sleep for CRUPEST_AUTO_BACKUP_INTERVAL + echo "Sleep for $CRUPEST_AUTO_BACKUP_INTERVAL for next backup..." 
+ sleep "$CRUPEST_AUTO_BACKUP_INTERVAL" +done diff --git a/docker/auto-certbot/Dockerfile b/docker/auto-certbot/Dockerfile new file mode 100644 index 0000000..eeb6475 --- /dev/null +++ b/docker/auto-certbot/Dockerfile @@ -0,0 +1,20 @@ +FROM certbot/certbot:latest + +ARG CRUPEST_AUTO_CERTBOT_ADDITIONAL_PACKAGES="" +RUN apk add --no-cache tini coreutils bash ${CRUPEST_AUTO_CERTBOT_ADDITIONAL_PACKAGES} && python -m pip install cryptography + + +ARG CRUPEST_DOMAIN +ARG CRUPEST_ADDITIONAL_DOMAIN_LIST="" +ARG CRUPEST_EMAIL +ARG CRUPEST_AUTO_CERTBOT_POST_HOOK="" +# install bash +ENV CRUPEST_DOMAIN=${CRUPEST_DOMAIN} +ENV CRUPEST_ADDITIONAL_DOMAIN_LIST=${CRUPEST_ADDITIONAL_DOMAIN_LIST} +ENV CRUPEST_EMAIL=${CRUPEST_EMAIL} +ENV CRUPEST_AUTO_CERTBOT_POST_HOOK=${CRUPEST_AUTO_CERTBOT_POST_HOOK} +COPY daemon.bash /daemon.bash +COPY get-cert-domains.py /get-cert-domains.py +VOLUME ["/var/www/certbot", "/etc/letsencrypt", "/var/lib/letsencrypt"] +ENTRYPOINT ["tini", "--"] +CMD [ "/daemon.bash" ] diff --git a/docker/auto-certbot/daemon.bash b/docker/auto-certbot/daemon.bash new file mode 100755 index 0000000..d79387e --- /dev/null +++ b/docker/auto-certbot/daemon.bash @@ -0,0 +1,107 @@ +#!/usr/bin/env bash + +set -e + +# Check I'm root. +if [[ $EUID -ne 0 ]]; then + echo "This script must be run as root" 1>&2 + exit 1 +fi + +# Check certbot version. +certbot --version + +# Check domain +if [[ -z "$CRUPEST_DOMAIN" ]]; then + echo "CRUPEST_DOMAIN can't be empty!" 1>&2 + exit 1 +fi + +# Check email +if [[ -z "$CRUPEST_EMAIL" ]]; then + echo "CRUPEST_EMAIL can't be empty!" 1>&2 + exit 2 +fi + +# Check CRUPEST_CERT_PATH, default to /etc/letsencrypt/live/$CRUPEST_DOMAIN/fullchain.pem +if [ -z "$CRUPEST_CERT_PATH" ]; then + CRUPEST_CERT_PATH="/etc/letsencrypt/live/$CRUPEST_DOMAIN/fullchain.pem" +fi + +# Check CRUPEST_CERT_PATH exists. +if [ ! -f "$CRUPEST_CERT_PATH" ]; then + echo "Cert file does not exist. You may want to generate it manually with aio script." 1>&2 + exit 3 +fi + +echo "Root domain:" "$CRUPEST_DOMAIN" +echo "Email:" "$CRUPEST_EMAIL" +echo "Cert path: ${CRUPEST_CERT_PATH}" + +# Check CRUPEST_AUTO_CERTBOT_RENEW_COMMAND is defined. +if [ -z "$CRUPEST_AUTO_CERTBOT_RENEW_COMMAND" ]; then + echo "CRUPEST_AUTO_CERTBOT_RENEW_COMMAND is not defined or empty. Will use the default one." +else + printf "CRUPEST_AUTO_CERTBOT_RENEW_COMMAND is defined as:\n%s\n" "$CRUPEST_AUTO_CERTBOT_RENEW_COMMAND" +fi + +domains_str="$(/get-cert-domains.py "${CRUPEST_CERT_PATH}")" + +printf "Domain list:\n%s\n" "$domains_str" + +mapfile -t domains <<< "$domains_str" + +for domain in "${domains[@]}"; do + domain_options=("${domain_options[@]}" -d "$domain") +done + +options=(-n --agree-tos -m "$CRUPEST_EMAIL" --webroot -w /var/www/certbot "${domain_options[@]}") +if [ -n "$CRUPEST_AUTO_CERTBOT_POST_HOOK" ]; then + printf "You have defined a post hook:\n%s\n" "$CRUPEST_AUTO_CERTBOT_POST_HOOK" + options=("${options[@]}" --post-hook "$CRUPEST_AUTO_CERTBOT_POST_HOOK") +fi + +# Use test server to test. +certbot certonly --force-renewal --test-cert --dry-run "${options[@]}" + +function check_and_renew_cert { + expire_info=$(openssl x509 -enddate -noout -in "$CRUPEST_CERT_PATH") + + # Get ssl certificate expire date. + expire_date=$(echo "$expire_info" | cut -d= -f2) + + echo "SSL certificate expire date: $expire_date" + + # Convert expire date to UNIX timestamp. + expire_timestamp="$(date -d "$expire_date" +%s)" + + # Minus expire timestamp with 30 days in UNIX timestamp. 
+ renew_timestamp="$((expire_timestamp - 2592000))" + echo "Renew SSL certificate at: $(date -d @$renew_timestamp)" + + # Get rest time til renew. + rest_time_in_second="$((renew_timestamp - $(date +%s)))" + rest_time_in_day=$((rest_time_in_second / 86400)) + echo "Rest time til renew: $rest_time_in_second seconds, aka, about $rest_time_in_day days" + + # Do we have rest time? + if [ $rest_time_in_second -gt 0 ]; then + # Sleep 1 hour. + echo "I'm going to sleep for 1 day to check again." + sleep 1d + else + # No, renew now. + echo "Renewing now..." + + if [ -n "$CRUPEST_AUTO_CERTBOT_RENEW_COMMAND" ]; then + $CRUPEST_AUTO_CERTBOT_RENEW_COMMAND + else + certbot certonly "${options[@]}" + fi + fi +} + +# Run check_and_renew_cert in infinate loop. +while true; do + check_and_renew_cert +done diff --git a/docker/auto-certbot/get-cert-domains.py b/docker/auto-certbot/get-cert-domains.py new file mode 100755 index 0000000..9bd28c8 --- /dev/null +++ b/docker/auto-certbot/get-cert-domains.py @@ -0,0 +1,38 @@ +#!/usr/bin/env python3 + +import sys +import os +from os.path import * +from cryptography.x509 import * +from cryptography.x509.oid import ExtensionOID + +# Check only one argument +if len(sys.argv) != 2: + print("You should only specify one argument, aka, the path of cert.", + file=sys.stderr) + exit(1) + +cert_path = sys.argv[1] + +if not exists(cert_path): + print("Cert file does not exist.", file=sys.stderr) + exit(2) + +if not isfile(cert_path): + print("Cert path is not a file.") + exit(3) + +if not 'CRUPEST_DOMAIN' in os.environ: + print("Please set CRUPEST_DOMAIN environment variable to root domain.", file=sys.stderr) + exit(4) + +root_domain = os.environ['CRUPEST_DOMAIN'] + +with open(cert_path, 'rb') as f: + cert = load_pem_x509_certificate(f.read()) + ext = cert.extensions.get_extension_for_oid( + ExtensionOID.SUBJECT_ALTERNATIVE_NAME) + domains: list = ext.value.get_values_for_type(DNSName) + domains.remove(root_domain) + domains = [root_domain, *domains] + print('\n'.join(domains)) diff --git a/docker/blog/Dockerfile b/docker/blog/Dockerfile new file mode 100644 index 0000000..7414d4e --- /dev/null +++ b/docker/blog/Dockerfile @@ -0,0 +1,9 @@ +FROM debian:latest +ARG CRUPEST_BLOG_UPDATE_INTERVAL=1d +COPY install-hugo.bash /install-hugo.bash +RUN /install-hugo.bash && rm /install-hugo.bash +ENV CRUPEST_BLOG_UPDATE_INTERVAL=${CRUPEST_BLOG_UPDATE_INTERVAL} +COPY daemon.bash update.bash /scripts/ +VOLUME [ "/public" ] +ENTRYPOINT ["tini", "--"] +CMD [ "/scripts/daemon.bash" ] diff --git a/docker/blog/daemon.bash b/docker/blog/daemon.bash new file mode 100755 index 0000000..561a80a --- /dev/null +++ b/docker/blog/daemon.bash @@ -0,0 +1,19 @@ +#! /usr/bin/env bash + +set -e + +# Check I'm root. +if [[ $EUID -ne 0 ]]; then + echo "This script must be run as root" 1>&2 + exit 1 +fi + +hugo version + +while true; do + /scripts/update.bash + + # sleep for CRUPEST_AUTO_BACKUP_INTERVAL + echo "Sleep for $CRUPEST_BLOG_UPDATE_INTERVAL for next build..." + sleep "$CRUPEST_BLOG_UPDATE_INTERVAL" +done diff --git a/docker/blog/install-hugo.bash b/docker/blog/install-hugo.bash new file mode 100755 index 0000000..a448138 --- /dev/null +++ b/docker/blog/install-hugo.bash @@ -0,0 +1,22 @@ +#! 
/usr/bin/env bash + +set -e + +apt-get update +apt-get install -y tini locales curl git +rm -rf /var/lib/apt/lists/* +localedef -i en_US -c -f UTF-8 -A /usr/share/locale/locale.alias en_US.UTF-8 + +VERSION=$(curl -s https://api.github.com/repos/gohugoio/hugo/releases/latest | grep '"tag_name":' | sed -E 's/.*"v([^"]+)".*/\1/') + +echo "The latest version of hugo is $VERSION." + +url="https://github.com/gohugoio/hugo/releases/download/v${VERSION}/hugo_extended_${VERSION}_linux-amd64.deb" + +echo "Downloading hugo from $url." + +curl -sSfOL "$url" +dpkg -i "hugo_extended_${VERSION}_linux-amd64.deb" +rm "hugo_extended_${VERSION}_linux-amd64.deb" + +echo "Hugo version: $(hugo version)." diff --git a/docker/blog/update.bash b/docker/blog/update.bash new file mode 100755 index 0000000..d4bcadc --- /dev/null +++ b/docker/blog/update.bash @@ -0,0 +1,30 @@ +#! /usr/bin/env bash + +set -e + +echo -e "\e[0;103m\e[K\e[1mBegin to build blog...\e[0m" +echo "Begin time: $(date +%Y-%m-%dT%H:%M:%SZ)" + +mkdir -p /public + +# Check that the /blog directory exists. +if [[ ! -d /blog ]]; then +    echo "Directory /blog not found, cloning blog repository..." +    git clone https://github.com/crupest/blog.git /blog +    cd /blog +    git submodule update --init --recursive +else +    echo "Directory /blog found, updating blog repository..." +    cd /blog +    git fetch -p +    git reset --hard origin/master +    git submodule update --init --recursive +fi + +# Now run hugo. +echo "Run hugo to generate blog..." +hugo -d /public + +echo "Finish time: $(date +%Y-%m-%dT%H:%M:%SZ)" +echo -e "\e[0;102m\e[K\e[1mFinish build!\e[0m" + diff --git a/docker/debian-dev/Dockerfile b/docker/debian-dev/Dockerfile new file mode 100644 index 0000000..95f0602 --- /dev/null +++ b/docker/debian-dev/Dockerfile @@ -0,0 +1,21 @@ +FROM debian:latest + +ARG USER=crupest +ARG IN_CHINA= +ARG CODE_SERVER=true + +ENV CRUPEST_DEBIAN_DEV_USER=${USER} +ENV CRUPEST_DEBIAN_DEV_IN_CHINA=${IN_CHINA} +ENV CRUPEST_DEBIAN_DEV_SETUP_CODE_SERVER=${CODE_SERVER} + +ADD bootstrap /bootstrap + +RUN /bootstrap/setup.bash +ENV LANG=en_US.utf8 + +USER ${USER} +WORKDIR /home/${USER} + +EXPOSE 8080 +VOLUME [ "/data", "/home/${USER}" ] +CMD [ "bash", "-l" ] diff --git a/docker/debian-dev/bootstrap/apt-source/11/add-deb-src.bash b/docker/debian-dev/bootstrap/apt-source/11/add-deb-src.bash new file mode 100755 index 0000000..e134a00 --- /dev/null +++ b/docker/debian-dev/bootstrap/apt-source/11/add-deb-src.bash @@ -0,0 +1,14 @@ +#! /usr/bin/env bash + +set -e + +dir=$(dirname "$0") +domain=$("$dir/get-domain.bash") + +cat <<EOF >> /etc/apt/sources.list + +deb-src https://$domain/debian/ bullseye main +deb-src https://$domain/debian-security/ bullseye-security main +deb-src https://$domain/debian-updates/ bullseye-updates main + +EOF diff --git a/docker/debian-dev/bootstrap/apt-source/11/get-domain.bash b/docker/debian-dev/bootstrap/apt-source/11/get-domain.bash new file mode 100755 index 0000000..d44ea65 --- /dev/null +++ b/docker/debian-dev/bootstrap/apt-source/11/get-domain.bash @@ -0,0 +1,5 @@ +#! /usr/bin/env bash + +set -e + +sed "s|.*https\?://\([-_.a-zA-Z0-9]\+\)/.*|\\1|;q" /etc/apt/sources.list diff --git a/docker/debian-dev/bootstrap/apt-source/11/replace-domain.bash b/docker/debian-dev/bootstrap/apt-source/11/replace-domain.bash new file mode 100755 index 0000000..86e88dc --- /dev/null +++ b/docker/debian-dev/bootstrap/apt-source/11/replace-domain.bash @@ -0,0 +1,7 @@ +#! /usr/bin/env bash + +set -e + +echo "Backup /etc/apt/sources.list to /etc/apt/sources.list.bak." 
+echo "Replace source domain in /etc/apt/sources.list to $1." +sed -i.bak "s|\(https\?://\)[-_.a-zA-Z0-9]\+/|\\1$1/|" /etc/apt/sources.list diff --git a/docker/debian-dev/bootstrap/apt-source/11/replace-http.bash b/docker/debian-dev/bootstrap/apt-source/11/replace-http.bash new file mode 100755 index 0000000..fae082a --- /dev/null +++ b/docker/debian-dev/bootstrap/apt-source/11/replace-http.bash @@ -0,0 +1,7 @@ +#! /usr/bin/env bash + +set -e + +echo "Backup /etc/apt/sources.list to /etc/apt/sources.list.bak." +echo "Replace http to https in /etc/apt/sources.list." +sed -i.bak 's/https\?/https/' /etc/apt/sources.list diff --git a/docker/debian-dev/bootstrap/apt-source/12/add-deb-src.bash b/docker/debian-dev/bootstrap/apt-source/12/add-deb-src.bash new file mode 100755 index 0000000..cf741d6 --- /dev/null +++ b/docker/debian-dev/bootstrap/apt-source/12/add-deb-src.bash @@ -0,0 +1,22 @@ +#! /usr/bin/env bash + +set -e + +dir=$(dirname "$0") +domain=$("$dir/get-domain.bash") + +cat <<EOF >> /etc/apt/sources.list.d/debian.sources + +Types: deb-src +URIs: https://$domain/debian +Suites: bookworm bookworm-updates +Components: main +Signed-By: /usr/share/keyrings/debian-archive-keyring.gpg + +Types: deb-src +URIs: https://$domain/debian-security +Suites: bookworm-security +Components: main +Signed-By: /usr/share/keyrings/debian-archive-keyring.gpg + +EOF
\ No newline at end of file diff --git a/docker/debian-dev/bootstrap/apt-source/12/get-domain.bash b/docker/debian-dev/bootstrap/apt-source/12/get-domain.bash new file mode 100755 index 0000000..a24538c --- /dev/null +++ b/docker/debian-dev/bootstrap/apt-source/12/get-domain.bash @@ -0,0 +1,6 @@ +#! /usr/bin/env bash + +set -e + +grep -e 'URIs:' /etc/apt/sources.list.d/debian.sources | \ +    sed -E 's|URIs:\s*https?://([-_.a-zA-Z0-9]+)/.*|\1|;q' diff --git a/docker/debian-dev/bootstrap/apt-source/12/replace-domain.bash b/docker/debian-dev/bootstrap/apt-source/12/replace-domain.bash new file mode 100755 index 0000000..d55307c --- /dev/null +++ b/docker/debian-dev/bootstrap/apt-source/12/replace-domain.bash @@ -0,0 +1,7 @@ +#! /usr/bin/env bash + +set -e + +echo "Backup /etc/apt/sources.list.d/debian.sources to /etc/apt/sources.list.d/debian.sources.bak." +echo "Replace source domain in /etc/apt/sources.list.d/debian.sources with $1." +sed -i.bak -E "s|(URIs:\\s*https?://)[-_.a-zA-Z0-9]+(/.*)|\\1$1\\2|" /etc/apt/sources.list.d/debian.sources diff --git a/docker/debian-dev/bootstrap/apt-source/12/replace-http.bash b/docker/debian-dev/bootstrap/apt-source/12/replace-http.bash new file mode 100755 index 0000000..ed4391d --- /dev/null +++ b/docker/debian-dev/bootstrap/apt-source/12/replace-http.bash @@ -0,0 +1,7 @@ +#! /usr/bin/env bash + +set -e + +echo "Backup /etc/apt/sources.list.d/debian.sources to /etc/apt/sources.list.d/debian.sources.bak." +echo "Replace http with https in /etc/apt/sources.list.d/debian.sources." +sed -i.bak -E "s|(URIs:\\s*)https?(://[-_.a-zA-Z0-9]+/.*)|\\1https\\2|" /etc/apt/sources.list.d/debian.sources diff --git a/docker/debian-dev/bootstrap/apt-source/china-source.txt b/docker/debian-dev/bootstrap/apt-source/china-source.txt new file mode 100644 index 0000000..4312686 --- /dev/null +++ b/docker/debian-dev/bootstrap/apt-source/china-source.txt @@ -0,0 +1 @@ +mirrors.tuna.tsinghua.edu.cn
\ No newline at end of file diff --git a/docker/debian-dev/bootstrap/apt-source/install-apt-https.bash b/docker/debian-dev/bootstrap/apt-source/install-apt-https.bash new file mode 100755 index 0000000..70fb371 --- /dev/null +++ b/docker/debian-dev/bootstrap/apt-source/install-apt-https.bash @@ -0,0 +1,8 @@ +#! /usr/bin/env bash + +set -e + +echo "Install apt https transport." +apt-get update +apt-get install -y apt-utils +apt-get install -y apt-transport-https ca-certificates diff --git a/docker/debian-dev/bootstrap/apt-source/setup.bash b/docker/debian-dev/bootstrap/apt-source/setup.bash new file mode 100755 index 0000000..cdf68af --- /dev/null +++ b/docker/debian-dev/bootstrap/apt-source/setup.bash @@ -0,0 +1,34 @@ +#! /usr/bin/env bash + +set -e + +dir=/bootstrap/apt-source + +echo "Getting debian version..." +debian_version=$("$dir/../get-debian-version.bash") + +if [[ -z $debian_version ]]; then + echo "Debian version not found." + exit 1 +else + echo "Debian version: $debian_version" +fi + +if [[ $debian_version -ge 12 ]]; then + setup_dir=$dir/12 +else + setup_dir=$dir/11 +fi + +echo "Setting up apt source..." + +if [[ -n $CRUPEST_DEBIAN_DEV_IN_CHINA ]]; then + echo "In China, using China source..." + "$setup_dir/replace-domain.bash" "$(cat "$dir/china-source.txt")" +fi + +"$dir/install-apt-https.bash" +"$setup_dir/replace-http.bash" +"$setup_dir/add-deb-src.bash" + +echo "Setting up apt source done." diff --git a/docker/debian-dev/bootstrap/bash/bash-completion.bash b/docker/debian-dev/bootstrap/bash/bash-completion.bash new file mode 100644 index 0000000..75f8333 --- /dev/null +++ b/docker/debian-dev/bootstrap/bash/bash-completion.bash @@ -0,0 +1,4 @@ +if [ -f /etc/bash_completion ]; then + . /etc/bash_completion +fi + diff --git a/docker/debian-dev/bootstrap/bash/code-server.bash b/docker/debian-dev/bootstrap/bash/code-server.bash new file mode 100644 index 0000000..255c280 --- /dev/null +++ b/docker/debian-dev/bootstrap/bash/code-server.bash @@ -0,0 +1,2 @@ +mkdir -p ~/.local/share/code-server +/bootstrap/start/code-server.bash > ~/.local/share/code-server/log 2> ~/.local/share/code-server/error & diff --git a/docker/debian-dev/bootstrap/bash/dquilt.bash b/docker/debian-dev/bootstrap/bash/dquilt.bash new file mode 100644 index 0000000..96a4eb2 --- /dev/null +++ b/docker/debian-dev/bootstrap/bash/dquilt.bash @@ -0,0 +1,4 @@ +alias dquilt="quilt --quiltrc=${HOME}/.quiltrc-dpkg" +. /usr/share/bash-completion/completions/quilt +complete -F _quilt_completion $_quilt_complete_opt dquilt + diff --git a/docker/debian-dev/bootstrap/extra/setup-cmake.bash b/docker/debian-dev/bootstrap/extra/setup-cmake.bash new file mode 100755 index 0000000..76c1ae4 --- /dev/null +++ b/docker/debian-dev/bootstrap/extra/setup-cmake.bash @@ -0,0 +1,9 @@ +#! /usr/bin/env bash + +set -e + +CMAKE_VERSION=$(curl -s https://api.github.com/repos/Kitware/CMake/releases/latest | grep '"tag_name":' | sed -E 's/.*"v([^"]+)".*/\1/') +wget -O cmake-installer.sh https://github.com/Kitware/CMake/releases/download/v"$CMAKE_VERSION"/cmake-"$CMAKE_VERSION"-linux-x86_64.sh +chmod +x cmake-installer.sh +./cmake-installer.sh --skip-license --prefix=/usr +rm cmake-installer.sh diff --git a/docker/debian-dev/bootstrap/extra/setup-dotnet.bash b/docker/debian-dev/bootstrap/extra/setup-dotnet.bash new file mode 100755 index 0000000..0ef7743 --- /dev/null +++ b/docker/debian-dev/bootstrap/extra/setup-dotnet.bash @@ -0,0 +1,10 @@ +#! 
/usr/bin/env bash + +set -e + +wget https://packages.microsoft.com/config/debian/11/packages-microsoft-prod.deb -O packages-microsoft-prod.deb +dpkg -i packages-microsoft-prod.deb +rm packages-microsoft-prod.deb + +apt-get update +apt-get install -y dotnet-sdk-7.0 diff --git a/docker/debian-dev/bootstrap/extra/setup-llvm.bash b/docker/debian-dev/bootstrap/extra/setup-llvm.bash new file mode 100755 index 0000000..48dde86 --- /dev/null +++ b/docker/debian-dev/bootstrap/extra/setup-llvm.bash @@ -0,0 +1,26 @@ +#! /usr/bin/env bash + +set -e + +LLVM_VERSION=18 + +. /bootstrap/func.bash + +if is_true "$CRUPEST_DEBIAN_DEV_IN_CHINA"; then + base_url=https://mirrors.tuna.tsinghua.edu.cn/llvm-apt +else + base_url=https://apt.llvm.org +fi + +wget "$base_url/llvm.sh" +chmod +x llvm.sh +./llvm.sh $LLVM_VERSION all -m "$base_url" +rm llvm.sh + +update-alternatives --install /usr/bin/clang clang /usr/bin/clang-$LLVM_VERSION 100 \ + --slave /usr/bin/clang++ clang++ /usr/bin/clang++-$LLVM_VERSION \ + --slave /usr/bin/clangd clangd /usr/bin/clangd-$LLVM_VERSION \ + --slave /usr/bin/clang-format clang-format /usr/bin/clang-format-$LLVM_VERSION \ + --slave /usr/bin/clang-tidy clang-tidy /usr/bin/clang-tidy-$LLVM_VERSION \ + --slave /usr/bin/lldb lldb /usr/bin/lldb-$LLVM_VERSION \ + --slave /usr/bin/lld lld /usr/bin/lld-$LLVM_VERSION diff --git a/docker/debian-dev/bootstrap/func.bash b/docker/debian-dev/bootstrap/func.bash new file mode 100644 index 0000000..7782035 --- /dev/null +++ b/docker/debian-dev/bootstrap/func.bash @@ -0,0 +1,19 @@ +is_true() { + if [[ "$1" =~ 1|on|true ]]; then + return 0 + else + return 1 + fi +} + +append-bash-profile() { + cat "/bootstrap/bash/$1" >> /home/$CRUPEST_DEBIAN_DEV_USER/.bash_profile +} + +append-bashrc() { + cat "/bootstrap/bash/$1" >> /home/$CRUPEST_DEBIAN_DEV_USER/.bashrc +} + +copy-home-dot-file() { + cp "/bootstrap/home-dot/$1" "/home/$CRUPEST_DEBIAN_DEV_USER/.$1" +} diff --git a/docker/debian-dev/bootstrap/get-debian-version.bash b/docker/debian-dev/bootstrap/get-debian-version.bash new file mode 100755 index 0000000..2cc10b9 --- /dev/null +++ b/docker/debian-dev/bootstrap/get-debian-version.bash @@ -0,0 +1,13 @@ +#! /usr/bin/env bash + +set -e + +if [ -f /etc/os-release ]; then + . /etc/os-release + if [ "$ID" = "debian" ]; then + echo "$VERSION_ID" + exit 0 + fi +fi + +exit 1 diff --git a/docker/debian-dev/bootstrap/home-dot/devscripts b/docker/debian-dev/bootstrap/home-dot/devscripts new file mode 100644 index 0000000..a15b041 --- /dev/null +++ b/docker/debian-dev/bootstrap/home-dot/devscripts @@ -0,0 +1 @@ +export DGET_VERIFY=no
\ No newline at end of file diff --git a/docker/debian-dev/bootstrap/home-dot/quiltrc-dpkg b/docker/debian-dev/bootstrap/home-dot/quiltrc-dpkg new file mode 100644 index 0000000..e8fc3c5 --- /dev/null +++ b/docker/debian-dev/bootstrap/home-dot/quiltrc-dpkg @@ -0,0 +1,13 @@ +d=. +while [ ! -d $d/debian -a `readlink -e $d` != / ]; + do d=$d/..; done +if [ -d $d/debian ] && [ -z $QUILT_PATCHES ]; then + # if in Debian packaging tree with unset $QUILT_PATCHES + QUILT_PATCHES="debian/patches" + QUILT_PATCH_OPTS="--reject-format=unified" + QUILT_DIFF_ARGS="-p ab --no-timestamps --no-index --color=auto" + QUILT_REFRESH_ARGS="-p ab --no-timestamps --no-index" + QUILT_COLORS="diff_hdr=1;32:diff_add=1;34:diff_rem=1;31:diff_hunk=1;33:" + QUILT_COLORS="${QUILT_COLORS}diff_ctx=35:diff_cctx=33" + if ! [ -d $d/debian/patches ]; then mkdir $d/debian/patches; fi +fi diff --git a/docker/debian-dev/bootstrap/setup-base.bash b/docker/debian-dev/bootstrap/setup-base.bash new file mode 100755 index 0000000..31ded36 --- /dev/null +++ b/docker/debian-dev/bootstrap/setup-base.bash @@ -0,0 +1,25 @@ +#! /usr/bin/env bash + +set -e + +. /bootstrap/func.bash + +echo "Setting up basic system function..." + +echo "Installing basic packages..." +apt-get install -y apt-utils +apt-get install -y locales procps vim less man bash-completion software-properties-common rsync curl wget +echo "Installing basic packages done." + +echo "Setting up locale..." +localedef -i en_US -c -f UTF-8 -A /usr/share/locale/locale.alias en_US.UTF-8 +echo "Setting up locale done." + +echo "Creating data dir..." +mkdir -p /data +chown $CRUPEST_DEBIAN_DEV_USER:$CRUPEST_DEBIAN_DEV_USER /data +echo "Creating data dir done." + +append-bashrc bash-completion.bash + +echo "Setting up basic system function done." diff --git a/docker/debian-dev/bootstrap/setup-code-server.bash b/docker/debian-dev/bootstrap/setup-code-server.bash new file mode 100755 index 0000000..34c9697 --- /dev/null +++ b/docker/debian-dev/bootstrap/setup-code-server.bash @@ -0,0 +1,28 @@ +#! /usr/bin/env bash + +set -e + +. /bootstrap/func.bash + +echo "Setting up code server..." + +echo "Get latest version of code-server..." +VERSION=$(curl -s https://api.github.com/repos/coder/code-server/releases/latest | grep '"tag_name":' | sed -E 's/.*"v([^"]+)".*/\1/') +echo "Current latest version of code-server is $VERSION" + +echo "Downloading code-server..." +url="https://github.com/coder/code-server/releases/download/v${VERSION}/code-server_${VERSION}_amd64.deb" +curl -sSfOL "$url" +echo "Downloading code-server done." + +echo "Installing code-server..." +apt-get install -y "./code-server_${VERSION}_amd64.deb" +echo "Installing code-server done." + +echo "Cleaning up deb..." +rm "code-server_${VERSION}_amd64.deb" +echo "Cleaning up deb done." + +append-bash-profile code-server.bash + +echo "Setting up code server done." diff --git a/docker/debian-dev/bootstrap/setup-dev.bash b/docker/debian-dev/bootstrap/setup-dev.bash new file mode 100755 index 0000000..92deacb --- /dev/null +++ b/docker/debian-dev/bootstrap/setup-dev.bash @@ -0,0 +1,18 @@ +#! /usr/bin/env bash + +set -e + +. /bootstrap/func.bash + +echo "Setting up dev function..." + +echo "Installing dev packages..." +apt-get install -y build-essential git devscripts debhelper quilt +apt-get build-dep -y linux +echo "Installing dev packages done." + +append-bashrc dquilt.bash +copy-home-dot-file devscripts +copy-home-dot-file quiltrc-dpkg + +echo "Setting up dev function done." 
diff --git a/docker/debian-dev/bootstrap/setup-user.bash b/docker/debian-dev/bootstrap/setup-user.bash new file mode 100755 index 0000000..f74dcdb --- /dev/null +++ b/docker/debian-dev/bootstrap/setup-user.bash @@ -0,0 +1,20 @@ +#! /usr/bin/env bash + +set -e + +echo "Setting up user..." + +echo "Installing sudo..." +apt-get install -y sudo +echo "Installing sudo done." + +echo "Setting up sudo..." +sed -i.bak 's|%sudo[[:space:]]\+ALL=(ALL:ALL)[[:space:]]\+ALL|%sudo ALL=(ALL:ALL) NOPASSWD: ALL|' /etc/sudoers +echo "Setting up sudo done." + +echo "Adding user $CRUPEST_DEBIAN_DEV_USER ..." +useradd -m -G sudo -s /usr/bin/bash "$CRUPEST_DEBIAN_DEV_USER" +echo "Adding user done." + +echo "Setting up user done." + diff --git a/docker/debian-dev/bootstrap/setup.bash b/docker/debian-dev/bootstrap/setup.bash new file mode 100755 index 0000000..09b8137 --- /dev/null +++ b/docker/debian-dev/bootstrap/setup.bash @@ -0,0 +1,30 @@ +#! /usr/bin/env bash + +set -e + +export DEBIAN_FRONTEND=noninteractive + +echo "Setting up crupest-debian-dev..." + +. /bootstrap/func.bash + +/bootstrap/apt-source/setup.bash + +echo "Updating apt source index..." +apt-get update +echo "Updating apt source index done." + +/bootstrap/setup-user.bash +/bootstrap/setup-base.bash +/bootstrap/setup-dev.bash + +if is_true "$CRUPEST_DEBIAN_DEV_SETUP_CODE_SERVER"; then + echo "CRUPEST_DEBIAN_DEV_SETUP_CODE_SERVER is true, setting up code-server..." + /bootstrap/setup-code-server.bash +fi + +echo "Cleaning up apt source index..." +rm -rf /var/lib/apt/lists/* +echo "Cleaning up apt source index done." + +echo "Setting up crupest-debian-dev done." diff --git a/docker/debian-dev/bootstrap/start/code-server.bash b/docker/debian-dev/bootstrap/start/code-server.bash new file mode 100755 index 0000000..7dfc0e9 --- /dev/null +++ b/docker/debian-dev/bootstrap/start/code-server.bash @@ -0,0 +1,18 @@ +#! /usr/bin/env bash + +export CODE_SERVER_CONFIG="/data/code-server-config.yaml" + +CODE_SERVER_PROGRAM=code-server +CODE_SERVER_PORT=8080 + +if which "$CODE_SERVER_PROGRAM" > /dev/null 2>&1; then + if ! pgrep -x "$CODE_SERVER_PROGRAM" > /dev/null 2>&1; then + echo "code-server is not running, starting..." + "$CODE_SERVER_PROGRAM" "--bind-addr" "0.0.0.0:$CODE_SERVER_PORT" + else + echo "code-server is already running." + fi +else + echo "code-server not found, skipping code-server setup." >&2 + exit 1 +fi diff --git a/docker/debian-dev/bootstrap/wait.bash b/docker/debian-dev/bootstrap/wait.bash new file mode 100755 index 0000000..501c706 --- /dev/null +++ b/docker/debian-dev/bootstrap/wait.bash @@ -0,0 +1,5 @@ +#! 
/usr/bin/env bash + +set -e + +tail -f /dev/null diff --git a/docker/nginx/Dockerfile b/docker/nginx/Dockerfile new file mode 100644 index 0000000..6d0400b --- /dev/null +++ b/docker/nginx/Dockerfile @@ -0,0 +1,8 @@ +FROM node:lts AS build-www +RUN npm install -g pnpm +COPY sites/www /sites/www +WORKDIR /sites/www +RUN pnpm install --frozen-lockfile && pnpm run build + +FROM nginx:mainline +COPY --from=build-www /sites/www/dist /srv/www diff --git a/docker/nginx/sites/www/.dockerignore b/docker/nginx/sites/www/.dockerignore new file mode 100644 index 0000000..ef718b9 --- /dev/null +++ b/docker/nginx/sites/www/.dockerignore @@ -0,0 +1,3 @@ +.parcel-cache +dist +node_modules diff --git a/docker/nginx/sites/www/.gitignore b/docker/nginx/sites/www/.gitignore new file mode 100644 index 0000000..0b1e50b --- /dev/null +++ b/docker/nginx/sites/www/.gitignore @@ -0,0 +1,26 @@ +# Logs +logs +*.log +npm-debug.log* +yarn-debug.log* +yarn-error.log* +pnpm-debug.log* +lerna-debug.log* + +node_modules +dist +dist-ssr +*.local + +# Editor directories and files +.vscode/* +!.vscode/extensions.json +.idea +.DS_Store +*.suo +*.ntvs* +*.njsproj +*.sln +*.sw? + +.parcel-cache diff --git a/docker/nginx/sites/www/avatar.png b/docker/nginx/sites/www/avatar.png Binary files differnew file mode 100644 index 0000000..d890d8d --- /dev/null +++ b/docker/nginx/sites/www/avatar.png diff --git a/docker/nginx/sites/www/favicon.ico b/docker/nginx/sites/www/favicon.ico Binary files differnew file mode 100644 index 0000000..922a523 --- /dev/null +++ b/docker/nginx/sites/www/favicon.ico diff --git a/docker/nginx/sites/www/github-mark.png b/docker/nginx/sites/www/github-mark.png Binary files differnew file mode 100644 index 0000000..6cb3b70 --- /dev/null +++ b/docker/nginx/sites/www/github-mark.png diff --git a/docker/nginx/sites/www/index.html b/docker/nginx/sites/www/index.html new file mode 100644 index 0000000..6fae9ef --- /dev/null +++ b/docker/nginx/sites/www/index.html @@ -0,0 +1,111 @@ +<!DOCTYPE html> +<html lang="en"> + +<head> + <meta charset="UTF-8" /> + <meta http-equiv="X-UA-Compatible" content="IE=edge"> + <link rel="icon" href="./favicon.ico" /> + <meta name="viewport" content="width=device-width, initial-scale=1.0" /> + <title>crupest</title> +</head> + +<body> + <div class="slogan-container"> + <div class="slogan happy"> + <span>🙃The world is full of pain, but we can fix it with love!</span> + </div> + <div class="slogan angry"> + <span>😡The world is a piece of shit, so let's make it a little better!</span> + </div> + </div> + <article id="main-article"> + <img id="avatar" src="./avatar.png" alt="My avatar" width="80" height="80"/> + <h1 id="title">Hello! This is <span id="title-name">crupest</span> !</h1> + <hr/> + <section> + <p>Welcome to my home page! Nice to meet you here! 
🥰</p> +      <p>If you have something interesting to share with me, feel free to email me at +        <a rel="noopener noreferrer" href="mailto:crupest@crupest.life">crupest@crupest.life</a>.</p> +      <p>You can also create an issue in any of my repos on GitHub to talk about anything with me, +        <a rel="noopener noreferrer" href="https://github.com/crupest">https://github.com/crupest</a>.</p> +    </section> +    <section> +      <h2 id="friends">My Friends <small>(more links are being collected ...)</small></h2> +      <div id="friends-container"> +        <div class="friend"> +          <a rel="noopener noreferrer" href="https://wsmcs.cn"> +            <img class="friend-avatar" alt="Friend WSM's avatar" +              src="https://wsmcs.cn/wp-content/uploads/2023/02/BifengxiaPanda_ZH-CN8879969527_UHD-scaled.jpg" +              width="80" height="80"/><br/> +            wsm</a> +          <a rel="noopener noreferrer" href="https://github.com/wushuming666"><img +              class="friend-github" src="./github-mark.png"/></a> +        </div> +        <div class="friend"> +          <a rel="noopener noreferrer" href="https://www.hszsoft.com"> +            <img class="friend-avatar" alt="Friend HSZ's avatar" +              src="https://avatars.githubusercontent.com/u/63097618?v=4" +              width="80" height="80"/><br/> +            hsz</a> +          <a rel="noopener noreferrer" href="https://github.com/hszSoft"><img +              class="friend-github" src="./github-mark.png"/></a><br/> +          <span class="friend-tag">随性の程序员</span> +        </div> +      </div> +    </section> +    <section> +      <h2>Other Links</h2> +      <ul> +        <li><a rel="noopener noreferrer" href="https://crupest.life">https://crupest.life</a> +          : home page, aka the one you are reading, built with +          <a rel="noopener noreferrer" href="https://parceljs.org">Parcel</a> +          and +          <a rel="noopener noreferrer" href="https://pnpm.io">pnpm</a>.</li> +        <li><a rel="noopener noreferrer" href="https://crupest.life/blog">https://crupest.life/blog</a> +          : blogs, built with +          <a rel="noopener noreferrer" href="https://gohugo.io">hugo</a>.</li> +        <li><a rel="noopener noreferrer" href="https://git.crupest.life">https://git.crupest.life</a> +          : self-hosted +          <a rel="noopener noreferrer" href="https://forgejo.org">Forgejo</a> +          instance.</li> +        <li><del><span class="fake-link">https://timeline.crupest.life</span> : micro-blog with my own web app +            <a rel="noopener noreferrer" href="https://github.com/crupest/Timeline">Timeline</a>.</del> +          No longer maintained, so it has stopped serving due to security concerns. +        </li> +      </ul> +    </section> +    <section> +      <h2>Always Remember</h2> +      <figure class="citation"> +        <blockquote> +          <p>Die Philosophen haben die Welt nur verschieden interpretiert, es kömmt aber darauf an, sie zu verändern.</p> +          <p><small>Translated from German:</small> +            The philosophers have only interpreted the world in various ways, the point is to change it.</p> +        </blockquote> +        <figcaption> +          <cite>Karl Marx, Theses on Feuerbach (1845)</cite> +        </figcaption> +      </figure> +    </section> +    <hr/> +    <footer> +      <p id="license"> +        <small>This work is licensed under +          <a rel="license noopener noreferrer" +            href="https://creativecommons.org/licenses/by-nc/4.0/" +            target="_blank"> +            <span id="license-text">CC BY-NC 4.0</span> +            <span id="license-img-container"> +              <img src="https://mirrors.creativecommons.org/presskit/icons/cc.svg"/> +              <img src="https://mirrors.creativecommons.org/presskit/icons/by.svg"/> +              <img src="https://mirrors.creativecommons.org/presskit/icons/nc.svg"/> +            </span> +          </a> +        </small> +      </p> +    </footer> +  </article> +  <script type="module" src="./src/main.ts"></script> +</body> + +</html>
\ No newline at end of file diff --git a/docker/nginx/sites/www/package.json b/docker/nginx/sites/www/package.json new file mode 100644 index 0000000..c5c5d4f --- /dev/null +++ b/docker/nginx/sites/www/package.json @@ -0,0 +1,17 @@ +{ + "name": "crupest-www", + "private": true, + "version": "0.1.0", + "source": "index.html", + "scripts": { + "start": "parcel", + "build": "tsc && parcel build" + }, + "devDependencies": { + "@tsconfig/recommended": "^1.0.8", + "@types/parcel-env": "^0.0.8", + "parcel": "^2.13.3", + "prettier": "^3.4.2", + "typescript": "^5.7.3" + } +}
\ No newline at end of file diff --git a/docker/nginx/sites/www/pnpm-lock.yaml b/docker/nginx/sites/www/pnpm-lock.yaml new file mode 100644 index 0000000..1d440a9 --- /dev/null +++ b/docker/nginx/sites/www/pnpm-lock.yaml @@ -0,0 +1,2016 @@ +lockfileVersion: '9.0' + +settings: + autoInstallPeers: true + excludeLinksFromLockfile: false + +importers: + + .: + devDependencies: + '@tsconfig/recommended': + specifier: ^1.0.8 + version: 1.0.8 + '@types/parcel-env': + specifier: ^0.0.8 + version: 0.0.8 + parcel: + specifier: ^2.13.3 + version: 2.13.3(@swc/helpers@0.5.15)(typescript@5.7.3) + prettier: + specifier: ^3.4.2 + version: 3.4.2 + typescript: + specifier: ^5.7.3 + version: 5.7.3 + +packages: + + '@babel/code-frame@7.26.2': + resolution: {integrity: sha512-RJlIHRueQgwWitWgF8OdFYGZX328Ax5BCemNGlqHfplnRT9ESi8JkFlvaVYbS+UubVY6dpv87Fs2u5M29iNFVQ==} + engines: {node: '>=6.9.0'} + + '@babel/helper-validator-identifier@7.25.9': + resolution: {integrity: sha512-Ed61U6XJc3CVRfkERJWDz4dJwKe7iLmmJsbOGu9wSloNSFttHV0I8g6UAgb7qnK5ly5bGLPd4oXZlxCdANBOWQ==} + engines: {node: '>=6.9.0'} + + '@lezer/common@1.2.3': + resolution: {integrity: sha512-w7ojc8ejBqr2REPsWxJjrMFsA/ysDCFICn8zEOR9mrqzOu2amhITYuLD8ag6XZf0CFXDrhKqw7+tW8cX66NaDA==} + + '@lezer/lr@1.4.2': + resolution: {integrity: sha512-pu0K1jCIdnQ12aWNaAVU5bzi7Bd1w54J3ECgANPmYLtQKP0HBj2cE/5coBD66MT10xbtIuUr7tg0Shbsvk0mDA==} + + '@lmdb/lmdb-darwin-arm64@2.8.5': + resolution: {integrity: sha512-KPDeVScZgA1oq0CiPBcOa3kHIqU+pTOwRFDIhxvmf8CTNvqdZQYp5cCKW0bUk69VygB2PuTiINFWbY78aR2pQw==} + cpu: [arm64] + os: [darwin] + + '@lmdb/lmdb-darwin-x64@2.8.5': + resolution: {integrity: sha512-w/sLhN4T7MW1nB3R/U8WK5BgQLz904wh+/SmA2jD8NnF7BLLoUgflCNxOeSPOWp8geP6nP/+VjWzZVip7rZ1ug==} + cpu: [x64] + os: [darwin] + + '@lmdb/lmdb-linux-arm64@2.8.5': + resolution: {integrity: sha512-vtbZRHH5UDlL01TT5jB576Zox3+hdyogvpcbvVJlmU5PdL3c5V7cj1EODdh1CHPksRl+cws/58ugEHi8bcj4Ww==} + cpu: [arm64] + os: [linux] + + '@lmdb/lmdb-linux-arm@2.8.5': + resolution: {integrity: sha512-c0TGMbm2M55pwTDIfkDLB6BpIsgxV4PjYck2HiOX+cy/JWiBXz32lYbarPqejKs9Flm7YVAKSILUducU9g2RVg==} + cpu: [arm] + os: [linux] + + '@lmdb/lmdb-linux-x64@2.8.5': + resolution: {integrity: sha512-Xkc8IUx9aEhP0zvgeKy7IQ3ReX2N8N1L0WPcQwnZweWmOuKfwpS3GRIYqLtK5za/w3E60zhFfNdS+3pBZPytqQ==} + cpu: [x64] + os: [linux] + + '@lmdb/lmdb-win32-x64@2.8.5': + resolution: {integrity: sha512-4wvrf5BgnR8RpogHhtpCPJMKBmvyZPhhUtEwMJbXh0ni2BucpfF07jlmyM11zRqQ2XIq6PbC2j7W7UCCcm1rRQ==} + cpu: [x64] + os: [win32] + + '@mischnic/json-sourcemap@0.1.1': + resolution: {integrity: sha512-iA7+tyVqfrATAIsIRWQG+a7ZLLD0VaOCKV2Wd/v4mqIU3J9c4jx9p7S0nw1XH3gJCKNBOOwACOPYYSUu9pgT+w==} + engines: {node: '>=12.0.0'} + + '@msgpackr-extract/msgpackr-extract-darwin-arm64@3.0.3': + resolution: {integrity: sha512-QZHtlVgbAdy2zAqNA9Gu1UpIuI8Xvsd1v8ic6B2pZmeFnFcMWiPLfWXh7TVw4eGEZ/C9TH281KwhVoeQUKbyjw==} + cpu: [arm64] + os: [darwin] + + '@msgpackr-extract/msgpackr-extract-darwin-x64@3.0.3': + resolution: {integrity: sha512-mdzd3AVzYKuUmiWOQ8GNhl64/IoFGol569zNRdkLReh6LRLHOXxU4U8eq0JwaD8iFHdVGqSy4IjFL4reoWCDFw==} + cpu: [x64] + os: [darwin] + + '@msgpackr-extract/msgpackr-extract-linux-arm64@3.0.3': + resolution: {integrity: sha512-YxQL+ax0XqBJDZiKimS2XQaf+2wDGVa1enVRGzEvLLVFeqa5kx2bWbtcSXgsxjQB7nRqqIGFIcLteF/sHeVtQg==} + cpu: [arm64] + os: [linux] + + '@msgpackr-extract/msgpackr-extract-linux-arm@3.0.3': + resolution: {integrity: sha512-fg0uy/dG/nZEXfYilKoRe7yALaNmHoYeIoJuJ7KJ+YyU2bvY8vPv27f7UKhGRpY6euFYqEVhxCFZgAUNQBM3nw==} + cpu: [arm] + os: [linux] + + 
'@msgpackr-extract/msgpackr-extract-linux-x64@3.0.3': + resolution: {integrity: sha512-cvwNfbP07pKUfq1uH+S6KJ7dT9K8WOE4ZiAcsrSes+UY55E/0jLYc+vq+DO7jlmqRb5zAggExKm0H7O/CBaesg==} + cpu: [x64] + os: [linux] + + '@msgpackr-extract/msgpackr-extract-win32-x64@3.0.3': + resolution: {integrity: sha512-x0fWaQtYp4E6sktbsdAqnehxDgEc/VwM7uLsRCYWaiGu0ykYdZPiS8zCWdnjHwyiumousxfBm4SO31eXqwEZhQ==} + cpu: [x64] + os: [win32] + + '@parcel/bundler-default@2.13.3': + resolution: {integrity: sha512-mOuWeth0bZzRv1b9Lrvydis/hAzJyePy0gwa0tix3/zyYBvw0JY+xkXVR4qKyD/blc1Ra2qOlfI2uD3ucnsdXA==} + engines: {node: '>= 16.0.0', parcel: ^2.13.3} + + '@parcel/cache@2.13.3': + resolution: {integrity: sha512-Vz5+K5uCt9mcuQAMDo0JdbPYDmVdB8Nvu/A2vTEK2rqZPxvoOTczKeMBA4JqzKqGURHPRLaJCvuR8nDG+jhK9A==} + engines: {node: '>= 16.0.0'} + peerDependencies: + '@parcel/core': ^2.13.3 + + '@parcel/codeframe@2.13.3': + resolution: {integrity: sha512-L/PQf+PT0xM8k9nc0B+PxxOYO2phQYnbuifu9o4pFRiqVmCtHztP+XMIvRJ2gOEXy3pgAImSPFVJ3xGxMFky4g==} + engines: {node: '>= 16.0.0'} + + '@parcel/compressor-raw@2.13.3': + resolution: {integrity: sha512-C6vjDlgTLjYc358i7LA/dqcL0XDQZ1IHXFw6hBaHHOfxPKW2T4bzUI6RURyToEK9Q1X7+ggDKqgdLxwp4veCFg==} + engines: {node: '>= 16.0.0', parcel: ^2.13.3} + + '@parcel/config-default@2.13.3': + resolution: {integrity: sha512-WUsx83ic8DgLwwnL1Bua4lRgQqYjxiTT+DBxESGk1paNm1juWzyfPXEQDLXwiCTcWMQGiXQFQ8OuSISauVQ8dQ==} + peerDependencies: + '@parcel/core': ^2.13.3 + + '@parcel/core@2.13.3': + resolution: {integrity: sha512-SRZFtqGiaKHlZ2YAvf+NHvBFWS3GnkBvJMfOJM7kxJRK3M1bhbwJa/GgSdzqro5UVf9Bfj6E+pkdrRQIOZ7jMQ==} + engines: {node: '>= 16.0.0'} + + '@parcel/diagnostic@2.13.3': + resolution: {integrity: sha512-C70KXLBaXLJvr7XCEVu8m6TqNdw1gQLxqg5BQ8roR62R4vWWDnOq8PEksxDi4Y8Z/FF4i3Sapv6tRx9iBNxDEg==} + engines: {node: '>= 16.0.0'} + + '@parcel/events@2.13.3': + resolution: {integrity: sha512-ZkSHTTbD/E+53AjUzhAWTnMLnxLEU5yRw0H614CaruGh+GjgOIKyukGeToF5Gf/lvZ159VrJCGE0Z5EpgHVkuQ==} + engines: {node: '>= 16.0.0'} + + '@parcel/feature-flags@2.13.3': + resolution: {integrity: sha512-UZm14QpamDFoUut9YtCZSpG1HxPs07lUwUCpsAYL0PpxASD3oWJQxIJGfDZPa2272DarXDG9adTKrNXvkHZblw==} + engines: {node: '>= 16.0.0'} + + '@parcel/fs@2.13.3': + resolution: {integrity: sha512-+MPWAt0zr+TCDSlj1LvkORTjfB/BSffsE99A9AvScKytDSYYpY2s0t4vtV9unSh0FHMS2aBCZNJ4t7KL+DcPIg==} + engines: {node: '>= 16.0.0'} + peerDependencies: + '@parcel/core': ^2.13.3 + + '@parcel/graph@3.3.3': + resolution: {integrity: sha512-pxs4GauEdvCN8nRd6wG3st6LvpHske3GfqGwUSR0P0X0pBPI1/NicvXz6xzp3rgb9gPWfbKXeI/2IOTfIxxVfg==} + engines: {node: '>= 16.0.0'} + + '@parcel/logger@2.13.3': + resolution: {integrity: sha512-8YF/ZhsQgd7ohQ2vEqcMD1Ag9JlJULROWRPGgGYLGD+twuxAiSdiFBpN3f+j4gQN4PYaLaIS/SwUFx11J243fQ==} + engines: {node: '>= 16.0.0'} + + '@parcel/markdown-ansi@2.13.3': + resolution: {integrity: sha512-B4rUdlNUulJs2xOQuDbN7Hq5a9roq8IZUcJ1vQ8PAv+zMGb7KCfqIIr/BSCDYGhayfAGBVWW8x55Kvrl1zrDYw==} + engines: {node: '>= 16.0.0'} + + '@parcel/namer-default@2.13.3': + resolution: {integrity: sha512-A2a5A5fuyNcjSGOS0hPcdQmOE2kszZnLIXof7UMGNkNkeC62KAG8WcFZH5RNOY3LT5H773hq51zmc2Y2gE5Rnw==} + engines: {node: '>= 16.0.0', parcel: ^2.13.3} + + '@parcel/node-resolver-core@3.4.3': + resolution: {integrity: sha512-IEnMks49egEic1ITBp59VQyHzkSQUXqpU9hOHwqN3KoSTdZ6rEgrXcS3pa6tdXay4NYGlcZ88kFCE8i/xYoVCg==} + engines: {node: '>= 16.0.0'} + + '@parcel/optimizer-css@2.13.3': + resolution: {integrity: sha512-A8o9IVCv919vhv69SkLmyW2WjJR5WZgcMqV6L1uiGF8i8z18myrMhrp2JuSHx29PRT9uNyzNC4Xrd4StYjIhJg==} + engines: {node: '>= 
16.0.0', parcel: ^2.13.3} + + '@parcel/optimizer-htmlnano@2.13.3': + resolution: {integrity: sha512-K4Uvg0Sy2pECP7pdvvbud++F0pfcbNkq+IxTrgqBX5HJnLEmRZwgdvZEKF43oMEolclMnURMQRGjRplRaPdbXg==} + engines: {node: '>= 16.0.0', parcel: ^2.13.3} + + '@parcel/optimizer-image@2.13.3': + resolution: {integrity: sha512-wlDUICA29J4UnqkKrWiyt68g1e85qfYhp4zJFcFJL0LX1qqh1QwsLUz3YJ+KlruoqPxJSFEC8ncBEKiVCsqhEQ==} + engines: {node: '>= 16.0.0', parcel: ^2.13.3} + peerDependencies: + '@parcel/core': ^2.13.3 + + '@parcel/optimizer-svgo@2.13.3': + resolution: {integrity: sha512-piIKxQKzhZK54dJR6yqIcq+urZmpsfgUpLCZT3cnWlX4ux5+S2iN66qqZBs0zVn+a58LcWcoP4Z9ieiJmpiu2w==} + engines: {node: '>= 16.0.0', parcel: ^2.13.3} + + '@parcel/optimizer-swc@2.13.3': + resolution: {integrity: sha512-zNSq6oWqLlW8ksPIDjM0VgrK6ZAJbPQCDvs1V+p0oX3CzEe85lT5VkRpnfrN1+/vvEJNGL8e60efHKpI+rXGTA==} + engines: {node: '>= 16.0.0', parcel: ^2.13.3} + + '@parcel/package-manager@2.13.3': + resolution: {integrity: sha512-FLNI5OrZxymGf/Yln0E/kjnGn5sdkQAxW7pQVdtuM+5VeN75yibJRjsSGv88PvJ+KvpD2ANgiIJo1RufmoPcww==} + engines: {node: '>= 16.0.0'} + peerDependencies: + '@parcel/core': ^2.13.3 + + '@parcel/packager-css@2.13.3': + resolution: {integrity: sha512-ghDqRMtrUwaDERzFm9le0uz2PTeqqsjsW0ihQSZPSAptElRl9o5BR+XtMPv3r7Ui0evo+w35gD55oQCJ28vCig==} + engines: {node: '>= 16.0.0', parcel: ^2.13.3} + + '@parcel/packager-html@2.13.3': + resolution: {integrity: sha512-jDLnKSA/EzVEZ3/aegXO3QJ/Ij732AgBBkIQfeC8tUoxwVz5b3HiPBAjVjcUSfZs7mdBSHO+ELWC3UD+HbsIrQ==} + engines: {node: '>= 16.0.0', parcel: ^2.13.3} + + '@parcel/packager-js@2.13.3': + resolution: {integrity: sha512-0pMHHf2zOn7EOJe88QJw5h/wcV1bFfj6cXVcE55Wa8GX3V+SdCgolnlvNuBcRQ1Tlx0Xkpo+9hMFVIQbNQY6zw==} + engines: {node: '>= 16.0.0', parcel: ^2.13.3} + + '@parcel/packager-raw@2.13.3': + resolution: {integrity: sha512-AWu4UB+akBdskzvT3KGVHIdacU9f7cI678DQQ1jKQuc9yZz5D0VFt3ocFBOmvDfEQDF0uH3jjtJR7fnuvX7Biw==} + engines: {node: '>= 16.0.0', parcel: ^2.13.3} + + '@parcel/packager-svg@2.13.3': + resolution: {integrity: sha512-tKGRiFq/4jh5u2xpTstNQ7gu+RuZWzlWqpw5NaFmcKe6VQe5CMcS499xTFoREAGnRvevSeIgC38X1a+VOo+/AA==} + engines: {node: '>= 16.0.0', parcel: ^2.13.3} + + '@parcel/packager-wasm@2.13.3': + resolution: {integrity: sha512-SZB56/b230vFrSehVXaUAWjJmWYc89gzb8OTLkBm7uvtFtov2J1R8Ig9TTJwinyXE3h84MCFP/YpQElSfoLkJw==} + engines: {node: '>=16.0.0', parcel: ^2.13.3} + + '@parcel/plugin@2.13.3': + resolution: {integrity: sha512-cterKHHcwg6q11Gpif/aqvHo056TR+yDVJ3fSdiG2xr5KD1VZ2B3hmofWERNNwjMcnR1h9Xq40B7jCKUhOyNFA==} + engines: {node: '>= 16.0.0'} + + '@parcel/profiler@2.13.3': + resolution: {integrity: sha512-ok6BwWSLvyHe5TuSXjSacYnDStFgP5Y30tA9mbtWSm0INDsYf+m5DqzpYPx8U54OaywWMK8w3MXUClosJX3aPA==} + engines: {node: '>= 16.0.0'} + + '@parcel/reporter-cli@2.13.3': + resolution: {integrity: sha512-EA5tKt/6bXYNMEavSs35qHlFdx6cZmRazlZxPBgxPePQYoouNAPMNLUOEQozaPhz9f5fvNDN7EHOFaAWcdO2LA==} + engines: {node: '>= 16.0.0', parcel: ^2.13.3} + + '@parcel/reporter-dev-server@2.13.3': + resolution: {integrity: sha512-ZNeFp6AOIQFv7mZIv2P5O188dnZHNg0ymeDVcakfZomwhpSva2dFNS3AnvWo4eyWBlUxkmQO8BtaxeWTs7jAuA==} + engines: {node: '>= 16.0.0', parcel: ^2.13.3} + + '@parcel/reporter-tracer@2.13.3': + resolution: {integrity: sha512-aBsVPI8jLZTDkFYrI69GxnsdvZKEYerkPsu935LcX9rfUYssOnmmUP+3oI+8fbg+qNjJuk9BgoQ4hCp9FOphMQ==} + engines: {node: '>= 16.0.0', parcel: ^2.13.3} + + '@parcel/resolver-default@2.13.3': + resolution: {integrity: sha512-urBZuRALWT9pFMeWQ8JirchLmsQEyI9lrJptiwLbJWrwvmlwSUGkcstmPwoNRf/aAQjICB7ser/247Vny0pFxA==} + engines: 
{node: '>= 16.0.0', parcel: ^2.13.3} + + '@parcel/runtime-browser-hmr@2.13.3': + resolution: {integrity: sha512-EAcPojQFUNUGUrDk66cu3ySPO0NXRVS5CKPd4QrxPCVVbGzde4koKu8krC/TaGsoyUqhie8HMnS70qBP0GFfcQ==} + engines: {node: '>= 16.0.0', parcel: ^2.13.3} + + '@parcel/runtime-js@2.13.3': + resolution: {integrity: sha512-62OucNAnxb2Q0uyTFWW/0Hvv2DJ4b5H6neh/YFu2/wmxaZ37xTpEuEcG2do7KW54xE5DeLP+RliHLwi4NvR3ww==} + engines: {node: '>= 16.0.0', parcel: ^2.13.3} + + '@parcel/runtime-react-refresh@2.13.3': + resolution: {integrity: sha512-PYZ1klpJVwqE3WuifILjtF1dugtesHEuJcXYZI85T6UoRSD5ctS1nAIpZzT14Ga1lRt/jd+eAmhWL1l3m/Vk1Q==} + engines: {node: '>= 16.0.0', parcel: ^2.13.3} + + '@parcel/runtime-service-worker@2.13.3': + resolution: {integrity: sha512-BjMhPuT7Us1+YIo31exPRwomPiL+jrZZS5UUAwlEW2XGHDceEotzRM94LwxeFliCScT4IOokGoxixm19qRuzWg==} + engines: {node: '>= 16.0.0', parcel: ^2.13.3} + + '@parcel/rust@2.13.3': + resolution: {integrity: sha512-dLq85xDAtzr3P5200cvxk+8WXSWauYbxuev9LCPdwfhlaWo/JEj6cu9seVdWlkagjGwkoV1kXC+GGntgUXOLAQ==} + engines: {node: '>= 16.0.0'} + + '@parcel/source-map@2.1.1': + resolution: {integrity: sha512-Ejx1P/mj+kMjQb8/y5XxDUn4reGdr+WyKYloBljpppUy8gs42T+BNoEOuRYqDVdgPc6NxduzIDoJS9pOFfV5Ew==} + engines: {node: ^12.18.3 || >=14} + + '@parcel/transformer-babel@2.13.3': + resolution: {integrity: sha512-ikzK9f5WTFrdQsPitQgjCPH6HmVU8AQPRemIJ2BndYhtodn5PQut5cnSvTrqax8RjYvheEKCQk/Zb/uR7qgS3g==} + engines: {node: '>= 16.0.0', parcel: ^2.13.3} + + '@parcel/transformer-css@2.13.3': + resolution: {integrity: sha512-zbrNURGph6JeVADbGydyZ7lcu/izj41kDxQ9xw4RPRW/3rofQiTU0OTREi+uBWiMENQySXVivEdzHA9cA+aLAA==} + engines: {node: '>= 16.0.0', parcel: ^2.13.3} + + '@parcel/transformer-html@2.13.3': + resolution: {integrity: sha512-Yf74FkL9RCCB4+hxQRVMNQThH9+fZ5w0NLiQPpWUOcgDEEyxTi4FWPQgEBsKl/XK2ehdydbQB9fBgPQLuQxwPg==} + engines: {node: '>= 16.0.0', parcel: ^2.13.3} + + '@parcel/transformer-image@2.13.3': + resolution: {integrity: sha512-wL1CXyeFAqbp2wcEq/JD3a/tbAyVIDMTC6laQxlIwnVV7dsENhK1qRuJZuoBdixESeUpFQSmmQvDIhcfT/cUUg==} + engines: {node: '>= 16.0.0', parcel: ^2.13.3} + peerDependencies: + '@parcel/core': ^2.13.3 + + '@parcel/transformer-js@2.13.3': + resolution: {integrity: sha512-KqfNGn1IHzDoN2aPqt4nDksgb50Xzcny777C7A7hjlQ3cmkjyJrixYjzzsPaPSGJ+kJpknh3KE8unkQ9mhFvRQ==} + engines: {node: '>= 16.0.0', parcel: ^2.13.3} + peerDependencies: + '@parcel/core': ^2.13.3 + + '@parcel/transformer-json@2.13.3': + resolution: {integrity: sha512-rrq0ab6J0w9ePtsxi0kAvpCmrUYXXAx1Z5PATZakv89rSYbHBKEdXxyCoKFui/UPVCUEGVs5r0iOFepdHpIyeA==} + engines: {node: '>= 16.0.0', parcel: ^2.13.3} + + '@parcel/transformer-postcss@2.13.3': + resolution: {integrity: sha512-AIiWpU0QSFBrPcYIqAnhqB8RGE6yHFznnxztfg1t2zMSOnK3xoU6xqYKv8H/MduShGGrC3qVOeDfM8MUwzL3cw==} + engines: {node: '>= 16.0.0', parcel: ^2.13.3} + + '@parcel/transformer-posthtml@2.13.3': + resolution: {integrity: sha512-5GSLyccpHASwFAu3uJ83gDIBSvfsGdVmhJvy0Vxe+K1Fklk2ibhvvtUHMhB7mg6SPHC+R9jsNc3ZqY04ZLeGjw==} + engines: {node: '>= 16.0.0', parcel: ^2.13.3} + + '@parcel/transformer-raw@2.13.3': + resolution: {integrity: sha512-BFsAbdQF0l8/Pdb7dSLJeYcd8jgwvAUbHgMink2MNXJuRUvDl19Gns8jVokU+uraFHulJMBj40+K/RTd33in4g==} + engines: {node: '>= 16.0.0', parcel: ^2.13.3} + + '@parcel/transformer-react-refresh-wrap@2.13.3': + resolution: {integrity: sha512-mOof4cRyxsZRdg8kkWaFtaX98mHpxUhcGPU+nF9RQVa9q737ItxrorsPNR9hpZAyE2TtFNflNW7RoYsgvlLw8w==} + engines: {node: '>= 16.0.0', parcel: ^2.13.3} + + '@parcel/transformer-svg@2.13.3': + resolution: {integrity: 
sha512-9jm7ZF4KHIrGLWlw/SFUz5KKJ20nxHvjFAmzde34R9Wu+F1BOjLZxae7w4ZRwvIc+UVOUcBBQFmhSVwVDZg6Dw==} + engines: {node: '>= 16.0.0', parcel: ^2.13.3} + + '@parcel/types-internal@2.13.3': + resolution: {integrity: sha512-Lhx0n+9RCp+Ipktf/I+CLm3zE9Iq9NtDd8b2Vr5lVWyoT8AbzBKIHIpTbhLS4kjZ80L3I6o93OYjqAaIjsqoZw==} + + '@parcel/types@2.13.3': + resolution: {integrity: sha512-+RpFHxx8fy8/dpuehHUw/ja9PRExC3wJoIlIIF42E7SLu2SvlTHtKm6EfICZzxCXNEBzjoDbamCRcN0nmTPlhw==} + + '@parcel/utils@2.13.3': + resolution: {integrity: sha512-yxY9xw2wOUlJaScOXYZmMGoZ4Ck4Kqj+p6Koe5kLkkWM1j98Q0Dj2tf/mNvZi4yrdnlm+dclCwNRnuE8Q9D+pw==} + engines: {node: '>= 16.0.0'} + + '@parcel/watcher-android-arm64@2.5.1': + resolution: {integrity: sha512-KF8+j9nNbUN8vzOFDpRMsaKBHZ/mcjEjMToVMJOhTozkDonQFFrRcfdLWn6yWKCmJKmdVxSgHiYvTCef4/qcBA==} + engines: {node: '>= 10.0.0'} + cpu: [arm64] + os: [android] + + '@parcel/watcher-darwin-arm64@2.5.1': + resolution: {integrity: sha512-eAzPv5osDmZyBhou8PoF4i6RQXAfeKL9tjb3QzYuccXFMQU0ruIc/POh30ePnaOyD1UXdlKguHBmsTs53tVoPw==} + engines: {node: '>= 10.0.0'} + cpu: [arm64] + os: [darwin] + + '@parcel/watcher-darwin-x64@2.5.1': + resolution: {integrity: sha512-1ZXDthrnNmwv10A0/3AJNZ9JGlzrF82i3gNQcWOzd7nJ8aj+ILyW1MTxVk35Db0u91oD5Nlk9MBiujMlwmeXZg==} + engines: {node: '>= 10.0.0'} + cpu: [x64] + os: [darwin] + + '@parcel/watcher-freebsd-x64@2.5.1': + resolution: {integrity: sha512-SI4eljM7Flp9yPuKi8W0ird8TI/JK6CSxju3NojVI6BjHsTyK7zxA9urjVjEKJ5MBYC+bLmMcbAWlZ+rFkLpJQ==} + engines: {node: '>= 10.0.0'} + cpu: [x64] + os: [freebsd] + + '@parcel/watcher-linux-arm-glibc@2.5.1': + resolution: {integrity: sha512-RCdZlEyTs8geyBkkcnPWvtXLY44BCeZKmGYRtSgtwwnHR4dxfHRG3gR99XdMEdQ7KeiDdasJwwvNSF5jKtDwdA==} + engines: {node: '>= 10.0.0'} + cpu: [arm] + os: [linux] + + '@parcel/watcher-linux-arm-musl@2.5.1': + resolution: {integrity: sha512-6E+m/Mm1t1yhB8X412stiKFG3XykmgdIOqhjWj+VL8oHkKABfu/gjFj8DvLrYVHSBNC+/u5PeNrujiSQ1zwd1Q==} + engines: {node: '>= 10.0.0'} + cpu: [arm] + os: [linux] + + '@parcel/watcher-linux-arm64-glibc@2.5.1': + resolution: {integrity: sha512-LrGp+f02yU3BN9A+DGuY3v3bmnFUggAITBGriZHUREfNEzZh/GO06FF5u2kx8x+GBEUYfyTGamol4j3m9ANe8w==} + engines: {node: '>= 10.0.0'} + cpu: [arm64] + os: [linux] + + '@parcel/watcher-linux-arm64-musl@2.5.1': + resolution: {integrity: sha512-cFOjABi92pMYRXS7AcQv9/M1YuKRw8SZniCDw0ssQb/noPkRzA+HBDkwmyOJYp5wXcsTrhxO0zq1U11cK9jsFg==} + engines: {node: '>= 10.0.0'} + cpu: [arm64] + os: [linux] + + '@parcel/watcher-linux-x64-glibc@2.5.1': + resolution: {integrity: sha512-GcESn8NZySmfwlTsIur+49yDqSny2IhPeZfXunQi48DMugKeZ7uy1FX83pO0X22sHntJ4Ub+9k34XQCX+oHt2A==} + engines: {node: '>= 10.0.0'} + cpu: [x64] + os: [linux] + + '@parcel/watcher-linux-x64-musl@2.5.1': + resolution: {integrity: sha512-n0E2EQbatQ3bXhcH2D1XIAANAcTZkQICBPVaxMeaCVBtOpBZpWJuf7LwyWPSBDITb7In8mqQgJ7gH8CILCURXg==} + engines: {node: '>= 10.0.0'} + cpu: [x64] + os: [linux] + + '@parcel/watcher-win32-arm64@2.5.1': + resolution: {integrity: sha512-RFzklRvmc3PkjKjry3hLF9wD7ppR4AKcWNzH7kXR7GUe0Igb3Nz8fyPwtZCSquGrhU5HhUNDr/mKBqj7tqA2Vw==} + engines: {node: '>= 10.0.0'} + cpu: [arm64] + os: [win32] + + '@parcel/watcher-win32-ia32@2.5.1': + resolution: {integrity: sha512-c2KkcVN+NJmuA7CGlaGD1qJh1cLfDnQsHjE89E60vUEMlqduHGCdCLJCID5geFVM0dOtA3ZiIO8BoEQmzQVfpQ==} + engines: {node: '>= 10.0.0'} + cpu: [ia32] + os: [win32] + + '@parcel/watcher-win32-x64@2.5.1': + resolution: {integrity: sha512-9lHBdJITeNR++EvSQVUcaZoWupyHfXe1jZvGZ06O/5MflPcuPLtEphScIBL+AiCWBO46tDSHzWyD0uDmmZqsgA==} + engines: {node: '>= 10.0.0'} + cpu: [x64] + 
os: [win32] + + '@parcel/watcher@2.5.1': + resolution: {integrity: sha512-dfUnCxiN9H4ap84DvD2ubjw+3vUNpstxa0TneY/Paat8a3R4uQZDLSvWjmznAY/DoahqTHl9V46HF/Zs3F29pg==} + engines: {node: '>= 10.0.0'} + + '@parcel/workers@2.13.3': + resolution: {integrity: sha512-oAHmdniWTRwwwsKbcF4t3VjOtKN+/W17Wj5laiYB+HLkfsjGTfIQPj3sdXmrlBAGpI4omIcvR70PHHXnfdTfwA==} + engines: {node: '>= 16.0.0'} + peerDependencies: + '@parcel/core': ^2.13.3 + + '@swc/core-darwin-arm64@1.10.12': + resolution: {integrity: sha512-pOANQegUTAriW7jq3SSMZGM5l89yLVMs48R0F2UG6UZsH04SiViCnDctOGlA/Sa++25C+rL9MGMYM1jDLylBbg==} + engines: {node: '>=10'} + cpu: [arm64] + os: [darwin] + + '@swc/core-darwin-x64@1.10.12': + resolution: {integrity: sha512-m4kbpIDDsN1FrwfNQMU+FTrss356xsXvatLbearwR+V0lqOkjLBP0VmRvQfHEg+uy13VPyrT9gj4HLoztlci7w==} + engines: {node: '>=10'} + cpu: [x64] + os: [darwin] + + '@swc/core-linux-arm-gnueabihf@1.10.12': + resolution: {integrity: sha512-OY9LcupgqEu8zVK+rJPes6LDJJwPDmwaShU96beTaxX2K6VrXbpwm5WbPS/8FfQTsmpnuA7dCcMPUKhNgmzTrQ==} + engines: {node: '>=10'} + cpu: [arm] + os: [linux] + + '@swc/core-linux-arm64-gnu@1.10.12': + resolution: {integrity: sha512-nJD587rO0N4y4VZszz3xzVr7JIiCzSMhEMWnPjuh+xmPxDBz0Qccpr8xCr1cSxpl1uY7ERkqAGlKr6CwoV5kVg==} + engines: {node: '>=10'} + cpu: [arm64] + os: [linux] + + '@swc/core-linux-arm64-musl@1.10.12': + resolution: {integrity: sha512-oqhSmV+XauSf0C//MoQnVErNUB/5OzmSiUzuazyLsD5pwqKNN+leC3JtRQ/QVzaCpr65jv9bKexT9+I2Tt3xDw==} + engines: {node: '>=10'} + cpu: [arm64] + os: [linux] + + '@swc/core-linux-x64-gnu@1.10.12': + resolution: {integrity: sha512-XldSIHyjD7m1Gh+/8rxV3Ok711ENLI420CU2EGEqSe3VSGZ7pHJvJn9ZFbYpWhsLxPqBYMFjp3Qw+J6OXCPXCA==} + engines: {node: '>=10'} + cpu: [x64] + os: [linux] + + '@swc/core-linux-x64-musl@1.10.12': + resolution: {integrity: sha512-wvPXzJxzPgTqhyp1UskOx1hRTtdWxlyFD1cGWOxgLsMik0V9xKRgqKnMPv16Nk7L9xl6quQ6DuUHj9ID7L3oVw==} + engines: {node: '>=10'} + cpu: [x64] + os: [linux] + + '@swc/core-win32-arm64-msvc@1.10.12': + resolution: {integrity: sha512-TUYzWuu1O7uyIcRfxdm6Wh1u+gNnrW5M1DUgDOGZLsyQzgc2Zjwfh2llLhuAIilvCVg5QiGbJlpibRYJ/8QGsg==} + engines: {node: '>=10'} + cpu: [arm64] + os: [win32] + + '@swc/core-win32-ia32-msvc@1.10.12': + resolution: {integrity: sha512-4Qrw+0Xt+Fe2rz4OJ/dEPMeUf/rtuFWWAj/e0vL7J5laUHirzxawLRE5DCJLQTarOiYR6mWnmadt9o3EKzV6Xg==} + engines: {node: '>=10'} + cpu: [ia32] + os: [win32] + + '@swc/core-win32-x64-msvc@1.10.12': + resolution: {integrity: sha512-YiloZXLW7rUxJpALwHXaGjVaAEn+ChoblG7/3esque+Y7QCyheoBUJp2DVM1EeVA43jBfZ8tvYF0liWd9Tpz1A==} + engines: {node: '>=10'} + cpu: [x64] + os: [win32] + + '@swc/core@1.10.12': + resolution: {integrity: sha512-+iUL0PYpPm6N9AdV1wvafakvCqFegQus1aoEDxgFsv3/uNVNIyRaupf/v/Zkp5hbep2EzhtoJR0aiJIzDbXWHg==} + engines: {node: '>=10'} + peerDependencies: + '@swc/helpers': '*' + peerDependenciesMeta: + '@swc/helpers': + optional: true + + '@swc/counter@0.1.3': + resolution: {integrity: sha512-e2BR4lsJkkRlKZ/qCHPw9ZaSxc0MVUd7gtbtaB7aMvHeJVYe8sOB8DBZkP2DtISHGSku9sCK6T6cnY0CtXrOCQ==} + + '@swc/helpers@0.5.15': + resolution: {integrity: sha512-JQ5TuMi45Owi4/BIMAJBoSQoOJu12oOk/gADqlcUL9JEdHB8vyjUSsxqeNXnmXHjYKMi2WcYtezGEEhqUI/E2g==} + + '@swc/types@0.1.17': + resolution: {integrity: sha512-V5gRru+aD8YVyCOMAjMpWR1Ui577DD5KSJsHP8RAxopAH22jFz6GZd/qxqjO6MJHQhcsjvjOFXyDhyLQUnMveQ==} + + '@tsconfig/recommended@1.0.8': + resolution: {integrity: sha512-TotjFaaXveVUdsrXCdalyF6E5RyG6+7hHHQVZonQtdlk1rJZ1myDIvPUUKPhoYv+JAzThb2lQJh9+9ZfF46hsA==} + + '@types/parcel-env@0.0.8': + resolution: {integrity: 
sha512-6Sa7yWgEPn6jxv1A4AdEMUTAth909LMjJhMfQOp3icwA3fVHZo1wPY+tQTWE/tZvomSa2M82V05pdk1CW8T7Xw==} + + ansi-styles@4.3.0: + resolution: {integrity: sha512-zbB9rCJAT1rbjiVDb2hqKFHNYLxgtk8NURxZ3IZwD3F6NtxbXZQCnnSi1Lkx+IDohdPlFp222wVALIheZJQSEg==} + engines: {node: '>=8'} + + argparse@2.0.1: + resolution: {integrity: sha512-8+9WqebbFzpX9OR+Wa6O29asIogeRMzcGtAINdpMHHyAg10f05aSFVBbcEqGf/PXw1EjAZ+q2/bEBg3DvurK3Q==} + + base-x@3.0.10: + resolution: {integrity: sha512-7d0s06rR9rYaIWHkpfLIFICM/tkSVdoPC9qYAQRpxn9DdKNWNsKC0uk++akckyLq16Tx2WIinnZ6WRriAt6njQ==} + + braces@3.0.3: + resolution: {integrity: sha512-yQbXgO/OSZVD2IsiLlro+7Hf6Q18EJrKSEsdoMzKePKXct3gvD8oLcOQdIzGupr5Fj+EDe8gO/lxc1BzfMpxvA==} + engines: {node: '>=8'} + + browserslist@4.24.4: + resolution: {integrity: sha512-KDi1Ny1gSePi1vm0q4oxSF8b4DR44GF4BbmS2YdhPLOEqd8pDviZOGH/GsmRwoWJ2+5Lr085X7naowMwKHDG1A==} + engines: {node: ^6 || ^7 || ^8 || ^9 || ^10 || ^11 || ^12 || >=13.7} + hasBin: true + + callsites@3.1.0: + resolution: {integrity: sha512-P8BjAsXvZS+VIDUI11hHCQEv74YT67YUi5JJFNWIqL235sBmjX4+qx9Muvls5ivyNENctx46xQLQ3aTuE7ssaQ==} + engines: {node: '>=6'} + + caniuse-lite@1.0.30001696: + resolution: {integrity: sha512-pDCPkvzfa39ehJtJ+OwGT/2yvT2SbjfHhiIW2LWOAcMQ7BzwxT/XuyUp4OTOd0XFWA6BKw0JalnBHgSi5DGJBQ==} + + chalk@4.1.2: + resolution: {integrity: sha512-oKnbhFyRIXpUuez8iBMmyEa4nbj4IOQyuhc/wy9kY7/WVPcwIO9VA668Pu8RkO7+0G76SLROeyw9CpQ061i4mA==} + engines: {node: '>=10'} + + chrome-trace-event@1.0.4: + resolution: {integrity: sha512-rNjApaLzuwaOTjCiT8lSDdGN1APCiqkChLMJxJPWLunPAt5fy8xgU9/jNOchV84wfIxrA0lRQB7oCT8jrn/wrQ==} + engines: {node: '>=6.0'} + + clone@2.1.2: + resolution: {integrity: sha512-3Pe/CF1Nn94hyhIYpjtiLhdCoEoz0DqQ+988E9gmeEdQZlojxnOb74wctFyuwWQHzqyf9X7C7MG8juUpqBJT8w==} + engines: {node: '>=0.8'} + + color-convert@2.0.1: + resolution: {integrity: sha512-RRECPsj7iu/xb5oKYcsFHSppFNnsj/52OVTRKb4zP5onXwVF3zVmmToNcOfGC+CRDpfK/U584fMg38ZHCaElKQ==} + engines: {node: '>=7.0.0'} + + color-name@1.1.4: + resolution: {integrity: sha512-dOy+3AuW3a2wNbZHIuMZpTcgjGuLU/uBL/ubcZF9OXbDo8ff4O8yVp5Bf0efS8uEoYo5q4Fx7dY9OgQGXgAsQA==} + + commander@12.1.0: + resolution: {integrity: sha512-Vw8qHK3bZM9y/P10u3Vib8o/DdkvA2OtPtZvD871QKjy74Wj1WSKFILMPRPSdUSx5RFK1arlJzEtA4PkFgnbuA==} + engines: {node: '>=18'} + + cosmiconfig@9.0.0: + resolution: {integrity: sha512-itvL5h8RETACmOTFc4UfIyB2RfEHi71Ax6E/PivVxq9NseKbOWpeyHEOIbmAw1rs8Ak0VursQNww7lf7YtUwzg==} + engines: {node: '>=14'} + peerDependencies: + typescript: '>=4.9.5' + peerDependenciesMeta: + typescript: + optional: true + + detect-libc@1.0.3: + resolution: {integrity: sha512-pGjwhsmsp4kL2RTz08wcOlGN83otlqHeD/Z5T8GXZB+/YcpQ/dgo+lbU8ZsGxV0HIvqqxo9l7mqYwyYMD9bKDg==} + engines: {node: '>=0.10'} + hasBin: true + + detect-libc@2.0.3: + resolution: {integrity: sha512-bwy0MGW55bG41VqxxypOsdSdGqLwXPI/focwgTYCFMbdUiBAxLg9CFzG08sz2aqzknwiX7Hkl0bQENjg8iLByw==} + engines: {node: '>=8'} + + dom-serializer@1.4.1: + resolution: {integrity: sha512-VHwB3KfrcOOkelEG2ZOfxqLZdfkil8PtJi4P8N2MMXucZq2yLp75ClViUlOVwyoHEDjYU433Aq+5zWP61+RGag==} + + dom-serializer@2.0.0: + resolution: {integrity: sha512-wIkAryiqt/nV5EQKqQpo3SToSOV9J0DnbJqwK7Wv/Trc92zIAYZ4FlMu+JPFW1DfGFt81ZTCGgDEabffXeLyJg==} + + domelementtype@2.3.0: + resolution: {integrity: sha512-OLETBj6w0OsagBwdXnPdN0cnMfF9opN69co+7ZrbfPGrdpPVNBUj02spi6B1N7wChLQiPn4CSH/zJvXw56gmHw==} + + domhandler@4.3.1: + resolution: {integrity: sha512-GrwoxYN+uWlzO8uhUXRl0P+kHE4GtVPfYzVLcUxPL7KNdHKj66vvlhiweIHqYYXWlw+T8iLMp42Lm67ghw4WMQ==} + engines: {node: '>= 4'} + + domhandler@5.0.3: + 
resolution: {integrity: sha512-cgwlv/1iFQiFnU96XXgROh8xTeetsnJiDsTc7TYCLFd9+/WNkIqPTxiM/8pSd8VIrhXGTf1Ny1q1hquVqDJB5w==} + engines: {node: '>= 4'} + + domutils@2.8.0: + resolution: {integrity: sha512-w96Cjofp72M5IIhpjgobBimYEfoPjx1Vx0BSX9P30WBdZW2WIKU0T1Bd0kz2eNZ9ikjKgHbEyKx8BB6H1L3h3A==} + + domutils@3.2.2: + resolution: {integrity: sha512-6kZKyUajlDuqlHKVX1w7gyslj9MPIXzIFiz/rGu35uC1wMi+kMhQwGhl4lt9unC9Vb9INnY9Z3/ZA3+FhASLaw==} + + dotenv-expand@11.0.7: + resolution: {integrity: sha512-zIHwmZPRshsCdpMDyVsqGmgyP0yT8GAgXUnkdAoJisxvf33k7yO6OuoKmcTGuXPWSsm8Oh88nZicRLA9Y0rUeA==} + engines: {node: '>=12'} + + dotenv@16.4.7: + resolution: {integrity: sha512-47qPchRCykZC03FhkYAhrvwU4xDBFIj1QPqaarj6mdM/hgUzfPHcpkHJOn3mJAufFeeAxAzeGsr5X0M4k6fLZQ==} + engines: {node: '>=12'} + + electron-to-chromium@1.5.90: + resolution: {integrity: sha512-C3PN4aydfW91Natdyd449Kw+BzhLmof6tzy5W1pFC5SpQxVXT+oyiyOG9AgYYSN9OdA/ik3YkCrpwqI8ug5Tug==} + + entities@2.2.0: + resolution: {integrity: sha512-p92if5Nz619I0w+akJrLZH0MX0Pb5DX39XOwQTtXSdQQOaYH03S1uIQp4mhOZtAXrxq4ViO67YTiLBo2638o9A==} + + entities@3.0.1: + resolution: {integrity: sha512-WiyBqoomrwMdFG1e0kqvASYfnlb0lp8M5o5Fw2OFq1hNZxxcNk8Ik0Xm7LxzBhuidnZB/UtBqVCgUz3kBOP51Q==} + engines: {node: '>=0.12'} + + entities@4.5.0: + resolution: {integrity: sha512-V0hjH4dGPh9Ao5p0MoRY6BVqtwCjhz6vI5LT8AJ55H+4g9/4vbHx1I54fS0XuclLhDHArPQCiMjDxjaL8fPxhw==} + engines: {node: '>=0.12'} + + env-paths@2.2.1: + resolution: {integrity: sha512-+h1lkLKhZMTYjog1VEpJNG7NZJWcuc2DDk/qsqSTRRCOXiLjeQ1d1/udrUGhqMxUgAlwKNZ0cf2uqan5GLuS2A==} + engines: {node: '>=6'} + + error-ex@1.3.2: + resolution: {integrity: sha512-7dFHNmqeFSEt2ZBsCriorKnn3Z2pj+fd9kmI6QoWw4//DL+icEBfc0U7qJCisqrTsKTjw4fNFy2pW9OqStD84g==} + + escalade@3.2.0: + resolution: {integrity: sha512-WUj2qlxaQtO4g6Pq5c29GTcWGDyd8itL8zTlipgECz3JesAiiOKotd8JU6otB3PACgG6xkJUyVhboMS+bje/jA==} + engines: {node: '>=6'} + + fill-range@7.1.1: + resolution: {integrity: sha512-YsGpe3WHLK8ZYi4tWDg2Jy3ebRz2rXowDxnld4bkQB00cc/1Zw9AWnC0i9ztDJitivtQvaI9KaLyKrc+hBW0yg==} + engines: {node: '>=8'} + + get-port@4.2.0: + resolution: {integrity: sha512-/b3jarXkH8KJoOMQc3uVGHASwGLPq3gSFJ7tgJm2diza+bydJPTGOibin2steecKeOylE8oY2JERlVWkAJO6yw==} + engines: {node: '>=6'} + + globals@13.24.0: + resolution: {integrity: sha512-AhO5QUcj8llrbG09iWhPU2B204J1xnPeL8kQmVorSsy+Sjj1sk8gIyh6cUocGmH4L0UuhAJy+hJMRA4mgA4mFQ==} + engines: {node: '>=8'} + + has-flag@4.0.0: + resolution: {integrity: sha512-EykJT/Q1KjTWctppgIAgfSO0tKVuZUjhgMr17kqTumMl6Afv3EISleU7qZUzoXDFTAHTDC4NOoG/ZxU3EvlMPQ==} + engines: {node: '>=8'} + + htmlnano@2.1.1: + resolution: {integrity: sha512-kAERyg/LuNZYmdqgCdYvugyLWNFAm8MWXpQMz1pLpetmCbFwoMxvkSoaAMlFrOC4OKTWI4KlZGT/RsNxg4ghOw==} + peerDependencies: + cssnano: ^7.0.0 + postcss: ^8.3.11 + purgecss: ^6.0.0 + relateurl: ^0.2.7 + srcset: 5.0.1 + svgo: ^3.0.2 + terser: ^5.10.0 + uncss: ^0.17.3 + peerDependenciesMeta: + cssnano: + optional: true + postcss: + optional: true + purgecss: + optional: true + relateurl: + optional: true + srcset: + optional: true + svgo: + optional: true + terser: + optional: true + uncss: + optional: true + + htmlparser2@7.2.0: + resolution: {integrity: sha512-H7MImA4MS6cw7nbyURtLPO1Tms7C5H602LRETv95z1MxO/7CP7rDVROehUYeYBUYEON94NXXDEPmZuq+hX4sog==} + + htmlparser2@9.1.0: + resolution: {integrity: sha512-5zfg6mHUoaer/97TxnGpxmbR7zJtPwIYFMZ/H5ucTlPZhKvtum05yiPK3Mgai3a0DyVxv7qYqoweaEd2nrYQzQ==} + + import-fresh@3.3.0: + resolution: {integrity: 
sha512-veYYhQa+D1QBKznvhUHxb8faxlrwUnxseDAbAp457E0wLNio2bOSKnjYDhMj+YiAq61xrMGhQk9iXVk5FzgQMw==} + engines: {node: '>=6'} + + is-arrayish@0.2.1: + resolution: {integrity: sha512-zz06S8t0ozoDXMG+ube26zeCTNXcKIPJZJi8hBrF4idCLms4CG9QtK7qBl1boi5ODzFpjswb5JPmHCbMpjaYzg==} + + is-extglob@2.1.1: + resolution: {integrity: sha512-SbKbANkN603Vi4jEZv49LeVJMn4yGwsbzZworEoyEiutsN3nJYdbO36zfhGJ6QEDpOZIFkDtnq5JRxmvl3jsoQ==} + engines: {node: '>=0.10.0'} + + is-glob@4.0.3: + resolution: {integrity: sha512-xelSayHH36ZgE7ZWhli7pW34hNbNl8Ojv5KVmkJD4hBdD3th8Tfk9vYasLM+mXWOZhFkgZfxhLSnrwRr4elSSg==} + engines: {node: '>=0.10.0'} + + is-json@2.0.1: + resolution: {integrity: sha512-6BEnpVn1rcf3ngfmViLM6vjUjGErbdrL4rwlv+u1NO1XO8kqT4YGL8+19Q+Z/bas8tY90BTWMk2+fW1g6hQjbA==} + + is-number@7.0.0: + resolution: {integrity: sha512-41Cifkg6e8TylSpdtTpeLVMqvSBEVzTttHvERD741+pnZ8ANv0004MRL43QKPDlK9cGvNp6NZWZUBlbGXYxxng==} + engines: {node: '>=0.12.0'} + + js-tokens@4.0.0: + resolution: {integrity: sha512-RdJUflcE3cUzKiMqQgsCu06FPu9UdIJO0beYbPhHN4k6apgJtifcoCtT9bcxOpYBtpD2kCM6Sbzg4CausW/PKQ==} + + js-yaml@4.1.0: + resolution: {integrity: sha512-wpxZs9NoxZaJESJGIZTyDEaYpl0FKSA+FB9aJiyemKhMwkxQg63h4T1KJgUGHpTqPDNRcmmYLugrRjJlBtWvRA==} + hasBin: true + + json-parse-even-better-errors@2.3.1: + resolution: {integrity: sha512-xyFwyhro/JEof6Ghe2iz2NcXoj2sloNsWr/XsERDK/oiPCfaNhl5ONfp+jQdAZRQQ0IJWNzH9zIZF7li91kh2w==} + + json5@2.2.3: + resolution: {integrity: sha512-XmOWe7eyHYH14cLdVPoyg+GOH3rYX++KpzrylJwSW98t3Nk+U8XOl8FWKOgwtzdb8lXGf6zYwDUzeHMWfxasyg==} + engines: {node: '>=6'} + hasBin: true + + lightningcss-darwin-arm64@1.29.1: + resolution: {integrity: sha512-HtR5XJ5A0lvCqYAoSv2QdZZyoHNttBpa5EP9aNuzBQeKGfbyH5+UipLWvVzpP4Uml5ej4BYs5I9Lco9u1fECqw==} + engines: {node: '>= 12.0.0'} + cpu: [arm64] + os: [darwin] + + lightningcss-darwin-x64@1.29.1: + resolution: {integrity: sha512-k33G9IzKUpHy/J/3+9MCO4e+PzaFblsgBjSGlpAaFikeBFm8B/CkO3cKU9oI4g+fjS2KlkLM/Bza9K/aw8wsNA==} + engines: {node: '>= 12.0.0'} + cpu: [x64] + os: [darwin] + + lightningcss-freebsd-x64@1.29.1: + resolution: {integrity: sha512-0SUW22fv/8kln2LnIdOCmSuXnxgxVC276W5KLTwoehiO0hxkacBxjHOL5EtHD8BAXg2BvuhsJPmVMasvby3LiQ==} + engines: {node: '>= 12.0.0'} + cpu: [x64] + os: [freebsd] + + lightningcss-linux-arm-gnueabihf@1.29.1: + resolution: {integrity: sha512-sD32pFvlR0kDlqsOZmYqH/68SqUMPNj+0pucGxToXZi4XZgZmqeX/NkxNKCPsswAXU3UeYgDSpGhu05eAufjDg==} + engines: {node: '>= 12.0.0'} + cpu: [arm] + os: [linux] + + lightningcss-linux-arm64-gnu@1.29.1: + resolution: {integrity: sha512-0+vClRIZ6mmJl/dxGuRsE197o1HDEeeRk6nzycSy2GofC2JsY4ifCRnvUWf/CUBQmlrvMzt6SMQNMSEu22csWQ==} + engines: {node: '>= 12.0.0'} + cpu: [arm64] + os: [linux] + + lightningcss-linux-arm64-musl@1.29.1: + resolution: {integrity: sha512-UKMFrG4rL/uHNgelBsDwJcBqVpzNJbzsKkbI3Ja5fg00sgQnHw/VrzUTEc4jhZ+AN2BvQYz/tkHu4vt1kLuJyw==} + engines: {node: '>= 12.0.0'} + cpu: [arm64] + os: [linux] + + lightningcss-linux-x64-gnu@1.29.1: + resolution: {integrity: sha512-u1S+xdODy/eEtjADqirA774y3jLcm8RPtYztwReEXoZKdzgsHYPl0s5V52Tst+GKzqjebkULT86XMSxejzfISw==} + engines: {node: '>= 12.0.0'} + cpu: [x64] + os: [linux] + + lightningcss-linux-x64-musl@1.29.1: + resolution: {integrity: sha512-L0Tx0DtaNUTzXv0lbGCLB/c/qEADanHbu4QdcNOXLIe1i8i22rZRpbT3gpWYsCh9aSL9zFujY/WmEXIatWvXbw==} + engines: {node: '>= 12.0.0'} + cpu: [x64] + os: [linux] + + lightningcss-win32-arm64-msvc@1.29.1: + resolution: {integrity: sha512-QoOVnkIEFfbW4xPi+dpdft/zAKmgLgsRHfJalEPYuJDOWf7cLQzYg0DEh8/sn737FaeMJxHZRc1oBreiwZCjog==} + engines: {node: '>= 12.0.0'} + cpu: 
[arm64] + os: [win32] + + lightningcss-win32-x64-msvc@1.29.1: + resolution: {integrity: sha512-NygcbThNBe4JElP+olyTI/doBNGJvLs3bFCRPdvuCcxZCcCZ71B858IHpdm7L1btZex0FvCmM17FK98Y9MRy1Q==} + engines: {node: '>= 12.0.0'} + cpu: [x64] + os: [win32] + + lightningcss@1.29.1: + resolution: {integrity: sha512-FmGoeD4S05ewj+AkhTY+D+myDvXI6eL27FjHIjoyUkO/uw7WZD1fBVs0QxeYWa7E17CUHJaYX/RUGISCtcrG4Q==} + engines: {node: '>= 12.0.0'} + + lines-and-columns@1.2.4: + resolution: {integrity: sha512-7ylylesZQ/PV29jhEDl3Ufjo6ZX7gCqJr5F7PKrqc93v7fzSymt1BpwEU8nAUXs8qzzvqhbjhK5QZg6Mt/HkBg==} + + lmdb@2.8.5: + resolution: {integrity: sha512-9bMdFfc80S+vSldBmG3HOuLVHnxRdNTlpzR6QDnzqCQtCzGUEAGTzBKYMeIM+I/sU4oZfgbcbS7X7F65/z/oxQ==} + hasBin: true + + micromatch@4.0.8: + resolution: {integrity: sha512-PXwfBhYu0hBCPw8Dn0E+WDYb7af3dSLVWKi3HGv84IdF4TyFoC0ysxFd0Goxw7nSv4T/PzEJQxsYsEiFCKo2BA==} + engines: {node: '>=8.6'} + + msgpackr-extract@3.0.3: + resolution: {integrity: sha512-P0efT1C9jIdVRefqjzOQ9Xml57zpOXnIuS+csaB4MdZbTdmGDLo8XhzBG1N7aO11gKDDkJvBLULeFTo46wwreA==} + hasBin: true + + msgpackr@1.11.2: + resolution: {integrity: sha512-F9UngXRlPyWCDEASDpTf6c9uNhGPTqnTeLVt7bN+bU1eajoR/8V9ys2BRaV5C/e5ihE6sJ9uPIKaYt6bFuO32g==} + + node-addon-api@6.1.0: + resolution: {integrity: sha512-+eawOlIgy680F0kBzPUNFhMZGtJ1YmqM6l4+Crf4IkImjYrO/mqPwRMh352g23uIaQKFItcQ64I7KMaJxHgAVA==} + + node-addon-api@7.1.1: + resolution: {integrity: sha512-5m3bsyrjFWE1xf7nz7YXdN4udnVtXK6/Yfgn5qnahL6bCkf2yKt4k3nuTKAtT4r3IG8JNR2ncsIMdZuAzJjHQQ==} + + node-gyp-build-optional-packages@5.1.1: + resolution: {integrity: sha512-+P72GAjVAbTxjjwUmwjVrqrdZROD4nf8KgpBoDxqXXTiYZZt/ud60dE5yvCSr9lRO8e8yv6kgJIC0K0PfZFVQw==} + hasBin: true + + node-gyp-build-optional-packages@5.2.2: + resolution: {integrity: sha512-s+w+rBWnpTMwSFbaE0UXsRlg7hU4FjekKU4eyAih5T8nJuNZT1nNsskXpxmeqSK9UzkBl6UgRlnKc8hz8IEqOw==} + hasBin: true + + node-releases@2.0.19: + resolution: {integrity: sha512-xxOWJsBKtzAq7DY0J+DTzuz58K8e7sJbdgwkbMWQe8UYB6ekmsQ45q0M/tJDsGaZmbC+l7n57UV8Hl5tHxO9uw==} + + nullthrows@1.1.1: + resolution: {integrity: sha512-2vPPEi+Z7WqML2jZYddDIfy5Dqb0r2fze2zTxNNknZaFpVHU3mFB3R+DWeJWGVx0ecvttSGlJTI+WG+8Z4cDWw==} + + ordered-binary@1.5.3: + resolution: {integrity: sha512-oGFr3T+pYdTGJ+YFEILMpS3es+GiIbs9h/XQrclBXUtd44ey7XwfsMzM31f64I1SQOawDoDr/D823kNCADI8TA==} + + parcel@2.13.3: + resolution: {integrity: sha512-8GrC8C7J8mwRpAlk7EJ7lwdFTbCN+dcXH2gy5AsEs9pLfzo9wvxOTx6W0fzSlvCOvZOita+8GdfYlGfEt0tRgA==} + engines: {node: '>= 16.0.0'} + hasBin: true + + parent-module@1.0.1: + resolution: {integrity: sha512-GQ2EWRpQV8/o+Aw8YqtfZZPfNRWZYkbidE9k5rpl/hC3vtHHBfGm2Ifi6qWV+coDGkrUKZAxE3Lot5kcsRlh+g==} + engines: {node: '>=6'} + + parse-json@5.2.0: + resolution: {integrity: sha512-ayCKvm/phCGxOkYRSCM82iDwct8/EonSEgCSxWxD7ve6jHggsFl4fZVQBPRNgQoKiuV/odhFrGzQXZwbifC8Rg==} + engines: {node: '>=8'} + + picocolors@1.1.1: + resolution: {integrity: sha512-xceH2snhtb5M9liqDsmEw56le376mTZkEX/jEb/RxNFyegNul7eNslCXP9FDj/Lcu0X8KEyMceP2ntpaHrDEVA==} + + picomatch@2.3.1: + resolution: {integrity: sha512-JU3teHTNjmE2VCGFzuY8EXzCDVwEqB2a8fsIvwaStHhAWJEeVd1o1QD80CU6+ZdEXXSLbSsuLwJjkCBWqRQUVA==} + engines: {node: '>=8.6'} + + postcss-value-parser@4.2.0: + resolution: {integrity: sha512-1NNCs6uurfkVbeXG4S8JFT9t19m45ICnif8zWLd5oPSZ50QnwMfK+H3jv408d4jw/7Bttv5axS5IiHoLaVNHeQ==} + + posthtml-parser@0.11.0: + resolution: {integrity: sha512-QecJtfLekJbWVo/dMAA+OSwY79wpRmbqS5TeXvXSX+f0c6pW4/SE6inzZ2qkU7oAMCPqIDkZDvd/bQsSFUnKyw==} + engines: {node: '>=12'} + + posthtml-parser@0.12.1: + resolution: {integrity: 
sha512-rYFmsDLfYm+4Ts2Oh4DCDSZPtdC1BLnRXAobypVzX9alj28KGl65dIFtgDY9zB57D0TC4Qxqrawuq/2et1P0GA==} + engines: {node: '>=16'} + + posthtml-render@3.0.0: + resolution: {integrity: sha512-z+16RoxK3fUPgwaIgH9NGnK1HKY9XIDpydky5eQGgAFVXTCSezalv9U2jQuNV+Z9qV1fDWNzldcw4eK0SSbqKA==} + engines: {node: '>=12'} + + posthtml@0.16.6: + resolution: {integrity: sha512-JcEmHlyLK/o0uGAlj65vgg+7LIms0xKXe60lcDOTU7oVX/3LuEuLwrQpW3VJ7de5TaFKiW4kWkaIpJL42FEgxQ==} + engines: {node: '>=12.0.0'} + + prettier@3.4.2: + resolution: {integrity: sha512-e9MewbtFo+Fevyuxn/4rrcDAaq0IYxPGLvObpQjiZBMAzB9IGmzlnG9RZy3FFas+eBMu2vA0CszMeduow5dIuQ==} + engines: {node: '>=14'} + hasBin: true + + react-error-overlay@6.0.9: + resolution: {integrity: sha512-nQTTcUu+ATDbrSD1BZHr5kgSD4oF8OFjxun8uAaL8RwPBacGBNPf/yAuVVdx17N8XNzRDMrZ9XcKZHCjPW+9ew==} + + react-refresh@0.14.2: + resolution: {integrity: sha512-jCvmsr+1IUSMUyzOkRcvnVbX3ZYC6g9TDrDbFuFmRDq7PD4yaGbLKNQL6k2jnArV8hjYxh7hVhAZB6s9HDGpZA==} + engines: {node: '>=0.10.0'} + + regenerator-runtime@0.14.1: + resolution: {integrity: sha512-dYnhHh0nJoMfnkZs6GmmhFknAGRrLznOu5nc9ML+EJxGvrx6H7teuevqVqCuPcPK//3eDrrjQhehXVx9cnkGdw==} + + resolve-from@4.0.0: + resolution: {integrity: sha512-pb/MYmXstAkysRFx8piNI1tGFNQIFA3vkE3Gq4EuA1dF6gHp/+vgZqsCGJapvy8N3Q+4o7FwvquPJcnZ7RYy4g==} + engines: {node: '>=4'} + + safe-buffer@5.2.1: + resolution: {integrity: sha512-rp3So07KcdmmKbGvgaNxQSJr7bGVSVk5S9Eq1F+ppbRo70+YeaDxkw5Dd8NPN+GD6bjnYm2VuPuCXmpuYvmCXQ==} + + semver@7.7.0: + resolution: {integrity: sha512-DrfFnPzblFmNrIZzg5RzHegbiRWg7KMR7btwi2yjHwx06zsUbO5g613sVwEV7FTwmzJu+Io0lJe2GJ3LxqpvBQ==} + engines: {node: '>=10'} + hasBin: true + + srcset@4.0.0: + resolution: {integrity: sha512-wvLeHgcVHKO8Sc/H/5lkGreJQVeYMm9rlmt8PuR1xE31rIuXhuzznUUqAt8MqLhB3MqJdFzlNAfpcWnxiFUcPw==} + engines: {node: '>=12'} + + supports-color@7.2.0: + resolution: {integrity: sha512-qpCAvRl9stuOHveKsn7HncJRvv501qIacKzQlO/+Lwxc9+0q2wLyv4Dfvt80/DPn2pqOBsJdDiogXGR9+OvwRw==} + engines: {node: '>=8'} + + term-size@2.2.1: + resolution: {integrity: sha512-wK0Ri4fOGjv/XPy8SBHZChl8CM7uMc5VML7SqiQ0zG7+J5Vr+RMQDoHa2CNT6KHUnTGIXH34UDMkPzAUyapBZg==} + engines: {node: '>=8'} + + timsort@0.3.0: + resolution: {integrity: sha512-qsdtZH+vMoCARQtyod4imc2nIJwg9Cc7lPRrw9CzF8ZKR0khdr8+2nX80PBhET3tcyTtJDxAffGh2rXH4tyU8A==} + + to-regex-range@5.0.1: + resolution: {integrity: sha512-65P7iz6X5yEr1cwcgvQxbbIw7Uk3gOy5dIdtZ4rDveLqhrdJP+Li/Hx6tyK0NEb+2GCyneCMJiGqrADCSNk8sQ==} + engines: {node: '>=8.0'} + + tslib@2.8.1: + resolution: {integrity: sha512-oJFu94HQb+KVduSUQL7wnpmqnfmLsOA/nAh6b6EH0wCEoK0/mPeXU6c3wKDV83MkOuHPRHtSXKKU99IBazS/2w==} + + type-fest@0.20.2: + resolution: {integrity: sha512-Ne+eE4r0/iWnpAxD852z3A+N0Bt5RN//NjJwRd2VFHEmrywxf5vsZlh4R6lixl6B+wz/8d+maTSAkN1FIkI3LQ==} + engines: {node: '>=10'} + + typescript@5.7.3: + resolution: {integrity: sha512-84MVSjMEHP+FQRPy3pX9sTVV/INIex71s9TL2Gm5FG/WG1SqXeKyZ0k7/blY/4FdOzI12CBy1vGc4og/eus0fw==} + engines: {node: '>=14.17'} + hasBin: true + + update-browserslist-db@1.1.2: + resolution: {integrity: sha512-PPypAm5qvlD7XMZC3BujecnaOxwhrtoFR+Dqkk5Aa/6DssiH0ibKoketaj9w8LP7Bont1rYeoV5plxD7RTEPRg==} + hasBin: true + peerDependencies: + browserslist: '>= 4.21.0' + + utility-types@3.11.0: + resolution: {integrity: sha512-6Z7Ma2aVEWisaL6TvBCy7P8rm2LQoPv6dJ7ecIaIixHcwfbJ0x7mWdbcwlIM5IGQxPZSFYeqRCqlOOeKoJYMkw==} + engines: {node: '>= 4'} + + weak-lru-cache@1.2.2: + resolution: {integrity: sha512-DEAoo25RfSYMuTGc9vPJzZcZullwIqRDSI9LOy+fkCJPi6hykCnfKaXTuPBDuXAUcqHXyOgFtHNp/kB2FjYHbw==} + +snapshots: + + 
'@babel/code-frame@7.26.2': + dependencies: + '@babel/helper-validator-identifier': 7.25.9 + js-tokens: 4.0.0 + picocolors: 1.1.1 + + '@babel/helper-validator-identifier@7.25.9': {} + + '@lezer/common@1.2.3': {} + + '@lezer/lr@1.4.2': + dependencies: + '@lezer/common': 1.2.3 + + '@lmdb/lmdb-darwin-arm64@2.8.5': + optional: true + + '@lmdb/lmdb-darwin-x64@2.8.5': + optional: true + + '@lmdb/lmdb-linux-arm64@2.8.5': + optional: true + + '@lmdb/lmdb-linux-arm@2.8.5': + optional: true + + '@lmdb/lmdb-linux-x64@2.8.5': + optional: true + + '@lmdb/lmdb-win32-x64@2.8.5': + optional: true + + '@mischnic/json-sourcemap@0.1.1': + dependencies: + '@lezer/common': 1.2.3 + '@lezer/lr': 1.4.2 + json5: 2.2.3 + + '@msgpackr-extract/msgpackr-extract-darwin-arm64@3.0.3': + optional: true + + '@msgpackr-extract/msgpackr-extract-darwin-x64@3.0.3': + optional: true + + '@msgpackr-extract/msgpackr-extract-linux-arm64@3.0.3': + optional: true + + '@msgpackr-extract/msgpackr-extract-linux-arm@3.0.3': + optional: true + + '@msgpackr-extract/msgpackr-extract-linux-x64@3.0.3': + optional: true + + '@msgpackr-extract/msgpackr-extract-win32-x64@3.0.3': + optional: true + + '@parcel/bundler-default@2.13.3(@parcel/core@2.13.3(@swc/helpers@0.5.15))': + dependencies: + '@parcel/diagnostic': 2.13.3 + '@parcel/graph': 3.3.3 + '@parcel/plugin': 2.13.3(@parcel/core@2.13.3(@swc/helpers@0.5.15)) + '@parcel/rust': 2.13.3 + '@parcel/utils': 2.13.3 + nullthrows: 1.1.1 + transitivePeerDependencies: + - '@parcel/core' + + '@parcel/cache@2.13.3(@parcel/core@2.13.3(@swc/helpers@0.5.15))': + dependencies: + '@parcel/core': 2.13.3(@swc/helpers@0.5.15) + '@parcel/fs': 2.13.3(@parcel/core@2.13.3(@swc/helpers@0.5.15)) + '@parcel/logger': 2.13.3 + '@parcel/utils': 2.13.3 + lmdb: 2.8.5 + + '@parcel/codeframe@2.13.3': + dependencies: + chalk: 4.1.2 + + '@parcel/compressor-raw@2.13.3(@parcel/core@2.13.3(@swc/helpers@0.5.15))': + dependencies: + '@parcel/plugin': 2.13.3(@parcel/core@2.13.3(@swc/helpers@0.5.15)) + transitivePeerDependencies: + - '@parcel/core' + + '@parcel/config-default@2.13.3(@parcel/core@2.13.3(@swc/helpers@0.5.15))(@swc/helpers@0.5.15)(typescript@5.7.3)': + dependencies: + '@parcel/bundler-default': 2.13.3(@parcel/core@2.13.3(@swc/helpers@0.5.15)) + '@parcel/compressor-raw': 2.13.3(@parcel/core@2.13.3(@swc/helpers@0.5.15)) + '@parcel/core': 2.13.3(@swc/helpers@0.5.15) + '@parcel/namer-default': 2.13.3(@parcel/core@2.13.3(@swc/helpers@0.5.15)) + '@parcel/optimizer-css': 2.13.3(@parcel/core@2.13.3(@swc/helpers@0.5.15)) + '@parcel/optimizer-htmlnano': 2.13.3(@parcel/core@2.13.3(@swc/helpers@0.5.15))(typescript@5.7.3) + '@parcel/optimizer-image': 2.13.3(@parcel/core@2.13.3(@swc/helpers@0.5.15)) + '@parcel/optimizer-svgo': 2.13.3(@parcel/core@2.13.3(@swc/helpers@0.5.15)) + '@parcel/optimizer-swc': 2.13.3(@parcel/core@2.13.3(@swc/helpers@0.5.15))(@swc/helpers@0.5.15) + '@parcel/packager-css': 2.13.3(@parcel/core@2.13.3(@swc/helpers@0.5.15)) + '@parcel/packager-html': 2.13.3(@parcel/core@2.13.3(@swc/helpers@0.5.15)) + '@parcel/packager-js': 2.13.3(@parcel/core@2.13.3(@swc/helpers@0.5.15)) + '@parcel/packager-raw': 2.13.3(@parcel/core@2.13.3(@swc/helpers@0.5.15)) + '@parcel/packager-svg': 2.13.3(@parcel/core@2.13.3(@swc/helpers@0.5.15)) + '@parcel/packager-wasm': 2.13.3(@parcel/core@2.13.3(@swc/helpers@0.5.15)) + '@parcel/reporter-dev-server': 2.13.3(@parcel/core@2.13.3(@swc/helpers@0.5.15)) + '@parcel/resolver-default': 2.13.3(@parcel/core@2.13.3(@swc/helpers@0.5.15)) + '@parcel/runtime-browser-hmr': 
2.13.3(@parcel/core@2.13.3(@swc/helpers@0.5.15)) + '@parcel/runtime-js': 2.13.3(@parcel/core@2.13.3(@swc/helpers@0.5.15)) + '@parcel/runtime-react-refresh': 2.13.3(@parcel/core@2.13.3(@swc/helpers@0.5.15)) + '@parcel/runtime-service-worker': 2.13.3(@parcel/core@2.13.3(@swc/helpers@0.5.15)) + '@parcel/transformer-babel': 2.13.3(@parcel/core@2.13.3(@swc/helpers@0.5.15)) + '@parcel/transformer-css': 2.13.3(@parcel/core@2.13.3(@swc/helpers@0.5.15)) + '@parcel/transformer-html': 2.13.3(@parcel/core@2.13.3(@swc/helpers@0.5.15)) + '@parcel/transformer-image': 2.13.3(@parcel/core@2.13.3(@swc/helpers@0.5.15)) + '@parcel/transformer-js': 2.13.3(@parcel/core@2.13.3(@swc/helpers@0.5.15)) + '@parcel/transformer-json': 2.13.3(@parcel/core@2.13.3(@swc/helpers@0.5.15)) + '@parcel/transformer-postcss': 2.13.3(@parcel/core@2.13.3(@swc/helpers@0.5.15)) + '@parcel/transformer-posthtml': 2.13.3(@parcel/core@2.13.3(@swc/helpers@0.5.15)) + '@parcel/transformer-raw': 2.13.3(@parcel/core@2.13.3(@swc/helpers@0.5.15)) + '@parcel/transformer-react-refresh-wrap': 2.13.3(@parcel/core@2.13.3(@swc/helpers@0.5.15)) + '@parcel/transformer-svg': 2.13.3(@parcel/core@2.13.3(@swc/helpers@0.5.15)) + transitivePeerDependencies: + - '@swc/helpers' + - cssnano + - postcss + - purgecss + - relateurl + - srcset + - svgo + - terser + - typescript + - uncss + + '@parcel/core@2.13.3(@swc/helpers@0.5.15)': + dependencies: + '@mischnic/json-sourcemap': 0.1.1 + '@parcel/cache': 2.13.3(@parcel/core@2.13.3(@swc/helpers@0.5.15)) + '@parcel/diagnostic': 2.13.3 + '@parcel/events': 2.13.3 + '@parcel/feature-flags': 2.13.3 + '@parcel/fs': 2.13.3(@parcel/core@2.13.3(@swc/helpers@0.5.15)) + '@parcel/graph': 3.3.3 + '@parcel/logger': 2.13.3 + '@parcel/package-manager': 2.13.3(@parcel/core@2.13.3(@swc/helpers@0.5.15))(@swc/helpers@0.5.15) + '@parcel/plugin': 2.13.3(@parcel/core@2.13.3(@swc/helpers@0.5.15)) + '@parcel/profiler': 2.13.3 + '@parcel/rust': 2.13.3 + '@parcel/source-map': 2.1.1 + '@parcel/types': 2.13.3(@parcel/core@2.13.3(@swc/helpers@0.5.15)) + '@parcel/utils': 2.13.3 + '@parcel/workers': 2.13.3(@parcel/core@2.13.3(@swc/helpers@0.5.15)) + base-x: 3.0.10 + browserslist: 4.24.4 + clone: 2.1.2 + dotenv: 16.4.7 + dotenv-expand: 11.0.7 + json5: 2.2.3 + msgpackr: 1.11.2 + nullthrows: 1.1.1 + semver: 7.7.0 + transitivePeerDependencies: + - '@swc/helpers' + + '@parcel/diagnostic@2.13.3': + dependencies: + '@mischnic/json-sourcemap': 0.1.1 + nullthrows: 1.1.1 + + '@parcel/events@2.13.3': {} + + '@parcel/feature-flags@2.13.3': {} + + '@parcel/fs@2.13.3(@parcel/core@2.13.3(@swc/helpers@0.5.15))': + dependencies: + '@parcel/core': 2.13.3(@swc/helpers@0.5.15) + '@parcel/feature-flags': 2.13.3 + '@parcel/rust': 2.13.3 + '@parcel/types-internal': 2.13.3 + '@parcel/utils': 2.13.3 + '@parcel/watcher': 2.5.1 + '@parcel/workers': 2.13.3(@parcel/core@2.13.3(@swc/helpers@0.5.15)) + + '@parcel/graph@3.3.3': + dependencies: + '@parcel/feature-flags': 2.13.3 + nullthrows: 1.1.1 + + '@parcel/logger@2.13.3': + dependencies: + '@parcel/diagnostic': 2.13.3 + '@parcel/events': 2.13.3 + + '@parcel/markdown-ansi@2.13.3': + dependencies: + chalk: 4.1.2 + + '@parcel/namer-default@2.13.3(@parcel/core@2.13.3(@swc/helpers@0.5.15))': + dependencies: + '@parcel/diagnostic': 2.13.3 + '@parcel/plugin': 2.13.3(@parcel/core@2.13.3(@swc/helpers@0.5.15)) + nullthrows: 1.1.1 + transitivePeerDependencies: + - '@parcel/core' + + '@parcel/node-resolver-core@3.4.3(@parcel/core@2.13.3(@swc/helpers@0.5.15))': + dependencies: + '@mischnic/json-sourcemap': 0.1.1 + '@parcel/diagnostic': 
2.13.3 + '@parcel/fs': 2.13.3(@parcel/core@2.13.3(@swc/helpers@0.5.15)) + '@parcel/rust': 2.13.3 + '@parcel/utils': 2.13.3 + nullthrows: 1.1.1 + semver: 7.7.0 + transitivePeerDependencies: + - '@parcel/core' + + '@parcel/optimizer-css@2.13.3(@parcel/core@2.13.3(@swc/helpers@0.5.15))': + dependencies: + '@parcel/diagnostic': 2.13.3 + '@parcel/plugin': 2.13.3(@parcel/core@2.13.3(@swc/helpers@0.5.15)) + '@parcel/source-map': 2.1.1 + '@parcel/utils': 2.13.3 + browserslist: 4.24.4 + lightningcss: 1.29.1 + nullthrows: 1.1.1 + transitivePeerDependencies: + - '@parcel/core' + + '@parcel/optimizer-htmlnano@2.13.3(@parcel/core@2.13.3(@swc/helpers@0.5.15))(typescript@5.7.3)': + dependencies: + '@parcel/diagnostic': 2.13.3 + '@parcel/plugin': 2.13.3(@parcel/core@2.13.3(@swc/helpers@0.5.15)) + '@parcel/utils': 2.13.3 + htmlnano: 2.1.1(typescript@5.7.3) + nullthrows: 1.1.1 + posthtml: 0.16.6 + transitivePeerDependencies: + - '@parcel/core' + - cssnano + - postcss + - purgecss + - relateurl + - srcset + - svgo + - terser + - typescript + - uncss + + '@parcel/optimizer-image@2.13.3(@parcel/core@2.13.3(@swc/helpers@0.5.15))': + dependencies: + '@parcel/core': 2.13.3(@swc/helpers@0.5.15) + '@parcel/diagnostic': 2.13.3 + '@parcel/plugin': 2.13.3(@parcel/core@2.13.3(@swc/helpers@0.5.15)) + '@parcel/rust': 2.13.3 + '@parcel/utils': 2.13.3 + '@parcel/workers': 2.13.3(@parcel/core@2.13.3(@swc/helpers@0.5.15)) + + '@parcel/optimizer-svgo@2.13.3(@parcel/core@2.13.3(@swc/helpers@0.5.15))': + dependencies: + '@parcel/diagnostic': 2.13.3 + '@parcel/plugin': 2.13.3(@parcel/core@2.13.3(@swc/helpers@0.5.15)) + '@parcel/utils': 2.13.3 + transitivePeerDependencies: + - '@parcel/core' + + '@parcel/optimizer-swc@2.13.3(@parcel/core@2.13.3(@swc/helpers@0.5.15))(@swc/helpers@0.5.15)': + dependencies: + '@parcel/diagnostic': 2.13.3 + '@parcel/plugin': 2.13.3(@parcel/core@2.13.3(@swc/helpers@0.5.15)) + '@parcel/source-map': 2.1.1 + '@parcel/utils': 2.13.3 + '@swc/core': 1.10.12(@swc/helpers@0.5.15) + nullthrows: 1.1.1 + transitivePeerDependencies: + - '@parcel/core' + - '@swc/helpers' + + '@parcel/package-manager@2.13.3(@parcel/core@2.13.3(@swc/helpers@0.5.15))(@swc/helpers@0.5.15)': + dependencies: + '@parcel/core': 2.13.3(@swc/helpers@0.5.15) + '@parcel/diagnostic': 2.13.3 + '@parcel/fs': 2.13.3(@parcel/core@2.13.3(@swc/helpers@0.5.15)) + '@parcel/logger': 2.13.3 + '@parcel/node-resolver-core': 3.4.3(@parcel/core@2.13.3(@swc/helpers@0.5.15)) + '@parcel/types': 2.13.3(@parcel/core@2.13.3(@swc/helpers@0.5.15)) + '@parcel/utils': 2.13.3 + '@parcel/workers': 2.13.3(@parcel/core@2.13.3(@swc/helpers@0.5.15)) + '@swc/core': 1.10.12(@swc/helpers@0.5.15) + semver: 7.7.0 + transitivePeerDependencies: + - '@swc/helpers' + + '@parcel/packager-css@2.13.3(@parcel/core@2.13.3(@swc/helpers@0.5.15))': + dependencies: + '@parcel/diagnostic': 2.13.3 + '@parcel/plugin': 2.13.3(@parcel/core@2.13.3(@swc/helpers@0.5.15)) + '@parcel/source-map': 2.1.1 + '@parcel/utils': 2.13.3 + lightningcss: 1.29.1 + nullthrows: 1.1.1 + transitivePeerDependencies: + - '@parcel/core' + + '@parcel/packager-html@2.13.3(@parcel/core@2.13.3(@swc/helpers@0.5.15))': + dependencies: + '@parcel/plugin': 2.13.3(@parcel/core@2.13.3(@swc/helpers@0.5.15)) + '@parcel/types': 2.13.3(@parcel/core@2.13.3(@swc/helpers@0.5.15)) + '@parcel/utils': 2.13.3 + nullthrows: 1.1.1 + posthtml: 0.16.6 + transitivePeerDependencies: + - '@parcel/core' + + '@parcel/packager-js@2.13.3(@parcel/core@2.13.3(@swc/helpers@0.5.15))': + dependencies: + '@parcel/diagnostic': 2.13.3 + '@parcel/plugin': 
2.13.3(@parcel/core@2.13.3(@swc/helpers@0.5.15)) + '@parcel/rust': 2.13.3 + '@parcel/source-map': 2.1.1 + '@parcel/types': 2.13.3(@parcel/core@2.13.3(@swc/helpers@0.5.15)) + '@parcel/utils': 2.13.3 + globals: 13.24.0 + nullthrows: 1.1.1 + transitivePeerDependencies: + - '@parcel/core' + + '@parcel/packager-raw@2.13.3(@parcel/core@2.13.3(@swc/helpers@0.5.15))': + dependencies: + '@parcel/plugin': 2.13.3(@parcel/core@2.13.3(@swc/helpers@0.5.15)) + transitivePeerDependencies: + - '@parcel/core' + + '@parcel/packager-svg@2.13.3(@parcel/core@2.13.3(@swc/helpers@0.5.15))': + dependencies: + '@parcel/plugin': 2.13.3(@parcel/core@2.13.3(@swc/helpers@0.5.15)) + '@parcel/types': 2.13.3(@parcel/core@2.13.3(@swc/helpers@0.5.15)) + '@parcel/utils': 2.13.3 + posthtml: 0.16.6 + transitivePeerDependencies: + - '@parcel/core' + + '@parcel/packager-wasm@2.13.3(@parcel/core@2.13.3(@swc/helpers@0.5.15))': + dependencies: + '@parcel/plugin': 2.13.3(@parcel/core@2.13.3(@swc/helpers@0.5.15)) + transitivePeerDependencies: + - '@parcel/core' + + '@parcel/plugin@2.13.3(@parcel/core@2.13.3(@swc/helpers@0.5.15))': + dependencies: + '@parcel/types': 2.13.3(@parcel/core@2.13.3(@swc/helpers@0.5.15)) + transitivePeerDependencies: + - '@parcel/core' + + '@parcel/profiler@2.13.3': + dependencies: + '@parcel/diagnostic': 2.13.3 + '@parcel/events': 2.13.3 + '@parcel/types-internal': 2.13.3 + chrome-trace-event: 1.0.4 + + '@parcel/reporter-cli@2.13.3(@parcel/core@2.13.3(@swc/helpers@0.5.15))': + dependencies: + '@parcel/plugin': 2.13.3(@parcel/core@2.13.3(@swc/helpers@0.5.15)) + '@parcel/types': 2.13.3(@parcel/core@2.13.3(@swc/helpers@0.5.15)) + '@parcel/utils': 2.13.3 + chalk: 4.1.2 + term-size: 2.2.1 + transitivePeerDependencies: + - '@parcel/core' + + '@parcel/reporter-dev-server@2.13.3(@parcel/core@2.13.3(@swc/helpers@0.5.15))': + dependencies: + '@parcel/plugin': 2.13.3(@parcel/core@2.13.3(@swc/helpers@0.5.15)) + '@parcel/utils': 2.13.3 + transitivePeerDependencies: + - '@parcel/core' + + '@parcel/reporter-tracer@2.13.3(@parcel/core@2.13.3(@swc/helpers@0.5.15))': + dependencies: + '@parcel/plugin': 2.13.3(@parcel/core@2.13.3(@swc/helpers@0.5.15)) + '@parcel/utils': 2.13.3 + chrome-trace-event: 1.0.4 + nullthrows: 1.1.1 + transitivePeerDependencies: + - '@parcel/core' + + '@parcel/resolver-default@2.13.3(@parcel/core@2.13.3(@swc/helpers@0.5.15))': + dependencies: + '@parcel/node-resolver-core': 3.4.3(@parcel/core@2.13.3(@swc/helpers@0.5.15)) + '@parcel/plugin': 2.13.3(@parcel/core@2.13.3(@swc/helpers@0.5.15)) + transitivePeerDependencies: + - '@parcel/core' + + '@parcel/runtime-browser-hmr@2.13.3(@parcel/core@2.13.3(@swc/helpers@0.5.15))': + dependencies: + '@parcel/plugin': 2.13.3(@parcel/core@2.13.3(@swc/helpers@0.5.15)) + '@parcel/utils': 2.13.3 + transitivePeerDependencies: + - '@parcel/core' + + '@parcel/runtime-js@2.13.3(@parcel/core@2.13.3(@swc/helpers@0.5.15))': + dependencies: + '@parcel/diagnostic': 2.13.3 + '@parcel/plugin': 2.13.3(@parcel/core@2.13.3(@swc/helpers@0.5.15)) + '@parcel/utils': 2.13.3 + nullthrows: 1.1.1 + transitivePeerDependencies: + - '@parcel/core' + + '@parcel/runtime-react-refresh@2.13.3(@parcel/core@2.13.3(@swc/helpers@0.5.15))': + dependencies: + '@parcel/plugin': 2.13.3(@parcel/core@2.13.3(@swc/helpers@0.5.15)) + '@parcel/utils': 2.13.3 + react-error-overlay: 6.0.9 + react-refresh: 0.14.2 + transitivePeerDependencies: + - '@parcel/core' + + '@parcel/runtime-service-worker@2.13.3(@parcel/core@2.13.3(@swc/helpers@0.5.15))': + dependencies: + '@parcel/plugin': 
2.13.3(@parcel/core@2.13.3(@swc/helpers@0.5.15)) + '@parcel/utils': 2.13.3 + nullthrows: 1.1.1 + transitivePeerDependencies: + - '@parcel/core' + + '@parcel/rust@2.13.3': {} + + '@parcel/source-map@2.1.1': + dependencies: + detect-libc: 1.0.3 + + '@parcel/transformer-babel@2.13.3(@parcel/core@2.13.3(@swc/helpers@0.5.15))': + dependencies: + '@parcel/diagnostic': 2.13.3 + '@parcel/plugin': 2.13.3(@parcel/core@2.13.3(@swc/helpers@0.5.15)) + '@parcel/source-map': 2.1.1 + '@parcel/utils': 2.13.3 + browserslist: 4.24.4 + json5: 2.2.3 + nullthrows: 1.1.1 + semver: 7.7.0 + transitivePeerDependencies: + - '@parcel/core' + + '@parcel/transformer-css@2.13.3(@parcel/core@2.13.3(@swc/helpers@0.5.15))': + dependencies: + '@parcel/diagnostic': 2.13.3 + '@parcel/plugin': 2.13.3(@parcel/core@2.13.3(@swc/helpers@0.5.15)) + '@parcel/source-map': 2.1.1 + '@parcel/utils': 2.13.3 + browserslist: 4.24.4 + lightningcss: 1.29.1 + nullthrows: 1.1.1 + transitivePeerDependencies: + - '@parcel/core' + + '@parcel/transformer-html@2.13.3(@parcel/core@2.13.3(@swc/helpers@0.5.15))': + dependencies: + '@parcel/diagnostic': 2.13.3 + '@parcel/plugin': 2.13.3(@parcel/core@2.13.3(@swc/helpers@0.5.15)) + '@parcel/rust': 2.13.3 + nullthrows: 1.1.1 + posthtml: 0.16.6 + posthtml-parser: 0.12.1 + posthtml-render: 3.0.0 + semver: 7.7.0 + srcset: 4.0.0 + transitivePeerDependencies: + - '@parcel/core' + + '@parcel/transformer-image@2.13.3(@parcel/core@2.13.3(@swc/helpers@0.5.15))': + dependencies: + '@parcel/core': 2.13.3(@swc/helpers@0.5.15) + '@parcel/plugin': 2.13.3(@parcel/core@2.13.3(@swc/helpers@0.5.15)) + '@parcel/utils': 2.13.3 + '@parcel/workers': 2.13.3(@parcel/core@2.13.3(@swc/helpers@0.5.15)) + nullthrows: 1.1.1 + + '@parcel/transformer-js@2.13.3(@parcel/core@2.13.3(@swc/helpers@0.5.15))': + dependencies: + '@parcel/core': 2.13.3(@swc/helpers@0.5.15) + '@parcel/diagnostic': 2.13.3 + '@parcel/plugin': 2.13.3(@parcel/core@2.13.3(@swc/helpers@0.5.15)) + '@parcel/rust': 2.13.3 + '@parcel/source-map': 2.1.1 + '@parcel/utils': 2.13.3 + '@parcel/workers': 2.13.3(@parcel/core@2.13.3(@swc/helpers@0.5.15)) + '@swc/helpers': 0.5.15 + browserslist: 4.24.4 + nullthrows: 1.1.1 + regenerator-runtime: 0.14.1 + semver: 7.7.0 + + '@parcel/transformer-json@2.13.3(@parcel/core@2.13.3(@swc/helpers@0.5.15))': + dependencies: + '@parcel/plugin': 2.13.3(@parcel/core@2.13.3(@swc/helpers@0.5.15)) + json5: 2.2.3 + transitivePeerDependencies: + - '@parcel/core' + + '@parcel/transformer-postcss@2.13.3(@parcel/core@2.13.3(@swc/helpers@0.5.15))': + dependencies: + '@parcel/diagnostic': 2.13.3 + '@parcel/plugin': 2.13.3(@parcel/core@2.13.3(@swc/helpers@0.5.15)) + '@parcel/rust': 2.13.3 + '@parcel/utils': 2.13.3 + clone: 2.1.2 + nullthrows: 1.1.1 + postcss-value-parser: 4.2.0 + semver: 7.7.0 + transitivePeerDependencies: + - '@parcel/core' + + '@parcel/transformer-posthtml@2.13.3(@parcel/core@2.13.3(@swc/helpers@0.5.15))': + dependencies: + '@parcel/plugin': 2.13.3(@parcel/core@2.13.3(@swc/helpers@0.5.15)) + '@parcel/utils': 2.13.3 + nullthrows: 1.1.1 + posthtml: 0.16.6 + posthtml-parser: 0.12.1 + posthtml-render: 3.0.0 + semver: 7.7.0 + transitivePeerDependencies: + - '@parcel/core' + + '@parcel/transformer-raw@2.13.3(@parcel/core@2.13.3(@swc/helpers@0.5.15))': + dependencies: + '@parcel/plugin': 2.13.3(@parcel/core@2.13.3(@swc/helpers@0.5.15)) + transitivePeerDependencies: + - '@parcel/core' + + '@parcel/transformer-react-refresh-wrap@2.13.3(@parcel/core@2.13.3(@swc/helpers@0.5.15))': + dependencies: + '@parcel/plugin': 
2.13.3(@parcel/core@2.13.3(@swc/helpers@0.5.15)) + '@parcel/utils': 2.13.3 + react-refresh: 0.14.2 + transitivePeerDependencies: + - '@parcel/core' + + '@parcel/transformer-svg@2.13.3(@parcel/core@2.13.3(@swc/helpers@0.5.15))': + dependencies: + '@parcel/diagnostic': 2.13.3 + '@parcel/plugin': 2.13.3(@parcel/core@2.13.3(@swc/helpers@0.5.15)) + '@parcel/rust': 2.13.3 + nullthrows: 1.1.1 + posthtml: 0.16.6 + posthtml-parser: 0.12.1 + posthtml-render: 3.0.0 + semver: 7.7.0 + transitivePeerDependencies: + - '@parcel/core' + + '@parcel/types-internal@2.13.3': + dependencies: + '@parcel/diagnostic': 2.13.3 + '@parcel/feature-flags': 2.13.3 + '@parcel/source-map': 2.1.1 + utility-types: 3.11.0 + + '@parcel/types@2.13.3(@parcel/core@2.13.3(@swc/helpers@0.5.15))': + dependencies: + '@parcel/types-internal': 2.13.3 + '@parcel/workers': 2.13.3(@parcel/core@2.13.3(@swc/helpers@0.5.15)) + transitivePeerDependencies: + - '@parcel/core' + + '@parcel/utils@2.13.3': + dependencies: + '@parcel/codeframe': 2.13.3 + '@parcel/diagnostic': 2.13.3 + '@parcel/logger': 2.13.3 + '@parcel/markdown-ansi': 2.13.3 + '@parcel/rust': 2.13.3 + '@parcel/source-map': 2.1.1 + chalk: 4.1.2 + nullthrows: 1.1.1 + + '@parcel/watcher-android-arm64@2.5.1': + optional: true + + '@parcel/watcher-darwin-arm64@2.5.1': + optional: true + + '@parcel/watcher-darwin-x64@2.5.1': + optional: true + + '@parcel/watcher-freebsd-x64@2.5.1': + optional: true + + '@parcel/watcher-linux-arm-glibc@2.5.1': + optional: true + + '@parcel/watcher-linux-arm-musl@2.5.1': + optional: true + + '@parcel/watcher-linux-arm64-glibc@2.5.1': + optional: true + + '@parcel/watcher-linux-arm64-musl@2.5.1': + optional: true + + '@parcel/watcher-linux-x64-glibc@2.5.1': + optional: true + + '@parcel/watcher-linux-x64-musl@2.5.1': + optional: true + + '@parcel/watcher-win32-arm64@2.5.1': + optional: true + + '@parcel/watcher-win32-ia32@2.5.1': + optional: true + + '@parcel/watcher-win32-x64@2.5.1': + optional: true + + '@parcel/watcher@2.5.1': + dependencies: + detect-libc: 1.0.3 + is-glob: 4.0.3 + micromatch: 4.0.8 + node-addon-api: 7.1.1 + optionalDependencies: + '@parcel/watcher-android-arm64': 2.5.1 + '@parcel/watcher-darwin-arm64': 2.5.1 + '@parcel/watcher-darwin-x64': 2.5.1 + '@parcel/watcher-freebsd-x64': 2.5.1 + '@parcel/watcher-linux-arm-glibc': 2.5.1 + '@parcel/watcher-linux-arm-musl': 2.5.1 + '@parcel/watcher-linux-arm64-glibc': 2.5.1 + '@parcel/watcher-linux-arm64-musl': 2.5.1 + '@parcel/watcher-linux-x64-glibc': 2.5.1 + '@parcel/watcher-linux-x64-musl': 2.5.1 + '@parcel/watcher-win32-arm64': 2.5.1 + '@parcel/watcher-win32-ia32': 2.5.1 + '@parcel/watcher-win32-x64': 2.5.1 + + '@parcel/workers@2.13.3(@parcel/core@2.13.3(@swc/helpers@0.5.15))': + dependencies: + '@parcel/core': 2.13.3(@swc/helpers@0.5.15) + '@parcel/diagnostic': 2.13.3 + '@parcel/logger': 2.13.3 + '@parcel/profiler': 2.13.3 + '@parcel/types-internal': 2.13.3 + '@parcel/utils': 2.13.3 + nullthrows: 1.1.1 + + '@swc/core-darwin-arm64@1.10.12': + optional: true + + '@swc/core-darwin-x64@1.10.12': + optional: true + + '@swc/core-linux-arm-gnueabihf@1.10.12': + optional: true + + '@swc/core-linux-arm64-gnu@1.10.12': + optional: true + + '@swc/core-linux-arm64-musl@1.10.12': + optional: true + + '@swc/core-linux-x64-gnu@1.10.12': + optional: true + + '@swc/core-linux-x64-musl@1.10.12': + optional: true + + '@swc/core-win32-arm64-msvc@1.10.12': + optional: true + + '@swc/core-win32-ia32-msvc@1.10.12': + optional: true + + '@swc/core-win32-x64-msvc@1.10.12': + optional: true + + 
'@swc/core@1.10.12(@swc/helpers@0.5.15)': + dependencies: + '@swc/counter': 0.1.3 + '@swc/types': 0.1.17 + optionalDependencies: + '@swc/core-darwin-arm64': 1.10.12 + '@swc/core-darwin-x64': 1.10.12 + '@swc/core-linux-arm-gnueabihf': 1.10.12 + '@swc/core-linux-arm64-gnu': 1.10.12 + '@swc/core-linux-arm64-musl': 1.10.12 + '@swc/core-linux-x64-gnu': 1.10.12 + '@swc/core-linux-x64-musl': 1.10.12 + '@swc/core-win32-arm64-msvc': 1.10.12 + '@swc/core-win32-ia32-msvc': 1.10.12 + '@swc/core-win32-x64-msvc': 1.10.12 + '@swc/helpers': 0.5.15 + + '@swc/counter@0.1.3': {} + + '@swc/helpers@0.5.15': + dependencies: + tslib: 2.8.1 + + '@swc/types@0.1.17': + dependencies: + '@swc/counter': 0.1.3 + + '@tsconfig/recommended@1.0.8': {} + + '@types/parcel-env@0.0.8': {} + + ansi-styles@4.3.0: + dependencies: + color-convert: 2.0.1 + + argparse@2.0.1: {} + + base-x@3.0.10: + dependencies: + safe-buffer: 5.2.1 + + braces@3.0.3: + dependencies: + fill-range: 7.1.1 + + browserslist@4.24.4: + dependencies: + caniuse-lite: 1.0.30001696 + electron-to-chromium: 1.5.90 + node-releases: 2.0.19 + update-browserslist-db: 1.1.2(browserslist@4.24.4) + + callsites@3.1.0: {} + + caniuse-lite@1.0.30001696: {} + + chalk@4.1.2: + dependencies: + ansi-styles: 4.3.0 + supports-color: 7.2.0 + + chrome-trace-event@1.0.4: {} + + clone@2.1.2: {} + + color-convert@2.0.1: + dependencies: + color-name: 1.1.4 + + color-name@1.1.4: {} + + commander@12.1.0: {} + + cosmiconfig@9.0.0(typescript@5.7.3): + dependencies: + env-paths: 2.2.1 + import-fresh: 3.3.0 + js-yaml: 4.1.0 + parse-json: 5.2.0 + optionalDependencies: + typescript: 5.7.3 + + detect-libc@1.0.3: {} + + detect-libc@2.0.3: {} + + dom-serializer@1.4.1: + dependencies: + domelementtype: 2.3.0 + domhandler: 4.3.1 + entities: 2.2.0 + + dom-serializer@2.0.0: + dependencies: + domelementtype: 2.3.0 + domhandler: 5.0.3 + entities: 4.5.0 + + domelementtype@2.3.0: {} + + domhandler@4.3.1: + dependencies: + domelementtype: 2.3.0 + + domhandler@5.0.3: + dependencies: + domelementtype: 2.3.0 + + domutils@2.8.0: + dependencies: + dom-serializer: 1.4.1 + domelementtype: 2.3.0 + domhandler: 4.3.1 + + domutils@3.2.2: + dependencies: + dom-serializer: 2.0.0 + domelementtype: 2.3.0 + domhandler: 5.0.3 + + dotenv-expand@11.0.7: + dependencies: + dotenv: 16.4.7 + + dotenv@16.4.7: {} + + electron-to-chromium@1.5.90: {} + + entities@2.2.0: {} + + entities@3.0.1: {} + + entities@4.5.0: {} + + env-paths@2.2.1: {} + + error-ex@1.3.2: + dependencies: + is-arrayish: 0.2.1 + + escalade@3.2.0: {} + + fill-range@7.1.1: + dependencies: + to-regex-range: 5.0.1 + + get-port@4.2.0: {} + + globals@13.24.0: + dependencies: + type-fest: 0.20.2 + + has-flag@4.0.0: {} + + htmlnano@2.1.1(typescript@5.7.3): + dependencies: + cosmiconfig: 9.0.0(typescript@5.7.3) + posthtml: 0.16.6 + timsort: 0.3.0 + transitivePeerDependencies: + - typescript + + htmlparser2@7.2.0: + dependencies: + domelementtype: 2.3.0 + domhandler: 4.3.1 + domutils: 2.8.0 + entities: 3.0.1 + + htmlparser2@9.1.0: + dependencies: + domelementtype: 2.3.0 + domhandler: 5.0.3 + domutils: 3.2.2 + entities: 4.5.0 + + import-fresh@3.3.0: + dependencies: + parent-module: 1.0.1 + resolve-from: 4.0.0 + + is-arrayish@0.2.1: {} + + is-extglob@2.1.1: {} + + is-glob@4.0.3: + dependencies: + is-extglob: 2.1.1 + + is-json@2.0.1: {} + + is-number@7.0.0: {} + + js-tokens@4.0.0: {} + + js-yaml@4.1.0: + dependencies: + argparse: 2.0.1 + + json-parse-even-better-errors@2.3.1: {} + + json5@2.2.3: {} + + lightningcss-darwin-arm64@1.29.1: + optional: true + + 
lightningcss-darwin-x64@1.29.1: + optional: true + + lightningcss-freebsd-x64@1.29.1: + optional: true + + lightningcss-linux-arm-gnueabihf@1.29.1: + optional: true + + lightningcss-linux-arm64-gnu@1.29.1: + optional: true + + lightningcss-linux-arm64-musl@1.29.1: + optional: true + + lightningcss-linux-x64-gnu@1.29.1: + optional: true + + lightningcss-linux-x64-musl@1.29.1: + optional: true + + lightningcss-win32-arm64-msvc@1.29.1: + optional: true + + lightningcss-win32-x64-msvc@1.29.1: + optional: true + + lightningcss@1.29.1: + dependencies: + detect-libc: 1.0.3 + optionalDependencies: + lightningcss-darwin-arm64: 1.29.1 + lightningcss-darwin-x64: 1.29.1 + lightningcss-freebsd-x64: 1.29.1 + lightningcss-linux-arm-gnueabihf: 1.29.1 + lightningcss-linux-arm64-gnu: 1.29.1 + lightningcss-linux-arm64-musl: 1.29.1 + lightningcss-linux-x64-gnu: 1.29.1 + lightningcss-linux-x64-musl: 1.29.1 + lightningcss-win32-arm64-msvc: 1.29.1 + lightningcss-win32-x64-msvc: 1.29.1 + + lines-and-columns@1.2.4: {} + + lmdb@2.8.5: + dependencies: + msgpackr: 1.11.2 + node-addon-api: 6.1.0 + node-gyp-build-optional-packages: 5.1.1 + ordered-binary: 1.5.3 + weak-lru-cache: 1.2.2 + optionalDependencies: + '@lmdb/lmdb-darwin-arm64': 2.8.5 + '@lmdb/lmdb-darwin-x64': 2.8.5 + '@lmdb/lmdb-linux-arm': 2.8.5 + '@lmdb/lmdb-linux-arm64': 2.8.5 + '@lmdb/lmdb-linux-x64': 2.8.5 + '@lmdb/lmdb-win32-x64': 2.8.5 + + micromatch@4.0.8: + dependencies: + braces: 3.0.3 + picomatch: 2.3.1 + + msgpackr-extract@3.0.3: + dependencies: + node-gyp-build-optional-packages: 5.2.2 + optionalDependencies: + '@msgpackr-extract/msgpackr-extract-darwin-arm64': 3.0.3 + '@msgpackr-extract/msgpackr-extract-darwin-x64': 3.0.3 + '@msgpackr-extract/msgpackr-extract-linux-arm': 3.0.3 + '@msgpackr-extract/msgpackr-extract-linux-arm64': 3.0.3 + '@msgpackr-extract/msgpackr-extract-linux-x64': 3.0.3 + '@msgpackr-extract/msgpackr-extract-win32-x64': 3.0.3 + optional: true + + msgpackr@1.11.2: + optionalDependencies: + msgpackr-extract: 3.0.3 + + node-addon-api@6.1.0: {} + + node-addon-api@7.1.1: {} + + node-gyp-build-optional-packages@5.1.1: + dependencies: + detect-libc: 2.0.3 + + node-gyp-build-optional-packages@5.2.2: + dependencies: + detect-libc: 2.0.3 + optional: true + + node-releases@2.0.19: {} + + nullthrows@1.1.1: {} + + ordered-binary@1.5.3: {} + + parcel@2.13.3(@swc/helpers@0.5.15)(typescript@5.7.3): + dependencies: + '@parcel/config-default': 2.13.3(@parcel/core@2.13.3(@swc/helpers@0.5.15))(@swc/helpers@0.5.15)(typescript@5.7.3) + '@parcel/core': 2.13.3(@swc/helpers@0.5.15) + '@parcel/diagnostic': 2.13.3 + '@parcel/events': 2.13.3 + '@parcel/feature-flags': 2.13.3 + '@parcel/fs': 2.13.3(@parcel/core@2.13.3(@swc/helpers@0.5.15)) + '@parcel/logger': 2.13.3 + '@parcel/package-manager': 2.13.3(@parcel/core@2.13.3(@swc/helpers@0.5.15))(@swc/helpers@0.5.15) + '@parcel/reporter-cli': 2.13.3(@parcel/core@2.13.3(@swc/helpers@0.5.15)) + '@parcel/reporter-dev-server': 2.13.3(@parcel/core@2.13.3(@swc/helpers@0.5.15)) + '@parcel/reporter-tracer': 2.13.3(@parcel/core@2.13.3(@swc/helpers@0.5.15)) + '@parcel/utils': 2.13.3 + chalk: 4.1.2 + commander: 12.1.0 + get-port: 4.2.0 + transitivePeerDependencies: + - '@swc/helpers' + - cssnano + - postcss + - purgecss + - relateurl + - srcset + - svgo + - terser + - typescript + - uncss + + parent-module@1.0.1: + dependencies: + callsites: 3.1.0 + + parse-json@5.2.0: + dependencies: + '@babel/code-frame': 7.26.2 + error-ex: 1.3.2 + json-parse-even-better-errors: 2.3.1 + lines-and-columns: 1.2.4 + + picocolors@1.1.1: {} 
+ + picomatch@2.3.1: {} + + postcss-value-parser@4.2.0: {} + + posthtml-parser@0.11.0: + dependencies: + htmlparser2: 7.2.0 + + posthtml-parser@0.12.1: + dependencies: + htmlparser2: 9.1.0 + + posthtml-render@3.0.0: + dependencies: + is-json: 2.0.1 + + posthtml@0.16.6: + dependencies: + posthtml-parser: 0.11.0 + posthtml-render: 3.0.0 + + prettier@3.4.2: {} + + react-error-overlay@6.0.9: {} + + react-refresh@0.14.2: {} + + regenerator-runtime@0.14.1: {} + + resolve-from@4.0.0: {} + + safe-buffer@5.2.1: {} + + semver@7.7.0: {} + + srcset@4.0.0: {} + + supports-color@7.2.0: + dependencies: + has-flag: 4.0.0 + + term-size@2.2.1: {} + + timsort@0.3.0: {} + + to-regex-range@5.0.1: + dependencies: + is-number: 7.0.0 + + tslib@2.8.1: {} + + type-fest@0.20.2: {} + + typescript@5.7.3: {} + + update-browserslist-db@1.1.2(browserslist@4.24.4): + dependencies: + browserslist: 4.24.4 + escalade: 3.2.0 + picocolors: 1.1.1 + + utility-types@3.11.0: {} + + weak-lru-cache@1.2.2: {} diff --git a/docker/nginx/sites/www/src/main.ts b/docker/nginx/sites/www/src/main.ts new file mode 100644 index 0000000..09e8661 --- /dev/null +++ b/docker/nginx/sites/www/src/main.ts @@ -0,0 +1,47 @@ +import "./style.css"; + +class Emotion { + static opposite_map = new Map<Emotion, Emotion>(); + + constructor(public readonly name: string) { + } + + get opposite(): Emotion { + return Emotion.opposite_map.get(this)!; + } + + get element(): HTMLDivElement { + return document.querySelector<HTMLDivElement>(`.slogan.${this.name}`)! + } + + get elementHeight(): number { + return this.element.clientHeight; + } + + apply() { + localStorage.setItem(emotionKey, this.name); + document.body.dataset.emotion = this.name; + document.body.style.paddingTop = `${this.elementHeight}px`; + } +} + +const happy = new Emotion("happy") +const angry = new Emotion("angry") +Emotion.opposite_map.set(happy, angry) +Emotion.opposite_map.set(angry, happy) + +const emotionKey = "emotion"; +const savedEmotionName = localStorage.getItem(emotionKey) ?? 
happy.name; + +for (const emotion of [happy, angry]) { + if (emotion.name == savedEmotionName) { + emotion.apply(); + } + emotion.element.addEventListener("click", () => { + emotion.opposite.apply(); + }); +} + +setTimeout(() => { + document.body.style.transition = "padding-top 0.8s"; +}); diff --git a/docker/nginx/sites/www/src/style.css b/docker/nginx/sites/www/src/style.css new file mode 100644 index 0000000..05c98a0 --- /dev/null +++ b/docker/nginx/sites/www/src/style.css @@ -0,0 +1,148 @@ +html { + width: 100%; +} + +body { + width: 100%; + margin: 0; + display: flex; + flex-direction: column; +} + +a { + font-family: monospace; +} + +.fake-link { + font-family: monospace; +} + +#main-article { + max-width: 880px; + margin-top: 1em; + padding: 0 1em; + align-self: center; +} + +#title-name { + font-family: monospace; + background-color: black; + color: white; +} + +@keyframes content-enter { + from { + opacity: 0; + transform: translateY(100px); + } + + to { + opacity: 1; + transform: translateY(0); + } +} + +@keyframes avatar-enter { + from { + opacity: 0; + transform: translateX(100%); + } + + to { + opacity: 1; + transform: translateX(0); + } +} + +#main-article > * { + animation: content-enter 0.8s; +} + +#avatar { + float: right; + animation: avatar-enter 0.8s; +} + +.slogan-container { + width: 100vw; + top: 0; + position: fixed; +} + +.slogan { + width: 100%; + padding: 0.5em 1em; + text-align: center; + box-sizing: border-box; + color: white; + position: absolute; + transform: translateY(-100%); + transition: transform 0.8s; +} + +.slogan.happy { + background-color: dodgerblue; +} + +.slogan.angry { + background-color: orangered; +} + +body[data-emotion="happy"] .slogan.happy { + transform: translateY(0); +} + +body[data-emotion="angry"] .slogan.angry { + transform: translateY(0); +} + +#friends-container { + display: flex; + gap: 1em; +} + +.friend { + flex-grow: 0; + text-align: center; +} + +.friend a { + font-family: unset; +} + +.friend-avatar { + object-fit: cover; +} + +.friend-github { + width: 1em; + vertical-align: middle; + margin-right: -0.5em; +} + +.friend-tag { + font-size: 0.8em; +} + +.citation { + margin: auto; +} + +.citation figcaption { + text-align: right; +} + +#license a { + font-family: initial; + text-decoration: none; +} + +#license-text { + font-family: monospace; + text-decoration: initial; +} + +#license-img-container img { + height: 1em; + vertical-align: middle; +} diff --git a/docker/nginx/sites/www/tsconfig.json b/docker/nginx/sites/www/tsconfig.json new file mode 100644 index 0000000..9d1434c --- /dev/null +++ b/docker/nginx/sites/www/tsconfig.json @@ -0,0 +1,19 @@ +{ + "extends": "@tsconfig/recommended/tsconfig.json", + "compilerOptions": { + "lib": [ + "ESNext", + "DOM", + "DOM.Iterable" + ], + "types": [ + "parcel-env" + ], + "target": "ESNext", + "module": "ESNext", + "moduleResolution": "bundler", + "resolveJsonModule": true, + "isolatedModules": true, + "noEmit": true + } +}
\ No newline at end of file diff --git a/docker/v2ray/Dockerfile b/docker/v2ray/Dockerfile new file mode 100644 index 0000000..250a6b8 --- /dev/null +++ b/docker/v2ray/Dockerfile @@ -0,0 +1,5 @@ +FROM alpine:edge + +RUN apk add --no-cache v2ray + +ENTRYPOINT [ "/usr/bin/v2ray" ] diff --git a/dropped/docker/crupest-api/CrupestApi/.dockerignore b/dropped/docker/crupest-api/CrupestApi/.dockerignore new file mode 100644 index 0000000..f1c182d --- /dev/null +++ b/dropped/docker/crupest-api/CrupestApi/.dockerignore @@ -0,0 +1,2 @@ +*/obj +*/bin diff --git a/dropped/docker/crupest-api/CrupestApi/.gitignore b/dropped/docker/crupest-api/CrupestApi/.gitignore new file mode 100644 index 0000000..371ea59 --- /dev/null +++ b/dropped/docker/crupest-api/CrupestApi/.gitignore @@ -0,0 +1,4 @@ +.vs +obj +bin +dev-config.json diff --git a/dropped/docker/crupest-api/CrupestApi/CrupestApi.Commons.Tests/Crud/ColumnTypeInfoTest.cs b/dropped/docker/crupest-api/CrupestApi/CrupestApi.Commons.Tests/Crud/ColumnTypeInfoTest.cs new file mode 100644 index 0000000..b9ec03e --- /dev/null +++ b/dropped/docker/crupest-api/CrupestApi/CrupestApi.Commons.Tests/Crud/ColumnTypeInfoTest.cs @@ -0,0 +1,39 @@ +using System.Data; + +namespace CrupestApi.Commons.Crud.Tests; + +public class ColumnTypeInfoTest +{ + private ColumnTypeProvider _provider = new ColumnTypeProvider(); + + [Theory] + [InlineData(typeof(int), DbType.Int32, 123)] + [InlineData(typeof(long), DbType.Int64, 456)] + [InlineData(typeof(sbyte), DbType.SByte, 789)] + [InlineData(typeof(short), DbType.Int16, 101)] + [InlineData(typeof(float), DbType.Single, 1.0f)] + [InlineData(typeof(double), DbType.Double, 1.0)] + [InlineData(typeof(string), DbType.String, "Hello world!")] + [InlineData(typeof(byte[]), DbType.Binary, new byte[] { 1, 2, 3 })] + public void BasicColumnTypeTest(Type type, DbType dbType, object? 
value) + { + var typeInfo = _provider.Get(type); + Assert.True(typeInfo.IsSimple); + Assert.Equal(dbType, typeInfo.DbType); + Assert.Equal(value, typeInfo.ConvertFromDatabase(value)); + Assert.Equal(value, typeInfo.ConvertToDatabase(value)); + } + + [Fact] + public void DateTimeColumnTypeTest() + { + var dateTimeColumnTypeInfo = _provider.Get(typeof(DateTime)); + Assert.Equal(typeof(DateTime), dateTimeColumnTypeInfo.ClrType); + Assert.Equal(typeof(string), dateTimeColumnTypeInfo.DatabaseClrType); + + var dateTime = new DateTime(2000, 1, 1, 0, 0, 0, DateTimeKind.Utc); + var dateTimeString = "2000-01-01T00:00:00Z"; + Assert.Equal(dateTimeString, dateTimeColumnTypeInfo.ConvertToDatabase(dateTime)); + Assert.Equal(dateTime, dateTimeColumnTypeInfo.ConvertFromDatabase(dateTimeString)); + } +} diff --git a/dropped/docker/crupest-api/CrupestApi/CrupestApi.Commons.Tests/Crud/CrudIntegratedTest.cs b/dropped/docker/crupest-api/CrupestApi/CrupestApi.Commons.Tests/Crud/CrudIntegratedTest.cs new file mode 100644 index 0000000..bd07c70 --- /dev/null +++ b/dropped/docker/crupest-api/CrupestApi/CrupestApi.Commons.Tests/Crud/CrudIntegratedTest.cs @@ -0,0 +1,200 @@ +using System.Net; +using System.Net.Http.Headers; +using CrupestApi.Commons.Secrets; +using Microsoft.AspNetCore.TestHost; + +namespace CrupestApi.Commons.Crud.Tests; + +public class CrudIntegratedTest : IAsyncLifetime +{ + private readonly WebApplication _app; + private HttpClient _httpClient = default!; + private HttpClient _authorizedHttpClient = default!; + private string _token = default!; + + public CrudIntegratedTest() + { + var builder = WebApplication.CreateBuilder(); + builder.Logging.ClearProviders(); + builder.Services.AddSingleton<IDbConnectionFactory, SqliteMemoryConnectionFactory>(); + builder.Services.AddCrud<TestEntity>(); + builder.WebHost.UseTestServer(); + _app = builder.Build(); + _app.UseCrudCore(); + _app.MapCrud<TestEntity>("/test", "test-perm"); + } + + public async Task InitializeAsync() + { + await _app.StartAsync(); + _httpClient = _app.GetTestClient(); + + using (var scope = _app.Services.CreateScope()) + { + var secretService = (SecretService)scope.ServiceProvider.GetRequiredService<ISecretService>(); + var key = secretService.Create(new SecretInfo + { + Key = "test-perm" + }); + _token = secretService.GetByKey(key).Secret; + } + + _authorizedHttpClient = _app.GetTestClient(); + _authorizedHttpClient.DefaultRequestHeaders.Authorization = new AuthenticationHeaderValue("Bearer", _token); + } + + public async Task DisposeAsync() + { + await _app.StopAsync(); + } + + + [Fact] + public async Task EmptyTest() + { + using var response = await _authorizedHttpClient.GetAsync("/test"); + Assert.Equal(HttpStatusCode.OK, response.StatusCode); + var body = await response.Content.ReadFromJsonAsync<List<TestEntity>>(); + Assert.NotNull(body); + Assert.Empty(body); + } + + [Fact] + public async Task CrudTest() + { + { + using var response = await _authorizedHttpClient.PostAsJsonAsync("/test", new TestEntity + { + Name = "test", + Age = 22 + }); + Assert.Equal(HttpStatusCode.OK, response.StatusCode); + var body = await response.Content.ReadFromJsonAsync<TestEntity>(); + Assert.NotNull(body); + Assert.Equal("test", body.Name); + Assert.Equal(22, body.Age); + Assert.Null(body.Height); + Assert.NotEmpty(body.Secret); + } + + { + using var response = await _authorizedHttpClient.GetAsync("/test"); + Assert.Equal(HttpStatusCode.OK, response.StatusCode); + var body = await response.Content.ReadFromJsonAsync<List<TestEntity>>(); + 
Assert.NotNull(body); + var entity = Assert.Single(body); + Assert.Equal("test", entity.Name); + Assert.Equal(22, entity.Age); + Assert.Null(entity.Height); + Assert.NotEmpty(entity.Secret); + } + + { + using var response = await _authorizedHttpClient.GetAsync("/test/test"); + Assert.Equal(HttpStatusCode.OK, response.StatusCode); + var body = await response.Content.ReadFromJsonAsync<TestEntity>(); + Assert.NotNull(body); + Assert.Equal("test", body.Name); + Assert.Equal(22, body.Age); + Assert.Null(body.Height); + Assert.NotEmpty(body.Secret); + } + + { + using var response = await _authorizedHttpClient.PatchAsJsonAsync("/test/test", new TestEntity + { + Name = "test-2", + Age = 23, + Height = 188.0f + }); + Assert.Equal(HttpStatusCode.OK, response.StatusCode); + var body = await response.Content.ReadFromJsonAsync<TestEntity>(); + Assert.NotNull(body); + Assert.Equal("test-2", body.Name); + Assert.Equal(23, body.Age); + Assert.Equal(188.0f, body.Height); + Assert.NotEmpty(body.Secret); + } + + { + using var response = await _authorizedHttpClient.GetAsync("/test/test-2"); + Assert.Equal(HttpStatusCode.OK, response.StatusCode); + var body = await response.Content.ReadFromJsonAsync<TestEntity>(); + Assert.NotNull(body); + Assert.Equal("test-2", body.Name); + Assert.Equal(23, body.Age); + Assert.Equal(188.0f, body.Height); + Assert.NotEmpty(body.Secret); + } + + { + using var response = await _authorizedHttpClient.DeleteAsync("/test/test-2"); + Assert.Equal(HttpStatusCode.OK, response.StatusCode); + } + + { + using var response = await _authorizedHttpClient.GetAsync("/test"); + Assert.Equal(HttpStatusCode.OK, response.StatusCode); + var body = await response.Content.ReadFromJsonAsync<List<TestEntity>>(); + Assert.NotNull(body); + Assert.Empty(body); + } + } + + [Fact] + public async Task UnauthorizedTest() + { + { + using var response = await _httpClient.GetAsync("/test"); + Assert.Equal(HttpStatusCode.Unauthorized, response.StatusCode); + } + + { + using var response = await _httpClient.GetAsync("/test/test"); + Assert.Equal(HttpStatusCode.Unauthorized, response.StatusCode); + } + + { + using var response = await _httpClient.PostAsJsonAsync("/test", new TestEntity + { + Name = "test", + Age = 22 + }); + Assert.Equal(HttpStatusCode.Unauthorized, response.StatusCode); + } + + { + using var response = await _httpClient.PatchAsJsonAsync("/test/test", new TestEntity + { + Name = "test-2", + Age = 23, + Height = 188.0f + }); + Assert.Equal(HttpStatusCode.Unauthorized, response.StatusCode); + } + + { + using var response = await _httpClient.DeleteAsync("/test/test"); + Assert.Equal(HttpStatusCode.Unauthorized, response.StatusCode); + } + } + + [Fact] + public async Task NotFoundTest() + { + { + using var response = await _authorizedHttpClient.GetAsync("/test/test"); + Assert.Equal(HttpStatusCode.NotFound, response.StatusCode); + } + + { + using var response = await _authorizedHttpClient.PatchAsJsonAsync("/test/test", new TestEntity + { + Name = "test-2", + Age = 23, + Height = 188.0f + }); + Assert.Equal(HttpStatusCode.NotFound, response.StatusCode); + } + } +} diff --git a/dropped/docker/crupest-api/CrupestApi/CrupestApi.Commons.Tests/Crud/CrudServiceTest.cs b/dropped/docker/crupest-api/CrupestApi/CrupestApi.Commons.Tests/Crud/CrudServiceTest.cs new file mode 100644 index 0000000..ad0d34c --- /dev/null +++ b/dropped/docker/crupest-api/CrupestApi/CrupestApi.Commons.Tests/Crud/CrudServiceTest.cs @@ -0,0 +1,77 @@ +using CrupestApi.Commons.Crud.Migrations; +using 
Microsoft.Extensions.Logging.Abstractions; + +namespace CrupestApi.Commons.Crud.Tests; + +public class CrudServiceTest +{ + private readonly SqliteMemoryConnectionFactory _memoryConnectionFactory = new SqliteMemoryConnectionFactory(); + + private readonly CrudService<TestEntity> _crudService; + + public CrudServiceTest() + { + var columnTypeProvider = new ColumnTypeProvider(); + var tableInfoFactory = new TableInfoFactory(columnTypeProvider, NullLoggerFactory.Instance); + var dbConnectionFactory = new SqliteMemoryConnectionFactory(); + + _crudService = new CrudService<TestEntity>( + tableInfoFactory, dbConnectionFactory, new SqliteDatabaseMigrator(), NullLoggerFactory.Instance); + } + + [Fact] + public void CrudTest() + { + var key = _crudService.Create(new TestEntity() + { + Name = "crupest", + Age = 18, + }); + + Assert.Equal("crupest", key); + + var entity = _crudService.GetByKey(key); + Assert.Equal("crupest", entity.Name); + Assert.Equal(18, entity.Age); + Assert.Null(entity.Height); + Assert.NotEmpty(entity.Secret); + + var list = _crudService.GetAll(); + entity = Assert.Single(list); + Assert.Equal("crupest", entity.Name); + Assert.Equal(18, entity.Age); + Assert.Null(entity.Height); + Assert.NotEmpty(entity.Secret); + + var count = _crudService.GetCount(); + Assert.Equal(1, count); + + _crudService.UpdateByKey(key, new TestEntity() + { + Name = "crupest2.0", + Age = 22, + Height = 180, + }); + + entity = _crudService.GetByKey("crupest2.0"); + Assert.Equal("crupest2.0", entity.Name); + Assert.Equal(22, entity.Age); + Assert.Equal(180, entity.Height); + Assert.NotEmpty(entity.Secret); + + _crudService.DeleteByKey("crupest2.0"); + + count = _crudService.GetCount(); + Assert.Equal(0, count); + } + + [Fact] + public void EntityNotExistTest() + { + Assert.Throws<EntityNotExistException>(() => _crudService.GetByKey("KeyNotExist")); + Assert.Throws<EntityNotExistException>(() => _crudService.UpdateByKey("KeyNotExist", new TestEntity + { + Name = "crupest" + })); + } +} diff --git a/dropped/docker/crupest-api/CrupestApi/CrupestApi.Commons.Tests/Crud/SqlCompareHelper.cs b/dropped/docker/crupest-api/CrupestApi/CrupestApi.Commons.Tests/Crud/SqlCompareHelper.cs new file mode 100644 index 0000000..72b6218 --- /dev/null +++ b/dropped/docker/crupest-api/CrupestApi/CrupestApi.Commons.Tests/Crud/SqlCompareHelper.cs @@ -0,0 +1,85 @@ +using System.Text; + +namespace CrupestApi.Commons.Crud.Tests; + +public class SqlCompareHelper +{ + private static List<char> SymbolTokens = new List<char>() { '(', ')', ';' }; + + public static List<string> SqlExtractWords(string? sql, bool toLower = true) + { + var result = new List<string>(); + + if (string.IsNullOrEmpty(sql)) + { + return result; + } + + var current = 0; + + StringBuilder? 
wordBuilder = null; + + while (current < sql.Length) + { + if (char.IsWhiteSpace(sql[current])) + { + if (wordBuilder is not null) + { + result.Add(wordBuilder.ToString()); + wordBuilder = null; + } + } + else if (SymbolTokens.Contains(sql[current])) + { + if (wordBuilder is not null) + { + result.Add(wordBuilder.ToString()); + wordBuilder = null; + } + result.Add(sql[current].ToString()); + } + else + { + if (wordBuilder is not null) + { + wordBuilder.Append(sql[current]); + } + else + { + wordBuilder = new StringBuilder(); + wordBuilder.Append(sql[current]); + } + } + current++; + } + + if (wordBuilder is not null) + { + result.Add(wordBuilder.ToString()); + } + + if (toLower) + { + for (int i = 0; i < result.Count; i++) + { + result[i] = result[i].ToLower(); + } + } + + return result; + } + + public static bool SqlEqual(string left, string right) + { + return SqlExtractWords(left) == SqlExtractWords(right); + } + + [Fact] + public void TestSqlExtractWords() + { + var sql = "SELECT * FROM TableName WHERE (id = @abcd);"; + var words = SqlExtractWords(sql); + + Assert.Equal(new List<string> { "select", "*", "from", "tablename", "where", "(", "id", "=", "@abcd", ")", ";" }, words); + } +} diff --git a/dropped/docker/crupest-api/CrupestApi/CrupestApi.Commons.Tests/Crud/TableInfoTest.cs b/dropped/docker/crupest-api/CrupestApi/CrupestApi.Commons.Tests/Crud/TableInfoTest.cs new file mode 100644 index 0000000..b0aa702 --- /dev/null +++ b/dropped/docker/crupest-api/CrupestApi/CrupestApi.Commons.Tests/Crud/TableInfoTest.cs @@ -0,0 +1,35 @@ +using Microsoft.Extensions.Logging.Abstractions; + +namespace CrupestApi.Commons.Crud.Tests; + +public class TableInfoTest +{ + private static TableInfoFactory TableInfoFactory = new TableInfoFactory(new ColumnTypeProvider(), NullLoggerFactory.Instance); + + private TableInfo _tableInfo; + + public TableInfoTest() + { + _tableInfo = TableInfoFactory.Get(typeof(TestEntity)); + } + + [Fact] + public void TestColumnCount() + { + Assert.Equal(5, _tableInfo.Columns.Count); + Assert.Equal(4, _tableInfo.PropertyColumns.Count); + Assert.Equal(4, _tableInfo.ColumnProperties.Count); + Assert.Equal(1, _tableInfo.NonColumnProperties.Count); + } + + [Fact] + public void GenerateSelectSqlTest() + { + var (sql, parameters) = _tableInfo.GenerateSelectSql(null, WhereClause.Create().Eq("Name", "Hello")); + var parameterName = parameters.First().Name; + + // TODO: Is there a way to auto detect parameters? + SqlCompareHelper.SqlEqual($"SELECT * FROM TestEntity WHERE (Name = @{parameterName})", sql); + Assert.Equal("Hello", parameters.Get<string>(parameterName)); + } +} diff --git a/dropped/docker/crupest-api/CrupestApi/CrupestApi.Commons.Tests/Crud/TestEntity.cs b/dropped/docker/crupest-api/CrupestApi/CrupestApi.Commons.Tests/Crud/TestEntity.cs new file mode 100644 index 0000000..c15334c --- /dev/null +++ b/dropped/docker/crupest-api/CrupestApi/CrupestApi.Commons.Tests/Crud/TestEntity.cs @@ -0,0 +1,23 @@ +namespace CrupestApi.Commons.Crud.Tests; + +public class TestEntity +{ + [Column(ActAsKey = true, NotNull = true)] + public string Name { get; set; } = default!; + + [Column(NotNull = true)] + public int Age { get; set; } + + [Column] + public float? 
Height { get; set; } + + [Column(OnlyGenerated = true, NotNull = true, NoUpdate = true)] + public string Secret { get; set; } = default!; + + public static string SecretDefaultValueGenerator() + { + return "secret"; + } + + public string NonColumn { get; set; } = "Not A Column"; +} diff --git a/dropped/docker/crupest-api/CrupestApi/CrupestApi.Commons.Tests/CrupestApi.Commons.Tests.csproj b/dropped/docker/crupest-api/CrupestApi/CrupestApi.Commons.Tests/CrupestApi.Commons.Tests.csproj new file mode 100644 index 0000000..0360ee1 --- /dev/null +++ b/dropped/docker/crupest-api/CrupestApi/CrupestApi.Commons.Tests/CrupestApi.Commons.Tests.csproj @@ -0,0 +1,29 @@ +<Project Sdk="Microsoft.NET.Sdk.Web">
+
+ <PropertyGroup>
+ <TargetFramework>net7.0</TargetFramework>
+ <ImplicitUsings>enable</ImplicitUsings>
+ <Nullable>enable</Nullable>
+
+ <IsPackable>false</IsPackable>
+ </PropertyGroup>
+
+ <ItemGroup>
+ <PackageReference Include="Microsoft.AspNetCore.TestHost" Version="7.0.1" />
+ <PackageReference Include="Microsoft.NET.Test.Sdk" Version="17.4.0" />
+ <PackageReference Include="xunit" Version="2.4.2" />
+ <PackageReference Include="xunit.runner.visualstudio" Version="2.4.5">
+ <IncludeAssets>runtime; build; native; contentfiles; analyzers; buildtransitive</IncludeAssets>
+ <PrivateAssets>all</PrivateAssets>
+ </PackageReference>
+ <PackageReference Include="coverlet.collector" Version="3.2.0">
+ <IncludeAssets>runtime; build; native; contentfiles; analyzers; buildtransitive</IncludeAssets>
+ <PrivateAssets>all</PrivateAssets>
+ </PackageReference>
+ </ItemGroup>
+
+ <ItemGroup>
+ <ProjectReference Include="..\CrupestApi.Commons\CrupestApi.Commons.csproj" />
+ </ItemGroup>
+
+</Project>
\ No newline at end of file diff --git a/dropped/docker/crupest-api/CrupestApi/CrupestApi.Commons.Tests/Usings.cs b/dropped/docker/crupest-api/CrupestApi/CrupestApi.Commons.Tests/Usings.cs new file mode 100644 index 0000000..8c927eb --- /dev/null +++ b/dropped/docker/crupest-api/CrupestApi/CrupestApi.Commons.Tests/Usings.cs @@ -0,0 +1 @@ +global using Xunit;
\ No newline at end of file diff --git a/dropped/docker/crupest-api/CrupestApi/CrupestApi.Commons/Config.cs b/dropped/docker/crupest-api/CrupestApi/CrupestApi.Commons/Config.cs new file mode 100644 index 0000000..0ca3547 --- /dev/null +++ b/dropped/docker/crupest-api/CrupestApi/CrupestApi.Commons/Config.cs @@ -0,0 +1,23 @@ +namespace CrupestApi.Commons; + +public class CrupestApiConfig +{ + public string DataDir { get; set; } = string.Empty; +} + +public static class CrupestApiConfigExtensions +{ + public static IServiceCollection AddCrupestApiConfig(this IServiceCollection services) + { + services.AddOptions<CrupestApiConfig>().BindConfiguration("CrupestApi"); + services.PostConfigure<CrupestApiConfig>(config => + { + if (config.DataDir is null || config.DataDir.Length == 0) + { + config.DataDir = Path.Combine(Environment.GetFolderPath(Environment.SpecialFolder.UserProfile), "crupest-api"); + } + }); + + return services; + } +}
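As a usage note, the options binding above reads the "CrupestApi" section of the host configuration and, when DataDir is left empty, falls back to a crupest-api directory under the user profile. The following is a minimal, hypothetical consumption sketch; the /data-dir endpoint and the sample section values are illustrative only, and it assumes a net7.0 web project that references CrupestApi.Commons.

```csharp
// appsettings.json (illustrative): { "CrupestApi": { "DataDir": "/var/lib/crupest-api" } }
using CrupestApi.Commons;
using Microsoft.Extensions.Options;

var builder = WebApplication.CreateBuilder(args);
builder.Services.AddCrupestApiConfig();

var app = builder.Build();

app.MapGet("/data-dir", (HttpContext context) =>
{
    // Falls back to ~/crupest-api when the "CrupestApi" section omits DataDir.
    var config = context.RequestServices.GetRequiredService<IOptionsSnapshot<CrupestApiConfig>>().Value;
    return Results.Text(config.DataDir);
});

app.Run();
```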
\ No newline at end of file diff --git a/dropped/docker/crupest-api/CrupestApi/CrupestApi.Commons/Crud/ColumnInfo.cs b/dropped/docker/crupest-api/CrupestApi/CrupestApi.Commons/Crud/ColumnInfo.cs new file mode 100644 index 0000000..e8d3c2e --- /dev/null +++ b/dropped/docker/crupest-api/CrupestApi/CrupestApi.Commons/Crud/ColumnInfo.cs @@ -0,0 +1,236 @@ +using System.Diagnostics; +using System.Reflection; +using System.Text; + +namespace CrupestApi.Commons.Crud; + +public class ColumnInfo +{ + private readonly AggregateColumnMetadata _metadata = new AggregateColumnMetadata(); + private ILogger<ColumnInfo> _logger; + + /// <summary> + /// Initialize a column without corresponding property. + /// </summary> + public ColumnInfo(TableInfo table, IColumnMetadata metadata, Type clrType, IColumnTypeProvider typeProvider, ILoggerFactory loggerFactory) + { + _logger = loggerFactory.CreateLogger<ColumnInfo>(); + if (metadata is null) + throw new ArgumentException("You must specify metadata for non-property column."); + if (metadata.TryGetValue(ColumnMetadataKeys.ColumnName, out var columnName)) + _logger.LogInformation("Create column without property.", columnName); + else + throw new ArgumentException("You must specify name in metadata for non-property column."); + + Table = table; + _metadata.Add(metadata); + ColumnType = typeProvider.Get(clrType); + } + + /// <summary> + /// Initialize a column with corresponding property. + /// </summary> + public ColumnInfo(TableInfo table, PropertyInfo propertyInfo, IColumnTypeProvider typeProvider, ILoggerFactory loggerFactory) + { + _logger = loggerFactory.CreateLogger<ColumnInfo>(); + _logger.LogInformation("Create column with property {}.", propertyInfo.Name); + + Table = table; + PropertyInfo = propertyInfo; + ColumnType = typeProvider.Get(propertyInfo.PropertyType); + + var columnAttribute = propertyInfo.GetCustomAttribute<ColumnAttribute>(); + if (columnAttribute is not null) + { + _metadata.Add(columnAttribute); + } + } + + public TableInfo Table { get; } + + public Type EntityType => Table.EntityType; + + // If null, there is no corresponding property. + public PropertyInfo? PropertyInfo { get; } = null; + + public IColumnMetadata Metadata => _metadata; + + public IColumnTypeInfo ColumnType { get; } + + public bool IsPrimaryKey => Metadata.GetValueOrDefault(ColumnMetadataKeys.IsPrimaryKey) is true; + public bool IsAutoIncrement => IsPrimaryKey; + public bool IsNotNull => IsPrimaryKey || Metadata.GetValueOrDefault(ColumnMetadataKeys.NotNull) is true; + public bool IsOnlyGenerated => Metadata.GetValueOrDefault(ColumnMetadataKeys.OnlyGenerated) is true; + public bool IsNoUpdate => Metadata.GetValueOrDefault(ColumnMetadataKeys.NoUpdate) is true; + public object? DefaultValue => Metadata.GetValueOrDefault(ColumnMetadataKeys.DefaultValue); + /// <summary> + /// This only returns metadata value. It doesn't not fall back to primary column. If you want to get the real key column, go to table info. + /// </summary> + /// <seealso cref="ColumnMetadataKeys.ActAsKey"/> + /// <seealso cref="TableInfo.KeyColumn"/> + public bool IsSpecifiedAsKey => Metadata.GetValueOrDefault(ColumnMetadataKeys.ActAsKey) is true; + public ColumnIndexType Index => Metadata.GetValueOrDefault<ColumnIndexType?>(ColumnMetadataKeys.Index) ?? ColumnIndexType.None; + + /// <summary> + /// Whether the column value can be generated, which means the column has a default value or a default value generator or is AUTOINCREMENT. 
+ /// </summary> + public bool CanBeGenerated => DefaultValue is not null || DefaultValueGeneratorMethod is not null || IsAutoIncrement; + + /// <summary> + /// The real column name. Maybe set in metadata or just the property name. + /// </summary> + /// <value></value> + public string ColumnName + { + get + { + object? value = Metadata.GetValueOrDefault(ColumnMetadataKeys.ColumnName); + Debug.Assert(value is null || value is string); + return ((string?)value ?? PropertyInfo?.Name) ?? throw new Exception("Failed to get column name."); + } + } + + public MethodInfo? DefaultValueGeneratorMethod + { + get + { + object? value = Metadata.GetValueOrDefault(ColumnMetadataKeys.DefaultValueGenerator); + Debug.Assert(value is null || value is string); + MethodInfo? result; + if (value is null) + { + string methodName = ColumnName + "DefaultValueGenerator"; + result = Table.EntityType.GetMethod(methodName, BindingFlags.Public | BindingFlags.Static); + } + else + { + string methodName = (string)value; + result = Table.EntityType.GetMethod(methodName, BindingFlags.Public | BindingFlags.Static) ?? throw new Exception("The default value generator does not exist."); + } + + return result; + } + } + + public MethodInfo? ValidatorMethod + { + get + { + object? value = Metadata.GetValueOrDefault(ColumnMetadataKeys.Validator); + Debug.Assert(value is null || value is string); + MethodInfo? result; + if (value is null) + { + string methodName = ColumnName + "Validator"; + result = Table.EntityType.GetMethod(methodName, BindingFlags.Public | BindingFlags.Static); + } + else + { + string methodName = (string)value; + result = Table.EntityType.GetMethod(methodName, BindingFlags.Public | BindingFlags.Static) ?? throw new Exception("The validator does not exist."); + } + + return result; + } + } + + public void InvokeValidator(object? value) + { + var method = this.ValidatorMethod; + if (method is null) + { + _logger.LogInformation("Tried to invoke validator for column {} but it does not exist.", ColumnName); + return; + } + var parameters = method.GetParameters(); + if (parameters.Length == 0) + { + throw new Exception("The validator method must have at least one parameter."); + } + else if (parameters.Length == 1) + { + method.Invoke(null, new object?[] { value }); + } + else if (parameters.Length == 2) + { + if (parameters[0].ParameterType == typeof(ColumnInfo)) + method.Invoke(null, new object?[] { this, value }); + else if (parameters[1].ParameterType == typeof(ColumnInfo)) + method.Invoke(null, new object?[] { value, this }); + else + throw new Exception("The validator method must have a parameter of type ColumnInfo if it has 2 parameters."); + } + else + { + throw new Exception("The validator method can only have 1 or 2 parameters."); + } + } + + public object? InvokeDefaultValueGenerator() + { + var method = this.DefaultValueGeneratorMethod; + if (method is null) + { + _logger.LogInformation("Tried to invoke default value generator for column {} but it does not exist.", ColumnName); + return null; + } + var parameters = method.GetParameters(); + if (parameters.Length == 0) + { + return method.Invoke(null, new object?[0]); + } + else if (parameters.Length == 1) + { + if (parameters[0].ParameterType != typeof(ColumnInfo)) + throw new Exception("The default value generator method can only have a parameter of type ColumnInfo."); + return method.Invoke(null, new object?[] { this }); + } + else + { + throw new Exception("The default value generator method can only have 0 or 1 parameter."); + } + } + + public object?
GenerateDefaultValue() + { + if (DefaultValueGeneratorMethod is not null) + { + return InvokeDefaultValueGenerator(); + } + + if (Metadata.TryGetValue(ColumnMetadataKeys.DefaultValue, out object? value)) + { + return value; + } + else + { + return null; + } + } + + public string GenerateCreateTableColumnString(string? dbProviderId = null) + { + StringBuilder result = new StringBuilder(); + result.Append(ColumnName); + result.Append(' '); + result.Append(ColumnType.GetSqlTypeString(dbProviderId)); + if (IsPrimaryKey) + { + result.Append(' '); + result.Append("PRIMARY KEY"); + } + else if (IsNotNull) + { + result.Append(' '); + result.Append("NOT NULL"); + } + + if (IsAutoIncrement) + { + result.Append(' '); + result.Append("AUTOINCREMENT"); + } + + return result.ToString(); + } +} diff --git a/dropped/docker/crupest-api/CrupestApi/CrupestApi.Commons/Crud/ColumnMetadata.cs b/dropped/docker/crupest-api/CrupestApi/CrupestApi.Commons/Crud/ColumnMetadata.cs new file mode 100644 index 0000000..7247ff1 --- /dev/null +++ b/dropped/docker/crupest-api/CrupestApi/CrupestApi.Commons/Crud/ColumnMetadata.cs @@ -0,0 +1,188 @@ +namespace CrupestApi.Commons.Crud; + +public static class ColumnMetadataKeys +{ + public const string ColumnName = nameof(ColumnAttribute.ColumnName); + public const string NotNull = nameof(ColumnAttribute.NotNull); + public const string IsPrimaryKey = nameof(ColumnAttribute.IsPrimaryKey); + public const string Index = nameof(ColumnAttribute.Index); + + /// <summary> + /// This will add hooks for string type column to coerce null to ""(empty string) when get or set. No effect on non-string type. + /// </summary> + public const string DefaultEmptyForString = nameof(ColumnAttribute.DefaultEmptyForString); + + /// <summary> + /// This indicates that you take care of generate this column value when create entity. User calling the api can not specify the value. + /// </summary> + public const string OnlyGenerated = nameof(ColumnAttribute.OnlyGenerated); + + /// <summary> + /// The default value generator method name in entity type. Default to null, aka, search for ColumnNameDefaultValueGenerator. + /// Generator has signature <code>static void DefaultValueGenerator(ColumnInfo column)</code> + /// </summary> + public const string DefaultValueGenerator = nameof(ColumnAttribute.DefaultValueGenerator); + + /// <summary> + /// The validator method name in entity type. Default to null, aka, the default validator. + /// Validator has signature <code>static void Validator(ColumnInfo column, object value)</code> + /// Value param is never null. If you want to mean NULL, it should be a <see cref="DbNullValue"/>. + /// </summary> + public const string Validator = nameof(ColumnAttribute.Validator); + + /// <summary> + /// The column can only be set when inserted, can't be changed in update. + /// </summary> + /// <returns></returns> + public const string NoUpdate = nameof(ColumnAttribute.NoUpdate); + + /// <summary> + /// This column acts as key when get one entity for http get method in path. + /// </summary> + public const string ActAsKey = nameof(ColumnAttribute.ActAsKey); + + /// <summary> + /// The default value used for the column. + /// </summary> + public const string DefaultValue = nameof(ColumnAttribute.DefaultValue); +} + +public interface IColumnMetadata +{ + bool TryGetValue(string key, out object? value); + + object? GetValueOrDefault(string key) + { + if (TryGetValue(key, out var value)) + { + return value; + } + else + { + return null; + } + } + + T? 
GetValueOrDefault<T>(string key) + { + return (T?)GetValueOrDefault(key); + } + + object? this[string key] + { + get + { + if (TryGetValue(key, out var value)) + { + return value; + } + else + { + throw new KeyNotFoundException("Key not found."); + } + } + } +} + +public enum ColumnIndexType +{ + None, + Unique, + NonUnique +} + +[AttributeUsage(AttributeTargets.Property, AllowMultiple = false)] +public class ColumnAttribute : Attribute, IColumnMetadata +{ + // if null, use the property name. + public string? ColumnName { get; init; } + + // default false. + public bool NotNull { get; init; } + + // default false + public bool IsPrimaryKey { get; init; } + + // default None + public ColumnIndexType Index { get; init; } = ColumnIndexType.None; + + /// <seealso cref="ColumnMetadataKeys.DefaultEmptyForString"/> + public bool DefaultEmptyForString { get; init; } + + /// <seealso cref="ColumnMetadataKeys.OnlyGenerated"/> + public bool OnlyGenerated { get; init; } + + /// <seealso cref="ColumnMetadataKeys.DefaultValueGenerator"/> + public string? DefaultValueGenerator { get; init; } + + /// <seealso cref="ColumnMetadataKeys.Validator"/> + public string? Validator { get; init; } + + /// <seealso cref="ColumnMetadataKeys.NoUpdate"/> + public bool NoUpdate { get; init; } + + /// <seealso cref="ColumnMetadataKeys.ActAsKey"/> + public bool ActAsKey { get; init; } + + public object? DefaultValue { get; init; } + + public bool TryGetValue(string key, out object? value) + { + var property = GetType().GetProperty(key); + if (property is null) + { + value = null; + return false; + } + value = property.GetValue(this); + return true; + } +} + +public class AggregateColumnMetadata : IColumnMetadata +{ + private IDictionary<string, object?> _own = new Dictionary<string, object?>(); + private IList<IColumnMetadata> _children = new List<IColumnMetadata>(); + + public void Add(string key, object? value) + { + _own[key] = value; + } + + public void Remove(string key) + { + _own.Remove(key); + } + + public void Add(IColumnMetadata child) + { + _children.Add(child); + } + + public void Remove(IColumnMetadata child) + { + _children.Remove(child); + } + + public bool TryGetValue(string key, out object? 
value) + { + if (_own.ContainsKey(key)) + { + value = _own[key]; + return true; + } + + bool found = false; + value = null; + foreach (var child in _children) + { + if (child.TryGetValue(key, out var tempValue)) + { + value = tempValue; + found = true; + } + } + + return found; + } +} diff --git a/dropped/docker/crupest-api/CrupestApi/CrupestApi.Commons/Crud/ColumnTypeInfo.cs b/dropped/docker/crupest-api/CrupestApi/CrupestApi.Commons/Crud/ColumnTypeInfo.cs new file mode 100644 index 0000000..19eff52 --- /dev/null +++ b/dropped/docker/crupest-api/CrupestApi/CrupestApi.Commons/Crud/ColumnTypeInfo.cs @@ -0,0 +1,218 @@ +using System.Data; +using System.Diagnostics; +using System.Globalization; +using System.Text.Json; +using System.Text.Json.Serialization; + +namespace CrupestApi.Commons.Crud; + +public interface IColumnTypeInfo +{ + public static IColumnTypeInfo BoolColumnTypeInfo { get; } = new SimpleColumnTypeInfo<bool>(); + public static IColumnTypeInfo IntColumnTypeInfo { get; } = new SimpleColumnTypeInfo<int>(); + public static IColumnTypeInfo ShortColumnTypeInfo { get; } = new SimpleColumnTypeInfo<short>(); + public static IColumnTypeInfo SByteColumnTypeInfo { get; } = new SimpleColumnTypeInfo<sbyte>(); + public static IColumnTypeInfo LongColumnTypeInfo { get; } = new SimpleColumnTypeInfo<long>(); + public static IColumnTypeInfo FloatColumnTypeInfo { get; } = new SimpleColumnTypeInfo<float>(); + public static IColumnTypeInfo DoubleColumnTypeInfo { get; } = new SimpleColumnTypeInfo<double>(); + public static IColumnTypeInfo StringColumnTypeInfo { get; } = new SimpleColumnTypeInfo<string>(); + public static IColumnTypeInfo BytesColumnTypeInfo { get; } = new SimpleColumnTypeInfo<byte[]>(); + public static IColumnTypeInfo DateTimeColumnTypeInfo { get; } = new DateTimeColumnTypeInfo(); + + Type ClrType { get; } + Type DatabaseClrType { get; } + bool IsSimple { get { return ClrType == DatabaseClrType; } } + DbType DbType + { + get + { + if (DatabaseClrType == typeof(bool)) + { + return DbType.Boolean; + } + else if (DatabaseClrType == typeof(int)) + { + return DbType.Int32; + } + else if (DatabaseClrType == typeof(long)) + { + return DbType.Int64; + } + else if (DatabaseClrType == typeof(short)) + { + return DbType.Int16; + } + else if (DatabaseClrType == typeof(sbyte)) + { + return DbType.SByte; + } + else if (DatabaseClrType == typeof(double)) + { + return DbType.Double; + } + else if (DatabaseClrType == typeof(float)) + { + return DbType.Single; + } + else if (DatabaseClrType == typeof(string)) + { + return DbType.String; + } + else if (DatabaseClrType == typeof(byte[])) + { + return DbType.Binary; + } + else + { + throw new Exception("Can't deduce DbType."); + } + } + } + + string GetSqlTypeString(string? dbProviderId = null) + { + // Default implementation for SQLite + return DbType switch + { + DbType.String => "TEXT", + DbType.Boolean or DbType.Int16 or DbType.Int32 or DbType.Int64 => "INTEGER", + DbType.Single or DbType.Double => "REAL", + DbType.Binary => "BLOB", + _ => throw new Exception($"Unsupported DbType: {DbType}"), + }; + } + + JsonConverter? JsonConverter { get { return null; } } + + // You must override this method if ClrType != DatabaseClrType + object? ConvertFromDatabase(object? databaseValue) + { + Debug.Assert(IsSimple); + return databaseValue; + } + + // You must override this method if ClrType != DatabaseClrType + object? ConvertToDatabase(object? 
value) + { + Debug.Assert(IsSimple); + return value; + } +} + +public interface IColumnTypeProvider +{ + IReadOnlyList<IColumnTypeInfo> GetAll(); + IColumnTypeInfo Get(Type clrType); + + IList<IColumnTypeInfo> GetAllCustom() + { + return GetAll().Where(t => !t.IsSimple).ToList(); + } +} + +public class SimpleColumnTypeInfo<T> : IColumnTypeInfo +{ + public Type ClrType => typeof(T); + public Type DatabaseClrType => typeof(T); +} + +public class DateTimeColumnTypeInfo : IColumnTypeInfo +{ + private JsonConverter<DateTime> _jsonConverter; + + public DateTimeColumnTypeInfo() + { + _jsonConverter = new DateTimeJsonConverter(this); + } + + public Type ClrType => typeof(DateTime); + public Type DatabaseClrType => typeof(string); + + public JsonConverter JsonConverter => _jsonConverter; + + public object? ConvertToDatabase(object? value) + { + if (value is null) return null; + Debug.Assert(value is DateTime); + return ((DateTime)value).ToUniversalTime().ToString("s") + "Z"; + } + + public object? ConvertFromDatabase(object? databaseValue) + { + if (databaseValue is null) return null; + Debug.Assert(databaseValue is string); + var databaseString = (string)databaseValue; + var dateTimeStyles = DateTimeStyles.None; + if (databaseString.Length > 0 && databaseString[^1] == 'Z') + { + databaseString = databaseString.Substring(0, databaseString.Length - 1); + dateTimeStyles = DateTimeStyles.AssumeUniversal | DateTimeStyles.AdjustToUniversal; + } + return DateTime.ParseExact(databaseString, "s", null, dateTimeStyles); + } +} + +public class DateTimeJsonConverter : JsonConverter<DateTime> +{ + private readonly DateTimeColumnTypeInfo _typeInfo; + + public DateTimeJsonConverter(DateTimeColumnTypeInfo typeInfo) + { + _typeInfo = typeInfo; + } + + public override DateTime Read(ref Utf8JsonReader reader, Type typeToConvert, JsonSerializerOptions options) + { + var databaseValue = reader.GetString(); + return (DateTime)_typeInfo.ConvertFromDatabase(databaseValue)!; + } + + public override void Write(Utf8JsonWriter writer, DateTime value, JsonSerializerOptions options) + { + var databaseValue = _typeInfo.ConvertToDatabase(value); + writer.WriteStringValue((string)databaseValue!); + } +} + +public class ColumnTypeProvider : IColumnTypeProvider +{ + private Dictionary<Type, IColumnTypeInfo> _typeMap = new Dictionary<Type, IColumnTypeInfo>(); + + public ColumnTypeProvider() + { + _typeMap.Add(IColumnTypeInfo.BoolColumnTypeInfo.ClrType, IColumnTypeInfo.BoolColumnTypeInfo); + _typeMap.Add(IColumnTypeInfo.IntColumnTypeInfo.ClrType, IColumnTypeInfo.IntColumnTypeInfo); + _typeMap.Add(IColumnTypeInfo.ShortColumnTypeInfo.ClrType, IColumnTypeInfo.ShortColumnTypeInfo); + _typeMap.Add(IColumnTypeInfo.SByteColumnTypeInfo.ClrType, IColumnTypeInfo.SByteColumnTypeInfo); + _typeMap.Add(IColumnTypeInfo.LongColumnTypeInfo.ClrType, IColumnTypeInfo.LongColumnTypeInfo); + _typeMap.Add(IColumnTypeInfo.FloatColumnTypeInfo.ClrType, IColumnTypeInfo.FloatColumnTypeInfo); + _typeMap.Add(IColumnTypeInfo.DoubleColumnTypeInfo.ClrType, IColumnTypeInfo.DoubleColumnTypeInfo); + _typeMap.Add(IColumnTypeInfo.StringColumnTypeInfo.ClrType, IColumnTypeInfo.StringColumnTypeInfo); + _typeMap.Add(IColumnTypeInfo.BytesColumnTypeInfo.ClrType, IColumnTypeInfo.BytesColumnTypeInfo); + _typeMap.Add(IColumnTypeInfo.DateTimeColumnTypeInfo.ClrType, IColumnTypeInfo.DateTimeColumnTypeInfo); + } + + public IReadOnlyList<IColumnTypeInfo> GetAll() + { + return _typeMap.Values.ToList(); + } + + // This is thread-safe.
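+ // The type map is only written in the constructor, so lookups after startup are read-only. + // Nullable<T> is unwrapped to its underlying type below, so e.g. float? maps to the float column type.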
+ public IColumnTypeInfo Get(Type clrType) + { + if (_typeMap.TryGetValue(clrType, out var typeInfo)) + { + return typeInfo; + } + else + { + if (clrType.IsGenericType && clrType.GetGenericTypeDefinition() == typeof(Nullable<>)) + { + clrType = clrType.GetGenericArguments()[0]; + return Get(clrType); + } + + throw new Exception($"Unsupported type: {clrType}"); + } + } +} diff --git a/dropped/docker/crupest-api/CrupestApi/CrupestApi.Commons/Crud/CrudService.cs b/dropped/docker/crupest-api/CrupestApi/CrupestApi.Commons/Crud/CrudService.cs new file mode 100644 index 0000000..1e881d3 --- /dev/null +++ b/dropped/docker/crupest-api/CrupestApi/CrupestApi.Commons/Crud/CrudService.cs @@ -0,0 +1,132 @@ +using System.Data; +using CrupestApi.Commons.Crud.Migrations; + +namespace CrupestApi.Commons.Crud; + +[Flags] +public enum UpdateBehavior +{ + None = 0, + SaveNull = 1 +} + +public class CrudService<TEntity> : IDisposable where TEntity : class +{ + protected readonly TableInfo _table; + protected readonly string? _connectionName; + protected readonly IDbConnection _dbConnection; + private readonly bool _shouldDisposeConnection; + private IDatabaseMigrator _migrator; + private readonly ILogger<CrudService<TEntity>> _logger; + + public CrudService(ITableInfoFactory tableInfoFactory, IDbConnectionFactory dbConnectionFactory, IDatabaseMigrator migrator, ILoggerFactory loggerFactory) + { + _connectionName = GetConnectionName(); + _table = tableInfoFactory.Get(typeof(TEntity)); + _dbConnection = dbConnectionFactory.Get(_connectionName); + _shouldDisposeConnection = dbConnectionFactory.ShouldDisposeConnection; + _migrator = migrator; + _logger = loggerFactory.CreateLogger<CrudService<TEntity>>(); + } + + protected virtual void EnsureDatabase() + { + if (_migrator.NeedMigrate(_dbConnection, _table)) + { + _logger.LogInformation($"Entity {_table.TableName} needs migration."); + _migrator.AutoMigrate(_dbConnection, _table); + } + } + + protected virtual string GetConnectionName() + { + return typeof(TEntity).Name; + } + + protected virtual void AfterMigrate(IDbConnection dbConnection, TableInfo tableInfo) + { + + } + + public void Dispose() + { + if (_shouldDisposeConnection) + _dbConnection.Dispose(); + } + + public List<TEntity> GetAll() + { + EnsureDatabase(); + var result = _table.Select<TEntity>(_dbConnection, null); + return result; + } + + public int GetCount() + { + EnsureDatabase(); + var result = _table.SelectCount(_dbConnection); + return result; + } + + public TEntity GetByKey(object key) + { + EnsureDatabase(); + var result = _table.Select<TEntity>(_dbConnection, null, WhereClause.Create().Eq(_table.KeyColumn.ColumnName, key)).SingleOrDefault(); + if (result is null) + { + throw new EntityNotExistException($"Required entity for key {key} not found."); + } + return result; + } + + public IInsertClause ConvertEntityToInsertClauses(TEntity entity) + { + var result = new InsertClause(); + foreach (var column in _table.PropertyColumns) + { + var value = column.PropertyInfo!.GetValue(entity); + result.Add(column.ColumnName, value); + } + return result; + } + + public object Create(TEntity entity) + { + EnsureDatabase(); + var insertClause = ConvertEntityToInsertClauses(entity); + _table.Insert(_dbConnection, insertClause, out var key); + return key; + } + + public IUpdateClause ConvertEntityToUpdateClauses(TEntity entity, UpdateBehavior behavior) + { + var result = UpdateClause.Create(); + var saveNull = behavior.HasFlag(UpdateBehavior.SaveNull); + foreach (var column in _table.PropertyColumns) + { + 
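// Nulls are skipped unless SaveNull was requested, so omitted fields keep their stored values.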
var value = column.PropertyInfo!.GetValue(entity); + if (!saveNull && value is null) continue; + result.Add(column.ColumnName, value); + } + return result; + } + + // Return new key. + public object UpdateByKey(object key, TEntity entity, UpdateBehavior behavior = UpdateBehavior.None) + { + EnsureDatabase(); + var affectedCount = _table.Update(_dbConnection, WhereClause.Create().Eq(_table.KeyColumn.ColumnName, key), + ConvertEntityToUpdateClauses(entity, behavior), out var newKey); + if (affectedCount == 0) + { + throw new EntityNotExistException($"Required entity for key {key} not found."); + } + return newKey ?? key; + } + + public bool DeleteByKey(object key) + { + EnsureDatabase(); + return _table.Delete(_dbConnection, WhereClause.Create().Eq(_table.KeyColumn.ColumnName, key)) == 1; + } +} diff --git a/dropped/docker/crupest-api/CrupestApi/CrupestApi.Commons/Crud/CrudServiceCollectionExtensions.cs b/dropped/docker/crupest-api/CrupestApi/CrupestApi.Commons/Crud/CrudServiceCollectionExtensions.cs new file mode 100644 index 0000000..a7e5193 --- /dev/null +++ b/dropped/docker/crupest-api/CrupestApi/CrupestApi.Commons/Crud/CrudServiceCollectionExtensions.cs @@ -0,0 +1,34 @@ +using CrupestApi.Commons.Crud.Migrations; +using CrupestApi.Commons.Secrets; +using Microsoft.Extensions.DependencyInjection.Extensions; + +namespace CrupestApi.Commons.Crud; + +public static class CrudServiceCollectionExtensions +{ + public static IServiceCollection AddCrudCore(this IServiceCollection services) + { + services.TryAddSingleton<IDbConnectionFactory, SqliteConnectionFactory>(); + services.TryAddSingleton<IColumnTypeProvider, ColumnTypeProvider>(); + services.TryAddSingleton<ITableInfoFactory, TableInfoFactory>(); + services.TryAddSingleton<IDatabaseMigrator, SqliteDatabaseMigrator>(); + services.AddSecrets(); + return services; + } + + public static IServiceCollection AddCrud<TEntity, TCrudService>(this IServiceCollection services) where TEntity : class where TCrudService : CrudService<TEntity> + { + AddCrudCore(services); + + services.TryAddScoped<CrudService<TEntity>, TCrudService>(); + services.TryAddScoped<EntityJsonHelper<TEntity>>(); + + return services; + } + + public static IServiceCollection AddCrud<TEntity>(this IServiceCollection services) where TEntity : class + { + return services.AddCrud<TEntity, CrudService<TEntity>>(); + } + +} diff --git a/dropped/docker/crupest-api/CrupestApi/CrupestApi.Commons/Crud/CrudWebApplicationExtensions.cs b/dropped/docker/crupest-api/CrupestApi/CrupestApi.Commons/Crud/CrudWebApplicationExtensions.cs new file mode 100644 index 0000000..8942979 --- /dev/null +++ b/dropped/docker/crupest-api/CrupestApi/CrupestApi.Commons/Crud/CrudWebApplicationExtensions.cs @@ -0,0 +1,101 @@ +namespace CrupestApi.Commons.Crud; + +public static class CrudWebApplicationExtensions +{ + public static WebApplication UseCrudCore(this WebApplication app) + { + app.Use(async (context, next) => + { + try + { + await next(); + } + catch (EntityNotExistException) + { + await context.ResponseMessageAsync("Requested entity does not exist.", StatusCodes.Status404NotFound); + } + catch (UserException e) + { + await context.ResponseMessageAsync(e.Message); + } + }); + + return app; + } + + public static WebApplication MapCrud<TEntity>(this WebApplication app, string path, string? 
permission) where TEntity : class + { + app.MapGet(path, async (context) => + { + if (!context.RequirePermission(permission)) return; + var crudService = context.RequestServices.GetRequiredService<CrudService<TEntity>>(); + var entityJsonHelper = context.RequestServices.GetRequiredService<EntityJsonHelper<TEntity>>(); + var allEntities = crudService.GetAll(); + await context.ResponseJsonAsync(allEntities.Select(e => entityJsonHelper.ConvertEntityToDictionary(e))); + }); + + app.MapGet(path + "/{key}", async (context) => + { + if (!context.RequirePermission(permission)) return; + var crudService = context.RequestServices.GetRequiredService<CrudService<TEntity>>(); + var entityJsonHelper = context.RequestServices.GetRequiredService<EntityJsonHelper<TEntity>>(); + var key = context.Request.RouteValues["key"]?.ToString(); + if (key == null) + { + await context.ResponseMessageAsync("Please specify a key in path."); + return; + } + + var entity = crudService.GetByKey(key); + await context.ResponseJsonAsync(entityJsonHelper.ConvertEntityToDictionary(entity)); + }); + + app.MapPost(path, async (context) => + { + if (!context.RequirePermission(permission)) return; + var crudService = context.RequestServices.GetRequiredService<CrudService<TEntity>>(); + var entityJsonHelper = context.RequestServices.GetRequiredService<EntityJsonHelper<TEntity>>(); + var jsonDocument = await context.Request.ReadJsonAsync(); + var key = crudService.Create(entityJsonHelper.ConvertJsonToEntityForInsert(jsonDocument.RootElement)); + await context.ResponseJsonAsync(entityJsonHelper.ConvertEntityToDictionary(crudService.GetByKey(key))); + }); + + app.MapPatch(path + "/{key}", async (context) => + { + if (!context.RequirePermission(permission)) return; + var key = context.Request.RouteValues["key"]?.ToString(); + var crudService = context.RequestServices.GetRequiredService<CrudService<TEntity>>(); + var entityJsonHelper = context.RequestServices.GetRequiredService<EntityJsonHelper<TEntity>>(); + if (key == null) + { + await context.ResponseMessageAsync("Please specify a key in path."); + return; + } + + var jsonDocument = await context.Request.ReadJsonAsync(); + var entity = entityJsonHelper.ConvertJsonToEntityForUpdate(jsonDocument.RootElement, out var updateBehavior); + var newKey = crudService.UpdateByKey(key, entity, updateBehavior); + await context.ResponseJsonAsync(entityJsonHelper.ConvertEntityToDictionary(crudService.GetByKey(newKey))); + }); + + app.MapDelete(path + "/{key}", async (context) => + { + if (!context.RequirePermission(permission)) return; + var crudService = context.RequestServices.GetRequiredService<CrudService<TEntity>>(); + var key = context.Request.RouteValues["key"]?.ToString(); + if (key == null) + { + await context.ResponseMessageAsync("Please specify a key in path."); + return; + } + + var deleted = crudService.DeleteByKey(key); + if (deleted) + await context.ResponseMessageAsync("Deleted.", StatusCodes.Status200OK); + else + await context.ResponseMessageAsync("Not exist.", StatusCodes.Status200OK); + }); + + return app; + } +} diff --git a/dropped/docker/crupest-api/CrupestApi/CrupestApi.Commons/Crud/DbConnectionFactory.cs b/dropped/docker/crupest-api/CrupestApi/CrupestApi.Commons/Crud/DbConnectionFactory.cs new file mode 100644 index 0000000..701622c --- /dev/null +++ b/dropped/docker/crupest-api/CrupestApi/CrupestApi.Commons/Crud/DbConnectionFactory.cs @@ -0,0 +1,75 @@ +using System.Data; +using Microsoft.Data.Sqlite; +using Microsoft.Extensions.Options; + +namespace CrupestApi.Commons.Crud; 
+ +public interface IDbConnectionFactory +{ + IDbConnection Get(string? name = null); + bool ShouldDisposeConnection { get; } +} + +public class SqliteConnectionFactory : IDbConnectionFactory +{ + private readonly IOptionsMonitor<CrupestApiConfig> _apiConfigMonitor; + + public SqliteConnectionFactory(IOptionsMonitor<CrupestApiConfig> apiConfigMonitor) + { + _apiConfigMonitor = apiConfigMonitor; + } + + public IDbConnection Get(string? name = null) + { + var connectionString = new SqliteConnectionStringBuilder() + { + DataSource = Path.Combine(_apiConfigMonitor.CurrentValue.DataDir, $"{name ?? "crupest-api"}.db"), + Mode = SqliteOpenMode.ReadWriteCreate + }.ToString(); + + var connection = new SqliteConnection(connectionString); + connection.Open(); + return connection; + } + + public bool ShouldDisposeConnection => true; +} + +public class SqliteMemoryConnectionFactory : IDbConnectionFactory, IDisposable +{ + private readonly Dictionary<string, IDbConnection> _connections = new(); + + public IDbConnection Get(string? name = null) + { + name = name ?? "crupest-api"; + + if (_connections.TryGetValue(name, out var connection)) + { + return connection; + } + else + { + var connectionString = new SqliteConnectionStringBuilder() + { + DataSource = ":memory:", + Mode = SqliteOpenMode.ReadWriteCreate + }.ToString(); + + connection = new SqliteConnection(connectionString); + _connections.Add(name, connection); + connection.Open(); + return connection; + } + } + + public bool ShouldDisposeConnection => false; + + + public void Dispose() + { + foreach (var connection in _connections.Values) + { + connection.Dispose(); + } + } +} diff --git a/dropped/docker/crupest-api/CrupestApi/CrupestApi.Commons/Crud/DbNullValue.cs b/dropped/docker/crupest-api/CrupestApi/CrupestApi.Commons/Crud/DbNullValue.cs new file mode 100644 index 0000000..5dc5a61 --- /dev/null +++ b/dropped/docker/crupest-api/CrupestApi/CrupestApi.Commons/Crud/DbNullValue.cs @@ -0,0 +1,9 @@ +namespace CrupestApi.Commons.Crud; + +/// <summary> +/// This will always represent null value in database. +/// </summary> +public class DbNullValue +{ + public static DbNullValue Instance { get; } = new DbNullValue(); +}
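The service-registration and endpoint-mapping extensions above (AddCrud, UseCrudCore, MapCrud) are meant to be composed in the host's startup code. Below is a minimal, hypothetical wiring sketch: the TodoItem entity, the "/todos" route, and the "todo" permission string are illustrative only, and it assumes a net7.0 web project referencing CrupestApi.Commons (the exact semantics of RequirePermission are defined elsewhere in the codebase).

```csharp
using CrupestApi.Commons;
using CrupestApi.Commons.Crud;

var builder = WebApplication.CreateBuilder(args);
builder.Services.AddCrupestApiConfig();
builder.Services.AddCrud<TodoItem>();                 // connection factory, table info, migrator, EntityJsonHelper

var app = builder.Build();
app.UseCrudCore();                                    // maps EntityNotExistException / UserException to HTTP responses
app.MapCrud<TodoItem>("/todos", permission: "todo");  // GET/POST /todos, GET/PATCH/DELETE /todos/{key}
app.Run();

// Illustrative entity; Title acts as the lookup key used in /todos/{key}.
public class TodoItem
{
    [Column(ActAsKey = true, NotNull = true)]
    public string Title { get; set; } = default!;

    [Column]
    public string? Description { get; set; }
}
```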
\ No newline at end of file diff --git a/dropped/docker/crupest-api/CrupestApi/CrupestApi.Commons/Crud/EntityJsonHelper.cs b/dropped/docker/crupest-api/CrupestApi/CrupestApi.Commons/Crud/EntityJsonHelper.cs new file mode 100644 index 0000000..cf3f178 --- /dev/null +++ b/dropped/docker/crupest-api/CrupestApi/CrupestApi.Commons/Crud/EntityJsonHelper.cs @@ -0,0 +1,206 @@ +using System.Globalization; +using System.Text.Json; +using Microsoft.Extensions.Options; + +namespace CrupestApi.Commons.Crud; + +/// <summary> +/// Contains all you need to do with json. +/// </summary> +public class EntityJsonHelper<TEntity> where TEntity : class +{ + private readonly TableInfo _table; + private readonly IOptionsMonitor<JsonSerializerOptions> _jsonSerializerOptions; + + public EntityJsonHelper(ITableInfoFactory tableInfoFactory, IOptionsMonitor<JsonSerializerOptions> jsonSerializerOptions) + { + _table = tableInfoFactory.Get(typeof(TEntity)); + _jsonSerializerOptions = jsonSerializerOptions; + } + + public Dictionary<string, object?> ConvertEntityToDictionary(TEntity entity, bool includeNonColumnProperties = false) + { + var result = new Dictionary<string, object?>(); + + foreach (var column in _table.PropertyColumns) + { + var value = column.PropertyInfo!.GetValue(entity); + var realValue = column.ColumnType.ConvertToDatabase(value); + result[column.ColumnName] = realValue; + } + + if (includeNonColumnProperties) + { + foreach (var propertyInfo in _table.NonColumnProperties) + { + var value = propertyInfo.GetValue(entity); + result[propertyInfo.Name] = value; + } + } + + return result; + } + + public string ConvertEntityToJson(TEntity entity, bool includeNonColumnProperties = false) + { + var dictionary = ConvertEntityToDictionary(entity, includeNonColumnProperties); + return JsonSerializer.Serialize(dictionary, _jsonSerializerOptions.CurrentValue); + } + + private object? ConvertJsonValue(JsonElement? 
optionalJsonElement, Type type, string propertyName) + { + if (optionalJsonElement is null) + { + return null; + } + + var jsonElement = optionalJsonElement.Value; + + if (jsonElement.ValueKind is JsonValueKind.Null or JsonValueKind.Undefined) + { + return null; + } + + if (jsonElement.ValueKind is JsonValueKind.String) + { + if (type != typeof(string)) + { + throw new UserException($"Property {propertyName} must be a string."); + } + return jsonElement.GetString()!; + } + + if (jsonElement.ValueKind is JsonValueKind.True or JsonValueKind.False) + { + if (type != typeof(bool)) + { + throw new UserException($"Property {propertyName} must be a boolean."); + } + return jsonElement.GetBoolean(); + } + + if (jsonElement.ValueKind is JsonValueKind.Number) + { + try + { + return Convert.ChangeType(jsonElement.GetRawText(), type, CultureInfo.InvariantCulture); + } + catch (Exception) + { + throw new UserException($"Property {propertyName} must be a valid number."); + } + } + + throw new UserException($"Property {propertyName} is of wrong type."); + } + + public Dictionary<string, JsonElement> ConvertJsonObjectToDictionary(JsonElement jsonElement) + { + var result = new Dictionary<string, JsonElement>(); + + foreach (var property in jsonElement.EnumerateObject()) + { + result[property.Name.ToLower()] = property.Value; + } + + return result; + } + + public TEntity ConvertJsonToEntityForInsert(JsonElement jsonElement) + { + if (jsonElement.ValueKind is not JsonValueKind.Object) + throw new ArgumentException("The jsonElement must be an object."); + + var result = Activator.CreateInstance<TEntity>(); + + Dictionary<string, JsonElement> jsonProperties = ConvertJsonObjectToDictionary(jsonElement); + + foreach (var column in _table.PropertyColumns) + { + var jsonPropertyValue = jsonProperties.GetValueOrDefault(column.ColumnName.ToLower()); + var value = ConvertJsonValue(jsonPropertyValue, column.ColumnType.DatabaseClrType, column.ColumnName); + if (column.IsOnlyGenerated && value is not null) + { + throw new UserException($"Property {column.ColumnName} is auto generated, you cannot set it."); + } + if (!column.CanBeGenerated && value is null && column.IsNotNull) + { + throw new UserException($"Property {column.ColumnName} can NOT be generated, you must set it."); + } + var realValue = column.ColumnType.ConvertFromDatabase(value); + column.PropertyInfo!.SetValue(result, realValue); + } + + return result; + } + + public TEntity ConvertJsonToEntityForInsert(string json) + { + var jsonElement = JsonSerializer.Deserialize<JsonElement>(json, _jsonSerializerOptions.CurrentValue); + return ConvertJsonToEntityForInsert(jsonElement!); + } + + public TEntity ConvertJsonToEntityForUpdate(JsonElement jsonElement, out UpdateBehavior updateBehavior) + { + if (jsonElement.ValueKind is not JsonValueKind.Object) + throw new UserException("The jsonElement must be an object."); + + updateBehavior = UpdateBehavior.None; + + Dictionary<string, JsonElement> jsonProperties = ConvertJsonObjectToDictionary(jsonElement); + + bool saveNull = false; + if (jsonProperties.TryGetValue("$saveNull".ToLower(), out var saveNullValue)) + { + if (saveNullValue.ValueKind is JsonValueKind.True) + { + updateBehavior |= UpdateBehavior.SaveNull; + saveNull = true; + } + else if (saveNullValue.ValueKind is JsonValueKind.False) + { + + } + else + { + throw new UserException("The $saveNull must be a boolean."); + } + } + + var result = Activator.CreateInstance<TEntity>(); + foreach (var column in _table.PropertyColumns) + { + if 
(jsonProperties.TryGetValue(column.ColumnName.ToLower(), out var jsonPropertyValue)) + { + if (jsonPropertyValue.ValueKind is JsonValueKind.Null or JsonValueKind.Undefined) + { + if ((column.IsOnlyGenerated || column.IsNoUpdate) && saveNull) + { + throw new UserException($"Property {column.ColumnName} is auto generated or not updatable, you cannot set it."); + } + + column.PropertyInfo!.SetValue(result, null); + } + else + { + if (column.IsOnlyGenerated || column.IsNoUpdate) + { + throw new UserException($"Property {column.ColumnName} is auto generated or not updatable, you cannot set it."); + } + + var value = ConvertJsonValue(jsonPropertyValue, column.ColumnType.DatabaseClrType, column.ColumnName); + var realValue = column.ColumnType.ConvertFromDatabase(value); + column.PropertyInfo!.SetValue(result, realValue); + } + } + } + + return result; + } + + public TEntity ConvertJsonToEntityForUpdate(string json, out UpdateBehavior updateBehavior) + { + var jsonElement = JsonSerializer.Deserialize<JsonElement>(json, _jsonSerializerOptions.CurrentValue); + return ConvertJsonToEntityForUpdate(jsonElement!, out updateBehavior); + } +} diff --git a/dropped/docker/crupest-api/CrupestApi/CrupestApi.Commons/Crud/IClause.cs b/dropped/docker/crupest-api/CrupestApi/CrupestApi.Commons/Crud/IClause.cs new file mode 100644 index 0000000..964a669 --- /dev/null +++ b/dropped/docker/crupest-api/CrupestApi/CrupestApi.Commons/Crud/IClause.cs @@ -0,0 +1,24 @@ +using Dapper; + +namespace CrupestApi.Commons.Crud; + +public interface IClause +{ + IEnumerable<IClause> GetSubclauses() + { + return Enumerable.Empty<IClause>(); + } + + IEnumerable<string> GetRelatedColumns() + { + var subclauses = GetSubclauses(); + var result = new List<string>(); + foreach (var subclause in subclauses) + { + var columns = subclause.GetRelatedColumns(); + if (columns is not null) + result.AddRange(columns); + } + return result; + } +} diff --git a/dropped/docker/crupest-api/CrupestApi/CrupestApi.Commons/Crud/InsertClause.cs b/dropped/docker/crupest-api/CrupestApi/CrupestApi.Commons/Crud/InsertClause.cs new file mode 100644 index 0000000..a880e66 --- /dev/null +++ b/dropped/docker/crupest-api/CrupestApi/CrupestApi.Commons/Crud/InsertClause.cs @@ -0,0 +1,77 @@ +using System.Text; + +namespace CrupestApi.Commons.Crud; + +public class InsertItem +{ + /// <summary> + /// Null means use default value. Use <see cref="DbNullValue"/>. + /// </summary> + public InsertItem(string columnName, object? value) + { + ColumnName = columnName; + Value = value; + } + + public string ColumnName { get; set; } + public object? Value { get; set; } +} + +public interface IInsertClause : IClause +{ + List<InsertItem> Items { get; } + string GenerateColumnListSql(string? dbProviderId = null); + (string sql, ParamList parameters) GenerateValueListSql(string? dbProviderId = null); +} + +public class InsertClause : IInsertClause +{ + public List<InsertItem> Items { get; } = new List<InsertItem>(); + + public InsertClause(params InsertItem[] items) + { + Items.AddRange(items); + } + + public InsertClause Add(params InsertItem[] items) + { + Items.AddRange(items); + return this; + } + + public InsertClause Add(string column, object? value) + { + return Add(new InsertItem(column, value)); + } + + public static InsertClause Create(params InsertItem[] items) + { + return new InsertClause(items); + } + + public List<string> GetRelatedColumns() + { + return Items.Select(i => i.ColumnName).ToList(); + } + + public string GenerateColumnListSql(string? 
dbProviderId = null) + { + return string.Join(", ", Items.Select(i => i.ColumnName)); + } + + public (string sql, ParamList parameters) GenerateValueListSql(string? dbProviderId = null) + { + var parameters = new ParamList(); + var sb = new StringBuilder(); + for (var i = 0; i < Items.Count; i++) + { + var item = Items[i]; + var parameterName = parameters.AddRandomNameParameter(item.Value, item.ColumnName); + sb.Append($"@{parameterName}"); + if (i != Items.Count - 1) + sb.Append(", "); + } + + return (sb.ToString(), parameters); + } +} diff --git a/dropped/docker/crupest-api/CrupestApi/CrupestApi.Commons/Crud/Migrations/DatabaseMigrator.cs b/dropped/docker/crupest-api/CrupestApi/CrupestApi.Commons/Crud/Migrations/DatabaseMigrator.cs new file mode 100644 index 0000000..f1ae616 --- /dev/null +++ b/dropped/docker/crupest-api/CrupestApi/CrupestApi.Commons/Crud/Migrations/DatabaseMigrator.cs @@ -0,0 +1,44 @@ +using System.Data; + +namespace CrupestApi.Commons.Crud.Migrations; + +public class TableColumn +{ + public TableColumn(string name, string type, bool notNull, int primaryKey) + { + Name = name; + Type = type; + NotNull = notNull; + PrimaryKey = primaryKey; + } + + public string Name { get; set; } + public string Type { get; set; } + public bool NotNull { get; set; } + + /// <summary> + /// 0 if not primary key. 1-based index if in primary key. + /// </summary> + public int PrimaryKey { get; set; } +} + +public class Table +{ + public Table(string name) + { + Name = name; + } + + public string Name { get; set; } + public List<TableColumn> Columns { get; set; } = new List<TableColumn>(); +} + +public interface IDatabaseMigrator +{ + Table? GetTable(IDbConnection dbConnection, string tableName); + Table ConvertTableInfoToTable(TableInfo tableInfo); + string GenerateCreateTableColumnSqlSegment(TableColumn column); + string GenerateCreateTableSql(string tableName, IEnumerable<TableColumn> columns); + bool NeedMigrate(IDbConnection dbConnection, TableInfo tableInfo); + void AutoMigrate(IDbConnection dbConnection, TableInfo tableInfo); +} diff --git a/dropped/docker/crupest-api/CrupestApi/CrupestApi.Commons/Crud/Migrations/SqliteDatabaseMigrator.cs b/dropped/docker/crupest-api/CrupestApi/CrupestApi.Commons/Crud/Migrations/SqliteDatabaseMigrator.cs new file mode 100644 index 0000000..33310d6 --- /dev/null +++ b/dropped/docker/crupest-api/CrupestApi/CrupestApi.Commons/Crud/Migrations/SqliteDatabaseMigrator.cs @@ -0,0 +1,175 @@ +using System.Data; +using System.Text; +using System.Text.Json; +using System.Text.RegularExpressions; +using Dapper; + +namespace CrupestApi.Commons.Crud.Migrations; + +public class SqliteDatabaseMigrator : IDatabaseMigrator +{ + private void CheckTableName(string name) + { + if (Regex.Match(name, @"^[_0-9a-zA-Z]+$").Success is false) + { + throw new ArgumentException("Fxxk, what have you passed as table name."); + } + } + + public Table? GetTable(IDbConnection dbConnection, string tableName) + { + var count = dbConnection.QuerySingle<int>( + "SELECT count(*) FROM sqlite_schema WHERE type = 'table' AND name = @TableName;", + new { TableName = tableName }); + if (count == 0) + { + return null; + } + else if (count > 1) + { + throw new Exception($"More than 1 table has name {tableName}. 
What happened?"); + } + else + { + var table = new Table(tableName); + var queryColumns = dbConnection.Query<dynamic>($"PRAGMA table_info({tableName})"); + + foreach (var column in queryColumns) + { + var columnName = (string)column.name; + var columnType = (string)column.type; + var isNullable = Convert.ToBoolean(column.notnull); + var primaryKey = Convert.ToInt32(column.pk); + + table.Columns.Add(new TableColumn(columnName, columnType, isNullable, primaryKey)); + } + + return table; + } + } + + public Table ConvertTableInfoToTable(TableInfo tableInfo) + { + var table = new Table(tableInfo.TableName); + + foreach (var columnInfo in tableInfo.Columns) + { + table.Columns.Add(new TableColumn(columnInfo.ColumnName, columnInfo.ColumnType.GetSqlTypeString(), + columnInfo.IsNotNull, columnInfo.IsPrimaryKey ? 1 : 0)); + } + + return table; + } + + public string GenerateCreateTableColumnSqlSegment(TableColumn column) + { + StringBuilder result = new StringBuilder(); + result.Append(column.Name); + result.Append(' '); + result.Append(column.Type); + if (column.PrimaryKey is not 0) + { + result.Append(" PRIMARY KEY AUTOINCREMENT"); + } + else if (column.NotNull) + { + result.Append(" NOT NULL"); + } + + return result.ToString(); + } + + public string GenerateCreateTableSql(string tableName, IEnumerable<TableColumn> columns) + { + CheckTableName(tableName); + + var sql = $@" +CREATE TABLE {tableName} ( + {string.Join(",\n ", columns.Select(GenerateCreateTableColumnSqlSegment))} +); + ".Trim(); + + return sql; + + } + + public void AutoMigrate(IDbConnection dbConnection, TableInfo tableInfo) + { + var tableName = tableInfo.TableName; + var databaseTable = GetTable(dbConnection, tableName); + var wantedTable = ConvertTableInfoToTable(tableInfo); + var databaseTableColumnNames = databaseTable is null ? new List<string>() : databaseTable.Columns.Select(column => column.Name).ToList(); + var wantedTableColumnNames = wantedTable.Columns.Select(column => column.Name).ToList(); + + var notChangeColumns = wantedTableColumnNames.Where(column => databaseTableColumnNames.Contains(column)).ToList(); + var addColumns = wantedTableColumnNames.Where(column => !databaseTableColumnNames.Contains(column)).ToList(); + + if (databaseTable is not null && dbConnection.QuerySingle<int>($"SELECT count(*) FROM {tableName}") > 0) + { + foreach (var columnName in addColumns) + { + var columnInfo = tableInfo.GetColumn(columnName); + if (!columnInfo.CanBeGenerated) + { + throw new Exception($"Column {columnName} cannot be generated. So we can't auto-migrate."); + } + } + } + + // We are sqlite, so it's a little bit difficult. + using var transaction = dbConnection.BeginTransaction(); + + if (databaseTable is not null) + { + var tempTableName = tableInfo.TableName + "_temp"; + dbConnection.Execute($"ALTER TABLE {tableName} RENAME TO {tempTableName}", new { TableName = tableName, tempTableName }); + + var createTableSql = GenerateCreateTableSql(tableName, wantedTable.Columns); + dbConnection.Execute(createTableSql); + + // Copy old data to new table. 
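+ // Columns present in both schemas keep their values; newly added columns are filled from their default value generators (checked above to be generatable when the table already has rows).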
+ var originalRows = dbConnection.Query<dynamic>($"SELECT * FROM {tempTableName}").Cast<IDictionary<string, object?>>().ToList(); + foreach (var originalRow in originalRows) + { + var parameters = new DynamicParameters(); + + foreach (var columnName in notChangeColumns) + { + parameters.Add(columnName, originalRow[columnName]); + } + + foreach (var columnName in addColumns) + { + parameters.Add(columnName, tableInfo.GetColumn(columnName).GenerateDefaultValue()); + } + + string columnSql = string.Join(", ", wantedTableColumnNames); + string valuesSql = string.Join(", ", wantedTableColumnNames.Select(c => "@" + c)); + + string sql = $"INSERT INTO {tableName} ({columnSql}) VALUES ({valuesSql})"; + dbConnection.Execute(sql, parameters); + } + + // Finally drop old table + dbConnection.Execute($"DROP TABLE {tempTableName}"); + } + else + { + var createTableSql = GenerateCreateTableSql(tableName, wantedTable.Columns); + dbConnection.Execute(createTableSql); + } + + // Commit transaction. + transaction.Commit(); + } + + public bool NeedMigrate(IDbConnection dbConnection, TableInfo tableInfo) + { + var tableName = tableInfo.TableName; + var databaseTable = GetTable(dbConnection, tableName); + var wantedTable = ConvertTableInfoToTable(tableInfo); + var databaseTableColumns = databaseTable is null ? new HashSet<string>() : new HashSet<string>(databaseTable.Columns.Select(c => c.Name)); + var wantedTableColumns = new HashSet<string>(wantedTable.Columns.Select(c => c.Name)); + return !databaseTableColumns.SetEquals(wantedTableColumns); + } +} diff --git a/dropped/docker/crupest-api/CrupestApi/CrupestApi.Commons/Crud/OrderByClause.cs b/dropped/docker/crupest-api/CrupestApi/CrupestApi.Commons/Crud/OrderByClause.cs new file mode 100644 index 0000000..734d044 --- /dev/null +++ b/dropped/docker/crupest-api/CrupestApi/CrupestApi.Commons/Crud/OrderByClause.cs @@ -0,0 +1,50 @@ +namespace CrupestApi.Commons.Crud; + +public class OrderByItem +{ + public OrderByItem(string columnName, bool isAscending) + { + ColumnName = columnName; + IsAscending = isAscending; + } + + public string ColumnName { get; } + public bool IsAscending { get; } + + public string GenerateSql() + { + return $"{ColumnName} {(IsAscending ? "ASC" : "DESC")}"; + } +} + +public interface IOrderByClause : IClause +{ + List<OrderByItem> Items { get; } + // Contains "ORDER BY" keyword! + string GenerateSql(string? dbProviderId = null); +} + +public class OrderByClause : IOrderByClause +{ + public List<OrderByItem> Items { get; } = new List<OrderByItem>(); + + public OrderByClause(params OrderByItem[] items) + { + Items.AddRange(items); + } + + public static OrderByClause Create(params OrderByItem[] items) + { + return new OrderByClause(items); + } + + public List<string> GetRelatedColumns() + { + return Items.Select(x => x.ColumnName).ToList(); + } + + public string GenerateSql(string? dbProviderId = null) + { + return "ORDER BY " + string.Join(", ", Items.Select(i => i.GenerateSql())); + } +} diff --git a/dropped/docker/crupest-api/CrupestApi/CrupestApi.Commons/Crud/ParamMap.cs b/dropped/docker/crupest-api/CrupestApi/CrupestApi.Commons/Crud/ParamMap.cs new file mode 100644 index 0000000..37d77ca --- /dev/null +++ b/dropped/docker/crupest-api/CrupestApi/CrupestApi.Commons/Crud/ParamMap.cs @@ -0,0 +1,73 @@ +using System.Data; +using System.Diagnostics; + +namespace CrupestApi.Commons.Crud; + +/// <summary> +/// <see cref="ColumnName"/> is an optional column name related to the param. You may use it to do some column related things.
Like use a more accurate conversion. +/// </summary> +/// <remarks> +/// If value is DbNullValue, it will be treated as null. +/// </remarks> +public record ParamInfo(string Name, object? Value, string? ColumnName = null); + +public class ParamList : List<ParamInfo> +{ + private static Random random = new Random(); + private const string chars = "abcdefghijklmnopqrstuvwxyz"; + public static string GenerateRandomKey(int length) + { + lock (random) + { + var result = new string(Enumerable.Repeat(chars, length) + .Select(s => s[random.Next(s.Length)]).ToArray()); + return result; + } + } + + public string GenerateRandomParameterName() + { + var parameterName = GenerateRandomKey(10); + int retryTimes = 1; + while (ContainsKey(parameterName)) + { + retryTimes++; + Debug.Assert(retryTimes <= 100); + parameterName = GenerateRandomKey(10); + } + return parameterName; + } + + + public bool ContainsKey(string name) + { + return this.SingleOrDefault(p => p.Name.Equals(name, StringComparison.OrdinalIgnoreCase)) is not null; + } + + public T? Get<T>(string key) + { + return (T?)this.SingleOrDefault(p => p.Name.Equals(key, StringComparison.OrdinalIgnoreCase))?.Value; + } + + public object? this[string key] + { + get + { + return this.SingleOrDefault(p => p.Name.Equals(key, StringComparison.OrdinalIgnoreCase)) ?? throw new KeyNotFoundException("Key not found."); + } + } + + public void Add(string name, object? value, string? columnName = null) + { + Add(new ParamInfo(name, value, columnName)); + } + + // Return the random name. + public string AddRandomNameParameter(object? value, string? columnName = null) + { + var parameterName = GenerateRandomParameterName(); + var param = new ParamInfo(parameterName, value, columnName); + Add(param); + return parameterName; + } +} diff --git a/dropped/docker/crupest-api/CrupestApi/CrupestApi.Commons/Crud/README.md b/dropped/docker/crupest-api/CrupestApi/CrupestApi.Commons/Crud/README.md new file mode 100644 index 0000000..b008ea7 --- /dev/null +++ b/dropped/docker/crupest-api/CrupestApi/CrupestApi.Commons/Crud/README.md @@ -0,0 +1,47 @@ +# CRUD Technic Notes + +## Overview + +The ultimate CRUD scaffold finally comes. + +## Database Pipeline + +### Select + +1. Create select `what`, where clause, order clause, `Offset` and `Limit`. +2. Check clauses' related columns are valid. +3. Generate sql string and param list. +4. Convert param list to `Dapper` dynamic params with proper type conversion in `IColumnTypeInfo`. +5. Execute sql and get `dynamic`s. +6. (Optional) Convert `dynamic`s to `TEntity`s. + +### Insert + +1. Create insert clause. +2. Check clauses' related columns are valid. +3. Create a real empty insert clause. +4. For each column: + 1. If insert item exists and value is not null but the column `IsGenerated` is true, throw exception. + 2. If insert item does not exist or value is `null`, use default value generator to generate value. However, `DbNullValue` always means use `NULL` for that column. + 3. If value is `null` and the column `IsAutoIncrement` is true, skip to next column. + 4. Coerce null to `DbNullValue`. + 5. Run validator to validate the value. + 6. If value is `DbNullValue`, `IsNotNull` is true, throw exception. + 7. Add column and value to real insert clause. +5. Generate sql string and param list. +6. Convert param list to `Dapper` dynamic params with proper type conversion in `IColumnTypeInfo`. +7. Execute sql and return `KeyColumn` value. + +### Update + +1. Create update clause, where clause. +2. Check clauses' related columns are valid. 
Then generate sql string and param list. +3. Create a real empty update clause. +4. For each column: + 1. If update item exists and value is not null but the column `IsNoUpdate` is true, throw exception. + 2. Invoke validator to validate the value. + 3. If `IsNotNull` is true and value is `DbNullValue`, throw exception. + 4. Add column and value to real update clause. +5. Generate sql string and param list. +6. Convert param list to `Dapper` dynamic params with proper type conversion in `IColumnTypeInfo`. +7. Execute sql and return count of affected rows. diff --git a/dropped/docker/crupest-api/CrupestApi/CrupestApi.Commons/Crud/TableInfo.cs b/dropped/docker/crupest-api/CrupestApi/CrupestApi.Commons/Crud/TableInfo.cs new file mode 100644 index 0000000..4a7ea95 --- /dev/null +++ b/dropped/docker/crupest-api/CrupestApi/CrupestApi.Commons/Crud/TableInfo.cs @@ -0,0 +1,628 @@ +using System.Data; +using System.Diagnostics; +using System.Reflection; +using System.Text; +using Dapper; + +namespace CrupestApi.Commons.Crud; + +/// <summary> +/// Contains all you need to manipulate a table. +/// </summary> +public class TableInfo +{ + private readonly IColumnTypeProvider _columnTypeProvider; + private readonly Lazy<List<string>> _lazyColumnNameList; + private readonly ILoggerFactory _loggerFactory; + private readonly ILogger<TableInfo> _logger; + + public TableInfo(Type entityType, IColumnTypeProvider columnTypeProvider, ILoggerFactory loggerFactory) + : this(entityType.Name, entityType, columnTypeProvider, loggerFactory) + { + } + + public TableInfo(string tableName, Type entityType, IColumnTypeProvider columnTypeProvider, ILoggerFactory loggerFactory) + { + _loggerFactory = loggerFactory; + _logger = loggerFactory.CreateLogger<TableInfo>(); + + _logger.LogInformation("Create TableInfo for entity type '{}'.", entityType.Name); + + _columnTypeProvider = columnTypeProvider; + + TableName = tableName; + EntityType = entityType; + + + var properties = entityType.GetProperties(); + _logger.LogInformation("Find following properties: {}", string.Join(", ", properties.Select(p => p.Name))); + + var columnInfos = new List<ColumnInfo>(); + + bool hasId = false; + ColumnInfo? primaryKeyColumn = null; + ColumnInfo? 
keyColumn = null; + + List<PropertyInfo> nonColumnProperties = new(); + + foreach (var property in properties) + { + _logger.LogInformation("Check property '{}'.", property.Name); + if (CheckPropertyIsColumn(property)) + { + _logger.LogInformation("{} is a column, create ColumnInfo for it.", property.Name); + var columnInfo = new ColumnInfo(this, property, _columnTypeProvider, _loggerFactory); + columnInfos.Add(columnInfo); + if (columnInfo.IsPrimaryKey) + { + _logger.LogInformation("Column {} is a primary key.", property.Name); + primaryKeyColumn = columnInfo; + } + if (columnInfo.ColumnName.Equals("id", StringComparison.OrdinalIgnoreCase)) + { + _logger.LogInformation("Column {} has name id.", property.Name); + hasId = true; + } + if (columnInfo.IsSpecifiedAsKey) + { + if (keyColumn is not null) + { + throw new Exception("Already exists a key column."); + } + _logger.LogInformation("Column {} is specified as key.", property.Name); + keyColumn = columnInfo; + } + } + else + { + _logger.LogInformation("{} is not a column.", property.Name); + nonColumnProperties.Add(property); + } + } + + if (primaryKeyColumn is null) + { + if (hasId) throw new Exception("A column named id already exists but is not primary key."); + _logger.LogInformation("No primary key column found, create one automatically."); + primaryKeyColumn = CreateAutoIdColumn(); + columnInfos.Add(primaryKeyColumn); + } + + if (keyColumn is null) + { + _logger.LogInformation("No key column is specified, will use primary key."); + keyColumn = primaryKeyColumn; + } + + Columns = columnInfos; + PrimaryKeyColumn = primaryKeyColumn; + KeyColumn = keyColumn; + NonColumnProperties = nonColumnProperties; + + _logger.LogInformation("Check table validity."); + CheckValidity(); + + _logger.LogInformation("TableInfo succeeded to create."); + + _lazyColumnNameList = new Lazy<List<string>>(() => Columns.Select(c => c.ColumnName).ToList()); + } + + private ColumnInfo CreateAutoIdColumn() + { + return new ColumnInfo(this, + new ColumnAttribute + { + ColumnName = "Id", + NotNull = true, + IsPrimaryKey = true, + }, + typeof(long), _columnTypeProvider, _loggerFactory); + } + + public Type EntityType { get; } + public string TableName { get; } + public IReadOnlyList<ColumnInfo> Columns { get; } + public IReadOnlyList<ColumnInfo> PropertyColumns => Columns.Where(c => c.PropertyInfo is not null).ToList(); + public ColumnInfo PrimaryKeyColumn { get; } + /// <summary> + /// Maybe not the primary key. But acts as primary key. 
+ /// </summary> + /// <seealso cref="ColumnMetadataKeys.ActAsKey"/> + public ColumnInfo KeyColumn { get; } + public IReadOnlyList<PropertyInfo> ColumnProperties => PropertyColumns.Select(c => c.PropertyInfo!).ToList(); + public IReadOnlyList<PropertyInfo> NonColumnProperties { get; } + public IReadOnlyList<string> ColumnNameList => _lazyColumnNameList.Value; + + protected bool CheckPropertyIsColumn(PropertyInfo property) + { + var columnAttribute = property.GetCustomAttribute<ColumnAttribute>(); + if (columnAttribute is null) return false; + return true; + } + + public ColumnInfo GetColumn(string columnName) + { + foreach (var column in Columns) + { + if (column.ColumnName.Equals(columnName, StringComparison.OrdinalIgnoreCase)) + { + return column; + } + } + throw new KeyNotFoundException("No such column with given name."); + } + + public void CheckGeneratedColumnHasGenerator() + { + foreach (var column in Columns) + { + if (column.IsOnlyGenerated && column.DefaultValueGeneratorMethod is null) + { + throw new Exception($"Column '{column.ColumnName}' is generated but has no generator."); + } + } + } + + public void CheckValidity() + { + // Check if there is only one primary key. + bool hasPrimaryKey = false; + bool hasKey = false; + foreach (var column in Columns) + { + if (column.IsPrimaryKey) + { + if (hasPrimaryKey) throw new Exception("More than one columns are primary key."); + hasPrimaryKey = true; + } + + if (column.IsSpecifiedAsKey) + { + if (hasKey) throw new Exception("More than one columns are specified as key column."); + } + } + + if (!hasPrimaryKey) throw new Exception("No column is primary key."); + + // Check two columns have the same sql name. + HashSet<string> sqlNameSet = new HashSet<string>(); + + foreach (var column in Columns) + { + if (sqlNameSet.Contains(column.ColumnName)) + throw new Exception($"Two columns have the same sql name '{column.ColumnName}'."); + sqlNameSet.Add(column.ColumnName); + } + + CheckGeneratedColumnHasGenerator(); + } + + public string GenerateCreateIndexSql(string? dbProviderId = null) + { + var sb = new StringBuilder(); + + foreach (var column in Columns) + { + if (column.Index == ColumnIndexType.None) continue; + + sb.Append($"CREATE {(column.Index == ColumnIndexType.Unique ? "UNIQUE" : "")} INDEX {TableName}_{column.ColumnName}_index ON {TableName} ({column.ColumnName});\n"); + } + + return sb.ToString(); + } + + public string GenerateCreateTableSql(bool createIndex = true, string? dbProviderId = null) + { + var tableName = TableName; + var columnSql = string.Join(",\n", Columns.Select(c => c.GenerateCreateTableColumnString(dbProviderId))); + + var sql = $@" +CREATE TABLE {tableName}( + {columnSql} +); + "; + + if (createIndex) + { + sql += GenerateCreateIndexSql(dbProviderId); + } + + return sql; + } + + public void CheckColumnName(string columnName) + { + if (!ColumnNameList.Contains(columnName)) + { + throw new ArgumentException($"Column {columnName} is not in the table."); + } + } + + public void CheckRelatedColumns(IClause? clause) + { + if (clause is not null) + { + var relatedColumns = clause.GetRelatedColumns(); + foreach (var column in relatedColumns) + { + CheckColumnName(column); + } + } + } + + /// <summary> + /// If you call this manually, it's your duty to call hooks. + /// </summary> + /// <seealso cref="SelectDynamic"/> + public (string sql, ParamList parameters) GenerateSelectSql(string? selectWhat, IWhereClause? whereClause, IOrderByClause? orderByClause = null, int? skip = null, int? limit = null, string? 
dbProviderId = null) + { + CheckRelatedColumns(whereClause); + CheckRelatedColumns(orderByClause); + + var parameters = new ParamList(); + + StringBuilder result = new StringBuilder() + .Append($"SELECT {selectWhat ?? "*"} FROM ") + .Append(TableName); + + if (whereClause is not null) + { + result.Append(" WHERE "); + var (whereSql, whereParameters) = whereClause.GenerateSql(dbProviderId); + parameters.AddRange(whereParameters); + result.Append(whereSql); + } + + if (orderByClause is not null) + { + result.Append(' '); + var orderBySql = orderByClause.GenerateSql(dbProviderId); + result.Append(orderBySql); + } + + if (limit is not null) + { + result.Append(" LIMIT @Limit"); + parameters.Add("Limit", limit.Value); + } + + if (skip is not null) + { + result.Append(" OFFSET @Skip"); + parameters.Add("Skip", skip.Value); + } + + result.Append(';'); + + return (result.ToString(), parameters); + } + + /// <summary> + /// If you call this manually, it's your duty to call hooks. + /// </summary> + /// <seealso cref="Insert"/> + public (string sql, ParamList parameters) GenerateInsertSql(IInsertClause insertClause, string? dbProviderId = null) + { + CheckRelatedColumns(insertClause); + + var parameters = new ParamList(); + + var result = new StringBuilder() + .Append("INSERT INTO ") + .Append(TableName) + .Append(" (") + .Append(insertClause.GenerateColumnListSql(dbProviderId)) + .Append(") VALUES ("); + + var (valueSql, valueParameters) = insertClause.GenerateValueListSql(dbProviderId); + result.Append(valueSql).Append(");"); + + parameters.AddRange(valueParameters); + + return (result.ToString(), parameters); + } + + /// <summary> + /// If you call this manually, it's your duty to call hooks. + /// </summary> + /// <seealso cref="Update"/> + public (string sql, ParamList parameters) GenerateUpdateSql(IWhereClause? whereClause, IUpdateClause updateClause) + { + CheckRelatedColumns(whereClause); + CheckRelatedColumns(updateClause); + + var parameters = new ParamList(); + + StringBuilder sb = new StringBuilder("UPDATE "); + sb.Append(TableName); + sb.Append(" SET "); + var (updateSql, updateParameters) = updateClause.GenerateSql(); + sb.Append(updateSql); + parameters.AddRange(updateParameters); + if (whereClause is not null) + { + sb.Append(" WHERE "); + var (whereSql, whereParameters) = whereClause.GenerateSql(); + sb.Append(whereSql); + parameters.AddRange(whereParameters); + } + sb.Append(';'); + + return (sb.ToString(), parameters); + } + + /// <summary> + /// If you call this manually, it's your duty to call hooks. + /// </summary> + /// <seealso cref="Delete"/> + public (string sql, ParamList parameters) GenerateDeleteSql(IWhereClause? 
whereClause) + { + CheckRelatedColumns(whereClause); + + var parameters = new ParamList(); + + StringBuilder sb = new StringBuilder("DELETE FROM "); + sb.Append(TableName); + if (whereClause is not null) + { + sb.Append(" WHERE "); + var (whereSql, whereParameters) = whereClause.GenerateSql(); + parameters.AddRange(whereParameters); + sb.Append(whereSql); + } + sb.Append(';'); + + return (sb.ToString(), parameters); + } + + private DynamicParameters ConvertParameters(ParamList parameters) + { + var result = new DynamicParameters(); + foreach (var param in parameters) + { + if (param.Value is null || param.Value is DbNullValue) + { + result.Add(param.Name, null); + continue; + } + + var columnName = param.ColumnName; + IColumnTypeInfo typeInfo; + if (columnName is not null) + { + typeInfo = GetColumn(columnName).ColumnType; + } + else + { + typeInfo = _columnTypeProvider.Get(param.Value.GetType()); + } + + result.Add(param.Name, typeInfo.ConvertToDatabase(param.Value), typeInfo.DbType); + } + return result; + } + + /// <summary> + /// ConvertParameters. Select. Call hooks. + /// </summary> + public virtual List<dynamic> SelectDynamic(IDbConnection dbConnection, string? what = null, IWhereClause? where = null, IOrderByClause? orderBy = null, int? skip = null, int? limit = null) + { + var (sql, parameters) = GenerateSelectSql(what, where, orderBy, skip, limit); + var queryResult = dbConnection.Query<dynamic>(sql, ConvertParameters(parameters)); + return queryResult.ToList(); + } + + public virtual int SelectCount(IDbConnection dbConnection, IWhereClause? where = null, IOrderByClause? orderBy = null, int? skip = null, int? limit = null) + { + var (sql, parameters) = GenerateSelectSql("COUNT(*)", where, orderBy, skip, limit); + var result = dbConnection.QuerySingle<int>(sql, ConvertParameters(parameters)); + return result; + } + + public virtual TResult MapDynamicTo<TResult>(dynamic d) + { + var dict = (IDictionary<string, object?>)d; + + var result = Activator.CreateInstance<TResult>(); + Type resultType = typeof(TResult); + + foreach (var column in Columns) + { + var resultProperty = resultType.GetProperty(column.ColumnName); + if (dict.ContainsKey(column.ColumnName) && resultProperty is not null) + { + if (dict[column.ColumnName] is null) + { + resultProperty.SetValue(result, null); + continue; + } + object? value = Convert.ChangeType(dict[column.ColumnName], column.ColumnType.DatabaseClrType); + value = column.ColumnType.ConvertFromDatabase(value); + resultProperty.SetValue(result, value); + } + } + + return result; + } + + /// <summary> + /// Select and call hooks. + /// </summary> + public virtual List<TResult> Select<TResult>(IDbConnection dbConnection, string? what = null, IWhereClause? where = null, IOrderByClause? orderBy = null, int? skip = null, int? limit = null) + { + List<dynamic> queryResult = SelectDynamic(dbConnection, what, where, orderBy, skip, limit).ToList(); + + return queryResult.Select(MapDynamicTo<TResult>).ToList(); + } + + public IInsertClause ConvertEntityToInsertClause(object entity) + { + Debug.Assert(EntityType.IsInstanceOfType(entity)); + var result = new InsertClause(); + foreach (var column in PropertyColumns) + { + var value = column.PropertyInfo!.GetValue(entity); + result.Add(column.ColumnName, value); + } + return result; + } + + /// <summary> + /// Insert a entity and call hooks. + /// </summary> + /// <returns>The key of insert entity.</returns> + public int Insert(IDbConnection dbConnection, IInsertClause insert, out object key) + { + object? 
finalKey = null; + + var realInsert = InsertClause.Create(); + + foreach (var column in Columns) + { + InsertItem? item = insert.Items.SingleOrDefault(i => i.ColumnName == column.ColumnName); + + var value = item?.Value; + + if (column.IsOnlyGenerated && value is not null) + { + throw new Exception($"The column '{column.ColumnName}' is auto generated. You can't specify it explicitly."); + } + + if (value is null) + { + value = column.GenerateDefaultValue(); + } + + if (value is null && column.IsAutoIncrement) + { + continue; + } + + if (value is null) + { + value = DbNullValue.Instance; + } + + column.InvokeValidator(value); + + InsertItem realInsertItem; + + if (value is DbNullValue) + { + if (column.IsNotNull) + { + throw new Exception($"Column '{column.ColumnName}' is not nullable. Please specify a non-null value."); + } + + realInsertItem = new InsertItem(column.ColumnName, null); + } + else + { + realInsertItem = new InsertItem(column.ColumnName, value); + } + + realInsert.Add(realInsertItem); + + if (realInsertItem.ColumnName == KeyColumn.ColumnName) + { + finalKey = realInsertItem.Value; + } + } + + if (finalKey is null) throw new Exception("No key???"); + key = finalKey; + + var (sql, parameters) = GenerateInsertSql(realInsert); + + var affectedRowCount = dbConnection.Execute(sql, ConvertParameters(parameters)); + + if (affectedRowCount != 1) + throw new Exception("Failed to insert."); + + return affectedRowCount; + } + + /// <summary> + /// Upgrade a entity and call hooks. + /// </summary> + /// <returns>The key of insert entity.</returns> + public virtual int Update(IDbConnection dbConnection, IWhereClause? where, IUpdateClause update, out object? newKey) + { + newKey = null; + + var realUpdate = UpdateClause.Create(); + + foreach (var column in Columns) + { + UpdateItem? item = update.Items.FirstOrDefault(i => i.ColumnName == column.ColumnName); + object? value = item?.Value; + + if (value is not null) + { + if (column.IsNoUpdate) + { + throw new Exception($"The column '{column.ColumnName}' can't be update."); + } + + column.InvokeValidator(value); + + realUpdate.Add(column.ColumnName, value); + + if (column.ColumnName == KeyColumn.ColumnName) + { + newKey = value; + } + } + } + + var (sql, parameters) = GenerateUpdateSql(where, realUpdate); + return dbConnection.Execute(sql, ConvertParameters(parameters)); + } + + public virtual int Delete(IDbConnection dbConnection, IWhereClause? where) + { + var (sql, parameters) = GenerateDeleteSql(where); + return dbConnection.Execute(sql, ConvertParameters(parameters)); + } +} + +public interface ITableInfoFactory +{ + TableInfo Get(Type type); +} + +public class TableInfoFactory : ITableInfoFactory +{ + private readonly Dictionary<Type, TableInfo> _cache = new Dictionary<Type, TableInfo>(); + private readonly IColumnTypeProvider _columnTypeProvider; + private readonly ILoggerFactory _loggerFactory; + private readonly ILogger<TableInfoFactory> _logger; + + public TableInfoFactory(IColumnTypeProvider columnTypeProvider, ILoggerFactory loggerFactory) + { + _columnTypeProvider = columnTypeProvider; + _loggerFactory = loggerFactory; + _logger = loggerFactory.CreateLogger<TableInfoFactory>(); + } + + // This is thread-safe. 
+ public TableInfo Get(Type type) + { + lock (_cache) + { + if (_cache.TryGetValue(type, out var tableInfo)) + { + _logger.LogDebug("Table info of type '{}' is cached, return it.", type.Name); + return tableInfo; + } + else + { + _logger.LogDebug("Table info for type '{}' is not in cache, create it.", type.Name); + tableInfo = new TableInfo(type, _columnTypeProvider, _loggerFactory); + _logger.LogDebug("Table info for type '{}' is created, add it to cache.", type.Name); + _cache.Add(type, tableInfo); + return tableInfo; + } + } + } +} diff --git a/dropped/docker/crupest-api/CrupestApi/CrupestApi.Commons/Crud/UpdateClause.cs b/dropped/docker/crupest-api/CrupestApi/CrupestApi.Commons/Crud/UpdateClause.cs new file mode 100644 index 0000000..de5c6c3 --- /dev/null +++ b/dropped/docker/crupest-api/CrupestApi/CrupestApi.Commons/Crud/UpdateClause.cs @@ -0,0 +1,77 @@ +using System.Text; + +namespace CrupestApi.Commons.Crud; + +public class UpdateItem +{ + public UpdateItem(string columnName, object? value) + { + ColumnName = columnName; + Value = value; + } + + public string ColumnName { get; set; } + public object? Value { get; set; } +} + +public interface IUpdateClause : IClause +{ + List<UpdateItem> Items { get; } + (string sql, ParamList parameters) GenerateSql(); +} + +public class UpdateClause : IUpdateClause +{ + public List<UpdateItem> Items { get; } = new List<UpdateItem>(); + + public UpdateClause(IEnumerable<UpdateItem> items) + { + Items.AddRange(items); + } + + public UpdateClause(params UpdateItem[] items) + { + Items.AddRange(items); + } + + public UpdateClause Add(params UpdateItem[] items) + { + Items.AddRange(items); + return this; + } + + public UpdateClause Add(string column, object? value) + { + return Add(new UpdateItem(column, value)); + } + + public static UpdateClause Create(params UpdateItem[] items) + { + return new UpdateClause(items); + } + + public List<string> GetRelatedColumns() + { + return Items.Select(i => i.ColumnName).ToList(); + } + + public (string sql, ParamList parameters) GenerateSql() + { + var parameters = new ParamList(); + + StringBuilder result = new StringBuilder(); + + foreach (var item in Items) + { + if (result.Length > 0) + { + result.Append(", "); + } + + var parameterName = parameters.AddRandomNameParameter(item.Value, item.ColumnName); + result.Append($"{item.ColumnName} = @{parameterName}"); + } + + return (result.ToString(), parameters); + } +} diff --git a/dropped/docker/crupest-api/CrupestApi/CrupestApi.Commons/Crud/UserException.cs b/dropped/docker/crupest-api/CrupestApi/CrupestApi.Commons/Crud/UserException.cs new file mode 100644 index 0000000..1a10b97 --- /dev/null +++ b/dropped/docker/crupest-api/CrupestApi/CrupestApi.Commons/Crud/UserException.cs @@ -0,0 +1,15 @@ +namespace CrupestApi.Commons.Crud; + +/// <summary> +/// This exception means the exception is caused by user and can be safely shown to user. +/// </summary> +[System.Serializable] +public class UserException : Exception +{ + public UserException() { } + public UserException(string message) : base(message) { } + public UserException(string message, System.Exception inner) : base(message, inner) { } + protected UserException( + System.Runtime.Serialization.SerializationInfo info, + System.Runtime.Serialization.StreamingContext context) : base(info, context) { } +}
\ No newline at end of file diff --git a/dropped/docker/crupest-api/CrupestApi/CrupestApi.Commons/Crud/WhereClause.cs b/dropped/docker/crupest-api/CrupestApi/CrupestApi.Commons/Crud/WhereClause.cs new file mode 100644 index 0000000..de69f2f --- /dev/null +++ b/dropped/docker/crupest-api/CrupestApi/CrupestApi.Commons/Crud/WhereClause.cs @@ -0,0 +1,182 @@ +using System.Text; + +namespace CrupestApi.Commons.Crud; + +public interface IWhereClause : IClause +{ + // Does not contain "WHERE" keyword! + (string sql, ParamList parameters) GenerateSql(string? dbProviderId = null); +} + +public class CompositeWhereClause : IWhereClause +{ + public CompositeWhereClause(string concatOp, bool parenthesesSubclause, params IWhereClause[] subclauses) + { + ConcatOp = concatOp; + ParenthesesSubclause = parenthesesSubclause; + Subclauses = subclauses.ToList(); + } + + public string ConcatOp { get; } + public bool ParenthesesSubclause { get; } + public List<IWhereClause> Subclauses { get; } + + public CompositeWhereClause Eq(string column, object? value) + { + Subclauses.Add(SimpleCompareWhereClause.Eq(column, value)); + return this; + } + + public (string sql, ParamList parameters) GenerateSql(string? dbProviderId = null) + { + var parameters = new ParamList(); + var sql = new StringBuilder(); + var subclauses = GetSubclauses(); + if (subclauses is null) return ("", new()); + var first = true; + foreach (var subclause in Subclauses) + { + var (subSql, subParameters) = subclause.GenerateSql(dbProviderId); + if (subSql is null) continue; + if (first) + { + first = false; + } + else + { + sql.Append($" {ConcatOp} "); + } + if (ParenthesesSubclause) + { + sql.Append("("); + } + sql.Append(subSql); + if (ParenthesesSubclause) + { + sql.Append(")"); + } + parameters.AddRange(subParameters); + } + return (sql.ToString(), parameters); + } + + public object GetSubclauses() + { + return Subclauses; + } +} + +public class AndWhereClause : CompositeWhereClause +{ + public AndWhereClause(params IWhereClause[] clauses) + : this(true, clauses) + { + + } + + public AndWhereClause(bool parenthesesSubclause, params IWhereClause[] clauses) + : base("AND", parenthesesSubclause, clauses) + { + + } + + public static AndWhereClause Create(params IWhereClause[] clauses) + { + return new AndWhereClause(clauses); + } +} + +public class OrWhereClause : CompositeWhereClause +{ + public OrWhereClause(params IWhereClause[] clauses) + : this(true, clauses) + { + + } + + public OrWhereClause(bool parenthesesSubclause, params IWhereClause[] clauses) + : base("OR", parenthesesSubclause, clauses) + { + + } + + public static OrWhereClause Create(params IWhereClause[] clauses) + { + return new OrWhereClause(clauses); + } +} + +// It's simple because it only compare column and value but not expressions. +public class SimpleCompareWhereClause : IWhereClause +{ + public string Column { get; } + public string Operator { get; } + public object? Value { get; } + + public List<string> GetRelatedColumns() + { + return new List<string> { Column }; + } + + // It's user's responsibility to keep column safe, with proper escape. + public SimpleCompareWhereClause(string column, string op, object? value) + { + Column = column; + Operator = op; + Value = value; + } + + public static SimpleCompareWhereClause Create(string column, string op, object? value) + { + return new SimpleCompareWhereClause(column, op, value); + } + + public static SimpleCompareWhereClause Eq(string column, object? 
value) + { + return new SimpleCompareWhereClause(column, "=", value); + } + + public static SimpleCompareWhereClause Neq(string column, object? value) + { + return new SimpleCompareWhereClause(column, "<>", value); + } + + public static SimpleCompareWhereClause Gt(string column, object? value) + { + return new SimpleCompareWhereClause(column, ">", value); + } + + public static SimpleCompareWhereClause Gte(string column, object? value) + { + return new SimpleCompareWhereClause(column, ">=", value); + } + + public static SimpleCompareWhereClause Lt(string column, object? value) + { + return new SimpleCompareWhereClause(column, "<", value); + } + + public static SimpleCompareWhereClause Lte(string column, object? value) + { + return new SimpleCompareWhereClause(column, "<=", value); + } + + public (string sql, ParamList parameters) GenerateSql(string? dbProviderId = null) + { + var parameters = new ParamList(); + var parameterName = parameters.AddRandomNameParameter(Value, Column); + return ($"{Column} {Operator} @{parameterName}", parameters); + } +} + +public class WhereClause : AndWhereClause +{ + public WhereClause() + { + } + + public void Add(IWhereClause subclause) + { + Subclauses.Add(subclause); + } +} diff --git a/dropped/docker/crupest-api/CrupestApi/CrupestApi.Commons/CrupestApi.Commons.csproj b/dropped/docker/crupest-api/CrupestApi/CrupestApi.Commons/CrupestApi.Commons.csproj new file mode 100644 index 0000000..8e291fa --- /dev/null +++ b/dropped/docker/crupest-api/CrupestApi/CrupestApi.Commons/CrupestApi.Commons.csproj @@ -0,0 +1,16 @@ +<Project Sdk="Microsoft.NET.Sdk.Web">
+
+ <PropertyGroup>
+ <TargetFramework>net7.0</TargetFramework>
+ <TargetType>library</TargetType>
+ <Nullable>enable</Nullable>
+ <ImplicitUsings>enable</ImplicitUsings>
+ <SelfContained>false</SelfContained>
+ </PropertyGroup>
+
+ <ItemGroup>
+ <PackageReference Include="Dapper" Version="2.0.123" />
+ <PackageReference Include="Microsoft.Data.Sqlite" Version="7.0.0" />
+ </ItemGroup>
+
+</Project>
\ No newline at end of file diff --git a/dropped/docker/crupest-api/CrupestApi/CrupestApi.Commons/EntityNotExistException.cs b/dropped/docker/crupest-api/CrupestApi/CrupestApi.Commons/EntityNotExistException.cs new file mode 100644 index 0000000..0e1f4f4 --- /dev/null +++ b/dropped/docker/crupest-api/CrupestApi/CrupestApi.Commons/EntityNotExistException.cs @@ -0,0 +1,8 @@ +namespace CrupestApi.Commons; + +public class EntityNotExistException : Exception +{ + public EntityNotExistException() { } + public EntityNotExistException(string message) : base(message) { } + public EntityNotExistException(string message, Exception inner) : base(message, inner) { } +} diff --git a/dropped/docker/crupest-api/CrupestApi/CrupestApi.Commons/HttpContextExtensions.cs b/dropped/docker/crupest-api/CrupestApi/CrupestApi.Commons/HttpContextExtensions.cs new file mode 100644 index 0000000..a0b2d89 --- /dev/null +++ b/dropped/docker/crupest-api/CrupestApi/CrupestApi.Commons/HttpContextExtensions.cs @@ -0,0 +1,113 @@ +using System.Text.Json; +using CrupestApi.Commons.Secrets; +using Microsoft.Extensions.Options; + +namespace CrupestApi.Commons; + +public delegate void HttpResponseAction(HttpResponse response); + +public class MessageBody +{ + public MessageBody(string message) + { + Message = message; + } + + public string Message { get; set; } +} + +public static class CrupestApiJsonExtensions +{ + public static IServiceCollection AddJsonOptions(this IServiceCollection services) + { + services.AddOptions<JsonSerializerOptions>(); + services.Configure<JsonSerializerOptions>(config => + { + config.AllowTrailingCommas = true; + config.PropertyNameCaseInsensitive = true; + config.PropertyNamingPolicy = JsonNamingPolicy.CamelCase; + }); + + return services; + } + + public static async Task<JsonDocument> ReadJsonAsync(this HttpRequest request) + { + var jsonOptions = request.HttpContext.RequestServices.GetRequiredService<IOptionsSnapshot<JsonSerializerOptions>>(); + using var stream = request.Body; + var body = await JsonSerializer.DeserializeAsync<JsonDocument>(stream, jsonOptions.Value); + return body!; + } + + public static async Task WriteJsonAsync<T>(this HttpResponse response, T bodyObject, int statusCode = 200, HttpResponseAction? beforeWriteBody = null, CancellationToken cancellationToken = default) + { + var jsonOptions = response.HttpContext.RequestServices.GetRequiredService<IOptionsSnapshot<JsonSerializerOptions>>(); + byte[] json = JsonSerializer.SerializeToUtf8Bytes<T>(bodyObject, jsonOptions.Value); + + var byteCount = json.Length; + + response.StatusCode = statusCode; + response.Headers.ContentType = "application/json; charset=utf-8"; + response.Headers.ContentLength = byteCount; + + if (beforeWriteBody is not null) + { + beforeWriteBody(response); + } + + await response.Body.WriteAsync(json, cancellationToken); + } + + public static async Task WriteMessageAsync(this HttpResponse response, string message, int statusCode = 400, HttpResponseAction? beforeWriteBody = null, CancellationToken cancellationToken = default) + { + await response.WriteJsonAsync(new MessageBody(message), statusCode: statusCode, beforeWriteBody, cancellationToken); + } + + public static Task ResponseJsonAsync<T>(this HttpContext context, T bodyObject, int statusCode = 200, HttpResponseAction? 
beforeWriteBody = null, CancellationToken cancellationToken = default) + { + return context.Response.WriteJsonAsync<T>(bodyObject, statusCode, beforeWriteBody, cancellationToken); + } + + public static Task ResponseMessageAsync(this HttpContext context, string message, int statusCode = 400, HttpResponseAction? beforeWriteBody = null, CancellationToken cancellationToken = default) + { + return context.Response.WriteMessageAsync(message, statusCode, beforeWriteBody, cancellationToken); + } + + public static string? GetToken(this HttpRequest request) + { + var token = request.Headers["Authorization"].ToString(); + if (token.StartsWith("Bearer ")) + { + token = token.Substring("Bearer ".Length); + return token; + } + + if (request.Query.TryGetValue("token", out var tokenValues)) + { + return tokenValues.Last(); + } + + return null; + } + + public static bool RequirePermission(this HttpContext context, string? permission) + { + if (permission is null) return true; + + var token = context.Request.GetToken(); + if (token is null) + { + context.ResponseMessageAsync("Unauthorized", 401); + return false; + } + + var secretService = context.RequestServices.GetRequiredService<ISecretService>(); + var permissions = secretService.GetPermissions(token); + if (!permissions.Contains(permission)) + { + context.ResponseMessageAsync("Forbidden", 403); + return false; + } + return true; + } +} diff --git a/dropped/docker/crupest-api/CrupestApi/CrupestApi.Commons/Secrets/ISecretService.cs b/dropped/docker/crupest-api/CrupestApi/CrupestApi.Commons/Secrets/ISecretService.cs new file mode 100644 index 0000000..83025f8 --- /dev/null +++ b/dropped/docker/crupest-api/CrupestApi/CrupestApi.Commons/Secrets/ISecretService.cs @@ -0,0 +1,8 @@ +namespace CrupestApi.Commons.Secrets; + +public interface ISecretService +{ + void CreateTestSecret(string key, string secret); + + List<string> GetPermissions(string secret); +} diff --git a/dropped/docker/crupest-api/CrupestApi/CrupestApi.Commons/Secrets/SecretInfo.cs b/dropped/docker/crupest-api/CrupestApi/CrupestApi.Commons/Secrets/SecretInfo.cs new file mode 100644 index 0000000..c3a4de0 --- /dev/null +++ b/dropped/docker/crupest-api/CrupestApi/CrupestApi.Commons/Secrets/SecretInfo.cs @@ -0,0 +1,48 @@ +using System.Security.Cryptography; +using System.Text; +using CrupestApi.Commons.Crud; + +namespace CrupestApi.Commons.Secrets; + +public class SecretInfo +{ + [Column(NotNull = true)] + public string Key { get; set; } = default!; + [Column(NotNull = true, NoUpdate = true, ActAsKey = true)] + public string Secret { get; set; } = default!; + [Column(DefaultEmptyForString = true)] + public string Description { get; set; } = default!; + [Column(NotNull = false)] + public DateTime? 
ExpireTime { get; set; } + [Column(NotNull = true, DefaultValue = false)] + public bool Revoked { get; set; } + [Column(NotNull = true)] + public DateTime CreateTime { get; set; } + + private static RandomNumberGenerator RandomNumberGenerator = RandomNumberGenerator.Create(); + + private static string GenerateRandomKey(int length) + { + const string chars = "ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789"; + var result = new StringBuilder(length); + lock (RandomNumberGenerator) + { + for (int i = 0; i < length; i++) + { + result.Append(chars[RandomNumberGenerator.GetInt32(chars.Length)]); + } + } + return result.ToString(); + } + + + public static string SecretDefaultValueGenerator() + { + return GenerateRandomKey(16); + } + + public static DateTime CreateTimeDefaultValueGenerator() + { + return DateTime.UtcNow; + } +} diff --git a/dropped/docker/crupest-api/CrupestApi/CrupestApi.Commons/Secrets/SecretService.cs b/dropped/docker/crupest-api/CrupestApi/CrupestApi.Commons/Secrets/SecretService.cs new file mode 100644 index 0000000..c693d8d --- /dev/null +++ b/dropped/docker/crupest-api/CrupestApi/CrupestApi.Commons/Secrets/SecretService.cs @@ -0,0 +1,48 @@ +using System.Data; +using CrupestApi.Commons.Crud; +using CrupestApi.Commons.Crud.Migrations; + +namespace CrupestApi.Commons.Secrets; + +public class SecretService : CrudService<SecretInfo>, ISecretService +{ + private readonly ILogger<SecretService> _logger; + + public SecretService(ITableInfoFactory tableInfoFactory, IDbConnectionFactory dbConnectionFactory, IDatabaseMigrator migrator, ILoggerFactory loggerFactory) + : base(tableInfoFactory, dbConnectionFactory, migrator, loggerFactory) + { + _logger = loggerFactory.CreateLogger<SecretService>(); + } + + protected override void AfterMigrate(IDbConnection connection, TableInfo table) + { + if (table.SelectCount(connection) == 0) + { + _logger.LogInformation("No secrets found, insert default secrets."); + using var transaction = connection.BeginTransaction(); + var insertClause = InsertClause.Create() + .Add(nameof(SecretInfo.Key), SecretsConstants.SecretManagementKey) + .Add(nameof(SecretInfo.Secret), "crupest") + .Add(nameof(SecretInfo.Description), "This is the init key. 
Please revoke it immediately after creating a new one."); + _table.Insert(connection, insertClause, out var _); + transaction.Commit(); + } + } + + public void CreateTestSecret(string key, string secret) + { + var connection = _dbConnection; + var insertClause = InsertClause.Create() + .Add(nameof(SecretInfo.Key), key) + .Add(nameof(SecretInfo.Secret), secret) + .Add(nameof(SecretInfo.Description), "Test secret."); + _table.Insert(connection, insertClause, out var _); + } + + public List<string> GetPermissions(string secret) + { + var list = _table.Select<SecretInfo>(_dbConnection, + where: WhereClause.Create().Eq(nameof(SecretInfo.Secret), secret)); + return list.Select(x => x.Key).ToList(); + } +} diff --git a/dropped/docker/crupest-api/CrupestApi/CrupestApi.Commons/Secrets/SecretServiceCollectionExtensions.cs b/dropped/docker/crupest-api/CrupestApi/CrupestApi.Commons/Secrets/SecretServiceCollectionExtensions.cs new file mode 100644 index 0000000..a9c0e5f --- /dev/null +++ b/dropped/docker/crupest-api/CrupestApi/CrupestApi.Commons/Secrets/SecretServiceCollectionExtensions.cs @@ -0,0 +1,12 @@ +using Microsoft.Extensions.DependencyInjection.Extensions; + +namespace CrupestApi.Commons.Secrets; + +public static class SecretServiceCollectionExtensions +{ + public static IServiceCollection AddSecrets(this IServiceCollection services) + { + services.TryAddScoped<ISecretService, SecretService>(); + return services; + } +} diff --git a/dropped/docker/crupest-api/CrupestApi/CrupestApi.Commons/Secrets/SecretsConstants.cs b/dropped/docker/crupest-api/CrupestApi/CrupestApi.Commons/Secrets/SecretsConstants.cs new file mode 100644 index 0000000..207cc45 --- /dev/null +++ b/dropped/docker/crupest-api/CrupestApi/CrupestApi.Commons/Secrets/SecretsConstants.cs @@ -0,0 +1,6 @@ +namespace CrupestApi.Commons.Secrets; + +public static class SecretsConstants +{ + public const string SecretManagementKey = "crupest.secrets.management"; +} diff --git a/dropped/docker/crupest-api/CrupestApi/CrupestApi.Files/CrupestApi.Files.csproj b/dropped/docker/crupest-api/CrupestApi/CrupestApi.Files/CrupestApi.Files.csproj new file mode 100644 index 0000000..2221809 --- /dev/null +++ b/dropped/docker/crupest-api/CrupestApi/CrupestApi.Files/CrupestApi.Files.csproj @@ -0,0 +1,20 @@ +<Project Sdk="Microsoft.NET.Sdk.Web">
+
+ <ItemGroup>
+ <ProjectReference Include="..\CrupestApi.Commons\CrupestApi.Commons.csproj" />
+ </ItemGroup>
+
+ <ItemGroup>
+ <PackageReference Include="Dapper" Version="2.0.123" />
+ <PackageReference Include="Microsoft.Data.Sqlite" Version="7.0.0" />
+ </ItemGroup>
+
+ <PropertyGroup>
+ <TargetFramework>net7.0</TargetFramework>
+ <TargetType>library</TargetType>
+ <Nullable>enable</Nullable>
+ <ImplicitUsings>enable</ImplicitUsings>
+ <SelfContained>false</SelfContained>
+ </PropertyGroup>
+
+</Project>
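The pipelines described in the CRUD notes above are driven through TableInfo together with the clause types (InsertClause, UpdateClause, WhereClause). The following sketch is not code from this repository; it only illustrates, under stated assumptions, how the pieces shown in this diff fit together. NoteEntity, tableInfoFactory, and connection are hypothetical, and the ColumnAttribute options mirror the ones SecretInfo uses.

using System.Data;
using CrupestApi.Commons.Crud;

// Hypothetical entity; the attribute options copy the style used by SecretInfo above.
public class NoteEntity
{
    [Column(NotNull = true, ActAsKey = true)]
    public string Name { get; set; } = default!;

    [Column(DefaultEmptyForString = true)]
    public string Content { get; set; } = default!;
}

public static class CrudUsageSketch
{
    public static List<NoteEntity> Run(ITableInfoFactory tableInfoFactory, IDbConnection connection)
    {
        // Builds (or returns the cached) TableInfo; a primary key column "Id" is added
        // automatically because the entity does not declare one.
        TableInfo table = tableInfoFactory.Get(typeof(NoteEntity));

        // Insert pipeline: omitted columns go through default value generators and null checks.
        table.Insert(connection,
            InsertClause.Create().Add(nameof(NoteEntity.Name), "first note"),
            out _);

        // Select pipeline: clause columns are checked, SQL plus parameters are generated,
        // and the dynamic rows are mapped back onto the entity type.
        return table.Select<NoteEntity>(connection,
            where: WhereClause.Create().Eq(nameof(NoteEntity.Name), "first note"));
    }
}

SecretService earlier in this diff follows the same pattern from inside its CrudService<SecretInfo> base.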
diff --git a/dropped/docker/crupest-api/CrupestApi/CrupestApi.Files/FilesService.cs b/dropped/docker/crupest-api/CrupestApi/CrupestApi.Files/FilesService.cs new file mode 100644 index 0000000..c851a92 --- /dev/null +++ b/dropped/docker/crupest-api/CrupestApi/CrupestApi.Files/FilesService.cs @@ -0,0 +1,6 @@ +namespace CrupestApi.Files; + +public class FilesService +{ + +}
\ No newline at end of file diff --git a/dropped/docker/crupest-api/CrupestApi/CrupestApi.Secrets/CrupestApi.Secrets.csproj b/dropped/docker/crupest-api/CrupestApi/CrupestApi.Secrets/CrupestApi.Secrets.csproj new file mode 100644 index 0000000..70c83f3 --- /dev/null +++ b/dropped/docker/crupest-api/CrupestApi/CrupestApi.Secrets/CrupestApi.Secrets.csproj @@ -0,0 +1,20 @@ +<Project Sdk="Microsoft.NET.Sdk.Web">
+
+ <ItemGroup>
+ <ProjectReference Include="..\CrupestApi.Commons\CrupestApi.Commons.csproj" />
+ </ItemGroup>
+
+ <ItemGroup>
+ <PackageReference Include="Dapper" Version="2.0.123" />
+ <PackageReference Include="Microsoft.Data.Sqlite" Version="7.0.0" />
+ </ItemGroup>
+
+ <PropertyGroup>
+ <TargetFramework>net7.0</TargetFramework>
+ <TargetType>library</TargetType>
+ <Nullable>enable</Nullable>
+ <ImplicitUsings>enable</ImplicitUsings>
+ <SelfContained>false</SelfContained>
+ </PropertyGroup>
+
+</Project>
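The HttpContextExtensions earlier in this diff (GetToken, RequirePermission, ResponseJsonAsync) are written for plain RequestDelegate handlers, the same style MapTodos uses further down. A hypothetical handler sketch follows; the route, the permission name, and the ExampleEndpoint class are invented for illustration and are not taken from the repository.

using CrupestApi.Commons;

public static class ExampleEndpoint
{
    public static void Map(WebApplication app)
    {
        app.MapGet("/api/example", async (context) =>
        {
            // RequirePermission reads the bearer token (or the ?token= query value), asks
            // ISecretService for its permissions, and writes the 401/403 message body itself.
            if (!context.RequirePermission("crupest.example.read"))
            {
                return;
            }

            await context.ResponseJsonAsync(new MessageBody("hello"));
        });
    }
}

MapCrud in SecretsExtensions below is handed a permission name (SecretsConstants.SecretManagementKey) that is presumably checked in the same way.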
diff --git a/dropped/docker/crupest-api/CrupestApi/CrupestApi.Secrets/SecretsExtensions.cs b/dropped/docker/crupest-api/CrupestApi/CrupestApi.Secrets/SecretsExtensions.cs new file mode 100644 index 0000000..e09887b --- /dev/null +++ b/dropped/docker/crupest-api/CrupestApi/CrupestApi.Secrets/SecretsExtensions.cs @@ -0,0 +1,19 @@ +using CrupestApi.Commons.Secrets; +using CrupestApi.Commons.Crud; + +namespace CrupestApi.Secrets; + +public static class SecretsExtensions +{ + public static IServiceCollection AddSecrets(this IServiceCollection services) + { + services.AddCrud<SecretInfo, SecretService>(); + return services; + } + + public static WebApplication MapSecrets(this WebApplication webApplication, string path = "/api/secrets") + { + webApplication.MapCrud<SecretInfo>(path, SecretsConstants.SecretManagementKey); + return webApplication; + } +} diff --git a/dropped/docker/crupest-api/CrupestApi/CrupestApi.Todos/CrupestApi.Todos.csproj b/dropped/docker/crupest-api/CrupestApi/CrupestApi.Todos/CrupestApi.Todos.csproj new file mode 100644 index 0000000..86460e3 --- /dev/null +++ b/dropped/docker/crupest-api/CrupestApi/CrupestApi.Todos/CrupestApi.Todos.csproj @@ -0,0 +1,15 @@ +<Project Sdk="Microsoft.NET.Sdk.Web">
+
+ <ItemGroup>
+ <ProjectReference Include="..\CrupestApi.Commons\CrupestApi.Commons.csproj" />
+ </ItemGroup>
+
+ <PropertyGroup>
+ <TargetFramework>net7.0</TargetFramework>
+ <TargetType>library</TargetType>
+ <Nullable>enable</Nullable>
+ <ImplicitUsings>enable</ImplicitUsings>
+ <SelfContained>false</SelfContained>
+ </PropertyGroup>
+
+</Project>
diff --git a/dropped/docker/crupest-api/CrupestApi/CrupestApi.Todos/TodosConfiguration.cs b/dropped/docker/crupest-api/CrupestApi/CrupestApi.Todos/TodosConfiguration.cs new file mode 100644 index 0000000..e8160d2 --- /dev/null +++ b/dropped/docker/crupest-api/CrupestApi/CrupestApi.Todos/TodosConfiguration.cs @@ -0,0 +1,14 @@ +using System.ComponentModel.DataAnnotations; + +namespace CrupestApi.Todos; + +public class TodosConfiguration +{ + [Required] + public string Username { get; set; } = default!; + [Required] + public int ProjectNumber { get; set; } = default!; + [Required] + public string Token { get; set; } = default!; + public int Count { get; set; } +}
\ No newline at end of file diff --git a/dropped/docker/crupest-api/CrupestApi/CrupestApi.Todos/TodosService.cs b/dropped/docker/crupest-api/CrupestApi/CrupestApi.Todos/TodosService.cs new file mode 100644 index 0000000..5839086 --- /dev/null +++ b/dropped/docker/crupest-api/CrupestApi/CrupestApi.Todos/TodosService.cs @@ -0,0 +1,163 @@ +using System.Net.Http.Headers; +using System.Net.Mime; +using System.Text; +using System.Text.Json; +using Microsoft.Extensions.Options; + +namespace CrupestApi.Todos; + +public class TodosItem +{ + public string Status { get; set; } = default!; + public string Title { get; set; } = default!; + public bool Closed { get; set; } + public string Color { get; set; } = default!; +} + +public class TodosService +{ + private readonly IOptionsSnapshot<TodosConfiguration> _options; + private readonly ILogger<TodosService> _logger; + + public TodosService(IOptionsSnapshot<TodosConfiguration> options, ILogger<TodosService> logger) + { + _options = options; + _logger = logger; + } + + private static string CreateGraphQLQuery(TodosConfiguration todoConfiguration) + { + return $$""" +{ + user(login: "{{todoConfiguration.Username}}") { + projectV2(number: {{todoConfiguration.ProjectNumber}}) { + items(last: {{todoConfiguration.Count}}) { + nodes { + fieldValueByName(name: "Status") { + ... on ProjectV2ItemFieldSingleSelectValue { + name + } + } + content { + __typename + ... on Issue { + title + closed + } + ... on PullRequest { + title + closed + } + ... on DraftIssue { + title + } + } + } + } + } + } +} +"""; + } + + + public async Task<List<TodosItem>> GetTodosAsync() + { + var todoOptions = _options.Value; + if (todoOptions is null) + { + throw new Exception("Fail to get todos configuration."); + } + + _logger.LogInformation("Username: {}; ProjectNumber: {}; Count: {}", todoOptions.Username, todoOptions.ProjectNumber, todoOptions.Count); + _logger.LogInformation("Getting todos from GitHub GraphQL API..."); + + using var httpClient = new HttpClient(); + + using var requestContent = new StringContent(JsonSerializer.Serialize(new + { + query = CreateGraphQLQuery(todoOptions) + })); + requestContent.Headers.ContentType = new MediaTypeHeaderValue(MediaTypeNames.Application.Json, Encoding.UTF8.WebName); + + using var request = new HttpRequestMessage(HttpMethod.Post, "https://api.github.com/graphql"); + request.Content = requestContent; + request.Headers.Authorization = new AuthenticationHeaderValue("Bearer", todoOptions.Token); + request.Headers.TryAddWithoutValidation("User-Agent", todoOptions.Username); + + using var response = await httpClient.SendAsync(request); + var responseBody = await response.Content.ReadAsStringAsync(); + + _logger.LogInformation("GitHub server returned status code: {}", response.StatusCode); + _logger.LogInformation("GitHub server returned body: {}", responseBody); + + if (response.IsSuccessStatusCode) + { + using var responseJson = JsonSerializer.Deserialize<JsonDocument>(responseBody); + if (responseJson is null) + { + throw new Exception("Fail to deserialize response body."); + } + + var nodes = responseJson.RootElement.GetProperty("data").GetProperty("user").GetProperty("projectV2").GetProperty("items").GetProperty("nodes").EnumerateArray(); + + var result = new List<TodosItem>(); + + foreach (var node in nodes) + { + var content = node.GetProperty("content"); + var title = content.GetProperty("title").GetString(); + if (title is null) + { + throw new Exception("Fail to get title."); + } + + bool done = false; + + var statusField = 
node.GetProperty("fieldValueByName"); + if (statusField.ValueKind != JsonValueKind.Null) // if there is a "Status" field + { + var statusName = statusField.GetProperty("name").GetString(); + if (statusName is null) + { + throw new Exception("Fail to get status."); + } + + // if name is "Done", then it is closed, otherwise we check if the issue is closed + if (statusName.Equals("Done", StringComparison.OrdinalIgnoreCase)) + { + done = true; + } + } + + JsonElement closedElement; + // if item has a "closed" field, then it is a pull request or an issue, and we check if it is closed + if (content.TryGetProperty("closed", out closedElement) && closedElement.GetBoolean()) + { + done = true; + } + + // If item "Status" field is "Done' or item is a pull request or issue and it is closed, then it is done. + // Otherwise it is not closed. Like: + // 1. it is a draft issue with no "Status" field or "Status" field is not "Done" + // 2. it is a pull request or issue with no "Status" field or "Status" field is not "Done" and it is not closed + + result.Add(new TodosItem + { + Title = title, + Status = done ? "Done" : "Todo", + Closed = done, + Color = done ? "green" : "blue" + }); + } + + return result; + } + else + { + const string message = "Fail to get todos from GitHub."; + _logger.LogError(message); + throw new Exception(message); + } + } +} diff --git a/dropped/docker/crupest-api/CrupestApi/CrupestApi.Todos/TodosServiceCollectionExtensions.cs b/dropped/docker/crupest-api/CrupestApi/CrupestApi.Todos/TodosServiceCollectionExtensions.cs new file mode 100644 index 0000000..a49d55d --- /dev/null +++ b/dropped/docker/crupest-api/CrupestApi/CrupestApi.Todos/TodosServiceCollectionExtensions.cs @@ -0,0 +1,21 @@ +using Microsoft.Extensions.DependencyInjection.Extensions; + +namespace CrupestApi.Todos; + +public static class TodosServiceCollectionExtensions +{ + public static IServiceCollection AddTodos(this IServiceCollection services) + { + services.AddOptions<TodosConfiguration>().BindConfiguration("CrupestApi:Todos"); + services.PostConfigure<TodosConfiguration>(config => + { + if (config.Count == 0) + { + config.Count = 20; + } + }); + services.TryAddScoped<TodosService>(); + return services; + } +} + diff --git a/dropped/docker/crupest-api/CrupestApi/CrupestApi.Todos/TodosWebApplicationExtensions.cs b/dropped/docker/crupest-api/CrupestApi/CrupestApi.Todos/TodosWebApplicationExtensions.cs new file mode 100644 index 0000000..0ff05a0 --- /dev/null +++ b/dropped/docker/crupest-api/CrupestApi/CrupestApi.Todos/TodosWebApplicationExtensions.cs @@ -0,0 +1,32 @@ +using CrupestApi.Commons; + +namespace CrupestApi.Todos; + +public static class TodosWebApplicationExtensions +{ + public static WebApplication MapTodos(this WebApplication app, string path) + { + if (app is null) + { + throw new ArgumentNullException(nameof(app)); + } + + app.MapGet(path, async (context) => + { + var todosService = context.RequestServices.GetRequiredService<TodosService>(); + + try + { + var todos = await todosService.GetTodosAsync(); + await context.Response.WriteJsonAsync(todos); + + } + catch (Exception e) + { + await context.Response.WriteMessageAsync(e.Message, statusCode: StatusCodes.Status503ServiceUnavailable); + } + }); + + return app; + } +}
\ No newline at end of file diff --git a/dropped/docker/crupest-api/CrupestApi/CrupestApi.sln b/dropped/docker/crupest-api/CrupestApi/CrupestApi.sln new file mode 100644 index 0000000..ebfd960 --- /dev/null +++ b/dropped/docker/crupest-api/CrupestApi/CrupestApi.sln @@ -0,0 +1,46 @@ +
+Microsoft Visual Studio Solution File, Format Version 12.00
+# Visual Studio Version 17
+VisualStudioVersion = 17.0.31903.59
+MinimumVisualStudioVersion = 10.0.40219.1
+Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "CrupestApi", "CrupestApi\CrupestApi.csproj", "{E30916BB-08F9-45F0-BC1A-69B66AE79913}"
+EndProject
+Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "CrupestApi.Todos", "CrupestApi.Todos\CrupestApi.Todos.csproj", "{BF9F5F71-AE65-4896-8E6F-FE0D4AD0E7D1}"
+EndProject
+Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "CrupestApi.Secrets", "CrupestApi.Secrets\CrupestApi.Secrets.csproj", "{9A7CC9F9-70CB-408A-ADFC-5119C0BDB236}"
+EndProject
+Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "CrupestApi.Commons", "CrupestApi.Commons\CrupestApi.Commons.csproj", "{38083CCA-E56C-4D24-BAB6-EEC30E0F478F}"
+EndProject
+Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "CrupestApi.Commons.Tests", "CrupestApi.Commons.Tests\CrupestApi.Commons.Tests.csproj", "{0D0304BF-6A18-444C-BAF4-6ABFF98A0F77}"
+EndProject
+Global
+ GlobalSection(SolutionConfigurationPlatforms) = preSolution
+ Debug|Any CPU = Debug|Any CPU
+ Release|Any CPU = Release|Any CPU
+ EndGlobalSection
+ GlobalSection(SolutionProperties) = preSolution
+ HideSolutionNode = FALSE
+ EndGlobalSection
+ GlobalSection(ProjectConfigurationPlatforms) = postSolution
+ {E30916BB-08F9-45F0-BC1A-69B66AE79913}.Debug|Any CPU.ActiveCfg = Debug|Any CPU
+ {E30916BB-08F9-45F0-BC1A-69B66AE79913}.Debug|Any CPU.Build.0 = Debug|Any CPU
+ {E30916BB-08F9-45F0-BC1A-69B66AE79913}.Release|Any CPU.ActiveCfg = Release|Any CPU
+ {E30916BB-08F9-45F0-BC1A-69B66AE79913}.Release|Any CPU.Build.0 = Release|Any CPU
+ {BF9F5F71-AE65-4896-8E6F-FE0D4AD0E7D1}.Debug|Any CPU.ActiveCfg = Debug|Any CPU
+ {BF9F5F71-AE65-4896-8E6F-FE0D4AD0E7D1}.Debug|Any CPU.Build.0 = Debug|Any CPU
+ {BF9F5F71-AE65-4896-8E6F-FE0D4AD0E7D1}.Release|Any CPU.ActiveCfg = Release|Any CPU
+ {BF9F5F71-AE65-4896-8E6F-FE0D4AD0E7D1}.Release|Any CPU.Build.0 = Release|Any CPU
+ {9A7CC9F9-70CB-408A-ADFC-5119C0BDB236}.Debug|Any CPU.ActiveCfg = Debug|Any CPU
+ {9A7CC9F9-70CB-408A-ADFC-5119C0BDB236}.Debug|Any CPU.Build.0 = Debug|Any CPU
+ {9A7CC9F9-70CB-408A-ADFC-5119C0BDB236}.Release|Any CPU.ActiveCfg = Release|Any CPU
+ {9A7CC9F9-70CB-408A-ADFC-5119C0BDB236}.Release|Any CPU.Build.0 = Release|Any CPU
+ {38083CCA-E56C-4D24-BAB6-EEC30E0F478F}.Debug|Any CPU.ActiveCfg = Debug|Any CPU
+ {38083CCA-E56C-4D24-BAB6-EEC30E0F478F}.Debug|Any CPU.Build.0 = Debug|Any CPU
+ {38083CCA-E56C-4D24-BAB6-EEC30E0F478F}.Release|Any CPU.ActiveCfg = Release|Any CPU
+ {38083CCA-E56C-4D24-BAB6-EEC30E0F478F}.Release|Any CPU.Build.0 = Release|Any CPU
+ {0D0304BF-6A18-444C-BAF4-6ABFF98A0F77}.Debug|Any CPU.ActiveCfg = Debug|Any CPU
+ {0D0304BF-6A18-444C-BAF4-6ABFF98A0F77}.Debug|Any CPU.Build.0 = Debug|Any CPU
+ {0D0304BF-6A18-444C-BAF4-6ABFF98A0F77}.Release|Any CPU.ActiveCfg = Release|Any CPU
+ {0D0304BF-6A18-444C-BAF4-6ABFF98A0F77}.Release|Any CPU.Build.0 = Release|Any CPU
+ EndGlobalSection
+EndGlobal
diff --git a/dropped/docker/crupest-api/CrupestApi/CrupestApi/CrupestApi.csproj b/dropped/docker/crupest-api/CrupestApi/CrupestApi/CrupestApi.csproj new file mode 100644 index 0000000..5954f00 --- /dev/null +++ b/dropped/docker/crupest-api/CrupestApi/CrupestApi/CrupestApi.csproj @@ -0,0 +1,17 @@ +<Project Sdk="Microsoft.NET.Sdk.Web">
+
+ <ItemGroup>
+ <ProjectReference Include="..\CrupestApi.Todos\CrupestApi.Todos.csproj" />
+ <ProjectReference Include="..\CrupestApi.Files\CrupestApi.Files.csproj" />
+ <ProjectReference Include="..\CrupestApi.Commons\CrupestApi.Commons.csproj" />
+ <ProjectReference Include="..\CrupestApi.Secrets\CrupestApi.Secrets.csproj" />
+ </ItemGroup>
+
+ <PropertyGroup>
+ <TargetFramework>net7.0</TargetFramework>
+ <Nullable>enable</Nullable>
+ <ImplicitUsings>enable</ImplicitUsings>
+ <SelfContained>false</SelfContained>
+ </PropertyGroup>
+
+</Project>
\ No newline at end of file diff --git a/dropped/docker/crupest-api/CrupestApi/CrupestApi/Program.cs b/dropped/docker/crupest-api/CrupestApi/CrupestApi/Program.cs new file mode 100644 index 0000000..46648d9 --- /dev/null +++ b/dropped/docker/crupest-api/CrupestApi/CrupestApi/Program.cs @@ -0,0 +1,24 @@ +using CrupestApi.Commons;
+using CrupestApi.Commons.Crud;
+using CrupestApi.Secrets;
+using CrupestApi.Todos;
+
+var builder = WebApplication.CreateBuilder(args);
+
+string configFilePath = Environment.GetEnvironmentVariable("CRUPEST_API_CONFIG_FILE") ?? "/crupest-api-config.json";
+builder.Configuration.AddJsonFile(configFilePath, optional: false, reloadOnChange: true);
+
+builder.Services.AddJsonOptions();
+builder.Services.AddCrupestApiConfig();
+
+builder.Services.AddTodos();
+builder.Services.AddSecrets();
+
+var app = builder.Build();
+
+app.UseCrudCore();
+app.MapTodos("/api/todos");
+// TODO: Exposing the secrets endpoint is not safe yet.
+// app.MapSecrets("/api/secrets");
+
+app.Run();
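Program.cs loads the JSON file named by CRUPEST_API_CONFIG_FILE, and TodosServiceCollectionExtensions earlier binds TodosConfiguration to the CrupestApi:Todos section. For a quick local experiment the same keys could be fed in memory before builder.Build() instead of going through the file; this is a hypothetical sketch with placeholder values, not code from this repository.

// Hypothetical alternative to the JSON config file; the keys mirror TodosConfiguration.
builder.Configuration.AddInMemoryCollection(new Dictionary<string, string?>
{
    ["CrupestApi:Todos:Username"] = "crupest",
    ["CrupestApi:Todos:ProjectNumber"] = "1",      // placeholder project number
    ["CrupestApi:Todos:Token"] = "<github-token>", // placeholder token
    ["CrupestApi:Todos:Count"] = "20",             // optional; PostConfigure falls back to 20 when 0
});

The real values come from the crupest-api-config.json template further down in this diff, or from the dev-config.json file referenced by launchSettings.json below.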
diff --git a/dropped/docker/crupest-api/CrupestApi/CrupestApi/Properties/launchSettings.json b/dropped/docker/crupest-api/CrupestApi/CrupestApi/Properties/launchSettings.json new file mode 100644 index 0000000..a4a5cbf --- /dev/null +++ b/dropped/docker/crupest-api/CrupestApi/CrupestApi/Properties/launchSettings.json @@ -0,0 +1,15 @@ +{
+ "$schema": "https://json.schemastore.org/launchsettings.json",
+ "profiles": {
+ "dev": {
+ "commandName": "Project",
+ "dotnetRunMessages": true,
+ "applicationUrl": "http://localhost:5188",
+ "workingDirectory": ".",
+ "environmentVariables": {
+ "ASPNETCORE_ENVIRONMENT": "Development",
+ "CRUPEST_API_CONFIG_FILE": "dev-config.json"
+ }
+ }
+ }
+}
\ No newline at end of file diff --git a/dropped/docker/crupest-api/CrupestApi/CrupestApi/appsettings.json b/dropped/docker/crupest-api/CrupestApi/CrupestApi/appsettings.json new file mode 100644 index 0000000..53753bd --- /dev/null +++ b/dropped/docker/crupest-api/CrupestApi/CrupestApi/appsettings.json @@ -0,0 +1,8 @@ +{
+ "Logging": {
+ "LogLevel": {
+ "Default": "Information"
+ }
+ },
+ "AllowedHosts": "*"
+}
\ No newline at end of file diff --git a/dropped/docker/crupest-api/Dockerfile b/dropped/docker/crupest-api/Dockerfile new file mode 100644 index 0000000..feb7522 --- /dev/null +++ b/dropped/docker/crupest-api/Dockerfile @@ -0,0 +1,13 @@ +FROM mcr.microsoft.com/dotnet/sdk:7.0-alpine AS build +COPY CrupestApi /CrupestApi +WORKDIR /CrupestApi +RUN dotnet publish CrupestApi/CrupestApi.csproj --configuration Release --output ./publish -r linux-x64 + +FROM mcr.microsoft.com/dotnet/aspnet:7.0-alpine +ENV ASPNETCORE_URLS=http://0.0.0.0:5000 +ENV ASPNETCORE_FORWARDEDHEADERS_ENABLED=true +COPY --from=build /CrupestApi/publish /CrupestApi +WORKDIR /CrupestApi +VOLUME [ "/crupest-api-config.json" ] +EXPOSE 5000 +ENTRYPOINT ["dotnet", "CrupestApi.dll"] diff --git a/dropped/template/crupest-api-config.json.template b/dropped/template/crupest-api-config.json.template new file mode 100644 index 0000000..65a7944 --- /dev/null +++ b/dropped/template/crupest-api-config.json.template @@ -0,0 +1,10 @@ +{ + "CrupestApi": { + "Todos": { + "Username": "$CRUPEST_GITHUB_USERNAME", + "ProjectNumber": "$CRUPEST_GITHUB_PROJECT_NUMBER", + "Token": "$CRUPEST_GITHUB_TOKEN", + "Count": "$CRUPEST_GITHUB_TODO_COUNT" + } + } +} diff --git a/dropped/template/docker-compose.yaml.template b/dropped/template/docker-compose.yaml.template new file mode 100644 index 0000000..1b28c5b --- /dev/null +++ b/dropped/template/docker-compose.yaml.template @@ -0,0 +1,44 @@ +services: + debian-dev: + pull_policy: build + build: + context: ./docker/debian-dev + dockerfile: Dockerfile + pull: true + args: + - USER=crupest + tags: + - "crupest/debian-dev:latest" + container_name: debian-dev + init: true + command: [ "/bootstrap/start/code-server.bash" ] + volumes: + - ./data/debian-dev:/data + - debian-dev-home:/home/crupest + restart: on-failure:3 + + timeline: + image: crupest/timeline:latest + pull_policy: always + container_name: timeline + restart: on-failure:3 + environment: + - ASPNETCORE_FORWARDEDHEADERS_ENABLED=true + - TIMELINE_DisableAutoBackup=true + volumes: + - ./data/timeline:/root/timeline + + crupest-api: + pull_policy: build + build: + context: ./docker/crupest-api + dockerfile: Dockerfile + pull: true + tags: + - "crupest/crupest-api:latest" + container_name: crupest-api + volumes: + - "./crupest-api-config.json:/crupest-api-config.json:ro" + +volumes: + debian-dev-home: diff --git a/dropped/template/nginx/code.conf.template b/dropped/template/nginx/code.conf.template new file mode 100644 index 0000000..205c7ba --- /dev/null +++ b/dropped/template/nginx/code.conf.template @@ -0,0 +1,20 @@ +server { + server_name code.${CRUPEST_DOMAIN}; + include common/https-listen; + + location / { + include common/proxy-common; + proxy_pass http://debian-dev:8080/; + } + + client_max_body_size 5G; +} + + +server { + server_name code.${CRUPEST_DOMAIN}; + include common/http-listen; + + include common/https-redirect; + include common/acme-challenge; +} diff --git a/dropped/template/nginx/timeline.conf.template b/dropped/template/nginx/timeline.conf.template new file mode 100644 index 0000000..551e0ae --- /dev/null +++ b/dropped/template/nginx/timeline.conf.template @@ -0,0 +1,21 @@ +server { + listen 443 ssl http2; + listen [::]:443 ssl http2; + server_name timeline.${CRUPEST_DOMAIN}; + + location / { + include common/reverse-proxy; + proxy_pass http://timeline:5000/; + } + + client_max_body_size 5G; +} + +server { + listen 80; + listen [::]:80; + server_name timeline.${CRUPEST_DOMAIN}; + + include common/https-redirect; + include 
common/acme-challenge; +} diff --git a/dropped/template/v2ray-client-config.json.template b/dropped/template/v2ray-client-config.json.template new file mode 100644 index 0000000..0c99c6d --- /dev/null +++ b/dropped/template/v2ray-client-config.json.template @@ -0,0 +1,46 @@ +{ + "inbounds": [ + { + "port": 1080, + "listen": "127.0.0.1", + "protocol": "socks", + "sniffing": { + "enabled": true, + "destOverride": [ + "http", + "tls" + ] + }, + "settings": { + "auth": "noauth", + "udp": false + } + } + ], + "outbounds": [ + { + "protocol": "vmess", + "settings": { + "vnext": [ + { + "address": "$CRUPEST_DOMAIN", + "port": 443, + "users": [ + { + "id": "$CRUPEST_V2RAY_TOKEN", + "alterId": 0 + } + ] + } + ] + }, + "streamSettings": { + "network": "ws", + "security": "tls", + "wsSettings": { + "path": "/_$CRUPEST_V2RAY_PATH" + } + } + } + ] +}
\ No newline at end of file diff --git a/hurd.yaml b/hurd.yaml new file mode 100644 index 0000000..efd99a3 --- /dev/null +++ b/hurd.yaml @@ -0,0 +1,94 @@ +kernel: + site: + home: https://www.gnu.org/software/hurd/index.html + irc-archive: https://logs.guix.gnu.org/hurd/ + + mailing-list: + - address: bug-hurd@gnu.org + archive: https://lists.gnu.org/archive/html/bug-hurd/ + + project: + + - name: hurd + git: + my: https://git.crupest.life/love-hurd/hurd + upstream: https://git.savannah.gnu.org/git/hurd/hurd.git + debian: https://salsa.debian.org/hurd-team/hurd + + - name: gnumach + git: + my: https://git.crupest.life/love-hurd/gnumach + upstream: https://git.savannah.gnu.org/git/hurd/gnumach.git + debian: https://salsa.debian.org/hurd-team/gnumach + + - name: mig + git: + my: https://git.crupest.life/love-hurd/mig + upstream: https://git.savannah.gnu.org/git/hurd/mig.git + debian: https://salsa.debian.org/hurd-team/mig + + - name: glibc + git: + my: https://git.crupest.life/love-hurd/glibc + upstream: git://sourceware.org/git/glibc.git + debian: https://salsa.debian.org/glibc-team/glibc + mirror: https://mirrors.tuna.tsinghua.edu.cn/git/glibc.git + + - name: web + git: + my: https://git.crupest.life/love-hurd/web + upstream: https://git.savannah.gnu.org/git/hurd/web.git + + +debian-port: + site: + home: https://www.debian.org/ports/hurd/ + + mailing-list: + - address: debian-hurd@lists.debian.org + archive: https://lists.debian.org/debian-hurd/ + + package: + + - name: abseil + git: + my: https://git.crupest.life/love-hurd/abseil + debian: https://salsa.debian.org/debian/abseil + upstream: https://github.com/abseil/abseil-cpp + mail: + - https://lists.debian.org/debian-hurd/2025/02/msg00011.html + - https://lists.debian.org/debian-hurd/2025/02/msg00035.html + + + - name: libgav1 + git: + my: https://git.crupest.life/love-hurd/libgav1 + debian: https://salsa.debian.org/multimedia-team/libgav1 + upstream: https://chromium.googlesource.com/codecs/libgav1/ + mail: + - https://lists.debian.org/debian-hurd/2025/02/msg00016.html + gerrit: + - https://chromium-review.googlesource.com/c/codecs/libgav1/+/6239812 + + +cheatsheet: + + - name: Configure/Setup network. + tag: + - network + - setup + - configure + command: + - settrans -fgap /servers/socket/2 /hurd/pfinet -i /dev/eth0 -a 10.0.2.15 -g 10.0.2.2 -m 255.255.255.0 + - fsysopts /servers/socket/2 /hurd/pfinet -i /dev/eth0 -a 10.0.2.15 -g 10.0.2.2 -m 255.255.255.0 + - fsysopts /server/socket/2 -a 10.0.2.15 -g 10.0.2.2 -m 255.255.255.0 + note: -a 10.0.2.15 -g 10.0.2.2 -m 255.255.255.0 is used in VirtualBox. + + - name: Setup apt after system installation or when cert/gpg are outdated. 
+ tag: + - setup + - debian + - apt + command: + - apt-get --allow-unauthenticated --allow-insecure-repositories update + - apt-get --allow-unauthenticated upgrade diff --git a/templates/docker-compose.yaml.template b/templates/docker-compose.yaml.template new file mode 100644 index 0000000..6de0c12 --- /dev/null +++ b/templates/docker-compose.yaml.template @@ -0,0 +1,184 @@ +services: + + blog: + pull_policy: build + build: + context: ./docker/blog + dockerfile: Dockerfile + pull: true + tags: + - "crupest/blog:latest" + container_name: blog + volumes: + - "blog-public:/public" + restart: on-failure:3 + + nginx: + pull_policy: build + build: + context: ./docker/nginx + dockerfile: Dockerfile + pull: true + tags: + - "crupest/nginx:latest" + container_name: nginx + ports: + - "80:80" + - "443:443" + - "443:443/udp" + volumes: + - "./generated/nginx/conf.d:/etc/nginx/conf.d:ro" + - "./generated/nginx/common:/etc/nginx/common:ro" + - "./data/certbot/certs:/etc/letsencrypt:ro" + - "./data/certbot/webroot:/srv/acme:ro" + - "blog-public:/srv/www/blog:ro" + restart: on-failure:3 + + v2ray: + pull_policy: build + build: + context: ./docker/v2ray + dockerfile: Dockerfile + pull: true + tags: + - "crupest/v2ray:latest" + container_name: v2ray + command: [ "run", "-c", "/etc/v2fly/config.json" ] + volumes: + - "./generated/v2ray-config.json:/etc/v2fly/config.json:ro" + restart: on-failure:3 + + auto-certbot: + pull_policy: build + depends_on: + - nginx + build: + context: ./docker/auto-certbot + dockerfile: Dockerfile + pull: true + args: + - CRUPEST_DOMAIN=$CRUPEST_DOMAIN + - CRUPEST_EMAIL=$CRUPEST_EMAIL + - CRUPEST_AUTO_CERTBOT_ADDITIONAL_PACKAGES=docker-cli + - CRUPEST_AUTO_CERTBOT_POST_HOOK=docker restart nginx + tags: + - "crupest/auto-certbot:latest" + container_name: auto-certbot + volumes: + - "./data/certbot/certs:/etc/letsencrypt" + - "./data/certbot/data:/var/lib/letsencrypt" + - "./data/certbot/webroot:/var/www/certbot" + # map docker socket to allow auto-certbot to restart nginx + - "/var/run/docker.sock:/var/run/docker.sock" + restart: on-failure:3 + + auto-backup: + pull_policy: build + build: + context: ./docker/auto-backup + dockerfile: Dockerfile + pull: true + args: + - CRUPEST_AUTO_BACKUP_COS_SECRET_ID=${CRUPEST_AUTO_BACKUP_COS_SECRET_ID} + - CRUPEST_AUTO_BACKUP_COS_SECRET_KEY=${CRUPEST_AUTO_BACKUP_COS_SECRET_KEY} + - CRUPEST_AUTO_BACKUP_COS_REGION=${CRUPEST_AUTO_BACKUP_COS_REGION} + - CRUPEST_AUTO_BACKUP_BUCKET_NAME=${CRUPEST_AUTO_BACKUP_BUCKET_NAME} + tags: + - "crupest/auto-backup:latest" + container_name: auto-backup + volumes: + - "./data:/data" + restart: on-failure:3 + + mailserver: + image: docker.io/mailserver/docker-mailserver:latest + pull_policy: always + container_name: mailserver + # If the FQDN for your mail-server is only two labels (eg: example.com), + # you can assign this entirely to `hostname` and remove `domainname`. + hostname: mail + domainname: $CRUPEST_DOMAIN + env_file: generated/mailserver.env + # More information about the mail-server ports: + # https://docker-mailserver.github.io/docker-mailserver/edge/config/security/understanding-the-ports/ + # To avoid conflicts with yaml base-60 float, DO NOT remove the quotation marks. 
+ ports: + - "25:25" # SMTP (explicit TLS => STARTTLS) + - "143:143" # IMAP4 (explicit TLS => STARTTLS) + - "465:465" # ESMTP (implicit TLS) + - "587:587" # ESMTP (explicit TLS => STARTTLS) + - "993:993" # IMAP4 (implicit TLS) + - "4190:4190" # manage sieve protocol + volumes: + - ./data/dms/mail-data/:/var/mail/ + - ./data/dms/mail-state/:/var/mail-state/ + - ./data/dms/mail-logs/:/var/log/mail/ + - ./data/dms/config/:/tmp/docker-mailserver/ + - ./data/certbot/certs:/etc/letsencrypt + - /etc/localtime:/etc/localtime:ro + restart: on-failure:3 + stop_grace_period: 1m + cap_add: + - NET_ADMIN + healthcheck: + test: "ss --listening --tcp | grep -P 'LISTEN.+:smtp' || exit 1" + timeout: 3s + retries: 0 + + forgejo: + image: code.forgejo.org/forgejo/forgejo:10 + pull_policy: always + container_name: forgejo + environment: + - USER_UID=1000 + - USER_GID=1000 + volumes: + - ./data/forgejo:/data + - /etc/timezone:/etc/timezone:ro + - /etc/localtime:/etc/localtime:ro + restart: on-failure:3 + + roundcubemail: + image: roundcube/roundcubemail:latest + pull_policy: always + container_name: roundcubemail + volumes: + - ./data/secret/gnupg:/gnupg + - ./data/roundcube/www/html:/var/www/html + - ./data/roundcube/db:/var/roundcube/db + - ./data/roundcube/config:/var/roundcube/config + - roundcubemail-temp:/tmp/roundcube-temp + environment: + - ROUNDCUBEMAIL_DEFAULT_HOST=ssl://mail.crupest.life + - ROUNDCUBEMAIL_DEFAULT_PORT=993 + - ROUNDCUBEMAIL_SMTP_SERVER=ssl://mail.crupest.life + - ROUNDCUBEMAIL_SMTP_PORT=465 + - ROUNDCUBEMAIL_DB_TYPE=sqlite + - ROUNDCUBEMAIL_PLUGINS=archive,enigma,jqueryui,newmail_notifier,show_additional_headers,userinfo,zipdownload,managesieve + restart: on-failure:3 + + 2fauth: + image: 2fauth/2fauth + pull_policy: always + container_name: 2fauth + volumes: + - ./data/2fauth:/2fauth + environment: + - APP_NAME=2FAuth-crupest + - APP_TIMEZONE=UTC + - SITE_OWNER=crupest@crupest.life + - APP_KEY=${CRUPEST_2FAUTH_APP_KEY} + - APP_URL=https://2fa.${CRUPEST_DOMAIN} + - MAIL_MAILER=smtp + - MAIL_HOST=mail.crupest.life + - MAIL_PORT=465 + - MAIL_USERNAME=${CRUPEST_2FAUTH_MAIL_USERNAME} + - MAIL_PASSWORD=${CRUPEST_2FAUTH_MAIL_PASSWORD} + - MAIL_ENCRYPTION=ssl + - MAIL_FROM_NAME=2FAuth-crupest + - MAIL_FROM_ADDRESS=${CRUPEST_2FAUTH_MAIL_USERNAME} + - TRUSTED_PROXIES=* + +volumes: + blog-public: + roundcubemail-temp: diff --git a/templates/forgejo.app.ini.init.template b/templates/forgejo.app.ini.init.template new file mode 100644 index 0000000..7dc3800 --- /dev/null +++ b/templates/forgejo.app.ini.init.template @@ -0,0 +1,42 @@ +# Copy this file to ./data/forgejo/gitea/conf/app.ini +# TODO: Copy this to data directory automatically. + +APP_NAME = Forgejo, loved by crupest. 
+RUN_MODE = prod +WORK_PATH = /data/gitea + +[server] +HTTP_ADDR = 0.0.0.0 +HTTP_PORT = 3000 +ROOT_URL = https://git.${CRUPEST_DOMAIN} +DISABLE_SSH = true +LFS_START_SERVER = true + +[database] +DB_TYPE = sqlite3 + +[security] +INSTALL_LOCK = false +REVERSE_PROXY_LIMIT = 1 +REVERSE_PROXY_TRUSTED_PROXIES = * + +[service] +DISABLE_REGISTRATION = false +ALLOW_ONLY_INTERNAL_REGISTRATION = true + +[mailer] +ENABLED = true +PROTOCOL = smtp +SMTP_ADDR = mail.${CRUPEST_DOMAIN} +SMTP_PORT = 465 +USER = ${CRUPEST_FORGEJO_MAILER_USER} +PASSWD = ${CRUPEST_FORGEJO_MAILER_PASSWD} + +[log] +MODE = console,file + +[cron] +ENABLED = true + +[actions] +ENABLED = false diff --git a/templates/mailserver.env b/templates/mailserver.env new file mode 100644 index 0000000..9b12dfe --- /dev/null +++ b/templates/mailserver.env @@ -0,0 +1,661 @@ +# ----------------------------------------------- +# --- Mailserver Environment Variables ---------- +# ----------------------------------------------- + +# DOCUMENTATION FOR THESE VARIABLES IS FOUND UNDER +# https://docker-mailserver.github.io/docker-mailserver/latest/config/environment/ + +# ----------------------------------------------- +# --- General Section --------------------------- +# ----------------------------------------------- + +# empty => uses the `hostname` command to get the mail server's canonical hostname +# => Specify a fully-qualified domainname to serve mail for. This is used for many of the config features so if you can't set your hostname (e.g. you're in a container platform that doesn't let you) specify it in this environment variable. +OVERRIDE_HOSTNAME= + +# REMOVED in version v11.0.0! Use LOG_LEVEL instead. +DMS_DEBUG=0 + +# Set the log level for DMS. +# This is mostly relevant for container startup scripts and change detection event feedback. +# +# Valid values (in order of increasing verbosity) are: `error`, `warn`, `info`, `debug` and `trace`. +# The default log level is `info`. +LOG_LEVEL=info + +# critical => Only show critical messages +# error => Only show erroneous output +# **warn** => Show warnings +# info => Normal informational output +# debug => Also show debug messages +SUPERVISOR_LOGLEVEL= + +# Support for deployment where these defaults are not compatible (eg: some NAS appliances): +# /var/mail vmail User ID (default: 5000) +DMS_VMAIL_UID= +# /var/mail vmail Group ID (default: 5000) +DMS_VMAIL_GID= + +# **empty** => use FILE +# LDAP => use LDAP authentication +# OIDC => use OIDC authentication (not yet implemented) +# FILE => use local files (this is used as the default) +ACCOUNT_PROVISIONER= + +# empty => postmaster@domain.com +# => Specify the postmaster address +POSTMASTER_ADDRESS= + +# Check for updates on container start and then once a day +# If an update is available, a mail is sent to POSTMASTER_ADDRESS +# 0 => Update check disabled +# 1 => Update check enabled +ENABLE_UPDATE_CHECK=1 + +# Customize the update check interval. +# Number + Suffix. Suffix must be 's' for seconds, 'm' for minutes, 'h' for hours or 'd' for days. +UPDATE_CHECK_INTERVAL=1d + +# Set different options for mynetworks option (can be overwrite in postfix-main.cf) +# **WARNING**: Adding the docker network's gateway to the list of trusted hosts, e.g. using the `network` or +# `connected-networks` option, can create an open relay +# https://github.com/docker-mailserver/docker-mailserver/issues/1405#issuecomment-590106498 +# The same can happen for rootless podman. 
To prevent this, set the value to "none" or configure slirp4netns +# https://github.com/docker-mailserver/docker-mailserver/issues/2377 +# +# none => Explicitly force authentication +# container => Container IP address only +# host => Add docker container network (ipv4 only) +# network => Add all docker container networks (ipv4 only) +# connected-networks => Add all connected docker networks (ipv4 only) +PERMIT_DOCKER=none + +# Set the timezone. If this variable is unset, the container runtime will try to detect the time using +# `/etc/localtime`, which you can alternatively mount into the container. The value of this variable +# must follow the pattern `AREA/ZONE`, i.e. of you want to use Germany's time zone, use `Europe/Berlin`. +# You can lookup all available timezones here: https://en.wikipedia.org/wiki/List_of_tz_database_time_zones#List +TZ= + +# In case you network interface differs from 'eth0', e.g. when you are using HostNetworking in Kubernetes, +# you can set NETWORK_INTERFACE to whatever interface you want. This interface will then be used. +# - **empty** => eth0 +NETWORK_INTERFACE= + +# empty => modern +# modern => Enables TLSv1.2 and modern ciphers only. (default) +# intermediate => Enables TLSv1, TLSv1.1 and TLSv1.2 and broad compatibility ciphers. +TLS_LEVEL= + +# Configures the handling of creating mails with forged sender addresses. +# +# **0** => (not recommended) Mail address spoofing allowed. Any logged in user may create email messages with a forged sender address (see also https://en.wikipedia.org/wiki/Email_spoofing). +# 1 => Mail spoofing denied. Each user may only send with his own or his alias addresses. Addresses with extension delimiters(http://www.postfix.org/postconf.5.html#recipient_delimiter) are not able to send messages. +SPOOF_PROTECTION= + +# Enables the Sender Rewriting Scheme. SRS is needed if your mail server acts as forwarder. See [postsrsd](https://github.com/roehling/postsrsd/blob/master/README.md#sender-rewriting-scheme-crash-course) for further explanation. +# - **0** => Disabled +# - 1 => Enabled +ENABLE_SRS=0 + +# Enables the OpenDKIM service. +# **1** => Enabled +# 0 => Disabled +ENABLE_OPENDKIM=0 + +# Enables the OpenDMARC service. +# **1** => Enabled +# 0 => Disabled +ENABLE_OPENDMARC=0 + + +# Enabled `policyd-spf` in Postfix's configuration. You will likely want to set this +# to `0` in case you're using Rspamd (`ENABLE_RSPAMD=1`). +# +# - 0 => Disabled +# - **1** => Enabled +ENABLE_POLICYD_SPF=0 + +# Enables POP3 service +# - **0** => Disabled +# - 1 => Enabled +ENABLE_POP3= + +# Enables IMAP service +# - 0 => Disabled +# - **1** => Enabled +ENABLE_IMAP=1 + +# Enables ClamAV, and anti-virus scanner. +# 1 => Enabled +# **0** => Disabled +ENABLE_CLAMAV=0 + +# Add the value of this ENV as a prefix to the mail subject when spam is detected. +# NOTE: This subject prefix may be redundant (by default spam is delivered to a junk folder). +# It provides value when your junk mail is stored alongside legitimate mail instead of a separate location (like with `SPAMASSASSIN_SPAM_TO_INBOX=1` or `MOVE_SPAM_TO_JUNK=0` or a POP3 only setup, without IMAP). +# NOTE: When not using Docker Compose, other CRI may not support quote-wrapping the value here to preserve any trailing white-space. +SPAM_SUBJECT= + +# Enables Rspamd +# **0** => Disabled +# 1 => Enabled +ENABLE_RSPAMD=1 + +# When `ENABLE_RSPAMD=1`, an internal Redis instance is enabled implicitly. +# This setting provides an opt-out to allow using an external instance instead. 
+# 0 => Disabled +# 1 => Enabled +ENABLE_RSPAMD_REDIS= + +# When enabled, +# +# 1. the "[autolearning][rspamd-autolearn]" feature is turned on; +# 2. the Bayes classifier will be trained when moving mails from or to the Junk folder (with the help of Sieve scripts). +# +# **0** => disabled +# 1 => enabled +RSPAMD_LEARN=0 + +# This settings controls whether checks should be performed on emails coming +# from authenticated users (i.e. most likely outgoing emails). The default value +# is `0` in order to align better with SpamAssassin. We recommend reading +# through https://rspamd.com/doc/tutorials/scanning_outbound.html though to +# decide for yourself whether you need and want this feature. +# +# Note that DKIM signing of e-mails will still happen. +RSPAMD_CHECK_AUTHENTICATED=0 + +# Controls whether the Rspamd Greylisting module is enabled. +# This module can further assist in avoiding spam emails by greylisting +# e-mails with a certain spam score. +# +# **0** => disabled +# 1 => enabled +RSPAMD_GREYLISTING=1 + +# Can be used to enable or disable the Hfilter group module. +# +# - 0 => Disabled +# - **1** => Enabled +RSPAMD_HFILTER=1 + +# Can be used to control the score when the HFILTER_HOSTNAME_UNKNOWN symbol applies. A higher score is more punishing. Setting it to 15 is equivalent to rejecting the email when the check fails. +# +# Default: 6 +RSPAMD_HFILTER_HOSTNAME_UNKNOWN_SCORE=6 + +# Can be used to enable or disable the (still experimental) neural module. +# +# - **0** => Disabled +# - 1 => Enabled +RSPAMD_NEURAL=0 + +# Amavis content filter (used for ClamAV & SpamAssassin) +# 0 => Disabled +# 1 => Enabled +ENABLE_AMAVIS=0 + +# -1/-2/-3 => Only show errors +# **0** => Show warnings +# 1/2 => Show default informational output +# 3/4/5 => log debug information (very verbose) +AMAVIS_LOGLEVEL=0 + +# This enables DNS block lists in Postscreen. +# Note: Emails will be rejected, if they don't pass the block list checks! +# **0** => DNS block lists are disabled +# 1 => DNS block lists are enabled +ENABLE_DNSBL=0 + +# If you enable Fail2Ban, don't forget to add the following lines to your `compose.yaml`: +# cap_add: +# - NET_ADMIN +# Otherwise, `nftables` won't be able to ban IPs. +ENABLE_FAIL2BAN=0 + +# Fail2Ban blocktype +# drop => drop packet (send NO reply) +# reject => reject packet (send ICMP unreachable) +FAIL2BAN_BLOCKTYPE=drop + +# 1 => Enables Managesieve on port 4190 +# empty => disables Managesieve +ENABLE_MANAGESIEVE=1 + +# **enforce** => Allow other tests to complete. Reject attempts to deliver mail with a 550 SMTP reply, and log the helo/sender/recipient information. Repeat this test the next time the client connects. +# drop => Drop the connection immediately with a 521 SMTP reply. Repeat this test the next time the client connects. +# ignore => Ignore the failure of this test. Allow other tests to complete. Repeat this test the next time the client connects. This option is useful for testing and collecting statistics without blocking mail. +POSTSCREEN_ACTION=enforce + +# empty => all daemons start +# 1 => only launch postfix smtp +SMTP_ONLY= + +# Please read [the SSL page in the documentation](https://docker-mailserver.github.io/docker-mailserver/latest/config/security/ssl) for more information. 
+# +# empty => SSL disabled +# letsencrypt => Enables Let's Encrypt certificates +# custom => Enables custom certificates +# manual => Let's you manually specify locations of your SSL certificates for non-standard cases +# self-signed => Enables self-signed certificates +SSL_TYPE=letsencrypt + +# These are only supported with `SSL_TYPE=manual`. +# Provide the path to your cert and key files that you've mounted access to within the container. +SSL_CERT_PATH= +SSL_KEY_PATH= +# Optional: A 2nd certificate can be supported as fallback (dual cert support), eg ECDSA with an RSA fallback. +# Useful for additional compatibility with older MTA and MUA (eg pre-2015). +SSL_ALT_CERT_PATH= +SSL_ALT_KEY_PATH= + +# Set how many days a virusmail will stay on the server before being deleted +# empty => 7 days +VIRUSMAILS_DELETE_DELAY= + +# Configure Postfix `virtual_transport` to deliver mail to a different LMTP client (default is a dovecot socket). +# Provide any valid URI. Examples: +# +# empty => `lmtp:unix:/var/run/dovecot/lmtp` (default, configured in Postfix main.cf) +# `lmtp:unix:private/dovecot-lmtp` (use socket) +# `lmtps:inet:<host>:<port>` (secure lmtp with starttls) +# `lmtp:<kopano-host>:2003` (use kopano as mailstore) +POSTFIX_DAGENT= + +# Set the mailbox size limit for all users. If set to zero, the size will be unlimited (default). Size is in bytes. +# +# empty => 0 +POSTFIX_MAILBOX_SIZE_LIMIT= + +# See https://docker-mailserver.github.io/docker-mailserver/latest/config/account-management/overview/#quotas +# 0 => Dovecot quota is disabled +# 1 => Dovecot quota is enabled +ENABLE_QUOTAS=1 + +# Set the message size limit for all users. If set to zero, the size will be unlimited (not recommended!). Size is in bytes. +# +# empty => 10240000 (~10 MB) +POSTFIX_MESSAGE_SIZE_LIMIT= + +# Mails larger than this limit won't be scanned. +# ClamAV must be enabled (ENABLE_CLAMAV=1) for this. +# +# empty => 25M (25 MB) +CLAMAV_MESSAGE_SIZE_LIMIT= + +# Enables regular pflogsumm mail reports. +# This is a new option. The old REPORT options are still supported for backwards compatibility. If this is not set and reports are enabled with the old options, logrotate will be used. +# +# not set => No report +# daily_cron => Daily report for the previous day +# logrotate => Full report based on the mail log when it is rotated +PFLOGSUMM_TRIGGER= + +# Recipient address for pflogsumm reports. +# +# not set => Use REPORT_RECIPIENT or POSTMASTER_ADDRESS +# => Specify the recipient address(es) +PFLOGSUMM_RECIPIENT= + +# Sender address (`FROM`) for pflogsumm reports if pflogsumm reports are enabled. +# +# not set => Use REPORT_SENDER +# => Specify the sender address +PFLOGSUMM_SENDER= + +# Interval for logwatch report. +# +# none => No report is generated +# daily => Send a daily report +# weekly => Send a report every week +LOGWATCH_INTERVAL= + +# Recipient address for logwatch reports if they are enabled. +# +# not set => Use REPORT_RECIPIENT or POSTMASTER_ADDRESS +# => Specify the recipient address(es) +LOGWATCH_RECIPIENT= + +# Sender address (`FROM`) for logwatch reports if logwatch reports are enabled. +# +# not set => Use REPORT_SENDER +# => Specify the sender address +LOGWATCH_SENDER= + +# Defines who receives reports if they are enabled. +# **empty** => ${POSTMASTER_ADDRESS} +# => Specify the recipient address +REPORT_RECIPIENT= + +# Defines who sends reports if they are enabled. 
+# **empty** => mailserver-report@${DOMAINNAME} +# => Specify the sender address +REPORT_SENDER= + +# Changes the interval in which log files are rotated +# **weekly** => Rotate log files weekly +# daily => Rotate log files daily +# monthly => Rotate log files monthly +# +# Note: This Variable actually controls logrotate inside the container +# and rotates the log files depending on this setting. The main log output is +# still available in its entirety via `docker logs mail` (Or your +# respective container name). If you want to control logrotation for +# the Docker-generated logfile see: +# https://docs.docker.com/config/containers/logging/configure/ +# +# Note: This variable can also determine the interval for Postfix's log summary reports, see [`PFLOGSUMM_TRIGGER`](#pflogsumm_trigger). +LOGROTATE_INTERVAL=weekly + +# Defines how many log files are kept by logrorate +LOGROTATE_COUNT=4 + + +# If enabled, employs `reject_unknown_client_hostname` to sender restrictions in Postfix's configuration. +# +# - **0** => Disabled +# - 1 => Enabled +POSTFIX_REJECT_UNKNOWN_CLIENT_HOSTNAME=0 + +# Choose TCP/IP protocols for postfix to use +# **all** => All possible protocols. +# ipv4 => Use only IPv4 traffic. Most likely you want this behind Docker. +# ipv6 => Use only IPv6 traffic. +# +# Note: More details at http://www.postfix.org/postconf.5.html#inet_protocols +POSTFIX_INET_PROTOCOLS=all + +# Enables MTA-STS support for outbound mail. +# More details: https://docker-mailserver.github.io/docker-mailserver/v13.3/config/best-practices/mta-sts/ +# - **0** ==> MTA-STS disabled +# - 1 => MTA-STS enabled +ENABLE_MTA_STS=0 + +# Choose TCP/IP protocols for dovecot to use +# **all** => Listen on all interfaces +# ipv4 => Listen only on IPv4 interfaces. Most likely you want this behind Docker. +# ipv6 => Listen only on IPv6 interfaces. +# +# Note: More information at https://dovecot.org/doc/dovecot-example.conf +DOVECOT_INET_PROTOCOLS=all + +# ----------------------------------------------- +# --- SpamAssassin Section ---------------------- +# ----------------------------------------------- + +ENABLE_SPAMASSASSIN=0 + +# KAM is a 3rd party SpamAssassin ruleset, provided by the McGrail Foundation. +# If SpamAssassin is enabled, KAM can be used in addition to the default ruleset. +# - **0** => KAM disabled +# - 1 => KAM enabled +# +# Note: only has an effect if `ENABLE_SPAMASSASSIN=1` +ENABLE_SPAMASSASSIN_KAM=0 + +# deliver spam messages to the inbox (tagged using SPAM_SUBJECT) +SPAMASSASSIN_SPAM_TO_INBOX=1 + +# spam messages will be moved in the Junk folder (SPAMASSASSIN_SPAM_TO_INBOX=1 required) +MOVE_SPAM_TO_JUNK=1 + +# spam messages will be marked as read +MARK_SPAM_AS_READ=0 + +# add 'spam info' headers at, or above this level +SA_TAG=2.0 + +# add 'spam detected' headers at, or above this level +SA_TAG2=6.31 + +# triggers spam evasive actions +SA_KILL=10.0 + +# ----------------------------------------------- +# --- Fetchmail Section ------------------------- +# ----------------------------------------------- + +ENABLE_FETCHMAIL=0 + +# The interval to fetch mail in seconds +FETCHMAIL_POLL=300 +# Use multiple fetchmail instances (1 per poll entry in fetchmail.cf) +# Supports multiple IMAP IDLE connections when a server is used across multiple poll entries +# https://otremba.net/wiki/Fetchmail_(Debian)#Immediate_Download_via_IMAP_IDLE +FETCHMAIL_PARALLEL=0 + +# Enable or disable `getmail`. +# +# - **0** => Disabled +# - 1 => Enabled +ENABLE_GETMAIL=0 + +# The number of minutes for the interval. 
Min: 1; Default: 5. +GETMAIL_POLL=5 + +# ----------------------------------------------- +# --- OAUTH2 Section ---------------------------- +# ----------------------------------------------- + +# empty => OAUTH2 authentication is disabled +# 1 => OAUTH2 authentication is enabled +ENABLE_OAUTH2= + +# Specify the user info endpoint URL of the oauth2 provider +# Example: https://oauth2.example.com/userinfo/ +OAUTH2_INTROSPECTION_URL= + +# ----------------------------------------------- +# --- LDAP Section ------------------------------ +# ----------------------------------------------- + +# A second container for the ldap service is necessary (i.e. https://hub.docker.com/r/bitnami/openldap/) + +# empty => no +# yes => LDAP over TLS enabled for Postfix +LDAP_START_TLS= + +# empty => mail.example.com +# Specify the `<dns-name>` / `<ip-address>` where the LDAP server is reachable via a URI like: `ldaps://mail.example.com`. +# Note: You must include the desired URI scheme (`ldap://`, `ldaps://`, `ldapi://`). +LDAP_SERVER_HOST= + +# empty => ou=people,dc=domain,dc=com +# => e.g. LDAP_SEARCH_BASE=dc=mydomain,dc=local +LDAP_SEARCH_BASE= + +# empty => cn=admin,dc=domain,dc=com +# => take a look at examples of SASL_LDAP_BIND_DN +LDAP_BIND_DN= + +# empty** => admin +# => Specify the password to bind against ldap +LDAP_BIND_PW= + +# e.g. `"(&(mail=%s)(mailEnabled=TRUE))"` +# => Specify how ldap should be asked for users +LDAP_QUERY_FILTER_USER= + +# e.g. `"(&(mailGroupMember=%s)(mailEnabled=TRUE))"` +# => Specify how ldap should be asked for groups +LDAP_QUERY_FILTER_GROUP= + +# e.g. `"(&(mailAlias=%s)(mailEnabled=TRUE))"` +# => Specify how ldap should be asked for aliases +LDAP_QUERY_FILTER_ALIAS= + +# e.g. `"(&(|(mail=*@%s)(mailalias=*@%s)(mailGroupMember=*@%s))(mailEnabled=TRUE))"` +# => Specify how ldap should be asked for domains +LDAP_QUERY_FILTER_DOMAIN= + +# ----------------------------------------------- +# --- Dovecot Section --------------------------- +# ----------------------------------------------- + +# empty => no +# yes => LDAP over TLS enabled for Dovecot +DOVECOT_TLS= + +# e.g. `"(&(objectClass=PostfixBookMailAccount)(uniqueIdentifier=%n))"` +DOVECOT_USER_FILTER= + +# e.g. 
`"(&(objectClass=PostfixBookMailAccount)(uniqueIdentifier=%n))"` +DOVECOT_PASS_FILTER= + +# Define the mailbox format to be used +# default is maildir, supported values are: sdbox, mdbox, maildir +DOVECOT_MAILBOX_FORMAT=maildir + +# empty => no +# yes => Allow bind authentication for LDAP +# https://wiki.dovecot.org/AuthDatabase/LDAP/AuthBinds +DOVECOT_AUTH_BIND= + +# ----------------------------------------------- +# --- Postgrey Section -------------------------- +# ----------------------------------------------- + +ENABLE_POSTGREY=0 +# greylist for N seconds +POSTGREY_DELAY=300 +# delete entries older than N days since the last time that they have been seen +POSTGREY_MAX_AGE=35 +# response when a mail is greylisted +POSTGREY_TEXT="Delayed by Postgrey" +# whitelist host after N successful deliveries (N=0 to disable whitelisting) +POSTGREY_AUTO_WHITELIST_CLIENTS=5 + +# ----------------------------------------------- +# --- SASL Section ------------------------------ +# ----------------------------------------------- + +ENABLE_SASLAUTHD=0 + +# empty => pam +# `ldap` => authenticate against ldap server +# `shadow` => authenticate against local user db +# `mysql` => authenticate against mysql db +# `rimap` => authenticate against imap server +# Note: can be a list of mechanisms like pam ldap shadow +SASLAUTHD_MECHANISMS= + +# empty => None +# e.g. with SASLAUTHD_MECHANISMS rimap you need to specify the ip-address/servername of the imap server ==> xxx.xxx.xxx.xxx +SASLAUTHD_MECH_OPTIONS= + +# empty => Use value of LDAP_SERVER_HOST +# Note: You must include the desired URI scheme (`ldap://`, `ldaps://`, `ldapi://`). +SASLAUTHD_LDAP_SERVER= + +# empty => Use value of LDAP_BIND_DN +# specify an object with privileges to search the directory tree +# e.g. active directory: SASLAUTHD_LDAP_BIND_DN=cn=Administrator,cn=Users,dc=mydomain,dc=net +# e.g. openldap: SASLAUTHD_LDAP_BIND_DN=cn=admin,dc=mydomain,dc=net +SASLAUTHD_LDAP_BIND_DN= + +# empty => Use value of LDAP_BIND_PW +SASLAUTHD_LDAP_PASSWORD= + +# empty => Use value of LDAP_SEARCH_BASE +# specify the search base +SASLAUTHD_LDAP_SEARCH_BASE= + +# empty => default filter `(&(uniqueIdentifier=%u)(mailEnabled=TRUE))` +# e.g. for active directory: `(&(sAMAccountName=%U)(objectClass=person))` +# e.g. for openldap: `(&(uid=%U)(objectClass=person))` +SASLAUTHD_LDAP_FILTER= + +# empty => no +# yes => LDAP over TLS enabled for SASL +# If set to yes, the protocol in SASLAUTHD_LDAP_SERVER must be ldap:// or missing. +SASLAUTHD_LDAP_START_TLS= + +# empty => no +# yes => Require and verify server certificate +# If yes you must/could specify SASLAUTHD_LDAP_TLS_CACERT_FILE or SASLAUTHD_LDAP_TLS_CACERT_DIR. +SASLAUTHD_LDAP_TLS_CHECK_PEER= + +# File containing CA (Certificate Authority) certificate(s). +# empty => Nothing is added to the configuration +# Any value => Fills the `ldap_tls_cacert_file` option +SASLAUTHD_LDAP_TLS_CACERT_FILE= + +# Path to directory with CA (Certificate Authority) certificates. +# empty => Nothing is added to the configuration +# Any value => Fills the `ldap_tls_cacert_dir` option +SASLAUTHD_LDAP_TLS_CACERT_DIR= + +# Specify what password attribute to use for password verification. +# empty => Nothing is added to the configuration but the documentation says it is `userPassword` by default. 
+# Any value => Fills the `ldap_password_attr` option +SASLAUTHD_LDAP_PASSWORD_ATTR= + +# empty => `bind` will be used as a default value +# `fastbind` => The fastbind method is used +# `custom` => The custom method uses userPassword attribute to verify the password +SASLAUTHD_LDAP_AUTH_METHOD= + +# Specify the authentication mechanism for SASL bind +# empty => Nothing is added to the configuration +# Any value => Fills the `ldap_mech` option +SASLAUTHD_LDAP_MECH= + +# ----------------------------------------------- +# --- SRS Section ------------------------------- +# ----------------------------------------------- + +# envelope_sender => Rewrite only envelope sender address (default) +# header_sender => Rewrite only header sender (not recommended) +# envelope_sender,header_sender => Rewrite both senders +# An email has an "envelope" sender (indicating the sending server) and a +# "header" sender (indicating who sent it). More strict SPF policies may require +# you to replace both instead of just the envelope sender. +SRS_SENDER_CLASSES=envelope_sender + +# empty => Envelope sender will be rewritten for all domains +# provide comma separated list of domains to exclude from rewriting +SRS_EXCLUDE_DOMAINS= + +# empty => generated when the image is built +# provide a secret to use in base64 +# you may specify multiple keys, comma separated. the first one is used for +# signing and the remaining will be used for verification. this is how you +# rotate and expire keys +SRS_SECRET= + +# ----------------------------------------------- +# --- Default Relay Host Section ---------------- +# ----------------------------------------------- + +# Setup relaying all mail through a default relay host +# +# Set a default host to relay all mail through (optionally include a port) +# Example: [mail.example.com]:587 +DEFAULT_RELAY_HOST= + +# ----------------------------------------------- +# --- Multi-Domain Relay Section ---------------- +# ----------------------------------------------- + +# Setup relaying for multiple domains based on the domain name of the sender +# optionally uses usernames and passwords in postfix-sasl-password.cf and relay host mappings in postfix-relaymap.cf +# +# Set a default host to relay mail through +# Example: mail.example.com +RELAY_HOST= + +# empty => 25 +# default port to relay mail +RELAY_PORT=25 + +# ----------------------------------------------- +# --- Relay Host Credentials Section ------------ +# ----------------------------------------------- + +# Configure a relay user and password to use with RELAY_HOST / DEFAULT_RELAY_HOST + +# empty => no default +RELAY_USER= + +# empty => no default +RELAY_PASSWORD= diff --git a/templates/nginx/common/acme-challenge b/templates/nginx/common/acme-challenge new file mode 100644 index 0000000..26054b8 --- /dev/null +++ b/templates/nginx/common/acme-challenge @@ -0,0 +1,3 @@ +location /.well-known/acme-challenge { + root /srv/acme; +} diff --git a/templates/nginx/common/http-listen b/templates/nginx/common/http-listen new file mode 100644 index 0000000..76cb18d --- /dev/null +++ b/templates/nginx/common/http-listen @@ -0,0 +1,2 @@ +listen 80; +listen [::]:80; diff --git a/templates/nginx/common/https-listen b/templates/nginx/common/https-listen new file mode 100644 index 0000000..db2f68e --- /dev/null +++ b/templates/nginx/common/https-listen @@ -0,0 +1,3 @@ +listen 443 ssl; +listen [::]:443 ssl; +http2 on; diff --git a/templates/nginx/common/https-redirect b/templates/nginx/common/https-redirect new file mode 100644 index 
0000000..56d095d --- /dev/null +++ b/templates/nginx/common/https-redirect @@ -0,0 +1,3 @@ +location / { + return 301 https://$host$request_uri; +} diff --git a/templates/nginx/common/proxy-common b/templates/nginx/common/proxy-common new file mode 100644 index 0000000..4193548 --- /dev/null +++ b/templates/nginx/common/proxy-common @@ -0,0 +1,7 @@ +proxy_http_version 1.1; +proxy_set_header Upgrade $http_upgrade; +proxy_set_header Connection $connection_upgrade; +proxy_set_header Host $host; +proxy_set_header X-Forwarded-For $proxy_add_x_forwarded_for; +proxy_set_header X-Forwarded-Proto $scheme; +proxy_set_header X-Real-IP $remote_addr; diff --git a/templates/nginx/conf.d/2fa.conf.template b/templates/nginx/conf.d/2fa.conf.template new file mode 100644 index 0000000..278e4a1 --- /dev/null +++ b/templates/nginx/conf.d/2fa.conf.template @@ -0,0 +1,17 @@ +server { + server_name 2fa.${CRUPEST_DOMAIN}; + include common/https-listen; + + location / { + include common/proxy-common; + proxy_pass http://2fauth:8000/; + } +} + +server { + server_name 2fa.${CRUPEST_DOMAIN}; + include common/http-listen; + + include common/https-redirect; + include common/acme-challenge; +} diff --git a/templates/nginx/conf.d/code.conf.template b/templates/nginx/conf.d/code.conf.template new file mode 100644 index 0000000..aa70ebc --- /dev/null +++ b/templates/nginx/conf.d/code.conf.template @@ -0,0 +1,6 @@ +server { + server_name code.${CRUPEST_DOMAIN}; + include common/http-listen; + + include common/acme-challenge; +} diff --git a/templates/nginx/conf.d/forbid_unknown_domain.conf b/templates/nginx/conf.d/forbid_unknown_domain.conf new file mode 100644 index 0000000..515942b --- /dev/null +++ b/templates/nginx/conf.d/forbid_unknown_domain.conf @@ -0,0 +1,9 @@ +server { + listen 80 default_server; + listen [::]:80 default_server; + listen 443 ssl default_server; + listen [::]:443 ssl default_server; + http2 on; + + return 444; +} diff --git a/templates/nginx/conf.d/git.conf.template b/templates/nginx/conf.d/git.conf.template new file mode 100644 index 0000000..3a2948c --- /dev/null +++ b/templates/nginx/conf.d/git.conf.template @@ -0,0 +1,20 @@ +server { + server_name git.${CRUPEST_DOMAIN}; + include common/https-listen; + + location / { + include common/proxy-common; + proxy_pass http://forgejo:3000/; + } + + client_max_body_size 5G; +} + + +server { + server_name git.${CRUPEST_DOMAIN}; + include common/http-listen; + + include common/https-redirect; + include common/acme-challenge; +} diff --git a/templates/nginx/conf.d/mail.conf.template b/templates/nginx/conf.d/mail.conf.template new file mode 100644 index 0000000..40adf28 --- /dev/null +++ b/templates/nginx/conf.d/mail.conf.template @@ -0,0 +1,25 @@ +server { + server_name mail.${CRUPEST_DOMAIN}; + include common/https-listen; + + location / { + include common/proxy-common; + proxy_pass http://roundcubemail:80/; + } + + location /rspamd/ { + include common/proxy-common; + proxy_pass http://mailserver:11334/; + } + + client_max_body_size 5G; +} + + +server { + server_name mail.${CRUPEST_DOMAIN}; + include common/http-listen; + + include common/https-redirect; + include common/acme-challenge; +} diff --git a/templates/nginx/conf.d/root.conf.template b/templates/nginx/conf.d/root.conf.template new file mode 100644 index 0000000..a0b08f8 --- /dev/null +++ b/templates/nginx/conf.d/root.conf.template @@ -0,0 +1,26 @@ +server { + server_name ${CRUPEST_DOMAIN}; + include common/https-listen; + + location / { + root /srv/www; + } + + location /_$CRUPEST_V2RAY_PATH { + 
if ($http_upgrade != "websocket") { + return 404; + } + + proxy_redirect off; + include common/proxy-common; + proxy_pass http://v2ray:10000; + } +} + +server { + server_name ${CRUPEST_DOMAIN}; + include common/http-listen; + + include common/https-redirect; + include common/acme-challenge; +} diff --git a/templates/nginx/conf.d/ssl.conf.template b/templates/nginx/conf.d/ssl.conf.template new file mode 100644 index 0000000..54205f1 --- /dev/null +++ b/templates/nginx/conf.d/ssl.conf.template @@ -0,0 +1,17 @@ +# This file contains important security parameters. If you modify this file +# manually, Certbot will be unable to automatically provide future security +# updates. Instead, Certbot will print and log an error message with a path to +# the up-to-date file that you will need to refer to when manually updating +# this file. Contents are based on https://ssl-config.mozilla.org + +ssl_certificate /etc/letsencrypt/live/${CRUPEST_DOMAIN}/fullchain.pem; +ssl_certificate_key /etc/letsencrypt/live/${CRUPEST_DOMAIN}/privkey.pem; + +ssl_session_cache shared:le_nginx_SSL:10m; +ssl_session_timeout 1440m; +ssl_session_tickets off; + +ssl_protocols TLSv1.2 TLSv1.3; +ssl_prefer_server_ciphers off; + +ssl_ciphers "ECDHE-ECDSA-AES128-GCM-SHA256:ECDHE-RSA-AES128-GCM-SHA256:ECDHE-ECDSA-AES256-GCM-SHA384:ECDHE-RSA-AES256-GCM-SHA384:ECDHE-ECDSA-CHACHA20-POLY1305:ECDHE-RSA-CHACHA20-POLY1305:DHE-RSA-AES128-GCM-SHA256:DHE-RSA-AES256-GCM-SHA384"; diff --git a/templates/nginx/conf.d/timeline.conf.template b/templates/nginx/conf.d/timeline.conf.template new file mode 100644 index 0000000..a467594 --- /dev/null +++ b/templates/nginx/conf.d/timeline.conf.template @@ -0,0 +1,6 @@ +server { + server_name timeline.${CRUPEST_DOMAIN}; + include common/http-listen; + + include common/acme-challenge; +} diff --git a/templates/nginx/conf.d/websocket.conf b/templates/nginx/conf.d/websocket.conf new file mode 100644 index 0000000..32af4c3 --- /dev/null +++ b/templates/nginx/conf.d/websocket.conf @@ -0,0 +1,4 @@ +map $http_upgrade $connection_upgrade { + default upgrade; + '' close; +} diff --git a/templates/v2ray-config.json.template b/templates/v2ray-config.json.template new file mode 100644 index 0000000..33d3f16 --- /dev/null +++ b/templates/v2ray-config.json.template @@ -0,0 +1,29 @@ +{ + "inbounds": [ + { + "port": 10000, + "listen": "0.0.0.0", + "protocol": "vmess", + "settings": { + "clients": [ + { + "id": "$CRUPEST_V2RAY_TOKEN", + "alterId": 0 + } + ] + }, + "streamSettings": { + "network": "ws", + "wsSettings": { + "path": "/_$CRUPEST_V2RAY_PATH" + } + } + } + ], + "outbounds": [ + { + "protocol": "freedom", + "settings": {} + } + ] +}
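How these two pieces fit together: nginx (root.conf.template) terminates TLS on port 443 and only forwards requests for /_${CRUPEST_V2RAY_PATH} that carry a WebSocket Upgrade header to the plain vmess/ws inbound on v2ray:10000 defined just above, while the client config in dropped/template/v2ray-client-config.json.template dials the same path over TLS on port 443. A hedged smoke test, assuming CRUPEST_DOMAIN and CRUPEST_V2RAY_PATH are exported in the current environment: a plain HTTPS request to the hidden path should come back 404, because the location block rejects anything without an Upgrade header.

import os
import urllib.error
import urllib.request

# Without an "Upgrade: websocket" header the location block in
# root.conf.template returns 404, so the hidden path should look like a
# missing page to ordinary HTTP clients.
url = f"https://{os.environ['CRUPEST_DOMAIN']}/_{os.environ['CRUPEST_V2RAY_PATH']}"
try:
    with urllib.request.urlopen(url, timeout=10) as resp:
        print(f"unexpected HTTP {resp.status}: non-WebSocket request reached the proxy")
except urllib.error.HTTPError as err:
    print(f"HTTP {err.code} (404 expected for a plain request)")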
\ No newline at end of file diff --git a/tools/Crupest.SecretTool/.gitignore b/tools/Crupest.SecretTool/.gitignore new file mode 100644 index 0000000..ac4d8a4 --- /dev/null +++ b/tools/Crupest.SecretTool/.gitignore @@ -0,0 +1,7 @@ +.vs +bin +obj +*.pubxml.user +*.csproj.user + +publish diff --git a/tools/Crupest.SecretTool/Crupest.SecretTool.sln b/tools/Crupest.SecretTool/Crupest.SecretTool.sln new file mode 100644 index 0000000..fde4347 --- /dev/null +++ b/tools/Crupest.SecretTool/Crupest.SecretTool.sln @@ -0,0 +1,30 @@ + +Microsoft Visual Studio Solution File, Format Version 12.00 +# Visual Studio Version 17 +VisualStudioVersion = 17.7.34024.191 +MinimumVisualStudioVersion = 10.0.40219.1 +Project("{2150E333-8FDC-42A3-9474-1A3956D46DE8}") = "Solution Items", "Solution Items", "{F4C2CE80-CDF8-4B08-8912-D1F0F14196AD}" + ProjectSection(SolutionItems) = preProject + .gitignore = .gitignore + EndProjectSection +EndProject +Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "Crupest.SecretTool", "Crupest.SecretTool\Crupest.SecretTool.csproj", "{D6335AE4-FD22-49CD-9624-37371F3B4F82}" +EndProject +Global + GlobalSection(SolutionConfigurationPlatforms) = preSolution + Debug|Any CPU = Debug|Any CPU + Release|Any CPU = Release|Any CPU + EndGlobalSection + GlobalSection(ProjectConfigurationPlatforms) = postSolution + {D6335AE4-FD22-49CD-9624-37371F3B4F82}.Debug|Any CPU.ActiveCfg = Debug|Any CPU + {D6335AE4-FD22-49CD-9624-37371F3B4F82}.Debug|Any CPU.Build.0 = Debug|Any CPU + {D6335AE4-FD22-49CD-9624-37371F3B4F82}.Release|Any CPU.ActiveCfg = Release|Any CPU + {D6335AE4-FD22-49CD-9624-37371F3B4F82}.Release|Any CPU.Build.0 = Release|Any CPU + EndGlobalSection + GlobalSection(SolutionProperties) = preSolution + HideSolutionNode = FALSE + EndGlobalSection + GlobalSection(ExtensibilityGlobals) = postSolution + SolutionGuid = {B1E8FD9C-9157-4F4E-8265-4B37F30EEC5E} + EndGlobalSection +EndGlobal diff --git a/tools/Crupest.SecretTool/Crupest.SecretTool/.gitignore b/tools/Crupest.SecretTool/Crupest.SecretTool/.gitignore new file mode 100644 index 0000000..c936492 --- /dev/null +++ b/tools/Crupest.SecretTool/Crupest.SecretTool/.gitignore @@ -0,0 +1 @@ +vmess.txt diff --git a/tools/Crupest.SecretTool/Crupest.SecretTool/Config.cs b/tools/Crupest.SecretTool/Crupest.SecretTool/Config.cs new file mode 100644 index 0000000..ff58551 --- /dev/null +++ b/tools/Crupest.SecretTool/Crupest.SecretTool/Config.cs @@ -0,0 +1,95 @@ +namespace Crupest.SecretTool; + +public record ConfigItem(string Value, int LineNumber); + +public class DictionaryConfig(string configString, List<string>? requiredKeys = null) +{ + private static Dictionary<string, ConfigItem> Parse(string configString, List<string>? 
requiredKeys = null) + { + var config = new Dictionary<string, ConfigItem>(); + var lines = configString.Split('\n'); + int lineNumber = 1; + + foreach (var line in lines) + { + var l = line; + var beginOfComment = l.IndexOf('#'); + if (beginOfComment >= 0) + { + l = line[..beginOfComment]; + } + l = l.Trim(); + if (!string.IsNullOrEmpty(l)) + { + var equalIndex = l.IndexOf('='); + if (equalIndex == -1) + { + throw new FormatException($"No '=' found in line {lineNumber}."); + } + + config.Add(l[..equalIndex].Trim(), new ConfigItem(l[(equalIndex + 1)..].Trim(), lineNumber)); + } + + lineNumber++; + } + + if (requiredKeys is not null) + { + foreach (var key in requiredKeys) + { + if (!config.ContainsKey(key)) + { + throw new FormatException($"Required key '{key}' not found in config."); + } + } + } + + return config; + } + + public string ConfigString { get; } = configString; + public List<string>? RequiredKeys { get; } = requiredKeys; + public Dictionary<string, ConfigItem> Config { get; } = Parse(configString); + public ConfigItem GetItemCaseInsensitive(string key) + { + foreach (var (originalKey, value) in Config) + { + if (string.Equals(originalKey, key, StringComparison.OrdinalIgnoreCase)) + { + return value; + } + } + throw new KeyNotFoundException($"Key '{key}' not found in config case-insensitively."); + } +} + +public class ListConfig(string configString) +{ + private static List<ConfigItem> Parse(string configString) + { + var config = new List<ConfigItem>(); + var lines = configString.Split('\n'); + int lineNumber = 1; + + foreach (var line in lines) + { + var l = line; + var beginOfComment = l.IndexOf('#'); + if (beginOfComment >= 0) + { + l = line[..beginOfComment]; + } + l = l.Trim(); + if (!string.IsNullOrEmpty(l)) + { + config.Add(new ConfigItem(l, lineNumber)); + } + lineNumber++; + } + + return config; + } + + public string ConfigString { get; } = configString; + public List<ConfigItem> Config { get; } = Parse(configString); +} diff --git a/tools/Crupest.SecretTool/Crupest.SecretTool/Controller.cs b/tools/Crupest.SecretTool/Crupest.SecretTool/Controller.cs new file mode 100644 index 0000000..0803b01 --- /dev/null +++ b/tools/Crupest.SecretTool/Crupest.SecretTool/Controller.cs @@ -0,0 +1,113 @@ +using System.Diagnostics; + +namespace Crupest.SecretTool; + +public class Controller(string executablePath, string configPath, string? assetPath) +{ + public const string ToolAssetEnvironmentVariableName = "v2ray.location.asset"; + + public static string? FindExecutable(string contentDir, out bool isLocal, string? executableName = null) + { + isLocal = false; + executableName ??= "v2ray"; + + if (OperatingSystem.IsWindows()) + { + executableName += ".exe"; + } + + var localToolPath = Path.Combine(contentDir, executableName); + if (File.Exists(localToolPath)) + { + isLocal = true; + return localToolPath; + } + + var paths = Environment.GetEnvironmentVariable("PATH")?.Split(Path.PathSeparator); + if (paths is not null) + { + foreach (var p in paths) + { + var toolPath = Path.Combine(p, executableName); + if (File.Exists(toolPath)) + { + return toolPath; + } + } + } + + return null; + } + + public string ExecutablePath { get; } = executablePath; + public string ConfigPath { get; } = configPath; + public string? AssetPath { get; } = assetPath; + public Process? 
CurrentProcess { get; private set; } + + private Process CreateProcess() + { + var process = new Process(); + + var startInfo = new ProcessStartInfo + { + FileName = ExecutablePath, + }; + startInfo.ArgumentList.Add("run"); + startInfo.ArgumentList.Add("-c"); + startInfo.ArgumentList.Add(ConfigPath); + if (AssetPath is not null) + { + startInfo.EnvironmentVariables[ToolAssetEnvironmentVariableName] = AssetPath; + } + + process.StartInfo = startInfo; + process.OutputDataReceived += (_, args) => + { + Console.Out.Write(args.Data); + }; + process.ErrorDataReceived += (_, args) => + { + Console.Error.WriteLine(args.Data); + }; + + return process; + } + + public void Stop() + { + if (CurrentProcess is not null) + { + CurrentProcess.Kill(); + CurrentProcess.Dispose(); + CurrentProcess = null; + Console.WriteLine("V2ray stopped."); + } + } + + public void Start(bool stopOld = false) + { + if (stopOld) Stop(); + + if (CurrentProcess is null) + { + CurrentProcess = CreateProcess(); + CurrentProcess.EnableRaisingEvents = true; + CurrentProcess.Exited += (_, _) => + { + if (CurrentProcess.ExitCode != 0) + { + const string message = "V2ray exited with error."; + Console.Error.WriteLine(message); + throw new Exception(message); + } + }; + CurrentProcess.Start(); + Console.WriteLine("V2ray started."); + } + } + + public void Restart() + { + Start(true); + } +} diff --git a/tools/Crupest.SecretTool/Crupest.SecretTool/Crupest.SecretTool.csproj b/tools/Crupest.SecretTool/Crupest.SecretTool/Crupest.SecretTool.csproj new file mode 100644 index 0000000..2502e74 --- /dev/null +++ b/tools/Crupest.SecretTool/Crupest.SecretTool/Crupest.SecretTool.csproj @@ -0,0 +1,34 @@ +<Project Sdk="Microsoft.NET.Sdk"> + + <PropertyGroup> + <OutputType>Exe</OutputType> + <TargetFramework>net8.0</TargetFramework> + <ImplicitUsings>enable</ImplicitUsings> + <Nullable>enable</Nullable> + </PropertyGroup> + + <ItemGroup> + <None Update="config.json.template"> + <CopyToOutputDirectory>PreserveNewest</CopyToOutputDirectory> + </None> + <None Update="proxy.txt"> + <CopyToOutputDirectory>PreserveNewest</CopyToOutputDirectory> + </None> + <None Update="vmess.txt"> + <CopyToOutputDirectory>PreserveNewest</CopyToOutputDirectory> + </None> + <None Update="hosts.txt"> + <CopyToOutputDirectory>PreserveNewest</CopyToOutputDirectory> + </None> + <None Update="sing-config.json.template"> + <CopyToOutputDirectory>PreserveNewest</CopyToOutputDirectory> + </None> + <None Update="sing-inbounds-mobile.json"> + <CopyToOutputDirectory>PreserveNewest</CopyToOutputDirectory> + </None> + <None Update="sing-inbounds-pc.json"> + <CopyToOutputDirectory>PreserveNewest</CopyToOutputDirectory> + </None> + </ItemGroup> + +</Project> diff --git a/tools/Crupest.SecretTool/Crupest.SecretTool/FileWatcher.cs b/tools/Crupest.SecretTool/Crupest.SecretTool/FileWatcher.cs new file mode 100644 index 0000000..26e9231 --- /dev/null +++ b/tools/Crupest.SecretTool/Crupest.SecretTool/FileWatcher.cs @@ -0,0 +1,26 @@ +namespace Crupest.SecretTool; + +public class FileWatcher(string directory, List<string> fileNames) +{ + public string Directory { get; set; } = directory; + public List<string> FileNames { get; set; } = fileNames; + + public delegate void OnChangedHandler(); + public event OnChangedHandler? 
OnChanged; + + public void Run() + { + var sourceWatcher = new FileSystemWatcher(Directory); + foreach (var fileName in FileNames) + { + sourceWatcher.Filters.Add(fileName); + } + sourceWatcher.NotifyFilter = NotifyFilters.LastWrite; + + while (true) + { + var result = sourceWatcher.WaitForChanged(WatcherChangeTypes.Changed | WatcherChangeTypes.Created); + OnChanged?.Invoke(); + } + } +} diff --git a/tools/Crupest.SecretTool/Crupest.SecretTool/GeoDataManager.cs b/tools/Crupest.SecretTool/Crupest.SecretTool/GeoDataManager.cs new file mode 100644 index 0000000..8f4c171 --- /dev/null +++ b/tools/Crupest.SecretTool/Crupest.SecretTool/GeoDataManager.cs @@ -0,0 +1,324 @@ +using System.IO.Compression; + +namespace Crupest.SecretTool; + +public interface IGeoSiteEntry +{ + bool IsInclude { get; } + string Value { get; } +} + +public record GeoSiteIncludeEntry(string Value, string ContainingSite) : IGeoSiteEntry +{ + public bool IsInclude => true; +} + +public record GeoSiteRuleEntry(HostMatchKind Kind, string Value, List<string> Attributes, string ContainingSite) : IGeoSiteEntry +{ + public bool IsInclude => false; + + public RoutingRuleMatcher GetRoutingRuleMatcher() => new(Kind, Value); +} + +public record GeoSite(string Name, List<IGeoSiteEntry> Entries) +{ + public static GeoSite Parse(string name, string str) + { + List<IGeoSiteEntry> entries = []; + var listConfig = new ListConfig(str); + foreach (var item in listConfig.Config) + { + var (value, line) = item; + + if (value.StartsWith("include:")) + { + var include = value["include:".Length..].Trim(); + if (include.Length == 0 || include.Contains(' ')) + { + throw new FormatException($"Invalid geo site rule '{name}' in line {line}. Invalid include value."); + } + entries.Add(new GeoSiteIncludeEntry(include, name)); + continue; + } + + var segments = value.Split(':', StringSplitOptions.TrimEntries | StringSplitOptions.RemoveEmptyEntries); + if (segments.Length > 2) + { + throw new FormatException($"Invalid geo site rule '{name}' in line {line}. More than one ':'."); + } + + HostMatchKind kind; + if (segments.Length == 2) + { + kind = segments[0] switch + { + "domain" => kind = HostMatchKind.DomainSuffix, + "full" => kind = HostMatchKind.DomainFull, + "keyword" => kind = HostMatchKind.DomainKeyword, + "regexp" => kind = HostMatchKind.DomainRegex, + _ => throw new FormatException($"Invalid geo site rule '{name}' in line {line}. Unknown matcher.") + }; + } + else + { + kind = HostMatchKind.DomainSuffix; + } + + var domainSegments = segments[^1].Split('@', StringSplitOptions.TrimEntries); + var domain = domainSegments[0]; + if (kind != HostMatchKind.DomainRegex && Uri.CheckHostName(domain) != UriHostNameType.Dns) + { + throw new FormatException($"Invalid geo site rule '{name}' in line {line}. Invalid domain."); + } + + List<string> attributes = []; + foreach (var s in domainSegments[1..]) + { + if (s.Length == 0) + { + throw new FormatException($"Invalid geo site rule '{name}' in line {line}. 
Empty attribute value."); + } + attributes.Add(s); + } + + entries.Add(new GeoSiteRuleEntry(kind, domain, attributes, name)); + } + return new GeoSite(name, entries); + } +} + +public class GeoSiteData(string directory) +{ + private static List<GeoSite> Parse(string directory) + { + var sites = new List<GeoSite>(); + foreach (var path in Directory.GetFileSystemEntries(directory)) + { + var content = File.ReadAllText(path); + sites.Add(GeoSite.Parse(Path.GetFileName(path), content)); + } + return sites; + } + + public string DataDirectory { get; } = directory; + + public List<GeoSite> Sites { get; } = Parse(directory); + + public GeoSite? GetSite(string name) + { + return Sites.Where(s => s.Name == name).FirstOrDefault(); + } + + public List<GeoSiteRuleEntry> GetEntriesRecursive(List<string> sites, + List<HostMatchKind>? onlyMatcherKinds = null, List<string>? onlyAttributes = null) + { + List<GeoSiteRuleEntry> entries = []; + HashSet<string> visited = []; + HashSet<HostMatchKind>? kinds = onlyMatcherKinds?.ToHashSet(); + + void Visit(string site) + { + if (visited.Contains(site)) + { + return; + } + + visited.Add(site); + var siteData = GetSite(site); + if (siteData == null) + { + return; + } + foreach (var entry in siteData.Entries) + { + if (entry is GeoSiteIncludeEntry includeEntry) + { + Visit(includeEntry.Value); + } + else if (entry is GeoSiteRuleEntry geoSiteRuleEntry) + { + if (kinds != null && !kinds.Contains(geoSiteRuleEntry.Kind)) + { + continue; + } + + if (onlyAttributes != null && !geoSiteRuleEntry.Attributes.Intersect(onlyAttributes).Any()) + { + continue; + } + + entries.Add(geoSiteRuleEntry); + } + } + } + + foreach (var s in sites) + { + Visit(s); + } + + return entries; + } +} + +public class GeoDataManager +{ + public const string GeoSiteFileName = "geosite.dat"; + public const string GeoIpFileName = "geoip.dat"; + public const string GeoIpCnFileName = "geoip-only-cn-private.dat"; + + public static class ToolGithub + { + public const string Organization = "v2fly"; + public const string GeoSiteRepository = "domain-list-community"; + public const string GeoIpRepository = "geoip"; + public const string GeoSiteReleaseFilename = "dlc.dat"; + public const string GeoIpReleaseFilename = "geoip.dat"; + public const string GeoIpCnReleaseFilename = "geoip-only-cn-private.dat"; + } + + public static GeoDataManager Instance { get; } = new GeoDataManager(); + + public record GeoDataAsset(string Name, string FileName, string GithubUser, string GithubRepo, string GithubReleaseFileName); + + public GeoDataManager() + { + Assets = + [ + new("geosite", GeoSiteFileName, ToolGithub.Organization, ToolGithub.GeoSiteRepository, ToolGithub.GeoSiteReleaseFilename), + new("geoip", GeoIpFileName, ToolGithub.Organization, ToolGithub.GeoIpRepository, ToolGithub.GeoIpReleaseFilename), + new("geoip-cn", GeoIpCnFileName, ToolGithub.Organization, ToolGithub.GeoIpRepository, ToolGithub.GeoIpCnReleaseFilename), + ]; + } + + public List<GeoDataAsset> Assets { get; set; } + + public GeoSiteData? 
GeoSiteData { get; set; } + + public GeoSiteData GetOrCreateGeoSiteData(bool clean, bool silent) + { + if (GeoSiteData is not null) { return GeoSiteData; } + GeoSiteData = DownloadAndGenerateGeoSiteData(clean, silent); + return GeoSiteData; + } + + private static string GetReleaseFileUrl(string user, string repo, string fileName) + { + return $"https://github.com/{user}/{repo}/releases/latest/download/{fileName}"; + } + + private static void GithubDownloadRelease(HttpClient httpClient, string user, string repo, string fileName, string outputPath, bool silent) + { + var url = GetReleaseFileUrl(user, repo, fileName); + if (!silent) Console.WriteLine($"Downloading {url} to {outputPath}"); + using var responseStream = httpClient.GetStreamAsync(url).Result; + using var outputFileStream = File.OpenWrite(outputPath); + responseStream.CopyTo(outputFileStream); + } + + public bool HasAllAssets(string directory, out List<string> missing) + { + missing = []; + foreach (var asset in Assets) + { + var assetPath = Path.Combine(directory, asset.FileName); + if (!File.Exists(assetPath)) + { + missing.Add(asset.Name); + } + } + return missing.Count == 0; + } + + public void Download(string outputDir, bool silent) + { + using var httpClient = new HttpClient(); + + foreach (var asset in Assets) + { + if (!silent) + { + Console.WriteLine($"Downloading {asset.Name}..."); + } + GithubDownloadRelease(httpClient, asset.GithubUser, asset.GithubRepo, asset.GithubReleaseFileName, Path.Combine(outputDir, asset.FileName), silent); + if (!silent) + { + Console.WriteLine($"Downloaded {asset.Name}!"); + } + } + + if (!File.Exists(Program.RestartLabelFilePath)) + { + File.Create(Program.RestartLabelFilePath); + } + else + { + File.SetLastWriteTime(Program.RestartLabelFilePath, DateTime.Now); + } + } + + private static string GetGithubRepositoryArchiveUrl(string user, string repo) + { + return $"https://github.com/{user}/{repo}/archive/refs/heads/master.zip"; + } + + private static void GithubDownloadRepository(HttpClient httpClient, string user, string repo, string outputPath, bool silent) + { + var url = GetGithubRepositoryArchiveUrl(user, repo); + if (!silent) { Console.WriteLine($"Begin to download data from {url} to {outputPath}."); } + using var responseStream = httpClient.GetStreamAsync(url).Result; + using var outputFileStream = File.OpenWrite(outputPath); + responseStream.CopyTo(outputFileStream); + if (!silent) { Console.WriteLine("Succeeded to download."); } + } + + private static void Unzip(string zipPath, string outputPath) + { + using var zip = ZipFile.OpenRead(zipPath) ?? 
throw new Exception($"Failed to open zip file {zipPath}"); + zip.ExtractToDirectory(outputPath); + } + + private static string DownloadAndExtractGeoDataRepository(bool cleanTempDirIfFailed, bool silent, out string tempDirectoryPath) + { + tempDirectoryPath = ""; + const string zipFileName = "v2ray-geosite-master.zip"; + using var httpClient = new HttpClient(); + var tempDirectory = Directory.CreateTempSubdirectory(Program.Name); + tempDirectoryPath = tempDirectory.FullName; + try + { + var archivePath = Path.Combine(tempDirectoryPath, zipFileName); + var extractPath = Path.Combine(tempDirectoryPath, "repo"); + GithubDownloadRepository(httpClient, ToolGithub.Organization, ToolGithub.GeoSiteRepository, archivePath, silent); + if (!silent) { Console.WriteLine($"Extract geo data to {extractPath}."); } + Directory.CreateDirectory(extractPath); + Unzip(archivePath, extractPath); + if (!silent) { Console.WriteLine($"Extraction done."); } + return Path.Join(extractPath, "domain-list-community-master"); + } + catch (Exception) + { + if (cleanTempDirIfFailed) + { + Directory.Delete(tempDirectoryPath, true); + } + throw; + } + } + + private static GeoSiteData DownloadAndGenerateGeoSiteData(bool clean, bool silent) + { + var repoDirectory = DownloadAndExtractGeoDataRepository(clean, silent, out var tempDirectoryPath); + try + { + return new GeoSiteData(Path.Join(repoDirectory, "data")); + } + finally + { + if (clean) + { + Directory.Delete(tempDirectoryPath, true); + } + } + } +} diff --git a/tools/Crupest.SecretTool/Crupest.SecretTool/HostMatchConfig.cs b/tools/Crupest.SecretTool/Crupest.SecretTool/HostMatchConfig.cs new file mode 100644 index 0000000..858333d --- /dev/null +++ b/tools/Crupest.SecretTool/Crupest.SecretTool/HostMatchConfig.cs @@ -0,0 +1,123 @@ +namespace Crupest.SecretTool; + +public enum HostMatchKind +{ + DomainFull, + DomainSuffix, + DomainKeyword, + DomainRegex, + Ip, + GeoSite, + GeoIp, +} + +public static class HostMatchKindExtensions +{ + public static bool IsDomain(this HostMatchKind kind) + { + return kind.IsNonRegexDomain() || kind == HostMatchKind.DomainRegex; + } + + public static bool IsNonRegexDomain(this HostMatchKind kind) + { + return kind is HostMatchKind.DomainFull or HostMatchKind.DomainSuffix or HostMatchKind.DomainKeyword; + } + + + public static List<HostMatchKind> DomainMatchKinds { get; } = [HostMatchKind.DomainFull, HostMatchKind.DomainSuffix, HostMatchKind.DomainKeyword, HostMatchKind.DomainRegex]; + + public static List<HostMatchKind> NonRegexDomainMatchKinds { get; } = [HostMatchKind.DomainFull, HostMatchKind.DomainSuffix, HostMatchKind.DomainKeyword]; + + public static List<HostMatchKind> SupportedInSingRouteMatchKinds { get; } = [..DomainMatchKinds, HostMatchKind.Ip]; + + public static bool IsSupportedInSingRoute(this HostMatchKind kind) => SupportedInSingRouteMatchKinds.Contains(kind); +} + +public record HostMatchConfigItem(HostMatchKind Kind, string MatchString, List<string> Values); + +public class HostMatchConfig(string configString, List<HostMatchKind> allowedMatchKinds, int minComponentCount = -1, int maxComponentCount = -1) +{ + private static List<HostMatchConfigItem> Parse(string configString, List<HostMatchKind> allowedMatchKinds, int minComponentCount = -1, int maxComponentCount = -1) + { + var items = new ListConfig(configString).Config; + var result = new List<HostMatchConfigItem>(); + + foreach (var item in items) + { + var lineNumber = item.LineNumber; + var line = item.Value; + var hasExplicitMatchKind = false; + var segments = 
line.Split(' ', StringSplitOptions.TrimEntries | StringSplitOptions.RemoveEmptyEntries).ToList(); + + foreach (var matchKind in Enum.GetValues<HostMatchKind>()) + { + var matchKindName = Enum.GetName(matchKind) ?? throw new Exception("No such match kind."); + if (segments[0] == matchKindName) + { + hasExplicitMatchKind = true; + + if (segments.Count < 2) + { + throw new FormatException($"Explicit match item needs a value in line {lineNumber}."); + } + if (allowedMatchKinds.Contains(matchKind)) + { + if (matchKind.IsNonRegexDomain() && Uri.CheckHostName(matchKindName) != UriHostNameType.Dns) + { + throw new FormatException($"Invalid domain format in line {lineNumber}."); + } + + var components = segments[2..].ToList(); + if (minComponentCount > 0 && components.Count < minComponentCount) + { + throw new FormatException($"Too few components in line {lineNumber}, at least {minComponentCount} required."); + } + if (maxComponentCount >= 0 && components.Count > maxComponentCount) + { + throw new FormatException($"Too many components in line {lineNumber}, only {maxComponentCount} allowed."); + } + result.Add(new HostMatchConfigItem(matchKind, segments[1], components)); + } + else + { + throw new FormatException($"Match kind {matchKindName} is not allowed at line {lineNumber}."); + } + } + } + + if (!hasExplicitMatchKind) + { + if (minComponentCount > 0 && segments.Count - 1 < minComponentCount) + { + throw new FormatException($"Too few components in line {lineNumber}, at least {minComponentCount} required."); + } + if (maxComponentCount >= 0 && segments.Count - 1 > maxComponentCount) + { + throw new FormatException($"Too many components in line {lineNumber}, only {maxComponentCount} allowed."); + } + result.Add(new HostMatchConfigItem(HostMatchKind.DomainSuffix, segments[0], segments.Count == 1 ? [] : segments[1..])); + } + } + return result; + } + + public string ConfigString { get; } = configString; + public List<HostMatchKind> AllowedMatchKinds { get; } = allowedMatchKinds; + public int MinComponentCount { get; } = minComponentCount; + public int MaxComponentCount { get; } = maxComponentCount; + public List<HostMatchConfigItem> Items { get; } = Parse(configString, allowedMatchKinds, minComponentCount, maxComponentCount); +} + +public class HostMatchConfigFile +{ + public HostMatchConfigFile(string path, List<HostMatchKind> allowedMatchKinds, int minComponentCount = -1, int maxComponentCount = -1) + { + Path = path; + FileContent = File.ReadAllText(path); + Config = new HostMatchConfig(FileContent, allowedMatchKinds, minComponentCount, maxComponentCount); ; + } + + public string Path { get; } + public string FileContent { get; } + public HostMatchConfig Config { get; } +} diff --git a/tools/Crupest.SecretTool/Crupest.SecretTool/Program.cs b/tools/Crupest.SecretTool/Crupest.SecretTool/Program.cs new file mode 100644 index 0000000..18b1ac0 --- /dev/null +++ b/tools/Crupest.SecretTool/Crupest.SecretTool/Program.cs @@ -0,0 +1,113 @@ +using System.Reflection; + +namespace Crupest.SecretTool; + +public static class Program +{ + public static string Name { get; } = typeof(Program).Namespace ?? throw new Exception("Can't get the name of Crupest.SecretTool."); + + public static string CrupestSecretToolDirectory { get; } = + Environment.GetEnvironmentVariable("CRUPEST_V2RAY_DIR") ?? + Path.GetFullPath(Path.GetDirectoryName( + Assembly.GetExecutingAssembly().Location) ?? 
throw new Exception("Can't get the path of Crupest.SecretTool.")); + + private const string ConfigOutputFileName = "config.json"; + private const string SurgeRuleSetChinaOutputFileName = "ChinaRuleSet.txt"; + private const string SurgeRuleSetGlobalOutputFileName = "GlobalRuleSet.txt"; + + public const string RestartLabelFileName = "restart.label"; + public static string RestartLabelFilePath { get; } = Path.Combine(CrupestSecretToolDirectory, RestartLabelFileName); + + public static void RunToolAndWatchConfigChange() + { + var executablePath = Controller.FindExecutable(CrupestSecretToolDirectory, out var isLocal) ?? + throw new Exception("Can't find v2ray executable either in Crupest.SecretTool directory or in PATH."); + + string? assetsPath; + if (isLocal) + { + assetsPath = CrupestSecretToolDirectory; + var assetsComplete = GeoDataManager.Instance.HasAllAssets(CrupestSecretToolDirectory, out var missing); + if (!assetsComplete) + { + throw new Exception($"Missing assets: {string.Join(", ", missing)} in {CrupestSecretToolDirectory}. This v2ray is local. So only use assets in Crupest.SecretTool directory."); + } + } + else + { + assetsPath = CrupestSecretToolDirectory; + var assetsComplete = GeoDataManager.Instance.HasAllAssets(CrupestSecretToolDirectory, out var missing); + if (!assetsComplete) + { + Console.WriteLine($"Missing assets: {string.Join(", ", missing)} in {CrupestSecretToolDirectory}. This v2ray is global. So fallback to its own assets."); + assetsPath = null; + } + } + + var controller = new Controller(executablePath, Path.Combine(CrupestSecretToolDirectory, ConfigOutputFileName), assetsPath); + var configFileWatcher = new FileWatcher(CrupestSecretToolDirectory, + [.. ToolConfig.ConfigFileNames, RestartLabelFileName]); + + ToolConfig.FromDirectoryAndWriteToFile(CrupestSecretToolDirectory, Path.Join(CrupestSecretToolDirectory, ConfigOutputFileName)); + controller.Start(); + + configFileWatcher.OnChanged += () => + { + ToolConfig.FromDirectoryAndWriteToFile(CrupestSecretToolDirectory, Path.Join(CrupestSecretToolDirectory, ConfigOutputFileName)); + controller.Restart(); + }; + + configFileWatcher.Run(); + } + + public static void Main(string[] args) + { + if (args.Length != 0) + { + var verb = args[0].ToLower(); + if (verb == "download-geodata" || verb == "dg") + { + if (args.Length != 1) + { + throw new Exception("Invalid command line arguments. download-geodata requires no arguments."); + } + GeoDataManager.Instance.Download(CrupestSecretToolDirectory, false); + return; + } + else if (verb == "generate-surge-rule-set" || verb == "gsr") + { + if (args.Length != 1) + { + throw new Exception("Invalid command line arguments. download-geodata requires no arguments."); + } + SurgeConfigGenerator.GenerateTo( + CrupestSecretToolDirectory, + Path.Join(CrupestSecretToolDirectory, SurgeRuleSetChinaOutputFileName), + Path.Join(CrupestSecretToolDirectory, SurgeRuleSetGlobalOutputFileName), + true, true + ); + return; + } + else if (verb == "generate-sing-config" || verb == "gs") + { + if (args.Length != 2 || args[1].ToLower() is not ("pc" or "mobile")) + { + throw new Exception("Invalid command line arguments. generate-sing-config requires 1 argument. 
The argument must be either 'pc' or 'mobile'."); + } + + var config = SingToolConfig.FromDirectory(CrupestSecretToolDirectory, args[1].ToLower() == "mobile", true, true); + Console.Out.WriteLine(config.ToSingConfigString()); + return; + } + else if (verb == "generate" || verb == "g") + { + var config = ToolConfig.FromDirectory(CrupestSecretToolDirectory); + Console.Out.WriteLine(config.ToJsonStringV4()); + return; + } + throw new Exception("Invalid command line arguments."); + } + + RunToolAndWatchConfigChange(); + } +} diff --git a/tools/Crupest.SecretTool/Crupest.SecretTool/Properties/PublishProfiles/FolderProfile.pubxml b/tools/Crupest.SecretTool/Crupest.SecretTool/Properties/PublishProfiles/FolderProfile.pubxml new file mode 100644 index 0000000..5fca454 --- /dev/null +++ b/tools/Crupest.SecretTool/Crupest.SecretTool/Properties/PublishProfiles/FolderProfile.pubxml @@ -0,0 +1,13 @@ +<?xml version="1.0" encoding="utf-8"?> +<!-- +https://go.microsoft.com/fwlink/?LinkID=208121. +--> +<Project> + <PropertyGroup> + <Configuration>Release</Configuration> + <Platform>Any CPU</Platform> + <PublishDir>bin\Release\net8.0\publish\</PublishDir> + <PublishProtocol>FileSystem</PublishProtocol> + <_TargetId>Folder</_TargetId> + </PropertyGroup> +</Project>
\ No newline at end of file diff --git a/tools/Crupest.SecretTool/Crupest.SecretTool/Proxy.cs b/tools/Crupest.SecretTool/Crupest.SecretTool/Proxy.cs new file mode 100644 index 0000000..d2703ba --- /dev/null +++ b/tools/Crupest.SecretTool/Crupest.SecretTool/Proxy.cs @@ -0,0 +1,76 @@ +namespace Crupest.SecretTool; + +public abstract class Proxy(string tag) : IV4ConfigObject, ISingConfigObject +{ + public string Tag { get; set; } = tag; + + public abstract V4ConfigJsonObjects.Outbound ToJsonObjectV4(); + public abstract SingConfigJsonObjects.OutboundBase ToJsonObjectSing(); + + object IV4ConfigObject.ToJsonObjectV4() + { + return ToJsonObjectV4(); + } + + object ISingConfigObject.ToJsonObjectSing() + { + return ToJsonObjectSing(); + } +} + +public class HttpProxy(string host, int port, string tag) : Proxy(tag) +{ + public string Host { get; set; } = host; + public int Port { get; set; } = port; + + public override SingConfigJsonObjects.OutboundBase ToJsonObjectSing() + { + throw new NotImplementedException("Http proxy is not supported in sing now."); + } + + public override V4ConfigJsonObjects.Outbound ToJsonObjectV4() + { + return new V4ConfigJsonObjects.Outbound(Tag, "http", + new V4ConfigJsonObjects.HttpOutboundSettings([new V4ConfigJsonObjects.HttpOutboundServer(Host, Port, [])]), + null + ); + } +} + + +public class VmessProxy(string host, int port, string userId, string path, string tag) : Proxy(tag) +{ + public string Host { get; set; } = host; + public int Port { get; set; } = port; + public string Path { get; set; } = path; + public string UserId { get; set; } = userId; + + public override SingConfigJsonObjects.OutboundBase ToJsonObjectSing() + { + return new SingConfigJsonObjects.VmessOutbound(Tag, Host, Port, UserId, + Transport: new SingConfigJsonObjects.V2rayWebsocketTransport(Path, new Dictionary<string, string> { { "Host", Host } }), + Tls: new SingConfigJsonObjects.OutboundTls(true)); + } + + public override V4ConfigJsonObjects.Outbound ToJsonObjectV4() + { + return new V4ConfigJsonObjects.Outbound(Tag, "vmess", + new V4ConfigJsonObjects.VmessOutboundSettings( + [new V4ConfigJsonObjects.VnextServer(Host, Port, [new V4ConfigJsonObjects.VnextServerUser(UserId, 0, "auto", 0)])]), + new V4ConfigJsonObjects.WsStreamSettings("ws", "tls", new V4ConfigJsonObjects.WsSettings(Path, new() { ["Host"] = Host })) + ); + } + + public static VmessProxy CreateFromConfigString(string configString, string tag) + { + var config = new DictionaryConfig(configString, ["host", "port", "userid", "path"]); + var portString = config.GetItemCaseInsensitive("port").Value; + if (!int.TryParse(portString, out var port) || port <= 0) + { + throw new FormatException($"Invalid port number: {portString}: not an integer or is a invalid number."); + } + return new VmessProxy(config.GetItemCaseInsensitive("host").Value, port, + config.GetItemCaseInsensitive("userid").Value, config.GetItemCaseInsensitive("path").Value, tag + ); + } +} diff --git a/tools/Crupest.SecretTool/Crupest.SecretTool/ProxyFile.cs b/tools/Crupest.SecretTool/Crupest.SecretTool/ProxyFile.cs new file mode 100644 index 0000000..81698a3 --- /dev/null +++ b/tools/Crupest.SecretTool/Crupest.SecretTool/ProxyFile.cs @@ -0,0 +1,31 @@ +namespace Crupest.SecretTool;
+
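+// Parses proxy.txt into a list of routing rule matchers; GeoSite matchers can
+// be expanded against downloaded geo-site data by the helpers below.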
+public class ProxyFile : HostMatchConfigFile
+{
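+ // All match kinds are allowed; maxComponentCount: 0 means a rule line carries no extra components after the match string.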
+ public ProxyFile(string path) : base(path, [.. Enum.GetValues<HostMatchKind>()], maxComponentCount: 0)
+ {
+ RoutingRuleMatchers = Config.Items.Select(i => new RoutingRuleMatcher(i.Kind, i.MatchString)).ToList();
+ }
+
+ public List<RoutingRuleMatcher> RoutingRuleMatchers { get; }
+
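+ // Expand GeoSite matchers recursively and keep only domain entries carrying the "cn" attribute.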
+ public List<RoutingRuleMatcher> GetChinaRulesByGeoSite(GeoSiteData geoSiteData)
+ {
+ var geoSites = RoutingRuleMatchers.Where(m => m.MatchKind == HostMatchKind.GeoSite).Select(i => i.MatchString).ToList();
+ return geoSiteData.GetEntriesRecursive(geoSites, HostMatchKindExtensions.DomainMatchKinds, ["cn"]).Select(e => e.GetRoutingRuleMatcher()).ToList();
+ }
+
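+ // Replace GeoSite matchers with their flattened domain entries (optionally dropping "cn"-tagged ones) and keep all other matchers unchanged.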
+ public List<RoutingRuleMatcher> GetRulesFlattenGeoSite(GeoSiteData geoSiteData, bool noCn = false)
+ {
+ var geoSites = RoutingRuleMatchers.Where(m => m.MatchKind == HostMatchKind.GeoSite).Select(i => i.MatchString).ToList();
+ var flattenGeoSiteRules = geoSiteData.GetEntriesRecursive(geoSites, HostMatchKindExtensions.DomainMatchKinds)
+ .Where(e => !noCn || !e.Attributes.Contains("cn"))
+ .Select(e => e.GetRoutingRuleMatcher())
+ .ToList();
+ var otherRules = RoutingRuleMatchers.Where(m => m.MatchKind != HostMatchKind.GeoSite).ToList();
+ return [
+ ..flattenGeoSiteRules,
+ ..otherRules
+ ];
+ }
+}
diff --git a/tools/Crupest.SecretTool/Crupest.SecretTool/Routing.cs b/tools/Crupest.SecretTool/Crupest.SecretTool/Routing.cs new file mode 100644 index 0000000..fdf1b93 --- /dev/null +++ b/tools/Crupest.SecretTool/Crupest.SecretTool/Routing.cs @@ -0,0 +1,155 @@ +namespace Crupest.SecretTool; + +public record RoutingRuleMatcher(HostMatchKind MatchKind, string MatchString) +{ + public RoutingRule ToRoutingRule(string OutboundTag) => new(MatchKind, MatchString, OutboundTag); +} + +public record RoutingRule(HostMatchKind MatchKind, string MatchString, string OutboundTag) : IV4ConfigObject +{ + public string ToolConfigString => MatchKind switch + { + HostMatchKind.DomainFull => $"full:{MatchString}", + HostMatchKind.DomainSuffix => $"domain:{MatchString}", + HostMatchKind.DomainKeyword => MatchString, + HostMatchKind.DomainRegex => $"regexp:{MatchString}", + HostMatchKind.Ip => MatchString, + HostMatchKind.GeoSite => $"geosite:{MatchString}", + HostMatchKind.GeoIp => $"geoip:{MatchString}", + _ => throw new ArgumentException("Invalid matcher kind.") + }; + + public string ToolConfigStringSing => MatchKind.IsSupportedInSingRoute() ? MatchString : throw new ArgumentException("Unsupported matcher kind for sing."); + + public static Dictionary<string, List<RoutingRule>> GroupByOutboundTag(List<RoutingRule> rules) + => rules.GroupBy(r => r.OutboundTag).Select(g => (g.Key, g.ToList())).ToDictionary(); + + public static Dictionary<HostMatchKind, List<RoutingRule>> GroupByMatchKind(List<RoutingRule> rules) + => rules.GroupBy(r => r.MatchKind).Select(g => (g.Key, g.ToList())).ToDictionary(); + + public static List<List<RoutingRule>> GroupByOutboundTagAndMatcherKind(List<RoutingRule> rules) + => GroupByOutboundTag(rules).Values.SelectMany((groupByTag) => GroupByMatchKind(groupByTag).Values).ToList(); + + public static SingConfigJsonObjects.RouteRule ListToJsonObjectSing(List<RoutingRule> rules) + { + if (rules.Count == 0) + { + throw new ArgumentException("Rule list is empty."); + } + + var outboundTag = rules[0].OutboundTag; + + if (rules.Any(r => !r.MatchKind.IsSupportedInSingRoute())) + { + throw new ArgumentException("Rules must have matcher kinds supported in sing."); + } + + if (rules.Any(r => r.OutboundTag != outboundTag)) + { + throw new ArgumentException("Rules must have the same outbound tag."); + } + + return new SingConfigJsonObjects.RouteRule(Outbound: outboundTag, + Domain: rules.Where(r => r.MatchKind == HostMatchKind.DomainFull).Select(r => r.ToolConfigStringSing).ToList(), + DomainSuffix: rules.Where(r => r.MatchKind == HostMatchKind.DomainSuffix).Select(r => r.ToolConfigStringSing).ToList(), + DomainKeyword: rules.Where(r => r.MatchKind == HostMatchKind.DomainKeyword).Select(r => r.ToolConfigStringSing).ToList(), + DomainRegex: rules.Where(r => r.MatchKind == HostMatchKind.DomainRegex).Select(r => r.ToolConfigStringSing).ToList(), + IpCidr: rules.Where(r => r.MatchKind == HostMatchKind.Ip).Select(r => r.ToolConfigStringSing).ToList() + ); + } + + public static V4ConfigJsonObjects.RoutingRule ListToJsonObject(List<RoutingRule> rules) + { + if (rules.Count == 0) + { + throw new ArgumentException("Rule list is empty."); + } + + var matchKind = rules[0].MatchKind; + var outboundTag = rules[0].OutboundTag; + + if (rules.Any(r => r.OutboundTag != outboundTag) || rules.Any(r => r.MatchKind != matchKind)) + { + throw new ArgumentException("Rules must have the same matcher kind and outbound tag."); + } + + List<string> toolConfigList = rules.Select(r => r.ToolConfigString).ToList(); + + return 
new V4ConfigJsonObjects.RoutingRule(OutboundTag: outboundTag, + Ip: (matchKind is HostMatchKind.Ip or HostMatchKind.GeoIp) ? toolConfigList : null, + Domains: (matchKind.IsDomain() || matchKind == HostMatchKind.GeoSite) ? toolConfigList : null + ); + } + + public RoutingRule CloneGeositeWithCnAttribute(string outboundTag) + { + if (MatchKind is not HostMatchKind.GeoSite) + { + throw new ArgumentException("Matcher kind must be GeoSite."); + } + + return new RoutingRule(HostMatchKind.GeoSite, $"{MatchString}@cn", outboundTag); + } + + public RoutingRuleMatcher GetMatcher() => new(MatchKind, MatchString); + + public V4ConfigJsonObjects.RoutingRule ToJsonObjectV4() => ListToJsonObject([this]); + + object IV4ConfigObject.ToJsonObjectV4() => ToJsonObjectV4(); +} + +public record Routing(List<RoutingRule> Rules) : IV4ConfigObject, ISingConfigObject +{ + public List<RoutingRule> CreateGeositeCnDirectRules() + { + return Rules.Where(r => r.MatchKind is HostMatchKind.GeoSite) + .Select(r => r.CloneGeositeWithCnAttribute("direct")).ToList(); + } + + public SingConfigJsonObjects.Route ToJsonObjectSing() + { + List<SingConfigJsonObjects.RouteRule> ruleJsonObjects = [ new SingConfigJsonObjects.RouteRule(Outbound: "dns-out", Protocol: "dns")]; + ruleJsonObjects.AddRange(RoutingRule.GroupByOutboundTag(Rules).Values.Select(RoutingRule.ListToJsonObjectSing)); + return new SingConfigJsonObjects.Route(ruleJsonObjects); + } + + public V4ConfigJsonObjects.Routing ToJsonObjectV4(string domainStrategy = "IpOnDemand", bool directGeositeCn = true) + { + List<V4ConfigJsonObjects.RoutingRule> ruleJsonObjects = []; + + if (directGeositeCn) + { + ruleJsonObjects.Add(RoutingRule.ListToJsonObject(CreateGeositeCnDirectRules())); + } + + ruleJsonObjects.AddRange(RoutingRule.GroupByOutboundTagAndMatcherKind(Rules).Select(RoutingRule.ListToJsonObject)); + + return new V4ConfigJsonObjects.Routing(ruleJsonObjects, domainStrategy); + } + + object IV4ConfigObject.ToJsonObjectV4() => ToJsonObjectV4(); + + object ISingConfigObject.ToJsonObjectSing() => ToJsonObjectSing(); + + public static Routing FromProxyFile(ProxyFile proxyFile, string outboundTag) + { + return new Routing( + proxyFile.RoutingRuleMatchers.Select(m => m.ToRoutingRule(outboundTag)).ToList()); + } + + public static Routing FromProxyFileForSing(ProxyFile proxyFile, GeoSiteData geoSiteData, string outboundTag, string? directCnOutboundTag = null) + { + List<RoutingRule> rules = []; + + if (directCnOutboundTag is not null) + { + rules.AddRange(proxyFile.GetChinaRulesByGeoSite(geoSiteData).Select(m => m.ToRoutingRule(directCnOutboundTag)).ToList()); + } + + rules.AddRange(proxyFile.GetRulesFlattenGeoSite(geoSiteData).Where(m => m.MatchKind.IsSupportedInSingRoute()).Select(m => m.ToRoutingRule(outboundTag)).ToList()); + + return new Routing( + rules + ); + } +} diff --git a/tools/Crupest.SecretTool/Crupest.SecretTool/SingConfigJsonObjects.cs b/tools/Crupest.SecretTool/Crupest.SecretTool/SingConfigJsonObjects.cs new file mode 100644 index 0000000..56b5563 --- /dev/null +++ b/tools/Crupest.SecretTool/Crupest.SecretTool/SingConfigJsonObjects.cs @@ -0,0 +1,20 @@ +namespace Crupest.SecretTool;
+
+public static class SingConfigJsonObjects
+{
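+ // Minimal subset of the sing-box configuration schema used by this tool; it is serialized with snake_case keys and null members omitted.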
+ public interface IObject;
+
+ public record OutboundTls(bool Enabled);
+ public record V2rayTransportBase(string Type);
+ public record V2rayWebsocketTransport(string Path, Dictionary<string, string>? Headers = null) : V2rayTransportBase("ws");
+ public record OutboundBase(string Tag, string Type) : IObject;
+ public record VmessOutbound(string Tag, string Server, int ServerPort, string Uuid, string Security = "auto",
+ V2rayTransportBase? Transport = null, OutboundTls? Tls = null) : OutboundBase(Tag, "vmess");
+
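+ // Route rule fields; lists that stay null are omitted from the generated JSON.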
+ public record RouteRule(List<string>? Domain = null, List<string>? DomainSuffix = null, List<string>? DomainKeyword = null,
+ List<string>? DomainRegex = null, List<string>? IpCidr = null, List<string>? SourceIpCidr = null, string? Protocol = null,
+ List<int>? Port = null, List<int>? SourcePort = null, List<string>? PortRange = null, List<string>? SourcePortRange = null,
+ string? Network = null, List<string>? Inbound = null, string? Outbound = null) : IObject;
+
+ public record Route(List<RouteRule> Rules) : IObject;
+}
diff --git a/tools/Crupest.SecretTool/Crupest.SecretTool/StaticHosts.cs b/tools/Crupest.SecretTool/Crupest.SecretTool/StaticHosts.cs new file mode 100644 index 0000000..b112e1c --- /dev/null +++ b/tools/Crupest.SecretTool/Crupest.SecretTool/StaticHosts.cs @@ -0,0 +1,40 @@ +namespace Crupest.SecretTool; + +public record StaticHostRule(HostMatchKind MatchKind, string MatchString, List<string> ResolveResult) +{ + public string AddressString() + { + return MatchKind switch + { + HostMatchKind.DomainFull => MatchString, + HostMatchKind.DomainSuffix => $"domain:{MatchString}", + HostMatchKind.DomainKeyword => $"keyword:{MatchString}", + HostMatchKind.DomainRegex => $"regexp:{MatchString}", + _ => throw new ArgumentOutOfRangeException($"Match kind {MatchKind} is not allowed in static host rule."), + }; + } + + public object ResolveResultToJsonObject() + { + return ResolveResult.Count == 1 ? ResolveResult[0] : ResolveResult; + } +} + +public class StaticHosts(List<StaticHostRule> rules) : IV4ConfigObject +{ + public List<StaticHostRule> Rules { get; } = rules; + + public Dictionary<string, object> ToJsonObjectV4() => + Rules.ToDictionary(rule => rule.AddressString(), rule => rule.ResolveResultToJsonObject()); + + object IV4ConfigObject.ToJsonObjectV4() + { + return ToJsonObjectV4(); + } + + public static StaticHosts CreateFromHostMatchConfigString(string configString) + { + var config = new HostMatchConfig(configString, HostMatchKindExtensions.DomainMatchKinds, minComponentCount: 1); + return new StaticHosts(config.Items.Select(i => new StaticHostRule(i.Kind, i.MatchString, [.. i.Values])).ToList()); + } +} diff --git a/tools/Crupest.SecretTool/Crupest.SecretTool/SurgeConfigGenerator.cs b/tools/Crupest.SecretTool/Crupest.SecretTool/SurgeConfigGenerator.cs new file mode 100644 index 0000000..8a57c9f --- /dev/null +++ b/tools/Crupest.SecretTool/Crupest.SecretTool/SurgeConfigGenerator.cs @@ -0,0 +1,56 @@ +namespace Crupest.SecretTool; + +public class SurgeConfigGenerator(ProxyFile proxyFile, GeoSiteData geoData) +{ + public ProxyFile ProxyFile => proxyFile; + public GeoSiteData GeoData => geoData; + + private static string ToSurgeRuleString(HostMatchKind kind, string value) + { + var ruleType = kind switch + { + HostMatchKind.DomainFull => "DOMAIN", + HostMatchKind.DomainSuffix => "DOMAIN-SUFFIX", + HostMatchKind.DomainKeyword => "DOMAIN-KEYWORD", + HostMatchKind.DomainRegex => "URL-REGEX", + _ => throw new Exception("Unacceptable matcher kind for Surge rule.") + }; + + return $"{ruleType},{value}"; + } + + public static string GenerateSurgeRuleSetString(List<RoutingRuleMatcher> rules) + { + return string.Join('\n', rules.Select(r => ToSurgeRuleString(r.MatchKind, r.MatchString))); + } + + public string GenerateChinaRuleSet() + { + return GenerateSurgeRuleSetString(proxyFile.GetChinaRulesByGeoSite(GeoData)); + } + + public string GenerateGlobalRuleSet() + { + return GenerateSurgeRuleSetString(proxyFile.GetRulesFlattenGeoSite(geoData, true)); + } + + public static void GenerateTo(ProxyFile proxyFile, GeoSiteData geoSiteData, string cnPath, string globalPath, bool silent) + { + var generator = new SurgeConfigGenerator(proxyFile, geoSiteData); + File.WriteAllText(cnPath, generator.GenerateChinaRuleSet()); + if (!silent) Console.WriteLine($"China rule set written to {cnPath}."); + File.WriteAllText(globalPath, generator.GenerateGlobalRuleSet()); + if (!silent) Console.WriteLine($"Global rule set written to {globalPath}."); + } + + public static void GenerateTo(string directory, string cnPath, string 
globalPath, bool clean, bool silent) + { + var geoSiteData = GeoDataManager.Instance.GetOrCreateGeoSiteData(clean, silent); + var proxyFile = new ProxyFile(Path.Combine(directory, ToolConfig.ProxyConfigFileName)); + var generator = new SurgeConfigGenerator(proxyFile, geoSiteData); + File.WriteAllText(cnPath, generator.GenerateChinaRuleSet()); + if (!silent) Console.WriteLine($"China rule set written to {cnPath}."); + File.WriteAllText(globalPath, generator.GenerateGlobalRuleSet()); + if (!silent) Console.WriteLine($"Global rule set written to {globalPath}."); + } +} diff --git a/tools/Crupest.SecretTool/Crupest.SecretTool/Template.cs b/tools/Crupest.SecretTool/Crupest.SecretTool/Template.cs new file mode 100644 index 0000000..1fe91b1 --- /dev/null +++ b/tools/Crupest.SecretTool/Crupest.SecretTool/Template.cs @@ -0,0 +1,231 @@ +using System.Diagnostics.CodeAnalysis; +using System.Text; + +namespace Crupest.SecretTool; + +public class Template +{ + private enum ParseState + { + Text, + Dollar, + LeftBracket, + VariableName, + VariableNameFinish, + } + + private interface ITemplateNode + { + string Render(Dictionary<string, string> values); + } + + private class TextNode(string text) : ITemplateNode + { + + public string Text { get; } = text; + + public string Render(Dictionary<string, string> values) + { + return Text; + } + } + + private class VariableNode(string variableName) : ITemplateNode + { + public string VariableName { get; } = variableName; + + public string Render(Dictionary<string, string> values) + { + return values.GetValueOrDefault(VariableName) ?? ""; + } + } + + public Template(string templateString) + { + TemplateString = templateString; + Nodes = Parse(templateString); + VariableNames = Nodes.OfType<VariableNode>().Select(node => node.VariableName).ToList(); + } + + private static List<ITemplateNode> Parse(string templateString) + { + int lineNumber = 1; + int columnNumber = 0; + List<ITemplateNode> nodes = []; + ParseState state = ParseState.Text; + StringBuilder stringBuilder = new(); + + string GetPosition() => $"line {lineNumber} column{columnNumber}"; + + [DoesNotReturn] + void ReportInvalidState(string message) + { + throw new Exception($"Invalid state at {GetPosition()}: {message}"); + } + + [DoesNotReturn] + void ReportInvalidCharacter(char c) + { + throw new FormatException($"Unexpected '{c}' at {GetPosition()}."); + } + + void FinishText() + { + if (state != ParseState.Text) + { + ReportInvalidState($"Can't call FinishText here."); + } + + if (stringBuilder.Length > 0) + { + nodes.Add(new TextNode(stringBuilder.ToString())); + stringBuilder.Clear(); + } + } + + foreach (var c in templateString) + { + if (c == '\n') + { + lineNumber++; + columnNumber = 0; + } + + columnNumber++; + + switch (c) + { + case '$': + if (state == ParseState.Text) + { + FinishText(); + state = ParseState.Dollar; + } + else if (state == ParseState.Dollar) + { + if (stringBuilder.Length > 0) + { + throw new Exception($"Invalid state at {GetPosition()}: when we meet the second '$', text builder should be empty."); + } + stringBuilder.Append(c); + state = ParseState.Text; + } + else + { + throw new FormatException($"Unexpected '$' at {GetPosition()}."); + } + break; + case '{': + if (state == ParseState.Text) + { + stringBuilder.Append(c); + } + else if (state == ParseState.Dollar) + { + state = ParseState.LeftBracket; + } + else + { + throw new Exception($"Unexpected '{{' at {GetPosition()}."); + } + break; + case '}': + if (state == ParseState.Text) + { + stringBuilder.Append(c); + state = 
ParseState.Text; + } + else if (state == ParseState.VariableName || state == ParseState.VariableNameFinish) + { + nodes.Add(new VariableNode(stringBuilder.ToString())); + stringBuilder.Clear(); + state = ParseState.Text; + } + else + { + ReportInvalidCharacter(c); + } + break; + default: + if (state == ParseState.Dollar) + { + ReportInvalidCharacter(c); + } + + if (char.IsWhiteSpace(c)) + { + if (state == ParseState.LeftBracket || state == ParseState.VariableNameFinish) + { + continue; + } + else if (state == ParseState.Text) + { + stringBuilder.Append(c); + } + else if (state == ParseState.VariableName) + { + state = ParseState.VariableNameFinish; + } + else + { + ReportInvalidCharacter(c); + } + } + else + { + if (state == ParseState.Text) + { + stringBuilder.Append(c); + } + else if (state == ParseState.LeftBracket || state == ParseState.VariableName) + { + stringBuilder.Append(c); + state = ParseState.VariableName; + } + else + { + ReportInvalidCharacter(c); + } + } + break; + } + } + + if (state == ParseState.Text) + { + FinishText(); + } + else + { + throw new FormatException("Unexpected end of template string."); + } + + return nodes; + } + + public string TemplateString { get; } + private List<ITemplateNode> Nodes { get; set; } + public List<string> VariableNames { get; } + + public string Generate(Dictionary<string, string> values, bool allowMissingVariable = false) + { + StringBuilder stringBuilder = new(); + foreach (var node in Nodes) + { + if (node is TextNode textNode) + { + stringBuilder.Append(textNode.Text); + } + else if (node is VariableNode variableNode) + { + var hasValue = values.TryGetValue(variableNode.VariableName, out var value); + if (!hasValue && !allowMissingVariable) + { + throw new Exception($"Variable '{variableNode.VariableName}' is not set."); + } + stringBuilder.Append(hasValue ? value : string.Empty); + } + } + return stringBuilder.ToString(); + } +} diff --git a/tools/Crupest.SecretTool/Crupest.SecretTool/ToolConfig.cs b/tools/Crupest.SecretTool/Crupest.SecretTool/ToolConfig.cs new file mode 100644 index 0000000..809fba1 --- /dev/null +++ b/tools/Crupest.SecretTool/Crupest.SecretTool/ToolConfig.cs @@ -0,0 +1,271 @@ +using System.Text.Json; +using System.Text.Json.Serialization; + +namespace Crupest.SecretTool; + +public interface IV4ConfigObject +{ + object ToJsonObjectV4(); +} + +public interface ISingConfigObject +{ + object ToJsonObjectSing(); +} + +public class ToolConfigBase(Template template, List<Proxy> proxies, Routing router) +{ + protected class JsonInterfaceConverter<Interface> : JsonConverter<Interface> + { + public override Interface Read( + ref Utf8JsonReader reader, + Type typeToConvert, + JsonSerializerOptions options) + { + throw new NotImplementedException(); + } + + public override void Write( + Utf8JsonWriter writer, + Interface value, + JsonSerializerOptions options) + { + JsonSerializer.Serialize(writer, value, typeof(object), options); + } + } + + public const string VmessConfigFileName = "vmess.txt"; + public const string ProxyConfigFileName = "proxy.txt"; + + public Template Template { get; set; } = template; + public List<Proxy> Proxies { get; set; } = proxies; + public Routing Routing { get; set; } = router; +} + +public class ToolConfig(Template template, List<Proxy> proxies, Routing router, StaticHosts? 
hosts) : ToolConfigBase(template, proxies, router) +{ + public const string ConfigTemplateFileName = "config.json.template"; + public const string HostsConfigFileName = "hosts.txt"; + + public static List<string> RequiredConfigFileNames { get; } = [ConfigTemplateFileName, VmessConfigFileName, ProxyConfigFileName]; + public static List<string> ConfigFileNames { get; } = [ConfigTemplateFileName, VmessConfigFileName, ProxyConfigFileName, HostsConfigFileName]; + + private const string ProxyAnchor = "PROXY_ANCHOR"; + private const string RoutingAnchor = "ROUTING_ANCHOR"; + private const string HostsAnchor = "HOSTS_ANCHOR"; + + public const string AddCnAttributeToGeositeEnvironmentVariable = "CRUPEST_V2RAY_GEOSITE_USE_CN"; + + private static bool UseCnGeoSite => Environment.GetEnvironmentVariable(AddCnAttributeToGeositeEnvironmentVariable) switch + { + "0" or "false" or "off" or "disable" => false, + _ => true + }; + + public StaticHosts Hosts { get; set; } = hosts is null ? new StaticHosts([]) : hosts; + + public string ToJsonStringV4(string domainStrategy = "IpOnDemand", bool directGeositeCn = true, bool pretty = true) + { + var jsonOptions = new JsonSerializerOptions(new JsonSerializerOptions + { + PropertyNamingPolicy = JsonNamingPolicy.CamelCase, + DictionaryKeyPolicy = JsonNamingPolicy.CamelCase, + DefaultIgnoreCondition = JsonIgnoreCondition.WhenWritingNull, + }); + // TODO: Make interface converter generic. + jsonOptions.Converters.Add(new JsonInterfaceConverter<V4ConfigJsonObjects.IOutboundSettings>()); + jsonOptions.Converters.Add(new JsonInterfaceConverter<V4ConfigJsonObjects.IOutboundStreamSettings>()); + + var templateValues = new Dictionary<string, string> + { + [ProxyAnchor] = string.Join(',', Proxies.Select(p => JsonSerializer.Serialize(p.ToJsonObjectV4(), jsonOptions))), + [RoutingAnchor] = JsonSerializer.Serialize(Routing.ToJsonObjectV4(domainStrategy, directGeositeCn), jsonOptions), + [HostsAnchor] = JsonSerializer.Serialize(Hosts.ToJsonObjectV4(), jsonOptions), + }; + + var configString = Template.Generate(templateValues); + + if (pretty) + { + var jsonOptionsPretty = new JsonSerializerOptions(jsonOptions) + { + WriteIndented = true, + }; + return JsonSerializer.Serialize(JsonSerializer.Deserialize<object>(configString, jsonOptionsPretty), jsonOptionsPretty); + } + else + { + return configString; + } + } + + public static ToolConfig FromFiles(string templatePath, string vmessPath, string proxyPath, string? hostsPath) + { + foreach (var path in new List<string>([templatePath, vmessPath, proxyPath])) + { + if (!File.Exists(path)) + { + throw new FileNotFoundException($"Required config file not found: {path}."); + } + } + + ProxyFile proxyFile = new(proxyPath); + string templateString, vmessString; + string? hostsString; + + string file = ""; + try + { + file = templatePath; + templateString = File.ReadAllText(templatePath); + file = vmessPath; + vmessString = File.ReadAllText(vmessPath); + hostsString = hostsPath is not null ? File.ReadAllText(hostsPath) : null; + } + catch (Exception e) + { + throw new Exception($"Error reading config file {file}.", e); + } + + try + { + file = templatePath; + var template = new Template(templateString); + file = vmessPath; + var vmess = VmessProxy.CreateFromConfigString(vmessString, "proxy"); + file = proxyPath; + var routing = Routing.FromProxyFile(proxyFile, "proxy"); + file = hostsPath ?? ""; + var hosts = hostsString is not null ? 
StaticHosts.CreateFromHostMatchConfigString(hostsString) : null; + return new ToolConfig(template, [vmess], routing, hosts); + } + catch (Exception e) + { + throw new Exception($"Error parsing config file {file}.", e); + } + } + + public static ToolConfig FromDirectory(string directory) + { + return FromFiles( + Path.Join(directory, ConfigTemplateFileName), + Path.Join(directory, VmessConfigFileName), + Path.Join(directory, ProxyConfigFileName), + Path.Join(directory, HostsConfigFileName) + ); + } + + public static void FromDirectoryAndWriteToFile(string directory, string outputPath) + { + var config = FromDirectory(directory); + File.WriteAllText(outputPath, config.ToJsonStringV4()); + } +} + +public class SingToolConfig(Template template, List<Proxy> proxies, Routing router, string inboundsString) : ToolConfigBase(template, proxies, router) +{ + + public const string ConfigTemplateFileName = "sing-config.json.template"; + public const string ConfigInboundsPcFileName = "sing-inbounds-pc.json"; + public const string ConfigInboundsMobileFileName = "sing-inbounds-mobile.json"; + + public static List<string> RequiredConfigFileNames { get; } = [ConfigTemplateFileName, VmessConfigFileName, ProxyConfigFileName, ConfigInboundsMobileFileName, ConfigInboundsPcFileName]; + + private const string ProxyAnchor = "PROXY_ANCHOR"; + private const string RouteAnchor = "ROUTE_ANCHOR"; + private const string InboundsAnchor = "INBOUNDS_ANCHOR"; + + public string InboundsString { get; } = inboundsString; + + public string ToSingConfigString(bool pretty = true) + { + var jsonOptions = new JsonSerializerOptions(new JsonSerializerOptions + { + PropertyNamingPolicy = JsonNamingPolicy.SnakeCaseLower, + DictionaryKeyPolicy = JsonNamingPolicy.SnakeCaseLower, + DefaultIgnoreCondition = JsonIgnoreCondition.WhenWritingNull, + }); + // TODO: Make interface converter generic. 
+ jsonOptions.Converters.Add(new JsonInterfaceConverter<SingConfigJsonObjects.OutboundBase>()); + jsonOptions.Converters.Add(new JsonInterfaceConverter<SingConfigJsonObjects.V2rayTransportBase>()); + + var templateValues = new Dictionary<string, string> + { + [ProxyAnchor] = string.Join(',', Proxies.Select(p => JsonSerializer.Serialize(p.ToJsonObjectSing(), jsonOptions))), + [RouteAnchor] = JsonSerializer.Serialize(Routing.ToJsonObjectSing(), jsonOptions), + [InboundsAnchor] = InboundsString + }; + + var configString = Template.Generate(templateValues); + + if (pretty) + { + var jsonOptionsPretty = new JsonSerializerOptions(jsonOptions) + { + WriteIndented = true, + }; + return JsonSerializer.Serialize(JsonSerializer.Deserialize<object>(configString, jsonOptionsPretty), jsonOptionsPretty); + } + else + { + return configString; + } + } + + public static SingToolConfig FromFiles(string templatePath, string vmessPath, string proxyPath, string inboundsPath, bool clean, bool silent) + { + foreach (var path in new List<string>([templatePath, vmessPath, proxyPath, inboundsPath])) + { + if (!File.Exists(path)) + { + throw new FileNotFoundException($"Required config file not found: {path}."); + } + } + + var geoSiteData = GeoDataManager.Instance.GetOrCreateGeoSiteData(clean, silent); + + ProxyFile proxyFile = new(proxyPath); + string templateString, vmessString, inboundsString; + + string file = ""; + try + { + file = templatePath; + templateString = File.ReadAllText(templatePath); + file = vmessPath; + vmessString = File.ReadAllText(vmessPath); + file = inboundsPath; + inboundsString = File.ReadAllText(inboundsPath); + } + catch (Exception e) + { + throw new Exception($"Error reading config file {file}.", e); + } + + try + { + file = templatePath; + var template = new Template(templateString); + file = vmessPath; + var vmess = VmessProxy.CreateFromConfigString(vmessString, "proxy-out"); + file = proxyPath; + var routing = Routing.FromProxyFileForSing(proxyFile, geoSiteData, "proxy-out", "direct-out"); + return new SingToolConfig(template, [vmess], routing, inboundsString); + } + catch (Exception e) + { + throw new Exception($"Error parsing config file {file}.", e); + } + } + + public static SingToolConfig FromDirectory(string directory, bool isMobile, bool clean, bool silent) + { + return FromFiles( + Path.Join(directory, ConfigTemplateFileName), + Path.Join(directory, VmessConfigFileName), + Path.Join(directory, ProxyConfigFileName), + isMobile ? 
Path.Join(directory, ConfigInboundsMobileFileName) : Path.Join(directory, ConfigInboundsPcFileName), + clean, silent + ); + } +} diff --git a/tools/Crupest.SecretTool/Crupest.SecretTool/V4ConfigJsonObjects.cs b/tools/Crupest.SecretTool/Crupest.SecretTool/V4ConfigJsonObjects.cs new file mode 100644 index 0000000..3e81dbb --- /dev/null +++ b/tools/Crupest.SecretTool/Crupest.SecretTool/V4ConfigJsonObjects.cs @@ -0,0 +1,25 @@ +namespace Crupest.SecretTool; + +public static class V4ConfigJsonObjects +{ + public interface IObject; + public interface IOutboundSettings : IObject; + public interface IOutboundStreamSettings : IObject; + + public record WsSettings(string Path, Dictionary<string, string> Headers) : IObject; + public record WsStreamSettings(string Network, string Security, WsSettings WsSettings) : IOutboundStreamSettings; + public record VnextServerUser(string Id, int AlterId, string Security, int Level) : IObject; + public record VnextServer(string Address, int Port, List<VnextServerUser> Users) : IObject; + public record VmessOutboundSettings(List<VnextServer> Vnext) : IOutboundSettings; + public record HttpOutboundUser(string User, string Pass) : IObject; + public record HttpOutboundServer(string Address, int Port, List<HttpOutboundUser> Users) : IObject; + public record HttpOutboundSettings(List<HttpOutboundServer> Servers) : IOutboundSettings; + public record Outbound(string Tag, string Protocol, IOutboundSettings Settings, + IOutboundStreamSettings? StreamSettings) : IObject; + + public record RoutingRule(string DomainMatcher = "mph", string Type = "field", List<string>? Domains = null, List<string>? Ip = null, + string? Port = null, string? SourcePort = null, string? Network = null, List<string>? Source = null, + List<string>? User = null, List<string>? InboundTag = null, List<string>? Protocol = null, string? Attrs = null, + string? OutboundTag = null, string? BalancerTag = null) : IObject; + public record Routing(List<RoutingRule> Rules, string DomainStrategy = "IpOnDemand", string DomainMatcher = "mph") : IObject; +} diff --git a/tools/Crupest.SecretTool/Crupest.SecretTool/V5ConfigJsonObjects.cs b/tools/Crupest.SecretTool/Crupest.SecretTool/V5ConfigJsonObjects.cs new file mode 100644 index 0000000..a50e9be --- /dev/null +++ b/tools/Crupest.SecretTool/Crupest.SecretTool/V5ConfigJsonObjects.cs @@ -0,0 +1,31 @@ +namespace Crupest.SecretTool; + +public static class V5ConfigJsonObjects +{ + public record OutboundObject(string Protocol, object Settings, string Tag, object? 
StreamSettings) + { + public static OutboundObject VmessViaWs(string tag, string address, int port, string uuid, string path) + { + return new OutboundObject("vmess", new VmessSettings(address, port, uuid), tag, StreamSettingsObject.Ws(path)); + } + + public static OutboundObject Http(string tag, string address, int port) + { + return new OutboundObject("http", new HttpSettingsObject(address, port), tag, null); + } + } + + public record WsSettingsObject(string Path, Dictionary<string, string> Headers); + + public record StreamSettingsObject(string Transport, object TransportSettings, string Security, object SecuritySettings) + { + public static StreamSettingsObject Ws(string path) + { + return new StreamSettingsObject("ws", new WsSettingsObject(path, new()), "tls", new()); + } + } + + public record VmessSettings(string Address, int Port, string Uuid); + + public record HttpSettingsObject(string Address, int Port); +} diff --git a/tools/Crupest.SecretTool/Crupest.SecretTool/config.json.template b/tools/Crupest.SecretTool/Crupest.SecretTool/config.json.template new file mode 100644 index 0000000..424e996 --- /dev/null +++ b/tools/Crupest.SecretTool/Crupest.SecretTool/config.json.template @@ -0,0 +1,63 @@ +{ + "log": { + "loglevel": "warning" + }, + "inbounds": [ + { + "port": 2081, + "listen": "127.0.0.1", + "tag": "socks-inbound", + "protocol": "socks", + "settings": { + "auth": "noauth" + } + }, + { + "port": 2080, + "listen": "127.0.0.1", + "tag": "http-inbound", + "protocol": "http", + "settings": { + "auth": "noauth" + } + } + ], + "outbounds": [ + { + "protocol": "freedom", + "settings": {}, + "tag": "direct" + }, + { + "protocol": "blackhole", + "settings": {}, + "tag": "blocked" + }, + ${PROXY_ANCHOR} + ], + "routing": ${ROUTING_ANCHOR}, + "dns": { + "hosts": ${HOSTS_ANCHOR}, + "servers": [ + "https://doh.pub/dns-query", + "1.1.1.1", + "8.8.8.8", + "localhost" + ] + }, + "policy": { + "levels": { + "0": { + "uplinkOnly": 0, + "downlinkOnly": 0 + } + }, + "system": { + "statsInboundUplink": false, + "statsInboundDownlink": false, + "statsOutboundUplink": false, + "statsOutboundDownlink": false + } + }, + "other": {} +} diff --git a/tools/Crupest.SecretTool/Crupest.SecretTool/config.v5.json.template b/tools/Crupest.SecretTool/Crupest.SecretTool/config.v5.json.template new file mode 100644 index 0000000..01ccf7a --- /dev/null +++ b/tools/Crupest.SecretTool/Crupest.SecretTool/config.v5.json.template @@ -0,0 +1,55 @@ +{ + "log": { + "access": { + "type": "Console", + "level": "Info" + } + }, + "dns": { + "nameServer": [{ + "address": "https://doh.pub/dns-query" + }, { + "address": "1.1.1.1" + }, { + "address": "8.8.8.8" + }, { + "address": "localhost" + }], + "staticHosts": ${HOSTS_ANCHOR} + }, + "inbounds": [{ + { + "protocol": "socks", + "port": 2081, + "listen": "127.0.0.1", + "tag": "socks-inbound", + "settings": { + "auth": "noauth" + } + }, + { + "protocol": "http", + "port": 2080, + "listen": "127.0.0.1", + "tag": "http-inbound", + "settings": { + "auth": "noauth" + } + } + }], + "outbounds": [ + { + "protocol": "freedom", + "settings": {}, + "tag": "direct" + }, + { + "protocol": "blackhole", + "settings": {}, + "tag": "blocked" + }, + ${PROXY_ANCHOR} + ], + "router": ${ROUTER_ANCHOR} +} + diff --git a/tools/Crupest.SecretTool/Crupest.SecretTool/hosts.txt b/tools/Crupest.SecretTool/Crupest.SecretTool/hosts.txt new file mode 100644 index 0000000..88d5015 --- /dev/null +++ b/tools/Crupest.SecretTool/Crupest.SecretTool/hosts.txt @@ -0,0 +1,2 @@ +cdn.jsdelivr.net 
cdn.jsdelivr.net.cdn.cloudflare.net + diff --git a/tools/Crupest.SecretTool/Crupest.SecretTool/proxy.txt b/tools/Crupest.SecretTool/Crupest.SecretTool/proxy.txt new file mode 100644 index 0000000..39800f9 --- /dev/null +++ b/tools/Crupest.SecretTool/Crupest.SecretTool/proxy.txt @@ -0,0 +1,50 @@ +GeoSite microsoft +GeoSite google +GeoSite youtube +GeoSite x +GeoSite facebook +GeoSite discord +GeoSite reddit +GeoSite twitch +GeoSite quora +GeoSite telegram +GeoSite imgur +GeoSite stackexchange +GeoSite medium + +GeoSite duckduckgo +GeoSite wikimedia +GeoSite gitbook +GeoSite github +GeoSite gitlab +GeoSite sourceforge +GeoSite creativecommons +GeoSite archive +GeoSite matrix +GeoSite tor + +GeoSite python +GeoSite ruby +GeoSite rust +GeoSite nodejs +GeoSite npmjs +GeoSite qt +GeoSite docker +GeoSite v2ray +GeoSite homebrew + +GeoSite azure +GeoSite akamai +GeoSite aws +GeoSite jsdelivr +GeoSite fastly +GeoSite heroku +GeoSite bootstrap +GeoSite vercel + +GeoSite ieee +GeoSite sci-hub +GeoSite libgen +GeoSite z-library + +sagernet.org diff --git a/tools/Crupest.SecretTool/Crupest.SecretTool/sing-config.json.template b/tools/Crupest.SecretTool/Crupest.SecretTool/sing-config.json.template new file mode 100644 index 0000000..d7e55a0 --- /dev/null +++ b/tools/Crupest.SecretTool/Crupest.SecretTool/sing-config.json.template @@ -0,0 +1,45 @@ +{
+ "log": {
+ "disabled": false,
+ "level": "info",
+ "timestamp": true
+ },
+ "dns": {
+ "servers": [
+ {
+ "tag": "ali-doh",
+ "address": "https://dns.alidns.com/dns-query",
+ "address_resolver": "ali"
+ },
+ {
+ "tag": "ali",
+ "address": "223.5.5.5"
+ },
+ {
+ "tag": "cloudflare",
+ "address": "1.1.1.1"
+ },
+ {
+ "tag": "google",
+ "address": "8.8.8.8"
+ }
+ ]
+ },
+ "inbounds": ${INBOUNDS_ANCHOR},
+ "outbounds": [
+ {
+ "type": "direct",
+ "tag": "direct-out"
+ },
+ {
+ "type": "block",
+ "tag": "block-out"
+ },
+ {
+ "tag": "dns-out",
+ "type": "dns"
+ },
+ ${PROXY_ANCHOR}
+ ],
+ "route": ${ROUTE_ANCHOR}
+}
diff --git a/tools/Crupest.SecretTool/Crupest.SecretTool/sing-inbounds-mobile.json b/tools/Crupest.SecretTool/Crupest.SecretTool/sing-inbounds-mobile.json new file mode 100644 index 0000000..5038c40 --- /dev/null +++ b/tools/Crupest.SecretTool/Crupest.SecretTool/sing-inbounds-mobile.json @@ -0,0 +1,11 @@ +[
+ {
+ "tag": "tun-in",
+ "type": "tun",
+ "auto_route": true,
+ "strict_route": true,
+ "address": [ "172.23.0.1/30", "fdfe:acbd:9876::1/126"],
+ "sniff": true,
+ "sniff_override_destination": true
+ }
+]
diff --git a/tools/Crupest.SecretTool/Crupest.SecretTool/sing-inbounds-pc.json b/tools/Crupest.SecretTool/Crupest.SecretTool/sing-inbounds-pc.json new file mode 100644 index 0000000..956d751 --- /dev/null +++ b/tools/Crupest.SecretTool/Crupest.SecretTool/sing-inbounds-pc.json @@ -0,0 +1,14 @@ +[
+ {
+ "tag": "http-in",
+ "type": "http",
+ "listen": "127.0.0.1",
+ "listen_port": 3080
+ },
+ {
+ "tag": "socks-in",
+ "type": "socks",
+ "listen": "127.0.0.1",
+ "listen_port": 3081
+ }
+]
\ No newline at end of file diff --git a/tools/Crupest.SecretTool/build-secret.bash b/tools/Crupest.SecretTool/build-secret.bash new file mode 100755 index 0000000..8878049 --- /dev/null +++ b/tools/Crupest.SecretTool/build-secret.bash @@ -0,0 +1,41 @@ +#! /usr/bin/env bash + +set -e + +function print_argument_error_message_and_exit() { + argument_error_message="You must specify exactly one argument, the build target (win-x64 | linux-x64 | osx-x64)." + echo "$argument_error_message" + exit 1 +} + + + +if [[ $# != 1 ]]; then + print_argument_error_message_and_exit +fi + +case "$1" in + win-x64 | linux-x64 | osx-x64) + echo "Build target: $1" + ;; + *) + print_argument_error_message_and_exit + ;; +esac + +secret_dir=$(realpath "$(dirname "$0")") + +echo "Secret dir: ${secret_dir}" + +echo "Check dotnet..." +dotnet --version + +echo "Enter \"secret\" dir..." +pushd "$secret_dir" + +echo "Begin to build..." +dotnet publish Crupest.SecretTool -c Release -o "$secret_dir/publish" --sc -r "$1" + +popd + +echo "Finish!" diff --git a/tools/Crupest.SecretTool/build-secret.ps1 b/tools/Crupest.SecretTool/build-secret.ps1 new file mode 100644 index 0000000..8aa7987 --- /dev/null +++ b/tools/Crupest.SecretTool/build-secret.ps1 @@ -0,0 +1,25 @@ +if ($args.Count -ne 1 || $args[0] -notmatch "^win-x64|linux-x64|osx-x64$") +{ + Write-Error "You must specify exactly one argument, the build target (win-x64 | linux-x64 | osx-x64)." + exit 1 +} + +Write-Output "Secret dir: $PSScriptRoot" + +Write-Output "Check dotnet..." +dotnet --version +if ($LASTEXITCODE -ne 0) +{ + Write-Error "dotnet not found." + exit 2 +} + +Write-Output "Enter `"secret`" dir..." +Push-Location $PSScriptRoot + +Write-Output "Begin to build..." +dotnet publish Crupest.SecretTool -c Release -o "$secret_dir/publish" --sc -r $args[0] + +Pop-Location + +Write-Host "Finish!" -ForegroundColor Green diff --git a/tools/Crupest.SecretTool/tools/cru-proxy-edit b/tools/Crupest.SecretTool/tools/cru-proxy-edit new file mode 100755 index 0000000..51a33e1 --- /dev/null +++ b/tools/Crupest.SecretTool/tools/cru-proxy-edit @@ -0,0 +1,12 @@ +#! /usr/bin/env bash + +set -e + +p="$HOME/codes/crupest/tools/Crupest.SecretTool/publish/proxy.txt" + +if [[ ! -f "$p" ]]; then + echo "File $p does not exist!" >&2 + exit 1 +fi + +exec vim "$p" diff --git a/tools/Crupest.SecretTool/tools/cru-proxy-log b/tools/Crupest.SecretTool/tools/cru-proxy-log new file mode 100755 index 0000000..6ec6ee1 --- /dev/null +++ b/tools/Crupest.SecretTool/tools/cru-proxy-log @@ -0,0 +1,13 @@ +#! /usr/bin/env bash + +set -e + +if [[ -e /proc ]]; then + # I don't believe your system is Linux but there is no /proc. + exec journalctl --user -u crupest-secret-tool "$@" +elif [[ "$(uname)" == "Darwin" ]]; then + exec less "$HOME/.local/state/Crupest.SecretTool/log" +else + echo "Not supported on systems other than macOS and Linux now." 
>&2 + exit 1 +fi diff --git a/tools/Crupest.SecretTool/tools/crupest-secret-tool.service b/tools/Crupest.SecretTool/tools/crupest-secret-tool.service new file mode 100644 index 0000000..df6d172 --- /dev/null +++ b/tools/Crupest.SecretTool/tools/crupest-secret-tool.service @@ -0,0 +1,8 @@ +[Unit] +Description=crupest v2ray service + +[Service] +ExecStart=%h/.local/bin/Crupest.SecretTool + +[Install] +WantedBy=default.target diff --git a/tools/Crupest.SecretTool/tools/crupest-secret-tool.xml b/tools/Crupest.SecretTool/tools/crupest-secret-tool.xml new file mode 100644 index 0000000..9b85f13 --- /dev/null +++ b/tools/Crupest.SecretTool/tools/crupest-secret-tool.xml @@ -0,0 +1,49 @@ +<!-- + MIT License + + Copyright (c) 2008-2020 Kohsuke Kawaguchi, Sun Microsystems, Inc., CloudBees, + Inc., Oleg Nenashev and other contributors + + Permission is hereby granted, free of charge, to any person obtaining a copy + of this software and associated documentation files (the "Software"), to deal + in the Software without restriction, including without limitation the rights + to use, copy, modify, merge, publish, distribute, sublicense, and/or sell + copies of the Software, and to permit persons to whom the Software is + furnished to do so, subject to the following conditions: + + The above copyright notice and this permission notice shall be included in all + copies or substantial portions of the Software. + + THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR + IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, + FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE + AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER + LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, + OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE + SOFTWARE. +--> + +<!-- + This is a sample configuration of the Windows Service Wrapper. + This configuration file should be placed near the WinSW executable, the name should be the same. + E.g. for myapp.exe the configuration file name should be myapp.xml + + You can find more information about configuration options here: +https://github.com/kohsuke/winsw/blob/master/doc/xmlConfigFile.md +--> +<service> + <id>crupest-secret-tool</id> + <name>Crupest Secret Tool</name> + <description>Crupest Secret Tool (powered by WinSW)</description> + + <!-- Path to the executable, which should be started --> + <executable>%BASE%\Crupest.SecretTool.exe</executable> + + <onfailure action="restart" delay="10 sec" /> + <onfailure action="restart" delay="30 sec" /> + <onfailure action="restart" delay="50 sec" /> + + <workingdirectory>%BASE%</workingdirectory> + + <startmode>Automatic</startmode> +</service>
\ No newline at end of file diff --git a/tools/Crupest.SecretTool/tools/life.crupest.secret-tool.plist b/tools/Crupest.SecretTool/tools/life.crupest.secret-tool.plist new file mode 100644 index 0000000..bdfe490 --- /dev/null +++ b/tools/Crupest.SecretTool/tools/life.crupest.secret-tool.plist @@ -0,0 +1,18 @@ +<?xml version="1.0" encoding="UTF-8"?> +<!DOCTYPE plist PUBLIC "-//Apple//DTD PLIST 1.0//EN" "http://www.apple.com/DTDs/PropertyList-1.0.dtd"> +<plist version="1.0"> +<dict> + <key>Label</key> + <string>life.crupest.secret-tool</string> + <key>ProgramArguments</key> + <array> + <string>/Users/crupest/.local/bin/Crupest.SecretTool</string> + </array> + <key>KeepAlive</key> + <true/> + <key>StandardOutPath</key> + <string>/Users/crupest/.local/state/Crupest.SecretTool/log</string> + <key>StandardErrorPath</key> + <string>/Users/crupest/.local/state/Crupest.SecretTool/error</string> +</dict> +</plist> diff --git a/tools/cru-py/.gitignore b/tools/cru-py/.gitignore new file mode 100644 index 0000000..9f7550b --- /dev/null +++ b/tools/cru-py/.gitignore @@ -0,0 +1,2 @@ +__pycache__ +.venv diff --git a/tools/cru-py/.python-version b/tools/cru-py/.python-version new file mode 100644 index 0000000..37504c5 --- /dev/null +++ b/tools/cru-py/.python-version @@ -0,0 +1 @@ +3.11
diff --git a/tools/cru-py/cru/__init__.py b/tools/cru-py/cru/__init__.py new file mode 100644 index 0000000..17799a9 --- /dev/null +++ b/tools/cru-py/cru/__init__.py @@ -0,0 +1,60 @@ +import sys + +from ._base import CRU, CruNamespaceError, CRU_NAME_PREFIXES +from ._error import ( + CruException, + CruLogicError, + CruInternalError, + CruUnreachableError, + cru_unreachable, +) +from ._const import ( + CruConstantBase, + CruDontChange, + CruNotFound, + CruNoValue, + CruPlaceholder, + CruUseDefault, +) +from ._func import CruFunction +from ._iter import CruIterable, CruIterator +from ._event import CruEvent, CruEventHandlerToken +from ._type import CruTypeSet, CruTypeCheckError + + +class CruInitError(CruException): + pass + + +def check_python_version(required_version=(3, 11)): + if sys.version_info < required_version: + raise CruInitError(f"Python version must be >= {required_version}!") + + +check_python_version() + +__all__ = [ + "CRU", + "CruNamespaceError", + "CRU_NAME_PREFIXES", + "check_python_version", + "CruException", + "CruInternalError", + "CruLogicError", + "CruUnreachableError", + "cru_unreachable", + "CruInitError", + "CruConstantBase", + "CruDontChange", + "CruNotFound", + "CruNoValue", + "CruPlaceholder", + "CruUseDefault", + "CruFunction", + "CruIterable", + "CruIterator", + "CruEvent", + "CruEventHandlerToken", + "CruTypeSet", + "CruTypeCheckError", +] diff --git a/tools/cru-py/cru/_base.py b/tools/cru-py/cru/_base.py new file mode 100644 index 0000000..2599d8f --- /dev/null +++ b/tools/cru-py/cru/_base.py @@ -0,0 +1,101 @@ +from typing import Any + +from ._helper import remove_none +from ._error import CruException + + +class CruNamespaceError(CruException): + """Raised when a namespace is not found.""" + + +class _Cru: + NAME_PREFIXES = ("CRU_", "Cru", "cru_") + + def __init__(self) -> None: + self._d: dict[str, Any] = {} + + def all_names(self) -> list[str]: + return list(self._d.keys()) + + def get(self, name: str) -> Any: + return self._d[name] + + def has_name(self, name: str) -> bool: + return name in self._d + + @staticmethod + def _maybe_remove_prefix(name: str) -> str | None: + for prefix in _Cru.NAME_PREFIXES: + if name.startswith(prefix): + return name[len(prefix) :] + return None + + def _check_name_exist(self, *names: str | None) -> None: + for name in names: + if name is None: + continue + if self.has_name(name): + raise CruNamespaceError(f"Name {name} exists in CRU.") + + @staticmethod + def check_name_format(name: str) -> tuple[str, str]: + no_prefix_name = _Cru._maybe_remove_prefix(name) + if no_prefix_name is None: + raise CruNamespaceError( + f"Name {name} is not prefixed with any of {_Cru.NAME_PREFIXES}." 
+ ) + return name, no_prefix_name + + @staticmethod + def _check_object_name(o) -> tuple[str, str]: + return _Cru.check_name_format(o.__name__) + + def _do_add(self, o, *names: str | None) -> list[str]: + name_list: list[str] = remove_none(names) + for name in name_list: + self._d[name] = o + return name_list + + def add(self, o, name: str | None) -> tuple[str, str | None]: + no_prefix_name: str | None + if name is None: + name, no_prefix_name = self._check_object_name(o) + else: + no_prefix_name = self._maybe_remove_prefix(name) + + self._check_name_exist(name, no_prefix_name) + self._do_add(o, name, no_prefix_name) + return name, no_prefix_name + + def add_with_alias(self, o, name: str | None = None, *aliases: str) -> list[str]: + final_names: list[str | None] = [] + no_prefix_name: str | None + if name is None: + name, no_prefix_name = self._check_object_name(o) + self._check_name_exist(name, no_prefix_name) + final_names.extend([name, no_prefix_name]) + for alias in aliases: + no_prefix_name = self._maybe_remove_prefix(alias) + self._check_name_exist(alias, no_prefix_name) + final_names.extend([alias, no_prefix_name]) + + return self._do_add(o, *final_names) + + def add_objects(self, *objects): + final_list = [] + for o in objects: + name, no_prefix_name = self._check_object_name(o) + self._check_name_exist(name, no_prefix_name) + final_list.append((o, name, no_prefix_name)) + for o, name, no_prefix_name in final_list: + self._do_add(o, name, no_prefix_name) + + def __getitem__(self, item): + return self.get(item) + + def __getattr__(self, item): + return self.get(item) + + +CRU_NAME_PREFIXES = _Cru.NAME_PREFIXES +CRU = _Cru() diff --git a/tools/cru-py/cru/_const.py b/tools/cru-py/cru/_const.py new file mode 100644 index 0000000..8246b35 --- /dev/null +++ b/tools/cru-py/cru/_const.py @@ -0,0 +1,49 @@ +from enum import Enum, auto +from typing import Self, TypeGuard, TypeVar + +from ._base import CRU + +_T = TypeVar("_T") + + +class CruConstantBase(Enum): + @classmethod + def check(cls, v: _T | Self) -> TypeGuard[Self]: + return isinstance(v, cls) + + @classmethod + def check_not(cls, v: _T | Self) -> TypeGuard[_T]: + return not cls.check(v) + + @classmethod + def value(cls) -> Self: + return cls.VALUE # type: ignore + + +class CruNotFound(CruConstantBase): + VALUE = auto() + + +class CruUseDefault(CruConstantBase): + VALUE = auto() + + +class CruDontChange(CruConstantBase): + VALUE = auto() + + +class CruNoValue(CruConstantBase): + VALUE = auto() + + +class CruPlaceholder(CruConstantBase): + VALUE = auto() + + +CRU.add_objects( + CruNotFound, + CruUseDefault, + CruDontChange, + CruNoValue, + CruPlaceholder, +) diff --git a/tools/cru-py/cru/_decorator.py b/tools/cru-py/cru/_decorator.py new file mode 100644 index 0000000..137fc05 --- /dev/null +++ b/tools/cru-py/cru/_decorator.py @@ -0,0 +1,97 @@ +from __future__ import annotations + +from collections.abc import Callable +from typing import ( + Concatenate, + Generic, + ParamSpec, + TypeVar, + cast, +) + +from ._base import CRU + +_P = ParamSpec("_P") +_T = TypeVar("_T") +_O = TypeVar("_O") +_R = TypeVar("_R") + + +class CruDecorator: + + class ConvertResult(Generic[_T, _O]): + def __init__( + self, + converter: Callable[[_T], _O], + ) -> None: + self.converter = converter + + def __call__(self, origin: Callable[_P, _T]) -> Callable[_P, _O]: + converter = self.converter + + def real_impl(*args: _P.args, **kwargs: _P.kwargs) -> _O: + return converter(origin(*args, **kwargs)) + + return real_impl + + class ImplementedBy(Generic[_T, _O, _P, 
_R]): + def __init__( + self, + impl: Callable[Concatenate[_O, _P], _R], + converter: Callable[[_T], _O], + ) -> None: + self.impl = impl + self.converter = converter + + def __call__( + self, _origin: Callable[[_T], None] + ) -> Callable[Concatenate[_T, _P], _R]: + converter = self.converter + impl = self.impl + + def real_impl(_self: _T, *args: _P.args, **kwargs: _P.kwargs) -> _R: + return cast(Callable[Concatenate[_O, _P], _R], impl)( + converter(_self), *args, **kwargs + ) + + return real_impl + + @staticmethod + def create_factory(converter: Callable[[_T], _O]) -> Callable[ + [Callable[Concatenate[_O, _P], _R]], + CruDecorator.ImplementedBy[_T, _O, _P, _R], + ]: + def create( + m: Callable[Concatenate[_O, _P], _R], + ) -> CruDecorator.ImplementedBy[_T, _O, _P, _R]: + return CruDecorator.ImplementedBy(m, converter) + + return create + + class ImplementedByNoSelf(Generic[_P, _R]): + def __init__(self, impl: Callable[_P, _R]) -> None: + self.impl = impl + + def __call__( + self, _origin: Callable[[_T], None] + ) -> Callable[Concatenate[_T, _P], _R]: + impl = self.impl + + def real_impl(_self: _T, *args: _P.args, **kwargs: _P.kwargs) -> _R: + return cast(Callable[_P, _R], impl)(*args, **kwargs) + + return real_impl + + @staticmethod + def create_factory() -> ( + Callable[[Callable[_P, _R]], CruDecorator.ImplementedByNoSelf[_P, _R]] + ): + def create( + m: Callable[_P, _R], + ) -> CruDecorator.ImplementedByNoSelf[_P, _R]: + return CruDecorator.ImplementedByNoSelf(m) + + return create + + +CRU.add_objects(CruDecorator) diff --git a/tools/cru-py/cru/_error.py b/tools/cru-py/cru/_error.py new file mode 100644 index 0000000..e53c787 --- /dev/null +++ b/tools/cru-py/cru/_error.py @@ -0,0 +1,89 @@ +from __future__ import annotations + +from typing import NoReturn, cast, overload + + +class CruException(Exception): + """Base exception class of all exceptions in cru.""" + + @overload + def __init__( + self, + message: None = None, + *args, + user_message: str, + **kwargs, + ): ... + + @overload + def __init__( + self, + message: str, + *args, + user_message: str | None = None, + **kwargs, + ): ... 
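# Illustrative note on the two-message design above (a sketch, assuming the package
# is importable as `cru`; the example strings are hypothetical): `message` targets
# developers, while `user_message` is what gets surfaced to end users, e.g.
#     err = CruException("db connection failed", user_message="Server is misconfigured.")
#     err.get_message()                # -> "Server is misconfigured." (user message preferred)
#     err.get_message(use_user=False)  # -> "db connection failed"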
+ + def __init__( + self, + message: str | None = None, + *args, + user_message: str | None = None, + **kwargs, + ): + if message is None: + message = user_message + + super().__init__( + message, + *args, + **kwargs, + ) + self._message: str + self._message = cast(str, message) + self._user_message = user_message + + @property + def message(self) -> str: + return self._message + + def get_user_message(self) -> str | None: + return self._user_message + + def get_message(self, use_user: bool = True) -> str: + if use_user and self._user_message is not None: + return self._user_message + else: + return self._message + + @property + def is_internal(self) -> bool: + return False + + @property + def is_logic_error(self) -> bool: + return False + + +class CruLogicError(CruException): + """Raised when a logic error occurs.""" + + @property + def is_logic_error(self) -> bool: + return True + + +class CruInternalError(CruException): + """Raised when an internal error occurs.""" + + @property + def is_internal(self) -> bool: + return True + + +class CruUnreachableError(CruInternalError): + """Raised when a code path is unreachable.""" + + +def cru_unreachable() -> NoReturn: + raise CruUnreachableError("Code should not reach here!") diff --git a/tools/cru-py/cru/_event.py b/tools/cru-py/cru/_event.py new file mode 100644 index 0000000..51a794c --- /dev/null +++ b/tools/cru-py/cru/_event.py @@ -0,0 +1,61 @@ +from __future__ import annotations + +from collections.abc import Callable +from typing import Generic, ParamSpec, TypeVar + +from .list import CruList + +_P = ParamSpec("_P") +_R = TypeVar("_R") + + +class CruEventHandlerToken(Generic[_P, _R]): + def __init__( + self, event: CruEvent, handler: Callable[_P, _R], once: bool = False + ) -> None: + self._event = event + self._handler = handler + self._once = once + + @property + def event(self) -> CruEvent: + return self._event + + @property + def handler(self) -> Callable[_P, _R]: + return self._handler + + @property + def once(self) -> bool: + return self._once + + +class CruEvent(Generic[_P, _R]): + def __init__(self, name: str) -> None: + self._name = name + self._tokens: CruList[CruEventHandlerToken] = CruList() + + def register( + self, handler: Callable[_P, _R], once: bool = False + ) -> CruEventHandlerToken: + token = CruEventHandlerToken(self, handler, once) + self._tokens.append(token) + return token + + def unregister(self, *handlers: CruEventHandlerToken | Callable[_P, _R]) -> int: + old_length = len(self._tokens) + self._tokens.reset( + self._tokens.as_cru_iterator().filter( + (lambda t: t in handlers or t.handler in handlers) + ) + ) + return old_length - len(self._tokens) + + def trigger(self, *args: _P.args, **kwargs: _P.kwargs) -> CruList[_R]: + results = CruList( + self._tokens.as_cru_iterator() + .transform(lambda t: t.handler(*args, **kwargs)) + .to_list() + ) + self._tokens.reset(self._tokens.as_cru_iterator().filter(lambda t: not t.once)) + return results diff --git a/tools/cru-py/cru/_func.py b/tools/cru-py/cru/_func.py new file mode 100644 index 0000000..fc57802 --- /dev/null +++ b/tools/cru-py/cru/_func.py @@ -0,0 +1,172 @@ +from __future__ import annotations + +from collections.abc import Callable, Iterable +from enum import Flag, auto +from typing import ( + Any, + Generic, + Literal, + ParamSpec, + TypeAlias, + TypeVar, +) + + +from ._base import CRU +from ._const import CruPlaceholder + +_P = ParamSpec("_P") +_P1 = ParamSpec("_P1") +_T = TypeVar("_T") + + +class _Dec: + @staticmethod + def wrap( + origin: Callable[_P, 
Callable[_P1, _T]] + ) -> Callable[_P, _Wrapper[_P1, _T]]: + def _wrapped(*args: _P.args, **kwargs: _P.kwargs) -> _Wrapper[_P1, _T]: + return _Wrapper(origin(*args, **kwargs)) + + return _wrapped + + +class _RawBase: + @staticmethod + def none(*_v, **_kwargs) -> None: + return None + + @staticmethod + def true(*_v, **_kwargs) -> Literal[True]: + return True + + @staticmethod + def false(*_v, **_kwargs) -> Literal[False]: + return False + + @staticmethod + def identity(v: _T) -> _T: + return v + + @staticmethod + def only_you(v: _T, *_v, **_kwargs) -> _T: + return v + + @staticmethod + def equal(a: Any, b: Any) -> bool: + return a == b + + @staticmethod + def not_equal(a: Any, b: Any) -> bool: + return a != b + + @staticmethod + def not_(v: Any) -> Any: + return not v + + +class _Wrapper(Generic[_P, _T]): + def __init__(self, f: Callable[_P, _T]): + self._f = f + + @property + def me(self) -> Callable[_P, _T]: + return self._f + + def __call__(self, *args: _P.args, **kwargs: _P.kwargs) -> _T: + return self._f(*args, **kwargs) + + @_Dec.wrap + def bind(self, *bind_args, **bind_kwargs) -> Callable[..., _T]: + func = self.me + + def bound_func(*args, **kwargs): + popped = 0 + real_args = [] + for arg in bind_args: + if CruPlaceholder.check(arg): + real_args.append(args[popped]) + popped += 1 + else: + real_args.append(arg) + real_args.extend(args[popped:]) + return func(*real_args, **(bind_kwargs | kwargs)) + + return bound_func + + class ChainMode(Flag): + ARGS = auto() + KWARGS = auto() + BOTH = ARGS | KWARGS + + ArgsChainableCallable: TypeAlias = Callable[..., Iterable[Any]] + KwargsChainableCallable: TypeAlias = Callable[..., Iterable[tuple[str, Any]]] + ChainableCallable: TypeAlias = Callable[ + ..., tuple[Iterable[Any], Iterable[tuple[str, Any]]] + ] + + @_Dec.wrap + def chain_with_args( + self, funcs: Iterable[ArgsChainableCallable], *bind_args, **bind_kwargs + ) -> ArgsChainableCallable: + def chained_func(*args): + args = self.bind(*bind_args, **bind_kwargs)(*args) + + for func in funcs: + args = _Wrapper(func).bind(*bind_args, **bind_kwargs)(*args) + return args + + return chained_func + + @_Dec.wrap + def chain_with_kwargs( + self, funcs: Iterable[KwargsChainableCallable], *bind_args, **bind_kwargs + ) -> KwargsChainableCallable: + def chained_func(**kwargs): + kwargs = self.bind(*bind_args, **bind_kwargs)(**kwargs) + for func in funcs: + kwargs = _Wrapper(func).bind(func, *bind_args, **bind_kwargs)(**kwargs) + return kwargs + + return chained_func + + @_Dec.wrap + def chain_with_both( + self, funcs: Iterable[ChainableCallable], *bind_args, **bind_kwargs + ) -> ChainableCallable: + def chained_func(*args, **kwargs): + for func in funcs: + args, kwargs = _Wrapper(func).bind(func, *bind_args, **bind_kwargs)( + *args, **kwargs + ) + return args, kwargs + + return chained_func + + +class _Base: + none = _Wrapper(_RawBase.none) + true = _Wrapper(_RawBase.true) + false = _Wrapper(_RawBase.false) + identity = _Wrapper(_RawBase.identity) + only_you = _Wrapper(_RawBase.only_you) + equal = _Wrapper(_RawBase.equal) + not_equal = _Wrapper(_RawBase.not_equal) + not_ = _Wrapper(_RawBase.not_) + + +class _Creators: + @staticmethod + def make_isinstance_of_types(*types: type) -> Callable: + return _Wrapper(lambda v: type(v) in types) + + +class CruFunction: + RawBase: TypeAlias = _RawBase + Base: TypeAlias = _Base + Creators: TypeAlias = _Creators + Wrapper: TypeAlias = _Wrapper + Decorators: TypeAlias = _Dec + + +CRU.add_objects(CruFunction) diff --git a/tools/cru-py/cru/_helper.py 
b/tools/cru-py/cru/_helper.py new file mode 100644 index 0000000..43baf46 --- /dev/null +++ b/tools/cru-py/cru/_helper.py @@ -0,0 +1,16 @@ +from collections.abc import Callable +from typing import Any, Iterable, TypeVar, cast + +_T = TypeVar("_T") +_D = TypeVar("_D") + + +def remove_element( + iterable: Iterable[_T | None], to_rm: Iterable[Any], des: type[_D] | None = None +) -> _D: + to_rm = set(to_rm) + return cast(Callable[..., _D], des or list)(v for v in iterable if v not in to_rm) + + +def remove_none(iterable: Iterable[_T | None], des: type[_D] | None = None) -> _D: + return cast(Callable[..., _D], des or list)(v for v in iterable if v is not None) diff --git a/tools/cru-py/cru/_iter.py b/tools/cru-py/cru/_iter.py new file mode 100644 index 0000000..8f58561 --- /dev/null +++ b/tools/cru-py/cru/_iter.py @@ -0,0 +1,466 @@ +from __future__ import annotations + +from collections.abc import Iterable, Callable, Generator, Iterator +from dataclasses import dataclass +from enum import Enum +from typing import ( + Concatenate, + Literal, + Never, + Self, + TypeAlias, + TypeVar, + ParamSpec, + Any, + Generic, + cast, +) + +from ._base import CRU +from ._const import CruNotFound +from ._error import cru_unreachable + +_P = ParamSpec("_P") +_T = TypeVar("_T") +_O = TypeVar("_O") +_V = TypeVar("_V") +_R = TypeVar("_R") + + +class _Generic: + class StepActionKind(Enum): + SKIP = 0 + PUSH = 1 + STOP = 2 + AGGREGATE = 3 + + @dataclass + class StepAction(Generic[_V, _R]): + value: Iterable[Self] | _V | _R | None + kind: _Generic.StepActionKind + + @property + def push_value(self) -> _V: + assert self.kind == _Generic.StepActionKind.PUSH + return cast(_V, self.value) + + @property + def stop_value(self) -> _R: + assert self.kind == _Generic.StepActionKind.STOP + return cast(_R, self.value) + + @staticmethod + def skip() -> _Generic.StepAction[_V, _R]: + return _Generic.StepAction(None, _Generic.StepActionKind.SKIP) + + @staticmethod + def push(value: _V | None) -> _Generic.StepAction[_V, _R]: + return _Generic.StepAction(value, _Generic.StepActionKind.PUSH) + + @staticmethod + def stop(value: _R | None = None) -> _Generic.StepAction[_V, _R]: + return _Generic.StepAction(value, _Generic.StepActionKind.STOP) + + @staticmethod + def aggregate( + *results: _Generic.StepAction[_V, _R], + ) -> _Generic.StepAction[_V, _R]: + return _Generic.StepAction(results, _Generic.StepActionKind.AGGREGATE) + + @staticmethod + def push_last(value: _V | None) -> _Generic.StepAction[_V, _R]: + return _Generic.StepAction.aggregate( + _Generic.StepAction.push(value), _Generic.StepAction.stop() + ) + + def flatten(self) -> Iterable[Self]: + return _Generic.flatten( + self, + is_leave=lambda r: r.kind != _Generic.StepActionKind.AGGREGATE, + get_children=lambda r: cast(Iterable[Self], r.value), + ) + + GeneralStepAction: TypeAlias = StepAction[_V, _R] | _V | _R | None + IterateOperation: TypeAlias = Callable[[_T, int], GeneralStepAction[_V, _R]] + IteratePreHook: TypeAlias = Callable[[Iterable[_T]], GeneralStepAction[_V, _R]] + IteratePostHook: TypeAlias = Callable[[int], GeneralStepAction[_V, _R]] + + @staticmethod + def _is_not_iterable(o: Any) -> bool: + return not isinstance(o, Iterable) + + @staticmethod + def _return_self(o): + return o + + @staticmethod + def iterable_flatten( + maybe_iterable: Iterable[_T] | _T, max_depth: int = -1, *, _depth: int = 0 + ) -> Iterable[Iterable[_T] | _T]: + if _depth == max_depth or not isinstance(maybe_iterable, Iterable): + yield maybe_iterable + return + + for child in maybe_iterable: 
+ yield from _Generic.iterable_flatten( + child, + max_depth, + _depth=_depth + 1, + ) + + @staticmethod + def flatten( + o: _O, + max_depth: int = -1, + /, + is_leave: CruIterator.ElementPredicate[_O] = _is_not_iterable, + get_children: CruIterator.ElementTransformer[_O, Iterable[_O]] = _return_self, + *, + _depth: int = 0, + ) -> Iterable[_O]: + if _depth == max_depth or is_leave(o): + yield o + return + for child in get_children(o): + yield from _Generic.flatten( + child, + max_depth, + is_leave, + get_children, + _depth=_depth + 1, + ) + + class Results: + @staticmethod + def true(_) -> Literal[True]: + return True + + @staticmethod + def false(_) -> Literal[False]: + return False + + @staticmethod + def not_found(_) -> Literal[CruNotFound.VALUE]: + return CruNotFound.VALUE + + @staticmethod + def _non_result_to_push(value: Any) -> StepAction[_V, _R]: + return _Generic.StepAction.push(value) + + @staticmethod + def _non_result_to_stop(value: Any) -> StepAction[_V, _R]: + return _Generic.StepAction.stop(value) + + @staticmethod + def _none_hook(_: Any) -> StepAction[_V, _R]: + return _Generic.StepAction.skip() + + def iterate( + iterable: Iterable[_T], + operation: IterateOperation[_T, _V, _R], + fallback_return: _R, + pre_iterate: IteratePreHook[_T, _V, _R], + post_iterate: IteratePostHook[_V, _R], + convert_value_result: Callable[[_V | _R | None], StepAction[_V, _R]], + ) -> Generator[_V, None, _R]: + pre_result = pre_iterate(iterable) + if not isinstance(pre_result, _Generic.StepAction): + real_pre_result = convert_value_result(pre_result) + for r in real_pre_result.flatten(): + if r.kind == _Generic.StepActionKind.STOP: + return r.stop_value + elif r.kind == _Generic.StepActionKind.PUSH: + yield r.push_value + else: + assert r.kind == _Generic.StepActionKind.SKIP + + for index, element in enumerate(iterable): + result = operation(element, index) + if not isinstance(result, _Generic.StepAction): + real_result = convert_value_result(result) + for r in real_result.flatten(): + if r.kind == _Generic.StepActionKind.STOP: + return r.stop_value + elif r.kind == _Generic.StepActionKind.PUSH: + yield r.push_value + else: + assert r.kind == _Generic.StepActionKind.SKIP + continue + + post_result = post_iterate(index + 1) + if not isinstance(post_result, _Generic.StepAction): + real_post_result = convert_value_result(post_result) + for r in real_post_result.flatten(): + if r.kind == _Generic.StepActionKind.STOP: + return r.stop_value + elif r.kind == _Generic.StepActionKind.PUSH: + yield r.push_value + else: + assert r.kind == _Generic.StepActionKind.SKIP + + return fallback_return + + def create_new( + iterable: Iterable[_T], + operation: IterateOperation[_T, _V, _R], + fallback_return: _R, + /, + pre_iterate: IteratePreHook[_T, _V, _R] | None = None, + post_iterate: IteratePostHook[_V, _R] | None = None, + ) -> Generator[_V, None, _R]: + return _Generic.iterate( + iterable, + operation, + fallback_return, + pre_iterate or _Generic._none_hook, + post_iterate or _Generic._none_hook, + _Generic._non_result_to_push, + ) + + def get_result( + iterable: Iterable[_T], + operation: IterateOperation[_T, _V, _R], + fallback_return: _R, + /, + pre_iterate: IteratePreHook[_T, _V, _R] | None = None, + post_iterate: IteratePostHook[_V, _R] | None = None, + ) -> _R: + try: + for _ in _Generic.iterate( + iterable, + operation, + fallback_return, + pre_iterate or _Generic._none_hook, + post_iterate or _Generic._none_hook, + _Generic._non_result_to_stop, + ): + pass + except StopIteration as stop: + return 
stop.value + cru_unreachable() + + +class _Helpers: + @staticmethod + def auto_count(c: Callable[Concatenate[int, _P], _O]) -> Callable[_P, _O]: + count = 0 + + def wrapper(*args: _P.args, **kwargs: _P.kwargs) -> _O: + nonlocal count + r = c(count, *args, **kwargs) + count += 1 + return r + + return wrapper + + +class _Creators: + class Raw: + @staticmethod + def empty() -> Iterator[Never]: + return iter([]) + + @staticmethod + def range(*args) -> Iterator[int]: + return iter(range(*args)) + + @staticmethod + def unite(*args: _T) -> Iterator[_T]: + return iter(args) + + @staticmethod + def _concat(*iterables: Iterable[_T]) -> Iterable[_T]: + for iterable in iterables: + yield from iterable + + @staticmethod + def concat(*iterables: Iterable[_T]) -> Iterator[_T]: + return iter(_Creators.Raw._concat(*iterables)) + + @staticmethod + def _wrap(f: Callable[_P, Iterable[_O]]) -> Callable[_P, CruIterator[_O]]: + def _wrapped(*args: _P.args, **kwargs: _P.kwargs) -> CruIterator[_O]: + return CruIterator(f(*args, **kwargs)) + + return _wrapped + + empty = _wrap(Raw.empty) + range = _wrap(Raw.range) + unite = _wrap(Raw.unite) + concat = _wrap(Raw.concat) + + +class CruIterator(Generic[_T]): + ElementOperation: TypeAlias = Callable[[_V], Any] + ElementPredicate: TypeAlias = Callable[[_V], bool] + AnyElementPredicate: TypeAlias = ElementPredicate[Any] + ElementTransformer: TypeAlias = Callable[[_V], _O] + SelfElementTransformer: TypeAlias = ElementTransformer[_V, _V] + AnyElementTransformer: TypeAlias = ElementTransformer[Any, Any] + + Creators: TypeAlias = _Creators + Helpers: TypeAlias = _Helpers + + def __init__(self, iterable: Iterable[_T]) -> None: + self._iterator = iter(iterable) + + def __iter__(self) -> Iterator[_T]: + return self._iterator + + def create_new_me(self, iterable: Iterable[_O]) -> CruIterator[_O]: + return type(self)(iterable) # type: ignore + + @staticmethod + def _wrap( + f: Callable[Concatenate[CruIterator[_T], _P], Iterable[_O]], + ) -> Callable[Concatenate[CruIterator[_T], _P], CruIterator[_O]]: + def _wrapped( + self: CruIterator[_T], *args: _P.args, **kwargs: _P.kwargs + ) -> CruIterator[_O]: + return self.create_new_me(f(self, *args, **kwargs)) + + return _wrapped + + @_wrap + def replace_me(self, iterable: Iterable[_O]) -> Iterable[_O]: + return iterable + + def replace_me_with_empty(self) -> CruIterator[Never]: + return self.create_new_me(_Creators.Raw.empty()) + + def replace_me_with_range(self, *args) -> CruIterator[int]: + return self.create_new_me(_Creators.Raw.range(*args)) + + def replace_me_with_unite(self, *args: _O) -> CruIterator[_O]: + return self.create_new_me(_Creators.Raw.unite(*args)) + + def replace_me_with_concat(self, *iterables: Iterable[_T]) -> CruIterator[_T]: + return self.create_new_me(_Creators.Raw.concat(*iterables)) + + def to_set(self) -> set[_T]: + return set(self) + + def to_list(self) -> list[_T]: + return list(self) + + def all(self, predicate: ElementPredicate[_T]) -> bool: + for value in self: + if not predicate(value): + return False + return True + + def any(self, predicate: ElementPredicate[_T]) -> bool: + for value in self: + if predicate(value): + return True + return False + + def foreach(self, operation: ElementOperation[_T]) -> None: + for value in self: + operation(value) + + @_wrap + def transform(self, transformer: ElementTransformer[_T, _O]) -> Iterable[_O]: + for value in self: + yield transformer(value) + + map = transform + + @_wrap + def filter(self, predicate: ElementPredicate[_T]) -> Iterable[_T]: + for value in self: + 
if predicate(value): + yield value + + @_wrap + def continue_if(self, predicate: ElementPredicate[_T]) -> Iterable[_T]: + for value in self: + yield value + if not predicate(value): + break + + def first_n(self, max_count: int) -> CruIterator[_T]: + if max_count < 0: + raise ValueError("max_count must be 0 or positive.") + if max_count == 0: + return self.replace_me_with_empty() # type: ignore + return self.continue_if(_Helpers.auto_count(lambda i, _: i < max_count - 1)) + + def drop_n(self, n: int) -> CruIterator[_T]: + if n < 0: + raise ValueError("n must be 0 or positive.") + if n == 0: + return self + return self.filter(_Helpers.auto_count(lambda i, _: i >= n)) + + def single_or( + self, fallback: _O | CruNotFound = CruNotFound.VALUE + ) -> _T | _O | CruNotFound: + first_2 = self.first_n(2) + has_value = False + for element in first_2: + if has_value: + raise ValueError("More than one value found.") + has_value = True + value = element + if has_value: + return value + else: + return fallback + + def first_or( + self, fallback: _O | CruNotFound = CruNotFound.VALUE + ) -> _T | _O | CruNotFound: + return self.first_n(1).single_or(fallback) + + @_wrap + def flatten(self, max_depth: int = -1) -> Iterable[Any]: + return _Generic.iterable_flatten(self, max_depth) + + def select_by_indices(self, indices: Iterable[int]) -> CruIterator[_T]: + index_set = set(indices) + max_index = max(index_set) + return self.first_n(max_index + 1).filter( + _Helpers.auto_count(lambda i, _: i in index_set) + ) + + def remove_values(self, values: Iterable[Any]) -> CruIterator[_T]: + value_set = set(values) + return self.filter(lambda v: v not in value_set) + + def replace_values( + self, old_values: Iterable[Any], new_value: _O + ) -> Iterable[_T | _O]: + value_set = set(old_values) + return self.transform(lambda v: new_value if v in value_set else v) + + def group_by(self, key_getter: Callable[[_T], _O]) -> dict[_O, list[_T]]: + result: dict[_O, list[_T]] = {} + + for item in self: + key = key_getter(item) + if key not in result: + result[key] = [] + result[key].append(item) + + return result + + +class CruIterMixin(Generic[_T]): + def cru_iter(self: Iterable[_T]) -> CruIterator[_T]: + return CruIterator(self) + + +class CruIterList(list[_T], CruIterMixin[_T]): + pass + + +class CruIterable: + Generic: TypeAlias = _Generic + Iterator: TypeAlias = CruIterator[_T] + Helpers: TypeAlias = _Helpers + Mixin: TypeAlias = CruIterMixin[_T] + IterList: TypeAlias = CruIterList[_T] + + +CRU.add_objects(CruIterable, CruIterator) diff --git a/tools/cru-py/cru/_type.py b/tools/cru-py/cru/_type.py new file mode 100644 index 0000000..1f81da3 --- /dev/null +++ b/tools/cru-py/cru/_type.py @@ -0,0 +1,52 @@ +from collections.abc import Iterable +from typing import Any + +from ._error import CruException, CruLogicError +from ._iter import CruIterator + + +class CruTypeCheckError(CruException): + pass + + +DEFAULT_NONE_ERR_MSG = "None is not allowed here." +DEFAULT_TYPE_ERR_MSG = "Object of this type is not allowed here."
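# Standalone illustrative sketch of the iterator and type-set APIs (CruIterator
# above, CruTypeSet defined just below); a sketch only, assuming the package is
# importable as `cru` as wired up in cru/__init__.py.
from cru import CruIterator, CruTypeSet

evens_squared = (
    CruIterator(range(10))
    .filter(lambda v: v % 2 == 0)
    .transform(lambda v: v * v)
    .to_list()
)  # -> [0, 4, 16, 36, 64]
first_match = CruIterator(range(10)).filter(lambda v: v > 7).first_or(None)  # -> 8

type_set = CruTypeSet(str, int)
type_set.check_value("abc")                  # passes silently
type_set.check_value(None, allow_none=True)  # None is tolerated when allowed
# type_set.check_value(1.5) would raise CruTypeCheckError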
+ + +class CruTypeSet(set[type]): + def __init__(self, *types: type): + type_set = CruIterator(types).filter(lambda t: t is not None).to_set() + if not CruIterator(type_set).all(lambda t: isinstance(t, type)): + raise CruLogicError("TypeSet can only contain type.") + super().__init__(type_set) + + def check_value( + self, + value: Any, + /, + allow_none: bool = False, + empty_allow_all: bool = True, + ) -> None: + if value is None: + if allow_none: + return + else: + raise CruTypeCheckError(DEFAULT_NONE_ERR_MSG) + if len(self) == 0 and empty_allow_all: + return + if not CruIterator(self).any(lambda t: isinstance(value, t)): + raise CruTypeCheckError(DEFAULT_TYPE_ERR_MSG) + + def check_value_list( + self, + values: Iterable[Any], + /, + allow_none: bool = False, + empty_allow_all: bool = True, + ) -> None: + for value in values: + self.check_value( + value, + allow_none, + empty_allow_all, + ) diff --git a/tools/cru-py/cru/attr.py b/tools/cru-py/cru/attr.py new file mode 100644 index 0000000..d4cc86a --- /dev/null +++ b/tools/cru-py/cru/attr.py @@ -0,0 +1,364 @@ +from __future__ import annotations + +import copy +from collections.abc import Callable, Iterable +from dataclasses import dataclass, field +from typing import Any + +from .list import CruUniqueKeyList +from ._type import CruTypeSet +from ._const import CruNotFound, CruUseDefault, CruDontChange +from ._iter import CruIterator + + +@dataclass +class CruAttr: + + name: str + value: Any + description: str | None + + @staticmethod + def make( + name: str, value: Any = CruUseDefault.VALUE, description: str | None = None + ) -> CruAttr: + return CruAttr(name, value, description) + + +CruAttrDefaultFactory = Callable[["CruAttrDef"], Any] +CruAttrTransformer = Callable[[Any, "CruAttrDef"], Any] +CruAttrValidator = Callable[[Any, "CruAttrDef"], None] + + +@dataclass +class CruAttrDef: + name: str + description: str + default_factory: CruAttrDefaultFactory + transformer: CruAttrTransformer + validator: CruAttrValidator + + def __init__( + self, + name: str, + description: str, + default_factory: CruAttrDefaultFactory, + transformer: CruAttrTransformer, + validator: CruAttrValidator, + ) -> None: + self.name = name + self.description = description + self.default_factory = default_factory + self.transformer = transformer + self.validator = validator + + def transform(self, value: Any) -> Any: + if self.transformer is not None: + return self.transformer(value, self) + return value + + def validate(self, value: Any, /, force_allow_none: bool = False) -> None: + if force_allow_none is value is None: + return + if self.validator is not None: + self.validator(value, self) + + def transform_and_validate( + self, value: Any, /, force_allow_none: bool = False + ) -> Any: + value = self.transform(value) + self.validate(value, force_allow_none) + return value + + def make_default_value(self) -> Any: + return self.transform_and_validate(self.default_factory(self)) + + def adopt(self, attr: CruAttr) -> CruAttr: + attr = copy.deepcopy(attr) + + if attr.name is None: + attr.name = self.name + elif attr.name != self.name: + raise ValueError(f"Attr name is not match: {attr.name} != {self.name}") + + if attr.value is CruUseDefault.VALUE: + attr.value = self.make_default_value() + else: + attr.value = self.transform_and_validate(attr.value) + + if attr.description is None: + attr.description = self.description + + return attr + + def make( + self, value: Any = CruUseDefault.VALUE, description: None | str = None + ) -> CruAttr: + value = 
self.make_default_value() if value is CruUseDefault.VALUE else value + value = self.transform_and_validate(value) + return CruAttr( + self.name, + value, + description if description is not None else self.description, + ) + + +@dataclass +class CruAttrDefBuilder: + + name: str + description: str + types: list[type] | None = field(default=None) + allow_none: bool = field(default=False) + default: Any = field(default=CruUseDefault.VALUE) + default_factory: CruAttrDefaultFactory | None = field(default=None) + auto_list: bool = field(default=False) + transformers: list[CruAttrTransformer] = field(default_factory=list) + validators: list[CruAttrValidator] = field(default_factory=list) + override_transformer: CruAttrTransformer | None = field(default=None) + override_validator: CruAttrValidator | None = field(default=None) + + build_hook: Callable[[CruAttrDef], None] | None = field(default=None) + + def __init__(self, name: str, description: str) -> None: + super().__init__() + self.name = name + self.description = description + + def auto_adjust_default(self) -> None: + if self.default is not CruUseDefault.VALUE and self.default is not None: + return + if self.allow_none and self.default is CruUseDefault.VALUE: + self.default = None + if not self.allow_none and self.default is None: + self.default = CruUseDefault.VALUE + if self.auto_list and not self.allow_none: + self.default = [] + + def with_name(self, name: str | CruDontChange) -> CruAttrDefBuilder: + if name is not CruDontChange.VALUE: + self.name = name + return self + + def with_description( + self, default_description: str | CruDontChange + ) -> CruAttrDefBuilder: + if default_description is not CruDontChange.VALUE: + self.description = default_description + return self + + def with_default(self, default: Any) -> CruAttrDefBuilder: + if default is not CruDontChange.VALUE: + self.default = default + return self + + def with_default_factory( + self, + default_factory: CruAttrDefaultFactory | CruDontChange, + ) -> CruAttrDefBuilder: + if default_factory is not CruDontChange.VALUE: + self.default_factory = default_factory + return self + + def with_types( + self, + types: Iterable[type] | None | CruDontChange, + ) -> CruAttrDefBuilder: + if types is not CruDontChange.VALUE: + self.types = None if types is None else list(types) + return self + + def with_allow_none(self, allow_none: bool | CruDontChange) -> CruAttrDefBuilder: + if allow_none is not CruDontChange.VALUE: + self.allow_none = allow_none + return self + + def with_auto_list( + self, auto_list: bool | CruDontChange = True + ) -> CruAttrDefBuilder: + if auto_list is not CruDontChange.VALUE: + self.auto_list = auto_list + return self + + def with_constraint( + self, + /, + allow_none: bool | CruDontChange = CruDontChange.VALUE, + types: Iterable[type] | None | CruDontChange = CruDontChange.VALUE, + default: Any = CruDontChange.VALUE, + default_factory: CruAttrDefaultFactory | CruDontChange = CruDontChange.VALUE, + auto_list: bool | CruDontChange = CruDontChange.VALUE, + ) -> CruAttrDefBuilder: + return ( + self.with_allow_none(allow_none) + .with_types(types) + .with_default(default) + .with_default_factory(default_factory) + .with_auto_list(auto_list) + ) + + def add_transformer(self, transformer: CruAttrTransformer) -> CruAttrDefBuilder: + self.transformers.append(transformer) + return self + + def clear_transformers(self) -> CruAttrDefBuilder: + self.transformers.clear() + return self + + def add_validator(self, validator: CruAttrValidator) -> CruAttrDefBuilder: + 
self.validators.append(validator) + return self + + def clear_validators(self) -> CruAttrDefBuilder: + self.validators.clear() + return self + + def with_override_transformer( + self, override_transformer: CruAttrTransformer | None | CruDontChange + ) -> CruAttrDefBuilder: + if override_transformer is not CruDontChange.VALUE: + self.override_transformer = override_transformer + return self + + def with_override_validator( + self, override_validator: CruAttrValidator | None | CruDontChange + ) -> CruAttrDefBuilder: + if override_validator is not CruDontChange.VALUE: + self.override_validator = override_validator + return self + + def is_valid(self) -> tuple[bool, str]: + if not isinstance(self.name, str): + return False, "Name must be a string!" + if not isinstance(self.description, str): + return False, "Default description must be a string!" + if ( + not self.allow_none + and self.default is None + and self.default_factory is None + ): + return False, "Default must be set if allow_none is False!" + return True, "" + + @staticmethod + def _build( + builder: CruAttrDefBuilder, auto_adjust_default: bool = True + ) -> CruAttrDef: + if auto_adjust_default: + builder.auto_adjust_default() + + valid, err = builder.is_valid() + if not valid: + raise ValueError(err) + + def composed_transformer(value: Any, attr_def: CruAttrDef) -> Any: + def transform_value(single_value: Any) -> Any: + for transformer in builder.transformers: + single_value = transformer(single_value, attr_def) + return single_value + + if builder.auto_list: + if not isinstance(value, list): + value = [value] + value = CruIterator(value).transform(transform_value).to_list() + + else: + value = transform_value(value) + return value + + type_set = None if builder.types is None else CruTypeSet(*builder.types) + + def composed_validator(value: Any, attr_def: CruAttrDef): + def validate_value(single_value: Any) -> None: + if type_set is not None: + type_set.check_value(single_value, allow_none=builder.allow_none) + for validator in builder.validators: + validator(single_value, attr_def) + + if builder.auto_list: + CruIterator(value).foreach(validate_value) + else: + validate_value(value) + + real_transformer = builder.override_transformer or composed_transformer + real_validator = builder.override_validator or composed_validator + + default_factory = builder.default_factory + if default_factory is None: + + def default_factory(_d): + return copy.deepcopy(builder.default) + + d = CruAttrDef( + builder.name, + builder.description, + default_factory, + real_transformer, + real_validator, + ) + if builder.build_hook: + builder.build_hook(d) + return d + + def build(self, auto_adjust_default=True) -> CruAttrDef: + c = copy.deepcopy(self) + self.build_hook = None + return CruAttrDefBuilder._build(c, auto_adjust_default) + + +class CruAttrDefRegistry(CruUniqueKeyList[CruAttrDef, str]): + + def __init__(self) -> None: + super().__init__(lambda d: d.name) + + def make_builder(self, name: str, default_description: str) -> CruAttrDefBuilder: + b = CruAttrDefBuilder(name, default_description) + b.build_hook = lambda a: self.add(a) + return b + + def adopt(self, attr: CruAttr) -> CruAttr: + d = self.get(attr.name) + return d.adopt(attr) + + +class CruAttrTable(CruUniqueKeyList[CruAttr, str]): + def __init__(self, registry: CruAttrDefRegistry) -> None: + self._registry: CruAttrDefRegistry = registry + super().__init__(lambda a: a.name, before_add=registry.adopt) + + @property + def registry(self) -> CruAttrDefRegistry: + return self._registry + + 
def get_value_or(self, name: str, fallback: Any = CruNotFound.VALUE) -> Any: + a = self.get_or(name, CruNotFound.VALUE) + if a is CruNotFound.VALUE: + return fallback + return a.value + + def get_value(self, name: str) -> Any: + a = self.get(name) + return a.value + + def make_attr( + self, + name: str, + value: Any = CruUseDefault.VALUE, + /, + description: str | None = None, + ) -> CruAttr: + d = self._registry.get(name) + return d.make(value, description or d.description) + + def add_value( + self, + name: str, + value: Any = CruUseDefault.VALUE, + /, + description: str | None = None, + *, + replace: bool = False, + ) -> CruAttr: + attr = self.make_attr(name, value, description) + self.add(attr, replace) + return attr diff --git a/tools/cru-py/cru/config.py b/tools/cru-py/cru/config.py new file mode 100644 index 0000000..0f6f0d0 --- /dev/null +++ b/tools/cru-py/cru/config.py @@ -0,0 +1,196 @@ +from __future__ import annotations + +from typing import Any, TypeVar, Generic + +from ._error import CruException +from .list import CruUniqueKeyList +from .value import ( + INTEGER_VALUE_TYPE, + TEXT_VALUE_TYPE, + CruValueTypeError, + ValueGeneratorBase, + ValueType, +) + +_T = TypeVar("_T") + + +class CruConfigError(CruException): + def __init__(self, message: str, item: ConfigItem, *args, **kwargs): + super().__init__(message, *args, **kwargs) + self._item = item + + @property + def item(self) -> ConfigItem: + return self._item + + +class ConfigItem(Generic[_T]): + def __init__( + self, + name: str, + description: str, + value_type: ValueType[_T], + value: _T | None = None, + /, + default: ValueGeneratorBase[_T] | _T | None = None, + ) -> None: + self._name = name + self._description = description + self._value_type = value_type + self._value = value + self._default = default + + @property + def name(self) -> str: + return self._name + + @property + def description(self) -> str: + return self._description + + @property + def value_type(self) -> ValueType[_T]: + return self._value_type + + @property + def is_set(self) -> bool: + return self._value is not None + + @property + def value(self) -> _T: + if self._value is None: + raise CruConfigError( + "Config value is not set.", + self, + user_message=f"Config item {self.name} is not set.", + ) + return self._value + + @property + def value_str(self) -> str: + return self.value_type.convert_value_to_str(self.value) + + def set_value(self, v: _T | str, allow_convert_from_str=False): + if allow_convert_from_str: + self._value = self.value_type.check_value_or_try_convert_from_str(v) + else: + self._value = self.value_type.check_value(v) + + def reset(self): + self._value = None + + @property + def default(self) -> ValueGeneratorBase[_T] | _T | None: + return self._default + + @property + def can_generate_default(self) -> bool: + return self.default is not None + + def generate_default_value(self) -> _T: + if self.default is None: + raise CruConfigError( + "Config item does not support default value generation.", self + ) + elif isinstance(self.default, ValueGeneratorBase): + v = self.default.generate() + else: + v = self.default + try: + self.value_type.check_value(v) + return v + except CruValueTypeError as e: + raise CruConfigError( + "Config value generator returns an invalid value.", self + ) from e + + def copy(self) -> "ConfigItem": + return ConfigItem( + self.name, + self.description, + self.value_type, + self.value, + self.default, + ) + + @property + def description_str(self) -> str: + return f"{self.name} ({self.value_type.name}): 
{self.description}" + + +class Configuration(CruUniqueKeyList[ConfigItem[Any], str]): + def __init__(self): + super().__init__(lambda c: c.name) + + def get_set_items(self) -> list[ConfigItem[Any]]: + return [item for item in self if item.is_set] + + def get_unset_items(self) -> list[ConfigItem[Any]]: + return [item for item in self if not item.is_set] + + @property + def all_set(self) -> bool: + return len(self.get_unset_items()) == 0 + + @property + def all_not_set(self) -> bool: + return len(self.get_set_items()) == 0 + + def add_text_config( + self, + name: str, + description: str, + value: str | None = None, + default: ValueGeneratorBase[str] | str | None = None, + ) -> ConfigItem[str]: + item = ConfigItem(name, description, TEXT_VALUE_TYPE, value, default) + self.add(item) + return item + + def add_int_config( + self, + name: str, + description: str, + value: int | None = None, + default: ValueGeneratorBase[int] | int | None = None, + ) -> ConfigItem[int]: + item = ConfigItem(name, description, INTEGER_VALUE_TYPE, value, default) + self.add(item) + return item + + def set_config_item( + self, + name: str, + value: Any | str, + allow_convert_from_str=True, + ) -> None: + item = self.get(name) + item.set_value( + value, + allow_convert_from_str=allow_convert_from_str, + ) + + def reset_all(self) -> None: + for item in self: + item.reset() + + def to_dict(self) -> dict[str, Any]: + return {item.name: item.value for item in self} + + def to_str_dict(self) -> dict[str, str]: + return { + item.name: item.value_type.convert_value_to_str(item.value) for item in self + } + + def set_value_dict( + self, + value_dict: dict[str, Any], + allow_convert_from_str: bool = False, + ) -> None: + for name, value in value_dict.items(): + item = self.get(name) + item.set_value( + value, + allow_convert_from_str=allow_convert_from_str, + ) diff --git a/tools/cru-py/cru/list.py b/tools/cru-py/cru/list.py new file mode 100644 index 0000000..9d210b7 --- /dev/null +++ b/tools/cru-py/cru/list.py @@ -0,0 +1,160 @@ +from __future__ import annotations + +from collections.abc import Callable, Iterator +from typing import Any, Generic, Iterable, TypeAlias, TypeVar, overload + +from ._error import CruInternalError +from ._iter import CruIterator +from ._const import CruNotFound + +_T = TypeVar("_T") +_O = TypeVar("_O") + + +class CruListEdit(CruIterator[_T]): + def __init__(self, iterable: Iterable[_T], _list: CruList[Any]) -> None: + super().__init__(iterable) + self._list = _list + + def create_me(self, iterable: Iterable[_O]) -> CruListEdit[_O]: + return CruListEdit(iterable, self._list) + + @property + def list(self) -> CruList[Any]: + return self._list + + def done(self) -> CruList[Any]: + self._list.reset(self) + return self._list + + +class CruList(list[_T]): + def reset(self, new_values: Iterable[_T]): + if self is new_values: + new_values = list(new_values) + self.clear() + self.extend(new_values) + return self + + def as_cru_iterator(self) -> CruIterator[_T]: + return CruIterator(self) + + @staticmethod + def make(maybe_list: Iterable[_T] | _T | None) -> CruList[_T]: + if maybe_list is None: + return CruList() + if isinstance(maybe_list, Iterable): + return CruList(maybe_list) + return CruList([maybe_list]) + + +_K = TypeVar("_K") + +_KeyGetter: TypeAlias = Callable[[_T], _K] + + +class CruUniqueKeyList(Generic[_T, _K]): + def __init__( + self, + key_getter: _KeyGetter[_T, _K], + *, + before_add: Callable[[_T], _T] | None = None, + ): + super().__init__() + self._key_getter = key_getter + self._before_add = 
before_add + self._list: CruList[_T] = CruList() + + @property + def key_getter(self) -> _KeyGetter[_T, _K]: + return self._key_getter + + @property + def internal_list(self) -> CruList[_T]: + return self._list + + def validate_self(self): + keys = self._list.as_cru_iterator().transform(self._key_getter).to_list() + if len(keys) != len(set(keys)): + raise CruInternalError("Duplicate keys!") + + @overload + def get_or( + self, key: _K, fallback: CruNotFound = CruNotFound.VALUE + ) -> _T | CruNotFound: ... + + @overload + def get_or(self, key: _K, fallback: _O) -> _T | _O: ... + + def get_or( + self, key: _K, fallback: _O | CruNotFound = CruNotFound.VALUE + ) -> _T | _O | CruNotFound: + return ( + self._list.as_cru_iterator() + .filter(lambda v: key == self._key_getter(v)) + .first_or(fallback) + ) + + def get(self, key: _K) -> _T: + value = self.get_or(key) + if value is CruNotFound.VALUE: + raise KeyError(f"Key {key} not found!") + return value # type: ignore + + @property + def keys(self) -> Iterable[_K]: + return self._list.as_cru_iterator().map(self._key_getter) + + def has_key(self, key: _K) -> bool: + return self.get_or(key) != CruNotFound.VALUE + + def try_remove(self, key: _K) -> bool: + value = self.get_or(key) + if value is CruNotFound.VALUE: + return False + self._list.remove(value) + return True + + def remove(self, key: _K, allow_absence: bool = False) -> None: + if not self.try_remove(key) and not allow_absence: + raise KeyError(f"Key {key} not found!") + + def add(self, value: _T, /, replace: bool = False) -> None: + v = self.get_or(self._key_getter(value)) + if v is not CruNotFound.VALUE: + if not replace: + raise KeyError(f"Key {self._key_getter(v)} already exists!") + self._list.remove(v) + if self._before_add is not None: + value = self._before_add(value) + self._list.append(value) + + def set(self, value: _T) -> None: + self.add(value, True) + + def extend(self, iterable: Iterable[_T], /, replace: bool = False) -> None: + values = list(iterable) + to_remove = [] + for value in values: + v = self.get_or(self._key_getter(value)) + if v is not CruNotFound.VALUE: + if not replace: + raise KeyError(f"Key {self._key_getter(v)} already exists!") + to_remove.append(v) + for value in to_remove: + self._list.remove(value) + if self._before_add is not None: + values = [self._before_add(value) for value in values] + self._list.extend(values) + + def clear(self) -> None: + self._list.reset([]) + + def __iter__(self) -> Iterator[_T]: + return iter(self._list) + + def __len__(self) -> int: + return len(self._list) + + def cru_iter(self) -> CruIterator[_T]: + return CruIterator(self._list) diff --git a/tools/cru-py/cru/parsing.py b/tools/cru-py/cru/parsing.py new file mode 100644 index 0000000..1d2fa7f --- /dev/null +++ b/tools/cru-py/cru/parsing.py @@ -0,0 +1,98 @@ +from __future__ import annotations + +from abc import ABCMeta, abstractmethod +from typing import NamedTuple, TypeAlias, TypeVar, Generic, NoReturn, Callable + +from ._error import CruException +from ._iter import CruIterable + +_T = TypeVar("_T") + + +class ParseError(CruException, Generic[_T]): + def __init__( + self, + message, + parser: Parser[_T], + text: str, + line_number: int | None = None, + *args, + **kwargs, + ): + super().__init__(message, *args, **kwargs) + self._parser = parser + self._text = text + self._line_number = line_number + + @property + def parser(self) -> Parser[_T]: + return self._parser + + @property + def text(self) -> str: + return self._text + + @property + def line_number(self) -> int | None: + return self._line_number + +
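# Standalone illustrative sketch of SimpleLineConfigParser (defined further below):
# it reads simple "key = value" lines, skips "#" comments, and records 1-based line
# numbers. A sketch only; the keys are hypothetical and the package is assumed to be
# importable as `cru`.
from cru.parsing import SimpleLineConfigParser

parser = SimpleLineConfigParser()
result = parser.parse("# comment\nDOMAIN = example.com\nPORT = 8080\n")
for entry in result:
    # each entry is a (key, value, line_number) named tuple
    print(entry.key, entry.value, entry.line_number)
# -> DOMAIN example.com 2
# -> PORT 8080 3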
+class Parser(Generic[_T], metaclass=ABCMeta): + def __init__(self, name: str) -> None: + self._name = name + + @property + def name(self) -> str: + return self._name + + @abstractmethod + def parse(self, s: str) -> _T: + raise NotImplementedError() + + def raise_parse_exception( + self, text: str, line_number: int | None = None + ) -> NoReturn: + a = line_number and f" at line {line_number}" or "" + raise ParseError(f"Parser {self.name} failed{a}.", self, text, line_number) + + +class SimpleLineConfigParserEntry(NamedTuple): + key: str + value: str + line_number: int | None = None + + +class SimpleLineConfigParserResult(CruIterable.IterList[SimpleLineConfigParserEntry]): + pass + + +class SimpleLineConfigParser(Parser[SimpleLineConfigParserResult]): + """ + The parsing result is a list of tuples (key, value, line number). + """ + + Entry: TypeAlias = SimpleLineConfigParserEntry + Result: TypeAlias = SimpleLineConfigParserResult + + def __init__(self) -> None: + super().__init__(type(self).__name__) + + def _parse(self, text: str, callback: Callable[[Entry], None]) -> None: + for ln, line in enumerate(text.splitlines()): + line_number = ln + 1 + # check if it's a comment + if line.strip().startswith("#"): + continue + # check if there is a '=' + if line.find("=") == -1: + self.raise_parse_exception("There is even no '='!", line_number) + # split at first '=' + key, value = line.split("=", 1) + key = key.strip() + value = value.strip() + callback(SimpleLineConfigParserEntry(key, value, line_number)) + + def parse(self, text: str) -> Result: + result = SimpleLineConfigParserResult() + self._parse(text, lambda item: result.append(item)) + return result diff --git a/tools/cru-py/cru/service/__init__.py b/tools/cru-py/cru/service/__init__.py new file mode 100644 index 0000000..e69de29 --- /dev/null +++ b/tools/cru-py/cru/service/__init__.py diff --git a/tools/cru-py/cru/service/__main__.py b/tools/cru-py/cru/service/__main__.py new file mode 100644 index 0000000..1c10e82 --- /dev/null +++ b/tools/cru-py/cru/service/__main__.py @@ -0,0 +1,20 @@ +from cru import CruException + +from ._app import create_app + + +def main(): + app = create_app() + app.run_command() + + +if __name__ == "__main__": + try: + main() + except CruException as e: + user_message = e.get_user_message() + if user_message is not None: + print(f"Error: {user_message}") + exit(1) + else: + raise diff --git a/tools/cru-py/cru/service/_app.py b/tools/cru-py/cru/service/_app.py new file mode 100644 index 0000000..6030dad --- /dev/null +++ b/tools/cru-py/cru/service/_app.py @@ -0,0 +1,34 @@ +from ._base import ( + AppBase, + CommandDispatcher, + AppInitializer, + PathCommandProvider, +) +from ._config import ConfigManager +from ._template import TemplateManager +from ._nginx import NginxManager +from ._external import CliToolCommandProvider + +APP_ID = "crupest" + + +class App(AppBase): + def __init__(self): + super().__init__(APP_ID, f"{APP_ID}-service") + self.add_feature(PathCommandProvider()) + self.add_feature(AppInitializer()) + self.add_feature(ConfigManager()) + self.add_feature(TemplateManager()) + self.add_feature(NginxManager()) + self.add_feature(CliToolCommandProvider()) + self.add_feature(CommandDispatcher()) + + def run_command(self): + command_dispatcher = self.get_feature(CommandDispatcher) + command_dispatcher.run_command() + + +def create_app() -> App: + app = App() + app.setup() + return app diff --git a/tools/cru-py/cru/service/_base.py b/tools/cru-py/cru/service/_base.py new file mode 100644 index 
0000000..ad813c9 --- /dev/null +++ b/tools/cru-py/cru/service/_base.py @@ -0,0 +1,449 @@ +from __future__ import annotations + +from argparse import ArgumentParser, Namespace +from abc import ABC, abstractmethod +import argparse +import os +from pathlib import Path +from typing import TypeVar, overload + +from cru import CruException, CruLogicError + +_Feature = TypeVar("_Feature", bound="AppFeatureProvider") + + +class AppError(CruException): + pass + + +class AppFeatureError(AppError): + def __init__(self, message, feature: type | str, *args, **kwargs): + super().__init__(message, *args, **kwargs) + self._feature = feature + + @property + def feature(self) -> type | str: + return self._feature + + +class AppPathError(CruException): + def __init__(self, message, _path: str | Path, *args, **kwargs): + super().__init__(message, *args, **kwargs) + self._path = str(_path) + + @property + def path(self) -> str: + return self._path + + +class AppPath(ABC): + def __init__(self, id: str, is_dir: bool, description: str) -> None: + self._is_dir = is_dir + self._id = id + self._description = description + + @property + @abstractmethod + def parent(self) -> AppPath | None: ... + + @property + @abstractmethod + def app(self) -> AppBase: ... + + @property + def id(self) -> str: + return self._id + + @property + def description(self) -> str: + return self._description + + @property + def is_dir(self) -> bool: + return self._is_dir + + @property + @abstractmethod + def full_path(self) -> Path: ... + + @property + def full_path_str(self) -> str: + return str(self.full_path) + + def check_parents(self, must_exist: bool = False) -> bool: + for p in reversed(self.full_path.parents): + if not p.exists() and not must_exist: + return False + if not p.is_dir(): + raise AppPathError("Parents' path must be a dir.", self.full_path) + return True + + def check_self(self, must_exist: bool = False) -> bool: + if not self.check_parents(must_exist): + return False + if not self.full_path.exists(): + if not must_exist: + return False + raise AppPathError("Not exist.", self.full_path) + if self.is_dir: + if not self.full_path.is_dir(): + raise AppPathError("Should be a directory, but not.", self.full_path) + else: + return True + else: + if not self.full_path.is_file(): + raise AppPathError("Should be a file, but not.", self.full_path) + else: + return True + + def ensure(self, create_file: bool = False) -> None: + e = self.check_self(False) + if not e: + os.makedirs(self.full_path.parent, exist_ok=True) + if self.is_dir: + os.mkdir(self.full_path) + elif create_file: + with open(self.full_path, "w") as f: + f.write("") + + def add_subpath( + self, + name: str, + is_dir: bool, + /, + id: str | None = None, + description: str = "", + ) -> AppFeaturePath: + return self.app.add_path(name, is_dir, self, id, description) + + @property + def app_relative_path(self) -> Path: + return self.full_path.relative_to(self.app.root.full_path) + + +class AppFeaturePath(AppPath): + def __init__( + self, + parent: AppPath, + name: str, + is_dir: bool, + /, + id: str | None = None, + description: str = "", + ) -> None: + super().__init__(id or name, is_dir, description) + self._name = name + self._parent = parent + + @property + def name(self) -> str: + return self._name + + @property + def parent(self) -> AppPath: + return self._parent + + @property + def app(self) -> AppBase: + return self.parent.app + + @property + def full_path(self) -> Path: + return Path(self.parent.full_path, self.name).resolve() + + +class AppRootPath(AppPath): + 
def __init__(self, app: AppBase): + super().__init__("root", True, "Application root path.") + self._app = app + self._full_path: Path | None = None + + @property + def parent(self) -> None: + return None + + @property + def app(self) -> AppBase: + return self._app + + @property + def full_path(self) -> Path: + if self._full_path is None: + raise AppError("App root path is not set yet.") + return self._full_path + + def setup(self, path: os.PathLike) -> None: + if self._full_path is not None: + raise AppError("App root path is already set.") + self._full_path = Path(path).resolve() + + +class AppFeatureProvider(ABC): + def __init__(self, name: str, /, app: AppBase | None = None): + super().__init__() + self._name = name + self._app = app if app else AppBase.get_instance() + + @property + def app(self) -> AppBase: + return self._app + + @property + def name(self) -> str: + return self._name + + @abstractmethod + def setup(self) -> None: ... + + +class AppCommandFeatureProvider(AppFeatureProvider): + @abstractmethod + def get_command_info(self) -> tuple[str, str]: ... + + @abstractmethod + def setup_arg_parser(self, arg_parser: ArgumentParser): ... + + @abstractmethod + def run_command(self, args: Namespace) -> None: ... + + +DATA_DIR_NAME = "data" + + +class PathCommandProvider(AppCommandFeatureProvider): + def __init__(self) -> None: + super().__init__("path-command-provider") + + def setup(self): + pass + + def get_command_info(self): + return ("path", "Get information about paths used by app.") + + def setup_arg_parser(self, arg_parser: ArgumentParser) -> None: + subparsers = arg_parser.add_subparsers( + dest="path_command", required=True, metavar="PATH_COMMAND" + ) + _list_parser = subparsers.add_parser( + "list", help="list special paths used by app" + ) + + def run_command(self, args: Namespace) -> None: + if args.path_command == "list": + for path in self.app.paths: + print(f"{path.app_relative_path.as_posix()}: {path.description}") + + +class CommandDispatcher(AppFeatureProvider): + def __init__(self) -> None: + super().__init__("command-dispatcher") + self._parsed_args: argparse.Namespace | None = None + + def setup_arg_parser(self) -> None: + epilog = """ +==> to start, +./tools/manage init +./tools/manage config init +ln -s generated/docker-compose.yaml . +# Then edit config file. 
+ +==> to update +git pull +./tools/manage template generate --no-dry-run +docker compose up + """.strip() + + self._map: dict[str, AppCommandFeatureProvider] = {} + arg_parser = argparse.ArgumentParser( + description="Service management", + formatter_class=argparse.RawDescriptionHelpFormatter, + epilog=epilog, + ) + arg_parser.add_argument( + "--project-dir", + help="The path of the project directory.", + required=True, + type=str, + ) + subparsers = arg_parser.add_subparsers( + dest="command", + help="The management command to execute.", + metavar="COMMAND", + ) + for feature in self.app.features: + if isinstance(feature, AppCommandFeatureProvider): + info = feature.get_command_info() + command_subparser = subparsers.add_parser(info[0], help=info[1]) + feature.setup_arg_parser(command_subparser) + self._map[info[0]] = feature + self._arg_parser = arg_parser + + def setup(self): + pass + + @property + def arg_parser(self) -> argparse.ArgumentParser: + return self._arg_parser + + @property + def map(self) -> dict[str, AppCommandFeatureProvider]: + return self._map + + def get_program_parsed_args(self) -> argparse.Namespace: + if self._parsed_args is None: + self._parsed_args = self.arg_parser.parse_args() + return self._parsed_args + + def run_command(self, args: argparse.Namespace | None = None) -> None: + real_args = args or self.get_program_parsed_args() + if real_args.command is None: + self.arg_parser.print_help() + return + self.map[real_args.command].run_command(real_args) + + +class AppInitializer(AppCommandFeatureProvider): + def __init__(self) -> None: + super().__init__("app-initializer") + + def _init_app(self) -> bool: + if self.app.app_initialized: + return False + self.app.data_dir.ensure() + return True + + def setup(self): + pass + + def get_command_info(self): + return ("init", "Initialize the app.") + + def setup_arg_parser(self, arg_parser): + pass + + def run_command(self, args): + init = self._init_app() + if init: + print("App initialized successfully.") + else: + print("App is already initialized. Do nothing.") + + +class AppBase: + _instance: AppBase | None = None + + @staticmethod + def get_instance() -> AppBase: + if AppBase._instance is None: + raise AppError("App instance not initialized") + return AppBase._instance + + def __init__(self, app_id: str, name: str): + AppBase._instance = self + self._app_id = app_id + self._name = name + self._root = AppRootPath(self) + self._paths: list[AppFeaturePath] = [] + self._features: list[AppFeatureProvider] = [] + + def setup(self) -> None: + command_dispatcher = self.get_feature(CommandDispatcher) + command_dispatcher.setup_arg_parser() + program_args = command_dispatcher.get_program_parsed_args() + self.setup_root(program_args.project_dir) + self._data_dir = self.add_path(DATA_DIR_NAME, True, id="data") + for feature in self.features: + feature.setup() + for path in self.paths: + path.check_self() + + @property + def app_id(self) -> str: + return self._app_id + + @property + def name(self) -> str: + return self._name + + @property + def root(self) -> AppRootPath: + return self._root + + def setup_root(self, path: os.PathLike) -> None: + self._root.setup(path) + + @property + def data_dir(self) -> AppFeaturePath: + return self._data_dir + + @property + def app_initialized(self) -> bool: + return self.data_dir.check_self() + + def ensure_app_initialized(self) -> AppRootPath: + if not self.app_initialized: + raise AppError( + user_message="Root directory does not exist. " + "Please run 'init' to create one." 
+ ) + return self.root + + @property + def features(self) -> list[AppFeatureProvider]: + return self._features + + @property + def paths(self) -> list[AppFeaturePath]: + return self._paths + + def add_feature(self, feature: _Feature) -> _Feature: + for f in self.features: + if f.name == feature.name: + raise AppFeatureError( + f"Duplicate feature name: {feature.name}.", feature.name + ) + self._features.append(feature) + return feature + + def add_path( + self, + name: str, + is_dir: bool, + /, + parent: AppPath | None = None, + id: str | None = None, + description: str = "", + ) -> AppFeaturePath: + p = AppFeaturePath( + parent or self.root, name, is_dir, id=id, description=description + ) + self._paths.append(p) + return p + + @overload + def get_feature(self, feature: str) -> AppFeatureProvider: ... + + @overload + def get_feature(self, feature: type[_Feature]) -> _Feature: ... + + def get_feature( + self, feature: str | type[_Feature] + ) -> AppFeatureProvider | _Feature: + if isinstance(feature, str): + for f in self._features: + if f.name == feature: + return f + elif isinstance(feature, type): + for f in self._features: + if isinstance(f, feature): + return f + else: + raise CruLogicError("Argument must be the name of feature or its class.") + + raise AppFeatureError(f"Feature {feature} not found.", feature) + + def get_path(self, name: str) -> AppFeaturePath: + for p in self._paths: + if p.id == name or p.name == name: + return p + raise AppPathError(f"Application path {name} not found.", name) diff --git a/tools/cru-py/cru/service/_config.py b/tools/cru-py/cru/service/_config.py new file mode 100644 index 0000000..b51e21c --- /dev/null +++ b/tools/cru-py/cru/service/_config.py @@ -0,0 +1,446 @@ +from collections.abc import Iterable +from typing import Any, Literal, overload + +from cru import CruException +from cru.config import Configuration, ConfigItem +from cru.value import ( + INTEGER_VALUE_TYPE, + TEXT_VALUE_TYPE, + CruValueTypeError, + RandomStringValueGenerator, + UuidValueGenerator, +) +from cru.parsing import ParseError, SimpleLineConfigParser + +from ._base import AppFeaturePath, AppCommandFeatureProvider + + +class AppConfigError(CruException): + def __init__( + self, message: str, configuration: Configuration, *args, **kwargs + ) -> None: + super().__init__(message, *args, **kwargs) + self._configuration = configuration + + @property + def configuration(self) -> Configuration: + return self._configuration + + +class AppConfigFileError(AppConfigError): + def __init__( + self, + message: str, + configuration: Configuration, + *args, + **kwargs, + ) -> None: + super().__init__(message, configuration, *args, **kwargs) + + +class AppConfigFileNotFoundError(AppConfigFileError): + def __init__( + self, + message: str, + configuration: Configuration, + file_path: str, + *args, + **kwargs, + ) -> None: + super().__init__(message, configuration, *args, **kwargs) + self._file_path = file_path + + @property + def file_path(self) -> str: + return self._file_path + + +class AppConfigFileParseError(AppConfigFileError): + def __init__( + self, + message: str, + configuration: Configuration, + file_content: str, + *args, + **kwargs, + ) -> None: + super().__init__(message, configuration, *args, **kwargs) + self._file_content = file_content + self.__cause__: ParseError + + @property + def file_content(self) -> str: + return self._file_content + + def get_user_message(self) -> str: + return f"Error while parsing config file at line {self.__cause__.line_number}." 
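
The `_base.py` module above defines the small service framework (AppBase, feature providers, AppPath tree, CommandDispatcher) but the actual entry point is not part of this section. A minimal wiring sketch, assuming a hypothetical app id "crupest", the helper names create_app/main, and only a subset of the features added in this commit:

from cru.service._base import (
    AppBase,
    AppInitializer,
    CommandDispatcher,
    PathCommandProvider,
)

def create_app() -> AppBase:
    # App id and name are placeholders; the real values live in the package
    # entry point (e.g. cru/service/__main__.py), which is not shown here.
    app = AppBase("crupest", "crupest-service")
    app.add_feature(PathCommandProvider())
    app.add_feature(AppInitializer())
    app.add_feature(CommandDispatcher())
    return app

def main() -> None:
    app = create_app()
    # setup() parses --project-dir, resolves the root and data paths,
    # and calls setup() on every registered feature.
    app.setup()
    app.get_feature(CommandDispatcher).run_command()

if __name__ == "__main__":
    main()

Features must be added before setup() because CommandDispatcher builds one subcommand parser per AppCommandFeatureProvider it finds on the app.
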
+ + +class AppConfigFileEntryError(AppConfigFileError): + def __init__( + self, + message: str, + configuration: Configuration, + entries: Iterable[SimpleLineConfigParser.Entry], + *args, + **kwargs, + ) -> None: + super().__init__(message, configuration, *args, **kwargs) + self._entries = list(entries) + + @property + def error_entries(self) -> list[SimpleLineConfigParser.Entry]: + return self._entries + + @staticmethod + def entries_to_friendly_message( + entries: Iterable[SimpleLineConfigParser.Entry], + ) -> str: + return "\n".join( + f"line {entry.line_number}: {entry.key}={entry.value}" for entry in entries + ) + + @property + def friendly_message_head(self) -> str: + return "Error entries found in config file" + + def get_user_message(self) -> str: + return ( + f"{self.friendly_message_head}:\n" + f"{self.entries_to_friendly_message(self.error_entries)}" + ) + + +class AppConfigDuplicateEntryError(AppConfigFileEntryError): + @property + def friendly_message_head(self) -> str: + return "Duplicate entries found in config file" + + +class AppConfigEntryValueFormatError(AppConfigFileEntryError): + @property + def friendly_message_head(self) -> str: + return "Invalid value format for entries" + + +class AppConfigItemNotSetError(AppConfigError): + def __init__( + self, + message: str, + configuration: Configuration, + items: list[ConfigItem], + *args, + **kwargs, + ) -> None: + super().__init__(message, configuration, *args, **kwargs) + self._items = items + + +class ConfigManager(AppCommandFeatureProvider): + def __init__(self) -> None: + super().__init__("config-manager") + configuration = Configuration() + self._configuration = configuration + self._loaded: bool = False + self._init_app_defined_items() + + def _init_app_defined_items(self) -> None: + prefix = self.config_name_prefix + + def _add_text(name: str, description: str) -> ConfigItem: + item = ConfigItem(f"{prefix}_{name}", description, TEXT_VALUE_TYPE) + self.configuration.add(item) + return item + + def _add_uuid(name: str, description: str) -> ConfigItem: + item = ConfigItem( + f"{prefix}_{name}", + description, + TEXT_VALUE_TYPE, + default=UuidValueGenerator(), + ) + self.configuration.add(item) + return item + + def _add_random_string( + name: str, description: str, length: int = 32, secure: bool = True + ) -> ConfigItem: + item = ConfigItem( + f"{prefix}_{name}", + description, + TEXT_VALUE_TYPE, + default=RandomStringValueGenerator(length, secure), + ) + self.configuration.add(item) + return item + + def _add_int(name: str, description: str) -> ConfigItem: + item = ConfigItem(f"{prefix}_{name}", description, INTEGER_VALUE_TYPE) + self.configuration.add(item) + return item + + self._domain = _add_text("DOMAIN", "domain name") + self._email = _add_text("EMAIL", "admin email address") + _add_text( + "AUTO_BACKUP_COS_SECRET_ID", + "access key id for Tencent COS, used for auto backup", + ) + _add_text( + "AUTO_BACKUP_COS_SECRET_KEY", + "access key secret for Tencent COS, used for auto backup", + ) + _add_text( + "AUTO_BACKUP_COS_REGION", "region for Tencent COS, used for auto backup" + ) + _add_text( + "AUTO_BACKUP_BUCKET_NAME", + "bucket name for Tencent COS, used for auto backup", + ) + _add_text("GITHUB_USERNAME", "github username for fetching todos") + _add_int("GITHUB_PROJECT_NUMBER", "github project number for fetching todos") + _add_text("GITHUB_TOKEN", "github token for fetching todos") + _add_text("GITHUB_TODO_COUNT", "github todo count") + _add_uuid("V2RAY_TOKEN", "v2ray user id") + _add_uuid("V2RAY_PATH", "v2ray 
path, which will be prefixed by _") + _add_text("FORGEJO_MAILER_USER", "Forgejo SMTP user") + _add_text("FORGEJO_MAILER_PASSWD", "Forgejo SMTP password") + _add_random_string("2FAUTH_APP_KEY", "2FAuth App Key") + _add_text("2FAUTH_MAIL_USERNAME", "2FAuth SMTP user") + _add_text("2FAUTH_MAIL_PASSWORD", "2FAuth SMTP password") + + def setup(self) -> None: + self._config_file_path = self.app.data_dir.add_subpath( + "config", False, description="Configuration file path." + ) + + @property + def config_name_prefix(self) -> str: + return self.app.app_id.upper() + + @property + def configuration(self) -> Configuration: + return self._configuration + + @property + def config_file_path(self) -> AppFeaturePath: + return self._config_file_path + + @property + def all_set(self) -> bool: + return self.configuration.all_set + + def get_item(self, name: str) -> ConfigItem[Any]: + if not name.startswith(self.config_name_prefix + "_"): + name = f"{self.config_name_prefix}_{name}" + + item = self.configuration.get_or(name, None) + if item is None: + raise AppConfigError(f"Config item '{name}' not found.", self.configuration) + return item + + @overload + def get_item_value_str(self, name: str) -> str: ... + + @overload + def get_item_value_str(self, name: str, ensure_set: Literal[True]) -> str: ... + + @overload + def get_item_value_str(self, name: str, ensure_set: bool = True) -> str | None: ... + + def get_item_value_str(self, name: str, ensure_set: bool = True) -> str | None: + self.load_config_file() + item = self.get_item(name) + if not item.is_set: + if ensure_set: + raise AppConfigItemNotSetError( + f"Config item '{name}' is not set.", self.configuration, [item] + ) + return None + return item.value_str + + def get_str_dict(self, ensure_all_set: bool = True) -> dict[str, str]: + self.load_config_file() + if ensure_all_set and not self.configuration.all_set: + raise AppConfigItemNotSetError( + "Some config items are not set.", + self.configuration, + self.configuration.get_unset_items(), + ) + return self.configuration.to_str_dict() + + @property + def domain_item_name(self) -> str: + return self._domain.name + + def get_domain_value_str(self) -> str: + return self.get_item_value_str(self._domain.name) + + def get_email_value_str_optional(self) -> str | None: + return self.get_item_value_str(self._email.name, ensure_set=False) + + def _set_with_default(self) -> None: + if not self.configuration.all_not_set: + raise AppConfigError( + "Config is not clean. " + "Some config items are already set. 
" + "Can't set again with default value.", + self.configuration, + ) + for item in self.configuration: + if item.can_generate_default: + item.set_value(item.generate_default_value()) + + def _to_config_file_content(self) -> str: + content = "".join( + [ + f"{item.name}={item.value_str if item.is_set else ''}\n" + for item in self.configuration + ] + ) + return content + + def _create_init_config_file(self) -> None: + if self.config_file_path.check_self(): + raise AppConfigError( + "Config file already exists.", + self.configuration, + user_message=f"The config file at " + f"{self.config_file_path.full_path_str} already exists.", + ) + self._set_with_default() + self.config_file_path.ensure() + with open( + self.config_file_path.full_path, "w", encoding="utf-8", newline="\n" + ) as file: + file.write(self._to_config_file_content()) + + def _parse_config_file(self) -> SimpleLineConfigParser.Result: + if not self.config_file_path.check_self(): + raise AppConfigFileNotFoundError( + "Config file not found.", + self.configuration, + self.config_file_path.full_path_str, + user_message=f"The config file at " + f"{self.config_file_path.full_path_str} does not exist. " + f"You can create an initial one with 'init' command.", + ) + + text = self.config_file_path.full_path.read_text() + try: + parser = SimpleLineConfigParser() + return parser.parse(text) + except ParseError as e: + raise AppConfigFileParseError( + "Failed to parse config file.", self.configuration, text + ) from e + + def _parse_and_print_config_file(self) -> None: + parse_result = self._parse_config_file() + for entry in parse_result: + print(f"{entry.key}={entry.value}") + + def _check_duplicate( + self, + parse_result: dict[str, list[SimpleLineConfigParser.Entry]], + ) -> dict[str, SimpleLineConfigParser.Entry]: + entry_dict: dict[str, SimpleLineConfigParser.Entry] = {} + duplicate_entries: list[SimpleLineConfigParser.Entry] = [] + for key, entries in parse_result.items(): + entry_dict[key] = entries[0] + if len(entries) > 1: + duplicate_entries.extend(entries) + if len(duplicate_entries) > 0: + raise AppConfigDuplicateEntryError( + "Duplicate entries found.", self.configuration, duplicate_entries + ) + + return entry_dict + + def _check_type( + self, entry_dict: dict[str, SimpleLineConfigParser.Entry] + ) -> dict[str, Any]: + value_dict: dict[str, Any] = {} + error_entries: list[SimpleLineConfigParser.Entry] = [] + errors: list[CruValueTypeError] = [] + for key, entry in entry_dict.items(): + config_item = self.configuration.get(key) + try: + if entry.value == "": + value_dict[key] = None + else: + value_dict[key] = config_item.value_type.convert_str_to_value( + entry.value + ) + except CruValueTypeError as e: + error_entries.append(entry) + errors.append(e) + if len(error_entries) > 0: + raise AppConfigEntryValueFormatError( + "Entry value format is not correct.", + self.configuration, + error_entries, + ) from ExceptionGroup("Multiple format errors occurred.", errors) + return value_dict + + def _read_config_file(self) -> dict[str, Any]: + parsed = self._parse_config_file() + entry_groups = parsed.cru_iter().group_by(lambda e: e.key) + entry_dict = self._check_duplicate(entry_groups) + value_dict = self._check_type(entry_dict) + return value_dict + + def _real_load_config_file(self) -> None: + self.configuration.reset_all() + value_dict = self._read_config_file() + for key, value in value_dict.items(): + if value is None: + continue + self.configuration.set_config_item(key, value) + + def load_config_file(self, force=False) -> 
None: + if force or not self._loaded: + self._real_load_config_file() + self._loaded = True + + def _print_app_config_info(self): + for item in self.configuration: + print(item.description_str) + + def get_command_info(self): + return "config", "Manage configuration." + + def setup_arg_parser(self, arg_parser) -> None: + subparsers = arg_parser.add_subparsers( + dest="config_command", required=True, metavar="CONFIG_COMMAND" + ) + _init_parser = subparsers.add_parser( + "init", help="create an initial config file" + ) + _print_app_parser = subparsers.add_parser( + "print-app", + help="print information of the config items defined by app", + ) + _print_parser = subparsers.add_parser("print", help="print current config") + _check_config_parser = subparsers.add_parser( + "check", + help="check the validity of the config file", + ) + _check_config_parser.add_argument( + "-f", + "--format-only", + action="store_true", + help="only check content format, not app config item requirements.", + ) + + def run_command(self, args) -> None: + if args.config_command == "init": + self._create_init_config_file() + elif args.config_command == "print-app": + self._print_app_config_info() + elif args.config_command == "print": + self._parse_and_print_config_file() + elif args.config_command == "check": + if args.format_only: + self._parse_config_file() + else: + self._read_config_file() diff --git a/tools/cru-py/cru/service/_external.py b/tools/cru-py/cru/service/_external.py new file mode 100644 index 0000000..2347e95 --- /dev/null +++ b/tools/cru-py/cru/service/_external.py @@ -0,0 +1,81 @@ +from ._base import AppCommandFeatureProvider +from ._nginx import NginxManager + + +class CliToolCommandProvider(AppCommandFeatureProvider): + def __init__(self) -> None: + super().__init__("cli-tool-command-provider") + + def setup(self): + pass + + def get_command_info(self): + return ("gen-cli", "Get commands of running external cli tools.") + + def setup_arg_parser(self, arg_parser): + subparsers = arg_parser.add_subparsers( + dest="gen_cli_command", required=True, metavar="GEN_CLI_COMMAND" + ) + certbot_parser = subparsers.add_parser("certbot", help="print certbot commands") + certbot_parser.add_argument( + "-t", "--test", action="store_true", help="run certbot in test mode" + ) + _install_docker_parser = subparsers.add_parser( + "install-docker", help="print docker installation commands" + ) + _update_blog_parser = subparsers.add_parser( + "update-blog", help="print blog update command" + ) + + def _print_install_docker_commands(self) -> None: + output = """ +### COMMAND: uninstall apt docker +for pkg in docker.io docker-doc docker-compose \ +podman-docker containerd runc; \ +do sudo apt-get remove $pkg; done + +### COMMAND: prepare apt certs +sudo apt-get update +sudo apt-get install ca-certificates curl +sudo install -m 0755 -d /etc/apt/keyrings + +### COMMAND: install certs +sudo curl -fsSL https://download.docker.com/linux/debian/gpg \ +-o /etc/apt/keyrings/docker.asc +sudo chmod a+r /etc/apt/keyrings/docker.asc + +### COMMAND: add docker apt source +echo \\ + "deb [arch=$(dpkg --print-architecture) signed-by=/etc/apt/keyrings/docker.asc] \ +https://download.docker.com/linux/debian \\ + $(. 
/etc/os-release && echo "$VERSION_CODENAME") stable" | \\ + sudo tee /etc/apt/sources.list.d/docker.list > /dev/null + +### COMMAND: update apt and install docker +sudo apt-get update +sudo apt-get install docker-ce docker-ce-cli containerd.io \ +docker-buildx-plugin docker-compose-plugin + +### COMMAND: setup system for docker +sudo systemctl enable docker +sudo systemctl start docker +sudo groupadd -f docker +sudo usermod -aG docker $USER +# Remember to log out and log back in for the group changes to take effect +""".strip() + print(output) + + def _print_update_blog_command(self): + output = """ +### COMMAND: update blog +docker exec -it blog /scripts/update.bash +""".strip() + print(output) + + def run_command(self, args): + if args.gen_cli_command == "certbot": + self.app.get_feature(NginxManager).print_all_certbot_commands(args.test) + elif args.gen_cli_command == "install-docker": + self._print_install_docker_commands() + elif args.gen_cli_command == "update-blog": + self._print_update_blog_command()
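
For reference, the config file that ConfigManager reads and writes is a flat list of KEY=VALUE lines, one per ConfigItem. A rough sketch of that format and of the duplicate/empty-value handling done in _read_config_file; the "CRUPEST_" prefix is an assumption (the real prefix is app_id.upper(), defined outside this section), and the loop below only stands in for SimpleLineConfigParser, whose implementation is not shown here:

# Hypothetical config file content.
sample = (
    "CRUPEST_DOMAIN=example.org\n"
    "CRUPEST_EMAIL=admin@example.org\n"
    "CRUPEST_GITHUB_TODO_COUNT=10\n"
    "CRUPEST_V2RAY_TOKEN=\n"
)

# Reject duplicate keys; treat an empty value as "not set" and skip it,
# mirroring _check_duplicate and _real_load_config_file.
values: dict[str, str] = {}
for number, line in enumerate(sample.splitlines(), start=1):
    key, _, value = line.partition("=")
    if key in values:
        raise ValueError(f"duplicate entry '{key}' at line {number}")
    if value != "":
        values[key] = value

print(values)
# {'CRUPEST_DOMAIN': 'example.org', 'CRUPEST_EMAIL': 'admin@example.org',
#  'CRUPEST_GITHUB_TODO_COUNT': '10'}
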
\ No newline at end of file diff --git a/tools/cru-py/cru/service/_nginx.py b/tools/cru-py/cru/service/_nginx.py new file mode 100644 index 0000000..e0a9c60 --- /dev/null +++ b/tools/cru-py/cru/service/_nginx.py @@ -0,0 +1,281 @@ +from argparse import Namespace +from enum import Enum, auto +import re +import subprocess +from typing import TypeAlias + +from cru import CruInternalError + +from ._base import AppCommandFeatureProvider +from ._config import ConfigManager +from ._template import TemplateManager + + +class CertbotAction(Enum): + CREATE = auto() + EXPAND = auto() + SHRINK = auto() + RENEW = auto() + + +class NginxManager(AppCommandFeatureProvider): + CertbotAction: TypeAlias = CertbotAction + + def __init__(self) -> None: + super().__init__("nginx-manager") + self._domains_cache: list[str] | None = None + + def setup(self) -> None: + pass + + @property + def _config_manager(self) -> ConfigManager: + return self.app.get_feature(ConfigManager) + + @property + def root_domain(self) -> str: + return self._config_manager.get_domain_value_str() + + @property + def domains(self) -> list[str]: + if self._domains_cache is None: + self._domains_cache = self._get_domains() + return self._domains_cache + + @property + def subdomains(self) -> list[str]: + suffix = "." + self.root_domain + return [d[: -len(suffix)] for d in self.domains if d.endswith(suffix)] + + @property + def _domain_config_name(self) -> str: + return self._config_manager.domain_item_name + + def _get_domains_from_text(self, text: str) -> set[str]: + domains: set[str] = set() + regex = re.compile(r"server_name\s+(\S+)\s*;") + domain_variable_str = f"${self._domain_config_name}" + brace_domain_variable_regex = re.compile( + r"\$\{\s*" + self._domain_config_name + r"\s*\}" + ) + for match in regex.finditer(text): + domain_part = match.group(1) + if domain_variable_str in domain_part: + domains.add(domain_part.replace(domain_variable_str, self.root_domain)) + continue + m = brace_domain_variable_regex.search(domain_part) + if m: + domains.add(domain_part.replace(m.group(0), self.root_domain)) + continue + domains.add(domain_part) + return domains + + def _get_nginx_conf_template_text(self) -> str: + template_manager = self.app.get_feature(TemplateManager) + text = "" + for path, template in template_manager.template_tree.templates: + if path.as_posix().startswith("nginx/"): + text += template.raw_text + return text + + def _get_domains(self) -> list[str]: + text = self._get_nginx_conf_template_text() + domains = list(self._get_domains_from_text(text)) + domains.remove(self.root_domain) + return [self.root_domain, *domains] + + def _print_domains(self) -> None: + for domain in self.domains: + print(domain) + + def _certbot_command( + self, + action: CertbotAction | str, + test: bool, + *, + docker=True, + standalone=None, + email=None, + agree_tos=True, + ) -> str: + if isinstance(action, str): + action = CertbotAction[action.upper()] + + command_args = [] + + add_domain_option = True + if action is CertbotAction.CREATE: + if standalone is None: + standalone = True + command_action = "certonly" + elif action in [CertbotAction.EXPAND, CertbotAction.SHRINK]: + if standalone is None: + standalone = False + command_action = "certonly" + elif action is CertbotAction.RENEW: + if standalone is None: + standalone = False + add_domain_option = False + command_action = "renew" + else: + raise CruInternalError("Invalid certbot action.") + + data_dir = self.app.data_dir.full_path.as_posix() + + if not docker: + 
command_args.append("certbot") + else: + command_args.extend( + [ + "docker run -it --rm --name certbot", + f'-v "{data_dir}/certbot/certs:/etc/letsencrypt"', + f'-v "{data_dir}/certbot/data:/var/lib/letsencrypt"', + ] + ) + if standalone: + command_args.append('-p "0.0.0.0:80:80"') + else: + command_args.append(f'-v "{data_dir}/certbot/webroot:/var/www/certbot"') + + command_args.append("certbot/certbot") + + command_args.append(command_action) + + command_args.append(f"--cert-name {self.root_domain}") + + if standalone: + command_args.append("--standalone") + else: + command_args.append("--webroot -w /var/www/certbot") + + if add_domain_option: + command_args.append(" ".join([f"-d {domain}" for domain in self.domains])) + + if email is not None: + command_args.append(f"--email {email}") + + if agree_tos: + command_args.append("--agree-tos") + + if test: + command_args.append("--test-cert --dry-run") + + return " ".join(command_args) + + def print_all_certbot_commands(self, test: bool): + print("### COMMAND: (standalone) create certs") + print( + self._certbot_command( + CertbotAction.CREATE, + test, + email=self._config_manager.get_email_value_str_optional(), + ) + ) + print() + print("### COMMAND: (webroot+nginx) expand or shrink certs") + print( + self._certbot_command( + CertbotAction.EXPAND, + test, + email=self._config_manager.get_email_value_str_optional(), + ) + ) + print() + print("### COMMAND: (webroot+nginx) renew certs") + print( + self._certbot_command( + CertbotAction.RENEW, + test, + email=self._config_manager.get_email_value_str_optional(), + ) + ) + + @property + def _cert_path_str(self) -> str: + return str( + self.app.data_dir.full_path + / "certbot/certs/live" + / self.root_domain + / "fullchain.pem" + ) + + def get_command_info(self): + return "nginx", "Manage nginx related things." + + def setup_arg_parser(self, arg_parser): + subparsers = arg_parser.add_subparsers( + dest="nginx_command", required=True, metavar="NGINX_COMMAND" + ) + _list_parser = subparsers.add_parser("list", help="list domains") + certbot_parser = subparsers.add_parser("certbot", help="print certbot commands") + certbot_parser.add_argument( + "--no-test", + action="store_true", + help="remove args making certbot run in test mode", + ) + + def run_command(self, args: Namespace) -> None: + if args.nginx_command == "list": + self._print_domains() + elif args.nginx_command == "certbot": + self.print_all_certbot_commands(not args.no_test) + + def _generate_dns_zone( + self, + ip: str, + /, + ttl: str | int = 600, + *, + enable_mail: bool = True, + dkim: str | None = None, + ) -> str: + # TODO: Not complete and test now. + root_domain = self.root_domain + result = f"$ORIGIN {root_domain}.\n\n" + result += "; A records\n" + result += f"@ {ttl} IN A {ip}\n" + for subdomain in self.subdomains: + result += f"{subdomain} {ttl} IN A {ip}\n" + + if enable_mail: + result += "\n; MX records\n" + result += f"@ {ttl} IN MX 10 mail.{root_domain}.\n" + result += "\n; SPF record\n" + result += f'@ {ttl} IN TXT "v=spf1 mx ~all"\n' + if dkim is not None: + result += "\n; DKIM record\n" + result += f'mail._domainkey {ttl} IN TEXT "{dkim}"' + result += "\n; DMARC record\n" + dmarc_options = [ + "v=DMARC1", + "p=none", + f"rua=mailto:dmarc.report@{root_domain}", + f"ruf=mailto:dmarc.report@{root_domain}", + "sp=none", + "ri=86400", + ] + result += f'_dmarc {ttl} IN TXT "{"; ".join(dmarc_options)}"\n' + return result + + def _get_dkim_from_mailserver(self) -> str | None: + # TODO: Not complete and test now. 
+ dkim_path = ( + self.app.data_dir.full_path + / "dms/config/opendkim/keys" + / self.root_domain + / "mail.txt" + ) + if not dkim_path.exists(): + return None + + p = subprocess.run(["sudo", "cat", dkim_path], capture_output=True, check=True) + value = "" + for match in re.finditer('"(.*)"', p.stdout.decode("utf-8")): + value += match.group(1) + return value + + def _generate_dns_zone_with_dkim(self, ip: str, /, ttl: str | int = 600) -> str: + # TODO: Not complete and test now. + return self._generate_dns_zone( + ip, ttl, enable_mail=True, dkim=self._get_dkim_from_mailserver() + ) diff --git a/tools/cru-py/cru/service/_template.py b/tools/cru-py/cru/service/_template.py new file mode 100644 index 0000000..170116c --- /dev/null +++ b/tools/cru-py/cru/service/_template.py @@ -0,0 +1,86 @@ +from argparse import Namespace +import shutil + +from cru import CruIterator +from cru.template import TemplateTree + +from ._base import AppCommandFeatureProvider, AppFeaturePath +from ._config import ConfigManager + + +class TemplateManager(AppCommandFeatureProvider): + def __init__(self, prefix: str | None = None): + super().__init__("template-manager") + self._prefix = prefix or self.app.app_id.upper() + + def setup(self) -> None: + self._templates_dir = self.app.add_path("templates", True) + self._generated_dir = self.app.add_path("generated", True) + self._template_tree: TemplateTree | None = None + + @property + def prefix(self) -> str: + return self._prefix + + @property + def templates_dir(self) -> AppFeaturePath: + return self._templates_dir + + @property + def generated_dir(self) -> AppFeaturePath: + return self._generated_dir + + @property + def template_tree(self) -> TemplateTree: + if self._template_tree is None: + return self.reload() + return self._template_tree + + def reload(self) -> TemplateTree: + self._template_tree = TemplateTree( + self.prefix, self.templates_dir.full_path_str + ) + return self._template_tree + + def _print_file_lists(self) -> None: + for file in CruIterator(self.template_tree.templates).transform(lambda t: t[0]): + print(file.as_posix()) + + def _generate_files(self, dry_run: bool) -> None: + config_manager = self.app.get_feature(ConfigManager) + if not dry_run and self.generated_dir.full_path.exists(): + shutil.rmtree(self.generated_dir.full_path) + self.template_tree.generate_to( + self.generated_dir.full_path_str, config_manager.get_str_dict(), dry_run + ) + + def get_command_info(self): + return ("template", "Manage templates.") + + def setup_arg_parser(self, arg_parser): + subparsers = arg_parser.add_subparsers( + dest="template_command", required=True, metavar="TEMPLATE_COMMAND" + ) + _list_parser = subparsers.add_parser("list", help="list templates") + _variables_parser = subparsers.add_parser( + "variables", help="list variables used in all templates" + ) + generate_parser = subparsers.add_parser("generate", help="generate templates") + generate_parser.add_argument( + "--no-dry-run", action="store_true", help="generate and write target files" + ) + + def run_command(self, args: Namespace) -> None: + if args.template_command == "list": + self._print_file_lists() + elif args.template_command == "variables": + for var in self.template_tree.variables: + print(var) + elif args.template_command == "generate": + dry_run = not args.no_dry_run + self._generate_files(dry_run) + if dry_run: + print("Dry run successfully.") + print( + f"Will delete dir {self.generated_dir.full_path_str} if it exists." 
+ ) diff --git a/tools/cru-py/cru/system.py b/tools/cru-py/cru/system.py new file mode 100644 index 0000000..f321717 --- /dev/null +++ b/tools/cru-py/cru/system.py @@ -0,0 +1,23 @@ +import os.path +import re + + +def check_debian_derivative_version(name: str) -> None | str: + if not os.path.isfile("/etc/os-release"): + return None + with open("/etc/os-release", "r") as f: + content = f.read() + if f"ID={name}" not in content: + return None + m = re.search(r'VERSION_ID="(.+)"', content) + if m is None: + return None + return m.group(1) + + +def check_ubuntu_version() -> None | str: + return check_debian_derivative_version("ubuntu") + + +def check_debian_version() -> None | str: + return check_debian_derivative_version("debian") diff --git a/tools/cru-py/cru/template.py b/tools/cru-py/cru/template.py new file mode 100644 index 0000000..6749cab --- /dev/null +++ b/tools/cru-py/cru/template.py @@ -0,0 +1,153 @@ +from collections.abc import Mapping +import os +import os.path +from pathlib import Path +from string import Template + +from ._iter import CruIterator +from ._error import CruException + + +class CruTemplateError(CruException): + pass + + +class CruTemplate: + def __init__(self, prefix: str, text: str): + self._prefix = prefix + self._template = Template(text) + self._variables = ( + CruIterator(self._template.get_identifiers()) + .filter(lambda i: i.startswith(self._prefix)) + .to_set() + ) + self._all_variables = set(self._template.get_identifiers()) + + @property + def prefix(self) -> str: + return self._prefix + + @property + def raw_text(self) -> str: + return self._template.template + + @property + def py_template(self) -> Template: + return self._template + + @property + def variables(self) -> set[str]: + return self._variables + + @property + def all_variables(self) -> set[str]: + return self._all_variables + + @property + def has_variables(self) -> bool: + """ + If the template does not has any variables that starts with the given prefix, + it returns False. This usually indicates that the template is not a real + template and should be copied as is. Otherwise, it returns True. + + This can be used as a guard to prevent invalid templates created accidentally + without notice. + """ + return len(self.variables) > 0 + + def generate(self, mapping: Mapping[str, str], allow_extra: bool = True) -> str: + values = dict(mapping) + if not self.variables <= set(values.keys()): + raise CruTemplateError("Missing variables.") + if not allow_extra and not set(values.keys()) <= self.variables: + raise CruTemplateError("Extra variables.") + return self._template.safe_substitute(values) + + +class TemplateTree: + def __init__( + self, + prefix: str, + source: str, + template_file_suffix: str | None = ".template", + ): + """ + If template_file_suffix is not None, the files will be checked according to the + suffix of the file name. If the suffix matches, the file will be regarded as a + template file. Otherwise, it will be regarded as a non-template file. + Content of template file must contain variables that need to be replaced, while + content of non-template file may not contain any variables. + If either case is false, it generally means whether the file is a template is + wrongly handled. 
+ """ + self._prefix = prefix + self._files: list[tuple[Path, CruTemplate]] = [] + self._source = source + self._template_file_suffix = template_file_suffix + self._load() + + @property + def prefix(self) -> str: + return self._prefix + + @property + def templates(self) -> list[tuple[Path, CruTemplate]]: + return self._files + + @property + def source(self) -> str: + return self._source + + @property + def template_file_suffix(self) -> str | None: + return self._template_file_suffix + + @staticmethod + def _scan_files(root_path: str) -> list[Path]: + result: list[Path] = [] + for root, _dirs, files in os.walk(root_path): + for file in files: + path = Path(root, file) + path = path.relative_to(root_path) + result.append(Path(path)) + return result + + def _load(self) -> None: + files = self._scan_files(self.source) + for file_path in files: + template_file = Path(self.source) / file_path + with open(template_file, "r") as f: + content = f.read() + template = CruTemplate(self.prefix, content) + if self.template_file_suffix is not None: + should_be_template = file_path.name.endswith(self.template_file_suffix) + if should_be_template and not template.has_variables: + raise CruTemplateError( + f"Template file {file_path} has no variables." + ) + elif not should_be_template and template.has_variables: + raise CruTemplateError(f"Non-template {file_path} has variables.") + self._files.append((file_path, template)) + + @property + def variables(self) -> set[str]: + s = set() + for _, template in self.templates: + s.update(template.variables) + return s + + def generate_to( + self, destination: str, variables: Mapping[str, str], dry_run: bool + ) -> None: + for file, template in self.templates: + des = Path(destination) / file + if self.template_file_suffix is not None and des.name.endswith( + self.template_file_suffix + ): + des = des.parent / (des.name[: -len(self.template_file_suffix)]) + + text = template.generate(variables) + if not dry_run: + des.parent.mkdir(parents=True, exist_ok=True) + with open(des, "w") as f: + f.write(text) diff --git a/tools/cru-py/cru/tool.py b/tools/cru-py/cru/tool.py new file mode 100644 index 0000000..377f5d7 --- /dev/null +++ b/tools/cru-py/cru/tool.py @@ -0,0 +1,82 @@ +import shutil +import subprocess +from typing import Any +from collections.abc import Iterable + +from ._error import CruException + + +class CruExternalToolError(CruException): + def __init__(self, message: str, tool: str, *args, **kwargs) -> None: + super().__init__(message, *args, **kwargs) + self._tool = tool + + @property + def tool(self) -> str: + return self._tool + + +class CruExternalToolNotFoundError(CruExternalToolError): + def __init__(self, message: str | None, tool: str, *args, **kwargs) -> None: + super().__init__( + message or f"Could not find binary for {tool}.", tool, *args, **kwargs + ) + + +class CruExternalToolRunError(CruExternalToolError): + def __init__( + self, + message: str, + tool: str, + tool_args: Iterable[str], + tool_error: Any, + *args, + **kwargs, + ) -> None: + super().__init__(message, tool, *args, **kwargs) + self._tool_args = list(tool_args) + self._tool_error = tool_error + + @property + def tool_args(self) -> list[str]: + return self._tool_args + + @property + def tool_error(self) -> Any: + return self._tool_error + + +class ExternalTool: + def __init__(self, bin: str) -> None: + self._bin = bin + + @property + def bin(self) -> str: + return self._bin + + @bin.setter + def bin(self, value: str) -> None: + self._bin = value + + @property + def bin_path(self) 
-> str: + real_bin = shutil.which(self.bin) + if not real_bin: + raise CruExternalToolNotFoundError(None, self.bin) + return real_bin + + def run( + self, *process_args: str, **subprocess_kwargs + ) -> subprocess.CompletedProcess: + try: + return subprocess.run( + [self.bin_path] + list(process_args), **subprocess_kwargs + ) + except subprocess.CalledProcessError as e: + raise CruExternalToolError("Subprocess failed.", self.bin) from e + except OSError as e: + raise CruExternalToolError("Failed to start subprocess", self.bin) from e + + def run_get_output(self, *process_args: str, **subprocess_kwargs) -> Any: + process = self.run(*process_args, capture_output=True, **subprocess_kwargs) + return process.stdout diff --git a/tools/cru-py/cru/value.py b/tools/cru-py/cru/value.py new file mode 100644 index 0000000..9c03219 --- /dev/null +++ b/tools/cru-py/cru/value.py @@ -0,0 +1,292 @@ +from __future__ import annotations + +import random +import secrets +import string +import uuid +from abc import abstractmethod, ABCMeta +from collections.abc import Callable +from typing import Any, ClassVar, TypeVar, Generic + +from ._error import CruException + + +def _str_case_in(s: str, case: bool, str_list: list[str]) -> bool: + if case: + return s in str_list + else: + return s.lower() in [s.lower() for s in str_list] + + +_T = TypeVar("_T") + + +class CruValueTypeError(CruException): + def __init__( + self, + message: str, + value: Any, + value_type: ValueType | None, + *args, + **kwargs, + ): + super().__init__( + message, + *args, + **kwargs, + ) + self._value = value + self._value_type = value_type + + @property + def value(self) -> Any: + return self._value + + @property + def value_type(self) -> ValueType | None: + return self._value_type + + +class ValueType(Generic[_T], metaclass=ABCMeta): + def __init__(self, name: str, _type: type[_T]) -> None: + self._name = name + self._type = _type + + @property + def name(self) -> str: + return self._name + + @property + def type(self) -> type[_T]: + return self._type + + def check_value_type(self, value: Any) -> None: + if not isinstance(value, self.type): + raise CruValueTypeError("Type of value is wrong.", value, self) + + def _do_check_value(self, value: Any) -> _T: + return value + + def check_value(self, value: Any) -> _T: + self.check_value_type(value) + return self._do_check_value(value) + + @abstractmethod + def _do_check_str_format(self, s: str) -> None: + raise NotImplementedError() + + def check_str_format(self, s: str) -> None: + if not isinstance(s, str): + raise CruValueTypeError("Try to check format on a non-str.", s, self) + self._do_check_str_format(s) + + @abstractmethod + def _do_convert_value_to_str(self, value: _T) -> str: + raise NotImplementedError() + + def convert_value_to_str(self, value: _T) -> str: + self.check_value(value) + return self._do_convert_value_to_str(value) + + @abstractmethod + def _do_convert_str_to_value(self, s: str) -> _T: + raise NotImplementedError() + + def convert_str_to_value(self, s: str) -> _T: + self.check_str_format(s) + return self._do_convert_str_to_value(s) + + def check_value_or_try_convert_from_str(self, value_or_str: Any) -> _T: + try: + return self.check_value(value_or_str) + except CruValueTypeError: + if isinstance(value_or_str, str): + return self.convert_str_to_value(value_or_str) + else: + raise + + def create_default_value(self) -> _T: + return self.type() + + +class TextValueType(ValueType[str]): + def __init__(self) -> None: + super().__init__("text", str) + + def 
_do_check_str_format(self, _s): + return + + def _do_convert_value_to_str(self, value): + return value + + def _do_convert_str_to_value(self, s): + return s + + +class IntegerValueType(ValueType[int]): + def __init__(self) -> None: + super().__init__("integer", int) + + def _do_check_str_format(self, s): + try: + int(s) + except ValueError as e: + raise CruValueTypeError("Invalid integer format.", s, self) from e + + def _do_convert_value_to_str(self, value): + return str(value) + + def _do_convert_str_to_value(self, s): + return int(s) + + +class FloatValueType(ValueType[float]): + def __init__(self) -> None: + super().__init__("float", float) + + def _do_check_str_format(self, s): + try: + float(s) + except ValueError as e: + raise CruValueTypeError("Invalid float format.", s, self) from e + + def _do_convert_value_to_str(self, value): + return str(value) + + def _do_convert_str_to_value(self, s): + return float(s) + + +class BooleanValueType(ValueType[bool]): + DEFAULT_TRUE_LIST: ClassVar[list[str]] = ["true", "yes", "y", "on", "1"] + DEFAULT_FALSE_LIST: ClassVar[list[str]] = ["false", "no", "n", "off", "0"] + + def __init__( + self, + *, + case_sensitive=False, + true_list: None | list[str] = None, + false_list: None | list[str] = None, + ) -> None: + super().__init__("boolean", bool) + self._case_sensitive = case_sensitive + self._valid_true_strs: list[str] = ( + true_list or BooleanValueType.DEFAULT_TRUE_LIST + ) + self._valid_false_strs: list[str] = ( + false_list or BooleanValueType.DEFAULT_FALSE_LIST + ) + + @property + def case_sensitive(self) -> bool: + return self._case_sensitive + + @property + def valid_true_strs(self) -> list[str]: + return self._valid_true_strs + + @property + def valid_false_strs(self) -> list[str]: + return self._valid_false_strs + + @property + def valid_boolean_strs(self) -> list[str]: + return self._valid_true_strs + self._valid_false_strs + + def _do_check_str_format(self, s): + if not _str_case_in(s, self.case_sensitive, self.valid_boolean_strs): + raise CruValueTypeError("Invalid boolean format.", s, self) + + def _do_convert_value_to_str(self, value): + return self._valid_true_strs[0] if value else self._valid_false_strs[0] + + def _do_convert_str_to_value(self, s): + return _str_case_in(s, self.case_sensitive, self._valid_true_strs) + + def create_default_value(self): + return self.valid_false_strs[0] + + +class EnumValueType(ValueType[str]): + def __init__(self, valid_values: list[str], /, case_sensitive=False) -> None: + super().__init__(f"enum({'|'.join(valid_values)})", str) + self._case_sensitive = case_sensitive + self._valid_values = valid_values + + @property + def case_sensitive(self) -> bool: + return self._case_sensitive + + @property + def valid_values(self) -> list[str]: + return self._valid_values + + def _do_check_value(self, value): + self._do_check_str_format(value) + + def _do_check_str_format(self, s): + if not _str_case_in(s, self.case_sensitive, self.valid_values): + raise CruValueTypeError("Invalid enum value", s, self) + + def _do_convert_value_to_str(self, value): + return value + + def _do_convert_str_to_value(self, s): + return s + + def create_default_value(self): + return self.valid_values[0] + + +TEXT_VALUE_TYPE = TextValueType() +INTEGER_VALUE_TYPE = IntegerValueType() +BOOLEAN_VALUE_TYPE = BooleanValueType() + + +class ValueGeneratorBase(Generic[_T], metaclass=ABCMeta): + @abstractmethod + def generate(self) -> _T: + raise NotImplementedError() + + def __call__(self) -> _T: + return self.generate() + + +class 
ValueGenerator(ValueGeneratorBase[_T]): + def __init__(self, generate_func: Callable[[], _T]) -> None: + self._generate_func = generate_func + + @property + def generate_func(self) -> Callable[[], _T]: + return self._generate_func + + def generate(self) -> _T: + return self._generate_func() + + +class UuidValueGenerator(ValueGeneratorBase[str]): + def generate(self): + return str(uuid.uuid4()) + + +class RandomStringValueGenerator(ValueGeneratorBase[str]): + def __init__(self, length: int, secure: bool) -> None: + self._length = length + self._secure = secure + + @property + def length(self) -> int: + return self._length + + @property + def secure(self) -> bool: + return self._secure + + def generate(self): + random_func = secrets.choice if self._secure else random.choice + characters = string.ascii_letters + string.digits + random_string = "".join(random_func(characters) for _ in range(self._length)) + return random_string + + +UUID_VALUE_GENERATOR = UuidValueGenerator() diff --git a/tools/cru-py/poetry.lock b/tools/cru-py/poetry.lock new file mode 100644 index 0000000..305aaee --- /dev/null +++ b/tools/cru-py/poetry.lock @@ -0,0 +1,80 @@ +# This file is automatically @generated by Poetry 1.8.4 and should not be changed by hand.
+
+[[package]]
+name = "mypy"
+version = "1.14.0"
+description = "Optional static typing for Python"
+optional = false
+python-versions = ">=3.8"
+files = [
+ {file = "mypy-1.14.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:e971c1c667007f9f2b397ffa80fa8e1e0adccff336e5e77e74cb5f22868bee87"},
+ {file = "mypy-1.14.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:e86aaeaa3221a278c66d3d673b297232947d873773d61ca3ee0e28b2ff027179"},
+ {file = "mypy-1.14.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:1628c5c3ce823d296e41e2984ff88c5861499041cb416a8809615d0c1f41740e"},
+ {file = "mypy-1.14.0-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:7fadb29b77fc14a0dd81304ed73c828c3e5cde0016c7e668a86a3e0dfc9f3af3"},
+ {file = "mypy-1.14.0-cp310-cp310-win_amd64.whl", hash = "sha256:3fa76988dc760da377c1e5069200a50d9eaaccf34f4ea18428a3337034ab5a44"},
+ {file = "mypy-1.14.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:6e73c8a154eed31db3445fe28f63ad2d97b674b911c00191416cf7f6459fd49a"},
+ {file = "mypy-1.14.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:273e70fcb2e38c5405a188425aa60b984ffdcef65d6c746ea5813024b68c73dc"},
+ {file = "mypy-1.14.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:1daca283d732943731a6a9f20fdbcaa927f160bc51602b1d4ef880a6fb252015"},
+ {file = "mypy-1.14.0-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:7e68047bedb04c1c25bba9901ea46ff60d5eaac2d71b1f2161f33107e2b368eb"},
+ {file = "mypy-1.14.0-cp311-cp311-win_amd64.whl", hash = "sha256:7a52f26b9c9b1664a60d87675f3bae00b5c7f2806e0c2800545a32c325920bcc"},
+ {file = "mypy-1.14.0-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:d5326ab70a6db8e856d59ad4cb72741124950cbbf32e7b70e30166ba7bbf61dd"},
+ {file = "mypy-1.14.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:bf4ec4980bec1e0e24e5075f449d014011527ae0055884c7e3abc6a99cd2c7f1"},
+ {file = "mypy-1.14.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:390dfb898239c25289495500f12fa73aa7f24a4c6d90ccdc165762462b998d63"},
+ {file = "mypy-1.14.0-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:7e026d55ddcd76e29e87865c08cbe2d0104e2b3153a523c529de584759379d3d"},
+ {file = "mypy-1.14.0-cp312-cp312-win_amd64.whl", hash = "sha256:585ed36031d0b3ee362e5107ef449a8b5dfd4e9c90ccbe36414ee405ee6b32ba"},
+ {file = "mypy-1.14.0-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:e9f6f4c0b27401d14c483c622bc5105eff3911634d576bbdf6695b9a7c1ba741"},
+ {file = "mypy-1.14.0-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:56b2280cedcb312c7a79f5001ae5325582d0d339bce684e4a529069d0e7ca1e7"},
+ {file = "mypy-1.14.0-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:342de51c48bab326bfc77ce056ba08c076d82ce4f5a86621f972ed39970f94d8"},
+ {file = "mypy-1.14.0-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:00df23b42e533e02a6f0055e54de9a6ed491cd8b7ea738647364fd3a39ea7efc"},
+ {file = "mypy-1.14.0-cp313-cp313-win_amd64.whl", hash = "sha256:e8c8387e5d9dff80e7daf961df357c80e694e942d9755f3ad77d69b0957b8e3f"},
+ {file = "mypy-1.14.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:0b16738b1d80ec4334654e89e798eb705ac0c36c8a5c4798496cd3623aa02286"},
+ {file = "mypy-1.14.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:10065fcebb7c66df04b05fc799a854b1ae24d9963c8bb27e9064a9bdb43aa8ad"},
+ {file = "mypy-1.14.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:fbb7d683fa6bdecaa106e8368aa973ecc0ddb79a9eaeb4b821591ecd07e9e03c"},
+ {file = "mypy-1.14.0-cp38-cp38-musllinux_1_2_x86_64.whl", hash = "sha256:3498cb55448dc5533e438cd13d6ddd28654559c8c4d1fd4b5ca57a31b81bac01"},
+ {file = "mypy-1.14.0-cp38-cp38-win_amd64.whl", hash = "sha256:c7b243408ea43755f3a21a0a08e5c5ae30eddb4c58a80f415ca6b118816e60aa"},
+ {file = "mypy-1.14.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:14117b9da3305b39860d0aa34b8f1ff74d209a368829a584eb77524389a9c13e"},
+ {file = "mypy-1.14.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:af98c5a958f9c37404bd4eef2f920b94874507e146ed6ee559f185b8809c44cc"},
+ {file = "mypy-1.14.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:f0b343a1d3989547024377c2ba0dca9c74a2428ad6ed24283c213af8dbb0710b"},
+ {file = "mypy-1.14.0-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:cdb5563c1726c85fb201be383168f8c866032db95e1095600806625b3a648cb7"},
+ {file = "mypy-1.14.0-cp39-cp39-win_amd64.whl", hash = "sha256:74e925649c1ee0a79aa7448baf2668d81cc287dc5782cff6a04ee93f40fb8d3f"},
+ {file = "mypy-1.14.0-py3-none-any.whl", hash = "sha256:2238d7f93fc4027ed1efc944507683df3ba406445a2b6c96e79666a045aadfab"},
+ {file = "mypy-1.14.0.tar.gz", hash = "sha256:822dbd184d4a9804df5a7d5335a68cf7662930e70b8c1bc976645d1509f9a9d6"},
+]
+
+[package.dependencies]
+mypy_extensions = ">=1.0.0"
+typing_extensions = ">=4.6.0"
+
+[package.extras]
+dmypy = ["psutil (>=4.0)"]
+faster-cache = ["orjson"]
+install-types = ["pip"]
+mypyc = ["setuptools (>=50)"]
+reports = ["lxml"]
+
+[[package]]
+name = "mypy-extensions"
+version = "1.0.0"
+description = "Type system extensions for programs checked with the mypy type checker."
+optional = false
+python-versions = ">=3.5"
+files = [
+ {file = "mypy_extensions-1.0.0-py3-none-any.whl", hash = "sha256:4392f6c0eb8a5668a69e23d168ffa70f0be9ccfd32b5cc2d26a34ae5b844552d"},
+ {file = "mypy_extensions-1.0.0.tar.gz", hash = "sha256:75dbf8955dc00442a438fc4d0666508a9a97b6bd41aa2f0ffe9d2f2725af0782"},
+]
+
+[[package]]
+name = "typing-extensions"
+version = "4.12.2"
+description = "Backported and Experimental Type Hints for Python 3.8+"
+optional = false
+python-versions = ">=3.8"
+files = [
+ {file = "typing_extensions-4.12.2-py3-none-any.whl", hash = "sha256:04e5ca0351e0f3f85c6853954072df659d0d13fac324d0072316b67d7794700d"},
+ {file = "typing_extensions-4.12.2.tar.gz", hash = "sha256:1a7ead55c7e559dd4dee8856e3a88b41225abfe1ce8df57b7c13915fe121ffb8"},
+]
+
+[metadata]
+lock-version = "2.0"
+python-versions = "^3.11"
+content-hash = "34a84c9f444021c048be3a70dbb3246bb73c4e7e8f0cc980b8050debcf21a6f9"
diff --git a/tools/cru-py/pyproject.toml b/tools/cru-py/pyproject.toml new file mode 100644 index 0000000..e5e7f09 --- /dev/null +++ b/tools/cru-py/pyproject.toml @@ -0,0 +1,26 @@ +[project] +name = "cru-py" +version = "0.1.0" +requires-python = ">=3.11" + +[tool.poetry] +package-mode = false +name = "cru" +version = "0.1.0" +description = "" +authors = ["Yuqian Yang <crupest@crupest.life>"] +license = "MIT" +readme = "README.md" + +[tool.poetry.dependencies] +python = "^3.11" + +[tool.poetry.group.dev.dependencies] +mypy = "^1.13.0" + +[tool.ruff.lint] +select = ["E", "F", "B"] + +[build-system] +requires = ["poetry-core"] +build-backend = "poetry.core.masonry.api" diff --git a/tools/cru-py/www-dev b/tools/cru-py/www-dev new file mode 100644 index 0000000..f56d679 --- /dev/null +++ b/tools/cru-py/www-dev @@ -0,0 +1,8 @@ +#! /usr/bin/env sh + +set -e + +cd "$(dirname "$0")/../.." + +exec tmux new-session 'cd docker/crupest-nginx/sites/www && pnpm start' \; \ + split-window -h 'cd docker/crupest-api/CrupestApi/CrupestApi && dotnet run --launch-profile dev' diff --git a/tools/manage b/tools/manage new file mode 100755 index 0000000..dc7f64b --- /dev/null +++ b/tools/manage @@ -0,0 +1,16 @@ +#!/usr/bin/env bash + +set -e + +python3.11 --version > /dev/null 2>&1 || ( + echo Error: failed to run Python with python3.11 --version. + exit 1 +) + +script_dir=$(dirname "$0") +project_dir=$(realpath "$script_dir/..") + +cd "$project_dir" + +export PYTHONPATH="$project_dir/tools/cru-py:$PYTHONPATH" +python3.11 -m cru.service --project-dir "$project_dir" "$@" diff --git a/tools/manage.cmd b/tools/manage.cmd new file mode 100644 index 0000000..fce913d --- /dev/null +++ b/tools/manage.cmd @@ -0,0 +1,15 @@ +@echo off + +set PYTHON=py -3 +%PYTHON% --version >NUL 2>&1 || ( + echo Error: failed to run Python with py -3 --version. + exit 1 +) + +set TOOLS_DIR=%~dp0 +set PROJECT_DIR=%TOOLS_DIR%.. 
+ +cd /d "%PROJECT_DIR%" + +set PYTHONPATH=%PROJECT_DIR%\tools\cru-py;%PYTHONPATH% +%PYTHON% -m cru.service --project-dir "%PROJECT_DIR%" %* diff --git a/tools/scripts/neovide-listen b/tools/scripts/neovide-listen new file mode 100755 index 0000000..2591842 --- /dev/null +++ b/tools/scripts/neovide-listen @@ -0,0 +1,43 @@ +#!/usr/bin/env bash + +if [[ -z "$NVIM_SOCKET" ]]; then + NVIM_SOCKET="/tmp/nvimsocket" +fi + +args=() +MY_NEOVIM_PATH="$HOME/codes/neovim/build/bin/nvim" +if [[ -e "$MY_NEOVIM_PATH" ]]; then + echo "Found my neovim at $MY_NEOVIM_PATH" + export VIMRUNTIME="$HOME/codes/neovim/runtime" + args=("${args[@]}" "--neovim-bin" "$MY_NEOVIM_PATH") +fi + +listen_added=0 +for arg in "$@"; do + args=("${args[@]}" "$arg") + if [ "$arg" = '--' ]; then + args=("${args[@]}" "--listen" "$NVIM_SOCKET") + listen_added=1 + fi +done + +if [[ $listen_added = 0 ]]; then + args=("${args[@]}" "--" "--listen" "$NVIM_SOCKET") +fi + +NEOVIDE_BIN=neovide +MY_NEOVIDE_PATH="$HOME/codes/neovide/target/release/neovide" +if [ -e "$MY_NEOVIDE_PATH" ]; then + echo "Found my neovide at $MY_NEOVIDE_PATH" + NEOVIDE_BIN="$MY_NEOVIDE_PATH" +fi + +if which nvr > /dev/null; then + echo "Detected nvr, set git editor env" + export GIT_EDITOR='nvr -cc split --remote-wait' +fi + +args=("$NEOVIDE_BIN" "${args[@]}") +echo "Command is ${args[@]}" +exec "${args[@]}" + diff --git a/tools/scripts/neovide-listen.ps1 b/tools/scripts/neovide-listen.ps1 new file mode 100644 index 0000000..e84f3a2 --- /dev/null +++ b/tools/scripts/neovide-listen.ps1 @@ -0,0 +1,38 @@ +$env:NVIM_LISTEN_ADDRESS ??= "\\.\pipe\nvimsocket" + +$neovide_args = @() + +$MY_NEOVIM_PATH="$HOME/codes/neovim/build/bin/nvim.exe" +if (Get-Item $MY_NEOVIM_PATH -ErrorAction Ignore) { + Write-Output "Found my neovim at $MY_NEOVIM_PATH." + $env:VIMRUNTIME="$HOME/codes/neovim/runtime" + $neovide_args += "--neovim-bin", "$MY_NEOVIM_PATH" +} + +$listen_added = $false +foreach ($arg in $args) { + $neovide_args += $arg + if ( $arg -eq '--') { + $neovide_args += "--listen", $env:NVIM_LISTEN_ADDRESS + $listen_added=$true + } +} + +if (-not $listen_added) { + $neovide_args += "--", "--listen", $env:NVIM_LISTEN_ADDRESS +} + +$neovide_bin = "neovide" +$my_neovide_path = "$HOME/codes/neovide/target/release/neovide.exe" +if (Get-Item $my_neovide_path -ErrorAction Ignore) { + Write-Output "Found my neovide at $my_neovide_path." + $neovide_bin = "$my_neovide_path" +} + +if (Get-Command nvr -ErrorAction Ignore) { + Write-Output "Detected nvr, set git editor env." + $env:GIT_EDITOR = "nvr -cc split --remote-wait" +} + +Write-Output "Command is $($neovide_args -join ' ')." 
+Start-Process $neovide_bin -ArgumentList $neovide_args -Wait diff --git a/tools/update-blog b/tools/update-blog new file mode 100755 index 0000000..5314f47 --- /dev/null +++ b/tools/update-blog @@ -0,0 +1,5 @@ +#!/usr/bin/env bash + +set -e + +exec docker exec -it blog /scripts/update.bash diff --git a/tools/utility/rename-tree.py b/tools/utility/rename-tree.py new file mode 100755 index 0000000..c177eb6 --- /dev/null +++ b/tools/utility/rename-tree.py @@ -0,0 +1,37 @@ +#!/usr/bin/env python3 + +import argparse +import os +import os.path +import re + +parser = argparse.ArgumentParser( + prog='rename-tree', + description='Recursively rename directories and files') + +parser.add_argument('old') +parser.add_argument('new') +parser.add_argument('dirs', nargs="+") + +args = parser.parse_args() + +old_regex = re.compile(args.old) +new = args.new + +def rename(path, isdir): + dirname = os.path.dirname(path) + filename = os.path.basename(path) + new_filename = re.sub(old_regex, new, filename) + dir_str = "/" if isdir else "" + if new_filename != filename: + os.rename(path, os.path.join(dirname, new_filename)) + print(f"{path}{dir_str} -> {new_filename}{dir_str}") + +for i, d in enumerate(args.dirs): + print(f"[{i + 1}/{len(args.dirs)}] Run for {d}:") + for dirpath, dirnames, filenames in os.walk(d, topdown=False): + for filename in filenames: + rename(os.path.join(dirpath, filename), False) + rename(dirpath, True) + +print("Done!") |
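
rename-tree above applies a plain re.sub to each file and directory basename while walking the tree bottom-up. A small sketch for previewing what a pattern would do before running the script; the pattern and names here are made up:

import re

# Hypothetical arguments: rename-tree foo bar SOME_DIR
old_regex = re.compile("foo")
new = "bar"

for name in ["foo.txt", "my_foo_dir", "unrelated.md"]:
    # Same substitution rename-tree performs per basename; unchanged names
    # are left alone (the script only calls os.rename when the name differs).
    print(f"{name} -> {re.sub(old_regex, new, name)}")
# foo.txt -> bar.txt
# my_foo_dir -> my_bar_dir
# unrelated.md -> unrelated.md
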