| field | value |
|---|---|
| author | Yuqian Yang <crupest@crupest.life> (2025-06-11 17:56:54 +0800) |
| committer | Yuqian Yang <crupest@crupest.life> (2025-06-11 17:56:54 +0800) |
| commit | ab125417922f686a193b8e5a16856a1b7ae70aeb (patch) |
| tree | ea53170d9a108533afbe8d28cf150adb844f846e /deno |
| parent | 8eb35af237d400ea17e1f2d3b1609928ea98344e (diff) |
| download | crupest-ab125417922f686a193b8e5a16856a1b7ae70aeb (.tar.gz, .tar.bz2, .zip) |
feat(tools): add a secret tool.
Diffstat (limited to 'deno')
| mode | file | lines changed |
|---|---|---|
| -rw-r--r-- | deno/deno.json | 2 |
| -rw-r--r-- | deno/tools/deno.json | 7 |
| -rw-r--r-- | deno/tools/gen-geosite-rules.ts | 161 |

3 files changed, 169 insertions(+), 1 deletion(-)

```diff
diff --git a/deno/deno.json b/deno/deno.json
index 2125808..55ffcb8 100644
--- a/deno/deno.json
+++ b/deno/deno.json
@@ -1,5 +1,5 @@
 {
-  "workspace": ["./base", "./service-manager", "./mail-relay"],
+  "workspace": ["./base", "./service-manager", "./mail-relay", "./tools"],
   "tasks": {
     "compile:mail-relay": "deno task --cwd=mail-relay compile",
     "compile:service-manager": "deno task --cwd=service-manager compile"
diff --git a/deno/tools/deno.json b/deno/tools/deno.json
new file mode 100644
index 0000000..3597182
--- /dev/null
+++ b/deno/tools/deno.json
@@ -0,0 +1,7 @@
+{
+  "version": "0.1.0",
+  "tasks": {
+  },
+  "imports": {
+  }
+}
diff --git a/deno/tools/gen-geosite-rules.ts b/deno/tools/gen-geosite-rules.ts
new file mode 100644
index 0000000..c59d34f
--- /dev/null
+++ b/deno/tools/gen-geosite-rules.ts
@@ -0,0 +1,161 @@
+const PROXY_NAME = "node-select"
+const ATTR = "cn"
+const REPO_NAME = "domain-list-community";
+const URL = "https://github.com/v2fly/domain-list-community/archive/refs/heads/master.zip"
+const SITES = [
+  "github",
+  "google",
+  "youtube",
+  "twitter",
+  "facebook",
+  "discord",
+  "reddit",
+  "twitch",
+  "quora",
+  "telegram",
+  "imgur",
+  "stackexchange",
+  "onedrive",
+  "duckduckgo",
+  "wikimedia",
+  "gitbook",
+  "gitlab",
+  "creativecommons",
+  "archive",
+  "matrix",
+  "tor",
+  "python",
+  "ruby",
+  "rust",
+  "nodejs",
+  "npmjs",
+  "qt",
+  "docker",
+  "v2ray",
+  "homebrew",
+  "bootstrap",
+  "heroku",
+  "vercel",
+  "ieee",
+  "sci-hub",
+  "libgen",
+]
+
+const prefixes = ["include", "domain", "keyword", "full", "regexp"] as const
+
+interface Rule {
+  kind: (typeof prefixes)[number];
+  value: string;
+  attrs: string[];
+}
+
+type FileProvider = (name: string) => string;
+
+function extract(starts: string[], provider: FileProvider): Rule[] {
+  function parseLine(line: string): Rule {
+    let kind = prefixes.find((p) => line.startsWith(p + ":"));
+    if (kind != null) {
+      line = line.slice(line.indexOf(":") + 1);
+    } else {
+      kind = "domain";
+    }
+    const segs = line.split("@");
+    return {
+      kind,
+      value: segs[0].trim(),
+      attrs: [...segs.slice(1)].map((s) => s.trim()),
+    };
+  }
+
+  function parse(text: string): Rule[] {
+    return text
+      .replaceAll("\r\n", "\n")
+      .split("\n")
+      .map((l) => l.trim())
+      .filter((l) => l.length !== 0 && !l.startsWith("#"))
+      .map((l) => parseLine(l));
+  }
+
+  const visited = [] as string[]
+  const rules = [] as Rule[]
+
+  function add(name: string) {
+    const text = provider(name);
+    for (const rule of parse(text)) {
+      if (rule.kind === "include") {
+        if (visited.includes(rule.value)) {
+          console.warn(`circular refs found: ${name} includes ${rule.value}.`);
+          continue;
+        } else {
+          visited.push(rule.value);
+          add(rule.value);
+        }
+      } else {
+        rules.push(rule);
+      }
+    }
+  }
+
+  for (const start of starts) {
+    add(start);
+  }
+
+  return rules
+}
+
+function toNewFormat(rules: Rule[], attr: string): [string, string] {
+  function toLine(rule: Rule) {
+    const prefixMap = {
+      "domain": "DOMAIN-SUFFIX",
+      "full": "DOMAIN",
+      "keyword": "DOMAIN-KEYWORD",
+      "regexp": "DOMAIN-REGEX",
+    } as const;
+    if (rule.kind === "include") {
+      throw new Error("Include rule not parsed.")
+    }
+    return `${prefixMap[rule.kind]},${rule.value}`
+  }
+
+  function toLines(rules: Rule[]) {
+    return rules.map((r) => toLine(r)).join("\n")
+  }
+
+  const has: Rule[] = [];
+  const notHas: Rule[] = [];
+  rules.forEach((r) => (r.attrs.includes(attr) ? has.push(r) : notHas.push(r)));
+
+  return [toLines(has), toLines(notHas)];
+}
+
+
+if (import.meta.main) {
+  const tmpDir = Deno.makeTempDirSync({ prefix: "geosite-rules-" });
+  console.log("Work dir is ", tmpDir);
+  const zipFilePath = tmpDir + "/repo.zip";
+  const res = await fetch(URL);
+  if (!res.ok) {
+    throw new Error("Failed to download repo.");
+  }
+  Deno.writeFileSync(zipFilePath, await res.bytes());
+  const unzip = new Deno.Command("unzip", {
+    args: ["-q", zipFilePath],
+    cwd: tmpDir,
+  });
+  if (!(await unzip.spawn().status).success) {
+    throw new Error("Failed to unzip");
+  }
+
+  const dataDir = tmpDir + "/" + REPO_NAME + "-master/data";
+  const provider = (name: string) =>
+    Deno.readTextFileSync(dataDir + "/" + name);
+
+  const rules = extract(SITES, provider)
+  const [has, notHas] = toNewFormat(rules, ATTR)
+  const hasFile = tmpDir + "/has-rule"
+  const notHasFile = tmpDir + "/not-has-rule"
+  console.log("Write result to: " + hasFile + " , " + notHasFile)
+  Deno.writeTextFileSync(hasFile, has)
+  Deno.writeTextFileSync(notHasFile, notHas)
+}
```
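Running the tool is presumably a matter of `deno run --allow-net --allow-run --allow-read --allow-write deno/tools/gen-geosite-rules.ts` (or simply `-A`): the script fetches the domain-list-community zip, spawns the external `unzip` binary (which therefore has to be on PATH), and writes two files, `has-rule` and `not-has-rule`, into a fresh temp directory. Rules carrying the `cn` attribute (`ATTR`) end up in `has-rule`; everything else goes to `not-has-rule`. The new `deno/tools/deno.json` ships with empty `tasks` and `imports`, so there is no task alias for the script yet; the exact flags above are an assumption, not part of the commit.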
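For a feel of what `extract` and `toNewFormat` do, here is a hypothetical usage sketch. It assumes the two helpers were exported from gen-geosite-rules.ts (in this commit they are module-private), and the sample data files are invented, only mimicking the domain-list-community line format: an optional `include:`/`full:`/`keyword:`/`regexp:` prefix (plain lines default to `domain:`) plus optional `@attr` attributes such as `@cn`.

```typescript
// Hypothetical: assumes `extract` and `toNewFormat` are exported.
import { extract, toNewFormat } from "./gen-geosite-rules.ts";

// In-memory stand-in for the repository's data/ directory.
const files: Record<string, string> = {
  google: "include:google-cn\ndomain:google.com\nfull:www.googleapis.com",
  "google-cn": "domain:google.cn @cn\nkeyword:googleanalytics @cn",
};

// `include:` lines are resolved recursively via the provider callback.
const rules = extract(["google"], (name) => files[name]);
const [withCn, withoutCn] = toNewFormat(rules, "cn");

console.log(withCn);
// DOMAIN-SUFFIX,google.cn
// DOMAIN-KEYWORD,googleanalytics
console.log(withoutCn);
// DOMAIN-SUFFIX,google.com
// DOMAIN,www.googleapis.com
```

The split by attribute is the whole point of the two output files: the `@cn` rules can be routed directly while the rest presumably go through the proxy (`PROXY_NAME` is declared for that purpose but not yet used in this commit).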
