Browse Source

Chore: more refactor to the bun

SukkaW 2 years ago
parent
commit
071b8120a6

+ 2 - 4
Build/build-anti-bogus-domain.js

@@ -23,13 +23,11 @@ const getBogusNxDomainIPs = async () => {
 };
 
 const buildAntiBogusDomain = task(__filename, async () => {
-  const filePath = path.resolve(__dirname, '../Source/ip/reject.conf');
-
   const bogusIpPromise = getBogusNxDomainIPs();
 
   /** @type {string[]} */
   const result = [];
-  for await (const line of readFileByLine(filePath)) {
+  for await (const line of readFileByLine(path.resolve(__dirname, '../Source/ip/reject.conf'))) {
     if (line === '# --- [Anti Bogus Domain Replace Me] ---') {
       (await bogusIpPromise).forEach(rule => result.push(rule));
       continue;
@@ -65,6 +63,6 @@ const buildAntiBogusDomain = task(__filename, async () => {
 
 module.exports.buildAntiBogusDomain = buildAntiBogusDomain;
 
-if (require.main === module) {
+if (import.meta.main) {
   buildAntiBogusDomain();
 }

+ 1 - 1
Build/build-apple-cdn.js

@@ -44,6 +44,6 @@ const buildAppleCdn = task(__filename, async () => {
 
 module.exports.buildAppleCdn = buildAppleCdn;
 
-if (require.main === module) {
+if (import.meta.main) {
   buildAppleCdn();
 }

+ 22 - 23
Build/build-cdn-conf.js → Build/build-cdn-conf.ts

@@ -1,18 +1,18 @@
-// @ts-check
-const path = require('path');
-const { createRuleset } = require('./lib/create-file');
-const { fetchRemoteTextAndCreateReadlineInterface, readFileByLine } = require('./lib/fetch-remote-text-by-line');
-const createTrie = require('./lib/trie');
-const { task } = require('./lib/trace-runner');
-const fs = require('fs');
-const { processLine } = require('./lib/process-line');
+import path from 'path';
+import { createRuleset } from './lib/create-file';
+import { fetchRemoteTextAndCreateReadlineInterface, readFileByLine } from './lib/fetch-remote-text-by-line';
+import { createTrie } from './lib/trie';
+import { task } from './lib/trace-runner';
+import { processLine } from './lib/process-line';
 
-const publicSuffixPath = path.resolve(__dirname, '../node_modules/.cache/public_suffix_list_dat.txt');
+const publicSuffixPath: string = path.resolve(__dirname, '../node_modules/.cache/public_suffix_list_dat.txt');
 
-const getS3OSSDomains = async () => {
+const getS3OSSDomains = async (): Promise<Set<string>> => {
   const trie = createTrie();
 
-  if (fs.existsSync(publicSuffixPath)) {
+  const publicSuffixFile = Bun.file(publicSuffixPath);
+
+  if (await publicSuffixFile.exists()) {
     for await (const line of readFileByLine(publicSuffixPath)) {
       trie.add(line);
     }
@@ -25,11 +25,10 @@ const getS3OSSDomains = async () => {
 
   /**
    * Extract OSS domain from publicsuffix list
-   * @type {Set<string>}
    */
-  const S3OSSDomains = new Set();
+  const S3OSSDomains = new Set<string>();
 
-  trie.find('.amazonaws.com').forEach(line => {
+  trie.find('.amazonaws.com').forEach((line: string) => {
     if (
       (line.startsWith('s3-') || line.startsWith('s3.'))
       && !line.includes('cn-')
@@ -37,7 +36,7 @@ const getS3OSSDomains = async () => {
       S3OSSDomains.add(line);
     }
   });
-  trie.find('.scw.cloud').forEach(line => {
+  trie.find('.scw.cloud').forEach((line: string) => {
     if (
       (line.startsWith('s3-') || line.startsWith('s3.'))
       && !line.includes('cn-')
@@ -45,7 +44,7 @@ const getS3OSSDomains = async () => {
       S3OSSDomains.add(line);
     }
   });
-  trie.find('sakurastorage.jp').forEach(line => {
+  trie.find('sakurastorage.jp').forEach((line: string) => {
     if (
       (line.startsWith('s3-') || line.startsWith('s3.'))
     ) {
@@ -56,15 +55,15 @@ const getS3OSSDomains = async () => {
   return S3OSSDomains;
 };
 
-const buildCdnConf = task(__filename, async () => {
+const buildCdnConf = task(__filename, async () => {
-  /** @type {string[]} */
-  const cdnDomainsList = [];
+  const cdnDomainsList: string[] = [];
 
-  const getS3OSSDomainsPromise = getS3OSSDomains();
+  const getS3OSSDomainsPromise: Promise<Set<string>> = getS3OSSDomains();
 
   for await (const l of readFileByLine(path.resolve(__dirname, '../Source/non_ip/cdn.conf'))) {
     if (l === '# --- [AWS S3 Replace Me] ---') {
-      (await getS3OSSDomainsPromise).forEach(domain => { cdnDomainsList.push(`DOMAIN-SUFFIX,${domain}`); });
+      (await getS3OSSDomainsPromise).forEach((domain: string) => { cdnDomainsList.push(`DOMAIN-SUFFIX,${domain}`); });
       continue;
     }
     const line = processLine(l);
@@ -73,7 +72,7 @@ const buildCdnConf = task(__filename, async () => {
     }
   }
 
-  const description = [
+  const description: string[] = [
     'License: AGPL 3.0',
     'Homepage: https://ruleset.skk.moe',
     'GitHub: https://github.com/SukkaW/Surge',
@@ -92,8 +91,8 @@ const buildCdnConf = task(__filename, async () => {
   ));
 });
 
-module.exports.buildCdnConf = buildCdnConf;
+export { buildCdnConf };
 
-if (require.main === module) {
+if (import.meta.main) {
   buildCdnConf();
 }

+ 1 - 1
Build/build-chn-cidr.js

@@ -52,6 +52,6 @@ const buildChnCidr = task(__filename, async () => {
 
 module.exports.buildChnCidr = buildChnCidr;
 
-if (require.main === module) {
+if (import.meta.main) {
   buildChnCidr();
 }

+ 1 - 1
Build/build-common.js

@@ -47,7 +47,7 @@ const buildCommon = task(__filename, async () => {
 
 module.exports.buildCommon = buildCommon;
 
-if (require.main === module) {
+if (import.meta.main) {
   buildCommon();
 }
 

+ 1 - 1
Build/build-domestic-ruleset.js

@@ -60,6 +60,6 @@ const buildDomesticRuleset = task(__filename, async () => {
 
 module.exports.buildDomesticRuleset = buildDomesticRuleset;
 
-if (require.main === module) {
+if (import.meta.main) {
   buildDomesticRuleset();
 }

+ 1 - 1
Build/build-internal-cdn-rules.js

@@ -95,6 +95,6 @@ const buildInternalCDNDomains = task(__filename, async () => {
 
 module.exports.buildInternalCDNDomains = buildInternalCDNDomains;
 
-if (require.main === module) {
+if (import.meta.main) {
   buildInternalCDNDomains();
 }

+ 1 - 1
Build/build-internal-chn-domains.js

@@ -19,6 +19,6 @@ const buildInternalChnDomains = task(__filename, async () => {
 
 module.exports.buildInternalChnDomains = buildInternalChnDomains;
 
-if (require.main === module) {
+if (import.meta.main) {
   buildInternalChnDomains();
 }

+ 2 - 6
Build/build-internal-reverse-chn-cidr.js

@@ -2,7 +2,6 @@
 const { fetchRemoteTextAndCreateReadlineInterface } = require('./lib/fetch-remote-text-by-line');
 const { processLineFromReadline } = require('./lib/process-line');
 const path = require('path');
-const fs = require('fs');
 const fsp = require('fs/promises');
 const { task } = require('./lib/trace-runner');
 
@@ -42,14 +41,11 @@ const buildInternalReverseChnCIDR = task(__filename, async () => {
     true
   );
 
-  return fs.promises.writeFile(
-    path.resolve(__dirname, '../List/internal/reversed-chn-cidr.txt'),
-    `${reversedCidr.join('\n')}\n`
-  );
+  return Bun.write(path.resolve(__dirname, '../List/internal/reversed-chn-cidr.txt'), `${reversedCidr.join('\n')}\n`);
 });
 
 module.exports.buildInternalReverseChnCIDR = buildInternalReverseChnCIDR;
 
-if (require.main === module) {
+if (import.meta.main) {
   buildInternalReverseChnCIDR();
 }

+ 1 - 1
Build/build-phishing-domainset.js

@@ -185,6 +185,6 @@ const buildPhishingDomainSet = task(__filename, async () => {
 
 module.exports.buildPhishingDomainSet = buildPhishingDomainSet;
 
-if (require.main === module) {
+if (import.meta.main) {
   buildPhishingDomainSet();
 }

+ 2 - 3
Build/build-public.js

@@ -1,6 +1,5 @@
 const listDir = require('@sukka/listdir');
 const path = require('path');
-const fs = require('fs');
 const fsp = require('fs/promises');
 const { task } = require('./lib/trace-runner');
 
@@ -31,12 +30,12 @@ const buildPublicHtml = task(__filename, async () => {
 
   const html = template(list);
 
-  return fs.promises.writeFile(path.join(publicPath, 'index.html'), html, 'utf-8');
+  return Bun.write(path.join(publicPath, 'index.html'), html);
 });
 
 module.exports.buildPublicHtml = buildPublicHtml;
 
-if (require.main === module) {
+if (import.meta.main) {
   buildPublicHtml();
 }
 

+ 1 - 1
Build/build-redirect-module.js

@@ -96,6 +96,6 @@ const buildRedirectModule = task(__filename, async () => {
 
 module.exports.buildRedirectModule = buildRedirectModule;
 
-if (require.main === module) {
+if (import.meta.main) {
   buildRedirectModule();
 }

+ 1 - 1
Build/build-reject-domainset.js

@@ -233,6 +233,6 @@ const buildRejectDomainSet = task(__filename, async () => {
 
 module.exports.buildRejectDomainSet = buildRejectDomainSet;
 
-if (require.main === module) {
+if (import.meta.main) {
   buildRejectDomainSet();
 }

+ 1 - 2
Build/build-speedtest-domainset.js

@@ -1,4 +1,3 @@
-const { fetch } = require('undici');
 const { domainDeduper } = require('./lib/domain-deduper');
 const path = require('path');
 const { createRuleset } = require('./lib/create-file');
@@ -143,6 +142,6 @@ const buildSpeedtestDomainSet = task(__filename, async () => {
 
 module.exports.buildSpeedtestDomainSet = buildSpeedtestDomainSet;
 
-if (require.main === module) {
+if (import.meta.main) {
   buildSpeedtestDomainSet();
 }

+ 1 - 1
Build/build-stream-service.js

@@ -74,6 +74,6 @@ const buildStreamService = task(__filename, async () => {
 
 module.exports.buildStreamService = buildStreamService;
 
-if (require.main === module) {
+if (import.meta.main) {
   buildStreamService();
 }

+ 2 - 1
Build/build-telegram-cidr.js

@@ -1,3 +1,4 @@
+// @ts-check
 const { fetchWithRetry } = require('./lib/fetch-retry');
 const { createReadlineInterfaceFromResponse } = require('./lib/fetch-remote-text-by-line');
 const path = require('path');
@@ -53,6 +54,6 @@ const buildTelegramCIDR = task(__filename, async () => {
 
 module.exports.buildTelegramCIDR = buildTelegramCIDR;
 
-if (require.main === module) {
+if (import.meta.main) {
   buildTelegramCIDR();
 }

+ 9 - 16
Build/download-previous-build.js

@@ -1,6 +1,5 @@
-const { fetch } = require('undici');
+// @ts-check
 const tar = require('tar');
-const fs = require('fs');
 const fsp = require('fs/promises');
 const path = require('path');
 const { tmpdir } = require('os');
@@ -10,12 +9,6 @@ const { readFileByLine } = require('./lib/fetch-remote-text-by-line');
 const { isCI } = require('ci-info');
 const { task, traceAsync } = require('./lib/trace-runner');
 
-const fileExists = (path) => {
-  return fs.promises.access(path, fs.constants.F_OK)
-    .then(() => true)
-    .catch(() => false);
-};
-
 const downloadPreviousBuild = task(__filename, async () => {
   const filesList = ['Clash', 'List'];
 
@@ -31,7 +24,7 @@ const downloadPreviousBuild = task(__filename, async () => {
       filesList.push(line);
 
       if (!isCI) {
-        allFileExists = fs.existsSync(path.join(__dirname, '..', line));
+        allFileExists = await Bun.file(path.join(__dirname, '..', line)).exists();
         if (!allFileExists) {
           break;
         }
@@ -59,6 +52,9 @@ const downloadPreviousBuild = task(__filename, async () => {
       Readable.fromWeb(resp.body),
       tar.x({
         cwd: extractedPath,
+        /**
+         * @param {string} p
+         */
         filter(p) {
           return p.includes('/List/') || p.includes('/Modules/') || p.includes('/Clash/');
         }
@@ -70,7 +66,7 @@ const downloadPreviousBuild = task(__filename, async () => {
 
   await Promise.all(filesList.map(async p => {
     const src = path.join(extractedPath, 'ruleset.skk.moe-master', p);
-    if (await fileExists(src)) {
+    if (await Bun.file(src).exists()) {
       return fsp.cp(
         src,
         path.join(__dirname, '..', p),
@@ -79,7 +75,7 @@ const downloadPreviousBuild = task(__filename, async () => {
     }
   }));
 
-  // return fs.promises.unlink(extractedPath).catch(() => { });
+  // return fsp.unlink(extractedPath).catch(() => { });
 });
 
 const downloadPublicSuffixList = task(__filename, async () => {
@@ -91,16 +87,13 @@ const downloadPublicSuffixList = task(__filename, async () => {
     fsp.mkdir(publicSuffixDir, { recursive: true })
   ]);
 
-  return pipeline(
-    Readable.fromWeb(resp.body),
-    fs.createWriteStream(publicSuffixPath)
-  );
+  return Bun.write(publicSuffixPath, resp);
 }, 'download-publicsuffixlist');
 
 module.exports.downloadPreviousBuild = downloadPreviousBuild;
 module.exports.downloadPublicSuffixList = downloadPublicSuffixList;
 
-if (require.main === module) {
+if (import.meta.main) {
   Promise.all([
     downloadPreviousBuild(),
     downloadPublicSuffixList()

+ 3 - 3
Build/lib/create-file.js

@@ -1,5 +1,4 @@
 // @ts-check
-const fs = require('fs');
 const { readFileByLine } = require('./fetch-remote-text-by-line');
 const { surgeDomainsetToClashDomainset, surgeRulesetToClashClassicalTextRuleset } = require('./clash');
 
@@ -9,7 +8,9 @@ const { surgeDomainsetToClashDomainset, surgeRulesetToClashClassicalTextRuleset
  */
 async function compareAndWriteFile(linesA, filePath) {
   let isEqual = true;
-  if (!fs.existsSync(filePath)) {
+  const file = Bun.file(filePath);
+
+  if (!(await file.exists())) {
     console.log(`${filePath} does not exists, writing...`);
     isEqual = false;
   } else if (linesA.length === 0) {
@@ -44,7 +45,6 @@ async function compareAndWriteFile(linesA, filePath) {
   }
 
   if (!isEqual) {
-    const file = Bun.file(filePath);
     const writer = file.writer();
 
     for (let i = 0, len = linesA.length; i < len; i++) {

+ 0 - 32
Build/lib/domain-deduper.js

@@ -1,32 +0,0 @@
-// @ts-check
-const createTrie = require('./trie');
-
-/**
- * @param {string[]} inputDomains
- */
-const domainDeduper = (inputDomains) => {
-  const trie = createTrie(inputDomains);
-  const sets = new Set(inputDomains);
-
-  for (let j = 0, len = inputDomains.length; j < len; j++) {
-    const d = inputDomains[j];
-    if (d[0] !== '.') {
-      continue;
-    }
-
-    // delete all included subdomains (ends with `.example.com`)
-    // eslint-disable-next-line sukka/unicorn/no-array-method-this-argument -- it is not an array
-    trie.find(d, false).forEach(f => sets.delete(f));
-
-    // if `.example.com` exists, then `example.com` should also be removed
-    const a = d.slice(1);
-
-    if (sets.has(a)) {
-      sets.delete(a);
-    }
-  }
-
-  return Array.from(sets);
-};
-
-module.exports.domainDeduper = domainDeduper;

+ 25 - 0
Build/lib/domain-deduper.ts

@@ -0,0 +1,25 @@
+import createTrie from './trie';
+
+const domainDeduper = (inputDomains: string[]): string[] => {
+  const trie = createTrie(inputDomains);
+  const sets = new Set(inputDomains);
+
+  for (let j = 0, len = inputDomains.length; j < len; j++) {
+    const d = inputDomains[j];
+    if (d[0] !== '.') {
+      continue;
+    }
+
+    trie.find(d, false).forEach(f => sets.delete(f));
+
+    const a: string = d.slice(1);
+
+    if (sets.has(a)) {
+      sets.delete(a);
+    }
+  }
+
+  return Array.from(sets);
+};
+
+export default domainDeduper;

+ 14 - 21
Build/lib/fetch-remote-text-by-line.js → Build/lib/fetch-remote-text-by-line.ts

@@ -1,14 +1,16 @@
-// @ts-check
-const { fetchWithRetry } = require('./fetch-retry');
+import type { BunFile } from 'bun';
+import { fetchWithRetry } from './fetch-retry';
 
 const decoder = new TextDecoder('utf-8');
-/**
- * @param {string} path
- */
-module.exports.readFileByLine = async function *(path) {
+
+export async function* readFileByLine(file: string | BunFile): AsyncGenerator<string> {
+  if (typeof file === 'string') {
+    file = Bun.file(file);
+  }
+
   let buf = '';
 
-  for await (const chunk of Bun.file(path).stream()) {
+  for await (const chunk of file.stream()) {
     const chunkStr = decoder.decode(chunk).replaceAll('\r\n', '\n');
     for (let i = 0, len = chunkStr.length; i < len; i++) {
       const char = chunkStr[i];
@@ -24,12 +26,9 @@ module.exports.readFileByLine = async function *(path) {
   if (buf) {
     yield buf;
   }
-};
+}
 
-/**
- * @param {import('undici').Response} resp
- */
-const createReadlineInterfaceFromResponse = async function *(resp) {
+export async function* createReadlineInterfaceFromResponse(resp: Response): AsyncGenerator<string> {
   if (!resp.body) {
     throw new Error('Failed to fetch remote text');
   }
@@ -55,15 +54,9 @@ const createReadlineInterfaceFromResponse = async function *(resp) {
   if (buf) {
     yield buf;
   }
-};
-
-module.exports.createReadlineInterfaceFromResponse = createReadlineInterfaceFromResponse;
+}
 
-/**
- * @param {import('undici').RequestInfo} url
- * @param {import('undici').RequestInit} [opt]
- */
-module.exports.fetchRemoteTextAndCreateReadlineInterface = async (url, opt) => {
+export async function fetchRemoteTextAndCreateReadlineInterface(url: string | URL, opt?: RequestInit): Promise<AsyncGenerator<string>> {
   const resp = await fetchWithRetry(url, opt);
   return createReadlineInterfaceFromResponse(resp);
-};
+}

+ 1 - 9
Build/lib/fetch-retry.js

@@ -1,11 +1,3 @@
 // @ts-check
-const undici = require('undici');
-
-// Enable HTTP/2 supports
-// undici.setGlobalDispatcher(new undici.Agent({
-//   allowH2: true,
-//   pipelining: 10
-// }));
-
-const fetchWithRetry = /** @type {import('undici').fetch} */(require('@vercel/fetch-retry')(undici.fetch));
+const fetchWithRetry = require('@vercel/fetch-retry')(fetch);
 module.exports.fetchWithRetry = fetchWithRetry;

+ 5 - 7
Build/lib/get-gorhill-publicsuffix.js

@@ -1,5 +1,4 @@
 const { toASCII } = require('punycode/');
-const fs = require('fs');
 const path = require('path');
 const { traceAsync } = require('./trace-runner');
 
@@ -7,15 +6,14 @@ const publicSuffixPath = path.resolve(__dirname, '../../node_modules/.cache/publ
 
 const getGorhillPublicSuffix = () => traceAsync('create gorhill public suffix instance', async () => {
   const customFetch = async (url) => {
-    const buf = await fs.promises.readFile(url);
-    return {
-      arrayBuffer() { return Promise.resolve(buf.buffer); }
-    };
+    return Bun.file(url);
   };
 
+  const publicSuffixFile = Bun.file(publicSuffixPath);
+
   const [publicSuffixListDat, { default: gorhill }] = await Promise.all([
-    fs.existsSync(publicSuffixPath)
-      ? fs.promises.readFile(publicSuffixPath, 'utf-8')
+    await publicSuffixFile.exists()
+      ? publicSuffixFile.text()
       : fetch('https://publicsuffix.org/list/public_suffix_list.dat').then(r => {
         console.log('public_suffix_list.dat not found, fetch directly from remote.');
         return r.text();

+ 6 - 10
Build/lib/parse-dnsmasq.js → Build/lib/parse-dnsmasq.ts

@@ -1,17 +1,13 @@
-const { fetchRemoteTextAndCreateReadlineInterface } = require('./fetch-remote-text-by-line');
-const tldts = require('tldts');
+import { fetchRemoteTextAndCreateReadlineInterface } from './fetch-remote-text-by-line';
+import tldts from 'tldts';
 
-const isDomainLoose = (domain) => {
+const isDomainLoose = (domain: string): boolean => {
   const { isIcann, isPrivate, isIp } = tldts.parse(domain);
   return !!(!isIp && (isIcann || isPrivate));
 };
 
-/**
- * @param {string | URL} url
- */
-const parseFelixDnsmasq = async (url) => {
-  /** @type {string[]} */
-  const res = [];
+const parseFelixDnsmasq = async (url: string | URL): Promise<string[]> => {
+  const res: string[] = [];
   for await (const line of await fetchRemoteTextAndCreateReadlineInterface(url)) {
     if (line.startsWith('server=/') && line.endsWith('/114.114.114.114')) {
       const domain = line.replace('server=/', '').replace('/114.114.114.114', '');
@@ -24,4 +20,4 @@ const parseFelixDnsmasq = async (url) => {
   return res;
 };
 
-module.exports.parseFelixDnsmasq = parseFelixDnsmasq;
+export { parseFelixDnsmasq };

+ 10 - 0
Build/lib/parse-filter.js

@@ -11,6 +11,13 @@ const DEBUG_DOMAIN_TO_FIND = null; // example.com | null
 let foundDebugDomain = false;
 
 const warnOnceUrl = new Set();
+/**
+ * Warn at most once per (url, isWhite) key.
+ *
+ * @param {string} url
+ * @param {boolean} isWhite
+ * @param {...any} message
+ */
 const warnOnce = (url, isWhite, ...message) => {
   const key = `${url}${isWhite ? 'white' : 'black'}`;
   if (warnOnceUrl.has(key)) {
@@ -153,6 +160,9 @@ async function processFilterRules(filterRulesUrl, fallbackUrls) {
   let downloadTime = 0;
   const gorhill = await getGorhillPublicSuffixPromise();
 
+  /**
+   * @param {string} line
+   */
   const lineCb = (line) => {
     const result = parse(line, gorhill);
     if (result) {

+ 0 - 48
Build/lib/process-line.js

@@ -1,48 +0,0 @@
-/* eslint-disable camelcase -- cache index access */
-
-/**
- * If line is commented out or empty, return null.
- * Otherwise, return trimmed line.
- *
- * @param {string} line
- */
-const processLine = (line) => {
-  if (!line) {
-    return null;
-  }
-
-  const line_0 = line[0];
-
-  if (
-    line_0 === '#'
-    || line_0 === ' '
-    || line_0 === '\r'
-    || line_0 === '\n'
-    || line_0 === '!'
-  ) {
-    return null;
-  }
-
-  const trimmed = line.trim();
-  if (trimmed === '') {
-    return null;
-  }
-
-  return trimmed;
-};
-module.exports.processLine = processLine;
-
-/**
- * @param {import('readline').ReadLine} rl
- */
-module.exports.processLineFromReadline = async (rl) => {
-  /** @type {string[]} */
-  const res = [];
-  for await (const line of rl) {
-    const l = processLine(line);
-    if (l) {
-      res.push(l);
-    }
-  }
-  return res;
-};

+ 35 - 0
Build/lib/process-line.ts

@@ -0,0 +1,35 @@
+export const processLine = (line: string): string | null => {
+  if (!line) {
+    return null;
+  }
+
+  const line_0: string = line[0];
+
+  if (
+    line_0 === '#'
+    || line_0 === ' '
+    || line_0 === '\r'
+    || line_0 === '\n'
+    || line_0 === '!'
+  ) {
+    return null;
+  }
+
+  const trimmed: string = line.trim();
+  if (trimmed === '') {
+    return null;
+  }
+
+  return trimmed;
+};
+
+export const processLineFromReadline = async (rl: AsyncGenerator<string>): Promise<string[]> => {
+  const res: string[] = [];
+  for await (const line of rl) {
+    const l: string | null = processLine(line);
+    if (l) {
+      res.push(l);
+    }
+  }
+  return res;
+};

+ 6 - 14
Build/lib/reject-data-source.js → Build/lib/reject-data-source.ts

@@ -1,6 +1,4 @@
-// @ts-check
-/** @type {[string, boolean][]} */
-const HOSTS = [
+export const HOSTS: [string, boolean][] = [
   // ['https://pgl.yoyo.org/adservers/serverlist.php?hostformat=hosts&showintro=0&mimetype=plaintext', false],
   ['https://raw.githubusercontent.com/hoshsadiq/adblock-nocoin-list/master/hosts.txt', false],
   ['https://raw.githubusercontent.com/crazy-max/WindowsSpyBlocker/master/data/hosts/spy.txt', false],
@@ -9,7 +7,7 @@ const HOSTS = [
   ['https://raw.githubusercontent.com/durablenapkin/block/master/luminati.txt', false]
 ];
 
-const ADGUARD_FILTERS = /** @type {const} */([
+export const ADGUARD_FILTERS = [
   // EasyList
   [
     'https://easylist.to/easylist/easylist.txt',
@@ -146,9 +144,9 @@ const ADGUARD_FILTERS = /** @type {const} */([
   'https://paulgb.github.io/BarbBlock/blacklists/ublock-origin.txt',
   // Brave First Party & First Party CNAME
   'https://raw.githubusercontent.com/brave/adblock-lists/master/brave-lists/brave-firstparty.txt'
-]);
+] as const;
 
-const PREDEFINED_WHITELIST = [
+export const PREDEFINED_WHITELIST = [
   'localhost',
   'broadcasthost',
   'ip6-loopback',
@@ -194,11 +192,11 @@ const PREDEFINED_WHITELIST = [
   'vlscppe.microsoft.com'
 ];
 
-const PREDEFINED_ENFORCED_BACKLIST = [
+export const PREDEFINED_ENFORCED_BACKLIST = [
   'telemetry.mozilla.org'
 ];
 
-const PREDEFINED_ENFORCED_WHITELIST = [
+export const PREDEFINED_ENFORCED_WHITELIST = [
   'godaddysites.com',
   'web.app',
   'firebaseapp.com',
@@ -219,9 +217,3 @@ const PREDEFINED_ENFORCED_WHITELIST = [
   'blogspot.com',
   'appspot.com'
 ];
-
-module.exports.HOSTS = HOSTS;
-module.exports.ADGUARD_FILTERS = ADGUARD_FILTERS;
-module.exports.PREDEFINED_WHITELIST = PREDEFINED_WHITELIST;
-module.exports.PREDEFINED_ENFORCED_BACKLIST = PREDEFINED_ENFORCED_BACKLIST;
-module.exports.PREDEFINED_ENFORCED_WHITELIST = PREDEFINED_ENFORCED_WHITELIST;

+ 29 - 33
Build/lib/trie.js → Build/lib/trie.ts

@@ -2,23 +2,23 @@
  * Suffix Trie based on Mnemonist Trie
  */
 
-const SENTINEL = String.fromCodePoint(0);
+export const SENTINEL: string = String.fromCodePoint(0);
 
 /**
  * @param {string[] | Set<string>} [from]
  */
-const createTrie = (from) => {
-  let size = 0;
-  const root = {};
+export const createTrie = (from?: string[] | Set<string>) => {
+  let size: number = 0;
+  const root: any = {};
 
   /**
    * Method used to add the given prefix to the trie.
    *
    * @param  {string} suffix - Prefix to follow.
    */
-  const add = (suffix) => {
-    let node = root;
-    let token;
+  const add = (suffix: string): void => {
+    let node: any = root;
+    let token: string;
     for (let i = suffix.length - 1; i >= 0; i--) {
       token = suffix[i];
       node[token] ||= {};
@@ -35,9 +35,9 @@ const createTrie = (from) => {
   /**
    * @param {string} suffix
    */
-  const contains = (suffix) => {
-    let node = root;
-    let token;
+  const contains = (suffix: string): boolean => {
+    let node: any = root;
+    let token: string;
 
     for (let i = suffix.length - 1; i >= 0; i--) {
       token = suffix[i];
@@ -56,10 +56,10 @@ const createTrie = (from) => {
    * @param  {boolean} [includeEqualWithSuffix]
    * @return {string[]}
    */
-  const find = (suffix, includeEqualWithSuffix = true) => {
-    let node = root;
-    const matches = [];
-    let token;
+  const find = (suffix: string, includeEqualWithSuffix: boolean = true): string[] => {
+    let node: any = root;
+    const matches: string[] = [];
+    let token: string;
 
     for (let i = suffix.length - 1; i >= 0; i--) {
       token = suffix[i];
@@ -70,15 +70,15 @@ const createTrie = (from) => {
     }
 
     // Performing DFS from prefix
-    const nodeStack = [node];
+    const nodeStack: any[] = [node];
 
-    const suffixStack = [suffix];
-    let k;
+    const suffixStack: string[] = [suffix];
+    let k: string;
 
-    let $suffix = suffix;
+    let $suffix: string = suffix;
 
     while (nodeStack.length) {
-      $suffix = suffixStack.pop();
+      $suffix = suffixStack.pop()!;
       node = nodeStack.pop();
 
       // eslint-disable-next-line guard-for-in -- plain object
@@ -105,12 +105,12 @@ const createTrie = (from) => {
    * @param  {string} suffix - Prefix to delete.
    * @return {boolean}
    */
-  const remove = (suffix) => {
-    let node = root;
-    let toPrune = null;
-    let tokenToPrune = null;
-    let parent;
-    let token;
+  const remove = (suffix: string): boolean => {
+    let node: any = root;
+    let toPrune: any = null;
+    let tokenToPrune: string | null = null;
+    let parent: any;
+    let token: string;
 
     for (let i = suffix.length - 1; i >= 0; i--) {
       token = suffix[i];
@@ -138,7 +138,7 @@ const createTrie = (from) => {
 
     size--;
 
-    if (toPrune) {
+    if (tokenToPrune) {
       delete toPrune[tokenToPrune];
     } else {
       delete node[SENTINEL];
@@ -153,8 +153,8 @@ const createTrie = (from) => {
    * @param  {string} suffix - Prefix to check.
    * @return {boolean}
    */
-  const has = (suffix) => {
-    let node = root;
+  const has = (suffix: string): boolean => {
+    let node: any = root;
 
     for (let i = suffix.length - 1; i >= 0; i--) {
       node = node[suffix[i]];
@@ -445,8 +445,4 @@ const createTrie = (from) => {
 //   };
 // }
 
-/**
- * Exporting.
- */
-module.exports.SENTINEL = SENTINEL;
-module.exports = createTrie;
+export default createTrie;

+ 1 - 1
Build/validate-domainset.js

@@ -71,6 +71,6 @@ const validate = task(__filename, async () => {
 });
 module.exports.validate = validate;
 
-if (require.main === module) {
+if (import.meta.main) {
   validate();
 }

+ 1 - 1
Source/domainset/cdn.conf

@@ -298,7 +298,7 @@ mir-s3-cdn-cf.behance.net
 # >> Amazon CDN
 .ssl-images-amazon.com
 .media-amazon.com
-.cloudfront.net
+# .cloudfront.net is a public suffix; handled via DOMAIN-SUFFIX in Source/non_ip/cdn.conf instead
 # AWS
 .awsstatic.com
 cdn.assets.as2.amazonaws.com

+ 2 - 0
Source/non_ip/cdn.conf

@@ -1,5 +1,7 @@
 # $ custom_build_script
 
+# >> Amazon CloudFront
+DOMAIN-SUFFIX,cloudfront.net
 # >> GitHub Pages
 DOMAIN-SUFFIX,github.io
 # >> GitHub

BIN
bun.lockb


+ 4 - 2
eslint.config.js

@@ -9,10 +9,12 @@ module.exports = require('eslint-config-sukka').sukka({
       }
     }
   },
-  node: true
+  node: true,
+  ts: true
 }, {
   rules: {
     'sukka/unicorn/prefer-math-trunc': 'off',
-    'sukka/unicorn/prefer-number-properties': ['warn', { checkInfinity: false }]
+    'sukka/unicorn/prefer-number-properties': ['warn', { checkInfinity: false }],
+    'n/no-missing-require': 'off'
   }
 });

+ 3 - 3
package.json

@@ -29,8 +29,7 @@
     "punycode": "^2.3.1",
     "table": "^6.8.1",
     "tar": "^6.2.0",
-    "tldts": "^6.0.19",
-    "undici": "5.27.0"
+    "tldts": "^6.0.19"
   },
   "devDependencies": {
     "@eslint-sukka/node": "^4.1.7",
@@ -39,7 +38,8 @@
     "chai": "4.3.10",
     "eslint-config-sukka": "4.1.7",
     "eslint-formatter-sukka": "4.1.7",
-    "mocha": "^10.2.0"
+    "mocha": "^10.2.0",
+    "typescript": "^5.2.2"
   },
   "resolutions": {
     "has": "npm:@nolyfill/has@latest"

+ 5 - 1
tsconfig.json

@@ -14,5 +14,9 @@
     "forceConsistentCasingInFileNames": true,
     "strict": true,
     "skipLibCheck": true
-  }
+  },
+  "include": [
+    "./Build/**/*.js",
+    "./Build/**/*.ts"
+  ]
 }