build-reject-domainset.ts

// @ts-check
import path from 'path';
import { processHosts, processFilterRules, processDomainLists } from './lib/parse-filter';
import { createTrie } from './lib/trie';
import { HOSTS, ADGUARD_FILTERS, PREDEFINED_WHITELIST, DOMAIN_LISTS } from './lib/reject-data-source';
import { createRuleset, compareAndWriteFile } from './lib/create-file';
import { domainDeduper } from './lib/domain-deduper';
import createKeywordFilter from './lib/aho-corasick';
import { readFileByLine, readFileIntoProcessedArray } from './lib/fetch-text-by-line';
import { buildParseDomainMap, sortDomains } from './lib/stable-sort-domain';
import { task } from './trace';
// tldts-experimental is much faster than tldts, but slightly less accurate
// (since it is hash-based). The result is still deterministic, which is
// good enough for creating a simple stat of reject hosts.
import * as tldts from 'tldts-experimental';
import { SHARED_DESCRIPTION } from './lib/constants';
import { getPhishingDomains } from './lib/get-phishing-domains';
import { setAddFromArray, setAddFromArrayCurried } from './lib/set-add-from-array';
import { sort } from './lib/timsort';
import { looseTldtsOpt } from './constants/loose-tldts-opt';

const getRejectSukkaConfPromise = readFileIntoProcessedArray(path.resolve(import.meta.dir, '../Source/domainset/reject_sukka.conf'));
export const buildRejectDomainSet = task(import.meta.path, async (span) => {
  /** Whitelists */
  const filterRuleWhitelistDomainSets = new Set(PREDEFINED_WHITELIST);
  const domainSets = new Set<string>();
  const appendArrayToDomainSets = setAddFromArrayCurried(domainSets);
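
  /**
   * `setAddFromArrayCurried` comes from ./lib; a minimal sketch of the shape
   * assumed here (hypothetical, not the actual implementation):
   *
   *   const setAddFromArrayCurried = <T>(set: Set<T>) => (arr: T[]): void => {
   *     for (let i = 0, len = arr.length; i < len; i++) set.add(arr[i]);
   *   };
   *
   * Binding the target set once lets the same adder be passed straight to
   * `.then(...)` for every data source below.
   */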

  // Parse from AdGuard Filters
  const shouldStop = await span
    .traceChild('download and process hosts / adblock filter rules')
    .traceAsyncFn(async (childSpan) => {
      // eslint-disable-next-line sukka/no-single-return -- not single return
      let shouldStop = false;
      await Promise.all([
        // Parse from remote hosts & domain lists
        HOSTS.map(entry => processHosts(childSpan, ...entry).then(appendArrayToDomainSets)),
        DOMAIN_LISTS.map(entry => processDomainLists(childSpan, ...entry).then(appendArrayToDomainSets)),
        ADGUARD_FILTERS.map(
          input => processFilterRules(childSpan, ...input)
            .then(({ white, black, foundDebugDomain }) => {
              if (foundDebugDomain) {
                // eslint-disable-next-line sukka/no-single-return -- not single return
                shouldStop = true;
                // we should not bail out here, as we want to see the full matches from every data source
              }
              setAddFromArray(filterRuleWhitelistDomainSets, white);
              setAddFromArray(domainSets, black);
            })
        ),
        ([
          'https://raw.githubusercontent.com/AdguardTeam/AdGuardSDNSFilter/master/Filters/exceptions.txt',
          'https://raw.githubusercontent.com/AdguardTeam/AdGuardSDNSFilter/master/Filters/exclusions.txt'
        ].map(
          input => processFilterRules(childSpan, input)
            .then(({ white, black }) => {
              // Both sides of AdGuard's exception/exclusion lists are treated as whitelists here
              setAddFromArray(filterRuleWhitelistDomainSets, white);
              setAddFromArray(filterRuleWhitelistDomainSets, black);
            })
        )),
        getPhishingDomains(childSpan).then(appendArrayToDomainSets),
        getRejectSukkaConfPromise.then(appendArrayToDomainSets)
      ].flat());
      // eslint-disable-next-line sukka/no-single-return -- not single return
      return shouldStop;
    });
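
  /**
   * Note on the shape above: `HOSTS.map(...)`, `DOMAIN_LISTS.map(...)` and
   * `ADGUARD_FILTERS.map(...)` each produce an array of promises, so the
   * argument to `Promise.all` is a nested array that `.flat()` collapses one
   * level before awaiting, roughly:
   *
   *   await Promise.all([[p1, p2], [p3], p4].flat());
   *
   * `shouldStop` is only flagged (never thrown) so that every data source
   * still completes and all debug-domain matches are reported in one run.
   */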
  if (shouldStop) {
    process.exit(1);
  }

  console.log(`Imported ${domainSets.size} rules from Hosts / AdBlock Filter Rules & reject_sukka.conf!`);

  // Dedupe domainSets
  await span.traceChildAsync('dedupe from black keywords/suffixes', async (childSpan) => {
    /** Collect DOMAIN-KEYWORD from non_ip/reject.conf for deduplication */
    const domainKeywordsSet = new Set<string>();

    await childSpan.traceChildAsync('collect keywords/suffixes', async () => {
      for await (const line of readFileByLine(path.resolve(import.meta.dir, '../Source/non_ip/reject.conf'))) {
        const [type, value] = line.split(',');
        if (type === 'DOMAIN-KEYWORD') {
          domainKeywordsSet.add(value.trim());
        } else if (type === 'DOMAIN-SUFFIX') {
          domainSets.add(`.${value.trim()}`); // Add to domainSets for later deduplication
        }
      }
    });

    // Perform kwfilter to remove as many domains as possible from domainSets before creating the trie
    childSpan.traceChildSync('dedupe from black keywords', () => {
      const kwfilter = createKeywordFilter(domainKeywordsSet);
      for (const domain of domainSets) {
        // Drop any domain that contains a blacklisted keyword
        if (kwfilter(domain)) {
          domainSets.delete(domain);
        }
      }
    });
  });
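
  /**
   * `createKeywordFilter` (./lib/aho-corasick) presumably builds an
   * Aho-Corasick automaton over the keywords, so each domain is scanned in a
   * single pass regardless of how many keywords exist. Illustrative usage,
   * assuming the predicate shape used above (keywords are hypothetical):
   *
   *   const kwfilter = createKeywordFilter(new Set(['tracking', 'telemetry']));
   *   kwfilter('telemetry.example.com'); // true  -> removed from domainSets
   *   kwfilter('cdn.example.com');       // false -> kept
   */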
  const trie = span.traceChildSync('create smol trie', () => createTrie(domainSets, true, true));
  span.traceChildSync('dedupe from white suffixes', () => filterRuleWhitelistDomainSets.forEach(trie.whitelist));

  // Dedupe domainSets
  const dedupedDomainArray = span.traceChildSync('dedupe from covered subdomain', () => domainDeduper(trie));
  console.log(`Final size ${dedupedDomainArray.length}`);
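
  /**
   * Illustrative behaviour assumed for `domainDeduper`: a suffix entry such
   * as `.example.com` covers every subdomain, so more specific entries are
   * dropped as redundant:
   *
   *   ['.example.com', 'a.example.com', 'b.a.example.com'] -> ['.example.com']
   *
   * Whitelisted suffixes were already pruned from the trie in the step above.
   */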

  const {
    domainMap: domainArrayMainDomainMap,
    subdomainMap: domainArraySubdomainMap
  } = span.traceChildSync(
    'build map for stat and sort',
    () => buildParseDomainMap(dedupedDomainArray)
  );

  // Create reject stats
  const rejectDomainsStats: Array<[string, number]> = span
    .traceChild('create reject stats')
    .traceSyncFn(() => {
      const statMap = dedupedDomainArray.reduce<Map<string, number>>((acc, cur) => {
        const suffix = domainArrayMainDomainMap.get(cur);
        if (suffix) {
          acc.set(suffix, (acc.get(suffix) ?? 0) + 1);
        }
        return acc;
      }, new Map());
      // Keep only apex domains covering at least 10 entries, sorted by count (descending), then by name
      return sort(Array.from(statMap.entries()).filter(a => a[1] > 9), (a, b) => (b[1] - a[1]) || a[0].localeCompare(b[0]));
    });
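
  /**
   * Example of the ordering above (illustrative values): apex domains with
   * fewer than 10 entries are dropped, the rest sort by count descending,
   * with ties broken alphabetically:
   *
   *   [['a.net', 12], ['b.com', 64], ['a.com', 12]]
   *     -> [['b.com', 64], ['a.com', 12], ['a.net', 12]]
   */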

  const description = [
    ...SHARED_DESCRIPTION,
    '',
    'The domainset supports AD blocking, tracking protection, privacy protection, anti-phishing, and anti-mining',
    '',
    'Build from:',
    ...HOSTS.map(host => ` - ${host[0]}`),
    ...DOMAIN_LISTS.map(domainList => ` - ${domainList[0]}`),
    ...ADGUARD_FILTERS.map(filter => ` - ${Array.isArray(filter) ? filter[0] : filter}`),
    ' - https://curbengh.github.io/phishing-filter/phishing-filter-hosts.txt',
    ' - https://phishing.army/download/phishing_army_blocklist.txt'
  ];

  return Promise.all([
    createRuleset(
      span,
      'Sukka\'s Ruleset - Reject Base',
      description,
      new Date(),
      span.traceChildSync('sort reject domainset', () => sortDomains(dedupedDomainArray, domainArrayMainDomainMap, domainArraySubdomainMap)),
      'domainset',
      path.resolve(import.meta.dir, '../List/domainset/reject.conf'),
      path.resolve(import.meta.dir, '../Clash/domainset/reject.txt')
    ),
    compareAndWriteFile(
      span,
      // `padEnd` keeps the count in a fixed column and, unlike `' '.repeat(100 - domain.length)`,
      // does not throw if a domain ever exceeds 100 characters
      rejectDomainsStats.map(([domain, count]) => `${domain.padEnd(100, ' ')}${count}`),
      path.resolve(import.meta.dir, '../Internal/reject-stats.txt')
    )
  ]);
});
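
/**
 * For reference, each line of Internal/reject-stats.txt written above is
 * fixed-width: the apex domain left-aligned in a 100-character column,
 * immediately followed by its count (illustrative values):
 *
 *   `${'doubleclick.net'.padEnd(100, ' ')}${1234}`
 *   // -> 'doubleclick.net<85 spaces>1234'
 */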

if (import.meta.main) {
  buildRejectDomainSet();
}