// build-reject-domainset.js
  1. // @ts-check
  2. const { promises: fsPromises } = require('fs');
  3. const fse = require('fs-extra');
  4. const { resolve: pathResolve } = require('path');
  5. const { processHosts, processFilterRules, preprocessFullDomainSetBeforeUsedAsWorkerData } = require('./lib/parse-filter');
  6. const { getDomain } = require('tldts');
  7. const Trie = require('./lib/trie');
  8. const { HOSTS, ADGUARD_FILTERS, PREDEFINED_WHITELIST, PREDEFINED_ENFORCED_BACKLIST } = require('./lib/reject-data-source');
  9. const { withBannerArray } = require('./lib/with-banner');
  10. const { compareAndWriteFile } = require('./lib/string-array-compare');
  11. /** Whitelists */
  12. const filterRuleWhitelistDomainSets = new Set(PREDEFINED_WHITELIST);
  13. /** @type {Set<string>} Dedupe domains inclued by DOMAIN-KEYWORD */
  14. const domainKeywordsSet = new Set();
  15. /** @type {Set<string>} Dedupe domains included by DOMAIN-SUFFIX */
  16. const domainSuffixSet = new Set();
  17. (async () => {
  18. console.time('Total Time - build-reject-domain-set');
  19. /** @type Set<string> */
  20. const domainSets = new Set();
  21. console.log('Downloading hosts file...');
  22. console.time('* Download and process Hosts');
  23. // Parse from remote hosts & domain lists
  24. (await Promise.all(HOSTS.map(entry => processHosts(entry[0], entry[1]))))
  25. .forEach(hosts => {
  26. hosts.forEach(host => {
  27. if (host) {
  28. domainSets.add(host);
  29. }
  30. });
  31. });
  32. console.timeEnd('* Download and process Hosts');
  33. let previousSize = domainSets.size;
  34. console.log(`Import ${previousSize} rules from hosts files!`);
  35. // Parse from AdGuard Filters
  36. console.time('* Download and process AdBlock Filter Rules');
  37. let shouldStop = false;
  38. await Promise.all(ADGUARD_FILTERS.map(input => {
  39. const promise = typeof input === 'string'
  40. ? processFilterRules(input, undefined, false)
  41. : processFilterRules(input[0], input[1] || undefined, input[2] ?? false)
  42. return promise.then((i) => {
  43. if (i) {
  44. const { white, black, foundDebugDomain } = i;
  45. if (foundDebugDomain) {
  46. shouldStop = true;
  47. }
  48. white.forEach(i => {
  49. if (PREDEFINED_ENFORCED_BACKLIST.some(j => i.endsWith(j))) {
  50. return;
  51. }
  52. filterRuleWhitelistDomainSets.add(i);
  53. });
  54. black.forEach(i => domainSets.add(i));
  55. } else {
  56. process.exit(1);
  57. }
  58. });
  59. }));
  60. await Promise.all([
  61. 'https://raw.githubusercontent.com/AdguardTeam/AdGuardSDNSFilter/master/Filters/exceptions.txt',
  62. 'https://raw.githubusercontent.com/AdguardTeam/AdGuardSDNSFilter/master/Filters/exclusions.txt'
  63. ].map(
  64. input => processFilterRules(input).then((i) => {
  65. if (i) {
  66. const { white, black } = i;
  67. white.forEach(i => {
  68. if (PREDEFINED_ENFORCED_BACKLIST.some(j => i.endsWith(j))) {
  69. return;
  70. }
  71. filterRuleWhitelistDomainSets.add(i)
  72. });
  73. black.forEach(i => {
  74. if (PREDEFINED_ENFORCED_BACKLIST.some(j => i.endsWith(j))) {
  75. return;
  76. }
  77. filterRuleWhitelistDomainSets.add(i)
  78. });
  79. } else {
  80. process.exit(1);
  81. }
  82. })
  83. ));
  84. console.timeEnd('* Download and process AdBlock Filter Rules');
  85. if (shouldStop) {
  86. process.exit(1);
  87. }
  88. previousSize = domainSets.size - previousSize;
  89. console.log(`Import ${previousSize} rules from adguard filters!`);
  90. await fsPromises.readFile(pathResolve(__dirname, '../Source/domainset/reject_sukka.conf'), { encoding: 'utf-8' }).then(data => {
  91. data.split('\n').forEach(line => {
  92. const trimmed = line.trim();
  93. if (
  94. line.startsWith('#')
  95. || line.startsWith(' ')
  96. || line.startsWith('\r')
  97. || line.startsWith('\n')
  98. || trimmed === ''
  99. ) {
  100. return;
  101. }
  102. domainSets.add(trimmed);
  103. });
  104. });
  105. previousSize = domainSets.size - previousSize;
  106. console.log(`Import ${previousSize} rules from reject_sukka.conf!`);
  107. await Promise.all([
  108. // Copy reject_sukka.conf for backward compatibility
  109. fse.copy(pathResolve(__dirname, '../Source/domainset/reject_sukka.conf'), pathResolve(__dirname, '../List/domainset/reject_sukka.conf')),
  110. fsPromises.readFile(pathResolve(__dirname, '../List/non_ip/reject.conf'), { encoding: 'utf-8' }).then(data => {
  111. data.split('\n').forEach(line => {
  112. if (line.startsWith('DOMAIN-KEYWORD')) {
  113. const [, ...keywords] = line.split(',');
  114. domainKeywordsSet.add(keywords.join(',').trim());
  115. } else if (line.startsWith('DOMAIN-SUFFIX')) {
  116. const [, ...keywords] = line.split(',');
  117. domainSuffixSet.add(keywords.join(',').trim());
  118. }
  119. });
  120. }),
  121. // Read Special Phishing Suffix list
  122. fsPromises.readFile(pathResolve(__dirname, '../List/domainset/reject_phishing.conf'), { encoding: 'utf-8' }).then(data => {
  123. data.split('\n').forEach(line => {
  124. const trimmed = line.trim();
  125. if (
  126. line.startsWith('#')
  127. || line.startsWith(' ')
  128. || line.startsWith('\r')
  129. || line.startsWith('\n')
  130. || trimmed === ''
  131. ) {
  132. return;
  133. }
  134. domainSuffixSet.add(trimmed);
  135. });
  136. })
  137. ]);
  138. console.log(`Import ${domainKeywordsSet.size} black keywords and ${domainSuffixSet.size} black suffixes!`);
  139. previousSize = domainSets.size;
  140. // Dedupe domainSets
  141. console.log(`Start deduping from black keywords/suffixes! (${previousSize})`);
  142. console.time(`* Dedupe from black keywords/suffixes`);
  143. const trie1 = Trie.from(Array.from(domainSets));
  144. domainSuffixSet.forEach(suffix => {
  145. trie1.find(suffix, true).forEach(f => domainSets.delete(f));
  146. });
  147. filterRuleWhitelistDomainSets.forEach(suffix => {
  148. trie1.find(suffix, true).forEach(f => domainSets.delete(f));
  149. });
  150. // Build whitelist trie, to handle case like removing `g.msn.com` due to white `.g.msn.com` (`@@||g.msn.com`)
  151. const trieWhite = Trie.from(Array.from(filterRuleWhitelistDomainSets));
  152. for (const domain of domainSets) {
  153. if (domain[0] !== '.' && trieWhite.has(`.${domain}`)) {
  154. domainSets.delete(domain);
  155. continue;
  156. }
  157. if (domain[0] === '.') {
  158. const found = trieWhite.find(domain);
  159. if (found.length > 0) {
  160. domainSets.delete(domain);
  161. continue;
  162. }
  163. }
  164. // Remove keyword
  165. if (isMatchKeyword(domain)) {
  166. domainSets.delete(domain);
  167. }
  168. }
  169. console.timeEnd(`* Dedupe from black keywords/suffixes`);
  170. console.log(`Deduped ${previousSize} - ${domainSets.size} = ${previousSize - domainSets.size} from black keywords and suffixes!`);
  171. previousSize = domainSets.size;
  172. // Dedupe domainSets
  173. console.log(`Start deduping! (${previousSize})`);
  174. const START_TIME = Date.now();
  175. const domainSetsArray = Array.from(domainSets);
  176. const trie2 = Trie.from(domainSetsArray);
  177. const fullsetDomainStartsWithADot = preprocessFullDomainSetBeforeUsedAsWorkerData(domainSetsArray);
  178. console.log(fullsetDomainStartsWithADot.length);
  179. for (let j = 0, len = fullsetDomainStartsWithADot.length; j < len; j++) {
  180. const domainStartsWithADotAndFromFullSet = fullsetDomainStartsWithADot[j];
  181. const found = trie2.find(domainStartsWithADotAndFromFullSet, false);
  182. if (found.length) {
  183. found.forEach(f => {
  184. domainSets.delete(f);
  185. })
  186. }
  187. const a = domainStartsWithADotAndFromFullSet.slice(1);
  188. if (trie2.has(a)) {
  189. domainSets.delete(a);
  190. }
  191. }
  192. console.log(`* Dedupe from covered subdomain - ${(Date.now() - START_TIME) / 1000}s`);
  193. console.log(`Deduped ${previousSize - domainSets.size} rules!`);
  194. console.time('* Write reject.conf');
  195. const sorter = (a, b) => {
  196. if (a.domain > b.domain) {
  197. return 1;
  198. }
  199. if (a.domain < b.domain) {
  200. return -1;
  201. }
  202. return 0;
  203. };
  204. const sortedDomainSets = Array.from(domainSets)
  205. .map((v) => {
  206. return { v, domain: getDomain(v.charCodeAt(0) === 46 ? v.slice(1) : v)?.toLowerCase() || v };
  207. })
  208. .sort(sorter)
  209. .map((i) => {
  210. return i.v;
  211. });
  212. await compareAndWriteFile(
  213. withBannerArray(
  214. 'Sukka\'s Surge Rules - Reject Base',
  215. [
  216. 'License: AGPL 3.0',
  217. 'Homepage: https://ruleset.skk.moe',
  218. 'GitHub: https://github.com/SukkaW/Surge',
  219. '',
  220. 'The domainset supports AD blocking, tracking protection, privacy protection, anti-phishing, anti-mining',
  221. '',
  222. 'Build from:',
  223. ...HOSTS.map(host => ` - ${host[0]}`),
  224. ...ADGUARD_FILTERS.map(filter => ` - ${Array.isArray(filter) ? filter[0] : filter}`),
  225. ],
  226. new Date(),
  227. sortedDomainSets
  228. ),
  229. pathResolve(__dirname, '../List/domainset/reject.conf')
  230. );
  231. console.timeEnd('* Write reject.conf');
  232. console.timeEnd('Total Time - build-reject-domain-set');
  233. })();
  234. /**
  235. * @param {string} domain
  236. */
  237. function isMatchKeyword(domain) {
  238. for (const keyword of domainKeywordsSet) {
  239. if (domain.includes(keyword)) {
  240. return true;
  241. }
  242. }
  243. return false;
  244. }