// build-reject-domainset.js
  1. const { promises: fsPromises } = require('fs');
  2. const fse = require('fs-extra');
  3. const { resolve: pathResolve } = require('path');
  4. const Piscina = require('piscina');
  5. const { processHosts, processFilterRules, preprocessFullDomainSetBeforeUsedAsWorkerData } = require('./lib/parse-filter');
  6. const cpuCount = require('os').cpus().length;
  7. const { isCI } = require('ci-info');
  8. const threads = isCI ? cpuCount : cpuCount / 2;
  9. const { HOSTS, ADGUARD_FILTERS, PREDEFINED_WHITELIST } = require('./lib/reject-data-source');
  10. const filterRuleWhitelistDomainSets = new Set(PREDEFINED_WHITELIST);
  11. (async () => {
  12. console.time('Total Time - build-reject-domain-set');
  13. /** @type Set<string> */
  14. const domainSets = new Set();
  15. console.log('Downloading hosts file...');
  16. console.time('* Download and process Hosts');
  17. // Parse from remote hosts & domain lists
  18. (await Promise.all(
  19. HOSTS.map(entry => processHosts(entry[0], entry[1]))
  20. )).forEach(hosts => {
  21. hosts.forEach(host => {
  22. if (host) {
  23. domainSets.add(host);
  24. }
  25. });
  26. });
  27. console.timeEnd('* Download and process Hosts');
  28. let previousSize = domainSets.size;
  29. console.log(`Import ${previousSize} rules from hosts files!`);
  30. // Parse from AdGuard Filters
  31. console.time('* Download and process AdBlock Filter Rules');
  32. (await Promise.all(ADGUARD_FILTERS.map(input => {
  33. if (typeof input === 'string') {
  34. return processFilterRules(input);
  35. }
  36. if (Array.isArray(input) && input.length === 2) {
  37. return processFilterRules(input[0], input[1]);
  38. }
  39. }))).forEach(({ white, black, foundDebugDomain }) => {
  40. if (foundDebugDomain) {
  41. process.exit(1);
  42. };
  43. white.forEach(i => filterRuleWhitelistDomainSets.add(i));
  44. black.forEach(i => domainSets.add(i));
  45. });
  46. console.timeEnd('* Download and process AdBlock Filter Rules');
  47. previousSize = domainSets.size - previousSize;
  48. console.log(`Import ${previousSize} rules from adguard filters!`);
  49. await fsPromises.readFile(pathResolve(__dirname, '../Source/domainset/reject_sukka.conf'), { encoding: 'utf-8' }).then(data => {
  50. data.split('\n').forEach(line => {
  51. const trimmed = line.trim();
  52. if (
  53. line.startsWith('#')
  54. || line.startsWith(' ')
  55. || line.startsWith('\r')
  56. || line.startsWith('\n')
  57. || trimmed === ''
  58. ) {
  59. return;
  60. }
  61. domainSets.add(trimmed);
  62. });
  63. });
  64. // Copy reject_sukka.conf for backward compatibility
  65. await fse.copy(pathResolve(__dirname, '../Source/domainset/reject_sukka.conf'), pathResolve(__dirname, '../List/domainset/reject_sukka.conf'))
  66. previousSize = domainSets.size - previousSize;
  67. console.log(`Import ${previousSize} rules from reject_sukka.conf!`);
  68. // Read DOMAIN Keyword
  69. const domainKeywordsSet = new Set();
  70. const domainSuffixSet = new Set();
  71. await fsPromises.readFile(pathResolve(__dirname, '../List/non_ip/reject.conf'), { encoding: 'utf-8' }).then(data => {
  72. data.split('\n').forEach(line => {
  73. if (line.startsWith('DOMAIN-KEYWORD')) {
  74. const [, ...keywords] = line.split(',');
  75. domainKeywordsSet.add(keywords.join(',').trim());
  76. } else if (line.startsWith('DOMAIN-SUFFIX')) {
  77. const [, ...keywords] = line.split(',');
  78. domainSuffixSet.add(keywords.join(',').trim());
  79. }
  80. });
  81. });
  82. // Read Special Phishing Suffix list
  83. await fsPromises.readFile(pathResolve(__dirname, '../List/domainset/reject_phishing.conf'), { encoding: 'utf-8' }).then(data => {
  84. data.split('\n').forEach(line => {
  85. const trimmed = line.trim();
  86. if (
  87. line.startsWith('#')
  88. || line.startsWith(' ')
  89. || line.startsWith('\r')
  90. || line.startsWith('\n')
  91. || trimmed === ''
  92. ) {
  93. return;
  94. }
  95. domainSuffixSet.add(trimmed);
  96. });
  97. });
  98. console.log(`Import ${domainKeywordsSet.size} black keywords and ${domainSuffixSet.size} black suffixes!`);
  99. previousSize = domainSets.size;
  100. // Dedupe domainSets
  101. console.log(`Start deduping from black keywords/suffixes! (${previousSize})`);
  102. console.time(`* Dedupe from black keywords/suffixes`);
  103. for (const domain of domainSets) {
  104. let isTobeRemoved = false;
  105. for (const suffix of domainSuffixSet) {
  106. if (domain.endsWith(suffix)) {
  107. isTobeRemoved = true;
  108. break;
  109. }
  110. }
  111. if (!isTobeRemoved) {
  112. for (const keyword of domainKeywordsSet) {
  113. if (domain.includes(keyword)) {
  114. isTobeRemoved = true;
  115. break;
  116. }
  117. }
  118. }
  119. if (!isTobeRemoved) {
  120. if (isInWhiteList(domain)) {
  121. isTobeRemoved = true;
  122. }
  123. }
  124. if (isTobeRemoved) {
  125. domainSets.delete(domain);
  126. }
  127. }
  128. console.timeEnd(`* Dedupe from black keywords/suffixes`);
  129. console.log(`Deduped ${previousSize} - ${domainSets.size} = ${previousSize - domainSets.size} from black keywords and suffixes!`);
  130. previousSize = domainSets.size;
  131. // Dedupe domainSets
  132. console.log(`Start deduping! (${previousSize})`);
  133. const START_TIME = Date.now();
  134. const piscina = new Piscina({
  135. filename: pathResolve(__dirname, 'worker/build-reject-domainset-worker.js'),
  136. workerData: preprocessFullDomainSetBeforeUsedAsWorkerData([...domainSets]),
  137. idleTimeout: 50,
  138. minThreads: threads,
  139. maxThreads: threads
  140. });
  141. console.log(`Launching ${threads} threads...`)
  142. const tasksArray = Array.from(domainSets)
  143. .reduce((result, element, index) => {
  144. const chunk = index % threads;
  145. result[chunk] ??= [];
  146. result[chunk].push(element);
  147. return result;
  148. }, []);
  149. (
  150. await Promise.all(
  151. Array.from(domainSets)
  152. .reduce((result, element, index) => {
  153. const chunk = index % threads;
  154. result[chunk] ??= [];
  155. result[chunk].push(element);
  156. return result;
  157. }, [])
  158. .map(chunk => piscina.run({ chunk }, { name: 'dedupe' }))
  159. )
  160. ).forEach((result, taskIndex) => {
  161. const chunk = tasksArray[taskIndex];
  162. for (let i = 0, len = result.length; i < len; i++) {
  163. if (result[i]) {
  164. domainSets.delete(chunk[i]);
  165. }
  166. }
  167. });
  168. console.log(`* Dedupe from covered subdomain - ${(Date.now() - START_TIME) / 1000}s`);
  169. console.log(`Deduped ${previousSize - domainSets.size} rules!`);
  170. await Promise.all([
  171. fsPromises.writeFile(
  172. pathResolve(__dirname, '../List/domainset/reject.conf'),
  173. `${[...domainSets].join('\n')}\n`,
  174. { encoding: 'utf-8' }
  175. ),
  176. piscina.destroy()
  177. ]);
  178. console.timeEnd('Total Time - build-reject-domain-set');
  179. if (piscina.queueSize === 0) {
  180. process.exit(0);
  181. }
  182. })();
  183. function isInWhiteList (domain) {
  184. for (const white of filterRuleWhitelistDomainSets) {
  185. if (domain === white || domain.endsWith(white)) {
  186. return true;
  187. }
  188. }
  189. return false;
  190. }