// build-reject-domainset.js (8.6 KB)
  1. const { promises: fsPromises } = require('fs');
  2. const fse = require('fs-extra');
  3. const { resolve: pathResolve } = require('path');
  4. const Piscina = require('piscina');
  5. const { processHosts, processFilterRules, preprocessFullDomainSetBeforeUsedAsWorkerData } = require('./lib/parse-filter');
  6. const cpuCount = require('os').cpus().length;
  7. const { isCI } = require('ci-info');
  8. const threads = isCI ? cpuCount : cpuCount / 2;
  9. const { HOSTS, ADGUARD_FILTERS, PREDEFINED_WHITELIST, PREDEFINED_ENFORCED_BACKLIST } = require('./lib/reject-data-source');
  10. const { withBannerArray } = require('./lib/with-banner');
  11. const { compareAndWriteFile } = require('./lib/string-array-compare');
  12. const filterRuleWhitelistDomainSets = new Set(PREDEFINED_WHITELIST);
  13. (async () => {
  14. console.time('Total Time - build-reject-domain-set');
  15. /** @type Set<string> */
  16. const domainSets = new Set();
  17. console.log('Downloading hosts file...');
  18. console.time('* Download and process Hosts');
  19. // Parse from remote hosts & domain lists
  20. (await Promise.all(
  21. HOSTS.map(entry => processHosts(entry[0], entry[1]))
  22. )).forEach(hosts => {
  23. hosts.forEach(host => {
  24. if (host) {
  25. domainSets.add(host);
  26. }
  27. });
  28. });
  29. console.timeEnd('* Download and process Hosts');
  30. let previousSize = domainSets.size;
  31. console.log(`Import ${previousSize} rules from hosts files!`);
  32. // Parse from AdGuard Filters
  33. console.time('* Download and process AdBlock Filter Rules');
  34. let shouldStop = false;
  35. await Promise.all(ADGUARD_FILTERS.map(input => {
  36. const promise = typeof input === 'string'
  37. ? processFilterRules(input, undefined, false)
  38. : processFilterRules(input[0], input[1] ?? undefined, input[2] ?? false)
  39. return promise.then((i) => {
  40. if (i) {
  41. const { white, black, foundDebugDomain } = i;
  42. if (foundDebugDomain) {
  43. shouldStop = true;
  44. }
  45. white.forEach(i => {
  46. if (PREDEFINED_ENFORCED_BACKLIST.some(j => i.endsWith(j))) {
  47. return;
  48. }
  49. filterRuleWhitelistDomainSets.add(i);
  50. });
  51. black.forEach(i => domainSets.add(i));
  52. } else {
  53. process.exit(1);
  54. }
  55. });
  56. }));
  57. await Promise.all([
  58. 'https://raw.githubusercontent.com/AdguardTeam/AdGuardSDNSFilter/master/Filters/exceptions.txt',
  59. 'https://raw.githubusercontent.com/AdguardTeam/AdGuardSDNSFilter/master/Filters/exclusions.txt'
  60. ].map(
  61. input => processFilterRules(input).then((i) => {
  62. if (i) {
  63. const { white, black } = i;
  64. white.forEach(i => {
  65. if (PREDEFINED_ENFORCED_BACKLIST.some(j => i.endsWith(j))) {
  66. return;
  67. }
  68. filterRuleWhitelistDomainSets.add(i)
  69. });
  70. black.forEach(i => {
  71. if (PREDEFINED_ENFORCED_BACKLIST.some(j => i.endsWith(j))) {
  72. return;
  73. }
  74. filterRuleWhitelistDomainSets.add(i)
  75. });
  76. } else {
  77. process.exit(1);
  78. }
  79. })
  80. ));
  81. console.timeEnd('* Download and process AdBlock Filter Rules');
  82. if (shouldStop) {
  83. process.exit(1);
  84. }
  85. previousSize = domainSets.size - previousSize;
  86. console.log(`Import ${previousSize} rules from adguard filters!`);
  87. await fsPromises.readFile(pathResolve(__dirname, '../Source/domainset/reject_sukka.conf'), { encoding: 'utf-8' }).then(data => {
  88. data.split('\n').forEach(line => {
  89. const trimmed = line.trim();
  90. if (
  91. line.startsWith('#')
  92. || line.startsWith(' ')
  93. || line.startsWith('\r')
  94. || line.startsWith('\n')
  95. || trimmed === ''
  96. ) {
  97. return;
  98. }
  99. domainSets.add(trimmed);
  100. });
  101. });
  102. // Copy reject_sukka.conf for backward compatibility
  103. await fse.copy(pathResolve(__dirname, '../Source/domainset/reject_sukka.conf'), pathResolve(__dirname, '../List/domainset/reject_sukka.conf'))
  104. previousSize = domainSets.size - previousSize;
  105. console.log(`Import ${previousSize} rules from reject_sukka.conf!`);
  106. // Read DOMAIN Keyword
  107. const domainKeywordsSet = new Set();
  108. const domainSuffixSet = new Set();
  109. await fsPromises.readFile(pathResolve(__dirname, '../List/non_ip/reject.conf'), { encoding: 'utf-8' }).then(data => {
  110. data.split('\n').forEach(line => {
  111. if (line.startsWith('DOMAIN-KEYWORD')) {
  112. const [, ...keywords] = line.split(',');
  113. domainKeywordsSet.add(keywords.join(',').trim());
  114. } else if (line.startsWith('DOMAIN-SUFFIX')) {
  115. const [, ...keywords] = line.split(',');
  116. domainSuffixSet.add(keywords.join(',').trim());
  117. }
  118. });
  119. });
  120. // Read Special Phishing Suffix list
  121. await fsPromises.readFile(pathResolve(__dirname, '../List/domainset/reject_phishing.conf'), { encoding: 'utf-8' }).then(data => {
  122. data.split('\n').forEach(line => {
  123. const trimmed = line.trim();
  124. if (
  125. line.startsWith('#')
  126. || line.startsWith(' ')
  127. || line.startsWith('\r')
  128. || line.startsWith('\n')
  129. || trimmed === ''
  130. ) {
  131. return;
  132. }
  133. domainSuffixSet.add(trimmed);
  134. });
  135. });
  136. console.log(`Import ${domainKeywordsSet.size} black keywords and ${domainSuffixSet.size} black suffixes!`);
  137. previousSize = domainSets.size;
  138. // Dedupe domainSets
  139. console.log(`Start deduping from black keywords/suffixes! (${previousSize})`);
  140. console.time(`* Dedupe from black keywords/suffixes`);
  141. for (const domain of domainSets) {
  142. let isTobeRemoved = false;
  143. for (const suffix of domainSuffixSet) {
  144. if (domain.endsWith(suffix)) {
  145. isTobeRemoved = true;
  146. break;
  147. }
  148. }
  149. if (!isTobeRemoved) {
  150. for (const keyword of domainKeywordsSet) {
  151. if (domain.includes(keyword)) {
  152. isTobeRemoved = true;
  153. break;
  154. }
  155. }
  156. }
  157. if (!isTobeRemoved) {
  158. if (isInWhiteList(domain)) {
  159. isTobeRemoved = true;
  160. }
  161. }
  162. if (isTobeRemoved) {
  163. domainSets.delete(domain);
  164. }
  165. }
  166. console.timeEnd(`* Dedupe from black keywords/suffixes`);
  167. console.log(`Deduped ${previousSize} - ${domainSets.size} = ${previousSize - domainSets.size} from black keywords and suffixes!`);
  168. previousSize = domainSets.size;
  169. // Dedupe domainSets
  170. console.log(`Start deduping! (${previousSize})`);
  171. const START_TIME = Date.now();
  172. const piscina = new Piscina({
  173. filename: pathResolve(__dirname, 'worker/build-reject-domainset-worker.js'),
  174. workerData: preprocessFullDomainSetBeforeUsedAsWorkerData([...domainSets]),
  175. idleTimeout: 50,
  176. minThreads: threads,
  177. maxThreads: threads
  178. });
  179. console.log(`Launching ${threads} threads...`)
  180. const tasksArray = Array.from(domainSets)
  181. .reduce((result, element, index) => {
  182. const chunk = index % threads;
  183. result[chunk] ??= [];
  184. result[chunk].push(element);
  185. return result;
  186. }, []);
  187. (
  188. await Promise.all(
  189. Array.from(domainSets)
  190. .reduce((result, element, index) => {
  191. const chunk = index % threads;
  192. result[chunk] ??= [];
  193. result[chunk].push(element);
  194. return result;
  195. }, [])
  196. .map(chunk => piscina.run({ chunk }, { name: 'dedupe' }))
  197. )
  198. ).forEach((result, taskIndex) => {
  199. const chunk = tasksArray[taskIndex];
  200. for (let i = 0, len = result.length; i < len; i++) {
  201. if (result[i]) {
  202. domainSets.delete(chunk[i]);
  203. }
  204. }
  205. });
  206. console.log(`* Dedupe from covered subdomain - ${(Date.now() - START_TIME) / 1000}s`);
  207. console.log(`Deduped ${previousSize - domainSets.size} rules!`);
  208. await piscina.destroy();
  209. console.time('* Write reject.conf');
  210. await compareAndWriteFile(
  211. withBannerArray(
  212. 'Sukka\'s Surge Rules - Reject Base',
  213. [
  214. 'License: AGPL 3.0',
  215. 'Homepage: https://ruleset.skk.moe',
  216. 'GitHub: https://github.com/SukkaW/Surge',
  217. '',
  218. 'The domainset supports AD blocking, tracking protection, privacy protection, anti-phishing, anti-mining',
  219. '',
  220. 'Build from:',
  221. ...HOSTS.map(host => ` - ${host[0]}`),
  222. ...ADGUARD_FILTERS.map(filter => ` - ${Array.isArray(filter) ? filter[0] : filter}`),
  223. ],
  224. new Date(),
  225. [...domainSets].sort()
  226. ),
  227. pathResolve(__dirname, '../List/domainset/reject.conf')
  228. );
  229. console.timeEnd('* Write reject.conf');
  230. console.timeEnd('Total Time - build-reject-domain-set');
  231. if (piscina.queueSize === 0) {
  232. process.exit(0);
  233. }
  234. })();
  235. function isInWhiteList(domain) {
  236. for (const white of filterRuleWhitelistDomainSets) {
  237. if (domain === white || domain.endsWith(white)) {
  238. return true;
  239. }
  240. if (white.endsWith(domain)) {
  241. // If a whole domain is in blacklist but a subdomain is in whitelist
  242. // We have no choice but to remove the whole domain from blacklist
  243. return true;
  244. }
  245. }
  246. return false;
  247. }