// build-reject-domainset.js (~13 KB)
  1. const { promises: fsPromises } = require('fs');
  2. const fse = require('fs-extra');
  3. const { resolve: pathResolve } = require('path');
  4. const Piscina = require('piscina');
  5. const { processHosts, processFilterRules, preprocessFullDomainSetBeforeUsedAsWorkerData } = require('./lib/parse-filter');
  6. const cpuCount = require('os').cpus().length;
  7. const { isCI } = require('ci-info');
  8. const threads = isCI ? cpuCount : cpuCount / 2;
  9. (async () => {
  10. console.time('Total Time - build-reject-domain-set');
  11. /** @type Set<string> */
  12. const domainSets = new Set();
  13. console.log('Downloading hosts file...');
  14. console.time('* Download and process Hosts');
  15. // Parse from remote hosts & domain lists
  16. (await Promise.all([
  17. processHosts('https://pgl.yoyo.org/adservers/serverlist.php?hostformat=hosts&showintro=0&mimetype=plaintext', true),
  18. processHosts('https://raw.githubusercontent.com/hoshsadiq/adblock-nocoin-list/master/hosts.txt'),
  19. processHosts('https://raw.githubusercontent.com/crazy-max/WindowsSpyBlocker/master/data/hosts/spy.txt')
  20. ])).forEach(hosts => {
  21. hosts.forEach(host => {
  22. if (host) {
  23. domainSets.add(host);
  24. }
  25. });
  26. });
  27. console.timeEnd('* Download and process Hosts');
  28. let previousSize = domainSets.size;
  29. console.log(`Import ${previousSize} rules from hosts files!`);
  30. await fsPromises.readFile(pathResolve(__dirname, '../Source/domainset/reject_sukka.conf'), { encoding: 'utf-8' }).then(data => {
  31. data.split('\n').forEach(line => {
  32. const trimmed = line.trim();
  33. if (
  34. line.startsWith('#')
  35. || line.startsWith(' ')
  36. || line.startsWith('\r')
  37. || line.startsWith('\n')
  38. || trimmed === ''
  39. ) {
  40. return;
  41. }
  42. /* if (domainSets.has(line) || domainSets.has(`.${line}`)) {
  43. console.warn(`|${line}| is already in the list!`);
  44. } */
  45. domainSets.add(trimmed);
  46. });
  47. });
  48. // Copy reject_sukka.conf for backward compatibility
  49. await fse.copy(pathResolve(__dirname, '../Source/domainset/reject_sukka.conf'), pathResolve(__dirname, '../List/domainset/reject_sukka.conf'))
  50. previousSize = domainSets.size - previousSize;
  51. console.log(`Import ${previousSize} rules from reject_sukka.conf!`);
  52. // Parse from AdGuard Filters
  53. /** @type Set<string> */
  54. const filterRuleWhitelistDomainSets = new Set([
  55. 'localhost',
  56. 'broadcasthost',
  57. 'ip6-loopback',
  58. 'ip6-localnet',
  59. 'ip6-mcastprefix',
  60. 'ip6-allnodes',
  61. 'ip6-allrouters',
  62. 'ip6-allhosts',
  63. 'mcastprefix',
  64. 'skk.moe',
  65. 'analytics.google.com',
  66. 'msa.cdn.mediaset.net', // Added manually using DOMAIN-KEYWORDS
  67. 'cloud.answerhub.com',
  68. 'ae01.alicdn.com',
  69. 'whoami.akamai.net',
  70. 'whoami.ds.akahelp.net',
  71. 'pxlk9.net.', // This one is malformed from EasyList, which I will manually add instead
  72. 'instant.page', // No, it doesn't violate anyone's privacy. I will whitelist it
  73. 'piwik.pro',
  74. 'mixpanel.com',
  75. 'cdn.mxpnl.com',
  76. 'heapanalytics.com',
  77. 'segment.com',
  78. 'segmentify.com',
  79. 't.co', // pgl yoyo add t.co to the blacklist
  80. 'survicate.com', // AdGuardDNSFilter
  81. 'perfops.io', // AdGuardDNSFilter
  82. 'd2axgrpnciinw7.cloudfront.net', // ADGuardDNSFilter
  83. 'tb-lb.sb-cd.com', // AdGuard
  84. 'storage.yandexcloud.net', // phishing list
  85. 'login.microsoftonline.com' // phishing list
  86. ]);
  87. console.time('* Download and process AdBlock Filter Rules');
  88. (await Promise.all([
  89. // Easy List
  90. [
  91. 'https://easylist.to/easylist/easylist.txt',
  92. [
  93. 'https://easylist-downloads.adblockplus.org/easylist.txt',
  94. 'https://raw.githubusercontent.com/easylist/easylist/gh-pages/easylist.txt',
  95. 'https://secure.fanboy.co.nz/easylist.txt'
  96. ]
  97. ],
  98. // AdGuard DNS Filter
  99. 'https://adguardteam.github.io/AdGuardSDNSFilter/Filters/filter.txt',
  100. // uBlock Origin Filter List
  101. [
  102. 'https://ublockorigin.github.io/uAssets/filters/filters.txt',
  103. [
  104. 'https://ublockorigin.github.io/uAssetsCDN/filters/filters.txt',
  105. 'https://ublockorigin.pages.dev/filters/filters.txt'
  106. ]
  107. ],
  108. [
  109. 'https://ublockorigin.github.io/uAssets/filters/filters-2020.txt',
  110. [
  111. 'https://ublockorigin.github.io/uAssetsCDN/filters/filters-2020.txt',
  112. 'https://ublockorigin.pages.dev/filters/filters-2020.txt'
  113. ]
  114. ],
  115. [
  116. 'https://ublockorigin.github.io/uAssets/filters/filters-2021.txt',
  117. [
  118. 'https://ublockorigin.github.io/uAssetsCDN/filters/filters-2021.txt',
  119. 'https://ublockorigin.pages.dev/filters/filters-2021.txt'
  120. ]
  121. ],
  122. [
  123. 'https://ublockorigin.github.io/uAssets/filters/filters-2022.txt',
  124. [
  125. 'https://ublockorigin.github.io/uAssetsCDN/filters/filters-2022.txt',
  126. 'https://ublockorigin.pages.dev/filters/filters-2022.txt'
  127. ]
  128. ],
  129. // uBlock Origin Badware Risk List
  130. [
  131. 'https://ublockorigin.github.io/uAssets/filters/badware.txt',
  132. [
  133. 'https://ublockorigin.github.io/uAssetsCDN/filters/badware.txt',
  134. 'https://ublockorigin.pages.dev/filters/badware.txt'
  135. ]
  136. ],
  137. // uBlock Origin Privacy List
  138. [
  139. 'https://ublockorigin.github.io/uAssets/filters/privacy.txt',
  140. [
  141. 'https://ublockorigin.github.io/uAssetsCDN/filters/privacy.txt',
  142. 'https://ublockorigin.pages.dev/filters/privacy.txt'
  143. ]
  144. ],
  145. // uBlock Origin Resource Abuse
  146. [
  147. 'https://ublockorigin.github.io/uAssets/filters/resource-abuse.txt',
  148. [
  149. 'https://ublockorigin.github.io/uAssetsCDN/filters/resource-abuse.txt',
  150. 'https://ublockorigin.pages.dev/filters/resource-abuse.txt'
  151. ]
  152. ],
  153. // uBlock Origin Unbreak
  154. [
  155. 'https://ublockorigin.github.io/uAssets/filters/unbreak.txt',
  156. [
  157. 'https://ublockorigin.github.io/uAssetsCDN/filters/unbreak.txt',
  158. 'https://ublockorigin.pages.dev/filters/unbreak.txt'
  159. ]
  160. ],
  161. // AdGuard Base Filter
  162. 'https://filters.adtidy.org/extension/ublock/filters/2_without_easylist.txt',
  163. // AdGuard Mobile AD
  164. 'https://filters.adtidy.org/extension/ublock/filters/11.txt',
  165. // AdGuard Tracking Protection
  166. 'https://filters.adtidy.org/extension/ublock/filters/3.txt',
  167. // AdGuard Japanese filter
  168. 'https://filters.adtidy.org/extension/ublock/filters/7.txt',
  169. // AdGuard Chinese filter (EasyList China + AdGuard Chinese filter)
  170. 'https://filters.adtidy.org/extension/ublock/filters/224.txt',
  171. // Easy Privacy
  172. [
  173. 'https://easylist.to/easylist/easyprivacy.txt',
  174. [
  175. 'https://secure.fanboy.co.nz/easyprivacy.txt',
  176. 'https://raw.githubusercontent.com/easylist/easylist/gh-pages/easyprivacy.txt',
  177. 'https://easylist-downloads.adblockplus.org/easyprivacy.txt'
  178. ]
  179. ],
  180. // Curben's UrlHaus Malicious URL Blocklist
  181. [
  182. 'https://curbengh.github.io/urlhaus-filter/urlhaus-filter-agh-online.txt',
  183. [
  184. 'https://urlhaus-filter.pages.dev/urlhaus-filter-agh-online.txt',
  185. // Prefer mirror, since malware-filter.gitlab.io has not been updated for a while
  186. // 'https://malware-filter.gitlab.io/urlhaus-filter/urlhaus-filter-agh-online.txt'
  187. ]
  188. ],
  189. // Curben's Phishing URL Blocklist
  190. [
  191. 'https://curbengh.github.io/phishing-filter/phishing-filter-agh.txt',
  192. [
  193. 'https://phishing-filter.pages.dev/phishing-filter-agh.txt',
  194. // Prefer mirror, since malware-filter.gitlab.io has not been updated for a while
  195. // 'https://malware-filter.gitlab.io/malware-filter/phishing-filter-agh.txt'
  196. ]
  197. ],
  198. // Curben's PUP Domains Blocklist
  199. [
  200. 'https://curbengh.github.io/pup-filter/pup-filter-agh.txt',
  201. [
  202. 'https://pup-filter.pages.dev/pup-filter-agh.txt',
  203. // Prefer mirror, since malware-filter.gitlab.io has not been updated for a while
  204. // 'https://malware-filter.gitlab.io/malware-filter/pup-filter-agh.txt'
  205. ]
  206. ],
  207. // GameConsoleAdblockList
  208. 'https://raw.githubusercontent.com/DandelionSprout/adfilt/master/GameConsoleAdblockList.txt',
  209. // PiHoleBlocklist
  210. 'https://raw.githubusercontent.com/Perflyst/PiHoleBlocklist/master/SmartTV-AGH.txt',
  211. // Spam404
  212. 'https://raw.githubusercontent.com/Spam404/lists/master/adblock-list.txt'
  213. ].map(input => {
  214. if (typeof input === 'string') {
  215. return processFilterRules(input);
  216. }
  217. if (Array.isArray(input) && input.length === 2) {
  218. return processFilterRules(input[0], input[1]);
  219. }
  220. }))).forEach(({ white, black }) => {
  221. white.forEach(i => filterRuleWhitelistDomainSets.add(i));
  222. black.forEach(i => domainSets.add(i));
  223. });
  224. console.timeEnd('* Download and process AdBlock Filter Rules');
  225. previousSize = domainSets.size - previousSize;
  226. console.log(`Import ${previousSize} rules from adguard filters!`);
  227. // Read DOMAIN Keyword
  228. const domainKeywordsSet = new Set();
  229. const domainSuffixSet = new Set();
  230. await fsPromises.readFile(pathResolve(__dirname, '../List/non_ip/reject.conf'), { encoding: 'utf-8' }).then(data => {
  231. data.split('\n').forEach(line => {
  232. if (line.startsWith('DOMAIN-KEYWORD')) {
  233. const [, ...keywords] = line.split(',');
  234. domainKeywordsSet.add(keywords.join(',').trim());
  235. } else if (line.startsWith('DOMAIN-SUFFIX')) {
  236. const [, ...keywords] = line.split(',');
  237. domainSuffixSet.add(keywords.join(',').trim());
  238. }
  239. });
  240. });
  241. // Read Special Phishing Suffix list
  242. await fsPromises.readFile(pathResolve(__dirname, '../List/domainset/reject_phishing.conf'), { encoding: 'utf-8' }).then(data => {
  243. data.split('\n').forEach(line => {
  244. const trimmed = line.trim();
  245. if (
  246. line.startsWith('#')
  247. || line.startsWith(' ')
  248. || line.startsWith('\r')
  249. || line.startsWith('\n')
  250. || trimmed === ''
  251. ) {
  252. return;
  253. }
  254. /* if (domainSets.has(line) || domainSets.has(`.${line}`)) {
  255. console.warn(`|${line}| is already in the list!`);
  256. } */
  257. domainSuffixSet.add(trimmed);
  258. });
  259. });
  260. console.log(`Import ${domainKeywordsSet.size} black keywords and ${domainSuffixSet.size} black suffixes!`);
  261. previousSize = domainSets.size;
  262. // Dedupe domainSets
  263. console.log(`Start deduping from black keywords/suffixes! (${previousSize})`);
  264. console.time(`* Dedupe from black keywords/suffixes`);
  265. for (const domain of domainSets) {
  266. let isTobeRemoved = false;
  267. for (const suffix of domainSuffixSet) {
  268. if (domain.endsWith(suffix)) {
  269. isTobeRemoved = true;
  270. break;
  271. }
  272. }
  273. if (!isTobeRemoved) {
  274. for (const keyword of domainKeywordsSet) {
  275. if (domain.includes(keyword)) {
  276. isTobeRemoved = true;
  277. break;
  278. }
  279. }
  280. }
  281. if (!isTobeRemoved) {
  282. for (const white of filterRuleWhitelistDomainSets) {
  283. if (domain.includes(white) || white.includes(domain)) {
  284. isTobeRemoved = true;
  285. break;
  286. }
  287. }
  288. }
  289. if (isTobeRemoved) {
  290. domainSets.delete(domain);
  291. }
  292. }
  293. console.timeEnd(`* Dedupe from black keywords/suffixes`);
  294. console.log(`Deduped ${previousSize} - ${domainSets.size} = ${previousSize - domainSets.size} from black keywords and suffixes!`);
  295. previousSize = domainSets.size;
  296. // Dedupe domainSets
  297. console.log(`Start deduping! (${previousSize})`);
  298. const START_TIME = Date.now();
  299. const piscina = new Piscina({
  300. filename: pathResolve(__dirname, 'worker/build-reject-domainset-worker.js'),
  301. workerData: preprocessFullDomainSetBeforeUsedAsWorkerData([...domainSets]),
  302. idleTimeout: 50,
  303. minThreads: threads,
  304. maxThreads: threads
  305. });
  306. console.log(`Launching ${threads} threads...`)
  307. const tasksArray = Array.from(domainSets)
  308. .reduce((result, element, index) => {
  309. const chunk = index % threads;
  310. result[chunk] ??= [];
  311. result[chunk].push(element);
  312. return result;
  313. }, []);
  314. (
  315. await Promise.all(
  316. Array.from(domainSets)
  317. .reduce((result, element, index) => {
  318. const chunk = index % threads;
  319. result[chunk] ??= [];
  320. result[chunk].push(element);
  321. return result;
  322. }, [])
  323. .map(chunk => piscina.run({ chunk }, { name: 'dedupe' }))
  324. )
  325. ).forEach((result, taskIndex) => {
  326. const chunk = tasksArray[taskIndex];
  327. for (let i = 0, len = result.length; i < len; i++) {
  328. if (result[i]) {
  329. domainSets.delete(chunk[i]);
  330. }
  331. }
  332. });
  333. console.log(`* Dedupe from covered subdomain - ${(Date.now() - START_TIME) / 1000}s`);
  334. console.log(`Deduped ${previousSize - domainSets.size} rules!`);
  335. await Promise.all([
  336. fsPromises.writeFile(
  337. pathResolve(__dirname, '../List/domainset/reject.conf'),
  338. `${[...domainSets].join('\n')}\n`,
  339. { encoding: 'utf-8' }
  340. ),
  341. piscina.destroy()
  342. ]);
  343. console.timeEnd('Total Time - build-reject-domain-set');
  344. if (piscina.queueSize === 0) {
  345. process.exit(0);
  346. }
  347. })();