download-previous-build.ts

import fs from 'fs';
import fsp from 'fs/promises';
import path from 'path';
import os from 'os';
import { Readable } from 'stream';
import { pipeline } from 'stream/promises';

import { readFileByLine } from './lib/fetch-text-by-line';
import { isCI } from 'ci-info';
import { task, traceAsync } from './lib/trace-runner';
import { defaultRequestInit, fetchWithRetry } from './lib/fetch-retry';

import tarStream from 'tar-stream';
import zlib from 'zlib';
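
// Bit flags describing the state of the .gitignore scan below:
// IS_READING_BUILD_OUTPUT is set once the "# $ build output" marker line has
// been seen; ALL_FILES_EXISTS is cleared as soon as one expected file is missing.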
const IS_READING_BUILD_OUTPUT = 1 << 2;
const ALL_FILES_EXISTS = 1 << 3;

export const downloadPreviousBuild = task(import.meta.path, async () => {
  const buildOutputList: string[] = [];
  let flag = 1 | ALL_FILES_EXISTS;

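  // The list of build artifacts is not hard-coded: it is parsed from the
  // "# $ build output" section of the repository's .gitignore, so the list
  // stays in sync with what the build is expected to emit.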
  for await (const line of readFileByLine(path.resolve(import.meta.dir, '../.gitignore'))) {
    if (line === '# $ build output') {
      flag = flag | IS_READING_BUILD_OUTPUT;
      continue;
    }
    if (!(flag & IS_READING_BUILD_OUTPUT)) {
      continue;
    }

    buildOutputList.push(line);

    if (!isCI) {
      // Bun.file().exists() doesn't check directory
      if (!fs.existsSync(path.join(import.meta.dir, '..', line))) {
        flag = flag & ~ALL_FILES_EXISTS;
      }
    }
  }
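
  // In CI the previous build is always downloaded, regardless of what is
  // already on disk.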
  if (isCI) {
    flag = flag & ~ALL_FILES_EXISTS;
  }

  if (flag & ALL_FILES_EXISTS) {
    console.log('All files exist, skip download.');
    // return;
  }

  const extractedPath = path.join(os.tmpdir(), `sukka-surge-last-build-extracted-${Date.now()}`);

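  // GitHub's codeload tarball nests everything under a "<repo>-<branch>/"
  // top-level directory, and tar entry names always use forward slashes,
  // so build the expected prefixes with a literal '/' rather than path.join.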
  const filesList = buildOutputList.map(f => `ruleset.skk.moe-master/${f}`);

  await traceAsync(
    'Download and extract previous build',
    async () => {
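      // Start the tarball download and the temp-directory creation
      // concurrently; only the fetch result is used below.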
      const [resp] = await Promise.all([
        fetchWithRetry('https://codeload.github.com/sukkalab/ruleset.skk.moe/tar.gz/master', defaultRequestInit),
        fsp.mkdir(extractedPath, { recursive: true })
      ]);

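      // Stream the response body straight through gunzip into the tar
      // extractor, so the archive is never buffered in full.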
      const extract = tarStream.extract();
      Readable.fromWeb(resp.body!).pipe(zlib.createGunzip()).pipe(extract);

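      // tar-stream's extract() is async-iterable; each entry must be fully
      // consumed (piped somewhere or resume()-d) before the next one arrives.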
      for await (const entry of extract) {
        // Skip directories, symlinks and other non-file entries
        if (entry.header.type !== 'file') {
          entry.resume(); // Drain the entry
          continue;
        }
        // Skip entries that are not part of the previous build output
        if (!filesList.some(f => entry.header.name.startsWith(f))) {
          entry.resume(); // Drain the entry
          continue;
        }
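
        // Tar entry names always use '/', so strip the prefix with a literal
        // forward slash instead of the platform-specific path.sep.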
        const relativeEntryPath = entry.header.name.replace('ruleset.skk.moe-master/', '');
        const targetPath = path.join(import.meta.dir, '..', relativeEntryPath);

        await fsp.mkdir(path.dirname(targetPath), { recursive: true });
        await pipeline(
          entry,
          fs.createWriteStream(targetPath)
        );
      }
    }
  );
});
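
// Fetch the latest Public Suffix List into node_modules/.cache, where later
// build steps can presumably read it from disk instead of re-fetching it.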
export const downloadPublicSuffixList = task(import.meta.path, async () => {
  const publicSuffixDir = path.resolve(import.meta.dir, '../node_modules/.cache');
  const publicSuffixPath = path.join(publicSuffixDir, 'public_suffix_list_dat.txt');

  const [resp] = await Promise.all([
    fetchWithRetry('https://publicsuffix.org/list/public_suffix_list.dat', defaultRequestInit),
    fsp.mkdir(publicSuffixDir, { recursive: true })
  ]);

  return Bun.write(publicSuffixPath, resp as Response);
}, 'download-publicsuffixlist');
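
// When this file is executed directly (rather than imported), run both
// downloads concurrently.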
if (import.meta.main) {
  Promise.all([
    downloadPreviousBuild(),
    downloadPublicSuffixList()
  ]);
}