download-previous-build.ts

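// Pre-build download tasks: fetch the previous build output of ruleset.skk.moe
// from GitHub, and cache a copy of the public suffix list under
// node_modules/.cache.
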
import tar from 'tar';
import fs from 'fs';
import fsp from 'fs/promises';
import path from 'path';
import os from 'os';
import { Readable } from 'stream';
import { pipeline } from 'stream/promises';

import { readFileByLine } from './lib/fetch-text-by-line';
import { isCI } from 'ci-info';
import { task, traceAsync } from './lib/trace-runner';
import { defaultRequestInit, fetchWithRetry } from './lib/fetch-retry';
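
// Bit flags used while scanning .gitignore below: IS_READING_BUILD_OUTPUT is
// set once the "# $ build output" marker line is seen; ALL_FILES_EXISTS is
// cleared as soon as any listed build output file is missing locally.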
const IS_READING_BUILD_OUTPUT = 1 << 2;
const ALL_FILES_EXISTS = 1 << 3;

export const downloadPreviousBuild = task(import.meta.path, async () => {
  const buildOutputList: string[] = [];

  let flag = 1 | ALL_FILES_EXISTS;

  for await (const line of readFileByLine(path.resolve(import.meta.dir, '../.gitignore'))) {
    if (line === '# $ build output') {
      flag |= IS_READING_BUILD_OUTPUT;
      continue;
    }
    // skip everything before the build output marker
    if (!(flag & IS_READING_BUILD_OUTPUT)) {
      continue;
    }

    buildOutputList.push(line);

    if (!isCI) {
      // Bun.file().exists() can't check directories, so use fs.existsSync instead
      if (!fs.existsSync(path.join(import.meta.dir, '..', line))) {
        flag &= ~ALL_FILES_EXISTS;
      }
    }
  }
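
  // In CI, always treat the previous build as missing so a fresh copy is downloaded.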
  if (isCI) {
    flag &= ~ALL_FILES_EXISTS;
  }

  if (flag & ALL_FILES_EXISTS) {
    console.log('All files exist, skip download.');
    return;
  }
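
  // GitHub's codeload tarball nests everything under a "<repo>-<ref>/" top-level
  // directory, hence the "ruleset.skk.moe-master" prefix matched below.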
  const extractedPath = path.join(os.tmpdir(), `sukka-surge-last-build-extracted-${Date.now()}`);
  // tar entry paths always use forward slashes, so build the prefixes with path.posix
  const filesList = buildOutputList.map(f => path.posix.join('ruleset.skk.moe-master', f));

  await traceAsync(
    'Download and extract previous build',
    async () => {
      const [resp] = await Promise.all([
        fetchWithRetry('https://codeload.github.com/sukkalab/ruleset.skk.moe/tar.gz/master', defaultRequestInit),
        fsp.mkdir(extractedPath, { recursive: true })
      ]);
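
      // Stream the tarball response straight through tar's parser; matching
      // entries are written to disk as they arrive, so the archive is never
      // buffered in full.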
      await pipeline(
        Readable.fromWeb(resp.body!),
        tar.t({
          filter(p) {
            return filesList.some(f => p.startsWith(f));
          },
          // onentry is async, so disable auto-resume and drain each entry manually
          noResume: true,
          async onentry(entry) {
            if (entry.type !== 'File') {
              // not a regular file, throw it away
              entry.resume();
              return;
            }

            // tar entry paths always use "/" as separator, regardless of platform
            const relativeEntryPath = entry.path.replace('ruleset.skk.moe-master/', '');
            const targetPath = path.join(import.meta.dir, '..', relativeEntryPath);
            await fsp.mkdir(path.dirname(targetPath), { recursive: true });

            const targetFileSink = Bun.file(targetPath).writer();
            const onData = (chunk: Buffer) => targetFileSink.write(chunk);
            // For some reason the entry can't be consumed as an async iterator,
            // so fall back to plain event listeners
            await new Promise<void>((resolve, reject) => {
              entry.on('data', onData);
              entry.on('end', resolve);
              entry.on('error', reject);
            });
            await targetFileSink.end();
          }
        })
      );
    }
  );
});
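
// Fetch the latest public suffix list and store it under node_modules/.cache.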
export const downloadPublicSuffixList = task(import.meta.path, async () => {
  const publicSuffixDir = path.resolve(import.meta.dir, '../node_modules/.cache');
  const publicSuffixPath = path.join(publicSuffixDir, 'public_suffix_list_dat.txt');

  const [resp] = await Promise.all([
    fetchWithRetry('https://publicsuffix.org/list/public_suffix_list.dat', defaultRequestInit),
    fsp.mkdir(publicSuffixDir, { recursive: true })
  ]);

  // Bun.write accepts a Response and streams its body to disk
  return Bun.write(publicSuffixPath, resp as Response);
}, 'download-publicsuffixlist');

if (import.meta.main) {
  Promise.all([
    downloadPreviousBuild(),
    downloadPublicSuffixList()
  ]);
}
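
// Note: this script relies on Bun APIs (Bun.file, Bun.write, import.meta.dir),
// so it is meant to be run with Bun, e.g. `bun ./download-previous-build.ts`.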