// download-previous-build.ts
import fs from 'fs';
import fsp from 'fs/promises';
import os from 'os';
import path from 'path';
import { Readable } from 'stream';
import { pipeline } from 'stream/promises';

import { isCI } from 'ci-info';
import tar from 'tar';

import { readFileByLine } from './lib/fetch-remote-text-by-line';
import { defaultRequestInit, fetchWithRetry } from './lib/fetch-retry';
import { task, traceAsync } from './lib/trace-runner';
  12. const IS_READING_BUILD_OUTPUT = 1 << 2;
  13. const ALL_FILES_EXISTS = 1 << 3;
  14. export const downloadPreviousBuild = task(__filename, async () => {
  15. const buildOutputList: string[] = [];
  16. let flag = 1 | ALL_FILES_EXISTS;
  17. for await (const line of readFileByLine(path.resolve(__dirname, '../.gitignore'))) {
  18. if (line === '# $ build output') {
  19. flag = flag | IS_READING_BUILD_OUTPUT;
  20. continue;
  21. }
  22. if (!(flag & IS_READING_BUILD_OUTPUT)) {
  23. continue;
  24. }
  25. buildOutputList.push(line);
  26. if (!isCI) {
  27. // Bun.file().exists() doesn't check directory
  28. if (!fs.existsSync(path.join(__dirname, '..', line))) {
  29. flag = flag & ~ALL_FILES_EXISTS;
  30. }
  31. }
  32. }
  33. if (isCI) {
  34. flag = flag & ~ALL_FILES_EXISTS;
  35. }
  36. if (flag & ALL_FILES_EXISTS) {
  37. console.log('All files exists, skip download.');
  38. // return;
  39. }
  40. const extractedPath = path.join(os.tmpdir(), `sukka-surge-last-build-extracted-${Date.now()}`);
  41. const filesList = buildOutputList.map(f => path.join('ruleset.skk.moe-master', f));
  42. await traceAsync(
  43. 'Download and extract previous build',
  44. async () => {
  45. const [resp] = await Promise.all([
  46. fetchWithRetry('https://codeload.github.com/sukkalab/ruleset.skk.moe/tar.gz/master', defaultRequestInit),
  47. fsp.mkdir(extractedPath, { recursive: true })
  48. ]);
  49. await pipeline(
  50. Readable.fromWeb(resp.body!),
  51. tar.t({
  52. filter(p) {
  53. return filesList.some(f => p.startsWith(f));
  54. },
  55. // onentry is async, so we close entry manually after consumed
  56. noResume: true,
  57. async onentry(entry) {
  58. if (entry.type !== 'File') {
  59. // not a file, throw away
  60. console.log(entry.type, entry.path)
  61. entry.resume();
  62. return;
  63. }
  64. const relativeEntryPath = entry.path.replace('ruleset.skk.moe-master' + path.sep, '');
  65. const targetPath = path.join(__dirname, '..', relativeEntryPath);
  66. await fsp.mkdir(path.dirname(targetPath), { recursive: true });
  67. const targetFile = Bun.file(targetPath);
  68. const targetFileSink = targetFile.writer();
  69. // I don't know, but for some reason it is impossible to consume entry with AsyncIterator
  70. await new Promise<void>((resolve, reject) => {
  71. entry.on('data', (chunk) => {
  72. targetFileSink.write(chunk);
  73. });
  74. entry.on('end', resolve);
  75. entry.on('error', reject);
  76. });
  77. await targetFileSink.end();
  78. }
  79. })
  80. );
  81. }
  82. );
  83. });
  84. export const downloadPublicSuffixList = task(__filename, async () => {
  85. const publicSuffixDir = path.resolve(__dirname, '../node_modules/.cache');
  86. const publicSuffixPath = path.join(publicSuffixDir, 'public_suffix_list_dat.txt');
  87. const [resp] = await Promise.all([
  88. fetchWithRetry('https://publicsuffix.org/list/public_suffix_list.dat', defaultRequestInit),
  89. fsp.mkdir(publicSuffixDir, { recursive: true })
  90. ]);
  91. return Bun.write(publicSuffixPath, resp);
  92. }, 'download-publicsuffixlist');
  93. if (import.meta.main) {
  94. Promise.all([
  95. downloadPreviousBuild(),
  96. downloadPublicSuffixList()
  97. ]);
  98. }