// download-previous-build.ts
  1. import tar from 'tar';
  2. import fs from 'fs';
  3. import fsp from 'fs/promises';
  4. import path from 'path';
  5. import os from 'os';
  6. import { Readable } from 'stream';
  7. import { pipeline } from 'stream/promises';
  8. import { readFileByLine } from './lib/fetch-remote-text-by-line';
  9. import { isCI } from 'ci-info';
  10. import { task, traceAsync } from './lib/trace-runner';
  11. const IS_READING_BUILD_OUTPUT = 1 << 2;
  12. const ALL_FILES_EXISTS = 1 << 3;
  13. export const downloadPreviousBuild = task(__filename, async () => {
  14. const buildOutputList: string[] = [];
  15. let flag = 1 | ALL_FILES_EXISTS;
  16. for await (const line of readFileByLine(path.resolve(__dirname, '../.gitignore'))) {
  17. if (line === '# $ build output') {
  18. flag = flag | IS_READING_BUILD_OUTPUT;
  19. continue;
  20. }
  21. if (!(flag & IS_READING_BUILD_OUTPUT)) {
  22. continue;
  23. }
  24. buildOutputList.push(line);
  25. if (!isCI) {
  26. // Bun.file().exists() doesn't check directory
  27. if (!fs.existsSync(path.join(__dirname, '..', line))) {
  28. flag = flag & ~ALL_FILES_EXISTS;
  29. }
  30. }
  31. }
  32. if (isCI) {
  33. flag = flag & ~ALL_FILES_EXISTS;
  34. }
  35. if (flag & ALL_FILES_EXISTS) {
  36. console.log('All files exists, skip download.');
  37. return;
  38. }
  39. const extractedPath = path.join(os.tmpdir(), `sukka-surge-last-build-extracted-${Date.now()}`);
  40. const filesList = buildOutputList.map(f => path.join('ruleset.skk.moe-master', f));
  41. await traceAsync(
  42. 'Download and extract previous build',
  43. () => Promise.all([
  44. fetch('https://codeload.github.com/sukkalab/ruleset.skk.moe/tar.gz/master'),
  45. fsp.mkdir(extractedPath, { recursive: true })
  46. ]).then(([resp]) => pipeline(
  47. Readable.fromWeb(resp.body!),
  48. tar.x({
  49. cwd: extractedPath,
  50. filter(p) {
  51. return filesList.some(f => p.startsWith(f));
  52. }
  53. })
  54. ))
  55. );
  56. await Promise.all(buildOutputList.map(async p => {
  57. const src = path.join(extractedPath, 'ruleset.skk.moe-master', p);
  58. if (fs.existsSync(src)) { // Bun.file().exists() doesn't check directory
  59. return fsp.cp(
  60. src,
  61. path.join(__dirname, '..', p),
  62. { force: true, recursive: true }
  63. );
  64. }
  65. }));
  66. // return fsp.unlink(extractedPath).catch(() => { });
  67. });
  68. export const downloadPublicSuffixList = task(__filename, async () => {
  69. const publicSuffixDir = path.resolve(__dirname, '../node_modules/.cache');
  70. const publicSuffixPath = path.join(publicSuffixDir, 'public_suffix_list_dat.txt');
  71. const [resp] = await Promise.all([
  72. fetch('https://publicsuffix.org/list/public_suffix_list.dat'),
  73. fsp.mkdir(publicSuffixDir, { recursive: true })
  74. ]);
  75. return Bun.write(publicSuffixPath, resp);
  76. }, 'download-publicsuffixlist');
  77. if (import.meta.main) {
  78. Promise.all([
  79. downloadPreviousBuild(),
  80. downloadPublicSuffixList()
  81. ]);
  82. }