// download-previous-build.js
  1. const { fetch } = require('undici');
  2. const tar = require('tar');
  3. const fs = require('fs');
  4. const fse = require('fs-extra');
  5. const { join, resolve } = require('path');
  6. const { tmpdir } = require('os');
  7. const { Readable } = require('stream');
  8. const { pipeline } = require('stream/promises');
  9. const { readFileByLine } = require('./lib/fetch-remote-text-by-line');
  10. const fileExists = (path) => {
  11. return fs.promises.access(path, fs.constants.F_OK)
  12. .then(() => true)
  13. .catch(() => false);
  14. };
  15. (async () => {
  16. const filesList = [];
  17. let allFileExists = true;
  18. for await (const line of readFileByLine(resolve(__dirname, '../.gitignore'))) {
  19. if (
  20. (
  21. line.startsWith('List/')
  22. || line.startsWith('Modules/')
  23. ) && !line.endsWith('/')
  24. ) {
  25. allFileExists = await fileExists(join(__dirname, '..', line));
  26. if (!allFileExists) {
  27. break;
  28. }
  29. }
  30. }
  31. if (allFileExists) {
  32. console.log('All files exists, skip download.');
  33. return;
  34. }
  35. const extractedPath = join(tmpdir(), `sukka-surge-last-build-extracted-${Date.now()}`);
  36. const [resp] = await Promise.all([
  37. fetch('https://codeload.github.com/sukkaw/surge/tar.gz/gh-pages'),
  38. fse.ensureDir(extractedPath)
  39. ]);
  40. await pipeline(
  41. Readable.fromWeb(resp.body),
  42. tar.x({
  43. cwd: extractedPath,
  44. filter(p) {
  45. return p.split('/')[1] === 'List';
  46. }
  47. })
  48. );
  49. await Promise.all(filesList.map(async p => {
  50. const src = join(extractedPath, 'Surge-gh-pages', p);
  51. if (await fileExists(src)) {
  52. return fse.copy(
  53. src,
  54. join(__dirname, '..', p),
  55. { overwrite: true }
  56. );
  57. }
  58. }));
  59. await fs.promises.unlink(extractedPath).catch(() => { });
  60. })();