// download-previous-build.js
  1. const { fetch } = require('undici');
  2. const tar = require('tar');
  3. const fs = require('fs');
  4. const fse = require('fs-extra');
  5. const { join, resolve } = require('path');
  6. const { tmpdir } = require('os');
  7. const { Stream, Readable } = require('stream');
  8. const { promisify } = require('util');
  9. const pipeline = promisify(Stream.pipeline);
  10. const fileExists = (path) => {
  11. return fs.promises.access(path, fs.constants.F_OK)
  12. .then(() => true)
  13. .catch(() => false);
  14. };
  15. (async () => {
  16. const filesList = (
  17. await fs.promises.readFile(resolve(__dirname, '../.gitignore'), { encoding: 'utf-8' })
  18. )
  19. .split('\n')
  20. .filter(p => p.startsWith('List/'));
  21. if (
  22. (await Promise.all(
  23. filesList.map(p => fileExists(join(__dirname, '..', p)))
  24. )).some(exist => !exist)
  25. ) {
  26. const tempFile = join(tmpdir(), `sukka-surge-last-build-tar-${Date.now()}`);
  27. const resp = await fetch('https://codeload.github.com/sukkaw/surge/tar.gz/gh-pages');
  28. const readableNodeStream = Readable.fromWeb(resp.body);
  29. await pipeline(
  30. readableNodeStream,
  31. fs.createWriteStream(tempFile)
  32. );
  33. const extractedPath = join(tmpdir(), `sukka-surge-last-build-extracted-${Date.now()}`);
  34. await fse.ensureDir(extractedPath);
  35. await tar.x({
  36. file: tempFile,
  37. cwd: extractedPath,
  38. filter: (p) => {
  39. return p.split('/')[1] === 'List'
  40. }
  41. });
  42. await Promise.all(filesList.map(p => fse.copy(
  43. join(extractedPath, 'Surge-gh-pages', p),
  44. join(__dirname, '..', p),
  45. {
  46. overwrite: true
  47. }
  48. )))
  49. await fs.promises.unlink(tempFile).catch(() => { });
  50. await fs.promises.unlink(extractedPath).catch(() => { });
  51. } else {
  52. console.log('All files exists, skip download.');
  53. }
  54. })();