download-previous-build.js

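// Restore the previous build output: download the gh-pages branch tarball of
// sukkaw/surge, extract its List/ directory, and copy the generated files back
// into the repository. The script assumes it lives one directory below the
// repository root, since it resolves ../.gitignore relative to itself.
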
const { fetch } = require('undici');
const tar = require('tar');
const fs = require('fs');
const fse = require('fs-extra');
const { join, resolve } = require('path');
const { tmpdir } = require('os');
const { Stream, Readable } = require('stream');
const { promisify } = require('util');

const pipeline = promisify(Stream.pipeline);

(async () => {
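  // The generated files are gitignored; read .gitignore and keep every entry
  // under List/ to learn which files the previous build produced.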
  const filesList = (
    await fs.promises.readFile(resolve(__dirname, '../.gitignore'), { encoding: 'utf-8' })
  )
    .split('\n')
    .filter(p => p.startsWith('List/'));
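
  // Stream the gh-pages tarball from GitHub's codeload endpoint into a
  // temporary file.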
  const tempFile = join(tmpdir(), `sukka-surge-last-build-tar-${Date.now()}`);
  const resp = await fetch('https://codeload.github.com/sukkaw/surge/tar.gz/gh-pages');
  const readableNodeStream = Readable.fromWeb(resp.body);
  await pipeline(
    readableNodeStream,
    fs.createWriteStream(tempFile)
  );
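
  // Extract only the List/ directory. Entries in the tarball are prefixed
  // with the root folder 'Surge-gh-pages/', so the second path segment is the
  // top-level directory inside the repository.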
  const extractedPath = join(tmpdir(), `sukka-surge-last-build-extracted-${Date.now()}`);
  await fse.ensureDir(extractedPath);
  await tar.x({
    file: tempFile,
    cwd: extractedPath,
    filter: (p) => p.split('/')[1] === 'List'
  });
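
  // Copy each previously built file from the extracted tarball back into the
  // repository, overwriting any existing copy.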
  await Promise.all(filesList.map(p => fse.copy(
    join(extractedPath, 'Surge-gh-pages', p),
    join(__dirname, '..', p),
    { overwrite: true }
  )));
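
  // Best-effort cleanup: unlink the downloaded tarball and recursively remove
  // the extraction directory (failures are ignored).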
  await fs.promises.unlink(tempFile).catch(() => {});
  // unlink() cannot remove a directory; use rm() with recursive instead.
  await fs.promises.rm(extractedPath, { recursive: true, force: true }).catch(() => {});
})();