download-previous-build.js

const { fetch } = require('undici');
const tar = require('tar');
const fs = require('fs');
const fse = require('fs-extra');
const { join, resolve } = require('path');
const { tmpdir } = require('os');
const { Readable } = require('stream');
const { pipeline } = require('stream/promises');
const { readFileByLine } = require('./lib/fetch-remote-text-by-line');

const fileExists = (path) => {
  return fs.promises.access(path, fs.constants.F_OK)
    .then(() => true)
    .catch(() => false);
};

(async () => {
  // Collect the generated ruleset paths from .gitignore: every ignored
  // entry under List/ without a trailing slash is a build artifact file.
  const filesList = [];
  for await (const line of readFileByLine(resolve(__dirname, '../.gitignore'))) {
    if (line.startsWith('List/') && !line.endsWith('/')) {
      filesList.push(line);
    }
  }

  // If every artifact is already present locally, skip the download entirely.
  if (
    (await Promise.all(
      filesList.map(p => fileExists(join(__dirname, '..', p)))
    )).every(exist => exist)
  ) {
    console.log('All files exist, skip download.');
    return;
  }

  // Download the previous build (the gh-pages branch tarball) and extract
  // only the List/ directory into a scratch folder under the OS tmpdir.
  const extractedPath = join(tmpdir(), `sukka-surge-last-build-extracted-${Date.now()}`);
  await fse.ensureDir(extractedPath);

  const resp = await fetch('https://codeload.github.com/sukkaw/surge/tar.gz/gh-pages');
  await pipeline(
    // undici's fetch returns a WHATWG ReadableStream; convert it to a Node stream
    Readable.fromWeb(resp.body),
    tar.x({
      cwd: extractedPath,
      // Tarball entries look like "Surge-gh-pages/List/...", so keep only
      // paths whose second segment is "List".
      filter(p) {
        return p.split('/')[1] === 'List';
      }
    })
  );

  // Copy each extracted file back into the repository, overwriting stale copies.
  await Promise.all(filesList.map(async p => {
    const src = join(extractedPath, 'Surge-gh-pages', p);
    if (await fileExists(src)) {
      return fse.copy(
        src,
        join(__dirname, '..', p),
        { overwrite: true }
      );
    }
  }));

  // Clean up the scratch directory. Note: fs.promises.rm (not unlink) is
  // required here, since unlink cannot remove a directory.
  await fs.promises.rm(extractedPath, { recursive: true, force: true }).catch(() => {});
})();
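
The script imports a readFileByLine helper from ./lib/fetch-remote-text-by-line, which is not shown above. Below is a minimal sketch of a compatible local-file implementation, assuming the helper returns an async iterable of lines; the module's name suggests the real version also handles remote URLs, which this sketch omits.

// lib/fetch-remote-text-by-line.js -- illustrative sketch, not the
// repository's actual module.
const fs = require('fs');
const readline = require('readline');

// readline.Interface is async-iterable, so callers can write
// `for await (const line of readFileByLine(path))`.
function readFileByLine(path) {
  return readline.createInterface({
    input: fs.createReadStream(path, { encoding: 'utf-8' }),
    crlfDelay: Infinity // treat \r\n as a single line break
  });
}

module.exports = { readFileByLine };

Because the interface yields lines with the trailing newline already stripped, the startsWith('List/') and endsWith('/') checks in the main script work on clean line content.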