// download-previous-build.js
  1. const { fetch } = require('undici');
  2. const tar = require('tar');
  3. const fs = require('fs');
  4. const fse = require('fs-extra');
  5. const { join, resolve } = require('path');
  6. const { tmpdir } = require('os');
  7. const { Readable } = require('stream');
  8. const { pipeline } = require('stream/promises');
  9. const { readFileByLine } = require('./lib/fetch-remote-text-by-line');
  10. const { isCI } = require('ci-info');
  11. const { runner } = require('./lib/trace-runner');
  12. const fileExists = (path) => {
  13. return fs.promises.access(path, fs.constants.F_OK)
  14. .then(() => true)
  15. .catch(() => false);
  16. };
  17. const downloadPreviousBuild = async () => {
  18. const filesList = ['Clash', 'List'];
  19. let allFileExists = true;
  20. if (isCI) {
  21. allFileExists = false;
  22. } else {
  23. for await (const line of readFileByLine(resolve(__dirname, '../.gitignore'))) {
  24. if (
  25. (
  26. // line.startsWith('List/')
  27. line.startsWith('Modules/')
  28. ) && !line.endsWith('/')
  29. ) {
  30. allFileExists = await fileExists(join(__dirname, '..', line));
  31. filesList.push(line);
  32. if (!allFileExists) {
  33. console.log(`File not exists: ${line}`);
  34. break;
  35. }
  36. }
  37. }
  38. }
  39. if (allFileExists) {
  40. console.log('All files exists, skip download.');
  41. return;
  42. }
  43. console.log('Download previous build.');
  44. const extractedPath = join(tmpdir(), `sukka-surge-last-build-extracted-${Date.now()}`);
  45. const [resp] = await Promise.all([
  46. fetch('https://codeload.github.com/sukkaw/surge/tar.gz/gh-pages'),
  47. fse.ensureDir(extractedPath)
  48. ]);
  49. await pipeline(
  50. Readable.fromWeb(resp.body),
  51. tar.x({
  52. cwd: extractedPath,
  53. filter(p) {
  54. const dir = p.split('/')[1];
  55. return dir === 'List' || dir === 'Modules' || dir === 'Clash';
  56. }
  57. })
  58. );
  59. await Promise.all(filesList.map(async p => {
  60. const src = join(extractedPath, 'Surge-gh-pages', p);
  61. if (await fileExists(src)) {
  62. return fse.copy(
  63. src,
  64. join(__dirname, '..', p),
  65. { overwrite: true }
  66. );
  67. }
  68. }));
  69. await fs.promises.unlink(extractedPath).catch(() => { });
  70. };
  71. const downloadPublicSuffixList = async () => {
  72. const publicSuffixDir = resolve(__dirname, '../node_modules/.cache');
  73. const publicSuffixPath = join(publicSuffixDir, 'public_suffix-list_dat.txt');
  74. console.log('Download public suffix list.');
  75. const [resp] = await Promise.all([
  76. fetch('https://publicsuffix.org/list/public_suffix_list.dat'),
  77. fse.ensureDir(publicSuffixDir)
  78. ]);
  79. await pipeline(
  80. Readable.fromWeb(resp.body),
  81. fs.createWriteStream(publicSuffixPath)
  82. );
  83. };
  84. module.exports.downloadPreviousBuild = downloadPreviousBuild;
  85. module.exports.downloadPublicSuffixList = downloadPublicSuffixList;
  86. if (require.main === module) {
  87. runner(__filename, () => {
  88. return Promise.all([
  89. downloadPreviousBuild(),
  90. downloadPublicSuffixList()
  91. ]);
  92. });
  93. }