@@ -1,6 +1,5 @@
 const path = require('path');
 const fs = require('fs');
-const fetch = require('node-fetch');
 
 async function listLibs(src) {
   const txt = fs.readFileSync(path.join(__dirname, '..', 'requirements.txt'), 'utf8');
@@ -42,78 +41,3 @@ async function listLibs(src) {
   };
 }
 exports.listLibs = listLibs;
-
-async function findOnDisk(src, dest) {
-  console.log(`Organizing packages on disk`, {src, dest});
-  fs.mkdirSync(dest, {recursive: true});
-  let libs = (await listLibs(src));
-  for (const lib of libs.available) {
-    fs.copyFileSync(lib.fullName, path.join(dest, lib.fileName));
-    fs.writeFileSync(path.join(dest, `${lib.name}-${lib.version}.json`),
-      JSON.stringify({
-        name: lib.name,
-        version: lib.version,
-        fileName: lib.fileName,
-      }, null, 2));
-    console.log("Copied", {
-      content: path.join(dest, lib.fileName),
-      meta: path.join(dest, `${lib.name}-${lib.version}.json`),
-    });
-  }
-  libs = await listLibs(dest);
-  fs.writeFileSync(path.join(__dirname, `package_filenames.json`),
-    JSON.stringify(libs.available.map(lib => lib.fileName), null, 2));
-  console.log(`Cached`, {libs: libs.available.map(lib => lib.name)});
-  console.log(`Missing`, {libs: libs.misses.map(lib => lib.name)});
-}
-
-async function findOnNet(src, dest) {
-  console.log(`Caching packages on disk`, {src, dest});
-  fs.mkdirSync(dest, {recursive: true});
-  let libs = await listLibs(dest);
-  console.log(`Cached`, {libs: libs.available.map(lib => lib.name)});
-  for (const lib of libs.misses) {
-    console.log('Fetching', lib);
-    const url = new URL(src);
-    url.pathname = url.pathname + lib.name + '-' + lib.version + '.json';
-    const result = await fetch(url.href);
-    if (result.status === 200) {
-      const data = await result.json();
-      const url2 = new URL(src);
-      url2.pathname = url2.pathname + data.fileName;
-      const result2 = await fetch(url2.href);
-      if (result2.status === 200) {
-        fs.writeFileSync(path.join(dest, `${lib.name}-${lib.version}.json`),
-          JSON.stringify(data, null, 2));
-        fs.writeFileSync(path.join(dest, data.fileName),
-          await result2.buffer());
-      } else {
-        console.error("No payload available", {lib});
-      }
-    } else {
-      console.error("No metadata available", {lib});
-    }
-  }
-  libs = await listLibs(dest);
-  console.log(`Missing`, {libs: libs.misses.map(lib => lib.name)});
-}
-
-async function main(src, dest) {
-  if (!src) {
-    console.error('please supply a source');
-    process.exit(1);
-  }
-  if (!dest) {
-    console.error('please supply a destination');
-    process.exit(1);
-  }
-  if (src.startsWith('http:') || src.startsWith('https:')) {
-    await findOnNet(src, dest);
-    return;
-  }
-  await findOnDisk(src, dest);
-}
-
-if (require.main === module) {
-  main(...process.argv.slice(2)).catch(e => console.error(e));
-}