Skip to content

Commit 8249475

Browse files
authored
[pyodide] move package preparation into a separate script (#1749)
The `pipe.js` script is used by Grist when running pyodide. It loads `packages.js`, which has functions to help prepare pyodide packages, but at run-time it only needs the ability to list them. So we separate the preparation logic out into a separate utility. This helps avoid a desktop app packaging problem involving dependencies of `node-fetch` (gristlabs/grist-desktop#79). There are certainly other ways to resolve the packaging problem, but this is a particularly simple one.
1 parent b0ff75b commit 8249475

File tree

4 files changed

+82
-78
lines changed

4 files changed

+82
-78
lines changed

sandbox/pyodide/Makefile

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -11,7 +11,7 @@ default:
1111
echo "setup # get pyodide node package, and python packages"
1212

1313
fetch_packages:
14-
node ./packages.js https://s3.amazonaws.com/grist-pynbox/pyodide/packages/v$(GRIST_PYODIDE_VERSION)/ _build/packages/
14+
node ./preparePackages.js https://s3.amazonaws.com/grist-pynbox/pyodide/packages/v$(GRIST_PYODIDE_VERSION)/ _build/packages/
1515

1616
build_packages:
1717
./build_packages.sh

sandbox/pyodide/build_packages.sh

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -29,4 +29,4 @@ echo "###############################################################"
2929
echo "## Copy out python packages"
3030

3131
rm -rf _build/packages/
32-
node ./packages.js _build/pyodide/grist-packages/ _build/packages/
32+
node ./preparePackages.js _build/pyodide/grist-packages/ _build/packages/

sandbox/pyodide/packages.js

Lines changed: 0 additions & 76 deletions
Original file line numberDiff line numberDiff line change
@@ -1,6 +1,5 @@
11
const path = require('path');
22
const fs = require('fs');
3-
const fetch = require('node-fetch');
43

54
async function listLibs(src) {
65
const txt = fs.readFileSync(path.join(__dirname, '..', 'requirements.txt'), 'utf8');
@@ -42,78 +41,3 @@ async function listLibs(src) {
4241
};
4342
}
4443
exports.listLibs = listLibs;
45-
46-
async function findOnDisk(src, dest) {
47-
console.log(`Organizing packages on disk`, {src, dest});
48-
fs.mkdirSync(dest, {recursive: true});
49-
let libs = (await listLibs(src));
50-
for (const lib of libs.available) {
51-
fs.copyFileSync(lib.fullName, path.join(dest, lib.fileName));
52-
fs.writeFileSync(path.join(dest, `${lib.name}-${lib.version}.json`),
53-
JSON.stringify({
54-
name: lib.name,
55-
version: lib.version,
56-
fileName: lib.fileName,
57-
}, null, 2));
58-
console.log("Copied", {
59-
content: path.join(dest, lib.fileName),
60-
meta: path.join(dest, `${lib.name}-${lib.version}.json`),
61-
});
62-
}
63-
libs = await listLibs(dest);
64-
fs.writeFileSync(path.join(__dirname, `package_filenames.json`),
65-
JSON.stringify(libs.available.map(lib => lib.fileName), null, 2));
66-
console.log(`Cached`, {libs: libs.available.map(lib => lib.name)});
67-
console.log(`Missing`, {libs: libs.misses.map(lib => lib.name)});
68-
}
69-
70-
async function findOnNet(src, dest) {
71-
console.log(`Caching packages on disk`, {src, dest});
72-
fs.mkdirSync(dest, {recursive: true});
73-
let libs = await listLibs(dest);
74-
console.log(`Cached`, {libs: libs.available.map(lib => lib.name)});
75-
for (const lib of libs.misses) {
76-
console.log('Fetching', lib);
77-
const url = new URL(src);
78-
url.pathname = url.pathname + lib.name + '-' + lib.version + '.json';
79-
const result = await fetch(url.href);
80-
if (result.status === 200) {
81-
const data = await result.json();
82-
const url2 = new URL(src);
83-
url2.pathname = url2.pathname + data.fileName;
84-
const result2 = await fetch(url2.href);
85-
if (result2.status === 200) {
86-
fs.writeFileSync(path.join(dest, `${lib.name}-${lib.version}.json`),
87-
JSON.stringify(data, null, 2));
88-
fs.writeFileSync(path.join(dest, data.fileName),
89-
await result2.buffer());
90-
} else {
91-
console.error("No payload available", {lib});
92-
}
93-
} else {
94-
console.error("No metadata available", {lib});
95-
}
96-
}
97-
libs = await listLibs(dest);
98-
console.log(`Missing`, {libs: libs.misses.map(lib => lib.name)});
99-
}
100-
101-
async function main(src, dest) {
102-
if (!src) {
103-
console.error('please supply a source');
104-
process.exit(1);
105-
}
106-
if (!dest) {
107-
console.error('please supply a destination');
108-
process.exit(1);
109-
}
110-
if (src.startsWith('http:') || src.startsWith('https:')) {
111-
await findOnNet(src, dest);
112-
return;
113-
}
114-
await findOnDisk(src, dest);
115-
}
116-
117-
if (require.main === module) {
118-
main(...process.argv.slice(2)).catch(e => console.error(e));
119-
}

sandbox/pyodide/preparePackages.js

Lines changed: 80 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,80 @@
1+
const fs = require('fs');
2+
const fetch = require('node-fetch');
3+
const path = require('path');
4+
5+
const {listLibs} = require('./packages');
6+
7+
/**
 * Copy pyodide package files found in a local directory into `dest`,
 * writing a small `<name>-<version>.json` metadata file next to each
 * package, then record the final list of available package filenames
 * in `package_filenames.json` beside this script.
 *
 * @param {string} src - directory to scan for packages (via listLibs).
 * @param {string} dest - directory to populate; created if absent.
 */
async function findOnDisk(src, dest) {
  console.log(`Organizing packages on disk`, {src, dest});
  fs.mkdirSync(dest, {recursive: true});
  const found = await listLibs(src);
  for (const lib of found.available) {
    const contentPath = path.join(dest, lib.fileName);
    const metaPath = path.join(dest, `${lib.name}-${lib.version}.json`);
    // Copy the package payload first, then its metadata sidecar.
    fs.copyFileSync(lib.fullName, contentPath);
    const meta = {
      name: lib.name,
      version: lib.version,
      fileName: lib.fileName,
    };
    fs.writeFileSync(metaPath, JSON.stringify(meta, null, 2));
    console.log("Copied", {
      content: contentPath,
      meta: metaPath,
    });
  }
  // Re-scan dest so the manifest reflects exactly what landed there.
  const finalLibs = await listLibs(dest);
  fs.writeFileSync(path.join(__dirname, `package_filenames.json`),
                   JSON.stringify(finalLibs.available.map(lib => lib.fileName), null, 2));
  console.log(`Cached`, {libs: finalLibs.available.map(lib => lib.name)});
  console.log(`Missing`, {libs: finalLibs.misses.map(lib => lib.name)});
}
30+
31+
/**
 * Download any packages missing from `dest`, fetching for each one its
 * metadata JSON and then its payload from the base URL `src`.
 * Failures for individual packages are logged and skipped, not thrown.
 *
 * @param {string} src - base http(s) URL holding packages and metadata.
 * @param {string} dest - local cache directory; created if absent.
 */
async function findOnNet(src, dest) {
  console.log(`Caching packages on disk`, {src, dest});
  fs.mkdirSync(dest, {recursive: true});
  let libs = await listLibs(dest);
  console.log(`Cached`, {libs: libs.available.map(lib => lib.name)});
  for (const lib of libs.misses) {
    console.log('Fetching', lib);
    // Metadata lives at <src>/<name>-<version>.json.
    const metaUrl = new URL(src);
    metaUrl.pathname += `${lib.name}-${lib.version}.json`;
    const metaResponse = await fetch(metaUrl.href);
    if (metaResponse.status !== 200) {
      console.error("No metadata available", {lib});
      continue;
    }
    const data = await metaResponse.json();
    // The metadata names the payload file to fetch next.
    const payloadUrl = new URL(src);
    payloadUrl.pathname += data.fileName;
    const payloadResponse = await fetch(payloadUrl.href);
    if (payloadResponse.status !== 200) {
      console.error("No payload available", {lib});
      continue;
    }
    fs.writeFileSync(path.join(dest, `${lib.name}-${lib.version}.json`),
                     JSON.stringify(data, null, 2));
    // NOTE(review): .buffer() is node-fetch-specific and deprecated in v3;
    // kept as-is to preserve behavior with the pinned dependency.
    fs.writeFileSync(path.join(dest, data.fileName),
                     await payloadResponse.buffer());
  }
  // Report anything still missing after the fetch pass.
  libs = await listLibs(dest);
  console.log(`Missing`, {libs: libs.misses.map(lib => lib.name)});
}
61+
62+
/**
 * Entry point: validate the two CLI arguments and dispatch to the
 * network flow (http/https source) or the on-disk flow otherwise.
 *
 * @param {string} src - source URL or directory.
 * @param {string} dest - destination directory.
 */
async function main(src, dest) {
  if (!src) {
    console.error('please supply a source');
    process.exit(1);
  }
  if (!dest) {
    console.error('please supply a destination');
    process.exit(1);
  }
  const isRemote = src.startsWith('http:') || src.startsWith('https:');
  if (isRemote) {
    await findOnNet(src, dest);
  } else {
    await findOnDisk(src, dest);
  }
}
77+
78+
// Run only when invoked directly (not when required as a module);
// any async failure is reported rather than left as an unhandled rejection.
if (require.main === module) {
  const args = process.argv.slice(2);
  main(...args).catch(e => console.error(e));
}

0 commit comments

Comments
 (0)