remove old build system and unused dependencies

Some of these are for the IE11 legacy build, which has been postponed. They will be
brought back when we bring back the legacy build.
Bruno Windels 2021-12-09 18:04:11 +01:00
parent d9ff4a8484
commit 5c5193ef48
8 changed files with 12 additions and 2823 deletions

View file

@@ -25,35 +25,17 @@
   },
   "homepage": "https://github.com/vector-im/hydrogen-web/#readme",
   "devDependencies": {
-    "@babel/core": "^7.11.1",
-    "@babel/plugin-transform-runtime": "^7.15.8",
-    "@babel/preset-env": "^7.11.0",
-    "@babel/standalone": "^7.15.8",
-    "@rollup/plugin-babel": "^5.1.0",
-    "@rollup/plugin-commonjs": "^15.0.0",
-    "@rollup/plugin-json": "^4.1.0",
-    "@rollup/plugin-multi-entry": "^4.0.0",
-    "@rollup/plugin-node-resolve": "^9.0.0",
-    "@rollup/pluginutils": "^4.1.1",
     "@typescript-eslint/eslint-plugin": "^4.29.2",
     "@typescript-eslint/parser": "^4.29.2",
-    "@vitejs/plugin-legacy": "^1.6.1",
-    "autoprefixer": "^10.4.0",
-    "cheerio": "^1.0.0-rc.3",
-    "commander": "^6.0.0",
     "core-js": "^3.6.5",
     "eslint": "^7.32.0",
     "fake-indexeddb": "^3.1.2",
-    "finalhandler": "^1.1.1",
     "impunity": "^1.0.9",
     "mdn-polyfills": "^5.20.0",
     "node-html-parser": "^4.0.0",
     "postcss-css-variables": "^0.18.0",
     "postcss-flexbugs-fixes": "^5.0.2",
     "regenerator-runtime": "^0.13.7",
-    "rollup": "^2.26.4",
-    "rollup-plugin-cleanup": "^3.1.1",
-    "serve-static": "^1.13.2",
     "typescript": "^4.3.5",
     "vite": "^2.6.14",
     "xxhashjs": "^0.2.2"

View file

@@ -1,213 +0,0 @@
const path = require("path");
const {build} = require("vite");
const {babel, getBabelOutputPlugin} = require('@rollup/plugin-babel');
const {createFilter} = require("@rollup/pluginutils");
const { rollup } = require('rollup');
const { nodeResolve } = require('@rollup/plugin-node-resolve');
const commonjs = require('@rollup/plugin-commonjs');
const VIRTUAL_ENTRY = "hydrogen:legacy-entry";
const NODE_MODULES_NEEDING_TRANSPILATION = ["es6-promise"];
module.exports = function legacyBuild(entryModuleId, entryImportReplacements, chunkName, extraImports) {
let parentRoot;
let code;
let legacyBundleRef;
let legacyBundleFileName;
return {
name: "hydrogen:legacyBuild",
apply: "build",
configResolved: config => {
parentRoot = config.root;
},
async moduleParsed(info) {
if (info.id === entryModuleId) {
code = info.code;
}
},
async buildEnd() {
if (!code) {
throw new Error("couldn't find entry");
}
for (const [importSource, newImportSource] of Object.entries(entryImportReplacements)) {
code = replaceImport(this, code, importSource, newImportSource);
}
code = prependExtraImports(code, extraImports);
const bundleCode = await buildLegacyChunk(parentRoot, chunkName, code);
legacyBundleRef = this.emitFile({
type: "asset",
source: bundleCode,
name: `${chunkName}.js`
});
},
generateBundle() {
if (!legacyBundleRef) {
throw new Error("no bundle");
}
legacyBundleFileName = this.getFileName(legacyBundleRef);
},
transformIndexHtml: {
transform(html) {
if (!legacyBundleFileName) {
throw new Error("no bundle");
}
return [{
tag: "script",
attrs: {type: "text/javascript", nomodule: true, src: legacyBundleFileName},
injectTo: "head"
}];
},
},
}
}
/** We replace the imports ourselves instead of relying on rollup-alias or similar, because
 * we only want to replace imports in the entry module, not anywhere in the import tree.
 * This allows creating subclasses for the legacy build that can still import
 * the non-legacy class as a base class, like LegacyPlatform does with Platform. */
function replaceImport(pluginCtx, code, importSource, newImportSource) {
const ast = pluginCtx.parse(code);
for (const node of ast.body) {
if (node.type === "ImportDeclaration") {
const sourceNode = node.source;
if (sourceNode.value === importSource) {
code = code.substr(0, sourceNode.start) + JSON.stringify(newImportSource) + code.substr(sourceNode.end);
return code;
}
}
}
throw new Error(`Could not find import ${JSON.stringify(importSource)} to replace`);
}
function prependExtraImports(code, extraImports) {
return extraImports.map(i => `import ${JSON.stringify(i)};`).join("\n") + code;
}
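// Worked example (hypothetical module ids, not taken from this repo's config): with
//   entryImportReplacements = {"./platform/web/Platform": "./platform/web/LegacyPlatform"}
//   extraImports = ["./platform/web/legacy-polyfill"]
// an entry containing `import {Platform} from "./platform/web/Platform";` is conceptually
// rewritten to
//   import "./platform/web/legacy-polyfill";
//   import {Platform} from "./platform/web/LegacyPlatform";
// before being handed to buildLegacyChunk below; only the entry module itself is rewritten,
// not anything deeper in the import tree.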
async function buildLegacyChunk(root, chunkName, code) {
const projectRootDir = path.resolve(path.join(root, "../../.."));
const nodeModulesDir = path.join(projectRootDir, "node_modules");
const defaultFilter = createFilter([], [], {resolve: projectRootDir});
const transpiledModuleDirs = NODE_MODULES_NEEDING_TRANSPILATION.map(m => {
return path.join(nodeModulesDir, m);
});
const filterModule = id => {
if (!defaultFilter(id)) {
return false;
}
if (id.endsWith("?url") || id.endsWith("?raw")) {
// TODO is this needed
return true;
}
if (transpiledModuleDirs.some(d => id.startsWith(d))) {
return true;
}
if (id.startsWith(nodeModulesDir)) {
return false;
}
return true;
};
// compile down to whatever IE 11 needs
const babelPlugin = getBabelOutputPlugin({
babelrc: false,
compact: false,
extensions: [".js", ".ts"],
// babelHelpers: 'bundled',
presets: [
[
"@babel/preset-env",
{
modules: false,
useBuiltIns: "usage",
corejs: "3.4",
targets: "IE 11",
// we provide our own promise polyfill (es6-promise)
// with support for synchronous flushing of
// the queue for idb where needed
exclude: ["es.promise", "es.promise.all-settled", "es.promise.finally"]
}
]
]
});
const bundle = await build({
root,
configFile: false,
logLevel: 'error',
build: {
write: false,
minify: false,
target: "esnext",
assetsInlineLimit: 0,
polyfillModulePreload: false,
rollupOptions: {
external: id => !filterModule(id),
input: {
[chunkName]: VIRTUAL_ENTRY
},
output: {
format: "esm",
manualChunks: undefined
},
makeAbsoluteExternalsRelative: false,
},
},
plugins: [
memoryBabelInputPlugin(VIRTUAL_ENTRY, root, code),
babelPlugin
]
});
const assets = Array.isArray(bundle.output) ? bundle.output : [bundle.output];
const mainChunk = assets.find(a => a.name === chunkName);
const babelCode = mainChunk.code;
const bundle2 = await rollup({
plugins: [
memoryBabelInputPlugin(VIRTUAL_ENTRY, root, babelCode),
overridesAsRollupPlugin(new Map(
[["safe-buffer", "./scripts/package-overrides/safe-buffer/index.js"],
["buffer", "./scripts/package-overrides/buffer/index.js"]]), projectRootDir),
commonjs(),
nodeResolve(),
],
input: {
[chunkName]: VIRTUAL_ENTRY
}
});
const {output} = await bundle2.generate({
format: 'iife',
name: `hydrogen`
});
const bundledCode = output[0].code;
return bundledCode;
}
function memoryBabelInputPlugin(entryName, dir, code) {
return {
name: "hydrogen:resolve-legacy-entry",
resolveId(id, importer) {
if (id === entryName) {
return id;
} else if (importer === entryName && id.startsWith("./")) {
return this.resolve(path.join(dir, id));
}
},
load(id) {
if (id === entryName) {
return code;
}
},
}
}
function overridesAsRollupPlugin(mapping, basedir) {
return {
name: "rewrite-imports",
async resolveId (source, importer) {
const target = mapping.get(source);
if (target) {
const resolvedTarget = await this.resolve(path.join(basedir, target));
console.log("resolving", source, resolvedTarget);
return resolvedTarget;
}
}
};
}
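For context, a minimal sketch of how a plugin like this might be registered in a vite config; the config file name, entry paths and import replacements below are assumptions for illustration, not taken from this commit:

// vite.config.js (hypothetical registration of the legacyBuild plugin above)
const path = require("path");
const legacyBuild = require("./scripts/build-plugins/legacy-build");

module.exports = {
    plugins: [
        legacyBuild(
            path.resolve(__dirname, "src/main.js"),                        // entryModuleId: only this module's imports get rewritten
            {"./platform/web/Platform": "./platform/web/LegacyPlatform"},  // entryImportReplacements
            "hydrogen-legacy",                                             // chunkName: name of the emitted nomodule asset
            ["./platform/web/legacy-polyfill"]                             // extraImports prepended to the entry
        )
    ]
};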

View file

@@ -1,578 +0,0 @@
/*
Copyright 2020 Bruno Windels <bruno@windels.cloud>
Copyright 2020 The Matrix.org Foundation C.I.C.
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*/
import {build as snowpackBuild, loadConfiguration} from "snowpack"
import cheerio from "cheerio";
import fsRoot from "fs";
const fs = fsRoot.promises;
import path from "path";
import xxhash from 'xxhashjs';
import { rollup } from 'rollup';
import postcss from "postcss";
import postcssImport from "postcss-import";
import { fileURLToPath } from 'url';
import { dirname } from 'path';
import commander from "commander";
// needed for legacy bundle
import babel from '@rollup/plugin-babel';
// needed to find the polyfill modules in the main-legacy.js bundle
import { nodeResolve } from '@rollup/plugin-node-resolve';
// needed because some of the polyfills are written as commonjs modules
import commonjs from '@rollup/plugin-commonjs';
// multi-entry plugin so we can add polyfill file to main
import multi from '@rollup/plugin-multi-entry';
import removeJsComments from 'rollup-plugin-cleanup';
// replace urls of asset names with content hashed version
import postcssUrl from "postcss-url";
import cssvariables from "postcss-css-variables";
import autoprefixer from "autoprefixer";
import flexbugsFixes from "postcss-flexbugs-fixes";
const __filename = fileURLToPath(import.meta.url);
const __dirname = dirname(__filename);
const projectDir = path.join(__dirname, "../");
const snowpackOutPath = path.join(projectDir, "snowpack-build-output");
const cssSrcDir = path.join(projectDir, "src/platform/web/ui/css/");
const snowpackConfig = await loadConfiguration({buildOptions: {out: snowpackOutPath}}, "snowpack.config.js");
const snowpackOutDir = snowpackConfig.buildOptions.out.substring(projectDir.length);
const srcDir = path.join(projectDir, `${snowpackOutDir}/src/`);
const isPathInSrcDir = path => path.startsWith(srcDir);
const parameters = new commander.Command();
parameters
.option("--modern-only", "don't make a legacy build")
.option("--override-imports <json file>", "pass in a file to override import paths, see doc/SKINNING.md")
.option("--override-css <main css file>", "pass in an alternative main css file")
parameters.parse(process.argv);
/**
* We use Snowpack to handle the translation of TypeScript
* into JavaScript. We thus can't bundle files straight from
* the src directory, since some of them are TypeScript, and since
* they may import Node modules. We thus bundle files after they
* have been processed by Snowpack. This function returns paths
* to the files that have already been pre-processed in this manner.
*/
function srcPath(src) {
return path.join(snowpackOutDir, 'src', src);
}
async function build({modernOnly, overrideImports, overrideCss}) {
await snowpackBuild({config: snowpackConfig});
// get version number
const version = JSON.parse(await fs.readFile(path.join(projectDir, "package.json"), "utf8")).version;
let importOverridesMap;
if (overrideImports) {
importOverridesMap = await readImportOverrides(overrideImports);
}
const devHtml = await fs.readFile(path.join(snowpackOutPath, "index.html"), "utf8");
const doc = cheerio.load(devHtml);
const themes = [];
findThemes(doc, themeName => {
themes.push(themeName);
});
// clear target dir
const targetDir = path.join(projectDir, "target/");
await removeDirIfExists(targetDir);
await createDirs(targetDir, themes);
const assets = new AssetMap(targetDir);
// copy olm assets
const olmAssets = await copyFolder(path.join(projectDir, "lib/olm/"), assets.directory);
assets.addSubMap(olmAssets);
await assets.write(`hydrogen.js`, await buildJs(srcPath("main.js"), [srcPath("platform/web/Platform.js")], importOverridesMap));
if (!modernOnly) {
await assets.write(`hydrogen-legacy.js`, await buildJsLegacy(srcPath("main.js"), [
srcPath('platform/web/legacy-polyfill.js'),
srcPath('platform/web/LegacyPlatform.js')
], importOverridesMap));
await assets.write(`worker.js`, await buildJsLegacy(srcPath("platform/web/worker/main.js"), [srcPath('platform/web/worker/polyfill.js')]));
}
// copy over non-theme assets
const baseConfig = JSON.parse(await fs.readFile(path.join(projectDir, "assets/config.json"), {encoding: "utf8"}));
const downloadSandbox = "download-sandbox.html";
let downloadSandboxHtml = await fs.readFile(path.join(projectDir, `assets/${downloadSandbox}`));
await assets.write(downloadSandbox, downloadSandboxHtml);
// creates the directories where the theme css bundles are placed,
// and writes the theme assets to the asset map so the css bundles can resolve them; do this first
await copyThemeAssets(themes, assets);
await buildCssBundles(buildCssLegacy, themes, assets, overrideCss);
await buildManifest(assets);
// all assets have been added; create a hash from all asset names so unhashed files like index.html are cache-busted too
assets.addToHashForAll("index.html", devHtml);
let swSource = await fs.readFile(path.join(snowpackOutPath, "sw.js"), "utf8");
assets.addToHashForAll("sw.js", swSource);
const globalHash = assets.hashForAll();
await buildServiceWorker(swSource, version, globalHash, assets);
await buildHtml(doc, version, baseConfig, globalHash, modernOnly, assets);
await removeDirIfExists(snowpackOutPath);
console.log(`built hydrogen ${version} (${globalHash}) successfully with ${assets.size} files`);
}
async function findThemes(doc, callback) {
doc("link[rel~=stylesheet][title]").each((i, el) => {
const theme = doc(el);
const href = theme.attr("href");
const themesPrefix = "/themes/";
const prefixIdx = href.indexOf(themesPrefix);
if (prefixIdx !== -1) {
const themeNameStart = prefixIdx + themesPrefix.length;
const themeNameEnd = href.indexOf("/", themeNameStart);
const themeName = href.substr(themeNameStart, themeNameEnd - themeNameStart);
callback(themeName, theme);
}
});
}
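// Illustration (hypothetical markup): a theme stylesheet link such as
//   <link rel="stylesheet" title="Element Light" href="./themes/element/theme.css">
// has "/themes/" in its href, so the callback above receives themeName "element"
// together with the cheerio element, which buildHtml later rewrites to the bundled css.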
async function createDirs(targetDir, themes) {
await fs.mkdir(targetDir);
const themeDir = path.join(targetDir, "themes");
await fs.mkdir(themeDir);
for (const theme of themes) {
await fs.mkdir(path.join(themeDir, theme));
}
}
async function copyThemeAssets(themes, assets) {
for (const theme of themes) {
const themeDstFolder = path.join(assets.directory, `themes/${theme}`);
const themeSrcFolder = path.join(cssSrcDir, `themes/${theme}`);
const themeAssets = await copyFolder(themeSrcFolder, themeDstFolder, file => {
return !file.endsWith(".css");
});
assets.addSubMap(themeAssets);
}
return assets;
}
async function buildHtml(doc, version, baseConfig, globalHash, modernOnly, assets) {
// transform html file
// change path to main.css to css bundle
doc("link[rel=stylesheet]:not([title])").attr("href", assets.resolve(`hydrogen.css`));
// adjust file name of icon on iOS
doc("link[rel=apple-touch-icon]").attr("href", assets.resolve(`icon-maskable.png`));
// change paths to all theme stylesheets
findThemes(doc, (themeName, theme) => {
theme.attr("href", assets.resolve(`themes/${themeName}/bundle.css`));
});
const configJSON = JSON.stringify(Object.assign({}, baseConfig, {
worker: assets.has("worker.js") ? assets.resolve(`worker.js`) : null,
downloadSandbox: assets.resolve("download-sandbox.html"),
serviceWorker: "sw.js",
olm: {
wasm: assets.resolve("olm.wasm"),
legacyBundle: assets.resolve("olm_legacy.js"),
wasmBundle: assets.resolve("olm.js"),
}
}));
const modernScript = `import {main, Platform} from "./${assets.resolve(`hydrogen.js`)}"; main(new Platform(document.body, ${configJSON}));`;
const mainScripts = [
`<script type="module">${wrapWithLicenseComments(modernScript)}</script>`
];
if (!modernOnly) {
const legacyScript = `hydrogen.main(new hydrogen.Platform(document.body, ${configJSON}));`;
mainScripts.push(
`<script type="text/javascript" nomodule src="${assets.resolve(`hydrogen-legacy.js`)}"></script>`,
`<script type="text/javascript" nomodule>${wrapWithLicenseComments(legacyScript)}</script>`
);
}
doc("script#main").replaceWith(mainScripts.join(""));
const versionScript = doc("script#version");
versionScript.attr("type", "text/javascript");
let vSource = versionScript.contents().text();
vSource = vSource.replace(`"%%VERSION%%"`, `"${version}"`);
vSource = vSource.replace(`"%%GLOBAL_HASH%%"`, `"${globalHash}"`);
versionScript.text(wrapWithLicenseComments(vSource));
doc("head").append(`<link rel="manifest" href="${assets.resolve("manifest.json")}">`);
await assets.writeUnhashed("index.html", doc.html());
}
async function buildJs(mainFile, extraFiles, importOverrides) {
// create js bundle
const plugins = [multi(), removeJsComments({comments: "none"})];
if (importOverrides) {
plugins.push(overridesAsRollupPlugin(importOverrides));
}
const bundle = await rollup({
// for fake-indexeddb, so that test-only usage doesn't put it in the bundle
treeshake: {moduleSideEffects: isPathInSrcDir},
input: extraFiles.concat(mainFile),
plugins
});
const {output} = await bundle.generate({
format: 'es',
// TODO: can remove this?
name: `hydrogen`
});
const code = output[0].code;
return wrapWithLicenseComments(code);
}
async function buildJsLegacy(mainFile, extraFiles, importOverrides) {
// compile down to whatever IE 11 needs
const babelPlugin = babel.babel({
babelHelpers: 'bundled',
exclude: 'node_modules/**',
presets: [
[
"@babel/preset-env",
{
useBuiltIns: "entry",
corejs: "3.4",
targets: "IE 11",
// we provide our own promise polyfill (es6-promise)
// with support for synchronous flushing of
// the queue for idb where needed
exclude: ["es.promise", "es.promise.all-settled", "es.promise.finally"]
}
]
]
});
const plugins = [multi(), commonjs()];
if (importOverrides) {
plugins.push(overridesAsRollupPlugin(importOverrides));
}
plugins.push(nodeResolve(), babelPlugin);
// create js bundle
const rollupConfig = {
// for fake-indexeddb, so that test-only usage doesn't put it in the bundle
treeshake: {moduleSideEffects: isPathInSrcDir},
// important that the extraFiles come first,
// so polyfills are available in the global scope
// if needed by the main file
input: extraFiles.concat(mainFile),
plugins
};
const bundle = await rollup(rollupConfig);
const {output} = await bundle.generate({
format: 'iife',
name: `hydrogen`
});
const code = output[0].code;
return wrapWithLicenseComments(code);
}
function wrapWithLicenseComments(code) {
// Add proper license comments to make GNU LibreJS accept the file
const start = '// @license magnet:?xt=urn:btih:8e4f440f4c65981c5bf93c76d35135ba5064d8b7&dn=apache-2.0.txt Apache-2.0';
const end = '// @license-end';
return `${start}\n${code}\n${end}`;
}
const NON_PRECACHED_JS = [
"hydrogen-legacy.js",
"olm_legacy.js",
"worker.js"
];
function isPreCached(asset) {
return asset.endsWith(".svg") ||
asset.endsWith(".png") ||
asset.endsWith(".css") ||
asset.endsWith(".wasm") ||
asset.endsWith(".html") ||
// most environments don't need the worker
asset.endsWith(".js") && !NON_PRECACHED_JS.includes(asset);
}
async function buildManifest(assets) {
const webManifest = JSON.parse(await fs.readFile(path.join(projectDir, "assets/manifest.json"), "utf8"));
// copy manifest icons
for (const icon of webManifest.icons) {
let iconData = await fs.readFile(path.join(projectDir, icon.src));
const iconTargetPath = path.basename(icon.src);
icon.src = await assets.write(iconTargetPath, iconData);
}
await assets.write("manifest.json", JSON.stringify(webManifest));
}
async function buildServiceWorker(swSource, version, globalHash, assets) {
const unhashedPreCachedAssets = ["index.html"];
const hashedPreCachedAssets = [];
const hashedCachedOnRequestAssets = [];
for (const [unresolved, resolved] of assets) {
if (unresolved === resolved) {
unhashedPreCachedAssets.push(resolved);
} else if (isPreCached(unresolved)) {
hashedPreCachedAssets.push(resolved);
} else {
hashedCachedOnRequestAssets.push(resolved);
}
}
const replaceArrayInSource = (name, value) => {
const newSource = swSource.replace(`${name} = []`, `${name} = ${JSON.stringify(value)}`);
if (newSource === swSource) {
throw new Error(`${name} was not found in the service worker source`);
}
return newSource;
};
const replaceStringInSource = (name, value) => {
const newSource = swSource.replace(new RegExp(`${name}\\s=\\s"[^"]*"`), `${name} = ${JSON.stringify(value)}`);
if (newSource === swSource) {
throw new Error(`${name} was not found in the service worker source`);
}
return newSource;
};
// write service worker
swSource = swSource.replace(`"%%VERSION%%"`, `"${version}"`);
swSource = swSource.replace(`"%%GLOBAL_HASH%%"`, `"${globalHash}"`);
swSource = replaceArrayInSource("UNHASHED_PRECACHED_ASSETS", unhashedPreCachedAssets);
swSource = replaceArrayInSource("HASHED_PRECACHED_ASSETS", hashedPreCachedAssets);
swSource = replaceArrayInSource("HASHED_CACHED_ON_REQUEST_ASSETS", hashedCachedOnRequestAssets);
swSource = replaceStringInSource("NOTIFICATION_BADGE_ICON", assets.resolve("icon.png"));
// service worker should not have a hashed name as it is polled by the browser for updates
await assets.writeUnhashed("sw.js", swSource);
}
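// The replacements above assume the service worker source contains these placeholders
// verbatim (a sketch of the expected shape of sw.js, not the actual file):
//   const UNHASHED_PRECACHED_ASSETS = [];
//   const HASHED_PRECACHED_ASSETS = [];
//   const HASHED_CACHED_ON_REQUEST_ASSETS = [];
//   const NOTIFICATION_BADGE_ICON = "assets/icon.png";
//   const VERSION = "%%VERSION%%";
//   const GLOBAL_HASH = "%%GLOBAL_HASH%%";
// replaceArrayInSource/replaceStringInSource throw when a placeholder is missing, so a
// renamed constant in sw.js fails the build instead of silently shipping empty caches.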
async function buildCssBundles(buildFn, themes, assets, mainCssFile = null) {
if (!mainCssFile) {
mainCssFile = path.join(cssSrcDir, "main.css");
}
const bundleCss = await buildFn(mainCssFile);
await assets.write(`hydrogen.css`, bundleCss);
for (const theme of themes) {
const themeRelPath = `themes/${theme}/`;
const themeRoot = path.join(cssSrcDir, themeRelPath);
const assetUrlMapper = ({absolutePath}) => {
if (!absolutePath.startsWith(themeRoot)) {
throw new Error("resource is out of theme directory: " + absolutePath);
}
const relPath = absolutePath.substr(themeRoot.length);
const hashedDstPath = assets.resolve(path.join(themeRelPath, relPath));
if (hashedDstPath) {
return hashedDstPath.substr(themeRelPath.length);
}
};
const themeCss = await buildFn(path.join(themeRoot, `theme.css`), assetUrlMapper);
await assets.write(path.join(themeRelPath, `bundle.css`), themeCss);
}
}
// async function buildCss(entryPath, urlMapper = null) {
// const preCss = await fs.readFile(entryPath, "utf8");
// const options = [postcssImport];
// if (urlMapper) {
// options.push(postcssUrl({url: urlMapper}));
// }
// const cssBundler = postcss(options);
// const result = await cssBundler.process(preCss, {from: entryPath});
// return result.css;
// }
async function buildCssLegacy(entryPath, urlMapper = null) {
const preCss = await fs.readFile(entryPath, "utf8");
const options = [
postcssImport,
cssvariables({
preserve: (declaration) => {
return declaration.value.indexOf("var(--ios-") == 0;
}
}),
autoprefixer({overrideBrowserslist: ["IE 11"], grid: "no-autoplace"}),
flexbugsFixes()
];
if (urlMapper) {
options.push(postcssUrl({url: urlMapper}));
}
const cssBundler = postcss(options);
const result = await cssBundler.process(preCss, {from: entryPath});
return result.css;
}
async function removeDirIfExists(targetDir) {
try {
await fs.rmdir(targetDir, {recursive: true});
} catch (err) {
if (err.code !== "ENOENT") {
throw err;
}
}
}
async function copyFolder(srcRoot, dstRoot, filter, assets = null) {
assets = assets || new AssetMap(dstRoot);
const dirEnts = await fs.readdir(srcRoot, {withFileTypes: true});
for (const dirEnt of dirEnts) {
const dstPath = path.join(dstRoot, dirEnt.name);
const srcPath = path.join(srcRoot, dirEnt.name);
if (dirEnt.isDirectory()) {
await fs.mkdir(dstPath);
await copyFolder(srcPath, dstPath, filter, assets);
} else if ((dirEnt.isFile() || dirEnt.isSymbolicLink()) && (!filter || filter(srcPath))) {
const content = await fs.readFile(srcPath);
await assets.write(dstPath, content);
}
}
return assets;
}
function contentHash(str) {
var hasher = new xxhash.h32(0);
hasher.update(str);
return hasher.digest();
}
class AssetMap {
constructor(targetDir) {
// remove last / if any, so substr in create works well
this._targetDir = path.resolve(targetDir);
this._assets = new Map();
// hashes for unhashed resources so changes in these resources also contribute to the hashForAll
this._unhashedHashes = [];
}
_toRelPath(resourcePath) {
let relPath = resourcePath;
if (path.isAbsolute(resourcePath)) {
if (!resourcePath.startsWith(this._targetDir)) {
throw new Error(`absolute path ${resourcePath} that is not within target dir ${this._targetDir}`);
}
relPath = resourcePath.substr(this._targetDir.length + 1); // + 1 for the /
}
return relPath;
}
_create(resourcePath, content) {
const relPath = this._toRelPath(resourcePath);
const hash = contentHash(Buffer.from(content));
const dir = path.dirname(relPath);
const extname = path.extname(relPath);
const basename = path.basename(relPath, extname);
const dstRelPath = path.join(dir, `${basename}-${hash}${extname}`);
this._assets.set(relPath, dstRelPath);
return dstRelPath;
}
async write(resourcePath, content) {
const relPath = this._create(resourcePath, content);
const fullPath = path.join(this.directory, relPath);
if (typeof content === "string") {
await fs.writeFile(fullPath, content, "utf8");
} else {
await fs.writeFile(fullPath, content);
}
return relPath;
}
async writeUnhashed(resourcePath, content) {
const relPath = this._toRelPath(resourcePath);
this._assets.set(relPath, relPath);
const fullPath = path.join(this.directory, relPath);
if (typeof content === "string") {
await fs.writeFile(fullPath, content, "utf8");
} else {
await fs.writeFile(fullPath, content);
}
return relPath;
}
get directory() {
return this._targetDir;
}
resolve(resourcePath) {
const relPath = this._toRelPath(resourcePath);
const result = this._assets.get(relPath);
if (!result) {
throw new Error(`unknown path: ${relPath}, only know ${Array.from(this._assets.keys()).join(", ")}`);
}
return result;
}
addSubMap(assetMap) {
if (!assetMap.directory.startsWith(this.directory)) {
throw new Error(`map directory doesn't start with this directory: ${assetMap.directory} ${this.directory}`);
}
const relSubRoot = assetMap.directory.substr(this.directory.length + 1);
for (const [key, value] of assetMap._assets.entries()) {
this._assets.set(path.join(relSubRoot, key), path.join(relSubRoot, value));
}
}
[Symbol.iterator]() {
return this._assets.entries();
}
isUnhashed(relPath) {
const resolvedPath = this._assets.get(relPath);
if (!resolvedPath) {
throw new Error("Unknown asset: " + relPath);
}
return relPath === resolvedPath;
}
get size() {
return this._assets.size;
}
has(relPath) {
return this._assets.has(relPath);
}
hashForAll() {
const globalHashAssets = Array.from(this).map(([, resolved]) => resolved);
globalHashAssets.push(...this._unhashedHashes);
globalHashAssets.sort();
return contentHash(globalHashAssets.join(","));
}
addToHashForAll(resourcePath, content) {
this._unhashedHashes.push(`${resourcePath}-${contentHash(Buffer.from(content))}`);
}
}
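// Illustration (hypothetical hash value): assets.write("hydrogen.css", css) content-hashes
// the bytes with xxhash32 and records "hydrogen.css" -> "hydrogen-4021847163.css", writing
// the hashed file to disk; assets.resolve("hydrogen.css") then returns that hashed name,
// which is what buildHtml and the service worker asset lists embed. writeUnhashed keeps
// the original name, which is why index.html and sw.js stay fetchable at stable URLs.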
async function readImportOverrides(filename) {
const json = await fs.readFile(filename, "utf8");
const mapping = new Map(Object.entries(JSON.parse(json)));
return {
basedir: path.dirname(path.resolve(filename))+path.sep,
mapping
};
}
function overridesAsRollupPlugin(importOverrides) {
const {mapping, basedir} = importOverrides;
return {
name: "rewrite-imports",
resolveId (source, importer) {
let file;
if (source.startsWith(path.sep)) {
file = source;
} else {
file = path.join(path.dirname(importer), source);
}
if (file.startsWith(basedir)) {
const searchPath = file.substr(basedir.length);
const replacingPath = mapping.get(searchPath);
if (replacingPath) {
console.info(`replacing ${searchPath} with ${replacingPath}`);
return path.join(basedir, replacingPath);
}
}
return null;
}
};
}
build(parameters).catch(err => console.error(err));
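As a reference for the --override-imports option handled above: readImportOverrides expects a JSON object whose keys are import paths relative to the JSON file's directory and whose values are the replacement modules. A minimal hypothetical example (the paths, file name and invocation are invented for illustration; doc/SKINNING.md documents the real conventions):

// override-imports.json, passed as: node scripts/build.mjs --override-imports override-imports.json
// (hypothetical file name and invocation)
{
    "src/platform/web/ui/css/themes/element/theme.css": "my-skin/theme.css"
}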

View file

@@ -1,12 +0,0 @@
import fsRoot from "fs";
const fs = fsRoot.promises;
export async function removeDirIfExists(targetDir) {
try {
await fs.rmdir(targetDir, {recursive: true});
} catch (err) {
if (err.code !== "ENOENT") {
throw err;
}
}
}

View file

@@ -1,6 +0,0 @@
git checkout gh-pages
cp -R target/* .
git add $(find . -maxdepth 1 -type f)
git add themes
git commit -m "update hydrogen"
git checkout master

View file

@@ -1,43 +0,0 @@
/*
Copyright 2020 Bruno Windels <bruno@windels.cloud>
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*/
const finalhandler = require('finalhandler')
const http = require('http')
const serveStatic = require('serve-static')
const path = require('path');
// Serve up parent directory with cache disabled
const serve = serveStatic(
path.resolve(__dirname, "../"),
{
etag: false,
setHeaders: res => {
res.setHeader("Pragma", "no-cache");
res.setHeader("Cache-Control", "no-cache, no-store, must-revalidate");
res.setHeader("Expires", "Wed, 21 Oct 2015 07:28:00 GMT");
},
index: ['index.html', 'index.htm']
}
);
// Create server
const server = http.createServer(function onRequest (req, res) {
console.log(req.method, req.url);
serve(req, res, finalhandler(req, res))
});
// Listen
server.listen(3000);

View file

@@ -1,37 +0,0 @@
// Snowpack Configuration File
// See all supported options: https://www.snowpack.dev/reference/configuration
/** @type {import("snowpack").SnowpackUserConfig } */
module.exports = {
mount: {
// More specific paths before less specific paths (if they overlap)
"src/platform/web/docroot": "/",
"src": "/src",
"lib": {url: "/lib", static: true },
"assets": "/assets",
/* ... */
},
exclude: [
/* Avoid scanning scripts which use dev-dependencies and pull in babel, rollup, etc. */
'**/node_modules/**/*',
'**/scripts/**',
'**/target/**',
'**/prototypes/**',
'**/src/platform/web/legacy-polyfill.js',
'**/src/platform/web/worker/polyfill.js'
],
plugins: [
/* ... */
],
packageOptions: {
/* ... */
},
devOptions: {
open: "none",
hmr: false,
/* ... */
},
buildOptions: {
/* ... */
},
};
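For readers unfamiliar with Snowpack's mount option, a short illustration of how the mapping above translates request URLs into files on disk (the request paths are hypothetical examples):

// "/index.html"         -> src/platform/web/docroot/index.html (docroot is mounted at the site root)
// "/src/main.js"        -> src/main.js, with TypeScript translated to JavaScript by Snowpack
// "/lib/olm/olm.wasm"   -> lib/olm/olm.wasm, served as-is because the lib mount is static
// "/assets/config.json" -> assets/config.json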

yarn.lock (1928 lines changed)

File diff suppressed because it is too large