Compare commits

No commits in common. "master" and "bwindels/dontcrashoninvalidimg" have entirely different histories.

605 changed files with 10172 additions and 53648 deletions.

@@ -1,2 +0,0 @@
-node_modules
-target
.eslintrc.js (15 changes)
@@ -5,21 +5,10 @@ module.exports = {
     },
     "extends": "eslint:recommended",
     "parserOptions": {
-        "ecmaVersion": 2020,
+        "ecmaVersion": 2018,
         "sourceType": "module"
     },
     "rules": {
-        "no-console": "off",
-        "no-empty": "off",
-        "no-prototype-builtins": "off",
-        "no-unused-vars": "warn"
-    },
-    "globals": {
-        "DEFINE_VERSION": "readonly",
-        "DEFINE_GLOBAL_HASH": "readonly",
-        // only available in sw.js
-        "DEFINE_UNHASHED_PRECACHED_ASSETS": "readonly",
-        "DEFINE_HASHED_PRECACHED_ASSETS": "readonly",
-        "DEFINE_HASHED_CACHED_ON_REQUEST_ASSETS": "readonly"
+        "no-console": "off"
     }
 };
.github/workflows/codechecks.js.yml (vendored, 47 changes)
@@ -1,47 +0,0 @@
# This workflow will do a clean install of node dependencies, build the source code and run tests across different versions of node
# For more information see: https://help.github.com/actions/language-and-framework-guides/using-nodejs-with-github-actions

# yarn cache setup from https://www.karltarvas.com/2020/12/09/github-actions-cache-yarn-install.html

name: Code checks

on:
  push:
    branches: [ master ]
  pull_request:
    branches: [ master ]

jobs:
  build:
    runs-on: ubuntu-latest
    strategy:
      matrix:
        node-version: [14.x]
    steps:
    - name: Checkout source
      uses: actions/checkout@v2
    - name: Install tools
      uses: actions/setup-node@v2
      with:
        node-version: ${{ matrix.node-version }}
    # See: https://github.com/actions/cache/blob/main/examples.md#node---yarn
    - name: Get Yarn cache directory
      id: yarn-cache-dir-path
      run: echo "::set-output name=dir::$(yarn cache dir)"
    - name: Use Yarn cache
      uses: actions/cache@v2
      id: yarn-cache
      with:
        path: ${{ steps.yarn-cache-dir-path.outputs.dir }}
        key: ${{ runner.os }}-yarn-${{ matrix.node-version }}-${{ hashFiles('**/yarn.lock') }}
    - name: Install dependencies
      run: yarn install --prefer-offline --frozen-lockfile
    - name: Unit tests
      run: yarn test
    - name: Lint
      run: yarn run lint-ci
    - name: Typescript
      run: yarn run tsc
.github/workflows/docker-publish.yml (vendored, 44 changes)
@@ -1,44 +0,0 @@
name: Container Image

on:
  push:
    branches: [ master ]
    tags: [ 'v*' ]
  pull_request:
    branches: [ master ]

env:
  IMAGE_NAME: ${{ github.repository }}
  REGISTRY: ghcr.io

jobs:
  push:
    runs-on: ubuntu-latest

    permissions:
      contents: read
      packages: write

    steps:
      - name: Checkout repository
        uses: actions/checkout@v2

      - name: Log into registry ${{ env.REGISTRY }}
        uses: docker/login-action@v1
        with:
          registry: ${{ env.REGISTRY }}
          username: ${{ github.actor }}
          password: ${{ secrets.GITHUB_TOKEN }}

      - name: Extract Docker metadata
        id: meta
        uses: docker/metadata-action@v3
        with:
          images: ${{ env.REGISTRY }}/${{ env.IMAGE_NAME }}

      - name: Build and push Docker image
        uses: docker/build-push-action@v2
        with:
          push: ${{ github.event_name != 'pull_request' }}
          tags: ${{ steps.meta.outputs.tags }}
          labels: ${{ steps.meta.outputs.labels }}
.gitignore (vendored, 6 changes)
@@ -1,13 +1,7 @@
 *.sublime-project
 *.sublime-workspace
-.DS_Store
 node_modules
 fetchlogs
 sessionexports
 bundle.js
 target
-lib
-*.tar.gz
-.eslintcache
-.tmp
-tmp/
@@ -1,61 +0,0 @@
image: docker.io/alpine

stages:
  - test
  - build

.yarn-template:
  image: docker.io/node
  before_script:
    - yarn install
  cache:
    paths:
      - node_modules
test:
  extends: .yarn-template
  stage: test
  script:
    - yarn test

build:
  extends: .yarn-template
  stage: build
  script:
    - yarn build
  artifacts:
    paths:
      - target

.docker-template:
  image: docker.io/docker
  stage: build
  services:
    - docker:dind
  before_script:
    - docker login -u "$CI_REGISTRY_USER" -p "$CI_REGISTRY_PASSWORD" $CI_REGISTRY

docker-release:
  extends: .docker-template
  rules:
    - if: '$CI_COMMIT_TAG =~ /^v\d+\.\d+\.\d+$/'
  script:
    - docker build --pull -t "${CI_REGISTRY_IMAGE}:latest" -t "${CI_REGISTRY_IMAGE}:${CI_COMMIT_TAG}" .
    - docker push "${CI_REGISTRY_IMAGE}:latest"
    - docker push "${CI_REGISTRY_IMAGE}:${CI_COMMIT_TAG}"

docker-tags:
  extends: .docker-template
  rules:
    - if: '$CI_COMMIT_TAG && $CI_COMMIT_TAG !~ /^v\d+\.\d+\.\d+$/'
  script:
    - docker build --pull -t "${CI_REGISTRY_IMAGE}:${CI_COMMIT_TAG}" .
    - docker push "${CI_REGISTRY_IMAGE}:${CI_COMMIT_TAG}"

docker-branches:
  extends: .docker-template
  rules:
    - if: $CI_COMMIT_BRANCH
  script:
    - docker build --pull -t "${CI_REGISTRY_IMAGE}:${CI_COMMIT_REF_SLUG}" .
    - docker push "${CI_REGISTRY_IMAGE}:${CI_COMMIT_REF_SLUG}"
@@ -1,25 +0,0 @@
module.exports = {
    root: true,
    env: {
        "browser": true,
        "es6": true
    },
    extends: [
        // "plugin:@typescript-eslint/recommended",
        // "plugin:@typescript-eslint/recommended-requiring-type-checking",
    ],
    parser: '@typescript-eslint/parser',
    parserOptions: {
        "ecmaVersion": 2020,
        "sourceType": "module",
        "project": "./tsconfig.json"
    },
    plugins: [
        '@typescript-eslint',
    ],
    rules: {
        "@typescript-eslint/no-floating-promises": 2,
        "@typescript-eslint/no-misused-promises": 2,
        "semi": ["error", "always"]
    }
};
@@ -1,18 +0,0 @@
pipeline:
  buildfrontend:
    image: node:16
    commands:
      - yarn install --prefer-offline --frozen-lockfile
      - yarn test
      - yarn run lint-ci
      - yarn run tsc
      - yarn build

  deploy:
    image: python
    when:
      event: push
      branch: master
    commands:
      - make ci-deploy
    secrets: [ GITEA_WRITE_DEPLOY_KEY, LIBREPAGES_DEPLOY_SECRET ]
CONTRIBUTING.md (150 changes)
@@ -1,150 +0,0 @@
Contributing code to hydrogen-web
==================================

Everyone is welcome to contribute code to hydrogen-web, provided that they are
willing to license their contributions under the same license as the project
itself. We follow a simple 'inbound=outbound' model for contributions: the act
of submitting an 'inbound' contribution means that the contributor agrees to
license the code under the same terms as the project's overall 'outbound'
license - in this case, Apache Software License v2 (see
[LICENSE](LICENSE)).

How to contribute
-----------------

The preferred and easiest way to contribute changes to the project is to fork
it on github, and then create a pull request to ask us to pull your changes
into our repo (https://help.github.com/articles/using-pull-requests/).

We use GitHub's pull request workflow to review the contribution, and either
ask you to make any refinements needed or merge it and make them ourselves.

Things that should go into your PR description:
 * References to any bugs fixed by the change (in GitHub's `Fixes` notation)
 * Describe the why and what is changing in the PR description so it's easy for
   onlookers and reviewers to onboard and context switch.
 * If your PR makes visual changes, include both **before** and **after** screenshots
   to easily compare and discuss what's changing.
 * Include a step-by-step testing strategy so that a reviewer can check out the
   code locally and easily get to the point of testing your change.
 * Add comments to the diff for the reviewer that might help them to understand
   why the change is necessary or how they might better understand and review it.

We use continuous integration, and all pull requests get automatically tested:
if your change breaks the build, then the PR will show that there are failed
checks, so please check back after a few minutes.

Tests
-----
If your PR is a feature then we require that the PR also includes tests.
These need to test that your feature works as expected and ideally test edge cases too.

Tests are written as unit tests by exporting a `tests` function from the file to be tested.
The function returns an object where the key is the test label, and the value is a
function that accepts an [assert](https://nodejs.org/api/assert.html) object, and returns a Promise or nothing.

Note that there is currently a limitation that files that are not indirectly included from `src/platform/web/main.js` won't be found by the runner.

You can run the tests by running `yarn test`.
This uses the [impunity](https://github.com/bwindels/impunity) runner.

We don't require tests for bug fixes.

In the future we may formalise this more.

Code style
----------
The js-sdk aims to target TypeScript/ES6. All new files should be written in
TypeScript and existing files should use ES6 principles where possible.

Please disable any automatic formatting tools you may have active.
If present, you'll be asked to undo any unrelated whitespace changes during code review.

In general, members should not be exported as a default export; avoid using `export default`.

The remaining code-style for hydrogen is [in the process of being documented](codestyle.md), but
contributors are encouraged to read the
[code style document for matrix-react-sdk](https://github.com/matrix-org/matrix-react-sdk/blob/master/code_style.md)
and follow the principles set out there.

Please ensure your changes match the cosmetic style of the existing project,
and ***never*** mix cosmetic and functional changes in the same commit, as it
makes it horribly hard to review otherwise.

Attribution
-----------
If you change or create a file, feel free to add yourself to the copyright holders
in the license header of that file.

Sign off
--------
In order to have a concrete record that your contribution is intentional
and you agree to license it under the same terms as the project's license, we've
adopted the same lightweight approach that the Linux Kernel
(https://www.kernel.org/doc/Documentation/SubmittingPatches), Docker
(https://github.com/docker/docker/blob/master/CONTRIBUTING.md), and many other
projects use: the DCO (Developer Certificate of Origin:
http://developercertificate.org/). This is a simple declaration that you wrote
the contribution or otherwise have the right to contribute it to Matrix:

```
Developer Certificate of Origin
Version 1.1

Copyright (C) 2004, 2006 The Linux Foundation and its contributors.
660 York Street, Suite 102,
San Francisco, CA 94110 USA

Everyone is permitted to copy and distribute verbatim copies of this
license document, but changing it is not allowed.

Developer's Certificate of Origin 1.1

By making a contribution to this project, I certify that:

(a) The contribution was created in whole or in part by me and I
    have the right to submit it under the open source license
    indicated in the file; or

(b) The contribution is based upon previous work that, to the best
    of my knowledge, is covered under an appropriate open source
    license and I have the right under that license to submit that
    work with modifications, whether created in whole or in part
    by me, under the same open source license (unless I am
    permitted to submit under a different license), as indicated
    in the file; or

(c) The contribution was provided directly to me by some other
    person who certified (a), (b) or (c) and I have not modified
    it.

(d) I understand and agree that this project and the contribution
    are public and that a record of the contribution (including all
    personal information I submit with it, including my sign-off) is
    maintained indefinitely and may be redistributed consistent with
    this project or the open source license(s) involved.
```

If you agree to this for your contribution, then all that's needed is to
include the line in your commit or pull request comment:

```
Signed-off-by: Your Name <your@email.example.org>
```

We accept contributions under a legally identifiable name, such as your name on
government documentation or common-law names (names claimed by legitimate usage
or repute). Unfortunately, we cannot accept anonymous contributions at this
time.

Git allows you to add this signoff automatically when using the `-s` flag to
`git commit`, which uses the name and email set in your `user.name` and
`user.email` git configs.

If you forgot to sign off your commits before making your pull request and are
on Git 2.17+ you can mass signoff using rebase:

```
git rebase --signoff origin/develop
```
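As a companion to the "Tests" section of CONTRIBUTING.md above, here is a minimal sketch of the described convention: export a `tests` function that returns an object mapping test labels to functions that take an `assert` object and return a Promise or nothing. The module and helper names are made up for illustration; only the shape of the `tests` export comes from the document.

```js
// hypothetical file: src/timeline/deduplicate.js
export function deduplicate(events) {
    // keep only the first occurrence of every event id
    const seen = new Set();
    return events.filter(e => !seen.has(e.event_id) && seen.add(e.event_id));
}

// the impunity runner picks this up when the file is (indirectly)
// imported from src/platform/web/main.js
export function tests() {
    return {
        "duplicate event ids are removed": assert => {
            const result = deduplicate([{event_id: "$a"}, {event_id: "$a"}, {event_id: "$b"}]);
            assert.equal(result.length, 2);
        },
        "tests may also be async and return a promise": async assert => {
            const result = await Promise.resolve(deduplicate([]));
            assert.deepEqual(result, []);
        }
    };
}
```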
@@ -1,9 +0,0 @@
FROM docker.io/node:alpine as builder
RUN apk add --no-cache git python3 build-base
COPY . /app
WORKDIR /app
RUN yarn install \
    && yarn build

FROM docker.io/nginx:alpine
COPY --from=builder /app/target /usr/share/nginx/html
@@ -1,7 +0,0 @@
FROM docker.io/node:alpine
RUN apk add --no-cache git python3 build-base
COPY . /code
WORKDIR /code
RUN yarn install
EXPOSE 3000
ENTRYPOINT ["yarn", "start"]
Makefile (14 changes)
@@ -1,14 +0,0 @@
ci-deploy: ## Deploy from CI/CD. Only call from within CI
	@if [ "${CI}" != "woodpecker" ]; \
		then echo "Only call from within CI. Will re-write your local Git configuration. To override, set export CI=woodpecker"; \
		exit 1; \
	fi
	git config --global user.email "${CI_COMMIT_AUTHOR_EMAIL}"
	git config --global user.name "${CI_COMMIT_AUTHOR}"
	./scripts/ci.sh --commit-files librepages target "${CI_COMMIT_AUTHOR} <${CI_COMMIT_AUTHOR_EMAIL}>"
	./scripts/ci.sh --init "$$GITEA_WRITE_DEPLOY_KEY"
	./scripts/ci.sh --deploy ${LIBREPAGES_DEPLOY_SECRET} librepages
	./scripts/ci.sh --clean

help: ## Prints help for targets with comments
	@cat $(MAKEFILE_LIST) | grep -E '^[a-zA-Z_-]+:.*?## .*$$' | awk 'BEGIN {FS = ":.*?## "}; {printf "\033[36m%-30s\033[0m %s\n", $$1, $$2}'
README.md (41 changes)
@@ -1,8 +1,6 @@
-[![status-badge](https://ci.batsense.net/api/badges/mystiq/hydrogen-web/status.svg)](https://ci.batsense.net/mystiq/hydrogen-web)
-
 # Hydrogen
 
-A minimal [Matrix](https://matrix.org/) chat client, focused on performance, offline functionality, and broad browser support. This is work in progress and not yet ready for primetime. Bug reports are welcome, but please don't file any feature requests or other missing things to be on par with Element Web.
+A minimal [Matrix](https://matrix.org/) chat client, focused on performance, offline functionality, and broad browser support. This is work in progress and not yet ready for primetime. We're currently not accepting any externally reported issues (features, bug reports, ...) at this time.
 
 ## Goals
@@ -12,35 +10,16 @@ Hydrogen's goals are:
 - It is a standalone webapp, but can also be easily embedded into an existing website/webapp to add chat capabilities.
 - Loading (unused) parts of the application after initial page load should be supported
 
-For embedded usage, see the [SDK instructions](doc/SDK.md).
-
-If you find this interesting, come and discuss on [`#hydrogen:matrix.org`](https://matrix.to/#/#hydrogen:matrix.org).
+## Status
+
+Hydrogen can currently log you in, or pick an existing session, sync already joined rooms, fill gaps in the timeline, and send text messages. Everything is stored locally.
+
+## Why
+
+For every interaction or network response (syncing, filling a gap), Hydrogen starts a transaction in indexedb, and only commits it once everything went well. This helps to keep your storage always in a consistent state. As little data is kept in memory as well, and while scrolling in the above GIF, everything is loaded straight from the storage.
+
+If you find this interesting, feel free to reach me at `@bwindels:matrix.org`.
 
 # How to use
 
-Hydrogen is deployed to [hydrogen.element.io](https://hydrogen.element.io). You can also deploy Hydrogen on your own web server:
-
-1. Download the [latest release package](https://github.com/vector-im/hydrogen-web/releases).
-1. Extract the package to the public directory of your web server.
-1. If this is your first deploy:
-    1. copy `config.sample.json` to `config.json` and if needed, make any modifications (unless you've set up your own [sygnal](https://github.com/matrix-org/sygnal) instance, you don't need to change anything in the `push` section).
-    1. Disable caching entirely on the server for:
-        - `index.html`
-        - `sw.js`
-        - `config.json`
-        - All theme manifests referenced in the `themeManifests` of `config.json`, these files are typically called `theme-{name}.json`.
-
-    These resources will still be cached client-side by the service worker. Because of this, you'll still need to refresh the app twice before config.json changes are applied.
-
-## Set up a dev environment
-
-You can run Hydrogen locally by the following commands in the terminal:
-
-- `yarn install` (only the first time)
-- `yarn start` in the terminal
-
-Now point your browser to `http://localhost:3000`. If you prefer, you can also [use docker](doc/docker.md).
-
-# FAQ
-
-Some frequently asked questions are answered [here](doc/FAQ.md).
+Try it locally by running `npm install dev` (only the first time) and `npm start` in the terminal, and point your browser to `http://localhost:3000`.
codestyle.md (12 changes)
@@ -1,12 +0,0 @@

# Code-style

- methods that return a promise should always use async/await,
  otherwise synchronous errors can get swallowed;
  you can return a promise without awaiting it though.
- only named exports, no default exports,
  otherwise it becomes hard to remember what was a default/named export
- should we return promises from storage mutation calls? probably not, as we don't await them anywhere. only read calls should return promises?
    - we don't anymore
- don't use these features, as they are not widely enough supported:
    - [lookbehind in regular expressions](https://caniuse.com/js-regexp-lookbehind)
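To illustrate the first rule above (a sketch, not code from the repository; `storage` and its methods are hypothetical): if a method that builds a promise chain throws synchronously, a caller that only attaches `.catch()` to the returned promise never sees the error, whereas marking the method `async` turns the synchronous throw into a rejection.

```js
function loadPlain(storage) {
    // if storage.getSession() throws synchronously, no promise is ever returned,
    // so loadPlain(storage).catch(...) cannot handle the error
    return storage.getSession().then(session => session.load());
}

async function loadSafe(storage) {
    // the same synchronous throw now becomes a rejected promise,
    // so loadSafe(storage).catch(...) always sees it
    const session = await storage.getSession();
    return session.load();
}
```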
doc/FAQ.md (35 changes)
@@ -1,35 +0,0 @@
# FAQ

## What browsers are supported?

Internet Explorer 11, Chrome [1], Firefox [1] (not in a private window), Edge [1], Safari [1] and any mobile versions of these. It will probably also work on any derivatives of these.

1: Because of https://github.com/vector-im/hydrogen-web/issues/230, only [more recent versions](https://caniuse.com/mdn-javascript_operators_optional_chaining) are supported.

TorBrowser ships a crippled IndexedDB implementation and will not work. At some point we should support a memory store as a fallback, but that will still give a sub-par experience with end-to-end encryption.

It used to work in pre-webkit Edge, so that it would run on Windows Phone, but that support has probably bit-rotted as it isn't tested anymore.

## Is there a way to run the app as a desktop app?

You can install Hydrogen as a PWA using Chrome/Chromium on any platform or Edge on Windows. Gnome Web/Epiphany also allows you to "Install site as web application". There is no Electron build of Hydrogen, and there will likely be none in the near future, as Electron complicates the release process considerably. Once Hydrogen is more mature and feature complete, we might reconsider and use [Tauri](https://tauri.studio) if there are compelling use cases not possible with PWAs. For now though, we want to keep development and releasing fast and nimble ;)

## Is feature X supported?

If you can't find an easy way to locate the feature you are looking for, then the answer is usually "no, not yet" :) But here are some things people have asked about in the past:

### How does newline work? Shift+Enter has no effect.

That's not yet a feature, as Hydrogen just uses a single-line text box for message input for now.

## How can I verify my session from Element?

You can currently only verify by comparing keys manually. In Element, go to your own profile in the right panel, click on the Hydrogen device and select Manually Verify by Text. The session key displayed should be the same as in the Hydrogen settings. You can't yet mark your Element session as trusted from Hydrogen.

## I want to host my own Hydrogen, how do I do that?

Published builds can be found at https://github.com/vector-im/hydrogen-web/releases. For building your own, you need to check out the version you want to build, or master if you want to run bleeding edge, and run `yarn install` and then `yarn build` in a console (and install nodejs >= 15 and yarn if you haven't yet). Now you should find all the files needed to host Hydrogen in the `target/` folder; just copy them all over to your server. As always, don't host your client on the same [origin](https://web.dev/same-origin-policy/#what's-considered-same-origin) as your homeserver.

## I want to embed Hydrogen in my website, how should I do that?

Hydrogen aims to be usable as an SDK, and while it is still early days, you can find some documentation on how to do that in [SDK.md](SDK.md).
@@ -1,11 +0,0 @@
## How to import a common-js dependency using ES6 syntax
---
Until [#6632](https://github.com/vitejs/vite/issues/6632) is fixed, such imports should be done as follows:

```ts
import * as pkg from "off-color";
// @ts-ignore
const offColor = pkg.offColor ?? pkg.default.offColor;
```

This way build, dev server and unit tests should all work.
@@ -1,75 +0,0 @@
## Promises, async/await and indexedDB

Doesn't indexedDB close your transaction if you don't queue more requests from an idb event handler?
So wouldn't that mean that you can't use promises and async/await when using idb?

It used to be like this, and for IE11 on Win7 (not on Windows 10, strangely enough), it still is like this.
Here we manually flush the promise queue synchronously at the end of an idb event handler.

In modern browsers, indexedDB transactions should only be closed after flushing the microtask queue of the event loop,
which is where promises run.

Keep in mind that indexedDB events, just like any other DOM event, are fired as macro tasks.
Promises queue micro tasks, of which the queue is drained before proceeding to the next macro task.
This also means that if a transaction is completed, you will only receive the event once you are ready to process the next macro tasks.
That doesn't prevent any placed request from throwing TransactionInactiveError though.

## TransactionInactiveError in Safari

Safari doesn't fully follow the rules above, in that if you open a transaction,
you need to "use" (not sure if this means getting a store or actually placing a request) it straight away,
without waiting for any *micro*tasks. See comments about Safari at https://github.com/dfahlander/Dexie.js/issues/317#issue-178349994.

Another failure mode observed in Hydrogen on Safari is when the (readonly) prepareTxn in sync wasn't awaited to completion before opening and using the syncTxn.
I haven't found any documentation online about this at all. Awaiting prepareTxn.complete() fixed the issue below. It's strange though that the put does not fail.

## Diagnosis of the problem

What is happening below is:
 - in the sync loop:
    - we first open a readonly txn on inboundGroupSessions, which we don't use in the example below
    - we then open a readwrite txn on session, ... (does not overlap with the first txn)
 - for the first incremental sync on a room (!YxKeAxtNcDZDrGgaMF:matrix.org) it seems to work well
 - on a second incremental sync for that same room, the first get throws TransactionInactiveError for some reason.
 - the put in the second incremental sync somehow did not throw.

So it looks like Safari doesn't like (some) transactions still being active while a second one is being opened, even with non-overlapping stores.
For now I haven't awaited every read txn in the app, as this was the only place it fails, but if this pops up again in Safari, we might have to do that.

Keep in mind that the `txn ... inactive` logs are only logged when the "complete" or "abort" events are processed,
which happens in a macro task, as opposed to all of our promises, which run in a micro task.
So the transaction is likely to have closed before it appears in the logs.

```
[Log] txn 4504181722375185 active on inboundGroupSessions
[Log] txn 861052256474256 active on session, roomSummary, roomState, roomMembers, timelineEvents, timelineFragments, pendingEvents, userIdentities, groupSessionDecryptions, deviceIdentities, outboundGroupSessions, operations, accountData
[Info] hydrogen_session_5286139994689036.session.put({"key":"sync","value":{"token":"s1572540047_757284957_7660701_602588550_435736037_1567300_101589125_347651623_132704","filterId":"2"}})
[Info] hydrogen_session_5286139994689036.userIdentities.get("@bwindels:matrix.org")
[Log] txn 4504181722375185 inactive
[Log] * applying sync response to room !YxKeAxtNcDZDrGgaMF:matrix.org ...
[Info] hydrogen_session_5286139994689036.roomMembers.put({"roomId":"!YxKeAxtNcDZDrGgaMF:matrix.org","userId":"@bwindels:matrix.org","membership":"join","avatarUrl":"mxc://matrix.org/aerWVfICBMcyFcEyREcivLuI","displayName":"Bruno","key":"!YxKeAxtNcDZDrGgaMF:matrix.org|@bwindels:matrix.org"})
[Info] hydrogen_session_5286139994689036.roomMembers.get("!YxKeAxtNcDZDrGgaMF:matrix.org|@bwindels:matrix.org")
[Info] hydrogen_session_5286139994689036.timelineEvents.add({"fragmentId":0,"eventIndex":2147483658,"roomId":"!YxKeAxtNcDZDrGgaMF:matrix.org","event":{"content":{"body":"haha","msgtype":"m.text"},"origin_server_ts":1601457573756,"sender":"@bwindels:matrix.org","type":"m.room.message","unsigned":{"age":8360},"event_id":"$eD9z73-lCpXBVby5_fKqzRZzMVHiPzKbE_RSZzqRKx0"},"displayName":"Bruno","avatarUrl":"mxc://matrix.org/aerWVfICBMcyFcEyREcivLuI","key":"!YxKeAxtNcDZDrGgaMF:matrix.org|00000000|8000000a","eventIdKey":"!YxKeAxtNcDZDrGgaMF:matrix.org|$eD9z73-lCpXBVby5_fKqzRZzMVHiPzKbE_RSZzqRKx0"})
[Info] hydrogen_session_5286139994689036.roomSummary.put({"roomId":"!YxKeAxtNcDZDrGgaMF:matrix.org","name":"!!!test8!!!!!!","lastMessageBody":"haha","lastMessageTimestamp":1601457573756,"isUnread":true,"encryption":null,"lastDecryptedEventKey":null,"isDirectMessage":false,"membership":"join","inviteCount":0,"joinCount":2,"heroes":null,"hasFetchedMembers":false,"isTrackingMembers":false,"avatarUrl":null,"notificationCount":5,"highlightCount":0,"tags":{"m.lowpriority":{}}})
[Log] txn 861052256474256 inactive
[Info] syncTxn committed!!

... two more unrelated sync responses ...

[Log] starting sync request with since s1572540191_757284957_7660742_602588567_435736063_1567300_101589126_347651632_132704 ...
[Log] txn 8104296957004707 active on inboundGroupSessions
[Log] txn 2233038992157489 active on session, roomSummary, roomState, roomMembers, timelineEvents, timelineFragments, pendingEvents, userIdentities, groupSessionDecryptions, deviceIdentities, outboundGroupSessions, operations, accountData
[Info] hydrogen_session_5286139994689036.session.put({"key":"sync","value":{"token":"s1572540223_757284957_7660782_602588579_435736078_1567300_101589130_347651633_132704","filterId":"2"}})
[Log] * applying sync response to room !YxKeAxtNcDZDrGgaMF:matrix.org ...
[Info] hydrogen_session_5286139994689036.roomMembers.get("!YxKeAxtNcDZDrGgaMF:matrix.org|@bwindels:matrix.org")
[Warning] stopping sync because of error
[Error] StorageError: get("!YxKeAxtNcDZDrGgaMF:matrix.org|@bwindels:matrix.org") failed on txn with stores accountData, deviceIdentities, groupSessionDecryptions, operations, outboundGroupSessions, pendingEvents, roomMembers, roomState, roomSummary, session, timelineEvents, timelineFragments, userIdentities on hydrogen_session_5286139994689036.roomMembers: (name: TransactionInactiveError) (code: 0) Failed to execute 'get' on 'IDBObjectStore': The transaction is inactive or finished.
(anonymous function)
asyncFunctionResume
(anonymous function)
promiseReactionJobWithoutPromise
promiseReactionJob
[Log] newStatus – "SyncError"
[Log] txn 8104296957004707 inactive
[Log] txn 2233038992157489 inactive
```
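A sketch of the Safari workaround described above, assuming a storage wrapper with `readTxn`/`readWriteTxn` helpers (those names are illustrative; the document itself only establishes that awaiting `prepareTxn.complete()` before opening the next transaction avoids the error):

```js
async function syncOnce(storage, applySyncResponse) {
    const prepareTxn = await storage.readTxn(["inboundGroupSessions"]);
    // ... prepare decryption using prepareTxn ...

    // Safari workaround: let the readonly txn fully complete before opening
    // the readwrite txn, otherwise a later get() on the second txn can throw
    // TransactionInactiveError even though the stores don't overlap.
    await prepareTxn.complete();

    const syncTxn = await storage.readWriteTxn(["session", "roomSummary", "roomMembers", "timelineEvents"]);
    await applySyncResponse(syncTxn);
    await syncTxn.complete();
}
```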
doc/SDK.md (116 changes)
@@ -1,116 +0,0 @@
# Hydrogen View SDK

The Hydrogen view SDK allows developers to integrate parts of the Hydrogen application into the UI of their own application. Hydrogen is written with the MVVM pattern, so to construct a view, you'd first construct a view model, which you then pass into the view. For most view models, you will first need a running client.

## Example

The Hydrogen SDK requires some assets to be shipped along with your app for things like downloading attachments, and end-to-end encryption. A convenient way to make this happen is provided by the SDK (importing `hydrogen-view-sdk/paths/vite`) but depends on your build system. Currently, only [vite](https://vitejs.dev/) is supported, so that's what we'll be using in the example below.

You can create a vite project using the following commands:

```sh
# you can pick "vanilla-ts" here for project type if you're not using react or vue
yarn create vite
cd <your-project-name>
yarn
yarn add hydrogen-view-sdk
```

You should see an `index.html` in the project root directory, containing an element with `id="app"`. Add the attribute `class="hydrogen"` to this element, as the CSS we'll include from the SDK assumes for now that the app is rendered in an element with this classname.

If you go into the `src` directory, you should see a `main.ts` file. If you put this code in there, you should see a basic timeline after login and initial sync have finished (it might take a while before you actually see anything on the screen).

You'll need to provide the username and password of a user that is already in the [#element-dev:matrix.org](https://matrix.to/#/#element-dev:matrix.org) room (or change the room id).

```ts
import {
    Platform,
    Client,
    LoadStatus,
    createNavigation,
    createRouter,
    RoomViewModel,
    TimelineView,
    viewClassForTile
} from "hydrogen-view-sdk";
import downloadSandboxPath from 'hydrogen-view-sdk/download-sandbox.html?url';
import workerPath from 'hydrogen-view-sdk/main.js?url';
import olmWasmPath from '@matrix-org/olm/olm.wasm?url';
import olmJsPath from '@matrix-org/olm/olm.js?url';
import olmLegacyJsPath from '@matrix-org/olm/olm_legacy.js?url';
const assetPaths = {
    downloadSandbox: downloadSandboxPath,
    worker: workerPath,
    olm: {
        wasm: olmWasmPath,
        legacyBundle: olmLegacyJsPath,
        wasmBundle: olmJsPath
    }
};
import "hydrogen-view-sdk/assets/theme-element-light.css";
// OR import "hydrogen-view-sdk/assets/theme-element-dark.css";

async function main() {
    const app = document.querySelector<HTMLDivElement>('#app')!
    const config = {};
    const platform = new Platform({container: app, assetPaths, config, options: { development: import.meta.env.DEV }});
    const navigation = createNavigation();
    platform.setNavigation(navigation);
    const urlRouter = createRouter({
        navigation: navigation,
        history: platform.history
    });
    urlRouter.attach();
    const client = new Client(platform);

    const loginOptions = await client.queryLogin("matrix.org").result;
    client.startWithLogin(loginOptions.password("username", "password"));

    await client.loadStatus.waitFor((status: string) => {
        return status === LoadStatus.Ready ||
            status === LoadStatus.Error ||
            status === LoadStatus.LoginFailed;
    }).promise;

    if (client.loginFailure) {
        alert("login failed: " + client.loginFailure);
    } else if (client.loadError) {
        alert("load failed: " + client.loadError.message);
    } else {
        const {session} = client;
        // looks for room corresponding to #element-dev:matrix.org, assuming it is already joined
        const room = session.rooms.get("!bEWtlqtDwCLFIAKAcv:matrix.org");
        const vm = new RoomViewModel({
            room,
            ownUserId: session.userId,
            platform,
            urlCreator: urlRouter,
            navigation,
        });
        await vm.load();
        const view = new TimelineView(vm.timelineViewModel, viewClassForTile);
        app.appendChild(view.mount());
    }
}

main();
```

## Typescript support

Typescript support is not yet available while we're converting the Hydrogen codebase to Typescript.
In your `src` directory, you'll need to add a `.d.ts` file (it can be called anything, e.g. `deps.d.ts`)
containing this snippet to make Typescript not complain that `hydrogen-view-sdk` doesn't have types:

```ts
declare module "hydrogen-view-sdk";
```

## API Stability

This library follows semantic versioning; there is no API stability promised as long as the major version is still 0. Once 1.0.0 is released, breaking changes will be released with a change in major versioning.

## Third-party licenses

This package bundles the bs58 package ([license](https://github.com/cryptocoinjs/bs58/blob/master/LICENSE)), and the Inter font ([license](https://github.com/rsms/inter/blob/master/LICENSE.txt)).
@@ -1,22 +0,0 @@
# Replacing javascript files

Any source file can be replaced at build time by mapping the path in a JSON file passed in to the build command, e.g. `yarn build --override-imports customizations.json`. The file should be written like so:

```json
{
    "src/platform/web/ui/session/room/timeline/TextMessageView.js": "src/platform/web/ui/session/room/timeline/MyTextMessageView.js"
}
```
The paths are relative to the location of the mapping file, but the mapping file should be in a parent directory of the files you want to replace.

You should see a "replacing x with y" line (twice actually, for the normal and legacy build).

# Injecting CSS

You can override the location of the main css file with the `--override-css <file>` option to the build script. The default is `src/platform/web/ui/css/main.css`, which you probably want to import from your custom css file like so:

```css
@import url('src/platform/web/ui/css/main.css');

/* additions */
```
doc/THEMING.md (204 changes)
@@ -1,204 +0,0 @@
# Theming Documentation
## Basic Architecture
A **theme collection** in Hydrogen is represented by a `manifest.json` file and a `theme.css` file.
The manifest specifies variants (eg: dark, light ...), each of which is a **theme** and maps to a single css file in the build output.

Each such theme is produced by changing the values of variables in the base `theme.css` file with those specified in the variant section of the manifest:

![](images/theming-architecture.png)

More in-depth explanations can be found in later sections.

## Structure of `manifest.json`
[See theme.ts](../src/platform/types/theme.ts)

## Variables
CSS variables specific to a particular variant are specified in the `variants` section of the manifest:
```json=
"variants": {
    "light": {
        ...
        "variables": {
            "background-color-primary": "#fff",
            "text-color": "#2E2F32",
        }
    },
    "dark": {
        ...
        "variables": {
            "background-color-primary": "#21262b",
            "text-color": "#fff",
        }
    }
}
```

These variables will appear in the css file (theme.css):
```css=
body {
    background-color: var(--background-color-primary);
    color: var(--text-color);
}
```

During the build process, this would result in the creation of two css files (one for each variant) where the variables are substituted with the corresponding values specified in the manifest:

*element-light.css*:
```css=
body {
    background-color: #fff;
    color: #2E2F32;
}
```

*element-dark.css*:
```css=
body {
    background-color: #21262b;
    color: #fff;
}
```

## Derived Variables
In addition to simple substitution of variables in the stylesheet, it is also possible to instruct the build system to first produce a new value from the base variable value before the substitution.

Such derived variables have the form `base_css_variable--operation-arg` and can be read as:
apply `operation` to `base_css_variable` with argument `arg`.

Continuing with the previous example, it is possible to specify:
```css=
.left-panel {
    /* background color should be 20% darker
       than background-color-primary */
    background-color: var(--background-color-primary--darker-20);
}
```

Currently supported operations are:

| Operation | Argument | Operates On |
| -------- | -------- | -------- |
| darker | percentage | color |
| lighter | percentage | color |

## Aliases
It is possible to give aliases to variables in the `theme.css` file:
```css=
:root {
    font-size: 10px;
    /* Theme aliases */
    --icon-color: var(--background-color-secondary--darker-40);
}
```
It is possible to further derive from these aliased variables:
```css=
div {
    background: var(--icon-color--darker-20);
    --my-alias: var(--icon-color--darker-20);
    /* Derive from aliased variable */
    color: var(--my-alias--lighter-15);
}
```

## Colorizing svgs
Along with a change in color-scheme, it may be necessary to change the colors in the svg icons and images.
This can be done by supplying the preferred colors with query parameters:
`my-awesome-logo.svg?primary=base-variable-1&secondary=base-variable-2`

This instructs the build system to colorize the svg with the given primary and secondary colors.
`base-variable-1` and `base-variable-2` are the css variables specified in the `variables` section of the manifest.

For colorizing svgs, the source svg must use `#ff00ff` as the primary color and `#00ffff` as the secondary color:

| ![](images/svg-icon-example.png) | ![](images/coloring-process.png) |
| :--: | :--: |
| **original source image** | **transformation process** |

## Creating your own theme variant in Hydrogen
If you're looking to change the color-scheme of the existing Element theme, you only need to add your own variant to the existing `manifest.json`.

The steps are fairly simple:
1. Copy over an existing variant to the variants section of the manifest.
2. Change the `dark`, `default` and `name` fields.
3. Give new values to each variable in the `variables` section.
4. Build hydrogen.

## Creating your own theme collection in Hydrogen
If a theme variant does not solve your needs, you can create a new theme collection with a different base `theme.css` file.
1. Create a directory for your new theme-collection under `src/platform/web/ui/css/themes/`.
2. Create `manifest.json` and `theme.css` files within the newly created directory.
3. Populate `manifest.json` with the base css variables you wish to use.
4. Write styles in your `theme.css` file using the base variables, derived variables and colorized svg icons.
5. Tell the build system where to find this theme-collection by providing the location of this directory to the `themeBuilder` plugin in `vite.config.js`:
```json=
...
themeBuilder({
    themeConfig: {
        themes: {
            element: "./src/platform/web/ui/css/themes/element",
            awesome: "path/to/theme-directory"
        },
        default: "element",
    },
    compiledVariables,
}),
...
```
6. Build Hydrogen.

## Changing the default theme
To change the default theme used in Hydrogen, modify the `defaultTheme` field in the `config.json` file (which can be found in the build output):
```json=
"defaultTheme": {
    "light": theme-id,
    "dark": theme-id
}
```

Here *theme-id* is of the form `theme-variant`, where `theme` is the key used when specifying the manifest location of the theme collection in `vite.config.js` and `variant` is the key used in the variants section of the manifest.

Some examples of theme-ids are `element-dark` and `element-light`.

To find the theme-id of some theme, you can look at the built-asset section of the manifest in the build output.

This default theme will render as the "Default" option in the theme-chooser dropdown. If the device preference is for dark theme, the dark default is selected, and vice versa.

**You'll need to reload twice so that Hydrogen picks up the config changes!**

# Derived Theme (Collection)
This allows users to theme Hydrogen without the need for rebuilding. Derived theme collections can be thought of as extensions (derivations) of some existing build-time theme.

## Creating a derived theme:
Here's how you create a new derived theme:
1. You create a new theme manifest file (eg: theme-awesome.json) and mention which build-time theme you're basing your new theme on using the `extends` field. The base css file of the mentioned theme is used for your new theme.
2. You configure the theme manifest as usual by populating the `variants` field with your desired colors.
3. You add your new theme manifest to the list of themes in `config.json`.

Refresh Hydrogen twice (once to refresh the cache, and once to load) and the new theme should show up in the theme chooser.

## How does it work?

For every theme collection in Hydrogen, the build process emits a runtime css file which, like the built theme css file, contains variables in the css code. But unlike the theme css file, the runtime css file lacks the definitions for these variables:

CSS for the built theme:
```css
:root {
    --background-color-primary: #f2f20f;
}

body {
    background-color: var(--background-color-primary);
}
```
and the corresponding runtime theme:
```css
/* Notice the lack of definition for --background-color-primary here! */
body {
    background-color: var(--background-color-primary);
}
```

When Hydrogen loads a derived theme, it takes the runtime css file of the extended theme and dynamically adds the variable definitions based on the values specified in the manifest. Icons are also colored dynamically and injected as variables using Data URIs.
@@ -1,38 +0,0 @@
# Typescript style guide

## Use `type` rather than `interface` for named parameters and POJO return values.

`type` and `interface` can be used somewhat interchangeably, but let's use `type` to describe data and `interface` to describe (polymorphic) behaviour.

Good examples of data are option objects to have named parameters, and POJOs (plain old javascript objects) without any methods, just fields.

Also see [this playground](https://www.typescriptlang.org/play?#code/C4TwDgpgBACghgJwgO2AeTMAlge2QZygF4oBvAKCiqmTgFsIAuKfYBLZAcwG5LqATCABs4IAPzNkAVzoAjCAl4BfcuVCQoAYQAWWIfwzY8hEvCSpDuAlABkZPlQDGOITgTNW7LstWOR+QjMUYHtqKGcCNilHYDcAChxMK3xmIIsk4wBKewcoFRVyPzgArV19KAgAD2AUfkDEYNDqCM9o2IQEjIJmHT0DLvxsijCw-ClIDsSjAkzeEebjEIYAuE5oEgADABJSKeSAOloGJSgsQh29433nVwQlDbnqfKA)

## Use `type foo = { [key: string]: any }` for types that you intend to fill in later.

For instance, if you have a method such as:
```js
function load(options) {
    // ...
}
```
and you intend to type options at some later point, do:
```ts
type Options = { [key: string]: any}
```
This makes it much easier to add the necessary type information at a later time.

## Use `object` or `Record<string, any>` to describe a type that accepts any javascript object.

Sometimes a function or method may genuinely need to accept any object; eg:
```js
function encodeBody(body) {
    // ...
}
```
In this scenario:
- Use `object` if you know that you will not access any property
- Use `Record<string, any>` if you need to access some property

Both usages prevent the type from accepting primitives (eg: string, boolean...).
If using `Record`, ensure that you have guards to check that the properties really do exist.
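A compact illustration of the first guideline above (hypothetical names, not taken from the codebase): `type` for plain data such as option objects, `interface` for polymorphic behaviour.

```ts
// data: an options object used for named parameters
type LoadOptions = {
    roomId: string;
    limit?: number;
};

// behaviour: something different classes can implement
interface EventSink {
    onEvent(event: Record<string, any>): void;
}

function load(options: LoadOptions, sink: EventSink): void {
    // ...
}
```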
@@ -1,3 +0,0 @@
# Index for UI code

1. [Rendering DOM elements](./render-dom-elements.md)
@@ -1,47 +0,0 @@
tldr; Use `tag` from `ui/general/html.js` to quickly create DOM elements.

## Syntax
---
The general syntax is as follows:
```js
tag.tag_name({attribute1: value, attribute2: value, ...}, [child_elements]);
```
**tag_name** can be any one of the following:
```
br, a, ol, ul, li, div, h1, h2, h3, h4, h5, h6,
p, strong, em, span, img, section, main, article, aside,
pre, button, time, input, textarea, label, form, progress, output, video
```

<br />

eg:
Here is an example HTML segment followed by the code to create it in Hydrogen.
```html
<section class="main-section">
    <h1>Demo</h1>
    <button class="btn_cool">Click me</button>
</section>
```
```js
tag.section({className: "main-section"},[
    tag.h1("Demo"),
    tag.button({className:"btn_cool"}, "Click me")
]);
```
<br />

**Note:** In views based on `TemplateView`, you will see `t` used instead of `tag`.
`t` is the `TemplateBuilder` object passed to the render function in `TemplateView`.
Although syntactically similar, they are not functionally equivalent.
Primarily, `t` **supports** bindings and event handlers while `tag` **does not**.

```js
// The onClick here won't work!!
tag.button({className:"awesome-btn", onClick: () => this.foo()});

render(t, vm){
    // The onClick works here.
    t.button({className:"awesome-btn", onClick: () => this.foo()});
}
```
206
doc/UI/ui.md
206
doc/UI/ui.md
|
@ -1,206 +0,0 @@
|
||||||
## IView components
|
|
||||||
|
|
||||||
The [interface](https://github.com/vector-im/hydrogen-web/blob/master/src/platform/web/ui/general/types.ts) adopted by view components is agnostic of how they are rendered to the DOM. This has several benefits:
|
|
||||||
- it allows Hydrogen to not ship a [heavy view framework](https://bundlephobia.com/package/react-dom@18.2.0) that may or may not be used by its SDK users, and also keep bundle size of the app down.
|
|
||||||
- Given the interface is quite simple, is should be easy to integrate this interface into the render lifecycle of other frameworks.
|
|
||||||
- The main implementations used in Hydrogen are [`ListView`](https://github.com/vector-im/hydrogen-web/blob/master/src/platform/web/ui/general/ListView.ts) (rendering [`ObservableList`](https://github.com/vector-im/hydrogen-web/blob/master/src/observable/list/BaseObservableList.ts)s) and [`TemplateView`](https://github.com/vector-im/hydrogen-web/blob/master/src/platform/web/ui/general/TemplateView.ts) (templating and one-way databinding), each only a few 100 lines of code and tailored towards their specific use-case. They work straight with the DOM API and have no other dependencies.
|
|
||||||
- a common inteface allows us to mix and match between these different implementations (and gradually shift if need be in the future) with the code.
|
|
||||||
|
|
||||||
## Templates
|
|
||||||
|
|
||||||
### Template language
|
|
||||||
|
|
||||||
Templates use a mini-DSL language in pure javascript to express declarative templates. This is basically a very thin wrapper around `document.createElement`, `document.createTextNode`, `node.setAttribute` and `node.appendChild` to quickly create DOM trees. The general syntax is as follows:
|
|
||||||
```js
|
|
||||||
t.tag_name({attribute1: value, attribute2: value, ...}, [child_elements]);
|
|
||||||
t.tag_name(child_element);
|
|
||||||
t.tag_name([child_elements]);
|
|
||||||
```
|
|
||||||
**tag_name** can be [most HTML or SVG tags](https://github.com/vector-im/hydrogen-web/blob/master/src/platform/web/ui/general/html.ts#L102-L110).
|
|
||||||
|
|
||||||
eg:
|
|
||||||
Here is an example HTML segment, followed by the code to create it in Hydrogen.
|
|
||||||
```html
|
|
||||||
<section class="main-section">
|
|
||||||
<h1>Demo</h1>
|
|
||||||
<button class="btn_cool">Click me</button>
|
|
||||||
</section>
|
|
||||||
```
|
|
||||||
```js
|
|
||||||
t.section({className: "main-section"},[
|
|
||||||
t.h1("Demo"),
|
|
||||||
t.button({className:"btn_cool"}, "Click me")
|
|
||||||
]);
|
|
||||||
```
|
|
||||||
|
|
||||||
All these functions return DOM element nodes, e.g. the result of `document.createElement`.
|
|
||||||
|
|
||||||
### TemplateView
|
|
||||||
|
|
||||||
`TemplateView` builds on top of templating by adopting the IView component model and adding event handling attributes, sub views and one-way databinding.
|
|
||||||
In views based on `TemplateView`, you will see a render method with a `t` argument.
|
|
||||||
`t` is the `TemplateBuilder` object passed to the render function in `TemplateView`. It also takes a data object to render and bind to, often called `vm`, short for view model from the MVVM pattern Hydrogen uses.
|
|
||||||
|
|
||||||
You either subclass `TemplateView` and override the `render` method:
|
|
||||||
```js
|
|
||||||
class MyView extends TemplateView {
|
|
||||||
render(t, vm) {
|
|
||||||
return t.div(...);
|
|
||||||
}
|
|
||||||
}
|
|
||||||
```
|
|
||||||
|
|
||||||
Or you pass a render function to `InlineTemplateView`:
|
|
||||||
```js
|
|
||||||
new InlineTemplateView(vm, (t, vm) => {
|
|
||||||
return t.div(...);
|
|
||||||
});
|
|
||||||
```
|
|
||||||
|
|
||||||
**Note:** the render function is only called once, to build the initial DOM tree and set up bindings, etc. Any subsequent updates to the DOM of a component happen through bindings.
|
|
||||||
|
|
||||||
#### Event handlers
|
|
||||||
|
|
||||||
Any attribute starting with `on` and having a function as a value will be attached as an event listener on the given node. The event handler will be removed during unmounting.
|
|
||||||
|
|
||||||
```js
|
|
||||||
t.button({onClick: evt => {
|
|
||||||
vm.doSomething(evt.target.value);
|
|
||||||
}}, "Click me");
|
|
||||||
```
|
|
||||||
|
|
||||||
#### Subviews
|
|
||||||
|
|
||||||
`t.view(instance)` will mount the sub view (can be any IView) and return its root node so it can be attached in the DOM tree.
|
|
||||||
All subviews will be unmounted when the parent view gets unmounted.
|
|
||||||
|
|
||||||
```js
|
|
||||||
t.div({className: "Container"}, t.view(new ChildView(vm.childViewModel)));
|
|
||||||
```
|
|
||||||
|
|
||||||
#### One-way data-binding
|
|
||||||
|
|
||||||
A binding couples a part of the DOM to a value on the view model. The view model emits an update when any of its properties change, to which the view can subscribe. When an update is received by the view, it will reevaluate all the bindings, and update the DOM accordingly.
|
|
||||||
|
|
||||||
A binding can appear in many places where a static value can usually be used in the template tree.
|
|
||||||
To create a binding, you pass a function that maps the view model value to a static value.
|
|
||||||
|
|
||||||
##### Text binding
|
|
||||||
|
|
||||||
```js
|
|
||||||
t.p(["I've got ", vm => vm.counter, " beans"])
|
|
||||||
```
|
|
||||||
|
|
||||||
##### Attribute binding
|
|
||||||
|
|
||||||
```js
|
|
||||||
t.button({disabled: vm => vm.isBusy}, "Submit");
|
|
||||||
```
|
|
||||||
|
|
||||||
##### Class-name binding
|
|
||||||
```js
|
|
||||||
t.div({className: {
|
|
||||||
button: true,
|
|
||||||
active: vm => vm.isActive
|
|
||||||
}})
|
|
||||||
```
|
|
||||||
##### Subview binding
|
|
||||||
|
|
||||||
So far, all the bindings can only change node values within our tree, but don't change the structure of the DOM. A sub view binding allows you to conditionally add a subview based on the result of a binding function.
|
|
||||||
|
|
||||||
All sub view bindings return a DOM (element or comment) node and can be directly added to the DOM tree by including them in your template.
|
|
||||||
|
|
||||||
###### map
|
|
||||||
|
|
||||||
`t.mapView` allows you to choose a view based on the result of the binding function:
|
|
||||||
|
|
||||||
```js
|
|
||||||
t.mapView(vm => vm.count, count => {
|
|
||||||
return count > 5 ? new LargeView(count) : new SmallView(count);
|
|
||||||
});
|
|
||||||
```
|
|
||||||
|
|
||||||
Every time the binding function (the first argument) returns a different value, the second function is run to create a new view that replaces the previous one.
|
|
||||||
|
|
||||||
You can also return `null` or `undefined` from the second function to indicate a view should not be rendered. In this case a comment node will be used as a placeholder.
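For example, a detail pane that only exists while something is selected could look like this (`selectedItem` and `ItemDetailsView` are hypothetical names used purely for illustration):

```js
// only render a details view while an item is selected; while the second
// function returns null, a comment node acts as the placeholder
t.mapView(vm => vm.selectedItem, item => {
    return item ? new ItemDetailsView(item) : null;
});
```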
|
|
||||||
|
|
||||||
There is also a `t.map` which will create a new template view (with the same value) and you directly provide a render function for it:
|
|
||||||
|
|
||||||
```js
|
|
||||||
t.map(vm => vm.shape, (shape, t, vm) => {
|
|
||||||
switch (shape) {
|
|
||||||
case "rect": return t.rect();
|
|
||||||
case "circle": return t.circle();
|
|
||||||
}
|
|
||||||
})
|
|
||||||
```
|
|
||||||
|
|
||||||
###### if
|
|
||||||
|
|
||||||
`t.ifView` will render the subview if the binding returns a truthy value:
|
|
||||||
|
|
||||||
```js
|
|
||||||
t.ifView(vm => vm.isActive, vm => new View(vm.someValue));
|
|
||||||
```
|
|
||||||
|
|
||||||
Likewise there is `t.if`, which creates a `TemplateView` and passes you the `TemplateBuilder`:
|
|
||||||
|
|
||||||
```js
|
|
||||||
t.if(vm => vm.isActive, (t, vm) => t.div("active!"));
|
|
||||||
```
|
|
||||||
|
|
||||||
##### Side-effects
|
|
||||||
|
|
||||||
Sometimes you want to imperatively modify your DOM tree based on the value of a binding.
|
|
||||||
`mapSideEffect` makes this easy to do:
|
|
||||||
|
|
||||||
```js
|
|
||||||
let node = t.div();
|
|
||||||
t.mapSideEffect(vm => vm.color, (color, oldColor) => node.style.background = color);
|
|
||||||
return node;
|
|
||||||
```
|
|
||||||
|
|
||||||
**Note:** you shouldn't add any bindings, subviews or event handlers from the side-effect callback,
|
|
||||||
it is safest not to use the `t` argument at all.
|
|
||||||
If you do, they will be added every time the callback is run and only cleaned up when the view is unmounted.
|
|
||||||
|
|
||||||
#### `tag` vs `t`
|
|
||||||
|
|
||||||
If you don't need a view component with data-binding, sub views and event handler attributes, the template language is also available in `ui/general/html.js` without any of these bells and whistles, exported as `tag`. As opposed to static templates with `tag`, you always use
|
|
||||||
`TemplateView` as an instance of a class, as there is some extra state to keep track of (bindings, event handlers and subviews).
|
|
||||||
|
|
||||||
Although syntactically similar, `TemplateBuilder` and `tag` are not functionally equivalent.
|
|
||||||
Primarily `t` **supports** bindings and event handlers while `tag` **does not**. This is because to remove event listeners, we need to keep track of them, and thus we need to keep this state somewhere which
|
|
||||||
we can't do with a simple function call, but we can inside the `TemplateView` class.
|
|
||||||
|
|
||||||
```js
|
|
||||||
// The onClick here won't work!
|
|
||||||
tag.button({className:"awesome-btn", onClick: () => this.foo()});
|
|
||||||
|
|
||||||
class MyView extends TemplateView {
|
|
||||||
render(t, vm){
|
|
||||||
// The onClick works here.
|
|
||||||
t.button({className:"awesome-btn", onClick: () => this.foo()});
|
|
||||||
}
|
|
||||||
}
|
|
||||||
```
|
|
||||||
|
|
||||||
## ListView
|
|
||||||
|
|
||||||
A view component that renders and updates a list of sub views, one for every item in an `ObservableList`.
|
|
||||||
|
|
||||||
```js
|
|
||||||
const list = new ListView({
|
|
||||||
list: someObservableList
|
|
||||||
}, listValue => new ChildView(listValue));
|
|
||||||
```
|
|
||||||
|
|
||||||
As items are added, removed, moved (change position) and updated, the DOM will be kept in sync.
|
|
||||||
|
|
||||||
There is also a `LazyListView` that only renders items in and around the current viewport, with the restriction that all items in the list must be rendered with the same height.
|
|
||||||
|
|
||||||
### Sub view updates
|
|
||||||
|
|
||||||
Unless the `parentProvidesUpdates` option in the constructor is set to `false`, the ListView will call the `update` method on the child `IView` component when it receives an update event for one of the items in the `ObservableList`.
|
|
||||||
|
|
||||||
This way, not every sub view has to have an individual listener on its view model (a value from the observable list); all updates go from the observable list to the list view, which then notifies the correct sub view.
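As a minimal sketch, opting out of parent-driven updates could look like this, reusing the constructor shape shown above (whether the option is passed exactly through this options object is an assumption):

```js
// with parentProvidesUpdates set to false, each ChildView is expected to
// subscribe to its own item for updates instead of relying on the ListView
const list = new ListView({
    list: someObservableList,
    parentProvidesUpdates: false
}, listValue => new ChildView(listValue));
```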
|
|
|
@ -1,58 +0,0 @@
|
||||||
## Warning
|
|
||||||
|
|
||||||
Usage of docker is a third-party contribution and not actively tested, used or supported by the main developer(s).
|
|
||||||
|
|
||||||
Having said that, you can also use Docker to create a local dev environment or a production deployment.
|
|
||||||
|
|
||||||
## Dev environment
|
|
||||||
|
|
||||||
In this repository, create a Docker image:
|
|
||||||
|
|
||||||
```
|
|
||||||
docker build -t hydrogen-dev -f Dockerfile-dev .
|
|
||||||
```
|
|
||||||
|
|
||||||
Then start up a container from that image:
|
|
||||||
|
|
||||||
```
|
|
||||||
docker run \
|
|
||||||
--name hydrogen-dev \
|
|
||||||
--publish 3000:3000 \
|
|
||||||
--volume "$PWD":/code \
|
|
||||||
--interactive \
|
|
||||||
--tty \
|
|
||||||
--rm \
|
|
||||||
hydrogen-dev
|
|
||||||
```
|
|
||||||
|
|
||||||
Then point your browser to `http://localhost:3000`. You can see the server logs in the terminal where you started the container.
|
|
||||||
|
|
||||||
To stop the container, simply hit `ctrl+c`.
|
|
||||||
|
|
||||||
## Production deployment
|
|
||||||
|
|
||||||
### Build or pull image
|
|
||||||
|
|
||||||
In this repository, create a Docker image:
|
|
||||||
|
|
||||||
```
|
|
||||||
docker build -t hydrogen .
|
|
||||||
```
|
|
||||||
|
|
||||||
Or, pull the docker image from GitLab:
|
|
||||||
|
|
||||||
```
|
|
||||||
docker pull registry.gitlab.com/jcgruenhage/hydrogen-web
|
|
||||||
docker tag registry.gitlab.com/jcgruenhage/hydrogen-web hydrogen
|
|
||||||
```
|
|
||||||
|
|
||||||
### Start container image
|
|
||||||
|
|
||||||
Then, start up a container from that image:
|
|
||||||
|
|
||||||
```
|
|
||||||
docker run \
|
|
||||||
--name hydrogen \
|
|
||||||
--publish 80:80 \
|
|
||||||
hydrogen
|
|
||||||
```
|
|
Binary file not shown.
Before Width: | Height: | Size: 7.8 KiB |
Binary file not shown.
Before Width: | Height: | Size: 4.2 KiB |
Binary file not shown.
Before Width: | Height: | Size: 19 KiB |
|
@ -5,8 +5,3 @@ we should automatically fill gaps (capped at a certain (large) amount of events,
|
||||||
during these fills (once supported), we should calculate push actions and trigger notifications, as we would otherwise have received this through sync.
|
during these fills (once supported), we should calculate push actions and trigger notifications, as we would otherwise have received this through sync.
|
||||||
|
|
||||||
we could also trigger notifications when just backfilling on initial sync up to a certain amount of time in the past?
|
we could also trigger notifications when just backfilling on initial sync up to a certain amount of time in the past?
|
||||||
|
|
||||||
|
|
||||||
we also need to backfill if we didn't receive any m.room.message in a limited sync for an encrypted room, as it's possible the room summary hasn't seen the last message in the room and is now out of date. this is actually also true for a non-encrypted room, although in that case (as per the above) notifications would already work well.
|
|
||||||
|
|
||||||
a room should request backfills in needsAfterSyncCompleted and do them in afterSyncCompleted.
|
|
||||||
|
|
|
@ -2,10 +2,7 @@
|
||||||
## Olm
|
## Olm
|
||||||
- implement MemberList as ObservableMap
|
- implement MemberList as ObservableMap
|
||||||
- make sure we have all members (as we're using lazy loading members), and store these somehow
|
- make sure we have all members (as we're using lazy loading members), and store these somehow
|
||||||
- keep in mind that the server might not support lazy loading? E.g. we should store in a memberlist all the membership events passed by sync, perhaps with a flag if we already attempted to fetch all. We could also check if the server announces lazy loading support in the version response (I think r0.6.0).
|
|
||||||
- do we need to update /members on every limited sync response or did we find a way around this?
|
- do we need to update /members on every limited sync response or did we find a way around this?
|
||||||
- I don't think we need to ... we get all state events that were sent during the gap in `room.state`
|
|
||||||
- I tested this with riot and synapse, and indeed, we get membership events from the gap on a limited sync. This could be clearer in the spec though.
|
|
||||||
- fields:
|
- fields:
|
||||||
- user id
|
- user id
|
||||||
- room id
|
- room id
|
||||||
|
@ -34,8 +31,8 @@
|
||||||
- verify signature on key
|
- verify signature on key
|
||||||
- ??? what about inbound/outbound sessions? do they require multiple OlmSession objects?
|
- ??? what about inbound/outbound sessions? do they require multiple OlmSession objects?
|
||||||
- doesn't look like it, more like a way to start the session but once started (type=1), they are equivalent?
|
- doesn't look like it, more like a way to start the session but once started (type=1), they are equivalent?
|
||||||
- for outbound, see https://matrix.org/docs/guides/end-to-end-encryption-implementation-guide#starting-an-olm-session
|
- for outbound, see file:///home/bwindels/Downloads/matrix-docs/End-to-End%20Encryption%20implementation%20guide%20%7C%20Matrix.org.html#starting-an-olm-session
|
||||||
- for inbound, see: https://matrix.org/docs/guides/end-to-end-encryption-implementation-guide#handling-an-mroomencrypted-event
|
- for inbound, see: file:///home/bwindels/Downloads/matrix-docs/End-to-End%20Encryption%20implementation%20guide%20|%20Matrix.org.html#handling-an-mroomencrypted-event
|
||||||
- so in this case, the session would be created as an outbound session.
|
- so in this case, the session would be created as an outbound session.
|
||||||
- store pickled, index by curve25519 identity key?
|
- store pickled, index by curve25519 identity key?
|
||||||
- get from storage if exists and unpickle
|
- get from storage if exists and unpickle
|
||||||
|
@ -68,10 +65,10 @@
|
||||||
- mark them as published in account
|
- mark them as published in account
|
||||||
- update picked session in storage
|
- update picked session in storage
|
||||||
- implement encrypting olm messages
|
- implement encrypting olm messages
|
||||||
- roughly https://matrix.org/docs/guides/end-to-end-encryption-implementation-guide#encrypting-an-event-with-olm
|
- roughly file:///home/bwindels/Downloads/matrix-docs/End-to-End%20Encryption%20implementation%20guide%20|%20Matrix.org.html#encrypting-an-event-with-olm
|
||||||
- packaging as m.room.encrypted event
|
- packaging as m.room.encrypted event
|
||||||
- implement decrypting olm messages
|
- implement decrypting olm messages
|
||||||
- roughly https://matrix.org/docs/guides/end-to-end-encryption-implementation-guide#handling-an-mroomencrypted-event
|
- roughly file:///home/bwindels/Downloads/matrix-docs/End-to-End%20Encryption%20implementation%20guide%20|%20Matrix.org.html#handling-an-mroomencrypted-event
|
||||||
- decrypt with libolm
|
- decrypt with libolm
|
||||||
- verify signature
|
- verify signature
|
||||||
- check message index, etc to detect replay attacks
|
- check message index, etc to detect replay attacks
|
||||||
|
@ -121,8 +118,7 @@ we'll need to pass an implementation of EventSender or something to SendQueue th
|
||||||
- use AES-CTR from https://developer.mozilla.org/en-US/docs/Web/API/SubtleCrypto
|
- use AES-CTR from https://developer.mozilla.org/en-US/docs/Web/API/SubtleCrypto
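As a rough sketch of the shape of that SubtleCrypto call (illustrative only; the key handling and counter length here are assumptions, not the exact scheme the matrix spec mandates for attachments):

```js
// encrypt some bytes with AES-CTR via SubtleCrypto
async function encryptBytes(bytes) {
    const key = await crypto.subtle.generateKey(
        {name: "AES-CTR", length: 256}, true, ["encrypt", "decrypt"]);
    const counter = crypto.getRandomValues(new Uint8Array(16));
    const ciphertext = await crypto.subtle.encrypt(
        {name: "AES-CTR", counter, length: 64}, key, bytes);
    return {key, counter, ciphertext};
}
```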
|
||||||
|
|
||||||
## Notes
|
## Notes
|
||||||
- libolm api docs (also for js api) would be great. Found something that could work:
|
- libolm api docs (also for js api) would be great
|
||||||
https://gitlab.matrix.org/matrix-org/olm/-/blob/master/javascript/index.d.ts
|
|
||||||
|
|
||||||
|
|
||||||
## OO Design
|
## OO Design
|
||||||
|
|
|
@ -1,11 +1,3 @@
|
||||||
# TODO
|
|
||||||
|
|
||||||
## Member list
|
|
||||||
|
|
||||||
- support migrations in StorageFactory
|
|
||||||
- migrate all stores from key to key_path
|
|
||||||
- how to deal with members coming from backfill? do we even need to store them?
|
|
||||||
|
|
||||||
# How to store members?
|
# How to store members?
|
||||||
|
|
||||||
All of this is assuming we'll use lazy loading of members.
|
All of this is assuming we'll use lazy loading of members.
|
||||||
|
|
|
@ -1,25 +0,0 @@
|
||||||
# Replying to pending messages
|
|
||||||
The matrix spec requires clients capable of rich replies (that would be us once replies work) to include fallback (textual in `body` and structured in `formatted_body`) that can be rendered
|
|
||||||
by clients that do not natively support rich replies (that would be us at the time of writing). The schema for the fallback is as follows:
|
|
||||||
|
|
||||||
```
|
|
||||||
<mx-reply>
|
|
||||||
<blockquote>
|
|
||||||
<a href="https://matrix.to/#/!somewhere:example.org/$event:example.org">In reply to</a>
|
|
||||||
<a href="https://matrix.to/#/@alice:example.org">@alice:example.org</a>
|
|
||||||
<br />
|
|
||||||
<!-- This is where the related event's HTML would be. -->
|
|
||||||
</blockquote>
|
|
||||||
</mx-reply>
|
|
||||||
```
|
|
||||||
|
|
||||||
There's a single complication here for pending events: we have `$event:example.org` in the schema (the `In reply to` link), and it must
|
|
||||||
be present _within the content_, inside `formatted_body`. The issue is that, if we are queuing a reply to a pending event,
|
|
||||||
we don't know its remote ID. All we know is its transaction ID on our end. If we were to use that while formatting the message,
|
|
||||||
we'd be sending messages that contain our internal transaction IDs instead of proper matrix event identifiers.
|
|
||||||
|
|
||||||
To solve this, we'd need `SendQueue`, whenever it receives a remote echo, to update pending events that are replies with their
|
|
||||||
`relatedEventId`. This already happens, and the `event_id` field in `m.relates_to` is updated. But we'd need to extend this
|
|
||||||
to adjust the messages' `formatted_body` with the resolved remote ID, too.
|
|
||||||
|
|
||||||
How do we safely do this, without accidentally substituting event IDs into places in the body where they were not intended?
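One possible approach, sketched under the assumption that the transaction ID only ever appears inside the matrix.to permalinks of the `<mx-reply>` fallback, is to replace the full permalink rather than the bare ID (function and parameter names here are made up for illustration):

```js
// swap the local transaction id for the resolved remote event id, but only
// where it occurs as part of a matrix.to permalink, never in free text
function resolveReplyFallback(formattedBody, roomId, txnId, remoteEventId) {
    const localLink = `https://matrix.to/#/${roomId}/${txnId}`;
    const remoteLink = `https://matrix.to/#/${roomId}/${remoteEventId}`;
    return formattedBody.split(localLink).join(remoteLink);
}
```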
|
|
|
@ -1,22 +0,0 @@
|
||||||
# Push Notifications
|
|
||||||
- we set up the app on the sygnal server, with an app_id (io.element.hydrogen.web), generating a key pair
|
|
||||||
- we create a web push subscription, passing the server pub key, and get `endpoint`, `p256dh` and `auth` back. We put `webpush_endpoint` and `auth` in the push data, and use `p256dh` as the push key?
|
|
||||||
- we call `POST /_matrix/client/r0/pushers/set` on the homeserver with the sygnal instance url. We pass the web push subscription as pusher data.
|
|
||||||
- when the homeserver wants to send out a notification, it calls sygnal on `POST /_matrix/push/v1/notify` with the pusher data for each device.
|
|
||||||
- sygnal then encrypts and sends a web push message using the pusher data for each device in the notification
|
|
||||||
- this wakes up the service worker
|
|
||||||
- now we need to find which local session id this notification is for
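The subscription step in the list above roughly corresponds to this use of the standard browser Push API (a sketch, not Hydrogen's actual code; `serverPublicKey` would be the public key generated for the sygnal app):

```js
// create a web push subscription and extract the values that go into the
// pusher data on the homeserver
async function createPushSubscription(serverPublicKey) {
    const registration = await navigator.serviceWorker.ready;
    const subscription = await registration.pushManager.subscribe({
        userVisibleOnly: true,
        applicationServerKey: serverPublicKey
    });
    return {
        endpoint: subscription.endpoint,
        p256dh: subscription.getKey("p256dh"),
        auth: subscription.getKey("auth")
    };
}
```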
|
|
||||||
|
|
||||||
## Testing/development
|
|
||||||
|
|
||||||
- set up local synapse
|
|
||||||
- set up local sygnal
|
|
||||||
- write pushkin
|
|
||||||
- configure "hydrogen" app in sygnal config with a webpush pushkin
|
|
||||||
- start writing service worker code in hydrogen (we'll need to enable it for local dev)
|
|
||||||
- try to get a notification through
|
|
||||||
|
|
||||||
## Questions
|
|
||||||
|
|
||||||
- do we use the `event_id_only` format?
|
|
||||||
- for e2ee rooms, are we fine with just showing "Bob sent you a message (in room if not DM)", or do we want to sync and show the actual message? perhaps former can be MVP.
|
|
|
@ -1,5 +0,0 @@
|
||||||
# Read receipts
|
|
||||||
|
|
||||||
## UI
|
|
||||||
|
|
||||||
For the expanding avatars, trimmed at 5 or so, we could use css grid and switch from the right most cell to a cell that covers the whole width when clicking.
|
|
|
@ -1,22 +1,7 @@
|
||||||
Relations and redactions
|
Relations and redactions
|
||||||
|
|
||||||
events that refer to another event will need support in the SyncWriter, Timeline and SendQueue I think.
|
events that refer to another event will need support in the SyncWriter, Timeline and SendQueue I think.
|
||||||
SyncWriter will need to resolve the related remote id to a [fragmentId, eventIndex] and persist that on the event that relates to some other. Same for SendQueue? If unknown remote id, not much to do. However, once the remote id comes in, how do we handle it correctly? We might need a index on m.relates_to/event_id? I'd rather avoid that if possible, as that becomes useless once we have the target event of the relationship (we store the relations on the target event (see "One fetch" below) and have the target event id on the relation so can go both ways). I'm not sure this index will be completely useless actually. For edits, we'll want to be able to list all edits. For reactions, we'll want to fetch the authors and timestamps. For replies, we want to render the origin event and not use the fallback text? It is true though that only a minority of the events will have a related_to event id, so I wonder if it is faster to put it in a different store? Perhaps a prototype can clarify ...
|
SyncWriter will need to resolve the related remote id to a [fragmentId, eventIndex] and persist that on the event that relates to some other. Same for SendQueue? If unknown remote id, not much to do. However, once the remote id comes in, how do we handle it correctly? We might need a index on m.relates_to/event_id?
|
||||||
|
|
||||||
`event_relations` store could be this:
|
|
||||||
|
|
||||||
{
|
|
||||||
sourceEventId:
|
|
||||||
targetEventId:
|
|
||||||
rel_type:
|
|
||||||
roomId:
|
|
||||||
}
|
|
||||||
|
|
||||||
`{"key": "!bEWtlqtDwCLFIAKAcv:matrix.org|$apmyieZOI5vm4DzjEFzjbRiZW9oeQQR21adM6A6eRwM|m.annotation|m.reaction|$jSisozR3is5XUuDZXD5cyaVMOQ5_BtFS3jKfcP89MOM"}`
|
|
||||||
|
|
||||||
or actually stored like `roomId|targetEventId|rel_type|sourceEventId`. How can we get the last edit? They are sorted by origin_server_ts IIRC? Should this be part of the key? Solved: we store the event id of a replacement on the target event
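A sketch of that key scheme and the corresponding prefix lookup in IndexedDB (helper names are made up for illustration):

```js
// composite key as described above: roomId|targetEventId|rel_type|sourceEventId
function encodeRelationKey(roomId, targetEventId, relType, sourceEventId) {
    return `${roomId}|${targetEventId}|${relType}|${sourceEventId}`;
}

// all relations of a given type for a target event can then be read
// with a prefix range over that key
function relationRange(roomId, targetEventId, relType) {
    const prefix = `${roomId}|${targetEventId}|${relType}|`;
    return IDBKeyRange.bound(prefix, prefix + "\uffff", false, false);
}
```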
|
|
||||||
|
|
||||||
We should look into what part of the relationships will be present on the event once it is received from the server (e.g. m.replace might be evident, but not all the reaction events?). If not, we could add an object store with missing relation targets.
|
|
||||||
|
|
||||||
The timeline can take incoming events from both the SendQueue and SyncWriter, and see if their related to fragmentId/eventIndex is in view, and then update it?
|
The timeline can take incoming events from both the SendQueue and SyncWriter, and see if their related to fragmentId/eventIndex is in view, and then update it?
|
||||||
|
|
||||||
|
@ -28,242 +13,6 @@ Also, related ids should be processed recursively. If event 3 is a redaction of
|
||||||
|
|
||||||
As a UI for reactions, we could show (👍 14 + 1) where the + 1 is our own local echo (perhaps style it pulsating and/or in grey?). Clicking it again would just show 14 and when the remote echo comes in it would turn into 15.
|
As a UI for reactions, we could show (👍 14 + 1) where the + 1 is our own local echo (perhaps style it pulsating and/or in grey?). Clicking it again would just show 14 and when the remote echo comes in it would turn into 15.
|
||||||
|
|
||||||
## One fetch for timeline reading
|
|
||||||
|
|
||||||
wrt to how to store relations in indexeddb, we could store all local ids of related events (per type?) on the related-to event, so we can fetch them in one query for *all* events that have related events that were fetched in a range, without needing another index that would slow down writes. So that would only add 1 query which we only need to do when there are relations in the TimelineReader. what do we do though if we receive the relating event before the related-to event? An index would fix this mostly ... or we need a temp store where we store unresolved relations...
|
wrt to how to store relations in indexeddb, we could store all local ids of related events (per type?) on the related-to event, so we can fetch them in one query for *all* events that have related events that were fetched in a range, without needing another index that would slow down writes. So that would only add 1 query which we only need to do when there are relations in the TimelineReader. what do we do though if we receive the relating event before the related-to event? An index would fix this mostly ... or we need a temp store where we store unresolved relations...
|
||||||
|
|
||||||
Replies should definitely use this relation mechanism, so we can easily show the most up to date version of the replied-to event.
|
|
||||||
|
|
||||||
Redactions can de done separately
|
|
||||||
|
|
||||||
For replies (or references in general?), we do need to load the referred-to event in a second read. For reactions and edits, they will already be stored on the target event.
|
|
||||||
|
|
||||||
|
|
||||||
## Example events from the wild
|
|
||||||
|
|
||||||
### Reaction
|
|
||||||
|
|
||||||
```json
|
|
||||||
{
|
|
||||||
"content": {
|
|
||||||
"m.relates_to": {
|
|
||||||
"event_id": "$apmyieZOI5vm4DzjEFzjbRiZW9oeQQR21adM6A6eRwM",
|
|
||||||
"key": "👍️",
|
|
||||||
"rel_type": "m.annotation"
|
|
||||||
}
|
|
||||||
},
|
|
||||||
"origin_server_ts": 1621284357314,
|
|
||||||
"sender": "@charly:matrix.org",
|
|
||||||
"type": "m.reaction",
|
|
||||||
"unsigned": {
|
|
||||||
"age": 64140856
|
|
||||||
},
|
|
||||||
"event_id": "$jSisozR3is5XUuDZXD5cyaVMOQ5_BtFS3jKfcP89MOM",
|
|
||||||
"room_id": "!bEWtlqtDwCLFIAKAcv:matrix.org"
|
|
||||||
}
|
|
||||||
```
|
|
||||||
|
|
||||||
### Edit
|
|
||||||
|
|
||||||
```json
|
|
||||||
{
|
|
||||||
"content": {
|
|
||||||
"body": " * ...",
|
|
||||||
"m.new_content": {
|
|
||||||
"body": "...",
|
|
||||||
"msgtype": "m.text"
|
|
||||||
},
|
|
||||||
"m.relates_to": {
|
|
||||||
"event_id": "$OXL0yk18y-VG3DuTybVh9j9cvdjjnnzWbBKY-QPXJ-0",
|
|
||||||
"rel_type": "m.replace"
|
|
||||||
},
|
|
||||||
"msgtype": "m.text"
|
|
||||||
},
|
|
||||||
"origin_server_ts": 1621264902371,
|
|
||||||
"room_id": "!bEWtlqtDwCLFIAKAcv:matrix.org",
|
|
||||||
"sender": "@alice:matrix.org",
|
|
||||||
"type": "m.room.message",
|
|
||||||
"unsigned": {
|
|
||||||
"age": 83636544
|
|
||||||
},
|
|
||||||
"event_id": "$Z7sFSKWtLTFoMMabkPFe0PSKWpkakjWUkYQeBU8IHVc",
|
|
||||||
"user_id": "@alice:matrix.org",
|
|
||||||
"age": 83636544
|
|
||||||
}
|
|
||||||
```
|
|
||||||
|
|
||||||
### Reply
|
|
||||||
```json
|
|
||||||
{
|
|
||||||
"content": {
|
|
||||||
"body": "...",
|
|
||||||
"format": "org.matrix.custom.html",
|
|
||||||
"formatted_body": "...",
|
|
||||||
"m.relates_to": {
|
|
||||||
"m.in_reply_to": {
|
|
||||||
"event_id": "$rGD9iQ93UmopkkagJ0tW_FHATa8IrvABg9cM_tNUvu4"
|
|
||||||
}
|
|
||||||
},
|
|
||||||
"msgtype": "m.text"
|
|
||||||
},
|
|
||||||
"origin_server_ts": 1621242338597,
|
|
||||||
"room_id": "!bEWtlqtDwCLFIAKAcv:matrix.org",
|
|
||||||
"sender": "@bob:matrix.org",
|
|
||||||
"type": "m.room.message",
|
|
||||||
"unsigned": {
|
|
||||||
"age": 106408661,
|
|
||||||
"m.relations": {
|
|
||||||
"m.annotation": {
|
|
||||||
"chunk": [
|
|
||||||
{
|
|
||||||
"type": "m.reaction",
|
|
||||||
"key": "👍️",
|
|
||||||
"count": 1
|
|
||||||
}
|
|
||||||
]
|
|
||||||
}
|
|
||||||
}
|
|
||||||
},
|
|
||||||
"event_id": "$yS_n5n3cIO2aTtek0_2ZSlv-7g4YYR2zKrk2mFCW_q4",
|
|
||||||
"user_id": "@bob:matrix.org",
|
|
||||||
"age": 106408661
|
|
||||||
}
|
|
||||||
```
|
|
||||||
|
|
||||||
### Remaining spec issues
|
|
||||||
|
|
||||||
- m.in_reply_to vs rel_type
|
|
||||||
- reactions in unsigned can't be deduplicated
|
|
||||||
- how to sort edits? for now we went with origin_server_ts
|
|
||||||
- do we say anything about events of a different type replacing an event?
|
|
||||||
- do we specify that replies should be to the original event, not the edit?
|
|
||||||
|
|
||||||
## What to store denormalized on the event itself?
|
|
||||||
|
|
||||||
```json
|
|
||||||
{
|
|
||||||
"reactions": {
|
|
||||||
"👍": {"count": 3, "me": true, "firstTimestamp": 2323989},
|
|
||||||
"👋": {"count": 1, "me": false, "firstTimestamp": 2323989}
|
|
||||||
},
|
|
||||||
"replacingEvent": {
|
|
||||||
"event_id": "$abc",
|
|
||||||
"origin_server_ts": ?,
|
|
||||||
"content": {}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
```
|
|
||||||
|
|
||||||
we only need the m.new_content and event id of the replacing event; even the timestamp we could get by loading the event on hover?
|
|
||||||
|
|
||||||
|
|
||||||
store the replacing event alongside the original event because we need to keep the original event somewhere, but for display purposes we'd use the content of the replacingEvent. Should we just store the content of the replacing event? Or even just the `m.new_content`? Could make sense, but perhaps also store the new timestamp along with it. What about when somebody other than the sender edits?
|
|
||||||
|
|
||||||
# Aggregation
|
|
||||||
|
|
||||||
what do we do with the aggregated timestamps? do we store them? if so, where?
|
|
||||||
|
|
||||||
when we hover reactions, we want to show the authors, rather than the timestamp, so we'll need to call /relations for that anyway. so no need to store the timestamp?
|
|
||||||
|
|
||||||
`/relations` is in fact a bit like the server-side version of our `event_relations` store
|
|
||||||
|
|
||||||
## Dealing with gappy syncs
|
|
||||||
|
|
||||||
Doesn't look like synapse currently tells us which target events have outdated relations after a gappy sync. MSC 2675 proposes `stale_events`, but inspecting network traffic, that doesn't seem to be implemented right now.
|
|
||||||
|
|
||||||
So, if we needed to determine locally whether relations are outdated, we could check whether any of the fragments between an event and the last synced event have pagination tokens. Although we would have no way to clear this "flag" if we were to fetch the relations after this.
|
|
||||||
|
|
||||||
As an initial cut it is probably fine if reactions and edits are outdated unless you scroll up all the way to an event (and hence back-fill), as this is what we'll always do (apart from permalinks).
|
|
||||||
|
|
||||||
### Permalinks
|
|
||||||
|
|
||||||
So once we do support permalinks, how do we solve this? Element solves this by not storing `/context` and the `/messages` requests around, hence it is always fresh.
|
|
||||||
|
|
||||||
We could store the live fragment id in events when we refresh their `/relations`, and if it is not the current live fragment id, you're outdated.
|
|
||||||
|
|
||||||
To accurately display anything not in the live fragment, we either need to:
|
|
||||||
- backfill until there are no more gaps between the event fragment and the live fragment.
|
|
||||||
- -- there is no way to know how many events this would load.
|
|
||||||
- ++ that we know which gaps we've already filled
|
|
||||||
- ++ we need to do this for e2ee rooms anyway
|
|
||||||
- ++ we need to implement this anyway for non-gappy sync
|
|
||||||
- ++ we can only do this as an initial cut, especially as we don't support permalinks yet
|
|
||||||
- Refetch the `/context` and `/messages` for what is on the screen and reconcile.
|
|
||||||
- ++ we know how much we'll fetch
|
|
||||||
- -- we need to fetch everything again if we have one small gap
|
|
||||||
- we store the current live fragment when doing this, so can know:
|
|
||||||
- if we need to refetch / if there is a gap
|
|
||||||
- how many gaps we need to fill
|
|
||||||
- could we fall back to this strategy if the first one takes too long/many events?
|
|
||||||
- we could pick a heuristic to pick either strategy (like time between syncs or try for x events and if the gap is not closed, give up)?
|
|
||||||
- Refetch /aggregations for every event
|
|
||||||
- ++ we don't get the events (we don't need them? edits?)
|
|
||||||
- --- need to do it for every event
|
|
||||||
- use `stale_events` if we actually implement it one day
|
|
||||||
- this can work well with the first strategy, we'd store a "relationsStale" flag on the event, and refetch /relations immediately or if scrolled into view.
|
|
||||||
|
|
||||||
# API
|
|
||||||
|
|
||||||
## Reactions
|
|
||||||
|
|
||||||
```js
|
|
||||||
const reaction = eventEntry.react("👍");
|
|
||||||
room.sendEvent("m.reaction", reaction);
|
|
||||||
```
|
|
||||||
|
|
||||||
```js
|
|
||||||
// this is an ObservableMap mapping the key to the count (or rather SortedArray?)
|
|
||||||
// probably fine to just use a SortedArray to sorts by count, then key
|
|
||||||
// actually, maybe better to do ObservableMap and store first timestamp so we can support https://github.com/vector-im/element-web/issues/9698 outside of SDK.
|
|
||||||
const reactions = eventEntry.reactions.sortValues((r1, r2) => r1.count - r2.count);
|
|
||||||
new ListView({list: reactions}, reaction => new ReactionView(reaction, room));
|
|
||||||
// reaction has:
|
|
||||||
reaction.key
|
|
||||||
reaction.hasMyReaction // how do we get this from the bundled events?
|
|
||||||
reaction.count
|
|
||||||
reaction.firstTimestamp
|
|
||||||
room.sendEvent("m.reaction", reaction.react());
|
|
||||||
// this won't work as we don't have the event id:
|
|
||||||
// room.sendRedaction(reaction.redact());
|
|
||||||
```
|
|
||||||
|
|
||||||
## Edits
|
|
||||||
|
|
||||||
```js
|
|
||||||
const replacement = eventEntry.replace({});
|
|
||||||
room.sendEvent(eventEntry.eventType, replacement);
|
|
||||||
```
|
|
||||||
|
|
||||||
## Replies
|
|
||||||
|
|
||||||
```js
|
|
||||||
const reply = eventEntry.reply({});
|
|
||||||
room.sendEvent("m.room.message", reply);
|
|
||||||
```
|
|
||||||
|
|
||||||
## Redactions
|
|
||||||
|
|
||||||
```js
|
|
||||||
const redaction = eventEntry.redact();
|
|
||||||
room.sendRedaction(redaction);
|
|
||||||
```
|
|
||||||
|
|
||||||
All of these reaction and edit entries should probably not be live, and at some point in the future if we need them to be live for some use case, we can add an additional api to make them live with an explicit release mechanism?
|
|
||||||
```js
|
|
||||||
|
|
||||||
// there is no api to get the reactions by sender though, so perhaps we need to load them all and then find our own?
|
|
||||||
const reactions = await eventEntry.getReactionEntries("👍");
|
|
||||||
const reaction = reactions.find(r => r.sender === ownUserId);
|
|
||||||
room.sendRedaction(reaction.redact());
|
|
||||||
```
|
|
||||||
|
|
||||||
```js
|
|
||||||
const edits = await eventEntry.getEdits();
|
|
||||||
room.sendRedaction(edits[1].redact());
|
|
||||||
```
|
|
||||||
|
|
||||||
```js
|
|
||||||
const lastEdit = await eventEntry.getLastEdit();
|
|
||||||
room.sendRedaction(lastEdit.redact());
|
|
||||||
```
|
|
||||||
|
|
||||||
|
|
|
@ -1,17 +0,0 @@
|
||||||
If we were to render replies in a smart way (instead of relying on the fallback), we would
|
|
||||||
need to manually find entries that are pointed to by `in_reply_to`. Consulting the timeline
|
|
||||||
code, it seems appropriate to add a `_replyingTo` field to a `BaseEventEntry` (much like we
|
|
||||||
have `_pendingAnnotations` and `pendingRedactions`). We can then:
|
|
||||||
* use `TilesCollection`'s `_findTileIdx` to find the tile of the message being replied to,
|
|
||||||
and put a reference to its tile into the new tile being created (?).
|
|
||||||
* It doesn't seem appropriate to add an additional argument to TileCreator, but we may
|
|
||||||
want to re-use tiles instead of creating duplicate ones. Otherwise, of course, `tileCreator`
|
|
||||||
can create more than one tile from an entry's `_replyingTo` field.
|
|
||||||
* Resolve `_replyingTo` much like we resolve `redactingEntry` in timeline: search by `relatedTxnId`
|
|
||||||
and `relatedEventId` if our entry is a reply (we can add an `isReply` flag there).
|
|
||||||
* This works fine for local entries, which are loaded via an `AsyncMappedList`, but what
|
|
||||||
about remote entries? They are not loaded asynchronously, and the fact that they are
|
|
||||||
not a derived collection is used throughout `Timeline`.
|
|
||||||
* Entries that are replies but whose replied-to entries aren't loaded yet probably need
|
|
||||||
to be tracked somehow?
|
|
||||||
* Then, on timeline add, check new IDs and update corresponding entries
|
|
|
@ -1,109 +0,0 @@
|
||||||
SDK:
|
|
||||||
|
|
||||||
- we need to compile src/lib.ts to javascript, with a d.ts file generated as well. We need to compile to javascript once for cjs and once of es modules. The package.json looks like this:
|
|
||||||
|
|
||||||
```
|
|
||||||
"main": "./dist/index.cjs",
|
|
||||||
"exports": {
|
|
||||||
"import": "./dist/index.mjs",
|
|
||||||
"require": "./dist/index.cjs"
|
|
||||||
},
|
|
||||||
"types": "dist/index.d.ts",
|
|
||||||
```
|
|
||||||
|
|
||||||
we don't need to bundle for the sdk case! we might need to do some transpilation to just plain ES6 (e.g. don't assume `?.` and `??`); we could use a browserslist query for this, e.g. `node 14`. esbuild seems to support this as well, tldraw uses esbuild for their build.
|
|
||||||
|
|
||||||
one advantage of not bundling the files for the sdk is that you can still use import overrides in the consuming project build settings. is that an idiomatic way of doing things though?
|
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
this way we will support typescript, non-esm javascript and esm javascript using libhydrogen as an SDK
|
|
||||||
|
|
||||||
got this from https://medium.com/dazn-tech/publishing-npm-packages-as-native-es-modules-41ffbc0a9dea
|
|
||||||
|
|
||||||
how about the assets?
|
|
||||||
|
|
||||||
we also need to build the app
|
|
||||||
|
|
||||||
we need to be able to version libhydrogen independently from hydrogen the app? as any api breaking changes will need a major version increase. we probably want to end up with a monorepo where the app uses the sdk as well and we just use the local code with yarn link?
|
|
||||||
|
|
||||||
## Assets
|
|
||||||
|
|
||||||
we want to provide scss/sass files, but also css that can be included
|
|
||||||
https://github.com/webpack/webpack/issues/7353 seems to imply that we just need to include the assets in the published files and from there on it is the consumer of libhydrogen's problem.
|
|
||||||
|
|
||||||
|
|
||||||
how does all of this tie in with vite?
|
|
||||||
|
|
||||||
|
|
||||||
we want to have hydrogenapp be a consumer of libhydrogen, potentially as two packages in a monorepo ... but we want the SDK to expose views and stylesheets... without having an index.html (which would be in hydrogenapp). this seems a bit odd...?
|
|
||||||
|
|
||||||
what would be in hydrogenapp actually? just an index.html file?
|
|
||||||
|
|
||||||
I'm not sure it makes sense to have them be 2 different packages in a monorepo, they should really be two artifacts from the same directory.
|
|
||||||
|
|
||||||
the stylesheets included in libhydrogen are from the same main.css file as is used in the app
|
|
||||||
|
|
||||||
https://www.freecodecamp.org/news/build-a-css-library-with-vitejs/
|
|
||||||
|
|
||||||
basically, we import the sass file from src/lib.ts so it is included in the assets there too, and we also create a plugin that emits a file for every sass file as suggested in the link above?
|
|
||||||
|
|
||||||
we probably want two different build commands for the app and the sdk though, we could have a parent vite config that both build configs extend from?
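A hedged sketch of what such a library config extending a shared base could look like (the base config file name and the entry point here are assumptions):

```js
// vite.sdk-lib-config.js – extend a shared base config with library output
import { mergeConfig } from "vite";
import baseConfig from "./vite.common-config.js";

export default mergeConfig(baseConfig, {
    build: {
        target: "es2015",
        lib: {
            entry: "./src/lib.ts",
            formats: ["es", "cjs"]
        }
    }
});
```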
|
|
||||||
|
|
||||||
|
|
||||||
### Dependency assets
|
|
||||||
our dependencies should not be bundled for the SDK case. So if we import aesjs, it would be up to the build system of the consuming project to make that import work.
|
|
||||||
|
|
||||||
the paths.ts thingy ... we want to make it easy for people to setup the assets for our dependencies (olm), some assets are also part of the sdk itself. it might make sense to make all of the assets there part of the sdk (e.g. bundle olm.wasm and friends?) although shipping crypto, etc ...
|
|
||||||
|
|
||||||
perhaps we should have an include file per build system that treats own assets and dep assets the same by including the package name as well for our own deps:
|
|
||||||
```js
|
|
||||||
import _downloadSandboxPath from "@matrix-org/hydrogen-sdk/download-sandbox.html?url";
|
|
||||||
import _serviceWorkerPath from "@matrix-org/hydrogen-sdk/sw.js?url"; // not yet sure this is the way to do it
|
|
||||||
import olmWasmPath from "@matrix-org/olm/olm.wasm?url";
|
|
||||||
import olmJsPath from "@matrix-org/olm/olm.js?url";
|
|
||||||
import olmLegacyJsPath from "@matrix-org/olm/olm_legacy.js?url";
|
|
||||||
|
|
||||||
export const olmPaths = {
|
|
||||||
wasm: olmWasmPath,
|
|
||||||
legacyBundle: olmLegacyJsPath,
|
|
||||||
wasmBundle: olmJsPath,
|
|
||||||
};
|
|
||||||
|
|
||||||
export const downloadSandboxPath = _downloadSandboxPath;
|
|
||||||
```
|
|
||||||
|
|
||||||
we could put this file per build system, as ESM, in dist as well so you can include it to get the paths
|
|
||||||
|
|
||||||
|
|
||||||
## Tooling
|
|
||||||
|
|
||||||
- `vite` a more high-level build tool that takes your index.html and turns it into optimized assets that you can host for production, as well as a very fast dev server. is used to have good default settings for our tools, typescript support, and also deals with asset compiling. good dev server. Would be nice to have the same tool for dev and prod. vite has good support for using `import` for anything that is not javascript, where we had an issue with `snowpack` (to get the prod path of an asset).
|
|
||||||
- `rollup`: inlines ES modules into one or more bundles (`vite` uses it under the hood for production builds)
|
|
||||||
- `lerna` is used to handle multi-package monorepos
|
|
||||||
- `esbuild`: a js/ts build tool that we could use for building the lower level sdk where no other assets are involved, `vite` uses it for fast dev builds (`rollup` for prod). For now we won't extract a lower level sdk though.
|
|
||||||
|
|
||||||
|
|
||||||
## TODO
|
|
||||||
|
|
||||||
- finish vite app build (without IE11 for now?)
|
|
||||||
- create vite config to build src/lib.ts in cjs and esm, inheriting from a common base config with the app config
|
|
||||||
- this will create a dist folder with
|
|
||||||
- the whole source tree in es and cjs format
|
|
||||||
- an ES module file to import to get the asset paths as they are expected by Platform, per build system
|
|
||||||
- assets from hydrogen itself:
|
|
||||||
- css files and any resource used therein
|
|
||||||
- download-sandbox.html
|
|
||||||
- a type declaration file (index.d.ts)
|
|
||||||
|
|
||||||
## Questions
|
|
||||||
- can rollup not bundle the source tree and leave modules intact?
|
|
||||||
- if we can use a function that creates a chunk per file to pass to manualChunks and disable chunk hashing we can probably do this. See https://rollupjs.org/guide/en/#outputmanualchunks
|
|
||||||
|
|
||||||
looks like we should be able to disable chunk name hashing with chunkFileNames https://rollupjs.org/guide/en/#outputoptions-object
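A sketch of those rollup output options (to be verified in a vite test config; the chunk-naming logic here is an assumption):

```js
// one chunk per source module, with hashing disabled via chunkFileNames
export default {
    output: {
        manualChunks: id => {
            const idx = id.indexOf("/src/");
            return idx !== -1 ?
                id.slice(idx + "/src/".length).replace(/\.(js|ts)$/, "") :
                "vendor";
        },
        chunkFileNames: "[name].js",
        entryFileNames: "[name].js"
    }
};
```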
|
|
||||||
|
|
||||||
|
|
||||||
we should test this with a vite test config
|
|
||||||
|
|
||||||
we also need to compile down to ES6, both for the app and for the sdk
|
|
|
@ -1,54 +0,0 @@
|
||||||
Pseudo code of how SSO should work:
|
|
||||||
|
|
||||||
```js
|
|
||||||
// 1. Starting SSO
|
|
||||||
const loginOptions = await sessionContainer.queryLogin("matrix.org");
|
|
||||||
// every login option (the return type of loginOptions.password and loginOptions.sso.createLogin)
|
|
||||||
// that can be passed in to startWithLogin will implement a common LoginMethod interface that has:
|
|
||||||
// - a `homeserver` property (so the hsApi can be created for it before passing it into `login`)
|
|
||||||
// - a method `async login(hsApi, deviceName)` that returns loginData (device_id, user_id, access_token)
|
|
||||||
|
|
||||||
// loginOptions goes to the LoginViewModel
|
|
||||||
|
|
||||||
// if password login, mapped to PasswordLoginViewModel
|
|
||||||
if (loginOptions.password) {
|
|
||||||
sessionContainer.startWithLogin(loginOptions.password(username, password));
|
|
||||||
}
|
|
||||||
|
|
||||||
// if sso login, mapped to SSOLoginViewModel
|
|
||||||
if (loginOptions.sso) {
|
|
||||||
const {sso} = loginOptions;
|
|
||||||
// params contains everything needed to create a callback url:
|
|
||||||
// the homeserver, and optionally the provider
|
|
||||||
let provider = null;
|
|
||||||
if (sso.providers) {
|
|
||||||
// show button for each provider
|
|
||||||
// pick the first one as an example
|
|
||||||
provider = providers[0];
|
|
||||||
}
|
|
||||||
// when sso button is clicked:
|
|
||||||
// store the homeserver for when we get redirected back after the sso flow
|
|
||||||
platform.settingsStorage.setString("sso_homeserver", loginOptions.homeserver);
|
|
||||||
// create the redirect url
|
|
||||||
const callbackUrl = urlCreator.createSSOCallbackURL(); // will just return the document url without any fragment
|
|
||||||
const redirectUrl = sso.createRedirectUrl(callbackUrl, provider);
|
|
||||||
// and open it
|
|
||||||
platform.openURL(redirectUrl);
|
|
||||||
}
|
|
||||||
|
|
||||||
// 2. URLRouter, History & parseUrlPath will need to also take the query params into account, so hydrogen.element.io/?loginToken=abc can be converted into a navigation path of [{type: "sso", value: "abc"}]
|
|
||||||
|
|
||||||
// 3. when "sso" is on the navigation path, a CompleteSSOLoginView is shown.
|
|
||||||
// It will use the same SessionLoadView(Model) as for password login once login is called.
|
|
||||||
//
|
|
||||||
// Also see RootViewModel._applyNavigation.
|
|
||||||
//
|
|
||||||
// Its view model will do something like:
|
|
||||||
|
|
||||||
// need to retrieve ssoHomeserver url in localStorage
|
|
||||||
const ssoHomeserver = platform.settingsStorage.getString("sso_homeserver");
|
|
||||||
// need to retrieve loginToken from query parameters
|
|
||||||
const loginToken = "..."; // passed in to view model constructor
|
|
||||||
const loginOptions = await sessionContainer.queryLogin(ssoHomeserver);
|
|
||||||
sessionContainer.startWithLogin(loginOptions.sso.createLogin(loginToken));
|
|
||||||
```
|
|
|
@ -1,7 +0,0 @@
|
||||||
## Get member for timeline event
|
|
||||||
|
|
||||||
so when writing sync, we persist the display name and avatar
|
|
||||||
|
|
||||||
the server might or might not support lazy loading
|
|
||||||
|
|
||||||
if it is a room we just joined
|
|
116
doc/invites.md
116
doc/invites.md
|
@ -3,118 +3,4 @@
|
||||||
- invite_state doesn't update over /sync
|
- invite_state doesn't update over /sync
|
||||||
- can we reuse room summary? need to clear when joining
|
- can we reuse room summary? need to clear when joining
|
||||||
- rely on filter operator to split membership=join from membership=invite?
|
- rely on filter operator to split membership=join from membership=invite?
|
||||||
|
-
|
||||||
- invite_state comes once, and then not again
|
|
||||||
- only state (no heroes for example, but we do get the members)
|
|
||||||
- wants:
|
|
||||||
- different class to represent invited room, with accept or reject method?
|
|
||||||
- make it somewhat easy to render just joined rooms (rely on filter and still put them all in the same observable map)
|
|
||||||
- make the transition from invite to joined smooth
|
|
||||||
- reuse room summary logic?
|
|
||||||
|
|
||||||
InvitedRoom
|
|
||||||
isDM
|
|
||||||
isEncrypted
|
|
||||||
name
|
|
||||||
|
|
||||||
timestamp
|
|
||||||
accept()
|
|
||||||
reject()
|
|
||||||
JoiningRoom
|
|
||||||
to store intent of room you joined through directory, invite, or just /join roomid
|
|
||||||
also joining is retried when coming back online
|
|
||||||
|
|
||||||
forget()
|
|
||||||
Room
|
|
||||||
|
|
||||||
so, also taking into account that other types of rooms we might not want to expose through session.rooms will have invites too,
|
|
||||||
perhaps it is best to expose invites through a different observable collection. You can always join/concat them to show in
|
|
||||||
the same list.
|
|
||||||
|
|
||||||
How do we handle a smooth UI transition when accepting an invite though?
|
|
||||||
For looking at the room itself:
|
|
||||||
- we would attach to the Invite event emitter, and we can have a property "joined" that we would update. Then you know you can go look for the room (or even allow to access the room through a property?)
|
|
||||||
- so this way the view model can know when to switch and signal the view
|
|
||||||
For the room list:
|
|
||||||
- the new Room will be added at exactly the same moment the Invite is removed,
|
|
||||||
so it should already be fairly smooth whether they are rendered in the same list or not.
|
|
||||||
|
|
||||||
How will we locate the Invite/Room during sync when we go from invite => join?
|
|
||||||
- have both adhere to sync target api (e.g. prepareSync, ...) and look in invite map
|
|
||||||
if room id is not found in room map in session.getroom.
|
|
||||||
- how do we remove the invite when join?
|
|
||||||
- we ca
|
|
||||||
Where to store?
|
|
||||||
- room summaries?
|
|
||||||
- do we have an interest in keeping the raw events?
|
|
||||||
- room versions will add another layer of indirection to the room summaries (or will it? once you've upgraded the room, we don't care too much anymore about the details of the old room? hmmm, we do care about whether it is encrypted or not... we need everything to be able to show the timeline in any case)
|
|
||||||
|
|
||||||
|
|
||||||
Invite => accept() => Room (ends up in session.rooms)
|
|
||||||
(.type) => Space (ends up in session.spaces)
|
|
||||||
Invite:
|
|
||||||
- isEncrypted
|
|
||||||
- isDM
|
|
||||||
- type
|
|
||||||
- id
|
|
||||||
- name
|
|
||||||
- avatarUrl
|
|
||||||
- timestamp
|
|
||||||
- joinRule (to say whether you cannot join this room again if you reject)
|
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
new "memberships":
|
|
||||||
joining (when we want to join/are joining but haven't received remote echo yet)
|
|
||||||
leaving (needed?)
|
|
||||||
|
|
||||||
maybe it's slightly overkill to persist the intent of joining or leaving a room,
|
|
||||||
but I do want a way to local echo joining a room,
|
|
||||||
so that it immediately appears in the room list when clicking join in the room directory / from a url ... how would we sort these rooms though? we can always add another collection, but I'm not sure invites should be treated the same, they can already local echo on the invite object itself.
|
|
||||||
|
|
||||||
|
|
||||||
since invites don't update, we could, in sync when processing a new join just set a flag on the roomsyncstate if a room is newly created and in writeSync/afterSync check if there is a `session.invites.get(id)` and call `writeSync/afterSync` on it as well. We need to handle leave => invite as well. So don't check for invites only if it is a new room, but also if membership is leave
|
|
||||||
|
|
||||||
transitions are:
|
|
||||||
invite => join
|
|
||||||
invite => leave
|
|
||||||
invite => ban
|
|
||||||
join => left
|
|
||||||
join => ban
|
|
||||||
leave => invite
|
|
||||||
leave => join
|
|
||||||
leave => ban
|
|
||||||
ban => leave
|
|
||||||
none => invite
|
|
||||||
none => join
|
|
||||||
none => ban
|
|
||||||
|
|
||||||
kick should keep the room & timeline visible (even in room list, until you archive?)
|
|
||||||
leave should close the room. So explicit archive() step on room ?
|
|
||||||
|
|
||||||
Room => leave() => ArchivedRoom (just a Room loaded from archived_room_summaries) => .forget()
|
|
||||||
=> .forget()
|
|
||||||
|
|
||||||
Room receives leave membership
|
|
||||||
- if sender === state_key, we left, and we archive the room (remove it from the room list, but keep it in storage)
|
|
||||||
- if sender !== state_key, we got kicked, and we write the membership but don't archive so it stays in the room list until you call archive/forget on the room
|
|
||||||
when calling room.leave(), do you have to call archive() or forget() after as well? or rather a param of leave and stored intent? sounds like a non-atomic operation to me ...
|
|
||||||
we should be able to archive or forget before leave remote echo arrives
|
|
||||||
|
|
||||||
if two stores, this could mean we could have both an invite and a room with kicked state for a given room id?
|
|
||||||
|
|
||||||
we should avoid key collisions between `session.invites` and `session.rooms` (also `session.archivedRooms` once supported?) in any case,
|
|
||||||
because if we join them to display in one list, things get complicated.
|
|
||||||
|
|
||||||
avoiding key collisions can happen both with 1 or multiple stores for different room states and is just a matter
|
|
||||||
of carefully removing one state representation before adding another one.
|
|
||||||
so a kicked or left room would disappear from session.rooms when an invite is synced?
|
|
||||||
this would prevent you from seeing the old timeline for example, and if you reject, the old state would come back?
|
|
||||||
|
|
||||||
|
|
||||||
# Decisions
|
|
||||||
- we expose session.invites separate from session.rooms because they are of a different type.
|
|
||||||
This way, you only have methods on the object that make sense (accept on Room does not make sense, like Invite.openTimeline doesn't make sense)
|
|
||||||
- we store invites (and likely also archived rooms) in a different store, so that we don't have to clear/add properties where they both differ when transitioning. Also, this gives us the possibility to show the timeline on a room that you have previously joined, as the room summary and invite can exist at the same time. (need to resolve key collision question though for this)
|
|
||||||
- we want to keep kicked rooms in the room list until explicitly archived
|
|
||||||
- room id collisions between invites and rooms, can we implement a strategy to prefer invites in the join operator?
|
|
||||||
|
|
BIN
icon.png
Normal file
BIN
icon.png
Normal file
Binary file not shown.
After Width: | Height: | Size: 40 KiB |
30
index.html
Normal file
30
index.html
Normal file
|
@ -0,0 +1,30 @@
|
||||||
|
<!DOCTYPE html>
|
||||||
|
<html>
|
||||||
|
<head>
|
||||||
|
<meta charset="utf-8">
|
||||||
|
<meta name="viewport" content="width=device-width, user-scalable=no">
|
||||||
|
<meta name="application-name" content="Hydrogen Chat"/>
|
||||||
|
<meta name="apple-mobile-web-app-capable" content="yes">
|
||||||
|
<meta name="apple-mobile-web-app-status-bar-style" content="black">
|
||||||
|
<meta name="apple-mobile-web-app-title" content="Hydrogen Chat">
|
||||||
|
<meta name="description" content="A matrix chat application">
|
||||||
|
<link rel="stylesheet" type="text/css" href="src/ui/web/css/main.css">
|
||||||
|
<link rel="stylesheet" type="text/css" href="src/ui/web/css/themes/element/theme.css" title="Element Theme">
|
||||||
|
<link rel="alternate stylesheet" type="text/css" href="src/ui/web/css/themes/bubbles/theme.css" title="Bubbles Theme">
|
||||||
|
</head>
|
||||||
|
<body class="hydrogen">
|
||||||
|
<script id="version" type="disabled">
|
||||||
|
window.HYDROGEN_VERSION = "%%VERSION%%";
|
||||||
|
</script>
|
||||||
|
<script id="main" type="module">
|
||||||
|
import {main} from "./src/main.js";
|
||||||
|
main(document.body);
|
||||||
|
</script>
|
||||||
|
<script id="service-worker" type="disabled">
|
||||||
|
if('serviceWorker' in navigator) {
|
||||||
|
navigator.serviceWorker.register('sw.js')
|
||||||
|
.then(function() { console.log("Service Worker registered"); });
|
||||||
|
}
|
||||||
|
</script>
|
||||||
|
</body>
|
||||||
|
</html>
|
2313
package-lock.json
generated
Normal file
2313
package-lock.json
generated
Normal file
File diff suppressed because it is too large
65
package.json
65
package.json
@@ -1,24 +1,15 @@
 {
     "name": "hydrogen-web",
-    "version": "0.3.1",
+    "version": "0.0.27",
     "description": "A javascript matrix client prototype, trying to minize RAM usage by offloading as much as possible to IndexedDB",
+    "main": "index.js",
     "directories": {
         "doc": "doc"
     },
-    "enginesStrict": {
-        "node": ">=15"
-    },
     "scripts": {
-        "lint": "eslint --cache src/",
-        "lint-ts": "eslint src/ -c .ts-eslintrc.js --ext .ts",
-        "lint-ci": "eslint src/",
-        "test": "impunity --entry-point src/platform/web/main.js src/platform/web/Platform.js --force-esm-dirs lib/ src/ --root-dir src/",
-        "test:postcss": "impunity --entry-point scripts/postcss/tests/css-compile-variables.test.js scripts/postcss/tests/css-url-to-variables.test.js",
-        "test:sdk": "yarn build:sdk && cd ./scripts/sdk/test/ && yarn --no-lockfile && node test-sdk-in-esm-vite-build-env.js && node test-sdk-in-commonjs-env.js",
-        "start": "vite --port 3000",
-        "build": "vite build && ./scripts/cleanup.sh",
-        "build:sdk": "./scripts/sdk/build.sh",
-        "watch:sdk": "./scripts/sdk/build.sh && yarn run vite build -c vite.sdk-lib-config.js --watch"
+        "test": "node_modules/.bin/impunity --entry-point src/main.js --force-esm",
+        "start": "node scripts/serve-local.js",
+        "build": "node --experimental-modules scripts/build.mjs"
     },
     "repository": {
         "type": "git",
@@ -31,36 +22,26 @@
     },
     "homepage": "https://github.com/vector-im/hydrogen-web/#readme",
     "devDependencies": {
-        "@typescript-eslint/eslint-plugin": "^4.29.2",
-        "@typescript-eslint/parser": "^4.29.2",
-        "acorn": "^8.6.0",
-        "acorn-walk": "^8.2.0",
-        "aes-js": "^3.1.2",
-        "bs58": "^4.0.1",
+        "@babel/core": "^7.11.1",
+        "@babel/preset-env": "^7.11.0",
+        "@rollup/plugin-babel": "^5.1.0",
+        "@rollup/plugin-commonjs": "^14.0.0",
+        "@rollup/plugin-multi-entry": "^3.0.1",
+        "@rollup/plugin-node-resolve": "^8.4.0",
+        "cheerio": "^1.0.0-rc.3",
+        "commander": "^6.0.0",
         "core-js": "^3.6.5",
-        "es6-promise": "https://github.com/bwindels/es6-promise.git#bwindels/expose-flush",
-        "escodegen": "^2.0.0",
-        "eslint": "^7.32.0",
-        "fake-indexeddb": "^3.1.2",
-        "impunity": "^1.0.9",
+        "finalhandler": "^1.1.1",
+        "impunity": "^0.0.11",
         "mdn-polyfills": "^5.20.0",
-        "merge-options": "^3.0.4",
-        "node-html-parser": "^4.0.0",
-        "postcss-css-variables": "^0.18.0",
-        "postcss-flexbugs-fixes": "^5.0.2",
-        "postcss-value-parser": "^4.2.0",
+        "postcss": "^7.0.18",
+        "postcss-css-variables": "^0.17.0",
+        "postcss-flexbugs-fixes": "^4.2.1",
+        "postcss-import": "^12.0.1",
+        "postcss-url": "^8.0.0",
         "regenerator-runtime": "^0.13.7",
-        "svgo": "^2.8.0",
-        "text-encoding": "^0.7.0",
-        "typescript": "^4.7.0",
-        "vite": "^2.9.8",
-        "xxhashjs": "^0.2.2"
-    },
-    "dependencies": {
-        "@matrix-org/olm": "https://gitlab.matrix.org/api/v4/projects/27/packages/npm/@matrix-org/olm/-/@matrix-org/olm-3.2.8.tgz",
-        "another-json": "^0.2.0",
-        "base64-arraybuffer": "^0.2.0",
-        "dompurify": "^2.3.0",
-        "off-color": "^2.0.0"
+        "rollup": "^1.15.6",
+        "serve-static": "^1.13.2",
+        "xxhash": "^0.3.0"
     }
 }
|
@@ -1,165 +0,0 @@
|
||||||
<!DOCTYPE html>
|
|
||||||
<html>
|
|
||||||
<head>
|
|
||||||
<meta charset="utf-8">
|
|
||||||
</head>
|
|
||||||
<body>
|
|
||||||
<script src="https://unpkg.com/text-encoding@0.6.4/lib/encoding-indexes.js"></script>
|
|
||||||
<script src="https://unpkg.com/text-encoding@0.6.4/lib/encoding.js"></script>
|
|
||||||
<script src="https://cdn.jsdelivr.net/npm/promise-polyfill@8/dist/polyfill.min.js"></script>
|
|
||||||
<script src="deps/jsSHA/dist/sha512.js"></script>
|
|
||||||
<script type="text/javascript" src="https://cdn.rawgit.com/ricmoo/aes-js/e27b99df/index.js"></script>
|
|
||||||
<script type="text/javascript" src="derive-keys-bundle.js"></script>
|
|
||||||
<script type="text/javascript">
|
|
||||||
if (!Math.imul) Math.imul = function(a, b) {
|
|
||||||
var aHi = (a >>> 16) & 0xffff;
|
|
||||||
var aLo = a & 0xffff;
|
|
||||||
var bHi = (b >>> 16) & 0xffff;
|
|
||||||
var bLo = b & 0xffff;
|
|
||||||
// the shift by 0 fixes the sign on the high part
|
|
||||||
// the final |0 converts the unsigned value into a signed value
|
|
||||||
return ((aLo * bLo) + (((aHi * bLo + aLo * bHi) << 16) >>> 0) | 0);
|
|
||||||
};
|
|
||||||
|
|
||||||
if (!Math.clz32) Math.clz32 = (function(log, LN2){
|
|
||||||
return function(x) {
|
|
||||||
// Let n be ToUint32(x).
|
|
||||||
// Let p be the number of leading zero bits in
|
|
||||||
// the 32-bit binary representation of n.
|
|
||||||
// Return p.
|
|
||||||
var asUint = x >>> 0;
|
|
||||||
if (asUint === 0) {
|
|
||||||
return 32;
|
|
||||||
}
|
|
||||||
return 31 - (log(asUint) / LN2 | 0) |0; // the "| 0" acts like math.floor
|
|
||||||
};
|
|
||||||
})(Math.log, Math.LN2);
|
|
||||||
</script>
|
|
||||||
<script type="text/javascript" src="../lib/olm/olm_legacy.js"></script>
|
|
||||||
<script type="text/javascript">
|
|
||||||
// sample data from account with recovery key
|
|
||||||
const ssssKeyAccountData = {
|
|
||||||
"type": "m.secret_storage.key.le4jDjlxrIMZDSKu1EudJL5Tc4U5qI0d",
|
|
||||||
"content": {
|
|
||||||
"algorithm": "m.secret_storage.v1.aes-hmac-sha2",
|
|
||||||
"iv": "YPhwwArIUTwasbROMFd1PQ==",
|
|
||||||
"mac": "khWXeBzKtZi8SX6I7m/9yPoLB1yv1u9l+NNi6WF4+ek="
|
|
||||||
}
|
|
||||||
};
|
|
||||||
const megolmBackupKeyAccountData = {
|
|
||||||
"type": "m.megolm_backup.v1",
|
|
||||||
"content": {
|
|
||||||
"encrypted": {
|
|
||||||
"le4jDjlxrIMZDSKu1EudJL5Tc4U5qI0d": {
|
|
||||||
"iv": "PiqYdySj9s4RsaLc1oDF1w==",
|
|
||||||
"ciphertext": "62fjUs1xkF3BvqVEvAEoDH9jcYiotkcJHG/VNtzSrPBlrmOYQyPA93L2rKo=",
|
|
||||||
"mac": "vtq+kEg5XaRdw08aPiQi7+w9qUiDCQKo/jKNTvrN4ho="
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
};
|
|
||||||
|
|
||||||
const backupInfo = {
|
|
||||||
"algorithm": "m.megolm_backup.v1.curve25519-aes-sha2",
|
|
||||||
"auth_data": {
|
|
||||||
"public_key": "tY/jSdfy2q1pS8Ux+LP8xr/RMn9NDElwofH+E5sFG38",
|
|
||||||
"signatures": {
|
|
||||||
"@bruno-test4s2:matrix.org": {
|
|
||||||
"ed25519:KTLGZUJCYZ": "YPuzpLo4OZL5+HQTkbDnUKpIuCmL50Q7RnMs9cRfKqyS+CMPm0RBU1ttPO6XOZ+TjZ4VThXU50LUkmpJiKM+Aw",
|
|
||||||
"ed25519:l17fdsfeS7qUKIYzgx3LxIcHnjPM00+Ge5dTk7Msy04": "epDo+d9foXXcnXChZaEOCKNYzofOMBXQF3FCMDJ52hxvxh9K1w+2zOOAwWEKOts88gubgIsdRQedkuhuIm2LCg"
|
|
||||||
}
|
|
||||||
}
|
|
||||||
},
|
|
||||||
"count": 1,
|
|
||||||
"etag": "1",
|
|
||||||
"version": "1"
|
|
||||||
};
|
|
||||||
|
|
||||||
const sessionResponse = {
|
|
||||||
"first_message_index": 0,
|
|
||||||
"forwarded_count": 0,
|
|
||||||
"is_verified": true,
|
|
||||||
"session_data": {
|
|
||||||
"ciphertext": "+a8OCF0v5U5GYTNAMwgNEqSItxy4hea073zlWCp+ocr4mUQDuUZyOo+DGHDPPvSOnhJA2waSV05wna/Jmig7NAzuJJy8eEd0dHmGiA16eUMFiUz0HYFseDXs0dDGF38shz1C6CXYRjTOS3S7JWLVzeeYy632BMGvGjWMvAuOpm4NgV9fLB5J6nYVb/wvU3Mf8mw/eT5k8AUJA/CAD6zM7T9skEJhuFoi5kdPfBoozUbScA5xcPVmE6aY08zZ6QpiZ7lsyWoIRDbRxaBxL82T2CnpcngE/SAHF+eJ9ZWK3txolYLT/KAfKlAVLV7yWXkYL7oxrW8DI/5ZQFXUqzqqqfAB7Qz2AIvCdUVqhDGwuDr5noCMlKYEwyYR0VC2i4ZyXdtLdOjKBS2eTqDcwdv2gcaOnbJJcIEuGMKVg89/rKqpWncY/+NOBTQhuts05+Wi+9wU+OlGlNFvhkOgp1BaP0Q7T4pkxgj4OSbf3t1UfthltJSX8TS9ZGd3DVDI8swQuMBvF9H+7kAeO2IWTMSe57MYvlk0aw/gPFdI06lcOvH2nAr9C2HNsuYhyO4XGZOAg8HHzkjLlzNU+zJk1MfRIXRoVgbIh1hApcK9HhyTBzg",
|
|
||||||
"ephemeral": "z0JE6swJZbrmRYOWGvEI6zhIzoJ57lhzp1uujVS2jUs",
|
|
||||||
"mac": "+AAASqA+4U8"
|
|
||||||
}
|
|
||||||
};
|
|
||||||
const keyId = "le4jDjlxrIMZDSKu1EudJL5Tc4U5qI0d";
|
|
||||||
// sample data with account with recovery passphrase
|
|
||||||
// const ssssKeyAccountData =
|
|
||||||
// {
|
|
||||||
// "type": "m.secret_storage.key.HB6AKfUD4avkZfPfyjcJ6iJPWDp4f9WM",
|
|
||||||
// "content": {
|
|
||||||
// "algorithm": "m.secret_storage.v1.aes-hmac-sha2",
|
|
||||||
// "passphrase": {
|
|
||||||
// "algorithm": "m.pbkdf2",
|
|
||||||
// "iterations": 500000,
|
|
||||||
// "salt": "tfY5mgvQBr3Gd5Dy1IBiKf7fLquL4Y9O"
|
|
||||||
// },
|
|
||||||
// "iv": "xitm4hxsqagkbyEmXj0tUw==",
|
|
||||||
// "mac": "nagOYz7FKrdlFEKM9ij78th0O2p7YVGgl+p0LHr4EBE="
|
|
||||||
// }
|
|
||||||
// };
|
|
||||||
|
|
||||||
// const megolmBackupKeyAccountData = {
|
|
||||||
// "type": "m.megolm_backup.v1",
|
|
||||||
// "content": {
|
|
||||||
// "encrypted": {
|
|
||||||
// "HB6AKfUD4avkZfPfyjcJ6iJPWDp4f9WM": {
|
|
||||||
// "iv": "HpzOY5DxYFJCxw5Vi6BBOQ==",
|
|
||||||
// "ciphertext": "u1TJjaaGKVDGExg9hu2fIUZ0gjToMcMReyhn4nsXgnhm7Dvz6E/4p+nSF3w=",
|
|
||||||
// "mac": "08ckDbQK9wB2jiE4n4sfp2sw83q/0C2/gEz2LuHMEPg="
|
|
||||||
// }
|
|
||||||
// }
|
|
||||||
// }
|
|
||||||
// };
|
|
||||||
// const backupInfo = {
|
|
||||||
// "algorithm": "m.megolm_backup.v1.curve25519-aes-sha2",
|
|
||||||
// "auth_data": {
|
|
||||||
// "public_key": "Vw2cwhbxFg/GQ2rr4VRIQ+Oh74lP7IxY6oN4R9q992k",
|
|
||||||
// "signatures": {
|
|
||||||
// "@bruno-test4s:matrix.org": {
|
|
||||||
// "ed25519:XAIKJXBCNZ": "AFBp1T2x8hyPSi2hCHg6IzNy67RxULj3/7LYZgVT3Ruz49v5h1+jAScTxZj5jrItxo2LCzSORH+yABHjPIqOBQ",
|
|
||||||
// "ed25519:lukepZkTmPcJS6wCl12B0tIURIO8YbMd5QJLf8UOugI": "a1ZJa+1+p9Gm5Po1B619ZDy4xidHmLt82vXVPH7vWTjny1r3JI2iM4fB2qh8vEiASNlFyVrFx//gQrz9Y1IJBA"
|
|
||||||
// }
|
|
||||||
// }
|
|
||||||
// },
|
|
||||||
// "count": 1,
|
|
||||||
// "etag": "1",
|
|
||||||
// "version": "1"
|
|
||||||
// };
|
|
||||||
// const sessionResponse = {
|
|
||||||
// "first_message_index": 0,
|
|
||||||
// "forwarded_count": 0,
|
|
||||||
// "is_verified": true,
|
|
||||||
// "session_data": {
|
|
||||||
// "ciphertext": "1NoC8/GZWeGjneuoFDcqpbMYOJ8bjDFiw2O4/YOKC59x9RqSejLyM8qLL5FzlV+uW7anPVED8t9m+p2t1kKa15LxlcdzXjLPCv1QGYlhotbUhN8eRUobQuLqsD5Dl/QqNxv+Xl65tEaQhUeF30NmSesw6GHvP93vB3mTN8Yz9QyaQtvgoI/Q6c4d+yGmFVE2dlhXdOs7Hrylrg8UyM1QI+qpNJ3L9ETcqiXCG/FJIdM87LmNnHPX65TWK5xsu1JKWCI2BY1KFVDyxm40FyHHypUPYoT9RqPnygHtYoTiZzyaVxqUu2vg08Bv0t1VH2SNDGs5aZYQN5S1JNAHrXE+cWSg0rfVb160Z4FJC/89wO8fw/uXqJehqMVuC9BSU/zsKcZ797U92qDnIb6QQuMYKRgh9JrEugqJN9ocL7F8W9fW2oFfUYRyvOZRSf387hGrapEGBKx7Owb7UoXvWyb4C5hc5SFNvej+yg98+Fi4hzlGH26DqzJdLcxU5P/MWfZc222QqPFuFspe6f0Ts5jnJhjCQhXWoM4G6mtvGbOm2ESSJULj8U4JSDz8GsxrmojR/pBpywBvuy/mx//htnacnTRqYJz+PZVtV63rfaZlEtU",
|
|
||||||
// "ephemeral": "wXBeLoazggBmFS0eiVY9H/qq5o1yt2/NIKWcq384EHc",
|
|
||||||
// "mac": "w3IfO5vL9Bc"
|
|
||||||
// }
|
|
||||||
// };
|
|
||||||
//const keyId = "HB6AKfUD4avkZfPfyjcJ6iJPWDp4f9WM";
|
|
||||||
const cryptoDriver = new bundle.CryptoDriver((window.crypto || window.msCrypto).subtle);
|
|
||||||
window.Olm.init().then(function() {
|
|
||||||
bundle.deserializeSSSSKey("EsUH dBfj L7XF Kdej TNmK 2CdP R7NQ KnQH zA1o 8kDg piuJ QEZh", ssssKeyAccountData).then(function(ssssKey) {
|
|
||||||
//bundle.deriveSSSSKey(cryptoDriver, prompt("passphrase"), ssssKeyAccountData).then(function(ssssKey) {
|
|
||||||
// const ssssKey = new Uint8Array(32);
|
|
||||||
// const bytes = [123, 47, 138, 15, 190, 69, 224, 204, 88, 246, 203, 65, 243, 234, 91, 17, 250, 107, 104, 51, 211, 252, 81, 67, 80, 191, 105, 208, 127, 87, 107, 231];
|
|
||||||
// for (var i = bytes.length - 1; i >= 0; i--) {
|
|
||||||
// ssssKey[i] = bytes[i];
|
|
||||||
// }
|
|
||||||
console.log("ssssKey", ssssKey);
|
|
||||||
bundle.decryptSecret(cryptoDriver, keyId, ssssKey, megolmBackupKeyAccountData).then(function(backupKeyBase64) {
|
|
||||||
console.log("backupKeyBase64", backupKeyBase64);
|
|
||||||
bundle.decryptSession(backupKeyBase64, backupInfo, sessionResponse).then(function(session) {
|
|
||||||
console.log("session", session);
|
|
||||||
alert(session.session_key);
|
|
||||||
});
|
|
||||||
});
|
|
||||||
});
|
|
||||||
});
|
|
||||||
</script>
|
|
||||||
</body>
|
|
||||||
</html>
|
|
||||||
|
|
458  prototypes/derive-keys.js  vendored
|
@@ -1,458 +0,0 @@
|
||||||
import {base58} from "../src/utils/base-encoding.js";
|
|
||||||
|
|
||||||
function subtleCryptoResult(promiseOrOp, method) {
|
|
||||||
if (promiseOrOp instanceof Promise) {
|
|
||||||
return promiseOrOp;
|
|
||||||
} else {
|
|
||||||
return new Promise((resolve, reject) => {
|
|
||||||
promiseOrOp.oncomplete = e => resolve(e.target.result);
|
|
||||||
promiseOrOp.onerror = e => reject(new Error("Crypto error on " + method));
|
|
||||||
});
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
class CryptoHMACDriver {
|
|
||||||
constructor(subtleCrypto) {
|
|
||||||
this._subtleCrypto = subtleCrypto;
|
|
||||||
}
|
|
||||||
/**
|
|
||||||
* [hmac description]
|
|
||||||
* @param {BufferSource} key
|
|
||||||
* @param {BufferSource} mac
|
|
||||||
* @param {BufferSource} data
|
|
||||||
* @param {HashName} hash
|
|
||||||
* @return {boolean}
|
|
||||||
*/
|
|
||||||
async verify(key, mac, data, hash) {
|
|
||||||
const opts = {
|
|
||||||
name: 'HMAC',
|
|
||||||
hash: {name: hashName(hash)},
|
|
||||||
};
|
|
||||||
const hmacKey = await subtleCryptoResult(this._subtleCrypto.importKey(
|
|
||||||
'raw',
|
|
||||||
key,
|
|
||||||
opts,
|
|
||||||
false,
|
|
||||||
['verify'],
|
|
||||||
), "importKey");
|
|
||||||
const isVerified = await subtleCryptoResult(this._subtleCrypto.verify(
|
|
||||||
opts,
|
|
||||||
hmacKey,
|
|
||||||
mac,
|
|
||||||
data,
|
|
||||||
), "verify");
|
|
||||||
return isVerified;
|
|
||||||
}
|
|
||||||
|
|
||||||
async compute(key, data, hash) {
|
|
||||||
const opts = {
|
|
||||||
name: 'HMAC',
|
|
||||||
hash: {name: hashName(hash)},
|
|
||||||
};
|
|
||||||
const hmacKey = await subtleCryptoResult(this._subtleCrypto.importKey(
|
|
||||||
'raw',
|
|
||||||
key,
|
|
||||||
opts,
|
|
||||||
false,
|
|
||||||
['sign'],
|
|
||||||
), "importKey");
|
|
||||||
const buffer = await subtleCryptoResult(this._subtleCrypto.sign(
|
|
||||||
opts,
|
|
||||||
hmacKey,
|
|
||||||
data,
|
|
||||||
), "sign");
|
|
||||||
return new Uint8Array(buffer);
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
const nwbo = (num, len) => {
|
|
||||||
const arr = new Uint8Array(len);
|
|
||||||
for(let i=0; i<len; i++) arr[i] = 0xFF & (num >> ((len - i - 1)*8));
|
|
||||||
return arr;
|
|
||||||
};
|
|
||||||
|
|
||||||
class CryptoLegacyHMACDriver {
|
|
||||||
constructor(hmacDriver) {
|
|
||||||
this._hmacDriver = hmacDriver;
|
|
||||||
}
|
|
||||||
|
|
||||||
async verify(key, mac, data, hash) {
|
|
||||||
if (hash === "SHA-512") {
|
|
||||||
throw new Error("SHA-512 HMAC verification is not implemented yet");
|
|
||||||
} else {
|
|
||||||
return this._hmacDriver.verify(key, mac, data, hash)
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
async compute(key, data, hash) {
|
|
||||||
if (hash === "SHA-256") {
|
|
||||||
return await this._hmacDriver.compute(key, data, hash);
|
|
||||||
} else {
|
|
||||||
const shaObj = new window.jsSHA(hash, "UINT8ARRAY", {
|
|
||||||
"hmacKey": {
|
|
||||||
"value": key,
|
|
||||||
"format": "UINT8ARRAY"
|
|
||||||
}
|
|
||||||
});
|
|
||||||
shaObj.update(data);
|
|
||||||
return shaObj.getHash("UINT8ARRAY");
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
class CryptoLegacyDeriveDriver {
|
|
||||||
constructor(cryptoDriver) {
|
|
||||||
this._cryptoDriver = cryptoDriver;
|
|
||||||
}
|
|
||||||
|
|
||||||
// adapted from https://github.com/junkurihara/jscu/blob/develop/packages/js-crypto-pbkdf/src/pbkdf.ts#L21
|
|
||||||
// could also consider https://github.com/brix/crypto-js/blob/develop/src/pbkdf2.js although not async
|
|
||||||
async pbkdf2(password, iterations, salt, hash, length) {
|
|
||||||
const dkLen = length / 8;
|
|
||||||
if (iterations <= 0) {
|
|
||||||
throw new Error('InvalidIterationCount');
|
|
||||||
}
|
|
||||||
if (dkLen <= 0) {
|
|
||||||
throw new Error('InvalidDerivedKeyLength');
|
|
||||||
}
|
|
||||||
const hLen = this._cryptoDriver.digestSize(hash);
|
|
||||||
if(dkLen > (Math.pow(2, 32) - 1) * hLen) throw new Error('DerivedKeyTooLong');
|
|
||||||
|
|
||||||
const l = Math.ceil(dkLen/hLen);
|
|
||||||
const r = dkLen - (l-1)*hLen;
|
|
||||||
|
|
||||||
const funcF = async (i) => {
|
|
||||||
const seed = new Uint8Array(salt.length + 4);
|
|
||||||
seed.set(salt);
|
|
||||||
seed.set(nwbo(i+1, 4), salt.length);
|
|
||||||
let u = await this._cryptoDriver.hmac.compute(password, seed, hash);
|
|
||||||
let outputF = new Uint8Array(u);
|
|
||||||
for(let j = 1; j < iterations; j++){
|
|
||||||
if ((j % 1000) === 0) {
|
|
||||||
console.log(j, j/iterations);
|
|
||||||
}
|
|
||||||
u = await this._cryptoDriver.hmac.compute(password, u, hash);
|
|
||||||
outputF = u.map( (elem, idx) => elem ^ outputF[idx]);
|
|
||||||
}
|
|
||||||
return {index: i, value: outputF};
|
|
||||||
};
|
|
||||||
|
|
||||||
const Tis = [];
|
|
||||||
const DK = new Uint8Array(dkLen);
|
|
||||||
for(let i = 0; i < l; i++) {
|
|
||||||
Tis.push(funcF(i));
|
|
||||||
}
|
|
||||||
const TisResolved = await Promise.all(Tis);
|
|
||||||
TisResolved.forEach(elem => {
|
|
||||||
if (elem.index !== l - 1) {
|
|
||||||
DK.set(elem.value, elem.index*hLen);
|
|
||||||
}
|
|
||||||
else {
|
|
||||||
DK.set(elem.value.slice(0, r), elem.index*hLen);
|
|
||||||
}
|
|
||||||
});
|
|
||||||
|
|
||||||
return DK;
|
|
||||||
}
|
|
||||||
|
|
||||||
// based on https://github.com/junkurihara/jscu/blob/develop/packages/js-crypto-hkdf/src/hkdf.ts
|
|
||||||
async hkdf(key, salt, info, hash, length) {
|
|
||||||
length = length / 8;
|
|
||||||
const len = this._cryptoDriver.digestSize(hash);
|
|
||||||
|
|
||||||
// RFC5869 Step 1 (Extract)
|
|
||||||
const prk = await this._cryptoDriver.hmac.compute(salt, key, hash);
|
|
||||||
|
|
||||||
// RFC5869 Step 2 (Expand)
|
|
||||||
let t = new Uint8Array([]);
|
|
||||||
const okm = new Uint8Array(Math.ceil(length / len) * len);
|
|
||||||
for(let i = 0; i < Math.ceil(length / len); i++){
|
|
||||||
const concat = new Uint8Array(t.length + info.length + 1);
|
|
||||||
concat.set(t);
|
|
||||||
concat.set(info, t.length);
|
|
||||||
concat.set(new Uint8Array([i+1]), t.length + info.length);
|
|
||||||
t = await this._cryptoDriver.hmac.compute(prk, concat, hash);
|
|
||||||
okm.set(t, len * i);
|
|
||||||
}
|
|
||||||
return okm.slice(0, length);
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
class CryptoDeriveDriver {
|
|
||||||
constructor(subtleCrypto) {
|
|
||||||
this._subtleCrypto = subtleCrypto;
|
|
||||||
}
|
|
||||||
/**
|
|
||||||
* [pbkdf2 description]
|
|
||||||
* @param {BufferSource} password
|
|
||||||
* @param {Number} iterations
|
|
||||||
* @param {BufferSource} salt
|
|
||||||
* @param {HashName} hash
|
|
||||||
* @param {Number} length the desired length of the generated key, in bits (not bytes!)
|
|
||||||
* @return {BufferSource}
|
|
||||||
*/
|
|
||||||
async pbkdf2(password, iterations, salt, hash, length) {
|
|
||||||
// check for existence of deriveBits, which IE11 does not have
|
|
||||||
const key = await subtleCryptoResult(this._subtleCrypto.importKey(
|
|
||||||
'raw',
|
|
||||||
password,
|
|
||||||
{name: 'PBKDF2'},
|
|
||||||
false,
|
|
||||||
['deriveBits'],
|
|
||||||
), "importKey");
|
|
||||||
const keybits = await subtleCryptoResult(this._subtleCrypto.deriveBits(
|
|
||||||
{
|
|
||||||
name: 'PBKDF2',
|
|
||||||
salt,
|
|
||||||
iterations,
|
|
||||||
hash: hashName(hash),
|
|
||||||
},
|
|
||||||
key,
|
|
||||||
length,
|
|
||||||
), "deriveBits");
|
|
||||||
return new Uint8Array(keybits);
|
|
||||||
}
|
|
||||||
|
|
||||||
/**
|
|
||||||
* [hkdf description]
|
|
||||||
* @param {BufferSource} key [description]
|
|
||||||
* @param {BufferSource} salt [description]
|
|
||||||
* @param {BufferSource} info [description]
|
|
||||||
* @param {HashName} hash the hash to use
|
|
||||||
* @param {Number} length desired length of the generated key in bits (not bytes!)
|
|
||||||
* @return {[type]} [description]
|
|
||||||
*/
|
|
||||||
async hkdf(key, salt, info, hash, length) {
|
|
||||||
const hkdfkey = await subtleCryptoResult(this._subtleCrypto.importKey(
|
|
||||||
'raw',
|
|
||||||
key,
|
|
||||||
{name: "HKDF"},
|
|
||||||
false,
|
|
||||||
["deriveBits"],
|
|
||||||
), "importKey");
|
|
||||||
const keybits = await subtleCryptoResult(this._subtleCrypto.deriveBits({
|
|
||||||
name: "HKDF",
|
|
||||||
salt,
|
|
||||||
info,
|
|
||||||
hash: hashName(hash),
|
|
||||||
},
|
|
||||||
hkdfkey,
|
|
||||||
length,
|
|
||||||
), "deriveBits");
|
|
||||||
return new Uint8Array(keybits);
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
class CryptoAESDriver {
|
|
||||||
constructor(subtleCrypto) {
|
|
||||||
this._subtleCrypto = subtleCrypto;
|
|
||||||
}
|
|
||||||
/**
|
|
||||||
* [decrypt description]
|
|
||||||
* @param {BufferSource} key [description]
|
|
||||||
* @param {BufferSource} iv [description]
|
|
||||||
* @param {BufferSource} ciphertext [description]
|
|
||||||
* @return {BufferSource} [description]
|
|
||||||
*/
|
|
||||||
async decrypt(key, iv, ciphertext) {
|
|
||||||
const opts = {
|
|
||||||
name: "AES-CTR",
|
|
||||||
counter: iv,
|
|
||||||
length: 64,
|
|
||||||
};
|
|
||||||
let aesKey;
|
|
||||||
try {
|
|
||||||
aesKey = await subtleCryptoResult(this._subtleCrypto.importKey(
|
|
||||||
'raw',
|
|
||||||
key,
|
|
||||||
opts,
|
|
||||||
false,
|
|
||||||
['decrypt'],
|
|
||||||
), "importKey");
|
|
||||||
} catch (err) {
|
|
||||||
throw new Error(`Could not import key for AES-CTR decryption: ${err.message}`);
|
|
||||||
}
|
|
||||||
try {
|
|
||||||
const plaintext = await subtleCryptoResult(this._subtleCrypto.decrypt(
|
|
||||||
// see https://developer.mozilla.org/en-US/docs/Web/API/AesCtrParams
|
|
||||||
opts,
|
|
||||||
aesKey,
|
|
||||||
ciphertext,
|
|
||||||
), "decrypt");
|
|
||||||
return new Uint8Array(plaintext);
|
|
||||||
} catch (err) {
|
|
||||||
throw new Error(`Could not decrypt with AES-CTR: ${err.message}`);
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
|
|
||||||
class CryptoLegacyAESDriver {
|
|
||||||
/**
|
|
||||||
* [decrypt description]
|
|
||||||
* @param {BufferSource} key [description]
|
|
||||||
* @param {BufferSource} iv [description]
|
|
||||||
* @param {BufferSource} ciphertext [description]
|
|
||||||
* @return {BufferSource} [description]
|
|
||||||
*/
|
|
||||||
async decrypt(key, iv, ciphertext) {
|
|
||||||
const aesjs = window.aesjs;
|
|
||||||
var aesCtr = new aesjs.ModeOfOperation.ctr(key, new aesjs.Counter(iv));
|
|
||||||
return aesCtr.decrypt(ciphertext);
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
function hashName(name) {
|
|
||||||
if (name !== "SHA-256" && name !== "SHA-512") {
|
|
||||||
throw new Error(`Invalid hash name: ${name}`);
|
|
||||||
}
|
|
||||||
return name;
|
|
||||||
}
|
|
||||||
|
|
||||||
export class CryptoDriver {
|
|
||||||
constructor(subtleCrypto) {
|
|
||||||
this.aes = new CryptoLegacyAESDriver();
|
|
||||||
// this.aes = new CryptoAESDriver(subtleCrypto);
|
|
||||||
//this.derive = new CryptoDeriveDriver(subtleCrypto);
|
|
||||||
this.derive = new CryptoLegacyDeriveDriver(this);
|
|
||||||
// subtleCrypto.deriveBits ?
|
|
||||||
// new CryptoDeriveDriver(subtleCrypto) :
|
|
||||||
// new CryptoLegacyDeriveDriver(this);
|
|
||||||
this.hmac = new CryptoLegacyHMACDriver(new CryptoHMACDriver(subtleCrypto));
|
|
||||||
this._subtleCrypto = subtleCrypto;
|
|
||||||
}
|
|
||||||
|
|
||||||
/**
|
|
||||||
* [digest description]
|
|
||||||
* @param {HashName} hash
|
|
||||||
* @param {BufferSource} data
|
|
||||||
* @return {BufferSource}
|
|
||||||
*/
|
|
||||||
async digest(hash, data) {
|
|
||||||
return await subtleCryptoResult(this._subtleCrypto.digest(hashName(hash), data));
|
|
||||||
}
|
|
||||||
|
|
||||||
digestSize(hash) {
|
|
||||||
switch (hashName(hash)) {
|
|
||||||
case "SHA-512": return 64;
|
|
||||||
case "SHA-256": return 32;
|
|
||||||
default: throw new Error(`Not implemented for ${hashName(hash)}`);
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
export function decodeBase64(base64) {
|
|
||||||
const binStr = window.atob(base64);
|
|
||||||
const len = binStr.length;
|
|
||||||
const bytes = new Uint8Array(len);
|
|
||||||
for (let i = 0; i < len; i++) {
|
|
||||||
bytes[i] = binStr.charCodeAt(i);
|
|
||||||
}
|
|
||||||
return bytes;
|
|
||||||
}
|
|
||||||
|
|
||||||
const DEFAULT_ITERATIONS = 500000;
|
|
||||||
|
|
||||||
const DEFAULT_BITSIZE = 256;
|
|
||||||
|
|
||||||
export async function deriveSSSSKey(cryptoDriver, passphrase, ssssKey) {
|
|
||||||
const textEncoder = new TextEncoder();
|
|
||||||
return await cryptoDriver.derive.pbkdf2(
|
|
||||||
textEncoder.encode(passphrase),
|
|
||||||
ssssKey.content.passphrase.iterations || DEFAULT_ITERATIONS,
|
|
||||||
textEncoder.encode(ssssKey.content.passphrase.salt),
|
|
||||||
"SHA-512",
|
|
||||||
ssssKey.content.passphrase.bits || DEFAULT_BITSIZE);
|
|
||||||
}
|
|
||||||
|
|
||||||
export async function decryptSecret(cryptoDriver, keyId, ssssKey, event) {
|
|
||||||
const textEncoder = new TextEncoder();
|
|
||||||
const textDecoder = new TextDecoder();
|
|
||||||
// now derive the aes and mac key from the 4s key
|
|
||||||
const hkdfKey = await cryptoDriver.derive.hkdf(
|
|
||||||
ssssKey,
|
|
||||||
new Uint8Array(8).buffer, //salt
|
|
||||||
textEncoder.encode(event.type), // info
|
|
||||||
"SHA-256",
|
|
||||||
512 // 512 bits or 64 bytes
|
|
||||||
);
|
|
||||||
const aesKey = hkdfKey.slice(0, 32);
|
|
||||||
const hmacKey = hkdfKey.slice(32);
|
|
||||||
|
|
||||||
const data = event.content.encrypted[keyId];
|
|
||||||
|
|
||||||
const ciphertextBytes = decodeBase64(data.ciphertext);
|
|
||||||
const isVerified = await cryptoDriver.hmac.verify(
|
|
||||||
hmacKey, decodeBase64(data.mac),
|
|
||||||
ciphertextBytes, "SHA-256");
|
|
||||||
|
|
||||||
if (!isVerified) {
|
|
||||||
throw new Error("Bad MAC");
|
|
||||||
}
|
|
||||||
|
|
||||||
const plaintext = await cryptoDriver.aes.decrypt(aesKey, decodeBase64(data.iv), ciphertextBytes);
|
|
||||||
return textDecoder.decode(new Uint8Array(plaintext));
|
|
||||||
}
|
|
||||||
|
|
||||||
|
|
||||||
export async function decryptSession(backupKeyBase64, backupInfo, sessionResponse) {
|
|
||||||
const privKey = decodeBase64(backupKeyBase64);
|
|
||||||
console.log("privKey", privKey);
|
|
||||||
|
|
||||||
const decryption = new window.Olm.PkDecryption();
|
|
||||||
let backupPubKey;
|
|
||||||
try {
|
|
||||||
backupPubKey = decryption.init_with_private_key(privKey);
|
|
||||||
} catch (e) {
|
|
||||||
decryption.free();
|
|
||||||
throw e;
|
|
||||||
}
|
|
||||||
|
|
||||||
// If the pubkey computed from the private data we've been given
|
|
||||||
// doesn't match the one in the auth_data, the user has entered
|
|
||||||
// a different recovery key / the wrong passphrase.
|
|
||||||
if (backupPubKey !== backupInfo.auth_data.public_key) {
|
|
||||||
console.log("backupPubKey", backupPubKey.length, backupPubKey);
|
|
||||||
throw new Error("bad backup key");
|
|
||||||
}
|
|
||||||
|
|
||||||
const sessionInfo = decryption.decrypt(
|
|
||||||
sessionResponse.session_data.ephemeral,
|
|
||||||
sessionResponse.session_data.mac,
|
|
||||||
sessionResponse.session_data.ciphertext,
|
|
||||||
);
|
|
||||||
return JSON.parse(sessionInfo);
|
|
||||||
}
|
|
||||||
|
|
||||||
const OLM_RECOVERY_KEY_PREFIX = [0x8B, 0x01];
|
|
||||||
|
|
||||||
|
|
||||||
export async function deserializeSSSSKey(recoverykey) {
|
|
||||||
const result = base58.decode(recoverykey.replace(/ /g, ''));
|
|
||||||
|
|
||||||
let parity = 0;
|
|
||||||
for (const b of result) {
|
|
||||||
parity ^= b;
|
|
||||||
}
|
|
||||||
if (parity !== 0) {
|
|
||||||
throw new Error("Incorrect parity");
|
|
||||||
}
|
|
||||||
|
|
||||||
for (let i = 0; i < OLM_RECOVERY_KEY_PREFIX.length; ++i) {
|
|
||||||
if (result[i] !== OLM_RECOVERY_KEY_PREFIX[i]) {
|
|
||||||
throw new Error("Incorrect prefix");
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
if (
|
|
||||||
result.length !==
|
|
||||||
OLM_RECOVERY_KEY_PREFIX.length + window.Olm.PRIVATE_KEY_LENGTH + 1
|
|
||||||
) {
|
|
||||||
throw new Error("Incorrect length");
|
|
||||||
}
|
|
||||||
|
|
||||||
return Uint8Array.from(result.slice(
|
|
||||||
OLM_RECOVERY_KEY_PREFIX.length,
|
|
||||||
OLM_RECOVERY_KEY_PREFIX.length + window.Olm.PRIVATE_KEY_LENGTH,
|
|
||||||
));
|
|
||||||
}
|
|
|
@@ -1,17 +0,0 @@
|
||||||
<!DOCTYPE html>
|
|
||||||
<html>
|
|
||||||
<head>
|
|
||||||
<meta charset="utf-8">
|
|
||||||
<link rel="stylesheet" type="text/css" href="non-ie11.css" media="not all and (-ms-high-contrast: none), (-ms-high-contrast: active)">
|
|
||||||
<link rel="stylesheet" type="text/css" href="ie11.css" media="all and (-ms-high-contrast: none), (-ms-high-contrast: active)">
|
|
||||||
<style type="text/css">
|
|
||||||
/*
|
|
||||||
can't make this work in non-IE browser for now...
|
|
||||||
*/
|
|
||||||
@import url('non-ie11.css') screen @supports(--foo: green);
|
|
||||||
</style>
|
|
||||||
</head>
|
|
||||||
<body>
|
|
||||||
<p>Lorem ipsum dolor sit amet, consectetur adipiscing elit, sed do eiusmod tempor incididunt ut labore et dolore magna aliqua. Velit dignissim sodales ut eu sem integer vitae justo eget. Libero justo laoreet sit amet cursus sit amet dictum. Egestas fringilla phasellus faucibus scelerisque eleifend donec pretium vulputate. Quis eleifend quam adipiscing vitae proin sagittis nisl. Egestas maecenas pharetra convallis posuere morbi leo. Metus dictum at tempor commodo ullamcorper a lacus. Odio pellentesque diam volutpat commodo sed egestas egestas. Elementum eu facilisis sed odio morbi quis commodo odio aenean. Velit euismod in pellentesque massa placerat duis ultricies lacus sed. Feugiat sed lectus vestibulum mattis ullamcorper velit sed ullamcorper morbi. Pulvinar etiam non quam lacus suspendisse. Dictumst vestibulum rhoncus est pellentesque elit ullamcorper dignissim. Proin gravida hendrerit lectus a. Nibh sed pulvinar proin gravida. Massa placerat duis ultricies lacus. Enim sed faucibus turpis in eu mi bibendum neque egestas. Turpis egestas sed tempus urna et pharetra pharetra.</p>
|
|
||||||
</body>
|
|
||||||
</html>
|
|
|
@@ -1,100 +0,0 @@
|
||||||
<!DOCTYPE html>
|
|
||||||
<html>
|
|
||||||
<head>
|
|
||||||
<meta charset="utf-8">
|
|
||||||
<meta name="viewport" content="width=device-width, initial-scale=1">
|
|
||||||
</head>
|
|
||||||
<body>
|
|
||||||
<script type="text/javascript">
|
|
||||||
class IDBError extends Error {
|
|
||||||
constructor(errorEvent) {
|
|
||||||
const request = errorEvent.target;
|
|
||||||
const {error} = request;
|
|
||||||
super(error.message);
|
|
||||||
this.name = error.name;
|
|
||||||
this.errorEvent = errorEvent;
|
|
||||||
}
|
|
||||||
|
|
||||||
preventAbort() {
|
|
||||||
this.errorEvent.preventDefault();
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
class AbortError extends Error {
|
|
||||||
get name() { return "AbortError"; }
|
|
||||||
}
|
|
||||||
|
|
||||||
function reqAsPromise(req) {
|
|
||||||
return new Promise(function (resolve, reject) {
|
|
||||||
req.onsuccess = function(e) {
|
|
||||||
resolve(e.target.result);
|
|
||||||
};
|
|
||||||
req.onerror = function(e) {
|
|
||||||
reject(new IDBError(e));
|
|
||||||
};
|
|
||||||
});
|
|
||||||
}
|
|
||||||
|
|
||||||
function txnAsPromise(txn) {
|
|
||||||
return new Promise((resolve, reject) => {
|
|
||||||
txn.addEventListener("complete", () => resolve());
|
|
||||||
txn.addEventListener("abort", event => {
|
|
||||||
reject(new AbortError());
|
|
||||||
});
|
|
||||||
});
|
|
||||||
}
|
|
||||||
|
|
||||||
function Storage(databaseName) {
|
|
||||||
this._databaseName = databaseName;
|
|
||||||
this._database = null;
|
|
||||||
}
|
|
||||||
|
|
||||||
Storage.prototype = {
|
|
||||||
open: function() {
|
|
||||||
const req = window.indexedDB.open(this._databaseName);
|
|
||||||
const self = this;
|
|
||||||
req.onupgradeneeded = function(ev) {
|
|
||||||
const db = ev.target.result;
|
|
||||||
const oldVersion = ev.oldVersion;
|
|
||||||
self._createStores(db, oldVersion);
|
|
||||||
};
|
|
||||||
return reqAsPromise(req).then(function() {
|
|
||||||
self._database = req.result;
|
|
||||||
});
|
|
||||||
},
|
|
||||||
readWriteTxn: function(storeName) {
|
|
||||||
return this._database.transaction([storeName], "readwrite");
|
|
||||||
},
|
|
||||||
readTxn: function(storeName) {
|
|
||||||
return this._database.transaction([storeName], "readonly");
|
|
||||||
},
|
|
||||||
_createStores: function(db) {
|
|
||||||
db.createObjectStore("foos", {keyPath: "id"});
|
|
||||||
}
|
|
||||||
};
|
|
||||||
|
|
||||||
async function main() {
|
|
||||||
const storage = new Storage("idb-continue-on-constrainterror");
|
|
||||||
await storage.open();
|
|
||||||
const txn1 = storage.readWriteTxn("foos");
|
|
||||||
const store = txn1.objectStore("foos");
|
|
||||||
await reqAsPromise(store.clear());
|
|
||||||
console.log("first foo read back", await reqAsPromise(store.get(5)));
|
|
||||||
await reqAsPromise(store.add({id: 5, name: "Mr Foo"}));
|
|
||||||
try {
|
|
||||||
await reqAsPromise(store.add({id: 5, name: "bar"}));
|
|
||||||
} catch (err) {
|
|
||||||
console.log("we did get an error", err.name);
|
|
||||||
err.preventAbort();
|
|
||||||
}
|
|
||||||
await txnAsPromise(txn1);
|
|
||||||
|
|
||||||
const txn2 = storage.readTxn("foos");
|
|
||||||
const store2 = txn2.objectStore("foos");
|
|
||||||
console.log("got name from second txn", await reqAsPromise(store2.get(5)));
|
|
||||||
}
|
|
||||||
main().catch(err => console.error(err));
|
|
||||||
</script>
|
|
||||||
</body>
|
|
||||||
</html>
|
|
||||||
|
|
|
@@ -1,112 +0,0 @@
|
||||||
<!DOCTYPE html>
|
|
||||||
<html>
|
|
||||||
<head>
|
|
||||||
<meta charset="utf-8">
|
|
||||||
<meta name="viewport" content="width=device-width, initial-scale=1">
|
|
||||||
</head>
|
|
||||||
<body>
|
|
||||||
<script type="text/javascript" src="promifill.js"></script>
|
|
||||||
<!-- <script src="https://cdn.jsdelivr.net/npm/promise-polyfill@8/dist/polyfill.min.js"></script> -->
|
|
||||||
<script type="text/javascript">
|
|
||||||
//window.Promise = Promifill;
|
|
||||||
function reqAsPromise(req) {
|
|
||||||
return new Promise(function (resolve, reject) {
|
|
||||||
req.onsuccess = function() {
|
|
||||||
resolve(req);
|
|
||||||
Promise.flushQueue && Promise.flushQueue();
|
|
||||||
};
|
|
||||||
req.onerror = function(e) {
|
|
||||||
reject(new Error("IDB request failed: " + e.target.error.message));
|
|
||||||
Promise.flushQueue && Promise.flushQueue();
|
|
||||||
};
|
|
||||||
});
|
|
||||||
}
|
|
||||||
|
|
||||||
function Storage(databaseName) {
|
|
||||||
this._databaseName = databaseName;
|
|
||||||
this._database = null;
|
|
||||||
}
|
|
||||||
|
|
||||||
Storage.prototype = {
|
|
||||||
open: function() {
|
|
||||||
const req = window.indexedDB.open(this._databaseName);
|
|
||||||
const self = this;
|
|
||||||
req.onupgradeneeded = function(ev) {
|
|
||||||
const db = ev.target.result;
|
|
||||||
const oldVersion = ev.oldVersion;
|
|
||||||
self._createStores(db, oldVersion);
|
|
||||||
};
|
|
||||||
return reqAsPromise(req).then(function() {
|
|
||||||
self._database = req.result;
|
|
||||||
});
|
|
||||||
},
|
|
||||||
openTxn: function(mode, storeName) {
|
|
||||||
const txn = this._database.transaction([storeName], mode);
|
|
||||||
const store = txn.objectStore(storeName);
|
|
||||||
return Promise.resolve(store);
|
|
||||||
},
|
|
||||||
_createStores: function(db) {
|
|
||||||
db.createObjectStore("foos", {keyPath: ["id"]});
|
|
||||||
}
|
|
||||||
};
|
|
||||||
|
|
||||||
function getAll(store) {
|
|
||||||
const request = store.openCursor();
|
|
||||||
const results = [];
|
|
||||||
return new Promise(function(resolve, reject) {
|
|
||||||
request.onsuccess = function(event) {
|
|
||||||
const cursor = event.target.result;
|
|
||||||
if(cursor) {
|
|
||||||
results.push(cursor.value);
|
|
||||||
cursor.continue();
|
|
||||||
} else {
|
|
||||||
resolve(results);
|
|
||||||
Promise.flushQueue && Promise.flushQueue();
|
|
||||||
}
|
|
||||||
};
|
|
||||||
request.onerror = function(e) {
|
|
||||||
reject(new Error("IDB request failed: " + e.target.error.message));
|
|
||||||
Promise.flushQueue && Promise.flushQueue();
|
|
||||||
};
|
|
||||||
});
|
|
||||||
}
|
|
||||||
|
|
||||||
async function main() {
|
|
||||||
try {
|
|
||||||
let storage = new Storage("idb-promises");
|
|
||||||
await storage.open();
|
|
||||||
const store = await storage.openTxn("readwrite", "foos");
|
|
||||||
store.clear();
|
|
||||||
store.add({id: 5, name: "foo"});
|
|
||||||
store.add({id: 6, name: "bar"});
|
|
||||||
console.log("all1", await getAll(store));
|
|
||||||
store.add({id: 7, name: "bazzz"});
|
|
||||||
console.log("all2", await getAll(store));
|
|
||||||
} catch(err) {
|
|
||||||
console.error(err.message + ": " + err.stack);
|
|
||||||
};
|
|
||||||
}
|
|
||||||
main();
|
|
||||||
|
|
||||||
|
|
||||||
/*
|
|
||||||
|
|
||||||
we basically want something like this for IE11/Win7:
|
|
||||||
|
|
||||||
return new Promise(function (resolve, reject) {
|
|
||||||
req.onsuccess = function() {
|
|
||||||
resolve(req);
|
|
||||||
Promise?.flushQueue();
|
|
||||||
};
|
|
||||||
req.onerror = function(e) {
|
|
||||||
reject(new Error("IDB request failed: " + e.target.error.message));
|
|
||||||
Promise?.flushQueue();
|
|
||||||
};
|
|
||||||
});
|
|
||||||
|
|
||||||
we don't have this problem on platforms with a native promise implementation, so we can just have our own (forked) promise polyfill?
|
|
||||||
*/
|
|
||||||
</script>
|
|
||||||
</body>
|
|
||||||
</html>
|
|
||||||
|
|
|
@@ -1,169 +0,0 @@
|
||||||
<!DOCTYPE html>
|
|
||||||
<html>
|
|
||||||
<head>
|
|
||||||
<meta charset="utf-8">
|
|
||||||
<meta name="viewport" content="width=device-width, initial-scale=1">
|
|
||||||
</head>
|
|
||||||
<body>
|
|
||||||
<script type="text/javascript">
|
|
||||||
function reqAsPromise(req) {
|
|
||||||
return new Promise(function (resolve, reject) {
|
|
||||||
req.onsuccess = function() {
|
|
||||||
resolve(req.result);
|
|
||||||
};
|
|
||||||
req.onerror = function(e) {
|
|
||||||
reject(new Error("IDB request failed: " + req.error));
|
|
||||||
};
|
|
||||||
});
|
|
||||||
}
|
|
||||||
function txnAsPromise(txn) {
|
|
||||||
return new Promise(function (resolve, reject) {
|
|
||||||
txn.oncomplete = function() {
|
|
||||||
resolve(txn);
|
|
||||||
};
|
|
||||||
txn.onabort = function(e) {
|
|
||||||
reject(new Error("Transaction got aborted: " + txn.error));
|
|
||||||
};
|
|
||||||
});
|
|
||||||
}
|
|
||||||
|
|
||||||
const BrowserMutationObserver = window.MutationObserver || window.WebKitMutationObserver;
|
|
||||||
|
|
||||||
function useMutationObserver(flush) {
|
|
||||||
let iterations = 0;
|
|
||||||
const observer = new BrowserMutationObserver(flush);
|
|
||||||
const node = document.createTextNode('');
|
|
||||||
observer.observe(node, { characterData: true });
|
|
||||||
|
|
||||||
return () => {
|
|
||||||
node.data = (iterations = ++iterations % 2);
|
|
||||||
};
|
|
||||||
}
|
|
||||||
|
|
||||||
const wait = (function() {
|
|
||||||
let resolve = null;
|
|
||||||
const trigger = useMutationObserver(() => {
|
|
||||||
resolve();
|
|
||||||
});
|
|
||||||
return () => {
|
|
||||||
return new Promise(r => {
|
|
||||||
resolve = r;
|
|
||||||
trigger();
|
|
||||||
});
|
|
||||||
};
|
|
||||||
})();
|
|
||||||
|
|
||||||
|
|
||||||
var _resolve = Promise.resolve.bind(Promise);
|
|
||||||
var _then = Promise.prototype.then;
|
|
||||||
|
|
||||||
async function delay() {
|
|
||||||
return Promise.resolve();
|
|
||||||
// two consecutive macro tasks
|
|
||||||
//await new Promise(r => setImmediate(r));
|
|
||||||
// the next macro task will now be the complete event of the txn,
|
|
||||||
// so schedule another macro task to execute after that
|
|
||||||
//await new Promise(r => setImmediate(r));
|
|
||||||
//return;
|
|
||||||
// for (let i = 0; i < 1000; i+=1) {
|
|
||||||
// console.log("await...");
|
|
||||||
// await wait();
|
|
||||||
// }
|
|
||||||
let p = _resolve(0);
|
|
||||||
for (let i=0;i<10;++i) {
|
|
||||||
p = _then.call(p, x => x + 1);
|
|
||||||
}
|
|
||||||
let result = await p;
|
|
||||||
console.log("Result: "+ result + " (should be 10)");
|
|
||||||
}
|
|
||||||
|
|
||||||
class Storage {
|
|
||||||
constructor(databaseName) {
|
|
||||||
this._databaseName = databaseName;
|
|
||||||
this._database = null;
|
|
||||||
}
|
|
||||||
|
|
||||||
open() {
|
|
||||||
const req = window.indexedDB.open(this._databaseName);
|
|
||||||
const self = this;
|
|
||||||
req.onupgradeneeded = function(ev) {
|
|
||||||
const db = ev.target.result;
|
|
||||||
const oldVersion = ev.oldVersion;
|
|
||||||
self._createStores(db, oldVersion);
|
|
||||||
};
|
|
||||||
return reqAsPromise(req).then(function() {
|
|
||||||
self._database = req.result;
|
|
||||||
});
|
|
||||||
}
|
|
||||||
|
|
||||||
openTxn(mode, storeName) {
|
|
||||||
const txn = this._database.transaction([storeName], mode);
|
|
||||||
txn.addEventListener("complete", () => {
|
|
||||||
console.info(`transaction ${mode} for ${storeName} completed`);
|
|
||||||
});
|
|
||||||
txn.addEventListener("abort", e => {
|
|
||||||
console.warn(`transaction ${mode} for ${storeName} aborted`, e.target.error);
|
|
||||||
});
|
|
||||||
return txn;
|
|
||||||
}
|
|
||||||
|
|
||||||
_createStores(db) {
|
|
||||||
db.createObjectStore("foos", {keyPath: "id"});
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
async function getAll(store, depth = 0) {
|
|
||||||
if (depth < 15) {
|
|
||||||
return await getAll(store, depth + 1);
|
|
||||||
}
|
|
||||||
const request = store.openCursor();
|
|
||||||
const results = [];
|
|
||||||
return await new Promise(function(resolve, reject) {
|
|
||||||
request.onsuccess = function(event) {
|
|
||||||
const cursor = event.target.result;
|
|
||||||
if(cursor) {
|
|
||||||
results.push(cursor.value);
|
|
||||||
cursor.continue();
|
|
||||||
} else {
|
|
||||||
resolve(results);
|
|
||||||
Promise.flushQueue && Promise.flushQueue();
|
|
||||||
}
|
|
||||||
};
|
|
||||||
request.onerror = function(e) {
|
|
||||||
reject(new Error("IDB request failed: " + e.target.error.message));
|
|
||||||
Promise.flushQueue && Promise.flushQueue();
|
|
||||||
};
|
|
||||||
});
|
|
||||||
}
|
|
||||||
|
|
||||||
async function main() {
|
|
||||||
try {
|
|
||||||
let storage = new Storage("idb-promises");
|
|
||||||
await storage.open();
|
|
||||||
//await reqAsPromise(storage.openTxn("readwrite", "foos").objectStore("foos").clear());
|
|
||||||
|
|
||||||
for (let i = 0; i < 10; i += 1) {
|
|
||||||
storage.openTxn("readonly", "foos").objectStore("foos").get(5);
|
|
||||||
//console.log("from readtxn", await reqAsPromise(storage.openTxn("readonly", "foos").objectStore("foos").get(5)));
|
|
||||||
const txn = storage.openTxn("readwrite", "foos");
|
|
||||||
const store = txn.objectStore("foos");
|
|
||||||
console.log("writing the foos");
|
|
||||||
store.put({id: 5, name: "foo"});
|
|
||||||
store.put({id: 6, name: "bar"});
|
|
||||||
store.put({id: 7, name: "bazzz"});
|
|
||||||
await delay();
|
|
||||||
console.log("reading the foos");
|
|
||||||
console.log("5", await reqAsPromise(store.get(5)));
|
|
||||||
console.log("6", await reqAsPromise(store.get(6)));
|
|
||||||
console.log("7", await reqAsPromise(store.get(7)));
|
|
||||||
// await txnAsPromise(txn);
|
|
||||||
}
|
|
||||||
} catch(err) {
|
|
||||||
console.error(err);
|
|
||||||
};
|
|
||||||
}
|
|
||||||
main();
|
|
||||||
</script>
|
|
||||||
</body>
|
|
||||||
</html>
|
|
||||||
|
|
|
@@ -1,118 +0,0 @@
|
||||||
<!DOCTYPE html>
|
|
||||||
<html>
|
|
||||||
<head>
|
|
||||||
<meta charset="utf-8">
|
|
||||||
<meta name="viewport" content="width=device-width, initial-scale=1">
|
|
||||||
</head>
|
|
||||||
<body>
|
|
||||||
<script type="text/javascript" src="promifill.js"></script>
|
|
||||||
<!-- <script src="https://cdn.jsdelivr.net/npm/promise-polyfill@8/dist/polyfill.min.js"></script> -->
|
|
||||||
<script type="text/javascript">
|
|
||||||
//window.Promise = Promifill;
|
|
||||||
function reqAsPromise(req) {
|
|
||||||
return new Promise(function (resolve, reject) {
|
|
||||||
req.onsuccess = function() {
|
|
||||||
resolve(req);
|
|
||||||
Promise.flushQueue && Promise.flushQueue();
|
|
||||||
};
|
|
||||||
req.onerror = function(e) {
|
|
||||||
reject(new Error("IDB request failed: " + e.target.error.message));
|
|
||||||
Promise.flushQueue && Promise.flushQueue();
|
|
||||||
};
|
|
||||||
});
|
|
||||||
}
|
|
||||||
|
|
||||||
function Storage(databaseName) {
|
|
||||||
this._databaseName = databaseName;
|
|
||||||
this._database = null;
|
|
||||||
}
|
|
||||||
|
|
||||||
Storage.prototype = {
|
|
||||||
open: function() {
|
|
||||||
const req = window.indexedDB.open(this._databaseName);
|
|
||||||
const self = this;
|
|
||||||
req.onupgradeneeded = function(ev) {
|
|
||||||
const db = ev.target.result;
|
|
||||||
const oldVersion = ev.oldVersion;
|
|
||||||
self._createStores(db, oldVersion);
|
|
||||||
};
|
|
||||||
return reqAsPromise(req).then(function() {
|
|
||||||
self._database = req.result;
|
|
||||||
});
|
|
||||||
},
|
|
||||||
openTxn: function(mode, storeName) {
|
|
||||||
const txn = this._database.transaction([storeName], mode);
|
|
||||||
const store = txn.objectStore(storeName);
|
|
||||||
return Promise.resolve(store);
|
|
||||||
},
|
|
||||||
_createStores: function(db) {
|
|
||||||
db.createObjectStore("foos", {keyPath: ["id"]});
|
|
||||||
}
|
|
||||||
};
|
|
||||||
|
|
||||||
function getAll(store) {
|
|
||||||
const request = store.openCursor();
|
|
||||||
const results = [];
|
|
||||||
return new Promise(function(resolve, reject) {
|
|
||||||
request.onsuccess = function(event) {
|
|
||||||
const cursor = event.target.result;
|
|
||||||
if(cursor) {
|
|
||||||
results.push(cursor.value);
|
|
||||||
cursor.continue();
|
|
||||||
} else {
|
|
||||||
resolve(results);
|
|
||||||
Promise.flushQueue && Promise.flushQueue();
|
|
||||||
}
|
|
||||||
};
|
|
||||||
request.onerror = function(e) {
|
|
||||||
reject(new Error("IDB request failed: " + e.target.error.message));
|
|
||||||
Promise.flushQueue && Promise.flushQueue();
|
|
||||||
};
|
|
||||||
});
|
|
||||||
}
|
|
||||||
|
|
||||||
function main() {
|
|
||||||
let storage = new Storage("idb-promises");
|
|
||||||
let store;
|
|
||||||
storage.open().then(function() {
|
|
||||||
return storage.openTxn("readwrite", "foos");
|
|
||||||
}).then(function(s) {
|
|
||||||
store = s;
|
|
||||||
store.clear();
|
|
||||||
store.add({id: 5, name: "foo"});
|
|
||||||
store.add({id: 6, name: "bar"});
|
|
||||||
return getAll(store);
|
|
||||||
}).then(function(all) {
|
|
||||||
console.log("all1", all);
|
|
||||||
store.add({id: 7, name: "bazzz"});
|
|
||||||
return getAll(store);
|
|
||||||
}).then(function(all) {
|
|
||||||
console.log("all2", all);
|
|
||||||
}).catch(function(err) {
|
|
||||||
console.error(err.message + ": " + err.stack);
|
|
||||||
});
|
|
||||||
}
|
|
||||||
main();
|
|
||||||
|
|
||||||
|
|
||||||
/*
|
|
||||||
|
|
||||||
we basically want something like this for IE11/Win7:
|
|
||||||
|
|
||||||
return new Promise(function (resolve, reject) {
|
|
||||||
req.onsuccess = function() {
|
|
||||||
resolve(req);
|
|
||||||
Promise?.flushQueue();
|
|
||||||
};
|
|
||||||
req.onerror = function(e) {
|
|
||||||
reject(new Error("IDB request failed: " + e.target.error.message));
|
|
||||||
Promise?.flushQueue();
|
|
||||||
};
|
|
||||||
});
|
|
||||||
|
|
||||||
we don't have this problem on platforms with a native promise implementation, so we can just have our own (forked) promise polyfill?
|
|
||||||
*/
|
|
||||||
</script>
|
|
||||||
</body>
|
|
||||||
</html>
|
|
||||||
|
|
|
@@ -1,71 +0,0 @@
|
||||||
<html>
|
|
||||||
<head><meta charset="utf-8"></head>
|
|
||||||
<body>
|
|
||||||
<script type="text/javascript">
|
|
||||||
|
|
||||||
const log = (...params) => {
|
|
||||||
document.write(params.join(" ")+"<br>");
|
|
||||||
};
|
|
||||||
|
|
||||||
function reqAsPromise(req) {
|
|
||||||
return new Promise((resolve, reject) => {
|
|
||||||
req.onsuccess = () => resolve(req.result);
|
|
||||||
req.onerror = (err) => reject(err);
|
|
||||||
});
|
|
||||||
}
|
|
||||||
|
|
||||||
function txnAsPromise(txn) {
|
|
||||||
return new Promise((resolve, reject) => {
|
|
||||||
txn.addEventListener("complete", resolve);
|
|
||||||
txn.addEventListener("abort", reject);
|
|
||||||
});
|
|
||||||
}
|
|
||||||
|
|
||||||
function openDatabase(name, createObjectStore, version) {
|
|
||||||
const req = indexedDB.open(name, version);
|
|
||||||
req.onupgradeneeded = (ev) => {
|
|
||||||
const db = ev.target.result;
|
|
||||||
const txn = ev.target.transaction;
|
|
||||||
const oldVersion = ev.oldVersion;
|
|
||||||
createObjectStore(db, txn, oldVersion, version);
|
|
||||||
};
|
|
||||||
return reqAsPromise(req);
|
|
||||||
}
|
|
||||||
|
|
||||||
async function detectWebkitEarlyCloseTxnBug() {
|
|
||||||
const dbName = "webkit_test_inactive_txn_" + Math.random() * Number.MAX_SAFE_INTEGER;
|
|
||||||
try {
|
|
||||||
const db = await openDatabase(dbName, db => {
|
|
||||||
db.createObjectStore("test", {keyPath: "key"});
|
|
||||||
}, 1);
|
|
||||||
const readTxn = db.transaction(["test"], "readonly");
|
|
||||||
await reqAsPromise(readTxn.objectStore("test").get("somekey"));
|
|
||||||
// schedule a macro task in between the two txns
|
|
||||||
await new Promise(r => setTimeout(r, 0));
|
|
||||||
const writeTxn = db.transaction(["test"], "readwrite");
|
|
||||||
await Promise.resolve();
|
|
||||||
writeTxn.objectStore("test").add({key: "somekey", value: "foo"});
|
|
||||||
await txnAsPromise(writeTxn);
|
|
||||||
} catch (err) {
|
|
||||||
if (err.name === "TransactionInactiveError") {
|
|
||||||
return true;
|
|
||||||
}
|
|
||||||
} finally {
|
|
||||||
try {
|
|
||||||
indexedDB.deleteDatabase(dbName);
|
|
||||||
} catch (err) {}
|
|
||||||
}
|
|
||||||
return false;
|
|
||||||
}
|
|
||||||
|
|
||||||
(async () => {
|
|
||||||
if (await detectWebkitEarlyCloseTxnBug()) {
|
|
||||||
log("the test failed, your browser seems to have the bug");
|
|
||||||
} else {
|
|
||||||
log("the test succeeded, your browser seems fine");
|
|
||||||
}
|
|
||||||
})();
|
|
||||||
|
|
||||||
</script>
|
|
||||||
</body>
|
|
||||||
</html>
|
|
|
@@ -1,24 +0,0 @@
|
||||||
<!DOCTYPE html>
|
|
||||||
<html>
|
|
||||||
<head>
|
|
||||||
<meta charset="utf-8">
|
|
||||||
</head>
|
|
||||||
<body>
|
|
||||||
<ul id="changes"></ul>
|
|
||||||
<script type="text/javascript">
|
|
||||||
const ul = document.getElementById("changes");
|
|
||||||
window.onhashchange = function() {
|
|
||||||
const hash = document.location.hash.substr(1);
|
|
||||||
const li = document.createElement("li");
|
|
||||||
li.appendChild(document.createTextNode(hash));
|
|
||||||
ul.appendChild(li);
|
|
||||||
window.history.replaceState(null, null, "#" + hash + hash);
|
|
||||||
}
|
|
||||||
</script>
|
|
||||||
<p>
|
|
||||||
<a href="#foo">foo</a>
|
|
||||||
<a href="#bar">bar</a>
|
|
||||||
<a href="#baz">baz</a>
|
|
||||||
</p>
|
|
||||||
</body>
|
|
||||||
</html>
|
|
|
@@ -1,91 +0,0 @@
|
||||||
<!DOCTYPE html>
|
|
||||||
<html>
|
|
||||||
<head>
|
|
||||||
<meta charset="utf-8">
|
|
||||||
</head>
|
|
||||||
<body>
|
|
||||||
<script src="https://dl.dropboxusercontent.com/s/r55397ld512etib/EncoderDecoderTogether.min.js?dl=0" nomodule="" type="text/javascript"></script>
|
|
||||||
<script src="https://cdn.jsdelivr.net/npm/promise-polyfill@8/dist/polyfill.min.js"></script>
|
|
||||||
<script src="deps/jsSHA/dist/sha512.js"></script>
|
|
||||||
<script type="text/javascript">
|
|
||||||
|
|
||||||
function decodeBase64(base64) {
|
|
||||||
const binStr = window.atob(base64);
|
|
||||||
const len = binStr.length;
|
|
||||||
const bytes = new Uint8Array(len);
|
|
||||||
for (let i = 0; i < len; i++) {
|
|
||||||
bytes[i] = binStr.charCodeAt(i);
|
|
||||||
}
|
|
||||||
return bytes;
|
|
||||||
}
|
|
||||||
|
|
||||||
function encodeBase64(bytes) {
|
|
||||||
let binStr = "";
|
|
||||||
for (let i = 0; i < bytes.length; i++) {
|
|
||||||
binStr += String.fromCharCode(bytes[i]);
|
|
||||||
}
|
|
||||||
return window.btoa(binStr);
|
|
||||||
}
|
|
||||||
|
|
||||||
function subtleCryptoResult(promiseOrOp, method) {
|
|
||||||
if (promiseOrOp instanceof Promise) {
|
|
||||||
return promiseOrOp;
|
|
||||||
} else {
|
|
||||||
return new Promise(function(resolve, reject) {
|
|
||||||
promiseOrOp.oncomplete = function(e) {resolve(e.target.result);}
|
|
||||||
promiseOrOp.onerror = function(e) {
|
|
||||||
reject(new Error("Crypto error on " + method));
|
|
||||||
}
|
|
||||||
});
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
const subtleCrypto = (window.crypto || window.msCrypto).subtle;
|
|
||||||
|
|
||||||
function computeFallback(key, data, hash) {
|
|
||||||
const shaObj = new jsSHA(hash, "UINT8ARRAY", {
|
|
||||||
"hmacKey": {
|
|
||||||
"value": key,
|
|
||||||
"format": "UINT8ARRAY"
|
|
||||||
}
|
|
||||||
});
|
|
||||||
shaObj.update(data);
|
|
||||||
return Promise.resolve(shaObj.getHash("UINT8ARRAY"));
|
|
||||||
}
|
|
||||||
|
|
||||||
function compute(key, data, hash) {
|
|
||||||
const opts = {
|
|
||||||
name: 'HMAC',
|
|
||||||
hash: {name: hash},
|
|
||||||
};
|
|
||||||
return subtleCryptoResult(subtleCrypto.importKey(
|
|
||||||
'raw',
|
|
||||||
key,
|
|
||||||
opts,
|
|
||||||
false,
|
|
||||||
['sign']
|
|
||||||
), "importKey").then(function (hmacKey) {
|
|
||||||
console.log("hmacKey", hmacKey);
|
|
||||||
return subtleCryptoResult(subtleCrypto.sign(
|
|
||||||
opts,
|
|
||||||
hmacKey,
|
|
||||||
data
|
|
||||||
), "sign");
|
|
||||||
}).then(function(buffer) {
|
|
||||||
return new Uint8Array(buffer);
|
|
||||||
});
|
|
||||||
}
|
|
||||||
|
|
||||||
const te = new TextEncoder();
|
|
||||||
computeFallback(
|
|
||||||
new Uint8Array(te.encode("I am a key!!")),
|
|
||||||
new Uint8Array(te.encode("I am some data!!")),
|
|
||||||
"SHA-512"
|
|
||||||
).then(function(mac) {
|
|
||||||
// should be 9bpJS7myNR/ttCfts+woXJSapVb19qqFRntGh17rHydOBB8+pplZFG8Cc4Qkxxznri4nWyzhFWcWnenY9vd5rA==
|
|
||||||
alert(encodeBase64(mac));
|
|
||||||
})
|
|
||||||
</script>
|
|
||||||
</body>
|
|
||||||
</html>
|
|
||||||
|
|
|
@@ -1,23 +0,0 @@
|
||||||
<html>
|
|
||||||
<head>
|
|
||||||
<meta charset="utf-8">
|
|
||||||
<meta name="viewport" content="width=device-width, initial-scale=1">
|
|
||||||
</head>
|
|
||||||
<body>
|
|
||||||
<script type="text/javascript">
|
|
||||||
const bytes = [104, 101, 108, 108, 111, 32, 119, 111, 114, 108, 100];
|
|
||||||
const buffer = new Uint8Array(bytes.length);
|
|
||||||
for (let i = 0; i < buffer.length; i += 1) {
|
|
||||||
buffer[i] = bytes[i];
|
|
||||||
}
|
|
||||||
const blob = new Blob([buffer], {type: "text/plain"});
|
|
||||||
const reader = new FileReader();
|
|
||||||
reader.addEventListener("load", function(evt) {
|
|
||||||
const result = evt.target.result;
|
|
||||||
console.log("result", result);
|
|
||||||
});
|
|
||||||
reader.addEventListener("error", function(evt) {reject(evt.target.error);});
|
|
||||||
reader.readAsText(blob, "utf-8");
|
|
||||||
</script>
|
|
||||||
</body>
|
|
||||||
</html>
|
|
|
@@ -1,3 +0,0 @@
|
||||||
p {
|
|
||||||
color: red;
|
|
||||||
}
|
|
|
@@ -1,378 +0,0 @@
|
||||||
<!DOCTYPE html>
|
|
||||||
<html>
|
|
||||||
<head>
|
|
||||||
<meta charset="utf-8">
|
|
||||||
<style type="text/css">
|
|
||||||
body {
|
|
||||||
margin: 0;
|
|
||||||
padding: 0;
|
|
||||||
height: 100%;
|
|
||||||
overflow: hidden;
|
|
||||||
}
|
|
||||||
.container {
|
|
||||||
display: grid;
|
|
||||||
grid-template: "left middle" 1fr /
|
|
||||||
200px 1fr;
|
|
||||||
height: 100vh;
|
|
||||||
}
|
|
||||||
|
|
||||||
.container .left {
|
|
||||||
display: grid;
|
|
||||||
grid-template:
|
|
||||||
"welcome" auto
|
|
||||||
"rooms" 1fr /
|
|
||||||
1fr;
|
|
||||||
min-height: 0;
|
|
||||||
}
|
|
||||||
|
|
||||||
.container .middle {
|
|
||||||
display: grid;
|
|
||||||
grid-template:
|
|
||||||
"header" auto
|
|
||||||
"timeline" 1fr
|
|
||||||
"composer" auto /
|
|
||||||
1fr;
|
|
||||||
min-height: 0;
|
|
||||||
position: relative;
|
|
||||||
}
|
|
||||||
|
|
||||||
.left { grid-area: left;}
|
|
||||||
.left p {
|
|
||||||
grid-area: welcome;
|
|
||||||
display: flex;
|
|
||||||
}
|
|
||||||
.left ul {
|
|
||||||
grid-area: rooms;
|
|
||||||
min-height: 0;
|
|
||||||
overflow-y: auto;
|
|
||||||
}
|
|
||||||
|
|
||||||
.middle { grid-area: middle;}
|
|
||||||
.middle .header { grid-area: header;}
|
|
||||||
.middle .timeline {
|
|
||||||
grid-area: timeline;
|
|
||||||
min-height: 0;
|
|
||||||
overflow-y: auto;
|
|
||||||
}
|
|
||||||
.middle .composer {
|
|
||||||
grid-area: composer;
|
|
||||||
}
|
|
||||||
|
|
||||||
.header {
|
|
||||||
display: flex;
|
|
||||||
}
|
|
||||||
|
|
||||||
.header h2 {
|
|
||||||
flex: 1;
|
|
||||||
}
|
|
||||||
|
|
||||||
.composer {
|
|
||||||
display: flex;
|
|
||||||
}
|
|
||||||
|
|
||||||
.composer input {
|
|
||||||
display: block;
|
|
||||||
flex: 1;
|
|
||||||
}
|
|
||||||
|
|
||||||
.menu {
|
|
||||||
position: absolute;
|
|
||||||
border-radius: 8px;
|
|
||||||
box-shadow: 2px 2px 10px rgba(0,0,0,0.5);
|
|
||||||
padding: 16px;
|
|
||||||
background-color: white;
|
|
||||||
z-index: 1;
|
|
||||||
list-style: none;
|
|
||||||
margin: 0;
|
|
||||||
}
|
|
||||||
</style>
|
|
||||||
</head>
|
|
||||||
<body>
|
|
||||||
<div class="container">
|
|
||||||
<div class="left">
|
|
||||||
<p>Welcome!<button>⋮</button></p>
|
|
||||||
<ul>
|
|
||||||
<li>Room xyz <button>⋮</button></li>
|
|
||||||
<li>Room xyz</li>
|
|
||||||
<li>Room xyz</li>
|
|
||||||
<li>Room xyz</li>
|
|
||||||
<li>Room xyz</li>
|
|
||||||
<li>Room xyz</li>
|
|
||||||
<li>Room xyz</li>
|
|
||||||
<li>Room xyz</li>
|
|
||||||
<li>Room xyz</li>
|
|
||||||
<li>Room xyz</li>
|
|
||||||
<li>Room xyz</li>
|
|
||||||
<li>Room xyz</li>
|
|
||||||
<li>Room xyz <button>⋮</button></li>
|
|
||||||
<li>Room xyz</li>
|
|
||||||
<li>Room xyz</li>
|
|
||||||
<li>Room xyz</li>
|
|
||||||
<li>Room xyz</li>
|
|
||||||
<li>Room xyz</li>
|
|
||||||
<li>Room xyz</li>
|
|
||||||
<li>Room xyz</li>
|
|
||||||
<li>Room xyz</li>
|
|
||||||
<li>Room xyz</li>
|
|
||||||
<li>Room xyz</li>
|
|
||||||
<li>Room xyz</li>
|
|
||||||
<li>Room xyz</li>
|
|
||||||
<li>Room xyz</li>
|
|
||||||
<li>Room xyz</li>
|
|
||||||
<li>Room xyz <button>⋮</button></li>
|
|
||||||
</ul>
|
|
||||||
</div>
|
|
||||||
<div class="middle">
|
|
||||||
<div class="header">
|
|
||||||
<h2>Room xyz</h2>
|
|
||||||
<button>⋮</button>
|
|
||||||
</div>
|
|
||||||
<ul class="timeline">
|
|
||||||
<li>Message abc</li>
|
|
||||||
<li>Message abc <button>⋮</button></li>
|
|
||||||
<li>Message abc</li>
|
|
||||||
<li>Message abc</li>
|
|
||||||
<li>Message abc</li>
|
|
||||||
<li>Message abc</li>
|
|
||||||
<li>Message abc</li>
|
|
||||||
<li>Message abc</li>
|
|
||||||
<li>Message abc</li>
|
|
||||||
<li>Message abc</li>
|
|
||||||
<li>Message abc</li>
|
|
||||||
<li>Message abc</li>
|
|
||||||
<li>Message abc</li>
|
|
||||||
<li>Message abc</li>
|
|
||||||
<li>Message abc</li>
|
|
||||||
<li>Message abc</li>
|
|
||||||
<li>Message abc</li>
|
|
||||||
<li>Message abc</li>
|
|
||||||
<li>Message abc</li>
|
|
||||||
<li>Message abc</li>
|
|
||||||
<li>Message abc</li>
|
|
||||||
<li>Message abc</li>
|
|
||||||
<li>Message abc</li>
|
|
||||||
<li>Message abc</li>
|
|
||||||
<li>Message abc</li>
|
|
||||||
<li>Message abc</li>
|
|
||||||
<li>Message abc</li>
|
|
||||||
<li>Message abc</li>
|
|
||||||
<li>Message abc</li>
|
|
||||||
<li>Message abc</li>
|
|
||||||
<li>Message abc <button>⋮</button></li>
|
|
||||||
<li>Message abc</li>
|
|
||||||
<li>Message abc</li>
|
|
||||||
<li>Message abc</li>
|
|
||||||
<li>Message abc</li>
|
|
||||||
<li>Message abc</li>
|
|
||||||
<li>Message abc</li>
|
|
||||||
<li>Message abc</li>
|
|
||||||
<li>Message abc</li>
|
|
||||||
<li>Message abc</li>
|
|
||||||
<li>Message abc</li>
|
|
||||||
<li>Message abc</li>
|
|
||||||
<li>Message abc</li>
|
|
||||||
<li>Message abc <button>⋮</button></li>
|
|
||||||
</ul>
|
|
||||||
<div class="composer">
|
|
||||||
<input type="text" name="">
|
|
||||||
<button>⋮</button>
|
|
||||||
</div>
|
|
||||||
</div>
|
|
||||||
</div>
|
|
||||||
<script type="text/javascript">
|
|
||||||
let menu;
|
|
||||||
|
|
||||||
function createMenu(options) {
|
|
||||||
const menu = document.createElement("ul");
|
|
||||||
menu.className = "menu";
|
|
||||||
for (const o of options) {
|
|
||||||
const li = document.createElement("li");
|
|
||||||
li.innerText = o;
|
|
||||||
menu.appendChild(li);
|
|
||||||
}
|
|
||||||
return menu;
|
|
||||||
}
|
|
||||||
|
|
||||||
|
|
||||||
function showMenu(evt) {
|
|
||||||
if (menu) {
|
|
||||||
menu = menu.close();
|
|
||||||
} else if (evt.target.tagName.toLowerCase() === "button") {
|
|
||||||
menu = showPopup(evt.target, createMenu(["Send file", "Save contact", "Send picture", "Foo the bar"]), {
|
|
||||||
horizontal: {
|
|
||||||
relativeTo: "end",
|
|
||||||
align: "start",
|
|
||||||
after: 0,
|
|
||||||
},
|
|
||||||
vertical: {
|
|
||||||
relativeTo: "end",
|
|
||||||
align: "end",
|
|
||||||
after: 10,
|
|
||||||
}
|
|
||||||
});
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
function showMenuInScroller(evt) {
|
|
||||||
if (!menu && evt.target.tagName.toLowerCase() === "button") {
|
|
||||||
evt.stopPropagation();
|
|
||||||
menu = showPopup(evt.target, createMenu(["Show reactions", "Share"]), {
|
|
||||||
horizontal: {
|
|
||||||
relativeTo: "start",
|
|
||||||
align: "end",
|
|
||||||
after: 10,
|
|
||||||
},
|
|
||||||
vertical: {
|
|
||||||
relativeTo: "start",
|
|
||||||
align: "center",
|
|
||||||
}
|
|
||||||
});
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
document.body.addEventListener("click", showMenu, false);
|
|
||||||
document.querySelector(".middle ul").addEventListener("click", showMenuInScroller, false);
|
|
||||||
document.querySelector(".left ul").addEventListener("click", showMenuInScroller, false);
|
|
||||||
|
|
||||||
function showPopup(target, popup, arrangement) {
|
|
||||||
const targetAxes = elementToAxes(target);
|
|
||||||
if (!arrangement) {
|
|
||||||
arrangement = getAutoArrangement(targetAxes);
|
|
||||||
}
|
|
||||||
|
|
||||||
target.offsetParent.appendChild(popup);
|
|
||||||
|
|
||||||
const popupAxes = elementToAxes(popup);
|
|
||||||
const scrollerAxes = elementToAxes(findScrollParent(target));
|
|
||||||
const offsetParentAxes = elementToAxes(target.offsetParent);
|
|
||||||
|
|
||||||
function reposition() {
|
|
||||||
if (scrollerAxes && !isVisibleInScrollParent(targetAxes.vertical, scrollerAxes.vertical)) {
|
|
||||||
popupObj.close();
|
|
||||||
}
|
|
||||||
applyArrangement(
|
|
||||||
popupAxes.vertical,
|
|
||||||
targetAxes.vertical,
|
|
||||||
offsetParentAxes.vertical,
|
|
||||||
scrollerAxes?.vertical,
|
|
||||||
arrangement.vertical
|
|
||||||
);
|
|
||||||
applyArrangement(
|
|
||||||
popupAxes.horizontal,
|
|
||||||
targetAxes.horizontal,
|
|
||||||
offsetParentAxes.horizontal,
|
|
||||||
scrollerAxes?.horizontal,
|
|
||||||
arrangement.horizontal
|
|
||||||
);
|
|
||||||
}
|
|
||||||
reposition();
|
|
||||||
|
|
||||||
document.body.addEventListener("scroll", reposition, true);
|
|
||||||
|
|
||||||
const popupObj = {
|
|
||||||
close() {
|
|
||||||
document.body.removeEventListener("scroll", reposition, true);
|
|
||||||
popup.remove();
|
|
||||||
}
|
|
||||||
};
|
|
||||||
|
|
||||||
return popupObj;
|
|
||||||
}
|
|
||||||
|
|
||||||
function elementToAxes(element) {
|
|
||||||
if (element) {
|
|
||||||
return {
|
|
||||||
horizontal: new HorizontalAxis(element),
|
|
||||||
vertical: new VerticalAxis(element),
|
|
||||||
element
|
|
||||||
};
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
function findScrollParent(el) {
|
|
||||||
let parent = el;
|
|
||||||
do {
|
|
||||||
parent = parent.parentElement;
|
|
||||||
if (parent.scrollHeight > parent.clientHeight) {
|
|
||||||
return parent;
|
|
||||||
}
|
|
||||||
} while (parent !== el.offsetParent);
|
|
||||||
}
|
|
||||||
|
|
||||||
function isVisibleInScrollParent(targetAxis, scrollerAxis) {
|
|
||||||
// clipped at start?
|
|
||||||
if ((targetAxis.offsetStart + targetAxis.clientSize) < (
|
|
||||||
scrollerAxis.offsetStart +
|
|
||||||
scrollerAxis.scrollOffset
|
|
||||||
)) {
|
|
||||||
return false;
|
|
||||||
}
|
|
||||||
// clipped at end?
|
|
||||||
if (targetAxis.offsetStart > (
|
|
||||||
scrollerAxis.offsetStart +
|
|
||||||
scrollerAxis.clientSize +
|
|
||||||
scrollerAxis.scrollOffset
|
|
||||||
)) {
|
|
||||||
return false;
|
|
||||||
}
|
|
||||||
return true;
|
|
||||||
}
|
|
||||||
|
|
||||||
function applyArrangement(elAxis, targetAxis, offsetParentAxis, scrollerAxis, {relativeTo, align, before, after}) {
|
|
||||||
if (relativeTo === "end") {
|
|
||||||
let end = offsetParentAxis.clientSize - targetAxis.offsetStart;
|
|
||||||
if (align === "end") {
|
|
||||||
end -= elAxis.offsetSize;
|
|
||||||
} else if (align === "center") {
|
|
||||||
end -= ((elAxis.offsetSize / 2) - (targetAxis.offsetSize / 2));
|
|
||||||
}
|
|
||||||
if (typeof before === "number") {
|
|
||||||
end += before;
|
|
||||||
} else if (typeof after === "number") {
|
|
||||||
end -= (targetAxis.offsetSize + after);
|
|
||||||
}
|
|
||||||
elAxis.end = end;
|
|
||||||
} else if (relativeTo === "start") {
|
|
||||||
let scrollOffset = scrollerAxis?.scrollOffset || 0;
|
|
||||||
let start = targetAxis.offsetStart - scrollOffset;
|
|
||||||
if (align === "start") {
|
|
||||||
start -= elAxis.offsetSize;
|
|
||||||
} else if (align === "center") {
|
|
||||||
start -= ((elAxis.offsetSize / 2) - (targetAxis.offsetSize / 2));
|
|
||||||
}
|
|
||||||
if (typeof before === "number") {
|
|
||||||
start -= before;
|
|
||||||
} else if (typeof after === "number") {
|
|
||||||
start += (targetAxis.offsetSize + after);
|
|
||||||
}
|
|
||||||
elAxis.start = start;
|
|
||||||
} else {
|
|
||||||
throw new Error("unknown relativeTo: " + relativeTo);
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
class HorizontalAxis {
|
|
||||||
constructor(el) {
|
|
||||||
this.element = el;
|
|
||||||
}
|
|
||||||
get scrollOffset() {return this.element.scrollLeft;}
|
|
||||||
get clientSize() {return this.element.clientWidth;}
|
|
||||||
get offsetSize() {return this.element.offsetWidth;}
|
|
||||||
get offsetStart() {return this.element.offsetLeft;}
|
|
||||||
set start(value) {this.element.style.left = `${value}px`;}
|
|
||||||
set end(value) {this.element.style.right = `${value}px`;}
|
|
||||||
}
|
|
||||||
class VerticalAxis {
|
|
||||||
constructor(el) {
|
|
||||||
this.element = el;
|
|
||||||
}
|
|
||||||
get scrollOffset() {return this.element.scrollTop;}
|
|
||||||
get clientSize() {return this.element.clientHeight;}
|
|
||||||
get offsetSize() {return this.element.offsetHeight;}
|
|
||||||
get offsetStart() {return this.element.offsetTop;}
|
|
||||||
set start(value) {this.element.style.top = `${value}px`;}
|
|
||||||
set end(value) {this.element.style.bottom = `${value}px`;}
|
|
||||||
}
|
|
||||||
</script>
|
|
||||||
</body>
|
|
||||||
</html>
|
|
|
@ -1,3 +0,0 @@
|
||||||
p {
|
|
||||||
color: green;
|
|
||||||
}
|
|
|
@ -1,128 +0,0 @@
|
||||||
<!DOCTYPE html>
|
|
||||||
<html>
|
|
||||||
<head>
|
|
||||||
<meta charset="utf-8">
|
|
||||||
<meta name="viewport" content="width=device-width, initial-scale=1">
|
|
||||||
<style type="text/css">
|
|
||||||
pre {
|
|
||||||
font-family: monospace;
|
|
||||||
display: block;
|
|
||||||
white-space: pre;
|
|
||||||
font-size: 2em;
|
|
||||||
}
|
|
||||||
</style>
|
|
||||||
</head>
|
|
||||||
<body>
|
|
||||||
<script type="text/javascript">
|
|
||||||
if (!Math.imul) Math.imul = function(a,b) {return (a*b)|0;}/* function(a, b) {
|
|
||||||
var aHi = (a >>> 16) & 0xffff;
|
|
||||||
var aLo = a & 0xffff;
|
|
||||||
var bHi = (b >>> 16) & 0xffff;
|
|
||||||
var bLo = b & 0xffff;
|
|
||||||
// the shift by 0 fixes the sign on the high part
|
|
||||||
// the final |0 converts the unsigned value into a signed value
|
|
||||||
return ((aLo * bLo) + (((aHi * bLo + aLo * bHi) << 16) >>> 0) | 0);
|
|
||||||
};*/
|
|
||||||
|
|
||||||
if (!Math.clz32) Math.clz32 = (function(log, LN2){
|
|
||||||
return function(x) {
|
|
||||||
// Let n be ToUint32(x).
|
|
||||||
// Let p be the number of leading zero bits in
|
|
||||||
// the 32-bit binary representation of n.
|
|
||||||
// Return p.
|
|
||||||
var asUint = x >>> 0;
|
|
||||||
if (asUint === 0) {
|
|
||||||
return 32;
|
|
||||||
}
|
|
||||||
return 31 - (log(asUint) / LN2 | 0) |0; // the "| 0" acts like Math.floor
|
|
||||||
};
|
|
||||||
})(Math.log, Math.LN2);
|
|
||||||
</script>
|
|
||||||
<script src="https://cdn.jsdelivr.net/npm/promise-polyfill@8/dist/polyfill.min.js"></script>
|
|
||||||
<script type="text/javascript" src="../lib/olm/olm_legacy.js"></script>
|
|
||||||
<script type="text/javascript">
|
|
||||||
function doit(log) {
|
|
||||||
var alice = new Olm.Account();
|
|
||||||
alice.create();
|
|
||||||
log("alice", alice.identity_keys());
|
|
||||||
|
|
||||||
var bob = new Olm.Account();
|
|
||||||
bob.unpickle("secret", "EWfA87or4GgQ+wqVkyuFiW9gUk3FI6QSXgp8E2dS5RFLvXgy4oFvxwQ1gVnbMkdJz2Hy9ex9UmJ/ZyuRU0aRt0IwXpw/SUNq4IQeVJ7J/miXW7rV4Ep+4RSEf945KbDrokDCS2CoL5PIfv/NYyey32gA0hMi8wWIfIlOxFBV4SBJYSC+Qd54VjprwCg0Sn9vjQouKVrM/+5jzsv9+JK5OpWW0Vrb3qrXwyAOEAQ4WlOQcqZHAyPQIw");
|
|
||||||
log("bob", bob.identity_keys());
|
|
||||||
// generate OTK on receiver side
|
|
||||||
bob.generate_one_time_keys(1);
|
|
||||||
var bobOneTimeKeys = JSON.parse(bob.one_time_keys());
|
|
||||||
var otkName = Object.getOwnPropertyNames(bobOneTimeKeys.curve25519)[0];
|
|
||||||
var bobOneTimeKey = bobOneTimeKeys.curve25519[otkName];
|
|
||||||
// encrypt
|
|
||||||
var aliceSession = new Olm.Session();
|
|
||||||
aliceSession.create_outbound(
|
|
||||||
alice,
|
|
||||||
JSON.parse(bob.identity_keys()).curve25519,
|
|
||||||
bobOneTimeKey
|
|
||||||
);
|
|
||||||
log("alice outbound session created");
|
|
||||||
var aliceSessionPickled = aliceSession.pickle("secret");
|
|
||||||
log("aliceSession pickled", aliceSessionPickled);
|
|
||||||
try {
|
|
||||||
var tmp = new Olm.Session();
|
|
||||||
tmp.unpickle("secret", aliceSessionPickled);
|
|
||||||
log("aliceSession unpickled");
|
|
||||||
} finally {
|
|
||||||
tmp.free();
|
|
||||||
}
|
|
||||||
var message = aliceSession.encrypt("hello secret world");
|
|
||||||
log("message", message);
|
|
||||||
// decrypt
|
|
||||||
var bobSession = new Olm.Session();
|
|
||||||
bobSession.create_inbound(bob, message.body);
|
|
||||||
var plaintext = bobSession.decrypt(message.type, message.body);
|
|
||||||
log("plaintext", plaintext);
|
|
||||||
// remove Bob's OTK as it was used to start an olm session
|
|
||||||
log("bob OTK before removing", bob.one_time_keys());
|
|
||||||
bob.remove_one_time_keys(bobSession);
|
|
||||||
log("bob OTK after removing", bob.one_time_keys());
|
|
||||||
}
|
|
||||||
|
|
||||||
if (window.msCrypto && !window.crypto) {
|
|
||||||
window.crypto = window.msCrypto;
|
|
||||||
}
|
|
||||||
|
|
||||||
function doRun(e) {
|
|
||||||
e.target.setAttribute("disabled", "disabled");
|
|
||||||
var logEl = document.getElementById("log");
|
|
||||||
logEl.innerText = "";
|
|
||||||
var startTime = performance.now();
|
|
||||||
function log() {
|
|
||||||
var timeDiff = Math.round(performance.now() - startTime).toString();
|
|
||||||
while (timeDiff.length < 5) {
|
|
||||||
timeDiff = "0" + timeDiff;
|
|
||||||
}
|
|
||||||
logEl.appendChild(document.createTextNode(timeDiff + " "));
|
|
||||||
for (var i = 0; i < arguments.length; i += 1) {
|
|
||||||
var value = arguments[i];
|
|
||||||
if (typeof value !== "string") {
|
|
||||||
value = JSON.stringify(value);
|
|
||||||
}
|
|
||||||
logEl.appendChild(document.createTextNode(value + " "));
|
|
||||||
}
|
|
||||||
logEl.appendChild(document.createTextNode("\n"));
|
|
||||||
}
|
|
||||||
doit(log);
|
|
||||||
e.target.removeAttribute("disabled");
|
|
||||||
}
|
|
||||||
|
|
||||||
function main() {
|
|
||||||
Olm.init().then(function() {
|
|
||||||
var startButton = document.getElementById("start");
|
|
||||||
startButton.innerText = "Start";
|
|
||||||
startButton.addEventListener("click", doRun);
|
|
||||||
});
|
|
||||||
}
|
|
||||||
document.addEventListener("DOMContentLoaded", main);
|
|
||||||
</script>
|
|
||||||
<pre id="log"></pre>
|
|
||||||
<button id="start">Loading...</button>
|
|
||||||
</body>
|
|
||||||
</html>
|
|
||||||
|
|
|
@ -1,69 +0,0 @@
|
||||||
<html>
|
|
||||||
<head>
|
|
||||||
<meta charset="utf-8">
|
|
||||||
<meta name="viewport" content="width=device-width, initial-scale=1">
|
|
||||||
<style type="text/css">
|
|
||||||
pre {
|
|
||||||
font-family: monospace;
|
|
||||||
display: block;
|
|
||||||
white-space: pre;
|
|
||||||
font-size: 2em;
|
|
||||||
}
|
|
||||||
</style>
|
|
||||||
</head>
|
|
||||||
<body>
|
|
||||||
<script type="text/javascript" src="../lib/olm/olm.js"></script>
|
|
||||||
<script type="module">
|
|
||||||
|
|
||||||
async function main() {
|
|
||||||
const Olm = window.Olm;
|
|
||||||
await Olm.init({
|
|
||||||
locateFile: () => "../lib/olm/olm.wasm",
|
|
||||||
});
|
|
||||||
const alice = new Olm.Account();
|
|
||||||
alice.create();
|
|
||||||
console.log("alice", alice.identity_keys());
|
|
||||||
|
|
||||||
const bob = new Olm.Account();
|
|
||||||
bob.unpickle("secret", "EWfA87or4GgQ+wqVkyuFiW9gUk3FI6QSXgp8E2dS5RFLvXgy4oFvxwQ1gVnbMkdJz2Hy9ex9UmJ/ZyuRU0aRt0IwXpw/SUNq4IQeVJ7J/miXW7rV4Ep+4RSEf945KbDrokDCS2CoL5PIfv/NYyey32gA0hMi8wWIfIlOxFBV4SBJYSC+Qd54VjprwCg0Sn9vjQouKVrM/+5jzsv9+JK5OpWW0Vrb3qrXwyAOEAQ4WlOQcqZHAyPQIw");
|
|
||||||
console.log("bob", bob.identity_keys());
|
|
||||||
// generate OTK on receiver side
|
|
||||||
bob.generate_one_time_keys(1);
|
|
||||||
const bobOneTimeKeys = JSON.parse(bob.one_time_keys());
|
|
||||||
const bobOneTimeKey = Object.values(bobOneTimeKeys.curve25519)[0];
|
|
||||||
// encrypt
|
|
||||||
const aliceSession = new Olm.Session();
|
|
||||||
aliceSession.create_outbound(
|
|
||||||
alice,
|
|
||||||
JSON.parse(bob.identity_keys()).curve25519,
|
|
||||||
bobOneTimeKey,
|
|
||||||
);
|
|
||||||
const message = aliceSession.encrypt("Lorem ipsum dolor sit amet, consectetur adipiscing elit, sed do eiusmod tempor incididunt ut labore et dolore magna aliqua. Pulvinar pellentesque habitant morbi tristique. Pretium nibh ipsum consequat nisl. Molestie a iaculis at erat pellentesque adipiscing. Semper eget duis at tellus at urna condimentum. Malesuada fames ac turpis egestas maecenas pharetra convallis. Sed lectus vestibulum mattis ullamcorper. Elementum facilisis leo vel fringilla est. Mattis vulputate enim nulla aliquet porttitor lacus luctus accumsan tortor. Faucibus nisl tincidunt eget nullam non nisi est. Aliquam faucibus purus in massa tempor. Dui accumsan sit amet nulla facilisi morbi tempus iaculis. Pellentesque dignissim enim sit amet venenatis urna. Nisi porta lorem mollis aliquam ut porttitor leo. Adipiscing elit duis tristique sollicitudin nibh sit. Et netus et malesuada fames ac turpis. Ut diam quam nulla porttitor massa id neque. Pharetra convallis posuere morbi leo urna molestie. A pellentesque sit amet porttitor eget dolor morbi non arcu.");
|
|
||||||
console.log("message", message);
|
|
||||||
// decrypt
|
|
||||||
const bobSession = new Olm.Session();
|
|
||||||
|
|
||||||
let plaintext;
|
|
||||||
const iterations = 698;
|
|
||||||
let decryptDuration = 0;
|
|
||||||
for (let i = 0; i < iterations; i += 1) {
|
|
||||||
bobSession.create_inbound(bob, message.body);
|
|
||||||
const start = performance.now();
|
|
||||||
plaintext = bobSession.decrypt(message.type, message.body);
|
|
||||||
const end = performance.now();
|
|
||||||
const duration = end - start;
|
|
||||||
decryptDuration += duration;
|
|
||||||
}
|
|
||||||
console.log("plaintext", plaintext);
|
|
||||||
console.log(`took avg of ${decryptDuration / iterations}ms to decrypt over ${iterations} iterations`);
|
|
||||||
// remove Bob's OTK as it was used to start an olm session
|
|
||||||
console.log("bob OTK before removing", bob.one_time_keys());
|
|
||||||
bob.remove_one_time_keys(bobSession);
|
|
||||||
console.log("bob OTK after removing", bob.one_time_keys());
|
|
||||||
}
|
|
||||||
|
|
||||||
main();
|
|
||||||
</script>
|
|
||||||
</body>
|
|
||||||
</html>
|
|
||||||
|
|
|
@ -1,62 +0,0 @@
|
||||||
<!DOCTYPE html>
|
|
||||||
<html>
|
|
||||||
<head>
|
|
||||||
<meta charset="utf-8">
|
|
||||||
</head>
|
|
||||||
<body>
|
|
||||||
<script type="text/javascript">
|
|
||||||
if (!Math.imul) Math.imul = function(a, b) {
|
|
||||||
var aHi = (a >>> 16) & 0xffff;
|
|
||||||
var aLo = a & 0xffff;
|
|
||||||
var bHi = (b >>> 16) & 0xffff;
|
|
||||||
var bLo = b & 0xffff;
|
|
||||||
// the shift by 0 fixes the sign on the high part
|
|
||||||
// the final |0 converts the unsigned value into a signed value
|
|
||||||
return ((aLo * bLo) + (((aHi * bLo + aLo * bHi) << 16) >>> 0) | 0);
|
|
||||||
};
|
|
||||||
|
|
||||||
if (!Math.clz32) Math.clz32 = (function(log, LN2){
|
|
||||||
return function(x) {
|
|
||||||
// Let n be ToUint32(x).
|
|
||||||
// Let p be the number of leading zero bits in
|
|
||||||
// the 32-bit binary representation of n.
|
|
||||||
// Return p.
|
|
||||||
var asUint = x >>> 0;
|
|
||||||
if (asUint === 0) {
|
|
||||||
return 32;
|
|
||||||
}
|
|
||||||
return 31 - (log(asUint) / LN2 | 0) |0; // the "| 0" acts like Math.floor
|
|
||||||
};
|
|
||||||
})(Math.log, Math.LN2);
|
|
||||||
</script>
|
|
||||||
<!-- removing this line will make it work -->
|
|
||||||
<script src="https://dl.dropboxusercontent.com/s/r55397ld512etib/EncoderDecoderTogether.min.js?dl=0" nomodule="" type="text/javascript"></script>
|
|
||||||
<script src="https://cdn.jsdelivr.net/npm/promise-polyfill@8/dist/polyfill.min.js"></script>
|
|
||||||
<script type="text/javascript" src="../lib/olm/olm_legacy.js"></script>
|
|
||||||
<script type="text/javascript">
|
|
||||||
window.Olm.init().then(function() {
|
|
||||||
const bytes = [
|
|
||||||
34, 123, 54, 9, 124, 89, 230, 120,
|
|
||||||
43, 232, 19, 78, 129, 170, 255, 5,
|
|
||||||
90, 143, 56, 99, 101, 140, 240, 3,
|
|
||||||
7, 121, 41, 22, 67, 231, 85, 32
|
|
||||||
];
|
|
||||||
var privKey = new Uint8Array(32);
|
|
||||||
for (var i = bytes.length - 1; i >= 0; i--) {
|
|
||||||
privKey[i] = bytes[i];
|
|
||||||
}
|
|
||||||
console.log("privKey", privKey);
|
|
||||||
const decryption = new window.Olm.PkDecryption();
|
|
||||||
let backupPubKey;
|
|
||||||
try {
|
|
||||||
backupPubKey = decryption.init_with_private_key(privKey);
|
|
||||||
console.log("backupPubKey", backupPubKey.length, backupPubKey);
|
|
||||||
} catch (e) {
|
|
||||||
decryption.free();
|
|
||||||
throw e;
|
|
||||||
}
|
|
||||||
});
|
|
||||||
</script>
|
|
||||||
</body>
|
|
||||||
</html>
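The page above only derives the backup public key; a full round trip could presumably be added with the matching PkEncryption API from the same olm bindings (a sketch under that assumption, to go inside the try block; the method names set_recipient_key/encrypt and the decrypt argument order come from the olm JS bindings, not from this prototype):

// sketch: encrypt to the derived backup public key and decrypt it again
const encryption = new window.Olm.PkEncryption();
try {
    encryption.set_recipient_key(backupPubKey);
    const encrypted = encryption.encrypt("hello backup");
    const roundTrip = decryption.decrypt(encrypted.ephemeral, encrypted.mac, encrypted.ciphertext);
    console.log("roundTrip", roundTrip);
} finally {
    encryption.free();
}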
|
|
||||||
|
|
444 prototypes/promifill.js (vendored)
|
@ -1,444 +0,0 @@
|
||||||
"use strict";
|
|
||||||
|
|
||||||
function _createForOfIteratorHelper(o, allowArrayLike) { var it; if (typeof Symbol === "undefined" || o[Symbol.iterator] == null) { if (Array.isArray(o) || (it = _unsupportedIterableToArray(o)) || allowArrayLike && o && typeof o.length === "number") { if (it) o = it; var i = 0; var F = function F() {}; return { s: F, n: function n() { if (i >= o.length) return { done: true }; return { done: false, value: o[i++] }; }, e: function e(_e) { throw _e; }, f: F }; } throw new TypeError("Invalid attempt to iterate non-iterable instance.\nIn order to be iterable, non-array objects must have a [Symbol.iterator]() method."); } var normalCompletion = true, didErr = false, err; return { s: function s() { it = o[Symbol.iterator](); }, n: function n() { var step = it.next(); normalCompletion = step.done; return step; }, e: function e(_e2) { didErr = true; err = _e2; }, f: function f() { try { if (!normalCompletion && it.return != null) it.return(); } finally { if (didErr) throw err; } } }; }
|
|
||||||
|
|
||||||
function _unsupportedIterableToArray(o, minLen) { if (!o) return; if (typeof o === "string") return _arrayLikeToArray(o, minLen); var n = Object.prototype.toString.call(o).slice(8, -1); if (n === "Object" && o.constructor) n = o.constructor.name; if (n === "Map" || n === "Set") return Array.from(o); if (n === "Arguments" || /^(?:Ui|I)nt(?:8|16|32)(?:Clamped)?Array$/.test(n)) return _arrayLikeToArray(o, minLen); }
|
|
||||||
|
|
||||||
function _arrayLikeToArray(arr, len) { if (len == null || len > arr.length) len = arr.length; for (var i = 0, arr2 = new Array(len); i < len; i++) { arr2[i] = arr[i]; } return arr2; }
|
|
||||||
|
|
||||||
function _classCallCheck(instance, Constructor) { if (!(instance instanceof Constructor)) { throw new TypeError("Cannot call a class as a function"); } }
|
|
||||||
|
|
||||||
function _defineProperties(target, props) { for (var i = 0; i < props.length; i++) { var descriptor = props[i]; descriptor.enumerable = descriptor.enumerable || false; descriptor.configurable = true; if ("value" in descriptor) descriptor.writable = true; Object.defineProperty(target, descriptor.key, descriptor); } }
|
|
||||||
|
|
||||||
function _createClass(Constructor, protoProps, staticProps) { if (protoProps) _defineProperties(Constructor.prototype, protoProps); if (staticProps) _defineProperties(Constructor, staticProps); return Constructor; }
|
|
||||||
|
|
||||||
var PENDING = void 0,
|
|
||||||
FULFILLED = true,
|
|
||||||
REJECTED = false;
|
|
||||||
|
|
||||||
var Promifill = /*#__PURE__*/function () {
|
|
||||||
_createClass(Promifill, [{
|
|
||||||
key: "state",
|
|
||||||
get: function get() {
|
|
||||||
return PENDING;
|
|
||||||
}
|
|
||||||
}, {
|
|
||||||
key: "value",
|
|
||||||
get: function get() {
|
|
||||||
return void 0;
|
|
||||||
}
|
|
||||||
}, {
|
|
||||||
key: "settled",
|
|
||||||
get: function get() {
|
|
||||||
return false;
|
|
||||||
}
|
|
||||||
}]);
|
|
||||||
|
|
||||||
function Promifill(executor) {
|
|
||||||
var _this = this;
|
|
||||||
|
|
||||||
_classCallCheck(this, Promifill);
|
|
||||||
|
|
||||||
if (typeof executor != "function") {
|
|
||||||
throw new TypeError("Promise resolver ".concat(Object.prototype.toString.call(executor), " is not a function"));
|
|
||||||
}
|
|
||||||
|
|
||||||
defineProperty(this, "chain", []);
|
|
||||||
defineProperty(this, "observers", []);
|
|
||||||
var secret = [];
|
|
||||||
|
|
||||||
var resolve = function resolve(value, bypassKey) {
|
|
||||||
if (_this.settled && bypassKey !== secret) {
|
|
||||||
return;
|
|
||||||
}
|
|
||||||
|
|
||||||
defineProperty(_this, "settled", true);
|
|
||||||
var then_ = value && value.then;
|
|
||||||
var thenable = typeof then_ == "function";
|
|
||||||
|
|
||||||
if (thenable) {
|
|
||||||
defineProperty(value, "preventThrow", true);
|
|
||||||
}
|
|
||||||
|
|
||||||
if (thenable && value.state === PENDING) {
|
|
||||||
then_.call(value, function (v) {
|
|
||||||
return resolve(v, secret);
|
|
||||||
}, function (r) {
|
|
||||||
return reject(r, secret);
|
|
||||||
});
|
|
||||||
} else {
|
|
||||||
defineProperty(_this, "value", thenable ? value.value : value);
|
|
||||||
defineProperty(_this, "state", thenable ? value.state : FULFILLED);
|
|
||||||
schedule(_this.observers.map(function (observer) {
|
|
||||||
return {
|
|
||||||
handler: _this.state === FULFILLED ? observer.onfulfill : observer.onreject,
|
|
||||||
value: _this.value
|
|
||||||
};
|
|
||||||
}));
|
|
||||||
|
|
||||||
if (_this.state === REJECTED) {
|
|
||||||
raiseUnhandledPromiseRejectionException(_this.value, _this);
|
|
||||||
}
|
|
||||||
}
|
|
||||||
};
|
|
||||||
|
|
||||||
var reject = function reject(reason, bypassKey) {
|
|
||||||
if (_this.settled && bypassKey !== secret) {
|
|
||||||
return;
|
|
||||||
}
|
|
||||||
|
|
||||||
defineProperty(_this, "settled", true);
|
|
||||||
defineProperty(_this, "value", reason);
|
|
||||||
defineProperty(_this, "state", REJECTED);
|
|
||||||
schedule(_this.observers.map(function (observer) {
|
|
||||||
return {
|
|
||||||
handler: observer.onreject,
|
|
||||||
value: _this.value
|
|
||||||
};
|
|
||||||
}));
|
|
||||||
raiseUnhandledPromiseRejectionException(_this.value, _this);
|
|
||||||
};
|
|
||||||
|
|
||||||
try {
|
|
||||||
executor(resolve, reject);
|
|
||||||
} catch (error) {
|
|
||||||
reject(error);
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
_createClass(Promifill, [{
|
|
||||||
key: "then",
|
|
||||||
value: function then(onfulfill, onreject) {
|
|
||||||
var _this2 = this;
|
|
||||||
|
|
||||||
var chainedPromise = new this.constructor(function (resolve, reject) {
|
|
||||||
var internalOnfulfill = function internalOnfulfill(value) {
|
|
||||||
try {
|
|
||||||
resolve(typeof onfulfill == "function" ? onfulfill(value) : value);
|
|
||||||
} catch (error) {
|
|
||||||
reject(error);
|
|
||||||
}
|
|
||||||
};
|
|
||||||
|
|
||||||
var internalOnreject = function internalOnreject(reason) {
|
|
||||||
try {
|
|
||||||
if (typeof onreject == "function") {
|
|
||||||
resolve(onreject(reason));
|
|
||||||
} else {
|
|
||||||
reject(reason);
|
|
||||||
}
|
|
||||||
} catch (error) {
|
|
||||||
reject(error);
|
|
||||||
}
|
|
||||||
};
|
|
||||||
|
|
||||||
if (_this2.state === PENDING) {
|
|
||||||
_this2.observers.push({
|
|
||||||
onfulfill: internalOnfulfill,
|
|
||||||
onreject: internalOnreject
|
|
||||||
});
|
|
||||||
} else {
|
|
||||||
schedule([{
|
|
||||||
handler: _this2.state === FULFILLED ? internalOnfulfill : internalOnreject,
|
|
||||||
value: _this2.value
|
|
||||||
}]);
|
|
||||||
}
|
|
||||||
});
|
|
||||||
this.chain.push(chainedPromise);
|
|
||||||
return chainedPromise;
|
|
||||||
}
|
|
||||||
}, {
|
|
||||||
key: "catch",
|
|
||||||
value: function _catch(onreject) {
|
|
||||||
return this.then(null, onreject);
|
|
||||||
}
|
|
||||||
}, {
|
|
||||||
key: "finally",
|
|
||||||
value: function _finally(oncomplete) {
|
|
||||||
var _this3 = this;
|
|
||||||
|
|
||||||
var chainedPromise = new this.constructor(function (resolve, reject) {
|
|
||||||
var internalOncomplete = function internalOncomplete() {
|
|
||||||
try {
|
|
||||||
oncomplete();
|
|
||||||
|
|
||||||
if (_this3.state === FULFILLED) {
|
|
||||||
resolve(_this3.value);
|
|
||||||
} else {
|
|
||||||
reject(_this3.value);
|
|
||||||
}
|
|
||||||
} catch (error) {
|
|
||||||
reject(error);
|
|
||||||
}
|
|
||||||
};
|
|
||||||
|
|
||||||
if (_this3.state === PENDING) {
|
|
||||||
_this3.observers.push({
|
|
||||||
onfulfill: internalOncomplete,
|
|
||||||
onreject: internalOncomplete
|
|
||||||
});
|
|
||||||
} else {
|
|
||||||
schedule([{
|
|
||||||
handler: internalOncomplete
|
|
||||||
}]);
|
|
||||||
}
|
|
||||||
});
|
|
||||||
this.chain.push(chainedPromise);
|
|
||||||
return chainedPromise;
|
|
||||||
}
|
|
||||||
}], [{
|
|
||||||
key: "resolve",
|
|
||||||
value: function resolve(value) {
|
|
||||||
return value && value.constructor === Promifill ? value : new Promifill(function (resolve) {
|
|
||||||
resolve(value);
|
|
||||||
});
|
|
||||||
}
|
|
||||||
}, {
|
|
||||||
key: "reject",
|
|
||||||
value: function reject(reason) {
|
|
||||||
return new Promifill(function (_, reject) {
|
|
||||||
reject(reason);
|
|
||||||
});
|
|
||||||
}
|
|
||||||
}, {
|
|
||||||
key: "all",
|
|
||||||
value: function all(iterable) {
|
|
||||||
return new Promifill(function (resolve, reject) {
|
|
||||||
validateIterable(iterable);
|
|
||||||
var iterableSize = 0;
|
|
||||||
var values = [];
|
|
||||||
|
|
||||||
if (isEmptyIterable(iterable)) {
|
|
||||||
return resolve(values);
|
|
||||||
}
|
|
||||||
|
|
||||||
var add = function add(value, index) {
|
|
||||||
values[index] = value;
|
|
||||||
|
|
||||||
if (values.filter(function () {
|
|
||||||
return true;
|
|
||||||
}).length === iterableSize) {
|
|
||||||
resolve(values);
|
|
||||||
}
|
|
||||||
};
|
|
||||||
|
|
||||||
var _iterator = _createForOfIteratorHelper(iterable),
|
|
||||||
_step;
|
|
||||||
|
|
||||||
try {
|
|
||||||
for (_iterator.s(); !(_step = _iterator.n()).done;) {
|
|
||||||
var item = _step.value;
|
|
||||||
|
|
||||||
(function (entry, index) {
|
|
||||||
Promifill.resolve(entry).then(function (value) {
|
|
||||||
return add(value, index);
|
|
||||||
}, reject);
|
|
||||||
})(item, iterableSize++);
|
|
||||||
}
|
|
||||||
} catch (err) {
|
|
||||||
_iterator.e(err);
|
|
||||||
} finally {
|
|
||||||
_iterator.f();
|
|
||||||
}
|
|
||||||
});
|
|
||||||
}
|
|
||||||
}, {
|
|
||||||
key: "race",
|
|
||||||
value: function race(iterable) {
|
|
||||||
return new Promifill(function (resolve, reject) {
|
|
||||||
validateIterable(iterable);
|
|
||||||
|
|
||||||
if (isEmptyIterable(iterable)) {
|
|
||||||
return;
|
|
||||||
}
|
|
||||||
|
|
||||||
var _iterator2 = _createForOfIteratorHelper(iterable),
|
|
||||||
_step2;
|
|
||||||
|
|
||||||
try {
|
|
||||||
for (_iterator2.s(); !(_step2 = _iterator2.n()).done;) {
|
|
||||||
var entry = _step2.value;
|
|
||||||
Promifill.resolve(entry).then(resolve, reject);
|
|
||||||
}
|
|
||||||
} catch (err) {
|
|
||||||
_iterator2.e(err);
|
|
||||||
} finally {
|
|
||||||
_iterator2.f();
|
|
||||||
}
|
|
||||||
});
|
|
||||||
}
|
|
||||||
}, {
|
|
||||||
key: "flushQueue",
|
|
||||||
value: function flushQueue() {
|
|
||||||
console.log("running promise sync by flushing queue");
|
|
||||||
schedule.flushQueue();
|
|
||||||
}
|
|
||||||
}]);
|
|
||||||
|
|
||||||
return Promifill;
|
|
||||||
}();
|
|
||||||
|
|
||||||
var defineProperty = function defineProperty(obj, propName, propValue) {
|
|
||||||
Object.defineProperty(obj, propName, {
|
|
||||||
value: propValue
|
|
||||||
});
|
|
||||||
};
|
|
||||||
|
|
||||||
var defer = function defer(handler) {
|
|
||||||
return function () {
|
|
||||||
for (var _len = arguments.length, args = new Array(_len), _key = 0; _key < _len; _key++) {
|
|
||||||
args[_key] = arguments[_key];
|
|
||||||
}
|
|
||||||
|
|
||||||
setTimeout.apply(void 0, [handler, 0].concat(args));
|
|
||||||
};
|
|
||||||
};
|
|
||||||
|
|
||||||
var thrower = function thrower(error) {
|
|
||||||
throw error instanceof Error ? error : new Error(error);
|
|
||||||
};
|
|
||||||
|
|
||||||
var raiseUnhandledPromiseRejectionException = defer(function (error, promise) {
|
|
||||||
if (promise.preventThrow || promise.chain.length > 0) {
|
|
||||||
return;
|
|
||||||
}
|
|
||||||
|
|
||||||
thrower(error);
|
|
||||||
});
|
|
||||||
|
|
||||||
var MutationObserverStrategy = /*#__PURE__*/function () {
|
|
||||||
function MutationObserverStrategy(handler) {
|
|
||||||
_classCallCheck(this, MutationObserverStrategy);
|
|
||||||
|
|
||||||
var observer = new MutationObserver(handler);
|
|
||||||
var node = this.node = document.createTextNode("");
|
|
||||||
observer.observe(node, {
|
|
||||||
characterData: true
|
|
||||||
});
|
|
||||||
}
|
|
||||||
|
|
||||||
_createClass(MutationObserverStrategy, [{
|
|
||||||
key: "trigger",
|
|
||||||
value: function trigger() {
|
|
||||||
this.node.data = this.node.data === 1 ? 0 : 1;
|
|
||||||
}
|
|
||||||
}]);
|
|
||||||
|
|
||||||
return MutationObserverStrategy;
|
|
||||||
}();
|
|
||||||
|
|
||||||
var NextTickStrategy = /*#__PURE__*/function () {
|
|
||||||
function NextTickStrategy(handler) {
|
|
||||||
_classCallCheck(this, NextTickStrategy);
|
|
||||||
|
|
||||||
this.scheduleNextTick = function () {
|
|
||||||
return process.nextTick(handler);
|
|
||||||
};
|
|
||||||
}
|
|
||||||
|
|
||||||
_createClass(NextTickStrategy, [{
|
|
||||||
key: "trigger",
|
|
||||||
value: function trigger() {
|
|
||||||
this.scheduleNextTick();
|
|
||||||
}
|
|
||||||
}]);
|
|
||||||
|
|
||||||
return NextTickStrategy;
|
|
||||||
}();
|
|
||||||
|
|
||||||
var BetterThanNothingStrategy = /*#__PURE__*/function () {
|
|
||||||
function BetterThanNothingStrategy(handler) {
|
|
||||||
_classCallCheck(this, BetterThanNothingStrategy);
|
|
||||||
|
|
||||||
this.scheduleAsap = function () {
|
|
||||||
return setTimeout(handler, 0);
|
|
||||||
};
|
|
||||||
}
|
|
||||||
|
|
||||||
_createClass(BetterThanNothingStrategy, [{
|
|
||||||
key: "trigger",
|
|
||||||
value: function trigger() {
|
|
||||||
this.scheduleAsap();
|
|
||||||
}
|
|
||||||
}]);
|
|
||||||
|
|
||||||
return BetterThanNothingStrategy;
|
|
||||||
}();
|
|
||||||
|
|
||||||
var getStrategy = function getStrategy() {
|
|
||||||
if (typeof window != "undefined" && typeof window.MutationObserver == "function") {
|
|
||||||
return MutationObserverStrategy;
|
|
||||||
}
|
|
||||||
|
|
||||||
if (typeof global != "undefined" && typeof process != "undefined" && typeof process.nextTick == "function") {
|
|
||||||
return NextTickStrategy;
|
|
||||||
}
|
|
||||||
|
|
||||||
return BetterThanNothingStrategy;
|
|
||||||
};
|
|
||||||
|
|
||||||
var schedule = function () {
|
|
||||||
var microtasks = [];
|
|
||||||
|
|
||||||
var run = function run() {
|
|
||||||
var handler, value;
|
|
||||||
|
|
||||||
while (microtasks.length > 0 && (_microtasks$shift = microtasks.shift(), handler = _microtasks$shift.handler, value = _microtasks$shift.value, _microtasks$shift)) {
|
|
||||||
var _microtasks$shift;
|
|
||||||
console.log("running handler with", value);
|
|
||||||
handler(value);
|
|
||||||
}
|
|
||||||
};
|
|
||||||
|
|
||||||
var Strategy = getStrategy();
|
|
||||||
var ctrl = new Strategy(run);
|
|
||||||
|
|
||||||
var scheduleFn = function scheduleFn(observers) {
|
|
||||||
if (observers.length == 0) {
|
|
||||||
return;
|
|
||||||
}
|
|
||||||
|
|
||||||
microtasks = microtasks.concat(observers);
|
|
||||||
observers.length = 0;
|
|
||||||
ctrl.trigger();
|
|
||||||
};
|
|
||||||
|
|
||||||
scheduleFn.flushQueue = function () {
|
|
||||||
run();
|
|
||||||
};
|
|
||||||
|
|
||||||
return scheduleFn;
|
|
||||||
}();
|
|
||||||
|
|
||||||
var isIterable = function isIterable(subject) {
|
|
||||||
return subject != null && typeof subject[Symbol.iterator] == "function";
|
|
||||||
};
|
|
||||||
|
|
||||||
var validateIterable = function validateIterable(subject) {
|
|
||||||
if (isIterable(subject)) {
|
|
||||||
return;
|
|
||||||
}
|
|
||||||
|
|
||||||
throw new TypeError("Cannot read property 'Symbol(Symbol.iterator)' of ".concat(Object.prototype.toString.call(subject), "."));
|
|
||||||
};
|
|
||||||
|
|
||||||
var isEmptyIterable = function isEmptyIterable(subject) {
|
|
||||||
var _iterator3 = _createForOfIteratorHelper(subject),
|
|
||||||
_step3;
|
|
||||||
|
|
||||||
try {
|
|
||||||
for (_iterator3.s(); !(_step3 = _iterator3.n()).done;) {
|
|
||||||
var _ = _step3.value;
|
|
||||||
// eslint-disable-line no-unused-vars
|
|
||||||
return false;
|
|
||||||
}
|
|
||||||
} catch (err) {
|
|
||||||
_iterator3.e(err);
|
|
||||||
} finally {
|
|
||||||
_iterator3.f();
|
|
||||||
}
|
|
||||||
|
|
||||||
return true;
|
|
||||||
};
|
|
||||||
|
|
||||||
window.Promifill = Promifill;
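A minimal usage sketch of the polyfill above (assuming it has been loaded so window.Promifill is defined): it is constructed like a native Promise, and the extra flushQueue() hook drains any scheduled handlers synchronously.

// sketch: Promifill as a drop-in Promise, plus the synchronous flush hook
const p = new Promifill(function(resolve) {
    resolve(42);
});
p.then(function(value) {
    console.log("resolved with", value);
});
// unlike native promises, pending handlers can be run on demand:
Promifill.flushQueue();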
|
|
|
@ -1,23 +0,0 @@
|
||||||
{
|
|
||||||
"name": "foo",
|
|
||||||
"version": "1.0.0",
|
|
||||||
"description": "",
|
|
||||||
"main": "index.js",
|
|
||||||
"scripts": {
|
|
||||||
"test": "echo \"Error: no test specified\" && exit 1"
|
|
||||||
},
|
|
||||||
"author": "",
|
|
||||||
"license": "ISC",
|
|
||||||
"devDependencies": {
|
|
||||||
"@babel/core": "^7.11.1",
|
|
||||||
"@babel/preset-env": "^7.11.0",
|
|
||||||
"@rollup/plugin-babel": "^5.1.0",
|
|
||||||
"@rollup/plugin-commonjs": "^15.0.0",
|
|
||||||
"@rollup/plugin-multi-entry": "^4.0.0",
|
|
||||||
"@rollup/plugin-node-resolve": "^9.0.0",
|
|
||||||
"mdn-polyfills": "^5.20.0",
|
|
||||||
"regenerator-runtime": "^0.13.7",
|
|
||||||
"rollup": "^2.26.4",
|
|
||||||
"core-js": "^3.6.5"
|
|
||||||
}
|
|
||||||
}
|
|
|
@ -1,46 +0,0 @@
|
||||||
import fsRoot from "fs";
|
|
||||||
const fs = fsRoot.promises;
|
|
||||||
import { rollup } from 'rollup';
|
|
||||||
// needed for legacy bundle
|
|
||||||
import babel from '@rollup/plugin-babel';
|
|
||||||
// needed to find the polyfill modules in the main-legacy.js bundle
|
|
||||||
import { nodeResolve } from '@rollup/plugin-node-resolve';
|
|
||||||
// needed because some of the polyfills are written as commonjs modules
|
|
||||||
import commonjs from '@rollup/plugin-commonjs';
|
|
||||||
// multi-entry plugin so we can add polyfill file to main
|
|
||||||
import multi from '@rollup/plugin-multi-entry';
|
|
||||||
import removeJsComments from 'rollup-plugin-cleanup';
|
|
||||||
// replace urls of asset names with content hashed version
|
|
||||||
|
|
||||||
async function build(inputFile, outputFile) {
|
|
||||||
// compile down to whatever IE 11 needs
|
|
||||||
const babelPlugin = babel.babel({
|
|
||||||
babelHelpers: 'bundled',
|
|
||||||
exclude: '../../node_modules/**',
|
|
||||||
presets: [
|
|
||||||
[
|
|
||||||
"@babel/preset-env",
|
|
||||||
{
|
|
||||||
useBuiltIns: "entry",
|
|
||||||
corejs: "3",
|
|
||||||
targets: "IE 11"
|
|
||||||
}
|
|
||||||
]
|
|
||||||
]
|
|
||||||
});
|
|
||||||
const polyfillFile = '../../src/worker-polyfill.js';
|
|
||||||
// create js bundle
|
|
||||||
const rollupConfig = {
|
|
||||||
input: [polyfillFile, inputFile],
|
|
||||||
plugins: [multi(), commonjs(), nodeResolve(), babelPlugin, removeJsComments({comments: "none"})]
|
|
||||||
};
|
|
||||||
const bundle = await rollup(rollupConfig);
|
|
||||||
const {output} = await bundle.generate({
|
|
||||||
format: 'iife',
|
|
||||||
name: `bundle`
|
|
||||||
});
|
|
||||||
const code = output[0].code;
|
|
||||||
await fs.writeFile(outputFile, code, "utf8");
|
|
||||||
}
|
|
||||||
|
|
||||||
build(process.argv[2], process.argv[3]);
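The script takes the input entry point and the output bundle path from the command line; a hypothetical invocation (the actual file names may differ):

node build-legacy-worker.mjs ../../src/worker.js ../../target/worker-bundle.js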
|
|
File diff suppressed because it is too large
|
@ -1,51 +0,0 @@
|
||||||
<!DOCTYPE html>
|
|
||||||
<html>
|
|
||||||
<head>
|
|
||||||
<meta charset="utf-8">
|
|
||||||
</head>
|
|
||||||
<body>
|
|
||||||
<iframe id="iframe" sandbox="allow-scripts allow-downloads allow-downloads-without-user-activation"></iframe>
|
|
||||||
<script type="text/javascript">
|
|
||||||
const iframeHtml = `
|
|
||||||
<!DOCTYPE html>
|
|
||||||
<html>
|
|
||||||
<head>
|
|
||||||
<meta charset="utf-8">
|
|
||||||
</head>
|
|
||||||
<body>
|
|
||||||
<button id="download">Download</button>
|
|
||||||
<a id="link" href="#">Link to progamatically click</a>
|
|
||||||
<script type="text/javascript">
|
|
||||||
var link = document.getElementById("link");
|
|
||||||
var button = document.getElementById("download");
|
|
||||||
async function download(blob, filename) {
|
|
||||||
const slice = blob.slice(0, 1);
|
|
||||||
const reader = new FileReader();
|
|
||||||
reader.readAsArrayBuffer(slice);
|
|
||||||
const buffer = await new Promise((resolve, reject) => {
|
|
||||||
reader.addEventListener("abort", () => reject(new Error("aborted")));
|
|
||||||
reader.addEventListener("error", e => reject(e.error));
|
|
||||||
reader.addEventListener("load", e => resolve(e.target.result));
|
|
||||||
});
|
|
||||||
document.body.appendChild(document.createTextNode("buffer: " + new Uint8Array(buffer)[0] + " (" + buffer.byteLength + ")"));
|
|
||||||
var url = URL.createObjectURL(blob);
|
|
||||||
link.href = url;
|
|
||||||
link.download = filename;
|
|
||||||
link.innerText = url;
|
|
||||||
link.click();
|
|
||||||
URL.revokeObjectURL(url);
|
|
||||||
}
|
|
||||||
button.addEventListener("click", async function(event) {
|
|
||||||
try {
|
|
||||||
await download(new Blob(["a message from the iframe"], {type: "text/plain"}), "hello world.txt");
|
|
||||||
} catch (err) {
|
|
||||||
document.body.appendChild(document.createTextNode(err.toString()));
|
|
||||||
}
|
|
||||||
});
|
|
||||||
</${"script"}>
|
|
||||||
</body>
|
|
||||||
</html>`;
|
|
||||||
document.getElementById("iframe").setAttribute("srcdoc", iframeHtml);
|
|
||||||
</script>
|
|
||||||
</body>
|
|
||||||
</html>
|
|
|
@ -1,98 +0,0 @@
|
||||||
<!DOCTYPE html>
|
|
||||||
<html>
|
|
||||||
<head>
|
|
||||||
<meta charset="utf-8">
|
|
||||||
</head>
|
|
||||||
<body>
|
|
||||||
<button id="download">Download</button>
|
|
||||||
<iframe id="iframe" sandbox="allow-scripts allow-downloads allow-downloads-without-user-activation"></iframe>
|
|
||||||
<script type="text/javascript">
|
|
||||||
const iframeHtml = `
|
|
||||||
<!DOCTYPE html>
|
|
||||||
<html>
|
|
||||||
<head>
|
|
||||||
<meta charset="utf-8">
|
|
||||||
</head>
|
|
||||||
<body>
|
|
||||||
<a id="link" href="#">Link to progamatically click</a>
|
|
||||||
<script type="text/javascript">
|
|
||||||
var link = document.getElementById("link");
|
|
||||||
|
|
||||||
function downloadBlob(blob, filename) {
|
|
||||||
var url = URL.createObjectURL(blob);
|
|
||||||
link.href = url;
|
|
||||||
link.download = filename;
|
|
||||||
link.innerText = url;
|
|
||||||
link.click();
|
|
||||||
URL.revokeObjectURL(url);
|
|
||||||
}
|
|
||||||
|
|
||||||
function toBase64(buffer) {
|
|
||||||
let binaryStr = "";
|
|
||||||
const bytes = new Uint8Array(buffer);
|
|
||||||
for (let i = 0; i < bytes.byteLength; i++) {
|
|
||||||
binaryStr += String.fromCharCode(bytes[i]);
|
|
||||||
}
|
|
||||||
return window.btoa(binaryStr);
|
|
||||||
}
|
|
||||||
|
|
||||||
function downloadBuffer(buffer, mimeType, filename) {
|
|
||||||
var url = "data:" + mimeType + ";base64," + toBase64(buffer);
|
|
||||||
link.href = url;
|
|
||||||
link.download = filename;
|
|
||||||
link.innerText = url;
|
|
||||||
link.click();
|
|
||||||
}
|
|
||||||
|
|
||||||
window.addEventListener("message", async function(event) {
|
|
||||||
// if (event.origin === window.location.origin) {
|
|
||||||
if (event.data.type === "downloadBuffer") {
|
|
||||||
await downloadBuffer(event.data.buffer, event.data.mimeType, event.data.filename);
|
|
||||||
} else if (event.data.type === "downloadBlob") {
|
|
||||||
await downloadBlob(event.data.blob, event.data.filename);
|
|
||||||
}
|
|
||||||
// }
|
|
||||||
});
|
|
||||||
</${"script"}>
|
|
||||||
</body>
|
|
||||||
</html>`;
|
|
||||||
const isIOS = /iPad|iPhone|iPod/.test(navigator.platform) || (navigator.platform === 'MacIntel' && navigator.maxTouchPoints > 1) && !window.MSStream;
|
|
||||||
const iframe = document.getElementById("iframe");
|
|
||||||
const button = document.getElementById("download");
|
|
||||||
iframe.setAttribute("srcdoc", iframeHtml);
|
|
||||||
const blob = new Blob(["a message from the iframe"], {type: "text/plain"});
|
|
||||||
window.addEventListener("message", async event => {
|
|
||||||
//if (event.origin === window.location.origin) {
|
|
||||||
//}
|
|
||||||
});
|
|
||||||
button.addEventListener("click", async evt => {
|
|
||||||
if (isIOS) {
|
|
||||||
const reader = new FileReader();
|
|
||||||
const promise = new Promise((resolve, reject) => {
|
|
||||||
reader.addEventListener("error", e => reject(e.error));
|
|
||||||
reader.addEventListener("load", e => resolve(e.target.result));
|
|
||||||
});
|
|
||||||
reader.readAsArrayBuffer(blob);
|
|
||||||
let buffer = await promise;
|
|
||||||
|
|
||||||
iframe.contentWindow.postMessage({type: "downloadBuffer", mimeType: "text/plain", buffer, filename: "hello world.txt"}, "*");
|
|
||||||
} else {
|
|
||||||
iframe.contentWindow.postMessage({type: "downloadBlob", blob, filename: "hello world.txt"}, "*");
|
|
||||||
}
|
|
||||||
/*
|
|
||||||
if (window.SharedArrayBuffer) {
|
|
||||||
const srcView = new Uint8Array(buffer);
|
|
||||||
const sharedBuffer = new SharedArrayBuffer(buffer.byteLength);
|
|
||||||
const dstView = new Uint8Array(sharedBuffer);
|
|
||||||
for (var i = srcView.length - 1; i >= 0; i--) {
|
|
||||||
dstView[i] = srcView[i];
|
|
||||||
}
|
|
||||||
buffer = sharedBuffer;
|
|
||||||
}
|
|
||||||
*/
|
|
||||||
//iframe.contentWindow.postMessage({type: "downloadBlob", blob, filename: "hello world.txt"}, "*");
|
|
||||||
});
|
|
||||||
|
|
||||||
</script>
|
|
||||||
</body>
|
|
||||||
</html>
|
|
|
@ -1,38 +0,0 @@
|
||||||
<!DOCTYPE html>
|
|
||||||
<html>
|
|
||||||
<head>
|
|
||||||
<meta charset="utf-8">
|
|
||||||
</head>
|
|
||||||
<body>
|
|
||||||
<iframe id="iframe" sandbox="allow-scripts allow-downloads allow-downloads-without-user-activation"></iframe>
|
|
||||||
<script type="text/javascript">
|
|
||||||
const iframeHtml = `
|
|
||||||
<!DOCTYPE html>
|
|
||||||
<html>
|
|
||||||
<head>
|
|
||||||
<meta charset="utf-8">
|
|
||||||
</head>
|
|
||||||
<body>
|
|
||||||
<button id="download">Download</button>
|
|
||||||
<a id="link" href="#">Link to progamatically click</a>
|
|
||||||
<script type="text/javascript">
|
|
||||||
var link = document.getElementById("link");
|
|
||||||
var button = document.getElementById("download");
|
|
||||||
function download(str, filename) {
|
|
||||||
var url = "data:text/plain;base64," + btoa(str);
|
|
||||||
link.href = url;
|
|
||||||
link.download = filename;
|
|
||||||
link.innerText = url;
|
|
||||||
link.click();
|
|
||||||
URL.revokeObjectURL(url);
|
|
||||||
}
|
|
||||||
button.addEventListener("click", function(event) {
|
|
||||||
download("hello world", "hello world.txt");
|
|
||||||
});
|
|
||||||
</${"script"}>
|
|
||||||
</body>
|
|
||||||
</html>`;
|
|
||||||
document.getElementById("iframe").setAttribute("srcdoc", iframeHtml);
|
|
||||||
</script>
|
|
||||||
</body>
|
|
||||||
</html>
|
|
|
@ -1,18 +0,0 @@
|
||||||
module.exports = {
|
|
||||||
"env": {
|
|
||||||
"node": true,
|
|
||||||
"es6": true
|
|
||||||
},
|
|
||||||
"extends": "eslint:recommended",
|
|
||||||
"parserOptions": {
|
|
||||||
"ecmaVersion": 2020,
|
|
||||||
"sourceType": "module"
|
|
||||||
},
|
|
||||||
"rules": {
|
|
||||||
"no-console": "off",
|
|
||||||
"no-empty": "off",
|
|
||||||
"no-prototype-builtins": "off",
|
|
||||||
"no-unused-vars": "warn"
|
|
||||||
},
|
|
||||||
};
|
|
||||||
|
|
|
@ -1,51 +0,0 @@
|
||||||
const fs = require('fs/promises');
|
|
||||||
const path = require('path');
|
|
||||||
|
|
||||||
module.exports = function injectWebManifest(manifestFile) {
|
|
||||||
let root;
|
|
||||||
let base;
|
|
||||||
let manifestHref;
|
|
||||||
return {
|
|
||||||
name: "hydrogen:injectWebManifest",
|
|
||||||
apply: "build",
|
|
||||||
configResolved: config => {
|
|
||||||
root = config.root;
|
|
||||||
base = config.base;
|
|
||||||
},
|
|
||||||
transformIndexHtml: {
|
|
||||||
transform(html) {
|
|
||||||
return [{
|
|
||||||
tag: "link",
|
|
||||||
attrs: {rel: "manifest", href: manifestHref},
|
|
||||||
injectTo: "head"
|
|
||||||
}];
|
|
||||||
},
|
|
||||||
},
|
|
||||||
generateBundle: async function() {
|
|
||||||
const absoluteManifestFile = path.resolve(root, manifestFile);
|
|
||||||
const manifestDir = path.dirname(absoluteManifestFile);
|
|
||||||
const json = await fs.readFile(absoluteManifestFile, {encoding: "utf8"});
|
|
||||||
const manifest = JSON.parse(json);
|
|
||||||
for (const icon of manifest.icons) {
|
|
||||||
const iconFileName = path.resolve(manifestDir, icon.src);
|
|
||||||
const imgData = await fs.readFile(iconFileName);
|
|
||||||
const ref = this.emitFile({
|
|
||||||
type: "asset",
|
|
||||||
name: path.basename(iconFileName),
|
|
||||||
source: imgData
|
|
||||||
});
|
|
||||||
// we take the basename as getFileName gives the filename
|
|
||||||
// relative to the output dir, but the manifest is an asset
|
|
||||||
// just like the icon, so we assume they end up in the same dir
|
|
||||||
icon.src = path.basename(this.getFileName(ref));
|
|
||||||
}
|
|
||||||
const outputName = path.basename(absoluteManifestFile);
|
|
||||||
const manifestRef = this.emitFile({
|
|
||||||
type: "asset",
|
|
||||||
name: outputName,
|
|
||||||
source: JSON.stringify(manifest)
|
|
||||||
});
|
|
||||||
manifestHref = base + this.getFileName(manifestRef);
|
|
||||||
}
|
|
||||||
};
|
|
||||||
}
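A sketch of how a plugin like this might be registered in the Vite build config (the require path and manifest location here are assumptions for illustration, not taken from the repository):

// vite.config.cjs (sketch)
const injectWebManifest = require("./scripts/build-plugins/manifest");

module.exports = {
    plugins: [
        // emits the manifest and its icons as hashed assets and injects
        // <link rel="manifest"> into index.html at build time
        injectWebManifest("assets/manifest.json"),
    ],
};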
|
|
|
@ -1,376 +0,0 @@
|
||||||
/*
|
|
||||||
Copyright 2021 The Matrix.org Foundation C.I.C.
|
|
||||||
|
|
||||||
Licensed under the Apache License, Version 2.0 (the "License");
|
|
||||||
you may not use this file except in compliance with the License.
|
|
||||||
You may obtain a copy of the License at
|
|
||||||
|
|
||||||
http://www.apache.org/licenses/LICENSE-2.0
|
|
||||||
|
|
||||||
Unless required by applicable law or agreed to in writing, software
|
|
||||||
distributed under the License is distributed on an "AS IS" BASIS,
|
|
||||||
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
|
||||||
See the License for the specific language governing permissions and
|
|
||||||
limitations under the License.
|
|
||||||
*/
|
|
||||||
const path = require('path').posix;
|
|
||||||
const {optimize} = require('svgo');
|
|
||||||
|
|
||||||
async function readCSSSource(location) {
|
|
||||||
const fs = require("fs").promises;
|
|
||||||
const resolvedLocation = path.resolve(__dirname, "../../", `${location}/theme.css`);
|
|
||||||
const data = await fs.readFile(resolvedLocation);
|
|
||||||
return data;
|
|
||||||
}
|
|
||||||
|
|
||||||
function getRootSectionWithVariables(variables) {
|
|
||||||
return `:root{\n${Object.entries(variables).reduce((acc, [key, value]) => acc + `--${key}: ${value};\n`, "")} }\n\n`;
|
|
||||||
}
|
|
||||||
|
|
||||||
function appendVariablesToCSS(variables, cssSource) {
|
|
||||||
return cssSource + getRootSectionWithVariables(variables);
|
|
||||||
}
|
|
||||||
|
|
||||||
function addThemesToConfig(bundle, manifestLocations, defaultThemes) {
|
|
||||||
for (const [fileName, info] of Object.entries(bundle)) {
|
|
||||||
if (fileName === "config.json") {
|
|
||||||
const source = new TextDecoder().decode(info.source);
|
|
||||||
const config = JSON.parse(source);
|
|
||||||
config["themeManifests"] = manifestLocations;
|
|
||||||
config["defaultTheme"] = defaultThemes;
|
|
||||||
info.source = new TextEncoder().encode(JSON.stringify(config, undefined, 2));
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
/**
|
|
||||||
* Returns an object where keys are the svg file names and the values
|
|
||||||
* are the svg code (optimized)
|
|
||||||
* @param {*} icons Object where keys are css variable names and values are locations of the svg
|
|
||||||
* @param {*} manifestLocation Location of manifest used for resolving path
|
|
||||||
*/
|
|
||||||
async function generateIconSourceMap(icons, manifestLocation) {
|
|
||||||
    const sources = {};
    const fileNames = [];
    const promises = [];
    const fs = require("fs").promises;
    for (const icon of Object.values(icons)) {
        const [location] = icon.split("?");
        // resolve location against manifestLocation
        const resolvedLocation = path.resolve(manifestLocation, location);
        const iconData = fs.readFile(resolvedLocation);
        promises.push(iconData);
        const fileName = path.basename(resolvedLocation);
        fileNames.push(fileName);
    }
    const results = await Promise.all(promises);
    for (let i = 0; i < results.length; ++i) {
        const svgString = results[i].toString();
        const result = optimize(svgString, {
            plugins: [
                {
                    name: "preset-default",
                    params: {
                        overrides: { convertColors: false, },
                    },
                },
            ],
        });
        const optimizedSvgString = result.data;
        sources[fileNames[i]] = optimizedSvgString;
    }
    return sources;
}

/**
 * Returns a mapping from location (of manifest file) to an array containing all the chunks (of css files) generated from that location.
 * To understand what chunk means in this context, see https://rollupjs.org/guide/en/#generatebundle.
 * @param {*} bundle Mapping from fileName to AssetInfo | ChunkInfo
 */
function getMappingFromLocationToChunkArray(bundle) {
    const chunkMap = new Map();
    for (const [fileName, info] of Object.entries(bundle)) {
        if (!fileName.endsWith(".css") || info.type === "asset" || info.facadeModuleId?.includes("type=runtime")) {
            continue;
        }
        const location = info.facadeModuleId?.match(/(.+)\/.+\.css/)?.[1];
        if (!location) {
            throw new Error("Cannot find location of css chunk!");
        }
        const array = chunkMap.get(location);
        if (!array) {
            chunkMap.set(location, [info]);
        }
        else {
            array.push(info);
        }
    }
    return chunkMap;
}

/**
 * Returns a mapping from unhashed file name (of css files) to AssetInfo.
 * To understand what AssetInfo means in this context, see https://rollupjs.org/guide/en/#generatebundle.
 * @param {*} bundle Mapping from fileName to AssetInfo | ChunkInfo
 */
function getMappingFromFileNameToAssetInfo(bundle) {
    const assetMap = new Map();
    for (const [fileName, info] of Object.entries(bundle)) {
        if (!fileName.endsWith(".css")) {
            continue;
        }
        if (info.type === "asset") {
            /**
             * So this is the css assetInfo that contains the asset hashed file name.
             * We'll store it in a separate map indexed via fileName (unhashed) to avoid
             * searching through the bundle array later.
             */
            assetMap.set(info.name, info);
        }
    }
    return assetMap;
}

/**
 * Returns a mapping from location (of manifest file) to ChunkInfo of the runtime css asset
 * To understand what ChunkInfo means in this context, see https://rollupjs.org/guide/en/#generatebundle.
 * @param {*} bundle Mapping from fileName to AssetInfo | ChunkInfo
 */
function getMappingFromLocationToRuntimeChunk(bundle) {
    let runtimeThemeChunkMap = new Map();
    for (const [fileName, info] of Object.entries(bundle)) {
        if (!fileName.endsWith(".css") || info.type === "asset") {
            continue;
        }
        const location = info.facadeModuleId?.match(/(.+)\/.+\.css/)?.[1];
        if (!location) {
            throw new Error("Cannot find location of css chunk!");
        }
        if (info.facadeModuleId?.includes("type=runtime")) {
            /**
             * We have a separate field in manifest.source just for the runtime theme,
             * so store this separately.
             */
            runtimeThemeChunkMap.set(location, info);
        }
    }
    return runtimeThemeChunkMap;
}

module.exports = function buildThemes(options) {
    let manifest, variants, defaultDark, defaultLight, defaultThemes = {};
    let isDevelopment = false;
    const virtualModuleId = '@theme/'
    const resolvedVirtualModuleId = '\0' + virtualModuleId;
    const themeToManifestLocation = new Map();

    return {
        name: "build-themes",
        enforce: "pre",

        configResolved(config) {
            if (config.command === "serve") {
                isDevelopment = true;
            }
        },

        async buildStart() {
            const { themeConfig } = options;
            for (const location of themeConfig.themes) {
                manifest = require(`${location}/manifest.json`);
                const themeCollectionId = manifest.id;
                themeToManifestLocation.set(themeCollectionId, location);
                variants = manifest.values.variants;
                for (const [variant, details] of Object.entries(variants)) {
                    const fileName = `theme-${themeCollectionId}-${variant}.css`;
                    if (themeCollectionId === themeConfig.default && details.default) {
                        // This is the default theme, stash the file name for later
                        if (details.dark) {
                            defaultDark = fileName;
                            defaultThemes["dark"] = `${themeCollectionId}-${variant}`;
                        }
                        else {
                            defaultLight = fileName;
                            defaultThemes["light"] = `${themeCollectionId}-${variant}`;
                        }
                    }
                    // emit the css as built theme bundle
                    if (!isDevelopment) {
                        this.emitFile({ type: "chunk", id: `${location}/theme.css?variant=${variant}${details.dark ? "&dark=true" : ""}`, fileName, });
                    }
                }
                // emit the css as runtime theme bundle
                if (!isDevelopment) {
                    this.emitFile({ type: "chunk", id: `${location}/theme.css?type=runtime`, fileName: `theme-${themeCollectionId}-runtime.css`, });
                }
            }
        },

        resolveId(id) {
            if (id.startsWith(virtualModuleId)) {
                return '\0' + id;
            }
        },

        async load(id) {
            if (isDevelopment) {
                /**
                 * To load the theme during dev, we need to take a different approach because emitFile is not supported in dev.
                 * We solve this by resolving virtual file "@theme/name/variant" into the necessary css import.
                 * This virtual file import is removed when hydrogen is built (see transform hook).
                 */
                if (id.startsWith(resolvedVirtualModuleId)) {
                    let [theme, variant, file] = id.substr(resolvedVirtualModuleId.length).split("/");
                    if (theme === "default") {
                        theme = options.themeConfig.default;
                    }
                    const location = themeToManifestLocation.get(theme);
                    const manifest = require(`${location}/manifest.json`);
                    const variants = manifest.values.variants;
                    if (!variant || variant === "default") {
                        // choose the first default variant for now
                        // this will need to support light/dark variants as well
                        variant = Object.keys(variants).find(variantName => variants[variantName].default);
                    }
                    if (!file) {
                        file = "index.js";
                    }
                    switch (file) {
                        case "index.js": {
                            const isDark = variants[variant].dark;
                            return `import "${path.resolve(`${location}/theme.css`)}${isDark? "?dark=true": ""}";` +
                                `import "@theme/${theme}/${variant}/variables.css"`;
                        }
                        case "variables.css": {
                            const variables = variants[variant].variables;
                            const css = getRootSectionWithVariables(variables);
                            return css;
                        }
                    }
                }
            }
            else {
                const result = id.match(/(.+)\/theme.css\?variant=([^&]+)/);
                if (result) {
                    const [, location, variant] = result;
                    const cssSource = await readCSSSource(location);
                    const config = variants[variant];
                    return appendVariablesToCSS(config.variables, cssSource);
                }
                return null;
            }
        },

        transform(code, id) {
            if (isDevelopment) {
                return;
            }
            /**
             * Removes develop-only script tag; this cannot be done in transformIndexHtml hook because
             * by the time that hook runs, the import is added to the bundled js file which would
             * result in a runtime error.
             */
            const devScriptTag =
                /<script type="module"> import "@theme\/.+"; <\/script>/;
            if (id.endsWith("index.html")) {
                const htmlWithoutDevScript = code.replace(devScriptTag, "");
                return htmlWithoutDevScript;
            }
        },

        transformIndexHtml(_, ctx) {
            if (isDevelopment) {
                // Don't add default stylesheets to index.html on dev
                return;
            }
            let darkThemeLocation, lightThemeLocation;
            for (const [, bundle] of Object.entries(ctx.bundle)) {
                if (bundle.name === defaultDark) {
                    darkThemeLocation = bundle.fileName;
                }
                if (bundle.name === defaultLight) {
                    lightThemeLocation = bundle.fileName;
                }
            }
            return [
                {
                    tag: "link",
                    attrs: {
                        rel: "stylesheet",
                        type: "text/css",
                        media: "(prefers-color-scheme: dark)",
                        href: `./${darkThemeLocation}`,
                        class: "theme",
                    }
                },
                {
                    tag: "link",
                    attrs: {
                        rel: "stylesheet",
                        type: "text/css",
                        media: "(prefers-color-scheme: light)",
                        href: `./${lightThemeLocation}`,
                        class: "theme",
                    }
                },
            ];
        },

        async generateBundle(_, bundle) {
            const assetMap = getMappingFromFileNameToAssetInfo(bundle);
            const chunkMap = getMappingFromLocationToChunkArray(bundle);
            const runtimeThemeChunkMap = getMappingFromLocationToRuntimeChunk(bundle);
            const manifestLocations = [];
            // Location of the directory containing manifest relative to the root of the build output
            const manifestLocation = "assets";
            for (const [location, chunkArray] of chunkMap) {
                const manifest = require(`${location}/manifest.json`);
                const compiledVariables = options.compiledVariables.get(location);
                const derivedVariables = compiledVariables["derived-variables"];
                const icon = compiledVariables["icon"];
                const builtAssets = {};
                let themeKey;
                for (const chunk of chunkArray) {
                    const [, name, variant] = chunk.fileName.match(/theme-(.+)-(.+)\.css/);
                    themeKey = name;
                    const locationRelativeToBuildRoot = assetMap.get(chunk.fileName).fileName;
                    const locationRelativeToManifest = path.relative(manifestLocation, locationRelativeToBuildRoot);
                    builtAssets[`${name}-${variant}`] = locationRelativeToManifest;
                }
                // Emit the base svg icons as asset
                const nameToAssetHashedLocation = [];
                const nameToSource = await generateIconSourceMap(icon, location);
                for (const [name, source] of Object.entries(nameToSource)) {
                    const ref = this.emitFile({ type: "asset", name, source });
                    const assetHashedName = this.getFileName(ref);
                    nameToAssetHashedLocation[name] = assetHashedName;
                }
                // Update icon section in output manifest with paths to the icon in build output
                for (const [variable, location] of Object.entries(icon)) {
                    const [locationWithoutQueryParameters, queryParameters] = location.split("?");
                    const name = path.basename(locationWithoutQueryParameters);
                    const locationRelativeToBuildRoot = nameToAssetHashedLocation[name];
                    const locationRelativeToManifest = path.relative(manifestLocation, locationRelativeToBuildRoot);
                    icon[variable] = `${locationRelativeToManifest}?${queryParameters}`;
                }
                const runtimeThemeChunk = runtimeThemeChunkMap.get(location);
                const runtimeAssetLocation = path.relative(manifestLocation, assetMap.get(runtimeThemeChunk.fileName).fileName);
                manifest.source = {
                    "built-assets": builtAssets,
                    "runtime-asset": runtimeAssetLocation,
                    "derived-variables": derivedVariables,
                    "icon": icon,
                };
                const name = `theme-${themeKey}.json`;
                manifestLocations.push(`${manifestLocation}/${name}`);
                this.emitFile({
                    type: "asset",
                    name,
                    source: JSON.stringify(manifest),
                });
            }
            addThemesToConfig(bundle, manifestLocations, defaultThemes);
        },
    }
}
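
Not part of this diff: a minimal sketch of how a plugin like buildThemes above could be wired into a Vite config. The option names (themeConfig.themes, themeConfig.default, compiledVariables) are the ones the plugin reads; the file paths, the theme id and the surrounding config shape are assumptions for illustration only.

// vite.config.js (hypothetical wiring)
const buildThemes = require("./build-themes-plugin"); // the module exported above; path assumed
const compiledVariables = new Map(); // expected to be filled per theme location by a separate CSS step

module.exports = {
    plugins: [
        buildThemes({
            themeConfig: {
                themes: ["./src/platform/web/ui/css/themes/element"], // assumed theme manifest location
                default: "element",                                   // assumed default theme collection id
            },
            compiledVariables, // must expose .get(location) -> {"derived-variables": ..., "icon": ...}
        }),
    ],
};
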
@@ -1,157 +0,0 @@
const fs = require('fs/promises');
const path = require('path');
const xxhash = require('xxhashjs');

function contentHash(str) {
    var hasher = new xxhash.h32(0);
    hasher.update(str);
    return hasher.digest();
}

function injectServiceWorker(swFile, findUnhashedFileNamesFromBundle, placeholdersPerChunk) {
    const swName = path.basename(swFile);
    let root;
    let version;
    let logger;

    return {
        name: "hydrogen:injectServiceWorker",
        apply: "build",
        enforce: "post",
        buildStart() {
            this.emitFile({
                type: "chunk",
                fileName: swName,
                id: swFile,
            });
        },
        configResolved: config => {
            root = config.root;
            version = JSON.parse(config.define.DEFINE_VERSION); // unquote
            logger = config.logger;
        },
        generateBundle: async function(options, bundle) {
            const otherUnhashedFiles = findUnhashedFileNamesFromBundle(bundle);
            const unhashedFilenames = [swName].concat(otherUnhashedFiles);
            const unhashedFileContentMap = unhashedFilenames.reduce((map, fileName) => {
                const chunkOrAsset = bundle[fileName];
                if (!chunkOrAsset) {
                    throw new Error("could not get content for uncached asset or chunk " + fileName);
                }
                map[fileName] = chunkOrAsset.source || chunkOrAsset.code;
                return map;
            }, {});
            const assets = Object.values(bundle);
            const hashedFileNames = assets.map(o => o.fileName).filter(fileName => !unhashedFileContentMap[fileName]);
            const globalHash = getBuildHash(hashedFileNames, unhashedFileContentMap);
            const placeholderValues = {
                DEFINE_GLOBAL_HASH: `"${globalHash}"`,
                ...getCacheFileNamePlaceholderValues(swName, unhashedFilenames, assets, placeholdersPerChunk)
            };
            replacePlaceholdersInChunks(assets, placeholdersPerChunk, placeholderValues);
            logger.info(`\nBuilt ${version} (${globalHash})`);
        }
    };
}

function getBuildHash(hashedFileNames, unhashedFileContentMap) {
    const unhashedHashes = Object.entries(unhashedFileContentMap).map(([fileName, content]) => {
        return `${fileName}-${contentHash(Buffer.from(content))}`;
    });
    const globalHashAssets = hashedFileNames.concat(unhashedHashes);
    globalHashAssets.sort();
    return contentHash(globalHashAssets.join(",")).toString();
}

const NON_PRECACHED_JS = [
    "hydrogen-legacy",
    "olm_legacy.js",
    // most environments don't need the worker
    "main.js"
];

function isPreCached(asset) {
    const {name, fileName} = asset;
    return name.endsWith(".svg") ||
        name.endsWith(".png") ||
        name.endsWith(".css") ||
        name.endsWith(".wasm") ||
        name.endsWith(".html") ||
        // the index and vendor chunks don't have an extension in `name`, so check extension on `fileName`
        fileName.endsWith(".js") && !NON_PRECACHED_JS.includes(path.basename(name));
}

function getCacheFileNamePlaceholderValues(swName, unhashedFilenames, assets) {
    const unhashedPreCachedAssets = [];
    const hashedPreCachedAssets = [];
    const hashedCachedOnRequestAssets = [];

    for (const asset of assets) {
        const {name, fileName} = asset;
        // the service worker should not be cached at all,
        // it's how updates happen
        if (fileName === swName) {
            continue;
        } else if (unhashedFilenames.includes(fileName)) {
            unhashedPreCachedAssets.push(fileName);
        } else if (isPreCached(asset)) {
            hashedPreCachedAssets.push(fileName);
        } else {
            hashedCachedOnRequestAssets.push(fileName);
        }
    }

    return {
        DEFINE_UNHASHED_PRECACHED_ASSETS: JSON.stringify(unhashedPreCachedAssets),
        DEFINE_HASHED_PRECACHED_ASSETS: JSON.stringify(hashedPreCachedAssets),
        DEFINE_HASHED_CACHED_ON_REQUEST_ASSETS: JSON.stringify(hashedCachedOnRequestAssets)
    }
}

function replacePlaceholdersInChunks(assets, placeholdersPerChunk, placeholderValues) {
    for (const [name, placeholderMap] of Object.entries(placeholdersPerChunk)) {
        const chunk = assets.find(a => a.type === "chunk" && a.name === name);
        if (!chunk) {
            throw new Error(`could not find chunk ${name} to replace placeholders`);
        }
        for (const [placeholderName, placeholderLiteral] of Object.entries(placeholderMap)) {
            const replacedValue = placeholderValues[placeholderName];
            const oldCode = chunk.code;
            chunk.code = chunk.code.replaceAll(placeholderLiteral, replacedValue);
            if (chunk.code === oldCode) {
                throw new Error(`Could not replace ${placeholderName} in ${name}, looking for literal ${placeholderLiteral}:\n${chunk.code}`);
            }
        }
    }
}

/** creates a value to be included in the `define` build settings,
 * but that can be replaced at the end of the build in certain chunks.
 * We need this for injecting the global build hash and the final
 * filenames in the service worker and index chunk.
 * These values are only known in the generateBundle step, so we
 * replace them by unique strings wrapped in a prompt call so no
 * transformation will touch them (minifying, ...) and we can do a
 * string replacement still at the end of the build. */
function definePlaceholderValue(mode, name, devValue) {
    if (mode === "production") {
        // note that `prompt(...)` will never be in the final output, it's replaced by the final value
        // once we know at the end of the build what it is and just used as a temporary value during the build
        // as something that will not be transformed.
        // I first considered Symbol but it's not inconceivable that babel would transform this.
        return `prompt(${JSON.stringify(name)})`;
    } else {
        return JSON.stringify(devValue);
    }
}

function createPlaceholderValues(mode) {
    return {
        DEFINE_GLOBAL_HASH: definePlaceholderValue(mode, "DEFINE_GLOBAL_HASH", null),
        DEFINE_UNHASHED_PRECACHED_ASSETS: definePlaceholderValue(mode, "UNHASHED_PRECACHED_ASSETS", []),
        DEFINE_HASHED_PRECACHED_ASSETS: definePlaceholderValue(mode, "HASHED_PRECACHED_ASSETS", []),
        DEFINE_HASHED_CACHED_ON_REQUEST_ASSETS: definePlaceholderValue(mode, "HASHED_CACHED_ON_REQUEST_ASSETS", []),
    };
}

module.exports = {injectServiceWorker, createPlaceholderValues};
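
Not part of this diff: a sketch of how the two exports above might cooperate in a Vite config. createPlaceholderValues(mode) produces temporary define values such as prompt("DEFINE_GLOBAL_HASH") in production builds; injectServiceWorker then string-replaces those literals in the listed chunks during generateBundle, once the global hash and final file names are known. The paths, chunk names and version value below are assumptions for illustration.

// vite.config.js (hypothetical wiring)
const {injectServiceWorker, createPlaceholderValues} = require("./service-worker-plugin"); // path assumed

module.exports = ({mode}) => {
    const placeholders = createPlaceholderValues(mode);
    return {
        define: {
            DEFINE_VERSION: JSON.stringify("0.0.0"), // assumed; unquoted again in configResolved above
            DEFINE_GLOBAL_HASH: placeholders.DEFINE_GLOBAL_HASH,
        },
        plugins: [
            injectServiceWorker("./src/sw.js", () => ["index.html"], { // sw path and unhashed files assumed
                // chunk name -> {placeholderName: placeholderLiteral}; chunk names here are assumed
                "index": {DEFINE_GLOBAL_HASH: placeholders.DEFINE_GLOBAL_HASH},
                "sw": placeholders,
            }),
        ],
    };
};
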
362  scripts/build.mjs  Normal file
@@ -0,0 +1,362 @@
/*
Copyright 2020 Bruno Windels <bruno@windels.cloud>
Copyright 2020 The Matrix.org Foundation C.I.C.

Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at

    http://www.apache.org/licenses/LICENSE-2.0

Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*/

import cheerio from "cheerio";
import fsRoot from "fs";
const fs = fsRoot.promises;
import path from "path";
import XXHash from 'xxhash';
import rollup from 'rollup';
import postcss from "postcss";
import postcssImport from "postcss-import";
import { fileURLToPath } from 'url';
import { dirname } from 'path';
import commander from "commander";
// needed for legacy bundle
import babel from '@rollup/plugin-babel';
// needed to find the polyfill modules in the main-legacy.js bundle
import { nodeResolve } from '@rollup/plugin-node-resolve';
// needed because some of the polyfills are written as commonjs modules
import commonjs from '@rollup/plugin-commonjs';
// multi-entry plugin so we can add polyfill file to main
import multi from '@rollup/plugin-multi-entry';
// replace urls of asset names with content hashed version
import postcssUrl from "postcss-url";

import cssvariables from "postcss-css-variables";
import flexbugsFixes from "postcss-flexbugs-fixes";

const PROJECT_ID = "hydrogen";
const PROJECT_SHORT_NAME = "Hydrogen";
const PROJECT_NAME = "Hydrogen Chat";

const __filename = fileURLToPath(import.meta.url);
const __dirname = dirname(__filename);
const projectDir = path.join(__dirname, "../");
const cssSrcDir = path.join(projectDir, "src/ui/web/css/");
const targetDir = path.join(projectDir, "target/");

const program = new commander.Command();
program
    .option("--legacy", "make a build for IE11")
    .option("--no-offline", "make a build without a service worker or appcache manifest")
program.parse(process.argv);
const {debug, noOffline, legacy} = program;
const offline = !noOffline;

async function build() {
    // get version number
    const version = JSON.parse(await fs.readFile(path.join(projectDir, "package.json"), "utf8")).version;

    const devHtml = await fs.readFile(path.join(projectDir, "index.html"), "utf8");
    const doc = cheerio.load(devHtml);
    const themes = [];
    findThemes(doc, themeName => {
        themes.push(themeName);
    });
    // clear target dir
    await removeDirIfExists(targetDir);
    await createDirs(targetDir, themes);
    // also creates the directories where the theme css bundles are placed in,
    // so do it first
    const themeAssets = await copyThemeAssets(themes, legacy);
    const jsBundlePath = await (legacy ? buildJsLegacy() : buildJs());
    const cssBundlePaths = await buildCssBundles(legacy ? buildCssLegacy : buildCss, themes, themeAssets);
    const assetPaths = createAssetPaths(jsBundlePath, cssBundlePaths, themeAssets);

    let manifestPath;
    if (offline) {
        manifestPath = await buildOffline(version, assetPaths);
    }
    await buildHtml(doc, version, assetPaths, manifestPath);

    console.log(`built ${PROJECT_ID}${legacy ? " legacy" : ""} ${version} successfully`);
}

function createAssetPaths(jsBundlePath, cssBundlePaths, themeAssets) {
    function trim(path) {
        if (!path.startsWith(targetDir)) {
            throw new Error("invalid target path: " + targetDir);
        }
        return path.substr(targetDir.length);
    }
    return {
        jsBundle: () => trim(jsBundlePath),
        cssMainBundle: () => trim(cssBundlePaths.main),
        cssThemeBundle: themeName => trim(cssBundlePaths.themes[themeName]),
        cssThemeBundles: () => Object.values(cssBundlePaths.themes).map(a => trim(a)),
        otherAssets: () => Object.values(themeAssets).map(a => trim(a))
    };
}

async function findThemes(doc, callback) {
    doc("link[rel~=stylesheet][title]").each((i, el) => {
        const theme = doc(el);
        const href = theme.attr("href");
        const themesPrefix = "/themes/";
        const prefixIdx = href.indexOf(themesPrefix);
        if (prefixIdx !== -1) {
            const themeNameStart = prefixIdx + themesPrefix.length;
            const themeNameEnd = href.indexOf("/", themeNameStart);
            const themeName = href.substr(themeNameStart, themeNameEnd - themeNameStart);
            callback(themeName, theme);
        }
    });
}

async function createDirs(targetDir, themes) {
    await fs.mkdir(targetDir);
    const themeDir = path.join(targetDir, "themes");
    await fs.mkdir(themeDir);
    for (const theme of themes) {
        await fs.mkdir(path.join(themeDir, theme));
    }
}

async function copyThemeAssets(themes, legacy) {
    const assets = {};
    for (const theme of themes) {
        const themeDstFolder = path.join(targetDir, `themes/${theme}`);
        const themeSrcFolder = path.join(cssSrcDir, `themes/${theme}`);
        const themeAssets = await copyFolder(themeSrcFolder, themeDstFolder, file => {
            const isUnneededFont = legacy ? file.endsWith(".woff2") : file.endsWith(".woff");
            return !file.endsWith(".css") && !isUnneededFont;
        });
        Object.assign(assets, themeAssets);
    }
    return assets;
}

async function buildHtml(doc, version, assetPaths, manifestPath) {
    // transform html file
    // change path to main.css to css bundle
    doc("link[rel=stylesheet]:not([title])").attr("href", assetPaths.cssMainBundle());
    // change paths to all theme stylesheets
    findThemes(doc, (themeName, theme) => {
        theme.attr("href", assetPaths.cssThemeBundle(themeName));
    });
    doc("script#main").replaceWith(
        `<script type="text/javascript" src="${assetPaths.jsBundle()}"></script>` +
        `<script type="text/javascript">${PROJECT_ID}Bundle.main(document.body);</script>`);
    removeOrEnableScript(doc("script#service-worker"), offline);

    const versionScript = doc("script#version");
    versionScript.attr("type", "text/javascript");
    let vSource = versionScript.contents().text();
    vSource = vSource.replace(`"%%VERSION%%"`, `"${version}"`);
    versionScript.text(vSource);

    if (offline) {
        doc("html").attr("manifest", "manifest.appcache");
        doc("head").append(`<link rel="manifest" href="${manifestPath.substr(targetDir.length)}">`);
    }
    await fs.writeFile(path.join(targetDir, "index.html"), doc.html(), "utf8");
}

async function buildJs() {
    // create js bundle
    const bundle = await rollup.rollup({input: 'src/main.js'});
    const {output} = await bundle.generate({
        format: 'iife',
        name: `${PROJECT_ID}Bundle`
    });
    const code = output[0].code;
    const bundlePath = resource(`${PROJECT_ID}.js`, code);
    await fs.writeFile(bundlePath, code, "utf8");
    return bundlePath;
}

async function buildJsLegacy() {
    // compile down to whatever IE 11 needs
    const babelPlugin = babel.babel({
        babelHelpers: 'bundled',
        exclude: 'node_modules/**',
        presets: [
            [
                "@babel/preset-env",
                {
                    useBuiltIns: "entry",
                    corejs: "3",
                    targets: "IE 11"
                }
            ]
        ]
    });
    // create js bundle
    const rollupConfig = {
        input: ['src/legacy-polyfill.js', 'src/main.js'],
        plugins: [multi(), commonjs(), nodeResolve(), babelPlugin]
    };
    const bundle = await rollup.rollup(rollupConfig);
    const {output} = await bundle.generate({
        format: 'iife',
        name: `${PROJECT_ID}Bundle`
    });
    const code = output[0].code;
    const bundlePath = resource(`${PROJECT_ID}-legacy.js`, code);
    await fs.writeFile(bundlePath, code, "utf8");
    return bundlePath;
}

async function buildOffline(version, assetPaths) {
    // write offline availability
    const offlineFiles = [
        assetPaths.jsBundle(),
        assetPaths.cssMainBundle(),
        "index.html",
        "icon-192.png",
    ].concat(assetPaths.cssThemeBundles());

    // write appcache manifest
    const manifestLines = [
        `CACHE MANIFEST`,
        `# v${version}`,
        `NETWORK`,
        `"*"`,
        `CACHE`,
    ];
    manifestLines.push(...offlineFiles);
    const manifest = manifestLines.join("\n") + "\n";
    await fs.writeFile(path.join(targetDir, "manifest.appcache"), manifest, "utf8");
    // write service worker
    let swSource = await fs.readFile(path.join(projectDir, "src/service-worker.template.js"), "utf8");
    swSource = swSource.replace(`"%%VERSION%%"`, `"${version}"`);
    swSource = swSource.replace(`"%%OFFLINE_FILES%%"`, JSON.stringify(offlineFiles));
    swSource = swSource.replace(`"%%CACHE_FILES%%"`, JSON.stringify(assetPaths.otherAssets()));
    await fs.writeFile(path.join(targetDir, "sw.js"), swSource, "utf8");
    // write web manifest
    const webManifest = {
        name: PROJECT_NAME,
        short_name: PROJECT_SHORT_NAME,
        display: "fullscreen",
        start_url: "index.html",
        icons: [{"src": "icon-192.png", "sizes": "192x192", "type": "image/png"}],
    };
    const manifestJson = JSON.stringify(webManifest);
    const manifestPath = resource("manifest.json", manifestJson);
    await fs.writeFile(manifestPath, manifestJson, "utf8");
    // copy icon
    // should this icon have a content hash as well?
    let icon = await fs.readFile(path.join(projectDir, "icon.png"));
    await fs.writeFile(path.join(targetDir, "icon-192.png"), icon);
    return manifestPath;
}

async function buildCssBundles(buildFn, themes, themeAssets) {
    const bundleCss = await buildFn(path.join(cssSrcDir, "main.css"));
    const mainDstPath = resource(`${PROJECT_ID}.css`, bundleCss);
    await fs.writeFile(mainDstPath, bundleCss, "utf8");
    const bundlePaths = {main: mainDstPath, themes: {}};
    for (const theme of themes) {
        const urlBase = path.join(targetDir, `themes/${theme}/`);
        const assetUrlMapper = ({absolutePath}) => {
            const hashedDstPath = themeAssets[absolutePath];
            if (hashedDstPath && hashedDstPath.startsWith(urlBase)) {
                return hashedDstPath.substr(urlBase.length);
            }
        };
        const themeCss = await buildFn(path.join(cssSrcDir, `themes/${theme}/theme.css`), assetUrlMapper);
        const themeDstPath = resource(`themes/${theme}/bundle.css`, themeCss);
        await fs.writeFile(themeDstPath, themeCss, "utf8");
        bundlePaths.themes[theme] = themeDstPath;
    }
    return bundlePaths;
}

async function buildCss(entryPath, urlMapper = null) {
    const preCss = await fs.readFile(entryPath, "utf8");
    const options = [postcssImport];
    if (urlMapper) {
        options.push(postcssUrl({url: urlMapper}));
    }
    const cssBundler = postcss(options);
    const result = await cssBundler.process(preCss, {from: entryPath});
    return result.css;
}

async function buildCssLegacy(entryPath, urlMapper = null) {
    const preCss = await fs.readFile(entryPath, "utf8");
    const options = [
        postcssImport,
        cssvariables(),
        flexbugsFixes()
    ];
    if (urlMapper) {
        options.push(postcssUrl({url: urlMapper}));
    }
    const cssBundler = postcss(options);
    const result = await cssBundler.process(preCss, {from: entryPath});
    return result.css;
}

function removeOrEnableScript(scriptNode, enable) {
    if (enable) {
        scriptNode.attr("type", "text/javascript");
    } else {
        scriptNode.remove();
    }
}

async function removeDirIfExists(targetDir) {
    try {
        await fs.rmdir(targetDir, {recursive: true});
    } catch (err) {
        if (err.code !== "ENOENT") {
            throw err;
        }
    }
}

async function copyFolder(srcRoot, dstRoot, filter) {
    const assetPaths = {};
    const dirEnts = await fs.readdir(srcRoot, {withFileTypes: true});
    for (const dirEnt of dirEnts) {
        const dstPath = path.join(dstRoot, dirEnt.name);
        const srcPath = path.join(srcRoot, dirEnt.name);
        if (dirEnt.isDirectory()) {
            await fs.mkdir(dstPath);
            Object.assign(assetPaths, await copyFolder(srcPath, dstPath, filter));
        } else if (dirEnt.isFile() && filter(srcPath)) {
            const content = await fs.readFile(srcPath);
            const hashedDstPath = resource(dstPath, content);
            await fs.writeFile(hashedDstPath, content);
            assetPaths[srcPath] = hashedDstPath;
        }
    }
    return assetPaths;
}

function resource(relPath, content) {
    let fullPath = relPath;
    if (!relPath.startsWith("/")) {
        fullPath = path.join(targetDir, relPath);
    }
    const hash = contentHash(Buffer.from(content));
    const dir = path.dirname(fullPath);
    const extname = path.extname(fullPath);
    const basename = path.basename(fullPath, extname);
    return path.join(dir, `${basename}-${hash}${extname}`);
}

function contentHash(str) {
    var hasher = new XXHash(0);
    hasher.update(str);
    return hasher.digest();
}

build().catch(err => console.error(err));
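
Not part of this diff: the content-hashed naming that resource() and contentHash() above implement inserts the xxhash digest of the file content between the base name and the extension, so a changed file gets a new URL while unchanged files keep a stable one. The concrete digest below is made up for illustration.

// illustrative only; the actual digest depends on the content
const cssPath = resource(`${PROJECT_ID}.css`, bundleCss);
// -> "<targetDir>/hydrogen-1273541232.css" (hypothetical digest)
const iconPath = resource(path.join(targetDir, "themes/dark/icon.svg"), svgContent);
// -> "<targetDir>/themes/dark/icon-<digest>.svg"
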
165  scripts/ci.sh
@@ -1,165 +0,0 @@
#!/bin/bash
# ci.sh: Helper script to automate deployment operations on CI/CD
# Copyright © 2022 Aravinth Manivannan <realaravinth@batsense.net>
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program.  If not, see <http://www.gnu.org/licenses/>.

set -xEeuo pipefail
#source $(pwd)/scripts/lib.sh

readonly SSH_ID_FILE=/tmp/ci-ssh-id
readonly SSH_REMOTE_NAME=origin-ssh
readonly PROJECT_ROOT=$(pwd)

match_arg() {
    if [ $1 == $2 ] || [ $1 == $3 ]
    then
        return 0
    else
        return 1
    fi
}

help() {
    cat << EOF
USAGE: ci.sh [SUBCOMMAND]
Helper script to automate deployment operations on CI/CD

Subcommands

    -c --clean                                  cleanup secrets, SSH key and other runtime data
    -i --init <SSH_PRIVATE_KEY>                 initialize environment, write SSH private to file
    -d --deploy <PAGES-SECRET> <TARGET BRANCH>  push branch to Gitea and call Pages server
    -h --help                                   print this help menu
EOF
}

# $1: SSH private key
write_ssh(){
    truncate --size 0 $SSH_ID_FILE
    echo "$1" > $SSH_ID_FILE
    chmod 600 $SSH_ID_FILE
}

set_ssh_remote() {
    http_remote_url=$(git remote get-url origin)
    remote_hostname=$(echo $http_remote_url | cut -d '/' -f 3)
    repository_owner=$(echo $http_remote_url | cut -d '/' -f 4)
    repository_name=$(echo $http_remote_url | cut -d '/' -f 5)
    ssh_remote="git@$remote_hostname:$repository_owner/$repository_name"
    ssh_remote="git@git.batsense.net:mystiq/hydrogen-web.git"
    git remote add $SSH_REMOTE_NAME $ssh_remote
}

clean() {
    if [ -f $SSH_ID_FILE ]
    then
        shred $SSH_ID_FILE
        rm $SSH_ID_FILE
    fi
}

# $1: branch name
# $2: directory containing build assets
# $3: Author in <author-name author@example.com> format
commit_files() {
    cd $PROJECT_ROOT
    original_branch=$(git branch --show-current)
    tmp_dir=$(mktemp -d)
    cp -r $2/* $tmp_dir

    if [[ -z $(git ls-remote --heads origin ${1}) ]]
    then
        echo "[*] Creating deployment branch $1"
        git checkout --orphan $1
    else
        echo "[*] Deployment branch $1 exists, pulling changes from remote"
        git fetch origin $1
        git switch $1
    fi

    git rm -rf .
    /bin/rm -rf *
    cp -r $tmp_dir/* .
    git add --all
    if [ $(git status --porcelain | xargs | sed '/^$/d' | wc -l) -gt 0 ];
    then
        echo "[*] Repository has changed, committing changes"
        git commit \
            --author="$3" \
            --message="new deploy: $(date --iso-8601=seconds)"
    fi
    git checkout $original_branch
}

# $1: Pages API secret
# $2: Deployment target branch
deploy() {
    if (( "$#" < 2 ))
    then
        help
    else
        git -c core.sshCommand="/usr/bin/ssh -oStrictHostKeyChecking=no -i $SSH_ID_FILE"\
            push --force $SSH_REMOTE_NAME $2
        curl -vv --location --request \
            POST "https://deploy.batsense.net/api/v1/update"\
            --header 'Content-Type: application/json' \
            --data-raw "{ \"secret\": \"$1\", \"branch\": \"$2\" }"
    fi
}

if (( "$#" < 1 ))
then
    help
    exit -1
fi

if match_arg $1 '-i' '--init'
then
    if (( "$#" < 2 ))
    then
        help
        exit -1
    fi
    set_ssh_remote
    write_ssh "$2"
elif match_arg $1 '-c' '--clean'
then
    clean
elif match_arg $1 '-cf' '--commit-files'
then
    if (( "$#" < 4 ))
    then
        help
        exit -1
    fi
    commit_files $2 $3 $4
elif match_arg $1 '-d' '--deploy'
then
    if (( "$#" < 3 ))
    then
        help
        exit -1
    fi
    deploy $2 $3
elif match_arg $1 '-h' '--help'
then
    help
else
    help
fi
@@ -1,3 +0,0 @@
#!/bin/sh
# Remove icons created in .tmp
rm -rf .tmp

6  scripts/deploy.sh  Executable file
@@ -0,0 +1,6 @@
git checkout gh-pages
cp -R target/* .
git add $(find . -maxdepth 1 -type f)
git add themes
git commit -m "update hydrogen"
git checkout master
@@ -1,51 +0,0 @@
/*
Copyright 2020 The Matrix.org Foundation C.I.C.

Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at

    http://www.apache.org/licenses/LICENSE-2.0

Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*/

export function openFile(mimeType = null) {
    const input = document.createElement("input");
    input.setAttribute("type", "file");
    input.className = "hidden";
    if (mimeType) {
        input.setAttribute("accept", mimeType);
    }
    const promise = new Promise((resolve, reject) => {
        const checkFile = () => {
            input.removeEventListener("change", checkFile, true);
            const file = input.files[0];
            document.body.removeChild(input);
            if (file) {
                resolve(file);
            } else {
                reject(new Error("no file picked"));
            }
        }
        input.addEventListener("change", checkFile, true);
    });
    // IE11 needs the input to be attached to the document
    document.body.appendChild(input);
    input.click();
    return promise;
}

export function readFileAsText(file) {
    const reader = new FileReader();
    const promise = new Promise((resolve, reject) => {
        reader.addEventListener("load", evt => resolve(evt.target.result));
        reader.addEventListener("error", evt => reject(evt.target.error));
    });
    reader.readAsText(file);
    return promise;
}
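
Not part of this diff: a short usage sketch showing how the two helpers above compose; this mirrors how the log viewer's main.js uses them, though the mime type below is only an example.

// e.g. in a click handler
async function pickAndParseJson() {
    const file = await openFile("application/json"); // restricts the picker via the accept attribute
    const text = await readFileAsText(file);
    return JSON.parse(text);
}
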
@@ -1,209 +0,0 @@
<!DOCTYPE html>
<html>
    <head>
        <meta charset="utf-8">
        <style type="text/css">
            html, body {
                height: 100%;
            }

            body {
                font-family: sans-serif;
                font-size: 1rem;
                margin: 0;
                display: grid;
                grid-template-areas: "nav nav" "items details";
                grid-template-columns: 1fr 400px;
                grid-template-rows: auto 1fr;
                min-height: 0;
            }

            main {
                grid-area: items;
                min-width: 0;
                min-height: 0;
                overflow-y: auto;
                padding: 8px;
            }

            main section h2 {
                margin: 2px 14px;
                font-size: 1rem;
            }

            aside {
                grid-area: details;
                padding: 8px;
            }

            aside h3 {
                word-wrap: anywhere;
            }

            aside p {
                margin: 2px 0;
            }

            aside .values li span {
                word-wrap: anywhere;
                padding: 4px;
            }

            aside .values {
                list-style: none;
                padding: 0;
                border: 1px solid lightgray;
            }

            aside .values span.key {
                width: 30%;
                display: block;
            }

            aside .values span.value {
                width: 70%;
                display: block;
                white-space: pre-wrap;
            }

            aside .values li {
                display: flex;
            }

            aside .values li:not(:first-child) {
                border-top: 1px solid lightgray;
            }

            nav {
                grid-area: nav;
            }

            .timeline li:not(.expanded) > ol {
                display: none;
            }

            .timeline li > div {
                display: flex;
            }

            .timeline .toggleExpanded {
                border: none;
                background: none;
                width: 24px;
                height: 24px;
                margin-right: 4px;
                cursor: pointer;
            }

            .timeline .toggleExpanded:before {
                content: "▶";
            }

            .timeline li.expanded > div > .toggleExpanded:before {
                content: "▼";
            }

            .timeline ol {
                list-style: none;
                padding: 0 0 0 20px;
                margin: 0;
            }

            .timeline .item {
                --hue: 100deg;
                --brightness: 80%;
                background-color: hsl(var(--hue), 60%, var(--brightness));
                border: 1px solid hsl(var(--hue), 60%, calc(var(--brightness) - 40%));
                border-radius: 4px;
                padding: 2px;
                display: flex;
                margin: 1px;
                flex: 1;
                min-width: 0;
                color: inherit;
                text-decoration: none;
            }

            .timeline .item:not(.has-children) {
                margin-left: calc(24px + 4px + 1px);
            }

            .timeline .item .caption {
                white-space: nowrap;
                text-overflow: ellipsis;
                overflow: hidden;
                flex: 1;
            }

            .timeline .item.level-3 {
                --brightness: 90%;
            }

            .timeline .item.level-2 {
                --brightness: 95%;
            }

            .timeline .item.level-5 {
                --brightness: 80%;
            }

            .timeline .item.level-6, .timeline .item.level-7 {
                --hue: 0deg !important;
            }

            .timeline .item.level-7 {
                --brightness: 50%;
                color: white;
            }

            .timeline .item.type-network {
                --hue: 30deg;
            }

            .timeline .item.type-navigation {
                --hue: 200deg;
            }

            .timeline .item.selected {
                background-color: Highlight;
                border-color: Highlight;
                color: HighlightText;
            }

            .timeline .item.highlighted {
                background-color: fuchsia;
                color: white;
            }

            .hidden {
                display: none;
            }

            #highlight {
                width: 300px;
            }

            nav form {
                display: inline;
            }
        </style>
    </head>
    <body>
        <nav>
            <button id="openFile">Open log file</button>
            <button id="collapseAll">Collapse all</button>
            <button id="hideCollapsed">Hide collapsed root items</button>
            <button id="hideHighlightedSiblings" title="Hide collapsed siblings of highlighted">Hide non-highlighted</button>
            <button id="showAll">Show all</button>
            <form id="highlightForm">
                <input type="text" id="highlight" name="highlight" placeholder="Highlight a search term" autocomplete="on">
                <output id="highlightMatches"></output>
            </form>
        </nav>
        <main></main>
        <aside></aside>
        <script type="module" src="main.js"></script>
    </body>
</html>
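
Not part of this diff: main.js below imports a `tag` helper from ./html.js, which is not included in this comparison. As a rough sketch only (an assumption, not the project's actual implementation), a hyperscript-style helper compatible with the calls used below could look like this:

// html.js sketch: supports t.h2("text"), t.a({href}, children), t.span({class|className}, text), ...
const tag = new Proxy({}, {
    get: (_, tagName) => (attributes, children) => {
        // allow calling with children only, e.g. t.button("Expand recursively") or t.aside([...])
        if (attributes && (typeof attributes !== "object" || Array.isArray(attributes) || attributes instanceof Node)) {
            children = attributes;
            attributes = {};
        }
        const node = document.createElement(tagName);
        for (const [key, value] of Object.entries(attributes || {})) {
            if (key === "className" || key === "class") {
                // className can be a {name: enabled} map, as in itemToNode below
                node.className = typeof value === "object" ?
                    Object.entries(value).filter(([, on]) => on).map(([name]) => name).join(" ") :
                    value;
            } else {
                node.setAttribute(key, value);
            }
        }
        for (const child of [].concat(children ?? [])) {
            node.appendChild(child instanceof Node ? child : document.createTextNode(child));
        }
        return node;
    }
});
export {tag};
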
@@ -1,398 +0,0 @@
/*
Copyright 2020 The Matrix.org Foundation C.I.C.

Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at

    http://www.apache.org/licenses/LICENSE-2.0

Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*/

import {tag as t} from "./html.js";
import {openFile, readFileAsText} from "./file.js";

const main = document.querySelector("main");

let selectedItemNode;
let rootItem;
let itemByRef;

const logLevels = [undefined, "All", "Debug", "Detail", "Info", "Warn", "Error", "Fatal", "Off"];

main.addEventListener("click", event => {
    if (event.target.classList.contains("toggleExpanded")) {
        const li = event.target.parentElement.parentElement;
        li.classList.toggle("expanded");
    } else {
        // allow clicking any links other than .item in the timeline, like refs
        if (event.target.tagName === "A" && !event.target.classList.contains("item")) {
            return;
        }
        const itemNode = event.target.closest(".item");
        if (itemNode) {
            // we don't want scroll to jump when clicking
            // so prevent default behaviour, and select and push to history manually
            event.preventDefault();
            selectNode(itemNode);
            history.pushState(null, null, `#${itemNode.id}`);
        }
    }
});

window.addEventListener("hashchange", () => {
    const id = window.location.hash.substr(1);
    const itemNode = document.getElementById(id);
    if (itemNode && itemNode.closest("main")) {
        selectNode(itemNode);
        itemNode.scrollIntoView({behavior: "smooth", block: "nearest"});
    }
});

function selectNode(itemNode) {
    if (selectedItemNode) {
        selectedItemNode.classList.remove("selected");
    }
    selectedItemNode = itemNode;
    selectedItemNode.classList.add("selected");
    let item = rootItem;
    let parent;
    const indices = selectedItemNode.id.split("/").map(i => parseInt(i, 10));
    for(const i of indices) {
        parent = item;
        item = itemChildren(item)[i];
    }
    showItemDetails(item, parent, selectedItemNode);
}

function stringifyItemValue(value) {
    if (typeof value === "object" && value !== null) {
        return JSON.stringify(value, undefined, 2);
    } else {
        return value + "";
    }
}

function showItemDetails(item, parent, itemNode) {
    const parentOffset = itemStart(parent) ? `${itemStart(item) - itemStart(parent)}ms` : "none";
    const expandButton = t.button("Expand recursively");
    expandButton.addEventListener("click", () => expandResursively(itemNode.parentElement.parentElement));
    const start = itemStart(item);
    const aside = t.aside([
        t.h3(itemCaption(item)),
        t.p([t.strong("Log level: "), logLevels[itemLevel(item)]]),
        t.p([t.strong("Error: "), itemError(item) ? `${itemError(item).name} ${itemError(item).stack}` : "none"]),
        t.p([t.strong("Parent offset: "), parentOffset]),
        t.p([t.strong("Start: "), new Date(start).toString(), ` (${start})`]),
        t.p([t.strong("Duration: "), `${itemDuration(item)}ms`]),
        t.p([t.strong("Child count: "), itemChildren(item) ? `${itemChildren(item).length}` : "none"]),
        t.p([t.strong("Forced finish: "), (itemForcedFinish(item) || false) + ""]),
        t.p(t.strong("Values:")),
        t.ul({class: "values"}, Object.entries(itemValues(item)).map(([key, value]) => {
            let valueNode;
            if (key === "ref") {
                const refItem = itemByRef.get(value);
                if (refItem) {
                    valueNode = t.a({href: `#${refItem.id}`}, itemCaption(refItem));
                } else {
                    valueNode = `unknown ref ${value}`;
                }
            } else {
                valueNode = stringifyItemValue(value);
            }
            return t.li([
                t.span({className: "key"}, normalizeValueKey(key)),
                t.span({className: "value"}, valueNode)
            ]);
        })),
        t.p(expandButton)
    ]);
    document.querySelector("aside").replaceWith(aside);
}

function expandResursively(li) {
    li.classList.add("expanded");
    const ol = li.querySelector("ol");
    if (ol) {
        const len = ol.children.length;
        for (let i = 0; i < len; i += 1) {
            expandResursively(ol.children[i]);
        }
    }
}

document.getElementById("openFile").addEventListener("click", loadFile);

function getRootItemHeader(prevItem, item) {
    if (prevItem) {
        const diff = itemStart(item) - itemEnd(prevItem);
        if (diff >= 0) {
            return `+ ${formatTime(diff)}`;
        } else {
            const overlap = -diff;
            if (overlap >= itemDuration(item)) {
                return `ran entirely in parallel with`;
            } else {
                return `ran ${formatTime(-diff)} in parallel with`;
            }
        }
    } else {
        return new Date(itemStart(item)).toString();
    }
}

async function loadFile() {
    const file = await openFile();
    const json = await readFileAsText(file);
    const logs = JSON.parse(json);
    logs.items.sort((a, b) => itemStart(a) - itemStart(b));
    rootItem = {c: logs.items};
    itemByRef = new Map();
    preprocessRecursively(rootItem, null, itemByRef, []);

    const fragment = logs.items.reduce((fragment, item, i, items) => {
        const prevItem = i === 0 ? null : items[i - 1];
        fragment.appendChild(t.section([
            t.h2(getRootItemHeader(prevItem, item)),
            t.div({className: "timeline"}, t.ol(itemToNode(item, [i])))
        ]));
        return fragment;
    }, document.createDocumentFragment());
    main.replaceChildren(fragment);
}

// TODO: make this use processRecursively
function preprocessRecursively(item, parentElement, refsMap, path) {
    item.s = (parentElement?.s || 0) + item.s;
    if (itemRefSource(item)) {
        refsMap.set(itemRefSource(item), item);
    }
    if (itemChildren(item)) {
        for (let i = 0; i < itemChildren(item).length; i += 1) {
            // do it in advance for a child as we don't want to do it for the rootItem
            const child = itemChildren(item)[i];
            const childPath = path.concat(i);
            child.id = childPath.join("/");
            preprocessRecursively(child, item, refsMap, childPath);
        }
    }
}

const MS_IN_SEC = 1000;
const MS_IN_MIN = MS_IN_SEC * 60;
const MS_IN_HOUR = MS_IN_MIN * 60;
const MS_IN_DAY = MS_IN_HOUR * 24;
function formatTime(ms) {
    let str = "";
    if (ms > MS_IN_DAY) {
        const days = Math.floor(ms / MS_IN_DAY);
        ms -= days * MS_IN_DAY;
        str += `${days}d`;
    }
    if (ms > MS_IN_HOUR) {
        const hours = Math.floor(ms / MS_IN_HOUR);
        ms -= hours * MS_IN_HOUR;
        str += `${hours}h`;
    }
    if (ms > MS_IN_MIN) {
        const mins = Math.floor(ms / MS_IN_MIN);
        ms -= mins * MS_IN_MIN;
        str += `${mins}m`;
    }
    if (ms > MS_IN_SEC) {
        const secs = ms / MS_IN_SEC;
        str += `${secs.toFixed(2)}s`;
    } else if (ms > 0 || !str.length) {
        str += `${ms}ms`;
    }
    return str;
}
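
For reference (not part of the original file): formatTime above peels off whole days, hours and minutes, then prints seconds with two decimals, or plain milliseconds for sub-second values, producing compact strings such as:

// illustrative expected outputs
formatTime(500);     // "500ms"
formatTime(90000);   // "1m30.00s"
formatTime(3723000); // "1h2m3.00s"
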
|
|
||||||
function itemChildren(item) { return item.c; }
|
|
||||||
function itemStart(item) { return item.s; }
|
|
||||||
function itemEnd(item) { return item.s + item.d; }
|
|
||||||
function itemDuration(item) { return item.d; }
|
|
||||||
function itemValues(item) { return item.v; }
|
|
||||||
function itemLevel(item) { return item.l; }
|
|
||||||
function itemLabel(item) { return item.v?.l; }
|
|
||||||
function itemType(item) { return item.v?.t; }
|
|
||||||
function itemError(item) { return item.e; }
|
|
||||||
function itemForcedFinish(item) { return item.f; }
|
|
||||||
function itemRef(item) { return item.v?.ref; }
|
|
||||||
function itemRefSource(item) { return item.v?.refId; }
function itemShortErrorMessage(item) {
    if (itemError(item)) {
        const e = itemError(item);
        return e.name || e.stack.substr(0, e.stack.indexOf("\n"));
    }
}

function itemCaption(item) {
    if (itemType(item) === "network") {
        return `${itemValues(item)?.method} ${itemValues(item)?.url}`;
    } else if (itemLabel(item) && itemValues(item)?.id) {
        return `${itemLabel(item)} ${itemValues(item).id}`;
    } else if (itemLabel(item) && itemValues(item)?.status) {
        return `${itemLabel(item)} (${itemValues(item).status})`;
    } else if (itemLabel(item) && itemError(item)) {
        return `${itemLabel(item)} (${itemShortErrorMessage(item)})`;
    } else if (itemRef(item)) {
        const refItem = itemByRef.get(itemRef(item));
        if (refItem) {
            return `ref "${itemCaption(refItem)}"`;
        } else {
            return `unknown ref ${itemRef(item)}`;
        }
    } else {
        return itemLabel(item) || itemType(item);
    }
}
function normalizeValueKey(key) {
    switch (key) {
        case "t": return "type";
        case "l": return "label";
        default: return key;
    }
}

// returns the <li> node for an item; its children are rendered recursively into a nested <ol>
function itemToNode(item) {
    const hasChildren = !!itemChildren(item)?.length;
    const className = {
        item: true,
        "has-children": hasChildren,
        error: itemError(item),
        [`type-${itemType(item)}`]: !!itemType(item),
        [`level-${itemLevel(item)}`]: true,
    };

    const id = item.id;
    let captionNode;
    if (itemRef(item)) {
        const refItem = itemByRef.get(itemRef(item));
        if (refItem) {
            captionNode = ["ref ", t.a({href: `#${refItem.id}`}, itemCaption(refItem))];
        }
    }
    if (!captionNode) {
        captionNode = itemCaption(item);
    }
    const li = t.li([
        t.div([
            hasChildren ? t.button({className: "toggleExpanded"}) : "",
            t.a({className, id, href: `#${id}`}, [
                t.span({class: "caption"}, captionNode),
                t.span({class: "duration"}, `(${formatTime(itemDuration(item))})`),
            ])
        ])
    ]);
    if (itemChildren(item) && itemChildren(item).length) {
        li.appendChild(t.ol(itemChildren(item).map(item => {
            return itemToNode(item);
        })));
    }
    return li;
}

const highlightForm = document.getElementById("highlightForm");

highlightForm.addEventListener("submit", evt => {
    evt.preventDefault();
    const matchesOutput = document.getElementById("highlightMatches");
    const query = document.getElementById("highlight").value;
    if (query) {
        matchesOutput.innerText = "Searching…";
        let matches = 0;
        processRecursively(rootItem, item => {
            let domNode = document.getElementById(item.id);
            if (itemMatchesFilter(item, query)) {
                matches += 1;
                domNode.classList.add("highlighted");
                domNode = domNode.parentElement;
                while (domNode.nodeName !== "SECTION") {
                    if (domNode.nodeName === "LI") {
                        domNode.classList.add("expanded");
                    }
                    domNode = domNode.parentElement;
                }
            } else {
                domNode.classList.remove("highlighted");
            }
        });
        matchesOutput.innerText = `${matches} matches`;
    } else {
        for (const node of document.querySelectorAll(".highlighted")) {
            node.classList.remove("highlighted");
        }
        matchesOutput.innerText = "";
    }
});

function itemMatchesFilter(item, query) {
    if (itemError(item)) {
        if (valueMatchesQuery(itemError(item), query)) {
            return true;
        }
    }
    return valueMatchesQuery(itemValues(item), query);
}

function valueMatchesQuery(value, query) {
    if (typeof value === "string") {
        return value.includes(query);
    } else if (typeof value === "object" && value !== null) {
        for (const key in value) {
            if (value.hasOwnProperty(key) && valueMatchesQuery(value[key], query)) {
                return true;
            }
        }
    } else if (typeof value === "number") {
        return value.toString().includes(query);
    }
    return false;
}
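// Worked example (annotation, not part of the original file): the query is matched as a
// substring against every nested string and number, so
//   valueMatchesQuery({l: "sync", status: 200}, "200") === true   (matches the number 200)
//   valueMatchesQuery({l: "sync", status: 200}, "404") === false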

function processRecursively(item, callback, parentItem) {
    if (item.id) {
        callback(item, parentItem);
    }
    if (itemChildren(item)) {
        for (let i = 0; i < itemChildren(item).length; i += 1) {
            // do it in advance for a child as we don't want to do it for the rootItem
            const child = itemChildren(item)[i];
            processRecursively(child, callback, item);
        }
    }
}
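// Usage sketch (annotation, not part of the original file), mirroring how the highlight handler
// above drives this helper:
// let errorCount = 0;
// processRecursively(rootItem, item => {
//     if (itemError(item)) { errorCount += 1; }
// });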

document.getElementById("collapseAll").addEventListener("click", () => {
    for (const node of document.querySelectorAll(".expanded")) {
        node.classList.remove("expanded");
    }
});
document.getElementById("hideCollapsed").addEventListener("click", () => {
    for (const node of document.querySelectorAll("section > div.timeline > ol > li:not(.expanded)")) {
        node.closest("section").classList.add("hidden");
    }
});
document.getElementById("hideHighlightedSiblings").addEventListener("click", () => {
    for (const node of document.querySelectorAll(".highlighted")) {
        const list = node.closest("ol");
        const siblings = Array.from(list.querySelectorAll("li > div > a:not(.highlighted)")).map(n => n.closest("li"));
        for (const sibling of siblings) {
            if (!sibling.classList.contains("expanded")) {
                sibling.classList.add("hidden");
            }
        }
    }
});
document.getElementById("showAll").addEventListener("click", () => {
    for (const node of document.querySelectorAll(".hidden")) {
        node.classList.remove("hidden");
    }
});
@ -1,7 +0,0 @@
var Buffer = {
    isBuffer: function(array) {return array instanceof Uint8Array;},
    from: function(arrayBuffer) {return arrayBuffer;},
    allocUnsafe: function(size) {return Buffer.alloc(size);},
    alloc: function(size) {return new Uint8Array(size);}
};
export default Buffer;
@ -1,2 +0,0 @@
import Buffer from "buffer";
export {Buffer};
@ -1,10 +0,0 @@
VERSION=$(jq -r ".version" package.json)
PACKAGE=hydrogen-web-$VERSION.tar.gz
yarn build
pushd target
# move config file so we don't override it
# when deploying a new version
mv config.json config.sample.json
tar -czvf ../$PACKAGE ./
popd
echo $PACKAGE
@ -1,180 +0,0 @@
/*
Copyright 2021 The Matrix.org Foundation C.I.C.

Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at

http://www.apache.org/licenses/LICENSE-2.0

Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*/

const valueParser = require("postcss-value-parser");

/**
 * This plugin derives new css variables from a given set of base variables.
 * A derived css variable has the form --base--operation-argument, meaning that the derived
 * variable has a value that is generated from the base variable "base" by applying "operation"
 * with the given "argument".
 *
 * e.g.: given the base variable --foo-color: #40E0D0, --foo-color--darker-20 is a css variable
 * derived from foo-color by making it 20% darker.
 *
 * All derived variables are added to the :root section.
 *
 * The actual derivation is done outside the plugin in a callback.
 */

function getValueFromAlias(alias, {aliasMap, baseVariables, resolvedMap}) {
    const derivedVariable = aliasMap.get(alias);
    return baseVariables.get(derivedVariable) ?? resolvedMap.get(derivedVariable);
}

function parseDeclarationValue(value) {
    const parsed = valueParser(value);
    const variables = [];
    parsed.walk(node => {
        if (node.type !== "function") {
            return;
        }
        switch (node.value) {
            case "var": {
                const variable = node.nodes[0];
                variables.push(variable.value);
                break;
            }
            case "url": {
                const url = node.nodes[0].value;
                // resolve url with some absolute url so that we get the query params without using regex
                const params = new URL(url, "file://foo/bar/").searchParams;
                const primary = params.get("primary");
                const secondary = params.get("secondary");
                if (primary) { variables.push(primary); }
                if (secondary) { variables.push(secondary); }
                break;
            }
        }
    });
    return variables;
}

function resolveDerivedVariable(decl, derive, maps, isDark) {
    const { baseVariables, resolvedMap } = maps;
    const RE_VARIABLE_VALUE = /(?:--)?((.+)--(.+)-(.+))/;
    const variableCollection = parseDeclarationValue(decl.value);
    for (const variable of variableCollection) {
        const matches = variable.match(RE_VARIABLE_VALUE);
        if (matches) {
            const [, wholeVariable, baseVariable, operation, argument] = matches;
            const value = baseVariables.get(baseVariable) ?? getValueFromAlias(baseVariable, maps);
            if (!value) {
                throw new Error(`Cannot derive from ${baseVariable} because it is neither defined in config nor is it an alias!`);
            }
            const derivedValue = derive(value, operation, argument, isDark);
            resolvedMap.set(wholeVariable, derivedValue);
        }
    }
}

function extract(decl, {aliasMap, baseVariables}) {
    if (decl.variable) {
        // see if right side is of form "var(--foo)"
        const wholeVariable = decl.value.match(/var\(--(.+)\)/)?.[1];
        // remove -- from the prop
        const prop = decl.prop.substring(2);
        if (wholeVariable) {
            aliasMap.set(prop, wholeVariable);
            // Since this is an alias, we shouldn't store it in baseVariables
            return;
        }
        baseVariables.set(prop, decl.value);
    }
}

function addResolvedVariablesToRootSelector(root, {Rule, Declaration}, {resolvedMap}) {
    const newRule = new Rule({ selector: ":root", source: root.source });
    // Add derived css variables to :root
    resolvedMap.forEach((value, key) => {
        const declaration = new Declaration({prop: `--${key}`, value});
        newRule.append(declaration);
    });
    root.append(newRule);
}

function populateMapWithDerivedVariables(map, cssFileLocation, {resolvedMap, aliasMap}) {
    const location = cssFileLocation.match(/(.+)\/.+\.css/)?.[1];
    const derivedVariables = [
        ...([...resolvedMap.keys()].filter(v => !aliasMap.has(v))),
        ...([...aliasMap.entries()].map(([alias, variable]) => `${alias}=${variable}`))
    ];
    const sharedObject = map.get(location);
    const output = { "derived-variables": derivedVariables };
    if (sharedObject) {
        Object.assign(sharedObject, output);
    }
    else {
        map.set(location, output);
    }
}

/**
 * @callback derive
 * @param {string} value - The base value on which an operation is applied
 * @param {string} operation - The operation to be applied (e.g.: darker, lighter...)
 * @param {string} argument - The argument for this operation
 * @param {boolean} isDark - Indicates whether this theme is dark
 */
/**
 *
 * @param {Object} opts - Options for the plugin
 * @param {derive} opts.derive - The callback which contains the logic for resolving derived variables
 * @param {Map} opts.compiledVariables - A map that stores derived variables so that manifest source sections can be produced
 */
module.exports = (opts = {}) => {
    const aliasMap = new Map();
    const resolvedMap = new Map();
    const baseVariables = new Map();
    const maps = { aliasMap, resolvedMap, baseVariables };

    return {
        postcssPlugin: "postcss-compile-variables",

        Once(root, {Rule, Declaration, result}) {
            const cssFileLocation = root.source.input.from;
            if (cssFileLocation.includes("type=runtime")) {
                // If this is a runtime theme, don't derive variables.
                return;
            }
            const isDark = cssFileLocation.includes("dark=true");
            /*
            Go through the CSS file once to extract all aliases and base variables.
            We use these when resolving derived variables later.
            */
            root.walkDecls(decl => extract(decl, maps));
            root.walkDecls(decl => resolveDerivedVariable(decl, opts.derive, maps, isDark));
            addResolvedVariablesToRootSelector(root, {Rule, Declaration}, maps);
            if (opts.compiledVariables){
                populateMapWithDerivedVariables(opts.compiledVariables, cssFileLocation, maps);
            }
            // Also produce a mapping from alias to completely resolved color
            const resolvedAliasMap = new Map();
            aliasMap.forEach((value, key) => {
                resolvedAliasMap.set(key, resolvedMap.get(value));
            });
            // Publish the base-variables, derived-variables and resolved aliases to the other postcss-plugins
            const combinedMap = new Map([...baseVariables, ...resolvedMap, ...resolvedAliasMap]);
            result.messages.push({
                type: "resolved-variable-map",
                plugin: "postcss-compile-variables",
                colorMap: combinedMap,
            });
        },
    };
};

module.exports.postcss = true;
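// Illustrative usage sketch (not part of this changeset); it mirrors how the plugin tests further
// down in this diff drive the plugin. The require paths are assumptions for the sketch.
const postcss = require("postcss");
const compileVariables = require("./css-compile-variables");
const derive = require("./color").derive;

const compiledVariables = new Map();
const inputCSS = `:root { --foo-color: #ff0; } div { color: var(--foo-color--lighter-50); }`;
postcss([compileVariables({ derive, compiledVariables })])
    .process(inputCSS, { from: "/foo/bar/theme.css" })
    .then(result => {
        // result.css now ends with a :root rule containing --foo-color--lighter-50, and
        // compiledVariables.get("/foo/bar") lists the derived variable names for the manifest.
        console.log(result.css);
    });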
@ -1,92 +0,0 @@
/*
Copyright 2021 The Matrix.org Foundation C.I.C.

Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at

http://www.apache.org/licenses/LICENSE-2.0

Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*/

const valueParser = require("postcss-value-parser");
const resolve = require("path").resolve;

function colorsFromURL(url, colorMap) {
    const params = new URL(`file://${url}`).searchParams;
    const primary = params.get("primary");
    if (!primary) {
        return null;
    }
    const secondary = params.get("secondary");
    const primaryColor = colorMap.get(primary);
    const secondaryColor = colorMap.get(secondary);
    if (!primaryColor) {
        throw new Error(`Variable ${primary} not found in resolved color variables!`);
    }
    if (secondary && !secondaryColor) {
        throw new Error(`Variable ${secondary} not found in resolved color variables!`);
    }
    return [primaryColor, secondaryColor];
}

function processURL(decl, replacer, colorMap, cssPath) {
    const value = decl.value;
    const parsed = valueParser(value);
    parsed.walk(node => {
        if (node.type !== "function" || node.value !== "url") {
            return;
        }
        const urlStringNode = node.nodes[0];
        const oldURL = urlStringNode.value;
        const oldURLAbsolute = resolve(cssPath, oldURL);
        const colors = colorsFromURL(oldURLAbsolute, colorMap);
        if (!colors) {
            // If no primary color is provided via url params, then this url need not be handled.
            return;
        }
        const newURL = replacer(oldURLAbsolute.replace(/\?.+/, ""), ...colors);
        if (!newURL) {
            throw new Error("Replacer failed to produce a replacement URL!");
        }
        urlStringNode.value = newURL;
    });
    decl.assign({prop: decl.prop, value: parsed.toString()});
}

/**
 * @type {import('postcss').PluginCreator}
 */
module.exports = (opts = {}) => {
    return {
        postcssPlugin: "postcss-url-to-variable",

        Once(root, {result}) {
            const cssFileLocation = root.source.input.from;
            if (cssFileLocation.includes("type=runtime")) {
                // If this is a runtime theme, don't process urls.
                return;
            }
            /*
            postcss-compile-variables should have sent the list of resolved colours down via results
            */
            const colorMap = result.messages.find(m => m.type === "resolved-variable-map")?.colorMap;
            if (!colorMap) {
                throw new Error("Postcss results do not contain resolved colors!");
            }
            /*
            Go through each declaration and if it contains a URL, replace the url with the result
            of running replacer(url)
            */
            const cssPath = root.source?.input.file.replace(/[^/]*$/, "");
            root.walkDecls(decl => processURL(decl, opts.replacer, colorMap, cssPath));
        },
    };
};

module.exports.postcss = true;
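// Annotation (not part of this changeset): this plugin must run after postcss-compile-variables
// in the same postcss pipeline, because it looks up the "resolved-variable-map" message that the
// compile-variables plugin pushes onto result.messages; run on its own it throws
// "Postcss results do not contain resolved colors!".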
@ -1,97 +0,0 @@
/*
Copyright 2021 The Matrix.org Foundation C.I.C.

Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at

http://www.apache.org/licenses/LICENSE-2.0

Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*/

const valueParser = require("postcss-value-parser");

/**
 * This plugin extracts content inside url() into css variables and adds the variables to the root section.
 * This plugin is used in conjunction with css-url-processor plugin to colorize svg icons.
 */
const idToPrepend = "icon-url";

function findAndReplaceUrl(decl, urlVariables, counter) {
    const value = decl.value;
    const parsed = valueParser(value);
    parsed.walk(node => {
        if (node.type !== "function" || node.value !== "url") {
            return;
        }
        const url = node.nodes[0].value;
        if (!url.match(/\.svg\?primary=.+/)) {
            return;
        }
        const count = counter.next().value;
        const variableName = `${idToPrepend}-${count}`;
        urlVariables.set(variableName, url);
        node.value = "var";
        node.nodes = [{ type: "word", value: `--${variableName}` }];
    });
    decl.assign({prop: decl.prop, value: parsed.toString()});
}

function addResolvedVariablesToRootSelector(root, { Rule, Declaration }, urlVariables) {
    const newRule = new Rule({ selector: ":root", source: root.source });
    // Add derived css variables to :root
    urlVariables.forEach((value, key) => {
        const declaration = new Declaration({ prop: `--${key}`, value: `url("${value}")`});
        newRule.append(declaration);
    });
    root.append(newRule);
}

function populateMapWithIcons(map, cssFileLocation, urlVariables) {
    const location = cssFileLocation.match(/(.+)\/.+\.css/)?.[1];
    const sharedObject = map.get(location);
    const output = {"icon": Object.fromEntries(urlVariables)};
    if (sharedObject) {
        Object.assign(sharedObject, output);
    }
    else {
        map.set(location, output);
    }
}

function *createCounter() {
    for (let i = 0; ; ++i) {
        yield i;
    }
}

/**
 * @type {import('postcss').PluginCreator}
 */
module.exports = (opts = {}) => {
    return {
        postcssPlugin: "postcss-url-to-variable",

        Once(root, { Rule, Declaration }) {
            const urlVariables = new Map();
            const counter = createCounter();
            root.walkDecls(decl => findAndReplaceUrl(decl, urlVariables, counter));
            const cssFileLocation = root.source.input.from;
            if (urlVariables.size && !cssFileLocation.includes("type=runtime")) {
                addResolvedVariablesToRootSelector(root, { Rule, Declaration }, urlVariables);
            }
            if (opts.compiledVariables){
                const cssFileLocation = root.source.input.from;
                populateMapWithIcons(opts.compiledVariables, cssFileLocation, urlVariables);
            }
        },
    };
};

module.exports.postcss = true;
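// Illustrative usage sketch (not part of this changeset); it follows the css-url-to-variables
// test further down in this diff. The require path is an assumption for the sketch.
const postcss = require("postcss");
const urlToVariables = require("./css-url-to-variables");

const compiledVariables = new Map();
const inputCSS = `div { background: url("../img/image.svg?primary=main-color--darker-20"); }`;
postcss([urlToVariables({ compiledVariables })])
    .process(inputCSS, { from: "/foo/bar/theme.css" })
    .then(result => {
        // The url() is replaced with var(--icon-url-0), a :root rule defining --icon-url-0 is
        // appended, and compiledVariables.get("/foo/bar")["icon"] maps "icon-url-0" back to the
        // original url.
        console.log(result.css);
    });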
@ -1,51 +0,0 @@
/*
Copyright 2021 The Matrix.org Foundation C.I.C.

Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at

http://www.apache.org/licenses/LICENSE-2.0

Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*/

import {readFileSync, mkdirSync, writeFileSync} from "fs";
import {resolve} from "path";
import {h32} from "xxhashjs";
import {getColoredSvgString} from "../../src/platform/web/theming/shared/svg-colorizer.mjs";

function createHash(content) {
    const hasher = new h32(0);
    hasher.update(content);
    return hasher.digest();
}

/**
 * Builds a new svg with the colors replaced and returns its location.
 * @param {string} svgLocation The location of the input svg file
 * @param {string} primaryColor Primary color for the new svg
 * @param {string} secondaryColor Secondary color for the new svg
 */
export function buildColorizedSVG(svgLocation, primaryColor, secondaryColor) {
    const svgCode = readFileSync(svgLocation, { encoding: "utf8"});
    const coloredSVGCode = getColoredSvgString(svgCode, primaryColor, secondaryColor);
    const fileName = svgLocation.match(/.+[/\\](.+\.svg)/)[1];
    const outputName = `${fileName.substring(0, fileName.length - 4)}-${createHash(coloredSVGCode)}.svg`;
    const outputPath = resolve(__dirname, "./.tmp");
    try {
        mkdirSync(outputPath);
    }
    catch (e) {
        if (e.code !== "EEXIST") {
            throw e;
        }
    }
    const outputFile = `${outputPath}/${outputName}`;
    writeFileSync(outputFile, coloredSVGCode);
    return outputFile;
}
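// Usage sketch (annotation, not part of this changeset); the path and colors are made up.
// buildColorizedSVG reads the icon, recolors it via getColoredSvgString, writes the result to a
// .tmp directory next to this script, and returns something like ".../.tmp/icon-<hash>.svg":
//   const coloredIcon = buildColorizedSVG("/abs/path/icon.svg", "#ff00ff", "#00ff00");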
@ -1,30 +0,0 @@
/*
Copyright 2021 The Matrix.org Foundation C.I.C.

Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at

http://www.apache.org/licenses/LICENSE-2.0

Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*/

const postcss = require("postcss");

module.exports.createTestRunner = function (plugin) {
    return async function run(input, output, opts = {}, assert) {
        let result = await postcss([plugin(opts)]).process(input, { from: undefined, });
        assert.strictEqual(
            result.css.replaceAll(/\s/g, ""),
            output.replaceAll(/\s/g, "")
        );
        assert.strictEqual(result.warnings().length, 0);
    };
}
@ -1,156 +0,0 @@
/*
Copyright 2021 The Matrix.org Foundation C.I.C.

Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at

http://www.apache.org/licenses/LICENSE-2.0

Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*/

const offColor = require("off-color").offColor;
const postcss = require("postcss");
const plugin = require("../css-compile-variables");
const derive = require("../color").derive;
const run = require("./common").createTestRunner(plugin);

module.exports.tests = function tests() {
    return {
        "derived variables are resolved": async (assert) => {
            const inputCSS = `
            :root {
                --foo-color: #ff0;
            }
            div {
                background-color: var(--foo-color--lighter-50);
            }`;
            const transformedColor = offColor("#ff0").lighten(0.5);
            const outputCSS =
                inputCSS +
                `
                :root {
                    --foo-color--lighter-50: ${transformedColor.hex()};
                }
                `;
            await run( inputCSS, outputCSS, {derive}, assert);
        },

        "derived variables work with alias": async (assert) => {
            const inputCSS = `
            :root {
                --icon-color: #fff;
            }
            div {
                background: var(--icon-color--darker-20);
                --my-alias: var(--icon-color--darker-20);
                color: var(--my-alias--lighter-15);
            }`;
            const colorDarker = offColor("#fff").darken(0.2).hex();
            const aliasLighter = offColor(colorDarker).lighten(0.15).hex();
            const outputCSS = inputCSS + `:root {
                --icon-color--darker-20: ${colorDarker};
                --my-alias--lighter-15: ${aliasLighter};
            }
            `;
            await run(inputCSS, outputCSS, {derive}, assert);
        },

        "derived variable throws if base not present in config": async (assert) => {
            const css = `:root {
                color: var(--icon-color--darker-20);
            }`;
            assert.rejects(async () => await postcss([plugin({ variables: {} })]).process(css, { from: undefined, }));
        },

        "multiple derived variable in single declaration is parsed correctly": async (assert) => {
            const inputCSS = `
            :root {
                --foo-color: #ff0;
            }
            div {
                background-color: linear-gradient(var(--foo-color--lighter-50), var(--foo-color--darker-20));
            }`;
            const transformedColor1 = offColor("#ff0").lighten(0.5);
            const transformedColor2 = offColor("#ff0").darken(0.2);
            const outputCSS =
                inputCSS +
                `
                :root {
                    --foo-color--lighter-50: ${transformedColor1.hex()};
                    --foo-color--darker-20: ${transformedColor2.hex()};
                }
                `;
            await run( inputCSS, outputCSS, {derive}, assert);
        },

        "multiple aliased-derived variable in single declaration is parsed correctly": async (assert) => {
            const inputCSS = `
            :root {
                --foo-color: #ff0;
            }
            div {
                --my-alias: var(--foo-color);
                background-color: linear-gradient(var(--my-alias--lighter-50), var(--my-alias--darker-20));
            }`;
            const transformedColor1 = offColor("#ff0").lighten(0.5);
            const transformedColor2 = offColor("#ff0").darken(0.2);
            const outputCSS =
                inputCSS +
                `
                :root {
                    --my-alias--lighter-50: ${transformedColor1.hex()};
                    --my-alias--darker-20: ${transformedColor2.hex()};
                }
                `;
            await run( inputCSS, outputCSS, {derive}, assert);
        },

        "compiledVariables map is populated": async (assert) => {
            const compiledVariables = new Map();
            const inputCSS = `
            :root {
                --icon-color: #fff;
            }
            div {
                background: var(--icon-color--darker-20);
                --my-alias: var(--icon-color--darker-20);
                color: var(--my-alias--lighter-15);
            }`;
            await postcss([plugin({ derive, compiledVariables })]).process(inputCSS, { from: "/foo/bar/test.css", });
            const actualArray = compiledVariables.get("/foo/bar")["derived-variables"];
            const expectedArray = ["icon-color--darker-20", "my-alias=icon-color--darker-20", "my-alias--lighter-15"];
            assert.deepStrictEqual(actualArray.sort(), expectedArray.sort());
        },

        "derived variable are supported in urls": async (assert) => {
            const inputCSS = `
            :root {
                --foo-color: #ff0;
            }
            div {
                background-color: var(--foo-color--lighter-50);
                background: url("./foo/bar/icon.svg?primary=foo-color--darker-5");
            }
            a {
                background: url("foo/bar/icon.svg");
            }`;
            const transformedColorLighter = offColor("#ff0").lighten(0.5);
            const transformedColorDarker = offColor("#ff0").darken(0.05);
            const outputCSS =
                inputCSS +
                `
                :root {
                    --foo-color--lighter-50: ${transformedColorLighter.hex()};
                    --foo-color--darker-5: ${transformedColorDarker.hex()};
                }
                `;
            await run( inputCSS, outputCSS, {derive}, assert);
        }
    };
};
@ -1,71 +0,0 @@
/*
Copyright 2021 The Matrix.org Foundation C.I.C.

Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at

http://www.apache.org/licenses/LICENSE-2.0

Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*/

const plugin = require("../css-url-to-variables");
const run = require("./common").createTestRunner(plugin);
const postcss = require("postcss");

module.exports.tests = function tests() {
    return {
        "url is replaced with variable": async (assert) => {
            const inputCSS = `div {
                background: no-repeat center/80% url("../img/image.svg?primary=main-color--darker-20");
            }
            button {
                background: url("/home/foo/bar/cool.svg?primary=blue&secondary=green");
            }`;
            const outputCSS =
            `div {
                background: no-repeat center/80% var(--icon-url-0);
            }
            button {
                background: var(--icon-url-1);
            }`+
            `
            :root {
                --icon-url-0: url("../img/image.svg?primary=main-color--darker-20");
                --icon-url-1: url("/home/foo/bar/cool.svg?primary=blue&secondary=green");
            }
            `;
            await run(inputCSS, outputCSS, { }, assert);
        },
        "non svg urls without query params are not replaced": async (assert) => {
            const inputCSS = `div {
                background: no-repeat url("./img/foo/bar/image.png");
            }`;
            await run(inputCSS, inputCSS, {}, assert);
        },
        "map is populated with icons": async (assert) => {
            const compiledVariables = new Map();
            compiledVariables.set("/foo/bar", { "derived-variables": ["background-color--darker-20", "accent-color--lighter-15"] });
            const inputCSS = `div {
                background: no-repeat center/80% url("../img/image.svg?primary=main-color--darker-20");
            }
            button {
                background: url("/home/foo/bar/cool.svg?primary=blue&secondary=green");
            }`;
            const expectedObject = {
                "icon-url-0": "../img/image.svg?primary=main-color--darker-20",
                "icon-url-1": "/home/foo/bar/cool.svg?primary=blue&secondary=green",
            };
            await postcss([plugin({compiledVariables})]).process(inputCSS, { from: "/foo/bar/test.css", });
            const sharedVariable = compiledVariables.get("/foo/bar");
            assert.deepEqual(["background-color--darker-20", "accent-color--lighter-15"], sharedVariable["derived-variables"]);
            assert.deepEqual(expectedObject, sharedVariable["icon"]);
        }
    };
};
@ -1,4 +1,3 @@
set -e
if [ -z "$1" ]; then
    echo "provide a new version, current version is $(jq '.version' package.json)"
    exit 1
@ -1,19 +0,0 @@
{
  "name": "hydrogen-view-sdk",
  "description": "Embeddable matrix client library, including view components",
  "version": "0.1.0",
  "main": "./lib-build/hydrogen.cjs.js",
  "exports": {
    ".": {
      "import": "./lib-build/hydrogen.es.js",
      "require": "./lib-build/hydrogen.cjs.js"
    },
    "./paths/vite": "./paths/vite.js",
    "./style.css": "./asset-build/assets/theme-element-light.css",
    "./theme-element-light.css": "./asset-build/assets/theme-element-light.css",
    "./theme-element-dark.css": "./asset-build/assets/theme-element-dark.css",
    "./main.js": "./asset-build/assets/main.js",
    "./download-sandbox.html": "./asset-build/assets/download-sandbox.html",
    "./assets/*": "./asset-build/assets/*"
  }
}
@ -1,25 +0,0 @@
#!/bin/bash
# Exit whenever one of the commands fails with a non-zero exit code
set -e
set -o pipefail
# Enable extended globs so we can use the `!(filename)` glob syntax
shopt -s extglob

# Only remove the directory contents instead of the whole directory to maintain
# the `npm link`/`yarn link` symlink
rm -rf target/*
yarn run vite build -c vite.sdk-assets-config.js
yarn run vite build -c vite.sdk-lib-config.js
yarn tsc -p tsconfig-declaration.json
./scripts/sdk/create-manifest.js ./target/package.json
mkdir target/paths
# this doesn't work, the ?url imports need to be in the consuming project, so disable for now
# ./scripts/sdk/transform-paths.js ./src/platform/web/sdk/paths/vite.js ./target/paths/vite.js
cp doc/SDK.md target/README.md
pushd target/asset-build
rm index.html
popd
pushd target/asset-build/assets
# Remove all `*.wasm` and `*.js` files except for `main.js`
rm !(main).js *.wasm
popd
@ -1,23 +0,0 @@
#!/usr/bin/env node
const fs = require("fs");
const appManifest = require("../../package.json");
const baseSDKManifest = require("./base-manifest.json");
/*
Need to leave typescript type definitions out until the
typescript conversion is complete and all imports in the d.ts files
exist.
```
"types": "types/lib.d.ts"
```
*/
const mergeOptions = require('merge-options');

const manifestExtension = {
    devDependencies: undefined,
    scripts: undefined,
};

const manifest = mergeOptions(appManifest, baseSDKManifest, manifestExtension);
const json = JSON.stringify(manifest, undefined, 2);
const outFile = process.argv[2];
fs.writeFileSync(outFile, json, {encoding: "utf8"});
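// Annotation (not part of this changeset): the SDK build script above invokes this as
// `./scripts/sdk/create-manifest.js ./target/package.json`, i.e. the single argument is the path
// the merged package.json is written to.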
3 scripts/sdk/test/.gitignore vendored
@ -1,3 +0,0 @@
node_modules
dist
yarn.lock
2 scripts/sdk/test/deps.d.ts vendored
@ -1,2 +0,0 @@
// Keep TypeScript from complaining about hydrogen-view-sdk not having types yet
declare module "hydrogen-view-sdk";
@ -1,21 +0,0 @@
import * as hydrogenViewSdk from "hydrogen-view-sdk";
import downloadSandboxPath from 'hydrogen-view-sdk/download-sandbox.html?url';
import workerPath from 'hydrogen-view-sdk/main.js?url';
import olmWasmPath from '@matrix-org/olm/olm.wasm?url';
import olmJsPath from '@matrix-org/olm/olm.js?url';
import olmLegacyJsPath from '@matrix-org/olm/olm_legacy.js?url';
const assetPaths = {
    downloadSandbox: downloadSandboxPath,
    worker: workerPath,
    olm: {
        wasm: olmWasmPath,
        legacyBundle: olmLegacyJsPath,
        wasmBundle: olmJsPath
    }
};
import "hydrogen-view-sdk/assets/theme-element-light.css";

console.log('hydrogenViewSdk', hydrogenViewSdk);
console.log('assetPaths', assetPaths);

console.log('Entry ESM works ✅');
@ -1,12 +0,0 @@
<!DOCTYPE html>
<html lang="en">
  <head>
    <meta charset="UTF-8" />
    <meta name="viewport" content="width=device-width, initial-scale=1.0" />
    <title>Vite App</title>
  </head>
  <body>
    <div id="app" class="hydrogen"></div>
    <script type="module" src="./esm-entry.ts"></script>
  </body>
</html>
@ -1,8 +0,0 @@
{
  "name": "test-sdk",
  "version": "0.0.0",
  "description": "",
  "dependencies": {
    "hydrogen-view-sdk": "link:../../../target"
  }
}
@ -1,13 +0,0 @@
// Make sure the SDK can be used in a CommonJS environment.
// Usage: node scripts/sdk/test/test-sdk-in-commonjs-env.js
const hydrogenViewSdk = require('hydrogen-view-sdk');

// Test that the "exports" are available:
// Worker
require.resolve('hydrogen-view-sdk/main.js');
// Styles
require.resolve('hydrogen-view-sdk/assets/theme-element-light.css');
// Can access files in the assets/* directory
require.resolve('hydrogen-view-sdk/assets/main.js');

console.log('SDK works in CommonJS ✅');
@ -1,19 +0,0 @@
const { resolve } = require('path');
const { build } = require('vite');

async function main() {
    await build({
        outDir: './dist',
        build: {
            rollupOptions: {
                input: {
                    main: resolve(__dirname, 'index.html')
                }
            }
        }
    });

    console.log('SDK works in Vite build ✅');
}

main();
Some files were not shown because too many files have changed in this diff