Mirror of https://github.com/vector-im/hydrogen-web.git, synced 2024-12-22 11:05:03 +01:00

Merge pull request #1174 from element-hq/midhun/support-sw-everywhere
Support service worker in all environments

Commit 841ed2527c

.eslintrc.js (35 changed lines)
@@ -1,26 +1,27 @@
module.exports = {
"env": {
"browser": true,
"es6": true
env: {
browser: true,
es6: true,
},
"extends": "eslint:recommended",
"parserOptions": {
"ecmaVersion": 2020,
"sourceType": "module"
extends: "eslint:recommended",
parserOptions: {
ecmaVersion: 2020,
sourceType: "module",
},
"rules": {
rules: {
"no-console": "off",
"no-empty": "off",
"no-prototype-builtins": "off",
"no-unused-vars": "warn"
"no-unused-vars": "warn",
},
"globals": {
"DEFINE_VERSION": "readonly",
"DEFINE_GLOBAL_HASH": "readonly",
"DEFINE_PROJECT_DIR": "readonly",
globals: {
DEFINE_VERSION: "readonly",
DEFINE_GLOBAL_HASH: "readonly",
DEFINE_IS_SDK: "readonly",
DEFINE_PROJECT_DIR: "readonly",
// only available in sw.js
"DEFINE_UNHASHED_PRECACHED_ASSETS": "readonly",
"DEFINE_HASHED_PRECACHED_ASSETS": "readonly",
"DEFINE_HASHED_CACHED_ON_REQUEST_ASSETS": "readonly"
}
DEFINE_UNHASHED_PRECACHED_ASSETS: "readonly",
DEFINE_HASHED_PRECACHED_ASSETS: "readonly",
DEFINE_HASHED_CACHED_ON_REQUEST_ASSETS: "readonly",
},
};
scripts/build-plugins/service-worker.js
@@ -1,6 +1,5 @@
const fs = require('fs/promises');
const path = require('path');
const xxhash = require('xxhashjs');
const path = require("path");
const xxhash = require("xxhashjs");

function contentHash(str) {
var hasher = new xxhash.h32(0);
@@ -8,16 +7,21 @@ function contentHash(str) {
return hasher.digest();
}

function injectServiceWorker(swFile, findUnhashedFileNamesFromBundle, placeholdersPerChunk) {
function injectServiceWorker(
swFile,
findUnhashedFileNamesFromBundle,
placeholdersPerChunk
) {
const swName = path.basename(swFile);
let root;
let version;
let logger;
let mode;

return {
name: "hydrogen:injectServiceWorker",
apply: "build",
enforce: "post",

buildStart() {
this.emitFile({
type: "chunk",
@@ -25,39 +29,63 @@ function injectServiceWorker(swFile, findUnhashedFileNamesFromBundle, placeholde
id: swFile,
});
},
configResolved: config => {
root = config.root;

configResolved: (config) => {
mode = config.mode;
version = JSON.parse(config.define.DEFINE_VERSION); // unquote
logger = config.logger;
},
generateBundle: async function(options, bundle) {

generateBundle: async function (options, bundle) {
const otherUnhashedFiles = findUnhashedFileNamesFromBundle(bundle);
const unhashedFilenames = [swName].concat(otherUnhashedFiles);
const unhashedFileContentMap = unhashedFilenames.reduce((map, fileName) => {
const chunkOrAsset = bundle[fileName];
if (!chunkOrAsset) {
throw new Error("could not get content for uncached asset or chunk " + fileName);
}
map[fileName] = chunkOrAsset.source || chunkOrAsset.code;
return map;
}, {});
const unhashedFileContentMap = unhashedFilenames.reduce(
(map, fileName) => {
const chunkOrAsset = bundle[fileName];
if (!chunkOrAsset) {
throw new Error(
"could not get content for uncached asset or chunk " +
fileName
);
}
map[fileName] = chunkOrAsset.source || chunkOrAsset.code;
return map;
},
{}
);
const assets = Object.values(bundle);
const hashedFileNames = assets.map(o => o.fileName).filter(fileName => !unhashedFileContentMap[fileName]);
const globalHash = getBuildHash(hashedFileNames, unhashedFileContentMap);
const hashedFileNames = assets
.map((o) => o.fileName)
.filter((fileName) => !unhashedFileContentMap[fileName]);
const globalHash = getBuildHash(
hashedFileNames,
unhashedFileContentMap
);
const placeholderValues = {
DEFINE_GLOBAL_HASH: `"${globalHash}"`,
...getCacheFileNamePlaceholderValues(swName, unhashedFilenames, assets, placeholdersPerChunk)
...getCacheFileNamePlaceholderValues(
swName,
unhashedFilenames,
assets,
mode
),
};
replacePlaceholdersInChunks(assets, placeholdersPerChunk, placeholderValues);
replacePlaceholdersInChunks(
assets,
placeholdersPerChunk,
placeholderValues
);
logger.info(`\nBuilt ${version} (${globalHash})`);
}
},
};
}

function getBuildHash(hashedFileNames, unhashedFileContentMap) {
const unhashedHashes = Object.entries(unhashedFileContentMap).map(([fileName, content]) => {
return `${fileName}-${contentHash(Buffer.from(content))}`;
});
const unhashedHashes = Object.entries(unhashedFileContentMap).map(
([fileName, content]) => {
return `${fileName}-${contentHash(Buffer.from(content))}`;
}
);
const globalHashAssets = hashedFileNames.concat(unhashedHashes);
globalHashAssets.sort();
return contentHash(globalHashAssets.join(",")).toString();
@@ -66,60 +94,87 @@ function getBuildHash(hashedFileNames, unhashedFileContentMap) {
const NON_PRECACHED_JS = [
"hydrogen-legacy",
"olm_legacy.js",
// most environments don't need the worker
"main.js"
// most environments don't need the worker
"main.js",
];

function isPreCached(asset) {
const {name, fileName} = asset;
return name.endsWith(".svg") ||
name.endsWith(".png") ||
name.endsWith(".css") ||
name.endsWith(".wasm") ||
name.endsWith(".html") ||
// the index and vendor chunks don't have an extension in `name`, so check extension on `fileName`
fileName.endsWith(".js") && !NON_PRECACHED_JS.includes(path.basename(name));
const { name, fileName } = asset;
return (
name?.endsWith(".svg") ||
name?.endsWith(".png") ||
name?.endsWith(".css") ||
name?.endsWith(".wasm") ||
name?.endsWith(".html") ||
// the index and vendor chunks don't have an extension in `name`, so check extension on `fileName`
(fileName.endsWith(".js") &&
!NON_PRECACHED_JS.includes(path.basename(name)))
);
}

function getCacheFileNamePlaceholderValues(swName, unhashedFilenames, assets) {
function getCacheFileNamePlaceholderValues(
swName,
unhashedFilenames,
assets,
mode
) {
const unhashedPreCachedAssets = [];
const hashedPreCachedAssets = [];
const hashedCachedOnRequestAssets = [];

for (const asset of assets) {
const {name, fileName} = asset;
// the service worker should not be cached at all,
// it's how updates happen
if (fileName === swName) {
continue;
} else if (unhashedFilenames.includes(fileName)) {
unhashedPreCachedAssets.push(fileName);
} else if (isPreCached(asset)) {
hashedPreCachedAssets.push(fileName);
} else {
hashedCachedOnRequestAssets.push(fileName);
if (mode === "production") {
for (const asset of assets) {
const { name, fileName } = asset;
// the service worker should not be cached at all,
// it's how updates happen
if (fileName === swName) {
continue;
} else if (unhashedFilenames.includes(fileName)) {
unhashedPreCachedAssets.push(fileName);
} else if (isPreCached(asset)) {
hashedPreCachedAssets.push(fileName);
} else {
hashedCachedOnRequestAssets.push(fileName);
}
}
}

return {
DEFINE_UNHASHED_PRECACHED_ASSETS: JSON.stringify(unhashedPreCachedAssets),
DEFINE_UNHASHED_PRECACHED_ASSETS: JSON.stringify(
unhashedPreCachedAssets
),
DEFINE_HASHED_PRECACHED_ASSETS: JSON.stringify(hashedPreCachedAssets),
DEFINE_HASHED_CACHED_ON_REQUEST_ASSETS: JSON.stringify(hashedCachedOnRequestAssets)
}
DEFINE_HASHED_CACHED_ON_REQUEST_ASSETS: JSON.stringify(
hashedCachedOnRequestAssets
),
};
}

function replacePlaceholdersInChunks(assets, placeholdersPerChunk, placeholderValues) {
function replacePlaceholdersInChunks(
assets,
placeholdersPerChunk,
placeholderValues
) {
for (const [name, placeholderMap] of Object.entries(placeholdersPerChunk)) {
const chunk = assets.find(a => a.type === "chunk" && a.name === name);
const chunk = assets.find((a) => a.type === "chunk" && a.name === name);
if (!chunk) {
throw new Error(`could not find chunk ${name} to replace placeholders`);
throw new Error(
`could not find chunk ${name} to replace placeholders`
);
}
for (const [placeholderName, placeholderLiteral] of Object.entries(placeholderMap)) {
for (const [placeholderName, placeholderLiteral] of Object.entries(
placeholderMap
)) {
const replacedValue = placeholderValues[placeholderName];
const oldCode = chunk.code;
chunk.code = chunk.code.replaceAll(placeholderLiteral, replacedValue);
chunk.code = chunk.code.replaceAll(
placeholderLiteral,
replacedValue
);
if (chunk.code === oldCode) {
throw new Error(`Could not replace ${placeholderName} in ${name}, looking for literal ${placeholderLiteral}:\n${chunk.code}`);
throw new Error(
`Could not replace ${placeholderName} in ${name}, looking for literal ${placeholderLiteral}`
);
}
}
}
@@ -134,7 +189,7 @@ function replacePlaceholdersInChunks(assets, placeholdersPerChunk, placeholderVa
* transformation will touch them (minifying, ...) and we can do a
* string replacement still at the end of the build. */
function definePlaceholderValue(mode, name, devValue) {
if (mode === "production") {
if (mode === "production" || mode === "sdk") {
// note that `prompt(...)` will never be in the final output, it's replaced by the final value
// once we know at the end of the build what it is and just used as a temporary value during the build
// as something that will not be transformed.
@@ -145,13 +200,44 @@ function definePlaceholderValue(mode, name, devValue) {
}
}

/**
* Returns the short sha for the latest git commit
* @see https://stackoverflow.com/a/35778030
*/
function getLatestGitCommitHash() {
try {
return require("child_process")
.execSync("git rev-parse --short HEAD")
.toString()
.trim();
} catch {
return "could_not_fetch_sha";
}
}

function createPlaceholderValues(mode) {
return {
DEFINE_GLOBAL_HASH: definePlaceholderValue(mode, "DEFINE_GLOBAL_HASH", null),
DEFINE_UNHASHED_PRECACHED_ASSETS: definePlaceholderValue(mode, "UNHASHED_PRECACHED_ASSETS", []),
DEFINE_HASHED_PRECACHED_ASSETS: definePlaceholderValue(mode, "HASHED_PRECACHED_ASSETS", []),
DEFINE_HASHED_CACHED_ON_REQUEST_ASSETS: definePlaceholderValue(mode, "HASHED_CACHED_ON_REQUEST_ASSETS", []),
DEFINE_GLOBAL_HASH: definePlaceholderValue(
mode,
"DEFINE_GLOBAL_HASH",
`git commit: ${getLatestGitCommitHash()}`
),
DEFINE_UNHASHED_PRECACHED_ASSETS: definePlaceholderValue(
mode,
"UNHASHED_PRECACHED_ASSETS",
[]
),
DEFINE_HASHED_PRECACHED_ASSETS: definePlaceholderValue(
mode,
"HASHED_PRECACHED_ASSETS",
[]
),
DEFINE_HASHED_CACHED_ON_REQUEST_ASSETS: definePlaceholderValue(
mode,
"HASHED_CACHED_ON_REQUEST_ASSETS",
[]
),
};
}

module.exports = {injectServiceWorker, createPlaceholderValues};
module.exports = { injectServiceWorker, createPlaceholderValues };
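Note: the two halves of this plugin work together. definePlaceholderValue hands Vite's define option a temporary value that no minifier will rewrite (for production/sdk builds it is a prompt(...) call, per the comment in the code), and replacePlaceholdersInChunks swaps that literal for the real value once the bundle exists, via a plain string replaceAll. The following is a minimal sketch of that two-step idea in isolation; the literal and chunk code below are simplified stand-ins, not the plugin's actual data structures.

// Sketch only: the placeholder technique used by the plugin above, reduced to its core.
// Step 1: at config time, give the global a value that survives bundling untouched.
const placeholderLiteral = `prompt("DEFINE_GLOBAL_HASH")`; // hypothetical stand-in literal
const define = { DEFINE_GLOBAL_HASH: placeholderLiteral };

// Step 2: at the end of the build, string-replace the literal with the real value.
function replacePlaceholder(chunkCode, literal, finalValue) {
    const replaced = chunkCode.replaceAll(literal, finalValue);
    if (replaced === chunkCode) {
        // mirrors the error handling in replacePlaceholdersInChunks above
        throw new Error(`could not find ${literal} in chunk`);
    }
    return replaced;
}

// Example: a bundled chunk that still contains the temporary literal.
const bundledCode = `const hash = ${placeholderLiteral};`;
console.log(replacePlaceholder(bundledCode, placeholderLiteral, `"abc123"`)); // "abc123" is an example hash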
scripts/build-plugins/sw-dev.js (new file, 67 lines)
@@ -0,0 +1,67 @@
/*
Copyright 2024 The Matrix.org Foundation C.I.C.

Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at

http://www.apache.org/licenses/LICENSE-2.0

Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*/

import fs from "fs/promises";
import path from "path";

/**
* This rollup plugin makes it possible to use the serviceworker with the dev server.
* The service worker is located in `/src/platform/web/sw.js` and it contains some
* fields that need to be replaced with sensible values.
*
* We have a plugin that does this during build (see `./service-worker.js`).
* This plugin does more or less the same but for dev.
*/

export function transformServiceWorkerInDevServer() {
// See https://vitejs.dev/config/shared-options.html#define
// Comes from vite.config.js
let define;

return {
name: "hydrogen:transformServiceWorkerInDevServer",
apply: "serve",
enforce: "pre",

configResolved(resolvedConfig) {
// store the resolved config
define = resolvedConfig.define;
},

async load(id) {
if (!id.includes("sw.js")) return null;
let code = await readServiceWorkerCode();
for (const [key, value] of Object.entries(define)) {
code = code.replaceAll(key, value);
}
return code;
},
};
}

/**
* Read service worker code from `src/platform/web/sw.js`
* @returns code as string
*/
async function readServiceWorkerCode() {
const resolvedLocation = path.resolve(
__dirname,
"../../",
"./src/platform/web/sw.js"
);
const data = await fs.readFile(resolvedLocation, { encoding: "utf-8" });
return data;
}
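Note: in dev the load() hook above simply text-substitutes every key of the resolved define map into the service worker source before serving it. A small sketch of that effect, with example values only (the real values come from vite.common-config.js further down):

// Sketch: what load() effectively does to sw.js when running the dev server.
const define = { DEFINE_VERSION: '"develop"', DEFINE_GLOBAL_HASH: '"dev"' }; // example values
let code = "const version = DEFINE_VERSION; const hash = DEFINE_GLOBAL_HASH;";
for (const [key, value] of Object.entries(define)) {
    code = code.replaceAll(key, value);
}
console.log(code); // const version = "develop"; const hash = "dev";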
@@ -8,8 +8,8 @@ shopt -s extglob
# Only remove the directory contents instead of the whole directory to maintain
# the `npm link`/`yarn link` symlink
rm -rf target/*
yarn run vite build -c vite.sdk-assets-config.js
yarn run vite build -c vite.sdk-lib-config.js
yarn run vite build -c vite.sdk-assets-config.js --mode sdk
yarn run vite build -c vite.sdk-lib-config.js --mode sdk
yarn tsc -p tsconfig-declaration.json
./scripts/sdk/create-manifest.js ./target/package.json
mkdir target/paths
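Note: passing --mode sdk is what distinguishes the SDK build from a production app build. Vite forwards the mode into defineConfig(({ mode }) => ...), and the configs below hand it to commonOptions(mode) and createPlaceholderValues(mode). A minimal illustration of the pattern, using a hypothetical config file name:

// vite.some-config.js -- hypothetical name; the pattern matches the configs changed in this PR.
const { defineConfig } = require("vite");
const { commonOptions } = require("./vite.common-config.js");

export default defineConfig(({ mode }) => {
    // mode === "sdk" when built with: yarn run vite build -c vite.some-config.js --mode sdk
    return commonOptions(mode);
});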
@@ -103,8 +103,22 @@ export class ServiceWorkerHandler {
if (document.hidden) {
return;
}
const version = await this._sendAndWaitForReply("version", null, this._registration.waiting);
if (confirm(`Version ${version.version} (${version.buildHash}) is available. Reload to apply?`)) {
const version = await this._sendAndWaitForReply(
"version",
null,
this._registration.waiting
);
const isSdk = DEFINE_IS_SDK;
const isDev = this.version === "develop";
// Don't ask for confirmation when being used as an sdk/ when being run in dev server
if (
isSdk ||
isDev ||
confirm(
`Version ${version.version} (${version.buildHash}) is available. Reload to apply?`
)
) {
console.log("Service Worker has been updated!");
// prevent any fetch requests from going to the service worker
// from any client, so that it is not kept active
// when calling skipWaiting on the new one
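Note: DEFINE_IS_SDK is not a runtime lookup; it is injected through Vite's define option (set to "true" only for --mode sdk in vite.common-config.js below), so the bundler replaces the identifier with a boolean literal before this code ships. A small sketch of the effect, assuming an sdk build:

// Before bundling:  const isSdk = DEFINE_IS_SDK;
// After define:     const isSdk = true;
// which lets the update prompt be skipped statically in sdk and dev-server builds:
const isSdk = true;   // stand-in for the injected constant
const isDev = false;  // stand-in for `this.version === "develop"`
const shouldReload = isSdk || isDev || confirm("A new version is available. Reload to apply?");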
@@ -19,9 +19,7 @@
import {Platform} from "./Platform";
import configURL from "./assets/config.json?url";
import assetPaths from "./sdk/paths/vite";
if (import.meta.env.PROD) {
assetPaths.serviceWorker = "sw.js";
}
assetPaths.serviceWorker = "sw.js";
const platform = new Platform({
container: document.body,
assetPaths,
src/platform/web/public/icon.png (new binary file, 16 KiB; binary content not shown)
src/platform/web/sw.js
@@ -15,8 +15,8 @@ See the License for the specific language governing permissions and
limitations under the License.
*/

import NOTIFICATION_BADGE_ICON from "./assets/icon.png?url";
// replaced by the service worker build plugin
const NOTIFICATION_BADGE_ICON = "icon.png";

// These are replaced by rollup plugins
const UNHASHED_PRECACHED_ASSETS = DEFINE_UNHASHED_PRECACHED_ASSETS;
const HASHED_PRECACHED_ASSETS = DEFINE_HASHED_PRECACHED_ASSETS;
const HASHED_CACHED_ON_REQUEST_ASSETS = DEFINE_HASHED_CACHED_ON_REQUEST_ASSETS;
@@ -25,20 +26,24 @@ const unhashedCacheName = `hydrogen-assets-${DEFINE_GLOBAL_HASH}`;
const hashedCacheName = `hydrogen-assets`;
const mediaThumbnailCacheName = `hydrogen-media-thumbnails-v2`;

self.addEventListener('install', function(e) {
e.waitUntil((async () => {
const unhashedCache = await caches.open(unhashedCacheName);
await unhashedCache.addAll(UNHASHED_PRECACHED_ASSETS);
const hashedCache = await caches.open(hashedCacheName);
await Promise.all(HASHED_PRECACHED_ASSETS.map(async asset => {
if (!await hashedCache.match(asset)) {
await hashedCache.add(asset);
}
}));
})());
self.addEventListener("install", function (e) {
e.waitUntil(
(async () => {
const unhashedCache = await caches.open(unhashedCacheName);
await unhashedCache.addAll(UNHASHED_PRECACHED_ASSETS);
const hashedCache = await caches.open(hashedCacheName);
await Promise.all(
HASHED_PRECACHED_ASSETS.map(async (asset) => {
if (!(await hashedCache.match(asset))) {
await hashedCache.add(asset);
}
})
);
})()
);
});

self.addEventListener('activate', (event) => {
self.addEventListener("activate", (event) => {
// on a first page load/sw install,
// start using the service worker on all pages straight away
self.clients.claim();
@@ -49,26 +54,29 @@ async function purgeOldCaches() {
// remove any caches we don't know about
const keyList = await caches.keys();
for (const key of keyList) {
if (key !== unhashedCacheName && key !== hashedCacheName && key !== mediaThumbnailCacheName) {
if (
key !== unhashedCacheName &&
key !== hashedCacheName &&
key !== mediaThumbnailCacheName
) {
await caches.delete(key);
}
}
// remove the cache for any old hashed resource
const hashedCache = await caches.open(hashedCacheName);
const keys = await hashedCache.keys();
const hashedAssetURLs =
HASHED_PRECACHED_ASSETS
.concat(HASHED_CACHED_ON_REQUEST_ASSETS)
.map(a => new URL(a, self.registration.scope).href);
const hashedAssetURLs = HASHED_PRECACHED_ASSETS.concat(
HASHED_CACHED_ON_REQUEST_ASSETS
).map((a) => new URL(a, self.registration.scope).href);

for (const request of keys) {
if (!hashedAssetURLs.some(url => url === request.url)) {
if (!hashedAssetURLs.some((url) => url === request.url)) {
hashedCache.delete(request);
}
}
}

self.addEventListener('fetch', (event) => {
self.addEventListener("fetch", (event) => {
/*
service worker shouldn't handle xhr uploads because otherwise
the progress events won't fire.
@@ -95,12 +103,18 @@ let pendingFetchAbortController = new AbortController();

async function handleRequest(request) {
try {
if (request.url.includes("config.json") || /theme-.+\.json/.test(request.url)) {
if (
request.url.includes("config.json") ||
/theme-.+\.json/.test(request.url)
) {
return handleStaleWhileRevalidateRequest(request);
}
const url = new URL(request.url);
// rewrite / to /index.html so it hits the cache
if (url.origin === baseURL.origin && url.pathname === baseURL.pathname) {
if (
url.origin === baseURL.origin &&
url.pathname === baseURL.pathname
) {
request = new Request(new URL("index.html", baseURL.href));
}
let response = await readCache(request);
@@ -108,9 +122,15 @@ async function handleRequest(request) {
// use cors so the resource in the cache isn't opaque and uses up to 7mb
// https://developers.google.com/web/tools/chrome-devtools/progressive-web-apps?utm_source=devtools#opaque-responses
if (isCacheableThumbnail(url)) {
response = await fetch(request, {signal: pendingFetchAbortController.signal, mode: "cors", credentials: "omit"});
response = await fetch(request, {
signal: pendingFetchAbortController.signal,
mode: "cors",
credentials: "omit",
});
} else {
response = await fetch(request, {signal: pendingFetchAbortController.signal});
response = await fetch(request, {
signal: pendingFetchAbortController.signal,
});
}
await updateCache(request, response);
}
@@ -184,7 +204,7 @@ async function readCache(request) {
if (response) {
return response;
}

const url = new URL(request.url);
if (isCacheableThumbnail(url)) {
const mediaThumbnailCache = await caches.open(mediaThumbnailCacheName);
@@ -198,9 +218,10 @@ async function readCache(request) {
return response;
}

self.addEventListener('message', (event) => {
const reply = payload => event.source.postMessage({replyTo: event.data.id, payload});
const {replyTo} = event.data;
self.addEventListener("message", (event) => {
const reply = (payload) =>
event.source.postMessage({ replyTo: event.data.id, payload });
const { replyTo } = event.data;
if (replyTo) {
const resolve = pendingReplies.get(replyTo);
if (resolve) {
@@ -210,7 +231,10 @@ self.addEventListener('message', (event) => {
} else {
switch (event.data?.type) {
case "version":
reply({version: DEFINE_VERSION, buildHash: DEFINE_GLOBAL_HASH});
reply({
version: DEFINE_VERSION,
buildHash: DEFINE_GLOBAL_HASH,
});
break;
case "skipWaiting":
self.skipWaiting();
@@ -220,8 +244,10 @@ self.addEventListener('message', (event) => {
break;
case "closeSession":
event.waitUntil(
closeSession(event.data.payload.sessionId, event.source.id)
.finally(() => reply())
closeSession(
event.data.payload.sessionId,
event.source.id
).finally(() => reply())
);
break;
}
@@ -235,29 +261,40 @@ async function openClientFromNotif(event) {
console.log("clicked notif with tag", event.notification.tag);
return;
}
const {sessionId, roomId} = event.notification.data;
const { sessionId, roomId } = event.notification.data;
const sessionHash = `#/session/${sessionId}`;
const roomHash = `${sessionHash}/room/${roomId}`;
const clientWithSession = await findClient(async client => {
return await sendAndWaitForReply(client, "hasSessionOpen", {sessionId});
const clientWithSession = await findClient(async (client) => {
return await sendAndWaitForReply(client, "hasSessionOpen", {
sessionId,
});
});
if (clientWithSession) {
console.log("notificationclick: client has session open, showing room there");
console.log(
"notificationclick: client has session open, showing room there"
);
// use a message rather than clientWithSession.navigate here as this refreshes the page on chrome
clientWithSession.postMessage({type: "openRoom", payload: {roomId}});
if ('focus' in clientWithSession) {
clientWithSession.postMessage({
type: "openRoom",
payload: { roomId },
});
if ("focus" in clientWithSession) {
try {
await clientWithSession.focus();
} catch (err) { console.error(err); } // I've had this throw on me on Android
} catch (err) {
console.error(err);
} // I've had this throw on me on Android
}
} else if (self.clients.openWindow) {
console.log("notificationclick: no client found with session open, opening new window");
console.log(
"notificationclick: no client found with session open, opening new window"
);
const roomURL = new URL(`./${roomHash}`, baseURL).href;
await self.clients.openWindow(roomURL);
}
}

self.addEventListener('notificationclick', event => {
self.addEventListener("notificationclick", (event) => {
event.notification.close();
event.waitUntil(openClientFromNotif(event));
});
@@ -268,19 +305,30 @@ async function handlePushNotification(n) {
let sender = n.sender_display_name || n.sender;
if (sender && n.event_id) {
const roomId = n.room_id;
const hasFocusedClientOnRoom = !!await findClient(async client => {
const hasFocusedClientOnRoom = !!(await findClient(async (client) => {
if (client.visibilityState === "visible" && client.focused) {
return await sendAndWaitForReply(client, "hasRoomOpen", {sessionId, roomId});
return await sendAndWaitForReply(client, "hasRoomOpen", {
sessionId,
roomId,
});
}
});
}));
if (hasFocusedClientOnRoom) {
console.log("client is focused, room is open, don't show notif");
return;
}
const newMessageNotifs = Array.from(await self.registration.getNotifications({tag: NOTIF_TAG_NEW_MESSAGE}));
const notifsForRoom = newMessageNotifs.filter(n => n.data.roomId === roomId);
const hasMultiNotification = notifsForRoom.some(n => n.data.multi);
const hasSingleNotifsForRoom = newMessageNotifs.some(n => !n.data.multi);
const newMessageNotifs = Array.from(
await self.registration.getNotifications({
tag: NOTIF_TAG_NEW_MESSAGE,
})
);
const notifsForRoom = newMessageNotifs.filter(
(n) => n.data.roomId === roomId
);
const hasMultiNotification = notifsForRoom.some((n) => n.data.multi);
const hasSingleNotifsForRoom = newMessageNotifs.some(
(n) => !n.data.multi
);
const roomName = n.room_name || n.room_alias;
let multi = false;
let label;
@@ -304,9 +352,9 @@ async function handlePushNotification(n) {
}
await self.registration.showNotification(label, {
body,
data: {sessionId, roomId, multi},
data: { sessionId, roomId, multi },
tag: NOTIF_TAG_NEW_MESSAGE,
badge: NOTIFICATION_BADGE_ICON
badge: NOTIFICATION_BADGE_ICON,
});
}
// we could consider hiding previous notifications here based on the unread count
@@ -315,25 +363,31 @@ async function handlePushNotification(n) {
// when no client is visible, see https://goo.gl/yqv4Q4
}

self.addEventListener('push', event => {
self.addEventListener("push", (event) => {
event.waitUntil(handlePushNotification(event.data.json()));
});

async function closeSession(sessionId, requestingClientId) {
const clients = await self.clients.matchAll();
await Promise.all(clients.map(async client => {
if (client.id !== requestingClientId) {
await sendAndWaitForReply(client, "closeSession", {sessionId});
}
}));
await Promise.all(
clients.map(async (client) => {
if (client.id !== requestingClientId) {
await sendAndWaitForReply(client, "closeSession", {
sessionId,
});
}
})
);
}

async function haltRequests() {
// first ask all clients to block sending any more requests
const clients = await self.clients.matchAll({type: "window"});
await Promise.all(clients.map(client => {
return sendAndWaitForReply(client, "haltRequests");
}));
const clients = await self.clients.matchAll({ type: "window" });
await Promise.all(
clients.map((client) => {
return sendAndWaitForReply(client, "haltRequests");
})
);
// and only then abort the current requests
pendingFetchAbortController.abort();
}
@@ -343,15 +397,15 @@ let messageIdCounter = 0;
function sendAndWaitForReply(client, type, payload) {
messageIdCounter += 1;
const id = messageIdCounter;
const promise = new Promise(resolve => {
const promise = new Promise((resolve) => {
pendingReplies.set(id, resolve);
});
client.postMessage({type, id, payload});
client.postMessage({ type, id, payload });
return promise;
}

async function findClient(predicate) {
const clientList = await self.clients.matchAll({type: "window"});
const clientList = await self.clients.matchAll({ type: "window" });
for (const client of clientList) {
if (await predicate(client)) {
return client;
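Note: taken together, the fetch handler above is a cache-first strategy: look the request up in one of the named caches, fall back to the network, and write eligible responses back on the way out. A stripped-down sketch of that flow; the real handleRequest/readCache also rewrite "/" to index.html, special-case config and theme JSON, and only cache thumbnails and hashed-on-request assets.

// Simplified, self-contained sketch of the cache-first flow in the service worker above.
async function cacheFirst(request) {
    const cached = await caches.match(request); // the real code checks specific named caches
    if (cached) {
        return cached;
    }
    const response = await fetch(request);
    // "hydrogen-media-thumbnails-v2" matches mediaThumbnailCacheName in the code above
    const cache = await caches.open("hydrogen-media-thumbnails-v2");
    await cache.put(request, response.clone());
    return response;
}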
vite.common-config.js
@@ -1,59 +1,78 @@
const cssvariables = require("postcss-css-variables");
const {
createPlaceholderValues,
} = require("./scripts/build-plugins/service-worker");
const flexbugsFixes = require("postcss-flexbugs-fixes");
const compileVariables = require("./scripts/postcss/css-compile-variables");
const urlVariables = require("./scripts/postcss/css-url-to-variables");
const urlProcessor = require("./scripts/postcss/css-url-processor");
const fs = require("fs");
const path = require("path");
const manifest = require("./package.json");
const version = manifest.version;
const appManifest = require("./package.json");
const sdkManifest = require("./scripts/sdk/base-manifest.json");
const compiledVariables = new Map();
import {buildColorizedSVG as replacer} from "./scripts/postcss/svg-builder.mjs";
import {derive} from "./src/platform/web/theming/shared/color.mjs";
import { buildColorizedSVG as replacer } from "./scripts/postcss/svg-builder.mjs";
import { derive } from "./src/platform/web/theming/shared/color.mjs";

const commonOptions = {
logLevel: "warn",
publicDir: false,
server: {
hmr: false
},
resolve: {
alias: {
// these should only be imported by the base-x package in any runtime code
// and works in the browser with a Uint8Array shim,
// rather than including a ton of polyfill code
"safe-buffer": "./scripts/package-overrides/safe-buffer/index.js",
"buffer": "./scripts/package-overrides/buffer/index.js",
}
},
build: {
emptyOutDir: true,
assetsInlineLimit: 0,
polyfillModulePreload: false,
},
assetsInclude: ['**/config.json'],
define: {
DEFINE_VERSION: JSON.stringify(version),
DEFINE_GLOBAL_HASH: JSON.stringify(null),
},
css: {
postcss: {
plugins: [
compileVariables({derive, compiledVariables}),
urlVariables({compiledVariables}),
urlProcessor({replacer}),
// cssvariables({
// preserve: (declaration) => {
// return declaration.value.indexOf("var(--ios-") == 0;
// }
// }),
// the grid option creates some source fragment that causes the vite warning reporter to crash because
// it wants to log a warning on a line that does not exist in the source fragment.
// autoprefixer({overrideBrowserslist: ["IE 11"], grid: "no-autoplace"}),
flexbugsFixes()
]
}
}
const commonOptions = (mode) => {
const definePlaceholders = createPlaceholderValues(mode);
return {
logLevel: "warn",
publicDir: false,
server: {
hmr: false,
},
resolve: {
alias: {
// these should only be imported by the base-x package in any runtime code
// and works in the browser with a Uint8Array shim,
// rather than including a ton of polyfill code
"safe-buffer":
"./scripts/package-overrides/safe-buffer/index.js",
buffer: "./scripts/package-overrides/buffer/index.js",
},
},
build: {
emptyOutDir: true,
assetsInlineLimit: 0,
polyfillModulePreload: false,
},
assetsInclude: ["**/config.json"],
define: Object.assign(
{
DEFINE_VERSION: `"${getVersion(mode)}"`,
DEFINE_GLOBAL_HASH: JSON.stringify(null),
DEFINE_IS_SDK: mode === "sdk" ? "true" : "false",
DEFINE_PROJECT_DIR: JSON.stringify(__dirname),
},
definePlaceholders
),
css: {
postcss: {
plugins: [
compileVariables({ derive, compiledVariables }),
urlVariables({ compiledVariables }),
urlProcessor({ replacer }),
flexbugsFixes(),
],
},
},
};
};

/**
* Get the version for this build
* @param mode Vite mode for this build
* @returns string representing version
*/
function getVersion(mode) {
if (mode === "production") {
// This is an app build, so return the version from root/package.json
return appManifest.version;
} else if (mode === "sdk") {
// For the sdk build, return version from base-manifest.json
return sdkManifest.version;
} else {
// For the develop server
return "develop";
}
}

module.exports = { commonOptions, compiledVariables };
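Note: DEFINE_VERSION is passed to Vite's define option as an already-quoted string (`"${getVersion(mode)}"`), which is why the service-worker build plugin earlier calls JSON.parse(config.define.DEFINE_VERSION) to unquote it again. In other words (the version number here is only an example):

// getVersion(mode) returns the app version for production, the sdk manifest version for sdk,
// or "develop" for the dev server (see getVersion above).
const version = "0.3.8";            // hypothetical example value
const defined = `"${version}"`;     // what ends up in config.define.DEFINE_VERSION: '"0.3.8"'
console.log(JSON.parse(defined));   // back to the plain string "0.3.8"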
vite.config.js
@@ -1,15 +1,20 @@
const injectWebManifest = require("./scripts/build-plugins/manifest");
const {injectServiceWorker, createPlaceholderValues} = require("./scripts/build-plugins/service-worker");
const {
transformServiceWorkerInDevServer,
} = require("./scripts/build-plugins/sw-dev");
const themeBuilder = require("./scripts/build-plugins/rollup-plugin-build-themes");
const {defineConfig} = require('vite');
const mergeOptions = require('merge-options').bind({concatArrays: true});
const {commonOptions, compiledVariables} = require("./vite.common-config.js");
const { defineConfig } = require("vite");
const mergeOptions = require("merge-options").bind({ concatArrays: true });
const { commonOptions, compiledVariables } = require("./vite.common-config.js");

export default defineConfig(({mode}) => {
export default defineConfig(({ mode }) => {
const definePlaceholders = createPlaceholderValues(mode);
return mergeOptions(commonOptions, {
const options = commonOptions(mode);
return mergeOptions(options, {
root: "src/platform/web",
base: "./",
publicDir: "./public",
build: {
outDir: "../../../target",
minify: true,
@@ -19,18 +24,17 @@ export default defineConfig(({mode}) => {
assetFileNames: (asset) => {
if (asset.name.includes("config.json")) {
return "[name][extname]";
}
else if (asset.name.match(/theme-.+\.json/)) {
} else if (asset.name.match(/theme-.+\.json/)) {
return "assets/[name][extname]";
}
else {
} else {
return "assets/[name].[hash][extname]";
}
}
},
},
},
},
plugins: [
transformServiceWorkerInDevServer(),
themeBuilder({
themeConfig: {
themes: ["./src/platform/web/ui/css/themes/element"],
@@ -41,17 +45,19 @@ export default defineConfig(({mode}) => {
// important this comes before service worker
// otherwise the manifest and the icons it refers to won't be cached
injectWebManifest("assets/manifest.json"),
injectServiceWorker("./src/platform/web/sw.js", findUnhashedFileNamesFromBundle, {
// placeholders to replace at end of build by chunk name
index: {
DEFINE_GLOBAL_HASH: definePlaceholders.DEFINE_GLOBAL_HASH,
},
sw: definePlaceholders,
}),
injectServiceWorker(
"./src/platform/web/sw.js",
findUnhashedFileNamesFromBundle,
{
// placeholders to replace at end of build by chunk name
index: {
DEFINE_GLOBAL_HASH:
definePlaceholders.DEFINE_GLOBAL_HASH,
},
sw: definePlaceholders,
}
),
],
define: Object.assign({
DEFINE_PROJECT_DIR: JSON.stringify(__dirname)
}, definePlaceholders),
});
});
vite.sdk-assets-config.js
@@ -1,7 +1,8 @@
const path = require("path");
const mergeOptions = require('merge-options');
const mergeOptions = require("merge-options").bind({ concatArrays: true });
const themeBuilder = require("./scripts/build-plugins/rollup-plugin-build-themes");
const {commonOptions, compiledVariables} = require("./vite.common-config.js");
const { commonOptions, compiledVariables } = require("./vite.common-config.js");
const { defineConfig } = require("vite");

// These paths will be saved without their hash so they have a consisent path
// that we can reference in our `package.json` `exports`. And so people can import
@@ -13,33 +14,40 @@ const pathsToExport = [
"theme-element-dark.css",
];

export default mergeOptions(commonOptions, {
root: "src/",
base: "./",
build: {
outDir: "../target/asset-build/",
rollupOptions: {
output: {
assetFileNames: (chunkInfo) => {
// Get rid of the hash so we can consistently reference these
// files in our `package.json` `exports`. And so people can
// import them with a consistent path.
if(pathsToExport.includes(path.basename(chunkInfo.name))) {
return "assets/[name].[ext]";
}
export default defineConfig(({ mode }) => {
const options = commonOptions(mode);
return mergeOptions(options, {
root: "src/",
base: "./",
build: {
outDir: "../target/asset-build/",
rollupOptions: {
output: {
assetFileNames: (chunkInfo) => {
// Get rid of the hash so we can consistently reference these
// files in our `package.json` `exports`. And so people can
// import them with a consistent path.
if (
pathsToExport.includes(
path.basename(chunkInfo.name)
)
) {
return "assets/[name].[ext]";
}

return "assets/[name]-[hash][extname]";
}
}
}
},
plugins: [
themeBuilder({
themeConfig: {
themes: ["./src/platform/web/ui/css/themes/element"],
default: "element",
return "assets/[name]-[hash][extname]";
},
},
},
compiledVariables,
}),
],
},
plugins: [
themeBuilder({
themeConfig: {
themes: ["./src/platform/web/ui/css/themes/element"],
default: "element",
},
compiledVariables,
}),
],
});
});
vite.sdk-lib-config.js
@@ -1,7 +1,12 @@
const path = require("path");
const mergeOptions = require('merge-options');
const {commonOptions} = require("./vite.common-config.js");
const { defineConfig } = require("vite");
const mergeOptions = require("merge-options").bind({ concatArrays: true });
const { commonOptions } = require("./vite.common-config.js");
const manifest = require("./package.json");
const {
injectServiceWorker,
createPlaceholderValues,
} = require("./scripts/build-plugins/service-worker");

const externalDependencies = Object.keys(manifest.dependencies)
// just in case for safety in case fake-indexeddb wouldn't be
@@ -9,39 +14,38 @@ const externalDependencies = Object.keys(manifest.dependencies)
.concat(Object.keys(manifest.devDependencies))
// bundle bs58 because it uses buffer indirectly, which is a pain to bundle,
// so we don't annoy our library users with it.
.filter(d => d !== "bs58");
const moduleDir = path.join(__dirname, "node_modules");
.filter((d) => d !== "bs58");

export default mergeOptions(commonOptions, {
root: "src/",
build: {
lib: {
entry: path.resolve(__dirname, 'src/lib.ts'),
formats: ["cjs", "es"],
fileName: format => `hydrogen.${format}.js`,
},
minify: false,
sourcemap: false,
outDir: "../target/lib-build",
// don't bundle any dependencies, they should be imported/required
rollupOptions: {
external(id) {
return externalDependencies.some(d => id === d || id.startsWith(d + "/"));
export default defineConfig(({ mode }) => {
const options = commonOptions(mode);
const definePlaceholders = createPlaceholderValues(mode);
return mergeOptions(options, {
root: "src/",
plugins: [
injectServiceWorker("./src/platform/web/sw.js", () => [], {
lib: {
DEFINE_GLOBAL_HASH: definePlaceholders.DEFINE_GLOBAL_HASH,
},
sw: definePlaceholders,
}),
],
build: {
lib: {
entry: path.resolve(__dirname, "src/lib.ts"),
formats: ["cjs", "es"],
fileName: (format) => `hydrogen.${format}.js`,
},
/* don't bundle, so we can override imports per file at build time to replace components */
// output: {
// manualChunks: (id) => {
// if (id.startsWith(srcDir)) {
// const idPath = id.substring(srcDir.length);
// const pathWithoutExt = idPath.substring(0, idPath.lastIndexOf("."));
// return pathWithoutExt;
// } else {
// return "index";
// }
// },
// minifyInternalExports: false,
// chunkFileNames: "[format]/[name].js"
// }
}
},
minify: false,
sourcemap: false,
outDir: "../target/lib-build",
// don't bundle any dependencies, they should be imported/required
rollupOptions: {
external(id) {
return externalDependencies.some(
(d) => id === d || id.startsWith(d + "/")
);
},
},
},
});
});