Compare commits

...

27 Commits

Author SHA1 Message Date
Mononaut
7569c6a394 Avoid initializing rbf cache in worker threads 2023-08-24 22:00:25 +09:00
Mononaut
d67285c683 Avoid initializing redis in worker threads 2023-08-24 22:00:25 +09:00
Mononaut
2b7ac32c22 Parallelize block summary/cpfp indexing with worker threads 2023-08-24 22:00:25 +09:00
Mononaut
df596ab5bf Parallelize block indexing with worker threads 2023-08-24 22:00:25 +09:00
softsimon
f12fabe030 Merge pull request #4206 from mempool/dependabot/npm_and_yarn/frontend/frontend-angular-dependencies-f1c021e5b3
Bump the frontend-angular-dependencies group in /frontend with 12 updates
2023-08-24 10:17:12 +02:00
dependabot[bot]
25d75efa09 Bump the frontend-angular-dependencies group
Bumps the frontend-angular-dependencies group in /frontend with 12 updates:

| Package | From | To |
| --- | --- | --- |
| [@angular-devkit/build-angular](https://github.com/angular/angular-cli) | `16.1.4` | `16.2.0` |
| [@angular/animations](https://github.com/angular/angular/tree/HEAD/packages/animations) | `16.1.5` | `16.2.2` |
| [@angular/cli](https://github.com/angular/angular-cli) | `16.1.4` | `16.2.0` |
| [@angular/common](https://github.com/angular/angular/tree/HEAD/packages/common) | `16.1.5` | `16.2.2` |
| [@angular/compiler](https://github.com/angular/angular/tree/HEAD/packages/compiler) | `16.1.5` | `16.2.2` |
| [@angular/core](https://github.com/angular/angular/tree/HEAD/packages/core) | `16.1.5` | `16.2.2` |
| [@angular/forms](https://github.com/angular/angular/tree/HEAD/packages/forms) | `16.1.5` | `16.2.2` |
| [@angular/localize](https://github.com/angular/angular) | `16.1.5` | `16.2.2` |
| [@angular/platform-browser](https://github.com/angular/angular/tree/HEAD/packages/platform-browser) | `16.1.5` | `16.2.2` |
| [@angular/platform-browser-dynamic](https://github.com/angular/angular/tree/HEAD/packages/platform-browser-dynamic) | `16.1.5` | `16.2.2` |
| [@angular/platform-server](https://github.com/angular/angular/tree/HEAD/packages/platform-server) | `16.1.5` | `16.2.2` |
| [@angular/router](https://github.com/angular/angular/tree/HEAD/packages/router) | `16.1.5` | `16.2.2` |


Updates `@angular-devkit/build-angular` from 16.1.4 to 16.2.0
- [Release notes](https://github.com/angular/angular-cli/releases)
- [Changelog](https://github.com/angular/angular-cli/blob/main/CHANGELOG.md)
- [Commits](https://github.com/angular/angular-cli/compare/16.1.4...16.2.0)

Updates `@angular/animations` from 16.1.5 to 16.2.2
- [Release notes](https://github.com/angular/angular/releases)
- [Changelog](https://github.com/angular/angular/blob/main/CHANGELOG.md)
- [Commits](https://github.com/angular/angular/commits/16.2.2/packages/animations)

Updates `@angular/cli` from 16.1.4 to 16.2.0
- [Release notes](https://github.com/angular/angular-cli/releases)
- [Changelog](https://github.com/angular/angular-cli/blob/main/CHANGELOG.md)
- [Commits](https://github.com/angular/angular-cli/compare/16.1.4...16.2.0)

Updates `@angular/common` from 16.1.5 to 16.2.2
- [Release notes](https://github.com/angular/angular/releases)
- [Changelog](https://github.com/angular/angular/blob/main/CHANGELOG.md)
- [Commits](https://github.com/angular/angular/commits/16.2.2/packages/common)

Updates `@angular/compiler` from 16.1.5 to 16.2.2
- [Release notes](https://github.com/angular/angular/releases)
- [Changelog](https://github.com/angular/angular/blob/main/CHANGELOG.md)
- [Commits](https://github.com/angular/angular/commits/16.2.2/packages/compiler)

Updates `@angular/core` from 16.1.5 to 16.2.2
- [Release notes](https://github.com/angular/angular/releases)
- [Changelog](https://github.com/angular/angular/blob/main/CHANGELOG.md)
- [Commits](https://github.com/angular/angular/commits/16.2.2/packages/core)

Updates `@angular/forms` from 16.1.5 to 16.2.2
- [Release notes](https://github.com/angular/angular/releases)
- [Changelog](https://github.com/angular/angular/blob/main/CHANGELOG.md)
- [Commits](https://github.com/angular/angular/commits/16.2.2/packages/forms)

Updates `@angular/localize` from 16.1.5 to 16.2.2
- [Release notes](https://github.com/angular/angular/releases)
- [Changelog](https://github.com/angular/angular/blob/main/CHANGELOG.md)
- [Commits](https://github.com/angular/angular/compare/16.1.5...16.2.2)

Updates `@angular/platform-browser` from 16.1.5 to 16.2.2
- [Release notes](https://github.com/angular/angular/releases)
- [Changelog](https://github.com/angular/angular/blob/main/CHANGELOG.md)
- [Commits](https://github.com/angular/angular/commits/16.2.2/packages/platform-browser)

Updates `@angular/platform-browser-dynamic` from 16.1.5 to 16.2.2
- [Release notes](https://github.com/angular/angular/releases)
- [Changelog](https://github.com/angular/angular/blob/main/CHANGELOG.md)
- [Commits](https://github.com/angular/angular/commits/16.2.2/packages/platform-browser-dynamic)

Updates `@angular/platform-server` from 16.1.5 to 16.2.2
- [Release notes](https://github.com/angular/angular/releases)
- [Changelog](https://github.com/angular/angular/blob/main/CHANGELOG.md)
- [Commits](https://github.com/angular/angular/commits/16.2.2/packages/platform-server)

Updates `@angular/router` from 16.1.5 to 16.2.2
- [Release notes](https://github.com/angular/angular/releases)
- [Changelog](https://github.com/angular/angular/blob/main/CHANGELOG.md)
- [Commits](https://github.com/angular/angular/commits/16.2.2/packages/router)

---
updated-dependencies:
- dependency-name: "@angular-devkit/build-angular"
  dependency-type: direct:production
  update-type: version-update:semver-minor
  dependency-group: frontend-angular-dependencies
- dependency-name: "@angular/animations"
  dependency-type: direct:production
  update-type: version-update:semver-minor
  dependency-group: frontend-angular-dependencies
- dependency-name: "@angular/cli"
  dependency-type: direct:production
  update-type: version-update:semver-minor
  dependency-group: frontend-angular-dependencies
- dependency-name: "@angular/common"
  dependency-type: direct:production
  update-type: version-update:semver-minor
  dependency-group: frontend-angular-dependencies
- dependency-name: "@angular/compiler"
  dependency-type: direct:production
  update-type: version-update:semver-minor
  dependency-group: frontend-angular-dependencies
- dependency-name: "@angular/core"
  dependency-type: direct:production
  update-type: version-update:semver-minor
  dependency-group: frontend-angular-dependencies
- dependency-name: "@angular/forms"
  dependency-type: direct:production
  update-type: version-update:semver-minor
  dependency-group: frontend-angular-dependencies
- dependency-name: "@angular/localize"
  dependency-type: direct:production
  update-type: version-update:semver-minor
  dependency-group: frontend-angular-dependencies
- dependency-name: "@angular/platform-browser"
  dependency-type: direct:production
  update-type: version-update:semver-minor
  dependency-group: frontend-angular-dependencies
- dependency-name: "@angular/platform-browser-dynamic"
  dependency-type: direct:production
  update-type: version-update:semver-minor
  dependency-group: frontend-angular-dependencies
- dependency-name: "@angular/platform-server"
  dependency-type: direct:production
  update-type: version-update:semver-minor
  dependency-group: frontend-angular-dependencies
- dependency-name: "@angular/router"
  dependency-type: direct:production
  update-type: version-update:semver-minor
  dependency-group: frontend-angular-dependencies
...

Signed-off-by: dependabot[bot] <support@github.com>
2023-08-24 02:53:59 +00:00
wiz
b180fe694f ops: Tweak nginx config for gone paths and render expires 2023-08-23 01:09:11 +09:00
wiz
6c7d33f681 Merge pull request #4203 from mempool/mononaut/fix-unfurl-fallbacks
Fix unfurl fallback img routes
2023-08-23 00:31:48 +09:00
Mononaut
6946bc9da9 Fix unfurl fallback img routes 2023-08-23 00:11:24 +09:00
wiz
063d7e96a1 Merge pull request #4198 from mempool/mononaut/unfurler-fixes
More unfurler fixes
2023-08-22 20:26:03 +09:00
Mononaut
0d2df72621 Proxy unfurler resources to local instance 2023-08-22 02:16:46 +09:00
Mononaut
91c0a3e689 More unfurler logging 2023-08-22 02:16:36 +09:00
Mononaut
9ba7ab9975 More robust webgl handling 2023-08-22 00:07:44 +09:00
wiz
ead32a4a65 Merge pull request #4194 from TheBlueMatt/msrv
Drop MSRV to 1.63
2023-08-21 16:06:13 +09:00
Matt Corallo
122a721d7a sign cla 2023-08-21 05:04:33 +00:00
Matt Corallo
6088fffc09 Drop MSRV to 1.63
Debian bookworm ships with 1.63, and since the change is trivial,
there's little reason to not support people running mempool on
Debian.
2023-08-21 04:03:37 +00:00
Matt Corallo
fadc46f3b5 Add a missing [workspace] tag in rust-gbt Cargo to build in git
If cargo detects it's being run in a git tree, it looks for a
top-level `Cargo.toml`. When failing to find one, it errors out,
saying "current package believes it's in a workspace when it's not."

Instead, we add a `[workspace]` tag to let cargo know that rust-gbt
is not, in fact, in a rust workspace.
2023-08-21 04:00:52 +00:00
wiz
8e73e76312 ops: Tweak proxy_cache_valid time for unfurler/slurper cache 2023-08-20 02:36:34 +09:00
wiz
9d978ead6d ops: Set expires headers for unfurler/slurper responses 2023-08-20 00:01:22 +09:00
wiz
655eb31107 ops: Tweak nginx cache valid time for unfurler/slurper 2023-08-19 23:27:08 +09:00
wiz
15bd7bc068 Merge pull request #4187 from mempool/mononaut/unfurler-debugging
Unfurler debugging
2023-08-19 23:23:53 +09:00
wiz
65847547b9 ops: Tweak nginx cache for slurper 2023-08-19 23:23:28 +09:00
Mononaut
ed9d31686e Add cluster/tab to unfurler logs 2023-08-19 21:07:10 +09:00
Mononaut
7f2a459575 Fix SSR puppeteer page initialization 2023-08-19 21:05:28 +09:00
Mononaut
126e87a746 More more verbose unfurler logs 2023-08-19 19:35:52 +09:00
Mononaut
3bda5537d7 More verbose unfurler logs 2023-08-19 18:50:57 +09:00
Mononaut
df8b6cd53c Fix unfurler resource proxying 2023-08-19 18:40:11 +09:00
21 changed files with 2732 additions and 1167 deletions

View File

@@ -27,8 +27,8 @@ jobs:
node-version: ${{ matrix.node }}
registry-url: "https://registry.npmjs.org"
- name: Install 1.70.x Rust toolchain
uses: dtolnay/rust-toolchain@1.70
- name: Install 1.63.x Rust toolchain
uses: dtolnay/rust-toolchain@1.63
- name: Install
if: ${{ matrix.flavor == 'dev'}}

View File

@@ -6,6 +6,8 @@ authors = ["mononaut"]
edition = "2021"
publish = false
[workspace]
[lib]
crate-type = ["cdylib"]

View File

@@ -335,13 +335,15 @@ fn set_relatives(txid: u32, audit_pool: &mut AuditPool) {
let mut total_sigops: u32 = 0;
for ancestor_id in &ancestors {
let Some(ancestor) = audit_pool
if let Some(ancestor) = audit_pool
.get(*ancestor_id as usize)
.expect("audit_pool contains all ancestors") else { todo!() };
total_fee += ancestor.fee;
total_sigop_adjusted_weight += ancestor.sigop_adjusted_weight;
total_sigop_adjusted_vsize += ancestor.sigop_adjusted_vsize;
total_sigops += ancestor.sigops;
.expect("audit_pool contains all ancestors")
{
total_fee += ancestor.fee;
total_sigop_adjusted_weight += ancestor.sigop_adjusted_weight;
total_sigop_adjusted_vsize += ancestor.sigop_adjusted_vsize;
total_sigops += ancestor.sigops;
} else { todo!() };
}
if let Some(Some(tx)) = audit_pool.get_mut(txid as usize) {

View File

@@ -29,6 +29,10 @@ import websocketHandler from './websocket-handler';
import redisCache from './redis-cache';
import rbfCache from './rbf-cache';
import { calcBitsDifference } from './difficulty-adjustment';
import os from 'os';
import { Worker } from 'worker_threads';
import path from 'path';
class Blocks {
private blocks: BlockExtended[] = [];
@@ -406,6 +410,8 @@ class Blocks {
return;
}
const workerPool: Worker[] = [];
try {
// Get all indexed block hash
const indexedBlocks = await blocksRepository.$getIndexedBlocks();
@@ -420,39 +426,67 @@ class Blocks {
let newlyIndexed = 0;
let totalIndexed = indexedBlockSummariesHashesArray.length;
let indexedThisRun = 0;
let timer = Date.now() / 1000;
const startedAt = Date.now() / 1000;
let timer = Date.now();
const startedAt = Date.now();
for (const block of indexedBlocks) {
if (indexedBlockSummariesHashes[block.hash] === true) {
continue;
}
const blocksToIndex = indexedBlocks.filter(block => !indexedBlockSummariesHashes[block.hash]);
// Logging
const elapsedSeconds = (Date.now() / 1000) - timer;
if (elapsedSeconds > 5) {
const runningFor = (Date.now() / 1000) - startedAt;
const blockPerSeconds = indexedThisRun / elapsedSeconds;
const progress = Math.round(totalIndexed / indexedBlocks.length * 10000) / 100;
logger.debug(`Indexing block summary for #${block.height} | ~${blockPerSeconds.toFixed(2)} blocks/sec | total: ${totalIndexed}/${indexedBlocks.length} (${progress}%) | elapsed: ${runningFor.toFixed(2)} seconds`, logger.tags.mining);
timer = Date.now() / 1000;
indexedThisRun = 0;
}
if (!blocksToIndex.length) {
return;
}
const numWorkers = Math.max(1, os.cpus().length - 1);
for (let i = 0; i < numWorkers; i++) {
workerPool.push(new Worker(path.resolve(__dirname, '../index-workers/block-summary-worker.js')));
}
if (config.MEMPOOL.BACKEND === 'esplora') {
const txs = (await bitcoinApi.$getTxsForBlock(block.hash)).map(tx => transactionUtils.extendTransaction(tx));
const cpfpSummary = await this.$indexCPFP(block.hash, block.height, txs);
await this.$getStrippedBlockTransactions(block.hash, true, true, cpfpSummary, block.height); // This will index the block summary
const promises: Promise<void>[] = [];
// This function assigns a task to a worker
const assignTask = (worker: Worker): boolean => {
if (blocksToIndex.length === 0) {
return false;
} else {
await this.$getStrippedBlockTransactions(block.hash, true, true); // This will index the block summary
worker.postMessage(blocksToIndex.shift());
return true;
}
};
// Logging
const handleResult = (height: number): void => {
indexedThisRun++;
totalIndexed++;
newlyIndexed++;
const elapsed = Date.now() - timer;
if (elapsed > 5000) {
const runningFor = Date.now() - startedAt;
const blockPerSeconds = indexedThisRun / (elapsed / 1000);
const progress = Math.round(totalIndexed / indexedBlocks.length * 10000) / 100;
logger.debug(`Indexing block summary for #${height} | ~${blockPerSeconds.toFixed(2)} blocks/sec | total: ${totalIndexed}/${indexedBlocks.length} (${progress}%) | elapsed: ${(runningFor / 1000).toFixed(2)} seconds`, logger.tags.mining);
timer = Date.now();
indexedThisRun = 0;
}
};
// Start a task on each worker
for (const worker of workerPool) {
promises.push(new Promise((resolve, reject) => {
worker.removeAllListeners();
worker.on('message', (result) => {
// Handle the result, then assign a new task to the worker
handleResult(result);
if (!assignTask(worker)) {
resolve();
};
});
worker.on('error', reject);
if (!assignTask(worker)) {
resolve();
}
}));
}
await Promise.all(promises);
if (newlyIndexed > 0) {
logger.notice(`Blocks summaries indexing completed: indexed ${newlyIndexed} blocks`, logger.tags.mining);
} else {
@@ -461,6 +495,14 @@ class Blocks {
} catch (e) {
logger.err(`Blocks summaries indexing failed. Trying again in 10 seconds. Reason: ${(e instanceof Error ? e.message : e)}`, logger.tags.mining);
throw e;
} finally {
for (const worker of workerPool) {
if (worker) {
// clean up the workers
worker.removeAllListeners();
worker.terminate();
}
}
}
}
@@ -557,6 +599,7 @@ class Blocks {
* [INDEXING] Index all blocks metadata for the mining dashboard
*/
public async $generateBlockDatabase(): Promise<boolean> {
const workerPool: Worker[] = [];
try {
const blockchainInfo = await bitcoinClient.getBlockchainInfo();
let currentBlockHeight = blockchainInfo.blocks;
@@ -575,12 +618,18 @@ class Blocks {
let totalIndexed = await blocksRepository.$blockCountBetweenHeight(currentBlockHeight, lastBlockToIndex);
let indexedThisRun = 0;
let newlyIndexed = 0;
const startedAt = Date.now() / 1000;
let timer = Date.now() / 1000;
const startedAt = Date.now();
let timer = Date.now();
if (currentBlockHeight >= lastBlockToIndex) {
const numWorkers = Math.max(1, os.cpus().length - 1);
for (let i = 0; i < numWorkers; i++) {
workerPool.push(new Worker(path.resolve(__dirname, '../index-workers/block-worker.js')));
}
}
while (currentBlockHeight >= lastBlockToIndex) {
const endBlock = Math.max(0, lastBlockToIndex, currentBlockHeight - chunkSize + 1);
const missingBlockHeights: number[] = await blocksRepository.$getMissingBlocksBetweenHeights(
currentBlockHeight, endBlock);
if (missingBlockHeights.length <= 0) {
@@ -590,33 +639,65 @@ class Blocks {
logger.info(`Indexing ${missingBlockHeights.length} blocks from #${currentBlockHeight} to #${endBlock}`, logger.tags.mining);
for (const blockHeight of missingBlockHeights) {
if (blockHeight < lastBlockToIndex) {
break;
const promises: Promise<void>[] = [];
// This function assigns a task to a worker
const assignTask = (worker: Worker): boolean => {
if (missingBlockHeights.length === 0) {
return false;
} else {
worker.postMessage({ height: missingBlockHeights.shift() });
return true;
}
++indexedThisRun;
++totalIndexed;
const elapsedSeconds = (Date.now() / 1000) - timer;
if (elapsedSeconds > 5 || blockHeight === lastBlockToIndex) {
const runningFor = (Date.now() / 1000) - startedAt;
const blockPerSeconds = indexedThisRun / elapsedSeconds;
};
const handleResult = (height: number): void => {
indexedThisRun++;
totalIndexed++;
newlyIndexed++;
const elapsed = Date.now() - timer;
if (elapsed > 5000 || height === lastBlockToIndex) {
const runningFor = Date.now() - startedAt;
const blockPerSeconds = indexedThisRun / (elapsed / 1000);
const progress = Math.round(totalIndexed / indexingBlockAmount * 10000) / 100;
logger.debug(`Indexing block #${blockHeight} | ~${blockPerSeconds.toFixed(2)} blocks/sec | total: ${totalIndexed}/${indexingBlockAmount} (${progress.toFixed(2)}%) | elapsed: ${runningFor.toFixed(2)} seconds`, logger.tags.mining);
timer = Date.now() / 1000;
logger.debug(`Indexing block #${height} | ~${blockPerSeconds.toFixed(2)} blocks/sec | total: ${totalIndexed}/${indexingBlockAmount} (${progress.toFixed(2)}%) | elapsed: ${(runningFor / 1000).toFixed(2)} seconds`, logger.tags.mining);
timer = Date.now();
indexedThisRun = 0;
loadingIndicators.setProgress('block-indexing', progress, false);
}
const blockHash = await bitcoinApi.$getBlockHash(blockHeight);
const block: IEsploraApi.Block = await bitcoinApi.$getBlock(blockHash);
const transactions = await this.$getTransactionsExtended(blockHash, block.height, true, null, true);
const blockExtended = await this.$getBlockExtended(block, transactions);
};
newlyIndexed++;
await blocksRepository.$saveBlockInDatabase(blockExtended);
// Start a task on each worker
for (const worker of workerPool) {
promises.push(new Promise((resolve, reject) => {
worker.removeAllListeners();
worker.on('message', (result) => {
// Handle the result, then assign a new task to the worker
handleResult(result);
if (!assignTask(worker)) {
resolve();
};
});
worker.on('error', reject);
if (!assignTask(worker)) {
resolve();
}
}));
}
await Promise.all(promises);
currentBlockHeight -= chunkSize;
}
for (const worker of workerPool) {
if (worker) {
// clean up the workers
worker.removeAllListeners();
worker.terminate();
}
}
if (newlyIndexed > 0) {
logger.notice(`Block indexing completed: indexed ${newlyIndexed} blocks`, logger.tags.mining);
} else {
@@ -627,6 +708,14 @@ class Blocks {
logger.err('Block indexing failed. Trying again in 10 seconds. Reason: ' + (e instanceof Error ? e.message : e), logger.tags.mining);
loadingIndicators.setProgress('block-indexing', 100);
throw e;
} finally {
for (const worker of workerPool) {
if (worker) {
// clean up the workers
worker.removeAllListeners();
worker.terminate();
}
}
}
return await BlocksRepository.$validateChain();

View File

@@ -53,7 +53,7 @@ class RbfCache {
private expiring: Map<string, number> = new Map();
private cacheQueue: CacheEvent[] = [];
constructor() {
public init(): void {
setInterval(this.cleanup.bind(this), 1000 * 60 * 10);
}

View File

@@ -23,24 +23,21 @@ class RedisCache {
private cacheQueue: MempoolTransactionExtended[] = [];
private txFlushLimit: number = 10000;
constructor() {
if (config.REDIS.ENABLED) {
const redisConfig = {
socket: {
path: config.REDIS.UNIX_SOCKET_PATH
},
database: NetworkDB[config.MEMPOOL.NETWORK],
};
this.client = createClient(redisConfig);
this.client.on('error', (e) => {
logger.err(`Error in Redis client: ${e instanceof Error ? e.message : e}`);
});
this.$ensureConnected();
}
}
private async $ensureConnected(): Promise<void> {
if (!this.connected && config.REDIS.ENABLED) {
if (!this.client) {
const redisConfig = {
socket: {
path: config.REDIS.UNIX_SOCKET_PATH
},
database: NetworkDB[config.MEMPOOL.NETWORK],
};
this.client = createClient(redisConfig);
this.client.on('error', (e) => {
logger.err(`Error in Redis client: ${e instanceof Error ? e.message : e}`);
});
}
return this.client.connect().then(async () => {
this.connected = true;
logger.info(`Redis client connected`);

View File

@@ -0,0 +1,38 @@
import { parentPort } from 'worker_threads';
import bitcoinApi from '../api/bitcoin/bitcoin-api-factory';
import blocks from '../api/blocks';
import config from '../config';
import transactionUtils from '../api/transaction-utils';
import bitcoinClient from '../api/bitcoin/bitcoin-client';
if (parentPort) {
parentPort.on('message', async ({ hash, height }) => {
if (hash != null && height != null) {
await indexBlockSummary(hash, height);
}
if (parentPort) {
parentPort.postMessage(height);
}
});
}
async function indexBlockSummary(hash: string, height: number): Promise<void> {
let txs;
if (config.MEMPOOL.BACKEND === 'esplora') {
txs = (await bitcoinApi.$getTxsForBlock(hash)).map(tx => transactionUtils.extendTransaction(tx));
} else {
const block = await bitcoinClient.getBlock(hash, 2);
txs = block.tx.map(tx => {
tx.fee = Math.round(tx.fee * 100_000_000);
tx.vout.forEach((vout) => {
vout.value = Math.round(vout.value * 100000000);
});
tx.vsize = Math.round(tx.weight / 4); // required for backwards compatibility
return tx;
});
}
const cpfpSummary = await blocks.$indexCPFP(hash, height, txs);
await blocks.$getStrippedBlockTransactions(hash, true, true, cpfpSummary, height); // This will index the block summary
}

View File

@@ -0,0 +1,25 @@
import { parentPort } from 'worker_threads';
import bitcoinApi from '../api/bitcoin/bitcoin-api-factory';
import blocksRepository from '../repositories/BlocksRepository';
import blocks from '../api/blocks';
import { IEsploraApi } from '../api/bitcoin/esplora-api.interface';
if (parentPort) {
parentPort.on('message', async (params) => {
if (params.height != null) {
await indexBlock(params.height);
}
if (parentPort) {
parentPort.postMessage(params.height);
}
});
}
async function indexBlock(blockHeight: number): Promise<void> {
const blockHash = await bitcoinApi.$getBlockHash(blockHeight);
const block: IEsploraApi.Block = await bitcoinApi.$getBlock(blockHash);
const transactions = await blocks['$getTransactionsExtended'](blockHash, block.height, true, null, true);
const blockExtended = await blocks['$getBlockExtended'](block, transactions);
await blocksRepository.$saveBlockInDatabase(blockExtended);
}

View File

@@ -43,6 +43,7 @@ import { AxiosError } from 'axios';
import v8 from 'v8';
import { formatBytes, getBytesUnit } from './utils/format';
import redisCache from './api/redis-cache';
import rbfCache from './api/rbf-cache';
class Server {
private wss: WebSocket.Server | undefined;
@@ -107,6 +108,8 @@ class Server {
}
}
rbfCache.init();
this.app
.use((req: Request, res: Response, next: NextFunction) => {
res.setHeader('Access-Control-Allow-Origin', '*');

View File

@@ -0,0 +1,3 @@
I hereby accept the terms of the Contributor License Agreement in the CONTRIBUTING.md file with sha256 hash c80c5ee4c71c5a76a1f6cd35339bd0c45b25b491933ea7b02a66470e9f43a6fd.
Signed: TheBlueMatt

File diff suppressed because it is too large Load Diff

View File

@@ -61,18 +61,18 @@
"cypress:run:ci:staging": "node update-config.js TESTNET_ENABLED=true SIGNET_ENABLED=true LIQUID_ENABLED=true BISQ_ENABLED=true ITEMS_PER_PAGE=25 && npm run generate-config && start-server-and-test serve:local-staging 4200 cypress:run:record"
},
"dependencies": {
"@angular-devkit/build-angular": "^16.1.4",
"@angular/animations": "^16.1.5",
"@angular/cli": "^16.1.4",
"@angular/common": "^16.1.5",
"@angular/compiler": "^16.1.5",
"@angular/core": "^16.1.5",
"@angular/forms": "^16.1.5",
"@angular/localize": "^16.1.5",
"@angular/platform-browser": "^16.1.5",
"@angular/platform-browser-dynamic": "^16.1.5",
"@angular/platform-server": "^16.1.5",
"@angular/router": "^16.1.5",
"@angular-devkit/build-angular": "^16.2.0",
"@angular/animations": "^16.2.2",
"@angular/cli": "^16.2.0",
"@angular/common": "^16.2.2",
"@angular/compiler": "^16.2.2",
"@angular/core": "^16.2.2",
"@angular/forms": "^16.2.2",
"@angular/localize": "^16.2.2",
"@angular/platform-browser": "^16.2.2",
"@angular/platform-browser-dynamic": "^16.2.2",
"@angular/platform-server": "^16.2.2",
"@angular/router": "^16.2.2",
"@fortawesome/angular-fontawesome": "~0.13.0",
"@fortawesome/fontawesome-common-types": "~6.4.0",
"@fortawesome/fontawesome-svg-core": "~6.4.0",

View File

@@ -70,9 +70,11 @@ export class BlockOverviewGraphComponent implements AfterViewInit, OnDestroy, On
this.canvas.nativeElement.addEventListener('webglcontextlost', this.handleContextLost, false);
this.canvas.nativeElement.addEventListener('webglcontextrestored', this.handleContextRestored, false);
this.gl = this.canvas.nativeElement.getContext('webgl');
this.initCanvas();
this.resizeCanvas();
if (this.gl) {
this.initCanvas();
this.resizeCanvas();
}
}
ngOnChanges(changes): void {
@@ -195,10 +197,16 @@ export class BlockOverviewGraphComponent implements AfterViewInit, OnDestroy, On
cancelAnimationFrame(this.animationFrameRequest);
this.animationFrameRequest = null;
this.running = false;
this.gl = null;
}
handleContextRestored(event): void {
this.initCanvas();
if (this.canvas?.nativeElement) {
this.gl = this.canvas.nativeElement.getContext('webgl');
if (this.gl) {
this.initCanvas();
}
}
}
@HostListener('window:resize', ['$event'])
@@ -224,6 +232,9 @@ export class BlockOverviewGraphComponent implements AfterViewInit, OnDestroy, On
}
compileShader(src, type): WebGLShader {
if (!this.gl) {
return;
}
const shader = this.gl.createShader(type);
this.gl.shaderSource(shader, src);
@@ -237,6 +248,9 @@ export class BlockOverviewGraphComponent implements AfterViewInit, OnDestroy, On
}
buildShaderProgram(shaderInfo): WebGLProgram {
if (!this.gl) {
return;
}
const program = this.gl.createProgram();
shaderInfo.forEach((desc) => {
@@ -273,7 +287,7 @@ export class BlockOverviewGraphComponent implements AfterViewInit, OnDestroy, On
now = performance.now();
}
// skip re-render if there's no change to the scene
if (this.scene) {
if (this.scene && this.gl) {
/* SET UP SHADER UNIFORMS */
// screen dimensions
this.gl.uniform2f(this.gl.getUniformLocation(this.shaderProgram, 'screenSize'), this.displayWidth, this.displayHeight);

View File

@@ -90,7 +90,7 @@ export const download = (href, name) => {
export function detectWebGL(): boolean {
const canvas = document.createElement('canvas');
const gl = canvas.getContext('webgl') || canvas.getContext('experimental-webgl');
const gl = canvas.getContext('webgl');
return !!(gl && gl instanceof WebGLRenderingContext);
}

View File

@@ -530,6 +530,7 @@ osCertbotDryRun()
zfsCreateFilesystems()
{
zfs create -o "mountpoint=/backup" "${ZPOOL}/backup"
zfs create -o "mountpoint=/var/cache/nginx" "${ZPOOL}/cache"
zfs create -o "mountpoint=${ELEMENTS_HOME}" "${ZPOOL}/elements"
zfs create -o "mountpoint=${BITCOIN_HOME}" "${ZPOOL}/bitcoin"
@@ -1852,8 +1853,6 @@ chown "${MEMPOOL_USER}:${MEMPOOL_GROUP}" "${MEMPOOL_MYSQL_CREDENTIALS}"
echo "[*] Adding Nginx configuration"
osSudo "${ROOT_USER}" install -c -o "${ROOT_USER}" -g "${ROOT_GROUP}" -m 644 "${MEMPOOL_HOME}/${MEMPOOL_REPO_NAME}/production/nginx/nginx.conf" "${NGINX_CONFIGURATION}"
mkdir -p /var/cache/nginx/services /var/cache/nginx/api
chown "${NGINX_USER}:${NGINX_GROUP}" /var/cache/nginx/services /var/cache/nginx/api
ln -s "${MEMPOOL_HOME}/mempool" "${NGINX_ETC_FOLDER}/mempool"
osSudo "${ROOT_USER}" sed -i.orig "s!__NGINX_USER__!${NGINX_USER}!" "${NGINX_CONFIGURATION}"
osSudo "${ROOT_USER}" sed -i.orig "s!__NGINX_ETC_FOLDER__!${NGINX_ETC_FOLDER}!" "${NGINX_CONFIGURATION}"

View File

@@ -1,7 +1,7 @@
# proxy cache
proxy_cache_path /var/cache/nginx/api keys_zone=api:20m levels=1:2 inactive=600s max_size=200m;
proxy_cache_path /var/cache/nginx/services keys_zone=services:20m levels=1:2 inactive=600s max_size=200m;
proxy_cache_path /var/cache/nginx/markets keys_zone=markets:20m levels=1:2 inactive=600s max_size=200m;
proxy_cache_path /var/cache/nginx/unfurler keys_zone=unfurler:20m levels=1:2 inactive=600s max_size=200m;
proxy_cache_path /var/cache/nginx/slurper keys_zone=slurper:20m levels=1:2 inactive=600s max_size=200m;
types_hash_max_size 2048;
proxy_cache_path /var/cache/nginx/api keys_zone=api:20m levels=1:2 inactive=365d max_size=2000m;
proxy_cache_path /var/cache/nginx/unfurler keys_zone=unfurler:20m levels=1:2 inactive=365d max_size=2000m;
proxy_cache_path /var/cache/nginx/slurper keys_zone=slurper:20m levels=1:2 inactive=365d max_size=5000m;
proxy_cache_path /var/cache/nginx/services keys_zone=services:20m levels=1:2 inactive=365d max_size=100m;
proxy_cache_path /var/cache/nginx/markets keys_zone=markets:20m levels=1:2 inactive=365d max_size=100m;
types_hash_max_size 4096;

View File

@@ -97,6 +97,14 @@ location ~* ^/.+\..+\.(js|css)$ {
expires 1y;
}
# old stuff is gone
location /explorer/ {
return 410;
}
location /sitemap/ {
return 410;
}
# unfurl preview
location /preview {
try_files /$lang/$uri $uri /en-US/$uri /en-US/index.html =404;
@@ -105,7 +113,6 @@ location /preview {
# unfurl renderer
location ^~ /render {
try_files /dev/null @mempool-space-unfurler;
expires 10m;
}
# unfurl handler
location /unfurl/ {
@@ -136,8 +143,10 @@ location @mempool-space-unfurler {
proxy_cache_background_update on;
proxy_cache_use_stale updating;
proxy_cache unfurler;
proxy_cache_valid 200 10m;
proxy_cache_valid 200 1h; # will re-render page if older than this
proxy_redirect off;
expires 1d;
}
location @mempool-space-slurper {
@@ -151,6 +160,8 @@ location @mempool-space-slurper {
proxy_cache_background_update on;
proxy_cache_use_stale updating;
proxy_cache slurper;
proxy_cache_valid 200 10m;
proxy_cache_valid 200 1h; # will re-render page if older than this
proxy_redirect off;
expires 10d;
}

View File

@@ -11,12 +11,13 @@ const BROWSER_TIMEOUT = 8000;
const maxAgeMs = (config.PUPPETEER.MAX_PAGE_AGE || (24 * 60 * 60)) * 1000;
const maxConcurrency = config.PUPPETEER.CLUSTER_SIZE;
interface RepairablePage extends puppeteer.Page {
export interface RepairablePage extends puppeteer.Page {
repairRequested?: boolean;
language?: string | null;
createdAt?: number;
free?: boolean;
index?: number;
clusterGroup?: string;
}
interface ResourceData {
@@ -76,7 +77,7 @@ export default class ReusablePage extends ConcurrencyImplementation {
for (let i = 0; i < maxConcurrency; i++) {
const newPage = await this.initPage();
newPage.index = this.pages.length;
logger.info(`initialized page ${newPage.index}`);
logger.info(`initialized page ${newPage.clusterGroup}:${newPage.index}`);
this.pages.push(newPage);
}
}
@@ -87,6 +88,7 @@ export default class ReusablePage extends ConcurrencyImplementation {
protected async initPage(): Promise<RepairablePage> {
const page = await (this.browser as puppeteer.Browser).newPage() as RepairablePage;
page.clusterGroup = 'unfurler';
page.language = null;
page.createdAt = Date.now();
let defaultUrl
@@ -108,7 +110,7 @@ export default class ReusablePage extends ConcurrencyImplementation {
page.waitForSelector('meta[property="og:preview:fail"]', { timeout: config.PUPPETEER.RENDER_TIMEOUT || 3000 }).then(() => false)
])
} catch (e) {
logger.err(`failed to load frontend during page initialization: ` + (e instanceof Error ? e.message : `${e}`));
logger.err(`failed to load frontend during page initialization ${page.clusterGroup}:${page.index}: ` + (e instanceof Error ? e.message : `${e}`));
page.repairRequested = true;
}
}
@@ -129,6 +131,7 @@ export default class ReusablePage extends ConcurrencyImplementation {
protected async repairPage(page) {
// create a new page
logger.info(`Repairing page ${page.clusterGroup}:${page.index}`);
const newPage = await this.initPage();
newPage.free = true;
// replace the old page
@@ -138,9 +141,10 @@ export default class ReusablePage extends ConcurrencyImplementation {
try {
await page.goto('about:blank', {timeout: 200}); // prevents memory leak (maybe?)
} catch (e) {
logger.err('unexpected page repair error');
logger.err(`unexpected page repair error ${page.clusterGroup}:${page.index}`);
} finally {
await page.close();
}
await page.close();
return newPage;
}

View File

@@ -2,19 +2,11 @@ import * as puppeteer from 'puppeteer';
import { timeoutExecute } from 'puppeteer-cluster/dist/util';
import logger from '../logger';
import config from '../config';
import ReusablePage from './ReusablePage';
import ReusablePage, { RepairablePage } from './ReusablePage';
const mempoolHost = config.MEMPOOL.HTTP_HOST + (config.MEMPOOL.HTTP_PORT ? ':' + config.MEMPOOL.HTTP_PORT : '');
const mockImageBuffer = Buffer.from("iVBORw0KGgoAAAANSUhEUgAAAAEAAAABCAQAAAC1HAwCAAAAC0lEQVQYV2NgYAAAAAMAAWgmWQ0AAAAASUVORK5CYII=", 'base64');
interface RepairablePage extends puppeteer.Page {
repairRequested?: boolean;
language?: string | null;
createdAt?: number;
free?: boolean;
index?: number;
}
export default class ReusableSSRPage extends ReusablePage {
public constructor(options: puppeteer.LaunchOptions, puppeteer: any) {
@@ -27,36 +19,37 @@ export default class ReusableSSRPage extends ReusablePage {
protected async initPage(): Promise<RepairablePage> {
const page = await (this.browser as puppeteer.Browser).newPage() as RepairablePage;
page.clusterGroup = 'slurper';
page.language = null;
page.createdAt = Date.now();
const defaultUrl = mempoolHost + '/about';
const defaultUrl = mempoolHost + '/preview/block/1';
page.on('pageerror', (err) => {
console.log(err);
// page.repairRequested = true;
page.repairRequested = true;
});
await page.setRequestInterception(true);
page.on('request', req => {
if (req.isInterceptResolutionHandled()) {
return req.continue();
}
if (req.resourceType() === 'image') {
return req.respond({
contentType: 'image/png',
headers: {"Access-Control-Allow-Origin": "*"},
body: mockImageBuffer
});
} else if (!['document', 'script', 'xhr', 'fetch'].includes(req.resourceType())) {
return req.abort();
} else {
return req.continue();
}
});
page.on('request', req => {
if (req.isInterceptResolutionHandled()) {
return req.continue();
}
if (req.resourceType() === 'image') {
return req.respond({
contentType: 'image/png',
headers: {"Access-Control-Allow-Origin": "*"},
body: mockImageBuffer
});
} else if (req.resourceType() === 'media') {
return req.abort();
} else {
return req.continue();
}
});
try {
await page.goto(defaultUrl, { waitUntil: "networkidle0" });
await page.waitForSelector('meta[property="og:meta:ready"]', { timeout: config.PUPPETEER.RENDER_TIMEOUT || 3000 });
} catch (e) {
logger.err(`failed to load frontend during ssr page initialization: ` + (e instanceof Error ? e.message : `${e}`));
logger.err(`failed to load frontend during ssr page initialization ${page.clusterGroup}:${page.index}: ` + (e instanceof Error ? e.message : `${e}`));
page.repairRequested = true;
}
page.free = true;

View File

@@ -5,7 +5,7 @@ import * as https from 'https';
import config from './config';
import { Cluster } from 'puppeteer-cluster';
import ReusablePage from './concurrency/ReusablePage';
import ReusableSSRPage from './concurrency/ReusablePage';
import ReusableSSRPage from './concurrency/ReusableSSRPage';
import { parseLanguageUrl } from './language/lang';
import { matchRoute } from './routes';
import nodejsPath from 'path';
@@ -28,13 +28,18 @@ class Server {
mempoolUrl: URL;
network: string;
secureHost = true;
secureMempoolHost = true;
canonicalHost: string;
seoQueueLength: number = 0;
unfurlQueueLength: number = 0;
constructor() {
this.app = express();
this.mempoolHost = config.MEMPOOL.HTTP_HOST + (config.MEMPOOL.HTTP_PORT ? ':' + config.MEMPOOL.HTTP_PORT : '');
this.mempoolUrl = new URL(this.mempoolHost);
this.secureHost = config.SERVER.HOST.startsWith('https');
this.secureMempoolHost = config.MEMPOOL.HTTP_HOST.startsWith('https');
this.network = config.MEMPOOL.NETWORK || 'bitcoin';
let canonical;
@@ -120,8 +125,10 @@ class Server {
this.app.get('*', (req, res) => { return this.renderHTML(req, res, false) })
}
async clusterTask({ page, data: { url, path, action } }) {
async clusterTask({ page, data: { url, path, action, reqUrl } }) {
const start = Date.now();
try {
logger.info(`rendering "${reqUrl}" on tab ${page.clusterGroup}:${page.index}`);
const urlParts = parseLanguageUrl(path);
if (page.language !== urlParts.lang) {
// switch language
@@ -154,27 +161,30 @@ class Server {
captureBeyondViewport: false,
clip: { width: 1200, height: 600, x: 0, y: 0, scale: 1 },
});
logger.info(`rendered unfurl img in ${Date.now() - start}ms for "${reqUrl}" on tab ${page.clusterGroup}:${page.index}`);
return screenshot;
} else if (success === false) {
logger.warn(`failed to render ${path} for ${action} due to client-side error, e.g. requested an invalid txid`);
logger.warn(`failed to render ${reqUrl} for ${action} due to client-side error, e.g. requested an invalid txid`);
page.repairRequested = true;
} else {
logger.warn(`failed to render ${path} for ${action} due to puppeteer timeout`);
logger.warn(`failed to render ${reqUrl} for ${action} due to puppeteer timeout`);
page.repairRequested = true;
}
} catch (e) {
logger.err(`failed to render ${path} for ${action}: ` + (e instanceof Error ? e.message : `${e}`));
logger.err(`failed to render ${reqUrl} for ${action}: ` + (e instanceof Error ? e.message : `${e}`));
page.repairRequested = true;
}
}
async ssrClusterTask({ page, data: { url, path, action } }) {
async ssrClusterTask({ page, data: { url, path, action, reqUrl } }) {
const start = Date.now();
try {
logger.info(`slurping "${reqUrl}" on tab ${page.clusterGroup}:${page.index}`);
const urlParts = parseLanguageUrl(path);
if (page.language !== urlParts.lang) {
// switch language
page.language = urlParts.lang;
const localizedUrl = urlParts.lang ? `${this.mempoolHost}/${urlParts.lang}${urlParts.path}` : `${this.mempoolHost}${urlParts.path}` ;
const localizedUrl = urlParts.lang ? `${this.mempoolHost}/${urlParts.lang}${urlParts.path}` : `${this.mempoolHost}${urlParts.path}`;
await page.goto(localizedUrl, { waitUntil: "load" });
} else {
const loaded = await page.evaluate(async (path) => {
@@ -197,17 +207,20 @@ class Server {
return !!window['soft404'];
});
if (is404) {
logger.info(`slurp 404 in ${Date.now() - start}ms for "${reqUrl}" on tab ${page.clusterGroup}:${page.index}`);
return '404';
} else {
let html = await page.content();
logger.info(`rendered slurp in ${Date.now() - start}ms for "${reqUrl}" on tab ${page.clusterGroup}:${page.index}`);
return html;
}
} catch (e) {
if (e instanceof TimeoutError) {
let html = await page.content();
logger.info(`rendered partial slurp in ${Date.now() - start}ms for "${reqUrl}" on tab ${page.clusterGroup}:${page.index}`);
return html;
} else {
logger.err(`failed to render ${path} for ${action}: ` + (e instanceof Error ? e.message : `${e}`));
logger.err(`failed to render ${reqUrl} for ${action}: ` + (e instanceof Error ? e.message : `${e}`));
page.repairRequested = true;
}
}
@@ -219,6 +232,8 @@ class Server {
async renderPreview(req, res) {
try {
this.unfurlQueueLength++;
const start = Date.now();
const rawPath = req.params[0];
let img = null;
@@ -228,12 +243,15 @@ class Server {
// don't bother unless the route is definitely renderable
if (rawPath.includes('/preview/') && matchedRoute.render) {
img = await this.cluster?.execute({ url: this.mempoolHost + rawPath, path: rawPath, action: 'screenshot' });
img = await this.cluster?.execute({ url: this.mempoolHost + rawPath, path: rawPath, action: 'screenshot', reqUrl: req.url });
logger.info(`unfurl returned "${req.url}" in ${Date.now() - start}ms | ${this.unfurlQueueLength - 1} tasks in queue`);
} else {
logger.info('rendering not enabled for page "' + req.url + '"');
}
if (!img) {
// proxy fallback image from the frontend
res.sendFile(nodejsPath.join(__dirname, matchedRoute.fallbackImg));
// send local fallback image file
res.sendFile(nodejsPath.join(__dirname, matchedRoute.fallbackFile));
} else {
res.contentType('image/png');
res.send(img);
@@ -241,6 +259,8 @@ class Server {
} catch (e) {
logger.err(e instanceof Error ? e.message : `${e} ${req.params[0]}`);
res.status(500).send(e instanceof Error ? e.message : e);
} finally {
this.unfurlQueueLength--;
}
}
@@ -258,10 +278,17 @@ class Server {
res.status(404).send();
return;
} else {
if (this.secureHost) {
https.get(config.SERVER.HOST + rawPath, { headers: { 'user-agent': 'mempoolunfurl' }}, (got) => got.pipe(res));
logger.info('proxying resource "' + req.url + '"');
if (this.secureMempoolHost) {
https.get(this.mempoolHost + rawPath, { headers: { 'user-agent': 'mempoolunfurl' }}, (got) => {
res.writeHead(got.statusCode, got.headers);
return got.pipe(res);
});
} else {
http.get(config.SERVER.HOST + rawPath, { headers: { 'user-agent': 'mempoolunfurl' }}, (got) => got.pipe(res));
http.get(this.mempoolHost + rawPath, { headers: { 'user-agent': 'mempoolunfurl' }}, (got) => {
res.writeHead(got.statusCode, got.headers);
return got.pipe(res);
});
}
return;
}
@@ -270,9 +297,13 @@ class Server {
let result = '';
try {
if (unfurl) {
logger.info('unfurling "' + req.url + '"');
result = await this.renderUnfurlMeta(rawPath);
} else {
result = await this.renderSEOPage(rawPath);
this.seoQueueLength++;
const start = Date.now();
result = await this.renderSEOPage(rawPath, req.url);
logger.info(`slurp returned "${req.url}" in ${Date.now() - start}ms | ${this.seoQueueLength - 1} tasks in queue`);
}
if (result && result.length) {
if (result === '404') {
@@ -286,6 +317,10 @@ class Server {
} catch (e) {
logger.err(e instanceof Error ? e.message : `${e} ${req.params[0]}`);
res.status(500).send(e instanceof Error ? e.message : e);
} finally {
if (!unfurl) {
this.seoQueueLength--;
}
}
}
@@ -326,8 +361,8 @@ class Server {
</html>`;
}
async renderSEOPage(rawPath: string): Promise<string> {
let html = await this.ssrCluster?.execute({ url: this.mempoolHost + rawPath, path: rawPath, action: 'ssr' });
async renderSEOPage(rawPath: string, reqUrl: string): Promise<string> {
let html = await this.ssrCluster?.execute({ url: this.mempoolHost + rawPath, path: rawPath, action: 'ssr', reqUrl });
// remove javascript to prevent double hydration
if (html && html.length) {
html = html.replaceAll(/<script.*<\/script>/g, "");

View File

@@ -2,6 +2,7 @@ interface Match {
render: boolean;
title: string;
fallbackImg: string;
fallbackFile: string;
staticImg?: string;
networkMode: string;
}
@@ -30,7 +31,8 @@ const routes = {
},
lightning: {
title: "Lightning",
fallbackImg: '/resources/img/lightning.png',
fallbackImg: '/resources/previews/lightning.png',
fallbackFile: '/resources/img/lightning.png',
routes: {
node: {
render: true,
@@ -68,7 +70,8 @@ const routes = {
},
mining: {
title: "Mining",
fallbackImg: '/resources/img/mining.png',
fallbackImg: '/resources/previews/mining.png',
fallbackFile: '/resources/img/mining.png',
routes: {
pool: {
render: true,
@@ -83,13 +86,15 @@ const routes = {
const networks = {
bitcoin: {
fallbackImg: '/resources/img/dashboard.png',
fallbackImg: '/resources/previews/dashboard.png',
fallbackFile: '/resources/img/dashboard.png',
routes: {
...routes // all routes supported
}
},
liquid: {
fallbackImg: '/resources/img/liquid.png',
fallbackImg: '/resources/liquid/liquid-network-preview.png',
fallbackFile: '/resources/img/liquid',
routes: { // only block, address & tx routes supported
block: routes.block,
address: routes.address,
@@ -97,7 +102,8 @@ const networks = {
}
},
bisq: {
fallbackImg: '/resources/img/bisq.png',
fallbackImg: '/resources/bisq/bisq-markets-preview.png',
fallbackFile: '/resources/img/bisq.png',
routes: {} // no routes supported
}
};
@@ -107,6 +113,7 @@ export function matchRoute(network: string, path: string): Match {
render: false,
title: '',
fallbackImg: '',
fallbackFile: '',
networkMode: 'mainnet'
}
@@ -121,6 +128,7 @@ export function matchRoute(network: string, path: string): Match {
let route = networks[network] || networks.bitcoin;
match.fallbackImg = route.fallbackImg;
match.fallbackFile = route.fallbackFile;
// traverse the route tree until we run out of route or tree, or hit a renderable match
while (!route.render && route.routes && parts.length && route.routes[parts[0]]) {
@@ -128,6 +136,7 @@ export function matchRoute(network: string, path: string): Match {
parts.shift();
if (route.fallbackImg) {
match.fallbackImg = route.fallbackImg;
match.fallbackFile = route.fallbackFile;
}
}