Merge branch 'master' into nymkappa/bugfix/node-count

wiz committed 2022-11-21 18:19:53 +09:00 (via GitHub)
176 changed files with 81575 additions and 17353 deletions

backend/src/api/audit.ts (new file)

@@ -0,0 +1,118 @@
import logger from '../logger';
import { BlockExtended, TransactionExtended, MempoolBlockWithTransactions } from '../mempool.interfaces';
const PROPAGATION_MARGIN = 180; // in seconds, time since a transaction is first seen after which it is assumed to have propagated to all miners
class Audit {
auditBlock(transactions: TransactionExtended[], projectedBlocks: MempoolBlockWithTransactions[], mempool: { [txId: string]: TransactionExtended })
: { censored: string[], added: string[], score: number } {
if (!projectedBlocks?.[0]?.transactionIds || !mempool) {
return { censored: [], added: [], score: 0 };
}
const matches: string[] = []; // present in both mined block and template
const added: string[] = []; // present in mined block, not in template
const fresh: string[] = []; // missing, but firstSeen within PROPAGATION_MARGIN
const isCensored = {}; // missing, without excuse
const isDisplaced = {};
let displacedWeight = 0;
const inBlock = {};
const inTemplate = {};
const now = Math.round((Date.now() / 1000));
for (const tx of transactions) {
inBlock[tx.txid] = tx;
}
// coinbase is always expected
if (transactions[0]) {
inTemplate[transactions[0].txid] = true;
}
// look for transactions that were expected in the template, but missing from the mined block
for (const txid of projectedBlocks[0].transactionIds) {
if (!inBlock[txid]) {
// tx is recent, may have reached the miner too late for inclusion
if (mempool[txid]?.firstSeen != null && (now - (mempool[txid]?.firstSeen || 0)) <= PROPAGATION_MARGIN) {
fresh.push(txid);
} else {
isCensored[txid] = true;
}
displacedWeight += mempool[txid].weight;
}
inTemplate[txid] = true;
}
displacedWeight += (4000 - transactions[0].weight);
logger.warn(`${fresh.length} fresh, ${Object.keys(isCensored).length} possibly censored, ${displacedWeight} displaced weight`);
// we can expect an honest miner to include 'displaced' transactions in place of recent arrivals and censored txs
// these displaced transactions should occupy the first N weight units of the next projected block
let displacedWeightRemaining = displacedWeight;
let index = 0;
let lastFeeRate = Infinity;
let failures = 0;
while (projectedBlocks[1] && index < projectedBlocks[1].transactionIds.length && failures < 500) {
const txid = projectedBlocks[1].transactionIds[index];
const fits = (mempool[txid].weight - displacedWeightRemaining) < 4000;
const feeMatches = mempool[txid].effectiveFeePerVsize >= lastFeeRate;
if (fits || feeMatches) {
isDisplaced[txid] = true;
if (fits) {
lastFeeRate = Math.min(lastFeeRate, mempool[txid].effectiveFeePerVsize);
}
if (mempool[txid].firstSeen == null || (now - (mempool[txid]?.firstSeen || 0)) > PROPAGATION_MARGIN) {
displacedWeightRemaining -= mempool[txid].weight;
}
failures = 0;
} else {
failures++;
}
index++;
}
// mark unexpected transactions in the mined block as 'added'
let overflowWeight = 0;
for (const tx of transactions) {
if (inTemplate[tx.txid]) {
matches.push(tx.txid);
} else {
if (!isDisplaced[tx.txid]) {
added.push(tx.txid);
}
overflowWeight += tx.weight;
}
}
// transactions missing from near the end of our template are probably not being censored
let overflowWeightRemaining = overflowWeight;
let lastOverflowRate = 1.00;
index = projectedBlocks[0].transactionIds.length - 1;
while (index >= 0) {
const txid = projectedBlocks[0].transactionIds[index];
if (overflowWeightRemaining > 0) {
if (isCensored[txid]) {
delete isCensored[txid];
}
lastOverflowRate = mempool[txid].effectiveFeePerVsize;
} else if (Math.floor(mempool[txid].effectiveFeePerVsize * 100) <= Math.ceil(lastOverflowRate * 100)) { // tolerance of 0.01 sat/vb
if (isCensored[txid]) {
delete isCensored[txid];
}
}
overflowWeightRemaining -= (mempool[txid]?.weight || 0);
index--;
}
const numCensored = Object.keys(isCensored).length;
const score = matches.length > 0 ? (matches.length / (matches.length + numCensored)) : 0;
return {
censored: Object.keys(isCensored),
added,
score
};
}
}
export default new Audit();
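For reference (not part of this diff): the score returned by auditBlock is simply matches / (matches + censored), and the websocket handler further below converts it into the matchRate percentage stored with each block audit. A minimal sketch of that arithmetic, assuming nothing beyond the formulas shown above:

// Illustrative only – mirrors the score formula in Audit.auditBlock and the
// rounding applied in WebsocketHandler.handleNewBlock.
function auditScore(matches: number, censored: number): number {
  return matches > 0 ? matches / (matches + censored) : 0;
}

// A block matching 2990 template transactions with 10 unexplained misses:
const score = auditScore(2990, 10);                     // 0.99666...
const matchRate = Math.round(score * 100 * 100) / 100;  // 99.67 (percent, two decimals)
console.log({ score, matchRate });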


@@ -20,6 +20,7 @@ import indexer from '../indexer';
import fiatConversion from './fiat-conversion';
import poolsParser from './pools-parser';
import BlocksSummariesRepository from '../repositories/BlocksSummariesRepository';
import BlocksAuditsRepository from '../repositories/BlocksAuditsRepository';
import mining from './mining/mining';
import DifficultyAdjustmentsRepository from '../repositories/DifficultyAdjustmentsRepository';
import PricesRepository from '../repositories/PricesRepository';
@@ -186,14 +187,18 @@ class Blocks {
if (!pool) { // We should never have this situation in practice
logger.warn(`Cannot assign pool to block ${blockExtended.height} and 'unknown' pool does not exist. ` +
`Check your "pools" table entries`);
return blockExtended;
} else {
blockExtended.extras.pool = {
id: pool.id,
name: pool.name,
slug: pool.slug,
};
}
blockExtended.extras.pool = {
id: pool.id,
name: pool.name,
slug: pool.slug,
};
const auditSummary = await BlocksAuditsRepository.$getShortBlockAudit(block.id);
if (auditSummary) {
blockExtended.extras.matchRate = auditSummary.matchRate;
}
}
return blockExtended;


@@ -4,7 +4,7 @@ import logger from '../logger';
import { Common } from './common';
class DatabaseMigration {
- private static currentVersion = 40;
+ private static currentVersion = 41;
private queryTimeout = 120000;
private statisticsAddedIndexed = false;
private uniqueLogs: string[] = [];
@@ -348,6 +348,10 @@ class DatabaseMigration {
await this.$executeQuery('ALTER TABLE `nodes` ADD channels int(11) unsigned DEFAULT NULL');
await this.$executeQuery('ALTER TABLE `nodes` ADD INDEX `capacity` (`capacity`);');
}
if (databaseSchemaVersion < 41 && isBitcoin === true) {
await this.$executeQuery('UPDATE channels SET closing_reason = NULL WHERE closing_reason = 1');
}
}
/**


@@ -129,6 +129,56 @@ class NodesApi {
}
}
public async $getFeeHistogram(node_public_key: string): Promise<unknown> {
try {
const inQuery = `
SELECT CASE WHEN fee_rate <= 10.0 THEN CEIL(fee_rate)
WHEN (fee_rate > 10.0 and fee_rate <= 100.0) THEN CEIL(fee_rate / 10.0) * 10.0
WHEN (fee_rate > 100.0 and fee_rate <= 1000.0) THEN CEIL(fee_rate / 100.0) * 100.0
WHEN fee_rate > 1000.0 THEN CEIL(fee_rate / 1000.0) * 1000.0
END as bucket,
count(short_id) as count,
sum(capacity) as capacity
FROM (
SELECT CASE WHEN node1_public_key = ? THEN node2_fee_rate WHEN node2_public_key = ? THEN node1_fee_rate END as fee_rate,
short_id as short_id,
capacity as capacity
FROM channels
WHERE status = 1 AND (channels.node1_public_key = ? OR channels.node2_public_key = ?)
) as fee_rate_table
GROUP BY bucket;
`;
const [inRows]: any[] = await DB.query(inQuery, [node_public_key, node_public_key, node_public_key, node_public_key]);
const outQuery = `
SELECT CASE WHEN fee_rate <= 10.0 THEN CEIL(fee_rate)
WHEN (fee_rate > 10.0 and fee_rate <= 100.0) THEN CEIL(fee_rate / 10.0) * 10.0
WHEN (fee_rate > 100.0 and fee_rate <= 1000.0) THEN CEIL(fee_rate / 100.0) * 100.0
WHEN fee_rate > 1000.0 THEN CEIL(fee_rate / 1000.0) * 1000.0
END as bucket,
count(short_id) as count,
sum(capacity) as capacity
FROM (
SELECT CASE WHEN node1_public_key = ? THEN node1_fee_rate WHEN node2_public_key = ? THEN node2_fee_rate END as fee_rate,
short_id as short_id,
capacity as capacity
FROM channels
WHERE status = 1 AND (channels.node1_public_key = ? OR channels.node2_public_key = ?)
) as fee_rate_table
GROUP BY bucket;
`;
const [outRows]: any[] = await DB.query(outQuery, [node_public_key, node_public_key, node_public_key, node_public_key]);
return {
incoming: inRows.length > 0 ? inRows : [],
outgoing: outRows.length > 0 ? outRows : [],
};
} catch (e) {
logger.err(`Cannot get node fee distribution for ${node_public_key}. Reason: ${(e instanceof Error ? e.message : e)}`);
throw e;
}
}
public async $getAllNodes(): Promise<any> {
try {
const query = `SELECT * FROM nodes`;

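For reference (not part of this diff): the CASE expression above buckets channel fee rates on a coarse logarithmic scale – 1-unit steps up to 10, 10-unit steps up to 100, 100-unit steps up to 1,000, then 1,000-unit steps beyond that. A small TypeScript sketch of the same mapping, useful for sanity-checking the query:

// Illustrative only – mirrors the SQL bucketing in $getFeeHistogram.
function feeRateBucket(feeRate: number): number {
  if (feeRate <= 10) { return Math.ceil(feeRate); }
  if (feeRate <= 100) { return Math.ceil(feeRate / 10) * 10; }
  if (feeRate <= 1000) { return Math.ceil(feeRate / 100) * 100; }
  return Math.ceil(feeRate / 1000) * 1000;
}

console.log(feeRateBucket(7));    // 7
console.log(feeRateBucket(45));   // 50
console.log(feeRateBucket(350));  // 400
console.log(feeRateBucket(2500)); // 3000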

@@ -20,6 +20,7 @@ class NodesRoutes {
.get(config.MEMPOOL.API_URL_PREFIX + 'lightning/nodes/rankings/connectivity', this.$getTopNodesByChannels)
.get(config.MEMPOOL.API_URL_PREFIX + 'lightning/nodes/rankings/age', this.$getOldestNodes)
.get(config.MEMPOOL.API_URL_PREFIX + 'lightning/nodes/:public_key/statistics', this.$getHistoricalNodeStats)
.get(config.MEMPOOL.API_URL_PREFIX + 'lightning/nodes/:public_key/fees/histogram', this.$getFeeHistogram)
.get(config.MEMPOOL.API_URL_PREFIX + 'lightning/nodes/:public_key', this.$getNode)
.get(config.MEMPOOL.API_URL_PREFIX + 'lightning/nodes/group/:name', this.$getNodeGroup)
;
@@ -95,6 +96,22 @@ class NodesRoutes {
}
}
private async $getFeeHistogram(req: Request, res: Response) {
try {
const node = await nodesApi.$getFeeHistogram(req.params.public_key);
if (!node) {
res.status(404).send('Node not found');
return;
}
res.header('Pragma', 'public');
res.header('Cache-control', 'public');
res.setHeader('Expires', new Date(Date.now() + 1000 * 60).toUTCString());
res.json(node);
} catch (e) {
res.status(500).send(e instanceof Error ? e.message : e);
}
}
private async $getNodesRanking(req: Request, res: Response): Promise<void> {
try {
const topCapacityNodes = await nodesApi.$getTopCapacityNodes(false);

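For reference (not part of this diff): an illustrative client call for the new route. The exact base path depends on config.MEMPOOL.API_URL_PREFIX; the default '/api/v1/' and the mempool.space host are assumed here:

// Illustrative only – fetches the fee histogram for a Lightning node.
async function getFeeHistogram(publicKey: string): Promise<{ incoming: unknown[]; outgoing: unknown[] }> {
  const res = await fetch(`https://mempool.space/api/v1/lightning/nodes/${publicKey}/fees/histogram`);
  if (!res.ok) {
    throw new Error(`HTTP ${res.status}`); // 404 if the node is unknown, per the route above
  }
  return res.json(); // { incoming: [...buckets], outgoing: [...buckets] }
}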

@@ -70,6 +70,8 @@ export async function convertAndmergeBidirectionalChannels(clChannels: any[]): P
logger.info(`Building partial channels from clightning output. Channels processed: ${channelProcessed + 1} of ${keys.length}`);
loggerTimer = new Date().getTime() / 1000;
}
channelProcessed++;
}
return consolidatedChannelList;

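For reference (not part of this diff): the added channelProcessed++ keeps the periodic progress log honest – without it the counter never advances and every log line reports the first channel. A minimal sketch of the pattern:

// Illustrative only – the progress counter must advance inside the loop.
const keys = ['chanA', 'chanB', 'chanC'];
let channelProcessed = 0;
for (const _ of keys) {
  // ... convert one bidirectional channel ...
  console.log(`Channels processed: ${channelProcessed + 1} of ${keys.length}`);
  channelProcessed++; // the fix: without this, every line would read "1 of 3"
}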

@@ -1,7 +1,8 @@
import logger from '../logger';
import { MempoolBlock, TransactionExtended, TransactionStripped, MempoolBlockWithTransactions, MempoolBlockDelta } from '../mempool.interfaces';
import { MempoolBlock, TransactionExtended, AuditTransaction, TransactionStripped, MempoolBlockWithTransactions, MempoolBlockDelta, Ancestor } from '../mempool.interfaces';
import { Common } from './common';
import config from '../config';
import { PairingHeap } from '../utils/pairing-heap';
class MempoolBlocks {
private mempoolBlocks: MempoolBlockWithTransactions[] = [];
@@ -72,6 +73,7 @@ class MempoolBlocks {
logger.debug('Mempool blocks calculated in ' + time / 1000 + ' seconds');
const { blocks, deltas } = this.calculateMempoolBlocks(memPoolArray, this.mempoolBlocks);
this.mempoolBlocks = blocks;
this.mempoolBlockDeltas = deltas;
}
@@ -99,6 +101,7 @@ class MempoolBlocks {
if (transactions.length) {
mempoolBlocks.push(this.dataToMempoolBlocks(transactions, blockSize, blockWeight, mempoolBlocks.length));
}
// Calculate change from previous block states
for (let i = 0; i < Math.max(mempoolBlocks.length, prevBlocks.length); i++) {
let added: TransactionStripped[] = [];
@@ -132,12 +135,286 @@ class MempoolBlocks {
removed
});
}
return {
blocks: mempoolBlocks,
deltas: mempoolBlockDeltas
};
}
/*
* Build projected mempool blocks using an approximation of the transaction selection algorithm from Bitcoin Core
* (see BlockAssembler in https://github.com/bitcoin/bitcoin/blob/master/src/node/miner.cpp)
*
* blockLimit: number of blocks to build in total.
* weightLimit: maximum weight of transactions to consider using the selection algorithm.
* if weightLimit is significantly lower than the mempool size, results may start to diverge from getBlockTemplate
* condenseRest: whether to ignore excess transactions or append them to the final block.
*/
public makeBlockTemplates(mempool: { [txid: string]: TransactionExtended }, blockLimit: number, weightLimit: number | null = null, condenseRest = false): MempoolBlockWithTransactions[] {
const start = Date.now();
const auditPool: { [txid: string]: AuditTransaction } = {};
const mempoolArray: AuditTransaction[] = [];
const restOfArray: TransactionExtended[] = [];
let weight = 0;
const maxWeight = weightLimit ? Math.max(4_000_000 * blockLimit, weightLimit) : Infinity;
// grab the top feerate txs up to maxWeight
Object.values(mempool).sort((a, b) => b.feePerVsize - a.feePerVsize).forEach(tx => {
weight += tx.weight;
if (weight >= maxWeight) {
restOfArray.push(tx);
return;
}
// initializing everything up front helps V8 optimize property access later
auditPool[tx.txid] = {
txid: tx.txid,
fee: tx.fee,
size: tx.size,
weight: tx.weight,
feePerVsize: tx.feePerVsize,
vin: tx.vin,
relativesSet: false,
ancestorMap: new Map<string, AuditTransaction>(),
children: new Set<AuditTransaction>(),
ancestorFee: 0,
ancestorWeight: 0,
score: 0,
used: false,
modified: false,
modifiedNode: null,
}
mempoolArray.push(auditPool[tx.txid]);
})
// Build relatives graph & calculate ancestor scores
for (const tx of mempoolArray) {
if (!tx.relativesSet) {
this.setRelatives(tx, auditPool);
}
}
// Sort by descending ancestor score
mempoolArray.sort((a, b) => (b.score || 0) - (a.score || 0));
// Build blocks by greedily choosing the highest feerate package
// (i.e. the package rooted in the transaction with the best ancestor score)
const blocks: MempoolBlockWithTransactions[] = [];
let blockWeight = 4000;
let blockSize = 0;
let transactions: AuditTransaction[] = [];
const modified: PairingHeap<AuditTransaction> = new PairingHeap((a, b): boolean => (a.score || 0) > (b.score || 0));
let overflow: AuditTransaction[] = [];
let failures = 0;
let top = 0;
while ((top < mempoolArray.length || !modified.isEmpty()) && (condenseRest || blocks.length < blockLimit)) {
// skip invalid transactions
while (top < mempoolArray.length && (mempoolArray[top].used || mempoolArray[top].modified)) {
top++;
}
// Select best next package
let nextTx;
const nextPoolTx = mempoolArray[top];
const nextModifiedTx = modified.peek();
if (nextPoolTx && (!nextModifiedTx || (nextPoolTx.score || 0) > (nextModifiedTx.score || 0))) {
nextTx = nextPoolTx;
top++;
} else {
modified.pop();
if (nextModifiedTx) {
nextTx = nextModifiedTx;
nextTx.modifiedNode = undefined;
}
}
if (nextTx && !nextTx?.used) {
// Check if the package fits into this block
if (blockWeight + nextTx.ancestorWeight < config.MEMPOOL.BLOCK_WEIGHT_UNITS) {
blockWeight += nextTx.ancestorWeight;
const ancestors: AuditTransaction[] = Array.from(nextTx.ancestorMap.values());
// sort ancestors by dependency graph (equivalent to sorting by ascending ancestor count)
const sortedTxSet = [...ancestors.sort((a, b) => { return (a.ancestorMap.size || 0) - (b.ancestorMap.size || 0); }), nextTx];
const effectiveFeeRate = nextTx.ancestorFee / (nextTx.ancestorWeight / 4);
sortedTxSet.forEach((ancestor, i, arr) => {
const mempoolTx = mempool[ancestor.txid];
if (ancestor && !ancestor?.used) {
ancestor.used = true;
// update original copy of this tx with effective fee rate & relatives data
mempoolTx.effectiveFeePerVsize = effectiveFeeRate;
mempoolTx.ancestors = (Array.from(ancestor.ancestorMap?.values()) as AuditTransaction[]).map((a) => {
return {
txid: a.txid,
fee: a.fee,
weight: a.weight,
}
})
if (i < arr.length - 1) {
mempoolTx.bestDescendant = {
txid: arr[arr.length - 1].txid,
fee: arr[arr.length - 1].fee,
weight: arr[arr.length - 1].weight,
};
}
transactions.push(ancestor);
blockSize += ancestor.size;
}
});
// remove these as valid package ancestors for any descendants remaining in the mempool
if (sortedTxSet.length) {
sortedTxSet.forEach(tx => {
this.updateDescendants(tx, auditPool, modified);
});
}
failures = 0;
} else {
// hold this package in an overflow list while we check for smaller options
overflow.push(nextTx);
failures++;
}
}
// this block is full
const exceededPackageTries = failures > 1000 && blockWeight > (config.MEMPOOL.BLOCK_WEIGHT_UNITS - 4000);
if (exceededPackageTries && (!condenseRest || blocks.length < blockLimit - 1)) {
// construct this block
if (transactions.length) {
blocks.push(this.dataToMempoolBlocks(transactions.map(t => mempool[t.txid]), blockSize, blockWeight, blocks.length));
}
// reset for the next block
transactions = [];
blockSize = 0;
blockWeight = 4000;
// 'overflow' packages didn't fit in this block, but are valid candidates for the next
for (const overflowTx of overflow.reverse()) {
if (overflowTx.modified) {
overflowTx.modifiedNode = modified.add(overflowTx);
} else {
top--;
mempoolArray[top] = overflowTx;
}
}
overflow = [];
}
}
if (condenseRest) {
// pack any leftover transactions into the last block
for (const tx of overflow) {
if (!tx || tx?.used) {
continue;
}
blockWeight += tx.weight;
blockSize += tx.size;
transactions.push(tx);
tx.used = true;
}
const blockTransactions = transactions.map(t => mempool[t.txid])
restOfArray.forEach(tx => {
blockWeight += tx.weight;
blockSize += tx.size;
blockTransactions.push(tx);
});
if (blockTransactions.length) {
blocks.push(this.dataToMempoolBlocks(blockTransactions, blockSize, blockWeight, blocks.length));
}
transactions = [];
} else if (transactions.length) {
blocks.push(this.dataToMempoolBlocks(transactions.map(t => mempool[t.txid]), blockSize, blockWeight, blocks.length));
}
const end = Date.now();
const time = end - start;
logger.debug('Mempool templates calculated in ' + time / 1000 + ' seconds');
return blocks;
}
// traverse in-mempool ancestors
// recursion unavoidable, but should be limited to depth < 25 by mempool policy
public setRelatives(
tx: AuditTransaction,
mempool: { [txid: string]: AuditTransaction },
): void {
for (const parent of tx.vin) {
const parentTx = mempool[parent.txid];
if (parentTx && !tx.ancestorMap!.has(parent.txid)) {
tx.ancestorMap.set(parent.txid, parentTx);
parentTx.children.add(tx);
// visit each node only once
if (!parentTx.relativesSet) {
this.setRelatives(parentTx, mempool);
}
parentTx.ancestorMap.forEach((ancestor) => {
tx.ancestorMap.set(ancestor.txid, ancestor);
});
}
};
tx.ancestorFee = tx.fee || 0;
tx.ancestorWeight = tx.weight || 0;
tx.ancestorMap.forEach((ancestor) => {
tx.ancestorFee += ancestor.fee;
tx.ancestorWeight += ancestor.weight;
});
tx.score = tx.ancestorFee / (tx.ancestorWeight || 1);
tx.relativesSet = true;
}
// iterate over remaining descendants, removing the root as a valid ancestor & updating the ancestor score
// avoids recursion to limit call stack depth
private updateDescendants(
rootTx: AuditTransaction,
mempool: { [txid: string]: AuditTransaction },
modified: PairingHeap<AuditTransaction>,
): void {
const descendantSet: Set<AuditTransaction> = new Set();
// stack of nodes left to visit
const descendants: AuditTransaction[] = [];
let descendantTx;
let ancestorIndex;
let tmpScore;
rootTx.children.forEach(childTx => {
if (!descendantSet.has(childTx)) {
descendants.push(childTx);
descendantSet.add(childTx);
}
});
while (descendants.length) {
descendantTx = descendants.pop();
if (descendantTx && descendantTx.ancestorMap && descendantTx.ancestorMap.has(rootTx.txid)) {
// remove tx as ancestor
descendantTx.ancestorMap.delete(rootTx.txid);
descendantTx.ancestorFee -= rootTx.fee;
descendantTx.ancestorWeight -= rootTx.weight;
tmpScore = descendantTx.score;
descendantTx.score = descendantTx.ancestorFee / descendantTx.ancestorWeight;
if (!descendantTx.modifiedNode) {
descendantTx.modified = true;
descendantTx.modifiedNode = modified.add(descendantTx);
} else {
// rebalance modified heap if score has changed
if (descendantTx.score < tmpScore) {
modified.decreasePriority(descendantTx.modifiedNode);
} else if (descendantTx.score > tmpScore) {
modified.increasePriority(descendantTx.modifiedNode);
}
}
// add this node's children to the stack
descendantTx.children.forEach(childTx => {
// visit each node only once
if (!descendantSet.has(childTx)) {
descendants.push(childTx);
descendantSet.add(childTx);
}
});
}
}
}
private dataToMempoolBlocks(transactions: TransactionExtended[],
blockSize: number, blockWeight: number, blocksIndex: number): MempoolBlockWithTransactions {
let rangeLength = 4;

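For reference (not part of this diff): setRelatives gives every transaction a package score of ancestorFee / ancestorWeight (sats per weight unit), which is what lets a low-fee parent ride into a template behind a high-fee child (CPFP). A worked example of the two numbers makeBlockTemplates derives, assuming only the formulas shown above:

// Illustrative only – package scoring as used by makeBlockTemplates.
const parent = { fee: 200, weight: 800 };  // 0.25 sat/WU (1 sat/vB) on its own
const child  = { fee: 2000, weight: 400 }; // spends the parent's output

const ancestorFee = parent.fee + child.fee;           // 2200
const ancestorWeight = parent.weight + child.weight;  // 1200
const score = ancestorFee / ancestorWeight;                       // ≈ 1.83 sat/WU
const effectiveFeePerVsize = ancestorFee / (ancestorWeight / 4);  // ≈ 7.33 sat/vB

// The child's package outranks anything scoring below ~1.83 sat/WU, so the parent
// is selected immediately before the child, and both mempool entries are stamped
// with the shared effectiveFeePerVsize of ~7.33 sat/vB.
console.log(score, effectiveFeePerVsize);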

@@ -103,12 +103,11 @@ class Mempool {
return txTimes;
}
- public async $updateMempool() {
-   logger.debug('Updating mempool');
+ public async $updateMempool(): Promise<void> {
+   logger.debug(`Updating mempool...`);
const start = new Date().getTime();
let hasChange: boolean = false;
const currentMempoolSize = Object.keys(this.mempoolCache).length;
- let txCount = 0;
const transactions = await bitcoinApi.$getRawMempool();
const diff = transactions.length - currentMempoolSize;
const newTransactions: TransactionExtended[] = [];
@@ -124,7 +123,6 @@ class Mempool {
try {
const transaction = await transactionUtils.$getTransactionExtended(txid);
this.mempoolCache[txid] = transaction;
- txCount++;
if (this.inSync) {
this.txPerSecondArray.push(new Date().getTime());
this.vBytesPerSecondArray.push({
@@ -133,14 +131,9 @@ class Mempool {
});
}
hasChange = true;
- if (diff > 0) {
-   logger.debug('Fetched transaction ' + txCount + ' / ' + diff);
- } else {
-   logger.debug('Fetched transaction ' + txCount);
- }
newTransactions.push(transaction);
} catch (e) {
- logger.debug('Error finding transaction in mempool: ' + (e instanceof Error ? e.message : e));
+ logger.debug(`Error finding transaction '${txid}' in the mempool: ` + (e instanceof Error ? e.message : e));
}
}
@@ -197,8 +190,7 @@ class Mempool {
const end = new Date().getTime();
const time = end - start;
- logger.debug(`New mempool size: ${Object.keys(this.mempoolCache).length} Change: ${diff}`);
- logger.debug('Mempool updated in ' + time / 1000 + ' seconds');
+ logger.debug(`Mempool updated in ${time / 1000} seconds. New size: ${Object.keys(this.mempoolCache).length} (${diff > 0 ? '+' + diff : diff})`);
}
public handleRbfTransactions(rbfTransactions: { [txid: string]: TransactionExtended; }) {


@@ -27,6 +27,7 @@ class MiningRoutes {
.get(config.MEMPOOL.API_URL_PREFIX + 'mining/difficulty-adjustments/:interval', this.$getDifficultyAdjustments)
.get(config.MEMPOOL.API_URL_PREFIX + 'mining/blocks/predictions/:interval', this.$getHistoricalBlockPrediction)
.get(config.MEMPOOL.API_URL_PREFIX + 'mining/blocks/audit/:hash', this.$getBlockAudit)
.get(config.MEMPOOL.API_URL_PREFIX + 'mining/blocks/timestamp/:timestamp', this.$getHeightFromTimestamp)
;
}
@@ -238,6 +239,12 @@ class MiningRoutes {
public async $getBlockAudit(req: Request, res: Response) {
try {
const audit = await BlocksAuditsRepository.$getBlockAudit(req.params.hash);
if (!audit) {
res.status(404).send(`This block has not been audited.`);
return;
}
res.header('Pragma', 'public');
res.header('Cache-control', 'public');
res.setHeader('Expires', new Date(Date.now() + 1000 * 3600 * 24).toUTCString());
@@ -246,6 +253,29 @@ class MiningRoutes {
res.status(500).send(e instanceof Error ? e.message : e);
}
}
private async $getHeightFromTimestamp(req: Request, res: Response) {
try {
const timestamp = parseInt(req.params.timestamp, 10);
// This will prevent people from entering milliseconds etc.
// Block timestamps are allowed to be up to 2 hours off, so 24 hours
// will never put the maximum value before the most recent block
const nowPlus1day = Math.floor(Date.now() / 1000) + 60 * 60 * 24;
// Prevent non-integers that are not seconds
if (!/^[1-9][0-9]*$/.test(req.params.timestamp) || timestamp > nowPlus1day) {
throw new Error(`Invalid timestamp, value must be Unix seconds`);
}
const result = await BlocksRepository.$getBlockHeightFromTimestamp(
timestamp,
);
res.header('Pragma', 'public');
res.header('Cache-control', 'public');
res.setHeader('Expires', new Date(Date.now() + 1000 * 300).toUTCString());
res.json(result);
} catch (e) {
res.status(500).send(e instanceof Error ? e.message : e);
}
}
}
export default new MiningRoutes();
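For reference (not part of this diff): the validation in $getHeightFromTimestamp accepts only a bare positive integer no more than 24 hours in the future, which rejects millisecond timestamps and date strings. A minimal sketch of the same check:

// Illustrative only – mirrors the timestamp validation above.
function isValidTimestampParam(param: string, nowSeconds: number = Math.floor(Date.now() / 1000)): boolean {
  const nowPlus1day = nowSeconds + 60 * 60 * 24;
  return /^[1-9][0-9]*$/.test(param) && parseInt(param, 10) <= nowPlus1day;
}

console.log(isValidTimestampParam('1668981593'));    // true  – Unix seconds
console.log(isValidTimestampParam('1668981593000')); // false – milliseconds land far beyond now + 1 day
console.log(isValidTimestampParam('2022-11-21'));    // false – not a bare integer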


@@ -18,6 +18,7 @@ import difficultyAdjustment from './difficulty-adjustment';
import feeApi from './fee-api';
import BlocksAuditsRepository from '../repositories/BlocksAuditsRepository';
import BlocksSummariesRepository from '../repositories/BlocksSummariesRepository';
import Audit from './audit';
class WebsocketHandler {
private wss: WebSocket.Server | undefined;
@@ -405,75 +406,63 @@ class WebsocketHandler {
});
}
- handleNewBlock(block: BlockExtended, txIds: string[], transactions: TransactionExtended[]) {
+ handleNewBlock(block: BlockExtended, txIds: string[], transactions: TransactionExtended[]): void {
if (!this.wss) {
throw new Error('WebSocket.Server is not set');
}
let mBlocks: undefined | MempoolBlock[];
let mBlockDeltas: undefined | MempoolBlockDelta[];
- let matchRate = 0;
+ let matchRate;
const _memPool = memPool.getMempool();
const _mempoolBlocks = mempoolBlocks.getMempoolBlocksWithTransactions();
if (_mempoolBlocks[0]) {
const matches: string[] = [];
const added: string[] = [];
const missing: string[] = [];
if (Common.indexingEnabled()) {
const mempoolCopy = cloneMempool(_memPool);
const projectedBlocks = mempoolBlocks.makeBlockTemplates(mempoolCopy, 2);
for (const txId of txIds) {
if (_mempoolBlocks[0].transactionIds.indexOf(txId) > -1) {
matches.push(txId);
} else {
added.push(txId);
const { censored, added, score } = Audit.auditBlock(transactions, projectedBlocks, mempoolCopy);
matchRate = Math.round(score * 100 * 100) / 100;
const stripped = projectedBlocks[0]?.transactions ? projectedBlocks[0].transactions.map((tx) => {
return {
txid: tx.txid,
vsize: tx.vsize,
fee: tx.fee ? Math.round(tx.fee) : 0,
value: tx.value,
};
}) : [];
BlocksSummariesRepository.$saveSummary({
height: block.height,
template: {
id: block.id,
transactions: stripped
}
delete _memPool[txId];
}
});
for (const txId of _mempoolBlocks[0].transactionIds) {
if (matches.includes(txId) || added.includes(txId)) {
continue;
}
missing.push(txId);
}
BlocksAuditsRepository.$saveAudit({
time: block.timestamp,
height: block.height,
hash: block.id,
addedTxs: added,
missingTxs: censored,
matchRate: matchRate,
});
matchRate = Math.round((Math.max(0, matches.length - missing.length - added.length) / txIds.length * 100) * 100) / 100;
mempoolBlocks.updateMempoolBlocks(_memPool);
mBlocks = mempoolBlocks.getMempoolBlocks();
mBlockDeltas = mempoolBlocks.getMempoolBlockDeltas();
if (Common.indexingEnabled()) {
const stripped = _mempoolBlocks[0].transactions.map((tx) => {
return {
txid: tx.txid,
vsize: tx.vsize,
fee: tx.fee ? Math.round(tx.fee) : 0,
value: tx.value,
};
});
BlocksSummariesRepository.$saveSummary({
height: block.height,
template: {
id: block.id,
transactions: stripped
}
});
BlocksAuditsRepository.$saveAudit({
time: block.timestamp,
height: block.height,
hash: block.id,
addedTxs: added,
missingTxs: missing,
matchRate: matchRate,
});
if (block.extras) {
block.extras.matchRate = matchRate;
}
}
if (block.extras) {
block.extras.matchRate = matchRate;
// Update mempool to remove transactions included in the new block
for (const txId of txIds) {
delete _memPool[txId];
}
mempoolBlocks.updateMempoolBlocks(_memPool);
mBlocks = mempoolBlocks.getMempoolBlocks();
mBlockDeltas = mempoolBlocks.getMempoolBlockDeltas();
const da = difficultyAdjustment.getDifficultyAdjustment();
const fees = feeApi.getRecommendedFee();
@@ -580,4 +569,14 @@ class WebsocketHandler {
}
}
function cloneMempool(mempool: { [txid: string]: TransactionExtended }): { [txid: string]: TransactionExtended } {
const cloned = {};
Object.keys(mempool).forEach(id => {
cloned[id] = {
...mempool[id]
};
});
return cloned;
}
export default new WebsocketHandler();
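For reference (not part of this diff): cloneMempool exists because makeBlockTemplates writes effectiveFeePerVsize, ancestors and bestDescendant onto the transactions it selects, so the audit runs against per-entry copies while the live mempool passed to updateMempoolBlocks stays untouched. A minimal sketch of why the shallow per-entry copy is sufficient for that:

// Illustrative only – fields reassigned on the copy do not reach the original.
const live = { a: { txid: 'a', effectiveFeePerVsize: 1 } };
const copy: typeof live = { a: { ...live.a } };

copy.a.effectiveFeePerVsize = 7.33;        // what the template builder might write
console.log(live.a.effectiveFeePerVsize);  // still 1 – the live entry is untouched
// Note the copy is shallow: nested objects (e.g. vin) remain shared, but
// makeBlockTemplates only reassigns top-level fields, so isolation holds.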