Merge branch 'master' into hunicus/manual-deployment-enterprise
@@ -171,52 +171,58 @@ Helpful link: https://gist.github.com/System-Glitch/cb4e87bf1ae3fec9925725bb3ebe
Run bitcoind on regtest:
```
-bitcoind -regtest -rpcport=8332
+bitcoind -regtest
```

Create a new wallet, if needed:
```
-bitcoin-cli -regtest -rpcport=8332 createwallet test
+bitcoin-cli -regtest createwallet test
```

Load wallet (this command may take a while if you have a lot of UTXOs):
```
-bitcoin-cli -regtest -rpcport=8332 loadwallet test
+bitcoin-cli -regtest loadwallet test
```

Get a new address:
```
-address=$(./src/bitcoin-cli -regtest -rpcport=8332 getnewaddress)
+address=$(bitcoin-cli -regtest getnewaddress)
```

Mine blocks to the previously generated address. You need at least 101 blocks before you can spend. This will take some time to execute (~1 min):
```
-bitcoin-cli -regtest -rpcport=8332 generatetoaddress 101 $address
+bitcoin-cli -regtest generatetoaddress 101 $address
```

Send 0.1 BTC at 5 sat/vB to another address:
```
-./src/bitcoin-cli -named -regtest -rpcport=8332 sendtoaddress address=$(./src/bitcoin-cli -regtest -rpcport=8332 getnewaddress) amount=0.1 fee_rate=5
+bitcoin-cli -named -regtest sendtoaddress address=$(bitcoin-cli -regtest getnewaddress) amount=0.1 fee_rate=5
```

See more examples of `sendtoaddress`:
```
-./src/bitcoin-cli sendtoaddress # will print the help
+bitcoin-cli sendtoaddress # will print the help
```

-Mini script to generate transactions with random TX fee-rate (between 1 to 100 sat/vB). It's slow so don't expect to use this to test mempool spam, except if you let it run for a long time, or maybe with multiple regtest nodes connected to each other.
+Mini script to generate random network activity (random TX count with random tx fee-rate). It's slow so don't expect to use this to test mempool spam, except if you let it run for a long time, or maybe with multiple regtest nodes connected to each other.
```
#!/bin/bash
-address=$(./src/bitcoin-cli -regtest -rpcport=8332 getnewaddress)
+address=$(bitcoin-cli -regtest getnewaddress)
+bitcoin-cli -regtest generatetoaddress 101 $address
for i in {1..1000000}
do
-  ./src/bitcoin-cli -regtest -rpcport=8332 -named sendtoaddress address=$address amount=0.01 fee_rate=$(jot -r 1 1 100)
+  for y in $(seq 1 "$(jot -r 1 1 1000)")
+  do
+    bitcoin-cli -regtest -named sendtoaddress address=$address amount=0.01 fee_rate=$(jot -r 1 1 100)
+  done
+  bitcoin-cli -regtest generatetoaddress 1 $address
+  sleep 5
done
```
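Note: `jot` is a BSD/macOS utility and is usually not installed on Linux. With GNU coreutils, the equivalent of `jot -r 1 1 100` is `shuf -i 1-100 -n 1` (and `shuf -i 1-1000 -n 1` for the per-block transaction count above).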
Generate a block at a regular interval (every 10 seconds in this example):
```
-watch -n 10 "./src/bitcoin-cli -regtest -rpcport=8332 generatetoaddress 1 $address"
+watch -n 10 "bitcoin-cli -regtest generatetoaddress 1 $address"
```

### Mining pools update
@@ -23,9 +23,11 @@ describe('Mempool Difficulty Adjustment', () => {
      remainingBlocks: 1834,
      remainingTime: 977591692,
      previousRetarget: 0.6280047707459726,
+      previousTime: 1660820820,
      nextRetargetHeight: 751968,
      timeAvg: 533038,
      timeOffset: 0,
+      expectedBlocks: 161.68833333333333,
    },
  ],
  [ // Vector 2 (testnet)
@@ -43,11 +45,13 @@ describe('Mempool Difficulty Adjustment', () => {
      estimatedRetargetDate: 1661895424692,
      remainingBlocks: 1834,
      remainingTime: 977591692,
+      previousTime: 1660820820,
      previousRetarget: 0.6280047707459726,
      nextRetargetHeight: 751968,
      timeAvg: 533038,
      timeOffset: -667000, // 11 min 7 seconds since last block (testnet only)
      // If we add time avg to abs(timeOffset) it makes exactly 1200000 ms, or 20 minutes
+      expectedBlocks: 161.68833333333333,
    },
  ],
] as [[number, number, number, number, string, number], DifficultyAdjustment][];
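A quick sanity check on these vectors: the expected remaining time is just the remaining block count times the average block interval,

$$\text{remainingTime} = \text{remainingBlocks} \times \text{timeAvg} = 1834 \times 533038 = 977591692 \text{ ms},$$

which matches the `remainingTime` in both expected outputs above.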
@@ -220,18 +220,17 @@ class BitcoinRoutes {
      let cpfpInfo;
      if (config.DATABASE.ENABLED) {
        cpfpInfo = await transactionRepository.$getCpfpInfo(req.params.txId);
      }
-      if (cpfpInfo) {
-        res.json(cpfpInfo);
-        return;
-      }
+      if (cpfpInfo) {
+        res.json(cpfpInfo);
+        return;
+      } else {
+        res.json({
+          ancestors: []
+        });
+        return;
+      }
    }
    res.status(404).send(`Transaction has no CPFP info available.`);
  }

  private getBackendInfo(req: Request, res: Response) {
@@ -652,7 +651,7 @@ class BitcoinRoutes {
      if (result) {
        res.json(result);
      } else {
-        res.status(404).send('not found');
+        res.status(204).send();
      }
    } catch (e) {
      res.status(500).send(e instanceof Error ? e.message : e);
@@ -175,6 +175,7 @@ export class Common {
      case '1y': return '1 YEAR';
      case '2y': return '2 YEAR';
      case '3y': return '3 YEAR';
+      case '4y': return '4 YEAR';
      default: return null;
    }
  }
@@ -1037,7 +1037,7 @@ class DatabaseMigration {
      await this.$executeQuery('DELETE FROM `pools`');
      await this.$executeQuery('ALTER TABLE pools AUTO_INCREMENT = 1');
      await this.$executeQuery(`UPDATE state SET string = NULL WHERE name = 'pools_json_sha'`);
    }
  }

  private async $convertCompactCpfpTables(): Promise<void> {
    try {
@@ -9,9 +9,11 @@ export interface DifficultyAdjustment {
  remainingBlocks: number; // Block count
  remainingTime: number; // Duration of time in ms
  previousRetarget: number; // Percent: -75 to 300
+  previousTime: number; // Unix time in ms
  nextRetargetHeight: number; // Block Height
  timeAvg: number; // Duration of time in ms
  timeOffset: number; // (Testnet) Time since last block (cap @ 20min) in ms
+  expectedBlocks: number; // Block count
}

export function calcDifficultyAdjustment(
@@ -32,12 +34,12 @@ export function calcDifficultyAdjustment(
  const progressPercent = (blockHeight >= 0) ? blocksInEpoch / EPOCH_BLOCK_LENGTH * 100 : 100;
  const remainingBlocks = EPOCH_BLOCK_LENGTH - blocksInEpoch;
  const nextRetargetHeight = (blockHeight >= 0) ? blockHeight + remainingBlocks : 0;
+  const expectedBlocks = diffSeconds / BLOCK_SECONDS_TARGET;

  let difficultyChange = 0;
-  let timeAvgSecs = BLOCK_SECONDS_TARGET;
+  let timeAvgSecs = diffSeconds / blocksInEpoch;
  // Only calculate the estimate once we have 7.2% of blocks in current epoch
  if (blocksInEpoch >= ESTIMATE_LAG_BLOCKS) {
-    timeAvgSecs = diffSeconds / blocksInEpoch;
    difficultyChange = (BLOCK_SECONDS_TARGET / timeAvgSecs - 1) * 100;
    // Max increase is x4 (+300%)
    if (difficultyChange > 300) {
@@ -74,9 +76,11 @@ export function calcDifficultyAdjustment(
    remainingBlocks,
    remainingTime,
    previousRetarget,
+    previousTime: DATime,
    nextRetargetHeight,
    timeAvg,
    timeOffset,
+    expectedBlocks,
  };
}
@@ -11,19 +11,33 @@ import { Common } from './common';
class DiskCache {
  private cacheSchemaVersion = 3;

  private static TMP_FILE_NAME = config.MEMPOOL.CACHE_DIR + '/tmp-cache.json';
+  private static TMP_FILE_NAMES = config.MEMPOOL.CACHE_DIR + '/tmp-cache{number}.json';
  private static FILE_NAME = config.MEMPOOL.CACHE_DIR + '/cache.json';
+  private static FILE_NAMES = config.MEMPOOL.CACHE_DIR + '/cache{number}.json';
+  private static CHUNK_FILES = 25;
  private isWritingCache = false;

-  constructor() { }
+  constructor() {
+    if (!cluster.isMaster) {
+      return;
+    }
+    process.on('SIGINT', (e) => {
+      this.saveCacheToDiskSync();
+      process.exit(2);
+    });
+    process.on('SIGTERM', (e) => {
+      this.saveCacheToDiskSync();
+      process.exit(2);
+    });
+  }

  async $saveCacheToDisk(): Promise<void> {
    if (!cluster.isPrimary) {
      return;
    }
    if (this.isWritingCache) {
-      logger.debug('Saving cache already in progress. Skipping.')
+      logger.debug('Saving cache already in progress. Skipping.');
      return;
    }
    try {
@@ -61,8 +75,57 @@ class DiskCache {
    }
  }

-  wipeCache() {
-    logger.notice(`Wipping nodejs backend cache/cache*.json files`);
+  saveCacheToDiskSync(): void {
+    if (!cluster.isPrimary) {
+      return;
+    }
+    if (this.isWritingCache) {
+      logger.debug('Saving cache already in progress. Skipping.');
+      return;
+    }
+    try {
+      logger.debug('Writing mempool and blocks data to disk cache (sync)...');
+      this.isWritingCache = true;
+
+      const mempool = memPool.getMempool();
+      const mempoolArray: TransactionExtended[] = [];
+      for (const tx in mempool) {
+        mempoolArray.push(mempool[tx]);
+      }
+
+      Common.shuffleArray(mempoolArray);
+
+      const chunkSize = Math.floor(mempoolArray.length / DiskCache.CHUNK_FILES);
+
+      fs.writeFileSync(DiskCache.TMP_FILE_NAME, JSON.stringify({
+        cacheSchemaVersion: this.cacheSchemaVersion,
+        blocks: blocks.getBlocks(),
+        blockSummaries: blocks.getBlockSummaries(),
+        mempool: {},
+        mempoolArray: mempoolArray.splice(0, chunkSize),
+      }), { flag: 'w' });
+      for (let i = 1; i < DiskCache.CHUNK_FILES; i++) {
+        fs.writeFileSync(DiskCache.TMP_FILE_NAMES.replace('{number}', i.toString()), JSON.stringify({
+          mempool: {},
+          mempoolArray: mempoolArray.splice(0, chunkSize),
+        }), { flag: 'w' });
+      }
+
+      fs.renameSync(DiskCache.TMP_FILE_NAME, DiskCache.FILE_NAME);
+      for (let i = 1; i < DiskCache.CHUNK_FILES; i++) {
+        fs.renameSync(DiskCache.TMP_FILE_NAMES.replace('{number}', i.toString()), DiskCache.FILE_NAMES.replace('{number}', i.toString()));
+      }
+
+      logger.debug('Mempool and blocks data saved to disk cache');
+      this.isWritingCache = false;
+    } catch (e) {
+      logger.warn('Error writing to cache file: ' + (e instanceof Error ? e.message : e));
+      this.isWritingCache = false;
+    }
+  }
+
+  wipeCache(): void {
+    logger.notice(`Wiping nodejs backend cache/cache*.json files`);
    try {
      fs.unlinkSync(DiskCache.FILE_NAME);
    } catch (e: any) {
@@ -83,7 +146,7 @@ class DiskCache {
    }
  }

-  loadMempoolCache() {
+  loadMempoolCache(): void {
    if (!fs.existsSync(DiskCache.FILE_NAME)) {
      return;
    }
@@ -417,24 +417,24 @@ class NodesApi {
      if (!ispList[isp1]) {
        ispList[isp1] = {
-          id: channel.isp1ID.toString(),
+          ids: [channel.isp1ID],
          capacity: 0,
          channels: 0,
          nodes: {},
        };
-      } else if (ispList[isp1].id.indexOf(channel.isp1ID) === -1) {
-        ispList[isp1].id += ',' + channel.isp1ID.toString();
+      } else if (ispList[isp1].ids.includes(channel.isp1ID) === false) {
+        ispList[isp1].ids.push(channel.isp1ID);
      }

      if (!ispList[isp2]) {
        ispList[isp2] = {
-          id: channel.isp2ID.toString(),
+          ids: [channel.isp2ID],
          capacity: 0,
          channels: 0,
          nodes: {},
        };
-      } else if (ispList[isp2].id.indexOf(channel.isp2ID) === -1) {
-        ispList[isp2].id += ',' + channel.isp2ID.toString();
+      } else if (ispList[isp2].ids.includes(channel.isp2ID) === false) {
+        ispList[isp2].ids.push(channel.isp2ID);
      }

      ispList[isp1].capacity += channel.capacity;
@@ -444,11 +444,11 @@ class NodesApi {
      ispList[isp2].channels += 1;
      ispList[isp2].nodes[channel.node2PublicKey] = true;
    }

    const ispRanking: any[] = [];
    for (const isp of Object.keys(ispList)) {
      ispRanking.push([
-        ispList[isp].id,
+        ispList[isp].ids.sort((a, b) => a - b).join(','),
        isp,
        ispList[isp].capacity,
        ispList[isp].channels,
@@ -31,6 +31,11 @@ class Mempool {
  private mempoolProtection = 0;
  private latestTransactions: any[] = [];

+  private ESPLORA_MISSING_TX_WARNING_THRESHOLD = 100;
+  private SAMPLE_TIME = 10000; // In ms
+  private timer = new Date().getTime();
+  private missingTxCount = 0;
+
  constructor() {
    setInterval(this.updateTxPerSecond.bind(this), 1000);
    setInterval(this.deleteExpiredTransactions.bind(this), 20000);
@@ -128,6 +133,16 @@ class Mempool {
      loadingIndicators.setProgress('mempool', Object.keys(this.mempoolCache).length / transactions.length * 100);
    }

+    // https://github.com/mempool/mempool/issues/3283
+    const logEsplora404 = (missingTxCount, threshold, time) => {
+      const log = `In the past ${time / 1000} seconds, esplora tx API replied ${missingTxCount} times with a 404 error code while updating nodejs backend mempool`;
+      if (missingTxCount >= threshold) {
+        logger.warn(log);
+      } else if (missingTxCount > 0) {
+        logger.debug(log);
+      }
+    };
+
    for (const txid of transactions) {
      if (!this.mempoolCache[txid]) {
        try {
@@ -142,7 +157,10 @@ class Mempool {
          }
          hasChange = true;
          newTransactions.push(transaction);
-        } catch (e) {
+        } catch (e: any) {
+          if (config.MEMPOOL.BACKEND === 'esplora' && e.response?.status === 404) {
+            this.missingTxCount++;
+          }
          logger.debug(`Error finding transaction '${txid}' in the mempool: ` + (e instanceof Error ? e.message : e));
        }
      }
@@ -152,6 +170,14 @@ class Mempool {
      }
    }

+    // Reset esplora 404 counter and log a warning if needed
+    const elapsedTime = new Date().getTime() - this.timer;
+    if (elapsedTime > this.SAMPLE_TIME) {
+      logEsplora404(this.missingTxCount, this.ESPLORA_MISSING_TX_WARNING_THRESHOLD, elapsedTime);
+      this.timer = new Date().getTime();
+      this.missingTxCount = 0;
+    }
+
    // Prevent mempool from clear on bitcoind restart by delaying the deletion
    if (this.mempoolProtection === 0
      && currentMempoolSize > 20000
@@ -263,7 +263,7 @@ class MiningRoutes {
      const audit = await BlocksAuditsRepository.$getBlockAudit(req.params.hash);

      if (!audit) {
-        res.status(404).send(`This block has not been audited.`);
+        res.status(204).send(`This block has not been audited.`);
        return;
      }
@@ -117,7 +117,7 @@ class Mining {
      poolsStatistics['lastEstimatedHashrate'] = await bitcoinClient.getNetworkHashPs(totalBlock24h);
    } catch (e) {
      poolsStatistics['lastEstimatedHashrate'] = 0;
-      logger.debug('Bitcoin Core is not available, using zeroed value for current hashrate');
+      logger.debug('Bitcoin Core is not available, using zeroed value for current hashrate', logger.tags.mining);
    }

    return poolsStatistics;
@@ -145,7 +145,7 @@ class Mining {
    try {
      currentEstimatedHashrate = await bitcoinClient.getNetworkHashPs(totalBlock24h);
    } catch (e) {
-      logger.debug('Bitcoin Core is not available, using zeroed value for current hashrate');
+      logger.debug('Bitcoin Core is not available, using zeroed value for current hashrate', logger.tags.mining);
    }

    return {
@@ -208,7 +208,7 @@ class Mining {
    const startedAt = new Date().getTime() / 1000;
    let timer = new Date().getTime() / 1000;

-    logger.debug(`Indexing weekly mining pool hashrate`);
+    logger.debug(`Indexing weekly mining pool hashrate`, logger.tags.mining);
    loadingIndicators.setProgress('weekly-hashrate-indexing', 0);

    while (toTimestamp > genesisTimestamp && toTimestamp > oldestConsecutiveBlockTimestamp) {
@@ -245,7 +245,7 @@ class Mining {
          });
        }

-        newlyIndexed += hashrates.length;
+        newlyIndexed += hashrates.length / Math.max(1, pools.length);
        await HashratesRepository.$saveHashrates(hashrates);
        hashrates.length = 0;
      }
@@ -256,7 +256,7 @@ class Mining {
        const weeksPerSeconds = Math.max(1, Math.round(indexedThisRun / elapsedSeconds));
        const progress = Math.round(totalIndexed / totalWeekIndexed * 10000) / 100;
        const formattedDate = new Date(fromTimestamp).toUTCString();
-        logger.debug(`Getting weekly pool hashrate for ${formattedDate} | ~${weeksPerSeconds.toFixed(2)} weeks/sec | total: ~${totalIndexed}/${Math.round(totalWeekIndexed)} (${progress}%) | elapsed: ${runningFor} seconds`);
+        logger.debug(`Getting weekly pool hashrate for ${formattedDate} | ~${weeksPerSeconds.toFixed(2)} weeks/sec | total: ~${totalIndexed}/${Math.round(totalWeekIndexed)} (${progress}%) | elapsed: ${runningFor} seconds`, logger.tags.mining);
        timer = new Date().getTime() / 1000;
        indexedThisRun = 0;
        loadingIndicators.setProgress('weekly-hashrate-indexing', progress, false);
@@ -268,14 +268,14 @@ class Mining {
      }
      this.lastWeeklyHashrateIndexingDate = new Date().getUTCDate();
      if (newlyIndexed > 0) {
-        logger.notice(`Weekly mining pools hashrates indexing completed: indexed ${newlyIndexed}`, logger.tags.mining);
+        logger.info(`Weekly mining pools hashrates indexing completed: indexed ${newlyIndexed} weeks`, logger.tags.mining);
      } else {
-        logger.debug(`Weekly mining pools hashrates indexing completed: indexed ${newlyIndexed}`, logger.tags.mining);
+        logger.debug(`Weekly mining pools hashrates indexing completed: indexed ${newlyIndexed} weeks`, logger.tags.mining);
      }
      loadingIndicators.setProgress('weekly-hashrate-indexing', 100);
    } catch (e) {
      loadingIndicators.setProgress('weekly-hashrate-indexing', 100);
-      logger.err(`Weekly mining pools hashrates indexing failed. Trying again in 10 seconds. Reason: ${(e instanceof Error ? e.message : e)}`);
+      logger.err(`Weekly mining pools hashrates indexing failed. Trying again in 10 seconds. Reason: ${(e instanceof Error ? e.message : e)}`, logger.tags.mining);
      throw e;
    }
  }
@@ -308,7 +308,7 @@ class Mining {
    const startedAt = new Date().getTime() / 1000;
    let timer = new Date().getTime() / 1000;

-    logger.debug(`Indexing daily network hashrate`);
+    logger.debug(`Indexing daily network hashrate`, logger.tags.mining);
    loadingIndicators.setProgress('daily-hashrate-indexing', 0);

    while (toTimestamp > genesisTimestamp && toTimestamp > oldestConsecutiveBlockTimestamp) {
@@ -346,7 +346,7 @@ class Mining {
        const daysPerSeconds = Math.max(1, Math.round(indexedThisRun / elapsedSeconds));
        const progress = Math.round(totalIndexed / totalDayIndexed * 10000) / 100;
        const formattedDate = new Date(fromTimestamp).toUTCString();
-        logger.debug(`Getting network daily hashrate for ${formattedDate} | ~${daysPerSeconds.toFixed(2)} days/sec | total: ~${totalIndexed}/${Math.round(totalDayIndexed)} (${progress}%) | elapsed: ${runningFor} seconds`);
+        logger.debug(`Getting network daily hashrate for ${formattedDate} | ~${daysPerSeconds.toFixed(2)} days/sec | total: ~${totalIndexed}/${Math.round(totalDayIndexed)} (${progress}%) | elapsed: ${runningFor} seconds`, logger.tags.mining);
        timer = new Date().getTime() / 1000;
        indexedThisRun = 0;
        loadingIndicators.setProgress('daily-hashrate-indexing', progress);
@@ -373,14 +373,14 @@ class Mining {
      this.lastHashrateIndexingDate = new Date().getUTCDate();
      if (newlyIndexed > 0) {
-        logger.notice(`Daily network hashrate indexing completed: indexed ${newlyIndexed} days`, logger.tags.mining);
+        logger.info(`Daily network hashrate indexing completed: indexed ${newlyIndexed} days`, logger.tags.mining);
      } else {
        logger.debug(`Daily network hashrate indexing completed: indexed ${newlyIndexed} days`, logger.tags.mining);
      }
      loadingIndicators.setProgress('daily-hashrate-indexing', 100);
    } catch (e) {
      loadingIndicators.setProgress('daily-hashrate-indexing', 100);
-      logger.err(`Daily network hashrate indexing failed. Trying again in 10 seconds. Reason: ${(e instanceof Error ? e.message : e)}`, logger.tags.mining);
+      logger.err(`Daily network hashrate indexing failed. Trying again later. Reason: ${(e instanceof Error ? e.message : e)}`, logger.tags.mining);
      throw e;
    }
  }
@@ -446,13 +446,13 @@ class Mining {
        const elapsedSeconds = Math.max(1, Math.round((new Date().getTime() / 1000) - timer));
        if (elapsedSeconds > 5) {
          const progress = Math.round(totalBlockChecked / blocks.length * 100);
-          logger.info(`Indexing difficulty adjustment at block #${block.height} | Progress: ${progress}%`);
+          logger.info(`Indexing difficulty adjustment at block #${block.height} | Progress: ${progress}%`, logger.tags.mining);
          timer = new Date().getTime() / 1000;
        }
      }

      if (totalIndexed > 0) {
-        logger.notice(`Indexed ${totalIndexed} difficulty adjustments`, logger.tags.mining);
+        logger.info(`Indexed ${totalIndexed} difficulty adjustments`, logger.tags.mining);
      } else {
        logger.debug(`Indexed ${totalIndexed} difficulty adjustments`, logger.tags.mining);
      }
@@ -499,7 +499,7 @@ class Mining {
          if (blocksWithoutPrices.length > 200000) {
            logStr += ` | Progress ${Math.round(totalInserted / blocksWithoutPrices.length * 100)}%`;
          }
-          logger.debug(logStr);
+          logger.debug(logStr, logger.tags.mining);
          await BlocksRepository.$saveBlockPrices(blocksPrices);
          blocksPrices.length = 0;
        }
@@ -511,7 +511,7 @@ class Mining {
        if (blocksWithoutPrices.length > 200000) {
          logStr += ` | Progress ${Math.round(totalInserted / blocksWithoutPrices.length * 100)}%`;
        }
-        logger.debug(logStr);
+        logger.debug(logStr, logger.tags.mining);
        await BlocksRepository.$saveBlockPrices(blocksPrices);
      }
    } catch (e) {
@@ -568,6 +568,7 @@ class Mining {
  private getTimeRange(interval: string | null, scale = 1): number {
    switch (interval) {
+      case '4y': return 43200 * scale; // 12h
      case '3y': return 43200 * scale; // 12h
      case '2y': return 28800 * scale; // 8h
      case '1y': return 28800 * scale; // 8h
@@ -375,6 +375,17 @@ class StatisticsApi {
    }
  }

+  public async $list4Y(): Promise<OptimizedStatistic[]> {
+    try {
+      const query = this.getQueryForDays(43200, '4 YEAR'); // 12h interval
+      const [rows] = await DB.query({ sql: query, timeout: this.queryTimeout });
+      return this.mapStatisticToOptimizedStatistic(rows as Statistic[]);
+    } catch (e) {
+      logger.err('$list4Y() error' + (e instanceof Error ? e.message : e));
+      return [];
+    }
+  }
+
  private mapStatisticToOptimizedStatistic(statistic: Statistic[]): OptimizedStatistic[] {
    return statistic.map((s) => {
      return {
@@ -14,10 +14,11 @@ class StatisticsRoutes {
      .get(config.MEMPOOL.API_URL_PREFIX + 'statistics/1y', this.$getStatisticsByTime.bind(this, '1y'))
      .get(config.MEMPOOL.API_URL_PREFIX + 'statistics/2y', this.$getStatisticsByTime.bind(this, '2y'))
      .get(config.MEMPOOL.API_URL_PREFIX + 'statistics/3y', this.$getStatisticsByTime.bind(this, '3y'))
+      .get(config.MEMPOOL.API_URL_PREFIX + 'statistics/4y', this.$getStatisticsByTime.bind(this, '4y'))
    ;
  }

-  private async $getStatisticsByTime(time: '2h' | '24h' | '1w' | '1m' | '3m' | '6m' | '1y' | '2y' | '3y', req: Request, res: Response) {
+  private async $getStatisticsByTime(time: '2h' | '24h' | '1w' | '1m' | '3m' | '6m' | '1y' | '2y' | '3y' | '4y', req: Request, res: Response) {
    res.header('Pragma', 'public');
    res.header('Cache-control', 'public');
    res.setHeader('Expires', new Date(Date.now() + 1000 * 300).toUTCString());
@@ -54,6 +55,9 @@ class StatisticsRoutes {
      case '3y':
        result = await statisticsApi.$list3Y();
        break;
+      case '4y':
+        result = await statisticsApi.$list4Y();
+        break;
      default:
        result = await statisticsApi.$list2H();
    }
@@ -1,8 +1,8 @@
import logger from '../logger';
import * as WebSocket from 'ws';
import {
-  BlockExtended, TransactionExtended, WebsocketResponse, MempoolBlock, MempoolBlockDelta,
-  OptimizedStatistic, ILoadingIndicators, IConversionRates
+  BlockExtended, TransactionExtended, WebsocketResponse,
+  OptimizedStatistic, ILoadingIndicators
} from '../mempool.interfaces';
import blocks from './blocks';
import memPool from './mempool';
@@ -20,6 +20,7 @@ import BlocksSummariesRepository from '../repositories/BlocksSummariesRepository
import Audit from './audit';
import { deepClone } from '../utils/clone';
import priceUpdater from '../tasks/price-updater';
+import { ApiPrice } from '../repositories/PricesRepository';

class WebsocketHandler {
  private wss: WebSocket.Server | undefined;
@@ -193,7 +194,7 @@ class WebsocketHandler {
    });
  }

-  handleNewConversionRates(conversionRates: IConversionRates) {
+  handleNewConversionRates(conversionRates: ApiPrice) {
    if (!this.wss) {
      throw new Error('WebSocket.Server is not set');
    }
@@ -214,7 +215,7 @@ class WebsocketHandler {
        'mempoolInfo': memPool.getMempoolInfo(),
        'vBytesPerSecond': memPool.getVBytesPerSecond(),
        'blocks': _blocks,
-        'conversions': priceUpdater.latestPrices,
+        'conversions': priceUpdater.getLatestPrices(),
        'mempool-blocks': mempoolBlocks.getMempoolBlocks(),
        'transactions': memPool.getLatestTransactions(),
        'backendInfo': backendInfo.getBackendInfo(),
@@ -38,6 +38,8 @@ import forensicsService from './tasks/lightning/forensics.service';
import priceUpdater from './tasks/price-updater';
import chainTips from './api/chain-tips';
import { AxiosError } from 'axios';
+import v8 from 'v8';
+import { formatBytes, getBytesUnit } from './utils/format';

class Server {
  private wss: WebSocket.Server | undefined;
@@ -45,6 +47,11 @@ class Server {
  private app: Application;
  private currentBackendRetryInterval = 5;

+  private maxHeapSize: number = 0;
+  private heapLogInterval: number = 60;
+  private warnedHeapCritical: boolean = false;
+  private lastHeapLogTime: number | null = null;
+
  constructor() {
    this.app = express();
@@ -137,6 +144,8 @@ class Server {
      this.runMainUpdateLoop();
    }

+    setInterval(() => { this.healthCheck(); }, 2500);
+
    if (config.BISQ.ENABLED) {
      bisq.startBisqService();
      bisq.setPriceCallbackFunction((price) => websocketHandler.setExtraInitProperties('bsq-price', price));
@@ -255,6 +264,26 @@ class Server {
      channelsRoutes.initRoutes(this.app);
    }
  }

+  healthCheck(): void {
+    const now = Date.now();
+    const stats = v8.getHeapStatistics();
+    this.maxHeapSize = Math.max(stats.used_heap_size, this.maxHeapSize);
+    const warnThreshold = 0.8 * stats.heap_size_limit;
+
+    const byteUnits = getBytesUnit(Math.max(this.maxHeapSize, stats.heap_size_limit));
+
+    if (!this.warnedHeapCritical && this.maxHeapSize > warnThreshold) {
+      this.warnedHeapCritical = true;
+      logger.warn(`Used ${(this.maxHeapSize / stats.heap_size_limit).toFixed(2)}% of heap limit (${formatBytes(this.maxHeapSize, byteUnits, true)} / ${formatBytes(stats.heap_size_limit, byteUnits)})!`);
+    }
+    if (this.lastHeapLogTime === null || (now - this.lastHeapLogTime) > (this.heapLogInterval * 1000)) {
+      logger.debug(`Memory usage: ${formatBytes(this.maxHeapSize, byteUnits)} / ${formatBytes(stats.heap_size_limit, byteUnits)}`);
+      this.warnedHeapCritical = false;
+      this.maxHeapSize = 0;
+      this.lastHeapLogTime = now;
+    }
+  }
}

((): Server => new Server())();
@@ -76,13 +76,13 @@ class Indexer {
    this.tasksRunning.push(task);
    const lastestPriceId = await PricesRepository.$getLatestPriceId();
    if (priceUpdater.historyInserted === false || lastestPriceId === null) {
-      logger.debug(`Blocks prices indexer is waiting for the price updater to complete`);
+      logger.debug(`Blocks prices indexer is waiting for the price updater to complete`, logger.tags.mining);
      setTimeout(() => {
        this.tasksRunning = this.tasksRunning.filter(runningTask => runningTask !== task);
        this.runSingleTask('blocksPrices');
      }, 10000);
    } else {
-      logger.debug(`Blocks prices indexer will run now`);
+      logger.debug(`Blocks prices indexer will run now`, logger.tags.mining);
      await mining.$indexBlockPrices();
      this.tasksRunning = this.tasksRunning.filter(runningTask => runningTask !== task);
    }
@@ -112,7 +112,7 @@ class Indexer {
    this.runIndexer = false;
    this.indexerRunning = true;

-    logger.info(`Running mining indexer`);
+    logger.debug(`Running mining indexer`);

    await this.checkAvailableCoreIndexes();
@@ -122,7 +122,7 @@ class Indexer {
    const chainValid = await blocks.$generateBlockDatabase();
    if (chainValid === false) {
      // Chain of block hash was invalid, so we need to reindex. Stop here and continue at the next iteration
-      logger.warn(`The chain of block hash is invalid, re-indexing invalid data in 10 seconds.`);
+      logger.warn(`The chain of block hash is invalid, re-indexing invalid data in 10 seconds.`, logger.tags.mining);
      setTimeout(() => this.reindex(), 10000);
      this.indexerRunning = false;
      return;
@@ -293,7 +293,6 @@ interface RequiredParams {
}

export interface ILoadingIndicators { [name: string]: number; }
-export interface IConversionRates { [currency: string]: number; }

export interface IBackendInfo {
  hostname: string;
@@ -748,6 +748,7 @@ class BlocksRepository {
        SELECT height
        FROM compact_cpfp_clusters
        WHERE height <= ? AND height >= ?
+        GROUP BY height
        ORDER BY height DESC;
      `, [currentBlockHeight, minHeight]);
@@ -20,9 +20,9 @@ class DifficultyAdjustmentsRepository {
      await DB.query(query, params);
    } catch (e: any) {
      if (e.errno === 1062) { // ER_DUP_ENTRY - This scenario is possible upon node backend restart
-        logger.debug(`Cannot save difficulty adjustment at block ${adjustment.height}, already indexed, ignoring`);
+        logger.debug(`Cannot save difficulty adjustment at block ${adjustment.height}, already indexed, ignoring`, logger.tags.mining);
      } else {
-        logger.err(`Cannot save difficulty adjustment at block ${adjustment.height}. Reason: ${e instanceof Error ? e.message : e}`);
+        logger.err(`Cannot save difficulty adjustment at block ${adjustment.height}. Reason: ${e instanceof Error ? e.message : e}`, logger.tags.mining);
        throw e;
      }
    }
@@ -54,7 +54,7 @@ class DifficultyAdjustmentsRepository {
      const [rows] = await DB.query(query);
      return rows as IndexedDifficultyAdjustment[];
    } catch (e) {
-      logger.err(`Cannot get difficulty adjustments from the database. Reason: ` + (e instanceof Error ? e.message : e));
+      logger.err(`Cannot get difficulty adjustments from the database. Reason: ` + (e instanceof Error ? e.message : e), logger.tags.mining);
      throw e;
    }
  }
@@ -83,7 +83,7 @@ class DifficultyAdjustmentsRepository {
      const [rows] = await DB.query(query);
      return rows as IndexedDifficultyAdjustment[];
    } catch (e) {
-      logger.err(`Cannot get difficulty adjustments from the database. Reason: ` + (e instanceof Error ? e.message : e));
+      logger.err(`Cannot get difficulty adjustments from the database. Reason: ` + (e instanceof Error ? e.message : e), logger.tags.mining);
      throw e;
    }
  }
@@ -93,27 +93,27 @@ class DifficultyAdjustmentsRepository {
      const [rows]: any[] = await DB.query(`SELECT height FROM difficulty_adjustments`);
      return rows.map(block => block.height);
    } catch (e: any) {
-      logger.err(`Cannot get difficulty adjustment block heights. Reason: ${e instanceof Error ? e.message : e}`);
+      logger.err(`Cannot get difficulty adjustment block heights. Reason: ${e instanceof Error ? e.message : e}`, logger.tags.mining);
      throw e;
    }
  }

  public async $deleteAdjustementsFromHeight(height: number): Promise<void> {
    try {
-      logger.info(`Delete newer difficulty adjustments from height ${height} from the database`);
+      logger.info(`Delete newer difficulty adjustments from height ${height} from the database`, logger.tags.mining);
      await DB.query(`DELETE FROM difficulty_adjustments WHERE height >= ?`, [height]);
    } catch (e: any) {
-      logger.err(`Cannot delete difficulty adjustments from the database. Reason: ${e instanceof Error ? e.message : e}`);
+      logger.err(`Cannot delete difficulty adjustments from the database. Reason: ${e instanceof Error ? e.message : e}`, logger.tags.mining);
      throw e;
    }
  }

  public async $deleteLastAdjustment(): Promise<void> {
    try {
-      logger.info(`Delete last difficulty adjustment from the database`);
+      logger.info(`Delete last difficulty adjustment from the database`, logger.tags.mining);
      await DB.query(`DELETE FROM difficulty_adjustments ORDER BY time LIMIT 1`);
    } catch (e: any) {
-      logger.err(`Cannot delete last difficulty adjustment from the database. Reason: ${e instanceof Error ? e.message : e}`);
+      logger.err(`Cannot delete last difficulty adjustment from the database. Reason: ${e instanceof Error ? e.message : e}`, logger.tags.mining);
      throw e;
    }
  }
@@ -25,7 +25,7 @@ class HashratesRepository {
    try {
      await DB.query(query);
    } catch (e: any) {
-      logger.err('Cannot save indexed hashrate into db. Reason: ' + (e instanceof Error ? e.message : e));
+      logger.err('Cannot save indexed hashrate into db. Reason: ' + (e instanceof Error ? e.message : e), logger.tags.mining);
      throw e;
    }
  }
@@ -51,7 +51,7 @@ class HashratesRepository {
      const [rows]: any[] = await DB.query(query);
      return rows;
    } catch (e) {
-      logger.err('Cannot fetch network hashrate history. Reason: ' + (e instanceof Error ? e.message : e));
+      logger.err('Cannot fetch network hashrate history. Reason: ' + (e instanceof Error ? e.message : e), logger.tags.mining);
      throw e;
    }
  }
@@ -78,7 +78,7 @@ class HashratesRepository {
      const [rows]: any[] = await DB.query(query);
      return rows;
    } catch (e) {
-      logger.err('Cannot fetch network hashrate history. Reason: ' + (e instanceof Error ? e.message : e));
+      logger.err('Cannot fetch network hashrate history. Reason: ' + (e instanceof Error ? e.message : e), logger.tags.mining);
      throw e;
    }
  }
@@ -93,7 +93,7 @@ class HashratesRepository {
      const [rows]: any[] = await DB.query(query);
      return rows.map(row => row.timestamp);
    } catch (e) {
-      logger.err('Cannot retreive indexed weekly hashrate timestamps. Reason: ' + (e instanceof Error ? e.message : e));
+      logger.err('Cannot retreive indexed weekly hashrate timestamps. Reason: ' + (e instanceof Error ? e.message : e), logger.tags.mining);
      throw e;
    }
  }
@@ -128,7 +128,7 @@ class HashratesRepository {
      const [rows]: any[] = await DB.query(query);
      return rows;
    } catch (e) {
-      logger.err('Cannot fetch weekly pools hashrate history. Reason: ' + (e instanceof Error ? e.message : e));
+      logger.err('Cannot fetch weekly pools hashrate history. Reason: ' + (e instanceof Error ? e.message : e), logger.tags.mining);
      throw e;
    }
  }
@@ -158,7 +158,7 @@ class HashratesRepository {
      const [rows]: any[] = await DB.query(query, [pool.id]);
      boundaries = rows[0];
    } catch (e) {
-      logger.err('Cannot fetch hashrate start/end timestamps for this pool. Reason: ' + (e instanceof Error ? e.message : e));
+      logger.err('Cannot fetch hashrate start/end timestamps for this pool. Reason: ' + (e instanceof Error ? e.message : e), logger.tags.mining);
    }

    // Get hashrates entries between boundaries
@@ -173,7 +173,7 @@ class HashratesRepository {
      const [rows]: any[] = await DB.query(query, [boundaries.firstTimestamp, boundaries.lastTimestamp, pool.id]);
      return rows;
    } catch (e) {
-      logger.err('Cannot fetch pool hashrate history for this pool. Reason: ' + (e instanceof Error ? e.message : e));
+      logger.err('Cannot fetch pool hashrate history for this pool. Reason: ' + (e instanceof Error ? e.message : e), logger.tags.mining);
      throw e;
    }
  }
@@ -192,7 +192,7 @@ class HashratesRepository {
      }
      return rows[0]['number'];
    } catch (e) {
-      logger.err(`Cannot retrieve last indexing run for ${key}. Reason: ` + (e instanceof Error ? e.message : e));
+      logger.err(`Cannot retrieve last indexing run for ${key}. Reason: ` + (e instanceof Error ? e.message : e), logger.tags.mining);
      throw e;
    }
  }
@@ -201,7 +201,7 @@ class HashratesRepository {
   * Delete most recent data points for re-indexing
   */
  public async $deleteLastEntries() {
-    logger.info(`Delete latest hashrates data points from the database`);
+    logger.info(`Delete latest hashrates data points from the database`, logger.tags.mining);

    try {
      const [rows]: any[] = await DB.query(`SELECT MAX(hashrate_timestamp) as timestamp FROM hashrates GROUP BY type`);
@@ -212,7 +212,7 @@ class HashratesRepository {
      mining.lastHashrateIndexingDate = null;
      mining.lastWeeklyHashrateIndexingDate = null;
    } catch (e) {
-      logger.err('Cannot delete latest hashrates data points. Reason: ' + (e instanceof Error ? e.message : e));
+      logger.err('Cannot delete latest hashrates data points. Reason: ' + (e instanceof Error ? e.message : e), logger.tags.mining);
    }
  }
@@ -228,7 +228,7 @@ class HashratesRepository {
      mining.lastHashrateIndexingDate = null;
      mining.lastWeeklyHashrateIndexingDate = null;
    } catch (e) {
-      logger.err('Cannot delete latest hashrates data points. Reason: ' + (e instanceof Error ? e.message : e));
+      logger.err('Cannot delete latest hashrates data points. Reason: ' + (e instanceof Error ? e.message : e), logger.tags.mining);
    }
  }
}
@@ -1,6 +1,5 @@
import DB from '../database';
import logger from '../logger';
-import { IConversionRates } from '../mempool.interfaces';
import priceUpdater from '../tasks/price-updater';

export interface ApiPrice {
@@ -13,6 +12,16 @@ export interface ApiPrice {
  AUD: number,
  JPY: number,
}
+const ApiPriceFields = `
+  UNIX_TIMESTAMP(time) as time,
+  USD,
+  EUR,
+  GBP,
+  CAD,
+  CHF,
+  AUD,
+  JPY
+`;

export interface ExchangeRates {
  USDEUR: number,
@@ -39,7 +48,7 @@ export const MAX_PRICES = {
};

class PricesRepository {
-  public async $savePrices(time: number, prices: IConversionRates): Promise<void> {
+  public async $savePrices(time: number, prices: ApiPrice): Promise<void> {
    if (prices.USD === -1) {
      // Some historical price entries have no USD prices, so we just ignore them to avoid future UX issues
      // As of today there are only 4 (on 2013-09-05, 2013-0909, 2013-09-12 and 2013-09-26) so that's fine
@@ -60,77 +69,115 @@ class PricesRepository {
        VALUE (FROM_UNIXTIME(?), ?, ?, ?, ?, ?, ?, ? )`,
        [time, prices.USD, prices.EUR, prices.GBP, prices.CAD, prices.CHF, prices.AUD, prices.JPY]
      );
-    } catch (e: any) {
+    } catch (e) {
      logger.err(`Cannot save exchange rate into db. Reason: ` + (e instanceof Error ? e.message : e));
      throw e;
    }
  }

  public async $getOldestPriceTime(): Promise<number> {
-    const [oldestRow] = await DB.query(`SELECT UNIX_TIMESTAMP(time) as time from prices WHERE USD != 0 ORDER BY time LIMIT 1`);
+    const [oldestRow] = await DB.query(`
+      SELECT UNIX_TIMESTAMP(time) AS time
+      FROM prices
+      ORDER BY time
+      LIMIT 1
+    `);
    return oldestRow[0] ? oldestRow[0].time : 0;
  }

  public async $getLatestPriceId(): Promise<number | null> {
-    const [oldestRow] = await DB.query(`SELECT id from prices WHERE USD != 0 ORDER BY time DESC LIMIT 1`);
+    const [oldestRow] = await DB.query(`
+      SELECT id
+      FROM prices
+      ORDER BY time DESC
+      LIMIT 1`
+    );
    return oldestRow[0] ? oldestRow[0].id : null;
  }

  public async $getLatestPriceTime(): Promise<number> {
-    const [oldestRow] = await DB.query(`SELECT UNIX_TIMESTAMP(time) as time from prices WHERE USD != 0 ORDER BY time DESC LIMIT 1`);
+    const [oldestRow] = await DB.query(`
+      SELECT UNIX_TIMESTAMP(time) AS time
+      FROM prices
+      ORDER BY time DESC
+      LIMIT 1`
+    );
    return oldestRow[0] ? oldestRow[0].time : 0;
  }

  public async $getPricesTimes(): Promise<number[]> {
-    const [times]: any[] = await DB.query(`SELECT UNIX_TIMESTAMP(time) as time from prices WHERE USD != 0 ORDER BY time`);
+    const [times] = await DB.query(`
+      SELECT UNIX_TIMESTAMP(time) AS time
+      FROM prices
+      WHERE USD != -1
+      ORDER BY time
+    `);
+    if (!Array.isArray(times)) {
+      return [];
+    }
    return times.map(time => time.time);
  }

-  public async $getPricesTimesAndId(): Promise<number[]> {
-    const [times]: any[] = await DB.query(`SELECT UNIX_TIMESTAMP(time) as time, id, USD from prices ORDER BY time`);
-    return times;
-  }
+  public async $getPricesTimesAndId(): Promise<{time: number, id: number, USD: number}[]> {
+    const [times] = await DB.query(`
+      SELECT
+        UNIX_TIMESTAMP(time) AS time,
+        id,
+        USD
+      FROM prices
+      ORDER BY time
+    `);
+    return times as {time: number, id: number, USD: number}[];
+  }

-  public async $getLatestConversionRates(): Promise<any> {
-    const [rates]: any[] = await DB.query(`
-      SELECT USD, EUR, GBP, CAD, CHF, AUD, JPY
+  public async $getLatestConversionRates(): Promise<ApiPrice> {
+    const [rates] = await DB.query(`
+      SELECT ${ApiPriceFields}
      FROM prices
      ORDER BY time DESC
      LIMIT 1`
    );

-    if (!rates || rates.length === 0) {
+    if (!Array.isArray(rates) || rates.length === 0) {
      return priceUpdater.getEmptyPricesObj();
    }
-    return rates[0];
+    return rates[0] as ApiPrice;
  }

  public async $getNearestHistoricalPrice(timestamp: number | undefined): Promise<Conversion | null> {
    try {
-      const [rates]: any[] = await DB.query(`
-        SELECT *, UNIX_TIMESTAMP(time) AS time
+      const [rates] = await DB.query(`
+        SELECT ${ApiPriceFields}
        FROM prices
        WHERE UNIX_TIMESTAMP(time) < ?
        ORDER BY time DESC
        LIMIT 1`,
        [timestamp]
      );
-      if (!rates) {
+      if (!Array.isArray(rates)) {
        throw Error(`Cannot get single historical price from the database`);
      }

      // Compute fiat exchange rates
-      const latestPrice = await this.$getLatestConversionRates();
+      let latestPrice = rates[0] as ApiPrice;
+      if (latestPrice.USD === -1) {
+        latestPrice = priceUpdater.getEmptyPricesObj();
+      }
+
+      const computeFx = (usd: number, other: number): number =>
+        Math.round(Math.max(other, 0) / Math.max(usd, 1) * 100) / 100;
+
      const exchangeRates: ExchangeRates = {
-        USDEUR: Math.round(latestPrice.EUR / latestPrice.USD * 100) / 100,
-        USDGBP: Math.round(latestPrice.GBP / latestPrice.USD * 100) / 100,
-        USDCAD: Math.round(latestPrice.CAD / latestPrice.USD * 100) / 100,
-        USDCHF: Math.round(latestPrice.CHF / latestPrice.USD * 100) / 100,
-        USDAUD: Math.round(latestPrice.AUD / latestPrice.USD * 100) / 100,
-        USDJPY: Math.round(latestPrice.JPY / latestPrice.USD * 100) / 100,
+        USDEUR: computeFx(latestPrice.USD, latestPrice.EUR),
+        USDGBP: computeFx(latestPrice.USD, latestPrice.GBP),
+        USDCAD: computeFx(latestPrice.USD, latestPrice.CAD),
+        USDCHF: computeFx(latestPrice.USD, latestPrice.CHF),
+        USDAUD: computeFx(latestPrice.USD, latestPrice.AUD),
+        USDJPY: computeFx(latestPrice.USD, latestPrice.JPY),
      };

      return {
-        prices: rates,
+        prices: rates as ApiPrice[],
        exchangeRates: exchangeRates
      };
    } catch (e) {
@@ -141,28 +188,35 @@ class PricesRepository {
  public async $getHistoricalPrices(): Promise<Conversion | null> {
    try {
-      const [rates]: any[] = await DB.query(`
-        SELECT *, UNIX_TIMESTAMP(time) AS time
+      const [rates] = await DB.query(`
+        SELECT ${ApiPriceFields}
        FROM prices
        ORDER BY time DESC
      `);
-      if (!rates) {
+      if (!Array.isArray(rates)) {
        throw Error(`Cannot get average historical price from the database`);
      }

      // Compute fiat exchange rates
-      const latestPrice: ApiPrice = rates[0];
+      let latestPrice = rates[0] as ApiPrice;
+      if (latestPrice.USD === -1) {
+        latestPrice = priceUpdater.getEmptyPricesObj();
+      }
+
+      const computeFx = (usd: number, other: number): number =>
+        Math.round(Math.max(other, 0) / Math.max(usd, 1) * 100) / 100;
+
      const exchangeRates: ExchangeRates = {
-        USDEUR: Math.round(latestPrice.EUR / latestPrice.USD * 100) / 100,
-        USDGBP: Math.round(latestPrice.GBP / latestPrice.USD * 100) / 100,
-        USDCAD: Math.round(latestPrice.CAD / latestPrice.USD * 100) / 100,
-        USDCHF: Math.round(latestPrice.CHF / latestPrice.USD * 100) / 100,
-        USDAUD: Math.round(latestPrice.AUD / latestPrice.USD * 100) / 100,
-        USDJPY: Math.round(latestPrice.JPY / latestPrice.USD * 100) / 100,
+        USDEUR: computeFx(latestPrice.USD, latestPrice.EUR),
+        USDGBP: computeFx(latestPrice.USD, latestPrice.GBP),
+        USDCAD: computeFx(latestPrice.USD, latestPrice.CAD),
+        USDCHF: computeFx(latestPrice.USD, latestPrice.CHF),
+        USDAUD: computeFx(latestPrice.USD, latestPrice.AUD),
+        USDJPY: computeFx(latestPrice.USD, latestPrice.JPY),
      };

      return {
-        prices: rates,
+        prices: rates as ApiPrice[],
        exchangeRates: exchangeRates
      };
    } catch (e) {
@@ -411,7 +411,7 @@ class LightningStatsImporter {
    }

    if (totalProcessed > 0) {
-      logger.notice(`Lightning network stats historical import completed`, logger.tags.ln);
+      logger.info(`Lightning network stats historical import completed`, logger.tags.ln);
    }
  } catch (e) {
    logger.err(`Lightning network stats historical failed. Reason: ${e instanceof Error ? e.message : e}`, logger.tags.ln);
@@ -12,7 +12,7 @@ import * as https from 'https';
 */
class PoolsUpdater {
  lastRun: number = 0;
-  currentSha: string | undefined = undefined;
+  currentSha: string | null = null;
  poolsUrl: string = config.MEMPOOL.POOLS_JSON_URL;
  treeUrl: string = config.MEMPOOL.POOLS_JSON_TREE_URL;
@@ -33,7 +33,7 @@ class PoolsUpdater {
    try {
      const githubSha = await this.fetchPoolsSha(); // Fetch pools-v2.json sha from github
-      if (githubSha === undefined) {
+      if (githubSha === null) {
        return;
      }
@@ -42,12 +42,12 @@ class PoolsUpdater {
      }

      logger.debug(`pools-v2.json sha | Current: ${this.currentSha} | Github: ${githubSha}`);
-      if (this.currentSha !== undefined && this.currentSha === githubSha) {
+      if (this.currentSha !== null && this.currentSha === githubSha) {
        return;
      }

      // See backend README for more details about the mining pools update process
-      if (this.currentSha !== undefined && // If we don't have any mining pool, download it at least once
+      if (this.currentSha !== null && // If we don't have any mining pool, download it at least once
        config.MEMPOOL.AUTOMATIC_BLOCK_REINDEXING !== true && // Automatic pools update is disabled
        !process.env.npm_config_update_pools // We're not manually updating mining pool
      ) {
@@ -57,7 +57,7 @@ class PoolsUpdater {
      }

      const network = config.SOCKS5PROXY.ENABLED ? 'tor' : 'clearnet';
-      if (this.currentSha === undefined) {
+      if (this.currentSha === null) {
        logger.info(`Downloading pools-v2.json for the first time from ${this.poolsUrl} over ${network}`, logger.tags.mining);
      } else {
        logger.warn(`pools-v2.json is outdated, fetch latest from ${this.poolsUrl} over ${network}`, logger.tags.mining);
@@ -82,7 +82,7 @@ class PoolsUpdater {
        logger.err(`Could not migrate mining pools, rolling back. Exception: ${JSON.stringify(e)}`, logger.tags.mining);
        await DB.query('ROLLBACK;');
      }
-      logger.notice('PoolsUpdater completed');
+      logger.info('PoolsUpdater completed');

    } catch (e) {
      this.lastRun = now - (oneWeek - oneDay); // Try again in 24h instead of waiting next week
@@ -108,20 +108,20 @@ class PoolsUpdater {
  /**
   * Fetch our latest pools-v2.json sha from the db
   */
-  private async getShaFromDb(): Promise<string | undefined> {
+  private async getShaFromDb(): Promise<string | null> {
    try {
      const [rows]: any[] = await DB.query('SELECT string FROM state WHERE name="pools_json_sha"');
-      return (rows.length > 0 ? rows[0].string : undefined);
+      return (rows.length > 0 ? rows[0].string : null);
    } catch (e) {
      logger.err('Cannot fetch pools-v2.json sha from db. Reason: ' + (e instanceof Error ? e.message : e), logger.tags.mining);
-      return undefined;
+      return null;
    }
  }

  /**
   * Fetch our latest pools-v2.json sha from github
   */
-  private async fetchPoolsSha(): Promise<string | undefined> {
+  private async fetchPoolsSha(): Promise<string | null> {
    const response = await this.query(this.treeUrl);

    if (response !== undefined) {
@@ -133,7 +133,7 @@ class PoolsUpdater {
    }

    logger.err(`Cannot find "pools-v2.json" in git tree (${this.treeUrl})`, logger.tags.mining);
-    return undefined;
+    return null;
  }

  /**
@@ -8,9 +8,6 @@ class BitfinexApi implements PriceFeed {
  public url: string = 'https://api.bitfinex.com/v1/pubticker/BTC';
  public urlHist: string = 'https://api-pub.bitfinex.com/v2/candles/trade:{GRANULARITY}:tBTC{CURRENCY}/hist';

-  constructor() {
-  }
-
  public async $fetchPrice(currency): Promise<number> {
    const response = await query(this.url + currency);
    if (response && response['last_price']) {
@@ -98,7 +98,7 @@ class KrakenApi implements PriceFeed {
    }

    if (Object.keys(priceHistory).length > 0) {
-      logger.notice(`Inserted ${Object.keys(priceHistory).length} Kraken EUR, USD, GBP, JPY, CAD, CHF and AUD weekly price history into db`, logger.tags.mining);
+      logger.info(`Inserted ${Object.keys(priceHistory).length} Kraken EUR, USD, GBP, JPY, CAD, CHF and AUD weekly price history into db`, logger.tags.mining);
    }
  }
}
@@ -2,8 +2,7 @@ import * as fs from 'fs';
import path from 'path';
import config from '../config';
import logger from '../logger';
-import { IConversionRates } from '../mempool.interfaces';
-import PricesRepository, { MAX_PRICES } from '../repositories/PricesRepository';
+import PricesRepository, { ApiPrice, MAX_PRICES } from '../repositories/PricesRepository';
import BitfinexApi from './price-feeds/bitfinex-api';
import BitflyerApi from './price-feeds/bitflyer-api';
import CoinbaseApi from './price-feeds/coinbase-api';
@@ -21,18 +20,18 @@ export interface PriceFeed {
}

export interface PriceHistory {
-  [timestamp: number]: IConversionRates;
+  [timestamp: number]: ApiPrice;
}

class PriceUpdater {
  public historyInserted = false;
-  lastRun = 0;
-  lastHistoricalRun = 0;
-  running = false;
-  feeds: PriceFeed[] = [];
-  currencies: string[] = ['USD', 'EUR', 'GBP', 'CAD', 'CHF', 'AUD', 'JPY'];
-  latestPrices: IConversionRates;
-  private ratesChangedCallback: ((rates: IConversionRates) => void) | undefined;
+  private lastRun = 0;
+  private lastHistoricalRun = 0;
+  private running = false;
+  private feeds: PriceFeed[] = [];
+  private currencies: string[] = ['USD', 'EUR', 'GBP', 'CAD', 'CHF', 'AUD', 'JPY'];
+  private latestPrices: ApiPrice;
+  private ratesChangedCallback: ((rates: ApiPrice) => void) | undefined;

  constructor() {
    this.latestPrices = this.getEmptyPricesObj();
@@ -44,8 +43,13 @@ class PriceUpdater {
    this.feeds.push(new GeminiApi());
  }

-  public getEmptyPricesObj(): IConversionRates {
+  public getLatestPrices(): ApiPrice {
+    return this.latestPrices;
+  }
+
+  public getEmptyPricesObj(): ApiPrice {
    return {
+      time: 0,
      USD: -1,
      EUR: -1,
      GBP: -1,
@@ -56,7 +60,7 @@ class PriceUpdater {
    };
  }

-  public setRatesChangedCallback(fn: (rates: IConversionRates) => void) {
+  public setRatesChangedCallback(fn: (rates: ApiPrice) => void): void {
    this.ratesChangedCallback = fn;
  }
@@ -156,6 +160,10 @@ class PriceUpdater {
    }

    this.lastRun = new Date().getTime() / 1000;
+
+    if (this.latestPrices.USD === -1) {
+      this.latestPrices = await PricesRepository.$getLatestConversionRates();
+    }
  }

  /**
@@ -224,7 +232,7 @@ class PriceUpdater {
    // Group them by timestamp and currency, for example
    // grouped[123456789]['USD'] = [1, 2, 3, 4];
-    const grouped: any = {};
+    const grouped = {};
    for (const historicalEntry of historicalPrices) {
      for (const time in historicalEntry) {
        if (existingPriceTimes.includes(parseInt(time, 10))) {
@@ -249,7 +257,7 @@ class PriceUpdater {
    // Average prices and insert everything into the db
    let totalInserted = 0;
    for (const time in grouped) {
-      const prices: IConversionRates = this.getEmptyPricesObj();
+      const prices: ApiPrice = this.getEmptyPricesObj();
      for (const currency in grouped[time]) {
        if (grouped[time][currency].length === 0) {
          continue;
backend/src/utils/format.ts (new file, +29 lines)
@@ -0,0 +1,29 @@
+const byteUnits = ['B', 'kB', 'MB', 'GB', 'TB'];
+
+export function getBytesUnit(bytes: number): string {
+  if (isNaN(bytes) || !isFinite(bytes)) {
+    return 'B';
+  }
+
+  let unitIndex = 0;
+  while (unitIndex < byteUnits.length && bytes > 1024) {
+    unitIndex++;
+    bytes /= 1024;
+  }
+
+  return byteUnits[unitIndex];
+}
+
+export function formatBytes(bytes: number, toUnit: string, skipUnit = false): string {
+  if (isNaN(bytes) || !isFinite(bytes)) {
+    return `${bytes}`;
+  }
+
+  let unitIndex = 0;
+  while (unitIndex < byteUnits.length && (toUnit && byteUnits[unitIndex] !== toUnit || (!toUnit && bytes > 1024))) {
+    unitIndex++;
+    bytes /= 1024;
+  }
+
+  return `${bytes.toFixed(2)}${skipUnit ? '' : ' ' + byteUnits[unitIndex]}`;
+}
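For reference, a minimal usage sketch of these two helpers (hypothetical byte values; the import path assumes a caller in `backend/src`, as in `index.ts` above). `getBytesUnit` picks one unit from the larger value so that both readings share the same scale, mirroring how `healthCheck()` formats its heap log line:

```typescript
import { formatBytes, getBytesUnit } from './utils/format';

// Hypothetical heap readings, in bytes
const heapUsed = 123_456_789;
const heapLimit = 4_294_967_296;

// Derive one shared unit from the larger value ('GB' here)
const unit = getBytesUnit(Math.max(heapUsed, heapLimit));

// skipUnit=true omits the unit suffix on the first value
console.log(`${formatBytes(heapUsed, unit, true)} / ${formatBytes(heapLimit, unit)}`);
// → "0.11 / 4.00 GB"
```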