Compare commits


844 Commits

Author SHA1 Message Date
Mononaut
60c50fc47e Add regtest network support 2023-07-24 16:06:14 +09:00
softsimon
e2fdacfddd Merge pull request #4041 from mempool/simon/sanitize-lightning-channel-id
Sanitize channel id search
2023-07-24 13:26:20 +09:00
softsimon
c84a444f79 Merge pull request #4039 from mempool/mononaut/fix-sigops
Fix missing sigops
2023-07-24 13:26:13 +09:00
softsimon
ee2d8f8c5a Sanitize channel id search 2023-07-24 13:21:06 +09:00
Mononaut
44f2217a68 Fix typo which skips sigop calculation 2023-07-24 10:49:29 +09:00
wiz
caa8cfbc0e Another hotfix for CLN crash 2023-07-23 22:35:32 +09:00
wiz
02f361af73 Hotfix for CLN crash 2023-07-23 22:21:53 +09:00
softsimon
5e91af168b Merge pull request #4027 from mempool/mononaut/p2pk
Support P2PK address types
2023-07-23 15:06:04 +09:00
softsimon
ae183210e0 Updating pubkey width on mobile and desktop 2023-07-23 14:43:43 +09:00
Mononaut
56127dce6a Add P2PK support to search bar 2023-07-23 14:05:04 +09:00
Mononaut
0376467e6c highlight matching P2PK inputs 2023-07-23 14:01:31 +09:00
Mononaut
48b55eed46 improve script hex parsing validation 2023-07-23 14:01:31 +09:00
Mononaut
0ce043cca9 Fix esplora error messages 2023-07-23 14:01:31 +09:00
Mononaut
65dbafd2ec Support P2PK address types 2023-07-23 14:01:31 +09:00
softsimon
3f36a30d1d Merge pull request #4031 from knorrium/es2022_fixes
Es2022 fixes
2023-07-23 11:56:04 +09:00
softsimon
b021746e9e Merge pull request #4030 from mempool/fix/CI-Rust
Use more reliable Github Action for Rust toolchain install
2023-07-23 11:50:00 +09:00
junderw
975ec772fa Use more reliable Github Action for Rust toolchain install. 2023-07-22 19:41:36 -07:00
Felipe Knorr Kuhn
442a4ff6e0 Fix tsconfig settings for ES2022 2023-07-23 11:06:21 +09:00
Felipe Knorr Kuhn
cea218b81a Reset the supported browsers list 2023-07-23 11:05:49 +09:00
softsimon
a24d2ce547 Merge pull request #4021 from mempool/mononaut/blockchain-scroll
apply blockchain scroll offset as soon as element is ready
2023-07-22 14:32:42 +09:00
softsimon
95707de8ec Merge pull request #4013 from mempool/simon/css-fix-fa-icons
Fix some icon css color changes
2023-07-22 13:53:55 +09:00
softsimon
eb37066d5d Merge pull request #4024 from devinbileck/patch-1
Accept CLA
2023-07-22 13:53:45 +09:00
Devin Bileck
f0983844c1 Accept CLA 2023-07-21 15:13:10 -07:00
softsimon
a0bd4e0f63 Merge branch 'master' into mononaut/blockchain-scroll 2023-07-21 21:12:11 +09:00
softsimon
141ab8076f Merge pull request #4014 from mempool/mononaut/fix-blocks-list
Fix blocks list observable
2023-07-21 21:12:03 +09:00
softsimon
267f3d4877 Merge pull request #4020 from knorrium/fix_rate_limiting
Fix rate limiting when syncing assets on CI
2023-07-21 21:11:41 +09:00
softsimon
460a41644d Merge pull request #4015 from pedromvpg/patch-1
sign contributor agreement
2023-07-21 21:04:31 +09:00
Felipe Knorr Kuhn
ca69d19bf7 Use the GITHUB_SECRET to authenticate with the API
Fix the environment variable

Add extra logging when using the authentication

Use the GITHUB_TOKEN on the frontend build step
2023-07-21 18:14:32 +09:00
Mononaut
d91fa5c6ef null => of([]) 2023-07-21 18:10:13 +09:00
wiz
3610aa2e20 Merge pull request #3999 from mempool/mononaut/fix-liquid-fees
Fix fee handling on Liquid
2023-07-21 17:49:48 +09:00
wiz
7a6da07a61 Merge branch 'master' into mononaut/fix-liquid-fees 2023-07-21 17:38:47 +09:00
Mononaut
0f77fb88bf handle missing block.extras on liquid 2023-07-21 17:18:45 +09:00
Mononaut
1bd19e1d8d apply blockchain scroll offset when element is ready 2023-07-21 17:10:58 +09:00
Felipe Knorr Kuhn
61eeb82694 Expose the GITHUB_SECRET to the frontend build step 2023-07-21 17:09:57 +09:00
softsimon
135adfecbd Merge pull request #3934 from mempool/junderw/fix-armv7-docker
Fix backend docker build for armv7
2023-07-21 10:19:20 +09:00
softsimon
20b2017908 Merge pull request #4016 from knorrium/tweak_dependabot
Tweak dependabot settings
2023-07-21 10:18:58 +09:00
Felipe Knorr Kuhn
b1345038bd Tweak dependabot settings 2023-07-20 18:09:36 -07:00
Felipe Knorr Kuhn
7ba627e243 Merge branch 'master' into junderw/fix-armv7-docker 2023-07-20 17:31:17 -07:00
Pedro
6b453ef018 sign contributor agreement 2023-07-20 17:24:13 +01:00
Mononaut
943dc6f5e6 Fix blocks list observable 2023-07-20 17:30:26 +09:00
softsimon
4192869593 Fix some icon css color changes 2023-07-20 16:02:04 +09:00
softsimon
e066bb1e9d Merge pull request #4002 from mempool/nymkappa/mining-pool-summary
[mining] add missing empty td at the bottom of pool ranking
2023-07-20 15:10:32 +09:00
softsimon
6cdc97848f Merge pull request #3773 from mempool/nymkappa/search-bar-align
[search bar] fix alignment issue
2023-07-20 14:59:54 +09:00
nymkappa
ade7908229 [mining] add missing empty col at the bottom of pool ranking 2023-07-20 10:36:26 +09:00
nymkappa
9a2ab7fe21 [search bar] chrome - fix flex-auto 2023-07-20 09:57:04 +09:00
Mononaut
87e39b8389 Fix liquid blockchain bar 2023-07-19 16:24:05 +09:00
softsimon
bc508b6621 Merge pull request #3949 from knorrium/node_v20_to_matrix
Add node v20 to the test matrix
2023-07-19 15:56:31 +09:00
Mononaut
709783280a Fix liquid fees & remove minimum fee rate 2023-07-19 15:42:02 +09:00
softsimon
f9aa1b5b35 Merge pull request #3194 from mempool/hunicus/manual-deployment-enterprise
Specify manual deployment support for enterprise sponsors
2023-07-19 14:25:02 +09:00
softsimon
2fffd8b43c Merge branch 'master' into hunicus/manual-deployment-enterprise 2023-07-19 14:19:03 +09:00
softsimon
741571a93a Merge pull request #3607 from mempool/nymkappa/clip-label-overflow
Clip overflowing labels in pool component on mobile
2023-07-19 14:05:31 +09:00
softsimon
07a424e6f1 Merge pull request #3997 from mempool/mononaut/liquid-latest-blocks
restore latest blocks on liquid
2023-07-19 14:03:58 +09:00
Mononaut
943d05f680 restore latest blocks on liquid 2023-07-19 12:09:46 +09:00
softsimon
57aa2c69ac Merge branch 'master' into nymkappa/clip-label-overflow 2023-07-19 11:31:01 +09:00
softsimon
61e28a33e0 Merge pull request #3996 from mempool/simon/zero-zat-minimum-fee-fix
Fix for 0 sat minimum fee
2023-07-19 11:15:53 +09:00
softsimon
4055128d7f Fix for 0 sat minimum fee 2023-07-19 11:06:28 +09:00
Felipe Knorr Kuhn
7c29e51bbb Merge branch 'master' into junderw/fix-armv7-docker 2023-07-18 14:30:20 -07:00
softsimon
fabd420586 Merge branch 'master' into node_v20_to_matrix 2023-07-18 18:56:00 +09:00
softsimon
a12316f4dc Updating default mining pool icon 2023-07-18 18:16:21 +09:00
softsimon
548611f13a Merge branch 'master' into nymkappa/search-bar-align 2023-07-18 18:02:44 +09:00
softsimon
3397edecb4 Merge pull request #3659 from nothing0012/zz/add-cla
Add cla
2023-07-18 17:42:00 +09:00
softsimon
b041a145b1 Merge pull request #3654 from learntheropes/patch-1
sign cla
2023-07-18 17:41:49 +09:00
softsimon
7046c3d6c3 Merge pull request #3935 from mempool/mononaut/lightning-justice
Add lightning justice page
2023-07-18 17:24:26 +09:00
softsimon
67f58a4491 Sorting by closing date descending 2023-07-18 17:19:14 +09:00
softsimon
d74e4b1876 Replacing loading text with spinner 2023-07-18 17:15:54 +09:00
softsimon
e757b74c87 Merge pull request #3990 from mempool/simon/removing-unused-fullrbf-code
Removing unused rbf frontend code
2023-07-18 17:06:01 +09:00
wiz
4b41730636 Merge branch 'master' into mononaut/lightning-justice 2023-07-18 16:49:49 +09:00
wiz
23ecf9cf41 Merge pull request #3993 from mempool/simon/backend-deps-18-07
Bumping backend deps
2023-07-18 16:19:28 +09:00
wiz
f8cfa35552 Merge pull request #3965 from mempool/nymkappa/network-switch-align
[network selection] fix some align issues
2023-07-18 15:53:41 +09:00
wiz
c1d0e802d9 Merge branch 'master' into nymkappa/network-switch-align 2023-07-18 15:40:57 +09:00
softsimon
bde7fad1c4 Bumping backend deps 2023-07-18 15:36:30 +09:00
softsimon
8007f5a3d3 Merge pull request #3992 from mempool/simon/angular-16
Angular 16
2023-07-18 15:10:49 +09:00
softsimon
83f955e469 Merge pull request #3969 from mempool/mononaut/ancestors-undefined
Fix tx.ancestors undefined bug
2023-07-18 15:10:17 +09:00
nymkappa
29c53a7852 [search bar] fix alignment issue 2023-07-18 15:01:30 +09:00
softsimon
3825a1c359 Trying downgrading cypress wait 2023-07-18 13:45:24 +09:00
softsimon
ac82a25fa2 Angular 16 2023-07-18 13:38:33 +09:00
softsimon
85e071049c Removing unused rbf frontend code 2023-07-18 11:42:13 +09:00
softsimon
ae22b7b444 Merge pull request #3989 from mempool/mononaut/fullrbf-list-toggle
Remove frontend FULL_RBF_ENABLED flag
2023-07-18 11:36:46 +09:00
softsimon
9aba4d4357 Merge pull request #3988 from mempool/mononaut/clock-width-fix
Fix clock horizontal scroll bug
2023-07-18 11:03:11 +09:00
Mononaut
17866f80bd Remove frontend FULL_RBF_ENABLED flag 2023-07-18 11:01:35 +09:00
Mononaut
5e46176c4e Fix clock horizontal scroll bug 2023-07-18 10:52:47 +09:00
wiz
fa48c6f025 ops: Use NodeJS v18 to build, v20 to run backend 2023-07-18 08:52:51 +09:00
Felipe Knorr Kuhn
69c54d7207 Merge branch 'master' into node_v20_to_matrix 2023-07-17 15:53:34 -07:00
wiz
7e06c97f51 ops: Bump NodeJS to v20.4.0 2023-07-18 07:50:14 +09:00
wiz
c02eef352b ops: Increase bitcoin.conf maxconnections 42 -> 100 2023-07-17 22:56:16 +09:00
wiz
924782a4d1 Merge pull request #3867 from mempool/simon/enable-mempoolfullrbf
It's time (Enable fullrbf)
2023-07-17 21:48:46 +09:00
softsimon
5c6e3dfd5c Merge pull request #3983 from mempool/mononaut/hide-unknown-conf-badge
Hide confirmations badge if height unknown
2023-07-17 21:48:08 +09:00
softsimon
89dac1a77c Merge pull request #3979 from mempool/mononaut/new-block-race-condition
get chain tip direct from Bitcoin Core to avoid race conditions
2023-07-17 21:38:35 +09:00
Mononaut
6a16759e20 Hide confirmations badge if height unknown 2023-07-17 19:23:09 +09:00
softsimon
c355602924 Merge pull request #3981 from mempool/mononaut/fix-websocket-null-data
Fix websocket null data for undefined rbfSummary
2023-07-17 19:10:05 +09:00
softsimon
08ad81f4b5 Merge pull request #3982 from mempool/mononaut/faster-mempoll
Fix mempool update poll delay
2023-07-17 18:59:07 +09:00
Mononaut
2c1b9b9095 Fix mempool update poll delay 2023-07-17 18:21:44 +09:00
softsimon
965270dc7f Merge pull request #3933 from mempool/nymkappa/feature-bits
Show raw and decoded lightning node features
2023-07-17 17:58:22 +09:00
Mononaut
4309bfd519 Fix websocket null data for undefined rbfSummary 2023-07-17 17:53:26 +09:00
softsimon
6ab3b89884 Change to a Details-button 2023-07-17 17:41:38 +09:00
Mononaut
7a059ba294 get chain tip direct from Bitcoin Core to avoid race conditions 2023-07-17 15:21:52 +09:00
wiz
8af64900d9 Merge pull request #3959 from mempool/simon/load-more-mempool-txs
Load more mempool transactions
2023-07-17 14:56:18 +09:00
wiz
a59a7fe25e Merge branch 'master' into simon/load-more-mempool-txs 2023-07-17 14:17:52 +09:00
softsimon
2fc8e2997d Merge pull request #3978 from mempool/mononaut/fix-pool-blocks
unbork mining pool blocks list
2023-07-17 14:03:01 +09:00
wiz
ede961a34a Merge branch 'master' into simon/load-more-mempool-txs 2023-07-17 14:02:32 +09:00
softsimon
77764e1c77 Merge pull request #3882 from mempool/mononaut/audit-replication
Audit data synchronization
2023-07-17 12:51:34 +09:00
softsimon
8e114917a1 Merge pull request #3976 from mempool/mononaut/six-latest-transactions
always send 6 latest transactions to websocket clients
2023-07-17 12:03:06 +09:00
softsimon
be599ca624 Merge pull request #3971 from mempool/mononaut/audit-recently-cpfpd
Add "recently cpfpd" exception to audits
2023-07-17 11:51:06 +09:00
Mononaut
2c39e1e203 unbork mining pool blocks list 2023-07-17 11:15:55 +09:00
Mononaut
bf5a16b043 always send 6 latest transactions to websocket clients 2023-07-17 11:02:28 +09:00
wiz
a7ec9138c3 ops: Bump elements tag to 22.1.1 2023-07-17 01:14:52 +09:00
wiz
c0f33e6b52 Merge pull request #3974 from mempool/mononaut/missing-socket-init-data
Set missing websocket init data
2023-07-16 19:55:12 +09:00
Mononaut
565336df21 Set missing websocket init data 2023-07-16 18:39:51 +09:00
nymkappa
6fe32cdd19 [lightning] fix issue during initial node.features indexing 2023-07-16 18:24:42 +09:00
nymkappa
8fb67a914c [lightning] fix node features binary conversion 2023-07-16 18:17:49 +09:00
nymkappa
6336c529ed [lightning] show decoded features in node page 2023-07-16 18:17:49 +09:00
nymkappa
556eb65320 [lightning] start integrating features bits in the node page 2023-07-16 18:17:49 +09:00
nymkappa
4d41d36fe7 [lightning] save feature bit number when using lnd describegraph 2023-07-16 18:17:49 +09:00
nymkappa
32d46ad7ac [lightning] save bit number when converting features from clightning 2023-07-16 18:17:48 +09:00
nymkappa
1f003cc292 [lightning] save node features as stringified json array in db 2023-07-16 18:17:48 +09:00
Mononaut
b33ea4679d Add "recently cpfpd" exception to audits 2023-07-16 13:51:30 +09:00
Mononaut
b6a6fcd4e2 Fix tx.ancestors undefined bug 2023-07-16 12:53:55 +09:00
softsimon
b03f2185ce Merge pull request #3964 from mempool/simon/calculator-validation-improvements
Calculator validation improvements
2023-07-15 20:15:31 +09:00
softsimon
a52d5faf4f Merge pull request #3967 from mempool/nymkappa/update-price-timestamp-websocket
[price updater] update latestPrices timestamp before pushing to websocket
2023-07-15 20:15:10 +09:00
softsimon
b39f01471a Select all input box text on click 2023-07-15 17:47:36 +09:00
nymkappa
73d9b4ef28 [price updater] update latestPrices timestamp before pushing to websocket 2023-07-15 17:29:29 +09:00
nymkappa
cda6567c4c [network selector] fix rtl issue 2023-07-15 16:50:18 +09:00
nymkappa
a372b479b4 [network selector] improve align 2023-07-15 16:26:25 +09:00
softsimon
992196c91f Calculator validation improvements 2023-07-15 15:09:41 +09:00
softsimon
9ffd4cc38d Calculator mobile margin 2023-07-15 12:18:55 +09:00
softsimon
0bcaa5209c Merge pull request #3875 from mempool/simon/calculator
Bitcoin-Fiat calculator tool
2023-07-15 11:24:17 +09:00
softsimon
23dffb4ca2 Slight margin fix 2023-07-15 11:18:36 +09:00
softsimon
98be07f5ef Removing logos 2023-07-15 10:52:59 +09:00
softsimon
120c27d120 Calculator visual results 2023-07-15 10:52:59 +09:00
softsimon
67a998c69f Working fiat/btc calculator 2023-07-15 10:52:33 +09:00
wiz
e3ddde9c90 Merge branch 'master' into simon/load-more-mempool-txs 2023-07-14 19:30:15 +09:00
wiz
0ec98d03e5 Merge pull request #3621 from mempool/mononaut/sharper-blocks
Pixel-aligned grids for sharper block visualizations
2023-07-14 18:58:09 +09:00
wiz
8680e5f06e Merge branch 'master' into mononaut/sharper-blocks 2023-07-14 18:45:55 +09:00
wiz
23151ec3db Bump version to 3.0.0-dev
- Now requires mempool/electrs
- Mempool Accelerator integration
- Rust GBT integration
- And more!
2023-07-14 18:39:35 +09:00
Mononaut
7f0218e343 add margin between mobile audit tabs & visualization 2023-07-14 18:39:28 +09:00
softsimon
f8fc0439f8 Merge pull request #3962 from mempool/mononaut/latest-replacements
Switch "Latest blocks" to "Latest replacements"
2023-07-14 18:30:25 +09:00
Mononaut
1abd2a23cc Add audit replication success logging 2023-07-14 16:54:36 +09:00
Mononaut
e59a9d38ff fix audit replication merge conflicts 2023-07-14 16:54:32 +09:00
Mononaut
bccc6b3680 Add missing replication docker config 2023-07-14 16:43:07 +09:00
Mononaut
7f6d17fc0e Fix audit sync progress logging 2023-07-14 16:43:07 +09:00
Mononaut
736b997104 Add missing audit data to cached blocks 2023-07-14 16:43:07 +09:00
Mononaut
69e6b164b9 Add audit data replication service 2023-07-14 16:43:05 +09:00
Mononaut
fa48791c59 reduce latest rbf websocket data 2023-07-14 16:15:03 +09:00
Mononaut
9a6565cd92 tweak default sizes & resolutions 2023-07-14 15:35:52 +09:00
Mononaut
3cca6f6b8b Pixel-aligned grids for sharper block visualizations 2023-07-14 15:33:28 +09:00
softsimon
f15f0570d4 Merge pull request #3951 from mempool/mononaut/tx-highlight
Highlight searched transactions in the block visualizations
2023-07-14 15:10:12 +09:00
Mononaut
240afbed95 adjust latest replacements labels & layout 2023-07-14 12:25:45 +09:00
softsimon
548ea0f4b4 Merge pull request #3912 from bennyhodl/bennyhodl-contributor-agreement
bennyhodl contributor agreement
2023-07-14 12:20:49 +09:00
Mononaut
756fac7270 Switch "latest blocks" to "latest replacements" 2023-07-14 11:52:07 +09:00
softsimon
ba6f41fa1b Merge pull request #3961 from mempool/mononaut/fix-difficulty-chart
Fix difficulty chart off-by-one bug
2023-07-14 11:31:01 +09:00
Mononaut
d60709deff Fix scene null check on visualization load 2023-07-14 11:22:09 +09:00
Mononaut
e9c618849d Highlight matching transactions in the block visualizations 2023-07-14 11:22:09 +09:00
Mononaut
8477600859 Fix difficulty chart bug 2023-07-14 11:05:09 +09:00
softsimon
15a8c8d420 Support for romanz/electrs 2023-07-13 17:59:02 +09:00
softsimon
5ef592f53e Load more mempool transactions 2023-07-13 16:57:36 +09:00
wiz
cff2022baf Merge pull request #3952 from mempool/simon/connectivity-ranking-title-fix
Connectivity ranking title fix
2023-07-13 15:58:26 +09:00
wiz
a6dc4fa38c Merge branch 'master' into simon/connectivity-ranking-title-fix 2023-07-13 15:44:31 +09:00
wiz
8b681f3ba0 Merge pull request #3670 from mempool/junderw/pushtxantidos
Push TX: Include validation to prevent DoS
2023-07-13 15:44:18 +09:00
softsimon
5575a37f9b Merge pull request #3957 from mempool/mononaut/full-rbf-highlight
highlight & tag fullrbf replacements in RBF timeline
2023-07-13 15:34:19 +09:00
wiz
15e58035e5 ops: Remove 2 electrs patches from prod installer 2023-07-13 15:06:00 +09:00
wiz
da4f7a3aba Merge branch 'master' into junderw/pushtxantidos 2023-07-13 14:18:19 +09:00
wiz
7542d95bc5 Merge pull request #3940 from mempool/mononaut/null-miner-health
Unknown avg miner health
2023-07-13 14:17:39 +09:00
wiz
6a2a9bda06 Merge branch 'master' into mononaut/full-rbf-highlight 2023-07-13 14:16:33 +09:00
junderw
222b34993b Fix: Add new configs to all config instances properly. 2023-07-13 14:06:46 +09:00
junderw
df70ea05c6 Fix: Leaf version validation 2023-07-13 13:50:54 +09:00
junderw
43d41fca95 Fix: Allow detection of 1 byte annexes 2023-07-13 13:31:57 +09:00
junderw
95a8752a0a Fix: Tests for config 2023-07-13 13:26:18 +09:00
junderw
21a47a7b4b Push TX: Include validation to prevent DoS 2023-07-13 13:24:46 +09:00
softsimon
31336d47e2 Merge pull request #3948 from mempool/mononaut/loading-transaction
show "loading" message while checking for cached txs
2023-07-13 12:32:20 +09:00
Mononaut
3287c62f91 highlight & tag fullrbf replacements in RBF timeline 2023-07-13 12:28:33 +09:00
softsimon
cf13c43637 Merge pull request #3937 from mempool/mononaut/replace-stale-blocks
Replace client-side stale blocks
2023-07-13 12:08:24 +09:00
Mononaut
eccbcbe53b Add missing this.block null check 2023-07-13 11:58:29 +09:00
softsimon
04bc43e188 Merge pull request #3943 from mempool/mononaut/missing-fiat-alignment
Add spacer for missing fiat values
2023-07-13 11:10:46 +09:00
Mononaut
1e69ea2f1d Fix merge conflicts 2023-07-13 11:06:02 +09:00
Mononaut
886a099a2f Detect stale blocks from client blockchain cache 2023-07-13 10:45:48 +09:00
Mononaut
7230b65dc3 remove console.log, fix null blocks 2023-07-13 10:45:46 +09:00
Mononaut
842ac8ce39 Add stale block banner immediately on reorg 2023-07-13 10:43:18 +09:00
Mononaut
e8c703fdbc replace client recent blocks on reorg 2023-07-13 10:43:18 +09:00
wiz
9cf961c667 Merge pull request #3693 from mempool/mononaut/preview-table-overflows
prevent table overflow in unfurl previews
2023-07-12 18:32:47 +09:00
wiz
93b1c64482 Merge branch 'master' into mononaut/preview-table-overflows 2023-07-12 18:17:06 +09:00
wiz
faf85d0c82 Merge pull request #3620 from mempool/mononaut/fix-rtl-unfurls
Fix RTL locale unfurls
2023-07-12 18:17:01 +09:00
wiz
d01fa85927 Merge branch 'master' into mononaut/fix-rtl-unfurls 2023-07-12 17:54:21 +09:00
softsimon
7ad1ace8dc Merge pull request #3941 from mempool/mononaut/reset-latest-txs
reset blocks$ and transactions$ observables when network changes
2023-07-12 17:52:06 +09:00
Mononaut
132923e7db Show skeleton loader instead of "Loading transaction..." 2023-07-12 17:44:38 +09:00
Mononaut
55cc3a0c07 fix loading transaction i18n tag 2023-07-12 17:44:37 +09:00
Mononaut
415b70da14 show "loading" message while checking for cached txs 2023-07-12 17:44:37 +09:00
wiz
243fd5e9dd Merge pull request #3953 from mempool/mononaut/difficulty-updates
always show latest difficulty on hashrate chart
2023-07-12 17:33:22 +09:00
wiz
7c1c8e877e Merge pull request #3955 from mempool/wiz/fix-prod-rust-gbt-build
Fix production rust GBT build
2023-07-12 17:25:03 +09:00
wiz
95e50ddf02 Fix production rust GBT build 2023-07-12 17:16:40 +09:00
softsimon
b6b9ab1a87 Merge pull request #3945 from mempool/mononaut/fix-mined-rbf-conflicts
Fix mined rbf conflict prevention
2023-07-12 17:06:53 +09:00
wiz
4144746e14 Merge branch 'master' into mononaut/difficulty-updates 2023-07-12 16:45:03 +09:00
wiz
f871300bfb Merge branch 'master' into mononaut/fix-mined-rbf-conflicts 2023-07-12 16:17:47 +09:00
wiz
2b559ffcce Merge pull request #3598 from mempool/nymkappa/scan-closed-channel-no-mempool
Make sure to scan closed channels even if config.MEMPOOL.ENABLE = false
2023-07-12 16:17:38 +09:00
softsimon
ef771beb28 Merge pull request #3669 from TechMiX/hotfix/rtlIssue
Fix RTL issues
2023-07-12 16:06:35 +09:00
wiz
09b966e507 Merge branch 'master' into nymkappa/scan-closed-channel-no-mempool 2023-07-12 16:00:52 +09:00
wiz
4b9d0d1d31 Merge pull request #3947 from mempool/mononaut/fix-testnet-price-updater
fix price updater loop on testnet/signet
2023-07-12 16:00:39 +09:00
wiz
3ece5acd59 Merge branch 'master' into mononaut/fix-testnet-price-updater 2023-07-12 15:38:17 +09:00
Mononaut
0dd9867a1f always show latest difficulty on hashrate chart 2023-07-12 12:58:48 +09:00
softsimon
ffca2f174d Connectivity ranking title fix 2023-07-12 12:24:45 +09:00
softsimon
00282b79b8 Merge pull request #3862 from secondl1ght/patch-1
update frontend local instructions
2023-07-12 11:02:58 +09:00
softsimon
e66f14a5c3 Merge pull request #3870 from pfoytik/master
the last two docker-compose overrides need MEMPOOL_ in front
2023-07-12 11:02:36 +09:00
softsimon
05f85c5201 Merge branch 'master' into nymkappa/clip-label-overflow 2023-07-12 10:57:11 +09:00
Felipe Knorr Kuhn
2570357bec Add node v20 to the test matrix 2023-07-11 00:21:00 -07:00
Mononaut
ca2830d6d8 fix price updater loop on testnet/signet 2023-07-11 16:03:44 +09:00
wiz
795e6753eb Merge pull request #3879 from mempool/mononaut/audit-exclude-fullrbf
exclude fullrbf txs from audit and label in visualization
2023-07-11 15:29:32 +09:00
wiz
ad0a007e8c Merge pull request #3946 from mempool/mononaut/bitcoin-core-v25
Upgrade bitcoin core to v25.0
2023-07-11 15:28:36 +09:00
softsimon
168cc9c1bf Merge pull request #3932 from mempool/mononaut/stale-blocks
Stale blocks
2023-07-11 14:47:44 +09:00
Mononaut
11d1a68f78 Upgrade bitcoin core to v25.0 2023-07-11 14:36:42 +09:00
Mononaut
a01336d8ac Fix mined rbf conflict prevention 2023-07-11 11:44:30 +09:00
Mononaut
01bd9dd957 Add spacer for missing fiat values 2023-07-11 11:18:17 +09:00
Mononaut
94c0222efe reset blocks$ and transactions$ observables when network changes 2023-07-11 10:50:25 +09:00
softsimon
ad9d9c839b Merge pull request #3869 from mempool/mononaut/vb-wu-preference
Weight unit preference
2023-07-11 10:10:43 +09:00
Mononaut
5b9d43032c Switch remaining vb fields according to unit preference 2023-07-11 10:04:38 +09:00
Mononaut
bde8fbac98 Implement only-vsize and only-weight directives 2023-07-11 10:00:41 +09:00
Mononaut
013ad803d0 Switch all direct sat/vb fields to new rate component 2023-07-11 10:00:41 +09:00
Mononaut
c29558db20 Add fee rate display component 2023-07-11 10:00:40 +09:00
Mononaut
a45f1fde1c Add fee rate unit preference & dropdown 2023-07-11 10:00:40 +09:00
Mononaut
e81839e7ed Return null for avg of zero matching health scores 2023-07-11 09:54:03 +09:00
Mononaut
6942a6fd6a Show alert banner on stale blocks 2023-07-10 16:14:02 +09:00
Mononaut
385cb087d3 Replace cached blocks on reorg, serve stale blocks 2023-07-10 16:14:02 +09:00
Mononaut
f1966768a7 exclude fullrbf txs from audit and label in visualization 2023-07-10 15:34:22 +09:00
Mononaut
4ba552fe1b Add basic lightning justice page 2023-07-09 03:03:35 -04:00
junderw
ec918d57b2 Fix backend docker build for armv7 2023-07-08 23:03:03 -07:00
wiz
408c86963b Merge pull request #3889 from mempool/mononaut/rust-gbt
Rust GBT
2023-07-09 13:27:52 +09:00
Jonathan Underwood
f8e910f0a4 Merge branch 'master' into mononaut/rust-gbt 2023-07-07 18:56:04 -07:00
softsimon
8ad4b952ea Merge pull request #3915 from mempool/mononaut/block-time-precision
More precise relative block times
2023-07-07 19:06:08 +02:00
junderw
925ebf08d4 Fix Docker build for using napi-rs 2023-07-06 18:05:21 -07:00
junderw
eae5f6078e Move N-API CLI to dependencies 2023-07-06 15:49:54 -07:00
Jonathan Underwood
464587cec5 Add period 2023-07-05 19:35:36 -07:00
junderw
0c7c1dd0a8 Merge remote-tracking branch 'origin/master' into mononaut/rust-gbt 2023-07-05 08:39:48 -07:00
junderw
22d357c53c Faster txid to u32 parsing 2023-07-05 08:39:01 -07:00
Mononaut
cc695dc910 match higher precision time in block page details 2023-07-05 11:02:32 -04:00
Mononaut
850752e0ea More precise relative block times 2023-07-05 10:50:46 -04:00
softsimon
7b01f54fc6 Merge pull request #3919 from joostjager/pool-fees
Add average fee delta to pool ranking
2023-07-05 15:30:15 +02:00
wiz
9f2e94d9cd Merge pull request #3863 from mempool/mononaut/scrollable-mempool
scroll to see all mempool blocks
2023-07-05 09:57:17 +09:00
wiz
824c6f97e4 ops: Use mempool/electrs for liquid instances 2023-07-05 09:33:33 +09:00
junderw
390c4a7706 Use ManuallyDrop 2023-07-04 17:19:41 -07:00
wiz
efcb58a4a6 Merge branch 'master' into mononaut/scrollable-mempool 2023-07-05 07:58:50 +09:00
Joost Jager
9e5d10b15f Add average fee delta to pool ranking
Co-authored-by: mononaut <83316221+mononaut@users.noreply.github.com>
2023-07-04 11:25:39 +00:00
Mononaut
8fdc44aa89 replace audit_pool hashmap with a vec 2023-07-03 22:16:35 -04:00
Mononaut
cfa2363743 only return rates changed since last update 2023-07-03 12:01:49 -04:00
Mononaut
897c667f17 return sigop-adjusted effective fee rates 2023-07-03 12:01:06 -04:00
Mononaut
078bc1d914 fix and consolidate tx ordering logic 2023-07-03 11:57:12 -04:00
mononaut
d16d961cb2 Apply suggestions from code review
avoid regex in partial txid ordering conversion

Co-authored-by: Jonathan Underwood <jonathan.underwood4649@gmail.com>
2023-07-03 11:55:43 -04:00
Mononaut
23d487b904 Mimic Core's ordering for equal-score transactions 2023-07-03 10:28:32 -04:00
Mononaut
af6de9b72c more misc JS-side gbt performance optimizations 2023-07-03 10:20:08 -04:00
junderw
0ddfa94b59 Ran cargo fmt 2023-07-03 10:18:22 -04:00
Mononaut
ccbed8ec58 Use min(feerate,ancestor_feerate) for ancestor score 2023-07-03 10:18:22 -04:00
junderw
552818607a Better initial capacity 2023-07-03 10:18:22 -04:00
Mononaut
db8c34ae61 misc JS-side gbt performance optimizations 2023-07-03 10:18:22 -04:00
Mononaut
0886e850f9 Pass gbt mempool data directly without serialization 2023-07-03 10:18:21 -04:00
Mononaut
5065fa42d0 calculate total block weights inside rust gbt 2023-07-03 10:18:21 -04:00
junderw
2838b068f7 Increased performance 2023-07-03 10:18:21 -04:00
junderw
77c83a6a13 Fix CI for Rust test 2023-07-03 10:18:21 -04:00
Mononaut
79a10ee833 vsize -> sigop_adjusted_vsize 2023-07-03 10:18:21 -04:00
Mononaut
10beb76585 conform to core's gbt quirks 2023-07-03 10:18:21 -04:00
Mononaut
71f150b587 Add end-to-end test of rust gbt against results from Core 2023-07-03 10:18:21 -04:00
junderw
6650541b2d Added Logging and refactored a bit 2023-07-03 10:18:20 -04:00
Mononaut
702c4c123e remove unnecessary option wrapper from gbt return value 2023-07-03 10:18:20 -04:00
Mononaut
1688b7d24e fix handling of used txs at top of mempool stack 2023-07-03 10:18:20 -04:00
Mononaut
75fd4ff5e1 swap mempool_array deque for a mempool_stack vec 2023-07-03 10:18:20 -04:00
Mononaut
ac8f88da38 protect ancestor data from outside assignments 2023-07-03 10:18:20 -04:00
Mononaut
7873f1c26a fix rust gbt cpfp cluster construction 2023-07-03 10:18:20 -04:00
Mononaut
a66c0c88ce fix rust gbt priority queue handling 2023-07-03 10:18:20 -04:00
Mononaut
f5e0662517 Fix mempool-blocks linter issues 2023-07-03 10:18:19 -04:00
junderw
4334b9eac1 Fix README and publish = false in Cargo.toml 2023-07-03 10:18:19 -04:00
junderw
6e7a525d12 Fix: napi macro breaks rust-analyzer
See issue: https://github.com/napi-rs/napi-rs/issues/944#issuecomment-1013002760
This will ignore expanding the napi macro for rust-analyzer, letting auto-complete
work inside the napi impl blocks.
2023-07-03 10:18:19 -04:00
Mononaut
4a15cd7abe clearer uint32 uid overflow check 2023-07-03 10:18:19 -04:00
junderw
5d48ae1eec Use U32HasherState for HashSet 2023-07-03 10:18:19 -04:00
Mononaut
a71f931d9f Add sanity checks for GBT cache de-sync 2023-07-03 10:18:19 -04:00
Mononaut
6829e67e15 Add sanity check for uint32 uid overflow 2023-07-03 10:18:19 -04:00
Mononaut
fc504012d5 Enforce Rust GBT instance lifecycle 2023-07-03 10:18:18 -04:00
Mononaut
8f675c7062 Add Rust GBT config flag 2023-07-03 10:18:18 -04:00
junderw
1ed0f86ed9 calc_new_score can be private 2023-07-03 10:18:18 -04:00
junderw
2dfef5a003 Fix Rust version of compiler to prevent breakage 2023-07-03 10:18:18 -04:00
junderw
609df31e0c Fix clippy pedantic and nursery lints as well 2023-07-03 10:18:18 -04:00
junderw
e61ae59e53 Add safety comments 2023-07-03 10:18:18 -04:00
junderw
e3f4c33f03 Protect score from outside assignment and document the requirements 2023-07-03 10:18:17 -04:00
junderw
af4919a98b Use u32hasher with PriorityQueue as well 2023-07-03 10:18:17 -04:00
junderw
939d2230d2 Use bytemuck instead of unsafe Rust 2023-07-03 10:18:17 -04:00
junderw
59b19eefe3 Move u32hasher into its own module, don't expose the Hasher for use. 2023-07-03 10:18:17 -04:00
junderw
3c652bdcbc Optimize audit_pool key hashing as well. Use a const for starting capacity for tx related lists. 2023-07-03 10:18:17 -04:00
junderw
ded2352cf8 Use a class to hold state for Rust GbtGenerator 2023-07-03 10:18:17 -04:00
junderw
8cfda1a546 Use tokio async/await instead of callbacks 2023-07-03 10:18:17 -04:00
junderw
5f161e73c7 Check callback status in thread 2023-07-03 10:18:16 -04:00
junderw
dfe24ed9d7 Remove all unwrap() calls 2023-07-03 10:18:16 -04:00
junderw
4661bea2f0 Use N-API ThreadsafeFunction 2023-07-03 10:18:16 -04:00
junderw
152d2c364b Update TS side 2023-07-03 10:18:16 -04:00
junderw
2d4963c2df Feature: Use napi-rs instead of neon 2023-07-03 10:18:16 -04:00
junderw
0f1f151d65 Remove empty function 2023-07-03 10:18:16 -04:00
junderw
d2a47b92c8 Move conversion logic to struct file 2023-07-03 10:18:16 -04:00
junderw
920232be4c Use Cargo workspace
Cargo workspace in the root will help make the IDE experience universal.
Cargo.lock and the target directory for build artifacts will be in the root
of the mempool repository (with ./target ignored by git).
2023-07-03 10:18:15 -04:00
junderw
83bf9229e7 Fix: Some of the clippy pedantic lints 2023-07-03 10:18:15 -04:00
mononaut
178bb960e9 Remove redundant gbt function
Co-authored-by: Jonathan Underwood <jonathan.underwood4649@gmail.com>
2023-07-03 10:18:15 -04:00
Mononaut
891acf30bf Fix vscode rust-analyzer config 2023-07-03 10:18:15 -04:00
Mononaut
0e00881826 Refactor rust code for style 2023-07-03 10:18:15 -04:00
Mononaut
1d51b01bd1 Implement rust gbt updateBlockTemplates 2023-07-03 10:18:15 -04:00
Mononaut
52bb8b4a4d Rust GBT proof of concept 2023-07-03 10:18:14 -04:00
softsimon
3d33233e51 Merge pull request #3891 from 0xFlicker/master
sign contributor agreement
2023-07-03 10:11:51 +02:00
softsimon
4cd7561af8 Merge pull request #3906 from mempool/simon/mempool-break-limit
Lowering mempool loop break limit
2023-07-03 10:08:17 +02:00
softsimon
52c813bcc7 Merge branch 'master' into simon/mempool-break-limit 2023-07-03 09:59:21 +02:00
softsimon
c20c7ae11f Merge pull request #3917 from mempool/mononaut/effective-rate-templates
Save effective rates to templates & summaries
2023-07-02 18:40:40 +02:00
softsimon
5f582195ad Merge branch 'master' into mononaut/effective-rate-templates 2023-07-02 18:11:28 +02:00
softsimon
a567a28c5c Update backend/src/api/mempool.ts
Co-authored-by: mononaut <83316221+mononaut@users.noreply.github.com>
2023-07-02 18:10:14 +02:00
softsimon
59713e2132 Update backend/src/api/mempool.ts
Co-authored-by: mononaut <83316221+mononaut@users.noreply.github.com>
2023-07-02 18:10:08 +02:00
softsimon
1e83a99bae Update backend/src/api/mempool.ts
Co-authored-by: mononaut <83316221+mononaut@users.noreply.github.com>
2023-07-02 18:10:02 +02:00
softsimon
b5fdb6d64f Merge pull request #3843 from mempool/mononaut/projected-fee-graph
Better projected fee graph
2023-07-02 17:44:12 +02:00
softsimon
1411b701f9 Merge pull request #3887 from mempool/mononaut/atomic-cpfp
Atomic CPFP database operations
2023-07-02 17:29:42 +02:00
softsimon
f80257c5ea Merge branch 'master' into mononaut/atomic-cpfp 2023-07-01 22:04:14 +02:00
softsimon
23cd063402 Merge pull request #3883 from mempool/hunicus/ronindojo-link
Update ronindojo link on about page
2023-07-01 22:03:43 +02:00
softsimon
2bda12e5f9 Merge pull request #3881 from mempool/mononaut/separate-audit-api
Separate summary and audit-summary API endpoints
2023-07-01 19:43:20 +02:00
softsimon
2e285c8d86 Merge pull request #3878 from mempool/mononaut/full-mempool-graph
Add 'all time' option for mempool graph
2023-07-01 17:23:03 +02:00
softsimon
70854de6ec Merge pull request #3855 from mempool/mononaut/websocket-responses
Fix inconsistent websocket responses
2023-07-01 17:19:22 +02:00
softsimon
f5cf5c7cc3 Merge pull request #3916 from mempool/mononaut/fix-transaction-eta
Fix transaction component ETA
2023-07-01 16:21:18 +02:00
nymkappa
d939391e62 ALL -> all (no caps needed, match the url parameters as well) 2023-06-30 19:52:06 -04:00
nymkappa
85e33e689d break graph toggles into another line earlier 2023-06-30 19:52:05 -04:00
Mononaut
ca1a1228a9 break graph toggles into two rows on small screens 2023-06-30 19:52:05 -04:00
Mononaut
d4ed238ae1 Add statistics/all to cache warmer, change query keyword 2023-06-30 19:52:05 -04:00
Mononaut
22baf4186e Add 'all time' option for mempool graph 2023-06-30 19:52:05 -04:00
Mononaut
58b8052530 don't reset blockchain position on every mempool update 2023-06-30 19:45:53 -04:00
Mononaut
9d606d0006 scroll selected mempool block into view 2023-06-30 19:45:53 -04:00
Mononaut
d848ab4bef scroll to see all mempool blocks 2023-06-30 19:45:53 -04:00
Mononaut
eaad63a082 frontend resync recent blocks when necessary 2023-06-30 19:43:04 -04:00
Mononaut
ca6ddd609d clean up backend websocket logic 2023-06-30 19:43:04 -04:00
Mononaut
eca40f94c9 use power-of-ten formatting for large fee rates 2023-06-30 19:41:12 -04:00
Mononaut
86f51e3902 fix fee graph for underfilled blocks 2023-06-30 19:27:31 -04:00
Mononaut
9f2b98b246 Handle stack-of-N-blocks in new fee graph 2023-06-30 19:27:31 -04:00
Mononaut
e4f3642082 Redesign mempool block fee distribution graph 2023-06-30 19:27:30 -04:00
Mononaut
48d62a1396 Save effective fee rates to block templates & summaries 2023-06-29 19:24:19 -04:00
Mononaut
d9ed02a033 Fix transaction component ETA 2023-06-29 11:39:13 -04:00
bennyhodl
f87bf4df77 bennyhodl contributor agreement 2023-06-28 18:40:57 -04:00
softsimon
31ae382f75 Lowering mempool loop break limit 2023-06-27 18:46:28 +02:00
softsimon
5f787db30d Merge pull request #3762 from knorrium/update_node_matrix
Update node CI test matrix
2023-06-26 18:08:53 +02:00
softsimon
da3c39c7d0 Merge pull request #3861 from mempool/hunicus/add-luminex-sponsor
Add luminex as enterprise sponsor
2023-06-26 12:31:03 +02:00
0xflick
dbf759fc76 sign contributor agreement 2023-06-24 16:30:29 -05:00
wiz
e88cf70719 Merge pull request #3886 from mempool/mononaut/hotfix-undefined-cpfp-cluster
Hotfix for undefined cpfp cluster bug
2023-06-23 19:08:27 +09:00
Mononaut
1f442b9ea6 Make cpfp db save operations atomic 2023-06-22 12:08:47 -04:00
Mononaut
9ff5ce0d37 Change order of cpfp db operations 2023-06-21 19:57:18 -04:00
Mononaut
329c635da5 Fix getCpfpInfo error handling 2023-06-21 19:57:09 -04:00
Mononaut
e18f3800be Hotfix for undefined cpfp cluster bug 2023-06-21 19:07:00 -04:00
hunicus
9bfc2c9413 Update ronindojo link on about page 2023-06-20 22:36:29 -04:00
Mononaut
4fbab08586 Separate summary and audit-summary API endpoints 2023-06-20 15:13:52 -04:00
Peter Foytik
32490bfdb7 Merge branch 'mempool:master' into master 2023-06-20 13:09:08 -04:00
softsimon
e5efc2957a Merge pull request #3871 from mempool/mononaut/rbf-removed-not-mined
Change RBF status badges
2023-06-17 15:05:12 +02:00
softsimon
20d7e56de2 Update i18n 2023-06-17 15:04:46 +02:00
softsimon
0586e04d67 Merge pull request #3873 from mempool/simon/sanitize-pubkey-search
Sanitize node pubkey search
2023-06-17 11:22:28 +02:00
wiz
39bde61538 ops: Set cache time for /api/v1 endpoints to 2 sec 2023-06-16 17:15:41 -07:00
softsimon
0fb92e6ebb Merge pull request #3872 from mempool/simon/mempool-loop-timeout
Reinstate the mempool loop time limit
2023-06-17 00:56:00 +02:00
softsimon
c8d3653ef3 Updating tests 2023-06-17 00:32:58 +02:00
softsimon
a5575c0876 Sanitize node pubkey search 2023-06-16 23:42:57 +02:00
Mononaut
1872e5d12f change "removed" and "replaced" badges to yellow 2023-06-16 17:35:07 -04:00
softsimon
176f5e1377 Reinstate the mempool loop time limit 2023-06-16 20:42:31 +02:00
Mononaut
c0e235c01a Mark RBF transactions as removed if earlier version is mined 2023-06-16 13:47:09 -04:00
softsimon
2faeb1071e Merge pull request #3864 from mempool/mononaut/blocks-api-calls
remove redundant audit score api calls from blocks list
2023-06-16 19:37:48 +02:00
Peter
e8aea38320 contributor license 2023-06-15 21:46:09 -04:00
Peter
5f3fd85834 the last two docker-compose overrides need MEMPOOL_ in front 2023-06-15 21:39:51 -04:00
Mononaut
618ba56c42 remove redundant audit score api calls from blocks list 2023-06-15 12:57:20 -04:00
softsimon
d955dbff55 Merge pull request #3842 from mempool/mononaut/consistent-fee-ranges
Fix fee range inconsistencies
2023-06-15 16:49:07 +02:00
Mononaut
ee39283241 precompute block fee spans 2023-06-15 09:54:08 -04:00
wiz
c42670259e ops: Fix crash while building electrs in install script 2023-06-15 02:37:30 -07:00
secondl1ght
fe6da62dab create secondl1ght.txt contributor file 2023-06-14 21:55:17 -06:00
secondl1ght
22a491717a update frontend local instructions 2023-06-14 21:48:36 -06:00
hunicus
816f410855 Make luminex css class unique 2023-06-14 22:17:05 -04:00
Mononaut
bb61ff97fa continue to skip first rate in simple fee ranges 2023-06-14 19:04:09 -04:00
hunicus
73d629d319 Add luminex as enterprise sponsor on about page 2023-06-14 18:58:49 -04:00
softsimon
c630d705df Merge pull request #3839 from mempool/mononaut/clean-up-legacy-cpfp
Clean up legacy CPFP calculations
2023-06-14 23:14:07 +02:00
softsimon
c5bf167e36 Merge pull request #3846 from mempool/mononaut/audit-details
Add expected vs actual audit details comparison table
2023-06-14 23:05:07 +02:00
softsimon
e8420853e2 Merge pull request #3838 from mempool/mononaut/dependent-rate-indexing
calculate & index ancestor-dependent effective rates
2023-06-14 23:02:25 +02:00
softsimon
a8a5b733e5 Merge pull request #3860 from mempool/simon/disable-liquid-tests
Disabling Liquid tests for now
2023-06-14 22:28:08 +02:00
Mononaut
30f8d5cf96 add missing markForChecks in blocks list 2023-06-14 16:23:57 -04:00
softsimon
f09939a201 Disabling Liquid tests for now 2023-06-14 22:20:18 +02:00
softsimon
a99515e94a Merge pull request #3834 from mempool/mononaut/fix-double-mined-rbf
Fix multiple mined RBF replacements of the same tx
2023-06-14 22:15:53 +02:00
Mononaut
c4f7b99978 add backfilled audit stats to cached blocks 2023-06-14 16:15:33 -04:00
softsimon
dcf73ec3f3 Merge pull request #3831 from mempool/mononaut/clock
Interactive clock
2023-06-14 22:14:26 +02:00
softsimon
bbe0579cdd Changing default clock to mempool 2023-06-14 21:53:51 +02:00
softsimon
4390ffe3b6 Merge pull request #3820 from mempool/nymkappa/reindexing-pools-update
Mining pool update / re-indexer improvement
2023-06-14 19:16:08 +02:00
Mononaut
6b93e61b56 minor audit details fixes 2023-06-14 11:28:39 -04:00
softsimon
c79d031c86 Merge pull request #3668 from mempool/simon/ignore-existing-mining-pools
Skip existing mining pool logos when syncing assets
2023-06-13 11:15:02 +02:00
Felipe Knorr Kuhn
048399574e Merge branch 'master' into update_node_matrix 2023-06-12 22:42:21 -07:00
Mononaut
ae5a0312be change audit detail labels 2023-06-12 12:40:19 -04:00
softsimon
0b2ffb3e91 Merge branch 'master' into nymkappa/reindexing-pools-update 2023-06-12 15:53:46 +02:00
softsimon
d009edbbf3 Merge pull request #3822 from mempool/nymkappa/fix-possible-crash
fix possible backend crash
2023-06-12 15:52:47 +02:00
softsimon
1fbdf97639 Merge branch 'master' into nymkappa/fix-possible-crash 2023-06-12 15:45:02 +02:00
softsimon
a36303e1fb Merge branch 'master' into simon/ignore-existing-mining-pools 2023-06-12 14:29:40 +02:00
softsimon
27a3a1575d Merge pull request #3849 from mempool/mononaut/icon-reset-scroll
Reset blockchain scroll on logo click
2023-06-11 18:43:44 +02:00
Mononaut
93d24d1cf7 Add expected fee % diff to blocks list page 2023-06-10 12:35:15 -04:00
Mononaut
bfb842d7ea Add % difference to weight and tx count in audit details 2023-06-10 12:35:13 -04:00
Mononaut
5b62966863 Add indexer task to backfill audit fee/weight stats 2023-06-10 12:34:16 -04:00
Mononaut
3013386ca5 Add expected weight to audit table 2023-06-10 12:32:53 -04:00
Mononaut
aedaf53137 Merge branch 'master' into merged-expected-block-fees 2023-06-10 12:15:29 -04:00
Joost Jager
7157efcf79 Add CLA for joostjager
Signed-off-by: Joost Jager <joost.jager@gmail.com>
2023-06-10 11:10:05 +02:00
Mononaut
57ac1486a0 Reset blockchain scroll on logo click 2023-06-09 19:03:47 -04:00
softsimon
9a99ee6486 Merge pull request #3772 from mempool/nymkappa/dont-rethrow-block-summaries
[audit] warn if we cannot save templates and remove exception re-throw
2023-06-07 18:31:59 +02:00
nymkappa
fd30bff9c6 don't throw when BlocksAuditRepositories.$saveAudit fails 2023-06-07 18:04:21 +02:00
Mononaut
3c022ad755 Fix fee range inconsistencies 2023-06-07 11:59:31 -04:00
Mononaut
ca9b48283d calculate & index ancestor-dependent effective rates 2023-06-06 18:23:06 -04:00
Mononaut
c8fc416c88 Remove legacy mined block cpfp loop, reset stale ancestors 2023-06-06 14:19:30 -04:00
softsimon
804640216f Merge pull request #3830 from mempool/fix/p2tr-annex-parse
Fix: Annex parsing for p2tr on bitcoind/romanz backends
2023-06-06 20:27:03 +04:00
softsimon
2191bf2a22 Merge pull request #3826 from mempool/mononaut/fix-firstseen
fix firstSeen reset migration bug
2023-06-06 20:18:35 +04:00
softsimon
7d92cdf015 Merge pull request #3811 from mempool/mononaut/fix-median-fee-bug
Fix missing fees in $updateBlocks without esplora
2023-06-06 19:35:45 +04:00
Mononaut
386037d1db Fix missing fees in $updateBlocks without esplora 2023-06-06 17:34:22 +02:00
softsimon
f2e216b9ac Merge pull request #3804 from mempool/mononaut/split-summaries-table
Break block templates into their own db table
2023-06-06 19:30:28 +04:00
Mononaut
35d80eec1c Fix multiple mined RBF replacements of the same tx 2023-06-06 11:01:01 -04:00
softsimon
eb8c38e4e8 Merge pull request #3832 from mempool/mononaut/fix-graph-filter-colors
fix graph filter dropdown colors
2023-06-06 17:53:56 +04:00
Joost Jager
74b2014dff Show expected fees in blocks list 2023-06-06 08:52:29 +02:00
Mononaut
689319437a fix graph filter dropdown colors 2023-06-05 14:23:37 -04:00
Mononaut
9883e59f12 fix graph page links layout 2023-06-05 14:09:02 -04:00
Mononaut
30396c5dca Add link to clock from graph page 2023-06-05 13:28:00 -04:00
Mononaut
ec0d5e0c23 Polish clocks, fix urls, make interactive 2023-06-05 13:27:17 -04:00
junderw
9e1de656c1 Fix: Annex parsing for p2tr on bitcoind/romanz backends 2023-06-05 07:21:55 -07:00
Joost Jager
3c0bb11208 Add expected total fees audit 2023-06-05 14:19:16 +02:00
Mononaut
37dd95a4a0 fix firstSeen reset bug 2023-06-04 12:47:04 -04:00
softsimon
6076eeed46 Merge pull request #3825 from mempool/mononaut/fix-negative-confs
Fix negative confirmations
2023-06-04 20:23:35 +04:00
Mononaut
8b1dff6d15 fix txConfirmed type 2023-06-04 11:47:46 -04:00
softsimon
9d4b58604b Optimize change detection 2023-06-04 10:32:24 +04:00
Mononaut
c49626aefc Confirmation badge component, fix negative confirmations 2023-06-03 16:20:32 -04:00
softsimon
baeb200e8b Merge pull request #3815 from mempool/hunicus/unmute-promo
Unmute about promo video on click
2023-06-03 15:35:31 +04:00
softsimon
11669849ab Merge pull request #3818 from mempool/mononaut/calculate-sigops
Count sigops and use adjusted vsizes
2023-06-01 09:49:54 +07:00
Mononaut
b171ed6dd0 Break block templates into their own db table 2023-05-31 13:43:48 -04:00
nymkappa
0b74cf1d89 fix possible backend crash x2, remove dead code, improve log 2023-05-31 09:58:29 -07:00
nymkappa
c558c85f36 fix possible backend crash 2023-05-31 09:48:44 -07:00
Mononaut
0ac76e12c4 Fix frontend logic for displaying effective fee rate 2023-05-31 12:11:56 -04:00
Mononaut
ee1ec414ed use fractional base vsize in adjusted vsize 2023-05-31 11:45:47 -04:00
Mononaut
0e5dc21854 Fix mined rbf / calculate sigop merge conflicts 2023-05-31 11:37:13 -04:00
Mononaut
bf7df08305 Enforce block sigop limits in GBT algorithm 2023-05-31 11:29:58 -04:00
Mononaut
ec63c822db Display sigops & adjusted vsize in transaction page details 2023-05-31 11:29:58 -04:00
Mononaut
09e4e44e88 Count sigops & use adjusted vsizes in mempool projections 2023-05-31 11:29:56 -04:00
Mononaut
c5e6821ad4 show fee rating badge either on rate or effective rate 2023-05-31 11:27:04 -04:00
Mononaut
70fa78b987 Fix effective fee rates for non-cpfp dependents 2023-05-31 11:27:04 -04:00
softsimon
8c78e35063 Use native angular way of accessing HTML elements 2023-05-31 14:39:19 +07:00
nymkappa
ea51ab8d0b [indexer] show github sha when successfully updated pools json 2023-05-30 10:42:41 -07:00
nymkappa
62169cee3f [indexer] oldest known mining pool block per network 2023-05-30 10:25:41 -07:00
nymkappa
e7e7b30807 fix log 2023-05-30 10:16:56 -07:00
nymkappa
107bdbc209 [indexer] show indexer progress in /status component 2023-05-30 10:13:07 -07:00
nymkappa
0b4615cbf0 [indexer] reindex diff adjustments and hashrates upon mining pool update 2023-05-30 10:05:10 -07:00
wiz
126a75ed45 ops: Use mempool/electrs fork of blockstream/electrs 2023-05-30 12:53:38 -03:00
softsimon
0703690190 Merge pull request #3785 from mempool/mononaut/mined-block-rbf
Detect RBF by mined transactions
2023-05-30 14:50:07 +07:00
wiz
7f56f0295b Merge pull request #3817 from mempool/ops/add-backend-reindex-macros
ops: Add npm run scripts for backend reindexing
2023-05-29 14:21:01 -03:00
wiz
27154da191 ops: Add npm run scripts for backend reindexing 2023-05-29 14:09:28 -03:00
hunicus
0fb61fded3 Add touch event for touchscreens 2023-05-27 13:20:55 -04:00
hunicus
ce00814bbf Unmute about promo video on click 2023-05-28 01:40:30 +09:00
wiz
c758a3538b Merge pull request #3806 from mempool/dependabot/npm_and_yarn/backend/bitcoinjs-lib-6.1.1 2023-05-27 12:16:29 -03:00
Felipe Knorr Kuhn
60fd2df4e6 Merge branch 'master' into dependabot/npm_and_yarn/backend/bitcoinjs-lib-6.1.1 2023-05-27 08:13:46 -07:00
wiz
05e38b43b9 Merge pull request #3809 from mempool/hunicus/tm-r
Switch tm to r for registered trademarks
2023-05-26 23:50:35 -03:00
hunicus
9fa1eebc02 Switch tm to r for registered trademarks 2023-05-27 06:46:18 +09:00
wiz
79ea1ee257 Merge pull request #3797 from mempool/hunicus/add-bullbit
Add bull bitcoin as enterprise sponsor
2023-05-26 11:34:40 -03:00
wiz
add405481e Merge pull request #3776 from knorrium/docker_vars_test
Docker vars tests
2023-05-26 11:33:48 -03:00
Felipe Knorr Kuhn
c382e03e4a Merge branch 'master' into docker_vars_test 2023-05-26 07:16:29 -07:00
dependabot[bot]
ac73de31b5 Bump bitcoinjs-lib from 6.1.0 to 6.1.1 in /backend
Bumps [bitcoinjs-lib](https://github.com/bitcoinjs/bitcoinjs-lib) from 6.1.0 to 6.1.1.
- [Changelog](https://github.com/bitcoinjs/bitcoinjs-lib/blob/master/CHANGELOG.md)
- [Commits](https://github.com/bitcoinjs/bitcoinjs-lib/compare/v6.1.0...v6.1.1)

---
updated-dependencies:
- dependency-name: bitcoinjs-lib
  dependency-type: direct:production
  update-type: version-update:semver-patch
...

Signed-off-by: dependabot[bot] <support@github.com>
2023-05-26 02:56:38 +00:00
wiz
ccd403b771 Merge branch 'master' into hunicus/add-bullbit 2023-05-25 19:18:00 -03:00
wiz
f28c460bb4 Merge pull request #3796 from mempool/hunicus/fka-embassyos
Change embassyos to startos
2023-05-25 19:17:53 -03:00
hunicus
5711c939d7 Make svg pretty 2023-05-25 15:58:48 -04:00
wiz
ef3ce6d187 Merge pull request #3799 from Emzy/ops/nginx-json-match 2023-05-25 14:58:45 -03:00
wiz
410e604dce Merge pull request #3801 from mempool/nymkappa/reindexing
Always re-indexing blocks when updating mining pools
2023-05-25 13:10:10 -03:00
nymkappa
8ffe2f3156 Always re-indexing blocks when updating mining pools 2023-05-25 09:08:51 -07:00
wiz
d27febf878 Merge pull request #3800 from mempool/simon/dont-wipe-mempool-cache
Don't wipe mempool cache in pools updater
2023-05-25 12:37:51 -03:00
softsimon
038f9659bb Save new disk cache after ignoring blocks 2023-05-25 19:19:14 +04:00
softsimon
aeb896e200 Don't wipe mempool cache in pools updater 2023-05-25 19:07:51 +04:00
softsimon
349ba613dd Revert "Don't wipe mempool cache in pools updater"
This reverts commit 714208a366.
2023-05-25 19:06:45 +04:00
softsimon
714208a366 Don't wipe mempool cache in pools updater 2023-05-25 19:05:29 +04:00
Stephan Oeste
ad4c7d7c0b Fix exact match of .js files. Was also matching .json before. 2023-05-25 16:50:56 +02:00
hunicus
264c02eca1 Add bull bitcoin as enterprise sponsor 2023-05-25 01:29:55 -04:00
hunicus
ea42077320 Change embassyos to startos 2023-05-25 00:31:44 -04:00
softsimon
92ffb441a1 Merge pull request #3791 from mempool/simon/pool-detection-addresses-fix
Fix for pool output address matching
2023-05-24 19:49:41 +04:00
wiz
725526d733 Merge pull request #3637 from mempool/ops/installer-fix-cln-folder-creation
ops: Fix installer creation of CLN folders
2023-05-23 18:34:28 -03:00
softsimon
31ea2e1d4b Fix for pool output address matching
fixes #3782
2023-05-24 01:22:01 +04:00
wiz
c81509d762 ops: Start mysql-server using onestart from install 2023-05-23 17:57:27 -03:00
wiz
07f16165ff ops: Disable buggy CLN crontab job in install 2023-05-23 17:23:51 -03:00
softsimon
536f29eb0d Merge pull request #3783 from mempool/mononaut/high-sigop-audit
Omit possible high-sigop txs from block health score
2023-05-21 19:32:26 -04:00
softsimon
e88e56421b Enable fullrbf 2023-05-20 18:00:22 -04:00
Mononaut
369db7a63c Detect RBF by mined transactions 2023-05-18 09:51:41 -04:00
Mononaut
81ec54fcb3 Omit possible high-sigop txs from block health score 2023-05-17 11:46:50 -04:00
softsimon
64d6bda728 Merge pull request #3780 from mempool/mononaut/fix-graph-filter
Fix mempool graph fee filtering
2023-05-16 23:37:12 -04:00
Mononaut
7ab05d815d Fix mempool graph fee filtering 2023-05-16 16:25:38 -06:00
Felipe Knorr Kuhn
50a865e54e Merge branch 'master' into docker_vars_test 2023-05-15 13:09:46 -07:00
softsimon
2156fb2a83 Merge pull request #3406 from mempool/nymkappa/block-health-history-i18n
Rename block prediction to block health
2023-05-14 16:39:43 -05:00
Felipe Knorr Kuhn
4e1087801a Merge branch 'master' into docker_vars_test 2023-05-14 14:34:47 -07:00
nymkappa
4dec152df0 Blocks Health -> Block Health 2023-05-14 16:32:11 -05:00
nymkappa
720c2b8807 Block prediction -> Block health - Fix wrong chart download name 2023-05-14 16:32:11 -05:00
Felipe Knorr Kuhn
b00e0b6a73 Fix Docker config file to use the correct types 2023-05-14 14:26:38 -07:00
Felipe Knorr Kuhn
6333f3aa47 Update the backend config fixture 2023-05-14 14:25:23 -07:00
softsimon
7b127ebe8b Merge pull request #3653 from mempool/mononaut/fix-mempool-block-skeleton
Fix mempool block skeleton loaders
2023-05-14 16:20:24 -05:00
softsimon
585309f9f0 Merge branch 'master' into mononaut/fix-mempool-block-skeleton 2023-05-14 16:18:37 -05:00
Felipe Knorr Kuhn
184ca3c662 Add a test to check that the docker json is in sync with the template 2023-05-14 14:17:53 -07:00
softsimon
bfea535df1 Merge pull request #3716 from mempool/mononaut/skeleton-fee
display fee box skeleton while mempool not in sync
2023-05-14 16:16:02 -05:00
softsimon
65189d6f3e Merge branch 'master' into mononaut/skeleton-fee 2023-05-14 16:10:39 -05:00
Felipe Knorr Kuhn
de434bb398 Add missing Lightning config 2023-05-14 13:26:35 -07:00
Felipe Knorr Kuhn
804485a526 Add missing DATABASE_TIMEOUT config 2023-05-14 13:20:18 -07:00
Felipe Knorr Kuhn
3f8fefcb0e Add a test to check if all the vars are on the Docker script 2023-05-14 13:14:44 -07:00
Felipe Knorr Kuhn
178da3df26 Use the same delimiter for all substitutions 2023-05-14 13:13:37 -07:00
softsimon
aa9fd845ef Path fix 2023-05-14 14:21:41 -05:00
softsimon
e41ce16bbb Merge branch 'master' into simon/ignore-existing-mining-pools 2023-05-14 13:19:22 -05:00
softsimon
f12403747d Adding video to sha1 check and chaining requests in promises. 2023-05-14 12:45:42 -05:00
Felipe Knorr Kuhn
68e9fb0882 Merge branch 'master' into update_node_matrix 2023-05-13 14:27:45 -07:00
softsimon
da3c3e8f5c Re-use variable fix 2023-05-13 13:24:44 -05:00
softsimon
a447887901 Adding missing slash 2023-05-13 13:23:27 -05:00
softsimon
02b5fadf44 Merge pull request #3774 from mempool/mononaut/fix-clock-fee
Remove clock fee debugging adjustment
2023-05-13 12:16:32 -05:00
Mononaut
ea52d40a10 Remove clock fee debugging adjustment 2023-05-13 09:33:15 -06:00
softsimon
cbd4ea1aa8 Merge pull request #3771 from mempool/mononaut/clock-mempool-subscription
Add missing clock websocket subscriptions
2023-05-13 08:35:05 -05:00
nymkappa
f95c16a78e [audit] warn if we cannot save templates and remove exception re-throw 2023-05-13 15:28:29 +02:00
softsimon
fa1cf7abb9 Merge pull request #3768 from mempool/hunicus/tm-symbol
Add trademark symbol to trademarks in footer
2023-05-13 08:04:55 -05:00
Mononaut
6a5afd7f95 Add missing clock websocket subscriptions 2023-05-12 21:08:12 -06:00
softsimon
d3bd434255 Use sha hashes to compare before downloading 2023-05-12 16:01:45 -05:00
hunicus
0ddd9b2487 Turn generic project text into trademark 2023-05-12 15:54:35 -04:00
hunicus
ee1e75f978 Add trademark symbol to trademarks in footer 2023-05-12 12:55:51 -04:00
wiz
6e83bee23f ops: Disable mempool loop for lightning backends 2023-05-12 11:38:28 -05:00
softsimon
49e057e726 Fixing trailing slash issue 2023-05-12 11:05:20 -05:00
softsimon
7f3e4eb534 Skip existing mining pool logos when syncing assets 2023-05-12 11:05:19 -05:00
softsimon
97c8ace8f7 Merge pull request #3749 from mempool/nymkappa/fix-3747
[mempool graph] show horizontal guide line
2023-05-12 10:54:05 -05:00
softsimon
a3ca79e995 Temporarily hide sign in 2023-05-12 10:53:42 -05:00
softsimon
50a96ce340 Merge branch 'master' into update_node_matrix 2023-05-12 10:37:23 -05:00
softsimon
962c9543d0 Merge pull request #3752 from mempool/simon/global-footer-fixes
Global footer fixes
2023-05-12 10:35:01 -05:00
softsimon
52bcef54be Only show sign in on mempool base module 2023-05-12 10:34:32 -05:00
softsimon
22030027b4 Merge pull request #3755 from mempool/mononaut/better-precise-durations
smarter time duration unit selection
2023-05-12 10:31:16 -05:00
hunicus
53a9093a70 Center left footer panel vertically 2023-05-12 11:29:17 -04:00
softsimon
6ef793f202 Merge pull request #3760 from knorrium/fix_rbf_test
Update mocks and fix RBF tests
2023-05-12 10:29:07 -05:00
softsimon
b227ff8e1b Update websocket RBF fixture 2023-05-12 10:14:21 -05:00
Felipe Knorr Kuhn
38162e1af2 Merge branch 'master' into update_node_matrix 2023-05-12 07:44:07 -07:00
Felipe Knorr Kuhn
db6b9ffa6e Merge branch 'master' into fix_rbf_test 2023-05-12 07:43:54 -07:00
softsimon
5a0c809fa7 Fix linting 2023-05-12 09:37:19 -05:00
softsimon
f6caed3ffd Hide sign in on non official 2023-05-12 09:31:12 -05:00
softsimon
6141516eb2 Merge pull request #3753 from mempool/mononaut/fix-difficulty-estimate
Fix difficulty estimate
2023-05-12 09:21:06 -05:00
Felipe Knorr Kuhn
b0478f6604 Remove node 19 and 20 due to Angular incompatibility 2023-05-11 22:15:29 -07:00
Felipe Knorr Kuhn
6cae3256a9 Update node CI test matrix 2023-05-11 21:42:19 -07:00
Felipe Knorr Kuhn
3b295b5482 Update mocks and fix RBF tests 2023-05-11 21:33:47 -07:00
softsimon
adc395fc3d Merge pull request #3743 from mempool/mononaut/full-stack-fee-stats
stack-of-n-blocks fee statistics
2023-05-11 19:48:02 -05:00
hunicus
0c52898010 Add spacing to compensate for links under button 2023-05-11 20:43:57 -04:00
hunicus
4fe6a74d20 Vary network urls according to base module 2023-05-11 20:37:11 -04:00
softsimon
7fca18d7be Merge pull request #3751 from mempool/mononaut/fix-rbf-times
Fix RBF timestamps to always use seconds
2023-05-11 19:35:29 -05:00
hunicus
208756bdd2 Show explorer links conditionally 2023-05-11 19:31:22 -04:00
hunicus
82a072bd87 Remove footer from bisq docs 2023-05-11 19:04:24 -04:00
softsimon
6d76d11837 Merge pull request #3742 from vostrnad/empty-witness-items
Display empty witness items
2023-05-11 17:51:44 -05:00
hunicus
982f1e007a Fix bisq quirks 2023-05-11 18:48:53 -04:00
hunicus
a4bf545993 Fix footer on bisq 2023-05-11 18:44:42 -04:00
softsimon
fe51f6a504 Merge pull request #3745 from mempool/mononaut/address-false-positive
Fix txids interpreted as addresses in search bar
2023-05-11 17:34:19 -05:00
softsimon
f5d311ca44 Fixes for Liquid 2023-05-11 17:14:16 -05:00
hunicus
b81dd99825 Remove social icons and version from about page 2023-05-11 17:57:04 -04:00
hunicus
f08457aba7 Improve responsiveness and add social icons 2023-05-11 17:46:38 -04:00
Mononaut
c601e5dcb4 smarter time duration unit selection 2023-05-11 13:33:38 -06:00
hunicus
11b82b3459 Remove third column on footer 2023-05-11 15:31:48 -04:00
Mononaut
d322665789 update difficulty tests 2023-05-11 11:39:18 -06:00
Mononaut
49529627f8 Fix difficulty adjustment calculation 2023-05-11 11:18:58 -06:00
softsimon
107746feec Global footer fixes 2023-05-11 11:38:57 -05:00
wiz
cb9d09a930 Merge branch 'master' into nymkappa/scan-closed-channel-no-mempool 2023-05-11 11:04:19 -05:00
Mononaut
e5bef55d47 Fix RBF timestamps to always use seconds 2023-05-11 09:21:48 -06:00
wiz
17dd02ed4e Merge pull request #3736 from mempool/mononaut/optimize-websocket-updates
Optimize websocket updates
2023-05-11 09:57:08 -05:00
nymkappa
7bb34fe090 [mempool graph] show horizontal guide line 2023-05-11 14:30:57 +02:00
Mononaut
abbaee0274 Fix txids interpreted as addresses in search 2023-05-10 19:57:58 -06:00
Mononaut
3d1cd3193a online calculation of stack-of-n-blocks fee statistics 2023-05-10 17:34:20 -06:00
Vojtěch Strnad
be53cd8b48 Display empty witness items 2023-05-11 00:11:23 +02:00
Vojtěch Strnad
4b20ea7232 Accept the CLA for @vostrnad 2023-05-11 00:10:57 +02:00
Mononaut
ffd7831efc optimize websocket init data 2023-05-10 08:05:39 -06:00
Mononaut
f8636d20c2 optimize batch client websocket updates 2023-05-10 08:05:39 -06:00
wiz
3b4dd7e633 Merge pull request #3724 from mempool/hunicus/big-footer
Add big footer
2023-05-09 13:45:43 -05:00
wiz
ea101e65bb Merge branch 'master' into hunicus/big-footer 2023-05-09 13:31:58 -05:00
wiz
8a713c3880 Merge pull request #3732 from mempool/mononaut/more-fee-bands
Increase displayed fee bands
2023-05-09 13:31:50 -05:00
wiz
42d5650bc0 Merge branch 'master' into mononaut/more-fee-bands 2023-05-09 13:20:04 -05:00
wiz
ee2bc2282a Merge pull request #3731 from mempool/mononaut/clocktower
Mempool clocks
2023-05-09 12:51:01 -05:00
wiz
34565dc675 Merge branch 'master' into mononaut/clocktower 2023-05-09 12:36:58 -05:00
wiz
6e57de3220 Merge pull request #3735 from mempool/mononaut/optimize-gbt-main-thread
Optimize main thread processing of GBT updates
2023-05-10 00:13:32 +09:00
Mononaut
033e78c0a7 Optimize main thread processing of GBT updates 2023-05-08 23:01:17 -06:00
Mononaut
5257716e1a Dynamic fee ranges & legend in mempool graph 2023-05-08 12:53:37 -06:00
Mononaut
47b95af8ae increase range of fee colors 2023-05-08 12:44:14 -06:00
Mononaut
f20bfb025b fix clock merge conflicts 2023-05-08 09:11:30 -06:00
Mononaut
9671259f5c clock selected block arrow 2023-05-08 08:48:56 -06:00
Mononaut
07dddd857b resize clock labels 2023-05-08 08:48:56 -06:00
Mononaut
19353fc1d0 rename clock components 2023-05-08 08:48:56 -06:00
Mononaut
1fccd70379 clock size query params 2023-05-08 08:48:55 -06:00
Mononaut
fdb0cf509d query param toggle for clock stats 2023-05-08 08:48:55 -06:00
Mononaut
056d61a28d clock i18n 2023-05-08 08:48:55 -06:00
Mononaut
d3a7950e78 Add clock statistics 2023-05-08 08:48:55 -06:00
Mononaut
3ddd51d4cb implement clock face & dial 2023-05-08 08:48:55 -06:00
Mononaut
f879a34021 responsive clock, fix blockchain 2023-05-08 08:48:55 -06:00
Mononaut
61531171c9 clocktower top blocks & layout adjustment 2023-05-08 08:48:53 -06:00
Mononaut
3b459b6857 hourly blocks clock faces 2023-05-08 08:46:18 -06:00
softsimon
be5882edb3 Merge pull request #3710 from mempool/mononaut/optimize-gbt-data
Mononaut/optimize gbt data
2023-05-07 23:55:27 +04:00
Mononaut
428d4fc6ab optimize data structures for advanced GBT algorithm 2023-05-07 11:54:23 -06:00
softsimon
07d9315bbe Merge pull request #3727 from mempool/simon/rapid-mempool-sync
Rapid mempool sync
2023-05-07 20:22:10 +04:00
softsimon
ee05a6852e Rapid mempool sync 2023-05-07 20:21:02 +04:00
softsimon
64b3e7ad50 Merge pull request #3655 from mempool/mononaut/mempool-delete-race-condition
Fix new block mempool eviction race condition
2023-05-07 20:18:36 +04:00
hunicus
822934828a Add initial content to big footer 2023-05-06 04:10:17 -04:00
softsimon
8866923efa Merge pull request #3721 from mempool/mononaut/fix-first-seen
Restore transaction first seen field
2023-05-06 03:05:35 +04:00
Mononaut
06ef114395 Restore transaction first seen field 2023-05-05 15:12:05 -07:00
softsimon
918a3ef115 Merge pull request #3677 from mempool/mononaut/duration-precision
More precise durations in difficulty components
2023-05-06 01:58:16 +04:00
softsimon
5e9e570b99 Merge branch 'master' into mononaut/duration-precision 2023-05-06 01:39:50 +04:00
softsimon
3e6c672a1e Merge pull request #3676 from mempool/mononaut/better-durations
Improve unit selection for duration formatting
2023-05-06 01:39:37 +04:00
Mononaut
387a38a1c8 Increase precision of some formatted durations 2023-05-05 14:35:57 -07:00
Mononaut
78e86c7c55 Improve unit selection for duration formatting 2023-05-05 14:30:03 -07:00
softsimon
de1e6d3b27 Merge pull request #3672 from mempool/hunicus/unchained-icon
Update unchained icon on about page
2023-05-06 01:18:53 +04:00
softsimon
9c9837d722 Merge pull request #3720 from mempool/simon/fix-global-footer-graph-pages
Fix height on graphs pages with footer
2023-05-06 00:55:56 +04:00
softsimon
494ceca44e Correcting graph widget bottom padding 2023-05-06 00:55:25 +04:00
softsimon
cb324733bf Removing weird bottom padding 2023-05-06 00:31:08 +04:00
softsimon
ad6d817f32 Adding bottom margin on some graph list pages 2023-05-06 00:20:56 +04:00
softsimon
e5ae2f6ef6 Fixing header margin and Lightning charts 2023-05-06 00:08:26 +04:00
softsimon
3425f2e390 Fix height on graphs pages with footer 2023-05-05 23:38:35 +04:00
Mononaut
3d0f7d6855 add missing rbf eviction 2023-05-05 10:20:17 -07:00
Mononaut
d322c6b5b5 Fix new block mempool deletion race condition 2023-05-05 10:19:11 -07:00
softsimon
a7dff0effe Merge pull request #3719 from mempool/mononaut/fix-rbf-cache-eviction
fix rbf cache eviction logic
2023-05-05 14:27:56 +04:00
Mononaut
f456912679 fix rbf cache eviction logic 2023-05-04 23:30:11 -04:00
softsimon
0d204426e6 Merge pull request #3564 from mempool/simon/global-footer
Global footer
2023-05-05 02:14:36 +04:00
softsimon
4c5ece8249 Fixing navbar overflowing footer 2023-05-05 02:06:48 +04:00
softsimon
e7ae9049bb Enabling footer on about page 2023-05-05 01:49:11 +04:00
softsimon
d40344aa92 Global footer component that fixes Liquid 2023-05-05 01:38:58 +04:00
softsimon
e3e273a688 Fixing position on mobile view 2023-05-05 00:53:21 +04:00
softsimon
e9a0be4941 Remove logger
Co-authored-by: Bufo <32884105+bufo24@users.noreply.github.com>
2023-05-05 00:53:21 +04:00
softsimon
9ca9ab63f5 Bottom padding for mobile 2023-05-05 00:53:20 +04:00
softsimon
261241fcc8 Global footer 2023-05-05 00:53:20 +04:00
softsimon
3c108a271d Merge pull request #3703 from mempool/mononaut/delayed-disk-cache
delay writing disk cache until block handler completes
2023-05-04 18:15:11 +04:00
Mononaut
e5f97ace8b delay writing disk cache until block handler completes 2023-05-03 15:53:47 -06:00
softsimon
e807b3ca74 Merge pull request #3705 from mempool/mononaut/increase-websocket-timeout
Increase client websocket timeout
2023-05-04 01:35:16 +04:00
softsimon
90154aec83 Merge pull request #3380 from mempool/mononaut/mempool-effective-rates
Use effective fee rates in mempool block visualizations & tooltips
2023-05-04 01:19:40 +04:00
Mononaut
d7333ec858 display fee box skeleton while mempool not in sync 2023-05-03 15:01:07 -06:00
softsimon
e6e90799ef Merge branch 'master' into mononaut/mempool-effective-rates 2023-05-04 00:58:49 +04:00
softsimon
8fd9c1a292 Merge pull request #3673 from mempool/mononaut/stable-mempool-positions
Improve stability of the mempool transaction marker arrow
2023-05-04 00:55:35 +04:00
Mononaut
489470639a remove mempool marker on tx confirmation 2023-05-03 14:18:07 -06:00
Mononaut
b79377d5a1 Use new mempool position data for transaction ETA 2023-05-03 14:18:07 -06:00
Mononaut
a22703d547 Add mempool position improvements to updateMempoolBlocks 2023-05-03 14:18:07 -06:00
Mononaut
3b8bcc4da5 Improve stability of mempool tx position arrow 2023-05-03 14:18:06 -06:00
softsimon
a5b764fb66 Merge pull request #3714 from mempool/mononaut/fix-tx-eta
Fix transaction ETA calculation
2023-05-04 00:17:09 +04:00
Mononaut
9bd968f6c3 fix tx page mempool blocks subscription leak 2023-05-03 13:58:08 -06:00
Mononaut
e3f1fced99 keep ETA relative time basis updated 2023-05-03 13:55:26 -06:00
softsimon
12ae940ed6 Merge pull request #2847 from mempool/mononaut/rbf-timeline
RBF Timelines
2023-05-03 23:48:57 +04:00
Mononaut
b325f8c524 Fix import path 2023-05-03 13:16:27 -06:00
Mononaut
ca7c8906a5 Fix missing null checks in tx component 2023-05-03 13:00:23 -06:00
Mononaut
a3b0c56182 Improve RBF diagram state visibility 2023-05-03 13:00:23 -06:00
Mononaut
f29a56f3fe Don't show RBF replaced alert banner after confirmation 2023-05-03 13:00:23 -06:00
Mononaut
36c7697c2b Update rbf disk caching for new method 2023-05-03 13:00:23 -06:00
Mononaut
3355419761 fix missing FULL_RBF_ENABLED config entries 2023-05-03 13:00:22 -06:00
Mononaut
3cb96b32a6 fix rbf e2e test 2023-05-03 13:00:22 -06:00
Mononaut
8d57aa1f06 scroll selected rbf node into view 2023-05-03 13:00:22 -06:00
Mononaut
f2749c67f3 change rbf subheading, fix interval alignment 2023-05-03 13:00:22 -06:00
Mononaut
f95da34fd1 change rbf tooltip to standard two-column table design 2023-05-03 13:00:22 -06:00
Mononaut
7e9cfa0858 Persist RBF cache to disk 2023-05-03 13:00:20 -06:00
Mononaut
6fb4adc27d fixes for non-dual-node rbf feature 2023-05-03 12:53:56 -06:00
Mononaut
723212c918 add mouseover tooltips to rbf timelines 2023-05-03 12:53:56 -06:00
Mononaut
086b41d958 support trees of RBF replacements 2023-05-03 12:53:56 -06:00
Mononaut
c064ef6ace remove 'replaces' alert on transaction page 2023-05-03 12:53:56 -06:00
Mononaut
f46296a2bb new page listing recent RBF events 2023-05-03 12:53:53 -06:00
Mononaut
7b2a1cfd10 update RBF timeline over websocket 2023-05-03 12:52:21 -06:00
Mononaut
1b843da785 Timeline of replacements for RBF-d transactions 2023-05-03 12:52:20 -06:00
softsimon
8db7326a5a Merge pull request #3709 from mempool/mononaut/optimize-new-block-gbt
skip unnecessary makeBlockTemplates call
2023-05-03 21:56:07 +04:00
Mononaut
3f49944c05 Fix transaction ETA calculation 2023-05-03 10:02:03 -06:00
softsimon
2f0d4d6068 Merge pull request #3712 from mempool/revert-3694-simon/revert-tcp-socket-fallback
Revert "Revert TCP socket fallback"
2023-05-03 10:12:59 +04:00
softsimon
dd68572603 Revert "Revert TCP socket fallback" 2023-05-03 10:11:44 +04:00
Mononaut
03ee5c7c31 skip unnecessary makeBlockTemplates 2023-05-02 18:47:34 -06:00
softsimon
4d5662fee4 Merge pull request #3708 from mempool/simon/change-forensics-logging-to-debug
Change forensic logging to debug
2023-05-02 18:08:26 +04:00
softsimon
565aa9616b Change forensic logging to debug 2023-05-02 17:39:02 +04:00
softsimon
62ef1768ec Merge pull request #3707 from mempool/simon/audit-optimization-dead-code
Removing dead code causing slowdown
2023-05-02 16:37:54 +04:00
softsimon
c659adb4be Removing dead code causing slowdown 2023-05-02 15:40:16 +04:00
Mononaut
3691ba8242 Increase client websocket timeout 2023-05-01 18:13:53 -06:00
softsimon
32c39f7af9 Merge pull request #3702 from mempool/mononaut/websocket-logs
Log websocket statistics
2023-05-02 00:55:13 +04:00
Mononaut
3748102bb0 Log websocket statistics 2023-05-01 13:08:29 -06:00
softsimon
a87b604153 Merge pull request #3700 from mempool/mononaut/fix-async-cache-load
await for mempool change handler after loading disk cache
2023-05-01 01:54:33 +04:00
Mononaut
4597bfa5d7 use $ naming convention for async function names 2023-04-30 15:52:44 -06:00
Mononaut
f30cf70226 await for mempool change handler after loading disk cache 2023-04-30 15:51:26 -06:00
softsimon
ba4253da79 Merge pull request #3689 from mempool/mononaut/debug-main-loop-stall
detect and log stalls in the main loop
2023-05-01 00:17:57 +04:00
softsimon
58b08f2c33 Add end quotes 2023-05-01 00:16:23 +04:00
softsimon
ac240398ef Merge branch 'master' into mononaut/debug-main-loop-stall 2023-04-30 22:53:06 +04:00
softsimon
1f2d05c5a4 Merge pull request #3696 from mempool/mononaut/mysql-timeout
Add explicit timeout to mysql DB queries
2023-04-30 22:09:02 +04:00
Mononaut
e05f2198d5 Add explicit timeout to mysql DB queries 2023-04-28 19:21:03 -06:00
Mononaut
95df317f56 detect and log stall in main loop 2023-04-28 19:17:58 -06:00
softsimon
f61f520a4b Merge pull request #3687 from mempool/simon/backend-block-tip-height-endpoint
Backend block tip height endpoint
2023-04-28 13:41:49 +04:00
wiz
864225a0dc Merge branch 'master' into simon/backend-block-tip-height-endpoint 2023-04-28 18:25:13 +09:00
softsimon
5628da2f80 Merge pull request #3694 from mempool/simon/revert-tcp-socket-fallback
Revert TCP socket fallback
2023-04-28 12:16:02 +04:00
softsimon
000c46bf57 Revert TCP socket fallback 2023-04-28 12:06:49 +04:00
softsimon
66919a1aba Backend block tip height endpoint 2023-04-26 13:49:01 +04:00
Mononaut
7b9fd8ac63 prevent table overflow in unfurl previews 2023-04-26 05:15:38 +09:00
nymkappa
99af881cdd Merge branch 'master' into nymkappa/clip-label-overflow 2023-04-22 20:10:27 +02:00
hunicus
008ec104b6 Fix clashing class in unchained svg 2023-04-19 22:52:53 -04:00
hunicus
b0859f91b2 Update unchained icon on about page 2023-04-19 22:00:09 -04:00
TechMiX
22ee9916dd fix change component and audit button position in RTL mode 2023-04-19 12:15:09 +02:00
softsimon
1df2c89cdb Merge branch 'master' into mononaut/fix-mempool-block-skeleton 2023-04-12 00:11:51 +08:00
softsimon
2fbe2b2fa6 Merge pull request #3652 from mempool/mononaut/fix-flying-mempool-blocks
Disable mempool block animations except when new block is mined
2023-04-08 23:09:53 +08:00
Giovanni La Perna
71935e29c8 Update and rename giovannilaperna.txt to learntheropes.txt
change github username
2023-04-08 11:19:04 +02:00
softsimon
d293d637b5 Merge pull request #3633 from mempool/mononaut/right-click-scroll
Disable blockchain drag for middle/right click
2023-04-08 17:00:25 +08:00
softsimon
04a8249883 Merge pull request #3644 from mempool/mononaut/full-mempool-cpfp
Perform full cpfp calculations for the entire mempool
2023-04-08 16:50:57 +08:00
Rex
123b853205 Add cla 2023-04-08 11:43:26 +08:00
softsimon
a196c276c9 Merge pull request #3656 from mempool/nymkappa/update-tests
[config] add missing RETRY_UNIX_SOCKET_AFTER
2023-04-08 10:22:08 +08:00
nymkappa
dfe2cf631f [config] fix docker esplora config and template 2023-04-08 10:42:08 +09:00
nymkappa
d6913b6439 [config] add missing RETRY_UNIX_SOCKET_AFTER 2023-04-07 13:28:32 +09:00
Giovanni La Perna
bad73c1805 Create giovannilaperna.txt 2023-04-07 01:04:23 +02:00
Mononaut
6602bddb2b Fit mempool block skeleton loaders to screen 2023-04-07 03:25:02 +09:00
Mononaut
32cd8bb3cb Prevent mempool block animations except when new block mined 2023-04-07 02:18:33 +09:00
Mononaut
5950034f53 Perform full cpfp calculations for the entire mempool 2023-04-07 00:25:45 +09:00
wiz
d18ebdfc59 ops: Update hard-coded path for liquid asset icons 2023-04-06 19:19:30 +09:00
wiz
604c3ba266 ops: Tweak boot-time delays for all daemons 2023-04-06 19:19:28 +09:00
wiz
b8d063a4f7 Merge pull request #3649 from mempool/nymkappa/indexing-error
Fix indexing error
2023-04-06 17:52:58 +09:00
wiz
3c30415982 ops: Add fallback TCP socket for esplora backends 2023-04-06 17:52:17 +09:00
nymkappa
c5252dc27d [indexing] delete dead code 2023-04-06 11:55:25 +09:00
nymkappa
6016db2533 [indexing] save missing fee_percentiles and median_fee_amt when indexing on the fly 2023-04-06 11:55:17 +09:00
nymkappa
b23f14b798 [indexing] fix typescript issue, reading invalid field 2023-04-06 11:54:22 +09:00
wiz
09d52f9fbe Merge pull request #3643 from mempool/nymkappa/esplora-socket-fallback
[esplora] fallback to tcp socket if unix socket fails
2023-04-05 22:58:34 +09:00
nymkappa
c23e529f0a [main loop] retry every second upon exception - warn after 5 attempts 2023-04-05 22:44:01 +09:00
nymkappa
ab7cb5f681 [esplora] reset timeout variable when retrying unix socket 2023-04-05 17:05:23 +09:00
nymkappa
db27e5a92c [esplora] print log when retrying unix socket - don't fallback to tcp socket on ETIMEDOUT 2023-04-05 17:00:53 +09:00
wiz
66109afb0d ops: Enable unix socket for esplora on mainnet-lightning 2023-04-05 16:48:26 +09:00
nymkappa
b6f1fd5a4a [esplora] initialize default socket config to axiosConfigWithUnixSocket 2023-04-05 16:38:37 +09:00
nymkappa
44a0913b81 [esplora] fallback to tcp socket if unix socket fails 2023-04-05 16:27:13 +09:00
Mononaut
4c569c0ded Send mempool effective fee rate changes to frontend & apply 2023-04-05 08:42:01 +09:00
Mononaut
3d5c156776 Use effective fee rates in mempool block visualizations & tooltips 2023-04-05 08:42:01 +09:00
wiz
6c81dcdc76 Merge pull request #3640 from mempool/ops/use-unix-sockets-for-mysql
ops: Use unix sockets for MySQL
2023-04-04 21:52:26 +09:00
wiz
906f24f0ee ops: Use unix sockets for MySQL 2023-04-04 21:48:42 +09:00
wiz
bca35600ff ops: Fix installer creation of CLN folders 2023-04-04 20:18:46 +09:00
wiz
a9dc5e9be4 Merge pull request #3634 from mempool/nymkappa/fiat-component-custom-color
Make fiat component color class customizable
2023-04-04 14:11:58 +09:00
nymkappa
90fa4a8f77 Make fiat component color class customizable 2023-04-04 11:42:06 +09:00
Mononaut
d325734c16 Disable blockchain drag for middle/right click 2023-04-04 08:25:40 +09:00
Mononaut
aa882aa36a Fix RTL locale unfurls 2023-04-04 07:55:55 +09:00
wiz
bdbb1dcf8e Merge pull request #3631 from mempool/simon/improved-warning-message
Redesigned testnet alert
2023-04-03 22:09:52 +09:00
wiz
2ada9dcd40 Merge branch 'master' into simon/improved-warning-message 2023-04-03 21:58:09 +09:00
wiz
95cc74c076 Merge pull request #3630 from mempool/hunicus/phx-dark-icon
Switch phoenix wallet logo to dark mode
2023-04-03 21:57:56 +09:00
softsimon
d59a31a65a Merge branch 'master' into simon/improved-warning-message 2023-04-03 20:03:04 +09:00
softsimon
38e4832b6a Restoring missing tx index attribute for cypress 2023-04-03 20:02:39 +09:00
softsimon
6b6dc9fb24 Merge pull request #3622 from mempool/mononaut/shift-click
Key modifiers to open transaction in new tab from visualization
2023-04-03 19:05:08 +09:00
softsimon
a1b6fc5a7b Redesigned testnet alert
fixes #3625
2023-04-03 18:30:06 +09:00
wiz
6ac0e887f7 Merge pull request #3247 from mempool/ops/esplora-unix-sockets
ops: Use unix sockets to query esplora from nginx
2023-04-03 16:07:05 +09:00
wiz
bdb7e62921 Merge branch 'master' into ops/esplora-unix-sockets 2023-04-03 15:34:47 +09:00
hunicus
445e376675 Switch phoenix wallet logo to dark mode 2023-04-03 02:07:56 -04:00
wiz
bedbd9c5d5 Merge pull request #3587 from mempool/hunicus/mynode-casing
Update mynode profile on about page
2023-04-03 15:00:13 +09:00
wiz
34236fca7c Remove unused mynodebtc.jpg 2023-04-03 14:59:39 +09:00
wiz
f74d651b85 Merge branch 'master' into hunicus/mynode-casing 2023-04-03 14:51:28 +09:00
softsimon
41a93af89e Merge pull request #3601 from mempool/mononaut/fix-liquid-asset-diagram
Fix broken tx diagram for non-LBTC liquid assets
2023-04-03 14:39:38 +09:00
wiz
e5b1615c61 Merge branch 'master' into mononaut/fix-liquid-asset-diagram 2023-04-03 12:27:32 +09:00
softsimon
2ef340712f Merge pull request #3442 from mempool/nymkappa/reorg-keep-templates
When a re-org happens, keep the block templates for audit
2023-04-03 12:24:05 +09:00
wiz
3841d1e7b8 Merge pull request #3600 from mempool/mononaut/fix-unfurl-cpfp-badge
Fix unfurl cpfp badge
2023-04-03 11:50:11 +09:00
wiz
675ecc608c Merge branch 'master' into mononaut/fix-unfurl-cpfp-badge 2023-04-03 11:40:47 +09:00
wiz
3625e41e97 Merge pull request #3599 from mempool/mononaut/fix-ln-node-unfurl
Fix node unfurl row overflow
2023-04-03 11:40:39 +09:00
wiz
ff8fecbd05 Merge branch 'master' into mononaut/fix-ln-node-unfurl 2023-04-03 11:31:20 +09:00
Mononaut
a91a8d2a4b Key modifiers to open tx in new tab from visualization 2023-04-02 07:46:32 +09:00
softsimon
83c03474a9 Merge pull request #3586 from mempool/nymkappa/fix-price-undefined
Add missing sanity check when fetching single price datapoint
2023-04-01 18:04:32 +09:00
softsimon
f55aac46f1 Merge pull request #3568 from mempool/hunicus/fix-memfaq-mobile
Fix anchor link expand on mobile for mempool faq
2023-04-01 18:03:17 +09:00
softsimon
f1b5ee2a5f Merge pull request #3404 from mempool/nymkappa/bugfix/wrong-percentage-heap-log
Fix % on heap limit warn
2023-04-01 16:56:50 +09:00
softsimon
97008b9caa Merge pull request #3326 from mempool/nymkappa/warning-testnet-signet
Show warning on testnet/signet
2023-04-01 16:53:17 +09:00
softsimon
b3038e557c Merge pull request #3618 from mempool/nymkappa/ln-stats-import-trycatch
Wrap lightning stats importer into try/catch
2023-04-01 15:21:55 +09:00
softsimon
61e29bcff9 Merge pull request #3608 from mempool/nymkappa/fix-default-graph-preference
Use window.location object instead of angular router for default graph window preference setting
2023-04-01 15:18:10 +09:00
nymkappa
a512884b65 Wrap lightning stats importer into try/catch 2023-04-01 14:56:18 +09:00
softsimon
46fbd6aa49 Merge pull request #3443 from mempool/simon/update-backend-libs-2023-03
Update backend NPM deps
2023-04-01 12:25:40 +09:00
softsimon
fc29943d0f Upgrading deps 2023-04-01 12:16:59 +09:00
softsimon
482a609d84 Update backend NPM libs 2023-04-01 12:15:32 +09:00
softsimon
b7d869ad23 Merge pull request #3375 from mempool/nymkappa/log-update
Update some logs
2023-04-01 12:07:53 +09:00
nymkappa
321161ede9 Clean up some logs 2023-04-01 12:00:54 +09:00
softsimon
b5ad0895ac Merge pull request #3610 from mempool/nymkappa/fix-search-wiz-test
Fix search 1wizS test
2023-03-31 22:32:40 +09:00
softsimon
427cef9f9d Merge pull request #3611 from mempool/nymkappa/fix-transaction-list-infinite-scroll
Fix infinite scroll transaction list component
2023-03-31 19:23:36 +09:00
nymkappa
816fb3bf01 Don't delete transactions when checking if the current chain is valid 2023-03-31 12:22:26 +09:00
nymkappa
44bbb472d3 Keep re-org'ed block summaries in the database 2023-03-31 12:08:05 +09:00
nymkappa
aba49897f9 Fix infinite scroll transaction list component 2023-03-30 17:07:34 +09:00
nymkappa
96121a86f8 Fix search 1wizS test 2023-03-29 17:35:49 +09:00
nymkappa
ea2193a42d Add missing sanity check when fetching single price datapoint 2023-03-29 17:33:07 +09:00
nymkappa
9e4fe40ca3 When a re-org happens, keep the block templates for audit 2023-03-29 17:32:17 +09:00
nymkappa
d9b4ad64bb Fix % on heap limit warn 2023-03-29 17:30:32 +09:00
nymkappa
7562407a0c Show warning on testnet/signet 2023-03-29 17:27:33 +09:00
nymkappa
0bc244b9f1 Use window.location object instead of angular router for default graph window preference setting 2023-03-29 15:10:59 +09:00
nymkappa
7ab373ecac Clip overflowing labels in pool component on mobile 2023-03-29 15:09:38 +09:00
Mononaut
14e0d80042 Fix broken tx diagram for non-LBTC liquid assets 2023-03-29 07:54:58 +09:00
Mononaut
5555916de3 Fix unfurl cpfp badge 2023-03-29 05:45:49 +09:00
Mononaut
ef09912d1b Fix node unfurl row overflow 2023-03-29 03:15:01 +09:00
nymkappa
c675d1c498 Make sure to scan closed channels even if config.MEMPOOL.ENABLE = false 2023-03-28 23:07:50 +09:00
softsimon
5977251a20 Merge pull request #3316 from mempool/mononaut/projected-median-fee
New median fee calculation for mempool blocks
2023-03-28 17:22:20 +09:00
Mononaut
a4c027dc48 clean up unused vars in mempool-blocks.ts 2023-03-28 17:02:37 +09:00
Mononaut
9f40cba914 use new median fee calculation for mempool blocks 2023-03-28 17:02:37 +09:00
softsimon
5ba2c181b0 Merge pull request #3315 from mempool/mononaut/effective-fee-rates
Use effective fee rate heuristics for block fee span
2023-03-28 16:57:22 +09:00
Mononaut
2fc404a55c refactor effective rate calculation 2023-03-28 16:20:20 +09:00
Mononaut
2baa10dcef Use effective fee rate heuristics for block fee span 2023-03-28 16:19:06 +09:00
softsimon
d08a318a2c Merge pull request #3584 from mempool/release/v2.5.0
Release v2.5.0
2023-03-28 14:26:18 +09:00
wiz
96f3218ec6 Bump version to v2.6.0-dev 2023-03-28 14:25:05 +09:00
wiz
57eddac7f0 Release v2.5.0 2023-03-28 12:14:31 +09:00
wiz
af115b49aa Merge pull request #3585 from mempool/nymkappa/fix-db-state
Reset pools sha db state
2023-03-28 12:12:04 +09:00
softsimon
332f9a2f5e Pull from transifex (Vietnamese) 2023-03-28 12:11:27 +09:00
hunicus
2b3d132db6 Update mynode logo 2023-03-27 07:26:06 -04:00
hunicus
f1361a698d Switch mynode capitalization to match branding 2023-03-27 06:50:30 -04:00
nymkappa
34eef3553b Reset pools sha db state 2023-03-27 19:39:50 +09:00
softsimon
9e4ce42b6a Pull from transifex (Hebrew) 2023-03-27 16:31:02 +09:00
softsimon
4c4a91ae95 Merge pull request #3560 from mempool/mononaut/missing-tx-bug
Fix thread inconsistency / lazy deletion race condition bugs
2023-03-27 15:33:34 +09:00
softsimon
93d46d5c5b Pull from transifex 2023-03-27 15:12:23 +09:00
softsimon
8788d4f898 Pull from transifex 2023-03-27 15:10:17 +09:00
wiz
e28650c46c Merge pull request #3581 from mempool/simon/enable-lightning-mempool-prod 2023-03-27 14:53:46 +09:00
softsimon
855c11f02c Enabling mempool in lightning prod
fixes #3579
2023-03-27 14:51:34 +09:00
softsimon
3f8e91bd46 Merge pull request #3578 from mempool/nymkappa/revert-undocumented-fast-forward
Revert regression introduced in #1320
2023-03-26 22:10:23 +09:00
softsimon
6722e45109 Merge pull request #3576 from mempool/mononaut/fix-difficulty-adjustment
Fix difficulty adjustment bugs
2023-03-26 18:02:30 +09:00
nymkappa
414383638d Revert regression introduced in #1320 2023-03-26 17:54:24 +09:00
nymkappa
2575b79c05 Merge branch 'master' into mononaut/fix-difficulty-adjustment 2023-03-26 17:02:41 +09:00
nymkappa
c7cab4c877 Remove difficulty adjustment calculation lag in the backend 2023-03-26 17:01:04 +09:00
softsimon
85c2f0ba30 Pull from transifex 2023-03-26 16:46:20 +09:00
Mononaut
edfbede704 Don't send back difficulty adjustment info 2023-03-26 09:05:41 +09:00
Mononaut
5f60cb821a Fix difficulty adjustment start-of-epoch edge cases 2023-03-26 07:27:11 +09:00
Mononaut
8486c1117d log warnings for unexpectedly missing txs 2023-03-26 05:41:31 +09:00
hunicus
ad3785ff41 Fix anchor link expand on mobile for mempool faq 2023-03-24 21:22:49 -04:00
Mononaut
61f24562fd tighten sanity checks in block audit 2023-03-24 09:49:02 +09:00
Mononaut
28de93d0ff move lazy tx deletion into main loop 2023-03-24 09:48:08 +09:00
Mononaut
1fd85b729d handle stale transactions in block templates 2023-03-24 09:47:08 +09:00
softsimon
5681ae3f5c Pull from transifex 2023-03-23 22:45:07 +09:00
softsimon
9d9e0976ae Pull from transifex 2023-03-23 17:41:24 +09:00
wiz
6180837636 Merge pull request #3557 from mempool/nymkappa/hotfix-infinite-scroll
Hotfix infinite scroll (need to apply a real fix for 2.5.1)
2023-03-23 17:36:08 +09:00
wiz
17beaf7d4f Merge pull request #3555 from mempool/hunicus/raspiblitz-logo
Change raspibolt logo to raspiblitz logo
2023-03-23 17:34:01 +09:00
wiz
ce8f471b27 Merge pull request #3554 from mempool/simon/bumping-electrum-client
Bumping electrum-client
2023-03-23 17:27:31 +09:00
nymkappa
b3e36fdd99 Hotfix infinite scroll (need to apply a real fix) 2023-03-23 17:23:32 +09:00
wiz
f971ddf1fa Merge branch 'master' into hunicus/raspiblitz-logo 2023-03-23 17:22:18 +09:00
hunicus
c0c37922c3 Change raspibolt logo to raspiblitz logo 2023-03-23 03:12:44 -04:00
wiz
1eb9e58331 Merge branch 'master' into simon/bumping-electrum-client 2023-03-23 15:58:44 +09:00
wiz
f8a35a110c Merge pull request #3553 from mempool/nymkappa/electrum-retry
Reconnect to electrum an unlimited number of times every second upon disconnection
2023-03-23 15:58:37 +09:00
softsimon
c4d13fb5b7 Bumping electrum-client 2023-03-23 15:56:30 +09:00
nymkappa
53a44853b3 Reconnect to electrum an unlimited number of times every second upon disconnection 2023-03-23 15:18:48 +09:00
softsimon
29aa3617d8 Crediting Lithuanian and Danish translator 2023-03-23 14:43:03 +09:00
wiz
addf3e2521 Merge pull request #3548 from mempool/simon/updating-node-map-loading
Update channels map indexing indicator
2023-03-23 13:46:34 +09:00
softsimon
5826f8fa1e Pull from transifex 2023-03-23 00:28:17 +09:00
wiz
965d89fd91 Merge branch 'master' into simon/updating-node-map-loading 2023-03-22 17:48:09 +09:00
nymkappa
ed69591bcf Show "No data to display yet" in "Fee distribution" chart on node page when there are no channels yet 2023-03-22 14:09:30 +09:00
nymkappa
f1f6c48128 Show "No data to display yet" until we have at least two points for node stats charts 2023-03-22 13:45:27 +09:00
softsimon
f8bd062aa2 Pull from transifex 2023-03-22 13:35:16 +09:00
softsimon
77835bcb9d Restoring Preview component behavior 2023-03-22 13:20:22 +09:00
softsimon
bf5821c8c8 Remove indexing indicator 2023-03-21 23:17:09 +09:00
softsimon
a2e23014f4 Update channels map indexing indicator 2023-03-21 23:14:45 +09:00
wiz
811c14a6bd Merge pull request #3547 from mempool/simon/node-fee-chart-loading-d
Removing "d" from node fee chart loading
2023-03-21 23:10:47 +09:00
wiz
a34d87148b Merge branch 'master' into simon/node-fee-chart-loading-d 2023-03-21 23:01:19 +09:00
wiz
a45a8db479 Merge pull request #3494 from mempool/simon/difficulty-mining-css-updates
Difficulty mining css updates
2023-03-21 23:00:54 +09:00
softsimon
672f71c515 Removing "d" from node fee chart loading 2023-03-21 22:48:40 +09:00
softsimon
2c16bbb0e9 Fixing sub text height 2023-03-21 22:26:18 +09:00
softsimon
63f7709e82 Restoring size of current change 2023-03-21 22:20:14 +09:00
wiz
15b13ef4a4 Merge branch 'master' into simon/difficulty-mining-css-updates 2023-03-21 22:03:46 +09:00
wiz
75303c7a34 docker: Minor tweak to frontend entrypoint LND detection 2023-03-21 21:50:46 +09:00
softsimon
1a6048f0ab Difficulty mining css updates 2023-03-21 21:25:37 +09:00
wiz
ae6a408c05 Merge pull request #3493 from mempool/simon/update-softsimon-profile
Update softsimon profile photo
2023-03-21 20:58:11 +09:00
wiz
1015cbfa94 Merge pull request #3492 from mempool/simon/lightning-indexing-status
Lightning indexing indicators
2023-03-21 20:56:47 +09:00
wiz
876feef53f Fix frontend docker entrypoint umbrel LND detection 2023-03-21 18:11:10 +09:00
softsimon
f5f0329d39 Update softsimon profile photo 2023-03-21 18:03:19 +09:00
wiz
80a7b6d8d5 Merge branch 'master' into simon/lightning-indexing-status 2023-03-21 18:02:28 +09:00
wiz
f72e17c12e Merge pull request #3491 from mempool/simon/audit-off-hide-health
Audit disabled related UX fixes
2023-03-21 18:02:24 +09:00
wiz
f570b2762f Merge branch 'master' into simon/audit-off-hide-health 2023-03-21 17:43:56 +09:00
wiz
e2fda99578 Merge pull request #3490 from mempool/simon/auto-disable-ln-on-macaroon-fail
Auto disable LN on macaroon fail
2023-03-21 17:43:12 +09:00
softsimon
d7d45146c8 Lightning indexing indicators
refs #2647
2023-03-21 17:33:14 +09:00
softsimon
45dbc6c6f6 Update logger network after modifying config 2023-03-21 16:21:11 +09:00
softsimon
d76e3a5939 Audit disabled related UX fixes 2023-03-21 16:02:46 +09:00
wiz
cb8fdb5e8d Hack docker frontend entrypoint to auto-enable lightning 2023-03-21 15:57:22 +09:00
softsimon
d337bf3ee2 Turn off LN if Macaroon is missing 2023-03-21 15:52:41 +09:00
softsimon
758e4d4f4c Disable LN on macaroon fail 2023-03-21 15:49:38 +09:00
wiz
ccab8b16bf Merge branch 'master' into ops/esplora-unix-sockets 2023-03-21 14:29:06 +09:00
softsimon
cd2bda4b49 Pull russian from transifex 2023-03-20 21:44:47 +09:00
wiz
493ea0641d Merge pull request #3487 from mempool/simon/catch-unhandled-lnd-axios-request
Catch exceptions in Lightning stats
2023-03-20 21:42:47 +09:00
wiz
7970f4ae88 ops: Use unix sockets to query esplora from nginx 2023-03-13 16:35:27 +09:00
Mononaut
96a41400f4 Add axios support for esplora unix sockets 2023-03-13 14:53:44 +09:00
hunicus
6cc2f20638 Use href for enterprise links 2023-03-12 04:39:57 -04:00
wiz
730c1ae2d7 Merge branch 'master' into hunicus/manual-deployment-enterprise 2023-03-12 16:57:49 +09:00
hunicus
f493da4eac Generalize faq from linux to any server 2023-03-04 18:24:02 +09:00
hunicus
e7ad857cc9 Specify manual deployment support for enterprise sponsors [readme] 2023-03-04 18:24:02 +09:00
hunicus
f968faeaf9 Specify manual deployment support for enterprise sponsors [faq] 2023-03-04 18:24:02 +09:00
386 changed files with 31543 additions and 13862 deletions


@@ -7,7 +7,8 @@ updates:
open-pull-requests-limit: 10
ignore:
- dependency-name: "*"
update-types: ["version-update:semver-major"]
update-types:
["version-update:semver-major", "version-update:semver-patch"]
allow:
- dependency-type: "production"
@@ -18,7 +19,8 @@ updates:
open-pull-requests-limit: 10
ignore:
- dependency-name: "*"
update-types: ["version-update:semver-major"]
update-types:
["version-update:semver-major", "version-update:semver-patch"]
allow:
- dependency-type: "production"
@@ -28,7 +30,8 @@ updates:
interval: weekly
ignore:
- dependency-name: "*"
update-types: ["version-update:semver-major"]
update-types:
["version-update:semver-major", "version-update:semver-patch"]
- package-ecosystem: docker
directory: "/docker/frontend"
@@ -36,7 +39,8 @@ updates:
interval: weekly
ignore:
- dependency-name: "*"
update-types: ["version-update:semver-major"]
update-types:
["version-update:semver-major", "version-update:semver-patch"]
- package-ecosystem: "github-actions"
directory: "/"
@@ -44,4 +48,5 @@ updates:
interval: weekly
ignore:
- dependency-name: "*"
update-types: ["version-update:semver-major"]
update-types:
["version-update:semver-major", "version-update:semver-patch"]


@@ -9,7 +9,7 @@ jobs:
if: "!contains(github.event.pull_request.labels.*.name, 'ops') && !contains(github.head_ref, 'ops/')"
strategy:
matrix:
node: ["16.16.0", "18.14.1"]
node: ["16", "17", "18", "20"]
flavor: ["dev", "prod"]
fail-fast: false
runs-on: "ubuntu-latest"
@@ -27,6 +27,9 @@ jobs:
node-version: ${{ matrix.node }}
registry-url: "https://registry.npmjs.org"
- name: Install 1.70.x Rust toolchain
uses: dtolnay/rust-toolchain@1.70
- name: Install
if: ${{ matrix.flavor == 'dev'}}
run: npm ci
@@ -55,7 +58,7 @@ jobs:
if: "!contains(github.event.pull_request.labels.*.name, 'ops') && !contains(github.head_ref, 'ops/')"
strategy:
matrix:
node: ["16.16.0", "18.14.1"]
node: ["16", "17", "18", "20"]
flavor: ["dev", "prod"]
fail-fast: false
runs-on: "ubuntu-latest"
@@ -94,3 +97,6 @@ jobs:
- name: Build
run: npm run build
working-directory: ${{ matrix.node }}/${{ matrix.flavor }}/frontend
env:
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}

.gitignore (vendored): 1 line changed

@@ -5,3 +5,4 @@ backend/mempool-config.json
*.swp
frontend/src/resources/config.template.js
frontend/src/resources/config.js
target


@@ -1,5 +1,6 @@
{
"editor.tabSize": 2,
"typescript.preferences.importModuleSpecifier": "relative",
"typescript.tsdk": "./backend/node_modules/typescript/lib"
"typescript.tsdk": "./backend/node_modules/typescript/lib",
"rust-analyzer.procMacro.ignored": { "napi-derive": ["napi"] }
}

Cargo.lock (generated, new file): 533 lines added

@@ -0,0 +1,533 @@
# This file is automatically @generated by Cargo.
# It is not intended for manual editing.
version = 3
[[package]]
name = "aho-corasick"
version = "1.0.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "67fc08ce920c31afb70f013dcce1bfc3a3195de6a228474e45e1f145b36f8d04"
dependencies = [
"memchr",
]
[[package]]
name = "autocfg"
version = "1.1.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "d468802bab17cbc0cc575e9b053f41e72aa36bfa6b7f55e3529ffa43161b97fa"
[[package]]
name = "bitflags"
version = "2.3.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "6dbe3c979c178231552ecba20214a8272df4e09f232a87aef4320cf06539aded"
[[package]]
name = "bytemuck"
version = "1.13.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "17febce684fd15d89027105661fec94afb475cb995fbc59d2865198446ba2eea"
[[package]]
name = "bytes"
version = "1.4.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "89b2fd2a0dcf38d7971e2194b6b6eebab45ae01067456a7fd93d5547a61b70be"
[[package]]
name = "cfg-if"
version = "1.0.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "baf1de4339761588bc0619e3cbc0120ee582ebb74b53b4efbf79117bd2da40fd"
[[package]]
name = "convert_case"
version = "0.6.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "ec182b0ca2f35d8fc196cf3404988fd8b8c739a4d270ff118a398feb0cbec1ca"
dependencies = [
"unicode-segmentation",
]
[[package]]
name = "ctor"
version = "0.2.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "1586fa608b1dab41f667475b4a41faec5ba680aee428bfa5de4ea520fdc6e901"
dependencies = [
"quote",
"syn 2.0.20",
]
[[package]]
name = "gbt"
version = "0.1.0"
dependencies = [
"bytemuck",
"bytes",
"napi",
"napi-build",
"napi-derive",
"priority-queue",
"tracing",
"tracing-log",
"tracing-subscriber",
]
[[package]]
name = "hashbrown"
version = "0.12.3"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "8a9ee70c43aaf417c914396645a0fa852624801b24ebb7ae78fe8272889ac888"
[[package]]
name = "hermit-abi"
version = "0.2.6"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "ee512640fe35acbfb4bb779db6f0d80704c2cacfa2e39b601ef3e3f47d1ae4c7"
dependencies = [
"libc",
]
[[package]]
name = "indexmap"
version = "1.9.3"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "bd070e393353796e801d209ad339e89596eb4c8d430d18ede6a1cced8fafbd99"
dependencies = [
"autocfg",
"hashbrown",
]
[[package]]
name = "lazy_static"
version = "1.4.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "e2abad23fbc42b3700f2f279844dc832adb2b2eb069b2df918f455c4e18cc646"
[[package]]
name = "libc"
version = "0.2.146"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "f92be4933c13fd498862a9e02a3055f8a8d9c039ce33db97306fd5a6caa7f29b"
[[package]]
name = "libloading"
version = "0.7.4"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "b67380fd3b2fbe7527a606e18729d21c6f3951633d0500574c4dc22d2d638b9f"
dependencies = [
"cfg-if",
"winapi",
]
[[package]]
name = "log"
version = "0.4.19"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "b06a4cde4c0f271a446782e3eff8de789548ce57dbc8eca9292c27f4a42004b4"
[[package]]
name = "matchers"
version = "0.1.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "8263075bb86c5a1b1427b5ae862e8889656f126e9f77c484496e8b47cf5c5558"
dependencies = [
"regex-automata",
]
[[package]]
name = "memchr"
version = "2.5.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "2dffe52ecf27772e601905b7522cb4ef790d2cc203488bbd0e2fe85fcb74566d"
[[package]]
name = "napi"
version = "2.13.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "0ede2d12cd6fce44da537a4be1f5510c73be2506c2e32dfaaafd1f36968f3a0e"
dependencies = [
"bitflags",
"ctor",
"napi-derive",
"napi-sys",
"once_cell",
"tokio",
]
[[package]]
name = "napi-build"
version = "2.0.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "882a73d9ef23e8dc2ebbffb6a6ae2ef467c0f18ac10711e4cc59c5485d41df0e"
[[package]]
name = "napi-derive"
version = "2.13.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "da1c6a8fa84d549aa8708fcd062372bf8ec6e849de39016ab921067d21bde367"
dependencies = [
"cfg-if",
"convert_case",
"napi-derive-backend",
"proc-macro2",
"quote",
"syn 1.0.109",
]
[[package]]
name = "napi-derive-backend"
version = "1.0.52"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "20bbc7c69168d06a848f925ec5f0e0997f98e8c8d4f2cc30157f0da51c009e17"
dependencies = [
"convert_case",
"once_cell",
"proc-macro2",
"quote",
"regex",
"semver",
"syn 1.0.109",
]
[[package]]
name = "napi-sys"
version = "2.2.3"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "166b5ef52a3ab5575047a9fe8d4a030cdd0f63c96f071cd6907674453b07bae3"
dependencies = [
"libloading",
]
[[package]]
name = "nu-ansi-term"
version = "0.46.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "77a8165726e8236064dbb45459242600304b42a5ea24ee2948e18e023bf7ba84"
dependencies = [
"overload",
"winapi",
]
[[package]]
name = "num_cpus"
version = "1.15.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "0fac9e2da13b5eb447a6ce3d392f23a29d8694bff781bf03a16cd9ac8697593b"
dependencies = [
"hermit-abi",
"libc",
]
[[package]]
name = "once_cell"
version = "1.18.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "dd8b5dd2ae5ed71462c540258bedcb51965123ad7e7ccf4b9a8cafaa4a63576d"
[[package]]
name = "overload"
version = "0.1.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "b15813163c1d831bf4a13c3610c05c0d03b39feb07f7e09fa234dac9b15aaf39"
[[package]]
name = "pin-project-lite"
version = "0.2.9"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "e0a7ae3ac2f1173085d398531c705756c94a4c56843785df85a60c1a0afac116"
[[package]]
name = "priority-queue"
version = "1.3.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "fff39edfcaec0d64e8d0da38564fad195d2d51b680940295fcc307366e101e61"
dependencies = [
"autocfg",
"indexmap",
]
[[package]]
name = "proc-macro2"
version = "1.0.60"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "dec2b086b7a862cf4de201096214fa870344cf922b2b30c167badb3af3195406"
dependencies = [
"unicode-ident",
]
[[package]]
name = "quote"
version = "1.0.28"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "1b9ab9c7eadfd8df19006f1cf1a4aed13540ed5cbc047010ece5826e10825488"
dependencies = [
"proc-macro2",
]
[[package]]
name = "regex"
version = "1.8.3"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "81ca098a9821bd52d6b24fd8b10bd081f47d39c22778cafaa75a2857a62c6390"
dependencies = [
"aho-corasick",
"memchr",
"regex-syntax 0.7.2",
]
[[package]]
name = "regex-automata"
version = "0.1.10"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "6c230d73fb8d8c1b9c0b3135c5142a8acee3a0558fb8db5cf1cb65f8d7862132"
dependencies = [
"regex-syntax 0.6.29",
]
[[package]]
name = "regex-syntax"
version = "0.6.29"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "f162c6dd7b008981e4d40210aca20b4bd0f9b60ca9271061b07f78537722f2e1"
[[package]]
name = "regex-syntax"
version = "0.7.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "436b050e76ed2903236f032a59761c1eb99e1b0aead2c257922771dab1fc8c78"
[[package]]
name = "semver"
version = "1.0.17"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "bebd363326d05ec3e2f532ab7660680f3b02130d780c299bca73469d521bc0ed"
[[package]]
name = "sharded-slab"
version = "0.1.4"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "900fba806f70c630b0a382d0d825e17a0f19fcd059a2ade1ff237bcddf446b31"
dependencies = [
"lazy_static",
]
[[package]]
name = "smallvec"
version = "1.10.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "a507befe795404456341dfab10cef66ead4c041f62b8b11bbb92bffe5d0953e0"
[[package]]
name = "syn"
version = "1.0.109"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "72b64191b275b66ffe2469e8af2c1cfe3bafa67b529ead792a6d0160888b4237"
dependencies = [
"proc-macro2",
"quote",
"unicode-ident",
]
[[package]]
name = "syn"
version = "2.0.20"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "fcb8d4cebc40aa517dfb69618fa647a346562e67228e2236ae0042ee6ac14775"
dependencies = [
"proc-macro2",
"quote",
"unicode-ident",
]
[[package]]
name = "thread_local"
version = "1.1.7"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "3fdd6f064ccff2d6567adcb3873ca630700f00b5ad3f060c25b5dcfd9a4ce152"
dependencies = [
"cfg-if",
"once_cell",
]
[[package]]
name = "tokio"
version = "1.28.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "94d7b1cfd2aa4011f2de74c2c4c63665e27a71006b0a192dcd2710272e73dfa2"
dependencies = [
"autocfg",
"num_cpus",
"pin-project-lite",
"windows-sys",
]
[[package]]
name = "tracing"
version = "0.1.37"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "8ce8c33a8d48bd45d624a6e523445fd21ec13d3653cd51f681abf67418f54eb8"
dependencies = [
"cfg-if",
"pin-project-lite",
"tracing-attributes",
"tracing-core",
]
[[package]]
name = "tracing-attributes"
version = "0.1.26"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "5f4f31f56159e98206da9efd823404b79b6ef3143b4a7ab76e67b1751b25a4ab"
dependencies = [
"proc-macro2",
"quote",
"syn 2.0.20",
]
[[package]]
name = "tracing-core"
version = "0.1.31"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "0955b8137a1df6f1a2e9a37d8a6656291ff0297c1a97c24e0d8425fe2312f79a"
dependencies = [
"once_cell",
"valuable",
]
[[package]]
name = "tracing-log"
version = "0.1.3"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "78ddad33d2d10b1ed7eb9d1f518a5674713876e97e5bb9b7345a7984fbb4f922"
dependencies = [
"lazy_static",
"log",
"tracing-core",
]
[[package]]
name = "tracing-subscriber"
version = "0.3.17"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "30a651bc37f915e81f087d86e62a18eec5f79550c7faff886f7090b4ea757c77"
dependencies = [
"matchers",
"nu-ansi-term",
"once_cell",
"regex",
"sharded-slab",
"smallvec",
"thread_local",
"tracing",
"tracing-core",
"tracing-log",
]
[[package]]
name = "unicode-ident"
version = "1.0.9"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "b15811caf2415fb889178633e7724bad2509101cde276048e013b9def5e51fa0"
[[package]]
name = "unicode-segmentation"
version = "1.10.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "1dd624098567895118886609431a7c3b8f516e41d30e0643f03d94592a147e36"
[[package]]
name = "valuable"
version = "0.1.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "830b7e5d4d90034032940e4ace0d9a9a057e7a45cd94e6c007832e39edb82f6d"
[[package]]
name = "winapi"
version = "0.3.9"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "5c839a674fcd7a98952e593242ea400abe93992746761e38641405d28b00f419"
dependencies = [
"winapi-i686-pc-windows-gnu",
"winapi-x86_64-pc-windows-gnu",
]
[[package]]
name = "winapi-i686-pc-windows-gnu"
version = "0.4.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "ac3b87c63620426dd9b991e5ce0329eff545bccbbb34f3be09ff6fb6ab51b7b6"
[[package]]
name = "winapi-x86_64-pc-windows-gnu"
version = "0.4.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "712e227841d057c1ee1cd2fb22fa7e5a5461ae8e48fa2ca79ec42cfc1931183f"
[[package]]
name = "windows-sys"
version = "0.48.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "677d2418bec65e3338edb076e806bc1ec15693c5d0104683f2efe857f61056a9"
dependencies = [
"windows-targets",
]
[[package]]
name = "windows-targets"
version = "0.48.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "7b1eb6f0cd7c80c79759c929114ef071b87354ce476d9d94271031c0497adfd5"
dependencies = [
"windows_aarch64_gnullvm",
"windows_aarch64_msvc",
"windows_i686_gnu",
"windows_i686_msvc",
"windows_x86_64_gnu",
"windows_x86_64_gnullvm",
"windows_x86_64_msvc",
]
[[package]]
name = "windows_aarch64_gnullvm"
version = "0.48.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "91ae572e1b79dba883e0d315474df7305d12f569b400fcf90581b06062f7e1bc"
[[package]]
name = "windows_aarch64_msvc"
version = "0.48.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "b2ef27e0d7bdfcfc7b868b317c1d32c641a6fe4629c171b8928c7b08d98d7cf3"
[[package]]
name = "windows_i686_gnu"
version = "0.48.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "622a1962a7db830d6fd0a69683c80a18fda201879f0f447f065a3b7467daa241"
[[package]]
name = "windows_i686_msvc"
version = "0.48.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "4542c6e364ce21bf45d69fdd2a8e455fa38d316158cfd43b3ac1c5b1b19f8e00"
[[package]]
name = "windows_x86_64_gnu"
version = "0.48.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "ca2b8a661f7628cbd23440e50b05d705db3686f894fc9580820623656af974b1"
[[package]]
name = "windows_x86_64_gnullvm"
version = "0.48.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "7896dbc1f41e08872e9d5e8f8baa8fdd2677f29468c4e156210174edc7f7b953"
[[package]]
name = "windows_x86_64_msvc"
version = "0.48.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "1a515f5799fe4961cb532f983ce2b23082366b898e52ffbce459c86f67c8378a"

Cargo.toml (new file): 8 lines added

@@ -0,0 +1,8 @@
[workspace]
members = [
"./backend/rust-gbt",
]
[profile.release]
lto = true
codegen-units = 1

backend/.dockerignore (new file): 1 line added

@@ -0,0 +1 @@
Dockerfile


@@ -2,7 +2,7 @@
These instructions are mostly intended for developers.
If you choose to use these instructions for a production setup, be aware that you will still probably need to do additional configuration for your specific OS, environment, use-case, etc. We do our best here to provide a good starting point, but only proceed if you know what you're doing. Mempool does not provide support for custom setups.
If you choose to use these instructions for a production setup, be aware that you will still probably need to do additional configuration for your specific OS, environment, use-case, etc. We do our best here to provide a good starting point, but only proceed if you know what you're doing. Mempool only provides support for custom setups to [enterprise sponsors](https://mempool.space/enterprise).
See other ways to set up Mempool on [the main README](/../../#installation-methods).
@@ -79,6 +79,8 @@ Query OK, 0 rows affected (0.00 sec)
_Make sure to use Node.js 16.10 and npm 7._
_The build process requires [Rust](https://www.rust-lang.org/tools/install) to be installed._
Install dependencies with `npm` and build the backend:
```


@@ -27,8 +27,11 @@
"AUDIT": false,
"ADVANCED_GBT_AUDIT": false,
"ADVANCED_GBT_MEMPOOL": false,
"RUST_GBT": false,
"CPFP_INDEXING": false,
"DISK_CACHE_BLOCK_INTERVAL": 6
"DISK_CACHE_BLOCK_INTERVAL": 6,
"MAX_PUSH_TX_SIZE_WEIGHT": 4000000,
"ALLOW_UNREACHABLE": true
},
"CORE_RPC": {
"HOST": "127.0.0.1",
@@ -43,7 +46,9 @@
"TLS_ENABLED": true
},
"ESPLORA": {
"REST_API_URL": "http://127.0.0.1:3000"
"REST_API_URL": "http://127.0.0.1:3000",
"UNIX_SOCKET_PATH": "/tmp/esplora-bitcoin-mainnet",
"RETRY_UNIX_SOCKET_AFTER": 30000
},
"SECOND_CORE_RPC": {
"HOST": "127.0.0.1",
@@ -59,7 +64,8 @@
"SOCKET": "/var/run/mysql/mysql.sock",
"DATABASE": "mempool",
"USERNAME": "mempool",
"PASSWORD": "mempool"
"PASSWORD": "mempool",
"TIMEOUT": 180000
},
"SYSLOG": {
"ENABLED": true,
@@ -119,5 +125,16 @@
"LIQUID_ONION": "http://liquidmom47f6s3m53ebfxn47p76a6tlnxib3wp6deux7wuzotdr6cyd.onion/api/v1",
"BISQ_URL": "https://bisq.markets/api",
"BISQ_ONION": "http://bisqmktse2cabavbr2xjq7xw3h6g5ottemo5rolfcwt6aly6tp5fdryd.onion/api"
},
"REPLICATION": {
"ENABLED": false,
"AUDIT": false,
"AUDIT_START_HEIGHT": 774000,
"SERVERS": [
"list",
"of",
"trusted",
"servers"
]
}
}
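
The new `ESPLORA` keys above pair the REST URL with an optional unix socket path and a retry delay. As a rough sketch only, not the actual backend implementation, the unix-socket-to-TCP fallback referenced in the "[esplora] fallback to tcp socket if unix socket fails" commits could be wired up along these lines (the `queryEsplora` helper and its retry behavior are assumptions):

```typescript
import axios, { AxiosRequestConfig } from 'axios';

// Hypothetical shape mirroring the ESPLORA block added in the sample config above.
interface EsploraConfig {
  REST_API_URL: string;            // e.g. "http://127.0.0.1:3000"
  UNIX_SOCKET_PATH: string | null; // e.g. "/tmp/esplora-bitcoin-mainnet"
  RETRY_UNIX_SOCKET_AFTER: number; // ms to wait before retrying the unix socket
}

let useUnixSocket = true;

async function queryEsplora<T>(config: EsploraConfig, path: string): Promise<T> {
  const request: AxiosRequestConfig = useUnixSocket && config.UNIX_SOCKET_PATH
    ? { socketPath: config.UNIX_SOCKET_PATH, url: path }  // query esplora over the unix socket
    : { url: `${config.REST_API_URL}${path}` };           // plain TCP fallback

  try {
    return (await axios.request<T>(request)).data;
  } catch (e) {
    if (useUnixSocket) {
      // The socket failed: fall back to TCP now, and retry the socket later.
      useUnixSocket = false;
      setTimeout(() => { useUnixSocket = true; }, config.RETRY_UNIX_SOCKET_AFTER);
      return queryEsplora<T>(config, path);
    }
    throw e;
  }
}
```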

backend/package-lock.json (generated): 7121 lines changed (file diff suppressed because it is too large)


@@ -1,6 +1,6 @@
{
"name": "mempool-backend",
"version": "2.5.0-dev",
"version": "3.0.0-dev",
"description": "Bitcoin mempool visualizer and blockchain explorer backend",
"license": "GNU Affero General Public License v3.0",
"homepage": "https://mempool.space",
@@ -22,47 +22,50 @@
"main": "index.ts",
"scripts": {
"tsc": "./node_modules/typescript/bin/tsc -p tsconfig.build.json",
"build": "npm run tsc && npm run create-resources",
"build": "npm run build-rust && npm run tsc && npm run create-resources",
"create-resources": "cp ./src/tasks/price-feeds/mtgox-weekly.json ./dist/tasks && node dist/api/fetch-version.js",
"package": "npm run build && rm -rf package && mv dist package && mv node_modules package && npm run package-rm-build-deps",
"package-rm-build-deps": "(cd package/node_modules; rm -r typescript @typescript-eslint)",
"package": "npm run build && rm -rf package && mv dist package && mv node_modules package && mv rust-gbt package && npm run package-rm-build-deps",
"package-rm-build-deps": "(cd package/node_modules; rm -r typescript @typescript-eslint @napi-rs ../rust-gbt/target ../rust-gbt/node_modules ../rust-gbt/src)",
"start": "node --max-old-space-size=2048 dist/index.js",
"start-production": "node --max-old-space-size=16384 dist/index.js",
"reindex-updated-pools": "npm run start-production --update-pools",
"reindex-all-blocks": "npm run start-production --update-pools --reindex-blocks",
"test": "./node_modules/.bin/jest --coverage",
"lint": "./node_modules/.bin/eslint . --ext .ts",
"lint:fix": "./node_modules/.bin/eslint . --ext .ts --fix",
"prettier": "./node_modules/.bin/prettier --write \"src/**/*.{js,ts}\""
"prettier": "./node_modules/.bin/prettier --write \"src/**/*.{js,ts}\"",
"build-rust": "cd rust-gbt && npm install"
},
"dependencies": {
"@babel/core": "^7.20.12",
"@mempool/electrum-client": "^1.1.7",
"@types/node": "^16.18.11",
"axios": "~0.27.2",
"bitcoinjs-lib": "~6.1.0",
"@babel/core": "^7.21.3",
"@mempool/electrum-client": "1.1.9",
"@types/node": "^18.15.3",
"axios": "~1.4.0",
"bitcoinjs-lib": "~6.1.3",
"crypto-js": "~4.1.1",
"express": "~4.18.2",
"maxmind": "~4.3.8",
"mysql2": "~2.3.3",
"node-worker-threads-pool": "~1.5.1",
"maxmind": "~4.3.11",
"mysql2": "~3.5.2",
"rust-gbt": "file:./rust-gbt",
"socks-proxy-agent": "~7.0.0",
"typescript": "~4.7.4",
"ws": "~8.11.0"
"typescript": "~4.9.3",
"ws": "~8.13.0"
},
"devDependencies": {
"@babel/core": "^7.20.7",
"@babel/core": "^7.21.3",
"@babel/code-frame": "^7.18.6",
"@types/compression": "^1.7.2",
"@types/crypto-js": "^4.1.1",
"@types/express": "^4.17.15",
"@types/jest": "^29.2.5",
"@types/ws": "~8.5.4",
"@typescript-eslint/eslint-plugin": "^5.48.1",
"@typescript-eslint/parser": "^5.48.1",
"eslint": "^8.31.0",
"eslint-config-prettier": "^8.5.0",
"jest": "^29.3.1",
"prettier": "^2.8.2",
"ts-jest": "^29.0.3",
"@types/express": "^4.17.17",
"@types/jest": "^29.5.0",
"@types/ws": "~8.5.5",
"@typescript-eslint/eslint-plugin": "^5.55.0",
"@typescript-eslint/parser": "^5.55.0",
"eslint": "^8.36.0",
"eslint-config-prettier": "^8.8.0",
"jest": "^29.5.0",
"prettier": "^3.0.0",
"ts-jest": "^29.1.1",
"ts-node": "^10.9.1"
}
}

backend/rust-gbt/.gitignore (vendored, new file): 4 lines added

@@ -0,0 +1,4 @@
*.node
**/node_modules
**/.DS_Store
npm-debug.log*


@@ -0,0 +1,25 @@
[package]
name = "gbt"
version = "0.1.0"
description = "An inefficient re-implementation of the getBlockTemplate algorithm in Rust"
authors = ["mononaut"]
edition = "2021"
publish = false
[lib]
crate-type = ["cdylib"]
# See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html
[dependencies]
priority-queue = "1.3.2"
bytes = "1.4.0"
napi = { version = "2.13.2", features = ["napi8", "tokio_rt"] }
napi-derive = "2.13.0"
bytemuck = "1.13.1"
tracing = "0.1.36"
tracing-log = "0.1.3"
tracing-subscriber = { version = "0.3.15", features = ["env-filter"]}
[build-dependencies]
napi-build = "2.0.1"

backend/rust-gbt/README.md (new file): 123 lines added

@@ -0,0 +1,123 @@
# gbt
**gbt:** rust implementation of the getBlockTemplate algorithm
This project was bootstrapped by [napi](https://www.npmjs.com/package/@napi-rs/cli).
## Installing gbt
Installing gbt requires a [supported version of Node and Rust](https://github.com/napi-rs/napi-rs#platform-support).
The build process also requires [Rust](https://www.rust-lang.org/tools/install) to be installed.
You can install the project with npm. In the project directory, run:
```sh
$ npm install
```
This fully installs the project, including installing any dependencies and running the build.
## Building gbt
If you have already installed the project and only want to run the build, run:
```sh
$ npm run build
```
This command uses the [napi build](https://www.npmjs.com/package/@napi-rs/cli) utility to run the Rust build and copy the built library into `./gbt.[TARGET_TRIPLE].node`.
## Exploring gbt
After building gbt, you can explore its exports at the Node REPL:
```sh
$ npm install
$ node
> require('.').hello()
"hello node"
```
## Available Scripts
In the project directory, you can run:
### `npm install`
Installs the project, including running `npm run build-release`.
### `npm build`
Builds the Node addon (`gbt.[TARGET_TRIPLE].node`) from source.
Additional [`cargo build`](https://doc.rust-lang.org/cargo/commands/cargo-build.html) arguments may be passed to `npm build` and `npm build-*` commands. For example, to enable a [cargo feature](https://doc.rust-lang.org/cargo/reference/features.html):
```
npm run build -- --feature=beetle
```
#### `npm build-debug`
Alias for `npm build`.
#### `npm build-release`
Same as [`npm build`](#npm-build), but builds the module with the [`release`](https://doc.rust-lang.org/cargo/reference/profiles.html#release) profile. Release builds compile more slowly but run faster.
### `npm test`
Runs the unit tests by calling `cargo test`. You can learn more about [adding tests to your Rust code](https://doc.rust-lang.org/book/ch11-01-writing-tests.html) from the [Rust book](https://doc.rust-lang.org/book/).
## Project Layout
The directory structure of this project is:
```
gbt/
├── Cargo.toml
├── README.md
├── gbt.[TARGET_TRIPLE].node
├── package.json
├── src/
| └── lib.rs
└── target/
```
### Cargo.toml
The Cargo [manifest file](https://doc.rust-lang.org/cargo/reference/manifest.html), which informs the `cargo` command.
### README.md
This file.
### gbt.\[TARGET_TRIPLE\].node
The Node addon—i.e., a binary Node module—generated by building the project. This is the main module for this package, as dictated by the `"main"` key in `package.json`.
Under the hood, a [Node addon](https://nodejs.org/api/addons.html) is a [dynamically-linked shared object](https://en.wikipedia.org/wiki/Library_(computing)#Shared_libraries). The `"build"` script produces this file by copying it from within the `target/` directory, which is where the Rust build produces the shared object.
### package.json
The npm [manifest file](https://docs.npmjs.com/cli/v7/configuring-npm/package-json), which informs the `npm` command.
### src/
The directory tree containing the Rust source code for the project.
### src/lib.rs
The Rust library's main module.
### target/
Binary artifacts generated by the Rust build.
## Learn More
To learn more about Napi-RS, see the [Napi-RS documentation](https://napi.rs/docs/introduction/getting-started).
To learn more about Rust, see the [Rust documentation](https://www.rust-lang.org).
To learn more about Node, see the [Node documentation](https://nodejs.org).
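One caveat on the "Exploring gbt" example above: the `hello()` export comes from the napi bootstrap template. This crate's actual exports (see `index.d.ts` and `index.js` below) are `GbtGenerator` and `GbtResult`, so a quick check would look more like this sketch:
```ts
// Sketch only: assumes the addon has already been built, so index.js can resolve the .node binary.
const { GbtGenerator, GbtResult } = require('.');
const generator = new GbtGenerator(); // empty generator backed by the Rust module
console.log(typeof GbtResult);        // "function"
```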

backend/rust-gbt/build.rs Normal file

@@ -0,0 +1,3 @@
fn main() {
napi_build::setup();
}

45
backend/rust-gbt/index.d.ts vendored Normal file

@@ -0,0 +1,45 @@
/* tslint:disable */
/* eslint-disable */
/* auto-generated by NAPI-RS */
export interface ThreadTransaction {
uid: number
order: number
fee: number
weight: number
sigops: number
effectiveFeePerVsize: number
inputs: Array<number>
}
export class GbtGenerator {
constructor()
/**
* # Errors
*
* Rejects if the thread panics or if the Mutex is poisoned.
*/
make(mempool: Array<ThreadTransaction>, maxUid: number): Promise<GbtResult>
/**
* # Errors
*
* Rejects if the thread panics or if the Mutex is poisoned.
*/
update(newTxs: Array<ThreadTransaction>, removeTxs: Array<number>, maxUid: number): Promise<GbtResult>
}
/**
* The result from calling the gbt function.
*
* This tuple contains the following:
* blocks: A 2D Vector of transaction IDs (u32), the inner Vecs each represent a block.
* block_weights: A Vector of total weights per block.
* clusters: A 2D Vector of transaction IDs representing clusters of dependent mempool transactions
* rates: A Vector of tuples containing transaction IDs (u32) and effective fee per vsize (f64)
*/
export class GbtResult {
blocks: Array<Array<number>>
blockWeights: Array<number>
clusters: Array<Array<number>>
rates: Array<Array<number>>
constructor(blocks: Array<Array<number>>, blockWeights: Array<number>, clusters: Array<Array<number>>, rates: Array<Array<number>>)
}
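Taken together, these declarations are the whole public surface of the addon. A minimal usage sketch based on them (the import path and all transaction values below are made up; it assumes the native binary has been built):
```ts
// Minimal sketch of driving the addon from TypeScript, based on the declarations above.
import { GbtGenerator, ThreadTransaction, GbtResult } from './rust-gbt'; // hypothetical path

async function demo(): Promise<void> {
  const generator = new GbtGenerator();
  const mempool: ThreadTransaction[] = [
    // a parent transaction and a child spending it; uids, fees and weights are invented
    { uid: 1, order: 10, fee: 1000, weight: 800, sigops: 1, effectiveFeePerVsize: 5,  inputs: [] },
    { uid: 2, order: 20, fee: 4000, weight: 800, sigops: 1, effectiveFeePerVsize: 20, inputs: [1] },
  ];
  const result: GbtResult = await generator.make(mempool, 2);
  console.log(result.blocks);   // one inner array of uids per projected block
  console.log(result.clusters); // clusters of dependent transactions, e.g. [[1, 2]]
  // Subsequent mempool deltas go through update(): new/replacement txs plus uids to remove.
  await generator.update([], [2], 2);
}
```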

258
backend/rust-gbt/index.js Normal file

@@ -0,0 +1,258 @@
/* tslint:disable */
/* eslint-disable */
/* prettier-ignore */
/* auto-generated by NAPI-RS */
const { existsSync, readFileSync } = require('fs')
const { join } = require('path')
const { platform, arch } = process
let nativeBinding = null
let localFileExisted = false
let loadError = null
function isMusl() {
// For Node 10
if (!process.report || typeof process.report.getReport !== 'function') {
try {
const lddPath = require('child_process').execSync('which ldd').toString().trim()
return readFileSync(lddPath, 'utf8').includes('musl')
} catch (e) {
return true
}
} else {
const { glibcVersionRuntime } = process.report.getReport().header
return !glibcVersionRuntime
}
}
switch (platform) {
case 'android':
switch (arch) {
case 'arm64':
localFileExisted = existsSync(join(__dirname, 'gbt.android-arm64.node'))
try {
if (localFileExisted) {
nativeBinding = require('./gbt.android-arm64.node')
} else {
nativeBinding = require('gbt-android-arm64')
}
} catch (e) {
loadError = e
}
break
case 'arm':
localFileExisted = existsSync(join(__dirname, 'gbt.android-arm-eabi.node'))
try {
if (localFileExisted) {
nativeBinding = require('./gbt.android-arm-eabi.node')
} else {
nativeBinding = require('gbt-android-arm-eabi')
}
} catch (e) {
loadError = e
}
break
default:
throw new Error(`Unsupported architecture on Android ${arch}`)
}
break
case 'win32':
switch (arch) {
case 'x64':
localFileExisted = existsSync(
join(__dirname, 'gbt.win32-x64-msvc.node')
)
try {
if (localFileExisted) {
nativeBinding = require('./gbt.win32-x64-msvc.node')
} else {
nativeBinding = require('gbt-win32-x64-msvc')
}
} catch (e) {
loadError = e
}
break
case 'ia32':
localFileExisted = existsSync(
join(__dirname, 'gbt.win32-ia32-msvc.node')
)
try {
if (localFileExisted) {
nativeBinding = require('./gbt.win32-ia32-msvc.node')
} else {
nativeBinding = require('gbt-win32-ia32-msvc')
}
} catch (e) {
loadError = e
}
break
case 'arm64':
localFileExisted = existsSync(
join(__dirname, 'gbt.win32-arm64-msvc.node')
)
try {
if (localFileExisted) {
nativeBinding = require('./gbt.win32-arm64-msvc.node')
} else {
nativeBinding = require('gbt-win32-arm64-msvc')
}
} catch (e) {
loadError = e
}
break
default:
throw new Error(`Unsupported architecture on Windows: ${arch}`)
}
break
case 'darwin':
localFileExisted = existsSync(join(__dirname, 'gbt.darwin-universal.node'))
try {
if (localFileExisted) {
nativeBinding = require('./gbt.darwin-universal.node')
} else {
nativeBinding = require('gbt-darwin-universal')
}
break
} catch {}
switch (arch) {
case 'x64':
localFileExisted = existsSync(join(__dirname, 'gbt.darwin-x64.node'))
try {
if (localFileExisted) {
nativeBinding = require('./gbt.darwin-x64.node')
} else {
nativeBinding = require('gbt-darwin-x64')
}
} catch (e) {
loadError = e
}
break
case 'arm64':
localFileExisted = existsSync(
join(__dirname, 'gbt.darwin-arm64.node')
)
try {
if (localFileExisted) {
nativeBinding = require('./gbt.darwin-arm64.node')
} else {
nativeBinding = require('gbt-darwin-arm64')
}
} catch (e) {
loadError = e
}
break
default:
throw new Error(`Unsupported architecture on macOS: ${arch}`)
}
break
case 'freebsd':
if (arch !== 'x64') {
throw new Error(`Unsupported architecture on FreeBSD: ${arch}`)
}
localFileExisted = existsSync(join(__dirname, 'gbt.freebsd-x64.node'))
try {
if (localFileExisted) {
nativeBinding = require('./gbt.freebsd-x64.node')
} else {
nativeBinding = require('gbt-freebsd-x64')
}
} catch (e) {
loadError = e
}
break
case 'linux':
switch (arch) {
case 'x64':
if (isMusl()) {
localFileExisted = existsSync(
join(__dirname, 'gbt.linux-x64-musl.node')
)
try {
if (localFileExisted) {
nativeBinding = require('./gbt.linux-x64-musl.node')
} else {
nativeBinding = require('gbt-linux-x64-musl')
}
} catch (e) {
loadError = e
}
} else {
localFileExisted = existsSync(
join(__dirname, 'gbt.linux-x64-gnu.node')
)
try {
if (localFileExisted) {
nativeBinding = require('./gbt.linux-x64-gnu.node')
} else {
nativeBinding = require('gbt-linux-x64-gnu')
}
} catch (e) {
loadError = e
}
}
break
case 'arm64':
if (isMusl()) {
localFileExisted = existsSync(
join(__dirname, 'gbt.linux-arm64-musl.node')
)
try {
if (localFileExisted) {
nativeBinding = require('./gbt.linux-arm64-musl.node')
} else {
nativeBinding = require('gbt-linux-arm64-musl')
}
} catch (e) {
loadError = e
}
} else {
localFileExisted = existsSync(
join(__dirname, 'gbt.linux-arm64-gnu.node')
)
try {
if (localFileExisted) {
nativeBinding = require('./gbt.linux-arm64-gnu.node')
} else {
nativeBinding = require('gbt-linux-arm64-gnu')
}
} catch (e) {
loadError = e
}
}
break
case 'arm':
localFileExisted = existsSync(
join(__dirname, 'gbt.linux-arm-gnueabihf.node')
)
try {
if (localFileExisted) {
nativeBinding = require('./gbt.linux-arm-gnueabihf.node')
} else {
nativeBinding = require('gbt-linux-arm-gnueabihf')
}
} catch (e) {
loadError = e
}
break
default:
throw new Error(`Unsupported architecture on Linux: ${arch}`)
}
break
default:
throw new Error(`Unsupported OS: ${platform}, architecture: ${arch}`)
}
if (!nativeBinding) {
if (loadError) {
throw loadError
}
throw new Error(`Failed to load native binding`)
}
const { GbtGenerator, GbtResult } = nativeBinding
module.exports.GbtGenerator = GbtGenerator
module.exports.GbtResult = GbtResult

34
backend/rust-gbt/package-lock.json generated Normal file

@@ -0,0 +1,34 @@
{
"name": "gbt",
"version": "3.0.0-dev",
"lockfileVersion": 3,
"requires": true,
"packages": {
"": {
"name": "gbt",
"version": "3.0.0-dev",
"hasInstallScript": true,
"dependencies": {
"@napi-rs/cli": "^2.16.1"
},
"engines": {
"node": ">= 12"
}
},
"node_modules/@napi-rs/cli": {
"version": "2.16.1",
"resolved": "https://registry.npmjs.org/@napi-rs/cli/-/cli-2.16.1.tgz",
"integrity": "sha512-L0Gr5iEQIDEbvWdDr1HUaBOxBSHL1VZhWSk1oryawoT8qJIY+KGfLFelU+Qma64ivCPbxYpkfPoKYVG3rcoGIA==",
"bin": {
"napi": "scripts/index.js"
},
"engines": {
"node": ">= 10"
},
"funding": {
"type": "github",
"url": "https://github.com/sponsors/Brooooooklyn"
}
}
}
}

backend/rust-gbt/package.json Normal file

@@ -0,0 +1,33 @@
{
"name": "gbt",
"version": "3.0.0-dev",
"description": "An inefficient re-implementation of the getBlockTemplate algorithm in Rust",
"main": "index.js",
"types": "index.d.ts",
"scripts": {
"artifacts": "napi artifacts",
"build": "napi build --platform",
"build-debug": "npm run build",
"build-release": "npm run build -- --release --strip",
"install": "npm run build-release",
"prepublishOnly": "napi prepublish -t npm",
"test": "cargo test"
},
"author": "mononaut",
"napi": {
"name": "gbt",
"triples": {
"defaults": false,
"additional": [
"x86_64-unknown-linux-gnu",
"x86_64-unknown-freebsd"
]
}
},
"dependencies": {
"@napi-rs/cli": "^2.16.1"
},
"engines": {
"node": ">= 12"
}
}

backend/rust-gbt/src/audit_transaction.rs Normal file

@@ -0,0 +1,220 @@
use crate::{
u32_hasher_types::{u32hashset_new, U32HasherState},
ThreadTransaction,
};
use std::{
cmp::Ordering,
collections::HashSet,
hash::{Hash, Hasher},
};
#[allow(clippy::struct_excessive_bools)]
#[derive(Clone, Debug)]
pub struct AuditTransaction {
pub uid: u32,
order: u32,
pub fee: u64,
pub weight: u32,
// exact sigop-adjusted weight
pub sigop_adjusted_weight: u32,
// sigop-adjusted vsize rounded up to the next integer
pub sigop_adjusted_vsize: u32,
pub sigops: u32,
adjusted_fee_per_vsize: f64,
pub effective_fee_per_vsize: f64,
pub dependency_rate: f64,
pub inputs: Vec<u32>,
pub relatives_set_flag: bool,
pub ancestors: HashSet<u32, U32HasherState>,
pub children: HashSet<u32, U32HasherState>,
ancestor_fee: u64,
ancestor_sigop_adjusted_weight: u32,
ancestor_sigop_adjusted_vsize: u32,
ancestor_sigops: u32,
// Safety: Must be private to prevent NaN breaking Ord impl.
score: f64,
pub used: bool,
/// whether this transaction has been moved to the "modified" priority queue
pub modified: bool,
pub dirty: bool,
}
impl Hash for AuditTransaction {
fn hash<H: Hasher>(&self, state: &mut H) {
self.uid.hash(state);
}
}
impl PartialEq for AuditTransaction {
fn eq(&self, other: &Self) -> bool {
self.uid == other.uid
}
}
impl Eq for AuditTransaction {}
#[inline]
pub fn partial_cmp_uid_score(a: (u32, u32, f64), b: (u32, u32, f64)) -> Option<Ordering> {
// If either score is NaN, this is false,
// and partial_cmp will return None
if a.2 != b.2 {
// compare by score (sorts by ascending score)
a.2.partial_cmp(&b.2)
} else if a.1 != b.1 {
// tie-break by comparing partial txids (sorts by descending txid)
Some(b.1.cmp(&a.1))
} else {
// tie-break partial txid collisions by comparing uids (sorts by descending uid)
Some(b.0.cmp(&a.0))
}
}
impl PartialOrd for AuditTransaction {
fn partial_cmp(&self, other: &Self) -> Option<Ordering> {
partial_cmp_uid_score(
(self.uid, self.order, self.score),
(other.uid, other.order, other.score),
)
}
}
impl Ord for AuditTransaction {
fn cmp(&self, other: &Self) -> Ordering {
// Safety: The only possible values for score are f64
// that are not NaN. This is because outside code can not
// freely assign score. Also, calc_new_score guarantees no NaN.
self.partial_cmp(other).expect("score will never be NaN")
}
}
#[inline]
fn calc_fee_rate(fee: f64, vsize: f64) -> f64 {
fee / (if vsize == 0.0 { 1.0 } else { vsize })
}
impl AuditTransaction {
pub fn from_thread_transaction(tx: &ThreadTransaction) -> Self {
// rounded up to the nearest integer
let is_adjusted = tx.weight < (tx.sigops * 20);
let sigop_adjusted_vsize = ((tx.weight + 3) / 4).max(tx.sigops * 5);
let sigop_adjusted_weight = tx.weight.max(tx.sigops * 20);
let effective_fee_per_vsize = if is_adjusted {
calc_fee_rate(tx.fee, f64::from(sigop_adjusted_weight) / 4.0)
} else {
tx.effective_fee_per_vsize
};
Self {
uid: tx.uid,
order: tx.order,
fee: tx.fee as u64,
weight: tx.weight,
sigop_adjusted_weight,
sigop_adjusted_vsize,
sigops: tx.sigops,
adjusted_fee_per_vsize: calc_fee_rate(tx.fee, f64::from(sigop_adjusted_vsize)),
effective_fee_per_vsize,
dependency_rate: f64::INFINITY,
inputs: tx.inputs.clone(),
relatives_set_flag: false,
ancestors: u32hashset_new(),
children: u32hashset_new(),
ancestor_fee: tx.fee as u64,
ancestor_sigop_adjusted_weight: sigop_adjusted_weight,
ancestor_sigop_adjusted_vsize: sigop_adjusted_vsize,
ancestor_sigops: tx.sigops,
score: 0.0,
used: false,
modified: false,
dirty: effective_fee_per_vsize != tx.effective_fee_per_vsize,
}
}
#[inline]
pub const fn score(&self) -> f64 {
self.score
}
#[inline]
pub const fn order(&self) -> u32 {
self.order
}
#[inline]
pub const fn ancestor_sigop_adjusted_vsize(&self) -> u32 {
self.ancestor_sigop_adjusted_vsize
}
#[inline]
pub const fn ancestor_sigops(&self) -> u32 {
self.ancestor_sigops
}
#[inline]
pub fn cluster_rate(&self) -> f64 {
// Safety: self.ancestor_weight can never be 0.
// Even if it could, as it approaches 0, the value inside the min() call
// grows, so if we think of 0 as "grew infinitely" then dependency_rate would be
// the smaller of the two. If either side is NaN, the other side is returned.
self.dependency_rate.min(calc_fee_rate(
self.ancestor_fee as f64,
f64::from(self.ancestor_sigop_adjusted_weight) / 4.0,
))
}
pub fn set_dirty_if_different(&mut self, cluster_rate: f64) {
if self.effective_fee_per_vsize != cluster_rate {
self.effective_fee_per_vsize = cluster_rate;
self.dirty = true;
}
}
/// Safety: This function must NEVER set score to NaN.
#[inline]
fn calc_new_score(&mut self) {
self.score = self.adjusted_fee_per_vsize.min(calc_fee_rate(
self.ancestor_fee as f64,
f64::from(self.ancestor_sigop_adjusted_vsize),
));
}
#[inline]
pub fn set_ancestors(
&mut self,
ancestors: HashSet<u32, U32HasherState>,
total_fee: u64,
total_sigop_adjusted_weight: u32,
total_sigop_adjusted_vsize: u32,
total_sigops: u32,
) {
self.ancestors = ancestors;
self.ancestor_fee = self.fee + total_fee;
self.ancestor_sigop_adjusted_weight =
self.sigop_adjusted_weight + total_sigop_adjusted_weight;
self.ancestor_sigop_adjusted_vsize = self.sigop_adjusted_vsize + total_sigop_adjusted_vsize;
self.ancestor_sigops = self.sigops + total_sigops;
self.calc_new_score();
self.relatives_set_flag = true;
}
#[inline]
pub fn remove_root(
&mut self,
root_txid: u32,
root_fee: u64,
root_sigop_adjusted_weight: u32,
root_sigop_adjusted_vsize: u32,
root_sigops: u32,
cluster_rate: f64,
) -> f64 {
let old_score = self.score();
self.dependency_rate = self.dependency_rate.min(cluster_rate);
if self.ancestors.remove(&root_txid) {
self.ancestor_fee -= root_fee;
self.ancestor_sigop_adjusted_weight -= root_sigop_adjusted_weight;
self.ancestor_sigop_adjusted_vsize -= root_sigop_adjusted_vsize;
self.ancestor_sigops -= root_sigops;
self.calc_new_score();
}
old_score
}
}
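The sigop adjustment in `from_thread_transaction` above applies the rule used here that a transaction consumes whichever is larger: its real weight, or 20 weight units (5 vbytes) per sigop, and it recomputes the fee rate against that adjusted size. A small worked example with made-up numbers, written out in TypeScript for clarity:
```ts
// Hypothetical numbers illustrating the sigop adjustment performed above.
const weight = 400;  // 100 vbytes
const sigops = 30;
const fee = 1000;    // sats

const sigopAdjustedVsize  = Math.max(Math.ceil(weight / 4), sigops * 5);  // max(100, 150) = 150
const sigopAdjustedWeight = Math.max(weight, sigops * 20);                // max(400, 600) = 600
const isAdjusted = weight < sigops * 20;                                  // true: sigops dominate
// When adjusted, the effective fee rate is recomputed against the adjusted size:
const effectiveFeePerVsize = fee / (sigopAdjustedWeight / 4);             // 1000 / 150 ≈ 6.67 sat/vB
```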

421
backend/rust-gbt/src/gbt.rs Normal file

@@ -0,0 +1,421 @@
use priority_queue::PriorityQueue;
use std::{cmp::Ordering, collections::HashSet, mem::ManuallyDrop};
use tracing::{info, trace};
use crate::{
audit_transaction::{partial_cmp_uid_score, AuditTransaction},
u32_hasher_types::{u32hashset_new, u32priority_queue_with_capacity, U32HasherState},
GbtResult, ThreadTransactionsMap,
};
const MAX_BLOCK_WEIGHT_UNITS: u32 = 4_000_000 - 4_000;
const BLOCK_SIGOPS: u32 = 80_000;
const BLOCK_RESERVED_WEIGHT: u32 = 4_000;
const BLOCK_RESERVED_SIGOPS: u32 = 400;
const MAX_BLOCKS: usize = 8;
type AuditPool = Vec<Option<ManuallyDrop<AuditTransaction>>>;
type ModifiedQueue = PriorityQueue<u32, TxPriority, U32HasherState>;
#[derive(Debug)]
struct TxPriority {
uid: u32,
order: u32,
score: f64,
}
impl PartialEq for TxPriority {
fn eq(&self, other: &Self) -> bool {
self.uid == other.uid
}
}
impl Eq for TxPriority {}
impl PartialOrd for TxPriority {
fn partial_cmp(&self, other: &Self) -> Option<Ordering> {
partial_cmp_uid_score(
(self.uid, self.order, self.score),
(other.uid, other.order, other.score),
)
}
}
impl Ord for TxPriority {
fn cmp(&self, other: &Self) -> Ordering {
self.partial_cmp(other).expect("score will never be NaN")
}
}
/// Build projected mempool blocks using an approximation of the transaction selection algorithm from Bitcoin Core.
///
/// See `BlockAssembler` in Bitcoin Core's
/// [miner.cpp](https://github.com/bitcoin/bitcoin/blob/master/src/node/miner.cpp).
/// Ported from mempool backend's
/// [tx-selection-worker.ts](https://github.com/mempool/mempool/blob/master/backend/src/api/tx-selection-worker.ts).
//
// TODO: Make gbt smaller to fix these lints.
#[allow(clippy::too_many_lines)]
#[allow(clippy::cognitive_complexity)]
pub fn gbt(mempool: &mut ThreadTransactionsMap, max_uid: usize) -> GbtResult {
let mempool_len = mempool.len();
let mut audit_pool: AuditPool = Vec::with_capacity(max_uid + 1);
audit_pool.resize(max_uid + 1, None);
let mut mempool_stack: Vec<u32> = Vec::with_capacity(mempool_len);
let mut clusters: Vec<Vec<u32>> = Vec::new();
let mut block_weights: Vec<u32> = Vec::new();
info!("Initializing working structs");
for (uid, tx) in &mut *mempool {
let audit_tx = AuditTransaction::from_thread_transaction(tx);
// Safety: audit_pool and mempool_stack must always contain the same transactions
audit_pool[*uid as usize] = Some(ManuallyDrop::new(audit_tx));
mempool_stack.push(*uid);
}
info!("Building relatives graph & calculate ancestor scores");
for txid in &mempool_stack {
set_relatives(*txid, &mut audit_pool);
}
trace!("Post relative graph Audit Pool: {:#?}", audit_pool);
info!("Sorting by descending ancestor score");
let mut mempool_stack: Vec<(u32, u32, f64)> = mempool_stack
.into_iter()
.map(|txid| {
let atx = audit_pool
.get(txid as usize)
.and_then(Option::as_ref)
.expect("All txids are from audit_pool");
(txid, atx.order(), atx.score())
})
.collect();
mempool_stack.sort_unstable_by(|a, b| partial_cmp_uid_score(*a, *b).expect("Not NaN"));
let mut mempool_stack: Vec<u32> = mempool_stack.into_iter().map(|(txid, _, _)| txid).collect();
info!("Building blocks by greedily choosing the highest feerate package");
info!("(i.e. the package rooted in the transaction with the best ancestor score)");
let mut blocks: Vec<Vec<u32>> = Vec::new();
let mut block_weight: u32 = BLOCK_RESERVED_WEIGHT;
let mut block_sigops: u32 = BLOCK_RESERVED_SIGOPS;
// No need to be bigger than 4096 transactions for the per-block transaction Vec.
let initial_txes_per_block: usize = 4096.min(mempool_len);
let mut transactions: Vec<u32> = Vec::with_capacity(initial_txes_per_block);
let mut modified: ModifiedQueue = u32priority_queue_with_capacity(mempool_len);
let mut overflow: Vec<u32> = Vec::new();
let mut failures = 0;
while !mempool_stack.is_empty() || !modified.is_empty() {
// This trace log storm is big, so to make scrolling through
// Each iteration easier, leaving a bunch of empty rows
// And a header of ======
trace!("\n\n\n\n\n\n\n\n\n\n==================================");
trace!("mempool_array: {:#?}", mempool_stack);
trace!("clusters: {:#?}", clusters);
trace!("modified: {:#?}", modified);
trace!("audit_pool: {:#?}", audit_pool);
trace!("blocks: {:#?}", blocks);
trace!("block_weight: {:#?}", block_weight);
trace!("block_sigops: {:#?}", block_sigops);
trace!("transactions: {:#?}", transactions);
trace!("overflow: {:#?}", overflow);
trace!("failures: {:#?}", failures);
trace!("\n==================================");
let next_from_stack = next_valid_from_stack(&mut mempool_stack, &audit_pool);
let next_from_queue = next_valid_from_queue(&mut modified, &audit_pool);
if next_from_stack.is_none() && next_from_queue.is_none() {
continue;
}
let (next_tx, from_stack) = match (next_from_stack, next_from_queue) {
(Some(stack_tx), Some(queue_tx)) => match queue_tx.cmp(stack_tx) {
std::cmp::Ordering::Less => (stack_tx, true),
_ => (queue_tx, false),
},
(Some(stack_tx), None) => (stack_tx, true),
(None, Some(queue_tx)) => (queue_tx, false),
(None, None) => unreachable!(),
};
if from_stack {
mempool_stack.pop();
} else {
modified.pop();
}
if blocks.len() < (MAX_BLOCKS - 1)
&& ((block_weight + (4 * next_tx.ancestor_sigop_adjusted_vsize())
>= MAX_BLOCK_WEIGHT_UNITS)
|| (block_sigops + next_tx.ancestor_sigops() > BLOCK_SIGOPS))
{
// hold this package in an overflow list while we check for smaller options
overflow.push(next_tx.uid);
failures += 1;
} else {
let mut package: Vec<(u32, u32, usize)> = Vec::new();
let mut cluster: Vec<u32> = Vec::new();
let is_cluster: bool = !next_tx.ancestors.is_empty();
for ancestor_id in &next_tx.ancestors {
if let Some(Some(ancestor)) = audit_pool.get(*ancestor_id as usize) {
package.push((*ancestor_id, ancestor.order(), ancestor.ancestors.len()));
}
}
package.sort_unstable_by(|a, b| -> Ordering {
if a.2 != b.2 {
// order by ascending ancestor count
a.2.cmp(&b.2)
} else if a.1 != b.1 {
// tie-break by ascending partial txid
a.1.cmp(&b.1)
} else {
// tie-break partial txid collisions by ascending uid
a.0.cmp(&b.0)
}
});
package.push((next_tx.uid, next_tx.order(), next_tx.ancestors.len()));
let cluster_rate = next_tx.cluster_rate();
for (txid, _, _) in &package {
cluster.push(*txid);
if let Some(Some(tx)) = audit_pool.get_mut(*txid as usize) {
tx.used = true;
tx.set_dirty_if_different(cluster_rate);
transactions.push(tx.uid);
block_weight += tx.weight;
block_sigops += tx.sigops;
}
update_descendants(*txid, &mut audit_pool, &mut modified, cluster_rate);
}
if is_cluster {
clusters.push(cluster);
}
failures = 0;
}
// this block is full
let exceeded_package_tries =
failures > 1000 && block_weight > (MAX_BLOCK_WEIGHT_UNITS - BLOCK_RESERVED_WEIGHT);
let queue_is_empty = mempool_stack.is_empty() && modified.is_empty();
if (exceeded_package_tries || queue_is_empty) && blocks.len() < (MAX_BLOCKS - 1) {
// finalize this block
if !transactions.is_empty() {
blocks.push(transactions);
block_weights.push(block_weight);
}
// reset for the next block
transactions = Vec::with_capacity(initial_txes_per_block);
block_weight = BLOCK_RESERVED_WEIGHT;
block_sigops = BLOCK_RESERVED_SIGOPS;
failures = 0;
// 'overflow' packages didn't fit in this block, but are valid candidates for the next
overflow.reverse();
for overflowed in &overflow {
if let Some(Some(overflowed_tx)) = audit_pool.get(*overflowed as usize) {
if overflowed_tx.modified {
modified.push(
*overflowed,
TxPriority {
uid: *overflowed,
order: overflowed_tx.order(),
score: overflowed_tx.score(),
},
);
} else {
mempool_stack.push(*overflowed);
}
}
}
overflow = Vec::new();
}
}
info!("add the final unbounded block if it contains any transactions");
if !transactions.is_empty() {
blocks.push(transactions);
block_weights.push(block_weight);
}
info!("make a list of dirty transactions and their new rates");
let mut rates: Vec<Vec<f64>> = Vec::new();
for (uid, thread_tx) in mempool {
// Takes ownership of the audit_tx and replaces with None
if let Some(Some(audit_tx)) = audit_pool.get_mut(*uid as usize).map(Option::take) {
trace!("txid: {}, is_dirty: {}", uid, audit_tx.dirty);
if audit_tx.dirty {
rates.push(vec![f64::from(*uid), audit_tx.effective_fee_per_vsize]);
thread_tx.effective_fee_per_vsize = audit_tx.effective_fee_per_vsize;
}
// Drops the AuditTransaction manually
// There are no audit_txs that are not in the mempool HashMap
// So there is guaranteed to be no memory leaks.
ManuallyDrop::into_inner(audit_tx);
}
}
trace!("\n\n\n\n\n====================");
trace!("blocks: {:#?}", blocks);
trace!("clusters: {:#?}", clusters);
trace!("rates: {:#?}\n====================\n\n\n\n\n", rates);
GbtResult {
blocks,
block_weights,
clusters,
rates,
}
}
fn next_valid_from_stack<'a>(
mempool_stack: &mut Vec<u32>,
audit_pool: &'a AuditPool,
) -> Option<&'a AuditTransaction> {
while let Some(next_txid) = mempool_stack.last() {
match audit_pool.get(*next_txid as usize) {
Some(Some(tx)) if !tx.used && !tx.modified => {
return Some(tx);
}
_ => {
mempool_stack.pop();
}
}
}
None
}
fn next_valid_from_queue<'a>(
queue: &mut ModifiedQueue,
audit_pool: &'a AuditPool,
) -> Option<&'a AuditTransaction> {
while let Some((next_txid, _)) = queue.peek() {
match audit_pool.get(*next_txid as usize) {
Some(Some(tx)) if !tx.used => {
return Some(tx);
}
_ => {
queue.pop();
}
}
}
None
}
fn set_relatives(txid: u32, audit_pool: &mut AuditPool) {
let mut parents: HashSet<u32, U32HasherState> = u32hashset_new();
if let Some(Some(tx)) = audit_pool.get(txid as usize) {
if tx.relatives_set_flag {
return;
}
for input in &tx.inputs {
parents.insert(*input);
}
} else {
return;
}
let mut ancestors: HashSet<u32, U32HasherState> = u32hashset_new();
for parent_id in &parents {
set_relatives(*parent_id, audit_pool);
if let Some(Some(parent)) = audit_pool.get_mut(*parent_id as usize) {
// Safety: ancestors must always contain only txes in audit_pool
ancestors.insert(*parent_id);
parent.children.insert(txid);
for ancestor in &parent.ancestors {
ancestors.insert(*ancestor);
}
}
}
let mut total_fee: u64 = 0;
let mut total_sigop_adjusted_weight: u32 = 0;
let mut total_sigop_adjusted_vsize: u32 = 0;
let mut total_sigops: u32 = 0;
for ancestor_id in &ancestors {
let Some(ancestor) = audit_pool
.get(*ancestor_id as usize)
.expect("audit_pool contains all ancestors") else { todo!() };
total_fee += ancestor.fee;
total_sigop_adjusted_weight += ancestor.sigop_adjusted_weight;
total_sigop_adjusted_vsize += ancestor.sigop_adjusted_vsize;
total_sigops += ancestor.sigops;
}
if let Some(Some(tx)) = audit_pool.get_mut(txid as usize) {
tx.set_ancestors(
ancestors,
total_fee,
total_sigop_adjusted_weight,
total_sigop_adjusted_vsize,
total_sigops,
);
}
}
// iterate over remaining descendants, removing the root as a valid ancestor & updating the ancestor score
fn update_descendants(
root_txid: u32,
audit_pool: &mut AuditPool,
modified: &mut ModifiedQueue,
cluster_rate: f64,
) {
let mut visited: HashSet<u32, U32HasherState> = u32hashset_new();
let mut descendant_stack: Vec<u32> = Vec::new();
let root_fee: u64;
let root_sigop_adjusted_weight: u32;
let root_sigop_adjusted_vsize: u32;
let root_sigops: u32;
if let Some(Some(root_tx)) = audit_pool.get(root_txid as usize) {
for descendant_id in &root_tx.children {
if !visited.contains(descendant_id) {
descendant_stack.push(*descendant_id);
visited.insert(*descendant_id);
}
}
root_fee = root_tx.fee;
root_sigop_adjusted_weight = root_tx.sigop_adjusted_weight;
root_sigop_adjusted_vsize = root_tx.sigop_adjusted_vsize;
root_sigops = root_tx.sigops;
} else {
return;
}
while let Some(next_txid) = descendant_stack.pop() {
if let Some(Some(descendant)) = audit_pool.get_mut(next_txid as usize) {
// remove root tx as ancestor
let old_score = descendant.remove_root(
root_txid,
root_fee,
root_sigop_adjusted_weight,
root_sigop_adjusted_vsize,
root_sigops,
cluster_rate,
);
// add to priority queue or update priority if score has changed
if descendant.score() < old_score {
descendant.modified = true;
modified.push_decrease(
descendant.uid,
TxPriority {
uid: descendant.uid,
order: descendant.order(),
score: descendant.score(),
},
);
} else if descendant.score() > old_score {
descendant.modified = true;
modified.push_increase(
descendant.uid,
TxPriority {
uid: descendant.uid,
order: descendant.order(),
score: descendant.score(),
},
);
}
// add this node's children to the stack
for child_id in &descendant.children {
if !visited.contains(child_id) {
descendant_stack.push(*child_id);
visited.insert(*child_id);
}
}
}
}
}
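In the main loop above, a candidate package is admitted only if it fits both the remaining weight budget and the remaining sigop budget; otherwise it is parked on the `overflow` list. The check is skipped for the final block, which is unbounded. A rough TypeScript rendering of that admission test (constants copied from the top of the file, example values made up):
```ts
// Sketch of the package admission check from the main gbt loop, for illustration only.
const MAX_BLOCK_WEIGHT_UNITS = 4_000_000 - 4_000;
const BLOCK_SIGOPS = 80_000;

function packageOverflows(
  blockWeight: number, blockSigops: number,
  ancestorSigopAdjustedVsize: number, ancestorSigops: number,
): boolean {
  // a package overflows if adding it would exceed either the weight or the sigop budget
  return blockWeight + 4 * ancestorSigopAdjustedVsize >= MAX_BLOCK_WEIGHT_UNITS
      || blockSigops + ancestorSigops > BLOCK_SIGOPS;
}

// e.g. with 3,990,000 WU already used, a 2,000-vbyte package no longer fits:
packageOverflows(3_990_000, 500, 2_000, 4); // => true
```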

177
backend/rust-gbt/src/lib.rs Normal file

@@ -0,0 +1,177 @@
#![warn(clippy::all)]
#![warn(clippy::pedantic)]
#![warn(clippy::nursery)]
#![allow(clippy::cast_precision_loss)]
#![allow(clippy::cast_possible_truncation)]
#![allow(clippy::cast_sign_loss)]
#![allow(clippy::float_cmp)]
use napi::bindgen_prelude::Result;
use napi_derive::napi;
use thread_transaction::ThreadTransaction;
use tracing::{debug, info, trace};
use tracing_log::LogTracer;
use tracing_subscriber::{EnvFilter, FmtSubscriber};
use std::collections::HashMap;
use std::sync::{Arc, Mutex};
mod audit_transaction;
mod gbt;
mod thread_transaction;
mod u32_hasher_types;
use u32_hasher_types::{u32hashmap_with_capacity, U32HasherState};
/// This is the initial capacity of the `GbtGenerator` struct's inner `HashMap`.
///
/// Note: This doesn't *have* to be a power of 2. (uwu)
const STARTING_CAPACITY: usize = 1_048_576;
type ThreadTransactionsMap = HashMap<u32, ThreadTransaction, U32HasherState>;
#[napi]
pub struct GbtGenerator {
thread_transactions: Arc<Mutex<ThreadTransactionsMap>>,
}
#[napi::module_init]
fn init() {
// Set all `tracing` logs to print to STDOUT
// Note: Passing RUST_LOG env variable to the node process
// will change the log level for the rust module.
tracing::subscriber::set_global_default(
FmtSubscriber::builder()
.with_env_filter(EnvFilter::from_default_env())
.with_ansi(
// Default to no-color logs.
// Setting RUST_LOG_COLOR to 1 or true|TRUE|True etc.
// will enable color
std::env::var("RUST_LOG_COLOR")
.map(|s| ["1", "true"].contains(&&*s.to_lowercase()))
.unwrap_or(false),
)
.finish(),
)
.expect("Logging subscriber failed");
// Convert all `log` logs into `tracing` events
LogTracer::init().expect("Legacy log subscriber failed");
}
#[napi]
impl GbtGenerator {
#[napi(constructor)]
#[allow(clippy::new_without_default)]
#[must_use]
pub fn new() -> Self {
debug!("Created new GbtGenerator");
Self {
thread_transactions: Arc::new(Mutex::new(u32hashmap_with_capacity(STARTING_CAPACITY))),
}
}
/// # Errors
///
/// Rejects if the thread panics or if the Mutex is poisoned.
#[napi]
pub async fn make(&self, mempool: Vec<ThreadTransaction>, max_uid: u32) -> Result<GbtResult> {
trace!("make: Current State {:#?}", self.thread_transactions);
run_task(
Arc::clone(&self.thread_transactions),
max_uid as usize,
move |map| {
for tx in mempool {
map.insert(tx.uid, tx);
}
},
)
.await
}
/// # Errors
///
/// Rejects if the thread panics or if the Mutex is poisoned.
#[napi]
pub async fn update(
&self,
new_txs: Vec<ThreadTransaction>,
remove_txs: Vec<u32>,
max_uid: u32,
) -> Result<GbtResult> {
trace!("update: Current State {:#?}", self.thread_transactions);
run_task(
Arc::clone(&self.thread_transactions),
max_uid as usize,
move |map| {
for tx in new_txs {
map.insert(tx.uid, tx);
}
for txid in &remove_txs {
map.remove(txid);
}
},
)
.await
}
}
/// The result from calling the gbt function.
///
/// This tuple contains the following:
/// blocks: A 2D Vector of transaction IDs (u32), the inner Vecs each represent a block.
/// block_weights: A Vector of total weights per block.
/// clusters: A 2D Vector of transaction IDs representing clusters of dependent mempool transactions
/// rates: A Vector of tuples containing transaction IDs (u32) and effective fee per vsize (f64)
#[napi(constructor)]
pub struct GbtResult {
pub blocks: Vec<Vec<u32>>,
pub block_weights: Vec<u32>,
pub clusters: Vec<Vec<u32>>,
pub rates: Vec<Vec<f64>>, // Tuples not supported. u32 fits inside f64
}
/// All on another thread, this runs an arbitrary task in between
/// taking the lock and running gbt.
///
/// Rather than filling / updating the `HashMap` on the main thread,
/// this allows for `HashMap` modifying tasks to be run before running and returning gbt results.
///
/// `thread_transactions` is a cloned `Arc` of the `Mutex` for the `HashMap` state.
/// `callback` is a `'static + Send` `FnOnce` closure/function that takes a mutable reference
/// to the `HashMap` as the only argument. (A move closure is recommended to meet the bounds)
async fn run_task<F>(
thread_transactions: Arc<Mutex<ThreadTransactionsMap>>,
max_uid: usize,
callback: F,
) -> Result<GbtResult>
where
F: FnOnce(&mut ThreadTransactionsMap) + Send + 'static,
{
debug!("Spawning thread...");
let handle = napi::tokio::task::spawn_blocking(move || {
debug!(
"Getting lock for thread_transactions from thread {:?}...",
std::thread::current().id()
);
let mut map = thread_transactions
.lock()
.map_err(|_| napi::Error::from_reason("THREAD_TRANSACTIONS Mutex poisoned"))?;
callback(&mut map);
info!("Starting gbt algorithm for {} elements...", map.len());
let result = gbt::gbt(&mut map, max_uid);
info!("Finished gbt algorithm for {} elements...", map.len());
debug!(
"Releasing lock for thread_transactions from thread {:?}...",
std::thread::current().id()
);
drop(map);
Ok(result)
});
handle
.await
.map_err(|_| napi::Error::from_reason("thread panicked"))?
}
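As the comment on `rates` notes, tuples are not supported across the napi boundary, so each rate comes back as a two-element `f64` array; a u32 uid is exactly representable in a double, so the JavaScript side can convert it back losslessly. A hedged sketch of what consuming code might do with `GbtResult.rates` (names below are hypothetical, not the backend's actual implementation):
```ts
// Illustrative decoding of GbtResult.rates on the JS side.
// Each entry is [uid, effectiveFeePerVsize]; the uid is a u32 stored losslessly in a double.
function applyNewRates(rates: number[][], mempool: Map<number, { effectiveFeePerVsize: number }>): void {
  for (const [uidAsFloat, rate] of rates) {
    const uid = uidAsFloat >>> 0; // back to an unsigned 32-bit integer
    const tx = mempool.get(uid);
    if (tx) {
      tx.effectiveFeePerVsize = rate;
    }
  }
}
```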

backend/rust-gbt/src/thread_transaction.rs Normal file

@@ -0,0 +1,13 @@
use napi_derive::napi;
#[derive(Debug)]
#[napi(object)]
pub struct ThreadTransaction {
pub uid: u32,
pub order: u32,
pub fee: f64,
pub weight: u32,
pub sigops: u32,
pub effective_fee_per_vsize: f64,
pub inputs: Vec<u32>,
}

backend/rust-gbt/src/u32_hasher_types.rs Normal file

@@ -0,0 +1,132 @@
use priority_queue::PriorityQueue;
use std::{
collections::{HashMap, HashSet},
fmt::Debug,
hash::{BuildHasher, Hasher},
};
/// This is the only way to create a `HashMap` with the `U32HasherState` and capacity
pub fn u32hashmap_with_capacity<V>(capacity: usize) -> HashMap<u32, V, U32HasherState> {
HashMap::with_capacity_and_hasher(capacity, U32HasherState(()))
}
/// This is the only way to create a `PriorityQueue` with the `U32HasherState` and capacity
pub fn u32priority_queue_with_capacity<V: Ord>(
capacity: usize,
) -> PriorityQueue<u32, V, U32HasherState> {
PriorityQueue::with_capacity_and_hasher(capacity, U32HasherState(()))
}
/// This is the only way to create a `HashSet` with the `U32HasherState`
pub fn u32hashset_new() -> HashSet<u32, U32HasherState> {
HashSet::with_hasher(U32HasherState(()))
}
/// A private unit type is contained so no one can make an instance of it.
#[derive(Clone)]
pub struct U32HasherState(());
impl Debug for U32HasherState {
fn fmt(&self, _: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
Ok(())
}
}
impl BuildHasher for U32HasherState {
type Hasher = U32Hasher;
fn build_hasher(&self) -> Self::Hasher {
U32Hasher(0)
}
}
/// This also can't be created outside this module due to private field.
pub struct U32Hasher(u32);
impl Hasher for U32Hasher {
fn finish(&self) -> u64 {
// Safety: Two u32s next to each other will make a u64
bytemuck::cast([self.0, 0])
}
fn write(&mut self, bytes: &[u8]) {
// Assert in debug builds (testing too) that only 4 byte keys (u32, i32, f32, etc.) run
debug_assert!(bytes.len() == 4);
// Safety: We know that the size of the key is 4 bytes
// We also know that the only way to get an instance of HashMap using this "hasher"
// is through the public functions in this module which set the key type to u32.
self.0 = *bytemuck::from_bytes(bytes);
}
}
#[cfg(test)]
mod tests {
use super::U32HasherState;
use priority_queue::PriorityQueue;
use std::collections::HashMap;
#[test]
fn test_hashmap() {
let mut hm: HashMap<u32, String, U32HasherState> = HashMap::with_hasher(U32HasherState(()));
// Testing basic operations with the custom hasher
hm.insert(0, String::from("0"));
hm.insert(42, String::from("42"));
hm.insert(256, String::from("256"));
hm.insert(u32::MAX, String::from("MAX"));
hm.insert(u32::MAX >> 2, String::from("MAX >> 2"));
assert_eq!(hm.get(&0), Some(&String::from("0")));
assert_eq!(hm.get(&42), Some(&String::from("42")));
assert_eq!(hm.get(&256), Some(&String::from("256")));
assert_eq!(hm.get(&u32::MAX), Some(&String::from("MAX")));
assert_eq!(hm.get(&(u32::MAX >> 2)), Some(&String::from("MAX >> 2")));
assert_eq!(hm.get(&(u32::MAX >> 4)), None);
assert_eq!(hm.get(&3), None);
assert_eq!(hm.get(&43), None);
}
#[test]
fn test_priority_queue() {
let mut pq: PriorityQueue<u32, i32, U32HasherState> =
PriorityQueue::with_hasher(U32HasherState(()));
// Testing basic operations with the custom hasher
assert_eq!(pq.push(1, 5), None);
assert_eq!(pq.push(2, -10), None);
assert_eq!(pq.push(3, 7), None);
assert_eq!(pq.push(4, 20), None);
assert_eq!(pq.push(u32::MAX, -42), None);
assert_eq!(pq.push_increase(1, 4), Some(4));
assert_eq!(pq.push_increase(2, -8), Some(-10));
assert_eq!(pq.push_increase(3, 5), Some(5));
assert_eq!(pq.push_increase(4, 21), Some(20));
assert_eq!(pq.push_increase(u32::MAX, -99), Some(-99));
assert_eq!(pq.push_increase(42, 1337), None);
assert_eq!(pq.push_decrease(1, 4), Some(5));
assert_eq!(pq.push_decrease(2, -10), Some(-8));
assert_eq!(pq.push_decrease(3, 5), Some(7));
assert_eq!(pq.push_decrease(4, 20), Some(21));
assert_eq!(pq.push_decrease(u32::MAX, 100), Some(100));
assert_eq!(pq.push_decrease(69, 420), None);
assert_eq!(pq.peek(), Some((&42, &1337)));
assert_eq!(pq.pop(), Some((42, 1337)));
assert_eq!(pq.peek(), Some((&69, &420)));
assert_eq!(pq.pop(), Some((69, 420)));
assert_eq!(pq.peek(), Some((&4, &20)));
assert_eq!(pq.pop(), Some((4, 20)));
assert_eq!(pq.peek(), Some((&3, &5)));
assert_eq!(pq.pop(), Some((3, 5)));
assert_eq!(pq.peek(), Some((&1, &4)));
assert_eq!(pq.pop(), Some((1, 4)));
assert_eq!(pq.peek(), Some((&2, &-10)));
assert_eq!(pq.pop(), Some((2, -10)));
assert_eq!(pq.peek(), Some((&u32::MAX, &-42)));
assert_eq!(pq.pop(), Some((u32::MAX, -42)));
assert_eq!(pq.peek(), None);
assert_eq!(pq.pop(), None);
}
}

View File

@@ -16,7 +16,7 @@
"INITIAL_BLOCKS_AMOUNT": 7,
"MEMPOOL_BLOCKS_AMOUNT": 8,
"USE_SECOND_NODE_FOR_MINFEE": 10,
"EXTERNAL_ASSETS": 11,
"EXTERNAL_ASSETS": [],
"EXTERNAL_MAX_RETRY": 12,
"EXTERNAL_RETRY_INTERVAL": 13,
"USER_AGENT": "__MEMPOOL_USER_AGENT__",
@@ -24,19 +24,22 @@
"INDEXING_BLOCKS_AMOUNT": 14,
"POOLS_JSON_TREE_URL": "__POOLS_JSON_TREE_URL__",
"POOLS_JSON_URL": "__POOLS_JSON_URL__",
"AUDIT": "__MEMPOOL_AUDIT__",
"ADVANCED_GBT_AUDIT": "__MEMPOOL_ADVANCED_GBT_AUDIT__",
"ADVANCED_GBT_MEMPOOL": "__MEMPOOL_ADVANCED_GBT_MEMPOOL__",
"CPFP_INDEXING": "__MEMPOOL_CPFP_INDEXING__",
"MAX_BLOCKS_BULK_QUERY": "__MEMPOOL_MAX_BLOCKS_BULK_QUERY__",
"DISK_CACHE_BLOCK_INTERVAL": "__MEMPOOL_DISK_CACHE_BLOCK_INTERVAL__"
"AUDIT": true,
"ADVANCED_GBT_AUDIT": true,
"ADVANCED_GBT_MEMPOOL": true,
"RUST_GBT": false,
"CPFP_INDEXING": true,
"MAX_BLOCKS_BULK_QUERY": 999,
"DISK_CACHE_BLOCK_INTERVAL": 999,
"MAX_PUSH_TX_SIZE_WEIGHT": 4000000,
"ALLOW_UNREACHABLE": true
},
"CORE_RPC": {
"HOST": "__CORE_RPC_HOST__",
"PORT": 15,
"USERNAME": "__CORE_RPC_USERNAME__",
"PASSWORD": "__CORE_RPC_PASSWORD__",
"TIMEOUT": "__CORE_RPC_TIMEOUT__"
"TIMEOUT": 1000
},
"ELECTRUM": {
"HOST": "__ELECTRUM_HOST__",
@@ -44,14 +47,16 @@
"TLS_ENABLED": true
},
"ESPLORA": {
"REST_API_URL": "__ESPLORA_REST_API_URL__"
"REST_API_URL": "__ESPLORA_REST_API_URL__",
"UNIX_SOCKET_PATH": "__ESPLORA_UNIX_SOCKET_PATH__",
"RETRY_UNIX_SOCKET_AFTER": 888
},
"SECOND_CORE_RPC": {
"HOST": "__SECOND_CORE_RPC_HOST__",
"PORT": 17,
"USERNAME": "__SECOND_CORE_RPC_USERNAME__",
"PASSWORD": "__SECOND_CORE_RPC_PASSWORD__",
"TIMEOUT": "__SECOND_CORE_RPC_TIMEOUT__"
"TIMEOUT": 2000
},
"DATABASE": {
"ENABLED": false,
@@ -60,7 +65,8 @@
"PORT": 18,
"DATABASE": "__DATABASE_DATABASE__",
"USERNAME": "__DATABASE_USERNAME__",
"PASSWORD": "__DATABASE_PASSWORD__"
"PASSWORD": "__DATABASE_PASSWORD__",
"TIMEOUT": 3000
},
"SYSLOG": {
"ENABLED": false,
@@ -98,14 +104,14 @@
"BISQ_ONION": "__EXTERNAL_DATA_SERVER_BISQ_ONION__"
},
"LIGHTNING": {
"ENABLED": "__LIGHTNING_ENABLED__",
"ENABLED": true,
"BACKEND": "__LIGHTNING_BACKEND__",
"TOPOLOGY_FOLDER": "__LIGHTNING_TOPOLOGY_FOLDER__",
"STATS_REFRESH_INTERVAL": 600,
"GRAPH_REFRESH_INTERVAL": 600,
"LOGGER_UPDATE_INTERVAL": 30,
"FORENSICS_INTERVAL": 43200,
"FORENSICS_RATE_LIMIT": "__FORENSICS_RATE_LIMIT__"
"FORENSICS_RATE_LIMIT": 1234
},
"LND": {
"TLS_CERT_PATH": "",
@@ -115,5 +121,11 @@
},
"CLIGHTNING": {
"SOCKET": "__CLIGHTNING_SOCKET__"
},
"REPLICATION": {
"ENABLED": false,
"AUDIT": false,
"AUDIT_START_HEIGHT": 774000,
"SERVERS": []
}
}

View File

@@ -14,11 +14,11 @@ describe('Mempool Difficulty Adjustment', () => {
750134, // Current block height
0.6280047707459726, // Previous retarget % (Passed through)
'mainnet', // Network (if testnet, next value is non-zero)
0, // If not testnet, not used
0, // Latest block timestamp in seconds (only used if difficulty already locked in)
],
{ // Expected Result
progressPercent: 9.027777777777777,
difficultyChange: 12.562233927411782,
difficultyChange: 13.180707740199772,
estimatedRetargetDate: 1661895424692,
remainingBlocks: 1834,
remainingTime: 977591692,
@@ -41,7 +41,7 @@ describe('Mempool Difficulty Adjustment', () => {
],
{ // Expected Result is same other than timeOffset
progressPercent: 9.027777777777777,
difficultyChange: 12.562233927411782,
difficultyChange: 13.180707740199772,
estimatedRetargetDate: 1661895424692,
remainingBlocks: 1834,
remainingTime: 977591692,
@@ -54,6 +54,29 @@ describe('Mempool Difficulty Adjustment', () => {
expectedBlocks: 161.68833333333333,
},
],
[ // Vector 3 (mainnet lock-in (epoch ending 788255))
[ // Inputs
dt('2023-04-20T09:57:33.000Z'), // Last DA time (in seconds)
dt('2023-05-04T14:54:09.000Z'), // Current time (now) (in seconds)
788255, // Current block height
1.7220298879531821, // Previous retarget % (Passed through)
'mainnet', // Network (if testnet, next value is non-zero)
dt('2023-05-04T14:54:26.000Z'), // Latest block timestamp in seconds
],
{ // Expected Result
progressPercent: 99.95039682539682,
difficultyChange: -1.4512637555574193,
estimatedRetargetDate: 1683212658129,
remainingBlocks: 1,
remainingTime: 609129,
previousRetarget: 1.7220298879531821,
previousTime: 1681984653,
nextRetargetHeight: 788256,
timeAvg: 609129,
timeOffset: 0,
expectedBlocks: 2045.66,
},
],
] as [[number, number, number, number, string, number], DifficultyAdjustment][];
for (const vector of vectors) {

View File

@@ -40,14 +40,17 @@ describe('Mempool Backend Config', () => {
AUDIT: false,
ADVANCED_GBT_AUDIT: false,
ADVANCED_GBT_MEMPOOL: false,
RUST_GBT: false,
CPFP_INDEXING: false,
MAX_BLOCKS_BULK_QUERY: 0,
DISK_CACHE_BLOCK_INTERVAL: 6,
MAX_PUSH_TX_SIZE_WEIGHT: 400000,
ALLOW_UNREACHABLE: true,
});
expect(config.ELECTRUM).toStrictEqual({ HOST: '127.0.0.1', PORT: 3306, TLS_ENABLED: true });
expect(config.ESPLORA).toStrictEqual({ REST_API_URL: 'http://127.0.0.1:3000' });
expect(config.ESPLORA).toStrictEqual({ REST_API_URL: 'http://127.0.0.1:3000', UNIX_SOCKET_PATH: null, RETRY_UNIX_SOCKET_AFTER: 30000 });
expect(config.CORE_RPC).toStrictEqual({
HOST: '127.0.0.1',
@@ -72,7 +75,8 @@ describe('Mempool Backend Config', () => {
PORT: 3306,
DATABASE: 'mempool',
USERNAME: 'mempool',
PASSWORD: 'mempool'
PASSWORD: 'mempool',
TIMEOUT: 180000,
});
expect(config.SYSLOG).toStrictEqual({
@@ -116,6 +120,13 @@ describe('Mempool Backend Config', () => {
GEOLITE2_ASN: '/usr/local/share/GeoIP/GeoLite2-ASN.mmdb',
GEOIP2_ISP: '/usr/local/share/GeoIP/GeoIP2-ISP.mmdb'
});
expect(config.REPLICATION).toStrictEqual({
ENABLED: false,
AUDIT: false,
AUDIT_START_HEIGHT: 774000,
SERVERS: []
});
});
});
@@ -151,4 +162,94 @@ describe('Mempool Backend Config', () => {
expect(config.EXTERNAL_DATA_SERVER).toStrictEqual(fixture.EXTERNAL_DATA_SERVER);
});
});
test('should ensure the docker start.sh script has default values', () => {
jest.isolateModules(() => {
const startSh = fs.readFileSync(`${__dirname}/../../../docker/backend/start.sh`, 'utf-8');
const fixture = JSON.parse(fs.readFileSync(`${__dirname}/../__fixtures__/mempool-config.template.json`, 'utf8'));
function parseJson(jsonObj, root?) {
for (const [key, value] of Object.entries(jsonObj)) {
// We have a few cases where we can't follow the pattern
if (root === 'MEMPOOL' && key === 'HTTP_PORT') {
console.log('skipping check for MEMPOOL_HTTP_PORT');
return;
}
switch (typeof value) {
case 'object': {
if (Array.isArray(value)) {
return;
} else {
parseJson(value, key);
}
break;
}
default: {
//The flattened string, i.e, __MEMPOOL_ENABLED__
const replaceStr = `${root ? '__' + root + '_' : '__'}${key}__`;
//The string used as the environment variable, i.e, MEMPOOL_ENABLED
const envVarStr = `${root ? root : ''}_${key}`;
//The string used as the default value, to be checked as a regex, i.e, __MEMPOOL_ENABLED__=${MEMPOOL_ENABLED:=(.*)}
const defaultEntry = replaceStr + '=' + '\\${' + envVarStr + ':=(.*)' + '}';
console.log(`looking for ${defaultEntry} in the start.sh script`);
const re = new RegExp(defaultEntry);
expect(startSh).toMatch(re);
//The string that actually replaces the values in the config file
const sedStr = 'sed -i "s!' + replaceStr + '!${' + replaceStr + '}!g" mempool-config.json';
console.log(`looking for ${sedStr} in the start.sh script`);
expect(startSh).toContain(sedStr);
break;
}
}
}
}
parseJson(fixture);
});
});
test('should ensure that the mempool-config.json Docker template has all the keys', () => {
jest.isolateModules(() => {
const fixture = JSON.parse(fs.readFileSync(`${__dirname}/../__fixtures__/mempool-config.template.json`, 'utf8'));
const dockerJson = fs.readFileSync(`${__dirname}/../../../docker/backend/mempool-config.json`, 'utf-8');
function parseJson(jsonObj, root?) {
for (const [key, value] of Object.entries(jsonObj)) {
switch (typeof value) {
case 'object': {
if (Array.isArray(value)) {
// numbers, arrays and booleans won't be enclosed by quotes
const replaceStr = `${root ? '__' + root + '_' : '__'}${key}__`;
expect(dockerJson).toContain(`"${key}": ${replaceStr}`);
break;
} else {
//Check for top level config keys
expect(dockerJson).toContain(`"${key}"`);
parseJson(value, key);
break;
}
}
case 'string': {
// strings should be enclosed by quotes
const replaceStr = `${root ? '__' + root + '_' : '__'}${key}__`;
expect(dockerJson).toContain(`"${key}": "${replaceStr}"`);
break;
}
default: {
// numbers, arrays and booleans won't be enclosed by quotes
const replaceStr = `${root ? '__' + root + '_' : '__'}${key}__`;
expect(dockerJson).toContain(`"${key}": ${replaceStr}`);
break;
}
}
};
}
parseJson(fixture);
});
});
});

View File

@@ -0,0 +1,68 @@
import fs from 'fs';
import { GbtGenerator, ThreadTransaction } from '../../../rust-gbt';
import path from 'path';
const baseline = require('./test-data/target-template.json');
const testVector = require('./test-data/test-data-ids.json');
const vectorUidMap: Map<number, string> = new Map(testVector.map(x => [x[0], x[1]]));
const vectorTxidMap: Map<string, number> = new Map(testVector.map(x => [x[1], x[0]]));
// Note that this test buffer is specially constructed
// such that uids are assigned in numerical txid order
// so that ties break the same way as in Core's implementation
const vectorBuffer: Buffer = fs.readFileSync(path.join(__dirname, './', './test-data/test-buffer.bin'));
describe('Rust GBT', () => {
test('should produce the same template as getBlockTemplate from Bitcoin Core', async () => {
const rustGbt = new GbtGenerator();
const { mempool, maxUid } = mempoolFromArrayBuffer(vectorBuffer.buffer);
const result = await rustGbt.make(mempool, maxUid);
const blocks: [string, number][][] = result.blocks.map(block => {
return block.map(uid => [vectorUidMap.get(uid) || 'missing', uid]);
});
const template = baseline.map(tx => [tx.txid, vectorTxidMap.get(tx.txid)]);
expect(blocks[0].length).toEqual(baseline.length);
expect(blocks[0]).toEqual(template);
});
});
function mempoolFromArrayBuffer(buf: ArrayBuffer): { mempool: ThreadTransaction[], maxUid: number } {
let maxUid = 0;
const view = new DataView(buf);
const count = view.getUint32(0, false);
const txs: ThreadTransaction[] = [];
let offset = 4;
for (let i = 0; i < count; i++) {
const uid = view.getUint32(offset, false);
maxUid = Math.max(maxUid, uid);
const tx: ThreadTransaction = {
uid,
order: txidToOrdering(vectorUidMap.get(uid) as string),
fee: view.getFloat64(offset + 4, false),
weight: view.getUint32(offset + 12, false),
sigops: view.getUint32(offset + 16, false),
// feePerVsize: view.getFloat64(offset + 20, false),
effectiveFeePerVsize: view.getFloat64(offset + 28, false),
inputs: [],
};
const numInputs = view.getUint32(offset + 36, false);
offset += 40;
for (let j = 0; j < numInputs; j++) {
tx.inputs.push(view.getUint32(offset, false));
offset += 4;
}
txs.push(tx);
}
return { mempool: txs, maxUid };
}
function txidToOrdering(txid: string): number {
return parseInt(
txid.substr(62, 2) +
txid.substr(60, 2) +
txid.substr(58, 2) +
txid.substr(56, 2),
16
);
}
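The `order` value fed into each `ThreadTransaction` here is a 32-bit "partial txid" built by `txidToOrdering`: it takes the last four bytes of the displayed txid and reverses them, which is the value the Rust side uses to break ties between equal ancestor scores. A worked example with a hypothetical txid:
```ts
// Hypothetical txid: 56 zero characters followed by "a1b2c3d4".
// substr(62,2)="d4", substr(60,2)="c3", substr(58,2)="b2", substr(56,2)="a1",
// so the function parses "d4c3b2a1" as hex.
const order = txidToOrdering('0'.repeat(56) + 'a1b2c3d4'); // === 0xd4c3b2a1
```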

File diff suppressed because it is too large

Binary file not shown.

File diff suppressed because one or more lines are too long

View File

@@ -1,18 +1,21 @@
import config from '../config';
import { TransactionExtended, MempoolBlockWithTransactions } from '../mempool.interfaces';
import logger from '../logger';
import { MempoolTransactionExtended, MempoolBlockWithTransactions } from '../mempool.interfaces';
import rbfCache from './rbf-cache';
const PROPAGATION_MARGIN = 180; // in seconds, time since a transaction is first seen after which it is assumed to have propagated to all miners
class Audit {
auditBlock(transactions: TransactionExtended[], projectedBlocks: MempoolBlockWithTransactions[], mempool: { [txId: string]: TransactionExtended })
: { censored: string[], added: string[], fresh: string[], score: number, similarity: number } {
auditBlock(transactions: MempoolTransactionExtended[], projectedBlocks: MempoolBlockWithTransactions[], mempool: { [txId: string]: MempoolTransactionExtended })
: { censored: string[], added: string[], fresh: string[], sigop: string[], fullrbf: string[], score: number, similarity: number } {
if (!projectedBlocks?.[0]?.transactionIds || !mempool) {
return { censored: [], added: [], fresh: [], score: 0, similarity: 1 };
return { censored: [], added: [], fresh: [], sigop: [], fullrbf: [], score: 0, similarity: 1 };
}
const matches: string[] = []; // present in both mined block and template
const added: string[] = []; // present in mined block, not in template
const fresh: string[] = []; // missing, but firstSeen within PROPAGATION_MARGIN
const fresh: string[] = []; // missing, but firstSeen or lastBoosted within PROPAGATION_MARGIN
const fullrbf: string[] = []; // either missing or present, and part of a fullrbf replacement
const isCensored = {}; // missing, without excuse
const isDisplaced = {};
let displacedWeight = 0;
@@ -33,23 +36,30 @@ class Audit {
// look for transactions that were expected in the template, but missing from the mined block
for (const txid of projectedBlocks[0].transactionIds) {
if (!inBlock[txid]) {
// tx is recent, may have reached the miner too late for inclusion
if (mempool[txid]?.firstSeen != null && (now - (mempool[txid]?.firstSeen || 0)) <= PROPAGATION_MARGIN) {
if (rbfCache.isFullRbf(txid)) {
fullrbf.push(txid);
} else if (mempool[txid]?.firstSeen != null && (now - (mempool[txid]?.firstSeen || 0)) <= PROPAGATION_MARGIN) {
// tx is recent, may have reached the miner too late for inclusion
fresh.push(txid);
} else if (mempool[txid]?.lastBoosted != null && (now - (mempool[txid]?.lastBoosted || 0)) <= PROPAGATION_MARGIN) {
// tx was recently cpfp'd, miner may not have the latest effective rate
fresh.push(txid);
} else {
isCensored[txid] = true;
}
displacedWeight += mempool[txid].weight;
displacedWeight += mempool[txid]?.weight || 0;
} else {
matchedWeight += mempool[txid].weight;
matchedWeight += mempool[txid]?.weight || 0;
}
projectedWeight += mempool[txid].weight;
projectedWeight += mempool[txid]?.weight || 0;
inTemplate[txid] = true;
}
displacedWeight += (4000 - transactions[0].weight);
projectedWeight += transactions[0].weight;
matchedWeight += transactions[0].weight;
if (transactions[0]) {
displacedWeight += (4000 - transactions[0].weight);
projectedWeight += transactions[0].weight;
matchedWeight += transactions[0].weight;
}
// we can expect an honest miner to include 'displaced' transactions in place of recent arrivals and censored txs
// these displaced transactions should occupy the first N weight units of the next projected block
@@ -59,19 +69,24 @@ class Audit {
let failures = 0;
while (projectedBlocks[1] && index < projectedBlocks[1].transactionIds.length && failures < 500) {
const txid = projectedBlocks[1].transactionIds[index];
const fits = (mempool[txid].weight - displacedWeightRemaining) < 4000;
const feeMatches = mempool[txid].effectiveFeePerVsize >= lastFeeRate;
if (fits || feeMatches) {
isDisplaced[txid] = true;
if (fits) {
lastFeeRate = Math.min(lastFeeRate, mempool[txid].effectiveFeePerVsize);
const tx = mempool[txid];
if (tx) {
const fits = (tx.weight - displacedWeightRemaining) < 4000;
const feeMatches = tx.effectiveFeePerVsize >= lastFeeRate;
if (fits || feeMatches) {
isDisplaced[txid] = true;
if (fits) {
lastFeeRate = Math.min(lastFeeRate, tx.effectiveFeePerVsize);
}
if (tx.firstSeen == null || (now - (tx?.firstSeen || 0)) > PROPAGATION_MARGIN) {
displacedWeightRemaining -= tx.weight;
}
failures = 0;
} else {
failures++;
}
if (mempool[txid].firstSeen == null || (now - (mempool[txid]?.firstSeen || 0)) > PROPAGATION_MARGIN) {
displacedWeightRemaining -= mempool[txid].weight;
}
failures = 0;
} else {
failures++;
logger.warn('projected transaction missing from mempool cache');
}
index++;
}
@@ -83,19 +98,11 @@ class Audit {
if (inTemplate[tx.txid]) {
matches.push(tx.txid);
} else {
if (!isDisplaced[tx.txid]) {
if (rbfCache.isFullRbf(tx.txid)) {
fullrbf.push(tx.txid);
} else if (!isDisplaced[tx.txid]) {
added.push(tx.txid);
} else {
}
let blockIndex = -1;
let index = -1;
projectedBlocks.forEach((block, bi) => {
const i = block.transactionIds.indexOf(tx.txid);
if (i >= 0) {
blockIndex = bi;
index = i;
}
});
overflowWeight += tx.weight;
}
totalWeight += tx.weight;
@@ -108,20 +115,25 @@ class Audit {
index = projectedBlocks[0].transactionIds.length - 1;
while (index >= 0) {
const txid = projectedBlocks[0].transactionIds[index];
if (overflowWeightRemaining > 0) {
if (isCensored[txid]) {
delete isCensored[txid];
}
if (mempool[txid].effectiveFeePerVsize > maxOverflowRate) {
maxOverflowRate = mempool[txid].effectiveFeePerVsize;
rateThreshold = (Math.ceil(maxOverflowRate * 100) / 100) + 0.005;
}
} else if (mempool[txid].effectiveFeePerVsize <= rateThreshold) { // tolerance of 0.01 sat/vb + rounding
if (isCensored[txid]) {
delete isCensored[txid];
const tx = mempool[txid];
if (tx) {
if (overflowWeightRemaining > 0) {
if (isCensored[txid]) {
delete isCensored[txid];
}
if (tx.effectiveFeePerVsize > maxOverflowRate) {
maxOverflowRate = tx.effectiveFeePerVsize;
rateThreshold = (Math.ceil(maxOverflowRate * 100) / 100) + 0.005;
}
} else if (tx.effectiveFeePerVsize <= rateThreshold) { // tolerance of 0.01 sat/vb + rounding
if (isCensored[txid]) {
delete isCensored[txid];
}
}
overflowWeightRemaining -= (mempool[txid]?.weight || 0);
} else {
logger.warn('projected transaction missing from mempool cache');
}
overflowWeightRemaining -= (mempool[txid]?.weight || 0);
index--;
}
@@ -134,6 +146,8 @@ class Audit {
censored: Object.keys(isCensored),
added,
fresh,
sigop: [],
fullrbf,
score,
similarity,
};
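With this change, `auditBlock` reports two new categories, `sigop` and `fullrbf`, alongside the existing ones. Purely for orientation, a hypothetical result object (all values invented) could look like:
```ts
// Hypothetical auditBlock result illustrating the extended shape from this diff.
const exampleAudit = {
  censored:   ['txidA'],  // expected in the template, missing from the block, without excuse
  added:      ['txidB'],  // mined, but absent from the projected template
  fresh:      ['txidC'],  // missing, but first seen or CPFP-boosted within the 180s margin
  sigop:      [],         // returned empty by this function in this diff
  fullrbf:    ['txidD'],  // missing or present, and part of a full-RBF replacement
  score: 0.98,            // invented value
  similarity: 0.97,       // invented value
};
```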

View File

@@ -7,7 +7,6 @@ import { SocksProxyAgent } from 'socks-proxy-agent';
import { BisqBlocks, BisqBlock, BisqTransaction, BisqStats, BisqTrade } from './interfaces';
import { Common } from '../common';
import { BlockExtended } from '../../mempool.interfaces';
import { StaticPool } from 'node-worker-threads-pool';
import backendInfo from '../backend-info';
import logger from '../../logger';
@@ -31,10 +30,6 @@ class Bisq {
private priceUpdateCallbackFunction: ((price: number) => void) | undefined;
private topDirectoryWatcher: fs.FSWatcher | undefined;
private subdirectoryWatcher: fs.FSWatcher | undefined;
private jsonParsePool = new StaticPool({
size: 4,
task: (blob: string) => JSON.parse(blob),
});
constructor() {}

View File

@@ -14,6 +14,8 @@ export interface AbstractBitcoinApi {
$getAddress(address: string): Promise<IEsploraApi.Address>;
$getAddressTransactions(address: string, lastSeenTxId: string): Promise<IEsploraApi.Transaction[]>;
$getAddressPrefix(prefix: string): string[];
$getScriptHash(scripthash: string): Promise<IEsploraApi.ScriptHash>;
$getScriptHashTransactions(address: string, lastSeenTxId: string): Promise<IEsploraApi.Transaction[]>;
$sendRawTransaction(rawTransaction: string): Promise<string>;
$getOutspend(txId: string, vout: number): Promise<IEsploraApi.Outspend>;
$getOutspends(txId: string): Promise<IEsploraApi.Outspend[]>;

View File

@@ -29,6 +29,7 @@ class BitcoinApi implements AbstractBitcoinApi {
weight: block.weight,
previousblockhash: block.previousblockhash,
mediantime: block.mediantime,
stale: block.confirmations === -1,
};
}
@@ -64,17 +65,11 @@ class BitcoinApi implements AbstractBitcoinApi {
}
$getBlockHeightTip(): Promise<number> {
return this.bitcoindClient.getChainTips()
.then((result: IBitcoinApi.ChainTips[]) => {
return result.find(tip => tip.status === 'active')!.height;
});
return this.bitcoindClient.getBlockCount();
}
$getBlockHashTip(): Promise<string> {
return this.bitcoindClient.getChainTips()
.then((result: IBitcoinApi.ChainTips[]) => {
return result.find(tip => tip.status === 'active')!.hash;
});
return this.bitcoindClient.getBestBlockHash();
}
$getTxIdsForBlock(hash: string): Promise<string[]> {
@@ -113,6 +108,14 @@ class BitcoinApi implements AbstractBitcoinApi {
throw new Error('Method getAddressTransactions not supported by the Bitcoin RPC API.');
}
$getScriptHash(scripthash: string): Promise<IEsploraApi.ScriptHash> {
throw new Error('Method getScriptHash not supported by the Bitcoin RPC API.');
}
$getScriptHashTransactions(scripthash: string, lastSeenTxId: string): Promise<IEsploraApi.Transaction[]> {
throw new Error('Method getScriptHashTransactions not supported by the Bitcoin RPC API.');
}
$getRawMempool(): Promise<IEsploraApi.Transaction['txid'][]> {
return this.bitcoindClient.getRawMemPool();
}
@@ -415,12 +418,38 @@ class BitcoinApi implements AbstractBitcoinApi {
vin.inner_witnessscript_asm = this.convertScriptSigAsm(witnessScript);
}
if (vin.prevout.scriptpubkey_type === 'v1_p2tr' && vin.witness && vin.witness.length > 1) {
const witnessScript = vin.witness[vin.witness.length - 2];
vin.inner_witnessscript_asm = this.convertScriptSigAsm(witnessScript);
if (vin.prevout.scriptpubkey_type === 'v1_p2tr' && vin.witness) {
const witnessScript = this.witnessToP2TRScript(vin.witness);
if (witnessScript !== null) {
vin.inner_witnessscript_asm = this.convertScriptSigAsm(witnessScript);
}
}
}
/**
* This function must only be called when we know the witness we are parsing
* is a taproot witness.
* @param witness An array of hex strings that represents the witness stack of
* the input.
* @returns null if the witness is not a script spend, and the hex string of
* the script item if it is a script spend.
*/
private witnessToP2TRScript(witness: string[]): string | null {
if (witness.length < 2) return null;
// Note: see BIP341 for parsing details of witness stack
// If there are at least two witness elements, and the first byte of the
// last element is 0x50, this last element is called annex a and
// is removed from the witness stack.
const hasAnnex = witness[witness.length - 1].substring(0, 2) === '50';
// If there are at least two witness elements left, script path spending is used.
// Call the second-to-last stack element s, the script.
// (Note: this phrasing from BIP341 assumes we've *removed* the annex from the stack)
if (hasAnnex && witness.length < 3) return null;
const positionOfScript = hasAnnex ? witness.length - 3 : witness.length - 2;
return witness[positionOfScript];
}
}
export default BitcoinApi;
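For reference, a standalone sketch of the same annex-aware extraction (hex-encoded witness items assumed; the example values are placeholders):
function taprootScriptFromWitness(witness: string[]): string | null {
  if (witness.length < 2) { return null; }
  // Per BIP341, a final element whose first byte is 0x50 is the annex and is set aside.
  const hasAnnex = witness[witness.length - 1].startsWith('50');
  if (hasAnnex && witness.length < 3) { return null; }
  // Script-path spends keep the script as the second-to-last element once the annex is removed.
  return witness[hasAnnex ? witness.length - 3 : witness.length - 2];
}
taprootScriptFromWitness(['<schnorr signature>']);                      // null (key-path spend)
taprootScriptFromWitness(['<sig>', '<script hex>', '<control block>']); // '<script hex>'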

View File

@@ -32,8 +32,10 @@ class BitcoinRoutes {
.get(config.MEMPOOL.API_URL_PREFIX + 'backend-info', this.getBackendInfo)
.get(config.MEMPOOL.API_URL_PREFIX + 'init-data', this.getInitData)
.get(config.MEMPOOL.API_URL_PREFIX + 'validate-address/:address', this.validateAddress)
.get(config.MEMPOOL.API_URL_PREFIX + 'tx/:txId/replaces', this.getRbfHistory)
.get(config.MEMPOOL.API_URL_PREFIX + 'tx/:txId/rbf', this.getRbfHistory)
.get(config.MEMPOOL.API_URL_PREFIX + 'tx/:txId/cached', this.getCachedTx)
.get(config.MEMPOOL.API_URL_PREFIX + 'replacements', this.getRbfReplacements)
.get(config.MEMPOOL.API_URL_PREFIX + 'fullrbf/replacements', this.getFullRbfReplacements)
.post(config.MEMPOOL.API_URL_PREFIX + 'tx/push', this.$postTransactionForm)
.get(config.MEMPOOL.API_URL_PREFIX + 'donations', async (req, res) => {
try {
@@ -94,6 +96,7 @@ class BitcoinRoutes {
.get(config.MEMPOOL.API_URL_PREFIX + 'block/:hash', this.getBlock)
.get(config.MEMPOOL.API_URL_PREFIX + 'block/:hash/summary', this.getStrippedBlockTransactions)
.get(config.MEMPOOL.API_URL_PREFIX + 'block/:hash/audit-summary', this.getBlockAuditSummary)
.get(config.MEMPOOL.API_URL_PREFIX + 'blocks/tip/height', this.getBlockTipHeight)
.post(config.MEMPOOL.API_URL_PREFIX + 'psbt/addparents', this.postPsbtCompletion)
.get(config.MEMPOOL.API_URL_PREFIX + 'blocks-bulk/:from', this.getBlocksByBulk.bind(this))
.get(config.MEMPOOL.API_URL_PREFIX + 'blocks-bulk/:from/:to', this.getBlocksByBulk.bind(this))
@@ -110,7 +113,6 @@ class BitcoinRoutes {
.get(config.MEMPOOL.API_URL_PREFIX + 'tx/:txId/status', this.getTransactionStatus)
.get(config.MEMPOOL.API_URL_PREFIX + 'tx/:txId/outspends', this.getTransactionOutspends)
.get(config.MEMPOOL.API_URL_PREFIX + 'block/:hash/header', this.getBlockHeader)
.get(config.MEMPOOL.API_URL_PREFIX + 'blocks/tip/height', this.getBlockTipHeight)
.get(config.MEMPOOL.API_URL_PREFIX + 'blocks/tip/hash', this.getBlockTipHash)
.get(config.MEMPOOL.API_URL_PREFIX + 'block/:hash/raw', this.getRawBlock)
.get(config.MEMPOOL.API_URL_PREFIX + 'block/:hash/txids', this.getTxIdsForBlock)
@@ -119,7 +121,8 @@ class BitcoinRoutes {
.get(config.MEMPOOL.API_URL_PREFIX + 'block-height/:height', this.getBlockHeight)
.get(config.MEMPOOL.API_URL_PREFIX + 'address/:address', this.getAddress)
.get(config.MEMPOOL.API_URL_PREFIX + 'address/:address/txs', this.getAddressTransactions)
.get(config.MEMPOOL.API_URL_PREFIX + 'address/:address/txs/chain/:txId', this.getAddressTransactions)
.get(config.MEMPOOL.API_URL_PREFIX + 'scripthash/:scripthash', this.getScriptHash)
.get(config.MEMPOOL.API_URL_PREFIX + 'scripthash/:scripthash/txs', this.getScriptHashTransactions)
.get(config.MEMPOOL.API_URL_PREFIX + 'address-prefix/:prefix', this.getAddressPrefix)
;
}
@@ -128,8 +131,9 @@ class BitcoinRoutes {
private getInitData(req: Request, res: Response) {
try {
const result = websocketHandler.getInitData();
res.json(result);
const result = websocketHandler.getSerializedInitData();
res.set('Content-Type', 'application/json');
res.send(result);
} catch (e) {
res.status(500).send(e instanceof Error ? e.message : e);
}
@@ -208,6 +212,8 @@ class BitcoinRoutes {
bestDescendant: tx.bestDescendant || null,
descendants: tx.descendants || null,
effectiveFeePerVsize: tx.effectiveFeePerVsize || null,
sigops: tx.sigops,
adjustedVsize: tx.adjustedVsize,
});
return;
}
@@ -219,7 +225,12 @@ class BitcoinRoutes {
} else {
let cpfpInfo;
if (config.DATABASE.ENABLED) {
cpfpInfo = await transactionRepository.$getCpfpInfo(req.params.txId);
try {
cpfpInfo = await transactionRepository.$getCpfpInfo(req.params.txId);
} catch (e) {
res.status(500).send('failed to get CPFP info');
return;
}
}
if (cpfpInfo) {
res.json(cpfpInfo);
@@ -389,9 +400,13 @@ class BitcoinRoutes {
private async getBlockAuditSummary(req: Request, res: Response) {
try {
const transactions = await blocks.$getBlockAuditSummary(req.params.hash);
res.setHeader('Expires', new Date(Date.now() + 1000 * 3600 * 24 * 30).toUTCString());
res.json(transactions);
const auditSummary = await blocks.$getBlockAuditSummary(req.params.hash);
if (auditSummary) {
res.setHeader('Expires', new Date(Date.now() + 1000 * 3600 * 24 * 30).toUTCString());
res.json(auditSummary);
} else {
return res.status(404).send(`audit not available`);
}
} catch (e) {
res.status(500).send(e instanceof Error ? e.message : e);
}
@@ -399,7 +414,7 @@ class BitcoinRoutes {
private async getBlocks(req: Request, res: Response) {
try {
if (['mainnet', 'testnet', 'signet'].includes(config.MEMPOOL.NETWORK)) { // Bitcoin
if (['mainnet', 'testnet', 'signet', 'regtest'].includes(config.MEMPOOL.NETWORK)) { // Bitcoin
const height = req.params.height === undefined ? undefined : parseInt(req.params.height, 10);
res.setHeader('Expires', new Date(Date.now() + 1000 * 60).toUTCString());
res.json(await blocks.$getBlocks(height, 15));
@@ -413,7 +428,7 @@ class BitcoinRoutes {
private async getBlocksByBulk(req: Request, res: Response) {
try {
if (['mainnet', 'testnet', 'signet'].includes(config.MEMPOOL.NETWORK) === false) { // Liquid, Bisq - Not implemented
if (['mainnet', 'testnet', 'signet', 'regtest'].includes(config.MEMPOOL.NETWORK) === false) { // Liquid, Bisq - Not implemented
return res.status(404).send(`This API is only available for Bitcoin networks`);
}
if (config.MEMPOOL.MAX_BLOCKS_BULK_QUERY <= 0) {
@@ -532,15 +547,37 @@ class BitcoinRoutes {
}
}
private async getAddressTransactions(req: Request, res: Response) {
private async getAddressTransactions(req: Request, res: Response): Promise<void> {
if (config.MEMPOOL.BACKEND === 'none') {
res.status(405).send('Address lookups cannot be used with bitcoind as backend.');
return;
}
try {
const transactions = await bitcoinApi.$getAddressTransactions(req.params.address, req.params.txId);
let lastTxId: string = '';
if (req.query.after_txid && typeof req.query.after_txid === 'string') {
lastTxId = req.query.after_txid;
}
const transactions = await bitcoinApi.$getAddressTransactions(req.params.address, lastTxId);
res.json(transactions);
} catch (e) {
if (e instanceof Error && e.message && (e.message.indexOf('too long') > 0 || e.message.indexOf('confirmed status') > 0)) {
res.status(413).send(e instanceof Error ? e.message : e);
return;
}
res.status(500).send(e instanceof Error ? e.message : e);
}
}
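For reference, a hedged client-side sketch of paging through the endpoint above with the new after_txid query parameter (host and route prefix are placeholders):
async function fetchAddressTxPage(address: string, afterTxid?: string): Promise<unknown[]> {
  const url = new URL(`https://mempool.example/api/address/${address}/txs`);
  if (afterTxid) { url.searchParams.set('after_txid', afterTxid); }
  const res = await fetch(url.toString());
  if (res.status === 413) { throw new Error('address history too long for this backend'); }
  if (!res.ok) { throw new Error(`HTTP ${res.status}`); }
  return res.json() as Promise<unknown[]>;
}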
private async getScriptHash(req: Request, res: Response) {
if (config.MEMPOOL.BACKEND === 'none') {
res.status(405).send('Address lookups cannot be used with bitcoind as backend.');
return;
}
try {
const addressData = await bitcoinApi.$getScriptHash(req.params.address);
res.json(addressData);
} catch (e) {
if (e instanceof Error && e.message && (e.message.indexOf('too long') > 0 || e.message.indexOf('confirmed status') > 0)) {
return res.status(413).send(e instanceof Error ? e.message : e);
@@ -549,8 +586,26 @@ class BitcoinRoutes {
}
}
private async getAdressTxChain(req: Request, res: Response) {
res.status(501).send('Not implemented');
private async getScriptHashTransactions(req: Request, res: Response): Promise<void> {
if (config.MEMPOOL.BACKEND === 'none') {
res.status(405).send('Address lookups cannot be used with bitcoind as backend.');
return;
}
try {
let lastTxId: string = '';
if (req.query.after_txid && typeof req.query.after_txid === 'string') {
lastTxId = req.query.after_txid;
}
const transactions = await bitcoinApi.$getScriptHashTransactions(req.params.address, lastTxId);
res.json(transactions);
} catch (e) {
if (e instanceof Error && e.message && (e.message.indexOf('too long') > 0 || e.message.indexOf('confirmed status') > 0)) {
res.status(413).send(e instanceof Error ? e.message : e);
return;
}
res.status(500).send(e instanceof Error ? e.message : e);
}
}
private async getAddressPrefix(req: Request, res: Response) {
@@ -589,10 +644,14 @@ class BitcoinRoutes {
}
}
private async getBlockTipHeight(req: Request, res: Response) {
private getBlockTipHeight(req: Request, res: Response) {
try {
const result = await bitcoinApi.$getBlockHeightTip();
res.json(result);
const result = blocks.getCurrentBlockHeight();
if (!result) {
return res.status(503).send(`Service Temporarily Unavailable`);
}
res.setHeader('content-type', 'text/plain');
res.send(result.toString());
} catch (e) {
res.status(500).send(e instanceof Error ? e.message : e);
}
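Since the handler above now answers in plain text from the in-memory block height, a hedged client sketch (host is a placeholder):
async function fetchTipHeight(): Promise<number> {
  const res = await fetch('https://mempool.example/api/blocks/tip/height');
  if (res.status === 503) { throw new Error('backend does not know the tip height yet'); }
  return parseInt(await res.text(), 10); // plain-text body, e.g. "800000"
}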
@@ -638,8 +697,30 @@ class BitcoinRoutes {
private async getRbfHistory(req: Request, res: Response) {
try {
const result = rbfCache.getReplaces(req.params.txId);
res.json(result || []);
const replacements = rbfCache.getRbfTree(req.params.txId) || null;
const replaces = rbfCache.getReplaces(req.params.txId) || null;
res.json({
replacements,
replaces
});
} catch (e) {
res.status(500).send(e instanceof Error ? e.message : e);
}
}
private async getRbfReplacements(req: Request, res: Response) {
try {
const result = rbfCache.getRbfTrees(false);
res.json(result);
} catch (e) {
res.status(500).send(e instanceof Error ? e.message : e);
}
}
private async getFullRbfReplacements(req: Request, res: Response) {
try {
const result = rbfCache.getRbfTrees(true);
res.json(result);
} catch (e) {
res.status(500).send(e instanceof Error ? e.message : e);
}
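A hedged sketch of consuming the reworked RBF endpoints registered above (host and prefix are placeholders; the per-tx response shape mirrors getRbfHistory):
async function fetchRbfInfo(txid: string) {
  const base = 'https://mempool.example/api/v1';
  const history = await (await fetch(`${base}/tx/${txid}/rbf`)).json();       // { replacements, replaces }
  const replacements = await (await fetch(`${base}/replacements`)).json();    // recent opt-in RBF trees
  const fullRbf = await (await fetch(`${base}/fullrbf/replacements`)).json(); // recent full-RBF trees
  return { history, replacements, fullRbf };
}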
@@ -683,12 +764,7 @@ class BitcoinRoutes {
private async $postTransaction(req: Request, res: Response) {
res.setHeader('content-type', 'text/plain');
try {
let rawTx;
if (typeof req.body === 'object') {
rawTx = Object.keys(req.body)[0];
} else {
rawTx = req.body;
}
const rawTx = Common.getTransactionFromRequest(req, false);
const txIdResult = await bitcoinApi.$sendRawTransaction(rawTx);
res.send(txIdResult);
} catch (e: any) {
@@ -699,12 +775,8 @@ class BitcoinRoutes {
private async $postTransactionForm(req: Request, res: Response) {
res.setHeader('content-type', 'text/plain');
const matches = /tx=([a-z0-9]+)/.exec(req.body);
let txHex = '';
if (matches && matches[1]) {
txHex = matches[1];
}
try {
const txHex = Common.getTransactionFromRequest(req, true);
const txIdResult = await bitcoinClient.sendRawTransaction(txHex);
res.send(txIdResult);
} catch (e: any) {

View File

@@ -16,7 +16,7 @@ class BitcoindElectrsApi extends BitcoinApi implements AbstractBitcoinApi {
super(bitcoinClient);
const electrumConfig = { client: 'mempool-v2', version: '1.4' };
const electrumPersistencePolicy = { retryPeriod: 10000, maxRetry: 1000, callback: null };
const electrumPersistencePolicy = { retryPeriod: 1000, maxRetry: Number.MAX_SAFE_INTEGER, callback: null };
const electrumCallbacks = {
onConnect: (client, versionInfo) => { logger.info(`Connected to Electrum Server at ${config.ELECTRUM.HOST}:${config.ELECTRUM.PORT} (${JSON.stringify(versionInfo)})`); },
@@ -126,6 +126,77 @@ class BitcoindElectrsApi extends BitcoinApi implements AbstractBitcoinApi {
}
}
async $getScriptHash(scripthash: string): Promise<IEsploraApi.ScriptHash> {
try {
const balance = await this.electrumClient.blockchainScripthash_getBalance(scripthash);
let history = memoryCache.get<IElectrumApi.ScriptHashHistory[]>('Scripthash_getHistory', scripthash);
if (!history) {
history = await this.electrumClient.blockchainScripthash_getHistory(scripthash);
memoryCache.set('Scripthash_getHistory', scripthash, history, 2);
}
const unconfirmed = history ? history.filter((h) => h.fee).length : 0;
return {
'scripthash': scripthash,
'chain_stats': {
'funded_txo_count': 0,
'funded_txo_sum': balance.confirmed ? balance.confirmed : 0,
'spent_txo_count': 0,
'spent_txo_sum': balance.confirmed < 0 ? balance.confirmed : 0,
'tx_count': (history?.length || 0) - unconfirmed,
},
'mempool_stats': {
'funded_txo_count': 0,
'funded_txo_sum': balance.unconfirmed > 0 ? balance.unconfirmed : 0,
'spent_txo_count': 0,
'spent_txo_sum': balance.unconfirmed < 0 ? -balance.unconfirmed : 0,
'tx_count': unconfirmed,
},
'electrum': true,
};
} catch (e: any) {
throw new Error(typeof e === 'string' ? e : e && e.message || e);
}
}
async $getScriptHashTransactions(scripthash: string, lastSeenTxId?: string): Promise<IEsploraApi.Transaction[]> {
try {
loadingIndicators.setProgress('address-' + scripthash, 0);
const transactions: IEsploraApi.Transaction[] = [];
let history = memoryCache.get<IElectrumApi.ScriptHashHistory[]>('Scripthash_getHistory', scripthash);
if (!history) {
history = await this.electrumClient.blockchainScripthash_getHistory(scripthash);
memoryCache.set('Scripthash_getHistory', scripthash, history, 2);
}
if (!history) {
throw new Error('failed to get scripthash history');
}
history.sort((a, b) => (b.height || 9999999) - (a.height || 9999999));
let startingIndex = 0;
if (lastSeenTxId) {
const pos = history.findIndex((historicalTx) => historicalTx.tx_hash === lastSeenTxId);
if (pos) {
startingIndex = pos + 1;
}
}
const endIndex = Math.min(startingIndex + 10, history.length);
for (let i = startingIndex; i < endIndex; i++) {
const tx = await this.$getRawTransaction(history[i].tx_hash, false, true);
transactions.push(tx);
loadingIndicators.setProgress('address-' + scripthash, (i + 1) / endIndex * 100);
}
return transactions;
} catch (e: any) {
loadingIndicators.setProgress('address-' + scripthash, 100);
throw new Error(typeof e === 'string' ? e : e && e.message || e);
}
}
private $getScriptHashBalance(scriptHash: string): Promise<IElectrumApi.ScriptHashBalance> {
return this.electrumClient.blockchainScripthash_getBalance(this.encodeScriptHash(scriptHash));
}

View File

@@ -89,6 +89,7 @@ export namespace IEsploraApi {
weight: number;
previousblockhash: string;
mediantime: number;
stale: boolean;
}
export interface Address {
@@ -98,6 +99,13 @@ export namespace IEsploraApi {
electrum?: boolean;
}
export interface ScriptHash {
scripthash: string;
chain_stats: ChainStats;
mempool_stats: MempoolStats;
electrum?: boolean;
}
export interface ChainStats {
funded_txo_count: number;
funded_txo_sum: number;

View File

@@ -3,65 +3,102 @@ import axios, { AxiosRequestConfig } from 'axios';
import http from 'http';
import { AbstractBitcoinApi } from './bitcoin-api-abstract-factory';
import { IEsploraApi } from './esplora-api.interface';
import logger from '../../logger';
const axiosConnection = axios.create({
httpAgent: new http.Agent({ keepAlive: true })
httpAgent: new http.Agent({ keepAlive: true, })
});
class ElectrsApi implements AbstractBitcoinApi {
axiosConfig: AxiosRequestConfig = {
private axiosConfigWithUnixSocket: AxiosRequestConfig = config.ESPLORA.UNIX_SOCKET_PATH ? {
socketPath: config.ESPLORA.UNIX_SOCKET_PATH,
timeout: 10000,
} : {
timeout: 10000,
};
private axiosConfigTcpSocketOnly: AxiosRequestConfig = {
timeout: 10000,
};
constructor() { }
unixSocketRetryTimeout;
activeAxiosConfig;
constructor() {
this.activeAxiosConfig = this.axiosConfigWithUnixSocket;
}
fallbackToTcpSocket() {
if (!this.unixSocketRetryTimeout) {
logger.err(`Unable to connect to esplora unix socket. Falling back to tcp socket. Retrying unix socket in ${config.ESPLORA.RETRY_UNIX_SOCKET_AFTER / 1000} seconds`);
// Retry the unix socket after a few seconds
this.unixSocketRetryTimeout = setTimeout(() => {
logger.info(`Retrying to use unix socket for esplora now (applied for the next query)`);
this.activeAxiosConfig = this.axiosConfigWithUnixSocket;
this.unixSocketRetryTimeout = undefined;
}, config.ESPLORA.RETRY_UNIX_SOCKET_AFTER);
}
// Use the TCP socket (reach a different esplora instance through nginx)
this.activeAxiosConfig = this.axiosConfigTcpSocketOnly;
}
$queryWrapper<T>(url, responseType = 'json'): Promise<T> {
return axiosConnection.get<T>(url, { ...this.activeAxiosConfig, responseType: responseType })
.then((response) => response.data)
.catch((e) => {
if (e?.code === 'ECONNREFUSED') {
this.fallbackToTcpSocket();
// Retry immediately
return axiosConnection.get<T>(url, this.activeAxiosConfig)
.then((response) => response.data)
.catch((e) => {
logger.warn(`Cannot query esplora through the unix socket nor the tcp socket. Exception ${e}`);
throw e;
});
} else {
throw e;
}
});
}
$getRawMempool(): Promise<IEsploraApi.Transaction['txid'][]> {
return axiosConnection.get<IEsploraApi.Transaction['txid'][]>(config.ESPLORA.REST_API_URL + '/mempool/txids', this.axiosConfig)
.then((response) => response.data);
return this.$queryWrapper<IEsploraApi.Transaction['txid'][]>(config.ESPLORA.REST_API_URL + '/mempool/txids');
}
$getRawTransaction(txId: string): Promise<IEsploraApi.Transaction> {
return axiosConnection.get<IEsploraApi.Transaction>(config.ESPLORA.REST_API_URL + '/tx/' + txId, this.axiosConfig)
.then((response) => response.data);
return this.$queryWrapper<IEsploraApi.Transaction>(config.ESPLORA.REST_API_URL + '/tx/' + txId);
}
$getTransactionHex(txId: string): Promise<string> {
return axiosConnection.get<string>(config.ESPLORA.REST_API_URL + '/tx/' + txId + '/hex', this.axiosConfig)
.then((response) => response.data);
return this.$queryWrapper<string>(config.ESPLORA.REST_API_URL + '/tx/' + txId + '/hex');
}
$getBlockHeightTip(): Promise<number> {
return axiosConnection.get<number>(config.ESPLORA.REST_API_URL + '/blocks/tip/height', this.axiosConfig)
.then((response) => response.data);
return this.$queryWrapper<number>(config.ESPLORA.REST_API_URL + '/blocks/tip/height');
}
$getBlockHashTip(): Promise<string> {
return axiosConnection.get<string>(config.ESPLORA.REST_API_URL + '/blocks/tip/hash', this.axiosConfig)
.then((response) => response.data);
return this.$queryWrapper<string>(config.ESPLORA.REST_API_URL + '/blocks/tip/hash');
}
$getTxIdsForBlock(hash: string): Promise<string[]> {
return axiosConnection.get<string[]>(config.ESPLORA.REST_API_URL + '/block/' + hash + '/txids', this.axiosConfig)
.then((response) => response.data);
return this.$queryWrapper<string[]>(config.ESPLORA.REST_API_URL + '/block/' + hash + '/txids');
}
$getBlockHash(height: number): Promise<string> {
return axiosConnection.get<string>(config.ESPLORA.REST_API_URL + '/block-height/' + height, this.axiosConfig)
.then((response) => response.data);
return this.$queryWrapper<string>(config.ESPLORA.REST_API_URL + '/block-height/' + height);
}
$getBlockHeader(hash: string): Promise<string> {
return axiosConnection.get<string>(config.ESPLORA.REST_API_URL + '/block/' + hash + '/header', this.axiosConfig)
.then((response) => response.data);
return this.$queryWrapper<string>(config.ESPLORA.REST_API_URL + '/block/' + hash + '/header');
}
$getBlock(hash: string): Promise<IEsploraApi.Block> {
return axiosConnection.get<IEsploraApi.Block>(config.ESPLORA.REST_API_URL + '/block/' + hash, this.axiosConfig)
.then((response) => response.data);
return this.$queryWrapper<IEsploraApi.Block>(config.ESPLORA.REST_API_URL + '/block/' + hash);
}
$getRawBlock(hash: string): Promise<Buffer> {
return axiosConnection.get<string>(config.ESPLORA.REST_API_URL + '/block/' + hash + "/raw", { ...this.axiosConfig, responseType: 'arraybuffer' })
return this.$queryWrapper<any>(config.ESPLORA.REST_API_URL + '/block/' + hash + "/raw", 'arraybuffer')
.then((response) => { return Buffer.from(response.data); });
}
@@ -73,6 +110,14 @@ class ElectrsApi implements AbstractBitcoinApi {
throw new Error('Method getAddressTransactions not implemented.');
}
$getScriptHash(scripthash: string): Promise<IEsploraApi.ScriptHash> {
throw new Error('Method getScriptHash not implemented.');
}
$getScriptHashTransactions(scripthash: string, txId?: string): Promise<IEsploraApi.Transaction[]> {
throw new Error('Method getScriptHashTransactions not implemented.');
}
$getAddressPrefix(prefix: string): string[] {
throw new Error('Method not implemented.');
}
@@ -82,13 +127,11 @@ class ElectrsApi implements AbstractBitcoinApi {
}
$getOutspend(txId: string, vout: number): Promise<IEsploraApi.Outspend> {
return axiosConnection.get<IEsploraApi.Outspend>(config.ESPLORA.REST_API_URL + '/tx/' + txId + '/outspend/' + vout, this.axiosConfig)
.then((response) => response.data);
return this.$queryWrapper<IEsploraApi.Outspend>(config.ESPLORA.REST_API_URL + '/tx/' + txId + '/outspend/' + vout);
}
$getOutspends(txId: string): Promise<IEsploraApi.Outspend[]> {
return axiosConnection.get<IEsploraApi.Outspend[]>(config.ESPLORA.REST_API_URL + '/tx/' + txId + '/outspends', this.axiosConfig)
.then((response) => response.data);
return this.$queryWrapper<IEsploraApi.Outspend[]>(config.ESPLORA.REST_API_URL + '/tx/' + txId + '/outspends');
}
async $getBatchedOutspends(txId: string[]): Promise<IEsploraApi.Outspend[][]> {

View File

@@ -2,7 +2,7 @@ import config from '../config';
import bitcoinApi, { bitcoinCoreApi } from './bitcoin/bitcoin-api-factory';
import logger from '../logger';
import memPool from './mempool';
import { BlockExtended, BlockExtension, BlockSummary, PoolTag, TransactionExtended, TransactionStripped, TransactionMinerInfo } from '../mempool.interfaces';
import { BlockExtended, BlockExtension, BlockSummary, PoolTag, TransactionExtended, TransactionStripped, TransactionMinerInfo, CpfpSummary, MempoolTransactionExtended } from '../mempool.interfaces';
import { Common } from './common';
import diskCache from './disk-cache';
import transactionUtils from './transaction-utils';
@@ -25,6 +25,7 @@ import DifficultyAdjustmentsRepository from '../repositories/DifficultyAdjustmen
import PricesRepository from '../repositories/PricesRepository';
import priceUpdater from '../tasks/price-updater';
import chainTips from './chain-tips';
import websocketHandler from './websocket-handler';
class Blocks {
private blocks: BlockExtended[] = [];
@@ -34,7 +35,9 @@ class Blocks {
private lastDifficultyAdjustmentTime = 0;
private previousDifficultyRetarget = 0;
private newBlockCallbacks: ((block: BlockExtended, txIds: string[], transactions: TransactionExtended[]) => void)[] = [];
private newAsyncBlockCallbacks: ((block: BlockExtended, txIds: string[], transactions: TransactionExtended[]) => Promise<void>)[] = [];
private newAsyncBlockCallbacks: ((block: BlockExtended, txIds: string[], transactions: MempoolTransactionExtended[]) => Promise<void>)[] = [];
private mainLoopTimeout: number = 120000;
constructor() { }
@@ -58,7 +61,7 @@ class Blocks {
this.newBlockCallbacks.push(fn);
}
public setNewAsyncBlockCallback(fn: (block: BlockExtended, txIds: string[], transactions: TransactionExtended[]) => Promise<void>) {
public setNewAsyncBlockCallback(fn: (block: BlockExtended, txIds: string[], transactions: MempoolTransactionExtended[]) => Promise<void>) {
this.newAsyncBlockCallbacks.push(fn);
}
@@ -73,10 +76,14 @@ class Blocks {
blockHash: string,
blockHeight: number,
onlyCoinbase: boolean,
txIds: string[] | null = null,
quiet: boolean = false,
addMempoolData: boolean = false,
): Promise<TransactionExtended[]> {
const transactions: TransactionExtended[] = [];
const txIds: string[] = await bitcoinApi.$getTxIdsForBlock(blockHash);
if (!txIds) {
txIds = await bitcoinApi.$getTxIdsForBlock(blockHash);
}
const mempool = memPool.getMempool();
let transactionsFound = 0;
@@ -94,14 +101,14 @@ class Blocks {
logger.debug(`Indexing tx ${i + 1} of ${txIds.length} in block #${blockHeight}`);
}
try {
const tx = await transactionUtils.$getTransactionExtended(txIds[i]);
const tx = await transactionUtils.$getTransactionExtended(txIds[i], false, false, false, addMempoolData);
transactions.push(tx);
transactionsFetched++;
} catch (e) {
try {
if (config.MEMPOOL.BACKEND === 'esplora') {
// Try again with core
const tx = await transactionUtils.$getTransactionExtended(txIds[i], false, false, true);
const tx = await transactionUtils.$getTransactionExtended(txIds[i], false, false, true, addMempoolData);
transactions.push(tx);
transactionsFetched++;
} else {
@@ -124,12 +131,6 @@ class Blocks {
}
}
transactions.forEach((tx) => {
if (!tx.cpfpChecked) {
Common.setRelativesAndGetCpfpInfo(tx, mempool); // Child Pay For Parent
}
});
if (!quiet) {
logger.debug(`${transactionsFound} of ${txIds.length} found in mempool. ${transactionsFetched} fetched through backend service.`);
}
@@ -161,9 +162,18 @@ class Blocks {
};
}
public summarizeBlockTransactions(hash: string, transactions: TransactionExtended[]): BlockSummary {
return {
id: hash,
transactions: Common.stripTransactions(transactions),
};
}
private convertLiquidFees(block: IBitcoinApi.VerboseBlock): IBitcoinApi.VerboseBlock {
block.tx.forEach(tx => {
tx.fee = Object.values(tx.fee || {}).reduce((total, output) => total + output, 0);
if (!isFinite(Number(tx.fee))) {
tx.fee = Object.values(tx.fee || {}).reduce((total, output) => total + output, 0);
}
});
return block;
}
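For context, Liquid's verbose blocks report a per-asset fee map rather than a single number, which the helper above flattens into one value, e.g. (asset id is a placeholder):
const liquidFee = { '<L-BTC asset id>': 0.0000025 };
const flatFee = Object.values(liquidFee).reduce((total, output) => total + output, 0); // 0.0000025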
@@ -200,8 +210,15 @@ class Blocks {
extras.segwitTotalWeight = 0;
} else {
const stats: IBitcoinApi.BlockStats = await bitcoinClient.getBlockStats(block.id);
extras.medianFee = stats.feerate_percentiles[2]; // 50th percentile
extras.feeRange = [stats.minfeerate, stats.feerate_percentiles, stats.maxfeerate].flat();
let feeStats = {
medianFee: stats.feerate_percentiles[2], // 50th percentile
feeRange: [stats.minfeerate, stats.feerate_percentiles, stats.maxfeerate].flat(),
};
if (transactions?.length > 1) {
feeStats = Common.calcEffectiveFeeStatistics(transactions);
}
extras.medianFee = feeStats.medianFee;
extras.feeRange = feeStats.feeRange;
extras.totalFees = stats.totalfee;
extras.avgFee = stats.avgfee;
extras.avgFeeRate = stats.avgfeerate;
@@ -246,7 +263,7 @@ class Blocks {
extras.totalInputAmt = null;
}
if (['mainnet', 'testnet', 'signet'].includes(config.MEMPOOL.NETWORK)) {
if (['mainnet', 'testnet', 'signet', 'regtest'].includes(config.MEMPOOL.NETWORK)) {
let pool: PoolTag;
if (coinbaseTx !== undefined) {
pool = await this.$findBlockMiner(coinbaseTx);
@@ -270,10 +287,14 @@ class Blocks {
}
extras.matchRate = null;
extras.expectedFees = null;
extras.expectedWeight = null;
if (config.MEMPOOL.AUDIT) {
const auditScore = await BlocksAuditsRepository.$getBlockAuditScore(block.id);
if (auditScore != null) {
extras.matchRate = auditScore.matchRate;
extras.expectedFees = auditScore.expectedFees;
extras.expectedWeight = auditScore.expectedWeight;
}
}
}
@@ -297,7 +318,7 @@ class Blocks {
}
const asciiScriptSig = transactionUtils.hex2ascii(txMinerInfo.vin[0].scriptsig);
const address = txMinerInfo.vout[0].scriptpubkey_address;
const addresses = txMinerInfo.vout.map((vout) => vout.scriptpubkey_address).filter((address) => address);
let pools: PoolTag[] = [];
if (config.DATABASE.ENABLED === true) {
@@ -307,11 +328,13 @@ class Blocks {
}
for (let i = 0; i < pools.length; ++i) {
if (address !== undefined) {
const addresses: string[] = typeof pools[i].addresses === 'string' ?
if (addresses.length) {
const poolAddresses: string[] = typeof pools[i].addresses === 'string' ?
JSON.parse(pools[i].addresses) : pools[i].addresses;
if (addresses.indexOf(address) !== -1) {
return pools[i];
for (let y = 0; y < poolAddresses.length; y++) {
if (addresses.indexOf(poolAddresses[y]) !== -1) {
return pools[i];
}
}
}
@@ -403,12 +426,13 @@ class Blocks {
try {
// Get all indexed block hash
const unindexedBlockHeights = await blocksRepository.$getCPFPUnindexedBlocks();
logger.info(`Indexing cpfp data for ${unindexedBlockHeights.length} blocks`);
if (!unindexedBlockHeights?.length) {
return;
}
logger.info(`Indexing cpfp data for ${unindexedBlockHeights.length} blocks`);
// Logging
let count = 0;
let countThisRun = 0;
@@ -440,6 +464,46 @@ class Blocks {
}
}
/**
* [INDEXING] Index expected fees & weight for all audited blocks
*/
public async $generateAuditStats(): Promise<void> {
const blockIds = await BlocksAuditsRepository.$getBlocksWithoutSummaries();
if (!blockIds?.length) {
return;
}
let timer = Date.now();
let indexedThisRun = 0;
let indexedTotal = 0;
logger.debug(`Indexing ${blockIds.length} block audit details`);
for (const hash of blockIds) {
const summary = await BlocksSummariesRepository.$getTemplate(hash);
let totalFees = 0;
let totalWeight = 0;
for (const tx of summary?.transactions || []) {
totalFees += tx.fee;
totalWeight += (tx.vsize * 4);
}
await BlocksAuditsRepository.$setSummary(hash, totalFees, totalWeight);
const cachedBlock = this.blocks.find(block => block.id === hash);
if (cachedBlock) {
cachedBlock.extras.expectedFees = totalFees;
cachedBlock.extras.expectedWeight = totalWeight;
}
indexedThisRun++;
indexedTotal++;
const elapsedSeconds = (Date.now() - timer) / 1000;
if (elapsedSeconds > 5) {
const blockPerSeconds = indexedThisRun / elapsedSeconds;
logger.debug(`Indexed ${indexedTotal} / ${blockIds.length} block audit details (${blockPerSeconds.toFixed(1)}/s)`);
timer = Date.now();
indexedThisRun = 0;
}
}
logger.debug(`Indexing block audit details completed`);
}
/**
* [INDEXING] Index all blocks metadata for the mining dashboard
*/
@@ -495,7 +559,7 @@ class Blocks {
}
const blockHash = await bitcoinApi.$getBlockHash(blockHeight);
const block: IEsploraApi.Block = await bitcoinCoreApi.$getBlock(blockHash);
const transactions = await this.$getTransactionsExtended(blockHash, block.height, true, true);
const transactions = await this.$getTransactionsExtended(blockHash, block.height, true, null, true);
const blockExtended = await this.$getBlockExtended(block, transactions);
newlyIndexed++;
@@ -519,9 +583,16 @@ class Blocks {
return await BlocksRepository.$validateChain();
}
public async $updateBlocks() {
public async $updateBlocks(): Promise<number> {
// warn if this run stalls the main loop for more than 2 minutes
const timer = this.startTimer();
diskCache.lock();
let fastForwarded = false;
const blockHeightTip = await bitcoinApi.$getBlockHeightTip();
let handledBlocks = 0;
const blockHeightTip = await bitcoinCoreApi.$getBlockHeightTip();
this.updateTimerProgress(timer, 'got block height tip');
if (this.blocks.length === 0) {
this.currentBlockHeight = Math.max(blockHeightTip - config.MEMPOOL.INITIAL_BLOCKS_AMOUNT, -1);
@@ -539,16 +610,21 @@ class Blocks {
if (!this.lastDifficultyAdjustmentTime) {
const blockchainInfo = await bitcoinClient.getBlockchainInfo();
this.updateTimerProgress(timer, 'got blockchain info for initial difficulty adjustment');
if (blockchainInfo.blocks === blockchainInfo.headers) {
const heightDiff = blockHeightTip % 2016;
const blockHash = await bitcoinApi.$getBlockHash(blockHeightTip - heightDiff);
this.updateTimerProgress(timer, 'got block hash for initial difficulty adjustment');
const block: IEsploraApi.Block = await bitcoinCoreApi.$getBlock(blockHash);
this.updateTimerProgress(timer, 'got block for initial difficulty adjustment');
this.lastDifficultyAdjustmentTime = block.timestamp;
this.currentDifficulty = block.difficulty;
if (blockHeightTip >= 2016) {
const previousPeriodBlockHash = await bitcoinApi.$getBlockHash(blockHeightTip - heightDiff - 2016);
this.updateTimerProgress(timer, 'got previous block hash for initial difficulty adjustment');
const previousPeriodBlock: IEsploraApi.Block = await bitcoinCoreApi.$getBlock(previousPeriodBlockHash);
this.updateTimerProgress(timer, 'got previous block for initial difficulty adjustment');
this.previousDifficultyRetarget = (block.difficulty - previousPeriodBlock.difficulty) / previousPeriodBlock.difficulty * 100;
logger.debug(`Initial difficulty adjustment data set.`);
}
@@ -558,57 +634,83 @@ class Blocks {
}
while (this.currentBlockHeight < blockHeightTip) {
if (this.currentBlockHeight < blockHeightTip - config.MEMPOOL.INITIAL_BLOCKS_AMOUNT) {
if (this.currentBlockHeight === 0) {
this.currentBlockHeight = blockHeightTip;
} else {
this.currentBlockHeight++;
logger.debug(`New block found (#${this.currentBlockHeight})!`);
this.updateTimerProgress(timer, `getting orphaned blocks for ${this.currentBlockHeight}`);
await chainTips.updateOrphanedBlocks();
}
const blockHash = await bitcoinApi.$getBlockHash(this.currentBlockHeight);
this.updateTimerProgress(timer, `getting block data for ${this.currentBlockHeight}`);
const blockHash = await bitcoinCoreApi.$getBlockHash(this.currentBlockHeight);
const verboseBlock = await bitcoinClient.getBlock(blockHash, 2);
const block = BitcoinApi.convertBlock(verboseBlock);
const txIds: string[] = await bitcoinApi.$getTxIdsForBlock(blockHash);
const transactions = await this.$getTransactionsExtended(blockHash, block.height, false);
const blockExtended: BlockExtended = await this.$getBlockExtended(block, transactions);
const blockSummary: BlockSummary = this.summarizeBlock(verboseBlock);
// start async callbacks
const callbackPromises = this.newAsyncBlockCallbacks.map((cb) => cb(blockExtended, txIds, transactions));
const txIds: string[] = verboseBlock.tx.map(tx => tx.txid);
const transactions = await this.$getTransactionsExtended(blockHash, block.height, false, txIds, false, true) as MempoolTransactionExtended[];
if (config.MEMPOOL.BACKEND !== 'esplora') {
// fill in missing transaction fee data from verboseBlock
for (let i = 0; i < transactions.length; i++) {
if (!transactions[i].fee && transactions[i].txid === verboseBlock.tx[i].txid) {
transactions[i].fee = verboseBlock.tx[i].fee * 100_000_000;
}
}
}
const cpfpSummary: CpfpSummary = Common.calculateCpfp(block.height, transactions);
const blockExtended: BlockExtended = await this.$getBlockExtended(block, cpfpSummary.transactions);
const blockSummary: BlockSummary = this.summarizeBlockTransactions(block.id, cpfpSummary.transactions);
this.updateTimerProgress(timer, `got block data for ${this.currentBlockHeight}`);
if (Common.indexingEnabled()) {
if (!fastForwarded) {
const lastBlock = await blocksRepository.$getBlockByHeight(blockExtended.height - 1);
this.updateTimerProgress(timer, `got block by height for ${this.currentBlockHeight}`);
if (lastBlock !== null && blockExtended.previousblockhash !== lastBlock.id) {
logger.warn(`Chain divergence detected at block ${lastBlock.height}, re-indexing most recent data`);
logger.warn(`Chain divergence detected at block ${lastBlock.height}, re-indexing most recent data`, logger.tags.mining);
// We assume there won't be a reorg with more than 10 block depth
this.updateTimerProgress(timer, `rolling back diverged chain from ${this.currentBlockHeight}`);
await BlocksRepository.$deleteBlocksFrom(lastBlock.height - 10);
await HashratesRepository.$deleteLastEntries();
await BlocksSummariesRepository.$deleteBlocksFrom(lastBlock.height - 10);
await cpfpRepository.$deleteClustersFrom(lastBlock.height - 10);
this.blocks = this.blocks.slice(0, -10);
this.updateTimerProgress(timer, `rolled back chain divergence from ${this.currentBlockHeight}`);
for (let i = 10; i >= 0; --i) {
const newBlock = await this.$indexBlock(lastBlock.height - i);
await this.$getStrippedBlockTransactions(newBlock.id, true, true);
this.blocks.push(newBlock);
this.updateTimerProgress(timer, `reindexed block`);
let cpfpSummary;
if (config.MEMPOOL.CPFP_INDEXING) {
await this.$indexCPFP(newBlock.id, lastBlock.height - i);
cpfpSummary = await this.$indexCPFP(newBlock.id, lastBlock.height - i);
this.updateTimerProgress(timer, `reindexed block cpfp`);
}
await this.$getStrippedBlockTransactions(newBlock.id, true, true, cpfpSummary, newBlock.height);
this.updateTimerProgress(timer, `reindexed block summary`);
}
await mining.$indexDifficultyAdjustments();
await DifficultyAdjustmentsRepository.$deleteLastAdjustment();
logger.info(`Re-indexed 10 blocks and summaries. Also re-indexed the last difficulty adjustments. Will re-index latest hashrates in a few seconds.`);
this.updateTimerProgress(timer, `reindexed difficulty adjustments`);
logger.info(`Re-indexed 10 blocks and summaries. Also re-indexed the last difficulty adjustments. Will re-index latest hashrates in a few seconds.`, logger.tags.mining);
indexer.reindex();
}
await blocksRepository.$saveBlockInDatabase(blockExtended);
websocketHandler.handleReorg();
}
}
await blocksRepository.$saveBlockInDatabase(blockExtended);
this.updateTimerProgress(timer, `saved ${this.currentBlockHeight} to database`);
if (!fastForwarded) {
const lastestPriceId = await PricesRepository.$getLatestPriceId();
this.updateTimerProgress(timer, `got latest price id ${this.currentBlockHeight}`);
if (priceUpdater.historyInserted === true && lastestPriceId !== null) {
await blocksRepository.$saveBlockPrices([{
height: blockExtended.height,
priceId: lastestPriceId,
}]);
this.updateTimerProgress(timer, `saved prices for ${this.currentBlockHeight}`);
} else {
logger.info(`Cannot save block price for ${blockExtended.height} because the price updater hasn't completed yet. Trying again in 10 seconds.`, logger.tags.mining);
logger.debug(`Cannot save block price for ${blockExtended.height} because the price updater hasn't completed yet. Trying again in 10 seconds.`, logger.tags.mining);
setTimeout(() => {
indexer.runSingleTask('blocksPrices');
}, 10000);
@@ -616,14 +718,20 @@ class Blocks {
// Save blocks summary for visualization if it's enabled
if (Common.blocksSummariesIndexingEnabled() === true) {
await this.$getStrippedBlockTransactions(blockExtended.id, true);
await this.$getStrippedBlockTransactions(blockExtended.id, true, false, cpfpSummary, blockExtended.height);
this.updateTimerProgress(timer, `saved block summary for ${this.currentBlockHeight}`);
}
if (config.MEMPOOL.CPFP_INDEXING) {
this.$indexCPFP(blockExtended.id, this.currentBlockHeight);
this.$saveCpfp(blockExtended.id, this.currentBlockHeight, cpfpSummary);
this.updateTimerProgress(timer, `saved cpfp for ${this.currentBlockHeight}`);
}
}
}
// start async callbacks
this.updateTimerProgress(timer, `starting async callbacks for ${this.currentBlockHeight}`);
const callbackPromises = this.newAsyncBlockCallbacks.map((cb) => cb(blockExtended, txIds, transactions));
if (block.height % 2016 === 0) {
if (Common.indexingEnabled()) {
await DifficultyAdjustmentsRepository.$saveAdjustments({
@@ -632,6 +740,7 @@ class Blocks {
difficulty: block.difficulty,
adjustment: Math.round((block.difficulty / this.currentDifficulty) * 1000000) / 1000000, // Remove float point noise
});
this.updateTimerProgress(timer, `saved difficulty adjustment for ${this.currentBlockHeight}`);
}
this.previousDifficultyRetarget = (block.difficulty - this.currentDifficulty) / this.currentDifficulty * 100;
@@ -639,6 +748,11 @@ class Blocks {
this.currentDifficulty = block.difficulty;
}
// wait for pending async callbacks to finish
this.updateTimerProgress(timer, `waiting for async callbacks to complete for ${this.currentBlockHeight}`);
await Promise.all(callbackPromises);
this.updateTimerProgress(timer, `async callbacks completed for ${this.currentBlockHeight}`);
this.blocks.push(blockExtended);
if (this.blocks.length > config.MEMPOOL.INITIAL_BLOCKS_AMOUNT * 4) {
this.blocks = this.blocks.slice(-config.MEMPOOL.INITIAL_BLOCKS_AMOUNT * 4);
@@ -655,8 +769,35 @@ class Blocks {
diskCache.$saveCacheToDisk();
}
// wait for pending async callbacks to finish
await Promise.all(callbackPromises);
handledBlocks++;
}
diskCache.unlock();
this.clearTimer(timer);
return handledBlocks;
}
private startTimer() {
const state: any = {
start: Date.now(),
progress: 'begin $updateBlocks',
timer: null,
};
state.timer = setTimeout(() => {
logger.err(`$updateBlocks stalled at "${state.progress}"`);
}, this.mainLoopTimeout);
return state;
}
private updateTimerProgress(state, msg) {
state.progress = msg;
}
private clearTimer(state) {
if (state.timer) {
clearTimeout(state.timer);
}
}
@@ -683,6 +824,16 @@ class Blocks {
return blockExtended;
}
public async $indexStaleBlock(hash: string): Promise<BlockExtended> {
const block: IEsploraApi.Block = await bitcoinCoreApi.$getBlock(hash);
const transactions = await this.$getTransactionsExtended(hash, block.height, true);
const blockExtended = await this.$getBlockExtended(block, transactions);
blockExtended.canonical = await bitcoinApi.$getBlockHash(block.height);
return blockExtended;
}
/**
* Get one block by its hash
*/
@@ -694,17 +845,21 @@ class Blocks {
}
// Not Bitcoin network, return the block as it from the bitcoin backend
if (['mainnet', 'testnet', 'signet'].includes(config.MEMPOOL.NETWORK) === false) {
if (['mainnet', 'testnet', 'signet', 'regtest'].includes(config.MEMPOOL.NETWORK) === false) {
return await bitcoinCoreApi.$getBlock(hash);
}
// Bitcoin network, add our custom data on top
const block: IEsploraApi.Block = await bitcoinCoreApi.$getBlock(hash);
return await this.$indexBlock(block.height);
if (block.stale) {
return await this.$indexStaleBlock(hash);
} else {
return await this.$indexBlock(block.height);
}
}
public async $getStrippedBlockTransactions(hash: string, skipMemoryCache = false,
skipDBLookup = false): Promise<TransactionStripped[]>
skipDBLookup = false, cpfpSummary?: CpfpSummary, blockHeight?: number): Promise<TransactionStripped[]>
{
if (skipMemoryCache === false) {
// Check the memory cache
@@ -722,13 +877,35 @@ class Blocks {
}
}
// Call Core RPC
const block = await bitcoinClient.getBlock(hash, 2);
const summary = this.summarizeBlock(block);
let height = blockHeight;
let summary: BlockSummary;
if (cpfpSummary && !Common.isLiquid()) {
summary = {
id: hash,
transactions: cpfpSummary.transactions.map(tx => {
return {
txid: tx.txid,
fee: tx.fee,
vsize: tx.vsize,
value: Math.round(tx.vout.reduce((acc, vout) => acc + (vout.value ? vout.value : 0), 0)),
rate: tx.effectiveFeePerVsize
};
}),
};
} else {
// Call Core RPC
const block = await bitcoinClient.getBlock(hash, 2);
summary = this.summarizeBlock(block);
height = block.height;
}
if (height == null) {
const block = await bitcoinApi.$getBlock(hash);
height = block.height;
}
// Index the response if needed
if (Common.blocksSummariesIndexingEnabled() === true) {
await BlocksSummariesRepository.$saveSummary({height: block.height, mined: summary});
await BlocksSummariesRepository.$saveTransactions(height, hash, summary.transactions);
}
return summary.transactions;
@@ -844,11 +1021,12 @@ class Blocks {
if (cleanBlock.fee_amt_percentiles === null) {
const block = await bitcoinClient.getBlock(cleanBlock.hash, 2);
const summary = this.summarizeBlock(block);
await BlocksSummariesRepository.$saveSummary({ height: block.height, mined: summary });
await BlocksSummariesRepository.$saveTransactions(cleanBlock.height, cleanBlock.hash, summary.transactions);
cleanBlock.fee_amt_percentiles = await BlocksSummariesRepository.$getFeePercentilesByBlockId(cleanBlock.hash);
}
if (cleanBlock.fee_amt_percentiles !== null) {
cleanBlock.median_fee_amt = cleanBlock.fee_amt_percentiles[3];
await blocksRepository.$updateFeeAmounts(cleanBlock.hash, cleanBlock.fee_amt_percentiles, cleanBlock.median_fee_amt);
}
}
@@ -883,19 +1061,11 @@ class Blocks {
}
public async $getBlockAuditSummary(hash: string): Promise<any> {
let summary;
if (['mainnet', 'testnet', 'signet'].includes(config.MEMPOOL.NETWORK)) {
summary = await BlocksAuditsRepository.$getBlockAudit(hash);
if (['mainnet', 'testnet', 'signet', 'regtest'].includes(config.MEMPOOL.NETWORK)) {
return BlocksAuditsRepository.$getBlockAudit(hash);
} else {
return null;
}
// fallback to non-audited transaction summary
if (!summary?.transactions?.length) {
const strippedTransactions = await this.$getStrippedBlockTransactions(hash);
summary = {
transactions: strippedTransactions
};
}
return summary;
}
public getLastDifficultyAdjustmentTime(): number {
@@ -913,44 +1083,26 @@ class Blocks {
public async $indexCPFP(hash: string, height: number): Promise<void> {
const block = await bitcoinClient.getBlock(hash, 2);
const transactions = block.tx.map(tx => {
tx.vsize = tx.weight / 4;
tx.fee *= 100_000_000;
return tx;
});
const clusters: any[] = [];
const summary = Common.calculateCpfp(height, transactions);
let cluster: TransactionStripped[] = [];
let ancestors: { [txid: string]: boolean } = {};
for (let i = transactions.length - 1; i >= 0; i--) {
const tx = transactions[i];
if (!ancestors[tx.txid]) {
let totalFee = 0;
let totalVSize = 0;
cluster.forEach(tx => {
totalFee += tx?.fee || 0;
totalVSize += tx.vsize;
});
const effectiveFeePerVsize = totalFee / totalVSize;
if (cluster.length > 1) {
clusters.push({
root: cluster[0].txid,
height,
txs: cluster.map(tx => { return { txid: tx.txid, weight: tx.vsize * 4, fee: tx.fee || 0 }; }),
effectiveFeePerVsize,
});
}
cluster = [];
ancestors = {};
await this.$saveCpfp(hash, height, summary);
const effectiveFeeStats = Common.calcEffectiveFeeStatistics(summary.transactions);
await blocksRepository.$saveEffectiveFeeStats(hash, effectiveFeeStats);
}
public async $saveCpfp(hash: string, height: number, cpfpSummary: CpfpSummary): Promise<void> {
try {
const result = await cpfpRepository.$batchSaveClusters(cpfpSummary.clusters);
if (!result) {
await cpfpRepository.$insertProgressMarker(height);
}
cluster.push(tx);
tx.vin.forEach(vin => {
ancestors[vin.txid] = true;
});
}
const result = await cpfpRepository.$batchSaveClusters(clusters);
if (!result) {
await cpfpRepository.$insertProgressMarker(height);
} catch (e) {
// not a fatal error, we'll try again next time the indexer runs
}
}
}

View File

@@ -1,4 +1,6 @@
import { CpfpInfo, MempoolBlockWithTransactions, TransactionExtended, TransactionStripped } from '../mempool.interfaces';
import * as bitcoinjs from 'bitcoinjs-lib';
import { Request } from 'express';
import { Ancestor, CpfpInfo, CpfpSummary, CpfpCluster, EffectiveFeeStats, MempoolBlockWithTransactions, TransactionExtended, MempoolTransactionExtended, TransactionStripped, WorkingEffectiveFeeStats } from '../mempool.interfaces';
import config from '../config';
import { NodeSocket } from '../repositories/NodesSocketsRepository';
import { isIP } from 'net';
@@ -57,35 +59,66 @@ export class Common {
return arr;
}
static findRbfTransactions(added: TransactionExtended[], deleted: TransactionExtended[]): { [txid: string]: TransactionExtended } {
const matches: { [txid: string]: TransactionExtended } = {};
deleted
.forEach((deletedTx) => {
const foundMatches = added.find((addedTx) => {
static findRbfTransactions(added: MempoolTransactionExtended[], deleted: MempoolTransactionExtended[]): { [txid: string]: MempoolTransactionExtended[] } {
const matches: { [txid: string]: MempoolTransactionExtended[] } = {};
added
.forEach((addedTx) => {
const foundMatches = deleted.filter((deletedTx) => {
// The new tx must, absolutely speaking, pay at least as much fee as the replaced tx.
return addedTx.fee > deletedTx.fee
// The new transaction must pay more fee per kB than the replaced tx.
&& addedTx.feePerVsize > deletedTx.feePerVsize
&& addedTx.adjustedFeePerVsize > deletedTx.adjustedFeePerVsize
// Spends one or more of the same inputs
&& deletedTx.vin.some((deletedVin) =>
addedTx.vin.some((vin) => vin.txid === deletedVin.txid && vin.vout === deletedVin.vout));
});
if (foundMatches) {
matches[deletedTx.txid] = foundMatches;
if (foundMatches?.length) {
matches[addedTx.txid] = foundMatches;
}
});
return matches;
}
static findMinedRbfTransactions(minedTransactions: TransactionExtended[], spendMap: Map<string, MempoolTransactionExtended>): { [txid: string]: { replaced: MempoolTransactionExtended[], replacedBy: TransactionExtended }} {
const matches: { [txid: string]: { replaced: MempoolTransactionExtended[], replacedBy: TransactionExtended }} = {};
for (const tx of minedTransactions) {
const replaced: Set<MempoolTransactionExtended> = new Set();
for (let i = 0; i < tx.vin.length; i++) {
const vin = tx.vin[i];
const match = spendMap.get(`${vin.txid}:${vin.vout}`);
if (match && match.txid !== tx.txid) {
replaced.add(match);
// remove this tx from the spendMap
// prevents the same tx being replaced more than once
for (const replacedVin of match.vin) {
const key = `${replacedVin.txid}:${replacedVin.vout}`;
spendMap.delete(key);
}
}
const key = `${vin.txid}:${vin.vout}`;
spendMap.delete(key);
}
if (replaced.size) {
matches[tx.txid] = { replaced: Array.from(replaced), replacedBy: tx };
}
}
return matches;
}
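findMinedRbfTransactions expects a spend map keyed by outpoint; a hedged sketch of how such a map could be built from the mempool (using the MempoolTransactionExtended interface imported above):
function buildSpendMap(mempool: { [txid: string]: MempoolTransactionExtended }): Map<string, MempoolTransactionExtended> {
  const spendMap = new Map<string, MempoolTransactionExtended>();
  for (const tx of Object.values(mempool)) {
    for (const vin of tx.vin) {
      // key each spent outpoint so a mined tx spending the same outpoint reveals a replacement
      spendMap.set(`${vin.txid}:${vin.vout}`, tx);
    }
  }
  return spendMap;
}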
static stripTransaction(tx: TransactionExtended): TransactionStripped {
return {
txid: tx.txid,
fee: tx.fee,
vsize: tx.weight / 4,
value: tx.vout.reduce((acc, vout) => acc + (vout.value ? vout.value : 0), 0),
rate: tx.effectiveFeePerVsize,
};
}
static stripTransactions(txs: TransactionExtended[]): TransactionStripped[] {
return txs.map(this.stripTransaction);
}
static sleep$(ms: number): Promise<void> {
return new Promise((resolve) => {
setTimeout(() => {
@@ -101,18 +134,18 @@ export class Common {
}
}
static setRelativesAndGetCpfpInfo(tx: TransactionExtended, memPool: { [txid: string]: TransactionExtended }): CpfpInfo {
static setRelativesAndGetCpfpInfo(tx: MempoolTransactionExtended, memPool: { [txid: string]: MempoolTransactionExtended }): CpfpInfo {
const parents = this.findAllParents(tx, memPool);
const lowerFeeParents = parents.filter((parent) => parent.feePerVsize < tx.effectiveFeePerVsize);
const lowerFeeParents = parents.filter((parent) => parent.adjustedFeePerVsize < tx.effectiveFeePerVsize);
let totalWeight = tx.weight + lowerFeeParents.reduce((prev, val) => prev + val.weight, 0);
let totalWeight = (tx.adjustedVsize * 4) + lowerFeeParents.reduce((prev, val) => prev + (val.adjustedVsize * 4), 0);
let totalFees = tx.fee + lowerFeeParents.reduce((prev, val) => prev + val.fee, 0);
tx.ancestors = parents
.map((t) => {
return {
txid: t.txid,
weight: t.weight,
weight: (t.adjustedVsize * 4),
fee: t.fee,
};
});
@@ -133,8 +166,8 @@ export class Common {
}
private static findAllParents(tx: TransactionExtended, memPool: { [txid: string]: TransactionExtended }): TransactionExtended[] {
let parents: TransactionExtended[] = [];
private static findAllParents(tx: MempoolTransactionExtended, memPool: { [txid: string]: MempoolTransactionExtended }): MempoolTransactionExtended[] {
let parents: MempoolTransactionExtended[] = [];
tx.vin.forEach((parent) => {
if (parents.find((p) => p.txid === parent.txid)) {
return;
@@ -142,17 +175,17 @@ export class Common {
const parentTx = memPool[parent.txid];
if (parentTx) {
if (tx.bestDescendant && tx.bestDescendant.fee / (tx.bestDescendant.weight / 4) > parentTx.feePerVsize) {
if (tx.bestDescendant && tx.bestDescendant.fee / (tx.bestDescendant.weight / 4) > parentTx.adjustedFeePerVsize) {
if (parentTx.bestDescendant && parentTx.bestDescendant.fee < tx.fee + tx.bestDescendant.fee) {
parentTx.bestDescendant = {
weight: tx.weight + tx.bestDescendant.weight,
weight: (tx.adjustedVsize * 4) + tx.bestDescendant.weight,
fee: tx.fee + tx.bestDescendant.fee,
txid: tx.txid,
};
}
} else if (tx.feePerVsize > parentTx.feePerVsize) {
} else if (tx.adjustedFeePerVsize > parentTx.adjustedFeePerVsize) {
parentTx.bestDescendant = {
weight: tx.weight,
weight: (tx.adjustedVsize * 4),
fee: tx.fee,
txid: tx.txid
};
@@ -206,7 +239,7 @@ export class Common {
static indexingEnabled(): boolean {
return (
['mainnet', 'testnet', 'signet'].includes(config.MEMPOOL.NETWORK) &&
['mainnet', 'testnet', 'signet', 'regtest'].includes(config.MEMPOOL.NETWORK) &&
config.DATABASE.ENABLED === true &&
config.MEMPOOL.INDEXING_BLOCKS_AMOUNT !== 0
);
@@ -345,4 +378,364 @@ export class Common {
};
}
}
static calculateCpfp(height: number, transactions: TransactionExtended[]): CpfpSummary {
const clusters: CpfpCluster[] = []; // list of all cpfp clusters in this block
const clusterMap: { [txid: string]: CpfpCluster } = {}; // map transactions to their cpfp cluster
let clusterTxs: TransactionExtended[] = []; // working list of elements of the current cluster
let ancestors: { [txid: string]: boolean } = {}; // working set of ancestors of the current cluster root
const txMap = {};
// initialize the txMap
for (const tx of transactions) {
txMap[tx.txid] = tx;
}
// reverse pass to identify CPFP clusters
for (let i = transactions.length - 1; i >= 0; i--) {
const tx = transactions[i];
if (!ancestors[tx.txid]) {
let totalFee = 0;
let totalVSize = 0;
clusterTxs.forEach(tx => {
totalFee += tx?.fee || 0;
totalVSize += (tx.weight / 4);
});
const effectiveFeePerVsize = totalFee / totalVSize;
let cluster: CpfpCluster;
if (clusterTxs.length > 1) {
cluster = {
root: clusterTxs[0].txid,
height,
txs: clusterTxs.map(tx => { return { txid: tx.txid, weight: tx.weight, fee: tx.fee || 0 }; }),
effectiveFeePerVsize,
};
clusters.push(cluster);
}
clusterTxs.forEach(tx => {
txMap[tx.txid].effectiveFeePerVsize = effectiveFeePerVsize;
if (cluster) {
clusterMap[tx.txid] = cluster;
}
});
// reset working vars
clusterTxs = [];
ancestors = {};
}
clusterTxs.push(tx);
tx.vin.forEach(vin => {
ancestors[vin.txid] = true;
});
}
// forward pass to enforce ancestor rate caps
for (const tx of transactions) {
let minAncestorRate = tx.effectiveFeePerVsize;
for (const vin of tx.vin) {
if (txMap[vin.txid]?.effectiveFeePerVsize) {
minAncestorRate = Math.min(minAncestorRate, txMap[vin.txid].effectiveFeePerVsize);
}
}
// check rounded values to skip cases with almost identical fees
const roundedMinAncestorRate = Math.ceil(minAncestorRate);
const roundedEffectiveFeeRate = Math.floor(tx.effectiveFeePerVsize);
if (roundedMinAncestorRate < roundedEffectiveFeeRate) {
tx.effectiveFeePerVsize = minAncestorRate;
if (!clusterMap[tx.txid]) {
// add a single-tx cluster to record the dependent rate
const cluster = {
root: tx.txid,
height,
txs: [{ txid: tx.txid, weight: tx.weight, fee: tx.fee || 0 }],
effectiveFeePerVsize: minAncestorRate,
};
clusterMap[tx.txid] = cluster;
clusters.push(cluster);
} else {
// update the existing cluster with the dependent rate
clusterMap[tx.txid].effectiveFeePerVsize = minAncestorRate;
}
}
}
return {
transactions,
clusters,
};
}
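  // Illustrative only (made-up numbers, not part of the change): the cluster rate arithmetic above,
  // worked through for a low-fee parent paid for by a high-fee child.
  //   parent: weight 800 (200 vB), fee 200 sat  -> 1 sat/vB on its own
  //   child:  weight 400 (100 vB), fee 1000 sat -> 10 sat/vB on its own
  //   effectiveFeePerVsize = (200 + 1000) / ((800 + 400) / 4) = 1200 / 300 = 4 sat/vB for both,
  //   after which the forward pass caps any transaction whose rate exceeds its cheapest ancestor.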
static calcEffectiveFeeStatistics(transactions: { weight: number, fee: number, effectiveFeePerVsize?: number, txid: string }[]): EffectiveFeeStats {
const sortedTxs = transactions.map(tx => { return { txid: tx.txid, weight: tx.weight, rate: tx.effectiveFeePerVsize || ((tx.fee || 0) / (tx.weight / 4)) }; }).sort((a, b) => a.rate - b.rate);
let weightCount = 0;
let medianFee = 0;
let medianWeight = 0;
// calculate the "medianFee" as the average fee rate of the middle 10000 weight units of transactions
const leftBound = 1995000;
const rightBound = 2005000;
for (let i = 0; i < sortedTxs.length && weightCount < rightBound; i++) {
const left = weightCount;
const right = weightCount + sortedTxs[i].weight;
if (right > leftBound) {
const weight = Math.min(right, rightBound) - Math.max(left, leftBound);
medianFee += (sortedTxs[i].rate * (weight / 4) );
medianWeight += weight;
}
weightCount += sortedTxs[i].weight;
}
const medianFeeRate = medianWeight ? (medianFee / (medianWeight / 4)) : 0;
// minimum effective fee heuristic:
// lowest of
// a) the 1st percentile of effective fee rates
// b) the minimum effective fee rate in the last 2% of transactions (in block order)
const minFee = Math.min(
Common.getNthPercentile(1, sortedTxs).rate,
transactions.slice(-transactions.length / 50).reduce((min, tx) => { return Math.min(min, tx.effectiveFeePerVsize || ((tx.fee || 0) / (tx.weight / 4))); }, Infinity)
);
// maximum effective fee heuristic:
// highest of
// a) the 99th percentile of effective fee rates
// b) the maximum effective fee rate in the first 2% of transactions (in block order)
const maxFee = Math.max(
Common.getNthPercentile(99, sortedTxs).rate,
transactions.slice(0, transactions.length / 50).reduce((max, tx) => { return Math.max(max, tx.effectiveFeePerVsize || ((tx.fee || 0) / (tx.weight / 4))); }, 0)
);
return {
medianFee: medianFeeRate,
feeRange: [
minFee,
[10,25,50,75,90].map(n => Common.getNthPercentile(n, sortedTxs).rate),
maxFee,
].flat(),
};
}
static getNthPercentile(n: number, sortedDistribution: any[]): any {
return sortedDistribution[Math.floor((sortedDistribution.length - 1) * (n / 100))];
}
static getTransactionFromRequest(req: Request, form: boolean): string {
let rawTx: any = typeof req.body === 'object' && form
? Object.values(req.body)[0] as any
: req.body;
if (typeof rawTx !== 'string') {
throw Object.assign(new Error('Non-string request body'), { code: -1 });
}
// Support both upper and lower case hex
// Support both txHash= Form and direct API POST
const reg = form ? /^txHash=((?:[a-fA-F0-9]{2})+)$/ : /^((?:[a-fA-F0-9]{2})+)$/;
const matches = reg.exec(rawTx);
if (!matches || !matches[1]) {
throw Object.assign(new Error('Non-hex request body'), { code: -2 });
}
// Guaranteed to be a hex string of multiple of 2
// Guaranteed to be lower case
// Guaranteed to pass validation (see function below)
return this.validateTransactionHex(matches[1].toLowerCase());
}
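  // Illustrative only: sample inputs against the two regexes above (values are assumptions).
  //   /^txHash=((?:[a-fA-F0-9]{2})+)$/.exec('txHash=0200AbCd') // match, capture '0200AbCd' (lowercased below)
  //   /^((?:[a-fA-F0-9]{2})+)$/.exec('0200abcd')               // match, direct POST of raw hex
  //   /^txHash=((?:[a-fA-F0-9]{2})+)$/.exec('txHash=0200abc')  // null, odd number of hex characters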
private static validateTransactionHex(txhex: string): string {
// Do not mutate txhex
// We assume txhex to be valid hex (output of getTransactionFromRequest above)
// Check 1: Valid transaction parse
let tx: bitcoinjs.Transaction;
try {
tx = bitcoinjs.Transaction.fromHex(txhex);
} catch(e) {
throw Object.assign(new Error('Invalid transaction (could not parse)'), { code: -4 });
}
// Check 2: Simple size check
if (tx.weight() > config.MEMPOOL.MAX_PUSH_TX_SIZE_WEIGHT) {
throw Object.assign(new Error(`Transaction too large (max ${config.MEMPOOL.MAX_PUSH_TX_SIZE_WEIGHT} weight units)`), { code: -3 });
}
// Check 3: Check unreachable script in taproot (if not allowed)
if (!config.MEMPOOL.ALLOW_UNREACHABLE) {
tx.ins.forEach(input => {
const witness = input.witness;
// See BIP 341: Script validation rules
const hasAnnex = witness.length >= 2 &&
witness[witness.length - 1][0] === 0x50;
const scriptSpendMinLength = hasAnnex ? 3 : 2;
const maybeScriptSpend = witness.length >= scriptSpendMinLength;
if (maybeScriptSpend) {
const controlBlock = witness[witness.length - scriptSpendMinLength + 1];
if (controlBlock.length === 0 || !this.isValidLeafVersion(controlBlock[0])) {
// Skip this input, it's not taproot
return;
}
// Definitely taproot. Get script
const script = witness[witness.length - scriptSpendMinLength];
const decompiled = bitcoinjs.script.decompile(script);
if (!decompiled || decompiled.length < 2) {
// Skip this input
return;
}
// Iterate up to second last (will look ahead 1 item)
for (let i = 0; i < decompiled.length - 1; i++) {
const first = decompiled[i];
const second = decompiled[i + 1];
if (
first === bitcoinjs.opcodes.OP_FALSE &&
second === bitcoinjs.opcodes.OP_IF
) {
throw Object.assign(new Error('Unreachable taproot scripts not allowed'), { code: -5 });
}
}
}
})
}
// Pass through the input string untouched
return txhex;
}
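  // Illustrative only, assuming bitcoinjs-lib decompiles OP_0 to the OP_FALSE opcode:
  // const unreachable = bitcoinjs.script.decompile(Buffer.from('0063', 'hex')); // => [OP_FALSE, OP_IF]
  // a tapscript starting OP_FALSE OP_IF can never execute its branch, so the push is rejected
  // with code -5 whenever MEMPOOL.ALLOW_UNREACHABLE is false.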
private static isValidLeafVersion(leafVersion: number): boolean {
// See Note 7 in BIP341
// https://github.com/bitcoin/bips/blob/66a1a8151021913047934ebab3f8883f2f8ca75b/bip-0341.mediawiki#cite_note-7
// "What constraints are there on the leaf version?"
// Must be an integer between 0 and 255
// Since we're parsing a byte
if (Math.floor(leafVersion) !== leafVersion || leafVersion < 0 || leafVersion > 255) {
return false;
}
// "the leaf version cannot be odd"
if ((leafVersion & 0x01) === 1) {
return false;
}
// "The values that comply to this rule are
// the 32 even values between 0xc0 and 0xfe
if (leafVersion >= 0xc0 && leafVersion <= 0xfe) {
return true;
}
// and also 0x66, 0x7e, 0x80, 0x84, 0x96, 0x98, 0xba, 0xbc, 0xbe."
if ([0x66, 0x7e, 0x80, 0x84, 0x96, 0x98, 0xba, 0xbc, 0xbe].includes(leafVersion)) {
return true;
}
// Otherwise, invalid
return false;
}
}
/**
* Class to calculate average fee rates of a list of transactions
* at certain weight percentiles, in a single pass
*
* init with:
* maxWeight - the total weight to measure percentiles relative to (e.g. 4MW for a single block)
* percentileBandWidth - how many weight units to average over for each percentile (as a % of maxWeight)
* percentiles - an array of weight percentiles to compute, in %
*
* then call .processNext(tx) for each transaction, in descending order
*
* retrieve the final results with .getFeeStats()
*/
export class OnlineFeeStatsCalculator {
private maxWeight: number;
private percentiles = [10,25,50,75,90];
private bandWidthPercent = 2;
private bandWidth: number = 0;
private bandIndex = 0;
private leftBound = 0;
private rightBound = 0;
private inBand = false;
private totalBandFee = 0;
private totalBandWeight = 0;
private minBandRate = Infinity;
private maxBandRate = 0;
private feeRange: { avg: number, min: number, max: number }[] = [];
private totalWeight: number = 0;
constructor (maxWeight: number, percentileBandWidth?: number, percentiles?: number[]) {
this.maxWeight = maxWeight;
if (percentiles && percentiles.length) {
this.percentiles = percentiles;
}
if (percentileBandWidth != null) {
this.bandWidthPercent = percentileBandWidth;
}
this.bandWidth = this.maxWeight * (this.bandWidthPercent / 100);
// add min/max percentiles aligned to the ends of the range
this.percentiles.unshift(this.bandWidthPercent / 2);
this.percentiles.push(100 - (this.bandWidthPercent / 2));
this.setNextBounds();
}
processNext(tx: { weight: number, fee: number, effectiveFeePerVsize?: number, feePerVsize?: number, rate?: number, txid: string }): void {
let left = this.totalWeight;
const right = this.totalWeight + tx.weight;
if (!this.inBand && right <= this.leftBound) {
this.totalWeight += tx.weight;
return;
}
while (left < right) {
if (right > this.leftBound) {
this.inBand = true;
const txRate = (tx.rate || tx.effectiveFeePerVsize || tx.feePerVsize || 0);
const weight = Math.min(right, this.rightBound) - Math.max(left, this.leftBound);
this.totalBandFee += (txRate * weight);
this.totalBandWeight += weight;
this.maxBandRate = Math.max(this.maxBandRate, txRate);
this.minBandRate = Math.min(this.minBandRate, txRate);
}
left = Math.min(right, this.rightBound);
if (left >= this.rightBound) {
this.inBand = false;
const avgBandFeeRate = this.totalBandWeight ? (this.totalBandFee / this.totalBandWeight) : 0;
this.feeRange.unshift({ avg: avgBandFeeRate, min: this.minBandRate, max: this.maxBandRate });
this.bandIndex++;
this.setNextBounds();
this.totalBandFee = 0;
this.totalBandWeight = 0;
this.minBandRate = Infinity;
this.maxBandRate = 0;
}
}
this.totalWeight += tx.weight;
}
private setNextBounds(): void {
const nextPercentile = this.percentiles[this.bandIndex];
if (nextPercentile != null) {
this.leftBound = ((nextPercentile / 100) * this.maxWeight) - (this.bandWidth / 2);
this.rightBound = this.leftBound + this.bandWidth;
} else {
this.leftBound = Infinity;
this.rightBound = Infinity;
}
}
getRawFeeStats(): WorkingEffectiveFeeStats {
if (this.totalBandWeight > 0) {
const avgBandFeeRate = this.totalBandWeight ? (this.totalBandFee / this.totalBandWeight) : 0;
this.feeRange.unshift({ avg: avgBandFeeRate, min: this.minBandRate, max: this.maxBandRate });
}
while (this.feeRange.length < this.percentiles.length) {
this.feeRange.unshift({ avg: 0, min: 0, max: 0 });
}
return {
minFee: this.feeRange[0].min,
medianFee: this.feeRange[Math.floor(this.feeRange.length / 2)].avg,
maxFee: this.feeRange[this.feeRange.length - 1].max,
feeRange: this.feeRange.map(f => f.avg),
};
}
getFeeStats(): EffectiveFeeStats {
const stats = this.getRawFeeStats();
stats.feeRange[0] = stats.minFee;
stats.feeRange[stats.feeRange.length - 1] = stats.maxFee;
return stats;
}
}
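A minimal usage sketch of the calculator above, following its doc comment; the transaction list and its ordering are assumptions, not code from the change:
const calc = new OnlineFeeStatsCalculator(4_000_000, 2, [10, 25, 50, 75, 90]);
for (const tx of txsByDescendingRate) { // hypothetical list, pre-sorted from highest to lowest rate
  calc.processNext(tx);                 // each tx needs { txid, weight, fee } plus a rate field
}
const { medianFee, feeRange } = calc.getFeeStats();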

View File

@@ -7,7 +7,7 @@ import cpfpRepository from '../repositories/CpfpRepository';
import { RowDataPacket } from 'mysql2';
class DatabaseMigration {
private static currentVersion = 59;
private static currentVersion = 64;
private queryTimeout = 3600_000;
private statisticsAddedIndexed = false;
private uniqueLogs: string[] = [];
@@ -104,7 +104,7 @@ class DatabaseMigration {
private async $createMissingTablesAndIndexes(databaseSchemaVersion: number) {
await this.$setStatisticsAddedIndexedFlag(databaseSchemaVersion);
const isBitcoin = ['mainnet', 'testnet', 'signet'].includes(config.MEMPOOL.NETWORK);
const isBitcoin = ['mainnet', 'testnet', 'signet', 'regtest'].includes(config.MEMPOOL.NETWORK);
await this.$executeQuery(this.getCreateElementsTableQuery(), await this.$checkIfTableExists('elements_pegs'));
await this.$executeQuery(this.getCreateStatisticsQuery(), await this.$checkIfTableExists('statistics'));
@@ -497,6 +497,7 @@ class DatabaseMigration {
this.uniqueLog(logger.notice, this.blocksTruncatedMessage);
await this.$executeQuery('DELETE FROM `pools`');
await this.$executeQuery('ALTER TABLE pools AUTO_INCREMENT = 1');
await this.$executeQuery(`UPDATE state SET string = NULL WHERE name = 'pools_json_sha'`);
this.uniqueLog(logger.notice, '`pools` table has been truncated');
await this.updateToSchemaVersion(56);
}
@@ -511,10 +512,42 @@ class DatabaseMigration {
await this.updateToSchemaVersion(58);
}
if (databaseSchemaVersion < 59 && (config.MEMPOOL.NETWORK === 'signet' || config.MEMPOOL.NETWORK === 'testnet')) {
if (databaseSchemaVersion < 59 && ['testnet', 'signet', 'regtest'].includes(config.MEMPOOL.NETWORK)) {
// https://github.com/mempool/mempool/issues/3360
await this.$executeQuery(`TRUNCATE prices`);
}
if (databaseSchemaVersion < 60 && isBitcoin === true) {
await this.$executeQuery('ALTER TABLE `blocks_audits` ADD sigop_txs JSON DEFAULT "[]"');
await this.updateToSchemaVersion(60);
}
if (databaseSchemaVersion < 61 && isBitcoin === true) {
// Break block templates into their own table
if (! await this.$checkIfTableExists('blocks_templates')) {
await this.$executeQuery('CREATE TABLE blocks_templates AS SELECT id, template FROM blocks_summaries WHERE template != "[]"');
}
await this.$executeQuery('ALTER TABLE blocks_templates MODIFY template JSON DEFAULT "[]"');
await this.$executeQuery('ALTER TABLE blocks_templates ADD PRIMARY KEY (id)');
await this.$executeQuery('ALTER TABLE blocks_summaries DROP COLUMN template');
await this.updateToSchemaVersion(61);
}
if (databaseSchemaVersion < 62 && isBitcoin === true) {
await this.$executeQuery('ALTER TABLE `blocks_audits` ADD expected_fees BIGINT UNSIGNED DEFAULT NULL');
await this.$executeQuery('ALTER TABLE `blocks_audits` ADD expected_weight BIGINT UNSIGNED DEFAULT NULL');
await this.updateToSchemaVersion(62);
}
if (databaseSchemaVersion < 63 && isBitcoin === true) {
await this.$executeQuery('ALTER TABLE `blocks_audits` ADD fullrbf_txs JSON DEFAULT "[]"');
await this.updateToSchemaVersion(63);
}
if (databaseSchemaVersion < 64 && isBitcoin === true) {
await this.$executeQuery('ALTER TABLE `nodes` ADD features text NULL');
await this.updateToSchemaVersion(64);
}
}
/**
@@ -623,7 +656,7 @@ class DatabaseMigration {
*/
private getMigrationQueriesFromVersion(version: number): string[] {
const queries: string[] = [];
const isBitcoin = ['mainnet', 'testnet', 'signet'].includes(config.MEMPOOL.NETWORK);
const isBitcoin = ['mainnet', 'testnet', 'signet', 'regtest'].includes(config.MEMPOOL.NETWORK);
if (version < 1) {
if (config.MEMPOOL.NETWORK !== 'liquid' && config.MEMPOOL.NETWORK !== 'liquidtestnet') {
@@ -1033,7 +1066,7 @@ class DatabaseMigration {
}
public async $blocksReindexingTruncate(): Promise<void> {
logger.warn(`Truncating pools, blocks and hashrates for re-indexing (using '--reindex-blocks'). You can cancel this command within 5 seconds`);
logger.warn(`Truncating pools, blocks, hashrates and difficulty_adjustments tables for re-indexing (using '--reindex-blocks'). You can cancel this command within 5 seconds`);
await Common.sleep$(5000);
await this.$executeQuery(`TRUNCATE blocks`);

View File

@@ -24,31 +24,29 @@ export function calcDifficultyAdjustment(
network: string,
latestBlockTimestamp: number,
): DifficultyAdjustment {
const ESTIMATE_LAG_BLOCKS = 146; // For first 7.2% of epoch, don't estimate.
const EPOCH_BLOCK_LENGTH = 2016; // Bitcoin mainnet
const BLOCK_SECONDS_TARGET = 600; // Bitcoin mainnet
const TESTNET_MAX_BLOCK_SECONDS = 1200; // Bitcoin testnet
const diffSeconds = nowSeconds - DATime;
const diffSeconds = Math.max(0, nowSeconds - DATime);
const blocksInEpoch = (blockHeight >= 0) ? blockHeight % EPOCH_BLOCK_LENGTH : 0;
const progressPercent = (blockHeight >= 0) ? blocksInEpoch / EPOCH_BLOCK_LENGTH * 100 : 100;
const remainingBlocks = EPOCH_BLOCK_LENGTH - blocksInEpoch;
const nextRetargetHeight = (blockHeight >= 0) ? blockHeight + remainingBlocks : 0;
const expectedBlocks = diffSeconds / BLOCK_SECONDS_TARGET;
const actualTimespan = (blocksInEpoch === 2015 ? latestBlockTimestamp : nowSeconds) - DATime;
let difficultyChange = 0;
let timeAvgSecs = diffSeconds / blocksInEpoch;
// Only calculate the estimate once we have 7.2% of blocks in current epoch
if (blocksInEpoch >= ESTIMATE_LAG_BLOCKS) {
difficultyChange = (BLOCK_SECONDS_TARGET / timeAvgSecs - 1) * 100;
// Max increase is x4 (+300%)
if (difficultyChange > 300) {
difficultyChange = 300;
}
// Max decrease is /4 (-75%)
if (difficultyChange < -75) {
difficultyChange = -75;
}
let timeAvgSecs = blocksInEpoch ? diffSeconds / blocksInEpoch : BLOCK_SECONDS_TARGET;
difficultyChange = (BLOCK_SECONDS_TARGET / (actualTimespan / (blocksInEpoch + 1)) - 1) * 100;
// Max increase is x4 (+300%)
if (difficultyChange > 300) {
difficultyChange = 300;
}
// Max decrease is /4 (-75%)
if (difficultyChange < -75) {
difficultyChange = -75;
}
// Testnet difficulty is set to 1 after 20 minutes of no blocks,

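A worked example of the revised estimate, with made-up numbers: 1007 blocks into the epoch, arriving roughly every 560 seconds.
const BLOCK_SECONDS_TARGET = 600;
const blocksInEpoch = 1007;
const actualTimespan = 560 * (blocksInEpoch + 1);   // ~560s per block since the last retarget
let difficultyChange = (BLOCK_SECONDS_TARGET / (actualTimespan / (blocksInEpoch + 1)) - 1) * 100;
difficultyChange = Math.min(300, Math.max(-75, difficultyChange)); // same clamps as above
// => ~+7.1%: faster-than-target blocks imply difficulty rises at the next retarget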
View File

@@ -7,16 +7,26 @@ import logger from '../logger';
import config from '../config';
import { TransactionExtended } from '../mempool.interfaces';
import { Common } from './common';
import rbfCache from './rbf-cache';
class DiskCache {
private cacheSchemaVersion = 3;
private rbfCacheSchemaVersion = 1;
private static TMP_FILE_NAME = config.MEMPOOL.CACHE_DIR + '/tmp-cache.json';
private static TMP_FILE_NAMES = config.MEMPOOL.CACHE_DIR + '/tmp-cache{number}.json';
private static FILE_NAME = config.MEMPOOL.CACHE_DIR + '/cache.json';
private static FILE_NAMES = config.MEMPOOL.CACHE_DIR + '/cache{number}.json';
private static TMP_RBF_FILE_NAME = config.MEMPOOL.CACHE_DIR + '/tmp-rbfcache.json';
private static RBF_FILE_NAME = config.MEMPOOL.CACHE_DIR + '/rbfcache.json';
private static CHUNK_FILES = 25;
private isWritingCache = false;
private ignoreBlocksCache = false;
private semaphore: { resume: (() => void)[], locks: number } = {
resume: [],
locks: 0,
};
constructor() {
if (!cluster.isPrimary) {
@@ -43,7 +53,9 @@ class DiskCache {
const mempool = memPool.getMempool();
const mempoolArray: TransactionExtended[] = [];
for (const tx in mempool) {
mempoolArray.push(mempool[tx]);
if (mempool[tx]) {
mempoolArray.push(mempool[tx]);
}
}
Common.shuffleArray(mempoolArray);
@@ -71,6 +83,7 @@ class DiskCache {
fs.renameSync(DiskCache.TMP_FILE_NAMES.replace('{number}', i.toString()), DiskCache.FILE_NAMES.replace('{number}', i.toString()));
}
} else {
await this.$yield();
await fsPromises.writeFile(DiskCache.TMP_FILE_NAME, JSON.stringify({
network: config.MEMPOOL.NETWORK,
cacheSchemaVersion: this.cacheSchemaVersion,
@@ -80,6 +93,7 @@ class DiskCache {
mempoolArray: mempoolArray.splice(0, chunkSize),
}), { flag: 'w' });
for (let i = 1; i < DiskCache.CHUNK_FILES; i++) {
await this.$yield();
await fsPromises.writeFile(DiskCache.TMP_FILE_NAMES.replace('{number}', i.toString()), JSON.stringify({
mempool: {},
mempoolArray: mempoolArray.splice(0, chunkSize),
@@ -98,6 +112,32 @@ class DiskCache {
logger.warn('Error writing to cache file: ' + (e instanceof Error ? e.message : e));
this.isWritingCache = false;
}
try {
logger.debug('Writing rbf data to disk cache (async)...');
this.isWritingCache = true;
const rbfData = rbfCache.dump();
if (sync) {
fs.writeFileSync(DiskCache.TMP_RBF_FILE_NAME, JSON.stringify({
network: config.MEMPOOL.NETWORK,
rbfCacheSchemaVersion: this.rbfCacheSchemaVersion,
rbf: rbfData,
}), { flag: 'w' });
fs.renameSync(DiskCache.TMP_RBF_FILE_NAME, DiskCache.RBF_FILE_NAME);
} else {
await fsPromises.writeFile(DiskCache.TMP_RBF_FILE_NAME, JSON.stringify({
network: config.MEMPOOL.NETWORK,
rbfCacheSchemaVersion: this.rbfCacheSchemaVersion,
rbf: rbfData,
}), { flag: 'w' });
await fsPromises.rename(DiskCache.TMP_RBF_FILE_NAME, DiskCache.RBF_FILE_NAME);
}
logger.debug('Rbf data saved to disk cache');
this.isWritingCache = false;
} catch (e) {
logger.warn('Error writing rbf data to cache file: ' + (e instanceof Error ? e.message : e));
this.isWritingCache = false;
}
}
wipeCache(): void {
@@ -122,7 +162,19 @@ class DiskCache {
}
}
loadMempoolCache(): void {
wipeRbfCache() {
logger.notice(`Wiping nodejs backend cache/rbfcache.json file`);
try {
fs.unlinkSync(DiskCache.RBF_FILE_NAME);
} catch (e: any) {
if (e?.code !== 'ENOENT') {
logger.err(`Cannot wipe cache file ${DiskCache.RBF_FILE_NAME}. Exception ${JSON.stringify(e)}`);
}
}
}
async $loadMempoolCache(): Promise<void> {
if (!fs.existsSync(DiskCache.FILE_NAME)) {
return;
}
@@ -143,6 +195,7 @@ class DiskCache {
if (data.mempoolArray) {
for (const tx of data.mempoolArray) {
delete tx.uid;
data.mempool[tx.txid] = tx;
}
}
@@ -155,6 +208,7 @@ class DiskCache {
const cacheData2 = JSON.parse(fs.readFileSync(fileName, 'utf8'));
if (cacheData2.mempoolArray) {
for (const tx of cacheData2.mempoolArray) {
delete tx.uid;
data.mempool[tx.txid] = tx;
}
} else {
@@ -162,16 +216,74 @@ class DiskCache {
}
}
} catch (e) {
logger.info('Error parsing ' + fileName + '. Skipping. Reason: ' + (e instanceof Error ? e.message : e));
logger.err('Error parsing ' + fileName + '. Skipping. Reason: ' + (e instanceof Error ? e.message : e));
}
}
memPool.setMempool(data.mempool);
blocks.setBlocks(data.blocks);
blocks.setBlockSummaries(data.blockSummaries || []);
await memPool.$setMempool(data.mempool);
if (!this.ignoreBlocksCache) {
blocks.setBlocks(data.blocks);
blocks.setBlockSummaries(data.blockSummaries || []);
} else {
logger.info('Re-saving cache with empty recent blocks data');
await this.$saveCacheToDisk(true);
}
} catch (e) {
logger.warn('Failed to parse mempool and blocks cache. Skipping. Reason: ' + (e instanceof Error ? e.message : e));
}
try {
let rbfData: any = {};
const rbfCacheData = fs.readFileSync(DiskCache.RBF_FILE_NAME, 'utf8');
if (rbfCacheData) {
logger.info('Restoring rbf data from disk cache');
rbfData = JSON.parse(rbfCacheData);
if (rbfData.rbfCacheSchemaVersion === undefined || rbfData.rbfCacheSchemaVersion !== this.rbfCacheSchemaVersion) {
logger.notice('Rbf disk cache contains an outdated schema version. Clearing it and skipping the cache loading.');
return this.wipeRbfCache();
}
if (rbfData.network && rbfData.network !== config.MEMPOOL.NETWORK) {
logger.notice('Rbf disk cache contains data from a different network. Clearing it and skipping the cache loading.');
return this.wipeRbfCache();
}
}
if (rbfData?.rbf) {
rbfCache.load(rbfData.rbf);
}
} catch (e) {
logger.warn('Failed to parse rbf cache. Skipping. Reason: ' + (e instanceof Error ? e.message : e));
}
}
private $yield(): Promise<void> {
if (this.semaphore.locks) {
logger.debug('Pause writing mempool and blocks data to disk cache (async)');
return new Promise((resolve) => {
this.semaphore.resume.push(resolve);
});
} else {
return Promise.resolve();
}
}
public lock(): void {
this.semaphore.locks++;
}
public unlock(): void {
this.semaphore.locks = Math.max(0, this.semaphore.locks - 1);
if (!this.semaphore.locks && this.semaphore.resume.length) {
const nextResume = this.semaphore.resume.shift();
if (nextResume) {
logger.debug('Resume writing mempool and blocks data to disk cache (async)');
nextResume();
}
}
}
public setIgnoreBlocksCache(): void {
this.ignoreBlocksCache = true;
}
}
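A sketch of how the lock/unlock semaphore above might be used by a caller, assuming the module's default export instance is named diskCache; the critical-section work is a placeholder:
diskCache.lock();                  // an in-progress $saveCacheToDisk pauses at its next $yield()
try {
  await doLatencySensitiveWork();  // hypothetical work that should not compete with cache writes
} finally {
  diskCache.unlock();              // once no locks remain, wakes one writer queued in $yield()
}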

View File

@@ -80,7 +80,7 @@ class ChannelsApi {
public async $searchChannelsById(search: string): Promise<any[]> {
try {
const searchStripped = search.replace('%', '') + '%';
const searchStripped = search.replace(/[^0-9x]/g, '') + '%';
const query = `SELECT id, short_id, capacity, status FROM channels WHERE id LIKE ? OR short_id LIKE ? LIMIT 10`;
const [rows]: any = await DB.query(query, [searchStripped, searchStripped]);
return rows;
@@ -117,6 +117,26 @@ class ChannelsApi {
}
}
public async $getPenaltyClosedChannels(): Promise<any[]> {
try {
const query = `
SELECT n1.alias AS alias_left,
n2.alias AS alias_right,
channels.*
FROM channels
LEFT JOIN nodes AS n1 ON n1.public_key = channels.node1_public_key
LEFT JOIN nodes AS n2 ON n2.public_key = channels.node2_public_key
WHERE channels.status = 2 AND channels.closing_reason = 3
ORDER BY closing_date DESC
`;
const [rows]: any = await DB.query(query);
return rows;
} catch (e) {
logger.err('$getPenaltyClosedChannels error: ' + (e instanceof Error ? e.message : e));
throw e;
}
}
public async $getUnresolvedClosedChannels(): Promise<any[]> {
try {
const query = `SELECT * FROM channels WHERE status = 2 AND closing_reason = 2 AND closing_resolved = 0 AND closing_transaction_id != ''`;

View File

@@ -11,6 +11,7 @@ class ChannelsRoutes {
.get(config.MEMPOOL.API_URL_PREFIX + 'lightning/channels/search/:search', this.$searchChannelsById)
.get(config.MEMPOOL.API_URL_PREFIX + 'lightning/channels/:short_id', this.$getChannel)
.get(config.MEMPOOL.API_URL_PREFIX + 'lightning/channels', this.$getChannelsForNode)
.get(config.MEMPOOL.API_URL_PREFIX + 'lightning/penalties', this.$getPenaltyClosedChannels)
.get(config.MEMPOOL.API_URL_PREFIX + 'lightning/channels-geo', this.$getAllChannelsGeo)
.get(config.MEMPOOL.API_URL_PREFIX + 'lightning/channels-geo/:publicKey', this.$getAllChannelsGeo)
;
@@ -108,6 +109,18 @@ class ChannelsRoutes {
}
}
private async $getPenaltyClosedChannels(req: Request, res: Response): Promise<void> {
try {
const channels = await channelsApi.$getPenaltyClosedChannels();
res.header('Pragma', 'public');
res.header('Cache-control', 'public');
res.setHeader('Expires', new Date(Date.now() + 1000 * 60).toUTCString());
res.json(channels);
} catch (e) {
res.status(500).send(e instanceof Error ? e.message : e);
}
}
private async $getAllChannelsGeo(req: Request, res: Response) {
try {
const style: string = typeof req.query.style === 'string' ? req.query.style : '';

View File

@@ -3,6 +3,7 @@ import DB from '../../database';
import { ResultSetHeader } from 'mysql2';
import { ILightningApi } from '../lightning/lightning-api.interface';
import { ITopNodesPerCapacity, ITopNodesPerChannels } from '../../mempool.interfaces';
import { bin2hex } from '../../utils/format';
class NodesApi {
public async $getWorldNodes(): Promise<any> {
@@ -56,7 +57,8 @@ class NodesApi {
UNIX_TIMESTAMP(updated_at) AS updated_at, color, sockets as sockets,
as_number, city_id, country_id, subdivision_id, longitude, latitude,
geo_names_iso.names as iso_code, geo_names_as.names as as_organization, geo_names_city.names as city,
geo_names_country.names as country, geo_names_subdivision.names as subdivision
geo_names_country.names as country, geo_names_subdivision.names as subdivision,
features
FROM nodes
LEFT JOIN geo_names geo_names_as on geo_names_as.id = as_number
LEFT JOIN geo_names geo_names_city on geo_names_city.id = city_id
@@ -76,6 +78,23 @@ class NodesApi {
node.city = JSON.parse(node.city);
node.country = JSON.parse(node.country);
// Features
node.features = JSON.parse(node.features);
node.featuresBits = null;
if (node.features) {
let maxBit = 0;
for (const feature of node.features) {
maxBit = Math.max(maxBit, feature.bit);
}
maxBit = Math.ceil(maxBit / 4) * 4 - 1;
node.featuresBits = new Array(maxBit + 1).fill(0);
for (const feature of node.features) {
node.featuresBits[feature.bit] = 1;
}
node.featuresBits = bin2hex(node.featuresBits.reverse().join(''));
}
// Active channels and capacity
const activeChannelsStats: any = await this.$getActiveChannelsStats(public_key);
node.active_channel_count = activeChannelsStats.active_channel_count ?? 0;
@@ -373,7 +392,7 @@ class NodesApi {
public async $searchNodeByPublicKeyOrAlias(search: string) {
try {
const publicKeySearch = search.replace('%', '') + '%';
const publicKeySearch = search.replace(/[^a-zA-Z0-9]/g, '') + '%';
const aliasSearch = search
.replace(/[-_.]/g, ' ') // Replace all -_. characters with a space. Eg: "ln.nicehash" becomes "ln nicehash".
.replace(/[^a-zA-Z0-9 ]/g, '') // Remove all special characters and keep just A to Z, 0 to 9.
@@ -656,10 +675,19 @@ class NodesApi {
alias_search,
color,
sockets,
status
status,
features
)
VALUES (?, NOW(), FROM_UNIXTIME(?), ?, ?, ?, ?, 1)
ON DUPLICATE KEY UPDATE updated_at = FROM_UNIXTIME(?), alias = ?, alias_search = ?, color = ?, sockets = ?, status = 1`;
VALUES (?, NOW(), FROM_UNIXTIME(?), ?, ?, ?, ?, 1, ?)
ON DUPLICATE KEY UPDATE
updated_at = FROM_UNIXTIME(?),
alias = ?,
alias_search = ?,
color = ?,
sockets = ?,
status = 1,
features = ?
`;
await DB.query(query, [
node.pub_key,
@@ -668,11 +696,13 @@ class NodesApi {
this.aliasToSearchText(node.alias),
node.color,
sockets,
JSON.stringify(node.features),
node.last_update,
node.alias,
this.aliasToSearchText(node.alias),
node.color,
sockets,
JSON.stringify(node.features),
]);
} catch (e) {
logger.err('$saveNode() error: ' + (e instanceof Error ? e.message : e));

View File

@@ -2,8 +2,91 @@ import { ILightningApi } from '../lightning-api.interface';
import FundingTxFetcher from '../../../tasks/lightning/sync-tasks/funding-tx-fetcher';
import logger from '../../../logger';
import { Common } from '../../common';
import { hex2bin } from '../../../utils/format';
import config from '../../../config';
// https://github.com/lightningnetwork/lnd/blob/master/lnwire/features.go
export enum FeatureBits {
DataLossProtectRequired = 0,
DataLossProtectOptional = 1,
InitialRoutingSync = 3,
UpfrontShutdownScriptRequired = 4,
UpfrontShutdownScriptOptional = 5,
GossipQueriesRequired = 6,
GossipQueriesOptional = 7,
TLVOnionPayloadRequired = 8,
TLVOnionPayloadOptional = 9,
StaticRemoteKeyRequired = 12,
StaticRemoteKeyOptional = 13,
PaymentAddrRequired = 14,
PaymentAddrOptional = 15,
MPPRequired = 16,
MPPOptional = 17,
WumboChannelsRequired = 18,
WumboChannelsOptional = 19,
AnchorsRequired = 20,
AnchorsOptional = 21,
AnchorsZeroFeeHtlcTxRequired = 22,
AnchorsZeroFeeHtlcTxOptional = 23,
ShutdownAnySegwitRequired = 26,
ShutdownAnySegwitOptional = 27,
AMPRequired = 30,
AMPOptional = 31,
ExplicitChannelTypeRequired = 44,
ExplicitChannelTypeOptional = 45,
ScidAliasRequired = 46,
ScidAliasOptional = 47,
PaymentMetadataRequired = 48,
PaymentMetadataOptional = 49,
ZeroConfRequired = 50,
ZeroConfOptional = 51,
KeysendRequired = 54,
KeysendOptional = 55,
ScriptEnforcedLeaseRequired = 2022,
ScriptEnforcedLeaseOptional = 2023,
MaxBolt11Feature = 5114,
};
export const FeaturesMap = new Map<FeatureBits, string>([
[FeatureBits.DataLossProtectRequired, 'data-loss-protect'],
[FeatureBits.DataLossProtectOptional, 'data-loss-protect'],
[FeatureBits.InitialRoutingSync, 'initial-routing-sync'],
[FeatureBits.UpfrontShutdownScriptRequired, 'upfront-shutdown-script'],
[FeatureBits.UpfrontShutdownScriptOptional, 'upfront-shutdown-script'],
[FeatureBits.GossipQueriesRequired, 'gossip-queries'],
[FeatureBits.GossipQueriesOptional, 'gossip-queries'],
[FeatureBits.TLVOnionPayloadRequired, 'tlv-onion'],
[FeatureBits.TLVOnionPayloadOptional, 'tlv-onion'],
[FeatureBits.StaticRemoteKeyOptional, 'static-remote-key'],
[FeatureBits.StaticRemoteKeyRequired, 'static-remote-key'],
[FeatureBits.PaymentAddrOptional, 'payment-addr'],
[FeatureBits.PaymentAddrRequired, 'payment-addr'],
[FeatureBits.MPPOptional, 'multi-path-payments'],
[FeatureBits.MPPRequired, 'multi-path-payments'],
[FeatureBits.AnchorsRequired, 'anchor-commitments'],
[FeatureBits.AnchorsOptional, 'anchor-commitments'],
[FeatureBits.AnchorsZeroFeeHtlcTxRequired, 'anchors-zero-fee-htlc-tx'],
[FeatureBits.AnchorsZeroFeeHtlcTxOptional, 'anchors-zero-fee-htlc-tx'],
[FeatureBits.WumboChannelsRequired, 'wumbo-channels'],
[FeatureBits.WumboChannelsOptional, 'wumbo-channels'],
[FeatureBits.AMPRequired, 'amp'],
[FeatureBits.AMPOptional, 'amp'],
[FeatureBits.PaymentMetadataOptional, 'payment-metadata'],
[FeatureBits.PaymentMetadataRequired, 'payment-metadata'],
[FeatureBits.ExplicitChannelTypeOptional, 'explicit-commitment-type'],
[FeatureBits.ExplicitChannelTypeRequired, 'explicit-commitment-type'],
[FeatureBits.KeysendOptional, 'keysend'],
[FeatureBits.KeysendRequired, 'keysend'],
[FeatureBits.ScriptEnforcedLeaseRequired, 'script-enforced-lease'],
[FeatureBits.ScriptEnforcedLeaseOptional, 'script-enforced-lease'],
[FeatureBits.ScidAliasRequired, 'scid-alias'],
[FeatureBits.ScidAliasOptional, 'scid-alias'],
[FeatureBits.ZeroConfRequired, 'zero-conf'],
[FeatureBits.ZeroConfOptional, 'zero-conf'],
[FeatureBits.ShutdownAnySegwitRequired, 'shutdown-any-segwit'],
[FeatureBits.ShutdownAnySegwitOptional, 'shutdown-any-segwit'],
]);
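// Illustrative only: the bit extraction performed in convertNode below, assuming hex2bin expands
// each hex digit into four binary digits, most significant first.
const exampleBits = hex2bin('8000').split('').reverse().join(''); // '0000000000000001'
// only index 15 is set => FeatureBits.PaymentAddrOptional, is_required = (15 % 2 === 0) => false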
/**
* Convert a clightning "listnode" entry to a lnd node entry
*/
@@ -17,10 +100,36 @@ export function convertNode(clNode: any): ILightningApi.Node {
custom_records = undefined;
}
}
const nodeFeatures: ILightningApi.Feature[] = [];
const nodeFeaturesBinary = hex2bin(clNode.features).split('').reverse().join('');
for (let i = 0; i < nodeFeaturesBinary.length; i++) {
if (nodeFeaturesBinary[i] === '0') {
continue;
}
const feature = FeaturesMap.get(i);
if (!feature) {
nodeFeatures.push({
bit: i,
name: 'unknown',
is_required: i % 2 === 0,
is_known: false
});
} else {
nodeFeatures.push({
bit: i,
name: feature,
is_required: i % 2 === 0,
is_known: true
});
}
}
return {
alias: clNode.alias ?? '',
color: `#${clNode.color ?? ''}`,
features: [], // TODO parse and return clNode.feature
features: nodeFeatures,
pub_key: clNode.nodeid,
addresses: clNode.addresses?.map((addr) => {
let address = addr.address;
@@ -108,7 +217,7 @@ async function buildFullChannel(clChannelA: any, clChannelB: any): Promise<ILigh
return {
channel_id: Common.channelShortIdToIntegerId(clChannelA.short_channel_id),
capacity: clChannelA.satoshis,
capacity: (clChannelA.amount_msat / 1000).toString(),
last_update: lastUpdate,
node1_policy: convertPolicy(clChannelA),
node2_policy: convertPolicy(clChannelB),
@@ -132,7 +241,7 @@ async function buildIncompleteChannel(clChannel: any): Promise<ILightningApi.Cha
return {
channel_id: Common.channelShortIdToIntegerId(clChannel.short_channel_id),
capacity: clChannel.satoshis,
capacity: (clChannel.amount_msat / 1000).toString(),
last_update: clChannel.last_update ?? 0,
node1_policy: convertPolicy(clChannel),
node2_policy: null,
@@ -148,8 +257,8 @@ async function buildIncompleteChannel(clChannel: any): Promise<ILightningApi.Cha
function convertPolicy(clChannel: any): ILightningApi.RoutingPolicy {
return {
time_lock_delta: clChannel.delay,
min_htlc: clChannel.htlc_minimum_msat.slice(0, -4),
max_htlc_msat: clChannel.htlc_maximum_msat.slice(0, -4),
min_htlc: clChannel.htlc_minimum_msat.toString(),
max_htlc_msat: clChannel.htlc_maximum_msat.toString(),
fee_base_msat: clChannel.base_fee_millisatoshi,
fee_rate_milli_msat: clChannel.fee_per_millionth,
disabled: !clChannel.active,

View File

@@ -79,6 +79,7 @@ export namespace ILightningApi {
}
export interface Feature {
bit: number;
name: string;
is_required: boolean;
is_known: boolean;

View File

@@ -4,21 +4,29 @@ import * as fs from 'fs';
import { AbstractLightningApi } from '../lightning-api-abstract-factory';
import { ILightningApi } from '../lightning-api.interface';
import config from '../../../config';
import logger from '../../../logger';
class LndApi implements AbstractLightningApi {
axiosConfig: AxiosRequestConfig = {};
constructor() {
if (config.LIGHTNING.ENABLED) {
if (!config.LIGHTNING.ENABLED) {
return;
}
try {
this.axiosConfig = {
headers: {
'Grpc-Metadata-macaroon': fs.readFileSync(config.LND.MACAROON_PATH).toString('hex')
'Grpc-Metadata-macaroon': fs.readFileSync(config.LND.MACAROON_PATH).toString('hex'),
},
httpsAgent: new Agent({
ca: fs.readFileSync(config.LND.TLS_CERT_PATH)
}),
timeout: config.LND.TIMEOUT
};
} catch (e) {
config.LIGHTNING.ENABLED = false;
logger.updateNetwork();
logger.err(`Could not initialize LND Macaroon/TLS Cert. Disabling LIGHTNING. ` + (e instanceof Error ? e.message : e));
}
}
@@ -33,8 +41,23 @@ class LndApi implements AbstractLightningApi {
}
async $getNetworkGraph(): Promise<ILightningApi.NetworkGraph> {
return axios.get<ILightningApi.NetworkGraph>(config.LND.REST_API_URL + '/v1/graph', this.axiosConfig)
const graph = await axios.get<ILightningApi.NetworkGraph>(config.LND.REST_API_URL + '/v1/graph', this.axiosConfig)
.then((response) => response.data);
for (const node of graph.nodes) {
const nodeFeatures: ILightningApi.Feature[] = [];
for (const bit in node.features) {
nodeFeatures.push({
bit: parseInt(bit, 10),
name: node.features[bit].name,
is_required: node.features[bit].is_required,
is_known: node.features[bit].is_known,
});
}
node.features = nodeFeatures;
}
return graph;
}
}

View File

@@ -2,7 +2,7 @@ import * as fs from 'fs';
import logger from '../../logger';
class Icons {
private static FILE_NAME = './icons.json';
private static FILE_NAME = '/elements/asset_registry_db/icons.json';
private iconIds: string[] = [];
private icons: { [assetId: string]: string; } = {};

View File

@@ -1,16 +1,22 @@
import { GbtGenerator, GbtResult, ThreadTransaction as RustThreadTransaction } from '../../rust-gbt';
import logger from '../logger';
import { MempoolBlock, TransactionExtended, ThreadTransaction, TransactionStripped, MempoolBlockWithTransactions, MempoolBlockDelta, Ancestor } from '../mempool.interfaces';
import { Common } from './common';
import { MempoolBlock, MempoolTransactionExtended, TransactionStripped, MempoolBlockWithTransactions, MempoolBlockDelta, Ancestor, CompactThreadTransaction, EffectiveFeeStats } from '../mempool.interfaces';
import { Common, OnlineFeeStatsCalculator } from './common';
import config from '../config';
import { Worker } from 'worker_threads';
import path from 'path';
const MAX_UINT32 = Math.pow(2, 32) - 1;
class MempoolBlocks {
private mempoolBlocks: MempoolBlockWithTransactions[] = [];
private mempoolBlockDeltas: MempoolBlockDelta[] = [];
private txSelectionWorker: Worker | null = null;
private rustInitialized: boolean = false;
private rustGbtGenerator: GbtGenerator = new GbtGenerator();
constructor() {}
private nextUid: number = 1;
private uidMap: Map<number, string> = new Map(); // map short numerical uids to full txids
public getMempoolBlocks(): MempoolBlock[] {
return this.mempoolBlocks.map((block) => {
@@ -33,13 +39,11 @@ class MempoolBlocks {
return this.mempoolBlockDeltas;
}
public updateMempoolBlocks(memPool: { [txid: string]: TransactionExtended }, saveResults: boolean = false): MempoolBlockWithTransactions[] {
public updateMempoolBlocks(memPool: { [txid: string]: MempoolTransactionExtended }, saveResults: boolean = false): MempoolBlockWithTransactions[] {
const latestMempool = memPool;
const memPoolArray: TransactionExtended[] = [];
const memPoolArray: MempoolTransactionExtended[] = [];
for (const i in latestMempool) {
if (latestMempool.hasOwnProperty(i)) {
memPoolArray.push(latestMempool[i]);
}
memPoolArray.push(latestMempool[i]);
}
const start = new Date().getTime();
@@ -49,17 +53,24 @@ class MempoolBlocks {
tx.ancestors = [];
tx.cpfpChecked = false;
if (!tx.effectiveFeePerVsize) {
tx.effectiveFeePerVsize = tx.feePerVsize;
tx.effectiveFeePerVsize = tx.adjustedFeePerVsize;
}
});
// First sort
memPoolArray.sort((a, b) => b.feePerVsize - a.feePerVsize);
memPoolArray.sort((a, b) => {
if (a.adjustedFeePerVsize === b.adjustedFeePerVsize) {
// tie-break by lexicographic txid order for stability
return a.txid < b.txid ? -1 : 1;
} else {
return b.adjustedFeePerVsize - a.adjustedFeePerVsize;
}
});
// Loop through and traverse all ancestors and sum up all the sizes + fees
// Pass down size + fee to all unconfirmed children
let sizes = 0;
memPoolArray.forEach((tx, i) => {
memPoolArray.forEach((tx) => {
sizes += tx.weight;
if (sizes > 4000000 * 8) {
return;
@@ -68,13 +79,20 @@ class MempoolBlocks {
});
// Final sort, by effective fee
memPoolArray.sort((a, b) => b.effectiveFeePerVsize - a.effectiveFeePerVsize);
memPoolArray.sort((a, b) => {
if (a.effectiveFeePerVsize === b.effectiveFeePerVsize) {
// tie-break by lexicographic txid order for stability
return a.txid < b.txid ? -1 : 1;
} else {
return b.effectiveFeePerVsize - a.effectiveFeePerVsize;
}
});
const end = new Date().getTime();
const time = end - start;
logger.debug('Mempool blocks calculated in ' + time / 1000 + ' seconds');
const blocks = this.calculateMempoolBlocks(memPoolArray, this.mempoolBlocks);
const blocks = this.calculateMempoolBlocks(memPoolArray);
if (saveResults) {
const deltas = this.calculateMempoolDeltas(this.mempoolBlocks, blocks);
@@ -85,26 +103,63 @@ class MempoolBlocks {
return blocks;
}
private calculateMempoolBlocks(transactionsSorted: TransactionExtended[], prevBlocks: MempoolBlockWithTransactions[]): MempoolBlockWithTransactions[] {
private calculateMempoolBlocks(transactionsSorted: MempoolTransactionExtended[]): MempoolBlockWithTransactions[] {
const mempoolBlocks: MempoolBlockWithTransactions[] = [];
let blockWeight = 0;
let feeStatsCalculator: OnlineFeeStatsCalculator = new OnlineFeeStatsCalculator(config.MEMPOOL.BLOCK_WEIGHT_UNITS);
let onlineStats = false;
let blockSize = 0;
let transactions: TransactionExtended[] = [];
transactionsSorted.forEach((tx) => {
let blockWeight = 0;
let blockVsize = 0;
let blockFees = 0;
const sizeLimit = (config.MEMPOOL.BLOCK_WEIGHT_UNITS / 4) * 1.2;
let transactionIds: string[] = [];
let transactions: MempoolTransactionExtended[] = [];
transactionsSorted.forEach((tx, index) => {
if (blockWeight + tx.weight <= config.MEMPOOL.BLOCK_WEIGHT_UNITS
|| mempoolBlocks.length === config.MEMPOOL.MEMPOOL_BLOCKS_AMOUNT - 1) {
tx.position = {
block: mempoolBlocks.length,
vsize: blockVsize + (tx.vsize / 2),
};
blockWeight += tx.weight;
blockVsize += tx.vsize;
blockSize += tx.size;
transactions.push(tx);
blockFees += tx.fee;
if (blockVsize <= sizeLimit) {
transactions.push(tx);
}
transactionIds.push(tx.txid);
if (onlineStats) {
feeStatsCalculator.processNext(tx);
}
} else {
mempoolBlocks.push(this.dataToMempoolBlocks(transactions, mempoolBlocks.length));
mempoolBlocks.push(this.dataToMempoolBlocks(transactionIds, transactions, blockSize, blockWeight, blockFees));
blockVsize = 0;
tx.position = {
block: mempoolBlocks.length,
vsize: blockVsize + (tx.vsize / 2),
};
if (mempoolBlocks.length === config.MEMPOOL.MEMPOOL_BLOCKS_AMOUNT - 1) {
const stackWeight = transactionsSorted.slice(index).reduce((total, tx) => total + (tx.weight || 0), 0);
if (stackWeight > config.MEMPOOL.BLOCK_WEIGHT_UNITS) {
onlineStats = true;
feeStatsCalculator = new OnlineFeeStatsCalculator(stackWeight, 0.5, [10, 20, 30, 40, 50, 60, 70, 80, 90]);
feeStatsCalculator.processNext(tx);
}
}
blockVsize += tx.vsize;
blockWeight = tx.weight;
blockSize = tx.size;
blockFees = tx.fee;
transactionIds = [tx.txid];
transactions = [tx];
}
});
if (transactions.length) {
mempoolBlocks.push(this.dataToMempoolBlocks(transactions, mempoolBlocks.length));
const feeStats = onlineStats ? feeStatsCalculator.getRawFeeStats() : undefined;
mempoolBlocks.push(this.dataToMempoolBlocks(transactionIds, transactions, blockSize, blockWeight, blockFees, feeStats));
}
return mempoolBlocks;
@@ -115,6 +170,7 @@ class MempoolBlocks {
for (let i = 0; i < Math.max(mempoolBlocks.length, prevBlocks.length); i++) {
let added: TransactionStripped[] = [];
let removed: string[] = [];
const changed: { txid: string, rate: number | undefined }[] = [];
if (mempoolBlocks[i] && !prevBlocks[i]) {
added = mempoolBlocks[i].transactions;
} else if (!mempoolBlocks[i] && prevBlocks[i]) {
@@ -123,7 +179,7 @@ class MempoolBlocks {
const prevIds = {};
const newIds = {};
prevBlocks[i].transactions.forEach(tx => {
prevIds[tx.txid] = true;
prevIds[tx.txid] = tx;
});
mempoolBlocks[i].transactions.forEach(tx => {
newIds[tx.txid] = true;
@@ -136,30 +192,45 @@ class MempoolBlocks {
mempoolBlocks[i].transactions.forEach(tx => {
if (!prevIds[tx.txid]) {
added.push(tx);
} else if (tx.rate !== prevIds[tx.txid].rate) {
changed.push({ txid: tx.txid, rate: tx.rate });
}
});
}
mempoolBlockDeltas.push({
added,
removed
removed,
changed,
});
}
return mempoolBlockDeltas;
}
public async makeBlockTemplates(newMempool: { [txid: string]: TransactionExtended }, saveResults: boolean = false): Promise<MempoolBlockWithTransactions[]> {
public async $makeBlockTemplates(newMempool: { [txid: string]: MempoolTransactionExtended }, saveResults: boolean = false): Promise<MempoolBlockWithTransactions[]> {
const start = Date.now();
// reset mempool short ids
this.resetUids();
for (const tx of Object.values(newMempool)) {
this.setUid(tx);
}
// prepare a stripped down version of the mempool with only the minimum necessary data
// to reduce the overhead of passing this data to the worker thread
const strippedMempool: { [txid: string]: ThreadTransaction } = {};
const strippedMempool: Map<number, CompactThreadTransaction> = new Map();
Object.values(newMempool).forEach(entry => {
strippedMempool[entry.txid] = {
txid: entry.txid,
fee: entry.fee,
weight: entry.weight,
feePerVsize: entry.fee / (entry.weight / 4),
effectiveFeePerVsize: entry.fee / (entry.weight / 4),
vin: entry.vin.map(v => v.txid),
};
if (entry.uid !== null && entry.uid !== undefined) {
const stripped = {
uid: entry.uid,
fee: entry.fee,
weight: (entry.adjustedVsize * 4),
sigops: entry.sigops,
feePerVsize: entry.adjustedFeePerVsize || entry.feePerVsize,
effectiveFeePerVsize: entry.effectiveFeePerVsize || entry.adjustedFeePerVsize || entry.feePerVsize,
inputs: entry.vin.map(v => this.getUid(newMempool[v.txid])).filter(uid => (uid !== null && uid !== undefined)) as number[],
};
strippedMempool.set(entry.uid, stripped);
}
});
// (re)initialize tx selection worker thread
@@ -178,7 +249,7 @@ class MempoolBlocks {
// run the block construction algorithm in a separate thread, and wait for a result
let threadErrorListener;
try {
const workerResultPromise = new Promise<{ blocks: ThreadTransaction[][], clusters: { [root: string]: string[] } }>((resolve, reject) => {
const workerResultPromise = new Promise<{ blocks: number[][], rates: Map<number, number>, clusters: Map<number, number[]> }>((resolve, reject) => {
threadErrorListener = reject;
this.txSelectionWorker?.once('message', (result): void => {
resolve(result);
@@ -186,102 +257,268 @@ class MempoolBlocks {
this.txSelectionWorker?.once('error', reject);
});
this.txSelectionWorker.postMessage({ type: 'set', mempool: strippedMempool });
const { blocks, clusters } = await workerResultPromise;
const { blocks, rates, clusters } = this.convertResultTxids(await workerResultPromise);
// clean up thread error listener
this.txSelectionWorker?.removeListener('error', threadErrorListener);
return this.processBlockTemplates(newMempool, blocks, clusters, saveResults);
const processed = this.processBlockTemplates(newMempool, blocks, null, Object.entries(rates), Object.values(clusters), saveResults);
logger.debug(`makeBlockTemplates completed in ${(Date.now() - start)/1000} seconds`);
return processed;
} catch (e) {
logger.err('makeBlockTemplates failed. ' + (e instanceof Error ? e.message : e));
}
return this.mempoolBlocks;
}
public async updateBlockTemplates(newMempool: { [txid: string]: TransactionExtended }, added: TransactionExtended[], removed: string[], saveResults: boolean = false): Promise<void> {
public async $updateBlockTemplates(newMempool: { [txid: string]: MempoolTransactionExtended }, added: MempoolTransactionExtended[], removed: MempoolTransactionExtended[], saveResults: boolean = false): Promise<void> {
if (!this.txSelectionWorker) {
// need to reset the worker
this.makeBlockTemplates(newMempool, saveResults);
await this.$makeBlockTemplates(newMempool, saveResults);
return;
}
const start = Date.now();
for (const tx of Object.values(added)) {
this.setUid(tx, true);
}
const removedUids = removed.map(tx => this.getUid(tx)).filter(uid => (uid !== null && uid !== undefined)) as number[];
// prepare a stripped down version of the mempool with only the minimum necessary data
// to reduce the overhead of passing this data to the worker thread
const addedStripped: ThreadTransaction[] = added.map(entry => {
const addedStripped: CompactThreadTransaction[] = added.filter(entry => (entry.uid !== null && entry.uid !== undefined)).map(entry => {
return {
txid: entry.txid,
uid: entry.uid || 0,
fee: entry.fee,
weight: entry.weight,
feePerVsize: entry.fee / (entry.weight / 4),
effectiveFeePerVsize: entry.fee / (entry.weight / 4),
vin: entry.vin.map(v => v.txid),
weight: (entry.adjustedVsize * 4),
sigops: entry.sigops,
feePerVsize: entry.adjustedFeePerVsize || entry.feePerVsize,
effectiveFeePerVsize: entry.effectiveFeePerVsize || entry.adjustedFeePerVsize || entry.feePerVsize,
inputs: entry.vin.map(v => this.getUid(newMempool[v.txid])).filter(uid => (uid !== null && uid !== undefined)) as number[],
};
});
// run the block construction algorithm in a separate thread, and wait for a result
let threadErrorListener;
try {
const workerResultPromise = new Promise<{ blocks: ThreadTransaction[][], clusters: { [root: string]: string[] } }>((resolve, reject) => {
const workerResultPromise = new Promise<{ blocks: number[][], rates: Map<number, number>, clusters: Map<number, number[]> }>((resolve, reject) => {
threadErrorListener = reject;
this.txSelectionWorker?.once('message', (result): void => {
resolve(result);
});
this.txSelectionWorker?.once('error', reject);
});
this.txSelectionWorker.postMessage({ type: 'update', added: addedStripped, removed });
const { blocks, clusters } = await workerResultPromise;
this.txSelectionWorker.postMessage({ type: 'update', added: addedStripped, removed: removedUids });
const { blocks, rates, clusters } = this.convertResultTxids(await workerResultPromise);
this.removeUids(removedUids);
// clean up thread error listener
this.txSelectionWorker?.removeListener('error', threadErrorListener);
this.processBlockTemplates(newMempool, blocks, clusters, saveResults);
this.processBlockTemplates(newMempool, blocks, null, Object.entries(rates), Object.values(clusters), saveResults);
logger.debug(`updateBlockTemplates completed in ${(Date.now() - start) / 1000} seconds`);
} catch (e) {
logger.err('updateBlockTemplates failed. ' + (e instanceof Error ? e.message : e));
}
}
private processBlockTemplates(mempool, blocks, clusters, saveResults): MempoolBlockWithTransactions[] {
// update this thread's mempool with the results
blocks.forEach(block => {
block.forEach(tx => {
if (tx.txid in mempool) {
if (tx.effectiveFeePerVsize != null) {
mempool[tx.txid].effectiveFeePerVsize = tx.effectiveFeePerVsize;
}
if (tx.cpfpRoot && tx.cpfpRoot in clusters) {
const ancestors: Ancestor[] = [];
const descendants: Ancestor[] = [];
const cluster = clusters[tx.cpfpRoot];
let matched = false;
cluster.forEach(txid => {
if (txid === tx.txid) {
matched = true;
} else {
const relative = {
txid: txid,
fee: mempool[txid].fee,
weight: mempool[txid].weight,
};
if (matched) {
descendants.push(relative);
} else {
ancestors.push(relative);
}
}
});
mempool[tx.txid].ancestors = ancestors;
mempool[tx.txid].descendants = descendants;
mempool[tx.txid].bestDescendant = null;
}
mempool[tx.txid].cpfpChecked = tx.cpfpChecked;
}
});
});
private resetRustGbt(): void {
this.rustInitialized = false;
this.rustGbtGenerator = new GbtGenerator();
}
// unpack the condensed blocks into proper mempool blocks
const mempoolBlocks = blocks.map((transactions, blockIndex) => {
return this.dataToMempoolBlocks(transactions.map(tx => {
return mempool[tx.txid] || null;
}).filter(tx => !!tx), blockIndex);
private async $rustMakeBlockTemplates(newMempool: { [txid: string]: MempoolTransactionExtended }, saveResults: boolean = false): Promise<MempoolBlockWithTransactions[]> {
const start = Date.now();
// reset mempool short ids
if (saveResults) {
this.resetUids();
}
// set missing short ids
for (const tx of Object.values(newMempool)) {
this.setUid(tx, !saveResults);
}
// set short ids for transaction inputs
for (const tx of Object.values(newMempool)) {
tx.inputs = tx.vin.map(v => this.getUid(newMempool[v.txid])).filter(uid => (uid !== null && uid !== undefined)) as number[];
}
// run the block construction algorithm in a separate thread, and wait for a result
const rustGbt = saveResults ? this.rustGbtGenerator : new GbtGenerator();
try {
const { blocks, blockWeights, rates, clusters } = this.convertNapiResultTxids(
await rustGbt.make(Object.values(newMempool) as RustThreadTransaction[], this.nextUid),
);
if (saveResults) {
this.rustInitialized = true;
}
const processed = this.processBlockTemplates(newMempool, blocks, blockWeights, rates, clusters, saveResults);
logger.debug(`RUST makeBlockTemplates completed in ${(Date.now() - start)/1000} seconds`);
return processed;
} catch (e) {
logger.err('RUST makeBlockTemplates failed. ' + (e instanceof Error ? e.message : e));
if (saveResults) {
this.resetRustGbt();
}
}
return this.mempoolBlocks;
}
public async $oneOffRustBlockTemplates(newMempool: { [txid: string]: MempoolTransactionExtended }): Promise<MempoolBlockWithTransactions[]> {
return this.$rustMakeBlockTemplates(newMempool, false);
}
public async $rustUpdateBlockTemplates(newMempool: { [txid: string]: MempoolTransactionExtended }, mempoolSize: number, added: MempoolTransactionExtended[], removed: MempoolTransactionExtended[]): Promise<void> {
// GBT optimization requires that uids never get too sparse
// as a sanity check, we should also explicitly prevent uint32 uid overflow
if (this.nextUid + added.length >= Math.min(Math.max(262144, 2 * mempoolSize), MAX_UINT32)) {
this.resetRustGbt();
}
if (!this.rustInitialized) {
// need to reset the worker
await this.$rustMakeBlockTemplates(newMempool, true);
return;
}
const start = Date.now();
// set missing short ids
for (const tx of added) {
this.setUid(tx, true);
}
// set short ids for transaction inputs
for (const tx of added) {
tx.inputs = tx.vin.map(v => this.getUid(newMempool[v.txid])).filter(uid => (uid !== null && uid !== undefined)) as number[];
}
const removedUids = removed.map(tx => this.getUid(tx)).filter(uid => (uid !== null && uid !== undefined)) as number[];
// run the block construction algorithm in a separate thread, and wait for a result
try {
const { blocks, blockWeights, rates, clusters } = this.convertNapiResultTxids(
await this.rustGbtGenerator.update(
added as RustThreadTransaction[],
removedUids,
this.nextUid,
),
);
const resultMempoolSize = blocks.reduce((total, block) => total + block.length, 0);
if (mempoolSize !== resultMempoolSize) {
throw new Error('GBT returned wrong number of transactions, cache is probably out of sync');
} else {
this.processBlockTemplates(newMempool, blocks, blockWeights, rates, clusters, true);
}
this.removeUids(removedUids);
logger.debug(`RUST updateBlockTemplates completed in ${(Date.now() - start)/1000} seconds`);
} catch (e) {
logger.err('RUST updateBlockTemplates failed. ' + (e instanceof Error ? e.message : e));
this.resetRustGbt();
}
}
private processBlockTemplates(mempool: { [txid: string]: MempoolTransactionExtended }, blocks: string[][], blockWeights: number[] | null, rates: [string, number][], clusters: string[][], saveResults): MempoolBlockWithTransactions[] {
for (const [txid, rate] of rates) {
if (txid in mempool) {
mempool[txid].effectiveFeePerVsize = rate;
mempool[txid].cpfpChecked = false;
}
}
const lastBlockIndex = blocks.length - 1;
let hasBlockStack = blocks.length >= 8;
let stackWeight;
let feeStatsCalculator: OnlineFeeStatsCalculator | void;
if (hasBlockStack) {
if (blockWeights && blockWeights[7] !== null) {
stackWeight = blockWeights[7];
} else {
stackWeight = blocks[lastBlockIndex].reduce((total, tx) => total + (mempool[tx]?.weight || 0), 0);
}
hasBlockStack = stackWeight > config.MEMPOOL.BLOCK_WEIGHT_UNITS;
feeStatsCalculator = new OnlineFeeStatsCalculator(stackWeight, 0.5, [10, 20, 30, 40, 50, 60, 70, 80, 90]);
}
for (const cluster of clusters) {
for (const memberTxid of cluster) {
const mempoolTx = mempool[memberTxid];
if (mempoolTx) {
const ancestors: Ancestor[] = [];
const descendants: Ancestor[] = [];
let matched = false;
cluster.forEach(txid => {
if (txid === memberTxid) {
matched = true;
} else {
const relative = {
txid: txid,
fee: mempool[txid].fee,
weight: (mempool[txid].adjustedVsize * 4),
};
if (matched) {
descendants.push(relative);
mempoolTx.lastBoosted = Math.max(mempoolTx.lastBoosted || 0, mempool[txid].firstSeen || 0);
} else {
ancestors.push(relative);
}
}
});
Object.assign(mempoolTx, {ancestors, descendants, bestDescendant: null, cpfpChecked: true});
}
}
}
const sizeLimit = (config.MEMPOOL.BLOCK_WEIGHT_UNITS / 4) * 1.2;
// update this thread's mempool with the results
let mempoolTx: MempoolTransactionExtended;
const mempoolBlocks: MempoolBlockWithTransactions[] = blocks.map((block, blockIndex) => {
let totalSize = 0;
let totalVsize = 0;
let totalWeight = 0;
let totalFees = 0;
const transactions: MempoolTransactionExtended[] = [];
for (const txid of block) {
if (txid) {
mempoolTx = mempool[txid];
// save position in projected blocks
mempoolTx.position = {
block: blockIndex,
vsize: totalVsize + (mempoolTx.vsize / 2),
};
if (!mempoolTx.cpfpChecked) {
if (mempoolTx.ancestors?.length) {
mempoolTx.ancestors = [];
}
if (mempoolTx.descendants?.length) {
mempoolTx.descendants = [];
}
mempoolTx.bestDescendant = null;
mempoolTx.cpfpChecked = true;
}
// online calculation of stack-of-blocks fee stats
if (hasBlockStack && blockIndex === lastBlockIndex && feeStatsCalculator) {
feeStatsCalculator.processNext(mempoolTx);
}
totalSize += mempoolTx.size;
totalVsize += mempoolTx.vsize;
totalWeight += mempoolTx.weight;
totalFees += mempoolTx.fee;
if (totalVsize <= sizeLimit) {
transactions.push(mempoolTx);
}
}
}
return this.dataToMempoolBlocks(
block,
transactions,
totalSize,
totalWeight,
totalFees,
(hasBlockStack && blockIndex === lastBlockIndex && feeStatsCalculator) ? feeStatsCalculator.getRawFeeStats() : undefined,
);
});
if (saveResults) {
@@ -293,37 +530,97 @@ class MempoolBlocks {
return mempoolBlocks;
}
private dataToMempoolBlocks(transactions: TransactionExtended[], blocksIndex: number): MempoolBlockWithTransactions {
let totalSize = 0;
let totalWeight = 0;
const fitTransactions: TransactionExtended[] = [];
transactions.forEach(tx => {
totalSize += tx.size;
totalWeight += tx.weight;
if ((totalWeight + tx.weight) <= config.MEMPOOL.BLOCK_WEIGHT_UNITS * 1.2) {
fitTransactions.push(tx);
}
});
let rangeLength = 4;
if (blocksIndex === 0) {
rangeLength = 8;
}
if (transactions.length > 4000) {
rangeLength = 6;
} else if (transactions.length > 10000) {
rangeLength = 8;
private dataToMempoolBlocks(transactionIds: string[], transactions: MempoolTransactionExtended[], totalSize: number, totalWeight: number, totalFees: number, feeStats?: EffectiveFeeStats ): MempoolBlockWithTransactions {
if (!feeStats) {
feeStats = Common.calcEffectiveFeeStatistics(transactions);
}
return {
blockSize: totalSize,
blockVSize: totalWeight / 4,
nTx: transactions.length,
totalFees: transactions.reduce((acc, cur) => acc + cur.fee, 0),
medianFee: Common.percentile(transactions.map((tx) => tx.effectiveFeePerVsize), config.MEMPOOL.RECOMMENDED_FEE_PERCENTILE),
feeRange: Common.getFeesInRange(transactions, rangeLength),
transactionIds: transactions.map((tx) => tx.txid),
transactions: fitTransactions.map((tx) => Common.stripTransaction(tx)),
blockVSize: (totalWeight / 4), // fractional vsize to avoid rounding errors
nTx: transactionIds.length,
totalFees: totalFees,
medianFee: feeStats.medianFee, // Common.percentile(transactions.map((tx) => tx.effectiveFeePerVsize), config.MEMPOOL.RECOMMENDED_FEE_PERCENTILE),
feeRange: feeStats.feeRange, //Common.getFeesInRange(transactions, rangeLength),
transactionIds: transactionIds,
transactions: transactions.map((tx) => Common.stripTransaction(tx)),
};
}
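When no online calculator ran for the block, the fee stats fall back to a calculation over the block's transactions. As an illustrative sketch only (the real Common.calcEffectiveFeeStatistics may differ in detail), a vsize-weighted median of effective fee rates looks like this:

// Sketch: weighted median of effective fee rates, weighted by transaction weight.
function weightedMedianFeeRate(txs: { weight: number; effectiveFeePerVsize: number }[]): number {
  const sorted = [...txs].sort((a, b) => a.effectiveFeePerVsize - b.effectiveFeePerVsize);
  const totalWeight = sorted.reduce((sum, tx) => sum + tx.weight, 0);
  let accumulated = 0;
  for (const tx of sorted) {
    accumulated += tx.weight;
    if (accumulated >= totalWeight / 2) {
      return tx.effectiveFeePerVsize;
    }
  }
  return 0;
}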
private resetUids(): void {
this.uidMap.clear();
this.nextUid = 1;
}
private setUid(tx: MempoolTransactionExtended, skipSet = false): number {
if (tx.uid === null || tx.uid === undefined || !skipSet) {
const uid = this.nextUid;
this.nextUid++;
this.uidMap.set(uid, tx.txid);
tx.uid = uid;
return uid;
} else {
return tx.uid;
}
}
private getUid(tx: MempoolTransactionExtended): number | void {
if (tx?.uid !== null && tx?.uid !== undefined && this.uidMap.has(tx.uid)) {
return tx.uid;
}
}
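A minimal standalone sketch of the txid-to-uid round trip these helpers support for the worker protocol (the class above keeps this state on the instance; the genesis coinbase txid is used only as sample data):

const uidMap = new Map<number, string>();
let nextUid = 1;

function assignUid(txid: string): number {
  const uid = nextUid++;
  uidMap.set(uid, txid);
  return uid;
}

const uid = assignUid('4a5e1e4baab89f3a32518a88c31bc87f618f76673e2cc77ab2127b7afdeda33b');
console.log(uidMap.get(uid)); // recovers the original txid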
private removeUids(uids: number[]): void {
for (const uid of uids) {
this.uidMap.delete(uid);
}
}
private convertResultTxids({ blocks, rates, clusters }: { blocks: number[][], rates: Map<number, number>, clusters: Map<number, number[]>})
: { blocks: string[][], rates: { [root: string]: number }, clusters: { [root: string]: string[] }} {
const convertedBlocks: string[][] = blocks.map(block => block.map(uid => {
return this.uidMap.get(uid) || '';
}));
const convertedRates = {};
for (const rateUid of rates.keys()) {
const rateTxid = this.uidMap.get(rateUid);
if (rateTxid) {
convertedRates[rateTxid] = rates.get(rateUid);
}
}
const convertedClusters = {};
for (const rootUid of clusters.keys()) {
const rootTxid = this.uidMap.get(rootUid);
if (rootTxid) {
const members = clusters.get(rootUid)?.map(uid => {
return this.uidMap.get(uid);
});
convertedClusters[rootTxid] = members;
}
}
return { blocks: convertedBlocks, rates: convertedRates, clusters: convertedClusters } as { blocks: string[][], rates: { [root: string]: number }, clusters: { [root: string]: string[] }};
}
private convertNapiResultTxids({ blocks, blockWeights, rates, clusters }: GbtResult)
: { blocks: string[][], blockWeights: number[], rates: [string, number][], clusters: string[][] } {
const convertedBlocks: string[][] = blocks.map(block => block.map(uid => {
const txid = this.uidMap.get(uid);
if (txid !== undefined) {
return txid;
} else {
throw new Error('GBT returned a block containing a transaction with unknown uid');
}
}));
const convertedRates: [string, number][] = [];
for (const [rateUid, rate] of rates) {
const rateTxid = this.uidMap.get(rateUid) as string;
convertedRates.push([rateTxid, rate]);
}
const convertedClusters: string[][] = [];
for (const cluster of clusters) {
convertedClusters.push(cluster.map(uid => this.uidMap.get(uid)) as string[]);
}
return { blocks: convertedBlocks, blockWeights, rates: convertedRates, clusters: convertedClusters };
}
}
export default new MempoolBlocks();

View File

@@ -1,6 +1,6 @@
import config from '../config';
import bitcoinApi from './bitcoin/bitcoin-api-factory';
import { TransactionExtended, VbytesPerSecond } from '../mempool.interfaces';
import { MempoolTransactionExtended, TransactionExtended, VbytesPerSecond } from '../mempool.interfaces';
import logger from '../logger';
import { Common } from './common';
import transactionUtils from './transaction-utils';
@@ -11,17 +11,16 @@ import bitcoinSecondClient from './bitcoin/bitcoin-second-client';
import rbfCache from './rbf-cache';
class Mempool {
private static WEBSOCKET_REFRESH_RATE_MS = 10000;
private static LAZY_DELETE_AFTER_SECONDS = 30;
private inSync: boolean = false;
private mempoolCacheDelta: number = -1;
private mempoolCache: { [txId: string]: TransactionExtended } = {};
private mempoolCache: { [txId: string]: MempoolTransactionExtended } = {};
private spendMap = new Map<string, MempoolTransactionExtended>();
private mempoolInfo: IBitcoinApi.MempoolInfo = { loaded: false, size: 0, bytes: 0, usage: 0, total_fee: 0,
maxmempool: 300000000, mempoolminfee: 0.00001000, minrelaytxfee: 0.00001000 };
private mempoolChangedCallback: ((newMempool: {[txId: string]: TransactionExtended; }, newTransactions: TransactionExtended[],
deletedTransactions: TransactionExtended[]) => void) | undefined;
private asyncMempoolChangedCallback: ((newMempool: {[txId: string]: TransactionExtended; }, newTransactions: TransactionExtended[],
deletedTransactions: TransactionExtended[]) => Promise<void>) | undefined;
private mempoolChangedCallback: ((newMempool: {[txId: string]: MempoolTransactionExtended; }, newTransactions: MempoolTransactionExtended[],
deletedTransactions: MempoolTransactionExtended[]) => void) | undefined;
private $asyncMempoolChangedCallback: ((newMempool: {[txId: string]: MempoolTransactionExtended; }, mempoolSize: number, newTransactions: MempoolTransactionExtended[],
deletedTransactions: MempoolTransactionExtended[]) => Promise<void>) | undefined;
private txPerSecondArray: number[] = [];
private txPerSecond: number = 0;
@@ -35,10 +34,10 @@ class Mempool {
private SAMPLE_TIME = 10000; // In ms
private timer = new Date().getTime();
private missingTxCount = 0;
private mainLoopTimeout: number = 120000;
constructor() {
setInterval(this.updateTxPerSecond.bind(this), 1000);
setInterval(this.deleteExpiredTransactions.bind(this), 20000);
}
/**
@@ -65,28 +64,43 @@ class Mempool {
return this.latestTransactions;
}
public setMempoolChangedCallback(fn: (newMempool: { [txId: string]: TransactionExtended; },
newTransactions: TransactionExtended[], deletedTransactions: TransactionExtended[]) => void) {
public setMempoolChangedCallback(fn: (newMempool: { [txId: string]: MempoolTransactionExtended; },
newTransactions: MempoolTransactionExtended[], deletedTransactions: MempoolTransactionExtended[]) => void): void {
this.mempoolChangedCallback = fn;
}
public setAsyncMempoolChangedCallback(fn: (newMempool: { [txId: string]: TransactionExtended; },
newTransactions: TransactionExtended[], deletedTransactions: TransactionExtended[]) => Promise<void>) {
this.asyncMempoolChangedCallback = fn;
public setAsyncMempoolChangedCallback(fn: (newMempool: { [txId: string]: MempoolTransactionExtended; }, mempoolSize: number,
newTransactions: MempoolTransactionExtended[], deletedTransactions: MempoolTransactionExtended[]) => Promise<void>): void {
this.$asyncMempoolChangedCallback = fn;
}
public getMempool(): { [txid: string]: TransactionExtended } {
public getMempool(): { [txid: string]: MempoolTransactionExtended } {
return this.mempoolCache;
}
public setMempool(mempoolData: { [txId: string]: TransactionExtended }) {
public getSpendMap(): Map<string, MempoolTransactionExtended> {
return this.spendMap;
}
public async $setMempool(mempoolData: { [txId: string]: MempoolTransactionExtended }) {
this.mempoolCache = mempoolData;
let count = 0;
for (const txid of Object.keys(this.mempoolCache)) {
if (!this.mempoolCache[txid].sigops || this.mempoolCache[txid].effectiveFeePerVsize == null) {
this.mempoolCache[txid] = transactionUtils.extendMempoolTransaction(this.mempoolCache[txid]);
}
if (this.mempoolCache[txid].order == null) {
this.mempoolCache[txid].order = transactionUtils.txidToOrdering(txid);
}
count++;
}
if (this.mempoolChangedCallback) {
this.mempoolChangedCallback(this.mempoolCache, [], []);
}
if (this.asyncMempoolChangedCallback) {
this.asyncMempoolChangedCallback(this.mempoolCache, [], []);
if (this.$asyncMempoolChangedCallback) {
await this.$asyncMempoolChangedCallback(this.mempoolCache, count, [], []);
}
this.addToSpendMap(Object.values(this.mempoolCache));
}
public async $updateMemPoolInfo() {
@@ -118,19 +132,23 @@ class Mempool {
return txTimes;
}
public async $updateMempool(): Promise<void> {
public async $updateMempool(transactions: string[]): Promise<void> {
logger.debug(`Updating mempool...`);
// warn if this run stalls the main loop for more than 2 minutes
const timer = this.startTimer();
const start = new Date().getTime();
let hasChange: boolean = false;
const currentMempoolSize = Object.keys(this.mempoolCache).length;
const transactions = await bitcoinApi.$getRawMempool();
this.updateTimerProgress(timer, 'got raw mempool');
const diff = transactions.length - currentMempoolSize;
const newTransactions: TransactionExtended[] = [];
const newTransactions: MempoolTransactionExtended[] = [];
this.mempoolCacheDelta = Math.abs(diff);
if (!this.inSync) {
loadingIndicators.setProgress('mempool', Object.keys(this.mempoolCache).length / transactions.length * 100);
loadingIndicators.setProgress('mempool', currentMempoolSize / transactions.length * 100);
}
// https://github.com/mempool/mempool/issues/3283
@@ -143,10 +161,12 @@ class Mempool {
}
};
let intervalTimer = Date.now();
for (const txid of transactions) {
if (!this.mempoolCache[txid]) {
try {
const transaction = await transactionUtils.$getTransactionExtended(txid);
const transaction = await transactionUtils.$getMempoolTransactionExtended(txid, false, false, false);
this.updateTimerProgress(timer, 'fetched new transaction');
this.mempoolCache[txid] = transaction;
if (this.inSync) {
this.txPerSecondArray.push(new Date().getTime());
@@ -165,8 +185,19 @@ class Mempool {
}
}
if ((new Date().getTime()) - start > Mempool.WEBSOCKET_REFRESH_RATE_MS) {
break;
if (Date.now() - intervalTimer > 5_000) {
if (this.inSync) {
// Break and restart mempool loop if we spend too much time processing
// new transactions that may lead to falling behind on block height
logger.debug('Breaking mempool loop because the 5s time limit was exceeded.');
break;
} else {
const progress = (currentMempoolSize + newTransactions.length) / transactions.length * 100;
logger.debug(`Mempool is synchronizing. Processed ${newTransactions.length}/${diff} txs (${Math.round(progress)}%)`);
loadingIndicators.setProgress('mempool', progress);
intervalTimer = Date.now();
}
}
}
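A standalone sketch of the time-budget pattern used above (names and the 5s budget are illustrative): stop fetching new transactions once the budget is spent so the main loop never falls behind on block height, and pick up the remainder on the next run.

async function fetchWithBudget(txids: string[], fetchTx: (txid: string) => Promise<void>, budgetMs = 5_000): Promise<string[]> {
  const remaining: string[] = [];
  const start = Date.now();
  for (const txid of txids) {
    if (Date.now() - start > budgetMs) {
      remaining.push(txid); // defer to the next update run
      continue;
    }
    await fetchTx(txid);
  }
  return remaining;
}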
@@ -192,7 +223,7 @@ class Mempool {
}, 1000 * 60 * config.MEMPOOL.CLEAR_PROTECTION_MINUTES);
}
const deletedTransactions: TransactionExtended[] = [];
const deletedTransactions: MempoolTransactionExtended[] = [];
if (this.mempoolProtection !== 1) {
this.mempoolProtection = 0;
@@ -200,45 +231,100 @@ class Mempool {
const transactionsObject = {};
transactions.forEach((txId) => transactionsObject[txId] = true);
// Flag transactions for lazy deletion
// Delete evicted transactions from mempool
for (const tx in this.mempoolCache) {
if (!transactionsObject[tx] && !this.mempoolCache[tx].deleteAfter) {
if (!transactionsObject[tx]) {
deletedTransactions.push(this.mempoolCache[tx]);
this.mempoolCache[tx].deleteAfter = new Date().getTime() + Mempool.LAZY_DELETE_AFTER_SECONDS * 1000;
}
}
for (const tx of deletedTransactions) {
delete this.mempoolCache[tx.txid];
}
}
const newMempoolSize = currentMempoolSize + newTransactions.length - deletedTransactions.length;
const newTransactionsStripped = newTransactions.map((tx) => Common.stripTransaction(tx));
this.latestTransactions = newTransactionsStripped.concat(this.latestTransactions).slice(0, 6);
if (!this.inSync && transactions.length === Object.keys(this.mempoolCache).length) {
if (!this.inSync && transactions.length === newMempoolSize) {
this.inSync = true;
logger.notice('The mempool is now in sync!');
loadingIndicators.setProgress('mempool', 100);
}
this.mempoolCacheDelta = Math.abs(transactions.length - Object.keys(this.mempoolCache).length);
this.mempoolCacheDelta = Math.abs(transactions.length - newMempoolSize);
if (this.mempoolChangedCallback && (hasChange || deletedTransactions.length)) {
this.mempoolChangedCallback(this.mempoolCache, newTransactions, deletedTransactions);
}
if (this.asyncMempoolChangedCallback && (hasChange || deletedTransactions.length)) {
await this.asyncMempoolChangedCallback(this.mempoolCache, newTransactions, deletedTransactions);
if (this.$asyncMempoolChangedCallback && (hasChange || deletedTransactions.length)) {
this.updateTimerProgress(timer, 'running async mempool callback');
await this.$asyncMempoolChangedCallback(this.mempoolCache, newMempoolSize, newTransactions, deletedTransactions);
this.updateTimerProgress(timer, 'completed async mempool callback');
}
const end = new Date().getTime();
const time = end - start;
logger.debug(`Mempool updated in ${time / 1000} seconds. New size: ${Object.keys(this.mempoolCache).length} (${diff > 0 ? '+' + diff : diff})`);
this.clearTimer(timer);
}
public handleRbfTransactions(rbfTransactions: { [txid: string]: TransactionExtended; }) {
private startTimer() {
const state: any = {
start: Date.now(),
progress: 'begin $updateMempool',
timer: null,
};
state.timer = setTimeout(() => {
logger.err(`$updateMempool stalled at "${state.progress}"`);
}, this.mainLoopTimeout);
return state;
}
private updateTimerProgress(state, msg) {
state.progress = msg;
}
private clearTimer(state) {
if (state.timer) {
clearTimeout(state.timer);
}
}
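A standalone sketch of the watchdog pattern implemented by startTimer/updateTimerProgress/clearTimer above (function and variable names here are illustrative): the timeout only fires if it is never cleared, and the logged message names the last recorded checkpoint.

function startWatchdog(timeoutMs: number): { progress: string; timer: ReturnType<typeof setTimeout> | null } {
  const state: { progress: string; timer: ReturnType<typeof setTimeout> | null } = {
    progress: 'begin',
    timer: null,
  };
  state.timer = setTimeout(() => {
    console.error(`stalled at "${state.progress}"`); // reports the last checkpoint reached
  }, timeoutMs);
  return state;
}

const watchdog = startWatchdog(120_000);
watchdog.progress = 'got raw mempool';            // checkpoint updates are plain assignments
if (watchdog.timer) { clearTimeout(watchdog.timer); } // disarm on successful completion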
public handleRbfTransactions(rbfTransactions: { [txid: string]: MempoolTransactionExtended[]; }): void {
for (const rbfTransaction in rbfTransactions) {
if (this.mempoolCache[rbfTransaction]) {
if (this.mempoolCache[rbfTransaction] && rbfTransactions[rbfTransaction]?.length) {
// Store replaced transactions
rbfCache.add(this.mempoolCache[rbfTransaction], rbfTransactions[rbfTransaction].txid);
// Erase the replaced transactions from the local mempool
delete this.mempoolCache[rbfTransaction];
rbfCache.add(rbfTransactions[rbfTransaction], this.mempoolCache[rbfTransaction]);
}
}
}
public handleMinedRbfTransactions(rbfTransactions: { [txid: string]: { replaced: MempoolTransactionExtended[], replacedBy: TransactionExtended }}): void {
for (const rbfTransaction in rbfTransactions) {
if (rbfTransactions[rbfTransaction].replacedBy && rbfTransactions[rbfTransaction]?.replaced?.length) {
// Store replaced transactions
rbfCache.add(rbfTransactions[rbfTransaction].replaced, transactionUtils.extendMempoolTransaction(rbfTransactions[rbfTransaction].replacedBy));
}
}
}
public addToSpendMap(transactions: MempoolTransactionExtended[]): void {
for (const tx of transactions) {
for (const vin of tx.vin) {
this.spendMap.set(`${vin.txid}:${vin.vout}`, tx);
}
}
}
public removeFromSpendMap(transactions: TransactionExtended[]): void {
for (const tx of transactions) {
for (const vin of tx.vin) {
const key = `${vin.txid}:${vin.vout}`;
if (this.spendMap.get(key)?.txid === tx.txid) {
this.spendMap.delete(key);
}
}
}
}
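For illustration, one way a caller might consume getSpendMap(): look up each outpoint an incoming transaction spends and collect any in-mempool transactions it conflicts with (i.e. candidates it would replace). This is a sketch, not the backend's actual replacement-detection code.

function findConflicts(
  incoming: { txid: string; vin: { txid: string; vout: number }[] },
  spendMap: Map<string, { txid: string }>,
): string[] {
  const conflicts = new Set<string>();
  for (const vin of incoming.vin) {
    const existing = spendMap.get(`${vin.txid}:${vin.vout}`);
    if (existing && existing.txid !== incoming.txid) {
      conflicts.add(existing.txid);
    }
  }
  return Array.from(conflicts);
}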
@@ -256,17 +342,6 @@ class Mempool {
}
}
private deleteExpiredTransactions() {
const now = new Date().getTime();
for (const tx in this.mempoolCache) {
const lazyDeleteAt = this.mempoolCache[tx].deleteAfter;
if (lazyDeleteAt && lazyDeleteAt < now) {
delete this.mempoolCache[tx];
rbfCache.evict(tx);
}
}
}
private $getMempoolInfo() {
if (config.MEMPOOL.USE_SECOND_NODE_FOR_MINFEE) {
return Promise.all([

View File

@@ -26,7 +26,7 @@ class MiningRoutes {
.get(config.MEMPOOL.API_URL_PREFIX + 'mining/blocks/fee-rates/:interval', this.$getHistoricalBlockFeeRates)
.get(config.MEMPOOL.API_URL_PREFIX + 'mining/blocks/sizes-weights/:interval', this.$getHistoricalBlockSizeAndWeight)
.get(config.MEMPOOL.API_URL_PREFIX + 'mining/difficulty-adjustments/:interval', this.$getDifficultyAdjustments)
.get(config.MEMPOOL.API_URL_PREFIX + 'mining/blocks/predictions/:interval', this.$getHistoricalBlockPrediction)
.get(config.MEMPOOL.API_URL_PREFIX + 'mining/blocks/predictions/:interval', this.$getHistoricalBlocksHealth)
.get(config.MEMPOOL.API_URL_PREFIX + 'mining/blocks/audit/scores', this.$getBlockAuditScores)
.get(config.MEMPOOL.API_URL_PREFIX + 'mining/blocks/audit/scores/:height', this.$getBlockAuditScores)
.get(config.MEMPOOL.API_URL_PREFIX + 'mining/blocks/audit/score/:hash', this.$getBlockAuditScore)
@@ -244,15 +244,15 @@ class MiningRoutes {
}
}
private async $getHistoricalBlockPrediction(req: Request, res: Response) {
private async $getHistoricalBlocksHealth(req: Request, res: Response) {
try {
const blockPredictions = await mining.$getBlockPredictionsHistory(req.params.interval);
const blockCount = await BlocksAuditsRepository.$getPredictionsCount();
const blocksHealth = await mining.$getBlocksHealthHistory(req.params.interval);
const blockCount = await BlocksAuditsRepository.$getBlocksHealthCount();
res.header('Pragma', 'public');
res.header('Cache-control', 'public');
res.header('X-total-count', blockCount.toString());
res.setHeader('Expires', new Date(Date.now() + 1000 * 60).toUTCString());
res.json(blockPredictions.map(prediction => [prediction.time, prediction.height, prediction.match_rate]));
res.json(blocksHealth.map(health => [health.time, health.height, health.match_rate]));
} catch (e) {
res.status(500).send(e instanceof Error ? e.message : e);
}

View File

@@ -19,12 +19,15 @@ class Mining {
private blocksPriceIndexingRunning = false;
public lastHashrateIndexingDate: number | null = null;
public lastWeeklyHashrateIndexingDate: number | null = null;
public reindexHashrateRequested = false;
public reindexDifficultyAdjustmentRequested = false;
/**
* Get historical block predictions match rate
* Get historical blocks health
*/
public async $getBlockPredictionsHistory(interval: string | null = null): Promise<any> {
return await BlocksAuditsRepository.$getBlockPredictionsHistory(
public async $getBlocksHealthHistory(interval: string | null = null): Promise<any> {
return await BlocksAuditsRepository.$getBlocksHealthHistory(
this.getTimeRange(interval),
Common.getSqlInterval(interval)
);
@@ -103,6 +106,7 @@ class Mining {
emptyBlocks: emptyBlocksCount.length > 0 ? emptyBlocksCount[0]['count'] : 0,
slug: poolInfo.slug,
avgMatchRate: poolInfo.avgMatchRate !== null ? Math.round(100 * poolInfo.avgMatchRate) / 100 : null,
avgFeeDelta: poolInfo.avgFeeDelta,
};
poolsStats.push(poolStat);
});
@@ -290,6 +294,14 @@ class Mining {
* Generate daily hashrate data
*/
public async $generateNetworkHashrateHistory(): Promise<void> {
// If a re-index was requested, truncate first
if (this.reindexHashrateRequested === true) {
logger.notice(`hashrates will now be re-indexed`);
await database.query(`TRUNCATE hashrates`);
this.lastHashrateIndexingDate = 0;
this.reindexHashrateRequested = false;
}
// We only run this once a day around midnight
const today = new Date().getUTCDate();
if (today === this.lastHashrateIndexingDate) {
@@ -395,6 +407,13 @@ class Mining {
* Index difficulty adjustments
*/
public async $indexDifficultyAdjustments(): Promise<void> {
// If a re-index was requested, truncate first
if (this.reindexDifficultyAdjustmentRequested === true) {
logger.notice(`difficulty_adjustments will now be re-indexed`);
await database.query(`TRUNCATE difficulty_adjustments`);
this.reindexDifficultyAdjustmentRequested = false;
}
const indexedHeightsArray = await DifficultyAdjustmentsRepository.$getAdjustmentsHeights();
const indexedHeights = {};
for (const height of indexedHeightsArray) {
@@ -452,7 +471,7 @@ class Mining {
const elapsedSeconds = Math.max(1, Math.round((new Date().getTime() / 1000) - timer));
if (elapsedSeconds > 5) {
const progress = Math.round(totalBlockChecked / blocks.length * 100);
logger.info(`Indexing difficulty adjustment at block #${block.height} | Progress: ${progress}%`, logger.tags.mining);
logger.debug(`Indexing difficulty adjustment at block #${block.height} | Progress: ${progress}%`, logger.tags.mining);
timer = new Date().getTime() / 1000;
}
}
@@ -473,11 +492,11 @@ class Mining {
}
this.blocksPriceIndexingRunning = true;
let totalInserted = 0;
try {
const prices: any[] = await PricesRepository.$getPricesTimesAndId();
const blocksWithoutPrices: any[] = await BlocksRepository.$getBlocksWithoutPrice();
let totalInserted = 0;
const blocksPrices: BlockPrice[] = [];
for (const block of blocksWithoutPrices) {
@@ -522,7 +541,13 @@ class Mining {
}
} catch (e) {
this.blocksPriceIndexingRunning = false;
throw e;
logger.err(`Cannot index block prices. ${e}`);
}
if (totalInserted > 0) {
logger.info(`Indexing blocks prices completed. Indexed ${totalInserted}`, logger.tags.mining);
} else {
logger.debug(`Indexing blocks prices completed. Indexed 0.`, logger.tags.mining);
}
this.blocksPriceIndexingRunning = false;
@@ -558,8 +583,10 @@ class Mining {
currentBlockHeight -= 10000;
}
if (totalIndexed) {
logger.info(`Indexing missing coinstatsindex data completed`, logger.tags.mining);
if (totalIndexed > 0) {
logger.info(`Indexing missing coinstatsindex data completed. Indexed ${totalIndexed}`, logger.tags.mining);
} else {
logger.debug(`Indexing missing coinstatsindex data completed. Indexed 0.`, logger.tags.mining);
}
}

View File

@@ -4,6 +4,7 @@ import config from '../config';
import PoolsRepository from '../repositories/PoolsRepository';
import { PoolTag } from '../mempool.interfaces';
import diskCache from './disk-cache';
import mining from './mining/mining';
class PoolsParser {
miningPools: any[] = [];
@@ -41,7 +42,7 @@ class PoolsParser {
public async migratePoolsJson(): Promise<void> {
// We also need to wipe the backend cache to make sure we don't serve blocks with
// the wrong mining pool (usually happen with unknown blocks)
diskCache.wipeCache();
diskCache.setIgnoreBlocksCache();
await this.$insertUnknownPool();
@@ -73,14 +74,12 @@ class PoolsParser {
if (JSON.stringify(pool.addresses) !== poolDB.addresses ||
JSON.stringify(pool.regexes) !== poolDB.regexes) {
// Pool addresses changed or coinbase tags changed
logger.notice(`Updating addresses and/or coinbase tags for ${pool.name} mining pool. If 'AUTOMATIC_BLOCK_REINDEXING' is enabled, we will re-index its blocks and 'unknown' blocks`);
logger.notice(`Updating addresses and/or coinbase tags for ${pool.name} mining pool.`);
await PoolsRepository.$updateMiningPoolTags(poolDB.id, pool.addresses, pool.regexes);
await this.$deleteBlocksForPool(poolDB);
}
}
}
logger.info('Mining pools-v2.json import completed');
}
/**
@@ -118,10 +117,6 @@ class PoolsParser {
* @param pool
*/
private async $deleteBlocksForPool(pool: PoolTag): Promise<void> {
if (config.MEMPOOL.AUTOMATIC_BLOCK_REINDEXING === false) {
return;
}
// Get oldest blocks mined by the pool and assume pools-v2.json updates only concern most recent years
// Ignore the early days of Bitcoin as there were no mining pools yet
const [oldestPoolBlock]: any[] = await DB.query(`
@@ -132,7 +127,15 @@ class PoolsParser {
LIMIT 1`,
[pool.id]
);
const oldestBlockHeight = oldestPoolBlock.length ?? 0 > 0 ? oldestPoolBlock[0].height : 130635;
let firstKnownBlockPool = 130635; // https://mempool.space/block/0000000000000a067d94ff753eec72830f1205ad3a4c216a08a80c832e551a52
if (config.MEMPOOL.NETWORK === 'testnet') {
firstKnownBlockPool = 21106; // https://mempool.space/testnet/block/0000000070b701a5b6a1b965f6a38e0472e70b2bb31b973e4638dec400877581
} else if (config.MEMPOOL.NETWORK === 'signet' || config.MEMPOOL.NETWORK === 'regtest') {
firstKnownBlockPool = 0;
}
const oldestBlockHeight = oldestPoolBlock.length ?? 0 > 0 ? oldestPoolBlock[0].height : firstKnownBlockPool;
const [unknownPool] = await DB.query(`SELECT id from pools where slug = "unknown"`);
this.uniqueLog(logger.notice, `Deleting blocks with unknown mining pool from height ${oldestBlockHeight} for re-indexing`);
await DB.query(`
@@ -146,16 +149,31 @@ class PoolsParser {
WHERE pool_id = ?`,
[pool.id]
);
// Re-index hashrates and difficulty adjustments later
mining.reindexHashrateRequested = true;
mining.reindexDifficultyAdjustmentRequested = true;
}
private async $deleteUnknownBlocks(): Promise<void> {
let firstKnownBlockPool = 130635; // https://mempool.space/block/0000000000000a067d94ff753eec72830f1205ad3a4c216a08a80c832e551a52
if (config.MEMPOOL.NETWORK === 'testnet') {
firstKnownBlockPool = 21106; // https://mempool.space/testnet/block/0000000070b701a5b6a1b965f6a38e0472e70b2bb31b973e4638dec400877581
} else if (config.MEMPOOL.NETWORK === 'signet' || config.MEMPOOL.NETWORK === 'regtest') {
firstKnownBlockPool = 0;
}
const [unknownPool] = await DB.query(`SELECT id from pools where slug = "unknown"`);
this.uniqueLog(logger.notice, `Deleting blocks with unknown mining pool from height 130635 for re-indexing`);
this.uniqueLog(logger.notice, `Deleting blocks with unknown mining pool from height ${firstKnownBlockPool} for re-indexing`);
await DB.query(`
DELETE FROM blocks
WHERE pool_id = ? AND height >= 130635`,
WHERE pool_id = ? AND height >= ${firstKnownBlockPool}`,
[unknownPool[0].id]
);
// Re-index hashrates and difficulty adjustments later
mining.reindexHashrateRequested = true;
mining.reindexDifficultyAdjustmentRequested = true;
}
}

View File

@@ -1,65 +1,391 @@
import { TransactionExtended } from "../mempool.interfaces";
import logger from "../logger";
import { MempoolTransactionExtended, TransactionStripped } from "../mempool.interfaces";
import bitcoinApi from './bitcoin/bitcoin-api-factory';
import { Common } from "./common";
interface RbfTransaction extends TransactionStripped {
rbf?: boolean;
mined?: boolean;
fullRbf?: boolean;
}
interface RbfTree {
tx: RbfTransaction;
time: number;
interval?: number;
mined?: boolean;
fullRbf: boolean;
replaces: RbfTree[];
}
export interface ReplacementInfo {
mined: boolean;
fullRbf: boolean;
txid: string;
oldFee: number;
oldVsize: number;
newFee: number;
newVsize: number;
}
class RbfCache {
private replacedBy: { [txid: string]: string; } = {};
private replaces: { [txid: string]: string[] } = {};
private txs: { [txid: string]: TransactionExtended } = {};
private expiring: { [txid: string]: Date } = {};
private replacedBy: Map<string, string> = new Map();
private replaces: Map<string, string[]> = new Map();
private rbfTrees: Map<string, RbfTree> = new Map(); // sequences of consecutive replacements
private dirtyTrees: Set<string> = new Set();
private treeMap: Map<string, string> = new Map(); // map of txids to sequence ids
private txs: Map<string, MempoolTransactionExtended> = new Map();
private expiring: Map<string, number> = new Map();
constructor() {
setInterval(this.cleanup.bind(this), 1000 * 60 * 60);
setInterval(this.cleanup.bind(this), 1000 * 60 * 10);
}
public add(replacedTx: TransactionExtended, newTxId: string): void {
this.replacedBy[replacedTx.txid] = newTxId;
this.txs[replacedTx.txid] = replacedTx;
if (!this.replaces[newTxId]) {
this.replaces[newTxId] = [];
public add(replaced: MempoolTransactionExtended[], newTxExtended: MempoolTransactionExtended): void {
if (!newTxExtended || !replaced?.length || this.txs.has(newTxExtended.txid)) {
return;
}
this.replaces[newTxId].push(replacedTx.txid);
const newTx = Common.stripTransaction(newTxExtended) as RbfTransaction;
const newTime = newTxExtended.firstSeen || (Date.now() / 1000);
newTx.rbf = newTxExtended.vin.some((v) => v.sequence < 0xfffffffe);
this.txs.set(newTx.txid, newTxExtended);
// maintain rbf trees
let txFullRbf = false;
let treeFullRbf = false;
const replacedTrees: RbfTree[] = [];
for (const replacedTxExtended of replaced) {
const replacedTx = Common.stripTransaction(replacedTxExtended) as RbfTransaction;
replacedTx.rbf = replacedTxExtended.vin.some((v) => v.sequence < 0xfffffffe);
if (!replacedTx.rbf) {
txFullRbf = true;
}
this.replacedBy.set(replacedTx.txid, newTx.txid);
if (this.treeMap.has(replacedTx.txid)) {
const treeId = this.treeMap.get(replacedTx.txid);
if (treeId) {
const tree = this.rbfTrees.get(treeId);
this.rbfTrees.delete(treeId);
if (tree) {
tree.interval = newTime - tree?.time;
replacedTrees.push(tree);
treeFullRbf = treeFullRbf || tree.fullRbf || !tree.tx.rbf;
}
}
} else {
const replacedTime = replacedTxExtended.firstSeen || (Date.now() / 1000);
replacedTrees.push({
tx: replacedTx,
time: replacedTime,
interval: newTime - replacedTime,
fullRbf: !replacedTx.rbf,
replaces: [],
});
treeFullRbf = treeFullRbf || !replacedTx.rbf;
this.txs.set(replacedTx.txid, replacedTxExtended);
}
}
newTx.fullRbf = txFullRbf;
const treeId = replacedTrees[0].tx.txid;
const newTree = {
tx: newTx,
time: newTime,
fullRbf: treeFullRbf,
replaces: replacedTrees
};
this.rbfTrees.set(treeId, newTree);
this.updateTreeMap(treeId, newTree);
this.replaces.set(newTx.txid, replacedTrees.map(tree => tree.tx.txid));
this.dirtyTrees.add(treeId);
}
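As a small illustration of the tree bookkeeping above: a chain where B replaces A and C later replaces B ends up as a single tree rooted at the newest transaction, and walking it depth-first lists the full replacement history. The MiniTree type and txids below are illustrative only (the real getTransactionsInTree appears later in this file).

interface MiniTree { txid: string; replaces: MiniTree[]; }

function listReplacements(tree: MiniTree, out: string[] = []): string[] {
  out.push(tree.txid);
  tree.replaces.forEach(child => listReplacements(child, out));
  return out;
}

// A replaced by B, then B replaced by C:
const chain: MiniTree = { txid: 'C', replaces: [{ txid: 'B', replaces: [{ txid: 'A', replaces: [] }] }] };
console.log(listReplacements(chain)); // ['C', 'B', 'A']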
public getReplacedBy(txId: string): string | undefined {
return this.replacedBy[txId];
return this.replacedBy.get(txId);
}
public getReplaces(txId: string): string[] | undefined {
return this.replaces[txId];
return this.replaces.get(txId);
}
public getTx(txId: string): TransactionExtended | undefined {
return this.txs[txId];
public getTx(txId: string): MempoolTransactionExtended | undefined {
return this.txs.get(txId);
}
public getRbfTree(txId: string): RbfTree | void {
return this.rbfTrees.get(this.treeMap.get(txId) || '');
}
// get a paginated list of RbfTrees
// ordered by most recent replacement time
public getRbfTrees(onlyFullRbf: boolean, after?: string): RbfTree[] {
const limit = 25;
const trees: RbfTree[] = [];
const used = new Set<string>();
const replacements: string[][] = Array.from(this.replacedBy).reverse();
const afterTree = after ? this.treeMap.get(after) : null;
let ready = !afterTree;
for (let i = 0; i < replacements.length && trees.length <= limit - 1; i++) {
const txid = replacements[i][1];
const treeId = this.treeMap.get(txid) || '';
if (treeId === afterTree) {
ready = true;
} else if (ready) {
if (!used.has(treeId)) {
const tree = this.rbfTrees.get(treeId);
used.add(treeId);
if (tree && (!onlyFullRbf || tree.fullRbf)) {
trees.push(tree);
}
}
}
}
return trees;
}
// get map of rbf trees that have been updated since the last call
public getRbfChanges(): { trees: {[id: string]: RbfTree }, map: { [txid: string]: string }} {
const changes: { trees: {[id: string]: RbfTree }, map: { [txid: string]: string }} = {
trees: {},
map: {},
};
this.dirtyTrees.forEach(id => {
const tree = this.rbfTrees.get(id);
if (tree) {
changes.trees[id] = tree;
this.getTransactionsInTree(tree).forEach(tx => {
changes.map[tx.txid] = id;
});
}
});
this.dirtyTrees = new Set();
return changes;
}
public mined(txid): void {
if (!this.txs.has(txid)) {
return;
}
const treeId = this.treeMap.get(txid);
if (treeId && this.rbfTrees.has(treeId)) {
const tree = this.rbfTrees.get(treeId);
if (tree) {
this.setTreeMined(tree, txid);
tree.mined = true;
this.dirtyTrees.add(treeId);
}
}
this.evict(txid);
}
// flag a transaction as removed from the mempool
public evict(txid): void {
this.expiring[txid] = new Date(Date.now() + 1000 * 86400); // 24 hours
public evict(txid: string, fast: boolean = false): void {
if (this.txs.has(txid) && (fast || !this.expiring.has(txid))) {
this.expiring.set(txid, fast ? Date.now() + (1000 * 60 * 10) : Date.now() + (1000 * 86400)); // 10 minutes if fast, otherwise 24 hours
}
}
// is the transaction involved in a full rbf replacement?
public isFullRbf(txid: string): boolean {
const treeId = this.treeMap.get(txid);
if (!treeId) {
return false;
}
const tree = this.rbfTrees.get(treeId);
if (!tree) {
return false;
}
return tree?.fullRbf;
}
private cleanup(): void {
const currentDate = new Date();
for (const txid in this.expiring) {
if (this.expiring[txid] < currentDate) {
delete this.expiring[txid];
const now = Date.now();
for (const txid of this.expiring.keys()) {
if ((this.expiring.get(txid) || 0) < now) {
this.expiring.delete(txid);
this.remove(txid);
}
}
logger.debug(`rbf cache contains ${this.txs.size} txs, ${this.expiring.size} due to expire`);
}
// remove a transaction & all previous versions from the cache
private remove(txid): void {
// don't remove a transaction while a newer version remains in the mempool
if (this.replaces[txid] && !this.replacedBy[txid]) {
const replaces = this.replaces[txid];
delete this.replaces[txid];
for (const tx of replaces) {
// don't remove a transaction if a newer version remains in the mempool
if (!this.replacedBy.has(txid)) {
const replaces = this.replaces.get(txid);
this.replaces.delete(txid);
this.treeMap.delete(txid);
this.txs.delete(txid);
this.expiring.delete(txid);
for (const tx of (replaces || [])) {
// recursively remove prior versions from the cache
delete this.replacedBy[tx];
delete this.txs[tx];
this.replacedBy.delete(tx);
// if this is the id of a tree, remove that too
if (this.treeMap.get(tx) === tx) {
this.rbfTrees.delete(tx);
}
this.remove(tx);
}
}
}
private updateTreeMap(newId: string, tree: RbfTree): void {
this.treeMap.set(tree.tx.txid, newId);
tree.replaces.forEach(subtree => {
this.updateTreeMap(newId, subtree);
});
}
private getTransactionsInTree(tree: RbfTree, txs: RbfTransaction[] = []): RbfTransaction[] {
txs.push(tree.tx);
tree.replaces.forEach(subtree => {
this.getTransactionsInTree(subtree, txs);
});
return txs;
}
private setTreeMined(tree: RbfTree, txid: string): void {
if (tree.tx.txid === txid) {
tree.tx.mined = true;
} else {
tree.replaces.forEach(subtree => {
this.setTreeMined(subtree, txid);
});
}
}
public dump(): any {
const trees = Array.from(this.rbfTrees.values()).map((tree: RbfTree) => { return this.exportTree(tree); });
return {
txs: Array.from(this.txs.entries()),
trees,
expiring: Array.from(this.expiring.entries()),
};
}
public async load({ txs, trees, expiring }): Promise<void> {
txs.forEach(txEntry => {
this.txs.set(txEntry[0], txEntry[1]);
});
for (const deflatedTree of trees) {
await this.importTree(deflatedTree.root, deflatedTree.root, deflatedTree, this.txs);
}
expiring.forEach(expiringEntry => {
if (this.txs.has(expiringEntry[0])) {
this.expiring.set(expiringEntry[0], new Date(expiringEntry[1]).getTime());
}
});
this.cleanup();
}
exportTree(tree: RbfTree, deflated: any = null) {
if (!deflated) {
deflated = {
root: tree.tx.txid,
};
}
deflated[tree.tx.txid] = {
tx: tree.tx.txid,
txMined: tree.tx.mined,
time: tree.time,
interval: tree.interval,
mined: tree.mined,
fullRbf: tree.fullRbf,
replaces: tree.replaces.map(child => child.tx.txid),
};
tree.replaces.forEach(child => {
this.exportTree(child, deflated);
});
return deflated;
}
async importTree(root, txid, deflated, txs: Map<string, MempoolTransactionExtended>, mined: boolean = false): Promise<RbfTree | void> {
const treeInfo = deflated[txid];
const replaces: RbfTree[] = [];
// check if any transactions in this tree have already been confirmed
mined = mined || treeInfo.mined;
let exists = mined;
if (!mined) {
try {
const apiTx = await bitcoinApi.$getRawTransaction(txid);
if (apiTx) {
exists = true;
}
if (apiTx?.status?.confirmed) {
mined = true;
treeInfo.txMined = true;
this.evict(txid, true);
}
} catch (e) {
// most transactions do not exist
}
}
// if the root tx is not in the mempool or the blockchain
// evict this tree as soon as possible
if (root === txid && !exists) {
this.evict(txid, true);
}
// recursively reconstruct child trees
for (const childId of treeInfo.replaces) {
const replaced = await this.importTree(root, childId, deflated, txs, mined);
if (replaced) {
this.replacedBy.set(replaced.tx.txid, txid);
replaces.push(replaced);
if (replaced.mined) {
mined = true;
}
}
}
this.replaces.set(txid, replaces.map(t => t.tx.txid));
const tx = txs.get(txid);
if (!tx) {
return;
}
const strippedTx = Common.stripTransaction(tx) as RbfTransaction;
strippedTx.rbf = tx.vin.some((v) => v.sequence < 0xfffffffe);
strippedTx.mined = treeInfo.txMined;
const tree = {
tx: strippedTx,
time: treeInfo.time,
interval: treeInfo.interval,
mined: mined,
fullRbf: treeInfo.fullRbf,
replaces,
};
this.treeMap.set(txid, root);
if (root === txid) {
this.rbfTrees.set(root, tree);
this.dirtyTrees.add(root);
}
return tree;
}
public getLatestRbfSummary(): ReplacementInfo[] {
const rbfList = this.getRbfTrees(false);
return rbfList.slice(0, 6).map(rbfTree => {
let oldFee = 0;
let oldVsize = 0;
for (const replaced of rbfTree.replaces) {
oldFee += replaced.tx.fee;
oldVsize += replaced.tx.vsize;
}
return {
txid: rbfTree.tx.txid,
mined: !!rbfTree.tx.mined,
fullRbf: !!rbfTree.tx.fullRbf,
oldFee,
oldVsize,
newFee: rbfTree.tx.fee,
newVsize: rbfTree.tx.vsize,
};
});
}
}
export default new RbfCache();

View File

@@ -211,7 +211,7 @@ class StatisticsApi {
CAST(avg(vsize_1800) as DOUBLE) as vsize_1800,
CAST(avg(vsize_2000) as DOUBLE) as vsize_2000 \
FROM statistics \
WHERE added BETWEEN DATE_SUB(NOW(), INTERVAL ${interval}) AND NOW() \
${interval === 'all' ? '' : `WHERE added BETWEEN DATE_SUB(NOW(), INTERVAL ${interval}) AND NOW()`} \
GROUP BY UNIX_TIMESTAMP(added) DIV ${div} \
ORDER BY statistics.added DESC;`;
}
@@ -259,7 +259,7 @@ class StatisticsApi {
vsize_1800,
vsize_2000 \
FROM statistics \
WHERE added BETWEEN DATE_SUB(NOW(), INTERVAL ${interval}) AND NOW() \
${interval === 'all' ? '' : `WHERE added BETWEEN DATE_SUB(NOW(), INTERVAL ${interval}) AND NOW()`} \
GROUP BY UNIX_TIMESTAMP(added) DIV ${div} \
ORDER BY statistics.added DESC;`;
}
@@ -386,6 +386,17 @@ class StatisticsApi {
}
}
public async $listAll(): Promise<OptimizedStatistic[]> {
try {
const query = this.getQueryForDays(43200, 'all'); // 12h interval
const [rows] = await DB.query({ sql: query, timeout: this.queryTimeout });
return this.mapStatisticToOptimizedStatistic(rows as Statistic[]);
} catch (e) {
logger.err('$listAll() error' + (e instanceof Error ? e.message : e));
return [];
}
}
private mapStatisticToOptimizedStatistic(statistic: Statistic[]): OptimizedStatistic[] {
return statistic.map((s) => {
return {

View File

@@ -15,10 +15,11 @@ class StatisticsRoutes {
.get(config.MEMPOOL.API_URL_PREFIX + 'statistics/2y', this.$getStatisticsByTime.bind(this, '2y'))
.get(config.MEMPOOL.API_URL_PREFIX + 'statistics/3y', this.$getStatisticsByTime.bind(this, '3y'))
.get(config.MEMPOOL.API_URL_PREFIX + 'statistics/4y', this.$getStatisticsByTime.bind(this, '4y'))
.get(config.MEMPOOL.API_URL_PREFIX + 'statistics/all', this.$getStatisticsByTime.bind(this, 'all'))
;
}
private async $getStatisticsByTime(time: '2h' | '24h' | '1w' | '1m' | '3m' | '6m' | '1y' | '2y' | '3y' | '4y', req: Request, res: Response) {
private async $getStatisticsByTime(time: '2h' | '24h' | '1w' | '1m' | '3m' | '6m' | '1y' | '2y' | '3y' | '4y' | 'all', req: Request, res: Response) {
res.header('Pragma', 'public');
res.header('Cache-control', 'public');
res.setHeader('Expires', new Date(Date.now() + 1000 * 300).toUTCString());
@@ -26,10 +27,6 @@ class StatisticsRoutes {
try {
let result;
switch (time as string) {
case '2h':
result = await statisticsApi.$list2H();
res.setHeader('Expires', new Date(Date.now() + 1000 * 30).toUTCString());
break;
case '24h':
result = await statisticsApi.$list24H();
res.setHeader('Expires', new Date(Date.now() + 1000 * 60).toUTCString());
@@ -58,8 +55,13 @@ class StatisticsRoutes {
case '4y':
result = await statisticsApi.$list4Y();
break;
case 'all':
result = await statisticsApi.$listAll();
break;
default:
result = await statisticsApi.$list2H();
res.setHeader('Expires', new Date(Date.now() + 1000 * 30).toUTCString());
break;
}
res.json(result);
} catch (e) {

View File

@@ -1,7 +1,8 @@
import { TransactionExtended, TransactionMinerInfo } from '../mempool.interfaces';
import { TransactionExtended, MempoolTransactionExtended, TransactionMinerInfo } from '../mempool.interfaces';
import { IEsploraApi } from './bitcoin/esplora-api.interface';
import { Common } from './common';
import bitcoinApi, { bitcoinCoreApi } from './bitcoin/bitcoin-api-factory';
import * as bitcoinjs from 'bitcoinjs-lib';
class TransactionUtils {
constructor() { }
@@ -22,19 +23,34 @@ class TransactionUtils {
}
/**
* @param txId
* @param addPrevouts
* @param lazyPrevouts
* @param txId
* @param addPrevouts
* @param lazyPrevouts
* @param forceCore - See https://github.com/mempool/mempool/issues/2904
*/
public async $getTransactionExtended(txId: string, addPrevouts = false, lazyPrevouts = false, forceCore = false): Promise<TransactionExtended> {
public async $getTransactionExtended(txId: string, addPrevouts = false, lazyPrevouts = false, forceCore = false, addMempoolData = false): Promise<TransactionExtended> {
let transaction: IEsploraApi.Transaction;
if (forceCore === true) {
transaction = await bitcoinCoreApi.$getRawTransaction(txId, true);
} else {
transaction = await bitcoinApi.$getRawTransaction(txId, false, addPrevouts, lazyPrevouts);
}
return this.extendTransaction(transaction);
if (Common.isLiquid()) {
if (!isFinite(Number(transaction.fee))) {
transaction.fee = Object.values(transaction.fee || {}).reduce((total, output) => total + output, 0);
}
}
if (addMempoolData || !transaction?.status?.confirmed) {
return this.extendMempoolTransaction(transaction);
} else {
return this.extendTransaction(transaction);
}
}
public async $getMempoolTransactionExtended(txId: string, addPrevouts = false, lazyPrevouts = false, forceCore = false): Promise<MempoolTransactionExtended> {
return (await this.$getTransactionExtended(txId, addPrevouts, lazyPrevouts, forceCore, true)) as MempoolTransactionExtended;
}
private extendTransaction(transaction: IEsploraApi.Transaction): TransactionExtended {
@@ -43,15 +59,37 @@ class TransactionUtils {
// @ts-ignore
return transaction;
}
const feePerVbytes = Math.max(Common.isLiquid() ? 0.1 : 1,
(transaction.fee || 0) / (transaction.weight / 4));
const feePerVbytes = (transaction.fee || 0) / (transaction.weight / 4);
const transactionExtended: TransactionExtended = Object.assign({
vsize: Math.round(transaction.weight / 4),
feePerVsize: feePerVbytes,
effectiveFeePerVsize: feePerVbytes,
}, transaction);
if (!transaction.status.confirmed) {
transactionExtended.firstSeen = Math.round((new Date().getTime() / 1000));
if (!transaction?.status?.confirmed && !transactionExtended.firstSeen) {
transactionExtended.firstSeen = Math.round((Date.now() / 1000));
}
return transactionExtended;
}
public extendMempoolTransaction(transaction: IEsploraApi.Transaction): MempoolTransactionExtended {
const vsize = Math.ceil(transaction.weight / 4);
const fractionalVsize = (transaction.weight / 4);
const sigops = !Common.isLiquid() ? this.countSigops(transaction) : 0;
// https://github.com/bitcoin/bitcoin/blob/e9262ea32a6e1d364fb7974844fadc36f931f8c6/src/policy/policy.cpp#L295-L298
const adjustedVsize = Math.max(fractionalVsize, sigops * 5); // adjusted vsize = Max(weight, sigops * bytes_per_sigop) / witness_scale_factor
const feePerVbytes = (transaction.fee || 0) / fractionalVsize;
const adjustedFeePerVsize = (transaction.fee || 0) / adjustedVsize;
const transactionExtended: MempoolTransactionExtended = Object.assign(transaction, {
order: this.txidToOrdering(transaction.txid),
vsize: Math.round(transaction.weight / 4),
adjustedVsize,
sigops,
feePerVsize: feePerVbytes,
adjustedFeePerVsize: adjustedFeePerVsize,
effectiveFeePerVsize: adjustedFeePerVsize,
});
if (!transactionExtended?.status?.confirmed && !transactionExtended.firstSeen) {
transactionExtended.firstSeen = Math.round((Date.now() / 1000));
}
return transactionExtended;
}
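A quick worked example of the sigop-adjusted vsize above, using illustrative numbers:

const weight = 800;                                          // weight units
const sigops = 50;
const fee = 10_000;                                          // sats
const fractionalVsize = weight / 4;                          // 200 vbytes
const adjustedVsize = Math.max(fractionalVsize, sigops * 5); // max(200, 250) = 250
const adjustedFeePerVsize = fee / adjustedVsize;             // 40 sat/vB instead of 50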
@@ -63,6 +101,75 @@ class TransactionUtils {
}
return str;
}
public countScriptSigops(script: string, isRawScript: boolean = false, witness: boolean = false): number {
let sigops = 0;
// count OP_CHECKSIG and OP_CHECKSIGVERIFY
sigops += (script.match(/OP_CHECKSIG/g)?.length || 0);
// count OP_CHECKMULTISIG and OP_CHECKMULTISIGVERIFY
if (isRawScript) {
// in scriptPubKey or scriptSig, always worth 20
sigops += 20 * (script.match(/OP_CHECKMULTISIG/g)?.length || 0);
} else {
// in redeem scripts and witnesses, worth N if preceded by OP_N, 20 otherwise
const matches = script.matchAll(/(?:OP_(\d+))? OP_CHECKMULTISIG/g);
for (const match of matches) {
const n = parseInt(match[1]);
if (Number.isInteger(n)) {
sigops += n;
} else {
sigops += 20;
}
}
}
return witness ? sigops : (sigops * 4);
}
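For illustration, two hypothetical calls; the ASM strings are made up to match the regexes above, not taken from real transactions:

import transactionUtils from './transaction-utils';

transactionUtils.countScriptSigops('OP_DUP OP_HASH160 <20-byte-hash> OP_EQUALVERIFY OP_CHECKSIG', true);
// => 4: one legacy OP_CHECKSIG, scaled by 4 because it is not witness data

transactionUtils.countScriptSigops('OP_2 <key1> <key2> <key3> OP_3 OP_CHECKMULTISIG', false, true);
// => 3: in redeem/witness scripts, OP_CHECKMULTISIG preceded by OP_N counts as N, unscaled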
public countSigops(transaction: IEsploraApi.Transaction): number {
let sigops = 0;
for (const input of transaction.vin) {
if (input.scriptsig_asm) {
sigops += this.countScriptSigops(input.scriptsig_asm, true);
}
if (input.prevout) {
switch (true) {
case input.prevout.scriptpubkey_type === 'p2sh' && input.witness?.length === 2 && input.scriptsig && input.scriptsig.startsWith('160014'):
case input.prevout.scriptpubkey_type === 'v0_p2wpkh':
sigops += 1;
break;
case input.prevout?.scriptpubkey_type === 'p2sh' && input.witness?.length && input.scriptsig && input.scriptsig.startsWith('220020'):
case input.prevout.scriptpubkey_type === 'v0_p2wsh':
if (input.witness?.length) {
sigops += this.countScriptSigops(bitcoinjs.script.toASM(Buffer.from(input.witness[input.witness.length - 1], 'hex')), false, true);
}
break;
}
}
}
for (const output of transaction.vout) {
if (output.scriptpubkey_asm) {
sigops += this.countScriptSigops(output.scriptpubkey_asm, true);
}
}
return sigops;
}
// returns the most significant 4 bytes of the txid as an integer
public txidToOrdering(txid: string): number {
return parseInt(
txid.substr(62, 2) +
txid.substr(60, 2) +
txid.substr(58, 2) +
txid.substr(56, 2),
16
);
}
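A worked example of the ordering value, using a hypothetical txid whose last eight hex characters are aabbccdd:

import transactionUtils from './transaction-utils';

const txid = '00'.repeat(28) + 'aabbccdd';   // hypothetical 64-character txid
transactionUtils.txidToOrdering(txid);
// substr(62,2)='dd', substr(60,2)='cc', substr(58,2)='bb', substr(56,2)='aa'
// => parseInt('ddccbbaa', 16) === 3721182122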
}
export default new TransactionUtils();

View File

@@ -1,11 +1,10 @@
import config from '../config';
import logger from '../logger';
import { ThreadTransaction, MempoolBlockWithTransactions, AuditTransaction } from '../mempool.interfaces';
import { CompactThreadTransaction, AuditTransaction } from '../mempool.interfaces';
import { PairingHeap } from '../utils/pairing-heap';
import { Common } from './common';
import { parentPort } from 'worker_threads';
let mempool: { [txid: string]: ThreadTransaction } = {};
let mempool: Map<number, CompactThreadTransaction> = new Map();
if (parentPort) {
parentPort.on('message', (params) => {
@@ -13,18 +12,18 @@ if (parentPort) {
mempool = params.mempool;
} else if (params.type === 'update') {
params.added.forEach(tx => {
mempool[tx.txid] = tx;
mempool.set(tx.uid, tx);
});
params.removed.forEach(txid => {
delete mempool[txid];
params.removed.forEach(uid => {
mempool.delete(uid);
});
}
const { blocks, clusters } = makeBlockTemplates(mempool);
const { blocks, rates, clusters } = makeBlockTemplates(mempool);
// return the result to main thread.
if (parentPort) {
parentPort.postMessage({ blocks, clusters });
parentPort.postMessage({ blocks, rates, clusters });
}
});
}
@@ -33,35 +32,36 @@ if (parentPort) {
* Build projected mempool blocks using an approximation of the transaction selection algorithm from Bitcoin Core
* (see BlockAssembler in https://github.com/bitcoin/bitcoin/blob/master/src/node/miner.cpp)
*/
function makeBlockTemplates(mempool: { [txid: string]: ThreadTransaction })
: { blocks: ThreadTransaction[][], clusters: { [root: string]: string[] } } {
function makeBlockTemplates(mempool: Map<number, CompactThreadTransaction>)
: { blocks: number[][], rates: Map<number, number>, clusters: Map<number, number[]> } {
const start = Date.now();
const auditPool: { [txid: string]: AuditTransaction } = {};
const auditPool: Map<number, AuditTransaction> = new Map();
const mempoolArray: AuditTransaction[] = [];
const restOfArray: ThreadTransaction[] = [];
const cpfpClusters: { [root: string]: string[] } = {};
const cpfpClusters: Map<number, number[]> = new Map();
// grab the top feerate txs up to maxWeight
Object.values(mempool).sort((a, b) => b.feePerVsize - a.feePerVsize).forEach(tx => {
mempool.forEach(tx => {
tx.dirty = false;
// initializing everything up front helps V8 optimize property access later
auditPool[tx.txid] = {
txid: tx.txid,
auditPool.set(tx.uid, {
uid: tx.uid,
fee: tx.fee,
weight: tx.weight,
feePerVsize: tx.feePerVsize,
effectiveFeePerVsize: tx.feePerVsize,
vin: tx.vin,
sigops: tx.sigops,
inputs: tx.inputs || [],
relativesSet: false,
ancestorMap: new Map<string, AuditTransaction>(),
ancestorMap: new Map<number, AuditTransaction>(),
children: new Set<AuditTransaction>(),
ancestorFee: 0,
ancestorWeight: 0,
ancestorSigops: 0,
score: 0,
used: false,
modified: false,
modifiedNode: null,
};
mempoolArray.push(auditPool[tx.txid]);
});
mempoolArray.push(auditPool.get(tx.uid) as AuditTransaction);
});
// Build relatives graph & calculate ancestor scores
@@ -72,15 +72,29 @@ function makeBlockTemplates(mempool: { [txid: string]: ThreadTransaction })
}
// Sort by descending ancestor score
mempoolArray.sort((a, b) => (b.score || 0) - (a.score || 0));
mempoolArray.sort((a, b) => {
if (b.score === a.score) {
// tie-break by uid for stability
return a.uid < b.uid ? -1 : 1;
} else {
return (b.score || 0) - (a.score || 0);
}
});
// Build blocks by greedily choosing the highest feerate package
// (i.e. the package rooted in the transaction with the best ancestor score)
const blocks: ThreadTransaction[][] = [];
const blocks: number[][] = [];
let blockWeight = 4000;
let blockSize = 0;
let blockSigops = 0;
let transactions: AuditTransaction[] = [];
const modified: PairingHeap<AuditTransaction> = new PairingHeap((a, b): boolean => (a.score || 0) > (b.score || 0));
const modified: PairingHeap<AuditTransaction> = new PairingHeap((a, b): boolean => {
if (a.score === b.score) {
// tie-break by uid for stability
return a.uid > b.uid;
} else {
return (a.score || 0) > (b.score || 0);
}
});
let overflow: AuditTransaction[] = [];
let failures = 0;
let top = 0;
@@ -107,30 +121,36 @@ function makeBlockTemplates(mempool: { [txid: string]: ThreadTransaction })
if (nextTx && !nextTx?.used) {
// Check if the package fits into this block
if (blockWeight + nextTx.ancestorWeight < config.MEMPOOL.BLOCK_WEIGHT_UNITS) {
if (blocks.length >= 7 || ((blockWeight + nextTx.ancestorWeight < config.MEMPOOL.BLOCK_WEIGHT_UNITS) && (blockSigops + nextTx.ancestorSigops <= 80000))) {
const ancestors: AuditTransaction[] = Array.from(nextTx.ancestorMap.values());
// sort ancestors by dependency graph (equivalent to sorting by ascending ancestor count)
const sortedTxSet = [...ancestors.sort((a, b) => { return (a.ancestorMap.size || 0) - (b.ancestorMap.size || 0); }), nextTx];
let isCluster = false;
if (sortedTxSet.length > 1) {
cpfpClusters[nextTx.txid] = sortedTxSet.map(tx => tx.txid);
cpfpClusters.set(nextTx.uid, sortedTxSet.map(tx => tx.uid));
isCluster = true;
}
const effectiveFeeRate = nextTx.ancestorFee / (nextTx.ancestorWeight / 4);
const effectiveFeeRate = Math.min(nextTx.dependencyRate || Infinity, nextTx.ancestorFee / (nextTx.ancestorWeight / 4));
const used: AuditTransaction[] = [];
while (sortedTxSet.length) {
const ancestor = sortedTxSet.pop();
const mempoolTx = mempool[ancestor.txid];
const mempoolTx = mempool.get(ancestor.uid);
if (!mempoolTx) {
continue;
}
ancestor.used = true;
ancestor.usedBy = nextTx.txid;
ancestor.usedBy = nextTx.uid;
// update original copy of this tx with effective fee rate & relatives data
mempoolTx.effectiveFeePerVsize = effectiveFeeRate;
if (isCluster) {
mempoolTx.cpfpRoot = nextTx.txid;
if (mempoolTx.effectiveFeePerVsize !== effectiveFeeRate) {
mempoolTx.effectiveFeePerVsize = effectiveFeeRate;
mempoolTx.dirty = true;
}
if (mempoolTx.cpfpRoot !== nextTx.uid) {
mempoolTx.cpfpRoot = isCluster ? nextTx.uid : null;
mempoolTx.dirty = true;
}
mempoolTx.cpfpChecked = true;
transactions.push(ancestor);
blockSize += ancestor.size;
blockWeight += ancestor.weight;
used.push(ancestor);
}
@@ -138,7 +158,7 @@ function makeBlockTemplates(mempool: { [txid: string]: ThreadTransaction })
// remove these as valid package ancestors for any descendants remaining in the mempool
if (used.length) {
used.forEach(tx => {
updateDescendants(tx, auditPool, modified);
updateDescendants(tx, auditPool, modified, effectiveFeeRate);
});
}
@@ -156,11 +176,10 @@ function makeBlockTemplates(mempool: { [txid: string]: ThreadTransaction })
if ((exceededPackageTries || queueEmpty) && blocks.length < 7) {
// construct this block
if (transactions.length) {
blocks.push(transactions.map(t => mempool[t.txid]));
blocks.push(transactions.map(t => t.uid));
}
// reset for the next block
transactions = [];
blockSize = 0;
blockWeight = 4000;
// 'overflow' packages didn't fit in this block, but are valid candidates for the next
@@ -175,50 +194,38 @@ function makeBlockTemplates(mempool: { [txid: string]: ThreadTransaction })
overflow = [];
}
}
// pack any leftover transactions into the last block
for (const tx of overflow) {
if (!tx || tx?.used) {
continue;
}
blockWeight += tx.weight;
const mempoolTx = mempool[tx.txid];
// update original copy of this tx with effective fee rate & relatives data
mempoolTx.effectiveFeePerVsize = tx.score;
if (tx.ancestorMap.size > 0) {
cpfpClusters[tx.txid] = Array.from(tx.ancestorMap?.values()).map(a => a.txid);
mempoolTx.cpfpRoot = tx.txid;
}
mempoolTx.cpfpChecked = true;
transactions.push(tx);
tx.used = true;
if (overflow.length > 0) {
logger.warn('GBT overflow list unexpectedly non-empty after final block constructed');
}
const blockTransactions = transactions.map(t => mempool[t.txid]);
restOfArray.forEach(tx => {
blockWeight += tx.weight;
tx.effectiveFeePerVsize = tx.feePerVsize;
tx.cpfpChecked = false;
blockTransactions.push(tx);
});
if (blockTransactions.length) {
blocks.push(blockTransactions);
// add the final unbounded block if it contains any transactions
if (transactions.length > 0) {
blocks.push(transactions.map(t => t.uid));
}
// get map of dirty transactions
const rates = new Map<number, number>();
for (const tx of mempool.values()) {
if (tx?.dirty) {
rates.set(tx.uid, tx.effectiveFeePerVsize || tx.feePerVsize);
}
}
transactions = [];
const end = Date.now();
const time = end - start;
logger.debug('Mempool templates calculated in ' + time / 1000 + ' seconds');
return { blocks, clusters: cpfpClusters };
return { blocks, rates, clusters: cpfpClusters };
}
// traverse in-mempool ancestors
// recursion unavoidable, but should be limited to depth < 25 by mempool policy
function setRelatives(
tx: AuditTransaction,
mempool: { [txid: string]: AuditTransaction },
mempool: Map<number, AuditTransaction>,
): void {
for (const parent of tx.vin) {
const parentTx = mempool[parent];
for (const parent of tx.inputs) {
const parentTx = mempool.get(parent);
if (parentTx && !tx.ancestorMap?.has(parent)) {
tx.ancestorMap.set(parent, parentTx);
parentTx.children.add(tx);
@@ -227,15 +234,17 @@ function setRelatives(
setRelatives(parentTx, mempool);
}
parentTx.ancestorMap.forEach((ancestor) => {
tx.ancestorMap.set(ancestor.txid, ancestor);
tx.ancestorMap.set(ancestor.uid, ancestor);
});
}
};
tx.ancestorFee = tx.fee || 0;
tx.ancestorWeight = tx.weight || 0;
tx.ancestorSigops = tx.sigops || 0;
tx.ancestorMap.forEach((ancestor) => {
tx.ancestorFee += ancestor.fee;
tx.ancestorWeight += ancestor.weight;
tx.ancestorSigops += ancestor.sigops;
});
tx.score = tx.ancestorFee / ((tx.ancestorWeight / 4) || 1);
tx.relativesSet = true;
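A quick worked example of the ancestor score computed above, with illustrative fee and weight values: a child paying 500 sats at 600 WU with one unconfirmed parent paying 100 sats at 1000 WU is scored on the combined package.

const ancestorFee = 500 + 100;                    // 600 sats
const ancestorWeight = 600 + 1000;                // 1600 WU => 400 vbytes
const score = ancestorFee / (ancestorWeight / 4); // 1.5 sat/vB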
@@ -245,8 +254,9 @@ function setRelatives(
// avoids recursion to limit call stack depth
function updateDescendants(
rootTx: AuditTransaction,
mempool: { [txid: string]: AuditTransaction },
mempool: Map<number, AuditTransaction>,
modified: PairingHeap<AuditTransaction>,
clusterRate: number,
): void {
const descendantSet: Set<AuditTransaction> = new Set();
// stack of nodes left to visit
@@ -261,13 +271,15 @@ function updateDescendants(
});
while (descendants.length) {
descendantTx = descendants.pop();
if (descendantTx && descendantTx.ancestorMap && descendantTx.ancestorMap.has(rootTx.txid)) {
if (descendantTx && descendantTx.ancestorMap && descendantTx.ancestorMap.has(rootTx.uid)) {
// remove tx as ancestor
descendantTx.ancestorMap.delete(rootTx.txid);
descendantTx.ancestorMap.delete(rootTx.uid);
descendantTx.ancestorFee -= rootTx.fee;
descendantTx.ancestorWeight -= rootTx.weight;
descendantTx.ancestorSigops -= rootTx.sigops;
tmpScore = descendantTx.score;
descendantTx.score = descendantTx.ancestorFee / (descendantTx.ancestorWeight / 4);
descendantTx.dependencyRate = descendantTx.dependencyRate ? Math.min(descendantTx.dependencyRate, clusterRate) : clusterRate;
if (!descendantTx.modifiedNode) {
descendantTx.modified = true;

View File

@@ -1,7 +1,7 @@
import logger from '../logger';
import * as WebSocket from 'ws';
import {
BlockExtended, TransactionExtended, WebsocketResponse,
BlockExtended, TransactionExtended, MempoolTransactionExtended, WebsocketResponse,
OptimizedStatistic, ILoadingIndicators
} from '../mempool.interfaces';
import blocks from './blocks';
@@ -12,7 +12,7 @@ import { Common } from './common';
import loadingIndicators from './loading-indicators';
import config from '../config';
import transactionUtils from './transaction-utils';
import rbfCache from './rbf-cache';
import rbfCache, { ReplacementInfo } from './rbf-cache';
import difficultyAdjustment from './difficulty-adjustment';
import feeApi from './fee-api';
import BlocksAuditsRepository from '../repositories/BlocksAuditsRepository';
@@ -22,18 +22,69 @@ import { deepClone } from '../utils/clone';
import priceUpdater from '../tasks/price-updater';
import { ApiPrice } from '../repositories/PricesRepository';
// valid 'want' subscriptions
const wantable = [
'blocks',
'mempool-blocks',
'live-2h-chart',
'stats',
];
class WebsocketHandler {
private wss: WebSocket.Server | undefined;
private extraInitProperties = {};
private numClients = 0;
private numConnected = 0;
private numDisconnected = 0;
private socketData: { [key: string]: string } = {};
private serializedInitData: string = '{}';
private lastRbfSummary: ReplacementInfo | null = null;
constructor() { }
setWebsocketServer(wss: WebSocket.Server) {
this.wss = wss;
}
setExtraInitProperties(property: string, value: any) {
setExtraInitData(property: string, value: any) {
this.extraInitProperties[property] = value;
this.updateSocketDataFields(this.extraInitProperties);
}
private updateSocketDataFields(data: { [property: string]: any }): void {
for (const property of Object.keys(data)) {
if (data[property] != null) {
this.socketData[property] = JSON.stringify(data[property]);
} else {
delete this.socketData[property];
}
}
this.serializedInitData = '{'
+ Object.keys(this.socketData).map(key => `"${key}": ${this.socketData[key]}`).join(', ')
+ '}';
}
private updateSocketData(): void {
const _blocks = blocks.getBlocks().slice(-config.MEMPOOL.INITIAL_BLOCKS_AMOUNT);
const da = difficultyAdjustment.getDifficultyAdjustment();
this.updateSocketDataFields({
'mempoolInfo': memPool.getMempoolInfo(),
'vBytesPerSecond': memPool.getVBytesPerSecond(),
'blocks': _blocks,
'conversions': priceUpdater.getLatestPrices(),
'mempool-blocks': mempoolBlocks.getMempoolBlocks(),
'transactions': memPool.getLatestTransactions(),
'backendInfo': backendInfo.getBackendInfo(),
'loadingIndicators': loadingIndicators.getLoadingIndicators(),
'da': da?.previousTime ? da : undefined,
'fees': feeApi.getRecommendedFee(),
});
}
public getSerializedInitData(): string {
return this.serializedInitData;
}
setupConnectionHandling() {
@@ -42,49 +93,76 @@ class WebsocketHandler {
}
this.wss.on('connection', (client: WebSocket) => {
this.numConnected++;
client.on('error', logger.info);
client.on('close', () => {
this.numDisconnected++;
});
client.on('message', async (message: string) => {
try {
const parsedMessage: WebsocketResponse = JSON.parse(message);
const response = {};
if (parsedMessage.action === 'want') {
client['want-blocks'] = parsedMessage.data.indexOf('blocks') > -1;
client['want-mempool-blocks'] = parsedMessage.data.indexOf('mempool-blocks') > -1;
client['want-live-2h-chart'] = parsedMessage.data.indexOf('live-2h-chart') > -1;
client['want-stats'] = parsedMessage.data.indexOf('stats') > -1;
const wantNow = {};
if (parsedMessage && parsedMessage.action === 'want' && Array.isArray(parsedMessage.data)) {
for (const sub of wantable) {
const key = `want-${sub}`;
const wants = parsedMessage.data.includes(sub);
if (wants && client['wants'] && !client[key]) {
wantNow[key] = true;
}
client[key] = wants;
}
client['wants'] = true;
}
// send initial data when a client first starts a subscription
if (wantNow['want-blocks'] || (parsedMessage && parsedMessage['refresh-blocks'])) {
response['blocks'] = this.socketData['blocks'];
}
if (wantNow['want-mempool-blocks']) {
response['mempool-blocks'] = this.socketData['mempool-blocks'];
}
if (wantNow['want-stats']) {
response['mempoolInfo'] = this.socketData['mempoolInfo'];
response['vBytesPerSecond'] = this.socketData['vBytesPerSecond'];
response['fees'] = this.socketData['fees'];
response['da'] = this.socketData['da'];
}
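// Illustrative sketch (not part of this changeset): a client subscribes by sending a
// 'want' message listing entries from the `wantable` array, e.g.
//   { "action": "want", "data": ["blocks", "stats", "mempool-blocks"] }
// and each subscription it did not already hold is answered straight away with the
// corresponding cached payload from socketData above.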
if (parsedMessage && parsedMessage['track-tx']) {
if (/^[a-fA-F0-9]{64}$/.test(parsedMessage['track-tx'])) {
client['track-tx'] = parsedMessage['track-tx'];
const trackTxid = client['track-tx'];
// Client is telling us the transaction wasn't found
if (parsedMessage['watch-mempool']) {
const rbfCacheTxid = rbfCache.getReplacedBy(client['track-tx']);
const rbfCacheTxid = rbfCache.getReplacedBy(trackTxid);
if (rbfCacheTxid) {
response['txReplaced'] = {
response['txReplaced'] = JSON.stringify({
txid: rbfCacheTxid,
};
});
client['track-tx'] = null;
} else {
// It might have appeared before we had the time to start watching for it
const tx = memPool.getMempool()[client['track-tx']];
const tx = memPool.getMempool()[trackTxid];
if (tx) {
if (config.MEMPOOL.BACKEND === 'esplora') {
response['tx'] = tx;
response['tx'] = JSON.stringify(tx);
} else {
// tx.prevout is missing from transactions when in bitcoind mode
try {
const fullTx = await transactionUtils.$getTransactionExtended(tx.txid, true);
response['tx'] = fullTx;
const fullTx = await transactionUtils.$getMempoolTransactionExtended(tx.txid, true);
response['tx'] = JSON.stringify(fullTx);
} catch (e) {
logger.debug('Error finding transaction: ' + (e instanceof Error ? e.message : e));
}
}
} else {
try {
const fullTx = await transactionUtils.$getTransactionExtended(client['track-tx'], true);
response['tx'] = fullTx;
const fullTx = await transactionUtils.$getMempoolTransactionExtended(client['track-tx'], true);
response['tx'] = JSON.stringify(fullTx);
} catch (e) {
logger.debug('Error finding transaction. ' + (e instanceof Error ? e.message : e));
client['track-mempool-tx'] = parsedMessage['track-tx'];
@@ -92,6 +170,13 @@ class WebsocketHandler {
}
}
}
const tx = memPool.getMempool()[trackTxid];
if (tx && tx.position) {
response['txPosition'] = JSON.stringify({
txid: trackTxid,
position: tx.position,
});
}
} else {
client['track-tx'] = null;
}
@@ -123,25 +208,47 @@ class WebsocketHandler {
const index = parsedMessage['track-mempool-block'];
client['track-mempool-block'] = index;
const mBlocksWithTransactions = mempoolBlocks.getMempoolBlocksWithTransactions();
response['projected-block-transactions'] = {
response['projected-block-transactions'] = JSON.stringify({
index: index,
blockTransactions: mBlocksWithTransactions[index]?.transactions || [],
};
});
} else {
client['track-mempool-block'] = null;
}
}
if (parsedMessage && parsedMessage['track-rbf'] !== undefined) {
if (['all', 'fullRbf'].includes(parsedMessage['track-rbf'])) {
client['track-rbf'] = parsedMessage['track-rbf'];
response['rbfLatest'] = JSON.stringify(rbfCache.getRbfTrees(parsedMessage['track-rbf'] === 'fullRbf'));
} else {
client['track-rbf'] = false;
}
}
if (parsedMessage && parsedMessage['track-rbf-summary'] != null) {
if (parsedMessage['track-rbf-summary']) {
client['track-rbf-summary'] = true;
if (this.socketData['rbfSummary'] != null) {
response['rbfLatestSummary'] = this.socketData['rbfSummary'];
}
} else {
client['track-rbf-summary'] = false;
}
}
if (parsedMessage.action === 'init') {
const _blocks = blocks.getBlocks().slice(-config.MEMPOOL.INITIAL_BLOCKS_AMOUNT);
if (!_blocks) {
if (!this.socketData['blocks']?.length || !this.socketData['da'] || !this.socketData['backendInfo'] || !this.socketData['conversions']) {
this.updateSocketData();
}
if (!this.socketData['blocks']?.length) {
return;
}
client.send(JSON.stringify(this.getInitData(_blocks)));
client.send(this.serializedInitData);
}
if (parsedMessage.action === 'ping') {
response['pong'] = true;
response['pong'] = JSON.stringify(true);
}
if (parsedMessage['track-donation'] && parsedMessage['track-donation'].length === 22) {
@@ -157,7 +264,8 @@ class WebsocketHandler {
}
if (Object.keys(response).length) {
client.send(JSON.stringify(response));
const serializedResponse = this.serializeResponse(response);
client.send(serializedResponse);
}
} catch (e) {
logger.debug('Error parsing websocket message: ' + (e instanceof Error ? e.message : e));
@@ -186,11 +294,14 @@ class WebsocketHandler {
throw new Error('WebSocket.Server is not set');
}
this.updateSocketDataFields({ 'loadingIndicators': indicators });
const response = JSON.stringify({ loadingIndicators: indicators });
this.wss.clients.forEach((client) => {
if (client.readyState !== WebSocket.OPEN) {
return;
}
client.send(JSON.stringify({ loadingIndicators: indicators }));
client.send(response);
});
}
@@ -199,38 +310,28 @@ class WebsocketHandler {
throw new Error('WebSocket.Server is not set');
}
this.updateSocketDataFields({ 'conversions': conversionRates });
const response = JSON.stringify({ conversions: conversionRates });
this.wss.clients.forEach((client) => {
if (client.readyState !== WebSocket.OPEN) {
return;
}
client.send(JSON.stringify({ conversions: conversionRates }));
client.send(response);
});
}
getInitData(_blocks?: BlockExtended[]) {
if (!_blocks) {
_blocks = blocks.getBlocks().slice(-config.MEMPOOL.INITIAL_BLOCKS_AMOUNT);
}
return {
'mempoolInfo': memPool.getMempoolInfo(),
'vBytesPerSecond': memPool.getVBytesPerSecond(),
'blocks': _blocks,
'conversions': priceUpdater.getLatestPrices(),
'mempool-blocks': mempoolBlocks.getMempoolBlocks(),
'transactions': memPool.getLatestTransactions(),
'backendInfo': backendInfo.getBackendInfo(),
'loadingIndicators': loadingIndicators.getLoadingIndicators(),
'da': difficultyAdjustment.getDifficultyAdjustment(),
'fees': feeApi.getRecommendedFee(),
...this.extraInitProperties
};
}
handleNewStatistic(stats: OptimizedStatistic) {
if (!this.wss) {
throw new Error('WebSocket.Server is not set');
}
this.printLogs();
const response = JSON.stringify({
'live-2h-chart': stats
});
this.wss.clients.forEach((client) => {
if (client.readyState !== WebSocket.OPEN) {
return;
@@ -240,20 +341,58 @@ class WebsocketHandler {
return;
}
client.send(JSON.stringify({
'live-2h-chart': stats
}));
client.send(response);
});
}
async handleMempoolChange(newMempool: { [txid: string]: TransactionExtended },
newTransactions: TransactionExtended[], deletedTransactions: TransactionExtended[]): Promise<void> {
handleReorg(): void {
if (!this.wss) {
throw new Error('WebSocket.Server is not set');
}
const da = difficultyAdjustment.getDifficultyAdjustment();
// update init data
this.updateSocketDataFields({
'blocks': blocks.getBlocks(),
'da': da?.previousTime ? da : undefined,
});
this.wss.clients.forEach((client) => {
if (client.readyState !== WebSocket.OPEN) {
return;
}
const response = {};
if (client['want-blocks']) {
response['blocks'] = this.socketData['blocks'];
}
if (client['want-stats']) {
response['da'] = this.socketData['da'];
}
if (Object.keys(response).length) {
const serializedResponse = this.serializeResponse(response);
client.send(serializedResponse);
}
});
}
async $handleMempoolChange(newMempool: { [txid: string]: MempoolTransactionExtended }, mempoolSize: number,
newTransactions: MempoolTransactionExtended[], deletedTransactions: MempoolTransactionExtended[]): Promise<void> {
if (!this.wss) {
throw new Error('WebSocket.Server is not set');
}
this.printLogs();
if (config.MEMPOOL.ADVANCED_GBT_MEMPOOL) {
await mempoolBlocks.updateBlockTemplates(newMempool, newTransactions, deletedTransactions.map(tx => tx.txid), true);
if (config.MEMPOOL.RUST_GBT) {
await mempoolBlocks.$rustUpdateBlockTemplates(newMempool, mempoolSize, newTransactions, deletedTransactions);
} else {
await mempoolBlocks.$updateBlockTemplates(newMempool, newTransactions, deletedTransactions, true);
}
} else {
mempoolBlocks.updateMempoolBlocks(newMempool, true);
}
@@ -265,8 +404,72 @@ class WebsocketHandler {
const rbfTransactions = Common.findRbfTransactions(newTransactions, deletedTransactions);
const da = difficultyAdjustment.getDifficultyAdjustment();
memPool.handleRbfTransactions(rbfTransactions);
const rbfChanges = rbfCache.getRbfChanges();
let rbfReplacements;
let fullRbfReplacements;
let rbfSummary;
if (Object.keys(rbfChanges.trees).length) {
rbfReplacements = rbfCache.getRbfTrees(false);
fullRbfReplacements = rbfCache.getRbfTrees(true);
rbfSummary = rbfCache.getLatestRbfSummary();
}
for (const deletedTx of deletedTransactions) {
rbfCache.evict(deletedTx.txid);
}
memPool.removeFromSpendMap(deletedTransactions);
memPool.addToSpendMap(newTransactions);
const recommendedFees = feeApi.getRecommendedFee();
const latestTransactions = memPool.getLatestTransactions();
// update init data
const socketDataFields = {
'mempoolInfo': mempoolInfo,
'vBytesPerSecond': vBytesPerSecond,
'mempool-blocks': mBlocks,
'transactions': latestTransactions,
'loadingIndicators': loadingIndicators.getLoadingIndicators(),
'da': da?.previousTime ? da : undefined,
'fees': recommendedFees,
};
if (rbfSummary) {
socketDataFields['rbfSummary'] = rbfSummary;
}
this.updateSocketDataFields(socketDataFields);
// cache serialized objects to avoid stringify-ing the same thing for every client
const responseCache = { ...this.socketData };
function getCachedResponse(key: string, data): string {
if (!responseCache[key]) {
responseCache[key] = JSON.stringify(data);
}
return responseCache[key];
}
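// Illustrative sketch (not part of this changeset): with this cache, a payload that
// hundreds of subscribed clients all want is JSON.stringify'd once per broadcast and
// the resulting string is reused for every other client, e.g.
//   getCachedResponse('mempool-blocks', mBlocks); // first call stringifies
//   getCachedResponse('mempool-blocks', mBlocks); // subsequent calls reuse the string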
// pre-compute new tracked outspends
const outspendCache: { [txid: string]: { [vout: number]: { vin: number, txid: string } } } = {};
const trackedTxs = new Set<string>();
this.wss.clients.forEach((client) => {
if (client['track-tx']) {
trackedTxs.add(client['track-tx']);
}
});
if (trackedTxs.size > 0) {
for (const tx of newTransactions) {
for (let i = 0; i < tx.vin.length; i++) {
const vin = tx.vin[i];
if (trackedTxs.has(vin.txid)) {
if (!outspendCache[vin.txid]) {
outspendCache[vin.txid] = { [vin.vout]: { vin: i, txid: tx.txid }};
} else {
outspendCache[vin.txid][vin.vout] = { vin: i, txid: tx.txid };
}
}
}
}
}
this.wss.clients.forEach(async (client) => {
if (client.readyState !== WebSocket.OPEN) {
return;
@@ -275,15 +478,17 @@ class WebsocketHandler {
const response = {};
if (client['want-stats']) {
response['mempoolInfo'] = mempoolInfo;
response['vBytesPerSecond'] = vBytesPerSecond;
response['transactions'] = newTransactions.slice(0, 6).map((tx) => Common.stripTransaction(tx));
response['da'] = da;
response['fees'] = recommendedFees;
response['mempoolInfo'] = getCachedResponse('mempoolInfo', mempoolInfo);
response['vBytesPerSecond'] = getCachedResponse('vBytesPerSecond', vBytesPerSecond);
response['transactions'] = getCachedResponse('transactions', latestTransactions);
if (da?.previousTime) {
response['da'] = getCachedResponse('da', da);
}
response['fees'] = getCachedResponse('fees', recommendedFees);
}
if (client['want-mempool-blocks']) {
response['mempool-blocks'] = mBlocks;
response['mempool-blocks'] = getCachedResponse('mempool-blocks', mBlocks);
}
if (client['track-mempool-tx']) {
@@ -291,13 +496,13 @@ class WebsocketHandler {
if (tx) {
if (config.MEMPOOL.BACKEND !== 'esplora') {
try {
const fullTx = await transactionUtils.$getTransactionExtended(tx.txid, true);
response['tx'] = fullTx;
const fullTx = await transactionUtils.$getMempoolTransactionExtended(tx.txid, true);
response['tx'] = JSON.stringify(fullTx);
} catch (e) {
logger.debug('Error finding transaction in mempool: ' + (e instanceof Error ? e.message : e));
}
} else {
response['tx'] = tx;
response['tx'] = JSON.stringify(tx);
}
client['track-mempool-tx'] = null;
}
@@ -311,7 +516,7 @@ class WebsocketHandler {
if (someVin) {
if (config.MEMPOOL.BACKEND !== 'esplora') {
try {
const fullTx = await transactionUtils.$getTransactionExtended(tx.txid, true);
const fullTx = await transactionUtils.$getMempoolTransactionExtended(tx.txid, true);
foundTransactions.push(fullTx);
} catch (e) {
logger.debug('Error finding transaction in mempool: ' + (e instanceof Error ? e.message : e));
@@ -325,7 +530,7 @@ class WebsocketHandler {
if (someVout) {
if (config.MEMPOOL.BACKEND !== 'esplora') {
try {
const fullTx = await transactionUtils.$getTransactionExtended(tx.txid, true);
const fullTx = await transactionUtils.$getMempoolTransactionExtended(tx.txid, true);
foundTransactions.push(fullTx);
} catch (e) {
logger.debug('Error finding transaction in mempool: ' + (e instanceof Error ? e.message : e));
@@ -337,7 +542,7 @@ class WebsocketHandler {
}
if (foundTransactions.length) {
response['address-transactions'] = foundTransactions;
response['address-transactions'] = JSON.stringify(foundTransactions);
}
}
@@ -366,89 +571,118 @@ class WebsocketHandler {
});
if (foundTransactions.length) {
response['address-transactions'] = foundTransactions;
response['address-transactions'] = JSON.stringify(foundTransactions);
}
}
if (client['track-tx']) {
const outspends: object = {};
newTransactions.forEach((tx) => tx.vin.forEach((vin, i) => {
if (vin.txid === client['track-tx']) {
outspends[vin.vout] = {
vin: i,
txid: tx.txid,
};
}
}));
const trackTxid = client['track-tx'];
const outspends = outspendCache[trackTxid];
if (Object.keys(outspends).length) {
response['utxoSpent'] = outspends;
if (outspends && Object.keys(outspends).length) {
response['utxoSpent'] = JSON.stringify(outspends);
}
if (rbfTransactions[client['track-tx']]) {
for (const rbfTransaction in rbfTransactions) {
if (client['track-tx'] === rbfTransaction) {
response['rbfTransaction'] = {
txid: rbfTransactions[rbfTransaction].txid,
};
break;
}
}
const rbfReplacedBy = rbfCache.getReplacedBy(client['track-tx']);
if (rbfReplacedBy) {
response['rbfTransaction'] = JSON.stringify({
txid: rbfReplacedBy,
});
}
const rbfChange = rbfChanges.map[client['track-tx']];
if (rbfChange) {
response['rbfInfo'] = JSON.stringify(rbfChanges.trees[rbfChange]);
}
const mempoolTx = newMempool[trackTxid];
if (mempoolTx && mempoolTx.position) {
response['txPosition'] = JSON.stringify({
txid: trackTxid,
position: mempoolTx.position,
});
}
}
if (client['track-mempool-block'] >= 0) {
const index = client['track-mempool-block'];
if (mBlockDeltas[index]) {
response['projected-block-transactions'] = {
response['projected-block-transactions'] = getCachedResponse(`projected-block-transactions-${index}`, {
index: index,
delta: mBlockDeltas[index],
};
});
}
}
if (client['track-rbf'] === 'all' && rbfReplacements) {
response['rbfLatest'] = getCachedResponse('rbfLatest', rbfReplacements);
} else if (client['track-rbf'] === 'fullRbf' && fullRbfReplacements) {
response['rbfLatest'] = getCachedResponse('fullrbfLatest', fullRbfReplacements);
}
if (client['track-rbf-summary'] && rbfSummary) {
response['rbfLatestSummary'] = getCachedResponse('rbfLatestSummary', rbfSummary);
}
if (Object.keys(response).length) {
client.send(JSON.stringify(response));
const serializedResponse = this.serializeResponse(response);
client.send(serializedResponse);
}
});
}
async handleNewBlock(block: BlockExtended, txIds: string[], transactions: TransactionExtended[]): Promise<void> {
async handleNewBlock(block: BlockExtended, txIds: string[], transactions: MempoolTransactionExtended[]): Promise<void> {
if (!this.wss) {
throw new Error('WebSocket.Server is not set');
}
this.printLogs();
const _memPool = memPool.getMempool();
const rbfTransactions = Common.findMinedRbfTransactions(transactions, memPool.getSpendMap());
memPool.handleMinedRbfTransactions(rbfTransactions);
memPool.removeFromSpendMap(transactions);
if (config.MEMPOOL.AUDIT) {
let projectedBlocks;
let auditMempool = _memPool;
// template calculation functions have mempool side effects, so calculate audits using
// a cloned copy of the mempool if we're running a different algorithm for mempool updates
const auditMempool = (config.MEMPOOL.ADVANCED_GBT_AUDIT === config.MEMPOOL.ADVANCED_GBT_MEMPOOL) ? _memPool : deepClone(_memPool);
if (config.MEMPOOL.ADVANCED_GBT_AUDIT) {
projectedBlocks = await mempoolBlocks.makeBlockTemplates(auditMempool, false);
const separateAudit = config.MEMPOOL.ADVANCED_GBT_AUDIT !== config.MEMPOOL.ADVANCED_GBT_MEMPOOL;
if (separateAudit) {
auditMempool = deepClone(_memPool);
if (config.MEMPOOL.ADVANCED_GBT_AUDIT) {
if (config.MEMPOOL.RUST_GBT) {
projectedBlocks = await mempoolBlocks.$oneOffRustBlockTemplates(auditMempool);
} else {
projectedBlocks = await mempoolBlocks.$makeBlockTemplates(auditMempool, false);
}
} else {
projectedBlocks = mempoolBlocks.updateMempoolBlocks(auditMempool, false);
}
} else {
projectedBlocks = mempoolBlocks.updateMempoolBlocks(auditMempool, false);
projectedBlocks = mempoolBlocks.getMempoolBlocksWithTransactions();
}
if (Common.indexingEnabled() && memPool.isInSync()) {
const { censored, added, fresh, score, similarity } = Audit.auditBlock(transactions, projectedBlocks, auditMempool);
const { censored, added, fresh, sigop, fullrbf, score, similarity } = Audit.auditBlock(transactions, projectedBlocks, auditMempool);
const matchRate = Math.round(score * 100 * 100) / 100;
const stripped = projectedBlocks[0]?.transactions ? projectedBlocks[0].transactions.map((tx) => {
return {
txid: tx.txid,
vsize: tx.vsize,
fee: tx.fee ? Math.round(tx.fee) : 0,
value: tx.value,
};
}) : [];
const stripped = projectedBlocks[0]?.transactions ? projectedBlocks[0].transactions : [];
let totalFees = 0;
let totalWeight = 0;
for (const tx of stripped) {
totalFees += tx.fee;
totalWeight += (tx.vsize * 4);
}
BlocksSummariesRepository.$saveTemplate({
height: block.height,
template: {
id: block.id,
transactions: stripped
transactions: stripped,
}
});
@@ -459,11 +693,17 @@ class WebsocketHandler {
addedTxs: added,
missingTxs: censored,
freshTxs: fresh,
sigopTxs: sigop,
fullrbfTxs: fullrbf,
matchRate: matchRate,
expectedFees: totalFees,
expectedWeight: totalWeight,
});
if (block.extras) {
block.extras.matchRate = matchRate;
block.extras.expectedFees = totalFees;
block.extras.expectedWeight = totalWeight;
block.extras.similarity = similarity;
}
}
@@ -474,16 +714,18 @@ class WebsocketHandler {
}
}
const removed: string[] = [];
// Update mempool to remove transactions included in the new block
for (const txId of txIds) {
delete _memPool[txId];
removed.push(txId);
rbfCache.evict(txId);
rbfCache.mined(txId);
}
if (config.MEMPOOL.ADVANCED_GBT_MEMPOOL) {
await mempoolBlocks.updateBlockTemplates(_memPool, [], removed, true);
if (config.MEMPOOL.RUST_GBT) {
await mempoolBlocks.$rustUpdateBlockTemplates(_memPool, Object.keys(_memPool).length, [], transactions);
} else {
await mempoolBlocks.$makeBlockTemplates(_memPool, true);
}
} else {
mempoolBlocks.updateMempoolBlocks(_memPool, true);
}
@@ -492,29 +734,64 @@ class WebsocketHandler {
const da = difficultyAdjustment.getDifficultyAdjustment();
const fees = feeApi.getRecommendedFee();
const mempoolInfo = memPool.getMempoolInfo();
// update init data
this.updateSocketDataFields({
'mempoolInfo': mempoolInfo,
'blocks': [...blocks.getBlocks(), block].slice(-config.MEMPOOL.INITIAL_BLOCKS_AMOUNT),
'mempool-blocks': mBlocks,
'loadingIndicators': loadingIndicators.getLoadingIndicators(),
'da': da?.previousTime ? da : undefined,
'fees': fees,
});
const responseCache = { ...this.socketData };
function getCachedResponse(key, data): string {
if (!responseCache[key]) {
responseCache[key] = JSON.stringify(data);
}
return responseCache[key];
}
this.wss.clients.forEach((client) => {
if (client.readyState !== WebSocket.OPEN) {
return;
}
if (!client['want-blocks']) {
return;
const response = {};
if (client['want-blocks']) {
response['block'] = getCachedResponse('block', block);
}
const response = {
'block': block,
'mempoolInfo': memPool.getMempoolInfo(),
'da': da,
'fees': fees,
};
if (client['want-stats']) {
response['mempoolInfo'] = getCachedResponse('mempoolInfo', mempoolInfo);
response['vBytesPerSecond'] = getCachedResponse('vBytesPerSecond', memPool.getVBytesPerSecond());
response['fees'] = getCachedResponse('fees', fees);
if (da?.previousTime) {
response['da'] = getCachedResponse('da', da);
}
}
if (mBlocks && client['want-mempool-blocks']) {
response['mempool-blocks'] = mBlocks;
response['mempool-blocks'] = getCachedResponse('mempool-blocks', mBlocks);
}
if (client['track-tx'] && txIds.indexOf(client['track-tx']) > -1) {
response['txConfirmed'] = true;
if (client['track-tx']) {
const trackTxid = client['track-tx'];
if (trackTxid && txIds.indexOf(trackTxid) > -1) {
response['txConfirmed'] = JSON.stringify(trackTxid);
} else {
const mempoolTx = _memPool[trackTxid];
if (mempoolTx && mempoolTx.position) {
response['txPosition'] = JSON.stringify({
txid: trackTxid,
position: mempoolTx.position,
});
}
}
}
if (client['track-address']) {
@@ -540,7 +817,7 @@ class WebsocketHandler {
};
});
response['block-transactions'] = foundTransactions;
response['block-transactions'] = JSON.stringify(foundTransactions);
}
}
@@ -577,23 +854,45 @@ class WebsocketHandler {
};
});
response['block-transactions'] = foundTransactions;
response['block-transactions'] = JSON.stringify(foundTransactions);
}
}
if (client['track-mempool-block'] >= 0) {
const index = client['track-mempool-block'];
if (mBlockDeltas && mBlockDeltas[index]) {
response['projected-block-transactions'] = {
response['projected-block-transactions'] = getCachedResponse(`projected-block-transactions-${index}`, {
index: index,
delta: mBlockDeltas[index],
};
});
}
}
client.send(JSON.stringify(response));
if (Object.keys(response).length) {
const serializedResponse = this.serializeResponse(response);
client.send(serializedResponse);
}
});
}
// takes a dictionary of JSON serialized values
// and zips it together into a valid JSON object
private serializeResponse(response): string {
return '{'
+ Object.keys(response).map(key => `"${key}": ${response[key]}`).join(', ')
+ '}';
}
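// Illustrative sketch (not part of this changeset): serializeResponse assumes every
// value is already a JSON-encoded string, so it can splice them together without a
// second stringify pass, e.g.
//   serializeResponse({ pong: 'true', fees: '{"fastestFee":20}' })
//   returns '{"pong": true, "fees": {"fastestFee":20}}'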
private printLogs(): void {
if (this.wss) {
const count = this.wss?.clients?.size || 0;
const diff = count - this.numClients;
this.numClients = count;
logger.debug(`${count} websocket clients | ${this.numConnected} connected | ${this.numDisconnected} disconnected | (${diff >= 0 ? '+' : ''}${diff})`);
this.numConnected = 0;
this.numDisconnected = 0;
}
}
}
export default new WebsocketHandler();

View File

@@ -5,7 +5,7 @@ const configFromFile = require(
interface IConfig {
MEMPOOL: {
ENABLED: boolean;
NETWORK: 'mainnet' | 'testnet' | 'signet' | 'liquid' | 'liquidtestnet';
NETWORK: 'mainnet' | 'testnet' | 'signet' | 'regtest' | 'liquid' | 'liquidtestnet';
BACKEND: 'esplora' | 'electrum' | 'none';
HTTP_PORT: number;
SPAWN_CLUSTER_PROCS: number;
@@ -31,12 +31,17 @@ interface IConfig {
AUDIT: boolean;
ADVANCED_GBT_AUDIT: boolean;
ADVANCED_GBT_MEMPOOL: boolean;
RUST_GBT: boolean;
CPFP_INDEXING: boolean;
MAX_BLOCKS_BULK_QUERY: number;
DISK_CACHE_BLOCK_INTERVAL: number;
MAX_PUSH_TX_SIZE_WEIGHT: number;
ALLOW_UNREACHABLE: boolean;
};
ESPLORA: {
REST_API_URL: string;
UNIX_SOCKET_PATH: string | void | null;
RETRY_UNIX_SOCKET_AFTER: number;
};
LIGHTNING: {
ENABLED: boolean;
@@ -84,6 +89,7 @@ interface IConfig {
DATABASE: string;
USERNAME: string;
PASSWORD: string;
TIMEOUT: number;
};
SYSLOG: {
ENABLED: boolean;
@@ -126,6 +132,12 @@ interface IConfig {
GEOLITE2_ASN: string;
GEOIP2_ISP: string;
},
REPLICATION: {
ENABLED: boolean;
AUDIT: boolean;
AUDIT_START_HEIGHT: number;
SERVERS: string[];
}
}
const defaults: IConfig = {
@@ -157,12 +169,17 @@ const defaults: IConfig = {
'AUDIT': false,
'ADVANCED_GBT_AUDIT': false,
'ADVANCED_GBT_MEMPOOL': false,
'RUST_GBT': false,
'CPFP_INDEXING': false,
'MAX_BLOCKS_BULK_QUERY': 0,
'DISK_CACHE_BLOCK_INTERVAL': 6,
'MAX_PUSH_TX_SIZE_WEIGHT': 400000,
'ALLOW_UNREACHABLE': true,
},
'ESPLORA': {
'REST_API_URL': 'http://127.0.0.1:3000',
'UNIX_SOCKET_PATH': null,
'RETRY_UNIX_SOCKET_AFTER': 30000,
},
'ELECTRUM': {
'HOST': '127.0.0.1',
@@ -190,7 +207,8 @@ const defaults: IConfig = {
'PORT': 3306,
'DATABASE': 'mempool',
'USERNAME': 'mempool',
'PASSWORD': 'mempool'
'PASSWORD': 'mempool',
'TIMEOUT': 180000,
},
'SYSLOG': {
'ENABLED': true,
@@ -252,6 +270,12 @@ const defaults: IConfig = {
'GEOLITE2_ASN': '/usr/local/share/GeoIP/GeoLite2-ASN.mmdb',
'GEOIP2_ISP': '/usr/local/share/GeoIP/GeoIP2-ISP.mmdb'
},
'REPLICATION': {
'ENABLED': false,
'AUDIT': false,
'AUDIT_START_HEIGHT': 774000,
'SERVERS': [],
}
};
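// Illustrative sketch (not part of this changeset): a backend config that overrides
// the defaults above to turn audit replication on. Hostnames are placeholders, not
// real servers.
const replicationExample: IConfig['REPLICATION'] = {
  ENABLED: true,
  AUDIT: true,
  AUDIT_START_HEIGHT: 774000,
  SERVERS: ['mempool.example.com', 'mempool2.example.com'],
};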
class Config implements IConfig {
@@ -271,6 +295,7 @@ class Config implements IConfig {
PRICE_DATA_SERVER: IConfig['PRICE_DATA_SERVER'];
EXTERNAL_DATA_SERVER: IConfig['EXTERNAL_DATA_SERVER'];
MAXMIND: IConfig['MAXMIND'];
REPLICATION: IConfig['REPLICATION'];
constructor() {
const configs = this.merge(configFromFile, defaults);
@@ -290,6 +315,7 @@ class Config implements IConfig {
this.PRICE_DATA_SERVER = configs.PRICE_DATA_SERVER;
this.EXTERNAL_DATA_SERVER = configs.EXTERNAL_DATA_SERVER;
this.MAXMIND = configs.MAXMIND;
this.REPLICATION = configs.REPLICATION;
}
merge = (...objects: object[]): IConfig => {

View File

@@ -30,11 +30,64 @@ import { FieldPacket, OkPacket, PoolOptions, ResultSetHeader, RowDataPacket } fr
}
public async query<T extends RowDataPacket[][] | RowDataPacket[] | OkPacket |
OkPacket[] | ResultSetHeader>(query, params?): Promise<[T, FieldPacket[]]>
OkPacket[] | ResultSetHeader>(query, params?, connection?: PoolConnection): Promise<[T, FieldPacket[]]>
{
this.checkDBFlag();
let hardTimeout;
if (query?.timeout != null) {
hardTimeout = Math.floor(query.timeout * 1.1);
} else {
hardTimeout = config.DATABASE.TIMEOUT;
}
if (hardTimeout > 0) {
return new Promise((resolve, reject) => {
const timer = setTimeout(() => {
reject(new Error(`DB query failed to return, reject or time out within ${hardTimeout / 1000}s - ${query?.sql?.slice(0, 160) || (typeof(query) === 'string' || query instanceof String ? query?.slice(0, 160) : 'unknown query')}`));
}, hardTimeout);
// Use a specific connection if provided, otherwise delegate to the pool
const connectionPromise = connection ? Promise.resolve(connection) : this.getPool();
connectionPromise.then((pool: PoolConnection | Pool) => {
return pool.query(query, params) as Promise<[T, FieldPacket[]]>;
}).then(result => {
resolve(result);
}).catch(error => {
reject(error);
}).finally(() => {
clearTimeout(timer);
});
});
} else {
const pool = await this.getPool();
return pool.query(query, params);
}
}
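// Illustrative sketch (not part of this changeset): callers can pass the object form
// (the wrapper reads query.sql and query.timeout), in which case the hard timeout is
// 110% of the requested one; plain string queries fall back to DATABASE.TIMEOUT
// (180s by default). Hypothetical usage:
//   await DB.query({ sql: 'SELECT hash FROM blocks ORDER BY height DESC LIMIT 1', timeout: 5000 });
// would be force-rejected after roughly 5.5 seconds if the pool never answers.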
public async $atomicQuery<T extends RowDataPacket[][] | RowDataPacket[] | OkPacket |
OkPacket[] | ResultSetHeader>(queries: { query, params }[]): Promise<[T, FieldPacket[]][]>
{
const pool = await this.getPool();
return pool.query(query, params);
const connection = await pool.getConnection();
try {
await connection.beginTransaction();
const results: [T, FieldPacket[]][] = [];
for (const query of queries) {
const result = await this.query(query.query, query.params, connection) as [T, FieldPacket[]];
results.push(result);
}
await connection.commit();
return results;
} catch (e) {
logger.err('Could not complete db transaction, rolling back: ' + (e instanceof Error ? e.message : e));
connection.rollback();
connection.release();
throw e;
} finally {
connection.release();
}
}
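// Illustrative sketch (not part of this changeset): $atomicQuery commits only if every
// statement in the batch succeeds and rolls the whole transaction back otherwise.
// Hypothetical usage, with table names as they appear elsewhere in this diff:
//   await DB.$atomicQuery([
//     { query: 'DELETE FROM blocks_audits WHERE hash = ?', params: [hash] },
//     { query: 'DELETE FROM blocks_templates WHERE id = ?', params: [hash] },
//   ]);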
public async checkDbConnection() {

View File

@@ -2,6 +2,7 @@ import express from 'express';
import { Application, Request, Response, NextFunction } from 'express';
import * as http from 'http';
import * as WebSocket from 'ws';
import bitcoinApi from './api/bitcoin/bitcoin-api-factory';
import cluster from 'cluster';
import DB from './database';
import config from './config';
@@ -45,7 +46,8 @@ class Server {
private wss: WebSocket.Server | undefined;
private server: http.Server | undefined;
private app: Application;
private currentBackendRetryInterval = 5;
private currentBackendRetryInterval = 1;
private backendRetryCount = 0;
private maxHeapSize: number = 0;
private heapLogInterval: number = 60;
@@ -120,7 +122,7 @@ class Server {
await poolsUpdater.updatePoolsJson(); // Needs to be done before loading the disk cache because we sometimes wipe it
await syncAssets.syncAssets$();
if (config.MEMPOOL.ENABLED) {
diskCache.loadMempoolCache();
await diskCache.$loadMempoolCache();
}
if (config.STATISTICS.ENABLED && config.DATABASE.ENABLED && cluster.isPrimary) {
@@ -148,7 +150,7 @@ class Server {
if (config.BISQ.ENABLED) {
bisq.startBisqService();
bisq.setPriceCallbackFunction((price) => websocketHandler.setExtraInitProperties('bsq-price', price));
bisq.setPriceCallbackFunction((price) => websocketHandler.setExtraInitData('bsq-price', price));
blocks.setNewBlockCallback(bisq.handleNewBitcoinBlock.bind(bisq));
bisqMarkets.startBisqService();
}
@@ -167,6 +169,7 @@ class Server {
}
async runMainUpdateLoop(): Promise<void> {
const start = Date.now();
try {
try {
await memPool.$updateMemPoolInfo();
@@ -178,22 +181,28 @@ class Server {
logger.debug(msg);
}
}
await blocks.$updateBlocks();
await memPool.$updateMempool();
const newMempool = await bitcoinApi.$getRawMempool();
const numHandledBlocks = await blocks.$updateBlocks();
if (numHandledBlocks === 0) {
await memPool.$updateMempool(newMempool);
}
indexer.$run();
setTimeout(this.runMainUpdateLoop.bind(this), config.MEMPOOL.POLL_RATE_MS);
this.currentBackendRetryInterval = 5;
// rerun immediately if we skipped the mempool update, otherwise wait POLL_RATE_MS
const elapsed = Date.now() - start;
const remainingTime = Math.max(0, config.MEMPOOL.POLL_RATE_MS - elapsed)
setTimeout(this.runMainUpdateLoop.bind(this), numHandledBlocks > 0 ? 0 : remainingTime);
this.backendRetryCount = 0;
} catch (e: any) {
let loggerMsg = `Exception in runMainUpdateLoop(). Retrying in ${this.currentBackendRetryInterval} sec.`;
this.backendRetryCount++;
let loggerMsg = `Exception in runMainUpdateLoop() (count: ${this.backendRetryCount}). Retrying in ${this.currentBackendRetryInterval} sec.`;
loggerMsg += ` Reason: ${(e instanceof Error ? e.message : e)}.`;
if (e?.stack) {
loggerMsg += ` Stack trace: ${e.stack}`;
}
// When we get a first Exception, only `logger.debug` it and retry after 5 seconds
// From the second Exception, `logger.warn` the Exception and increase the retry delay
// Maximum retry delay is 60 seconds
if (this.currentBackendRetryInterval > 5) {
if (this.backendRetryCount >= 5) {
logger.warn(loggerMsg);
mempool.setOutOfSync();
} else {
@@ -203,8 +212,8 @@ class Server {
logger.debug(`AxiosError: ${e?.message}`);
}
setTimeout(this.runMainUpdateLoop.bind(this), 1000 * this.currentBackendRetryInterval);
this.currentBackendRetryInterval *= 2;
this.currentBackendRetryInterval = Math.min(this.currentBackendRetryInterval, 60);
} finally {
diskCache.unlock();
}
}
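// Illustrative sketch (not part of this changeset): with the new 1-second base
// interval, the doubling and the 60-second cap above give a retry schedule of
// roughly 1s, 2s, 4s, 8s, 16s, 32s, 60s, 60s, ... i.e. delay(n) = min(2^(n-1), 60)
// seconds for the n-th consecutive failure, and the warning plus the out-of-sync
// flag only kick in from the fifth consecutive failure (backendRetryCount >= 5).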
@@ -237,7 +246,7 @@ class Server {
websocketHandler.setupConnectionHandling();
if (config.MEMPOOL.ENABLED) {
statistics.setNewStatisticsEntryCallback(websocketHandler.handleNewStatistic.bind(websocketHandler));
memPool.setAsyncMempoolChangedCallback(websocketHandler.handleMempoolChange.bind(websocketHandler));
memPool.setAsyncMempoolChangedCallback(websocketHandler.$handleMempoolChange.bind(websocketHandler));
blocks.setNewAsyncBlockCallback(websocketHandler.handleNewBlock.bind(websocketHandler));
}
priceUpdater.setRatesChangedCallback(websocketHandler.handleNewConversionRates.bind(websocketHandler));
@@ -275,7 +284,7 @@ class Server {
if (!this.warnedHeapCritical && this.maxHeapSize > warnThreshold) {
this.warnedHeapCritical = true;
logger.warn(`Used ${(this.maxHeapSize / stats.heap_size_limit).toFixed(2)}% of heap limit (${formatBytes(this.maxHeapSize, byteUnits, true)} / ${formatBytes(stats.heap_size_limit, byteUnits)})!`);
logger.warn(`Used ${(this.maxHeapSize / stats.heap_size_limit * 100).toFixed(2)}% of heap limit (${formatBytes(this.maxHeapSize, byteUnits, true)} / ${formatBytes(stats.heap_size_limit, byteUnits)})!`);
}
if (this.lastHeapLogTime === null || (now - this.lastHeapLogTime) > (this.heapLogInterval * 1000)) {
logger.debug(`Memory usage: ${formatBytes(this.maxHeapSize, byteUnits)} / ${formatBytes(stats.heap_size_limit, byteUnits)}`);

View File

@@ -6,6 +6,8 @@ import logger from './logger';
import bitcoinClient from './api/bitcoin/bitcoin-client';
import priceUpdater from './tasks/price-updater';
import PricesRepository from './repositories/PricesRepository';
import config from './config';
import auditReplicator from './replication/AuditReplication';
export interface CoreIndex {
name: string;
@@ -72,7 +74,7 @@ class Indexer {
return;
}
if (task === 'blocksPrices' && !this.tasksRunning.includes(task)) {
if (task === 'blocksPrices' && !this.tasksRunning.includes(task) && !['testnet', 'signet'].includes(config.MEMPOOL.NETWORK)) {
this.tasksRunning.push(task);
const lastestPriceId = await PricesRepository.$getLatestPriceId();
if (priceUpdater.historyInserted === false || lastestPriceId === null) {
@@ -134,6 +136,8 @@ class Indexer {
await mining.$generatePoolHashrateHistory();
await blocks.$generateBlocksSummariesDatabase();
await blocks.$generateCPFPDatabase();
await blocks.$generateAuditStats();
await auditReplicator.$sync();
} catch (e) {
this.indexerRunning = false;
logger.err(`Indexer failed, trying again in 10 seconds. Reason: ` + (e instanceof Error ? e.message : e));

View File

@@ -69,6 +69,10 @@ class Logger {
this.network = this.getNetwork();
}
public updateNetwork(): void {
this.network = this.getNetwork();
}
private addprio(prio): void {
this[prio] = (function(_this) {
return function(msg, tag?: string) {

View File

@@ -19,6 +19,7 @@ export interface PoolInfo {
blockCount: number;
slug: string;
avgMatchRate: number | null;
avgFeeDelta: number | null;
}
export interface PoolStats extends PoolInfo {
@@ -32,13 +33,19 @@ export interface BlockAudit {
hash: string,
missingTxs: string[],
freshTxs: string[],
sigopTxs: string[],
fullrbfTxs: string[],
addedTxs: string[],
matchRate: number,
expectedFees?: number,
expectedWeight?: number,
}
export interface AuditScore {
hash: string,
matchRate?: number,
expectedFees?: number
expectedWeight?: number
}
export interface MempoolBlock {
@@ -58,6 +65,7 @@ export interface MempoolBlockWithTransactions extends MempoolBlock {
export interface MempoolBlockDelta {
added: TransactionStripped[];
removed: string[];
changed: { txid: string, rate: number | undefined }[];
}
interface VinStrippedToScriptsig {
@@ -79,25 +87,54 @@ export interface TransactionExtended extends IEsploraApi.Transaction {
descendants?: Ancestor[];
bestDescendant?: BestDescendant | null;
cpfpChecked?: boolean;
deleteAfter?: number;
position?: {
block: number,
vsize: number,
};
uid?: number;
}
export interface MempoolTransactionExtended extends TransactionExtended {
order: number;
sigops: number;
adjustedVsize: number;
adjustedFeePerVsize: number;
inputs?: number[];
lastBoosted?: number;
}
export interface AuditTransaction {
txid: string;
uid: number;
fee: number;
weight: number;
feePerVsize: number;
effectiveFeePerVsize: number;
vin: string[];
sigops: number;
inputs: number[];
relativesSet: boolean;
ancestorMap: Map<string, AuditTransaction>;
ancestorMap: Map<number, AuditTransaction>;
children: Set<AuditTransaction>;
ancestorFee: number;
ancestorWeight: number;
ancestorSigops: number;
score: number;
used: boolean;
modified: boolean;
modifiedNode: HeapNode<AuditTransaction>;
dependencyRate?: number;
}
export interface CompactThreadTransaction {
uid: number;
fee: number;
weight: number;
sigops: number;
feePerVsize: number;
effectiveFeePerVsize: number;
inputs: number[];
cpfpRoot?: number;
cpfpChecked?: boolean;
dirty?: boolean;
}
export interface ThreadTransaction {
@@ -106,7 +143,7 @@ export interface ThreadTransaction {
weight: number;
feePerVsize: number;
effectiveFeePerVsize?: number;
vin: string[];
inputs: number[];
cpfpRoot?: string;
cpfpChecked?: boolean;
}
@@ -145,6 +182,7 @@ export interface TransactionStripped {
fee: number;
vsize: number;
value: number;
rate?: number; // effective fee rate
}
export interface BlockExtension {
@@ -153,6 +191,8 @@ export interface BlockExtension {
feeRange: number[]; // fee rate percentiles
reward: number;
matchRate: number | null;
expectedFees: number | null;
expectedWeight: number | null;
similarity?: number;
pool: {
id: number; // Note - This is the `unique_id`, not to mix with the auto increment `id`
@@ -189,6 +229,7 @@ export interface BlockExtension {
*/
export interface BlockExtended extends IEsploraApi.Block {
extras: BlockExtension;
canonical?: string;
}
export interface BlockSummary {
@@ -196,6 +237,15 @@ export interface BlockSummary {
transactions: TransactionStripped[];
}
export interface AuditSummary extends BlockAudit {
timestamp?: number,
size?: number,
weight?: number,
tx_count?: number,
transactions: TransactionStripped[];
template?: TransactionStripped[];
}
export interface BlockPrice {
height: number;
priceId: number;
@@ -214,6 +264,28 @@ export interface MempoolStats {
tx_count: number;
}
export interface EffectiveFeeStats {
medianFee: number; // median effective fee rate
feeRange: number[]; // 2nd, 10th, 25th, 50th, 75th, 90th, 98th percentiles
}
export interface WorkingEffectiveFeeStats extends EffectiveFeeStats {
minFee: number;
maxFee: number;
}
export interface CpfpCluster {
root: string,
height: number,
txs: Ancestor[],
effectiveFeePerVsize: number,
}
export interface CpfpSummary {
transactions: TransactionExtended[];
clusters: CpfpCluster[];
}
export interface Statistic {
id?: number;
added: string;
@@ -309,9 +381,11 @@ export interface IDifficultyAdjustment {
remainingBlocks: number;
remainingTime: number;
previousRetarget: number;
previousTime: number;
nextRetargetHeight: number;
timeAvg: number;
timeOffset: number;
expectedBlocks: number;
}
export interface IndexedDifficultyAdjustment {

View File

@@ -0,0 +1,134 @@
import DB from '../database';
import logger from '../logger';
import { AuditSummary } from '../mempool.interfaces';
import blocksAuditsRepository from '../repositories/BlocksAuditsRepository';
import blocksSummariesRepository from '../repositories/BlocksSummariesRepository';
import { $sync } from './replicator';
import config from '../config';
import { Common } from '../api/common';
import blocks from '../api/blocks';
const BATCH_SIZE = 16;
/**
* Syncs missing block template and audit data from trusted servers
*/
class AuditReplication {
inProgress: boolean = false;
skip: Set<string> = new Set();
public async $sync(): Promise<void> {
if (!config.REPLICATION.ENABLED || !config.REPLICATION.AUDIT) {
// replication not enabled
return;
}
if (this.inProgress) {
logger.info(`AuditReplication sync already in progress`, 'Replication');
return;
}
this.inProgress = true;
const missingAudits = await this.$getMissingAuditBlocks();
logger.debug(`Fetching missing audit data for ${missingAudits.length} blocks from trusted servers`, 'Replication');
let totalSynced = 0;
let totalMissed = 0;
let loggerTimer = Date.now();
// process missing audits in batches of BATCH_SIZE
for (let i = 0; i < missingAudits.length; i += BATCH_SIZE) {
const slice = missingAudits.slice(i, i + BATCH_SIZE);
const results = await Promise.all(slice.map(hash => this.$syncAudit(hash)));
const synced = results.reduce((total, status) => status ? total + 1 : total, 0);
totalSynced += synced;
totalMissed += (slice.length - synced);
if (Date.now() - loggerTimer > 10000) {
loggerTimer = Date.now();
logger.info(`Found ${totalSynced} / ${totalSynced + totalMissed} of ${missingAudits.length} missing audits`, 'Replication');
}
await Common.sleep$(1000);
}
logger.debug(`Fetched ${totalSynced} audits, ${totalMissed} still missing`, 'Replication');
this.inProgress = false;
}
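// Illustrative sketch (not part of this changeset): the loop above issues up to
// BATCH_SIZE (16) audit fetches concurrently, then sleeps one second before the next
// group so trusted servers are not hammered. The same bounded-concurrency pattern in
// isolation, with hypothetical names:
async function processInBatches<T>(items: T[], batchSize: number, worker: (item: T) => Promise<boolean>): Promise<number> {
  let succeeded = 0;
  for (let i = 0; i < items.length; i += batchSize) {
    const results = await Promise.all(items.slice(i, i + batchSize).map(worker));
    succeeded += results.filter(Boolean).length;
    await new Promise(resolve => setTimeout(resolve, 1000)); // rate limit between batches
  }
  return succeeded;
}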
private async $syncAudit(hash: string): Promise<boolean> {
if (this.skip.has(hash)) {
// we already know none of our trusted servers have this audit
return false;
}
let success = false;
// start with a random server so load is uniformly spread
const syncResult = await $sync(`/api/v1/block/${hash}/audit-summary`);
if (syncResult) {
if (syncResult.data?.template?.length) {
await this.$saveAuditData(hash, syncResult.data);
logger.info(`Imported audit data from ${syncResult.server} for block ${syncResult.data.height} (${hash})`);
success = true;
}
if (!syncResult.data && !syncResult.exists) {
this.skip.add(hash);
}
}
return success;
}
private async $getMissingAuditBlocks(): Promise<string[]> {
try {
const startHeight = config.REPLICATION.AUDIT_START_HEIGHT || 0;
const [rows]: any[] = await DB.query(`
SELECT auditable.hash, auditable.height
FROM (
SELECT hash, height
FROM blocks
WHERE height >= ?
) AS auditable
LEFT JOIN blocks_audits ON auditable.hash = blocks_audits.hash
WHERE blocks_audits.hash IS NULL
ORDER BY auditable.height DESC
`, [startHeight]);
return rows.map(row => row.hash);
} catch (e: any) {
logger.err(`Cannot fetch missing audit blocks from db. Reason: ` + (e instanceof Error ? e.message : e));
throw e;
}
}
private async $saveAuditData(blockHash: string, auditSummary: AuditSummary): Promise<void> {
// save audit & template to DB
await blocksSummariesRepository.$saveTemplate({
height: auditSummary.height,
template: {
id: blockHash,
transactions: auditSummary.template || []
}
});
await blocksAuditsRepository.$saveAudit({
hash: blockHash,
height: auditSummary.height,
time: auditSummary.timestamp || auditSummary.time,
missingTxs: auditSummary.missingTxs || [],
addedTxs: auditSummary.addedTxs || [],
freshTxs: auditSummary.freshTxs || [],
sigopTxs: auditSummary.sigopTxs || [],
fullrbfTxs: auditSummary.fullrbfTxs || [],
matchRate: auditSummary.matchRate,
expectedFees: auditSummary.expectedFees,
expectedWeight: auditSummary.expectedWeight,
});
// add missing data to cached blocks
const cachedBlock = blocks.getBlocks().find(block => block.id === blockHash);
if (cachedBlock) {
cachedBlock.extras.matchRate = auditSummary.matchRate;
cachedBlock.extras.expectedFees = auditSummary.expectedFees || null;
cachedBlock.extras.expectedWeight = auditSummary.expectedWeight || null;
}
}
}
export default new AuditReplication();

View File

@@ -0,0 +1,70 @@
import config from '../config';
import backendInfo from '../api/backend-info';
import axios, { AxiosResponse } from 'axios';
import { SocksProxyAgent } from 'socks-proxy-agent';
import * as https from 'https';
export async function $sync(path): Promise<{ data?: any, exists: boolean, server?: string }> {
// start with a random server so load is uniformly spread
let allMissing = true;
const offset = Math.floor(Math.random() * config.REPLICATION.SERVERS.length);
for (let i = 0; i < config.REPLICATION.SERVERS.length; i++) {
const server = config.REPLICATION.SERVERS[(i + offset) % config.REPLICATION.SERVERS.length];
// don't query ourselves
if (server === backendInfo.getBackendInfo().hostname) {
continue;
}
try {
const result = await query(`https://${server}${path}`);
if (result) {
return { data: result, exists: true, server };
}
} catch (e: any) {
if (e?.response?.status === 404) {
// this server is also missing this data
} else {
// something else went wrong
allMissing = false;
}
}
}
return { exists: !allMissing };
}
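// Illustrative sketch (not part of this changeset): starting at a random offset and
// wrapping around with the modulo visits each configured server at most once while
// spreading first-choice load evenly across them. Hypothetical caller, reusing the
// audit-summary path from AuditReplication above:
async function fetchAuditSummaryFromPeers(hash: string): Promise<object | null> {
  const result = await $sync(`/api/v1/block/${hash}/audit-summary`);
  if (result.exists && result.data) {
    console.log(`got audit summary for ${hash} from ${result.server}`);
    return result.data;
  }
  return null; // either no trusted server has it, or the requests failed
}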
export async function query(path): Promise<object> {
type axiosOptions = {
headers: {
'User-Agent': string
};
timeout: number;
httpsAgent?: https.Agent;
};
const axiosOptions: axiosOptions = {
headers: {
'User-Agent': (config.MEMPOOL.USER_AGENT === 'mempool') ? `mempool/v${backendInfo.getBackendInfo().version}` : `${config.MEMPOOL.USER_AGENT}`
},
timeout: config.SOCKS5PROXY.ENABLED ? 30000 : 10000
};
if (config.SOCKS5PROXY.ENABLED) {
const socksOptions = {
agentOptions: {
keepAlive: true,
},
hostname: config.SOCKS5PROXY.HOST,
port: config.SOCKS5PROXY.PORT,
username: config.SOCKS5PROXY.USERNAME || 'circuit0',
password: config.SOCKS5PROXY.PASSWORD,
};
axiosOptions.httpsAgent = new SocksProxyAgent(socksOptions);
}
const data: AxiosResponse = await axios.get(path, axiosOptions);
if (data.statusText === 'error' || !data.data) {
throw new Error(`${data.status}`);
}
return data.data;
}

View File

@@ -6,20 +6,32 @@ import { BlockAudit, AuditScore } from '../mempool.interfaces';
class BlocksAuditRepositories {
public async $saveAudit(audit: BlockAudit): Promise<void> {
try {
await DB.query(`INSERT INTO blocks_audits(time, height, hash, missing_txs, added_txs, fresh_txs, match_rate)
VALUE (FROM_UNIXTIME(?), ?, ?, ?, ?, ?, ?)`, [audit.time, audit.height, audit.hash, JSON.stringify(audit.missingTxs),
JSON.stringify(audit.addedTxs), JSON.stringify(audit.freshTxs), audit.matchRate]);
await DB.query(`INSERT INTO blocks_audits(time, height, hash, missing_txs, added_txs, fresh_txs, sigop_txs, fullrbf_txs, match_rate, expected_fees, expected_weight)
VALUE (FROM_UNIXTIME(?), ?, ?, ?, ?, ?, ?, ?, ?, ?, ?)`, [audit.time, audit.height, audit.hash, JSON.stringify(audit.missingTxs),
JSON.stringify(audit.addedTxs), JSON.stringify(audit.freshTxs), JSON.stringify(audit.sigopTxs), JSON.stringify(audit.fullrbfTxs), audit.matchRate, audit.expectedFees, audit.expectedWeight]);
} catch (e: any) {
if (e.errno === 1062) { // ER_DUP_ENTRY - This scenario is possible upon node backend restart
logger.debug(`Cannot save block audit for block ${audit.hash} because it has already been indexed, ignoring`);
} else {
logger.err(`Cannot save block audit into db. Reason: ` + (e instanceof Error ? e.message : e));
throw e;
}
}
}
public async $getBlockPredictionsHistory(div: number, interval: string | null): Promise<any> {
public async $setSummary(hash: string, expectedFees: number, expectedWeight: number) {
try {
await DB.query(`
UPDATE blocks_audits SET
expected_fees = ?,
expected_weight = ?
WHERE hash = ?
`, [expectedFees, expectedWeight, hash]);
} catch (e: any) {
logger.err(`Cannot update block audit in db. Reason: ` + (e instanceof Error ? e.message : e));
}
}
public async $getBlocksHealthHistory(div: number, interval: string | null): Promise<any> {
try {
let query = `SELECT UNIX_TIMESTAMP(time) as time, height, match_rate FROM blocks_audits`;
@@ -32,17 +44,17 @@ class BlocksAuditRepositories {
const [rows] = await DB.query(query);
return rows;
} catch (e: any) {
logger.err(`Cannot fetch block prediction history. Reason: ` + (e instanceof Error ? e.message : e));
logger.err(`Cannot fetch blocks health history. Reason: ` + (e instanceof Error ? e.message : e));
throw e;
}
}
public async $getPredictionsCount(): Promise<number> {
public async $getBlocksHealthCount(): Promise<number> {
try {
const [rows] = await DB.query(`SELECT count(hash) as count FROM blocks_audits`);
return rows[0].count;
} catch (e: any) {
logger.err(`Cannot fetch block prediction history. Reason: ` + (e instanceof Error ? e.message : e));
logger.err(`Cannot fetch blocks health count. Reason: ` + (e instanceof Error ? e.message : e));
throw e;
}
}
@@ -50,12 +62,18 @@ class BlocksAuditRepositories {
public async $getBlockAudit(hash: string): Promise<any> {
try {
const [rows]: any[] = await DB.query(
`SELECT blocks.height, blocks.hash as id, UNIX_TIMESTAMP(blocks.blockTimestamp) as timestamp, blocks.size,
blocks.weight, blocks.tx_count,
transactions, template, missing_txs as missingTxs, added_txs as addedTxs, fresh_txs as freshTxs, match_rate as matchRate
`SELECT blocks_audits.height, blocks_audits.hash as id, UNIX_TIMESTAMP(blocks_audits.time) as timestamp,
template,
missing_txs as missingTxs,
added_txs as addedTxs,
fresh_txs as freshTxs,
sigop_txs as sigopTxs,
fullrbf_txs as fullrbfTxs,
match_rate as matchRate,
expected_fees as expectedFees,
expected_weight as expectedWeight
FROM blocks_audits
JOIN blocks ON blocks.hash = blocks_audits.hash
JOIN blocks_summaries ON blocks_summaries.id = blocks_audits.hash
JOIN blocks_templates ON blocks_templates.id = blocks_audits.hash
WHERE blocks_audits.hash = "${hash}"
`);
@@ -63,12 +81,11 @@ class BlocksAuditRepositories {
rows[0].missingTxs = JSON.parse(rows[0].missingTxs);
rows[0].addedTxs = JSON.parse(rows[0].addedTxs);
rows[0].freshTxs = JSON.parse(rows[0].freshTxs);
rows[0].transactions = JSON.parse(rows[0].transactions);
rows[0].sigopTxs = JSON.parse(rows[0].sigopTxs);
rows[0].fullrbfTxs = JSON.parse(rows[0].fullrbfTxs);
rows[0].template = JSON.parse(rows[0].template);
if (rows[0].transactions.length) {
return rows[0];
}
return rows[0];
}
return null;
} catch (e: any) {
@@ -80,7 +97,7 @@ class BlocksAuditRepositories {
public async $getBlockAuditScore(hash: string): Promise<AuditScore> {
try {
const [rows]: any[] = await DB.query(
`SELECT hash, match_rate as matchRate
`SELECT hash, match_rate as matchRate, expected_fees as expectedFees, expected_weight as expectedWeight
FROM blocks_audits
WHERE blocks_audits.hash = "${hash}"
`);
@@ -94,7 +111,7 @@ class BlocksAuditRepositories {
public async $getBlockAuditScores(maxHeight: number, minHeight: number): Promise<AuditScore[]> {
try {
const [rows]: any[] = await DB.query(
`SELECT hash, match_rate as matchRate
`SELECT hash, match_rate as matchRate, expected_fees as expectedFees, expected_weight as expectedWeight
FROM blocks_audits
WHERE blocks_audits.height BETWEEN ? AND ?
`, [minHeight, maxHeight]);
@@ -104,6 +121,32 @@ class BlocksAuditRepositories {
throw e;
}
}
public async $getBlocksWithoutSummaries(): Promise<string[]> {
try {
const [fromRows]: any[] = await DB.query(`
SELECT height
FROM blocks_audits
WHERE expected_fees IS NULL
ORDER BY height DESC
LIMIT 1
`);
if (!fromRows?.length) {
return [];
}
const fromHeight = fromRows[0].height;
const [idRows]: any[] = await DB.query(`
SELECT hash
FROM blocks_audits
WHERE height <= ?
ORDER BY height DESC
`, [fromHeight]);
return idRows.map(row => row.hash);
} catch (e: any) {
logger.err(`Cannot fetch block audit from db. Reason: ` + (e instanceof Error ? e.message : e));
throw e;
}
}
}
export default new BlocksAuditRepositories();

View File

@@ -1,4 +1,4 @@
import { BlockExtended, BlockExtension, BlockPrice } from '../mempool.interfaces';
import { BlockExtended, BlockExtension, BlockPrice, EffectiveFeeStats } from '../mempool.interfaces';
import DB from '../database';
import logger from '../logger';
import { Common } from '../api/common';
@@ -13,6 +13,48 @@ import chainTips from '../api/chain-tips';
import blocks from '../api/blocks';
import BlocksAuditsRepository from './BlocksAuditsRepository';
interface DatabaseBlock {
id: string;
height: number;
version: number;
timestamp: number;
bits: number;
nonce: number;
difficulty: number;
merkle_root: string;
tx_count: number;
size: number;
weight: number;
previousblockhash: string;
mediantime: number;
totalFees: number;
medianFee: number;
feeRange: string;
reward: number;
poolId: number;
poolName: string;
poolSlug: string;
avgFee: number;
avgFeeRate: number;
coinbaseRaw: string;
coinbaseAddress: string;
coinbaseSignature: string;
coinbaseSignatureAscii: string;
avgTxSize: number;
totalInputs: number;
totalOutputs: number;
totalOutputAmt: number;
medianFeeAmt: number;
feePercentiles: string;
segwitTotalTxs: number;
segwitTotalSize: number;
segwitTotalWeight: number;
header: string;
utxoSetChange: number;
utxoSetSize: number;
totalInputAmt: number;
}
const BLOCK_DB_FIELDS = `
blocks.hash AS id,
blocks.height,
@@ -52,7 +94,7 @@ const BLOCK_DB_FIELDS = `
blocks.header,
blocks.utxoset_change AS utxoSetChange,
blocks.utxoset_size AS utxoSetSize,
blocks.total_input_amt AS totalInputAmts
blocks.total_input_amt AS totalInputAmt
`;
class BlocksRepository {
@@ -171,6 +213,32 @@ class BlocksRepository {
}
}
/**
* Update missing fee amounts fields
*
* @param blockHash
* @param feeAmtPercentiles
* @param medianFeeAmt
*/
public async $updateFeeAmounts(blockHash: string, feeAmtPercentiles, medianFeeAmt) : Promise<void> {
try {
const query = `
UPDATE blocks
SET fee_percentiles = ?, median_fee_amt = ?
WHERE hash = ?
`;
const params: any[] = [
JSON.stringify(feeAmtPercentiles),
medianFeeAmt,
blockHash
];
await DB.query(query, params);
} catch (e: any) {
logger.err(`Cannot update fee amounts for block ${blockHash}. Reason: ` + (e instanceof Error ? e.message : e));
throw e;
}
}
/**
* Get all block height that have not been indexed between [startHeight, endHeight]
*/
@@ -333,7 +401,7 @@ class BlocksRepository {
/**
* Get average block health for all blocks for a single pool
*/
public async $getAvgBlockHealthPerPoolId(poolId: number): Promise<number> {
public async $getAvgBlockHealthPerPoolId(poolId: number): Promise<number | null> {
const params: any[] = [];
const query = `
SELECT AVG(blocks_audits.match_rate) AS avg_match_rate
@@ -345,8 +413,8 @@ class BlocksRepository {
try {
const [rows] = await DB.query(query, params);
if (!rows[0] || !rows[0].avg_match_rate) {
return 0;
if (!rows[0] || rows[0].avg_match_rate == null) {
return null;
}
return Math.round(rows[0].avg_match_rate * 100) / 100;
} catch (e) {
@@ -432,7 +500,7 @@ class BlocksRepository {
const blocks: BlockExtended[] = [];
for (const block of rows) {
blocks.push(await this.formatDbBlockIntoExtendedBlock(block));
blocks.push(await this.formatDbBlockIntoExtendedBlock(block as DatabaseBlock));
}
return blocks;
@@ -459,37 +527,13 @@ class BlocksRepository {
return null;
}
return await this.formatDbBlockIntoExtendedBlock(rows[0]);
return await this.formatDbBlockIntoExtendedBlock(rows[0] as DatabaseBlock);
} catch (e) {
logger.err(`Cannot get indexed block ${height}. Reason: ` + (e instanceof Error ? e.message : e));
throw e;
}
}
/**
* Get one block by hash
*/
public async $getBlockByHash(hash: string): Promise<object | null> {
try {
const query = `
SELECT ${BLOCK_DB_FIELDS}
FROM blocks
JOIN pools ON blocks.pool_id = pools.id
WHERE hash = ?;
`;
const [rows]: any[] = await DB.query(query, [hash]);
if (rows.length <= 0) {
return null;
}
return await this.formatDbBlockIntoExtendedBlock(rows[0]);
} catch (e) {
logger.err(`Cannot get indexed block ${hash}. Reason: ` + (e instanceof Error ? e.message : e));
throw e;
}
}
/**
* Return blocks difficulty
*/
@@ -533,19 +577,6 @@ class BlocksRepository {
}
}
/**
* Return blocks height
*/
public async $getBlocksHeightsAndTimestamp(): Promise<object[]> {
try {
const [rows]: any[] = await DB.query(`SELECT height, blockTimestamp as timestamp FROM blocks`);
return rows;
} catch (e) {
logger.err('Cannot get blocks height and timestamp from the db. Reason: ' + (e instanceof Error ? e.message : e));
throw e;
}
}
/**
* Get general block stats
*/
@@ -599,7 +630,6 @@ class BlocksRepository {
if (blocks[idx].previous_block_hash !== blocks[idx - 1].hash) {
logger.warn(`Chain divergence detected at block ${blocks[idx - 1].height}`);
await this.$deleteBlocksFrom(blocks[idx - 1].height);
await BlocksSummariesRepository.$deleteBlocksFrom(blocks[idx - 1].height);
await HashratesRepository.$deleteHashratesFromTimestamp(blocks[idx - 1].timestamp - 604800);
await DifficultyAdjustmentsRepository.$deleteAdjustementsFromHeight(blocks[idx - 1].height);
return false;
@@ -619,7 +649,7 @@ class BlocksRepository {
* Delete blocks from the database from blockHeight
*/
public async $deleteBlocksFrom(blockHeight: number) {
logger.info(`Delete newer blocks from height ${blockHeight} from the database`);
logger.info(`Delete newer blocks from height ${blockHeight} from the database`, logger.tags.mining);
try {
await DB.query(`DELETE FROM blocks where height >= ${blockHeight}`);
@@ -834,7 +864,7 @@ class BlocksRepository {
/**
* Get all blocks which have not been linked to a price yet
*/
public async $getBlocksWithoutPrice(): Promise<object[]> {
try {
const [rows]: any[] = await DB.query(`
SELECT UNIX_TIMESTAMP(blocks.blockTimestamp) as timestamp, blocks.height
@@ -846,7 +876,7 @@ class BlocksRepository {
return rows;
} catch (e) {
logger.err('Cannot get blocks height and timestamp from the db. Reason: ' + (e instanceof Error ? e.message : e));
throw e;
return [];
}
}
@@ -866,7 +896,6 @@ class BlocksRepository {
logger.debug(`Cannot save blocks prices for blocks [${blockPrices[0].height} to ${blockPrices[blockPrices.length - 1].height}] because it has already been indexed, ignoring`);
} else {
logger.err(`Cannot save blocks prices for blocks [${blockPrices[0].height} to ${blockPrices[blockPrices.length - 1].height}] into db. Reason: ` + (e instanceof Error ? e.message : e));
throw e;
}
}
}
@@ -885,7 +914,7 @@ class BlocksRepository {
return blocks;
} catch (e) {
logger.err(`Cannot get blocks with missing coinstatsindex. Reason: ` + (e instanceof Error ? e.message : e));
throw e;
return [];
}
}
@@ -908,13 +937,32 @@ class BlocksRepository {
}
}
/**
* Save indexed effective fee statistics
*
* @param id
* @param feeStats
*/
public async $saveEffectiveFeeStats(id: string, feeStats: EffectiveFeeStats): Promise<void> {
try {
await DB.query(`
UPDATE blocks SET median_fee = ?, fee_span = ?
WHERE hash = ?`,
[feeStats.medianFee, JSON.stringify(feeStats.feeRange), id]
);
} catch (e) {
logger.err(`Cannot update block fee stats. Reason: ` + (e instanceof Error ? e.message : e));
throw e;
}
}
/**
* Convert a mysql block row into a BlockExtended. Note that the
* dbBlk object param must contain the correct fields
*
* @param dbBlk
*/
private async formatDbBlockIntoExtendedBlock(dbBlk: any): Promise<BlockExtended> {
private async formatDbBlockIntoExtendedBlock(dbBlk: DatabaseBlock): Promise<BlockExtended> {
const blk: Partial<BlockExtended> = {};
const extras: Partial<BlockExtension> = {};
@@ -970,14 +1018,19 @@ class BlocksRepository {
// Match rate is not part of the blocks table, but it is part of APIs so we must include it
extras.matchRate = null;
extras.expectedFees = null;
extras.expectedWeight = null;
if (config.MEMPOOL.AUDIT) {
const auditScore = await BlocksAuditsRepository.$getBlockAuditScore(dbBlk.id);
if (auditScore != null) {
extras.matchRate = auditScore.matchRate;
extras.expectedFees = auditScore.expectedFees;
extras.expectedWeight = auditScore.expectedWeight;
}
}
// If we're missing block summary related fields, check if we can populate them on the fly now
// This is for example triggered upon re-org
if (Common.blocksSummariesIndexingEnabled() &&
(extras.medianFeeAmt === null || extras.feePercentiles === null))
{
@@ -985,11 +1038,12 @@ class BlocksRepository {
if (extras.feePercentiles === null) {
const block = await bitcoinClient.getBlock(dbBlk.id, 2);
const summary = blocks.summarizeBlock(block);
await BlocksSummariesRepository.$saveSummary({ height: block.height, mined: summary });
await BlocksSummariesRepository.$saveTransactions(dbBlk.height, dbBlk.id, summary.transactions);
extras.feePercentiles = await BlocksSummariesRepository.$getFeePercentilesByBlockId(dbBlk.id);
}
if (extras.feePercentiles !== null) {
extras.medianFeeAmt = extras.feePercentiles[3];
await this.$updateFeeAmounts(dbBlk.id, extras.feePercentiles, extras.medianFeeAmt);
}
}
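The backfill above treats element 3 of the fee percentile array as the block's median fee. As a rough, hypothetical sketch (the exact layout returned by `$getFeePercentilesByBlockId` is an assumption here), such a 7-point `[min, 10th, 25th, median, 75th, 90th, max]` array could be derived from per-transaction fees like this:

```typescript
// Hypothetical helper, not part of this diff: build a 7-point percentile array
// whose index 3 is the median, matching the feePercentiles[3] read above.
function computeFeePercentiles(fees: number[]): number[] | null {
  if (!fees.length) {
    return null;
  }
  const sorted = [...fees].sort((a, b) => a - b);
  const at = (p: number): number => sorted[Math.floor((sorted.length - 1) * p)];
  return [sorted[0], at(0.1), at(0.25), at(0.5), at(0.75), at(0.9), sorted[sorted.length - 1]];
}

// computeFeePercentiles([1, 2, 3, 10, 50]) => [1, 1, 2, 3, 10, 10, 50]; index 3 (the median) is 3.
```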

View File

@@ -1,6 +1,6 @@
import DB from '../database';
import logger from '../logger';
import { BlockSummary } from '../mempool.interfaces';
import { BlockSummary, TransactionStripped } from '../mempool.interfaces';
class BlocksSummariesRepository {
public async $getByBlockId(id: string): Promise<BlockSummary | undefined> {
@@ -17,23 +17,17 @@ class BlocksSummariesRepository {
return undefined;
}
public async $saveSummary(params: { height: number, mined?: BlockSummary}) {
const blockId = params.mined?.id;
public async $saveTransactions(blockHeight: number, blockId: string, transactions: TransactionStripped[]): Promise<void> {
try {
const transactions = JSON.stringify(params.mined?.transactions || []);
const transactionsStr = JSON.stringify(transactions);
await DB.query(`
INSERT INTO blocks_summaries (height, id, transactions, template)
VALUE (?, ?, ?, ?)
ON DUPLICATE KEY UPDATE
transactions = ?
`, [params.height, blockId, transactions, '[]', transactions]);
INSERT INTO blocks_summaries
SET height = ?, transactions = ?, id = ?
ON DUPLICATE KEY UPDATE transactions = ?`,
[blockHeight, transactionsStr, blockId, transactionsStr]);
} catch (e: any) {
if (e.errno === 1062) { // ER_DUP_ENTRY - This scenario is possible upon node backend restart
logger.debug(`Cannot save block summary for ${blockId} because it has already been indexed, ignoring`);
} else {
logger.debug(`Cannot save block summary for ${blockId}. Reason: ${e instanceof Error ? e.message : e}`);
throw e;
}
logger.debug(`Cannot save block summary transactions for ${blockId}. Reason: ${e instanceof Error ? e.message : e}`);
throw e;
}
}
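With this refactor, mined-block summaries and projected block templates are stored separately: `$saveTransactions` writes the stripped transaction list into `blocks_summaries`, while templates move to their own `blocks_templates` table (see the template insert and `$getTemplate` below). A hypothetical caller, sketched under the assumption that the same `bitcoinClient` and `blocks` helpers used in BlocksRepository.ts are in scope:

```typescript
// Hypothetical re-index helper; blockHash/blockHeight are placeholders, and the
// bitcoinClient / blocks / BlocksSummariesRepository imports are assumed to match
// the ones used in BlocksRepository.ts.
async function reindexBlockSummary(blockHash: string, blockHeight: number): Promise<BlockSummary | undefined> {
  const block = await bitcoinClient.getBlock(blockHash, 2); // verbosity 2: full transaction objects
  const summary = blocks.summarizeBlock(block);
  await BlocksSummariesRepository.$saveTransactions(blockHeight, blockHash, summary.transactions);
  // The template captured at mining time can still be read back from blocks_templates:
  return BlocksSummariesRepository.$getTemplate(blockHash);
}
```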
@@ -42,21 +36,35 @@ class BlocksSummariesRepository {
try {
const transactions = JSON.stringify(params.template?.transactions || []);
await DB.query(`
INSERT INTO blocks_summaries (height, id, transactions, template)
VALUE (?, ?, ?, ?)
INSERT INTO blocks_templates (id, template)
VALUE (?, ?)
ON DUPLICATE KEY UPDATE
template = ?
`, [params.height, blockId, '[]', transactions, transactions]);
`, [blockId, transactions, transactions]);
} catch (e: any) {
if (e.errno === 1062) { // ER_DUP_ENTRY - This scenario is possible upon node backend restart
logger.debug(`Cannot save block template for ${blockId} because it has already been indexed, ignoring`);
} else {
logger.debug(`Cannot save block template for ${blockId}. Reason: ${e instanceof Error ? e.message : e}`);
throw e;
logger.warn(`Cannot save block template for ${blockId}. Reason: ${e instanceof Error ? e.message : e}`);
}
}
}
public async $getTemplate(id: string): Promise<BlockSummary | undefined> {
try {
const [templates]: any[] = await DB.query(`SELECT * from blocks_templates WHERE id = ?`, [id]);
if (templates.length > 0) {
return {
id: templates[0].id,
transactions: JSON.parse(templates[0].template),
};
}
} catch (e) {
logger.err(`Cannot get block template for block id ${id}. Reason: ` + (e instanceof Error ? e.message : e));
}
return undefined;
}
public async $getIndexedSummariesId(): Promise<string[]> {
try {
const [rows]: any[] = await DB.query(`SELECT id from blocks_summaries`);
@@ -68,19 +76,6 @@ class BlocksSummariesRepository {
return [];
}
/**
* Delete blocks from the database from blockHeight
*/
public async $deleteBlocksFrom(blockHeight: number) {
logger.info(`Delete newer blocks summary from height ${blockHeight} from the database`);
try {
await DB.query(`DELETE FROM blocks_summaries where height >= ${blockHeight}`);
} catch (e) {
logger.err('Cannot delete indexed blocks summaries. Reason: ' + (e instanceof Error ? e.message : e));
}
}
/**
* Get the fee percentiles if the block has already been indexed, [] otherwise
*

View File

@@ -1,62 +1,19 @@
import cluster, { Cluster } from 'cluster';
import { RowDataPacket } from 'mysql2';
import DB from '../database';
import logger from '../logger';
import { Ancestor } from '../mempool.interfaces';
import { Ancestor, CpfpCluster } from '../mempool.interfaces';
import transactionRepository from '../repositories/TransactionRepository';
class CpfpRepository {
public async $saveCluster(clusterRoot: string, height: number, txs: Ancestor[], effectiveFeePerVsize: number): Promise<boolean> {
if (!txs[0]) {
return false;
}
// skip clusters of transactions with the same fees
const roundedEffectiveFee = Math.round(effectiveFeePerVsize * 100) / 100;
const equalFee = txs.reduce((acc, tx) => {
return (acc && Math.round(((tx.fee || 0) / (tx.weight / 4)) * 100) / 100 === roundedEffectiveFee);
}, true);
if (equalFee) {
return false;
}
public async $batchSaveClusters(clusters: { root: string, height: number, txs: Ancestor[], effectiveFeePerVsize: number }[]): Promise<boolean> {
try {
const packedTxs = Buffer.from(this.pack(txs));
await DB.query(
`
INSERT INTO compact_cpfp_clusters(root, height, txs, fee_rate)
VALUE (UNHEX(?), ?, ?, ?)
ON DUPLICATE KEY UPDATE
height = ?,
txs = ?,
fee_rate = ?
`,
[clusterRoot, height, packedTxs, effectiveFeePerVsize, height, packedTxs, effectiveFeePerVsize]
);
const maxChunk = 10;
let chunkIndex = 0;
while (chunkIndex < txs.length) {
const chunk = txs.slice(chunkIndex, chunkIndex + maxChunk).map(tx => {
return { txid: tx.txid, cluster: clusterRoot };
});
await transactionRepository.$batchSetCluster(chunk);
chunkIndex += maxChunk;
}
return true;
} catch (e: any) {
logger.err(`Cannot save cpfp cluster into db. Reason: ` + (e instanceof Error ? e.message : e));
throw e;
}
}
public async $batchSaveClusters(clusters: { root: string, height: number, txs: any, effectiveFeePerVsize: number}[]): Promise<boolean> {
try {
const clusterValues: any[] = [];
const txs: any[] = [];
const clusterValues: [string, number, Buffer, number][] = [];
const txs: { txid: string, cluster: string }[] = [];
for (const cluster of clusters) {
if (cluster.txs?.length > 1) {
if (cluster.txs?.length) {
const roundedEffectiveFee = Math.round(cluster.effectiveFeePerVsize * 100) / 100;
const equalFee = cluster.txs.reduce((acc, tx) => {
const equalFee = cluster.txs.length > 1 && cluster.txs.reduce((acc, tx) => {
return (acc && Math.round(((tx.fee || 0) / (tx.weight / 4)) * 100) / 100 === roundedEffectiveFee);
}, true);
if (!equalFee) {
@@ -77,16 +34,10 @@ class CpfpRepository {
return false;
}
const queries: { query, params }[] = [];
const maxChunk = 100;
let chunkIndex = 0;
// insert transactions in batches of up to 100 rows
while (chunkIndex < txs.length) {
const chunk = txs.slice(chunkIndex, chunkIndex + maxChunk);
await transactionRepository.$batchSetCluster(chunk);
chunkIndex += maxChunk;
}
chunkIndex = 0;
// insert clusters in batches of up to 100 rows
while (chunkIndex < clusterValues.length) {
const chunk = clusterValues.slice(chunkIndex, chunkIndex + maxChunk);
@@ -98,12 +49,23 @@ class CpfpRepository {
return (' (UNHEX(?), ?, ?, ?)');
}) + ';';
const values = chunk.flat();
await DB.query(
queries.push({
query,
values
);
params: values,
});
chunkIndex += maxChunk;
}
chunkIndex = 0;
// insert transactions in batches of up to 100 rows
while (chunkIndex < txs.length) {
const chunk = txs.slice(chunkIndex, chunkIndex + maxChunk);
queries.push(transactionRepository.buildBatchSetQuery(chunk));
chunkIndex += maxChunk;
}
await DB.$atomicQuery(queries);
return true;
} catch (e: any) {
logger.err(`Cannot save cpfp clusters into db. Reason: ` + (e instanceof Error ? e.message : e));
@@ -111,7 +73,7 @@ class CpfpRepository {
}
}
public async $getCluster(clusterRoot: string): Promise<Cluster | void> {
public async $getCluster(clusterRoot: string): Promise<CpfpCluster | void> {
const [clusterRows]: any = await DB.query(
`
SELECT *
@@ -122,6 +84,7 @@ class CpfpRepository {
);
const cluster = clusterRows[0];
if (cluster?.txs) {
cluster.effectiveFeePerVsize = cluster.fee_rate;
cluster.txs = this.unpack(cluster.txs);
return cluster;
}
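For reference, the batched writes above split both the cluster rows and the transaction rows into chunks of at most 100, build one parameterized multi-row INSERT per chunk, and hand the whole list to `DB.$atomicQuery`. A minimal sketch of the chunking step; the exact statement prefix is not visible in this hunk (whether it uses INSERT IGNORE or ON DUPLICATE KEY UPDATE is an assumption), so treat it as illustrative:

```typescript
// Illustrative only: turn cluster rows into <= 100-row parameterized INSERT statements.
function buildChunkedClusterInserts(
  rows: [string, number, Buffer, number][],   // [root, height, packedTxs, feeRate]
  maxChunk = 100
): { query: string, params: any[] }[] {
  const queries: { query: string, params: any[] }[] = [];
  for (let i = 0; i < rows.length; i += maxChunk) {
    const chunk = rows.slice(i, i + maxChunk);
    const query = `
      INSERT IGNORE INTO compact_cpfp_clusters (root, height, txs, fee_rate)
      VALUES ` + chunk.map(() => '(UNHEX(?), ?, ?, ?)').join(', ') + ';';
    queries.push({ query, params: chunk.flat() });
  }
  return queries;
}
```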

View File

@@ -220,7 +220,7 @@ class HashratesRepository {
* Delete hashrates from the database from timestamp
*/
public async $deleteHashratesFromTimestamp(timestamp: number) {
logger.info(`Delete newer hashrates from timestamp ${new Date(timestamp * 1000).toUTCString()} from the database`);
logger.info(`Delete newer hashrates from timestamp ${new Date(timestamp * 1000).toUTCString()} from the database`, logger.tags.mining);
try {
await DB.query(`DELETE FROM hashrates WHERE hashrate_timestamp >= FROM_UNIXTIME(?)`, [timestamp]);

View File

@@ -39,7 +39,8 @@ class PoolsRepository {
pools.name AS name,
pools.link AS link,
slug,
AVG(blocks_audits.match_rate) AS avgMatchRate
AVG(blocks_audits.match_rate) AS avgMatchRate,
AVG((CAST(blocks.fees as SIGNED) - CAST(blocks_audits.expected_fees as SIGNED)) / NULLIF(CAST(blocks_audits.expected_fees as SIGNED), 0)) AS avgFeeDelta
FROM blocks
JOIN pools on pools.id = pool_id
LEFT JOIN blocks_audits ON blocks_audits.height = blocks.height
@@ -98,7 +99,7 @@ class PoolsRepository {
if (parse) {
rows[0].regexes = JSON.parse(rows[0].regexes);
}
if (['testnet', 'signet'].includes(config.MEMPOOL.NETWORK)) {
if (['testnet', 'signet', 'regtest'].includes(config.MEMPOOL.NETWORK)) {
rows[0].addresses = []; // pools-v2.json only contains mainnet addresses
} else if (parse) {
rows[0].addresses = JSON.parse(rows[0].addresses);
@@ -130,7 +131,7 @@ class PoolsRepository {
if (parse) {
rows[0].regexes = JSON.parse(rows[0].regexes);
}
if (['testnet', 'signet'].includes(config.MEMPOOL.NETWORK)) {
if (['testnet', 'signet', 'regtest'].includes(config.MEMPOOL.NETWORK)) {
rows[0].addresses = []; // pools.json only contains mainnet addresses
} else if (parse) {
rows[0].addresses = JSON.parse(rows[0].addresses);
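Back in the pool-ranking query, the new `avgFeeDelta` column averages `(fees - expected_fees) / expected_fees`, with `NULLIF` keeping blocks that have no expected fees out of the division. A quick TypeScript check of the same arithmetic (illustrative only; SQL's AVG likewise ignores NULL rows):

```typescript
// feeDelta = (fees - expectedFees) / expectedFees, skipping blocks without audit data.
function avgFeeDelta(blocks: { fees: number, expectedFees: number | null }[]): number | null {
  const deltas = blocks
    .filter(b => b.expectedFees)                   // mirrors NULLIF(..., 0): skip 0 / NULL
    .map(b => (b.fees - (b.expectedFees as number)) / (b.expectedFees as number));
  return deltas.length ? deltas.reduce((a, d) => a + d, 0) / deltas.length : null;
}

// avgFeeDelta([{ fees: 105, expectedFees: 100 }, { fees: 95, expectedFees: 100 }]) === 0
```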

View File

@@ -160,7 +160,7 @@ class PricesRepository {
// Compute fiat exchange rates
let latestPrice = rates[0] as ApiPrice;
if (latestPrice.USD === -1) {
if (!latestPrice || latestPrice.USD === -1) {
latestPrice = priceUpdater.getEmptyPricesObj();
}

View File

@@ -25,9 +25,8 @@ class TransactionRepository {
}
}
public async $batchSetCluster(txs): Promise<void> {
try {
let query = `
public buildBatchSetQuery(txs: { txid: string, cluster: string }[]): { query, params } {
let query = `
INSERT IGNORE INTO compact_transactions
(
txid,
@@ -35,13 +34,22 @@ class TransactionRepository {
)
VALUES
`;
query += txs.map(tx => {
return (' (UNHEX(?), UNHEX(?))');
}) + ';';
const values = txs.map(tx => [tx.txid, tx.cluster]).flat();
query += txs.map(tx => {
return (' (UNHEX(?), UNHEX(?))');
}) + ';';
const values = txs.map(tx => [tx.txid, tx.cluster]).flat();
return {
query,
params: values,
};
}
public async $batchSetCluster(txs): Promise<void> {
try {
const query = this.buildBatchSetQuery(txs);
await DB.query(
query,
values
query.query,
query.params,
);
} catch (e: any) {
logger.err(`Cannot save cpfp transactions into db. Reason: ` + (e instanceof Error ? e.message : e));
@@ -105,6 +113,7 @@ class TransactionRepository {
return {
descendants,
ancestors,
effectiveFeePerVsize: cluster.effectiveFeePerVsize,
};
}
}
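As a usage illustration for the extracted helper (hypothetical values; the import paths mirror those used elsewhere in this diff), a caller can either run the batch directly or queue it for an atomic multi-query run:

```typescript
import DB from '../database';
import transactionRepository from '../repositories/TransactionRepository';

// Hypothetical usage; txid/cluster values are placeholders.
async function example(): Promise<void> {
  const batch = transactionRepository.buildBatchSetQuery([
    { txid: 'aa'.repeat(32), cluster: 'bb'.repeat(32) },
  ]);
  // Run it on its own...
  await DB.query(batch.query, batch.params);
  // ...or push it into a queries[] array and execute everything at once with
  // DB.$atomicQuery(queries), as CpfpRepository.$batchSaveClusters now does.
}
```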

View File

@@ -27,7 +27,7 @@ class ForensicsService {
private async $runTasks(): Promise<void> {
try {
logger.info(`Running forensics scans`);
logger.debug(`Running forensics scans`);
if (config.MEMPOOL.BACKEND === 'esplora') {
await this.$runClosedChannelsForensics(false);
@@ -73,7 +73,7 @@ class ForensicsService {
let progress = 0;
try {
logger.info(`Started running closed channel forensics...`);
logger.debug(`Started running closed channel forensics...`);
let channels;
if (onlyNewChannels) {
channels = await channelsApi.$getClosedChannelsWithoutReason();
@@ -152,11 +152,11 @@ class ForensicsService {
++progress;
const elapsedSeconds = Math.round((new Date().getTime() / 1000) - this.loggerTimer);
if (elapsedSeconds > 10) {
logger.info(`Updating channel closed channel forensics ${progress}/${channels.length}`);
logger.debug(`Updating channel closed channel forensics ${progress}/${channels.length}`);
this.loggerTimer = new Date().getTime() / 1000;
}
}
logger.info(`Closed channels forensics scan complete.`);
logger.debug(`Closed channels forensics scan complete.`);
} catch (e) {
logger.err('$runClosedChannelsForensics() error: ' + (e instanceof Error ? e.message : e));
}
@@ -217,7 +217,7 @@ class ForensicsService {
let progress = 0;
try {
logger.info(`Started running open channel forensics...`);
logger.debug(`Started running open channel forensics...`);
const channels = await channelsApi.$getChannelsWithoutSourceChecked();
for (const openChannel of channels) {
@@ -257,7 +257,7 @@ class ForensicsService {
++progress;
const elapsedSeconds = Math.round((new Date().getTime() / 1000) - this.loggerTimer);
if (elapsedSeconds > 10) {
logger.info(`Updating opened channel forensics ${progress}/${channels?.length}`);
logger.debug(`Updating opened channel forensics ${progress}/${channels?.length}`);
this.loggerTimer = new Date().getTime() / 1000;
this.truncateTempCache();
}
@@ -266,7 +266,7 @@ class ForensicsService {
}
}
logger.info(`Open channels forensics scan complete.`);
logger.debug(`Open channels forensics scan complete.`);
} catch (e) {
logger.err('$runOpenedChannelsForensics() error: ' + (e instanceof Error ? e.message : e));
} finally {

View File

@@ -3,7 +3,6 @@ import logger from '../../logger';
import channelsApi from '../../api/explorer/channels.api';
import bitcoinApi from '../../api/bitcoin/bitcoin-api-factory';
import config from '../../config';
import { IEsploraApi } from '../../api/bitcoin/esplora-api.interface';
import { ILightningApi } from '../../api/lightning/lightning-api.interface';
import { $lookupNodeLocation } from './sync-tasks/node-locations';
import lightningApi from '../../api/lightning/lightning-api-factory';
@@ -269,7 +268,11 @@ class NetworkSyncService {
}
private async $scanForClosedChannels(): Promise<void> {
if (this.closedChannelsScanBlock === blocks.getCurrentBlockHeight()) {
let currentBlockHeight = blocks.getCurrentBlockHeight();
if (config.MEMPOOL.ENABLED === false) { // https://github.com/mempool/mempool/issues/3582
currentBlockHeight = await bitcoinApi.$getBlockHeightTip();
}
if (this.closedChannelsScanBlock === currentBlockHeight) {
logger.debug(`We've already scanned closed channels for this block, skipping.`);
return;
}
@@ -283,7 +286,7 @@ class NetworkSyncService {
} else {
log += ` for the first time`;
}
logger.info(`${log}`, logger.tags.ln);
logger.debug(`${log}`, logger.tags.ln);
const channels = await channelsApi.$getChannelsByStatus([0, 1]);
for (const channel of channels) {
@@ -300,12 +303,12 @@ class NetworkSyncService {
++progress;
const elapsedSeconds = Math.round((new Date().getTime() / 1000) - this.loggerTimer);
if (elapsedSeconds > config.LIGHTNING.LOGGER_UPDATE_INTERVAL) {
logger.info(`Checking if channel has been closed ${progress}/${channels.length}`, logger.tags.ln);
logger.debug(`Checking if channel has been closed ${progress}/${channels.length}`, logger.tags.ln);
this.loggerTimer = new Date().getTime() / 1000;
}
}
this.closedChannelsScanBlock = blocks.getCurrentBlockHeight();
this.closedChannelsScanBlock = currentBlockHeight;
logger.debug(`Closed channels scan completed at block ${this.closedChannelsScanBlock}`, logger.tags.ln);
} catch (e) {
logger.err(`$scanForClosedChannels() error: ${e instanceof Error ? e.message : e}`, logger.tags.ln);

View File

@@ -15,16 +15,20 @@ class LightningStatsImporter {
topologiesFolder = config.LIGHTNING.TOPOLOGY_FOLDER;
async $run(): Promise<void> {
const [channels]: any[] = await DB.query('SELECT short_id from channels;');
logger.info(`Caching funding txs for currently existing channels`, logger.tags.ln);
await fundingTxFetcher.$fetchChannelsFundingTxs(channels.map(channel => channel.short_id));
try {
const [channels]: any[] = await DB.query('SELECT short_id from channels;');
logger.info(`Caching funding txs for currently existing channels`, logger.tags.ln);
await fundingTxFetcher.$fetchChannelsFundingTxs(channels.map(channel => channel.short_id));
if (config.MEMPOOL.NETWORK !== 'mainnet' || config.DATABASE.ENABLED === false) {
return;
if (config.MEMPOOL.NETWORK !== 'mainnet' || config.DATABASE.ENABLED === false) {
return;
}
await this.$importHistoricalLightningStats();
await this.$cleanupIncorrectSnapshot();
} catch (e) {
logger.err(`Exception in LightningStatsImporter::$run(). ${e}`);
}
await this.$importHistoricalLightningStats();
await this.$cleanupIncorrectSnapshot();
}
/**

View File

@@ -17,7 +17,7 @@ class PoolsUpdater {
treeUrl: string = config.MEMPOOL.POOLS_JSON_TREE_URL;
public async updatePoolsJson(): Promise<void> {
if (['mainnet', 'testnet', 'signet'].includes(config.MEMPOOL.NETWORK) === false ||
if (['mainnet', 'testnet', 'signet', 'regtest'].includes(config.MEMPOOL.NETWORK) === false ||
config.MEMPOOL.ENABLED === false
) {
return;
@@ -62,7 +62,7 @@ class PoolsUpdater {
if (this.currentSha === null) {
logger.info(`Downloading pools-v2.json for the first time from ${this.poolsUrl} over ${network}`, logger.tags.mining);
} else {
logger.warn(`pools-v2.json is outdated, fetch latest from ${this.poolsUrl} over ${network}`, logger.tags.mining);
logger.warn(`pools-v2.json is outdated, fetching latest from ${this.poolsUrl} over ${network}`, logger.tags.mining);
}
const poolsJson = await this.query(this.poolsUrl);
if (poolsJson === undefined) {
@@ -71,7 +71,7 @@ class PoolsUpdater {
poolsParser.setMiningPools(poolsJson);
if (config.DATABASE.ENABLED === false) { // Don't run db operations
logger.info('Mining pools-v2.json import completed (no database)');
logger.info(`Mining pools-v2.json (${githubSha}) import completed (no database)`);
return;
}
@@ -84,7 +84,7 @@ class PoolsUpdater {
logger.err(`Could not migrate mining pools, rolling back. Exception: ${JSON.stringify(e)}`, logger.tags.mining);
await DB.query('ROLLBACK;');
}
logger.info('PoolsUpdater completed');
logger.info(`Mining pools-v2.json (${githubSha}) import completed`);
} catch (e) {
this.lastRun = now - (oneWeek - oneDay); // Try again in 24h instead of waiting next week

View File

@@ -73,7 +73,7 @@ class PriceUpdater {
}
public async $run(): Promise<void> {
if (config.MEMPOOL.NETWORK === 'signet' || config.MEMPOOL.NETWORK === 'testnet') {
if (['testnet', 'signet', 'regtest'].includes(config.MEMPOOL.NETWORK)) {
// Coins have no value on testnet/signet/regtest, so we want to always show 0
return;
}
@@ -153,6 +153,7 @@ class PriceUpdater {
try {
const p = 60 * 60 * 1000; // milliseconds in an hour
const nowRounded = new Date(Math.round(new Date().getTime() / p) * p); // https://stackoverflow.com/a/28037042
this.latestPrices.time = nowRounded.getTime() / 1000;
await PricesRepository.$savePrices(nowRounded.getTime() / 1000, this.latestPrices);
} catch (e) {
this.lastRun = previousRun + 5 * 60;
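The added line keeps `latestPrices.time` in sync with the hour-rounded timestamp that gets written to the prices table. To make the rounding concrete, a standalone check (not part of the diff; the sample time is arbitrary):

```typescript
// Timestamps are rounded to the nearest hour before being stored.
const p = 60 * 60 * 1000; // milliseconds in an hour
const t = Date.parse('2023-07-21T17:40:00Z');
const nowRounded = new Date(Math.round(t / p) * p);
console.log(nowRounded.toISOString());     // 2023-07-21T18:00:00.000Z
console.log(nowRounded.getTime() / 1000);  // 1689962400 — the value latestPrices.time would hold
```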
@@ -222,7 +223,7 @@ class PriceUpdater {
private async $insertMissingRecentPrices(type: 'hour' | 'day'): Promise<void> {
const existingPriceTimes = await PricesRepository.$getPricesTimes();
logger.info(`Fetching ${type === 'day' ? 'dai' : 'hour'}ly price history from exchanges and saving missing ones into the database`, logger.tags.mining);
logger.debug(`Fetching ${type === 'day' ? 'dai' : 'hour'}ly price history from exchanges and saving missing ones into the database`, logger.tags.mining);
const historicalPrices: PriceHistory[] = [];

View File

@@ -26,4 +26,70 @@ export function formatBytes(bytes: number, toUnit: string, skipUnit = false): st
}
return `${bytes.toFixed(2)}${skipUnit ? '' : ' ' + byteUnits[unitIndex]}`;
}
// https://stackoverflow.com/a/64235212
export function hex2bin(hex: string): string {
if (!hex) {
return '';
}
hex = hex.replace('0x', '').toLowerCase();
let out = '';
for (const c of hex) {
switch (c) {
case '0': out += '0000'; break;
case '1': out += '0001'; break;
case '2': out += '0010'; break;
case '3': out += '0011'; break;
case '4': out += '0100'; break;
case '5': out += '0101'; break;
case '6': out += '0110'; break;
case '7': out += '0111'; break;
case '8': out += '1000'; break;
case '9': out += '1001'; break;
case 'a': out += '1010'; break;
case 'b': out += '1011'; break;
case 'c': out += '1100'; break;
case 'd': out += '1101'; break;
case 'e': out += '1110'; break;
case 'f': out += '1111'; break;
default: return '';
}
}
return out;
}
export function bin2hex(bin: string): string {
if (!bin) {
return '';
}
let out = '';
for (let i = 0; i < bin.length; i += 4) {
const c = bin.substring(i, i + 4);
switch (c) {
case '0000': out += '0'; break;
case '0001': out += '1'; break;
case '0010': out += '2'; break;
case '0011': out += '3'; break;
case '0100': out += '4'; break;
case '0101': out += '5'; break;
case '0110': out += '6'; break;
case '0111': out += '7'; break;
case '1000': out += '8'; break;
case '1001': out += '9'; break;
case '1010': out += 'a'; break;
case '1011': out += 'b'; break;
case '1100': out += 'c'; break;
case '1101': out += 'd'; break;
case '1110': out += 'e'; break;
case '1111': out += 'f'; break;
default: return '';
}
}
return out;
}
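A quick round trip of the two helpers above (illustrative usage only):

```typescript
console.log(hex2bin('1f'));                                  // '00011111'
console.log(bin2hex('00011111'));                            // '1f'
console.log(bin2hex(hex2bin('0xdeadbeef')) === 'deadbeef');  // true
```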

View File

@@ -5,6 +5,7 @@
"types": ["node", "jest"],
"lib": ["es2019", "dom"],
"strict": true,
"skipLibCheck": true,
"noImplicitAny": false,
"sourceMap": false,
"outDir": "dist",

View File

@@ -0,0 +1,3 @@
I hereby accept the terms of the Contributor License Agreement in the CONTRIBUTING.md file of the mempool/mempool git repository as of June 24, 2023.
Signed: 0xflicker

View File

@@ -0,0 +1,3 @@
I hereby accept the terms of the Contributor License Agreement in the CONTRIBUTING.md file of the mempool/mempool git repository as of June 28, 2023.
Signed: bennyhodl

View File

@@ -0,0 +1,3 @@
I hereby accept the terms of the Contributor License Agreement in the CONTRIBUTING.md file of the mempool/mempool git repository as of July 21, 2023.
Signed: devinbileck

View File

@@ -0,0 +1,3 @@
I hereby accept the terms of the Contributor License Agreement in the CONTRIBUTING.md file of the mempool/mempool git repository as of January 25, 2022.
Signed: joostjager

View File

@@ -0,0 +1,3 @@
I hereby accept the terms of the Contributor License Agreement in the CONTRIBUTING.md file of the mempool/mempool git repository as of April 7, 2023.
Signed: learntheropes

View File

@@ -0,0 +1,3 @@
I hereby accept the terms of the Contributor License Agreement in the CONTRIBUTING.md file of the mempool/mempool git repository as of April 8, 2023.
Signed: nothing0012

View File

@@ -0,0 +1,3 @@
I hereby accept the terms of the Contributor License Agreement in the CONTRIBUTING.md file of the mempool/mempool git repository as of July 20, 2023.
Signed: pedromvpg

View File

@@ -0,0 +1,3 @@
I hereby accept the terms of the Contributor License Agreement in the CONTRIBUTING.md file of the mempool/mempool git repository as of June 15, 2023.
Signed: pfoytik

View File

@@ -0,0 +1,3 @@
I hereby accept the terms of the Contributor License Agreement in the CONTRIBUTING.md file of the mempool/mempool git repository as of June 14, 2023.
Signed: secondl1ght

View File

@@ -0,0 +1,3 @@
I hereby accept the terms of the Contributor License Agreement in the CONTRIBUTING.md file of the mempool/mempool git repository as of January 25, 2022.
Signed: vostrnad

View File

@@ -144,8 +144,8 @@ Corresponding `docker-compose.yml` overrides:
MEMPOOL_ADVANCED_GBT_AUDIT: ""
MEMPOOL_ADVANCED_GBT_MEMPOOL: ""
MEMPOOL_CPFP_INDEXING: ""
MAX_BLOCKS_BULK_QUERY: ""
DISK_CACHE_BLOCK_INTERVAL: ""
MEMPOOL_MAX_BLOCKS_BULK_QUERY: ""
MEMPOOL_DISK_CACHE_BLOCK_INTERVAL: ""
...
```
@@ -204,7 +204,9 @@ Corresponding `docker-compose.yml` overrides:
`mempool-config.json`:
```json
"ESPLORA": {
"REST_API_URL": "http://127.0.0.1:3000"
"REST_API_URL": "http://127.0.0.1:3000",
"UNIX_SOCKET_PATH": "/tmp/esplora-socket",
"RETRY_UNIX_SOCKET_AFTER": 30000
},
```
@@ -213,6 +215,8 @@ Corresponding `docker-compose.yml` overrides:
api:
environment:
ESPLORA_REST_API_URL: ""
ESPLORA_UNIX_SOCKET_PATH: ""
ESPLORA_RETRY_UNIX_SOCKET_AFTER: ""
...
```
@@ -265,6 +269,7 @@ Corresponding `docker-compose.yml` overrides:
DATABASE_DATABASE: ""
DATABASE_USERNAME: ""
DATABASE_PASSWORD: ""
DATABASE_TIMEOUT: ""
...
```

View File

@@ -7,7 +7,13 @@ WORKDIR /build
COPY . .
RUN apt-get update
RUN apt-get install -y build-essential python3 pkg-config
RUN apt-get install -y build-essential python3 pkg-config curl ca-certificates
# Install Rust via rustup
RUN CPU_ARCH=$(uname -m); if [ "$CPU_ARCH" = "armv7l" ]; then c_rehash; fi
RUN curl https://sh.rustup.rs -sSf | sh -s -- -y --default-toolchain stable
ENV PATH="/root/.cargo/bin:$PATH"
RUN npm install --omit=dev --omit=optional
RUN npm run package

View File

@@ -25,9 +25,14 @@
"AUDIT": __MEMPOOL_AUDIT__,
"ADVANCED_GBT_AUDIT": __MEMPOOL_ADVANCED_GBT_AUDIT__,
"ADVANCED_GBT_MEMPOOL": __MEMPOOL_ADVANCED_GBT_MEMPOOL__,
"RUST_GBT": __MEMPOOL_RUST_GBT__,
"CPFP_INDEXING": __MEMPOOL_CPFP_INDEXING__,
"MAX_BLOCKS_BULK_QUERY": __MEMPOOL_MAX_BLOCKS_BULK_QUERY__,
"DISK_CACHE_BLOCK_INTERVAL": __MEMPOOL_DISK_CACHE_BLOCK_INTERVAL__
"DISK_CACHE_BLOCK_INTERVAL": __MEMPOOL_DISK_CACHE_BLOCK_INTERVAL__,
"MAX_PUSH_TX_SIZE_WEIGHT": __MEMPOOL_MAX_PUSH_TX_SIZE_WEIGHT__,
"ALLOW_UNREACHABLE": __MEMPOOL_ALLOW_UNREACHABLE__,
"POOLS_JSON_TREE_URL": "__MEMPOOL_POOLS_JSON_TREE_URL__",
"POOLS_JSON_URL": "__MEMPOOL_POOLS_JSON_URL__"
},
"CORE_RPC": {
"HOST": "__CORE_RPC_HOST__",
@@ -42,7 +47,9 @@
"TLS_ENABLED": __ELECTRUM_TLS_ENABLED__
},
"ESPLORA": {
"REST_API_URL": "__ESPLORA_REST_API_URL__"
"REST_API_URL": "__ESPLORA_REST_API_URL__",
"UNIX_SOCKET_PATH": "__ESPLORA_UNIX_SOCKET_PATH__",
"RETRY_UNIX_SOCKET_AFTER": __ESPLORA_RETRY_UNIX_SOCKET_AFTER__
},
"SECOND_CORE_RPC": {
"HOST": "__SECOND_CORE_RPC_HOST__",
@@ -58,7 +65,8 @@
"PORT": __DATABASE_PORT__,
"DATABASE": "__DATABASE_DATABASE__",
"USERNAME": "__DATABASE_USERNAME__",
"PASSWORD": "__DATABASE_PASSWORD__"
"PASSWORD": "__DATABASE_PASSWORD__",
"TIMEOUT": __DATABASE_TIMEOUT__
},
"SYSLOG": {
"ENABLED": __SYSLOG_ENABLED__,
@@ -81,13 +89,15 @@
"STATS_REFRESH_INTERVAL": __LIGHTNING_STATS_REFRESH_INTERVAL__,
"GRAPH_REFRESH_INTERVAL": __LIGHTNING_GRAPH_REFRESH_INTERVAL__,
"LOGGER_UPDATE_INTERVAL": __LIGHTNING_LOGGER_UPDATE_INTERVAL__,
"TOPOLOGY_FOLDER": "__LIGHTNING_TOPOLOGY_FOLDER__"
"TOPOLOGY_FOLDER": "__LIGHTNING_TOPOLOGY_FOLDER__",
"FORENSICS_INTERVAL": __LIGHTNING_FORENSICS_INTERVAL__,
"FORENSICS_RATE_LIMIT": __LIGHTNING_FORENSICS_RATE_LIMIT__
},
"LND": {
"TLS_CERT_PATH": "__LND_TLS_CERT_PATH__",
"MACAROON_PATH": "__LND_MACAROON_PATH__",
"REST_API_URL": "__LND_REST_API_URL__",
"TIMEOUT": "__LND_TIMEOUT__"
"TIMEOUT": __LND_TIMEOUT__
},
"CLIGHTNING": {
"SOCKET": "__CLIGHTNING_SOCKET__"
@@ -117,5 +127,11 @@
"GEOLITE2_CITY": "__MAXMIND_GEOLITE2_CITY__",
"GEOLITE2_ASN": "__MAXMIND_GEOLITE2_ASN__",
"GEOIP2_ISP": "__MAXMIND_GEOIP2_ISP__"
},
"REPLICATION": {
"ENABLED": __REPLICATION_ENABLED__,
"AUDIT": __REPLICATION_AUDIT__,
"AUDIT_START_HEIGHT": __REPLICATION_AUDIT_START_HEIGHT__,
"SERVERS": __REPLICATION_SERVERS__
}
}

View File

@@ -28,9 +28,13 @@ __MEMPOOL_POOLS_JSON_TREE_URL__=${MEMPOOL_POOLS_JSON_TREE_URL:=https://api.githu
__MEMPOOL_AUDIT__=${MEMPOOL_AUDIT:=false}
__MEMPOOL_ADVANCED_GBT_AUDIT__=${MEMPOOL_ADVANCED_GBT_AUDIT:=false}
__MEMPOOL_ADVANCED_GBT_MEMPOOL__=${MEMPOOL_ADVANCED_GBT_MEMPOOL:=false}
__MEMPOOL_RUST_GBT__=${MEMPOOL_RUST_GBT:=false}
__MEMPOOL_CPFP_INDEXING__=${MEMPOOL_CPFP_INDEXING:=false}
__MEMPOOL_MAX_BLOCKS_BULK_QUERY__=${MEMPOOL_MAX_BLOCKS_BULK_QUERY:=0}
__MEMPOOL_DISK_CACHE_BLOCK_INTERVAL__=${MEMPOOL_DISK_CACHE_BLOCK_INTERVAL:=6}
__MEMPOOL_MAX_PUSH_TX_SIZE_WEIGHT__=${MEMPOOL_MAX_PUSH_TX_SIZE_WEIGHT:=4000000}
__MEMPOOL_ALLOW_UNREACHABLE__=${MEMPOOL_ALLOW_UNREACHABLE:=true}
# CORE_RPC
__CORE_RPC_HOST__=${CORE_RPC_HOST:=127.0.0.1}
@@ -46,6 +50,8 @@ __ELECTRUM_TLS_ENABLED__=${ELECTRUM_TLS_ENABLED:=false}
# ESPLORA
__ESPLORA_REST_API_URL__=${ESPLORA_REST_API_URL:=http://127.0.0.1:3000}
__ESPLORA_UNIX_SOCKET_PATH__=${ESPLORA_UNIX_SOCKET_PATH:="null"}
__ESPLORA_RETRY_UNIX_SOCKET_AFTER__=${ESPLORA_RETRY_UNIX_SOCKET_AFTER:=30000}
# SECOND_CORE_RPC
__SECOND_CORE_RPC_HOST__=${SECOND_CORE_RPC_HOST:=127.0.0.1}
@@ -62,6 +68,7 @@ __DATABASE_PORT__=${DATABASE_PORT:=3306}
__DATABASE_DATABASE__=${DATABASE_DATABASE:=mempool}
__DATABASE_USERNAME__=${DATABASE_USERNAME:=mempool}
__DATABASE_PASSWORD__=${DATABASE_PASSWORD:=mempool}
__DATABASE_TIMEOUT__=${DATABASE_TIMEOUT:=180000}
# SYSLOG
__SYSLOG_ENABLED__=${SYSLOG_ENABLED:=false}
@@ -105,6 +112,8 @@ __LIGHTNING_TOPOLOGY_FOLDER__=${LIGHTNING_TOPOLOGY_FOLDER:=""}
__LIGHTNING_STATS_REFRESH_INTERVAL__=${LIGHTNING_STATS_REFRESH_INTERVAL:=600}
__LIGHTNING_GRAPH_REFRESH_INTERVAL__=${LIGHTNING_GRAPH_REFRESH_INTERVAL:=600}
__LIGHTNING_LOGGER_UPDATE_INTERVAL__=${LIGHTNING_LOGGER_UPDATE_INTERVAL:=30}
__LIGHTNING_FORENSICS_INTERVAL__=${LIGHTNING_FORENSICS_INTERVAL:=43200}
__LIGHTNING_FORENSICS_RATE_LIMIT__=${LIGHTNING_FORENSICS_RATE_LIMIT:=20}
# LND
__LND_TLS_CERT_PATH__=${LND_TLS_CERT_PATH:=""}
@@ -121,85 +130,96 @@ __MAXMIND_GEOLITE2_CITY__=${MAXMIND_GEOLITE2_CITY:="/backend/GeoIP/GeoLite2-City
__MAXMIND_GEOLITE2_ASN__=${MAXMIND_GEOLITE2_ASN:="/backend/GeoIP/GeoLite2-ASN.mmdb"}
__MAXMIND_GEOIP2_ISP__=${MAXMIND_GEOIP2_ISP:=""}
# REPLICATION
__REPLICATION_ENABLED__=${REPLICATION_ENABLED:=true}
__REPLICATION_AUDIT__=${REPLICATION_AUDIT:=true}
__REPLICATION_AUDIT_START_HEIGHT__=${REPLICATION_AUDIT_START_HEIGHT:=774000}
__REPLICATION_SERVERS__=${REPLICATION_SERVERS:=[]}
mkdir -p "${__MEMPOOL_CACHE_DIR__}"
sed -i "s/__MEMPOOL_NETWORK__/${__MEMPOOL_NETWORK__}/g" mempool-config.json
sed -i "s/__MEMPOOL_BACKEND__/${__MEMPOOL_BACKEND__}/g" mempool-config.json
sed -i "s/__MEMPOOL_ENABLED__/${__MEMPOOL_ENABLED__}/g" mempool-config.json
sed -i "s/__MEMPOOL_HTTP_PORT__/${__MEMPOOL_HTTP_PORT__}/g" mempool-config.json
sed -i "s/__MEMPOOL_SPAWN_CLUSTER_PROCS__/${__MEMPOOL_SPAWN_CLUSTER_PROCS__}/g" mempool-config.json
sed -i "s!__MEMPOOL_NETWORK__!${__MEMPOOL_NETWORK__}!g" mempool-config.json
sed -i "s!__MEMPOOL_BACKEND__!${__MEMPOOL_BACKEND__}!g" mempool-config.json
sed -i "s!__MEMPOOL_ENABLED__!${__MEMPOOL_ENABLED__}!g" mempool-config.json
sed -i "s!__MEMPOOL_HTTP_PORT__!${__MEMPOOL_HTTP_PORT__}!g" mempool-config.json
sed -i "s!__MEMPOOL_SPAWN_CLUSTER_PROCS__!${__MEMPOOL_SPAWN_CLUSTER_PROCS__}!g" mempool-config.json
sed -i "s!__MEMPOOL_API_URL_PREFIX__!${__MEMPOOL_API_URL_PREFIX__}!g" mempool-config.json
sed -i "s/__MEMPOOL_POLL_RATE_MS__/${__MEMPOOL_POLL_RATE_MS__}/g" mempool-config.json
sed -i "s!__MEMPOOL_POLL_RATE_MS__!${__MEMPOOL_POLL_RATE_MS__}!g" mempool-config.json
sed -i "s!__MEMPOOL_CACHE_DIR__!${__MEMPOOL_CACHE_DIR__}!g" mempool-config.json
sed -i "s/__MEMPOOL_CLEAR_PROTECTION_MINUTES__/${__MEMPOOL_CLEAR_PROTECTION_MINUTES__}/g" mempool-config.json
sed -i "s/__MEMPOOL_RECOMMENDED_FEE_PERCENTILE__/${__MEMPOOL_RECOMMENDED_FEE_PERCENTILE__}/g" mempool-config.json
sed -i "s/__MEMPOOL_BLOCK_WEIGHT_UNITS__/${__MEMPOOL_BLOCK_WEIGHT_UNITS__}/g" mempool-config.json
sed -i "s/__MEMPOOL_INITIAL_BLOCKS_AMOUNT__/${__MEMPOOL_INITIAL_BLOCKS_AMOUNT__}/g" mempool-config.json
sed -i "s/__MEMPOOL_MEMPOOL_BLOCKS_AMOUNT__/${__MEMPOOL_MEMPOOL_BLOCKS_AMOUNT__}/g" mempool-config.json
sed -i "s/__MEMPOOL_INDEXING_BLOCKS_AMOUNT__/${__MEMPOOL_INDEXING_BLOCKS_AMOUNT__}/g" mempool-config.json
sed -i "s/__MEMPOOL_BLOCKS_SUMMARIES_INDEXING__/${__MEMPOOL_BLOCKS_SUMMARIES_INDEXING__}/g" mempool-config.json
sed -i "s/__MEMPOOL_USE_SECOND_NODE_FOR_MINFEE__/${__MEMPOOL_USE_SECOND_NODE_FOR_MINFEE__}/g" mempool-config.json
sed -i "s!__MEMPOOL_CLEAR_PROTECTION_MINUTES__!${__MEMPOOL_CLEAR_PROTECTION_MINUTES__}!g" mempool-config.json
sed -i "s!__MEMPOOL_RECOMMENDED_FEE_PERCENTILE__!${__MEMPOOL_RECOMMENDED_FEE_PERCENTILE__}!g" mempool-config.json
sed -i "s!__MEMPOOL_BLOCK_WEIGHT_UNITS__!${__MEMPOOL_BLOCK_WEIGHT_UNITS__}!g" mempool-config.json
sed -i "s!__MEMPOOL_INITIAL_BLOCKS_AMOUNT__!${__MEMPOOL_INITIAL_BLOCKS_AMOUNT__}!g" mempool-config.json
sed -i "s!__MEMPOOL_MEMPOOL_BLOCKS_AMOUNT__!${__MEMPOOL_MEMPOOL_BLOCKS_AMOUNT__}!g" mempool-config.json
sed -i "s!__MEMPOOL_INDEXING_BLOCKS_AMOUNT__!${__MEMPOOL_INDEXING_BLOCKS_AMOUNT__}!g" mempool-config.json
sed -i "s!__MEMPOOL_BLOCKS_SUMMARIES_INDEXING__!${__MEMPOOL_BLOCKS_SUMMARIES_INDEXING__}!g" mempool-config.json
sed -i "s!__MEMPOOL_USE_SECOND_NODE_FOR_MINFEE__!${__MEMPOOL_USE_SECOND_NODE_FOR_MINFEE__}!g" mempool-config.json
sed -i "s!__MEMPOOL_EXTERNAL_ASSETS__!${__MEMPOOL_EXTERNAL_ASSETS__}!g" mempool-config.json
sed -i "s!__MEMPOOL_EXTERNAL_MAX_RETRY__!${__MEMPOOL_EXTERNAL_MAX_RETRY__}!g" mempool-config.json
sed -i "s!__MEMPOOL_EXTERNAL_RETRY_INTERVAL__!${__MEMPOOL_EXTERNAL_RETRY_INTERVAL__}!g" mempool-config.json
sed -i "s!__MEMPOOL_USER_AGENT__!${__MEMPOOL_USER_AGENT__}!g" mempool-config.json
sed -i "s/__MEMPOOL_STDOUT_LOG_MIN_PRIORITY__/${__MEMPOOL_STDOUT_LOG_MIN_PRIORITY__}/g" mempool-config.json
sed -i "s/__MEMPOOL_AUTOMATIC_BLOCK_REINDEXING__/${__MEMPOOL_AUTOMATIC_BLOCK_REINDEXING__}/g" mempool-config.json
sed -i "s!__MEMPOOL_STDOUT_LOG_MIN_PRIORITY__!${__MEMPOOL_STDOUT_LOG_MIN_PRIORITY__}!g" mempool-config.json
sed -i "s!__MEMPOOL_AUTOMATIC_BLOCK_REINDEXING__!${__MEMPOOL_AUTOMATIC_BLOCK_REINDEXING__}!g" mempool-config.json
sed -i "s!__MEMPOOL_POOLS_JSON_URL__!${__MEMPOOL_POOLS_JSON_URL__}!g" mempool-config.json
sed -i "s!__MEMPOOL_POOLS_JSON_TREE_URL__!${__MEMPOOL_POOLS_JSON_TREE_URL__}!g" mempool-config.json
sed -i "s!__MEMPOOL_AUDIT__!${__MEMPOOL_AUDIT__}!g" mempool-config.json
sed -i "s!__MEMPOOL_ADVANCED_GBT_MEMPOOL__!${__MEMPOOL_ADVANCED_GBT_MEMPOOL__}!g" mempool-config.json
sed -i "s!__MEMPOOL_RUST_GBT__!${__MEMPOOL_GBT__}!g" mempool-config.json
sed -i "s!__MEMPOOL_ADVANCED_GBT_AUDIT__!${__MEMPOOL_ADVANCED_GBT_AUDIT__}!g" mempool-config.json
sed -i "s!__MEMPOOL_CPFP_INDEXING__!${__MEMPOOL_CPFP_INDEXING__}!g" mempool-config.json
sed -i "s!__MEMPOOL_MAX_BLOCKS_BULK_QUERY__!${__MEMPOOL_MAX_BLOCKS_BULK_QUERY__}!g" mempool-config.json
sed -i "s!__MEMPOOL_DISK_CACHE_BLOCK_INTERVAL__!${__MEMPOOL_DISK_CACHE_BLOCK_INTERVAL__}!g" mempool-config.json
sed -i "s!__MEMPOOL_MAX_PUSH_TX_SIZE_WEIGHT__!${__MEMPOOL_MAX_PUSH_TX_SIZE_WEIGHT__}!g" mempool-config.json
sed -i "s!__MEMPOOL_ALLOW_UNREACHABLE__!${__MEMPOOL_ALLOW_UNREACHABLE__}!g" mempool-config.json
sed -i "s/__CORE_RPC_HOST__/${__CORE_RPC_HOST__}/g" mempool-config.json
sed -i "s/__CORE_RPC_PORT__/${__CORE_RPC_PORT__}/g" mempool-config.json
sed -i "s/__CORE_RPC_USERNAME__/${__CORE_RPC_USERNAME__}/g" mempool-config.json
sed -i "s/__CORE_RPC_PASSWORD__/${__CORE_RPC_PASSWORD__}/g" mempool-config.json
sed -i "s/__CORE_RPC_TIMEOUT__/${__CORE_RPC_TIMEOUT__}/g" mempool-config.json
sed -i "s!__CORE_RPC_HOST__!${__CORE_RPC_HOST__}!g" mempool-config.json
sed -i "s!__CORE_RPC_PORT__!${__CORE_RPC_PORT__}!g" mempool-config.json
sed -i "s!__CORE_RPC_USERNAME__!${__CORE_RPC_USERNAME__}!g" mempool-config.json
sed -i "s!__CORE_RPC_PASSWORD__!${__CORE_RPC_PASSWORD__}!g" mempool-config.json
sed -i "s!__CORE_RPC_TIMEOUT__!${__CORE_RPC_TIMEOUT__}!g" mempool-config.json
sed -i "s/__ELECTRUM_HOST__/${__ELECTRUM_HOST__}/g" mempool-config.json
sed -i "s/__ELECTRUM_PORT__/${__ELECTRUM_PORT__}/g" mempool-config.json
sed -i "s/__ELECTRUM_TLS_ENABLED__/${__ELECTRUM_TLS_ENABLED__}/g" mempool-config.json
sed -i "s!__ELECTRUM_HOST__!${__ELECTRUM_HOST__}!g" mempool-config.json
sed -i "s!__ELECTRUM_PORT__!${__ELECTRUM_PORT__}!g" mempool-config.json
sed -i "s!__ELECTRUM_TLS_ENABLED__!${__ELECTRUM_TLS_ENABLED__}!g" mempool-config.json
sed -i "s!__ESPLORA_REST_API_URL__!${__ESPLORA_REST_API_URL__}!g" mempool-config.json
sed -i "s!__ESPLORA_UNIX_SOCKET_PATH__!${__ESPLORA_UNIX_SOCKET_PATH__}!g" mempool-config.json
sed -i "s!__ESPLORA_RETRY_UNIX_SOCKET_AFTER__!${__ESPLORA_RETRY_UNIX_SOCKET_AFTER__}!g" mempool-config.json
sed -i "s/__SECOND_CORE_RPC_HOST__/${__SECOND_CORE_RPC_HOST__}/g" mempool-config.json
sed -i "s/__SECOND_CORE_RPC_PORT__/${__SECOND_CORE_RPC_PORT__}/g" mempool-config.json
sed -i "s/__SECOND_CORE_RPC_USERNAME__/${__SECOND_CORE_RPC_USERNAME__}/g" mempool-config.json
sed -i "s/__SECOND_CORE_RPC_PASSWORD__/${__SECOND_CORE_RPC_PASSWORD__}/g" mempool-config.json
sed -i "s/__SECOND_CORE_RPC_TIMEOUT__/${__SECOND_CORE_RPC_TIMEOUT__}/g" mempool-config.json
sed -i "s!__SECOND_CORE_RPC_HOST__!${__SECOND_CORE_RPC_HOST__}!g" mempool-config.json
sed -i "s!__SECOND_CORE_RPC_PORT__!${__SECOND_CORE_RPC_PORT__}!g" mempool-config.json
sed -i "s!__SECOND_CORE_RPC_USERNAME__!${__SECOND_CORE_RPC_USERNAME__}!g" mempool-config.json
sed -i "s!__SECOND_CORE_RPC_PASSWORD__!${__SECOND_CORE_RPC_PASSWORD__}!g" mempool-config.json
sed -i "s!__SECOND_CORE_RPC_TIMEOUT__!${__SECOND_CORE_RPC_TIMEOUT__}!g" mempool-config.json
sed -i "s/__DATABASE_ENABLED__/${__DATABASE_ENABLED__}/g" mempool-config.json
sed -i "s/__DATABASE_HOST__/${__DATABASE_HOST__}/g" mempool-config.json
sed -i "s!__DATABASE_ENABLED__!${__DATABASE_ENABLED__}!g" mempool-config.json
sed -i "s!__DATABASE_HOST__!${__DATABASE_HOST__}!g" mempool-config.json
sed -i "s!__DATABASE_SOCKET__!${__DATABASE_SOCKET__}!g" mempool-config.json
sed -i "s!__DATABASE_PORT__!${__DATABASE_PORT__}!g" mempool-config.json
sed -i "s!__DATABASE_DATABASE__!${__DATABASE_DATABASE__}!g" mempool-config.json
sed -i "s!__DATABASE_USERNAME__!${__DATABASE_USERNAME__}!g" mempool-config.json
sed -i "s!__DATABASE_PASSWORD__!${__DATABASE_PASSWORD__}!g" mempool-config.json
sed -i "s!__DATABASE_TIMEOUT__!${__DATABASE_TIMEOUT__}!g" mempool-config.json
sed -i "s/__DATABASE_PORT__/${__DATABASE_PORT__}/g" mempool-config.json
sed -i "s/__DATABASE_DATABASE__/${__DATABASE_DATABASE__}/g" mempool-config.json
sed -i "s/__DATABASE_USERNAME__/${__DATABASE_USERNAME__}/g" mempool-config.json
sed -i "s/__DATABASE_PASSWORD__/${__DATABASE_PASSWORD__}/g" mempool-config.json
sed -i "s!__SYSLOG_ENABLED__!${__SYSLOG_ENABLED__}!g" mempool-config.json
sed -i "s!__SYSLOG_HOST__!${__SYSLOG_HOST__}!g" mempool-config.json
sed -i "s!__SYSLOG_PORT__!${__SYSLOG_PORT__}!g" mempool-config.json
sed -i "s!__SYSLOG_MIN_PRIORITY__!${__SYSLOG_MIN_PRIORITY__}!g" mempool-config.json
sed -i "s!__SYSLOG_FACILITY__!${__SYSLOG_FACILITY__}!g" mempool-config.json
sed -i "s/__SYSLOG_ENABLED__/${__SYSLOG_ENABLED__}/g" mempool-config.json
sed -i "s/__SYSLOG_HOST__/${__SYSLOG_HOST__}/g" mempool-config.json
sed -i "s/__SYSLOG_PORT__/${__SYSLOG_PORT__}/g" mempool-config.json
sed -i "s/__SYSLOG_MIN_PRIORITY__/${__SYSLOG_MIN_PRIORITY__}/g" mempool-config.json
sed -i "s/__SYSLOG_FACILITY__/${__SYSLOG_FACILITY__}/g" mempool-config.json
sed -i "s!__STATISTICS_ENABLED__!${__STATISTICS_ENABLED__}!g" mempool-config.json
sed -i "s!__STATISTICS_TX_PER_SECOND_SAMPLE_PERIOD__!${__STATISTICS_TX_PER_SECOND_SAMPLE_PERIOD__}!g" mempool-config.json
sed -i "s/__STATISTICS_ENABLED__/${__STATISTICS_ENABLED__}/g" mempool-config.json
sed -i "s/__STATISTICS_TX_PER_SECOND_SAMPLE_PERIOD__/${__STATISTICS_TX_PER_SECOND_SAMPLE_PERIOD__}/g" mempool-config.json
sed -i "s/__BISQ_ENABLED__/${__BISQ_ENABLED__}/g" mempool-config.json
sed -i "s!__BISQ_ENABLED__!${__BISQ_ENABLED__}!g" mempool-config.json
sed -i "s!__BISQ_DATA_PATH__!${__BISQ_DATA_PATH__}!g" mempool-config.json
sed -i "s/__SOCKS5PROXY_ENABLED__/${__SOCKS5PROXY_ENABLED__}/g" mempool-config.json
sed -i "s/__SOCKS5PROXY_USE_ONION__/${__SOCKS5PROXY_USE_ONION__}/g" mempool-config.json
sed -i "s/__SOCKS5PROXY_HOST__/${__SOCKS5PROXY_HOST__}/g" mempool-config.json
sed -i "s/__SOCKS5PROXY_PORT__/${__SOCKS5PROXY_PORT__}/g" mempool-config.json
sed -i "s/__SOCKS5PROXY_USERNAME__/${__SOCKS5PROXY_USERNAME__}/g" mempool-config.json
sed -i "s/__SOCKS5PROXY_PASSWORD__/${__SOCKS5PROXY_PASSWORD__}/g" mempool-config.json
sed -i "s!__SOCKS5PROXY_ENABLED__!${__SOCKS5PROXY_ENABLED__}!g" mempool-config.json
sed -i "s!__SOCKS5PROXY_USE_ONION__!${__SOCKS5PROXY_USE_ONION__}!g" mempool-config.json
sed -i "s!__SOCKS5PROXY_HOST__!${__SOCKS5PROXY_HOST__}!g" mempool-config.json
sed -i "s!__SOCKS5PROXY_PORT__!${__SOCKS5PROXY_PORT__}!g" mempool-config.json
sed -i "s!__SOCKS5PROXY_USERNAME__!${__SOCKS5PROXY_USERNAME__}!g" mempool-config.json
sed -i "s!__SOCKS5PROXY_PASSWORD__!${__SOCKS5PROXY_PASSWORD__}!g" mempool-config.json
sed -i "s!__PRICE_DATA_SERVER_TOR_URL__!${__PRICE_DATA_SERVER_TOR_URL__}!g" mempool-config.json
sed -i "s!__PRICE_DATA_SERVER_CLEARNET_URL__!${__PRICE_DATA_SERVER_CLEARNET_URL__}!g" mempool-config.json
@@ -218,6 +238,8 @@ sed -i "s!__LIGHTNING_TOPOLOGY_FOLDER__!${__LIGHTNING_TOPOLOGY_FOLDER__}!g" memp
sed -i "s!__LIGHTNING_STATS_REFRESH_INTERVAL__!${__LIGHTNING_STATS_REFRESH_INTERVAL__}!g" mempool-config.json
sed -i "s!__LIGHTNING_GRAPH_REFRESH_INTERVAL__!${__LIGHTNING_GRAPH_REFRESH_INTERVAL__}!g" mempool-config.json
sed -i "s!__LIGHTNING_LOGGER_UPDATE_INTERVAL__!${__LIGHTNING_LOGGER_UPDATE_INTERVAL__}!g" mempool-config.json
sed -i "s!__LIGHTNING_FORENSICS_INTERVAL__!${__LIGHTNING_FORENSICS_INTERVAL__}!g" mempool-config.json
sed -i "s!__LIGHTNING_FORENSICS_RATE_LIMIT__!${__LIGHTNING_FORENSICS_RATE_LIMIT__}!g" mempool-config.json
# LND
sed -i "s!__LND_TLS_CERT_PATH__!${__LND_TLS_CERT_PATH__}!g" mempool-config.json
@@ -234,5 +256,10 @@ sed -i "s!__MAXMIND_GEOLITE2_CITY__!${__MAXMIND_GEOLITE2_CITY__}!g" mempool-conf
sed -i "s!__MAXMIND_GEOLITE2_ASN__!${__MAXMIND_GEOLITE2_ASN__}!g" mempool-config.json
sed -i "s!__MAXMIND_GEOIP2_ISP__!${__MAXMIND_GEOIP2_ISP__}!g" mempool-config.json
# REPLICATION
sed -i "s!__REPLICATION_ENABLED__!${__REPLICATION_ENABLED__}!g" mempool-config.json
sed -i "s!__REPLICATION_AUDIT__!${__REPLICATION_AUDIT__}!g" mempool-config.json
sed -i "s!__REPLICATION_AUDIT_START_HEIGHT__!${__REPLICATION_AUDIT_START_HEIGHT__}!g" mempool-config.json
sed -i "s!__REPLICATION_SERVERS__!${__REPLICATION_SERVERS__}!g" mempool-config.json
node /backend/package/index.js

View File

@@ -10,10 +10,15 @@ cp /etc/nginx/nginx.conf /patch/nginx.conf
sed -i "s/__MEMPOOL_FRONTEND_HTTP_PORT__/${__MEMPOOL_FRONTEND_HTTP_PORT__}/g" /patch/nginx.conf
cat /patch/nginx.conf > /etc/nginx/nginx.conf
if [ "${LIGHTNING_DETECTED_PORT}" != "" ];then
export LIGHTNING=true
fi
# Runtime overrides - read env vars defined in docker compose
__TESTNET_ENABLED__=${TESTNET_ENABLED:=false}
__SIGNET_ENABLED__=${SIGNET_ENABLED:=false}
__REGTEST_ENABLED__=${REGTEST_ENABLED:=false}
__LIQUID_ENABLED__=${LIQUID_ENABLED:=false}
__LIQUID_TESTNET_ENABLED__=${LIQUID_TESTNET_ENABLED:=false}
__BISQ_ENABLED__=${BISQ_ENABLED:=false}
@@ -40,6 +45,7 @@ __HISTORICAL_PRICE__=${HISTORICAL_PRICE:=true}
# Export as environment variables to be used by envsubst
export __TESTNET_ENABLED__
export __SIGNET_ENABLED__
export __REGTEST_ENABLED__
export __LIQUID_ENABLED__
export __LIQUID_TESTNET_ENABLED__
export __BISQ_ENABLED__

Some files were not shown because too many files have changed in this diff.