Compare commits


65 Commits

Author SHA1 Message Date
wiz
139c621a90 Merge remote-tracking branch 'mempooljs/main' into wiz/subtree-merge-mempool.js-repo 2022-01-29 08:02:10 +00:00
softsimon
3f0ab7307a Bumping version to 2.3.0 2022-01-10 15:53:10 +04:00
softsimon
7e8d5547fd Bumping to 2.3.0-dev1 2021-12-22 19:31:25 +04:00
softsimon
029cd8ad48 Bumping package version to v2.2.6 2021-12-20 23:36:19 +04:00
softsimon
f2a8ac7087 Updating endpoint addresses for Liquid and Bisq backend 2021-12-20 23:34:52 +04:00
softsimon
721ed60ba5 Fixing type for image response 2021-12-20 23:34:36 +04:00
softsimon
4893ff1d81 Adding support for Asset Icon API endpoints 2021-12-20 23:25:44 +04:00
softsimon
d238b1a779 Correcting "blocks" typo 2021-12-20 23:14:11 +04:00
softsimon
a9b2f31ae5 Merge pull request #32 from mikeriss/patch-2
Typo fixed
2021-12-20 23:13:32 +04:00
Dmitry Martynenko
ce56c1b03b websocket testnet url fix 2021-12-20 23:08:13 +04:00
softsimon
b5a8a5dc24 Merge pull request #35 from mempool/simon/version-bump-2.2.4
Bumping dependency versions and package version to 2.2.4
2021-12-20 22:26:57 +04:00
softsimon
06f23c5f37 Bumping dependency versions and package version to 2.2.4 2021-12-20 22:25:12 +04:00
softsimon
870bae5180 Merge pull request #30 from Draichi/patch-1
Update README-bitcoin.md
2021-12-08 21:36:58 +04:00
mikeriss
a614ad4119 Typo fixed 2021-11-02 17:15:01 +01:00
mikeriss
3a42e20cf1 Typo (#31)
changed res.blocks to res.block
2021-10-25 09:21:01 -03:00
Lucas
905ef5ed19 Update README-bitcoin.md
Fix 404 on page links
2021-09-06 13:36:58 -03:00
Miguel Medeiros
9fe77e49a2 Change liquid address doc example. 2021-08-10 13:09:38 -03:00
Miguel Medeiros
62902ba8c7 Add liquid.js documentation. 2021-08-10 13:03:23 -03:00
Miguel Medeiros
1873eb41c6 Change version to 2.2.1 for bisq / liquid modules. 2021-08-10 12:39:11 -03:00
Miguel Medeiros
efd0436851 Add bisq.js readme. 2021-08-10 12:38:33 -03:00
Miguel Medeiros
edab1ad3d5 Fix post tx documentation. 2021-08-10 01:44:50 -03:00
Miguel Medeiros
4efc927303 Add bisqJS and liquidJS npm modules. 2021-08-10 01:30:13 -03:00
Miguel Medeiros
acc0c80953 2.2.4 2021-08-04 08:11:27 -03:00
Miguel Medeiros
334ed2b173 Fix difficulty adjustment endpoint. 2021-08-04 08:09:29 -03:00
Miguel Medeiros
24f0ab8f84 2.2.3 2021-07-23 18:08:43 -03:00
Miguel Medeiros
c1f41d6e1c Add difficulty adjustment documentation link. 2021-07-23 18:02:14 -03:00
Miguel Medeiros
aed4bc5fc9 Add difficulty adjustment examples. 2021-07-23 17:58:54 -03:00
Miguel Medeiros
caf8d956b5 Add difficulty adjustment endpoints methods. 2021-07-23 17:37:28 -03:00
Miguel Medeiros
1c32b05abe Add localhost support. 2021-07-23 17:36:40 -03:00
Miguel Medeiros
7dec92a1bf Change documentation link to official mempool.space. 2021-07-22 02:22:03 -03:00
Miguel Medeiros
4cd7665e8c Add blockHeader instructions and examples. 2021-07-22 02:18:15 -03:00
Miguel Medeiros
47fa100786 Add getBlockHeader method. 2021-07-22 02:14:11 -03:00
Miguel Medeiros
f244ad191e Change command build. 2021-07-22 00:55:29 -03:00
Miguel Medeiros
92bf87821a 2.2.2 2021-05-31 21:03:20 -03:00
Miguel Medeiros
920622137b Fix readme instructions. 2021-05-31 20:36:00 -03:00
Miguel Medeiros
b3606e46c1 Fix getAddress endpoint url. 2021-05-31 20:02:16 -03:00
Miguel Medeiros
112e54ae57 2.2.1 (#22) 2021-05-20 12:25:11 -03:00
Miguel Medeiros
e3068c2d8d Bugfix Websocket hostname. (#21)
* Add yarn-error.log to gitignore.

* Add ts-node to devDependencies.
Add script to only build tsc.
Add websocket to keywords.

* Update yarn.lock libs.

* FIX websocket endpoint hostname.
FIX corrent import for all examples.
FIX websocket instrucions on readme.
2021-05-20 12:03:40 -03:00
Miguel Medeiros
0aa3757217 FIX rename package. 2021-04-17 00:34:47 -03:00
Miguel Medeiros
f26adbd982 FIX rename package name. (#19) (#20) 2021-04-17 00:20:32 -03:00
Miguel Medeiros
b63b5d7d92 2.2.0 2021-04-16 22:51:40 -03:00
Miguel Medeiros
ad41063fdb Merge branch 'main' of https://github.com/mempool/mempool-js 2021-04-15 15:54:04 -03:00
Miguel Medeiros
9c483af487 v2.2.0 (#18)
* FIX: getBlocks optional params

* v2.3.0 - new minor version for mempool-js
- Add support for Bisq API
- Add support for Liquid API
- Change the main object to export network objects.
- Change README.md instructions.

* 2.3.0

* FIX wrong npm link. (#15)

* FIX version name v2.2.0 (#17)

Co-authored-by: softsimon <softsimon@users.noreply.github.com>
2021-04-15 15:33:12 -03:00
Miguel Medeiros
5cea5595f9 2.3.0 2021-04-14 17:30:38 -03:00
Miguel Medeiros
c80f82a0b1 v2.3.0 (#12)
* FIX: getBlocks optional params

* v2.3.0 - new minor version for mempool-js
- Add support for Bisq API
- Add support for Liquid API
- Change the main object to export network objects.
- Change README.md instructions.

Co-authored-by: softsimon <softsimon@users.noreply.github.com>
2021-04-14 17:27:28 -03:00
softsimon
70d31f2062 Merge pull request #9 from mempool/feature/add-cnd-link
Add CDN provider to the mempool.js.
2021-04-10 19:38:07 +04:00
Miguel Medeiros
2dd50add4a - Add mempool.js link to all examples.
- Ignore files in dist folder, adding .gitkeep instead.
- Update readme text with CND instructions.
- Update readme badges.
- Update readme typo.
- Update all the html examples with new CDN link.
2021-04-10 12:30:24 -03:00
Miguel Medeiros
760bf97c1b 2.2.1 2021-04-10 11:53:07 -03:00
softsimon
f7ab448cd1 Merge pull request #8 from mempool/bugfix/build-script-standalone-tinyify
FIX: change build script to enable standalone
2021-04-10 18:45:36 +04:00
Miguel Medeiros
5003538319 FIX: change build script to enable standalone 2021-04-10 11:31:16 -03:00
Miguel Medeiros
f39361263a v2.2.0 - new major version for mempool-js (#2)
* - Refactoring code.
- Refactoring folder structure.
- Adding apiEndpoint and websocketEndpoint to Mempool config.
- Adding brownserify feature.
- Adding MIT LICENSE

* - Changing package.json information.
- Reorganizing README.md information.
- Default export for CommonJs and ES6 Modules.
- Changing default variable to mempoolJS.
- Organizing the API and WS providers.
- Splitting websocket connection types: client and server.

* Change version to 2.2.0.
Reorder keywords in alphabetical order.
2021-04-08 10:15:30 -03:00
Miguel Medeiros
3bfae54069 1.1.2 2021-02-10 11:26:46 -03:00
Miguel Medeiros
6efc2aefe4 Change URL website. 2021-02-10 11:26:34 -03:00
Miguel Medeiros
e837bf7be3 1.1.1 2021-02-10 11:20:13 -03:00
Miguel Medeiros
8fe9a67352 Changing readme badges. 2021-02-10 11:20:01 -03:00
Miguel Medeiros
68a11bfae5 1.1.0 2021-02-09 16:23:52 -03:00
Miguel Medeiros
90be85f6f0 Adding websocket. 2021-02-09 16:22:48 -03:00
Miguel Medeiros
b39fb0bd60 1.0.2 2021-02-08 17:13:27 -03:00
Miguel Medeiros
9c5c1b8ff0 Changing badges links. 2021-02-08 17:13:15 -03:00
Miguel Medeiros
eacf5a584d 1.0.1 2021-02-08 17:11:10 -03:00
Miguel Medeiros
6946f22aea Changing Donations addresses. 2021-02-08 17:10:19 -03:00
Miguel Medeiros
caed38420b Changing project name. 2021-02-08 17:03:18 -03:00
Miguel Medeiros
67ad99006f Changing text Instalation. 2021-02-08 17:00:32 -03:00
Miguel Medeiros
9a97b411c9 Adding links to README.md. 2021-02-08 16:59:48 -03:00
Miguel Medeiros
ff53139f78 Init 2021-02-08 16:54:37 -03:00
1073 changed files with 110207 additions and 283031 deletions
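For reference, the commit range above tracks the mempool.js client library. Below is a minimal sketch of how the endpoints touched in this range (the `getBlockHeader` method and the difficulty adjustment endpoints) are typically called; it assumes the package's documented default export, with method names taken from the commit messages above, so treat it as illustrative rather than canonical.

```ts
// Minimal sketch, assuming the documented default export of @mempool/mempool.js.
// Method names are taken from the commit messages above (getBlockHeader,
// difficulty adjustment endpoints); illustrative only.
import mempoolJS from '@mempool/mempool.js';

const run = async (): Promise<void> => {
  const { bitcoin: { blocks, difficulty } } = mempoolJS({ hostname: 'mempool.space' });

  const tipHash = await blocks.getBlocksTipHash();               // hash of the current chain tip
  const header = await blocks.getBlockHeader({ hash: tipHash }); // added in 47fa100786
  const adjustment = await difficulty.getDifficultyAdjustment(); // added in caf8d956b5

  console.log(header, adjustment);
};

run().catch(console.error);
```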

1
.github/CODEOWNERS vendored

@@ -1 +0,0 @@
backend/src/api/database-migration.ts @wiz @softsimon

12
.github/FUNDING.yml vendored Normal file

@@ -0,0 +1,12 @@
# These are supported funding model platforms
github: ['mempool'] # Replace with up to 4 GitHub Sponsors-enabled usernames e.g., [user1, user2]
patreon: # Replace with a single Patreon username
open_collective: # Replace with a single Open Collective username
ko_fi: # Replace with a single Ko-fi username
tidelift: # Replace with a single Tidelift platform-name/package-name e.g., npm/babel
community_bridge: # Replace with a single Community Bridge project-name e.g., cloud-foundry
liberapay: # Replace with a single Liberapay username
issuehunt: # Replace with a single IssueHunt username
otechie: # Replace with a single Otechie username
custom: ['https://mempool.space/sponsor'] # Replace with up to 4 custom sponsorship URLs e.g., ['link1', 'link2']


@@ -1,13 +1,14 @@
---
name: 🐛 Bug Report
about: Report bugs (no support requests, please)
about: Report bugs or other issues to us on GitHub
---
<!--
SUPPORT REQUESTS:
This is for reporting bugs in Mempool, not for support requests.
If you have a support request, please reach out on Matrix:
https://matrix.to/#/#mempool.support:bitcoin.kyoto
If you have a support request, please join our Keybase or Matrix:
https://keybase.io/team/mempool
https://matrix.to/#/#mempool:bitcoin.kyoto
-->
### Description


@@ -1,13 +1,14 @@
---
name: ✨ Feature Request
about: Request a feature or suggest other enhancements
about: Request a feature or suggest other enhancements 💡
---
<!--
SUPPORT REQUESTS:
This is for requesting features in Mempool, not for support requests.
If you have a support request, please reach out on Matrix:
https://matrix.to/#/#mempool.support:bitcoin.kyoto
If you have a support request, please join our Keybase or Matrix:
https://keybase.io/team/mempool
https://matrix.to/#/#mempool:bitcoin.kyoto
-->
### Description


@@ -1,8 +1,8 @@
blank_issues_enabled: false
contact_links:
- name: 🙋 Need help? Chat with us on Matrix
url: https://matrix.to/#/#mempool.support:bitcoin.kyoto
- name: 💬 Need help? Chat with us on Matrix
url: https://matrix.to/#/#mempool:bitcoin.kyoto
about: For support requests or general questions
- name: 💬 Need help? Chat with us on Keybase
url: https://keybase.io/team/mempool
about: For support requests or general questions
- name: 🌐 Want to help with translations? Use Transifex
url: https://www.transifex.com/mempool/mempool
about: All translations work is done on Transifex


@@ -1,68 +0,0 @@
version: 2
updates:
- package-ecosystem: npm
versioning-strategy: increase
directory: "/backend"
schedule:
interval: daily
open-pull-requests-limit: 10
ignore:
- dependency-name: "*"
update-types:
["version-update:semver-major", "version-update:semver-patch"]
allow:
- dependency-type: "production"
- package-ecosystem: npm
directory: "/frontend"
versioning-strategy: increase
groups:
frontend-angular-dependencies:
patterns:
- "@angular*"
- "@ng-*"
- "ngx-*"
frontend-jest-dependencies:
patterns:
- "@types/jest"
- "jest"
frontend-eslint-dependencies:
patterns:
- "@typescript-eslint*"
- "eslint"
schedule:
interval: daily
open-pull-requests-limit: 10
ignore:
- dependency-name: "*"
update-types:
["version-update:semver-major", "version-update:semver-patch"]
allow:
- dependency-type: "production"
- package-ecosystem: docker
directory: "/docker/backend"
schedule:
interval: weekly
ignore:
- dependency-name: "*"
update-types:
["version-update:semver-major", "version-update:semver-patch"]
- package-ecosystem: docker
directory: "/docker/frontend"
schedule:
interval: weekly
ignore:
- dependency-name: "*"
update-types:
["version-update:semver-major", "version-update:semver-patch"]
- package-ecosystem: "github-actions"
directory: "/"
schedule:
interval: weekly
ignore:
- dependency-name: "*"
update-types:
["version-update:semver-major", "version-update:semver-patch"]


@@ -1,6 +0,0 @@
<!--
Please do not open pull requests for translations.
All translations work is done on Transifex:
https://www.transifex.com/mempool/mempool
-->


@@ -1,102 +0,0 @@
name: CI Pipeline for the Backend and Frontend
on:
pull_request:
types: [opened, review_requested, synchronize]
jobs:
backend:
if: "!contains(github.event.pull_request.labels.*.name, 'ops') && !contains(github.head_ref, 'ops/')"
strategy:
matrix:
node: ["16", "17", "18", "20"]
flavor: ["dev", "prod"]
fail-fast: false
runs-on: "ubuntu-latest"
name: Backend (${{ matrix.flavor }}) - node ${{ matrix.node }}
steps:
- name: Checkout
uses: actions/checkout@v3
with:
path: ${{ matrix.node }}/${{ matrix.flavor }}
- name: Setup Node
uses: actions/setup-node@v3
with:
node-version: ${{ matrix.node }}
registry-url: "https://registry.npmjs.org"
- name: Install 1.63.x Rust toolchain
uses: dtolnay/rust-toolchain@1.63
- name: Install
if: ${{ matrix.flavor == 'dev'}}
run: npm ci
working-directory: ${{ matrix.node }}/${{ matrix.flavor }}/backend
- name: Install (Prod dependencies only)
if: ${{ matrix.flavor == 'prod'}}
run: npm ci --omit=dev --omit=optional
working-directory: ${{ matrix.node }}/${{ matrix.flavor }}/backend
- name: Lint
if: ${{ matrix.flavor == 'dev'}}
run: npm run lint
working-directory: ${{ matrix.node }}/${{ matrix.flavor }}/backend
- name: Unit Tests
if: ${{ matrix.flavor == 'dev'}}
run: npm run test:ci
working-directory: ${{ matrix.node }}/${{ matrix.flavor }}/backend
- name: Build
run: npm run build
working-directory: ${{ matrix.node }}/${{ matrix.flavor }}/backend
frontend:
if: "!contains(github.event.pull_request.labels.*.name, 'ops') && !contains(github.head_ref, 'ops/')"
strategy:
matrix:
node: ["16", "17", "18", "20"]
flavor: ["dev", "prod"]
fail-fast: false
runs-on: "ubuntu-latest"
name: Frontend (${{ matrix.flavor }}) - node ${{ matrix.node }}
steps:
- name: Checkout
uses: actions/checkout@v3
with:
path: ${{ matrix.node }}/${{ matrix.flavor }}
- name: Setup Node
uses: actions/setup-node@v3
with:
node-version: ${{ matrix.node }}
registry-url: "https://registry.npmjs.org"
- name: Install (Prod dependencies only)
run: npm ci --omit=dev --omit=optional
if: ${{ matrix.flavor == 'prod'}}
working-directory: ${{ matrix.node }}/${{ matrix.flavor }}/frontend
- name: Install
if: ${{ matrix.flavor == 'dev'}}
run: npm ci
working-directory: ${{ matrix.node }}/${{ matrix.flavor }}/frontend
- name: Lint
if: ${{ matrix.flavor == 'dev'}}
run: npm run lint
working-directory: ${{ matrix.node }}/${{ matrix.flavor }}/frontend
# - name: Test
# run: npm run test
- name: Build
run: npm run build
working-directory: ${{ matrix.node }}/${{ matrix.flavor }}/frontend
env:
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}


@@ -1,64 +1,86 @@
name: Cypress Tests
on:
push:
branches: [master]
pull_request:
types: [opened, synchronize]
on: [push, pull_request]
jobs:
cypress:
if: "!contains(github.event.pull_request.labels.*.name, 'ops') && !contains(github.head_ref, 'ops/')"
runs-on: "ubuntu-latest"
runs-on: ${{ matrix.os }}
strategy:
fail-fast: false
matrix:
module: ["mempool", "liquid", "bisq"]
include:
- module: "mempool"
spec: |
cypress/e2e/mainnet/*.spec.ts
cypress/e2e/signet/*.spec.ts
cypress/e2e/testnet/*.spec.ts
- module: "liquid"
spec: |
cypress/e2e/liquid/liquid.spec.ts
cypress/e2e/liquidtestnet/liquidtestnet.spec.ts
- module: "bisq"
spec: |
cypress/e2e/bisq/bisq.spec.ts
name: E2E tests for ${{ matrix.module }}
containers: [1, 2, 3, 4, 5]
os: ["ubuntu-latest"]
browser: [chrome]
name: E2E tests on ${{ matrix.browser }} - ${{ matrix.os }}
steps:
- name: Checkout
uses: actions/checkout@v3
with:
path: ${{ matrix.module }}
uses: actions/checkout@v2
- name: Setup node
uses: actions/setup-node@v3
uses: actions/setup-node@v2
with:
node-version: 18
cache: "npm"
cache-dependency-path: ${{ matrix.module }}/frontend/package-lock.json
- name: Chrome browser tests (${{ matrix.module }})
uses: cypress-io/github-action@v5
node-version: 16.10.0
cache: 'npm'
cache-dependency-path: frontend/package-lock.json
- name: ${{ matrix.browser }} browser tests (Mempool)
uses: cypress-io/github-action@v2
with:
tag: ${{ github.event_name }}
working-directory: ${{ matrix.module }}/frontend
build: npm run config:defaults:${{ matrix.module }}
start: npm run start:local-staging
wait-on: "http://localhost:4200"
working-directory: frontend
build: npm run config:defaults:mempool
start: npm run start:local-prod
wait-on: 'http://localhost:4200'
wait-on-timeout: 120
record: true
parallel: true
spec: ${{ matrix.spec }}
group: Tests on Chrome (${{ matrix.module }})
browser: "chrome"
ci-build-id: "${{ github.sha }}-${{ github.workflow }}-${{ github.event_name }}"
spec: |
cypress/integration/mainnet/*.spec.ts
cypress/integration/signet/*.spec.ts
cypress/integration/testnet/*.spec.ts
group: Tests on ${{ matrix.browser }} (Mempool)
browser: ${{ matrix.browser }}
ci-build-id: '${{ github.sha }}-${{ github.workflow }}-${{ github.event_name }}'
env:
CYPRESS_RECORD_KEY: ${{ secrets.CYPRESS_RECORD_KEY }}
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
CYPRESS_PROJECT_ID: ${{ secrets.CYPRESS_PROJECT_ID }}
- name: ${{ matrix.browser }} browser tests (Liquid)
uses: cypress-io/github-action@v2
if: always()
with:
working-directory: frontend
build: npm run config:defaults:liquid
start: npm run start:local-prod
wait-on: 'http://localhost:4200'
wait-on-timeout: 120
record: true
parallel: true
spec: |
cypress/integration/liquid/liquid.spec.ts
cypress/integration/liquidtestnet/liquidtestnet.spec.ts
group: Tests on ${{ matrix.browser }} (Liquid)
browser: ${{ matrix.browser }}
ci-build-id: '${{ github.sha }}-${{ github.workflow }}-${{ github.event_name }}'
env:
CYPRESS_RECORD_KEY: ${{ secrets.CYPRESS_RECORD_KEY }}
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
CYPRESS_PROJECT_ID: ${{ secrets.CYPRESS_PROJECT_ID }}
- name: ${{ matrix.browser }} browser tests (Bisq)
uses: cypress-io/github-action@v2
if: always()
with:
working-directory: frontend
build: npm run config:defaults:bisq
start: npm run start:local-prod
wait-on: 'http://localhost:4200'
wait-on-timeout: 120
record: true
parallel: true
spec: cypress/integration/bisq/bisq.spec.ts
group: Tests on ${{ matrix.browser }} (Bisq)
browser: ${{ matrix.browser }}
ci-build-id: '${{ github.sha }}-${{ github.workflow }}-${{ github.event_name }}'
env:
COMMIT_INFO_MESSAGE: ${{ github.event.pull_request.title }}
CYPRESS_RECORD_KEY: ${{ secrets.CYPRESS_RECORD_KEY }}
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
CYPRESS_PROJECT_ID: ${{ secrets.CYPRESS_PROJECT_ID }}


@@ -1,26 +0,0 @@
name: 'Print images digest'
on:
workflow_dispatch:
inputs:
version:
description: 'Image Version'
required: false
default: 'latest'
type: string
jobs:
print-images-sha:
runs-on: 'ubuntu-latest'
name: Print digest for images
steps:
- name: Checkout
uses: actions/checkout@v3
with:
path: digest
- name: Run script
working-directory: digest
run: |
sh ./docker/scripts/get_image_digest.sh $VERSION
env:
VERSION: ${{ github.event.inputs.version }}


@@ -1,7 +1,7 @@
name: Docker build on tag
env:
DOCKER_CLI_EXPERIMENTAL: enabled
TAG_FMT: "^refs/tags/(((.?[0-9]+){3,4}))$"
TAG_FMT: '^refs/tags/(((.?[0-9]+){3,4}))$'
DOCKER_BUILDKIT: 0
COMPOSE_DOCKER_CLI_BUILD: 0
@@ -11,9 +11,6 @@ on:
- v[0-9]+.[0-9]+.[0-9]+
- v[0-9]+.[0-9]+.[0-9]+-*
permissions:
contents: read
jobs:
build:
strategy:
@@ -21,46 +18,16 @@ jobs:
service:
- frontend
- backend
runs-on: ubuntu-latest
timeout-minutes: 120
runs-on: ubuntu-18.04
name: Build and push to DockerHub
steps:
# Workaround based on JonasAlfredsson/docker-on-tmpfs@v1.0.1
- name: Replace the current swap file
shell: bash
run: |
sudo swapoff /mnt/swapfile
sudo rm -v /mnt/swapfile
sudo fallocate -l 13G /mnt/swapfile
sudo chmod 600 /mnt/swapfile
sudo mkswap /mnt/swapfile
sudo swapon /mnt/swapfile
- name: Show current memory and swap status
shell: bash
run: |
sudo free -h
echo
sudo swapon --show
- name: Mount a tmpfs over /var/lib/docker
shell: bash
run: |
if [ ! -d "/var/lib/docker" ]; then
echo "Directory '/var/lib/docker' not found"
exit 1
fi
sudo mount -t tmpfs -o size=10G tmpfs /var/lib/docker
sudo systemctl restart docker
sudo df -h | grep docker
- name: Set env variables
run: echo "TAG=${GITHUB_REF/refs\/tags\//}" >> $GITHUB_ENV
- name: Show set environment variables
run: |
printf " TAG: %s\n" "$TAG"
- name: Add SHORT_SHA env property with commit short sha
run: echo "SHORT_SHA=`echo ${GITHUB_SHA} | cut -c1-8`" >> $GITHUB_ENV
@@ -68,24 +35,24 @@ jobs:
run: echo "${{ secrets.DOCKER_PASSWORD }}" | docker login -u "${{ secrets.DOCKER_USERNAME }}" --password-stdin
- name: Checkout project
uses: actions/checkout@v3
uses: actions/checkout@v2
- name: Init repo for Dockerization
run: docker/init.sh "$TAG"
- name: Set up QEMU
uses: docker/setup-qemu-action@v2
uses: docker/setup-qemu-action@v1
id: qemu
- name: Setup Docker buildx action
uses: docker/setup-buildx-action@v2
uses: docker/setup-buildx-action@v1
id: buildx
- name: Available platforms
run: echo ${{ steps.buildx.outputs.platforms }}
- name: Cache Docker layers
uses: actions/cache@v3
uses: actions/cache@v2
id: cache
with:
path: /tmp/.buildx-cache

4
.gitignore vendored

@@ -2,7 +2,3 @@ sitemap
data
docker-compose.yml
backend/mempool-config.json
*.swp
frontend/src/resources/config.template.js
frontend/src/resources/config.js
target

2
.nvmrc

@@ -1 +1 @@
v16.16.0
v16.10.0


@@ -1,6 +1,4 @@
{
"editor.tabSize": 2,
"typescript.preferences.importModuleSpecifier": "relative",
"typescript.tsdk": "./backend/node_modules/typescript/lib",
"rust-analyzer.procMacro.ignored": { "napi-derive": ["napi"] }
"typescript.tsdk": "./backend/node_modules/typescript/lib"
}


@@ -6,8 +6,6 @@ In order to clarify the intellectual property license granted with Contributions
When submitting a pull request for the first time, please create a file with a name like `/contributors/{github_username}.txt`, and in the content of that file indicate your agreement to the Contributor License Agreement terms below. An example of what that file should contain can be seen in wiz's agreement file. (This method of CLA "signing" is borrowed from Medium's open source project.)
Also, please GPG-sign all your commits (`git config commit.gpgsign true`).
# Contributor License Agreement
Last Updated: January 25, 2022

533
Cargo.lock generated

@@ -1,533 +0,0 @@
# This file is automatically @generated by Cargo.
# It is not intended for manual editing.
version = 3
[[package]]
name = "aho-corasick"
version = "1.0.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "67fc08ce920c31afb70f013dcce1bfc3a3195de6a228474e45e1f145b36f8d04"
dependencies = [
"memchr",
]
[[package]]
name = "autocfg"
version = "1.1.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "d468802bab17cbc0cc575e9b053f41e72aa36bfa6b7f55e3529ffa43161b97fa"
[[package]]
name = "bitflags"
version = "2.3.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "6dbe3c979c178231552ecba20214a8272df4e09f232a87aef4320cf06539aded"
[[package]]
name = "bytemuck"
version = "1.13.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "17febce684fd15d89027105661fec94afb475cb995fbc59d2865198446ba2eea"
[[package]]
name = "bytes"
version = "1.4.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "89b2fd2a0dcf38d7971e2194b6b6eebab45ae01067456a7fd93d5547a61b70be"
[[package]]
name = "cfg-if"
version = "1.0.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "baf1de4339761588bc0619e3cbc0120ee582ebb74b53b4efbf79117bd2da40fd"
[[package]]
name = "convert_case"
version = "0.6.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "ec182b0ca2f35d8fc196cf3404988fd8b8c739a4d270ff118a398feb0cbec1ca"
dependencies = [
"unicode-segmentation",
]
[[package]]
name = "ctor"
version = "0.2.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "1586fa608b1dab41f667475b4a41faec5ba680aee428bfa5de4ea520fdc6e901"
dependencies = [
"quote",
"syn 2.0.20",
]
[[package]]
name = "gbt"
version = "0.1.0"
dependencies = [
"bytemuck",
"bytes",
"napi",
"napi-build",
"napi-derive",
"priority-queue",
"tracing",
"tracing-log",
"tracing-subscriber",
]
[[package]]
name = "hashbrown"
version = "0.12.3"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "8a9ee70c43aaf417c914396645a0fa852624801b24ebb7ae78fe8272889ac888"
[[package]]
name = "hermit-abi"
version = "0.2.6"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "ee512640fe35acbfb4bb779db6f0d80704c2cacfa2e39b601ef3e3f47d1ae4c7"
dependencies = [
"libc",
]
[[package]]
name = "indexmap"
version = "1.9.3"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "bd070e393353796e801d209ad339e89596eb4c8d430d18ede6a1cced8fafbd99"
dependencies = [
"autocfg",
"hashbrown",
]
[[package]]
name = "lazy_static"
version = "1.4.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "e2abad23fbc42b3700f2f279844dc832adb2b2eb069b2df918f455c4e18cc646"
[[package]]
name = "libc"
version = "0.2.146"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "f92be4933c13fd498862a9e02a3055f8a8d9c039ce33db97306fd5a6caa7f29b"
[[package]]
name = "libloading"
version = "0.7.4"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "b67380fd3b2fbe7527a606e18729d21c6f3951633d0500574c4dc22d2d638b9f"
dependencies = [
"cfg-if",
"winapi",
]
[[package]]
name = "log"
version = "0.4.19"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "b06a4cde4c0f271a446782e3eff8de789548ce57dbc8eca9292c27f4a42004b4"
[[package]]
name = "matchers"
version = "0.1.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "8263075bb86c5a1b1427b5ae862e8889656f126e9f77c484496e8b47cf5c5558"
dependencies = [
"regex-automata",
]
[[package]]
name = "memchr"
version = "2.5.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "2dffe52ecf27772e601905b7522cb4ef790d2cc203488bbd0e2fe85fcb74566d"
[[package]]
name = "napi"
version = "2.13.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "0ede2d12cd6fce44da537a4be1f5510c73be2506c2e32dfaaafd1f36968f3a0e"
dependencies = [
"bitflags",
"ctor",
"napi-derive",
"napi-sys",
"once_cell",
"tokio",
]
[[package]]
name = "napi-build"
version = "2.0.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "882a73d9ef23e8dc2ebbffb6a6ae2ef467c0f18ac10711e4cc59c5485d41df0e"
[[package]]
name = "napi-derive"
version = "2.13.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "da1c6a8fa84d549aa8708fcd062372bf8ec6e849de39016ab921067d21bde367"
dependencies = [
"cfg-if",
"convert_case",
"napi-derive-backend",
"proc-macro2",
"quote",
"syn 1.0.109",
]
[[package]]
name = "napi-derive-backend"
version = "1.0.52"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "20bbc7c69168d06a848f925ec5f0e0997f98e8c8d4f2cc30157f0da51c009e17"
dependencies = [
"convert_case",
"once_cell",
"proc-macro2",
"quote",
"regex",
"semver",
"syn 1.0.109",
]
[[package]]
name = "napi-sys"
version = "2.2.3"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "166b5ef52a3ab5575047a9fe8d4a030cdd0f63c96f071cd6907674453b07bae3"
dependencies = [
"libloading",
]
[[package]]
name = "nu-ansi-term"
version = "0.46.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "77a8165726e8236064dbb45459242600304b42a5ea24ee2948e18e023bf7ba84"
dependencies = [
"overload",
"winapi",
]
[[package]]
name = "num_cpus"
version = "1.15.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "0fac9e2da13b5eb447a6ce3d392f23a29d8694bff781bf03a16cd9ac8697593b"
dependencies = [
"hermit-abi",
"libc",
]
[[package]]
name = "once_cell"
version = "1.18.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "dd8b5dd2ae5ed71462c540258bedcb51965123ad7e7ccf4b9a8cafaa4a63576d"
[[package]]
name = "overload"
version = "0.1.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "b15813163c1d831bf4a13c3610c05c0d03b39feb07f7e09fa234dac9b15aaf39"
[[package]]
name = "pin-project-lite"
version = "0.2.9"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "e0a7ae3ac2f1173085d398531c705756c94a4c56843785df85a60c1a0afac116"
[[package]]
name = "priority-queue"
version = "1.3.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "fff39edfcaec0d64e8d0da38564fad195d2d51b680940295fcc307366e101e61"
dependencies = [
"autocfg",
"indexmap",
]
[[package]]
name = "proc-macro2"
version = "1.0.60"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "dec2b086b7a862cf4de201096214fa870344cf922b2b30c167badb3af3195406"
dependencies = [
"unicode-ident",
]
[[package]]
name = "quote"
version = "1.0.28"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "1b9ab9c7eadfd8df19006f1cf1a4aed13540ed5cbc047010ece5826e10825488"
dependencies = [
"proc-macro2",
]
[[package]]
name = "regex"
version = "1.8.3"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "81ca098a9821bd52d6b24fd8b10bd081f47d39c22778cafaa75a2857a62c6390"
dependencies = [
"aho-corasick",
"memchr",
"regex-syntax 0.7.2",
]
[[package]]
name = "regex-automata"
version = "0.1.10"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "6c230d73fb8d8c1b9c0b3135c5142a8acee3a0558fb8db5cf1cb65f8d7862132"
dependencies = [
"regex-syntax 0.6.29",
]
[[package]]
name = "regex-syntax"
version = "0.6.29"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "f162c6dd7b008981e4d40210aca20b4bd0f9b60ca9271061b07f78537722f2e1"
[[package]]
name = "regex-syntax"
version = "0.7.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "436b050e76ed2903236f032a59761c1eb99e1b0aead2c257922771dab1fc8c78"
[[package]]
name = "semver"
version = "1.0.17"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "bebd363326d05ec3e2f532ab7660680f3b02130d780c299bca73469d521bc0ed"
[[package]]
name = "sharded-slab"
version = "0.1.4"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "900fba806f70c630b0a382d0d825e17a0f19fcd059a2ade1ff237bcddf446b31"
dependencies = [
"lazy_static",
]
[[package]]
name = "smallvec"
version = "1.10.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "a507befe795404456341dfab10cef66ead4c041f62b8b11bbb92bffe5d0953e0"
[[package]]
name = "syn"
version = "1.0.109"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "72b64191b275b66ffe2469e8af2c1cfe3bafa67b529ead792a6d0160888b4237"
dependencies = [
"proc-macro2",
"quote",
"unicode-ident",
]
[[package]]
name = "syn"
version = "2.0.20"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "fcb8d4cebc40aa517dfb69618fa647a346562e67228e2236ae0042ee6ac14775"
dependencies = [
"proc-macro2",
"quote",
"unicode-ident",
]
[[package]]
name = "thread_local"
version = "1.1.7"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "3fdd6f064ccff2d6567adcb3873ca630700f00b5ad3f060c25b5dcfd9a4ce152"
dependencies = [
"cfg-if",
"once_cell",
]
[[package]]
name = "tokio"
version = "1.28.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "94d7b1cfd2aa4011f2de74c2c4c63665e27a71006b0a192dcd2710272e73dfa2"
dependencies = [
"autocfg",
"num_cpus",
"pin-project-lite",
"windows-sys",
]
[[package]]
name = "tracing"
version = "0.1.37"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "8ce8c33a8d48bd45d624a6e523445fd21ec13d3653cd51f681abf67418f54eb8"
dependencies = [
"cfg-if",
"pin-project-lite",
"tracing-attributes",
"tracing-core",
]
[[package]]
name = "tracing-attributes"
version = "0.1.26"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "5f4f31f56159e98206da9efd823404b79b6ef3143b4a7ab76e67b1751b25a4ab"
dependencies = [
"proc-macro2",
"quote",
"syn 2.0.20",
]
[[package]]
name = "tracing-core"
version = "0.1.31"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "0955b8137a1df6f1a2e9a37d8a6656291ff0297c1a97c24e0d8425fe2312f79a"
dependencies = [
"once_cell",
"valuable",
]
[[package]]
name = "tracing-log"
version = "0.1.3"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "78ddad33d2d10b1ed7eb9d1f518a5674713876e97e5bb9b7345a7984fbb4f922"
dependencies = [
"lazy_static",
"log",
"tracing-core",
]
[[package]]
name = "tracing-subscriber"
version = "0.3.17"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "30a651bc37f915e81f087d86e62a18eec5f79550c7faff886f7090b4ea757c77"
dependencies = [
"matchers",
"nu-ansi-term",
"once_cell",
"regex",
"sharded-slab",
"smallvec",
"thread_local",
"tracing",
"tracing-core",
"tracing-log",
]
[[package]]
name = "unicode-ident"
version = "1.0.9"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "b15811caf2415fb889178633e7724bad2509101cde276048e013b9def5e51fa0"
[[package]]
name = "unicode-segmentation"
version = "1.10.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "1dd624098567895118886609431a7c3b8f516e41d30e0643f03d94592a147e36"
[[package]]
name = "valuable"
version = "0.1.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "830b7e5d4d90034032940e4ace0d9a9a057e7a45cd94e6c007832e39edb82f6d"
[[package]]
name = "winapi"
version = "0.3.9"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "5c839a674fcd7a98952e593242ea400abe93992746761e38641405d28b00f419"
dependencies = [
"winapi-i686-pc-windows-gnu",
"winapi-x86_64-pc-windows-gnu",
]
[[package]]
name = "winapi-i686-pc-windows-gnu"
version = "0.4.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "ac3b87c63620426dd9b991e5ce0329eff545bccbbb34f3be09ff6fb6ab51b7b6"
[[package]]
name = "winapi-x86_64-pc-windows-gnu"
version = "0.4.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "712e227841d057c1ee1cd2fb22fa7e5a5461ae8e48fa2ca79ec42cfc1931183f"
[[package]]
name = "windows-sys"
version = "0.48.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "677d2418bec65e3338edb076e806bc1ec15693c5d0104683f2efe857f61056a9"
dependencies = [
"windows-targets",
]
[[package]]
name = "windows-targets"
version = "0.48.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "7b1eb6f0cd7c80c79759c929114ef071b87354ce476d9d94271031c0497adfd5"
dependencies = [
"windows_aarch64_gnullvm",
"windows_aarch64_msvc",
"windows_i686_gnu",
"windows_i686_msvc",
"windows_x86_64_gnu",
"windows_x86_64_gnullvm",
"windows_x86_64_msvc",
]
[[package]]
name = "windows_aarch64_gnullvm"
version = "0.48.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "91ae572e1b79dba883e0d315474df7305d12f569b400fcf90581b06062f7e1bc"
[[package]]
name = "windows_aarch64_msvc"
version = "0.48.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "b2ef27e0d7bdfcfc7b868b317c1d32c641a6fe4629c171b8928c7b08d98d7cf3"
[[package]]
name = "windows_i686_gnu"
version = "0.48.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "622a1962a7db830d6fd0a69683c80a18fda201879f0f447f065a3b7467daa241"
[[package]]
name = "windows_i686_msvc"
version = "0.48.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "4542c6e364ce21bf45d69fdd2a8e455fa38d316158cfd43b3ac1c5b1b19f8e00"
[[package]]
name = "windows_x86_64_gnu"
version = "0.48.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "ca2b8a661f7628cbd23440e50b05d705db3686f894fc9580820623656af974b1"
[[package]]
name = "windows_x86_64_gnullvm"
version = "0.48.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "7896dbc1f41e08872e9d5e8f8baa8fdd2677f29468c4e156210174edc7f7b953"
[[package]]
name = "windows_x86_64_msvc"
version = "0.48.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "1a515f5799fe4961cb532f983ce2b23082366b898e52ffbce459c86f67c8378a"


@@ -1,8 +0,0 @@
[workspace]
members = [
"./backend/rust-gbt",
]
[profile.release]
lto = true
codegen-units = 1


@@ -1,5 +1,5 @@
The Mempool Open Source Project
Copyright (c) 2019-2023 The Mempool Open Source Project Developers
Copyright (c) 2019-2022 The Mempool Open Source Project Developers
This program is free software; you can redistribute it and/or modify it under
the terms of (at your option) either:
@@ -13,7 +13,7 @@ the terms of (at your option) either:
proxy statement published on <https://mempool.space/about>.
However, this copyright license does not include an implied right or license to
use our trademarks: The Mempool Open Source Project®, mempool.space™, the
use our trademarks: The Mempool Open Source Project, mempool.space™, the
mempool Logo™, the mempool.space Vertical Logo™, the mempool.space Horizontal
Logo™, the mempool Square Logo™, and the mempool Blocks logo™ are registered
trademarks or trademarks of Mempool Space K.K in Japan, the United States,

424
README.md

@@ -1,35 +1,415 @@
# The Mempool Open Source Project® [![mempool](https://img.shields.io/endpoint?url=https://dashboard.cypress.io/badge/simple/ry4br7/master&style=flat-square)](https://dashboard.cypress.io/projects/ry4br7/runs)
# The Mempool Open Source Project
https://user-images.githubusercontent.com/93150691/226236121-375ea64f-b4a1-4cc0-8fad-a6fb33226840.mp4
Mempool is the fully featured visualizer, explorer, and API service running on [mempool.space](https://mempool.space/), an open source project developed and operated for the benefit of the Bitcoin community, with a focus on the emerging transaction fee market to help our transition into a multi-layer ecosystem.
<br>
![mempool](https://mempool.space/resources/screenshots/v2.3.0-dashboard.png)
Mempool is the fully-featured mempool visualizer, explorer, and API service running at [mempool.space](https://mempool.space/).
## Installation Methods
It is an open-source project developed and operated for the benefit of the Bitcoin community, with a focus on the emerging transaction fee market that is evolving Bitcoin into a multi-layer ecosystem.
Mempool can be self-hosted on a wide variety of your own hardware, ranging from a simple one-click installation on a Raspberry Pi distro, all the way to an advanced high availability cluster of powerful servers for a production instance. We support the following installation methods, ranked in order from simple to advanced:
# Installation Methods
1) One-click installation on: [Umbrel](https://github.com/getumbrel/umbrel), [RaspiBlitz](https://github.com/rootzoll/raspiblitz), [RoninDojo](https://code.samourai.io/ronindojo/RoninDojo), or [MyNode](https://github.com/mynodebtc/mynode).
2) [Docker installation on Linux using docker-compose](https://github.com/mempool/mempool/tree/master/docker)
3) [Manual installation on Linux or FreeBSD](https://github.com/mempool/mempool#manual-installation)
4) [Production installation on a powerful FreeBSD server](https://github.com/mempool/mempool/tree/master/production)
5) [High Availability cluster using powerful FreeBSD servers](https://github.com/mempool/mempool/tree/master/production#high-availability)
Mempool can be self-hosted on a wide variety of your own hardware, ranging from a simple one-click installation on a Raspberry Pi full-node distro all the way to a robust production instance on a powerful FreeBSD server.
# Docker Installation
**Most people should use a one-click install method.** Other install methods are meant for developers and others with experience managing servers.
The `docker` directory contains the Dockerfiles used to build and release the official images and a `docker-compose.yml` file that is intended for end users to run a Mempool instance with minimal effort.
<a id="one-click-installation"></a>
## One-Click Installation
## bitcoind only configuration
Mempool can be conveniently installed on the following full-node distros:
- [Umbrel](https://github.com/getumbrel/umbrel)
- [RaspiBlitz](https://github.com/rootzoll/raspiblitz)
- [RoninDojo](https://code.samourai.io/ronindojo/RoninDojo)
- [myNode](https://github.com/mynodebtc/mynode)
- [Start9](https://github.com/Start9Labs/embassy-os)
To run an instance with the default settings, use the following command:
**We highly recommend you deploy your own Mempool instance this way.** No matter which option you pick, you'll be able to get your own fully-sovereign instance of Mempool up quickly without needing to fiddle with any settings.
```bash
$ docker-compose up
```
## Advanced Installation Methods
The default configuration will allow you to run Mempool using `bitcoind` as the backend, so address lookups will be disabled. It assumes you have added RPC credentials for the `mempool` user with a `mempool` password in your `bitcoin.conf` file:
Mempool can be installed in other ways too, but we only recommend doing so if you're a developer, have experience managing servers, or otherwise know what you're doing.
```
rpcuser=mempool
rpcpassword=mempool
```
- See the [`docker/`](./docker/) directory for instructions on deploying Mempool with Docker.
- See the [`backend/`](./backend/) and [`frontend/`](./frontend/) directories for manual install instructions oriented for developers.
- See the [`production/`](./production/) directory for guidance on setting up a more serious Mempool instance designed for high performance at scale.
If you want to use your current credentials, update them in the `docker-compose.yml` file:
```
api:
environment:
MEMPOOL_BACKEND: "none"
RPC_HOST: "172.27.0.1"
RPC_PORT: "8332"
RPC_USER: "mempool"
RPC_PASS: "mempool"
```
Note: the IP in the example above refers to Docker's default gateway IP address so the container can hit the `bitcoind` instance running on the host machine. If your setup is different, update it accordingly.
You can check if the instance is running by visiting http://localhost - the graphs will be populated as new transactions are detected.
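Beyond loading the web UI, you can also hit the REST API directly. Here is a quick smoke test, assuming the default compose setup serves the frontend on localhost port 80 and proxies `/api/` to the backend:

```ts
// Quick API smoke test. Assumes the default docker-compose setup, with the
// frontend on localhost:80 proxying /api/ to the backend (adjust if yours
// differs). Requires Node 18+ for the global fetch.
const res = await fetch('http://localhost/api/v1/fees/recommended');
if (!res.ok) throw new Error(`backend not reachable: ${res.status}`);
console.log(await res.json()); // e.g. { fastestFee: 12, halfHourFee: 8, hourFee: 4, ... }
```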
## bitcoind+electrum configuration
To run with an `electrum`-compatible server as the backend, in addition to the settings required for `bitcoind` above, make the following changes to the `docker-compose.yml` file:
- Under the `api` service, change the value of the `MEMPOOL_BACKEND` key from `none` to `electrum`:
```
api:
environment:
MEMPOOL_BACKEND: "electrum"
```
- Under the `api` service, set the `ELECTRUM_HOST` and `ELECTRUM_PORT` keys to your Docker host IP address and set `ELECTRUM_TLS_ENABLED` to `false`:
```
api:
environment:
ELECTRUM_HOST: "172.27.0.1"
ELECTRUM_PORT: "50002"
ELECTRUM_TLS_ENABLED: "false"
```
You can override any of the backend settings from the `mempool-config.json` file using the following environment variables, set under the same `api` `environment` section.
JSON:
```
"MEMPOOL": {
"NETWORK": "mainnet",
"BACKEND": "electrum",
"HTTP_PORT": 8999,
"SPAWN_CLUSTER_PROCS": 0,
"API_URL_PREFIX": "/api/v1/",
"POLL_RATE_MS": 2000,
"CACHE_DIR": "./cache",
"CLEAR_PROTECTION_MINUTES": 20,
"RECOMMENDED_FEE_PERCENTILE": 50,
"BLOCK_WEIGHT_UNITS": 4000000,
"INITIAL_BLOCKS_AMOUNT": 8,
"MEMPOOL_BLOCKS_AMOUNT": 8,
"PRICE_FEED_UPDATE_INTERVAL": 3600,
"USE_SECOND_NODE_FOR_MINFEE": false,
"EXTERNAL_ASSETS": []
},
```
docker-compose overrides:
```
MEMPOOL_NETWORK: ""
MEMPOOL_BACKEND: ""
MEMPOOL_HTTP_PORT: ""
MEMPOOL_SPAWN_CLUSTER_PROCS: ""
MEMPOOL_API_URL_PREFIX: ""
MEMPOOL_POLL_RATE_MS: ""
MEMPOOL_CACHE_DIR: ""
MEMPOOL_CLEAR_PROTECTION_MINUTES: ""
MEMPOOL_RECOMMENDED_FEE_PERCENTILE: ""
MEMPOOL_BLOCK_WEIGHT_UNITS: ""
MEMPOOL_INITIAL_BLOCKS_AMOUNT: ""
MEMPOOL_MEMPOOL_BLOCKS_AMOUNT: ""
MEMPOOL_PRICE_FEED_UPDATE_INTERVAL: ""
MEMPOOL_USE_SECOND_NODE_FOR_MINFEE: ""
MEMPOOL_EXTERNAL_ASSETS: ""
```
JSON:
```
"CORE_RPC": {
"HOST": "127.0.0.1",
"PORT": 8332,
"USERNAME": "mempool",
"PASSWORD": "mempool"
},
```
docker-compose overrides:
```
CORE_RPC_HOST: ""
CORE_RPC_PORT: ""
CORE_RPC_USERNAME: ""
CORE_RPC_PASSWORD: ""
```
JSON:
```
"ELECTRUM": {
"HOST": "127.0.0.1",
"PORT": 50002,
"TLS_ENABLED": true
},
```
docker-compose overrides:
```
ELECTRUM_HOST: ""
ELECTRUM_PORT: ""
ELECTRUM_TLS: ""
```
JSON:
```
"ESPLORA": {
"REST_API_URL": "http://127.0.0.1:3000"
},
```
docker-compose overrides:
```
ESPLORA_REST_API_URL: ""
```
JSON:
```
"SECOND_CORE_RPC": {
"HOST": "127.0.0.1",
"PORT": 8332,
"USERNAME": "mempool",
"PASSWORD": "mempool"
},
```
docker-compose overrides:
```
SECOND_CORE_RPC_HOST: ""
SECOND_CORE_RPC_PORT: ""
SECOND_CORE_RPC_USERNAME: ""
SECOND_CORE_RPC_PASSWORD: ""
```
JSON:
```
"DATABASE": {
"ENABLED": true,
"HOST": "127.0.0.1",
"PORT": 3306,
"DATABASE": "mempool",
"USERNAME": "mempool",
"PASSWORD": "mempool"
},
```
docker-compose overrides:
```
DATABASE_ENABLED: ""
DATABASE_HOST: ""
DATABASE_PORT: ""
DATABASE_DATABASE: ""
DATABASE_USERAME: ""
DATABASE_PASSWORD: ""
```
JSON:
```
"SYSLOG": {
"ENABLED": true,
"HOST": "127.0.0.1",
"PORT": 514,
"MIN_PRIORITY": "info",
"FACILITY": "local7"
},
```
docker-compose overrides:
```
SYSLOG_ENABLED: ""
SYSLOG_HOST: ""
SYSLOG_PORT: ""
SYSLOG_MIN_PRIORITY: ""
SYSLOG_FACILITY: ""
```
JSON:
```
"STATISTICS": {
"ENABLED": true,
"TX_PER_SECOND_SAMPLE_PERIOD": 150
},
```
docker-compose overrides:
```
STATISTICS_ENABLED: ""
STATISTICS_TX_PER_SECOND_SAMPLE_PERIOD: ""
```
JSON:
```
"BISQ": {
"ENABLED": false,
"DATA_PATH": "/bisq/statsnode-data/btc_mainnet/db"
}
```
docker-compose overrides:
```
BISQ_ENABLED: ""
BISQ_DATA_PATH: ""
```
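The override names above mostly follow a simple convention: the JSON section name, an underscore, then the setting key (with occasional exceptions such as `ELECTRUM_TLS` for `TLS_ENABLED`). A hypothetical helper, not part of this repo, illustrating the mapping:

```ts
// Hypothetical helper (not part of this repo): derives docker-compose override
// names from the mempool-config.json structure, following the convention above:
// SECTION name + "_" + setting key, e.g. CORE_RPC.HOST -> CORE_RPC_HOST.
type ConfigSection = Record<string, string | number | boolean | unknown[]>;

function overrideNames(config: Record<string, ConfigSection>): string[] {
  return Object.entries(config).flatMap(([section, settings]) =>
    Object.keys(settings).map((key) => `${section}_${key}`)
  );
}

console.log(overrideNames({ CORE_RPC: { HOST: '127.0.0.1', PORT: 8332 } }));
// -> [ 'CORE_RPC_HOST', 'CORE_RPC_PORT' ]
```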
# Manual Installation
The following instructions are for a manual installation on Linux or FreeBSD. The file and directory paths may need to be changed to match your OS.
## Dependencies
* [Bitcoin](https://github.com/bitcoin/bitcoin)
* [Electrum](https://github.com/romanz/electrs)
* [NodeJS](https://github.com/nodejs/node)
* [MariaDB](https://github.com/mariadb/server)
* [Nginx](https://github.com/nginx/nginx)
## Mempool
Clone the mempool repo, and checkout the latest release tag:
```bash
git clone https://github.com/mempool/mempool
cd mempool
latestrelease=$(curl -s https://api.github.com/repos/mempool/mempool/releases/latest|grep tag_name|head -1|cut -d '"' -f4)
git checkout $latestrelease
```
## Bitcoin Core (bitcoind)
Enable RPC and txindex in `bitcoin.conf`:
```bash
rpcuser=mempool
rpcpassword=mempool
txindex=1
```
## MySQL
Install MariaDB from OS package manager:
```bash
# Linux
apt-get install mariadb-server mariadb-client
# macOS
brew install mariadb
mysql.server start
```
Create database and grant privileges:
```bash
MariaDB [(none)]> drop database mempool;
Query OK, 0 rows affected (0.00 sec)
MariaDB [(none)]> create database mempool;
Query OK, 1 row affected (0.00 sec)
MariaDB [(none)]> grant all privileges on mempool.* to 'mempool'@'%' identified by 'mempool';
Query OK, 0 rows affected (0.00 sec)
```
## Mempool Backend
Install mempool dependencies from npm and build the backend:
```bash
# backend
cd backend
npm install --prod
npm run build
```
In the `backend` folder, make a copy of the sample config and modify it to fit your settings.
```bash
cp mempool-config.sample.json mempool-config.json
```
Edit `mempool-config.json` to add your Bitcoin Core node RPC credentials:
```bash
{
"MEMPOOL": {
"NETWORK": "mainnet",
"BACKEND": "electrum",
"HTTP_PORT": 8999
},
"CORE_RPC": {
"HOST": "127.0.0.1",
"PORT": 8332,
"USERNAME": "mempool",
"PASSWORD": "mempool"
},
"ELECTRUM": {
"HOST": "127.0.0.1",
"PORT": 50002,
"TLS_ENABLED": true
},
"DATABASE": {
"ENABLED": true,
"HOST": "127.0.0.1",
"PORT": 3306,
"USERNAME": "mempool",
"PASSWORD": "mempool",
"DATABASE": "mempool"
}
}
```
Start the backend:
```bash
npm run start
```
When it's running you should see output like this:
```bash
Mempool updated in 0.189 seconds
Updating mempool
Mempool updated in 0.096 seconds
Updating mempool
Mempool updated in 0.099 seconds
Updating mempool
Calculated fee for transaction 1 / 10
Calculated fee for transaction 2 / 10
Calculated fee for transaction 3 / 10
Calculated fee for transaction 4 / 10
Calculated fee for transaction 5 / 10
Calculated fee for transaction 6 / 10
Calculated fee for transaction 7 / 10
Calculated fee for transaction 8 / 10
Calculated fee for transaction 9 / 10
Calculated fee for transaction 10 / 10
Mempool updated in 0.243 seconds
Updating mempool
```
## Mempool Frontend
Install mempool dependencies from npm and build the frontend static HTML/CSS/JS:
```bash
# frontend
cd frontend
npm install --prod
npm run build
```
Install the output into nginx webroot folder:
```bash
sudo rsync -av --delete dist/ /var/www/
```
## nginx + certbot
Install the supplied nginx.conf and nginx-mempool.conf in /etc/nginx
```bash
# install nginx and certbot
apt-get install -y nginx python3-certbot-nginx
# install the mempool configuration for nginx
cp nginx.conf nginx-mempool.conf /etc/nginx/
# replace example.com with your domain name
certbot --nginx -d example.com
```
If everything went okay you should see the beautiful mempool :grin:
If you get stuck on "loading blocks", this means the websocket can't connect.
Check your nginx proxy setup, firewalls, etc. and open an issue if you need help.
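If you suspect the websocket specifically, here is a minimal connectivity probe, assuming the standard `/api/v1/ws` endpoint and the `ws` npm package; replace `example.com` with the domain you used in the certbot step:

```ts
// Minimal websocket probe, assuming the standard /api/v1/ws endpoint and the
// 'ws' npm package. Replace example.com with your own domain.
import WebSocket from 'ws';

const socket = new WebSocket('wss://example.com/api/v1/ws');
socket.on('open', () => {
  // Subscribe to block and stats updates, mirroring what the frontend requests.
  socket.send(JSON.stringify({ action: 'want', data: ['blocks', 'stats'] }));
});
socket.on('message', (msg) => console.log(msg.toString()));
socket.on('error', (err) => console.error('websocket failed:', err.message));
```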


@@ -1 +0,0 @@
Dockerfile


@@ -1,17 +0,0 @@
# Editor configuration, see https://editorconfig.org
root = true
[*]
charset = utf-8
indent_style = space
indent_size = 2
insert_final_newline = true
trim_trailing_whitespace = true
[*.ts]
quote_type = single
[*.md]
max_line_length = off
trim_trailing_whitespace = false


@@ -1,2 +0,0 @@
node_modules
dist


@@ -1,37 +0,0 @@
{
"root": true,
"parser": "@typescript-eslint/parser",
"plugins": [
"@typescript-eslint"
],
"extends": [
"eslint:recommended",
"plugin:@typescript-eslint/eslint-recommended",
"plugin:@typescript-eslint/recommended",
"prettier"
],
"rules": {
"@typescript-eslint/ban-ts-comment": 1,
"@typescript-eslint/ban-types": 1,
"@typescript-eslint/no-empty-function": 1,
"@typescript-eslint/no-explicit-any": 1,
"@typescript-eslint/no-inferrable-types": 0,
"@typescript-eslint/no-namespace": 1,
"@typescript-eslint/no-this-alias": 1,
"@typescript-eslint/no-var-requires": 1,
"@typescript-eslint/explicit-function-return-type": 1,
"no-console": 1,
"no-constant-condition": 1,
"no-dupe-else-if": 1,
"no-empty": 1,
"no-prototype-builtins": 1,
"no-self-assign": 1,
"no-useless-catch": 1,
"no-var": 1,
"prefer-const": 1,
"prefer-rest-params": 1,
"quotes": [1, "single", { "allowTemplateLiterals": true }],
"semi": 1,
"eqeqeq": 1
}
}

8
backend/.gitignore vendored

@@ -1,10 +1,9 @@
# See http://help.github.com/ignore-files/ for more about ignoring files.
# production config and external assets
!mempool-config.template.json
*.json
!mempool-config.sample.json
mempool-config.json
pools.json
icons.json
# compiled output
@@ -45,6 +44,3 @@ testem.log
#System Files
.DS_Store
Thumbs.db
# package folder (npm run package output)
/package


@@ -1,2 +0,0 @@
node_modules
package-lock.json


@@ -1,6 +0,0 @@
{
"endOfLine": "lf",
"printWidth": 80,
"tabWidth": 2,
"trailingComma": "es5"
}


@@ -1,256 +1,22 @@
# Mempool Backend
# Setup backend watchers
These instructions are mostly intended for developers.
The backend is static. Typescript scripts are compiled into the `dist` folder and served through a node web server.
If you choose to use these instructions for a production setup, be aware that you will still probably need to do additional configuration for your specific OS, environment, use-case, etc. We do our best here to provide a good starting point, but only proceed if you know what you're doing. Mempool only provides support for custom setups to [enterprise sponsors](https://mempool.space/enterprise).
You can avoid the manual shutdown/recompile/restart command line cycle by using a watcher.
See other ways to set up Mempool on [the main README](/../../#installation-methods).
Make sure you are in the `backend` directory `cd backend`.
Jump to a section in this doc:
- [Set Up the Backend](#setup)
- [Development Tips](#development-tips)
## Setup
### 1. Clone Mempool Repository
Get the latest Mempool code:
1. Install nodemon and ts-node
```
git clone https://github.com/mempool/mempool
cd mempool
sudo npm install -g ts-node nodemon
```
Check out the latest release:
2. Run the watcher
> Note: You can find your npm global binary folder using `npm -g bin`, where nodemon will be installed.
```
latestrelease=$(curl -s https://api.github.com/repos/mempool/mempool/releases/latest|grep tag_name|head -1|cut -d '"' -f4)
git checkout $latestrelease
nodemon src/index.ts --ignore cache/ --ignore pools.json
```
### 2. Configure Bitcoin Core
Turn on `txindex`, enable RPC, and set RPC credentials in `bitcoin.conf`:
```
txindex=1
server=1
rpcuser=mempool
rpcpassword=mempool
```
### 3. Configure Electrum Server
[Pick an Electrum Server implementation](https://mempool.space/docs/faq#address-lookup-issues), configure it, and make sure it's synced.
**This step is optional.** You can run Mempool without configuring an Electrum Server for it, but address lookups will be disabled.
### 4. Configure MariaDB
_Mempool needs MariaDB v10.5 or later. If you already have MySQL installed, make sure to migrate any existing databases **before** installing MariaDB._
Get MariaDB from your operating system's package manager:
```
# Debian, Ubuntu, etc.
apt-get install mariadb-server mariadb-client
# macOS
brew install mariadb
mysql.server start
```
Create a database and grant privileges:
```
MariaDB [(none)]> drop database mempool;
Query OK, 0 rows affected (0.00 sec)
MariaDB [(none)]> create database mempool;
Query OK, 1 row affected (0.00 sec)
MariaDB [(none)]> grant all privileges on mempool.* to 'mempool'@'%' identified by 'mempool';
Query OK, 0 rows affected (0.00 sec)
```
### 5. Prepare Mempool Backend
#### Build
_Make sure to use Node.js 16.10 and npm 7._
_The build process requires [Rust](https://www.rust-lang.org/tools/install) to be installed._
Install dependencies with `npm` and build the backend:
```
cd backend
npm install
npm run build
```
#### Configure
In the backend folder, make a copy of the sample config file:
```
cp mempool-config.sample.json mempool-config.json
```
Edit `mempool-config.json` as needed.
In particular, make sure:
- the correct Bitcoin Core RPC credentials are specified in `CORE_RPC`
- the correct `BACKEND` is specified in `MEMPOOL`:
- "electrum" if you're using [romanz/electrs](https://github.com/romanz/electrs) or [cculianu/Fulcrum](https://github.com/cculianu/Fulcrum)
- "esplora" if you're using [Blockstream/electrs](https://github.com/Blockstream/electrs)
- "none" if you're not using any Electrum Server
### 6. Run Mempool Backend
Run the Mempool backend:
```
npm run start
```
You can also set env var `MEMPOOL_CONFIG_FILE` to specify a custom config file location:
```
MEMPOOL_CONFIG_FILE=/path/to/mempool-config.json npm run start
```
When it's running, you should see output like this:
```
Mempool updated in 0.189 seconds
Updating mempool
Mempool updated in 0.096 seconds
Updating mempool
Mempool updated in 0.099 seconds
Updating mempool
Calculated fee for transaction 1 / 10
Calculated fee for transaction 2 / 10
Calculated fee for transaction 3 / 10
Calculated fee for transaction 4 / 10
Calculated fee for transaction 5 / 10
Calculated fee for transaction 6 / 10
Calculated fee for transaction 7 / 10
Calculated fee for transaction 8 / 10
Calculated fee for transaction 9 / 10
Calculated fee for transaction 10 / 10
Mempool updated in 0.243 seconds
Updating mempool
```
### 7. Set Up Mempool Frontend
With the backend configured and running, proceed to set up the [Mempool frontend](../frontend#manual-setup).
## Development Tips
### Set Up Backend Watchers
The Mempool backend is static. TypeScript scripts are compiled into the `dist` folder and served through a Node.js web server.
As a result, for development purposes, you may find it helpful to set up backend watchers to avoid the manual shutdown/recompile/restart command-line cycle.
First, install `nodemon` and `ts-node`:
```
npm install -g ts-node nodemon
```
Then, run the watcher:
```
nodemon src/index.ts --ignore cache/
```
`nodemon` should be in npm's global binary folder. If needed, you can determine where that is with `npm -g bin`.
### Useful Regtest Commands
Helpful link: https://gist.github.com/System-Glitch/cb4e87bf1ae3fec9925725bb3ebe223a
Run bitcoind on regtest:
```
bitcoind -regtest
```
Create a new wallet, if needed:
```
bitcoin-cli -regtest createwallet test
```
Load the wallet (this command may take a while if you have a lot of UTXOs):
```
bitcoin-cli -regtest loadwallet test
```
Get a new address:
```
address=$(bitcoin-cli -regtest getnewaddress)
```
Mine blocks to the previously generated address. You need at least 101 blocks before you can spend. This will take some time to execute (~1 min):
```
bitcoin-cli -regtest generatetoaddress 101 $address
```
Send 0.1 BTC at 5 sat/vB to another address:
```
bitcoin-cli -named -regtest sendtoaddress address=$(bitcoin-cli -regtest getnewaddress) amount=0.1 fee_rate=5
```
See more examples of `sendtoaddress`:
```
bitcoin-cli sendtoaddress # will print the help
```
Here is a mini script to generate random network activity (a random number of transactions, each with a random fee rate). It's slow, so don't expect it to produce mempool spam for testing unless you let it run for a long time or connect multiple regtest nodes to each other.
```
#!/bin/bash
address=$(bitcoin-cli -regtest getnewaddress)
bitcoin-cli -regtest generatetoaddress 101 $address
for i in {1..1000000}
do
for y in $(seq 1 "$(jot -r 1 1 1000)")
do
bitcoin-cli -regtest -named sendtoaddress address=$address amount=0.01 fee_rate=$(jot -r 1 1 100)
done
bitcoin-cli -regtest generatetoaddress 1 $address
sleep 5
done
```
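Note that `jot` is a BSD/macOS utility. On GNU/Linux, the usual equivalent is `shuf` from coreutils; here is a sketch of the same inner loop, assuming `shuf` is available:
```
# GNU/Linux variant: `shuf -i MIN-MAX -n 1` replaces `jot -r 1 MIN MAX`
for y in $(seq 1 "$(shuf -i 1-1000 -n 1)")
do
  bitcoin-cli -regtest -named sendtoaddress address=$address amount=0.01 fee_rate=$(shuf -i 1-100 -n 1)
done
```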
Generate a block at a regular interval (every 10 seconds in this example):
```
watch -n 10 "bitcoin-cli -regtest generatetoaddress 1 $address"
```
### Mining Pools Update
By default, mining pool definitions are not updated automatically (`config.MEMPOOL.AUTOMATIC_BLOCK_REINDEXING` is set to `false`).
To update mining pools manually, pass the `--update-pools` command-line flag when you run the Node.js backend (see the example below). This triggers the mining pool update and automatically re-indexes the appropriate blocks.
You can enable automatic mining pool updates by setting `config.MEMPOOL.AUTOMATIC_BLOCK_REINDEXING` to `true` in your `mempool-config.json`.
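For example, to run a one-off manual update:
```
npm run start --update-pools
```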
When a change to a pool's `coinbase tag` or `coinbase address` is detected, all blocks attributed to the `unknown` mining pool (starting from height 130635) are deleted from the `blocks` table. Additionally, all blocks attributed to the updated pool are deleted from the `blocks` table. Those blocks are then automatically re-indexed.
### Re-index Tables
You can manually force the Node.js backend to drop all data from a specified set of tables so it will be re-indexed later. This is mostly useful for the mining dashboard and the Lightning explorer.
Use the `--reindex` flag to specify a comma-separated list of tables to truncate at startup. A 5-second delay is observed before the tables are truncated, giving you a chance to cancel (CTRL+C) in case of misuse.
Usage:
```
npm run start --reindex=blocks,hashrates
```
Example output:
```
Feb 13 14:55:27 [63246] WARN: <lightning> Indexed data for "hashrates" tables will be erased in 5 seconds (using '--reindex')
Feb 13 14:55:32 [63246] NOTICE: <lightning> Table hashrates has been truncated
```
Reference: https://github.com/mempool/mempool/pull/1269

@@ -1,20 +0,0 @@
import type { Config } from "@jest/types"
const config: Config.InitialOptions = {
preset: "ts-jest",
testEnvironment: "node",
verbose: true,
automock: false,
collectCoverage: true,
collectCoverageFrom: ["./src/**/**.ts"],
coverageProvider: "babel",
coverageThreshold: {
global: {
lines: 1
}
},
setupFiles: [
"./testSetup.ts",
],
}
export default config;

@@ -2,45 +2,28 @@
"MEMPOOL": {
"NETWORK": "mainnet",
"BACKEND": "electrum",
"ENABLED": true,
"HTTP_PORT": 8999,
"SPAWN_CLUSTER_PROCS": 0,
"API_URL_PREFIX": "/api/v1/",
"POLL_RATE_MS": 2000,
"CACHE_DIR": "./cache",
"CACHE_ENABLED": true,
"CLEAR_PROTECTION_MINUTES": 20,
"RECOMMENDED_FEE_PERCENTILE": 50,
"BLOCK_WEIGHT_UNITS": 4000000,
"INITIAL_BLOCKS_AMOUNT": 8,
"MEMPOOL_BLOCKS_AMOUNT": 8,
"INDEXING_BLOCKS_AMOUNT": 11000,
"BLOCKS_SUMMARIES_INDEXING": false,
"INDEXING_BLOCKS_AMOUNT": 1100,
"PRICE_FEED_UPDATE_INTERVAL": 3600,
"USE_SECOND_NODE_FOR_MINFEE": false,
"EXTERNAL_ASSETS": [],
"EXTERNAL_MAX_RETRY": 1,
"EXTERNAL_RETRY_INTERVAL": 0,
"USER_AGENT": "mempool",
"STDOUT_LOG_MIN_PRIORITY": "debug",
"AUTOMATIC_BLOCK_REINDEXING": false,
"POOLS_JSON_URL": "https://raw.githubusercontent.com/mempool/mining-pools/master/pools-v2.json",
"POOLS_JSON_TREE_URL": "https://api.github.com/repos/mempool/mining-pools/git/trees/master",
"AUDIT": false,
"ADVANCED_GBT_AUDIT": false,
"ADVANCED_GBT_MEMPOOL": false,
"RUST_GBT": false,
"CPFP_INDEXING": false,
"DISK_CACHE_BLOCK_INTERVAL": 6,
"MAX_PUSH_TX_SIZE_WEIGHT": 4000000,
"ALLOW_UNREACHABLE": true,
"PRICE_UPDATES_PER_HOUR": 1
"EXTERNAL_ASSETS": [
"https://mempool.space/resources/pools.json"
]
},
"CORE_RPC": {
"HOST": "127.0.0.1",
"PORT": 8332,
"USERNAME": "mempool",
"PASSWORD": "mempool",
"TIMEOUT": 60000
"PASSWORD": "mempool"
},
"ELECTRUM": {
"HOST": "127.0.0.1",
@@ -48,27 +31,21 @@
"TLS_ENABLED": true
},
"ESPLORA": {
"REST_API_URL": "http://127.0.0.1:3000",
"UNIX_SOCKET_PATH": "/tmp/esplora-bitcoin-mainnet",
"RETRY_UNIX_SOCKET_AFTER": 30000,
"FALLBACK": []
"REST_API_URL": "http://127.0.0.1:3000"
},
"SECOND_CORE_RPC": {
"HOST": "127.0.0.1",
"PORT": 8332,
"USERNAME": "mempool",
"PASSWORD": "mempool",
"TIMEOUT": 60000
"PASSWORD": "mempool"
},
"DATABASE": {
"ENABLED": true,
"HOST": "127.0.0.1",
"PORT": 3306,
"SOCKET": "/var/run/mysql/mysql.sock",
"DATABASE": "mempool",
"USERNAME": "mempool",
"PASSWORD": "mempool",
"TIMEOUT": 180000
"PASSWORD": "mempool"
},
"SYSLOG": {
"ENABLED": true,
@@ -81,63 +58,8 @@
"ENABLED": true,
"TX_PER_SECOND_SAMPLE_PERIOD": 150
},
"MAXMIND": {
"ENABLED": false,
"GEOLITE2_CITY": "/usr/local/share/GeoIP/GeoLite2-City.mmdb",
"GEOLITE2_ASN": "/usr/local/share/GeoIP/GeoLite2-ASN.mmdb",
"GEOIP2_ISP": "/usr/local/share/GeoIP/GeoIP2-ISP.mmdb"
},
"BISQ": {
"ENABLED": false,
"DATA_PATH": "/bisq/statsnode-data/btc_mainnet/db"
},
"LIGHTNING": {
"ENABLED": false,
"BACKEND": "lnd",
"STATS_REFRESH_INTERVAL": 600,
"GRAPH_REFRESH_INTERVAL": 600,
"LOGGER_UPDATE_INTERVAL": 30,
"FORENSICS_INTERVAL": 43200,
"FORENSICS_RATE_LIMIT": 20
},
"LND": {
"TLS_CERT_PATH": "tls.cert",
"MACAROON_PATH": "readonly.macaroon",
"REST_API_URL": "https://localhost:8080",
"TIMEOUT": 10000
},
"CLIGHTNING": {
"SOCKET": "lightning-rpc"
},
"SOCKS5PROXY": {
"ENABLED": false,
"USE_ONION": true,
"HOST": "127.0.0.1",
"PORT": 9050,
"USERNAME": "",
"PASSWORD": ""
},
"EXTERNAL_DATA_SERVER": {
"MEMPOOL_API": "https://mempool.space/api/v1",
"MEMPOOL_ONION": "http://mempoolhqx4isw62xs7abwphsq7ldayuidyx2v2oethdhhj6mlo2r6ad.onion/api/v1",
"LIQUID_API": "https://liquid.network/api/v1",
"LIQUID_ONION": "http://liquidmom47f6s3m53ebfxn47p76a6tlnxib3wp6deux7wuzotdr6cyd.onion/api/v1",
"BISQ_URL": "https://bisq.markets/api",
"BISQ_ONION": "http://bisqmktse2cabavbr2xjq7xw3h6g5ottemo5rolfcwt6aly6tp5fdryd.onion/api"
},
"REPLICATION": {
"ENABLED": false,
"AUDIT": false,
"AUDIT_START_HEIGHT": 774000,
"SERVERS": [
"list",
"of",
"trusted",
"servers"
]
},
"MEMPOOL_SERVICES": {
"API": "https://mempool.space/api",
"ACCELERATIONS": false
}
}

@@ -1,17 +0,0 @@
#!/bin/sh
set -e
# Remove previous dist folder
rm -rf dist
# Build new dist folder
npm run build
# Remove previous package folder
rm -rf package
# Move JS and deps
mv dist package
cp -R node_modules package
# Remove symlink for rust-gbt and insert real folder
rm package/node_modules/rust-gbt
cp -R rust-gbt package/node_modules
# Clean up deps
npm run package-rm-build-deps

@@ -1,12 +0,0 @@
#!/bin/sh
set -e
# Cleaning up inside the node_modules folder
cd package/node_modules
rm -r \
typescript \
@typescript-eslint \
@napi-rs \
./rust-gbt/src \
./rust-gbt/Cargo.toml \
./rust-gbt/build.rs

backend/package-lock.json: generated file; diff suppressed (12,723 lines)
@@ -1,6 +1,6 @@
{
"name": "mempool-backend",
"version": "3.0.0-dev",
"version": "2.4.0-dev",
"description": "Bitcoin mempool visualizer and blockchain explorer backend",
"license": "GNU Affero General Public License v3.0",
"homepage": "https://mempool.space",
@@ -16,58 +16,35 @@
"mempool",
"blockchain",
"explorer",
"liquid",
"lightning"
"liquid"
],
"main": "index.ts",
"scripts": {
"tsc": "./node_modules/typescript/bin/tsc -p tsconfig.build.json",
"build": "npm run rust-build && npm run tsc && npm run create-resources",
"create-resources": "cp ./src/tasks/price-feeds/mtgox-weekly.json ./dist/tasks && node dist/api/fetch-version.js",
"package": "./npm_package.sh",
"package-rm-build-deps": "./npm_package_rm_build_deps.sh",
"ng": "./node_modules/@angular/cli/bin/ng",
"tsc": "./node_modules/typescript/bin/tsc",
"build": "npm run tsc",
"start": "node --max-old-space-size=2048 dist/index.js",
"start-production": "node --max-old-space-size=16384 dist/index.js",
"reindex-updated-pools": "npm run start-production --update-pools",
"reindex-all-blocks": "npm run start-production --update-pools --reindex-blocks",
"test": "./node_modules/.bin/jest --coverage",
"test:ci": "CI=true ./node_modules/.bin/jest --coverage",
"lint": "./node_modules/.bin/eslint . --ext .ts",
"lint:fix": "./node_modules/.bin/eslint . --ext .ts --fix",
"prettier": "./node_modules/.bin/prettier --write \"src/**/*.{js,ts}\"",
"rust-build": "cd rust-gbt && npm run build-release"
"start-production": "node --max-old-space-size=4096 dist/index.js",
"test": "echo \"Error: no test specified\" && exit 1"
},
"dependencies": {
"@babel/core": "^7.21.3",
"@mempool/electrum-client": "1.1.9",
"@types/node": "^18.15.3",
"axios": "~1.4.0",
"bitcoinjs-lib": "~6.1.3",
"crypto-js": "~4.1.1",
"express": "~4.18.2",
"maxmind": "~4.3.11",
"mysql2": "~3.6.0",
"rust-gbt": "file:./rust-gbt",
"redis": "^4.6.6",
"socks-proxy-agent": "~7.0.0",
"typescript": "~4.9.3",
"ws": "~8.13.0"
"@mempool/bitcoin": "^3.0.3",
"@mempool/electrum-client": "^1.1.7",
"@types/ws": "8.2.2",
"axios": "0.24.0",
"bitcoinjs-lib": "6.0.1",
"crypto-js": "^4.0.0",
"express": "^4.17.1",
"locutus": "^2.0.12",
"mysql2": "2.3.3",
"node-worker-threads-pool": "^1.4.3",
"typescript": "4.4.4",
"ws": "8.3.0"
},
"devDependencies": {
"@babel/code-frame": "^7.18.6",
"@babel/core": "^7.21.3",
"@types/compression": "^1.7.2",
"@types/crypto-js": "^4.1.1",
"@types/express": "^4.17.17",
"@types/jest": "^29.5.0",
"@types/ws": "~8.5.5",
"@typescript-eslint/eslint-plugin": "^5.55.0",
"@typescript-eslint/parser": "^5.55.0",
"eslint": "^8.36.0",
"eslint-config-prettier": "^8.8.0",
"jest": "^29.5.0",
"prettier": "^3.0.0",
"ts-jest": "^29.1.1",
"ts-node": "^10.9.1"
"@types/compression": "^1.0.1",
"@types/express": "^4.17.2",
"@types/locutus": "^0.0.6",
"tslint": "^6.1.0"
}
}

@@ -1,4 +0,0 @@
*.node
**/node_modules
**/.DS_Store
npm-debug.log*

@@ -1,27 +0,0 @@
[package]
name = "gbt"
version = "0.1.0"
description = "An inefficient re-implementation of the getBlockTemplate algorithm in Rust"
authors = ["mononaut"]
edition = "2021"
publish = false
[workspace]
[lib]
crate-type = ["cdylib"]
# See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html
[dependencies]
priority-queue = "1.3.2"
bytes = "1.4.0"
napi = { version = "2.13.2", features = ["napi8", "tokio_rt"] }
napi-derive = "2.13.0"
bytemuck = "1.13.1"
tracing = "0.1.36"
tracing-log = "0.1.3"
tracing-subscriber = { version = "0.3.15", features = ["env-filter"]}
[build-dependencies]
napi-build = "2.0.1"

@@ -1,123 +0,0 @@
# gbt
**gbt:** Rust implementation of the getBlockTemplate algorithm
This project was bootstrapped by [napi](https://www.npmjs.com/package/@napi-rs/cli).
## Installing gbt
Installing gbt requires a [supported version of Node and Rust](https://github.com/napi-rs/napi-rs#platform-support).
The build process also requires [Rust](https://www.rust-lang.org/tools/install) to be installed.
You can install the project with npm. In the project directory, run:
```sh
$ npm install
```
This fully installs the project, including installing any dependencies and running the build.
## Building gbt
If you have already installed the project and only want to run the build, run:
```sh
$ npm run build
```
This command uses the [napi build](https://www.npmjs.com/package/@napi-rs/cli) utility to run the Rust build and copy the built library into `./gbt.[TARGET_TRIPLE].node`.
## Exploring gbt
After building gbt, you can explore its exports at the Node REPL:
```sh
$ npm install
$ node
> require('.').hello()
"hello node"
```
## Available Scripts
In the project directory, you can run:
### `npm install`
Installs the project, including running `npm run build-release`.
### `npm build`
Builds the Node addon (`gbt.[TARGET_TRIPLE].node`) from source.
Additional [`cargo build`](https://doc.rust-lang.org/cargo/commands/cargo-build.html) arguments may be passed to `npm build` and `npm build-*` commands. For example, to enable a [cargo feature](https://doc.rust-lang.org/cargo/reference/features.html):
```
npm run build -- --feature=beetle
```
#### `npm build-debug`
Alias for `npm build`.
#### `npm build-release`
Same as [`npm build`](#npm-build), but builds the module with the [`release`](https://doc.rust-lang.org/cargo/reference/profiles.html#release) profile. Release builds compile more slowly, but run faster.
### `npm test`
Runs the unit tests by calling `cargo test`. You can learn more about [adding tests to your Rust code](https://doc.rust-lang.org/book/ch11-01-writing-tests.html) from the [Rust book](https://doc.rust-lang.org/book/).
## Project Layout
The directory structure of this project is:
```
gbt/
├── Cargo.toml
├── README.md
├── gbt.[TARGET_TRIPLE].node
├── package.json
├── src/
| └── lib.rs
└── target/
```
### Cargo.toml
The Cargo [manifest file](https://doc.rust-lang.org/cargo/reference/manifest.html), which informs the `cargo` command.
### README.md
This file.
### gbt.\[TARGET_TRIPLE\].node
The Node addon—i.e., a binary Node module—generated by building the project. This is the main module for this package, as dictated by the `"main"` key in `package.json`.
Under the hood, a [Node addon](https://nodejs.org/api/addons.html) is a [dynamically-linked shared object](https://en.wikipedia.org/wiki/Library_(computing)#Shared_libraries). The `"build"` script produces this file by copying it from within the `target/` directory, which is where the Rust build produces the shared object.
### package.json
The npm [manifest file](https://docs.npmjs.com/cli/v7/configuring-npm/package-json), which informs the `npm` command.
### src/
The directory tree containing the Rust source code for the project.
### src/lib.rs
The Rust library's main module.
### target/
Binary artifacts generated by the Rust build.
## Learn More
To learn more about NAPI-RS, see the [NAPI-RS documentation](https://napi.rs/docs/introduction/getting-started).
To learn more about Rust, see the [Rust documentation](https://www.rust-lang.org).
To learn more about Node, see the [Node documentation](https://nodejs.org).

@@ -1,3 +0,0 @@
fn main() {
napi_build::setup();
}

@@ -1,49 +0,0 @@
/* tslint:disable */
/* eslint-disable */
/* auto-generated by NAPI-RS */
export interface ThreadTransaction {
uid: number
order: number
fee: number
weight: number
sigops: number
effectiveFeePerVsize: number
inputs: Array<number>
}
export interface ThreadAcceleration {
uid: number
delta: number
}
export class GbtGenerator {
constructor()
/**
* # Errors
*
* Rejects if the thread panics or if the Mutex is poisoned.
*/
make(mempool: Array<ThreadTransaction>, accelerations: Array<ThreadAcceleration>, maxUid: number): Promise<GbtResult>
/**
* # Errors
*
* Rejects if the thread panics or if the Mutex is poisoned.
*/
update(newTxs: Array<ThreadTransaction>, removeTxs: Array<number>, accelerations: Array<ThreadAcceleration>, maxUid: number): Promise<GbtResult>
}
/**
* The result from calling the gbt function.
*
* This tuple contains the following:
* blocks: A 2D Vector of transaction IDs (u32), the inner Vecs each represent a block.
* block_weights: A Vector of total weights per block.
* clusters: A 2D Vector of transaction IDs representing clusters of dependent mempool transactions
* rates: A Vector of tuples containing transaction IDs (u32) and effective fee per vsize (f64)
*/
export class GbtResult {
blocks: Array<Array<number>>
blockWeights: Array<number>
clusters: Array<Array<number>>
rates: Array<Array<number>>
constructor(blocks: Array<Array<number>>, blockWeights: Array<number>, clusters: Array<Array<number>>, rates: Array<Array<number>>)
}

@@ -1,258 +0,0 @@
/* tslint:disable */
/* eslint-disable */
/* prettier-ignore */
/* auto-generated by NAPI-RS */
const { existsSync, readFileSync } = require('fs')
const { join } = require('path')
const { platform, arch } = process
let nativeBinding = null
let localFileExisted = false
let loadError = null
function isMusl() {
// For Node 10
if (!process.report || typeof process.report.getReport !== 'function') {
try {
const lddPath = require('child_process').execSync('which ldd').toString().trim()
return readFileSync(lddPath, 'utf8').includes('musl')
} catch (e) {
return true
}
} else {
const { glibcVersionRuntime } = process.report.getReport().header
return !glibcVersionRuntime
}
}
switch (platform) {
case 'android':
switch (arch) {
case 'arm64':
localFileExisted = existsSync(join(__dirname, 'gbt.android-arm64.node'))
try {
if (localFileExisted) {
nativeBinding = require('./gbt.android-arm64.node')
} else {
nativeBinding = require('gbt-android-arm64')
}
} catch (e) {
loadError = e
}
break
case 'arm':
localFileExisted = existsSync(join(__dirname, 'gbt.android-arm-eabi.node'))
try {
if (localFileExisted) {
nativeBinding = require('./gbt.android-arm-eabi.node')
} else {
nativeBinding = require('gbt-android-arm-eabi')
}
} catch (e) {
loadError = e
}
break
default:
throw new Error(`Unsupported architecture on Android ${arch}`)
}
break
case 'win32':
switch (arch) {
case 'x64':
localFileExisted = existsSync(
join(__dirname, 'gbt.win32-x64-msvc.node')
)
try {
if (localFileExisted) {
nativeBinding = require('./gbt.win32-x64-msvc.node')
} else {
nativeBinding = require('gbt-win32-x64-msvc')
}
} catch (e) {
loadError = e
}
break
case 'ia32':
localFileExisted = existsSync(
join(__dirname, 'gbt.win32-ia32-msvc.node')
)
try {
if (localFileExisted) {
nativeBinding = require('./gbt.win32-ia32-msvc.node')
} else {
nativeBinding = require('gbt-win32-ia32-msvc')
}
} catch (e) {
loadError = e
}
break
case 'arm64':
localFileExisted = existsSync(
join(__dirname, 'gbt.win32-arm64-msvc.node')
)
try {
if (localFileExisted) {
nativeBinding = require('./gbt.win32-arm64-msvc.node')
} else {
nativeBinding = require('gbt-win32-arm64-msvc')
}
} catch (e) {
loadError = e
}
break
default:
throw new Error(`Unsupported architecture on Windows: ${arch}`)
}
break
case 'darwin':
localFileExisted = existsSync(join(__dirname, 'gbt.darwin-universal.node'))
try {
if (localFileExisted) {
nativeBinding = require('./gbt.darwin-universal.node')
} else {
nativeBinding = require('gbt-darwin-universal')
}
break
} catch {}
switch (arch) {
case 'x64':
localFileExisted = existsSync(join(__dirname, 'gbt.darwin-x64.node'))
try {
if (localFileExisted) {
nativeBinding = require('./gbt.darwin-x64.node')
} else {
nativeBinding = require('gbt-darwin-x64')
}
} catch (e) {
loadError = e
}
break
case 'arm64':
localFileExisted = existsSync(
join(__dirname, 'gbt.darwin-arm64.node')
)
try {
if (localFileExisted) {
nativeBinding = require('./gbt.darwin-arm64.node')
} else {
nativeBinding = require('gbt-darwin-arm64')
}
} catch (e) {
loadError = e
}
break
default:
throw new Error(`Unsupported architecture on macOS: ${arch}`)
}
break
case 'freebsd':
if (arch !== 'x64') {
throw new Error(`Unsupported architecture on FreeBSD: ${arch}`)
}
localFileExisted = existsSync(join(__dirname, 'gbt.freebsd-x64.node'))
try {
if (localFileExisted) {
nativeBinding = require('./gbt.freebsd-x64.node')
} else {
nativeBinding = require('gbt-freebsd-x64')
}
} catch (e) {
loadError = e
}
break
case 'linux':
switch (arch) {
case 'x64':
if (isMusl()) {
localFileExisted = existsSync(
join(__dirname, 'gbt.linux-x64-musl.node')
)
try {
if (localFileExisted) {
nativeBinding = require('./gbt.linux-x64-musl.node')
} else {
nativeBinding = require('gbt-linux-x64-musl')
}
} catch (e) {
loadError = e
}
} else {
localFileExisted = existsSync(
join(__dirname, 'gbt.linux-x64-gnu.node')
)
try {
if (localFileExisted) {
nativeBinding = require('./gbt.linux-x64-gnu.node')
} else {
nativeBinding = require('gbt-linux-x64-gnu')
}
} catch (e) {
loadError = e
}
}
break
case 'arm64':
if (isMusl()) {
localFileExisted = existsSync(
join(__dirname, 'gbt.linux-arm64-musl.node')
)
try {
if (localFileExisted) {
nativeBinding = require('./gbt.linux-arm64-musl.node')
} else {
nativeBinding = require('gbt-linux-arm64-musl')
}
} catch (e) {
loadError = e
}
} else {
localFileExisted = existsSync(
join(__dirname, 'gbt.linux-arm64-gnu.node')
)
try {
if (localFileExisted) {
nativeBinding = require('./gbt.linux-arm64-gnu.node')
} else {
nativeBinding = require('gbt-linux-arm64-gnu')
}
} catch (e) {
loadError = e
}
}
break
case 'arm':
localFileExisted = existsSync(
join(__dirname, 'gbt.linux-arm-gnueabihf.node')
)
try {
if (localFileExisted) {
nativeBinding = require('./gbt.linux-arm-gnueabihf.node')
} else {
nativeBinding = require('gbt-linux-arm-gnueabihf')
}
} catch (e) {
loadError = e
}
break
default:
throw new Error(`Unsupported architecture on Linux: ${arch}`)
}
break
default:
throw new Error(`Unsupported OS: ${platform}, architecture: ${arch}`)
}
if (!nativeBinding) {
if (loadError) {
throw loadError
}
throw new Error(`Failed to load native binding`)
}
const { GbtGenerator, GbtResult } = nativeBinding
module.exports.GbtGenerator = GbtGenerator
module.exports.GbtResult = GbtResult

@@ -1,34 +0,0 @@
{
"name": "gbt",
"version": "3.0.0-dev",
"lockfileVersion": 3,
"requires": true,
"packages": {
"": {
"name": "gbt",
"version": "3.0.0-dev",
"hasInstallScript": true,
"dependencies": {
"@napi-rs/cli": "^2.16.1"
},
"engines": {
"node": ">= 12"
}
},
"node_modules/@napi-rs/cli": {
"version": "2.16.1",
"resolved": "https://registry.npmjs.org/@napi-rs/cli/-/cli-2.16.1.tgz",
"integrity": "sha512-L0Gr5iEQIDEbvWdDr1HUaBOxBSHL1VZhWSk1oryawoT8qJIY+KGfLFelU+Qma64ivCPbxYpkfPoKYVG3rcoGIA==",
"bin": {
"napi": "scripts/index.js"
},
"engines": {
"node": ">= 10"
},
"funding": {
"type": "github",
"url": "https://github.com/sponsors/Brooooooklyn"
}
}
}
}

@@ -1,33 +0,0 @@
{
"name": "gbt",
"version": "3.0.0-dev",
"description": "An inefficient re-implementation of the getBlockTemplate algorithm in Rust",
"main": "index.js",
"types": "index.d.ts",
"scripts": {
"artifacts": "napi artifacts",
"build": "napi build --platform",
"build-debug": "npm run build",
"build-release": "npm run build -- --release --strip",
"install": "npm run build-release",
"prepublishOnly": "napi prepublish -t npm",
"test": "cargo test"
},
"author": "mononaut",
"napi": {
"name": "gbt",
"triples": {
"defaults": false,
"additional": [
"x86_64-unknown-linux-gnu",
"x86_64-unknown-freebsd"
]
}
},
"dependencies": {
"@napi-rs/cli": "^2.16.1"
},
"engines": {
"node": ">= 12"
}
}

@@ -1,225 +0,0 @@
use crate::{
u32_hasher_types::{u32hashset_new, U32HasherState},
ThreadTransaction, thread_acceleration::ThreadAcceleration,
};
use std::{
cmp::Ordering,
collections::HashSet,
hash::{Hash, Hasher},
};
#[allow(clippy::struct_excessive_bools)]
#[derive(Clone, Debug)]
pub struct AuditTransaction {
pub uid: u32,
order: u32,
pub fee: u64,
pub weight: u32,
// exact sigop-adjusted weight
pub sigop_adjusted_weight: u32,
// sigop-adjusted vsize rounded up to the next integer
pub sigop_adjusted_vsize: u32,
pub sigops: u32,
adjusted_fee_per_vsize: f64,
pub effective_fee_per_vsize: f64,
pub dependency_rate: f64,
pub inputs: Vec<u32>,
pub relatives_set_flag: bool,
pub ancestors: HashSet<u32, U32HasherState>,
pub children: HashSet<u32, U32HasherState>,
ancestor_fee: u64,
ancestor_sigop_adjusted_weight: u32,
ancestor_sigop_adjusted_vsize: u32,
ancestor_sigops: u32,
// Safety: Must be private to prevent NaN breaking Ord impl.
score: f64,
pub used: bool,
/// whether this transaction has been moved to the "modified" priority queue
pub modified: bool,
pub dirty: bool,
}
impl Hash for AuditTransaction {
fn hash<H: Hasher>(&self, state: &mut H) {
self.uid.hash(state);
}
}
impl PartialEq for AuditTransaction {
fn eq(&self, other: &Self) -> bool {
self.uid == other.uid
}
}
impl Eq for AuditTransaction {}
#[inline]
pub fn partial_cmp_uid_score(a: (u32, u32, f64), b: (u32, u32, f64)) -> Option<Ordering> {
// If either score is NaN, this is false,
// and partial_cmp will return None
if a.2 != b.2 {
// compare by score (sorts by ascending score)
a.2.partial_cmp(&b.2)
} else if a.1 != b.1 {
// tie-break by comparing partial txids (sorts by descending txid)
Some(b.1.cmp(&a.1))
} else {
// tie-break partial txid collisions by comparing uids (sorts by descending uid)
Some(b.0.cmp(&a.0))
}
}
impl PartialOrd for AuditTransaction {
fn partial_cmp(&self, other: &Self) -> Option<Ordering> {
partial_cmp_uid_score(
(self.uid, self.order, self.score),
(other.uid, other.order, other.score),
)
}
}
impl Ord for AuditTransaction {
fn cmp(&self, other: &Self) -> Ordering {
// Safety: The only possible values for score are f64
// that are not NaN. This is because outside code can not
// freely assign score. Also, calc_new_score guarantees no NaN.
self.partial_cmp(other).expect("score will never be NaN")
}
}
#[inline]
fn calc_fee_rate(fee: u64, vsize: f64) -> f64 {
(fee as f64) / (if vsize == 0.0 { 1.0 } else { vsize })
}
impl AuditTransaction {
pub fn from_thread_transaction(tx: &ThreadTransaction, maybe_acceleration: Option<Option<&ThreadAcceleration>>) -> Self {
let fee_delta = match maybe_acceleration {
Some(Some(acceleration)) => acceleration.delta,
_ => 0.0
};
let fee = (tx.fee as u64) + (fee_delta as u64);
// rounded up to the nearest integer
let is_adjusted = tx.weight < (tx.sigops * 20);
let sigop_adjusted_vsize = ((tx.weight + 3) / 4).max(tx.sigops * 5);
let sigop_adjusted_weight = tx.weight.max(tx.sigops * 20);
let effective_fee_per_vsize = if is_adjusted || fee_delta > 0.0 {
calc_fee_rate(fee, f64::from(sigop_adjusted_weight) / 4.0)
} else {
tx.effective_fee_per_vsize
};
Self {
uid: tx.uid,
order: tx.order,
fee,
weight: tx.weight,
sigop_adjusted_weight,
sigop_adjusted_vsize,
sigops: tx.sigops,
adjusted_fee_per_vsize: calc_fee_rate(fee, f64::from(sigop_adjusted_vsize)),
effective_fee_per_vsize,
dependency_rate: f64::INFINITY,
inputs: tx.inputs.clone(),
relatives_set_flag: false,
ancestors: u32hashset_new(),
children: u32hashset_new(),
ancestor_fee: fee,
ancestor_sigop_adjusted_weight: sigop_adjusted_weight,
ancestor_sigop_adjusted_vsize: sigop_adjusted_vsize,
ancestor_sigops: tx.sigops,
score: 0.0,
used: false,
modified: false,
dirty: effective_fee_per_vsize != tx.effective_fee_per_vsize || fee_delta > 0.0,
}
}
#[inline]
pub const fn score(&self) -> f64 {
self.score
}
#[inline]
pub const fn order(&self) -> u32 {
self.order
}
#[inline]
pub const fn ancestor_sigop_adjusted_vsize(&self) -> u32 {
self.ancestor_sigop_adjusted_vsize
}
#[inline]
pub const fn ancestor_sigops(&self) -> u32 {
self.ancestor_sigops
}
#[inline]
pub fn cluster_rate(&self) -> f64 {
// Safety: self.ancestor_weight can never be 0.
// Even if it could, as it approaches 0, the value inside the min() call
// grows, so if we think of 0 as "grew infinitely" then dependency_rate would be
// the smaller of the two. If either side is NaN, the other side is returned.
self.dependency_rate.min(calc_fee_rate(
self.ancestor_fee,
f64::from(self.ancestor_sigop_adjusted_weight) / 4.0,
))
}
pub fn set_dirty_if_different(&mut self, cluster_rate: f64) {
if self.effective_fee_per_vsize != cluster_rate {
self.effective_fee_per_vsize = cluster_rate;
self.dirty = true;
}
}
/// Safety: This function must NEVER set score to NaN.
#[inline]
fn calc_new_score(&mut self) {
self.score = self.adjusted_fee_per_vsize.min(calc_fee_rate(
self.ancestor_fee,
f64::from(self.ancestor_sigop_adjusted_vsize),
));
}
#[inline]
pub fn set_ancestors(
&mut self,
ancestors: HashSet<u32, U32HasherState>,
total_fee: u64,
total_sigop_adjusted_weight: u32,
total_sigop_adjusted_vsize: u32,
total_sigops: u32,
) {
self.ancestors = ancestors;
self.ancestor_fee = self.fee + total_fee;
self.ancestor_sigop_adjusted_weight =
self.sigop_adjusted_weight + total_sigop_adjusted_weight;
self.ancestor_sigop_adjusted_vsize = self.sigop_adjusted_vsize + total_sigop_adjusted_vsize;
self.ancestor_sigops = self.sigops + total_sigops;
self.calc_new_score();
self.relatives_set_flag = true;
}
#[inline]
pub fn remove_root(
&mut self,
root_txid: u32,
root_fee: u64,
root_sigop_adjusted_weight: u32,
root_sigop_adjusted_vsize: u32,
root_sigops: u32,
cluster_rate: f64,
) -> f64 {
let old_score = self.score();
self.dependency_rate = self.dependency_rate.min(cluster_rate);
if self.ancestors.remove(&root_txid) {
self.ancestor_fee -= root_fee;
self.ancestor_sigop_adjusted_weight -= root_sigop_adjusted_weight;
self.ancestor_sigop_adjusted_vsize -= root_sigop_adjusted_vsize;
self.ancestor_sigops -= root_sigops;
self.calc_new_score();
}
old_score
}
}

@@ -1,430 +0,0 @@
use priority_queue::PriorityQueue;
use std::{cmp::Ordering, collections::HashSet, mem::ManuallyDrop};
use tracing::{info, trace};
use crate::{
audit_transaction::{partial_cmp_uid_score, AuditTransaction},
u32_hasher_types::{u32hashset_new, u32priority_queue_with_capacity, U32HasherState},
GbtResult, ThreadTransactionsMap, thread_acceleration::ThreadAcceleration,
};
const MAX_BLOCK_WEIGHT_UNITS: u32 = 4_000_000 - 4_000;
const BLOCK_SIGOPS: u32 = 80_000;
const BLOCK_RESERVED_WEIGHT: u32 = 4_000;
const BLOCK_RESERVED_SIGOPS: u32 = 400;
const MAX_BLOCKS: usize = 8;
type AuditPool = Vec<Option<ManuallyDrop<AuditTransaction>>>;
type ModifiedQueue = PriorityQueue<u32, TxPriority, U32HasherState>;
#[derive(Debug)]
struct TxPriority {
uid: u32,
order: u32,
score: f64,
}
impl PartialEq for TxPriority {
fn eq(&self, other: &Self) -> bool {
self.uid == other.uid
}
}
impl Eq for TxPriority {}
impl PartialOrd for TxPriority {
fn partial_cmp(&self, other: &Self) -> Option<Ordering> {
partial_cmp_uid_score(
(self.uid, self.order, self.score),
(other.uid, other.order, other.score),
)
}
}
impl Ord for TxPriority {
fn cmp(&self, other: &Self) -> Ordering {
self.partial_cmp(other).expect("score will never be NaN")
}
}
/// Build projected mempool blocks using an approximation of the transaction selection algorithm from Bitcoin Core.
///
/// See `BlockAssembler` in Bitcoin Core's
/// [miner.cpp](https://github.com/bitcoin/bitcoin/blob/master/src/node/miner.cpp).
/// Ported from mempool backend's
/// [tx-selection-worker.ts](https://github.com/mempool/mempool/blob/master/backend/src/api/tx-selection-worker.ts).
//
// TODO: Make gbt smaller to fix these lints.
#[allow(clippy::too_many_lines)]
#[allow(clippy::cognitive_complexity)]
pub fn gbt(mempool: &mut ThreadTransactionsMap, accelerations: &[ThreadAcceleration], max_uid: usize) -> GbtResult {
let mut indexed_accelerations = Vec::with_capacity(max_uid + 1);
indexed_accelerations.resize(max_uid + 1, None);
for acceleration in accelerations {
indexed_accelerations[acceleration.uid as usize] = Some(acceleration);
}
let mempool_len = mempool.len();
let mut audit_pool: AuditPool = Vec::with_capacity(max_uid + 1);
audit_pool.resize(max_uid + 1, None);
let mut mempool_stack: Vec<u32> = Vec::with_capacity(mempool_len);
let mut clusters: Vec<Vec<u32>> = Vec::new();
let mut block_weights: Vec<u32> = Vec::new();
info!("Initializing working structs");
for (uid, tx) in &mut *mempool {
let acceleration = indexed_accelerations.get(*uid as usize);
let audit_tx = AuditTransaction::from_thread_transaction(tx, acceleration.copied());
// Safety: audit_pool and mempool_stack must always contain the same transactions
audit_pool[*uid as usize] = Some(ManuallyDrop::new(audit_tx));
mempool_stack.push(*uid);
}
info!("Building relatives graph & calculate ancestor scores");
for txid in &mempool_stack {
set_relatives(*txid, &mut audit_pool);
}
trace!("Post relative graph Audit Pool: {:#?}", audit_pool);
info!("Sorting by descending ancestor score");
let mut mempool_stack: Vec<(u32, u32, f64)> = mempool_stack
.into_iter()
.map(|txid| {
let atx = audit_pool
.get(txid as usize)
.and_then(Option::as_ref)
.expect("All txids are from audit_pool");
(txid, atx.order(), atx.score())
})
.collect();
mempool_stack.sort_unstable_by(|a, b| partial_cmp_uid_score(*a, *b).expect("Not NaN"));
let mut mempool_stack: Vec<u32> = mempool_stack.into_iter().map(|(txid, _, _)| txid).collect();
info!("Building blocks by greedily choosing the highest feerate package");
info!("(i.e. the package rooted in the transaction with the best ancestor score)");
let mut blocks: Vec<Vec<u32>> = Vec::new();
let mut block_weight: u32 = BLOCK_RESERVED_WEIGHT;
let mut block_sigops: u32 = BLOCK_RESERVED_SIGOPS;
// No need to be bigger than 4096 transactions for the per-block transaction Vec.
let initial_txes_per_block: usize = 4096.min(mempool_len);
let mut transactions: Vec<u32> = Vec::with_capacity(initial_txes_per_block);
let mut modified: ModifiedQueue = u32priority_queue_with_capacity(mempool_len);
let mut overflow: Vec<u32> = Vec::new();
let mut failures = 0;
while !mempool_stack.is_empty() || !modified.is_empty() {
// This trace log storm is big, so to make scrolling through
// Each iteration easier, leaving a bunch of empty rows
// And a header of ======
trace!("\n\n\n\n\n\n\n\n\n\n==================================");
trace!("mempool_array: {:#?}", mempool_stack);
trace!("clusters: {:#?}", clusters);
trace!("modified: {:#?}", modified);
trace!("audit_pool: {:#?}", audit_pool);
trace!("blocks: {:#?}", blocks);
trace!("block_weight: {:#?}", block_weight);
trace!("block_sigops: {:#?}", block_sigops);
trace!("transactions: {:#?}", transactions);
trace!("overflow: {:#?}", overflow);
trace!("failures: {:#?}", failures);
trace!("\n==================================");
let next_from_stack = next_valid_from_stack(&mut mempool_stack, &audit_pool);
let next_from_queue = next_valid_from_queue(&mut modified, &audit_pool);
if next_from_stack.is_none() && next_from_queue.is_none() {
continue;
}
let (next_tx, from_stack) = match (next_from_stack, next_from_queue) {
(Some(stack_tx), Some(queue_tx)) => match queue_tx.cmp(stack_tx) {
std::cmp::Ordering::Less => (stack_tx, true),
_ => (queue_tx, false),
},
(Some(stack_tx), None) => (stack_tx, true),
(None, Some(queue_tx)) => (queue_tx, false),
(None, None) => unreachable!(),
};
if from_stack {
mempool_stack.pop();
} else {
modified.pop();
}
if blocks.len() < (MAX_BLOCKS - 1)
&& ((block_weight + (4 * next_tx.ancestor_sigop_adjusted_vsize())
>= MAX_BLOCK_WEIGHT_UNITS)
|| (block_sigops + next_tx.ancestor_sigops() > BLOCK_SIGOPS))
{
// hold this package in an overflow list while we check for smaller options
overflow.push(next_tx.uid);
failures += 1;
} else {
let mut package: Vec<(u32, u32, usize)> = Vec::new();
let mut cluster: Vec<u32> = Vec::new();
let is_cluster: bool = !next_tx.ancestors.is_empty();
for ancestor_id in &next_tx.ancestors {
if let Some(Some(ancestor)) = audit_pool.get(*ancestor_id as usize) {
package.push((*ancestor_id, ancestor.order(), ancestor.ancestors.len()));
}
}
package.sort_unstable_by(|a, b| -> Ordering {
if a.2 != b.2 {
// order by ascending ancestor count
a.2.cmp(&b.2)
} else if a.1 != b.1 {
// tie-break by ascending partial txid
a.1.cmp(&b.1)
} else {
// tie-break partial txid collisions by ascending uid
a.0.cmp(&b.0)
}
});
package.push((next_tx.uid, next_tx.order(), next_tx.ancestors.len()));
let cluster_rate = next_tx.cluster_rate();
for (txid, _, _) in &package {
cluster.push(*txid);
if let Some(Some(tx)) = audit_pool.get_mut(*txid as usize) {
tx.used = true;
tx.set_dirty_if_different(cluster_rate);
transactions.push(tx.uid);
block_weight += tx.weight;
block_sigops += tx.sigops;
}
update_descendants(*txid, &mut audit_pool, &mut modified, cluster_rate);
}
if is_cluster {
clusters.push(cluster);
}
failures = 0;
}
// this block is full
let exceeded_package_tries =
failures > 1000 && block_weight > (MAX_BLOCK_WEIGHT_UNITS - BLOCK_RESERVED_WEIGHT);
let queue_is_empty = mempool_stack.is_empty() && modified.is_empty();
if (exceeded_package_tries || queue_is_empty) && blocks.len() < (MAX_BLOCKS - 1) {
// finalize this block
if !transactions.is_empty() {
blocks.push(transactions);
block_weights.push(block_weight);
}
// reset for the next block
transactions = Vec::with_capacity(initial_txes_per_block);
block_weight = BLOCK_RESERVED_WEIGHT;
block_sigops = BLOCK_RESERVED_SIGOPS;
failures = 0;
// 'overflow' packages didn't fit in this block, but are valid candidates for the next
overflow.reverse();
for overflowed in &overflow {
if let Some(Some(overflowed_tx)) = audit_pool.get(*overflowed as usize) {
if overflowed_tx.modified {
modified.push(
*overflowed,
TxPriority {
uid: *overflowed,
order: overflowed_tx.order(),
score: overflowed_tx.score(),
},
);
} else {
mempool_stack.push(*overflowed);
}
}
}
overflow = Vec::new();
}
}
info!("add the final unbounded block if it contains any transactions");
if !transactions.is_empty() {
blocks.push(transactions);
block_weights.push(block_weight);
}
info!("make a list of dirty transactions and their new rates");
let mut rates: Vec<Vec<f64>> = Vec::new();
for (uid, thread_tx) in mempool {
// Takes ownership of the audit_tx and replaces with None
if let Some(Some(audit_tx)) = audit_pool.get_mut(*uid as usize).map(Option::take) {
trace!("txid: {}, is_dirty: {}", uid, audit_tx.dirty);
if audit_tx.dirty {
rates.push(vec![f64::from(*uid), audit_tx.effective_fee_per_vsize]);
thread_tx.effective_fee_per_vsize = audit_tx.effective_fee_per_vsize;
}
// Drops the AuditTransaction manually
// There are no audit_txs that are not in the mempool HashMap
// So there is guaranteed to be no memory leaks.
ManuallyDrop::into_inner(audit_tx);
}
}
trace!("\n\n\n\n\n====================");
trace!("blocks: {:#?}", blocks);
trace!("clusters: {:#?}", clusters);
trace!("rates: {:#?}\n====================\n\n\n\n\n", rates);
GbtResult {
blocks,
block_weights,
clusters,
rates,
}
}
fn next_valid_from_stack<'a>(
mempool_stack: &mut Vec<u32>,
audit_pool: &'a AuditPool,
) -> Option<&'a AuditTransaction> {
while let Some(next_txid) = mempool_stack.last() {
match audit_pool.get(*next_txid as usize) {
Some(Some(tx)) if !tx.used && !tx.modified => {
return Some(tx);
}
_ => {
mempool_stack.pop();
}
}
}
None
}
fn next_valid_from_queue<'a>(
queue: &mut ModifiedQueue,
audit_pool: &'a AuditPool,
) -> Option<&'a AuditTransaction> {
while let Some((next_txid, _)) = queue.peek() {
match audit_pool.get(*next_txid as usize) {
Some(Some(tx)) if !tx.used => {
return Some(tx);
}
_ => {
queue.pop();
}
}
}
None
}
fn set_relatives(txid: u32, audit_pool: &mut AuditPool) {
let mut parents: HashSet<u32, U32HasherState> = u32hashset_new();
if let Some(Some(tx)) = audit_pool.get(txid as usize) {
if tx.relatives_set_flag {
return;
}
for input in &tx.inputs {
parents.insert(*input);
}
} else {
return;
}
let mut ancestors: HashSet<u32, U32HasherState> = u32hashset_new();
for parent_id in &parents {
set_relatives(*parent_id, audit_pool);
if let Some(Some(parent)) = audit_pool.get_mut(*parent_id as usize) {
// Safety: ancestors must always contain only txes in audit_pool
ancestors.insert(*parent_id);
parent.children.insert(txid);
for ancestor in &parent.ancestors {
ancestors.insert(*ancestor);
}
}
}
let mut total_fee: u64 = 0;
let mut total_sigop_adjusted_weight: u32 = 0;
let mut total_sigop_adjusted_vsize: u32 = 0;
let mut total_sigops: u32 = 0;
for ancestor_id in &ancestors {
if let Some(ancestor) = audit_pool
.get(*ancestor_id as usize)
.expect("audit_pool contains all ancestors")
{
total_fee += ancestor.fee;
total_sigop_adjusted_weight += ancestor.sigop_adjusted_weight;
total_sigop_adjusted_vsize += ancestor.sigop_adjusted_vsize;
total_sigops += ancestor.sigops;
} else { todo!() };
}
if let Some(Some(tx)) = audit_pool.get_mut(txid as usize) {
tx.set_ancestors(
ancestors,
total_fee,
total_sigop_adjusted_weight,
total_sigop_adjusted_vsize,
total_sigops,
);
}
}
// iterate over remaining descendants, removing the root as a valid ancestor & updating the ancestor score
fn update_descendants(
root_txid: u32,
audit_pool: &mut AuditPool,
modified: &mut ModifiedQueue,
cluster_rate: f64,
) {
let mut visited: HashSet<u32, U32HasherState> = u32hashset_new();
let mut descendant_stack: Vec<u32> = Vec::new();
let root_fee: u64;
let root_sigop_adjusted_weight: u32;
let root_sigop_adjusted_vsize: u32;
let root_sigops: u32;
if let Some(Some(root_tx)) = audit_pool.get(root_txid as usize) {
for descendant_id in &root_tx.children {
if !visited.contains(descendant_id) {
descendant_stack.push(*descendant_id);
visited.insert(*descendant_id);
}
}
root_fee = root_tx.fee;
root_sigop_adjusted_weight = root_tx.sigop_adjusted_weight;
root_sigop_adjusted_vsize = root_tx.sigop_adjusted_vsize;
root_sigops = root_tx.sigops;
} else {
return;
}
while let Some(next_txid) = descendant_stack.pop() {
if let Some(Some(descendant)) = audit_pool.get_mut(next_txid as usize) {
// remove root tx as ancestor
let old_score = descendant.remove_root(
root_txid,
root_fee,
root_sigop_adjusted_weight,
root_sigop_adjusted_vsize,
root_sigops,
cluster_rate,
);
// add to priority queue or update priority if score has changed
if descendant.score() < old_score {
descendant.modified = true;
modified.push_decrease(
descendant.uid,
TxPriority {
uid: descendant.uid,
order: descendant.order(),
score: descendant.score(),
},
);
} else if descendant.score() > old_score {
descendant.modified = true;
modified.push_increase(
descendant.uid,
TxPriority {
uid: descendant.uid,
order: descendant.order(),
score: descendant.score(),
},
);
}
// add this node's children to the stack
for child_id in &descendant.children {
if !visited.contains(child_id) {
descendant_stack.push(*child_id);
visited.insert(*child_id);
}
}
}
}
}

@@ -1,183 +0,0 @@
#![warn(clippy::all)]
#![warn(clippy::pedantic)]
#![warn(clippy::nursery)]
#![allow(clippy::cast_precision_loss)]
#![allow(clippy::cast_possible_truncation)]
#![allow(clippy::cast_sign_loss)]
#![allow(clippy::float_cmp)]
use napi::bindgen_prelude::Result;
use napi_derive::napi;
use thread_transaction::ThreadTransaction;
use thread_acceleration::ThreadAcceleration;
use tracing::{debug, info, trace};
use tracing_log::LogTracer;
use tracing_subscriber::{EnvFilter, FmtSubscriber};
use std::collections::HashMap;
use std::sync::{Arc, Mutex};
mod audit_transaction;
mod gbt;
mod thread_transaction;
mod thread_acceleration;
mod u32_hasher_types;
use u32_hasher_types::{u32hashmap_with_capacity, U32HasherState};
/// This is the initial capacity of the `GbtGenerator` struct's inner `HashMap`.
///
/// Note: This doesn't *have* to be a power of 2. (uwu)
const STARTING_CAPACITY: usize = 1_048_576;
type ThreadTransactionsMap = HashMap<u32, ThreadTransaction, U32HasherState>;
#[napi]
pub struct GbtGenerator {
thread_transactions: Arc<Mutex<ThreadTransactionsMap>>,
}
#[napi::module_init]
fn init() {
// Set all `tracing` logs to print to STDOUT
// Note: Passing RUST_LOG env variable to the node process
// will change the log level for the rust module.
tracing::subscriber::set_global_default(
FmtSubscriber::builder()
.with_env_filter(EnvFilter::from_default_env())
.with_ansi(
// Default to no-color logs.
// Setting RUST_LOG_COLOR to 1 or true|TRUE|True etc.
// will enable color
std::env::var("RUST_LOG_COLOR")
.map(|s| ["1", "true"].contains(&&*s.to_lowercase()))
.unwrap_or(false),
)
.finish(),
)
.expect("Logging subscriber failed");
// Convert all `log` logs into `tracing` events
LogTracer::init().expect("Legacy log subscriber failed");
}
#[napi]
impl GbtGenerator {
#[napi(constructor)]
#[allow(clippy::new_without_default)]
#[must_use]
pub fn new() -> Self {
debug!("Created new GbtGenerator");
Self {
thread_transactions: Arc::new(Mutex::new(u32hashmap_with_capacity(STARTING_CAPACITY))),
}
}
/// # Errors
///
/// Rejects if the thread panics or if the Mutex is poisoned.
#[napi]
pub async fn make(&self, mempool: Vec<ThreadTransaction>, accelerations: Vec<ThreadAcceleration>, max_uid: u32) -> Result<GbtResult> {
trace!("make: Current State {:#?}", self.thread_transactions);
run_task(
Arc::clone(&self.thread_transactions),
accelerations,
max_uid as usize,
move |map| {
for tx in mempool {
map.insert(tx.uid, tx);
}
},
)
.await
}
/// # Errors
///
/// Rejects if the thread panics or if the Mutex is poisoned.
#[napi]
pub async fn update(
&self,
new_txs: Vec<ThreadTransaction>,
remove_txs: Vec<u32>,
accelerations: Vec<ThreadAcceleration>,
max_uid: u32,
) -> Result<GbtResult> {
trace!("update: Current State {:#?}", self.thread_transactions);
run_task(
Arc::clone(&self.thread_transactions),
accelerations,
max_uid as usize,
move |map| {
for tx in new_txs {
map.insert(tx.uid, tx);
}
for txid in &remove_txs {
map.remove(txid);
}
},
)
.await
}
}
/// The result from calling the gbt function.
///
/// This tuple contains the following:
/// blocks: A 2D Vector of transaction IDs (u32), the inner Vecs each represent a block.
/// block_weights: A Vector of total weights per block.
/// clusters: A 2D Vector of transaction IDs representing clusters of dependent mempool transactions
/// rates: A Vector of tuples containing transaction IDs (u32) and effective fee per vsize (f64)
#[napi(constructor)]
pub struct GbtResult {
pub blocks: Vec<Vec<u32>>,
pub block_weights: Vec<u32>,
pub clusters: Vec<Vec<u32>>,
pub rates: Vec<Vec<f64>>, // Tuples not supported. u32 fits inside f64
}
/// All on another thread, this runs an arbitrary task in between
/// taking the lock and running gbt.
///
/// Rather than filling / updating the `HashMap` on the main thread,
/// this allows for `HashMap` modifying tasks to be run before running and returning gbt results.
///
/// `thread_transactions` is a cloned `Arc` of the `Mutex` for the `HashMap` state.
/// `callback` is a `'static + Send` `FnOnce` closure/function that takes a mutable reference
/// to the `HashMap` as the only argument. (A move closure is recommended to meet the bounds)
async fn run_task<F>(
thread_transactions: Arc<Mutex<ThreadTransactionsMap>>,
accelerations: Vec<ThreadAcceleration>,
max_uid: usize,
callback: F,
) -> Result<GbtResult>
where
F: FnOnce(&mut ThreadTransactionsMap) + Send + 'static,
{
debug!("Spawning thread...");
let handle = napi::tokio::task::spawn_blocking(move || {
debug!(
"Getting lock for thread_transactions from thread {:?}...",
std::thread::current().id()
);
let mut map = thread_transactions
.lock()
.map_err(|_| napi::Error::from_reason("THREAD_TRANSACTIONS Mutex poisoned"))?;
callback(&mut map);
info!("Starting gbt algorithm for {} elements...", map.len());
let result = gbt::gbt(&mut map, &accelerations, max_uid);
info!("Finished gbt algorithm for {} elements...", map.len());
debug!(
"Releasing lock for thread_transactions from thread {:?}...",
std::thread::current().id()
);
drop(map);
Ok(result)
});
handle
.await
.map_err(|_| napi::Error::from_reason("thread panicked"))?
}

@@ -1,8 +0,0 @@
use napi_derive::napi;
#[derive(Debug)]
#[napi(object)]
pub struct ThreadAcceleration {
pub uid: u32,
pub delta: f64, // fee delta
}

@@ -1,13 +0,0 @@
use napi_derive::napi;
#[derive(Debug)]
#[napi(object)]
pub struct ThreadTransaction {
pub uid: u32,
pub order: u32,
pub fee: f64,
pub weight: u32,
pub sigops: u32,
pub effective_fee_per_vsize: f64,
pub inputs: Vec<u32>,
}

@@ -1,132 +0,0 @@
use priority_queue::PriorityQueue;
use std::{
collections::{HashMap, HashSet},
fmt::Debug,
hash::{BuildHasher, Hasher},
};
/// This is the only way to create a `HashMap` with the `U32HasherState` and capacity
pub fn u32hashmap_with_capacity<V>(capacity: usize) -> HashMap<u32, V, U32HasherState> {
HashMap::with_capacity_and_hasher(capacity, U32HasherState(()))
}
/// This is the only way to create a `PriorityQueue` with the `U32HasherState` and capacity
pub fn u32priority_queue_with_capacity<V: Ord>(
capacity: usize,
) -> PriorityQueue<u32, V, U32HasherState> {
PriorityQueue::with_capacity_and_hasher(capacity, U32HasherState(()))
}
/// This is the only way to create a `HashSet` with the `U32HasherState`
pub fn u32hashset_new() -> HashSet<u32, U32HasherState> {
HashSet::with_hasher(U32HasherState(()))
}
/// A private unit type is contained so no one can make an instance of it.
#[derive(Clone)]
pub struct U32HasherState(());
impl Debug for U32HasherState {
fn fmt(&self, _: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
Ok(())
}
}
impl BuildHasher for U32HasherState {
type Hasher = U32Hasher;
fn build_hasher(&self) -> Self::Hasher {
U32Hasher(0)
}
}
/// This also can't be created outside this module due to private field.
pub struct U32Hasher(u32);
impl Hasher for U32Hasher {
fn finish(&self) -> u64 {
// Safety: Two u32s next to each other will make a u64
bytemuck::cast([self.0, 0])
}
fn write(&mut self, bytes: &[u8]) {
// Assert in debug builds (testing too) that only 4 byte keys (u32, i32, f32, etc.) run
debug_assert!(bytes.len() == 4);
// Safety: We know that the size of the key is 4 bytes
// We also know that the only way to get an instance of HashMap using this "hasher"
// is through the public functions in this module which set the key type to u32.
self.0 = *bytemuck::from_bytes(bytes);
}
}
#[cfg(test)]
mod tests {
use super::U32HasherState;
use priority_queue::PriorityQueue;
use std::collections::HashMap;
#[test]
fn test_hashmap() {
let mut hm: HashMap<u32, String, U32HasherState> = HashMap::with_hasher(U32HasherState(()));
// Testing basic operations with the custom hasher
hm.insert(0, String::from("0"));
hm.insert(42, String::from("42"));
hm.insert(256, String::from("256"));
hm.insert(u32::MAX, String::from("MAX"));
hm.insert(u32::MAX >> 2, String::from("MAX >> 2"));
assert_eq!(hm.get(&0), Some(&String::from("0")));
assert_eq!(hm.get(&42), Some(&String::from("42")));
assert_eq!(hm.get(&256), Some(&String::from("256")));
assert_eq!(hm.get(&u32::MAX), Some(&String::from("MAX")));
assert_eq!(hm.get(&(u32::MAX >> 2)), Some(&String::from("MAX >> 2")));
assert_eq!(hm.get(&(u32::MAX >> 4)), None);
assert_eq!(hm.get(&3), None);
assert_eq!(hm.get(&43), None);
}
#[test]
fn test_priority_queue() {
let mut pq: PriorityQueue<u32, i32, U32HasherState> =
PriorityQueue::with_hasher(U32HasherState(()));
// Testing basic operations with the custom hasher
assert_eq!(pq.push(1, 5), None);
assert_eq!(pq.push(2, -10), None);
assert_eq!(pq.push(3, 7), None);
assert_eq!(pq.push(4, 20), None);
assert_eq!(pq.push(u32::MAX, -42), None);
assert_eq!(pq.push_increase(1, 4), Some(4));
assert_eq!(pq.push_increase(2, -8), Some(-10));
assert_eq!(pq.push_increase(3, 5), Some(5));
assert_eq!(pq.push_increase(4, 21), Some(20));
assert_eq!(pq.push_increase(u32::MAX, -99), Some(-99));
assert_eq!(pq.push_increase(42, 1337), None);
assert_eq!(pq.push_decrease(1, 4), Some(5));
assert_eq!(pq.push_decrease(2, -10), Some(-8));
assert_eq!(pq.push_decrease(3, 5), Some(7));
assert_eq!(pq.push_decrease(4, 20), Some(21));
assert_eq!(pq.push_decrease(u32::MAX, 100), Some(100));
assert_eq!(pq.push_decrease(69, 420), None);
assert_eq!(pq.peek(), Some((&42, &1337)));
assert_eq!(pq.pop(), Some((42, 1337)));
assert_eq!(pq.peek(), Some((&69, &420)));
assert_eq!(pq.pop(), Some((69, 420)));
assert_eq!(pq.peek(), Some((&4, &20)));
assert_eq!(pq.pop(), Some((4, 20)));
assert_eq!(pq.peek(), Some((&3, &5)));
assert_eq!(pq.pop(), Some((3, 5)));
assert_eq!(pq.peek(), Some((&1, &4)));
assert_eq!(pq.pop(), Some((1, 4)));
assert_eq!(pq.peek(), Some((&2, &-10)));
assert_eq!(pq.pop(), Some((2, -10)));
assert_eq!(pq.peek(), Some((&u32::MAX, &-42)));
assert_eq!(pq.pop(), Some((u32::MAX, -42)));
assert_eq!(pq.peek(), None);
assert_eq!(pq.pop(), None);
}
}

@@ -1,138 +0,0 @@
{
"MEMPOOL": {
"ENABLED": true,
"NETWORK": "__MEMPOOL_NETWORK__",
"BACKEND": "__MEMPOOL_BACKEND__",
"BLOCKS_SUMMARIES_INDEXING": true,
"HTTP_PORT": 1,
"SPAWN_CLUSTER_PROCS": 2,
"API_URL_PREFIX": "__MEMPOOL_API_URL_PREFIX__",
"AUTOMATIC_BLOCK_REINDEXING": false,
"POLL_RATE_MS": 3,
"CACHE_DIR": "__MEMPOOL_CACHE_DIR__",
"CACHE_ENABLED": true,
"CLEAR_PROTECTION_MINUTES": 4,
"RECOMMENDED_FEE_PERCENTILE": 5,
"BLOCK_WEIGHT_UNITS": 6,
"INITIAL_BLOCKS_AMOUNT": 7,
"MEMPOOL_BLOCKS_AMOUNT": 8,
"USE_SECOND_NODE_FOR_MINFEE": 10,
"EXTERNAL_ASSETS": [],
"EXTERNAL_MAX_RETRY": 12,
"EXTERNAL_RETRY_INTERVAL": 13,
"USER_AGENT": "__MEMPOOL_USER_AGENT__",
"STDOUT_LOG_MIN_PRIORITY": "__MEMPOOL_STDOUT_LOG_MIN_PRIORITY__",
"INDEXING_BLOCKS_AMOUNT": 14,
"POOLS_JSON_TREE_URL": "__MEMPOOL_POOLS_JSON_TREE_URL__",
"POOLS_JSON_URL": "__MEMPOOL_POOLS_JSON_URL__",
"AUDIT": true,
"ADVANCED_GBT_AUDIT": true,
"ADVANCED_GBT_MEMPOOL": true,
"RUST_GBT": false,
"CPFP_INDEXING": true,
"MAX_BLOCKS_BULK_QUERY": 999,
"DISK_CACHE_BLOCK_INTERVAL": 999,
"MAX_PUSH_TX_SIZE_WEIGHT": 4000000,
"ALLOW_UNREACHABLE": true,
"PRICE_UPDATES_PER_HOUR": 1
},
"CORE_RPC": {
"HOST": "__CORE_RPC_HOST__",
"PORT": 15,
"USERNAME": "__CORE_RPC_USERNAME__",
"PASSWORD": "__CORE_RPC_PASSWORD__",
"TIMEOUT": 1000
},
"ELECTRUM": {
"HOST": "__ELECTRUM_HOST__",
"PORT": 16,
"TLS_ENABLED": true
},
"ESPLORA": {
"REST_API_URL": "__ESPLORA_REST_API_URL__",
"UNIX_SOCKET_PATH": "__ESPLORA_UNIX_SOCKET_PATH__",
"RETRY_UNIX_SOCKET_AFTER": 888,
"FALLBACK": []
},
"SECOND_CORE_RPC": {
"HOST": "__SECOND_CORE_RPC_HOST__",
"PORT": 17,
"USERNAME": "__SECOND_CORE_RPC_USERNAME__",
"PASSWORD": "__SECOND_CORE_RPC_PASSWORD__",
"TIMEOUT": 2000
},
"DATABASE": {
"ENABLED": false,
"HOST": "__DATABASE_HOST__",
"SOCKET": "__DATABASE_SOCKET__",
"PORT": 18,
"DATABASE": "__DATABASE_DATABASE__",
"USERNAME": "__DATABASE_USERNAME__",
"PASSWORD": "__DATABASE_PASSWORD__",
"TIMEOUT": 3000
},
"SYSLOG": {
"ENABLED": false,
"HOST": "__SYSLOG_HOST__",
"PORT": 19,
"MIN_PRIORITY": "__SYSLOG_MIN_PRIORITY__",
"FACILITY": "__SYSLOG_FACILITY__"
},
"STATISTICS": {
"ENABLED": false,
"TX_PER_SECOND_SAMPLE_PERIOD": 20
},
"BISQ": {
"ENABLED": true,
"DATA_PATH": "__BISQ_DATA_PATH__"
},
"SOCKS5PROXY": {
"ENABLED": true,
"USE_ONION": true,
"HOST": "__SOCKS5PROXY_HOST__",
"PORT": "__SOCKS5PROXY_PORT__",
"USERNAME": "__SOCKS5PROXY_USERNAME__",
"PASSWORD": "__SOCKS5PROXY_PASSWORD__"
},
"EXTERNAL_DATA_SERVER": {
"MEMPOOL_API": "__EXTERNAL_DATA_SERVER_MEMPOOL_API__",
"MEMPOOL_ONION": "__EXTERNAL_DATA_SERVER_MEMPOOL_ONION__",
"LIQUID_API": "__EXTERNAL_DATA_SERVER_LIQUID_API__",
"LIQUID_ONION": "__EXTERNAL_DATA_SERVER_LIQUID_ONION__",
"BISQ_URL": "__EXTERNAL_DATA_SERVER_BISQ_URL__",
"BISQ_ONION": "__EXTERNAL_DATA_SERVER_BISQ_ONION__"
},
"LIGHTNING": {
"ENABLED": true,
"BACKEND": "__LIGHTNING_BACKEND__",
"TOPOLOGY_FOLDER": "__LIGHTNING_TOPOLOGY_FOLDER__",
"STATS_REFRESH_INTERVAL": 600,
"GRAPH_REFRESH_INTERVAL": 600,
"LOGGER_UPDATE_INTERVAL": 30,
"FORENSICS_INTERVAL": 43200,
"FORENSICS_RATE_LIMIT": 1234
},
"LND": {
"TLS_CERT_PATH": "",
"MACAROON_PATH": "",
"REST_API_URL": "https://localhost:8080",
"TIMEOUT": 10000
},
"CLIGHTNING": {
"SOCKET": "__CLIGHTNING_SOCKET__"
},
"REPLICATION": {
"ENABLED": false,
"AUDIT": false,
"AUDIT_START_HEIGHT": 774000,
"SERVERS": []
},
"MEMPOOL_SERVICES": {
"API": "",
"ACCELERATIONS": false
},
"REDIS": {
"ENABLED": false,
"UNIX_SOCKET_PATH": "/tmp/redis.sock"
}
}

@@ -1,24 +0,0 @@
import { Common } from '../../api/common';
import { MempoolTransactionExtended } from '../../mempool.interfaces';
const randomTransactions = require('./test-data/transactions-random.json');
const replacedTransactions = require('./test-data/transactions-replaced.json');
const rbfTransactions = require('./test-data/transactions-rbfs.json');
describe('Mempool Utils', () => {
test('should detect RBF transactions with fast method', () => {
const newTransactions = rbfTransactions.concat(randomTransactions);
const result: { [txid: string]: MempoolTransactionExtended[] } = Common.findRbfTransactions(newTransactions, replacedTransactions);
expect(Object.values(result).length).toEqual(2);
expect(result).toHaveProperty('7219d95161f3718335991ac6d967d24eedec370908c9879bb1e192e6d797d0a6');
expect(result).toHaveProperty('5387881d695d4564d397026dc5f740f816f8390b4b2c5ec8c20309122712a875');
});
test.only('should detect RBF transactions with scalable method', () => {
const newTransactions = rbfTransactions.concat(randomTransactions);
const result: { [txid: string]: MempoolTransactionExtended[] } = Common.findRbfTransactions(newTransactions, replacedTransactions, true);
expect(Object.values(result).length).toEqual(2);
expect(result).toHaveProperty('7219d95161f3718335991ac6d967d24eedec370908c9879bb1e192e6d797d0a6');
expect(result).toHaveProperty('5387881d695d4564d397026dc5f740f816f8390b4b2c5ec8c20309122712a875');
});
});

@@ -1,135 +0,0 @@
import {
calcBitsDifference,
calcDifficultyAdjustment,
DifficultyAdjustment,
} from '../../api/difficulty-adjustment';
describe('Mempool Difficulty Adjustment', () => {
test('should calculate Difficulty Adjustments properly', () => {
const dt = (dtString) => {
return Math.floor(new Date(dtString).getTime() / 1000);
};
const vectors = [
[ // Vector 1
[ // Inputs
dt('2022-08-18T11:07:00.000Z'), // Last DA time (in seconds)
dt('2022-08-19T14:03:53.000Z'), // Current time (now) (in seconds)
750134, // Current block height
0.6280047707459726, // Previous retarget % (Passed through)
'mainnet', // Network (if testnet, next value is non-zero)
0, // Latest block timestamp in seconds (only used if difficulty already locked in)
],
{ // Expected Result
progressPercent: 9.027777777777777,
difficultyChange: 13.180707740199772,
estimatedRetargetDate: 1661895424692,
remainingBlocks: 1834,
remainingTime: 977591692,
previousRetarget: 0.6280047707459726,
previousTime: 1660820820,
nextRetargetHeight: 751968,
timeAvg: 533038,
timeOffset: 0,
expectedBlocks: 161.68833333333333,
},
],
[ // Vector 2 (testnet)
[ // Inputs
dt('2022-08-18T11:07:00.000Z'), // Last DA time (in seconds)
dt('2022-08-19T14:03:53.000Z'), // Current time (now) (in seconds)
750134, // Current block height
0.6280047707459726, // Previous retarget % (Passed through)
'testnet', // Network
dt('2022-08-19T13:52:46.000Z'), // Latest block timestamp in seconds
],
{ // Expected result is the same except for timeOffset
progressPercent: 9.027777777777777,
difficultyChange: 13.180707740199772,
estimatedRetargetDate: 1661895424692,
remainingBlocks: 1834,
remainingTime: 977591692,
previousTime: 1660820820,
previousRetarget: 0.6280047707459726,
nextRetargetHeight: 751968,
timeAvg: 533038,
timeOffset: -667000, // 11 min 7 seconds since last block (testnet only)
// timeAvg plus abs(timeOffset) comes to 1200038 ms, i.e. just over the 20-minute testnet minimum-difficulty window
expectedBlocks: 161.68833333333333,
},
],
[ // Vector 3 (mainnet lock-in (epoch ending 788255))
[ // Inputs
dt('2023-04-20T09:57:33.000Z'), // Last DA time (in seconds)
dt('2023-05-04T14:54:09.000Z'), // Current time (now) (in seconds)
788255, // Current block height
1.7220298879531821, // Previous retarget % (Passed through)
'mainnet', // Network (if testnet, next value is non-zero)
dt('2023-05-04T14:54:26.000Z'), // Latest block timestamp in seconds
],
{ // Expected Result
progressPercent: 99.95039682539682,
difficultyChange: -1.4512637555574193,
estimatedRetargetDate: 1683212658129,
remainingBlocks: 1,
remainingTime: 609129,
previousRetarget: 1.7220298879531821,
previousTime: 1681984653,
nextRetargetHeight: 788256,
timeAvg: 609129,
timeOffset: 0,
expectedBlocks: 2045.66,
},
],
] as [[number, number, number, number, string, number], DifficultyAdjustment][];
for (const vector of vectors) {
const result = calcDifficultyAdjustment(...vector[0]);
// previousRetarget is passed through untouched
expect(result.previousRetarget).toStrictEqual(vector[0][3]);
expect(result).toStrictEqual(vector[1]);
}
});
test('should calculate Difficulty change from bits fields of two blocks', () => {
// Same exponent; also verifies the output is clamped to its min/max bounds
expect(calcBitsDifference(0x1d000200, 0x1d000100)).toEqual(100);
expect(calcBitsDifference(0x1d000400, 0x1d000100)).toEqual(300);
expect(calcBitsDifference(0x1d000800, 0x1d000100)).toEqual(300); // Unclamped value is 700; output capped at +300
expect(calcBitsDifference(0x1d000100, 0x1d000200)).toEqual(-50);
expect(calcBitsDifference(0x1d000100, 0x1d000400)).toEqual(-75);
expect(calcBitsDifference(0x1d000100, 0x1d000800)).toEqual(-75); // Unclamped value is -87.5; output floored at -75
// Check new higher exponent
expect(calcBitsDifference(0x1c000200, 0x1d000001)).toEqual(100);
expect(calcBitsDifference(0x1c000400, 0x1d000001)).toEqual(300);
expect(calcBitsDifference(0x1c000800, 0x1d000001)).toEqual(300);
expect(calcBitsDifference(0x1c000100, 0x1d000002)).toEqual(-50);
expect(calcBitsDifference(0x1c000100, 0x1d000004)).toEqual(-75);
expect(calcBitsDifference(0x1c000100, 0x1d000008)).toEqual(-75);
// Check new lower exponent
expect(calcBitsDifference(0x1d000002, 0x1c000100)).toEqual(100);
expect(calcBitsDifference(0x1d000004, 0x1c000100)).toEqual(300);
expect(calcBitsDifference(0x1d000008, 0x1c000100)).toEqual(300);
expect(calcBitsDifference(0x1d000001, 0x1c000200)).toEqual(-50);
expect(calcBitsDifference(0x1d000001, 0x1c000400)).toEqual(-75);
expect(calcBitsDifference(0x1d000001, 0x1c000800)).toEqual(-75);
// Check error when exponents are too far apart
expect(() => calcBitsDifference(0x1d000001, 0x1a000800)).toThrow(
/Impossible exponent difference/
);
// Check invalid inputs
expect(() => calcBitsDifference(0x7f000001, 0x1a000800)).toThrow(
/Invalid bits/
);
expect(() => calcBitsDifference(0, 0x1a000800)).toThrow(/Invalid bits/);
expect(() => calcBitsDifference(100.2783, 0x1a000800)).toThrow(
/Invalid bits/
);
expect(() => calcBitsDifference(0x00800000, 0x1a000800)).toThrow(
/Invalid bits/
);
expect(() => calcBitsDifference(0x1c000000, 0x1a000800)).toThrow(
/Invalid bits/
);
});
});
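Two of the expectations above can be checked by hand. In vector 1, 751968 - 750134 = 1834 remaining blocks, so progress is (2016 - 1834) / 2016 = 9.0277...%, and the 97013 seconds elapsed since the last retarget divided by the 600-second block target give the 161.688... expected blocks. For the bits test, here is a minimal sketch of a difficulty-change calculation consistent with the clamping the assertions imply; it is illustrative, not the repo's calcBitsDifference, and it omits the input validation exercised by the error cases:
// Compact "bits" encode a target as mantissa * 256^(exponent - 3);
// difficulty is inversely proportional to the target, and the tests
// above imply the result is clamped to [-75, +300] percent.
function bitsDifferenceSketch(oldBits: number, newBits: number): number {
  const target = (bits: number): number => {
    const exponent = bits >>> 24;
    const mantissa = bits & 0x00ffffff;
    return mantissa * Math.pow(256, exponent - 3);
  };
  const change = (target(oldBits) / target(newBits) - 1) * 100;
  return Math.min(300, Math.max(-75, change));
}
// bitsDifferenceSketch(0x1d000200, 0x1d000100) === 100
// bitsDifferenceSketch(0x1d000800, 0x1d000100) === 300 (unclamped: 700)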

View File

@@ -1,277 +0,0 @@
[
{
"txid": "13f007241d78e8b0b4e57d2ae3fd37bcfe3226534d7cadeba5a549860d960db0",
"version": 2,
"locktime": 0,
"vin": [
{
"txid": "cb8f206f4e88bec97107089f3e9e61d50cde53d4541992ae19759b71103cf75c",
"vout": 0,
"prevout": {
"scriptpubkey": "0014fd6d15ff832c12f1ff04a5ccd5039f7227b260bd",
"scriptpubkey_asm": "OP_0 OP_PUSHBYTES_20 fd6d15ff832c12f1ff04a5ccd5039f7227b260bd",
"scriptpubkey_type": "v0_p2wpkh",
"scriptpubkey_address": "bc1ql4k3tlur9sf0rlcy5hxd2qulwgnmyc9akehvth",
"value": 610677
},
"scriptsig": "",
"scriptsig_asm": "",
"witness": [
"304302205c430b36ebd2bb327951d83440af1f58f127871b2baada4c4dde2bc0b6721f56021f3445099f1a40e35baeda32e8e3727b505ffba0d882b11f498c7762f4184e9901",
"0236b5edd4fbbcfb045960e42ec8a9968944084785932e32940e8cd2583b37da67"
],
"is_coinbase": false,
"sequence": 2147483648
}
],
"vout": [
{
"scriptpubkey": "76a9149d32ef812385f3811634e0c0117dd153a5de10a488ac",
"scriptpubkey_asm": "OP_DUP OP_HASH160 OP_PUSHBYTES_20 9d32ef812385f3811634e0c0117dd153a5de10a4 OP_EQUALVERIFY OP_CHECKSIG",
"scriptpubkey_type": "p2pkh",
"scriptpubkey_address": "1FLC7Bag7okAkKPCyZbgZZg3Hh1EuGZ5Rd",
"value": 344697
},
{
"scriptpubkey": "00147dee8a7a38abbfb00dbfba365c8d6712934cc491",
"scriptpubkey_asm": "OP_0 OP_PUSHBYTES_20 7dee8a7a38abbfb00dbfba365c8d6712934cc491",
"scriptpubkey_type": "v0_p2wpkh",
"scriptpubkey_address": "bc1q0hhg573c4wlmqrdlhgm9ert8z2f5e3y3lf9hvx",
"value": 265396
}
],
"size": 224,
"weight": 572,
"fee": 584,
"status": {
"confirmed": false
},
"order": 2953680397,
"vsize": 143,
"adjustedVsize": 143,
"sigops": 5,
"feePerVsize": 4.083916083916084,
"adjustedFeePerVsize": 4.083916083916084,
"effectiveFeePerVsize": 4.083916083916084,
"firstSeen": 1691222538,
"uid": 526973,
"inputs": [
526728
],
"position": {
"block": 7,
"vsize": 21429708.5
},
"bestDescendant": null,
"cpfpChecked": true
},
{
"txid": "8e89b20f8a7fadb0e4cdbe57a00eee224f5076bac5387fc276916724e7c4a16a",
"version": 2,
"locktime": 800571,
"vin": [
{
"txid": "35e16762459539f3a8e52c5dee6a9ccaa9e9268efed33aa2c6e1b7805e849f24",
"vout": 0,
"prevout": {
"scriptpubkey": "0014d4f16ef275b3e1c4a4ecbef55a164933e0f6460f",
"scriptpubkey_asm": "OP_0 OP_PUSHBYTES_20 d4f16ef275b3e1c4a4ecbef55a164933e0f6460f",
"scriptpubkey_type": "v0_p2wpkh",
"scriptpubkey_address": "bc1q6nckaun4k0suff8vhm6459jfx0s0v3s0ff4ukl",
"value": 1528924
},
"scriptsig": "",
"scriptsig_asm": "",
"witness": [
"3044022019008b26e885bb43da25a11ffac147a057722072eedb68411f114f6e7eb82ebc02201b618264bb97756b88fc3bbc365b73044ac18b33b1067e31cfd5bcd0f50ed2c701",
"039b71145070bd3e8af28e27fa577f2e12ab6bb4e212d3eeaef08b4bc39e8cbc13"
],
"is_coinbase": false,
"sequence": 4294967293
},
{
"txid": "67c27ed0f767526234bcd5f795a31fab8ec4d0251bf12c68f2746951f4110d90",
"vout": 3,
"prevout": {
"scriptpubkey": "0014a7c3d613b321375054b2ac9b6114367bc034ad6f",
"scriptpubkey_asm": "OP_0 OP_PUSHBYTES_20 a7c3d613b321375054b2ac9b6114367bc034ad6f",
"scriptpubkey_type": "v0_p2wpkh",
"scriptpubkey_address": "bc1q5lpavyanyym4q49j4jdkz9pk00qrftt0yqzvk3",
"value": 436523
},
"scriptsig": "",
"scriptsig_asm": "",
"witness": [
"304402204e67285fc656bc45ed082499b076d5dba2fa21d0d7e64a0ae52b19d69a11760002200f037d81ee540b74397844513b72b08ed92b06db76bd20b08f7a0a3b36ab13d501",
"02a3ebae85f0225b6fbb5ff060afce683a4683507a57544605a29ee7d287e591b4"
],
"is_coinbase": false,
"sequence": 4294967293
},
{
"txid": "21c38fb9a2521e438c614f53b19ddd7a5594bcc4b77480e762fd4b702fad3374",
"vout": 1,
"prevout": {
"scriptpubkey": "00149660e34ef88106536c816c037b5b28dd64a812e2",
"scriptpubkey_asm": "OP_0 OP_PUSHBYTES_20 9660e34ef88106536c816c037b5b28dd64a812e2",
"scriptpubkey_type": "v0_p2wpkh",
"scriptpubkey_address": "bc1qjeswxnhcsyr9xmypdsphkkegm4j2syhztgzxv4",
"value": 758149
},
"scriptsig": "",
"scriptsig_asm": "",
"witness": [
"3044022021b556f0aa99329076bcc435338aceaf534963efcab306931b1b2b0461e16e0c02203a78942a3745c4da656bddfd8cf16b85dc04d652904e88682127cdd9ca63339001",
"0298963be4a8f66aca9fcf1c6dc95547aeaa82347543190c91e094c2321142b9f0"
],
"is_coinbase": false,
"sequence": 4294967293
},
{
"txid": "aa998dbae65240a7386bf7d468459551d99c3de8e2f9057ff5f2d38e17daf788",
"vout": 0,
"prevout": {
"scriptpubkey": "00147bb7413a39943b21ded98ad5e6ad7a222d273e17",
"scriptpubkey_asm": "OP_0 OP_PUSHBYTES_20 7bb7413a39943b21ded98ad5e6ad7a222d273e17",
"scriptpubkey_type": "v0_p2wpkh",
"scriptpubkey_address": "bc1q0wm5zw3ejsajrhke3t27dtt6ygkjw0sh9lltg6",
"value": 1067200
},
"scriptsig": "",
"scriptsig_asm": "",
"witness": [
"304402205e2269f7d4ee0513b34354c38e920aef2dabac6f4350afb2dd105ff3ee43ae7b02202870322f2cb85cb0b2b0e38152f018bfff271dc3ec5aed0515854d0b259aaf3d01",
"03b87320cf3263a644a0d3f89c1b4a7304d9dfda9eb8c891560716abcb73e88b99"
],
"is_coinbase": false,
"sequence": 4294967293
},
{
"txid": "230253d195d779d4688ba16993985cd27b2e7a687d8b889b3bc63f19ece36f20",
"vout": 0,
"prevout": {
"scriptpubkey": "001439647bd997819d12dfc72b0fb9ff9ffcb84946f8",
"scriptpubkey_asm": "OP_0 OP_PUSHBYTES_20 39647bd997819d12dfc72b0fb9ff9ffcb84946f8",
"scriptpubkey_type": "v0_p2wpkh",
"scriptpubkey_address": "bc1q89j8hkvhsxw39h789v8mnlulljuyj3hc9zve97",
"value": 361950
},
"scriptsig": "",
"scriptsig_asm": "",
"witness": [
"304402204f7ca868bb9b92a07fecdc6b9dd56e4e1d007ca1405952f98ed6bc416345b5f2022055320a97791417abf6628fcf6513ac5785b06c630f854d8595e96ea06c3841d301",
"03a3ffe8e3ef2eea129b227e9658164bae0a6d21c17da6de9973ba34d9e04b21a0"
],
"is_coinbase": false,
"sequence": 4294967293
},
{
"txid": "670771e265a0b62dbd3c1fec2b865177eaf0bafd0ae49dd40a1c9fcd9a847a81",
"vout": 0,
"prevout": {
"scriptpubkey": "0014d45d1b0022c7387e42c5452ced561bdb8fd4b521",
"scriptpubkey_asm": "OP_0 OP_PUSHBYTES_20 d45d1b0022c7387e42c5452ced561bdb8fd4b521",
"scriptpubkey_type": "v0_p2wpkh",
"scriptpubkey_address": "bc1q63w3kqpzcuu8usk9g5kw64smmw8afdfpxmc2m0",
"value": 453275
},
"scriptsig": "",
"scriptsig_asm": "",
"witness": [
"3044022071312921800441903b2099e723add8702dd0f92ec11526ff87acf6967ec64cbd02203deabe7ed56d5daaa9a95c5a607b1ab705ff1c46bc6984a6dca120e63a91768601",
"0257302ac8d9c4c8f9b1744f19bb432359326b9cc7bdddeeab9202749a6d92be58"
],
"is_coinbase": false,
"sequence": 4294967293
},
{
"txid": "0af82159eee2b69242f2ff032636e410b67ec1ace52e55fb0d20ed814cd64803",
"vout": 0,
"prevout": {
"scriptpubkey": "001459e4d6bfefc6b45f955a69c4aeca26348e9d54ed",
"scriptpubkey_asm": "OP_0 OP_PUSHBYTES_20 59e4d6bfefc6b45f955a69c4aeca26348e9d54ed",
"scriptpubkey_type": "v0_p2wpkh",
"scriptpubkey_address": "bc1qt8jdd0l0c669l926d8z2aj3xxj8f648dtyn7tc",
"value": 439961
},
"scriptsig": "",
"scriptsig_asm": "",
"witness": [
"3044022027540322e92c23c5513aa2587e7feb56a8ce82f879269d6b3cbd425634b44f8e022045572dee7262b02130bfe32d8aa8abbfaa64e101abfc819bba5380c78876692d01",
"03fe02262d87f4a5289d3dd66e3d9a74cd49fa1cad0249284a7451896a827249a5"
],
"is_coinbase": false,
"sequence": 4294967293
},
{
"txid": "68cf9c784870a4f888f044755f7ce318557f652461db8ef887d279672f186018",
"vout": 0,
"prevout": {
"scriptpubkey": "001454822b2d5d52597a78b630921cf439a41e32f2f9",
"scriptpubkey_asm": "OP_0 OP_PUSHBYTES_20 54822b2d5d52597a78b630921cf439a41e32f2f9",
"scriptpubkey_type": "v0_p2wpkh",
"scriptpubkey_address": "bc1q2jpzkt2a2fvh579kxzfpeape5s0r9uhewhl5n4",
"value": 227639
},
"scriptsig": "",
"scriptsig_asm": "",
"witness": [
"304402203ad511d6a8730748b8828bc38897d360451adf620ebdc1d229c08c097c80bef202202f50c793d95b5200cf2258e03896a3be7720df0eb3b8c810c86db74341a7e83e01",
"0294992e9f4546e6e119741f908411ae531e9d1ff732d69b4dff8172aaf2a4b216"
],
"is_coinbase": false,
"sequence": 4294967293
},
{
"txid": "793f01dfdb19bf41f958fd917c16d9c4dd5d5e1a5c0434bfdb367212659d1b5b",
"vout": 0,
"prevout": {
"scriptpubkey": "0014f54edf8ae647b5300e2674523254e923d93d169f",
"scriptpubkey_asm": "OP_0 OP_PUSHBYTES_20 f54edf8ae647b5300e2674523254e923d93d169f",
"scriptpubkey_type": "v0_p2wpkh",
"scriptpubkey_address": "bc1q748dlzhxg76nqr3xw3fry48fy0vn695lvhlkxv",
"value": 227070
},
"scriptsig": "",
"scriptsig_asm": "",
"witness": [
"304402206e807ab616f4f2887ba703ae744d856142d9aca8128698419bbb67fb4fad8177022060fc65c7cd66baa88ad1e1d317a6edd5f6cb52fe8bff6e5405ffa1acf9d945d901",
"02a0ad0167c6e9edf62677404d74d3b80ea276e47e758ffaa6ca17bd65ac79f7aa"
],
"is_coinbase": false,
"sequence": 4294967293
}
],
"vout": [
{
"scriptpubkey": "00148a5c45ccfc29d209940d94525e2edb7743a1ad8a",
"scriptpubkey_asm": "OP_0 OP_PUSHBYTES_20 8a5c45ccfc29d209940d94525e2edb7743a1ad8a",
"scriptpubkey_type": "v0_p2wpkh",
"scriptpubkey_address": "bc1q3fwytn8u98fqn9qdj3f9utkmwap6rtv2ym33zm",
"value": 5500000
}
],
"size": 1375,
"weight": 2605,
"fee": 691,
"status": {
"confirmed": false
},
"order": 1788986599,
"vsize": 651,
"adjustedVsize": 651.25,
"sigops": 9,
"feePerVsize": 1.0610364683301343,
"adjustedFeePerVsize": 1.0610364683301343,
"effectiveFeePerVsize": 1.0610364683301343,
"firstSeen": 1691163298,
"uid": 120494,
"inputs": [],
"position": {
"block": 7,
"vsize": 93780091.5
},
"bestDescendant": null,
"cpfpChecked": true
}
]

View File

@@ -1,121 +0,0 @@
[
{
"txid": "7219d95161f3718335991ac6d967d24eedec370908c9879bb1e192e6d797d0a6",
"version": 1,
"locktime": 0,
"vin": [
{
"txid": "d863deb706de5a611028f7547e16ea81d7819e44beb640fb30a9ba30c585140f",
"vout": 0,
"prevout": {
"scriptpubkey": "76a914cd5b6566b455d043558829f6932edaae5d8f0ad388ac",
"scriptpubkey_asm": "OP_DUP OP_HASH160 OP_PUSHBYTES_20 cd5b6566b455d043558829f6932edaae5d8f0ad3 OP_EQUALVERIFY OP_CHECKSIG",
"scriptpubkey_type": "p2pkh",
"scriptpubkey_address": "1Kiq1dyVBzYLWGrBPWjChvKyzB2H95x5RJ",
"value": 799995000
},
"scriptsig": "483045022100aeeddfb9785c5a4b70e90d0445785c68b7a44e28853441134a70ddc4da39527602203dfe1ec1a377aaacb64ae65c7c944caf1398d2dc063f712251b4cf696d44d3cb01210314338e3e191aea3ac9e9292611faeedf0379bbe62c30fd76c7450722a1ac47c6",
"scriptsig_asm": "OP_PUSHBYTES_72 3045022100aeeddfb9785c5a4b70e90d0445785c68b7a44e28853441134a70ddc4da39527602203dfe1ec1a377aaacb64ae65c7c944caf1398d2dc063f712251b4cf696d44d3cb01 OP_PUSHBYTES_33 0314338e3e191aea3ac9e9292611faeedf0379bbe62c30fd76c7450722a1ac47c6",
"is_coinbase": false,
"sequence": 4294967293
}
],
"vout": [
{
"scriptpubkey": "6a4c5058325b8669baa9259e082f064005bc92274b559337ac317798f5d76f2d0577ed5a96042fce8c33d841b6c47a99f9597000ab04a10b34cd419fc19784d9e36f1a33fd7b000c3bce00b6000c1d1e00614b",
"scriptpubkey_asm": "OP_RETURN OP_PUSHDATA1 58325b8669baa9259e082f064005bc92274b559337ac317798f5d76f2d0577ed5a96042fce8c33d841b6c47a99f9597000ab04a10b34cd419fc19784d9e36f1a33fd7b000c3bce00b6000c1d1e00614b",
"scriptpubkey_type": "op_return",
"value": 0
},
{
"scriptpubkey": "a9144890aae025c84cb72a9730b49ca12595d6f6088d87",
"scriptpubkey_asm": "OP_HASH160 OP_PUSHBYTES_20 4890aae025c84cb72a9730b49ca12595d6f6088d OP_EQUAL",
"scriptpubkey_type": "p2sh",
"scriptpubkey_address": "38Jht2bzmJL4EwoFvvyFzejhfEb4J7KxLb",
"value": 155000
},
{
"scriptpubkey": "76a91486e7dad6617303942a448b7f8afe9653e5624a5e88ac",
"scriptpubkey_asm": "OP_DUP OP_HASH160 OP_PUSHBYTES_20 86e7dad6617303942a448b7f8afe9653e5624a5e OP_EQUALVERIFY OP_CHECKSIG",
"scriptpubkey_type": "p2pkh",
"scriptpubkey_address": "1DJKJGApgX4W8BSQ8FRPLqX78UaCskT4r2",
"value": 155000
},
{
"scriptpubkey": "76a914cd5b6566b455d043558829f6932edaae5d8f0ad388ac",
"scriptpubkey_asm": "OP_DUP OP_HASH160 OP_PUSHBYTES_20 cd5b6566b455d043558829f6932edaae5d8f0ad3 OP_EQUALVERIFY OP_CHECKSIG",
"scriptpubkey_type": "p2pkh",
"scriptpubkey_address": "1Kiq1dyVBzYLWGrBPWjChvKyzB2H95x5RJ",
"value": 799675549
}
],
"size": 350,
"weight": 1400,
"fee": 9451,
"status": {
"confirmed": false
},
"order": 2798688215,
"vsize": 350,
"adjustedVsize": 350,
"sigops": 8,
"feePerVsize": 27.002857142857142,
"adjustedFeePerVsize": 27.002857142857142,
"effectiveFeePerVsize": 27.002857142857142,
"firstSeen": 1691218536,
"uid": 513598,
"inputs": [],
"position": {
"block": 0,
"vsize": 22166
},
"bestDescendant": null,
"cpfpChecked": true
},
{
"txid": "5387881d695d4564d397026dc5f740f816f8390b4b2c5ec8c20309122712a875",
"version": 2,
"locktime": 0,
"vin": [
{
"txid": "b50225a04a1d6fbbfa7a2122bc0580396f614027b3957f476229633576f06130",
"vout": 0,
"prevout": {
"scriptpubkey": "0014a24f913f8a9c30a4c302c2c78f2fd7addb08fd07",
"scriptpubkey_asm": "OP_0 OP_PUSHBYTES_20 a24f913f8a9c30a4c302c2c78f2fd7addb08fd07",
"scriptpubkey_type": "v0_p2wpkh",
"scriptpubkey_address": "bc1q5f8ez0u2nsc2fsczctrc7t7h4hds3lg82ewqhz",
"value": 612917
},
"scriptsig": "",
"scriptsig_asm": "",
"witness": [
"3045022100a0c23953ace5d022b7a6d45d1ae1730bf20a4d594bb5d4fa7aa80e4881b44d320220008f9b144805bb91995fc0f452a56e09f4ad16fa149d71ae9b5d57c742e8e2cc01",
"03dc2c7b687019b40a68d713322675206cc266e34e5340ec982c13ff0222c3b2b6"
],
"is_coinbase": false,
"sequence": 2147483649
}
],
"vout": [
{
"scriptpubkey": "0014199a98f9589364ffe5ef5bbae45ce5dfcbb873bd",
"scriptpubkey_asm": "OP_0 OP_PUSHBYTES_20 199a98f9589364ffe5ef5bbae45ce5dfcbb873bd",
"scriptpubkey_type": "v0_p2wpkh",
"scriptpubkey_address": "bc1qrxdf372cjdj0le00twawgh89ml9msuaau62gk4",
"value": 611909
}
],
"size": 192,
"weight": 438,
"fee": 1008,
"status": {
"confirmed": false
},
"bestDescendant": null,
"descendants": null,
"adjustedFeePerVsize": 10.2283,
"sigops": 1,
"adjustedVsize": 109.5
}
]

View File

@@ -1,139 +0,0 @@
[
{
"txid": "008592364e21c1e3d62ba9538ac78a81779897b52100af5707ab063df98964f2",
"version": 1,
"locktime": 0,
"vin": [
{
"txid": "d863deb706de5a611028f7547e16ea81d7819e44beb640fb30a9ba30c585140f",
"vout": 0,
"prevout": {
"scriptpubkey": "76a914cd5b6566b455d043558829f6932edaae5d8f0ad388ac",
"scriptpubkey_asm": "OP_DUP OP_HASH160 OP_PUSHBYTES_20 cd5b6566b455d043558829f6932edaae5d8f0ad3 OP_EQUALVERIFY OP_CHECKSIG",
"scriptpubkey_type": "p2pkh",
"scriptpubkey_address": "1Kiq1dyVBzYLWGrBPWjChvKyzB2H95x5RJ",
"value": 799995000
},
"scriptsig": "483045022100c1fb331d155a7d299a0451d14fa1122b328e0e239afc9ba8dc2aff449ddc5a3a02201c1e19030d1efa432f5069cd369d7ad09a67f68501345e4db35f7b799605f55601210314338e3e191aea3ac9e9292611faeedf0379bbe62c30fd76c7450722a1ac47c6",
"scriptsig_asm": "OP_PUSHBYTES_72 3045022100c1fb331d155a7d299a0451d14fa1122b328e0e239afc9ba8dc2aff449ddc5a3a02201c1e19030d1efa432f5069cd369d7ad09a67f68501345e4db35f7b799605f55601 OP_PUSHBYTES_33 0314338e3e191aea3ac9e9292611faeedf0379bbe62c30fd76c7450722a1ac47c6",
"is_coinbase": false,
"sequence": 4294967293
}
],
"vout": [
{
"scriptpubkey": "6a4c5058325b78064160b631b5a15d9078d99c0db066449fb4c59bbfa4d987ba906e2990088b2fce8c33d841b6c47a99f9597000ab04a10b34cd419fc19784d9e36f1a33fd7b000c3bce00b6000c1d1e00614b",
"scriptpubkey_asm": "OP_RETURN OP_PUSHDATA1 58325b78064160b631b5a15d9078d99c0db066449fb4c59bbfa4d987ba906e2990088b2fce8c33d841b6c47a99f9597000ab04a10b34cd419fc19784d9e36f1a33fd7b000c3bce00b6000c1d1e00614b",
"scriptpubkey_type": "op_return",
"value": 0
},
{
"scriptpubkey": "a9144890aae025c84cb72a9730b49ca12595d6f6088d87",
"scriptpubkey_asm": "OP_HASH160 OP_PUSHBYTES_20 4890aae025c84cb72a9730b49ca12595d6f6088d OP_EQUAL",
"scriptpubkey_type": "p2sh",
"scriptpubkey_address": "38Jht2bzmJL4EwoFvvyFzejhfEb4J7KxLb",
"value": 155000
},
{
"scriptpubkey": "76a91486e7dad6617303942a448b7f8afe9653e5624a5e88ac",
"scriptpubkey_asm": "OP_DUP OP_HASH160 OP_PUSHBYTES_20 86e7dad6617303942a448b7f8afe9653e5624a5e OP_EQUALVERIFY OP_CHECKSIG",
"scriptpubkey_type": "p2pkh",
"scriptpubkey_address": "1DJKJGApgX4W8BSQ8FRPLqX78UaCskT4r2",
"value": 155000
},
{
"scriptpubkey": "76a914cd5b6566b455d043558829f6932edaae5d8f0ad388ac",
"scriptpubkey_asm": "OP_DUP OP_HASH160 OP_PUSHBYTES_20 cd5b6566b455d043558829f6932edaae5d8f0ad3 OP_EQUALVERIFY OP_CHECKSIG",
"scriptpubkey_type": "p2pkh",
"scriptpubkey_address": "1Kiq1dyVBzYLWGrBPWjChvKyzB2H95x5RJ",
"value": 799676250
}
],
"size": 350,
"weight": 1400,
"fee": 8750,
"status": {
"confirmed": false
},
"order": 4066675193,
"vsize": 350,
"adjustedVsize": 350,
"sigops": 8,
"feePerVsize": 25,
"adjustedFeePerVsize": 25,
"effectiveFeePerVsize": 25,
"firstSeen": 1691218516,
"uid": 512584,
"inputs": [],
"position": {
"block": 0,
"vsize": 13846
},
"bestDescendant": null,
"cpfpChecked": true
},
{
"txid": "b7981a624e4261c11f1246314d41e74be56af82eb557bcd054a5e0f94c023668",
"version": 2,
"locktime": 0,
"vin": [
{
"txid": "b50225a04a1d6fbbfa7a2122bc0580396f614027b3957f476229633576f06130",
"vout": 0,
"prevout": {
"scriptpubkey": "0014a24f913f8a9c30a4c302c2c78f2fd7addb08fd07",
"scriptpubkey_asm": "OP_0 OP_PUSHBYTES_20 a24f913f8a9c30a4c302c2c78f2fd7addb08fd07",
"scriptpubkey_type": "v0_p2wpkh",
"scriptpubkey_address": "bc1q5f8ez0u2nsc2fsczctrc7t7h4hds3lg82ewqhz",
"value": 612917
},
"scriptsig": "",
"scriptsig_asm": "",
"witness": [
"304402204dd10f14afa41bc76d8278140ff1ec3d3f87f2c207bbb5418cc76dab30d7f6a402207877cc9c6a2c724b6ea7a1c24ac00022469f194fd1a4bd8030bbca1787d3f5f301",
"03dc2c7b687019b40a68d713322675206cc266e34e5340ec982c13ff0222c3b2b6"
],
"is_coinbase": false,
"sequence": 2147483648
}
],
"vout": [
{
"scriptpubkey": "76a9149d32ef812385f3811634e0c0117dd153a5de10a488ac",
"scriptpubkey_asm": "OP_DUP OP_HASH160 OP_PUSHBYTES_20 9d32ef812385f3811634e0c0117dd153a5de10a4 OP_EQUALVERIFY OP_CHECKSIG",
"scriptpubkey_type": "p2pkh",
"scriptpubkey_address": "1FLC7Bag7okAkKPCyZbgZZg3Hh1EuGZ5Rd",
"value": 344697
},
{
"scriptpubkey": "00144c2671336ca8761863b4c68d64d4672491fec1b9",
"scriptpubkey_asm": "OP_0 OP_PUSHBYTES_20 4c2671336ca8761863b4c68d64d4672491fec1b9",
"scriptpubkey_type": "v0_p2wpkh",
"scriptpubkey_address": "bc1qfsn8zvmv4pmpsca5c6xkf4r8yjglasdesrawcx",
"value": 267636
}
],
"size": 225,
"weight": 573,
"fee": 584,
"status": {
"confirmed": false
},
"order": 1748369996,
"vsize": 143,
"adjustedVsize": 143.25,
"sigops": 5,
"feePerVsize": 4.076788830715532,
"adjustedFeePerVsize": 4.076788830715532,
"effectiveFeePerVsize": 4.076788830715532,
"firstSeen": 1691222376,
"uid": 526515,
"inputs": [],
"position": {
"block": 7,
"vsize": 22021095.5
},
"bestDescendant": null,
"cpfpChecked": true
}
]

View File

@@ -1,278 +0,0 @@
import * as fs from 'fs';
describe('Mempool Backend Config', () => {
beforeEach(() => {
jest.resetAllMocks();
jest.resetModules();
});
test('should return defaults when no file is present', () => {
jest.isolateModules(() => {
jest.mock('../../mempool-config.json', () => ({}), { virtual: true });
const config = jest.requireActual('../config').default;
expect(config.MEMPOOL).toStrictEqual({
ENABLED: true,
NETWORK: 'mainnet',
BACKEND: 'none',
BLOCKS_SUMMARIES_INDEXING: false,
HTTP_PORT: 8999,
SPAWN_CLUSTER_PROCS: 0,
API_URL_PREFIX: '/api/v1/',
AUTOMATIC_BLOCK_REINDEXING: false,
POLL_RATE_MS: 2000,
CACHE_DIR: './cache',
CACHE_ENABLED: true,
CLEAR_PROTECTION_MINUTES: 20,
RECOMMENDED_FEE_PERCENTILE: 50,
BLOCK_WEIGHT_UNITS: 4000000,
INITIAL_BLOCKS_AMOUNT: 8,
MEMPOOL_BLOCKS_AMOUNT: 8,
INDEXING_BLOCKS_AMOUNT: 11000,
USE_SECOND_NODE_FOR_MINFEE: false,
EXTERNAL_ASSETS: [],
EXTERNAL_MAX_RETRY: 1,
EXTERNAL_RETRY_INTERVAL: 0,
USER_AGENT: 'mempool',
STDOUT_LOG_MIN_PRIORITY: 'debug',
POOLS_JSON_TREE_URL: 'https://api.github.com/repos/mempool/mining-pools/git/trees/master',
POOLS_JSON_URL: 'https://raw.githubusercontent.com/mempool/mining-pools/master/pools-v2.json',
AUDIT: false,
ADVANCED_GBT_AUDIT: false,
ADVANCED_GBT_MEMPOOL: false,
RUST_GBT: false,
CPFP_INDEXING: false,
MAX_BLOCKS_BULK_QUERY: 0,
DISK_CACHE_BLOCK_INTERVAL: 6,
MAX_PUSH_TX_SIZE_WEIGHT: 400000,
ALLOW_UNREACHABLE: true,
PRICE_UPDATES_PER_HOUR: 1,
});
expect(config.ELECTRUM).toStrictEqual({ HOST: '127.0.0.1', PORT: 3306, TLS_ENABLED: true });
expect(config.ESPLORA).toStrictEqual({
REST_API_URL: 'http://127.0.0.1:3000',
UNIX_SOCKET_PATH: null,
RETRY_UNIX_SOCKET_AFTER: 30000,
FALLBACK: [],
});
expect(config.CORE_RPC).toStrictEqual({
HOST: '127.0.0.1',
PORT: 8332,
USERNAME: 'mempool',
PASSWORD: 'mempool',
TIMEOUT: 60000
});
expect(config.SECOND_CORE_RPC).toStrictEqual({
HOST: '127.0.0.1',
PORT: 8332,
USERNAME: 'mempool',
PASSWORD: 'mempool',
TIMEOUT: 60000
});
expect(config.DATABASE).toStrictEqual({
ENABLED: true,
HOST: '127.0.0.1',
SOCKET: '',
PORT: 3306,
DATABASE: 'mempool',
USERNAME: 'mempool',
PASSWORD: 'mempool',
TIMEOUT: 180000,
});
expect(config.SYSLOG).toStrictEqual({
ENABLED: true,
HOST: '127.0.0.1',
PORT: 514,
MIN_PRIORITY: 'info',
FACILITY: 'local7'
});
expect(config.STATISTICS).toStrictEqual({ ENABLED: true, TX_PER_SECOND_SAMPLE_PERIOD: 150 });
expect(config.BISQ).toStrictEqual({ ENABLED: false, DATA_PATH: '/bisq/statsnode-data/btc_mainnet/db' });
expect(config.SOCKS5PROXY).toStrictEqual({
ENABLED: false,
USE_ONION: true,
HOST: '127.0.0.1',
PORT: 9050,
USERNAME: '',
PASSWORD: ''
});
expect(config.EXTERNAL_DATA_SERVER).toStrictEqual({
MEMPOOL_API: 'https://mempool.space/api/v1',
MEMPOOL_ONION: 'http://mempoolhqx4isw62xs7abwphsq7ldayuidyx2v2oethdhhj6mlo2r6ad.onion/api/v1',
LIQUID_API: 'https://liquid.network/api/v1',
LIQUID_ONION: 'http://liquidmom47f6s3m53ebfxn47p76a6tlnxib3wp6deux7wuzotdr6cyd.onion/api/v1',
BISQ_URL: 'https://bisq.markets/api',
BISQ_ONION: 'http://bisqmktse2cabavbr2xjq7xw3h6g5ottemo5rolfcwt6aly6tp5fdryd.onion/api'
});
expect(config.MAXMIND).toStrictEqual({
ENABLED: false,
GEOLITE2_CITY: '/usr/local/share/GeoIP/GeoLite2-City.mmdb',
GEOLITE2_ASN: '/usr/local/share/GeoIP/GeoLite2-ASN.mmdb',
GEOIP2_ISP: '/usr/local/share/GeoIP/GeoIP2-ISP.mmdb'
});
expect(config.REPLICATION).toStrictEqual({
ENABLED: false,
AUDIT: false,
AUDIT_START_HEIGHT: 774000,
SERVERS: []
});
expect(config.MEMPOOL_SERVICES).toStrictEqual({
API: "",
ACCELERATIONS: false,
});
expect(config.REDIS).toStrictEqual({
ENABLED: false,
UNIX_SOCKET_PATH: ''
});
});
});
test('should override the default values with the passed values', () => {
jest.isolateModules(() => {
const fixture = JSON.parse(fs.readFileSync(`${__dirname}/../__fixtures__/mempool-config.template.json`, 'utf8'));
jest.mock('../../mempool-config.json', () => (fixture), { virtual: true });
const config = jest.requireActual('../config').default;
expect(config.MEMPOOL).toStrictEqual(fixture.MEMPOOL);
expect(config.ELECTRUM).toStrictEqual(fixture.ELECTRUM);
expect(config.ESPLORA).toStrictEqual(fixture.ESPLORA);
expect(config.CORE_RPC).toStrictEqual(fixture.CORE_RPC);
expect(config.SECOND_CORE_RPC).toStrictEqual(fixture.SECOND_CORE_RPC);
expect(config.DATABASE).toStrictEqual(fixture.DATABASE);
expect(config.SYSLOG).toStrictEqual(fixture.SYSLOG);
expect(config.STATISTICS).toStrictEqual(fixture.STATISTICS);
expect(config.BISQ).toStrictEqual(fixture.BISQ);
expect(config.SOCKS5PROXY).toStrictEqual(fixture.SOCKS5PROXY);
expect(config.EXTERNAL_DATA_SERVER).toStrictEqual(fixture.EXTERNAL_DATA_SERVER);
expect(config.MEMPOOL_SERVICES).toStrictEqual(fixture.MEMPOOL_SERVICES);
expect(config.REDIS).toStrictEqual(fixture.REDIS);
});
});
test('should ensure the docker start.sh script has default values', () => {
jest.isolateModules(() => {
const startSh = fs.readFileSync(`${__dirname}/../../../docker/backend/start.sh`, 'utf-8');
const fixture = JSON.parse(fs.readFileSync(`${__dirname}/../__fixtures__/mempool-config.template.json`, 'utf8'));
function parseJson(jsonObj, root?) {
for (const [key, value] of Object.entries(jsonObj)) {
// We have a few cases where we can't follow the pattern
if (root === 'MEMPOOL' && key === 'HTTP_PORT') {
if (process.env.CI) {
console.log('skipping check for MEMPOOL_HTTP_PORT');
}
continue;
}
if (root) {
// The flattened placeholder string, e.g. __MEMPOOL_ENABLED__
const replaceStr = `${root ? '__' + root + '_' : '__'}${key}__`;
// The environment variable name, e.g. MEMPOOL_ENABLED
const envVarStr = `${root ? root : ''}_${key}`;
let defaultEntry;
// The default-value assignment, checked as a regex, e.g. __MEMPOOL_ENABLED__=${MEMPOOL_ENABLED:=(.*)}
if (Array.isArray(value)) {
defaultEntry = `${replaceStr}=\${${envVarStr}:=[]}`;
if (process.env.CI) {
console.log(`looking for ${defaultEntry} in the start.sh script`);
}
// Regex matching does not work for array values
expect(startSh).toContain(defaultEntry);
} else {
defaultEntry = replaceStr + '=' + '\\${' + envVarStr + ':=(.*)' + '}';
if (process.env.CI) {
console.log(`looking for ${defaultEntry} in the start.sh script`);
}
const re = new RegExp(defaultEntry);
expect(startSh).toMatch(re);
}
// The sed command that substitutes the placeholder into the config file
const sedStr = 'sed -i "s!' + replaceStr + '!${' + replaceStr + '}!g" mempool-config.json';
if (process.env.CI) {
console.log(`looking for ${sedStr} in the start.sh script`);
}
expect(startSh).toContain(sedStr);
}
else {
parseJson(value, key);
}
}
}
parseJson(fixture);
});
});
test('should ensure that the mempool-config.json Docker template has all the keys', () => {
jest.isolateModules(() => {
const fixture = JSON.parse(fs.readFileSync(`${__dirname}/../__fixtures__/mempool-config.template.json`, 'utf8'));
const dockerJson = fs.readFileSync(`${__dirname}/../../../docker/backend/mempool-config.json`, 'utf-8');
function parseJson(jsonObj, root?) {
for (const [key, value] of Object.entries(jsonObj)) {
switch (typeof value) {
case 'object': {
if (Array.isArray(value)) {
// arrays are not enclosed by quotes
const replaceStr = `${root ? '__' + root + '_' : '__'}${key}__`;
expect(dockerJson).toContain(`"${key}": ${replaceStr}`);
break;
} else {
// Check for top-level config keys
expect(dockerJson).toContain(`"${key}"`);
parseJson(value, key);
break;
}
}
case 'string': {
// strings should be enclosed by quotes
const replaceStr = `${root ? '__' + root + '_' : '__'}${key}__`;
expect(dockerJson).toContain(`"${key}": "${replaceStr}"`);
break;
}
default: {
// numbers and booleans are not enclosed by quotes
const replaceStr = `${root ? '__' + root + '_' : '__'}${key}__`;
expect(dockerJson).toContain(`"${key}": ${replaceStr}`);
break;
}
}
}
}
parseJson(fixture);
});
});
});
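To make the flattening convention in these tests concrete: a nested key such as LIGHTNING.BACKEND is expected to appear in the Docker config template as "BACKEND": "__LIGHTNING_BACKEND__" and to be driven by an environment variable named LIGHTNING_BACKEND, because replaceStr is built as '__' + root + '_' + key + '__' and envVarStr as root + '_' + key. The only key exempted from the pattern is MEMPOOL.HTTP_PORT, which the start.sh test above skips explicitly.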

View File

@@ -1,68 +0,0 @@
import fs from 'fs';
import { GbtGenerator, ThreadTransaction } from 'rust-gbt';
import path from 'path';
const baseline = require('./test-data/target-template.json');
const testVector = require('./test-data/test-data-ids.json');
const vectorUidMap: Map<number, string> = new Map(testVector.map(x => [x[0], x[1]]));
const vectorTxidMap: Map<string, number> = new Map(testVector.map(x => [x[1], x[0]]));
// Note that this test buffer is specially constructed
// such that uids are assigned in numerical txid order
// so that ties break the same way as in Core's implementation
const vectorBuffer: Buffer = fs.readFileSync(path.join(__dirname, 'test-data', 'test-buffer.bin'));
describe('Rust GBT', () => {
test('should produce the same template as getBlockTemplate from Bitcoin Core', async () => {
const rustGbt = new GbtGenerator();
const { mempool, maxUid } = mempoolFromArrayBuffer(vectorBuffer.buffer);
const result = await rustGbt.make(mempool, [], maxUid);
const blocks: [string, number][][] = result.blocks.map(block => {
return block.map(uid => [vectorUidMap.get(uid) || 'missing', uid]);
});
const template = baseline.map(tx => [tx.txid, vectorTxidMap.get(tx.txid)]);
expect(blocks[0].length).toEqual(baseline.length);
expect(blocks[0]).toEqual(template);
});
});
function mempoolFromArrayBuffer(buf: ArrayBuffer): { mempool: ThreadTransaction[], maxUid: number } {
let maxUid = 0;
const view = new DataView(buf);
const count = view.getUint32(0, false);
const txs: ThreadTransaction[] = [];
let offset = 4;
for (let i = 0; i < count; i++) {
const uid = view.getUint32(offset, false);
maxUid = Math.max(maxUid, uid);
const tx: ThreadTransaction = {
uid,
order: txidToOrdering(vectorUidMap.get(uid) as string),
fee: view.getFloat64(offset + 4, false),
weight: view.getUint32(offset + 12, false),
sigops: view.getUint32(offset + 16, false),
// feePerVsize: view.getFloat64(offset + 20, false),
effectiveFeePerVsize: view.getFloat64(offset + 28, false),
inputs: [],
};
const numInputs = view.getUint32(offset + 36, false);
offset += 40;
for (let j = 0; j < numInputs; j++) {
tx.inputs.push(view.getUint32(offset, false));
offset += 4;
}
txs.push(tx);
}
return { mempool: txs, maxUid };
}
function txidToOrdering(txid: string): number {
return parseInt(
txid.substr(62, 2) +
txid.substr(60, 2) +
txid.substr(58, 2) +
txid.substr(56, 2),
16
);
}
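For reference, the big-endian layout that mempoolFromArrayBuffer() decodes is a u32 transaction count followed, per transaction, by a 40-byte record (u32 uid, f64 fee, u32 weight, u32 sigops, the f64 feePerVsize slot the decoder skips, f64 effectiveFeePerVsize, u32 input count) and then one u32 per input. A sketch of the inverse encoder, useful for constructing such a test buffer (this helper is hypothetical, not something the repo ships):
interface SketchThreadTx {
  uid: number;
  fee: number;
  weight: number;
  sigops: number;
  effectiveFeePerVsize: number;
  inputs: number[];
}
function mempoolToBuffer(txs: SketchThreadTx[]): Buffer {
  const size = 4 + txs.reduce((sum, tx) => sum + 40 + tx.inputs.length * 4, 0);
  const buf = Buffer.alloc(size);
  let offset = buf.writeUInt32BE(txs.length, 0);
  for (const tx of txs) {
    offset = buf.writeUInt32BE(tx.uid, offset);
    offset = buf.writeDoubleBE(tx.fee, offset);
    offset = buf.writeUInt32BE(tx.weight, offset);
    offset = buf.writeUInt32BE(tx.sigops, offset);
    offset = buf.writeDoubleBE(0, offset); // feePerVsize slot, ignored by the decoder
    offset = buf.writeDoubleBE(tx.effectiveFeePerVsize, offset);
    offset = buf.writeUInt32BE(tx.inputs.length, offset);
    for (const input of tx.inputs) {
      offset = buf.writeUInt32BE(input, offset);
    }
  }
  return buf;
}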

File diff suppressed because it is too large

File diff suppressed because one or more lines are too long

View File

@@ -1,163 +0,0 @@
import config from '../config';
import logger from '../logger';
import { MempoolTransactionExtended, MempoolBlockWithTransactions } from '../mempool.interfaces';
import rbfCache from './rbf-cache';
const PROPAGATION_MARGIN = 180; // in seconds, time since a transaction is first seen after which it is assumed to have propagated to all miners
class Audit {
auditBlock(transactions: MempoolTransactionExtended[], projectedBlocks: MempoolBlockWithTransactions[], mempool: { [txId: string]: MempoolTransactionExtended }, useAccelerations: boolean = false)
: { censored: string[], added: string[], fresh: string[], sigop: string[], fullrbf: string[], accelerated: string[], score: number, similarity: number } {
if (!projectedBlocks?.[0]?.transactionIds || !mempool) {
return { censored: [], added: [], fresh: [], sigop: [], fullrbf: [], accelerated: [], score: 0, similarity: 1 };
}
const matches: string[] = []; // present in both mined block and template
const added: string[] = []; // present in mined block, not in template
const fresh: string[] = []; // missing, but firstSeen or lastBoosted within PROPAGATION_MARGIN
const rbf: string[] = []; // missing or included txs that belong to a full-rbf replacement, or conflict with a tx in the mined block
const accelerated: string[] = []; // prioritized by the mempool accelerator
const isCensored = {}; // missing, without excuse
const isDisplaced = {};
let displacedWeight = 0;
let matchedWeight = 0;
let projectedWeight = 0;
const inBlock = {};
const inTemplate = {};
const now = Math.round((Date.now() / 1000));
for (const tx of transactions) {
inBlock[tx.txid] = tx;
if (mempool[tx.txid] && mempool[tx.txid].acceleration) {
accelerated.push(tx.txid);
}
}
// coinbase is always expected
if (transactions[0]) {
inTemplate[transactions[0].txid] = true;
}
// look for transactions that were expected in the template, but missing from the mined block
for (const txid of projectedBlocks[0].transactionIds) {
if (!inBlock[txid]) {
// allow missing transactions which either belong to a full rbf tree, or conflict with any transaction in the mined block
if (rbfCache.has(txid) && (rbfCache.isFullRbf(txid) || rbfCache.anyInSameTree(txid, (tx) => inBlock[tx.txid]))) {
rbf.push(txid);
} else if (mempool[txid]?.firstSeen != null && (now - (mempool[txid]?.firstSeen || 0)) <= PROPAGATION_MARGIN) {
// tx is recent, may have reached the miner too late for inclusion
fresh.push(txid);
} else if (mempool[txid]?.lastBoosted != null && (now - (mempool[txid]?.lastBoosted || 0)) <= PROPAGATION_MARGIN) {
// tx was recently cpfp'd, miner may not have the latest effective rate
fresh.push(txid);
} else {
isCensored[txid] = true;
}
displacedWeight += mempool[txid]?.weight || 0;
} else {
matchedWeight += mempool[txid]?.weight || 0;
}
projectedWeight += mempool[txid]?.weight || 0;
inTemplate[txid] = true;
}
if (transactions[0]) {
displacedWeight += (4000 - transactions[0].weight);
projectedWeight += transactions[0].weight;
matchedWeight += transactions[0].weight;
}
// we can expect an honest miner to include 'displaced' transactions in place of recent arrivals and censored txs
// these displaced transactions should occupy the first N weight units of the next projected block
let displacedWeightRemaining = displacedWeight;
let index = 0;
let lastFeeRate = Infinity;
let failures = 0;
while (projectedBlocks[1] && index < projectedBlocks[1].transactionIds.length && failures < 500) {
const txid = projectedBlocks[1].transactionIds[index];
const tx = mempool[txid];
if (tx) {
const fits = (tx.weight - displacedWeightRemaining) < 4000;
const feeMatches = tx.effectiveFeePerVsize >= lastFeeRate;
if (fits || feeMatches) {
isDisplaced[txid] = true;
if (fits) {
lastFeeRate = Math.min(lastFeeRate, tx.effectiveFeePerVsize);
}
if (tx.firstSeen == null || (now - (tx?.firstSeen || 0)) > PROPAGATION_MARGIN) {
displacedWeightRemaining -= tx.weight;
}
failures = 0;
} else {
failures++;
}
} else {
logger.warn('projected transaction missing from mempool cache');
}
index++;
}
// mark unexpected transactions in the mined block as 'added'
let overflowWeight = 0;
let totalWeight = 0;
for (const tx of transactions) {
if (inTemplate[tx.txid]) {
matches.push(tx.txid);
} else {
if (rbfCache.has(tx.txid)) {
rbf.push(tx.txid);
} else if (!isDisplaced[tx.txid]) {
added.push(tx.txid);
}
overflowWeight += tx.weight;
}
totalWeight += tx.weight;
}
// transactions missing from near the end of our template are probably not being censored
let overflowWeightRemaining = overflowWeight - (config.MEMPOOL.BLOCK_WEIGHT_UNITS - totalWeight);
let maxOverflowRate = 0;
let rateThreshold = 0;
index = projectedBlocks[0].transactionIds.length - 1;
while (index >= 0) {
const txid = projectedBlocks[0].transactionIds[index];
const tx = mempool[txid];
if (tx) {
if (overflowWeightRemaining > 0) {
if (isCensored[txid]) {
delete isCensored[txid];
}
if (tx.effectiveFeePerVsize > maxOverflowRate) {
maxOverflowRate = tx.effectiveFeePerVsize;
rateThreshold = (Math.ceil(maxOverflowRate * 100) / 100) + 0.005;
}
} else if (tx.effectiveFeePerVsize <= rateThreshold) { // tolerance of 0.01 sat/vb + rounding
if (isCensored[txid]) {
delete isCensored[txid];
}
}
overflowWeightRemaining -= (mempool[txid]?.weight || 0);
} else {
logger.warn('projected transaction missing from mempool cache');
}
index--;
}
const numCensored = Object.keys(isCensored).length;
const numMatches = matches.length - 1; // adjust for coinbase tx
const score = numMatches > 0 ? (numMatches / (numMatches + numCensored)) : 0;
const similarity = projectedWeight ? matchedWeight / projectedWeight : 1;
return {
censored: Object.keys(isCensored),
added,
fresh,
sigop: [],
fullrbf: rbf,
accelerated,
score,
similarity,
};
}
}
export default new Audit();
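The two summary metrics reduce to simple ratios: score counts matched template transactions (less the coinbase) against those left in isCensored, and similarity compares matched weight with total projected weight. As an illustrative example, a mined block matching 2000 of the template's non-coinbase transactions with 5 transactions still classified as censored scores 2000 / 2005 ≈ 0.9975; the fresh, rbf, and displaced buckets never lower the score, since those txids are excused before isCensored is tallied.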

View File

@@ -1,39 +1,46 @@
import fs from 'fs';
import path from 'path';
import os from 'os';
import * as fs from 'fs';
import * as os from 'os';
import logger from '../logger';
import { IBackendInfo } from '../mempool.interfaces';
import config from '../config';
class BackendInfo {
private backendInfo: IBackendInfo;
private gitCommitHash = '';
private hostname = '';
private version = '';
constructor() {
// This file is created by ./fetch-version.ts during the build
const versionFile = path.join(__dirname, 'version.json');
let versionInfo;
if (fs.existsSync(versionFile)) {
versionInfo = JSON.parse(fs.readFileSync(versionFile).toString());
} else {
// Use dummy values if `versionFile` doesn't exist (e.g., during testing)
versionInfo = {
version: '?',
gitCommit: '?'
};
}
this.backendInfo = {
hostname: os.hostname(),
version: versionInfo.version,
gitCommit: versionInfo.gitCommit,
lightning: config.LIGHTNING.ENABLED
};
this.setLatestCommitHash();
this.setVersion();
this.hostname = os.hostname();
}
public getBackendInfo(): IBackendInfo {
return this.backendInfo;
return {
hostname: this.hostname,
gitCommit: this.gitCommitHash,
version: this.version,
};
}
public getShortCommitHash() {
return this.backendInfo.gitCommit.slice(0, 7);
return this.gitCommitHash.slice(0, 7);
}
private setLatestCommitHash(): void {
try {
this.gitCommitHash = fs.readFileSync('../.git/refs/heads/master').toString().trim();
} catch (e) {
logger.err('Could not load git commit info: ' + (e instanceof Error ? e.message : e));
}
}
private setVersion(): void {
try {
const packageJson = fs.readFileSync('package.json').toString();
this.version = JSON.parse(packageJson).version;
} catch (e) {
throw new Error(e instanceof Error ? e.message : 'Error');
}
}
}

View File

@@ -1,381 +0,0 @@
import { Application, Request, Response } from 'express';
import config from '../../config';
import { RequiredSpec } from '../../mempool.interfaces';
import bisq from './bisq';
import { MarketsApiError } from './interfaces';
import marketsApi from './markets-api';
class BisqRoutes {
public initRoutes(app: Application) {
app
.get(config.MEMPOOL.API_URL_PREFIX + 'bisq/stats', this.getBisqStats)
.get(config.MEMPOOL.API_URL_PREFIX + 'bisq/tx/:txId', this.getBisqTransaction)
.get(config.MEMPOOL.API_URL_PREFIX + 'bisq/block/:hash', this.getBisqBlock)
.get(config.MEMPOOL.API_URL_PREFIX + 'bisq/blocks/tip/height', this.getBisqTip)
.get(config.MEMPOOL.API_URL_PREFIX + 'bisq/blocks/:index/:length', this.getBisqBlocks)
.get(config.MEMPOOL.API_URL_PREFIX + 'bisq/address/:address', this.getBisqAddress)
.get(config.MEMPOOL.API_URL_PREFIX + 'bisq/txs/:index/:length', this.getBisqTransactions)
.get(config.MEMPOOL.API_URL_PREFIX + 'bisq/markets/currencies', this.getBisqMarketCurrencies.bind(this))
.get(config.MEMPOOL.API_URL_PREFIX + 'bisq/markets/depth', this.getBisqMarketDepth.bind(this))
.get(config.MEMPOOL.API_URL_PREFIX + 'bisq/markets/hloc', this.getBisqMarketHloc.bind(this))
.get(config.MEMPOOL.API_URL_PREFIX + 'bisq/markets/markets', this.getBisqMarketMarkets.bind(this))
.get(config.MEMPOOL.API_URL_PREFIX + 'bisq/markets/offers', this.getBisqMarketOffers.bind(this))
.get(config.MEMPOOL.API_URL_PREFIX + 'bisq/markets/ticker', this.getBisqMarketTicker.bind(this))
.get(config.MEMPOOL.API_URL_PREFIX + 'bisq/markets/trades', this.getBisqMarketTrades.bind(this))
.get(config.MEMPOOL.API_URL_PREFIX + 'bisq/markets/volumes', this.getBisqMarketVolumes.bind(this))
.get(config.MEMPOOL.API_URL_PREFIX + 'bisq/markets/volumes/7d', this.getBisqMarketVolumes7d.bind(this))
;
}
private getBisqStats(req: Request, res: Response) {
const result = bisq.getStats();
res.json(result);
}
private getBisqTip(req: Request, res: Response) {
const result = bisq.getLatestBlockHeight();
res.type('text/plain');
res.send(result.toString());
}
private getBisqTransaction(req: Request, res: Response) {
const result = bisq.getTransaction(req.params.txId);
if (result) {
res.json(result);
} else {
res.status(404).send('Bisq transaction not found');
}
}
private getBisqTransactions(req: Request, res: Response) {
const types: string[] = [];
req.query.types = req.query.types || [];
if (!Array.isArray(req.query.types)) {
res.status(500).send('Types is not an array');
return;
}
for (const _type in req.query.types) {
if (typeof req.query.types[_type] === 'string') {
types.push(req.query.types[_type].toString());
}
}
const index = parseInt(req.params.index, 10) || 0;
const length = parseInt(req.params.length, 10) > 100 ? 100 : parseInt(req.params.length, 10) || 25;
const [transactions, count] = bisq.getTransactions(index, length, types);
res.header('X-Total-Count', count.toString());
res.json(transactions);
}
private getBisqBlock(req: Request, res: Response) {
const result = bisq.getBlock(req.params.hash);
if (result) {
res.json(result);
} else {
res.status(404).send('Bisq block not found');
}
}
private getBisqBlocks(req: Request, res: Response) {
const index = parseInt(req.params.index, 10) || 0;
const length = parseInt(req.params.length, 10) > 100 ? 100 : parseInt(req.params.length, 10) || 25;
const [transactions, count] = bisq.getBlocks(index, length);
res.header('X-Total-Count', count.toString());
res.json(transactions);
}
private getBisqAddress(req: Request, res: Response) {
const result = bisq.getAddress(req.params.address.substr(1));
if (result) {
res.json(result);
} else {
res.status(404).send('Bisq address not found');
}
}
private getBisqMarketCurrencies(req: Request, res: Response) {
const constraints: RequiredSpec = {
'type': {
required: false,
types: ['crypto', 'fiat', 'all']
},
};
const p = this.parseRequestParameters(req.query, constraints);
if (p.error) {
res.status(400).json(this.getBisqMarketErrorResponse(p.error));
return;
}
const result = marketsApi.getCurrencies(p.type);
if (result) {
res.json(result);
} else {
res.status(500).json(this.getBisqMarketErrorResponse('getBisqMarketCurrencies error'));
}
}
private getBisqMarketDepth(req: Request, res: Response) {
const constraints: RequiredSpec = {
'market': {
required: true,
types: ['@string']
},
};
const p = this.parseRequestParameters(req.query, constraints);
if (p.error) {
res.status(400).json(this.getBisqMarketErrorResponse(p.error));
return;
}
const result = marketsApi.getDepth(p.market);
if (result) {
res.json(result);
} else {
res.status(500).json(this.getBisqMarketErrorResponse('getBisqMarketDepth error'));
}
}
private getBisqMarketMarkets(req: Request, res: Response) {
const result = marketsApi.getMarkets();
if (result) {
res.json(result);
} else {
res.status(500).json(this.getBisqMarketErrorResponse('getBisqMarketMarkets error'));
}
}
private getBisqMarketTrades(req: Request, res: Response) {
const constraints: RequiredSpec = {
'market': {
required: true,
types: ['@string']
},
'timestamp_from': {
required: false,
types: ['@number']
},
'timestamp_to': {
required: false,
types: ['@number']
},
'trade_id_to': {
required: false,
types: ['@string']
},
'trade_id_from': {
required: false,
types: ['@string']
},
'direction': {
required: false,
types: ['buy', 'sell']
},
'limit': {
required: false,
types: ['@number']
},
'sort': {
required: false,
types: ['asc', 'desc']
}
};
const p = this.parseRequestParameters(req.query, constraints);
if (p.error) {
res.status(400).json(this.getBisqMarketErrorResponse(p.error));
return;
}
const result = marketsApi.getTrades(p.market, p.timestamp_from,
p.timestamp_to, p.trade_id_from, p.trade_id_to, p.direction, p.limit, p.sort);
if (result) {
res.json(result);
} else {
res.status(500).json(this.getBisqMarketErrorResponse('getBisqMarketTrades error'));
}
}
private getBisqMarketOffers(req: Request, res: Response) {
const constraints: RequiredSpec = {
'market': {
required: true,
types: ['@string']
},
'direction': {
required: false,
types: ['buy', 'sell']
},
};
const p = this.parseRequestParameters(req.query, constraints);
if (p.error) {
res.status(400).json(this.getBisqMarketErrorResponse(p.error));
return;
}
const result = marketsApi.getOffers(p.market, p.direction);
if (result) {
res.json(result);
} else {
res.status(500).json(this.getBisqMarketErrorResponse('getBisqMarketOffers error'));
}
}
private getBisqMarketVolumes(req: Request, res: Response) {
const constraints: RequiredSpec = {
'market': {
required: false,
types: ['@string']
},
'interval': {
required: false,
types: ['minute', 'half_hour', 'hour', 'half_day', 'day', 'week', 'month', 'year', 'auto']
},
'timestamp_from': {
required: false,
types: ['@number']
},
'timestamp_to': {
required: false,
types: ['@number']
},
'milliseconds': {
required: false,
types: ['@boolean']
},
'timestamp': {
required: false,
types: ['no', 'yes']
},
};
const p = this.parseRequestParameters(req.query, constraints);
if (p.error) {
res.status(400).json(this.getBisqMarketErrorResponse(p.error));
return;
}
const result = marketsApi.getVolumes(p.market, p.timestamp_from, p.timestamp_to, p.interval, p.milliseconds, p.timestamp);
if (result) {
res.json(result);
} else {
res.status(500).json(this.getBisqMarketErrorResponse('getBisqMarketVolumes error'));
}
}
private getBisqMarketHloc(req: Request, res: Response) {
const constraints: RequiredSpec = {
'market': {
required: true,
types: ['@string']
},
'interval': {
required: false,
types: ['minute', 'half_hour', 'hour', 'half_day', 'day', 'week', 'month', 'year', 'auto']
},
'timestamp_from': {
required: false,
types: ['@number']
},
'timestamp_to': {
required: false,
types: ['@number']
},
'milliseconds': {
required: false,
types: ['@boolean']
},
'timestamp': {
required: false,
types: ['no', 'yes']
},
};
const p = this.parseRequestParameters(req.query, constraints);
if (p.error) {
res.status(400).json(this.getBisqMarketErrorResponse(p.error));
return;
}
const result = marketsApi.getHloc(p.market, p.interval, p.timestamp_from, p.timestamp_to, p.milliseconds, p.timestamp);
if (result) {
res.json(result);
} else {
res.status(500).json(this.getBisqMarketErrorResponse('getBisqMarketHloc error'));
}
}
private getBisqMarketTicker(req: Request, res: Response) {
const constraints: RequiredSpec = {
'market': {
required: false,
types: ['@string']
},
};
const p = this.parseRequestParameters(req.query, constraints);
if (p.error) {
res.status(400).json(this.getBisqMarketErrorResponse(p.error));
return;
}
const result = marketsApi.getTicker(p.market);
if (result) {
res.json(result);
} else {
res.status(500).json(this.getBisqMarketErrorResponse('getBisqMarketTicker error'));
}
}
private getBisqMarketVolumes7d(req: Request, res: Response) {
const result = marketsApi.getVolumesByTime(604800);
if (result) {
res.json(result);
} else {
res.status(500).json(this.getBisqMarketErrorResponse('getBisqMarketVolumes7d error'));
}
}
private parseRequestParameters(requestParams: object, params: RequiredSpec): { [name: string]: any; } {
const final = {};
for (const i in params) {
if (params.hasOwnProperty(i)) {
if (params[i].required && requestParams[i] === undefined) {
return { error: i + ' parameter missing'};
}
if (typeof requestParams[i] === 'string') {
const str = (requestParams[i] || '').toString().toLowerCase();
if (params[i].types.indexOf('@number') > -1) {
const number = parseInt(str, 10);
final[i] = number;
} else if (params[i].types.indexOf('@string') > -1) {
final[i] = str;
} else if (params[i].types.indexOf('@boolean') > -1) {
final[i] = str === 'true' || str === 'yes';
} else if (params[i].types.indexOf(str) > -1) {
final[i] = str;
} else {
return { error: i + ' parameter invalid'};
}
} else if (typeof requestParams[i] === 'number') {
final[i] = requestParams[i];
}
}
}
return final;
}
private getBisqMarketErrorResponse(message: string): MarketsApiError {
return {
'success': 0,
'error': message
};
}
}
export default new BisqRoutes;
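A usage sketch, assuming the default API_URL_PREFIX of /api/v1/ from the config tests earlier in this diff: GET /api/v1/bisq/markets/hloc?market=bsq_btc&interval=day passes parseRequestParameters with market accepted as the required @string parameter and interval matched against its enumerated values, while omitting market yields a 400 response with the body { "success": 0, "error": "market parameter missing" }.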

View File

@@ -1,13 +1,10 @@
import config from '../../config';
import * as fs from 'fs';
import axios, { AxiosResponse } from 'axios';
import * as http from 'http';
import * as https from 'https';
import { SocksProxyAgent } from 'socks-proxy-agent';
import axios from 'axios';
import { BisqBlocks, BisqBlock, BisqTransaction, BisqStats, BisqTrade } from './interfaces';
import { Common } from '../common';
import { BlockExtended } from '../../mempool.interfaces';
import backendInfo from '../backend-info';
import { StaticPool } from 'node-worker-threads-pool';
import logger from '../../logger';
class Bisq {
@@ -30,17 +27,15 @@ class Bisq {
private priceUpdateCallbackFunction: ((price: number) => void) | undefined;
private topDirectoryWatcher: fs.FSWatcher | undefined;
private subdirectoryWatcher: fs.FSWatcher | undefined;
private jsonParsePool = new StaticPool({
size: 4,
task: (blob: string) => JSON.parse(blob),
});
constructor() {}
startBisqService(): void {
try {
this.checkForBisqDataFolder();
} catch (e) {
logger.info('Retrying to start bisq service in 3 minutes');
setTimeout(this.startBisqService.bind(this), 180000);
return;
}
this.checkForBisqDataFolder();
this.loadBisqDumpFile();
setInterval(this.updatePrice.bind(this), 1000 * 60 * 60);
this.updatePrice();
@@ -95,7 +90,7 @@ class Bisq {
private checkForBisqDataFolder() {
if (!fs.existsSync(Bisq.BLOCKS_JSON_FILE_PATH)) {
logger.warn(Bisq.BLOCKS_JSON_FILE_PATH + ` doesn't exist. Make sure Bisq is running and the config is correct before starting the server.`);
throw new Error(`Cannot load BISQ ${Bisq.BLOCKS_JSON_FILE_PATH} file`);
return process.exit(1);
}
}
@@ -142,59 +137,12 @@ class Bisq {
}, 2000);
});
}
private async updatePrice() {
type axiosOptions = {
headers: {
'User-Agent': string
};
timeout: number;
httpAgent?: http.Agent;
httpsAgent?: https.Agent;
}
const setDelay = (secs: number = 1): Promise<void> => new Promise(resolve => setTimeout(() => resolve(), secs * 1000));
const BISQ_URL = (config.SOCKS5PROXY.ENABLED === true) && (config.SOCKS5PROXY.USE_ONION === true) ? config.EXTERNAL_DATA_SERVER.BISQ_ONION : config.EXTERNAL_DATA_SERVER.BISQ_URL;
const isHTTP = (new URL(BISQ_URL).protocol.split(':')[0] === 'http') ? true : false;
const axiosOptions: axiosOptions = {
headers: {
'User-Agent': (config.MEMPOOL.USER_AGENT === 'mempool') ? `mempool/v${backendInfo.getBackendInfo().version}` : `${config.MEMPOOL.USER_AGENT}`
},
timeout: config.SOCKS5PROXY.ENABLED ? 30000 : 10000
};
let retry = 0;
while(retry < config.MEMPOOL.EXTERNAL_MAX_RETRY) {
try {
if (config.SOCKS5PROXY.ENABLED) {
const socksOptions: any = {
agentOptions: {
keepAlive: true,
},
hostname: config.SOCKS5PROXY.HOST,
port: config.SOCKS5PROXY.PORT
};
if (config.SOCKS5PROXY.USERNAME && config.SOCKS5PROXY.PASSWORD) {
socksOptions.username = config.SOCKS5PROXY.USERNAME;
socksOptions.password = config.SOCKS5PROXY.PASSWORD;
} else {
// Retry with different tor circuits https://stackoverflow.com/a/64960234
socksOptions.username = `circuit${retry}`;
}
// Handle proxy agent for onion addresses
if (isHTTP) {
axiosOptions.httpAgent = new SocksProxyAgent(socksOptions);
} else {
axiosOptions.httpsAgent = new SocksProxyAgent(socksOptions);
}
}
const data: AxiosResponse = await axios.get(`${BISQ_URL}/trades/?market=bsq_btc`, axiosOptions);
if (data.statusText === 'error' || !data.data) {
throw new Error(`Could not fetch data from Bisq market, Error: ${data.status}`);
}
private updatePrice() {
axios.get<BisqTrade[]>('https://bisq.markets/api/trades/?market=bsq_btc', { timeout: 10000 })
.then((response) => {
const prices: number[] = [];
data.data.forEach((trade) => {
response.data.forEach((trade) => {
prices.push(parseFloat(trade.price) * 100000000);
});
prices.sort((a, b) => a - b);
@@ -202,24 +150,19 @@ class Bisq {
if (this.priceUpdateCallbackFunction) {
this.priceUpdateCallbackFunction(this.price);
}
logger.debug('Successfully updated Bisq market price');
break;
} catch (e) {
logger.err('Error updating Bisq market price: ' + (e instanceof Error ? e.message : e));
await setDelay(config.MEMPOOL.EXTERNAL_RETRY_INTERVAL);
retry++;
}
}
}).catch((err) => {
logger.err('Error updating Bisq market price: ' + err);
});
}
private async loadBisqDumpFile(): Promise<void> {
this.allBlocks = [];
try {
await this.loadData();
const data = await this.loadData();
await this.loadBisqBlocksDump(data);
this.buildIndex();
this.calculateStats();
} catch (e) {
logger.info('Cannot load bisq dump file because: ' + (e instanceof Error ? e.message : e));
logger.info('loadBisqDumpFile() error.' + (e instanceof Error ? e.message : e));
}
}
@@ -298,61 +241,36 @@ class Bisq {
};
}
private async loadData(): Promise<any> {
if (!fs.existsSync(Bisq.BLOCKS_JSON_FILE_PATH)) {
throw new Error(Bisq.BLOCKS_JSON_FILE_PATH + ` doesn't exist`);
}
const readline = require('readline');
const events = require('events');
const rl = readline.createInterface({
input: fs.createReadStream(Bisq.BLOCKS_JSON_FILE_PATH),
crlfDelay: Infinity
});
let blockBuffer = '';
let readingBlock = false;
let lineCount = 1;
private async loadBisqBlocksDump(cacheData: string): Promise<void> {
const start = new Date().getTime();
logger.debug('Processing Bisq data dump...');
rl.on('line', (line) => {
if (lineCount === 2) {
line = line.replace(' "chainHeight": ', '');
this.latestBlockHeight = parseInt(line, 10);
if (cacheData && cacheData.length !== 0) {
logger.debug('Processing Bisq data dump...');
const data: BisqBlocks = await this.jsonParsePool.exec(cacheData);
if (data.blocks && data.blocks.length !== this.allBlocks.length) {
this.allBlocks = data.blocks;
this.allBlocks.reverse();
this.blocks = this.allBlocks.filter((block) => block.txs.length > 0);
this.latestBlockHeight = data.chainHeight;
const time = new Date().getTime() - start;
logger.debug('Bisq dump processed in ' + time + ' ms (worker thread)');
} else {
throw new Error(`Bisq dump didn't contain any blocks`);
}
}
}
if (line === ' {') {
readingBlock = true;
} else if (line === ' },') {
blockBuffer += '}';
try {
const block: BisqBlock = JSON.parse(blockBuffer);
this.allBlocks.push(block);
readingBlock = false;
blockBuffer = '';
} catch (e) {
logger.debug(blockBuffer);
throw Error(`Unable to parse Bisq data dump at line ${lineCount}` + (e instanceof Error ? e.message : e));
private loadData(): Promise<string> {
return new Promise((resolve, reject) => {
if (!fs.existsSync(Bisq.BLOCKS_JSON_FILE_PATH)) {
return reject(Bisq.BLOCKS_JSON_FILE_PATH + ` doesn't exist`);
}
fs.readFile(Bisq.BLOCKS_JSON_FILE_PATH, 'utf8', (err, data) => {
if (err) {
reject(err);
}
}
if (readingBlock === true) {
blockBuffer += line;
}
++lineCount;
resolve(data);
});
});
await events.once(rl, 'close');
this.allBlocks.reverse();
this.blocks = this.allBlocks.filter((block) => block.txs.length > 0);
const time = new Date().getTime() - start;
logger.debug('Bisq dump processed in ' + time + ' ms');
}
}

View File

@@ -1,7 +1,7 @@
import { Currencies, OffersData, TradesData, Depth, Currency, Interval, HighLowOpenClose,
Markets, Offers, Offer, BisqTrade, MarketVolume, Tickers, Ticker, SummarizedIntervals, SummarizedInterval } from './interfaces';
const strtotime = require('./strtotime');
import * as datetime from 'locutus/php/datetime';
class BisqMarketsApi {
private cryptoCurrencyData: Currency[] = [];
@@ -312,7 +312,7 @@ class BisqMarketsApi {
getTickerFromMarket(market: string): Ticker | null {
let ticker: Ticker;
const timestamp_from = strtotime('-24 hour');
const timestamp_from = datetime.strtotime('-24 hour');
const timestamp_to = new Date().getTime() / 1000;
const trades = this.getTradesByCriteria(market, timestamp_to, timestamp_from,
undefined, undefined, undefined, 'asc', Number.MAX_SAFE_INTEGER);
@@ -638,13 +638,13 @@ class BisqMarketsApi {
case 'half_day':
return (ts - (ts % (3600 * 12)));
case 'day':
return strtotime('midnight today', ts);
return datetime.strtotime('midnight today', ts);
case 'week':
return strtotime('midnight sunday last week', ts);
return datetime.strtotime('midnight sunday last week', ts);
case 'month':
return strtotime('midnight first day of this month', ts);
return datetime.strtotime('midnight first day of this month', ts);
case 'year':
return strtotime('midnight first day of january', ts);
return datetime.strtotime('midnight first day of january', ts);
default:
throw new Error('Unsupported interval: ' + interval);
}
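// Usage sketch for the locutus strtotime swapped in above; it accepts PHP-style
// relative formats plus an optional reference timestamp in seconds, and returns a
// Unix timestamp (or false if the string cannot be parsed):
import * as datetime from 'locutus/php/datetime';

const nowSec = Math.floor(Date.now() / 1000);
const dayStart = datetime.strtotime('midnight today', nowSec);
const monthStart = datetime.strtotime('midnight first day of this month', nowSec);
console.log(dayStart, monthStart); // interval starts in seconds, local time per PHP semantics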

View File

@@ -26,13 +26,7 @@ class Bisq {
constructor() {}
startBisqService(): void {
try {
this.checkForBisqDataFolder();
} catch (e) {
logger.info('Retrying to start bisq service (markets) in 3 minutes');
setTimeout(this.startBisqService.bind(this), 180000);
return;
}
this.checkForBisqDataFolder();
this.loadBisqDumpFile();
this.startBisqDirectoryWatcher();
}
@@ -40,7 +34,7 @@ class Bisq {
private checkForBisqDataFolder() {
if (!fs.existsSync(Bisq.MARKET_JSON_PATH + Bisq.MARKET_JSON_FILE_PATHS.cryptoCurrency)) {
logger.err(Bisq.MARKET_JSON_PATH + Bisq.MARKET_JSON_FILE_PATHS.cryptoCurrency + ` doesn't exist. Make sure Bisq is running and the config is correct before starting the server.`);
throw new Error(`Cannot load BISQ ${Bisq.MARKET_JSON_FILE_PATHS.cryptoCurrency} file`);
return process.exit(1);
}
}

File diff suppressed because it is too large

View File

@@ -2,29 +2,17 @@ import { IEsploraApi } from './esplora-api.interface';
export interface AbstractBitcoinApi {
$getRawMempool(): Promise<IEsploraApi.Transaction['txid'][]>;
$getRawTransaction(txId: string, skipConversion?: boolean, addPrevout?: boolean, lazyPrevouts?: boolean): Promise<IEsploraApi.Transaction>;
$getMempoolTransactions(txids: string[]): Promise<IEsploraApi.Transaction[]>;
$getAllMempoolTransactions(lastTxid: string);
$getTransactionHex(txId: string): Promise<string>;
$getRawTransaction(txId: string, skipConversion?: boolean, addPrevout?: boolean): Promise<IEsploraApi.Transaction>;
$getBlockHeightTip(): Promise<number>;
$getBlockHashTip(): Promise<string>;
$getTxIdsForBlock(hash: string): Promise<string[]>;
$getTxsForBlock(hash: string): Promise<IEsploraApi.Transaction[]>;
$getBlockHash(height: number): Promise<string>;
$getBlockHeader(hash: string): Promise<string>;
$getBlock(hash: string): Promise<IEsploraApi.Block>;
$getRawBlock(hash: string): Promise<Buffer>;
$getAddress(address: string): Promise<IEsploraApi.Address>;
$getAddressTransactions(address: string, lastSeenTxId: string): Promise<IEsploraApi.Transaction[]>;
$getAddressPrefix(prefix: string): string[];
$getScriptHash(scripthash: string): Promise<IEsploraApi.ScriptHash>;
$getScriptHashTransactions(address: string, lastSeenTxId: string): Promise<IEsploraApi.Transaction[]>;
$sendRawTransaction(rawTransaction: string): Promise<string>;
$getOutspend(txId: string, vout: number): Promise<IEsploraApi.Outspend>;
$getOutspends(txId: string): Promise<IEsploraApi.Outspend[]>;
$getBatchedOutspends(txId: string[]): Promise<IEsploraApi.Outspend[][]>;
startHealthChecks(): void;
}
export interface BitcoinRpcCredentials {
host: string;

View File

@@ -17,6 +17,4 @@ function bitcoinApiFactory(): AbstractBitcoinApi {
}
}
export const bitcoinCoreApi = new BitcoinApi(bitcoinClient);
export default bitcoinApiFactory();
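// The factory body is collapsed by the hunk above; a hypothetical sketch of the
// selection it performs, assuming a switch on config.MEMPOOL.BACKEND between the
// implementations seen elsewhere in this diff (class imports omitted; the exact
// cases are not shown here):
import config from '../../config';
import { AbstractBitcoinApi } from './bitcoin-api-abstract-factory';

function bitcoinApiFactorySketch(): AbstractBitcoinApi {
  switch (config.MEMPOOL.BACKEND) {
    case 'esplora':
      return new ElectrsApi();                       // Esplora REST backend
    case 'electrum':
      return new BitcoindElectrsApi(bitcoinClient);  // Core RPC + Electrum server
    default:
      return new BitcoinApi(bitcoinClient);          // Core RPC only
  }
}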

View File

@@ -4,7 +4,6 @@ export namespace IBitcoinApi {
size: number; // (numeric) Current tx count
bytes: number; // (numeric) Sum of all virtual transaction sizes as defined in BIP 141.
usage: number; // (numeric) Total memory usage for the mempool
total_fee: number; // (numeric) Total fees of transactions in the mempool
maxmempool: number; // (numeric) Maximum memory usage for the mempool
mempoolminfee: number; // (numeric) Minimum fee rate in BTC/kB for tx to be accepted.
minrelaytxfee: number; // (numeric) Current minimum relay fee for transactions
@@ -73,14 +72,6 @@ export namespace IBitcoinApi {
time: number; // (numeric) Same as blocktime
}
export interface VerboseBlock extends Block {
tx: VerboseTransaction[]; // The transactions in the format of the getrawtransaction RPC. Different from verbosity = 1 "tx" result
}
export interface VerboseTransaction extends Transaction {
fee?: number; // (numeric) The transaction fee in BTC, omitted if block undo data is not available
}
export interface Vin {
txid?: string; // (string) The transaction id
vout?: number; // (string)
@@ -172,35 +163,4 @@ export namespace IBitcoinApi {
}
}
export interface BlockStats {
"avgfee": number;
"avgfeerate": number;
"avgtxsize": number;
"blockhash": string;
"feerate_percentiles": [number, number, number, number, number];
"height": number;
"ins": number;
"maxfee": number;
"maxfeerate": number;
"maxtxsize": number;
"medianfee": number;
"mediantime": number;
"mediantxsize": number;
"minfee": number;
"minfeerate": number;
"mintxsize": number;
"outs": number;
"subsidy": number;
"swtotal_size": number;
"swtotal_weight": number;
"swtxs": number;
"time": number;
"total_out": number;
"total_size": number;
"total_weight": number;
"totalfee": number;
"txs": number;
"utxo_increase": number;
"utxo_size_inc": number;
}
}
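// BlockStats mirrors Bitcoin Core's getblockstats RPC. A usage sketch, assuming the
// RPC wrapper exposes a camelCased getBlockStats(heightOrHash) like the other calls
// in this diff (getBlockCount, getNetworkHashPs, ...):
import bitcoinClient from './bitcoin-client';
import { IBitcoinApi } from './bitcoin-api.interface';

async function medianFeerate(height: number): Promise<number> {
  const stats: IBitcoinApi.BlockStats = await bitcoinClient.getBlockStats(height);
  // feerate_percentiles holds the 10th/25th/50th/75th/90th percentiles in sat/vB
  return stats.feerate_percentiles[2];
}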

View File

@@ -5,7 +5,6 @@ import { IEsploraApi } from './esplora-api.interface';
import blocks from '../blocks';
import mempool from '../mempool';
import { TransactionExtended } from '../../mempool.interfaces';
import transactionUtils from '../transaction-utils';
class BitcoinApi implements AbstractBitcoinApi {
private rawMempoolCache: IBitcoinApi.RawMempool | null = null;
@@ -15,27 +14,7 @@ class BitcoinApi implements AbstractBitcoinApi {
this.bitcoindClient = bitcoinClient;
}
static convertBlock(block: IBitcoinApi.Block): IEsploraApi.Block {
return {
id: block.hash,
height: block.height,
version: block.version,
timestamp: block.time,
bits: parseInt(block.bits, 16),
nonce: block.nonce,
difficulty: block.difficulty,
merkle_root: block.merkleroot,
tx_count: block.nTx,
size: block.size,
weight: block.weight,
previousblockhash: block.previousblockhash,
mediantime: block.mediantime,
stale: block.confirmations === -1,
};
}
$getRawTransaction(txId: string, skipConversion = false, addPrevout = false, lazyPrevouts = false): Promise<IEsploraApi.Transaction> {
$getRawTransaction(txId: string, skipConversion = false, addPrevout = false): Promise<IEsploraApi.Transaction> {
// If the transaction is in the mempool we already converted and fetched the fee. Only prevouts are missing
const txInMempool = mempool.getMempool()[txId];
if (txInMempool && addPrevout) {
@@ -46,11 +25,11 @@ class BitcoinApi implements AbstractBitcoinApi {
.then((transaction: IBitcoinApi.Transaction) => {
if (skipConversion) {
transaction.vout.forEach((vout) => {
vout.value = Math.round(vout.value * 100000000);
vout.value = vout.value * 100000000;
});
return transaction;
}
return this.$convertTransaction(transaction, addPrevout, lazyPrevouts);
return this.$convertTransaction(transaction, addPrevout);
})
.catch((e: Error) => {
if (e.message.startsWith('The genesis block coinbase')) {
@@ -60,33 +39,9 @@ class BitcoinApi implements AbstractBitcoinApi {
});
}
$getMempoolTransactions(txids: string[]): Promise<IEsploraApi.Transaction[]> {
throw new Error('Method getMempoolTransactions not supported by the Bitcoin RPC API.');
}
$getAllMempoolTransactions(lastTxid: string): Promise<IEsploraApi.Transaction[]> {
throw new Error('Method getAllMempoolTransactions not supported by the Bitcoin RPC API.');
}
async $getTransactionHex(txId: string): Promise<string> {
const txInMempool = mempool.getMempool()[txId];
if (txInMempool && txInMempool.hex) {
return txInMempool.hex;
}
return this.bitcoindClient.getRawTransaction(txId, true)
.then((transaction: IBitcoinApi.Transaction) => {
return transaction.hex;
});
}
$getBlockHeightTip(): Promise<number> {
return this.bitcoindClient.getBlockCount();
}
$getBlockHashTip(): Promise<string> {
return this.bitcoindClient.getBestBlockHash();
return this.bitcoindClient.getChainTips()
.then((result: IBitcoinApi.ChainTips[]) => result[0].height);
}
$getTxIdsForBlock(hash: string): Promise<string[]> {
@@ -94,13 +49,8 @@ class BitcoinApi implements AbstractBitcoinApi {
.then((rpcBlock: IBitcoinApi.Block) => rpcBlock.tx);
}
$getTxsForBlock(hash: string): Promise<IEsploraApi.Transaction[]> {
throw new Error('Method getTxsForBlock not supported by the Bitcoin RPC API.');
}
$getRawBlock(hash: string): Promise<Buffer> {
return this.bitcoindClient.getBlock(hash, 0)
.then((raw: string) => Buffer.from(raw, "hex"));
$getRawBlock(hash: string): Promise<string> {
return this.bitcoindClient.getBlock(hash, 0);
}
$getBlockHash(height: number): Promise<string> {
@@ -118,7 +68,7 @@ class BitcoinApi implements AbstractBitcoinApi {
}
return this.bitcoindClient.getBlock(hash)
.then((block: IBitcoinApi.Block) => BitcoinApi.convertBlock(block));
.then((block: IBitcoinApi.Block) => this.convertBlock(block));
}
$getAddress(address: string): Promise<IEsploraApi.Address> {
@@ -129,81 +79,48 @@ class BitcoinApi implements AbstractBitcoinApi {
throw new Error('Method getAddressTransactions not supported by the Bitcoin RPC API.');
}
$getScriptHash(scripthash: string): Promise<IEsploraApi.ScriptHash> {
throw new Error('Method getScriptHash not supported by the Bitcoin RPC API.');
}
$getScriptHashTransactions(scripthash: string, lastSeenTxId: string): Promise<IEsploraApi.Transaction[]> {
throw new Error('Method getScriptHashTransactions not supported by the Bitcoin RPC API.');
}
$getRawMempool(): Promise<IEsploraApi.Transaction['txid'][]> {
return this.bitcoindClient.getRawMemPool();
}
$getAddressPrefix(prefix: string): string[] {
const found: { [address: string]: string } = {};
const found: string[] = [];
const mp = mempool.getMempool();
for (const tx in mp) {
for (const vout of mp[tx].vout) {
if (vout.scriptpubkey_address.indexOf(prefix) === 0) {
found[vout.scriptpubkey_address] = '';
if (Object.keys(found).length >= 10) {
return Object.keys(found);
found.push(vout.scriptpubkey_address);
if (found.length >= 10) {
return found;
}
}
}
}
return Object.keys(found);
return found;
}
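// The keyed-object variant above exists to dedupe addresses that fund several
// mempool outputs; a Set-based sketch of the same scan (not the code either side
// of this diff settles on):
import { IEsploraApi } from './esplora-api.interface';

function addressesWithPrefix(mp: { [txid: string]: IEsploraApi.Transaction }, prefix: string): string[] {
  const found = new Set<string>();
  for (const tx of Object.values(mp)) {
    for (const vout of tx.vout) {
      if (vout.scriptpubkey_address?.startsWith(prefix)) {
        found.add(vout.scriptpubkey_address);
        if (found.size >= 10) {
          return [...found];   // cap the response at 10 matches, as above
        }
      }
    }
  }
  return [...found];
}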
$sendRawTransaction(rawTransaction: string): Promise<string> {
return this.bitcoindClient.sendRawTransaction(rawTransaction);
}
async $getOutspend(txId: string, vout: number): Promise<IEsploraApi.Outspend> {
const txOut = await this.bitcoindClient.getTxOut(txId, vout, false);
return {
spent: txOut === null,
status: {
confirmed: true,
}
};
}
async $getOutspends(txId: string): Promise<IEsploraApi.Outspend[]> {
const outSpends: IEsploraApi.Outspend[] = [];
const tx = await this.$getRawTransaction(txId, true, false);
for (let i = 0; i < tx.vout.length; i++) {
if (tx.status && tx.status.block_height === 0) {
outSpends.push({
spent: false
});
} else {
const txOut = await this.bitcoindClient.getTxOut(txId, i);
outSpends.push({
spent: txOut === null,
});
}
const txOut = await this.bitcoindClient.getTxOut(txId, i);
outSpends.push({
spent: txOut === null,
});
}
return outSpends;
}
async $getBatchedOutspends(txId: string[]): Promise<IEsploraApi.Outspend[][]> {
const outspends: IEsploraApi.Outspend[][] = [];
for (const tx of txId) {
const outspend = await this.$getOutspends(tx);
outspends.push(outspend);
}
return outspends;
}
$getEstimatedHashrate(blockHeight: number): Promise<number> {
// 120 is the default block span in Core
return this.bitcoindClient.getNetworkHashPs(120, blockHeight);
}
protected async $convertTransaction(transaction: IBitcoinApi.Transaction, addPrevout: boolean, lazyPrevouts = false): Promise<IEsploraApi.Transaction> {
protected async $convertTransaction(transaction: IBitcoinApi.Transaction, addPrevout: boolean): Promise<IEsploraApi.Transaction> {
let esploraTransaction: IEsploraApi.Transaction = {
txid: transaction.txid,
version: transaction.version,
@@ -218,11 +135,11 @@ class BitcoinApi implements AbstractBitcoinApi {
esploraTransaction.vout = transaction.vout.map((vout) => {
return {
value: Math.round(vout.value * 100000000),
value: vout.value * 100000000,
scriptpubkey: vout.scriptPubKey.hex,
scriptpubkey_address: vout.scriptPubKey && vout.scriptPubKey.address ? vout.scriptPubKey.address
: vout.scriptPubKey.addresses ? vout.scriptPubKey.addresses[0] : '',
scriptpubkey_asm: vout.scriptPubKey.asm ? transactionUtils.convertScriptSigAsm(vout.scriptPubKey.hex) : '',
scriptpubkey_asm: vout.scriptPubKey.asm ? this.convertScriptSigAsm(vout.scriptPubKey.asm) : '',
scriptpubkey_type: this.translateScriptPubKeyType(vout.scriptPubKey.type),
};
});
@@ -232,13 +149,11 @@ class BitcoinApi implements AbstractBitcoinApi {
is_coinbase: !!vin.coinbase,
prevout: null,
scriptsig: vin.scriptSig && vin.scriptSig.hex || vin.coinbase || '',
scriptsig_asm: vin.scriptSig && transactionUtils.convertScriptSigAsm(vin.scriptSig.hex) || '',
scriptsig_asm: vin.scriptSig && this.convertScriptSigAsm(vin.scriptSig.asm) || '',
sequence: vin.sequence,
txid: vin.txid || '',
vout: vin.vout || 0,
witness: vin.txinwitness || [],
inner_redeemscript_asm: '',
inner_witnessscript_asm: '',
witness: vin.txinwitness,
};
});
@@ -251,15 +166,35 @@ class BitcoinApi implements AbstractBitcoinApi {
};
}
if (addPrevout) {
esploraTransaction = await this.$calculateFeeFromInputs(esploraTransaction, false, lazyPrevouts);
} else if (!transaction.confirmations) {
if (transaction.confirmations) {
esploraTransaction = await this.$calculateFeeFromInputs(esploraTransaction, addPrevout);
} else {
esploraTransaction = await this.$appendMempoolFeeData(esploraTransaction);
if (addPrevout) {
esploraTransaction = await this.$calculateFeeFromInputs(esploraTransaction, addPrevout);
}
}
return esploraTransaction;
}
private convertBlock(block: IBitcoinApi.Block): IEsploraApi.Block {
return {
id: block.hash,
height: block.height,
version: block.version,
timestamp: block.time,
bits: parseInt(block.bits, 16),
nonce: block.nonce,
difficulty: block.difficulty,
merkle_root: block.merkleroot,
tx_count: block.nTx,
size: block.size,
weight: block.weight,
previousblockhash: block.previousblockhash,
};
}
private translateScriptPubKeyType(outputType: string): string {
const map = {
'pubkey': 'p2pk',
@@ -269,14 +204,13 @@ class BitcoinApi implements AbstractBitcoinApi {
'witness_v0_scripthash': 'v0_p2wsh',
'witness_v1_taproot': 'v1_p2tr',
'nonstandard': 'nonstandard',
'multisig': 'multisig',
'nulldata': 'op_return'
};
if (map[outputType]) {
return map[outputType];
} else {
return 'unknown';
return '';
}
}
@@ -293,7 +227,7 @@ class BitcoinApi implements AbstractBitcoinApi {
} else {
mempoolEntry = await this.$getMempoolEntry(transaction.txid);
}
transaction.fee = Math.round(mempoolEntry.fees.base * 100000000);
transaction.fee = mempoolEntry.fees.base * 100000000;
return transaction;
}
@@ -302,9 +236,9 @@ class BitcoinApi implements AbstractBitcoinApi {
if (vin.prevout) {
continue;
}
const innerTx = await this.$getRawTransaction(vin.txid, false, false);
const innerTx = await this.$getRawTransaction(vin.txid, false);
vin.prevout = innerTx.vout[vin.vout];
transactionUtils.addInnerScriptsToVin(vin);
this.addInnerScriptsToVin(vin);
}
return transaction;
}
@@ -328,34 +262,70 @@ class BitcoinApi implements AbstractBitcoinApi {
return this.bitcoindClient.getRawMemPool(true);
}
private async $calculateFeeFromInputs(transaction: IEsploraApi.Transaction, addPrevout: boolean, lazyPrevouts: boolean): Promise<IEsploraApi.Transaction> {
private async $calculateFeeFromInputs(transaction: IEsploraApi.Transaction, addPrevout: boolean): Promise<IEsploraApi.Transaction> {
if (transaction.vin[0].is_coinbase) {
transaction.fee = 0;
return transaction;
}
let totalIn = 0;
for (let i = 0; i < transaction.vin.length; i++) {
if (lazyPrevouts && i > 12) {
transaction.vin[i].lazy = true;
continue;
for (const vin of transaction.vin) {
const innerTx = await this.$getRawTransaction(vin.txid, !addPrevout);
if (addPrevout) {
vin.prevout = innerTx.vout[vin.vout];
this.addInnerScriptsToVin(vin);
}
const innerTx = await this.$getRawTransaction(transaction.vin[i].txid, false, false);
transaction.vin[i].prevout = innerTx.vout[transaction.vin[i].vout];
transactionUtils.addInnerScriptsToVin(transaction.vin[i]);
totalIn += innerTx.vout[transaction.vin[i].vout].value;
}
if (lazyPrevouts && transaction.vin.length > 12) {
transaction.fee = -1;
} else {
const totalOut = transaction.vout.reduce((p, output) => p + output.value, 0);
transaction.fee = parseFloat((totalIn - totalOut).toFixed(8));
totalIn += innerTx.vout[vin.vout].value;
}
const totalOut = transaction.vout.reduce((p, output) => p + output.value, 0);
transaction.fee = parseFloat((totalIn - totalOut).toFixed(8));
return transaction;
}
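// The fee arithmetic above in numbers, assuming BTC-denominated values (which is
// where the toFixed(8) rounding matters): inputs of 0.5 + 0.25 BTC against outputs
// totalling 0.749 BTC leave a 0.001 BTC fee; the raw IEEE-754 subtraction carries
// residue, and toFixed(8) clamps it to eight decimals before parseFloat.
const totalIn = 0.5 + 0.25;                              // 0.75
const totalOut = 0.749;
const fee = parseFloat((totalIn - totalOut).toFixed(8)); // 0.001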
public startHealthChecks(): void {};
private convertScriptSigAsm(str: string): string {
const a = str.split(' ');
const b: string[] = [];
a.forEach((chunk) => {
if (chunk.substr(0, 3) === 'OP_') {
chunk = chunk.replace(/^OP_(\d+)/, 'OP_PUSHNUM_$1');
chunk = chunk.replace('OP_CHECKSEQUENCEVERIFY', 'OP_CSV');
b.push(chunk);
} else {
chunk = chunk.replace('[ALL]', '01');
if (chunk === '0') {
b.push('OP_0');
} else {
b.push('OP_PUSHBYTES_' + Math.round(chunk.length / 2) + ' ' + chunk);
}
}
});
return b.join(' ');
}
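// Input/output examples for the ASM translation above, fed with Core's
// getrawtransaction-style scriptSig ASM (illustrative values):
//   'OP_1 OP_CHECKSEQUENCEVERIFY'  -> 'OP_PUSHNUM_1 OP_CSV'
//   '0'                            -> 'OP_0'
//   '<71-byte-sig-hex>[ALL] <33-byte-pubkey-hex>'
//                                  -> 'OP_PUSHBYTES_72 <sig-hex>01 OP_PUSHBYTES_33 <pubkey-hex>'
// Hex chunks gain an OP_PUSHBYTES_<n> prefix with n = hex length / 2, and the
// '[ALL]' sighash marker is folded into the signature bytes as a trailing '01'.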
private addInnerScriptsToVin(vin: IEsploraApi.Vin): void {
if (!vin.prevout) {
return;
}
if (vin.prevout.scriptpubkey_type === 'p2sh') {
const redeemScript = vin.scriptsig_asm.split(' ').reverse()[0];
vin.inner_redeemscript_asm = this.convertScriptSigAsm(bitcoinjs.script.toASM(Buffer.from(redeemScript, 'hex')));
if (vin.witness && vin.witness.length > 2) {
const witnessScript = vin.witness[vin.witness.length - 1];
vin.inner_witnessscript_asm = this.convertScriptSigAsm(bitcoinjs.script.toASM(Buffer.from(witnessScript, 'hex')));
}
}
if (vin.prevout.scriptpubkey_type === 'v0_p2wsh' && vin.witness) {
const witnessScript = vin.witness[vin.witness.length - 1];
vin.inner_witnessscript_asm = this.convertScriptSigAsm(bitcoinjs.script.toASM(Buffer.from(witnessScript, 'hex')));
}
if (vin.prevout.scriptpubkey_type === 'v1_p2tr' && vin.witness && vin.witness.length > 1) {
const witnessScript = vin.witness[vin.witness.length - 2];
vin.inner_witnessscript_asm = this.convertScriptSigAsm(bitcoinjs.script.toASM(Buffer.from(witnessScript, 'hex')));
}
}
}
export default BitcoinApi;

View File

@@ -1,5 +1,5 @@
import config from '../../config';
const bitcoin = require('../../rpc-api/index');
import * as bitcoin from '@mempool/bitcoin';
import { BitcoinRpcCredentials } from './bitcoin-api-abstract-factory';
const nodeRpcCredentials: BitcoinRpcCredentials = {
@@ -7,7 +7,7 @@ const nodeRpcCredentials: BitcoinRpcCredentials = {
port: config.CORE_RPC.PORT,
user: config.CORE_RPC.USERNAME,
pass: config.CORE_RPC.PASSWORD,
timeout: config.CORE_RPC.TIMEOUT,
timeout: 60000,
};
export default new bitcoin.Client(nodeRpcCredentials);

View File

@@ -1,5 +1,5 @@
import config from '../../config';
const bitcoin = require('../../rpc-api/index');
import * as bitcoin from '@mempool/bitcoin';
import { BitcoinRpcCredentials } from './bitcoin-api-abstract-factory';
const nodeRpcCredentials: BitcoinRpcCredentials = {
@@ -7,7 +7,7 @@ const nodeRpcCredentials: BitcoinRpcCredentials = {
port: config.SECOND_CORE_RPC.PORT,
user: config.SECOND_CORE_RPC.USERNAME,
pass: config.SECOND_CORE_RPC.PASSWORD,
timeout: config.SECOND_CORE_RPC.TIMEOUT,
timeout: 60000,
};
export default new bitcoin.Client(nodeRpcCredentials);

View File

@@ -1,791 +0,0 @@
import { Application, Request, Response } from 'express';
import axios from 'axios';
import * as bitcoinjs from 'bitcoinjs-lib';
import config from '../../config';
import websocketHandler from '../websocket-handler';
import mempool from '../mempool';
import feeApi from '../fee-api';
import mempoolBlocks from '../mempool-blocks';
import bitcoinApi from './bitcoin-api-factory';
import { Common } from '../common';
import backendInfo from '../backend-info';
import transactionUtils from '../transaction-utils';
import { IEsploraApi } from './esplora-api.interface';
import loadingIndicators from '../loading-indicators';
import { TransactionExtended } from '../../mempool.interfaces';
import logger from '../../logger';
import blocks from '../blocks';
import bitcoinClient from './bitcoin-client';
import difficultyAdjustment from '../difficulty-adjustment';
import transactionRepository from '../../repositories/TransactionRepository';
import rbfCache from '../rbf-cache';
class BitcoinRoutes {
public initRoutes(app: Application) {
app
.get(config.MEMPOOL.API_URL_PREFIX + 'transaction-times', this.getTransactionTimes)
.get(config.MEMPOOL.API_URL_PREFIX + 'outspends', this.$getBatchedOutspends)
.get(config.MEMPOOL.API_URL_PREFIX + 'cpfp/:txId', this.$getCpfpInfo)
.get(config.MEMPOOL.API_URL_PREFIX + 'difficulty-adjustment', this.getDifficultyChange)
.get(config.MEMPOOL.API_URL_PREFIX + 'fees/recommended', this.getRecommendedFees)
.get(config.MEMPOOL.API_URL_PREFIX + 'fees/mempool-blocks', this.getMempoolBlocks)
.get(config.MEMPOOL.API_URL_PREFIX + 'backend-info', this.getBackendInfo)
.get(config.MEMPOOL.API_URL_PREFIX + 'init-data', this.getInitData)
.get(config.MEMPOOL.API_URL_PREFIX + 'validate-address/:address', this.validateAddress)
.get(config.MEMPOOL.API_URL_PREFIX + 'tx/:txId/rbf', this.getRbfHistory)
.get(config.MEMPOOL.API_URL_PREFIX + 'tx/:txId/cached', this.getCachedTx)
.get(config.MEMPOOL.API_URL_PREFIX + 'replacements', this.getRbfReplacements)
.get(config.MEMPOOL.API_URL_PREFIX + 'fullrbf/replacements', this.getFullRbfReplacements)
.post(config.MEMPOOL.API_URL_PREFIX + 'tx/push', this.$postTransactionForm)
.get(config.MEMPOOL.API_URL_PREFIX + 'donations', async (req, res) => {
try {
const response = await axios.get(`${config.EXTERNAL_DATA_SERVER.MEMPOOL_API}/donations`, { responseType: 'stream', timeout: 10000 });
response.data.pipe(res);
} catch (e) {
res.status(500).end();
}
})
.get(config.MEMPOOL.API_URL_PREFIX + 'donations/images/:id', async (req, res) => {
try {
const response = await axios.get(`${config.EXTERNAL_DATA_SERVER.MEMPOOL_API}/donations/images/${req.params.id}`, {
responseType: 'stream', timeout: 10000
});
response.data.pipe(res);
} catch (e) {
res.status(500).end();
}
})
.get(config.MEMPOOL.API_URL_PREFIX + 'contributors', async (req, res) => {
try {
const response = await axios.get(`${config.EXTERNAL_DATA_SERVER.MEMPOOL_API}/contributors`, { responseType: 'stream', timeout: 10000 });
response.data.pipe(res);
} catch (e) {
res.status(500).end();
}
})
.get(config.MEMPOOL.API_URL_PREFIX + 'contributors/images/:id', async (req, res) => {
try {
const response = await axios.get(`${config.EXTERNAL_DATA_SERVER.MEMPOOL_API}/contributors/images/${req.params.id}`, {
responseType: 'stream', timeout: 10000
});
response.data.pipe(res);
} catch (e) {
res.status(500).end();
}
})
.get(config.MEMPOOL.API_URL_PREFIX + 'translators', async (req, res) => {
try {
const response = await axios.get(`${config.EXTERNAL_DATA_SERVER.MEMPOOL_API}/translators`, { responseType: 'stream', timeout: 10000 });
response.data.pipe(res);
} catch (e) {
res.status(500).end();
}
})
.get(config.MEMPOOL.API_URL_PREFIX + 'translators/images/:id', async (req, res) => {
try {
const response = await axios.get(`${config.EXTERNAL_DATA_SERVER.MEMPOOL_API}/translators/images/${req.params.id}`, {
responseType: 'stream', timeout: 10000
});
response.data.pipe(res);
} catch (e) {
res.status(500).end();
}
})
.get(config.MEMPOOL.API_URL_PREFIX + 'blocks', this.getBlocks.bind(this))
.get(config.MEMPOOL.API_URL_PREFIX + 'blocks/:height', this.getBlocks.bind(this))
.get(config.MEMPOOL.API_URL_PREFIX + 'block/:hash', this.getBlock)
.get(config.MEMPOOL.API_URL_PREFIX + 'block/:hash/summary', this.getStrippedBlockTransactions)
.get(config.MEMPOOL.API_URL_PREFIX + 'block/:hash/audit-summary', this.getBlockAuditSummary)
.get(config.MEMPOOL.API_URL_PREFIX + 'blocks/tip/height', this.getBlockTipHeight)
.post(config.MEMPOOL.API_URL_PREFIX + 'psbt/addparents', this.postPsbtCompletion)
.get(config.MEMPOOL.API_URL_PREFIX + 'blocks-bulk/:from', this.getBlocksByBulk.bind(this))
.get(config.MEMPOOL.API_URL_PREFIX + 'blocks-bulk/:from/:to', this.getBlocksByBulk.bind(this))
;
if (config.MEMPOOL.BACKEND !== 'esplora') {
app
.get(config.MEMPOOL.API_URL_PREFIX + 'mempool', this.getMempool)
.get(config.MEMPOOL.API_URL_PREFIX + 'mempool/txids', this.getMempoolTxIds)
.get(config.MEMPOOL.API_URL_PREFIX + 'mempool/recent', this.getRecentMempoolTransactions)
.get(config.MEMPOOL.API_URL_PREFIX + 'tx/:txId', this.getTransaction)
.post(config.MEMPOOL.API_URL_PREFIX + 'tx', this.$postTransaction)
.get(config.MEMPOOL.API_URL_PREFIX + 'tx/:txId/hex', this.getRawTransaction)
.get(config.MEMPOOL.API_URL_PREFIX + 'tx/:txId/status', this.getTransactionStatus)
.get(config.MEMPOOL.API_URL_PREFIX + 'tx/:txId/outspends', this.getTransactionOutspends)
.get(config.MEMPOOL.API_URL_PREFIX + 'block/:hash/header', this.getBlockHeader)
.get(config.MEMPOOL.API_URL_PREFIX + 'blocks/tip/hash', this.getBlockTipHash)
.get(config.MEMPOOL.API_URL_PREFIX + 'block/:hash/raw', this.getRawBlock)
.get(config.MEMPOOL.API_URL_PREFIX + 'block/:hash/txids', this.getTxIdsForBlock)
.get(config.MEMPOOL.API_URL_PREFIX + 'block/:hash/txs', this.getBlockTransactions)
.get(config.MEMPOOL.API_URL_PREFIX + 'block/:hash/txs/:index', this.getBlockTransactions)
.get(config.MEMPOOL.API_URL_PREFIX + 'block-height/:height', this.getBlockHeight)
.get(config.MEMPOOL.API_URL_PREFIX + 'address/:address', this.getAddress)
.get(config.MEMPOOL.API_URL_PREFIX + 'address/:address/txs', this.getAddressTransactions)
.get(config.MEMPOOL.API_URL_PREFIX + 'scripthash/:scripthash', this.getScriptHash)
.get(config.MEMPOOL.API_URL_PREFIX + 'scripthash/:scripthash/txs', this.getScriptHashTransactions)
.get(config.MEMPOOL.API_URL_PREFIX + 'address-prefix/:prefix', this.getAddressPrefix)
;
}
}
private getInitData(req: Request, res: Response) {
try {
const result = websocketHandler.getSerializedInitData();
res.set('Content-Type', 'application/json');
res.send(result);
} catch (e) {
res.status(500).send(e instanceof Error ? e.message : e);
}
}
private getRecommendedFees(req: Request, res: Response) {
if (!mempool.isInSync()) {
res.statusCode = 503;
res.send('Service Unavailable');
return;
}
const result = feeApi.getRecommendedFee();
res.json(result);
}
private getMempoolBlocks(req: Request, res: Response) {
try {
const result = mempoolBlocks.getMempoolBlocks();
res.json(result);
} catch (e) {
res.status(500).send(e instanceof Error ? e.message : e);
}
}
private getTransactionTimes(req: Request, res: Response) {
if (!Array.isArray(req.query.txId)) {
res.status(500).send('Not an array');
return;
}
const txIds: string[] = [];
for (const _txId in req.query.txId) {
if (typeof req.query.txId[_txId] === 'string') {
txIds.push(req.query.txId[_txId].toString());
}
}
const times = mempool.getFirstSeenForTransactions(txIds);
res.json(times);
}
private async $getBatchedOutspends(req: Request, res: Response) {
if (!Array.isArray(req.query.txId)) {
res.status(500).send('Not an array');
return;
}
if (req.query.txId.length > 50) {
res.status(400).send('Too many txids requested');
return;
}
const txIds: string[] = [];
for (const _txId in req.query.txId) {
if (typeof req.query.txId[_txId] === 'string') {
txIds.push(req.query.txId[_txId].toString());
}
}
try {
const batchedOutspends = await bitcoinApi.$getBatchedOutspends(txIds);
res.json(batchedOutspends);
} catch (e) {
res.status(500).send(e instanceof Error ? e.message : e);
}
}
private async $getCpfpInfo(req: Request, res: Response) {
if (!/^[a-fA-F0-9]{64}$/.test(req.params.txId)) {
res.status(501).send(`Invalid transaction ID.`);
return;
}
const tx = mempool.getMempool()[req.params.txId];
if (tx) {
if (tx?.cpfpChecked) {
res.json({
ancestors: tx.ancestors,
bestDescendant: tx.bestDescendant || null,
descendants: tx.descendants || null,
effectiveFeePerVsize: tx.effectiveFeePerVsize || null,
sigops: tx.sigops,
adjustedVsize: tx.adjustedVsize,
acceleration: tx.acceleration
});
return;
}
const cpfpInfo = Common.setRelativesAndGetCpfpInfo(tx, mempool.getMempool());
res.json(cpfpInfo);
return;
} else {
let cpfpInfo;
if (config.DATABASE.ENABLED) {
try {
cpfpInfo = await transactionRepository.$getCpfpInfo(req.params.txId);
} catch (e) {
res.status(500).send('failed to get CPFP info');
return;
}
}
if (cpfpInfo) {
res.json(cpfpInfo);
return;
} else {
res.json({
ancestors: []
});
return;
}
}
}
private getBackendInfo(req: Request, res: Response) {
res.json(backendInfo.getBackendInfo());
}
private async getTransaction(req: Request, res: Response) {
try {
const transaction = await transactionUtils.$getTransactionExtended(req.params.txId, true);
res.json(transaction);
} catch (e) {
let statusCode = 500;
if (e instanceof Error && e.message && e.message.indexOf('No such mempool or blockchain transaction') > -1) {
statusCode = 404;
}
res.status(statusCode).send(e instanceof Error ? e.message : e);
}
}
private async getRawTransaction(req: Request, res: Response) {
try {
const transaction: IEsploraApi.Transaction = await bitcoinApi.$getRawTransaction(req.params.txId, true);
res.setHeader('content-type', 'text/plain');
res.send(transaction.hex);
} catch (e) {
let statusCode = 500;
if (e instanceof Error && e.message && e.message.indexOf('No such mempool or blockchain transaction') > -1) {
statusCode = 404;
}
res.status(statusCode).send(e instanceof Error ? e.message : e);
}
}
/**
* Takes the PSBT as text/plain body, parses it, and adds the full
* parent transaction to each input that doesn't already have it.
* This is used for BTCPayServer / Trezor users who need access to
* the full parent transaction even with segwit inputs.
* It will respond with a text/plain PSBT in the same format (hex|base64).
*/
private async postPsbtCompletion(req: Request, res: Response): Promise<void> {
res.setHeader('content-type', 'text/plain');
const notFoundError = `Couldn't get transaction hex for parent of input`;
try {
let psbt: bitcoinjs.Psbt;
let format: 'hex' | 'base64';
let isModified = false;
try {
psbt = bitcoinjs.Psbt.fromBase64(req.body);
format = 'base64';
} catch (e1) {
try {
psbt = bitcoinjs.Psbt.fromHex(req.body);
format = 'hex';
} catch (e2) {
throw new Error(`Unable to parse PSBT`);
}
}
for (const [index, input] of psbt.data.inputs.entries()) {
if (!input.nonWitnessUtxo) {
// Buffer.from ensures it won't be modified in place by reverse()
const txid = Buffer.from(psbt.txInputs[index].hash)
.reverse()
.toString('hex');
let transactionHex: string;
// If the parent transaction is missing, respond with a 404
try {
transactionHex = await bitcoinApi.$getTransactionHex(txid);
if (!transactionHex) {
throw new Error('');
}
} catch (err) {
throw new Error(`${notFoundError} #${index} @ ${txid}`);
}
psbt.updateInput(index, {
nonWitnessUtxo: Buffer.from(transactionHex, 'hex'),
});
if (!isModified) {
isModified = true;
}
}
}
if (isModified) {
res.send(format === 'hex' ? psbt.toHex() : psbt.toBase64());
} else {
// Not modified
// 422 Unprocessable Entity
// https://developer.mozilla.org/en-US/docs/Web/HTTP/Status/422
res.status(422).send(`Psbt had no missing nonWitnessUtxos.`);
}
} catch (e: any) {
if (e instanceof Error && new RegExp(notFoundError).test(e.message)) {
res.status(404).send(e.message);
} else {
res.status(500).send(e instanceof Error ? e.message : e);
}
}
}
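// Usage sketch for the endpoint above, assuming the default '/api/v1/' URL prefix;
// the PSBT travels as a text/plain body (hex or base64) and comes back in the same
// encoding, or 422 when no input was missing its nonWitnessUtxo:
import axios from 'axios';

async function addParents(psbtBase64: string, baseUrl = 'https://mempool.space'): Promise<string> {
  const res = await axios.post(baseUrl + '/api/v1/psbt/addparents', psbtBase64, {
    headers: { 'Content-Type': 'text/plain' },
    validateStatus: (s) => s === 200 || s === 422, // 422 = nothing to add
  });
  return res.status === 200 ? res.data : psbtBase64;
}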
private async getTransactionStatus(req: Request, res: Response) {
try {
const transaction = await transactionUtils.$getTransactionExtended(req.params.txId, true);
res.json(transaction.status);
} catch (e) {
let statusCode = 500;
if (e instanceof Error && e.message && e.message.indexOf('No such mempool or blockchain transaction') > -1) {
statusCode = 404;
}
res.status(statusCode).send(e instanceof Error ? e.message : e);
}
}
private async getStrippedBlockTransactions(req: Request, res: Response) {
try {
const transactions = await blocks.$getStrippedBlockTransactions(req.params.hash);
res.setHeader('Expires', new Date(Date.now() + 1000 * 3600 * 24 * 30).toUTCString());
res.json(transactions);
} catch (e) {
res.status(500).send(e instanceof Error ? e.message : e);
}
}
private async getBlock(req: Request, res: Response) {
try {
const block = await blocks.$getBlock(req.params.hash);
const blockAge = new Date().getTime() / 1000 - block.timestamp;
const day = 24 * 3600;
let cacheDuration;
if (blockAge > 365 * day) {
cacheDuration = 30 * day;
} else if (blockAge > 30 * day) {
cacheDuration = 10 * day;
} else {
cacheDuration = 600;
}
res.setHeader('Expires', new Date(Date.now() + 1000 * cacheDuration).toUTCString());
res.json(block);
} catch (e) {
res.status(500).send(e instanceof Error ? e.message : e);
}
}
private async getBlockHeader(req: Request, res: Response) {
try {
const blockHeader = await bitcoinApi.$getBlockHeader(req.params.hash);
res.setHeader('content-type', 'text/plain');
res.send(blockHeader);
} catch (e) {
res.status(500).send(e instanceof Error ? e.message : e);
}
}
private async getBlockAuditSummary(req: Request, res: Response) {
try {
const auditSummary = await blocks.$getBlockAuditSummary(req.params.hash);
if (auditSummary) {
res.setHeader('Expires', new Date(Date.now() + 1000 * 3600 * 24 * 30).toUTCString());
res.json(auditSummary);
} else {
return res.status(404).send(`audit not available`);
}
} catch (e) {
res.status(500).send(e instanceof Error ? e.message : e);
}
}
private async getBlocks(req: Request, res: Response) {
try {
if (['mainnet', 'testnet', 'signet'].includes(config.MEMPOOL.NETWORK)) { // Bitcoin
const height = req.params.height === undefined ? undefined : parseInt(req.params.height, 10);
res.setHeader('Expires', new Date(Date.now() + 1000 * 60).toUTCString());
res.json(await blocks.$getBlocks(height, 15));
} else { // Liquid, Bisq
return await this.getLegacyBlocks(req, res);
}
} catch (e) {
res.status(500).send(e instanceof Error ? e.message : e);
}
}
private async getBlocksByBulk(req: Request, res: Response) {
try {
if (['mainnet', 'testnet', 'signet'].includes(config.MEMPOOL.NETWORK) === false) { // Liquid, Bisq - Not implemented
return res.status(404).send(`This API is only available for Bitcoin networks`);
}
if (config.MEMPOOL.MAX_BLOCKS_BULK_QUERY <= 0) {
return res.status(404).send(`This API is disabled. Set config.MEMPOOL.MAX_BLOCKS_BULK_QUERY to a positive number to enable it.`);
}
if (!Common.indexingEnabled()) {
return res.status(404).send(`Indexing is required for this API`);
}
const from = parseInt(req.params.from, 10);
if (!req.params.from || from < 0) {
return res.status(400).send(`Parameter 'from' must be a block height (integer)`);
}
const to = req.params.to === undefined ? await bitcoinApi.$getBlockHeightTip() : parseInt(req.params.to, 10);
if (to < 0) {
return res.status(400).send(`Parameter 'to' must be a block height (integer)`);
}
if (from > to) {
return res.status(400).send(`Parameter 'to' must be a higher block height than 'from'`);
}
if ((to - from + 1) > config.MEMPOOL.MAX_BLOCKS_BULK_QUERY) {
return res.status(400).send(`You can only query ${config.MEMPOOL.MAX_BLOCKS_BULK_QUERY} blocks at once.`);
}
res.setHeader('Expires', new Date(Date.now() + 1000 * 60).toUTCString());
res.json(await blocks.$getBlocksBetweenHeight(from, to));
} catch (e) {
res.status(500).send(e instanceof Error ? e.message : e);
}
}
private async getLegacyBlocks(req: Request, res: Response) {
try {
const returnBlocks: IEsploraApi.Block[] = [];
const tip = blocks.getCurrentBlockHeight();
const fromHeight = Math.min(parseInt(req.params.height, 10) || tip, tip);
// Check if block height exist in local cache to skip the hash lookup
const blockByHeight = blocks.getBlocks().find((b) => b.height === fromHeight);
let startFromHash: string | null = null;
if (blockByHeight) {
startFromHash = blockByHeight.id;
} else {
startFromHash = await bitcoinApi.$getBlockHash(fromHeight);
}
let nextHash = startFromHash;
for (let i = 0; i < 10 && nextHash; i++) {
const localBlock = blocks.getBlocks().find((b) => b.id === nextHash);
if (localBlock) {
returnBlocks.push(localBlock);
nextHash = localBlock.previousblockhash;
} else {
const block = await bitcoinApi.$getBlock(nextHash);
returnBlocks.push(block);
nextHash = block.previousblockhash;
}
}
res.setHeader('Expires', new Date(Date.now() + 1000 * 60).toUTCString());
res.json(returnBlocks);
} catch (e) {
res.status(500).send(e instanceof Error ? e.message : e);
}
}
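// The loop above walks backwards through previousblockhash pointers, taking each
// block from the in-memory cache when possible and falling back to the API. The
// bare chain walk in isolation (a sketch, API-only):
import bitcoinApi from './bitcoin-api-factory';
import { IEsploraApi } from './esplora-api.interface';

async function walkChain(tipHash: string, count: number): Promise<IEsploraApi.Block[]> {
  const out: IEsploraApi.Block[] = [];
  let next: string | undefined = tipHash;
  for (let i = 0; i < count && next; i++) {
    const block = await bitcoinApi.$getBlock(next);
    out.push(block);
    next = block.previousblockhash;   // absent at the genesis block
  }
  return out;
}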
private async getBlockTransactions(req: Request, res: Response) {
try {
loadingIndicators.setProgress('blocktxs-' + req.params.hash, 0);
const txIds = await bitcoinApi.$getTxIdsForBlock(req.params.hash);
const transactions: TransactionExtended[] = [];
const startingIndex = Math.max(0, parseInt(req.params.index || '0', 10));
const endIndex = Math.min(startingIndex + 10, txIds.length);
for (let i = startingIndex; i < endIndex; i++) {
try {
const transaction = await transactionUtils.$getTransactionExtended(txIds[i], true, true);
transactions.push(transaction);
loadingIndicators.setProgress('blocktxs-' + req.params.hash, (i - startingIndex + 1) / (endIndex - startingIndex) * 100);
} catch (e) {
logger.debug('getBlockTransactions error: ' + (e instanceof Error ? e.message : e));
}
}
res.json(transactions);
} catch (e) {
loadingIndicators.setProgress('blocktxs-' + req.params.hash, 100);
res.status(500).send(e instanceof Error ? e.message : e);
}
}
private async getBlockHeight(req: Request, res: Response) {
try {
const blockHash = await bitcoinApi.$getBlockHash(parseInt(req.params.height, 10));
res.send(blockHash);
} catch (e) {
res.status(500).send(e instanceof Error ? e.message : e);
}
}
private async getAddress(req: Request, res: Response) {
if (config.MEMPOOL.BACKEND === 'none') {
res.status(405).send('Address lookups cannot be used with bitcoind as backend.');
return;
}
try {
const addressData = await bitcoinApi.$getAddress(req.params.address);
res.json(addressData);
} catch (e) {
if (e instanceof Error && e.message && (e.message.indexOf('too long') > 0 || e.message.indexOf('confirmed status') > 0)) {
return res.status(413).send(e instanceof Error ? e.message : e);
}
res.status(500).send(e instanceof Error ? e.message : e);
}
}
private async getAddressTransactions(req: Request, res: Response): Promise<void> {
if (config.MEMPOOL.BACKEND === 'none') {
res.status(405).send('Address lookups cannot be used with bitcoind as backend.');
return;
}
try {
let lastTxId: string = '';
if (req.query.after_txid && typeof req.query.after_txid === 'string') {
lastTxId = req.query.after_txid;
}
const transactions = await bitcoinApi.$getAddressTransactions(req.params.address, lastTxId);
res.json(transactions);
} catch (e) {
if (e instanceof Error && e.message && (e.message.indexOf('too long') > 0 || e.message.indexOf('confirmed status') > 0)) {
res.status(413).send(e instanceof Error ? e.message : e);
return;
}
res.status(500).send(e instanceof Error ? e.message : e);
}
}
private async getScriptHash(req: Request, res: Response) {
if (config.MEMPOOL.BACKEND === 'none') {
res.status(405).send('Address lookups cannot be used with bitcoind as backend.');
return;
}
try {
const addressData = await bitcoinApi.$getScriptHash(req.params.scripthash);
res.json(addressData);
} catch (e) {
if (e instanceof Error && e.message && (e.message.indexOf('too long') > 0 || e.message.indexOf('confirmed status') > 0)) {
return res.status(413).send(e instanceof Error ? e.message : e);
}
res.status(500).send(e instanceof Error ? e.message : e);
}
}
private async getScriptHashTransactions(req: Request, res: Response): Promise<void> {
if (config.MEMPOOL.BACKEND === 'none') {
res.status(405).send('Address lookups cannot be used with bitcoind as backend.');
return;
}
try {
let lastTxId: string = '';
if (req.query.after_txid && typeof req.query.after_txid === 'string') {
lastTxId = req.query.after_txid;
}
const transactions = await bitcoinApi.$getScriptHashTransactions(req.params.scripthash, lastTxId);
res.json(transactions);
} catch (e) {
if (e instanceof Error && e.message && (e.message.indexOf('too long') > 0 || e.message.indexOf('confirmed status') > 0)) {
res.status(413).send(e instanceof Error ? e.message : e);
return;
}
res.status(500).send(e instanceof Error ? e.message : e);
}
}
private async getAddressPrefix(req: Request, res: Response) {
try {
const blockHash = await bitcoinApi.$getAddressPrefix(req.params.prefix);
res.send(blockHash);
} catch (e) {
res.status(500).send(e instanceof Error ? e.message : e);
}
}
private async getRecentMempoolTransactions(req: Request, res: Response) {
const latestTransactions = Object.entries(mempool.getMempool())
.sort((a, b) => (b[1].firstSeen || 0) - (a[1].firstSeen || 0))
.slice(0, 10).map((tx) => Common.stripTransaction(tx[1]));
res.json(latestTransactions);
}
private async getMempool(req: Request, res: Response) {
const info = mempool.getMempoolInfo();
res.json({
count: info.size,
vsize: info.bytes,
total_fee: info.total_fee * 1e8,
fee_histogram: []
});
}
private async getMempoolTxIds(req: Request, res: Response) {
try {
const rawMempool = await bitcoinApi.$getRawMempool();
res.send(rawMempool);
} catch (e) {
res.status(500).send(e instanceof Error ? e.message : e);
}
}
private getBlockTipHeight(req: Request, res: Response) {
try {
const result = blocks.getCurrentBlockHeight();
if (!result) {
return res.status(503).send(`Service Temporarily Unavailable`);
}
res.setHeader('content-type', 'text/plain');
res.send(result.toString());
} catch (e) {
res.status(500).send(e instanceof Error ? e.message : e);
}
}
private async getBlockTipHash(req: Request, res: Response) {
try {
const result = await bitcoinApi.$getBlockHashTip();
res.setHeader('content-type', 'text/plain');
res.send(result);
} catch (e) {
res.status(500).send(e instanceof Error ? e.message : e);
}
}
private async getRawBlock(req: Request, res: Response) {
try {
const result = await bitcoinApi.$getRawBlock(req.params.hash);
res.setHeader('content-type', 'application/octet-stream');
res.send(result);
} catch (e) {
res.status(500).send(e instanceof Error ? e.message : e);
}
}
private async getTxIdsForBlock(req: Request, res: Response) {
try {
const result = await bitcoinApi.$getTxIdsForBlock(req.params.hash);
res.json(result);
} catch (e) {
res.status(500).send(e instanceof Error ? e.message : e);
}
}
private async validateAddress(req: Request, res: Response) {
try {
const result = await bitcoinClient.validateAddress(req.params.address);
res.json(result);
} catch (e) {
res.status(500).send(e instanceof Error ? e.message : e);
}
}
private async getRbfHistory(req: Request, res: Response) {
try {
const replacements = rbfCache.getRbfTree(req.params.txId) || null;
const replaces = rbfCache.getReplaces(req.params.txId) || null;
res.json({
replacements,
replaces
});
} catch (e) {
res.status(500).send(e instanceof Error ? e.message : e);
}
}
private async getRbfReplacements(req: Request, res: Response) {
try {
const result = rbfCache.getRbfTrees(false);
res.json(result);
} catch (e) {
res.status(500).send(e instanceof Error ? e.message : e);
}
}
private async getFullRbfReplacements(req: Request, res: Response) {
try {
const result = rbfCache.getRbfTrees(true);
res.json(result);
} catch (e) {
res.status(500).send(e instanceof Error ? e.message : e);
}
}
private async getCachedTx(req: Request, res: Response) {
try {
const result = rbfCache.getTx(req.params.txId);
if (result) {
res.json(result);
} else {
res.status(204).send();
}
} catch (e) {
res.status(500).send(e instanceof Error ? e.message : e);
}
}
private async getTransactionOutspends(req: Request, res: Response) {
try {
const result = await bitcoinApi.$getOutspends(req.params.txId);
res.json(result);
} catch (e) {
res.status(500).send(e instanceof Error ? e.message : e);
}
}
private getDifficultyChange(req: Request, res: Response) {
try {
const da = difficultyAdjustment.getDifficultyAdjustment();
if (da) {
res.json(da);
} else {
res.status(503).send(`Service Temporarily Unavailable`);
}
} catch (e) {
res.status(500).send(e instanceof Error ? e.message : e);
}
}
private async $postTransaction(req: Request, res: Response) {
res.setHeader('content-type', 'text/plain');
try {
const rawTx = Common.getTransactionFromRequest(req, false);
const txIdResult = await bitcoinApi.$sendRawTransaction(rawTx);
res.send(txIdResult);
} catch (e: any) {
res.status(400).send(e.message && e.code ? 'sendrawtransaction RPC error: ' + JSON.stringify({ code: e.code, message: e.message })
: (e.message || 'Error'));
}
}
private async $postTransactionForm(req: Request, res: Response) {
res.setHeader('content-type', 'text/plain');
try {
const txHex = Common.getTransactionFromRequest(req, true);
const txIdResult = await bitcoinClient.sendRawTransaction(txHex);
res.send(txIdResult);
} catch (e: any) {
res.status(400).send(e.message && e.code ? 'sendrawtransaction RPC error: ' + JSON.stringify({ code: e.code, message: e.message })
: (e.message || 'Error'));
}
}
}
export default new BitcoinRoutes();

View File

@@ -1,11 +1,12 @@
import config from '../../config';
import Client from '@mempool/electrum-client';
import { AbstractBitcoinApi } from './bitcoin-api-abstract-factory';
import { IEsploraApi } from './esplora-api.interface';
import { IElectrumApi } from './electrum-api.interface';
import BitcoinApi from './bitcoin-api';
import logger from '../../logger';
import crypto from "crypto-js";
import * as ElectrumClient from '@mempool/electrum-client';
import * as sha256 from 'crypto-js/sha256';
import * as hexEnc from 'crypto-js/enc-hex';
import loadingIndicators from '../loading-indicators';
import memoryCache from '../memory-cache';
@@ -16,7 +17,7 @@ class BitcoindElectrsApi extends BitcoinApi implements AbstractBitcoinApi {
super(bitcoinClient);
const electrumConfig = { client: 'mempool-v2', version: '1.4' };
const electrumPersistencePolicy = { retryPeriod: 1000, maxRetry: Number.MAX_SAFE_INTEGER, callback: null };
const electrumPersistencePolicy = { retryPeriod: 10000, maxRetry: 1000, callback: null };
const electrumCallbacks = {
onConnect: (client, versionInfo) => { logger.info(`Connected to Electrum Server at ${config.ELECTRUM.HOST}:${config.ELECTRUM.PORT} (${JSON.stringify(versionInfo)})`); },
@@ -25,7 +26,7 @@ class BitcoindElectrsApi extends BitcoinApi implements AbstractBitcoinApi {
onLog: (str) => { logger.debug(str); },
};
this.electrumClient = new Client(
this.electrumClient = new ElectrumClient(
config.ELECTRUM.PORT,
config.ELECTRUM.HOST,
config.ELECTRUM.TLS_ENABLED ? 'tls' : 'tcp',
@@ -34,7 +35,7 @@ class BitcoindElectrsApi extends BitcoinApi implements AbstractBitcoinApi {
);
this.electrumClient.initElectrum(electrumConfig, electrumPersistencePolicy)
.then(() => { })
.then(() => {})
.catch((err) => {
logger.err(`Error connecting to Electrum Server at ${config.ELECTRUM.HOST}:${config.ELECTRUM.PORT}`);
});
@@ -94,7 +95,7 @@ class BitcoindElectrsApi extends BitcoinApi implements AbstractBitcoinApi {
async $getAddressTransactions(address: string, lastSeenTxId: string): Promise<IEsploraApi.Transaction[]> {
const addressInfo = await this.bitcoindClient.validateAddress(address);
if (!addressInfo || !addressInfo.isvalid) {
return [];
}
try {
@@ -126,77 +127,6 @@ class BitcoindElectrsApi extends BitcoinApi implements AbstractBitcoinApi {
}
}
async $getScriptHash(scripthash: string): Promise<IEsploraApi.ScriptHash> {
try {
const balance = await this.electrumClient.blockchainScripthash_getBalance(scripthash);
let history = memoryCache.get<IElectrumApi.ScriptHashHistory[]>('Scripthash_getHistory', scripthash);
if (!history) {
history = await this.electrumClient.blockchainScripthash_getHistory(scripthash);
memoryCache.set('Scripthash_getHistory', scripthash, history, 2);
}
const unconfirmed = history ? history.filter((h) => h.fee).length : 0;
return {
'scripthash': scripthash,
'chain_stats': {
'funded_txo_count': 0,
'funded_txo_sum': balance.confirmed ? balance.confirmed : 0,
'spent_txo_count': 0,
'spent_txo_sum': balance.confirmed < 0 ? balance.confirmed : 0,
'tx_count': (history?.length || 0) - unconfirmed,
},
'mempool_stats': {
'funded_txo_count': 0,
'funded_txo_sum': balance.unconfirmed > 0 ? balance.unconfirmed : 0,
'spent_txo_count': 0,
'spent_txo_sum': balance.unconfirmed < 0 ? -balance.unconfirmed : 0,
'tx_count': unconfirmed,
},
'electrum': true,
};
} catch (e: any) {
throw new Error(typeof e === 'string' ? e : e && e.message || e);
}
}
async $getScriptHashTransactions(scripthash: string, lastSeenTxId?: string): Promise<IEsploraApi.Transaction[]> {
try {
loadingIndicators.setProgress('address-' + scripthash, 0);
const transactions: IEsploraApi.Transaction[] = [];
let history = memoryCache.get<IElectrumApi.ScriptHashHistory[]>('Scripthash_getHistory', scripthash);
if (!history) {
history = await this.electrumClient.blockchainScripthash_getHistory(scripthash);
memoryCache.set('Scripthash_getHistory', scripthash, history, 2);
}
if (!history) {
throw new Error('failed to get scripthash history');
}
history.sort((a, b) => (b.height || 9999999) - (a.height || 9999999));
let startingIndex = 0;
if (lastSeenTxId) {
const pos = history.findIndex((historicalTx) => historicalTx.tx_hash === lastSeenTxId);
if (pos) {
startingIndex = pos + 1;
}
}
const endIndex = Math.min(startingIndex + 10, history.length);
for (let i = startingIndex; i < endIndex; i++) {
const tx = await this.$getRawTransaction(history[i].tx_hash, false, true);
transactions.push(tx);
loadingIndicators.setProgress('address-' + scripthash, (i + 1) / endIndex * 100);
}
return transactions;
} catch (e: any) {
loadingIndicators.setProgress('address-' + scripthash, 100);
throw new Error(typeof e === 'string' ? e : e && e.message || e);
}
}
private $getScriptHashBalance(scriptHash: string): Promise<IElectrumApi.ScriptHashBalance> {
return this.electrumClient.blockchainScripthash_getBalance(this.encodeScriptHash(scriptHash));
}
@@ -214,8 +144,8 @@ class BitcoindElectrsApi extends BitcoinApi implements AbstractBitcoinApi {
}
private encodeScriptHash(scriptPubKey: string): string {
const addrScripthash = crypto.enc.Hex.stringify(crypto.SHA256(crypto.enc.Hex.parse(scriptPubKey)));
return addrScripthash!.match(/.{2}/g)!.reverse().join('');
const addrScripthash = hexEnc.stringify(sha256(hexEnc.parse(scriptPubKey)));
return addrScripthash.match(/.{2}/g).reverse().join('');
}
}
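// Electrum addresses scripts by the sha256 of the raw scriptPubKey, byte-reversed
// and hex-encoded, which is what encodeScriptHash computes above. The same thing
// with node's built-in crypto instead of crypto-js (a sketch):
import { createHash } from 'crypto';

function electrumScriptHash(scriptPubKeyHex: string): string {
  const digest = createHash('sha256').update(Buffer.from(scriptPubKeyHex, 'hex')).digest();
  return digest.reverse().toString('hex');   // little-endian hex, as the protocol expects
}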

View File

@@ -25,16 +25,14 @@ export namespace IEsploraApi {
is_coinbase: boolean;
scriptsig: string;
scriptsig_asm: string;
inner_redeemscript_asm: string;
inner_witnessscript_asm: string;
inner_redeemscript_asm?: string;
inner_witnessscript_asm?: string;
sequence: any;
witness: string[];
witness?: string[];
prevout: Vout | null;
// Elements
is_pegin?: boolean;
issuance?: Issuance;
// Custom
lazy?: boolean;
}
interface Issuance {
@@ -88,8 +86,6 @@ export namespace IEsploraApi {
size: number;
weight: number;
previousblockhash: string;
mediantime: number;
stale: boolean;
}
export interface Address {
@@ -99,13 +95,6 @@ export namespace IEsploraApi {
electrum?: boolean;
}
export interface ScriptHash {
scripthash: string;
chain_stats: ChainStats;
mempool_stats: MempoolStats;
electrum?: boolean;
}
export interface ChainStats {
funded_txo_count: number;
funded_txo_sum: number;

View File

@@ -1,261 +1,48 @@
import config from '../../config';
import axios, { AxiosResponse } from 'axios';
import http from 'http';
import axios, { AxiosRequestConfig } from 'axios';
import { AbstractBitcoinApi } from './bitcoin-api-abstract-factory';
import { IEsploraApi } from './esplora-api.interface';
import logger from '../../logger';
interface FailoverHost {
host: string,
rtts: number[],
rtt: number
failures: number,
socket?: boolean,
outOfSync?: boolean,
unreachable?: boolean,
preferred?: boolean,
}
class FailoverRouter {
activeHost: FailoverHost;
fallbackHost: FailoverHost;
hosts: FailoverHost[];
multihost: boolean;
pollInterval: number = 60000;
pollTimer: NodeJS.Timeout | null = null;
pollConnection = axios.create();
requestConnection = axios.create({
httpAgent: new http.Agent({ keepAlive: true })
});
constructor() {
// setup list of hosts
this.hosts = (config.ESPLORA.FALLBACK || []).map(domain => {
return {
host: domain,
rtts: [],
rtt: Infinity,
failures: 0,
};
});
this.activeHost = {
host: config.ESPLORA.UNIX_SOCKET_PATH || config.ESPLORA.REST_API_URL,
rtts: [],
rtt: 0,
failures: 0,
socket: !!config.ESPLORA.UNIX_SOCKET_PATH,
preferred: true,
};
this.fallbackHost = this.activeHost;
this.hosts.unshift(this.activeHost);
this.multihost = this.hosts.length > 1;
}
public startHealthChecks(): void {
// use axios interceptors to measure request rtt
this.pollConnection.interceptors.request.use((config) => {
config['meta'] = { startTime: Date.now() };
return config;
});
this.pollConnection.interceptors.response.use((response) => {
response.config['meta'].rtt = Date.now() - response.config['meta'].startTime;
return response;
});
if (this.multihost) {
this.pollHosts();
}
}
// start polling hosts to measure availability & rtt
private async pollHosts(): Promise<void> {
if (this.pollTimer) {
clearTimeout(this.pollTimer);
}
const results = await Promise.allSettled(this.hosts.map(async (host) => {
if (host.socket) {
return this.pollConnection.get<number>('/blocks/tip/height', { socketPath: host.host, timeout: 2000 });
} else {
return this.pollConnection.get<number>(host.host + '/blocks/tip/height', { timeout: 2000 });
}
}));
const maxHeight = results.reduce((max, result) => Math.max(max, result.status === 'fulfilled' ? result.value?.data || 0 : 0), 0);
// update rtts & sync status
for (let i = 0; i < results.length; i++) {
const host = this.hosts[i];
const result = results[i].status === 'fulfilled' ? (results[i] as PromiseFulfilledResult<AxiosResponse<number, any>>).value : null;
if (result) {
const height = result.data;
const rtt = result.config['meta'].rtt;
host.rtts.unshift(rtt);
host.rtts = host.rtts.slice(0, 5);
host.rtt = host.rtts.reduce((acc, l) => acc + l, 0) / host.rtts.length;
if (height == null || isNaN(height) || (maxHeight - height > 2)) {
host.outOfSync = true;
} else {
host.outOfSync = false;
}
host.unreachable = false;
} else {
host.unreachable = true;
}
}
this.sortHosts();
logger.debug(`Tomahawk ranking: ${this.hosts.map(host => '\navg rtt ' + Math.round(host.rtt).toString().padStart(5, ' ') + ' | reachable? ' + (!host.unreachable || false).toString().padStart(5, ' ') + ' | in sync? ' + (!host.outOfSync || false).toString().padStart(5, ' ') + ` | ${host.host}`).join('')}`);
// switch if the current host is out of sync or significantly slower than the next best alternative
if (this.activeHost.outOfSync || this.activeHost.unreachable || (this.activeHost !== this.hosts[0] && this.hosts[0].preferred) || (!this.activeHost.preferred && this.activeHost.rtt > (this.hosts[0].rtt * 2) + 50)) {
if (this.activeHost.unreachable) {
logger.warn(`Unable to reach ${this.activeHost.host}, failing over to next best alternative`);
} else if (this.activeHost.outOfSync) {
logger.warn(`${this.activeHost.host} has fallen behind, failing over to next best alternative`);
} else {
logger.debug(`${this.activeHost.host} is no longer the best esplora host`);
}
this.electHost();
}
this.pollTimer = setTimeout(() => { this.pollHosts(); }, this.pollInterval);
}
// sort hosts by connection quality, and update default fallback
private sortHosts(): void {
// sort by connection quality
this.hosts.sort((a, b) => {
if ((a.unreachable || a.outOfSync) === (b.unreachable || b.outOfSync)) {
if (a.preferred === b.preferred) {
// lower rtt is best
return a.rtt - b.rtt;
} else { // unless we have a preferred host
return a.preferred ? -1 : 1;
}
} else { // or the host is out of sync
return (a.unreachable || a.outOfSync) ? 1 : -1;
}
});
if (this.hosts.length > 1 && this.hosts[0] === this.activeHost) {
this.fallbackHost = this.hosts[1];
} else {
this.fallbackHost = this.hosts[0];
}
}
// depose the active host and choose the next best replacement
private electHost(): void {
this.activeHost.outOfSync = true;
this.activeHost.failures = 0;
this.sortHosts();
this.activeHost = this.hosts[0];
logger.warn(`Switching esplora host to ${this.activeHost.host}`);
}
private addFailure(host: FailoverHost): FailoverHost {
host.failures++;
if (host.failures > 5 && this.multihost) {
logger.warn(`Too many esplora failures on ${this.activeHost.host}, falling back to next best alternative`);
this.electHost();
return this.activeHost;
} else {
return this.fallbackHost;
}
}
private async $query<T>(method: 'get'| 'post', path, data: any, responseType = 'json', host = this.activeHost, retry: boolean = true): Promise<T> {
let axiosConfig;
let url;
if (host.socket) {
axiosConfig = { socketPath: host.host, timeout: 10000, responseType };
url = path;
} else {
axiosConfig = { timeout: 10000, responseType };
url = host.host + path;
}
return (method === 'post'
? this.requestConnection.post<T>(url, data, axiosConfig)
: this.requestConnection.get<T>(url, axiosConfig)
).then((response) => { host.failures = Math.max(0, host.failures - 1); return response.data; })
.catch((e) => {
let fallbackHost = this.fallbackHost;
if (e?.response?.status !== 404) {
logger.warn(`esplora request failed ${e?.response?.status || 500} ${host.host}${path}`);
fallbackHost = this.addFailure(host);
}
if (retry && e?.code === 'ECONNREFUSED' && this.multihost) {
// Retry immediately
return this.$query(method, path, data, responseType, fallbackHost, false);
} else {
throw e;
}
});
}
public async $get<T>(path, responseType = 'json'): Promise<T> {
return this.$query<T>('get', path, null, responseType);
}
public async $post<T>(path, data: any, responseType = 'json'): Promise<T> {
return this.$query<T>('post', path, data, responseType);
}
}
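For reference, the comparator inside sortHosts() encodes a strict priority: healthy (reachable and in-sync) hosts beat degraded ones, a preferred host beats its peers, and average rtt breaks the remaining ties. A minimal standalone sketch of that rule on invented hosts (names and timings are hypothetical, not taken from this diff):
type HostSketch = { host: string; rtt: number; preferred: boolean; unreachable: boolean; outOfSync: boolean };
const candidates: HostSketch[] = [
{ host: 'http://esplora-a', rtt: 120, preferred: false, unreachable: false, outOfSync: false },
{ host: 'http://esplora-b', rtt: 40, preferred: false, unreachable: false, outOfSync: true },
{ host: 'http://esplora-c', rtt: 300, preferred: true, unreachable: false, outOfSync: false },
];
candidates.sort((a, b) => {
// degraded hosts always sink to the bottom
if ((a.unreachable || a.outOfSync) !== (b.unreachable || b.outOfSync)) {
return (a.unreachable || a.outOfSync) ? 1 : -1;
}
// among equally healthy hosts, a preferred host wins, then lowest rtt
if (a.preferred !== b.preferred) {
return a.preferred ? -1 : 1;
}
return a.rtt - b.rtt;
});
console.log(candidates.map(h => h.host));
// => [ 'http://esplora-c', 'http://esplora-a', 'http://esplora-b' ]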
class ElectrsApi implements AbstractBitcoinApi {
private failoverRouter = new FailoverRouter();
axiosConfig: AxiosRequestConfig = {
timeout: 10000,
};
constructor() { }
$getRawMempool(): Promise<IEsploraApi.Transaction['txid'][]> {
return this.failoverRouter.$get<IEsploraApi.Transaction['txid'][]>('/mempool/txids');
}
$getRawTransaction(txId: string): Promise<IEsploraApi.Transaction> {
return this.failoverRouter.$get<IEsploraApi.Transaction>('/tx/' + txId);
}
async $getMempoolTransactions(txids: string[]): Promise<IEsploraApi.Transaction[]> {
return this.failoverRouter.$post<IEsploraApi.Transaction[]>('/mempool/txs', txids, 'json');
}
async $getAllMempoolTransactions(lastSeenTxid?: string): Promise<IEsploraApi.Transaction[]> {
return this.failoverRouter.$get<IEsploraApi.Transaction[]>('/mempool/txs' + (lastSeenTxid ? '/' + lastSeenTxid : ''));
}
$getTransactionHex(txId: string): Promise<string> {
return this.failoverRouter.$get<string>('/tx/' + txId + '/hex');
}
$getBlockHeightTip(): Promise<number> {
return this.failoverRouter.$get<number>('/blocks/tip/height');
}
$getBlockHashTip(): Promise<string> {
return this.failoverRouter.$get<string>('/blocks/tip/hash');
}
$getTxIdsForBlock(hash: string): Promise<string[]> {
return this.failoverRouter.$get<string[]>('/block/' + hash + '/txids');
}
$getTxsForBlock(hash: string): Promise<IEsploraApi.Transaction[]> {
return this.failoverRouter.$get<IEsploraApi.Transaction[]>('/block/' + hash + '/txs');
}
$getBlockHash(height: number): Promise<string> {
return this.failoverRouter.$get<string>('/block-height/' + height);
}
$getBlockHeader(hash: string): Promise<string> {
return this.failoverRouter.$get<string>('/block/' + hash + '/header');
}
$getBlock(hash: string): Promise<IEsploraApi.Block> {
return this.failoverRouter.$get<IEsploraApi.Block>('/block/' + hash);
}
$getRawBlock(hash: string): Promise<Buffer> {
return this.failoverRouter.$get<any>('/block/' + hash + '/raw', 'arraybuffer')
.then((response) => { return Buffer.from(response.data); });
}
$getAddress(address: string): Promise<IEsploraApi.Address> {
@@ -266,14 +53,6 @@ class ElectrsApi implements AbstractBitcoinApi {
throw new Error('Method getAddressTransactions not implemented.');
}
$getScriptHash(scripthash: string): Promise<IEsploraApi.ScriptHash> {
throw new Error('Method getScriptHash not implemented.');
}
$getScriptHashTransactions(scripthash: string, txId?: string): Promise<IEsploraApi.Transaction[]> {
throw new Error('Method getScriptHashTransactions not implemented.');
}
$getAddressPrefix(prefix: string): string[] {
throw new Error('Method not implemented.');
}
@@ -282,25 +61,8 @@ class ElectrsApi implements AbstractBitcoinApi {
throw new Error('Method not implemented.');
}
$getOutspend(txId: string, vout: number): Promise<IEsploraApi.Outspend> {
return this.failoverRouter.$get<IEsploraApi.Outspend>('/tx/' + txId + '/outspend/' + vout);
}
$getOutspends(txId: string): Promise<IEsploraApi.Outspend[]> {
return this.failoverRouter.$get<IEsploraApi.Outspend[]>('/tx/' + txId + '/outspends');
}
async $getBatchedOutspends(txId: string[]): Promise<IEsploraApi.Outspend[][]> {
const outspends: IEsploraApi.Outspend[][] = [];
for (const tx of txId) {
const outspend = await this.$getOutspends(tx);
outspends.push(outspend);
}
return outspends;
}
public startHealthChecks(): void {
this.failoverRouter.startHealthChecks();
}
}
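Callers are expected to go through the ElectrsApi facade rather than the FailoverRouter itself; every request is then routed to the currently elected host. A hedged usage sketch (assumes an async context; variable names are illustrative, not from this diff):
const api = new ElectrsApi();
api.startHealthChecks(); // starts the pollHosts() loop

// these calls transparently fail over if the active esplora host degrades
const tipHeight = await api.$getBlockHeightTip();
const tipHash = await api.$getBlockHashTip();
const txids = await api.$getTxIdsForBlock(tipHash);
console.log(`tip ${tipHeight} (${tipHash}) contains ${txids.length} transactions`);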

File diff suppressed because it is too large

View File

@@ -1,61 +0,0 @@
import logger from '../logger';
import bitcoinClient from './bitcoin/bitcoin-client';
export interface ChainTip {
height: number;
hash: string;
branchlen: number;
status: 'invalid' | 'active' | 'valid-fork' | 'valid-headers' | 'headers-only';
};
export interface OrphanedBlock {
height: number;
hash: string;
status: 'valid-fork' | 'valid-headers' | 'headers-only';
}
class ChainTips {
private chainTips: ChainTip[] = [];
private orphanedBlocks: OrphanedBlock[] = [];
public async updateOrphanedBlocks(): Promise<void> {
try {
this.chainTips = await bitcoinClient.getChainTips();
this.orphanedBlocks = [];
for (const chain of this.chainTips) {
if (chain.status === 'valid-fork' || chain.status === 'valid-headers') {
let block = await bitcoinClient.getBlock(chain.hash);
while (block && block.confirmations === -1) {
this.orphanedBlocks.push({
height: block.height,
hash: block.hash,
status: chain.status
});
block = await bitcoinClient.getBlock(block.previousblockhash);
}
}
}
logger.debug(`Updated orphaned blocks cache. Found ${this.orphanedBlocks.length} orphaned blocks`);
} catch (e) {
logger.err(`Cannot fetch orphaned blocks. Reason: ${e instanceof Error ? e.message : e}`);
}
}
public getOrphanedBlocksAtHeight(height: number | undefined): OrphanedBlock[] {
if (height === undefined) {
return [];
}
const orphans: OrphanedBlock[] = [];
for (const block of this.orphanedBlocks) {
if (block.height === height) {
orphans.push(block);
}
}
return orphans;
}
}
export default new ChainTips();
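The singleton is meant to be used in two steps: refresh the orphaned-blocks cache periodically, then query it cheaply per height. A hedged usage sketch (the import path and refresh cadence below are assumptions, not taken from this diff):
import chainTips from './chain-tips'; // hypothetical path

setInterval(() => chainTips.updateOrphanedBlocks(), 60_000); // assumed one-minute cadence

// e.g. while serving a block API response:
const orphans = chainTips.getOrphanedBlocksAtHeight(785000);
if (orphans.length) {
console.log(`height 785000 has ${orphans.length} known orphaned block(s)`);
}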

View File

@@ -1,9 +1,5 @@
import * as bitcoinjs from 'bitcoinjs-lib';
import { Request } from 'express';
import { Ancestor, CpfpInfo, CpfpSummary, CpfpCluster, EffectiveFeeStats, MempoolBlockWithTransactions, TransactionExtended, MempoolTransactionExtended, TransactionStripped, WorkingEffectiveFeeStats } from '../mempool.interfaces';
import { CpfpInfo, TransactionExtended, TransactionStripped } from '../mempool.interfaces';
import config from '../config';
import { NodeSocket } from '../repositories/NodesSocketsRepository';
import { isIP } from 'net';
export class Common {
static nativeAssetId = config.MEMPOOL.NETWORK === 'liquidtestnet' ?
'144c654344aa716d6f3abcc1ca90e5641e4e2a7f633bc09fe3baf64585819a49'
@@ -37,123 +33,51 @@ export class Common {
}
static getFeesInRange(transactions: TransactionExtended[], rangeLength: number) {
const filtered: TransactionExtended[] = [];
let lastValidRate = Infinity;
// filter out anomalous fee rates to ensure monotonic range
for (const tx of transactions) {
if (tx.effectiveFeePerVsize <= lastValidRate) {
filtered.push(tx);
lastValidRate = tx.effectiveFeePerVsize;
}
}
const arr = [filtered[filtered.length - 1].effectiveFeePerVsize];
const chunk = 1 / (rangeLength - 1);
let itemsToAdd = rangeLength - 2;
while (itemsToAdd > 0) {
arr.push(filtered[Math.floor(filtered.length * chunk * itemsToAdd)].effectiveFeePerVsize);
itemsToAdd--;
}
arr.push(filtered[0].effectiveFeePerVsize);
return arr;
}
static findRbfTransactions(added: MempoolTransactionExtended[], deleted: MempoolTransactionExtended[], forceScalable = false): { [txid: string]: MempoolTransactionExtended[] } {
const matches: { [txid: string]: MempoolTransactionExtended[] } = {};
// For small N, a naive nested loop is extremely fast, but it doesn't scale
if (added.length < 1000 && deleted.length < 50 && !forceScalable) {
added.forEach((addedTx) => {
const foundMatches = deleted.filter((deletedTx) => {
// The new tx must, absolutely speaking, pay at least as much fee as the replaced tx.
return addedTx.fee > deletedTx.fee
// The new transaction must pay more fee per kB than the replaced tx.
&& addedTx.adjustedFeePerVsize > deletedTx.adjustedFeePerVsize
// Spends one or more of the same inputs
&& deletedTx.vin.some((deletedVin) =>
addedTx.vin.some((vin) => vin.txid === deletedVin.txid && vin.vout === deletedVin.vout));
});
if (foundMatches?.length) {
matches[addedTx.txid] = [...new Set(foundMatches)];
}
});
} else {
// for large N, build a lookup table of prevouts we can check in ~constant time
const deletedSpendMap: { [txid: string]: { [vout: number]: MempoolTransactionExtended } } = {};
for (const tx of deleted) {
for (const vin of tx.vin) {
if (!deletedSpendMap[vin.txid]) {
deletedSpendMap[vin.txid] = {};
}
deletedSpendMap[vin.txid][vin.vout] = tx;
}
}
for (const addedTx of added) {
const foundMatches = new Set<MempoolTransactionExtended>();
for (const vin of addedTx.vin) {
const deletedTx = deletedSpendMap[vin.txid]?.[vin.vout];
if (deletedTx && deletedTx.txid !== addedTx.txid
// The new tx must, absolutely speaking, pay at least as much fee as the replaced tx.
&& addedTx.fee > deletedTx.fee
// The new transaction must pay more fee per kB than the replaced tx.
&& addedTx.adjustedFeePerVsize > deletedTx.adjustedFeePerVsize
) {
foundMatches.add(deletedTx);
}
}
if (foundMatches.size) {
matches[addedTx.txid] = [...foundMatches];
}
}
}
return matches;
}
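// Worked example of the spend-map lookup above (hypothetical txids):
// if a deleted tx D spends outpoints A:0 and B:1, deletedSpendMap becomes
//   { A: { 0: D }, B: { 1: D } }
// so an added tx spending B:1 resolves its conflict via deletedSpendMap['B']?.[1]
// in ~constant time, instead of rescanning every deleted transaction's inputs.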
static findMinedRbfTransactions(minedTransactions: TransactionExtended[], spendMap: Map<string, MempoolTransactionExtended>): { [txid: string]: { replaced: MempoolTransactionExtended[], replacedBy: TransactionExtended }} {
const matches: { [txid: string]: { replaced: MempoolTransactionExtended[], replacedBy: TransactionExtended }} = {};
for (const tx of minedTransactions) {
const replaced: Set<MempoolTransactionExtended> = new Set();
for (let i = 0; i < tx.vin.length; i++) {
const vin = tx.vin[i];
const match = spendMap.get(`${vin.txid}:${vin.vout}`);
if (match && match.txid !== tx.txid) {
replaced.add(match);
// remove this tx from the spendMap
// prevents the same tx being replaced more than once
for (const replacedVin of match.vin) {
const key = `${replacedVin.txid}:${replacedVin.vout}`;
spendMap.delete(key);
}
}
const key = `${vin.txid}:${vin.vout}`;
spendMap.delete(key);
}
if (replaced.size) {
matches[tx.txid] = { replaced: Array.from(replaced), replacedBy: tx };
}
}
return matches;
}
static stripTransaction(tx: TransactionExtended): TransactionStripped {
return {
txid: tx.txid,
fee: tx.fee || 0,
vsize: tx.weight / 4,
value: tx.vout.reduce((acc, vout) => acc + (vout.value ? vout.value : 0), 0),
acc: tx.acceleration || undefined,
rate: tx.effectiveFeePerVsize,
};
}
static stripTransactions(txs: TransactionExtended[]): TransactionStripped[] {
return txs.map(this.stripTransaction);
}
static sleep$(ms: number): Promise<void> {
return new Promise((resolve) => {
setTimeout(() => {
resolve();
@@ -168,18 +92,18 @@ export class Common {
}
}
static setRelativesAndGetCpfpInfo(tx: MempoolTransactionExtended, memPool: { [txid: string]: MempoolTransactionExtended }): CpfpInfo {
const parents = this.findAllParents(tx, memPool);
const lowerFeeParents = parents.filter((parent) => parent.adjustedFeePerVsize < tx.effectiveFeePerVsize);
let totalWeight = (tx.adjustedVsize * 4) + lowerFeeParents.reduce((prev, val) => prev + (val.adjustedVsize * 4), 0);
let totalFees = tx.fee + lowerFeeParents.reduce((prev, val) => prev + val.fee, 0);
tx.ancestors = parents
.map((t) => {
return {
txid: t.txid,
weight: (t.adjustedVsize * 4),
fee: t.fee,
};
});
@@ -190,7 +114,7 @@ export class Common {
totalFees += tx.bestDescendant.fee;
}
tx.effectiveFeePerVsize = Math.max(0, totalFees / (totalWeight / 4));
tx.cpfpChecked = true;
return {
@@ -200,8 +124,8 @@ export class Common {
}
private static findAllParents(tx: MempoolTransactionExtended, memPool: { [txid: string]: MempoolTransactionExtended }): MempoolTransactionExtended[] {
let parents: MempoolTransactionExtended[] = [];
tx.vin.forEach((parent) => {
if (parents.find((p) => p.txid === parent.txid)) {
return;
@@ -209,17 +133,17 @@ export class Common {
const parentTx = memPool[parent.txid];
if (parentTx) {
if (tx.bestDescendant && tx.bestDescendant.fee / (tx.bestDescendant.weight / 4) > parentTx.adjustedFeePerVsize) {
if (parentTx.bestDescendant && parentTx.bestDescendant.fee < tx.fee + tx.bestDescendant.fee) {
parentTx.bestDescendant = {
weight: (tx.adjustedVsize * 4) + tx.bestDescendant.weight,
fee: tx.fee + tx.bestDescendant.fee,
txid: tx.txid,
};
}
} else if (tx.adjustedFeePerVsize > parentTx.adjustedFeePerVsize) {
parentTx.bestDescendant = {
weight: (tx.adjustedVsize * 4),
fee: tx.fee,
txid: tx.txid
};
@@ -230,546 +154,4 @@ export class Common {
});
return parents;
}
// calculates the ratio of matched transactions to projected transactions by weight
static getSimilarity(projectedBlock: MempoolBlockWithTransactions, transactions: TransactionExtended[]): number {
let matchedWeight = 0;
let projectedWeight = 0;
const inBlock = {};
for (const tx of transactions) {
inBlock[tx.txid] = tx;
}
// look for transactions that were expected in the template, but missing from the mined block
for (const tx of projectedBlock.transactions) {
if (inBlock[tx.txid]) {
matchedWeight += tx.vsize * 4;
}
projectedWeight += tx.vsize * 4;
}
projectedWeight += transactions[0].weight;
matchedWeight += transactions[0].weight;
return projectedWeight ? matchedWeight / projectedWeight : 1;
}
static getSqlInterval(interval: string | null): string | null {
switch (interval) {
case '24h': return '1 DAY';
case '3d': return '3 DAY';
case '1w': return '1 WEEK';
case '1m': return '1 MONTH';
case '3m': return '3 MONTH';
case '6m': return '6 MONTH';
case '1y': return '1 YEAR';
case '2y': return '2 YEAR';
case '3y': return '3 YEAR';
case '4y': return '4 YEAR';
default: return null;
}
}
static indexingEnabled(): boolean {
return (
['mainnet', 'testnet', 'signet'].includes(config.MEMPOOL.NETWORK) &&
config.DATABASE.ENABLED === true &&
config.MEMPOOL.INDEXING_BLOCKS_AMOUNT !== 0
);
}
static blocksSummariesIndexingEnabled(): boolean {
return (
Common.indexingEnabled() &&
config.MEMPOOL.BLOCKS_SUMMARIES_INDEXING === true
);
}
static cpfpIndexingEnabled(): boolean {
return (
Common.indexingEnabled() &&
config.MEMPOOL.CPFP_INDEXING === true
);
}
static setDateMidnight(date: Date): void {
date.setUTCHours(0);
date.setUTCMinutes(0);
date.setUTCSeconds(0);
date.setUTCMilliseconds(0);
}
static channelShortIdToIntegerId(channelId: string): string {
if (channelId.indexOf('x') === -1) { // Already an integer id
return channelId;
}
if (channelId.indexOf('/') !== -1) { // Topology import
channelId = channelId.slice(0, -2);
}
const s = channelId.split('x').map(part => BigInt(part));
return ((s[0] << 40n) | (s[1] << 16n) | s[2]).toString();
}
/** Decodes a channel id returned by lnd as uint64 to a short channel id */
static channelIntegerIdToShortId(id: string): string {
if (id.indexOf('/') !== -1) {
id = id.slice(0, -2);
}
if (id.indexOf('x') !== -1) { // Already a short id
return id;
}
const n = BigInt(id);
return [
n >> 40n, // nth block
(n >> 16n) & 0xffffffn, // nth tx of the block
n & 0xffffn // nth output of the tx
].join('x');
}
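// Worked example (synthetic ids): channelShortIdToIntegerId('1x2x3') packs to
//   (1n << 40n) | (2n << 16n) | 3n = 1099511627776n + 131072n + 3n = 1099511758851n
// and channelIntegerIdToShortId('1099511758851') unpacks it back to '1x2x3'.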
static utcDateToMysql(date?: number | null): string | null {
if (date === null) {
return null;
}
const d = new Date((date || 0) * 1000);
return d.toISOString().split('T')[0] + ' ' + d.toTimeString().split(' ')[0];
}
static findSocketNetwork(addr: string): {network: string | null, url: string} {
let network: string | null = null;
let url: string = addr;
if (config.LIGHTNING.BACKEND === 'cln') {
url = addr.split('://')[1];
}
if (!url) {
return {
network: null,
url: addr,
};
}
if (addr.indexOf('onion') !== -1) {
if (url.split('.')[0].length >= 56) {
network = 'torv3';
} else {
network = 'torv2';
}
} else if (addr.indexOf('i2p') !== -1) {
network = 'i2p';
} else if (addr.indexOf('ipv4') !== -1 || (config.LIGHTNING.BACKEND === 'lnd' && isIP(url.split(':')[0]) === 4)) {
const ipv = isIP(url.split(':')[0]);
if (ipv === 4) {
network = 'ipv4';
} else {
return {
network: null,
url: addr,
};
}
} else if (addr.indexOf('ipv6') !== -1 || (config.LIGHTNING.BACKEND === 'lnd' && url.indexOf(']:') !== -1)) {
url = url.split('[')[1].split(']')[0];
const ipv = isIP(url);
if (ipv === 6) {
const parts = addr.split(':');
network = 'ipv6';
url = `[${url}]:${parts[parts.length - 1]}`;
} else {
return {
network: null,
url: addr,
};
}
} else {
return {
network: null,
url: addr,
};
}
return {
network: network,
url: url,
};
}
static formatSocket(publicKey: string, socket: {network: string, addr: string}): NodeSocket {
if (config.LIGHTNING.BACKEND === 'cln') {
return {
publicKey: publicKey,
network: socket.network,
addr: socket.addr,
};
} else /* if (config.LIGHTNING.BACKEND === 'lnd') */ {
const formatted = this.findSocketNetwork(socket.addr);
return {
publicKey: publicKey,
network: formatted.network,
addr: formatted.url,
};
}
}
static calculateCpfp(height: number, transactions: TransactionExtended[]): CpfpSummary {
const clusters: CpfpCluster[] = []; // list of all cpfp clusters in this block
const clusterMap: { [txid: string]: CpfpCluster } = {}; // map transactions to their cpfp cluster
let clusterTxs: TransactionExtended[] = []; // working list of elements of the current cluster
let ancestors: { [txid: string]: boolean } = {}; // working set of ancestors of the current cluster root
const txMap = {};
// initialize the txMap
for (const tx of transactions) {
txMap[tx.txid] = tx;
}
// reverse pass to identify CPFP clusters
for (let i = transactions.length - 1; i >= 0; i--) {
const tx = transactions[i];
if (!ancestors[tx.txid]) {
let totalFee = 0;
let totalVSize = 0;
clusterTxs.forEach(tx => {
totalFee += tx?.fee || 0;
totalVSize += (tx.weight / 4);
});
const effectiveFeePerVsize = totalFee / totalVSize;
let cluster: CpfpCluster;
if (clusterTxs.length > 1) {
cluster = {
root: clusterTxs[0].txid,
height,
txs: clusterTxs.map(tx => { return { txid: tx.txid, weight: tx.weight, fee: tx.fee || 0 }; }),
effectiveFeePerVsize,
};
clusters.push(cluster);
}
clusterTxs.forEach(tx => {
txMap[tx.txid].effectiveFeePerVsize = effectiveFeePerVsize;
if (cluster) {
clusterMap[tx.txid] = cluster;
}
});
// reset working vars
clusterTxs = [];
ancestors = {};
}
clusterTxs.push(tx);
tx.vin.forEach(vin => {
ancestors[vin.txid] = true;
});
}
// forward pass to enforce ancestor rate caps
for (const tx of transactions) {
let minAncestorRate = tx.effectiveFeePerVsize;
for (const vin of tx.vin) {
if (txMap[vin.txid]?.effectiveFeePerVsize) {
minAncestorRate = Math.min(minAncestorRate, txMap[vin.txid].effectiveFeePerVsize);
}
}
// check rounded values to skip cases with almost identical fees
const roundedMinAncestorRate = Math.ceil(minAncestorRate);
const roundedEffectiveFeeRate = Math.floor(tx.effectiveFeePerVsize);
if (roundedMinAncestorRate < roundedEffectiveFeeRate) {
tx.effectiveFeePerVsize = minAncestorRate;
if (!clusterMap[tx.txid]) {
// add a single-tx cluster to record the dependent rate
const cluster = {
root: tx.txid,
height,
txs: [{ txid: tx.txid, weight: tx.weight, fee: tx.fee || 0 }],
effectiveFeePerVsize: minAncestorRate,
};
clusterMap[tx.txid] = cluster;
clusters.push(cluster);
} else {
// update the existing cluster with the dependent rate
clusterMap[tx.txid].effectiveFeePerVsize = minAncestorRate;
}
}
}
return {
transactions,
clusters,
};
}
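// Intuition for the two passes above (hypothetical txids): given block order
// [..., P (1 sat/vB), C (10 sat/vB)] where C spends P, the reverse pass groups
// P and C into one cluster and assigns both the cluster's combined rate, while
// the forward pass caps any transaction whose assigned rate still exceeds the
// rate of one of its in-block ancestors.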
static calcEffectiveFeeStatistics(transactions: { weight: number, fee: number, effectiveFeePerVsize?: number, txid: string, acceleration?: boolean }[]): EffectiveFeeStats {
const sortedTxs = transactions.map(tx => { return { txid: tx.txid, weight: tx.weight, rate: tx.effectiveFeePerVsize || ((tx.fee || 0) / (tx.weight / 4)) }; }).sort((a, b) => a.rate - b.rate);
let weightCount = 0;
let medianFee = 0;
let medianWeight = 0;
// calculate the "medianFee" as the average fee rate of the middle 10000 weight units of transactions
const leftBound = 1995000;
const rightBound = 2005000;
for (let i = 0; i < sortedTxs.length && weightCount < rightBound; i++) {
const left = weightCount;
const right = weightCount + sortedTxs[i].weight;
if (right > leftBound) {
const weight = Math.min(right, rightBound) - Math.max(left, leftBound);
medianFee += (sortedTxs[i].rate * (weight / 4) );
medianWeight += weight;
}
weightCount += sortedTxs[i].weight;
}
const medianFeeRate = medianWeight ? (medianFee / (medianWeight / 4)) : 0;
// minimum effective fee heuristic:
// lowest of
// a) the 1st percentile of effective fee rates
// b) the minimum effective fee rate in the last 2% of transactions (in block order)
const minFee = Math.min(
Common.getNthPercentile(1, sortedTxs).rate,
transactions.slice(-transactions.length / 50).reduce((min, tx) => { return Math.min(min, tx.effectiveFeePerVsize || ((tx.fee || 0) / (tx.weight / 4))); }, Infinity)
);
// maximum effective fee heuristic:
// highest of
// a) the 99th percentile of effective fee rates
// b) the maximum effective fee rate in the first 2% of transactions (in block order)
const maxFee = Math.max(
Common.getNthPercentile(99, sortedTxs).rate,
transactions.slice(0, transactions.length / 50).reduce((max, tx) => { return Math.max(max, tx.effectiveFeePerVsize || ((tx.fee || 0) / (tx.weight / 4))); }, 0)
);
return {
medianFee: medianFeeRate,
feeRange: [
minFee,
[10,25,50,75,90].map(n => Common.getNthPercentile(n, sortedTxs).rate),
maxFee,
].flat(),
};
}
static getNthPercentile(n: number, sortedDistribution: any[]): any {
return sortedDistribution[Math.floor((sortedDistribution.length - 1) * (n / 100))];
}
static getTransactionFromRequest(req: Request, form: boolean): string {
let rawTx: any = typeof req.body === 'object' && form
? Object.values(req.body)[0] as any
: req.body;
if (typeof rawTx !== 'string') {
throw Object.assign(new Error('Non-string request body'), { code: -1 });
}
// Support both upper and lower case hex
// Support both txHash= Form and direct API POST
const reg = form ? /^txHash=((?:[a-fA-F0-9]{2})+)$/ : /^((?:[a-fA-F0-9]{2})+)$/;
const matches = reg.exec(rawTx);
if (!matches || !matches[1]) {
throw Object.assign(new Error('Non-hex request body'), { code: -2 });
}
// Guaranteed to be a hex string of multiple of 2
// Guaranteed to be lower case
// Guaranteed to pass validation (see function below)
return this.validateTransactionHex(matches[1].toLowerCase());
}
private static validateTransactionHex(txhex: string): string {
// Do not mutate txhex
// We assume txhex to be valid hex (output of getTransactionFromRequest above)
// Check 1: Valid transaction parse
let tx: bitcoinjs.Transaction;
try {
tx = bitcoinjs.Transaction.fromHex(txhex);
} catch(e) {
throw Object.assign(new Error('Invalid transaction (could not parse)'), { code: -4 });
}
// Check 2: Simple size check
if (tx.weight() > config.MEMPOOL.MAX_PUSH_TX_SIZE_WEIGHT) {
throw Object.assign(new Error(`Transaction too large (max ${config.MEMPOOL.MAX_PUSH_TX_SIZE_WEIGHT} weight units)`), { code: -3 });
}
// Check 3: Check unreachable script in taproot (if not allowed)
if (!config.MEMPOOL.ALLOW_UNREACHABLE) {
tx.ins.forEach(input => {
const witness = input.witness;
// See BIP 341: Script validation rules
const hasAnnex = witness.length >= 2 &&
witness[witness.length - 1][0] === 0x50;
const scriptSpendMinLength = hasAnnex ? 3 : 2;
const maybeScriptSpend = witness.length >= scriptSpendMinLength;
if (maybeScriptSpend) {
const controlBlock = witness[witness.length - scriptSpendMinLength + 1];
if (controlBlock.length === 0 || !this.isValidLeafVersion(controlBlock[0])) {
// Skip this input, it's not taproot
return;
}
// Definitely taproot. Get script
const script = witness[witness.length - scriptSpendMinLength];
const decompiled = bitcoinjs.script.decompile(script);
if (!decompiled || decompiled.length < 2) {
// Skip this input
return;
}
// Iterate up to second last (will look ahead 1 item)
for (let i = 0; i < decompiled.length - 1; i++) {
const first = decompiled[i];
const second = decompiled[i + 1];
if (
first === bitcoinjs.opcodes.OP_FALSE &&
second === bitcoinjs.opcodes.OP_IF
) {
throw Object.assign(new Error('Unreachable taproot scripts not allowed'), { code: -5 });
}
}
}
})
}
// Pass through the input string untouched
return txhex;
}
private static isValidLeafVersion(leafVersion: number): boolean {
// See Note 7 in BIP341
// https://github.com/bitcoin/bips/blob/66a1a8151021913047934ebab3f8883f2f8ca75b/bip-0341.mediawiki#cite_note-7
// "What constraints are there on the leaf version?"
// Must be an integer between 0 and 255
// Since we're parsing a byte
if (Math.floor(leafVersion) !== leafVersion || leafVersion < 0 || leafVersion > 255) {
return false;
}
// "the leaf version cannot be odd"
if ((leafVersion & 0x01) === 1) {
return false;
}
// "The values that comply to this rule are
// the 32 even values between 0xc0 and 0xfe"
if (leafVersion >= 0xc0 && leafVersion <= 0xfe) {
return true;
}
// and also 0x66, 0x7e, 0x80, 0x84, 0x96, 0x98, 0xba, 0xbc, 0xbe."
if ([0x66, 0x7e, 0x80, 0x84, 0x96, 0x98, 0xba, 0xbc, 0xbe].includes(leafVersion)) {
return true;
}
// Otherwise, invalid
return false;
}
}
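calcEffectiveFeeStatistics leans on getNthPercentile, which is just a floor-indexed lookup into an already-sorted array. A small self-contained check with made-up fee rates:
const sorted = [{ rate: 1 }, { rate: 2 }, { rate: 3 }, { rate: 5 }, { rate: 8 }];
// index = Math.floor((length - 1) * n / 100)
console.log(Common.getNthPercentile(50, sorted).rate); // floor(4 * 0.50) = 2 -> 3
console.log(Common.getNthPercentile(99, sorted).rate); // floor(4 * 0.99) = 3 -> 5
console.log(Common.getNthPercentile(1, sorted).rate);  // floor(4 * 0.01) = 0 -> 1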
/**
* Class to calculate average fee rates of a list of transactions
* at certain weight percentiles, in a single pass
*
* init with:
* maxWeight - the total weight to measure percentiles relative to (e.g. 4MW for a single block)
* percentileBandWidth - how many weight units to average over for each percentile (as a % of maxWeight)
* percentiles - an array of weight percentiles to compute, in %
*
* then call .processNext(tx) for each transaction, in descending order
*
* retrieve the final results with .getFeeStats()
*/
export class OnlineFeeStatsCalculator {
private maxWeight: number;
private percentiles = [10,25,50,75,90];
private bandWidthPercent = 2;
private bandWidth: number = 0;
private bandIndex = 0;
private leftBound = 0;
private rightBound = 0;
private inBand = false;
private totalBandFee = 0;
private totalBandWeight = 0;
private minBandRate = Infinity;
private maxBandRate = 0;
private feeRange: { avg: number, min: number, max: number }[] = [];
private totalWeight: number = 0;
constructor (maxWeight: number, percentileBandWidth?: number, percentiles?: number[]) {
this.maxWeight = maxWeight;
if (percentiles && percentiles.length) {
this.percentiles = percentiles;
}
if (percentileBandWidth != null) {
this.bandWidthPercent = percentileBandWidth;
}
this.bandWidth = this.maxWeight * (this.bandWidthPercent / 100);
// add min/max percentiles aligned to the ends of the range
this.percentiles.unshift(this.bandWidthPercent / 2);
this.percentiles.push(100 - (this.bandWidthPercent / 2));
this.setNextBounds();
}
processNext(tx: { weight: number, fee: number, effectiveFeePerVsize?: number, feePerVsize?: number, rate?: number, txid: string }): void {
let left = this.totalWeight;
const right = this.totalWeight + tx.weight;
if (!this.inBand && right <= this.leftBound) {
this.totalWeight += tx.weight;
return;
}
while (left < right) {
if (right > this.leftBound) {
this.inBand = true;
const txRate = (tx.rate || tx.effectiveFeePerVsize || tx.feePerVsize || 0);
const weight = Math.min(right, this.rightBound) - Math.max(left, this.leftBound);
this.totalBandFee += (txRate * weight);
this.totalBandWeight += weight;
this.maxBandRate = Math.max(this.maxBandRate, txRate);
this.minBandRate = Math.min(this.minBandRate, txRate);
}
left = Math.min(right, this.rightBound);
if (left >= this.rightBound) {
this.inBand = false;
const avgBandFeeRate = this.totalBandWeight ? (this.totalBandFee / this.totalBandWeight) : 0;
this.feeRange.unshift({ avg: avgBandFeeRate, min: this.minBandRate, max: this.maxBandRate });
this.bandIndex++;
this.setNextBounds();
this.totalBandFee = 0;
this.totalBandWeight = 0;
this.minBandRate = Infinity;
this.maxBandRate = 0;
}
}
this.totalWeight += tx.weight;
}
private setNextBounds(): void {
const nextPercentile = this.percentiles[this.bandIndex];
if (nextPercentile != null) {
this.leftBound = ((nextPercentile / 100) * this.maxWeight) - (this.bandWidth / 2);
this.rightBound = this.leftBound + this.bandWidth;
} else {
this.leftBound = Infinity;
this.rightBound = Infinity;
}
}
getRawFeeStats(): WorkingEffectiveFeeStats {
if (this.totalBandWeight > 0) {
const avgBandFeeRate = this.totalBandWeight ? (this.totalBandFee / this.totalBandWeight) : 0;
this.feeRange.unshift({ avg: avgBandFeeRate, min: this.minBandRate, max: this.maxBandRate });
}
while (this.feeRange.length < this.percentiles.length) {
this.feeRange.unshift({ avg: 0, min: 0, max: 0 });
}
return {
minFee: this.feeRange[0].min,
medianFee: this.feeRange[Math.floor(this.feeRange.length / 2)].avg,
maxFee: this.feeRange[this.feeRange.length - 1].max,
feeRange: this.feeRange.map(f => f.avg),
};
}
getFeeStats(): EffectiveFeeStats {
const stats = this.getRawFeeStats();
stats.feeRange[0] = stats.minFee;
stats.feeRange[stats.feeRange.length - 1] = stats.maxFee;
return stats;
}
}
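Per the class comment, the calculator is fed transactions in descending fee-rate order and queried once at the end. A hedged sketch with synthetic transactions (weights, fees, and rates are invented; real callers stream an entire projected block, so sparse input like this leaves some percentile bands empty):
const calc = new OnlineFeeStatsCalculator(4_000_000); // one standard block of weight
const sampleTxs = [
{ txid: 'a', weight: 800_000, fee: 2_400_000, rate: 12 },
{ txid: 'b', weight: 1_600_000, fee: 3_200_000, rate: 8 },
{ txid: 'c', weight: 1_600_000, fee: 1_200_000, rate: 3 },
];
for (const tx of sampleTxs) {
calc.processNext(tx); // must be called in descending rate order
}
console.log(calc.getFeeStats()); // { minFee, medianFee, maxFee, feeRange }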

File diff suppressed because it is too large

View File

@@ -1,166 +0,0 @@
import config from '../config';
import { IDifficultyAdjustment } from '../mempool.interfaces';
import blocks from './blocks';
export interface DifficultyAdjustment {
progressPercent: number; // Percent: 0 to 100
difficultyChange: number; // Percent: -75 to 300
estimatedRetargetDate: number; // Unix time in ms
remainingBlocks: number; // Block count
remainingTime: number; // Duration of time in ms
previousRetarget: number; // Percent: -75 to 300
previousTime: number; // Unix time in ms
nextRetargetHeight: number; // Block Height
timeAvg: number; // Duration of time in ms
timeOffset: number; // (Testnet) Time since last block (cap @ 20min) in ms
expectedBlocks: number; // Block count
}
/**
* Calculate the difficulty increase/decrease by using the `bits` integer contained in two
* block headers.
*
* Warning: Only compare `bits` from blocks in two adjacent difficulty periods. This code
* assumes the maximum difference is x4 or /4 (as per the protocol) and will throw an
* error if an exponent difference of 2 or more is seen.
*
* @param {number} oldBits The 32 bit `bits` integer from a block header.
* @param {number} newBits The 32 bit `bits` integer from a block header in the next difficulty period.
* @returns {number} A floating point decimal of the difficulty change from old to new.
* (ie. 21.3 means 21.3% increase in difficulty, -21.3 is a 21.3% decrease in difficulty)
*/
export function calcBitsDifference(oldBits: number, newBits: number): number {
// Must be
// - integer
// - highest exponent is 0x20, so max value (as integer) is 0x207fffff
// - min value is 1 (exponent = 0)
// - highest bit of the number-part is +- sign, it must not be 1
const verifyBits = (bits: number): void => {
if (
Math.floor(bits) !== bits ||
bits > 0x207fffff ||
bits < 1 ||
(bits & 0x00800000) !== 0 ||
(bits & 0x007fffff) === 0
) {
throw new Error('Invalid bits');
}
};
verifyBits(oldBits);
verifyBits(newBits);
// No need to mask exponents because we checked the bounds above
const oldExp = oldBits >> 24;
const newExp = newBits >> 24;
const oldNum = oldBits & 0x007fffff;
const newNum = newBits & 0x007fffff;
// The diff can only possibly be 1, 0, -1
// (because maximum difficulty change is x4 or /4 (2 bits up or down))
let result: number;
switch (newExp - oldExp) {
// New less than old, target lowered, difficulty increased
case -1:
result = ((oldNum << 8) * 100) / newNum - 100;
break;
// Same exponent, compare numbers as is.
case 0:
result = (oldNum * 100) / newNum - 100;
break;
// Old less than new, target raised, difficulty decreased
case 1:
result = (oldNum * 100) / (newNum << 8) - 100;
break;
default:
throw new Error('Impossible exponent difference');
}
// Min/Max values
return result > 300 ? 300 : result < -75 ? -75 : result;
}
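A quick sanity check with synthetic bits values, exercising the same-exponent branch (the mantissa halves, so the target halves and difficulty doubles):
// 0x17100000 -> exponent 0x17, mantissa 0x100000
// 0x17080000 -> exponent 0x17, mantissa 0x080000 (half the target)
console.log(calcBitsDifference(0x17100000, 0x17080000)); // (0x100000 * 100) / 0x080000 - 100 = 100, i.e. +100%
console.log(calcBitsDifference(0x17080000, 0x17100000)); // -50, i.e. a 50% difficulty decrease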
export function calcDifficultyAdjustment(
DATime: number,
nowSeconds: number,
blockHeight: number,
previousRetarget: number,
network: string,
latestBlockTimestamp: number,
): DifficultyAdjustment {
const EPOCH_BLOCK_LENGTH = 2016; // Bitcoin mainnet
const BLOCK_SECONDS_TARGET = 600; // Bitcoin mainnet
const TESTNET_MAX_BLOCK_SECONDS = 1200; // Bitcoin testnet
const diffSeconds = Math.max(0, nowSeconds - DATime);
const blocksInEpoch = (blockHeight >= 0) ? blockHeight % EPOCH_BLOCK_LENGTH : 0;
const progressPercent = (blockHeight >= 0) ? blocksInEpoch / EPOCH_BLOCK_LENGTH * 100 : 100;
const remainingBlocks = EPOCH_BLOCK_LENGTH - blocksInEpoch;
const nextRetargetHeight = (blockHeight >= 0) ? blockHeight + remainingBlocks : 0;
const expectedBlocks = diffSeconds / BLOCK_SECONDS_TARGET;
const actualTimespan = (blocksInEpoch === 2015 ? latestBlockTimestamp : nowSeconds) - DATime;
let difficultyChange = 0;
let timeAvgSecs = blocksInEpoch ? diffSeconds / blocksInEpoch : BLOCK_SECONDS_TARGET;
difficultyChange = (BLOCK_SECONDS_TARGET / (actualTimespan / (blocksInEpoch + 1)) - 1) * 100;
// Max increase is x4 (+300%)
if (difficultyChange > 300) {
difficultyChange = 300;
}
// Max decrease is /4 (-75%)
if (difficultyChange < -75) {
difficultyChange = -75;
}
// Testnet difficulty is set to 1 after 20 minutes of no blocks,
// therefore the time between blocks will always be below 20 minutes (1200s).
let timeOffset = 0;
if (network === 'testnet') {
if (timeAvgSecs > TESTNET_MAX_BLOCK_SECONDS) {
timeAvgSecs = TESTNET_MAX_BLOCK_SECONDS;
}
const secondsSinceLastBlock = nowSeconds - latestBlockTimestamp;
if (secondsSinceLastBlock + timeAvgSecs > TESTNET_MAX_BLOCK_SECONDS) {
timeOffset = -Math.min(secondsSinceLastBlock, TESTNET_MAX_BLOCK_SECONDS) * 1000;
}
}
const timeAvg = Math.floor(timeAvgSecs * 1000);
const remainingTime = remainingBlocks * timeAvg;
const estimatedRetargetDate = remainingTime + nowSeconds * 1000;
return {
progressPercent,
difficultyChange,
estimatedRetargetDate,
remainingBlocks,
remainingTime,
previousRetarget,
previousTime: DATime,
nextRetargetHeight,
timeAvg,
timeOffset,
expectedBlocks,
};
}
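A worked call with invented mainnet inputs: at height 800,000, blocksInEpoch = 800000 % 2016 = 1664, so the epoch is about 82.5% complete, 352 blocks remain, and the retarget lands at height 800,352 (the timestamps below are hypothetical):
const da = calcDifficultyAdjustment(
1690000000, // DATime: first block of the epoch (hypothetical unix seconds)
1690950000, // nowSeconds (hypothetical)
800000,     // blockHeight
1.72,       // previousRetarget, percent (hypothetical)
'mainnet',
1690949700, // latestBlockTimestamp (hypothetical)
);
console.log(da.remainingBlocks);    // 352
console.log(da.nextRetargetHeight); // 800352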
class DifficultyAdjustmentApi {
public getDifficultyAdjustment(): IDifficultyAdjustment | null {
const DATime = blocks.getLastDifficultyAdjustmentTime();
const previousRetarget = blocks.getPreviousDifficultyRetarget();
const blockHeight = blocks.getCurrentBlockHeight();
const blocksCache = blocks.getBlocks();
const latestBlock = blocksCache[blocksCache.length - 1];
if (!latestBlock) {
return null;
}
const nowSeconds = Math.floor(new Date().getTime() / 1000);
return calcDifficultyAdjustment(
DATime, nowSeconds, blockHeight, previousRetarget,
config.MEMPOOL.NETWORK, latestBlock.timestamp
);
}
}
export default new DifficultyAdjustmentApi();

View File

@@ -1,202 +1,74 @@
import * as fs from 'fs';
const fsPromises = fs.promises;
import cluster from 'cluster';
import memPool from './mempool';
import blocks from './blocks';
import logger from '../logger';
import config from '../config';
import { TransactionExtended } from '../mempool.interfaces';
import { Common } from './common';
import rbfCache from './rbf-cache';
class DiskCache {
private cacheSchemaVersion = 3;
private rbfCacheSchemaVersion = 1;
private static TMP_FILE_NAME = config.MEMPOOL.CACHE_DIR + '/tmp-cache.json';
private static TMP_FILE_NAMES = config.MEMPOOL.CACHE_DIR + '/tmp-cache{number}.json';
private static FILE_NAME = config.MEMPOOL.CACHE_DIR + '/cache.json';
private static FILE_NAMES = config.MEMPOOL.CACHE_DIR + '/cache{number}.json';
private static TMP_RBF_FILE_NAME = config.MEMPOOL.CACHE_DIR + '/tmp-rbfcache.json';
private static RBF_FILE_NAME = config.MEMPOOL.CACHE_DIR + '/rbfcache.json';
private static CHUNK_FILES = 25;
private isWritingCache = false;
private ignoreBlocksCache = false;
private semaphore: { resume: (() => void)[], locks: number } = {
resume: [],
locks: 0,
};
constructor() {
if (!cluster.isPrimary || !config.MEMPOOL.CACHE_ENABLED) {
return;
}
process.on('SIGINT', (e) => {
this.$saveCacheToDisk(true);
process.exit(0);
});
}
async $saveCacheToDisk(sync: boolean = false): Promise<void> {
if (!cluster.isPrimary || !config.MEMPOOL.CACHE_ENABLED) {
return;
}
if (this.isWritingCache) {
logger.debug('Saving cache already in progress. Skipping.');
return;
}
try {
logger.debug(`Writing mempool and blocks data to disk cache (${ sync ? 'sync' : 'async' })...`);
this.isWritingCache = true;
const mempool = memPool.getMempool();
const mempoolArray: TransactionExtended[] = [];
for (const tx in mempool) {
if (mempool[tx]) {
mempoolArray.push(mempool[tx]);
}
}
Common.shuffleArray(mempoolArray);
const chunkSize = Math.floor(mempoolArray.length / DiskCache.CHUNK_FILES);
if (sync) {
fs.writeFileSync(DiskCache.TMP_FILE_NAME, JSON.stringify({
network: config.MEMPOOL.NETWORK,
cacheSchemaVersion: this.cacheSchemaVersion,
blocks: blocks.getBlocks(),
blockSummaries: blocks.getBlockSummaries(),
mempool: {},
mempoolArray: mempoolArray.splice(0, chunkSize),
}), { flag: 'w' });
for (let i = 1; i < DiskCache.CHUNK_FILES; i++) {
fs.writeFileSync(DiskCache.TMP_FILE_NAMES.replace('{number}', i.toString()), JSON.stringify({
mempool: {},
mempoolArray: mempoolArray.splice(0, chunkSize),
}), { flag: 'w' });
}
fs.renameSync(DiskCache.TMP_FILE_NAME, DiskCache.FILE_NAME);
for (let i = 1; i < DiskCache.CHUNK_FILES; i++) {
fs.renameSync(DiskCache.TMP_FILE_NAMES.replace('{number}', i.toString()), DiskCache.FILE_NAMES.replace('{number}', i.toString()));
}
} else {
await this.$yield();
await fsPromises.writeFile(DiskCache.TMP_FILE_NAME, JSON.stringify({
network: config.MEMPOOL.NETWORK,
cacheSchemaVersion: this.cacheSchemaVersion,
blocks: blocks.getBlocks(),
blockSummaries: blocks.getBlockSummaries(),
mempool: {},
mempoolArray: mempoolArray.splice(0, chunkSize),
}), { flag: 'w' });
for (let i = 1; i < DiskCache.CHUNK_FILES; i++) {
await this.$yield();
await fsPromises.writeFile(DiskCache.TMP_FILE_NAMES.replace('{number}', i.toString()), JSON.stringify({
mempool: {},
mempoolArray: mempoolArray.splice(0, chunkSize),
}), { flag: 'w' });
}
await fsPromises.rename(DiskCache.TMP_FILE_NAME, DiskCache.FILE_NAME);
for (let i = 1; i < DiskCache.CHUNK_FILES; i++) {
await fsPromises.rename(DiskCache.TMP_FILE_NAMES.replace('{number}', i.toString()), DiskCache.FILE_NAMES.replace('{number}', i.toString()));
}
}
logger.debug('Mempool and blocks data saved to disk cache');
this.isWritingCache = false;
} catch (e) {
logger.warn('Error writing to cache file: ' + (e instanceof Error ? e.message : e));
this.isWritingCache = false;
}
try {
logger.debug('Writing rbf data to disk cache (async)...');
this.isWritingCache = true;
const rbfData = rbfCache.dump();
if (sync) {
fs.writeFileSync(DiskCache.TMP_RBF_FILE_NAME, JSON.stringify({
network: config.MEMPOOL.NETWORK,
rbfCacheSchemaVersion: this.rbfCacheSchemaVersion,
rbf: rbfData,
}), { flag: 'w' });
fs.renameSync(DiskCache.TMP_RBF_FILE_NAME, DiskCache.RBF_FILE_NAME);
} else {
await fsPromises.writeFile(DiskCache.TMP_RBF_FILE_NAME, JSON.stringify({
network: config.MEMPOOL.NETWORK,
rbfCacheSchemaVersion: this.rbfCacheSchemaVersion,
rbf: rbfData,
}), { flag: 'w' });
await fsPromises.rename(DiskCache.TMP_RBF_FILE_NAME, DiskCache.RBF_FILE_NAME);
}
logger.debug('Rbf data saved to disk cache');
this.isWritingCache = false;
} catch (e) {
logger.warn('Error writing rbf data to cache file: ' + (e instanceof Error ? e.message : e));
this.isWritingCache = false;
}
}
wipeCache(): void {
logger.notice(`Wiping nodejs backend cache/cache*.json files`);
try {
fs.unlinkSync(DiskCache.FILE_NAME);
} catch (e: any) {
if (e?.code !== 'ENOENT') {
logger.err(`Cannot wipe cache file ${DiskCache.FILE_NAME}. Exception ${JSON.stringify(e)}`);
}
}
for (let i = 1; i < DiskCache.CHUNK_FILES; i++) {
const filename = DiskCache.FILE_NAMES.replace('{number}', i.toString());
try {
fs.unlinkSync(filename);
} catch (e: any) {
if (e?.code !== 'ENOENT') {
logger.err(`Cannot wipe cache file ${filename}. Exception ${JSON.stringify(e)}`);
}
}
}
}
wipeRbfCache() {
logger.notice(`Wiping nodejs backend cache/rbfcache.json file`);
try {
fs.unlinkSync(DiskCache.RBF_FILE_NAME);
} catch (e: any) {
if (e?.code !== 'ENOENT') {
logger.err(`Cannot wipe cache file ${DiskCache.RBF_FILE_NAME}. Exception ${JSON.stringify(e)}`);
}
}
}
async $loadMempoolCache(): Promise<void> {
if (!config.MEMPOOL.CACHE_ENABLED || !fs.existsSync(DiskCache.FILE_NAME)) {
return;
}
try {
const start = Date.now();
let data: any = {};
const cacheData = fs.readFileSync(DiskCache.FILE_NAME, 'utf8');
if (cacheData) {
logger.info('Restoring mempool and blocks data from disk cache');
data = JSON.parse(cacheData);
if (data.cacheSchemaVersion === undefined || data.cacheSchemaVersion !== this.cacheSchemaVersion) {
logger.notice('Disk cache contains an outdated schema version. Clearing it and skipping the cache loading.');
return this.wipeCache();
}
if (data.network && data.network !== config.MEMPOOL.NETWORK) {
logger.notice('Disk cache contains data from a different network. Clearing it and skipping the cache loading.');
return this.wipeCache();
}
if (data.mempoolArray) {
for (const tx of data.mempoolArray) {
delete tx.uid;
data.mempool[tx.txid] = tx;
}
}
@@ -209,7 +81,6 @@ class DiskCache {
const cacheData2 = JSON.parse(fs.readFileSync(fileName, 'utf8'));
if (cacheData2.mempoolArray) {
for (const tx of cacheData2.mempoolArray) {
delete tx.uid;
data.mempool[tx.txid] = tx;
}
} else {
@@ -217,76 +88,15 @@ class DiskCache {
}
}
} catch (e) {
logger.err('Error parsing ' + fileName + '. Skipping. Reason: ' + (e instanceof Error ? e.message : e));
}
}
logger.info(`Loaded mempool from disk cache in ${Date.now() - start} ms`);
await memPool.$setMempool(data.mempool);
if (!this.ignoreBlocksCache) {
blocks.setBlocks(data.blocks);
blocks.setBlockSummaries(data.blockSummaries || []);
} else {
logger.info('Re-saving cache with empty recent blocks data');
await this.$saveCacheToDisk(true);
}
} catch (e) {
logger.warn('Failed to parse mempool and blocks cache. Skipping. Reason: ' + (e instanceof Error ? e.message : e));
}
try {
let rbfData: any = {};
const rbfCacheData = fs.readFileSync(DiskCache.RBF_FILE_NAME, 'utf8');
if (rbfCacheData) {
logger.info('Restoring rbf data from disk cache');
rbfData = JSON.parse(rbfCacheData);
if (rbfData.rbfCacheSchemaVersion === undefined || rbfData.rbfCacheSchemaVersion !== this.rbfCacheSchemaVersion) {
logger.notice('Rbf disk cache contains an outdated schema version. Clearing it and skipping the cache loading.');
return this.wipeRbfCache();
}
if (rbfData.network && rbfData.network !== config.MEMPOOL.NETWORK) {
logger.notice('Rbf disk cache contains data from a different network. Clearing it and skipping the cache loading.');
return this.wipeRbfCache();
}
}
if (rbfData?.rbf) {
rbfCache.load(rbfData.rbf);
}
} catch (e) {
logger.warn('Failed to parse rbf cache. Skipping. Reason: ' + (e instanceof Error ? e.message : e));
}
}
private $yield(): Promise<void> {
if (this.semaphore.locks) {
logger.debug('Pause writing mempool and blocks data to disk cache (async)');
return new Promise((resolve) => {
this.semaphore.resume.push(resolve);
});
} else {
return Promise.resolve();
}
}
public lock(): void {
this.semaphore.locks++;
}
public unlock(): void {
this.semaphore.locks = Math.max(0, this.semaphore.locks - 1);
if (!this.semaphore.locks && this.semaphore.resume.length) {
const nextResume = this.semaphore.resume.shift();
if (nextResume) {
logger.debug('Resume writing mempool and blocks data to disk cache (async)');
nextResume();
}
}
}
public setIgnoreBlocksCache(): void {
this.ignoreBlocksCache = true;
}
}
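The lock()/unlock()/$yield() trio is a small cooperative semaphore: an async cache write parks itself between chunk files whenever a reader holds a lock, and resumes when the last lock is released. A standalone sketch of the same pattern (simplified, not the class itself):
const semaphore: { resume: (() => void)[], locks: number } = { resume: [], locks: 0 };

function yieldIfLocked(): Promise<void> {
return semaphore.locks
? new Promise((resolve) => semaphore.resume.push(resolve)) // park until unlock()
: Promise.resolve();
}

function unlock(): void {
semaphore.locks = Math.max(0, semaphore.locks - 1);
if (!semaphore.locks) {
semaphore.resume.shift()?.(); // wake one parked writer
}
}

// usage: a reader brackets its critical section with a lock
semaphore.locks++;
// ... read the cache files without a half-written chunk appearing underneath ...
unlock(); // any paused cache write resumes here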

View File

@@ -1,729 +0,0 @@
import logger from '../../logger';
import DB from '../../database';
import nodesApi from './nodes.api';
import { ResultSetHeader } from 'mysql2';
import { ILightningApi } from '../lightning/lightning-api.interface';
import { Common } from '../common';
class ChannelsApi {
public async $getAllChannels(): Promise<any[]> {
try {
const query = `SELECT * FROM channels`;
const [rows]: any = await DB.query(query);
return rows;
} catch (e) {
logger.err('$getAllChannels error: ' + (e instanceof Error ? e.message : e));
throw e;
}
}
public async $getAllChannelsGeo(publicKey?: string, style?: string): Promise<any[]> {
try {
let select: string;
if (style === 'widget') {
select = `
nodes_1.latitude AS node1_latitude, nodes_1.longitude AS node1_longitude,
nodes_2.latitude AS node2_latitude, nodes_2.longitude AS node2_longitude
`;
} else {
select = `
nodes_1.public_key as node1_public_key, nodes_1.alias AS node1_alias,
nodes_1.latitude AS node1_latitude, nodes_1.longitude AS node1_longitude,
nodes_2.public_key as node2_public_key, nodes_2.alias AS node2_alias,
nodes_2.latitude AS node2_latitude, nodes_2.longitude AS node2_longitude
`;
}
const params: string[] = [];
let query = `SELECT ${select}
FROM channels
JOIN nodes AS nodes_1 on nodes_1.public_key = channels.node1_public_key
JOIN nodes AS nodes_2 on nodes_2.public_key = channels.node2_public_key
WHERE channels.status = 1
AND nodes_1.latitude IS NOT NULL AND nodes_1.longitude IS NOT NULL
AND nodes_2.latitude IS NOT NULL AND nodes_2.longitude IS NOT NULL
`;
if (publicKey !== undefined) {
query += ' AND (nodes_1.public_key = ? OR nodes_2.public_key = ?)';
params.push(publicKey);
params.push(publicKey);
} else {
query += ` AND channels.capacity > 1000000
GROUP BY nodes_1.public_key, nodes_2.public_key
ORDER BY channels.capacity DESC
LIMIT 10000
`;
}
const [rows]: any = await DB.query(query, params);
return rows.map((row) => {
if (style === 'widget') {
return [
row.node1_longitude, row.node1_latitude,
row.node2_longitude, row.node2_latitude,
];
} else {
return [
row.node1_public_key, row.node1_alias,
row.node1_longitude, row.node1_latitude,
row.node2_public_key, row.node2_alias,
row.node2_longitude, row.node2_latitude,
];
}
});
} catch (e) {
logger.err('$getAllChannelsGeo error: ' + (e instanceof Error ? e.message : e));
throw e;
}
}
public async $searchChannelsById(search: string): Promise<any[]> {
try {
const searchStripped = search.replace(/[^0-9x]/g, '') + '%';
const query = `SELECT id, short_id, capacity, status FROM channels WHERE id LIKE ? OR short_id LIKE ? LIMIT 10`;
const [rows]: any = await DB.query(query, [searchStripped, searchStripped]);
return rows;
} catch (e) {
logger.err('$searchChannelsById error: ' + (e instanceof Error ? e.message : e));
throw e;
}
}
public async $getChannelsByStatus(status: number | number[]): Promise<any[]> {
try {
let query: string;
if (Array.isArray(status)) {
query = `SELECT * FROM channels WHERE status IN (${status.join(',')})`;
} else {
query = `SELECT * FROM channels WHERE status = ?`;
}
const [rows]: any = await DB.query(query, [status]);
return rows;
} catch (e) {
logger.err('$getChannelsByStatus error: ' + (e instanceof Error ? e.message : e));
throw e;
}
}
public async $getClosedChannelsWithoutReason(): Promise<any[]> {
try {
const query = `SELECT * FROM channels WHERE status = 2 AND closing_reason IS NULL AND closing_transaction_id != ''`;
const [rows]: any = await DB.query(query);
return rows;
} catch (e) {
logger.err('$getClosedChannelsWithoutReason error: ' + (e instanceof Error ? e.message : e));
throw e;
}
}
public async $getPenaltyClosedChannels(): Promise<any[]> {
try {
const query = `
SELECT n1.alias AS alias_left,
n2.alias AS alias_right,
channels.*
FROM channels
LEFT JOIN nodes AS n1 ON n1.public_key = channels.node1_public_key
LEFT JOIN nodes AS n2 ON n2.public_key = channels.node2_public_key
WHERE channels.status = 2 AND channels.closing_reason = 3
ORDER BY closing_date DESC
`;
const [rows]: any = await DB.query(query);
return rows;
} catch (e) {
logger.err('$getPenaltyClosedChannels error: ' + (e instanceof Error ? e.message : e));
throw e;
}
}
public async $getUnresolvedClosedChannels(): Promise<any[]> {
try {
const query = `SELECT * FROM channels WHERE status = 2 AND closing_reason = 2 AND closing_resolved = 0 AND closing_transaction_id != ''`;
const [rows]: any = await DB.query(query);
return rows;
} catch (e) {
logger.err('$getUnresolvedClosedChannels error: ' + (e instanceof Error ? e.message : e));
throw e;
}
}
public async $getChannelsWithoutSourceChecked(): Promise<any[]> {
try {
const query = `
SELECT channels.*
FROM channels
WHERE channels.source_checked != 1
`;
const [rows]: any = await DB.query(query);
return rows;
} catch (e) {
logger.err('$getChannelsWithoutSourceChecked error: ' + (e instanceof Error ? e.message : e));
throw e;
}
}
public async $getChannelsWithoutCreatedDate(): Promise<any[]> {
try {
const query = `SELECT * FROM channels WHERE created IS NULL`;
const [rows]: any = await DB.query(query);
return rows;
} catch (e) {
logger.err('$getChannelsWithoutCreatedDate error: ' + (e instanceof Error ? e.message : e));
throw e;
}
}
public async $getChannel(id: string): Promise<any> {
try {
const query = `
SELECT n1.alias AS alias_left, n1.longitude as node1_longitude, n1.latitude as node1_latitude,
n2.alias AS alias_right, n2.longitude as node2_longitude, n2.latitude as node2_latitude,
channels.*,
ns1.channels AS channels_left, ns1.capacity AS capacity_left, ns2.channels AS channels_right, ns2.capacity AS capacity_right
FROM channels
LEFT JOIN nodes AS n1 ON n1.public_key = channels.node1_public_key
LEFT JOIN nodes AS n2 ON n2.public_key = channels.node2_public_key
LEFT JOIN node_stats AS ns1 ON ns1.public_key = channels.node1_public_key
LEFT JOIN node_stats AS ns2 ON ns2.public_key = channels.node2_public_key
WHERE (
ns1.id = (
SELECT MAX(id)
FROM node_stats
WHERE public_key = channels.node1_public_key
)
AND ns2.id = (
SELECT MAX(id)
FROM node_stats
WHERE public_key = channels.node2_public_key
)
)
AND channels.id = ?
`;
const [rows]: any = await DB.query(query, [id]);
if (rows[0]) {
return this.convertChannel(rows[0]);
}
} catch (e) {
logger.err('$getChannel error: ' + (e instanceof Error ? e.message : e));
throw e;
}
}
public async $getChannelsStats(): Promise<any> {
try {
// Feedback from zerofeerouting:
// "I would argue > 5000ppm can be ignored. Channels charging more than .5% fee are ignored by CLN for example."
const ignoredFeeRateThreshold = 5000;
const ignoredBaseFeeThreshold = 5000;
// Capacity
let query = `SELECT AVG(capacity) AS avgCapacity FROM channels WHERE status = 1 ORDER BY capacity`;
const [avgCapacity]: any = await DB.query(query);
query = `SELECT capacity FROM channels WHERE status = 1 ORDER BY capacity`;
let [capacity]: any = await DB.query(query);
capacity = capacity.map(capacity => capacity.capacity);
const medianCapacity = capacity[Math.floor(capacity.length / 2)];
// Fee rates
query = `SELECT node1_fee_rate FROM channels WHERE node1_fee_rate < ${ignoredFeeRateThreshold} AND status = 1`;
let [feeRates1]: any = await DB.query(query);
feeRates1 = feeRates1.map(rate => rate.node1_fee_rate);
query = `SELECT node2_fee_rate FROM channels WHERE node2_fee_rate < ${ignoredFeeRateThreshold} AND status = 1`;
let [feeRates2]: any = await DB.query(query);
feeRates2 = feeRates2.map(rate => rate.node2_fee_rate);
let feeRates = (feeRates1.concat(feeRates2)).sort((a, b) => a - b);
let avgFeeRate = 0;
for (const rate of feeRates) {
avgFeeRate += rate;
}
avgFeeRate /= feeRates.length;
const medianFeeRate = feeRates[Math.floor(feeRates.length / 2)];
// Base fees
query = `SELECT node1_base_fee_mtokens FROM channels WHERE node1_base_fee_mtokens < ${ignoredBaseFeeThreshold} AND status = 1`;
let [baseFees1]: any = await DB.query(query);
baseFees1 = baseFees1.map(rate => rate.node1_base_fee_mtokens);
query = `SELECT node2_base_fee_mtokens FROM channels WHERE node2_base_fee_mtokens < ${ignoredBaseFeeThreshold} AND status = 1`;
let [baseFees2]: any = await DB.query(query);
baseFees2 = baseFees2.map(rate => rate.node2_base_fee_mtokens);
let baseFees = (baseFees1.concat(baseFees2)).sort((a, b) => a - b);
let avgBaseFee = 0;
for (const fee of baseFees) {
avgBaseFee += fee;
}
avgBaseFee /= baseFees.length;
const medianBaseFee = baseFees[Math.floor(baseFees.length / 2)];
return {
avgCapacity: parseInt(avgCapacity[0].avgCapacity, 10),
avgFeeRate: avgFeeRate,
avgBaseFee: avgBaseFee,
medianCapacity: medianCapacity,
medianFeeRate: medianFeeRate,
medianBaseFee: medianBaseFee,
}
} catch (e) {
logger.err(`Cannot calculate channels statistics. Reason: ${e instanceof Error ? e.message : e}`);
throw e;
}
}
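// Note: the medians above are the simple upper-median of each sorted array,
// e.g. for capacities [1, 2, 3, 4] the reported medianCapacity is
// capacity[Math.floor(4 / 2)] = capacity[2] = 3.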
public async $getChannelsByTransactionId(transactionIds: string[]): Promise<any[]> {
try {
const query = `
SELECT n1.alias AS alias_left, n2.alias AS alias_right, channels.*
FROM channels
LEFT JOIN nodes AS n1 ON n1.public_key = channels.node1_public_key
LEFT JOIN nodes AS n2 ON n2.public_key = channels.node2_public_key
WHERE channels.transaction_id IN ? OR channels.closing_transaction_id IN ?
`;
const [rows]: any = await DB.query(query, [[transactionIds], [transactionIds]]);
const channels = rows.map((row) => this.convertChannel(row));
return channels;
} catch (e) {
logger.err('$getChannelsByTransactionId error: ' + (e instanceof Error ? e.message : e));
throw e;
}
}
public async $getChannelByClosingId(transactionId: string): Promise<any> {
try {
const query = `
SELECT
channels.*
FROM channels
WHERE channels.closing_transaction_id = ?
`;
const [rows]: any = await DB.query(query, [transactionId]);
if (rows.length > 0) {
rows[0].outputs = JSON.parse(rows[0].outputs);
return rows[0];
}
} catch (e) {
logger.err('$getChannelByClosingId error: ' + (e instanceof Error ? e.message : e));
// don't throw - this data isn't essential
}
}
public async $getChannelsByOpeningId(transactionId: string): Promise<any> {
try {
const query = `
SELECT
channels.*
FROM channels
WHERE channels.transaction_id = ?
`;
const [rows]: any = await DB.query(query, [transactionId]);
if (rows.length > 0) {
return rows.map(row => {
row.outputs = JSON.parse(row.outputs);
return row;
});
}
} catch (e) {
logger.err('$getChannelsByOpeningId error: ' + (e instanceof Error ? e.message : e));
// don't throw - this data isn't essential
}
}
public async $updateClosingInfo(channelInfo: { id: string, node1_closing_balance: number, node2_closing_balance: number, closed_by: string | null, closing_fee: number, outputs: ILightningApi.ForensicOutput[]}): Promise<void> {
try {
const query = `
UPDATE channels SET
node1_closing_balance = ?,
node2_closing_balance = ?,
closed_by = ?,
closing_fee = ?,
outputs = ?
WHERE channels.id = ?
`;
await DB.query<ResultSetHeader>(query, [
channelInfo.node1_closing_balance || 0,
channelInfo.node2_closing_balance || 0,
channelInfo.closed_by,
channelInfo.closing_fee || 0,
JSON.stringify(channelInfo.outputs),
channelInfo.id,
]);
} catch (e) {
logger.err('$updateClosingInfo error: ' + (e instanceof Error ? e.message : e));
// don't throw - this data isn't essential
}
}
public async $updateOpeningInfo(channelInfo: { id: string, node1_funding_balance: number, node2_funding_balance: number, funding_ratio: number, single_funded: boolean | void }): Promise<void> {
try {
const query = `
UPDATE channels SET
node1_funding_balance = ?,
node2_funding_balance = ?,
funding_ratio = ?,
single_funded = ?
WHERE channels.id = ?
`;
await DB.query<ResultSetHeader>(query, [
channelInfo.node1_funding_balance || 0,
channelInfo.node2_funding_balance || 0,
channelInfo.funding_ratio,
channelInfo.single_funded ? 1 : 0,
channelInfo.id,
]);
} catch (e) {
logger.err('$updateOpeningInfo error: ' + (e instanceof Error ? e.message : e));
// don't throw - this data isn't essential
}
}
public async $markChannelSourceChecked(id: string): Promise<void> {
try {
const query = `
UPDATE channels
SET source_checked = 1
WHERE id = ?
`;
await DB.query<ResultSetHeader>(query, [id]);
} catch (e) {
logger.err('$markChannelSourceChecked error: ' + (e instanceof Error ? e.message : e));
// don't throw - this data isn't essential
}
}
public async $getChannelsForNode(public_key: string, index: number, length: number, status: string): Promise<any[]> {
try {
let channelStatusFilter;
if (status === 'open') {
channelStatusFilter = '< 2';
} else if (status === 'active') {
channelStatusFilter = '= 1';
} else if (status === 'closed') {
channelStatusFilter = '= 2';
} else {
throw new Error('getChannelsForNode: Invalid status requested');
}
// Channels originating from node
let query = `
SELECT COALESCE(node2.alias, SUBSTRING(node2_public_key, 1, 20)) AS alias, COALESCE(node2.public_key, node2_public_key) AS public_key,
channels.status, channels.node1_fee_rate,
channels.capacity, channels.short_id, channels.id, channels.closing_reason,
UNIX_TIMESTAMP(closing_date) as closing_date, UNIX_TIMESTAMP(channels.updated_at) as updated_at
FROM channels
LEFT JOIN nodes AS node2 ON node2.public_key = channels.node2_public_key
WHERE node1_public_key = ? AND channels.status ${channelStatusFilter}
`;
const [channelsFromNode]: any = await DB.query(query, [public_key]);
// Channels incoming to node
query = `
SELECT COALESCE(node1.alias, SUBSTRING(node1_public_key, 1, 20)) AS alias, COALESCE(node1.public_key, node1_public_key) AS public_key,
channels.status, channels.node2_fee_rate,
channels.capacity, channels.short_id, channels.id, channels.closing_reason,
UNIX_TIMESTAMP(closing_date) as closing_date, UNIX_TIMESTAMP(channels.updated_at) as updated_at
FROM channels
LEFT JOIN nodes AS node1 ON node1.public_key = channels.node1_public_key
WHERE node2_public_key = ? AND channels.status ${channelStatusFilter}
`;
const [channelsToNode]: any = await DB.query(query, [public_key]);
let allChannels = channelsFromNode.concat(channelsToNode);
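// Sort order: closed channels by most recent close first (falling back to updated_at), everything else by largest capacity first.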
allChannels.sort((a, b) => {
if (status === 'closed') {
if (!b.closing_date && !a.closing_date) {
return (b.updated_at ?? 0) - (a.updated_at ?? 0);
} else {
return (b.closing_date ?? 0) - (a.closing_date ?? 0);
}
} else {
return b.capacity - a.capacity;
}
});
if (index >= 0) {
allChannels = allChannels.slice(index, index + length);
} else if (index === -1) { // Node channels tree chart
allChannels = allChannels.slice(0, 1000);
}
const channels: any[] = [];
for (const row of allChannels) {
let channel;
if (index >= 0) {
const activeChannelsStats: any = await nodesApi.$getActiveChannelsStats(row.public_key);
channel = {
status: row.status,
closing_reason: row.closing_reason,
closing_date: row.closing_date,
capacity: row.capacity ?? 0,
short_id: row.short_id,
id: row.id,
fee_rate: row.node1_fee_rate ?? row.node2_fee_rate ?? 0,
node: {
alias: row.alias.length > 0 ? row.alias : row.public_key.slice(0, 20),
public_key: row.public_key,
channels: activeChannelsStats.active_channel_count ?? 0,
capacity: activeChannelsStats.capacity ?? 0,
}
};
} else if (index === -1) {
channel = {
capacity: row.capacity ?? 0,
short_id: row.short_id,
id: row.id,
node: {
alias: row.alias.length > 0 ? row.alias : row.public_key.slice(0, 20),
public_key: row.public_key,
}
};
}
channels.push(channel);
}
return channels;
} catch (e) {
logger.err('$getChannelsForNode error: ' + (e instanceof Error ? e.message : e));
throw e;
}
}
public async $getChannelsCountForNode(public_key: string, status: string): Promise<any> {
try {
// Default active and inactive channels
let statusQuery = '< 2';
// Closed channels only
if (status === 'closed') {
statusQuery = '= 2';
}
const query = `
SELECT COUNT(*) AS count
FROM channels
WHERE (node1_public_key = ? OR node2_public_key = ?)
AND status ${statusQuery}
`;
const [rows]: any = await DB.query(query, [public_key, public_key]);
return rows[0]['count'];
} catch (e) {
logger.err('$getChannelsCountForNode error: ' + (e instanceof Error ? e.message : e));
throw e;
}
}
private convertChannel(channel: any): any {
return {
'id': channel.id,
'short_id': channel.short_id,
'capacity': channel.capacity,
'transaction_id': channel.transaction_id,
'transaction_vout': channel.transaction_vout,
'closing_transaction_id': channel.closing_transaction_id,
'closing_fee': channel.closing_fee,
'closing_reason': channel.closing_reason,
'closing_date': channel.closing_date,
'updated_at': channel.updated_at,
'created': channel.created,
'status': channel.status,
'funding_ratio': channel.funding_ratio,
'closed_by': channel.closed_by,
'single_funded': !!channel.single_funded,
'node_left': {
'alias': channel.alias_left,
'public_key': channel.node1_public_key,
'channels': channel.channels_left,
'capacity': channel.capacity_left,
'base_fee_mtokens': channel.node1_base_fee_mtokens,
'cltv_delta': channel.node1_cltv_delta,
'fee_rate': channel.node1_fee_rate,
'is_disabled': channel.node1_is_disabled,
'max_htlc_mtokens': channel.node1_max_htlc_mtokens,
'min_htlc_mtokens': channel.node1_min_htlc_mtokens,
'updated_at': channel.node1_updated_at,
'longitude': channel.node1_longitude,
'latitude': channel.node1_latitude,
'funding_balance': channel.node1_funding_balance,
'closing_balance': channel.node1_closing_balance,
'initiated_close': channel.closed_by === channel.node1_public_key ? true : undefined,
},
'node_right': {
'alias': channel.alias_right,
'public_key': channel.node2_public_key,
'channels': channel.channels_right,
'capacity': channel.capacity_right,
'base_fee_mtokens': channel.node2_base_fee_mtokens,
'cltv_delta': channel.node2_cltv_delta,
'fee_rate': channel.node2_fee_rate,
'is_disabled': channel.node2_is_disabled,
'max_htlc_mtokens': channel.node2_max_htlc_mtokens,
'min_htlc_mtokens': channel.node2_min_htlc_mtokens,
'updated_at': channel.node2_updated_at,
'longitude': channel.node2_longitude,
'latitude': channel.node2_latitude,
'funding_balance': channel.node2_funding_balance,
'closing_balance': channel.node2_closing_balance,
'initiated_close': channel.closed_by === channel.node2_public_key ? true : undefined,
},
};
}
/**
* Save or update a channel present in the graph
*/
public async $saveChannel(channel: ILightningApi.Channel, status = 1): Promise<void> {
const [ txid, vout ] = channel.chan_point.split(':');
const policy1: Partial<ILightningApi.RoutingPolicy> = channel.node1_policy || {};
const policy2: Partial<ILightningApi.RoutingPolicy> = channel.node2_policy || {};
// https://github.com/mempool/mempool/issues/3006
if ((channel.last_update ?? 0) < 1514736061) { // January 1st 2018
channel.last_update = null;
}
if ((policy1.last_update ?? 0) < 1514736061) { // January 1st 2018
policy1.last_update = null;
}
if ((policy2.last_update ?? 0) < 1514736061) { // January 1st 2018
policy2.last_update = null;
}
const query = `INSERT INTO channels
(
id,
short_id,
capacity,
transaction_id,
transaction_vout,
updated_at,
status,
node1_public_key,
node1_base_fee_mtokens,
node1_cltv_delta,
node1_fee_rate,
node1_is_disabled,
node1_max_htlc_mtokens,
node1_min_htlc_mtokens,
node1_updated_at,
node2_public_key,
node2_base_fee_mtokens,
node2_cltv_delta,
node2_fee_rate,
node2_is_disabled,
node2_max_htlc_mtokens,
node2_min_htlc_mtokens,
node2_updated_at
)
VALUES (?, ?, ?, ?, ?, ?, ${status}, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?)
ON DUPLICATE KEY UPDATE
capacity = ?,
updated_at = ?,
status = ${status},
node1_public_key = ?,
node1_base_fee_mtokens = ?,
node1_cltv_delta = ?,
node1_fee_rate = ?,
node1_is_disabled = ?,
node1_max_htlc_mtokens = ?,
node1_min_htlc_mtokens = ?,
node1_updated_at = ?,
node2_public_key = ?,
node2_base_fee_mtokens = ?,
node2_cltv_delta = ?,
node2_fee_rate = ?,
node2_is_disabled = ?,
node2_max_htlc_mtokens = ?,
node2_min_htlc_mtokens = ?,
node2_updated_at = ?
;`;
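// Note: every column parameter appears twice in the bindings below: once for the INSERT values and once for the ON DUPLICATE KEY UPDATE branch.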
await DB.query(query, [
Common.channelShortIdToIntegerId(channel.channel_id),
Common.channelIntegerIdToShortId(channel.channel_id),
channel.capacity,
txid,
vout,
Common.utcDateToMysql(channel.last_update),
channel.node1_pub,
policy1.fee_base_msat,
policy1.time_lock_delta,
policy1.fee_rate_milli_msat,
policy1.disabled,
policy1.max_htlc_msat,
policy1.min_htlc,
Common.utcDateToMysql(policy1.last_update),
channel.node2_pub,
policy2.fee_base_msat,
policy2.time_lock_delta,
policy2.fee_rate_milli_msat,
policy2.disabled,
policy2.max_htlc_msat,
policy2.min_htlc,
Common.utcDateToMysql(policy2.last_update),
channel.capacity,
Common.utcDateToMysql(channel.last_update),
channel.node1_pub,
policy1.fee_base_msat,
policy1.time_lock_delta,
policy1.fee_rate_milli_msat,
policy1.disabled,
policy1.max_htlc_msat,
policy1.min_htlc,
Common.utcDateToMysql(policy1.last_update),
channel.node2_pub,
policy2.fee_base_msat,
policy2.time_lock_delta,
policy2.fee_rate_milli_msat,
policy2.disabled,
policy2.max_htlc_msat,
policy2.min_htlc,
Common.utcDateToMysql(policy2.last_update)
]);
}
/**
* Set all channels not in `graphChannelsIds` as inactive (status = 0)
*/
public async $setChannelsInactive(graphChannelsIds: string[]): Promise<void> {
if (graphChannelsIds.length === 0) {
return;
}
try {
const result = await DB.query<ResultSetHeader>(`
UPDATE channels
SET status = 0
WHERE id NOT IN (
${graphChannelsIds.map(id => `"${id}"`).join(',')}
)
AND status != 2
`);
if ((result[0].changedRows ?? 0) > 0) {
logger.debug(`Marked ${result[0].changedRows} channels as inactive because they are not in the graph`, logger.tags.ln);
}
} catch (e) {
logger.err('$setChannelsInactive() error: ' + (e instanceof Error ? e.message : e));
}
}
public async $getLatestChannelUpdateForNode(publicKey: string): Promise<number> {
try {
const query = `
SELECT MAX(UNIX_TIMESTAMP(updated_at)) as updated_at
FROM channels
WHERE node1_public_key = ?
`;
const [rows]: any[] = await DB.query(query, [publicKey]);
if (rows.length > 0) {
return rows[0].updated_at;
}
} catch (e) {
logger.err(`Can't getLatestChannelUpdateForNode for ${publicKey}. Reason ${e instanceof Error ? e.message : e}`);
}
return 0;
}
}
export default new ChannelsApi();

View File

@@ -1,139 +0,0 @@
import config from '../../config';
import { Application, Request, Response } from 'express';
import channelsApi from './channels.api';
class ChannelsRoutes {
constructor() { }
public initRoutes(app: Application) {
app
.get(config.MEMPOOL.API_URL_PREFIX + 'lightning/channels/txids', this.$getChannelsByTransactionIds)
.get(config.MEMPOOL.API_URL_PREFIX + 'lightning/channels/search/:search', this.$searchChannelsById)
.get(config.MEMPOOL.API_URL_PREFIX + 'lightning/channels/:short_id', this.$getChannel)
.get(config.MEMPOOL.API_URL_PREFIX + 'lightning/channels', this.$getChannelsForNode)
.get(config.MEMPOOL.API_URL_PREFIX + 'lightning/penalties', this.$getPenaltyClosedChannels)
.get(config.MEMPOOL.API_URL_PREFIX + 'lightning/channels-geo', this.$getAllChannelsGeo)
.get(config.MEMPOOL.API_URL_PREFIX + 'lightning/channels-geo/:publicKey', this.$getAllChannelsGeo)
;
}
private async $searchChannelsById(req: Request, res: Response) {
try {
const channels = await channelsApi.$searchChannelsById(req.params.search);
res.json(channels);
} catch (e) {
res.status(500).send(e instanceof Error ? e.message : e);
}
}
private async $getChannel(req: Request, res: Response) {
try {
const channel = await channelsApi.$getChannel(req.params.short_id);
if (!channel) {
res.status(404).send('Channel not found');
return;
}
res.header('Pragma', 'public');
res.header('Cache-control', 'public');
res.setHeader('Expires', new Date(Date.now() + 1000 * 60).toUTCString());
res.json(channel);
} catch (e) {
res.status(500).send(e instanceof Error ? e.message : e);
}
}
private async $getChannelsForNode(req: Request, res: Response) {
try {
if (typeof req.query.public_key !== 'string') {
res.status(400).send('Missing parameter: public_key');
return;
}
const index = parseInt(typeof req.query.index === 'string' ? req.query.index : '0', 10) || 0;
const status: string = typeof req.query.status === 'string' ? req.query.status : '';
if (index < -1) {
res.status(400).send('Invalid index');
return;
}
if (['open', 'active', 'closed'].includes(status) === false) {
res.status(400).send('Invalid status');
return;
}
const channels = await channelsApi.$getChannelsForNode(req.query.public_key, index, 10, status);
const channelsCount = await channelsApi.$getChannelsCountForNode(req.query.public_key, status);
res.header('Pragma', 'public');
res.header('Cache-control', 'public');
res.setHeader('Expires', new Date(Date.now() + 1000 * 60).toUTCString());
res.header('X-Total-Count', channelsCount.toString());
res.json(channels);
} catch (e) {
res.status(500).send(e instanceof Error ? e.message : e);
}
}
private async $getChannelsByTransactionIds(req: Request, res: Response): Promise<void> {
try {
if (!Array.isArray(req.query.txId)) {
res.status(400).send('Not an array');
return;
}
const txIds: string[] = [];
for (const _txId in req.query.txId) {
if (typeof req.query.txId[_txId] === 'string') {
txIds.push(req.query.txId[_txId].toString());
}
}
const channels = await channelsApi.$getChannelsByTransactionId(txIds);
const result: any[] = [];
for (const txid of txIds) {
const inputs: any = {};
const outputs: any = {};
// Assuming that we only have one lightning close input in each transaction. This may not be true in the future
const foundChannelsFromInput = channels.find((channel) => channel.closing_transaction_id === txid);
if (foundChannelsFromInput) {
inputs[0] = foundChannelsFromInput;
}
const foundChannelsFromOutputs = channels.filter((channel) => channel.transaction_id === txid);
for (const output of foundChannelsFromOutputs) {
outputs[output.transaction_vout] = output;
}
result.push({
inputs,
outputs,
});
}
res.json(result);
} catch (e) {
res.status(500).send(e instanceof Error ? e.message : e);
}
}
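// Illustrative response shape (hypothetical txids), one entry per requested txid:
// [{ inputs: { 0: channelClosedByThisTx }, outputs: { 1: channelOpenedAtVout1 } }]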
private async $getPenaltyClosedChannels(req: Request, res: Response): Promise<void> {
try {
const channels = await channelsApi.$getPenaltyClosedChannels();
res.header('Pragma', 'public');
res.header('Cache-control', 'public');
res.setHeader('Expires', new Date(Date.now() + 1000 * 60).toUTCString());
res.json(channels);
} catch (e) {
res.status(500).send(e instanceof Error ? e.message : e);
}
}
private async $getAllChannelsGeo(req: Request, res: Response) {
try {
const style: string = typeof req.query.style === 'string' ? req.query.style : '';
const channels = await channelsApi.$getAllChannelsGeo(req.params?.publicKey, style);
res.header('Pragma', 'public');
res.header('Cache-control', 'public');
res.setHeader('Expires', new Date(Date.now() + 1000 * 60).toUTCString());
res.json(channels);
} catch (e) {
res.status(500).send(e instanceof Error ? e.message : e);
}
}
}
export default new ChannelsRoutes();

View File

@@ -1,58 +0,0 @@
import config from '../../config';
import { Application, Request, Response } from 'express';
import nodesApi from './nodes.api';
import channelsApi from './channels.api';
import statisticsApi from './statistics.api';
class GeneralLightningRoutes {
constructor() { }
public initRoutes(app: Application) {
app
.get(config.MEMPOOL.API_URL_PREFIX + 'lightning/search', this.$searchNodesAndChannels)
.get(config.MEMPOOL.API_URL_PREFIX + 'lightning/statistics/latest', this.$getGeneralStats)
.get(config.MEMPOOL.API_URL_PREFIX + 'lightning/statistics/:interval', this.$getStatistics)
;
}
private async $searchNodesAndChannels(req: Request, res: Response) {
if (typeof req.query.searchText !== 'string') {
res.status(400).send('Missing parameter: searchText');
return;
}
try {
const nodes = await nodesApi.$searchNodeByPublicKeyOrAlias(req.query.searchText);
const channels = await channelsApi.$searchChannelsById(req.query.searchText);
res.json({
nodes: nodes,
channels: channels,
});
} catch (e) {
res.status(500).send(e instanceof Error ? e.message : e);
}
}
private async $getStatistics(req: Request, res: Response) {
try {
const statistics = await statisticsApi.$getStatistics(req.params.interval);
const statisticsCount = await statisticsApi.$getStatisticsCount();
res.header('Pragma', 'public');
res.header('Cache-control', 'public');
res.header('X-total-count', statisticsCount.toString());
res.setHeader('Expires', new Date(Date.now() + 1000 * 60).toUTCString());
res.json(statistics);
} catch (e) {
res.status(500).send(e instanceof Error ? e.message : e);
}
}
private async $getGeneralStats(req: Request, res: Response) {
try {
const statistics = await statisticsApi.$getLatestStatistics();
res.json(statistics);
} catch (e) {
res.status(500).send(e instanceof Error ? e.message : e);
}
}
}
export default new GeneralLightningRoutes();

View File

@@ -1,753 +0,0 @@
import logger from '../../logger';
import DB from '../../database';
import { ResultSetHeader } from 'mysql2';
import { ILightningApi } from '../lightning/lightning-api.interface';
import { ITopNodesPerCapacity, ITopNodesPerChannels } from '../../mempool.interfaces';
import { bin2hex } from '../../utils/format';
class NodesApi {
public async $getWorldNodes(): Promise<any> {
try {
let query = `
SELECT nodes.public_key as publicKey, IF(nodes.alias = '', SUBSTRING(nodes.public_key, 1, 20), alias) as alias,
CAST(COALESCE(nodes.capacity, 0) as INT) as capacity,
CAST(COALESCE(nodes.channels, 0) as INT) as channels,
nodes.longitude, nodes.latitude,
geo_names_country.names as country, geo_names_iso.names as isoCode
FROM nodes
JOIN geo_names geo_names_country ON geo_names_country.id = nodes.country_id AND geo_names_country.type = 'country'
JOIN geo_names geo_names_iso ON geo_names_iso.id = nodes.country_id AND geo_names_iso.type = 'country_iso_code'
WHERE status = 1 AND nodes.as_number IS NOT NULL
ORDER BY capacity
`;
const [nodes]: any[] = await DB.query(query);
for (let i = 0; i < nodes.length; ++i) {
nodes[i].country = JSON.parse(nodes[i].country);
}
query = `
SELECT MAX(nodes.capacity) as maxLiquidity, MAX(nodes.channels) as maxChannels
FROM nodes
WHERE status = 1 AND nodes.as_number IS NOT NULL
`;
const [maximums]: any[] = await DB.query(query);
return {
maxLiquidity: maximums[0].maxLiquidity,
maxChannels: maximums[0].maxChannels,
nodes: nodes.map(node => [
node.longitude, node.latitude,
node.publicKey, node.alias, node.capacity, node.channels,
node.country, node.isoCode
])
};
} catch (e) {
logger.err(`Can't get world nodes list. Reason: ${e instanceof Error ? e.message : e}`);
}
}
public async $getNode(public_key: string): Promise<any> {
try {
// General info
let query = `
SELECT public_key, alias, UNIX_TIMESTAMP(first_seen) AS first_seen,
UNIX_TIMESTAMP(updated_at) AS updated_at, color, sockets as sockets,
as_number, city_id, country_id, subdivision_id, longitude, latitude,
geo_names_iso.names as iso_code, geo_names_as.names as as_organization, geo_names_city.names as city,
geo_names_country.names as country, geo_names_subdivision.names as subdivision,
features
FROM nodes
LEFT JOIN geo_names geo_names_as on geo_names_as.id = as_number
LEFT JOIN geo_names geo_names_city on geo_names_city.id = city_id
LEFT JOIN geo_names geo_names_subdivision on geo_names_subdivision.id = subdivision_id
LEFT JOIN geo_names geo_names_country on geo_names_country.id = country_id
LEFT JOIN geo_names geo_names_iso ON geo_names_iso.id = nodes.country_id AND geo_names_iso.type = 'country_iso_code'
WHERE public_key = ?
`;
let [rows]: any[] = await DB.query(query, [public_key]);
if (rows.length === 0) {
throw new Error(`This node does not exist, or our node is not seeing it yet`);
}
const node = rows[0];
node.as_organization = JSON.parse(node.as_organization);
node.subdivision = JSON.parse(node.subdivision);
node.city = JSON.parse(node.city);
node.country = JSON.parse(node.country);
// Features
node.features = JSON.parse(node.features);
node.featuresBits = null;
if (node.features) {
let maxBit = 0;
for (const feature of node.features) {
maxBit = Math.max(maxBit, feature.bit);
}
maxBit = Math.ceil(maxBit / 4) * 4 - 1;
node.featuresBits = new Array(maxBit + 1).fill(0);
for (const feature of node.features) {
node.featuresBits[feature.bit] = 1;
}
node.featuresBits = bin2hex(node.featuresBits.reverse().join(''));
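// Worked example, assuming bin2hex() maps each 4-bit group to one hex digit:
// feature bits {0, 5, 9} give maxBit 9, rounded up to 11, so a 12-slot array
// [1,0,0,0,0,1,0,0,0,1,0,0]; reversed that is "001000100001", i.e. featuresBits "221".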
}
// Active channels and capacity
const activeChannelsStats: any = await this.$getActiveChannelsStats(public_key);
node.active_channel_count = activeChannelsStats.active_channel_count ?? 0;
node.capacity = activeChannelsStats.capacity ?? 0;
// Opened channels count
query = `
SELECT count(short_id) as opened_channel_count
FROM channels
WHERE status != 2 AND (channels.node1_public_key = ? OR channels.node2_public_key = ?)
`;
[rows] = await DB.query(query, [public_key, public_key]);
node.opened_channel_count = 0;
if (rows.length > 0) {
node.opened_channel_count = rows[0].opened_channel_count;
}
// Closed channels count
query = `
SELECT count(short_id) as closed_channel_count
FROM channels
WHERE status = 2 AND (channels.node1_public_key = ? OR channels.node2_public_key = ?)
`;
[rows] = await DB.query(query, [public_key, public_key]);
node.closed_channel_count = 0;
if (rows.length > 0) {
node.closed_channel_count = rows[0].closed_channel_count;
}
// Custom records
query = `
SELECT type, payload
FROM nodes_records
WHERE public_key = ?
`;
[rows] = await DB.query(query, [public_key]);
node.custom_records = {};
for (const record of rows) {
node.custom_records[record.type] = Buffer.from(record.payload, 'binary').toString('hex');
}
return node;
} catch (e) {
logger.err(`Cannot get node information for ${public_key}. Reason: ${(e instanceof Error ? e.message : e)}`);
throw e;
}
}
public async $getActiveChannelsStats(node_public_key: string): Promise<unknown> {
const query = `
SELECT count(short_id) as active_channel_count, sum(capacity) as capacity
FROM channels
WHERE status = 1 AND (channels.node1_public_key = ? OR channels.node2_public_key = ?)
`;
const [rows]: any[] = await DB.query(query, [node_public_key, node_public_key]);
if (rows.length > 0) {
return {
active_channel_count: rows[0].active_channel_count,
capacity: rows[0].capacity
};
} else {
return null;
}
}
public async $getFeeHistogram(node_public_key: string): Promise<unknown> {
try {
const inQuery = `
SELECT CASE WHEN fee_rate <= 10.0 THEN CEIL(fee_rate)
WHEN (fee_rate > 10.0 and fee_rate <= 100.0) THEN CEIL(fee_rate / 10.0) * 10.0
WHEN (fee_rate > 100.0 and fee_rate <= 1000.0) THEN CEIL(fee_rate / 100.0) * 100.0
WHEN fee_rate > 1000.0 THEN CEIL(fee_rate / 1000.0) * 1000.0
END as bucket,
count(short_id) as count,
sum(capacity) as capacity
FROM (
SELECT CASE WHEN node1_public_key = ? THEN node2_fee_rate WHEN node2_public_key = ? THEN node1_fee_rate END as fee_rate,
short_id as short_id,
capacity as capacity
FROM channels
WHERE status = 1 AND (channels.node1_public_key = ? OR channels.node2_public_key = ?)
) as fee_rate_table
GROUP BY bucket;
`;
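// Bucket boundaries from the CASE above: fee_rate 7 maps to bucket 7, 42 to 50, 250 to 300, 1500 to 2000.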
const [inRows]: any[] = await DB.query(inQuery, [node_public_key, node_public_key, node_public_key, node_public_key]);
const outQuery = `
SELECT CASE WHEN fee_rate <= 10.0 THEN CEIL(fee_rate)
WHEN (fee_rate > 10.0 and fee_rate <= 100.0) THEN CEIL(fee_rate / 10.0) * 10.0
WHEN (fee_rate > 100.0 and fee_rate <= 1000.0) THEN CEIL(fee_rate / 100.0) * 100.0
WHEN fee_rate > 1000.0 THEN CEIL(fee_rate / 1000.0) * 1000.0
END as bucket,
count(short_id) as count,
sum(capacity) as capacity
FROM (
SELECT CASE WHEN node1_public_key = ? THEN node1_fee_rate WHEN node2_public_key = ? THEN node2_fee_rate END as fee_rate,
short_id as short_id,
capacity as capacity
FROM channels
WHERE status = 1 AND (channels.node1_public_key = ? OR channels.node2_public_key = ?)
) as fee_rate_table
GROUP BY bucket;
`;
const [outRows]: any[] = await DB.query(outQuery, [node_public_key, node_public_key, node_public_key, node_public_key]);
return {
incoming: inRows.length > 0 ? inRows : [],
outgoing: outRows.length > 0 ? outRows : [],
};
} catch (e) {
logger.err(`Cannot get node fee distribution for ${node_public_key}. Reason: ${(e instanceof Error ? e.message : e)}`);
throw e;
}
}
public async $getAllNodes(): Promise<any> {
try {
const query = `SELECT * FROM nodes`;
const [rows]: any = await DB.query(query);
return rows;
} catch (e) {
logger.err('$getAllNodes error: ' + (e instanceof Error ? e.message : e));
throw e;
}
}
public async $getNodeStats(public_key: string): Promise<any> {
try {
const query = `
SELECT UNIX_TIMESTAMP(added) AS added, capacity, channels
FROM node_stats
WHERE public_key = ?
ORDER BY added DESC
`;
const [rows]: any = await DB.query(query, [public_key]);
return rows;
} catch (e) {
logger.err('$getNodeStats error: ' + (e instanceof Error ? e.message : e));
throw e;
}
}
public async $getTopCapacityNodes(full: boolean): Promise<ITopNodesPerCapacity[]> {
try {
let rows: any;
let query: string;
if (full === false) {
query = `
SELECT nodes.public_key AS publicKey, IF(nodes.alias = '', SUBSTRING(nodes.public_key, 1, 20), alias) as alias,
nodes.capacity
FROM nodes
ORDER BY capacity DESC
LIMIT 6
`;
[rows] = await DB.query(query);
} else {
query = `
SELECT nodes.public_key AS publicKey, IF(nodes.alias = '', SUBSTRING(nodes.public_key, 1, 20), alias) as alias,
CAST(COALESCE(nodes.capacity, 0) as INT) as capacity,
CAST(COALESCE(nodes.channels, 0) as INT) as channels,
UNIX_TIMESTAMP(nodes.first_seen) as firstSeen, UNIX_TIMESTAMP(nodes.updated_at) as updatedAt,
geo_names_city.names as city, geo_names_country.names as country,
geo_names_iso.names as iso_code, geo_names_subdivision.names as subdivision
FROM nodes
LEFT JOIN geo_names geo_names_country ON geo_names_country.id = nodes.country_id AND geo_names_country.type = 'country'
LEFT JOIN geo_names geo_names_city ON geo_names_city.id = nodes.city_id AND geo_names_city.type = 'city'
LEFT JOIN geo_names geo_names_iso ON geo_names_iso.id = nodes.country_id AND geo_names_iso.type = 'country_iso_code'
LEFT JOIN geo_names geo_names_subdivision on geo_names_subdivision.id = nodes.subdivision_id AND geo_names_subdivision.type = 'division'
ORDER BY capacity DESC
LIMIT 100
`;
[rows] = await DB.query(query);
for (let i = 0; i < rows.length; ++i) {
rows[i].country = JSON.parse(rows[i].country);
rows[i].city = JSON.parse(rows[i].city);
}
}
return rows;
} catch (e) {
logger.err('$getTopCapacityNodes error: ' + (e instanceof Error ? e.message : e));
throw e;
}
}
public async $getTopChannelsNodes(full: boolean): Promise<ITopNodesPerChannels[]> {
try {
let rows: any;
let query: string;
if (full === false) {
query = `
SELECT
nodes.public_key as publicKey,
IF(nodes.alias = '', SUBSTRING(nodes.public_key, 1, 20), alias) as alias,
nodes.channels,
geo_names_city.names as city, geo_names_country.names as country,
geo_names_iso.names as iso_code, geo_names_subdivision.names as subdivision
FROM nodes
LEFT JOIN geo_names geo_names_country ON geo_names_country.id = nodes.country_id AND geo_names_country.type = 'country'
LEFT JOIN geo_names geo_names_city ON geo_names_city.id = nodes.city_id AND geo_names_city.type = 'city'
LEFT JOIN geo_names geo_names_iso ON geo_names_iso.id = nodes.country_id AND geo_names_iso.type = 'country_iso_code'
LEFT JOIN geo_names geo_names_subdivision on geo_names_subdivision.id = nodes.subdivision_id AND geo_names_subdivision.type = 'division'
ORDER BY channels DESC
LIMIT 6;
`;
[rows] = await DB.query(query);
for (let i = 0; i < rows.length; ++i) {
rows[i].country = JSON.parse(rows[i].country);
rows[i].city = JSON.parse(rows[i].city);
}
} else {
query = `
SELECT nodes.public_key AS publicKey, IF(nodes.alias = '', SUBSTRING(nodes.public_key, 1, 20), alias) as alias,
CAST(COALESCE(nodes.channels, 0) as INT) as channels,
CAST(COALESCE(nodes.capacity, 0) as INT) as capacity,
UNIX_TIMESTAMP(nodes.first_seen) as firstSeen, UNIX_TIMESTAMP(nodes.updated_at) as updatedAt,
geo_names_city.names as city, geo_names_country.names as country,
geo_names_iso.names as iso_code, geo_names_subdivision.names as subdivision
FROM nodes
LEFT JOIN geo_names geo_names_country ON geo_names_country.id = nodes.country_id AND geo_names_country.type = 'country'
LEFT JOIN geo_names geo_names_city ON geo_names_city.id = nodes.city_id AND geo_names_city.type = 'city'
LEFT JOIN geo_names geo_names_iso ON geo_names_iso.id = nodes.country_id AND geo_names_iso.type = 'country_iso_code'
LEFT JOIN geo_names geo_names_subdivision on geo_names_subdivision.id = nodes.subdivision_id AND geo_names_subdivision.type = 'division'
ORDER BY channels DESC
LIMIT 100
`;
[rows] = await DB.query(query);
for (let i = 0; i < rows.length; ++i) {
rows[i].country = JSON.parse(rows[i].country);
rows[i].city = JSON.parse(rows[i].city);
}
}
return rows;
} catch (e) {
logger.err('$getTopChannelsNodes error: ' + (e instanceof Error ? e.message : e));
throw e;
}
}
public async $getOldestNodes(full: boolean): Promise<ITopNodesPerChannels[]> {
try {
let [rows]: any[] = await DB.query('SELECT UNIX_TIMESTAMP(MAX(added)) as maxAdded FROM node_stats');
const latestDate = rows[0].maxAdded;
let query: string;
if (full === false) {
query = `
SELECT nodes.public_key, IF(nodes.alias = '', SUBSTRING(nodes.public_key, 1, 20), alias) as alias,
node_stats.channels
FROM node_stats
JOIN nodes ON nodes.public_key = node_stats.public_key
WHERE added = FROM_UNIXTIME(${latestDate})
ORDER BY first_seen
LIMIT 100;
`;
[rows] = await DB.query(query);
} else {
query = `
SELECT node_stats.public_key AS publicKey, IF(nodes.alias = '', SUBSTRING(node_stats.public_key, 1, 20), alias) as alias,
CAST(COALESCE(node_stats.channels, 0) as INT) as channels,
CAST(COALESCE(node_stats.capacity, 0) as INT) as capacity,
UNIX_TIMESTAMP(nodes.first_seen) as firstSeen, UNIX_TIMESTAMP(nodes.updated_at) as updatedAt,
geo_names_city.names as city, geo_names_country.names as country,
geo_names_iso.names as iso_code, geo_names_subdivision.names as subdivision
FROM node_stats
RIGHT JOIN nodes ON nodes.public_key = node_stats.public_key
LEFT JOIN geo_names geo_names_country ON geo_names_country.id = nodes.country_id AND geo_names_country.type = 'country'
LEFT JOIN geo_names geo_names_city ON geo_names_city.id = nodes.city_id AND geo_names_city.type = 'city'
LEFT JOIN geo_names geo_names_iso ON geo_names_iso.id = nodes.country_id AND geo_names_iso.type = 'country_iso_code'
LEFT JOIN geo_names geo_names_subdivision on geo_names_subdivision.id = nodes.subdivision_id AND geo_names_subdivision.type = 'division'
WHERE added = FROM_UNIXTIME(${latestDate})
ORDER BY first_seen
LIMIT 100
`;
[rows] = await DB.query(query);
for (let i = 0; i < rows.length; ++i) {
rows[i].country = JSON.parse(rows[i].country);
rows[i].city = JSON.parse(rows[i].city);
}
}
return rows;
} catch (e) {
logger.err('$getOldestNodes error: ' + (e instanceof Error ? e.message : e));
throw e;
}
}
public async $searchNodeByPublicKeyOrAlias(search: string) {
try {
const publicKeySearch = search.replace(/[^a-zA-Z0-9]/g, '') + '%';
const aliasSearch = search
.replace(/[-_.]/g, ' ') // Replace all -_. characters with a space, e.g. "ln.nicehash" becomes "ln nicehash".
.replace(/[^a-zA-Z0-9 ]/g, '') // Remove all special characters and keep just A to Z, 0 to 9.
.split(' ')
.filter(key => key.length)
.map((search) => '+' + search + '*').join(' ');
// A %keyword% wildcard search can't use an index, so it gets slower as the node database grows. keyword% can be indexed, but then searching for "Nicehash" won't match "ln.nicehash.com". So we use a fulltext index over the words "ln", "nicehash", "com", and "nicehash*" finds it instantly.
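// e.g. searching "ln.nicehash" yields publicKeySearch "lnnicehash%" and aliasSearch "+ln* +nicehash*".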
const query = `SELECT public_key, alias, capacity, channels, status FROM nodes WHERE public_key LIKE ? OR MATCH alias_search AGAINST (? IN BOOLEAN MODE) ORDER BY capacity DESC LIMIT 10`;
const [rows]: any = await DB.query(query, [publicKeySearch, aliasSearch]);
return rows;
} catch (e) {
logger.err('$searchNodeByPublicKeyOrAlias error: ' + (e instanceof Error ? e.message : e));
throw e;
}
}
public async $getNodesISPRanking() {
try {
let query = '';
// List all channels and the two linked ISP
query = `
SELECT short_id, channels.capacity,
channels.node1_public_key AS node1PublicKey, isp1.names AS isp1, isp1.id as isp1ID,
channels.node2_public_key AS node2PublicKey, isp2.names AS isp2, isp2.id as isp2ID
FROM channels
JOIN nodes node1 ON node1.public_key = channels.node1_public_key
JOIN nodes node2 ON node2.public_key = channels.node2_public_key
JOIN geo_names isp1 ON isp1.id = node1.as_number
JOIN geo_names isp2 ON isp2.id = node2.as_number
WHERE channels.status = 1
ORDER BY short_id DESC
`;
const [channelsIsp]: any = await DB.query(query);
// Sum channels capacity and node count per ISP
const ispList = {};
for (const channel of channelsIsp) {
const isp1 = JSON.parse(channel.isp1);
const isp2 = JSON.parse(channel.isp2);
if (!ispList[isp1]) {
ispList[isp1] = {
ids: [channel.isp1ID],
capacity: 0,
channels: 0,
nodes: {},
};
} else if (ispList[isp1].ids.includes(channel.isp1ID) === false) {
ispList[isp1].ids.push(channel.isp1ID);
}
if (!ispList[isp2]) {
ispList[isp2] = {
ids: [channel.isp2ID],
capacity: 0,
channels: 0,
nodes: {},
};
} else if (ispList[isp2].ids.includes(channel.isp2ID) === false) {
ispList[isp2].ids.push(channel.isp2ID);
}
ispList[isp1].capacity += channel.capacity;
ispList[isp1].channels += 1;
ispList[isp1].nodes[channel.node1PublicKey] = true;
ispList[isp2].capacity += channel.capacity;
ispList[isp2].channels += 1;
ispList[isp2].nodes[channel.node2PublicKey] = true;
}
const ispRanking: any[] = [];
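// Each ranking entry is a tuple: [ispIds, ispName, totalCapacity, channelCount, nodeCount].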
for (const isp of Object.keys(ispList)) {
ispRanking.push([
ispList[isp].ids.sort((a, b) => a - b).join(','),
isp,
ispList[isp].capacity,
ispList[isp].channels,
Object.keys(ispList[isp].nodes).length,
]);
}
// Total active channels capacity
query = `SELECT SUM(capacity) AS capacity FROM channels WHERE status = 1`;
const [totalCapacity]: any = await DB.query(query);
// Get the total capacity of all channels which have at least one node on clearnet
query = `
SELECT SUM(capacity) as capacity
FROM (
SELECT capacity, GROUP_CONCAT(socket1.type, socket2.type) as networks
FROM channels
JOIN nodes_sockets socket1 ON node1_public_key = socket1.public_key
JOIN nodes_sockets socket2 ON node2_public_key = socket2.public_key
AND channels.status = 1
GROUP BY short_id
) channels_tmp
WHERE channels_tmp.networks LIKE '%ipv%'
`;
const [clearnetCapacity]: any = await DB.query(query);
// Get the total capacity of all channels which have both nodes on Tor
query = `
SELECT SUM(capacity) as capacity
FROM (
SELECT capacity, GROUP_CONCAT(socket1.type, socket2.type) as networks
FROM channels
JOIN nodes_sockets socket1 ON node1_public_key = socket1.public_key
JOIN nodes_sockets socket2 ON node2_public_key = socket2.public_key
AND channels.status = 1
GROUP BY short_id
) channels_tmp
WHERE channels_tmp.networks NOT LIKE '%ipv%' AND
channels_tmp.networks NOT LIKE '%dns%' AND
channels_tmp.networks NOT LIKE '%websocket%'
`;
const [torCapacity]: any = await DB.query(query);
const clearnetCapacityValue = parseInt(clearnetCapacity[0].capacity, 10);
const torCapacityValue = parseInt(torCapacity[0].capacity, 10);
const unknownCapacityValue = parseInt(totalCapacity[0].capacity, 10) - clearnetCapacityValue - torCapacityValue;
return {
clearnetCapacity: clearnetCapacityValue,
torCapacity: torCapacityValue,
unknownCapacity: unknownCapacityValue,
ispRanking: ispRanking,
};
} catch (e) {
logger.err(`Cannot get LN ISP ranking. Reason: ${e instanceof Error ? e.message : e}`);
throw e;
}
}
public async $getNodesPerCountry(countryId: string) {
try {
const query = `
SELECT nodes.public_key, CAST(COALESCE(nodes.capacity, 0) as INT) as capacity, CAST(COALESCE(nodes.channels, 0) as INT) as channels,
nodes.alias, UNIX_TIMESTAMP(nodes.first_seen) as first_seen, UNIX_TIMESTAMP(nodes.updated_at) as updated_at,
geo_names_city.names as city, geo_names_country.names as country,
geo_names_iso.names as iso_code, geo_names_subdivision.names as subdivision,
nodes.longitude, nodes.latitude, nodes.as_number, geo_names_isp.names as isp
FROM nodes
LEFT JOIN geo_names geo_names_country ON geo_names_country.id = nodes.country_id AND geo_names_country.type = 'country'
LEFT JOIN geo_names geo_names_city ON geo_names_city.id = nodes.city_id AND geo_names_city.type = 'city'
LEFT JOIN geo_names geo_names_iso ON geo_names_iso.id = nodes.country_id AND geo_names_iso.type = 'country_iso_code'
LEFT JOIN geo_names geo_names_subdivision on geo_names_subdivision.id = nodes.subdivision_id AND geo_names_subdivision.type = 'division'
LEFT JOIN geo_names geo_names_isp on geo_names_isp.id = nodes.as_number AND geo_names_isp.type = 'as_organization'
WHERE geo_names_country.id = ?
ORDER BY capacity DESC
`;
const [rows]: any = await DB.query(query, [countryId]);
for (let i = 0; i < rows.length; ++i) {
rows[i].country = JSON.parse(rows[i].country);
rows[i].city = JSON.parse(rows[i].city);
rows[i].subdivision = JSON.parse(rows[i].subdivision);
rows[i].isp = JSON.parse(rows[i].isp);
}
return rows;
} catch (e) {
logger.err(`Cannot get nodes for country id ${countryId}. Reason: ${e instanceof Error ? e.message : e}`);
throw e;
}
}
public async $getNodesPerISP(ISPId: string) {
try {
let query = `
SELECT channels.node1_public_key AS node1PublicKey, isp1.id as isp1ID,
channels.node2_public_key AS node2PublicKey, isp2.id as isp2ID
FROM channels
JOIN nodes node1 ON node1.public_key = channels.node1_public_key
JOIN nodes node2 ON node2.public_key = channels.node2_public_key
JOIN geo_names isp1 ON isp1.id = node1.as_number
JOIN geo_names isp2 ON isp2.id = node2.as_number
WHERE channels.status = 1 AND (node1.as_number IN (?) OR node2.as_number IN (?))
ORDER BY short_id DESC
`;
const ispIds = ISPId.split(',');
const [rows]: any = await DB.query(query, [ispIds, ispIds]);
if (!rows || rows.length === 0) {
return [];
}
const nodes = {};
const intISPIds: number[] = [];
for (const ispId of ispIds) {
intISPIds.push(parseInt(ispId, 10));
}
for (const channel of rows) {
if (intISPIds.includes(channel.isp1ID)) {
nodes[channel.node1PublicKey] = true;
}
if (intISPIds.includes(channel.isp2ID)) {
nodes[channel.node2PublicKey] = true;
}
}
query = `
SELECT nodes.public_key, CAST(COALESCE(nodes.capacity, 0) as INT) as capacity, CAST(COALESCE(nodes.channels, 0) as INT) as channels,
nodes.alias, UNIX_TIMESTAMP(nodes.first_seen) as first_seen, UNIX_TIMESTAMP(nodes.updated_at) as updated_at,
geo_names_city.names as city, geo_names_country.names as country,
geo_names_iso.names as iso_code, geo_names_subdivision.names as subdivision,
nodes.longitude, nodes.latitude
FROM nodes
LEFT JOIN geo_names geo_names_country ON geo_names_country.id = nodes.country_id AND geo_names_country.type = 'country'
LEFT JOIN geo_names geo_names_city ON geo_names_city.id = nodes.city_id AND geo_names_city.type = 'city'
LEFT JOIN geo_names geo_names_iso ON geo_names_iso.id = nodes.country_id AND geo_names_iso.type = 'country_iso_code'
LEFT JOIN geo_names geo_names_subdivision on geo_names_subdivision.id = nodes.subdivision_id AND geo_names_subdivision.type = 'division'
WHERE nodes.public_key IN (?)
ORDER BY capacity DESC
`;
const [rows2]: any = await DB.query(query, [Object.keys(nodes)]);
for (let i = 0; i < rows2.length; ++i) {
rows2[i].country = JSON.parse(rows2[i].country);
rows2[i].city = JSON.parse(rows2[i].city);
rows2[i].subdivision = JSON.parse(rows2[i].subdivision);
}
return rows2;
} catch (e) {
logger.err(`Cannot get nodes for ISP id ${ISPId}. Reason: ${e instanceof Error ? e.message : e}`);
throw e;
}
}
public async $getNodesCountries() {
try {
let query = `SELECT geo_names.names as names, geo_names_iso.names as iso_code, COUNT(DISTINCT nodes.public_key) as nodesCount, SUM(capacity) as capacity
FROM nodes
JOIN geo_names ON geo_names.id = nodes.country_id AND geo_names.type = 'country'
JOIN geo_names geo_names_iso ON geo_names_iso.id = nodes.country_id AND geo_names_iso.type = 'country_iso_code'
GROUP BY country_id
ORDER BY COUNT(DISTINCT nodes.public_key) DESC
`;
const [nodesCountPerCountry]: any = await DB.query(query);
query = `SELECT COUNT(*) as total FROM nodes WHERE country_id IS NOT NULL`;
const [nodesWithAS]: any = await DB.query(query);
const nodesPerCountry: any[] = [];
for (const country of nodesCountPerCountry) {
nodesPerCountry.push({
name: JSON.parse(country.names),
iso: country.iso_code,
count: country.nodesCount,
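// Share of all nodes with a known country, as a percentage truncated to two decimals.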
share: Math.floor(country.nodesCount / nodesWithAS[0].total * 10000) / 100,
capacity: country.capacity,
})
}
return nodesPerCountry;
} catch (e) {
logger.err(`Cannot get nodes grouped by AS. Reason: ${e instanceof Error ? e.message : e}`);
throw e;
}
}
/**
* Save or update a node present in the graph
*/
public async $saveNode(node: ILightningApi.Node): Promise<void> {
try {
// https://github.com/mempool/mempool/issues/3006
if ((node.last_update ?? 0) < 1514736061) { // January 1st 2018
node.last_update = null;
}
const sockets = (node.addresses?.map(a => a.addr).join(',')) ?? '';
const query = `INSERT INTO nodes(
public_key,
first_seen,
updated_at,
alias,
alias_search,
color,
sockets,
status,
features
)
VALUES (?, NOW(), FROM_UNIXTIME(?), ?, ?, ?, ?, 1, ?)
ON DUPLICATE KEY UPDATE
updated_at = FROM_UNIXTIME(?),
alias = ?,
alias_search = ?,
color = ?,
sockets = ?,
status = 1,
features = ?
`;
await DB.query(query, [
node.pub_key,
node.last_update,
node.alias,
this.aliasToSearchText(node.alias),
node.color,
sockets,
JSON.stringify(node.features),
node.last_update,
node.alias,
this.aliasToSearchText(node.alias),
node.color,
sockets,
JSON.stringify(node.features),
]);
} catch (e) {
logger.err('$saveNode() error: ' + (e instanceof Error ? e.message : e));
}
}
/**
* Update node sockets
*/
public async $updateNodeSockets(publicKey: string, sockets: {network: string; addr: string}[]): Promise<void> {
const formattedSockets = (sockets.map(a => a.addr).join(',')) ?? '';
try {
await DB.query(`UPDATE nodes SET sockets = ? WHERE public_key = ?`, [formattedSockets, publicKey]);
} catch (e) {
logger.err(`Cannot update node sockets for ${publicKey}. Reason: ${e instanceof Error ? e.message : e}`);
}
}
/**
* Set all nodes not in `graphNodesPubkeys` as inactive (status = 0)
*/
public async $setNodesInactive(graphNodesPubkeys: string[]): Promise<void> {
if (graphNodesPubkeys.length === 0) {
return;
}
try {
const result = await DB.query<ResultSetHeader>(`
UPDATE nodes
SET status = 0
WHERE public_key NOT IN (
${graphNodesPubkeys.map(pubkey => `"${pubkey}"`).join(',')}
)
`);
if ((result[0].changedRows ?? 0) > 0) {
logger.debug(`Marked ${result[0].changedRows} nodes as inactive because they are not in the graph`, logger.tags.ln);
}
} catch (e) {
logger.err('$setNodesInactive() error: ' + (e instanceof Error ? e.message : e));
}
}
private aliasToSearchText(str: string): string {
return str.replace(/[-_.]/g, ' ').replace(/[^a-zA-Z0-9 ]/g, '');
}
}
export default new NodesApi();

View File

@@ -1,316 +0,0 @@
import config from '../../config';
import { Application, Request, Response } from 'express';
import nodesApi from './nodes.api';
import DB from '../../database';
import { INodesRanking } from '../../mempool.interfaces';
class NodesRoutes {
constructor() { }
public initRoutes(app: Application) {
app
.get(config.MEMPOOL.API_URL_PREFIX + 'lightning/nodes/world', this.$getWorldNodes)
.get(config.MEMPOOL.API_URL_PREFIX + 'lightning/nodes/country/:country', this.$getNodesPerCountry)
.get(config.MEMPOOL.API_URL_PREFIX + 'lightning/nodes/search/:search', this.$searchNode)
.get(config.MEMPOOL.API_URL_PREFIX + 'lightning/nodes/isp-ranking', this.$getISPRanking)
.get(config.MEMPOOL.API_URL_PREFIX + 'lightning/nodes/isp/:isp', this.$getNodesPerISP)
.get(config.MEMPOOL.API_URL_PREFIX + 'lightning/nodes/countries', this.$getNodesCountries)
.get(config.MEMPOOL.API_URL_PREFIX + 'lightning/nodes/rankings', this.$getNodesRanking)
.get(config.MEMPOOL.API_URL_PREFIX + 'lightning/nodes/rankings/liquidity', this.$getTopNodesByCapacity)
.get(config.MEMPOOL.API_URL_PREFIX + 'lightning/nodes/rankings/connectivity', this.$getTopNodesByChannels)
.get(config.MEMPOOL.API_URL_PREFIX + 'lightning/nodes/rankings/age', this.$getOldestNodes)
.get(config.MEMPOOL.API_URL_PREFIX + 'lightning/nodes/:public_key/statistics', this.$getHistoricalNodeStats)
.get(config.MEMPOOL.API_URL_PREFIX + 'lightning/nodes/:public_key/fees/histogram', this.$getFeeHistogram)
.get(config.MEMPOOL.API_URL_PREFIX + 'lightning/nodes/:public_key', this.$getNode)
.get(config.MEMPOOL.API_URL_PREFIX + 'lightning/nodes/group/:name', this.$getNodeGroup)
;
}
private async $searchNode(req: Request, res: Response) {
try {
const nodes = await nodesApi.$searchNodeByPublicKeyOrAlias(req.params.search);
res.json(nodes);
} catch (e) {
res.status(500).send(e instanceof Error ? e.message : e);
}
}
private async $getNodeGroup(req: Request, res: Response) {
try {
let nodesList;
let nodes: any[] = [];
switch (config.MEMPOOL.NETWORK) {
case 'testnet':
nodesList = [
'032c7c7819276c4f706a04df1a0f1e10a5495994a7be4c1d3d28ca766e5a2b957b',
'025a7e38c2834dd843591a4d23d5f09cdeb77ddca85f673c2d944a14220ff14cf7',
'0395e2731a1673ef21d7a16a727c4fc4d4c35a861c428ce2c819c53d2b81c8bd55',
'032ab2028c0b614c6d87824e2373529652fd7e4221b4c70cc4da7c7005c49afcf0',
'029001b22fe70b48bee12d014df91982eb85ff1bd404ec772d5c83c4ee3e88d2c3',
'0212e2848d79f928411da5f2ff0a8c95ec6ccb5a09d2031b6f71e91309dcde63af',
'03e871a2229523d34f76e6311ff197cfe7f26c2fbec13554b93a46f4e710c47dab',
'032202ec98d976b0e928bd1d91924e8bd3eab07231fc39feb3737b010071073df8',
'02fa7c5a948d03d563a9f36940c2205a814e594d17c0042ced242c71a857d72605',
'039c14fdec2d958e3d14cebf657451bbd9e039196615785e82c917f274e3fb2205',
'033589bbcb233ffc416cefd5437c7f37e9d7cb7942d405e39e72c4c846d9b37f18',
'029293110441c6e2eacb57e1255bf6ef05c41a6a676fe474922d33c19f98a7d584',
'0235ad0b56ed8c42c4354444c24e971c05e769ec0b5fb0ccea42880095dc02ea2c',
'029700819a37afea630f80e6cc461f3fd3c4ace2598a21cfbbe64d1c78d0ee69a5',
'02c2d8b2dbf87c7894af2f1d321290e2fe6db5446cd35323987cee98f06e2e0075',
'030b0ca1ea7b1075716d2a555630e6fd47ef11bc7391fe68963ec06cf370a5e382',
'031adb9eb2d66693f85fa31a4adca0319ba68219f3ad5f9a2ef9b34a6b40755fa1',
'02ccd07faa47eda810ecf5591ccf5ca50f6c1034d0d175052898d32a00b9bae24f',
];
break;
case 'signet':
nodesList = [
'03ddab321b760433cbf561b615ef62ac7d318630c5f51d523aaf5395b90b751956',
'033d92c7bfd213ef1b34c90e985fb5dc77f9ec2409d391492484e57a44c4aca1de',
'02ad010dda54253c1eb9efe38b0760657a3b43ecad62198c359c051c9d99d45781',
'025196512905b8a3f1597428b867bec63ec9a95e5089eb7dc7e63e2d2691669029',
'027c625aa1fbe3768db68ebcb05b53b6dc0ce68b7b54b8900d326d167363e684fe',
'03f1629af3101fcc56b7aac2667016be84e3defbf3d0c8719f836c9b41c9a57a43',
'02dfb81e2f7a3c4c9e8a51b70ef82b4a24549cc2fab1f5b2fd636501774a918991',
'02d01ccf832944c68f10d39006093769c5b8bda886d561b128534e313d729fdb34',
'02499ed23027d4698a6904ff4ec1b6085a61f10b9a6937f90438f9947e38e8ea86',
'038310e3a786340f2bd7770704c7ccfe560fd163d9a1c99d67894597419d12cbf7',
'03e5e9d879b72c7d67ecd483bae023bd33e695bb32b981a4021260f7b9d62bc761',
'028d16e1a0ace4c0c0a421536d8d32ce484dfe6e2f726b7b0e7c30f12a195f8cc7',
'02ff690d06c187ab994bf83c5a2114fe5bf50112c2c817af0f788f736be9fa2070',
'02a9f570c51a2526a5ee85802e88f9281bed771eb66a0c8a7d898430dd5d0eae45',
'038c3de773255d3bd7a50e31e58d423baac5c90826a74d75e64b74c95475de1097',
'0242c7f7d315095f37ad1421ae0a2fc967d4cbe65b61b079c5395a769436959853',
'02a909e70eb03742f12666ebb1f56ac42a5fbaab0c0e8b5b1df4aa9f10f8a09240',
'03a26efa12489803c07f3ac2f1dba63812e38f0f6e866ce3ebb34df7de1f458cd2',
];
break;
default:
nodesList = [
'03fbc17549ec667bccf397ababbcb4cdc0e3394345e4773079ab2774612ec9be61',
'03da9a8623241ccf95f19cd645c6cecd4019ac91570e976eb0a128bebbc4d8a437',
'03ca5340cf85cb2e7cf076e489f785410838de174e40be62723e8a60972ad75144',
'0238bd27f02d67d6c51e269692bc8c9a32357a00e7777cba7f4f1f18a2a700b108',
'03f983dcabed6baa1eab5b56c8b2e8fdc846ab3fd931155377897335e85a9fa57c',
'03e399589533581e48796e29a825839a010036a61b20744fda929d6709fcbffcc5',
'021f5288b5f72c42cd0d8801086af7ce09a816d8ee9a4c47a4b436399b26cb601a',
'032b01b7585f781420cd4148841a82831ba37fa952342052cec16750852d4f2dd9',
'02848036488d4b8fb1f1c4064261ec36151f43b085f0b51bd239ade3ddfc940c34',
'02b6b1640fe029e304c216951af9fbefdb23b0bdc9baaf327540d31b6107841fdf',
'03694289827203a5b3156d753071ddd5bf92e371f5a462943f9555eef6d2d6606c',
'0283d850db7c3e8ea7cc9c4abc7afaab12bbdf72b677dcba1d608350d2537d7d43',
'02521287789f851268a39c9eccc9d6180d2c614315b583c9e6ae0addbd6d79df06',
'0258c2a7b7f8af2585b4411b1ec945f70988f30412bb1df179de941f14d0b1bc3e',
'03c3389ff1a896f84d921ed01a19fc99c6724ce8dc4b960cd3b7b2362b62cd60d7',
'038d118996b3eaa15dcd317b32a539c9ecfdd7698f204acf8a087336af655a9192',
'02a928903d93d78877dacc3642b696128a3636e9566dd42d2d132325b2c8891c09',
'0328cd17f3a9d3d90b532ade0d1a67e05eb8a51835b3dce0a2e38eac04b5a62a57',
];
}
for (const pubKey of nodesList) {
try {
const node = await nodesApi.$getNode(pubKey);
if (node) {
nodes.push(node);
}
} catch (e) {}
}
res.header('Pragma', 'public');
res.header('Cache-control', 'public');
res.setHeader('Expires', new Date(Date.now() + 1000 * 60).toUTCString());
res.json(nodes);
} catch (e) {
res.status(500).send(e instanceof Error ? e.message : e);
}
}
private async $getNode(req: Request, res: Response) {
try {
const node = await nodesApi.$getNode(req.params.public_key);
if (!node) {
res.status(404).send('Node not found');
return;
}
res.header('Pragma', 'public');
res.header('Cache-control', 'public');
res.setHeader('Expires', new Date(Date.now() + 1000 * 60).toUTCString());
res.json(node);
} catch (e) {
res.status(500).send(e instanceof Error ? e.message : e);
}
}
private async $getHistoricalNodeStats(req: Request, res: Response) {
try {
const statistics = await nodesApi.$getNodeStats(req.params.public_key);
res.header('Pragma', 'public');
res.header('Cache-control', 'public');
res.setHeader('Expires', new Date(Date.now() + 1000 * 60).toUTCString());
res.json(statistics);
} catch (e) {
res.status(500).send(e instanceof Error ? e.message : e);
}
}
private async $getFeeHistogram(req: Request, res: Response) {
try {
const node = await nodesApi.$getFeeHistogram(req.params.public_key);
if (!node) {
res.status(404).send('Node not found');
return;
}
res.header('Pragma', 'public');
res.header('Cache-control', 'public');
res.setHeader('Expires', new Date(Date.now() + 1000 * 60).toUTCString());
res.json(node);
} catch (e) {
res.status(500).send(e instanceof Error ? e.message : e);
}
}
private async $getNodesRanking(req: Request, res: Response): Promise<void> {
try {
const topCapacityNodes = await nodesApi.$getTopCapacityNodes(false);
const topChannelsNodes = await nodesApi.$getTopChannelsNodes(false);
res.header('Pragma', 'public');
res.header('Cache-control', 'public');
res.setHeader('Expires', new Date(Date.now() + 1000 * 60).toUTCString());
res.json(<INodesRanking>{
topByCapacity: topCapacityNodes,
topByChannels: topChannelsNodes,
});
} catch (e) {
res.status(500).send(e instanceof Error ? e.message : e);
}
}
private async $getTopNodesByCapacity(req: Request, res: Response): Promise<void> {
try {
const topCapacityNodes = await nodesApi.$getTopCapacityNodes(true);
res.header('Pragma', 'public');
res.header('Cache-control', 'public');
res.setHeader('Expires', new Date(Date.now() + 1000 * 60).toUTCString());
res.json(topCapacityNodes);
} catch (e) {
res.status(500).send(e instanceof Error ? e.message : e);
}
}
private async $getTopNodesByChannels(req: Request, res: Response): Promise<void> {
try {
const topCapacityNodes = await nodesApi.$getTopChannelsNodes(true);
res.header('Pragma', 'public');
res.header('Cache-control', 'public');
res.setHeader('Expires', new Date(Date.now() + 1000 * 60).toUTCString());
res.json(topCapacityNodes);
} catch (e) {
res.status(500).send(e instanceof Error ? e.message : e);
}
}
private async $getOldestNodes(req: Request, res: Response): Promise<void> {
try {
const topCapacityNodes = await nodesApi.$getOldestNodes(true);
res.header('Pragma', 'public');
res.header('Cache-control', 'public');
res.setHeader('Expires', new Date(Date.now() + 1000 * 60).toUTCString());
res.json(topCapacityNodes);
} catch (e) {
res.status(500).send(e instanceof Error ? e.message : e);
}
}
private async $getISPRanking(req: Request, res: Response): Promise<void> {
try {
const nodesPerAs = await nodesApi.$getNodesISPRanking();
res.header('Pragma', 'public');
res.header('Cache-control', 'public');
res.setHeader('Expires', new Date(Date.now() + 1000 * 600).toUTCString());
res.json(nodesPerAs);
} catch (e) {
res.status(500).send(e instanceof Error ? e.message : e);
}
}
private async $getWorldNodes(req: Request, res: Response) {
try {
const worldNodes = await nodesApi.$getWorldNodes();
res.header('Pragma', 'public');
res.header('Cache-control', 'public');
res.setHeader('Expires', new Date(Date.now() + 1000 * 600).toUTCString());
res.json(worldNodes);
} catch (e) {
res.status(500).send(e instanceof Error ? e.message : e);
}
}
private async $getNodesPerCountry(req: Request, res: Response) {
try {
const [country]: any[] = await DB.query(
`SELECT geo_names.id, geo_names_country.names as country_names
FROM geo_names
JOIN geo_names geo_names_country on geo_names.id = geo_names_country.id AND geo_names_country.type = 'country'
WHERE geo_names.type = 'country_iso_code' AND geo_names.names = ?`,
[req.params.country]
);
if (country.length === 0) {
res.status(404).send(`This country does not exist or does not host any lightning nodes on clearnet`);
return;
}
const nodes = await nodesApi.$getNodesPerCountry(country[0].id);
res.header('Pragma', 'public');
res.header('Cache-control', 'public');
res.setHeader('Expires', new Date(Date.now() + 1000 * 60).toUTCString());
res.json({
country: JSON.parse(country[0].country_names),
nodes: nodes,
});
} catch (e) {
res.status(500).send(e instanceof Error ? e.message : e);
}
}
private async $getNodesPerISP(req: Request, res: Response) {
try {
const [isp]: any[] = await DB.query(
`SELECT geo_names.names as isp_name
FROM geo_names
WHERE geo_names.type = 'as_organization' AND geo_names.id = ?`,
[req.params.isp]
);
if (isp.length === 0) {
res.status(404).send(`This ISP does not exist or does not host any lightning nodes on clearnet`);
return;
}
const nodes = await nodesApi.$getNodesPerISP(req.params.isp);
res.header('Pragma', 'public');
res.header('Cache-control', 'public');
res.setHeader('Expires', new Date(Date.now() + 1000 * 60).toUTCString());
res.json({
isp: JSON.parse(isp[0].isp_name),
nodes: nodes,
});
} catch (e) {
res.status(500).send(e instanceof Error ? e.message : e);
}
}
private async $getNodesCountries(req: Request, res: Response) {
try {
const nodesPerAs = await nodesApi.$getNodesCountries();
res.header('Pragma', 'public');
res.header('Cache-control', 'public');
res.setHeader('Expires', new Date(Date.now() + 1000 * 600).toUTCString());
res.json(nodesPerAs);
} catch (e) {
res.status(500).send(e instanceof Error ? e.message : e);
}
}
}
export default new NodesRoutes();

View File

@@ -1,53 +0,0 @@
import logger from '../../logger';
import DB from '../../database';
import { Common } from '../common';
class StatisticsApi {
public async $getStatistics(interval: string | null = null): Promise<any> {
interval = Common.getSqlInterval(interval);
let query = `SELECT UNIX_TIMESTAMP(added) AS added, channel_count, total_capacity,
tor_nodes, clearnet_nodes, unannounced_nodes, clearnet_tor_nodes
FROM lightning_stats`;
if (interval) {
query += ` WHERE added BETWEEN DATE_SUB(NOW(), INTERVAL ${interval}) AND NOW()`;
}
query += ` ORDER BY added DESC`;
try {
const [rows]: any = await DB.query(query);
return rows;
} catch (e) {
logger.err('$getStatistics error: ' + (e instanceof Error ? e.message : e));
throw e;
}
}
public async $getLatestStatistics(): Promise<any> {
try {
const [rows]: any = await DB.query(`SELECT * FROM lightning_stats ORDER BY added DESC LIMIT 1`);
const [rows2]: any = await DB.query(`SELECT * FROM lightning_stats WHERE DATE(added) = DATE(NOW() - INTERVAL 7 DAY)`);
return {
latest: rows[0],
previous: rows2[0],
};
} catch (e) {
logger.err('$getLatestStatistics error: ' + (e instanceof Error ? e.message : e));
throw e;
}
}
public async $getStatisticsCount(): Promise<number> {
try {
const [rows]: any = await DB.query(`SELECT count(*) as count FROM lightning_stats`);
return rows[0].count;
} catch (e) {
logger.err('$getStatisticsCount error: ' + (e instanceof Error ? e.message : e));
throw e;
}
}
}
export default new StatisticsApi();
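A consumption sketch under mild assumptions (the import path mirrors this file's location; '1w' is an assumed interval token accepted by Common.getSqlInterval(), which is not shown in this diff):

// Hypothetical caller of the statistics API above
import statisticsApi from './stats-api'; // path assumed

async function printChannelGrowth(): Promise<void> {
  const weekRows = await statisticsApi.$getStatistics('1w'); // rows come back newest-first (ORDER BY added DESC)
  const { latest, previous } = await statisticsApi.$getLatestStatistics();
  console.log(`channels now: ${latest.channel_count}, 7 days ago: ${previous?.channel_count}`);
}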


@@ -1,3 +1,4 @@
import config from '../config';
import { MempoolBlock } from '../mempool.interfaces';
import { Common } from './common';
import mempool from './mempool';
@@ -18,7 +19,6 @@ class FeeApi {
'fastestFee': this.defaultFee,
'halfHourFee': this.defaultFee,
'hourFee': this.defaultFee,
'economyFee': minimumFee,
'minimumFee': minimumFee,
};
}
@@ -31,7 +31,6 @@ class FeeApi {
'fastestFee': firstMedianFee,
'halfHourFee': secondMedianFee,
'hourFee': thirdMedianFee,
'economyFee': Math.min(2 * minimumFee, thirdMedianFee),
'minimumFee': minimumFee,
};
}
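A worked instance of the added economyFee rule, with illustrative numbers:

// Illustrative values only
const minimumFee = 5;      // mempool purge floor, sat/vB
const thirdMedianFee = 8;  // the hourFee estimate, sat/vB
const economyFee = Math.min(2 * minimumFee, thirdMedianFee); // min(10, 8) = 8

// With a deeper mempool, say thirdMedianFee = 30, the same rule yields min(10, 30) = 10:
// economyFee tracks twice the purge floor but never exceeds the hour fee.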


@@ -1,37 +0,0 @@
import fs from 'fs';
import path from 'path';
import { spawnSync } from 'child_process';
function getVersion(): string {
const packageJson = fs.readFileSync('package.json').toString();
return JSON.parse(packageJson).version;
}
function getGitCommit(): string {
if (process.env.MEMPOOL_COMMIT_HASH) {
return process.env.MEMPOOL_COMMIT_HASH;
} else {
const gitRevParse = spawnSync('git', ['rev-parse', '--short', 'HEAD']);
if (!gitRevParse.error) {
const output = gitRevParse.stdout.toString('utf-8').replace(/[\n\r\s]+$/, '');
if (output) {
return output;
} else {
console.log('Could not fetch git commit: No repo available');
}
} else if (gitRevParse.error.code === 'ENOENT') {
console.log('Could not fetch git commit: Command `git` is unavailable');
}
}
return '?';
}
const versionInfo = {
version: getVersion(),
gitCommit: getGitCommit()
}
fs.writeFileSync(
path.join(__dirname, 'version.json'),
JSON.stringify(versionInfo, null, 2) + "\n"
);
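Run at build time, this writes a version.json next to the compiled output. With illustrative values the generated file looks like:

{
  "version": "x.y.z",
  "gitCommit": "abc1234"
}

If neither MEMPOOL_COMMIT_HASH nor a git checkout is available, gitCommit falls back to '?'.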


@@ -0,0 +1,44 @@
import logger from '../logger';
import axios from 'axios';
import { IConversionRates } from '../mempool.interfaces';
import config from '../config';
class FiatConversion {
private conversionRates: IConversionRates = {
'USD': 0
};
private ratesChangedCallback: ((rates: IConversionRates) => void) | undefined;
constructor() { }
public setProgressChangedCallback(fn: (rates: IConversionRates) => void) {
this.ratesChangedCallback = fn;
}
public startService() {
logger.info('Starting currency rates service');
setInterval(this.updateCurrency.bind(this), 1000 * config.MEMPOOL.PRICE_FEED_UPDATE_INTERVAL);
this.updateCurrency();
}
public getConversionRates() {
return this.conversionRates;
}
private async updateCurrency(): Promise<void> {
try {
const response = await axios.get('https://price.bisq.wiz.biz/getAllMarketPrices', { timeout: 10000 });
const usd = response.data.data.find((item: any) => item.currencyCode === 'USD');
this.conversionRates = {
'USD': usd.price,
};
if (this.ratesChangedCallback) {
this.ratesChangedCallback(this.conversionRates);
}
} catch (e) {
logger.err('Error updating fiat conversion rates: ' + (e instanceof Error ? e.message : e));
}
}
}
export default new FiatConversion();
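A minimal usage sketch (import path assumed; note that startService() multiplies PRICE_FEED_UPDATE_INTERVAL by 1000, so the config value is read as seconds):

import { IConversionRates } from '../mempool.interfaces';
import fiatConversion from './fiat-conversion'; // hypothetical path

fiatConversion.setProgressChangedCallback((rates: IConversionRates) => {
  console.log(`USD price updated: ${rates.USD}`);
});
fiatConversion.startService(); // fires the callback after every successful fetch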


@@ -1,270 +0,0 @@
// Imported from https://github.com/shesek/lightning-client-js
'use strict';
const methods = [
'addgossip',
'autocleaninvoice',
'check',
'checkmessage',
'close',
'connect',
'createinvoice',
'createinvoicerequest',
'createoffer',
'createonion',
'decode',
'decodepay',
'delexpiredinvoice',
'delinvoice',
'delpay',
'dev-listaddrs',
'dev-rescan-outputs',
'disableoffer',
'disconnect',
'estimatefees',
'feerates',
'fetchinvoice',
'fundchannel',
'fundchannel_cancel',
'fundchannel_complete',
'fundchannel_start',
'fundpsbt',
'getchaininfo',
'getinfo',
'getlog',
'getrawblockbyheight',
'getroute',
'getsharedsecret',
'getutxout',
'help',
'invoice',
'keysend',
'legacypay',
'listchannels',
'listconfigs',
'listforwards',
'listfunds',
'listinvoices',
'listnodes',
'listoffers',
'listpays',
'listpeers',
'listsendpays',
'listtransactions',
'multifundchannel',
'multiwithdraw',
'newaddr',
'notifications',
'offer',
'offerout',
'openchannel_abort',
'openchannel_bump',
'openchannel_init',
'openchannel_signed',
'openchannel_update',
'pay',
'payersign',
'paystatus',
'ping',
'plugin',
'reserveinputs',
'sendinvoice',
'sendonion',
'sendonionmessage',
'sendpay',
'sendpsbt',
'sendrawtransaction',
'setchannelfee',
'signmessage',
'signpsbt',
'stop',
'txdiscard',
'txprepare',
'txsend',
'unreserveinputs',
'utxopsbt',
'waitanyinvoice',
'waitblockheight',
'waitinvoice',
'waitsendpay',
'withdraw'
];
import EventEmitter from 'events';
import { existsSync, statSync } from 'fs';
import { createConnection, Socket } from 'net';
import { homedir } from 'os';
import path from 'path';
import { createInterface, Interface } from 'readline';
import logger from '../../../logger';
import { AbstractLightningApi } from '../lightning-api-abstract-factory';
import { ILightningApi } from '../lightning-api.interface';
import { convertAndmergeBidirectionalChannels, convertNode } from './clightning-convert';
class LightningError extends Error {
type: string = 'lightning';
message: string = 'lightning-client error';
constructor(error: any) {
super();
this.type = error.type;
this.message = error.message;
}
}
const defaultRpcPath = path.join(homedir(), '.lightning');
const fStat = (...p: string[]) => statSync(path.join(...p));
const fExists = (...p: string[]) => existsSync(path.join(...p));
export default class CLightningClient extends EventEmitter implements AbstractLightningApi {
private rpcPath: string;
private reconnectWait: number;
private reconnectTimeout;
private reqcount: number;
private client: Socket;
private rl: Interface;
private clientConnectionPromise: Promise<unknown>;
constructor(rpcPath = defaultRpcPath) {
if (!path.isAbsolute(rpcPath)) {
throw new Error('The rpcPath must be an absolute path');
}
if (!fExists(rpcPath) || !fStat(rpcPath).isSocket()) {
// network directory provided, use the lightning-rpc within it
if (fExists(rpcPath, 'lightning-rpc')) {
rpcPath = path.join(rpcPath, 'lightning-rpc');
}
// main data directory provided, default to using the bitcoin mainnet subdirectory
// to be removed in v0.2.0
else if (fExists(rpcPath, 'bitcoin', 'lightning-rpc')) {
logger.warn(`${rpcPath}/lightning-rpc is missing, using the bitcoin mainnet subdirectory at ${rpcPath}/bitcoin instead.`, logger.tags.ln)
logger.warn(`specifying the main lightning data directory is deprecated, please specify the network directory explicitly.\n`, logger.tags.ln)
rpcPath = path.join(rpcPath, 'bitcoin', 'lightning-rpc')
}
}
logger.debug(`Connecting to ${rpcPath}`, logger.tags.ln);
super();
this.rpcPath = rpcPath;
this.reconnectWait = 0.5;
this.reconnectTimeout = null;
this.reqcount = 0;
const _self = this;
this.client = createConnection(rpcPath).on(
'error', () => {
_self.increaseWaitTime();
_self.reconnect();
}
);
this.rl = createInterface({ input: this.client }).on(
'error', () => {
_self.increaseWaitTime();
_self.reconnect();
}
);
this.clientConnectionPromise = new Promise<void>(resolve => {
_self.client.on('connect', () => {
logger.info(`CLightning client connected`, logger.tags.ln);
_self.reconnectWait = 1;
resolve();
});
_self.client.on('end', () => {
logger.err(`CLightning client connection closed, reconnecting`, logger.tags.ln);
_self.increaseWaitTime();
_self.reconnect();
});
_self.client.on('error', error => {
logger.err(`CLightning client connection error: ${error}`, logger.tags.ln);
_self.increaseWaitTime();
_self.reconnect();
});
});
this.rl.on('line', line => {
line = line.trim();
if (!line) {
return;
}
const data = JSON.parse(line);
_self.emit('res:' + data.id, data);
});
}
increaseWaitTime(): void {
if (this.reconnectWait >= 16) {
this.reconnectWait = 16;
} else {
this.reconnectWait *= 2;
}
}
reconnect(): void {
const _self = this;
if (this.reconnectTimeout) {
return;
}
this.reconnectTimeout = setTimeout(() => {
logger.debug(`Trying to reconnect...`, logger.tags.ln);
_self.client.connect(_self.rpcPath);
_self.reconnectTimeout = null;
}, this.reconnectWait * 1000);
}
call(method, args = []): Promise<any> {
const _self = this;
const callInt = ++this.reqcount;
const sendObj = {
jsonrpc: '2.0',
method,
params: args,
id: '' + callInt
};
// Wait for the client to connect
return this.clientConnectionPromise
.then(() => new Promise((resolve, reject) => {
// Wait for a response
this.once('res:' + callInt, res => res.error == null
? resolve(res.result)
: reject(new LightningError(res.error))
);
// Send the command
_self.client.write(JSON.stringify(sendObj));
}));
}
async $getNetworkGraph(): Promise<ILightningApi.NetworkGraph> {
const listnodes: any[] = await this.call('listnodes');
const listchannels: any[] = await this.call('listchannels');
const channelsList = await convertAndmergeBidirectionalChannels(listchannels['channels']);
return {
nodes: listnodes['nodes'].map(node => convertNode(node)),
edges: channelsList,
};
}
}
const protify = s => s.replace(/-([a-z])/g, m => m[1].toUpperCase());
methods.forEach(k => {
CLightningClient.prototype[protify(k)] = function (...args: any) {
return this.call(k, args);
};
});
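protify maps each dash-separated RPC name onto a camelCase prototype method that proxies to call(), so 'dev-listaddrs' becomes devListaddrs(). A usage sketch (the socket path is an assumption about the local lightningd layout, and the cast is needed because the proxied methods are attached at runtime rather than declared on the class):

// Hypothetical usage of the client above
const client = new CLightningClient('/home/user/.lightning/bitcoin/lightning-rpc');
client.call('getinfo').then(info => console.log(info.id)); // plain JSON-RPC call
(client as any).listnodes().then(res => console.log(res.nodes.length)); // generated wrapper for 'listnodes'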


@@ -1,267 +0,0 @@
import { ILightningApi } from '../lightning-api.interface';
import FundingTxFetcher from '../../../tasks/lightning/sync-tasks/funding-tx-fetcher';
import logger from '../../../logger';
import { Common } from '../../common';
import { hex2bin } from '../../../utils/format';
import config from '../../../config';
// https://github.com/lightningnetwork/lnd/blob/master/lnwire/features.go
export enum FeatureBits {
DataLossProtectRequired = 0,
DataLossProtectOptional = 1,
InitialRoutingSync = 3,
UpfrontShutdownScriptRequired = 4,
UpfrontShutdownScriptOptional = 5,
GossipQueriesRequired = 6,
GossipQueriesOptional = 7,
TLVOnionPayloadRequired = 8,
TLVOnionPayloadOptional = 9,
StaticRemoteKeyRequired = 12,
StaticRemoteKeyOptional = 13,
PaymentAddrRequired = 14,
PaymentAddrOptional = 15,
MPPRequired = 16,
MPPOptional = 17,
WumboChannelsRequired = 18,
WumboChannelsOptional = 19,
AnchorsRequired = 20,
AnchorsOptional = 21,
AnchorsZeroFeeHtlcTxRequired = 22,
AnchorsZeroFeeHtlcTxOptional = 23,
ShutdownAnySegwitRequired = 26,
ShutdownAnySegwitOptional = 27,
AMPRequired = 30,
AMPOptional = 31,
ExplicitChannelTypeRequired = 44,
ExplicitChannelTypeOptional = 45,
ScidAliasRequired = 46,
ScidAliasOptional = 47,
PaymentMetadataRequired = 48,
PaymentMetadataOptional = 49,
ZeroConfRequired = 50,
ZeroConfOptional = 51,
KeysendRequired = 54,
KeysendOptional = 55,
ScriptEnforcedLeaseRequired = 2022,
ScriptEnforcedLeaseOptional = 2023,
MaxBolt11Feature = 5114,
};
export const FeaturesMap = new Map<FeatureBits, string>([
[FeatureBits.DataLossProtectRequired, 'data-loss-protect'],
[FeatureBits.DataLossProtectOptional, 'data-loss-protect'],
[FeatureBits.InitialRoutingSync, 'initial-routing-sync'],
[FeatureBits.UpfrontShutdownScriptRequired, 'upfront-shutdown-script'],
[FeatureBits.UpfrontShutdownScriptOptional, 'upfront-shutdown-script'],
[FeatureBits.GossipQueriesRequired, 'gossip-queries'],
[FeatureBits.GossipQueriesOptional, 'gossip-queries'],
[FeatureBits.TLVOnionPayloadRequired, 'tlv-onion'],
[FeatureBits.TLVOnionPayloadOptional, 'tlv-onion'],
[FeatureBits.StaticRemoteKeyOptional, 'static-remote-key'],
[FeatureBits.StaticRemoteKeyRequired, 'static-remote-key'],
[FeatureBits.PaymentAddrOptional, 'payment-addr'],
[FeatureBits.PaymentAddrRequired, 'payment-addr'],
[FeatureBits.MPPOptional, 'multi-path-payments'],
[FeatureBits.MPPRequired, 'multi-path-payments'],
[FeatureBits.AnchorsRequired, 'anchor-commitments'],
[FeatureBits.AnchorsOptional, 'anchor-commitments'],
[FeatureBits.AnchorsZeroFeeHtlcTxRequired, 'anchors-zero-fee-htlc-tx'],
[FeatureBits.AnchorsZeroFeeHtlcTxOptional, 'anchors-zero-fee-htlc-tx'],
[FeatureBits.WumboChannelsRequired, 'wumbo-channels'],
[FeatureBits.WumboChannelsOptional, 'wumbo-channels'],
[FeatureBits.AMPRequired, 'amp'],
[FeatureBits.AMPOptional, 'amp'],
[FeatureBits.PaymentMetadataOptional, 'payment-metadata'],
[FeatureBits.PaymentMetadataRequired, 'payment-metadata'],
[FeatureBits.ExplicitChannelTypeOptional, 'explicit-commitment-type'],
[FeatureBits.ExplicitChannelTypeRequired, 'explicit-commitment-type'],
[FeatureBits.KeysendOptional, 'keysend'],
[FeatureBits.KeysendRequired, 'keysend'],
[FeatureBits.ScriptEnforcedLeaseRequired, 'script-enforced-lease'],
[FeatureBits.ScriptEnforcedLeaseOptional, 'script-enforced-lease'],
[FeatureBits.ScidAliasRequired, 'scid-alias'],
[FeatureBits.ScidAliasOptional, 'scid-alias'],
[FeatureBits.ZeroConfRequired, 'zero-conf'],
[FeatureBits.ZeroConfOptional, 'zero-conf'],
[FeatureBits.ShutdownAnySegwitRequired, 'shutdown-any-segwit'],
[FeatureBits.ShutdownAnySegwitOptional, 'shutdown-any-segwit'],
]);
/**
* Convert a clightning "listnode" entry to a lnd node entry
*/
export function convertNode(clNode: any): ILightningApi.Node {
let custom_records: { [type: number]: string } | undefined = undefined;
if (clNode.option_will_fund) {
try {
custom_records = { '1': Buffer.from(clNode.option_will_fund.compact_lease || '', 'hex').toString('base64') };
} catch (e) {
logger.err(`Cannot decode option_will_fund compact_lease for ${clNode.nodeid}. Reason: ` + (e instanceof Error ? e.message : e));
custom_records = undefined;
}
}
const nodeFeatures: ILightningApi.Feature[] = [];
const nodeFeaturesBinary = hex2bin(clNode.features).split('').reverse().join('');
for (let i = 0; i < nodeFeaturesBinary.length; i++) {
if (nodeFeaturesBinary[i] === '0') {
continue;
}
const feature = FeaturesMap.get(i);
if (!feature) {
nodeFeatures.push({
bit: i,
name: 'unknown',
is_required: i % 2 === 0,
is_known: false
});
} else {
nodeFeatures.push({
bit: i,
name: feature,
is_required: i % 2 === 0,
is_known: true
});
}
}
return {
alias: clNode.alias ?? '',
color: `#${clNode.color ?? ''}`,
features: nodeFeatures,
pub_key: clNode.nodeid,
addresses: clNode.addresses?.map((addr) => {
let address = addr.address;
if (addr.type === 'ipv6') {
address = `[${address}]`;
}
return {
network: addr.type,
addr: `${address}:${addr.port}`
};
}) ?? [],
last_update: clNode?.last_timestamp ?? 0,
custom_records
};
}
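Because the binary feature string is reversed, the string index equals the BOLT 9 feature bit, and i % 2 === 0 marks the required variant. A worked sketch, assuming hex2bin() expands each hex digit to four bits:

// clNode.features = '2'  ->  hex2bin('2') = '0010'  ->  reversed = '0100'
// index 1 is set  =>  FeaturesMap.get(1) = 'data-loss-protect'
// 1 % 2 === 0 is false  =>  is_required: false, i.e. the optional variant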
/**
* Convert clightning "listchannels" response to lnd "describegraph.edges" format
*/
export async function convertAndmergeBidirectionalChannels(clChannels: any[]): Promise<ILightningApi.Channel[]> {
logger.debug(`Converting clightning nodes and channels to lnd graph format`, logger.tags.ln);
let loggerTimer = new Date().getTime() / 1000;
let channelProcessed = 0;
const consolidatedChannelList: ILightningApi.Channel[] = [];
const clChannelsDict = {};
const clChannelsDictCount = {};
for (const clChannel of clChannels) {
if (!clChannelsDict[clChannel.short_channel_id]) {
clChannelsDict[clChannel.short_channel_id] = clChannel;
clChannelsDictCount[clChannel.short_channel_id] = 1;
} else {
const fullChannel = await buildFullChannel(clChannel, clChannelsDict[clChannel.short_channel_id]);
if (fullChannel !== null) {
consolidatedChannelList.push(fullChannel);
delete clChannelsDict[clChannel.short_channel_id];
clChannelsDictCount[clChannel.short_channel_id]++;
}
}
const elapsedSeconds = Math.round((new Date().getTime() / 1000) - loggerTimer);
if (elapsedSeconds > config.LIGHTNING.LOGGER_UPDATE_INTERVAL) {
logger.info(`Building complete channels from clightning output. Channels processed: ${channelProcessed + 1} of ${clChannels.length}`, logger.tags.ln);
loggerTimer = new Date().getTime() / 1000;
}
++channelProcessed;
}
channelProcessed = 0;
const keys = Object.keys(clChannelsDict);
for (const short_channel_id of keys) {
const incompleteChannel = await buildIncompleteChannel(clChannelsDict[short_channel_id]);
if (incompleteChannel !== null) {
consolidatedChannelList.push(incompleteChannel);
}
const elapsedSeconds = Math.round((new Date().getTime() / 1000) - loggerTimer);
if (elapsedSeconds > config.LIGHTNING.LOGGER_UPDATE_INTERVAL) {
logger.info(`Building partial channels from clightning output. Channels processed: ${channelProcessed + 1} of ${keys.length}`, logger.tags.ln);
loggerTimer = new Date().getTime() / 1000;
}
channelProcessed++;
}
return consolidatedChannelList;
}
/**
* Convert two clightning "getchannels" entries into a full lnd "describegraph.edges" format
* In this case, clightning knows the channel policy for both nodes
*/
async function buildFullChannel(clChannelA: any, clChannelB: any): Promise<ILightningApi.Channel | null> {
const lastUpdate = Math.max(clChannelA.last_update ?? 0, clChannelB.last_update ?? 0);
const tx = await FundingTxFetcher.$fetchChannelOpenTx(clChannelA.short_channel_id);
if (!tx) {
return null;
}
const parts = clChannelA.short_channel_id.split('x');
const outputIdx = parts[2];
return {
channel_id: Common.channelShortIdToIntegerId(clChannelA.short_channel_id),
capacity: (clChannelA.amount_msat / 1000).toString(),
last_update: lastUpdate,
node1_policy: convertPolicy(clChannelA),
node2_policy: convertPolicy(clChannelB),
chan_point: `${tx.txid}:${outputIdx}`,
node1_pub: clChannelA.source,
node2_pub: clChannelB.source,
};
}
/**
* Convert one clightning "getchannels" entry into an lnd "describegraph.edges" format
* In this case, clightning knows the channel policy of only one node
*/
async function buildIncompleteChannel(clChannel: any): Promise<ILightningApi.Channel | null> {
const tx = await FundingTxFetcher.$fetchChannelOpenTx(clChannel.short_channel_id);
if (!tx) {
return null;
}
const parts = clChannel.short_channel_id.split('x');
const outputIdx = parts[2];
return {
channel_id: Common.channelShortIdToIntegerId(clChannel.short_channel_id),
capacity: (clChannel.amount_msat / 1000).toString(),
last_update: clChannel.last_update ?? 0,
node1_policy: convertPolicy(clChannel),
node2_policy: null,
chan_point: `${tx.txid}:${outputIdx}`,
node1_pub: clChannel.source,
node2_pub: clChannel.destination,
};
}
/**
* Convert a clightning "listnode" response to a lnd channel policy format
*/
function convertPolicy(clChannel: any): ILightningApi.RoutingPolicy {
return {
time_lock_delta: clChannel.delay,
min_htlc: clChannel.htlc_minimum_msat.toString(),
max_htlc_msat: clChannel.htlc_maximum_msat.toString(),
fee_base_msat: clChannel.base_fee_millisatoshi,
fee_rate_milli_msat: clChannel.fee_per_millionth,
disabled: !clChannel.active,
last_update: clChannel.last_update ?? 0,
};
}
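Both builders derive the outpoint from the short channel id, which packs block height, transaction index, and output index separated by 'x'. A worked sketch (txid illustrative):

// short_channel_id '700123x45x1'.split('x') -> ['700123', '45', '1']
// outputIdx = '1'; with funding txid 'abcd1234', chan_point = 'abcd1234:1'
// capacity: amount_msat 5000000000 / 1000 -> '5000000' (sats, as a string)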


@@ -1,5 +0,0 @@
import { ILightningApi } from './lightning-api.interface';
export interface AbstractLightningApi {
$getNetworkGraph(): Promise<ILightningApi.NetworkGraph>;
}


@@ -1,16 +0,0 @@
import config from '../../config';
import CLightningClient from './clightning/clightning-client';
import { AbstractLightningApi } from './lightning-api-abstract-factory';
import LndApi from './lnd/lnd-api';
function lightningApiFactory(): AbstractLightningApi {
switch (config.LIGHTNING.ENABLED === true && config.LIGHTNING.BACKEND) {
case 'cln':
return new CLightningClient(config.CLIGHTNING.SOCKET);
case 'lnd':
default:
return new LndApi();
}
}
export default lightningApiFactory();
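The factory switches on two config fields; a sketch of the corresponding config entries (keys taken from the code above, values illustrative):

{
  "LIGHTNING": { "ENABLED": true, "BACKEND": "cln" },
  "CLIGHTNING": { "SOCKET": "/home/user/.lightning/bitcoin/lightning-rpc" }
}

Note that when LIGHTNING.ENABLED is false the switch argument evaluates to false, which matches neither case and falls through to the LndApi default.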


@@ -1,93 +0,0 @@
export namespace ILightningApi {
export interface NetworkInfo {
graph_diameter: number;
avg_out_degree: number;
max_out_degree: number;
num_nodes: number;
num_channels: number;
total_network_capacity: string;
avg_channel_size: number;
min_channel_size: string;
max_channel_size: string;
median_channel_size_sat: string;
num_zombie_chans: string;
}
export interface NetworkGraph {
nodes: Node[];
edges: Channel[];
}
export interface Channel {
channel_id: string;
chan_point: string;
last_update: number | null;
node1_pub: string;
node2_pub: string;
capacity: string;
node1_policy: RoutingPolicy | null;
node2_policy: RoutingPolicy | null;
}
export interface RoutingPolicy {
time_lock_delta: number;
min_htlc: string;
fee_base_msat: string;
fee_rate_milli_msat: string;
disabled: boolean;
max_htlc_msat: string;
last_update: number | null;
}
export interface Node {
last_update: number | null;
pub_key: string;
alias: string;
addresses: {
network: string;
addr: string;
}[];
color: string;
features: { [key: number]: Feature };
custom_records?: { [type: number]: string };
}
export interface Info {
identity_pubkey: string;
alias: string;
num_pending_channels: number;
num_active_channels: number;
num_peers: number;
block_height: number;
block_hash: string;
synced_to_chain: boolean;
testnet: boolean;
uris: string[];
best_header_timestamp: string;
version: string;
num_inactive_channels: number;
chains: {
chain: string;
network: string;
}[];
color: string;
synced_to_graph: boolean;
features: { [key: number]: Feature };
commit_hash: string;
/** Available on LND since v0.15.0-beta */
require_htlc_interceptor?: boolean;
}
export interface Feature {
bit: number;
name: string;
is_required: boolean;
is_known: boolean;
}
export interface ForensicOutput {
node?: 1 | 2;
type: number;
value: number;
}
}


@@ -1,64 +0,0 @@
import axios, { AxiosRequestConfig } from 'axios';
import { Agent } from 'https';
import * as fs from 'fs';
import { AbstractLightningApi } from '../lightning-api-abstract-factory';
import { ILightningApi } from '../lightning-api.interface';
import config from '../../../config';
import logger from '../../../logger';
class LndApi implements AbstractLightningApi {
axiosConfig: AxiosRequestConfig = {};
constructor() {
if (!config.LIGHTNING.ENABLED) {
return;
}
try {
this.axiosConfig = {
headers: {
'Grpc-Metadata-macaroon': fs.readFileSync(config.LND.MACAROON_PATH).toString('hex'),
},
httpsAgent: new Agent({
ca: fs.readFileSync(config.LND.TLS_CERT_PATH)
}),
timeout: config.LND.TIMEOUT
};
} catch (e) {
config.LIGHTNING.ENABLED = false;
logger.updateNetwork();
logger.err(`Could not initialize LND Macaroon/TLS Cert. Disabling LIGHTNING. ` + (e instanceof Error ? e.message : e));
}
}
async $getNetworkInfo(): Promise<ILightningApi.NetworkInfo> {
return axios.get<ILightningApi.NetworkInfo>(config.LND.REST_API_URL + '/v1/graph/info', this.axiosConfig)
.then((response) => response.data);
}
async $getInfo(): Promise<ILightningApi.Info> {
return axios.get<ILightningApi.Info>(config.LND.REST_API_URL + '/v1/getinfo', this.axiosConfig)
.then((response) => response.data);
}
async $getNetworkGraph(): Promise<ILightningApi.NetworkGraph> {
const graph = await axios.get<ILightningApi.NetworkGraph>(config.LND.REST_API_URL + '/v1/graph', this.axiosConfig)
.then((response) => response.data);
for (const node of graph.nodes) {
const nodeFeatures: ILightningApi.Feature[] = [];
for (const bit in node.features) {
nodeFeatures.push({
bit: parseInt(bit, 10),
name: node.features[bit].name,
is_required: node.features[bit].is_required,
is_known: node.features[bit].is_known,
});
}
node.features = nodeFeatures;
}
return graph;
}
}
export default LndApi;
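$getNetworkGraph() reshapes each node's features from the REST map keyed by bit number into the flat Feature[] array used downstream. Sketch of the transformation (payload illustrative):

// REST payload:   features = { '17': { name: 'multi-path-payments', is_required: false, is_known: true } }
// after the loop: features = [ { bit: 17, name: 'multi-path-payments', is_required: false, is_known: true } ]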


@@ -2,7 +2,7 @@ import { IBitcoinApi } from '../bitcoin/bitcoin-api.interface';
import bitcoinClient from '../bitcoin/bitcoin-client';
import bitcoinSecondClient from '../bitcoin/bitcoin-second-client';
import { Common } from '../common';
import DB from '../../database';
import { DB } from '../../database';
import logger from '../../logger';
class ElementsParser {
@@ -33,8 +33,10 @@ class ElementsParser {
}
public async $getPegDataByMonth(): Promise<any> {
const connection = await DB.pool.getConnection();
const query = `SELECT SUM(amount) AS amount, DATE_FORMAT(FROM_UNIXTIME(datetime), '%Y-%m-01') AS date FROM elements_pegs GROUP BY DATE_FORMAT(FROM_UNIXTIME(datetime), '%Y%m')`;
const [rows] = await DB.query(query);
const [rows] = await connection.query<any>(query);
connection.release();
return rows;
}
@@ -77,6 +79,7 @@ class ElementsParser {
protected async $savePegToDatabase(height: number, blockTime: number, amount: number, txid: string,
txindex: number, bitcoinaddress: string, bitcointxid: string, bitcoinindex: number, final_tx: number): Promise<void> {
const connection = await DB.pool.getConnection();
const query = `INSERT INTO elements_pegs(
block, datetime, amount, txid, txindex, bitcoinaddress, bitcointxid, bitcoinindex, final_tx
) VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?)`;
@@ -84,19 +87,24 @@ class ElementsParser {
const params: (string | number)[] = [
height, blockTime, amount, txid, txindex, bitcoinaddress, bitcointxid, bitcoinindex, final_tx
];
await DB.query(query, params);
await connection.query(query, params);
connection.release();
logger.debug(`Saved L-BTC peg from block height #${height} with TXID ${txid}.`);
}
protected async $getLatestBlockHeightFromDatabase(): Promise<number> {
const connection = await DB.pool.getConnection();
const query = `SELECT number FROM state WHERE name = 'last_elements_block'`;
const [rows] = await DB.query(query);
const [rows] = await connection.query<any>(query);
connection.release();
return rows[0]['number'];
}
protected async $saveLatestBlockToDatabase(blockHeight: number) {
const connection = await DB.pool.getConnection();
const query = `UPDATE state SET number = ? WHERE name = 'last_elements_block'`;
await DB.query(query, [blockHeight]);
await connection.query<any>(query, [blockHeight]);
connection.release();
}
}
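One side of each hunk above queries through the shared DB.query() helper, the other through an explicitly acquired pool connection. A method-body sketch of the connection-based pattern, with the release guarded (the try/finally is an addition for illustration, not part of the diff):

const connection = await DB.pool.getConnection();
try {
  const [rows] = await connection.query<any>(`SELECT number FROM state WHERE name = 'last_elements_block'`);
  return rows[0]['number'];
} finally {
  connection.release(); // released even if the query throws
}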


@@ -1,8 +1,9 @@
import * as fs from 'fs';
import config from '../../config';
import logger from '../../logger';
class Icons {
private static FILE_NAME = '/elements/asset_registry_db/icons.json';
private static FILE_NAME = './icons.json';
private iconIds: string[] = [];
private icons: { [assetId: string]: string; } = {};


@@ -1,73 +0,0 @@
import axios from 'axios';
import { Application, Request, Response } from 'express';
import config from '../../config';
import elementsParser from './elements-parser';
import icons from './icons';
class LiquidRoutes {
public initRoutes(app: Application) {
app
.get(config.MEMPOOL.API_URL_PREFIX + 'assets/icons', this.getAllLiquidIcon)
.get(config.MEMPOOL.API_URL_PREFIX + 'assets/featured', this.$getAllFeaturedLiquidAssets)
.get(config.MEMPOOL.API_URL_PREFIX + 'asset/:assetId/icon', this.getLiquidIcon)
.get(config.MEMPOOL.API_URL_PREFIX + 'assets/group/:id', this.$getAssetGroup)
;
if (config.DATABASE.ENABLED) {
app
.get(config.MEMPOOL.API_URL_PREFIX + 'liquid/pegs/month', this.$getElementsPegsByMonth)
;
}
}
private getLiquidIcon(req: Request, res: Response) {
const result = icons.getIconByAssetId(req.params.assetId);
if (result) {
res.setHeader('content-type', 'image/png');
res.setHeader('content-length', result.length);
res.send(result);
} else {
res.status(404).send('Asset icon not found');
}
}
private getAllLiquidIcon(req: Request, res: Response) {
const result = icons.getAllIconIds();
if (result) {
res.json(result);
} else {
res.status(404).send('Asset icons not found');
}
}
private async $getAllFeaturedLiquidAssets(req: Request, res: Response) {
try {
const response = await axios.get(`${config.EXTERNAL_DATA_SERVER.LIQUID_API}/assets/featured`, { responseType: 'stream', timeout: 10000 });
response.data.pipe(res);
} catch (e) {
res.status(500).end();
}
}
private async $getAssetGroup(req: Request, res: Response) {
try {
const response = await axios.get(`${config.EXTERNAL_DATA_SERVER.LIQUID_API}/assets/group/${parseInt(req.params.id, 10)}`,
{ responseType: 'stream', timeout: 10000 });
response.data.pipe(res);
} catch (e) {
res.status(500).end();
}
}
private async $getElementsPegsByMonth(req: Request, res: Response) {
try {
const pegs = await elementsParser.$getPegDataByMonth();
res.json(pegs);
} catch (e) {
res.status(500).send(e instanceof Error ? e.message : e);
}
}
}
export default new LiquidRoutes();


@@ -12,8 +12,8 @@ class LoadingIndicators {
this.progressChangedCallback = fn;
}
public setProgress(name: string, progressPercent: number, rounded: boolean = true) {
const newProgress = rounded === true ? Math.round(progressPercent) : progressPercent;
public setProgress(name: string, progressPercent: number) {
const newProgress = Math.round(progressPercent);
if (newProgress >= 100) {
delete this.loadingIndicators[name];
} else {

Some files were not shown because too many files have changed in this diff.