Compare commits

..

4 Commits

1011 changed files with 21152 additions and 349981 deletions

.gitattributes

@@ -1,3 +0,0 @@
# ignore all differences in line endings
package.json eol=crlf -crlf
*/package.json eol=crlf -crlf
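
These two rules pin package.json files (at the root and one directory deep) to CRLF line endings while unsetting the legacy crlf attribute. Whether they are in effect can be verified with git check-attr — a minimal sketch, assuming a working copy that still contains this .gitattributes (backend/package.json is one example path matched by the second pattern):

# show the effective line-ending attributes for matched files
git check-attr eol crlf -- package.json backend/package.json
# expected while these rules are present:
#   package.json: eol: crlf
#   package.json: crlf: unset
#   backend/package.json: eol: crlf
#   backend/package.json: crlf: unset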


@@ -1,43 +0,0 @@
---
name: 🐛 Bug Report
about: Report bugs (no support requests, please)
---
<!--
SUPPORT REQUESTS:
This is for reporting bugs in Mempool, not for support requests.
If you have a support request, please reach out on Matrix:
https://matrix.to/#/#mempool.support:bitcoin.kyoto
-->
### Description
<!-- brief description of the bug -->
#### Version
<!-- commit id or version number -->
### Steps to reproduce
<!-- if you can reliably reproduce the bug, list the steps here -->
### Expected behaviour
<!-- description of the expected behaviour -->
### Actual behaviour
<!-- explain what happened instead of the expected behaviour -->
### Screenshots
<!-- Screenshots if GUI related, drag and drop to add to the issue -->
#### Device or machine
<!-- device/machine used, operating system -->
#### Additional info
<!-- Additional information useful for debugging (e.g. logs) -->


@@ -1,27 +0,0 @@
---
name: ✨ Feature Request
about: Request a feature or suggest other enhancements
---
<!--
SUPPORT REQUESTS:
This is for requesting features in Mempool, not for support requests.
If you have a support request, please reach out on Matrix:
https://matrix.to/#/#mempool.support:bitcoin.kyoto
-->
### Description
<!-- brief description of the feature request -->
### Problem to be solved
<!-- description of the problem you're having -->
### Proposed solution
<!-- explain how you think we should solve the problem -->
#### Additional info
<!-- Additional information useful for implementing (e.g. docs, links, etc.) -->


@@ -1,8 +0,0 @@
blank_issues_enabled: false
contact_links:
- name: 🙋 Need help? Chat with us on Matrix
url: https://matrix.to/#/#mempool.support:bitcoin.kyoto
about: For support requests or general questions
- name: 🌐 Want to help with translations? Use Transifex
url: https://www.transifex.com/mempool/mempool
about: All translation work is done on Transifex


@@ -1,28 +0,0 @@
version: 2
updates:
  - package-ecosystem: npm
    directory: "/backend"
    schedule:
      interval: daily
    open-pull-requests-limit: 10
    ignore:
      - update-types: ["version-update:semver-major"]
  - package-ecosystem: npm
    directory: "/frontend"
    schedule:
      interval: daily
    open-pull-requests-limit: 10
    ignore:
      - update-types: ["version-update:semver-major"]
  - package-ecosystem: docker
    directory: "/docker/backend"
    schedule:
      interval: daily
    ignore:
      - update-types: ["version-update:semver-major"]
  - package-ecosystem: "github-actions"
    directory: "/"
    schedule:
      interval: daily
    ignore:
      - update-types: ["version-update:semver-major"]


@@ -1,6 +0,0 @@
<!--
Please do not open pull requests for translations.
All translation work is done on Transifex:
https://www.transifex.com/mempool/mempool
-->


@@ -1,96 +0,0 @@
name: CI Pipeline for the Backend and Frontend
on:
  pull_request:
    types: [opened, review_requested, synchronize]
jobs:
  backend:
    if: "!contains(github.event.pull_request.labels.*.name, 'ops') && !contains(github.head_ref, 'ops/')"
    strategy:
      matrix:
        node: ["16.16.0", "18.5.0"]
        flavor: ["dev", "prod"]
      fail-fast: false
    runs-on: "ubuntu-latest"
    name: Backend (${{ matrix.flavor }}) - node ${{ matrix.node }}
    steps:
      - name: Checkout
        uses: actions/checkout@v3
        with:
          path: ${{ matrix.node }}/${{ matrix.flavor }}
      - name: Setup Node
        uses: actions/setup-node@v3
        with:
          node-version: ${{ matrix.node }}
          registry-url: "https://registry.npmjs.org"
      - name: Install
        if: ${{ matrix.flavor == 'dev' }}
        run: npm ci
        working-directory: ${{ matrix.node }}/${{ matrix.flavor }}/backend
      - name: Install (Prod dependencies only)
        if: ${{ matrix.flavor == 'prod' }}
        run: npm ci --omit=dev --omit=optional
        working-directory: ${{ matrix.node }}/${{ matrix.flavor }}/backend
      - name: Lint
        if: ${{ matrix.flavor == 'dev' }}
        run: npm run lint
        working-directory: ${{ matrix.node }}/${{ matrix.flavor }}/backend
      - name: Unit Tests
        if: ${{ matrix.flavor == 'dev' }}
        run: npm run test
        working-directory: ${{ matrix.node }}/${{ matrix.flavor }}/backend
      - name: Build
        run: npm run build
        working-directory: ${{ matrix.node }}/${{ matrix.flavor }}/backend
  frontend:
    if: "!contains(github.event.pull_request.labels.*.name, 'ops') && !contains(github.head_ref, 'ops/')"
    strategy:
      matrix:
        node: ["16.15.0", "18.5.0"]
        flavor: ["dev", "prod"]
      fail-fast: false
    runs-on: "ubuntu-latest"
    name: Frontend (${{ matrix.flavor }}) - node ${{ matrix.node }}
    steps:
      - name: Checkout
        uses: actions/checkout@v3
        with:
          path: ${{ matrix.node }}/${{ matrix.flavor }}
      - name: Setup Node
        uses: actions/setup-node@v3
        with:
          node-version: ${{ matrix.node }}
          registry-url: "https://registry.npmjs.org"
      - name: Install (Prod dependencies only)
        if: ${{ matrix.flavor == 'prod' }}
        run: npm ci --omit=dev --omit=optional
        working-directory: ${{ matrix.node }}/${{ matrix.flavor }}/frontend
      - name: Install
        if: ${{ matrix.flavor == 'dev' }}
        run: npm ci
        working-directory: ${{ matrix.node }}/${{ matrix.flavor }}/frontend
      - name: Lint
        if: ${{ matrix.flavor == 'dev' }}
        run: npm run lint
        working-directory: ${{ matrix.node }}/${{ matrix.flavor }}/frontend
      # - name: Test
      #   run: npm run test
      - name: Build
        run: npm run build
        working-directory: ${{ matrix.node }}/${{ matrix.flavor }}/frontend
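
The pipeline above boils down to the standard npm scripts, so the same checks can be run locally before pushing — a sketch, assuming Node 16.x and a clone of the repository:

# backend, mirroring the "dev" flavor: install, lint, test, build
cd backend && npm ci && npm run lint && npm run test && npm run build
# frontend, same sequence minus the currently disabled test step
cd ../frontend && npm ci && npm run lint && npm run build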


@@ -1,61 +0,0 @@
name: Cypress Tests
on:
  pull_request:
    types: [ opened, review_requested, synchronize ]
jobs:
  cypress:
    if: "!contains(github.event.pull_request.labels.*.name, 'ops') && !contains(github.head_ref, 'ops/')"
    runs-on: "ubuntu-latest"
    strategy:
      fail-fast: false
      matrix:
        module: ["mempool", "liquid", "bisq"]
        include:
          - module: "mempool"
            spec: |
              cypress/e2e/mainnet/*.spec.ts
              cypress/e2e/signet/*.spec.ts
              cypress/e2e/testnet/*.spec.ts
          - module: "liquid"
            spec: |
              cypress/e2e/liquid/liquid.spec.ts
              cypress/e2e/liquidtestnet/liquidtestnet.spec.ts
          - module: "bisq"
            spec: |
              cypress/e2e/bisq/bisq.spec.ts
    name: E2E tests for ${{ matrix.module }}
    steps:
      - name: Checkout
        uses: actions/checkout@v2
        with:
          path: ${{ matrix.module }}
      - name: Setup node
        uses: actions/setup-node@v2
        with:
          node-version: 16.15.0
          cache: 'npm'
          cache-dependency-path: ${{ matrix.module }}/frontend/package-lock.json
      - name: Chrome browser tests (${{ matrix.module }})
        uses: cypress-io/github-action@v4
        with:
          tag: ${{ github.event_name }}
          working-directory: ${{ matrix.module }}/frontend
          build: npm run config:defaults:${{ matrix.module }}
          start: npm run start:local-staging
          wait-on: 'http://localhost:4200'
          wait-on-timeout: 120
          record: true
          parallel: true
          spec: ${{ matrix.spec }}
          group: Tests on Chrome (${{ matrix.module }})
          browser: "chrome"
          ci-build-id: '${{ github.sha }}-${{ github.workflow }}-${{ github.event_name }}'
        env:
          COMMIT_INFO_MESSAGE: ${{ github.event.pull_request.title }}
          CYPRESS_RECORD_KEY: ${{ secrets.CYPRESS_RECORD_KEY }}
          GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
          CYPRESS_PROJECT_ID: ${{ secrets.CYPRESS_PROJECT_ID }}
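
Outside CI, the same suites can be exercised without the Dashboard recording — a sketch for the mempool module, assuming the frontend dependencies are installed and the wait-on package is available via npx (the npm scripts are the ones the workflow invokes):

cd frontend
npm run config:defaults:mempool   # the workflow's "build" step
npm run start:local-staging &     # the workflow's "start" step, served on port 4200
npx wait-on http://localhost:4200
npx cypress run --browser chrome --spec "cypress/e2e/mainnet/*.spec.ts"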


@@ -1,105 +0,0 @@
name: Docker build on tag
env:
  DOCKER_CLI_EXPERIMENTAL: enabled
  TAG_FMT: "^refs/tags/(((.?[0-9]+){3,4}))$"
  DOCKER_BUILDKIT: 0
  COMPOSE_DOCKER_CLI_BUILD: 0
on:
  push:
    tags:
      - v[0-9]+.[0-9]+.[0-9]+
      - v[0-9]+.[0-9]+.[0-9]+-*
permissions:
  contents: read
jobs:
  build:
    strategy:
      matrix:
        service:
          - frontend
          - backend
    runs-on: ubuntu-latest
    timeout-minutes: 120
    name: Build and push to DockerHub
    steps:
      # Workaround based on JonasAlfredsson/docker-on-tmpfs@v1.0.1
      - name: Replace the current swap file
        shell: bash
        run: |
          sudo swapoff /mnt/swapfile
          sudo rm -v /mnt/swapfile
          sudo fallocate -l 10G /mnt/swapfile
          sudo chmod 600 /mnt/swapfile
          sudo mkswap /mnt/swapfile
          sudo swapon /mnt/swapfile
      - name: Show current memory and swap status
        shell: bash
        run: |
          sudo free -h
          echo
          sudo swapon --show
      - name: Mount a tmpfs over /var/lib/docker
        shell: bash
        run: |
          if [ ! -d "/var/lib/docker" ]; then
            echo "Directory '/var/lib/docker' not found"
            exit 1
          fi
          sudo mount -t tmpfs -o size=10G tmpfs /var/lib/docker
          sudo systemctl restart docker
          sudo df -h | grep docker
      - name: Set env variables
        run: echo "TAG=${GITHUB_REF/refs\/tags\//}" >> $GITHUB_ENV
      - name: Show set environment variables
        run: |
          printf " TAG: %s\n" "$TAG"
      - name: Add SHORT_SHA env property with commit short sha
        run: echo "SHORT_SHA=`echo ${GITHUB_SHA} | cut -c1-8`" >> $GITHUB_ENV
      - name: Login to Docker for building
        run: echo "${{ secrets.DOCKER_PASSWORD }}" | docker login -u "${{ secrets.DOCKER_USERNAME }}" --password-stdin
      - name: Checkout project
        uses: actions/checkout@e2f20e631ae6d7dd3b768f56a5d2af784dd54791 # v2.5.0
      - name: Init repo for Dockerization
        run: docker/init.sh "$TAG"
      - name: Set up QEMU
        uses: docker/setup-qemu-action@e81a89b1732b9c48d79cd809d8d81d79c4647a18 # v2.1.0
        id: qemu
      - name: Setup Docker buildx action
        uses: docker/setup-buildx-action@8c0edbc76e98fa90f69d9a2c020dcb50019dc325 # v2.2.1
        id: buildx
      - name: Available platforms
        run: echo ${{ steps.buildx.outputs.platforms }}
      - name: Cache Docker layers
        uses: actions/cache@9b0c1fce7a93df8e3bb8926b0d6e9d89e92f20a7 # v3.0.11
        id: cache
        with:
          path: /tmp/.buildx-cache
          key: ${{ runner.os }}-buildx-${{ github.sha }}
          restore-keys: |
            ${{ runner.os }}-buildx-
      - name: Run Docker buildx for ${{ matrix.service }} against tag
        run: |
          docker buildx build \
            --cache-from "type=local,src=/tmp/.buildx-cache" \
            --cache-to "type=local,dest=/tmp/.buildx-cache" \
            --platform linux/amd64,linux/arm64,linux/arm/v7 \
            --tag ${{ secrets.DOCKER_HUB_USER }}/${{ matrix.service }}:$TAG \
            --tag ${{ secrets.DOCKER_HUB_USER }}/${{ matrix.service }}:latest \
            --output "type=registry" ./${{ matrix.service }}/ \
            --build-arg commitHash=$SHORT_SHA
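
The push job can be approximated outside Actions — a sketch where DOCKERUSER stands in for the Docker Hub account and v9.9.9 is a hypothetical release tag, with TAG and SHORT_SHA derived the same way as in the workflow:

TAG=v9.9.9                                # hypothetical tag
SHORT_SHA=$(git rev-parse --short=8 HEAD)
docker/init.sh "$TAG"
docker buildx build \
  --platform linux/amd64,linux/arm64,linux/arm/v7 \
  --tag DOCKERUSER/backend:"$TAG" \
  --tag DOCKERUSER/backend:latest \
  --build-arg commitHash="$SHORT_SHA" \
  --output "type=registry" ./backend/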

.gitignore

@@ -1,7 +0,0 @@
sitemap
data
docker-compose.yml
backend/mempool-config.json
*.swp
frontend/src/resources/config.template.js
frontend/src/resources/config.js

.nvmrc

@@ -1 +0,0 @@
v16.16.0
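
With nvm installed, this file is picked up automatically — a sketch:

nvm install      # reads v16.16.0 from .nvmrc
nvm use
node --version   # should print v16.16.0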


@@ -1,4 +0,0 @@
{
"editor.tabSize": 2,
"typescript.tsdk": "./backend/node_modules/typescript/lib"
}


@@ -1,51 +0,0 @@
# Contributing to The Mempool Open Source Project
Thank you for contributing to The Mempool Open Source Project managed by Mempool Space K.K. (“Mempool”).
In order to clarify the intellectual property license granted with Contributions from any person or entity, Mempool must have a statement on file from each Contributor indicating their agreement to the Contributor License Agreement (“Agreement”). This license is for your protection as a Contributor as well as the protection of Mempool and its other contributors and users; it does not change your rights to use your own Contributions for any other purpose.
When submitting a pull request for the first time, please create a file with a name like `/contributors/{github_username}.txt`, and in the content of that file indicate your agreement to the Contributor License Agreement terms below. An example of what that file should contain can be seen in wiz's agreement file. (This method of CLA "signing" is borrowed from Medium's open source project.)
Also, please GPG-sign all your commits (`git config commit.gpgsign true`).
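
Put together, a first contribution setup might look like the following sketch, where alice is a hypothetical GitHub username and the agreement wording is copied from an existing contributor file as suggested above:

git config commit.gpgsign true                  # GPG-sign all commits, as requested above
cp contributors/wiz.txt contributors/alice.txt  # hypothetical path; adapt the text to your own agreement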
# Contributor License Agreement
Last Updated: January 25, 2022
By accepting this Agreement, You agree to the following terms and conditions for Your present and future Contributions submitted to Mempool. Except for the license granted herein to Mempool and recipients of software distributed by Mempool, You reserve all right, title, and interest in and to Your Contributions.
### 1. Definitions
“You” (or “Your”) shall mean the copyright owner or legal entity authorized by the copyright owner that is making this Agreement with Mempool. For legal entities, the entity making a Contribution and all other entities that control, are controlled by, or are under common control with that entity are considered to be a single Contributor. For the purposes of this definition, “control” means (i) the power, direct or indirect, to cause the direction or management of such entity, whether by contract or otherwise, or (ii) ownership of fifty percent (50%) or more of the outstanding shares, or (iii) beneficial ownership of such entity.
“Contribution” shall mean any original work of authorship, including any modifications or additions to an existing work, that is intentionally submitted by You to Mempool for inclusion in, or documentation of, any of the products owned or managed by Mempool (“Work”). For the purposes of this definition, “submitted” means any form of electronic, verbal, or written communication sent to Mempool or its representatives, including but not limited to communication on electronic mailing lists, source code control systems, and issue tracking systems that are managed by, or on behalf of, Mempool for the purpose of discussing and improving the Work, but excluding communication that is conspicuously marked or otherwise designated in writing by You as “Not a Contribution.”
### 2. Grant of Copyright License
Subject to the terms and conditions of this Agreement, You hereby grant to Mempool and to recipients of software distributed by Mempool a perpetual, worldwide, non-exclusive, no-charge, royalty-free, irrevocable copyright license to reproduce, prepare derivative works of, publicly display, publicly perform, sublicense, and distribute Your Contributions and such derivative works.
### 3. Grant of Patent License
Subject to the terms and conditions of this Agreement, You hereby grant to Mempool and to recipients of software distributed by Mempool a perpetual, worldwide, non-exclusive, no-charge, royalty-free, irrevocable (except as stated in this section) patent license to make, have made, use, offer to sell, sell, import, and otherwise transfer the Work, where such license applies only to those patent claims licensable by You that are necessarily infringed by Your Contribution(s) alone or by combination of Your Contribution(s) with the Work to which such Contribution(s) was submitted. If any entity institutes patent litigation against You or any other entity (including a cross-claim or counterclaim in a lawsuit) alleging that your Contribution, or the Work to which you have contributed, constitutes direct or contributory patent infringement, then any patent licenses granted to that entity under this Agreement for that Contribution or Work shall terminate as of the date such litigation is filed.
### 4. Authority
You represent that you are legally entitled to grant the above license. If your employer(s) has rights to intellectual property that you create that includes your Contributions, you represent that you have received permission to make Contributions on behalf of that employer, that your employer has waived such rights for your Contributions to Mempool, or that your employer has executed a separate Corporate Contributor License Agreement with Mempool.
### 5. Originality
You represent that each of Your Contributions is Your original creation (see section 7 for submissions on behalf of others). You represent that Your Contribution submissions include complete details of any third-party license or other restriction (including, but not limited to, related patents and trademarks) of which you are personally aware, and which are associated with any part of Your Contributions.
### 6. Support
You are not expected to provide support for Your Contributions, except to the extent You desire to provide support. You may provide support for free, for a fee, or not at all. Unless required by applicable law or agreed to in writing, You provide Your Contributions on an “AS IS” BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied, including, without limitation, any warranties or conditions of TITLE, NON- INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A PARTICULAR PURPOSE.
### 7. Third Party Contributions
Should You wish to submit work that is not Your original creation, You may submit it to Mempool separately from any Contribution, identifying the complete details of its source and of any license or other restriction (including, but not limited to, related patents, trademarks, and license agreements) of which you are personally aware, and conspicuously marking the work as “Submitted on behalf of a third-party: [named here]”.
### 8. Notifications
You agree to notify Mempool of any facts or circumstances of which you become aware that would make these representations inaccurate in any respect.

Dockerfile (new file)

@@ -0,0 +1,57 @@
FROM alpine:latest
RUN mkdir /mempool.space/
COPY ./backend /mempool.space/backend/
COPY ./frontend /mempool.space/frontend/
COPY ./mariadb-structure.sql /mempool.space/mariadb-structure.sql
RUN apk add mariadb mariadb-client git nginx npm rsync bash
RUN mysql_install_db --user=mysql --datadir=/var/lib/mysql/
RUN /usr/bin/mysqld_safe --datadir='/var/lib/mysql/'& \
sleep 60 && \
mysql -e "create database mempool" && \
mysql -e "grant all privileges on mempool.* to 'mempool'@'localhost' identified by 'mempool'" && \
mysql mempool < /mempool.space/mariadb-structure.sql
RUN sed -i "/^skip-networking/ c#skip-networking" /etc/my.cnf.d/mariadb-server.cnf
RUN export NG_CLI_ANALYTICS=ci && \
npm install -g typescript && \
cd /mempool.space/frontend && \
npm install && \
cd /mempool.space/backend && \
npm install && \
tsc
COPY ./nginx-nossl-docker.conf /etc/nginx/nginx.conf
ENV ENV dev
ENV DB_HOST localhost
ENV DB_PORT 3306
ENV DB_USER mempool
ENV DB_PASSWORD mempool
ENV DB_DATABASE mempool
ENV API_ENDPOINT /api/v1/
ENV CHAT_SSL_ENABLED false
ENV MEMPOOL_REFRESH_RATE_MS 500
ENV INITIAL_BLOCK_AMOUNT 8
ENV DEFAULT_PROJECTED_BLOCKS_AMOUNT 3
ENV KEEP_BLOCK_AMOUNT 24
ENV BITCOIN_NODE_HOST bitcoinhost
ENV BITCOIN_NODE_PORT 8332
ENV BITCOIN_NODE_USER bitcoinuser
ENV BITCOIN_NODE_PASS bitcoinpass
ENV TX_PER_SECOND_SPAN_SECONDS 150
ENV BACKEND_API bitcoind
ENV ELECTRS_API_URL https://mempool.space/api
RUN cd /mempool.space/frontend/ && \
npm run build && \
rsync -av --delete dist/mempool/ /var/www/html/
EXPOSE 80
COPY ./entrypoint.sh /mempool.space/entrypoint.sh
RUN chmod +x /mempool.space/entrypoint.sh
WORKDIR /mempool.space
CMD ["/mempool.space/entrypoint.sh"]


@@ -1,47 +0,0 @@
# If you see pwd_unknown showing up check permissions
PWD ?= pwd_unknown

# DATABASE DEPLOY FOLDER CONFIG - default ./data
ifeq ($(data),)
DATA := data
export DATA
else
DATA := $(data)
export DATA
endif

.PHONY: help
help:
	@echo ''
	@echo ''
	@echo '  Usage: make [COMMAND]'
	@echo ''
	@echo '  make all      # build init mempool and electrs'
	@echo '  make init     # setup some useful configs'
	@echo '  make mempool  # build a dockerized mempool.space'
	@echo '  make electrs  # build a docker electrs image'
	@echo ''

.PHONY: init
init:
	@echo ''
	mkdir -p $(DATA) $(DATA)/mysql $(DATA)/mysql/data
	#REF: https://github.com/mempool/mempool/blob/master/docker/README.md
	cat docker/docker-compose.yml > docker-compose.yml
	cat backend/mempool-config.sample.json > backend/mempool-config.json

.PHONY: mempool
mempool: init
	@echo ''
	docker-compose up --force-recreate --always-recreate-deps
	@echo ''

.PHONY: electrs
electrs:
	#REF: https://hub.docker.com/r/beli/electrum
	@echo ''
	docker build -f docker/electrum/Dockerfile .
	@echo ''

.PHONY: all
all: init
	make mempool

#######################
-include Makefile
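
Typical usage, including the optional data variable handled at the top of the file — a sketch:

make init                        # generate docker-compose.yml and mempool-config.json
make mempool data=/srv/mempool   # bring the stack up with a custom database directory
make electrs                     # build the electrs image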

LICENSE

@@ -1,29 +1,21 @@
-The Mempool Open Source Project
-Copyright (c) 2019-2022 The Mempool Open Source Project Developers
-
-This program is free software; you can redistribute it and/or modify it under
-the terms of (at your option) either:
-
-1) the GNU Affero General Public License as published by the Free Software
-Foundation, either version 3 of the License or any later version approved by a
-proxy statement published on <https://mempool.space/about>; or
-
-2) the GNU General Public License as published by the Free Software
-Foundation, either version 3 of the License or any later version approved by a
-proxy statement published on <https://mempool.space/about>.
-
-However, this copyright license does not include an implied right or license to
-use our trademarks: The Mempool Open Source Project™, mempool.space™, the
-mempool Logo™, the mempool.space Vertical Logo™, the mempool.space Horizontal
-Logo™, the mempool Square Logo™, and the mempool Blocks logo™ are registered
-trademarks or trademarks of Mempool Space K.K in Japan, the United States,
-and/or other countries. See our full Trademark Policy and Guidelines for more
-details, published on <https://mempool.space/trademark-policy>.
-
-This program is distributed in the hope that it will be useful, but WITHOUT ANY
-WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR A
-PARTICULAR PURPOSE. See the full license terms for more details.
-
-You should have received a copy of both the GNU Affero General Public License
-and the GNU General Public License along with this program. If not, see
-<http://www.gnu.org/licenses/>.
+MIT License
+
+Copyright (c) 2019 Simon Lindh
+
+Permission is hereby granted, free of charge, to any person obtaining a copy
+of this software and associated documentation files (the "Software"), to deal
+in the Software without restriction, including without limitation the rights
+to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
+copies of the Software, and to permit persons to whom the Software is
+furnished to do so, subject to the following conditions:
+
+The above copyright notice and this permission notice shall be included in all
+copies or substantial portions of the Software.
+
+THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
+AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
+OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
+SOFTWARE.


@@ -1,660 +0,0 @@
### GNU AFFERO GENERAL PUBLIC LICENSE
Version 3, 19 November 2007
Copyright (C) 2007 Free Software Foundation, Inc.
<https://fsf.org/>
Everyone is permitted to copy and distribute verbatim copies of this
license document, but changing it is not allowed.
### Preamble
The GNU Affero General Public License is a free, copyleft license for
software and other kinds of works, specifically designed to ensure
cooperation with the community in the case of network server software.
The licenses for most software and other practical works are designed
to take away your freedom to share and change the works. By contrast,
our General Public Licenses are intended to guarantee your freedom to
share and change all versions of a program--to make sure it remains
free software for all its users.
When we speak of free software, we are referring to freedom, not
price. Our General Public Licenses are designed to make sure that you
have the freedom to distribute copies of free software (and charge for
them if you wish), that you receive source code or can get it if you
want it, that you can change the software or use pieces of it in new
free programs, and that you know you can do these things.
Developers that use our General Public Licenses protect your rights
with two steps: (1) assert copyright on the software, and (2) offer
you this License which gives you legal permission to copy, distribute
and/or modify the software.
A secondary benefit of defending all users' freedom is that
improvements made in alternate versions of the program, if they
receive widespread use, become available for other developers to
incorporate. Many developers of free software are heartened and
encouraged by the resulting cooperation. However, in the case of
software used on network servers, this result may fail to come about.
The GNU General Public License permits making a modified version and
letting the public access it on a server without ever releasing its
source code to the public.
The GNU Affero General Public License is designed specifically to
ensure that, in such cases, the modified source code becomes available
to the community. It requires the operator of a network server to
provide the source code of the modified version running there to the
users of that server. Therefore, public use of a modified version, on
a publicly accessible server, gives the public access to the source
code of the modified version.
An older license, called the Affero General Public License and
published by Affero, was designed to accomplish similar goals. This is
a different license, not a version of the Affero GPL, but Affero has
released a new version of the Affero GPL which permits relicensing
under this license.
The precise terms and conditions for copying, distribution and
modification follow.
### TERMS AND CONDITIONS
#### 0. Definitions.
"This License" refers to version 3 of the GNU Affero General Public
License.
"Copyright" also means copyright-like laws that apply to other kinds
of works, such as semiconductor masks.
"The Program" refers to any copyrightable work licensed under this
License. Each licensee is addressed as "you". "Licensees" and
"recipients" may be individuals or organizations.
To "modify" a work means to copy from or adapt all or part of the work
in a fashion requiring copyright permission, other than the making of
an exact copy. The resulting work is called a "modified version" of
the earlier work or a work "based on" the earlier work.
A "covered work" means either the unmodified Program or a work based
on the Program.
To "propagate" a work means to do anything with it that, without
permission, would make you directly or secondarily liable for
infringement under applicable copyright law, except executing it on a
computer or modifying a private copy. Propagation includes copying,
distribution (with or without modification), making available to the
public, and in some countries other activities as well.
To "convey" a work means any kind of propagation that enables other
parties to make or receive copies. Mere interaction with a user
through a computer network, with no transfer of a copy, is not
conveying.
An interactive user interface displays "Appropriate Legal Notices" to
the extent that it includes a convenient and prominently visible
feature that (1) displays an appropriate copyright notice, and (2)
tells the user that there is no warranty for the work (except to the
extent that warranties are provided), that licensees may convey the
work under this License, and how to view a copy of this License. If
the interface presents a list of user commands or options, such as a
menu, a prominent item in the list meets this criterion.
#### 1. Source Code.
The "source code" for a work means the preferred form of the work for
making modifications to it. "Object code" means any non-source form of
a work.
A "Standard Interface" means an interface that either is an official
standard defined by a recognized standards body, or, in the case of
interfaces specified for a particular programming language, one that
is widely used among developers working in that language.
The "System Libraries" of an executable work include anything, other
than the work as a whole, that (a) is included in the normal form of
packaging a Major Component, but which is not part of that Major
Component, and (b) serves only to enable use of the work with that
Major Component, or to implement a Standard Interface for which an
implementation is available to the public in source code form. A
"Major Component", in this context, means a major essential component
(kernel, window system, and so on) of the specific operating system
(if any) on which the executable work runs, or a compiler used to
produce the work, or an object code interpreter used to run it.
The "Corresponding Source" for a work in object code form means all
the source code needed to generate, install, and (for an executable
work) run the object code and to modify the work, including scripts to
control those activities. However, it does not include the work's
System Libraries, or general-purpose tools or generally available free
programs which are used unmodified in performing those activities but
which are not part of the work. For example, Corresponding Source
includes interface definition files associated with source files for
the work, and the source code for shared libraries and dynamically
linked subprograms that the work is specifically designed to require,
such as by intimate data communication or control flow between those
subprograms and other parts of the work.
The Corresponding Source need not include anything that users can
regenerate automatically from other parts of the Corresponding Source.
The Corresponding Source for a work in source code form is that same
work.
#### 2. Basic Permissions.
All rights granted under this License are granted for the term of
copyright on the Program, and are irrevocable provided the stated
conditions are met. This License explicitly affirms your unlimited
permission to run the unmodified Program. The output from running a
covered work is covered by this License only if the output, given its
content, constitutes a covered work. This License acknowledges your
rights of fair use or other equivalent, as provided by copyright law.
You may make, run and propagate covered works that you do not convey,
without conditions so long as your license otherwise remains in force.
You may convey covered works to others for the sole purpose of having
them make modifications exclusively for you, or provide you with
facilities for running those works, provided that you comply with the
terms of this License in conveying all material for which you do not
control copyright. Those thus making or running the covered works for
you must do so exclusively on your behalf, under your direction and
control, on terms that prohibit them from making any copies of your
copyrighted material outside their relationship with you.
Conveying under any other circumstances is permitted solely under the
conditions stated below. Sublicensing is not allowed; section 10 makes
it unnecessary.
#### 3. Protecting Users' Legal Rights From Anti-Circumvention Law.
No covered work shall be deemed part of an effective technological
measure under any applicable law fulfilling obligations under article
11 of the WIPO copyright treaty adopted on 20 December 1996, or
similar laws prohibiting or restricting circumvention of such
measures.
When you convey a covered work, you waive any legal power to forbid
circumvention of technological measures to the extent such
circumvention is effected by exercising rights under this License with
respect to the covered work, and you disclaim any intention to limit
operation or modification of the work as a means of enforcing, against
the work's users, your or third parties' legal rights to forbid
circumvention of technological measures.
#### 4. Conveying Verbatim Copies.
You may convey verbatim copies of the Program's source code as you
receive it, in any medium, provided that you conspicuously and
appropriately publish on each copy an appropriate copyright notice;
keep intact all notices stating that this License and any
non-permissive terms added in accord with section 7 apply to the code;
keep intact all notices of the absence of any warranty; and give all
recipients a copy of this License along with the Program.
You may charge any price or no price for each copy that you convey,
and you may offer support or warranty protection for a fee.
#### 5. Conveying Modified Source Versions.
You may convey a work based on the Program, or the modifications to
produce it from the Program, in the form of source code under the
terms of section 4, provided that you also meet all of these
conditions:
- a) The work must carry prominent notices stating that you modified
it, and giving a relevant date.
- b) The work must carry prominent notices stating that it is
released under this License and any conditions added under
section 7. This requirement modifies the requirement in section 4
to "keep intact all notices".
- c) You must license the entire work, as a whole, under this
License to anyone who comes into possession of a copy. This
License will therefore apply, along with any applicable section 7
additional terms, to the whole of the work, and all its parts,
regardless of how they are packaged. This License gives no
permission to license the work in any other way, but it does not
invalidate such permission if you have separately received it.
- d) If the work has interactive user interfaces, each must display
Appropriate Legal Notices; however, if the Program has interactive
interfaces that do not display Appropriate Legal Notices, your
work need not make them do so.
A compilation of a covered work with other separate and independent
works, which are not by their nature extensions of the covered work,
and which are not combined with it such as to form a larger program,
in or on a volume of a storage or distribution medium, is called an
"aggregate" if the compilation and its resulting copyright are not
used to limit the access or legal rights of the compilation's users
beyond what the individual works permit. Inclusion of a covered work
in an aggregate does not cause this License to apply to the other
parts of the aggregate.
#### 6. Conveying Non-Source Forms.
You may convey a covered work in object code form under the terms of
sections 4 and 5, provided that you also convey the machine-readable
Corresponding Source under the terms of this License, in one of these
ways:
- a) Convey the object code in, or embodied in, a physical product
(including a physical distribution medium), accompanied by the
Corresponding Source fixed on a durable physical medium
customarily used for software interchange.
- b) Convey the object code in, or embodied in, a physical product
(including a physical distribution medium), accompanied by a
written offer, valid for at least three years and valid for as
long as you offer spare parts or customer support for that product
model, to give anyone who possesses the object code either (1) a
copy of the Corresponding Source for all the software in the
product that is covered by this License, on a durable physical
medium customarily used for software interchange, for a price no
more than your reasonable cost of physically performing this
conveying of source, or (2) access to copy the Corresponding
Source from a network server at no charge.
- c) Convey individual copies of the object code with a copy of the
written offer to provide the Corresponding Source. This
alternative is allowed only occasionally and noncommercially, and
only if you received the object code with such an offer, in accord
with subsection 6b.
- d) Convey the object code by offering access from a designated
place (gratis or for a charge), and offer equivalent access to the
Corresponding Source in the same way through the same place at no
further charge. You need not require recipients to copy the
Corresponding Source along with the object code. If the place to
copy the object code is a network server, the Corresponding Source
may be on a different server (operated by you or a third party)
that supports equivalent copying facilities, provided you maintain
clear directions next to the object code saying where to find the
Corresponding Source. Regardless of what server hosts the
Corresponding Source, you remain obligated to ensure that it is
available for as long as needed to satisfy these requirements.
- e) Convey the object code using peer-to-peer transmission,
provided you inform other peers where the object code and
Corresponding Source of the work are being offered to the general
public at no charge under subsection 6d.
A separable portion of the object code, whose source code is excluded
from the Corresponding Source as a System Library, need not be
included in conveying the object code work.
A "User Product" is either (1) a "consumer product", which means any
tangible personal property which is normally used for personal,
family, or household purposes, or (2) anything designed or sold for
incorporation into a dwelling. In determining whether a product is a
consumer product, doubtful cases shall be resolved in favor of
coverage. For a particular product received by a particular user,
"normally used" refers to a typical or common use of that class of
product, regardless of the status of the particular user or of the way
in which the particular user actually uses, or expects or is expected
to use, the product. A product is a consumer product regardless of
whether the product has substantial commercial, industrial or
non-consumer uses, unless such uses represent the only significant
mode of use of the product.
"Installation Information" for a User Product means any methods,
procedures, authorization keys, or other information required to
install and execute modified versions of a covered work in that User
Product from a modified version of its Corresponding Source. The
information must suffice to ensure that the continued functioning of
the modified object code is in no case prevented or interfered with
solely because modification has been made.
If you convey an object code work under this section in, or with, or
specifically for use in, a User Product, and the conveying occurs as
part of a transaction in which the right of possession and use of the
User Product is transferred to the recipient in perpetuity or for a
fixed term (regardless of how the transaction is characterized), the
Corresponding Source conveyed under this section must be accompanied
by the Installation Information. But this requirement does not apply
if neither you nor any third party retains the ability to install
modified object code on the User Product (for example, the work has
been installed in ROM).
The requirement to provide Installation Information does not include a
requirement to continue to provide support service, warranty, or
updates for a work that has been modified or installed by the
recipient, or for the User Product in which it has been modified or
installed. Access to a network may be denied when the modification
itself materially and adversely affects the operation of the network
or violates the rules and protocols for communication across the
network.
Corresponding Source conveyed, and Installation Information provided,
in accord with this section must be in a format that is publicly
documented (and with an implementation available to the public in
source code form), and must require no special password or key for
unpacking, reading or copying.
#### 7. Additional Terms.
"Additional permissions" are terms that supplement the terms of this
License by making exceptions from one or more of its conditions.
Additional permissions that are applicable to the entire Program shall
be treated as though they were included in this License, to the extent
that they are valid under applicable law. If additional permissions
apply only to part of the Program, that part may be used separately
under those permissions, but the entire Program remains governed by
this License without regard to the additional permissions.
When you convey a copy of a covered work, you may at your option
remove any additional permissions from that copy, or from any part of
it. (Additional permissions may be written to require their own
removal in certain cases when you modify the work.) You may place
additional permissions on material, added by you to a covered work,
for which you have or can give appropriate copyright permission.
Notwithstanding any other provision of this License, for material you
add to a covered work, you may (if authorized by the copyright holders
of that material) supplement the terms of this License with terms:
- a) Disclaiming warranty or limiting liability differently from the
terms of sections 15 and 16 of this License; or
- b) Requiring preservation of specified reasonable legal notices or
author attributions in that material or in the Appropriate Legal
Notices displayed by works containing it; or
- c) Prohibiting misrepresentation of the origin of that material,
or requiring that modified versions of such material be marked in
reasonable ways as different from the original version; or
- d) Limiting the use for publicity purposes of names of licensors
or authors of the material; or
- e) Declining to grant rights under trademark law for use of some
trade names, trademarks, or service marks; or
- f) Requiring indemnification of licensors and authors of that
material by anyone who conveys the material (or modified versions
of it) with contractual assumptions of liability to the recipient,
for any liability that these contractual assumptions directly
impose on those licensors and authors.
All other non-permissive additional terms are considered "further
restrictions" within the meaning of section 10. If the Program as you
received it, or any part of it, contains a notice stating that it is
governed by this License along with a term that is a further
restriction, you may remove that term. If a license document contains
a further restriction but permits relicensing or conveying under this
License, you may add to a covered work material governed by the terms
of that license document, provided that the further restriction does
not survive such relicensing or conveying.
If you add terms to a covered work in accord with this section, you
must place, in the relevant source files, a statement of the
additional terms that apply to those files, or a notice indicating
where to find the applicable terms.
Additional terms, permissive or non-permissive, may be stated in the
form of a separately written license, or stated as exceptions; the
above requirements apply either way.
#### 8. Termination.
You may not propagate or modify a covered work except as expressly
provided under this License. Any attempt otherwise to propagate or
modify it is void, and will automatically terminate your rights under
this License (including any patent licenses granted under the third
paragraph of section 11).
However, if you cease all violation of this License, then your license
from a particular copyright holder is reinstated (a) provisionally,
unless and until the copyright holder explicitly and finally
terminates your license, and (b) permanently, if the copyright holder
fails to notify you of the violation by some reasonable means prior to
60 days after the cessation.
Moreover, your license from a particular copyright holder is
reinstated permanently if the copyright holder notifies you of the
violation by some reasonable means, this is the first time you have
received notice of violation of this License (for any work) from that
copyright holder, and you cure the violation prior to 30 days after
your receipt of the notice.
Termination of your rights under this section does not terminate the
licenses of parties who have received copies or rights from you under
this License. If your rights have been terminated and not permanently
reinstated, you do not qualify to receive new licenses for the same
material under section 10.
#### 9. Acceptance Not Required for Having Copies.
You are not required to accept this License in order to receive or run
a copy of the Program. Ancillary propagation of a covered work
occurring solely as a consequence of using peer-to-peer transmission
to receive a copy likewise does not require acceptance. However,
nothing other than this License grants you permission to propagate or
modify any covered work. These actions infringe copyright if you do
not accept this License. Therefore, by modifying or propagating a
covered work, you indicate your acceptance of this License to do so.
#### 10. Automatic Licensing of Downstream Recipients.
Each time you convey a covered work, the recipient automatically
receives a license from the original licensors, to run, modify and
propagate that work, subject to this License. You are not responsible
for enforcing compliance by third parties with this License.
An "entity transaction" is a transaction transferring control of an
organization, or substantially all assets of one, or subdividing an
organization, or merging organizations. If propagation of a covered
work results from an entity transaction, each party to that
transaction who receives a copy of the work also receives whatever
licenses to the work the party's predecessor in interest had or could
give under the previous paragraph, plus a right to possession of the
Corresponding Source of the work from the predecessor in interest, if
the predecessor has it or can get it with reasonable efforts.
You may not impose any further restrictions on the exercise of the
rights granted or affirmed under this License. For example, you may
not impose a license fee, royalty, or other charge for exercise of
rights granted under this License, and you may not initiate litigation
(including a cross-claim or counterclaim in a lawsuit) alleging that
any patent claim is infringed by making, using, selling, offering for
sale, or importing the Program or any portion of it.
#### 11. Patents.
A "contributor" is a copyright holder who authorizes use under this
License of the Program or a work on which the Program is based. The
work thus licensed is called the contributor's "contributor version".
A contributor's "essential patent claims" are all patent claims owned
or controlled by the contributor, whether already acquired or
hereafter acquired, that would be infringed by some manner, permitted
by this License, of making, using, or selling its contributor version,
but do not include claims that would be infringed only as a
consequence of further modification of the contributor version. For
purposes of this definition, "control" includes the right to grant
patent sublicenses in a manner consistent with the requirements of
this License.
Each contributor grants you a non-exclusive, worldwide, royalty-free
patent license under the contributor's essential patent claims, to
make, use, sell, offer for sale, import and otherwise run, modify and
propagate the contents of its contributor version.
In the following three paragraphs, a "patent license" is any express
agreement or commitment, however denominated, not to enforce a patent
(such as an express permission to practice a patent or covenant not to
sue for patent infringement). To "grant" such a patent license to a
party means to make such an agreement or commitment not to enforce a
patent against the party.
If you convey a covered work, knowingly relying on a patent license,
and the Corresponding Source of the work is not available for anyone
to copy, free of charge and under the terms of this License, through a
publicly available network server or other readily accessible means,
then you must either (1) cause the Corresponding Source to be so
available, or (2) arrange to deprive yourself of the benefit of the
patent license for this particular work, or (3) arrange, in a manner
consistent with the requirements of this License, to extend the patent
license to downstream recipients. "Knowingly relying" means you have
actual knowledge that, but for the patent license, your conveying the
covered work in a country, or your recipient's use of the covered work
in a country, would infringe one or more identifiable patents in that
country that you have reason to believe are valid.
If, pursuant to or in connection with a single transaction or
arrangement, you convey, or propagate by procuring conveyance of, a
covered work, and grant a patent license to some of the parties
receiving the covered work authorizing them to use, propagate, modify
or convey a specific copy of the covered work, then the patent license
you grant is automatically extended to all recipients of the covered
work and works based on it.
A patent license is "discriminatory" if it does not include within the
scope of its coverage, prohibits the exercise of, or is conditioned on
the non-exercise of one or more of the rights that are specifically
granted under this License. You may not convey a covered work if you
are a party to an arrangement with a third party that is in the
business of distributing software, under which you make payment to the
third party based on the extent of your activity of conveying the
work, and under which the third party grants, to any of the parties
who would receive the covered work from you, a discriminatory patent
license (a) in connection with copies of the covered work conveyed by
you (or copies made from those copies), or (b) primarily for and in
connection with specific products or compilations that contain the
covered work, unless you entered into that arrangement, or that patent
license was granted, prior to 28 March 2007.
Nothing in this License shall be construed as excluding or limiting
any implied license or other defenses to infringement that may
otherwise be available to you under applicable patent law.
#### 12. No Surrender of Others' Freedom.
If conditions are imposed on you (whether by court order, agreement or
otherwise) that contradict the conditions of this License, they do not
excuse you from the conditions of this License. If you cannot convey a
covered work so as to satisfy simultaneously your obligations under
this License and any other pertinent obligations, then as a
consequence you may not convey it at all. For example, if you agree to
terms that obligate you to collect a royalty for further conveying
from those to whom you convey the Program, the only way you could
satisfy both those terms and this License would be to refrain entirely
from conveying the Program.
#### 13. Remote Network Interaction; Use with the GNU General Public License.
Notwithstanding any other provision of this License, if you modify the
Program, your modified version must prominently offer all users
interacting with it remotely through a computer network (if your
version supports such interaction) an opportunity to receive the
Corresponding Source of your version by providing access to the
Corresponding Source from a network server at no charge, through some
standard or customary means of facilitating copying of software. This
Corresponding Source shall include the Corresponding Source for any
work covered by version 3 of the GNU General Public License that is
incorporated pursuant to the following paragraph.
Notwithstanding any other provision of this License, you have
permission to link or combine any covered work with a work licensed
under version 3 of the GNU General Public License into a single
combined work, and to convey the resulting work. The terms of this
License will continue to apply to the part which is the covered work,
but the work with which it is combined will remain governed by version
3 of the GNU General Public License.
#### 14. Revised Versions of this License.
The Free Software Foundation may publish revised and/or new versions
of the GNU Affero General Public License from time to time. Such new
versions will be similar in spirit to the present version, but may
differ in detail to address new problems or concerns.
Each version is given a distinguishing version number. If the Program
specifies that a certain numbered version of the GNU Affero General
Public License "or any later version" applies to it, you have the
option of following the terms and conditions either of that numbered
version or of any later version published by the Free Software
Foundation. If the Program does not specify a version number of the
GNU Affero General Public License, you may choose any version ever
published by the Free Software Foundation.
If the Program specifies that a proxy can decide which future versions
of the GNU Affero General Public License can be used, that proxy's
public statement of acceptance of a version permanently authorizes you
to choose that version for the Program.
Later license versions may give you additional or different
permissions. However, no additional obligations are imposed on any
author or copyright holder as a result of your choosing to follow a
later version.
#### 15. Disclaimer of Warranty.
THERE IS NO WARRANTY FOR THE PROGRAM, TO THE EXTENT PERMITTED BY
APPLICABLE LAW. EXCEPT WHEN OTHERWISE STATED IN WRITING THE COPYRIGHT
HOLDERS AND/OR OTHER PARTIES PROVIDE THE PROGRAM "AS IS" WITHOUT
WARRANTY OF ANY KIND, EITHER EXPRESSED OR IMPLIED, INCLUDING, BUT NOT
LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
A PARTICULAR PURPOSE. THE ENTIRE RISK AS TO THE QUALITY AND
PERFORMANCE OF THE PROGRAM IS WITH YOU. SHOULD THE PROGRAM PROVE
DEFECTIVE, YOU ASSUME THE COST OF ALL NECESSARY SERVICING, REPAIR OR
CORRECTION.
#### 16. Limitation of Liability.
IN NO EVENT UNLESS REQUIRED BY APPLICABLE LAW OR AGREED TO IN WRITING
WILL ANY COPYRIGHT HOLDER, OR ANY OTHER PARTY WHO MODIFIES AND/OR
CONVEYS THE PROGRAM AS PERMITTED ABOVE, BE LIABLE TO YOU FOR DAMAGES,
INCLUDING ANY GENERAL, SPECIAL, INCIDENTAL OR CONSEQUENTIAL DAMAGES
ARISING OUT OF THE USE OR INABILITY TO USE THE PROGRAM (INCLUDING BUT
NOT LIMITED TO LOSS OF DATA OR DATA BEING RENDERED INACCURATE OR
LOSSES SUSTAINED BY YOU OR THIRD PARTIES OR A FAILURE OF THE PROGRAM
TO OPERATE WITH ANY OTHER PROGRAMS), EVEN IF SUCH HOLDER OR OTHER
PARTY HAS BEEN ADVISED OF THE POSSIBILITY OF SUCH DAMAGES.
#### 17. Interpretation of Sections 15 and 16.
If the disclaimer of warranty and limitation of liability provided
above cannot be given local legal effect according to their terms,
reviewing courts shall apply local law that most closely approximates
an absolute waiver of all civil liability in connection with the
Program, unless a warranty or assumption of liability accompanies a
copy of the Program in return for a fee.
END OF TERMS AND CONDITIONS
### How to Apply These Terms to Your New Programs
If you develop a new program, and you want it to be of the greatest
possible use to the public, the best way to achieve this is to make it
free software which everyone can redistribute and change under these
terms.
To do so, attach the following notices to the program. It is safest to
attach them to the start of each source file to most effectively state
the exclusion of warranty; and each file should have at least the
"copyright" line and a pointer to where the full notice is found.
<one line to give the program's name and a brief idea of what it does.>
Copyright (C) <year> <name of author>
This program is free software: you can redistribute it and/or modify
it under the terms of the GNU Affero General Public License as
published by the Free Software Foundation, either version 3 of the
License, or (at your option) any later version.
This program is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU Affero General Public License for more details.
You should have received a copy of the GNU Affero General Public License
along with this program. If not, see <https://www.gnu.org/licenses/>.
Also add information on how to contact you by electronic and paper
mail.
If your software can interact with users remotely through a computer
network, you should also make sure that it provides a way for users to
get its source. For example, if your program is a web application, its
interface could display a "Source" link that leads users to an archive
of the code. There are many ways you could offer source, and different
solutions will be better for different programs; see section 13 for
the specific requirements.
You should also get your employer (if you work as a programmer) or
school, if any, to sign a "copyright disclaimer" for the program, if
necessary. For more information on this, and how to apply and follow
the GNU AGPL, see <https://www.gnu.org/licenses/>.


@@ -1,675 +0,0 @@
### GNU GENERAL PUBLIC LICENSE
Version 3, 29 June 2007
Copyright (C) 2007 Free Software Foundation, Inc.
<https://fsf.org/>
Everyone is permitted to copy and distribute verbatim copies of this
license document, but changing it is not allowed.
### Preamble
The GNU General Public License is a free, copyleft license for
software and other kinds of works.
The licenses for most software and other practical works are designed
to take away your freedom to share and change the works. By contrast,
the GNU General Public License is intended to guarantee your freedom
to share and change all versions of a program--to make sure it remains
free software for all its users. We, the Free Software Foundation, use
the GNU General Public License for most of our software; it applies
also to any other work released this way by its authors. You can apply
it to your programs, too.
When we speak of free software, we are referring to freedom, not
price. Our General Public Licenses are designed to make sure that you
have the freedom to distribute copies of free software (and charge for
them if you wish), that you receive source code or can get it if you
want it, that you can change the software or use pieces of it in new
free programs, and that you know you can do these things.
To protect your rights, we need to prevent others from denying you
these rights or asking you to surrender the rights. Therefore, you
have certain responsibilities if you distribute copies of the
software, or if you modify it: responsibilities to respect the freedom
of others.
For example, if you distribute copies of such a program, whether
gratis or for a fee, you must pass on to the recipients the same
freedoms that you received. You must make sure that they, too, receive
or can get the source code. And you must show them these terms so they
know their rights.
Developers that use the GNU GPL protect your rights with two steps:
(1) assert copyright on the software, and (2) offer you this License
giving you legal permission to copy, distribute and/or modify it.
For the developers' and authors' protection, the GPL clearly explains
that there is no warranty for this free software. For both users' and
authors' sake, the GPL requires that modified versions be marked as
changed, so that their problems will not be attributed erroneously to
authors of previous versions.
Some devices are designed to deny users access to install or run
modified versions of the software inside them, although the
manufacturer can do so. This is fundamentally incompatible with the
aim of protecting users' freedom to change the software. The
systematic pattern of such abuse occurs in the area of products for
individuals to use, which is precisely where it is most unacceptable.
Therefore, we have designed this version of the GPL to prohibit the
practice for those products. If such problems arise substantially in
other domains, we stand ready to extend this provision to those
domains in future versions of the GPL, as needed to protect the
freedom of users.
Finally, every program is threatened constantly by software patents.
States should not allow patents to restrict development and use of
software on general-purpose computers, but in those that do, we wish
to avoid the special danger that patents applied to a free program
could make it effectively proprietary. To prevent this, the GPL
assures that patents cannot be used to render the program non-free.
The precise terms and conditions for copying, distribution and
modification follow.
### TERMS AND CONDITIONS
#### 0. Definitions.
"This License" refers to version 3 of the GNU General Public License.
"Copyright" also means copyright-like laws that apply to other kinds
of works, such as semiconductor masks.
"The Program" refers to any copyrightable work licensed under this
License. Each licensee is addressed as "you". "Licensees" and
"recipients" may be individuals or organizations.
To "modify" a work means to copy from or adapt all or part of the work
in a fashion requiring copyright permission, other than the making of
an exact copy. The resulting work is called a "modified version" of
the earlier work or a work "based on" the earlier work.
A "covered work" means either the unmodified Program or a work based
on the Program.
To "propagate" a work means to do anything with it that, without
permission, would make you directly or secondarily liable for
infringement under applicable copyright law, except executing it on a
computer or modifying a private copy. Propagation includes copying,
distribution (with or without modification), making available to the
public, and in some countries other activities as well.
To "convey" a work means any kind of propagation that enables other
parties to make or receive copies. Mere interaction with a user
through a computer network, with no transfer of a copy, is not
conveying.
An interactive user interface displays "Appropriate Legal Notices" to
the extent that it includes a convenient and prominently visible
feature that (1) displays an appropriate copyright notice, and (2)
tells the user that there is no warranty for the work (except to the
extent that warranties are provided), that licensees may convey the
work under this License, and how to view a copy of this License. If
the interface presents a list of user commands or options, such as a
menu, a prominent item in the list meets this criterion.
#### 1. Source Code.
The "source code" for a work means the preferred form of the work for
making modifications to it. "Object code" means any non-source form of
a work.
A "Standard Interface" means an interface that either is an official
standard defined by a recognized standards body, or, in the case of
interfaces specified for a particular programming language, one that
is widely used among developers working in that language.
The "System Libraries" of an executable work include anything, other
than the work as a whole, that (a) is included in the normal form of
packaging a Major Component, but which is not part of that Major
Component, and (b) serves only to enable use of the work with that
Major Component, or to implement a Standard Interface for which an
implementation is available to the public in source code form. A
"Major Component", in this context, means a major essential component
(kernel, window system, and so on) of the specific operating system
(if any) on which the executable work runs, or a compiler used to
produce the work, or an object code interpreter used to run it.
The "Corresponding Source" for a work in object code form means all
the source code needed to generate, install, and (for an executable
work) run the object code and to modify the work, including scripts to
control those activities. However, it does not include the work's
System Libraries, or general-purpose tools or generally available free
programs which are used unmodified in performing those activities but
which are not part of the work. For example, Corresponding Source
includes interface definition files associated with source files for
the work, and the source code for shared libraries and dynamically
linked subprograms that the work is specifically designed to require,
such as by intimate data communication or control flow between those
subprograms and other parts of the work.
The Corresponding Source need not include anything that users can
regenerate automatically from other parts of the Corresponding Source.
The Corresponding Source for a work in source code form is that same
work.
#### 2. Basic Permissions.
All rights granted under this License are granted for the term of
copyright on the Program, and are irrevocable provided the stated
conditions are met. This License explicitly affirms your unlimited
permission to run the unmodified Program. The output from running a
covered work is covered by this License only if the output, given its
content, constitutes a covered work. This License acknowledges your
rights of fair use or other equivalent, as provided by copyright law.
You may make, run and propagate covered works that you do not convey,
without conditions so long as your license otherwise remains in force.
You may convey covered works to others for the sole purpose of having
them make modifications exclusively for you, or provide you with
facilities for running those works, provided that you comply with the
terms of this License in conveying all material for which you do not
control copyright. Those thus making or running the covered works for
you must do so exclusively on your behalf, under your direction and
control, on terms that prohibit them from making any copies of your
copyrighted material outside their relationship with you.
Conveying under any other circumstances is permitted solely under the
conditions stated below. Sublicensing is not allowed; section 10 makes
it unnecessary.
#### 3. Protecting Users' Legal Rights From Anti-Circumvention Law.
No covered work shall be deemed part of an effective technological
measure under any applicable law fulfilling obligations under article
11 of the WIPO copyright treaty adopted on 20 December 1996, or
similar laws prohibiting or restricting circumvention of such
measures.
When you convey a covered work, you waive any legal power to forbid
circumvention of technological measures to the extent such
circumvention is effected by exercising rights under this License with
respect to the covered work, and you disclaim any intention to limit
operation or modification of the work as a means of enforcing, against
the work's users, your or third parties' legal rights to forbid
circumvention of technological measures.
#### 4. Conveying Verbatim Copies.
You may convey verbatim copies of the Program's source code as you
receive it, in any medium, provided that you conspicuously and
appropriately publish on each copy an appropriate copyright notice;
keep intact all notices stating that this License and any
non-permissive terms added in accord with section 7 apply to the code;
keep intact all notices of the absence of any warranty; and give all
recipients a copy of this License along with the Program.
You may charge any price or no price for each copy that you convey,
and you may offer support or warranty protection for a fee.
#### 5. Conveying Modified Source Versions.
You may convey a work based on the Program, or the modifications to
produce it from the Program, in the form of source code under the
terms of section 4, provided that you also meet all of these
conditions:
- a) The work must carry prominent notices stating that you modified
it, and giving a relevant date.
- b) The work must carry prominent notices stating that it is
released under this License and any conditions added under
section 7. This requirement modifies the requirement in section 4
to "keep intact all notices".
- c) You must license the entire work, as a whole, under this
License to anyone who comes into possession of a copy. This
License will therefore apply, along with any applicable section 7
additional terms, to the whole of the work, and all its parts,
regardless of how they are packaged. This License gives no
permission to license the work in any other way, but it does not
invalidate such permission if you have separately received it.
- d) If the work has interactive user interfaces, each must display
Appropriate Legal Notices; however, if the Program has interactive
interfaces that do not display Appropriate Legal Notices, your
work need not make them do so.
A compilation of a covered work with other separate and independent
works, which are not by their nature extensions of the covered work,
and which are not combined with it such as to form a larger program,
in or on a volume of a storage or distribution medium, is called an
"aggregate" if the compilation and its resulting copyright are not
used to limit the access or legal rights of the compilation's users
beyond what the individual works permit. Inclusion of a covered work
in an aggregate does not cause this License to apply to the other
parts of the aggregate.
#### 6. Conveying Non-Source Forms.
You may convey a covered work in object code form under the terms of
sections 4 and 5, provided that you also convey the machine-readable
Corresponding Source under the terms of this License, in one of these
ways:
- a) Convey the object code in, or embodied in, a physical product
(including a physical distribution medium), accompanied by the
Corresponding Source fixed on a durable physical medium
customarily used for software interchange.
- b) Convey the object code in, or embodied in, a physical product
(including a physical distribution medium), accompanied by a
written offer, valid for at least three years and valid for as
long as you offer spare parts or customer support for that product
model, to give anyone who possesses the object code either (1) a
copy of the Corresponding Source for all the software in the
product that is covered by this License, on a durable physical
medium customarily used for software interchange, for a price no
more than your reasonable cost of physically performing this
conveying of source, or (2) access to copy the Corresponding
Source from a network server at no charge.
- c) Convey individual copies of the object code with a copy of the
written offer to provide the Corresponding Source. This
alternative is allowed only occasionally and noncommercially, and
only if you received the object code with such an offer, in accord
with subsection 6b.
- d) Convey the object code by offering access from a designated
place (gratis or for a charge), and offer equivalent access to the
Corresponding Source in the same way through the same place at no
further charge. You need not require recipients to copy the
Corresponding Source along with the object code. If the place to
copy the object code is a network server, the Corresponding Source
may be on a different server (operated by you or a third party)
that supports equivalent copying facilities, provided you maintain
clear directions next to the object code saying where to find the
Corresponding Source. Regardless of what server hosts the
Corresponding Source, you remain obligated to ensure that it is
available for as long as needed to satisfy these requirements.
- e) Convey the object code using peer-to-peer transmission,
provided you inform other peers where the object code and
Corresponding Source of the work are being offered to the general
public at no charge under subsection 6d.
A separable portion of the object code, whose source code is excluded
from the Corresponding Source as a System Library, need not be
included in conveying the object code work.
A "User Product" is either (1) a "consumer product", which means any
tangible personal property which is normally used for personal,
family, or household purposes, or (2) anything designed or sold for
incorporation into a dwelling. In determining whether a product is a
consumer product, doubtful cases shall be resolved in favor of
coverage. For a particular product received by a particular user,
"normally used" refers to a typical or common use of that class of
product, regardless of the status of the particular user or of the way
in which the particular user actually uses, or expects or is expected
to use, the product. A product is a consumer product regardless of
whether the product has substantial commercial, industrial or
non-consumer uses, unless such uses represent the only significant
mode of use of the product.
"Installation Information" for a User Product means any methods,
procedures, authorization keys, or other information required to
install and execute modified versions of a covered work in that User
Product from a modified version of its Corresponding Source. The
information must suffice to ensure that the continued functioning of
the modified object code is in no case prevented or interfered with
solely because modification has been made.
If you convey an object code work under this section in, or with, or
specifically for use in, a User Product, and the conveying occurs as
part of a transaction in which the right of possession and use of the
User Product is transferred to the recipient in perpetuity or for a
fixed term (regardless of how the transaction is characterized), the
Corresponding Source conveyed under this section must be accompanied
by the Installation Information. But this requirement does not apply
if neither you nor any third party retains the ability to install
modified object code on the User Product (for example, the work has
been installed in ROM).
The requirement to provide Installation Information does not include a
requirement to continue to provide support service, warranty, or
updates for a work that has been modified or installed by the
recipient, or for the User Product in which it has been modified or
installed. Access to a network may be denied when the modification
itself materially and adversely affects the operation of the network
or violates the rules and protocols for communication across the
network.
Corresponding Source conveyed, and Installation Information provided,
in accord with this section must be in a format that is publicly
documented (and with an implementation available to the public in
source code form), and must require no special password or key for
unpacking, reading or copying.
#### 7. Additional Terms.
"Additional permissions" are terms that supplement the terms of this
License by making exceptions from one or more of its conditions.
Additional permissions that are applicable to the entire Program shall
be treated as though they were included in this License, to the extent
that they are valid under applicable law. If additional permissions
apply only to part of the Program, that part may be used separately
under those permissions, but the entire Program remains governed by
this License without regard to the additional permissions.
When you convey a copy of a covered work, you may at your option
remove any additional permissions from that copy, or from any part of
it. (Additional permissions may be written to require their own
removal in certain cases when you modify the work.) You may place
additional permissions on material, added by you to a covered work,
for which you have or can give appropriate copyright permission.
Notwithstanding any other provision of this License, for material you
add to a covered work, you may (if authorized by the copyright holders
of that material) supplement the terms of this License with terms:
- a) Disclaiming warranty or limiting liability differently from the
terms of sections 15 and 16 of this License; or
- b) Requiring preservation of specified reasonable legal notices or
author attributions in that material or in the Appropriate Legal
Notices displayed by works containing it; or
- c) Prohibiting misrepresentation of the origin of that material,
or requiring that modified versions of such material be marked in
reasonable ways as different from the original version; or
- d) Limiting the use for publicity purposes of names of licensors
or authors of the material; or
- e) Declining to grant rights under trademark law for use of some
trade names, trademarks, or service marks; or
- f) Requiring indemnification of licensors and authors of that
material by anyone who conveys the material (or modified versions
of it) with contractual assumptions of liability to the recipient,
for any liability that these contractual assumptions directly
impose on those licensors and authors.
All other non-permissive additional terms are considered "further
restrictions" within the meaning of section 10. If the Program as you
received it, or any part of it, contains a notice stating that it is
governed by this License along with a term that is a further
restriction, you may remove that term. If a license document contains
a further restriction but permits relicensing or conveying under this
License, you may add to a covered work material governed by the terms
of that license document, provided that the further restriction does
not survive such relicensing or conveying.
If you add terms to a covered work in accord with this section, you
must place, in the relevant source files, a statement of the
additional terms that apply to those files, or a notice indicating
where to find the applicable terms.
Additional terms, permissive or non-permissive, may be stated in the
form of a separately written license, or stated as exceptions; the
above requirements apply either way.
#### 8. Termination.
You may not propagate or modify a covered work except as expressly
provided under this License. Any attempt otherwise to propagate or
modify it is void, and will automatically terminate your rights under
this License (including any patent licenses granted under the third
paragraph of section 11).
However, if you cease all violation of this License, then your license
from a particular copyright holder is reinstated (a) provisionally,
unless and until the copyright holder explicitly and finally
terminates your license, and (b) permanently, if the copyright holder
fails to notify you of the violation by some reasonable means prior to
60 days after the cessation.
Moreover, your license from a particular copyright holder is
reinstated permanently if the copyright holder notifies you of the
violation by some reasonable means, this is the first time you have
received notice of violation of this License (for any work) from that
copyright holder, and you cure the violation prior to 30 days after
your receipt of the notice.
Termination of your rights under this section does not terminate the
licenses of parties who have received copies or rights from you under
this License. If your rights have been terminated and not permanently
reinstated, you do not qualify to receive new licenses for the same
material under section 10.
#### 9. Acceptance Not Required for Having Copies.
You are not required to accept this License in order to receive or run
a copy of the Program. Ancillary propagation of a covered work
occurring solely as a consequence of using peer-to-peer transmission
to receive a copy likewise does not require acceptance. However,
nothing other than this License grants you permission to propagate or
modify any covered work. These actions infringe copyright if you do
not accept this License. Therefore, by modifying or propagating a
covered work, you indicate your acceptance of this License to do so.
#### 10. Automatic Licensing of Downstream Recipients.
Each time you convey a covered work, the recipient automatically
receives a license from the original licensors, to run, modify and
propagate that work, subject to this License. You are not responsible
for enforcing compliance by third parties with this License.
An "entity transaction" is a transaction transferring control of an
organization, or substantially all assets of one, or subdividing an
organization, or merging organizations. If propagation of a covered
work results from an entity transaction, each party to that
transaction who receives a copy of the work also receives whatever
licenses to the work the party's predecessor in interest had or could
give under the previous paragraph, plus a right to possession of the
Corresponding Source of the work from the predecessor in interest, if
the predecessor has it or can get it with reasonable efforts.
You may not impose any further restrictions on the exercise of the
rights granted or affirmed under this License. For example, you may
not impose a license fee, royalty, or other charge for exercise of
rights granted under this License, and you may not initiate litigation
(including a cross-claim or counterclaim in a lawsuit) alleging that
any patent claim is infringed by making, using, selling, offering for
sale, or importing the Program or any portion of it.
#### 11. Patents.
A "contributor" is a copyright holder who authorizes use under this
License of the Program or a work on which the Program is based. The
work thus licensed is called the contributor's "contributor version".
A contributor's "essential patent claims" are all patent claims owned
or controlled by the contributor, whether already acquired or
hereafter acquired, that would be infringed by some manner, permitted
by this License, of making, using, or selling its contributor version,
but do not include claims that would be infringed only as a
consequence of further modification of the contributor version. For
purposes of this definition, "control" includes the right to grant
patent sublicenses in a manner consistent with the requirements of
this License.
Each contributor grants you a non-exclusive, worldwide, royalty-free
patent license under the contributor's essential patent claims, to
make, use, sell, offer for sale, import and otherwise run, modify and
propagate the contents of its contributor version.
In the following three paragraphs, a "patent license" is any express
agreement or commitment, however denominated, not to enforce a patent
(such as an express permission to practice a patent or covenant not to
sue for patent infringement). To "grant" such a patent license to a
party means to make such an agreement or commitment not to enforce a
patent against the party.
If you convey a covered work, knowingly relying on a patent license,
and the Corresponding Source of the work is not available for anyone
to copy, free of charge and under the terms of this License, through a
publicly available network server or other readily accessible means,
then you must either (1) cause the Corresponding Source to be so
available, or (2) arrange to deprive yourself of the benefit of the
patent license for this particular work, or (3) arrange, in a manner
consistent with the requirements of this License, to extend the patent
license to downstream recipients. "Knowingly relying" means you have
actual knowledge that, but for the patent license, your conveying the
covered work in a country, or your recipient's use of the covered work
in a country, would infringe one or more identifiable patents in that
country that you have reason to believe are valid.
If, pursuant to or in connection with a single transaction or
arrangement, you convey, or propagate by procuring conveyance of, a
covered work, and grant a patent license to some of the parties
receiving the covered work authorizing them to use, propagate, modify
or convey a specific copy of the covered work, then the patent license
you grant is automatically extended to all recipients of the covered
work and works based on it.
A patent license is "discriminatory" if it does not include within the
scope of its coverage, prohibits the exercise of, or is conditioned on
the non-exercise of one or more of the rights that are specifically
granted under this License. You may not convey a covered work if you
are a party to an arrangement with a third party that is in the
business of distributing software, under which you make payment to the
third party based on the extent of your activity of conveying the
work, and under which the third party grants, to any of the parties
who would receive the covered work from you, a discriminatory patent
license (a) in connection with copies of the covered work conveyed by
you (or copies made from those copies), or (b) primarily for and in
connection with specific products or compilations that contain the
covered work, unless you entered into that arrangement, or that patent
license was granted, prior to 28 March 2007.
Nothing in this License shall be construed as excluding or limiting
any implied license or other defenses to infringement that may
otherwise be available to you under applicable patent law.
#### 12. No Surrender of Others' Freedom.
If conditions are imposed on you (whether by court order, agreement or
otherwise) that contradict the conditions of this License, they do not
excuse you from the conditions of this License. If you cannot convey a
covered work so as to satisfy simultaneously your obligations under
this License and any other pertinent obligations, then as a
consequence you may not convey it at all. For example, if you agree to
terms that obligate you to collect a royalty for further conveying
from those to whom you convey the Program, the only way you could
satisfy both those terms and this License would be to refrain entirely
from conveying the Program.
#### 13. Use with the GNU Affero General Public License.
Notwithstanding any other provision of this License, you have
permission to link or combine any covered work with a work licensed
under version 3 of the GNU Affero General Public License into a single
combined work, and to convey the resulting work. The terms of this
License will continue to apply to the part which is the covered work,
but the special requirements of the GNU Affero General Public License,
section 13, concerning interaction through a network will apply to the
combination as such.
#### 14. Revised Versions of this License.
The Free Software Foundation may publish revised and/or new versions
of the GNU General Public License from time to time. Such new versions
will be similar in spirit to the present version, but may differ in
detail to address new problems or concerns.
Each version is given a distinguishing version number. If the Program
specifies that a certain numbered version of the GNU General Public
License "or any later version" applies to it, you have the option of
following the terms and conditions either of that numbered version or
of any later version published by the Free Software Foundation. If the
Program does not specify a version number of the GNU General Public
License, you may choose any version ever published by the Free
Software Foundation.
If the Program specifies that a proxy can decide which future versions
of the GNU General Public License can be used, that proxy's public
statement of acceptance of a version permanently authorizes you to
choose that version for the Program.
Later license versions may give you additional or different
permissions. However, no additional obligations are imposed on any
author or copyright holder as a result of your choosing to follow a
later version.
#### 15. Disclaimer of Warranty.
THERE IS NO WARRANTY FOR THE PROGRAM, TO THE EXTENT PERMITTED BY
APPLICABLE LAW. EXCEPT WHEN OTHERWISE STATED IN WRITING THE COPYRIGHT
HOLDERS AND/OR OTHER PARTIES PROVIDE THE PROGRAM "AS IS" WITHOUT
WARRANTY OF ANY KIND, EITHER EXPRESSED OR IMPLIED, INCLUDING, BUT NOT
LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
A PARTICULAR PURPOSE. THE ENTIRE RISK AS TO THE QUALITY AND
PERFORMANCE OF THE PROGRAM IS WITH YOU. SHOULD THE PROGRAM PROVE
DEFECTIVE, YOU ASSUME THE COST OF ALL NECESSARY SERVICING, REPAIR OR
CORRECTION.
#### 16. Limitation of Liability.
IN NO EVENT UNLESS REQUIRED BY APPLICABLE LAW OR AGREED TO IN WRITING
WILL ANY COPYRIGHT HOLDER, OR ANY OTHER PARTY WHO MODIFIES AND/OR
CONVEYS THE PROGRAM AS PERMITTED ABOVE, BE LIABLE TO YOU FOR DAMAGES,
INCLUDING ANY GENERAL, SPECIAL, INCIDENTAL OR CONSEQUENTIAL DAMAGES
ARISING OUT OF THE USE OR INABILITY TO USE THE PROGRAM (INCLUDING BUT
NOT LIMITED TO LOSS OF DATA OR DATA BEING RENDERED INACCURATE OR
LOSSES SUSTAINED BY YOU OR THIRD PARTIES OR A FAILURE OF THE PROGRAM
TO OPERATE WITH ANY OTHER PROGRAMS), EVEN IF SUCH HOLDER OR OTHER
PARTY HAS BEEN ADVISED OF THE POSSIBILITY OF SUCH DAMAGES.
#### 17. Interpretation of Sections 15 and 16.
If the disclaimer of warranty and limitation of liability provided
above cannot be given local legal effect according to their terms,
reviewing courts shall apply local law that most closely approximates
an absolute waiver of all civil liability in connection with the
Program, unless a warranty or assumption of liability accompanies a
copy of the Program in return for a fee.
END OF TERMS AND CONDITIONS
### How to Apply These Terms to Your New Programs
If you develop a new program, and you want it to be of the greatest
possible use to the public, the best way to achieve this is to make it
free software which everyone can redistribute and change under these
terms.
To do so, attach the following notices to the program. It is safest to
attach them to the start of each source file to most effectively state
the exclusion of warranty; and each file should have at least the
"copyright" line and a pointer to where the full notice is found.
<one line to give the program's name and a brief idea of what it does.>
Copyright (C) <year> <name of author>
This program is free software: you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation, either version 3 of the License, or
(at your option) any later version.
This program is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with this program. If not, see <https://www.gnu.org/licenses/>.
Also add information on how to contact you by electronic and paper
mail.
If the program does terminal interaction, make it output a short
notice like this when it starts in an interactive mode:
<program> Copyright (C) <year> <name of author>
This program comes with ABSOLUTELY NO WARRANTY; for details type `show w'.
This is free software, and you are welcome to redistribute it
under certain conditions; type `show c' for details.
The hypothetical commands \`show w' and \`show c' should show the
appropriate parts of the General Public License. Of course, your
program's commands might be different; for a GUI interface, you would
use an "about box".
You should also get your employer (if you work as a programmer) or
school, if any, to sign a "copyright disclaimer" for the program, if
necessary. For more information on this, and how to apply and follow
the GNU GPL, see <https://www.gnu.org/licenses/>.
The GNU General Public License does not permit incorporating your
program into proprietary programs. If your program is a subroutine
library, you may consider it more useful to permit linking proprietary
applications with the library. If this is what you want to do, use the
GNU Lesser General Public License instead of this License. But first,
please read <https://www.gnu.org/licenses/why-not-lgpl.html>.

View File

@@ -1 +0,0 @@
# For additional configs/scripting

217
README.md
View File

@@ -1,33 +1,208 @@
# The Mempool Open Source Project™ [![mempool](https://img.shields.io/endpoint?url=https://dashboard.cypress.io/badge/simple/ry4br7/master&style=flat-square)](https://dashboard.cypress.io/projects/ry4br7/runs)
# mempool
## a mempool visualizer and explorer for Bitcoin
Mempool is the fully-featured mempool visualizer, explorer, and API service running at [mempool.space](https://mempool.space/).
![mempool](https://pbs.twimg.com/media/EAETXWCU4AAv2v-?format=jpg&name=4096x4096)
![blockchain](https://pbs.twimg.com/media/EAETXWAU8AAj4IP?format=jpg&name=4096x4096)
It is an open-source project developed and operated for the benefit of the Bitcoin community, with a focus on the emerging transaction fee market that is evolving Bitcoin into a multi-layer ecosystem.
## Pick the right version for your use case
![mempool](https://mempool.space/resources/screenshots/v2.4.0-dashboard.png)
Mempool V1 has basic explorer functionality and can run from a Bitcoin Core full node on a Raspberry Pi (no pruning, txindex=1).
# Installation Methods
Mempool V2 is what runs on https://mempool.space and has advanced explorer functionality, but requires a fully synced electrs backend running on powerful server hardware.
Mempool can be self-hosted on a wide variety of your own hardware, ranging from a simple one-click installation on a Raspberry Pi full-node distro all the way to a robust production instance on a powerful FreeBSD server.
# Mempool V1 using Docker (easy)
**Most people should use a one-click install method.** Other install methods are meant for developers and others with experience managing servers.
Install from Docker Hub, passing your Bitcoin Core RPC credentials as environment variables:
<a id="one-click-installation"></a>
## One-Click Installation
```bash
docker pull mempool/mempool:v1.0
docker create -p 80:80 -e BITCOIN_NODE_HOST=192.168.1.102 -e BITCOIN_NODE_USER=foo -e BITCOIN_NODE_PASS=bar --name mempool mempool/mempool:v1.0
docker start mempool
docker logs mempool
```
Mempool can be conveniently installed on the following full-node distros:
- [Umbrel](https://github.com/getumbrel/umbrel)
- [RaspiBlitz](https://github.com/rootzoll/raspiblitz)
- [RoninDojo](https://code.samourai.io/ronindojo/RoninDojo)
- [myNode](https://github.com/mynodebtc/mynode)
- [Start9](https://github.com/Start9Labs/embassy-os)
You should see Mempool starting up; initial indexing takes over an hour (it needs 8 new blocks). When it's ready, visit http://127.0.0.1/ to see your mempool.
**We highly recommend you deploy your own Mempool instance this way.** No matter which option you pick, you'll be able to get your own fully-sovereign instance of Mempool up quickly without needing to fiddle with any settings.
# Mempool V1 without Docker (advanced)
## Advanced Installation Methods
## Dependencies
Mempool can be installed in other ways too, but we only recommend doing so if you're a developer, have experience managing servers, or otherwise know what you're doing.
* Bitcoin (full node required, no pruning, txindex=1)
* NodeJS (official stable LTS)
* MySQL or MariaDB (default config)
* Nginx (use supplied nginx.conf)
- See the [`docker/`](./docker/) directory for instructions on deploying Mempool with Docker.
- See the [`backend/`](./backend/) and [`frontend/`](./frontend/) directories for manual install instructions oriented for developers.
- See the [`production/`](./production/) directory for guidance on setting up a more serious Mempool instance designed for high performance at scale.
## Checking out a release tag
```bash
git clone https://github.com/mempool-space/mempool.space
cd mempool.space
git checkout v1.0.0 # put latest release tag here
```
## Bitcoin Core (bitcoind)
Enable RPC and txindex in bitcoin.conf
```
rpcuser=mempool
rpcpassword=71b61986da5b03a5694d7c7d5165ece5
txindex=1
```
## NodeJS
Install dependencies and build code:
```bash
# Install TypeScript Globally
npm install -g typescript
# Frontend
cd frontend
npm install
npm run build
# Backend
cd ../backend/
npm install
npm run build
```
## Mempool Configuration
In the `backend` folder, make a copy of the sample config and modify it to fit your settings.
```bash
cp mempool-config.sample.json mempool-config.json
```
Edit `mempool-config.json` to add your Bitcoin Core node RPC credentials:
```json
"BITCOIN_NODE_HOST": "192.168.1.5",
"BITCOIN_NODE_PORT": 8332,
"BITCOIN_NODE_USER": "mempool",
"BITCOIN_NODE_PASS": "71b61986da5b03a5694d7c7d5165ece5",
```
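Before moving on, you can optionally sanity-check these credentials with a raw JSON-RPC call. This is a quick test, not part of the original setup; substitute your own host and credentials from the config above:
```bash
# should return a JSON blob with chain info if RPC access works
curl --user mempool:71b61986da5b03a5694d7c7d5165ece5 \
  -H 'content-type: text/plain;' \
  --data-binary '{"jsonrpc":"1.0","id":"check","method":"getblockchaininfo","params":[]}' \
  http://192.168.1.5:8332/
```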
## MySQL
Install MariaDB:
```bash
# Linux
apt-get install mariadb-server mariadb-client
# macOS
brew install mariadb
brew services start mariadb
```
Create database and grant privileges:
```bash
MariaDB [(none)]> drop database mempool;
Query OK, 0 rows affected (0.00 sec)
MariaDB [(none)]> create database mempool;
Query OK, 1 row affected (0.00 sec)
MariaDB [(none)]> grant all privileges on mempool.* to 'mempool' identified by 'mempool';
Query OK, 0 rows affected (0.00 sec)
```
From the root folder, initialize database structure:
```bash
mysql -u mempool -p mempool < mariadb-structure.sql
```
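As an optional check (not in the original guide), confirm the grant works and the tables were created, assuming the password `mempool` from the grant above:
```bash
# lists the tables created by mariadb-structure.sql
mysql -u mempool -pmempool mempool -e "SHOW TABLES;"
```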
## Running (Backend)
Create an initial empty cache and start the app:
```bash
touch cache.json
npm run start # node dist/index.js
```
After starting you should see:
```bash
Server started on port 8999 :)
New block found (#586498)! 0 of 1986 found in mempool. 1985 not found.
New block found (#586499)! 0 of 1094 found in mempool. 1093 not found.
New block found (#586500)! 0 of 2735 found in mempool. 2734 not found.
New block found (#586501)! 0 of 2675 found in mempool. 2674 not found.
New block found (#586502)! 0 of 975 found in mempool. 974 not found.
New block found (#586503)! 0 of 2130 found in mempool. 2129 not found.
New block found (#586504)! 0 of 2770 found in mempool. 2769 not found.
New block found (#586505)! 0 of 2759 found in mempool. 2758 not found.
Updating mempool
Calculated fee for transaction 1 / 3257
Calculated fee for transaction 2 / 3257
Calculated fee for transaction 3 / 3257
Calculated fee for transaction 4 / 3257
Calculated fee for transaction 5 / 3257
Calculated fee for transaction 6 / 3257
Calculated fee for transaction 7 / 3257
Calculated fee for transaction 8 / 3257
Calculated fee for transaction 9 / 3257
```
You need to wait for at least *8 blocks to be mined*, so please wait ~80 minutes.
The backend also needs to index transactions, calculate fees, etc.
When it's ready you will see output like this:
```bash
Mempool updated in 0.189 seconds
Updating mempool
Mempool updated in 0.096 seconds
Updating mempool
Mempool updated in 0.099 seconds
Updating mempool
Calculated fee for transaction 1 / 10
Calculated fee for transaction 2 / 10
Calculated fee for transaction 3 / 10
Calculated fee for transaction 4 / 10
Calculated fee for transaction 5 / 10
Calculated fee for transaction 6 / 10
Calculated fee for transaction 7 / 10
Calculated fee for transaction 8 / 10
Calculated fee for transaction 9 / 10
Calculated fee for transaction 10 / 10
Mempool updated in 0.243 seconds
Updating mempool
```
## nginx + Certbot (Let's Encrypt)
Set up nginx using the supplied nginx.conf:
```bash
# install nginx and certbot
apt-get install -y nginx python-certbot-nginx
# replace example.com with your domain name
certbot --nginx -d example.com
# install the mempool configuration for nginx
cp nginx.conf /etc/nginx/nginx.conf
# edit the installed nginx.conf, and replace all
# instances of example.com with your domain name
```
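Optionally, validate the edited config before reloading it (a quick check, assuming a systemd-based distro):
```bash
# nginx -t checks the config syntax; reload applies it without downtime
nginx -t && systemctl reload nginx
```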
Make sure you can access https://<your-domain-name>/ in a browser before proceeding.
## Running (Frontend)
Build the frontend static HTML/CSS/JS and rsync the output into the nginx folder:
```bash
cd frontend/
npm run build
sudo rsync -av --delete dist/mempool/ /var/www/html/
```
## Try It Out
If everything went okay you should see the beautiful mempool :grin:
If you get stuck on "loading blocks", this means the websocket can't connect.
Check your nginx proxy setup, firewalls, etc. and open an issue if you need help.
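One way to check the proxy is to attempt a websocket handshake by hand. This is a diagnostic sketch, not from the original docs; the path below is an assumption, so use whatever websocket URL your browser's network tab shows. A correctly proxied endpoint answers with `HTTP/1.1 101`:
```bash
# expect "HTTP/1.1 101 Switching Protocols" if the websocket proxy works
curl -i -N \
  -H "Connection: Upgrade" -H "Upgrade: websocket" \
  -H "Sec-WebSocket-Version: 13" -H "Sec-WebSocket-Key: dGhlIHNhbXBsZSBub25jZQ==" \
  https://example.com/api/v1/ws
```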

View File

@@ -1,17 +0,0 @@
# Editor configuration, see https://editorconfig.org
root = true
[*]
charset = utf-8
indent_style = space
indent_size = 2
insert_final_newline = true
trim_trailing_whitespace = true
[*.ts]
quote_type = single
[*.md]
max_line_length = off
trim_trailing_whitespace = false

View File

@@ -1,2 +0,0 @@
node_modules
dist

View File

@@ -1,37 +0,0 @@
{
"root": true,
"parser": "@typescript-eslint/parser",
"plugins": [
"@typescript-eslint"
],
"extends": [
"eslint:recommended",
"plugin:@typescript-eslint/eslint-recommended",
"plugin:@typescript-eslint/recommended",
"prettier"
],
"rules": {
"@typescript-eslint/ban-ts-comment": 1,
"@typescript-eslint/ban-types": 1,
"@typescript-eslint/no-empty-function": 1,
"@typescript-eslint/no-explicit-any": 1,
"@typescript-eslint/no-inferrable-types": 0,
"@typescript-eslint/no-namespace": 1,
"@typescript-eslint/no-this-alias": 1,
"@typescript-eslint/no-var-requires": 1,
"@typescript-eslint/explicit-function-return-type": 1,
"no-console": 1,
"no-constant-condition": 1,
"no-dupe-else-if": 1,
"no-empty": 1,
"no-prototype-builtins": 1,
"no-self-assign": 1,
"no-useless-catch": 1,
"no-var": 1,
"prefer-const": 1,
"prefer-rest-params": 1,
"quotes": [1, "single", { "allowTemplateLiterals": true }],
"semi": 1,
"eqeqeq": 1
}
}

8
backend/.gitignore vendored
View File

@@ -1,11 +1,7 @@
# See http://help.github.com/ignore-files/ for more about ignoring files.
# production config and external assets
!mempool-config.template.json
!mempool-config.sample.json
# production config
mempool-config.json
pools.json
icons.json
# compiled output
/dist
@@ -45,3 +41,5 @@ testem.log
#System Files
.DS_Store
Thumbs.db
cache.json

View File

@@ -1,2 +0,0 @@
node_modules
package-lock.json

View File

@@ -1,6 +0,0 @@
{
"endOfLine": "lf",
"printWidth": 80,
"tabWidth": 2,
"trailingComma": "es5"
}

View File

@@ -1,4 +0,0 @@
{
"editor.tabSize": 2,
"typescript.tsdk": "../backend/node_modules/typescript/lib"
}

View File

@@ -1,220 +0,0 @@
# Mempool Backend
These instructions are mostly intended for developers.
If you choose to use these instructions for a production setup, be aware that you will still probably need to do additional configuration for your specific OS, environment, use-case, etc. We do our best here to provide a good starting point, but only proceed if you know what you're doing. Mempool does not provide support for custom setups.
See other ways to set up Mempool on [the main README](/../../#installation-methods).
Jump to a section in this doc:
- [Set Up the Backend](#setup)
- [Development Tips](#development-tips)
## Setup
### 1. Clone Mempool Repository
Get the latest Mempool code:
```
git clone https://github.com/mempool/mempool
cd mempool
```
Check out the latest release:
```
latestrelease=$(curl -s https://api.github.com/repos/mempool/mempool/releases/latest|grep tag_name|head -1|cut -d '"' -f4)
git checkout $latestrelease
```
### 2. Configure Bitcoin Core
Turn on `txindex`, enable RPC, and set RPC credentials in `bitcoin.conf`:
```
txindex=1
server=1
rpcuser=mempool
rpcpassword=mempool
```
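A quick way to confirm the RPC settings took effect (optional; assumes `bitcoin-cli` is on your PATH and bitcoind has been restarted):
```
bitcoin-cli -rpcuser=mempool -rpcpassword=mempool getblockchaininfo
```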
### 3. Configure Electrum Server
[Pick an Electrum Server implementation](https://mempool.space/docs/faq#address-lookup-issues), configure it, and make sure it's synced.
**This step is optional.** You can run Mempool without configuring an Electrum Server for it, but address lookups will be disabled.
### 4. Configure MariaDB
_Mempool needs MariaDB v10.5 or later. If you already have MySQL installed, make sure to migrate any existing databases **before** installing MariaDB._
Get MariaDB from your operating system's package manager:
```
# Debian, Ubuntu, etc.
apt-get install mariadb-server mariadb-client
# macOS
brew install mariadb
mysql.server start
```
Create a database and grant privileges:
```
MariaDB [(none)]> drop database mempool;
Query OK, 0 rows affected (0.00 sec)
MariaDB [(none)]> create database mempool;
Query OK, 1 row affected (0.00 sec)
MariaDB [(none)]> grant all privileges on mempool.* to 'mempool'@'%' identified by 'mempool';
Query OK, 0 rows affected (0.00 sec)
```
### 5. Prepare Mempool Backend
#### Build
_Make sure to use Node.js 16.10 and npm 7._
Install dependencies with `npm` and build the backend:
```
cd backend
npm install
npm run build
```
#### Configure
In the backend folder, make a copy of the sample config file:
```
cp mempool-config.sample.json mempool-config.json
```
Edit `mempool-config.json` as needed.
In particular, make sure:
- the correct Bitcoin Core RPC credentials are specified in `CORE_RPC`
- the correct `BACKEND` is specified in `MEMPOOL`:
- "electrum" if you're using [romanz/electrs](https://github.com/romanz/electrs) or [cculianu/Fulcrum](https://github.com/cculianu/Fulcrum)
- "esplora" if you're using [Blockstream/electrs](https://github.com/Blockstream/electrs)
- "none" if you're not using any Electrum Server
### 6. Run Mempool Backend
Run the Mempool backend:
```
npm run start
```
You can also set env var `MEMPOOL_CONFIG_FILE` to specify a custom config file location:
```
MEMPOOL_CONFIG_FILE=/path/to/mempool-config.json npm run start
```
When it's running, you should see output like this:
```
Mempool updated in 0.189 seconds
Updating mempool
Mempool updated in 0.096 seconds
Updating mempool
Mempool updated in 0.099 seconds
Updating mempool
Calculated fee for transaction 1 / 10
Calculated fee for transaction 2 / 10
Calculated fee for transaction 3 / 10
Calculated fee for transaction 4 / 10
Calculated fee for transaction 5 / 10
Calculated fee for transaction 6 / 10
Calculated fee for transaction 7 / 10
Calculated fee for transaction 8 / 10
Calculated fee for transaction 9 / 10
Calculated fee for transaction 10 / 10
Mempool updated in 0.243 seconds
Updating mempool
```
### 7. Set Up Mempool Frontend
With the backend configured and running, proceed to set up the [Mempool frontend](../frontend#manual-setup).
## Development Tips
### Set Up Backend Watchers
The Mempool backend is compiled ahead of time: the TypeScript sources are built into the `dist` folder and served through a Node.js web server.
As a result, for development purposes, you may find it helpful to set up backend watchers to avoid the manual shutdown/recompile/restart command-line cycle.
First, install `nodemon` and `ts-node`:
```
npm install -g ts-node nodemon
```
Then, run the watcher:
```
nodemon src/index.ts --ignore cache/ --ignore pools.json
```
`nodemon` should be in npm's global binary folder. If needed, you can determine where that is with `npm -g bin`.
### Useful Regtest Commands
Helpful link: https://gist.github.com/System-Glitch/cb4e87bf1ae3fec9925725bb3ebe223a
Run bitcoind on regtest:
```
bitcoind -regtest -rpcport=8332
```
Create a new wallet, if needed:
```
bitcoin-cli -regtest -rpcport=8332 createwallet test
```
Load the wallet (this command may take a while if you have a lot of UTXOs):
```
bitcoin-cli -regtest -rpcport=8332 loadwallet test
```
Get a new address:
```
address=$(./src/bitcoin-cli -regtest -rpcport=8332 getnewaddress)
```
Mine blocks to the previously generated address. You need at least 101 blocks before the first coinbase reward becomes spendable. This will take some time to execute (~1 min):
```
bitcoin-cli -regtest -rpcport=8332 generatetoaddress 101 $address
```
Send 0.1 BTC at 5 sat/vB to another address:
```
./src/bitcoin-cli -named -regtest -rpcport=8332 sendtoaddress address=$(./src/bitcoin-cli -regtest -rpcport=8332 getnewaddress) amount=0.1 fee_rate=5
```
See more examples of `sendtoaddress`:
```
./src/bitcoin-cli sendtoaddress # will print the help
```
A mini script to generate transactions with random fee rates (between 1 and 100 sat/vB). It's slow, so don't expect it to produce meaningful mempool spam unless you let it run for a long time or connect multiple regtest nodes to each other.
```
#!/bin/bash
address=$(./src/bitcoin-cli -regtest -rpcport=8332 getnewaddress)
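# jot is a BSD/macOS utility; on GNU/Linux, $(shuf -i 1-100 -n 1) gives an equivalent random fee rate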
for i in {1..1000000}
do
./src/bitcoin-cli -regtest -rpcport=8332 -named sendtoaddress address=$address amount=0.01 fee_rate=$(jot -r 1 1 100)
done
```
Generate a block at a regular interval (every 10 seconds in this example):
```
watch -n 10 "./src/bitcoin-cli -regtest -rpcport=8332 generatetoaddress 1 $address"
```

View File

@@ -1 +0,0 @@
*.json

View File

@@ -1,20 +0,0 @@
import type { Config } from "@jest/types"
const config: Config.InitialOptions = {
preset: "ts-jest",
testEnvironment: "node",
verbose: true,
automock: false,
collectCoverage: true,
collectCoverageFrom: ["./src/**/**.ts"],
coverageProvider: "babel",
coverageThreshold: {
global: {
lines: 1
}
},
setupFiles: [
"./testSetup.ts",
],
}
export default config;

View File

@@ -1,119 +1,25 @@
{
"MEMPOOL": {
"NETWORK": "mainnet",
"BACKEND": "electrum",
"ENABLED": true,
"HTTP_PORT": 8999,
"SPAWN_CLUSTER_PROCS": 0,
"API_URL_PREFIX": "/api/v1/",
"POLL_RATE_MS": 2000,
"CACHE_DIR": "./cache",
"CLEAR_PROTECTION_MINUTES": 20,
"RECOMMENDED_FEE_PERCENTILE": 50,
"BLOCK_WEIGHT_UNITS": 4000000,
"INITIAL_BLOCKS_AMOUNT": 8,
"MEMPOOL_BLOCKS_AMOUNT": 8,
"INDEXING_BLOCKS_AMOUNT": 11000,
"BLOCKS_SUMMARIES_INDEXING": false,
"PRICE_FEED_UPDATE_INTERVAL": 600,
"USE_SECOND_NODE_FOR_MINFEE": false,
"EXTERNAL_ASSETS": [],
"EXTERNAL_MAX_RETRY": 1,
"EXTERNAL_RETRY_INTERVAL": 0,
"USER_AGENT": "mempool",
"STDOUT_LOG_MIN_PRIORITY": "debug",
"AUTOMATIC_BLOCK_REINDEXING": false,
"POOLS_JSON_URL": "https://raw.githubusercontent.com/mempool/mining-pools/master/pools.json",
"POOLS_JSON_TREE_URL": "https://api.github.com/repos/mempool/mining-pools/git/trees/master",
"ADVANCED_GBT_AUDIT": false,
"ADVANCED_GBT_MEMPOOL": false,
"TRANSACTION_INDEXING": false
},
"CORE_RPC": {
"HOST": "127.0.0.1",
"PORT": 8332,
"USERNAME": "mempool",
"PASSWORD": "mempool"
},
"ELECTRUM": {
"HOST": "127.0.0.1",
"PORT": 50002,
"TLS_ENABLED": true
},
"ESPLORA": {
"REST_API_URL": "http://127.0.0.1:3000"
},
"SECOND_CORE_RPC": {
"HOST": "127.0.0.1",
"PORT": 8332,
"USERNAME": "mempool",
"PASSWORD": "mempool"
},
"DATABASE": {
"ENABLED": true,
"HOST": "127.0.0.1",
"PORT": 3306,
"SOCKET": "/var/run/mysql/mysql.sock",
"DATABASE": "mempool",
"USERNAME": "mempool",
"PASSWORD": "mempool"
},
"SYSLOG": {
"ENABLED": true,
"HOST": "127.0.0.1",
"PORT": 514,
"MIN_PRIORITY": "info",
"FACILITY": "local7"
},
"STATISTICS": {
"ENABLED": true,
"TX_PER_SECOND_SAMPLE_PERIOD": 150
},
"MAXMIND": {
"ENABLED": false,
"GEOLITE2_CITY": "/usr/local/share/GeoIP/GeoLite2-City.mmdb",
"GEOLITE2_ASN": "/usr/local/share/GeoIP/GeoLite2-ASN.mmdb",
"GEOIP2_ISP": "/usr/local/share/GeoIP/GeoIP2-ISP.mmdb"
},
"BISQ": {
"ENABLED": false,
"DATA_PATH": "/bisq/statsnode-data/btc_mainnet/db"
},
"LIGHTNING": {
"ENABLED": false,
"BACKEND": "lnd",
"STATS_REFRESH_INTERVAL": 600,
"GRAPH_REFRESH_INTERVAL": 600,
"LOGGER_UPDATE_INTERVAL": 30,
"FORENSICS_INTERVAL": 43200,
"FORENSICS_RATE_LIMIT": 20
},
"LND": {
"TLS_CERT_PATH": "tls.cert",
"MACAROON_PATH": "readonly.macaroon",
"REST_API_URL": "https://localhost:8080"
},
"CLIGHTNING": {
"SOCKET": "lightning-rpc"
},
"SOCKS5PROXY": {
"ENABLED": false,
"USE_ONION": true,
"HOST": "127.0.0.1",
"PORT": 9050,
"USERNAME": "",
"PASSWORD": ""
},
"PRICE_DATA_SERVER": {
"TOR_URL": "http://wizpriceje6q5tdrxkyiazsgu7irquiqjy2dptezqhrtu7l2qelqktid.onion/getAllMarketPrices",
"CLEARNET_URL": "https://price.bisq.wiz.biz/getAllMarketPrices"
},
"EXTERNAL_DATA_SERVER": {
"MEMPOOL_API": "https://mempool.space/api/v1",
"MEMPOOL_ONION": "http://mempoolhqx4isw62xs7abwphsq7ldayuidyx2v2oethdhhj6mlo2r6ad.onion/api/v1",
"LIQUID_API": "https://liquid.network/api/v1",
"LIQUID_ONION": "http://liquidmom47f6s3m53ebfxn47p76a6tlnxib3wp6deux7wuzotdr6cyd.onion/api/v1",
"BISQ_URL": "https://bisq.markets/api",
"BISQ_ONION": "http://bisqmktse2cabavbr2xjq7xw3h6g5ottemo5rolfcwt6aly6tp5fdryd.onion/api"
}
"ENV": "dev",
"DB_HOST": "localhost",
"DB_PORT": 3306,
"DB_USER": "mempool",
"DB_PASSWORD": "mempool",
"DB_DATABASE": "mempool",
"HTTP_PORT": 3000,
"API_ENDPOINT": "/api/v1/",
"CHAT_SSL_ENABLED": false,
"CHAT_SSL_PRIVKEY": "",
"CHAT_SSL_CERT": "",
"CHAT_SSL_CHAIN": "",
"MEMPOOL_REFRESH_RATE_MS": 500,
"INITIAL_BLOCK_AMOUNT": 8,
"DEFAULT_PROJECTED_BLOCKS_AMOUNT": 3,
"KEEP_BLOCK_AMOUNT": 24,
"BITCOIN_NODE_HOST": "localhost",
"BITCOIN_NODE_PORT": 8332,
"BITCOIN_NODE_USER": "",
"BITCOIN_NODE_PASS": "",
"BACKEND_API": "bitcoind",
"ELECTRS_API_URL": "https://mempool.space/api",
"TX_PER_SECOND_SPAN_SECONDS": 150
}

10930
backend/package-lock.json generated

File diff suppressed because it is too large Load Diff

View File

@@ -1,68 +1,31 @@
{
"name": "mempool-backend",
"version": "2.5.0-dev",
"description": "Bitcoin mempool visualizer and blockchain explorer backend",
"license": "GNU Affero General Public License v3.0",
"homepage": "https://mempool.space",
"repository": {
"type": "git",
"url": "git+https://github.com/mempool/mempool"
},
"bugs": {
"url": "https://github.com/mempool/mempool/issues"
},
"keywords": [
"bitcoin",
"mempool",
"blockchain",
"explorer",
"liquid",
"lightning"
],
"version": "1.0.0",
"description": "Bitcoin Mempool Visualizer",
"main": "index.ts",
"scripts": {
"tsc": "./node_modules/typescript/bin/tsc -p tsconfig.build.json",
"build": "npm run tsc && npm run create-resources",
"create-resources": "cp ./src/tasks/price-feeds/mtgox-weekly.json ./dist/tasks && node dist/api/fetch-version.js",
"package": "npm run build && rm -rf package && mv dist package && mv node_modules package && npm run package-rm-build-deps",
"package-rm-build-deps": "(cd package/node_modules; rm -r typescript @typescript-eslint)",
"start": "node --max-old-space-size=2048 dist/index.js",
"start-production": "node --max-old-space-size=4096 dist/index.js",
"test": "./node_modules/.bin/jest --coverage",
"lint": "./node_modules/.bin/eslint . --ext .ts",
"lint:fix": "./node_modules/.bin/eslint . --ext .ts --fix",
"prettier": "./node_modules/.bin/prettier --write \"src/**/*.{js,ts}\""
"build": "tsc",
"start": "npm run build && node dist/index.js"
},
"author": {
"name": "Simon Lindh",
"url": "https://github.com/mempool-space/mempool.space"
},
"license": "MIT",
"dependencies": {
"@babel/core": "^7.20.5",
"@mempool/electrum-client": "^1.1.7",
"@types/node": "^16.11.41",
"axios": "~0.27.2",
"bitcoinjs-lib": "~6.0.2",
"crypto-js": "~4.1.1",
"express": "~4.18.2",
"maxmind": "~4.3.8",
"mysql2": "~2.3.3",
"node-worker-threads-pool": "~1.5.1",
"socks-proxy-agent": "~7.0.0",
"typescript": "~4.7.4",
"ws": "~8.11.0"
"bitcoin": "^3.0.1",
"compression": "^1.7.3",
"express": "^4.16.3",
"mysql2": "^1.6.1",
"request": "^2.88.0",
"ws": "^6.0.0"
},
"devDependencies": {
"@babel/core": "^7.20.5",
"@babel/code-frame": "^7.18.6",
"@types/compression": "^1.7.2",
"@types/crypto-js": "^4.1.1",
"@types/express": "^4.17.14",
"@types/jest": "^29.2.3",
"@types/ws": "~8.5.3",
"@typescript-eslint/eslint-plugin": "^5.45.0",
"@typescript-eslint/parser": "^5.45.0",
"eslint": "^8.28.0",
"eslint-config-prettier": "^8.5.0",
"jest": "^29.3.1",
"prettier": "^2.8.0",
"ts-jest": "^29.0.3",
"ts-node": "^10.9.1"
"@types/express": "^4.16.0",
"@types/mysql2": "github:types/mysql2",
"@types/request": "^2.48.2",
"@types/ws": "^6.0.1",
"tslint": "^5.11.0",
"typescript": "^3.1.1"
}
}

View File

@@ -1,115 +0,0 @@
{
"MEMPOOL": {
"ENABLED": true,
"NETWORK": "__MEMPOOL_NETWORK__",
"BACKEND": "__MEMPOOL_BACKEND__",
"ENABLED": true,
"BLOCKS_SUMMARIES_INDEXING": true,
"HTTP_PORT": 1,
"SPAWN_CLUSTER_PROCS": 2,
"API_URL_PREFIX": "__MEMPOOL_API_URL_PREFIX__",
"AUTOMATIC_BLOCK_REINDEXING": true,
"POLL_RATE_MS": 3,
"CACHE_DIR": "__MEMPOOL_CACHE_DIR__",
"CLEAR_PROTECTION_MINUTES": 4,
"RECOMMENDED_FEE_PERCENTILE": 5,
"BLOCK_WEIGHT_UNITS": 6,
"INITIAL_BLOCKS_AMOUNT": 7,
"MEMPOOL_BLOCKS_AMOUNT": 8,
"PRICE_FEED_UPDATE_INTERVAL": 9,
"USE_SECOND_NODE_FOR_MINFEE": 10,
"EXTERNAL_ASSETS": 11,
"EXTERNAL_MAX_RETRY": 12,
"EXTERNAL_RETRY_INTERVAL": 13,
"USER_AGENT": "__MEMPOOL_USER_AGENT__",
"STDOUT_LOG_MIN_PRIORITY": "__MEMPOOL_STDOUT_LOG_MIN_PRIORITY__",
"INDEXING_BLOCKS_AMOUNT": 14,
"POOLS_JSON_TREE_URL": "__POOLS_JSON_TREE_URL__",
"POOLS_JSON_URL": "__POOLS_JSON_URL__",
"ADVANCED_GBT_AUDIT": "__ADVANCED_GBT_AUDIT__",
"ADVANCED_GBT_MEMPOOL": "__ADVANCED_GBT_MEMPOOL__",
"TRANSACTION_INDEXING": "__TRANSACTION_INDEXING__"
},
"CORE_RPC": {
"HOST": "__CORE_RPC_HOST__",
"PORT": 15,
"USERNAME": "__CORE_RPC_USERNAME__",
"PASSWORD": "__CORE_RPC_PASSWORD__"
},
"ELECTRUM": {
"HOST": "__ELECTRUM_HOST__",
"PORT": 16,
"TLS_ENABLED": true
},
"ESPLORA": {
"REST_API_URL": "__ESPLORA_REST_API_URL__"
},
"SECOND_CORE_RPC": {
"HOST": "__SECOND_CORE_RPC_HOST__",
"PORT": 17,
"USERNAME": "__SECOND_CORE_RPC_USERNAME__",
"PASSWORD": "__SECOND_CORE_RPC_PASSWORD__"
},
"DATABASE": {
"ENABLED": false,
"HOST": "__DATABASE_HOST__",
"SOCKET": "__DATABASE_SOCKET__",
"PORT": 18,
"DATABASE": "__DATABASE_DATABASE__",
"USERNAME": "__DATABASE_USERNAME__",
"PASSWORD": "__DATABASE_PASSWORD__"
},
"SYSLOG": {
"ENABLED": false,
"HOST": "__SYSLOG_HOST__",
"PORT": 19,
"MIN_PRIORITY": "__SYSLOG_MIN_PRIORITY__",
"FACILITY": "__SYSLOG_FACILITY__"
},
"STATISTICS": {
"ENABLED": false,
"TX_PER_SECOND_SAMPLE_PERIOD": 20
},
"BISQ": {
"ENABLED": true,
"DATA_PATH": "__BISQ_DATA_PATH__"
},
"SOCKS5PROXY": {
"ENABLED": true,
"USE_ONION": true,
"HOST": "__SOCKS5PROXY_HOST__",
"PORT": "__SOCKS5PROXY_PORT__",
"USERNAME": "__SOCKS5PROXY_USERNAME__",
"PASSWORD": "__SOCKS5PROXY_PASSWORD__"
},
"PRICE_DATA_SERVER": {
"TOR_URL": "__PRICE_DATA_SERVER_TOR_URL__",
"CLEARNET_URL": "__PRICE_DATA_SERVER_CLEARNET_URL__"
},
"EXTERNAL_DATA_SERVER": {
"MEMPOOL_API": "__EXTERNAL_DATA_SERVER_MEMPOOL_API__",
"MEMPOOL_ONION": "__EXTERNAL_DATA_SERVER_MEMPOOL_ONION__",
"LIQUID_API": "__EXTERNAL_DATA_SERVER_LIQUID_API__",
"LIQUID_ONION": "__EXTERNAL_DATA_SERVER_LIQUID_ONION__",
"BISQ_URL": "__EXTERNAL_DATA_SERVER_BISQ_URL__",
"BISQ_ONION": "__EXTERNAL_DATA_SERVER_BISQ_ONION__"
},
"LIGHTNING": {
"ENABLED": "__LIGHTNING_ENABLED__",
"BACKEND": "__LIGHTNING_BACKEND__",
"TOPOLOGY_FOLDER": "__LIGHTNING_TOPOLOGY_FOLDER__",
"STATS_REFRESH_INTERVAL": 600,
"GRAPH_REFRESH_INTERVAL": 600,
"LOGGER_UPDATE_INTERVAL": 30,
"FORENSICS_INTERVAL": 43200,
"FORENSICS_RATE_LIMIT": "__FORENSICS_RATE_LIMIT__"
},
"LND": {
"TLS_CERT_PATH": "",
"MACAROON_PATH": "",
"REST_API_URL": "https://localhost:8080"
},
"CLIGHTNING": {
"SOCKET": "__CLIGHTNING_SOCKET__"
}
}

View File

@@ -1,62 +0,0 @@
import { calcDifficultyAdjustment, DifficultyAdjustment } from '../../api/difficulty-adjustment';
describe('Mempool Difficulty Adjustment', () => {
test('should calculate Difficulty Adjustments properly', () => {
const dt = (dtString: string) => {
return Math.floor(new Date(dtString).getTime() / 1000);
};
const vectors = [
[ // Vector 1
[ // Inputs
dt('2022-08-18T11:07:00.000Z'), // Last DA time (in seconds)
dt('2022-08-19T14:03:53.000Z'), // Current time (now) (in seconds)
750134, // Current block height
0.6280047707459726, // Previous retarget % (Passed through)
'mainnet', // Network (if testnet, next value is non-zero)
0, // If not testnet, not used
],
{ // Expected Result
progressPercent: 9.027777777777777,
difficultyChange: 12.562233927411782,
estimatedRetargetDate: 1661895424692,
remainingBlocks: 1834,
remainingTime: 977591692,
previousRetarget: 0.6280047707459726,
nextRetargetHeight: 751968,
timeAvg: 533038,
timeOffset: 0,
},
],
[ // Vector 2 (testnet)
[ // Inputs
dt('2022-08-18T11:07:00.000Z'), // Last DA time (in seconds)
dt('2022-08-19T14:03:53.000Z'), // Current time (now) (in seconds)
750134, // Current block height
0.6280047707459726, // Previous retarget % (Passed through)
'testnet', // Network
dt('2022-08-19T13:52:46.000Z'), // Latest block timestamp in seconds
],
{ // Expected Result is same other than timeOffset
progressPercent: 9.027777777777777,
difficultyChange: 12.562233927411782,
estimatedRetargetDate: 1661895424692,
remainingBlocks: 1834,
remainingTime: 977591692,
previousRetarget: 0.6280047707459726,
nextRetargetHeight: 751968,
timeAvg: 533038,
timeOffset: -667000, // 11 min 7 seconds since last block (testnet only)
// Adding timeAvg to abs(timeOffset) gives 1200038 ms, almost exactly the 20-minute testnet window
},
],
] as [[number, number, number, number, string, number], DifficultyAdjustment][];
for (const vector of vectors) {
const result = calcDifficultyAdjustment(...vector[0]);
// previousRetarget is passed through untouched
expect(result.previousRetarget).toStrictEqual(vector[0][3]);
expect(result).toStrictEqual(vector[1]);
}
});
});
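// A hand derivation of Vector 1's expected values, as a sanity-check sketch:
// it assumes the standard 2016-block retarget epoch and 600s target spacing
// (these constants and names are assumptions, not read from difficulty-adjustment.ts).
const EPOCH_BLOCKS = 2016;
const TARGET_MS = 600_000;
const lastRetarget = Date.parse('2022-08-18T11:07:00.000Z'); // ms
const now = Date.parse('2022-08-19T14:03:53.000Z');          // ms
const height = 750134;
const blocksIntoEpoch = height % EPOCH_BLOCKS;                      // 182
const progressPercent = 100 * blocksIntoEpoch / EPOCH_BLOCKS;       // 9.0277...
const remainingBlocks = EPOCH_BLOCKS - blocksIntoEpoch;             // 1834
const nextRetargetHeight = height + remainingBlocks;                // 751968
const timeAvg = Math.floor((now - lastRetarget) / blocksIntoEpoch); // 533038 ms
const difficultyChange = 100 * (TARGET_MS / timeAvg - 1);           // ~12.562233
const remainingTime = remainingBlocks * timeAvg;                    // 977591692 ms
const estimatedRetargetDate = now + remainingTime;                  // 1661895424692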

View File

@@ -1,143 +0,0 @@
import * as fs from 'fs';
describe('Mempool Backend Config', () => {
beforeEach(() => {
jest.resetAllMocks();
jest.resetModules();
});
test('should return defaults when no file is present', () => {
jest.isolateModules(() => {
jest.mock('../../mempool-config.json', () => ({}), { virtual: true });
const config = jest.requireActual('../config').default;
expect(config.MEMPOOL).toStrictEqual({
ENABLED: true,
NETWORK: 'mainnet',
BACKEND: 'none',
BLOCKS_SUMMARIES_INDEXING: false,
HTTP_PORT: 8999,
SPAWN_CLUSTER_PROCS: 0,
API_URL_PREFIX: '/api/v1/',
AUTOMATIC_BLOCK_REINDEXING: false,
POLL_RATE_MS: 2000,
CACHE_DIR: './cache',
CLEAR_PROTECTION_MINUTES: 20,
RECOMMENDED_FEE_PERCENTILE: 50,
BLOCK_WEIGHT_UNITS: 4000000,
INITIAL_BLOCKS_AMOUNT: 8,
MEMPOOL_BLOCKS_AMOUNT: 8,
INDEXING_BLOCKS_AMOUNT: 11000,
PRICE_FEED_UPDATE_INTERVAL: 600,
USE_SECOND_NODE_FOR_MINFEE: false,
EXTERNAL_ASSETS: [],
EXTERNAL_MAX_RETRY: 1,
EXTERNAL_RETRY_INTERVAL: 0,
USER_AGENT: 'mempool',
STDOUT_LOG_MIN_PRIORITY: 'debug',
POOLS_JSON_TREE_URL: 'https://api.github.com/repos/mempool/mining-pools/git/trees/master',
POOLS_JSON_URL: 'https://raw.githubusercontent.com/mempool/mining-pools/master/pools.json',
ADVANCED_GBT_AUDIT: false,
ADVANCED_GBT_MEMPOOL: false,
TRANSACTION_INDEXING: false,
});
expect(config.ELECTRUM).toStrictEqual({ HOST: '127.0.0.1', PORT: 3306, TLS_ENABLED: true });
expect(config.ESPLORA).toStrictEqual({ REST_API_URL: 'http://127.0.0.1:3000' });
expect(config.CORE_RPC).toStrictEqual({
HOST: '127.0.0.1',
PORT: 8332,
USERNAME: 'mempool',
PASSWORD: 'mempool'
});
expect(config.SECOND_CORE_RPC).toStrictEqual({
HOST: '127.0.0.1',
PORT: 8332,
USERNAME: 'mempool',
PASSWORD: 'mempool'
});
expect(config.DATABASE).toStrictEqual({
ENABLED: true,
HOST: '127.0.0.1',
SOCKET: '',
PORT: 3306,
DATABASE: 'mempool',
USERNAME: 'mempool',
PASSWORD: 'mempool'
});
expect(config.SYSLOG).toStrictEqual({
ENABLED: true,
HOST: '127.0.0.1',
PORT: 514,
MIN_PRIORITY: 'info',
FACILITY: 'local7'
});
expect(config.STATISTICS).toStrictEqual({ ENABLED: true, TX_PER_SECOND_SAMPLE_PERIOD: 150 });
expect(config.BISQ).toStrictEqual({ ENABLED: false, DATA_PATH: '/bisq/statsnode-data/btc_mainnet/db' });
expect(config.SOCKS5PROXY).toStrictEqual({
ENABLED: false,
USE_ONION: true,
HOST: '127.0.0.1',
PORT: 9050,
USERNAME: '',
PASSWORD: ''
});
expect(config.PRICE_DATA_SERVER).toStrictEqual({
TOR_URL: 'http://wizpriceje6q5tdrxkyiazsgu7irquiqjy2dptezqhrtu7l2qelqktid.onion/getAllMarketPrices',
CLEARNET_URL: 'https://price.bisq.wiz.biz/getAllMarketPrices'
});
expect(config.EXTERNAL_DATA_SERVER).toStrictEqual({
MEMPOOL_API: 'https://mempool.space/api/v1',
MEMPOOL_ONION: 'http://mempoolhqx4isw62xs7abwphsq7ldayuidyx2v2oethdhhj6mlo2r6ad.onion/api/v1',
LIQUID_API: 'https://liquid.network/api/v1',
LIQUID_ONION: 'http://liquidmom47f6s3m53ebfxn47p76a6tlnxib3wp6deux7wuzotdr6cyd.onion/api/v1',
BISQ_URL: 'https://bisq.markets/api',
BISQ_ONION: 'http://bisqmktse2cabavbr2xjq7xw3h6g5ottemo5rolfcwt6aly6tp5fdryd.onion/api'
});
});
});
test('should override the default values with the passed values', () => {
jest.isolateModules(() => {
const fixture = JSON.parse(fs.readFileSync(`${__dirname}/../__fixtures__/mempool-config.template.json`, 'utf8'));
jest.mock('../../mempool-config.json', () => (fixture), { virtual: true });
const config = jest.requireActual('../config').default;
expect(config.MEMPOOL).toStrictEqual(fixture.MEMPOOL);
expect(config.ELECTRUM).toStrictEqual(fixture.ELECTRUM);
expect(config.ESPLORA).toStrictEqual(fixture.ESPLORA);
expect(config.CORE_RPC).toStrictEqual(fixture.CORE_RPC);
expect(config.SECOND_CORE_RPC).toStrictEqual(fixture.SECOND_CORE_RPC);
expect(config.DATABASE).toStrictEqual(fixture.DATABASE);
expect(config.SYSLOG).toStrictEqual(fixture.SYSLOG);
expect(config.STATISTICS).toStrictEqual(fixture.STATISTICS);
expect(config.BISQ).toStrictEqual(fixture.BISQ);
expect(config.SOCKS5PROXY).toStrictEqual(fixture.SOCKS5PROXY);
expect(config.PRICE_DATA_SERVER).toStrictEqual(fixture.PRICE_DATA_SERVER);
expect(config.EXTERNAL_DATA_SERVER).toStrictEqual(fixture.EXTERNAL_DATA_SERVER);
});
});
});
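// Note on the pattern above: ../config builds its merged result once at import
// time, so each test case has to reload it inside jest.isolateModules with its
// own virtual mempool-config.json mock; otherwise a cached config instance
// would leak between the defaults assertion and the fixture-override assertion.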

View File

@@ -1,138 +0,0 @@
import config from '../config';
import bitcoinApi from './bitcoin/bitcoin-api-factory';
import { Common } from './common';
import { TransactionExtended, MempoolBlockWithTransactions, AuditScore } from '../mempool.interfaces';
import blocksRepository from '../repositories/BlocksRepository';
import blocksAuditsRepository from '../repositories/BlocksAuditsRepository';
import blocks from '../api/blocks';
const PROPAGATION_MARGIN = 180; // in seconds, time since a transaction is first seen after which it is assumed to have propagated to all miners
class Audit {
auditBlock(transactions: TransactionExtended[], projectedBlocks: MempoolBlockWithTransactions[], mempool: { [txId: string]: TransactionExtended })
: { censored: string[], added: string[], fresh: string[], score: number } {
if (!projectedBlocks?.[0]?.transactionIds || !mempool) {
return { censored: [], added: [], fresh: [], score: 0 };
}
const matches: string[] = []; // present in both mined block and template
const added: string[] = []; // present in mined block, not in template
const fresh: string[] = []; // missing, but firstSeen within PROPAGATION_MARGIN
const isCensored = {}; // missing, without excuse
const isDisplaced = {};
let displacedWeight = 0;
const inBlock = {};
const inTemplate = {};
const now = Math.round((Date.now() / 1000));
for (const tx of transactions) {
inBlock[tx.txid] = tx;
}
// coinbase is always expected
if (transactions[0]) {
inTemplate[transactions[0].txid] = true;
}
// look for transactions that were expected in the template, but missing from the mined block
for (const txid of projectedBlocks[0].transactionIds) {
if (!inBlock[txid]) {
// tx is recent, may have reached the miner too late for inclusion
if (mempool[txid]?.firstSeen != null && (now - (mempool[txid]?.firstSeen || 0)) <= PROPAGATION_MARGIN) {
fresh.push(txid);
} else {
isCensored[txid] = true;
}
displacedWeight += mempool[txid].weight;
}
inTemplate[txid] = true;
}
displacedWeight += (4000 - transactions[0].weight);
// we can expect an honest miner to include 'displaced' transactions in place of recent arrivals and censored txs
// these displaced transactions should occupy the first N weight units of the next projected block
let displacedWeightRemaining = displacedWeight;
let index = 0;
let lastFeeRate = Infinity;
let failures = 0;
while (projectedBlocks[1] && index < projectedBlocks[1].transactionIds.length && failures < 500) {
const txid = projectedBlocks[1].transactionIds[index];
const fits = (mempool[txid].weight - displacedWeightRemaining) < 4000;
const feeMatches = mempool[txid].effectiveFeePerVsize >= lastFeeRate;
if (fits || feeMatches) {
isDisplaced[txid] = true;
if (fits) {
lastFeeRate = Math.min(lastFeeRate, mempool[txid].effectiveFeePerVsize);
}
if (mempool[txid].firstSeen == null || (now - (mempool[txid]?.firstSeen || 0)) > PROPAGATION_MARGIN) {
displacedWeightRemaining -= mempool[txid].weight;
}
failures = 0;
} else {
failures++;
}
index++;
}
// mark unexpected transactions in the mined block as 'added'
let overflowWeight = 0;
let totalWeight = 0;
for (const tx of transactions) {
if (inTemplate[tx.txid]) {
matches.push(tx.txid);
} else {
if (!isDisplaced[tx.txid]) {
added.push(tx.txid);
}
let blockIndex = -1;
let index = -1;
projectedBlocks.forEach((block, bi) => {
const i = block.transactionIds.indexOf(tx.txid);
if (i >= 0) {
blockIndex = bi;
index = i;
}
});
overflowWeight += tx.weight;
}
totalWeight += tx.weight;
}
// transactions missing from near the end of our template are probably not being censored
let overflowWeightRemaining = overflowWeight - (config.MEMPOOL.BLOCK_WEIGHT_UNITS - totalWeight);
let maxOverflowRate = 0;
let rateThreshold = 0;
index = projectedBlocks[0].transactionIds.length - 1;
while (index >= 0) {
const txid = projectedBlocks[0].transactionIds[index];
if (overflowWeightRemaining > 0) {
if (isCensored[txid]) {
delete isCensored[txid];
}
if (mempool[txid].effectiveFeePerVsize > maxOverflowRate) {
maxOverflowRate = mempool[txid].effectiveFeePerVsize;
rateThreshold = (Math.ceil(maxOverflowRate * 100) / 100) + 0.005;
}
} else if (mempool[txid].effectiveFeePerVsize <= rateThreshold) { // tolerance of 0.01 sat/vb + rounding
if (isCensored[txid]) {
delete isCensored[txid];
}
}
overflowWeightRemaining -= (mempool[txid]?.weight || 0);
index--;
}
const numCensored = Object.keys(isCensored).length;
const score = matches.length > 0 ? (matches.length / (matches.length + numCensored)) : 0;
return {
censored: Object.keys(isCensored),
added,
fresh,
score
};
}
}
export default new Audit();
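// Usage sketch (caller-side names are illustrative, not from this changeset):
//
//   const { censored, added, fresh, score } = audit.auditBlock(blockTxs, mempoolBlocks, mempool);
//
// score is the template match rate, matches / (matches + numCensored), so a
// block that includes every non-excused template transaction scores 1.0.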

View File

@@ -1,38 +0,0 @@
import fs from 'fs';
import path from 'path';
import os from 'os';
import { IBackendInfo } from '../mempool.interfaces';
class BackendInfo {
private backendInfo: IBackendInfo;
constructor() {
// This file is created by ./fetch-version.ts during building
const versionFile = path.join(__dirname, 'version.json');
let versionInfo;
if (fs.existsSync(versionFile)) {
versionInfo = JSON.parse(fs.readFileSync(versionFile).toString());
} else {
// Use dummy values if `versionFile` doesn't exist (e.g., during testing)
versionInfo = {
version: '?',
gitCommit: '?'
};
}
this.backendInfo = {
hostname: os.hostname(),
version: versionInfo.version,
gitCommit: versionInfo.gitCommit
};
}
public getBackendInfo(): IBackendInfo {
return this.backendInfo;
}
public getShortCommitHash() {
return this.backendInfo.gitCommit.slice(0, 7);
}
}
export default new BackendInfo();
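// Usage sketch (import path illustrative):
//
//   import backendInfo from './api/backend-info';
//   backendInfo.getBackendInfo();     // { hostname, version, gitCommit }
//   backendInfo.getShortCommitHash(); // first 7 characters of gitCommit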

View File

@@ -1,381 +0,0 @@
import { Application, Request, Response } from 'express';
import config from '../../config';
import { RequiredSpec } from '../../mempool.interfaces';
import bisq from './bisq';
import { MarketsApiError } from './interfaces';
import marketsApi from './markets-api';
class BisqRoutes {
public initRoutes(app: Application) {
app
.get(config.MEMPOOL.API_URL_PREFIX + 'bisq/stats', this.getBisqStats)
.get(config.MEMPOOL.API_URL_PREFIX + 'bisq/tx/:txId', this.getBisqTransaction)
.get(config.MEMPOOL.API_URL_PREFIX + 'bisq/block/:hash', this.getBisqBlock)
.get(config.MEMPOOL.API_URL_PREFIX + 'bisq/blocks/tip/height', this.getBisqTip)
.get(config.MEMPOOL.API_URL_PREFIX + 'bisq/blocks/:index/:length', this.getBisqBlocks)
.get(config.MEMPOOL.API_URL_PREFIX + 'bisq/address/:address', this.getBisqAddress)
.get(config.MEMPOOL.API_URL_PREFIX + 'bisq/txs/:index/:length', this.getBisqTransactions)
.get(config.MEMPOOL.API_URL_PREFIX + 'bisq/markets/currencies', this.getBisqMarketCurrencies.bind(this))
.get(config.MEMPOOL.API_URL_PREFIX + 'bisq/markets/depth', this.getBisqMarketDepth.bind(this))
.get(config.MEMPOOL.API_URL_PREFIX + 'bisq/markets/hloc', this.getBisqMarketHloc.bind(this))
.get(config.MEMPOOL.API_URL_PREFIX + 'bisq/markets/markets', this.getBisqMarketMarkets.bind(this))
.get(config.MEMPOOL.API_URL_PREFIX + 'bisq/markets/offers', this.getBisqMarketOffers.bind(this))
.get(config.MEMPOOL.API_URL_PREFIX + 'bisq/markets/ticker', this.getBisqMarketTicker.bind(this))
.get(config.MEMPOOL.API_URL_PREFIX + 'bisq/markets/trades', this.getBisqMarketTrades.bind(this))
.get(config.MEMPOOL.API_URL_PREFIX + 'bisq/markets/volumes', this.getBisqMarketVolumes.bind(this))
.get(config.MEMPOOL.API_URL_PREFIX + 'bisq/markets/volumes/7d', this.getBisqMarketVolumes7d.bind(this))
;
}
private getBisqStats(req: Request, res: Response) {
const result = bisq.getStats();
res.json(result);
}
private getBisqTip(req: Request, res: Response) {
const result = bisq.getLatestBlockHeight();
res.type('text/plain');
res.send(result.toString());
}
private getBisqTransaction(req: Request, res: Response) {
const result = bisq.getTransaction(req.params.txId);
if (result) {
res.json(result);
} else {
res.status(404).send('Bisq transaction not found');
}
}
private getBisqTransactions(req: Request, res: Response) {
const types: string[] = [];
req.query.types = req.query.types || [];
if (!Array.isArray(req.query.types)) {
res.status(500).send('Types is not an array');
return;
}
for (const _type in req.query.types) {
if (typeof req.query.types[_type] === 'string') {
types.push(req.query.types[_type].toString());
}
}
const index = parseInt(req.params.index, 10) || 0;
const length = parseInt(req.params.length, 10) > 100 ? 100 : parseInt(req.params.length, 10) || 25;
const [transactions, count] = bisq.getTransactions(index, length, types);
res.header('X-Total-Count', count.toString());
res.json(transactions);
}
private getBisqBlock(req: Request, res: Response) {
const result = bisq.getBlock(req.params.hash);
if (result) {
res.json(result);
} else {
res.status(404).send('Bisq block not found');
}
}
private getBisqBlocks(req: Request, res: Response) {
const index = parseInt(req.params.index, 10) || 0;
const length = parseInt(req.params.length, 10) > 100 ? 100 : parseInt(req.params.length, 10) || 25;
const [transactions, count] = bisq.getBlocks(index, length);
res.header('X-Total-Count', count.toString());
res.json(transactions);
}
private getBisqAddress(req: Request, res: Response) {
const result = bisq.getAddress(req.params.address.substring(1));
if (result) {
res.json(result);
} else {
res.status(404).send('Bisq address not found');
}
}
private getBisqMarketCurrencies(req: Request, res: Response) {
const constraints: RequiredSpec = {
'type': {
required: false,
types: ['crypto', 'fiat', 'all']
},
};
const p = this.parseRequestParameters(req.query, constraints);
if (p.error) {
res.status(400).json(this.getBisqMarketErrorResponse(p.error));
return;
}
const result = marketsApi.getCurrencies(p.type);
if (result) {
res.json(result);
} else {
res.status(500).json(this.getBisqMarketErrorResponse('getBisqMarketCurrencies error'));
}
}
private getBisqMarketDepth(req: Request, res: Response) {
const constraints: RequiredSpec = {
'market': {
required: true,
types: ['@string']
},
};
const p = this.parseRequestParameters(req.query, constraints);
if (p.error) {
res.status(400).json(this.getBisqMarketErrorResponse(p.error));
return;
}
const result = marketsApi.getDepth(p.market);
if (result) {
res.json(result);
} else {
res.status(500).json(this.getBisqMarketErrorResponse('getBisqMarketDepth error'));
}
}
private getBisqMarketMarkets(req: Request, res: Response) {
const result = marketsApi.getMarkets();
if (result) {
res.json(result);
} else {
res.status(500).json(this.getBisqMarketErrorResponse('getBisqMarketMarkets error'));
}
}
private getBisqMarketTrades(req: Request, res: Response) {
const constraints: RequiredSpec = {
'market': {
required: true,
types: ['@string']
},
'timestamp_from': {
required: false,
types: ['@number']
},
'timestamp_to': {
required: false,
types: ['@number']
},
'trade_id_to': {
required: false,
types: ['@string']
},
'trade_id_from': {
required: false,
types: ['@string']
},
'direction': {
required: false,
types: ['buy', 'sell']
},
'limit': {
required: false,
types: ['@number']
},
'sort': {
required: false,
types: ['asc', 'desc']
}
};
const p = this.parseRequestParameters(req.query, constraints);
if (p.error) {
res.status(400).json(this.getBisqMarketErrorResponse(p.error));
return;
}
const result = marketsApi.getTrades(p.market, p.timestamp_from,
p.timestamp_to, p.trade_id_from, p.trade_id_to, p.direction, p.limit, p.sort);
if (result) {
res.json(result);
} else {
res.status(500).json(this.getBisqMarketErrorResponse('getBisqMarketTrades error'));
}
}
private getBisqMarketOffers(req: Request, res: Response) {
const constraints: RequiredSpec = {
'market': {
required: true,
types: ['@string']
},
'direction': {
required: false,
types: ['buy', 'sell']
},
};
const p = this.parseRequestParameters(req.query, constraints);
if (p.error) {
res.status(400).json(this.getBisqMarketErrorResponse(p.error));
return;
}
const result = marketsApi.getOffers(p.market, p.direction);
if (result) {
res.json(result);
} else {
res.status(500).json(this.getBisqMarketErrorResponse('getBisqMarketOffers error'));
}
}
private getBisqMarketVolumes(req: Request, res: Response) {
const constraints: RequiredSpec = {
'market': {
required: false,
types: ['@string']
},
'interval': {
required: false,
types: ['minute', 'half_hour', 'hour', 'half_day', 'day', 'week', 'month', 'year', 'auto']
},
'timestamp_from': {
required: false,
types: ['@number']
},
'timestamp_to': {
required: false,
types: ['@number']
},
'milliseconds': {
required: false,
types: ['@boolean']
},
'timestamp': {
required: false,
types: ['no', 'yes']
},
};
const p = this.parseRequestParameters(req.query, constraints);
if (p.error) {
res.status(400).json(this.getBisqMarketErrorResponse(p.error));
return;
}
const result = marketsApi.getVolumes(p.market, p.timestamp_from, p.timestamp_to, p.interval, p.milliseconds, p.timestamp);
if (result) {
res.json(result);
} else {
res.status(500).json(this.getBisqMarketErrorResponse('getBisqMarketVolumes error'));
}
}
private getBisqMarketHloc(req: Request, res: Response) {
const constraints: RequiredSpec = {
'market': {
required: true,
types: ['@string']
},
'interval': {
required: false,
types: ['minute', 'half_hour', 'hour', 'half_day', 'day', 'week', 'month', 'year', 'auto']
},
'timestamp_from': {
required: false,
types: ['@number']
},
'timestamp_to': {
required: false,
types: ['@number']
},
'milliseconds': {
required: false,
types: ['@boolean']
},
'timestamp': {
required: false,
types: ['no', 'yes']
},
};
const p = this.parseRequestParameters(req.query, constraints);
if (p.error) {
res.status(400).json(this.getBisqMarketErrorResponse(p.error));
return;
}
const result = marketsApi.getHloc(p.market, p.interval, p.timestamp_from, p.timestamp_to, p.milliseconds, p.timestamp);
if (result) {
res.json(result);
} else {
res.status(500).json(this.getBisqMarketErrorResponse('getBisqMarketHloc error'));
}
}
private getBisqMarketTicker(req: Request, res: Response) {
const constraints: RequiredSpec = {
'market': {
required: false,
types: ['@string']
},
};
const p = this.parseRequestParameters(req.query, constraints);
if (p.error) {
res.status(400).json(this.getBisqMarketErrorResponse(p.error));
return;
}
const result = marketsApi.getTicker(p.market);
if (result) {
res.json(result);
} else {
res.status(500).json(this.getBisqMarketErrorResponse('getBisqMarketTicker error'));
}
}
private getBisqMarketVolumes7d(req: Request, res: Response) {
const result = marketsApi.getVolumesByTime(604800);
if (result) {
res.json(result);
} else {
res.status(500).json(this.getBisqMarketErrorResponse('getBisqMarketVolumes7d error'));
}
}
private parseRequestParameters(requestParams: object, params: RequiredSpec): { [name: string]: any; } {
const final = {};
for (const i in params) {
if (params.hasOwnProperty(i)) {
if (params[i].required && requestParams[i] === undefined) {
return { error: i + ' parameter missing'};
}
if (typeof requestParams[i] === 'string') {
const str = (requestParams[i] || '').toString().toLowerCase();
if (params[i].types.indexOf('@number') > -1) {
final[i] = parseInt(str, 10);
} else if (params[i].types.indexOf('@string') > -1) {
final[i] = str;
} else if (params[i].types.indexOf('@boolean') > -1) {
final[i] = str === 'true' || str === 'yes';
} else if (params[i].types.indexOf(str) > -1) {
final[i] = str;
} else {
return { error: i + ' parameter invalid'};
}
} else if (typeof requestParams[i] === 'number') {
final[i] = requestParams[i];
}
}
}
return final;
}
private getBisqMarketErrorResponse(message: string): MarketsApiError {
return {
'success': 0,
'error': message
};
}
}
export default new BisqRoutes();
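// How parseRequestParameters resolves a query against the 'hloc' constraints
// above (market name and values illustrative):
//
//   GET .../bisq/markets/hloc?market=btc_usd&interval=day&milliseconds=true
//     -> { market: 'btc_usd', interval: 'day', milliseconds: true }
//   GET .../bisq/markets/hloc?interval=day
//     -> { error: 'market parameter missing' }, answered with HTTP 400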

View File

@@ -1,364 +0,0 @@
import config from '../../config';
import * as fs from 'fs';
import axios, { AxiosResponse } from 'axios';
import * as http from 'http';
import * as https from 'https';
import { SocksProxyAgent } from 'socks-proxy-agent';
import { BisqBlocks, BisqBlock, BisqTransaction, BisqStats, BisqTrade } from './interfaces';
import { Common } from '../common';
import { BlockExtended } from '../../mempool.interfaces';
import { StaticPool } from 'node-worker-threads-pool';
import backendInfo from '../backend-info';
import logger from '../../logger';
class Bisq {
private static BLOCKS_JSON_FILE_PATH = config.BISQ.DATA_PATH + '/json/all/blocks.json';
private latestBlockHeight = 0;
private blocks: BisqBlock[] = [];
private allBlocks: BisqBlock[] = [];
private transactions: BisqTransaction[] = [];
private transactionIndex: { [txId: string]: BisqTransaction } = {};
private blockIndex: { [hash: string]: BisqBlock } = {};
private addressIndex: { [address: string]: BisqTransaction[] } = {};
private stats: BisqStats = {
minted: 0,
burnt: 0,
addresses: 0,
unspent_txos: 0,
spent_txos: 0,
};
private price: number = 0;
private priceUpdateCallbackFunction: ((price: number) => void) | undefined;
private topDirectoryWatcher: fs.FSWatcher | undefined;
private subdirectoryWatcher: fs.FSWatcher | undefined;
private jsonParsePool = new StaticPool({
size: 4,
task: (blob: string) => JSON.parse(blob),
});
constructor() {}
startBisqService(): void {
try {
this.checkForBisqDataFolder();
} catch (e) {
logger.info('Retrying to start bisq service in 3 minutes');
setTimeout(this.startBisqService.bind(this), 180000);
return;
}
this.loadBisqDumpFile();
setInterval(this.updatePrice.bind(this), 1000 * 60 * 60);
this.updatePrice();
this.startTopDirectoryWatcher();
this.startSubDirectoryWatcher();
}
handleNewBitcoinBlock(block: BlockExtended): void {
if (block.height - 10 > this.latestBlockHeight && this.latestBlockHeight !== 0) {
logger.warn(`Bitcoin block height (#${block.height}) has diverged from the latest Bisq block height (#${this.latestBlockHeight}). Restarting watchers...`);
this.startTopDirectoryWatcher();
this.startSubDirectoryWatcher();
}
}
getTransaction(txId: string): BisqTransaction | undefined {
return this.transactionIndex[txId];
}
getTransactions(start: number, length: number, types: string[]): [BisqTransaction[], number] {
let transactions = this.transactions;
if (types.length) {
transactions = transactions.filter((tx) => types.indexOf(tx.txType) > -1);
}
return [transactions.slice(start, length + start), transactions.length];
}
getBlock(hash: string): BisqBlock | undefined {
return this.blockIndex[hash];
}
getAddress(hash: string): BisqTransaction[] {
return this.addressIndex[hash];
}
getBlocks(start: number, length: number): [BisqBlock[], number] {
return [this.blocks.slice(start, length + start), this.blocks.length];
}
getStats(): BisqStats {
return this.stats;
}
setPriceCallbackFunction(fn: (price: number) => void) {
this.priceUpdateCallbackFunction = fn;
}
getLatestBlockHeight(): number {
return this.latestBlockHeight;
}
private checkForBisqDataFolder() {
if (!fs.existsSync(Bisq.BLOCKS_JSON_FILE_PATH)) {
logger.warn(Bisq.BLOCKS_JSON_FILE_PATH + ` doesn't exist. Make sure Bisq is running and the config is correct before starting the server.`);
throw new Error(`Cannot load BISQ ${Bisq.BLOCKS_JSON_FILE_PATH} file`);
}
}
private startTopDirectoryWatcher() {
if (this.topDirectoryWatcher) {
this.topDirectoryWatcher.close();
}
let fsWait: NodeJS.Timeout | null = null;
this.topDirectoryWatcher = fs.watch(config.BISQ.DATA_PATH + '/json', () => {
if (fsWait) {
clearTimeout(fsWait);
}
if (this.subdirectoryWatcher) {
this.subdirectoryWatcher.close();
}
fsWait = setTimeout(() => {
logger.debug(`Bisq restart detected. Resetting both watchers in 3 minutes.`);
setTimeout(() => {
this.startTopDirectoryWatcher();
this.startSubDirectoryWatcher();
this.loadBisqDumpFile();
}, 180000);
}, 15000);
});
}
private startSubDirectoryWatcher() {
if (this.subdirectoryWatcher) {
this.subdirectoryWatcher.close();
}
if (!fs.existsSync(Bisq.BLOCKS_JSON_FILE_PATH)) {
logger.warn(Bisq.BLOCKS_JSON_FILE_PATH + ` doesn't exist. Trying to restart sub directory watcher again in 3 minutes.`);
setTimeout(() => this.startSubDirectoryWatcher(), 180000);
return;
}
let fsWait: NodeJS.Timeout | null = null;
this.subdirectoryWatcher = fs.watch(config.BISQ.DATA_PATH + '/json/all', () => {
if (fsWait) {
clearTimeout(fsWait);
}
fsWait = setTimeout(() => {
logger.debug(`Change detected in the Bisq data folder.`);
this.loadBisqDumpFile();
}, 2000);
});
}
private async updatePrice() {
type AxiosOptions = {
headers: {
'User-Agent': string
};
timeout: number;
httpAgent?: http.Agent;
httpsAgent?: https.Agent;
}
const setDelay = (secs: number = 1): Promise<void> => new Promise(resolve => setTimeout(() => resolve(), secs * 1000));
const BISQ_URL = (config.SOCKS5PROXY.ENABLED === true) && (config.SOCKS5PROXY.USE_ONION === true) ? config.EXTERNAL_DATA_SERVER.BISQ_ONION : config.EXTERNAL_DATA_SERVER.BISQ_URL;
const isHTTP = new URL(BISQ_URL).protocol === 'http:';
const axiosOptions: AxiosOptions = {
headers: {
'User-Agent': (config.MEMPOOL.USER_AGENT === 'mempool') ? `mempool/v${backendInfo.getBackendInfo().version}` : `${config.MEMPOOL.USER_AGENT}`
},
timeout: config.SOCKS5PROXY.ENABLED ? 30000 : 10000
};
let retry = 0;
while (retry < config.MEMPOOL.EXTERNAL_MAX_RETRY) {
try {
if (config.SOCKS5PROXY.ENABLED) {
const socksOptions: any = {
agentOptions: {
keepAlive: true,
},
hostname: config.SOCKS5PROXY.HOST,
port: config.SOCKS5PROXY.PORT
};
if (config.SOCKS5PROXY.USERNAME && config.SOCKS5PROXY.PASSWORD) {
socksOptions.username = config.SOCKS5PROXY.USERNAME;
socksOptions.password = config.SOCKS5PROXY.PASSWORD;
} else {
// Retry with different tor circuits https://stackoverflow.com/a/64960234
socksOptions.username = `circuit${retry}`;
}
// Handle proxy agent for onion addresses
if (isHTTP) {
axiosOptions.httpAgent = new SocksProxyAgent(socksOptions);
} else {
axiosOptions.httpsAgent = new SocksProxyAgent(socksOptions);
}
}
const data: AxiosResponse = await axios.get(`${BISQ_URL}/trades/?market=bsq_btc`, axiosOptions);
if (data.statusText === 'error' || !data.data) {
throw new Error(`Could not fetch data from Bisq market, Error: ${data.status}`);
}
const prices: number[] = [];
data.data.forEach((trade) => {
prices.push(parseFloat(trade.price) * 100000000);
});
prices.sort((a, b) => a - b);
this.price = Common.median(prices);
if (this.priceUpdateCallbackFunction) {
this.priceUpdateCallbackFunction(this.price);
}
logger.debug('Successfully updated Bisq market price');
break;
} catch (e) {
logger.err('Error updating Bisq market price: ' + (e instanceof Error ? e.message : e));
await setDelay(config.MEMPOOL.EXTERNAL_RETRY_INTERVAL);
retry++;
}
}
}
private async loadBisqDumpFile(): Promise<void> {
this.allBlocks = [];
try {
await this.loadData();
this.buildIndex();
this.calculateStats();
} catch (e) {
logger.info('Cannot load bisq dump file because: ' + (e instanceof Error ? e.message : e));
}
}
private buildIndex() {
const start = new Date().getTime();
this.transactions = [];
this.transactionIndex = {};
this.addressIndex = {};
this.allBlocks.forEach((block) => {
/* Build block index */
if (!this.blockIndex[block.hash]) {
this.blockIndex[block.hash] = block;
}
/* Build transactions index */
block.txs.forEach((tx) => {
this.transactions.push(tx);
this.transactionIndex[tx.id] = tx;
});
});
/* Build address index */
this.transactions.forEach((tx) => {
tx.inputs.forEach((input) => {
if (!this.addressIndex[input.address]) {
this.addressIndex[input.address] = [];
}
if (this.addressIndex[input.address].indexOf(tx) === -1) {
this.addressIndex[input.address].push(tx);
}
});
tx.outputs.forEach((output) => {
if (!this.addressIndex[output.address]) {
this.addressIndex[output.address] = [];
}
if (this.addressIndex[output.address].indexOf(tx) === -1) {
this.addressIndex[output.address].push(tx);
}
});
});
const time = new Date().getTime() - start;
logger.debug('Bisq data index rebuilt in ' + time + ' ms');
}
private calculateStats() {
let minted = 0;
let burned = 0;
let unspent = 0;
let spent = 0;
this.transactions.forEach((tx) => {
tx.outputs.forEach((output) => {
if (output.opReturn) {
return;
}
if (output.txOutputType === 'GENESIS_OUTPUT' || (output.txOutputType === 'ISSUANCE_CANDIDATE_OUTPUT' && output.isVerified)) {
minted += output.bsqAmount;
}
if (output.isUnspent) {
unspent++;
} else {
spent++;
}
});
burned += tx['burntFee'];
});
this.stats = {
addresses: Object.keys(this.addressIndex).length,
minted: minted / 100,
burnt: burned / 100,
spent_txos: spent,
unspent_txos: unspent,
};
}
private async loadData(): Promise<any> {
if (!fs.existsSync(Bisq.BLOCKS_JSON_FILE_PATH)) {
throw new Error(Bisq.BLOCKS_JSON_FILE_PATH + ` doesn't exist`);
}
const readline = require('readline');
const events = require('events');
const rl = readline.createInterface({
input: fs.createReadStream(Bisq.BLOCKS_JSON_FILE_PATH),
crlfDelay: Infinity
});
let blockBuffer = '';
let readingBlock = false;
let lineCount = 1;
const start = new Date().getTime();
logger.debug('Processing Bisq data dump...');
rl.on('line', (line) => {
if (lineCount === 2) {
line = line.replace(' "chainHeight": ', '');
this.latestBlockHeight = parseInt(line, 10);
}
if (line === ' {') {
readingBlock = true;
} else if (line === ' },') {
blockBuffer += '}';
try {
const block: BisqBlock = JSON.parse(blockBuffer);
this.allBlocks.push(block);
readingBlock = false;
blockBuffer = '';
} catch (e) {
logger.debug(blockBuffer);
throw Error(`Unable to parse Bisq data dump at line ${lineCount}` + (e instanceof Error ? e.message : e));
}
}
if (readingBlock === true) {
blockBuffer += line;
}
++lineCount;
});
await events.once(rl, 'close');
this.allBlocks.reverse();
this.blocks = this.allBlocks.filter((block) => block.txs.length > 0);
const time = new Date().getTime() - start;
logger.debug('Bisq dump processed in ' + time + ' ms');
}
}
export default new Bisq();
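// Note: the line-by-line parser in loadData() leans on the layout of the Bisq
// dump file: "chainHeight" is expected on line 2, and each block object is
// delimited by an indented '{' / '},' pair, so blocks can be sliced out and
// parsed one at a time instead of JSON.parsing the whole (potentially very
// large) blocks.json in one allocation.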

View File

@@ -1,258 +0,0 @@
export interface BisqBlocks {
chainHeight: number;
blocks: BisqBlock[];
}
export interface BisqBlock {
height: number;
time: number;
hash: string;
previousBlockHash: string;
txs: BisqTransaction[];
}
export interface BisqTransaction {
txVersion: string;
id: string;
blockHeight: number;
blockHash: string;
time: number;
inputs: BisqInput[];
outputs: BisqOutput[];
txType: string;
txTypeDisplayString: string;
burntFee: number;
invalidatedBsq: number;
unlockBlockHeight: number;
}
export interface BisqStats {
minted: number;
burnt: number;
addresses: number;
unspent_txos: number;
spent_txos: number;
}
interface BisqInput {
spendingTxOutputIndex: number;
spendingTxId: string;
bsqAmount: number;
isVerified: boolean;
address: string;
time: number;
}
interface BisqOutput {
txVersion: string;
txId: string;
index: number;
bsqAmount: number;
btcAmount: number;
height: number;
isVerified: boolean;
burntFee: number;
invalidatedBsq: number;
address: string;
scriptPubKey: BisqScriptPubKey;
time: any;
txType: string;
txTypeDisplayString: string;
txOutputType: string;
txOutputTypeDisplayString: string;
lockTime: number;
isUnspent: boolean;
spentInfo: SpentInfo;
opReturn?: string;
}
interface BisqScriptPubKey {
addresses: string[];
asm: string;
hex: string;
reqSigs?: number;
type: string;
}
interface SpentInfo {
height: number;
inputIndex: number;
txId: string;
}
export interface BisqTrade {
direction: string;
price: string;
amount: string;
volume: string;
payment_method: string;
trade_id: string;
trade_date: number;
market?: string;
}
export interface Currencies { [txid: string]: Currency; }
export interface Currency {
code: string;
name: string;
precision: number;
_type: string;
}
export interface Depth { [market: string]: Market; }
interface Market {
'buys': string[];
'sells': string[];
}
export interface HighLowOpenClose {
period_start: number | string;
open: string;
high: string;
low: string;
close: string;
volume_left: string;
volume_right: string;
avg: string;
}
export interface Markets { [txid: string]: Pair; }
interface Pair {
pair: string;
lname: string;
rname: string;
lsymbol: string;
rsymbol: string;
lprecision: number;
rprecision: number;
ltype: string;
rtype: string;
name: string;
}
export interface Offers { [market: string]: OffersMarket; }
interface OffersMarket {
buys: Offer[] | null;
sells: Offer[] | null;
}
export interface OffersData {
direction: string;
currencyCode: string;
minAmount: number;
amount: number;
price: number;
date: number;
useMarketBasedPrice: boolean;
marketPriceMargin: number;
paymentMethod: string;
id: string;
currencyPair: string;
primaryMarketDirection: string;
priceDisplayString: string;
primaryMarketAmountDisplayString: string;
primaryMarketMinAmountDisplayString: string;
primaryMarketVolumeDisplayString: string;
primaryMarketMinVolumeDisplayString: string;
primaryMarketPrice: number;
primaryMarketAmount: number;
primaryMarketMinAmount: number;
primaryMarketVolume: number;
primaryMarketMinVolume: number;
}
export interface Offer {
offer_id: string;
offer_date: number;
direction: string;
min_amount: string;
amount: string;
price: string;
volume: string;
payment_method: string;
offer_fee_txid: any;
}
export interface Tickers { [market: string]: Ticker | null; }
export interface Ticker {
last: string;
high: string;
low: string;
volume_left: string;
volume_right: string;
buy: string | null;
sell: string | null;
}
export interface Trade {
direction: string;
price: string;
amount: string;
volume: string;
payment_method: string;
trade_id: string;
trade_date: number;
}
export interface TradesData {
currency: string;
direction: string;
tradePrice: number;
tradeAmount: number;
tradeDate: number;
paymentMethod: string;
offerDate: number;
useMarketBasedPrice: boolean;
marketPriceMargin: number;
offerAmount: number;
offerMinAmount: number;
offerId: string;
depositTxId?: string;
currencyPair: string;
primaryMarketDirection: string;
primaryMarketTradePrice: number;
primaryMarketTradeAmount: number;
primaryMarketTradeVolume: number;
_market: string;
_tradePriceStr: string;
_tradeAmountStr: string;
_tradeVolumeStr: string;
_offerAmountStr: string;
_tradePrice: number;
_tradeAmount: number;
_tradeVolume: number;
_offerAmount: number;
}
export interface MarketVolume {
period_start: number;
num_trades: number;
volume: string;
}
export interface MarketsApiError {
success: number;
error: string;
}
export type Interval = 'minute' | 'half_hour' | 'hour' | 'half_day' | 'day' | 'week' | 'month' | 'year' | 'auto';
export interface SummarizedIntervals { [market: string]: SummarizedInterval; }
export interface SummarizedInterval {
'period_start': number;
'open': number;
'close': number;
'high': number;
'low': number;
'avg': number;
'volume_right': number;
'volume_left': number;
}

View File

@@ -1,679 +0,0 @@
import { Currencies, OffersData, TradesData, Depth, Currency, Interval, HighLowOpenClose,
Markets, Offers, Offer, BisqTrade, MarketVolume, Tickers, Ticker, SummarizedIntervals, SummarizedInterval } from './interfaces';
const strtotime = require('./strtotime');
class BisqMarketsApi {
private cryptoCurrencyData: Currency[] = [];
private fiatCurrencyData: Currency[] = [];
private activeCryptoCurrencyData: Currency[] = [];
private activeFiatCurrencyData: Currency[] = [];
private offersData: OffersData[] = [];
private tradesData: TradesData[] = [];
private fiatCurrenciesIndexed: { [code: string]: true } = {};
private allCurrenciesIndexed: { [code: string]: Currency } = {};
private tradeDataByMarket: { [market: string]: TradesData[] } = {};
private tickersCache: Ticker | Tickers | null = null;
constructor() { }
setOffersData(offers: OffersData[]) {
this.offersData = offers;
}
setTradesData(trades: TradesData[]) {
this.tradesData = trades;
this.tradeDataByMarket = {};
this.tradesData.forEach((trade) => {
trade._market = trade.currencyPair.toLowerCase().replace('/', '_');
if (!this.tradeDataByMarket[trade._market]) {
this.tradeDataByMarket[trade._market] = [];
}
this.tradeDataByMarket[trade._market].push(trade);
});
}
setCurrencyData(cryptoCurrency: Currency[], fiatCurrency: Currency[], activeCryptoCurrency: Currency[], activeFiatCurrency: Currency[]) {
this.cryptoCurrencyData = cryptoCurrency;
this.fiatCurrencyData = fiatCurrency;
this.activeCryptoCurrencyData = activeCryptoCurrency;
this.activeFiatCurrencyData = activeFiatCurrency;
this.fiatCurrenciesIndexed = {};
this.allCurrenciesIndexed = {};
this.fiatCurrencyData.forEach((currency) => {
currency._type = 'fiat';
this.fiatCurrenciesIndexed[currency.code] = true;
this.allCurrenciesIndexed[currency.code] = currency;
});
this.cryptoCurrencyData.forEach((currency) => {
currency._type = 'crypto';
this.allCurrenciesIndexed[currency.code] = currency;
});
}
updateCache() {
this.tickersCache = null;
this.tickersCache = this.getTicker();
}
getCurrencies(
type: 'crypto' | 'fiat' | 'active' | 'all' = 'all',
): Currencies {
let currencies: Currency[];
switch (type) {
case 'fiat':
currencies = this.fiatCurrencyData;
break;
case 'crypto':
currencies = this.cryptoCurrencyData;
break;
case 'active':
currencies = this.activeCryptoCurrencyData.concat(this.activeFiatCurrencyData);
break;
case 'all':
default:
currencies = this.cryptoCurrencyData.concat(this.fiatCurrencyData);
}
const result = {};
currencies.forEach((currency) => {
result[currency.code] = currency;
});
return result;
}
getDepth(
market: string,
): Depth {
const currencyPair = market.replace('_', '/').toUpperCase();
const buys = this.offersData
.filter((offer) => offer.currencyPair === currencyPair && offer.primaryMarketDirection === 'BUY')
.map((offer) => offer.price)
.sort((a, b) => b - a)
.map((price) => this.intToBtc(price));
const sells = this.offersData
.filter((offer) => offer.currencyPair === currencyPair && offer.primaryMarketDirection === 'SELL')
.map((offer) => offer.price)
.sort((a, b) => a - b)
.map((price) => this.intToBtc(price));
const result = {};
result[market] = {
'buys': buys,
'sells': sells,
};
return result;
}
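// Shape sketch (market name illustrative): getDepth('btc_eur') returns
//   { btc_eur: { buys: [...], sells: [...] } }
// where buys are sorted high to low, sells low to high, and every price is an
// 8-decimal string produced by intToBtc.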
getOffers(
market: string,
direction?: 'buy' | 'sell',
): Offers {
const currencyPair = market.replace('_', '/').toUpperCase();
let buys: Offer[] | null = null;
let sells: Offer[] | null = null;
if (!direction || direction === 'buy') {
buys = this.offersData
.filter((offer) => offer.currencyPair === currencyPair && offer.primaryMarketDirection === 'BUY')
.sort((a, b) => b.price - a.price)
.map((offer) => this.offerDataToOffer(offer, market));
}
if (!direction || direction === 'sell') {
sells = this.offersData
.filter((offer) => offer.currencyPair === currencyPair && offer.primaryMarketDirection === 'SELL')
.sort((a, b) => a.price - b.price)
.map((offer) => this.offerDataToOffer(offer, market));
}
const result: Offers = {};
result[market] = {
'buys': buys,
'sells': sells,
};
return result;
}
getMarkets(): Markets {
const allCurrencies = this.getCurrencies();
const activeCurrencies = this.getCurrencies('active');
const markets = {};
for (const currency of Object.keys(activeCurrencies)) {
if (allCurrencies[currency].code === 'BTC') {
continue;
}
const isFiat = allCurrencies[currency]._type === 'fiat';
const pmarketname = allCurrencies['BTC']['name'];
const lsymbol = isFiat ? 'BTC' : currency;
const rsymbol = isFiat ? currency : 'BTC';
const lname = isFiat ? pmarketname : allCurrencies[currency].name;
const rname = isFiat ? allCurrencies[currency].name : pmarketname;
const ltype = isFiat ? 'crypto' : allCurrencies[currency]._type;
const rtype = isFiat ? 'fiat' : 'crypto';
const lprecision = 8;
const rprecision = isFiat ? 2 : 8;
const pair = lsymbol.toLowerCase() + '_' + rsymbol.toLowerCase();
markets[pair] = {
'pair': pair,
'lname': lname,
'rname': rname,
'lsymbol': lsymbol,
'rsymbol': rsymbol,
'lprecision': lprecision,
'rprecision': rprecision,
'ltype': ltype,
'rtype': rtype,
'name': lname + '/' + rname,
};
}
return markets;
}
getTrades(
market: string,
timestamp_from?: number,
timestamp_to?: number,
trade_id_from?: string,
trade_id_to?: string,
direction?: 'buy' | 'sell',
limit: number = 100,
sort: 'asc' | 'desc' = 'desc',
): BisqTrade[] {
limit = Math.min(limit, 2000);
const _market = market === 'all' ? undefined : market;
if (!timestamp_from) {
timestamp_from = new Date('2016-01-01').getTime() / 1000;
}
if (!timestamp_to) {
timestamp_to = new Date().getTime() / 1000;
}
const matches = this.getTradesByCriteria(_market, timestamp_to, timestamp_from,
trade_id_to, trade_id_from, direction, sort, limit, false);
if (sort === 'asc') {
matches.sort((a, b) => a.tradeDate - b.tradeDate);
} else {
matches.sort((a, b) => b.tradeDate - a.tradeDate);
}
return matches.map((trade) => {
const bsqTrade: BisqTrade = {
direction: trade.primaryMarketDirection,
price: trade._tradePriceStr,
amount: trade._tradeAmountStr,
volume: trade._tradeVolumeStr,
payment_method: trade.paymentMethod,
trade_id: trade.offerId,
trade_date: trade.tradeDate,
};
if (market === 'all') {
bsqTrade.market = trade._market;
}
return bsqTrade;
});
}
getVolumes(
market?: string,
timestamp_from?: number,
timestamp_to?: number,
interval: Interval = 'auto',
milliseconds?: boolean,
timestamp: 'no' | 'yes' = 'yes',
): MarketVolume[] {
if (milliseconds) {
timestamp_from = timestamp_from ? timestamp_from / 1000 : timestamp_from;
timestamp_to = timestamp_to ? timestamp_to / 1000 : timestamp_to;
}
if (!timestamp_from) {
timestamp_from = new Date('2016-01-01').getTime() / 1000;
}
if (!timestamp_to) {
timestamp_to = new Date().getTime() / 1000;
}
const trades = this.getTradesByCriteria(market, timestamp_to, timestamp_from,
undefined, undefined, undefined, 'asc', Number.MAX_SAFE_INTEGER);
if (interval === 'auto') {
const range = timestamp_to - timestamp_from;
interval = this.getIntervalFromRange(range);
}
const intervals: any = {};
const marketVolumes: MarketVolume[] = [];
for (const trade of trades) {
const traded_at = trade['tradeDate'] / 1000;
const interval_start = this.intervalStart(traded_at, interval);
if (!intervals[interval_start]) {
intervals[interval_start] = {
'volume': 0,
'num_trades': 0,
};
}
const period = intervals[interval_start];
period['period_start'] = interval_start;
period['volume'] += this.fiatCurrenciesIndexed[trade.currency] ? trade._tradeAmount : trade._tradeVolume;
period['num_trades']++;
}
for (const p in intervals) {
if (intervals.hasOwnProperty(p)) {
const period = intervals[p];
marketVolumes.push({
period_start: timestamp === 'no' ? new Date(period['period_start'] * 1000).toISOString() : period['period_start'],
num_trades: period['num_trades'],
volume: this.intToBtc(period['volume']),
});
}
}
return marketVolumes;
}
getTicker(
market?: string,
): Tickers | Ticker | null {
if (market) {
return this.getTickerFromMarket(market);
}
if (this.tickersCache) {
return this.tickersCache;
}
const allMarkets = this.getMarkets();
const tickers = {};
for (const m in allMarkets) {
if (allMarkets.hasOwnProperty(m)) {
tickers[allMarkets[m].pair] = this.getTickerFromMarket(allMarkets[m].pair);
}
}
return tickers;
}
getTickerFromMarket(market: string): Ticker | null {
let ticker: Ticker;
const timestamp_from = strtotime('-24 hour');
const timestamp_to = new Date().getTime() / 1000;
const trades = this.getTradesByCriteria(market, timestamp_to, timestamp_from,
undefined, undefined, undefined, 'asc', Number.MAX_SAFE_INTEGER);
const periods: SummarizedInterval[] = Object.values(this.getTradesSummarized(trades, timestamp_from));
const allCurrencies = this.getCurrencies();
const currencyRight = allCurrencies[market.split('_')[1].toUpperCase()];
if (periods[0]) {
ticker = {
'last': this.intToBtc(periods[0].close),
'high': this.intToBtc(periods[0].high),
'low': this.intToBtc(periods[0].low),
'volume_left': this.intToBtc(periods[0].volume_left),
'volume_right': this.intToBtc(periods[0].volume_right),
'buy': null,
'sell': null,
};
} else {
const lastTrade = this.tradeDataByMarket[market];
if (!lastTrade) {
return null;
}
const tradePrice = lastTrade[0].primaryMarketTradePrice * Math.pow(10, 8 - currencyRight.precision);
const lastTradePrice = this.intToBtc(tradePrice);
ticker = {
'last': lastTradePrice,
'high': lastTradePrice,
'low': lastTradePrice,
'volume_left': '0',
'volume_right': '0',
'buy': null,
'sell': null,
};
}
const timestampFromMilli = timestamp_from * 1000;
const timestampToMilli = timestamp_to * 1000;
const currencyPair = market.replace('_', '/').toUpperCase();
const offersData = this.offersData.slice().sort((a, b) => a.price - b.price);
const buy = offersData.find((offer) => offer.currencyPair === currencyPair
&& offer.primaryMarketDirection === 'BUY'
&& offer.date >= timestampFromMilli
&& offer.date <= timestampToMilli
);
const sell = offersData.find((offer) => offer.currencyPair === currencyPair
&& offer.primaryMarketDirection === 'SELL'
&& offer.date >= timestampFromMilli
&& offer.date <= timestampToMilli
);
if (buy) {
ticker.buy = this.intToBtc(buy.primaryMarketPrice * Math.pow(10, 8 - currencyRight.precision));
}
if (sell) {
ticker.sell = this.intToBtc(sell.primaryMarketPrice * Math.pow(10, 8 - currencyRight.precision));
}
return ticker;
}
getHloc(
market: string,
interval: Interval = 'auto',
timestamp_from?: number,
timestamp_to?: number,
milliseconds?: boolean,
timestamp: 'no' | 'yes' = 'yes',
): HighLowOpenClose[] {
if (milliseconds) {
timestamp_from = timestamp_from ? timestamp_from / 1000 : timestamp_from;
timestamp_to = timestamp_to ? timestamp_to / 1000 : timestamp_to;
}
if (!timestamp_from) {
timestamp_from = new Date('2016-01-01').getTime() / 1000;
}
if (!timestamp_to) {
timestamp_to = new Date().getTime() / 1000;
}
const trades = this.getTradesByCriteria(market, timestamp_to, timestamp_from,
undefined, undefined, undefined, 'asc', Number.MAX_SAFE_INTEGER);
if (interval === 'auto') {
const range = timestamp_to - timestamp_from;
interval = this.getIntervalFromRange(range);
}
const intervals = this.getTradesSummarized(trades, timestamp_from, interval);
const hloc: HighLowOpenClose[] = [];
for (const p in intervals) {
if (intervals.hasOwnProperty(p)) {
const period = intervals[p];
hloc.push({
period_start: timestamp === 'no' ? new Date(period['period_start'] * 1000).toISOString() : period['period_start'],
open: this.intToBtc(period['open']),
close: this.intToBtc(period['close']),
high: this.intToBtc(period['high']),
low: this.intToBtc(period['low']),
avg: this.intToBtc(period['avg']),
volume_right: this.intToBtc(period['volume_right']),
volume_left: this.intToBtc(period['volume_left']),
});
}
}
return hloc;
}
private getIntervalFromRange(range: number): Interval {
if (range <= 3600) {
// up to one hour range loads minutely data
return 'minute';
} else if (range <= 1 * 24 * 3600) {
// up to one day range loads half-hourly data
return 'half_hour';
} else if (range <= 3 * 24 * 3600) {
// up to 3 day range loads hourly data
return 'hour';
} else if (range <= 7 * 24 * 3600) {
// up to 7 day range loads half-daily data
return 'half_day';
} else if (range <= 60 * 24 * 3600) {
// up to 2 month range loads daily data
return 'day';
} else if (range <= 12 * 31 * 24 * 3600) {
// up to one year range loads weekly data
return 'week';
} else if (range <= 5 * 12 * 31 * 24 * 3600) {
// up to 5 year range loads monthly data
return 'month';
} else {
// greater range loads yearly data
return 'year';
}
}
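// For example, a 4-day range (345600s) falls into the 7-day bucket and loads
// 'half_day' data, while a 90-day range (7776000s) falls into the one-year
// bucket and loads 'week' data.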
getVolumesByTime(time: number): { [market: string]: { volume: number; num_trades: number } } {
const timestamp_from = new Date().getTime() / 1000 - time;
const timestamp_to = new Date().getTime() / 1000;
const trades = this.getTradesByCriteria(undefined, timestamp_to, timestamp_from,
undefined, undefined, undefined, 'asc', Number.MAX_SAFE_INTEGER);
const markets: any = {};
for (const trade of trades) {
if (!markets[trade._market]) {
markets[trade._market] = {
'volume': 0,
'num_trades': 0,
};
}
markets[trade._market]['volume'] += this.fiatCurrenciesIndexed[trade.currency] ? trade._tradeAmount : trade._tradeVolume;
markets[trade._market]['num_trades']++;
}
return markets;
}
private getTradesSummarized(trades: TradesData[], timestamp_from: number, interval?: string): SummarizedIntervals {
const intervals: any = {};
const intervals_prices: any = {};
for (const trade of trades) {
const traded_at = trade.tradeDate / 1000;
const interval_start = !interval ? timestamp_from : this.intervalStart(traded_at, interval);
if (!intervals[interval_start]) {
intervals[interval_start] = {
'open': 0,
'close': 0,
'high': 0,
'low': 0,
'avg': 0,
'volume_right': 0,
'volume_left': 0,
};
intervals_prices[interval_start] = [];
}
const period = intervals[interval_start];
const price = trade._tradePrice;
if (!intervals_prices[interval_start]['leftvol']) {
intervals_prices[interval_start]['leftvol'] = [];
}
if (!intervals_prices[interval_start]['rightvol']) {
intervals_prices[interval_start]['rightvol'] = [];
}
intervals_prices[interval_start]['leftvol'].push(trade._tradeAmount);
intervals_prices[interval_start]['rightvol'].push(trade._tradeVolume);
if (price) {
const plow = period['low'];
period['period_start'] = interval_start;
period['open'] = period['open'] || price;
period['close'] = price;
period['high'] = price > period['high'] ? price : period['high'];
period['low'] = (plow && price > plow) ? period['low'] : price;
period['avg'] = intervals_prices[interval_start]['rightvol'].reduce((sum: number, v: number) => sum + v, 0)
/ intervals_prices[interval_start]['leftvol'].reduce((sum: number, v: number) => sum + v, 0) * 100000000;
period['volume_left'] += trade._tradeAmount;
period['volume_right'] += trade._tradeVolume;
}
}
return intervals;
}
private getTradesByCriteria(
market: string | undefined,
timestamp_to: number,
timestamp_from: number,
trade_id_to: string | undefined,
trade_id_from: string | undefined,
direction: 'buy' | 'sell' | undefined,
sort: string,
limit: number,
integerAmounts: boolean = true,
): TradesData[] {
let trade_id_from_ts: number | null = null;
let trade_id_to_ts: number | null = null;
const allCurrencies = this.getCurrencies();
const timestampFromMilli = timestamp_from * 1000;
const timestampToMilli = timestamp_to * 1000;
// note: the trade_id_from/to filters depend on iterating over trades in
// descending chronological order.
const tradesDataSorted = this.tradesData.slice();
if (sort === 'asc') {
tradesDataSorted.reverse();
}
let matches: TradesData[] = [];
for (const trade of tradesDataSorted) {
if (trade_id_from === trade.offerId) {
trade_id_from_ts = trade.tradeDate;
}
if (trade_id_to === trade.offerId) {
trade_id_to_ts = trade.tradeDate;
}
if (trade_id_to && trade_id_to_ts === null) {
continue;
}
if (trade_id_from && trade_id_from_ts != null && trade_id_from_ts !== trade.tradeDate) {
continue;
}
if (market && market !== trade._market) {
continue;
}
if (timestampFromMilli && timestampFromMilli > trade.tradeDate) {
continue;
}
if (timestampToMilli && timestampToMilli < trade.tradeDate) {
continue;
}
if (direction && direction !== trade.direction.toLowerCase()) {
continue;
}
// Filter out bogus trades with BTC/BTC or XXX/XXX market.
// See github issue: https://github.com/bitsquare/bitsquare/issues/883
const currencyPairs = trade.currencyPair.split('/');
if (currencyPairs[0] === currencyPairs[1]) {
continue;
}
const currencyLeft = allCurrencies[currencyPairs[0]];
const currencyRight = allCurrencies[currencyPairs[1]];
if (!currencyLeft || !currencyRight) {
continue;
}
const tradePrice = trade.primaryMarketTradePrice * Math.pow(10, 8 - currencyRight.precision);
const tradeAmount = trade.primaryMarketTradeAmount * Math.pow(10, 8 - currencyLeft.precision);
const tradeVolume = trade.primaryMarketTradeVolume * Math.pow(10, 8 - currencyRight.precision);
if (integerAmounts) {
trade._tradePrice = tradePrice;
trade._tradeAmount = tradeAmount;
trade._tradeVolume = tradeVolume;
trade._offerAmount = trade.offerAmount;
} else {
trade._tradePriceStr = this.intToBtc(tradePrice);
trade._tradeAmountStr = this.intToBtc(tradeAmount);
trade._tradeVolumeStr = this.intToBtc(tradeVolume);
trade._offerAmountStr = this.intToBtc(trade.offerAmount);
}
matches.push(trade);
if (matches.length >= limit) {
break;
}
}
if ((trade_id_from && !trade_id_from_ts) || (trade_id_to && !trade_id_to_ts)) {
matches = [];
}
return matches;
}
private intervalStart(ts: number, interval: string): number {
switch (interval) {
case 'minute':
return (ts - (ts % 60));
case '10_minute':
return (ts - (ts % 600));
case 'half_hour':
return (ts - (ts % 1800));
case 'hour':
return (ts - (ts % 3600));
case 'half_day':
return (ts - (ts % (3600 * 12)));
case 'day':
return strtotime('midnight today', ts);
case 'week':
return strtotime('midnight sunday last week', ts);
case 'month':
return strtotime('midnight first day of this month', ts);
case 'year':
return strtotime('midnight first day of january', ts);
default:
throw new Error('Unsupported interval: ' + interval);
}
}
private offerDataToOffer(offer: OffersData, market: string): Offer {
const currencyPairs = market.split('_');
const currencyRight = this.allCurrenciesIndexed[currencyPairs[1].toUpperCase()];
const currencyLeft = this.allCurrenciesIndexed[currencyPairs[0].toUpperCase()];
const price = offer['primaryMarketPrice'] * Math.pow( 10, 8 - currencyRight['precision']);
const amount = offer['primaryMarketAmount'] * Math.pow( 10, 8 - currencyLeft['precision']);
const volume = offer['primaryMarketVolume'] * Math.pow( 10, 8 - currencyRight['precision']);
return {
offer_id: offer.id,
offer_date: offer.date,
direction: offer.primaryMarketDirection,
min_amount: this.intToBtc(offer.minAmount),
amount: this.intToBtc(amount),
price: this.intToBtc(price),
volume: this.intToBtc(volume),
payment_method: offer.paymentMethod,
offer_fee_txid: null,
};
}
private intToBtc(val: number): string {
return (val / 100000000).toFixed(8);
}
}
export default new BisqMarketsApi();
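The interval bucketing in intervalStart() above floors a Unix timestamp to the start of its bucket, and each period's avg is a volume-weighted average price: the summed right-side (counter currency) volume divided by the summed left-side (BTC) amount, scaled to satoshis. A minimal sketch of that arithmetic, with hypothetical trade values:

// Sketch of the bucketing and VWAP math above (hypothetical sample values).
const hourStart = (ts: number): number => ts - (ts % 3600); // intervalStart(ts, 'hour')

// Integer base units, as produced by getTradesByCriteria() with integerAmounts=true.
const sampleTrades = [
  { ts: 1700000100, leftvol: 50000000, rightvol: 1500000 },
  { ts: 1700000900, leftvol: 25000000, rightvol: 800000 },
];

const bucket = hourStart(sampleTrades[0].ts); // both trades land in the same hourly bucket
const leftSum = sampleTrades.reduce((p, t) => p + t.leftvol, 0);
const rightSum = sampleTrades.reduce((p, t) => p + t.rightvol, 0);
const avg = rightSum / leftSum * 100000000; // same scaling as period['avg'] above
console.log(bucket, avg);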

View File

@@ -1,137 +0,0 @@
import config from '../../config';
import * as fs from 'fs';
import { OffersData as OffersData, TradesData, Currency } from './interfaces';
import bisqMarket from './markets-api';
import logger from '../../logger';
class Bisq {
private static FOLDER_WATCH_CHANGE_DETECTION_DEBOUNCE = 4000;
private static MARKET_JSON_PATH = config.BISQ.DATA_PATH;
private static MARKET_JSON_FILE_PATHS = {
activeCryptoCurrency: '/active_crypto_currency_list.json',
activeFiatCurrency: '/active_fiat_currency_list.json',
cryptoCurrency: '/crypto_currency_list.json',
fiatCurrency: '/fiat_currency_list.json',
offers: '/offers_statistics.json',
trades: '/trade_statistics.json',
};
private cryptoCurrencyLastMtime = new Date('2016-01-01');
private fiatCurrencyLastMtime = new Date('2016-01-01');
private offersLastMtime = new Date('2016-01-01');
private tradesLastMtime = new Date('2016-01-01');
private subdirectoryWatcher: fs.FSWatcher | undefined;
constructor() {}
startBisqService(): void {
try {
this.checkForBisqDataFolder();
} catch (e) {
logger.info('Retrying to start bisq service (markets) in 3 minutes');
setTimeout(this.startBisqService.bind(this), 180000);
return;
}
this.loadBisqDumpFile();
this.startBisqDirectoryWatcher();
}
private checkForBisqDataFolder() {
if (!fs.existsSync(Bisq.MARKET_JSON_PATH + Bisq.MARKET_JSON_FILE_PATHS.cryptoCurrency)) {
logger.err(Bisq.MARKET_JSON_PATH + Bisq.MARKET_JSON_FILE_PATHS.cryptoCurrency + ` doesn't exist. Make sure Bisq is running and the config is correct before starting the server.`);
throw new Error(`Cannot load BISQ ${Bisq.MARKET_JSON_FILE_PATHS.cryptoCurrency} file`);
}
}
private startBisqDirectoryWatcher() {
if (this.subdirectoryWatcher) {
this.subdirectoryWatcher.close();
}
if (!fs.existsSync(Bisq.MARKET_JSON_PATH + Bisq.MARKET_JSON_FILE_PATHS.cryptoCurrency)) {
logger.warn(Bisq.MARKET_JSON_PATH + Bisq.MARKET_JSON_FILE_PATHS.cryptoCurrency + ` doesn't exist. Trying to restart sub directory watcher again in 3 minutes.`);
setTimeout(() => this.startBisqDirectoryWatcher(), 180000);
return;
}
let fsWait: NodeJS.Timeout | null = null;
this.subdirectoryWatcher = fs.watch(Bisq.MARKET_JSON_PATH, () => {
if (fsWait) {
clearTimeout(fsWait);
}
fsWait = setTimeout(() => {
logger.debug(`Change detected in the Bisq market data folder.`);
this.loadBisqDumpFile();
}, Bisq.FOLDER_WATCH_CHANGE_DETECTION_DEBOUNCE);
});
}
private async loadBisqDumpFile(): Promise<void> {
const start = new Date().getTime();
try {
let marketsDataUpdated = false;
const cryptoMtime = this.getFileMtime(Bisq.MARKET_JSON_FILE_PATHS.cryptoCurrency);
const fiatMtime = this.getFileMtime(Bisq.MARKET_JSON_FILE_PATHS.fiatCurrency);
if (cryptoMtime > this.cryptoCurrencyLastMtime || fiatMtime > this.fiatCurrencyLastMtime) {
const cryptoCurrencyData = await this.loadData<Currency[]>(Bisq.MARKET_JSON_FILE_PATHS.cryptoCurrency);
const fiatCurrencyData = await this.loadData<Currency[]>(Bisq.MARKET_JSON_FILE_PATHS.fiatCurrency);
const activeCryptoCurrencyData = await this.loadData<Currency[]>(Bisq.MARKET_JSON_FILE_PATHS.activeCryptoCurrency);
const activeFiatCurrencyData = await this.loadData<Currency[]>(Bisq.MARKET_JSON_FILE_PATHS.activeFiatCurrency);
logger.debug('Updating Bisq Market Currency Data');
bisqMarket.setCurrencyData(cryptoCurrencyData, fiatCurrencyData, activeCryptoCurrencyData, activeFiatCurrencyData);
if (cryptoMtime > this.cryptoCurrencyLastMtime) {
this.cryptoCurrencyLastMtime = cryptoMtime;
}
if (fiatMtime > this.fiatCurrencyLastMtime) {
this.fiatCurrencyLastMtime = fiatMtime;
}
marketsDataUpdated = true;
}
const offersMtime = this.getFileMtime(Bisq.MARKET_JSON_FILE_PATHS.offers);
if (offersMtime > this.offersLastMtime) {
const offersData = await this.loadData<OffersData[]>(Bisq.MARKET_JSON_FILE_PATHS.offers);
logger.debug('Updating Bisq Market Offers Data');
bisqMarket.setOffersData(offersData);
this.offersLastMtime = offersMtime;
marketsDataUpdated = true;
}
const tradesMtime = this.getFileMtime(Bisq.MARKET_JSON_FILE_PATHS.trades);
if (tradesMtime > this.tradesLastMtime) {
const tradesData = await this.loadData<TradesData[]>(Bisq.MARKET_JSON_FILE_PATHS.trades);
logger.debug('Updating Bisq Market Trades Data');
bisqMarket.setTradesData(tradesData);
this.tradesLastMtime = tradesMtime;
marketsDataUpdated = true;
}
if (marketsDataUpdated) {
bisqMarket.updateCache();
const time = new Date().getTime() - start;
logger.debug('Bisq market data updated in ' + time + ' ms');
}
} catch (e) {
logger.err('loadBisqDumpFile() error: ' + (e instanceof Error ? e.message : e));
}
}
private getFileMtime(path: string): Date {
const stats = fs.statSync(Bisq.MARKET_JSON_PATH + path);
return stats.mtime;
}
private loadData<T>(path: string): Promise<T> {
return new Promise((resolve, reject) => {
fs.readFile(Bisq.MARKET_JSON_PATH + path, 'utf8', (err, data) => {
if (err) {
return reject(err);
}
try {
const parsedData = JSON.parse(data);
resolve(parsedData);
} catch (e) {
reject('JSON parse error (' + path + ')');
}
});
});
}
}
export default new Bisq();
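The directory watcher above coalesces bursts of change events with a 4-second debounce before reloading the dump files. The same pattern in isolation (the watched path is a placeholder):

import * as fs from 'fs';

// Debounced watcher: many rapid change events collapse into one onChange call.
function watchDebounced(dir: string, onChange: () => void, debounceMs = 4000): fs.FSWatcher {
  let timer: NodeJS.Timeout | null = null;
  return fs.watch(dir, () => {
    if (timer) {
      clearTimeout(timer);
    }
    timer = setTimeout(onChange, debounceMs);
  });
}

// Usage (placeholder path):
// watchDebounced('/path/to/bisq/markets', () => console.log('reload dump files'));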

File diff suppressed because it is too large

View File

@@ -1,28 +1,19 @@
import { IEsploraApi } from './esplora-api.interface';
import { IMempoolInfo, ITransaction, IBlock } from '../../interfaces';
export interface AbstractBitcoinApi {
$getRawMempool(): Promise<IEsploraApi.Transaction['txid'][]>;
$getRawTransaction(txId: string, skipConversion?: boolean, addPrevout?: boolean, lazyPrevouts?: boolean): Promise<IEsploraApi.Transaction>;
$getTransactionHex(txId: string): Promise<string>;
$getBlockHeightTip(): Promise<number>;
$getBlockHashTip(): Promise<string>;
$getTxIdsForBlock(hash: string): Promise<string[]>;
$getBlockHash(height: number): Promise<string>;
$getBlockHeader(hash: string): Promise<string>;
$getBlock(hash: string): Promise<IEsploraApi.Block>;
$getRawBlock(hash: string): Promise<Buffer>;
$getAddress(address: string): Promise<IEsploraApi.Address>;
$getAddressTransactions(address: string, lastSeenTxId: string): Promise<IEsploraApi.Transaction[]>;
$getAddressPrefix(prefix: string): string[];
$sendRawTransaction(rawTransaction: string): Promise<string>;
$getOutspend(txId: string, vout: number): Promise<IEsploraApi.Outspend>;
$getOutspends(txId: string): Promise<IEsploraApi.Outspend[]>;
$getBatchedOutspends(txId: string[]): Promise<IEsploraApi.Outspend[][]>;
}
export interface BitcoinRpcCredentials {
host: string;
port: number;
user: string;
pass: string;
timeout: number;
getMempoolInfo(): Promise<IMempoolInfo>;
getRawMempool(): Promise<ITransaction['txid'][]>;
getRawTransaction(txId: string): Promise<ITransaction>;
getBlockCount(): Promise<number>;
getBlockAndTransactions(hash: string): Promise<IBlock>;
getBlockHash(height: number): Promise<string>;
getBlock(hash: string): Promise<IBlock>;
getBlockTransactions(hash: string): Promise<IBlock>;
getBlockTransactionsFromIndex(hash: string, index: number): Promise<IBlock>;
getBlocks(): Promise<string>;
getBlocksFromHeight(height: number): Promise<string>;
getAddress(address: string): Promise<IBlock>;
getAddressTransactions(address: string): Promise<IBlock>;
getAddressTransactionsFromLastSeenTxid(address: string, lastSeenTxid: string): Promise<IBlock>;
}

View File

@@ -1,20 +1,16 @@
import config from '../../config';
const config = require('../../../mempool-config.json');
import { AbstractBitcoinApi } from './bitcoin-api-abstract-factory';
import EsploraApi from './esplora-api';
import BitcoinApi from './bitcoin-api';
import ElectrumApi from './electrum-api';
import bitcoinClient from './bitcoin-client';
import BitcoindApi from './bitcoind-api';
import ElectrsApi from './electrs-api';
function bitcoinApiFactory(): AbstractBitcoinApi {
switch (config.MEMPOOL.BACKEND) {
case 'esplora':
return new EsploraApi();
case 'electrum':
return new ElectrumApi(bitcoinClient);
case 'none':
function factory(): AbstractBitcoinApi {
switch (config.BACKEND_API) {
case 'electrs':
return new ElectrsApi();
case 'bitcoind':
default:
return new BitcoinApi(bitcoinClient);
return new BitcoindApi();
}
}
export default bitcoinApiFactory();
export default factory();
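Both versions of this factory follow the same pattern: pick a concrete backend once from config and export the instance, so every importer shares a single backend. A reduced sketch of the pattern, with hypothetical class and config names:

interface BackendApi {
  getTipHeight(): Promise<number>;
}

// Hypothetical stand-ins for the concrete implementations chosen above.
class EsploraBackend implements BackendApi {
  async getTipHeight(): Promise<number> { return 0; }
}
class RpcBackend implements BackendApi {
  async getTipHeight(): Promise<number> { return 0; }
}

function backendFactory(kind: string): BackendApi {
  switch (kind) {
    case 'esplora':
      return new EsploraBackend();
    default:
      return new RpcBackend();
  }
}

// Exporting the instance (not the factory) makes the choice a process-wide singleton.
export default backendFactory(process.env.BACKEND ?? 'esplora');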

View File

@@ -1,175 +0,0 @@
export namespace IBitcoinApi {
export interface MempoolInfo {
loaded: boolean; // (boolean) True if the mempool is fully loaded
size: number; // (numeric) Current tx count
bytes: number; // (numeric) Sum of all virtual transaction sizes as defined in BIP 141.
usage: number; // (numeric) Total memory usage for the mempool
total_fee: number; // (numeric) Total fees of transactions in the mempool
maxmempool: number; // (numeric) Maximum memory usage for the mempool
mempoolminfee: number; // (numeric) Minimum fee rate in BTC/kB for tx to be accepted.
minrelaytxfee: number; // (numeric) Current minimum relay fee for transactions
}
export interface RawMempool { [txId: string]: MempoolEntry; }
export interface MempoolEntry {
vsize: number; // (numeric) virtual transaction size as defined in BIP 141.
weight: number; // (numeric) transaction weight as defined in BIP 141.
time: number; // (numeric) local time transaction entered pool in seconds since 1 Jan 1970 GMT
height: number; // (numeric) block height when transaction entered pool
descendantcount: number; // (numeric) number of in-mempool descendant transactions (including this one)
descendantsize: number; // (numeric) virtual transaction size of in-mempool descendants (including this one)
ancestorcount: number; // (numeric) number of in-mempool ancestor transactions (including this one)
ancestorsize: number; // (numeric) virtual transaction size of in-mempool ancestors (including this one)
wtxid: string; // (string) hash of serialized transaction, including witness data
fees: {
base: number; // (numeric) transaction fee in BTC
modified: number; // (numeric) transaction fee with fee deltas used for mining priority in BTC
ancestor: number; // (numeric) modified fees (see above) of in-mempool ancestors (including this one) in BTC
descendant: number; // (numeric) modified fees (see above) of in-mempool descendants (including this one) in BTC
};
depends: string[]; // (array) parent transaction ids
spentby: string[]; // (array) unconfirmed transactions spending outputs from this transaction
'bip125-replaceable': boolean; // (boolean) Whether this transaction could be replaced due to BIP125 (replace-by-fee)
}
export interface Block {
hash: string; // (string) the block hash (same as provided)
confirmations: number; // (numeric) The number of confirmations, or -1 if the block is not on the main chain
size: number; // (numeric) The block size
strippedsize: number; // (numeric) The block size excluding witness data
weight: number; // (numeric) The block weight as defined in BIP 141
height: number; // (numeric) The block height or index
version: number; // (numeric) The block version
versionHex: string; // (string) The block version formatted in hexadecimal
merkleroot: string; // (string) The merkle root
tx: Transaction[];
time: number; // (numeric) The block time expressed in UNIX epoch time
mediantime: number; // (numeric) The median block time expressed in UNIX epoch time
nonce: number; // (numeric) The nonce
bits: string; // (string) The bits
difficulty: number; // (numeric) The difficulty
chainwork: string; // (string) Expected number of hashes required to produce the chain up to this block (in hex)
nTx: number; // (numeric) The number of transactions in the block
previousblockhash: string; // (string) The hash of the previous block
nextblockhash: string; // (string) The hash of the next block
}
export interface Transaction {
in_active_chain: boolean; // (boolean) Whether specified block is in the active chain or not
hex: string; // (string) The serialized, hex-encoded data for 'txid'
txid: string; // (string) The transaction id (same as provided)
hash: string; // (string) The transaction hash (differs from txid for witness transactions)
size: number; // (numeric) The serialized transaction size
vsize: number; // (numeric) The virtual transaction size (differs from size for witness transactions)
weight: number; // (numeric) The transaction's weight (between vsize*4-3 and vsize*4)
version: number; // (numeric) The version
locktime: number; // (numeric) The lock time
vin: Vin[];
vout: Vout[];
blockhash: string; // (string) the block hash
confirmations: number; // (numeric) The confirmations
blocktime: number; // (numeric) The block time expressed in UNIX epoch time
time: number; // (numeric) Same as blocktime
}
export interface VerboseBlock extends Block {
tx: VerboseTransaction[]; // The transactions in the format of the getrawtransaction RPC. Different from verbosity = 1 "tx" result
}
export interface VerboseTransaction extends Transaction {
fee?: number; // (numeric) The transaction fee in BTC, omitted if block undo data is not available
}
export interface Vin {
txid?: string; // (string) The transaction id
vout?: number; // (numeric)
scriptSig?: { // (json object) The script
asm: string; // (string) asm
hex: string; // (string) hex
};
sequence: number; // (numeric) The script sequence number
txinwitness?: string[]; // (string) hex-encoded witness data
coinbase?: string;
is_pegin?: boolean; // (boolean) Elements peg-in
}
export interface Vout {
value: number; // (numeric) The value in BTC
n: number; // (numeric) index
asset?: string; // (string) Elements asset id
scriptPubKey: { // (json object)
asm: string; // (string) the asm
hex: string; // (string) the hex
reqSigs?: number; // (numeric) The required sigs
type: string; // (string) The type, eg 'pubkeyhash'
address?: string; // (string) bitcoin address
addresses?: string[]; // (string) bitcoin addresses
pegout_chain?: string; // (string) Elements peg-out chain
pegout_addresses?: string[]; // (string) Elements peg-out addresses
};
}
export interface AddressInformation {
isvalid: boolean; // (boolean) If the address is valid or not. If not, this is the only property returned.
isvalid_parent?: boolean; // (boolean) Elements only
address: string; // (string) The bitcoin address validated
scriptPubKey: string; // (string) The hex-encoded scriptPubKey generated by the address
isscript: boolean; // (boolean) If the key is a script
iswitness: boolean; // (boolean) If the address is a witness
witness_version?: number; // (numeric, optional) The version number of the witness program
witness_program?: string; // (string, optional) The hex value of the witness program
confidential_key?: string; // (string) Elements only
unconfidential?: string; // (string) Elements only
}
export interface ChainTips {
height: number; // (numeric) height of the chain tip
hash: string; // (string) block hash of the tip
branchlen: number; // (numeric) zero for main chain, otherwise length of branch connecting the tip to the main chain
status: 'invalid' | 'headers-only' | 'valid-headers' | 'valid-fork' | 'active';
}
export interface BlockchainInfo {
chain: string; // (string) current network name as defined in BIP70 (main, test, regtest)
blocks: number; // (numeric) the current number of blocks processed in the server
headers: number; // (numeric) the current number of headers we have validated
bestblockhash: string; // (string) the hash of the currently best block
difficulty: number; // (numeric) the current difficulty
mediantime: number; // (numeric) median time for the current best block
verificationprogress: number; // (numeric) estimate of verification progress [0..1]
initialblockdownload: boolean; // (bool) (debug information) estimate of whether this node is in Initial Block Download mode.
chainwork: string; // (string) total amount of work in active chain, in hexadecimal
size_on_disk: number; // (numeric) the estimated size of the block and undo files on disk
pruned: boolean; // (boolean) if the blocks are subject to pruning
pruneheight: number; // (numeric) lowest-height complete block stored (only present if pruning is enabled)
automatic_pruning: boolean; // (boolean) whether automatic pruning is enabled (only present if pruning is enabled)
prune_target_size: number; // (numeric) the target size used by pruning (only present if automatic pruning is enabled)
softforks: SoftFork[]; // (array) status of softforks in progress
bip9_softforks: { [name: string]: Bip9SoftForks[] }; // (object) status of BIP9 softforks in progress
warnings: string; // (string) any network and blockchain warnings.
}
interface SoftFork {
id: string; // (string) name of softfork
version: number; // (numeric) block version
reject: { // (object) progress toward rejecting pre-softfork blocks
status: boolean; // (boolean) true if threshold reached
},
}
interface Bip9SoftForks {
status: string; // (string) one of defined, started, locked_in, active, failed
bit: number; // (numeric) the bit (0-28) in the block version field used to signal this softfork (only for started status)
startTime: number; // (numeric) the minimum median time past of a block at which the bit gains its meaning
timeout: number; // (numeric) the median time past of a block at which the deployment is considered failed if not yet locked in
since: number; // (numeric) height of the first block to which the status applies
statistics: { // (object) numeric statistics about BIP9 signalling for a softfork (only for started status)
period: number; // (numeric) the length in blocks of the BIP9 signalling period
threshold: number; // (numeric) the number of blocks with the version bit set required to activate the feature
elapsed: number; // (numeric) the number of blocks elapsed since the beginning of the current period
count: number; // (numeric) the number of blocks with the version bit set in the current period
possible: boolean; // (boolean) returns false if there are not enough blocks left in this period to pass activation threshold
}
}
}
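The MempoolEntry fields above are enough to derive the ancestor-inclusive fee rate miners act on: fees are denominated in BTC and sizes in vbytes, so the rate in sat/vB is the ancestor fee times 1e8 over the ancestor size. A small sketch against the shape documented above:

// Ancestor-inclusive fee rate in sat/vB from the MempoolEntry fields above.
function effectiveFeeRate(entry: { fees: { ancestor: number }; ancestorsize: number }): number {
  return (entry.fees.ancestor * 100000000) / entry.ancestorsize;
}

// 0.00005 BTC of ancestor fees over 250 vbytes -> 20 sat/vB
console.log(effectiveFeeRate({ fees: { ancestor: 0.00005 }, ancestorsize: 250 }));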

View File

@@ -1,425 +0,0 @@
import * as bitcoinjs from 'bitcoinjs-lib';
import { AbstractBitcoinApi } from './bitcoin-api-abstract-factory';
import { IBitcoinApi } from './bitcoin-api.interface';
import { IEsploraApi } from './esplora-api.interface';
import blocks from '../blocks';
import mempool from '../mempool';
import { TransactionExtended } from '../../mempool.interfaces';
class BitcoinApi implements AbstractBitcoinApi {
private rawMempoolCache: IBitcoinApi.RawMempool | null = null;
protected bitcoindClient: any;
constructor(bitcoinClient: any) {
this.bitcoindClient = bitcoinClient;
}
static convertBlock(block: IBitcoinApi.Block): IEsploraApi.Block {
return {
id: block.hash,
height: block.height,
version: block.version,
timestamp: block.time,
bits: parseInt(block.bits, 16),
nonce: block.nonce,
difficulty: block.difficulty,
merkle_root: block.merkleroot,
tx_count: block.nTx,
size: block.size,
weight: block.weight,
previousblockhash: block.previousblockhash,
};
}
$getRawTransaction(txId: string, skipConversion = false, addPrevout = false, lazyPrevouts = false): Promise<IEsploraApi.Transaction> {
// If the transaction is in the mempool we already converted and fetched the fee. Only prevouts are missing
const txInMempool = mempool.getMempool()[txId];
if (txInMempool && addPrevout) {
return this.$addPrevouts(txInMempool);
}
return this.bitcoindClient.getRawTransaction(txId, true)
.then((transaction: IBitcoinApi.Transaction) => {
if (skipConversion) {
transaction.vout.forEach((vout) => {
vout.value = Math.round(vout.value * 100000000);
});
return transaction;
}
return this.$convertTransaction(transaction, addPrevout, lazyPrevouts);
})
.catch((e: Error) => {
if (e.message.startsWith('The genesis block coinbase')) {
return this.$returnCoinbaseTransaction();
}
throw e;
});
}
$getTransactionHex(txId: string): Promise<string> {
return this.$getRawTransaction(txId, true)
.then((tx) => tx.hex || '');
}
$getBlockHeightTip(): Promise<number> {
return this.bitcoindClient.getChainTips()
.then((result: IBitcoinApi.ChainTips[]) => {
return result.find(tip => tip.status === 'active')!.height;
});
}
$getBlockHashTip(): Promise<string> {
return this.bitcoindClient.getChainTips()
.then((result: IBitcoinApi.ChainTips[]) => {
return result.find(tip => tip.status === 'active')!.hash;
});
}
$getTxIdsForBlock(hash: string): Promise<string[]> {
return this.bitcoindClient.getBlock(hash, 1)
.then((rpcBlock: IBitcoinApi.Block) => rpcBlock.tx);
}
$getRawBlock(hash: string): Promise<Buffer> {
return this.bitcoindClient.getBlock(hash, 0)
.then((raw: string) => Buffer.from(raw, 'hex'));
}
$getBlockHash(height: number): Promise<string> {
return this.bitcoindClient.getBlockHash(height);
}
$getBlockHeader(hash: string): Promise<string> {
return this.bitcoindClient.getBlockHeader(hash, false);
}
async $getBlock(hash: string): Promise<IEsploraApi.Block> {
const foundBlock = blocks.getBlocks().find((block) => block.id === hash);
if (foundBlock) {
return foundBlock;
}
return this.bitcoindClient.getBlock(hash)
.then((block: IBitcoinApi.Block) => BitcoinApi.convertBlock(block));
}
$getAddress(address: string): Promise<IEsploraApi.Address> {
throw new Error('Method getAddress not supported by the Bitcoin RPC API.');
}
$getAddressTransactions(address: string, lastSeenTxId: string): Promise<IEsploraApi.Transaction[]> {
throw new Error('Method getAddressTransactions not supported by the Bitcoin RPC API.');
}
$getRawMempool(): Promise<IEsploraApi.Transaction['txid'][]> {
return this.bitcoindClient.getRawMemPool();
}
$getAddressPrefix(prefix: string): string[] {
const found: { [address: string]: string } = {};
const mp = mempool.getMempool();
for (const tx in mp) {
for (const vout of mp[tx].vout) {
if (vout.scriptpubkey_address.indexOf(prefix) === 0) {
found[vout.scriptpubkey_address] = '';
if (Object.keys(found).length >= 10) {
return Object.keys(found);
}
}
}
}
return Object.keys(found);
}
$sendRawTransaction(rawTransaction: string): Promise<string> {
return this.bitcoindClient.sendRawTransaction(rawTransaction);
}
async $getOutspend(txId: string, vout: number): Promise<IEsploraApi.Outspend> {
const txOut = await this.bitcoindClient.getTxOut(txId, vout, false);
return {
spent: txOut === null,
status: {
confirmed: true,
}
};
}
async $getOutspends(txId: string): Promise<IEsploraApi.Outspend[]> {
const outSpends: IEsploraApi.Outspend[] = [];
const tx = await this.$getRawTransaction(txId, true, false);
for (let i = 0; i < tx.vout.length; i++) {
if (tx.status && tx.status.block_height === 0) {
outSpends.push({
spent: false
});
} else {
const txOut = await this.bitcoindClient.getTxOut(txId, i);
outSpends.push({
spent: txOut === null,
});
}
}
return outSpends;
}
async $getBatchedOutspends(txId: string[]): Promise<IEsploraApi.Outspend[][]> {
const outspends: IEsploraApi.Outspend[][] = [];
for (const tx of txId) {
const outspend = await this.$getOutspends(tx);
outspends.push(outspend);
}
return outspends;
}
$getEstimatedHashrate(blockHeight: number): Promise<number> {
// 120 is the default block span in Core
return this.bitcoindClient.getNetworkHashPs(120, blockHeight);
}
protected async $convertTransaction(transaction: IBitcoinApi.Transaction, addPrevout: boolean, lazyPrevouts = false): Promise<IEsploraApi.Transaction> {
let esploraTransaction: IEsploraApi.Transaction = {
txid: transaction.txid,
version: transaction.version,
locktime: transaction.locktime,
size: transaction.size,
weight: transaction.weight,
fee: 0,
vin: [],
vout: [],
status: { confirmed: false },
};
esploraTransaction.vout = transaction.vout.map((vout) => {
return {
value: Math.round(vout.value * 100000000),
scriptpubkey: vout.scriptPubKey.hex,
scriptpubkey_address: vout.scriptPubKey && vout.scriptPubKey.address ? vout.scriptPubKey.address
: vout.scriptPubKey.addresses ? vout.scriptPubKey.addresses[0] : '',
scriptpubkey_asm: vout.scriptPubKey.asm ? this.convertScriptSigAsm(vout.scriptPubKey.hex) : '',
scriptpubkey_type: this.translateScriptPubKeyType(vout.scriptPubKey.type),
};
});
esploraTransaction.vin = transaction.vin.map((vin) => {
return {
is_coinbase: !!vin.coinbase,
prevout: null,
scriptsig: vin.scriptSig && vin.scriptSig.hex || vin.coinbase || '',
scriptsig_asm: vin.scriptSig && this.convertScriptSigAsm(vin.scriptSig.hex) || '',
sequence: vin.sequence,
txid: vin.txid || '',
vout: vin.vout || 0,
witness: vin.txinwitness || [],
inner_redeemscript_asm: '',
inner_witnessscript_asm: '',
};
});
if (transaction.confirmations) {
esploraTransaction.status = {
confirmed: true,
block_height: blocks.getCurrentBlockHeight() - transaction.confirmations + 1,
block_hash: transaction.blockhash,
block_time: transaction.blocktime,
};
}
if (addPrevout) {
esploraTransaction = await this.$calculateFeeFromInputs(esploraTransaction, false, lazyPrevouts);
} else if (!transaction.confirmations) {
esploraTransaction = await this.$appendMempoolFeeData(esploraTransaction);
}
return esploraTransaction;
}
private translateScriptPubKeyType(outputType: string): string {
const map = {
'pubkey': 'p2pk',
'pubkeyhash': 'p2pkh',
'scripthash': 'p2sh',
'witness_v0_keyhash': 'v0_p2wpkh',
'witness_v0_scripthash': 'v0_p2wsh',
'witness_v1_taproot': 'v1_p2tr',
'nonstandard': 'nonstandard',
'multisig': 'multisig',
'nulldata': 'op_return'
};
if (map[outputType]) {
return map[outputType];
} else {
return 'unknown';
}
}
private async $appendMempoolFeeData(transaction: IEsploraApi.Transaction): Promise<IEsploraApi.Transaction> {
if (transaction.fee) {
return transaction;
}
let mempoolEntry: IBitcoinApi.MempoolEntry;
if (!mempool.isInSync() && !this.rawMempoolCache) {
this.rawMempoolCache = await this.$getRawMempoolVerbose();
}
if (this.rawMempoolCache && this.rawMempoolCache[transaction.txid]) {
mempoolEntry = this.rawMempoolCache[transaction.txid];
} else {
mempoolEntry = await this.$getMempoolEntry(transaction.txid);
}
transaction.fee = Math.round(mempoolEntry.fees.base * 100000000);
return transaction;
}
protected async $addPrevouts(transaction: TransactionExtended): Promise<TransactionExtended> {
for (const vin of transaction.vin) {
if (vin.prevout) {
continue;
}
const innerTx = await this.$getRawTransaction(vin.txid, false, false);
vin.prevout = innerTx.vout[vin.vout];
this.addInnerScriptsToVin(vin);
}
return transaction;
}
protected $returnCoinbaseTransaction(): Promise<IEsploraApi.Transaction> {
return this.bitcoindClient.getBlockHash(0).then((hash: string) =>
this.bitcoindClient.getBlock(hash, 2)
.then((block: IBitcoinApi.Block) => {
return this.$convertTransaction(Object.assign(block.tx[0], {
confirmations: blocks.getCurrentBlockHeight() + 1,
blocktime: block.time }), false);
})
);
}
private $getMempoolEntry(txid: string): Promise<IBitcoinApi.MempoolEntry> {
return this.bitcoindClient.getMempoolEntry(txid);
}
private $getRawMempoolVerbose(): Promise<IBitcoinApi.RawMempool> {
return this.bitcoindClient.getRawMemPool(true);
}
private async $calculateFeeFromInputs(transaction: IEsploraApi.Transaction, addPrevout: boolean, lazyPrevouts: boolean): Promise<IEsploraApi.Transaction> {
if (transaction.vin[0].is_coinbase) {
transaction.fee = 0;
return transaction;
}
let totalIn = 0;
for (let i = 0; i < transaction.vin.length; i++) {
if (lazyPrevouts && i > 12) {
transaction.vin[i].lazy = true;
continue;
}
const innerTx = await this.$getRawTransaction(transaction.vin[i].txid, false, false);
transaction.vin[i].prevout = innerTx.vout[transaction.vin[i].vout];
this.addInnerScriptsToVin(transaction.vin[i]);
totalIn += innerTx.vout[transaction.vin[i].vout].value;
}
if (lazyPrevouts && transaction.vin.length > 12) {
transaction.fee = -1;
} else {
const totalOut = transaction.vout.reduce((p, output) => p + output.value, 0);
transaction.fee = parseFloat((totalIn - totalOut).toFixed(8));
}
return transaction;
}
private convertScriptSigAsm(hex: string): string {
const buf = Buffer.from(hex, 'hex');
const b: string[] = [];
let i = 0;
while (i < buf.length) {
const op = buf[i];
if (op >= 0x01 && op <= 0x4e) {
i++;
let push: number;
if (op === 0x4c) {
push = buf.readUInt8(i);
b.push('OP_PUSHDATA1');
i += 1;
} else if (op === 0x4d) {
push = buf.readUInt16LE(i);
b.push('OP_PUSHDATA2');
i += 2;
} else if (op === 0x4e) {
push = buf.readUInt32LE(i);
b.push('OP_PUSHDATA4');
i += 4;
} else {
push = op;
b.push('OP_PUSHBYTES_' + push);
}
const data = buf.slice(i, i + push);
if (data.length !== push) {
break;
}
b.push(data.toString('hex'));
i += data.length;
} else {
if (op === 0x00) {
b.push('OP_0');
} else if (op === 0x4f) {
b.push('OP_PUSHNUM_NEG1');
} else if (op === 0xb1) {
b.push('OP_CLTV');
} else if (op === 0xb2) {
b.push('OP_CSV');
} else if (op === 0xba) {
b.push('OP_CHECKSIGADD');
} else {
const opcode = bitcoinjs.script.toASM([ op ]);
if (opcode && op < 0xfd) {
if (/^OP_(\d+)$/.test(opcode)) {
b.push(opcode.replace(/^OP_(\d+)$/, 'OP_PUSHNUM_$1'));
} else {
b.push(opcode);
}
} else {
b.push('OP_RETURN_' + op);
}
}
i += 1;
}
}
return b.join(' ');
}
private addInnerScriptsToVin(vin: IEsploraApi.Vin): void {
if (!vin.prevout) {
return;
}
if (vin.prevout.scriptpubkey_type === 'p2sh') {
const redeemScript = vin.scriptsig_asm.split(' ').reverse()[0];
vin.inner_redeemscript_asm = this.convertScriptSigAsm(redeemScript);
if (vin.witness && vin.witness.length > 2) {
const witnessScript = vin.witness[vin.witness.length - 1];
vin.inner_witnessscript_asm = this.convertScriptSigAsm(witnessScript);
}
}
if (vin.prevout.scriptpubkey_type === 'v0_p2wsh' && vin.witness) {
const witnessScript = vin.witness[vin.witness.length - 1];
vin.inner_witnessscript_asm = this.convertScriptSigAsm(witnessScript);
}
if (vin.prevout.scriptpubkey_type === 'v1_p2tr' && vin.witness && vin.witness.length > 1) {
const witnessScript = vin.witness[vin.witness.length - 2];
vin.inner_witnessscript_asm = this.convertScriptSigAsm(witnessScript);
}
}
}
export default BitcoinApi;
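$calculateFeeFromInputs above applies the standard identity fee = sum(inputs) - sum(outputs) once every prevout has been fetched; values are in satoshis after $convertTransaction multiplies by 1e8. A worked sketch with hypothetical values:

// fee = total input value - total output value, in satoshis.
const prevoutValues = [60000, 45000]; // values of the outputs being spent
const outputValues = [100000, 3000]; // values of the newly created outputs
const totalIn = prevoutValues.reduce((p, v) => p + v, 0);
const totalOut = outputValues.reduce((p, v) => p + v, 0);
console.log(totalIn - totalOut); // 2000 sats paid as fee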

View File

@@ -1,13 +0,0 @@
import config from '../../config';
const bitcoin = require('../../rpc-api/index');
import { BitcoinRpcCredentials } from './bitcoin-api-abstract-factory';
const nodeRpcCredentials: BitcoinRpcCredentials = {
host: config.CORE_RPC.HOST,
port: config.CORE_RPC.PORT,
user: config.CORE_RPC.USERNAME,
pass: config.CORE_RPC.PASSWORD,
timeout: 60000,
};
export default new bitcoin.Client(nodeRpcCredentials);

View File

@@ -1,13 +0,0 @@
import config from '../../config';
const bitcoin = require('../../rpc-api/index');
import { BitcoinRpcCredentials } from './bitcoin-api-abstract-factory';
const nodeRpcCredentials: BitcoinRpcCredentials = {
host: config.SECOND_CORE_RPC.HOST,
port: config.SECOND_CORE_RPC.PORT,
user: config.SECOND_CORE_RPC.USERNAME,
pass: config.SECOND_CORE_RPC.PASSWORD,
timeout: 60000,
};
export default new bitcoin.Client(nodeRpcCredentials);

View File

@@ -1,648 +0,0 @@
import { Application, Request, Response } from 'express';
import axios from 'axios';
import * as bitcoinjs from 'bitcoinjs-lib';
import config from '../../config';
import websocketHandler from '../websocket-handler';
import mempool from '../mempool';
import feeApi from '../fee-api';
import mempoolBlocks from '../mempool-blocks';
import bitcoinApi from './bitcoin-api-factory';
import { Common } from '../common';
import backendInfo from '../backend-info';
import transactionUtils from '../transaction-utils';
import { IEsploraApi } from './esplora-api.interface';
import loadingIndicators from '../loading-indicators';
import { TransactionExtended } from '../../mempool.interfaces';
import logger from '../../logger';
import blocks from '../blocks';
import bitcoinClient from './bitcoin-client';
import difficultyAdjustment from '../difficulty-adjustment';
import transactionRepository from '../../repositories/TransactionRepository';
class BitcoinRoutes {
public initRoutes(app: Application) {
app
.get(config.MEMPOOL.API_URL_PREFIX + 'transaction-times', this.getTransactionTimes)
.get(config.MEMPOOL.API_URL_PREFIX + 'outspends', this.$getBatchedOutspends)
.get(config.MEMPOOL.API_URL_PREFIX + 'cpfp/:txId', this.$getCpfpInfo)
.get(config.MEMPOOL.API_URL_PREFIX + 'difficulty-adjustment', this.getDifficultyChange)
.get(config.MEMPOOL.API_URL_PREFIX + 'fees/recommended', this.getRecommendedFees)
.get(config.MEMPOOL.API_URL_PREFIX + 'fees/mempool-blocks', this.getMempoolBlocks)
.get(config.MEMPOOL.API_URL_PREFIX + 'backend-info', this.getBackendInfo)
.get(config.MEMPOOL.API_URL_PREFIX + 'init-data', this.getInitData)
.get(config.MEMPOOL.API_URL_PREFIX + 'validate-address/:address', this.validateAddress)
.post(config.MEMPOOL.API_URL_PREFIX + 'tx/push', this.$postTransactionForm)
.get(config.MEMPOOL.API_URL_PREFIX + 'donations', async (req, res) => {
try {
const response = await axios.get(`${config.EXTERNAL_DATA_SERVER.MEMPOOL_API}/donations`, { responseType: 'stream', timeout: 10000 });
response.data.pipe(res);
} catch (e) {
res.status(500).end();
}
})
.get(config.MEMPOOL.API_URL_PREFIX + 'donations/images/:id', async (req, res) => {
try {
const response = await axios.get(`${config.EXTERNAL_DATA_SERVER.MEMPOOL_API}/donations/images/${req.params.id}`, {
responseType: 'stream', timeout: 10000
});
response.data.pipe(res);
} catch (e) {
res.status(500).end();
}
})
.get(config.MEMPOOL.API_URL_PREFIX + 'contributors', async (req, res) => {
try {
const response = await axios.get(`${config.EXTERNAL_DATA_SERVER.MEMPOOL_API}/contributors`, { responseType: 'stream', timeout: 10000 });
response.data.pipe(res);
} catch (e) {
res.status(500).end();
}
})
.get(config.MEMPOOL.API_URL_PREFIX + 'contributors/images/:id', async (req, res) => {
try {
const response = await axios.get(`${config.EXTERNAL_DATA_SERVER.MEMPOOL_API}/contributors/images/${req.params.id}`, {
responseType: 'stream', timeout: 10000
});
response.data.pipe(res);
} catch (e) {
res.status(500).end();
}
})
.get(config.MEMPOOL.API_URL_PREFIX + 'translators', async (req, res) => {
try {
const response = await axios.get(`${config.EXTERNAL_DATA_SERVER.MEMPOOL_API}/translators`, { responseType: 'stream', timeout: 10000 });
response.data.pipe(res);
} catch (e) {
res.status(500).end();
}
})
.get(config.MEMPOOL.API_URL_PREFIX + 'translators/images/:id', async (req, res) => {
try {
const response = await axios.get(`${config.EXTERNAL_DATA_SERVER.MEMPOOL_API}/translators/images/${req.params.id}`, {
responseType: 'stream', timeout: 10000
});
response.data.pipe(res);
} catch (e) {
res.status(500).end();
}
})
.get(config.MEMPOOL.API_URL_PREFIX + 'blocks', this.getBlocks.bind(this))
.get(config.MEMPOOL.API_URL_PREFIX + 'blocks/:height', this.getBlocks.bind(this))
.get(config.MEMPOOL.API_URL_PREFIX + 'block/:hash', this.getBlock)
.get(config.MEMPOOL.API_URL_PREFIX + 'block/:hash/summary', this.getStrippedBlockTransactions)
.get(config.MEMPOOL.API_URL_PREFIX + 'block/:hash/audit-summary', this.getBlockAuditSummary)
.post(config.MEMPOOL.API_URL_PREFIX + 'psbt/addparents', this.postPsbtCompletion)
;
if (config.MEMPOOL.BACKEND !== 'esplora') {
app
.get(config.MEMPOOL.API_URL_PREFIX + 'mempool', this.getMempool)
.get(config.MEMPOOL.API_URL_PREFIX + 'mempool/txids', this.getMempoolTxIds)
.get(config.MEMPOOL.API_URL_PREFIX + 'mempool/recent', this.getRecentMempoolTransactions)
.get(config.MEMPOOL.API_URL_PREFIX + 'tx/:txId', this.getTransaction)
.post(config.MEMPOOL.API_URL_PREFIX + 'tx', this.$postTransaction)
.get(config.MEMPOOL.API_URL_PREFIX + 'tx/:txId/hex', this.getRawTransaction)
.get(config.MEMPOOL.API_URL_PREFIX + 'tx/:txId/status', this.getTransactionStatus)
.get(config.MEMPOOL.API_URL_PREFIX + 'tx/:txId/outspends', this.getTransactionOutspends)
.get(config.MEMPOOL.API_URL_PREFIX + 'block/:hash/header', this.getBlockHeader)
.get(config.MEMPOOL.API_URL_PREFIX + 'blocks/tip/height', this.getBlockTipHeight)
.get(config.MEMPOOL.API_URL_PREFIX + 'blocks/tip/hash', this.getBlockTipHash)
.get(config.MEMPOOL.API_URL_PREFIX + 'block/:hash/raw', this.getRawBlock)
.get(config.MEMPOOL.API_URL_PREFIX + 'block/:hash/txids', this.getTxIdsForBlock)
.get(config.MEMPOOL.API_URL_PREFIX + 'block/:hash/txs', this.getBlockTransactions)
.get(config.MEMPOOL.API_URL_PREFIX + 'block/:hash/txs/:index', this.getBlockTransactions)
.get(config.MEMPOOL.API_URL_PREFIX + 'block-height/:height', this.getBlockHeight)
.get(config.MEMPOOL.API_URL_PREFIX + 'address/:address', this.getAddress)
.get(config.MEMPOOL.API_URL_PREFIX + 'address/:address/txs', this.getAddressTransactions)
.get(config.MEMPOOL.API_URL_PREFIX + 'address/:address/txs/chain/:txId', this.getAddressTransactions)
.get(config.MEMPOOL.API_URL_PREFIX + 'address-prefix/:prefix', this.getAddressPrefix)
;
}
}
private getInitData(req: Request, res: Response) {
try {
const result = websocketHandler.getInitData();
res.json(result);
} catch (e) {
res.status(500).send(e instanceof Error ? e.message : e);
}
}
private getRecommendedFees(req: Request, res: Response) {
if (!mempool.isInSync()) {
res.statusCode = 503;
res.send('Service Unavailable');
return;
}
const result = feeApi.getRecommendedFee();
res.json(result);
}
private getMempoolBlocks(req: Request, res: Response) {
try {
const result = mempoolBlocks.getMempoolBlocks();
res.json(result);
} catch (e) {
res.status(500).send(e instanceof Error ? e.message : e);
}
}
private getTransactionTimes(req: Request, res: Response) {
if (!Array.isArray(req.query.txId)) {
res.status(500).send('Not an array');
return;
}
const txIds: string[] = [];
for (const _txId in req.query.txId) {
if (typeof req.query.txId[_txId] === 'string') {
txIds.push(req.query.txId[_txId].toString());
}
}
const times = mempool.getFirstSeenForTransactions(txIds);
res.json(times);
}
private async $getBatchedOutspends(req: Request, res: Response) {
if (!Array.isArray(req.query.txId)) {
res.status(500).send('Not an array');
return;
}
if (req.query.txId.length > 50) {
res.status(400).send('Too many txids requested');
return;
}
const txIds: string[] = [];
for (const _txId in req.query.txId) {
if (typeof req.query.txId[_txId] === 'string') {
txIds.push(req.query.txId[_txId].toString());
}
}
try {
const batchedOutspends = await bitcoinApi.$getBatchedOutspends(txIds);
res.json(batchedOutspends);
} catch (e) {
res.status(500).send(e instanceof Error ? e.message : e);
}
}
private async $getCpfpInfo(req: Request, res: Response) {
if (!/^[a-fA-F0-9]{64}$/.test(req.params.txId)) {
res.status(501).send(`Invalid transaction ID.`);
return;
}
const tx = mempool.getMempool()[req.params.txId];
if (tx) {
if (tx?.cpfpChecked) {
res.json({
ancestors: tx.ancestors,
bestDescendant: tx.bestDescendant || null,
descendants: tx.descendants || null,
effectiveFeePerVsize: tx.effectiveFeePerVsize || null,
});
return;
}
const cpfpInfo = Common.setRelativesAndGetCpfpInfo(tx, mempool.getMempool());
res.json(cpfpInfo);
return;
} else {
const cpfpInfo = await transactionRepository.$getCpfpInfo(req.params.txId);
if (cpfpInfo) {
res.json(cpfpInfo);
return;
}
}
res.status(404).send(`Transaction has no CPFP info available.`);
}
private getBackendInfo(req: Request, res: Response) {
res.json(backendInfo.getBackendInfo());
}
private async getTransaction(req: Request, res: Response) {
try {
const transaction = await transactionUtils.$getTransactionExtended(req.params.txId, true);
res.json(transaction);
} catch (e) {
let statusCode = 500;
if (e instanceof Error && e.message && e.message.indexOf('No such mempool or blockchain transaction') > -1) {
statusCode = 404;
}
res.status(statusCode).send(e instanceof Error ? e.message : e);
}
}
private async getRawTransaction(req: Request, res: Response) {
try {
const transaction: IEsploraApi.Transaction = await bitcoinApi.$getRawTransaction(req.params.txId, true);
res.setHeader('content-type', 'text/plain');
res.send(transaction.hex);
} catch (e) {
let statusCode = 500;
if (e instanceof Error && e.message && e.message.indexOf('No such mempool or blockchain transaction') > -1) {
statusCode = 404;
}
res.status(statusCode).send(e instanceof Error ? e.message : e);
}
}
/**
* Takes the PSBT as text/plain body, parses it, and adds the full
* parent transaction to each input that doesn't already have it.
* This is used for BTCPayServer / Trezor users who need access to
* the full parent transaction even with segwit inputs.
* It will respond with a text/plain PSBT in the same format (hex|base64).
*/
private async postPsbtCompletion(req: Request, res: Response): Promise<void> {
res.setHeader('content-type', 'text/plain');
const notFoundError = `Couldn't get transaction hex for parent of input`;
try {
let psbt: bitcoinjs.Psbt;
let format: 'hex' | 'base64';
let isModified = false;
try {
psbt = bitcoinjs.Psbt.fromBase64(req.body);
format = 'base64';
} catch (e1) {
try {
psbt = bitcoinjs.Psbt.fromHex(req.body);
format = 'hex';
} catch (e2) {
throw new Error(`Unable to parse PSBT`);
}
}
for (const [index, input] of psbt.data.inputs.entries()) {
if (!input.nonWitnessUtxo) {
// Buffer.from ensures it won't be modified in place by reverse()
const txid = Buffer.from(psbt.txInputs[index].hash)
.reverse()
.toString('hex');
let transactionHex: string;
// If missing transaction, return 404 status error
try {
transactionHex = await bitcoinApi.$getTransactionHex(txid);
if (!transactionHex) {
throw new Error('');
}
} catch (err) {
throw new Error(`${notFoundError} #${index} @ ${txid}`);
}
psbt.updateInput(index, {
nonWitnessUtxo: Buffer.from(transactionHex, 'hex'),
});
if (!isModified) {
isModified = true;
}
}
}
if (isModified) {
res.send(format === 'hex' ? psbt.toHex() : psbt.toBase64());
} else {
// Not modified
// 422 Unprocessable Entity
// https://developer.mozilla.org/en-US/docs/Web/HTTP/Status/422
res.status(422).send(`Psbt had no missing nonWitnessUtxos.`);
}
} catch (e: any) {
if (e instanceof Error && new RegExp(notFoundError).test(e.message)) {
res.status(404).send(e.message);
} else {
res.status(500).send(e instanceof Error ? e.message : e);
}
}
}
private async getTransactionStatus(req: Request, res: Response) {
try {
const transaction = await transactionUtils.$getTransactionExtended(req.params.txId, true);
res.json(transaction.status);
} catch (e) {
let statusCode = 500;
if (e instanceof Error && e.message && e.message.indexOf('No such mempool or blockchain transaction') > -1) {
statusCode = 404;
}
res.status(statusCode).send(e instanceof Error ? e.message : e);
}
}
private async getStrippedBlockTransactions(req: Request, res: Response) {
try {
const transactions = await blocks.$getStrippedBlockTransactions(req.params.hash);
res.setHeader('Expires', new Date(Date.now() + 1000 * 3600 * 24 * 30).toUTCString());
res.json(transactions);
} catch (e) {
res.status(500).send(e instanceof Error ? e.message : e);
}
}
private async getBlock(req: Request, res: Response) {
try {
const block = await blocks.$getBlock(req.params.hash);
const blockAge = new Date().getTime() / 1000 - block.timestamp;
const day = 24 * 3600;
let cacheDuration;
if (blockAge > 365 * day) {
cacheDuration = 30 * day;
} else if (blockAge > 30 * day) {
cacheDuration = 10 * day;
} else {
cacheDuration = 600;
}
res.setHeader('Expires', new Date(Date.now() + 1000 * cacheDuration).toUTCString());
res.json(block);
} catch (e) {
res.status(500).send(e instanceof Error ? e.message : e);
}
}
private async getBlockHeader(req: Request, res: Response) {
try {
const blockHeader = await bitcoinApi.$getBlockHeader(req.params.hash);
res.setHeader('content-type', 'text/plain');
res.send(blockHeader);
} catch (e) {
res.status(500).send(e instanceof Error ? e.message : e);
}
}
private async getBlockAuditSummary(req: Request, res: Response) {
try {
const transactions = await blocks.$getBlockAuditSummary(req.params.hash);
res.setHeader('Expires', new Date(Date.now() + 1000 * 3600 * 24 * 30).toUTCString());
res.json(transactions);
} catch (e) {
res.status(500).send(e instanceof Error ? e.message : e);
}
}
private async getBlocks(req: Request, res: Response) {
try {
if (['mainnet', 'testnet', 'signet'].includes(config.MEMPOOL.NETWORK)) { // Bitcoin
const height = req.params.height === undefined ? undefined : parseInt(req.params.height, 10);
res.setHeader('Expires', new Date(Date.now() + 1000 * 60).toUTCString());
res.json(await blocks.$getBlocks(height, 15));
} else { // Liquid, Bisq
return await this.getLegacyBlocks(req, res);
}
} catch (e) {
res.status(500).send(e instanceof Error ? e.message : e);
}
}
private async getLegacyBlocks(req: Request, res: Response) {
try {
const returnBlocks: IEsploraApi.Block[] = [];
const fromHeight = parseInt(req.params.height, 10) || blocks.getCurrentBlockHeight();
// Check if the block height exists in the local cache to skip the hash lookup
const blockByHeight = blocks.getBlocks().find((b) => b.height === fromHeight);
let startFromHash: string | null = null;
if (blockByHeight) {
startFromHash = blockByHeight.id;
} else {
startFromHash = await bitcoinApi.$getBlockHash(fromHeight);
}
let nextHash = startFromHash;
for (let i = 0; i < 10 && nextHash; i++) {
const localBlock = blocks.getBlocks().find((b) => b.id === nextHash);
if (localBlock) {
returnBlocks.push(localBlock);
nextHash = localBlock.previousblockhash;
} else {
const block = await bitcoinApi.$getBlock(nextHash);
returnBlocks.push(block);
nextHash = block.previousblockhash;
}
}
res.setHeader('Expires', new Date(Date.now() + 1000 * 60).toUTCString());
res.json(returnBlocks);
} catch (e) {
res.status(500).send(e instanceof Error ? e.message : e);
}
}
private async getBlockTransactions(req: Request, res: Response) {
try {
loadingIndicators.setProgress('blocktxs-' + req.params.hash, 0);
const txIds = await bitcoinApi.$getTxIdsForBlock(req.params.hash);
const transactions: TransactionExtended[] = [];
const startingIndex = Math.max(0, parseInt(req.params.index || '0', 10));
const endIndex = Math.min(startingIndex + 10, txIds.length);
for (let i = startingIndex; i < endIndex; i++) {
try {
const transaction = await transactionUtils.$getTransactionExtended(txIds[i], true, true);
transactions.push(transaction);
loadingIndicators.setProgress('blocktxs-' + req.params.hash, (i - startingIndex + 1) / (endIndex - startingIndex) * 100);
} catch (e) {
logger.debug('getBlockTransactions error: ' + (e instanceof Error ? e.message : e));
}
}
res.json(transactions);
} catch (e) {
loadingIndicators.setProgress('blocktxs-' + req.params.hash, 100);
res.status(500).send(e instanceof Error ? e.message : e);
}
}
private async getBlockHeight(req: Request, res: Response) {
try {
const blockHash = await bitcoinApi.$getBlockHash(parseInt(req.params.height, 10));
res.send(blockHash);
} catch (e) {
res.status(500).send(e instanceof Error ? e.message : e);
}
}
private async getAddress(req: Request, res: Response) {
if (config.MEMPOOL.BACKEND === 'none') {
res.status(405).send('Address lookups cannot be used with bitcoind as backend.');
return;
}
try {
const addressData = await bitcoinApi.$getAddress(req.params.address);
res.json(addressData);
} catch (e) {
if (e instanceof Error && e.message && (e.message.indexOf('too long') > 0 || e.message.indexOf('confirmed status') > 0)) {
return res.status(413).send(e instanceof Error ? e.message : e);
}
res.status(500).send(e instanceof Error ? e.message : e);
}
}
private async getAddressTransactions(req: Request, res: Response) {
if (config.MEMPOOL.BACKEND === 'none') {
res.status(405).send('Address lookups cannot be used with bitcoind as backend.');
return;
}
try {
const transactions = await bitcoinApi.$getAddressTransactions(req.params.address, req.params.txId);
res.json(transactions);
} catch (e) {
if (e instanceof Error && e.message && (e.message.indexOf('too long') > 0 || e.message.indexOf('confirmed status') > 0)) {
return res.status(413).send(e instanceof Error ? e.message : e);
}
res.status(500).send(e instanceof Error ? e.message : e);
}
}
private async getAdressTxChain(req: Request, res: Response) {
res.status(501).send('Not implemented');
}
private async getAddressPrefix(req: Request, res: Response) {
try {
const blockHash = await bitcoinApi.$getAddressPrefix(req.params.prefix);
res.send(blockHash);
} catch (e) {
res.status(500).send(e instanceof Error ? e.message : e);
}
}
private async getRecentMempoolTransactions(req: Request, res: Response) {
const latestTransactions = Object.entries(mempool.getMempool())
.sort((a, b) => (b[1].firstSeen || 0) - (a[1].firstSeen || 0))
.slice(0, 10).map((tx) => Common.stripTransaction(tx[1]));
res.json(latestTransactions);
}
private async getMempool(req: Request, res: Response) {
const info = mempool.getMempoolInfo();
res.json({
count: info.size,
vsize: info.bytes,
total_fee: info.total_fee * 1e8,
fee_histogram: []
});
}
private async getMempoolTxIds(req: Request, res: Response) {
try {
const rawMempool = await bitcoinApi.$getRawMempool();
res.send(rawMempool);
} catch (e) {
res.status(500).send(e instanceof Error ? e.message : e);
}
}
private async getBlockTipHeight(req: Request, res: Response) {
try {
const result = await bitcoinApi.$getBlockHeightTip();
res.json(result);
} catch (e) {
res.status(500).send(e instanceof Error ? e.message : e);
}
}
private async getBlockTipHash(req: Request, res: Response) {
try {
const result = await bitcoinApi.$getBlockHashTip();
res.setHeader('content-type', 'text/plain');
res.send(result);
} catch (e) {
res.status(500).send(e instanceof Error ? e.message : e);
}
}
private async getRawBlock(req: Request, res: Response) {
try {
const result = await bitcoinApi.$getRawBlock(req.params.hash);
res.setHeader('content-type', 'application/octet-stream');
res.send(result);
} catch (e) {
res.status(500).send(e instanceof Error ? e.message : e);
}
}
private async getTxIdsForBlock(req: Request, res: Response) {
try {
const result = await bitcoinApi.$getTxIdsForBlock(req.params.hash);
res.json(result);
} catch (e) {
res.status(500).send(e instanceof Error ? e.message : e);
}
}
private async validateAddress(req: Request, res: Response) {
try {
const result = await bitcoinClient.validateAddress(req.params.address);
res.json(result);
} catch (e) {
res.status(500).send(e instanceof Error ? e.message : e);
}
}
private async getTransactionOutspends(req: Request, res: Response) {
try {
const result = await bitcoinApi.$getOutspends(req.params.txId);
res.json(result);
} catch (e) {
res.status(500).send(e instanceof Error ? e.message : e);
}
}
private getDifficultyChange(req: Request, res: Response) {
try {
const da = difficultyAdjustment.getDifficultyAdjustment();
if (da) {
res.json(da);
} else {
res.status(503).send(`Service Temporarily Unavailable`);
}
} catch (e) {
res.status(500).send(e instanceof Error ? e.message : e);
}
}
private async $postTransaction(req: Request, res: Response) {
res.setHeader('content-type', 'text/plain');
try {
let rawTx;
if (typeof req.body === 'object') {
rawTx = Object.keys(req.body)[0];
} else {
rawTx = req.body;
}
const txIdResult = await bitcoinApi.$sendRawTransaction(rawTx);
res.send(txIdResult);
} catch (e: any) {
res.status(400).send(e.message && e.code ? 'sendrawtransaction RPC error: ' + JSON.stringify({ code: e.code, message: e.message })
: (e.message || 'Error'));
}
}
private async $postTransactionForm(req: Request, res: Response) {
res.setHeader('content-type', 'text/plain');
const matches = /tx=([a-z0-9]+)/.exec(req.body);
let txHex = '';
if (matches && matches[1]) {
txHex = matches[1];
}
try {
const txIdResult = await bitcoinClient.sendRawTransaction(txHex);
res.send(txIdResult);
} catch (e: any) {
res.status(400).send(e.message && e.code ? 'sendrawtransaction RPC error: ' + JSON.stringify({ code: e.code, message: e.message })
: (e.message || 'Error'));
}
}
}
export default new BitcoinRoutes();
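The tx/push route above accepts a form-encoded body and extracts the raw transaction with /tx=([a-z0-9]+)/, so the hex must be lowercase. A minimal client-side sketch (the base URL is a placeholder):

// Broadcast a raw transaction through the form endpoint registered above.
async function pushTransaction(rawTxHex: string): Promise<string> {
  const res = await fetch('https://example.com/api/v1/tx/push', { // placeholder URL
    method: 'POST',
    headers: { 'content-type': 'application/x-www-form-urlencoded' },
    body: 'tx=' + rawTxHex.toLowerCase(),
  });
  if (!res.ok) {
    throw new Error(await res.text()); // e.g. a sendrawtransaction RPC error
  }
  return res.text(); // the txid on success
}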

View File

@@ -0,0 +1,110 @@
const config = require('../../../mempool-config.json');
import * as bitcoin from 'bitcoin';
import { ITransaction, IMempoolInfo, IBlock } from '../../interfaces';
import { AbstractBitcoinApi } from './bitcoin-api-abstract-factory';
class BitcoindApi implements AbstractBitcoinApi {
client: any;
constructor() {
this.client = new bitcoin.Client({
host: config.BITCOIN_NODE_HOST,
port: config.BITCOIN_NODE_PORT,
user: config.BITCOIN_NODE_USER,
pass: config.BITCOIN_NODE_PASS,
});
}
getMempoolInfo(): Promise<IMempoolInfo> {
return new Promise((resolve, reject) => {
this.client.getMempoolInfo((err: Error, mempoolInfo: any) => {
if (err) {
return reject(err);
}
resolve(mempoolInfo);
});
});
}
getRawMempool(): Promise<ITransaction['txid'][]> {
return new Promise((resolve, reject) => {
this.client.getRawMemPool((err: Error, transactions: ITransaction['txid'][]) => {
if (err) {
return reject(err);
}
resolve(transactions);
});
});
}
getRawTransaction(txId: string): Promise<ITransaction> {
return new Promise((resolve, reject) => {
this.client.getRawTransaction(txId, true, (err: Error, txData: ITransaction) => {
if (err) {
return reject(err);
}
resolve(txData);
});
});
}
getBlockCount(): Promise<number> {
return new Promise((resolve, reject) => {
this.client.getBlockCount((err: Error, response: number) => {
if (err) {
return reject(err);
}
resolve(response);
});
});
}
getBlockAndTransactions(hash: string, verbosity: 1 | 2 = 1): Promise<IBlock> {
return new Promise((resolve, reject) => {
this.client.getBlock(hash, verbosity, (err: Error, block: IBlock) => {
if (err) {
return reject(err);
}
resolve(block);
});
});
}
getBlockHash(height: number): Promise<string> {
return new Promise((resolve, reject) => {
this.client.getBlockHash(height, (err: Error, response: string) => {
if (err) {
return reject(err);
}
resolve(response);
});
});
}
getBlock(hash: string): Promise<IBlock> {
throw new Error('Method not implemented.');
}
getBlocks(): Promise<string> {
throw new Error('Method not implemented.');
}
getBlocksFromHeight(height: number): Promise<string> {
throw new Error('Method not implemented.');
}
getBlockTransactions(hash: string): Promise<IBlock> {
throw new Error('Method not implemented.');
}
getBlockTransactionsFromIndex(hash: string, index: number): Promise<IBlock> {
throw new Error('Method not implemented.');
}
getAddress(address: string): Promise<IBlock> {
throw new Error('Method not implemented.');
}
getAddressTransactions(address: string): Promise<IBlock> {
throw new Error('Method not implemented.');
}
getAddressTransactionsFromLastSeenTxid(address: string, lastSeenTxid: string): Promise<IBlock> {
throw new Error('Method not implemented.');
}
}
export default BitcoindApi;
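Every method in BitcoindApi hand-writes the same callback-to-Promise adapter. The pattern reduces to one generic helper (the names here are hypothetical, not part of the file above):

// Generic adapter for Node-style (err, result) callbacks.
function promisify<T>(call: (cb: (err: Error | null, result: T) => void) => void): Promise<T> {
  return new Promise<T>((resolve, reject) => {
    call((err, result) => {
      if (err) {
        return reject(err);
      }
      resolve(result);
    });
  });
}

// Usage against a hypothetical client:
// const height = await promisify<number>((cb) => client.getBlockCount(cb));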

View File

@@ -0,0 +1,229 @@
const config = require('../../../mempool-config.json');
import { ITransaction, IMempoolInfo, IBlock } from '../../interfaces';
import { AbstractBitcoinApi } from './bitcoin-api-abstract-factory';
import * as request from 'request';
class ElectrsApi implements AbstractBitcoinApi {
constructor() {
}
getMempoolInfo(): Promise<IMempoolInfo> {
return new Promise((resolve, reject) => {
request(config.ELECTRS_API_URL + '/mempool', { json: true, timeout: 10000 }, (err, res, response) => {
if (err) {
reject(err);
} else if (res.statusCode !== 200) {
reject(response);
} else {
resolve({
size: response.count,
bytes: response.vsize,
});
}
});
});
}
getRawMempool(): Promise<ITransaction['txid'][]> {
return new Promise((resolve, reject) => {
request(config.ELECTRS_API_URL + '/mempool/txids', { json: true, timeout: 10000 }, (err, res, response) => {
if (err) {
reject(err);
} else if (res.statusCode !== 200) {
reject(response);
} else {
resolve(response);
}
});
});
}
getRawTransaction(txId: string): Promise<ITransaction> {
return new Promise((resolve, reject) => {
request(config.ELECTRS_API_URL + '/tx/' + txId, { json: true, timeout: 10000 }, (err, res, response) => {
if (err) {
reject(err);
} else if (res.statusCode !== 200) {
reject(response);
} else {
response.vsize = Math.round(response.weight / 4);
response.fee = response.fee / 100000000;
response.blockhash = response.status.block_hash;
resolve(response);
}
});
});
}
getBlockCount(): Promise<number> {
return new Promise((resolve, reject) => {
request(config.ELECTRS_API_URL + '/blocks/tip/height', { json: true, timeout: 10000 }, (err, res, response) => {
if (err) {
reject(err);
} else if (res.statusCode !== 200) {
reject(response);
} else {
resolve(response);
}
});
});
}
getBlockAndTransactions(hash: string): Promise<IBlock> {
return new Promise((resolve, reject) => {
request(config.ELECTRS_API_URL + '/block/' + hash, { json: true, timeout: 10000 }, (err, res, response) => {
if (err) {
reject(err);
} else if (res.statusCode !== 200) {
reject(response);
} else {
request(config.ELECTRS_API_URL + '/block/' + hash + '/txids', { json: true, timeout: 10000 }, (err2, res2, response2) => {
if (err2) {
reject(err2);
} else if (res2.statusCode !== 200) {
reject(response2);
} else {
const block = response;
block.hash = hash;
block.nTx = block.tx_count;
block.time = block.timestamp;
block.tx = response2;
resolve(block);
}
});
}
});
});
}
getBlockHash(height: number): Promise<string> {
return new Promise((resolve, reject) => {
request(config.ELECTRS_API_URL + '/block-height/' + height, { json: true, timeout: 10000 }, (err, res, response) => {
if (err) {
reject(err);
} else if (res.statusCode !== 200) {
reject(response);
} else {
resolve(response);
}
});
});
}
getBlocks(): Promise<string> {
return new Promise((resolve, reject) => {
request(config.ELECTRS_API_URL + '/blocks', { json: true, timeout: 10000 }, (err, res, response) => {
if (err) {
reject(err);
} else if (res.statusCode !== 200) {
reject(response);
} else {
resolve(response);
}
});
});
}
getBlocksFromHeight(height: number): Promise<string> {
return new Promise((resolve, reject) => {
request(config.ELECTRS_API_URL + '/blocks/' + height, { json: true, timeout: 10000 }, (err, res, response) => {
if (err) {
reject(err);
} else if (res.statusCode !== 200) {
reject(response);
} else {
resolve(response);
}
});
});
}
getBlock(hash: string): Promise<IBlock> {
return new Promise((resolve, reject) => {
request(config.ELECTRS_API_URL + '/block/' + hash, { json: true, timeout: 10000 }, (err, res, response) => {
if (err) {
reject(err);
} else if (res.statusCode !== 200) {
reject(response);
} else {
resolve(response);
}
});
});
}
getBlockTransactions(hash: string): Promise<IBlock> {
return new Promise((resolve, reject) => {
request(config.ELECTRS_API_URL + '/block/' + hash + '/txs', { json: true, timeout: 10000 }, (err, res, response) => {
if (err) {
reject(err);
} else if (res.statusCode !== 200) {
reject(response);
} else {
resolve(response);
}
});
});
}
getBlockTransactionsFromIndex(hash: string, index: number): Promise<IBlock> {
return new Promise((resolve, reject) => {
request(config.ELECTRS_API_URL + '/block/' + hash + '/txs/' + index, { json: true, timeout: 10000 }, (err, res, response) => {
if (err) {
reject(err);
} else if (res.statusCode !== 200) {
reject(response);
} else {
resolve(response);
}
});
});
}
getAddress(address: string): Promise<IBlock> {
return new Promise((resolve, reject) => {
request(config.ELECTRS_API_URL + '/address/' + address, { json: true, timeout: 10000 }, (err, res, response) => {
if (err) {
reject(err);
} else if (res.statusCode !== 200) {
reject(response);
} else {
resolve(response);
}
});
});
}
getAddressTransactions(address: string): Promise<IBlock> {
return new Promise((resolve, reject) => {
request(config.ELECTRS_API_URL + '/address/' + address + '/txs', { json: true, timeout: 10000 }, (err, res, response) => {
if (err) {
reject(err);
} else if (res.statusCode !== 200) {
reject(response);
} else {
resolve(response);
}
});
});
}
getAddressTransactionsFromLastSeenTxid(address: string, lastSeenTxid: string): Promise<IBlock> {
return new Promise((resolve, reject) => {
request(config.ELECTRS_API_URL + '/address/' + address + '/txs/chain/' + lastSeenTxid,
{ json: true, timeout: 10000 }, (err, res, response) => {
if (err) {
reject(err);
} else if (res.statusCode !== 200) {
reject(response);
} else {
resolve(response);
}
});
});
}
}
export default ElectrsApi;
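Each method in this request-based ElectrsApi repeats the same error/status/resolve branching. A minimal sketch of a shared GET helper under the same config and request conventions (getJson is a hypothetical name):
function getJson<T>(path: string): Promise<T> {
  return new Promise((resolve, reject) => {
    request(config.ELECTRS_API_URL + path, { json: true, timeout: 10000 }, (err, res, response) => {
      if (err) {
        reject(err);
      } else if (res.statusCode !== 200) {
        reject(response);
      } else {
        resolve(response);
      }
    });
  });
}
// e.g. getBlockHash(height) reduces to getJson<string>('/block-height/' + height)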

View File

@@ -1,12 +0,0 @@
export namespace IElectrumApi {
export interface ScriptHashBalance {
confirmed: number;
unconfirmed: number;
}
export interface ScriptHashHistory {
height: number;
tx_hash: string;
fee?: number;
}
}

View File

@@ -1,152 +0,0 @@
import config from '../../config';
import Client from '@mempool/electrum-client';
import { AbstractBitcoinApi } from './bitcoin-api-abstract-factory';
import { IEsploraApi } from './esplora-api.interface';
import { IElectrumApi } from './electrum-api.interface';
import BitcoinApi from './bitcoin-api';
import logger from '../../logger';
import crypto from 'crypto-js';
import loadingIndicators from '../loading-indicators';
import memoryCache from '../memory-cache';
class BitcoindElectrsApi extends BitcoinApi implements AbstractBitcoinApi {
private electrumClient: any;
constructor(bitcoinClient: any) {
super(bitcoinClient);
const electrumConfig = { client: 'mempool-v2', version: '1.4' };
const electrumPersistencePolicy = { retryPeriod: 10000, maxRetry: 1000, callback: null };
const electrumCallbacks = {
onConnect: (client, versionInfo) => { logger.info(`Connected to Electrum Server at ${config.ELECTRUM.HOST}:${config.ELECTRUM.PORT} (${JSON.stringify(versionInfo)})`); },
onClose: (client) => { logger.info(`Disconnected from Electrum Server at ${config.ELECTRUM.HOST}:${config.ELECTRUM.PORT}`); },
onError: (err) => { logger.err(`Electrum error: ${JSON.stringify(err)}`); },
onLog: (str) => { logger.debug(str); },
};
this.electrumClient = new Client(
config.ELECTRUM.PORT,
config.ELECTRUM.HOST,
config.ELECTRUM.TLS_ENABLED ? 'tls' : 'tcp',
null,
electrumCallbacks
);
this.electrumClient.initElectrum(electrumConfig, electrumPersistencePolicy)
.then(() => { })
.catch((err) => {
logger.err(`Error connecting to Electrum Server at ${config.ELECTRUM.HOST}:${config.ELECTRUM.PORT}`);
});
}
async $getAddress(address: string): Promise<IEsploraApi.Address> {
const addressInfo = await this.bitcoindClient.validateAddress(address);
if (!addressInfo || !addressInfo.isvalid) {
return ({
'address': address,
'chain_stats': {
'funded_txo_count': 0,
'funded_txo_sum': 0,
'spent_txo_count': 0,
'spent_txo_sum': 0,
'tx_count': 0
},
'mempool_stats': {
'funded_txo_count': 0,
'funded_txo_sum': 0,
'spent_txo_count': 0,
'spent_txo_sum': 0,
'tx_count': 0
}
});
}
try {
const balance = await this.$getScriptHashBalance(addressInfo.scriptPubKey);
const history = await this.$getScriptHashHistory(addressInfo.scriptPubKey);
const unconfirmed = history.filter((h) => h.fee).length;
return {
'address': addressInfo.address,
'chain_stats': {
'funded_txo_count': 0,
'funded_txo_sum': balance.confirmed ? balance.confirmed : 0,
'spent_txo_count': 0,
'spent_txo_sum': balance.confirmed < 0 ? balance.confirmed : 0,
'tx_count': history.length - unconfirmed,
},
'mempool_stats': {
'funded_txo_count': 0,
'funded_txo_sum': balance.unconfirmed > 0 ? balance.unconfirmed : 0,
'spent_txo_count': 0,
'spent_txo_sum': balance.unconfirmed < 0 ? -balance.unconfirmed : 0,
'tx_count': unconfirmed,
},
'electrum': true,
};
} catch (e: any) {
throw new Error(typeof e === 'string' ? e : e && e.message || e);
}
}
async $getAddressTransactions(address: string, lastSeenTxId: string): Promise<IEsploraApi.Transaction[]> {
const addressInfo = await this.bitcoindClient.validateAddress(address);
if (!addressInfo || !addressInfo.isvalid) {
return [];
}
try {
loadingIndicators.setProgress('address-' + address, 0);
const transactions: IEsploraApi.Transaction[] = [];
const history = await this.$getScriptHashHistory(addressInfo.scriptPubKey);
history.sort((a, b) => (b.height || 9999999) - (a.height || 9999999));
let startingIndex = 0;
if (lastSeenTxId) {
const pos = history.findIndex((historicalTx) => historicalTx.tx_hash === lastSeenTxId);
if (pos !== -1) {
startingIndex = pos + 1;
}
}
const endIndex = Math.min(startingIndex + 10, history.length);
for (let i = startingIndex; i < endIndex; i++) {
const tx = await this.$getRawTransaction(history[i].tx_hash, false, true);
transactions.push(tx);
loadingIndicators.setProgress('address-' + address, (i + 1) / endIndex * 100);
}
return transactions;
} catch (e: any) {
loadingIndicators.setProgress('address-' + address, 100);
throw new Error(typeof e === 'string' ? e : e && e.message || e);
}
}
private $getScriptHashBalance(scriptHash: string): Promise<IElectrumApi.ScriptHashBalance> {
return this.electrumClient.blockchainScripthash_getBalance(this.encodeScriptHash(scriptHash));
}
private $getScriptHashHistory(scriptHash: string): Promise<IElectrumApi.ScriptHashHistory[]> {
const fromCache = memoryCache.get<IElectrumApi.ScriptHashHistory[]>('Scripthash_getHistory', scriptHash);
if (fromCache) {
return Promise.resolve(fromCache);
}
return this.electrumClient.blockchainScripthash_getHistory(this.encodeScriptHash(scriptHash))
.then((history) => {
memoryCache.set('Scripthash_getHistory', scriptHash, history, 2);
return history;
});
}
private encodeScriptHash(scriptPubKey: string): string {
const addrScripthash = crypto.enc.Hex.stringify(crypto.SHA256(crypto.enc.Hex.parse(scriptPubKey)));
return addrScripthash!.match(/.{2}/g)!.reverse().join('');
}
}
export default BitcoindElectrsApi;
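encodeScriptHash above implements the Electrum protocol's scripthash addressing: SHA-256 the raw output script, then reverse the byte order of the digest. A minimal equivalent sketch using Node's built-in crypto module instead of crypto-js:
import { createHash } from 'crypto';

function toElectrumScriptHash(scriptPubKeyHex: string): string {
  const digest = createHash('sha256').update(Buffer.from(scriptPubKeyHex, 'hex')).digest();
  // Electrum expects the digest in reversed (little-endian) byte order.
  return Buffer.from(digest).reverse().toString('hex');
}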

View File

@@ -1,172 +0,0 @@
export namespace IEsploraApi {
export interface Transaction {
txid: string;
version: number;
locktime: number;
size: number;
weight: number;
fee: number;
vin: Vin[];
vout: Vout[];
status: Status;
hex?: string;
}
export interface Recent {
txid: string;
fee: number;
vsize: number;
value: number;
}
export interface Vin {
txid: string;
vout: number;
is_coinbase: boolean;
scriptsig: string;
scriptsig_asm: string;
inner_redeemscript_asm: string;
inner_witnessscript_asm: string;
sequence: any;
witness: string[];
prevout: Vout | null;
// Elements
is_pegin?: boolean;
issuance?: Issuance;
// Custom
lazy?: boolean;
}
interface Issuance {
asset_id: string;
is_reissuance: string;
asset_blinding_nonce: string;
asset_entropy: string;
contract_hash: string;
assetamount?: number;
assetamountcommitment?: string;
tokenamount?: number;
tokenamountcommitment?: string;
}
export interface Vout {
scriptpubkey: string;
scriptpubkey_asm: string;
scriptpubkey_type: string;
scriptpubkey_address: string;
value: number;
// Elements
valuecommitment?: number;
asset?: string;
pegout?: Pegout;
}
interface Pegout {
genesis_hash: string;
scriptpubkey: string;
scriptpubkey_asm: string;
scriptpubkey_address: string;
}
export interface Status {
confirmed: boolean;
block_height?: number;
block_hash?: string;
block_time?: number;
}
export interface Block {
id: string;
height: number;
version: number;
timestamp: number;
bits: number;
nonce: number;
difficulty: number;
merkle_root: string;
tx_count: number;
size: number;
weight: number;
previousblockhash: string;
}
export interface Address {
address: string;
chain_stats: ChainStats;
mempool_stats: MempoolStats;
electrum?: boolean;
}
export interface ChainStats {
funded_txo_count: number;
funded_txo_sum: number;
spent_txo_count: number;
spent_txo_sum: number;
tx_count: number;
}
export interface MempoolStats {
funded_txo_count: number;
funded_txo_sum: number;
spent_txo_count: number;
spent_txo_sum: number;
tx_count: number;
}
export interface Outspend {
spent: boolean;
txid?: string;
vin?: number;
status?: Status;
}
export interface Asset {
asset_id: string;
issuance_txin: IssuanceTxin;
issuance_prevout: IssuancePrevout;
reissuance_token: string;
contract_hash: string;
status: Status;
chain_stats: AssetStats;
mempool_stats: AssetStats;
}
export interface AssetExtended extends Asset {
name: string;
ticker: string;
precision: number;
entity: Entity;
version: number;
issuer_pubkey: string;
}
export interface Entity {
domain: string;
}
interface IssuanceTxin {
txid: string;
vin: number;
}
interface IssuancePrevout {
txid: string;
vout: number;
}
interface AssetStats {
tx_count: number;
issuance_count: number;
issued_amount: number;
burned_amount: number;
has_blinded_issuances: boolean;
reissuance_tokens: number;
burned_reissuance_tokens: number;
peg_in_count: number;
peg_in_amount: number;
peg_out_count: number;
peg_out_amount: number;
burn_count: number;
}
}
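Since IEsploraApi.Status only carries block_height when confirmed is true, deriving a confirmation count needs the chain tip as an extra input. A minimal sketch, assuming the tip height is known (the helper name is hypothetical):
function getConfirmations(status: IEsploraApi.Status, tipHeight: number): number {
  // Unconfirmed transactions (or those missing a height) count as zero.
  if (!status.confirmed || status.block_height === undefined) {
    return 0;
  }
  return tipHeight - status.block_height + 1;
}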

View File

@@ -1,99 +0,0 @@
import config from '../../config';
import axios, { AxiosRequestConfig } from 'axios';
import { AbstractBitcoinApi } from './bitcoin-api-abstract-factory';
import { IEsploraApi } from './esplora-api.interface';
class ElectrsApi implements AbstractBitcoinApi {
axiosConfig: AxiosRequestConfig = {
timeout: 10000,
};
constructor() { }
$getRawMempool(): Promise<IEsploraApi.Transaction['txid'][]> {
return axios.get<IEsploraApi.Transaction['txid'][]>(config.ESPLORA.REST_API_URL + '/mempool/txids', this.axiosConfig)
.then((response) => response.data);
}
$getRawTransaction(txId: string): Promise<IEsploraApi.Transaction> {
return axios.get<IEsploraApi.Transaction>(config.ESPLORA.REST_API_URL + '/tx/' + txId, this.axiosConfig)
.then((response) => response.data);
}
$getTransactionHex(txId: string): Promise<string> {
return axios.get<string>(config.ESPLORA.REST_API_URL + '/tx/' + txId + '/hex', this.axiosConfig)
.then((response) => response.data);
}
$getBlockHeightTip(): Promise<number> {
return axios.get<number>(config.ESPLORA.REST_API_URL + '/blocks/tip/height', this.axiosConfig)
.then((response) => response.data);
}
$getBlockHashTip(): Promise<string> {
return axios.get<string>(config.ESPLORA.REST_API_URL + '/blocks/tip/hash', this.axiosConfig)
.then((response) => response.data);
}
$getTxIdsForBlock(hash: string): Promise<string[]> {
return axios.get<string[]>(config.ESPLORA.REST_API_URL + '/block/' + hash + '/txids', this.axiosConfig)
.then((response) => response.data);
}
$getBlockHash(height: number): Promise<string> {
return axios.get<string>(config.ESPLORA.REST_API_URL + '/block-height/' + height, this.axiosConfig)
.then((response) => response.data);
}
$getBlockHeader(hash: string): Promise<string> {
return axios.get<string>(config.ESPLORA.REST_API_URL + '/block/' + hash + '/header', this.axiosConfig)
.then((response) => response.data);
}
$getBlock(hash: string): Promise<IEsploraApi.Block> {
return axios.get<IEsploraApi.Block>(config.ESPLORA.REST_API_URL + '/block/' + hash, this.axiosConfig)
.then((response) => response.data);
}
$getRawBlock(hash: string): Promise<Buffer> {
return axios.get<string>(config.ESPLORA.REST_API_URL + '/block/' + hash + '/raw', { ...this.axiosConfig, responseType: 'arraybuffer' })
.then((response) => { return Buffer.from(response.data); });
}
$getAddress(address: string): Promise<IEsploraApi.Address> {
throw new Error('Method getAddress not implemented.');
}
$getAddressTransactions(address: string, txId?: string): Promise<IEsploraApi.Transaction[]> {
throw new Error('Method getAddressTransactions not implemented.');
}
$getAddressPrefix(prefix: string): string[] {
throw new Error('Method not implemented.');
}
$sendRawTransaction(rawTransaction: string): Promise<string> {
throw new Error('Method not implemented.');
}
$getOutspend(txId: string, vout: number): Promise<IEsploraApi.Outspend> {
return axios.get<IEsploraApi.Outspend>(config.ESPLORA.REST_API_URL + '/tx/' + txId + '/outspend/' + vout, this.axiosConfig)
.then((response) => response.data);
}
$getOutspends(txId: string): Promise<IEsploraApi.Outspend[]> {
return axios.get<IEsploraApi.Outspend[]>(config.ESPLORA.REST_API_URL + '/tx/' + txId + '/outspends', this.axiosConfig)
.then((response) => response.data);
}
async $getBatchedOutspends(txId: string[]): Promise<IEsploraApi.Outspend[][]> {
const outspends: IEsploraApi.Outspend[][] = [];
for (const tx of txId) {
const outspend = await this.$getOutspends(tx);
outspends.push(outspend);
}
return outspends;
}
}
export default ElectrsApi;
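A usage sketch for the axios-based client above (hypothetical bootstrap code, not part of this changeset): walk from the tip height to its hash and full block.
const api = new ElectrsApi();
api.$getBlockHeightTip()
  .then((height) => api.$getBlockHash(height))
  .then((hash) => api.$getBlock(hash))
  .then((block) => console.log(`tip #${block.height}: ${block.id} (${block.tx_count} txs)`))
  .catch((err) => console.error('esplora lookup failed', err));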

View File

@@ -1,799 +1,214 @@
import config from '../config';
const config = require('../../mempool-config.json');
import bitcoinApi from './bitcoin/bitcoin-api-factory';
import logger from '../logger';
import { DB } from '../database';
import { IBlock, ITransaction } from '../interfaces';
import memPool from './mempool';
import { BlockExtended, BlockSummary, PoolTag, TransactionExtended, TransactionStripped, TransactionMinerInfo } from '../mempool.interfaces';
import { Common } from './common';
import diskCache from './disk-cache';
import transactionUtils from './transaction-utils';
import bitcoinClient from './bitcoin/bitcoin-client';
import { IBitcoinApi } from './bitcoin/bitcoin-api.interface';
import { IEsploraApi } from './bitcoin/esplora-api.interface';
import poolsRepository from '../repositories/PoolsRepository';
import blocksRepository from '../repositories/BlocksRepository';
import loadingIndicators from './loading-indicators';
import BitcoinApi from './bitcoin/bitcoin-api';
import { prepareBlock } from '../utils/blocks-utils';
import BlocksRepository from '../repositories/BlocksRepository';
import HashratesRepository from '../repositories/HashratesRepository';
import indexer from '../indexer';
import fiatConversion from './fiat-conversion';
import poolsParser from './pools-parser';
import BlocksSummariesRepository from '../repositories/BlocksSummariesRepository';
import BlocksAuditsRepository from '../repositories/BlocksAuditsRepository';
import cpfpRepository from '../repositories/CpfpRepository';
import transactionRepository from '../repositories/TransactionRepository';
import mining from './mining/mining';
import DifficultyAdjustmentsRepository from '../repositories/DifficultyAdjustmentsRepository';
import PricesRepository from '../repositories/PricesRepository';
import priceUpdater from '../tasks/price-updater';
import { Block } from 'bitcoinjs-lib';
class Blocks {
private blocks: BlockExtended[] = [];
private blockSummaries: BlockSummary[] = [];
private blocks: IBlock[] = [];
private newBlockCallback: Function | undefined;
private currentBlockHeight = 0;
private currentDifficulty = 0;
private lastDifficultyAdjustmentTime = 0;
private previousDifficultyRetarget = 0;
private newBlockCallbacks: ((block: BlockExtended, txIds: string[], transactions: TransactionExtended[]) => void)[] = [];
private newAsyncBlockCallbacks: ((block: BlockExtended, txIds: string[], transactions: TransactionExtended[]) => Promise<void>)[] = [];
constructor() { }
constructor() {
setInterval(this.$clearOldTransactionsAndBlocksFromDatabase.bind(this), 86400000);
}
public getBlocks(): BlockExtended[] {
public setNewBlockCallback(fn: Function) {
this.newBlockCallback = fn;
}
public getBlocks(): IBlock[] {
return this.blocks;
}
public setBlocks(blocks: BlockExtended[]) {
this.blocks = blocks;
}
public getBlockSummaries(): BlockSummary[] {
return this.blockSummaries;
}
public setBlockSummaries(blockSummaries: BlockSummary[]) {
this.blockSummaries = blockSummaries;
}
public setNewBlockCallback(fn: (block: BlockExtended, txIds: string[], transactions: TransactionExtended[]) => void) {
this.newBlockCallbacks.push(fn);
}
public setNewAsyncBlockCallback(fn: (block: BlockExtended, txIds: string[], transactions: TransactionExtended[]) => Promise<void>) {
this.newAsyncBlockCallbacks.push(fn);
}
/**
* Return the list of transactions for a block
* @param blockHash
* @param blockHeight
* @param onlyCoinbase - Set to true if you only need the coinbase transaction
* @returns Promise<TransactionExtended[]>
*/
private async $getTransactionsExtended(
blockHash: string,
blockHeight: number,
onlyCoinbase: boolean,
quiet: boolean = false,
): Promise<TransactionExtended[]> {
const transactions: TransactionExtended[] = [];
const txIds: string[] = await bitcoinApi.$getTxIdsForBlock(blockHash);
const mempool = memPool.getMempool();
let transactionsFound = 0;
let transactionsFetched = 0;
for (let i = 0; i < txIds.length; i++) {
if (mempool[txIds[i]]) {
// We update blocks before the mempool (index.ts), therefore we can
// optimize here by directly fetching txs in the "outdated" mempool
transactions.push(mempool[txIds[i]]);
transactionsFound++;
} else if (config.MEMPOOL.BACKEND === 'esplora' || !memPool.hasPriority() || i === 0) {
// Otherwise we fetch the tx data through backend services (esplora, electrum, core rpc...)
if (!quiet && (i % (Math.round((txIds.length) / 10)) === 0 || i + 1 === txIds.length)) { // Avoid log spam
logger.debug(`Indexing tx ${i + 1} of ${txIds.length} in block #${blockHeight}`);
}
try {
const tx = await transactionUtils.$getTransactionExtended(txIds[i]);
transactions.push(tx);
transactionsFetched++;
} catch (e) {
if (i === 0) {
const msg = `Cannot fetch coinbase tx ${txIds[i]}. Reason: ` + (e instanceof Error ? e.message : e);
logger.err(msg);
throw new Error(msg);
} else {
logger.err(`Cannot fetch tx ${txIds[i]}. Reason: ` + (e instanceof Error ? e.message : e));
}
}
}
if (onlyCoinbase === true) {
break; // Fetch the first transaction and exit
}
}
transactions.forEach((tx) => {
if (!tx.cpfpChecked) {
Common.setRelativesAndGetCpfpInfo(tx, mempool); // Child Pay For Parent
}
});
if (!quiet) {
logger.debug(`${transactionsFound} of ${txIds.length} found in mempool. ${transactionsFetched} fetched through backend service.`);
}
return transactions;
}
/**
* Return a block summary (list of stripped transactions)
* @param block
* @returns BlockSummary
*/
private summarizeBlock(block: IBitcoinApi.VerboseBlock): BlockSummary {
const stripped = block.tx.map((tx) => {
return {
txid: tx.txid,
vsize: tx.weight / 4,
fee: tx.fee ? Math.round(tx.fee * 100000000) : 0,
value: Math.round(tx.vout.reduce((acc, vout) => acc + (vout.value ? vout.value : 0), 0) * 100000000)
};
});
public formatBlock(block: IBlock) {
return {
id: block.hash,
transactions: stripped
hash: block.hash,
height: block.height,
nTx: block.nTx - 1,
size: block.size,
time: block.time,
weight: block.weight,
fees: block.fees,
minFee: block.minFee,
maxFee: block.maxFee,
medianFee: block.medianFee,
};
}
/**
* Return a block with additional data (reward, coinbase, fees...)
* @param block
* @param transactions
* @returns BlockExtended
*/
private async $getBlockExtended(block: IEsploraApi.Block, transactions: TransactionExtended[]): Promise<BlockExtended> {
const blockExtended: BlockExtended = Object.assign({ extras: {} }, block);
blockExtended.extras.reward = transactions[0].vout.reduce((acc, curr) => acc + curr.value, 0);
blockExtended.extras.coinbaseTx = transactionUtils.stripCoinbaseTransaction(transactions[0]);
blockExtended.extras.coinbaseRaw = blockExtended.extras.coinbaseTx.vin[0].scriptsig;
blockExtended.extras.usd = fiatConversion.getConversionRates().USD;
if (block.height === 0) {
blockExtended.extras.medianFee = 0; // 50th percentile
blockExtended.extras.feeRange = [0, 0, 0, 0, 0, 0, 0];
blockExtended.extras.totalFees = 0;
blockExtended.extras.avgFee = 0;
blockExtended.extras.avgFeeRate = 0;
} else {
const stats = await bitcoinClient.getBlockStats(block.id, [
'feerate_percentiles', 'minfeerate', 'maxfeerate', 'totalfee', 'avgfee', 'avgfeerate'
]);
blockExtended.extras.medianFee = stats.feerate_percentiles[2]; // 50th percentile
blockExtended.extras.feeRange = [stats.minfeerate, stats.feerate_percentiles, stats.maxfeerate].flat();
blockExtended.extras.totalFees = stats.totalfee;
blockExtended.extras.avgFee = stats.avgfee;
blockExtended.extras.avgFeeRate = stats.avgfeerate;
}
if (['mainnet', 'testnet', 'signet'].includes(config.MEMPOOL.NETWORK)) {
let pool: PoolTag;
if (blockExtended.extras?.coinbaseTx !== undefined) {
pool = await this.$findBlockMiner(blockExtended.extras?.coinbaseTx);
} else {
if (config.DATABASE.ENABLED === true) {
pool = await poolsRepository.$getUnknownPool();
} else {
pool = poolsParser.unknownPool;
}
}
if (!pool) { // We should never hit this case in practice
logger.warn(`Cannot assign pool to block ${blockExtended.height} and 'unknown' pool does not exist. ` +
`Check your "pools" table entries`);
} else {
blockExtended.extras.pool = {
id: pool.id,
name: pool.name,
slug: pool.slug,
};
}
const auditScore = await BlocksAuditsRepository.$getBlockAuditScore(block.id);
if (auditScore != null) {
blockExtended.extras.matchRate = auditScore.matchRate;
}
}
return blockExtended;
}
/**
* Try to find which miner found the block
* @param txMinerInfo
* @returns
*/
private async $findBlockMiner(txMinerInfo: TransactionMinerInfo | undefined): Promise<PoolTag> {
if (txMinerInfo === undefined || txMinerInfo.vout.length < 1) {
if (config.DATABASE.ENABLED === true) {
return await poolsRepository.$getUnknownPool();
} else {
return poolsParser.unknownPool;
}
}
const asciiScriptSig = transactionUtils.hex2ascii(txMinerInfo.vin[0].scriptsig);
const address = txMinerInfo.vout[0].scriptpubkey_address;
let pools: PoolTag[] = [];
if (config.DATABASE.ENABLED === true) {
pools = await poolsRepository.$getPools();
} else {
pools = poolsParser.miningPools;
}
for (let i = 0; i < pools.length; ++i) {
if (address !== undefined) {
const addresses: string[] = JSON.parse(pools[i].addresses);
if (addresses.indexOf(address) !== -1) {
return pools[i];
}
}
const regexes: string[] = JSON.parse(pools[i].regexes);
for (let y = 0; y < regexes.length; ++y) {
const regex = new RegExp(regexes[y], 'i');
const match = asciiScriptSig.match(regex);
if (match !== null) {
return pools[i];
}
}
}
if (config.DATABASE.ENABLED === true) {
return await poolsRepository.$getUnknownPool();
} else {
return poolsParser.unknownPool;
}
}
/**
* [INDEXING] Index all blocks summaries for the block txs visualization
*/
public async $generateBlocksSummariesDatabase(): Promise<void> {
if (Common.blocksSummariesIndexingEnabled() === false) {
return;
}
public async updateBlocks() {
try {
// Get all indexed block hash
const indexedBlocks = await blocksRepository.$getIndexedBlocks();
const indexedBlockSummariesHashesArray = await BlocksSummariesRepository.$getIndexedSummariesId();
const blockCount = await bitcoinApi.getBlockCount();
const indexedBlockSummariesHashes = {}; // Use a map for faster seek during the indexing loop
for (const hash of indexedBlockSummariesHashesArray) {
indexedBlockSummariesHashes[hash] = true;
}
// Logging
let newlyIndexed = 0;
let totalIndexed = indexedBlockSummariesHashesArray.length;
let indexedThisRun = 0;
let timer = new Date().getTime() / 1000;
const startedAt = new Date().getTime() / 1000;
for (const block of indexedBlocks) {
if (indexedBlockSummariesHashes[block.hash] === true) {
continue;
}
// Logging
const elapsedSeconds = Math.max(1, Math.round((new Date().getTime() / 1000) - timer));
if (elapsedSeconds > 5) {
const runningFor = Math.max(1, Math.round((new Date().getTime() / 1000) - startedAt));
const blockPerSeconds = Math.max(1, indexedThisRun / elapsedSeconds);
const progress = Math.round(totalIndexed / indexedBlocks.length * 10000) / 100;
logger.debug(`Indexing block summary for #${block.height} | ~${blockPerSeconds.toFixed(2)} blocks/sec | total: ${totalIndexed}/${indexedBlocks.length} (${progress}%) | elapsed: ${runningFor} seconds`);
timer = new Date().getTime() / 1000;
indexedThisRun = 0;
}
await this.$getStrippedBlockTransactions(block.hash, true, true); // This will index the block summary
// Logging
indexedThisRun++;
totalIndexed++;
newlyIndexed++;
}
if (newlyIndexed > 0) {
logger.notice(`Blocks summaries indexing completed: indexed ${newlyIndexed} blocks`);
if (this.blocks.length === 0) {
this.currentBlockHeight = blockCount - config.INITIAL_BLOCK_AMOUNT;
} else {
logger.debug(`Blocks summaries indexing completed: indexed ${newlyIndexed} blocks`);
}
} catch (e) {
logger.err(`Blocks summaries indexing failed. Trying again in 10 seconds. Reason: ${(e instanceof Error ? e.message : e)}`);
throw e;
}
}
/**
* [INDEXING] Index transaction CPFP data for all blocks
*/
public async $generateCPFPDatabase(): Promise<void> {
if (Common.cpfpIndexingEnabled() === false) {
return;
}
try {
// Get all indexed block hash
const unindexedBlocks = await blocksRepository.$getCPFPUnindexedBlocks();
if (!unindexedBlocks?.length) {
return;
this.currentBlockHeight = this.blocks[this.blocks.length - 1].height;
}
// Logging
let count = 0;
let countThisRun = 0;
let timer = new Date().getTime() / 1000;
const startedAt = new Date().getTime() / 1000;
for (const block of unindexedBlocks) {
// Logging
const elapsedSeconds = Math.max(1, new Date().getTime() / 1000 - timer);
if (elapsedSeconds > 5) {
const runningFor = Math.max(1, Math.round((new Date().getTime() / 1000) - startedAt));
const blockPerSeconds = Math.max(1, countThisRun / elapsedSeconds);
const progress = Math.round(count / unindexedBlocks.length * 10000) / 100;
logger.debug(`Indexing cpfp clusters for #${block.height} | ~${blockPerSeconds.toFixed(2)} blocks/sec | total: ${count}/${unindexedBlocks.length} (${progress}%) | elapsed: ${runningFor} seconds`);
timer = new Date().getTime() / 1000;
countThisRun = 0;
}
await this.$indexCPFP(block.hash, block.height); // Calculate and save CPFP data for transactions in this block
// Logging
count++;
countThisRun++;
}
if (count > 0) {
logger.notice(`CPFP indexing completed: indexed ${count} blocks`);
} else {
logger.debug(`CPFP indexing completed: indexed ${count} blocks`);
}
} catch (e) {
logger.err(`CPFP indexing failed. Trying again in 10 seconds. Reason: ${(e instanceof Error ? e.message : e)}`);
throw e;
}
}
/**
* [INDEXING] Index all blocks metadata for the mining dashboard
*/
public async $generateBlockDatabase(): Promise<boolean> {
try {
const blockchainInfo = await bitcoinClient.getBlockchainInfo();
let currentBlockHeight = blockchainInfo.blocks;
let indexingBlockAmount = Math.min(config.MEMPOOL.INDEXING_BLOCKS_AMOUNT, blockchainInfo.blocks);
if (indexingBlockAmount <= -1) {
indexingBlockAmount = currentBlockHeight + 1;
}
const lastBlockToIndex = Math.max(0, currentBlockHeight - indexingBlockAmount + 1);
logger.debug(`Indexing blocks from #${currentBlockHeight} to #${lastBlockToIndex}`);
loadingIndicators.setProgress('block-indexing', 0);
const chunkSize = 10000;
let totalIndexed = await blocksRepository.$blockCountBetweenHeight(currentBlockHeight, lastBlockToIndex);
let indexedThisRun = 0;
let newlyIndexed = 0;
const startedAt = new Date().getTime() / 1000;
let timer = new Date().getTime() / 1000;
while (currentBlockHeight >= lastBlockToIndex) {
const endBlock = Math.max(0, lastBlockToIndex, currentBlockHeight - chunkSize + 1);
const missingBlockHeights: number[] = await blocksRepository.$getMissingBlocksBetweenHeights(
currentBlockHeight, endBlock);
if (missingBlockHeights.length <= 0) {
currentBlockHeight -= chunkSize;
continue;
}
logger.info(`Indexing ${missingBlockHeights.length} blocks from #${currentBlockHeight} to #${endBlock}`);
for (const blockHeight of missingBlockHeights) {
if (blockHeight < lastBlockToIndex) {
break;
}
++indexedThisRun;
++totalIndexed;
const elapsedSeconds = Math.max(1, new Date().getTime() / 1000 - timer);
if (elapsedSeconds > 5 || blockHeight === lastBlockToIndex) {
const runningFor = Math.max(1, Math.round((new Date().getTime() / 1000) - startedAt));
const blockPerSeconds = Math.max(1, indexedThisRun / elapsedSeconds);
const progress = Math.round(totalIndexed / indexingBlockAmount * 10000) / 100;
logger.debug(`Indexing block #${blockHeight} | ~${blockPerSeconds.toFixed(2)} blocks/sec | total: ${totalIndexed}/${indexingBlockAmount} (${progress}%) | elapsed: ${runningFor} seconds`);
timer = new Date().getTime() / 1000;
indexedThisRun = 0;
loadingIndicators.setProgress('block-indexing', progress, false);
}
const blockHash = await bitcoinApi.$getBlockHash(blockHeight);
const block = BitcoinApi.convertBlock(await bitcoinClient.getBlock(blockHash));
const transactions = await this.$getTransactionsExtended(blockHash, block.height, true, true);
const blockExtended = await this.$getBlockExtended(block, transactions);
newlyIndexed++;
await blocksRepository.$saveBlockInDatabase(blockExtended);
}
currentBlockHeight -= chunkSize;
}
if (newlyIndexed > 0) {
logger.notice(`Block indexing completed: indexed ${newlyIndexed} blocks`);
} else {
logger.debug(`Block indexing completed: indexed ${newlyIndexed} blocks`);
}
loadingIndicators.setProgress('block-indexing', 100);
} catch (e) {
logger.err('Block indexing failed. Trying again in 10 seconds. Reason: ' + (e instanceof Error ? e.message : e));
loadingIndicators.setProgress('block-indexing', 100);
throw e;
}
return await BlocksRepository.$validateChain();
}
public async $updateBlocks() {
let fastForwarded = false;
const blockHeightTip = await bitcoinApi.$getBlockHeightTip();
if (this.blocks.length === 0) {
this.currentBlockHeight = Math.max(blockHeightTip - config.MEMPOOL.INITIAL_BLOCKS_AMOUNT, -1);
} else {
this.currentBlockHeight = this.blocks[this.blocks.length - 1].height;
}
if (blockHeightTip - this.currentBlockHeight > config.MEMPOOL.INITIAL_BLOCKS_AMOUNT * 2) {
logger.info(`${blockHeightTip - this.currentBlockHeight} blocks since tip. Fast forwarding to the ${config.MEMPOOL.INITIAL_BLOCKS_AMOUNT} most recent blocks`);
this.currentBlockHeight = blockHeightTip - config.MEMPOOL.INITIAL_BLOCKS_AMOUNT;
fastForwarded = true;
logger.info(`Re-indexing skipped blocks and corresponding hashrates data`);
indexer.reindex(); // Make sure to index the skipped blocks #1619
}
if (!this.lastDifficultyAdjustmentTime) {
const blockchainInfo = await bitcoinClient.getBlockchainInfo();
if (blockchainInfo.blocks === blockchainInfo.headers) {
const heightDiff = blockHeightTip % 2016;
const blockHash = await bitcoinApi.$getBlockHash(blockHeightTip - heightDiff);
const block = BitcoinApi.convertBlock(await bitcoinClient.getBlock(blockHash));
this.lastDifficultyAdjustmentTime = block.timestamp;
this.currentDifficulty = block.difficulty;
if (blockHeightTip >= 2016) {
const previousPeriodBlockHash = await bitcoinApi.$getBlockHash(blockHeightTip - heightDiff - 2016);
const previousPeriodBlock = await bitcoinClient.getBlock(previousPeriodBlockHash);
this.previousDifficultyRetarget = (block.difficulty - previousPeriodBlock.difficulty) / previousPeriodBlock.difficulty * 100;
logger.debug(`Initial difficulty adjustment data set.`);
}
} else {
logger.debug(`Blockchain headers (${blockchainInfo.headers}) and blocks (${blockchainInfo.blocks}) not in sync. Waiting...`);
}
}
while (this.currentBlockHeight < blockHeightTip) {
if (this.currentBlockHeight < blockHeightTip - config.MEMPOOL.INITIAL_BLOCKS_AMOUNT) {
this.currentBlockHeight = blockHeightTip;
} else {
while (this.currentBlockHeight < blockCount) {
this.currentBlockHeight++;
logger.debug(`New block found (#${this.currentBlockHeight})!`);
}
const blockHash = await bitcoinApi.$getBlockHash(this.currentBlockHeight);
const verboseBlock = await bitcoinClient.getBlock(blockHash, 2);
const block = BitcoinApi.convertBlock(verboseBlock);
const txIds: string[] = await bitcoinApi.$getTxIdsForBlock(blockHash);
const transactions = await this.$getTransactionsExtended(blockHash, block.height, false);
const blockExtended: BlockExtended = await this.$getBlockExtended(block, transactions);
const blockSummary: BlockSummary = this.summarizeBlock(verboseBlock);
let block: IBlock | undefined;
// start async callbacks
const callbackPromises = this.newAsyncBlockCallbacks.map((cb) => cb(blockExtended, txIds, transactions));
const storedBlock = await this.$getBlockFromDatabase(this.currentBlockHeight);
if (storedBlock) {
block = storedBlock;
} else {
const blockHash = await bitcoinApi.getBlockHash(this.currentBlockHeight);
block = await bitcoinApi.getBlockAndTransactions(blockHash);
if (Common.indexingEnabled()) {
if (!fastForwarded) {
const lastBlock = await blocksRepository.$getBlockByHeight(blockExtended.height - 1);
if (lastBlock !== null && blockExtended.previousblockhash !== lastBlock['hash']) {
logger.warn(`Chain divergence detected at block ${lastBlock['height']}, re-indexing most recent data`);
// We assume a reorg will never be more than 10 blocks deep
await BlocksRepository.$deleteBlocksFrom(lastBlock['height'] - 10);
await HashratesRepository.$deleteLastEntries();
await BlocksSummariesRepository.$deleteBlocksFrom(lastBlock['height'] - 10);
await cpfpRepository.$deleteClustersFrom(lastBlock['height'] - 10);
for (let i = 10; i >= 0; --i) {
const newBlock = await this.$indexBlock(lastBlock['height'] - i);
await this.$getStrippedBlockTransactions(newBlock.id, true, true);
if (config.MEMPOOL.TRANSACTION_INDEXING) {
await this.$indexCPFP(newBlock.id, lastBlock['height'] - i);
const coinbase = await memPool.getRawTransaction(block.tx[0], true);
if (coinbase && coinbase.totalOut) {
block.fees = coinbase.totalOut;
}
const mempool = memPool.getMempool();
let found = 0;
let notFound = 0;
let transactions: ITransaction[] = [];
for (let i = 1; i < block.tx.length; i++) {
if (mempool[block.tx[i]]) {
transactions.push(mempool[block.tx[i]]);
found++;
} else {
console.log(`Fetching block tx ${i} of ${block.tx.length}`);
const tx = await memPool.getRawTransaction(block.tx[i]);
if (tx) {
transactions.push(tx);
}
notFound++;
}
await mining.$indexDifficultyAdjustments();
await DifficultyAdjustmentsRepository.$deleteLastAdjustment();
logger.info(`Re-indexed 10 blocks and summaries. Also re-indexed the last difficulty adjustments. Will re-index latest hashrates in a few seconds.`);
indexer.reindex();
}
await blocksRepository.$saveBlockInDatabase(blockExtended);
const latestPriceId = await PricesRepository.$getLatestPriceId();
if (priceUpdater.historyInserted === true && latestPriceId !== null) {
await blocksRepository.$saveBlockPrices([{
height: blockExtended.height,
priceId: latestPriceId,
}]);
} else {
logger.info(`Cannot save block price for ${blockExtended.height} because the price updater hasn't completed yet. Trying again in 10 seconds.`);
setTimeout(() => {
indexer.runSingleTask('blocksPrices');
}, 10000);
}
// Save blocks summary for visualization if it's enabled
if (Common.blocksSummariesIndexingEnabled() === true) {
await this.$getStrippedBlockTransactions(blockExtended.id, true);
}
if (config.MEMPOOL.TRANSACTION_INDEXING) {
this.$indexCPFP(blockExtended.id, this.currentBlockHeight);
}
}
}
transactions.sort((a, b) => b.feePerVsize - a.feePerVsize);
transactions = transactions.filter((tx: ITransaction) => tx.feePerVsize);
if (block.height % 2016 === 0) {
if (Common.indexingEnabled()) {
await DifficultyAdjustmentsRepository.$saveAdjustments({
time: block.timestamp,
height: block.height,
difficulty: block.difficulty,
adjustment: Math.round((block.difficulty / this.currentDifficulty) * 1000000) / 1000000, // Remove float point noise
});
block.minFee = transactions[transactions.length - 1] ? transactions[transactions.length - 1].feePerVsize : 0;
block.maxFee = transactions[0] ? transactions[0].feePerVsize : 0;
block.medianFee = this.median(transactions.map((tx) => tx.feePerVsize));
console.log(`New block found (#${this.currentBlockHeight})! `
+ `${found} of ${block.tx.length} found in mempool. ${notFound} not found.`);
if (this.newBlockCallback) {
this.newBlockCallback(block);
}
this.$saveBlockToDatabase(block);
this.$saveTransactionsToDatabase(block.height, transactions);
}
this.previousDifficultyRetarget = (block.difficulty - this.currentDifficulty) / this.currentDifficulty * 100;
this.lastDifficultyAdjustmentTime = block.timestamp;
this.currentDifficulty = block.difficulty;
}
this.blocks.push(blockExtended);
if (this.blocks.length > config.MEMPOOL.INITIAL_BLOCKS_AMOUNT * 4) {
this.blocks = this.blocks.slice(-config.MEMPOOL.INITIAL_BLOCKS_AMOUNT * 4);
}
this.blockSummaries.push(blockSummary);
if (this.blockSummaries.length > config.MEMPOOL.INITIAL_BLOCKS_AMOUNT * 4) {
this.blockSummaries = this.blockSummaries.slice(-config.MEMPOOL.INITIAL_BLOCKS_AMOUNT * 4);
}
if (this.newBlockCallbacks.length) {
this.newBlockCallbacks.forEach((cb) => cb(blockExtended, txIds, transactions));
}
if (!memPool.hasPriority()) {
diskCache.$saveCacheToDisk();
}
// wait for pending async callbacks to finish
await Promise.all(callbackPromises);
}
}
/**
* Index a block if it's missing from the database. Returns the block after indexing
*/
public async $indexBlock(height: number): Promise<BlockExtended> {
const dbBlock = await blocksRepository.$getBlockByHeight(height);
if (dbBlock != null) {
return prepareBlock(dbBlock);
}
const blockHash = await bitcoinApi.$getBlockHash(height);
const block = BitcoinApi.convertBlock(await bitcoinClient.getBlock(blockHash));
const transactions = await this.$getTransactionsExtended(blockHash, block.height, true);
const blockExtended = await this.$getBlockExtended(block, transactions);
await blocksRepository.$saveBlockInDatabase(blockExtended);
return prepareBlock(blockExtended);
}
/**
* Index a block by hash if it's missing from the database. Returns the block after indexing
*/
public async $getBlock(hash: string): Promise<BlockExtended | IEsploraApi.Block> {
// Check the memory cache
const blockByHash = this.getBlocks().find((b) => b.id === hash);
if (blockByHash) {
return blockByHash;
}
// Block has already been indexed
if (Common.indexingEnabled()) {
const dbBlock = await blocksRepository.$getBlockByHash(hash);
if (dbBlock != null) {
return prepareBlock(dbBlock);
}
}
// Not a Bitcoin network, return the block as is
if (['mainnet', 'testnet', 'signet'].includes(config.MEMPOOL.NETWORK) === false) {
return await bitcoinApi.$getBlock(hash);
}
let block = await bitcoinClient.getBlock(hash);
block = prepareBlock(block);
// Bitcoin network, add our custom data on top
const transactions = await this.$getTransactionsExtended(hash, block.height, true);
const blockExtended = await this.$getBlockExtended(block, transactions);
if (Common.indexingEnabled()) {
delete(blockExtended['coinbaseTx']);
await blocksRepository.$saveBlockInDatabase(blockExtended);
}
return blockExtended;
}
public async $getStrippedBlockTransactions(hash: string, skipMemoryCache = false,
skipDBLookup = false): Promise<TransactionStripped[]>
{
if (skipMemoryCache === false) {
// Check the memory cache
const cachedSummary = this.getBlockSummaries().find((b) => b.id === hash);
if (cachedSummary?.transactions?.length) {
return cachedSummary.transactions;
}
}
// Check if it's indexed in db
if (skipDBLookup === false && Common.blocksSummariesIndexingEnabled() === true) {
const indexedSummary = await BlocksSummariesRepository.$getByBlockId(hash);
if (indexedSummary !== undefined && indexedSummary?.transactions?.length) {
return indexedSummary.transactions;
}
}
// Call Core RPC
const block = await bitcoinClient.getBlock(hash, 2);
const summary = this.summarizeBlock(block);
// Index the response if needed
if (Common.blocksSummariesIndexingEnabled() === true) {
await BlocksSummariesRepository.$saveSummary({height: block.height, mined: summary});
}
return summary.transactions;
}
public async $getBlocks(fromHeight?: number, limit: number = 15): Promise<BlockExtended[]> {
let currentHeight = fromHeight !== undefined ? fromHeight : await blocksRepository.$mostRecentBlockHeight();
const returnBlocks: BlockExtended[] = [];
if (currentHeight < 0) {
return returnBlocks;
}
// Check if block height exist in local cache to skip the hash lookup
const blockByHeight = this.getBlocks().find((b) => b.height === currentHeight);
let startFromHash: string | null = null;
if (blockByHeight) {
startFromHash = blockByHeight.id;
} else if (!Common.indexingEnabled()) {
startFromHash = await bitcoinApi.$getBlockHash(currentHeight);
}
let nextHash = startFromHash;
for (let i = 0; i < limit && currentHeight >= 0; i++) {
let block = this.getBlocks().find((b) => b.height === currentHeight);
if (block) {
returnBlocks.push(block);
} else if (Common.indexingEnabled()) {
block = await this.$indexBlock(currentHeight);
returnBlocks.push(block);
} else if (nextHash != null) {
block = prepareBlock(await bitcoinClient.getBlock(nextHash));
nextHash = block.previousblockhash;
returnBlocks.push(block);
}
currentHeight--;
}
return returnBlocks;
}
public async $getBlockAuditSummary(hash: string): Promise<any> {
let summary;
if (['mainnet', 'testnet', 'signet'].includes(config.MEMPOOL.NETWORK)) {
summary = await BlocksAuditsRepository.$getBlockAudit(hash);
}
// fallback to non-audited transaction summary
if (!summary?.transactions?.length) {
const strippedTransactions = await this.$getStrippedBlockTransactions(hash);
summary = {
transactions: strippedTransactions
};
}
return summary;
}
public getLastDifficultyAdjustmentTime(): number {
return this.lastDifficultyAdjustmentTime;
}
public getPreviousDifficultyRetarget(): number {
return this.previousDifficultyRetarget;
}
public getCurrentBlockHeight(): number {
return this.currentBlockHeight;
}
public async $indexCPFP(hash: string, height: number): Promise<void> {
let transactions;
if (false/*Common.blocksSummariesIndexingEnabled()*/) {
transactions = await this.$getStrippedBlockTransactions(hash);
const rawBlock = await bitcoinApi.$getRawBlock(hash);
const block = Block.fromBuffer(rawBlock);
const txMap = {};
for (const tx of block.transactions || []) {
txMap[tx.getId()] = tx;
}
for (const tx of transactions) {
if (txMap[tx.txid]?.ins) {
tx.vin = txMap[tx.txid].ins.map(vin => {
return {
txid: vin.hash
};
});
this.blocks.push(block);
if (this.blocks.length > config.KEEP_BLOCK_AMOUNT) {
this.blocks.shift();
}
}
} catch (err) {
console.log('Error updateBlocks', err);
}
}
private async $getBlockFromDatabase(height: number): Promise<IBlock | undefined> {
try {
const connection = await DB.pool.getConnection();
const query = `
SELECT * FROM blocks WHERE height = ?
`;
const [rows] = await connection.query<any>(query, [height]);
connection.release();
if (rows[0]) {
return rows[0];
}
} catch (e) {
console.log('$get() block error', e);
}
}
private async $saveBlockToDatabase(block: IBlock) {
try {
const connection = await DB.pool.getConnection();
const query = `
INSERT IGNORE INTO blocks
(height, hash, size, weight, minFee, maxFee, time, fees, nTx, medianFee)
VALUES(?, ?, ?, ?, ?, ?, ?, ?, ?, ?)
`;
const params: (any)[] = [
block.height,
block.hash,
block.size,
block.weight,
block.minFee,
block.maxFee,
block.time,
block.fees,
block.nTx - 1,
block.medianFee,
];
await connection.query(query, params);
connection.release();
} catch (e) {
console.log('$create() block error', e);
}
}
private async $saveTransactionsToDatabase(blockheight: number, transactions: ITransaction[]) {
try {
const connection = await DB.pool.getConnection();
for (let i = 0; i < transactions.length; i++) {
const query = `
INSERT IGNORE INTO transactions
(blockheight, txid, fee, feePerVsize)
VALUES(?, ?, ?, ?)
`;
const params: (any)[] = [
blockheight,
transactions[i].txid,
transactions[i].fee,
transactions[i].feePerVsize,
];
await connection.query(query, params);
}
connection.release();
} catch (e) {
console.log('$create() transaction error', e);
}
}
private async $clearOldTransactionsAndBlocksFromDatabase() {
try {
const connection = await DB.pool.getConnection();
let query = `DELETE FROM blocks WHERE height < ?`;
await connection.query<any>(query, [this.currentBlockHeight - config.KEEP_BLOCK_AMOUNT]);
query = `DELETE FROM transactions WHERE blockheight < ?`;
await connection.query<any>(query, [this.currentBlockHeight - config.KEEP_BLOCK_AMOUNT]);
connection.release();
} catch (e) {
console.log('$clearOldTransactionsAndBlocksFromDatabase() error', e);
}
}
private median(numbers: number[]) {
if (!numbers.length) { return 0; }
let medianNr = 0;
const numsLen = numbers.length;
if (numsLen % 2 === 0) {
medianNr = (numbers[numsLen / 2 - 1] + numbers[numsLen / 2]) / 2;
} else {
const block = await bitcoinClient.getBlock(hash, 2);
transactions = block.tx.map(tx => {
tx.vsize = tx.weight / 4;
return tx;
});
medianNr = numbers[(numsLen - 1) / 2];
}
let cluster: TransactionStripped[] = [];
let ancestors: { [txid: string]: boolean } = {};
for (let i = transactions.length - 1; i >= 0; i--) {
const tx = transactions[i];
if (!ancestors[tx.txid]) {
let totalFee = 0;
let totalVSize = 0;
cluster.forEach(tx => {
totalFee += tx?.fee || 0;
totalVSize += tx.vsize;
});
const effectiveFeePerVsize = (totalFee * 100_000_000) / totalVSize;
if (cluster.length > 1) {
await cpfpRepository.$saveCluster(height, cluster.map(tx => { return { txid: tx.txid, weight: tx.vsize * 4, fee: (tx.fee || 0) * 100_000_000 }; }), effectiveFeePerVsize);
for (const tx of cluster) {
await transactionRepository.$setCluster(tx.txid, cluster[0].txid);
}
}
cluster = [];
ancestors = {};
}
cluster.push(tx);
tx.vin.forEach(vin => {
ancestors[vin.txid] = true;
});
}
await blocksRepository.$setCPFPIndexed(hash);
return medianNr;
}
}
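The $indexCPFP walk above scores each ancestor cluster with a single effective fee rate: total fees divided by total virtual size, so a high-fee child lifts its low-fee ancestors. A standalone sketch of that arithmetic (fees in satoshis here; the code above converts from BTC):
interface ClusterTx { txid: string; fee: number; vsize: number; }

function clusterEffectiveFeePerVsize(cluster: ClusterTx[]): number {
  const totalFee = cluster.reduce((acc, tx) => acc + tx.fee, 0);
  const totalVSize = cluster.reduce((acc, tx) => acc + tx.vsize, 0);
  return totalFee / totalVSize;
}

// e.g. a 200 vB parent paying 2000 sats (10 sat/vB) plus a 150 vB child
// paying 9000 sats (60 sat/vB) yields (2000 + 9000) / 350 ≈ 31.4 sat/vB.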

View File

@@ -1,309 +0,0 @@
import { CpfpInfo, TransactionExtended, TransactionStripped } from '../mempool.interfaces';
import config from '../config';
import { NodeSocket } from '../repositories/NodesSocketsRepository';
import { isIP } from 'net';
export class Common {
static nativeAssetId = config.MEMPOOL.NETWORK === 'liquidtestnet' ?
'144c654344aa716d6f3abcc1ca90e5641e4e2a7f633bc09fe3baf64585819a49'
: '6f0279e9ed041c3d710a9f57d0c02928416460c4b722ae3457a11eec381c526d';
static _isLiquid = config.MEMPOOL.NETWORK === 'liquid' || config.MEMPOOL.NETWORK === 'liquidtestnet';
static isLiquid(): boolean {
return this._isLiquid;
}
static median(numbers: number[]) {
let medianNr = 0;
const numsLen = numbers.length;
if (numsLen % 2 === 0) {
medianNr = (numbers[numsLen / 2 - 1] + numbers[numsLen / 2]) / 2;
} else {
medianNr = numbers[(numsLen - 1) / 2];
}
return medianNr;
}
static percentile(numbers: number[], percentile: number) {
if (percentile === 50) {
return this.median(numbers);
}
const index = Math.ceil(numbers.length * (100 - percentile) * 1e-2);
if (index < 0 || index > numbers.length - 1) {
return 0;
}
return numbers[index];
}
static getFeesInRange(transactions: TransactionExtended[], rangeLength: number) {
const arr = [transactions[transactions.length - 1].effectiveFeePerVsize];
const chunk = 1 / (rangeLength - 1);
let itemsToAdd = rangeLength - 2;
while (itemsToAdd > 0) {
arr.push(transactions[Math.floor(transactions.length * chunk * itemsToAdd)].effectiveFeePerVsize);
itemsToAdd--;
}
arr.push(transactions[0].effectiveFeePerVsize);
return arr;
}
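// Worked example: with 8 transactions sorted by effectiveFeePerVsize
// descending and rangeLength = 5, chunk = 0.25 and the samples are
// indices 7 (cheapest), 6, 4, 2 and 0 (most expensive), giving a
// roughly evenly spaced, ascending fee range.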
static findRbfTransactions(added: TransactionExtended[], deleted: TransactionExtended[]): { [txid: string]: TransactionExtended } {
const matches: { [txid: string]: TransactionExtended } = {};
deleted
// The replaced tx must have at least one input with nSequence < maxint-1 (that's the BIP125 opt-in)
.filter((tx) => tx.vin.some((vin) => vin.sequence < 0xfffffffe))
.forEach((deletedTx) => {
const foundMatches = added.find((addedTx) => {
// The new tx must pay strictly more in absolute fees than the replaced tx.
return addedTx.fee > deletedTx.fee
// The new tx must pay a higher fee per vbyte than the replaced tx.
&& addedTx.feePerVsize > deletedTx.feePerVsize
// Spends one or more of the same inputs
&& deletedTx.vin.some((deletedVin) =>
addedTx.vin.some((vin) => vin.txid === deletedVin.txid));
});
if (foundMatches) {
matches[deletedTx.txid] = foundMatches;
}
});
return matches;
}
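// Note: sequence < 0xfffffffe is the BIP125 opt-in signal, so a tx whose
// inputs are all 0xfffffffe or 0xffffffff is never matched as replaced
// here. The fee checks above are heuristics, not the full BIP125 rules
// (e.g. no check of the replacement against the incremental relay fee).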
static stripTransaction(tx: TransactionExtended): TransactionStripped {
return {
txid: tx.txid,
fee: tx.fee,
vsize: tx.weight / 4,
value: tx.vout.reduce((acc, vout) => acc + (vout.value ? vout.value : 0), 0),
};
}
static sleep$(ms: number): Promise<void> {
return new Promise((resolve) => {
setTimeout(() => {
resolve();
}, ms);
});
}
static shuffleArray(array: any[]) {
for (let i = array.length - 1; i > 0; i--) {
const j = Math.floor(Math.random() * (i + 1));
[array[i], array[j]] = [array[j], array[i]];
}
}
static setRelativesAndGetCpfpInfo(tx: TransactionExtended, memPool: { [txid: string]: TransactionExtended }): CpfpInfo {
const parents = this.findAllParents(tx, memPool);
const lowerFeeParents = parents.filter((parent) => parent.feePerVsize < tx.effectiveFeePerVsize);
let totalWeight = tx.weight + lowerFeeParents.reduce((prev, val) => prev + val.weight, 0);
let totalFees = tx.fee + lowerFeeParents.reduce((prev, val) => prev + val.fee, 0);
tx.ancestors = parents
.map((t) => {
return {
txid: t.txid,
weight: t.weight,
fee: t.fee,
};
});
// Add the highest-fee descendant's weight and fees
if (tx.bestDescendant) {
totalWeight += tx.bestDescendant.weight;
totalFees += tx.bestDescendant.fee;
}
tx.effectiveFeePerVsize = Math.max(0, totalFees / (totalWeight / 4));
tx.cpfpChecked = true;
return {
ancestors: tx.ancestors,
bestDescendant: tx.bestDescendant || null,
};
}
private static findAllParents(tx: TransactionExtended, memPool: { [txid: string]: TransactionExtended }): TransactionExtended[] {
let parents: TransactionExtended[] = [];
tx.vin.forEach((parent) => {
if (parents.find((p) => p.txid === parent.txid)) {
return;
}
const parentTx = memPool[parent.txid];
if (parentTx) {
if (tx.bestDescendant && tx.bestDescendant.fee / (tx.bestDescendant.weight / 4) > parentTx.feePerVsize) {
if (parentTx.bestDescendant && parentTx.bestDescendant.fee < tx.fee + tx.bestDescendant.fee) {
parentTx.bestDescendant = {
weight: tx.weight + tx.bestDescendant.weight,
fee: tx.fee + tx.bestDescendant.fee,
txid: tx.txid,
};
}
} else if (tx.feePerVsize > parentTx.feePerVsize) {
parentTx.bestDescendant = {
weight: tx.weight,
fee: tx.fee,
txid: tx.txid
};
}
parents.push(parentTx);
parents = parents.concat(this.findAllParents(parentTx, memPool));
}
});
return parents;
}
static getSqlInterval(interval: string | null): string | null {
switch (interval) {
case '24h': return '1 DAY';
case '3d': return '3 DAY';
case '1w': return '1 WEEK';
case '1m': return '1 MONTH';
case '3m': return '3 MONTH';
case '6m': return '6 MONTH';
case '1y': return '1 YEAR';
case '2y': return '2 YEAR';
case '3y': return '3 YEAR';
default: return null;
}
}
static indexingEnabled(): boolean {
return (
['mainnet', 'testnet', 'signet'].includes(config.MEMPOOL.NETWORK) &&
config.DATABASE.ENABLED === true &&
config.MEMPOOL.INDEXING_BLOCKS_AMOUNT !== 0
);
}
static blocksSummariesIndexingEnabled(): boolean {
return (
Common.indexingEnabled() &&
config.MEMPOOL.BLOCKS_SUMMARIES_INDEXING === true
);
}
static cpfpIndexingEnabled(): boolean {
return (
Common.indexingEnabled() &&
config.MEMPOOL.TRANSACTION_INDEXING === true
);
}
static setDateMidnight(date: Date): void {
date.setUTCHours(0);
date.setUTCMinutes(0);
date.setUTCSeconds(0);
date.setUTCMilliseconds(0);
}
static channelShortIdToIntegerId(channelId: string): string {
if (channelId.indexOf('x') === -1) { // Already an integer id
return channelId;
}
if (channelId.indexOf('/') !== -1) { // Topology import
channelId = channelId.slice(0, -2);
}
const s = channelId.split('x').map(part => BigInt(part));
return ((s[0] << 40n) | (s[1] << 16n) | s[2]).toString();
}
/** Decodes a channel id returned by lnd as uint64 to a short channel id */
static channelIntegerIdToShortId(id: string): string {
if (id.indexOf('/') !== -1) {
id = id.slice(0, -2);
}
if (id.indexOf('x') !== -1) { // Already a short id
return id;
}
const n = BigInt(id);
return [
n >> 40n, // nth block
(n >> 16n) & 0xffffffn, // nth tx of the block
n & 0xffffn // nth output of the tx
].join('x');
}
static utcDateToMysql(date?: number): string {
const d = new Date((date || 0) * 1000);
return d.toISOString().slice(0, 19).replace('T', ' '); // keep both date and time in UTC
}
static findSocketNetwork(addr: string): {network: string | null, url: string} {
let network: string | null = null;
let url = addr.split('://')[1];
if (!url) {
return {
network: null,
url: addr,
};
}
if (addr.indexOf('onion') !== -1) {
if (url.split('.')[0].length >= 56) {
network = 'torv3';
} else {
network = 'torv2';
}
} else if (addr.indexOf('i2p') !== -1) {
network = 'i2p';
} else if (addr.indexOf('ipv4') !== -1) {
const ipv = isIP(url.split(':')[0]);
if (ipv === 4) {
network = 'ipv4';
} else {
return {
network: null,
url: addr,
};
}
} else if (addr.indexOf('ipv6') !== -1) {
url = url.split('[')[1].split(']')[0];
const ipv = isIP(url);
if (ipv === 6) {
const parts = addr.split(':');
network = 'ipv6';
url = `[${url}]:${parts[parts.length - 1]}`;
} else {
return {
network: null,
url: addr,
};
}
} else {
return {
network: null,
url: addr,
};
}
return {
network: network,
url: url,
};
}
static formatSocket(publicKey: string, socket: {network: string, addr: string}): NodeSocket {
if (config.LIGHTNING.BACKEND === 'cln') {
return {
publicKey: publicKey,
network: socket.network,
addr: socket.addr,
};
} else /* if (config.LIGHTNING.BACKEND === 'lnd') */ {
const formatted = this.findSocketNetwork(socket.addr);
return {
publicKey: publicKey,
network: formatted.network,
addr: formatted.url,
};
}
}
}
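A worked round-trip of the channel-id packing above, as a minimal sketch: channelShortIdToIntegerId places the block height in the bits above 40, the transaction index in bits 16-39, and the output index in the low 16 bits, and channelIntegerIdToShortId reverses it with shifts and masks (the values below are hypothetical):
// '700000x1234x1' packs to (700000n << 40n) | (1234n << 16n) | 1n.
const shortId = '700000x1234x1';
const intId = Common.channelShortIdToIntegerId(shortId);
console.log(intId); // decimal string of the packed 64-bit value
console.log(Common.channelIntegerIdToShortId(intId) === shortId); // true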

View File

@@ -1,938 +0,0 @@
import config from '../config';
import DB from '../database';
import logger from '../logger';
import { Common } from './common';
class DatabaseMigration {
private static currentVersion = 49;
private queryTimeout = 3600_000;
private statisticsAddedIndexed = false;
private uniqueLogs: string[] = [];
private blocksTruncatedMessage = `'blocks' table has been truncated.`;
private hashratesTruncatedMessage = `'hashrates' table has been truncated.`;
/**
* Avoid printing the same message multiple times
*/
private uniqueLog(loggerFunction: any, msg: string) {
if (this.uniqueLogs.includes(msg)) {
return;
}
this.uniqueLogs.push(msg);
loggerFunction(msg);
}
/**
* Entry point
*/
public async $initializeOrMigrateDatabase(): Promise<void> {
logger.debug('MIGRATIONS: Running migrations');
await this.$printDatabaseVersion();
// First of all, if the `state` table does not exist, create it so we can track migration version
if (!await this.$checkIfTableExists('state')) {
logger.debug('MIGRATIONS: `state` table does not exist. Creating it.');
try {
await this.$createMigrationStateTable();
} catch (e) {
logger.err('MIGRATIONS: Unable to create `state` table, aborting in 10 seconds. ' + e);
await Common.sleep$(10000);
process.exit(-1);
}
logger.debug('MIGRATIONS: `state` table initialized.');
}
let databaseSchemaVersion = 0;
try {
databaseSchemaVersion = await this.$getSchemaVersionFromDatabase();
} catch (e) {
logger.err('MIGRATIONS: Unable to get current database migration version, aborting in 10 seconds. ' + e);
await Common.sleep$(10000);
process.exit(-1);
}
if (databaseSchemaVersion === 0) {
logger.info('Initializing database (first run, clean install)');
}
if (databaseSchemaVersion <= 2) {
// Disable some spam logs when they're not relevant
this.uniqueLogs.push(this.blocksTruncatedMessage);
this.uniqueLogs.push(this.hashratesTruncatedMessage);
}
logger.debug('MIGRATIONS: Current state.schema_version ' + databaseSchemaVersion);
logger.debug('MIGRATIONS: Latest DatabaseMigration.version is ' + DatabaseMigration.currentVersion);
if (databaseSchemaVersion >= DatabaseMigration.currentVersion) {
logger.debug('MIGRATIONS: Nothing to do.');
return;
}
// Now, create missing tables. Those queries cannot be wrapped into a transaction unfortunately
try {
await this.$createMissingTablesAndIndexes(databaseSchemaVersion);
} catch (e) {
logger.err('MIGRATIONS: Unable to create required tables, aborting in 10 seconds. ' + e);
await Common.sleep$(10000);
process.exit(-1);
}
if (DatabaseMigration.currentVersion > databaseSchemaVersion) {
try {
await this.$migrateTableSchemaFromVersion(databaseSchemaVersion);
if (databaseSchemaVersion === 0) {
logger.notice(`MIGRATIONS: OK. Database schema has been properly initialized to version ${DatabaseMigration.currentVersion} (latest version)`);
} else {
logger.notice(`MIGRATIONS: OK. Database schema has been migrated from version ${databaseSchemaVersion} to ${DatabaseMigration.currentVersion} (latest version)`);
}
} catch (e) {
logger.err('MIGRATIONS: Unable to migrate database, aborting. ' + e);
}
}
return;
}
/**
* Create all missing tables
*/
private async $createMissingTablesAndIndexes(databaseSchemaVersion: number) {
await this.$setStatisticsAddedIndexedFlag(databaseSchemaVersion);
const isBitcoin = ['mainnet', 'testnet', 'signet'].includes(config.MEMPOOL.NETWORK);
await this.$executeQuery(this.getCreateElementsTableQuery(), await this.$checkIfTableExists('elements_pegs'));
await this.$executeQuery(this.getCreateStatisticsQuery(), await this.$checkIfTableExists('statistics'));
if (databaseSchemaVersion < 2 && this.statisticsAddedIndexed === false) {
await this.$executeQuery(`CREATE INDEX added ON statistics (added);`);
await this.updateToSchemaVersion(2);
}
if (databaseSchemaVersion < 3) {
await this.$executeQuery(this.getCreatePoolsTableQuery(), await this.$checkIfTableExists('pools'));
await this.updateToSchemaVersion(3);
}
if (databaseSchemaVersion < 4) {
await this.$executeQuery('DROP table IF EXISTS blocks;');
await this.$executeQuery(this.getCreateBlocksTableQuery(), await this.$checkIfTableExists('blocks'));
await this.updateToSchemaVersion(4);
}
if (databaseSchemaVersion < 5 && isBitcoin === true) {
this.uniqueLog(logger.notice, this.blocksTruncatedMessage);
await this.$executeQuery('TRUNCATE blocks;'); // Need to re-index
await this.$executeQuery('ALTER TABLE blocks ADD `reward` double unsigned NOT NULL DEFAULT "0"');
await this.updateToSchemaVersion(5);
}
if (databaseSchemaVersion < 6 && isBitcoin === true) {
this.uniqueLog(logger.notice, this.blocksTruncatedMessage);
await this.$executeQuery('TRUNCATE blocks;'); // Need to re-index
// Cleanup original blocks fields type
await this.$executeQuery('ALTER TABLE blocks MODIFY `height` integer unsigned NOT NULL DEFAULT "0"');
await this.$executeQuery('ALTER TABLE blocks MODIFY `tx_count` smallint unsigned NOT NULL DEFAULT "0"');
await this.$executeQuery('ALTER TABLE blocks MODIFY `size` integer unsigned NOT NULL DEFAULT "0"');
await this.$executeQuery('ALTER TABLE blocks MODIFY `weight` integer unsigned NOT NULL DEFAULT "0"');
await this.$executeQuery('ALTER TABLE blocks MODIFY `difficulty` double NOT NULL DEFAULT "0"');
// We also fix the pools.id type so we need to drop/re-create the foreign key
await this.$executeQuery('ALTER TABLE blocks DROP FOREIGN KEY IF EXISTS `blocks_ibfk_1`');
await this.$executeQuery('ALTER TABLE pools MODIFY `id` smallint unsigned AUTO_INCREMENT');
await this.$executeQuery('ALTER TABLE blocks MODIFY `pool_id` smallint unsigned NULL');
await this.$executeQuery('ALTER TABLE blocks ADD FOREIGN KEY (`pool_id`) REFERENCES `pools` (`id`)');
// Add new block indexing fields
await this.$executeQuery('ALTER TABLE blocks ADD `version` integer unsigned NOT NULL DEFAULT "0"');
await this.$executeQuery('ALTER TABLE blocks ADD `bits` integer unsigned NOT NULL DEFAULT "0"');
await this.$executeQuery('ALTER TABLE blocks ADD `nonce` bigint unsigned NOT NULL DEFAULT "0"');
await this.$executeQuery('ALTER TABLE blocks ADD `merkle_root` varchar(65) NOT NULL DEFAULT ""');
await this.$executeQuery('ALTER TABLE blocks ADD `previous_block_hash` varchar(65) NULL');
await this.updateToSchemaVersion(6);
}
if (databaseSchemaVersion < 7 && isBitcoin === true) {
await this.$executeQuery('DROP table IF EXISTS hashrates;');
await this.$executeQuery(this.getCreateDailyStatsTableQuery(), await this.$checkIfTableExists('hashrates'));
await this.updateToSchemaVersion(7);
}
if (databaseSchemaVersion < 8 && isBitcoin === true) {
this.uniqueLog(logger.notice, this.hashratesTruncatedMessage);
await this.$executeQuery('TRUNCATE hashrates;'); // Need to re-index
await this.$executeQuery('ALTER TABLE `hashrates` DROP INDEX `PRIMARY`');
await this.$executeQuery('ALTER TABLE `hashrates` ADD `id` int NOT NULL AUTO_INCREMENT PRIMARY KEY FIRST');
await this.$executeQuery('ALTER TABLE `hashrates` ADD `share` float NOT NULL DEFAULT "0"');
await this.$executeQuery('ALTER TABLE `hashrates` ADD `type` enum("daily", "weekly") DEFAULT "daily"');
await this.updateToSchemaVersion(8);
}
if (databaseSchemaVersion < 9 && isBitcoin === true) {
this.uniqueLog(logger.notice, this.hashratesTruncatedMessage);
await this.$executeQuery('TRUNCATE hashrates;'); // Need to re-index
await this.$executeQuery('ALTER TABLE `state` CHANGE `name` `name` varchar(100)');
await this.$executeQuery('ALTER TABLE `hashrates` ADD UNIQUE `hashrate_timestamp_pool_id` (`hashrate_timestamp`, `pool_id`)');
await this.updateToSchemaVersion(9);
}
if (databaseSchemaVersion < 10 && isBitcoin === true) {
await this.$executeQuery('ALTER TABLE `blocks` ADD INDEX `blockTimestamp` (`blockTimestamp`)');
await this.updateToSchemaVersion(10);
}
if (databaseSchemaVersion < 11 && isBitcoin === true) {
this.uniqueLog(logger.notice, this.blocksTruncatedMessage);
await this.$executeQuery('TRUNCATE blocks;'); // Need to re-index
await this.$executeQuery(`ALTER TABLE blocks
ADD avg_fee INT UNSIGNED NULL,
ADD avg_fee_rate INT UNSIGNED NULL
`);
await this.$executeQuery('ALTER TABLE blocks MODIFY `reward` BIGINT UNSIGNED NOT NULL DEFAULT "0"');
await this.$executeQuery('ALTER TABLE blocks MODIFY `median_fee` INT UNSIGNED NOT NULL DEFAULT "0"');
await this.$executeQuery('ALTER TABLE blocks MODIFY `fees` INT UNSIGNED NOT NULL DEFAULT "0"');
await this.updateToSchemaVersion(11);
}
if (databaseSchemaVersion < 12 && isBitcoin === true) {
// No need to re-index because the new data type can contain larger values
await this.$executeQuery('ALTER TABLE blocks MODIFY `fees` BIGINT UNSIGNED NOT NULL DEFAULT "0"');
await this.updateToSchemaVersion(12);
}
if (databaseSchemaVersion < 13 && isBitcoin === true) {
await this.$executeQuery('ALTER TABLE blocks MODIFY `difficulty` DOUBLE UNSIGNED NOT NULL DEFAULT "0"');
await this.$executeQuery('ALTER TABLE blocks MODIFY `median_fee` BIGINT UNSIGNED NOT NULL DEFAULT "0"');
await this.$executeQuery('ALTER TABLE blocks MODIFY `avg_fee` BIGINT UNSIGNED NOT NULL DEFAULT "0"');
await this.$executeQuery('ALTER TABLE blocks MODIFY `avg_fee_rate` BIGINT UNSIGNED NOT NULL DEFAULT "0"');
await this.updateToSchemaVersion(13);
}
if (databaseSchemaVersion < 14 && isBitcoin === true) {
this.uniqueLog(logger.notice, this.hashratesTruncatedMessage);
await this.$executeQuery('TRUNCATE hashrates;'); // Need to re-index
await this.$executeQuery('ALTER TABLE `hashrates` DROP FOREIGN KEY `hashrates_ibfk_1`');
await this.$executeQuery('ALTER TABLE `hashrates` MODIFY `pool_id` SMALLINT UNSIGNED NOT NULL DEFAULT "0"');
await this.updateToSchemaVersion(14);
}
if (databaseSchemaVersion < 16 && isBitcoin === true) {
this.uniqueLog(logger.notice, this.hashratesTruncatedMessage);
await this.$executeQuery('TRUNCATE hashrates;'); // Need to re-index because we changed timestamps
await this.updateToSchemaVersion(16);
}
if (databaseSchemaVersion < 17 && isBitcoin === true) {
await this.$executeQuery('ALTER TABLE `pools` ADD `slug` CHAR(50) NULL');
await this.updateToSchemaVersion(17);
}
if (databaseSchemaVersion < 18 && isBitcoin === true) {
await this.$executeQuery('ALTER TABLE `blocks` ADD INDEX `hash` (`hash`);');
await this.updateToSchemaVersion(18);
}
if (databaseSchemaVersion < 19) {
await this.$executeQuery(this.getCreateRatesTableQuery(), await this.$checkIfTableExists('rates'));
await this.updateToSchemaVersion(19);
}
if (databaseSchemaVersion < 20 && isBitcoin === true) {
await this.$executeQuery(this.getCreateBlocksSummariesTableQuery(), await this.$checkIfTableExists('blocks_summaries'));
await this.updateToSchemaVersion(20);
}
if (databaseSchemaVersion < 21) {
await this.$executeQuery('DROP TABLE IF EXISTS `rates`');
await this.$executeQuery(this.getCreatePricesTableQuery(), await this.$checkIfTableExists('prices'));
await this.updateToSchemaVersion(21);
}
if (databaseSchemaVersion < 22 && isBitcoin === true) {
await this.$executeQuery('DROP TABLE IF EXISTS `difficulty_adjustments`');
await this.$executeQuery(this.getCreateDifficultyAdjustmentsTableQuery(), await this.$checkIfTableExists('difficulty_adjustments'));
await this.updateToSchemaVersion(22);
}
if (databaseSchemaVersion < 23) {
await this.$executeQuery('TRUNCATE `prices`');
await this.$executeQuery('ALTER TABLE `prices` DROP `avg_prices`');
await this.$executeQuery('ALTER TABLE `prices` ADD `USD` float DEFAULT "0"');
await this.$executeQuery('ALTER TABLE `prices` ADD `EUR` float DEFAULT "0"');
await this.$executeQuery('ALTER TABLE `prices` ADD `GBP` float DEFAULT "0"');
await this.$executeQuery('ALTER TABLE `prices` ADD `CAD` float DEFAULT "0"');
await this.$executeQuery('ALTER TABLE `prices` ADD `CHF` float DEFAULT "0"');
await this.$executeQuery('ALTER TABLE `prices` ADD `AUD` float DEFAULT "0"');
await this.$executeQuery('ALTER TABLE `prices` ADD `JPY` float DEFAULT "0"');
await this.updateToSchemaVersion(23);
}
if (databaseSchemaVersion < 24 && isBitcoin == true) {
await this.$executeQuery('DROP TABLE IF EXISTS `blocks_audits`');
await this.$executeQuery(this.getCreateBlocksAuditsTableQuery(), await this.$checkIfTableExists('blocks_audits'));
await this.updateToSchemaVersion(24);
}
if (databaseSchemaVersion < 25 && isBitcoin === true) {
await this.$executeQuery(this.getCreateLightningStatisticsQuery(), await this.$checkIfTableExists('lightning_stats'));
await this.$executeQuery(this.getCreateNodesQuery(), await this.$checkIfTableExists('nodes'));
await this.$executeQuery(this.getCreateChannelsQuery(), await this.$checkIfTableExists('channels'));
await this.$executeQuery(this.getCreateNodesStatsQuery(), await this.$checkIfTableExists('node_stats'));
await this.updateToSchemaVersion(25);
}
if (databaseSchemaVersion < 26 && isBitcoin === true) {
if (config.LIGHTNING.ENABLED) {
this.uniqueLog(logger.notice, `'lightning_stats' table has been truncated.`);
}
await this.$executeQuery(`TRUNCATE lightning_stats`);
await this.$executeQuery('ALTER TABLE `lightning_stats` ADD tor_nodes int(11) NOT NULL DEFAULT "0"');
await this.$executeQuery('ALTER TABLE `lightning_stats` ADD clearnet_nodes int(11) NOT NULL DEFAULT "0"');
await this.$executeQuery('ALTER TABLE `lightning_stats` ADD unannounced_nodes int(11) NOT NULL DEFAULT "0"');
await this.updateToSchemaVersion(26);
}
if (databaseSchemaVersion < 27 && isBitcoin === true) {
await this.$executeQuery('ALTER TABLE `lightning_stats` ADD avg_capacity bigint(20) unsigned NOT NULL DEFAULT "0"');
await this.$executeQuery('ALTER TABLE `lightning_stats` ADD avg_fee_rate int(11) unsigned NOT NULL DEFAULT "0"');
await this.$executeQuery('ALTER TABLE `lightning_stats` ADD avg_base_fee_mtokens bigint(20) unsigned NOT NULL DEFAULT "0"');
await this.$executeQuery('ALTER TABLE `lightning_stats` ADD med_capacity bigint(20) unsigned NOT NULL DEFAULT "0"');
await this.$executeQuery('ALTER TABLE `lightning_stats` ADD med_fee_rate int(11) unsigned NOT NULL DEFAULT "0"');
await this.$executeQuery('ALTER TABLE `lightning_stats` ADD med_base_fee_mtokens bigint(20) unsigned NOT NULL DEFAULT "0"');
await this.updateToSchemaVersion(27);
}
if (databaseSchemaVersion < 28 && isBitcoin === true) {
if (config.LIGHTNING.ENABLED) {
this.uniqueLog(logger.notice, `'lightning_stats' and 'node_stats' tables have been truncated.`);
}
await this.$executeQuery(`TRUNCATE lightning_stats`);
await this.$executeQuery(`TRUNCATE node_stats`);
await this.$executeQuery(`ALTER TABLE lightning_stats MODIFY added DATE`);
await this.updateToSchemaVersion(28);
}
if (databaseSchemaVersion < 29 && isBitcoin === true) {
await this.$executeQuery(this.getCreateGeoNamesTableQuery(), await this.$checkIfTableExists('geo_names'));
await this.$executeQuery('ALTER TABLE `nodes` ADD as_number int(11) unsigned NULL DEFAULT NULL');
await this.$executeQuery('ALTER TABLE `nodes` ADD city_id int(11) unsigned NULL DEFAULT NULL');
await this.$executeQuery('ALTER TABLE `nodes` ADD country_id int(11) unsigned NULL DEFAULT NULL');
await this.$executeQuery('ALTER TABLE `nodes` ADD accuracy_radius int(11) unsigned NULL DEFAULT NULL');
await this.$executeQuery('ALTER TABLE `nodes` ADD subdivision_id int(11) unsigned NULL DEFAULT NULL');
await this.$executeQuery('ALTER TABLE `nodes` ADD longitude double NULL DEFAULT NULL');
await this.$executeQuery('ALTER TABLE `nodes` ADD latitude double NULL DEFAULT NULL');
await this.updateToSchemaVersion(29);
}
if (databaseSchemaVersion < 30 && isBitcoin === true) {
await this.$executeQuery('ALTER TABLE `geo_names` CHANGE `type` `type` enum("city","country","division","continent","as_organization") NOT NULL');
await this.updateToSchemaVersion(30);
}
if (databaseSchemaVersion < 31 && isBitcoin == true) { // Link blocks to prices
await this.$executeQuery('ALTER TABLE `prices` ADD `id` int NULL AUTO_INCREMENT UNIQUE');
await this.$executeQuery('DROP TABLE IF EXISTS `blocks_prices`');
await this.$executeQuery(this.getCreateBlocksPricesTableQuery(), await this.$checkIfTableExists('blocks_prices'));
await this.updateToSchemaVersion(31);
}
if (databaseSchemaVersion < 32 && isBitcoin == true) {
await this.$executeQuery('ALTER TABLE `blocks_summaries` ADD `template` JSON DEFAULT "[]"');
await this.updateToSchemaVersion(32);
}
if (databaseSchemaVersion < 33 && isBitcoin == true) {
await this.$executeQuery('ALTER TABLE `geo_names` CHANGE `type` `type` enum("city","country","division","continent","as_organization", "country_iso_code") NOT NULL');
await this.updateToSchemaVersion(33);
}
if (databaseSchemaVersion < 34 && isBitcoin == true) {
await this.$executeQuery('ALTER TABLE `lightning_stats` ADD clearnet_tor_nodes int(11) NOT NULL DEFAULT "0"');
await this.updateToSchemaVersion(34);
}
if (databaseSchemaVersion < 35 && isBitcoin == true) {
await this.$executeQuery('DELETE from `lightning_stats` WHERE added > "2021-09-19"');
await this.$executeQuery('ALTER TABLE `lightning_stats` ADD CONSTRAINT added_unique UNIQUE (added);');
await this.updateToSchemaVersion(35);
}
if (databaseSchemaVersion < 36 && isBitcoin == true) {
await this.$executeQuery('ALTER TABLE `nodes` ADD status TINYINT NOT NULL DEFAULT "1"');
await this.updateToSchemaVersion(36);
}
if (databaseSchemaVersion < 37 && isBitcoin == true) {
await this.$executeQuery(this.getCreateLNNodesSocketsTableQuery(), await this.$checkIfTableExists('nodes_sockets'));
await this.updateToSchemaVersion(37);
}
if (databaseSchemaVersion < 38 && isBitcoin == true) {
if (config.LIGHTNING.ENABLED) {
this.uniqueLog(logger.notice, `'lightning_stats' and 'node_stats' tables have been truncated.`);
}
await this.$executeQuery(`TRUNCATE lightning_stats`);
await this.$executeQuery(`TRUNCATE node_stats`);
await this.$executeQuery('ALTER TABLE `lightning_stats` CHANGE `added` `added` timestamp NULL');
await this.$executeQuery('ALTER TABLE `node_stats` CHANGE `added` `added` timestamp NULL');
await this.updateToSchemaVersion(38);
}
if (databaseSchemaVersion < 39 && isBitcoin === true) {
await this.$executeQuery('ALTER TABLE `nodes` ADD alias_search TEXT NULL DEFAULT NULL AFTER `alias`');
await this.$executeQuery('ALTER TABLE nodes ADD FULLTEXT(alias_search)');
await this.updateToSchemaVersion(39);
}
if (databaseSchemaVersion < 40 && isBitcoin === true) {
await this.$executeQuery('ALTER TABLE `nodes` ADD capacity bigint(20) unsigned DEFAULT NULL');
await this.$executeQuery('ALTER TABLE `nodes` ADD channels int(11) unsigned DEFAULT NULL');
await this.$executeQuery('ALTER TABLE `nodes` ADD INDEX `capacity` (`capacity`);');
await this.updateToSchemaVersion(40);
}
if (databaseSchemaVersion < 41 && isBitcoin === true) {
await this.$executeQuery('UPDATE channels SET closing_reason = NULL WHERE closing_reason = 1');
await this.updateToSchemaVersion(41);
}
if (databaseSchemaVersion < 42 && isBitcoin === true) {
await this.$executeQuery('ALTER TABLE `channels` ADD closing_resolved tinyint(1) DEFAULT 0');
await this.updateToSchemaVersion(42);
}
if (databaseSchemaVersion < 43 && isBitcoin === true) {
await this.$executeQuery(this.getCreateLNNodeRecordsTableQuery(), await this.$checkIfTableExists('nodes_records'));
await this.updateToSchemaVersion(43);
}
if (databaseSchemaVersion < 44 && isBitcoin === true) {
await this.$executeQuery('UPDATE blocks_summaries SET template = NULL');
await this.updateToSchemaVersion(44);
}
if (databaseSchemaVersion < 45 && isBitcoin === true) {
await this.$executeQuery('ALTER TABLE `blocks_audits` ADD fresh_txs JSON DEFAULT "[]"');
await this.updateToSchemaVersion(45);
}
if (databaseSchemaVersion < 46) {
await this.$executeQuery(`ALTER TABLE blocks MODIFY blockTimestamp timestamp NOT NULL DEFAULT 0`);
await this.updateToSchemaVersion(46);
}
if (databaseSchemaVersion < 47) {
await this.$executeQuery('ALTER TABLE `blocks` ADD cpfp_indexed tinyint(1) DEFAULT 0');
await this.$executeQuery(this.getCreateCPFPTableQuery(), await this.$checkIfTableExists('cpfp_clusters'));
await this.$executeQuery(this.getCreateTransactionsTableQuery(), await this.$checkIfTableExists('transactions'));
await this.updateToSchemaVersion(47);
}
if (databaseSchemaVersion < 48 && isBitcoin === true) {
await this.$executeQuery('ALTER TABLE `channels` ADD source_checked tinyint(1) DEFAULT 0');
await this.$executeQuery('ALTER TABLE `channels` ADD closing_fee bigint(20) unsigned DEFAULT 0');
await this.$executeQuery('ALTER TABLE `channels` ADD node1_funding_balance bigint(20) unsigned DEFAULT 0');
await this.$executeQuery('ALTER TABLE `channels` ADD node2_funding_balance bigint(20) unsigned DEFAULT 0');
await this.$executeQuery('ALTER TABLE `channels` ADD node1_closing_balance bigint(20) unsigned DEFAULT 0');
await this.$executeQuery('ALTER TABLE `channels` ADD node2_closing_balance bigint(20) unsigned DEFAULT 0');
await this.$executeQuery('ALTER TABLE `channels` ADD funding_ratio float unsigned DEFAULT NULL');
await this.$executeQuery('ALTER TABLE `channels` ADD closed_by varchar(66) DEFAULT NULL');
await this.$executeQuery('ALTER TABLE `channels` ADD single_funded tinyint(1) DEFAULT 0');
await this.$executeQuery('ALTER TABLE `channels` ADD outputs JSON DEFAULT "[]"');
await this.updateToSchemaVersion(48);
}
if (databaseSchemaVersion < 49 && isBitcoin === true) {
await this.$executeQuery('TRUNCATE TABLE `blocks_audits`');
await this.updateToSchemaVersion(49);
}
}
/**
* Special case for the `statistics` table: it appeared that some databases somehow already had the `added` field
* indexed even though it does not appear in previous schemas. The MariaDB statement "CREATE INDEX IF NOT EXISTS"
* is not supported on older MariaDB versions, so we set a flag here to know whether the index needs to be created
* before running the migration process.
*/
private async $setStatisticsAddedIndexedFlag(databaseSchemaVersion: number) {
if (databaseSchemaVersion >= 2) {
this.statisticsAddedIndexed = true;
return;
}
try {
// We don't use "CREATE INDEX IF NOT EXISTS" because it is not supported on old MariaDB 5.x versions
const query = `SELECT COUNT(1) hasIndex FROM INFORMATION_SCHEMA.STATISTICS
WHERE table_schema=DATABASE() AND table_name='statistics' AND index_name='added';`;
const [rows] = await this.$executeQuery(query, true);
if (rows[0].hasIndex === 0) {
logger.debug('MIGRATIONS: `statistics.added` is not indexed');
this.statisticsAddedIndexed = false;
} else if (rows[0].hasIndex === 1) {
logger.debug('MIGRATIONS: `statistics.added` is already indexed');
this.statisticsAddedIndexed = true;
}
} catch (e) {
// This should really never happen, but if the check fails we simply skip any query related to
// this index so the migration won't break if the index actually already exists
logger.err('MIGRATIONS: Unable to check if `statistics.added` INDEX exist or not.');
this.statisticsAddedIndexed = true;
}
}
/**
* Small query execution wrapper to log all executed queries
*/
private async $executeQuery(query: string, silent = false): Promise<any> {
if (!silent) {
logger.debug('MIGRATIONS: Execute query:\n' + query);
}
return DB.query({ sql: query, timeout: this.queryTimeout });
}
/**
* Check if 'table' exists in the database
*/
private async $checkIfTableExists(table: string): Promise<boolean> {
const query = `SELECT COUNT(*) FROM information_schema.tables WHERE table_schema = '${config.DATABASE.DATABASE}' AND TABLE_NAME = '${table}'`;
const [rows] = await DB.query({ sql: query, timeout: this.queryTimeout });
return rows[0]['COUNT(*)'] === 1;
}
/**
* Get current database version
*/
private async $getSchemaVersionFromDatabase(): Promise<number> {
const query = `SELECT number FROM state WHERE name = 'schema_version';`;
const [rows] = await this.$executeQuery(query, true);
return rows[0]['number'];
}
/**
* Create the `state` table
*/
private async $createMigrationStateTable(): Promise<void> {
const query = `CREATE TABLE IF NOT EXISTS state (
name varchar(25) NOT NULL,
number int(11) NULL,
string varchar(100) NULL,
CONSTRAINT name_unique UNIQUE (name)
) ENGINE=InnoDB DEFAULT CHARSET=utf8;`;
await this.$executeQuery(query);
// Set initial values
await this.$executeQuery(`INSERT INTO state VALUES('schema_version', 0, NULL);`);
await this.$executeQuery(`INSERT INTO state VALUES('last_elements_block', 0, NULL);`);
}
/**
* We actually execute the migrations queries here
*/
private async $migrateTableSchemaFromVersion(version: number): Promise<void> {
const transactionQueries: string[] = [];
for (const query of this.getMigrationQueriesFromVersion(version)) {
transactionQueries.push(query);
}
logger.notice(`MIGRATIONS: ${version > 0 ? 'Upgrading' : 'Initializing'} database schema version number to ${DatabaseMigration.currentVersion}`);
transactionQueries.push(this.getUpdateToLatestSchemaVersionQuery());
try {
await this.$executeQuery('START TRANSACTION;');
for (const query of transactionQueries) {
await this.$executeQuery(query);
}
await this.$executeQuery('COMMIT;');
} catch (e) {
await this.$executeQuery('ROLLBACK;');
throw e;
}
}
/**
* Generate migration queries based on schema version
*/
private getMigrationQueriesFromVersion(version: number): string[] {
const queries: string[] = [];
const isBitcoin = ['mainnet', 'testnet', 'signet'].includes(config.MEMPOOL.NETWORK);
if (version < 1) {
if (config.MEMPOOL.NETWORK !== 'liquid' && config.MEMPOOL.NETWORK !== 'liquidtestnet') {
if (version > 0) {
logger.notice(`MIGRATIONS: Migrating (shifting) statistics table data`);
}
queries.push(this.getShiftStatisticsQuery());
}
}
if (version < 7 && isBitcoin === true) {
queries.push(`INSERT INTO state(name, number, string) VALUES ('last_hashrates_indexing', 0, NULL)`);
}
if (version < 9 && isBitcoin === true) {
queries.push(`INSERT INTO state(name, number, string) VALUES ('last_weekly_hashrates_indexing', 0, NULL)`);
}
return queries;
}
/**
* Save the schema version in the database
*/
private getUpdateToLatestSchemaVersionQuery(): string {
return `UPDATE state SET number = ${DatabaseMigration.currentVersion} WHERE name = 'schema_version';`;
}
private async updateToSchemaVersion(version: number): Promise<void> {
await this.$executeQuery(`UPDATE state SET number = ${version} WHERE name = 'schema_version';`);
}
/**
* Print current database version
*/
private async $printDatabaseVersion() {
try {
const [rows] = await this.$executeQuery('SELECT VERSION() as version;', true);
logger.debug(`MIGRATIONS: Database engine version '${rows[0].version}'`);
} catch (e) {
logger.debug(`MIGRATIONS: Could not fetch database engine version. ` + e);
}
}
// Couple of wrappers to clean the main logic
private getShiftStatisticsQuery(): string {
return `UPDATE statistics SET
vsize_1 = vsize_1 + vsize_2, vsize_2 = vsize_3,
vsize_3 = vsize_4, vsize_4 = vsize_5,
vsize_5 = vsize_6, vsize_6 = vsize_8,
vsize_8 = vsize_10, vsize_10 = vsize_12,
vsize_12 = vsize_15, vsize_15 = vsize_20,
vsize_20 = vsize_30, vsize_30 = vsize_40,
vsize_40 = vsize_50, vsize_50 = vsize_60,
vsize_60 = vsize_70, vsize_70 = vsize_80,
vsize_80 = vsize_90, vsize_90 = vsize_100,
vsize_100 = vsize_125, vsize_125 = vsize_150,
vsize_150 = vsize_175, vsize_175 = vsize_200,
vsize_200 = vsize_250, vsize_250 = vsize_300,
vsize_300 = vsize_350, vsize_350 = vsize_400,
vsize_400 = vsize_500, vsize_500 = vsize_600,
vsize_600 = vsize_700, vsize_700 = vsize_800,
vsize_800 = vsize_900, vsize_900 = vsize_1000,
vsize_1000 = vsize_1200, vsize_1200 = vsize_1400,
vsize_1400 = vsize_1800, vsize_1800 = vsize_2000, vsize_2000 = 0;`;
}
private getCreateStatisticsQuery(): string {
return `CREATE TABLE IF NOT EXISTS statistics (
id int(11) NOT NULL AUTO_INCREMENT,
added datetime NOT NULL,
unconfirmed_transactions int(11) UNSIGNED NOT NULL,
tx_per_second float UNSIGNED NOT NULL,
vbytes_per_second int(10) UNSIGNED NOT NULL,
mempool_byte_weight int(10) UNSIGNED NOT NULL,
fee_data longtext NOT NULL,
total_fee double UNSIGNED NOT NULL,
vsize_1 int(11) NOT NULL,
vsize_2 int(11) NOT NULL,
vsize_3 int(11) NOT NULL,
vsize_4 int(11) NOT NULL,
vsize_5 int(11) NOT NULL,
vsize_6 int(11) NOT NULL,
vsize_8 int(11) NOT NULL,
vsize_10 int(11) NOT NULL,
vsize_12 int(11) NOT NULL,
vsize_15 int(11) NOT NULL,
vsize_20 int(11) NOT NULL,
vsize_30 int(11) NOT NULL,
vsize_40 int(11) NOT NULL,
vsize_50 int(11) NOT NULL,
vsize_60 int(11) NOT NULL,
vsize_70 int(11) NOT NULL,
vsize_80 int(11) NOT NULL,
vsize_90 int(11) NOT NULL,
vsize_100 int(11) NOT NULL,
vsize_125 int(11) NOT NULL,
vsize_150 int(11) NOT NULL,
vsize_175 int(11) NOT NULL,
vsize_200 int(11) NOT NULL,
vsize_250 int(11) NOT NULL,
vsize_300 int(11) NOT NULL,
vsize_350 int(11) NOT NULL,
vsize_400 int(11) NOT NULL,
vsize_500 int(11) NOT NULL,
vsize_600 int(11) NOT NULL,
vsize_700 int(11) NOT NULL,
vsize_800 int(11) NOT NULL,
vsize_900 int(11) NOT NULL,
vsize_1000 int(11) NOT NULL,
vsize_1200 int(11) NOT NULL,
vsize_1400 int(11) NOT NULL,
vsize_1600 int(11) NOT NULL,
vsize_1800 int(11) NOT NULL,
vsize_2000 int(11) NOT NULL,
CONSTRAINT PRIMARY KEY (id)
) ENGINE=InnoDB DEFAULT CHARSET=utf8;`;
}
private getCreateElementsTableQuery(): string {
return `CREATE TABLE IF NOT EXISTS elements_pegs (
block int(11) NOT NULL,
datetime int(11) NOT NULL,
amount bigint(20) NOT NULL,
txid varchar(65) NOT NULL,
txindex int(11) NOT NULL,
bitcoinaddress varchar(100) NOT NULL,
bitcointxid varchar(65) NOT NULL,
bitcoinindex int(11) NOT NULL,
final_tx int(11) NOT NULL
) ENGINE=InnoDB DEFAULT CHARSET=utf8;`;
}
private getCreatePoolsTableQuery(): string {
return `CREATE TABLE IF NOT EXISTS pools (
id int(11) NOT NULL AUTO_INCREMENT,
name varchar(50) NOT NULL,
link varchar(255) NOT NULL,
addresses text NOT NULL,
regexes text NOT NULL,
PRIMARY KEY (id)
) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4;`;
}
private getCreateBlocksTableQuery(): string {
return `CREATE TABLE IF NOT EXISTS blocks (
height int(11) unsigned NOT NULL,
hash varchar(65) NOT NULL,
blockTimestamp timestamp NOT NULL,
size int(11) unsigned NOT NULL,
weight int(11) unsigned NOT NULL,
tx_count int(11) unsigned NOT NULL,
coinbase_raw text,
difficulty bigint(20) unsigned NOT NULL,
pool_id int(11) DEFAULT -1,
fees double unsigned NOT NULL,
fee_span json NOT NULL,
median_fee double unsigned NOT NULL,
PRIMARY KEY (height),
INDEX (pool_id),
FOREIGN KEY (pool_id) REFERENCES pools (id)
) ENGINE=InnoDB DEFAULT CHARSET=utf8;`;
}
private getCreateDailyStatsTableQuery(): string {
return `CREATE TABLE IF NOT EXISTS hashrates (
hashrate_timestamp timestamp NOT NULL,
avg_hashrate double unsigned DEFAULT '0',
pool_id smallint unsigned NULL,
PRIMARY KEY (hashrate_timestamp),
INDEX (pool_id),
FOREIGN KEY (pool_id) REFERENCES pools (id)
) ENGINE=InnoDB DEFAULT CHARSET=utf8;`;
}
private getCreateRatesTableQuery(): string { // This table has been replaced by the prices table
return `CREATE TABLE IF NOT EXISTS rates (
height int(10) unsigned NOT NULL,
bisq_rates JSON NOT NULL,
PRIMARY KEY (height)
) ENGINE=InnoDB DEFAULT CHARSET=utf8;`;
}
private getCreateBlocksSummariesTableQuery(): string {
return `CREATE TABLE IF NOT EXISTS blocks_summaries (
height int(10) unsigned NOT NULL,
id varchar(65) NOT NULL,
transactions JSON NOT NULL,
PRIMARY KEY (id),
INDEX (height)
) ENGINE=InnoDB DEFAULT CHARSET=utf8;`;
}
private getCreatePricesTableQuery(): string {
return `CREATE TABLE IF NOT EXISTS prices (
time timestamp NOT NULL,
avg_prices JSON NOT NULL,
PRIMARY KEY (time)
) ENGINE=InnoDB DEFAULT CHARSET=utf8;`;
}
private getCreateDifficultyAdjustmentsTableQuery(): string {
return `CREATE TABLE IF NOT EXISTS difficulty_adjustments (
time timestamp NOT NULL,
height int(10) unsigned NOT NULL,
difficulty double unsigned NOT NULL,
adjustment float NOT NULL,
PRIMARY KEY (height),
INDEX (time)
) ENGINE=InnoDB DEFAULT CHARSET=utf8;`;
}
private getCreateLightningStatisticsQuery(): string {
return `CREATE TABLE IF NOT EXISTS lightning_stats (
id int(11) NOT NULL AUTO_INCREMENT,
added datetime NOT NULL,
channel_count int(11) NOT NULL,
node_count int(11) NOT NULL,
total_capacity double unsigned NOT NULL,
PRIMARY KEY (id)
) ENGINE=InnoDB DEFAULT CHARSET=utf8;`;
}
private getCreateNodesQuery(): string {
return `CREATE TABLE IF NOT EXISTS nodes (
public_key varchar(66) NOT NULL,
first_seen datetime NOT NULL,
updated_at datetime NOT NULL,
alias varchar(200) CHARACTER SET utf8mb4 NOT NULL,
color varchar(200) NOT NULL,
sockets text DEFAULT NULL,
PRIMARY KEY (public_key),
KEY alias (alias(10))
) ENGINE=InnoDB DEFAULT CHARSET=utf8;`;
}
private getCreateChannelsQuery(): string {
return `CREATE TABLE IF NOT EXISTS channels (
id bigint(11) unsigned NOT NULL,
short_id varchar(15) NOT NULL DEFAULT '',
capacity bigint(20) unsigned NOT NULL,
transaction_id varchar(64) NOT NULL,
transaction_vout int(11) NOT NULL,
updated_at datetime DEFAULT NULL,
created datetime DEFAULT NULL,
status int(11) NOT NULL DEFAULT 0,
closing_transaction_id varchar(64) DEFAULT NULL,
closing_date datetime DEFAULT NULL,
closing_reason int(11) DEFAULT NULL,
node1_public_key varchar(66) NOT NULL,
node1_base_fee_mtokens bigint(20) unsigned DEFAULT NULL,
node1_cltv_delta int(11) DEFAULT NULL,
node1_fee_rate bigint(11) DEFAULT NULL,
node1_is_disabled tinyint(1) DEFAULT NULL,
node1_max_htlc_mtokens bigint(20) unsigned DEFAULT NULL,
node1_min_htlc_mtokens bigint(20) DEFAULT NULL,
node1_updated_at datetime DEFAULT NULL,
node2_public_key varchar(66) NOT NULL,
node2_base_fee_mtokens bigint(20) unsigned DEFAULT NULL,
node2_cltv_delta int(11) DEFAULT NULL,
node2_fee_rate bigint(11) DEFAULT NULL,
node2_is_disabled tinyint(1) DEFAULT NULL,
node2_max_htlc_mtokens bigint(20) unsigned DEFAULT NULL,
node2_min_htlc_mtokens bigint(20) unsigned DEFAULT NULL,
node2_updated_at datetime DEFAULT NULL,
PRIMARY KEY (id),
KEY node1_public_key (node1_public_key),
KEY node2_public_key (node2_public_key),
KEY status (status),
KEY short_id (short_id),
KEY transaction_id (transaction_id),
KEY closing_transaction_id (closing_transaction_id)
) ENGINE=InnoDB DEFAULT CHARSET=utf8;`;
}
private getCreateNodesStatsQuery(): string {
return `CREATE TABLE IF NOT EXISTS node_stats (
id int(11) unsigned NOT NULL AUTO_INCREMENT,
public_key varchar(66) NOT NULL DEFAULT '',
added date NOT NULL,
capacity bigint(20) unsigned NOT NULL DEFAULT 0,
channels int(11) unsigned NOT NULL DEFAULT 0,
PRIMARY KEY (id),
UNIQUE KEY added (added,public_key),
KEY public_key (public_key)
) ENGINE=InnoDB DEFAULT CHARSET=utf8;`;
}
private getCreateBlocksAuditsTableQuery(): string {
return `CREATE TABLE IF NOT EXISTS blocks_audits (
time timestamp NOT NULL,
hash varchar(65) NOT NULL,
height int(10) unsigned NOT NULL,
missing_txs JSON NOT NULL,
added_txs JSON NOT NULL,
match_rate float unsigned NOT NULL,
PRIMARY KEY (hash),
INDEX (height)
) ENGINE=InnoDB DEFAULT CHARSET=utf8;`;
}
private getCreateGeoNamesTableQuery(): string {
return `CREATE TABLE geo_names (
id int(11) unsigned NOT NULL,
type enum('city','country','division','continent') NOT NULL,
names text DEFAULT NULL,
UNIQUE KEY id (id,type),
KEY id_2 (id)
) ENGINE=InnoDB DEFAULT CHARSET=utf8;`;
}
private getCreateBlocksPricesTableQuery(): string {
return `CREATE TABLE IF NOT EXISTS blocks_prices (
height int(10) unsigned NOT NULL,
price_id int(10) unsigned NOT NULL,
PRIMARY KEY (height),
INDEX (price_id)
) ENGINE=InnoDB DEFAULT CHARSET=utf8;`;
}
private getCreateLNNodesSocketsTableQuery(): string {
return `CREATE TABLE IF NOT EXISTS nodes_sockets (
public_key varchar(66) NOT NULL,
socket varchar(100) NOT NULL,
type enum('ipv4', 'ipv6', 'torv2', 'torv3', 'i2p', 'dns', 'websocket') NULL,
UNIQUE KEY public_key_socket (public_key, socket),
INDEX (public_key)
) ENGINE=InnoDB DEFAULT CHARSET=utf8;`;
}
private getCreateLNNodeRecordsTableQuery(): string {
return `CREATE TABLE IF NOT EXISTS nodes_records (
public_key varchar(66) NOT NULL,
type int(10) unsigned NOT NULL,
payload blob NOT NULL,
UNIQUE KEY public_key_type (public_key, type),
INDEX (public_key),
FOREIGN KEY (public_key)
REFERENCES nodes (public_key)
ON DELETE CASCADE
) ENGINE=InnoDB DEFAULT CHARSET=utf8;`;
}
private getCreateCPFPTableQuery(): string {
return `CREATE TABLE IF NOT EXISTS cpfp_clusters (
root varchar(65) NOT NULL,
height int(10) NOT NULL,
txs JSON DEFAULT NULL,
fee_rate double unsigned NOT NULL,
PRIMARY KEY (root)
) ENGINE=InnoDB DEFAULT CHARSET=utf8;`;
}
private getCreateTransactionsTableQuery(): string {
return `CREATE TABLE IF NOT EXISTS transactions (
txid varchar(65) NOT NULL,
cluster varchar(65) DEFAULT NULL,
PRIMARY KEY (txid),
FOREIGN KEY (cluster) REFERENCES cpfp_clusters (root) ON DELETE SET NULL
) ENGINE=InnoDB DEFAULT CHARSET=utf8;`;
}
public async $truncateIndexedData(tables: string[]) {
const allowedTables = ['blocks', 'hashrates', 'prices'];
try {
for (const table of tables) {
if (!allowedTables.includes(table)) {
logger.debug(`Table ${table} cannot be re-indexed (not allowed)`);
continue;
}
await this.$executeQuery(`TRUNCATE ${table}`, true);
if (table === 'hashrates') {
await this.$executeQuery('UPDATE state set number = 0 where name = "last_hashrates_indexing"', true);
}
logger.notice(`Table ${table} has been truncated`);
}
} catch (e) {
logger.warn(`Unable to erase indexed data. Reason: ${e instanceof Error ? e.message : e}`);
}
}
}
export default new DatabaseMigration();
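// For reference, every schema bump above follows the same guarded pattern: bump
// `DatabaseMigration.currentVersion`, append a version-gated block inside
// $createMissingTablesAndIndexes(), and persist the new version (standalone data-migration
// queries instead go through the transactional $migrateTableSchemaFromVersion() path).
// A hypothetical version-50 step, with `example_column` as an invented name purely for
// illustration, would read:
//
//   if (databaseSchemaVersion < 50 && isBitcoin === true) {
//     await this.$executeQuery('ALTER TABLE `blocks` ADD example_column int(11) DEFAULT 0');
//     await this.updateToSchemaVersion(50);
//   }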

View File

@@ -1,102 +0,0 @@
import config from '../config';
import { IDifficultyAdjustment } from '../mempool.interfaces';
import blocks from './blocks';
export interface DifficultyAdjustment {
progressPercent: number; // Percent: 0 to 100
difficultyChange: number; // Percent: -75 to 300
estimatedRetargetDate: number; // Unix time in ms
remainingBlocks: number; // Block count
remainingTime: number; // Duration of time in ms
previousRetarget: number; // Percent: -75 to 300
nextRetargetHeight: number; // Block Height
timeAvg: number; // Duration of time in ms
timeOffset: number; // (Testnet) Time since last block (cap @ 20min) in ms
}
export function calcDifficultyAdjustment(
DATime: number,
nowSeconds: number,
blockHeight: number,
previousRetarget: number,
network: string,
latestBlockTimestamp: number,
): DifficultyAdjustment {
const ESTIMATE_LAG_BLOCKS = 146; // For first 7.2% of epoch, don't estimate.
const EPOCH_BLOCK_LENGTH = 2016; // Bitcoin mainnet
const BLOCK_SECONDS_TARGET = 600; // Bitcoin mainnet
const TESTNET_MAX_BLOCK_SECONDS = 1200; // Bitcoin testnet
const diffSeconds = nowSeconds - DATime;
const blocksInEpoch = (blockHeight >= 0) ? blockHeight % EPOCH_BLOCK_LENGTH : 0;
const progressPercent = (blockHeight >= 0) ? blocksInEpoch / EPOCH_BLOCK_LENGTH * 100 : 100;
const remainingBlocks = EPOCH_BLOCK_LENGTH - blocksInEpoch;
const nextRetargetHeight = (blockHeight >= 0) ? blockHeight + remainingBlocks : 0;
let difficultyChange = 0;
let timeAvgSecs = BLOCK_SECONDS_TARGET;
// Only calculate the estimate once we have 7.2% of blocks in current epoch
if (blocksInEpoch >= ESTIMATE_LAG_BLOCKS) {
timeAvgSecs = diffSeconds / blocksInEpoch;
difficultyChange = (BLOCK_SECONDS_TARGET / timeAvgSecs - 1) * 100;
// Max increase is x4 (+300%)
if (difficultyChange > 300) {
difficultyChange = 300;
}
// Max decrease is /4 (-75%)
if (difficultyChange < -75) {
difficultyChange = -75;
}
}
// Testnet difficulty is set to 1 after 20 minutes of no blocks,
// therefore the time between blocks will always be below 20 minutes (1200s).
let timeOffset = 0;
if (network === 'testnet') {
if (timeAvgSecs > TESTNET_MAX_BLOCK_SECONDS) {
timeAvgSecs = TESTNET_MAX_BLOCK_SECONDS;
}
const secondsSinceLastBlock = nowSeconds - latestBlockTimestamp;
if (secondsSinceLastBlock + timeAvgSecs > TESTNET_MAX_BLOCK_SECONDS) {
timeOffset = -Math.min(secondsSinceLastBlock, TESTNET_MAX_BLOCK_SECONDS) * 1000;
}
}
const timeAvg = Math.floor(timeAvgSecs * 1000);
const remainingTime = remainingBlocks * timeAvg;
const estimatedRetargetDate = remainingTime + nowSeconds * 1000;
return {
progressPercent,
difficultyChange,
estimatedRetargetDate,
remainingBlocks,
remainingTime,
previousRetarget,
nextRetargetHeight,
timeAvg,
timeOffset,
};
}
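// A hand-checked example of the estimator above, with values chosen for illustration:
// halfway through a mainnet epoch whose blocks have averaged 550 seconds.
// blocksInEpoch = 1008 (past the 146-block estimate lag), progressPercent = 50,
// timeAvgSecs = 554400 / 1008 = 550, difficultyChange = (600 / 550 - 1) * 100 ~ +9.09%
// (inside the [-75%, +300%] clamps), remainingTime = 1008 * 550000 ms.
const da = calcDifficultyAdjustment(0, 554400, 1008, 0, 'mainnet', 0);
console.assert(da.remainingBlocks === 1008 && da.timeAvg === 550000);
console.assert(Math.abs(da.difficultyChange - 9.0909) < 0.001);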
class DifficultyAdjustmentApi {
public getDifficultyAdjustment(): IDifficultyAdjustment | null {
const DATime = blocks.getLastDifficultyAdjustmentTime();
const previousRetarget = blocks.getPreviousDifficultyRetarget();
const blockHeight = blocks.getCurrentBlockHeight();
const blocksCache = blocks.getBlocks();
const latestBlock = blocksCache[blocksCache.length - 1];
if (!latestBlock) {
return null;
}
const nowSeconds = Math.floor(new Date().getTime() / 1000);
return calcDifficultyAdjustment(
DATime, nowSeconds, blockHeight, previousRetarget,
config.MEMPOOL.NETWORK, latestBlock.timestamp
);
}
}
export default new DifficultyAdjustmentApi();

View File

@@ -1,119 +1,15 @@
import * as fs from 'fs';
const fsPromises = fs.promises;
import cluster from 'cluster';
import memPool from './mempool';
import blocks from './blocks';
import logger from '../logger';
import config from '../config';
import { TransactionExtended } from '../mempool.interfaces';
import { Common } from './common';
class DiskCache {
private cacheSchemaVersion = 1;
private static FILE_NAME = config.MEMPOOL.CACHE_DIR + '/cache.json';
private static FILE_NAMES = config.MEMPOOL.CACHE_DIR + '/cache{number}.json';
private static CHUNK_FILES = 25;
private isWritingCache = false;
constructor() { }
async $saveCacheToDisk(): Promise<void> {
if (!cluster.isPrimary) {
return;
}
if (this.isWritingCache) {
logger.debug('Saving cache already in progress. Skipping.');
return;
}
try {
logger.debug('Writing mempool and blocks data to disk cache (async)...');
this.isWritingCache = true;
const mempool = memPool.getMempool();
const mempoolArray: TransactionExtended[] = [];
for (const tx in mempool) {
mempoolArray.push(mempool[tx]);
}
Common.shuffleArray(mempoolArray);
// Use ceil so the final chunk picks up the remainder (floor could leave up to CHUNK_FILES - 1 transactions unwritten)
const chunkSize = Math.ceil(mempoolArray.length / DiskCache.CHUNK_FILES);
await fsPromises.writeFile(DiskCache.FILE_NAME, JSON.stringify({
cacheSchemaVersion: this.cacheSchemaVersion,
blocks: blocks.getBlocks(),
blockSummaries: blocks.getBlockSummaries(),
mempool: {},
mempoolArray: mempoolArray.splice(0, chunkSize),
}), { flag: 'w' });
for (let i = 1; i < DiskCache.CHUNK_FILES; i++) {
await fsPromises.writeFile(DiskCache.FILE_NAMES.replace('{number}', i.toString()), JSON.stringify({
mempool: {},
mempoolArray: mempoolArray.splice(0, chunkSize),
}), { flag: 'w' });
}
logger.debug('Mempool and blocks data saved to disk cache');
this.isWritingCache = false;
} catch (e) {
logger.warn('Error writing to cache file: ' + (e instanceof Error ? e.message : e));
this.isWritingCache = false;
}
}
saveData(dataBlob: string) {
fs.writeFileSync(DiskCache.FILE_NAME, dataBlob, 'utf8');
}
wipeCache() {
fs.unlinkSync(DiskCache.FILE_NAME);
for (let i = 1; i < DiskCache.CHUNK_FILES; i++) {
fs.unlinkSync(DiskCache.FILE_NAMES.replace('{number}', i.toString()));
}
}
loadMempoolCache() {
if (!fs.existsSync(DiskCache.FILE_NAME)) {
return;
}
try {
let data: any = {};
const cacheData = fs.readFileSync(DiskCache.FILE_NAME, 'utf8');
if (cacheData) {
logger.info('Restoring mempool and blocks data from disk cache');
data = JSON.parse(cacheData);
if (data.cacheSchemaVersion === undefined || data.cacheSchemaVersion !== this.cacheSchemaVersion) {
logger.notice('Disk cache contains an outdated schema version. Clearing it and skipping the cache loading.');
return this.wipeCache();
}
if (data.mempoolArray) {
for (const tx of data.mempoolArray) {
data.mempool[tx.txid] = tx;
}
}
}
for (let i = 1; i < DiskCache.CHUNK_FILES; i++) {
const fileName = DiskCache.FILE_NAMES.replace('{number}', i.toString());
try {
if (fs.existsSync(fileName)) {
const cacheData2 = JSON.parse(fs.readFileSync(fileName, 'utf8'));
if (cacheData2.mempoolArray) {
for (const tx of cacheData2.mempoolArray) {
data.mempool[tx.txid] = tx;
}
} else {
Object.assign(data.mempool, cacheData2.mempool);
}
}
} catch (e) {
logger.info('Error parsing ' + fileName + '. Skipping. Reason: ' + (e instanceof Error ? e.message : e));
}
}
memPool.setMempool(data.mempool);
blocks.setBlocks(data.blocks);
blocks.setBlockSummaries(data.blockSummaries || []);
} catch (e) {
logger.warn('Failed to parse mempool and blocks cache. Skipping. Reason: ' + (e instanceof Error ? e.message : e));
}
}
loadData(): string {
return fs.readFileSync(DiskCache.FILE_NAME, 'utf8');
}
}
export default new DiskCache();
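// A quick sanity check of the chunking arithmetic in $saveCacheToDisk (illustrative values,
// not repo code): splice() drains the array in place, so repeated fixed-size splices
// partition it, and ceil-sized chunks guarantee nothing is left over.
const txs = Array.from({ length: 103 }, (_, i) => i);
const chunk = Math.ceil(txs.length / 25); // 5
const chunks: number[][] = [];
for (let i = 0; i < 25; i++) {
  chunks.push(txs.splice(0, chunk)); // later splices return short or empty arrays
}
console.assert(chunks.flat().length === 103); // with Math.floor (chunk = 4) only 100 would be written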

View File

@@ -1,700 +0,0 @@
import logger from '../../logger';
import DB from '../../database';
import nodesApi from './nodes.api';
import { ResultSetHeader } from 'mysql2';
import { ILightningApi } from '../lightning/lightning-api.interface';
import { Common } from '../common';
class ChannelsApi {
public async $getAllChannels(): Promise<any[]> {
try {
const query = `SELECT * FROM channels`;
const [rows]: any = await DB.query(query);
return rows;
} catch (e) {
logger.err('$getAllChannels error: ' + (e instanceof Error ? e.message : e));
throw e;
}
}
public async $getAllChannelsGeo(publicKey?: string, style?: string): Promise<any[]> {
try {
let select: string;
if (style === 'widget') {
select = `
nodes_1.latitude AS node1_latitude, nodes_1.longitude AS node1_longitude,
nodes_2.latitude AS node2_latitude, nodes_2.longitude AS node2_longitude
`;
} else {
select = `
nodes_1.public_key as node1_public_key, nodes_1.alias AS node1_alias,
nodes_1.latitude AS node1_latitude, nodes_1.longitude AS node1_longitude,
nodes_2.public_key as node2_public_key, nodes_2.alias AS node2_alias,
nodes_2.latitude AS node2_latitude, nodes_2.longitude AS node2_longitude
`;
}
const params: string[] = [];
let query = `SELECT ${select}
FROM channels
JOIN nodes AS nodes_1 on nodes_1.public_key = channels.node1_public_key
JOIN nodes AS nodes_2 on nodes_2.public_key = channels.node2_public_key
WHERE channels.status = 1
AND nodes_1.latitude IS NOT NULL AND nodes_1.longitude IS NOT NULL
AND nodes_2.latitude IS NOT NULL AND nodes_2.longitude IS NOT NULL
`;
if (publicKey !== undefined) {
query += ' AND (nodes_1.public_key = ? OR nodes_2.public_key = ?)';
params.push(publicKey);
params.push(publicKey);
} else {
query += ` AND channels.capacity > 1000000
GROUP BY nodes_1.public_key, nodes_2.public_key
ORDER BY channels.capacity DESC
LIMIT 10000
`;
}
const [rows]: any = await DB.query(query, params);
return rows.map((row) => {
if (style === 'widget') {
return [
row.node1_longitude, row.node1_latitude,
row.node2_longitude, row.node2_latitude,
];
} else {
return [
row.node1_public_key, row.node1_alias,
row.node1_longitude, row.node1_latitude,
row.node2_public_key, row.node2_alias,
row.node2_longitude, row.node2_latitude,
];
}
});
} catch (e) {
logger.err('$getAllChannelsGeo error: ' + (e instanceof Error ? e.message : e));
throw e;
}
}
public async $searchChannelsById(search: string): Promise<any[]> {
try {
const searchStripped = search.replace('%', '') + '%';
const query = `SELECT id, short_id, capacity, status FROM channels WHERE id LIKE ? OR short_id LIKE ? LIMIT 10`;
const [rows]: any = await DB.query(query, [searchStripped, searchStripped]);
return rows;
} catch (e) {
logger.err('$searchChannelsById error: ' + (e instanceof Error ? e.message : e));
throw e;
}
}
public async $getChannelsByStatus(status: number | number[]): Promise<any[]> {
try {
let query: string;
if (Array.isArray(status)) {
query = `SELECT * FROM channels WHERE status IN (${status.join(',')})`;
} else {
query = `SELECT * FROM channels WHERE status = ?`;
}
const [rows]: any = await DB.query(query, [status]);
return rows;
} catch (e) {
logger.err('$getChannelsByStatus error: ' + (e instanceof Error ? e.message : e));
throw e;
}
}
public async $getClosedChannelsWithoutReason(): Promise<any[]> {
try {
const query = `SELECT * FROM channels WHERE status = 2 AND closing_reason IS NULL AND closing_transaction_id != ''`;
const [rows]: any = await DB.query(query);
return rows;
} catch (e) {
logger.err('$getClosedChannelsWithoutReason error: ' + (e instanceof Error ? e.message : e));
throw e;
}
}
public async $getUnresolvedClosedChannels(): Promise<any[]> {
try {
const query = `SELECT * FROM channels WHERE status = 2 AND closing_reason = 2 AND closing_resolved = 0 AND closing_transaction_id != ''`;
const [rows]: any = await DB.query(query);
return rows;
} catch (e) {
logger.err('$getUnresolvedClosedChannels error: ' + (e instanceof Error ? e.message : e));
throw e;
}
}
public async $getChannelsWithoutSourceChecked(): Promise<any[]> {
try {
const query = `
SELECT channels.*
FROM channels
WHERE channels.source_checked != 1
`;
const [rows]: any = await DB.query(query);
return rows;
} catch (e) {
logger.err('$getChannelsWithoutSourceChecked error: ' + (e instanceof Error ? e.message : e));
throw e;
}
}
public async $getChannelsWithoutCreatedDate(): Promise<any[]> {
try {
const query = `SELECT * FROM channels WHERE created IS NULL`;
const [rows]: any = await DB.query(query);
return rows;
} catch (e) {
logger.err('$getChannelsWithoutCreatedDate error: ' + (e instanceof Error ? e.message : e));
throw e;
}
}
public async $getChannel(id: string): Promise<any> {
try {
const query = `
SELECT n1.alias AS alias_left, n1.longitude as node1_longitude, n1.latitude as node1_latitude,
n2.alias AS alias_right, n2.longitude as node2_longitude, n2.latitude as node2_latitude,
channels.*,
ns1.channels AS channels_left, ns1.capacity AS capacity_left, ns2.channels AS channels_right, ns2.capacity AS capacity_right
FROM channels
LEFT JOIN nodes AS n1 ON n1.public_key = channels.node1_public_key
LEFT JOIN nodes AS n2 ON n2.public_key = channels.node2_public_key
LEFT JOIN node_stats AS ns1 ON ns1.public_key = channels.node1_public_key
LEFT JOIN node_stats AS ns2 ON ns2.public_key = channels.node2_public_key
WHERE (
ns1.id = (
SELECT MAX(id)
FROM node_stats
WHERE public_key = channels.node1_public_key
)
AND ns2.id = (
SELECT MAX(id)
FROM node_stats
WHERE public_key = channels.node2_public_key
)
)
AND channels.id = ?
`;
const [rows]: any = await DB.query(query, [id]);
if (rows[0]) {
return this.convertChannel(rows[0]);
}
} catch (e) {
logger.err('$getChannel error: ' + (e instanceof Error ? e.message : e));
throw e;
}
}
public async $getChannelsStats(): Promise<any> {
try {
// Feedback from zerofeerouting:
// "I would argue > 5000ppm can be ignored. Channels charging more than .5% fee are ignored by CLN for example."
const ignoredFeeRateThreshold = 5000;
const ignoredBaseFeeThreshold = 5000;
// Capacity
let query = `SELECT AVG(capacity) AS avgCapacity FROM channels WHERE status = 1 ORDER BY capacity`;
const [avgCapacity]: any = await DB.query(query);
query = `SELECT capacity FROM channels WHERE status = 1 ORDER BY capacity`;
let [capacity]: any = await DB.query(query);
capacity = capacity.map(row => row.capacity);
const medianCapacity = capacity[Math.floor(capacity.length / 2)];
// Fee rates
query = `SELECT node1_fee_rate FROM channels WHERE node1_fee_rate < ${ignoredFeeRateThreshold} AND status = 1`;
let [feeRates1]: any = await DB.query(query);
feeRates1 = feeRates1.map(rate => rate.node1_fee_rate);
query = `SELECT node2_fee_rate FROM channels WHERE node2_fee_rate < ${ignoredFeeRateThreshold} AND status = 1`;
let [feeRates2]: any = await DB.query(query);
feeRates2 = feeRates2.map(rate => rate.node2_fee_rate);
let feeRates = (feeRates1.concat(feeRates2)).sort((a, b) => a - b);
let avgFeeRate = 0;
for (const rate of feeRates) {
avgFeeRate += rate;
}
avgFeeRate /= feeRates.length;
const medianFeeRate = feeRates[Math.floor(feeRates.length / 2)];
// Base fees
query = `SELECT node1_base_fee_mtokens FROM channels WHERE node1_base_fee_mtokens < ${ignoredBaseFeeThreshold} AND status = 1`;
let [baseFees1]: any = await DB.query(query);
baseFees1 = baseFees1.map(rate => rate.node1_base_fee_mtokens);
query = `SELECT node2_base_fee_mtokens FROM channels WHERE node2_base_fee_mtokens < ${ignoredBaseFeeThreshold} AND status = 1`;
let [baseFees2]: any = await DB.query(query);
baseFees2 = baseFees2.map(rate => rate.node2_base_fee_mtokens);
let baseFees = (baseFees1.concat(baseFees2)).sort((a, b) => a - b);
let avgBaseFee = 0;
for (const fee of baseFees) {
avgBaseFee += fee;
}
avgBaseFee /= baseFees.length;
const medianBaseFee = baseFees[Math.floor(baseFees.length / 2)];
return {
avgCapacity: parseInt(avgCapacity[0].avgCapacity, 10),
avgFeeRate: avgFeeRate,
avgBaseFee: avgBaseFee,
medianCapacity: medianCapacity,
medianFeeRate: medianFeeRate,
medianBaseFee: medianBaseFee,
}
} catch (e) {
logger.err(`Cannot calculate channels statistics. Reason: ${e instanceof Error ? e.message : e}`);
throw e;
}
}
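// As a reminder of the convention used above, both medians are taken as the element at
// floor(length / 2) of the sorted array (the upper median for even lengths). A tiny worked
// example with illustrative values only:
//   const rates = [1000, 10, 400, 20].sort((a, b) => a - b); // [10, 20, 400, 1000]
//   rates[Math.floor(rates.length / 2)]                      // 400, the upper median
//   rates.reduce((sum, r) => sum + r, 0) / rates.length      // average = 357.5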
public async $getChannelsByTransactionId(transactionIds: string[]): Promise<any[]> {
try {
const query = `
SELECT n1.alias AS alias_left, n2.alias AS alias_right, channels.*
FROM channels
LEFT JOIN nodes AS n1 ON n1.public_key = channels.node1_public_key
LEFT JOIN nodes AS n2 ON n2.public_key = channels.node2_public_key
WHERE channels.transaction_id IN ? OR channels.closing_transaction_id IN ?
`;
const [rows]: any = await DB.query(query, [[transactionIds], [transactionIds]]);
const channels = rows.map((row) => this.convertChannel(row));
return channels;
} catch (e) {
logger.err('$getChannelsByTransactionId error: ' + (e instanceof Error ? e.message : e));
throw e;
}
}
public async $getChannelByClosingId(transactionId: string): Promise<any> {
try {
const query = `
SELECT
channels.*
FROM channels
WHERE channels.closing_transaction_id = ?
`;
const [rows]: any = await DB.query(query, [transactionId]);
if (rows.length > 0) {
rows[0].outputs = JSON.parse(rows[0].outputs);
return rows[0];
}
} catch (e) {
logger.err('$getChannelByClosingId error: ' + (e instanceof Error ? e.message : e));
// don't throw - this data isn't essential
}
}
public async $getChannelsByOpeningId(transactionId: string): Promise<any> {
try {
const query = `
SELECT
channels.*
FROM channels
WHERE channels.transaction_id = ?
`;
const [rows]: any = await DB.query(query, [transactionId]);
if (rows.length > 0) {
return rows.map(row => {
row.outputs = JSON.parse(row.outputs);
return row;
});
}
} catch (e) {
logger.err('$getChannelsByOpeningId error: ' + (e instanceof Error ? e.message : e));
// don't throw - this data isn't essential
}
}
public async $updateClosingInfo(channelInfo: { id: string, node1_closing_balance: number, node2_closing_balance: number, closed_by: string | null, closing_fee: number, outputs: ILightningApi.ForensicOutput[]}): Promise<void> {
try {
const query = `
UPDATE channels SET
node1_closing_balance = ?,
node2_closing_balance = ?,
closed_by = ?,
closing_fee = ?,
outputs = ?
WHERE channels.id = ?
`;
await DB.query<ResultSetHeader>(query, [
channelInfo.node1_closing_balance || 0,
channelInfo.node2_closing_balance || 0,
channelInfo.closed_by,
channelInfo.closing_fee || 0,
JSON.stringify(channelInfo.outputs),
channelInfo.id,
]);
} catch (e) {
logger.err('$updateClosingInfo error: ' + (e instanceof Error ? e.message : e));
// don't throw - this data isn't essential
}
}
public async $updateOpeningInfo(channelInfo: { id: string, node1_funding_balance: number, node2_funding_balance: number, funding_ratio: number, single_funded: boolean | void }): Promise<void> {
try {
const query = `
UPDATE channels SET
node1_funding_balance = ?,
node2_funding_balance = ?,
funding_ratio = ?,
single_funded = ?
WHERE channels.id = ?
`;
await DB.query<ResultSetHeader>(query, [
channelInfo.node1_funding_balance || 0,
channelInfo.node2_funding_balance || 0,
channelInfo.funding_ratio,
channelInfo.single_funded ? 1 : 0,
channelInfo.id,
]);
} catch (e) {
logger.err('$updateOpeningInfo error: ' + (e instanceof Error ? e.message : e));
// don't throw - this data isn't essential
}
}
public async $markChannelSourceChecked(id: string): Promise<void> {
try {
const query = `
UPDATE channels
SET source_checked = 1
WHERE id = ?
`;
await DB.query<ResultSetHeader>(query, [id]);
} catch (e) {
logger.err('$markChannelSourceChecked error: ' + (e instanceof Error ? e.message : e));
// don't throw - this data isn't essential
}
}
public async $getChannelsForNode(public_key: string, index: number, length: number, status: string): Promise<any[]> {
try {
let channelStatusFilter;
if (status === 'open') {
channelStatusFilter = '< 2';
} else if (status === 'active') {
channelStatusFilter = '= 1';
} else if (status === 'closed') {
channelStatusFilter = '= 2';
} else {
throw new Error('getChannelsForNode: Invalid status requested');
}
// Channels originating from node
let query = `
SELECT COALESCE(node2.alias, SUBSTRING(node2_public_key, 1, 20)) AS alias, COALESCE(node2.public_key, node2_public_key) AS public_key,
channels.status, channels.node1_fee_rate,
channels.capacity, channels.short_id, channels.id, channels.closing_reason,
UNIX_TIMESTAMP(closing_date) as closing_date, UNIX_TIMESTAMP(channels.updated_at) as updated_at
FROM channels
LEFT JOIN nodes AS node2 ON node2.public_key = channels.node2_public_key
WHERE node1_public_key = ? AND channels.status ${channelStatusFilter}
`;
const [channelsFromNode]: any = await DB.query(query, [public_key]);
// Channels incoming to node
query = `
SELECT COALESCE(node1.alias, SUBSTRING(node1_public_key, 1, 20)) AS alias, COALESCE(node1.public_key, node1_public_key) AS public_key,
channels.status, channels.node2_fee_rate,
channels.capacity, channels.short_id, channels.id, channels.closing_reason,
UNIX_TIMESTAMP(closing_date) as closing_date, UNIX_TIMESTAMP(channels.updated_at) as updated_at
FROM channels
LEFT JOIN nodes AS node1 ON node1.public_key = channels.node1_public_key
WHERE node2_public_key = ? AND channels.status ${channelStatusFilter}
`;
const [channelsToNode]: any = await DB.query(query, [public_key]);
let allChannels = channelsFromNode.concat(channelsToNode);
allChannels.sort((a, b) => {
if (status === 'closed') {
if (!b.closing_date && !a.closing_date) {
return (b.updated_at ?? 0) - (a.updated_at ?? 0);
} else {
return (b.closing_date ?? 0) - (a.closing_date ?? 0);
}
} else {
return b.capacity - a.capacity;
}
});
if (index >= 0) {
allChannels = allChannels.slice(index, index + length);
} else if (index === -1) { // Node channels tree chart
allChannels = allChannels.slice(0, 1000);
}
const channels: any[] = []
for (const row of allChannels) {
let channel;
if (index >= 0) {
const activeChannelsStats: any = await nodesApi.$getActiveChannelsStats(row.public_key);
channel = {
status: row.status,
closing_reason: row.closing_reason,
closing_date: row.closing_date,
capacity: row.capacity ?? 0,
short_id: row.short_id,
id: row.id,
fee_rate: row.node1_fee_rate ?? row.node2_fee_rate ?? 0,
node: {
alias: row.alias.length > 0 ? row.alias : row.public_key.slice(0, 20),
public_key: row.public_key,
channels: activeChannelsStats.active_channel_count ?? 0,
capacity: activeChannelsStats.capacity ?? 0,
}
};
} else if (index === -1) {
channel = {
capacity: row.capacity ?? 0,
short_id: row.short_id,
id: row.id,
node: {
alias: row.alias.length > 0 ? row.alias : row.public_key.slice(0, 20),
public_key: row.public_key,
}
};
}
channels.push(channel);
}
return channels;
} catch (e) {
logger.err('$getChannelsForNode error: ' + (e instanceof Error ? e.message : e));
throw e;
}
}
public async $getChannelsCountForNode(public_key: string, status: string): Promise<any> {
try {
// Default active and inactive channels
let statusQuery = '< 2';
// Closed channels only
if (status === 'closed') {
statusQuery = '= 2';
}
const query = `
SELECT COUNT(*) AS count
FROM channels
WHERE (node1_public_key = ? OR node2_public_key = ?)
AND status ${statusQuery}
`;
const [rows]: any = await DB.query(query, [public_key, public_key]);
return rows[0]['count'];
} catch (e) {
logger.err('$getChannelsCountForNode error: ' + (e instanceof Error ? e.message : e));
throw e;
}
}
private convertChannel(channel: any): any {
return {
'id': channel.id,
'short_id': channel.short_id,
'capacity': channel.capacity,
'transaction_id': channel.transaction_id,
'transaction_vout': channel.transaction_vout,
'closing_transaction_id': channel.closing_transaction_id,
'closing_fee': channel.closing_fee,
'closing_reason': channel.closing_reason,
'closing_date': channel.closing_date,
'updated_at': channel.updated_at,
'created': channel.created,
'status': channel.status,
'funding_ratio': channel.funding_ratio,
'closed_by': channel.closed_by,
'single_funded': !!channel.single_funded,
'node_left': {
'alias': channel.alias_left,
'public_key': channel.node1_public_key,
'channels': channel.channels_left,
'capacity': channel.capacity_left,
'base_fee_mtokens': channel.node1_base_fee_mtokens,
'cltv_delta': channel.node1_cltv_delta,
'fee_rate': channel.node1_fee_rate,
'is_disabled': channel.node1_is_disabled,
'max_htlc_mtokens': channel.node1_max_htlc_mtokens,
'min_htlc_mtokens': channel.node1_min_htlc_mtokens,
'updated_at': channel.node1_updated_at,
'longitude': channel.node1_longitude,
'latitude': channel.node1_latitude,
'funding_balance': channel.node1_funding_balance,
'closing_balance': channel.node1_closing_balance,
'initiated_close': channel.closed_by === channel.node1_public_key ? true : undefined,
},
'node_right': {
'alias': channel.alias_right,
'public_key': channel.node2_public_key,
'channels': channel.channels_right,
'capacity': channel.capacity_right,
'base_fee_mtokens': channel.node2_base_fee_mtokens,
'cltv_delta': channel.node2_cltv_delta,
'fee_rate': channel.node2_fee_rate,
'is_disabled': channel.node2_is_disabled,
'max_htlc_mtokens': channel.node2_max_htlc_mtokens,
'min_htlc_mtokens': channel.node2_min_htlc_mtokens,
'updated_at': channel.node2_updated_at,
'longitude': channel.node2_longitude,
'latitude': channel.node2_latitude,
'funding_balance': channel.node2_funding_balance,
'closing_balance': channel.node2_closing_balance,
'initiated_close': channel.closed_by === channel.node2_public_key ? true : undefined,
},
};
}
/**
* Save or update a channel present in the graph
*/
public async $saveChannel(channel: ILightningApi.Channel, status = 1): Promise<void> {
const [ txid, vout ] = channel.chan_point.split(':');
const policy1: Partial<ILightningApi.RoutingPolicy> = channel.node1_policy || {};
const policy2: Partial<ILightningApi.RoutingPolicy> = channel.node2_policy || {};
const query = `INSERT INTO channels
(
id,
short_id,
capacity,
transaction_id,
transaction_vout,
updated_at,
status,
node1_public_key,
node1_base_fee_mtokens,
node1_cltv_delta,
node1_fee_rate,
node1_is_disabled,
node1_max_htlc_mtokens,
node1_min_htlc_mtokens,
node1_updated_at,
node2_public_key,
node2_base_fee_mtokens,
node2_cltv_delta,
node2_fee_rate,
node2_is_disabled,
node2_max_htlc_mtokens,
node2_min_htlc_mtokens,
node2_updated_at
)
VALUES (?, ?, ?, ?, ?, ?, ${status}, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?)
ON DUPLICATE KEY UPDATE
capacity = ?,
updated_at = ?,
status = ${status},
node1_public_key = ?,
node1_base_fee_mtokens = ?,
node1_cltv_delta = ?,
node1_fee_rate = ?,
node1_is_disabled = ?,
node1_max_htlc_mtokens = ?,
node1_min_htlc_mtokens = ?,
node1_updated_at = ?,
node2_public_key = ?,
node2_base_fee_mtokens = ?,
node2_cltv_delta = ?,
node2_fee_rate = ?,
node2_is_disabled = ?,
node2_max_htlc_mtokens = ?,
node2_min_htlc_mtokens = ?,
node2_updated_at = ?
;`;
await DB.query(query, [
Common.channelShortIdToIntegerId(channel.channel_id),
Common.channelIntegerIdToShortId(channel.channel_id),
channel.capacity,
txid,
vout,
Common.utcDateToMysql(channel.last_update),
channel.node1_pub,
policy1.fee_base_msat,
policy1.time_lock_delta,
policy1.fee_rate_milli_msat,
policy1.disabled,
policy1.max_htlc_msat,
policy1.min_htlc,
Common.utcDateToMysql(policy1.last_update),
channel.node2_pub,
policy2.fee_base_msat,
policy2.time_lock_delta,
policy2.fee_rate_milli_msat,
policy2.disabled,
policy2.max_htlc_msat,
policy2.min_htlc,
Common.utcDateToMysql(policy2.last_update),
channel.capacity,
Common.utcDateToMysql(channel.last_update),
channel.node1_pub,
policy1.fee_base_msat,
policy1.time_lock_delta,
policy1.fee_rate_milli_msat,
policy1.disabled,
policy1.max_htlc_msat,
policy1.min_htlc,
Common.utcDateToMysql(policy1.last_update),
channel.node2_pub,
policy2.fee_base_msat,
policy2.time_lock_delta,
policy2.fee_rate_milli_msat,
policy2.disabled,
policy2.max_htlc_msat,
policy2.min_htlc,
Common.utcDateToMysql(policy2.last_update)
]);
}
/**
* Set all channels not in `graphChannelsIds` as inactive (status = 0)
*/
public async $setChannelsInactive(graphChannelsIds: string[]): Promise<void> {
if (graphChannelsIds.length === 0) {
return;
}
try {
const result = await DB.query<ResultSetHeader>(`
UPDATE channels
SET status = 0
WHERE id NOT IN (
${graphChannelsIds.map(id => `"${id}"`).join(',')}
)
AND status != 2
`);
if ((result[0].changedRows ?? 0) > 0) {
logger.info(`Marked ${result[0].changedRows} channels as inactive because they are not in the graph`);
} else {
logger.debug(`Marked ${result[0].changedRows} channels as inactive because they are not in the graph`);
}
} catch (e) {
logger.err('$setChannelsInactive() error: ' + (e instanceof Error ? e.message : e));
}
}
public async $getLatestChannelUpdateForNode(publicKey: string): Promise<number> {
try {
const query = `
SELECT MAX(UNIX_TIMESTAMP(updated_at)) as updated_at
FROM channels
WHERE node1_public_key = ?
`;
const [rows]: any[] = await DB.query(query, [publicKey]);
if (rows.length > 0) {
return rows[0].updated_at;
}
} catch (e) {
logger.err(`Can't getLatestChannelUpdateForNode for ${publicKey}. Reason ${e instanceof Error ? e.message : e}`);
}
return 0;
}
}
export default new ChannelsApi();
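$saveChannel above relies on Common.channelShortIdToIntegerId and Common.channelIntegerIdToShortId, whose implementations are not part of this diff. A minimal sketch, assuming they follow the standard BOLT 7 packing of "block x txIndex x vout" into a 64-bit integer (an illustration only, not necessarily the project's exact code):

function shortIdToIntegerId(shortId: string): string {
  // 'BLOCKxTXINDEXxVOUT' -> decimal string of the packed 64-bit channel id
  const [block, txIndex, vout] = shortId.split('x').map(s => BigInt(s));
  return ((block << 40n) | (txIndex << 16n) | vout).toString();
}

function integerIdToShortId(id: string): string {
  const n = BigInt(id); // BigInt, since the packed id exceeds Number.MAX_SAFE_INTEGER
  return `${n >> 40n}x${(n >> 16n) & 0xffffffn}x${n & 0xffffn}`;
}

// shortIdToIntegerId('700000x1234x1') === '769658139524071425'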

View File

@@ -1,126 +0,0 @@
import config from '../../config';
import { Application, Request, Response } from 'express';
import channelsApi from './channels.api';
class ChannelsRoutes {
constructor() { }
public initRoutes(app: Application) {
app
.get(config.MEMPOOL.API_URL_PREFIX + 'lightning/channels/txids', this.$getChannelsByTransactionIds)
.get(config.MEMPOOL.API_URL_PREFIX + 'lightning/channels/search/:search', this.$searchChannelsById)
.get(config.MEMPOOL.API_URL_PREFIX + 'lightning/channels/:short_id', this.$getChannel)
.get(config.MEMPOOL.API_URL_PREFIX + 'lightning/channels', this.$getChannelsForNode)
.get(config.MEMPOOL.API_URL_PREFIX + 'lightning/channels-geo', this.$getAllChannelsGeo)
.get(config.MEMPOOL.API_URL_PREFIX + 'lightning/channels-geo/:publicKey', this.$getAllChannelsGeo)
;
}
private async $searchChannelsById(req: Request, res: Response) {
try {
const channels = await channelsApi.$searchChannelsById(req.params.search);
res.json(channels);
} catch (e) {
res.status(500).send(e instanceof Error ? e.message : e);
}
}
private async $getChannel(req: Request, res: Response) {
try {
const channel = await channelsApi.$getChannel(req.params.short_id);
if (!channel) {
res.status(404).send('Channel not found');
return;
}
res.header('Pragma', 'public');
res.header('Cache-control', 'public');
res.setHeader('Expires', new Date(Date.now() + 1000 * 60).toUTCString());
res.json(channel);
} catch (e) {
res.status(500).send(e instanceof Error ? e.message : e);
}
}
private async $getChannelsForNode(req: Request, res: Response) {
try {
if (typeof req.query.public_key !== 'string') {
res.status(400).send('Missing parameter: public_key');
return;
}
const index = parseInt(typeof req.query.index === 'string' ? req.query.index : '0', 10) || 0;
const status: string = typeof req.query.status === 'string' ? req.query.status : '';
if (index < -1) {
res.status(400).send('Invalid index');
return;
}
if (['open', 'active', 'closed'].includes(status) === false) {
res.status(400).send('Invalid status');
return;
}
const channels = await channelsApi.$getChannelsForNode(req.query.public_key, index, 10, status);
const channelsCount = await channelsApi.$getChannelsCountForNode(req.query.public_key, status);
res.header('Pragma', 'public');
res.header('Cache-control', 'public');
res.setHeader('Expires', new Date(Date.now() + 1000 * 60).toUTCString());
res.header('X-Total-Count', channelsCount.toString());
res.json(channels);
} catch (e) {
res.status(500).send(e instanceof Error ? e.message : e);
}
}
private async $getChannelsByTransactionIds(req: Request, res: Response): Promise<void> {
try {
if (!Array.isArray(req.query.txId)) {
res.status(400).send('Not an array');
return;
}
const txIds: string[] = [];
for (const _txId in req.query.txId) {
if (typeof req.query.txId[_txId] === 'string') {
txIds.push(req.query.txId[_txId].toString());
}
}
const channels = await channelsApi.$getChannelsByTransactionId(txIds);
const result: any[] = [];
for (const txid of txIds) {
const inputs: any = {};
const outputs: any = {};
// Assuming that we only have one lightning close input in each transaction. This may not be true in the future
const foundChannelsFromInput = channels.find((channel) => channel.closing_transaction_id === txid);
if (foundChannelsFromInput) {
inputs[0] = foundChannelsFromInput;
}
const foundChannelsFromOutputs = channels.filter((channel) => channel.transaction_id === txid);
for (const output of foundChannelsFromOutputs) {
outputs[output.transaction_vout] = output;
}
result.push({
inputs,
outputs,
});
}
res.json(result);
} catch (e) {
res.status(500).send(e instanceof Error ? e.message : e);
}
}
private async $getAllChannelsGeo(req: Request, res: Response) {
try {
const style: string = typeof req.query.style === 'string' ? req.query.style : '';
const channels = await channelsApi.$getAllChannelsGeo(req.params?.publicKey, style);
res.header('Pragma', 'public');
res.header('Cache-control', 'public');
res.setHeader('Expires', new Date(Date.now() + 1000 * 60).toUTCString());
res.json(channels);
} catch (e) {
res.status(500).send(e instanceof Error ? e.message : e);
}
}
}
export default new ChannelsRoutes();
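For reference, a hypothetical client of the channels route registered above (the host is a placeholder; the page size of 10 is hard-coded into the $getChannelsForNode call, and X-Total-Count carries the result of $getChannelsCountForNode):

const base = 'https://example-mempool-host/api/v1'; // placeholder host
async function listChannels(publicKey: string, page = 0): Promise<{ channels: any[]; total: number }> {
  const url = `${base}/lightning/channels?public_key=${publicKey}&status=open&index=${page * 10}`;
  const res = await fetch(url);
  return {
    channels: await res.json(),
    total: Number(res.headers.get('X-Total-Count') ?? 0), // for pagination controls
  };
}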

View File

@@ -1,58 +0,0 @@
import config from '../../config';
import { Application, Request, Response } from 'express';
import nodesApi from './nodes.api';
import channelsApi from './channels.api';
import statisticsApi from './statistics.api';
class GeneralLightningRoutes {
constructor() { }
public initRoutes(app: Application) {
app
.get(config.MEMPOOL.API_URL_PREFIX + 'lightning/search', this.$searchNodesAndChannels)
.get(config.MEMPOOL.API_URL_PREFIX + 'lightning/statistics/latest', this.$getGeneralStats)
.get(config.MEMPOOL.API_URL_PREFIX + 'lightning/statistics/:interval', this.$getStatistics)
;
}
private async $searchNodesAndChannels(req: Request, res: Response) {
if (typeof req.query.searchText !== 'string') {
res.status(400).send('Missing parameter: searchText');
return;
}
try {
const nodes = await nodesApi.$searchNodeByPublicKeyOrAlias(req.query.searchText);
const channels = await channelsApi.$searchChannelsById(req.query.searchText);
res.json({
nodes: nodes,
channels: channels,
});
} catch (e) {
res.status(500).send(e instanceof Error ? e.message : e);
}
}
private async $getStatistics(req: Request, res: Response) {
try {
const statistics = await statisticsApi.$getStatistics(req.params.interval);
const statisticsCount = await statisticsApi.$getStatisticsCount();
res.header('Pragma', 'public');
res.header('Cache-control', 'public');
res.header('X-total-count', statisticsCount.toString());
res.setHeader('Expires', new Date(Date.now() + 1000 * 60).toUTCString());
res.json(statistics);
} catch (e) {
res.status(500).send(e instanceof Error ? e.message : e);
}
}
private async $getGeneralStats(req: Request, res: Response) {
try {
const statistics = await statisticsApi.$getLatestStatistics();
res.json(statistics);
} catch (e) {
res.status(500).send(e instanceof Error ? e.message : e);
}
}
}
export default new GeneralLightningRoutes();
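A similarly hypothetical consumer of the combined search endpoint above; the response shape follows $searchNodesAndChannels (base as in the previous sketch):

async function searchLightning(text: string): Promise<{ nodes: any[]; channels: any[] }> {
  const res = await fetch(`${base}/lightning/search?searchText=${encodeURIComponent(text)}`);
  return res.json(); // { nodes: [...], channels: [...] }
}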

View File

@@ -1,702 +0,0 @@
import logger from '../../logger';
import DB from '../../database';
import { ResultSetHeader } from 'mysql2';
import { ILightningApi } from '../lightning/lightning-api.interface';
import { ITopNodesPerCapacity, ITopNodesPerChannels } from '../../mempool.interfaces';
class NodesApi {
public async $getWorldNodes(): Promise<any> {
try {
let query = `
SELECT nodes.public_key as publicKey, IF(nodes.alias = '', SUBSTRING(nodes.public_key, 1, 20), alias) as alias,
CAST(COALESCE(nodes.capacity, 0) as INT) as capacity,
CAST(COALESCE(nodes.channels, 0) as INT) as channels,
nodes.longitude, nodes.latitude,
geo_names_country.names as country, geo_names_iso.names as isoCode
FROM nodes
JOIN geo_names geo_names_country ON geo_names_country.id = nodes.country_id AND geo_names_country.type = 'country'
JOIN geo_names geo_names_iso ON geo_names_iso.id = nodes.country_id AND geo_names_iso.type = 'country_iso_code'
WHERE status = 1 AND nodes.as_number IS NOT NULL
ORDER BY capacity
`;
const [nodes]: any[] = await DB.query(query);
for (let i = 0; i < nodes.length; ++i) {
nodes[i].country = JSON.parse(nodes[i].country);
}
query = `
SELECT MAX(nodes.capacity) as maxLiquidity, MAX(nodes.channels) as maxChannels
FROM nodes
WHERE status = 1 AND nodes.as_number IS NOT NULL
`;
const [maximums]: any[] = await DB.query(query);
return {
maxLiquidity: maximums[0].maxLiquidity,
maxChannels: maximums[0].maxChannels,
nodes: nodes.map(node => [
node.longitude, node.latitude,
node.publicKey, node.alias, node.capacity, node.channels,
node.country, node.isoCode
])
};
} catch (e) {
logger.err(`Can't get world nodes list. Reason: ${e instanceof Error ? e.message : e}`);
}
}
public async $getNode(public_key: string): Promise<any> {
try {
// General info
let query = `
SELECT public_key, alias, UNIX_TIMESTAMP(first_seen) AS first_seen,
UNIX_TIMESTAMP(updated_at) AS updated_at, color, sockets as sockets,
as_number, city_id, country_id, subdivision_id, longitude, latitude,
geo_names_iso.names as iso_code, geo_names_as.names as as_organization, geo_names_city.names as city,
geo_names_country.names as country, geo_names_subdivision.names as subdivision
FROM nodes
LEFT JOIN geo_names geo_names_as on geo_names_as.id = as_number
LEFT JOIN geo_names geo_names_city on geo_names_city.id = city_id
LEFT JOIN geo_names geo_names_subdivision on geo_names_subdivision.id = subdivision_id
LEFT JOIN geo_names geo_names_country on geo_names_country.id = country_id
LEFT JOIN geo_names geo_names_iso ON geo_names_iso.id = nodes.country_id AND geo_names_iso.type = 'country_iso_code'
WHERE public_key = ?
`;
let [rows]: any[] = await DB.query(query, [public_key]);
if (rows.length === 0) {
throw new Error(`This node does not exist, or our node is not seeing it yet`);
}
const node = rows[0];
node.as_organization = JSON.parse(node.as_organization);
node.subdivision = JSON.parse(node.subdivision);
node.city = JSON.parse(node.city);
node.country = JSON.parse(node.country);
// Active channels and capacity
const activeChannelsStats: any = await this.$getActiveChannelsStats(public_key);
node.active_channel_count = activeChannelsStats.active_channel_count ?? 0;
node.capacity = activeChannelsStats.capacity ?? 0;
// Opened channels count
query = `
SELECT count(short_id) as opened_channel_count
FROM channels
WHERE status != 2 AND (channels.node1_public_key = ? OR channels.node2_public_key = ?)
`;
[rows] = await DB.query(query, [public_key, public_key]);
node.opened_channel_count = 0;
if (rows.length > 0) {
node.opened_channel_count = rows[0].opened_channel_count;
}
// Closed channels count
query = `
SELECT count(short_id) as closed_channel_count
FROM channels
WHERE status = 2 AND (channels.node1_public_key = ? OR channels.node2_public_key = ?)
`;
[rows] = await DB.query(query, [public_key, public_key]);
node.closed_channel_count = 0;
if (rows.length > 0) {
node.closed_channel_count = rows[0].closed_channel_count;
}
// Custom records
query = `
SELECT type, payload
FROM nodes_records
WHERE public_key = ?
`;
[rows] = await DB.query(query, [public_key]);
node.custom_records = {};
for (const record of rows) {
node.custom_records[record.type] = Buffer.from(record.payload, 'binary').toString('hex');
}
return node;
} catch (e) {
logger.err(`Cannot get node information for ${public_key}. Reason: ${(e instanceof Error ? e.message : e)}`);
throw e;
}
}
public async $getActiveChannelsStats(node_public_key: string): Promise<unknown> {
const query = `
SELECT count(short_id) as active_channel_count, sum(capacity) as capacity
FROM channels
WHERE status = 1 AND (channels.node1_public_key = ? OR channels.node2_public_key = ?)
`;
const [rows]: any[] = await DB.query(query, [node_public_key, node_public_key]);
if (rows.length > 0) {
return {
active_channel_count: rows[0].active_channel_count,
capacity: rows[0].capacity
};
} else {
return null;
}
}
public async $getFeeHistogram(node_public_key: string): Promise<unknown> {
try {
const inQuery = `
SELECT CASE WHEN fee_rate <= 10.0 THEN CEIL(fee_rate)
WHEN (fee_rate > 10.0 and fee_rate <= 100.0) THEN CEIL(fee_rate / 10.0) * 10.0
WHEN (fee_rate > 100.0 and fee_rate <= 1000.0) THEN CEIL(fee_rate / 100.0) * 100.0
WHEN fee_rate > 1000.0 THEN CEIL(fee_rate / 1000.0) * 1000.0
END as bucket,
count(short_id) as count,
sum(capacity) as capacity
FROM (
SELECT CASE WHEN node1_public_key = ? THEN node2_fee_rate WHEN node2_public_key = ? THEN node1_fee_rate END as fee_rate,
short_id as short_id,
capacity as capacity
FROM channels
WHERE status = 1 AND (channels.node1_public_key = ? OR channels.node2_public_key = ?)
) as fee_rate_table
GROUP BY bucket;
`;
const [inRows]: any[] = await DB.query(inQuery, [node_public_key, node_public_key, node_public_key, node_public_key]);
const outQuery = `
SELECT CASE WHEN fee_rate <= 10.0 THEN CEIL(fee_rate)
WHEN (fee_rate > 10.0 and fee_rate <= 100.0) THEN CEIL(fee_rate / 10.0) * 10.0
WHEN (fee_rate > 100.0 and fee_rate <= 1000.0) THEN CEIL(fee_rate / 100.0) * 100.0
WHEN fee_rate > 1000.0 THEN CEIL(fee_rate / 1000.0) * 1000.0
END as bucket,
count(short_id) as count,
sum(capacity) as capacity
FROM (
SELECT CASE WHEN node1_public_key = ? THEN node1_fee_rate WHEN node2_public_key = ? THEN node2_fee_rate END as fee_rate,
short_id as short_id,
capacity as capacity
FROM channels
WHERE status = 1 AND (channels.node1_public_key = ? OR channels.node2_public_key = ?)
) as fee_rate_table
GROUP BY bucket;
`;
const [outRows]: any[] = await DB.query(outQuery, [node_public_key, node_public_key, node_public_key, node_public_key]);
return {
incoming: inRows.length > 0 ? inRows : [],
outgoing: outRows.length > 0 ? outRows : [],
};
} catch (e) {
logger.err(`Cannot get node fee distribution for ${node_public_key}. Reason: ${(e instanceof Error ? e.message : e)}`);
throw e;
}
}
public async $getAllNodes(): Promise<any> {
try {
const query = `SELECT * FROM nodes`;
const [rows]: any = await DB.query(query);
return rows;
} catch (e) {
logger.err('$getAllNodes error: ' + (e instanceof Error ? e.message : e));
throw e;
}
}
public async $getNodeStats(public_key: string): Promise<any> {
try {
const query = `
SELECT UNIX_TIMESTAMP(added) AS added, capacity, channels
FROM node_stats
WHERE public_key = ?
ORDER BY added DESC
`;
const [rows]: any = await DB.query(query, [public_key]);
return rows;
} catch (e) {
logger.err('$getNodeStats error: ' + (e instanceof Error ? e.message : e));
throw e;
}
}
public async $getTopCapacityNodes(full: boolean): Promise<ITopNodesPerCapacity[]> {
try {
let rows: any;
let query: string;
if (full === false) {
query = `
SELECT nodes.public_key AS publicKey, IF(nodes.alias = '', SUBSTRING(nodes.public_key, 1, 20), alias) as alias,
nodes.capacity
FROM nodes
ORDER BY capacity DESC
LIMIT 100
`;
[rows] = await DB.query(query);
} else {
query = `
SELECT nodes.public_key AS publicKey, IF(nodes.alias = '', SUBSTRING(nodes.public_key, 1, 20), alias) as alias,
CAST(COALESCE(nodes.capacity, 0) as INT) as capacity,
CAST(COALESCE(nodes.channels, 0) as INT) as channels,
UNIX_TIMESTAMP(nodes.first_seen) as firstSeen, UNIX_TIMESTAMP(nodes.updated_at) as updatedAt,
geo_names_city.names as city, geo_names_country.names as country,
geo_names_iso.names as iso_code, geo_names_subdivision.names as subdivision
FROM nodes
LEFT JOIN geo_names geo_names_country ON geo_names_country.id = nodes.country_id AND geo_names_country.type = 'country'
LEFT JOIN geo_names geo_names_city ON geo_names_city.id = nodes.city_id AND geo_names_city.type = 'city'
LEFT JOIN geo_names geo_names_iso ON geo_names_iso.id = nodes.country_id AND geo_names_iso.type = 'country_iso_code'
LEFT JOIN geo_names geo_names_subdivision on geo_names_subdivision.id = nodes.subdivision_id AND geo_names_subdivision.type = 'division'
ORDER BY capacity DESC
LIMIT 100
`;
[rows] = await DB.query(query);
for (let i = 0; i < rows.length; ++i) {
rows[i].country = JSON.parse(rows[i].country);
rows[i].city = JSON.parse(rows[i].city);
}
}
return rows;
} catch (e) {
logger.err('$getTopCapacityNodes error: ' + (e instanceof Error ? e.message : e));
throw e;
}
}
public async $getTopChannelsNodes(full: boolean): Promise<ITopNodesPerChannels[]> {
try {
let rows: any;
let query: string;
if (full === false) {
query = `
SELECT nodes.public_key as publicKey, IF(nodes.alias = '', SUBSTRING(nodes.public_key, 1, 20), alias) as alias,
nodes.channels
FROM nodes
ORDER BY channels DESC
LIMIT 100;
`;
[rows] = await DB.query(query);
} else {
query = `
SELECT nodes.public_key AS publicKey, IF(nodes.alias = '', SUBSTRING(nodes.public_key, 1, 20), alias) as alias,
CAST(COALESCE(nodes.channels, 0) as INT) as channels,
CAST(COALESCE(nodes.capacity, 0) as INT) as capacity,
UNIX_TIMESTAMP(nodes.first_seen) as firstSeen, UNIX_TIMESTAMP(nodes.updated_at) as updatedAt,
geo_names_city.names as city, geo_names_country.names as country,
geo_names_iso.names as iso_code, geo_names_subdivision.names as subdivision
FROM nodes
LEFT JOIN geo_names geo_names_country ON geo_names_country.id = nodes.country_id AND geo_names_country.type = 'country'
LEFT JOIN geo_names geo_names_city ON geo_names_city.id = nodes.city_id AND geo_names_city.type = 'city'
LEFT JOIN geo_names geo_names_iso ON geo_names_iso.id = nodes.country_id AND geo_names_iso.type = 'country_iso_code'
LEFT JOIN geo_names geo_names_subdivision on geo_names_subdivision.id = nodes.subdivision_id AND geo_names_subdivision.type = 'division'
ORDER BY channels DESC
LIMIT 100
`;
[rows] = await DB.query(query);
for (let i = 0; i < rows.length; ++i) {
rows[i].country = JSON.parse(rows[i].country);
rows[i].city = JSON.parse(rows[i].city);
}
}
return rows;
} catch (e) {
logger.err('$getTopChannelsNodes error: ' + (e instanceof Error ? e.message : e));
throw e;
}
}
public async $getOldestNodes(full: boolean): Promise<ITopNodesPerChannels[]> {
try {
let [rows]: any[] = await DB.query('SELECT UNIX_TIMESTAMP(MAX(added)) as maxAdded FROM node_stats');
const latestDate = rows[0].maxAdded;
let query: string;
if (full === false) {
query = `
SELECT nodes.public_key, IF(nodes.alias = '', SUBSTRING(nodes.public_key, 1, 20), alias) as alias,
node_stats.channels
FROM node_stats
JOIN nodes ON nodes.public_key = node_stats.public_key
WHERE added = FROM_UNIXTIME(${latestDate})
ORDER BY first_seen
LIMIT 100;
`;
[rows] = await DB.query(query);
} else {
query = `
SELECT node_stats.public_key AS publicKey, IF(nodes.alias = '', SUBSTRING(node_stats.public_key, 1, 20), alias) as alias,
CAST(COALESCE(node_stats.channels, 0) as INT) as channels,
CAST(COALESCE(node_stats.capacity, 0) as INT) as capacity,
UNIX_TIMESTAMP(nodes.first_seen) as firstSeen, UNIX_TIMESTAMP(nodes.updated_at) as updatedAt,
geo_names_city.names as city, geo_names_country.names as country,
geo_names_iso.names as iso_code, geo_names_subdivision.names as subdivision
FROM node_stats
RIGHT JOIN nodes ON nodes.public_key = node_stats.public_key
LEFT JOIN geo_names geo_names_country ON geo_names_country.id = nodes.country_id AND geo_names_country.type = 'country'
LEFT JOIN geo_names geo_names_city ON geo_names_city.id = nodes.city_id AND geo_names_city.type = 'city'
LEFT JOIN geo_names geo_names_iso ON geo_names_iso.id = nodes.country_id AND geo_names_iso.type = 'country_iso_code'
LEFT JOIN geo_names geo_names_subdivision on geo_names_subdivision.id = nodes.subdivision_id AND geo_names_subdivision.type = 'division'
WHERE added = FROM_UNIXTIME(${latestDate})
ORDER BY first_seen
LIMIT 100
`;
[rows] = await DB.query(query);
for (let i = 0; i < rows.length; ++i) {
rows[i].country = JSON.parse(rows[i].country);
rows[i].city = JSON.parse(rows[i].city);
}
}
return rows;
} catch (e) {
logger.err('$getOldestNodes error: ' + (e instanceof Error ? e.message : e));
throw e;
}
}
public async $searchNodeByPublicKeyOrAlias(search: string) {
try {
const publicKeySearch = search.replace('%', '') + '%';
const aliasSearch = search.replace(/[-_.]/g, ' ').replace(/[^a-zA-Z0-9 ]/g, '').split(' ').map((search) => '+' + search + '*').join(' ');
const query = `SELECT public_key, alias, capacity, channels, status FROM nodes WHERE public_key LIKE ? OR MATCH alias_search AGAINST (? IN BOOLEAN MODE) ORDER BY capacity DESC LIMIT 10`;
const [rows]: any = await DB.query(query, [publicKeySearch, aliasSearch]);
return rows;
} catch (e) {
logger.err('$searchNodeByPublicKeyOrAlias error: ' + (e instanceof Error ? e.message : e));
throw e;
}
}
public async $getNodesISPRanking() {
try {
let query = '';
// List all channels and the two linked ISP
query = `
SELECT short_id, channels.capacity,
channels.node1_public_key AS node1PublicKey, isp1.names AS isp1, isp1.id as isp1ID,
channels.node2_public_key AS node2PublicKey, isp2.names AS isp2, isp2.id as isp2ID
FROM channels
JOIN nodes node1 ON node1.public_key = channels.node1_public_key
JOIN nodes node2 ON node2.public_key = channels.node2_public_key
JOIN geo_names isp1 ON isp1.id = node1.as_number
JOIN geo_names isp2 ON isp2.id = node2.as_number
WHERE channels.status = 1
ORDER BY short_id DESC
`;
const [channelsIsp]: any = await DB.query(query);
// Sum channels capacity and node count per ISP
const ispList = {};
for (const channel of channelsIsp) {
const isp1 = JSON.parse(channel.isp1);
const isp2 = JSON.parse(channel.isp2);
if (!ispList[isp1]) {
ispList[isp1] = {
id: channel.isp1ID.toString(),
capacity: 0,
channels: 0,
nodes: {},
};
} else if (!ispList[isp1].id.split(',').includes(channel.isp1ID.toString())) {
ispList[isp1].id += ',' + channel.isp1ID.toString();
}
if (!ispList[isp2]) {
ispList[isp2] = {
id: channel.isp2ID.toString(),
capacity: 0,
channels: 0,
nodes: {},
};
} else if (!ispList[isp2].id.split(',').includes(channel.isp2ID.toString())) {
ispList[isp2].id += ',' + channel.isp2ID.toString();
}
ispList[isp1].capacity += channel.capacity;
ispList[isp1].channels += 1;
ispList[isp1].nodes[channel.node1PublicKey] = true;
ispList[isp2].capacity += channel.capacity;
ispList[isp2].channels += 1;
ispList[isp2].nodes[channel.node2PublicKey] = true;
}
const ispRanking: any[] = [];
for (const isp of Object.keys(ispList)) {
ispRanking.push([
ispList[isp].id,
isp,
ispList[isp].capacity,
ispList[isp].channels,
Object.keys(ispList[isp].nodes).length,
]);
}
// Total active channels capacity
query = `SELECT SUM(capacity) AS capacity FROM channels WHERE status = 1`;
const [totalCapacity]: any = await DB.query(query);
// Get the total capacity of all channels which have at least one node on clearnet
query = `
SELECT SUM(capacity) as capacity
FROM (
SELECT capacity, GROUP_CONCAT(socket1.type, socket2.type) as networks
FROM channels
JOIN nodes_sockets socket1 ON node1_public_key = socket1.public_key
JOIN nodes_sockets socket2 ON node2_public_key = socket2.public_key
AND channels.status = 1
GROUP BY short_id
) channels_tmp
WHERE channels_tmp.networks LIKE '%ipv%'
`;
const [clearnetCapacity]: any = await DB.query(query);
// Get the total capacity of all channels which have both nodes on Tor
query = `
SELECT SUM(capacity) as capacity
FROM (
SELECT capacity, GROUP_CONCAT(socket1.type, socket2.type) as networks
FROM channels
JOIN nodes_sockets socket1 ON node1_public_key = socket1.public_key
JOIN nodes_sockets socket2 ON node2_public_key = socket2.public_key
AND channels.status = 1
GROUP BY short_id
) channels_tmp
WHERE channels_tmp.networks NOT LIKE '%ipv%' AND
channels_tmp.networks NOT LIKE '%dns%' AND
channels_tmp.networks NOT LIKE '%websocket%'
`;
const [torCapacity]: any = await DB.query(query);
const clearnetCapacityValue = parseInt(clearnetCapacity[0].capacity, 10);
const torCapacityValue = parseInt(torCapacity[0].capacity, 10);
const unknownCapacityValue = parseInt(totalCapacity[0].capacity, 10) - clearnetCapacityValue - torCapacityValue;
return {
clearnetCapacity: clearnetCapacityValue,
torCapacity: torCapacityValue,
unknownCapacity: unknownCapacityValue,
ispRanking: ispRanking,
};
} catch (e) {
logger.err(`Cannot get LN ISP ranking. Reason: ${e instanceof Error ? e.message : e}`);
throw e;
}
}
public async $getNodesPerCountry(countryId: string) {
try {
const query = `
SELECT nodes.public_key, CAST(COALESCE(nodes.capacity, 0) as INT) as capacity, CAST(COALESCE(nodes.channels, 0) as INT) as channels,
nodes.alias, UNIX_TIMESTAMP(nodes.first_seen) as first_seen, UNIX_TIMESTAMP(nodes.updated_at) as updated_at,
geo_names_city.names as city, geo_names_country.names as country,
geo_names_iso.names as iso_code, geo_names_subdivision.names as subdivision,
nodes.longitude, nodes.latitude, nodes.as_number, geo_names_isp.names as isp
FROM nodes
LEFT JOIN geo_names geo_names_country ON geo_names_country.id = nodes.country_id AND geo_names_country.type = 'country'
LEFT JOIN geo_names geo_names_city ON geo_names_city.id = nodes.city_id AND geo_names_city.type = 'city'
LEFT JOIN geo_names geo_names_iso ON geo_names_iso.id = nodes.country_id AND geo_names_iso.type = 'country_iso_code'
LEFT JOIN geo_names geo_names_subdivision on geo_names_subdivision.id = nodes.subdivision_id AND geo_names_subdivision.type = 'division'
LEFT JOIN geo_names geo_names_isp on geo_names_isp.id = nodes.as_number AND geo_names_isp.type = 'as_organization'
WHERE geo_names_country.id = ?
ORDER BY capacity DESC
`;
const [rows]: any = await DB.query(query, [countryId]);
for (let i = 0; i < rows.length; ++i) {
rows[i].country = JSON.parse(rows[i].country);
rows[i].city = JSON.parse(rows[i].city);
rows[i].subdivision = JSON.parse(rows[i].subdivision);
rows[i].isp = JSON.parse(rows[i].isp);
}
return rows;
} catch (e) {
logger.err(`Cannot get nodes for country id ${countryId}. Reason: ${e instanceof Error ? e.message : e}`);
throw e;
}
}
public async $getNodesPerISP(ISPId: string) {
try {
let query = `
SELECT channels.node1_public_key AS node1PublicKey, isp1.id as isp1ID,
channels.node2_public_key AS node2PublicKey, isp2.id as isp2ID
FROM channels
JOIN nodes node1 ON node1.public_key = channels.node1_public_key
JOIN nodes node2 ON node2.public_key = channels.node2_public_key
JOIN geo_names isp1 ON isp1.id = node1.as_number
JOIN geo_names isp2 ON isp2.id = node2.as_number
WHERE channels.status = 1 AND (node1.as_number IN (?) OR node2.as_number IN (?))
ORDER BY short_id DESC
`;
const ISPIds = ISPId.split(',');
const [rows]: any = await DB.query(query, [ISPIds, ISPIds]);
if (!rows || rows.length === 0) {
return [];
}
const nodes = {};
const intISPIds: number[] = [];
for (const ispId of ISPIds) {
intISPIds.push(parseInt(ispId, 10));
}
for (const channel of rows) {
if (intISPIds.includes(channel.isp1ID)) {
nodes[channel.node1PublicKey] = true;
}
if (intISPIds.includes(channel.isp2ID)) {
nodes[channel.node2PublicKey] = true;
}
}
query = `
SELECT nodes.public_key, CAST(COALESCE(nodes.capacity, 0) as INT) as capacity, CAST(COALESCE(nodes.channels, 0) as INT) as channels,
nodes.alias, UNIX_TIMESTAMP(nodes.first_seen) as first_seen, UNIX_TIMESTAMP(nodes.updated_at) as updated_at,
geo_names_city.names as city, geo_names_country.names as country,
geo_names_iso.names as iso_code, geo_names_subdivision.names as subdivision,
nodes.longitude, nodes.latitude
FROM nodes
LEFT JOIN geo_names geo_names_country ON geo_names_country.id = nodes.country_id AND geo_names_country.type = 'country'
LEFT JOIN geo_names geo_names_city ON geo_names_city.id = nodes.city_id AND geo_names_city.type = 'city'
LEFT JOIN geo_names geo_names_iso ON geo_names_iso.id = nodes.country_id AND geo_names_iso.type = 'country_iso_code'
LEFT JOIN geo_names geo_names_subdivision on geo_names_subdivision.id = nodes.subdivision_id AND geo_names_subdivision.type = 'division'
WHERE nodes.public_key IN (?)
ORDER BY capacity DESC
`;
const [rows2]: any = await DB.query(query, [Object.keys(nodes)]);
for (let i = 0; i < rows2.length; ++i) {
rows2[i].country = JSON.parse(rows2[i].country);
rows2[i].city = JSON.parse(rows2[i].city);
rows2[i].subdivision = JSON.parse(rows2[i].subdivision);
}
return rows2;
} catch (e) {
logger.err(`Cannot get nodes for ISP id ${ISPId}. Reason: ${e instanceof Error ? e.message : e}`);
throw e;
}
}
public async $getNodesCountries() {
try {
let query = `SELECT geo_names.names as names, geo_names_iso.names as iso_code, COUNT(DISTINCT nodes.public_key) as nodesCount, SUM(capacity) as capacity
FROM nodes
JOIN geo_names ON geo_names.id = nodes.country_id AND geo_names.type = 'country'
JOIN geo_names geo_names_iso ON geo_names_iso.id = nodes.country_id AND geo_names_iso.type = 'country_iso_code'
GROUP BY country_id
ORDER BY COUNT(DISTINCT nodes.public_key) DESC
`;
const [nodesCountPerCountry]: any = await DB.query(query);
query = `SELECT COUNT(*) as total FROM nodes WHERE country_id IS NOT NULL`;
const [nodesWithAS]: any = await DB.query(query);
const nodesPerCountry: any[] = [];
for (const country of nodesCountPerCountry) {
nodesPerCountry.push({
name: JSON.parse(country.names),
iso: country.iso_code,
count: country.nodesCount,
share: Math.floor(country.nodesCount / nodesWithAS[0].total * 10000) / 100,
capacity: country.capacity,
})
}
return nodesPerCountry;
} catch (e) {
logger.err(`Cannot get nodes grouped by country. Reason: ${e instanceof Error ? e.message : e}`);
throw e;
}
}
/**
* Save or update a node present in the graph
*/
public async $saveNode(node: ILightningApi.Node): Promise<void> {
try {
const sockets = (node.addresses?.map(a => a.addr).join(',')) ?? '';
const query = `INSERT INTO nodes(
public_key,
first_seen,
updated_at,
alias,
alias_search,
color,
sockets,
status
)
VALUES (?, NOW(), FROM_UNIXTIME(?), ?, ?, ?, ?, 1)
ON DUPLICATE KEY UPDATE updated_at = FROM_UNIXTIME(?), alias = ?, alias_search = ?, color = ?, sockets = ?, status = 1`;
await DB.query(query, [
node.pub_key,
node.last_update,
node.alias,
this.aliasToSearchText(node.alias),
node.color,
sockets,
node.last_update,
node.alias,
this.aliasToSearchText(node.alias),
node.color,
sockets,
]);
} catch (e) {
logger.err('$saveNode() error: ' + (e instanceof Error ? e.message : e));
}
}
/**
* Update node sockets
*/
public async $updateNodeSockets(publicKey: string, sockets: {network: string; addr: string}[]): Promise<void> {
const formattedSockets = (sockets.map(a => a.addr).join(',')) ?? '';
try {
await DB.query(`UPDATE nodes SET sockets = ? WHERE public_key = ?`, [formattedSockets, publicKey]);
} catch (e) {
logger.err(`Cannot update node sockets for ${publicKey}. Reason: ${e instanceof Error ? e.message : e}`);
}
}
/**
* Set all nodes not in `nodesPubkeys` as inactive (status = 0)
*/
public async $setNodesInactive(graphNodesPubkeys: string[]): Promise<void> {
if (graphNodesPubkeys.length === 0) {
return;
}
try {
const result = await DB.query<ResultSetHeader>(`
UPDATE nodes
SET status = 0
WHERE public_key NOT IN (
${graphNodesPubkeys.map(pubkey => `"${pubkey}"`).join(',')}
)
`);
if ((result[0].changedRows ?? 0) > 0) {
logger.info(`Marked ${result[0].changedRows} nodes as inactive because they are not in the graph`);
} else {
logger.debug(`Marked ${result[0].changedRows} nodes as inactive because they are not in the graph`);
}
} catch (e) {
logger.err('$setNodesInactive() error: ' + (e instanceof Error ? e.message : e));
}
}
private aliasToSearchText(str: string): string {
return str.replace(/[-_.]/g, ' ').replace(/[^a-zA-Z0-9 ]/g, '');
}
}
export default new NodesApi();
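The CASE expression in $getFeeHistogram rounds each fee rate up to whole sat/vB steps below 10, then up to the next multiple of 10, 100 or 1000 in the following decades. The same bucketing rule in plain TypeScript, for illustration only (the production logic lives in the SQL above):

function feeBucket(feeRate: number): number {
  if (feeRate <= 10) return Math.ceil(feeRate);               // 1, 2, ..., 10
  if (feeRate <= 100) return Math.ceil(feeRate / 10) * 10;    // 20, 30, ..., 100
  if (feeRate <= 1000) return Math.ceil(feeRate / 100) * 100; // 200, ..., 1000
  return Math.ceil(feeRate / 1000) * 1000;                    // 2000, 3000, ...
}

// feeBucket(7.3) === 8, feeBucket(42) === 50, feeBucket(850) === 900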

View File

@@ -1,259 +0,0 @@
import config from '../../config';
import { Application, Request, Response } from 'express';
import nodesApi from './nodes.api';
import DB from '../../database';
import { INodesRanking } from '../../mempool.interfaces';
class NodesRoutes {
constructor() { }
public initRoutes(app: Application) {
app
.get(config.MEMPOOL.API_URL_PREFIX + 'lightning/nodes/world', this.$getWorldNodes)
.get(config.MEMPOOL.API_URL_PREFIX + 'lightning/nodes/country/:country', this.$getNodesPerCountry)
.get(config.MEMPOOL.API_URL_PREFIX + 'lightning/nodes/search/:search', this.$searchNode)
.get(config.MEMPOOL.API_URL_PREFIX + 'lightning/nodes/isp-ranking', this.$getISPRanking)
.get(config.MEMPOOL.API_URL_PREFIX + 'lightning/nodes/isp/:isp', this.$getNodesPerISP)
.get(config.MEMPOOL.API_URL_PREFIX + 'lightning/nodes/countries', this.$getNodesCountries)
.get(config.MEMPOOL.API_URL_PREFIX + 'lightning/nodes/rankings', this.$getNodesRanking)
.get(config.MEMPOOL.API_URL_PREFIX + 'lightning/nodes/rankings/liquidity', this.$getTopNodesByCapacity)
.get(config.MEMPOOL.API_URL_PREFIX + 'lightning/nodes/rankings/connectivity', this.$getTopNodesByChannels)
.get(config.MEMPOOL.API_URL_PREFIX + 'lightning/nodes/rankings/age', this.$getOldestNodes)
.get(config.MEMPOOL.API_URL_PREFIX + 'lightning/nodes/:public_key/statistics', this.$getHistoricalNodeStats)
.get(config.MEMPOOL.API_URL_PREFIX + 'lightning/nodes/:public_key/fees/histogram', this.$getFeeHistogram)
.get(config.MEMPOOL.API_URL_PREFIX + 'lightning/nodes/:public_key', this.$getNode)
.get(config.MEMPOOL.API_URL_PREFIX + 'lightning/nodes/group/:name', this.$getNodeGroup)
;
}
private async $searchNode(req: Request, res: Response) {
try {
const nodes = await nodesApi.$searchNodeByPublicKeyOrAlias(req.params.search);
res.json(nodes);
} catch (e) {
res.status(500).send(e instanceof Error ? e.message : e);
}
}
private async $getNodeGroup(req: Request, res: Response) {
try {
let nodesList;
let nodes: any[] = [];
switch (config.MEMPOOL.NETWORK) {
case 'testnet':
nodesList = ['032c7c7819276c4f706a04df1a0f1e10a5495994a7be4c1d3d28ca766e5a2b957b', '025a7e38c2834dd843591a4d23d5f09cdeb77ddca85f673c2d944a14220ff14cf7', '0395e2731a1673ef21d7a16a727c4fc4d4c35a861c428ce2c819c53d2b81c8bd55', '032ab2028c0b614c6d87824e2373529652fd7e4221b4c70cc4da7c7005c49afcf0', '029001b22fe70b48bee12d014df91982eb85ff1bd404ec772d5c83c4ee3e88d2c3', '0212e2848d79f928411da5f2ff0a8c95ec6ccb5a09d2031b6f71e91309dcde63af', '03e871a2229523d34f76e6311ff197cfe7f26c2fbec13554b93a46f4e710c47dab', '032202ec98d976b0e928bd1d91924e8bd3eab07231fc39feb3737b010071073df8', '02fa7c5a948d03d563a9f36940c2205a814e594d17c0042ced242c71a857d72605', '039c14fdec2d958e3d14cebf657451bbd9e039196615785e82c917f274e3fb2205', '033589bbcb233ffc416cefd5437c7f37e9d7cb7942d405e39e72c4c846d9b37f18', '029293110441c6e2eacb57e1255bf6ef05c41a6a676fe474922d33c19f98a7d584'];
break;
case 'signet':
nodesList = ['03ddab321b760433cbf561b615ef62ac7d318630c5f51d523aaf5395b90b751956', '033d92c7bfd213ef1b34c90e985fb5dc77f9ec2409d391492484e57a44c4aca1de', '02ad010dda54253c1eb9efe38b0760657a3b43ecad62198c359c051c9d99d45781', '025196512905b8a3f1597428b867bec63ec9a95e5089eb7dc7e63e2d2691669029', '027c625aa1fbe3768db68ebcb05b53b6dc0ce68b7b54b8900d326d167363e684fe', '03f1629af3101fcc56b7aac2667016be84e3defbf3d0c8719f836c9b41c9a57a43', '02dfb81e2f7a3c4c9e8a51b70ef82b4a24549cc2fab1f5b2fd636501774a918991', '02d01ccf832944c68f10d39006093769c5b8bda886d561b128534e313d729fdb34', '02499ed23027d4698a6904ff4ec1b6085a61f10b9a6937f90438f9947e38e8ea86', '038310e3a786340f2bd7770704c7ccfe560fd163d9a1c99d67894597419d12cbf7', '03e5e9d879b72c7d67ecd483bae023bd33e695bb32b981a4021260f7b9d62bc761', '028d16e1a0ace4c0c0a421536d8d32ce484dfe6e2f726b7b0e7c30f12a195f8cc7'];
break;
default:
nodesList = ['03fbc17549ec667bccf397ababbcb4cdc0e3394345e4773079ab2774612ec9be61', '03da9a8623241ccf95f19cd645c6cecd4019ac91570e976eb0a128bebbc4d8a437', '03ca5340cf85cb2e7cf076e489f785410838de174e40be62723e8a60972ad75144', '0238bd27f02d67d6c51e269692bc8c9a32357a00e7777cba7f4f1f18a2a700b108', '03f983dcabed6baa1eab5b56c8b2e8fdc846ab3fd931155377897335e85a9fa57c', '03e399589533581e48796e29a825839a010036a61b20744fda929d6709fcbffcc5', '021f5288b5f72c42cd0d8801086af7ce09a816d8ee9a4c47a4b436399b26cb601a', '032b01b7585f781420cd4148841a82831ba37fa952342052cec16750852d4f2dd9', '02848036488d4b8fb1f1c4064261ec36151f43b085f0b51bd239ade3ddfc940c34', '02b6b1640fe029e304c216951af9fbefdb23b0bdc9baaf327540d31b6107841fdf', '03694289827203a5b3156d753071ddd5bf92e371f5a462943f9555eef6d2d6606c', '0283d850db7c3e8ea7cc9c4abc7afaab12bbdf72b677dcba1d608350d2537d7d43'];
}
for (let pubKey of nodesList) {
try {
const node = await nodesApi.$getNode(pubKey);
if (node) {
nodes.push(node);
}
} catch (e) {}
}
res.header('Pragma', 'public');
res.header('Cache-control', 'public');
res.setHeader('Expires', new Date(Date.now() + 1000 * 60).toUTCString());
res.json(nodes);
} catch (e) {
res.status(500).send(e instanceof Error ? e.message : e);
}
}
private async $getNode(req: Request, res: Response) {
try {
const node = await nodesApi.$getNode(req.params.public_key);
if (!node) {
res.status(404).send('Node not found');
return;
}
res.header('Pragma', 'public');
res.header('Cache-control', 'public');
res.setHeader('Expires', new Date(Date.now() + 1000 * 60).toUTCString());
res.json(node);
} catch (e) {
res.status(500).send(e instanceof Error ? e.message : e);
}
}
private async $getHistoricalNodeStats(req: Request, res: Response) {
try {
const statistics = await nodesApi.$getNodeStats(req.params.public_key);
res.header('Pragma', 'public');
res.header('Cache-control', 'public');
res.setHeader('Expires', new Date(Date.now() + 1000 * 60).toUTCString());
res.json(statistics);
} catch (e) {
res.status(500).send(e instanceof Error ? e.message : e);
}
}
private async $getFeeHistogram(req: Request, res: Response) {
try {
const node = await nodesApi.$getFeeHistogram(req.params.public_key);
if (!node) {
res.status(404).send('Node not found');
return;
}
res.header('Pragma', 'public');
res.header('Cache-control', 'public');
res.setHeader('Expires', new Date(Date.now() + 1000 * 60).toUTCString());
res.json(node);
} catch (e) {
res.status(500).send(e instanceof Error ? e.message : e);
}
}
private async $getNodesRanking(req: Request, res: Response): Promise<void> {
try {
const topCapacityNodes = await nodesApi.$getTopCapacityNodes(false);
const topChannelsNodes = await nodesApi.$getTopChannelsNodes(false);
res.header('Pragma', 'public');
res.header('Cache-control', 'public');
res.setHeader('Expires', new Date(Date.now() + 1000 * 60).toUTCString());
res.json(<INodesRanking>{
topByCapacity: topCapacityNodes,
topByChannels: topChannelsNodes,
});
} catch (e) {
res.status(500).send(e instanceof Error ? e.message : e);
}
}
private async $getTopNodesByCapacity(req: Request, res: Response): Promise<void> {
try {
const topCapacityNodes = await nodesApi.$getTopCapacityNodes(true);
res.header('Pragma', 'public');
res.header('Cache-control', 'public');
res.setHeader('Expires', new Date(Date.now() + 1000 * 60).toUTCString());
res.json(topCapacityNodes);
} catch (e) {
res.status(500).send(e instanceof Error ? e.message : e);
}
}
private async $getTopNodesByChannels(req: Request, res: Response): Promise<void> {
try {
const topCapacityNodes = await nodesApi.$getTopChannelsNodes(true);
res.header('Pragma', 'public');
res.header('Cache-control', 'public');
res.setHeader('Expires', new Date(Date.now() + 1000 * 60).toUTCString());
res.json(topCapacityNodes);
} catch (e) {
res.status(500).send(e instanceof Error ? e.message : e);
}
}
private async $getOldestNodes(req: Request, res: Response): Promise<void> {
try {
const topCapacityNodes = await nodesApi.$getOldestNodes(true);
res.header('Pragma', 'public');
res.header('Cache-control', 'public');
res.setHeader('Expires', new Date(Date.now() + 1000 * 60).toUTCString());
res.json(topCapacityNodes);
} catch (e) {
res.status(500).send(e instanceof Error ? e.message : e);
}
}
private async $getISPRanking(req: Request, res: Response): Promise<void> {
try {
const nodesPerAs = await nodesApi.$getNodesISPRanking();
res.header('Pragma', 'public');
res.header('Cache-control', 'public');
res.setHeader('Expires', new Date(Date.now() + 1000 * 600).toUTCString());
res.json(nodesPerAs);
} catch (e) {
res.status(500).send(e instanceof Error ? e.message : e);
}
}
private async $getWorldNodes(req: Request, res: Response) {
try {
const worldNodes = await nodesApi.$getWorldNodes();
res.header('Pragma', 'public');
res.header('Cache-control', 'public');
res.setHeader('Expires', new Date(Date.now() + 1000 * 600).toUTCString());
res.json(worldNodes);
} catch (e) {
res.status(500).send(e instanceof Error ? e.message : e);
}
}
private async $getNodesPerCountry(req: Request, res: Response) {
try {
const [country]: any[] = await DB.query(
`SELECT geo_names.id, geo_names_country.names as country_names
FROM geo_names
JOIN geo_names geo_names_country on geo_names.id = geo_names_country.id AND geo_names_country.type = 'country'
WHERE geo_names.type = 'country_iso_code' AND geo_names.names = ?`,
[req.params.country]
);
if (country.length === 0) {
res.status(404).send(`This country does not exist or does not host any lightning nodes on clearnet`);
return;
}
const nodes = await nodesApi.$getNodesPerCountry(country[0].id);
res.header('Pragma', 'public');
res.header('Cache-control', 'public');
res.setHeader('Expires', new Date(Date.now() + 1000 * 60).toUTCString());
res.json({
country: JSON.parse(country[0].country_names),
nodes: nodes,
});
} catch (e) {
res.status(500).send(e instanceof Error ? e.message : e);
}
}
private async $getNodesPerISP(req: Request, res: Response) {
try {
const [isp]: any[] = await DB.query(
`SELECT geo_names.names as isp_name
FROM geo_names
WHERE geo_names.type = 'as_organization' AND geo_names.id = ?`,
[req.params.isp]
);
if (isp.length === 0) {
res.status(404).send(`This ISP does not exist or does not host any lightning nodes on clearnet`);
return;
}
const nodes = await nodesApi.$getNodesPerISP(req.params.isp);
res.header('Pragma', 'public');
res.header('Cache-control', 'public');
res.setHeader('Expires', new Date(Date.now() + 1000 * 60).toUTCString());
res.json({
isp: JSON.parse(isp[0].isp_name),
nodes: nodes,
});
} catch (e) {
res.status(500).send(e instanceof Error ? e.message : e);
}
}
private async $getNodesCountries(req: Request, res: Response) {
try {
const nodesPerAs = await nodesApi.$getNodesCountries();
res.header('Pragma', 'public');
res.header('Cache-control', 'public');
res.setHeader('Expires', new Date(Date.now() + 1000 * 600).toUTCString());
res.json(nodesPerAs);
} catch (e) {
res.status(500).send(e instanceof Error ? e.message : e);
}
}
}
export default new NodesRoutes();
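A hypothetical lookup mirroring $getNodesPerCountry above: the route first resolves an ISO code to its geo_names id, then returns that country's nodes (base as in the earlier sketches; the error text matches the 404 branch):

async function nodesInCountry(isoCode: string): Promise<{ country: any; nodes: any[] }> {
  const res = await fetch(`${base}/lightning/nodes/country/${isoCode.toLowerCase()}`);
  if (res.status === 404) {
    throw new Error('This country does not exist or does not host any lightning nodes on clearnet');
  }
  return res.json();
}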

View File

@@ -1,53 +0,0 @@
import logger from '../../logger';
import DB from '../../database';
import { Common } from '../common';
class StatisticsApi {
public async $getStatistics(interval: string | null = null): Promise<any> {
interval = Common.getSqlInterval(interval);
let query = `SELECT UNIX_TIMESTAMP(added) AS added, channel_count, total_capacity,
tor_nodes, clearnet_nodes, unannounced_nodes, clearnet_tor_nodes
FROM lightning_stats`;
if (interval) {
query += ` WHERE added BETWEEN DATE_SUB(NOW(), INTERVAL ${interval}) AND NOW()`;
}
query += ` ORDER BY added DESC`;
try {
const [rows]: any = await DB.query(query);
return rows;
} catch (e) {
logger.err('$getStatistics error: ' + (e instanceof Error ? e.message : e));
throw e;
}
}
public async $getLatestStatistics(): Promise<any> {
try {
const [rows]: any = await DB.query(`SELECT * FROM lightning_stats ORDER BY added DESC LIMIT 1`);
const [rows2]: any = await DB.query(`SELECT * FROM lightning_stats WHERE DATE(added) = DATE(NOW() - INTERVAL 7 DAY)`);
return {
latest: rows[0],
previous: rows2[0],
};
} catch (e) {
logger.err('$getLatestStatistics error: ' + (e instanceof Error ? e.message : e));
throw e;
}
}
public async $getStatisticsCount(): Promise<number> {
try {
const [rows]: any = await DB.query(`SELECT count(*) as count FROM lightning_stats`);
return rows[0].count;
} catch (e) {
logger.err('$getStatisticsCount error: ' + (e instanceof Error ? e.message : e));
throw e;
}
}
}
export default new StatisticsApi();
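$getLatestStatistics pairs the newest lightning_stats row with the row from seven days earlier, so a consumer can derive week-over-week change; a sketch, assuming the column names selected in $getStatistics:

function weekOverWeekCapacity(latest: { total_capacity: number }, previous?: { total_capacity: number }): number {
  if (!previous || !previous.total_capacity) {
    return 0; // no data point from 7 days back yet
  }
  return ((latest.total_capacity - previous.total_capacity) / previous.total_capacity) * 100;
}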

View File

@@ -1,52 +1,47 @@
-import { MempoolBlock } from '../mempool.interfaces';
-import { Common } from './common';
-import mempool from './mempool';
-import projectedBlocks from './mempool-blocks';
+import projectedBlocks from './projected-blocks';
+import { DB } from '../database';
class FeeApi {
constructor() { }
-defaultFee = Common.isLiquid() ? 0.1 : 1;
public getRecommendedFee() {
-const pBlocks = projectedBlocks.getMempoolBlocks();
-const mPool = mempool.getMempoolInfo();
-const minimumFee = Math.ceil(mPool.mempoolminfee * 100000);
+const pBlocks = projectedBlocks.getProjectedBlocks();
if (!pBlocks.length) {
return {
-'fastestFee': this.defaultFee,
-'halfHourFee': this.defaultFee,
-'hourFee': this.defaultFee,
-'economyFee': minimumFee,
-'minimumFee': minimumFee,
+'fastestFee': 0,
+'halfHourFee': 0,
+'hourFee': 0,
};
}
-const firstMedianFee = this.optimizeMedianFee(pBlocks[0], pBlocks[1]);
-const secondMedianFee = pBlocks[1] ? this.optimizeMedianFee(pBlocks[1], pBlocks[2], firstMedianFee) : this.defaultFee;
-const thirdMedianFee = pBlocks[2] ? this.optimizeMedianFee(pBlocks[2], pBlocks[3], secondMedianFee) : this.defaultFee;
+let firstMedianFee = Math.ceil(pBlocks[0].medianFee);
+if (pBlocks.length === 1 && pBlocks[0].blockWeight <= 2000000) {
+firstMedianFee = 1;
+}
+const secondMedianFee = pBlocks[1] ? Math.ceil(pBlocks[1].medianFee) : firstMedianFee;
+const thirdMedianFee = pBlocks[2] ? Math.ceil(pBlocks[2].medianFee) : secondMedianFee;
return {
'fastestFee': firstMedianFee,
'halfHourFee': secondMedianFee,
'hourFee': thirdMedianFee,
-'economyFee': Math.min(2 * minimumFee, thirdMedianFee),
-'minimumFee': minimumFee,
};
}
-private optimizeMedianFee(pBlock: MempoolBlock, nextBlock: MempoolBlock | undefined, previousFee?: number): number {
-const useFee = previousFee ? (pBlock.medianFee + previousFee) / 2 : pBlock.medianFee;
-if (pBlock.blockVSize <= 500000) {
-return this.defaultFee;
-}
-if (pBlock.blockVSize <= 950000 && !nextBlock) {
-const multiplier = (pBlock.blockVSize - 500000) / 500000;
-return Math.max(Math.round(useFee * multiplier), this.defaultFee);
-}
-return Math.ceil(useFee);
-}
+public async $getTransactionsForBlock(blockHeight: number): Promise<any[]> {
+try {
+const connection = await DB.pool.getConnection();
+const query = `SELECT feePerVsize AS fpv FROM transactions WHERE blockheight = ? ORDER BY feePerVsize ASC`;
+const [rows] = await connection.query<any>(query, [blockHeight]);
+connection.release();
+return rows;
+} catch (e) {
+console.log('$getTransactionsForBlock() error', e);
+return [];
+}
+}
}
export default new FeeApi();
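A worked example of the removed optimizeMedianFee path (the `-` side above), assuming three projected blocks with median fees of 20, 10 and 5 sat/vB, each larger than 950 kvB and followed by another block:

// fastestFee  = ceil(20)            = 20 sat/vB
// halfHourFee = ceil((10 + 20) / 2) = 15 sat/vB
// hourFee     = ceil((5 + 15) / 2)  = 10 sat/vB
// economyFee  = min(2 * minimumFee, 10)
// A projected block under 500 kvB returns defaultFee for its tier instead.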

View File

@@ -1,37 +0,0 @@
import fs from 'fs';
import path from "path";
const { spawnSync } = require('child_process');
function getVersion(): string {
const packageJson = fs.readFileSync('package.json').toString();
return JSON.parse(packageJson).version;
}
function getGitCommit(): string {
if (process.env.MEMPOOL_COMMIT_HASH) {
return process.env.MEMPOOL_COMMIT_HASH;
} else {
const gitRevParse = spawnSync('git', ['rev-parse', '--short', 'HEAD']);
if (!gitRevParse.error) {
const output = gitRevParse.stdout.toString('utf-8').replace(/[\n\r\s]+$/, '');
if (output) {
return output;
} else {
console.log('Could not fetch git commit: No repo available');
}
} else if (gitRevParse.error.code === 'ENOENT') {
console.log('Could not fetch git commit: Command `git` is unavailable');
}
}
return '?';
}
const versionInfo = {
version: getVersion(),
gitCommit: getGitCommit()
}
fs.writeFileSync(
path.join(__dirname, 'version.json'),
JSON.stringify(versionInfo, null, 2) + "\n"
);
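For illustration, the script writes a version.json next to the compiled output along these lines (values are placeholders):

{
  "version": "2.4.0",
  "gitCommit": "ab12cd3"
}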

View File

@@ -1,122 +1,30 @@
import logger from '../logger';
import * as http from 'http';
import * as https from 'https';
import axios, { AxiosResponse } from 'axios';
import { IConversionRates } from '../mempool.interfaces';
import config from '../config';
import backendInfo from './backend-info';
import { SocksProxyAgent } from 'socks-proxy-agent';
import * as request from 'request';
class FiatConversion {
private debasingFiatCurrencies = ['AED', 'AUD', 'BDT', 'BHD', 'BMD', 'BRL', 'CAD', 'CHF', 'CLP',
'CNY', 'CZK', 'DKK', 'EUR', 'GBP', 'HKD', 'HUF', 'IDR', 'ILS', 'INR', 'JPY', 'KRW', 'KWD',
'LKR', 'MMK', 'MXN', 'MYR', 'NGN', 'NOK', 'NZD', 'PHP', 'PKR', 'PLN', 'RUB', 'SAR', 'SEK',
'SGD', 'THB', 'TRY', 'TWD', 'UAH', 'USD', 'VND', 'ZAR'];
private conversionRates: IConversionRates = {};
private ratesChangedCallback: ((rates: IConversionRates) => void) | undefined;
public ratesInitialized = false; // If true, it means rates are ready for use
private tickers = {
'BTCUSD': {
'USD': 4110.78
},
};
constructor() {
for (const fiat of this.debasingFiatCurrencies) {
this.conversionRates[fiat] = 0;
}
}
public setProgressChangedCallback(fn: (rates: IConversionRates) => void) {
this.ratesChangedCallback = fn;
}
constructor() { }
public startService() {
const fiatConversionUrl = (config.SOCKS5PROXY.ENABLED === true) && (config.SOCKS5PROXY.USE_ONION === true) ? config.PRICE_DATA_SERVER.TOR_URL : config.PRICE_DATA_SERVER.CLEARNET_URL;
logger.info('Starting currency rates service');
if (config.SOCKS5PROXY.ENABLED) {
logger.info(`Currency rates service will be queried over the Tor network using ${fiatConversionUrl}`);
} else {
logger.info(`Currency rates service will be queried over clearnet using ${config.PRICE_DATA_SERVER.CLEARNET_URL}`);
}
setInterval(this.updateCurrency.bind(this), 1000 * config.MEMPOOL.PRICE_FEED_UPDATE_INTERVAL);
setInterval(this.updateCurrency.bind(this), 1000 * 60 * 60);
this.updateCurrency();
}
public getConversionRates() {
return this.conversionRates;
public getTickers() {
return this.tickers;
}
private async updateCurrency(): Promise<void> {
type axiosOptions = {
headers: {
'User-Agent': string
};
timeout: number;
httpAgent?: http.Agent;
httpsAgent?: https.Agent;
}
const setDelay = (secs: number = 1): Promise<void> => new Promise(resolve => setTimeout(() => resolve(), secs * 1000));
const fiatConversionUrl = (config.SOCKS5PROXY.ENABLED === true) && (config.SOCKS5PROXY.USE_ONION === true) ? config.PRICE_DATA_SERVER.TOR_URL : config.PRICE_DATA_SERVER.CLEARNET_URL;
const isHTTP = (new URL(fiatConversionUrl).protocol.split(':')[0] === 'http') ? true : false;
const axiosOptions: axiosOptions = {
headers: {
'User-Agent': (config.MEMPOOL.USER_AGENT === 'mempool') ? `mempool/v${backendInfo.getBackendInfo().version}` : `${config.MEMPOOL.USER_AGENT}`
},
timeout: config.SOCKS5PROXY.ENABLED ? 30000 : 10000
};
let retry = 0;
while(retry < config.MEMPOOL.EXTERNAL_MAX_RETRY) {
try {
if (config.SOCKS5PROXY.ENABLED) {
let socksOptions: any = {
agentOptions: {
keepAlive: true,
},
hostname: config.SOCKS5PROXY.HOST,
port: config.SOCKS5PROXY.PORT
};
if (config.SOCKS5PROXY.USERNAME && config.SOCKS5PROXY.PASSWORD) {
socksOptions.username = config.SOCKS5PROXY.USERNAME;
socksOptions.password = config.SOCKS5PROXY.PASSWORD;
} else {
// Retry with different tor circuits https://stackoverflow.com/a/64960234
socksOptions.username = `circuit${retry}`;
}
// Handle proxy agent for onion addresses
if (isHTTP) {
axiosOptions.httpAgent = new SocksProxyAgent(socksOptions);
} else {
axiosOptions.httpsAgent = new SocksProxyAgent(socksOptions);
}
}
logger.debug('Querying currency rates service...');
const response: AxiosResponse = await axios.get(`${fiatConversionUrl}`, axiosOptions);
if (response.statusText === 'error' || !response.data) {
throw new Error(`Could not fetch data from ${fiatConversionUrl}, Error: ${response.status}`);
}
for (const rate of response.data.data) {
if (this.debasingFiatCurrencies.includes(rate.currencyCode) && rate.provider === 'Bisq-Aggregate') {
this.conversionRates[rate.currencyCode] = Math.round(100 * rate.price) / 100;
}
}
this.ratesInitialized = true;
logger.debug(`USD Conversion Rate: ${this.conversionRates.USD}`);
if (this.ratesChangedCallback) {
this.ratesChangedCallback(this.conversionRates);
}
break;
} catch (e) {
logger.err('Error updating fiat conversion rates: ' + (e instanceof Error ? e.message : e));
await setDelay(config.MEMPOOL.EXTERNAL_RETRY_INTERVAL);
retry++;
}
}
}
}
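For context, a minimal sketch of how another module might consume this service; the import path below is a hypothetical placeholder for illustration, not the repository's actual wiring:

// Hedged sketch: subscribing to fiat rate updates.
// The import path is hypothetical; adjust it to the real module location.
import priceUpdater from './api/fiat-conversion';

priceUpdater.setProgressChangedCallback((rates) => {
  // Invoked after every successful fetch; rates maps currency codes to prices.
  console.log(`1 BTC = ${rates.USD} USD`);
});
priceUpdater.startService();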

View File

@@ -1,272 +0,0 @@
// Imported from https://github.com/shesek/lightning-client-js
'use strict';
const methods = [
'addgossip',
'autocleaninvoice',
'check',
'checkmessage',
'close',
'connect',
'createinvoice',
'createinvoicerequest',
'createoffer',
'createonion',
'decode',
'decodepay',
'delexpiredinvoice',
'delinvoice',
'delpay',
'dev-listaddrs',
'dev-rescan-outputs',
'disableoffer',
'disconnect',
'estimatefees',
'feerates',
'fetchinvoice',
'fundchannel',
'fundchannel_cancel',
'fundchannel_complete',
'fundchannel_start',
'fundpsbt',
'getchaininfo',
'getinfo',
'getlog',
'getrawblockbyheight',
'getroute',
'getsharedsecret',
'getutxout',
'help',
'invoice',
'keysend',
'legacypay',
'listchannels',
'listconfigs',
'listforwards',
'listfunds',
'listinvoices',
'listnodes',
'listoffers',
'listpays',
'listpeers',
'listsendpays',
'listtransactions',
'multifundchannel',
'multiwithdraw',
'newaddr',
'notifications',
'offer',
'offerout',
'openchannel_abort',
'openchannel_bump',
'openchannel_init',
'openchannel_signed',
'openchannel_update',
'pay',
'payersign',
'paystatus',
'ping',
'plugin',
'reserveinputs',
'sendinvoice',
'sendonion',
'sendonionmessage',
'sendpay',
'sendpsbt',
'sendrawtransaction',
'setchannelfee',
'signmessage',
'signpsbt',
'stop',
'txdiscard',
'txprepare',
'txsend',
'unreserveinputs',
'utxopsbt',
'waitanyinvoice',
'waitblockheight',
'waitinvoice',
'waitsendpay',
'withdraw'
];
import EventEmitter from 'events';
import { existsSync, statSync } from 'fs';
import { createConnection, Socket } from 'net';
import { homedir } from 'os';
import path from 'path';
import { createInterface, Interface } from 'readline';
import logger from '../../../logger';
import { AbstractLightningApi } from '../lightning-api-abstract-factory';
import { ILightningApi } from '../lightning-api.interface';
import { convertAndmergeBidirectionalChannels, convertNode } from './clightning-convert';
class LightningError extends Error {
type: string = 'lightning';
message: string = 'lightning-client error';
constructor(error) {
super();
this.type = error.type;
this.message = error.message;
}
}
const defaultRpcPath = path.join(homedir(), '.lightning')
, fStat = (...p) => statSync(path.join(...p))
, fExists = (...p) => existsSync(path.join(...p))
export default class CLightningClient extends EventEmitter implements AbstractLightningApi {
private rpcPath: string;
private reconnectWait: number;
private reconnectTimeout;
private reqcount: number;
private client: Socket;
private rl: Interface;
private clientConnectionPromise: Promise<unknown>;
constructor(rpcPath = defaultRpcPath) {
if (!path.isAbsolute(rpcPath)) {
throw new Error('The rpcPath must be an absolute path');
}
if (!fExists(rpcPath) || !fStat(rpcPath).isSocket()) {
// network directory provided, use the lightning-rpc within it
if (fExists(rpcPath, 'lightning-rpc')) {
rpcPath = path.join(rpcPath, 'lightning-rpc');
}
// main data directory provided, default to using the bitcoin mainnet subdirectory
// to be removed in v0.2.0
else if (fExists(rpcPath, 'bitcoin', 'lightning-rpc')) {
logger.warn(`[CLightningClient] ${rpcPath}/lightning-rpc is missing, using the bitcoin mainnet subdirectory at ${rpcPath}/bitcoin instead.`)
logger.warn(`[CLightningClient] specifying the main lightning data directory is deprecated, please specify the network directory explicitly.\n`)
rpcPath = path.join(rpcPath, 'bitcoin', 'lightning-rpc')
}
}
logger.debug(`[CLightningClient] Connecting to ${rpcPath}`);
super();
this.rpcPath = rpcPath;
this.reconnectWait = 0.5;
this.reconnectTimeout = null;
this.reqcount = 0;
const _self = this;
this.client = createConnection(rpcPath).on(
'error', () => {
_self.increaseWaitTime();
_self.reconnect();
}
);
this.rl = createInterface({ input: this.client }).on(
'error', () => {
_self.increaseWaitTime();
_self.reconnect();
}
);
this.clientConnectionPromise = new Promise<void>(resolve => {
_self.client.on('connect', () => {
logger.info(`[CLightningClient] Lightning client connected`);
_self.reconnectWait = 1;
resolve();
});
_self.client.on('end', () => {
logger.err('[CLightningClient] Lightning client connection closed, reconnecting');
_self.increaseWaitTime();
_self.reconnect();
});
_self.client.on('error', error => {
logger.err(`[CLightningClient] Lightning client connection error: ${error}`);
_self.increaseWaitTime();
_self.reconnect();
});
});
this.rl.on('line', line => {
line = line.trim();
if (!line) {
return;
}
const data = JSON.parse(line);
// logger.debug(`[CLightningClient] #${data.id} <-- ${JSON.stringify(data.error || data.result)}`);
_self.emit('res:' + data.id, data);
});
}
increaseWaitTime(): void {
if (this.reconnectWait >= 16) {
this.reconnectWait = 16;
} else {
this.reconnectWait *= 2;
}
}
reconnect(): void {
const _self = this;
if (this.reconnectTimeout) {
return;
}
this.reconnectTimeout = setTimeout(() => {
logger.debug('[CLightningClient] Trying to reconnect...');
_self.client.connect(_self.rpcPath);
_self.reconnectTimeout = null;
}, this.reconnectWait * 1000);
}
call(method, args = []): Promise<any> {
const _self = this;
const callInt = ++this.reqcount;
const sendObj = {
jsonrpc: '2.0',
method,
params: args,
id: '' + callInt
};
// logger.debug(`[CLightningClient] #${callInt} --> ${method} ${args}`);
// Wait for the client to connect
return this.clientConnectionPromise
.then(() => new Promise((resolve, reject) => {
// Wait for a response
this.once('res:' + callInt, res => res.error == null
? resolve(res.result)
: reject(new LightningError(res.error))
);
// Send the command
_self.client.write(JSON.stringify(sendObj));
}));
}
async $getNetworkGraph(): Promise<ILightningApi.NetworkGraph> {
const listnodes: any[] = await this.call('listnodes');
const listchannels: any[] = await this.call('listchannels');
const channelsList = await convertAndmergeBidirectionalChannels(listchannels['channels']);
return {
nodes: listnodes['nodes'].map(node => convertNode(node)),
edges: channelsList,
};
}
}
const protify = s => s.replace(/-([a-z])/g, m => m[1].toUpperCase());
methods.forEach(k => {
CLightningClient.prototype[protify(k)] = function (...args: any) {
return this.call(k, args);
};
});
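A minimal usage sketch, assuming a core lightning node exposes its lightning-rpc socket at the path shown (the path is illustrative); call() resolves only after the socket connection is established:

// Hedged sketch: the socket path is an example, not a required location.
const client = new CLightningClient('/home/user/.lightning/bitcoin');
client.call('getinfo').then((info) => {
  // getinfo is a standard core lightning RPC; the fields read here are typical.
  console.log(`Node ${info.id} at block height ${info.blockheight}`);
});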

View File

@@ -1,147 +0,0 @@
import { ILightningApi } from '../lightning-api.interface';
import FundingTxFetcher from '../../../tasks/lightning/sync-tasks/funding-tx-fetcher';
import logger from '../../../logger';
import { Common } from '../../common';
/**
 * Convert a clightning "listnode" entry to an lnd node entry
*/
export function convertNode(clNode: any): ILightningApi.Node {
let custom_records: { [type: number]: string } | undefined = undefined;
if (clNode.option_will_fund) {
try {
custom_records = { '1': Buffer.from(clNode.option_will_fund.compact_lease || '', 'hex').toString('base64') };
} catch (e) {
logger.err(`Cannot decode option_will_fund compact_lease for ${clNode.nodeid}. Reason: ` + (e instanceof Error ? e.message : e));
custom_records = undefined;
}
}
return {
alias: clNode.alias ?? '',
color: `#${clNode.color ?? ''}`,
features: [], // TODO parse and return clNode.feature
pub_key: clNode.nodeid,
addresses: clNode.addresses?.map((addr) => {
let address = addr.address;
if (addr.type === 'ipv6') {
address = `[${address}]`;
}
return {
network: addr.type,
addr: `${address}:${addr.port}`
};
}) ?? [],
last_update: clNode?.last_timestamp ?? 0,
custom_records
};
}
/**
* Convert clightning "listchannels" response to lnd "describegraph.edges" format
*/
export async function convertAndmergeBidirectionalChannels(clChannels: any[]): Promise<ILightningApi.Channel[]> {
logger.info('Converting clightning nodes and channels to lnd graph format');
let loggerTimer = new Date().getTime() / 1000;
let channelProcessed = 0;
const consolidatedChannelList: ILightningApi.Channel[] = [];
const clChannelsDict = {};
const clChannelsDictCount = {};
for (const clChannel of clChannels) {
if (!clChannelsDict[clChannel.short_channel_id]) {
clChannelsDict[clChannel.short_channel_id] = clChannel;
clChannelsDictCount[clChannel.short_channel_id] = 1;
} else {
consolidatedChannelList.push(
await buildFullChannel(clChannel, clChannelsDict[clChannel.short_channel_id])
);
delete clChannelsDict[clChannel.short_channel_id];
clChannelsDictCount[clChannel.short_channel_id]++;
}
const elapsedSeconds = Math.round((new Date().getTime() / 1000) - loggerTimer);
if (elapsedSeconds > 10) {
logger.info(`Building complete channels from clightning output. Channels processed: ${channelProcessed + 1} of ${clChannels.length}`);
loggerTimer = new Date().getTime() / 1000;
}
++channelProcessed;
}
channelProcessed = 0;
const keys = Object.keys(clChannelsDict);
for (const short_channel_id of keys) {
consolidatedChannelList.push(await buildIncompleteChannel(clChannelsDict[short_channel_id]));
const elapsedSeconds = Math.round((new Date().getTime() / 1000) - loggerTimer);
if (elapsedSeconds > 10) {
logger.info(`Building partial channels from clightning output. Channels processed: ${channelProcessed + 1} of ${keys.length}`);
loggerTimer = new Date().getTime() / 1000;
}
channelProcessed++;
}
return consolidatedChannelList;
}
/**
 * Convert two clightning "getchannels" entries into a full lnd "describegraph.edges" format
* In this case, clightning knows the channel policy for both nodes
*/
async function buildFullChannel(clChannelA: any, clChannelB: any): Promise<ILightningApi.Channel> {
const lastUpdate = Math.max(clChannelA.last_update ?? 0, clChannelB.last_update ?? 0);
const tx = await FundingTxFetcher.$fetchChannelOpenTx(clChannelA.short_channel_id);
const parts = clChannelA.short_channel_id.split('x');
const outputIdx = parts[2];
return {
channel_id: Common.channelShortIdToIntegerId(clChannelA.short_channel_id),
capacity: clChannelA.satoshis,
last_update: lastUpdate,
node1_policy: convertPolicy(clChannelA),
node2_policy: convertPolicy(clChannelB),
chan_point: `${tx.txid}:${outputIdx}`,
node1_pub: clChannelA.source,
node2_pub: clChannelB.source,
};
}
/**
 * Convert one clightning "getchannels" entry into a partial lnd "describegraph.edges" format
* In this case, clightning knows the channel policy of only one node
*/
async function buildIncompleteChannel(clChannel: any): Promise<ILightningApi.Channel> {
const tx = await FundingTxFetcher.$fetchChannelOpenTx(clChannel.short_channel_id);
const parts = clChannel.short_channel_id.split('x');
const outputIdx = parts[2];
return {
channel_id: Common.channelShortIdToIntegerId(clChannel.short_channel_id),
capacity: clChannel.satoshis,
last_update: clChannel.last_update ?? 0,
node1_policy: convertPolicy(clChannel),
node2_policy: null,
chan_point: `${tx.txid}:${outputIdx}`,
node1_pub: clChannel.source,
node2_pub: clChannel.destination,
};
}
/**
 * Convert a clightning "listchannels" entry to an lnd channel policy format
*/
function convertPolicy(clChannel: any): ILightningApi.RoutingPolicy {
return {
time_lock_delta: clChannel.delay,
min_htlc: clChannel.htlc_minimum_msat.slice(0, -4),
max_htlc_msat: clChannel.htlc_maximum_msat.slice(0, -4),
fee_base_msat: clChannel.base_fee_millisatoshi,
fee_rate_milli_msat: clChannel.fee_per_millionth,
disabled: !clChannel.active,
last_update: clChannel.last_update ?? 0,
};
}
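The .slice(0, -4) calls in convertPolicy exist because core lightning reports amounts as strings carrying a literal 'msat' suffix; slicing off the last four characters recovers the numeric part. A quick illustration:

// core lightning amount strings look like '1000msat'.
const htlc_minimum_msat = '1000msat';
console.log(htlc_minimum_msat.slice(0, -4)); // prints '1000'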

View File

@@ -1,5 +0,0 @@
import { ILightningApi } from './lightning-api.interface';
export interface AbstractLightningApi {
$getNetworkGraph(): Promise<ILightningApi.NetworkGraph>;
}

View File

@@ -1,16 +0,0 @@
import config from '../../config';
import CLightningClient from './clightning/clightning-client';
import { AbstractLightningApi } from './lightning-api-abstract-factory';
import LndApi from './lnd/lnd-api';
function lightningApiFactory(): AbstractLightningApi {
switch (config.LIGHTNING.ENABLED === true && config.LIGHTNING.BACKEND) {
case 'cln':
return new CLightningClient(config.CLIGHTNING.SOCKET);
case 'lnd':
default:
return new LndApi();
}
}
export default lightningApiFactory();
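Note that the switch expression is false whenever LIGHTNING.ENABLED is false, so no case matches and the default branch still constructs an LndApi. A hedged sketch of the config slice this factory reads (field names from the code above, values purely illustrative):

// Illustrative values; the real settings live in the mempool config file.
const LIGHTNING = { ENABLED: true, BACKEND: 'cln' };
const CLIGHTNING = { SOCKET: '/home/user/.lightning/bitcoin/lightning-rpc' };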

View File

@@ -1,92 +0,0 @@
export namespace ILightningApi {
export interface NetworkInfo {
graph_diameter: number;
avg_out_degree: number;
max_out_degree: number;
num_nodes: number;
num_channels: number;
total_network_capacity: string;
avg_channel_size: number;
min_channel_size: string;
max_channel_size: string;
median_channel_size_sat: string;
num_zombie_chans: string;
}
export interface NetworkGraph {
nodes: Node[];
edges: Channel[];
}
export interface Channel {
channel_id: string;
chan_point: string;
last_update: number;
node1_pub: string;
node2_pub: string;
capacity: string;
node1_policy: RoutingPolicy | null;
node2_policy: RoutingPolicy | null;
}
export interface RoutingPolicy {
time_lock_delta: number;
min_htlc: string;
fee_base_msat: string;
fee_rate_milli_msat: string;
disabled: boolean;
max_htlc_msat: string;
last_update: number;
}
export interface Node {
last_update: number;
pub_key: string;
alias: string;
addresses: {
network: string;
addr: string;
}[];
color: string;
features: { [key: number]: Feature };
custom_records?: { [type: number]: string };
}
export interface Info {
identity_pubkey: string;
alias: string;
num_pending_channels: number;
num_active_channels: number;
num_peers: number;
block_height: number;
block_hash: string;
synced_to_chain: boolean;
testnet: boolean;
uris: string[];
best_header_timestamp: string;
version: string;
num_inactive_channels: number;
chains: {
chain: string;
network: string;
}[];
color: string;
synced_to_graph: boolean;
features: { [key: number]: Feature };
commit_hash: string;
/** Available on LND since v0.15.0-beta */
require_htlc_interceptor?: boolean;
}
export interface Feature {
name: string;
is_required: boolean;
is_known: boolean;
}
export interface ForensicOutput {
node?: 1 | 2;
type: number;
value: number;
}
}

View File

@@ -1,41 +0,0 @@
import axios, { AxiosRequestConfig } from 'axios';
import { Agent } from 'https';
import * as fs from 'fs';
import { AbstractLightningApi } from '../lightning-api-abstract-factory';
import { ILightningApi } from '../lightning-api.interface';
import config from '../../../config';
class LndApi implements AbstractLightningApi {
axiosConfig: AxiosRequestConfig = {};
constructor() {
if (config.LIGHTNING.ENABLED) {
this.axiosConfig = {
headers: {
'Grpc-Metadata-macaroon': fs.readFileSync(config.LND.MACAROON_PATH).toString('hex')
},
httpsAgent: new Agent({
ca: fs.readFileSync(config.LND.TLS_CERT_PATH)
}),
timeout: 10000
};
}
}
async $getNetworkInfo(): Promise<ILightningApi.NetworkInfo> {
return axios.get<ILightningApi.NetworkInfo>(config.LND.REST_API_URL + '/v1/graph/info', this.axiosConfig)
.then((response) => response.data);
}
async $getInfo(): Promise<ILightningApi.Info> {
return axios.get<ILightningApi.Info>(config.LND.REST_API_URL + '/v1/getinfo', this.axiosConfig)
.then((response) => response.data);
}
async $getNetworkGraph(): Promise<ILightningApi.NetworkGraph> {
return axios.get<ILightningApi.NetworkGraph>(config.LND.REST_API_URL + '/v1/graph', this.axiosConfig)
.then((response) => response.data);
}
}
export default LndApi;
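A minimal sketch of pulling the graph through this wrapper; it assumes LIGHTNING.ENABLED is true and that the configured macaroon and TLS certificate paths are valid (the import path is illustrative):

import LndApi from './lnd/lnd-api'; // hypothetical path for illustration

const lnd = new LndApi();
lnd.$getNetworkGraph().then((graph) => {
  console.log(`${graph.nodes.length} nodes, ${graph.edges.length} channels`);
});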

View File

@@ -1,103 +0,0 @@
import { IBitcoinApi } from '../bitcoin/bitcoin-api.interface';
import bitcoinClient from '../bitcoin/bitcoin-client';
import bitcoinSecondClient from '../bitcoin/bitcoin-second-client';
import { Common } from '../common';
import DB from '../../database';
import logger from '../../logger';
class ElementsParser {
private isRunning = false;
constructor() { }
public async $parse() {
if (this.isRunning) {
return;
}
try {
this.isRunning = true;
const result = await bitcoinClient.getChainTips();
const tip = result[0].height;
const latestBlockHeight = await this.$getLatestBlockHeightFromDatabase();
for (let height = latestBlockHeight + 1; height <= tip; height++) {
const blockHash: string = await bitcoinClient.getBlockHash(height);
const block: IBitcoinApi.Block = await bitcoinClient.getBlock(blockHash, 2);
await this.$parseBlock(block);
await this.$saveLatestBlockToDatabase(block.height);
}
this.isRunning = false;
} catch (e) {
this.isRunning = false;
throw new Error(e instanceof Error ? e.message : 'Error');
}
}
public async $getPegDataByMonth(): Promise<any> {
const query = `SELECT SUM(amount) AS amount, DATE_FORMAT(FROM_UNIXTIME(datetime), '%Y-%m-01') AS date FROM elements_pegs GROUP BY DATE_FORMAT(FROM_UNIXTIME(datetime), '%Y%m')`;
const [rows] = await DB.query(query);
return rows;
}
protected async $parseBlock(block: IBitcoinApi.Block) {
for (const tx of block.tx) {
await this.$parseInputs(tx, block);
await this.$parseOutputs(tx, block);
}
}
protected async $parseInputs(tx: IBitcoinApi.Transaction, block: IBitcoinApi.Block) {
for (const [index, input] of tx.vin.entries()) {
if (input.is_pegin) {
await this.$parsePegIn(input, index, tx.txid, block);
}
}
}
protected async $parsePegIn(input: IBitcoinApi.Vin, vindex: number, txid: string, block: IBitcoinApi.Block) {
const bitcoinTx: IBitcoinApi.Transaction = await bitcoinSecondClient.getRawTransaction(input.txid, true);
const prevout = bitcoinTx.vout[input.vout || 0];
const outputAddress = prevout.scriptPubKey.address || (prevout.scriptPubKey.addresses && prevout.scriptPubKey.addresses[0]) || '';
await this.$savePegToDatabase(block.height, block.time, prevout.value * 100000000, txid, vindex,
outputAddress, bitcoinTx.txid, prevout.n, 1);
}
protected async $parseOutputs(tx: IBitcoinApi.Transaction, block: IBitcoinApi.Block) {
for (const output of tx.vout) {
if (output.scriptPubKey.pegout_chain) {
await this.$savePegToDatabase(block.height, block.time, 0 - output.value * 100000000, tx.txid, output.n,
(output.scriptPubKey.pegout_addresses && output.scriptPubKey.pegout_addresses[0] || ''), '', 0, 0);
}
if (!output.scriptPubKey.pegout_chain && output.scriptPubKey.type === 'nulldata'
&& output.value && output.value > 0 && output.asset && output.asset === Common.nativeAssetId) {
await this.$savePegToDatabase(block.height, block.time, 0 - output.value * 100000000, tx.txid, output.n,
(output.scriptPubKey.pegout_addresses && output.scriptPubKey.pegout_addresses[0] || ''), '', 0, 1);
}
}
}
protected async $savePegToDatabase(height: number, blockTime: number, amount: number, txid: string,
txindex: number, bitcoinaddress: string, bitcointxid: string, bitcoinindex: number, final_tx: number): Promise<void> {
const query = `INSERT INTO elements_pegs(
block, datetime, amount, txid, txindex, bitcoinaddress, bitcointxid, bitcoinindex, final_tx
) VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?)`;
const params: (string | number)[] = [
height, blockTime, amount, txid, txindex, bitcoinaddress, bitcointxid, bitcoinindex, final_tx
];
await DB.query(query, params);
logger.debug(`Saved L-BTC peg from block height #${height} with TXID ${txid}.`);
}
protected async $getLatestBlockHeightFromDatabase(): Promise<number> {
const query = `SELECT number FROM state WHERE name = 'last_elements_block'`;
const [rows] = await DB.query(query);
return rows[0]['number'];
}
protected async $saveLatestBlockToDatabase(blockHeight: number) {
const query = `UPDATE state SET number = ? WHERE name = 'last_elements_block'`;
await DB.query(query, [blockHeight]);
}
}
export default new ElementsParser();
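Note the unit convention above: peg amounts are stored in satoshis, positive for peg-ins and negative for peg-outs. A worked example of the arithmetic used by $parsePegIn and $parseOutputs:

// A peg-in of 0.5 L-BTC and a peg-out of 0.2 L-BTC, as the parser stores them.
const pegInSats = 0.5 * 100000000;      //  50000000
const pegOutSats = 0 - 0.2 * 100000000; // -20000000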

View File

@@ -1,38 +0,0 @@
import * as fs from 'fs';
import logger from '../../logger';
class Icons {
private static FILE_NAME = './icons.json';
private iconIds: string[] = [];
private icons: { [assetId: string]: string; } = {};
constructor() {}
public loadIcons() {
if (!fs.existsSync(Icons.FILE_NAME)) {
logger.warn(`${Icons.FILE_NAME} does not exist. No Liquid icons loaded.`);
return;
}
const cacheData = fs.readFileSync(Icons.FILE_NAME, 'utf8');
this.icons = JSON.parse(cacheData);
for (const i in this.icons) {
this.iconIds.push(i);
}
logger.debug(`Liquid icons have been loaded.`);
}
public getIconByAssetId(assetId: string): Buffer | undefined {
const icon = this.icons[assetId];
if (icon) {
return Buffer.from(icon, 'base64');
}
}
public getAllIconIds() {
return this.iconIds;
}
}
export default new Icons();

View File

@@ -1,73 +0,0 @@
import axios from 'axios';
import { Application, Request, Response } from 'express';
import config from '../../config';
import elementsParser from './elements-parser';
import icons from './icons';
class LiquidRoutes {
public initRoutes(app: Application) {
app
.get(config.MEMPOOL.API_URL_PREFIX + 'assets/icons', this.getAllLiquidIcon)
.get(config.MEMPOOL.API_URL_PREFIX + 'assets/featured', this.$getAllFeaturedLiquidAssets)
.get(config.MEMPOOL.API_URL_PREFIX + 'asset/:assetId/icon', this.getLiquidIcon)
.get(config.MEMPOOL.API_URL_PREFIX + 'assets/group/:id', this.$getAssetGroup)
;
if (config.DATABASE.ENABLED) {
app
.get(config.MEMPOOL.API_URL_PREFIX + 'liquid/pegs/month', this.$getElementsPegsByMonth)
;
}
}
private getLiquidIcon(req: Request, res: Response) {
const result = icons.getIconByAssetId(req.params.assetId);
if (result) {
res.setHeader('content-type', 'image/png');
res.setHeader('content-length', result.length);
res.send(result);
} else {
res.status(404).send('Asset icon not found');
}
}
private getAllLiquidIcon(req: Request, res: Response) {
const result = icons.getAllIconIds();
if (result) {
res.json(result);
} else {
res.status(404).send('Asset icons not found');
}
}
private async $getAllFeaturedLiquidAssets(req: Request, res: Response) {
try {
const response = await axios.get(`${config.EXTERNAL_DATA_SERVER.LIQUID_API}/assets/featured`, { responseType: 'stream', timeout: 10000 });
response.data.pipe(res);
} catch (e) {
res.status(500).end();
}
}
private async $getAssetGroup(req: Request, res: Response) {
try {
const response = await axios.get(`${config.EXTERNAL_DATA_SERVER.LIQUID_API}/assets/group/${parseInt(req.params.id, 10)}`,
{ responseType: 'stream', timeout: 10000 });
response.data.pipe(res);
} catch (e) {
res.status(500).end();
}
}
private async $getElementsPegsByMonth(req: Request, res: Response) {
try {
const pegs = await elementsParser.$getPegDataByMonth();
res.json(pegs);
} catch (e) {
res.status(500).send(e instanceof Error ? e.message : e);
}
}
}
export default new LiquidRoutes();
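A hedged example of fetching one asset icon through the route above; the host is illustrative, the URL prefix assumes the default API_URL_PREFIX, and the assetId shown is the well-known L-BTC asset on Liquid:

import axios from 'axios';

const assetId = '6f0279e9ed041c3d710a9f57d0c02928416460c4b722ae3457a11eec381c526d';
axios.get(`https://liquid.network/api/v1/asset/${assetId}/icon`, { responseType: 'arraybuffer' })
  .then((res) => console.log(`icon is ${res.data.length} bytes`)); // served as image/png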

View File

@@ -1,32 +0,0 @@
import { ILoadingIndicators } from '../mempool.interfaces';
class LoadingIndicators {
private loadingIndicators: ILoadingIndicators = {
'mempool': 0,
};
private progressChangedCallback: ((loadingIndicators: ILoadingIndicators) => void) | undefined;
constructor() { }
public setProgressChangedCallback(fn: (loadingIndicators: ILoadingIndicators) => void) {
this.progressChangedCallback = fn;
}
public setProgress(name: string, progressPercent: number, rounded: boolean = true) {
const newProgress = rounded === true ? Math.round(progressPercent) : progressPercent;
if (newProgress >= 100) {
delete this.loadingIndicators[name];
} else {
this.loadingIndicators[name] = newProgress;
}
if (this.progressChangedCallback) {
this.progressChangedCallback(this.loadingIndicators);
}
}
public getLoadingIndicators() {
return this.loadingIndicators;
}
}
export default new LoadingIndicators();
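A short sketch of how a producer drives an indicator; note that reporting 100% removes the key entirely rather than storing it (the import path is illustrative):

import loadingIndicators from './loading-indicators'; // hypothetical path

loadingIndicators.setProgressChangedCallback((indicators) => {
  console.log(indicators); // e.g. { mempool: 42 }
});
loadingIndicators.setProgress('mempool', 42.4); // rounded to 42 by default
loadingIndicators.setProgress('mempool', 100);  // deletes the 'mempool' entry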

View File

@@ -1,38 +0,0 @@
interface ICache {
type: string;
id: string;
expires: Date;
data: any;
}
class MemoryCache {
private cache: ICache[] = [];
constructor() {
setInterval(this.cleanup.bind(this), 1000);
}
public set(type: string, id: string, data: any, secondsExpiry: number) {
const expiry = new Date();
expiry.setSeconds(expiry.getSeconds() + secondsExpiry);
this.cache.push({
type: type,
id: id,
data: data,
expires: expiry,
});
}
public get<T>(type: string, id: string): T | null {
const found = this.cache.find((cache) => cache.type === type && cache.id === id);
if (found) {
return found.data;
}
return null;
}
private cleanup() {
this.cache = this.cache.filter((cache) => cache.expires > (new Date()));
}
}
export default new MemoryCache();
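A usage sketch, assuming entries live at most secondsExpiry seconds; once expired, the periodic cleanup drops them and get() returns null (the import path is illustrative):

import memoryCache from './memory-cache'; // hypothetical path

memoryCache.set('mempoolinfo', 'latest', { size: 12345 }, 10); // 10 second TTL
const info = memoryCache.get<{ size: number }>('mempoolinfo', 'latest');
console.log(info?.size); // 12345 while fresh; get() returns null once expired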

View File

@@ -1,192 +0,0 @@
import logger from '../logger';
import { MempoolBlock, TransactionExtended, TransactionStripped, MempoolBlockWithTransactions, MempoolBlockDelta } from '../mempool.interfaces';
import { Common } from './common';
import config from '../config';
import { StaticPool } from 'node-worker-threads-pool';
import path from 'path';
class MempoolBlocks {
private mempoolBlocks: MempoolBlockWithTransactions[] = [];
private mempoolBlockDeltas: MempoolBlockDelta[] = [];
private makeTemplatesPool = new StaticPool({
size: 1,
task: path.resolve(__dirname, './tx-selection-worker.js'),
});
constructor() {}
public getMempoolBlocks(): MempoolBlock[] {
return this.mempoolBlocks.map((block) => {
return {
blockSize: block.blockSize,
blockVSize: block.blockVSize,
nTx: block.nTx,
totalFees: block.totalFees,
medianFee: block.medianFee,
feeRange: block.feeRange,
};
});
}
public getMempoolBlocksWithTransactions(): MempoolBlockWithTransactions[] {
return this.mempoolBlocks;
}
public getMempoolBlockDeltas(): MempoolBlockDelta[] {
return this.mempoolBlockDeltas;
}
public updateMempoolBlocks(memPool: { [txid: string]: TransactionExtended }): void {
const latestMempool = memPool;
const memPoolArray: TransactionExtended[] = [];
for (const i in latestMempool) {
if (latestMempool.hasOwnProperty(i)) {
memPoolArray.push(latestMempool[i]);
}
}
const start = new Date().getTime();
// Clear bestDescendants & ancestors
memPoolArray.forEach((tx) => {
tx.bestDescendant = null;
tx.ancestors = [];
tx.cpfpChecked = false;
if (!tx.effectiveFeePerVsize) {
tx.effectiveFeePerVsize = tx.feePerVsize;
}
});
// First sort
memPoolArray.sort((a, b) => b.feePerVsize - a.feePerVsize);
// Loop through and traverse all ancestors and sum up all the sizes + fees
// Pass down size + fee to all unconfirmed children
let sizes = 0;
memPoolArray.forEach((tx, i) => {
sizes += tx.weight;
if (sizes > 4000000 * 8) {
return;
}
Common.setRelativesAndGetCpfpInfo(tx, memPool);
});
// Final sort, by effective fee
memPoolArray.sort((a, b) => b.effectiveFeePerVsize - a.effectiveFeePerVsize);
const end = new Date().getTime();
const time = end - start;
logger.debug('Mempool blocks calculated in ' + time / 1000 + ' seconds');
const blocks = this.calculateMempoolBlocks(memPoolArray, this.mempoolBlocks);
const deltas = this.calculateMempoolDeltas(this.mempoolBlocks, blocks);
this.mempoolBlocks = blocks;
this.mempoolBlockDeltas = deltas;
}
private calculateMempoolBlocks(transactionsSorted: TransactionExtended[], prevBlocks: MempoolBlockWithTransactions[]): MempoolBlockWithTransactions[] {
const mempoolBlocks: MempoolBlockWithTransactions[] = [];
let blockWeight = 0;
let blockSize = 0;
let transactions: TransactionExtended[] = [];
transactionsSorted.forEach((tx) => {
if (blockWeight + tx.weight <= config.MEMPOOL.BLOCK_WEIGHT_UNITS
|| mempoolBlocks.length === config.MEMPOOL.MEMPOOL_BLOCKS_AMOUNT - 1) {
blockWeight += tx.weight;
blockSize += tx.size;
transactions.push(tx);
} else {
mempoolBlocks.push(this.dataToMempoolBlocks(transactions, blockSize, blockWeight, mempoolBlocks.length));
blockWeight = tx.weight;
blockSize = tx.size;
transactions = [tx];
}
});
if (transactions.length) {
mempoolBlocks.push(this.dataToMempoolBlocks(transactions, blockSize, blockWeight, mempoolBlocks.length));
}
return mempoolBlocks;
}
private calculateMempoolDeltas(prevBlocks: MempoolBlockWithTransactions[], mempoolBlocks: MempoolBlockWithTransactions[]): MempoolBlockDelta[] {
const mempoolBlockDeltas: MempoolBlockDelta[] = [];
for (let i = 0; i < Math.max(mempoolBlocks.length, prevBlocks.length); i++) {
let added: TransactionStripped[] = [];
let removed: string[] = [];
if (mempoolBlocks[i] && !prevBlocks[i]) {
added = mempoolBlocks[i].transactions;
} else if (!mempoolBlocks[i] && prevBlocks[i]) {
removed = prevBlocks[i].transactions.map(tx => tx.txid);
} else if (mempoolBlocks[i] && prevBlocks[i]) {
const prevIds = {};
const newIds = {};
prevBlocks[i].transactions.forEach(tx => {
prevIds[tx.txid] = true;
});
mempoolBlocks[i].transactions.forEach(tx => {
newIds[tx.txid] = true;
});
prevBlocks[i].transactions.forEach(tx => {
if (!newIds[tx.txid]) {
removed.push(tx.txid);
}
});
mempoolBlocks[i].transactions.forEach(tx => {
if (!prevIds[tx.txid]) {
added.push(tx);
}
});
}
mempoolBlockDeltas.push({
added,
removed
});
}
return mempoolBlockDeltas;
}
public async makeBlockTemplates(newMempool: { [txid: string]: TransactionExtended }, blockLimit: number, weightLimit: number | null = null, condenseRest = false): Promise<void> {
const { mempool, blocks } = await this.makeTemplatesPool.exec({ mempool: newMempool, blockLimit, weightLimit, condenseRest });
const deltas = this.calculateMempoolDeltas(this.mempoolBlocks, blocks);
// copy CPFP info across to main thread's mempool
Object.keys(newMempool).forEach((txid) => {
if (newMempool[txid] && mempool[txid]) {
newMempool[txid].effectiveFeePerVsize = mempool[txid].effectiveFeePerVsize;
newMempool[txid].ancestors = mempool[txid].ancestors;
newMempool[txid].descendants = mempool[txid].descendants;
newMempool[txid].bestDescendant = mempool[txid].bestDescendant;
newMempool[txid].cpfpChecked = mempool[txid].cpfpChecked;
}
});
this.mempoolBlocks = blocks;
this.mempoolBlockDeltas = deltas;
}
private dataToMempoolBlocks(transactions: TransactionExtended[],
blockSize: number, blockWeight: number, blocksIndex: number): MempoolBlockWithTransactions {
let rangeLength = 4;
if (blocksIndex === 0) {
rangeLength = 8;
}
if (transactions.length > 10000) {
rangeLength = 8;
} else if (transactions.length > 4000) {
rangeLength = 6;
}
return {
blockSize: blockSize,
blockVSize: blockWeight / 4,
nTx: transactions.length,
totalFees: transactions.reduce((acc, cur) => acc + cur.fee, 0),
medianFee: Common.percentile(transactions.map((tx) => tx.effectiveFeePerVsize), config.MEMPOOL.RECOMMENDED_FEE_PERCENTILE),
feeRange: Common.getFeesInRange(transactions, rangeLength),
transactionIds: transactions.map((tx) => tx.txid),
transactions: transactions.map((tx) => Common.stripTransaction(tx)),
};
}
}
export default new MempoolBlocks();
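As a sanity check on the units in dataToMempoolBlocks: templates are packed by weight (capped at config.MEMPOOL.BLOCK_WEIGHT_UNITS, 4,000,000 by default) and reported as virtual size, which is weight divided by four:

// Illustrative numbers only.
const blockWeight = 3999200;        // weight units accumulated by the packer
const blockVSize = blockWeight / 4; // 999800 vbytes, just under the 1,000,000 cap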

View File

@@ -1,94 +1,35 @@
import config from '../config';
const config = require('../../mempool-config.json');
import bitcoinApi from './bitcoin/bitcoin-api-factory';
import { TransactionExtended, VbytesPerSecond } from '../mempool.interfaces';
import logger from '../logger';
import { Common } from './common';
import transactionUtils from './transaction-utils';
import { IBitcoinApi } from './bitcoin/bitcoin-api.interface';
import loadingIndicators from './loading-indicators';
import bitcoinClient from './bitcoin/bitcoin-client';
import bitcoinSecondClient from './bitcoin/bitcoin-second-client';
import rbfCache from './rbf-cache';
import { ITransaction, IMempoolInfo, IMempool } from '../interfaces';
class Mempool {
private static WEBSOCKET_REFRESH_RATE_MS = 10000;
private static LAZY_DELETE_AFTER_SECONDS = 30;
private inSync: boolean = false;
private mempoolCacheDelta: number = -1;
private mempoolCache: { [txId: string]: TransactionExtended } = {};
private mempoolInfo: IBitcoinApi.MempoolInfo = { loaded: false, size: 0, bytes: 0, usage: 0, total_fee: 0,
maxmempool: 300000000, mempoolminfee: 0.00001000, minrelaytxfee: 0.00001000 };
private mempoolChangedCallback: ((newMempool: {[txId: string]: TransactionExtended; }, newTransactions: TransactionExtended[],
deletedTransactions: TransactionExtended[]) => void) | undefined;
private asyncMempoolChangedCallback: ((newMempool: {[txId: string]: TransactionExtended; }, newTransactions: TransactionExtended[],
deletedTransactions: TransactionExtended[]) => void) | undefined;
private mempool: IMempool = {};
private mempoolInfo: IMempoolInfo | undefined;
private mempoolChangedCallback: Function | undefined;
private txPerSecondArray: number[] = [];
private txPerSecond: number = 0;
private vBytesPerSecondArray: VbytesPerSecond[] = [];
private vBytesPerSecondArray: any[] = [];
private vBytesPerSecond: number = 0;
private mempoolProtection = 0;
private latestTransactions: any[] = [];
constructor() {
setInterval(this.updateTxPerSecond.bind(this), 1000);
setInterval(this.deleteExpiredTransactions.bind(this), 20000);
}
/**
* Return true if we should leave resources available for mempool tx caching
*/
public hasPriority(): boolean {
if (this.inSync) {
return false;
} else {
return this.mempoolCacheDelta == -1 || this.mempoolCacheDelta > 25;
}
}
public isInSync(): boolean {
return this.inSync;
}
public setOutOfSync(): void {
this.inSync = false;
loadingIndicators.setProgress('mempool', 99);
}
public getLatestTransactions() {
return this.latestTransactions;
}
public setMempoolChangedCallback(fn: (newMempool: { [txId: string]: TransactionExtended; },
newTransactions: TransactionExtended[], deletedTransactions: TransactionExtended[]) => void) {
public setMempoolChangedCallback(fn: Function) {
this.mempoolChangedCallback = fn;
}
public setAsyncMempoolChangedCallback(fn: (newMempool: { [txId: string]: TransactionExtended; },
newTransactions: TransactionExtended[], deletedTransactions: TransactionExtended[]) => Promise<void>) {
this.asyncMempoolChangedCallback = fn;
public getMempool(): { [txid: string]: ITransaction } {
return this.mempool;
}
public getMempool(): { [txid: string]: TransactionExtended } {
return this.mempoolCache;
public setMempool(mempoolData: any) {
this.mempool = mempoolData;
}
public setMempool(mempoolData: { [txId: string]: TransactionExtended }) {
this.mempoolCache = mempoolData;
if (this.mempoolChangedCallback) {
this.mempoolChangedCallback(this.mempoolCache, [], []);
}
if (this.asyncMempoolChangedCallback) {
this.asyncMempoolChangedCallback(this.mempoolCache, [], []);
}
}
public async $updateMemPoolInfo() {
this.mempoolInfo = await this.$getMempoolInfo();
}
public getMempoolInfo(): IBitcoinApi.MempoolInfo {
public getMempoolInfo(): IMempoolInfo | undefined {
return this.mempoolInfo;
}
@@ -100,160 +41,128 @@ class Mempool {
return this.vBytesPerSecond;
}
public getFirstSeenForTransactions(txIds: string[]): number[] {
const txTimes: number[] = [];
txIds.forEach((txId: string) => {
const tx = this.mempoolCache[txId];
if (tx && tx.firstSeen) {
txTimes.push(tx.firstSeen);
} else {
txTimes.push(0);
}
});
return txTimes;
public async updateMemPoolInfo() {
try {
this.mempoolInfo = await bitcoinApi.getMempoolInfo();
} catch (err) {
console.log('Error getMempoolInfo', err);
}
}
public async $updateMempool(): Promise<void> {
logger.debug(`Updating mempool...`);
public async getRawTransaction(txId: string, isCoinbase = false): Promise<ITransaction | false> {
try {
const transaction = await bitcoinApi.getRawTransaction(txId);
let totalOut = 0;
transaction.vout.forEach((output) => totalOut += output.value);
if (config.BACKEND_API === 'electrs') {
transaction.feePerWeightUnit = (transaction.fee * 100000000) / transaction.weight || 0;
transaction.feePerVsize = (transaction.fee * 100000000) / (transaction.vsize) || 0;
transaction.totalOut = totalOut / 100000000;
} else {
let totalIn = 0;
if (!isCoinbase) {
for (let i = 0; i < transaction.vin.length; i++) {
try {
const result = await bitcoinApi.getRawTransaction(transaction.vin[i].txid);
transaction.vin[i]['value'] = result.vout[transaction.vin[i].vout].value;
totalIn += result.vout[transaction.vin[i].vout].value;
} catch (err) {
console.log('Locating historical tx error');
}
}
}
if (totalIn > totalOut) {
transaction.fee = parseFloat((totalIn - totalOut).toFixed(8));
transaction.feePerWeightUnit = (transaction.fee * 100000000) / (transaction.vsize * 4) || 0;
transaction.feePerVsize = (transaction.fee * 100000000) / (transaction.vsize) || 0;
} else if (!isCoinbase) {
transaction.fee = 0;
transaction.feePerVsize = 0;
transaction.feePerWeightUnit = 0;
console.log('Minus fee error!');
}
transaction.totalOut = totalOut;
}
return transaction;
} catch (e) {
console.log(txId + ' not found');
return false;
}
}
public async updateMempool() {
console.log('Updating mempool');
const start = new Date().getTime();
let hasChange: boolean = false;
const currentMempoolSize = Object.keys(this.mempoolCache).length;
const transactions = await bitcoinApi.$getRawMempool();
const diff = transactions.length - currentMempoolSize;
const newTransactions: TransactionExtended[] = [];
this.mempoolCacheDelta = Math.abs(diff);
if (!this.inSync) {
loadingIndicators.setProgress('mempool', Object.keys(this.mempoolCache).length / transactions.length * 100);
}
for (const txid of transactions) {
if (!this.mempoolCache[txid]) {
try {
const transaction = await transactionUtils.$getTransactionExtended(txid);
this.mempoolCache[txid] = transaction;
if (this.inSync) {
let txCount = 0;
try {
const transactions = await bitcoinApi.getRawMempool();
const diff = transactions.length - Object.keys(this.mempool).length;
for (const tx of transactions) {
if (!this.mempool[tx]) {
const transaction = await this.getRawTransaction(tx);
if (transaction) {
this.mempool[tx] = transaction;
txCount++;
this.txPerSecondArray.push(new Date().getTime());
this.vBytesPerSecondArray.push({
unixTime: new Date().getTime(),
vSize: transaction.vsize,
});
hasChange = true;
if (diff > 0) {
console.log('Calculated fee for transaction ' + txCount + ' / ' + diff);
} else {
console.log('Calculated fee for transaction ' + txCount);
}
} else {
console.log('Error finding transaction in mempool.');
}
}
if ((new Date().getTime()) - start > config.MEMPOOL_REFRESH_RATE_MS * 10) {
break;
}
}
const newMempool: IMempool = {};
transactions.forEach((tx) => {
if (this.mempool[tx]) {
newMempool[tx] = this.mempool[tx];
} else {
hasChange = true;
newTransactions.push(transaction);
} catch (e) {
logger.debug(`Error finding transaction '${txid}' in the mempool: ` + (e instanceof Error ? e.message : e));
}
});
this.mempool = newMempool;
if (hasChange && this.mempoolChangedCallback) {
this.mempoolChangedCallback(this.mempool);
}
if ((new Date().getTime()) - start > Mempool.WEBSOCKET_REFRESH_RATE_MS) {
break;
}
}
// Prevent the mempool from being cleared on bitcoind restart by delaying the deletion
if (this.mempoolProtection === 0
&& currentMempoolSize > 20000
&& transactions.length / currentMempoolSize <= 0.80
) {
this.mempoolProtection = 1;
this.inSync = false;
logger.warn(`Mempool clear protection triggered because transactions.length: ${transactions.length} and currentMempoolSize: ${currentMempoolSize}.`);
setTimeout(() => {
this.mempoolProtection = 2;
logger.warn('Mempool clear protection resumed.');
}, 1000 * 60 * config.MEMPOOL.CLEAR_PROTECTION_MINUTES);
}
const deletedTransactions: TransactionExtended[] = [];
if (this.mempoolProtection !== 1) {
this.mempoolProtection = 0;
// Index object for faster search
const transactionsObject = {};
transactions.forEach((txId) => transactionsObject[txId] = true);
// Flag transactions for lazy deletion
for (const tx in this.mempoolCache) {
if (!transactionsObject[tx] && !this.mempoolCache[tx].deleteAfter) {
deletedTransactions.push(this.mempoolCache[tx]);
this.mempoolCache[tx].deleteAfter = new Date().getTime() + Mempool.LAZY_DELETE_AFTER_SECONDS * 1000;
}
}
}
const newTransactionsStripped = newTransactions.map((tx) => Common.stripTransaction(tx));
this.latestTransactions = newTransactionsStripped.concat(this.latestTransactions).slice(0, 6);
if (!this.inSync && transactions.length === Object.keys(this.mempoolCache).length) {
this.inSync = true;
logger.notice('The mempool is now in sync!');
loadingIndicators.setProgress('mempool', 100);
}
this.mempoolCacheDelta = Math.abs(transactions.length - Object.keys(this.mempoolCache).length);
if (this.mempoolChangedCallback && (hasChange || deletedTransactions.length)) {
this.mempoolChangedCallback(this.mempoolCache, newTransactions, deletedTransactions);
}
if (this.asyncMempoolChangedCallback && (hasChange || deletedTransactions.length)) {
await this.asyncMempoolChangedCallback(this.mempoolCache, newTransactions, deletedTransactions);
}
const end = new Date().getTime();
const time = end - start;
logger.debug(`Mempool updated in ${time / 1000} seconds. New size: ${Object.keys(this.mempoolCache).length} (${diff > 0 ? '+' + diff : diff})`);
}
public handleRbfTransactions(rbfTransactions: { [txid: string]: TransactionExtended; }) {
for (const rbfTransaction in rbfTransactions) {
if (this.mempoolCache[rbfTransaction]) {
// Store replaced transactions
rbfCache.add(rbfTransaction, rbfTransactions[rbfTransaction].txid);
// Erase the replaced transactions from the local mempool
delete this.mempoolCache[rbfTransaction];
}
const end = new Date().getTime();
const time = end - start;
console.log('Mempool updated in ' + time / 1000 + ' seconds');
} catch (err) {
console.log('getRawMempool error.', err);
}
}
private updateTxPerSecond() {
const nowMinusTimeSpan = new Date().getTime() - (1000 * config.STATISTICS.TX_PER_SECOND_SAMPLE_PERIOD);
const nowMinusTimeSpan = new Date().getTime() - (1000 * config.TX_PER_SECOND_SPAN_SECONDS);
this.txPerSecondArray = this.txPerSecondArray.filter((unixTime) => unixTime > nowMinusTimeSpan);
this.txPerSecond = this.txPerSecondArray.length / config.STATISTICS.TX_PER_SECOND_SAMPLE_PERIOD || 0;
this.txPerSecond = this.txPerSecondArray.length / config.TX_PER_SECOND_SPAN_SECONDS || 0;
this.vBytesPerSecondArray = this.vBytesPerSecondArray.filter((data) => data.unixTime > nowMinusTimeSpan);
if (this.vBytesPerSecondArray.length) {
this.vBytesPerSecond = Math.round(
this.vBytesPerSecondArray.map((data) => data.vSize).reduce((a, b) => a + b) / config.STATISTICS.TX_PER_SECOND_SAMPLE_PERIOD
this.vBytesPerSecondArray.map((data) => data.vSize).reduce((a, b) => a + b) / config.TX_PER_SECOND_SPAN_SECONDS
);
}
}
private deleteExpiredTransactions() {
const now = new Date().getTime();
for (const tx in this.mempoolCache) {
const lazyDeleteAt = this.mempoolCache[tx].deleteAfter;
if (lazyDeleteAt && lazyDeleteAt < now) {
delete this.mempoolCache[tx];
}
}
}
private $getMempoolInfo() {
if (config.MEMPOOL.USE_SECOND_NODE_FOR_MINFEE) {
return Promise.all([
bitcoinClient.getMempoolInfo(),
bitcoinSecondClient.getMempoolInfo()
]).then(([mempoolInfo, secondMempoolInfo]) => {
mempoolInfo.maxmempool = secondMempoolInfo.maxmempool;
mempoolInfo.mempoolminfee = secondMempoolInfo.mempoolminfee;
mempoolInfo.minrelaytxfee = secondMempoolInfo.minrelaytxfee;
return mempoolInfo;
});
}
return bitcoinClient.getMempoolInfo();
}
}
export default new Mempool();

View File

@@ -1,311 +0,0 @@
import { Application, Request, Response } from 'express';
import config from "../../config";
import logger from '../../logger';
import audits from '../audit';
import BlocksAuditsRepository from '../../repositories/BlocksAuditsRepository';
import BlocksRepository from '../../repositories/BlocksRepository';
import DifficultyAdjustmentsRepository from '../../repositories/DifficultyAdjustmentsRepository';
import HashratesRepository from '../../repositories/HashratesRepository';
import bitcoinClient from '../bitcoin/bitcoin-client';
import mining from "./mining";
class MiningRoutes {
public initRoutes(app: Application) {
app
.get(config.MEMPOOL.API_URL_PREFIX + 'mining/pools/:interval', this.$getPools)
.get(config.MEMPOOL.API_URL_PREFIX + 'mining/pool/:slug/hashrate', this.$getPoolHistoricalHashrate)
.get(config.MEMPOOL.API_URL_PREFIX + 'mining/pool/:slug/blocks', this.$getPoolBlocks)
.get(config.MEMPOOL.API_URL_PREFIX + 'mining/pool/:slug/blocks/:height', this.$getPoolBlocks)
.get(config.MEMPOOL.API_URL_PREFIX + 'mining/pool/:slug', this.$getPool)
.get(config.MEMPOOL.API_URL_PREFIX + 'mining/hashrate/pools/:interval', this.$getPoolsHistoricalHashrate)
.get(config.MEMPOOL.API_URL_PREFIX + 'mining/hashrate/:interval', this.$getHistoricalHashrate)
.get(config.MEMPOOL.API_URL_PREFIX + 'mining/difficulty-adjustments', this.$getDifficultyAdjustments)
.get(config.MEMPOOL.API_URL_PREFIX + 'mining/reward-stats/:blockCount', this.$getRewardStats)
.get(config.MEMPOOL.API_URL_PREFIX + 'mining/blocks/fees/:interval', this.$getHistoricalBlockFees)
.get(config.MEMPOOL.API_URL_PREFIX + 'mining/blocks/rewards/:interval', this.$getHistoricalBlockRewards)
.get(config.MEMPOOL.API_URL_PREFIX + 'mining/blocks/fee-rates/:interval', this.$getHistoricalBlockFeeRates)
.get(config.MEMPOOL.API_URL_PREFIX + 'mining/blocks/sizes-weights/:interval', this.$getHistoricalBlockSizeAndWeight)
.get(config.MEMPOOL.API_URL_PREFIX + 'mining/difficulty-adjustments/:interval', this.$getDifficultyAdjustments)
.get(config.MEMPOOL.API_URL_PREFIX + 'mining/blocks/predictions/:interval', this.$getHistoricalBlockPrediction)
.get(config.MEMPOOL.API_URL_PREFIX + 'mining/blocks/audit/scores', this.$getBlockAuditScores)
.get(config.MEMPOOL.API_URL_PREFIX + 'mining/blocks/audit/scores/:height', this.$getBlockAuditScores)
.get(config.MEMPOOL.API_URL_PREFIX + 'mining/blocks/audit/score/:hash', this.$getBlockAuditScore)
.get(config.MEMPOOL.API_URL_PREFIX + 'mining/blocks/audit/:hash', this.$getBlockAudit)
.get(config.MEMPOOL.API_URL_PREFIX + 'mining/blocks/timestamp/:timestamp', this.$getHeightFromTimestamp)
;
}
private async $getPool(req: Request, res: Response): Promise<void> {
try {
const stats = await mining.$getPoolStat(req.params.slug);
res.header('Pragma', 'public');
res.header('Cache-control', 'public');
res.setHeader('Expires', new Date(Date.now() + 1000 * 60).toUTCString());
res.json(stats);
} catch (e) {
if (e instanceof Error && e.message.indexOf('This mining pool does not exist') > -1) {
res.status(404).send(e.message);
} else {
res.status(500).send(e instanceof Error ? e.message : e);
}
}
}
private async $getPoolBlocks(req: Request, res: Response) {
try {
const poolBlocks = await BlocksRepository.$getBlocksByPool(
req.params.slug,
req.params.height === undefined ? undefined : parseInt(req.params.height, 10),
);
res.header('Pragma', 'public');
res.header('Cache-control', 'public');
res.setHeader('Expires', new Date(Date.now() + 1000 * 60).toUTCString());
res.json(poolBlocks);
} catch (e) {
if (e instanceof Error && e.message.indexOf('This mining pool does not exist') > -1) {
res.status(404).send(e.message);
} else {
res.status(500).send(e instanceof Error ? e.message : e);
}
}
}
private async $getPools(req: Request, res: Response) {
try {
const stats = await mining.$getPoolsStats(req.params.interval);
const blockCount = await BlocksRepository.$blockCount(null, null);
res.header('Pragma', 'public');
res.header('Cache-control', 'public');
res.header('X-total-count', blockCount.toString());
res.setHeader('Expires', new Date(Date.now() + 1000 * 60).toUTCString());
res.json(stats);
} catch (e) {
res.status(500).send(e instanceof Error ? e.message : e);
}
}
private async $getPoolsHistoricalHashrate(req: Request, res: Response) {
try {
const hashrates = await HashratesRepository.$getPoolsWeeklyHashrate(req.params.interval);
const blockCount = await BlocksRepository.$blockCount(null, null);
res.header('Pragma', 'public');
res.header('Cache-control', 'public');
res.header('X-total-count', blockCount.toString());
res.setHeader('Expires', new Date(Date.now() + 1000 * 300).toUTCString());
res.json(hashrates);
} catch (e) {
res.status(500).send(e instanceof Error ? e.message : e);
}
}
private async $getPoolHistoricalHashrate(req: Request, res: Response) {
try {
const hashrates = await HashratesRepository.$getPoolWeeklyHashrate(req.params.slug);
const blockCount = await BlocksRepository.$blockCount(null, null);
res.header('Pragma', 'public');
res.header('Cache-control', 'public');
res.header('X-total-count', blockCount.toString());
res.setHeader('Expires', new Date(Date.now() + 1000 * 300).toUTCString());
res.json(hashrates);
} catch (e) {
if (e instanceof Error && e.message.indexOf('This mining pool does not exist') > -1) {
res.status(404).send(e.message);
} else {
res.status(500).send(e instanceof Error ? e.message : e);
}
}
}
private async $getHistoricalHashrate(req: Request, res: Response) {
let currentHashrate = 0, currentDifficulty = 0;
try {
currentHashrate = await bitcoinClient.getNetworkHashPs();
currentDifficulty = await bitcoinClient.getDifficulty();
} catch (e) {
logger.debug('Bitcoin Core is not available, using zeroed value for current hashrate and difficulty');
}
try {
const hashrates = await HashratesRepository.$getNetworkDailyHashrate(req.params.interval);
const difficulty = await DifficultyAdjustmentsRepository.$getAdjustments(req.params.interval, false);
const blockCount = await BlocksRepository.$blockCount(null, null);
res.header('Pragma', 'public');
res.header('Cache-control', 'public');
res.header('X-total-count', blockCount.toString());
res.setHeader('Expires', new Date(Date.now() + 1000 * 300).toUTCString());
res.json({
hashrates: hashrates,
difficulty: difficulty,
currentHashrate: currentHashrate,
currentDifficulty: currentDifficulty,
});
} catch (e) {
res.status(500).send(e instanceof Error ? e.message : e);
}
}
private async $getHistoricalBlockFees(req: Request, res: Response) {
try {
const blockFees = await mining.$getHistoricalBlockFees(req.params.interval);
const blockCount = await BlocksRepository.$blockCount(null, null);
res.header('Pragma', 'public');
res.header('Cache-control', 'public');
res.header('X-total-count', blockCount.toString());
res.setHeader('Expires', new Date(Date.now() + 1000 * 60).toUTCString());
res.json(blockFees);
} catch (e) {
res.status(500).send(e instanceof Error ? e.message : e);
}
}
private async $getHistoricalBlockRewards(req: Request, res: Response) {
try {
const blockRewards = await mining.$getHistoricalBlockRewards(req.params.interval);
const blockCount = await BlocksRepository.$blockCount(null, null);
res.header('Pragma', 'public');
res.header('Cache-control', 'public');
res.header('X-total-count', blockCount.toString());
res.setHeader('Expires', new Date(Date.now() + 1000 * 60).toUTCString());
res.json(blockRewards);
} catch (e) {
res.status(500).send(e instanceof Error ? e.message : e);
}
}
private async $getHistoricalBlockFeeRates(req: Request, res: Response) {
try {
const blockFeeRates = await mining.$getHistoricalBlockFeeRates(req.params.interval);
const blockCount = await BlocksRepository.$blockCount(null, null);
res.header('Pragma', 'public');
res.header('Cache-control', 'public');
res.header('X-total-count', blockCount.toString());
res.setHeader('Expires', new Date(Date.now() + 1000 * 60).toUTCString());
res.json(blockFeeRates);
} catch (e) {
res.status(500).send(e instanceof Error ? e.message : e);
}
}
private async $getHistoricalBlockSizeAndWeight(req: Request, res: Response) {
try {
const blockSizes = await mining.$getHistoricalBlockSizes(req.params.interval);
const blockWeights = await mining.$getHistoricalBlockWeights(req.params.interval);
const blockCount = await BlocksRepository.$blockCount(null, null);
res.header('Pragma', 'public');
res.header('Cache-control', 'public');
res.header('X-total-count', blockCount.toString());
res.setHeader('Expires', new Date(Date.now() + 1000 * 60).toUTCString());
res.json({
sizes: blockSizes,
weights: blockWeights
});
} catch (e) {
res.status(500).send(e instanceof Error ? e.message : e);
}
}
private async $getDifficultyAdjustments(req: Request, res: Response) {
try {
const difficulty = await DifficultyAdjustmentsRepository.$getRawAdjustments(req.params.interval, true);
res.header('Pragma', 'public');
res.header('Cache-control', 'public');
res.setHeader('Expires', new Date(Date.now() + 1000 * 300).toUTCString());
res.json(difficulty.map(adj => [adj.time, adj.height, adj.difficulty, adj.adjustment]));
} catch (e) {
res.status(500).send(e instanceof Error ? e.message : e);
}
}
private async $getRewardStats(req: Request, res: Response) {
try {
const response = await mining.$getRewardStats(parseInt(req.params.blockCount, 10));
res.setHeader('Expires', new Date(Date.now() + 1000 * 60).toUTCString());
res.json(response);
} catch (e) {
res.status(500).end();
}
}
private async $getHistoricalBlockPrediction(req: Request, res: Response) {
try {
const blockPredictions = await mining.$getBlockPredictionsHistory(req.params.interval);
const blockCount = await BlocksAuditsRepository.$getPredictionsCount();
res.header('Pragma', 'public');
res.header('Cache-control', 'public');
res.header('X-total-count', blockCount.toString());
res.setHeader('Expires', new Date(Date.now() + 1000 * 60).toUTCString());
res.json(blockPredictions.map(prediction => [prediction.time, prediction.height, prediction.match_rate]));
} catch (e) {
res.status(500).send(e instanceof Error ? e.message : e);
}
}
public async $getBlockAudit(req: Request, res: Response) {
try {
const audit = await BlocksAuditsRepository.$getBlockAudit(req.params.hash);
if (!audit) {
res.status(404).send(`This block has not been audited.`);
return;
}
res.header('Pragma', 'public');
res.header('Cache-control', 'public');
res.setHeader('Expires', new Date(Date.now() + 1000 * 3600 * 24).toUTCString());
res.json(audit);
} catch (e) {
res.status(500).send(e instanceof Error ? e.message : e);
}
}
private async $getHeightFromTimestamp(req: Request, res: Response) {
try {
const timestamp = parseInt(req.params.timestamp, 10);
// This will prevent people from entering milliseconds etc.
// Block timestamps are allowed to be up to 2 hours off, so 24 hours
// will never put the maximum value before the most recent block
const nowPlus1day = Math.floor(Date.now() / 1000) + 60 * 60 * 24;
// Prevent non-integers that are not seconds
if (!/^[1-9][0-9]*$/.test(req.params.timestamp) || timestamp > nowPlus1day) {
throw new Error(`Invalid timestamp, value must be Unix seconds`);
}
const result = await BlocksRepository.$getBlockHeightFromTimestamp(
timestamp,
);
res.header('Pragma', 'public');
res.header('Cache-control', 'public');
res.setHeader('Expires', new Date(Date.now() + 1000 * 300).toUTCString());
res.json(result);
} catch (e) {
res.status(500).send(e instanceof Error ? e.message : e);
}
}
private async $getBlockAuditScores(req: Request, res: Response) {
try {
let height = req.params.height === undefined ? undefined : parseInt(req.params.height, 10);
if (height == null) {
height = await BlocksRepository.$mostRecentBlockHeight();
}
res.setHeader('Expires', new Date(Date.now() + 1000 * 60).toUTCString());
res.json(await BlocksAuditsRepository.$getBlockAuditScores(height, height - 15));
} catch (e) {
res.status(500).send(e instanceof Error ? e.message : e);
}
}
public async $getBlockAuditScore(req: Request, res: Response) {
try {
const audit = await BlocksAuditsRepository.$getBlockAuditScore(req.params.hash);
res.header('Pragma', 'public');
res.header('Cache-control', 'public');
res.setHeader('Expires', new Date(Date.now() + 1000 * 3600 * 24).toUTCString());
res.json(audit || 'null');
} catch (e) {
res.status(500).send(e instanceof Error ? e.message : e);
}
}
}
export default new MiningRoutes();
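For reference, a hedged example of querying one of these routes; the host is illustrative and the path assumes the default API_URL_PREFIX of /api/v1/:

import axios from 'axios';

axios.get('https://mempool.space/api/v1/mining/difficulty-adjustments')
  .then((res) => {
    // Per $getDifficultyAdjustments, each entry is [time, height, difficulty, adjustment].
    console.log(res.data[0]);
  });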

View File

@@ -1,547 +0,0 @@
import { BlockPrice, PoolInfo, PoolStats, RewardStats } from '../../mempool.interfaces';
import BlocksRepository from '../../repositories/BlocksRepository';
import PoolsRepository from '../../repositories/PoolsRepository';
import HashratesRepository from '../../repositories/HashratesRepository';
import bitcoinClient from '../bitcoin/bitcoin-client';
import logger from '../../logger';
import { Common } from '../common';
import loadingIndicators from '../loading-indicators';
import { escape } from 'mysql2';
import DifficultyAdjustmentsRepository from '../../repositories/DifficultyAdjustmentsRepository';
import config from '../../config';
import BlocksAuditsRepository from '../../repositories/BlocksAuditsRepository';
import PricesRepository from '../../repositories/PricesRepository';
class Mining {
blocksPriceIndexingRunning = false;
constructor() {
}
/**
* Get historical block predictions match rate
*/
public async $getBlockPredictionsHistory(interval: string | null = null): Promise<any> {
return await BlocksAuditsRepository.$getBlockPredictionsHistory(
this.getTimeRange(interval),
Common.getSqlInterval(interval)
);
}
/**
* Get historical block total fee
*/
public async $getHistoricalBlockFees(interval: string | null = null): Promise<any> {
return await BlocksRepository.$getHistoricalBlockFees(
this.getTimeRange(interval, 5),
Common.getSqlInterval(interval)
);
}
/**
* Get historical block rewards
*/
public async $getHistoricalBlockRewards(interval: string | null = null): Promise<any> {
return await BlocksRepository.$getHistoricalBlockRewards(
this.getTimeRange(interval),
Common.getSqlInterval(interval)
);
}
/**
* Get historical block fee rates percentiles
*/
public async $getHistoricalBlockFeeRates(interval: string | null = null): Promise<any> {
return await BlocksRepository.$getHistoricalBlockFeeRates(
this.getTimeRange(interval),
Common.getSqlInterval(interval)
);
}
/**
* Get historical block sizes
*/
public async $getHistoricalBlockSizes(interval: string | null = null): Promise<any> {
return await BlocksRepository.$getHistoricalBlockSizes(
this.getTimeRange(interval),
Common.getSqlInterval(interval)
);
}
/**
* Get historical block weights
*/
public async $getHistoricalBlockWeights(interval: string | null = null): Promise<any> {
return await BlocksRepository.$getHistoricalBlockWeights(
this.getTimeRange(interval),
Common.getSqlInterval(interval)
);
}
/**
* Generate high level overview of the pool ranks and general stats
*/
public async $getPoolsStats(interval: string | null): Promise<object> {
const poolsStatistics = {};
const poolsInfo: PoolInfo[] = await PoolsRepository.$getPoolsInfo(interval);
const emptyBlocks: any[] = await BlocksRepository.$countEmptyBlocks(null, interval);
const poolsStats: PoolStats[] = [];
let rank = 1;
poolsInfo.forEach((poolInfo: PoolInfo) => {
const emptyBlocksCount = emptyBlocks.filter((emptyCount) => emptyCount.poolId === poolInfo.poolId);
const poolStat: PoolStats = {
poolId: poolInfo.poolId, // mysql row id
name: poolInfo.name,
link: poolInfo.link,
blockCount: poolInfo.blockCount,
rank: rank++,
emptyBlocks: emptyBlocksCount.length > 0 ? emptyBlocksCount[0]['count'] : 0,
slug: poolInfo.slug,
};
poolsStats.push(poolStat);
});
poolsStatistics['pools'] = poolsStats;
const blockCount: number = await BlocksRepository.$blockCount(null, interval);
poolsStatistics['blockCount'] = blockCount;
const totalBlock24h: number = await BlocksRepository.$blockCount(null, '24h');
try {
poolsStatistics['lastEstimatedHashrate'] = await bitcoinClient.getNetworkHashPs(totalBlock24h);
} catch (e) {
poolsStatistics['lastEstimatedHashrate'] = 0;
logger.debug('Bitcoin Core is not available, using zeroed value for current hashrate');
}
return poolsStatistics;
}
/**
* Get all mining pool stats for a pool
*/
public async $getPoolStat(slug: string): Promise<object> {
const pool = await PoolsRepository.$getPool(slug);
if (!pool) {
throw new Error('This mining pool does not exist ' + escape(slug));
}
const blockCount: number = await BlocksRepository.$blockCount(pool.id);
const totalBlock: number = await BlocksRepository.$blockCount(null, null);
const blockCount24h: number = await BlocksRepository.$blockCount(pool.id, '24h');
const totalBlock24h: number = await BlocksRepository.$blockCount(null, '24h');
const blockCount1w: number = await BlocksRepository.$blockCount(pool.id, '1w');
const totalBlock1w: number = await BlocksRepository.$blockCount(null, '1w');
let currentEstimatedHashrate = 0;
try {
currentEstimatedHashrate = await bitcoinClient.getNetworkHashPs(totalBlock24h);
} catch (e) {
logger.debug('Bitcoin Core is not available, using zeroed value for current hashrate');
}
return {
pool: pool,
blockCount: {
'all': blockCount,
'24h': blockCount24h,
'1w': blockCount1w,
},
blockShare: {
'all': blockCount / totalBlock,
'24h': blockCount24h / totalBlock24h,
'1w': blockCount1w / totalBlock1w,
},
estimatedHashrate: currentEstimatedHashrate * (blockCount24h / totalBlock24h),
reportedHashrate: null,
};
}
/**
* Get miner reward stats
*/
public async $getRewardStats(blockCount: number): Promise<RewardStats> {
return await BlocksRepository.$getBlockStats(blockCount);
}
/**
* [INDEXING] Generate weekly mining pool hashrate history
*/
public async $generatePoolHashrateHistory(): Promise<void> {
const now = new Date();
const latestRunDate = await HashratesRepository.$getLatestRun('last_weekly_hashrates_indexing');
// Run only if:
// * latestRunDate is set to 0 (node backend restart, reorg)
// * we started a new week (around Monday midnight)
const runIndexing = latestRunDate === 0 || (now.getUTCDay() === 1 && latestRunDate !== now.getUTCDate());
if (!runIndexing) {
return;
}
try {
const oldestConsecutiveBlockTimestamp = 1000 * (await BlocksRepository.$getOldestConsecutiveBlock()).timestamp;
const genesisBlock = await bitcoinClient.getBlock(await bitcoinClient.getBlockHash(0));
const genesisTimestamp = genesisBlock.time * 1000;
const indexedTimestamp = await HashratesRepository.$getWeeklyHashrateTimestamps();
const hashrates: any[] = [];
const lastMonday = new Date(now.setDate(now.getDate() - (now.getDay() + 6) % 7));
const lastMondayMidnight = this.getDateMidnight(lastMonday);
let toTimestamp = lastMondayMidnight.getTime();
const totalWeekIndexed = (await BlocksRepository.$blockCount(null, null)) / 1008;
let indexedThisRun = 0;
let totalIndexed = 0;
let newlyIndexed = 0;
const startedAt = new Date().getTime() / 1000;
let timer = new Date().getTime() / 1000;
logger.debug(`Indexing weekly mining pool hashrate`);
loadingIndicators.setProgress('weekly-hashrate-indexing', 0);
while (toTimestamp > genesisTimestamp && toTimestamp > oldestConsecutiveBlockTimestamp) {
const fromTimestamp = toTimestamp - 604800000;
// Skip already indexed weeks
if (indexedTimestamp.includes(toTimestamp / 1000)) {
toTimestamp -= 604800000;
++totalIndexed;
continue;
}
const blockStats: any = await BlocksRepository.$blockCountBetweenTimestamp(
null, fromTimestamp / 1000, toTimestamp / 1000);
const lastBlockHashrate = await bitcoinClient.getNetworkHashPs(blockStats.blockCount,
blockStats.lastBlockHeight);
let pools = await PoolsRepository.$getPoolsInfoBetween(fromTimestamp / 1000, toTimestamp / 1000);
const totalBlocks = pools.reduce((acc, pool) => acc + pool.blockCount, 0);
if (totalBlocks > 0) {
pools = pools.map((pool: any) => {
pool.hashrate = (pool.blockCount / totalBlocks) * lastBlockHashrate;
pool.share = (pool.blockCount / totalBlocks);
return pool;
});
for (const pool of pools) {
hashrates.push({
hashrateTimestamp: toTimestamp / 1000,
avgHashrate: pool['hashrate'],
poolId: pool.poolId,
share: pool['share'],
type: 'weekly',
});
}
newlyIndexed += hashrates.length;
await HashratesRepository.$saveHashrates(hashrates);
hashrates.length = 0;
}
const elapsedSeconds = Math.max(1, Math.round((new Date().getTime() / 1000) - timer));
if (elapsedSeconds > 1) {
const runningFor = Math.max(1, Math.round((new Date().getTime() / 1000) - startedAt));
const weeksPerSeconds = Math.max(1, Math.round(indexedThisRun / elapsedSeconds));
const progress = Math.round(totalIndexed / totalWeekIndexed * 10000) / 100;
const formattedDate = new Date(fromTimestamp).toUTCString();
logger.debug(`Getting weekly pool hashrate for ${formattedDate} | ~${weeksPerSeconds.toFixed(2)} weeks/sec | total: ~${totalIndexed}/${Math.round(totalWeekIndexed)} (${progress}%) | elapsed: ${runningFor} seconds`);
timer = new Date().getTime() / 1000;
indexedThisRun = 0;
loadingIndicators.setProgress('weekly-hashrate-indexing', progress, false);
}
toTimestamp -= 604800000;
++indexedThisRun;
++totalIndexed;
}
await HashratesRepository.$setLatestRun('last_weekly_hashrates_indexing', new Date().getUTCDate());
if (newlyIndexed > 0) {
logger.notice(`Weekly mining pools hashrates indexing completed: indexed ${newlyIndexed}`);
} else {
logger.debug(`Weekly mining pools hashrates indexing completed: indexed ${newlyIndexed}`);
}
loadingIndicators.setProgress('weekly-hashrate-indexing', 100);
} catch (e) {
loadingIndicators.setProgress('weekly-hashrate-indexing', 100);
logger.err(`Weekly mining pools hashrates indexing failed. Trying again in 10 seconds. Reason: ${(e instanceof Error ? e.message : e)}`);
throw e;
}
}
/**
* [INDEXING] Generate daily hashrate data
*/
public async $generateNetworkHashrateHistory(): Promise<void> {
// We only run this once a day around midnight
const latestRunDate = await HashratesRepository.$getLatestRun('last_hashrates_indexing');
const now = new Date().getUTCDate();
if (now === latestRunDate) {
return;
}
const oldestConsecutiveBlockTimestamp = 1000 * (await BlocksRepository.$getOldestConsecutiveBlock()).timestamp;
try {
const genesisBlock = await bitcoinClient.getBlock(await bitcoinClient.getBlockHash(0));
const genesisTimestamp = genesisBlock.time * 1000;
const indexedTimestamp = (await HashratesRepository.$getRawNetworkDailyHashrate(null)).map(hashrate => hashrate.timestamp);
const lastMidnight = this.getDateMidnight(new Date());
let toTimestamp = Math.round(lastMidnight.getTime());
const hashrates: any[] = [];
const totalDayIndexed = (await BlocksRepository.$blockCount(null, null)) / 144;
let indexedThisRun = 0;
let totalIndexed = 0;
let newlyIndexed = 0;
const startedAt = new Date().getTime() / 1000;
let timer = new Date().getTime() / 1000;
logger.debug(`Indexing daily network hashrate`);
loadingIndicators.setProgress('daily-hashrate-indexing', 0);
while (toTimestamp > genesisTimestamp && toTimestamp > oldestConsecutiveBlockTimestamp) {
const fromTimestamp = toTimestamp - 86400000;
// Skip already indexed days
if (indexedTimestamp.includes(toTimestamp / 1000)) {
toTimestamp -= 86400000;
++totalIndexed;
continue;
}
const blockStats: any = await BlocksRepository.$blockCountBetweenTimestamp(
null, fromTimestamp / 1000, toTimestamp / 1000);
const lastBlockHashrate = blockStats.blockCount === 0 ? 0 : await bitcoinClient.getNetworkHashPs(blockStats.blockCount,
blockStats.lastBlockHeight);
hashrates.push({
hashrateTimestamp: toTimestamp / 1000,
avgHashrate: lastBlockHashrate,
poolId: 0,
share: 1,
type: 'daily',
});
if (hashrates.length > 10) {
newlyIndexed += hashrates.length;
await HashratesRepository.$saveHashrates(hashrates);
hashrates.length = 0;
}
const elapsedSeconds = Math.max(1, Math.round((new Date().getTime() / 1000) - timer));
if (elapsedSeconds > 1) {
const runningFor = Math.max(1, Math.round((new Date().getTime() / 1000) - startedAt));
const daysPerSeconds = Math.max(1, Math.round(indexedThisRun / elapsedSeconds));
const progress = Math.round(totalIndexed / totalDayIndexed * 10000) / 100;
const formattedDate = new Date(fromTimestamp).toUTCString();
logger.debug(`Getting network daily hashrate for ${formattedDate} | ~${daysPerSeconds.toFixed(2)} days/sec | total: ~${totalIndexed}/${Math.round(totalDayIndexed)} (${progress}%) | elapsed: ${runningFor} seconds`);
timer = new Date().getTime() / 1000;
indexedThisRun = 0;
loadingIndicators.setProgress('daily-hashrate-indexing', progress);
}
toTimestamp -= 86400000;
++indexedThisRun;
++totalIndexed;
}
// Add genesis block manually
if (config.MEMPOOL.INDEXING_BLOCKS_AMOUNT === -1 && !indexedTimestamp.includes(genesisTimestamp / 1000)) {
hashrates.push({
hashrateTimestamp: genesisTimestamp / 1000,
avgHashrate: await bitcoinClient.getNetworkHashPs(1, 1),
poolId: 0,
share: 1,
type: 'daily',
});
}
newlyIndexed += hashrates.length;
await HashratesRepository.$saveHashrates(hashrates);
await HashratesRepository.$setLatestRun('last_hashrates_indexing', new Date().getUTCDate());
if (newlyIndexed > 0) {
logger.notice(`Daily network hashrate indexing completed: indexed ${newlyIndexed} days`);
} else {
logger.debug(`Daily network hashrate indexing completed: indexed ${newlyIndexed} days`);
}
loadingIndicators.setProgress('daily-hashrate-indexing', 100);
} catch (e) {
loadingIndicators.setProgress('daily-hashrate-indexing', 100);
logger.err(`Daily network hashrate indexing failed. Trying again in 10 seconds. Reason: ${(e instanceof Error ? e.message : e)}`);
throw e;
}
}
/**
* Index difficulty adjustments
*/
public async $indexDifficultyAdjustments(): Promise<void> {
const indexedHeightsArray = await DifficultyAdjustmentsRepository.$getAdjustmentsHeights();
const indexedHeights = {};
for (const height of indexedHeightsArray) {
indexedHeights[height] = true;
}
const blocks: any = await BlocksRepository.$getBlocksDifficulty();
const genesisBlock = await bitcoinClient.getBlock(await bitcoinClient.getBlockHash(0));
let currentDifficulty = genesisBlock.difficulty;
let totalIndexed = 0;
if (config.MEMPOOL.INDEXING_BLOCKS_AMOUNT === -1 && indexedHeights[0] !== true) {
await DifficultyAdjustmentsRepository.$saveAdjustments({
time: genesisBlock.time,
height: 0,
difficulty: currentDifficulty,
adjustment: 0.0,
});
}
const oldestConsecutiveBlock = await BlocksRepository.$getOldestConsecutiveBlock();
if (config.MEMPOOL.INDEXING_BLOCKS_AMOUNT !== -1) {
currentDifficulty = oldestConsecutiveBlock.difficulty;
}
let totalBlockChecked = 0;
let timer = new Date().getTime() / 1000;
for (const block of blocks) {
if (block.difficulty !== currentDifficulty) {
if (indexedHeights[block.height] === true) { // Already indexed
if (block.height >= oldestConsecutiveBlock.height) {
currentDifficulty = block.difficulty;
}
continue;
}
let adjustment = block.difficulty / currentDifficulty;
adjustment = Math.round(adjustment * 1000000) / 1000000; // Remove floating point noise
await DifficultyAdjustmentsRepository.$saveAdjustments({
time: block.time,
height: block.height,
difficulty: block.difficulty,
adjustment: adjustment,
});
totalIndexed++;
if (block.height >= oldestConsecutiveBlock.height) {
currentDifficulty = block.difficulty;
}
}
totalBlockChecked++;
const elapsedSeconds = Math.max(1, Math.round((new Date().getTime() / 1000) - timer));
if (elapsedSeconds > 5) {
const progress = Math.round(totalBlockChecked / blocks.length * 100);
logger.info(`Indexing difficulty adjustment at block #${block.height} | Progress: ${progress}%`);
timer = new Date().getTime() / 1000;
}
}
if (totalIndexed > 0) {
logger.notice(`Indexed ${totalIndexed} difficulty adjustments`);
} else {
logger.debug(`Indexed ${totalIndexed} difficulty adjustments`);
}
}
/**
* Create a link between blocks and the latest price at when they were mined
*/
public async $indexBlockPrices() {
if (this.blocksPriceIndexingRunning === true) {
return;
}
this.blocksPriceIndexingRunning = true;
try {
const prices: any[] = await PricesRepository.$getPricesTimesAndId();
const blocksWithoutPrices: any[] = await BlocksRepository.$getBlocksWithoutPrice();
let totalInserted = 0;
const blocksPrices: BlockPrice[] = [];
for (const block of blocksWithoutPrices) {
// Quick optimisation: our mtgox feed only goes back to 2010-07-19 02:00:00, so skip the first 68951 blocks
if (['mainnet', 'testnet'].includes(config.MEMPOOL.NETWORK) && block.height < 68951) {
blocksPrices.push({
height: block.height,
priceId: prices[0].id,
});
continue;
}
for (const price of prices) {
if (block.timestamp < price.time) {
blocksPrices.push({
height: block.height,
priceId: price.id,
});
break;
}
}
if (blocksPrices.length >= 100000) {
totalInserted += blocksPrices.length;
let logStr = `Linking ${blocksPrices.length} blocks to their closest price`;
if (blocksWithoutPrices.length > 200000) {
logStr += ` | Progress ${Math.round(totalInserted / blocksWithoutPrices.length * 100)}%`;
}
logger.debug(logStr);
await BlocksRepository.$saveBlockPrices(blocksPrices);
blocksPrices.length = 0;
}
}
if (blocksPrices.length > 0) {
totalInserted += blocksPrices.length;
let logStr = `Linking ${blocksPrices.length} blocks to their closest price`;
if (blocksWithoutPrices.length > 200000) {
logStr += ` | Progress ${Math.round(totalInserted / blocksWithoutPrices.length * 100)}%`;
}
logger.debug(logStr);
await BlocksRepository.$saveBlockPrices(blocksPrices);
}
} catch (e) {
this.blocksPriceIndexingRunning = false;
throw e;
}
this.blocksPriceIndexingRunning = false;
}
private getDateMidnight(date: Date): Date {
date.setUTCHours(0);
date.setUTCMinutes(0);
date.setUTCSeconds(0);
date.setUTCMilliseconds(0);
return date;
}
private getTimeRange(interval: string | null, scale = 1): number {
switch (interval) {
case '3y': return 43200 * scale; // 12h
case '2y': return 28800 * scale; // 8h
case '1y': return 28800 * scale; // 8h
case '6m': return 10800 * scale; // 3h
case '3m': return 7200 * scale; // 2h
case '1m': return 1800 * scale; // 30min
case '1w': return 300 * scale; // 5min
case '3d': return 1 * scale;
case '24h': return 1 * scale;
default: return 86400 * scale;
}
}
}
export default new Mining();
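// A worked sketch of the share-based estimate used by $getPoolStat and the
// weekly indexer above: a pool's hashrate is the network hashrate scaled by
// its share of blocks over the window. The numbers below are illustrative only.
function estimatePoolHashrate(poolBlocks: number, totalBlocks: number, networkHashrate: number): number {
  return networkHashrate * (poolBlocks / totalBlocks);
}
// A pool that mined 21 of the last 144 blocks (~24h) on a 250 EH/s network:
const estimate = estimatePoolHashrate(21, 144, 250e18); // ≈ 36.5 EH/s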

View File

@@ -1,290 +0,0 @@
import DB from '../database';
import logger from '../logger';
import config from '../config';
import BlocksRepository from '../repositories/BlocksRepository';
interface Pool {
name: string;
link: string;
regexes: string[];
addresses: string[];
slug: string;
}
class PoolsParser {
miningPools: any[] = [];
unknownPool: any = {
'name': 'Unknown',
'link': 'https://learnmeabitcoin.com/technical/coinbase-transaction',
'regexes': '[]',
'addresses': '[]',
'slug': 'unknown'
};
slugWarnFlag = false;
/**
* Parse the pools.json file, consolidate the data and dump it into the database
*/
public async migratePoolsJson(poolsJson: object): Promise<void> {
if (['mainnet', 'testnet', 'signet'].includes(config.MEMPOOL.NETWORK) === false) {
return;
}
// First we save every entry without paying attention to pool duplication
const poolsDuplicated: Pool[] = [];
const coinbaseTags = Object.entries(poolsJson['coinbase_tags']);
for (let i = 0; i < coinbaseTags.length; ++i) {
poolsDuplicated.push({
'name': (<Pool>coinbaseTags[i][1]).name,
'link': (<Pool>coinbaseTags[i][1]).link,
'regexes': [coinbaseTags[i][0]],
'addresses': [],
'slug': ''
});
}
const addressesTags = Object.entries(poolsJson['payout_addresses']);
for (let i = 0; i < addressesTags.length; ++i) {
poolsDuplicated.push({
'name': (<Pool>addressesTags[i][1]).name,
'link': (<Pool>addressesTags[i][1]).link,
'regexes': [],
'addresses': [addressesTags[i][0]],
'slug': ''
});
}
// Then, we find unique mining pool names
const poolNames: string[] = [];
for (let i = 0; i < poolsDuplicated.length; ++i) {
if (poolNames.indexOf(poolsDuplicated[i].name) === -1) {
poolNames.push(poolsDuplicated[i].name);
}
}
logger.debug(`Found ${poolNames.length} unique mining pools`);
// Get existing pools from the db
let existingPools;
try {
if (config.DATABASE.ENABLED === true) {
[existingPools] = await DB.query({ sql: 'SELECT * FROM pools;', timeout: 120000 });
} else {
existingPools = [];
}
} catch (e) {
logger.err('Cannot get existing pools from the database, skipping pools.json import');
return;
}
this.miningPools = [];
// Finally, we generate the final consolidated pools data
const finalPoolDataAdd: Pool[] = [];
const finalPoolDataUpdate: Pool[] = [];
const finalPoolDataRename: Pool[] = [];
for (let i = 0; i < poolNames.length; ++i) {
let allAddresses: string[] = [];
let allRegexes: string[] = [];
const match = poolsDuplicated.filter((pool: Pool) => pool.name === poolNames[i]);
for (let y = 0; y < match.length; ++y) {
allAddresses = allAddresses.concat(match[y].addresses);
allRegexes = allRegexes.concat(match[y].regexes);
}
const finalPoolName = poolNames[i].replace(/'/g, `''`); // Escape every single quote, to support names containing them in db queries
let slug: string | undefined;
try {
slug = poolsJson['slugs'][poolNames[i]];
} catch (e) {
if (this.slugWarnFlag === false) {
logger.warn(`pools.json does not seem to contain the 'slugs' object`);
this.slugWarnFlag = true;
}
}
if (slug === undefined) {
// Only keep alphanumeric characters
slug = poolNames[i].replace(/[^a-z0-9]/gi, '').toLowerCase();
logger.warn(`No slug found for '${poolNames[i]}', generating it => '${slug}'`);
}
const poolObj = {
'name': finalPoolName,
'link': match[0].link,
'regexes': allRegexes,
'addresses': allAddresses,
'slug': slug
};
const existingPool = existingPools.find((pool) => pool.name === poolNames[i]);
if (existingPool !== undefined) {
// Check if any data was actually updated
const equals = (a, b) =>
a.length === b.length &&
a.every((v, i) => v === b[i]);
if (!equals(JSON.parse(existingPool.addresses), poolObj.addresses) || !equals(JSON.parse(existingPool.regexes), poolObj.regexes)) {
finalPoolDataUpdate.push(poolObj);
}
} else {
// Double check that we're not just renaming a pool (same addresses, same regexes)
const [poolToRename]: any[] = await DB.query(`
SELECT * FROM pools
WHERE addresses = ? OR regexes = ?`,
[JSON.stringify(poolObj.addresses), JSON.stringify(poolObj.regexes)]
);
if (poolToRename && poolToRename.length > 0) {
// We're actually renaming an existing pool
finalPoolDataRename.push({
'name': poolObj.name,
'link': poolObj.link,
'regexes': allRegexes,
'addresses': allAddresses,
'slug': slug
});
logger.debug(`Rename '${poolToRename[0].name}' mining pool to ${poolObj.name}`);
} else {
logger.debug(`Add '${finalPoolName}' mining pool`);
finalPoolDataAdd.push(poolObj);
}
}
this.miningPools.push({
'name': finalPoolName,
'link': match[0].link,
'regexes': JSON.stringify(allRegexes),
'addresses': JSON.stringify(allAddresses),
'slug': slug
});
}
if (config.DATABASE.ENABLED === false) { // Don't run db operations
logger.info('Mining pools.json import completed (no database)');
return;
}
if (finalPoolDataAdd.length > 0 || finalPoolDataUpdate.length > 0 ||
finalPoolDataRename.length > 0
) {
logger.debug(`Update pools table now`);
// Add new mining pools into the database
let queryAdd: string = 'INSERT INTO pools(name, link, regexes, addresses, slug) VALUES ';
for (let i = 0; i < finalPoolDataAdd.length; ++i) {
queryAdd += `('${finalPoolDataAdd[i].name}', '${finalPoolDataAdd[i].link}',
'${JSON.stringify(finalPoolDataAdd[i].regexes)}', '${JSON.stringify(finalPoolDataAdd[i].addresses)}',
${JSON.stringify(finalPoolDataAdd[i].slug)}),`;
}
queryAdd = queryAdd.slice(0, -1) + ';';
// Update existing mining pools in the database
const updateQueries: string[] = [];
for (let i = 0; i < finalPoolDataUpdate.length; ++i) {
updateQueries.push(`
UPDATE pools
SET name='${finalPoolDataUpdate[i].name}', link='${finalPoolDataUpdate[i].link}',
regexes='${JSON.stringify(finalPoolDataUpdate[i].regexes)}', addresses='${JSON.stringify(finalPoolDataUpdate[i].addresses)}',
slug='${finalPoolDataUpdate[i].slug}'
WHERE name='${finalPoolDataUpdate[i].name}'
;`);
}
// Rename mining pools
const renameQueries: string[] = [];
for (let i = 0; i < finalPoolDataRename.length; ++i) {
renameQueries.push(`
UPDATE pools
SET name='${finalPoolDataRename[i].name}', link='${finalPoolDataRename[i].link}',
slug='${finalPoolDataRename[i].slug}'
WHERE regexes='${JSON.stringify(finalPoolDataRename[i].regexes)}'
AND addresses='${JSON.stringify(finalPoolDataRename[i].addresses)}'
;`);
}
try {
if (finalPoolDataAdd.length > 0 || updateQueries.length > 0) {
await this.$deleteBlocksToReindex(finalPoolDataUpdate);
}
if (finalPoolDataAdd.length > 0) {
await DB.query({ sql: queryAdd, timeout: 120000 });
}
for (const query of updateQueries) {
await DB.query({ sql: query, timeout: 120000 });
}
for (const query of renameQueries) {
await DB.query({ sql: query, timeout: 120000 });
}
await this.insertUnknownPool();
logger.info('Mining pools.json import completed');
} catch (e) {
logger.err(`Cannot import pools in the database`);
throw e;
}
}
try {
await this.insertUnknownPool();
} catch (e) {
logger.err(`Cannot insert unknown pool in the database`);
throw e;
}
}
/**
* Manually add the 'unknown pool'
*/
private async insertUnknownPool() {
try {
const [rows]: any[] = await DB.query({ sql: 'SELECT name from pools where name="Unknown"', timeout: 120000 });
if (rows.length === 0) {
await DB.query({
sql: `INSERT INTO pools(name, link, regexes, addresses, slug)
VALUES("Unknown", "https://learnmeabitcoin.com/technical/coinbase-transaction", "[]", "[]", "unknown");
`});
} else {
await DB.query(`UPDATE pools
SET name='Unknown', link='https://learnmeabitcoin.com/technical/coinbase-transaction',
regexes='[]', addresses='[]',
slug='unknown'
WHERE name='Unknown'
`);
}
} catch (e) {
logger.err('Unable to insert "Unknown" mining pool');
}
}
/**
* Delete blocks which need to be reindexed
*/
private async $deleteBlocksToReindex(finalPoolDataUpdate: any[]) {
if (config.MEMPOOL.AUTOMATIC_BLOCK_REINDEXING === false) {
return;
}
const blockCount = await BlocksRepository.$blockCount(null, null);
if (blockCount === 0) {
return;
}
for (const updatedPool of finalPoolDataUpdate) {
const [pool]: any[] = await DB.query(`SELECT id, name from pools where slug = "${updatedPool.slug}"`);
if (pool.length > 0) {
logger.notice(`Deleting blocks from ${pool[0].name} mining pool for future re-indexing`);
await DB.query(`DELETE FROM blocks WHERE pool_id = ${pool[0].id}`);
}
}
// Ignore the early days of Bitcoin as there were no mining pools yet
logger.notice('Deleting blocks with unknown mining pool from height 130635 for future re-indexing');
const [unknownPool] = await DB.query(`SELECT id from pools where slug = "unknown"`);
await DB.query(`DELETE FROM blocks WHERE pool_id = ${unknownPool[0].id} AND height > 130635`);
logger.notice('Truncating hashrates for future re-indexing');
await DB.query(`DELETE FROM hashrates`);
}
}
export default new PoolsParser();
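// Minimal sketch of the pools.json shape that migratePoolsJson consumes:
// keys under coinbase_tags are coinbase regexes, keys under payout_addresses
// are addresses, and slugs maps pool names to URL slugs. Entries are made up.
const examplePoolsJson = {
  'coinbase_tags': {
    '/ExamplePool/': { 'name': 'Example Pool', 'link': 'https://pool.example' },
  },
  'payout_addresses': {
    'bc1qexampleaddress': { 'name': 'Example Pool', 'link': 'https://pool.example' },
  },
  'slugs': {
    'Example Pool': 'examplepool',
  },
};
// await poolsParser.migratePoolsJson(examplePoolsJson);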

View File

@@ -0,0 +1,101 @@
const config = require('../../mempool-config.json');
import { ITransaction, IProjectedBlock, IMempool, IProjectedBlockInternal } from '../interfaces';
class ProjectedBlocks {
private transactionsSorted: ITransaction[] = [];
constructor() {}
public getProjectedBlockFeesForBlock(index: number) {
const projectedBlock = this.getProjectedBlocksInternal()[index];
if (!projectedBlock) {
throw new Error('No projected block for that index');
}
return projectedBlock.txFeePerVsizes.map((fpv) => {
return {'fpv': fpv};
});
}
public updateProjectedBlocks(memPool: IMempool): void {
const latestMempool = memPool;
const memPoolArray: ITransaction[] = [];
for (const i in latestMempool) {
if (latestMempool.hasOwnProperty(i)) {
memPoolArray.push(latestMempool[i]);
}
}
memPoolArray.sort((a, b) => b.feePerWeightUnit - a.feePerWeightUnit);
this.transactionsSorted = memPoolArray.filter((tx) => tx.feePerWeightUnit);
}
public getProjectedBlocks(txId?: string, numberOfBlocks: number = config.DEFAULT_PROJECTED_BLOCKS_AMOUNT): IProjectedBlock[] {
return this.getProjectedBlocksInternal(numberOfBlocks).map((projectedBlock) => {
return {
blockSize: projectedBlock.blockSize,
blockWeight: projectedBlock.blockWeight,
nTx: projectedBlock.nTx,
minFee: projectedBlock.minFee,
maxFee: projectedBlock.maxFee,
minWeightFee: projectedBlock.minWeightFee,
maxWeightFee: projectedBlock.maxWeightFee,
medianFee: projectedBlock.medianFee,
fees: projectedBlock.fees,
hasMytx: txId ? projectedBlock.txIds.some((tx) => tx === txId) : false
};
});
}
private getProjectedBlocksInternal(numberOfBlocks: number = config.DEFAULT_PROJECTED_BLOCKS_AMOUNT): IProjectedBlockInternal[] {
const projectedBlocks: IProjectedBlockInternal[] = [];
let blockWeight = 0;
let blockSize = 0;
let transactions: ITransaction[] = [];
this.transactionsSorted.forEach((tx) => {
if (blockWeight + tx.vsize * 4 < 4000000 || projectedBlocks.length === numberOfBlocks) {
blockWeight += tx.weight || tx.vsize * 4;
blockSize += tx.size;
transactions.push(tx);
} else {
projectedBlocks.push(this.dataToProjectedBlock(transactions, blockSize, blockWeight));
blockWeight = 0;
blockSize = 0;
transactions = [];
}
});
if (transactions.length) {
projectedBlocks.push(this.dataToProjectedBlock(transactions, blockSize, blockWeight));
}
return projectedBlocks;
}
private dataToProjectedBlock(transactions: ITransaction[], blockSize: number, blockWeight: number): IProjectedBlockInternal {
return {
blockSize: blockSize,
blockWeight: blockWeight,
nTx: transactions.length,
minFee: transactions[transactions.length - 1].feePerVsize,
maxFee: transactions[0].feePerVsize,
minWeightFee: transactions[transactions.length - 1].feePerWeightUnit,
maxWeightFee: transactions[0].feePerWeightUnit,
medianFee: this.median(transactions.map((tx) => tx.feePerVsize)),
txIds: transactions.map((tx) => tx.txid),
txFeePerVsizes: transactions.map((tx) => tx.feePerVsize).reverse(),
fees: transactions.map((tx) => tx.fee).reduce((acc, currValue) => acc + currValue),
};
}
private median(numbers: number[]) { // expects a sorted array
let medianNr = 0;
const numsLen = numbers.length;
if (numsLen % 2 === 0) {
medianNr = (numbers[numsLen / 2 - 1] + numbers[numsLen / 2]) / 2;
} else {
medianNr = numbers[(numsLen - 1) / 2];
}
return medianNr;
}
}
export default new ProjectedBlocks();
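// Usage sketch, filling only the ITransaction fields the class reads above
// (hence the cast); the import path is assumed.
import projectedBlocks from './projected-blocks';
const exampleMempool = {
  'abc': { txid: 'abc', fee: 1500, size: 250, vsize: 150, weight: 600, feePerVsize: 10, feePerWeightUnit: 2.5 },
} as any;
projectedBlocks.updateProjectedBlocks(exampleMempool);
const projected = projectedBlocks.getProjectedBlocks('abc');
// projected[0].hasMytx === true, since 'abc' landed in the first projected block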

View File

@@ -1,34 +0,0 @@
export interface CachedRbf {
txid: string;
expires: Date;
}
class RbfCache {
private cache: { [txid: string]: CachedRbf; } = {};
constructor() {
setInterval(this.cleanup.bind(this), 1000 * 60 * 60);
}
public add(replacedTxId: string, newTxId: string): void {
this.cache[replacedTxId] = {
expires: new Date(Date.now() + 1000 * 604800), // 1 week
txid: newTxId,
};
}
public get(txId: string): CachedRbf | undefined {
return this.cache[txId];
}
private cleanup(): void {
const currentDate = new Date();
for (const c in this.cache) {
if (this.cache[c].expires < currentDate) {
delete this.cache[c];
}
}
}
}
export default new RbfCache();
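// Usage sketch (import path assumed): record that oldTxid was replaced by
// newTxid, then resolve the replacement later.
import rbfCache from './rbf-cache';
rbfCache.add('oldTxid', 'newTxid');
const replacement = rbfCache.get('oldTxid'); // { txid: 'newTxid', expires: ... }
// Entries self-expire after one week via the hourly cleanup() interval
// registered in the constructor.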

View File

@@ -0,0 +1,389 @@
import memPool from './mempool';
import { DB } from '../database';
import { ITransaction, IMempoolStats } from '../interfaces';
class Statistics {
protected intervalTimer: NodeJS.Timer | undefined;
protected newStatisticsEntryCallback: Function | undefined;
public setNewStatisticsEntryCallback(fn: Function) {
this.newStatisticsEntryCallback = fn;
}
constructor() {
}
public startStatistics(): void {
const now = new Date();
const nextInterval = new Date(now.getFullYear(), now.getMonth(), now.getDate(), now.getHours(),
now.getMinutes() + 1, 0, 0); // top of the next minute
const difference = nextInterval.getTime() - now.getTime();
setTimeout(() => {
this.runStatistics();
this.intervalTimer = setInterval(() => { this.runStatistics(); }, 1 * 60 * 1000);
}, difference);
}
private async runStatistics(): Promise<void> {
const currentMempool = memPool.getMempool();
const txPerSecond = memPool.getTxPerSecond();
const vBytesPerSecond = memPool.getVBytesPerSecond();
if (txPerSecond === 0) {
return;
}
console.log('Running statistics');
let memPoolArray: ITransaction[] = [];
for (const i in currentMempool) {
if (currentMempool.hasOwnProperty(i)) {
memPoolArray.push(currentMempool[i]);
}
}
// Remove 0 and undefined
memPoolArray = memPoolArray.filter((tx) => tx.feePerWeightUnit);
if (!memPoolArray.length) {
return;
}
memPoolArray.sort((a, b) => a.feePerWeightUnit - b.feePerWeightUnit);
const totalWeight = memPoolArray.map((tx) => tx.vsize).reduce((acc, curr) => acc + curr) * 4;
const totalFee = memPoolArray.map((tx) => tx.fee).reduce((acc, curr) => acc + curr);
const logFees = [1, 2, 3, 4, 5, 6, 8, 10, 12, 15, 20, 30, 40, 50, 60, 70, 80, 90, 100, 125, 150, 175, 200,
250, 300, 350, 400, 500, 600, 700, 800, 900, 1000, 1200, 1400, 1600, 1800, 2000];
const weightUnitFees: { [feePerWU: number]: number } = {};
const weightVsizeFees: { [feePerVsize: number]: number } = {};
memPoolArray.forEach((transaction) => {
for (let i = 0; i < logFees.length; i++) {
if ((logFees[i] === 2000 && transaction.feePerWeightUnit >= 2000) || transaction.feePerWeightUnit <= logFees[i]) {
if (weightUnitFees[logFees[i]]) {
weightUnitFees[logFees[i]] += transaction.vsize * 4;
} else {
weightUnitFees[logFees[i]] = transaction.vsize * 4;
}
break;
}
}
});
memPoolArray.forEach((transaction) => {
for (let i = 0; i < logFees.length; i++) {
if ((logFees[i] === 2000 && transaction.feePerVsize >= 2000) || transaction.feePerVsize <= logFees[i]) {
if (weightVsizeFees[logFees[i]]) {
weightVsizeFees[logFees[i]] += transaction.vsize;
} else {
weightVsizeFees[logFees[i]] = transaction.vsize;
}
break;
}
}
});
const insertId = await this.$create({
added: 'NOW()',
unconfirmed_transactions: memPoolArray.length,
tx_per_second: txPerSecond,
vbytes_per_second: Math.round(vBytesPerSecond),
mempool_byte_weight: totalWeight,
total_fee: totalFee,
fee_data: JSON.stringify({
'wu': weightUnitFees,
'vsize': weightVsizeFees
}),
vsize_1: weightVsizeFees['1'] || 0,
vsize_2: weightVsizeFees['2'] || 0,
vsize_3: weightVsizeFees['3'] || 0,
vsize_4: weightVsizeFees['4'] || 0,
vsize_5: weightVsizeFees['5'] || 0,
vsize_6: weightVsizeFees['6'] || 0,
vsize_8: weightVsizeFees['8'] || 0,
vsize_10: weightVsizeFees['10'] || 0,
vsize_12: weightVsizeFees['12'] || 0,
vsize_15: weightVsizeFees['15'] || 0,
vsize_20: weightVsizeFees['20'] || 0,
vsize_30: weightVsizeFees['30'] || 0,
vsize_40: weightVsizeFees['40'] || 0,
vsize_50: weightVsizeFees['50'] || 0,
vsize_60: weightVsizeFees['60'] || 0,
vsize_70: weightVsizeFees['70'] || 0,
vsize_80: weightVsizeFees['80'] || 0,
vsize_90: weightVsizeFees['90'] || 0,
vsize_100: weightVsizeFees['100'] || 0,
vsize_125: weightVsizeFees['125'] || 0,
vsize_150: weightVsizeFees['150'] || 0,
vsize_175: weightVsizeFees['175'] || 0,
vsize_200: weightVsizeFees['200'] || 0,
vsize_250: weightVsizeFees['250'] || 0,
vsize_300: weightVsizeFees['300'] || 0,
vsize_350: weightVsizeFees['350'] || 0,
vsize_400: weightVsizeFees['400'] || 0,
vsize_500: weightVsizeFees['500'] || 0,
vsize_600: weightVsizeFees['600'] || 0,
vsize_700: weightVsizeFees['700'] || 0,
vsize_800: weightVsizeFees['800'] || 0,
vsize_900: weightVsizeFees['900'] || 0,
vsize_1000: weightVsizeFees['1000'] || 0,
vsize_1200: weightVsizeFees['1200'] || 0,
vsize_1400: weightVsizeFees['1400'] || 0,
vsize_1600: weightVsizeFees['1600'] || 0,
vsize_1800: weightVsizeFees['1800'] || 0,
vsize_2000: weightVsizeFees['2000'] || 0,
});
if (this.newStatisticsEntryCallback && insertId) {
const newStats = await this.$get(insertId);
this.newStatisticsEntryCallback(newStats);
}
}
private async $create(statistics: IMempoolStats): Promise<number | undefined> {
try {
const connection = await DB.pool.getConnection();
const query = `INSERT INTO statistics(
added,
unconfirmed_transactions,
tx_per_second,
vbytes_per_second,
mempool_byte_weight,
fee_data,
total_fee,
vsize_1,
vsize_2,
vsize_3,
vsize_4,
vsize_5,
vsize_6,
vsize_8,
vsize_10,
vsize_12,
vsize_15,
vsize_20,
vsize_30,
vsize_40,
vsize_50,
vsize_60,
vsize_70,
vsize_80,
vsize_90,
vsize_100,
vsize_125,
vsize_150,
vsize_175,
vsize_200,
vsize_250,
vsize_300,
vsize_350,
vsize_400,
vsize_500,
vsize_600,
vsize_700,
vsize_800,
vsize_900,
vsize_1000,
vsize_1200,
vsize_1400,
vsize_1600,
vsize_1800,
vsize_2000
)
VALUES (${statistics.added}, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?,
?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?)`;
const params: (string | number)[] = [
statistics.unconfirmed_transactions,
statistics.tx_per_second,
statistics.vbytes_per_second,
statistics.mempool_byte_weight,
statistics.fee_data,
statistics.total_fee,
statistics.vsize_1,
statistics.vsize_2,
statistics.vsize_3,
statistics.vsize_4,
statistics.vsize_5,
statistics.vsize_6,
statistics.vsize_8,
statistics.vsize_10,
statistics.vsize_12,
statistics.vsize_15,
statistics.vsize_20,
statistics.vsize_30,
statistics.vsize_40,
statistics.vsize_50,
statistics.vsize_60,
statistics.vsize_70,
statistics.vsize_80,
statistics.vsize_90,
statistics.vsize_100,
statistics.vsize_125,
statistics.vsize_150,
statistics.vsize_175,
statistics.vsize_200,
statistics.vsize_250,
statistics.vsize_300,
statistics.vsize_350,
statistics.vsize_400,
statistics.vsize_500,
statistics.vsize_600,
statistics.vsize_700,
statistics.vsize_800,
statistics.vsize_900,
statistics.vsize_1000,
statistics.vsize_1200,
statistics.vsize_1400,
statistics.vsize_1600,
statistics.vsize_1800,
statistics.vsize_2000,
];
const [result]: any = await connection.query(query, params);
connection.release();
return result.insertId;
} catch (e) {
console.log('$create() error', e);
}
}
private getQueryForDays(limit: number, groupBy: number) { // `limit` is the number of rows returned
return `SELECT id, added, unconfirmed_transactions,
AVG(tx_per_second) AS tx_per_second,
AVG(vbytes_per_second) AS vbytes_per_second,
AVG(vsize_1) AS vsize_1,
AVG(vsize_2) AS vsize_2,
AVG(vsize_3) AS vsize_3,
AVG(vsize_4) AS vsize_4,
AVG(vsize_5) AS vsize_5,
AVG(vsize_6) AS vsize_6,
AVG(vsize_8) AS vsize_8,
AVG(vsize_10) AS vsize_10,
AVG(vsize_12) AS vsize_12,
AVG(vsize_15) AS vsize_15,
AVG(vsize_20) AS vsize_20,
AVG(vsize_30) AS vsize_30,
AVG(vsize_40) AS vsize_40,
AVG(vsize_50) AS vsize_50,
AVG(vsize_60) AS vsize_60,
AVG(vsize_70) AS vsize_70,
AVG(vsize_80) AS vsize_80,
AVG(vsize_90) AS vsize_90,
AVG(vsize_100) AS vsize_100,
AVG(vsize_125) AS vsize_125,
AVG(vsize_150) AS vsize_150,
AVG(vsize_175) AS vsize_175,
AVG(vsize_200) AS vsize_200,
AVG(vsize_250) AS vsize_250,
AVG(vsize_300) AS vsize_300,
AVG(vsize_350) AS vsize_350,
AVG(vsize_400) AS vsize_400,
AVG(vsize_500) AS vsize_500,
AVG(vsize_600) AS vsize_600,
AVG(vsize_700) AS vsize_700,
AVG(vsize_800) AS vsize_800,
AVG(vsize_900) AS vsize_900,
AVG(vsize_1000) AS vsize_1000,
AVG(vsize_1200) AS vsize_1200,
AVG(vsize_1400) AS vsize_1400,
AVG(vsize_1600) AS vsize_1600,
AVG(vsize_1800) AS vsize_1800,
AVG(vsize_2000) AS vsize_2000 FROM statistics GROUP BY UNIX_TIMESTAMP(added) DIV ${groupBy} ORDER BY id DESC LIMIT ${limit}`;
}
public async $get(id: number): Promise<IMempoolStats | undefined> {
try {
const connection = await DB.pool.getConnection();
const query = `SELECT * FROM statistics WHERE id = ?`;
const [rows] = await connection.query<any>(query, [id]);
connection.release();
return rows[0];
} catch (e) {
console.log('$get() error', e);
}
}
public async $list2H(): Promise<IMempoolStats[]> {
try {
const connection = await DB.pool.getConnection();
const query = `SELECT * FROM statistics ORDER BY id DESC LIMIT 120`;
const [rows] = await connection.query<any>(query);
connection.release();
return rows;
} catch (e) {
console.log('$list2H() error', e);
return [];
}
}
public async $list24H(): Promise<IMempoolStats[]> {
try {
const connection = await DB.pool.getConnection();
const query = this.getQueryForDays(120, 720);
const [rows] = await connection.query<any>(query);
connection.release();
return rows;
} catch (e) {
return [];
}
}
public async $list1W(): Promise<IMempoolStats[]> {
try {
const connection = await DB.pool.getConnection();
const query = this.getQueryForDays(120, 5040);
const [rows] = await connection.query<any>(query);
connection.release();
return rows;
} catch (e) {
console.log('$list1W() error', e);
return [];
}
}
public async $list1M(): Promise<IMempoolStats[]> {
try {
const connection = await DB.pool.getConnection();
const query = this.getQueryForDays(120, 20160);
const [rows] = await connection.query<any>(query);
connection.release();
return rows;
} catch (e) {
console.log('$list1M() error', e);
return [];
}
}
public async $list3M(): Promise<IMempoolStats[]> {
try {
const connection = await DB.pool.getConnection();
const query = this.getQueryForDays(120, 60480);
const [rows] = await connection.query<any>(query);
connection.release();
return rows;
} catch (e) {
console.log('$list3M() error', e);
return [];
}
}
public async $list6M(): Promise<IMempoolStats[]> {
try {
const connection = await DB.pool.getConnection();
const query = this.getQueryForDays(120, 120960);
const [rows] = await connection.query<any>(query);
connection.release();
return rows;
} catch (e) {
console.log('$list6M() error', e);
return [];
}
}
}
export default new Statistics();
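// Sanity check of the (limit, groupBy) pairs passed to getQueryForDays above:
// each query returns `limit` rows, one per `groupBy`-second bucket, so the
// covered window is limit * groupBy seconds.
const windows = [
  { limit: 120, groupBy: 720 },    // 120 * 720s    = 86400s = 24h ($list24H)
  { limit: 120, groupBy: 5040 },   // 120 * 5040s   = 1 week       ($list1W)
  { limit: 120, groupBy: 20160 },  // 120 * 20160s  = 4 weeks      ($list1M)
  { limit: 120, groupBy: 60480 },  // 120 * 60480s  = 12 weeks     ($list3M)
  { limit: 120, groupBy: 120960 }, // 120 * 120960s = 24 weeks     ($list6M)
];
windows.forEach((w) => console.log(`${(w.limit * w.groupBy) / 86400} days`));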

View File

@@ -1,430 +0,0 @@
import DB from '../../database';
import logger from '../../logger';
import { Statistic, OptimizedStatistic } from '../../mempool.interfaces';
class StatisticsApi {
protected queryTimeout = 120000;
public async $createZeroedStatistic(): Promise<number | undefined> {
try {
const query = `INSERT INTO statistics(
added,
unconfirmed_transactions,
tx_per_second,
vbytes_per_second,
mempool_byte_weight,
fee_data,
total_fee,
vsize_1,
vsize_2,
vsize_3,
vsize_4,
vsize_5,
vsize_6,
vsize_8,
vsize_10,
vsize_12,
vsize_15,
vsize_20,
vsize_30,
vsize_40,
vsize_50,
vsize_60,
vsize_70,
vsize_80,
vsize_90,
vsize_100,
vsize_125,
vsize_150,
vsize_175,
vsize_200,
vsize_250,
vsize_300,
vsize_350,
vsize_400,
vsize_500,
vsize_600,
vsize_700,
vsize_800,
vsize_900,
vsize_1000,
vsize_1200,
vsize_1400,
vsize_1600,
vsize_1800,
vsize_2000
)
VALUES (NOW(), 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0)`;
const [result]: any = await DB.query(query);
return result.insertId;
} catch (e) {
logger.err('$createZeroedStatistic() error' + (e instanceof Error ? e.message : e));
}
}
public async $create(statistics: Statistic): Promise<number | undefined> {
try {
const query = `INSERT INTO statistics(
added,
unconfirmed_transactions,
tx_per_second,
vbytes_per_second,
mempool_byte_weight,
fee_data,
total_fee,
vsize_1,
vsize_2,
vsize_3,
vsize_4,
vsize_5,
vsize_6,
vsize_8,
vsize_10,
vsize_12,
vsize_15,
vsize_20,
vsize_30,
vsize_40,
vsize_50,
vsize_60,
vsize_70,
vsize_80,
vsize_90,
vsize_100,
vsize_125,
vsize_150,
vsize_175,
vsize_200,
vsize_250,
vsize_300,
vsize_350,
vsize_400,
vsize_500,
vsize_600,
vsize_700,
vsize_800,
vsize_900,
vsize_1000,
vsize_1200,
vsize_1400,
vsize_1600,
vsize_1800,
vsize_2000
)
VALUES (${statistics.added}, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?,
?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?)`;
const params: (string | number)[] = [
statistics.unconfirmed_transactions,
statistics.tx_per_second,
statistics.vbytes_per_second,
statistics.mempool_byte_weight,
statistics.fee_data,
statistics.total_fee,
statistics.vsize_1,
statistics.vsize_2,
statistics.vsize_3,
statistics.vsize_4,
statistics.vsize_5,
statistics.vsize_6,
statistics.vsize_8,
statistics.vsize_10,
statistics.vsize_12,
statistics.vsize_15,
statistics.vsize_20,
statistics.vsize_30,
statistics.vsize_40,
statistics.vsize_50,
statistics.vsize_60,
statistics.vsize_70,
statistics.vsize_80,
statistics.vsize_90,
statistics.vsize_100,
statistics.vsize_125,
statistics.vsize_150,
statistics.vsize_175,
statistics.vsize_200,
statistics.vsize_250,
statistics.vsize_300,
statistics.vsize_350,
statistics.vsize_400,
statistics.vsize_500,
statistics.vsize_600,
statistics.vsize_700,
statistics.vsize_800,
statistics.vsize_900,
statistics.vsize_1000,
statistics.vsize_1200,
statistics.vsize_1400,
statistics.vsize_1600,
statistics.vsize_1800,
statistics.vsize_2000,
];
const [result]: any = await DB.query(query, params);
return result.insertId;
} catch (e) {
logger.err('$create() error' + (e instanceof Error ? e.message : e));
}
}
private getQueryForDaysAvg(div: number, interval: string) {
return `SELECT
UNIX_TIMESTAMP(added) as added,
CAST(avg(vbytes_per_second) as DOUBLE) as vbytes_per_second,
CAST(avg(vsize_1) as DOUBLE) as vsize_1,
CAST(avg(vsize_2) as DOUBLE) as vsize_2,
CAST(avg(vsize_3) as DOUBLE) as vsize_3,
CAST(avg(vsize_4) as DOUBLE) as vsize_4,
CAST(avg(vsize_5) as DOUBLE) as vsize_5,
CAST(avg(vsize_6) as DOUBLE) as vsize_6,
CAST(avg(vsize_8) as DOUBLE) as vsize_8,
CAST(avg(vsize_10) as DOUBLE) as vsize_10,
CAST(avg(vsize_12) as DOUBLE) as vsize_12,
CAST(avg(vsize_15) as DOUBLE) as vsize_15,
CAST(avg(vsize_20) as DOUBLE) as vsize_20,
CAST(avg(vsize_30) as DOUBLE) as vsize_30,
CAST(avg(vsize_40) as DOUBLE) as vsize_40,
CAST(avg(vsize_50) as DOUBLE) as vsize_50,
CAST(avg(vsize_60) as DOUBLE) as vsize_60,
CAST(avg(vsize_70) as DOUBLE) as vsize_70,
CAST(avg(vsize_80) as DOUBLE) as vsize_80,
CAST(avg(vsize_90) as DOUBLE) as vsize_90,
CAST(avg(vsize_100) as DOUBLE) as vsize_100,
CAST(avg(vsize_125) as DOUBLE) as vsize_125,
CAST(avg(vsize_150) as DOUBLE) as vsize_150,
CAST(avg(vsize_175) as DOUBLE) as vsize_175,
CAST(avg(vsize_200) as DOUBLE) as vsize_200,
CAST(avg(vsize_250) as DOUBLE) as vsize_250,
CAST(avg(vsize_300) as DOUBLE) as vsize_300,
CAST(avg(vsize_350) as DOUBLE) as vsize_350,
CAST(avg(vsize_400) as DOUBLE) as vsize_400,
CAST(avg(vsize_500) as DOUBLE) as vsize_500,
CAST(avg(vsize_600) as DOUBLE) as vsize_600,
CAST(avg(vsize_700) as DOUBLE) as vsize_700,
CAST(avg(vsize_800) as DOUBLE) as vsize_800,
CAST(avg(vsize_900) as DOUBLE) as vsize_900,
CAST(avg(vsize_1000) as DOUBLE) as vsize_1000,
CAST(avg(vsize_1200) as DOUBLE) as vsize_1200,
CAST(avg(vsize_1400) as DOUBLE) as vsize_1400,
CAST(avg(vsize_1600) as DOUBLE) as vsize_1600,
CAST(avg(vsize_1800) as DOUBLE) as vsize_1800,
CAST(avg(vsize_2000) as DOUBLE) as vsize_2000 \
FROM statistics \
WHERE added BETWEEN DATE_SUB(NOW(), INTERVAL ${interval}) AND NOW() \
GROUP BY UNIX_TIMESTAMP(added) DIV ${div} \
ORDER BY statistics.added DESC;`;
}
private getQueryForDays(div: number, interval: string) {
return `SELECT
UNIX_TIMESTAMP(added) as added,
CAST(avg(vbytes_per_second) as DOUBLE) as vbytes_per_second,
vsize_1,
vsize_2,
vsize_3,
vsize_4,
vsize_5,
vsize_6,
vsize_8,
vsize_10,
vsize_12,
vsize_15,
vsize_20,
vsize_30,
vsize_40,
vsize_50,
vsize_60,
vsize_70,
vsize_80,
vsize_90,
vsize_100,
vsize_125,
vsize_150,
vsize_175,
vsize_200,
vsize_250,
vsize_300,
vsize_350,
vsize_400,
vsize_500,
vsize_600,
vsize_700,
vsize_800,
vsize_900,
vsize_1000,
vsize_1200,
vsize_1400,
vsize_1600,
vsize_1800,
vsize_2000 \
FROM statistics \
WHERE added BETWEEN DATE_SUB(NOW(), INTERVAL ${interval}) AND NOW() \
GROUP BY UNIX_TIMESTAMP(added) DIV ${div} \
ORDER BY statistics.added DESC;`;
}
public async $get(id: number): Promise<OptimizedStatistic | undefined> {
try {
const query = `SELECT *, UNIX_TIMESTAMP(added) as added FROM statistics WHERE id = ?`;
const [rows] = await DB.query(query, [id]);
if (rows[0]) {
return this.mapStatisticToOptimizedStatistic([rows[0]])[0];
}
} catch (e) {
logger.err('$get() error' + (e instanceof Error ? e.message : e));
}
}
public async $list2H(): Promise<OptimizedStatistic[]> {
try {
const query = `SELECT *, UNIX_TIMESTAMP(added) as added FROM statistics ORDER BY statistics.added DESC LIMIT 120`;
const [rows] = await DB.query({ sql: query, timeout: this.queryTimeout });
return this.mapStatisticToOptimizedStatistic(rows as Statistic[]);
} catch (e) {
logger.err('$list2H() error' + (e instanceof Error ? e.message : e));
return [];
}
}
public async $list24H(): Promise<OptimizedStatistic[]> {
try {
const query = `SELECT *, UNIX_TIMESTAMP(added) as added FROM statistics ORDER BY statistics.added DESC LIMIT 1440`;
const [rows] = await DB.query({ sql: query, timeout: this.queryTimeout });
return this.mapStatisticToOptimizedStatistic(rows as Statistic[]);
} catch (e) {
logger.err('$list24H() error' + (e instanceof Error ? e.message : e));
return [];
}
}
public async $list1W(): Promise<OptimizedStatistic[]> {
try {
const query = this.getQueryForDaysAvg(300, '1 WEEK'); // 5m interval
const [rows] = await DB.query({ sql: query, timeout: this.queryTimeout });
return this.mapStatisticToOptimizedStatistic(rows as Statistic[]);
} catch (e) {
logger.err('$list1W() error' + (e instanceof Error ? e.message : e));
return [];
}
}
public async $list1M(): Promise<OptimizedStatistic[]> {
try {
const query = this.getQueryForDaysAvg(1800, '1 MONTH'); // 30m interval
const [rows] = await DB.query({ sql: query, timeout: this.queryTimeout });
return this.mapStatisticToOptimizedStatistic(rows as Statistic[]);
} catch (e) {
logger.err('$list1M() error' + (e instanceof Error ? e.message : e));
return [];
}
}
public async $list3M(): Promise<OptimizedStatistic[]> {
try {
const query = this.getQueryForDaysAvg(7200, '3 MONTH'); // 2h interval
const [rows] = await DB.query({ sql: query, timeout: this.queryTimeout });
return this.mapStatisticToOptimizedStatistic(rows as Statistic[]);
} catch (e) {
logger.err('$list3M() error' + (e instanceof Error ? e.message : e));
return [];
}
}
public async $list6M(): Promise<OptimizedStatistic[]> {
try {
const query = this.getQueryForDaysAvg(10800, '6 MONTH'); // 3h interval
const [rows] = await DB.query({ sql: query, timeout: this.queryTimeout });
return this.mapStatisticToOptimizedStatistic(rows as Statistic[]);
} catch (e) {
logger.err('$list6M() error' + (e instanceof Error ? e.message : e));
return [];
}
}
public async $list1Y(): Promise<OptimizedStatistic[]> {
try {
const query = this.getQueryForDays(28800, '1 YEAR'); // 8h interval
const [rows] = await DB.query({ sql: query, timeout: this.queryTimeout });
return this.mapStatisticToOptimizedStatistic(rows as Statistic[]);
} catch (e) {
logger.err('$list1Y() error' + (e instanceof Error ? e.message : e));
return [];
}
}
public async $list2Y(): Promise<OptimizedStatistic[]> {
try {
const query = this.getQueryForDays(28800, '2 YEAR'); // 8h interval
const [rows] = await DB.query({ sql: query, timeout: this.queryTimeout });
return this.mapStatisticToOptimizedStatistic(rows as Statistic[]);
} catch (e) {
logger.err('$list2Y() error' + (e instanceof Error ? e.message : e));
return [];
}
}
public async $list3Y(): Promise<OptimizedStatistic[]> {
try {
const query = this.getQueryForDays(43200, '3 YEAR'); // 12h interval
const [rows] = await DB.query({ sql: query, timeout: this.queryTimeout });
return this.mapStatisticToOptimizedStatistic(rows as Statistic[]);
} catch (e) {
logger.err('$list3Y() error' + (e instanceof Error ? e.message : e));
return [];
}
}
private mapStatisticToOptimizedStatistic(statistic: Statistic[]): OptimizedStatistic[] {
return statistic.map((s) => {
return {
added: s.added,
vbytes_per_second: s.vbytes_per_second,
mempool_byte_weight: s.mempool_byte_weight,
total_fee: s.total_fee,
vsizes: [
s.vsize_1,
s.vsize_2,
s.vsize_3,
s.vsize_4,
s.vsize_5,
s.vsize_6,
s.vsize_8,
s.vsize_10,
s.vsize_12,
s.vsize_15,
s.vsize_20,
s.vsize_30,
s.vsize_40,
s.vsize_50,
s.vsize_60,
s.vsize_70,
s.vsize_80,
s.vsize_90,
s.vsize_100,
s.vsize_125,
s.vsize_150,
s.vsize_175,
s.vsize_200,
s.vsize_250,
s.vsize_300,
s.vsize_350,
s.vsize_400,
s.vsize_500,
s.vsize_600,
s.vsize_700,
s.vsize_800,
s.vsize_900,
s.vsize_1000,
s.vsize_1200,
s.vsize_1400,
s.vsize_1600,
s.vsize_1800,
s.vsize_2000,
]
};
});
}
}
export default new StatisticsApi();
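// The vsizes array built by mapStatisticToOptimizedStatistic follows the
// order of the vsize_N columns above, i.e. the same fee-rate ladder used
// elsewhere in the backend, so vsizes[i] is the pending vsize in the bucket
// whose fee rate is feeLadder[i]. A lookup sketch:
const feeLadder = [1, 2, 3, 4, 5, 6, 8, 10, 12, 15, 20, 30, 40, 50, 60, 70, 80, 90, 100, 125,
  150, 175, 200, 250, 300, 350, 400, 500, 600, 700, 800, 900, 1000, 1200, 1400, 1600, 1800, 2000];
function vsizeAtFeeRate(stat: { vsizes: number[] }, feeRate: number): number {
  const i = feeLadder.indexOf(feeRate);
  return i === -1 ? 0 : stat.vsizes[i];
}
// vsizeAtFeeRate(stats, 20) -> vsize waiting in the ~20 sat/vB bucket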

View File

@@ -1,67 +0,0 @@
import { Application, Request, Response } from 'express';
import config from '../../config';
import statisticsApi from './statistics-api';
class StatisticsRoutes {
public initRoutes(app: Application) {
app
.get(config.MEMPOOL.API_URL_PREFIX + 'statistics/2h', this.$getStatisticsByTime.bind(this, '2h'))
.get(config.MEMPOOL.API_URL_PREFIX + 'statistics/24h', this.$getStatisticsByTime.bind(this, '24h'))
.get(config.MEMPOOL.API_URL_PREFIX + 'statistics/1w', this.$getStatisticsByTime.bind(this, '1w'))
.get(config.MEMPOOL.API_URL_PREFIX + 'statistics/1m', this.$getStatisticsByTime.bind(this, '1m'))
.get(config.MEMPOOL.API_URL_PREFIX + 'statistics/3m', this.$getStatisticsByTime.bind(this, '3m'))
.get(config.MEMPOOL.API_URL_PREFIX + 'statistics/6m', this.$getStatisticsByTime.bind(this, '6m'))
.get(config.MEMPOOL.API_URL_PREFIX + 'statistics/1y', this.$getStatisticsByTime.bind(this, '1y'))
.get(config.MEMPOOL.API_URL_PREFIX + 'statistics/2y', this.$getStatisticsByTime.bind(this, '2y'))
.get(config.MEMPOOL.API_URL_PREFIX + 'statistics/3y', this.$getStatisticsByTime.bind(this, '3y'))
;
}
private async $getStatisticsByTime(time: '2h' | '24h' | '1w' | '1m' | '3m' | '6m' | '1y' | '2y' | '3y', req: Request, res: Response) {
res.header('Pragma', 'public');
res.header('Cache-control', 'public');
res.setHeader('Expires', new Date(Date.now() + 1000 * 300).toUTCString());
try {
let result;
switch (time as string) {
case '2h':
result = await statisticsApi.$list2H();
res.setHeader('Expires', new Date(Date.now() + 1000 * 30).toUTCString());
break;
case '24h':
result = await statisticsApi.$list24H();
res.setHeader('Expires', new Date(Date.now() + 1000 * 60).toUTCString());
break;
case '1w':
result = await statisticsApi.$list1W();
break;
case '1m':
result = await statisticsApi.$list1M();
break;
case '3m':
result = await statisticsApi.$list3M();
break;
case '6m':
result = await statisticsApi.$list6M();
break;
case '1y':
result = await statisticsApi.$list1Y();
break;
case '2y':
result = await statisticsApi.$list2Y();
break;
case '3y':
result = await statisticsApi.$list3Y();
break;
default:
result = await statisticsApi.$list2H();
}
res.json(result);
} catch (e) {
res.status(500).send(e instanceof Error ? e.message : e);
}
}
}
export default new StatisticsRoutes();
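// Client-side sketch of the endpoints registered above; '/api/v1/' stands in
// for config.MEMPOOL.API_URL_PREFIX, which is deployment-dependent.
async function fetchMempoolStats(window: '2h' | '24h' | '1w' | '1m' | '3m' | '6m' | '1y' | '2y' | '3y'): Promise<any[]> {
  const res = await fetch(`/api/v1/statistics/${window}`);
  if (!res.ok) {
    throw new Error(`statistics/${window} failed with HTTP ${res.status}`);
  }
  return res.json(); // an array of OptimizedStatistic objects
}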

View File

@@ -1,153 +0,0 @@
import memPool from '../mempool';
import logger from '../../logger';
import { TransactionExtended, OptimizedStatistic } from '../../mempool.interfaces';
import { Common } from '../common';
import statisticsApi from './statistics-api';
class Statistics {
protected intervalTimer: NodeJS.Timer | undefined;
protected newStatisticsEntryCallback: ((stats: OptimizedStatistic) => void) | undefined;
public setNewStatisticsEntryCallback(fn: (stats: OptimizedStatistic) => void) {
this.newStatisticsEntryCallback = fn;
}
public startStatistics(): void {
logger.info('Starting statistics service');
const now = new Date();
const nextInterval = new Date(now.getFullYear(), now.getMonth(), now.getDate(), now.getHours(),
now.getMinutes() + 1, 0, 0); // top of the next minute
const difference = nextInterval.getTime() - now.getTime();
setTimeout(() => {
this.runStatistics();
this.intervalTimer = setInterval(() => {
this.runStatistics();
}, 1 * 60 * 1000);
}, difference);
}
private async runStatistics(): Promise<void> {
if (!memPool.isInSync()) {
return;
}
const currentMempool = memPool.getMempool();
const txPerSecond = memPool.getTxPerSecond();
const vBytesPerSecond = memPool.getVBytesPerSecond();
logger.debug('Running statistics');
let memPoolArray: TransactionExtended[] = [];
for (const i in currentMempool) {
if (currentMempool.hasOwnProperty(i)) {
memPoolArray.push(currentMempool[i]);
}
}
// Remove 0 and undefined
memPoolArray = memPoolArray.filter((tx) => tx.effectiveFeePerVsize);
if (!memPoolArray.length) {
try {
const insertIdZeroed = await statisticsApi.$createZeroedStatistic();
if (this.newStatisticsEntryCallback && insertIdZeroed) {
const newStats = await statisticsApi.$get(insertIdZeroed);
if (newStats) {
this.newStatisticsEntryCallback(newStats);
}
}
} catch (e) {
logger.err('Unable to insert zeroed statistics. ' + e);
}
return;
}
memPoolArray.sort((a, b) => a.effectiveFeePerVsize - b.effectiveFeePerVsize);
const totalWeight = memPoolArray.map((tx) => tx.vsize).reduce((acc, curr) => acc + curr) * 4;
const totalFee = memPoolArray.map((tx) => tx.fee).reduce((acc, curr) => acc + curr);
const logFees = [1, 2, 3, 4, 5, 6, 8, 10, 12, 15, 20, 30, 40, 50, 60, 70, 80, 90, 100, 125, 150, 175, 200,
250, 300, 350, 400, 500, 600, 700, 800, 900, 1000, 1200, 1400, 1600, 1800, 2000];
const weightVsizeFees: { [feePerWU: number]: number } = {};
const lastItem = logFees.length - 1;
memPoolArray.forEach((transaction) => {
for (let i = 0; i < logFees.length; i++) {
if (
(Common.isLiquid() && (i === lastItem || transaction.effectiveFeePerVsize * 10 < logFees[i + 1]))
||
(!Common.isLiquid() && (i === lastItem || transaction.effectiveFeePerVsize < logFees[i + 1]))
) {
if (weightVsizeFees[logFees[i]]) {
weightVsizeFees[logFees[i]] += transaction.vsize;
} else {
weightVsizeFees[logFees[i]] = transaction.vsize;
}
break;
}
}
});
try {
const insertId = await statisticsApi.$create({
added: 'NOW()',
unconfirmed_transactions: memPoolArray.length,
tx_per_second: txPerSecond,
vbytes_per_second: Math.round(vBytesPerSecond),
mempool_byte_weight: totalWeight,
total_fee: totalFee,
fee_data: '',
vsize_1: weightVsizeFees['1'] || 0,
vsize_2: weightVsizeFees['2'] || 0,
vsize_3: weightVsizeFees['3'] || 0,
vsize_4: weightVsizeFees['4'] || 0,
vsize_5: weightVsizeFees['5'] || 0,
vsize_6: weightVsizeFees['6'] || 0,
vsize_8: weightVsizeFees['8'] || 0,
vsize_10: weightVsizeFees['10'] || 0,
vsize_12: weightVsizeFees['12'] || 0,
vsize_15: weightVsizeFees['15'] || 0,
vsize_20: weightVsizeFees['20'] || 0,
vsize_30: weightVsizeFees['30'] || 0,
vsize_40: weightVsizeFees['40'] || 0,
vsize_50: weightVsizeFees['50'] || 0,
vsize_60: weightVsizeFees['60'] || 0,
vsize_70: weightVsizeFees['70'] || 0,
vsize_80: weightVsizeFees['80'] || 0,
vsize_90: weightVsizeFees['90'] || 0,
vsize_100: weightVsizeFees['100'] || 0,
vsize_125: weightVsizeFees['125'] || 0,
vsize_150: weightVsizeFees['150'] || 0,
vsize_175: weightVsizeFees['175'] || 0,
vsize_200: weightVsizeFees['200'] || 0,
vsize_250: weightVsizeFees['250'] || 0,
vsize_300: weightVsizeFees['300'] || 0,
vsize_350: weightVsizeFees['350'] || 0,
vsize_400: weightVsizeFees['400'] || 0,
vsize_500: weightVsizeFees['500'] || 0,
vsize_600: weightVsizeFees['600'] || 0,
vsize_700: weightVsizeFees['700'] || 0,
vsize_800: weightVsizeFees['800'] || 0,
vsize_900: weightVsizeFees['900'] || 0,
vsize_1000: weightVsizeFees['1000'] || 0,
vsize_1200: weightVsizeFees['1200'] || 0,
vsize_1400: weightVsizeFees['1400'] || 0,
vsize_1600: weightVsizeFees['1600'] || 0,
vsize_1800: weightVsizeFees['1800'] || 0,
vsize_2000: weightVsizeFees['2000'] || 0,
});
if (this.newStatisticsEntryCallback && insertId) {
const newStats = await statisticsApi.$get(insertId);
if (newStats) {
this.newStatisticsEntryCallback(newStats);
}
}
} catch (e) {
logger.err('Unable to insert statistics. ' + e);
}
}
}
export default new Statistics();
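// Standalone sketch of the fee-bucketing rule in runStatistics() above: a
// transaction's vsize is added to the first ladder bucket whose upper
// neighbour exceeds its effective fee rate (scaled x10 on Liquid, where
// rates run an order of magnitude lower).
const ladder = [1, 2, 3, 4, 5, 6, 8, 10, 12, 15, 20, 30, 40, 50, 60, 70, 80, 90, 100, 125,
  150, 175, 200, 250, 300, 350, 400, 500, 600, 700, 800, 900, 1000, 1200, 1400, 1600, 1800, 2000];
function feeBucket(effectiveFeePerVsize: number, isLiquid = false): number {
  const rate = isLiquid ? effectiveFeePerVsize * 10 : effectiveFeePerVsize;
  const lastItem = ladder.length - 1;
  for (let i = 0; i < ladder.length; i++) {
    if (i === lastItem || rate < ladder[i + 1]) {
      return ladder[i];
    }
  }
  return ladder[lastItem]; // unreachable, keeps the compiler happy
}
// feeBucket(7.3)  === 6    (7.3 < 8, so it lands in the 6 bucket)
// feeBucket(2500) === 2000 (everything above the top rung lands in 2000)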

View File

@@ -1,57 +0,0 @@
import bitcoinApi from './bitcoin/bitcoin-api-factory';
import { TransactionExtended, TransactionMinerInfo } from '../mempool.interfaces';
import { IEsploraApi } from './bitcoin/esplora-api.interface';
import config from '../config';
import { Common } from './common';
class TransactionUtils {
constructor() { }
public stripCoinbaseTransaction(tx: TransactionExtended): TransactionMinerInfo {
return {
vin: [{
scriptsig: tx.vin[0].scriptsig || tx.vin[0]['coinbase']
}],
vout: tx.vout
.map((vout) => ({
scriptpubkey_address: vout.scriptpubkey_address,
value: vout.value
}))
.filter((vout) => vout.value)
};
}
public async $getTransactionExtended(txId: string, addPrevouts = false, lazyPrevouts = false): Promise<TransactionExtended> {
const transaction: IEsploraApi.Transaction = await bitcoinApi.$getRawTransaction(txId, false, addPrevouts, lazyPrevouts);
return this.extendTransaction(transaction);
}
private extendTransaction(transaction: IEsploraApi.Transaction): TransactionExtended {
// @ts-ignore
if (transaction.vsize) {
// @ts-ignore
return transaction;
}
const feePerVbytes = Math.max(Common.isLiquid() ? 0.1 : 1,
(transaction.fee || 0) / (transaction.weight / 4));
const transactionExtended: TransactionExtended = Object.assign({
vsize: Math.round(transaction.weight / 4),
feePerVsize: feePerVbytes,
effectiveFeePerVsize: feePerVbytes,
}, transaction);
if (!transaction.status.confirmed) {
transactionExtended.firstSeen = Math.round((new Date().getTime() / 1000));
}
return transactionExtended;
}
public hex2ascii(hex: string) {
let str = '';
for (let i = 0; i < hex.length; i += 2) {
str += String.fromCharCode(parseInt(hex.substring(i, i + 2), 16));
}
return str;
}
}
export default new TransactionUtils();
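extendTransaction above derives vsize as weight / 4 and floors the fee rate at 1 sat/vB (0.1 on Liquid). A worked example with hypothetical numbers:

// Sketch: a 565-WU transaction paying a 282 sat fee.
const weight = 565;
const fee = 282;
const vsize = Math.round(weight / 4);                // 141 vbytes
const feePerVsize = Math.max(1, fee / (weight / 4)); // 282 / 141.25 ≈ 2.0 sat/vB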


@@ -1,338 +0,0 @@
import config from '../config';
import logger from '../logger';
import { TransactionExtended, MempoolBlockWithTransactions, AuditTransaction } from '../mempool.interfaces';
import { PairingHeap } from '../utils/pairing-heap';
import { Common } from './common';
import { parentPort } from 'worker_threads';
if (parentPort) {
parentPort.on('message', (params: { mempool: { [txid: string]: TransactionExtended }, blockLimit: number, weightLimit: number | null, condenseRest: boolean}) => {
const { mempool, blocks } = makeBlockTemplates(params);
// return the result to the main thread.
if (parentPort) {
parentPort.postMessage({ mempool, blocks });
}
});
}
/*
* Build projected mempool blocks using an approximation of the transaction selection algorithm from Bitcoin Core
* (see BlockAssembler in https://github.com/bitcoin/bitcoin/blob/master/src/node/miner.cpp)
*
* blockLimit: number of blocks to build in total.
* weightLimit: maximum weight of transactions to consider using the selection algorithm.
* if weightLimit is significantly lower than the mempool size, results may start to diverge from getBlockTemplate
* condenseRest: whether to ignore excess transactions or append them to the final block.
*/
function makeBlockTemplates({ mempool, blockLimit, weightLimit, condenseRest }: { mempool: { [txid: string]: TransactionExtended }, blockLimit: number, weightLimit?: number | null, condenseRest?: boolean | null })
: { mempool: { [txid: string]: TransactionExtended }, blocks: MempoolBlockWithTransactions[] } {
const start = Date.now();
const auditPool: { [txid: string]: AuditTransaction } = {};
const mempoolArray: AuditTransaction[] = [];
const restOfArray: TransactionExtended[] = [];
let weight = 0;
const maxWeight = weightLimit ? Math.max(4_000_000 * blockLimit, weightLimit) : Infinity;
// grab the top feerate txs up to maxWeight
Object.values(mempool).sort((a, b) => b.feePerVsize - a.feePerVsize).forEach(tx => {
weight += tx.weight;
if (weight >= maxWeight) {
restOfArray.push(tx);
return;
}
// initializing everything up front helps V8 optimize property access later
auditPool[tx.txid] = {
txid: tx.txid,
fee: tx.fee,
size: tx.size,
weight: tx.weight,
feePerVsize: tx.feePerVsize,
vin: tx.vin,
relativesSet: false,
ancestorMap: new Map<string, AuditTransaction>(),
children: new Set<AuditTransaction>(),
ancestorFee: 0,
ancestorWeight: 0,
score: 0,
used: false,
modified: false,
modifiedNode: null,
};
mempoolArray.push(auditPool[tx.txid]);
});
// Build relatives graph & calculate ancestor scores
for (const tx of mempoolArray) {
if (!tx.relativesSet) {
setRelatives(tx, auditPool);
}
}
// Sort by descending ancestor score
mempoolArray.sort((a, b) => (b.score || 0) - (a.score || 0));
// Build blocks by greedily choosing the highest feerate package
// (i.e. the package rooted in the transaction with the best ancestor score)
const blocks: MempoolBlockWithTransactions[] = [];
let blockWeight = 4000;
let blockSize = 0;
let transactions: AuditTransaction[] = [];
const modified: PairingHeap<AuditTransaction> = new PairingHeap((a, b): boolean => (a.score || 0) > (b.score || 0));
let overflow: AuditTransaction[] = [];
let failures = 0;
let top = 0;
while ((top < mempoolArray.length || !modified.isEmpty()) && (condenseRest || blocks.length < blockLimit)) {
// skip invalid transactions
while (top < mempoolArray.length && (mempoolArray[top].used || mempoolArray[top].modified)) {
top++;
}
// Select best next package
let nextTx;
const nextPoolTx = mempoolArray[top];
const nextModifiedTx = modified.peek();
if (nextPoolTx && (!nextModifiedTx || (nextPoolTx.score || 0) > (nextModifiedTx.score || 0))) {
nextTx = nextPoolTx;
top++;
} else {
modified.pop();
if (nextModifiedTx) {
nextTx = nextModifiedTx;
nextTx.modifiedNode = undefined;
}
}
if (nextTx && !nextTx.used) {
// Check if the package fits into this block
if (blockWeight + nextTx.ancestorWeight < config.MEMPOOL.BLOCK_WEIGHT_UNITS) {
const ancestors: AuditTransaction[] = Array.from(nextTx.ancestorMap.values());
const descendants: AuditTransaction[] = [];
// sort ancestors by dependency graph (equivalent to sorting by ascending ancestor count)
const sortedTxSet = [...ancestors.sort((a, b) => { return (a.ancestorMap.size || 0) - (b.ancestorMap.size || 0); }), nextTx];
const effectiveFeeRate = nextTx.ancestorFee / (nextTx.ancestorWeight / 4);
const used: AuditTransaction[] = [];
while (sortedTxSet.length) {
const ancestor = sortedTxSet.pop();
const mempoolTx = mempool[ancestor.txid];
ancestor.used = true;
ancestor.usedBy = nextTx.txid;
// update original copy of this tx with effective fee rate & relatives data
mempoolTx.effectiveFeePerVsize = effectiveFeeRate;
mempoolTx.ancestors = sortedTxSet.map((a) => {
return {
txid: a.txid,
fee: a.fee,
weight: a.weight,
};
}).reverse();
mempoolTx.descendants = descendants.map((a) => {
return {
txid: a.txid,
fee: a.fee,
weight: a.weight,
};
});
descendants.push(ancestor);
mempoolTx.cpfpChecked = true;
transactions.push(ancestor);
blockSize += ancestor.size;
blockWeight += ancestor.weight;
used.push(ancestor);
}
// remove these as valid package ancestors for any descendants remaining in the mempool
if (used.length) {
used.forEach(tx => {
updateDescendants(tx, auditPool, modified);
});
}
failures = 0;
} else {
// hold this package in an overflow list while we check for smaller options
overflow.push(nextTx);
failures++;
}
}
// this block is full
const exceededPackageTries = failures > 1000 && blockWeight > (config.MEMPOOL.BLOCK_WEIGHT_UNITS - 4000);
const queueEmpty = top >= mempoolArray.length && modified.isEmpty();
if ((exceededPackageTries || queueEmpty) && (!condenseRest || blocks.length < blockLimit - 1)) {
// construct this block
if (transactions.length) {
blocks.push(dataToMempoolBlocks(transactions.map(t => mempool[t.txid]), blockSize, blockWeight, blocks.length));
}
// reset for the next block
transactions = [];
blockSize = 0;
blockWeight = 4000;
// 'overflow' packages didn't fit in this block, but are valid candidates for the next
for (const overflowTx of overflow.reverse()) {
if (overflowTx.modified) {
overflowTx.modifiedNode = modified.add(overflowTx);
} else {
top--;
mempoolArray[top] = overflowTx;
}
}
overflow = [];
}
}
if (condenseRest) {
// pack any leftover transactions into the last block
for (const tx of overflow) {
if (!tx || tx.used) {
continue;
}
blockWeight += tx.weight;
blockSize += tx.size;
const mempoolTx = mempool[tx.txid];
// update original copy of this tx with effective fee rate & relatives data
mempoolTx.effectiveFeePerVsize = tx.score;
mempoolTx.ancestors = (Array.from(tx.ancestorMap?.values()) as AuditTransaction[]).map((a) => {
return {
txid: a.txid,
fee: a.fee,
weight: a.weight,
};
});
mempoolTx.bestDescendant = null;
mempoolTx.cpfpChecked = true;
transactions.push(tx);
tx.used = true;
}
const blockTransactions = transactions.map(t => mempool[t.txid]);
restOfArray.forEach(tx => {
blockWeight += tx.weight;
blockSize += tx.size;
tx.effectiveFeePerVsize = tx.feePerVsize;
tx.cpfpChecked = false;
tx.ancestors = [];
tx.bestDescendant = null;
blockTransactions.push(tx);
});
if (blockTransactions.length) {
blocks.push(dataToMempoolBlocks(blockTransactions, blockSize, blockWeight, blocks.length));
}
transactions = [];
} else if (transactions.length) {
blocks.push(dataToMempoolBlocks(transactions.map(t => mempool[t.txid]), blockSize, blockWeight, blocks.length));
}
const end = Date.now();
const time = end - start;
logger.debug('Mempool templates calculated in ' + time / 1000 + ' seconds');
return {
mempool,
blocks
};
}
// traverse in-mempool ancestors
// recursion unavoidable, but should be limited to depth < 25 by mempool policy
function setRelatives(
tx: AuditTransaction,
mempool: { [txid: string]: AuditTransaction },
): void {
for (const parent of tx.vin) {
const parentTx = mempool[parent.txid];
if (parentTx && !tx.ancestorMap?.has(parent.txid)) {
tx.ancestorMap.set(parent.txid, parentTx);
parentTx.children.add(tx);
// visit each node only once
if (!parentTx.relativesSet) {
setRelatives(parentTx, mempool);
}
parentTx.ancestorMap.forEach((ancestor) => {
tx.ancestorMap.set(ancestor.txid, ancestor);
});
}
}
tx.ancestorFee = tx.fee || 0;
tx.ancestorWeight = tx.weight || 0;
tx.ancestorMap.forEach((ancestor) => {
tx.ancestorFee += ancestor.fee;
tx.ancestorWeight += ancestor.weight;
});
tx.score = tx.ancestorFee / ((tx.ancestorWeight / 4) || 1);
tx.relativesSet = true;
}
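// Worked example (editor's illustration): a child paying 1000 sats at 800 WU with one unconfirmed
// parent paying 100 sats at 1200 WU gets ancestorFee = 1100 and ancestorWeight = 2000,
// so score = 1100 / (2000 / 4) = 2.2 sat/vB.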
// iterate over remaining descendants, removing the root as a valid ancestor & updating the ancestor score
// avoids recursion to limit call stack depth
function updateDescendants(
rootTx: AuditTransaction,
mempool: { [txid: string]: AuditTransaction },
modified: PairingHeap<AuditTransaction>,
): void {
const descendantSet: Set<AuditTransaction> = new Set();
// stack of nodes left to visit
const descendants: AuditTransaction[] = [];
let descendantTx;
let tmpScore;
rootTx.children.forEach(childTx => {
if (!descendantSet.has(childTx)) {
descendants.push(childTx);
descendantSet.add(childTx);
}
});
while (descendants.length) {
descendantTx = descendants.pop();
if (descendantTx && descendantTx.ancestorMap && descendantTx.ancestorMap.has(rootTx.txid)) {
// remove tx as ancestor
descendantTx.ancestorMap.delete(rootTx.txid);
descendantTx.ancestorFee -= rootTx.fee;
descendantTx.ancestorWeight -= rootTx.weight;
tmpScore = descendantTx.score;
descendantTx.score = descendantTx.ancestorFee / (descendantTx.ancestorWeight / 4);
if (!descendantTx.modifiedNode) {
descendantTx.modified = true;
descendantTx.modifiedNode = modified.add(descendantTx);
} else {
// rebalance modified heap if score has changed
if (descendantTx.score < tmpScore) {
modified.decreasePriority(descendantTx.modifiedNode);
} else if (descendantTx.score > tmpScore) {
modified.increasePriority(descendantTx.modifiedNode);
}
}
// add this node's children to the stack
descendantTx.children.forEach(childTx => {
// visit each node only once
if (!descendantSet.has(childTx)) {
descendants.push(childTx);
descendantSet.add(childTx);
}
});
}
}
}
function dataToMempoolBlocks(transactions: TransactionExtended[],
blockSize: number, blockWeight: number, blocksIndex: number): MempoolBlockWithTransactions {
let rangeLength = 4;
if (blocksIndex === 0) {
rangeLength = 8;
}
if (transactions.length > 10000) {
rangeLength = 8;
} else if (transactions.length > 4000) {
rangeLength = 6;
}
return {
blockSize: blockSize,
blockVSize: blockWeight / 4,
nTx: transactions.length,
totalFees: transactions.reduce((acc, cur) => acc + cur.fee, 0),
medianFee: Common.percentile(transactions.map((tx) => tx.effectiveFeePerVsize), config.MEMPOOL.RECOMMENDED_FEE_PERCENTILE),
feeRange: Common.getFeesInRange(transactions, rangeLength),
transactionIds: transactions.map((tx) => tx.txid),
transactions: transactions.map((tx) => Common.stripTransaction(tx)),
};
}
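This module runs in a worker thread and expects a single message matching the params shape handled by parentPort at the top of the file. A minimal sketch of the calling side (editor's illustration, assuming the module is compiled to tx-selection-worker.js; the real in-repo caller is not shown in this diff):

import { Worker } from 'worker_threads';

const worker = new Worker('./tx-selection-worker.js');
worker.once('message', ({ blocks }) => {
  // blocks: MempoolBlockWithTransactions[], one entry per projected block
  console.log(`built ${blocks.length} projected block templates`);
});
worker.postMessage({ mempool: {}, blockLimit: 8, weightLimit: null, condenseRest: true });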


@@ -1,584 +0,0 @@
import logger from '../logger';
import * as WebSocket from 'ws';
import {
BlockExtended, TransactionExtended, WebsocketResponse, MempoolBlock, MempoolBlockDelta,
OptimizedStatistic, ILoadingIndicators, IConversionRates
} from '../mempool.interfaces';
import blocks from './blocks';
import memPool from './mempool';
import backendInfo from './backend-info';
import mempoolBlocks from './mempool-blocks';
import fiatConversion from './fiat-conversion';
import { Common } from './common';
import loadingIndicators from './loading-indicators';
import config from '../config';
import transactionUtils from './transaction-utils';
import rbfCache from './rbf-cache';
import difficultyAdjustment from './difficulty-adjustment';
import feeApi from './fee-api';
import BlocksAuditsRepository from '../repositories/BlocksAuditsRepository';
import BlocksSummariesRepository from '../repositories/BlocksSummariesRepository';
import Audit from './audit';
class WebsocketHandler {
private wss: WebSocket.Server | undefined;
private extraInitProperties = {};
constructor() { }
setWebsocketServer(wss: WebSocket.Server) {
this.wss = wss;
}
setExtraInitProperties(property: string, value: any) {
this.extraInitProperties[property] = value;
}
setupConnectionHandling() {
if (!this.wss) {
throw new Error('WebSocket.Server is not set');
}
this.wss.on('connection', (client: WebSocket) => {
client.on('error', logger.info);
client.on('message', async (message: string) => {
try {
const parsedMessage: WebsocketResponse = JSON.parse(message);
const response = {};
if (parsedMessage.action === 'want') {
client['want-blocks'] = parsedMessage.data.indexOf('blocks') > -1;
client['want-mempool-blocks'] = parsedMessage.data.indexOf('mempool-blocks') > -1;
client['want-live-2h-chart'] = parsedMessage.data.indexOf('live-2h-chart') > -1;
client['want-stats'] = parsedMessage.data.indexOf('stats') > -1;
}
if (parsedMessage && parsedMessage['track-tx']) {
if (/^[a-fA-F0-9]{64}$/.test(parsedMessage['track-tx'])) {
client['track-tx'] = parsedMessage['track-tx'];
// The client is telling us it couldn't find the transaction
if (parsedMessage['watch-mempool']) {
const rbfCacheTx = rbfCache.get(client['track-tx']);
if (rbfCacheTx) {
response['txReplaced'] = {
txid: rbfCacheTx.txid,
};
client['track-tx'] = null;
} else {
// It might have appeared before we had the time to start watching for it
const tx = memPool.getMempool()[client['track-tx']];
if (tx) {
if (config.MEMPOOL.BACKEND === 'esplora') {
response['tx'] = tx;
} else {
// tx.prevout is missing from transactions when in bitcoind mode
try {
const fullTx = await transactionUtils.$getTransactionExtended(tx.txid, true);
response['tx'] = fullTx;
} catch (e) {
logger.debug('Error finding transaction: ' + (e instanceof Error ? e.message : e));
}
}
} else {
try {
const fullTx = await transactionUtils.$getTransactionExtended(client['track-tx'], true);
response['tx'] = fullTx;
} catch (e) {
logger.debug('Error finding transaction. ' + (e instanceof Error ? e.message : e));
client['track-mempool-tx'] = parsedMessage['track-tx'];
}
}
}
}
} else {
client['track-tx'] = null;
}
}
if (parsedMessage && parsedMessage['track-address']) {
if (/^([a-km-zA-HJ-NP-Z1-9]{26,35}|[a-km-zA-HJ-NP-Z1-9]{80}|[a-z]{2,5}1[ac-hj-np-z02-9]{8,100}|[A-Z]{2,5}1[AC-HJ-NP-Z02-9]{8,100})$/
.test(parsedMessage['track-address'])) {
let matchedAddress = parsedMessage['track-address'];
if (/^[A-Z]{2,5}1[AC-HJ-NP-Z02-9]{8,100}$/.test(parsedMessage['track-address'])) {
matchedAddress = matchedAddress.toLowerCase();
}
client['track-address'] = matchedAddress;
} else {
client['track-address'] = null;
}
}
if (parsedMessage && parsedMessage['track-asset']) {
if (/^[a-fA-F0-9]{64}$/.test(parsedMessage['track-asset'])) {
client['track-asset'] = parsedMessage['track-asset'];
} else {
client['track-asset'] = null;
}
}
if (parsedMessage && parsedMessage['track-mempool-block'] !== undefined) {
if (Number.isInteger(parsedMessage['track-mempool-block']) && parsedMessage['track-mempool-block'] >= 0) {
const index = parsedMessage['track-mempool-block'];
client['track-mempool-block'] = index;
const mBlocksWithTransactions = mempoolBlocks.getMempoolBlocksWithTransactions();
response['projected-block-transactions'] = {
index: index,
blockTransactions: mBlocksWithTransactions[index]?.transactions || [],
};
} else {
client['track-mempool-block'] = null;
}
}
if (parsedMessage.action === 'init') {
const _blocks = blocks.getBlocks().slice(-config.MEMPOOL.INITIAL_BLOCKS_AMOUNT);
if (!_blocks) {
return;
}
client.send(JSON.stringify(this.getInitData(_blocks)));
}
if (parsedMessage.action === 'ping') {
response['pong'] = true;
}
if (parsedMessage['track-donation'] && parsedMessage['track-donation'].length === 22) {
client['track-donation'] = parsedMessage['track-donation'];
}
if (parsedMessage['track-bisq-market']) {
if (/^[a-z]{3}_[a-z]{3}$/.test(parsedMessage['track-bisq-market'])) {
client['track-bisq-market'] = parsedMessage['track-bisq-market'];
} else {
client['track-bisq-market'] = null;
}
}
if (Object.keys(response).length) {
client.send(JSON.stringify(response));
}
} catch (e) {
logger.debug('Error parsing websocket message: ' + (e instanceof Error ? e.message : e));
}
});
});
}
handleNewDonation(id: string) {
if (!this.wss) {
throw new Error('WebSocket.Server is not set');
}
this.wss.clients.forEach((client) => {
if (client.readyState !== WebSocket.OPEN) {
return;
}
if (client['track-donation'] === id) {
client.send(JSON.stringify({ donationConfirmed: true }));
}
});
}
handleLoadingChanged(indicators: ILoadingIndicators) {
if (!this.wss) {
throw new Error('WebSocket.Server is not set');
}
this.wss.clients.forEach((client) => {
if (client.readyState !== WebSocket.OPEN) {
return;
}
client.send(JSON.stringify({ loadingIndicators: indicators }));
});
}
handleNewConversionRates(conversionRates: IConversionRates) {
if (!this.wss) {
throw new Error('WebSocket.Server is not set');
}
this.wss.clients.forEach((client) => {
if (client.readyState !== WebSocket.OPEN) {
return;
}
client.send(JSON.stringify({ conversions: conversionRates }));
});
}
getInitData(_blocks?: BlockExtended[]) {
if (!_blocks) {
_blocks = blocks.getBlocks().slice(-config.MEMPOOL.INITIAL_BLOCKS_AMOUNT);
}
return {
'mempoolInfo': memPool.getMempoolInfo(),
'vBytesPerSecond': memPool.getVBytesPerSecond(),
'blocks': _blocks,
'conversions': fiatConversion.getConversionRates(),
'mempool-blocks': mempoolBlocks.getMempoolBlocks(),
'transactions': memPool.getLatestTransactions(),
'backendInfo': backendInfo.getBackendInfo(),
'loadingIndicators': loadingIndicators.getLoadingIndicators(),
'da': difficultyAdjustment.getDifficultyAdjustment(),
'fees': feeApi.getRecommendedFee(),
...this.extraInitProperties
};
}
handleNewStatistic(stats: OptimizedStatistic) {
if (!this.wss) {
throw new Error('WebSocket.Server is not set');
}
this.wss.clients.forEach((client) => {
if (client.readyState !== WebSocket.OPEN) {
return;
}
if (!client['want-live-2h-chart']) {
return;
}
client.send(JSON.stringify({
'live-2h-chart': stats
}));
});
}
async handleMempoolChange(newMempool: { [txid: string]: TransactionExtended },
newTransactions: TransactionExtended[], deletedTransactions: TransactionExtended[]): Promise<void> {
if (!this.wss) {
throw new Error('WebSocket.Server is not set');
}
if (config.MEMPOOL.ADVANCED_GBT_MEMPOOL) {
await mempoolBlocks.makeBlockTemplates(newMempool, 8, null, true);
} else {
mempoolBlocks.updateMempoolBlocks(newMempool);
}
const mBlocks = mempoolBlocks.getMempoolBlocks();
const mBlockDeltas = mempoolBlocks.getMempoolBlockDeltas();
const mempoolInfo = memPool.getMempoolInfo();
const vBytesPerSecond = memPool.getVBytesPerSecond();
const rbfTransactions = Common.findRbfTransactions(newTransactions, deletedTransactions);
const da = difficultyAdjustment.getDifficultyAdjustment();
memPool.handleRbfTransactions(rbfTransactions);
const recommendedFees = feeApi.getRecommendedFee();
this.wss.clients.forEach(async (client) => {
if (client.readyState !== WebSocket.OPEN) {
return;
}
const response = {};
if (client['want-stats']) {
response['mempoolInfo'] = mempoolInfo;
response['vBytesPerSecond'] = vBytesPerSecond;
response['transactions'] = newTransactions.slice(0, 6).map((tx) => Common.stripTransaction(tx));
response['da'] = da;
response['fees'] = recommendedFees;
}
if (client['want-mempool-blocks']) {
response['mempool-blocks'] = mBlocks;
}
if (client['track-mempool-tx']) {
const tx = newTransactions.find((t) => t.txid === client['track-mempool-tx']);
if (tx) {
if (config.MEMPOOL.BACKEND !== 'esplora') {
try {
const fullTx = await transactionUtils.$getTransactionExtended(tx.txid, true);
response['tx'] = fullTx;
} catch (e) {
logger.debug('Error finding transaction in mempool: ' + (e instanceof Error ? e.message : e));
}
} else {
response['tx'] = tx;
}
client['track-mempool-tx'] = null;
}
}
if (client['track-address']) {
const foundTransactions: TransactionExtended[] = [];
for (const tx of newTransactions) {
const someVin = tx.vin.some((vin) => !!vin.prevout && vin.prevout.scriptpubkey_address === client['track-address']);
if (someVin) {
if (config.MEMPOOL.BACKEND !== 'esplora') {
try {
const fullTx = await transactionUtils.$getTransactionExtended(tx.txid, true);
foundTransactions.push(fullTx);
} catch (e) {
logger.debug('Error finding transaction in mempool: ' + (e instanceof Error ? e.message : e));
}
} else {
foundTransactions.push(tx);
}
continue;
}
const someVout = tx.vout.some((vout) => vout.scriptpubkey_address === client['track-address']);
if (someVout) {
if (config.MEMPOOL.BACKEND !== 'esplora') {
try {
const fullTx = await transactionUtils.$getTransactionExtended(tx.txid, true);
foundTransactions.push(fullTx);
} catch (e) {
logger.debug('Error finding transaction in mempool: ' + (e instanceof Error ? e.message : e));
}
} else {
foundTransactions.push(tx);
}
}
}
if (foundTransactions.length) {
response['address-transactions'] = foundTransactions;
}
}
if (client['track-asset']) {
const foundTransactions: TransactionExtended[] = [];
newTransactions.forEach((tx) => {
if (client['track-asset'] === Common.nativeAssetId) {
if (tx.vin.some((vin) => !!vin.is_pegin)) {
foundTransactions.push(tx);
return;
}
if (tx.vout.some((vout) => !!vout.pegout)) {
foundTransactions.push(tx);
}
} else {
if (tx.vin.some((vin) => !!vin.issuance && vin.issuance.asset_id === client['track-asset'])) {
foundTransactions.push(tx);
return;
}
if (tx.vout.some((vout) => !!vout.asset && vout.asset === client['track-asset'])) {
foundTransactions.push(tx);
}
}
});
if (foundTransactions.length) {
response['address-transactions'] = foundTransactions;
}
}
if (client['track-tx']) {
const outspends: object = {};
newTransactions.forEach((tx) => tx.vin.forEach((vin, i) => {
if (vin.txid === client['track-tx']) {
outspends[vin.vout] = {
vin: i,
txid: tx.txid,
};
}
}));
if (Object.keys(outspends).length) {
response['utxoSpent'] = outspends;
}
if (rbfTransactions[client['track-tx']]) {
for (const rbfTransaction in rbfTransactions) {
if (client['track-tx'] === rbfTransaction) {
response['rbfTransaction'] = {
txid: rbfTransactions[rbfTransaction].txid,
};
break;
}
}
}
}
if (client['track-mempool-block'] >= 0) {
const index = client['track-mempool-block'];
if (mBlockDeltas[index]) {
response['projected-block-transactions'] = {
index: index,
delta: mBlockDeltas[index],
};
}
}
if (Object.keys(response).length) {
client.send(JSON.stringify(response));
}
});
}
async handleNewBlock(block: BlockExtended, txIds: string[], transactions: TransactionExtended[]): Promise<void> {
if (!this.wss) {
throw new Error('WebSocket.Server is not set');
}
const _memPool = memPool.getMempool();
if (config.MEMPOOL.ADVANCED_GBT_AUDIT) {
await mempoolBlocks.makeBlockTemplates(_memPool, 2);
} else {
mempoolBlocks.updateMempoolBlocks(_memPool);
}
if (Common.indexingEnabled() && memPool.isInSync()) {
const projectedBlocks = mempoolBlocks.getMempoolBlocksWithTransactions();
const { censored, added, fresh, score } = Audit.auditBlock(transactions, projectedBlocks, _memPool);
const matchRate = Math.round(score * 100 * 100) / 100;
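// score lies in [0, 1]; multiplying by 100 * 100 and dividing by 100 yields a percentage with two decimals, e.g. 0.98765 -> 98.77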
const stripped = projectedBlocks[0]?.transactions ? projectedBlocks[0].transactions.map((tx) => {
return {
txid: tx.txid,
vsize: tx.vsize,
fee: tx.fee ? Math.round(tx.fee) : 0,
value: tx.value,
};
}) : [];
BlocksSummariesRepository.$saveTemplate({
height: block.height,
template: {
id: block.id,
transactions: stripped
}
});
BlocksAuditsRepository.$saveAudit({
time: block.timestamp,
height: block.height,
hash: block.id,
addedTxs: added,
missingTxs: censored,
freshTxs: fresh,
matchRate: matchRate,
});
if (block.extras) {
block.extras.matchRate = matchRate;
}
}
// Update mempool to remove transactions included in the new block
for (const txId of txIds) {
delete _memPool[txId];
}
if (config.MEMPOOL.ADVANCED_GBT_MEMPOOL) {
await mempoolBlocks.makeBlockTemplates(_memPool, 8, null, true);
} else {
mempoolBlocks.updateMempoolBlocks(_memPool);
}
const mBlocks = mempoolBlocks.getMempoolBlocks();
const mBlockDeltas = mempoolBlocks.getMempoolBlockDeltas();
const da = difficultyAdjustment.getDifficultyAdjustment();
const fees = feeApi.getRecommendedFee();
this.wss.clients.forEach((client) => {
if (client.readyState !== WebSocket.OPEN) {
return;
}
if (!client['want-blocks']) {
return;
}
const response = {
'block': block,
'mempoolInfo': memPool.getMempoolInfo(),
'da': da,
'fees': fees,
};
if (mBlocks && client['want-mempool-blocks']) {
response['mempool-blocks'] = mBlocks;
}
if (client['track-tx'] && txIds.indexOf(client['track-tx']) > -1) {
response['txConfirmed'] = true;
}
if (client['track-address']) {
const foundTransactions: TransactionExtended[] = [];
transactions.forEach((tx) => {
if (tx.vin && tx.vin.some((vin) => !!vin.prevout && vin.prevout.scriptpubkey_address === client['track-address'])) {
foundTransactions.push(tx);
return;
}
if (tx.vout && tx.vout.some((vout) => vout.scriptpubkey_address === client['track-address'])) {
foundTransactions.push(tx);
}
});
if (foundTransactions.length) {
foundTransactions.forEach((tx) => {
tx.status = {
confirmed: true,
block_height: block.height,
block_hash: block.id,
block_time: block.timestamp,
};
});
response['block-transactions'] = foundTransactions;
}
}
if (client['track-asset']) {
const foundTransactions: TransactionExtended[] = [];
transactions.forEach((tx) => {
if (client['track-asset'] === Common.nativeAssetId) {
if (tx.vin && tx.vin.some((vin) => !!vin.is_pegin)) {
foundTransactions.push(tx);
return;
}
if (tx.vout && tx.vout.some((vout) => !!vout.pegout)) {
foundTransactions.push(tx);
}
} else {
if (tx.vin && tx.vin.some((vin) => !!vin.issuance && vin.issuance.asset_id === client['track-asset'])) {
foundTransactions.push(tx);
return;
}
if (tx.vout && tx.vout.some((vout) => !!vout.asset && vout.asset === client['track-asset'])) {
foundTransactions.push(tx);
}
}
});
if (foundTransactions.length) {
foundTransactions.forEach((tx) => {
tx.status = {
confirmed: true,
block_height: block.height,
block_hash: block.id,
block_time: block.timestamp,
};
});
response['block-transactions'] = foundTransactions;
}
}
if (client['track-mempool-block'] >= 0) {
const index = client['track-mempool-block'];
if (mBlockDeltas && mBlockDeltas[index]) {
response['projected-block-transactions'] = {
index: index,
delta: mBlockDeltas[index],
};
}
}
client.send(JSON.stringify(response));
});
}
}
export default new WebsocketHandler();
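The 'want' action in setupConnectionHandling selects which push channels a client receives, and 'track-tx' must be a 64-character hex txid. A minimal client-side sketch (editor's illustration; endpoint URL and txid are hypothetical):

const ws = new WebSocket('wss://mempool.example/api/v1/ws');
ws.onopen = () => {
  ws.send(JSON.stringify({ action: 'init' }));
  ws.send(JSON.stringify({ action: 'want', data: ['blocks', 'stats', 'mempool-blocks'] }));
  ws.send(JSON.stringify({ 'track-tx': 'ab'.repeat(32), 'watch-mempool': true }));
};
ws.onmessage = (msg) => console.log(JSON.parse(msg.data));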


@@ -1,296 +0,0 @@
const configFromFile = require(
process.env.MEMPOOL_CONFIG_FILE ? process.env.MEMPOOL_CONFIG_FILE : '../mempool-config.json'
);
interface IConfig {
MEMPOOL: {
ENABLED: boolean;
NETWORK: 'mainnet' | 'testnet' | 'signet' | 'liquid' | 'liquidtestnet';
BACKEND: 'esplora' | 'electrum' | 'none';
HTTP_PORT: number;
SPAWN_CLUSTER_PROCS: number;
API_URL_PREFIX: string;
POLL_RATE_MS: number;
CACHE_DIR: string;
CLEAR_PROTECTION_MINUTES: number;
RECOMMENDED_FEE_PERCENTILE: number;
BLOCK_WEIGHT_UNITS: number;
INITIAL_BLOCKS_AMOUNT: number;
MEMPOOL_BLOCKS_AMOUNT: number;
INDEXING_BLOCKS_AMOUNT: number;
BLOCKS_SUMMARIES_INDEXING: boolean;
PRICE_FEED_UPDATE_INTERVAL: number;
USE_SECOND_NODE_FOR_MINFEE: boolean;
EXTERNAL_ASSETS: string[];
EXTERNAL_MAX_RETRY: number;
EXTERNAL_RETRY_INTERVAL: number;
USER_AGENT: string;
STDOUT_LOG_MIN_PRIORITY: 'emerg' | 'alert' | 'crit' | 'err' | 'warn' | 'notice' | 'info' | 'debug';
AUTOMATIC_BLOCK_REINDEXING: boolean;
POOLS_JSON_URL: string,
POOLS_JSON_TREE_URL: string,
ADVANCED_GBT_AUDIT: boolean;
ADVANCED_GBT_MEMPOOL: boolean;
TRANSACTION_INDEXING: boolean;
};
ESPLORA: {
REST_API_URL: string;
};
LIGHTNING: {
ENABLED: boolean;
BACKEND: 'lnd' | 'cln' | 'ldk';
TOPOLOGY_FOLDER: string;
STATS_REFRESH_INTERVAL: number;
GRAPH_REFRESH_INTERVAL: number;
LOGGER_UPDATE_INTERVAL: number;
FORENSICS_INTERVAL: number;
FORENSICS_RATE_LIMIT: number;
};
LND: {
TLS_CERT_PATH: string;
MACAROON_PATH: string;
REST_API_URL: string;
};
CLIGHTNING: {
SOCKET: string;
};
ELECTRUM: {
HOST: string;
PORT: number;
TLS_ENABLED: boolean;
};
CORE_RPC: {
HOST: string;
PORT: number;
USERNAME: string;
PASSWORD: string;
};
SECOND_CORE_RPC: {
HOST: string;
PORT: number;
USERNAME: string;
PASSWORD: string;
};
DATABASE: {
ENABLED: boolean;
HOST: string,
SOCKET: string,
PORT: number;
DATABASE: string;
USERNAME: string;
PASSWORD: string;
};
SYSLOG: {
ENABLED: boolean;
HOST: string;
PORT: number;
MIN_PRIORITY: 'emerg' | 'alert' | 'crit' | 'err' | 'warn' | 'notice' | 'info' | 'debug';
FACILITY: string;
};
STATISTICS: {
ENABLED: boolean;
TX_PER_SECOND_SAMPLE_PERIOD: number;
};
BISQ: {
ENABLED: boolean;
DATA_PATH: string;
};
SOCKS5PROXY: {
ENABLED: boolean;
USE_ONION: boolean;
HOST: string;
PORT: number;
USERNAME: string;
PASSWORD: string;
};
PRICE_DATA_SERVER: {
TOR_URL: string;
CLEARNET_URL: string;
};
EXTERNAL_DATA_SERVER: {
MEMPOOL_API: string;
MEMPOOL_ONION: string;
LIQUID_API: string;
LIQUID_ONION: string;
BISQ_URL: string;
BISQ_ONION: string;
};
MAXMIND: {
ENABLED: boolean;
GEOLITE2_CITY: string;
GEOLITE2_ASN: string;
GEOIP2_ISP: string;
},
}
const defaults: IConfig = {
'MEMPOOL': {
'ENABLED': true,
'NETWORK': 'mainnet',
'BACKEND': 'none',
'HTTP_PORT': 8999,
'SPAWN_CLUSTER_PROCS': 0,
'API_URL_PREFIX': '/api/v1/',
'POLL_RATE_MS': 2000,
'CACHE_DIR': './cache',
'CLEAR_PROTECTION_MINUTES': 20,
'RECOMMENDED_FEE_PERCENTILE': 50,
'BLOCK_WEIGHT_UNITS': 4000000,
'INITIAL_BLOCKS_AMOUNT': 8,
'MEMPOOL_BLOCKS_AMOUNT': 8,
'INDEXING_BLOCKS_AMOUNT': 11000, // 0 = disable indexing, -1 = index all blocks
'BLOCKS_SUMMARIES_INDEXING': false,
'PRICE_FEED_UPDATE_INTERVAL': 600,
'USE_SECOND_NODE_FOR_MINFEE': false,
'EXTERNAL_ASSETS': [],
'EXTERNAL_MAX_RETRY': 1,
'EXTERNAL_RETRY_INTERVAL': 0,
'USER_AGENT': 'mempool',
'STDOUT_LOG_MIN_PRIORITY': 'debug',
'AUTOMATIC_BLOCK_REINDEXING': false,
'POOLS_JSON_URL': 'https://raw.githubusercontent.com/mempool/mining-pools/master/pools.json',
'POOLS_JSON_TREE_URL': 'https://api.github.com/repos/mempool/mining-pools/git/trees/master',
'ADVANCED_GBT_AUDIT': false,
'ADVANCED_GBT_MEMPOOL': false,
'TRANSACTION_INDEXING': false,
},
'ESPLORA': {
'REST_API_URL': 'http://127.0.0.1:3000',
},
'ELECTRUM': {
'HOST': '127.0.0.1',
'PORT': 3306,
'TLS_ENABLED': true,
},
'CORE_RPC': {
'HOST': '127.0.0.1',
'PORT': 8332,
'USERNAME': 'mempool',
'PASSWORD': 'mempool'
},
'SECOND_CORE_RPC': {
'HOST': '127.0.0.1',
'PORT': 8332,
'USERNAME': 'mempool',
'PASSWORD': 'mempool'
},
'DATABASE': {
'ENABLED': true,
'HOST': '127.0.0.1',
'SOCKET': '',
'PORT': 3306,
'DATABASE': 'mempool',
'USERNAME': 'mempool',
'PASSWORD': 'mempool'
},
'SYSLOG': {
'ENABLED': true,
'HOST': '127.0.0.1',
'PORT': 514,
'MIN_PRIORITY': 'info',
'FACILITY': 'local7'
},
'STATISTICS': {
'ENABLED': true,
'TX_PER_SECOND_SAMPLE_PERIOD': 150
},
'BISQ': {
'ENABLED': false,
'DATA_PATH': '/bisq/statsnode-data/btc_mainnet/db'
},
'LIGHTNING': {
'ENABLED': false,
'BACKEND': 'lnd',
'TOPOLOGY_FOLDER': '',
'STATS_REFRESH_INTERVAL': 600,
'GRAPH_REFRESH_INTERVAL': 600,
'LOGGER_UPDATE_INTERVAL': 30,
'FORENSICS_INTERVAL': 43200,
'FORENSICS_RATE_LIMIT': 20,
},
'LND': {
'TLS_CERT_PATH': '',
'MACAROON_PATH': '',
'REST_API_URL': 'https://localhost:8080',
},
'CLIGHTNING': {
'SOCKET': '',
},
'SOCKS5PROXY': {
'ENABLED': false,
'USE_ONION': true,
'HOST': '127.0.0.1',
'PORT': 9050,
'USERNAME': '',
'PASSWORD': ''
},
'PRICE_DATA_SERVER': {
'TOR_URL': 'http://wizpriceje6q5tdrxkyiazsgu7irquiqjy2dptezqhrtu7l2qelqktid.onion/getAllMarketPrices',
'CLEARNET_URL': 'https://price.bisq.wiz.biz/getAllMarketPrices'
},
'EXTERNAL_DATA_SERVER': {
'MEMPOOL_API': 'https://mempool.space/api/v1',
'MEMPOOL_ONION': 'http://mempoolhqx4isw62xs7abwphsq7ldayuidyx2v2oethdhhj6mlo2r6ad.onion/api/v1',
'LIQUID_API': 'https://liquid.network/api/v1',
'LIQUID_ONION': 'http://liquidmom47f6s3m53ebfxn47p76a6tlnxib3wp6deux7wuzotdr6cyd.onion/api/v1',
'BISQ_URL': 'https://bisq.markets/api',
'BISQ_ONION': 'http://bisqmktse2cabavbr2xjq7xw3h6g5ottemo5rolfcwt6aly6tp5fdryd.onion/api'
},
'MAXMIND': {
'ENABLED': false,
'GEOLITE2_CITY': '/usr/local/share/GeoIP/GeoLite2-City.mmdb',
'GEOLITE2_ASN': '/usr/local/share/GeoIP/GeoLite2-ASN.mmdb',
'GEOIP2_ISP': '/usr/local/share/GeoIP/GeoIP2-ISP.mmdb'
},
};
class Config implements IConfig {
MEMPOOL: IConfig['MEMPOOL'];
ESPLORA: IConfig['ESPLORA'];
ELECTRUM: IConfig['ELECTRUM'];
CORE_RPC: IConfig['CORE_RPC'];
SECOND_CORE_RPC: IConfig['SECOND_CORE_RPC'];
DATABASE: IConfig['DATABASE'];
SYSLOG: IConfig['SYSLOG'];
STATISTICS: IConfig['STATISTICS'];
BISQ: IConfig['BISQ'];
LIGHTNING: IConfig['LIGHTNING'];
LND: IConfig['LND'];
CLIGHTNING: IConfig['CLIGHTNING'];
SOCKS5PROXY: IConfig['SOCKS5PROXY'];
PRICE_DATA_SERVER: IConfig['PRICE_DATA_SERVER'];
EXTERNAL_DATA_SERVER: IConfig['EXTERNAL_DATA_SERVER'];
MAXMIND: IConfig['MAXMIND'];
constructor() {
const configs = this.merge(configFromFile, defaults);
this.MEMPOOL = configs.MEMPOOL;
this.ESPLORA = configs.ESPLORA;
this.ELECTRUM = configs.ELECTRUM;
this.CORE_RPC = configs.CORE_RPC;
this.SECOND_CORE_RPC = configs.SECOND_CORE_RPC;
this.DATABASE = configs.DATABASE;
this.SYSLOG = configs.SYSLOG;
this.STATISTICS = configs.STATISTICS;
this.BISQ = configs.BISQ;
this.LIGHTNING = configs.LIGHTNING;
this.LND = configs.LND;
this.CLIGHTNING = configs.CLIGHTNING;
this.SOCKS5PROXY = configs.SOCKS5PROXY;
this.PRICE_DATA_SERVER = configs.PRICE_DATA_SERVER;
this.EXTERNAL_DATA_SERVER = configs.EXTERNAL_DATA_SERVER;
this.MAXMIND = configs.MAXMIND;
}
merge = (...objects: object[]): IConfig => {
// @ts-ignore
return objects.reduce((prev, next) => {
Object.keys(prev).forEach(key => {
next[key] = { ...next[key], ...prev[key] };
});
return next;
});
}
}
export default new Config();
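merge() above folds each top-level section of the file config over these defaults, so a partial section in mempool-config.json keeps the remaining default keys. A sketch of the per-section semantics (hypothetical file contents):

// Equivalent of merge() for one section: file values win, unset keys fall back to defaults.
const fileCoreRpc = { HOST: '10.0.0.2' }; // from mempool-config.json
const defaultCoreRpc = { HOST: '127.0.0.1', PORT: 8332, USERNAME: 'mempool', PASSWORD: 'mempool' };
const merged = { ...defaultCoreRpc, ...fileCoreRpc };
// => { HOST: '10.0.0.2', PORT: 8332, USERNAME: 'mempool', PASSWORD: 'mempool' }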


@@ -1,61 +1,26 @@
import config from './config';
import { createPool, Pool, PoolConnection } from 'mysql2/promise';
import logger from './logger';
import { FieldPacket, OkPacket, PoolOptions, ResultSetHeader, RowDataPacket } from 'mysql2/typings/mysql';
const config = require('../mempool-config.json');
import { createPool } from 'mysql2/promise';
class DB {
constructor() {
if (config.DATABASE.SOCKET !== '') {
this.poolConfig.socketPath = config.DATABASE.SOCKET;
} else {
this.poolConfig.host = config.DATABASE.HOST;
}
}
private pool: Pool | null = null;
private poolConfig: PoolOptions = {
port: config.DATABASE.PORT,
database: config.DATABASE.DATABASE,
user: config.DATABASE.USERNAME,
password: config.DATABASE.PASSWORD,
export class DB {
static pool = createPool({
host: config.DB_HOST,
port: config.DB_PORT,
database: config.DB_DATABASE,
user: config.DB_USER,
password: config.DB_PASSWORD,
connectionLimit: 10,
supportBigNumbers: true,
timezone: '+00:00',
};
private checkDBFlag() {
if (config.DATABASE.ENABLED === false) {
logger.err('Trying to use DB feature but config.DATABASE.ENABLED is set to false, please open an issue');
}
}
public async query<T extends RowDataPacket[][] | RowDataPacket[] | OkPacket |
OkPacket[] | ResultSetHeader>(query, params?): Promise<[T, FieldPacket[]]>
{
this.checkDBFlag();
const pool = await this.getPool();
return pool.query(query, params);
}
public async checkDbConnection() {
this.checkDBFlag();
try {
await this.query('SELECT ?', [1]);
logger.info('Database connection established.');
} catch (e) {
logger.err('Could not connect to database: ' + (e instanceof Error ? e.message : e));
process.exit(1);
}
}
private async getPool(): Promise<Pool> {
if (this.pool === null) {
this.pool = createPool(this.poolConfig);
this.pool.on('connection', function (newConnection: PoolConnection) {
newConnection.query(`SET time_zone='+00:00'`);
});
}
return this.pool;
}
});
}
export default new DB();
export async function checkDbConnection() {
try {
const connection = await DB.pool.getConnection();
console.log('MySQL connection established.');
connection.release();
} catch (e) {
console.log('Could not connect to MySQL.');
console.log(e);
process.exit(1);
}
}
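Of the two versions intermixed in this hunk, the removed one lazily creates its pool on first query() and pins the session timezone to UTC, while the incoming one exposes a static pool built from config. A minimal usage sketch against the removed API (editor's illustration; the table name is hypothetical):

import DB from './database';
import { RowDataPacket } from 'mysql2';

async function countStatistics(): Promise<number> {
  const [rows] = await DB.query<RowDataPacket[]>('SELECT COUNT(*) AS n FROM statistics');
  return rows[0]['n'] as number;
}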


@@ -1,250 +1,284 @@
import express from 'express';
import { Application, Request, Response, NextFunction } from 'express';
const config = require('../mempool-config.json');
import * as fs from 'fs';
import * as express from 'express';
import * as compression from 'compression';
import * as http from 'http';
import * as https from 'https';
import * as WebSocket from 'ws';
import cluster from 'cluster';
import DB from './database';
import config from './config';
import blocks from './api/blocks';
import memPool from './api/mempool';
import diskCache from './api/disk-cache';
import statistics from './api/statistics/statistics';
import websocketHandler from './api/websocket-handler';
import fiatConversion from './api/fiat-conversion';
import bisq from './api/bisq/bisq';
import bisqMarkets from './api/bisq/markets';
import logger from './logger';
import backendInfo from './api/backend-info';
import loadingIndicators from './api/loading-indicators';
import mempool from './api/mempool';
import elementsParser from './api/liquid/elements-parser';
import databaseMigration from './api/database-migration';
import syncAssets from './sync-assets';
import icons from './api/liquid/icons';
import { Common } from './api/common';
import poolsUpdater from './tasks/pools-updater';
import indexer from './indexer';
import nodesRoutes from './api/explorer/nodes.routes';
import channelsRoutes from './api/explorer/channels.routes';
import generalLightningRoutes from './api/explorer/general.routes';
import lightningStatsUpdater from './tasks/lightning/stats-updater.service';
import networkSyncService from './tasks/lightning/network-sync.service';
import statisticsRoutes from './api/statistics/statistics.routes';
import miningRoutes from './api/mining/mining-routes';
import bisqRoutes from './api/bisq/bisq.routes';
import liquidRoutes from './api/liquid/liquid.routes';
import bitcoinRoutes from './api/bitcoin/bitcoin.routes';
import fundingTxFetcher from './tasks/lightning/sync-tasks/funding-tx-fetcher';
import forensicsService from './tasks/lightning/forensics.service';
class Server {
private wss: WebSocket.Server | undefined;
private server: http.Server | undefined;
private app: Application;
private currentBackendRetryInterval = 5;
import bitcoinApi from './api/bitcoin/bitcoin-api-factory';
import diskCache from './api/disk-cache';
import memPool from './api/mempool';
import blocks from './api/blocks';
import projectedBlocks from './api/projected-blocks';
import statistics from './api/statistics';
import { IBlock, IMempool, ITransaction, IMempoolStats } from './interfaces';
import routes from './routes';
import fiatConversion from './api/fiat-conversion';
class MempoolSpace {
private wss: WebSocket.Server;
private server: https.Server | http.Server;
private app: any;
constructor() {
this.app = express();
if (!config.MEMPOOL.SPAWN_CLUSTER_PROCS) {
this.startServer();
return;
}
if (cluster.isPrimary) {
logger.notice(`Mempool Server (Master) is running on port ${config.MEMPOOL.HTTP_PORT} (${backendInfo.getShortCommitHash()})`);
const numCPUs = config.MEMPOOL.SPAWN_CLUSTER_PROCS;
for (let i = 0; i < numCPUs; i++) {
const env = { workerId: i };
const worker = cluster.fork(env);
worker.process['env'] = env;
}
cluster.on('exit', (worker, code, signal) => {
const workerId = worker.process['env'].workerId;
logger.warn(`Mempool Worker PID #${worker.process.pid} workerId: ${workerId} died. Restarting in 10 seconds... ${signal || code}`);
setTimeout(() => {
const env = { workerId: workerId };
const newWorker = cluster.fork(env);
newWorker.process['env'] = env;
}, 10000);
});
} else {
this.startServer(true);
}
}
async startServer(worker = false): Promise<void> {
logger.notice(`Starting Mempool Server${worker ? ' (worker)' : ''}... (${backendInfo.getShortCommitHash()})`);
this.app
.use((req: Request, res: Response, next: NextFunction) => {
.use((req, res, next) => {
res.setHeader('Access-Control-Allow-Origin', '*');
next();
})
.use(express.urlencoded({ extended: true }))
.use(express.text({ type: ['text/plain', 'application/base64'] }))
;
this.server = http.createServer(this.app);
this.wss = new WebSocket.Server({ server: this.server });
.use(compression());
if (config.ENV === 'dev') {
this.server = http.createServer(this.app);
this.wss = new WebSocket.Server({ server: this.server });
} else {
const credentials = {
cert: fs.readFileSync('/etc/letsencrypt/live/mempool.space/fullchain.pem'),
key: fs.readFileSync('/etc/letsencrypt/live/mempool.space/privkey.pem'),
};
this.server = https.createServer(credentials, this.app);
this.wss = new WebSocket.Server({ server: this.server });
}
this.setUpRoutes();
this.setUpWebsocketHandling();
this.setUpMempoolCache();
this.runMempoolIntervalFunctions();
await syncAssets.syncAssets$();
if (config.MEMPOOL.ENABLED) {
diskCache.loadMempoolCache();
}
if (config.DATABASE.ENABLED) {
await DB.checkDbConnection();
try {
if (process.env.npm_config_reindex !== undefined) { // Re-index requests
const tables = process.env.npm_config_reindex.split(',');
logger.warn(`Indexed data for "${process.env.npm_config_reindex}" tables will be erased in 5 seconds (using '--reindex')`);
await Common.sleep$(5000);
await databaseMigration.$truncateIndexedData(tables);
}
await databaseMigration.$initializeOrMigrateDatabase();
if (Common.indexingEnabled()) {
await indexer.$resetHashratesIndexingState();
}
} catch (e) {
throw new Error(e instanceof Error ? e.message : 'Error');
}
}
if (config.STATISTICS.ENABLED && config.DATABASE.ENABLED && cluster.isPrimary) {
statistics.startStatistics();
}
if (Common.isLiquid()) {
try {
icons.loadIcons();
} catch (e) {
logger.err('Cannot load liquid icons. Ignoring. Reason: ' + (e instanceof Error ? e.message : e));
}
}
statistics.startStatistics();
fiatConversion.startService();
this.setUpHttpApiRoutes();
if (config.MEMPOOL.ENABLED) {
this.runMainUpdateLoop();
}
if (config.BISQ.ENABLED) {
bisq.startBisqService();
bisq.setPriceCallbackFunction((price) => websocketHandler.setExtraInitProperties('bsq-price', price));
blocks.setNewBlockCallback(bisq.handleNewBitcoinBlock.bind(bisq));
bisqMarkets.startBisqService();
}
if (config.LIGHTNING.ENABLED) {
this.$runLightningBackend();
}
this.server.listen(config.MEMPOOL.HTTP_PORT, () => {
if (worker) {
logger.info(`Mempool Server worker #${process.pid} started`);
} else {
logger.notice(`Mempool Server is running on port ${config.MEMPOOL.HTTP_PORT}`);
}
const opts = {
host: '127.0.0.1',
port: 8999
};
this.server.listen(opts, () => {
console.log(`Server started on ${opts.host}:${opts.port}`);
});
}
async runMainUpdateLoop(): Promise<void> {
try {
try {
await memPool.$updateMemPoolInfo();
} catch (e) {
const msg = `updateMempoolInfo: ${(e instanceof Error ? e.message : e)}`;
if (config.MEMPOOL.USE_SECOND_NODE_FOR_MINFEE) {
logger.warn(msg);
} else {
logger.debug(msg);
}
}
await poolsUpdater.updatePoolsJson();
await blocks.$updateBlocks();
await memPool.$updateMempool();
indexer.$run();
setTimeout(this.runMainUpdateLoop.bind(this), config.MEMPOOL.POLL_RATE_MS);
this.currentBackendRetryInterval = 5;
} catch (e) {
const loggerMsg = `runMainLoop error: ${(e instanceof Error ? e.message : e)}. Retrying in ${this.currentBackendRetryInterval} sec.`;
if (this.currentBackendRetryInterval > 5) {
logger.warn(loggerMsg);
mempool.setOutOfSync();
} else {
logger.debug(loggerMsg);
}
logger.debug(JSON.stringify(e));
setTimeout(this.runMainUpdateLoop.bind(this), 1000 * this.currentBackendRetryInterval);
this.currentBackendRetryInterval *= 2;
this.currentBackendRetryInterval = Math.min(this.currentBackendRetryInterval, 60);
}
private async runMempoolIntervalFunctions() {
await blocks.updateBlocks();
await memPool.updateMemPoolInfo();
await memPool.updateMempool();
setTimeout(this.runMempoolIntervalFunctions.bind(this), config.MEMPOOL_REFRESH_RATE_MS);
}
async $runLightningBackend(): Promise<void> {
try {
await fundingTxFetcher.$init();
await networkSyncService.$startService();
await forensicsService.$startService();
await lightningStatsUpdater.$startService();
} catch(e) {
logger.err(`Nodejs lightning backend crashed. Restarting in 1 minute. Reason: ${(e instanceof Error ? e.message : e)}`);
await Common.sleep$(1000 * 60);
this.$runLightningBackend();
};
}
setUpWebsocketHandling(): void {
if (this.wss) {
websocketHandler.setWebsocketServer(this.wss);
private setUpMempoolCache() {
const cacheData = diskCache.loadData();
if (cacheData) {
memPool.setMempool(JSON.parse(cacheData));
}
if (Common.isLiquid() && config.DATABASE.ENABLED) {
blocks.setNewBlockCallback(async () => {
process.on('SIGINT', (options) => {
console.log('SIGINT');
diskCache.saveData(JSON.stringify(memPool.getMempool()));
process.exit(2);
});
}
private setUpWebsocketHandling() {
this.wss.on('connection', (client: WebSocket) => {
let theBlocks = blocks.getBlocks();
theBlocks = theBlocks.concat([]).splice(theBlocks.length - config.INITIAL_BLOCK_AMOUNT);
const formatedBlocks = theBlocks.map((b) => blocks.formatBlock(b));
client.send(JSON.stringify({
'mempoolInfo': memPool.getMempoolInfo(),
'blocks': formatedBlocks,
'projectedBlocks': projectedBlocks.getProjectedBlocks(),
'txPerSecond': memPool.getTxPerSecond(),
'vBytesPerSecond': memPool.getVBytesPerSecond(),
'conversions': fiatConversion.getTickers()['BTCUSD'],
}));
client.on('message', async (message: any) => {
try {
await elementsParser.$parse();
const parsedMessage = JSON.parse(message);
if (parsedMessage.action === 'want') {
client['want-stats'] = parsedMessage.data.indexOf('stats') > -1;
client['want-blocks'] = parsedMessage.data.indexOf('blocks') > -1;
client['want-projected-blocks'] = parsedMessage.data.indexOf('projected-blocks') > -1;
client['want-live-2h-chart'] = parsedMessage.data.indexOf('live-2h-chart') > -1;
}
if (parsedMessage.action === 'track-tx' && parsedMessage.txId && /^[a-fA-F0-9]{64}$/.test(parsedMessage.txId)) {
const tx = await memPool.getRawTransaction(parsedMessage.txId);
if (tx) {
console.log('Now tracking: ' + parsedMessage.txId);
client['trackingTx'] = true;
client['txId'] = parsedMessage.txId;
client['tx'] = tx;
if (tx.blockhash) {
const currentBlocks = blocks.getBlocks();
const foundBlock = currentBlocks.find((block) => block.tx && block.tx.some((i: string) => i === parsedMessage.txId));
if (foundBlock) {
console.log('Found block by looking in local cache');
client['blockHeight'] = foundBlock.height;
} else {
const theBlock = await bitcoinApi.getBlockAndTransactions(tx.blockhash);
if (theBlock) {
client['blockHeight'] = theBlock.height;
}
}
} else {
client['blockHeight'] = 0;
}
client.send(JSON.stringify({
'projectedBlocks': projectedBlocks.getProjectedBlocks(client['txId']),
'track-tx': {
tracking: true,
blockHeight: client['blockHeight'],
tx: client['tx'],
}
}));
} else {
console.log('TX NOT FOUND, NOT TRACKING');
client['trackingTx'] = false;
client['blockHeight'] = 0;
client['tx'] = null;
client.send(JSON.stringify({
'track-tx': {
tracking: false,
blockHeight: 0,
message: 'not-found',
}
}));
}
}
if (parsedMessage.action === 'stop-tracking-tx') {
console.log('STOP TRACKING');
client['trackingTx'] = false;
client.send(JSON.stringify({
'track-tx': {
tracking: false,
blockHeight: 0,
message: 'not-found',
}
}));
}
} catch (e) {
logger.warn('Elements parsing error: ' + (e instanceof Error ? e.message : e));
console.log(e);
}
});
}
websocketHandler.setupConnectionHandling();
if (config.MEMPOOL.ENABLED) {
statistics.setNewStatisticsEntryCallback(websocketHandler.handleNewStatistic.bind(websocketHandler));
memPool.setAsyncMempoolChangedCallback(websocketHandler.handleMempoolChange.bind(websocketHandler));
blocks.setNewAsyncBlockCallback(websocketHandler.handleNewBlock.bind(websocketHandler));
}
fiatConversion.setProgressChangedCallback(websocketHandler.handleNewConversionRates.bind(websocketHandler));
loadingIndicators.setProgressChangedCallback(websocketHandler.handleLoadingChanged.bind(websocketHandler));
}
setUpHttpApiRoutes(): void {
bitcoinRoutes.initRoutes(this.app);
if (config.STATISTICS.ENABLED && config.DATABASE.ENABLED && config.MEMPOOL.ENABLED) {
statisticsRoutes.initRoutes(this.app);
}
if (Common.indexingEnabled() && config.MEMPOOL.ENABLED) {
miningRoutes.initRoutes(this.app);
}
if (config.BISQ.ENABLED) {
bisqRoutes.initRoutes(this.app);
}
if (Common.isLiquid()) {
liquidRoutes.initRoutes(this.app);
}
if (config.LIGHTNING.ENABLED) {
generalLightningRoutes.initRoutes(this.app);
nodesRoutes.initRoutes(this.app);
channelsRoutes.initRoutes(this.app);
}
}
}
((): Server => new Server())();
client.on('close', () => {
client['trackingTx'] = false;
});
});
blocks.setNewBlockCallback((block: IBlock) => {
const formattedBlocks = blocks.formatBlock(block);
this.wss.clients.forEach((client) => {
if (client.readyState !== WebSocket.OPEN) {
return;
}
const response = {};
if (client['trackingTx'] === true && client['blockHeight'] === 0) {
if (block.tx.some((tx: ITransaction) => tx === client['txId'])) {
client['blockHeight'] = block.height;
}
}
response['track-tx'] = {
tracking: client['trackingTx'] || false,
blockHeight: client['blockHeight'],
};
response['block'] = formattedBlocks;
client.send(JSON.stringify(response));
});
});
memPool.setMempoolChangedCallback((newMempool: IMempool) => {
projectedBlocks.updateProjectedBlocks(newMempool);
const pBlocks = projectedBlocks.getProjectedBlocks();
const mempoolInfo = memPool.getMempoolInfo();
const txPerSecond = memPool.getTxPerSecond();
const vBytesPerSecond = memPool.getVBytesPerSecond();
this.wss.clients.forEach((client: WebSocket) => {
if (client.readyState !== WebSocket.OPEN) {
return;
}
const response = {};
if (client['want-stats']) {
response['mempoolInfo'] = mempoolInfo;
response['txPerSecond'] = txPerSecond;
response['vBytesPerSecond'] = vBytesPerSecond;
response['track-tx'] = {
tracking: client['trackingTx'] || false,
blockHeight: client['blockHeight'],
};
}
if (client['want-projected-blocks'] && client['trackingTx'] && client['blockHeight'] === 0) {
response['projectedBlocks'] = projectedBlocks.getProjectedBlocks(client['txId']);
} else if (client['want-projected-blocks']) {
response['projectedBlocks'] = pBlocks;
}
if (Object.keys(response).length) {
client.send(JSON.stringify(response));
}
});
});
statistics.setNewStatisticsEntryCallback((stats: IMempoolStats) => {
this.wss.clients.forEach((client: WebSocket) => {
if (client.readyState !== WebSocket.OPEN) {
return;
}
if (client['want-live-2h-chart']) {
client.send(JSON.stringify({
'live-2h-chart': stats
}));
}
});
});
}
private setUpRoutes() {
this.app
.get(config.API_ENDPOINT + 'transactions/height/:id', routes.$getgetTransactionsForBlock)
.get(config.API_ENDPOINT + 'transactions/projected/:id', routes.getgetTransactionsForProjectedBlock)
.get(config.API_ENDPOINT + 'fees/recommended', routes.getRecommendedFees)
.get(config.API_ENDPOINT + 'fees/projected-blocks', routes.getProjectedBlocks)
.get(config.API_ENDPOINT + 'statistics/2h', routes.get2HStatistics)
.get(config.API_ENDPOINT + 'statistics/24h', routes.get24HStatistics.bind(routes))
.get(config.API_ENDPOINT + 'statistics/1w', routes.get1WHStatistics.bind(routes))
.get(config.API_ENDPOINT + 'statistics/1m', routes.get1MStatistics.bind(routes))
.get(config.API_ENDPOINT + 'statistics/3m', routes.get3MStatistics.bind(routes))
.get(config.API_ENDPOINT + 'statistics/6m', routes.get6MStatistics.bind(routes))
;
if (config.BACKEND_API === 'electrs') {
this.app
.get(config.API_ENDPOINT + 'explorer/blocks', routes.getBlocks)
.get(config.API_ENDPOINT + 'explorer/blocks/:height', routes.getBlocks)
.get(config.API_ENDPOINT + 'explorer/tx/:id', routes.getRawTransaction)
.get(config.API_ENDPOINT + 'explorer/block/:hash', routes.getBlock)
.get(config.API_ENDPOINT + 'explorer/block/:hash/tx', routes.getBlockTransactions)
.get(config.API_ENDPOINT + 'explorer/block/:hash/tx/:index', routes.getBlockTransactionsFromIndex)
.get(config.API_ENDPOINT + 'explorer/address/:address', routes.getAddress)
.get(config.API_ENDPOINT + 'explorer/address/:address/tx', routes.getAddressTransactions)
.get(config.API_ENDPOINT + 'explorer/address/:address/tx/chain/:txid', routes.getAddressTransactionsFromTxid)
;
}
}
}
const mempoolSpace = new MempoolSpace();
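runMainUpdateLoop above retries with exponential backoff: the delay doubles from 5 seconds, is capped at 60, and resets to 5 after a successful pass. The resulting sequence, as a sketch:

// Retry delays produced by the loop above: 5, 10, 20, 40, 60, 60, ... seconds.
function nextRetryInterval(current: number): number {
  return Math.min(current * 2, 60);
}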


@@ -1,107 +0,0 @@
import { Common } from './api/common';
import blocks from './api/blocks';
import mempool from './api/mempool';
import mining from './api/mining/mining';
import logger from './logger';
import HashratesRepository from './repositories/HashratesRepository';
import bitcoinClient from './api/bitcoin/bitcoin-client';
import priceUpdater from './tasks/price-updater';
import PricesRepository from './repositories/PricesRepository';
class Indexer {
runIndexer = true;
indexerRunning = false;
tasksRunning: string[] = [];
public reindex() {
if (Common.indexingEnabled()) {
this.runIndexer = true;
}
}
public async runSingleTask(task: 'blocksPrices') {
if (!Common.indexingEnabled()) {
return;
}
if (task === 'blocksPrices' && !this.tasksRunning.includes(task)) {
this.tasksRunning.push(task);
const latestPriceId = await PricesRepository.$getLatestPriceId();
if (priceUpdater.historyInserted === false || latestPriceId === null) {
logger.debug(`Blocks prices indexer is waiting for the price updater to complete`);
setTimeout(() => {
this.tasksRunning = this.tasksRunning.filter(runningTask => runningTask !== task);
this.runSingleTask('blocksPrices');
}, 10000);
} else {
logger.debug(`Blocks prices indexer will run now`);
await mining.$indexBlockPrices();
this.tasksRunning = this.tasksRunning.filter(runningTask => runningTask !== task);
}
}
}
public async $run() {
if (!Common.indexingEnabled() || this.runIndexer === false ||
this.indexerRunning === true || mempool.hasPriority()
) {
return;
}
// Do not attempt to index anything unless Bitcoin Core is fully synced
const blockchainInfo = await bitcoinClient.getBlockchainInfo();
if (blockchainInfo.blocks !== blockchainInfo.headers) {
return;
}
this.runIndexer = false;
this.indexerRunning = true;
logger.debug(`Running mining indexer`);
try {
await priceUpdater.$run();
const chainValid = await blocks.$generateBlockDatabase();
if (chainValid === false) {
// The chain of block hashes was invalid, so we need to reindex. Stop here and continue at the next iteration
logger.warn(`The chain of block hashes is invalid, re-indexing invalid data in 10 seconds.`);
setTimeout(() => this.reindex(), 10000);
this.indexerRunning = false;
return;
}
this.runSingleTask('blocksPrices');
await mining.$indexDifficultyAdjustments();
await this.$resetHashratesIndexingState(); // TODO - Remove this as it's not efficient
await mining.$generateNetworkHashrateHistory();
await mining.$generatePoolHashrateHistory();
await blocks.$generateBlocksSummariesDatabase();
await blocks.$generateCPFPDatabase();
} catch (e) {
this.indexerRunning = false;
logger.err(`Indexer failed, trying again in 10 seconds. Reason: ` + (e instanceof Error ? e.message : e));
setTimeout(() => this.reindex(), 10000);
return;
}
this.indexerRunning = false;
const runEvery = 1000 * 3600; // 1 hour
logger.debug(`Indexing completed. Next run planned at ${new Date(new Date().getTime() + runEvery).toUTCString()}`);
setTimeout(() => this.reindex(), runEvery);
}
async $resetHashratesIndexingState() {
try {
await HashratesRepository.$setLatestRun('last_hashrates_indexing', 0);
await HashratesRepository.$setLatestRun('last_weekly_hashrates_indexing', 0);
} catch (e) {
logger.err(`Cannot reset hashrate indexing timestamps. Reason: ` + (e instanceof Error ? e.message : e));
throw e;
}
}
}
export default new Indexer();
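$run() re-arms itself an hour after a successful pass (via reindex()) and ten seconds after a failure. A condensed sketch of that scheduling:

// Sketch: success re-arms in one hour, failure in ten seconds.
const runEvery = 1000 * 3600;
function scheduleNext(succeeded: boolean, reindex: () => void): void {
  setTimeout(reindex, succeeded ? runEvery : 10000);
}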
