Compare commits

..

1 Commits

Author SHA1 Message Date
Alekos Filini
9c4e5b4d25 [wip] Multiparty descriptor creation 2020-05-19 22:29:04 +02:00
54 changed files with 3296 additions and 10734 deletions

View File

@@ -1,61 +1,18 @@
language: rust
rust:
- stable
env:
global:
- MAGICAL_RPC_COOKIEFILE=/home/travis/.bitcoin/regtest/.cookie
- MAGICAL_ELECTRUM_URL=tcp://127.0.0.1:60401
jobs:
- TARGET=x86_64-unknown-linux-gnu CHECK_FMT=1
- TARGET=x86_64-unknown-linux-gnu RUN_TESTS=1
- TARGET=x86_64-unknown-linux-gnu FEATURES=minimal NO_DEFAULT_FEATURES=1
- TARGET=x86_64-unknown-linux-gnu FEATURES=minimal,esplora NO_DEFAULT_FEATURES=1
- TARGET=x86_64-unknown-linux-gnu FEATURES=key-value-db NO_DEFAULT_FEATURES=1
- TARGET=x86_64-unknown-linux-gnu FEATURES=electrum NO_DEFAULT_FEATURES=1
- TARGET=x86_64-unknown-linux-gnu FEATURES=compact_filters NO_DEFAULT_FEATURES=1
- TARGET=x86_64-unknown-linux-gnu FEATURES=cli-utils,esplora NO_DEFAULT_FEATURES=1
- TARGET=x86_64-unknown-linux-gnu FEATURES=compiler NO_DEFAULT_FEATURES=1 RUN_TESTS=1 # Test the `miniscriptc` example
- TARGET=x86_64-unknown-linux-gnu FEATURES=test-electrum NO_DEFAULT_FEATURES=1 RUN_TESTS=1 RUN_CORE=1
- TARGET=x86_64-unknown-linux-gnu FEATURES=test-md-docs NO_DEFAULT_FEATURES=1 RUN_TESTS=1 NIGHTLY=1
- TARGET=wasm32-unknown-unknown FEATURES=cli-utils,esplora NO_DEFAULT_FEATURES=1
# - 1.31.0
# - 1.22.0
before_script:
- |
if [[ "$TARGET" = "wasm32-unknown-unknown" ]]; then
# Install a recent version of clang that supports wasm32
wget -O - https://apt.llvm.org/llvm-snapshot.gpg.key | sudo apt-key add - || exit 1
sudo apt-add-repository "deb http://apt.llvm.org/xenial/ llvm-toolchain-xenial-10 main" || exit 1
sudo apt-get update || exit 1
sudo apt-get install -y clang-10 libc6-dev-i386 || exit 1
export CC="clang-10"
export CFLAGS="-I/usr/include"
fi
- |
if [[ $CHECK_FMT -eq 1 ]]; then
rustup component add rustfmt
fi
- |
if [[ $NIGHTLY -eq 1 ]]; then
rustup toolchain install nightly
rustup default nightly
fi
- rustup target add "$TARGET"
- rustup component add rustfmt
script:
- |
if [[ $CHECK_FMT -eq 1 ]]; then
cargo fmt -- --check || exit 1
fi
- |
if [[ $RUN_TESTS -eq 1 ]]; then
CMD=test
else
CMD=build
fi
- |
if [[ $RUN_CORE -eq 1 ]]; then
./ci/start-core.sh || exit 1
fi
- cargo $CMD --verbose --target=$TARGET --features=$FEATURES $( (( NO_DEFAULT_FEATURES == 1 )) && printf %s '--no-default-features' )
- cargo fmt -- --check --verbose
- cargo test --verbose --all
- cargo build --verbose --all
- cargo build --verbose --no-default-features --features=minimal
- cargo build --verbose --no-default-features --features=minimal,esplora
- cargo build --verbose --no-default-features --features=key-value-db
- cargo build --verbose --no-default-features --features=electrum
notifications:
email: false

View File

@@ -1,62 +1,37 @@
[package]
name = "magical"
name = "magical-bitcoin-wallet"
version = "0.1.0"
edition = "2018"
authors = ["Alekos Filini <alekos.filini@gmail.com>", "Riccardo Casatta <riccardo@casatta.it>"]
authors = ["Riccardo Casatta <riccardo@casatta.it>", "Alekos Filini <alekos.filini@gmail.com>"]
[dependencies]
magical-macros = { version = "0.1.0-beta.1", path = "./macros" }
log = "^0.4"
bitcoin = { version = "0.23", features = ["use-serde"] }
miniscript = { version = "1.0" }
miniscript = { version = "0.12" }
serde = { version = "^1.0", features = ["derive"] }
serde_json = { version = "^1.0" }
rand = "^0.7"
base64 = "^0.11"
async-trait = "0.1"
# Optional dependencies
sled = { version = "0.34", optional = true }
electrum-client = { version = "0.2.0-beta.1", optional = true }
sled = { version = "0.31.0", optional = true }
electrum-client = { git = "https://github.com/MagicalBitcoin/rust-electrum-client.git", optional = true }
reqwest = { version = "0.10", optional = true, features = ["json"] }
futures = { version = "0.3", optional = true }
clap = { version = "2.33", optional = true }
base64 = { version = "^0.11", optional = true }
async-trait = { version = "0.1", optional = true }
rocksdb = { version = "0.14", optional = true }
socks = { version = "0.3", optional = true }
lazy_static = { version = "1.4", optional = true }
[patch.crates-io]
bitcoin = { git = "https://github.com/rust-bitcoin/rust-bitcoin/", rev = "478e091" }
miniscript = { git = "https://github.com/MagicalBitcoin/rust-miniscript", branch = "descriptor-public-key" }
# Platform-specific dependencies
[target.'cfg(not(target_arch = "wasm32"))'.dependencies]
tokio = { version = "0.2", features = ["rt-core"] }
[target.'cfg(target_arch = "wasm32")'.dependencies]
async-trait = "0.1"
js-sys = "0.3"
[features]
minimal = []
compiler = ["clap", "miniscript/compiler"]
compiler = ["miniscript/compiler"]
default = ["key-value-db", "electrum"]
electrum = ["electrum-client"]
esplora = ["reqwest", "futures"]
compact_filters = ["rocksdb", "socks", "lazy_static"]
key-value-db = ["sled"]
cli-utils = ["clap", "base64"]
async-interface = ["async-trait"]
# Debug/Test features
debug-proc-macros = ["magical-macros/debug", "magical-testutils-macros/debug"]
test-electrum = ["electrum"]
test-md-docs = ["base64", "electrum"]
cli-utils = ["clap"]
multiparty = []
[dev-dependencies]
magical-testutils = { version = "0.1.0-beta.1", path = "./testutils" }
magical-testutils-macros = { version = "0.1.0-beta.1", path = "./testutils-macros" }
serial_test = "0.4"
tokio = { version = "0.2", features = ["macros"] }
lazy_static = "1.4"
rustyline = "6.0"
dirs = "2.0"
@@ -67,27 +42,22 @@ rand = "0.7"
name = "repl"
required-features = ["cli-utils"]
[[example]]
name = "parse_descriptor"
name = "psbt"
[[example]]
name = "address_validator"
name = "parse_descriptor"
[[example]]
name = "miniscriptc"
path = "examples/compiler.rs"
required-features = ["compiler"]
[[example]]
name = "multiparty"
required-features = ["multiparty","compiler"]
# Provide a more user-friendly alias for the REPL
[[example]]
name = "magic"
path = "examples/repl.rs"
required-features = ["cli-utils"]
[workspace]
members = ["macros", "testutils", "testutils-macros"]
# Generate docs with nightly to add the "features required" badge
# https://stackoverflow.com/questions/61417452/how-to-get-a-feature-requirement-tag-in-the-documentation-generated-by-cargo-do
[package.metadata.docs.rs]
features = ["compiler", "electrum", "esplora", "compact_filters", "key-value-db"]
# defines the configuration attribute `docsrs`
rustdoc-args = ["--cfg", "docsrs"]

147
README.md
View File

@@ -1,146 +1,7 @@
<div align="center">
<h1>Magical Bitcoin Library</h1>
# Magical Bitcoin Wallet
<img src="./static/wizard.svg" width="220" />
A modern, lightweight, descriptor-based wallet written in Rust!
<p>
<strong>A modern, lightweight, descriptor-based wallet library written in Rust!</strong>
</p>
## Getting Started
<p>
<!-- <a href="https://crates.io/crates/magical"><img alt="Crate Info" src="https://img.shields.io/crates/v/magical.svg"/></a> -->
<a href="https://travis-ci.org/MagicalBitcoin/magical-bitcoin-wallet"><img alt="Travis Status" src="https://travis-ci.org/MagicalBitcoin/magical-bitcoin-wallet.svg?branch=master"></a>
<a href="https://magicalbitcoin.org/docs-rs/magical"><img alt="API Docs" src="https://img.shields.io/badge/docs.rs-magical-green"/></a>
<a href="https://blog.rust-lang.org/2020/07/16/Rust-1.45.0.html"><img alt="Rustc Version 1.45+" src="https://img.shields.io/badge/rustc-1.45%2B-lightgrey.svg"/></a>
</p>
<h4>
<a href="https://magicalbitcoin.org">Project Homepage</a>
<span> | </span>
<a href="https://magicalbitcoin.org/docs-rs/magical">Documentation</a>
</h4>
</div>
## About
The `magical` library aims to be the core building block for Bitcoin wallets of any kind.
* It uses [Miniscript](https://github.com/rust-bitcoin/rust-miniscript) to support descriptors with generalized conditions. This exact same library can be used to build
single-sig wallets, multisigs, timelocked contracts and more.
* It supports multiple blockchain backends and databases, allowing developers to choose exactly what's right for their projects.
* It's built to be cross-platform: the core logic works on desktop, mobile, and even WebAssembly.
* It's very easy to extend: developers can implement customized logic for blockchain backends, databases, signers, coin selection, and more, without having to fork and modify this library.
## Examples
### Sync the balance of a descriptor
```no_run
use magical::Wallet;
use magical::database::MemoryDatabase;
use magical::blockchain::{noop_progress, ElectrumBlockchain};
use magical::electrum_client::Client;
fn main() -> Result<(), magical::Error> {
let client = Client::new("ssl://electrum.blockstream.info:60002", None)?;
let wallet = Wallet::new(
"wpkh([c258d2e4/84h/1h/0h]tpubDDYkZojQFQjht8Tm4jsS3iuEmKjTiEGjG6KnuFNKKJb5A6ZUCUZKdvLdSDWofKi4ToRCwb9poe1XdqfUnP4jaJjCB2Zwv11ZLgSbnZSNecE/0/*)",
Some("wpkh([c258d2e4/84h/1h/0h]tpubDDYkZojQFQjht8Tm4jsS3iuEmKjTiEGjG6KnuFNKKJb5A6ZUCUZKdvLdSDWofKi4ToRCwb9poe1XdqfUnP4jaJjCB2Zwv11ZLgSbnZSNecE/1/*)"),
bitcoin::Network::Testnet,
MemoryDatabase::default(),
ElectrumBlockchain::from(client)
)?;
wallet.sync(noop_progress(), None)?;
println!("Descriptor balance: {} SAT", wallet.get_balance()?);
Ok(())
}
```
### Generate a few addresses
```
use magical::{Wallet, OfflineWallet};
use magical::database::MemoryDatabase;
fn main() -> Result<(), magical::Error> {
let wallet: OfflineWallet<_> = Wallet::new_offline(
"wpkh([c258d2e4/84h/1h/0h]tpubDDYkZojQFQjht8Tm4jsS3iuEmKjTiEGjG6KnuFNKKJb5A6ZUCUZKdvLdSDWofKi4ToRCwb9poe1XdqfUnP4jaJjCB2Zwv11ZLgSbnZSNecE/0/*)",
Some("wpkh([c258d2e4/84h/1h/0h]tpubDDYkZojQFQjht8Tm4jsS3iuEmKjTiEGjG6KnuFNKKJb5A6ZUCUZKdvLdSDWofKi4ToRCwb9poe1XdqfUnP4jaJjCB2Zwv11ZLgSbnZSNecE/1/*)"),
bitcoin::Network::Testnet,
MemoryDatabase::default(),
)?;
println!("Address #0: {}", wallet.get_new_address()?);
println!("Address #1: {}", wallet.get_new_address()?);
println!("Address #2: {}", wallet.get_new_address()?);
Ok(())
}
```
### Create a transaction
```no_run
use magical::{FeeRate, TxBuilder, Wallet};
use magical::database::MemoryDatabase;
use magical::blockchain::{noop_progress, ElectrumBlockchain};
use magical::electrum_client::Client;
use bitcoin::consensus::serialize;
fn main() -> Result<(), magical::Error> {
let client = Client::new("ssl://electrum.blockstream.info:60002", None)?;
let wallet = Wallet::new(
"wpkh([c258d2e4/84h/1h/0h]tpubDDYkZojQFQjht8Tm4jsS3iuEmKjTiEGjG6KnuFNKKJb5A6ZUCUZKdvLdSDWofKi4ToRCwb9poe1XdqfUnP4jaJjCB2Zwv11ZLgSbnZSNecE/0/*)",
Some("wpkh([c258d2e4/84h/1h/0h]tpubDDYkZojQFQjht8Tm4jsS3iuEmKjTiEGjG6KnuFNKKJb5A6ZUCUZKdvLdSDWofKi4ToRCwb9poe1XdqfUnP4jaJjCB2Zwv11ZLgSbnZSNecE/1/*)"),
bitcoin::Network::Testnet,
MemoryDatabase::default(),
ElectrumBlockchain::from(client)
)?;
wallet.sync(noop_progress(), None)?;
let send_to = wallet.get_new_address()?;
let (psbt, details) = wallet.create_tx(
TxBuilder::with_recipients(vec![(send_to.script_pubkey(), 50_000)])
.enable_rbf()
.do_not_spend_change()
.fee_rate(FeeRate::from_sat_per_vb(5.0))
)?;
println!("Transaction details: {:#?}", details);
println!("Unsigned PSBT: {}", base64::encode(&serialize(&psbt)));
Ok(())
}
```
### Sign a transaction
```no_run
use magical::{Wallet, OfflineWallet};
use magical::database::MemoryDatabase;
use bitcoin::consensus::deserialize;
fn main() -> Result<(), magical::Error> {
let wallet: OfflineWallet<_> = Wallet::new_offline(
"wpkh([c258d2e4/84h/1h/0h]tprv8griRPhA7342zfRyB6CqeKF8CJDXYu5pgnj1cjL1u2ngKcJha5jjTRimG82ABzJQ4MQe71CV54xfn25BbhCNfEGGJZnxvCDQCd6JkbvxW6h/0/*)",
Some("wpkh([c258d2e4/84h/1h/0h]tprv8griRPhA7342zfRyB6CqeKF8CJDXYu5pgnj1cjL1u2ngKcJha5jjTRimG82ABzJQ4MQe71CV54xfn25BbhCNfEGGJZnxvCDQCd6JkbvxW6h/1/*)"),
bitcoin::Network::Testnet,
MemoryDatabase::default(),
)?;
let psbt = "...";
let psbt = deserialize(&base64::decode(psbt).unwrap())?;
let (signed_psbt, finalized) = wallet.sign(psbt, None)?;
Ok(())
}
```
See the documentation at [magicalbitcoin.org](https://magicalbitcoin.org)

View File

@@ -1,24 +0,0 @@
#!/usr/bin/env sh
# Spin up a regtest bitcoind + electrs pair for the integration tests.
# The cookie-file path below matches the Travis home directory.
set -e

BITCOIN_VERSION=0.20.1

# This should be cached by Travis
cargo install --git https://github.com/romanz/electrs --bin electrs

# Fetch and unpack the pinned Bitcoin Core release, then put it on PATH.
# Quoted expansions keep the script robust under `sh`.
curl -O -L "https://bitcoincore.org/bin/bitcoin-core-$BITCOIN_VERSION/bitcoin-$BITCOIN_VERSION-x86_64-linux-gnu.tar.gz"
tar xf "bitcoin-$BITCOIN_VERSION-x86_64-linux-gnu.tar.gz"
export PATH="$PATH:./bitcoin-$BITCOIN_VERSION/bin"

# Start the daemon and poll until its RPC interface answers.
bitcoind -regtest=1 -daemon=1 -fallbackfee=0.0002
until bitcoin-cli -regtest getblockchaininfo; do
    sleep 1
done

# Mine 150 blocks to a fresh wallet address (presumably so coinbase
# outputs mature and tests have spendable funds — TODO confirm).
ADDR=$(bitcoin-cli -regtest getnewaddress)
bitcoin-cli -regtest generatetoaddress 150 "$ADDR"

# Index the fresh chain with electrs; give it a few seconds to come up.
nohup electrs --network regtest --jsonrpc-import --cookie-file /home/travis/.bitcoin/regtest/.cookie &
sleep 5

View File

@@ -1,72 +0,0 @@
// Magical Bitcoin Library
// Written in 2020 by
// Alekos Filini <alekos.filini@gmail.com>
//
// Copyright (c) 2020 Magical Bitcoin
//
// Permission is hereby granted, free of charge, to any person obtaining a copy
// of this software and associated documentation files (the "Software"), to deal
// in the Software without restriction, including without limitation the rights
// to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
// copies of the Software, and to permit persons to whom the Software is
// furnished to do so, subject to the following conditions:
//
// The above copyright notice and this permission notice shall be included in all
// copies or substantial portions of the Software.
//
// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
// IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
// FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
// AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
// LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
// OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
// SOFTWARE.
use std::sync::Arc;
use magical::bitcoin;
use magical::database::MemoryDatabase;
use magical::descriptor::HDKeyPaths;
use magical::wallet::address_validator::{AddressValidator, AddressValidatorError};
use magical::ScriptType;
use magical::{OfflineWallet, Wallet};
use bitcoin::hashes::hex::FromHex;
use bitcoin::util::bip32::Fingerprint;
use bitcoin::{Network, Script};
/// Example address validator that only accepts scripts whose hd_keypaths
/// contain the hardcoded fingerprint `bc123c3e`, logging each validated
/// address; anything else is rejected with `InvalidScript`.
struct DummyValidator;

impl AddressValidator for DummyValidator {
    fn validate(
        &self,
        script_type: ScriptType,
        hd_keypaths: &HDKeyPaths,
        script: &Script,
    ) -> Result<(), AddressValidatorError> {
        // Hoisted out of the closure: parsing the hex fingerprint is
        // loop-invariant, so compute it once instead of once per entry.
        let expected = Fingerprint::from_hex("bc123c3e").unwrap();
        let (_, path) = hd_keypaths
            .values()
            .find(|(fing, _)| fing == &expected)
            .ok_or(AddressValidatorError::InvalidScript)?;
        println!(
            "Validating `{:?}` {} address, script: {}",
            script_type, path, script
        );
        Ok(())
    }
}
fn main() -> Result<(), magical::Error> {
    // Timelocked single-key descriptor backing the offline wallet.
    let desc = "sh(and_v(v:pk(tpubDDpWvmUrPZrhSPmUzCMBHffvC3HyMAPnWDSAQNBTnj1iZeJa7BZQEttFiP4DS4GCcXQHezdXhn86Hj6LHX5EDstXPWrMaSneRWM8yUf6NFd/*),after(630000)))";
    let mut wallet: OfflineWallet<_> =
        Wallet::new_offline(desc, None, Network::Regtest, MemoryDatabase::new())?;

    // Register the custom validator; it is invoked on address derivation.
    wallet.add_address_validator(Arc::new(Box::new(DummyValidator)));

    // Derive three addresses, each one passing through DummyValidator.
    for _ in 0..3 {
        wallet.get_new_address()?;
    }
    Ok(())
}

View File

@@ -1,46 +1,27 @@
// Magical Bitcoin Library
// Written in 2020 by
// Alekos Filini <alekos.filini@gmail.com>
//
// Copyright (c) 2020 Magical Bitcoin
//
// Permission is hereby granted, free of charge, to any person obtaining a copy
// of this software and associated documentation files (the "Software"), to deal
// in the Software without restriction, including without limitation the rights
// to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
// copies of the Software, and to permit persons to whom the Software is
// furnished to do so, subject to the following conditions:
//
// The above copyright notice and this permission notice shall be included in all
// copies or substantial portions of the Software.
//
// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
// IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
// FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
// AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
// LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
// OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
// SOFTWARE.
extern crate bitcoin;
extern crate clap;
extern crate log;
extern crate magical;
extern crate magical_bitcoin_wallet;
extern crate miniscript;
extern crate rand;
extern crate serde_json;
extern crate sled;
use std::str::FromStr;
use log::info;
use rand::distributions::Alphanumeric;
use rand::{thread_rng, Rng};
use clap::{App, Arg};
use bitcoin::Network;
use miniscript::policy::Concrete;
use miniscript::Descriptor;
use magical::database::memory::MemoryDatabase;
use magical::{OfflineWallet, ScriptType, Wallet};
use magical_bitcoin_wallet::types::ScriptType;
use magical_bitcoin_wallet::{OfflineWallet, Wallet};
fn main() {
env_logger::init_from_env(
@@ -82,32 +63,48 @@ fn main() {
info!("Compiling policy: {}", policy_str);
let policy = Concrete::<String>::from_str(&policy_str).unwrap();
let compiled = policy.compile().unwrap();
let descriptor = match matches.value_of("TYPE").unwrap() {
"sh" => Descriptor::Sh(policy.compile().unwrap()),
"wsh" => Descriptor::Wsh(policy.compile().unwrap()),
"sh-wsh" => Descriptor::ShWsh(policy.compile().unwrap()),
"sh" => Descriptor::Sh(compiled),
"wsh" => Descriptor::Wsh(compiled),
"sh-wsh" => Descriptor::ShWsh(compiled),
_ => panic!("Invalid type"),
};
info!("... Descriptor: {}", descriptor);
let database = MemoryDatabase::new();
let temp_db = {
let mut temp_db = std::env::temp_dir();
let rand_string: String = thread_rng().sample_iter(&Alphanumeric).take(15).collect();
temp_db.push(rand_string);
let network = match matches.value_of("network") {
Some("regtest") => Network::Regtest,
Some("testnet") | _ => Network::Testnet,
let database = sled::open(&temp_db).unwrap();
let network = match matches.value_of("network") {
Some("regtest") => Network::Regtest,
Some("testnet") | _ => Network::Testnet,
};
let wallet: OfflineWallet<_> = Wallet::new_offline(
&format!("{}", descriptor),
None,
network,
database.open_tree("").unwrap(),
)
.unwrap();
info!("... First address: {}", wallet.get_new_address().unwrap());
if matches.is_present("parsed_policy") {
let spending_policy = wallet.policies(ScriptType::External).unwrap();
info!(
"... Spending policy:\n{}",
serde_json::to_string_pretty(&spending_policy).unwrap()
);
}
temp_db
};
let wallet: OfflineWallet<_> =
Wallet::new_offline(&format!("{}", descriptor), None, network, database).unwrap();
info!("... First address: {}", wallet.get_new_address().unwrap());
if matches.is_present("parsed_policy") {
let spending_policy = wallet.policies(ScriptType::External).unwrap();
info!(
"... Spending policy:\n{}",
serde_json::to_string_pretty(&spending_policy).unwrap()
);
}
std::fs::remove_dir_all(temp_db).unwrap();
}

96
examples/multiparty.rs Normal file
View File

@@ -0,0 +1,96 @@
extern crate bitcoin;
extern crate clap;
extern crate log;
extern crate magical_bitcoin_wallet;
extern crate miniscript;
extern crate rand;
extern crate serde_json;
extern crate sled;
use std::str::FromStr;
use log::info;
use clap::{App, Arg};
use bitcoin::PublicKey;
use miniscript::policy::Concrete;
use miniscript::Descriptor;
use magical_bitcoin_wallet::multiparty::{Coordinator, Participant, Peer};
/// Demo of the multiparty descriptor-creation flow: a coordinator compiles a
/// policy, collects keys from named participants, and both sides finalize the
/// same descriptor from the shared key map.
fn main() {
    // Default to "info" logging unless RUST_LOG overrides it.
    env_logger::init_from_env(
        env_logger::Env::default().filter_or(env_logger::DEFAULT_FILTER_ENV, "info"),
    );

    let matches = App::new("Multiparty Tools")
        .arg(
            Arg::with_name("POLICY")
                .help("Sets the spending policy to compile")
                .required(true)
                .index(1),
        )
        .arg(
            Arg::with_name("TYPE")
                .help("Sets the script type used to embed the compiled policy")
                .required(true)
                .index(2)
                .possible_values(&["sh", "wsh", "sh-wsh"]),
        )
        .get_matches();

    // Compile the miniscript policy once and wrap it in the requested script type.
    let policy_str = matches.value_of("POLICY").unwrap();
    info!("Compiling policy: {}", policy_str);
    let policy = Concrete::<String>::from_str(&policy_str).unwrap();
    let compiled = policy.compile().unwrap();
    let descriptor = match matches.value_of("TYPE").unwrap() {
        "sh" => Descriptor::Sh(compiled),
        "wsh" => Descriptor::Wsh(compiled),
        "sh-wsh" => Descriptor::ShWsh(compiled),
        // Unreachable in practice: clap restricts TYPE via possible_values.
        _ => panic!("Invalid type"),
    };
    info!("Descriptor: {}", descriptor);

    // The coordinator reports which named keys are still missing.
    let mut coordinator: Participant<Coordinator> = Participant::new(descriptor).unwrap();
    let missing_keys = coordinator.missing_keys();
    info!("Missing keys: {:?}", missing_keys);

    // NOTE(review): the same public key is reused for every participant here;
    // fine for a demo, but real usage would supply distinct keys.
    let pk =
        PublicKey::from_str("02c65413e56b343a0a31c18d506f1502a17fc64dfbcef6bfb00d1c0d6229bb6f61")
            .unwrap();
    coordinator.add_key("Alice", pk.into()).unwrap();
    coordinator.add_key("Carol", pk.into()).unwrap();

    // Hand Bob a descriptor with the keys collected so far so he can add his own.
    let for_bob = coordinator.descriptor_for("Bob").unwrap();
    info!("Descriptor for Bob: {}", for_bob);
    let mut bob_peer: Participant<Peer> = Participant::new(for_bob).unwrap();
    info!(
        "Bob's policy: {}",
        serde_json::to_string(&bob_peer.policy().unwrap().unwrap()).unwrap()
    );
    bob_peer.use_key(pk.into()).unwrap();
    info!("Bob's my_key: {}", bob_peer.my_key().unwrap());

    // Once Bob's key is registered the coordinator can finalize the descriptor...
    coordinator.add_key("Bob", pk.into()).unwrap();
    info!("Coordinator completed: {}", coordinator.completed());
    let coord_map = coordinator.get_map().unwrap();
    let finalized = coordinator.finalize().unwrap();
    info!("Coordinator final: {}", finalized);

    // ...and Bob reproduces the same final descriptor from the shared key map.
    let bob_finalized = bob_peer.apply_map(coord_map).unwrap();
    info!("Bob final: {}", bob_finalized);
}

View File

@@ -1,52 +1,26 @@
// Magical Bitcoin Library
// Written in 2020 by
// Alekos Filini <alekos.filini@gmail.com>
//
// Copyright (c) 2020 Magical Bitcoin
//
// Permission is hereby granted, free of charge, to any person obtaining a copy
// of this software and associated documentation files (the "Software"), to deal
// in the Software without restriction, including without limitation the rights
// to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
// copies of the Software, and to permit persons to whom the Software is
// furnished to do so, subject to the following conditions:
//
// The above copyright notice and this permission notice shall be included in all
// copies or substantial portions of the Software.
//
// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
// IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
// FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
// AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
// LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
// OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
// SOFTWARE.
extern crate magical;
extern crate magical_bitcoin_wallet;
extern crate serde_json;
use std::sync::Arc;
use std::str::FromStr;
use magical::bitcoin::util::bip32::ChildNumber;
use magical::bitcoin::*;
use magical::descriptor::*;
use magical_bitcoin_wallet::bitcoin::*;
use magical_bitcoin_wallet::descriptor::*;
fn main() {
let desc = "wsh(or_d(\
multi(\
thresh_m(\
2,[d34db33f/44'/0'/0']xpub6ERApfZwUNrhLCkDtcHTcxd75RbzS1ed54G1LkBUHQVHQKqhMkhgbmJbZRkrgZw4koxb5JaHWkY4ALHY2grBGRjaDMzQLcgJvLJuZZvRcEL/1/*,tprv8ZgxMBicQKsPduL5QnGihpprdHyypMGi4DhimjtzYemu7se5YQNcZfAPLqXRuGHb5ZX2eTQj62oNqMnyxJ7B7wz54Uzswqw8fFqMVdcmVF7/1/*\
),\
and_v(vc:pk_h(cVt4o7BGAig1UXywgGSmARhxMdzP5qvQsxKkSsc1XEkw3tDTQFpy),older(1000))\
))";
let (extended_desc, key_map) = ExtendedDescriptor::parse_secret(desc).unwrap();
let extended_desc = ExtendedDescriptor::from_str(desc).unwrap();
println!("{:?}", extended_desc);
let signers = Arc::new(key_map.into());
let policy = extended_desc.extract_policy(signers).unwrap();
let policy = extended_desc.extract_policy().unwrap();
println!("policy: {}", serde_json::to_string(&policy).unwrap());
let derived_desc = extended_desc.derive(&[ChildNumber::from_normal_idx(42).unwrap()]);
let derived_desc = extended_desc.derive(42).unwrap();
println!("{:?}", derived_desc);
let addr = derived_desc.address(Network::Testnet).unwrap();

50
examples/psbt.rs Normal file
View File

@@ -0,0 +1,50 @@
extern crate base64;
extern crate magical_bitcoin_wallet;
use std::str::FromStr;
use magical_bitcoin_wallet::bitcoin;
use magical_bitcoin_wallet::descriptor::*;
use magical_bitcoin_wallet::psbt::*;
use magical_bitcoin_wallet::signer::Signer;
use bitcoin::consensus::encode::{deserialize, serialize};
use bitcoin::util::psbt::PartiallySignedTransaction;
use bitcoin::SigHashType;
/// Signs a hardcoded legacy PSBT with the key material embedded in a `pkh`
/// descriptor, then prints the updated PSBT as base64.
fn main() {
    // Single-key legacy (pkh) descriptor controlling the PSBT's inputs.
    let desc = "pkh(tprv8ZgxMBicQKsPd7Uf69XL1XwhmjHopUGep8GuEiJDZmbQz6o58LninorQAfcKZWARbtRtfnLcJ5MQ2AtHcQJCCRUcMRvmDUjyEmNUWwx8UbK/*)";
    let extended_desc = ExtendedDescriptor::from_str(desc).unwrap();

    // Hardcoded unsigned PSBT (base64) to be signed below.
    let psbt_str = "cHNidP8BAFMCAAAAAd9SiQfxXZ+CKjgjRNonWXsnlA84aLvjxtwCmMfRc0ZbAQAAAAD+////ASjS9QUAAAAAF6kUYJR3oB0lS1M0W1RRMMiENSX45IuHAAAAAAABAPUCAAAAA9I7/OqeFeOFdr5VTLnj3UI/CNRw2eWmMPf7qDv6uIF6AAAAABcWABTG+kgr0g44V0sK9/9FN9oG/CxMK/7///+d0ffphPcV6FE9J/3ZPKWu17YxBnWWTJQyRJs3HUo1gwEAAAAA/v///835mYd9DmnjVnUKd2421MDoZmIxvB4XyJluN3SPUV9hAAAAABcWABRfvwFGp+x/yWdXeNgFs9v0duyeS/7///8CFbH+AAAAAAAXqRSEnTOAjJN/X6ZgR9ftKmwisNSZx4cA4fUFAAAAABl2qRTs6pS4x17MSQ4yNs/1GPsfdlv2NIisAAAAACIGApVE9PPtkcqp8Da43yrXGv4nLOotZdyxwJoTWQxuLxIuCAxfmh4JAAAAAAA=";
    let psbt_buf = base64::decode(psbt_str).unwrap();
    let mut psbt: PartiallySignedTransaction = deserialize(&psbt_buf).unwrap();

    let signer = PSBTSigner::from_descriptor(&psbt.global.unsigned_tx, &extended_desc).unwrap();

    // For every input, produce a signature for each key listed in hd_keypaths.
    for (index, input) in psbt.inputs.iter_mut().enumerate() {
        for (pubkey, (fing, path)) in &input.hd_keypaths {
            // Default to SIGHASH_ALL when the input doesn't request a specific type.
            let sighash = input.sighash_type.unwrap_or(SigHashType::All);
            // Ignore the "witness_utxo" case because we know this psbt is a legacy tx
            if let Some(non_wit_utxo) = &input.non_witness_utxo {
                // Script of the output being spent, found via this input's outpoint.
                let prev_script = &non_wit_utxo.output
                    [psbt.global.unsigned_tx.input[index].previous_output.vout as usize]
                    .script_pubkey;
                let (signature, sighash) = signer
                    .sig_legacy_from_fingerprint(index, sighash, fing, path, prev_script)
                    .unwrap()
                    .unwrap();
                // partial_sigs stores the DER signature with the 1-byte sighash
                // flag appended.
                let mut concat_sig = Vec::new();
                concat_sig.extend_from_slice(&signature.serialize_der());
                concat_sig.extend_from_slice(&[sighash as u8]);
                input.partial_sigs.insert(*pubkey, concat_sig);
            }
        }
    }

    println!("signed: {}", base64::encode(&serialize(&psbt)));
}

View File

@@ -1,27 +1,3 @@
// Magical Bitcoin Library
// Written in 2020 by
// Alekos Filini <alekos.filini@gmail.com>
//
// Copyright (c) 2020 Magical Bitcoin
//
// Permission is hereby granted, free of charge, to any person obtaining a copy
// of this software and associated documentation files (the "Software"), to deal
// in the Software without restriction, including without limitation the rights
// to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
// copies of the Software, and to permit persons to whom the Software is
// furnished to do so, subject to the following conditions:
//
// The above copyright notice and this permission notice shall be included in all
// copies or substantial portions of the Software.
//
// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
// IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
// FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
// AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
// LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
// OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
// SOFTWARE.
use std::fs;
use std::path::PathBuf;
use std::sync::Arc;
@@ -36,11 +12,11 @@ use log::{debug, error, info, trace, LevelFilter};
use bitcoin::Network;
use magical::bitcoin;
use magical::blockchain::compact_filters::*;
use magical::cli;
use magical::sled;
use magical::Wallet;
use magical_bitcoin_wallet::bitcoin;
use magical_bitcoin_wallet::blockchain::ElectrumBlockchain;
use magical_bitcoin_wallet::cli;
use magical_bitcoin_wallet::sled;
use magical_bitcoin_wallet::{Client, Wallet};
fn prepare_home_dir() -> PathBuf {
let mut dir = PathBuf::new();
@@ -56,7 +32,8 @@ fn prepare_home_dir() -> PathBuf {
dir
}
fn main() {
#[tokio::main]
async fn main() {
env_logger::init();
let app = cli::make_cli_subcommands();
@@ -88,17 +65,18 @@ fn main() {
.unwrap();
debug!("database opened successfully");
let num_threads = 1;
let mempool = Arc::new(Mempool::default());
let peers = (0..num_threads)
.map(|_| Peer::connect("192.168.1.136:8333", Arc::clone(&mempool), Network::Bitcoin))
.collect::<Result<_, _>>()
let client = Client::new(matches.value_of("server").unwrap())
.await
.unwrap();
let blockchain =
CompactFiltersBlockchain::new(peers, "./wallet-filters", Some(500_000)).unwrap();
let wallet = Wallet::new(descriptor, change_descriptor, network, tree, blockchain).unwrap();
let wallet = Wallet::new(
descriptor,
change_descriptor,
network,
tree,
ElectrumBlockchain::from(client),
)
.await
.unwrap();
let wallet = Arc::new(wallet);
if let Some(_sub_matches) = matches.subcommand_matches("repl") {
@@ -123,9 +101,12 @@ fn main() {
continue;
}
let result =
cli::handle_matches(&Arc::clone(&wallet), matches.unwrap()).unwrap();
println!("{}", serde_json::to_string_pretty(&result).unwrap());
if let Some(s) = cli::handle_matches(&Arc::clone(&wallet), matches.unwrap())
.await
.unwrap()
{
println!("{}", s);
}
}
Err(ReadlineError::Interrupted) => continue,
Err(ReadlineError::Eof) => break,
@@ -138,7 +119,8 @@ fn main() {
// rl.save_history("history.txt").unwrap();
} else {
let result = cli::handle_matches(&wallet, matches).unwrap();
println!("{}", serde_json::to_string_pretty(&result).unwrap());
if let Some(s) = cli::handle_matches(&wallet, matches).await.unwrap() {
println!("{}", s);
}
}
}

View File

@@ -1,18 +0,0 @@
[package]
name = "magical-macros"
version = "0.1.0-beta.1"
authors = ["Alekos Filini <alekos.filini@gmail.com>"]
edition = "2018"
# See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html
[dependencies]
syn = { version = "1.0", features = ["parsing", "full"] }
proc-macro2 = "1.0"
quote = "1.0"
[features]
debug = ["syn/extra-traits"]
[lib]
proc-macro = true

View File

@@ -1,159 +0,0 @@
// Magical Bitcoin Library
// Written in 2020 by
// Alekos Filini <alekos.filini@gmail.com>
//
// Copyright (c) 2020 Magical Bitcoin
//
// Permission is hereby granted, free of charge, to any person obtaining a copy
// of this software and associated documentation files (the "Software"), to deal
// in the Software without restriction, including without limitation the rights
// to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
// copies of the Software, and to permit persons to whom the Software is
// furnished to do so, subject to the following conditions:
//
// The above copyright notice and this permission notice shall be included in all
// copies or substantial portions of the Software.
//
// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
// IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
// FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
// AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
// LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
// OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
// SOFTWARE.
#[macro_use]
extern crate quote;
use proc_macro::TokenStream;
use syn::spanned::Spanned;
use syn::{parse, ImplItemMethod, ItemImpl, ItemTrait, Token};
/// Emits the trait twice: once unchanged for sync (non-wasm32, non-"async-interface")
/// builds, and once with every method marked `async` plus `#[async_trait(?Send)]`
/// for wasm32 / "async-interface" builds.
fn add_async_trait(mut parsed: ItemTrait) -> TokenStream {
    // Snapshot the trait BEFORE mutating it: `quote!` interpolates `#parsed`
    // eagerly, so this copy keeps the original sync method signatures.
    let output = quote! {
        #[cfg(all(not(target_arch = "wasm32"), not(feature = "async-interface")))]
        #parsed
    };
    // Now mark every trait method `async` for the second emission.
    for mut item in &mut parsed.items {
        if let syn::TraitItem::Method(m) = &mut item {
            m.sig.asyncness = Some(Token![async](m.span()));
        }
    }
    // Append the async flavor, gated to wasm32 or the "async-interface" feature.
    let output = quote! {
        #output
        #[cfg(any(target_arch = "wasm32", feature = "async-interface"))]
        #[async_trait(?Send)]
        #parsed
    };
    output.into()
}
/// Emit a single method twice: verbatim for the sync configuration, and as an
/// `async fn` for wasm32 or the "async-interface" feature.
fn add_async_method(mut parsed: ImplItemMethod) -> TokenStream {
    // Sync flavor first, before `parsed` is modified.
    let sync_variant = quote! {
        #[cfg(all(not(target_arch = "wasm32"), not(feature = "async-interface")))]
        #parsed
    };

    // Then make it async and emit the gated async flavor.
    parsed.sig.asyncness = Some(Token![async](parsed.span()));
    let combined = quote! {
        #sync_variant

        #[cfg(any(target_arch = "wasm32", feature = "async-interface"))]
        #parsed
    };

    combined.into()
}
/// Emit a trait impl twice: verbatim for the sync configuration, and with
/// every method marked `async` (plus `#[async_trait(?Send)]`) for wasm32 or
/// the "async-interface" feature.
fn add_async_impl_trait(mut parsed: ItemImpl) -> TokenStream {
    // Capture the sync flavor before mutating `parsed`.
    let sync_variant = quote! {
        #[cfg(all(not(target_arch = "wasm32"), not(feature = "async-interface")))]
        #parsed
    };

    // Make every method async for the async configuration.
    for item in parsed.items.iter_mut() {
        if let syn::ImplItem::Method(method) = item {
            method.sig.asyncness = Some(Token![async](method.span()));
        }
    }

    let combined = quote! {
        #sync_variant

        #[cfg(any(target_arch = "wasm32", feature = "async-interface"))]
        #[async_trait(?Send)]
        #parsed
    };

    combined.into()
}
/// Makes a method or every method of a trait "async" only if the target_arch is "wasm32"
///
/// Requires the `async-trait` crate as a dependency whenever this attribute is used on a trait
/// definition or trait implementation.
#[proc_macro_attribute]
pub fn maybe_async(_attr: TokenStream, item: TokenStream) -> TokenStream {
    // Try each supported item kind in turn: trait definition, single method,
    // trait impl block. The first successful parse wins.
    if let Ok(trait_item) = parse(item.clone()) {
        return add_async_trait(trait_item);
    }
    if let Ok(method) = parse(item.clone()) {
        return add_async_method(method);
    }
    match parse(item) {
        Ok(impl_block) => add_async_impl_trait(impl_block),
        // None of the parsers matched: surface a compile error at the use site.
        Err(_) => (quote! {
            compile_error!("#[maybe_async] can only be used on methods, trait or trait impl blocks")
        })
        .into(),
    }
}
/// Awaits if target_arch is "wasm32", does nothing otherwise
#[proc_macro]
pub fn maybe_await(expr: TokenStream) -> TokenStream {
    let expr: proc_macro2::TokenStream = expr.into();
    let quoted = quote! {
        {
            #[cfg(all(not(target_arch = "wasm32"), not(feature = "async-interface")))]
            {
                #expr
            }
            #[cfg(any(target_arch = "wasm32", feature = "async-interface"))]
            {
                // Parenthesized so that `.await` applies to the WHOLE input
                // expression: without parens, `maybe_await!(a + b)` would
                // expand to `a + b.await`, awaiting only the last operand.
                (#expr).await
            }
        }
    };

    quoted.into()
}
/// Awaits if target_arch is "wasm32", uses `tokio::Runtime::block_on()` otherwise
///
/// Requires the `tokio` crate as a dependency with `rt-core` or `rt-threaded` to build on non-wasm32 platforms.
#[proc_macro]
pub fn await_or_block(expr: TokenStream) -> TokenStream {
    let expr: proc_macro2::TokenStream = expr.into();
    let quoted = quote! {
        {
            #[cfg(all(not(target_arch = "wasm32"), not(feature = "async-interface")))]
            {
                // NOTE: a brand new tokio runtime is built on every call,
                // which is expensive; acceptable for one-shot CLI usage only.
                tokio::runtime::Runtime::new().unwrap().block_on(#expr)
            }
            #[cfg(any(target_arch = "wasm32", feature = "async-interface"))]
            {
                // Parenthesized so that `.await` applies to the WHOLE input
                // expression, not only the last operand of a binary expression.
                (#expr).await
            }
        }
    };

    quoted.into()
}

View File

@@ -1,562 +0,0 @@
// Magical Bitcoin Library
// Written in 2020 by
// Alekos Filini <alekos.filini@gmail.com>
//
// Copyright (c) 2020 Magical Bitcoin
//
// Permission is hereby granted, free of charge, to any person obtaining a copy
// of this software and associated documentation files (the "Software"), to deal
// in the Software without restriction, including without limitation the rights
// to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
// copies of the Software, and to permit persons to whom the Software is
// furnished to do so, subject to the following conditions:
//
// The above copyright notice and this permission notice shall be included in all
// copies or substantial portions of the Software.
//
// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
// IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
// FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
// AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
// LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
// OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
// SOFTWARE.
//! Compact Filters
//!
//! This module contains a multithreaded implementation of an [`OnlineBlockchain`] backend that
//! uses BIP157 (aka "Neutrino") to populate the wallet's [database](crate::database::Database)
//! by downloading compact filters from the P2P network.
//!
//! Since there are currently very few peers "in the wild" that advertise the required service
//! flag, this implementation requires that one or more known peers are provided by the user.
//! No dns or other kinds of peer discovery are done internally.
//!
//! Moreover, this module doesn't currently support detecting and resolving conflicts between
//! messages received by different peers. Thus, it's recommended to use this module by only
//! connecting to a single peer at a time, optionally by opening multiple connections if it's
//! desirable to use multiple threads at once to sync in parallel.
//!
//! ## Example
//!
//! ```no_run
//! # use std::sync::Arc;
//! # use bitcoin::*;
//! # use magical::*;
//! # use magical::blockchain::compact_filters::*;
//! let num_threads = 4;
//!
//! let mempool = Arc::new(Mempool::default());
//! let peers = (0..num_threads)
//! .map(|_| Peer::connect(
//! "btcd-mainnet.lightning.computer:8333",
//! Arc::clone(&mempool),
//! Network::Bitcoin,
//! ))
//! .collect::<Result<_, _>>()?;
//! let blockchain = CompactFiltersBlockchain::new(peers, "./wallet-filters", Some(500_000))?;
//! # Ok::<(), magical::error::Error>(())
//! ```
use std::collections::HashSet;
use std::fmt;
use std::path::Path;
use std::sync::atomic::{AtomicUsize, Ordering};
use std::sync::{Arc, Mutex};
#[allow(unused_imports)]
use log::{debug, error, info, trace};
use bitcoin::network::message_blockdata::Inventory;
use bitcoin::{OutPoint, Transaction, Txid};
use rocksdb::{Options, SliceTransform, DB};
mod peer;
mod store;
mod sync;
use super::{Blockchain, Capability, OnlineBlockchain, Progress};
use crate::database::{BatchDatabase, BatchOperations, DatabaseUtils};
use crate::error::Error;
use crate::types::{ScriptType, TransactionDetails, UTXO};
use crate::FeeRate;
use peer::*;
use store::*;
use sync::*;
pub use peer::{Mempool, Peer};
// Relative progress-weighting constants used by `setup` to turn the three sync
// phases (header download, filter download, block processing) into a single
// 0-100% progress estimate. Units are arbitrary; only the ratios matter.
const SYNC_HEADERS_COST: f32 = 1.0;
const SYNC_FILTERS_COST: f32 = 11.6 * 1_000.0;
const PROCESS_BLOCKS_COST: f32 = 20_000.0;
/// Structure implementing the required blockchain traits
///
/// ## Example
/// See the [`blockchain::compact_filters`](crate::blockchain::compact_filters) module for a usage example.
#[derive(Debug)]
// `None` means the client was created via `Blockchain::offline()`: network
// operations then fail with `Error::OfflineClient`.
pub struct CompactFiltersBlockchain(Option<CompactFilters>);
impl CompactFiltersBlockchain {
    /// Construct a new instance given a list of peers, a path to store headers and block
    /// filters downloaded during the sync and optionally a number of blocks to ignore starting
    /// from the genesis while scanning for the wallet's outputs.
    ///
    /// For each [`Peer`] specified a new thread will be spawned to download and verify the filters
    /// in parallel. It's currently recommended to only connect to a single peer to avoid
    /// inconsistencies in the data returned, optionally with multiple connections in parallel to
    /// speed-up the sync process.
    pub fn new<P: AsRef<Path>>(
        peers: Vec<Peer>,
        storage_dir: P,
        skip_blocks: Option<usize>,
    ) -> Result<Self, CompactFiltersError> {
        // Build the inner state and wrap it as an "online" client.
        CompactFilters::new(peers, storage_dir, skip_blocks)
            .map(|inner| CompactFiltersBlockchain(Some(inner)))
    }
}
/// Internal struct that contains the state of a [`CompactFiltersBlockchain`]
#[derive(Debug)]
struct CompactFilters {
    // Connected peers; during `setup` each one gets its own sync thread.
    peers: Vec<Arc<Peer>>,
    // Header/filter/full-block storage backed by RocksDB.
    headers: Arc<ChainStore<Full>>,
    // Number of blocks from genesis that are never scanned for wallet outputs.
    skip_blocks: Option<usize>,
}
impl CompactFilters {
    /// Constructor, see [`CompactFiltersBlockchain::new`] for the documentation
    pub fn new<P: AsRef<Path>>(
        peers: Vec<Peer>,
        storage_dir: P,
        skip_blocks: Option<usize>,
    ) -> Result<Self, CompactFiltersError> {
        if peers.is_empty() {
            return Err(CompactFiltersError::NoPeers);
        }
        let mut opts = Options::default();
        opts.create_if_missing(true);
        // 16-byte fixed prefix — assumed to match the key layout used by
        // `ChainStore` (defined in the sibling `store` module).
        opts.set_prefix_extractor(SliceTransform::create_fixed_prefix(16));
        // NOTE(review): all peers are assumed to be on the same network as the
        // first one; this is not validated here.
        let network = peers[0].get_network();
        // If the DB doesn't exist yet, fall back to the lone "default" family.
        let cfs = DB::list_cf(&opts, &storage_dir).unwrap_or(vec!["default".to_string()]);
        let db = DB::open_cf(&opts, &storage_dir, &cfs)?;
        let headers = Arc::new(ChainStore::new(db, network)?);
        // try to recover partial snapshots
        for cf_name in &cfs {
            if !cf_name.starts_with("_headers:") {
                continue;
            }
            info!("Trying to recover: {:?}", cf_name);
            headers.recover_snapshot(cf_name)?;
        }
        Ok(CompactFilters {
            peers: peers.into_iter().map(Arc::new).collect(),
            headers,
            skip_blocks,
        })
    }
    /// Process a transaction by looking for inputs that spend from a UTXO in the database or
    /// outputs that send funds to a known script_pubkey.
    ///
    /// Updates the UTXO set, stores the transaction details when it touches
    /// the wallet, and tracks the highest derivation child seen per keychain
    /// through `internal_max_deriv` / `external_max_deriv`.
    fn process_tx<D: BatchDatabase>(
        &self,
        database: &mut D,
        tx: &Transaction,
        height: Option<u32>,
        timestamp: u64,
        internal_max_deriv: &mut Option<u32>,
        external_max_deriv: &mut Option<u32>,
    ) -> Result<(), Error> {
        // All writes go through a batch, committed atomically at the end.
        let mut updates = database.begin_batch();
        let mut incoming: u64 = 0;
        let mut outgoing: u64 = 0;
        let mut inputs_sum: u64 = 0;
        let mut outputs_sum: u64 = 0;
        // look for our own inputs
        for (i, input) in tx.input.iter().enumerate() {
            if let Some(previous_output) = database.get_previous_output(&input.previous_output)? {
                inputs_sum += previous_output.value;
                if database.is_mine(&previous_output.script_pubkey)? {
                    outgoing += previous_output.value;
                    debug!("{} input #{} is mine, removing from utxo", tx.txid(), i);
                    updates.del_utxo(&input.previous_output)?;
                }
            }
        }
        for (i, output) in tx.output.iter().enumerate() {
            // to compute the fees later
            outputs_sum += output.value;
            // this output is ours, we have a path to derive it
            if let Some((script_type, child)) =
                database.get_path_from_script_pubkey(&output.script_pubkey)?
            {
                debug!("{} output #{} is mine, adding utxo", tx.txid(), i);
                updates.set_utxo(&UTXO {
                    outpoint: OutPoint::new(tx.txid(), i as u32),
                    txout: output.clone(),
                    is_internal: script_type.is_internal(),
                })?;
                incoming += output.value;
                // Remember the highest derivation child seen for each keychain
                // so the caller can bump the stored indexes afterwards.
                if script_type == ScriptType::Internal
                    && (internal_max_deriv.is_none() || child > internal_max_deriv.unwrap_or(0))
                {
                    *internal_max_deriv = Some(child);
                } else if script_type == ScriptType::External
                    && (external_max_deriv.is_none() || child > external_max_deriv.unwrap_or(0))
                {
                    *external_max_deriv = Some(child);
                }
            }
        }
        // Only record transactions that actually touch the wallet.
        if incoming > 0 || outgoing > 0 {
            let tx = TransactionDetails {
                txid: tx.txid(),
                transaction: Some(tx.clone()),
                received: incoming,
                sent: outgoing,
                height,
                timestamp,
                // `checked_sub`: not every input is necessarily known to us,
                // so the difference can underflow; report 0 in that case.
                fees: inputs_sum.checked_sub(outputs_sum).unwrap_or(0),
            };
            info!("Saving tx {}", tx.txid);
            updates.set_tx(&tx)?;
        }
        database.commit_batch(updates)?;
        Ok(())
    }
}
impl Blockchain for CompactFiltersBlockchain {
    /// Build a client that carries no networking state.
    fn offline() -> Self {
        Self(None)
    }

    /// The client is online exactly when the inner state is present.
    fn is_online(&self) -> bool {
        self.0.is_some()
    }
}
impl OnlineBlockchain for CompactFiltersBlockchain {
    fn get_capabilities(&self) -> HashSet<Capability> {
        // Compact filters allow rebuilding the full wallet history.
        vec![Capability::FullHistory].into_iter().collect()
    }
    /// Full sync: download headers, then filters (one worker thread per
    /// peer), then fetch and process matching blocks plus the mempool.
    fn setup<D: BatchDatabase, P: 'static + Progress>(
        &self,
        _stop_gap: Option<usize>, // TODO: move to electrum and esplora only
        database: &mut D,
        progress_update: P,
    ) -> Result<(), Error> {
        let inner = self.0.as_ref().ok_or(Error::OfflineClient)?;
        // The first peer drives header sync and the mempool download; every
        // peer participates in filter download.
        let first_peer = &inner.peers[0];
        let skip_blocks = inner.skip_blocks.unwrap_or(0);
        let cf_sync = Arc::new(CFSync::new(Arc::clone(&inner.headers), skip_blocks, 0x00)?);
        let initial_height = inner.headers.get_height()?;
        // Filters are fetched in bundles of 1000 blocks each.
        let total_bundles = (first_peer.get_version().start_height as usize)
            .checked_sub(skip_blocks)
            .map(|x| x / 1000)
            .unwrap_or(0)
            + 1;
        let expected_bundles_to_sync = total_bundles
            .checked_sub(cf_sync.pruned_bundles()?)
            .unwrap_or(0);
        // Rough cost model used to report a 0-100% progress value.
        let headers_cost = (first_peer.get_version().start_height as usize)
            .checked_sub(initial_height)
            .unwrap_or(0) as f32
            * SYNC_HEADERS_COST;
        let filters_cost = expected_bundles_to_sync as f32 * SYNC_FILTERS_COST;
        let total_cost = headers_cost + filters_cost + PROCESS_BLOCKS_COST;
        if let Some(snapshot) = sync::sync_headers(
            Arc::clone(&first_peer),
            Arc::clone(&inner.headers),
            |new_height| {
                let local_headers_cost =
                    new_height.checked_sub(initial_height).unwrap_or(0) as f32 * SYNC_HEADERS_COST;
                progress_update.update(
                    local_headers_cost / total_cost * 100.0,
                    Some(format!("Synced headers to {}", new_height)),
                )
            },
        )? {
            // Only switch to the freshly synced chain if it has more work.
            if snapshot.work()? > inner.headers.work()? {
                info!("Applying snapshot with work: {}", snapshot.work()?);
                inner.headers.apply_snapshot(snapshot)?;
            }
        }
        let synced_height = inner.headers.get_height()?;
        let buried_height = synced_height
            .checked_sub(sync::BURIED_CONFIRMATIONS)
            .unwrap_or(0);
        info!("Synced headers to height: {}", synced_height);
        cf_sync.prepare_sync(Arc::clone(&first_peer))?;
        // Serialized script_pubkeys of the wallet, shared with every worker.
        let all_scripts = Arc::new(
            database
                .iter_script_pubkeys(None)?
                .into_iter()
                .map(|s| s.to_bytes())
                .collect::<Vec<_>>(),
        );
        let last_synced_block = Arc::new(Mutex::new(synced_height));
        let synced_bundles = Arc::new(AtomicUsize::new(0));
        let progress_update = Arc::new(Mutex::new(progress_update));
        // One worker thread per peer, all pulling work from the shared
        // `cf_sync` state.
        let mut threads = Vec::with_capacity(inner.peers.len());
        for peer in &inner.peers {
            let cf_sync = Arc::clone(&cf_sync);
            let peer = Arc::clone(&peer);
            let headers = Arc::clone(&inner.headers);
            let all_scripts = Arc::clone(&all_scripts);
            let last_synced_block = Arc::clone(&last_synced_block);
            let progress_update = Arc::clone(&progress_update);
            let synced_bundles = Arc::clone(&synced_bundles);
            let thread = std::thread::spawn(move || {
                cf_sync.capture_thread_for_sync(
                    peer,
                    // First closure: given a filter, decide whether the full
                    // block needs to be downloaded.
                    |block_hash, filter| {
                        if !filter
                            .match_any(block_hash, &mut all_scripts.iter().map(AsRef::as_ref))?
                        {
                            return Ok(false);
                        }
                        let block_height = headers.get_height_for(block_hash)?.unwrap_or(0);
                        // Skip the download if we already stored this exact block.
                        let saved_correct_block = match headers.get_full_block(block_height)? {
                            Some(block) if &block.block_hash() == block_hash => true,
                            _ => false,
                        };
                        if saved_correct_block {
                            Ok(false)
                        } else {
                            let mut last_synced_block = last_synced_block.lock().unwrap();
                            // If we download a block older than `last_synced_block`, we update it so that
                            // we know to delete and re-process all txs starting from that height
                            if block_height < *last_synced_block {
                                *last_synced_block = block_height;
                            }
                            Ok(true)
                        }
                    },
                    // Second closure: progress callback, once per completed
                    // bundle of 1000 filters.
                    |index| {
                        let synced_bundles = synced_bundles.fetch_add(1, Ordering::SeqCst);
                        let local_filters_cost = synced_bundles as f32 * SYNC_FILTERS_COST;
                        progress_update.lock().unwrap().update(
                            (headers_cost + local_filters_cost) / total_cost * 100.0,
                            Some(format!(
                                "Synced filters {} - {}",
                                index * 1000 + 1,
                                (index + 1) * 1000
                            )),
                        )
                    },
                )
            });
            threads.push(thread);
        }
        // Propagate the first worker error, if any.
        for t in threads {
            t.join().unwrap()?;
        }
        progress_update.lock().unwrap().update(
            (headers_cost + filters_cost) / total_cost * 100.0,
            Some("Processing downloaded blocks and mempool".into()),
        )?;
        // delete all txs newer than last_synced_block
        let last_synced_block = *last_synced_block.lock().unwrap();
        log::debug!(
            "Dropping transactions newer than `last_synced_block` = {}",
            last_synced_block
        );
        let mut updates = database.begin_batch();
        for details in database.iter_txs(false)? {
            match details.height {
                Some(height) if (height as usize) < last_synced_block => continue,
                _ => updates.del_tx(&details.txid, false)?,
            };
        }
        database.commit_batch(updates)?;
        first_peer.ask_for_mempool()?;
        // Re-process the downloaded blocks and the mempool, tracking the
        // highest derivation index seen for each keychain.
        let mut internal_max_deriv = None;
        let mut external_max_deriv = None;
        for (height, block) in inner.headers.iter_full_blocks()? {
            for tx in &block.txdata {
                inner.process_tx(
                    database,
                    tx,
                    Some(height as u32),
                    0,
                    &mut internal_max_deriv,
                    &mut external_max_deriv,
                )?;
            }
        }
        for tx in first_peer.get_mempool().iter_txs().iter() {
            inner.process_tx(
                database,
                tx,
                None,
                0,
                &mut internal_max_deriv,
                &mut external_max_deriv,
            )?;
        }
        // Bump the stored "last derivation index" if later children were seen.
        let current_ext = database.get_last_index(ScriptType::External)?.unwrap_or(0);
        let first_ext_new = external_max_deriv.map(|x| x + 1).unwrap_or(0);
        if first_ext_new > current_ext {
            info!("Setting external index to {}", first_ext_new);
            database.set_last_index(ScriptType::External, first_ext_new)?;
        }
        let current_int = database.get_last_index(ScriptType::Internal)?.unwrap_or(0);
        let first_int_new = internal_max_deriv.map(|x| x + 1).unwrap_or(0);
        if first_int_new > current_int {
            info!("Setting internal index to {}", first_int_new);
            database.set_last_index(ScriptType::Internal, first_int_new)?;
        }
        // Deeply-buried block bodies are no longer needed: drop them.
        info!("Dropping blocks until {}", buried_height);
        inner.headers.delete_blocks_until(buried_height)?;
        progress_update
            .lock()
            .unwrap()
            .update(100.0, Some("Done".into()))?;
        Ok(())
    }
    fn get_tx(&self, txid: &Txid) -> Result<Option<Transaction>, Error> {
        let inner = self.0.as_ref().ok_or(Error::OfflineClient)?;
        // Only the local mempool is consulted here.
        Ok(inner.peers[0]
            .get_mempool()
            .get_tx(&Inventory::Transaction(*txid)))
    }
    fn broadcast(&self, tx: &Transaction) -> Result<(), Error> {
        let inner = self.0.as_ref().ok_or(Error::OfflineClient)?;
        inner.peers[0].broadcast_tx(tx.clone())?;
        Ok(())
    }
    fn get_height(&self) -> Result<u32, Error> {
        let inner = self.0.as_ref().ok_or(Error::OfflineClient)?;
        Ok(inner.headers.get_height()? as u32)
    }
    fn estimate_fee(&self, _target: usize) -> Result<FeeRate, Error> {
        // TODO
        Ok(FeeRate::default())
    }
}
/// An error that can occur during sync with a [`CompactFiltersBlockchain`]
#[derive(Debug)]
pub enum CompactFiltersError {
    /// A peer sent an invalid or unexpected response
    InvalidResponse,
    /// The headers returned are invalid
    InvalidHeaders,
    /// The compact filter headers returned are invalid
    InvalidFilterHeader,
    /// The compact filter returned is invalid
    InvalidFilter,
    /// The peer is missing a block in the valid chain
    MissingBlock,
    /// The data stored in the block filters storage are corrupted
    DataCorruption,
    /// A peer is not connected
    NotConnected,
    /// A peer took too long to reply to one of our messages
    Timeout,
    /// No peers have been specified
    NoPeers,
    /// Internal database error
    DB(rocksdb::Error),
    /// Internal I/O error
    IO(std::io::Error),
    /// Invalid BIP158 filter
    BIP158(bitcoin::util::bip158::Error),
    /// Internal system time error
    Time(std::time::SystemTimeError),
    /// Wrapper for [`crate::error::Error`]
    // Boxed so this enum (and every `Result` carrying it) stays small.
    Global(Box<crate::error::Error>),
}
impl fmt::Display for CompactFiltersError {
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        // The human-readable form is simply the derived Debug output.
        f.write_fmt(format_args!("{:?}", self))
    }
}

impl std::error::Error for CompactFiltersError {}
// Boilerplate `From` conversions for the variants that simply wrap another
// error type, so `?` can be used on those operations.
macro_rules! impl_error {
    ( $from:ty, $to:ident ) => {
        impl std::convert::From<$from> for CompactFiltersError {
            fn from(err: $from) -> Self {
                CompactFiltersError::$to(err)
            }
        }
    };
}
impl_error!(rocksdb::Error, DB);
impl_error!(std::io::Error, IO);
impl_error!(bitcoin::util::bip158::Error, BIP158);
impl_error!(std::time::SystemTimeError, Time);
impl From<crate::error::Error> for CompactFiltersError {
    /// Box the library-level error to keep this enum small.
    fn from(err: crate::error::Error) -> Self {
        Self::Global(Box::new(err))
    }
}

View File

@@ -1,550 +0,0 @@
// Magical Bitcoin Library
// Written in 2020 by
// Alekos Filini <alekos.filini@gmail.com>
//
// Copyright (c) 2020 Magical Bitcoin
//
// Permission is hereby granted, free of charge, to any person obtaining a copy
// of this software and associated documentation files (the "Software"), to deal
// in the Software without restriction, including without limitation the rights
// to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
// copies of the Software, and to permit persons to whom the Software is
// furnished to do so, subject to the following conditions:
//
// The above copyright notice and this permission notice shall be included in all
// copies or substantial portions of the Software.
//
// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
// IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
// FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
// AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
// LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
// OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
// SOFTWARE.
use std::collections::HashMap;
use std::net::{TcpStream, ToSocketAddrs};
use std::sync::{Arc, Condvar, Mutex, RwLock};
use std::thread;
use std::time::{Duration, SystemTime, UNIX_EPOCH};
use socks::{Socks5Stream, ToTargetAddr};
use rand::{thread_rng, Rng};
use bitcoin::consensus::Encodable;
use bitcoin::hash_types::BlockHash;
use bitcoin::hashes::Hash;
use bitcoin::network::constants::ServiceFlags;
use bitcoin::network::message::{NetworkMessage, RawNetworkMessage};
use bitcoin::network::message_blockdata::*;
use bitcoin::network::message_filter::*;
use bitcoin::network::message_network::VersionMessage;
use bitcoin::network::stream_reader::StreamReader;
use bitcoin::network::Address;
use bitcoin::{Block, Network, Transaction, Txid};
use super::CompactFiltersError;
// Maps a message command string (e.g. "cfheaders") to the queue of received
// messages of that kind, plus a condvar used to wake up waiting receivers.
type ResponsesMap = HashMap<&'static str, Arc<(Mutex<Vec<NetworkMessage>>, Condvar)>>;
// Default timeout (seconds) applied to replies we explicitly wait for.
pub(crate) const TIMEOUT_SECS: u64 = 30;
/// Container for unconfirmed, but valid Bitcoin transactions
///
/// It is normally shared between [`Peer`]s with the use of [`Arc`], so that transactions are not
/// duplicated in memory.
#[derive(Debug, Default)]
pub struct Mempool {
    // Transactions indexed by txid; RwLock allows concurrent reads.
    txs: RwLock<HashMap<Txid, Transaction>>,
}
impl Mempool {
    /// Add a transaction to the mempool
    ///
    /// Note that this doesn't propagate the transaction to other
    /// peers. To do that, [`broadcast`](crate::blockchain::OnlineBlockchain::broadcast) should be used.
    pub fn add_tx(&self, tx: Transaction) {
        let txid = tx.txid();
        self.txs.write().unwrap().insert(txid, tx);
    }

    /// Look-up a transaction in the mempool given an [`Inventory`] request
    pub fn get_tx(&self, inventory: &Inventory) -> Option<Transaction> {
        let txid = match inventory {
            Inventory::Transaction(txid) => *txid,
            Inventory::WitnessTransaction(wtxid) => Txid::from_inner(wtxid.into_inner()),
            // Blocks (and the error marker) are never stored here.
            Inventory::Error | Inventory::Block(_) | Inventory::WitnessBlock(_) => return None,
        };

        let guard = self.txs.read().unwrap();
        guard.get(&txid).cloned()
    }

    /// Return whether or not the mempool contains a transaction with a given txid
    pub fn has_tx(&self, txid: &Txid) -> bool {
        let guard = self.txs.read().unwrap();
        guard.contains_key(txid)
    }

    /// Return the list of transactions contained in the mempool
    pub fn iter_txs(&self) -> Vec<Transaction> {
        let guard = self.txs.read().unwrap();
        guard.values().cloned().collect()
    }
}
/// A Bitcoin peer
#[derive(Debug)]
pub struct Peer {
    // Write half of the connection, also shared with the reader thread so it
    // can answer pings/getdata on its own.
    writer: Arc<Mutex<TcpStream>>,
    // Per-command queues of received messages (see `ResponsesMap`).
    responses: Arc<RwLock<ResponsesMap>>,
    // Background thread that parses and dispatches incoming messages.
    reader_thread: thread::JoinHandle<()>,
    // Set to `false` by the reader thread when the connection drops.
    connected: Arc<RwLock<bool>>,
    // Shared mempool, used to serve `getdata` requests from the peer.
    mempool: Arc<Mempool>,
    // Version message received from the peer during the handshake.
    version: VersionMessage,
    network: Network,
}
impl Peer {
    /// Connect to a peer over a plaintext TCP connection
    ///
    /// This function internally spawns a new thread that will monitor incoming messages from the
    /// peer, and optionally reply to some of them transparently, like [pings](NetworkMessage::Ping)
    pub fn connect<A: ToSocketAddrs>(
        address: A,
        mempool: Arc<Mempool>,
        network: Network,
    ) -> Result<Self, CompactFiltersError> {
        let stream = TcpStream::connect(address)?;
        Peer::from_stream(stream, mempool, network)
    }
    /// Connect to a peer through a SOCKS5 proxy, optionally by using some credentials, specified
    /// as a tuple of `(username, password)`
    ///
    /// This function internally spawns a new thread that will monitor incoming messages from the
    /// peer, and optionally reply to some of them transparently, like [pings](NetworkMessage::Ping)
    pub fn connect_proxy<T: ToTargetAddr, P: ToSocketAddrs>(
        target: T,
        proxy: P,
        credentials: Option<(&str, &str)>,
        mempool: Arc<Mempool>,
        network: Network,
    ) -> Result<Self, CompactFiltersError> {
        let socks_stream = if let Some((username, password)) = credentials {
            Socks5Stream::connect_with_password(proxy, target, username, password)?
        } else {
            Socks5Stream::connect(proxy, target)?
        };
        Peer::from_stream(socks_stream.into_inner(), mempool, network)
    }
    /// Create a [`Peer`] from an already connected TcpStream
    ///
    /// Clones the stream (one handle for the reader thread, one for writers),
    /// spawns the reader thread and performs the version/verack handshake.
    fn from_stream(
        stream: TcpStream,
        mempool: Arc<Mempool>,
        network: Network,
    ) -> Result<Self, CompactFiltersError> {
        let writer = Arc::new(Mutex::new(stream.try_clone()?));
        let responses: Arc<RwLock<ResponsesMap>> = Arc::new(RwLock::new(HashMap::new()));
        let connected = Arc::new(RwLock::new(true));
        // Hold the writer lock for the whole handshake so nothing can
        // interleave outgoing messages.
        let mut locked_writer = writer.lock().unwrap();
        let reader_thread_responses = Arc::clone(&responses);
        let reader_thread_writer = Arc::clone(&writer);
        let reader_thread_mempool = Arc::clone(&mempool);
        let reader_thread_connected = Arc::clone(&connected);
        let reader_thread = thread::spawn(move || {
            Self::reader_thread(
                network,
                stream,
                reader_thread_responses,
                reader_thread_writer,
                reader_thread_mempool,
                reader_thread_connected,
            )
        });
        // version/verack handshake
        let timestamp = SystemTime::now().duration_since(UNIX_EPOCH)?.as_secs() as i64;
        let nonce = thread_rng().gen();
        let receiver = Address::new(&locked_writer.peer_addr()?, ServiceFlags::NONE);
        let sender = Address {
            services: ServiceFlags::NONE,
            address: [0u16; 8],
            port: 0,
        };
        Self::_send(
            &mut locked_writer,
            network.magic(),
            NetworkMessage::Version(VersionMessage::new(
                ServiceFlags::WITNESS,
                timestamp,
                receiver,
                sender,
                nonce,
                "MagicalBitcoinWallet".into(),
                0,
            )),
        )?;
        // NOTE(review): `_recv(..., None)` blocks forever and `.unwrap()` only
        // panics if the queue is empty, which `_recv` with no timeout never
        // reports — confirm there is no disconnect-during-handshake path.
        let version = if let NetworkMessage::Version(version) =
            Self::_recv(&responses, "version", None)?.unwrap()
        {
            version
        } else {
            return Err(CompactFiltersError::InvalidResponse);
        };
        if let NetworkMessage::Verack = Self::_recv(&responses, "verack", None)?.unwrap() {
            Self::_send(&mut locked_writer, network.magic(), NetworkMessage::Verack)?;
        } else {
            return Err(CompactFiltersError::InvalidResponse);
        }
        // Handshake done: release the writer for normal traffic.
        std::mem::drop(locked_writer);
        Ok(Peer {
            writer,
            reader_thread,
            responses,
            connected,
            mempool,
            network,
            version,
        })
    }
    /// Send a Bitcoin network message
    fn _send(
        writer: &mut TcpStream,
        magic: u32,
        payload: NetworkMessage,
    ) -> Result<(), CompactFiltersError> {
        log::trace!("==> {:?}", payload);
        let raw_message = RawNetworkMessage { magic, payload };
        raw_message
            .consensus_encode(writer)
            .map_err(|_| CompactFiltersError::DataCorruption)?;
        Ok(())
    }
    /// Wait for a specific incoming Bitcoin message, optionally with a timeout
    ///
    /// Returns `Ok(None)` when the timeout expires with nothing received.
    fn _recv(
        responses: &Arc<RwLock<ResponsesMap>>,
        wait_for: &'static str,
        timeout: Option<Duration>,
    ) -> Result<Option<NetworkMessage>, CompactFiltersError> {
        // Grab (or lazily create) the queue+condvar pair for this command,
        // releasing the map's write lock right away.
        let message_resp = {
            let mut lock = responses.write().unwrap();
            let message_resp = lock.entry(wait_for).or_default();
            Arc::clone(&message_resp)
        };
        let (lock, cvar) = &*message_resp;
        let mut messages = lock.lock().unwrap();
        // Loop to tolerate spurious condvar wakeups.
        while messages.is_empty() {
            match timeout {
                None => messages = cvar.wait(messages).unwrap(),
                Some(t) => {
                    let result = cvar.wait_timeout(messages, t).unwrap();
                    if result.1.timed_out() {
                        return Ok(None);
                    }
                    messages = result.0;
                }
            }
        }
        Ok(messages.pop())
    }
    /// Return the [`VersionMessage`] sent by the peer
    pub fn get_version(&self) -> &VersionMessage {
        &self.version
    }
    /// Return the Bitcoin [`Network`] in use
    pub fn get_network(&self) -> Network {
        self.network
    }
    /// Return the mempool used by this peer
    pub fn get_mempool(&self) -> Arc<Mempool> {
        Arc::clone(&self.mempool)
    }
    /// Return whether or not the peer is still connected
    pub fn is_connected(&self) -> bool {
        *self.connected.read().unwrap()
    }
    /// Internal function called once the `reader_thread` is spawned
    ///
    /// Reads messages in a loop, transparently answering pings and `getdata`
    /// requests, and dispatches everything else to the per-command queues
    /// consumed by `_recv`.
    fn reader_thread(
        network: Network,
        connection: TcpStream,
        reader_thread_responses: Arc<RwLock<ResponsesMap>>,
        reader_thread_writer: Arc<Mutex<TcpStream>>,
        reader_thread_mempool: Arc<Mempool>,
        reader_thread_connected: Arc<RwLock<bool>>,
    ) {
        // On any read/write error: flag the peer as disconnected and exit the
        // loop (the `break` targets the `loop` below).
        macro_rules! check_disconnect {
            ($call:expr) => {
                match $call {
                    Ok(good) => good,
                    Err(e) => {
                        log::debug!("Error {:?}", e);
                        *reader_thread_connected.write().unwrap() = false;
                        break;
                    }
                }
            };
        }
        let mut reader = StreamReader::new(connection, None);
        loop {
            let raw_message: RawNetworkMessage = check_disconnect!(reader.read_next());
            // Drop messages carrying the wrong network magic.
            let in_message = if raw_message.magic != network.magic() {
                continue;
            } else {
                raw_message.payload
            };
            log::trace!("<== {:?}", in_message);
            // A few messages are handled here directly.
            match in_message {
                NetworkMessage::Ping(nonce) => {
                    check_disconnect!(Self::_send(
                        &mut reader_thread_writer.lock().unwrap(),
                        network.magic(),
                        NetworkMessage::Pong(nonce),
                    ));
                    continue;
                }
                NetworkMessage::Alert(_) => continue,
                NetworkMessage::GetData(ref inv) => {
                    // Serve what we have from the shared mempool, reply
                    // `notfound` for the rest.
                    let (found, not_found): (Vec<_>, Vec<_>) = inv
                        .into_iter()
                        .map(|item| (*item, reader_thread_mempool.get_tx(item)))
                        .partition(|(_, d)| d.is_some());
                    for (_, found_tx) in found {
                        check_disconnect!(Self::_send(
                            &mut reader_thread_writer.lock().unwrap(),
                            network.magic(),
                            NetworkMessage::Tx(found_tx.unwrap()),
                        ));
                    }
                    if !not_found.is_empty() {
                        check_disconnect!(Self::_send(
                            &mut reader_thread_writer.lock().unwrap(),
                            network.magic(),
                            NetworkMessage::NotFound(
                                not_found.into_iter().map(|(i, _)| i).collect(),
                            ),
                        ));
                    }
                }
                _ => {}
            }
            // Push the message into its per-command queue and wake up waiters.
            let message_resp = {
                let mut lock = reader_thread_responses.write().unwrap();
                let message_resp = lock.entry(in_message.cmd()).or_default();
                Arc::clone(&message_resp)
            };
            let (lock, cvar) = &*message_resp;
            let mut messages = lock.lock().unwrap();
            messages.push(in_message);
            cvar.notify_all();
        }
    }
    /// Send a raw Bitcoin message to the peer
    pub fn send(&self, payload: NetworkMessage) -> Result<(), CompactFiltersError> {
        let mut writer = self.writer.lock().unwrap();
        Self::_send(&mut writer, self.network.magic(), payload)
    }
    /// Waits for a specific incoming Bitcoin message, optionally with a timeout
    pub fn recv(
        &self,
        wait_for: &'static str,
        timeout: Option<Duration>,
    ) -> Result<Option<NetworkMessage>, CompactFiltersError> {
        Self::_recv(&self.responses, wait_for, timeout)
    }
}
/// BIP157 message exchange implemented on top of a [`Peer`].
pub trait CompactFiltersPeer {
    /// Request the filter-header checkpoints up to `stop_hash`.
    fn get_cf_checkpt(
        &self,
        filter_type: u8,
        stop_hash: BlockHash,
    ) -> Result<CFCheckpt, CompactFiltersError>;
    /// Request the compact filter headers for the given block range.
    fn get_cf_headers(
        &self,
        filter_type: u8,
        start_height: u32,
        stop_hash: BlockHash,
    ) -> Result<CFHeaders, CompactFiltersError>;
    /// Ask the peer to start streaming the filters for the given block range.
    fn get_cf_filters(
        &self,
        filter_type: u8,
        start_height: u32,
        stop_hash: BlockHash,
    ) -> Result<(), CompactFiltersError>;
    /// Wait for (and return) the next `cfilter` message received.
    fn pop_cf_filter_resp(&self) -> Result<CFilter, CompactFiltersError>;
}
impl CompactFiltersPeer for Peer {
fn get_cf_checkpt(
&self,
filter_type: u8,
stop_hash: BlockHash,
) -> Result<CFCheckpt, CompactFiltersError> {
self.send(NetworkMessage::GetCFCheckpt(GetCFCheckpt {
filter_type,
stop_hash,
}))?;
let response = self
.recv("cfcheckpt", Some(Duration::from_secs(TIMEOUT_SECS)))?
.ok_or(CompactFiltersError::Timeout)?;
let response = match response {
NetworkMessage::CFCheckpt(response) => response,
_ => return Err(CompactFiltersError::InvalidResponse),
};
if response.filter_type != filter_type {
return Err(CompactFiltersError::InvalidResponse);
}
Ok(response)
}
fn get_cf_headers(
&self,
filter_type: u8,
start_height: u32,
stop_hash: BlockHash,
) -> Result<CFHeaders, CompactFiltersError> {
self.send(NetworkMessage::GetCFHeaders(GetCFHeaders {
filter_type,
start_height,
stop_hash,
}))?;
let response = self
.recv("cfheaders", Some(Duration::from_secs(TIMEOUT_SECS)))?
.ok_or(CompactFiltersError::Timeout)?;
let response = match response {
NetworkMessage::CFHeaders(response) => response,
_ => return Err(CompactFiltersError::InvalidResponse),
};
if response.filter_type != filter_type {
return Err(CompactFiltersError::InvalidResponse);
}
Ok(response)
}
fn pop_cf_filter_resp(&self) -> Result<CFilter, CompactFiltersError> {
let response = self
.recv("cfilter", Some(Duration::from_secs(TIMEOUT_SECS)))?
.ok_or(CompactFiltersError::Timeout)?;
let response = match response {
NetworkMessage::CFilter(response) => response,
_ => return Err(CompactFiltersError::InvalidResponse),
};
Ok(response)
}
fn get_cf_filters(
&self,
filter_type: u8,
start_height: u32,
stop_hash: BlockHash,
) -> Result<(), CompactFiltersError> {
self.send(NetworkMessage::GetCFilters(GetCFilters {
filter_type,
start_height,
stop_hash,
}))?;
Ok(())
}
}
/// Inventory-based requests (blocks, mempool, broadcast) on a [`Peer`].
pub trait InvPeer {
    /// Download a full (witness) block; `Ok(None)` on timeout.
    fn get_block(&self, block_hash: BlockHash) -> Result<Option<Block>, CompactFiltersError>;
    /// Download the peer's mempool into the shared [`Mempool`].
    fn ask_for_mempool(&self) -> Result<(), CompactFiltersError>;
    /// Send a transaction to the peer, keeping a local copy in the mempool.
    fn broadcast_tx(&self, tx: Transaction) -> Result<(), CompactFiltersError>;
}
impl InvPeer for Peer {
    /// Request a single witness block; `Ok(None)` means the request timed out.
    fn get_block(&self, block_hash: BlockHash) -> Result<Option<Block>, CompactFiltersError> {
        let request = vec![Inventory::WitnessBlock(block_hash)];
        self.send(NetworkMessage::GetData(request))?;

        let reply = self.recv("block", Some(Duration::from_secs(TIMEOUT_SECS)))?;
        match reply {
            None => Ok(None),
            Some(NetworkMessage::Block(block)) => Ok(Some(block)),
            Some(_) => Err(CompactFiltersError::InvalidResponse),
        }
    }

    /// Pull the peer's mempool: `mempool` -> `inv` -> `getdata` -> `tx`*.
    fn ask_for_mempool(&self) -> Result<(), CompactFiltersError> {
        self.send(NetworkMessage::MemPool)?;
        let inv = match self.recv("inv", Some(Duration::from_secs(5)))? {
            None => return Ok(()), // empty mempool
            Some(NetworkMessage::Inv(inv)) => inv,
            _ => return Err(CompactFiltersError::InvalidResponse),
        };

        // Only request transactions we don't already know about.
        let mut getdata = Vec::with_capacity(inv.len());
        for item in &inv {
            if let Inventory::Transaction(txid) = item {
                if !self.mempool.has_tx(txid) {
                    getdata.push(*item);
                }
            }
        }

        let num_txs = getdata.len();
        self.send(NetworkMessage::GetData(getdata))?;

        for _ in 0..num_txs {
            let reply = self
                .recv("tx", Some(Duration::from_secs(TIMEOUT_SECS)))?
                .ok_or(CompactFiltersError::Timeout)?;
            if let NetworkMessage::Tx(tx) = reply {
                self.mempool.add_tx(tx);
            } else {
                return Err(CompactFiltersError::InvalidResponse);
            }
        }

        Ok(())
    }

    /// Announce a transaction, keeping a copy so later `getdata` requests for
    /// it can be served from our mempool.
    fn broadcast_tx(&self, tx: Transaction) -> Result<(), CompactFiltersError> {
        self.mempool.add_tx(tx.clone());
        self.send(NetworkMessage::Tx(tx))
    }
}

View File

@@ -1,904 +0,0 @@
// Magical Bitcoin Library
// Written in 2020 by
// Alekos Filini <alekos.filini@gmail.com>
//
// Copyright (c) 2020 Magical Bitcoin
//
// Permission is hereby granted, free of charge, to any person obtaining a copy
// of this software and associated documentation files (the "Software"), to deal
// in the Software without restriction, including without limitation the rights
// to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
// copies of the Software, and to permit persons to whom the Software is
// furnished to do so, subject to the following conditions:
//
// The above copyright notice and this permission notice shall be included in all
// copies or substantial portions of the Software.
//
// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
// IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
// FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
// AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
// LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
// OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
// SOFTWARE.
use std::convert::TryInto;
use std::fmt;
use std::io::{Read, Write};
use std::marker::PhantomData;
use std::ops::Deref;
use std::sync::Arc;
use std::sync::RwLock;
use rand::distributions::Alphanumeric;
use rand::{thread_rng, Rng};
use rocksdb::{Direction, IteratorMode, ReadOptions, WriteBatch, DB};
use bitcoin::consensus::{deserialize, encode::VarInt, serialize, Decodable, Encodable};
use bitcoin::hash_types::FilterHash;
use bitcoin::hashes::hex::FromHex;
use bitcoin::hashes::{sha256d, Hash};
use bitcoin::util::bip158::BlockFilter;
use bitcoin::util::uint::Uint256;
use bitcoin::Block;
use bitcoin::BlockHash;
use bitcoin::BlockHeader;
use bitcoin::Network;
use super::CompactFiltersError;
lazy_static! {
    // Genesis blocks for the supported networks, decoded lazily from their
    // well-known raw consensus serialization. The `unwrap()`s are safe: the
    // hex literals are compile-time constants known to be valid blocks.
    static ref MAINNET_GENESIS: Block = deserialize(&Vec::<u8>::from_hex("0100000000000000000000000000000000000000000000000000000000000000000000003BA3EDFD7A7B12B27AC72C3E67768F617FC81BC3888A51323A9FB8AA4B1E5E4A29AB5F49FFFF001D1DAC2B7C0101000000010000000000000000000000000000000000000000000000000000000000000000FFFFFFFF4D04FFFF001D0104455468652054696D65732030332F4A616E2F32303039204368616E63656C6C6F72206F6E206272696E6B206F66207365636F6E64206261696C6F757420666F722062616E6B73FFFFFFFF0100F2052A01000000434104678AFDB0FE5548271967F1A67130B7105CD6A828E03909A67962E0EA1F61DEB649F6BC3F4CEF38C4F35504E51EC112DE5C384DF7BA0B8D578A4C702B6BF11D5FAC00000000").unwrap()).unwrap();
    static ref TESTNET_GENESIS: Block = deserialize(&Vec::<u8>::from_hex("0100000000000000000000000000000000000000000000000000000000000000000000003BA3EDFD7A7B12B27AC72C3E67768F617FC81BC3888A51323A9FB8AA4B1E5E4ADAE5494DFFFF001D1AA4AE180101000000010000000000000000000000000000000000000000000000000000000000000000FFFFFFFF4D04FFFF001D0104455468652054696D65732030332F4A616E2F32303039204368616E63656C6C6F72206F6E206272696E6B206F66207365636F6E64206261696C6F757420666F722062616E6B73FFFFFFFF0100F2052A01000000434104678AFDB0FE5548271967F1A67130B7105CD6A828E03909A67962E0EA1F61DEB649F6BC3F4CEF38C4F35504E51EC112DE5C384DF7BA0B8D578A4C702B6BF11D5FAC00000000").unwrap()).unwrap();
    static ref REGTEST_GENESIS: Block = deserialize(&Vec::<u8>::from_hex("0100000000000000000000000000000000000000000000000000000000000000000000003BA3EDFD7A7B12B27AC72C3E67768F617FC81BC3888A51323A9FB8AA4B1E5E4ADAE5494DFFFF7F20020000000101000000010000000000000000000000000000000000000000000000000000000000000000FFFFFFFF4D04FFFF001D0104455468652054696D65732030332F4A616E2F32303039204368616E63656C6C6F72206F6E206272696E6B206F66207365636F6E64206261696C6F757420666F722062616E6B73FFFFFFFF0100F2052A01000000434104678AFDB0FE5548271967F1A67130B7105CD6A828E03909A67962E0EA1F61DEB649F6BC3F4CEF38C4F35504E51EC112DE5C384DF7BA0B8D578A4C702B6BF11D5FAC00000000").unwrap()).unwrap();
}
/// Marker trait for the type-state of a [`ChainStore`].
pub trait StoreType: Default + fmt::Debug {}

/// Type-state for the main, persistent header store.
#[derive(Default, Debug)]
pub struct Full;
impl StoreType for Full {}
/// Type-state for a temporary header store (see `start_snapshot`), merged
/// back into the `Full` store only if it accumulates more work.
#[derive(Default, Debug)]
pub struct Snapshot;
impl StoreType for Snapshot {}
/// Typed key for the rocksdb store; each variant maps to a one-byte prefix
/// (see `get_prefix`) and `None` payloads produce a prefix-only key used for
/// range scans.
pub enum StoreEntry {
    // Header + accumulated work, keyed by height
    BlockHeader(Option<usize>),
    // Full block, keyed by height
    Block(Option<usize>),
    // Reverse index: block hash -> height
    BlockHeaderIndex(Option<BlockHash>),
    // Compact-filter bundle, keyed by (filter_type, bundle index)
    CFilterTable((u8, Option<usize>)),
}
impl StoreEntry {
    /// Returns the single-byte namespace prefix for this entry type.
    pub fn get_prefix(&self) -> Vec<u8> {
        let prefix: &[u8] = match self {
            StoreEntry::BlockHeader(_) => b"z",
            StoreEntry::Block(_) => b"x",
            StoreEntry::BlockHeaderIndex(_) => b"i",
            StoreEntry::CFilterTable(_) => b"t",
        };

        prefix.to_vec()
    }

    /// Builds the full database key: namespace prefix followed by the
    /// big-endian-encoded payload, when one is present. Entries without a
    /// payload yield a prefix-only key suitable for range iteration.
    pub fn get_key(&self) -> Vec<u8> {
        let mut key = self.get_prefix();
        match self {
            StoreEntry::BlockHeader(Some(height)) | StoreEntry::Block(Some(height)) => {
                key.extend_from_slice(&height.to_be_bytes());
            }
            StoreEntry::BlockHeaderIndex(Some(hash)) => {
                key.extend_from_slice(&hash.into_inner());
            }
            StoreEntry::CFilterTable((filter_type, bundle_index)) => {
                key.push(*filter_type);
                if let Some(index) = bundle_index {
                    key.extend_from_slice(&index.to_be_bytes());
                }
            }
            _ => {}
        }

        key
    }
}
/// (De)serialization of values stored in the database.
pub trait SerializeDb: Sized {
    /// Encodes `self` into the raw bytes stored in rocksdb.
    fn serialize(&self) -> Vec<u8>;
    /// Decodes a value previously written with [`SerializeDb::serialize`].
    fn deserialize(data: &[u8]) -> Result<Self, CompactFiltersError>;
}
/// Blanket impl: anything with Bitcoin consensus encoding can be stored.
impl<T> SerializeDb for T
where
    T: Encodable + Decodable,
{
    fn serialize(&self) -> Vec<u8> {
        serialize(self)
    }

    fn deserialize(data: &[u8]) -> Result<Self, CompactFiltersError> {
        // Any parse failure means the database content is corrupted
        deserialize(data).map_err(|_| CompactFiltersError::DataCorruption)
    }
}
impl Encodable for FilterHeader {
    fn consensus_encode<W: Write>(
        &self,
        mut e: W,
    ) -> Result<usize, bitcoin::consensus::encode::Error> {
        // Encoded as the previous header hash followed by the filter hash
        let len = self.prev_header_hash.consensus_encode(&mut e)?
            + self.filter_hash.consensus_encode(&mut e)?;
        Ok(len)
    }
}
impl Decodable for FilterHeader {
    fn consensus_decode<D: Read>(mut d: D) -> Result<Self, bitcoin::consensus::encode::Error> {
        // Fields are decoded in the same order they are encoded
        Ok(FilterHeader {
            prev_header_hash: FilterHeaderHash::consensus_decode(&mut d)?,
            filter_hash: FilterHash::consensus_decode(&mut d)?,
        })
    }
}
impl Encodable for BundleStatus {
    fn consensus_encode<W: Write>(
        &self,
        mut e: W,
    ) -> Result<usize, bitcoin::consensus::encode::Error> {
        // Shared encoding for the three variants that carry a filter list:
        // a VarInt count followed by each raw filter.
        fn encode_filters<W: Write>(
            e: &mut W,
            cf_filters: &[Vec<u8>],
        ) -> Result<usize, bitcoin::consensus::encode::Error> {
            let mut written = VarInt(cf_filters.len() as u64).consensus_encode(&mut *e)?;
            for filter in cf_filters {
                written += filter.consensus_encode(&mut *e)?;
            }
            Ok(written)
        }

        // One tag byte identifies the variant, followed by its payload
        match self {
            BundleStatus::Init => 0x00u8.consensus_encode(&mut e),
            BundleStatus::CFHeaders { cf_headers } => {
                let mut written = 0x01u8.consensus_encode(&mut e)?;
                written += VarInt(cf_headers.len() as u64).consensus_encode(&mut e)?;
                for header in cf_headers {
                    written += header.consensus_encode(&mut e)?;
                }
                Ok(written)
            }
            BundleStatus::CFilters { cf_filters } => {
                Ok(0x02u8.consensus_encode(&mut e)? + encode_filters(&mut e, cf_filters)?)
            }
            BundleStatus::Processed { cf_filters } => {
                Ok(0x03u8.consensus_encode(&mut e)? + encode_filters(&mut e, cf_filters)?)
            }
            BundleStatus::Pruned => 0x04u8.consensus_encode(&mut e),
            BundleStatus::Tip { cf_filters } => {
                Ok(0x05u8.consensus_encode(&mut e)? + encode_filters(&mut e, cf_filters)?)
            }
        }
    }
}
impl Decodable for BundleStatus {
fn consensus_decode<D: Read>(mut d: D) -> Result<Self, bitcoin::consensus::encode::Error> {
let byte_type = u8::consensus_decode(&mut d)?;
match byte_type {
0x00 => Ok(BundleStatus::Init),
0x01 => {
let num = VarInt::consensus_decode(&mut d)?;
let num = num.0 as usize;
let mut cf_headers = Vec::with_capacity(num);
for _ in 0..num {
cf_headers.push(FilterHeader::consensus_decode(&mut d)?);
}
Ok(BundleStatus::CFHeaders { cf_headers })
}
0x02 => {
let num = VarInt::consensus_decode(&mut d)?;
let num = num.0 as usize;
let mut cf_filters = Vec::with_capacity(num);
for _ in 0..num {
cf_filters.push(Vec::<u8>::consensus_decode(&mut d)?);
}
Ok(BundleStatus::CFilters { cf_filters })
}
0x03 => {
let num = VarInt::consensus_decode(&mut d)?;
let num = num.0 as usize;
let mut cf_filters = Vec::with_capacity(num);
for _ in 0..num {
cf_filters.push(Vec::<u8>::consensus_decode(&mut d)?);
}
Ok(BundleStatus::Processed { cf_filters })
}
0x04 => Ok(BundleStatus::Pruned),
0x05 => {
let num = VarInt::consensus_decode(&mut d)?;
let num = num.0 as usize;
let mut cf_filters = Vec::with_capacity(num);
for _ in 0..num {
cf_filters.push(Vec::<u8>::consensus_decode(&mut d)?);
}
Ok(BundleStatus::Tip { cf_filters })
}
_ => Err(bitcoin::consensus::encode::Error::ParseFailed(
"Invalid byte type",
)),
}
}
}
/// Block-header store backed by a shared rocksdb instance.
///
/// The type parameter selects between the main store (`Full`) and a
/// temporary snapshot (`Snapshot`); each instance reads and writes its own
/// column family (`cf_name`).
pub struct ChainStore<T: StoreType> {
    // Database shared with snapshots and the CFStore
    store: Arc<RwLock<DB>>,
    // Column family this instance operates on
    cf_name: String,
    // Lowest height contained in this store (0 for Full)
    min_height: usize,
    network: Network,
    phantom: PhantomData<T>,
}
impl ChainStore<Full> {
    /// Opens (or initializes) the full header store, inserting the genesis
    /// header and its hash->height index entry for `network` if absent.
    pub fn new(store: DB, network: Network) -> Result<Self, CompactFiltersError> {
        let genesis = match network {
            Network::Bitcoin => MAINNET_GENESIS.deref(),
            Network::Testnet => TESTNET_GENESIS.deref(),
            Network::Regtest => REGTEST_GENESIS.deref(),
        };

        let cf_name = "default".to_string();
        let cf_handle = store.cf_handle(&cf_name).unwrap();

        let genesis_key = StoreEntry::BlockHeader(Some(0)).get_key();

        if store.get_pinned_cf(cf_handle, &genesis_key)?.is_none() {
            let mut batch = WriteBatch::default();
            // Each header is stored together with its accumulated work
            batch.put_cf(
                cf_handle,
                genesis_key,
                (genesis.header, genesis.header.work()).serialize(),
            );
            // Reverse index: block hash -> height (big-endian)
            batch.put_cf(
                cf_handle,
                StoreEntry::BlockHeaderIndex(Some(genesis.block_hash())).get_key(),
                &0usize.to_be_bytes(),
            );
            store.write(batch)?;
        }

        Ok(ChainStore {
            store: Arc::new(RwLock::new(store)),
            cf_name,
            min_height: 0,
            network,
            phantom: PhantomData,
        })
    }

    /// Builds a block-locator list of (hash, height), walking back from the
    /// tip with exponentially increasing steps once more than 10 entries
    /// have been collected — the shape expected by `getheaders`.
    pub fn get_locators(&self) -> Result<Vec<(BlockHash, usize)>, CompactFiltersError> {
        let mut step = 1;
        let mut index = self.get_height()?;
        let mut answer = Vec::new();

        let store_read = self.store.read().unwrap();
        let cf_handle = store_read.cf_handle(&self.cf_name).unwrap();

        loop {
            if answer.len() > 10 {
                step *= 2;
            }

            // `unwrap()` relies on every height <= tip having a stored header
            let (header, _): (BlockHeader, Uint256) = SerializeDb::deserialize(
                &store_read
                    .get_pinned_cf(cf_handle, StoreEntry::BlockHeader(Some(index)).get_key())?
                    .unwrap(),
            )?;
            answer.push((header.block_hash(), index));

            if let Some(new_index) = index.checked_sub(step) {
                index = new_index;
            } else {
                // Reached (or passed) the genesis block
                break;
            }
        }

        Ok(answer)
    }

    /// Creates a snapshot column family seeded with the header at height
    /// `from`; new headers can then be applied to it independently of the
    /// main chain until it is applied (if heavier) or discarded.
    pub fn start_snapshot(&self, from: usize) -> Result<ChainStore<Snapshot>, CompactFiltersError> {
        // Random suffix avoids clashes with other live snapshots
        let new_cf_name: String = thread_rng().sample_iter(&Alphanumeric).take(16).collect();
        let new_cf_name = format!("_headers:{}", new_cf_name);

        let mut write_store = self.store.write().unwrap();

        write_store.create_cf(&new_cf_name, &Default::default())?;

        let cf_handle = write_store.cf_handle(&self.cf_name).unwrap();
        let new_cf_handle = write_store.cf_handle(&new_cf_name).unwrap();

        let (header, work): (BlockHeader, Uint256) = SerializeDb::deserialize(
            &write_store
                .get_pinned_cf(cf_handle, StoreEntry::BlockHeader(Some(from)).get_key())?
                .ok_or(CompactFiltersError::DataCorruption)?,
        )?;

        let mut batch = WriteBatch::default();
        batch.put_cf(
            new_cf_handle,
            StoreEntry::BlockHeaderIndex(Some(header.block_hash())).get_key(),
            &from.to_be_bytes(),
        );
        batch.put_cf(
            new_cf_handle,
            StoreEntry::BlockHeader(Some(from)).get_key(),
            (header, work).serialize(),
        );
        write_store.write(batch)?;

        let store = Arc::clone(&self.store);
        Ok(ChainStore {
            store,
            cf_name: new_cf_name,
            min_height: from,
            network: self.network,
            phantom: PhantomData,
        })
    }

    /// Recovers a snapshot column family left over from a previous run:
    /// applies it if it has more accumulated work than the main chain,
    /// drops it otherwise (or if it is empty).
    pub fn recover_snapshot(&self, cf_name: &str) -> Result<(), CompactFiltersError> {
        let mut write_store = self.store.write().unwrap();
        let snapshot_cf_handle = write_store.cf_handle(cf_name).unwrap();
        let prefix = StoreEntry::BlockHeader(None).get_key();
        let mut iterator = write_store.prefix_iterator_cf(snapshot_cf_handle, prefix);

        // The first BlockHeader key encodes the snapshot's min_height
        let min_height = match iterator
            .next()
            .and_then(|(k, _)| k[1..].try_into().ok())
            .map(|bytes| usize::from_be_bytes(bytes))
        {
            None => {
                // Empty snapshot: nothing to recover, just drop the cf.
                // Explicit drops release the borrow on `write_store`.
                std::mem::drop(iterator);
                write_store.drop_cf(cf_name).ok();

                return Ok(());
            }
            Some(x) => x,
        };

        std::mem::drop(iterator);
        std::mem::drop(write_store);

        let snapshot = ChainStore {
            store: Arc::clone(&self.store),
            cf_name: cf_name.into(),
            min_height,
            network: self.network,
            phantom: PhantomData,
        };
        if snapshot.work()? > self.work()? {
            self.apply_snapshot(snapshot)?;
        }

        Ok(())
    }

    /// Replaces the main chain's headers from the snapshot's `min_height`
    /// upwards with the snapshot's contents, deletes the orphaned index
    /// entries and full blocks, then drops the snapshot column family.
    // NOTE(review): parameter name "snaphost" is a pre-existing typo of
    // "snapshot" (a rename would be behavior-neutral but is out of scope here).
    pub fn apply_snapshot(
        &self,
        snaphost: ChainStore<Snapshot>,
    ) -> Result<(), CompactFiltersError> {
        let mut batch = WriteBatch::default();

        let read_store = self.store.read().unwrap();
        let cf_handle = read_store.cf_handle(&self.cf_name).unwrap();
        let snapshot_cf_handle = read_store.cf_handle(&snaphost.cf_name).unwrap();

        let from_key = StoreEntry::BlockHeader(Some(snaphost.min_height)).get_key();
        let to_key = StoreEntry::BlockHeader(Some(usize::MAX)).get_key();

        let mut opts = ReadOptions::default();
        opts.set_iterate_upper_bound(to_key.clone());

        log::debug!("Removing items");
        batch.delete_range_cf(cf_handle, &from_key, &to_key);
        // Also remove the hash -> height index entries of the replaced headers
        for (_, v) in read_store.iterator_cf_opt(
            cf_handle,
            opts,
            IteratorMode::From(&from_key, Direction::Forward),
        ) {
            let (header, _): (BlockHeader, Uint256) = SerializeDb::deserialize(&v)?;

            batch.delete_cf(
                cf_handle,
                StoreEntry::BlockHeaderIndex(Some(header.block_hash())).get_key(),
            );
        }

        // Delete full blocks overriden by snapshot
        let from_key = StoreEntry::Block(Some(snaphost.min_height)).get_key();
        let to_key = StoreEntry::Block(Some(usize::MAX)).get_key();
        batch.delete_range(&from_key, &to_key);

        log::debug!("Copying over new items");
        for (k, v) in read_store.iterator_cf(snapshot_cf_handle, IteratorMode::Start) {
            batch.put_cf(cf_handle, k, v);
        }

        read_store.write(batch)?;

        // Release the read lock (and cf handles borrowing from it) before
        // taking the write lock below, otherwise we'd deadlock
        std::mem::drop(snapshot_cf_handle);
        std::mem::drop(cf_handle);
        std::mem::drop(read_store);

        self.store.write().unwrap().drop_cf(&snaphost.cf_name)?;

        Ok(())
    }

    /// Looks up the height of `block_hash` via the reverse index; `Ok(None)`
    /// if the hash is unknown.
    pub fn get_height_for(
        &self,
        block_hash: &BlockHash,
    ) -> Result<Option<usize>, CompactFiltersError> {
        let read_store = self.store.read().unwrap();
        let cf_handle = read_store.cf_handle(&self.cf_name).unwrap();

        let key = StoreEntry::BlockHeaderIndex(Some(block_hash.clone())).get_key();
        let data = read_store.get_pinned_cf(cf_handle, key)?;
        Ok(data
            .map(|data| {
                Ok::<_, CompactFiltersError>(usize::from_be_bytes(
                    data.as_ref()
                        .try_into()
                        .map_err(|_| CompactFiltersError::DataCorruption)?,
                ))
            })
            .transpose()?)
    }

    /// Returns the hash of the stored header at `height`, if any.
    pub fn get_block_hash(&self, height: usize) -> Result<Option<BlockHash>, CompactFiltersError> {
        let read_store = self.store.read().unwrap();
        let cf_handle = read_store.cf_handle(&self.cf_name).unwrap();

        let key = StoreEntry::BlockHeader(Some(height)).get_key();
        let data = read_store.get_pinned_cf(cf_handle, key)?;
        Ok(data
            .map(|data| {
                let (header, _): (BlockHeader, Uint256) =
                    deserialize(&data).map_err(|_| CompactFiltersError::DataCorruption)?;
                Ok::<_, CompactFiltersError>(header.block_hash())
            })
            .transpose()?)
    }

    /// Persists a full block, keyed by its height (default column family).
    pub fn save_full_block(&self, block: &Block, height: usize) -> Result<(), CompactFiltersError> {
        let key = StoreEntry::Block(Some(height)).get_key();
        self.store.read().unwrap().put(key, block.serialize())?;

        Ok(())
    }

    /// Loads a previously saved full block by height, if present.
    pub fn get_full_block(&self, height: usize) -> Result<Option<Block>, CompactFiltersError> {
        let read_store = self.store.read().unwrap();

        let key = StoreEntry::Block(Some(height)).get_key();
        let opt_block = read_store.get_pinned(key)?;

        Ok(opt_block
            .map(|data| deserialize(&data))
            .transpose()
            .map_err(|_| CompactFiltersError::DataCorruption)?)
    }

    /// Deletes the stored full blocks in the height range [0, height).
    pub fn delete_blocks_until(&self, height: usize) -> Result<(), CompactFiltersError> {
        let from_key = StoreEntry::Block(Some(0)).get_key();
        let to_key = StoreEntry::Block(Some(height)).get_key();

        let mut batch = WriteBatch::default();
        batch.delete_range(&from_key, &to_key);

        self.store.read().unwrap().write(batch)?;

        Ok(())
    }

    /// Returns all stored full blocks with their heights.
    pub fn iter_full_blocks(&self) -> Result<Vec<(usize, Block)>, CompactFiltersError> {
        let read_store = self.store.read().unwrap();

        let prefix = StoreEntry::Block(None).get_key();

        let iterator = read_store.prefix_iterator(&prefix);
        // FIXME: we have to filter manually because rocksdb sometimes returns stuff that doesn't
        // have the right prefix
        iterator
            .filter(|(k, _)| k.starts_with(&prefix))
            .map(|(k, v)| {
                // Key layout: 1 prefix byte followed by the big-endian height
                let height: usize = usize::from_be_bytes(
                    k[1..]
                        .try_into()
                        .map_err(|_| CompactFiltersError::DataCorruption)?,
                );
                let block = SerializeDb::deserialize(&v)?;

                Ok((height, block))
            })
            .collect::<Result<_, _>>()
    }
}
impl<T: StoreType> ChainStore<T> {
    /// Returns the accumulated work at this store's tip (zero when empty).
    /// Relies on the big-endian height keys sorting in height order, so
    /// `last()` is the tip.
    pub fn work(&self) -> Result<Uint256, CompactFiltersError> {
        let read_store = self.store.read().unwrap();
        let cf_handle = read_store.cf_handle(&self.cf_name).unwrap();

        let prefix = StoreEntry::BlockHeader(None).get_key();
        let iterator = read_store.prefix_iterator_cf(cf_handle, prefix);

        Ok(iterator
            .last()
            .map(|(_, v)| -> Result<_, CompactFiltersError> {
                let (_, work): (BlockHeader, Uint256) = SerializeDb::deserialize(&v)?;

                Ok(work)
            })
            .transpose()?
            .unwrap_or_default())
    }

    /// Returns the height of this store's tip (0 when empty).
    pub fn get_height(&self) -> Result<usize, CompactFiltersError> {
        let read_store = self.store.read().unwrap();
        let cf_handle = read_store.cf_handle(&self.cf_name).unwrap();

        let prefix = StoreEntry::BlockHeader(None).get_key();
        let iterator = read_store.prefix_iterator_cf(cf_handle, prefix);

        Ok(iterator
            .last()
            .map(|(k, _)| -> Result<_, CompactFiltersError> {
                // Key layout: 1 prefix byte followed by the big-endian height
                let height = usize::from_be_bytes(
                    k[1..]
                        .try_into()
                        .map_err(|_| CompactFiltersError::DataCorruption)?,
                );

                Ok(height)
            })
            .transpose()?
            .unwrap_or_default())
    }

    /// Returns the hash of this store's tip header, or `None` when empty.
    pub fn get_tip_hash(&self) -> Result<Option<BlockHash>, CompactFiltersError> {
        let read_store = self.store.read().unwrap();
        let cf_handle = read_store.cf_handle(&self.cf_name).unwrap();

        let prefix = StoreEntry::BlockHeader(None).get_key();
        let iterator = read_store.prefix_iterator_cf(cf_handle, prefix);

        Ok(iterator
            .last()
            .map(|(_, v)| -> Result<_, CompactFiltersError> {
                let (header, _): (BlockHeader, Uint256) = SerializeDb::deserialize(&v)?;

                Ok(header.block_hash())
            })
            .transpose()?)
    }

    /// Appends `headers` on top of the header at height `from`, verifying
    /// the `prev_blockhash` linkage and accumulating work. Returns the new
    /// tip hash; fails with `InvalidHeaders` if the chain doesn't connect.
    pub fn apply(
        &mut self,
        from: usize,
        headers: Vec<BlockHeader>,
    ) -> Result<BlockHash, CompactFiltersError> {
        let mut batch = WriteBatch::default();

        let read_store = self.store.read().unwrap();
        let cf_handle = read_store.cf_handle(&self.cf_name).unwrap();

        let (mut last_hash, mut accumulated_work) = read_store
            .get_pinned_cf(cf_handle, StoreEntry::BlockHeader(Some(from)).get_key())?
            .map(|result| {
                let (header, work): (BlockHeader, Uint256) = SerializeDb::deserialize(&result)?;
                Ok::<_, CompactFiltersError>((header.block_hash(), work))
            })
            .transpose()?
            .ok_or(CompactFiltersError::DataCorruption)?;

        for (index, header) in headers.into_iter().enumerate() {
            // Each header must link to the previous one
            if header.prev_blockhash != last_hash {
                return Err(CompactFiltersError::InvalidHeaders);
            }

            last_hash = header.block_hash();
            accumulated_work = accumulated_work + header.work();

            let height = from + index + 1;
            batch.put_cf(
                cf_handle,
                StoreEntry::BlockHeaderIndex(Some(header.block_hash())).get_key(),
                &(height).to_be_bytes(),
            );
            batch.put_cf(
                cf_handle,
                StoreEntry::BlockHeader(Some(height)).get_key(),
                (header, accumulated_work).serialize(),
            );
        }

        // Release the read lock before acquiring the write lock below
        std::mem::drop(cf_handle);
        std::mem::drop(read_store);

        self.store.write().unwrap().write(batch)?;
        Ok(last_hash)
    }
}
impl<T: StoreType> fmt::Debug for ChainStore<T> {
    /// Debug output includes the store-type marker plus the live height and
    /// tip hash (both queried from the database on the fly).
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        let struct_name = format!("ChainStore<{:?}>", T::default());
        f.debug_struct(&struct_name)
            .field("cf_name", &self.cf_name)
            .field("min_height", &self.min_height)
            .field("network", &self.network)
            .field("headers_height", &self.get_height())
            .field("tip_hash", &self.get_tip_hash())
            .finish()
    }
}
/// Alias to distinguish filter *header* hashes from plain filter hashes in
/// signatures (both are `FilterHash` underneath).
pub type FilterHeaderHash = FilterHash;

/// A compact-filter header: a filter hash chained to the previous header.
#[derive(Debug, Clone)]
pub struct FilterHeader {
    // Hash of the previous filter header in the chain
    prev_header_hash: FilterHeaderHash,
    // Hash of this block's filter
    filter_hash: FilterHash,
}
impl FilterHeader {
    /// Computes this header's hash as `sha256d(filter_hash || prev_header_hash)`.
    fn header_hash(&self) -> FilterHeaderHash {
        let mut data = Vec::with_capacity(64);
        data.extend_from_slice(&self.filter_hash.into_inner());
        data.extend_from_slice(&self.prev_header_hash);

        sha256d::Hash::hash(&data).into()
    }
}
/// Lifecycle of a 1000-filter bundle as it moves through the sync pipeline
/// (see `CFSync::capture_thread_for_sync` for the state transitions).
pub enum BundleStatus {
    // Nothing downloaded yet
    Init,
    // Filter headers received and verified against the checkpoint
    CFHeaders { cf_headers: Vec<FilterHeader> },
    // Raw filters received and matched against their headers
    CFilters { cf_filters: Vec<Vec<u8>> },
    // Filters scanned; matching blocks downloaded
    Processed { cf_filters: Vec<Vec<u8>> },
    // Bundle at the chain tip; filters kept for re-scanning
    Tip { cf_filters: Vec<Vec<u8>> },
    // Filters discarded after processing
    Pruned,
}
/// Compact-filter store, sharing the rocksdb instance with the header store.
pub struct CFStore {
    store: Arc<RwLock<DB>>,
    // BIP158 filter type this store holds (0x00 = basic)
    filter_type: u8,
}

/// Stored value for a bundle: its status plus the checkpoint filter header
/// the bundle starts from.
type BundleEntry = (BundleStatus, FilterHeaderHash);
impl CFStore {
    /// Creates a filter store on top of the header store's database,
    /// inserting the genesis block's filter entry if it is missing.
    pub fn new(
        headers_store: &ChainStore<Full>,
        filter_type: u8,
    ) -> Result<Self, CompactFiltersError> {
        let cf_store = CFStore {
            store: Arc::clone(&headers_store.store),
            filter_type,
        };

        let genesis = match headers_store.network {
            Network::Bitcoin => MAINNET_GENESIS.deref(),
            Network::Testnet => TESTNET_GENESIS.deref(),
            Network::Regtest => REGTEST_GENESIS.deref(),
        };

        // The genesis block spends no outputs, so the lookup closure is
        // never expected to succeed
        let filter = BlockFilter::new_script_filter(genesis, |utxo| {
            Err(bitcoin::util::bip158::Error::UtxoMissing(*utxo))
        })?;
        let first_key = StoreEntry::CFilterTable((filter_type, Some(0))).get_key();

        // Add the genesis' filter
        {
            let read_store = cf_store.store.read().unwrap();
            if read_store.get_pinned(&first_key)?.is_none() {
                read_store.put(
                    &first_key,
                    (BundleStatus::Init, filter.filter_id(&FilterHash::default())).serialize(),
                )?;
            }
        }

        Ok(cf_store)
    }

    /// Returns the BIP158 filter type this store was created with.
    pub fn get_filter_type(&self) -> u8 {
        self.filter_type
    }

    /// Returns every stored bundle (status + checkpoint), in key order.
    pub fn get_bundles(&self) -> Result<Vec<BundleEntry>, CompactFiltersError> {
        let read_store = self.store.read().unwrap();

        let prefix = StoreEntry::CFilterTable((self.filter_type, None)).get_key();
        let iterator = read_store.prefix_iterator(&prefix);

        // FIXME: we have to filter manually because rocksdb sometimes returns stuff that doesn't
        // have the right prefix
        iterator
            .filter(|(k, _)| k.starts_with(&prefix))
            .map(|(_, data)| BundleEntry::deserialize(&data))
            .collect::<Result<_, _>>()
    }

    /// Returns the checkpoint headers of every bundle except the genesis
    /// one (hence the `skip(1)`), matching the `cfcheckpt` layout.
    pub fn get_checkpoints(&self) -> Result<Vec<FilterHash>, CompactFiltersError> {
        let read_store = self.store.read().unwrap();

        let prefix = StoreEntry::CFilterTable((self.filter_type, None)).get_key();
        let iterator = read_store.prefix_iterator(&prefix);

        // FIXME: we have to filter manually because rocksdb sometimes returns stuff that doesn't
        // have the right prefix
        Ok(iterator
            .filter(|(k, _)| k.starts_with(&prefix))
            .skip(1)
            .map(|(_, data)| Ok::<_, CompactFiltersError>(BundleEntry::deserialize(&data)?.1))
            .collect::<Result<_, _>>()?)
    }

    /// Replaces the stored checkpoints with a fresh `cfcheckpt` response,
    /// resetting to `Init` every bundle whose checkpoint changed (bundles
    /// already at `Tip` status are kept as-is).
    pub fn replace_checkpoints(
        &self,
        checkpoints: Vec<FilterHash>,
    ) -> Result<(), CompactFiltersError> {
        let current_checkpoints = self.get_checkpoints()?;

        // Find the first index where ours and theirs diverge.
        // NOTE(review): when the two lists are fully equal, `equal_bundles`
        // ends up at the last compared index rather than past it, so the
        // final bundle is re-examined below — harmless, but worth confirming.
        let mut equal_bundles = 0;
        for (index, (our, their)) in current_checkpoints
            .iter()
            .zip(checkpoints.iter())
            .enumerate()
        {
            equal_bundles = index;

            if our != their {
                break;
            }
        }

        let read_store = self.store.read().unwrap();
        let mut batch = WriteBatch::default();

        for (index, filter_hash) in checkpoints.iter().enumerate().skip(equal_bundles) {
            let key = StoreEntry::CFilterTable((self.filter_type, Some(index + 1))).get_key(); // +1 to skip the genesis' filter

            if let Some((BundleStatus::Tip { .. }, _)) = read_store
                .get_pinned(&key)?
                .map(|data| BundleEntry::deserialize(&data))
                .transpose()?
            {
                // NOTE(review): leftover debug output — consider log::debug!
                println!("Keeping bundle #{} as Tip", index);
            } else {
                batch.put(&key, (BundleStatus::Init, *filter_hash).serialize());
            }
        }

        read_store.write(batch)?;

        Ok(())
    }

    /// Moves `bundle` to `CFHeaders` status: chains `filter_headers` from
    /// `checkpoint_hash` and, if the next bundle exists, verifies that the
    /// resulting last header connects to the next checkpoint.
    pub fn advance_to_cf_headers(
        &self,
        bundle: usize,
        checkpoint_hash: FilterHeaderHash,
        filter_headers: Vec<FilterHash>,
    ) -> Result<BundleStatus, CompactFiltersError> {
        let mut last_hash = checkpoint_hash;
        let cf_headers = filter_headers
            .into_iter()
            .map(|filter_hash| {
                let filter_header = FilterHeader {
                    prev_header_hash: last_hash,
                    filter_hash,
                };
                last_hash = filter_header.header_hash();

                filter_header
            })
            .collect();

        let read_store = self.store.read().unwrap();

        let next_key = StoreEntry::CFilterTable((self.filter_type, Some(bundle + 1))).get_key(); // +1 to skip the genesis' filter
        if let Some((_, next_checkpoint)) = read_store
            .get_pinned(&next_key)?
            .map(|data| BundleEntry::deserialize(&data))
            .transpose()?
        {
            // check connection with the next bundle if present
            if last_hash != next_checkpoint {
                return Err(CompactFiltersError::InvalidFilterHeader);
            }
        }

        let key = StoreEntry::CFilterTable((self.filter_type, Some(bundle))).get_key();
        let value = (BundleStatus::CFHeaders { cf_headers }, checkpoint_hash);

        read_store.put(key, value.serialize())?;

        Ok(value.0)
    }

    /// Moves `bundle` to `CFilters` status after checking every received
    /// filter against its expected hash from `headers`.
    pub fn advance_to_cf_filters(
        &self,
        bundle: usize,
        checkpoint_hash: FilterHeaderHash,
        headers: Vec<FilterHeader>,
        filters: Vec<(usize, Vec<u8>)>,
    ) -> Result<BundleStatus, CompactFiltersError> {
        let cf_filters = filters
            .into_iter()
            .zip(headers.iter())
            .map(|((_, filter_content), header)| {
                // A filter whose hash doesn't match its header is invalid
                if header.filter_hash != sha256d::Hash::hash(&filter_content).into() {
                    return Err(CompactFiltersError::InvalidFilter);
                }

                Ok::<_, CompactFiltersError>(filter_content)
            })
            .collect::<Result<_, _>>()?;

        let key = StoreEntry::CFilterTable((self.filter_type, Some(bundle))).get_key();
        let value = (BundleStatus::CFilters { cf_filters }, checkpoint_hash);

        let read_store = self.store.read().unwrap();
        read_store.put(key, value.serialize())?;

        Ok(value.0)
    }

    /// Marks `bundle` as `Pruned`, discarding its stored filters.
    pub fn prune_filters(
        &self,
        bundle: usize,
        checkpoint_hash: FilterHeaderHash,
    ) -> Result<BundleStatus, CompactFiltersError> {
        let key = StoreEntry::CFilterTable((self.filter_type, Some(bundle))).get_key();
        let value = (BundleStatus::Pruned, checkpoint_hash);

        let read_store = self.store.read().unwrap();
        read_store.put(key, value.serialize())?;

        Ok(value.0)
    }

    /// Marks `bundle` as the chain `Tip`, keeping `cf_filters` around so the
    /// tip can be re-scanned on the next sync.
    pub fn mark_as_tip(
        &self,
        bundle: usize,
        cf_filters: Vec<Vec<u8>>,
        checkpoint_hash: FilterHeaderHash,
    ) -> Result<BundleStatus, CompactFiltersError> {
        let key = StoreEntry::CFilterTable((self.filter_type, Some(bundle))).get_key();
        let value = (BundleStatus::Tip { cf_filters }, checkpoint_hash);

        let read_store = self.store.read().unwrap();
        read_store.put(key, value.serialize())?;

        Ok(value.0)
    }
}

View File

@@ -1,313 +0,0 @@
// Magical Bitcoin Library
// Written in 2020 by
// Alekos Filini <alekos.filini@gmail.com>
//
// Copyright (c) 2020 Magical Bitcoin
//
// Permission is hereby granted, free of charge, to any person obtaining a copy
// of this software and associated documentation files (the "Software"), to deal
// in the Software without restriction, including without limitation the rights
// to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
// copies of the Software, and to permit persons to whom the Software is
// furnished to do so, subject to the following conditions:
//
// The above copyright notice and this permission notice shall be included in all
// copies or substantial portions of the Software.
//
// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
// IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
// FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
// AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
// LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
// OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
// SOFTWARE.
use std::collections::{BTreeMap, HashMap, VecDeque};
use std::sync::{Arc, Mutex};
use std::time::Duration;
use bitcoin::hash_types::{BlockHash, FilterHash};
use bitcoin::network::message::NetworkMessage;
use bitcoin::network::message_blockdata::GetHeadersMessage;
use bitcoin::util::bip158::BlockFilter;
use super::peer::*;
use super::store::*;
use super::CompactFiltersError;
use crate::error::Error;
pub(crate) const BURIED_CONFIRMATIONS: usize = 100;
/// Driver for the compact-filter sync: pulls filter headers/filters per
/// 1000-block bundle and downloads the blocks that match.
pub struct CFSync {
    headers_store: Arc<ChainStore<Full>>,
    cf_store: Arc<CFStore>,
    // Blocks below this height are never downloaded (filters pruned)
    skip_blocks: usize,
    // Work queue of (status, checkpoint, bundle index), shared across
    // the threads captured by `capture_thread_for_sync`
    bundles: Mutex<VecDeque<(BundleStatus, FilterHash, usize)>>,
}
impl CFSync {
    /// Creates a sync driver for `filter_type` filters on top of
    /// `headers_store`, initializing the underlying [`CFStore`].
    pub fn new(
        headers_store: Arc<ChainStore<Full>>,
        skip_blocks: usize,
        filter_type: u8,
    ) -> Result<Self, CompactFiltersError> {
        let cf_store = Arc::new(CFStore::new(&headers_store, filter_type)?);

        Ok(CFSync {
            headers_store,
            cf_store,
            skip_blocks,
            bundles: Mutex::new(VecDeque::new()),
        })
    }

    /// Counts the bundles already in `Pruned` status, ignoring the first
    /// `skip_blocks / 1000` bundles that are skipped by configuration.
    pub fn pruned_bundles(&self) -> Result<usize, CompactFiltersError> {
        Ok(self
            .cf_store
            .get_bundles()?
            .into_iter()
            .skip(self.skip_blocks / 1000)
            .fold(0, |acc, (status, _)| match status {
                BundleStatus::Pruned => acc + 1,
                _ => acc,
            }))
    }

    /// Fetches fresh checkpoints from `peer` and rebuilds the work queue of
    /// bundles to process. Must be called before `capture_thread_for_sync`.
    pub fn prepare_sync(&self, peer: Arc<Peer>) -> Result<(), CompactFiltersError> {
        let mut bundles_lock = self.bundles.lock().unwrap();

        // `unwrap()` on the tip hash: the header store always contains at
        // least the genesis block
        let resp = peer.get_cf_checkpt(
            self.cf_store.get_filter_type(),
            self.headers_store.get_tip_hash()?.unwrap(),
        )?;
        self.cf_store.replace_checkpoints(resp.filter_headers)?;

        bundles_lock.clear();
        for (index, (status, checkpoint)) in self.cf_store.get_bundles()?.into_iter().enumerate() {
            bundles_lock.push_back((status, checkpoint, index));
        }

        Ok(())
    }

    /// Drives the sync loop on the calling thread: repeatedly pops a bundle
    /// from the shared queue and walks it through the `BundleStatus` state
    /// machine (Init -> CFHeaders -> CFilters -> Processed -> Tip/Pruned).
    ///
    /// `process` is called per block with its filter and returns whether the
    /// full block should be downloaded; `completed_bundle` is invoked after
    /// each fully processed bundle. Multiple threads may call this with
    /// different peers to share the queue.
    pub fn capture_thread_for_sync<F, Q>(
        &self,
        peer: Arc<Peer>,
        process: F,
        completed_bundle: Q,
    ) -> Result<(), CompactFiltersError>
    where
        F: Fn(&BlockHash, &BlockFilter) -> Result<bool, CompactFiltersError>,
        Q: Fn(usize) -> Result<(), Error>,
    {
        let current_height = self.headers_store.get_height()?; // TODO: we should update it in case headers_store is also updated

        loop {
            let (mut status, checkpoint, index) = match self.bundles.lock().unwrap().pop_front() {
                None => break,
                Some(x) => x,
            };

            log::debug!(
                "Processing bundle #{} - height {} to {}",
                index,
                index * 1000 + 1,
                (index + 1) * 1000
            );

            // Drains `expected_filters` queued `cfilter` messages and maps
            // them block-height -> filter bytes
            let process_received_filters =
                |expected_filters| -> Result<BTreeMap<usize, Vec<u8>>, CompactFiltersError> {
                    let mut filters_map = BTreeMap::new();
                    for _ in 0..expected_filters {
                        let filter = peer.pop_cf_filter_resp()?;
                        if filter.filter_type != self.cf_store.get_filter_type() {
                            return Err(CompactFiltersError::InvalidResponse);
                        }

                        match self.headers_store.get_height_for(&filter.block_hash)? {
                            Some(height) => filters_map.insert(height, filter.filter),
                            None => return Err(CompactFiltersError::InvalidFilter),
                        };
                    }

                    Ok(filters_map)
                };

            // Bundle N covers heights [N*1000 + 1, (N+1)*1000]
            let start_height = index * 1000 + 1;
            let mut already_processed = 0;

            // Bundles entirely below `skip_blocks` are pruned right away
            if start_height < self.skip_blocks {
                status = self.cf_store.prune_filters(index, checkpoint)?;
            }

            let stop_height = std::cmp::min(current_height, start_height + 999);
            let stop_hash = self.headers_store.get_block_hash(stop_height)?.unwrap();

            if let BundleStatus::Init = status {
                log::trace!("status: Init");

                let resp = peer.get_cf_headers(0x00, start_height as u32, stop_hash)?;
                assert!(resp.previous_filter == checkpoint);
                status =
                    self.cf_store
                        .advance_to_cf_headers(index, checkpoint, resp.filter_hashes)?;
            }
            if let BundleStatus::Tip { cf_filters } = status {
                log::trace!("status: Tip (beginning) ");

                // A previous Tip bundle: re-fetch headers and only download
                // the filters not kept from the last sync
                already_processed = cf_filters.len();
                let headers_resp = peer.get_cf_headers(0x00, start_height as u32, stop_hash)?;
                let cf_headers = match self.cf_store.advance_to_cf_headers(
                    index,
                    checkpoint,
                    headers_resp.filter_hashes,
                )? {
                    BundleStatus::CFHeaders { cf_headers } => cf_headers,
                    _ => return Err(CompactFiltersError::InvalidResponse),
                };

                peer.get_cf_filters(
                    self.cf_store.get_filter_type(),
                    (start_height + cf_filters.len()) as u32,
                    stop_hash,
                )?;
                let expected_filters = stop_height - start_height + 1 - cf_filters.len();
                let filters_map = process_received_filters(expected_filters)?;
                let filters = cf_filters
                    .into_iter()
                    .enumerate()
                    .chain(filters_map.into_iter())
                    .collect();
                status = self
                    .cf_store
                    .advance_to_cf_filters(index, checkpoint, cf_headers, filters)?;
            }
            if let BundleStatus::CFHeaders { cf_headers } = status {
                log::trace!("status: CFHeaders");

                peer.get_cf_filters(
                    self.cf_store.get_filter_type(),
                    start_height as u32,
                    stop_hash,
                )?;
                let expected_filters = stop_height - start_height + 1;
                let filters_map = process_received_filters(expected_filters)?;
                status = self.cf_store.advance_to_cf_filters(
                    index,
                    checkpoint,
                    cf_headers,
                    filters_map.into_iter().collect(),
                )?;
            }
            if let BundleStatus::CFilters { cf_filters } = status {
                log::trace!("status: CFilters");

                // NOTE(review): `checked_sub(..).unwrap_or(0)` is equivalent
                // to `saturating_sub` — worth simplifying in a follow-up
                let last_sync_buried_height = (start_height + already_processed)
                    .checked_sub(BURIED_CONFIRMATIONS)
                    .unwrap_or(0);

                for (filter_index, filter) in cf_filters.iter().enumerate() {
                    let height = filter_index + start_height;

                    // do not download blocks that were already "buried" since the last sync
                    if height < last_sync_buried_height {
                        continue;
                    }

                    let block_hash = self.headers_store.get_block_hash(height)?.unwrap();

                    // TODO: also download random blocks?
                    if process(&block_hash, &BlockFilter::new(&filter))? {
                        log::debug!("Downloading block {}", block_hash);

                        let block = peer
                            .get_block(block_hash)?
                            .ok_or(CompactFiltersError::MissingBlock)?;
                        self.headers_store.save_full_block(&block, height)?;
                    }
                }

                status = BundleStatus::Processed { cf_filters };
            }
            if let BundleStatus::Processed { cf_filters } = status {
                log::trace!("status: Processed");

                // Keep the filters only for near-tip bundles; prune the rest
                if current_height - stop_height > 1000 {
                    status = self.cf_store.prune_filters(index, checkpoint)?;
                } else {
                    status = self.cf_store.mark_as_tip(index, cf_filters, checkpoint)?;
                }

                completed_bundle(index)?;
            }
            if let BundleStatus::Pruned = status {
                log::trace!("status: Pruned");
            }
            if let BundleStatus::Tip { .. } = status {
                log::trace!("status: Tip");
            }
        }

        Ok(())
    }
}
/// Synchronize block headers from `peer` into `store`.
///
/// Sends our locator hashes, opens a snapshot at the fork point reported by the
/// peer, then keeps requesting header batches until our height reaches the
/// peer's advertised start height. `sync_fn` is invoked with the new height
/// after every applied batch so callers can report progress.
///
/// Returns `Ok(None)` when the peer has nothing new for us, otherwise the
/// populated snapshot to be committed by the caller.
pub fn sync_headers<F>(
    peer: Arc<Peer>,
    store: Arc<ChainStore<Full>>,
    sync_fn: F,
) -> Result<Option<ChainStore<Snapshot>>, CompactFiltersError>
where
    F: Fn(usize) -> Result<(), Error>,
{
    // Announce every locator we know so the peer can locate the fork point.
    let locators = store.get_locators()?;
    let locator_hashes: Vec<_> = locators.iter().map(|(hash, _)| hash).cloned().collect();
    let known_locators: HashMap<_, _> = locators.into_iter().collect();
    peer.send(NetworkMessage::GetHeaders(GetHeadersMessage::new(
        locator_hashes,
        Default::default(),
    )))?;

    let first_response = peer
        .recv("headers", Some(Duration::from_secs(TIMEOUT_SECS)))?
        .ok_or(CompactFiltersError::Timeout)?;
    let (mut snapshot, mut last_hash) = match first_response {
        NetworkMessage::Headers(headers) => {
            // An empty reply means the peer has nothing beyond our tip.
            if headers.is_empty() {
                return Ok(None);
            }
            // The parent of the first header must be one of our locators,
            // otherwise the peer answered with an unrelated chain.
            match known_locators.get(&headers[0].prev_blockhash) {
                Some(from) => (
                    store.start_snapshot(*from)?,
                    headers[0].prev_blockhash.clone(),
                ),
                None => return Err(CompactFiltersError::InvalidHeaders),
            }
        }
        _ => return Err(CompactFiltersError::InvalidResponse),
    };

    let mut sync_height = store.get_height()?;
    // Keep pulling batches until we catch up with the peer's advertised height.
    while sync_height < peer.get_version().start_height as usize {
        peer.send(NetworkMessage::GetHeaders(GetHeadersMessage::new(
            vec![last_hash],
            Default::default(),
        )))?;
        let batch = peer
            .recv("headers", Some(Duration::from_secs(TIMEOUT_SECS)))?
            .ok_or(CompactFiltersError::Timeout)?;
        match batch {
            NetworkMessage::Headers(headers) => {
                let batch_len = headers.len();
                last_hash = snapshot.apply(sync_height, headers)?;
                sync_height += batch_len;
                sync_fn(sync_height)?;
            }
            _ => return Err(CompactFiltersError::InvalidResponse),
        }
    }
    Ok(Some(snapshot))
}

View File

@@ -1,42 +1,3 @@
// Magical Bitcoin Library
// Written in 2020 by
// Alekos Filini <alekos.filini@gmail.com>
//
// Copyright (c) 2020 Magical Bitcoin
//
// Permission is hereby granted, free of charge, to any person obtaining a copy
// of this software and associated documentation files (the "Software"), to deal
// in the Software without restriction, including without limitation the rights
// to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
// copies of the Software, and to permit persons to whom the Software is
// furnished to do so, subject to the following conditions:
//
// The above copyright notice and this permission notice shall be included in all
// copies or substantial portions of the Software.
//
// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
// IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
// FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
// AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
// LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
// OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
// SOFTWARE.
//! Electrum
//!
//! This module defines an [`OnlineBlockchain`] struct that wraps an [`electrum_client::Client`]
//! and implements the logic required to populate the wallet's [database](crate::database::Database) by
//! querying the inner client.
//!
//! ## Example
//!
//! ```no_run
//! # use magical::blockchain::electrum::ElectrumBlockchain;
//! let client = electrum_client::Client::new("ssl://electrum.blockstream.info:50002", None)?;
//! let blockchain = ElectrumBlockchain::from(client);
//! # Ok::<(), magical::Error>(())
//! ```
use std::collections::HashSet;
#[allow(unused_imports)]
@@ -44,34 +5,23 @@ use log::{debug, error, info, trace};
use bitcoin::{Script, Transaction, Txid};
use electrum_client::{Client, ElectrumApi};
use electrum_client::tokio::io::{AsyncRead, AsyncWrite};
use electrum_client::Client;
use self::utils::{ELSGetHistoryRes, ELSListUnspentRes, ElectrumLikeSync};
use super::*;
use crate::database::BatchDatabase;
use crate::database::{BatchDatabase, DatabaseUtils};
use crate::error::Error;
use crate::FeeRate;
/// Wrapper over an Electrum Client that implements the required blockchain traits
///
/// ## Example
/// See the [`blockchain::electrum`](crate::blockchain::electrum) module for a usage example.
pub struct ElectrumBlockchain(Option<Client>);
pub struct ElectrumBlockchain<T: AsyncRead + AsyncWrite + Send>(Option<Client<T>>);
#[cfg(test)]
#[cfg(feature = "test-electrum")]
#[magical_blockchain_tests(crate)]
fn local_electrs() -> ElectrumBlockchain {
ElectrumBlockchain::from(Client::new(&testutils::get_electrum_url(), None).unwrap())
}
impl std::convert::From<Client> for ElectrumBlockchain {
fn from(client: Client) -> Self {
impl<T: AsyncRead + AsyncWrite + Send> std::convert::From<Client<T>> for ElectrumBlockchain<T> {
fn from(client: Client<T>) -> Self {
ElectrumBlockchain(Some(client))
}
}
impl Blockchain for ElectrumBlockchain {
impl<T: AsyncRead + AsyncWrite + Send> Blockchain for ElectrumBlockchain<T> {
fn offline() -> Self {
ElectrumBlockchain(None)
}
@@ -81,74 +31,68 @@ impl Blockchain for ElectrumBlockchain {
}
}
impl OnlineBlockchain for ElectrumBlockchain {
fn get_capabilities(&self) -> HashSet<Capability> {
vec![
Capability::FullHistory,
Capability::GetAnyTx,
Capability::AccurateFees,
]
.into_iter()
.collect()
#[async_trait(?Send)]
impl<T: AsyncRead + AsyncWrite + Send> OnlineBlockchain for ElectrumBlockchain<T> {
async fn get_capabilities(&self) -> HashSet<Capability> {
vec![Capability::FullHistory, Capability::GetAnyTx]
.into_iter()
.collect()
}
fn setup<D: BatchDatabase, P: Progress>(
&self,
async fn setup<D: BatchDatabase + DatabaseUtils, P: Progress>(
&mut self,
stop_gap: Option<usize>,
database: &mut D,
progress_update: P,
) -> Result<(), Error> {
self.0
.as_ref()
.as_mut()
.ok_or(Error::OfflineClient)?
.electrum_like_setup(stop_gap, database, progress_update)
.await
}
fn get_tx(&self, txid: &Txid) -> Result<Option<Transaction>, Error> {
async fn get_tx(&mut self, txid: &Txid) -> Result<Option<Transaction>, Error> {
Ok(self
.0
.as_ref()
.as_mut()
.ok_or(Error::OfflineClient)?
.transaction_get(txid)
.await
.map(Option::Some)?)
}
fn broadcast(&self, tx: &Transaction) -> Result<(), Error> {
async fn broadcast(&mut self, tx: &Transaction) -> Result<(), Error> {
Ok(self
.0
.as_ref()
.as_mut()
.ok_or(Error::OfflineClient)?
.transaction_broadcast(tx)
.await
.map(|_| ())?)
}
fn get_height(&self) -> Result<u32, Error> {
async fn get_height(&mut self) -> Result<usize, Error> {
// TODO: unsubscribe when added to the client, or is there a better call to use here?
Ok(self
.0
.as_ref()
.as_mut()
.ok_or(Error::OfflineClient)?
.block_headers_subscribe()
.map(|data| data.height as u32)?)
}
fn estimate_fee(&self, target: usize) -> Result<FeeRate, Error> {
Ok(FeeRate::from_btc_per_kvb(
self.0
.as_ref()
.ok_or(Error::OfflineClient)?
.estimate_fee(target)? as f32,
))
.await
.map(|data| data.height)?)
}
}
impl ElectrumLikeSync for Client {
fn els_batch_script_get_history<'s, I: IntoIterator<Item = &'s Script>>(
&self,
#[async_trait(?Send)]
impl<T: AsyncRead + AsyncWrite + Send> ElectrumLikeSync for Client<T> {
async fn els_batch_script_get_history<'s, I: IntoIterator<Item = &'s Script>>(
&mut self,
scripts: I,
) -> Result<Vec<Vec<ELSGetHistoryRes>>, Error> {
self.batch_script_get_history(scripts)
.await
.map(|v| {
v.into_iter()
.map(|v| {
@@ -168,11 +112,12 @@ impl ElectrumLikeSync for Client {
.map_err(Error::Electrum)
}
fn els_batch_script_list_unspent<'s, I: IntoIterator<Item = &'s Script>>(
&self,
async fn els_batch_script_list_unspent<'s, I: IntoIterator<Item = &'s Script>>(
&mut self,
scripts: I,
) -> Result<Vec<Vec<ELSListUnspentRes>>, Error> {
self.batch_script_list_unspent(scripts)
.await
.map(|v| {
v.into_iter()
.map(|v| {
@@ -196,7 +141,7 @@ impl ElectrumLikeSync for Client {
.map_err(Error::Electrum)
}
fn els_transaction_get(&self, txid: &Txid) -> Result<Transaction, Error> {
self.transaction_get(txid).map_err(Error::Electrum)
async fn els_transaction_get(&mut self, txid: &Txid) -> Result<Transaction, Error> {
self.transaction_get(txid).await.map_err(Error::Electrum)
}
}

View File

@@ -1,42 +1,4 @@
// Magical Bitcoin Library
// Written in 2020 by
// Alekos Filini <alekos.filini@gmail.com>
//
// Copyright (c) 2020 Magical Bitcoin
//
// Permission is hereby granted, free of charge, to any person obtaining a copy
// of this software and associated documentation files (the "Software"), to deal
// in the Software without restriction, including without limitation the rights
// to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
// copies of the Software, and to permit persons to whom the Software is
// furnished to do so, subject to the following conditions:
//
// The above copyright notice and this permission notice shall be included in all
// copies or substantial portions of the Software.
//
// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
// IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
// FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
// AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
// LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
// OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
// SOFTWARE.
//! Esplora
//!
//! This module defines an [`OnlineBlockchain`] struct that can query an Esplora backend
//! to populate the wallet's [database](crate::database::Database)
//!
//! ## Example
//!
//! ```no_run
//! # use magical::blockchain::esplora::EsploraBlockchain;
//! let blockchain = EsploraBlockchain::new("https://blockstream.info/testnet/api");
//! # Ok::<(), magical::Error>(())
//! ```
use std::collections::{HashMap, HashSet};
use std::fmt;
use std::collections::HashSet;
use futures::stream::{self, StreamExt, TryStreamExt};
@@ -45,7 +7,8 @@ use log::{debug, error, info, trace};
use serde::Deserialize;
use reqwest::{Client, StatusCode};
use reqwest::Client;
use reqwest::StatusCode;
use bitcoin::consensus::{deserialize, serialize};
use bitcoin::hashes::hex::ToHex;
@@ -54,22 +17,15 @@ use bitcoin::{Script, Transaction, Txid};
use self::utils::{ELSGetHistoryRes, ELSListUnspentRes, ElectrumLikeSync};
use super::*;
use crate::database::BatchDatabase;
use crate::database::{BatchDatabase, DatabaseUtils};
use crate::error::Error;
use crate::FeeRate;
#[derive(Debug)]
struct UrlClient {
pub struct UrlClient {
url: String,
// We use the async client instead of the blocking one because it automatically uses `fetch`
// when the target platform is wasm32.
client: Client,
}
/// Structure that implements the logic to sync with Esplora
///
/// ## Example
/// See the [`blockchain::esplora`](crate::blockchain::esplora) module for a usage example.
#[derive(Debug)]
pub struct EsploraBlockchain(Option<UrlClient>);
@@ -80,7 +36,6 @@ impl std::convert::From<UrlClient> for EsploraBlockchain {
}
impl EsploraBlockchain {
/// Create a new instance of the client from a base URL
pub fn new(base_url: &str) -> Self {
EsploraBlockchain(Some(UrlClient {
url: base_url.to_string(),
@@ -99,74 +54,52 @@ impl Blockchain for EsploraBlockchain {
}
}
#[maybe_async]
#[async_trait(?Send)]
impl OnlineBlockchain for EsploraBlockchain {
fn get_capabilities(&self) -> HashSet<Capability> {
vec![
Capability::FullHistory,
Capability::GetAnyTx,
Capability::AccurateFees,
]
.into_iter()
.collect()
async fn get_capabilities(&self) -> HashSet<Capability> {
vec![Capability::FullHistory, Capability::GetAnyTx]
.into_iter()
.collect()
}
fn setup<D: BatchDatabase, P: Progress>(
&self,
async fn setup<D: BatchDatabase + DatabaseUtils, P: Progress>(
&mut self,
stop_gap: Option<usize>,
database: &mut D,
progress_update: P,
) -> Result<(), Error> {
maybe_await!(self
.0
.as_ref()
self.0
.as_mut()
.ok_or(Error::OfflineClient)?
.electrum_like_setup(stop_gap, database, progress_update))
.electrum_like_setup(stop_gap, database, progress_update)
.await
}
fn get_tx(&self, txid: &Txid) -> Result<Option<Transaction>, Error> {
Ok(await_or_block!(self
async fn get_tx(&mut self, txid: &Txid) -> Result<Option<Transaction>, Error> {
Ok(self
.0
.as_ref()
.as_mut()
.ok_or(Error::OfflineClient)?
._get_tx(txid))?)
._get_tx(txid)
.await?)
}
fn broadcast(&self, tx: &Transaction) -> Result<(), Error> {
Ok(await_or_block!(self
async fn broadcast(&mut self, tx: &Transaction) -> Result<(), Error> {
Ok(self
.0
.as_ref()
.as_mut()
.ok_or(Error::OfflineClient)?
._broadcast(tx))?)
._broadcast(tx)
.await?)
}
fn get_height(&self) -> Result<u32, Error> {
Ok(await_or_block!(self
async fn get_height(&mut self) -> Result<usize, Error> {
Ok(self
.0
.as_ref()
.as_mut()
.ok_or(Error::OfflineClient)?
._get_height())?)
}
fn estimate_fee(&self, target: usize) -> Result<FeeRate, Error> {
let estimates = await_or_block!(self
.0
.as_ref()
.ok_or(Error::OfflineClient)?
._get_fee_estimates())?;
let fee_val = estimates
.into_iter()
.map(|(k, v)| Ok::<_, std::num::ParseIntError>((k.parse::<usize>()?, v)))
.collect::<Result<Vec<_>, _>>()
.map_err(|e| Error::Generic(e.to_string()))?
.into_iter()
.take_while(|(k, _)| k <= &target)
.map(|(_, v)| v)
.last()
.unwrap_or(1.0);
Ok(FeeRate::from_sat_per_vb(fee_val as f32))
._get_height()
.await?)
}
}
@@ -178,7 +111,7 @@ impl UrlClient {
async fn _get_tx(&self, txid: &Txid) -> Result<Option<Transaction>, EsploraError> {
let resp = self
.client
.get(&format!("{}/tx/{}/raw", self.url, txid))
.get(&format!("{}/api/tx/{}/raw", self.url, txid))
.send()
.await?;
@@ -191,7 +124,7 @@ impl UrlClient {
async fn _broadcast(&self, transaction: &Transaction) -> Result<(), EsploraError> {
self.client
.post(&format!("{}/tx", self.url))
.post(&format!("{}/api/tx", self.url))
.body(serialize(transaction).to_hex())
.send()
.await?
@@ -200,14 +133,16 @@ impl UrlClient {
Ok(())
}
async fn _get_height(&self) -> Result<u32, EsploraError> {
let req = self
async fn _get_height(&self) -> Result<usize, EsploraError> {
Ok(self
.client
.get(&format!("{}/blocks/tip/height", self.url))
.get(&format!("{}/api/blocks/tip/height", self.url))
.send()
.await?;
Ok(req.error_for_status()?.text().await?.parse()?)
.await?
.error_for_status()?
.text()
.await?
.parse()?)
}
async fn _script_get_history(
@@ -221,7 +156,7 @@ impl UrlClient {
result.extend(
self.client
.get(&format!(
"{}/scripthash/{}/txs/mempool",
"{}/api/scripthash/{}/txs/mempool",
self.url, scripthash
))
.send()
@@ -249,7 +184,7 @@ impl UrlClient {
let response = self
.client
.get(&format!(
"{}/scripthash/{}/txs/chain/{}",
"{}/api/scripthash/{}/txs/chain/{}",
self.url, scripthash, last_txid
))
.send()
@@ -284,7 +219,7 @@ impl UrlClient {
Ok(self
.client
.get(&format!(
"{}/scripthash/{}/utxo",
"{}/api/scripthash/{}/utxo",
self.url,
Self::script_to_scripthash(script)
))
@@ -301,51 +236,34 @@ impl UrlClient {
})
.collect())
}
async fn _get_fee_estimates(&self) -> Result<HashMap<String, f64>, EsploraError> {
Ok(self
.client
.get(&format!("{}/fee-estimates", self.url,))
.send()
.await?
.error_for_status()?
.json::<HashMap<String, f64>>()
.await?)
}
}
#[maybe_async]
#[async_trait(?Send)]
impl ElectrumLikeSync for UrlClient {
fn els_batch_script_get_history<'s, I: IntoIterator<Item = &'s Script>>(
&self,
async fn els_batch_script_get_history<'s, I: IntoIterator<Item = &'s Script>>(
&mut self,
scripts: I,
) -> Result<Vec<Vec<ELSGetHistoryRes>>, Error> {
let future = async {
Ok(stream::iter(scripts)
.then(|script| self._script_get_history(&script))
.try_collect()
.await?)
};
await_or_block!(future)
Ok(stream::iter(scripts)
.then(|script| self._script_get_history(&script))
.try_collect()
.await?)
}
fn els_batch_script_list_unspent<'s, I: IntoIterator<Item = &'s Script>>(
&self,
async fn els_batch_script_list_unspent<'s, I: IntoIterator<Item = &'s Script>>(
&mut self,
scripts: I,
) -> Result<Vec<Vec<ELSListUnspentRes>>, Error> {
let future = async {
Ok(stream::iter(scripts)
.then(|script| self._script_list_unspent(&script))
.try_collect()
.await?)
};
await_or_block!(future)
Ok(stream::iter(scripts)
.then(|script| self._script_list_unspent(&script))
.try_collect()
.await?)
}
fn els_transaction_get(&self, txid: &Txid) -> Result<Transaction, Error> {
Ok(await_or_block!(self._get_tx(txid))?
async fn els_transaction_get(&mut self, txid: &Txid) -> Result<Transaction, Error> {
Ok(self
._get_tx(txid)
.await?
.ok_or_else(|| EsploraError::TransactionNotFound(*txid))?)
}
}
@@ -368,28 +286,15 @@ struct EsploraListUnspent {
status: EsploraGetHistoryStatus,
}
/// Errors that can happen during a sync with [`EsploraBlockchain`]
#[derive(Debug)]
pub enum EsploraError {
/// Error with the HTTP call
Reqwest(reqwest::Error),
/// Invalid number returned
Parsing(std::num::ParseIntError),
/// Invalid Bitcoin data returned
BitcoinEncoding(bitcoin::consensus::encode::Error),
/// Transaction not found
TransactionNotFound(Txid),
}
impl fmt::Display for EsploraError {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
write!(f, "{:?}", self)
}
}
impl std::error::Error for EsploraError {}
impl From<reqwest::Error> for EsploraError {
fn from(other: reqwest::Error) -> Self {
EsploraError::Reqwest(other)

View File

@@ -1,108 +1,35 @@
// Magical Bitcoin Library
// Written in 2020 by
// Alekos Filini <alekos.filini@gmail.com>
//
// Copyright (c) 2020 Magical Bitcoin
//
// Permission is hereby granted, free of charge, to any person obtaining a copy
// of this software and associated documentation files (the "Software"), to deal
// in the Software without restriction, including without limitation the rights
// to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
// copies of the Software, and to permit persons to whom the Software is
// furnished to do so, subject to the following conditions:
//
// The above copyright notice and this permission notice shall be included in all
// copies or substantial portions of the Software.
//
// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
// IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
// FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
// AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
// LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
// OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
// SOFTWARE.
//! Blockchain backends
//!
//! This module provides the implementation of a few commonly-used backends like
//! [Electrum](crate::blockchain::electrum), [Esplora](crate::blockchain::esplora) and
//! [Compact Filters/Neutrino](crate::blockchain::compact_filters), along with two generalized
//! traits [`Blockchain`] and [`OnlineBlockchain`] that can be implemented to build customized
//! backends.
//!
//! Types that only implement the [`Blockchain`] trait can be used as backends for [`Wallet`](crate::wallet::Wallet)s, but any
//! action that requires interacting with the blockchain won't be available ([`Wallet::sync`](crate::wallet::Wallet::sync) and
//! [`Wallet::broadcast`](crate::wallet::Wallet::broadcast)). This allows the creation of physically air-gapped wallets, that have no
//! ability to contact the outside world. An example of an offline-only client is [`OfflineBlockchain`].
//!
//! Types that also implement [`OnlineBlockchain`] will make the two aforementioned actions
//! available.
use std::collections::HashSet;
use std::ops::Deref;
use std::sync::mpsc::{channel, Receiver, Sender};
use std::sync::Arc;
use bitcoin::{Transaction, Txid};
use crate::database::BatchDatabase;
use crate::database::{BatchDatabase, DatabaseUtils};
use crate::error::Error;
use crate::FeeRate;
pub(crate) mod utils;
pub mod utils;
#[cfg(feature = "electrum")]
#[cfg_attr(docsrs, doc(cfg(feature = "electrum")))]
pub mod electrum;
#[cfg(feature = "electrum")]
pub use self::electrum::ElectrumBlockchain;
#[cfg(feature = "esplora")]
#[cfg_attr(docsrs, doc(cfg(feature = "esplora")))]
pub mod esplora;
#[cfg(feature = "esplora")]
pub use self::esplora::EsploraBlockchain;
#[cfg(feature = "compact_filters")]
#[cfg_attr(docsrs, doc(cfg(feature = "compact_filters")))]
pub mod compact_filters;
#[cfg(feature = "compact_filters")]
pub use self::compact_filters::CompactFiltersBlockchain;
/// Capabilities that can be supported by an [`OnlineBlockchain`] backend
#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]
pub enum Capability {
/// Can recover the full history of a wallet and not only the set of currently spendable UTXOs
FullHistory,
/// Can fetch any historical transaction given its txid
GetAnyTx,
/// Can compute accurate fees for the transactions found during sync
AccurateFees,
}
/// Base trait for a blockchain backend
///
/// This trait is always required, even for "air-gapped" backends that don't actually make any
/// external call. Clients that have the ability to make external calls must also implement `OnlineBlockchain`.
pub trait Blockchain {
    /// Return whether or not the client has the ability to fulfill requests
///
/// This should always be `false` for offline-only types, and can be true for types that also
    /// implement [`OnlineBlockchain`], if they have the ability to fulfill requests.
fn is_online(&self) -> bool;
/// Create a new instance of the client that is offline-only
///
/// For types that also implement [`OnlineBlockchain`], this means creating an instance that
/// returns [`Error::OfflineClient`](crate::error::Error::OfflineClient) if any of the "online"
/// methods are called.
///
/// This is generally implemented by wrapping the client in an [`Option`] that has [`Option::None`] value
/// when created with this method, and is [`Option::Some`] if properly instantiated.
fn offline() -> Self;
}
/// Type that only implements [`Blockchain`] and is always offline
pub struct OfflineBlockchain;
impl Blockchain for OfflineBlockchain {
fn offline() -> Self {
@@ -114,81 +41,37 @@ impl Blockchain for OfflineBlockchain {
}
}
/// Trait that defines the actions that must be supported by an online [`Blockchain`]
#[maybe_async]
#[async_trait(?Send)]
pub trait OnlineBlockchain: Blockchain {
/// Return the set of [`Capability`] supported by this backend
fn get_capabilities(&self) -> HashSet<Capability>;
async fn get_capabilities(&self) -> HashSet<Capability>;
/// Setup the backend and populate the internal database for the first time
///
/// This method is the equivalent of [`OnlineBlockchain::sync`], but it's guaranteed to only be
/// called once, at the first [`Wallet::sync`](crate::wallet::Wallet::sync).
///
/// The rationale behind the distinction between `sync` and `setup` is that some custom backends
/// might need to perform specific actions only the first time they are synced.
///
/// For types that do not have that distinction, only this method can be implemented, since
/// [`OnlineBlockchain::sync`] defaults to calling this internally if not overridden.
fn setup<D: BatchDatabase, P: 'static + Progress>(
&self,
async fn setup<D: BatchDatabase + DatabaseUtils, P: Progress>(
&mut self,
stop_gap: Option<usize>,
database: &mut D,
progress_update: P,
) -> Result<(), Error>;
/// Populate the internal database with transactions and UTXOs
///
/// If not overridden, it defaults to calling [`OnlineBlockchain::setup`] internally.
///
/// This method should implement the logic required to iterate over the list of the wallet's
/// script_pubkeys using [`Database::iter_script_pubkeys`] and look for relevant transactions
/// in the blockchain to populate the database with [`BatchOperations::set_tx`] and
/// [`BatchOperations::set_utxo`].
///
/// This method should also take care of removing UTXOs that are seen as spent in the
/// blockchain, using [`BatchOperations::del_utxo`].
///
/// The `progress_update` object can be used to give the caller updates about the progress by using
/// [`Progress::update`].
///
/// [`Database::iter_script_pubkeys`]: crate::database::Database::iter_script_pubkeys
/// [`BatchOperations::set_tx`]: crate::database::BatchOperations::set_tx
/// [`BatchOperations::set_utxo`]: crate::database::BatchOperations::set_utxo
/// [`BatchOperations::del_utxo`]: crate::database::BatchOperations::del_utxo
fn sync<D: BatchDatabase, P: 'static + Progress>(
&self,
async fn sync<D: BatchDatabase + DatabaseUtils, P: Progress>(
&mut self,
stop_gap: Option<usize>,
database: &mut D,
progress_update: P,
) -> Result<(), Error> {
maybe_await!(self.setup(stop_gap, database, progress_update))
self.setup(stop_gap, database, progress_update).await
}
/// Fetch a transaction from the blockchain given its txid
fn get_tx(&self, txid: &Txid) -> Result<Option<Transaction>, Error>;
/// Broadcast a transaction
fn broadcast(&self, tx: &Transaction) -> Result<(), Error>;
async fn get_tx(&mut self, txid: &Txid) -> Result<Option<Transaction>, Error>;
async fn broadcast(&mut self, tx: &Transaction) -> Result<(), Error>;
/// Return the current height
fn get_height(&self) -> Result<u32, Error>;
/// Estimate the fee rate required to confirm a transaction in a given `target` of blocks
fn estimate_fee(&self, target: usize) -> Result<FeeRate, Error>;
async fn get_height(&mut self) -> Result<usize, Error>;
}
/// Data sent with a progress update over a [`channel`]
pub type ProgressData = (f32, Option<String>);
/// Trait for types that can receive and process progress updates during [`OnlineBlockchain::sync`] and
/// [`OnlineBlockchain::setup`]
pub trait Progress: Send {
/// Send a new progress update
///
/// The `progress` value should be in the range 0.0 - 100.0, and the `message` value is an
/// optional text message that can be displayed to the user.
pub trait Progress {
fn update(&self, progress: f32, message: Option<String>) -> Result<(), Error>;
}
/// Shortcut to create a [`channel`] (pair of [`Sender`] and [`Receiver`]) that can transport [`ProgressData`]
pub fn progress() -> (Sender<ProgressData>, Receiver<ProgressData>) {
channel()
}
@@ -204,11 +87,8 @@ impl Progress for Sender<ProgressData> {
}
}
/// Type that implements [`Progress`] and drops every update received
#[derive(Clone)]
pub struct NoopProgress;
/// Create a new instance of [`NoopProgress`]
pub fn noop_progress() -> NoopProgress {
NoopProgress
}
@@ -218,69 +98,3 @@ impl Progress for NoopProgress {
Ok(())
}
}
/// Type that implements [`Progress`] and logs at level `INFO` every update received
#[derive(Clone)]
pub struct LogProgress;
/// Create a new instance of [`LogProgress`]
pub fn log_progress() -> LogProgress {
LogProgress
}
impl Progress for LogProgress {
fn update(&self, progress: f32, message: Option<String>) -> Result<(), Error> {
log::info!("Sync {:.3}%: `{}`", progress, message.unwrap_or("".into()));
Ok(())
}
}
impl<T: Blockchain> Blockchain for Arc<T> {
fn is_online(&self) -> bool {
self.deref().is_online()
}
fn offline() -> Self {
Arc::new(T::offline())
}
}
#[maybe_async]
impl<T: OnlineBlockchain> OnlineBlockchain for Arc<T> {
fn get_capabilities(&self) -> HashSet<Capability> {
maybe_await!(self.deref().get_capabilities())
}
fn setup<D: BatchDatabase, P: 'static + Progress>(
&self,
stop_gap: Option<usize>,
database: &mut D,
progress_update: P,
) -> Result<(), Error> {
maybe_await!(self.deref().setup(stop_gap, database, progress_update))
}
fn sync<D: BatchDatabase, P: 'static + Progress>(
&self,
stop_gap: Option<usize>,
database: &mut D,
progress_update: P,
) -> Result<(), Error> {
maybe_await!(self.deref().sync(stop_gap, database, progress_update))
}
fn get_tx(&self, txid: &Txid) -> Result<Option<Transaction>, Error> {
maybe_await!(self.deref().get_tx(txid))
}
fn broadcast(&self, tx: &Transaction) -> Result<(), Error> {
maybe_await!(self.deref().broadcast(tx))
}
fn get_height(&self) -> Result<u32, Error> {
maybe_await!(self.deref().get_height())
}
fn estimate_fee(&self, target: usize) -> Result<FeeRate, Error> {
maybe_await!(self.deref().estimate_fee(target))
}
}

View File

@@ -1,27 +1,3 @@
// Magical Bitcoin Library
// Written in 2020 by
// Alekos Filini <alekos.filini@gmail.com>
//
// Copyright (c) 2020 Magical Bitcoin
//
// Permission is hereby granted, free of charge, to any person obtaining a copy
// of this software and associated documentation files (the "Software"), to deal
// in the Software without restriction, including without limitation the rights
// to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
// copies of the Software, and to permit persons to whom the Software is
// furnished to do so, subject to the following conditions:
//
// The above copyright notice and this permission notice shall be included in all
// copies or substantial portions of the Software.
//
// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
// IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
// FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
// AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
// LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
// OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
// SOFTWARE.
use std::cmp;
use std::collections::{HashSet, VecDeque};
use std::convert::TryFrom;
@@ -51,24 +27,24 @@ pub struct ELSListUnspentRes {
}
/// Implements the synchronization logic for an Electrum-like client.
#[maybe_async]
#[async_trait(?Send)]
pub trait ElectrumLikeSync {
fn els_batch_script_get_history<'s, I: IntoIterator<Item = &'s Script>>(
&self,
async fn els_batch_script_get_history<'s, I: IntoIterator<Item = &'s Script>>(
&mut self,
scripts: I,
) -> Result<Vec<Vec<ELSGetHistoryRes>>, Error>;
fn els_batch_script_list_unspent<'s, I: IntoIterator<Item = &'s Script>>(
&self,
async fn els_batch_script_list_unspent<'s, I: IntoIterator<Item = &'s Script>>(
&mut self,
scripts: I,
) -> Result<Vec<Vec<ELSListUnspentRes>>, Error>;
fn els_transaction_get(&self, txid: &Txid) -> Result<Transaction, Error>;
async fn els_transaction_get(&mut self, txid: &Txid) -> Result<Transaction, Error>;
// Provided methods down here...
fn electrum_like_setup<D: BatchDatabase, P: Progress>(
&self,
async fn electrum_like_setup<D: BatchDatabase + DatabaseUtils, P: Progress>(
&mut self,
stop_gap: Option<usize>,
database: &mut D,
_progress_update: P,
@@ -88,7 +64,7 @@ pub trait ElectrumLikeSync {
database.commit_batch(del_batch)?;
// maximum derivation index for a change address that we've seen during sync
let mut change_max_deriv = None;
let mut change_max_deriv = 0;
let mut already_checked: HashSet<Script> = HashSet::new();
let mut to_check_later = VecDeque::with_capacity(batch_query_size);
@@ -104,29 +80,26 @@ pub trait ElectrumLikeSync {
let mut iterating_external = true;
let mut index = 0;
let mut last_found = None;
let mut last_found = 0;
while !to_check_later.is_empty() {
trace!("to_check_later size {}", to_check_later.len());
let until = cmp::min(to_check_later.len(), batch_query_size);
let chunk: Vec<Script> = to_check_later.drain(..until).collect();
let call_result = maybe_await!(self.els_batch_script_get_history(chunk.iter()))?;
let call_result = self.els_batch_script_get_history(chunk.iter()).await?;
for (script, history) in chunk.into_iter().zip(call_result.into_iter()) {
trace!("received history for {:?}, size {}", script, history.len());
if !history.is_empty() {
last_found = Some(index);
last_found = index;
let mut check_later_scripts = maybe_await!(self.check_history(
database,
script,
history,
&mut change_max_deriv
))?
.into_iter()
.filter(|x| already_checked.insert(x.clone()))
.collect();
let mut check_later_scripts = self
.check_history(database, script, history, &mut change_max_deriv)
.await?
.into_iter()
.filter(|x| already_checked.insert(x.clone()))
.collect();
to_check_later.append(&mut check_later_scripts);
}
@@ -134,9 +107,9 @@ pub trait ElectrumLikeSync {
}
match iterating_external {
true if index - last_found.unwrap_or(0) >= stop_gap => iterating_external = false,
true if index - last_found >= stop_gap => iterating_external = false,
true => {
trace!("pushing one more batch from `iter_scriptpubkeys`. index = {}, last_found = {:?}, stop_gap = {}", index, last_found, stop_gap);
trace!("pushing one more batch from `iter_scriptpubkeys`. index = {}, last_found = {}, stop_gap = {}", index, last_found, stop_gap);
let chunk: Vec<Script> =
iter_scriptpubkeys.by_ref().take(batch_query_size).collect();
@@ -153,7 +126,7 @@ pub trait ElectrumLikeSync {
let mut batch = database.begin_batch();
for chunk in ChunksIterator::new(database.iter_utxos()?.into_iter(), batch_query_size) {
let scripts: Vec<_> = chunk.iter().map(|u| &u.txout.script_pubkey).collect();
let call_result = maybe_await!(self.els_batch_script_list_unspent(scripts))?;
let call_result = self.els_batch_script_list_unspent(scripts).await?;
// check which utxos are actually still unspent
for (utxo, list_unspent) in chunk.into_iter().zip(call_result.iter()) {
@@ -178,14 +151,14 @@ pub trait ElectrumLikeSync {
}
let current_ext = database.get_last_index(ScriptType::External)?.unwrap_or(0);
let first_ext_new = last_found.map(|x| x + 1).unwrap_or(0) as u32;
let first_ext_new = last_found as u32 + 1;
if first_ext_new > current_ext {
info!("Setting external index to {}", first_ext_new);
database.set_last_index(ScriptType::External, first_ext_new)?;
}
let current_int = database.get_last_index(ScriptType::Internal)?.unwrap_or(0);
let first_int_new = change_max_deriv.map(|x| x + 1).unwrap_or(0);
let first_int_new = change_max_deriv + 1;
if first_int_new > current_int {
info!("Setting internal index to {}", first_int_new);
database.set_last_index(ScriptType::Internal, first_int_new)?;
@@ -196,13 +169,13 @@ pub trait ElectrumLikeSync {
Ok(())
}
fn check_tx_and_descendant<D: BatchDatabase>(
&self,
async fn check_tx_and_descendant<D: DatabaseUtils + BatchDatabase>(
&mut self,
database: &mut D,
txid: &Txid,
height: Option<u32>,
cur_script: &Script,
change_max_deriv: &mut Option<u32>,
change_max_deriv: &mut u32,
) -> Result<Vec<Script>, Error> {
debug!(
"check_tx_and_descendant of {}, height: {:?}, script: {}",
@@ -210,6 +183,7 @@ pub trait ElectrumLikeSync {
);
let mut updates = database.begin_batch();
let tx = match database.get_tx(&txid, true)? {
// TODO: do we need the raw?
Some(mut saved_tx) => {
// update the height if it's different (in case of reorg)
if saved_tx.height != height {
@@ -227,20 +201,12 @@ pub trait ElectrumLikeSync {
// went wrong
saved_tx.transaction.unwrap()
}
None => {
let fetched_tx = maybe_await!(self.els_transaction_get(&txid))?;
database.set_raw_tx(&fetched_tx)?;
fetched_tx
}
None => self.els_transaction_get(&txid).await?,
};
let mut incoming: u64 = 0;
let mut outgoing: u64 = 0;
let mut inputs_sum: u64 = 0;
let mut outputs_sum: u64 = 0;
// look for our own inputs
for (i, input) in tx.input.iter().enumerate() {
// the fact that we visit addresses in a BFS fashion starting from the external addresses
@@ -248,46 +214,25 @@ pub trait ElectrumLikeSync {
// the transactions at a lower depth have already been indexed, so if an outpoint is ours
// we are guaranteed to have it in the db).
if let Some(previous_output) = database.get_previous_output(&input.previous_output)? {
inputs_sum += previous_output.value;
if database.is_mine(&previous_output.script_pubkey)? {
outgoing += previous_output.value;
debug!("{} input #{} is mine, removing from utxo", txid, i);
updates.del_utxo(&input.previous_output)?;
}
} else {
// The input is not ours, but we still need to count it for the fees. so fetch the
// tx (from the database or from network) and check it
let tx = match database.get_tx(&input.previous_output.txid, true)? {
Some(saved_tx) => saved_tx.transaction.unwrap(),
None => {
let fetched_tx =
maybe_await!(self.els_transaction_get(&input.previous_output.txid))?;
database.set_raw_tx(&fetched_tx)?;
fetched_tx
}
};
inputs_sum += tx.output[input.previous_output.vout as usize].value;
}
}
let mut to_check_later = vec![];
for (i, output) in tx.output.iter().enumerate() {
// to compute the fees later
outputs_sum += output.value;
// this output is ours, we have a path to derive it
if let Some((script_type, child)) =
if let Some((script_type, path)) =
database.get_path_from_script_pubkey(&output.script_pubkey)?
{
debug!("{} output #{} is mine, adding utxo", txid, i);
updates.set_utxo(&UTXO {
outpoint: OutPoint::new(tx.txid(), i as u32),
txout: output.clone(),
is_internal: script_type.is_internal(),
})?;
incoming += output.value;
@@ -298,9 +243,9 @@ pub trait ElectrumLikeSync {
// derive as many change addrs as external addresses that we've seen
if script_type == ScriptType::Internal
&& (change_max_deriv.is_none() || child > change_max_deriv.unwrap_or(0))
&& u32::from(path.as_ref()[0]) > *change_max_deriv
{
*change_max_deriv = Some(child);
*change_max_deriv = u32::from(path.as_ref()[0]);
}
}
}
@@ -312,7 +257,6 @@ pub trait ElectrumLikeSync {
sent: outgoing,
height,
timestamp: 0,
fees: inputs_sum - outputs_sum,
};
info!("Saving tx {}", txid);
updates.set_tx(&tx)?;
@@ -322,12 +266,12 @@ pub trait ElectrumLikeSync {
Ok(to_check_later)
}
fn check_history<D: BatchDatabase>(
&self,
async fn check_history<D: DatabaseUtils + BatchDatabase>(
&mut self,
database: &mut D,
script_pubkey: Script,
txs: Vec<ELSGetHistoryRes>,
change_max_deriv: &mut Option<u32>,
change_max_deriv: &mut u32,
) -> Result<Vec<Script>, Error> {
let mut to_check_later = Vec::new();
@@ -344,13 +288,17 @@ pub trait ElectrumLikeSync {
x => u32::try_from(x).ok(),
};
to_check_later.extend_from_slice(&maybe_await!(self.check_tx_and_descendant(
database,
&tx.tx_hash,
height,
&script_pubkey,
change_max_deriv,
))?);
to_check_later.extend_from_slice(
&self
.check_tx_and_descendant(
database,
&tx.tx_hash,
height,
&script_pubkey,
change_max_deriv,
)
.await?,
);
}
Ok(to_check_later)

View File

@@ -1,27 +1,3 @@
// Magical Bitcoin Library
// Written in 2020 by
// Alekos Filini <alekos.filini@gmail.com>
//
// Copyright (c) 2020 Magical Bitcoin
//
// Permission is hereby granted, free of charge, to any person obtaining a copy
// of this software and associated documentation files (the "Software"), to deal
// in the Software without restriction, including without limitation the rights
// to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
// copies of the Software, and to permit persons to whom the Software is
// furnished to do so, subject to the following conditions:
//
// The above copyright notice and this permission notice shall be included in all
// copies or substantial portions of the Software.
//
// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
// IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
// FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
// AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
// LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
// OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
// SOFTWARE.
use std::collections::BTreeMap;
use std::str::FromStr;
@@ -31,16 +7,15 @@ use clap::{App, Arg, ArgMatches, SubCommand};
use log::{debug, error, info, trace, LevelFilter};
use bitcoin::consensus::encode::{deserialize, serialize, serialize_hex};
use bitcoin::hashes::hex::FromHex;
use bitcoin::hashes::hex::{FromHex, ToHex};
use bitcoin::util::psbt::PartiallySignedTransaction;
use bitcoin::{Address, OutPoint, Script, Txid};
use bitcoin::{Address, OutPoint};
use crate::blockchain::log_progress;
use crate::error::Error;
use crate::types::ScriptType;
use crate::{FeeRate, TxBuilder, Wallet};
use crate::Wallet;
fn parse_recipient(s: &str) -> Result<(Script, u64), String> {
fn parse_addressee(s: &str) -> Result<(Address, u64), String> {
let parts: Vec<_> = s.split(":").collect();
if parts.len() != 2 {
return Err("Invalid format".to_string());
@@ -55,15 +30,15 @@ fn parse_recipient(s: &str) -> Result<(Script, u64), String> {
return Err(format!("{:?}", e));
}
Ok((addr.unwrap().script_pubkey(), val.unwrap()))
Ok((addr.unwrap(), val.unwrap()))
}
fn parse_outpoint(s: &str) -> Result<OutPoint, String> {
OutPoint::from_str(s).map_err(|e| format!("{:?}", e))
}
fn recipient_validator(s: String) -> Result<(), String> {
parse_recipient(&s).map(|_| ())
fn addressee_validator(s: String) -> Result<(), String> {
parse_addressee(&s).map(|_| ())
}
fn outpoint_validator(s: String) -> Result<(), String> {
@@ -82,9 +57,6 @@ pub fn make_cli_subcommands<'a, 'b>() -> App<'a, 'b> {
.subcommand(
SubCommand::with_name("list_unspent").about("Lists the available spendable UTXOs"),
)
.subcommand(
SubCommand::with_name("list_transactions").about("Lists all the incoming and outgoing transactions of the wallet"),
)
.subcommand(
SubCommand::with_name("get_balance").about("Returns the current wallet balance"),
)
@@ -95,24 +67,18 @@ pub fn make_cli_subcommands<'a, 'b>() -> App<'a, 'b> {
Arg::with_name("to")
.long("to")
.value_name("ADDRESS:SAT")
.help("Adds a recipient to the transaction")
.help("Adds an addressee to the transaction")
.takes_value(true)
.number_of_values(1)
.required(true)
.multiple(true)
.validator(recipient_validator),
.validator(addressee_validator),
)
.arg(
Arg::with_name("send_all")
.short("all")
.long("send_all")
.help("Sends all the funds (or all the selected utxos). Requires only one recipients of value 0"),
)
.arg(
Arg::with_name("enable_rbf")
.short("rbf")
.long("enable_rbf")
.help("Enables Replace-By-Fee (BIP125)"),
.help("Sends all the funds (or all the selected utxos). Requires only one addressees of value 0"),
)
.arg(
Arg::with_name("utxos")
@@ -151,53 +117,6 @@ pub fn make_cli_subcommands<'a, 'b>() -> App<'a, 'b> {
.number_of_values(1),
),
)
.subcommand(
SubCommand::with_name("bump_fee")
.about("Bumps the fees of an RBF transaction")
.arg(
Arg::with_name("txid")
.required(true)
.takes_value(true)
.short("txid")
.long("txid")
.help("TXID of the transaction to update"),
)
.arg(
Arg::with_name("send_all")
.short("all")
.long("send_all")
.help("Allows the wallet to reduce the amount of the only output in order to increase fees. This is generally the expected behavior for transactions originally created with `send_all`"),
)
.arg(
Arg::with_name("utxos")
.long("utxos")
.value_name("TXID:VOUT")
.help("Selects which utxos *must* be added to the tx. Unconfirmed utxos cannot be used")
.takes_value(true)
.number_of_values(1)
.multiple(true)
.validator(outpoint_validator),
)
.arg(
Arg::with_name("unspendable")
.long("unspendable")
.value_name("TXID:VOUT")
.help("Marks an utxo as unspendable, in case more inputs are needed to cover the extra fees")
.takes_value(true)
.number_of_values(1)
.multiple(true)
.validator(outpoint_validator),
)
.arg(
Arg::with_name("fee_rate")
.required(true)
.short("fee")
.long("fee_rate")
.value_name("SATS_VBYTE")
.help("The new targeted fee rate in sat/vbyte")
.takes_value(true),
),
)
.subcommand(
SubCommand::with_name("policies")
.about("Returns the available spending policies for the descriptor")
@@ -321,15 +240,7 @@ pub fn add_global_flags<'a, 'b>(app: App<'a, 'b>) -> App<'a, 'b> {
.value_name("SERVER:PORT")
.help("Sets the Electrum server to use")
.takes_value(true)
.default_value("ssl://electrum.blockstream.info:60002"),
)
.arg(
Arg::with_name("proxy")
.short("p")
.long("proxy")
.value_name("SERVER:PORT")
.help("Sets the SOCKS5 proxy for the Electrum client")
.takes_value(true),
.default_value("tn.not.fyi:55001"),
)
.arg(
Arg::with_name("descriptor")
@@ -357,119 +268,83 @@ pub fn add_global_flags<'a, 'b>(app: App<'a, 'b>) -> App<'a, 'b> {
.subcommand(SubCommand::with_name("repl").about("Opens an interactive shell"))
}
#[maybe_async]
pub fn handle_matches<C, D>(
pub async fn handle_matches<C, D>(
wallet: &Wallet<C, D>,
matches: ArgMatches<'_>,
) -> Result<serde_json::Value, Error>
) -> Result<Option<String>, Error>
where
C: crate::blockchain::OnlineBlockchain,
D: crate::database::BatchDatabase,
{
if let Some(_sub_matches) = matches.subcommand_matches("get_new_address") {
Ok(json!({
"address": wallet.get_new_address()?
}))
Ok(Some(format!("{}", wallet.get_new_address()?)))
} else if let Some(_sub_matches) = matches.subcommand_matches("sync") {
maybe_await!(wallet.sync(log_progress(), None))?;
Ok(json!({}))
wallet.sync(None, None).await?;
Ok(None)
} else if let Some(_sub_matches) = matches.subcommand_matches("list_unspent") {
Ok(serde_json::to_value(&wallet.list_unspent()?)?)
} else if let Some(_sub_matches) = matches.subcommand_matches("list_transactions") {
Ok(serde_json::to_value(&wallet.list_transactions(false)?)?)
let mut res = String::new();
for utxo in wallet.list_unspent()? {
res += &format!("{} value {} SAT\n", utxo.outpoint, utxo.txout.value);
}
Ok(Some(res))
} else if let Some(_sub_matches) = matches.subcommand_matches("get_balance") {
Ok(json!({
"satoshi": wallet.get_balance()?
}))
Ok(Some(format!("{} SAT", wallet.get_balance()?)))
} else if let Some(sub_matches) = matches.subcommand_matches("create_tx") {
let recipients = sub_matches
let addressees = sub_matches
.values_of("to")
.unwrap()
.map(|s| parse_recipient(s))
.map(|s| parse_addressee(s))
.collect::<Result<Vec<_>, _>>()
.map_err(|s| Error::Generic(s))?;
let mut tx_builder = TxBuilder::with_recipients(recipients);
let send_all = sub_matches.is_present("send_all");
let fee_rate = sub_matches
.value_of("fee_rate")
.map(|s| f32::from_str(s).unwrap())
.unwrap_or(1.0);
let utxos = sub_matches
.values_of("utxos")
.map(|s| s.map(|i| parse_outpoint(i).unwrap()).collect());
let unspendable = sub_matches
.values_of("unspendable")
.map(|s| s.map(|i| parse_outpoint(i).unwrap()).collect());
let policy: Option<_> = sub_matches
.value_of("policy")
.map(|s| serde_json::from_str::<BTreeMap<String, Vec<usize>>>(&s).unwrap());
if sub_matches.is_present("send_all") {
tx_builder = tx_builder.send_all();
}
if sub_matches.is_present("enable_rbf") {
tx_builder = tx_builder.enable_rbf();
}
if let Some(fee_rate) = sub_matches.value_of("fee_rate") {
let fee_rate = f32::from_str(fee_rate).map_err(|s| Error::Generic(s.to_string()))?;
tx_builder = tx_builder.fee_rate(FeeRate::from_sat_per_vb(fee_rate));
}
if let Some(utxos) = sub_matches.values_of("utxos") {
let utxos = utxos
.map(|i| parse_outpoint(i))
.collect::<Result<Vec<_>, _>>()
.map_err(|s| Error::Generic(s.to_string()))?;
tx_builder = tx_builder.utxos(utxos);
}
if let Some(unspendable) = sub_matches.values_of("unspendable") {
let unspendable = unspendable
.map(|i| parse_outpoint(i))
.collect::<Result<Vec<_>, _>>()
.map_err(|s| Error::Generic(s.to_string()))?;
tx_builder = tx_builder.unspendable(unspendable);
}
if let Some(policy) = sub_matches.value_of("policy") {
let policy = serde_json::from_str::<BTreeMap<String, Vec<usize>>>(&policy)
.map_err(|s| Error::Generic(s.to_string()))?;
tx_builder = tx_builder.policy_path(policy);
}
let (psbt, details) = wallet.create_tx(tx_builder)?;
Ok(json!({
"psbt": base64::encode(&serialize(&psbt)),
"details": details,
}))
} else if let Some(sub_matches) = matches.subcommand_matches("bump_fee") {
let txid = Txid::from_str(sub_matches.value_of("txid").unwrap())
.map_err(|s| Error::Generic(s.to_string()))?;
let fee_rate = f32::from_str(sub_matches.value_of("fee_rate").unwrap())
.map_err(|s| Error::Generic(s.to_string()))?;
let mut tx_builder = TxBuilder::new().fee_rate(FeeRate::from_sat_per_vb(fee_rate));
if sub_matches.is_present("send_all") {
tx_builder = tx_builder.send_all();
}
if let Some(utxos) = sub_matches.values_of("utxos") {
let utxos = utxos
.map(|i| parse_outpoint(i))
.collect::<Result<Vec<_>, _>>()
.map_err(|s| Error::Generic(s.to_string()))?;
tx_builder = tx_builder.utxos(utxos);
}
if let Some(unspendable) = sub_matches.values_of("unspendable") {
let unspendable = unspendable
.map(|i| parse_outpoint(i))
.collect::<Result<Vec<_>, _>>()
.map_err(|s| Error::Generic(s.to_string()))?;
tx_builder = tx_builder.unspendable(unspendable);
}
let (psbt, details) = wallet.bump_fee(&txid, tx_builder)?;
Ok(json!({
"psbt": base64::encode(&serialize(&psbt)),
"details": details,
}))
let result = wallet.create_tx(
addressees,
send_all,
fee_rate * 1e-5,
policy,
utxos,
unspendable,
)?;
Ok(Some(format!(
"{:#?}\nPSBT: {}",
result.1,
base64::encode(&serialize(&result.0))
)))
} else if let Some(_sub_matches) = matches.subcommand_matches("policies") {
Ok(json!({
"external": wallet.policies(ScriptType::External)?,
"internal": wallet.policies(ScriptType::Internal)?,
}))
Ok(Some(format!(
"External: {}\nInternal:{}",
serde_json::to_string(&wallet.policies(ScriptType::External)?).unwrap(),
serde_json::to_string(&wallet.policies(ScriptType::Internal)?).unwrap(),
)))
} else if let Some(_sub_matches) = matches.subcommand_matches("public_descriptor") {
Ok(json!({
"external": wallet.public_descriptor(ScriptType::External)?.map(|d| d.to_string()),
"internal": wallet.public_descriptor(ScriptType::Internal)?.map(|d| d.to_string()),
}))
let external = match wallet.public_descriptor(ScriptType::External)? {
Some(desc) => format!("{}", desc),
None => "<NONE>".into(),
};
let internal = match wallet.public_descriptor(ScriptType::Internal)? {
Some(desc) => format!("{}", desc),
None => "<NONE>".into(),
};
Ok(Some(format!(
"External: {}\nInternal:{}",
external, internal
)))
} else if let Some(sub_matches) = matches.subcommand_matches("sign") {
let psbt = base64::decode(sub_matches.value_of("psbt").unwrap()).unwrap();
let psbt: PartiallySignedTransaction = deserialize(&psbt).unwrap();
@@ -477,10 +352,16 @@ where
.value_of("assume_height")
.and_then(|s| Some(s.parse().unwrap()));
let (psbt, finalized) = wallet.sign(psbt, assume_height)?;
Ok(json!({
"psbt": base64::encode(&serialize(&psbt)),
"is_finalized": finalized,
}))
let mut res = String::new();
res += &format!("PSBT: {}\n", base64::encode(&serialize(&psbt)));
res += &format!("Finalized: {}", finalized);
if finalized {
res += &format!("\nExtracted: {}", serialize_hex(&psbt.extract_tx()));
}
Ok(Some(res))
} else if let Some(sub_matches) = matches.subcommand_matches("broadcast") {
let tx = if sub_matches.value_of("psbt").is_some() {
let psbt = base64::decode(&sub_matches.value_of("psbt").unwrap()).unwrap();
@@ -493,27 +374,35 @@ where
panic!("Missing `psbt` and `tx` option");
};
let txid = maybe_await!(wallet.broadcast(tx))?;
Ok(json!({ "txid": txid }))
let txid = wallet.broadcast(tx).await?;
Ok(Some(format!("TXID: {}", txid)))
} else if let Some(sub_matches) = matches.subcommand_matches("extract_psbt") {
let psbt = base64::decode(&sub_matches.value_of("psbt").unwrap()).unwrap();
let psbt: PartiallySignedTransaction = deserialize(&psbt).unwrap();
Ok(json!({
"raw_tx": serialize_hex(&psbt.extract_tx()),
}))
Ok(Some(format!(
"TX: {}",
serialize(&psbt.extract_tx()).to_hex()
)))
} else if let Some(sub_matches) = matches.subcommand_matches("finalize_psbt") {
let psbt = base64::decode(&sub_matches.value_of("psbt").unwrap()).unwrap();
let psbt: PartiallySignedTransaction = deserialize(&psbt).unwrap();
let mut psbt: PartiallySignedTransaction = deserialize(&psbt).unwrap();
let assume_height = sub_matches
.value_of("assume_height")
.and_then(|s| Some(s.parse().unwrap()));
let (psbt, finalized) = wallet.finalize_psbt(psbt, assume_height)?;
Ok(json!({
"psbt": base64::encode(&serialize(&psbt)),
"is_finalized": finalized,
}))
let finalized = wallet.finalize_psbt(&mut psbt, assume_height)?;
let mut res = String::new();
res += &format!("PSBT: {}\n", base64::encode(&serialize(&psbt)));
res += &format!("Finalized: {}", finalized);
if finalized {
res += &format!("\nExtracted: {}", serialize_hex(&psbt.extract_tx()));
}
Ok(Some(res))
} else if let Some(sub_matches) = matches.subcommand_matches("combine_psbt") {
let mut psbts = sub_matches
.values_of("psbt")
@@ -537,8 +426,11 @@ where
},
)?;
Ok(json!({ "psbt": base64::encode(&serialize(&final_psbt)) }))
Ok(Some(format!(
"PSBT: {}",
base64::encode(&serialize(&final_psbt))
)))
} else {
Ok(serde_json::Value::Null)
Ok(None)
}
}

View File

@@ -1,33 +1,10 @@
// Magical Bitcoin Library
// Written in 2020 by
// Alekos Filini <alekos.filini@gmail.com>
//
// Copyright (c) 2020 Magical Bitcoin
//
// Permission is hereby granted, free of charge, to any person obtaining a copy
// of this software and associated documentation files (the "Software"), to deal
// in the Software without restriction, including without limitation the rights
// to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
// copies of the Software, and to permit persons to whom the Software is
// furnished to do so, subject to the following conditions:
//
// The above copyright notice and this permission notice shall be included in all
// copies or substantial portions of the Software.
//
// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
// IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
// FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
// AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
// LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
// OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
// SOFTWARE.
use std::convert::TryInto;
use std::convert::{From, TryInto};
use sled::{Batch, Tree};
use bitcoin::consensus::encode::{deserialize, serialize};
use bitcoin::hash_types::Txid;
use bitcoin::util::bip32::{ChildNumber, DerivationPath};
use bitcoin::{OutPoint, Script, Transaction};
use crate::database::memory::MapKey;
@@ -37,14 +14,15 @@ use crate::types::*;
macro_rules! impl_batch_operations {
( { $($after_insert:tt)* }, $process_delete:ident ) => {
fn set_script_pubkey(&mut self, script: &Script, script_type: ScriptType, path: u32) -> Result<(), Error> {
let key = MapKey::Path((Some(script_type), Some(path))).as_map_key();
fn set_script_pubkey<P: AsRef<[ChildNumber]>>(&mut self, script: &Script, script_type: ScriptType, path: &P) -> Result<(), Error> {
let deriv_path = DerivationPath::from(path.as_ref());
let key = MapKey::Path((Some(script_type), Some(&deriv_path))).as_map_key();
self.insert(key, serialize(script))$($after_insert)*;
let key = MapKey::Script(Some(script)).as_map_key();
let value = json!({
"t": script_type,
"p": path,
"p": deriv_path,
});
self.insert(key, serde_json::to_vec(&value)?)$($after_insert)*;
@@ -53,11 +31,8 @@ macro_rules! impl_batch_operations {
fn set_utxo(&mut self, utxo: &UTXO) -> Result<(), Error> {
let key = MapKey::UTXO(Some(&utxo.outpoint)).as_map_key();
let value = json!({
"t": utxo.txout,
"i": utxo.is_internal,
});
self.insert(key, serde_json::to_vec(&value)?)$($after_insert)*;
let value = serialize(&utxo.txout);
self.insert(key, value)$($after_insert)*;
Ok(())
}
@@ -95,15 +70,16 @@ macro_rules! impl_batch_operations {
Ok(())
}
fn del_script_pubkey_from_path(&mut self, script_type: ScriptType, path: u32) -> Result<Option<Script>, Error> {
let key = MapKey::Path((Some(script_type), Some(path))).as_map_key();
fn del_script_pubkey_from_path<P: AsRef<[ChildNumber]>>(&mut self, script_type: ScriptType, path: &P) -> Result<Option<Script>, Error> {
let deriv_path = DerivationPath::from(path.as_ref());
let key = MapKey::Path((Some(script_type), Some(&deriv_path))).as_map_key();
let res = self.remove(key);
let res = $process_delete!(res);
Ok(res.map_or(Ok(None), |x| Some(deserialize(&x)).transpose())?)
}
fn del_path_from_script_pubkey(&mut self, script: &Script) -> Result<Option<(ScriptType, u32)>, Error> {
fn del_path_from_script_pubkey(&mut self, script: &Script) -> Result<Option<(ScriptType, DerivationPath)>, Error> {
let key = MapKey::Script(Some(script)).as_map_key();
let res = self.remove(key);
let res = $process_delete!(res);
@@ -128,11 +104,8 @@ macro_rules! impl_batch_operations {
match res {
None => Ok(None),
Some(b) => {
let mut val: serde_json::Value = serde_json::from_slice(&b)?;
let txout = serde_json::from_value(val["t"].take())?;
let is_internal = serde_json::from_value(val["i"].take())?;
Ok(Some(UTXO { outpoint: outpoint.clone(), txout, is_internal }))
let txout = deserialize(&b)?;
Ok(Some(UTXO { outpoint: outpoint.clone(), txout }))
}
}
}
@@ -240,16 +213,8 @@ impl Database for Tree {
.map(|x| -> Result<_, Error> {
let (k, v) = x?;
let outpoint = deserialize(&k[1..])?;
let mut val: serde_json::Value = serde_json::from_slice(&v)?;
let txout = serde_json::from_value(val["t"].take())?;
let is_internal = serde_json::from_value(val["i"].take())?;
Ok(UTXO {
outpoint,
txout,
is_internal,
})
let txout = deserialize(&v)?;
Ok(UTXO { outpoint, txout })
})
.collect()
}
@@ -280,19 +245,20 @@ impl Database for Tree {
.collect()
}
fn get_script_pubkey_from_path(
fn get_script_pubkey_from_path<P: AsRef<[ChildNumber]>>(
&self,
script_type: ScriptType,
path: u32,
path: &P,
) -> Result<Option<Script>, Error> {
let key = MapKey::Path((Some(script_type), Some(path))).as_map_key();
let deriv_path = DerivationPath::from(path.as_ref());
let key = MapKey::Path((Some(script_type), Some(&deriv_path))).as_map_key();
Ok(self.get(key)?.map(|b| deserialize(&b)).transpose()?)
}
fn get_path_from_script_pubkey(
&self,
script: &Script,
) -> Result<Option<(ScriptType, u32)>, Error> {
) -> Result<Option<(ScriptType, DerivationPath)>, Error> {
let key = MapKey::Script(Some(script)).as_map_key();
self.get(key)?
.map(|b| -> Result<_, Error> {
@@ -309,14 +275,10 @@ impl Database for Tree {
let key = MapKey::UTXO(Some(outpoint)).as_map_key();
self.get(key)?
.map(|b| -> Result<_, Error> {
let mut val: serde_json::Value = serde_json::from_slice(&b)?;
let txout = serde_json::from_value(val["t"].take())?;
let is_internal = serde_json::from_value(val["i"].take())?;
let txout = deserialize(&b)?;
Ok(UTXO {
outpoint: outpoint.clone(),
txout,
is_internal,
})
})
.transpose()
@@ -396,11 +358,18 @@ impl BatchDatabase for Tree {
#[cfg(test)]
mod test {
use std::str::FromStr;
use std::sync::{Arc, Condvar, Mutex, Once};
use std::time::{SystemTime, UNIX_EPOCH};
use sled::{Db, Tree};
use bitcoin::consensus::encode::deserialize;
use bitcoin::hashes::hex::*;
use bitcoin::*;
use crate::database::*;
static mut COUNT: usize = 0;
lazy_static! {
@@ -441,41 +410,191 @@ mod test {
#[test]
fn test_script_pubkey() {
crate::database::test::test_script_pubkey(get_tree());
let mut tree = get_tree();
let script = Script::from(
Vec::<u8>::from_hex("76a91402306a7c23f3e8010de41e9e591348bb83f11daa88ac").unwrap(),
);
let path = DerivationPath::from_str("m/0/1/2/3").unwrap();
let script_type = ScriptType::External;
tree.set_script_pubkey(&script, script_type, &path).unwrap();
assert_eq!(
tree.get_script_pubkey_from_path(script_type, &path)
.unwrap(),
Some(script.clone())
);
assert_eq!(
tree.get_path_from_script_pubkey(&script).unwrap(),
Some((script_type, path.clone()))
);
}
#[test]
fn test_batch_script_pubkey() {
crate::database::test::test_batch_script_pubkey(get_tree());
let mut tree = get_tree();
let mut batch = tree.begin_batch();
let script = Script::from(
Vec::<u8>::from_hex("76a91402306a7c23f3e8010de41e9e591348bb83f11daa88ac").unwrap(),
);
let path = DerivationPath::from_str("m/0/1/2/3").unwrap();
let script_type = ScriptType::External;
batch
.set_script_pubkey(&script, script_type, &path)
.unwrap();
assert_eq!(
tree.get_script_pubkey_from_path(script_type, &path)
.unwrap(),
None
);
assert_eq!(tree.get_path_from_script_pubkey(&script).unwrap(), None);
tree.commit_batch(batch).unwrap();
assert_eq!(
tree.get_script_pubkey_from_path(script_type, &path)
.unwrap(),
Some(script.clone())
);
assert_eq!(
tree.get_path_from_script_pubkey(&script).unwrap(),
Some((script_type, path.clone()))
);
}
#[test]
fn test_iter_script_pubkey() {
crate::database::test::test_iter_script_pubkey(get_tree());
let mut tree = get_tree();
let script = Script::from(
Vec::<u8>::from_hex("76a91402306a7c23f3e8010de41e9e591348bb83f11daa88ac").unwrap(),
);
let path = DerivationPath::from_str("m/0/1/2/3").unwrap();
let script_type = ScriptType::External;
tree.set_script_pubkey(&script, script_type, &path).unwrap();
assert_eq!(tree.iter_script_pubkeys(None).unwrap().len(), 1);
}
#[test]
fn test_del_script_pubkey() {
crate::database::test::test_del_script_pubkey(get_tree());
let mut tree = get_tree();
let script = Script::from(
Vec::<u8>::from_hex("76a91402306a7c23f3e8010de41e9e591348bb83f11daa88ac").unwrap(),
);
let path = DerivationPath::from_str("m/0/1/2/3").unwrap();
let script_type = ScriptType::External;
tree.set_script_pubkey(&script, script_type, &path).unwrap();
assert_eq!(tree.iter_script_pubkeys(None).unwrap().len(), 1);
tree.del_script_pubkey_from_path(script_type, &path)
.unwrap();
assert_eq!(tree.iter_script_pubkeys(None).unwrap().len(), 0);
}
#[test]
fn test_utxo() {
crate::database::test::test_utxo(get_tree());
let mut tree = get_tree();
let outpoint = OutPoint::from_str(
"5df6e0e2761359d30a8275058e299fcc0381534545f55cf43e41983f5d4c9456:0",
)
.unwrap();
let script = Script::from(
Vec::<u8>::from_hex("76a91402306a7c23f3e8010de41e9e591348bb83f11daa88ac").unwrap(),
);
let txout = TxOut {
value: 133742,
script_pubkey: script,
};
let utxo = UTXO { txout, outpoint };
tree.set_utxo(&utxo).unwrap();
assert_eq!(tree.get_utxo(&outpoint).unwrap(), Some(utxo));
}
#[test]
fn test_raw_tx() {
crate::database::test::test_raw_tx(get_tree());
let mut tree = get_tree();
let hex_tx = Vec::<u8>::from_hex("0100000001a15d57094aa7a21a28cb20b59aab8fc7d1149a3bdbcddba9c622e4f5f6a99ece010000006c493046022100f93bb0e7d8db7bd46e40132d1f8242026e045f03a0efe71bbb8e3f475e970d790221009337cd7f1f929f00cc6ff01f03729b069a7c21b59b1736ddfee5db5946c5da8c0121033b9b137ee87d5a812d6f506efdd37f0affa7ffc310711c06c7f3e097c9447c52ffffffff0100e1f505000000001976a9140389035a9225b3839e2bbf32d826a1e222031fd888ac00000000").unwrap();
let tx: Transaction = deserialize(&hex_tx).unwrap();
tree.set_raw_tx(&tx).unwrap();
let txid = tx.txid();
assert_eq!(tree.get_raw_tx(&txid).unwrap(), Some(tx));
}
#[test]
fn test_tx() {
crate::database::test::test_tx(get_tree());
let mut tree = get_tree();
let hex_tx = Vec::<u8>::from_hex("0100000001a15d57094aa7a21a28cb20b59aab8fc7d1149a3bdbcddba9c622e4f5f6a99ece010000006c493046022100f93bb0e7d8db7bd46e40132d1f8242026e045f03a0efe71bbb8e3f475e970d790221009337cd7f1f929f00cc6ff01f03729b069a7c21b59b1736ddfee5db5946c5da8c0121033b9b137ee87d5a812d6f506efdd37f0affa7ffc310711c06c7f3e097c9447c52ffffffff0100e1f505000000001976a9140389035a9225b3839e2bbf32d826a1e222031fd888ac00000000").unwrap();
let tx: Transaction = deserialize(&hex_tx).unwrap();
let txid = tx.txid();
let mut tx_details = TransactionDetails {
transaction: Some(tx),
txid,
timestamp: 123456,
received: 1337,
sent: 420420,
height: Some(1000),
};
tree.set_tx(&tx_details).unwrap();
// get with raw tx too
assert_eq!(
tree.get_tx(&tx_details.txid, true).unwrap(),
Some(tx_details.clone())
);
// get only raw_tx
assert_eq!(
tree.get_raw_tx(&tx_details.txid).unwrap(),
tx_details.transaction
);
// now get without raw_tx
tx_details.transaction = None;
assert_eq!(
tree.get_tx(&tx_details.txid, false).unwrap(),
Some(tx_details)
);
}
// Exercise the per-script-type derivation-index counter: set, read,
// increment, and the insert-0-when-absent behaviour of `increment_last_index`.
#[test]
fn test_last_index() {
    // Run the shared database test-suite case against a fresh instance first.
    crate::database::test::test_last_index(get_tree());

    let mut db = get_tree();
    db.set_last_index(ScriptType::External, 1337).unwrap();

    assert_eq!(db.get_last_index(ScriptType::External).unwrap(), Some(1337));
    assert_eq!(db.get_last_index(ScriptType::Internal).unwrap(), None);

    // Incrementing bumps the stored value; on a missing key it seeds 0.
    assert_eq!(db.increment_last_index(ScriptType::External).unwrap(), 1338);
    assert_eq!(db.increment_last_index(ScriptType::Internal).unwrap(), 0);

    assert_eq!(db.get_last_index(ScriptType::External).unwrap(), Some(1338));
    assert_eq!(db.get_last_index(ScriptType::Internal).unwrap(), Some(0));
}
// TODO: more tests...
}

View File

@@ -1,37 +1,9 @@
// Magical Bitcoin Library
// Written in 2020 by
// Alekos Filini <alekos.filini@gmail.com>
//
// Copyright (c) 2020 Magical Bitcoin
//
// Permission is hereby granted, free of charge, to any person obtaining a copy
// of this software and associated documentation files (the "Software"), to deal
// in the Software without restriction, including without limitation the rights
// to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
// copies of the Software, and to permit persons to whom the Software is
// furnished to do so, subject to the following conditions:
//
// The above copyright notice and this permission notice shall be included in all
// copies or substantial portions of the Software.
//
// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
// IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
// FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
// AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
// LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
// OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
// SOFTWARE.
//! In-memory ephemeral database
//!
//! This module defines an in-memory database type called [`MemoryDatabase`] that is based on a
//! [`BTreeMap`].
use std::collections::BTreeMap;
use std::ops::Bound::{Excluded, Included};
use bitcoin::consensus::encode::{deserialize, serialize};
use bitcoin::hash_types::Txid;
use bitcoin::util::bip32::{ChildNumber, DerivationPath};
use bitcoin::{OutPoint, Script, Transaction};
use crate::database::{BatchDatabase, BatchOperations, Database};
@@ -47,7 +19,7 @@ use crate::types::*;
// descriptor checksum d{i,e} -> vec<u8>
pub(crate) enum MapKey<'a> {
Path((Option<ScriptType>, Option<u32>)),
Path((Option<ScriptType>, Option<&'a DerivationPath>)),
Script(Option<&'a Script>),
UTXO(Option<&'a OutPoint>),
RawTx(Option<&'a Txid>),
@@ -77,7 +49,13 @@ impl MapKey<'_> {
fn serialize_content(&self) -> Vec<u8> {
match self {
MapKey::Path((_, Some(child))) => u32::from(*child).to_be_bytes().to_vec(),
MapKey::Path((_, Some(path))) => {
let mut res = vec![];
for val in *path {
res.extend(&u32::from(*val).to_be_bytes());
}
res
}
MapKey::Script(Some(s)) => serialize(*s),
MapKey::UTXO(Some(s)) => serialize(*s),
MapKey::RawTx(Some(s)) => serialize(*s),
@@ -96,39 +74,22 @@ impl MapKey<'_> {
fn after(key: &Vec<u8>) -> Vec<u8> {
let mut key = key.clone();
let mut idx = key.len();
while idx > 0 {
if key[idx - 1] == 0xFF {
idx -= 1;
continue;
} else {
key[idx - 1] += 1;
break;
}
let len = key.len();
if len > 0 {
// TODO i guess it could break if the value is 0xFF, but it's fine for now
key[len - 1] += 1;
}
key
}
/// In-memory ephemeral database
///
/// This database can be used as a temporary storage for wallets that are not kept permanently on
/// a device, or on platforms that don't provide a filesystem, like `wasm32`.
///
/// Once it's dropped its content will be lost.
///
/// If you are looking for a permanent storage solution, you can try with the default key-value
/// database called [`sled`]. See the [`database`] module documentation for more details.
///
/// [`database`]: crate::database
#[derive(Debug, Default)]
#[derive(Debug)]
pub struct MemoryDatabase {
map: BTreeMap<Vec<u8>, Box<dyn std::any::Any>>,
deleted_keys: Vec<Vec<u8>>,
}
impl MemoryDatabase {
/// Create a new empty database
pub fn new() -> Self {
MemoryDatabase {
map: BTreeMap::new(),
@@ -138,19 +99,20 @@ impl MemoryDatabase {
}
impl BatchOperations for MemoryDatabase {
fn set_script_pubkey(
fn set_script_pubkey<P: AsRef<[ChildNumber]>>(
&mut self,
script: &Script,
script_type: ScriptType,
path: u32,
path: &P,
) -> Result<(), Error> {
let key = MapKey::Path((Some(script_type), Some(path))).as_map_key();
let deriv_path = DerivationPath::from(path.as_ref());
let key = MapKey::Path((Some(script_type), Some(&deriv_path))).as_map_key();
self.map.insert(key, Box::new(script.clone()));
let key = MapKey::Script(Some(script)).as_map_key();
let value = json!({
"t": script_type,
"p": path,
"p": deriv_path,
});
self.map.insert(key, Box::new(value));
@@ -159,8 +121,7 @@ impl BatchOperations for MemoryDatabase {
fn set_utxo(&mut self, utxo: &UTXO) -> Result<(), Error> {
let key = MapKey::UTXO(Some(&utxo.outpoint)).as_map_key();
self.map
.insert(key, Box::new((utxo.txout.clone(), utxo.is_internal)));
self.map.insert(key, Box::new(utxo.txout.clone()));
Ok(())
}
@@ -193,12 +154,13 @@ impl BatchOperations for MemoryDatabase {
Ok(())
}
fn del_script_pubkey_from_path(
fn del_script_pubkey_from_path<P: AsRef<[ChildNumber]>>(
&mut self,
script_type: ScriptType,
path: u32,
path: &P,
) -> Result<Option<Script>, Error> {
let key = MapKey::Path((Some(script_type), Some(path))).as_map_key();
let deriv_path = DerivationPath::from(path.as_ref());
let key = MapKey::Path((Some(script_type), Some(&deriv_path))).as_map_key();
let res = self.map.remove(&key);
self.deleted_keys.push(key);
@@ -207,7 +169,7 @@ impl BatchOperations for MemoryDatabase {
fn del_path_from_script_pubkey(
&mut self,
script: &Script,
) -> Result<Option<(ScriptType, u32)>, Error> {
) -> Result<Option<(ScriptType, DerivationPath)>, Error> {
let key = MapKey::Script(Some(script)).as_map_key();
let res = self.map.remove(&key);
self.deleted_keys.push(key);
@@ -231,11 +193,10 @@ impl BatchOperations for MemoryDatabase {
match res {
None => Ok(None),
Some(b) => {
let (txout, is_internal) = b.downcast_ref().cloned().unwrap();
let txout = b.downcast_ref().cloned().unwrap();
Ok(Some(UTXO {
outpoint: outpoint.clone(),
txout,
is_internal,
}))
}
}
@@ -322,12 +283,8 @@ impl Database for MemoryDatabase {
.range::<Vec<u8>, _>((Included(&key), Excluded(&after(&key))))
.map(|(k, v)| {
let outpoint = deserialize(&k[1..]).unwrap();
let (txout, is_internal) = v.downcast_ref().cloned().unwrap();
Ok(UTXO {
outpoint,
txout,
is_internal,
})
let txout = v.downcast_ref().cloned().unwrap();
Ok(UTXO { outpoint, txout })
})
.collect()
}
@@ -356,12 +313,13 @@ impl Database for MemoryDatabase {
.collect()
}
fn get_script_pubkey_from_path(
fn get_script_pubkey_from_path<P: AsRef<[ChildNumber]>>(
&self,
script_type: ScriptType,
path: u32,
path: &P,
) -> Result<Option<Script>, Error> {
let key = MapKey::Path((Some(script_type), Some(path))).as_map_key();
let deriv_path = DerivationPath::from(path.as_ref());
let key = MapKey::Path((Some(script_type), Some(&deriv_path))).as_map_key();
Ok(self
.map
.get(&key)
@@ -371,7 +329,7 @@ impl Database for MemoryDatabase {
fn get_path_from_script_pubkey(
&self,
script: &Script,
) -> Result<Option<(ScriptType, u32)>, Error> {
) -> Result<Option<(ScriptType, DerivationPath)>, Error> {
let key = MapKey::Script(Some(script)).as_map_key();
Ok(self.map.get(&key).map(|b| {
let mut val: serde_json::Value = b.downcast_ref().cloned().unwrap();
@@ -385,11 +343,10 @@ impl Database for MemoryDatabase {
fn get_utxo(&self, outpoint: &OutPoint) -> Result<Option<UTXO>, Error> {
let key = MapKey::UTXO(Some(outpoint)).as_map_key();
Ok(self.map.get(&key).map(|b| {
let (txout, is_internal) = b.downcast_ref().cloned().unwrap();
let txout = b.downcast_ref().cloned().unwrap();
UTXO {
outpoint: outpoint.clone(),
txout,
is_internal,
}
}))
}
@@ -450,67 +407,18 @@ impl BatchDatabase for MemoryDatabase {
}
}
#[cfg(test)]
impl MemoryDatabase {
// Artificially insert a tx in the database, as if we had found it with a `sync`
pub fn received_tx(
&mut self,
tx_meta: testutils::TestIncomingTx,
current_height: Option<u32>,
) -> bitcoin::Txid {
use std::str::FromStr;
let tx = Transaction {
version: 1,
lock_time: 0,
input: vec![],
output: tx_meta
.output
.iter()
.map(|out_meta| bitcoin::TxOut {
value: out_meta.value,
script_pubkey: bitcoin::Address::from_str(&out_meta.to_address)
.unwrap()
.script_pubkey(),
})
.collect(),
};
let txid = tx.txid();
let height = tx_meta
.min_confirmations
.map(|conf| current_height.unwrap().checked_sub(conf as u32).unwrap());
let tx_details = TransactionDetails {
transaction: Some(tx.clone()),
txid,
timestamp: 0,
height,
received: 0,
sent: 0,
fees: 0,
};
self.set_tx(&tx_details).unwrap();
for (vout, out) in tx.output.iter().enumerate() {
self.set_utxo(&UTXO {
txout: out.clone(),
outpoint: OutPoint {
txid,
vout: vout as u32,
},
is_internal: false,
})
.unwrap();
}
txid
}
}
#[cfg(test)]
mod test {
use super::MemoryDatabase;
use std::str::FromStr;
use std::sync::{Arc, Condvar, Mutex, Once};
use std::time::{SystemTime, UNIX_EPOCH};
use bitcoin::consensus::encode::deserialize;
use bitcoin::hashes::hex::*;
use bitcoin::*;
use super::*;
use crate::database::*;
fn get_tree() -> MemoryDatabase {
MemoryDatabase::new()
@@ -518,41 +426,215 @@ mod test {
// Round-trip a script_pubkey: store it under (script_type, path) and read it
// back through both lookup directions.
#[test]
fn test_script_pubkey() {
// Run the shared database test-suite case against a fresh instance first.
crate::database::test::test_script_pubkey(get_tree());
let mut tree = get_tree();
let script = Script::from(
Vec::<u8>::from_hex("76a91402306a7c23f3e8010de41e9e591348bb83f11daa88ac").unwrap(),
);
let path = DerivationPath::from_str("m/0/1/2/3").unwrap();
let script_type = ScriptType::External;
tree.set_script_pubkey(&script, script_type, &path).unwrap();
// Forward lookup: (type, path) -> script.
assert_eq!(
tree.get_script_pubkey_from_path(script_type, &path)
.unwrap(),
Some(script.clone())
);
// Reverse lookup: script -> (type, path).
assert_eq!(
tree.get_path_from_script_pubkey(&script).unwrap(),
Some((script_type, path.clone()))
);
}
// A script_pubkey written through a batch must be invisible until the batch
// is committed, then readable through both lookup directions.
#[test]
fn test_batch_script_pubkey() {
// Run the shared database test-suite case against a fresh instance first.
crate::database::test::test_batch_script_pubkey(get_tree());
let mut tree = get_tree();
let mut batch = tree.begin_batch();
let script = Script::from(
Vec::<u8>::from_hex("76a91402306a7c23f3e8010de41e9e591348bb83f11daa88ac").unwrap(),
);
let path = DerivationPath::from_str("m/0/1/2/3").unwrap();
let script_type = ScriptType::External;
batch
.set_script_pubkey(&script, script_type, &path)
.unwrap();
// Written only to the batch: the database itself must not see it yet.
assert_eq!(
tree.get_script_pubkey_from_path(script_type, &path)
.unwrap(),
None
);
assert_eq!(tree.get_path_from_script_pubkey(&script).unwrap(), None);
tree.commit_batch(batch).unwrap();
// After the commit both lookup directions resolve.
assert_eq!(
tree.get_script_pubkey_from_path(script_type, &path)
.unwrap(),
Some(script.clone())
);
assert_eq!(
tree.get_path_from_script_pubkey(&script).unwrap(),
Some((script_type, path.clone()))
);
}
// A single stored script_pubkey must show up exactly once when iterating
// without a script-type filter.
#[test]
fn test_iter_script_pubkey() {
    // Run the shared database test-suite case against a fresh instance first.
    crate::database::test::test_iter_script_pubkey(get_tree());

    let mut db = get_tree();
    let spk = Script::from(
        Vec::<u8>::from_hex("76a91402306a7c23f3e8010de41e9e591348bb83f11daa88ac").unwrap(),
    );
    let deriv = DerivationPath::from_str("m/0/1/2/3").unwrap();
    db.set_script_pubkey(&spk, ScriptType::External, &deriv)
        .unwrap();

    assert_eq!(db.iter_script_pubkeys(None).unwrap().len(), 1);
}
// Deleting by (script_type, path) removes the entry from iteration.
#[test]
fn test_del_script_pubkey() {
// Run the shared database test-suite case against a fresh instance first.
crate::database::test::test_del_script_pubkey(get_tree());
let mut tree = get_tree();
let script = Script::from(
Vec::<u8>::from_hex("76a91402306a7c23f3e8010de41e9e591348bb83f11daa88ac").unwrap(),
);
let path = DerivationPath::from_str("m/0/1/2/3").unwrap();
let script_type = ScriptType::External;
tree.set_script_pubkey(&script, script_type, &path).unwrap();
assert_eq!(tree.iter_script_pubkeys(None).unwrap().len(), 1);
// Remove it and confirm iteration no longer yields it.
tree.del_script_pubkey_from_path(script_type, &path)
.unwrap();
assert_eq!(tree.iter_script_pubkeys(None).unwrap().len(), 0);
}
// A deletion staged in a batch must only take effect once the batch is
// committed back to the database.
#[test]
fn test_del_script_pubkey_batch() {
    let mut tree = get_tree();
    let script = Script::from(
        Vec::<u8>::from_hex("76a91402306a7c23f3e8010de41e9e591348bb83f11daa88ac").unwrap(),
    );
    let path = DerivationPath::from_str("m/0/1/2/3").unwrap();
    let script_type = ScriptType::External;
    tree.set_script_pubkey(&script, script_type, &path).unwrap();
    assert_eq!(tree.iter_script_pubkeys(None).unwrap().len(), 1);

    let mut batch = tree.begin_batch();
    batch
        .del_script_pubkey_from_path(script_type, &path)
        .unwrap();
    // Staged only: the entry must still be visible before the commit.
    assert_eq!(tree.iter_script_pubkeys(None).unwrap().len(), 1);

    // Fix: the Result of commit_batch was silently dropped, so a failed
    // commit would not fail here but only via the confusing assert below.
    tree.commit_batch(batch).unwrap();
    assert_eq!(tree.iter_script_pubkeys(None).unwrap().len(), 0);
}
// Round-trip a UTXO: store it keyed by its outpoint and read it back.
#[test]
fn test_utxo() {
// Run the shared database test-suite case against a fresh instance first.
crate::database::test::test_utxo(get_tree());
let mut tree = get_tree();
let outpoint = OutPoint::from_str(
"5df6e0e2761359d30a8275058e299fcc0381534545f55cf43e41983f5d4c9456:0",
)
.unwrap();
let script = Script::from(
Vec::<u8>::from_hex("76a91402306a7c23f3e8010de41e9e591348bb83f11daa88ac").unwrap(),
);
let txout = TxOut {
value: 133742,
script_pubkey: script,
};
let utxo = UTXO { txout, outpoint };
tree.set_utxo(&utxo).unwrap();
assert_eq!(tree.get_utxo(&outpoint).unwrap(), Some(utxo));
}
// Round-trip a raw transaction: store it, then fetch it back by txid.
#[test]
fn test_raw_tx() {
// Run the shared database test-suite case against a fresh instance first.
crate::database::test::test_raw_tx(get_tree());
let mut tree = get_tree();
let hex_tx = Vec::<u8>::from_hex("0100000001a15d57094aa7a21a28cb20b59aab8fc7d1149a3bdbcddba9c622e4f5f6a99ece010000006c493046022100f93bb0e7d8db7bd46e40132d1f8242026e045f03a0efe71bbb8e3f475e970d790221009337cd7f1f929f00cc6ff01f03729b069a7c21b59b1736ddfee5db5946c5da8c0121033b9b137ee87d5a812d6f506efdd37f0affa7ffc310711c06c7f3e097c9447c52ffffffff0100e1f505000000001976a9140389035a9225b3839e2bbf32d826a1e222031fd888ac00000000").unwrap();
let tx: Transaction = deserialize(&hex_tx).unwrap();
tree.set_raw_tx(&tx).unwrap();
let txid = tx.txid();
assert_eq!(tree.get_raw_tx(&txid).unwrap(), Some(tx));
}
// Store transaction metadata with the raw tx attached, then exercise the
// three read paths: metadata+raw, raw only, and metadata only.
#[test]
fn test_tx() {
// Run the shared database test-suite case against a fresh instance first.
crate::database::test::test_tx(get_tree());
let mut tree = get_tree();
let hex_tx = Vec::<u8>::from_hex("0100000001a15d57094aa7a21a28cb20b59aab8fc7d1149a3bdbcddba9c622e4f5f6a99ece010000006c493046022100f93bb0e7d8db7bd46e40132d1f8242026e045f03a0efe71bbb8e3f475e970d790221009337cd7f1f929f00cc6ff01f03729b069a7c21b59b1736ddfee5db5946c5da8c0121033b9b137ee87d5a812d6f506efdd37f0affa7ffc310711c06c7f3e097c9447c52ffffffff0100e1f505000000001976a9140389035a9225b3839e2bbf32d826a1e222031fd888ac00000000").unwrap();
let tx: Transaction = deserialize(&hex_tx).unwrap();
let txid = tx.txid();
// NOTE(review): this literal has no `fees` field while the copy in
// `database::test` sets `fees: 140` — confirm the struct definition in use.
let mut tx_details = TransactionDetails {
transaction: Some(tx),
txid,
timestamp: 123456,
received: 1337,
sent: 420420,
height: Some(1000),
};
tree.set_tx(&tx_details).unwrap();
// get with raw tx too
assert_eq!(
tree.get_tx(&tx_details.txid, true).unwrap(),
Some(tx_details.clone())
);
// get only raw_tx
assert_eq!(
tree.get_raw_tx(&tx_details.txid).unwrap(),
tx_details.transaction
);
// now get without raw_tx
tx_details.transaction = None;
assert_eq!(
tree.get_tx(&tx_details.txid, false).unwrap(),
Some(tx_details)
);
}
// Exercise the per-script-type derivation-index counter: set, read,
// increment, and the insert-0-when-absent behaviour of increment_last_index.
#[test]
fn test_last_index() {
// Run the shared database test-suite case against a fresh instance first.
crate::database::test::test_last_index(get_tree());
let mut tree = get_tree();
tree.set_last_index(ScriptType::External, 1337).unwrap();
assert_eq!(
tree.get_last_index(ScriptType::External).unwrap(),
Some(1337)
);
assert_eq!(tree.get_last_index(ScriptType::Internal).unwrap(), None);
// Incrementing bumps the stored value...
let res = tree.increment_last_index(ScriptType::External).unwrap();
assert_eq!(res, 1338);
// ...and seeds 0 when no index was stored for that script type.
let res = tree.increment_last_index(ScriptType::Internal).unwrap();
assert_eq!(res, 0);
assert_eq!(
tree.get_last_index(ScriptType::External).unwrap(),
Some(1338)
);
assert_eq!(tree.get_last_index(ScriptType::Internal).unwrap(), Some(0));
}
// TODO: more tests...
}

View File

@@ -1,165 +1,83 @@
// Magical Bitcoin Library
// Written in 2020 by
// Alekos Filini <alekos.filini@gmail.com>
//
// Copyright (c) 2020 Magical Bitcoin
//
// Permission is hereby granted, free of charge, to any person obtaining a copy
// of this software and associated documentation files (the "Software"), to deal
// in the Software without restriction, including without limitation the rights
// to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
// copies of the Software, and to permit persons to whom the Software is
// furnished to do so, subject to the following conditions:
//
// The above copyright notice and this permission notice shall be included in all
// copies or substantial portions of the Software.
//
// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
// IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
// FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
// AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
// LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
// OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
// SOFTWARE.
//! Database types
//!
//! This module provides the implementation of some defaults database types, along with traits that
//! can be implemented externally to let [`Wallet`]s use customized databases.
//!
//! It's important to note that the databases defined here only contains "blockchain-related" data.
//! They can be seen more as a cache than a critical piece of storage that contains secrets and
//! keys.
//!
//! The currently recommended database is [`sled`], which is a pretty simple key-value embedded
//! database written in Rust. If the `key-value-db` feature is enabled (which by default is),
//! this library automatically implements all the required traits for [`sled::Tree`].
//!
//! [`Wallet`]: crate::wallet::Wallet
use bitcoin::hash_types::Txid;
use bitcoin::util::bip32::{ChildNumber, DerivationPath};
use bitcoin::{OutPoint, Script, Transaction, TxOut};
use crate::error::Error;
use crate::types::*;
#[cfg(feature = "key-value-db")]
pub(crate) mod keyvalue;
#[cfg(any(feature = "key-value-db", feature = "default"))]
pub mod keyvalue;
pub mod memory;
pub use memory::MemoryDatabase;
/// Trait for operations that can be batched
///
/// This trait defines the list of operations that must be implemented on the [`Database`] type and
/// the [`BatchDatabase::Batch`] type.
pub trait BatchOperations {
/// Store a script_pubkey along with its script type and child number
fn set_script_pubkey(
fn set_script_pubkey<P: AsRef<[ChildNumber]>>(
&mut self,
script: &Script,
script_type: ScriptType,
child: u32,
path: &P,
) -> Result<(), Error>;
/// Store a [`UTXO`]
fn set_utxo(&mut self, utxo: &UTXO) -> Result<(), Error>;
/// Store a raw transaction
fn set_raw_tx(&mut self, transaction: &Transaction) -> Result<(), Error>;
/// Store the metadata of a transaction
fn set_tx(&mut self, transaction: &TransactionDetails) -> Result<(), Error>;
/// Store the last derivation index for a given script type
fn set_last_index(&mut self, script_type: ScriptType, value: u32) -> Result<(), Error>;
/// Delete a script_pubkey given the script type and its child number
fn del_script_pubkey_from_path(
fn del_script_pubkey_from_path<P: AsRef<[ChildNumber]>>(
&mut self,
script_type: ScriptType,
child: u32,
path: &P,
) -> Result<Option<Script>, Error>;
/// Delete the data related to a specific script_pubkey, meaning the script type and the child
/// number
fn del_path_from_script_pubkey(
&mut self,
script: &Script,
) -> Result<Option<(ScriptType, u32)>, Error>;
/// Delete a [`UTXO`] given its [`OutPoint`]
) -> Result<Option<(ScriptType, DerivationPath)>, Error>;
fn del_utxo(&mut self, outpoint: &OutPoint) -> Result<Option<UTXO>, Error>;
/// Delete a raw transaction given its [`Txid`]
fn del_raw_tx(&mut self, txid: &Txid) -> Result<Option<Transaction>, Error>;
/// Delete the metadata of a transaction and optionally the raw transaction itself
fn del_tx(
&mut self,
txid: &Txid,
include_raw: bool,
) -> Result<Option<TransactionDetails>, Error>;
/// Delete the last derivation index for a script type
fn del_last_index(&mut self, script_type: ScriptType) -> Result<Option<u32>, Error>;
}
/// Trait for reading data from a database
///
/// This traits defines the operations that can be used to read data out of a database
pub trait Database: BatchOperations {
/// Read and checks the descriptor checksum for a given script type
///
/// Should return [`Error::ChecksumMismatch`](crate::error::Error::ChecksumMismatch) if the
/// checksum doesn't match. If there's no checksum in the database, simply store it for the
/// next time.
fn check_descriptor_checksum<B: AsRef<[u8]>>(
&mut self,
script_type: ScriptType,
bytes: B,
) -> Result<(), Error>;
/// Return the list of script_pubkeys
fn iter_script_pubkeys(&self, script_type: Option<ScriptType>) -> Result<Vec<Script>, Error>;
/// Return the list of [`UTXO`]s
fn iter_utxos(&self) -> Result<Vec<UTXO>, Error>;
/// Return the list of raw transactions
fn iter_raw_txs(&self) -> Result<Vec<Transaction>, Error>;
/// Return the list of transactions metadata
fn iter_txs(&self, include_raw: bool) -> Result<Vec<TransactionDetails>, Error>;
/// Fetch a script_pubkey given the script type and child number
fn get_script_pubkey_from_path(
fn get_script_pubkey_from_path<P: AsRef<[ChildNumber]>>(
&self,
script_type: ScriptType,
child: u32,
path: &P,
) -> Result<Option<Script>, Error>;
/// Fetch the script type and child number of a given script_pubkey
fn get_path_from_script_pubkey(
&self,
script: &Script,
) -> Result<Option<(ScriptType, u32)>, Error>;
/// Fetch a [`UTXO`] given its [`OutPoint`]
) -> Result<Option<(ScriptType, DerivationPath)>, Error>;
fn get_utxo(&self, outpoint: &OutPoint) -> Result<Option<UTXO>, Error>;
/// Fetch a raw transaction given its [`Txid`]
fn get_raw_tx(&self, txid: &Txid) -> Result<Option<Transaction>, Error>;
/// Fetch the transaction metadata and optionally also the raw transaction
fn get_tx(&self, txid: &Txid, include_raw: bool) -> Result<Option<TransactionDetails>, Error>;
/// Return the last derivation index for a script type
fn get_last_index(&self, script_type: ScriptType) -> Result<Option<u32>, Error>;
/// Increment the last derivation index for a script type and returns it
///
/// It should insert and return `0` if not present in the database
// inserts 0 if not present
fn increment_last_index(&mut self, script_type: ScriptType) -> Result<u32, Error>;
}
/// Trait for a database that supports batch operations
///
/// This trait defines the methods to start and apply a batch of operations.
pub trait BatchDatabase: Database {
/// Container for the operations
type Batch: BatchOperations;
/// Create a new batch container
fn begin_batch(&self) -> Self::Batch;
/// Consume and apply a batch of operations
fn commit_batch(&mut self, batch: Self::Batch) -> Result<(), Error>;
}
pub(crate) trait DatabaseUtils: Database {
pub trait DatabaseUtils: Database {
fn is_mine(&self, script: &Script) -> Result<bool, Error> {
self.get_path_from_script_pubkey(script)
.map(|o| o.is_some())
@@ -177,11 +95,11 @@ pub(crate) trait DatabaseUtils: Database {
fn get_previous_output(&self, outpoint: &OutPoint) -> Result<Option<TxOut>, Error> {
self.get_raw_tx(&outpoint.txid)?
.map(|previous_tx| {
.and_then(|previous_tx| {
if outpoint.vout as usize >= previous_tx.output.len() {
Err(Error::InvalidOutpoint(outpoint.clone()))
Some(Err(Error::InvalidOutpoint(outpoint.clone())))
} else {
Ok(previous_tx.output[outpoint.vout as usize].clone())
Some(Ok(previous_tx.output[outpoint.vout as usize].clone()))
}
})
.transpose()
@@ -189,180 +107,3 @@ pub(crate) trait DatabaseUtils: Database {
}
impl<T: Database> DatabaseUtils for T {}
#[cfg(test)]
pub mod test {
use std::str::FromStr;
use bitcoin::consensus::encode::deserialize;
use bitcoin::hashes::hex::*;
use bitcoin::*;
use super::*;
pub fn test_script_pubkey<D: Database>(mut tree: D) {
let script = Script::from(
Vec::<u8>::from_hex("76a91402306a7c23f3e8010de41e9e591348bb83f11daa88ac").unwrap(),
);
let path = 42;
let script_type = ScriptType::External;
tree.set_script_pubkey(&script, script_type, path).unwrap();
assert_eq!(
tree.get_script_pubkey_from_path(script_type, path).unwrap(),
Some(script.clone())
);
assert_eq!(
tree.get_path_from_script_pubkey(&script).unwrap(),
Some((script_type, path.clone()))
);
}
pub fn test_batch_script_pubkey<D: BatchDatabase>(mut tree: D) {
let mut batch = tree.begin_batch();
let script = Script::from(
Vec::<u8>::from_hex("76a91402306a7c23f3e8010de41e9e591348bb83f11daa88ac").unwrap(),
);
let path = 42;
let script_type = ScriptType::External;
batch.set_script_pubkey(&script, script_type, path).unwrap();
assert_eq!(
tree.get_script_pubkey_from_path(script_type, path).unwrap(),
None
);
assert_eq!(tree.get_path_from_script_pubkey(&script).unwrap(), None);
tree.commit_batch(batch).unwrap();
assert_eq!(
tree.get_script_pubkey_from_path(script_type, path).unwrap(),
Some(script.clone())
);
assert_eq!(
tree.get_path_from_script_pubkey(&script).unwrap(),
Some((script_type, path.clone()))
);
}
pub fn test_iter_script_pubkey<D: Database>(mut tree: D) {
let script = Script::from(
Vec::<u8>::from_hex("76a91402306a7c23f3e8010de41e9e591348bb83f11daa88ac").unwrap(),
);
let path = 42;
let script_type = ScriptType::External;
tree.set_script_pubkey(&script, script_type, path).unwrap();
assert_eq!(tree.iter_script_pubkeys(None).unwrap().len(), 1);
}
pub fn test_del_script_pubkey<D: Database>(mut tree: D) {
let script = Script::from(
Vec::<u8>::from_hex("76a91402306a7c23f3e8010de41e9e591348bb83f11daa88ac").unwrap(),
);
let path = 42;
let script_type = ScriptType::External;
tree.set_script_pubkey(&script, script_type, path).unwrap();
assert_eq!(tree.iter_script_pubkeys(None).unwrap().len(), 1);
tree.del_script_pubkey_from_path(script_type, path).unwrap();
assert_eq!(tree.iter_script_pubkeys(None).unwrap().len(), 0);
}
pub fn test_utxo<D: Database>(mut tree: D) {
let outpoint = OutPoint::from_str(
"5df6e0e2761359d30a8275058e299fcc0381534545f55cf43e41983f5d4c9456:0",
)
.unwrap();
let script = Script::from(
Vec::<u8>::from_hex("76a91402306a7c23f3e8010de41e9e591348bb83f11daa88ac").unwrap(),
);
let txout = TxOut {
value: 133742,
script_pubkey: script,
};
let utxo = UTXO {
txout,
outpoint,
is_internal: false,
};
tree.set_utxo(&utxo).unwrap();
assert_eq!(tree.get_utxo(&outpoint).unwrap(), Some(utxo));
}
pub fn test_raw_tx<D: Database>(mut tree: D) {
let hex_tx = Vec::<u8>::from_hex("0100000001a15d57094aa7a21a28cb20b59aab8fc7d1149a3bdbcddba9c622e4f5f6a99ece010000006c493046022100f93bb0e7d8db7bd46e40132d1f8242026e045f03a0efe71bbb8e3f475e970d790221009337cd7f1f929f00cc6ff01f03729b069a7c21b59b1736ddfee5db5946c5da8c0121033b9b137ee87d5a812d6f506efdd37f0affa7ffc310711c06c7f3e097c9447c52ffffffff0100e1f505000000001976a9140389035a9225b3839e2bbf32d826a1e222031fd888ac00000000").unwrap();
let tx: Transaction = deserialize(&hex_tx).unwrap();
tree.set_raw_tx(&tx).unwrap();
let txid = tx.txid();
assert_eq!(tree.get_raw_tx(&txid).unwrap(), Some(tx));
}
pub fn test_tx<D: Database>(mut tree: D) {
let hex_tx = Vec::<u8>::from_hex("0100000001a15d57094aa7a21a28cb20b59aab8fc7d1149a3bdbcddba9c622e4f5f6a99ece010000006c493046022100f93bb0e7d8db7bd46e40132d1f8242026e045f03a0efe71bbb8e3f475e970d790221009337cd7f1f929f00cc6ff01f03729b069a7c21b59b1736ddfee5db5946c5da8c0121033b9b137ee87d5a812d6f506efdd37f0affa7ffc310711c06c7f3e097c9447c52ffffffff0100e1f505000000001976a9140389035a9225b3839e2bbf32d826a1e222031fd888ac00000000").unwrap();
let tx: Transaction = deserialize(&hex_tx).unwrap();
let txid = tx.txid();
let mut tx_details = TransactionDetails {
transaction: Some(tx),
txid,
timestamp: 123456,
received: 1337,
sent: 420420,
fees: 140,
height: Some(1000),
};
tree.set_tx(&tx_details).unwrap();
// get with raw tx too
assert_eq!(
tree.get_tx(&tx_details.txid, true).unwrap(),
Some(tx_details.clone())
);
// get only raw_tx
assert_eq!(
tree.get_raw_tx(&tx_details.txid).unwrap(),
tx_details.transaction
);
// now get without raw_tx
tx_details.transaction = None;
assert_eq!(
tree.get_tx(&tx_details.txid, false).unwrap(),
Some(tx_details)
);
}
pub fn test_last_index<D: Database>(mut tree: D) {
tree.set_last_index(ScriptType::External, 1337).unwrap();
assert_eq!(
tree.get_last_index(ScriptType::External).unwrap(),
Some(1337)
);
assert_eq!(tree.get_last_index(ScriptType::Internal).unwrap(), None);
let res = tree.increment_last_index(ScriptType::External).unwrap();
assert_eq!(res, 1338);
let res = tree.increment_last_index(ScriptType::Internal).unwrap();
assert_eq!(res, 0);
assert_eq!(
tree.get_last_index(ScriptType::External).unwrap(),
Some(1338)
);
assert_eq!(tree.get_last_index(ScriptType::Internal).unwrap(), Some(0));
}
// TODO: more tests...
}

View File

@@ -1,32 +1,3 @@
// Magical Bitcoin Library
// Written in 2020 by
// Alekos Filini <alekos.filini@gmail.com>
//
// Copyright (c) 2020 Magical Bitcoin
//
// Permission is hereby granted, free of charge, to any person obtaining a copy
// of this software and associated documentation files (the "Software"), to deal
// in the Software without restriction, including without limitation the rights
// to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
// copies of the Software, and to permit persons to whom the Software is
// furnished to do so, subject to the following conditions:
//
// The above copyright notice and this permission notice shall be included in all
// copies or substantial portions of the Software.
//
// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
// IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
// FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
// AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
// LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
// OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
// SOFTWARE.
//! Descriptor checksum
//!
//! This module contains a re-implementation of the function used by Bitcoin Core to calculate the
//! checksum of a descriptor
use std::iter::FromIterator;
use crate::descriptor::Error;
@@ -56,7 +27,6 @@ fn poly_mod(mut c: u64, val: u64) -> u64 {
c
}
/// Compute the checksum of a descriptor
pub fn get_checksum(desc: &str) -> Result<String, Error> {
let mut c = 1;
let mut cls = 0;

View File

@@ -1,30 +1,3 @@
// Magical Bitcoin Library
// Written in 2020 by
// Alekos Filini <alekos.filini@gmail.com>
//
// Copyright (c) 2020 Magical Bitcoin
//
// Permission is hereby granted, free of charge, to any person obtaining a copy
// of this software and associated documentation files (the "Software"), to deal
// in the Software without restriction, including without limitation the rights
// to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
// copies of the Software, and to permit persons to whom the Software is
// furnished to do so, subject to the following conditions:
//
// The above copyright notice and this permission notice shall be included in all
// copies or substantial portions of the Software.
//
// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
// IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
// FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
// AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
// LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
// OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
// SOFTWARE.
//! Descriptor errors
/// Errors related to the parsing and usage of descriptors
#[derive(Debug)]
pub enum Error {
InternalError,
@@ -33,6 +6,12 @@ pub enum Error {
MalformedInput,
KeyParsingError(String),
AliasAsPublicKey,
KeyHasSecret,
Incomplete,
MissingAlias(String),
InvalidAlias(String),
Policy(crate::descriptor::policy::PolicyError),
InputIndexDoesntExist,
@@ -56,8 +35,6 @@ impl std::fmt::Display for Error {
}
}
impl std::error::Error for Error {}
impl_error!(bitcoin::util::bip32::Error, BIP32);
impl_error!(bitcoin::util::base58::Error, Base58);
impl_error!(bitcoin::util::key::Error, PK);

View File

@@ -0,0 +1,372 @@
use std::fmt::{self, Display};
use std::str::FromStr;
use bitcoin::hashes::hex::{FromHex, ToHex};
use bitcoin::secp256k1;
use bitcoin::util::base58;
use bitcoin::util::bip32::{
ChildNumber, DerivationPath, ExtendedPrivKey, ExtendedPubKey, Fingerprint,
};
use bitcoin::PublicKey;
#[allow(unused_imports)]
use log::{debug, error, info, trace};
#[derive(Copy, Clone, PartialEq, Eq, Debug)]
pub enum DerivationIndex {
Fixed,
Normal,
Hardened,
}
impl DerivationIndex {
fn as_path(&self, index: u32) -> DerivationPath {
match self {
DerivationIndex::Fixed => vec![],
DerivationIndex::Normal => vec![ChildNumber::Normal { index }],
DerivationIndex::Hardened => vec![ChildNumber::Hardened { index }],
}
.into()
}
}
impl Display for DerivationIndex {
    /// Render the wildcard suffix exactly as it appears in a descriptor
    /// string (empty, `/*` or `/*'`).
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        match self {
            Self::Fixed => f.write_str(""),
            Self::Normal => f.write_str("/*"),
            Self::Hardened => f.write_str("/*'"),
        }
    }
}
/// An extended key (xpub/xprv) as found in a descriptor, together with its
/// optional key-origin information and derivation path.
#[derive(Clone, Debug)]
pub struct DescriptorExtendedKey {
/// Fingerprint of the master key this key was derived from, if known
pub master_fingerprint: Option<Fingerprint>,
/// Derivation path from the master key down to `pubkey`, if known
pub master_derivation: Option<DerivationPath>,
/// The extended public key
pub pubkey: ExtendedPubKey,
/// The matching extended private key, when the descriptor contains an xprv
pub secret: Option<ExtendedPrivKey>,
/// Fixed derivation steps that follow the key in the descriptor
pub path: DerivationPath,
/// Kind of the final (wildcard) derivation step
pub final_index: DerivationIndex,
}
impl DescriptorExtendedKey {
    /// Returns the derivation path for `index` starting from the master key:
    /// the known master derivation (if any) followed by `path_with_index`.
    pub fn full_path(&self, index: u32) -> DerivationPath {
        let mut children: Vec<ChildNumber> = match &self.master_derivation {
            Some(master) => master.as_ref().to_vec(),
            None => Vec::new(),
        };
        let tail: Vec<ChildNumber> = self.path_with_index(index).into();
        children.extend(tail);
        children.into()
    }

    /// Returns the derivation path for `index` relative to this key: the
    /// fixed `path` steps followed by the expanded wildcard step, if any.
    pub fn path_with_index(&self, index: u32) -> DerivationPath {
        let mut children: Vec<ChildNumber> = self.path.clone().into();
        let wildcard: Vec<ChildNumber> = self.final_index.as_path(index).into();
        children.extend(wildcard);
        children.into()
    }

    /// Derives the plain public key at `index`.
    pub fn derive<C: secp256k1::Verification + secp256k1::Signing>(
        &self,
        ctx: &secp256k1::Secp256k1<C>,
        index: u32,
    ) -> Result<PublicKey, super::Error> {
        let xpub = self.derive_xpub(ctx, index)?;
        Ok(xpub.public_key)
    }

    /// Derives the extended public key at `index`. When a secret is present
    /// the derivation goes through the xprv, which also supports hardened
    /// steps in `path`.
    pub fn derive_xpub<C: secp256k1::Verification + secp256k1::Signing>(
        &self,
        ctx: &secp256k1::Secp256k1<C>,
        index: u32,
    ) -> Result<ExtendedPubKey, super::Error> {
        match self.secret {
            Some(xprv) => {
                let derived = xprv.derive_priv(ctx, &self.path_with_index(index))?;
                Ok(ExtendedPubKey::from_private(ctx, &derived))
            }
            None => Ok(self.pubkey.derive_pub(ctx, &self.path_with_index(index))?),
        }
    }

    /// Returns this key's extended public key before any of the `path` steps
    /// are applied, computing it from the secret when one is present.
    pub fn root_xpub<C: secp256k1::Verification + secp256k1::Signing>(
        &self,
        ctx: &secp256k1::Secp256k1<C>,
    ) -> ExtendedPubKey {
        match &self.secret {
            Some(xprv) => ExtendedPubKey::from_private(ctx, xprv),
            None => self.pubkey,
        }
    }
}
impl Display for DescriptorExtendedKey {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
if let Some(ref fingerprint) = self.master_fingerprint {
write!(f, "[{}", fingerprint.to_hex())?;
if let Some(ref path) = self.master_derivation {
write!(f, "{}", &path.to_string()[1..])?;
}
write!(f, "]")?;
}
if let Some(xprv) = self.secret {
write!(f, "{}", xprv)?
} else {
write!(f, "{}", self.pubkey)?
}
write!(f, "{}{}", &self.path.to_string()[1..], self.final_index)
}
}
impl FromStr for DescriptorExtendedKey {
type Err = super::Error;
/// Parses a descriptor extended-key expression of the form
/// `[fingerprint/origin-path]<xpub|xprv>/path/*`, where the bracketed
/// origin, the path and the trailing wildcard are all optional.
fn from_str(inp: &str) -> Result<DescriptorExtendedKey, Self::Err> {
let len = inp.len();
// Optional "[fingerprint(/origin-path)?]" prefix. `offset` is advanced
// past the closing bracket so the key itself starts at `inp[offset..]`.
let (master_fingerprint, master_derivation, offset) = match inp.starts_with("[") {
false => (None, None, 0),
true => {
// need at least "[" + 8 hex chars of fingerprint
if inp.len() < 9 {
return Err(super::Error::MalformedInput);
}
let master_fingerprint = &inp[1..9];
let close_bracket_index =
&inp[9..].find("]").ok_or(super::Error::MalformedInput)?;
// anything between the fingerprint and "]" is the origin path
let path = if *close_bracket_index > 0 {
Some(DerivationPath::from_str(&format!(
"m{}",
&inp[9..9 + *close_bracket_index]
))?)
} else {
None
};
(
Some(Fingerprint::from_hex(master_fingerprint)?),
path,
9 + *close_bracket_index + 1,
)
}
};
// The base58 key runs until the first "/" (or to the end of the input)
let (key_range, offset) = match &inp[offset..].find("/") {
Some(index) => (offset..offset + *index, offset + *index),
None => (offset..len, len),
};
let data = base58::from_check(&inp[key_range.clone()])?;
let secp = secp256k1::Secp256k1::new();
// Dispatch on the BIP32 version bytes: public versions parse as an
// xpub only, private versions keep the xprv and compute its xpub
let (pubkey, secret) = match &data[0..4] {
[0x04u8, 0x88, 0xB2, 0x1E] | [0x04u8, 0x35, 0x87, 0xCF] => {
(ExtendedPubKey::from_str(&inp[key_range])?, None)
}
[0x04u8, 0x88, 0xAD, 0xE4] | [0x04u8, 0x35, 0x83, 0x94] => {
let private = ExtendedPrivKey::from_str(&inp[key_range])?;
(ExtendedPubKey::from_private(&secp, &private), Some(private))
}
data => return Err(super::Error::InvalidPrefix(data.into())),
};
// Remaining "/..." part: fixed derivation steps plus an optional
// trailing wildcard ("/*", "/*'" or "/*h")
let (path, final_index, _) = match &inp[offset..].starts_with("/") {
false => (DerivationPath::from(vec![]), DerivationIndex::Fixed, offset),
true => {
// `skip` is how many characters the wildcard suffix occupies
let (all, skip) = match &inp[len - 2..len] {
"/*" => (DerivationIndex::Normal, 2),
"*'" | "*h" => (DerivationIndex::Hardened, 3),
_ => (DerivationIndex::Fixed, 0),
};
// a hardened wildcard can only be derived when we hold the xprv
if all == DerivationIndex::Hardened && secret.is_none() {
return Err(super::Error::HardenedDerivationOnXpub);
}
(
DerivationPath::from_str(&format!("m{}", &inp[offset..len - skip]))?,
all,
len,
)
}
};
// likewise, fixed hardened steps are impossible without the xprv
if secret.is_none()
&& path.into_iter().any(|child| match child {
ChildNumber::Hardened { .. } => true,
_ => false,
})
{
return Err(super::Error::HardenedDerivationOnXpub);
}
Ok(DescriptorExtendedKey {
master_fingerprint,
master_derivation,
pubkey,
secret,
path,
final_index,
})
}
}
// Unit tests for `DescriptorExtendedKey`: wildcard expansion, descriptor
// parsing (including key-origin prefixes and error cases) and path building.
#[cfg(test)]
mod test {
use std::str::FromStr;
use bitcoin::hashes::hex::FromHex;
use bitcoin::util::bip32::{ChildNumber, DerivationPath};
use crate::descriptor::*;
// Shorthand to build a `Fingerprint` from a hex literal
macro_rules! hex_fingerprint {
($hex:expr) => {
Fingerprint::from_hex($hex).unwrap()
};
}
// Shorthand to build a `DerivationPath`; with no argument it yields the
// empty path
macro_rules! deriv_path {
($str:expr) => {
DerivationPath::from_str($str).unwrap()
};
() => {
DerivationPath::from(vec![])
};
}
#[test]
fn test_derivation_index_fixed() {
let index = DerivationIndex::Fixed;
assert_eq!(index.as_path(1337), DerivationPath::from(vec![]));
assert_eq!(format!("{}", index), "");
}
#[test]
fn test_derivation_index_normal() {
let index = DerivationIndex::Normal;
assert_eq!(
index.as_path(1337),
DerivationPath::from(vec![ChildNumber::Normal { index: 1337 }])
);
assert_eq!(format!("{}", index), "/*");
}
#[test]
fn test_derivation_index_hardened() {
let index = DerivationIndex::Hardened;
assert_eq!(
index.as_path(1337),
DerivationPath::from(vec![ChildNumber::Hardened { index: 1337 }])
);
assert_eq!(format!("{}", index), "/*'");
}
#[test]
fn test_parse_xpub_no_path_fixed() {
let key = "xpub6ERApfZwUNrhLCkDtcHTcxd75RbzS1ed54G1LkBUHQVHQKqhMkhgbmJbZRkrgZw4koxb5JaHWkY4ALHY2grBGRjaDMzQLcgJvLJuZZvRcEL";
let ek = DescriptorExtendedKey::from_str(key).unwrap();
assert_eq!(ek.pubkey.fingerprint(), hex_fingerprint!("31a507b8"));
assert_eq!(ek.path, deriv_path!());
assert_eq!(ek.final_index, DerivationIndex::Fixed);
}
#[test]
fn test_parse_xpub_with_path_fixed() {
let key = "xpub6ERApfZwUNrhLCkDtcHTcxd75RbzS1ed54G1LkBUHQVHQKqhMkhgbmJbZRkrgZw4koxb5JaHWkY4ALHY2grBGRjaDMzQLcgJvLJuZZvRcEL/1/2/3";
let ek = DescriptorExtendedKey::from_str(key).unwrap();
assert_eq!(ek.pubkey.fingerprint(), hex_fingerprint!("31a507b8"));
assert_eq!(ek.path, deriv_path!("m/1/2/3"));
assert_eq!(ek.final_index, DerivationIndex::Fixed);
}
#[test]
fn test_parse_xpub_with_path_normal() {
let key = "xpub6ERApfZwUNrhLCkDtcHTcxd75RbzS1ed54G1LkBUHQVHQKqhMkhgbmJbZRkrgZw4koxb5JaHWkY4ALHY2grBGRjaDMzQLcgJvLJuZZvRcEL/1/2/3/*";
let ek = DescriptorExtendedKey::from_str(key).unwrap();
assert_eq!(ek.pubkey.fingerprint(), hex_fingerprint!("31a507b8"));
assert_eq!(ek.path, deriv_path!("m/1/2/3"));
assert_eq!(ek.final_index, DerivationIndex::Normal);
}
// A hardened wildcard on a public-only key must be rejected
#[test]
#[should_panic(expected = "HardenedDerivationOnXpub")]
fn test_parse_xpub_with_path_hardened() {
let key = "xpub6ERApfZwUNrhLCkDtcHTcxd75RbzS1ed54G1LkBUHQVHQKqhMkhgbmJbZRkrgZw4koxb5JaHWkY4ALHY2grBGRjaDMzQLcgJvLJuZZvRcEL/*'";
let ek = DescriptorExtendedKey::from_str(key).unwrap();
assert_eq!(ek.pubkey.fingerprint(), hex_fingerprint!("31a507b8"));
assert_eq!(ek.path, deriv_path!("m/1/2/3"));
assert_eq!(ek.final_index, DerivationIndex::Fixed);
}
// ...but it is fine on a private key (tprv)
#[test]
fn test_parse_tprv_with_path_hardened() {
let key = "tprv8ZgxMBicQKsPduL5QnGihpprdHyypMGi4DhimjtzYemu7se5YQNcZfAPLqXRuGHb5ZX2eTQj62oNqMnyxJ7B7wz54Uzswqw8fFqMVdcmVF7/1/2/3/*'";
let ek = DescriptorExtendedKey::from_str(key).unwrap();
assert!(ek.secret.is_some());
assert_eq!(ek.pubkey.fingerprint(), hex_fingerprint!("5ea4190e"));
assert_eq!(ek.path, deriv_path!("m/1/2/3"));
assert_eq!(ek.final_index, DerivationIndex::Hardened);
}
#[test]
fn test_parse_xpub_master_details() {
let key = "[d34db33f/44'/0'/0']xpub6ERApfZwUNrhLCkDtcHTcxd75RbzS1ed54G1LkBUHQVHQKqhMkhgbmJbZRkrgZw4koxb5JaHWkY4ALHY2grBGRjaDMzQLcgJvLJuZZvRcEL";
let ek = DescriptorExtendedKey::from_str(key).unwrap();
assert_eq!(ek.master_fingerprint, Some(hex_fingerprint!("d34db33f")));
assert_eq!(ek.master_derivation, Some(deriv_path!("m/44'/0'/0'")));
}
#[test]
fn test_parse_xpub_master_details_empty_derivation() {
let key = "[d34db33f]xpub6ERApfZwUNrhLCkDtcHTcxd75RbzS1ed54G1LkBUHQVHQKqhMkhgbmJbZRkrgZw4koxb5JaHWkY4ALHY2grBGRjaDMzQLcgJvLJuZZvRcEL";
let ek = DescriptorExtendedKey::from_str(key).unwrap();
assert_eq!(ek.master_fingerprint, Some(hex_fingerprint!("d34db33f")));
assert_eq!(ek.master_derivation, None);
}
#[test]
#[should_panic(expected = "MalformedInput")]
fn test_parse_xpub_short_input() {
let key = "[d34d";
DescriptorExtendedKey::from_str(key).unwrap();
}
#[test]
#[should_panic(expected = "MalformedInput")]
fn test_parse_xpub_missing_closing_bracket() {
let key = "[d34db33fxpub6ERApfZwUNrhLCkDtcHTcxd75RbzS1ed54G1LkBUHQVHQKqhMkhgbmJbZRkrgZw4koxb5JaHWkY4ALHY2grBGRjaDMzQLcgJvLJuZZvRcEL";
DescriptorExtendedKey::from_str(key).unwrap();
}
#[test]
#[should_panic(expected = "InvalidChar")]
fn test_parse_xpub_invalid_fingerprint() {
let key = "[d34db33z]xpub6ERApfZwUNrhLCkDtcHTcxd75RbzS1ed54G1LkBUHQVHQKqhMkhgbmJbZRkrgZw4koxb5JaHWkY4ALHY2grBGRjaDMzQLcgJvLJuZZvRcEL";
DescriptorExtendedKey::from_str(key).unwrap();
}
#[test]
fn test_xpub_normal_full_path() {
let key = "xpub6ERApfZwUNrhLCkDtcHTcxd75RbzS1ed54G1LkBUHQVHQKqhMkhgbmJbZRkrgZw4koxb5JaHWkY4ALHY2grBGRjaDMzQLcgJvLJuZZvRcEL/1/2/*";
let ek = DescriptorExtendedKey::from_str(key).unwrap();
assert_eq!(ek.full_path(42), deriv_path!("m/1/2/42"));
}
// With no wildcard the index argument is ignored
#[test]
fn test_xpub_fixed_full_path() {
let key = "xpub6ERApfZwUNrhLCkDtcHTcxd75RbzS1ed54G1LkBUHQVHQKqhMkhgbmJbZRkrgZw4koxb5JaHWkY4ALHY2grBGRjaDMzQLcgJvLJuZZvRcEL/1/2";
let ek = DescriptorExtendedKey::from_str(key).unwrap();
assert_eq!(ek.full_path(42), deriv_path!("m/1/2"));
assert_eq!(ek.full_path(1337), deriv_path!("m/1/2"));
}
}

280
src/descriptor/keys.rs Normal file
View File

@@ -0,0 +1,280 @@
use std::fmt;
use std::str::FromStr;
use bitcoin::secp256k1::{All, Secp256k1};
use bitcoin::{PrivateKey, PublicKey};
use bitcoin::util::bip32::{
ChildNumber, DerivationPath, ExtendedPrivKey, ExtendedPubKey, Fingerprint,
};
use super::error::Error;
use super::extended_key::DerivationIndex;
use super::DescriptorExtendedKey;
/// A named placeholder for a key inside a descriptor.
///
/// An alias only records a name and whether the aliased key carries a secret;
/// it cannot be resolved to an actual public key.
#[derive(Debug, Clone)]
pub struct KeyAlias {
// name used to refer to the key in the descriptor
alias: String,
// whether the aliased key also has a private part
has_secret: bool,
}
impl KeyAlias {
    /// Builds a boxed `Key` trait object wrapping the given alias name.
    pub(crate) fn new_boxed(alias: &str, has_secret: bool) -> Box<dyn Key> {
        let wrapped = KeyAlias {
            alias: alias.to_string(),
            has_secret,
        };
        Box::new(wrapped)
    }
}
/// Tries to parse `string` as, in order: a public key, a WIF private key or a
/// descriptor extended key. Returns the original string together with the
/// boxed parsed key, or `KeyParsingError` when nothing matches.
pub(crate) fn parse_key(string: &str) -> Result<(String, Box<dyn RealKey>), Error> {
    let parsed: Box<dyn RealKey> = if let Ok(pk) = PublicKey::from_str(string) {
        Box::new(pk)
    } else if let Ok(sk) = PrivateKey::from_wif(string) {
        Box::new(sk)
    } else if let Ok(ext_key) = DescriptorExtendedKey::from_str(string) {
        Box::new(ext_key)
    } else {
        return Err(Error::KeyParsingError(string.to_string()));
    };
    Ok((string.to_string(), parsed))
}
/// Trait implemented by everything that can appear as a key in a descriptor:
/// single keys, extended keys and aliases.
pub trait Key: std::fmt::Debug + std::fmt::Display {
/// Returns the public key this key resolves to, deriving the child at
/// `index` when applicable.
fn as_public_key(&self, secp: &Secp256k1<All>, index: Option<u32>) -> Result<PublicKey, Error>;
/// Returns `true` when the key always resolves to the same public key
/// (i.e. there is no wildcard to expand).
fn is_fixed(&self) -> bool;
/// The alias name, for keys that are only placeholders.
fn alias(&self) -> Option<&str> {
None
}
/// The single private key, when one is present.
fn as_secret_key(&self) -> Option<PrivateKey> {
None
}
/// The extended private key, when one is present.
fn xprv(&self) -> Option<ExtendedPrivKey> {
None
}
/// Full derivation path from the master key for the child at `index`,
/// when the key carries that information.
fn full_path(&self, _index: u32) -> Option<DerivationPath> {
None
}
/// Fingerprint of the master key, when it can be determined.
fn fingerprint(&self, _secp: &Secp256k1<All>) -> Option<Fingerprint> {
None
}
/// Whether this key carries any secret material.
fn has_secret(&self) -> bool {
self.xprv().is_some() || self.as_secret_key().is_some()
}
/// Returns the public-only version of this key.
fn public(&self, secp: &Secp256k1<All>) -> Result<Box<dyn RealKey>, Error> {
Ok(Box::new(self.as_public_key(secp, None)?))
}
}
/// Marker trait for concrete keys (as opposed to aliases) that can be cloned
/// into a plain boxed [`Key`].
pub trait RealKey: Key {
/// Clones `self` into a boxed `Key` trait object.
fn into_key(&self) -> Box<dyn Key>;
}
/// Convenience boxing conversion for any concrete key type.
impl<T: RealKey + 'static> From<T> for Box<dyn RealKey> {
fn from(key: T) -> Self {
Box::new(key)
}
}
impl Key for PublicKey {
    /// A bare public key is already a public key; the index is ignored.
    fn as_public_key(
        &self,
        _secp: &Secp256k1<All>,
        _index: Option<u32>,
    ) -> Result<PublicKey, Error> {
        Ok(self.clone())
    }

    /// Bare keys never contain a wildcard.
    fn is_fixed(&self) -> bool {
        true
    }
}
impl RealKey for PublicKey {
    /// Upcasts a clone of this public key to a plain `Key` trait object.
    fn into_key(&self) -> Box<dyn Key> {
        let cloned: PublicKey = self.clone();
        Box::new(cloned)
    }
}
impl Key for PrivateKey {
    /// Computes the public key matching this private key; the index is
    /// ignored since single keys cannot be derived further.
    fn as_public_key(
        &self,
        secp: &Secp256k1<All>,
        _index: Option<u32>,
    ) -> Result<PublicKey, Error> {
        Ok(self.public_key(secp))
    }

    /// Exposes the private key itself.
    fn as_secret_key(&self) -> Option<PrivateKey> {
        Some(self.clone())
    }

    /// Single private keys never contain a wildcard.
    fn is_fixed(&self) -> bool {
        true
    }
}
impl RealKey for PrivateKey {
    /// Upcasts a clone of this private key to a plain `Key` trait object.
    fn into_key(&self) -> Box<dyn Key> {
        let cloned: PrivateKey = self.clone();
        Box::new(cloned)
    }
}
impl Key for DescriptorExtendedKey {
    /// Returns the stored master fingerprint when present, otherwise the
    /// fingerprint of this key's own root xpub.
    fn fingerprint(&self, secp: &Secp256k1<All>) -> Option<Fingerprint> {
        if let Some(fing) = self.master_fingerprint {
            Some(fing.clone())
        } else {
            Some(self.root_xpub(secp).fingerprint())
        }
    }

    /// Derives the public key at `index`, defaulting to index 0.
    fn as_public_key(&self, secp: &Secp256k1<All>, index: Option<u32>) -> Result<PublicKey, Error> {
        Ok(self.derive_xpub(secp, index.unwrap_or(0))?.public_key)
    }

    /// Returns the public-only version of this key: the xprv (if any) is
    /// pre-derived over the non-trailing-normal prefix of `path` and then
    /// dropped, moving that prefix into the key-origin information.
    fn public(&self, secp: &Secp256k1<All>) -> Result<Box<dyn RealKey>, Error> {
        if self.final_index == DerivationIndex::Hardened {
            return Err(Error::HardenedDerivationOnXpub);
        }

        if self.xprv().is_none() {
            return Ok(Box::new(self.clone()));
        }

        // copy the part of the path that can be derived on the xpub: the
        // trailing run of unhardened children. `rev()` yields these
        // back-to-front, so restore the original order before using them.
        let mut path = self
            .path
            .into_iter()
            .rev()
            .take_while(|child| match child {
                ChildNumber::Normal { .. } => true,
                _ => false,
            })
            .cloned()
            .collect::<Vec<_>>();
        path.reverse();

        // take the prefix that has to be derived on the xprv
        let master_derivation_add = self
            .path
            .into_iter()
            .take(self.path.as_ref().len() - path.len())
            .cloned()
            .collect::<Vec<_>>();
        let has_derived = !master_derivation_add.is_empty();

        let derived_xprv = self
            .secret
            .as_ref()
            .unwrap()
            .derive_priv(secp, &master_derivation_add)?;
        let pubkey = ExtendedPubKey::from_private(secp, &derived_xprv);

        // append the newly derived prefix to the stored master derivation
        let master_derivation = self
            .master_derivation
            .as_ref()
            .map_or(vec![], |path| path.as_ref().to_vec())
            .into_iter()
            .chain(master_derivation_add.into_iter())
            .collect::<Vec<_>>();
        let master_derivation = match &master_derivation[..] {
            &[] => None,
            child_vec => Some(child_vec.into()),
        };

        // if we had to derive, record the origin fingerprint so the key can
        // still be matched against its master
        let master_fingerprint = match self.master_fingerprint {
            Some(desc) => Some(desc.clone()),
            None if has_derived => Some(self.fingerprint(secp).unwrap()),
            _ => None,
        };

        Ok(Box::new(DescriptorExtendedKey {
            master_fingerprint,
            master_derivation,
            pubkey,
            secret: None,
            path: path.into(),
            final_index: self.final_index,
        }))
    }

    /// The embedded extended private key, when present.
    fn xprv(&self) -> Option<ExtendedPrivKey> {
        self.secret
    }

    /// Full derivation path from the master key for the child at `index`
    /// (delegates to the inherent `full_path`).
    fn full_path(&self, index: u32) -> Option<DerivationPath> {
        Some(self.full_path(index))
    }

    /// Fixed only when there is no trailing wildcard.
    fn is_fixed(&self) -> bool {
        self.final_index == DerivationIndex::Fixed
    }
}
impl RealKey for DescriptorExtendedKey {
    /// Upcasts a clone of this extended key to a plain `Key` trait object.
    fn into_key(&self) -> Box<dyn Key> {
        let cloned = self.clone();
        Box::new(cloned)
    }
}
impl std::fmt::Display for KeyAlias {
    /// Prints the alias name, prefixed with `#` when it carries a secret.
    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
        if self.has_secret {
            write!(f, "#{}", self.alias)
        } else {
            write!(f, "{}", self.alias)
        }
    }
}
impl Key for KeyAlias {
    /// Aliases are placeholders and cannot be resolved to a concrete key.
    fn as_public_key(
        &self,
        _secp: &Secp256k1<All>,
        _index: Option<u32>,
    ) -> Result<PublicKey, Error> {
        Err(Error::AliasAsPublicKey)
    }

    /// Aliases are treated as fixed.
    fn is_fixed(&self) -> bool {
        true
    }

    /// The alias name itself.
    fn alias(&self) -> Option<&str> {
        Some(self.alias.as_str())
    }

    /// Reports the flag the alias was declared with.
    fn has_secret(&self) -> bool {
        self.has_secret
    }

    /// Aliases cannot be converted to a public-only form either.
    fn public(&self, _secp: &Secp256k1<All>) -> Result<Box<dyn RealKey>, Error> {
        Err(Error::AliasAsPublicKey)
    }
}
/// Zero-sized placeholder key, used when only the structure of a descriptor
/// matters and the actual key material is irrelevant.
#[derive(Debug, Clone, Hash, PartialEq, PartialOrd, Eq, Ord, Default)]
pub(crate) struct DummyKey();
impl fmt::Display for DummyKey {
    /// Always renders as the literal string "DummyKey".
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        f.write_str("DummyKey")
    }
}
impl std::str::FromStr for DummyKey {
type Err = ();
fn from_str(_: &str) -> Result<Self, Self::Err> {
Ok(DummyKey::default())
}
}
impl miniscript::MiniscriptKey for DummyKey {
    // a dummy key "hashes" to another dummy key
    type Hash = DummyKey;

    fn to_pubkeyhash(&self) -> DummyKey {
        Self::default()
    }
}

View File

@@ -1,133 +1,75 @@
// Magical Bitcoin Library
// Written in 2020 by
// Alekos Filini <alekos.filini@gmail.com>
//
// Copyright (c) 2020 Magical Bitcoin
//
// Permission is hereby granted, free of charge, to any person obtaining a copy
// of this software and associated documentation files (the "Software"), to deal
// in the Software without restriction, including without limitation the rights
// to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
// copies of the Software, and to permit persons to whom the Software is
// furnished to do so, subject to the following conditions:
//
// The above copyright notice and this permission notice shall be included in all
// copies or substantial portions of the Software.
//
// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
// IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
// FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
// AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
// LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
// OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
// SOFTWARE.
//! Descriptors
//!
//! This module contains generic utilities to work with descriptors, plus some re-exported types
//! from [`miniscript`].
use std::collections::{BTreeMap, HashMap};
use std::cell::RefCell;
use std::collections::BTreeMap;
use std::convert::{Into, TryFrom};
use std::fmt;
use std::sync::Arc;
use std::str::FromStr;
use bitcoin::hashes::hash160;
use bitcoin::secp256k1::Secp256k1;
use bitcoin::util::bip32::{ChildNumber, DerivationPath, Fingerprint};
use bitcoin::util::psbt;
use bitcoin::{PublicKey, Script, TxOut};
use bitcoin::hashes::{hash160, Hash};
use bitcoin::secp256k1::{All, Secp256k1};
use bitcoin::util::bip32::{DerivationPath, ExtendedPrivKey, Fingerprint};
use bitcoin::util::psbt::PartiallySignedTransaction as PSBT;
use bitcoin::{PrivateKey, PublicKey, Script};
use miniscript::descriptor::{DescriptorPublicKey, DescriptorXKey, InnerXKey};
pub use miniscript::{
Descriptor, Legacy, Miniscript, MiniscriptKey, ScriptContext, Segwitv0, Terminal, ToPublicKey,
};
pub use miniscript::{Descriptor, Miniscript, MiniscriptKey, Terminal};
use serde::{Deserialize, Serialize};
use crate::psbt::utils::PSBTUtils;
pub mod checksum;
pub mod error;
pub mod extended_key;
pub mod keys;
pub mod policy;
pub use self::checksum::get_checksum;
use self::error::Error;
pub use self::extended_key::{DerivationIndex, DescriptorExtendedKey};
pub use self::policy::Policy;
use crate::wallet::signer::SignersContainer;
/// Alias for a [`Descriptor`] that can contain extended keys using [`DescriptorPublicKey`]
pub type ExtendedDescriptor = Descriptor<DescriptorPublicKey>;
use self::keys::{parse_key, DummyKey, Key, RealKey};
/// Alias for the type of maps that represent derivation paths in a [`psbt::Input`] or
/// [`psbt::Output`]
///
/// [`psbt::Input`]: bitcoin::util::psbt::Input
/// [`psbt::Output`]: bitcoin::util::psbt::Output
pub type HDKeyPaths = BTreeMap<PublicKey, (Fingerprint, DerivationPath)>;
/// Trait implemented on [`Descriptor`]s to add a method to extract the spending [`policy`]
pub trait ExtractPolicy {
pub(crate) trait MiniscriptExtractPolicy {
fn extract_policy(
&self,
signers: Arc<SignersContainer<DescriptorPublicKey>>,
lookup_map: &BTreeMap<String, Box<dyn Key>>,
) -> Result<Option<Policy>, Error>;
}
pub(crate) trait XKeyUtils {
fn full_path(&self, append: &[ChildNumber]) -> DerivationPath;
fn root_fingerprint(&self) -> Fingerprint;
pub trait ExtractPolicy {
fn extract_policy(&self) -> Result<Option<Policy>, Error>;
}
impl<K: InnerXKey> XKeyUtils for DescriptorXKey<K> {
fn full_path(&self, append: &[ChildNumber]) -> DerivationPath {
let full_path = match &self.source {
&Some((_, ref path)) => path
.into_iter()
.chain(self.derivation_path.into_iter())
.cloned()
.collect(),
&None => self.derivation_path.clone(),
};
pub type DerivedDescriptor = Descriptor<PublicKey>;
pub type StringDescriptor = Descriptor<String>;
if self.is_wildcard {
full_path
.into_iter()
.chain(append.into_iter())
.cloned()
.collect()
} else {
full_path
}
}
fn root_fingerprint(&self) -> Fingerprint {
match &self.source {
&Some((fingerprint, _)) => fingerprint.clone(),
&None => self.xkey.xkey_fingerprint(),
}
}
}
pub(crate) trait DescriptorMeta: Sized {
pub trait DescriptorMeta {
fn is_witness(&self) -> bool;
fn get_hd_keypaths(&self, index: u32) -> Result<HDKeyPaths, Error>;
fn is_fixed(&self) -> bool;
fn derive_from_hd_keypaths(&self, hd_keypaths: &HDKeyPaths) -> Option<Self>;
fn derive_from_psbt_input(&self, psbt_input: &psbt::Input, utxo: Option<TxOut>)
-> Option<Self>;
}
pub(crate) trait DescriptorScripts {
fn psbt_redeem_script(&self) -> Option<Script>;
fn psbt_witness_script(&self) -> Option<Script>;
}
impl<T> DescriptorScripts for Descriptor<T>
impl<T> DescriptorMeta for Descriptor<T>
where
T: miniscript::MiniscriptKey + miniscript::ToPublicKey,
{
fn is_witness(&self) -> bool {
match self {
Descriptor::Bare(_) | Descriptor::Pk(_) | Descriptor::Pkh(_) | Descriptor::Sh(_) => {
false
}
Descriptor::Wpkh(_)
| Descriptor::ShWpkh(_)
| Descriptor::Wsh(_)
| Descriptor::ShWsh(_) => true,
}
}
fn psbt_redeem_script(&self) -> Option<Script> {
match self {
Descriptor::ShWpkh(_) => Some(self.witness_script()),
Descriptor::ShWsh(ref script) => Some(script.encode().to_v0_p2wsh()),
Descriptor::Sh(ref script) => Some(script.encode()),
Descriptor::Bare(ref script) => Some(script.encode()),
_ => None,
}
}
@@ -141,181 +83,284 @@ where
}
}
impl DescriptorMeta for Descriptor<DescriptorPublicKey> {
fn is_witness(&self) -> bool {
match self {
Descriptor::Bare(_) | Descriptor::Pk(_) | Descriptor::Pkh(_) | Descriptor::Sh(_) => {
false
}
Descriptor::Wpkh(_)
| Descriptor::ShWpkh(_)
| Descriptor::Wsh(_)
| Descriptor::ShWsh(_) => true,
#[serde(try_from = "&str", into = "String")]
#[derive(Debug, Serialize, Deserialize)]
pub struct ExtendedDescriptor {
#[serde(flatten)]
pub(crate) internal: StringDescriptor,
#[serde(skip)]
pub(crate) keys: BTreeMap<String, Box<dyn RealKey>>,
#[serde(skip)]
pub(crate) ctx: Secp256k1<All>,
}
impl fmt::Display for ExtendedDescriptor {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
write!(f, "{}", self.internal)
}
}
impl std::clone::Clone for ExtendedDescriptor {
fn clone(&self) -> Self {
Self {
internal: self.internal.clone(),
ctx: self.ctx.clone(),
keys: BTreeMap::new(),
}
}
}
fn get_hd_keypaths(&self, index: u32) -> Result<HDKeyPaths, Error> {
let mut answer = BTreeMap::new();
impl std::convert::AsRef<StringDescriptor> for ExtendedDescriptor {
fn as_ref(&self) -> &StringDescriptor {
&self.internal
}
}
let translatefpk = |key: &DescriptorPublicKey| -> Result<_, Error> {
match key {
DescriptorPublicKey::PubKey(_) => {}
DescriptorPublicKey::XPub(xpub) => {
let derive_path = if xpub.is_wildcard {
xpub.derivation_path
.into_iter()
.chain([ChildNumber::from_normal_idx(index)?].iter())
.cloned()
.collect()
} else {
xpub.derivation_path.clone()
};
let derived_pubkey = xpub
.xkey
.derive_pub(&Secp256k1::verification_only(), &derive_path)?;
impl ExtendedDescriptor {
fn new(sd: StringDescriptor) -> Result<Self, Error> {
let ctx = Secp256k1::gen_new();
let keys: RefCell<BTreeMap<String, Box<dyn RealKey>>> = RefCell::new(BTreeMap::new());
answer.insert(
derived_pubkey.public_key,
(
xpub.root_fingerprint(),
xpub.full_path(&[ChildNumber::from_normal_idx(index)?]),
),
);
}
}
let translatefpk = |string: &String| -> Result<_, Error> {
let (key, parsed) = parse_key(string)?;
keys.borrow_mut().insert(key, parsed);
Ok(DummyKey::default())
};
let translatefpkh = |_: &hash160::Hash| -> Result<_, Error> { Ok(DummyKey::default()) };
let translatefpkh = |string: &String| -> Result<_, Error> {
let (key, parsed) = parse_key(string)?;
keys.borrow_mut().insert(key, parsed);
self.translate_pk(translatefpk, translatefpkh)?;
Ok(DummyKey::default())
};
sd.translate_pk(translatefpk, translatefpkh)?;
Ok(ExtendedDescriptor {
internal: sd,
keys: keys.into_inner(),
ctx,
})
}
pub fn derive_with_miniscript(
&self,
miniscript: Miniscript<PublicKey>,
) -> Result<DerivedDescriptor, Error> {
let derived_desc = match self.internal {
Descriptor::Bare(_) => Descriptor::Bare(miniscript),
Descriptor::Sh(_) => Descriptor::Sh(miniscript),
Descriptor::Wsh(_) => Descriptor::Wsh(miniscript),
Descriptor::ShWsh(_) => Descriptor::ShWsh(miniscript),
_ => return Err(Error::CantDeriveWithMiniscript),
};
// if !self.same_structure(&derived_desc) {
// Err(Error::CantDeriveWithMiniscript)
// } else {
Ok(derived_desc)
// }
}
pub fn derive_from_psbt_input(
&self,
psbt: &PSBT,
input_index: usize,
) -> Result<DerivedDescriptor, Error> {
let get_pk_from_partial_sigs = || {
// here we need the public key.. since it's a single sig, there are only two
// options: we can either find it in the `partial_sigs`, or we can't. if we
// can't, it means that we can't even satisfy the input, so we can exit knowing
// that we did our best to try to find it.
psbt.inputs[input_index]
.partial_sigs
.keys()
.nth(0)
.ok_or(Error::MissingPublicKey)
};
if let Some(wit_script) = &psbt.inputs[input_index].witness_script {
self.derive_with_miniscript(Miniscript::parse(wit_script)?)
} else if let Some(p2sh_script) = &psbt.inputs[input_index].redeem_script {
if p2sh_script.is_v0_p2wpkh() {
// wrapped p2wpkh
get_pk_from_partial_sigs().map(|pk| Descriptor::ShWpkh(*pk))
} else {
self.derive_with_miniscript(Miniscript::parse(p2sh_script)?)
}
} else if let Some(utxo) = psbt.get_utxo_for(input_index) {
if utxo.script_pubkey.is_p2pkh() {
get_pk_from_partial_sigs().map(|pk| Descriptor::Pkh(*pk))
} else if utxo.script_pubkey.is_p2pk() {
get_pk_from_partial_sigs().map(|pk| Descriptor::Pk(*pk))
} else if utxo.script_pubkey.is_v0_p2wpkh() {
get_pk_from_partial_sigs().map(|pk| Descriptor::Wpkh(*pk))
} else {
// try as bare script
self.derive_with_miniscript(Miniscript::parse(&utxo.script_pubkey)?)
}
} else {
Err(Error::MissingDetails)
}
}
pub fn derive(&self, index: u32) -> Result<DerivedDescriptor, Error> {
let translatefpk = |xpub: &String| {
self.keys
.get(xpub)
.unwrap()
.as_public_key(&self.ctx, Some(index))
};
let translatefpkh =
|xpub: &String| Ok(hash160::Hash::hash(&translatefpk(xpub)?.to_bytes()));
Ok(self.internal.translate_pk(translatefpk, translatefpkh)?)
}
pub fn get_xprv(&self) -> impl IntoIterator<Item = ExtendedPrivKey> + '_ {
self.keys
.iter()
.filter(|(_, v)| v.xprv().is_some())
.map(|(_, v)| v.xprv().unwrap())
}
pub fn get_secret_keys(&self) -> impl IntoIterator<Item = PrivateKey> + '_ {
self.keys
.iter()
.filter(|(_, v)| v.as_secret_key().is_some())
.map(|(_, v)| v.as_secret_key().unwrap())
}
pub fn get_hd_keypaths(
&self,
index: u32,
) -> Result<BTreeMap<PublicKey, (Fingerprint, DerivationPath)>, Error> {
let mut answer = BTreeMap::new();
for (_, key) in &self.keys {
if let Some(fingerprint) = key.fingerprint(&self.ctx) {
let derivation_path = key.full_path(index).unwrap();
let pubkey = key.as_public_key(&self.ctx, Some(index))?;
answer.insert(pubkey, (fingerprint, derivation_path));
}
}
Ok(answer)
}
fn is_fixed(&self) -> bool {
let mut found_wildcard = false;
pub fn max_satisfaction_weight(&self) -> usize {
let fake_pk = PublicKey::from_slice(&[
2, 140, 40, 169, 123, 248, 41, 139, 192, 210, 61, 140, 116, 148, 82, 163, 46, 105, 75,
101, 227, 10, 148, 114, 163, 149, 74, 179, 15, 229, 50, 76, 170,
])
.unwrap();
let translated: Descriptor<PublicKey> = self
.internal
.translate_pk(
|_| -> Result<_, ()> { Ok(fake_pk.clone()) },
|_| -> Result<_, ()> { Ok(Default::default()) },
)
.unwrap();
let translatefpk = |key: &DescriptorPublicKey| -> Result<_, Error> {
match key {
DescriptorPublicKey::PubKey(_) => {}
DescriptorPublicKey::XPub(xpub) => {
if xpub.is_wildcard {
found_wildcard = true;
}
}
}
Ok(DummyKey::default())
};
let translatefpkh = |_: &hash160::Hash| -> Result<_, Error> { Ok(DummyKey::default()) };
self.translate_pk(translatefpk, translatefpkh).unwrap();
!found_wildcard
translated.max_satisfaction_weight()
}
fn derive_from_hd_keypaths(&self, hd_keypaths: &HDKeyPaths) -> Option<Self> {
let index: HashMap<_, _> = hd_keypaths.values().cloned().collect();
let mut derive_path = None::<DerivationPath>;
let translatefpk = |key: &DescriptorPublicKey| -> Result<_, Error> {
if derive_path.is_some() {
// already found a matching path, we are done
return Ok(DummyKey::default());
}
if let DescriptorPublicKey::XPub(xpub) = key {
// Check if the key matches one entry in our `index`. If it does, `matches()` will
// return the "prefix" that matched, so we remove that prefix from the full path
// found in `index` and save it in `derive_path`
let root_fingerprint = xpub.root_fingerprint();
derive_path = index
.get_key_value(&root_fingerprint)
.and_then(|(fingerprint, path)| xpub.matches(*fingerprint, path))
.map(|prefix_path| prefix_path.into_iter().cloned().collect::<Vec<_>>())
.map(|prefix| {
index
.get(&xpub.root_fingerprint())
.unwrap()
.into_iter()
.skip(prefix.len())
.cloned()
.collect()
});
}
Ok(DummyKey::default())
};
let translatefpkh = |_: &hash160::Hash| -> Result<_, Error> { Ok(DummyKey::default()) };
self.translate_pk(translatefpk, translatefpkh).unwrap();
derive_path.map(|path| self.derive(path.as_ref()))
pub fn is_fixed(&self) -> bool {
self.keys.iter().all(|(_, key)| key.is_fixed())
}
fn derive_from_psbt_input(
&self,
psbt_input: &psbt::Input,
utxo: Option<TxOut>,
) -> Option<Self> {
if let Some(derived) = self.derive_from_hd_keypaths(&psbt_input.hd_keypaths) {
return Some(derived);
} else if !self.is_fixed() {
// If the descriptor is not fixed we can't brute-force the derivation address, so just
// exit here
return None;
}
pub fn same_structure<K: MiniscriptKey>(&self, other: &Descriptor<K>) -> bool {
// Translate all the public keys to () and then check if the two descriptors are equal.
// TODO: translate hashes to their default value before checking for ==
match self {
Descriptor::Pk(_)
| Descriptor::Pkh(_)
| Descriptor::Wpkh(_)
| Descriptor::ShWpkh(_)
if utxo.is_some()
&& self.script_pubkey() == utxo.as_ref().unwrap().script_pubkey =>
{
Some(self.clone())
}
Descriptor::Bare(ms) | Descriptor::Sh(ms)
if psbt_input.redeem_script.is_some()
&& &ms.encode() == psbt_input.redeem_script.as_ref().unwrap() =>
{
Some(self.clone())
}
Descriptor::Wsh(ms) | Descriptor::ShWsh(ms)
if psbt_input.witness_script.is_some()
&& &ms.encode() == psbt_input.witness_script.as_ref().unwrap() =>
{
Some(self.clone())
}
_ => None,
let func_string = |_string: &String| -> Result<_, Error> { Ok(DummyKey::default()) };
let func_generic_pk = |_data: &K| -> Result<_, Error> { Ok(DummyKey::default()) };
let func_generic_pkh =
|_data: &<K as MiniscriptKey>::Hash| -> Result<_, Error> { Ok(DummyKey::default()) };
let translated_a = self.internal.translate_pk(func_string, func_string);
let translated_b = other.translate_pk(func_generic_pk, func_generic_pkh);
match (translated_a, translated_b) {
(Ok(a), Ok(b)) => a == b,
_ => false,
}
}
}
#[derive(Debug, Clone, Hash, PartialEq, PartialOrd, Eq, Ord, Default)]
struct DummyKey();
pub fn as_public_version(&self) -> Result<ExtendedDescriptor, Error> {
let keys: RefCell<BTreeMap<String, Box<dyn RealKey>>> = RefCell::new(BTreeMap::new());
impl fmt::Display for DummyKey {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
write!(f, "DummyKey")
let translatefpk = |string: &String| -> Result<_, Error> {
let public = self.keys.get(string).unwrap().public(&self.ctx)?;
let result = format!("{}", public);
keys.borrow_mut().insert(string.clone(), public);
Ok(result)
};
let translatefpkh = |string: &String| -> Result<_, Error> {
let public = self.keys.get(string).unwrap().public(&self.ctx)?;
let result = format!("{}", public);
keys.borrow_mut().insert(string.clone(), public);
Ok(result)
};
let internal = self.internal.translate_pk(translatefpk, translatefpkh)?;
Ok(ExtendedDescriptor {
internal,
keys: keys.into_inner(),
ctx: self.ctx.clone(),
})
}
}
impl std::str::FromStr for DummyKey {
type Err = ();
fn from_str(_: &str) -> Result<Self, Self::Err> {
Ok(DummyKey::default())
impl ExtractPolicy for ExtendedDescriptor {
fn extract_policy(&self) -> Result<Option<Policy>, Error> {
self.internal.extract_policy(
&self
.keys
.iter()
.map(|(k, v)| (k.into(), v.into_key()))
.collect(),
)
}
}
impl miniscript::MiniscriptKey for DummyKey {
type Hash = DummyKey;
impl TryFrom<&str> for ExtendedDescriptor {
type Error = Error;
fn to_pubkeyhash(&self) -> DummyKey {
DummyKey::default()
fn try_from(value: &str) -> Result<Self, Self::Error> {
let internal = StringDescriptor::from_str(value)?;
ExtendedDescriptor::new(internal)
}
}
impl TryFrom<StringDescriptor> for ExtendedDescriptor {
type Error = Error;
fn try_from(other: StringDescriptor) -> Result<Self, Self::Error> {
ExtendedDescriptor::new(other)
}
}
impl FromStr for ExtendedDescriptor {
    type Err = Error;

    /// Parse a descriptor from its string form.
    ///
    /// Forwards to the `TryFrom<&str>` implementation so the parsing logic
    /// lives in a single place.
    fn from_str(s: &str) -> Result<Self, Self::Err> {
        TryFrom::try_from(s)
    }
}
impl Into<String> for ExtendedDescriptor {
fn into(self) -> String {
format!("{}", self.internal)
}
}
@@ -323,118 +368,131 @@ impl miniscript::MiniscriptKey for DummyKey {
mod test {
use std::str::FromStr;
use bitcoin::consensus::encode::deserialize;
use bitcoin::hashes::hex::FromHex;
use bitcoin::util::psbt;
use bitcoin::{Network, PublicKey};
use super::*;
use crate::psbt::PSBTUtils;
use crate::descriptor::*;
#[test]
fn test_derive_from_psbt_input_wpkh_wif() {
let descriptor = Descriptor::<DescriptorPublicKey>::from_str(
"wpkh(02b4632d08485ff1df2db55b9dafd23347d1c47a457072a1e87be26896549a8737)",
)
.unwrap();
let psbt: psbt::PartiallySignedTransaction = deserialize(
&Vec::<u8>::from_hex(
"70736274ff010052010000000162307be8e431fbaff807cdf9cdc3fde44d7402\
11bc8342c31ffd6ec11fe35bcc0100000000ffffffff01328601000000000016\
001493ce48570b55c42c2af816aeaba06cfee1224fae000000000001011fa086\
01000000000016001493ce48570b55c42c2af816aeaba06cfee1224fae010304\
010000000000",
)
.unwrap(),
)
.unwrap();
macro_rules! hex_fingerprint {
($hex:expr) => {
Fingerprint::from_hex($hex).unwrap()
};
}
assert!(descriptor
.derive_from_psbt_input(&psbt.inputs[0], psbt.get_utxo_for(0))
.is_some());
macro_rules! hex_pubkey {
($hex:expr) => {
PublicKey::from_str($hex).unwrap()
};
}
macro_rules! deriv_path {
($str:expr) => {
DerivationPath::from_str($str).unwrap()
};
() => {
DerivationPath::from(vec![])
};
}
#[test]
fn test_derive_from_psbt_input_pkh_tpub() {
let descriptor = Descriptor::<DescriptorPublicKey>::from_str(
"pkh([0f056943/44h/0h/0h]tpubDDpWvmUrPZrhSPmUzCMBHffvC3HyMAPnWDSAQNBTnj1iZeJa7BZQEttFiP4DS4GCcXQHezdXhn86Hj6LHX5EDstXPWrMaSneRWM8yUf6NFd/10/*)",
)
.unwrap();
let psbt: psbt::PartiallySignedTransaction = deserialize(
&Vec::<u8>::from_hex(
"70736274ff010053010000000145843b86be54a3cd8c9e38444e1162676c00df\
e7964122a70df491ea12fd67090100000000ffffffff01c19598000000000017\
a91432bb94283282f72b2e034709e348c44d5a4db0ef8700000000000100f902\
0000000001010167e99c0eb67640f3a1b6805f2d8be8238c947f8aaf49eb0a9c\
bee6a42c984200000000171600142b29a22019cca05b9c2b2d283a4c4489e1cf\
9f8ffeffffff02a01dced06100000017a914e2abf033cadbd74f0f4c74946201\
decd20d5c43c8780969800000000001976a9148b0fce5fb1264e599a65387313\
3c95478b902eb288ac02473044022015d9211576163fa5b001e84dfa3d44efd9\
86b8f3a0d3d2174369288b2b750906022048dacc0e5d73ae42512fd2b97e2071\
a8d0bce443b390b1fe0b8128fe70ec919e01210232dad1c5a67dcb0116d407e2\
52584228ab7ec00e8b9779d0c3ffe8114fc1a7d2c80600000103040100000022\
0603433b83583f8c4879b329dd08bbc7da935e4cc02f637ff746e05f0466ffb2\
a6a2180f0569432c00008000000080000000800a000000000000000000",
)
.unwrap(),
)
.unwrap();
assert!(descriptor
.derive_from_psbt_input(&psbt.inputs[0], psbt.get_utxo_for(0))
.is_some());
/// A WIF private key yields a *fixed* descriptor: every derivation index maps
/// to the same address, and exactly one secret key is retained.
fn test_descriptor_parse_wif() {
    let string = "pkh(cVt4o7BGAig1UXywgGSmARhxMdzP5qvQsxKkSsc1XEkw3tDTQFpy)";
    let desc = ExtendedDescriptor::from_str(string).unwrap();

    assert!(desc.is_fixed());

    // Fixed descriptors ignore the derivation index, so indices 0 and 42
    // must produce the identical address.
    for index in &[0, 42] {
        assert_eq!(
            desc.derive(*index)
                .unwrap()
                .address(Network::Testnet)
                .unwrap()
                .to_string(),
            "mqwpxxvfv3QbM8PU8uBx2jaNt9btQqvQNx"
        );
    }

    // `count()` replaces the needless `collect::<Vec<_>>().len()` round-trip
    // (clippy `needless_collect`): no intermediate Vec is allocated.
    assert_eq!(desc.get_secret_keys().into_iter().count(), 1);
}
#[test]
fn test_derive_from_psbt_input_wsh() {
let descriptor = Descriptor::<DescriptorPublicKey>::from_str(
"wsh(and_v(v:pk(03b6633fef2397a0a9de9d7b6f23aef8368a6e362b0581f0f0af70d5ecfd254b14),older(6)))",
)
.unwrap();
let psbt: psbt::PartiallySignedTransaction = deserialize(
&Vec::<u8>::from_hex(
"70736274ff01005302000000011c8116eea34408ab6529223c9a176606742207\
67a1ff1d46a6e3c4a88243ea6e01000000000600000001109698000000000017\
a914ad105f61102e0d01d7af40d06d6a5c3ae2f7fde387000000000001012b80\
969800000000002200203ca72f106a72234754890ca7640c43f65d2174e44d33\
336030f9059345091044010304010000000105252103b6633fef2397a0a9de9d\
7b6f23aef8368a6e362b0581f0f0af70d5ecfd254b14ad56b20000",
)
.unwrap(),
)
.unwrap();
assert!(descriptor
.derive_from_psbt_input(&psbt.inputs[0], psbt.get_utxo_for(0))
.is_some());
/// A bare hex public key also yields a *fixed* descriptor, but since no
/// private material is present, no secret keys are stored.
fn test_descriptor_parse_pubkey() {
    let string = "pkh(039b6347398505f5ec93826dc61c19f47c66c0283ee9be980e29ce325a0f4679ef)";
    let desc = ExtendedDescriptor::from_str(string).unwrap();

    assert!(desc.is_fixed());

    // Fixed descriptors ignore the derivation index.
    for index in &[0, 42] {
        assert_eq!(
            desc.derive(*index)
                .unwrap()
                .address(Network::Testnet)
                .unwrap()
                .to_string(),
            "mqwpxxvfv3QbM8PU8uBx2jaNt9btQqvQNx"
        );
    }

    // Public-only descriptor: zero secret keys. `count()` replaces the
    // needless `collect::<Vec<_>>().len()` (clippy `needless_collect`).
    assert_eq!(desc.get_secret_keys().into_iter().count(), 0);
}
#[test]
fn test_derive_from_psbt_input_sh() {
let descriptor = Descriptor::<DescriptorPublicKey>::from_str(
"sh(and_v(v:pk(021403881a5587297818fcaf17d239cefca22fce84a45b3b1d23e836c4af671dbb),after(630000)))",
)
.unwrap();
let psbt: psbt::PartiallySignedTransaction = deserialize(
&Vec::<u8>::from_hex(
"70736274ff0100530100000001bc8c13df445dfadcc42afa6dc841f85d22b01d\
a6270ebf981740f4b7b1d800390000000000feffffff01ba9598000000000017\
a91457b148ba4d3e5fa8608a8657875124e3d1c9390887f09c0900000100e002\
0000000001016ba1bbe05cc93574a0d611ec7d93ad0ab6685b28d0cd80e8a82d\
debb326643c90100000000feffffff02809698000000000017a914d9a6e8c455\
8e16c8253afe53ce37ad61cf4c38c487403504cf6100000017a9144044fb6e0b\
757dfc1b34886b6a95aef4d3db137e870247304402202a9b72d939bcde8ba2a1\
e0980597e47af4f5c152a78499143c3d0a78ac2286a602207a45b1df9e93b8c9\
6f09f5c025fe3e413ca4b905fe65ee55d32a3276439a9b8f012102dc1fcc2636\
4da1aa718f03d8d9bd6f2ff410ed2cf1245a168aa3bcc995ac18e0a806000001\
03040100000001042821021403881a5587297818fcaf17d239cefca22fce84a4\
5b3b1d23e836c4af671dbbad03f09c09b10000",
)
.unwrap(),
)
.unwrap();
/// An xpub with a `/*` wildcard is a ranged (non-fixed) descriptor:
/// different derivation indexes must yield different addresses.
fn test_descriptor_parse_xpub() {
    let string = "pkh(tpubDEnoLuPdBep9bzw5LoGYpsxUQYheRQ9gcgrJhJEcdKFB9cWQRyYmkCyRoTqeD4tJYiVVgt6A3rN6rWn9RYhR9sBsGxji29LYWHuKKbdb1ev/*)";
    let desc = ExtendedDescriptor::from_str(string).unwrap();

    assert!(!desc.is_fixed());

    // Small helper to keep the two derivation checks readable.
    let address_at = |index| {
        desc.derive(index)
            .unwrap()
            .address(Network::Testnet)
            .unwrap()
            .to_string()
    };
    assert_eq!(address_at(0), "mxbXpnVkwARGtYXk5yeGYf59bGWuPpdE4X");
    assert_eq!(address_at(42), "mhtuS1QaEV4HPcK4bWk4Wvpd64SUjiC5Zt");

    // Only the public tpub was provided, so no xprv is stored. `count()`
    // replaces the needless `collect::<Vec<_>>().len()` (clippy
    // `needless_collect`).
    assert_eq!(desc.get_xprv().into_iter().count(), 0);
}
assert!(descriptor
.derive_from_psbt_input(&psbt.inputs[0], psbt.get_utxo_for(0))
.is_some());
#[test]
#[should_panic(expected = "KeyParsingError")]
fn test_descriptor_parse_fail() {
    // A descriptor whose key is neither WIF, hex, nor an extended key must
    // fail to parse; `unwrap()` converts the error into the expected panic.
    "pkh(this_is_not_a_valid_key)"
        .parse::<ExtendedDescriptor>()
        .unwrap();
}
#[test]
fn test_descriptor_hd_keypaths() {
    let string = "pkh(tpubDEnoLuPdBep9bzw5LoGYpsxUQYheRQ9gcgrJhJEcdKFB9cWQRyYmkCyRoTqeD4tJYiVVgt6A3rN6rWn9RYhR9sBsGxji29LYWHuKKbdb1ev/*)";
    let desc = ExtendedDescriptor::from_str(string).unwrap();

    let keypaths = desc.get_hd_keypaths(0).unwrap();

    // Public key derived at index 0 from the tpub above; bind it once
    // instead of repeating the hex literal in every assertion.
    let derived =
        hex_pubkey!("025d5fc65ebb8d44a5274b53bac21ff8307fec2334a32df05553459f8b1f7fe1b6");

    assert!(keypaths.contains_key(&derived));
    assert_eq!(
        keypaths.get(&derived),
        Some(&(hex_fingerprint!("31a507b8"), deriv_path!("m/0")))
    )
}
}

View File

@@ -1,74 +1,24 @@
// Magical Bitcoin Library
// Written in 2020 by
// Alekos Filini <alekos.filini@gmail.com>
//
// Copyright (c) 2020 Magical Bitcoin
//
// Permission is hereby granted, free of charge, to any person obtaining a copy
// of this software and associated documentation files (the "Software"), to deal
// in the Software without restriction, including without limitation the rights
// to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
// copies of the Software, and to permit persons to whom the Software is
// furnished to do so, subject to the following conditions:
//
// The above copyright notice and this permission notice shall be included in all
// copies or substantial portions of the Software.
//
// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
// IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
// FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
// AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
// LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
// OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
// SOFTWARE.
//! Descriptor policy
//!
//! This module implements the logic to extract and represent the spending policies of a descriptor
//! in a more human-readable format.
//!
//! ## Example
//!
//! ```
//! # use std::sync::Arc;
//! # use magical::descriptor::*;
//! let desc = "wsh(and_v(v:pk(cV3oCth6zxZ1UVsHLnGothsWNsaoxRhC6aeNi5VbSdFpwUkgkEci),or_d(pk(cVMTy7uebJgvFaSBwcgvwk8qn8xSLc97dKow4MBetjrrahZoimm2),older(12960))))";
//!
//! let (extended_desc, key_map) = ExtendedDescriptor::parse_secret(desc)?;
//! println!("{:?}", extended_desc);
//!
//! let signers = Arc::new(key_map.into());
//! let policy = extended_desc.extract_policy(signers)?;
//! println!("policy: {}", serde_json::to_string(&policy)?);
//! # Ok::<(), magical::Error>(())
//! ```
use std::cmp::max;
use std::collections::{BTreeMap, HashSet, VecDeque};
use std::fmt;
use std::sync::Arc;
use serde::ser::SerializeMap;
use serde::{Serialize, Serializer};
use bitcoin::hashes::*;
use bitcoin::secp256k1::Secp256k1;
use bitcoin::util::bip32::Fingerprint;
use bitcoin::PublicKey;
use miniscript::descriptor::DescriptorPublicKey;
use miniscript::{Descriptor, Miniscript, MiniscriptKey, ScriptContext, Terminal};
use miniscript::{Descriptor, Miniscript, Terminal};
#[allow(unused_imports)]
use log::{debug, error, info, trace};
use crate::descriptor::ExtractPolicy;
use crate::wallet::signer::{SignerId, SignersContainer};
use super::checksum::get_checksum;
use super::error::Error;
use super::XKeyUtils;
use crate::descriptor::{Key, MiniscriptExtractPolicy};
use crate::psbt::PSBTSatisfier;
/// Raw public key or extended key fingerprint
#[derive(Debug, Clone, Default, Serialize)]
pub struct PKOrF {
#[serde(skip_serializing_if = "Option::is_none")]
@@ -77,31 +27,35 @@ pub struct PKOrF {
pubkey_hash: Option<hash160::Hash>,
#[serde(skip_serializing_if = "Option::is_none")]
fingerprint: Option<Fingerprint>,
#[serde(skip_serializing_if = "Option::is_none")]
alias: Option<String>,
}
impl PKOrF {
fn from_key(k: &DescriptorPublicKey) -> Self {
match k {
DescriptorPublicKey::PubKey(pubkey) => PKOrF {
pubkey: Some(*pubkey),
..Default::default()
},
DescriptorPublicKey::XPub(xpub) => PKOrF {
fingerprint: Some(xpub.root_fingerprint()),
..Default::default()
},
}
}
fn from_key(k: &Box<dyn Key>) -> Self {
let secp = Secp256k1::gen_new();
fn from_key_hash(k: hash160::Hash) -> Self {
PKOrF {
pubkey_hash: Some(k),
..Default::default()
if let Some(alias) = k.alias() {
PKOrF {
alias: Some(alias.into()),
..Default::default()
}
} else if let Some(fing) = k.fingerprint(&secp) {
PKOrF {
fingerprint: Some(fing),
..Default::default()
}
} else {
let pubkey = k.as_public_key(&secp, None).unwrap();
PKOrF {
pubkey: Some(pubkey),
..Default::default()
}
}
}
}
/// An item that need to be satisfied
#[derive(Debug, Clone, Serialize)]
#[serde(tag = "type", rename_all = "UPPERCASE")]
pub enum SatisfiableItem {
@@ -231,44 +185,30 @@ where
map.end()
}
/// Represent if and how much a policy item is satisfied by the wallet's descriptor
// Serialized internally-tagged: the variant name appears as an uppercase
// `"type"` field alongside the variant's own fields.
#[derive(Debug, Clone, Serialize)]
#[serde(tag = "type", rename_all = "UPPERCASE")]
pub enum Satisfaction {
    /// Only a partial satisfaction of some kind of threshold policy
    Partial {
        /// Total number of items
        n: usize,
        /// Threshold
        m: usize,
        /// The items that can be satisfied by the descriptor
        items: Vec<usize>,
        // Omitted from the serialized form when there are no conditions.
        #[serde(skip_serializing_if = "BTreeMap::is_empty")]
        /// Extra conditions that also need to be satisfied
        conditions: ConditionMap,
    },
    /// Can reach the threshold of some kind of threshold policy
    // Same shape as `Partial`, but enough items are satisfiable to meet the
    // threshold `m`; conditions are folded before serialization.
    PartialComplete {
        /// Total number of items
        n: usize,
        /// Threshold
        m: usize,
        /// The items that can be satisfied by the descriptor
        items: Vec<usize>,
        #[serde(
            serialize_with = "serialize_folded_cond_map",
            skip_serializing_if = "BTreeMap::is_empty"
        )]
        /// Extra conditions that also need to be satisfied
        conditions: FoldedConditionMap,
    },
    /// Can satisfy the policy item
    Complete {
        /// Extra conditions that also need to be satisfied
        condition: Condition,
    },
    /// Cannot satisfy or contribute to the policy item
    None,
}
@@ -400,22 +340,16 @@ impl From<bool> for Satisfaction {
}
}
/// Descriptor spending policy
#[derive(Debug, Clone, Serialize)]
pub struct Policy {
/// Identifier for this policy node
pub id: String,
id: String,
/// Type of this policy node
#[serde(flatten)]
pub item: SatisfiableItem,
/// How a much given PSBT already satisfies this polcy node **(currently unused)**
pub satisfaction: Satisfaction,
/// How the wallet's descriptor can satisfy this policy node
pub contribution: Satisfaction,
item: SatisfiableItem,
satisfaction: Satisfaction,
contribution: Satisfaction,
}
/// An extra condition that must be satisfied but that is out of control of the user
#[derive(Hash, Clone, Copy, Debug, PartialEq, Eq, PartialOrd, Ord, Default, Serialize)]
pub struct Condition {
#[serde(skip_serializing_if = "Option::is_none")]
@@ -456,7 +390,6 @@ impl Condition {
}
}
/// Errors that can happen while extracting and manipulating policies
#[derive(Debug)]
pub enum PolicyError {
NotEnoughItemsSelected(String),
@@ -468,16 +401,8 @@ pub enum PolicyError {
IncompatibleConditions,
}
impl fmt::Display for PolicyError {
    /// User-facing text for a policy error.
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        // Delegate to the derived `Debug` representation; the output is
        // byte-identical to `write!(f, "{:?}", self)`.
        fmt::Debug::fmt(self, f)
    }
}
// `PolicyError` has `Debug` and `Display`, so the default `std::error::Error`
// methods suffice — no overrides are needed.
impl std::error::Error for PolicyError {}
impl Policy {
fn new(item: SatisfiableItem) -> Self {
pub fn new(item: SatisfiableItem) -> Self {
Policy {
id: item.id(),
item,
@@ -486,7 +411,7 @@ impl Policy {
}
}
fn make_and(a: Option<Policy>, b: Option<Policy>) -> Result<Option<Policy>, PolicyError> {
pub fn make_and(a: Option<Policy>, b: Option<Policy>) -> Result<Option<Policy>, PolicyError> {
match (a, b) {
(None, None) => Ok(None),
(Some(x), None) | (None, Some(x)) => Ok(Some(x)),
@@ -494,7 +419,7 @@ impl Policy {
}
}
fn make_or(a: Option<Policy>, b: Option<Policy>) -> Result<Option<Policy>, PolicyError> {
pub fn make_or(a: Option<Policy>, b: Option<Policy>) -> Result<Option<Policy>, PolicyError> {
match (a, b) {
(None, None) => Ok(None),
(Some(x), None) | (None, Some(x)) => Ok(Some(x)),
@@ -502,7 +427,10 @@ impl Policy {
}
}
fn make_thresh(items: Vec<Policy>, threshold: usize) -> Result<Option<Policy>, PolicyError> {
pub fn make_thresh(
items: Vec<Policy>,
threshold: usize,
) -> Result<Option<Policy>, PolicyError> {
if threshold == 0 {
return Ok(None);
}
@@ -525,15 +453,14 @@ impl Policy {
}
fn make_multisig(
keys: &Vec<DescriptorPublicKey>,
signers: Arc<SignersContainer<DescriptorPublicKey>>,
keys: Vec<Option<&Box<dyn Key>>>,
threshold: usize,
) -> Result<Option<Policy>, PolicyError> {
if threshold == 0 {
return Ok(None);
}
let parsed_keys = keys.iter().map(|k| PKOrF::from_key(k)).collect();
let parsed_keys = keys.iter().map(|k| PKOrF::from_key(k.unwrap())).collect();
let mut contribution = Satisfaction::Partial {
n: keys.len(),
@@ -542,14 +469,14 @@ impl Policy {
conditions: Default::default(),
};
for (index, key) in keys.iter().enumerate() {
if let Some(_) = signers.find(signer_id(key)) {
contribution.add(
&Satisfaction::Complete {
condition: Default::default(),
},
index,
)?;
}
let val = if key.is_some() && key.unwrap().has_secret() {
Satisfaction::Complete {
condition: Default::default(),
}
} else {
Satisfaction::None
};
contribution.add(&val, index)?;
}
contribution.finalize()?;
@@ -563,19 +490,16 @@ impl Policy {
Ok(Some(policy))
}
/// Return whether or not a specific path in the policy tree is required to unambiguously
/// create a transaction
///
/// What this means is that for some spending policies the user should select which paths in
/// the tree it intends to satisfy while signing, because the transaction must be created differently based
/// on that.
pub fn requires_path(&self) -> bool {
self.get_condition(&BTreeMap::new()).is_err()
pub fn satisfy(&mut self, _satisfier: &PSBTSatisfier, _desc_node: &Terminal<PublicKey>) {
//self.satisfaction = self.item.satisfy(satisfier, desc_node);
//self.contribution += &self.satisfaction;
}
/// Return the conditions that are set by the spending policy for a given path in the
/// policy tree
pub fn get_condition(
pub fn requires_path(&self) -> bool {
self.get_requirements(&BTreeMap::new()).is_err()
}
pub fn get_requirements(
&self,
path: &BTreeMap<String, Vec<usize>>,
) -> Result<Condition, PolicyError> {
@@ -596,7 +520,7 @@ impl Policy {
SatisfiableItem::Thresh { items, threshold } => {
let mapped_req = items
.iter()
.map(|i| i.get_condition(path))
.map(|i| i.get_requirements(path))
.collect::<Result<Vec<_>, _>>()?;
// if all the requirements are null we don't care about `selected` because there
@@ -646,55 +570,60 @@ impl From<SatisfiableItem> for Policy {
}
}
fn signer_id(key: &DescriptorPublicKey) -> SignerId<DescriptorPublicKey> {
match key {
DescriptorPublicKey::PubKey(pubkey) => pubkey.to_pubkeyhash().into(),
DescriptorPublicKey::XPub(xpub) => xpub.root_fingerprint().into(),
}
fn signature_from_string(key: Option<&Box<dyn Key>>) -> Option<Policy> {
key.map(|k| {
let mut policy: Policy = SatisfiableItem::Signature(PKOrF::from_key(k)).into();
policy.contribution = if k.has_secret() {
Satisfaction::Complete {
condition: Default::default(),
}
} else {
Satisfaction::None
};
policy
})
}
fn signature(
key: &DescriptorPublicKey,
signers: Arc<SignersContainer<DescriptorPublicKey>>,
) -> Policy {
let mut policy: Policy = SatisfiableItem::Signature(PKOrF::from_key(key)).into();
fn signature_key_from_string(key: Option<&Box<dyn Key>>) -> Option<Policy> {
let secp = Secp256k1::gen_new();
policy.contribution = if signers.find(signer_id(key)).is_some() {
Satisfaction::Complete {
condition: Default::default(),
key.map(|k| {
let pubkey = k.as_public_key(&secp, None).unwrap();
let mut policy: Policy = if let Some(fing) = k.fingerprint(&secp) {
SatisfiableItem::SignatureKey(PKOrF {
fingerprint: Some(fing),
..Default::default()
})
} else {
SatisfiableItem::SignatureKey(PKOrF {
pubkey_hash: Some(hash160::Hash::hash(&pubkey.to_bytes())),
..Default::default()
})
}
} else {
Satisfaction::None
};
.into();
policy.contribution = if k.has_secret() {
Satisfaction::Complete {
condition: Default::default(),
}
} else {
Satisfaction::None
};
policy
policy
})
}
fn signature_key(
key_hash: &<DescriptorPublicKey as MiniscriptKey>::Hash,
signers: Arc<SignersContainer<DescriptorPublicKey>>,
) -> Policy {
let mut policy: Policy = SatisfiableItem::Signature(PKOrF::from_key_hash(*key_hash)).into();
if let Some(_) = signers.find(SignerId::PkHash(*key_hash)) {
policy.contribution = Satisfaction::Complete {
condition: Default::default(),
}
}
policy
}
impl<Ctx: ScriptContext> ExtractPolicy for Miniscript<DescriptorPublicKey, Ctx> {
impl MiniscriptExtractPolicy for Miniscript<String> {
fn extract_policy(
&self,
signers: Arc<SignersContainer<DescriptorPublicKey>>,
lookup_map: &BTreeMap<String, Box<dyn Key>>,
) -> Result<Option<Policy>, Error> {
Ok(match &self.node {
// Leaves
Terminal::True | Terminal::False => None,
Terminal::PkK(pubkey) => Some(signature(pubkey, Arc::clone(&signers))),
Terminal::PkH(pubkey_hash) => Some(signature_key(pubkey_hash, Arc::clone(&signers))),
Terminal::Pk(pubkey) => signature_from_string(lookup_map.get(pubkey)),
Terminal::PkH(pubkey_hash) => signature_key_from_string(lookup_map.get(pubkey_hash)),
Terminal::After(value) => {
let mut policy: Policy = SatisfiableItem::AbsoluteTimelock { value: *value }.into();
policy.contribution = Satisfaction::Complete {
@@ -727,7 +656,9 @@ impl<Ctx: ScriptContext> ExtractPolicy for Miniscript<DescriptorPublicKey, Ctx>
Terminal::Hash160(hash) => {
Some(SatisfiableItem::HASH160Preimage { hash: *hash }.into())
}
Terminal::Multi(k, pks) => Policy::make_multisig(pks, Arc::clone(&signers), *k)?,
Terminal::ThreshM(k, pks) => {
Policy::make_multisig(pks.iter().map(|s| lookup_map.get(s)).collect(), *k)?
}
// Identities
Terminal::Alt(inner)
| Terminal::Swap(inner)
@@ -735,31 +666,26 @@ impl<Ctx: ScriptContext> ExtractPolicy for Miniscript<DescriptorPublicKey, Ctx>
| Terminal::DupIf(inner)
| Terminal::Verify(inner)
| Terminal::NonZero(inner)
| Terminal::ZeroNotEqual(inner) => inner.extract_policy(Arc::clone(&signers))?,
| Terminal::ZeroNotEqual(inner) => inner.extract_policy(lookup_map)?,
// Complex policies
Terminal::AndV(a, b) | Terminal::AndB(a, b) => Policy::make_and(
a.extract_policy(Arc::clone(&signers))?,
b.extract_policy(Arc::clone(&signers))?,
)?,
Terminal::AndV(a, b) | Terminal::AndB(a, b) => {
Policy::make_and(a.extract_policy(lookup_map)?, b.extract_policy(lookup_map)?)?
}
Terminal::AndOr(x, y, z) => Policy::make_or(
Policy::make_and(
x.extract_policy(Arc::clone(&signers))?,
y.extract_policy(Arc::clone(&signers))?,
)?,
z.extract_policy(Arc::clone(&signers))?,
Policy::make_and(x.extract_policy(lookup_map)?, y.extract_policy(lookup_map)?)?,
z.extract_policy(lookup_map)?,
)?,
Terminal::OrB(a, b)
| Terminal::OrD(a, b)
| Terminal::OrC(a, b)
| Terminal::OrI(a, b) => Policy::make_or(
a.extract_policy(Arc::clone(&signers))?,
b.extract_policy(Arc::clone(&signers))?,
)?,
| Terminal::OrI(a, b) => {
Policy::make_or(a.extract_policy(lookup_map)?, b.extract_policy(lookup_map)?)?
}
Terminal::Thresh(k, nodes) => {
let mut threshold = *k;
let mapped: Vec<_> = nodes
.iter()
.map(|n| n.extract_policy(Arc::clone(&signers)))
.map(|n| n.extract_policy(lookup_map))
.collect::<Result<Vec<_>, _>>()?
.into_iter()
.filter_map(|x| x)
@@ -778,18 +704,20 @@ impl<Ctx: ScriptContext> ExtractPolicy for Miniscript<DescriptorPublicKey, Ctx>
}
}
impl ExtractPolicy for Descriptor<DescriptorPublicKey> {
impl MiniscriptExtractPolicy for Descriptor<String> {
fn extract_policy(
&self,
signers: Arc<SignersContainer<DescriptorPublicKey>>,
lookup_map: &BTreeMap<String, Box<dyn Key>>,
) -> Result<Option<Policy>, Error> {
match self {
Descriptor::Pk(pubkey)
| Descriptor::Pkh(pubkey)
| Descriptor::Wpkh(pubkey)
| Descriptor::ShWpkh(pubkey) => Ok(Some(signature(pubkey, signers))),
Descriptor::Bare(inner) | Descriptor::Sh(inner) => Ok(inner.extract_policy(signers)?),
Descriptor::Wsh(inner) | Descriptor::ShWsh(inner) => Ok(inner.extract_policy(signers)?),
| Descriptor::ShWpkh(pubkey) => Ok(signature_from_string(lookup_map.get(pubkey))),
Descriptor::Bare(inner)
| Descriptor::Sh(inner)
| Descriptor::Wsh(inner)
| Descriptor::ShWsh(inner) => Ok(inner.extract_policy(lookup_map)?),
}
}
}

View File

@@ -1,3 +0,0 @@
#[doc(include = "../README.md")]
#[cfg(doctest)]
pub struct ReadmeDoctests;

View File

@@ -1,32 +1,5 @@
// Magical Bitcoin Library
// Written in 2020 by
// Alekos Filini <alekos.filini@gmail.com>
//
// Copyright (c) 2020 Magical Bitcoin
//
// Permission is hereby granted, free of charge, to any person obtaining a copy
// of this software and associated documentation files (the "Software"), to deal
// in the Software without restriction, including without limitation the rights
// to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
// copies of the Software, and to permit persons to whom the Software is
// furnished to do so, subject to the following conditions:
//
// The above copyright notice and this permission notice shall be included in all
// copies or substantial portions of the Software.
//
// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
// IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
// FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
// AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
// LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
// OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
// SOFTWARE.
use bitcoin::{OutPoint, Script, Txid};
use std::fmt;
use bitcoin::{Address, OutPoint};
/// Errors that can be thrown by the [`Wallet`](crate::wallet::Wallet)
#[derive(Debug)]
pub enum Error {
KeyMismatch(bitcoin::secp256k1::PublicKey, bitcoin::secp256k1::PublicKey),
@@ -35,18 +8,10 @@ pub enum Error {
Generic(String),
ScriptDoesntHaveAddressForm,
SendAllMultipleOutputs,
NoAddressees,
OutputBelowDustLimit(usize),
InsufficientFunds,
InvalidAddressNetwork(Address),
UnknownUTXO,
DifferentTransactions,
TransactionNotFound,
TransactionConfirmed,
IrreplaceableTransaction,
FeeRateTooLow {
required: crate::types::FeeRate,
},
ChecksumMismatch,
DifferentDescriptorStructure,
@@ -54,7 +19,13 @@ pub enum Error {
SpendingPolicyRequired,
InvalidPolicyPathError(crate::descriptor::policy::PolicyError),
Signer(crate::wallet::signer::SignerError),
// Signing errors (expected, received)
InputTxidMismatch((Txid, OutPoint)),
InputRedeemScriptMismatch((Script, Script)), // scriptPubKey, redeemScript
InputWitnessScriptMismatch((Script, Script)), // scriptPubKey, redeemScript
InputUnknownSegwitScript(Script),
InputMissingWitnessScript(usize),
MissingUTXO,
// Blockchain interface errors
Uncapable(crate::blockchain::Capability),
@@ -65,10 +36,8 @@ pub enum Error {
InvalidOutpoint(OutPoint),
Descriptor(crate::descriptor::error::Error),
AddressValidator(crate::wallet::address_validator::AddressValidatorError),
Encode(bitcoin::consensus::encode::Error),
Miniscript(miniscript::Error),
BIP32(bitcoin::util::bip32::Error),
Secp256k1(bitcoin::secp256k1::Error),
JSON(serde_json::Error),
@@ -79,20 +48,10 @@ pub enum Error {
Electrum(electrum_client::Error),
#[cfg(feature = "esplora")]
Esplora(crate::blockchain::esplora::EsploraError),
#[cfg(feature = "compact_filters")]
CompactFilters(crate::blockchain::compact_filters::CompactFiltersError),
#[cfg(feature = "key-value-db")]
Sled(sled::Error),
}
impl fmt::Display for Error {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
write!(f, "{:?}", self)
}
}
impl std::error::Error for Error {}
macro_rules! impl_error {
( $from:ty, $to:ident ) => {
impl std::convert::From<$from> for Error {
@@ -104,18 +63,12 @@ macro_rules! impl_error {
}
impl_error!(crate::descriptor::error::Error, Descriptor);
impl_error!(
crate::wallet::address_validator::AddressValidatorError,
AddressValidator
);
impl_error!(
crate::descriptor::policy::PolicyError,
InvalidPolicyPathError
);
impl_error!(crate::wallet::signer::SignerError, Signer);
impl_error!(bitcoin::consensus::encode::Error, Encode);
impl_error!(miniscript::Error, Miniscript);
impl_error!(bitcoin::util::bip32::Error, BIP32);
impl_error!(bitcoin::secp256k1::Error, Secp256k1);
impl_error!(serde_json::Error, JSON);
@@ -128,13 +81,3 @@ impl_error!(electrum_client::Error, Electrum);
impl_error!(crate::blockchain::esplora::EsploraError, Esplora);
#[cfg(feature = "key-value-db")]
impl_error!(sled::Error, Sled);
#[cfg(feature = "compact_filters")]
impl From<crate::blockchain::compact_filters::CompactFiltersError> for Error {
fn from(other: crate::blockchain::compact_filters::CompactFiltersError) -> Self {
match other {
crate::blockchain::compact_filters::CompactFiltersError::Global(e) => *e,
err @ _ => Error::CompactFilters(err),
}
}
}

View File

@@ -1,34 +1,3 @@
// Magical Bitcoin Library
// Written in 2020 by
// Alekos Filini <alekos.filini@gmail.com>
//
// Copyright (c) 2020 Magical Bitcoin
//
// Permission is hereby granted, free of charge, to any person obtaining a copy
// of this software and associated documentation files (the "Software"), to deal
// in the Software without restriction, including without limitation the rights
// to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
// copies of the Software, and to permit persons to whom the Software is
// furnished to do so, subject to the following conditions:
//
// The above copyright notice and this permission notice shall be included in all
// copies or substantial portions of the Software.
//
// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
// IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
// FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
// AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
// LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
// OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
// SOFTWARE.
// only enables the `doc_cfg` feature when
// the `docsrs` configuration attribute is defined
#![cfg_attr(docsrs, feature(doc_cfg))]
// only enables the nightly `external_doc` feature when
// `test-md-docs` is enabled
#![cfg_attr(feature = "test-md-docs", feature(external_doc))]
pub extern crate bitcoin;
extern crate log;
pub extern crate miniscript;
@@ -36,21 +5,22 @@ extern crate serde;
#[macro_use]
extern crate serde_json;
#[cfg(any(target_arch = "wasm32", feature = "async-interface"))]
#[macro_use]
extern crate async_trait;
#[macro_use]
extern crate magical_macros;
#[cfg(any(test, feature = "compact_filters"))]
#[cfg(test)]
#[macro_use]
extern crate lazy_static;
#[macro_use]
extern crate async_trait;
#[cfg(feature = "electrum")]
pub extern crate electrum_client;
#[cfg(feature = "electrum")]
pub use electrum_client::client::Client;
#[cfg(feature = "esplora")]
pub extern crate reqwest;
#[cfg(feature = "esplora")]
pub use blockchain::esplora::EsploraBlockchain;
#[cfg(feature = "key-value-db")]
pub extern crate sled;
@@ -58,31 +28,17 @@ pub extern crate sled;
#[cfg(feature = "cli-utils")]
pub mod cli;
#[cfg(test)]
#[macro_use]
extern crate testutils;
#[cfg(test)]
#[macro_use]
extern crate testutils_macros;
#[cfg(test)]
#[macro_use]
extern crate serial_test;
#[macro_use]
pub(crate) mod error;
pub mod error;
pub mod blockchain;
pub mod database;
pub mod descriptor;
#[cfg(feature = "test-md-docs")]
mod doctest;
pub(crate) mod psbt;
pub(crate) mod types;
#[cfg(feature = "multiparty")]
pub mod multiparty;
pub mod psbt;
pub mod signer;
pub mod types;
pub mod wallet;
pub use descriptor::HDKeyPaths;
pub use error::Error;
pub use types::*;
pub use wallet::address_validator;
pub use wallet::signer;
pub use wallet::tx_builder::TxBuilder;
pub use descriptor::ExtendedDescriptor;
pub use wallet::{OfflineWallet, Wallet};

231
src/multiparty/mod.rs Normal file
View File

@@ -0,0 +1,231 @@
use std::cell::RefCell;
use std::collections::BTreeMap;
use bitcoin::secp256k1::Secp256k1;
use crate::descriptor::error::Error;
use crate::descriptor::keys::{parse_key, DummyKey, Key, KeyAlias, RealKey};
use crate::descriptor::{ExtendedDescriptor, MiniscriptExtractPolicy, Policy, StringDescriptor};
/// Role-specific behavior in a multiparty descriptor setup
///
/// Implemented by the two marker types [`Coordinator`] and [`Peer`]; the
/// `Default` bound lets `Participant::new` construct the marker value.
pub trait ParticipantType: Default {
    /// Validate the key aliases parsed from the descriptor template for
    /// this role, returning an error naming the offending alias otherwise
    fn validate_aliases(aliases: Vec<&String>) -> Result<(), Error>;
}
#[derive(Default)]
pub struct Coordinator {}
impl ParticipantType for Coordinator {
fn validate_aliases(aliases: Vec<&String>) -> Result<(), Error> {
if aliases.into_iter().any(|a| a == "[PEER]") {
Err(Error::InvalidAlias("[PEER]".into()))
} else {
Ok(())
}
}
}
/// Marker type for the peer role: a party that receives a template from the
/// coordinator and contributes its own key through the "[PEER]" slot
#[derive(Default)]
pub struct Peer;

impl ParticipantType for Peer {
    fn validate_aliases(aliases: Vec<&String>) -> Result<(), Error> {
        // A peer's template must contain the reserved "[PEER]" slot for the
        // key it is expected to contribute.
        let has_own_slot = aliases.into_iter().any(|a| a == "[PEER]");
        if has_own_slot {
            Ok(())
        } else {
            Err(Error::MissingAlias("[PEER]".into()))
        }
    }
}
/// One party in a multiparty descriptor setup, parameterized by its role
pub struct Participant<T: ParticipantType> {
    /// Descriptor template containing key aliases still to be filled in
    descriptor: StringDescriptor,
    /// Every key expression found in the template, keyed by its name/alias
    parsed_keys: BTreeMap<String, Box<dyn Key>>,
    /// Concrete keys collected so far, keyed by the alias they replace
    received_keys: BTreeMap<String, Box<dyn RealKey>>,
    /// Role marker value ([`Coordinator`] or [`Peer`])
    _data: T,
}
impl<T: ParticipantType> Participant<T> {
    /// Create a new participant from a descriptor template
    ///
    /// Parses the key expressions found in `sd` and validates the resulting
    /// aliases according to the participant's role `T`.
    pub fn new(sd: StringDescriptor) -> Result<Self, Error> {
        let parsed_keys = Self::parse_keys(&sd, vec![]);
        T::validate_aliases(parsed_keys.keys().collect())?;

        Ok(Participant {
            descriptor: sd,
            parsed_keys,
            received_keys: Default::default(),
            _data: Default::default(),
        })
    }

    /// Walk every key expression in `sd` and collect them into a
    /// `name -> parsed key` map
    ///
    /// Strings that don't parse as a concrete key are kept as [`KeyAlias`]
    /// placeholders; aliases listed in `with_secrets` are flagged as carrying
    /// secret material. The `RefCell` lets both translation closures share
    /// the accumulator map (presumably `translate_pk` takes `Fn`, not
    /// `FnMut` — the returned `DummyKey`s are discarded, only the side effect
    /// on `keys` matters).
    fn parse_keys(
        sd: &StringDescriptor,
        with_secrets: Vec<&str>,
    ) -> BTreeMap<String, Box<dyn Key>> {
        let keys: RefCell<BTreeMap<String, Box<dyn Key>>> = RefCell::new(BTreeMap::new());

        // Handler for pk() key expressions
        let translatefpk = |string: &String| -> Result<_, Error> {
            let (key, parsed) = match parse_key(string) {
                Ok((key, parsed)) => (key, parsed.into_key()),
                Err(_) => (
                    string.clone(),
                    KeyAlias::new_boxed(string.as_str(), with_secrets.contains(&string.as_str())),
                ),
            };
            keys.borrow_mut().insert(key, parsed);
            Ok(DummyKey::default())
        };
        // Same logic, for pkh() key expressions
        let translatefpkh = |string: &String| -> Result<_, Error> {
            let (key, parsed) = match parse_key(string) {
                Ok((key, parsed)) => (key, parsed.into_key()),
                Err(_) => (
                    string.clone(),
                    KeyAlias::new_boxed(string.as_str(), with_secrets.contains(&string.as_str())),
                ),
            };
            keys.borrow_mut().insert(key, parsed);
            Ok(DummyKey::default())
        };

        sd.translate_pk(translatefpk, translatefpkh).unwrap();

        keys.into_inner()
    }

    /// Extract the descriptor's spending policy, treating the aliases listed
    /// in `with_secrets` as keys whose secret we hold
    pub fn policy_for(&self, with_secrets: Vec<&str>) -> Result<Option<Policy>, Error> {
        let keys = Self::parse_keys(&self.descriptor, with_secrets);
        self.descriptor.extract_policy(&keys)
    }

    /// Aliases parsed from the template that have not received a concrete
    /// key yet
    fn _missing_keys(&self) -> Vec<&String> {
        self.parsed_keys
            .keys()
            .filter(|k| !self.received_keys.contains_key(*k))
            .collect()
    }

    /// Whether a concrete key has been received for every alias
    pub fn completed(&self) -> bool {
        self._missing_keys().is_empty()
    }

    /// Consume the participant and build the final [`ExtendedDescriptor`],
    /// substituting every alias with its received key
    ///
    /// Fails with [`Error::Incomplete`] when keys are still missing; after
    /// that check the `expect()` below should be unreachable for aliases
    /// that were parsed from the template.
    pub fn finalize(self) -> Result<ExtendedDescriptor, Error> {
        if !self.completed() {
            return Err(Error::Incomplete);
        }

        let translatefpk = |string: &String| -> Result<_, Error> {
            Ok(format!(
                "{}",
                self.received_keys
                    .get(string)
                    .expect(&format!("Missing key: `{}`", string))
            ))
        };
        let translatefpkh = |string: &String| -> Result<_, Error> {
            Ok(format!(
                "{}",
                self.received_keys
                    .get(string)
                    .expect(&format!("Missing key: `{}`", string))
            ))
        };

        let internal = self.descriptor.translate_pk(translatefpk, translatefpkh)?;
        Ok(ExtendedDescriptor {
            internal,
            keys: self.received_keys,
            ctx: Secp256k1::gen_new(),
        })
    }
}
impl Participant<Coordinator> {
    /// The descriptor template this coordinator was created from
    pub fn descriptor(&self) -> &StringDescriptor {
        &self.descriptor
    }

    /// Register the key received from a peer under `alias`
    ///
    /// Keys carrying secret material are rejected: the coordinator should
    /// only ever learn public keys.
    pub fn add_key(&mut self, alias: &str, key: Box<dyn RealKey>) -> Result<(), Error> {
        // TODO: check network
        if key.has_secret() {
            return Err(Error::KeyHasSecret);
        }

        self.received_keys.insert(alias.into(), key);
        Ok(())
    }

    /// Aliases for which a key has already been received
    pub fn received_keys(&self) -> Vec<&String> {
        self.received_keys.keys().collect()
    }

    /// Aliases still waiting for a key
    pub fn missing_keys(&self) -> Vec<&String> {
        self._missing_keys()
    }

    /// Produce the descriptor to hand to the peer identified by `alias`,
    /// with that peer's own slot renamed to the reserved "[PEER]" alias
    pub fn descriptor_for(&self, alias: &str) -> Result<StringDescriptor, Error> {
        if !self.parsed_keys.contains_key(alias) {
            return Err(Error::MissingAlias(alias.into()));
        }

        // Rename only the requested alias, leave every other name untouched
        let map_name = |s: &String| {
            if s == alias {
                "[PEER]".into()
            } else {
                s.into()
            }
        };

        let translatefpk = |string: &String| -> Result<_, Error> { Ok(map_name(string)) };
        let translatefpkh = |string: &String| -> Result<_, Error> { Ok(map_name(string)) };

        Ok(self.descriptor.translate_pk(translatefpk, translatefpkh)?)
    }

    /// Export the complete `alias -> key` mapping (keys serialized with
    /// `Display`) so peers can finalize their own descriptors
    ///
    /// Only available once every alias has received its key.
    pub fn get_map(&self) -> Result<BTreeMap<String, String>, Error> {
        if !self.completed() {
            return Err(Error::Incomplete);
        }

        Ok(self
            .received_keys
            .iter()
            .map(|(k, v)| (k.into(), format!("{}", v)))
            .collect())
    }
}
impl Participant<Peer> {
    /// Extract the wallet policy from the peer's point of view, i.e. treating
    /// the reserved "[PEER]" slot as the key whose secret we hold
    pub fn policy(&self) -> Result<Option<Policy>, Error> {
        self.policy_for(vec!["[PEER]"])
    }

    /// Set the key this peer contributes to the descriptor
    ///
    /// Only the public part of `key` is stored, so no secret material can
    /// accidentally leak when the key is later shared with the coordinator.
    pub fn use_key(&mut self, key: Box<dyn RealKey>) -> Result<(), Error> {
        let secp = Secp256k1::gen_new();
        self.received_keys
            .insert("[PEER]".into(), key.public(&secp)?);
        Ok(())
    }

    /// The key previously set with [`use_key`](Self::use_key), if any
    //
    // Fixed: this is a read-only accessor, so it now takes `&self` instead of
    // needlessly requiring a mutable borrow (backward compatible for all
    // callers). Also dropped the redundant `.into()` on the lookup key:
    // `BTreeMap<String, _>::get` accepts a `&str` directly.
    pub fn my_key(&self) -> Option<&Box<dyn RealKey>> {
        self.received_keys.get("[PEER]")
    }

    /// Merge the coordinator-provided `alias -> key` map into the local state
    /// and build the final descriptor
    ///
    /// Every value in `map` must parse as a concrete key, otherwise the
    /// parsing error is returned and nothing is finalized.
    pub fn apply_map(mut self, map: BTreeMap<String, String>) -> Result<ExtendedDescriptor, Error> {
        let mut parsed_map: BTreeMap<_, _> = map
            .into_iter()
            .map(|(k, v)| -> Result<_, Error> {
                let (_, parsed) = parse_key(&v)?;
                Ok((k, parsed))
            })
            .collect::<Result<_, _>>()?;

        self.received_keys.append(&mut parsed_map);
        self.finalize()
    }
}

View File

@@ -1,52 +1,271 @@
// Magical Bitcoin Library
// Written in 2020 by
// Alekos Filini <alekos.filini@gmail.com>
//
// Copyright (c) 2020 Magical Bitcoin
//
// Permission is hereby granted, free of charge, to any person obtaining a copy
// of this software and associated documentation files (the "Software"), to deal
// in the Software without restriction, including without limitation the rights
// to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
// copies of the Software, and to permit persons to whom the Software is
// furnished to do so, subject to the following conditions:
//
// The above copyright notice and this permission notice shall be included in all
// copies or substantial portions of the Software.
//
// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
// IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
// FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
// AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
// LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
// OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
// SOFTWARE.
use std::collections::BTreeMap;
use bitcoin::util::psbt::PartiallySignedTransaction as PSBT;
use bitcoin::TxOut;
use bitcoin::hashes::{hash160, Hash};
use bitcoin::util::bip143::SighashComponents;
use bitcoin::util::bip32::{DerivationPath, ExtendedPrivKey, Fingerprint};
use bitcoin::util::psbt;
use bitcoin::{PrivateKey, PublicKey, Script, SigHashType, Transaction};
pub trait PSBTUtils {
fn get_utxo_for(&self, input_index: usize) -> Option<TxOut>;
use bitcoin::secp256k1::{self, All, Message, Secp256k1};
#[allow(unused_imports)]
use log::{debug, error, info, trace};
use miniscript::{BitcoinSig, MiniscriptKey, Satisfier};
use crate::descriptor::ExtendedDescriptor;
use crate::error::Error;
use crate::signer::Signer;
pub mod utils;
/// Miniscript satisfier backed by a single PSBT input
pub struct PSBTSatisfier<'a> {
    /// PSBT input whose partial_sigs back the signature lookups
    input: &'a psbt::Input,
    /// Fallback answer for timelock checks when heights are unknown
    assume_height_reached: bool,
    /// Block height the spent coins were created at, if known
    create_height: Option<u32>,
    /// Current chain height, if known
    current_height: Option<u32>,
}
impl PSBTUtils for PSBT {
fn get_utxo_for(&self, input_index: usize) -> Option<TxOut> {
let tx = &self.global.unsigned_tx;
if input_index >= tx.input.len() {
return None;
impl<'a> PSBTSatisfier<'a> {
pub fn new(
input: &'a psbt::Input,
assume_height_reached: bool,
create_height: Option<u32>,
current_height: Option<u32>,
) -> Self {
PSBTSatisfier {
input,
assume_height_reached,
create_height,
current_height,
}
}
}
if let Some(input) = self.inputs.get(input_index) {
if let Some(wit_utxo) = &input.witness_utxo {
Some(wit_utxo.clone())
} else if let Some(in_tx) = &input.non_witness_utxo {
Some(in_tx.output[tx.input[input_index].previous_output.vout as usize].clone())
} else {
None
}
impl<'a> PSBTSatisfier<'a> {
    /// Split a raw `partial_sigs` entry into its DER-encoded signature and
    /// trailing sighash flag byte
    ///
    /// Returns `None` when the entry is empty or the DER part fails to
    /// parse. The previous `split_last().unwrap()` panicked on an empty
    /// entry, which an untrusted PSBT can easily contain.
    fn parse_sig(rawsig: &Vec<u8>) -> Option<BitcoinSig> {
        let (flag, sig) = rawsig.split_last()?;
        let flag = bitcoin::SigHashType::from_u32(*flag as u32);
        let sig = secp256k1::Signature::from_der(sig).ok()?;
        Some((sig, flag))
    }
}
// TODO: also support hash preimages through the "unknown" section of PSBT
impl<'a> Satisfier<bitcoin::PublicKey> for PSBTSatisfier<'a> {
    // from https://docs.rs/miniscript/0.12.0/src/miniscript/psbt/mod.rs.html#96
    // Return the (signature, sighash flag) pair stored for `pk`, if present
    // and parseable.
    fn lookup_sig(&self, pk: &bitcoin::PublicKey) -> Option<BitcoinSig> {
        debug!("lookup_sig: {}", pk);

        if let Some(rawsig) = self.input.partial_sigs.get(pk) {
            Self::parse_sig(&rawsig)
        } else {
            None
        }
    }

    // Find the public key whose HASH160 matches `hash` by scanning the
    // partial signatures (linear scan; a PSBT input's sig map is tiny).
    fn lookup_pkh_pk(&self, hash: &hash160::Hash) -> Option<bitcoin::PublicKey> {
        debug!("lookup_pkh_pk: {}", hash);

        for (pk, _) in &self.input.partial_sigs {
            if &pk.to_pubkeyhash() == hash {
                return Some(*pk);
            }
        }

        None
    }

    // Like `lookup_pkh_pk`, but also return the parsed signature.
    fn lookup_pkh_sig(&self, hash: &hash160::Hash) -> Option<(bitcoin::PublicKey, BitcoinSig)> {
        debug!("lookup_pkh_sig: {}", hash);

        for (pk, sig) in &self.input.partial_sigs {
            if &pk.to_pubkeyhash() == hash {
                return match Self::parse_sig(&sig) {
                    Some(bitcoinsig) => Some((*pk, bitcoinsig)),
                    None => None,
                };
            }
        }

        None
    }

    // Relative-timelock check: with both heights known, require at least
    // `height` blocks elapsed since `create_height`; otherwise fall back to
    // the caller-provided assumption.
    fn check_older(&self, height: u32) -> bool {
        // TODO: also check if `nSequence` right
        debug!("check_older: {}", height);

        if let Some(current_height) = self.current_height {
            // TODO: test >= / >
            // u64 arithmetic avoids overflow of create_height + height
            current_height as u64 >= self.create_height.unwrap_or(0) as u64 + height as u64
        } else {
            self.assume_height_reached
        }
    }

    // Absolute-timelock check against the current chain height, falling back
    // to the caller-provided assumption when the height is unknown.
    fn check_after(&self, height: u32) -> bool {
        // TODO: also check if `nLockTime` is right
        debug!("check_after: {}", height);

        if let Some(current_height) = self.current_height {
            current_height > height
        } else {
            self.assume_height_reached
        }
    }
}
/// Signer that holds the private keys extracted from a descriptor and signs
/// inputs of a single transaction
#[derive(Debug)]
pub struct PSBTSigner<'a> {
    /// The unsigned transaction being signed
    tx: &'a Transaction,
    /// Secp256k1 context used for derivation and signing
    secp: Secp256k1<All>,
    // psbt: &'b psbt::PartiallySignedTransaction,
    /// Extended private keys, indexed by their BIP32 fingerprint
    extended_keys: BTreeMap<Fingerprint, ExtendedPrivKey>,
    /// Plain private keys, indexed by their public key
    private_keys: BTreeMap<PublicKey, PrivateKey>,
}
impl<'a> PSBTSigner<'a> {
pub fn from_descriptor(tx: &'a Transaction, desc: &ExtendedDescriptor) -> Result<Self, Error> {
let secp = Secp256k1::gen_new();
let mut extended_keys = BTreeMap::new();
for xprv in desc.get_xprv() {
let fing = xprv.fingerprint(&secp);
extended_keys.insert(fing, xprv);
}
let mut private_keys = BTreeMap::new();
for privkey in desc.get_secret_keys() {
let pubkey = privkey.public_key(&secp);
private_keys.insert(pubkey, privkey);
}
Ok(PSBTSigner {
tx,
secp,
extended_keys,
private_keys,
})
}
pub fn extend(&mut self, mut other: PSBTSigner) -> Result<(), Error> {
if self.tx.txid() != other.tx.txid() {
return Err(Error::DifferentTransactions);
}
self.extended_keys.append(&mut other.extended_keys);
self.private_keys.append(&mut other.private_keys);
Ok(())
}
// TODO: temporary
pub fn all_public_keys(&self) -> impl IntoIterator<Item = &PublicKey> {
self.private_keys.keys()
}
}
impl<'a> Signer for PSBTSigner<'a> {
    /// Sign input `index` with a legacy (pre-segwit) sighash, using the
    /// extended key identified by `fingerprint` derived along `path`
    ///
    /// Returns `Ok(None)` when no extended key with that fingerprint is held.
    fn sig_legacy_from_fingerprint(
        &self,
        index: usize,
        sighash: SigHashType,
        fingerprint: &Fingerprint,
        path: &DerivationPath,
        script: &Script,
    ) -> Result<Option<BitcoinSig>, Error> {
        self.extended_keys
            .get(fingerprint)
            .map_or(Ok(None), |xprv| {
                let privkey = xprv.derive_priv(&self.secp, path)?;
                // let derived_pubkey = secp256k1::PublicKey::from_secret_key(&self.secp, &privkey.private_key.key);

                // Legacy sighash: commits to the spending script but not to
                // the input amount
                let hash = self.tx.signature_hash(index, script, sighash.as_u32());

                let signature = self.secp.sign(
                    &Message::from_slice(&hash.into_inner()[..])?,
                    &privkey.private_key.key,
                );

                Ok(Some((signature, sighash)))
            })
    }

    /// Sign input `index` with a legacy sighash, using the plain private key
    /// matching `public_key`. `Ok(None)` when the key is not held.
    fn sig_legacy_from_pubkey(
        &self,
        index: usize,
        sighash: SigHashType,
        public_key: &PublicKey,
        script: &Script,
    ) -> Result<Option<BitcoinSig>, Error> {
        self.private_keys
            .get(public_key)
            .map_or(Ok(None), |privkey| {
                let hash = self.tx.signature_hash(index, script, sighash.as_u32());
                let signature = self
                    .secp
                    .sign(&Message::from_slice(&hash.into_inner()[..])?, &privkey.key);

                Ok(Some((signature, sighash)))
            })
    }

    /// Sign input `index` with the BIP143 (segwit) sighash, which also
    /// commits to the spent `value`, using the extended key identified by
    /// `fingerprint` derived along `path`. `Ok(None)` when not held.
    //
    // NOTE(review): the digest is always computed with `sighash_all`
    // regardless of the `sighash` flag attached to the returned signature —
    // confirm callers only ever pass SigHashType::All here.
    fn sig_segwit_from_fingerprint(
        &self,
        index: usize,
        sighash: SigHashType,
        fingerprint: &Fingerprint,
        path: &DerivationPath,
        script: &Script,
        value: u64,
    ) -> Result<Option<BitcoinSig>, Error> {
        self.extended_keys
            .get(fingerprint)
            .map_or(Ok(None), |xprv| {
                let privkey = xprv.derive_priv(&self.secp, path)?;
                let hash = SighashComponents::new(self.tx).sighash_all(
                    &self.tx.input[index],
                    script,
                    value,
                );

                let signature = self.secp.sign(
                    &Message::from_slice(&hash.into_inner()[..])?,
                    &privkey.private_key.key,
                );

                Ok(Some((signature, sighash)))
            })
    }

    /// Sign input `index` with the BIP143 (segwit) sighash using the plain
    /// private key matching `public_key`. `Ok(None)` when the key is not
    /// held. The same `sighash_all` caveat as above applies.
    fn sig_segwit_from_pubkey(
        &self,
        index: usize,
        sighash: SigHashType,
        public_key: &PublicKey,
        script: &Script,
        value: u64,
    ) -> Result<Option<BitcoinSig>, Error> {
        self.private_keys
            .get(public_key)
            .map_or(Ok(None), |privkey| {
                let hash = SighashComponents::new(self.tx).sighash_all(
                    &self.tx.input[index],
                    script,
                    value,
                );
                let signature = self
                    .secp
                    .sign(&Message::from_slice(&hash.into_inner()[..])?, &privkey.key);

                Ok(Some((signature, sighash)))
            })
    }
}

28
src/psbt/utils.rs Normal file
View File

@@ -0,0 +1,28 @@
use bitcoin::util::psbt::PartiallySignedTransaction as PSBT;
use bitcoin::TxOut;
/// Extension trait adding convenience accessors to a PSBT
pub trait PSBTUtils {
    /// Return the previous output (UTXO) spent by input `input_index`,
    /// taken from either `witness_utxo` or `non_witness_utxo`; `None` when
    /// the index is out of range or the input carries no UTXO information
    fn get_utxo_for(&self, input_index: usize) -> Option<TxOut>;
}
impl PSBTUtils for PSBT {
    /// Look up the UTXO spent by input `input_index`
    ///
    /// Prefers `witness_utxo` when present, otherwise extracts the output
    /// from `non_witness_utxo`. Returns `None` on an out-of-range index or
    /// when neither field is populated.
    fn get_utxo_for(&self, input_index: usize) -> Option<TxOut> {
        let tx = &self.global.unsigned_tx;

        if input_index >= tx.input.len() {
            return None;
        }

        let input = self.inputs.get(input_index)?;
        if let Some(wit_utxo) = &input.witness_utxo {
            Some(wit_utxo.clone())
        } else if let Some(in_tx) = &input.non_witness_utxo {
            // Fixed: index a malformed PSBT could make panic — a
            // `previous_output.vout` pointing past the end of the previous
            // transaction's outputs now yields `None` instead of panicking.
            in_tx
                .output
                .get(tx.input[input_index].previous_output.vout as usize)
                .cloned()
        } else {
            None
        }
    }
}

87
src/signer.rs Normal file
View File

@@ -0,0 +1,87 @@
use bitcoin::util::bip32::{DerivationPath, Fingerprint};
use bitcoin::{PublicKey, Script, SigHashType};
use miniscript::miniscript::satisfy::BitcoinSig;
use crate::error::Error;
/// Trait for objects that can produce signatures for transaction inputs
///
/// Implementors locate the signing key either by BIP32 fingerprint plus
/// derivation path, or directly by public key, and return `Ok(None)` when
/// they don't hold the requested key.
pub trait Signer {
    /// Produce a legacy (pre-segwit) signature for input `index` using the
    /// extended key identified by `fingerprint`, derived along `path`
    fn sig_legacy_from_fingerprint(
        &self,
        index: usize,
        sighash: SigHashType,
        fingerprint: &Fingerprint,
        path: &DerivationPath,
        script: &Script,
    ) -> Result<Option<BitcoinSig>, Error>;

    /// Produce a legacy (pre-segwit) signature for input `index` using the
    /// private key matching `public_key`
    fn sig_legacy_from_pubkey(
        &self,
        index: usize,
        sighash: SigHashType,
        public_key: &PublicKey,
        script: &Script,
    ) -> Result<Option<BitcoinSig>, Error>;

    /// Produce a segwit signature for input `index`, committing also to the
    /// spent `value`, using the extended key identified by `fingerprint`
    fn sig_segwit_from_fingerprint(
        &self,
        index: usize,
        sighash: SigHashType,
        fingerprint: &Fingerprint,
        path: &DerivationPath,
        script: &Script,
        value: u64,
    ) -> Result<Option<BitcoinSig>, Error>;

    /// Produce a segwit signature for input `index`, committing also to the
    /// spent `value`, using the private key matching `public_key`
    fn sig_segwit_from_pubkey(
        &self,
        index: usize,
        sighash: SigHashType,
        public_key: &PublicKey,
        script: &Script,
        value: u64,
    ) -> Result<Option<BitcoinSig>, Error>;
}
#[allow(dead_code)]
// NOTE(review): these inherent methods on `dyn Signer` share the names of
// the trait methods above, and inherent methods take precedence during
// method resolution — so calling e.g. `signer.sig_legacy_from_fingerprint(..)`
// on a `&dyn Signer` hits these always-`Ok(None)` bodies instead of the
// concrete implementation reached via dynamic dispatch. Confirm this
// shadowing is intentional (e.g. "no key available" defaults) and not a
// footgun.
impl dyn Signer {
    /// Fallback: reports "key not found" unconditionally
    fn sig_legacy_from_fingerprint(
        &self,
        _index: usize,
        _sighash: SigHashType,
        _fingerprint: &Fingerprint,
        _path: &DerivationPath,
        _script: &Script,
    ) -> Result<Option<BitcoinSig>, Error> {
        Ok(None)
    }

    /// Fallback: reports "key not found" unconditionally
    fn sig_legacy_from_pubkey(
        &self,
        _index: usize,
        _sighash: SigHashType,
        _public_key: &PublicKey,
        _script: &Script,
    ) -> Result<Option<BitcoinSig>, Error> {
        Ok(None)
    }

    /// Fallback: reports "key not found" unconditionally
    fn sig_segwit_from_fingerprint(
        &self,
        _index: usize,
        _sighash: SigHashType,
        _fingerprint: &Fingerprint,
        _path: &DerivationPath,
        _script: &Script,
        _value: u64,
    ) -> Result<Option<BitcoinSig>, Error> {
        Ok(None)
    }

    /// Fallback: reports "key not found" unconditionally
    fn sig_segwit_from_pubkey(
        &self,
        _index: usize,
        _sighash: SigHashType,
        _public_key: &PublicKey,
        _script: &Script,
        _value: u64,
    ) -> Result<Option<BitcoinSig>, Error> {
        Ok(None)
    }
}

View File

@@ -1,27 +1,3 @@
// Magical Bitcoin Library
// Written in 2020 by
// Alekos Filini <alekos.filini@gmail.com>
//
// Copyright (c) 2020 Magical Bitcoin
//
// Permission is hereby granted, free of charge, to any person obtaining a copy
// of this software and associated documentation files (the "Software"), to deal
// in the Software without restriction, including without limitation the rights
// to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
// copies of the Software, and to permit persons to whom the Software is
// furnished to do so, subject to the following conditions:
//
// The above copyright notice and this permission notice shall be included in all
// copies or substantial portions of the Software.
//
// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
// IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
// FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
// AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
// LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
// OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
// SOFTWARE.
use std::convert::AsRef;
use bitcoin::blockdata::transaction::{OutPoint, Transaction, TxOut};
@@ -29,8 +5,8 @@ use bitcoin::hash_types::Txid;
use serde::{Deserialize, Serialize};
/// Types of script
#[derive(Serialize, Deserialize, Debug, Clone, Copy, PartialEq, Eq, Hash)]
// TODO serde flatten?
#[derive(Serialize, Deserialize, Debug, Clone, Copy, PartialEq, Eq)]
pub enum ScriptType {
External = 0,
Internal = 1,
@@ -43,10 +19,6 @@ impl ScriptType {
ScriptType::Internal => 'i' as u8,
}
}
pub fn is_internal(&self) -> bool {
self == &ScriptType::Internal
}
}
impl AsRef<[u8]> for ScriptType {
@@ -58,48 +30,12 @@ impl AsRef<[u8]> for ScriptType {
}
}
/// Fee rate
#[derive(Debug, Copy, Clone, PartialEq, PartialOrd)]
// Internally stored as satoshi/vbyte
pub struct FeeRate(f32);

impl FeeRate {
    /// Build a [`FeeRate`] from a rate expressed in BTC per 1000 virtual bytes
    pub fn from_btc_per_kvb(btc_per_kvb: f32) -> Self {
        // 1 BTC/kvB == 100_000 sat/vB
        FeeRate(btc_per_kvb * 1e5)
    }

    /// Build a [`FeeRate`] from a rate expressed in satoshi per virtual byte
    pub fn from_sat_per_vb(sat_per_vb: f32) -> Self {
        FeeRate(sat_per_vb)
    }

    /// The default minimum relay fee rate (1 sat/vB)
    pub fn default_min_relay_fee() -> Self {
        Self::from_sat_per_vb(1.0)
    }

    /// Return the rate as satoshi per virtual byte
    pub fn as_sat_vb(&self) -> f32 {
        self.0
    }
}
impl std::default::Default for FeeRate {
fn default() -> Self {
FeeRate::default_min_relay_fee()
}
}
/// A wallet unspent output
#[derive(Serialize, Deserialize, Debug, Clone, PartialEq, Eq)]
pub struct UTXO {
    /// Location of this output in the chain (txid + vout)
    pub outpoint: OutPoint,
    /// The output itself: value in satoshi and script_pubkey
    pub txout: TxOut,
    /// Whether the output was derived from the internal descriptor
    /// (presumably the change keychain — confirm against ScriptType usage)
    pub is_internal: bool,
}
/// A wallet transaction
#[derive(Serialize, Deserialize, Debug, Clone, PartialEq, Eq, Default)]
pub struct TransactionDetails {
pub transaction: Option<Transaction>,
@@ -107,6 +43,5 @@ pub struct TransactionDetails {
pub timestamp: u64,
pub received: u64,
pub sent: u64,
pub fees: u64,
pub height: Option<u32>,
}

View File

@@ -1,162 +0,0 @@
// Magical Bitcoin Library
// Written in 2020 by
// Alekos Filini <alekos.filini@gmail.com>
//
// Copyright (c) 2020 Magical Bitcoin
//
// Permission is hereby granted, free of charge, to any person obtaining a copy
// of this software and associated documentation files (the "Software"), to deal
// in the Software without restriction, including without limitation the rights
// to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
// copies of the Software, and to permit persons to whom the Software is
// furnished to do so, subject to the following conditions:
//
// The above copyright notice and this permission notice shall be included in all
// copies or substantial portions of the Software.
//
// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
// IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
// FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
// AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
// LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
// OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
// SOFTWARE.
//! Address validation callbacks
//!
//! The typical usage of those callbacks is for displaying the newly-generated address on a
//! hardware wallet, so that the user can cross-check its correctness.
//!
//! More generally speaking though, these callbacks can also be used to "do something" every time
//! an address is generated, without necessarily checking or validating it.
//!
//! An address validator can be attached to a [`Wallet`](super::Wallet) by using the
//! [`Wallet::add_address_validator`](super::Wallet::add_address_validator) method, and
//! whenever a new address is generated (either explicitly by the user with
//! [`Wallet::get_new_address`](super::Wallet::get_new_address) or internally to create a change
//! address) all the attached validators will be polled, in sequence. All of them must complete
//! successfully to continue.
//!
//! ## Example
//!
//! ```
//! # use std::sync::Arc;
//! # use bitcoin::*;
//! # use magical::address_validator::*;
//! # use magical::database::*;
//! # use magical::*;
//! struct PrintAddressAndContinue;
//!
//! impl AddressValidator for PrintAddressAndContinue {
//! fn validate(
//! &self,
//! script_type: ScriptType,
//! hd_keypaths: &HDKeyPaths,
//! script: &Script
//! ) -> Result<(), AddressValidatorError> {
//! let address = Address::from_script(script, Network::Testnet)
//! .as_ref()
//! .map(Address::to_string)
//! .unwrap_or(script.to_string());
//! println!("New address of type {:?}: {}", script_type, address);
//! println!("HD keypaths: {:#?}", hd_keypaths);
//!
//! Ok(())
//! }
//! }
//!
//! let descriptor = "wpkh(tpubD6NzVbkrYhZ4Xferm7Pz4VnjdcDPFyjVu5K4iZXQ4pVN8Cks4pHVowTBXBKRhX64pkRyJZJN5xAKj4UDNnLPb5p2sSKXhewoYx5GbTdUFWq/*)";
//! let mut wallet: OfflineWallet<_> = Wallet::new_offline(descriptor, None, Network::Testnet, MemoryDatabase::default())?;
//! wallet.add_address_validator(Arc::new(Box::new(PrintAddressAndContinue)));
//!
//! let address = wallet.get_new_address()?;
//! println!("Address: {}", address);
//! # Ok::<(), magical::Error>(())
//! ```
use std::fmt;
use bitcoin::Script;
use crate::descriptor::HDKeyPaths;
use crate::types::ScriptType;
/// Errors that can be returned to fail the validation of an address
#[derive(Debug, Clone, PartialEq, Eq)]
pub enum AddressValidatorError {
UserRejected,
ConnectionError,
TimeoutError,
InvalidScript,
Message(String),
}
impl fmt::Display for AddressValidatorError {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
write!(f, "{:?}", self)
}
}
impl std::error::Error for AddressValidatorError {}
/// Trait to build address validators
///
/// All the address validators attached to a wallet with [`Wallet::add_address_validator`](super::Wallet::add_address_validator) will be polled
/// every time an address (external or internal) is generated by the wallet. Errors returned in the
/// validator will be propagated up to the original caller that triggered the address generation.
///
/// For a usage example see [this module](crate::address_validator)'s documentation.
pub trait AddressValidator {
/// Validate or inspect an address
fn validate(
&self,
script_type: ScriptType,
hd_keypaths: &HDKeyPaths,
script: &Script,
) -> Result<(), AddressValidatorError>;
}
#[cfg(test)]
mod test {
use std::sync::Arc;
use super::*;
use crate::wallet::test::{get_funded_wallet, get_test_wpkh};
use crate::wallet::TxBuilder;
struct TestValidator;
impl AddressValidator for TestValidator {
fn validate(
&self,
_script_type: ScriptType,
_hd_keypaths: &HDKeyPaths,
_script: &bitcoin::Script,
) -> Result<(), AddressValidatorError> {
Err(AddressValidatorError::InvalidScript)
}
}
#[test]
#[should_panic(expected = "InvalidScript")]
fn test_address_validator_external() {
let (mut wallet, _, _) = get_funded_wallet(get_test_wpkh());
wallet.add_address_validator(Arc::new(Box::new(TestValidator)));
wallet.get_new_address().unwrap();
}
#[test]
#[should_panic(expected = "InvalidScript")]
fn test_address_validator_internal() {
let (mut wallet, descriptors, _) = get_funded_wallet(get_test_wpkh());
wallet.add_address_validator(Arc::new(Box::new(TestValidator)));
let addr = testutils!(@external descriptors, 10);
wallet
.create_tx(TxBuilder::with_recipients(vec![(
addr.script_pubkey(),
25_000,
)]))
.unwrap();
}
}

View File

@@ -1,349 +0,0 @@
// Magical Bitcoin Library
// Written in 2020 by
// Alekos Filini <alekos.filini@gmail.com>
//
// Copyright (c) 2020 Magical Bitcoin
//
// Permission is hereby granted, free of charge, to any person obtaining a copy
// of this software and associated documentation files (the "Software"), to deal
// in the Software without restriction, including without limitation the rights
// to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
// copies of the Software, and to permit persons to whom the Software is
// furnished to do so, subject to the following conditions:
//
// The above copyright notice and this permission notice shall be included in all
// copies or substantial portions of the Software.
//
// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
// IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
// FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
// AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
// LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
// OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
// SOFTWARE.
//! Coin selection
//!
//! This module provides the trait [`CoinSelectionAlgorithm`] that can be implemented to
//! define custom coin selection algorithms.
//!
//! The coin selection algorithm is not globally part of a [`Wallet`](super::Wallet), instead it
//! is selected whenever a [`Wallet::create_tx`](super::Wallet::create_tx) call is made, through
//! the use of the [`TxBuilder`] structure, specifically with
//! [`TxBuilder::coin_selection`](super::tx_builder::TxBuilder::coin_selection) method.
//!
//! The [`DefaultCoinSelectionAlgorithm`] selects the default coin selection algorithm that
//! [`TxBuilder`] uses, if it's not explicitly overridden.
//!
//! [`TxBuilder`]: super::tx_builder::TxBuilder
//!
//! ## Example
//!
//! ```no_run
//! # use std::str::FromStr;
//! # use bitcoin::*;
//! # use bitcoin::consensus::serialize;
//! # use magical::wallet::coin_selection::*;
//! # use magical::*;
//! #[derive(Debug)]
//! struct AlwaysSpendEverything;
//!
//! impl CoinSelectionAlgorithm for AlwaysSpendEverything {
//! fn coin_select(
//! &self,
//! utxos: Vec<UTXO>,
//! _use_all_utxos: bool,
//! fee_rate: FeeRate,
//! amount_needed: u64,
//! input_witness_weight: usize,
//! fee_amount: f32,
//! ) -> Result<CoinSelectionResult, magical::Error> {
//! let selected_amount = utxos.iter().fold(0, |acc, utxo| acc + utxo.txout.value);
//! let all_utxos_selected = utxos
//! .into_iter()
//! .map(|utxo| {
//! (
//! TxIn {
//! previous_output: utxo.outpoint,
//! ..Default::default()
//! },
//! utxo.txout.script_pubkey,
//! )
//! })
//! .collect::<Vec<_>>();
//! let additional_weight = all_utxos_selected.iter().fold(0, |acc, (txin, _)| {
//! acc + serialize(txin).len() * 4 + input_witness_weight
//! });
//! let additional_fees = additional_weight as f32 * fee_rate.as_sat_vb() / 4.0;
//!
//! if (fee_amount + additional_fees).ceil() as u64 + amount_needed > selected_amount {
//! return Err(magical::Error::InsufficientFunds);
//! }
//!
//! Ok(CoinSelectionResult {
//! txin: all_utxos_selected,
//! selected_amount,
//! fee_amount: fee_amount + additional_fees,
//! })
//! }
//! }
//!
//! # let wallet: OfflineWallet<_> = Wallet::new_offline("", None, Network::Testnet, magical::database::MemoryDatabase::default())?;
//! // create wallet, sync, ...
//!
//! let to_address = Address::from_str("2N4eQYCbKUHCCTUjBJeHcJp9ok6J2GZsTDt").unwrap();
//! let (psbt, details) = wallet.create_tx(
//! TxBuilder::with_recipients(vec![(to_address.script_pubkey(), 50_000)])
//! .coin_selection(AlwaysSpendEverything),
//! )?;
//!
//! // inspect, sign, broadcast, ...
//!
//! # Ok::<(), magical::Error>(())
//! ```
use bitcoin::consensus::encode::serialize;
use bitcoin::{Script, TxIn};
use crate::error::Error;
use crate::types::{FeeRate, UTXO};
/// Default coin selection algorithm used by [`TxBuilder`](super::tx_builder::TxBuilder) if not
/// overridden
pub type DefaultCoinSelectionAlgorithm = DumbCoinSelection;
/// Result of a successful coin selection
#[derive(Debug)]
pub struct CoinSelectionResult {
/// List of inputs to use, with the respective previous script_pubkey
pub txin: Vec<(TxIn, Script)>,
/// Sum of the selected inputs' value
pub selected_amount: u64,
/// Total fee amount in satoshi
pub fee_amount: f32,
}
/// Trait for generalized coin selection algorithms
///
/// This trait can be implemented to make the [`Wallet`](super::Wallet) use a customized coin
/// selection algorithm when it creates transactions.
///
/// For an example see [this module](crate::wallet::coin_selection)'s documentation.
pub trait CoinSelectionAlgorithm: std::fmt::Debug {
/// Perform the coin selection
///
/// - `utxos`: the list of spendable UTXOs
/// - `use_all_utxos`: if true all utxos should be spent unconditionally
/// - `fee_rate`: fee rate to use
/// - `amount_needed`: the amount in satoshi to select
/// - `input_witness_weight`: the weight of an input's witness to keep into account for the fees
/// - `fee_amount`: the amount of fees in satoshi already accumulated from adding outputs
fn coin_select(
&self,
utxos: Vec<UTXO>,
use_all_utxos: bool,
fee_rate: FeeRate,
amount_needed: u64,
input_witness_weight: usize,
fee_amount: f32,
) -> Result<CoinSelectionResult, Error>;
}
/// Simple and dumb coin selection
///
/// This coin selection algorithm sorts the available UTXOs by value and then picks them starting
/// from the largest ones until the required amount is reached.
#[derive(Debug, Default)]
pub struct DumbCoinSelection;
impl CoinSelectionAlgorithm for DumbCoinSelection {
fn coin_select(
&self,
mut utxos: Vec<UTXO>,
use_all_utxos: bool,
fee_rate: FeeRate,
outgoing_amount: u64,
input_witness_weight: usize,
mut fee_amount: f32,
) -> Result<CoinSelectionResult, Error> {
let mut txin = Vec::new();
let calc_fee_bytes = |wu| (wu as f32) * fee_rate.as_sat_vb() / 4.0;
log::debug!(
"outgoing_amount = `{}`, fee_amount = `{}`, fee_rate = `{:?}`",
outgoing_amount,
fee_amount,
fee_rate
);
// sort so that we pick them starting from the larger.
utxos.sort_by(|a, b| a.txout.value.partial_cmp(&b.txout.value).unwrap());
let mut selected_amount: u64 = 0;
while use_all_utxos || selected_amount < outgoing_amount + (fee_amount.ceil() as u64) {
let utxo = match utxos.pop() {
Some(utxo) => utxo,
None if selected_amount < outgoing_amount + (fee_amount.ceil() as u64) => {
return Err(Error::InsufficientFunds)
}
None if use_all_utxos => break,
None => return Err(Error::InsufficientFunds),
};
let new_in = TxIn {
previous_output: utxo.outpoint,
script_sig: Script::default(),
sequence: 0, // Let the caller choose the right nSequence
witness: vec![],
};
fee_amount += calc_fee_bytes(serialize(&new_in).len() * 4 + input_witness_weight);
log::debug!(
"Selected {}, updated fee_amount = `{}`",
new_in.previous_output,
fee_amount
);
txin.push((new_in, utxo.txout.script_pubkey));
selected_amount += utxo.txout.value;
}
Ok(CoinSelectionResult {
txin,
fee_amount,
selected_amount,
})
}
}
#[cfg(test)]
mod test {
    use std::str::FromStr;

    use bitcoin::{OutPoint, Script, TxOut};

    use super::*;
    use crate::types::*;

    // Witness weight of a P2WPKH spend: 73-byte signature + 33-byte pubkey + 2 length bytes
    const P2WPKH_WITNESS_SIZE: usize = 73 + 33 + 2;

    /// Fixed pair of test UTXOs: 100_000 sats (external) and 200_000 sats (internal)
    fn get_test_utxos() -> Vec<UTXO> {
        let small = UTXO {
            outpoint: OutPoint::from_str(
                "ebd9813ecebc57ff8f30797de7c205e3c7498ca950ea4341ee51a685ff2fa30a:0",
            )
            .unwrap(),
            txout: TxOut {
                value: 100_000,
                script_pubkey: Script::new(),
            },
            is_internal: false,
        };
        let large = UTXO {
            outpoint: OutPoint::from_str(
                "65d92ddff6b6dc72c89624a6491997714b90f6004f928d875bc0fd53f264fa85:0",
            )
            .unwrap(),
            txout: TxOut {
                value: 200_000,
                script_pubkey: Script::new(),
            },
            is_internal: true,
        };

        vec![small, large]
    }

    /// Run `DumbCoinSelection` over the standard test UTXOs, panicking on failure
    fn run_selection(
        use_all_utxos: bool,
        fee_rate: FeeRate,
        outgoing_amount: u64,
    ) -> CoinSelectionResult {
        DumbCoinSelection
            .coin_select(
                get_test_utxos(),
                use_all_utxos,
                fee_rate,
                outgoing_amount,
                P2WPKH_WITNESS_SIZE,
                50.0,
            )
            .unwrap()
    }

    #[test]
    fn test_dumb_coin_selection_success() {
        // 250k + fees requires both UTXOs
        let result = run_selection(false, FeeRate::from_sat_per_vb(1.0), 250_000);

        assert_eq!(result.txin.len(), 2);
        assert_eq!(result.selected_amount, 300_000);
        assert_eq!(result.fee_amount, 186.0);
    }

    #[test]
    fn test_dumb_coin_selection_use_all() {
        // Even though one UTXO would cover 20k, `use_all_utxos` consumes both
        let result = run_selection(true, FeeRate::from_sat_per_vb(1.0), 20_000);

        assert_eq!(result.txin.len(), 2);
        assert_eq!(result.selected_amount, 300_000);
        assert_eq!(result.fee_amount, 186.0);
    }

    #[test]
    fn test_dumb_coin_selection_use_only_necessary() {
        // The larger (200k) UTXO alone covers 20k + fees
        let result = run_selection(false, FeeRate::from_sat_per_vb(1.0), 20_000);

        assert_eq!(result.txin.len(), 1);
        assert_eq!(result.selected_amount, 200_000);
        assert_eq!(result.fee_amount, 118.0);
    }

    #[test]
    #[should_panic(expected = "InsufficientFunds")]
    fn test_dumb_coin_selection_insufficient_funds() {
        // 500k requested, only 300k available
        run_selection(false, FeeRate::from_sat_per_vb(1.0), 500_000);
    }

    #[test]
    #[should_panic(expected = "InsufficientFunds")]
    fn test_dumb_coin_selection_insufficient_funds_high_fees() {
        // 300k covers 250k at 1 sat/vb, but not at 1000 sat/vb
        run_selection(false, FeeRate::from_sat_per_vb(1000.0), 250_000);
    }
}

View File

@@ -1,340 +0,0 @@
// Magical Bitcoin Library
// Written in 2020 by
// Alekos Filini <alekos.filini@gmail.com>
//
// Copyright (c) 2020 Magical Bitcoin
//
// Permission is hereby granted, free of charge, to any person obtaining a copy
// of this software and associated documentation files (the "Software"), to deal
// in the Software without restriction, including without limitation the rights
// to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
// copies of the Software, and to permit persons to whom the Software is
// furnished to do so, subject to the following conditions:
//
// The above copyright notice and this permission notice shall be included in all
// copies or substantial portions of the Software.
//
// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
// IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
// FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
// AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
// LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
// OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
// SOFTWARE.
//! Wallet export
//!
//! This modules implements the wallet export format used by [FullyNoded](https://github.com/Fonta1n3/FullyNoded/blob/10b7808c8b929b171cca537fb50522d015168ac9/Docs/Wallets/Wallet-Export-Spec.md).
//!
//! ## Examples
//!
//! ### Import from JSON
//!
//! ```
//! # use std::str::FromStr;
//! # use bitcoin::*;
//! # use magical::database::*;
//! # use magical::wallet::export::*;
//! # use magical::*;
//! let import = r#"{
//! "descriptor": "wpkh([c258d2e4\/84h\/1h\/0h]tpubDD3ynpHgJQW8VvWRzQ5WFDCrs4jqVFGHB3vLC3r49XHJSqP8bHKdK4AriuUKLccK68zfzowx7YhmDN8SiSkgCDENUFx9qVw65YyqM78vyVe\/0\/*)",
//! "blockheight":1782088,
//! "label":"testnet"
//! }"#;
//!
//! let import = WalletExport::from_str(import)?;
//! let wallet: OfflineWallet<_> = Wallet::new_offline(&import.descriptor(), import.change_descriptor().as_deref(), Network::Testnet, MemoryDatabase::default())?;
//! # Ok::<_, magical::Error>(())
//! ```
//!
//! ### Export a `Wallet`
//! ```
//! # use bitcoin::*;
//! # use magical::database::*;
//! # use magical::wallet::export::*;
//! # use magical::*;
//! let wallet: OfflineWallet<_> = Wallet::new_offline(
//! "wpkh([c258d2e4/84h/1h/0h]tpubDD3ynpHgJQW8VvWRzQ5WFDCrs4jqVFGHB3vLC3r49XHJSqP8bHKdK4AriuUKLccK68zfzowx7YhmDN8SiSkgCDENUFx9qVw65YyqM78vyVe/0/*)",
//! Some("wpkh([c258d2e4/84h/1h/0h]tpubDD3ynpHgJQW8VvWRzQ5WFDCrs4jqVFGHB3vLC3r49XHJSqP8bHKdK4AriuUKLccK68zfzowx7YhmDN8SiSkgCDENUFx9qVw65YyqM78vyVe/1/*)"),
//! Network::Testnet,
//! MemoryDatabase::default()
//! )?;
//! let export = WalletExport::export_wallet(&wallet, "exported wallet", true)
//! .map_err(ToString::to_string)
//! .map_err(magical::Error::Generic)?;
//!
//! println!("Exported: {}", export.to_string());
//! # Ok::<_, magical::Error>(())
//! ```
use std::str::FromStr;
use serde::{Deserialize, Serialize};
use miniscript::{Descriptor, ScriptContext, Terminal};
use crate::blockchain::Blockchain;
use crate::database::BatchDatabase;
use crate::wallet::Wallet;
/// Structure that contains the export of a wallet
///
/// For a usage example see [this module](crate::wallet::export)'s documentation.
#[derive(Debug, Serialize, Deserialize)]
pub struct WalletExport {
    // Kept private: read through `descriptor()`; the change descriptor is derived from it
    // by `change_descriptor()`
    descriptor: String,
    /// Earliest block to rescan when looking for the wallet's transactions
    pub blockheight: u32,
    /// Arbitrary label for the wallet
    pub label: String,
}
impl ToString for WalletExport {
    /// Serialize the export to its JSON representation
    fn to_string(&self) -> String {
        // Plain strings and integers always serialize, so the unwrap cannot fail
        serde_json::to_string(self).unwrap()
    }
}
impl FromStr for WalletExport {
    type Err = serde_json::Error;

    /// Parse an export from its JSON representation
    fn from_str(s: &str) -> Result<Self, Self::Err> {
        serde_json::from_str(s)
    }
}
impl WalletExport {
    /// Export a wallet
    ///
    /// This function returns an error if it determines that the `wallet`'s descriptor(s) are not
    /// supported by Bitcoin Core or don't follow the standard derivation paths defined by BIP44
    /// and others.
    ///
    /// If `include_blockheight` is `true`, this function will look into the `wallet`'s database
    /// for the oldest transaction it knows and use that as the earliest block to rescan.
    ///
    /// If the database is empty or `include_blockheight` is false, the `blockheight` field
    /// returned will be `0`.
    pub fn export_wallet<B: Blockchain, D: BatchDatabase>(
        wallet: &Wallet<B, D>,
        label: &str,
        include_blockheight: bool,
    ) -> Result<Self, &'static str> {
        // Serialize the descriptor, embedding any secret keys held by the wallet's signers
        let descriptor = wallet
            .descriptor
            .to_string_with_secret(&wallet.signers.as_key_map());
        Self::is_compatible_with_core(&descriptor)?;

        let blockheight = match wallet.database.borrow().iter_txs(false) {
            _ if !include_blockheight => 0,
            Err(_) => 0, // best-effort: an unreadable database just disables the rescan hint
            Ok(txs) => {
                // Unconfirmed transactions count as height 0
                let mut heights = txs
                    .into_iter()
                    .map(|tx| tx.height.unwrap_or(0))
                    .collect::<Vec<_>>();
                heights.sort();

                // NOTE(review): `last()` picks the *highest* (newest) height, but the doc above
                // says the *oldest* transaction should bound the rescan — confirm whether this
                // should be `first()`
                *heights.last().unwrap_or(&0)
            }
        };

        let export = WalletExport {
            descriptor,
            label: label.into(),
            blockheight,
        };

        // The export format carries a single descriptor: the change descriptor must be
        // mechanically derivable from it (`/0/*` -> `/1/*`), otherwise the export is lossy
        if export.change_descriptor()
            != wallet
                .change_descriptor
                .as_ref()
                .map(|d| d.to_string_with_secret(&wallet.change_signers.as_key_map()))
        {
            return Err("Incompatible change descriptor");
        }

        Ok(export)
    }

    /// Check that the descriptor only uses constructs importable by Bitcoin Core
    fn is_compatible_with_core(descriptor: &str) -> Result<(), &'static str> {
        // Inside sh()/wsh(), only a bare multi() is accepted
        fn check_ms<Ctx: ScriptContext>(
            terminal: Terminal<String, Ctx>,
        ) -> Result<(), &'static str> {
            if let Terminal::Multi(_, _) = terminal {
                Ok(())
            } else {
                Err("The descriptor contains operators not supported by Bitcoin Core")
            }
        }

        match Descriptor::<String>::from_str(descriptor).map_err(|_| "Invalid descriptor")? {
            Descriptor::Pk(_)
            | Descriptor::Pkh(_)
            | Descriptor::Wpkh(_)
            | Descriptor::ShWpkh(_) => Ok(()),
            Descriptor::Sh(ms) => check_ms(ms.node),
            Descriptor::Wsh(ms) | Descriptor::ShWsh(ms) => check_ms(ms.node),
            _ => Err("The descriptor is not compatible with Bitcoin Core"),
        }
    }

    /// Return the external descriptor
    pub fn descriptor(&self) -> String {
        self.descriptor.clone()
    }

    /// Return the internal descriptor, if present
    pub fn change_descriptor(&self) -> Option<String> {
        // By convention the internal chain uses `/1/*` where the external chain uses `/0/*`
        let replaced = self.descriptor.replace("/0/*", "/1/*");

        if replaced != self.descriptor {
            Some(replaced)
        } else {
            None
        }
    }
}
#[cfg(test)]
mod test {
    use std::str::FromStr;

    use bitcoin::{Network, Txid};

    use super::*;
    use crate::database::{memory::MemoryDatabase, BatchOperations};
    use crate::types::TransactionDetails;
    use crate::wallet::{OfflineWallet, Wallet};

    /// In-memory database pre-loaded with one transaction confirmed at height 5000,
    /// so exports built from it should report `blockheight == 5000`
    fn get_test_db() -> MemoryDatabase {
        let mut db = MemoryDatabase::new();
        db.set_tx(&TransactionDetails {
            transaction: None,
            txid: Txid::from_str(
                "4ddff1fa33af17f377f62b72357b43107c19110a8009b36fb832af505efed98a",
            )
            .unwrap(),
            timestamp: 12345678,
            received: 100_000,
            sent: 0,
            fees: 500,
            height: Some(5000),
        })
        .unwrap();

        db
    }

    #[test]
    fn test_export_bip44() {
        let descriptor = "wpkh(xprv9s21ZrQH143K4CTb63EaMxja1YiTnSEWKMbn23uoEnAzxjdUJRQkazCAtzxGm4LSoTSVTptoV9RbchnKPW9HxKtZumdyxyikZFDLhogJ5Uj/44'/0'/0'/0/*)";
        let change_descriptor = "wpkh(xprv9s21ZrQH143K4CTb63EaMxja1YiTnSEWKMbn23uoEnAzxjdUJRQkazCAtzxGm4LSoTSVTptoV9RbchnKPW9HxKtZumdyxyikZFDLhogJ5Uj/44'/0'/0'/1/*)";

        let wallet: OfflineWallet<_> = Wallet::new_offline(
            descriptor,
            Some(change_descriptor),
            Network::Testnet,
            get_test_db(),
        )
        .unwrap();
        let export = WalletExport::export_wallet(&wallet, "Test Label", true).unwrap();

        assert_eq!(export.descriptor(), descriptor);
        assert_eq!(export.change_descriptor(), Some(change_descriptor.into()));
        assert_eq!(export.blockheight, 5000);
        assert_eq!(export.label, "Test Label");
    }

    #[test]
    #[should_panic(expected = "Incompatible change descriptor")]
    fn test_export_no_change() {
        // This wallet explicitly doesn't have a change descriptor. It should be impossible to
        // export, because exporting this kind of external descriptor normally implies the
        // existence of an internal descriptor
        let descriptor = "wpkh(xprv9s21ZrQH143K4CTb63EaMxja1YiTnSEWKMbn23uoEnAzxjdUJRQkazCAtzxGm4LSoTSVTptoV9RbchnKPW9HxKtZumdyxyikZFDLhogJ5Uj/44'/0'/0'/0/*)";

        let wallet: OfflineWallet<_> =
            Wallet::new_offline(descriptor, None, Network::Testnet, get_test_db()).unwrap();
        WalletExport::export_wallet(&wallet, "Test Label", true).unwrap();
    }

    #[test]
    #[should_panic(expected = "Incompatible change descriptor")]
    fn test_export_incompatible_change() {
        // This wallet has a change descriptor, but the derivation path is not in the "standard"
        // bip44/49/etc format
        let descriptor = "wpkh(xprv9s21ZrQH143K4CTb63EaMxja1YiTnSEWKMbn23uoEnAzxjdUJRQkazCAtzxGm4LSoTSVTptoV9RbchnKPW9HxKtZumdyxyikZFDLhogJ5Uj/44'/0'/0'/0/*)";
        let change_descriptor = "wpkh(xprv9s21ZrQH143K4CTb63EaMxja1YiTnSEWKMbn23uoEnAzxjdUJRQkazCAtzxGm4LSoTSVTptoV9RbchnKPW9HxKtZumdyxyikZFDLhogJ5Uj/50'/0'/1/*)";

        let wallet: OfflineWallet<_> = Wallet::new_offline(
            descriptor,
            Some(change_descriptor),
            Network::Testnet,
            get_test_db(),
        )
        .unwrap();
        WalletExport::export_wallet(&wallet, "Test Label", true).unwrap();
    }

    #[test]
    fn test_export_multi() {
        // 2-of-3 multisig: bare multi() inside wsh() is accepted by `is_compatible_with_core`
        let descriptor = "wsh(multi(2,\
                          [73756c7f/48'/0'/0'/2']tpubDCKxNyM3bLgbEX13Mcd8mYxbVg9ajDkWXMh29hMWBurKfVmBfWAM96QVP3zaUcN51HvkZ3ar4VwP82kC8JZhhux8vFQoJintSpVBwpFvyU3/0/*,\
                          [f9f62194/48'/0'/0'/2']tpubDDp3ZSH1yCwusRppH7zgSxq2t1VEUyXSeEp8E5aFS8m43MknUjiF1bSLo3CGWAxbDyhF1XowA5ukPzyJZjznYk3kYi6oe7QxtX2euvKWsk4/0/*,\
                          [c98b1535/48'/0'/0'/2']tpubDCDi5W4sP6zSnzJeowy8rQDVhBdRARaPhK1axABi8V1661wEPeanpEXj4ZLAUEoikVtoWcyK26TKKJSecSfeKxwHCcRrge9k1ybuiL71z4a/0/*\
                          ))";
        let change_descriptor = "wsh(multi(2,\
                                 [73756c7f/48'/0'/0'/2']tpubDCKxNyM3bLgbEX13Mcd8mYxbVg9ajDkWXMh29hMWBurKfVmBfWAM96QVP3zaUcN51HvkZ3ar4VwP82kC8JZhhux8vFQoJintSpVBwpFvyU3/1/*,\
                                 [f9f62194/48'/0'/0'/2']tpubDDp3ZSH1yCwusRppH7zgSxq2t1VEUyXSeEp8E5aFS8m43MknUjiF1bSLo3CGWAxbDyhF1XowA5ukPzyJZjznYk3kYi6oe7QxtX2euvKWsk4/1/*,\
                                 [c98b1535/48'/0'/0'/2']tpubDCDi5W4sP6zSnzJeowy8rQDVhBdRARaPhK1axABi8V1661wEPeanpEXj4ZLAUEoikVtoWcyK26TKKJSecSfeKxwHCcRrge9k1ybuiL71z4a/1/*\
                                 ))";

        let wallet: OfflineWallet<_> = Wallet::new_offline(
            descriptor,
            Some(change_descriptor),
            Network::Testnet,
            get_test_db(),
        )
        .unwrap();
        let export = WalletExport::export_wallet(&wallet, "Test Label", true).unwrap();

        assert_eq!(export.descriptor(), descriptor);
        assert_eq!(export.change_descriptor(), Some(change_descriptor.into()));
        assert_eq!(export.blockheight, 5000);
        assert_eq!(export.label, "Test Label");
    }

    #[test]
    fn test_export_to_json() {
        let descriptor = "wpkh(xprv9s21ZrQH143K4CTb63EaMxja1YiTnSEWKMbn23uoEnAzxjdUJRQkazCAtzxGm4LSoTSVTptoV9RbchnKPW9HxKtZumdyxyikZFDLhogJ5Uj/44'/0'/0'/0/*)";
        let change_descriptor = "wpkh(xprv9s21ZrQH143K4CTb63EaMxja1YiTnSEWKMbn23uoEnAzxjdUJRQkazCAtzxGm4LSoTSVTptoV9RbchnKPW9HxKtZumdyxyikZFDLhogJ5Uj/44'/0'/0'/1/*)";

        let wallet: OfflineWallet<_> = Wallet::new_offline(
            descriptor,
            Some(change_descriptor),
            Network::Testnet,
            get_test_db(),
        )
        .unwrap();
        let export = WalletExport::export_wallet(&wallet, "Test Label", true).unwrap();

        assert_eq!(export.to_string(), "{\"descriptor\":\"wpkh(xprv9s21ZrQH143K4CTb63EaMxja1YiTnSEWKMbn23uoEnAzxjdUJRQkazCAtzxGm4LSoTSVTptoV9RbchnKPW9HxKtZumdyxyikZFDLhogJ5Uj/44\'/0\'/0\'/0/*)\",\"blockheight\":5000,\"label\":\"Test Label\"}");
    }

    #[test]
    fn test_export_from_json() {
        let descriptor = "wpkh(xprv9s21ZrQH143K4CTb63EaMxja1YiTnSEWKMbn23uoEnAzxjdUJRQkazCAtzxGm4LSoTSVTptoV9RbchnKPW9HxKtZumdyxyikZFDLhogJ5Uj/44'/0'/0'/0/*)";
        let change_descriptor = "wpkh(xprv9s21ZrQH143K4CTb63EaMxja1YiTnSEWKMbn23uoEnAzxjdUJRQkazCAtzxGm4LSoTSVTptoV9RbchnKPW9HxKtZumdyxyikZFDLhogJ5Uj/44'/0'/0'/1/*)";

        let import_str = "{\"descriptor\":\"wpkh(xprv9s21ZrQH143K4CTb63EaMxja1YiTnSEWKMbn23uoEnAzxjdUJRQkazCAtzxGm4LSoTSVTptoV9RbchnKPW9HxKtZumdyxyikZFDLhogJ5Uj/44\'/0\'/0\'/0/*)\",\"blockheight\":5000,\"label\":\"Test Label\"}";

        let export = WalletExport::from_str(import_str).unwrap();
        assert_eq!(export.descriptor(), descriptor);
        assert_eq!(export.change_descriptor(), Some(change_descriptor.into()));
        assert_eq!(export.blockheight, 5000);
        assert_eq!(export.label, "Test Label");
    }
}

File diff suppressed because it is too large Load Diff

View File

@@ -0,0 +1,52 @@
use std::io::{self, Error, ErrorKind, Read, Write};
/// A stream with no underlying connection: every `Read`/`Write` operation fails with
/// `ErrorKind::NotConnected`.
#[derive(Clone, Debug)]
pub struct OfflineStream;
impl Read for OfflineStream {
    /// Always fails: there is no data source behind an `OfflineStream`.
    fn read(&mut self, _buf: &mut [u8]) -> io::Result<usize> {
        Err(Error::new(
            ErrorKind::NotConnected,
            "Trying to read from an OfflineStream",
        ))
    }
}
impl Write for OfflineStream {
    /// Always fails: there is no sink behind an `OfflineStream`.
    fn write(&mut self, _buf: &[u8]) -> io::Result<usize> {
        // Fixed copy-paste error: the message previously said "read"
        Err(Error::new(
            ErrorKind::NotConnected,
            "Trying to write to an OfflineStream",
        ))
    }

    /// Always fails: there is nothing to flush.
    fn flush(&mut self) -> io::Result<()> {
        // Fixed copy-paste error: the message previously said "read"
        Err(Error::new(
            ErrorKind::NotConnected,
            "Trying to flush an OfflineStream",
        ))
    }
}
// #[cfg(any(feature = "electrum", feature = "default"))]
// use electrum_client::Client;
//
// #[cfg(any(feature = "electrum", feature = "default"))]
// impl OfflineStream {
// fn new_client() -> {
// use std::io::bufreader;
//
// let stream = OfflineStream{};
// let buf_reader = BufReader::new(stream.clone());
//
// Client {
// stream,
// buf_reader,
// headers: VecDeque::new(),
// script_notifications: BTreeMap::new(),
//
// #[cfg(feature = "debug-calls")]
// calls: 0,
// }
// }
// }

View File

@@ -1,127 +0,0 @@
// Magical Bitcoin Library
// Written in 2020 by
// Alekos Filini <alekos.filini@gmail.com>
//
// Copyright (c) 2020 Magical Bitcoin
//
// Permission is hereby granted, free of charge, to any person obtaining a copy
// of this software and associated documentation files (the "Software"), to deal
// in the Software without restriction, including without limitation the rights
// to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
// copies of the Software, and to permit persons to whom the Software is
// furnished to do so, subject to the following conditions:
//
// The above copyright notice and this permission notice shall be included in all
// copies or substantial portions of the Software.
//
// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
// IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
// FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
// AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
// LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
// OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
// SOFTWARE.
use crate::database::Database;
use crate::error::Error;
use crate::types::*;
/// Filters unspent utxos
///
/// Keeps only the UTXOs whose creating transaction is known to `database` *and* confirmed
/// (i.e. has a height). The first database error encountered is propagated to the caller.
pub(super) fn filter_available<I: Iterator<Item = UTXO>, D: Database>(
    database: &D,
    iter: I,
) -> Result<Vec<UTXO>, Error> {
    // Single pass instead of map -> collect -> filter_map(|x| x): `collect::<Result<_, _>>()`
    // still short-circuits on the first `Err`, exactly like the previous two-pass version
    iter.filter_map(|utxo| match database.get_tx(&utxo.outpoint.txid, true) {
        Err(e) => Some(Err(e)),
        Ok(Some(tx)) if tx.height.is_some() => Some(Ok(utxo)),
        Ok(_) => None, // unknown tx, or known but unconfirmed
    })
    .collect()
}
#[cfg(test)]
mod test {
    use std::str::FromStr;

    use bitcoin::{OutPoint, Transaction, TxIn, TxOut, Txid};

    use super::*;
    use crate::database::{BatchOperations, MemoryDatabase};

    /// Insert into `database` a transaction spending `spend` and creating one UTXO per value in
    /// `outputs`. The transaction is recorded as unconfirmed (`height: None`).
    fn add_transaction(
        database: &mut MemoryDatabase,
        spend: Vec<OutPoint>,
        outputs: Vec<u64>,
    ) -> Txid {
        let tx = Transaction {
            version: 1,
            lock_time: 0,
            input: spend
                .iter()
                .cloned()
                .map(|previous_output| TxIn {
                    previous_output,
                    ..Default::default()
                })
                .collect(),
            output: outputs
                .iter()
                .cloned()
                .map(|value| TxOut {
                    value,
                    ..Default::default()
                })
                .collect(),
        };

        let txid = tx.txid();
        // The spent outpoints are no longer unspent
        for input in &spend {
            database.del_utxo(input).unwrap();
        }
        // Register each newly-created output as a UTXO
        for vout in 0..outputs.len() {
            database
                .set_utxo(&UTXO {
                    txout: tx.output[vout].clone(),
                    outpoint: OutPoint {
                        txid,
                        vout: vout as u32,
                    },
                    is_internal: true,
                })
                .unwrap();
        }

        database
            .set_tx(&TransactionDetails {
                txid,
                transaction: Some(tx),
                height: None,
                ..Default::default()
            })
            .unwrap();

        txid
    }

    #[test]
    fn test_filter_available() {
        let mut database = MemoryDatabase::new();
        add_transaction(
            &mut database,
            vec![OutPoint::from_str(
                "aad194c72fd5cfd16d23da9462930ca91e35df1cfee05242b62f4034f50c3d41:5",
            )
            .unwrap()],
            vec![50_000],
        );

        // The only UTXO comes from an unconfirmed transaction, so nothing is available
        let filtered =
            filter_available(&database, database.iter_utxos().unwrap().into_iter()).unwrap();
        assert_eq!(filtered, &[]);
    }
}

View File

@@ -1,513 +0,0 @@
// Magical Bitcoin Library
// Written in 2020 by
// Alekos Filini <alekos.filini@gmail.com>
//
// Copyright (c) 2020 Magical Bitcoin
//
// Permission is hereby granted, free of charge, to any person obtaining a copy
// of this software and associated documentation files (the "Software"), to deal
// in the Software without restriction, including without limitation the rights
// to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
// copies of the Software, and to permit persons to whom the Software is
// furnished to do so, subject to the following conditions:
//
// The above copyright notice and this permission notice shall be included in all
// copies or substantial portions of the Software.
//
// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
// IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
// FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
// AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
// LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
// OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
// SOFTWARE.
//! Generalized signers
//!
//! This module provides the ability to add customized signers to a [`Wallet`](super::Wallet)
//! through the [`Wallet::add_signer`](super::Wallet::add_signer) function.
//!
//! ```
//! # use std::sync::Arc;
//! # use std::str::FromStr;
//! # use bitcoin::*;
//! # use bitcoin::util::psbt;
//! # use bitcoin::util::bip32::Fingerprint;
//! # use magical::signer::*;
//! # use magical::database::*;
//! # use magical::*;
//! # #[derive(Debug)]
//! # struct CustomHSM;
//! # impl CustomHSM {
//! # fn sign_input(&self, _psbt: &mut psbt::PartiallySignedTransaction, _input: usize) -> Result<(), SignerError> {
//! # Ok(())
//! # }
//! # fn connect() -> Self {
//! # CustomHSM
//! # }
//! # }
//! #[derive(Debug)]
//! struct CustomSigner {
//! device: CustomHSM,
//! }
//!
//! impl CustomSigner {
//! fn connect() -> Self {
//! CustomSigner { device: CustomHSM::connect() }
//! }
//! }
//!
//! impl Signer for CustomSigner {
//! fn sign(
//! &self,
//! psbt: &mut psbt::PartiallySignedTransaction,
//! input_index: Option<usize>,
//! ) -> Result<(), SignerError> {
//! let input_index = input_index.ok_or(SignerError::InputIndexOutOfRange)?;
//! self.device.sign_input(psbt, input_index)?;
//!
//! Ok(())
//! }
//!
//! fn sign_whole_tx(&self) -> bool {
//! false
//! }
//! }
//!
//! let custom_signer = CustomSigner::connect();
//!
//! let descriptor = "wpkh(tpubD6NzVbkrYhZ4Xferm7Pz4VnjdcDPFyjVu5K4iZXQ4pVN8Cks4pHVowTBXBKRhX64pkRyJZJN5xAKj4UDNnLPb5p2sSKXhewoYx5GbTdUFWq/*)";
//! let mut wallet: OfflineWallet<_> = Wallet::new_offline(descriptor, None, Network::Testnet, MemoryDatabase::default())?;
//! wallet.add_signer(
//! ScriptType::External,
//! Fingerprint::from_str("e30f11b8").unwrap().into(),
//! SignerOrdering(200),
//! Arc::new(Box::new(custom_signer))
//! );
//!
//! # Ok::<_, magical::Error>(())
//! ```
use std::cmp::Ordering;
use std::collections::BTreeMap;
use std::fmt;
use std::ops::Bound::Included;
use std::sync::Arc;
use bitcoin::blockdata::opcodes;
use bitcoin::blockdata::script::Builder as ScriptBuilder;
use bitcoin::hashes::{hash160, Hash};
use bitcoin::secp256k1::{Message, Secp256k1};
use bitcoin::util::bip32::{ExtendedPrivKey, Fingerprint};
use bitcoin::util::{bip143, psbt};
use bitcoin::{PrivateKey, SigHash, SigHashType};
use miniscript::descriptor::{DescriptorPublicKey, DescriptorSecretKey, DescriptorXKey, KeyMap};
use miniscript::{Legacy, MiniscriptKey, Segwitv0};
use crate::descriptor::XKeyUtils;
/// Identifier of a signer in the `SignersContainers`. Used as a key to find the right signer among
/// multiple of them
#[derive(Debug, Clone, PartialEq, Eq, Hash)]
pub enum SignerId<Pk: MiniscriptKey> {
    /// Signer identified by the hash of its public key
    PkHash(<Pk as MiniscriptKey>::Hash),
    /// Signer identified by the fingerprint of its (extended) key
    Fingerprint(Fingerprint),
}
// A pubkey-hash maps naturally to the `PkHash` variant
impl From<hash160::Hash> for SignerId<DescriptorPublicKey> {
    fn from(hash: hash160::Hash) -> SignerId<DescriptorPublicKey> {
        SignerId::PkHash(hash)
    }
}
// An extended-key fingerprint maps naturally to the `Fingerprint` variant
impl From<Fingerprint> for SignerId<DescriptorPublicKey> {
    fn from(fing: Fingerprint) -> SignerId<DescriptorPublicKey> {
        SignerId::Fingerprint(fing)
    }
}
/// Signing error
#[derive(Debug, PartialEq, Eq, Clone)]
pub enum SignerError {
    /// The private key is missing for the required public key
    MissingKey,
    /// The user canceled the operation
    UserCanceled,
    /// The sighash is missing in the PSBT input
    MissingSighash,
    /// Input index is out of range
    InputIndexOutOfRange,
    /// The `non_witness_utxo` field of the transaction is required to sign this input
    MissingNonWitnessUtxo,
    /// The `non_witness_utxo` specified is invalid
    InvalidNonWitnessUtxo,
    /// The `witness_utxo` field of the transaction is required to sign this input
    MissingWitnessUtxo,
    /// The `witness_script` field of the transaction is required to sign this input
    MissingWitnessScript,
    /// The fingerprint and derivation path are missing from the psbt input
    MissingHDKeypath,
}
impl fmt::Display for SignerError {
    // NOTE(review): forwards to the `Debug` representation, so `Display` output is the bare
    // variant name rather than a human-readable message
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        write!(f, "{:?}", self)
    }
}

impl std::error::Error for SignerError {}
/// Trait for signers
///
/// This trait can be implemented to provide customized signers to the wallet. For an example see
/// [`this module`](crate::wallet::signer)'s documentation.
pub trait Signer: fmt::Debug {
    /// Sign a PSBT
    ///
    /// The `input_index` argument is only provided if the wallet doesn't declare to sign the whole
    /// transaction in one go (see [`Signer::sign_whole_tx`]). Otherwise its value is `None` and
    /// can be ignored.
    fn sign(
        &self,
        psbt: &mut psbt::PartiallySignedTransaction,
        input_index: Option<usize>,
    ) -> Result<(), SignerError>;

    /// Return whether or not the signer signs the whole transaction in one go instead of every
    /// input individually
    fn sign_whole_tx(&self) -> bool;

    /// Return the secret key for the signer
    ///
    /// This is used internally to reconstruct the original descriptor that may contain secrets.
    /// External signers that are meant to keep keys isolated should just return `None` here (which
    /// is the default for this method, if not overridden).
    fn descriptor_secret_key(&self) -> Option<DescriptorSecretKey> {
        None
    }
}
impl Signer for DescriptorXKey<ExtendedPrivKey> {
    /// Sign the PSBT input whose `hd_keypaths` entry matches this xprv's fingerprint.
    fn sign(
        &self,
        psbt: &mut psbt::PartiallySignedTransaction,
        input_index: Option<usize>,
    ) -> Result<(), SignerError> {
        // `sign_whole_tx()` is false, so the wallet always provides an index. Return an error
        // instead of panicking (previous `unwrap()`) if a caller violates that contract.
        let input_index = input_index.ok_or(SignerError::InputIndexOutOfRange)?;
        if input_index >= psbt.inputs.len() {
            return Err(SignerError::InputIndexOutOfRange);
        }

        // Find the derivation path of the first hd_keypaths entry that belongs to this key
        let deriv_path = match psbt.inputs[input_index]
            .hd_keypaths
            .iter()
            .filter_map(|(_, &(fingerprint, ref path))| self.matches(fingerprint, &path))
            .next()
        {
            Some(deriv_path) => deriv_path,
            None => return Ok(()), // TODO: should report an error maybe?
        };

        let ctx = Secp256k1::signing_only();

        // Derive the concrete key for this input and delegate to the `PrivateKey` signer
        let derived_key = self.xkey.derive_priv(&ctx, &deriv_path).unwrap();
        derived_key.private_key.sign(psbt, Some(input_index))
    }

    fn sign_whole_tx(&self) -> bool {
        false
    }

    fn descriptor_secret_key(&self) -> Option<DescriptorSecretKey> {
        Some(DescriptorSecretKey::XPrv(self.clone()))
    }
}
impl Signer for PrivateKey {
    /// Sign a single PSBT input with this raw private key, adding the result to `partial_sigs`.
    fn sign(
        &self,
        psbt: &mut psbt::PartiallySignedTransaction,
        input_index: Option<usize>,
    ) -> Result<(), SignerError> {
        // `sign_whole_tx()` is false, so the wallet always provides an index. Return an error
        // instead of panicking (previous `unwrap()`) if a caller violates that contract.
        let input_index = input_index.ok_or(SignerError::InputIndexOutOfRange)?;
        if input_index >= psbt.inputs.len() {
            return Err(SignerError::InputIndexOutOfRange);
        }

        let ctx = Secp256k1::signing_only();
        let pubkey = self.public_key(&ctx);
        // Nothing to do if this input already carries our signature
        if psbt.inputs[input_index].partial_sigs.contains_key(&pubkey) {
            return Ok(());
        }

        // FIXME: use the presence of `witness_utxo` as an indication that we should make a bip143
        // sig. Does this make sense? Should we add an extra argument to explicitly swith between
        // these? The original idea was to declare sign() as sign<Ctx: ScriptContex>() and use Ctx,
        // but that violates the rules for trait-objects, so we can't do it.
        let (hash, sighash) = match psbt.inputs[input_index].witness_utxo {
            Some(_) => Segwitv0::sighash(psbt, input_index)?,
            None => Legacy::sighash(psbt, input_index)?,
        };

        let signature = ctx.sign(
            &Message::from_slice(&hash.into_inner()[..]).unwrap(),
            &self.key,
        );

        // DER-serialized signature followed by the sighash flag byte, as consensus requires
        let mut final_signature = Vec::with_capacity(75);
        final_signature.extend_from_slice(&signature.serialize_der());
        final_signature.push(sighash.as_u32() as u8);

        psbt.inputs[input_index]
            .partial_sigs
            .insert(pubkey, final_signature);

        Ok(())
    }

    fn sign_whole_tx(&self) -> bool {
        false
    }

    fn descriptor_secret_key(&self) -> Option<DescriptorSecretKey> {
        Some(DescriptorSecretKey::PrivKey(self.clone()))
    }
}
/// Defines the order in which signers are called
///
/// The default value is `100`. Signers with an ordering above that will be called later,
/// and they will thus see the partial signatures added to the transaction once they get to sign
/// themselves.
#[derive(Debug, Clone, PartialOrd, PartialEq, Ord, Eq)]
pub struct SignerOrdering(pub usize);

impl Default for SignerOrdering {
    fn default() -> Self {
        Self(100)
    }
}
// Key used internally by `SignersContainer`'s BTreeMap: pairs a signer's id with its ordering
#[derive(Debug, Clone)]
struct SignersContainerKey<Pk: MiniscriptKey> {
    id: SignerId<Pk>,
    ordering: SignerOrdering,
}

impl<Pk: MiniscriptKey> From<(SignerId<Pk>, SignerOrdering)> for SignersContainerKey<Pk> {
    fn from(tuple: (SignerId<Pk>, SignerOrdering)) -> Self {
        SignersContainerKey {
            id: tuple.0,
            ordering: tuple.1,
        }
    }
}
/// Container for multiple signers
#[derive(Debug, Default, Clone)]
pub struct SignersContainer<Pk: MiniscriptKey>(
    BTreeMap<SignersContainerKey<Pk>, Arc<Box<dyn Signer>>>,
);

impl SignersContainer<DescriptorPublicKey> {
    /// Rebuild a descriptor `KeyMap` from the signers that can expose their secret key
    /// (signers returning `None` from `descriptor_secret_key` are skipped)
    pub fn as_key_map(&self) -> KeyMap {
        self.0
            .values()
            .filter_map(|signer| signer.descriptor_secret_key())
            .filter_map(|secret| secret.as_public().ok().map(|public| (public, secret)))
            .collect()
    }
}
impl From<KeyMap> for SignersContainer<DescriptorPublicKey> {
    /// Build a container from a descriptor `KeyMap`, registering one signer per secret key,
    /// each with the default ordering
    fn from(keymap: KeyMap) -> SignersContainer<DescriptorPublicKey> {
        let mut container = SignersContainer::new();

        for (_, secret) in keymap {
            match secret {
                // Single WIF keys are identified by the hash of their public key...
                DescriptorSecretKey::PrivKey(private_key) => container.add_external(
                    SignerId::from(
                        private_key
                            .public_key(&Secp256k1::signing_only())
                            .to_pubkeyhash(),
                    ),
                    SignerOrdering::default(),
                    Arc::new(Box::new(private_key)),
                ),
                // ...while extended keys are identified by their root fingerprint
                DescriptorSecretKey::XPrv(xprv) => container.add_external(
                    SignerId::from(xprv.root_fingerprint()),
                    SignerOrdering::default(),
                    Arc::new(Box::new(xprv)),
                ),
            };
        }

        container
    }
}
impl<Pk: MiniscriptKey> SignersContainer<Pk> {
    /// Default constructor
    pub fn new() -> Self {
        SignersContainer(Default::default())
    }

    /// Adds an external signer to the container for the specified id. Optionally returns the
    /// signer that was previously in the container, if any
    pub fn add_external(
        &mut self,
        id: SignerId<Pk>,
        ordering: SignerOrdering,
        signer: Arc<Box<dyn Signer>>,
    ) -> Option<Arc<Box<dyn Signer>>> {
        self.0.insert((id, ordering).into(), signer)
    }

    /// Removes a signer from the container and returns it
    pub fn remove(
        &mut self,
        id: SignerId<Pk>,
        ordering: SignerOrdering,
    ) -> Option<Arc<Box<dyn Signer>>> {
        self.0.remove(&(id, ordering).into())
    }

    /// Returns the list of identifiers of all the signers in the container
    pub fn ids(&self) -> Vec<&SignerId<Pk>> {
        self.0
            .keys()
            .map(|SignersContainerKey { id, .. }| id)
            .collect()
    }

    /// Returns the list of signers in the container, sorted by lowest to highest `ordering`
    pub fn signers(&self) -> Vec<&Arc<Box<dyn Signer>>> {
        self.0.values().collect()
    }

    /// Finds the signer with lowest ordering for a given id in the container.
    pub fn find(&self, id: SignerId<Pk>) -> Option<&Arc<Box<dyn Signer>>> {
        // Scan this id's whole ordering range and take the first (lowest-ordering) match
        self.0
            .range((
                Included(&(id.clone(), SignerOrdering(0)).into()),
                Included(&(id, SignerOrdering(usize::MAX)).into()),
            ))
            .map(|(_, v)| v)
            // `.next()` instead of `.nth(0)`: same result, idiomatic (clippy::iter_nth_zero)
            .next()
    }
}
// Internal helper trait: computes the message hash (and sighash type) to sign for a PSBT input
// under a specific script context (legacy vs segwit-v0)
pub(crate) trait ComputeSighash {
    fn sighash(
        psbt: &psbt::PartiallySignedTransaction,
        input_index: usize,
    ) -> Result<(SigHash, SigHashType), SignerError>;
}
impl ComputeSighash for Legacy {
    /// Compute the legacy (pre-segwit) sighash for the given PSBT input.
    fn sighash(
        psbt: &psbt::PartiallySignedTransaction,
        input_index: usize,
    ) -> Result<(SigHash, SigHashType), SignerError> {
        if input_index >= psbt.inputs.len() {
            return Err(SignerError::InputIndexOutOfRange);
        }

        let psbt_input = &psbt.inputs[input_index];
        let tx_input = &psbt.global.unsigned_tx.input[input_index];

        let sighash = psbt_input.sighash_type.ok_or(SignerError::MissingSighash)?;

        // Sign against the redeem script when one is present (P2SH); otherwise recover the
        // scriptPubkey of the spent output from `non_witness_utxo`
        let script = match psbt_input.redeem_script {
            Some(ref redeem_script) => redeem_script.clone(),
            None => {
                let non_witness_utxo = psbt_input
                    .non_witness_utxo
                    .as_ref()
                    .ok_or(SignerError::MissingNonWitnessUtxo)?;
                let prev_out = non_witness_utxo
                    .output
                    .get(tx_input.previous_output.vout as usize)
                    .ok_or(SignerError::InvalidNonWitnessUtxo)?;

                prev_out.script_pubkey.clone()
            }
        };

        let hash = psbt
            .global
            .unsigned_tx
            .signature_hash(input_index, &script, sighash.as_u32());

        Ok((hash, sighash))
    }
}
impl ComputeSighash for Segwitv0 {
    /// Compute the BIP143 (segwit v0) sighash for the given PSBT input.
    fn sighash(
        psbt: &psbt::PartiallySignedTransaction,
        input_index: usize,
    ) -> Result<(SigHash, SigHashType), SignerError> {
        if input_index >= psbt.inputs.len() {
            return Err(SignerError::InputIndexOutOfRange);
        }

        let psbt_input = &psbt.inputs[input_index];

        let sighash = psbt_input.sighash_type.ok_or(SignerError::MissingSighash)?;

        // BIP143 commits to the value of the output being spent, which comes from
        // `witness_utxo`. Fixed: this previously reported `MissingNonWitnessUtxo`, pointing
        // callers at the wrong PSBT field.
        let witness_utxo = psbt_input
            .witness_utxo
            .as_ref()
            .ok_or(SignerError::MissingWitnessUtxo)?;
        let value = witness_utxo.value;

        let script = match &psbt_input.witness_script {
            &Some(ref witness_script) => witness_script.clone(),
            &None => {
                // For P2WPKH the script code is the canonical P2PKH script rebuilt from the
                // 20-byte program embedded in the scriptPubkey (BIP143)
                if witness_utxo.script_pubkey.is_v0_p2wpkh() {
                    ScriptBuilder::new()
                        .push_opcode(opcodes::all::OP_DUP)
                        .push_opcode(opcodes::all::OP_HASH160)
                        .push_slice(&witness_utxo.script_pubkey[2..])
                        .push_opcode(opcodes::all::OP_EQUALVERIFY)
                        .push_opcode(opcodes::all::OP_CHECKSIG)
                        .into_script()
                } else {
                    return Err(SignerError::MissingWitnessScript);
                }
            }
        };

        Ok((
            bip143::SigHashCache::new(&psbt.global.unsigned_tx).signature_hash(
                input_index,
                &script,
                value,
                sighash,
            ),
            sighash,
        ))
    }
}
impl<Pk: MiniscriptKey> PartialOrd for SignersContainerKey<Pk> {
    fn partial_cmp(&self, other: &Self) -> Option<Ordering> {
        Some(self.cmp(other))
    }
}

// NOTE(review): all of the comparisons below look only at `ordering` and ignore `id`. Two keys
// with different ids but the same ordering therefore compare as equal, so inserting the second
// into the `BTreeMap` overwrites the first, and `find`'s range query can match keys with a
// different id — confirm whether `id` should participate in the comparison.
impl<Pk: MiniscriptKey> Ord for SignersContainerKey<Pk> {
    fn cmp(&self, other: &Self) -> Ordering {
        self.ordering.cmp(&other.ordering)
    }
}

impl<Pk: MiniscriptKey> PartialEq for SignersContainerKey<Pk> {
    fn eq(&self, other: &Self) -> bool {
        self.ordering == other.ordering
    }
}

impl<Pk: MiniscriptKey> Eq for SignersContainerKey<Pk> {}

View File

@@ -1,86 +0,0 @@
// Magical Bitcoin Library
// Written in 2020 by
// Alekos Filini <alekos.filini@gmail.com>
//
// Copyright (c) 2020 Magical Bitcoin
//
// Permission is hereby granted, free of charge, to any person obtaining a copy
// of this software and associated documentation files (the "Software"), to deal
// in the Software without restriction, including without limitation the rights
// to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
// copies of the Software, and to permit persons to whom the Software is
// furnished to do so, subject to the following conditions:
//
// The above copyright notice and this permission notice shall be included in all
// copies or substantial portions of the Software.
//
// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
// IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
// FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
// AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
// LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
// OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
// SOFTWARE.
//! Cross-platform time
//!
//! This module provides a function to get the current timestamp that works on all the platforms
//! supported by the library.
//!
//! It can be useful to compare it with the timestamps found in
//! [`TransactionDetails`](crate::types::TransactionDetails).
use std::time::Duration;
#[cfg(target_arch = "wasm32")]
use js_sys::Date;
#[cfg(not(target_arch = "wasm32"))]
use std::time::{Instant as SystemInstant, SystemTime, UNIX_EPOCH};
/// Return the current timestamp in seconds
#[cfg(not(target_arch = "wasm32"))]
pub fn get_timestamp() -> u64 {
    // Seconds elapsed since the UNIX epoch. `duration_since` can only fail if
    // the system clock is set before 1970, in which case we panic.
    let since_epoch = SystemTime::now().duration_since(UNIX_EPOCH).unwrap();
    since_epoch.as_secs()
}
/// Return the current timestamp in seconds
#[cfg(target_arch = "wasm32")]
pub fn get_timestamp() -> u64 {
    // `Date::now()` returns milliseconds since the UNIX epoch as an f64;
    // truncate the division down to whole seconds.
    (Date::now() / 1000.0) as u64
}
// Cross-platform elapsed-time measurement.
//
// On native targets this wraps `std::time::Instant`. On wasm32 `std` does not
// provide a usable monotonic clock, so we store a wall-clock offset from the
// UNIX epoch (derived from `js_sys::Date::now()`) instead.
#[cfg(not(target_arch = "wasm32"))]
pub(crate) struct Instant(SystemInstant);
#[cfg(target_arch = "wasm32")]
pub(crate) struct Instant(Duration);

impl Instant {
    /// Capture the current point in time.
    #[cfg(not(target_arch = "wasm32"))]
    pub fn new() -> Self {
        Instant(SystemInstant::now())
    }

    /// Capture the current point in time (wall-clock based on wasm).
    #[cfg(target_arch = "wasm32")]
    pub fn new() -> Self {
        let millis = Date::now();
        // Split the f64 millisecond timestamp into whole seconds plus the
        // fractional remainder expressed in nanoseconds.
        let whole_secs = (millis / 1000.0) as u64;
        let frac_nanos = ((millis % 1000.0) * 1e6) as u32;
        Instant(Duration::new(whole_secs, frac_nanos))
    }

    /// Time elapsed since this instant was captured.
    #[cfg(not(target_arch = "wasm32"))]
    pub fn elapsed(&self) -> Duration {
        self.0.elapsed()
    }

    /// Time elapsed since this instant was captured; clamped to zero if the
    /// wall clock moved backwards in the meantime.
    #[cfg(target_arch = "wasm32")]
    pub fn elapsed(&self) -> Duration {
        Instant::new().0.checked_sub(self.0).unwrap_or(Duration::new(0, 0))
    }
}

View File

@@ -1,488 +0,0 @@
// Magical Bitcoin Library
// Written in 2020 by
// Alekos Filini <alekos.filini@gmail.com>
//
// Copyright (c) 2020 Magical Bitcoin
//
// Permission is hereby granted, free of charge, to any person obtaining a copy
// of this software and associated documentation files (the "Software"), to deal
// in the Software without restriction, including without limitation the rights
// to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
// copies of the Software, and to permit persons to whom the Software is
// furnished to do so, subject to the following conditions:
//
// The above copyright notice and this permission notice shall be included in all
// copies or substantial portions of the Software.
//
// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
// IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
// FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
// AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
// LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
// OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
// SOFTWARE.
//! Transaction builder
//!
//! ## Example
//!
//! ```
//! # use std::str::FromStr;
//! # use bitcoin::*;
//! # use magical::*;
//! # let to_address = Address::from_str("2N4eQYCbKUHCCTUjBJeHcJp9ok6J2GZsTDt").unwrap();
//! // Create a transaction with one output to `to_address` of 50_000 satoshi, with a custom fee rate
//! // of 5.0 satoshi/vbyte, only spending non-change outputs and with RBF signaling
//! // enabled
//! let builder = TxBuilder::with_recipients(vec![(to_address.script_pubkey(), 50_000)])
//! .fee_rate(FeeRate::from_sat_per_vb(5.0))
//! .do_not_spend_change()
//! .enable_rbf();
//! ```
use std::collections::BTreeMap;
use std::default::Default;
use bitcoin::{OutPoint, Script, SigHashType, Transaction};
use super::coin_selection::{CoinSelectionAlgorithm, DefaultCoinSelectionAlgorithm};
use crate::types::{FeeRate, UTXO};
/// A transaction builder
///
/// This structure contains the configuration that the wallet must follow to build a transaction.
///
/// For an example see [this module](super::tx_builder)'s documentation;
#[derive(Debug, Default)]
pub struct TxBuilder<Cs: CoinSelectionAlgorithm> {
    // (script_pubkey, amount) pairs describing the outputs to create
    pub(crate) recipients: Vec<(Script, u64)>,
    // sweep every selected utxo into a single output (see `send_all`)
    pub(crate) send_all: bool,
    // custom fee rate, if set
    pub(crate) fee_rate: Option<FeeRate>,
    // policy node id -> indexes of the items to satisfy (see `policy_path`)
    pub(crate) policy_path: Option<BTreeMap<String, Vec<usize>>>,
    // utxos that **must** be spent; win over `unspendable` on conflict
    pub(crate) utxos: Option<Vec<OutPoint>>,
    // utxos that must not be spent (unless also listed in `utxos`)
    pub(crate) unspendable: Option<Vec<OutPoint>>,
    // custom sighash type to sign with
    pub(crate) sighash: Option<SigHashType>,
    // input/output ordering strategy applied to the final transaction
    pub(crate) ordering: TxOrdering,
    // custom nLockTime
    pub(crate) locktime: Option<u32>,
    // nSequence value used to signal RBF, when enabled
    pub(crate) rbf: Option<u32>,
    // custom transaction version
    pub(crate) version: Option<Version>,
    // whether change outputs may/must be spent
    pub(crate) change_policy: ChangeSpendPolicy,
    // always fill in `non_witness_utxo`, even for segwit-only wallets
    pub(crate) force_non_witness_utxo: bool,
    // coin selection algorithm implementation
    pub(crate) coin_selection: Cs,
}
impl TxBuilder<DefaultCoinSelectionAlgorithm> {
    /// Create an empty builder
    pub fn new() -> Self {
        Default::default()
    }

    /// Create a builder starting from a list of recipients
    pub fn with_recipients(recipients: Vec<(Script, u64)>) -> Self {
        Self::new().set_recipients(recipients)
    }
}
impl<Cs: CoinSelectionAlgorithm> TxBuilder<Cs> {
    /// Replace the recipients already added with a new list
    pub fn set_recipients(self, recipients: Vec<(Script, u64)>) -> Self {
        TxBuilder { recipients, ..self }
    }

    /// Add a recipient to the internal list
    pub fn add_recipient(mut self, script_pubkey: Script, amount: u64) -> Self {
        self.recipients.push((script_pubkey, amount));
        self
    }

    /// Send all the selected utxos to a single output
    ///
    /// Adding more than one recipients with this option enabled will result in an error.
    ///
    /// The value associated with the only recipient is irrelevant and will be replaced by the wallet.
    pub fn send_all(self) -> Self {
        TxBuilder {
            send_all: true,
            ..self
        }
    }

    /// Set a custom fee rate
    pub fn fee_rate(self, fee_rate: FeeRate) -> Self {
        TxBuilder {
            fee_rate: Some(fee_rate),
            ..self
        }
    }

    /// Set the policy path to use while creating the transaction
    ///
    /// This method accepts a map where the key is the policy node id (see
    /// [`Policy::id`](crate::descriptor::Policy::id)) and the value is the list of the indexes of
    /// the items that are intended to be satisfied from the policy node (see
    /// [`SatisfiableItem::Thresh::items`](crate::descriptor::policy::SatisfiableItem::Thresh::items)).
    pub fn policy_path(self, policy_path: BTreeMap<String, Vec<usize>>) -> Self {
        TxBuilder {
            policy_path: Some(policy_path),
            ..self
        }
    }

    /// Replace the internal list of utxos that **must** be spent with a new list
    ///
    /// These have priority over the "unspendable" utxos, meaning that if a utxo is present both in
    /// the "utxos" and the "unspendable" list, it will be spent.
    pub fn utxos(self, utxos: Vec<OutPoint>) -> Self {
        TxBuilder {
            utxos: Some(utxos),
            ..self
        }
    }

    /// Add a utxo to the internal list of utxos that **must** be spent
    ///
    /// These have priority over the "unspendable" utxos, meaning that if a utxo is present both in
    /// the "utxos" and the "unspendable" list, it will be spent.
    pub fn add_utxo(mut self, utxo: OutPoint) -> Self {
        // lazily create the list on first use
        self.utxos.get_or_insert_with(Vec::new).push(utxo);
        self
    }

    /// Replace the internal list of unspendable utxos with a new list
    ///
    /// It's important to note that the "must-be-spent" utxos added with [`TxBuilder::utxos`] and
    /// [`TxBuilder::add_utxo`] have priority over these. See the docs of the two linked methods
    /// for more details.
    pub fn unspendable(self, unspendable: Vec<OutPoint>) -> Self {
        TxBuilder {
            unspendable: Some(unspendable),
            ..self
        }
    }

    /// Add a utxo to the internal list of unspendable utxos
    ///
    /// It's important to note that the "must-be-spent" utxos added with [`TxBuilder::utxos`] and
    /// [`TxBuilder::add_utxo`] have priority over this. See the docs of the two linked methods
    /// for more details.
    pub fn add_unspendable(mut self, unspendable: OutPoint) -> Self {
        // lazily create the list on first use
        self.unspendable.get_or_insert_with(Vec::new).push(unspendable);
        self
    }

    /// Sign with a specific sig hash
    ///
    /// **Use this option very carefully**
    pub fn sighash(self, sighash: SigHashType) -> Self {
        TxBuilder {
            sighash: Some(sighash),
            ..self
        }
    }

    /// Choose the ordering for inputs and outputs of the transaction
    pub fn ordering(self, ordering: TxOrdering) -> Self {
        TxBuilder { ordering, ..self }
    }

    /// Use a specific nLockTime while creating the transaction
    ///
    /// This can cause conflicts if the wallet's descriptors contain an "after" (OP_CLTV) operator.
    pub fn nlocktime(self, locktime: u32) -> Self {
        TxBuilder {
            locktime: Some(locktime),
            ..self
        }
    }

    /// Enable signaling RBF
    ///
    /// This will use the default nSequence value of `0xFFFFFFFD`.
    pub fn enable_rbf(self) -> Self {
        self.enable_rbf_with_sequence(0xFFFFFFFD)
    }

    /// Enable signaling RBF with a specific nSequence value
    ///
    /// This can cause conflicts if the wallet's descriptors contain an "older" (OP_CSV) operator
    /// and the given `nsequence` is lower than the CSV value.
    ///
    /// If the `nsequence` is higher than `0xFFFFFFFD` an error will be thrown, since it would not
    /// be a valid nSequence to signal RBF.
    pub fn enable_rbf_with_sequence(self, nsequence: u32) -> Self {
        TxBuilder {
            rbf: Some(nsequence),
            ..self
        }
    }

    /// Build a transaction with a specific version
    ///
    /// The `version` should always be greater than `0` and greater than `1` if the wallet's
    /// descriptors contain an "older" (OP_CSV) operator.
    pub fn version(self, version: u32) -> Self {
        TxBuilder {
            version: Some(Version(version)),
            ..self
        }
    }

    /// Do not spend change outputs
    ///
    /// This effectively adds all the change outputs to the "unspendable" list. See
    /// [`TxBuilder::unspendable`].
    pub fn do_not_spend_change(self) -> Self {
        self.change_policy(ChangeSpendPolicy::ChangeForbidden)
    }

    /// Only spend change outputs
    ///
    /// This effectively adds all the non-change outputs to the "unspendable" list. See
    /// [`TxBuilder::unspendable`].
    pub fn only_spend_change(self) -> Self {
        self.change_policy(ChangeSpendPolicy::OnlyChange)
    }

    /// Set a specific [`ChangeSpendPolicy`]. See [`TxBuilder::do_not_spend_change`] and
    /// [`TxBuilder::only_spend_change`] for some shortcuts.
    pub fn change_policy(self, change_policy: ChangeSpendPolicy) -> Self {
        TxBuilder {
            change_policy,
            ..self
        }
    }

    /// Fill-in the [`psbt::Input::non_witness_utxo`](bitcoin::util::psbt::Input::non_witness_utxo) field even if the wallet only has SegWit
    /// descriptors.
    ///
    /// This is useful for signers which always require it, like Trezor hardware wallets.
    pub fn force_non_witness_utxo(self) -> Self {
        TxBuilder {
            force_non_witness_utxo: true,
            ..self
        }
    }

    /// Choose the coin selection algorithm
    ///
    /// Overrides the [`DefaultCoinSelectionAlgorithm`](super::coin_selection::DefaultCoinSelectionAlgorithm).
    pub fn coin_selection<P: CoinSelectionAlgorithm>(self, coin_selection: P) -> TxBuilder<P> {
        // The generic parameter changes here, so every field has to be moved
        // over explicitly (`..self` cannot change the struct's type).
        TxBuilder {
            coin_selection,
            recipients: self.recipients,
            send_all: self.send_all,
            fee_rate: self.fee_rate,
            policy_path: self.policy_path,
            utxos: self.utxos,
            unspendable: self.unspendable,
            sighash: self.sighash,
            ordering: self.ordering,
            locktime: self.locktime,
            rbf: self.rbf,
            version: self.version,
            change_policy: self.change_policy,
            force_non_witness_utxo: self.force_non_witness_utxo,
        }
    }
}
/// Ordering of the transaction's inputs and outputs
#[derive(Debug, Ord, PartialOrd, Eq, PartialEq, Hash, Clone, Copy)]
pub enum TxOrdering {
    /// Randomized (default)
    Shuffle,
    /// Unchanged
    Untouched,
    /// BIP69 / Lexicographic
    BIP69Lexicographic,
}

impl Default for TxOrdering {
    /// `Shuffle` is the default, to avoid leaking which output is the change.
    fn default() -> Self {
        Self::Shuffle
    }
}
impl TxOrdering {
    /// Sort the inputs and outputs of `tx` in place according to `self`.
    ///
    /// `Shuffle` randomizes only the *outputs* (inputs are left untouched);
    /// `BIP69Lexicographic` sorts inputs by `(txid, vout)` and outputs by
    /// `(value, script_pubkey)`.
    pub fn sort_tx(&self, tx: &mut Transaction) {
        match self {
            TxOrdering::Untouched => {}
            TxOrdering::Shuffle => {
                use rand::seq::SliceRandom;
                #[cfg(test)]
                use rand::SeedableRng;

                // Use a fixed seed under `cfg(test)` so unit tests are deterministic.
                #[cfg(not(test))]
                let mut rng = rand::thread_rng();
                #[cfg(test)]
                let mut rng = rand::rngs::StdRng::seed_from_u64(0);

                tx.output.shuffle(&mut rng);
            }
            TxOrdering::BIP69Lexicographic => {
                tx.input.sort_unstable_by_key(|txin| {
                    (txin.previous_output.txid, txin.previous_output.vout)
                });
                // Compare outputs by reference instead of using
                // `sort_unstable_by_key`, which would clone each
                // `script_pubkey` on every key extraction during the sort.
                tx.output.sort_unstable_by(|a, b| {
                    a.value
                        .cmp(&b.value)
                        .then_with(|| a.script_pubkey.cmp(&b.script_pubkey))
                });
            }
        }
    }
}
/// Transaction version
///
/// Has a default value of `1`
#[derive(Debug, Ord, PartialOrd, Eq, PartialEq, Hash, Clone, Copy)]
pub(crate) struct Version(pub(crate) u32);

impl Default for Version {
    /// Version `1` is the most conservative choice (no OP_CSV support needed).
    fn default() -> Self {
        Self(1)
    }
}
/// Policy regarding the use of change outputs when creating a transaction
#[derive(Debug, Ord, PartialOrd, Eq, PartialEq, Hash, Clone, Copy)]
pub enum ChangeSpendPolicy {
    /// Use both change and non-change outputs (default)
    ChangeAllowed,
    /// Only use change outputs (see [`TxBuilder::only_spend_change`])
    OnlyChange,
    /// Only use non-change outputs (see [`TxBuilder::do_not_spend_change`])
    ChangeForbidden,
}

impl Default for ChangeSpendPolicy {
    /// By default every utxo is eligible, change or not.
    fn default() -> Self {
        Self::ChangeAllowed
    }
}
impl ChangeSpendPolicy {
    /// Keep only the utxos that this policy allows spending.
    pub(crate) fn filter_utxos<I: Iterator<Item = UTXO>>(&self, iter: I) -> Vec<UTXO> {
        iter.filter(|utxo| match self {
            ChangeSpendPolicy::ChangeAllowed => true,
            ChangeSpendPolicy::OnlyChange => utxo.is_internal,
            ChangeSpendPolicy::ChangeForbidden => !utxo.is_internal,
        })
        .collect()
    }
}
#[cfg(test)]
mod test {
    // Raw hex of a 3-input / 3-output transaction used to exercise the
    // `TxOrdering` strategies below.
    const ORDERING_TEST_TX: &'static str = "0200000003c26f3eb7932f7acddc5ddd26602b77e7516079b03090a16e2c2f54\
                                            85d1fd600f0100000000ffffffffc26f3eb7932f7acddc5ddd26602b77e75160\
                                            79b03090a16e2c2f5485d1fd600f0000000000ffffffff571fb3e02278217852\
                                            dd5d299947e2b7354a639adc32ec1fa7b82cfb5dec530e0500000000ffffffff\
                                            03e80300000000000002aaeee80300000000000001aa200300000000000001ff\
                                            00000000";

    // Deserialize `ORDERING_TEST_TX` into a `bitcoin::Transaction`.
    macro_rules! ordering_test_tx {
        () => {
            deserialize::<bitcoin::Transaction>(&Vec::<u8>::from_hex(ORDERING_TEST_TX).unwrap())
                .unwrap()
        };
    }

    use bitcoin::consensus::deserialize;
    use bitcoin::hashes::hex::FromHex;

    use super::*;

    #[test]
    fn test_output_ordering_default_shuffle() {
        assert_eq!(TxOrdering::default(), TxOrdering::Shuffle);
    }

    #[test]
    fn test_output_ordering_untouched() {
        let original_tx = ordering_test_tx!();
        let mut tx = original_tx.clone();

        TxOrdering::Untouched.sort_tx(&mut tx);

        // `Untouched` must leave the transaction exactly as it was
        assert_eq!(original_tx, tx);
    }

    #[test]
    fn test_output_ordering_shuffle() {
        let original_tx = ordering_test_tx!();
        let mut tx = original_tx.clone();

        TxOrdering::Shuffle.sort_tx(&mut tx);

        // `Shuffle` never touches the inputs, only the outputs; under
        // cfg(test) the RNG is seeded, so the permutation is deterministic
        assert_eq!(original_tx.input, tx.input);
        assert_ne!(original_tx.output, tx.output);
    }

    #[test]
    fn test_output_ordering_bip69() {
        use std::str::FromStr;

        let original_tx = ordering_test_tx!();
        let mut tx = original_tx.clone();

        TxOrdering::BIP69Lexicographic.sort_tx(&mut tx);

        // inputs sorted by (txid, vout)
        assert_eq!(
            tx.input[0].previous_output,
            bitcoin::OutPoint::from_str(
                "0e53ec5dfb2cb8a71fec32dc9a634a35b7e24799295ddd5278217822e0b31f57:5"
            )
            .unwrap()
        );
        assert_eq!(
            tx.input[1].previous_output,
            bitcoin::OutPoint::from_str(
                "0f60fdd185542f2c6ea19030b0796051e7772b6026dd5ddccd7a2f93b73e6fc2:0"
            )
            .unwrap()
        );
        assert_eq!(
            tx.input[2].previous_output,
            bitcoin::OutPoint::from_str(
                "0f60fdd185542f2c6ea19030b0796051e7772b6026dd5ddccd7a2f93b73e6fc2:1"
            )
            .unwrap()
        );

        // outputs sorted by value first, then script_pubkey
        assert_eq!(tx.output[0].value, 800);
        assert_eq!(tx.output[1].script_pubkey, From::from(vec![0xAA]));
        assert_eq!(tx.output[2].script_pubkey, From::from(vec![0xAA, 0xEE]));
    }

    // One external (is_internal = false) and one change (is_internal = true)
    // utxo, used by the `ChangeSpendPolicy` tests.
    fn get_test_utxos() -> Vec<UTXO> {
        vec![
            UTXO {
                outpoint: OutPoint {
                    txid: Default::default(),
                    vout: 0,
                },
                txout: Default::default(),
                is_internal: false,
            },
            UTXO {
                outpoint: OutPoint {
                    txid: Default::default(),
                    vout: 1,
                },
                txout: Default::default(),
                is_internal: true,
            },
        ]
    }

    #[test]
    fn test_change_spend_policy_default() {
        let change_spend_policy = ChangeSpendPolicy::default();
        let filtered = change_spend_policy.filter_utxos(get_test_utxos().into_iter());

        // default policy keeps everything
        assert_eq!(filtered.len(), 2);
    }

    #[test]
    fn test_change_spend_policy_no_internal() {
        let change_spend_policy = ChangeSpendPolicy::ChangeForbidden;
        let filtered = change_spend_policy.filter_utxos(get_test_utxos().into_iter());

        assert_eq!(filtered.len(), 1);
        assert_eq!(filtered[0].is_internal, false);
    }

    #[test]
    fn test_change_spend_policy_only_internal() {
        let change_spend_policy = ChangeSpendPolicy::OnlyChange;
        let filtered = change_spend_policy.filter_utxos(get_test_utxos().into_iter());

        assert_eq!(filtered.len(), 1);
        assert_eq!(filtered[0].is_internal, true);
    }

    #[test]
    fn test_default_tx_version_1() {
        let version = Version::default();
        assert_eq!(version.0, 1);
    }
}

View File

@@ -1,38 +1,10 @@
// Magical Bitcoin Library
// Written in 2020 by
// Alekos Filini <alekos.filini@gmail.com>
//
// Copyright (c) 2020 Magical Bitcoin
//
// Permission is hereby granted, free of charge, to any person obtaining a copy
// of this software and associated documentation files (the "Software"), to deal
// in the Software without restriction, including without limitation the rights
// to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
// copies of the Software, and to permit persons to whom the Software is
// furnished to do so, subject to the following conditions:
//
// The above copyright notice and this permission notice shall be included in all
// copies or substantial portions of the Software.
//
// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
// IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
// FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
// AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
// LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
// OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
// SOFTWARE.
use miniscript::{MiniscriptKey, Satisfier};
// De-facto standard "dust limit" (even though it should change based on the output type)
// 546 sat is the long-standing default dust threshold for P2PKH outputs.
const DUST_LIMIT_SATOSHI: u64 = 546;

/// Trait to check if a value is below the dust limit
// we implement this trait to make sure we don't mess up the comparison with off-by-one like a <
// instead of a <= etc. The constant value for the dust limit is not public on purpose, to
// encourage the usage of this trait.
pub trait IsDust {
    /// Check whether or not a value is below dust limit
    fn is_dust(&self) -> bool;
}
@@ -42,61 +14,6 @@ impl IsDust for u64 {
}
}
/// Satisfier for absolute timelocks ("after", OP_CLTV — see `check_after`).
pub struct After {
    /// Current chain height, if known
    pub current_height: Option<u32>,
    /// Fallback answer when the current height is unknown
    pub assume_height_reached: bool,
}

impl After {
    /// Build a new `After` satisfier.
    pub(crate) fn new(current_height: Option<u32>, assume_height_reached: bool) -> After {
        Self {
            current_height,
            assume_height_reached,
        }
    }
}
impl<Pk: MiniscriptKey> Satisfier<Pk> for After {
    /// A height-`n` absolute timelock is satisfiable once the current height
    /// reaches `n`; with no known height, fall back to the configured assumption.
    fn check_after(&self, n: u32) -> bool {
        self.current_height
            .map(|current_height| current_height >= n)
            .unwrap_or(self.assume_height_reached)
    }
}
/// Satisfier for relative timelocks ("older", OP_CSV — see `check_older`).
pub struct Older {
    /// Current chain height, if known
    pub current_height: Option<u32>,
    /// Height at which the input being spent was created, if known
    pub create_height: Option<u32>,
    /// Fallback answer when the current height is unknown
    pub assume_height_reached: bool,
}

impl Older {
    /// Build a new `Older` satisfier.
    pub(crate) fn new(
        current_height: Option<u32>,
        create_height: Option<u32>,
        assume_height_reached: bool,
    ) -> Older {
        Self {
            current_height,
            create_height,
            assume_height_reached,
        }
    }
}
impl<Pk: MiniscriptKey> Satisfier<Pk> for Older {
    /// A relative timelock of `n` blocks is satisfiable once
    /// `create_height + n` has been reached (missing create height counts as 0);
    /// with no known current height, fall back to the configured assumption.
    fn check_older(&self, n: u32) -> bool {
        match self.current_height {
            // TODO: test >= / >
            Some(current_height) => {
                // widen to u64 so the addition cannot overflow
                let unlock_height = self.create_height.unwrap_or(0) as u64 + n as u64;
                current_height as u64 >= unlock_height
            }
            None => self.assume_height_reached,
        }
    }
}
pub struct ChunksIterator<I: Iterator> {
iter: I,
size: usize,
@@ -129,26 +46,3 @@ impl<I: Iterator> Iterator for ChunksIterator<I> {
Some(v)
}
}
#[cfg(test)]
mod test {
    use crate::types::FeeRate;

    #[test]
    fn test_fee_from_btc_per_kb() {
        // 1e-5 BTC per kvB == 1000 sat per kvB == 1 sat/vB
        let fee = FeeRate::from_btc_per_kvb(1e-5);
        assert!((fee.as_sat_vb() - 1.0).abs() < 0.0001);
    }

    #[test]
    fn test_fee_from_sats_vbyte() {
        // round-trip: sat/vB in, sat/vB out
        let fee = FeeRate::from_sat_per_vb(1.0);
        assert!((fee.as_sat_vb() - 1.0).abs() < 0.0001);
    }

    #[test]
    fn test_fee_default_min_relay_fee() {
        // the default minimum relay fee is 1 sat/vB
        let fee = FeeRate::default_min_relay_fee();
        assert!((fee.as_sat_vb() - 1.0).abs() < 0.0001);
    }
}

File diff suppressed because one or more lines are too long

Before

Width:  |  Height:  |  Size: 125 KiB

View File

@@ -1,19 +0,0 @@
[package]
name = "magical-testutils-macros"
version = "0.1.0-beta.1"
authors = ["Alekos Filini <alekos.filini@gmail.com>"]
edition = "2018"
[lib]
proc-macro = true
name = "testutils_macros"
# See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html
[dependencies]
syn = { version = "1.0", features = ["parsing"] }
proc-macro2 = "1.0"
quote = "1.0"
[features]
debug = ["syn/extra-traits"]

View File

@@ -1,530 +0,0 @@
// Magical Bitcoin Library
// Written in 2020 by
// Alekos Filini <alekos.filini@gmail.com>
//
// Copyright (c) 2020 Magical Bitcoin
//
// Permission is hereby granted, free of charge, to any person obtaining a copy
// of this software and associated documentation files (the "Software"), to deal
// in the Software without restriction, including without limitation the rights
// to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
// copies of the Software, and to permit persons to whom the Software is
// furnished to do so, subject to the following conditions:
//
// The above copyright notice and this permission notice shall be included in all
// copies or substantial portions of the Software.
//
// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
// IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
// FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
// AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
// LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
// OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
// SOFTWARE.
#[macro_use]
extern crate quote;
use proc_macro::TokenStream;
use syn::spanned::Spanned;
use syn::{parse, parse2, Ident, ReturnType};
#[proc_macro_attribute]
pub fn magical_blockchain_tests(attr: TokenStream, item: TokenStream) -> TokenStream {
let root_ident = if !attr.is_empty() {
match parse::<syn::ExprPath>(attr) {
Ok(parsed) => parsed,
Err(e) => {
let error_string = e.to_string();
return (quote! {
compile_error!("Invalid crate path: {:?}", #error_string)
})
.into();
}
}
} else {
parse2::<syn::ExprPath>(quote! { magical }).unwrap()
};
match parse::<syn::ItemFn>(item) {
Err(_) => (quote! {
compile_error!("#[magical_blockchain_tests] can only be used on `fn`s")
})
.into(),
Ok(parsed) => {
let parsed_sig_ident = parsed.sig.ident.clone();
let mod_name = Ident::new(
&format!("generated_tests_{}", parsed_sig_ident.to_string()),
parsed.span(),
);
let return_type = match parsed.sig.output {
ReturnType::Type(_, ref t) => t.clone(),
ReturnType::Default => {
return (quote! {
compile_error!("The tagged function must return a type that impl `OnlineBlockchain`")
}).into();
}
};
let output = quote! {
#parsed
mod #mod_name {
use bitcoin::Network;
use miniscript::Descriptor;
use testutils::{TestClient, serial};
use #root_ident::blockchain::{OnlineBlockchain, noop_progress};
use #root_ident::descriptor::ExtendedDescriptor;
use #root_ident::database::MemoryDatabase;
use #root_ident::types::ScriptType;
use #root_ident::{Wallet, TxBuilder, FeeRate};
use super::*;
fn get_blockchain() -> #return_type {
#parsed_sig_ident()
}
fn get_wallet_from_descriptors(descriptors: &(String, Option<String>)) -> Wallet<#return_type, MemoryDatabase> {
Wallet::new(&descriptors.0.to_string(), descriptors.1.as_deref(), Network::Regtest, MemoryDatabase::new(), get_blockchain()).unwrap()
}
fn init_single_sig() -> (Wallet<#return_type, MemoryDatabase>, (String, Option<String>), TestClient) {
let descriptors = testutils! {
@descriptors ( "wpkh(Alice)" ) ( "wpkh(Alice)" ) ( @keys ( "Alice" => (@generate_xprv "/44'/0'/0'/0/*", "/44'/0'/0'/1/*") ) )
};
let test_client = TestClient::new();
let wallet = get_wallet_from_descriptors(&descriptors);
(wallet, descriptors, test_client)
}
#[test]
#[serial]
fn test_sync_simple() {
let (wallet, descriptors, mut test_client) = init_single_sig();
let tx = testutils! {
@tx ( (@external descriptors, 0) => 50_000 )
};
println!("{:?}", tx);
let txid = test_client.receive(tx);
wallet.sync(noop_progress(), None).unwrap();
assert_eq!(wallet.get_balance().unwrap(), 50_000);
assert_eq!(wallet.list_unspent().unwrap()[0].is_internal, false);
let list_tx_item = &wallet.list_transactions(false).unwrap()[0];
assert_eq!(list_tx_item.txid, txid);
assert_eq!(list_tx_item.received, 50_000);
assert_eq!(list_tx_item.sent, 0);
assert_eq!(list_tx_item.height, None);
}
#[test]
#[serial]
fn test_sync_stop_gap_20() {
let (wallet, descriptors, mut test_client) = init_single_sig();
test_client.receive(testutils! {
@tx ( (@external descriptors, 5) => 50_000 )
});
test_client.receive(testutils! {
@tx ( (@external descriptors, 25) => 50_000 )
});
wallet.sync(noop_progress(), None).unwrap();
assert_eq!(wallet.get_balance().unwrap(), 100_000);
assert_eq!(wallet.list_transactions(false).unwrap().len(), 2);
}
#[test]
#[serial]
fn test_sync_before_and_after_receive() {
let (wallet, descriptors, mut test_client) = init_single_sig();
wallet.sync(noop_progress(), None).unwrap();
assert_eq!(wallet.get_balance().unwrap(), 0);
test_client.receive(testutils! {
@tx ( (@external descriptors, 0) => 50_000 )
});
wallet.sync(noop_progress(), None).unwrap();
assert_eq!(wallet.get_balance().unwrap(), 50_000);
assert_eq!(wallet.list_transactions(false).unwrap().len(), 1);
}
#[test]
#[serial]
fn test_sync_multiple_outputs_same_tx() {
let (wallet, descriptors, mut test_client) = init_single_sig();
let txid = test_client.receive(testutils! {
@tx ( (@external descriptors, 0) => 50_000, (@external descriptors, 1) => 25_000, (@external descriptors, 5) => 30_000 )
});
wallet.sync(noop_progress(), None).unwrap();
assert_eq!(wallet.get_balance().unwrap(), 105_000);
assert_eq!(wallet.list_transactions(false).unwrap().len(), 1);
assert_eq!(wallet.list_unspent().unwrap().len(), 3);
let list_tx_item = &wallet.list_transactions(false).unwrap()[0];
assert_eq!(list_tx_item.txid, txid);
assert_eq!(list_tx_item.received, 105_000);
assert_eq!(list_tx_item.sent, 0);
assert_eq!(list_tx_item.height, None);
}
#[test]
#[serial]
fn test_sync_receive_multi() {
let (wallet, descriptors, mut test_client) = init_single_sig();
test_client.receive(testutils! {
@tx ( (@external descriptors, 0) => 50_000 )
});
test_client.receive(testutils! {
@tx ( (@external descriptors, 5) => 25_000 )
});
wallet.sync(noop_progress(), None).unwrap();
assert_eq!(wallet.get_balance().unwrap(), 75_000);
assert_eq!(wallet.list_transactions(false).unwrap().len(), 2);
assert_eq!(wallet.list_unspent().unwrap().len(), 2);
}
#[test]
#[serial]
fn test_sync_address_reuse() {
let (wallet, descriptors, mut test_client) = init_single_sig();
test_client.receive(testutils! {
@tx ( (@external descriptors, 0) => 50_000 )
});
wallet.sync(noop_progress(), None).unwrap();
assert_eq!(wallet.get_balance().unwrap(), 50_000);
test_client.receive(testutils! {
@tx ( (@external descriptors, 0) => 25_000 )
});
wallet.sync(noop_progress(), None).unwrap();
assert_eq!(wallet.get_balance().unwrap(), 75_000);
}
#[test]
#[serial]
fn test_sync_receive_rbf_replaced() {
let (wallet, descriptors, mut test_client) = init_single_sig();
let txid = test_client.receive(testutils! {
@tx ( (@external descriptors, 0) => 50_000 ) ( @replaceable true )
});
wallet.sync(noop_progress(), None).unwrap();
assert_eq!(wallet.get_balance().unwrap(), 50_000);
assert_eq!(wallet.list_transactions(false).unwrap().len(), 1);
assert_eq!(wallet.list_unspent().unwrap().len(), 1);
let list_tx_item = &wallet.list_transactions(false).unwrap()[0];
assert_eq!(list_tx_item.txid, txid);
assert_eq!(list_tx_item.received, 50_000);
assert_eq!(list_tx_item.sent, 0);
assert_eq!(list_tx_item.height, None);
let new_txid = test_client.bump_fee(&txid);
wallet.sync(noop_progress(), None).unwrap();
assert_eq!(wallet.get_balance().unwrap(), 50_000);
assert_eq!(wallet.list_transactions(false).unwrap().len(), 1);
assert_eq!(wallet.list_unspent().unwrap().len(), 1);
let list_tx_item = &wallet.list_transactions(false).unwrap()[0];
assert_eq!(list_tx_item.txid, new_txid);
assert_eq!(list_tx_item.received, 50_000);
assert_eq!(list_tx_item.sent, 0);
assert_eq!(list_tx_item.height, None);
}
#[test]
#[serial]
fn test_sync_reorg_block() {
let (wallet, descriptors, mut test_client) = init_single_sig();
let txid = test_client.receive(testutils! {
@tx ( (@external descriptors, 0) => 50_000 ) ( @confirmations 1 ) ( @replaceable true )
});
wallet.sync(noop_progress(), None).unwrap();
assert_eq!(wallet.get_balance().unwrap(), 50_000);
assert_eq!(wallet.list_transactions(false).unwrap().len(), 1);
assert_eq!(wallet.list_unspent().unwrap().len(), 1);
let list_tx_item = &wallet.list_transactions(false).unwrap()[0];
assert_eq!(list_tx_item.txid, txid);
assert!(list_tx_item.height.is_some());
// Invalidate 1 block
test_client.invalidate(1);
wallet.sync(noop_progress(), None).unwrap();
assert_eq!(wallet.get_balance().unwrap(), 50_000);
let list_tx_item = &wallet.list_transactions(false).unwrap()[0];
assert_eq!(list_tx_item.txid, txid);
assert_eq!(list_tx_item.height, None);
}
#[test]
#[serial]
fn test_sync_after_send() {
let (wallet, descriptors, mut test_client) = init_single_sig();
println!("{}", descriptors.0);
let node_addr = test_client.get_node_address(None);
test_client.receive(testutils! {
@tx ( (@external descriptors, 0) => 50_000 )
});
wallet.sync(noop_progress(), None).unwrap();
assert_eq!(wallet.get_balance().unwrap(), 50_000);
let (psbt, details) = wallet.create_tx(TxBuilder::with_recipients(vec![(node_addr.script_pubkey(), 25_000)])).unwrap();
let (psbt, finalized) = wallet.sign(psbt, None).unwrap();
assert!(finalized, "Cannot finalize transaction");
let tx = psbt.extract_tx();
println!("{}", bitcoin::consensus::encode::serialize_hex(&tx));
wallet.broadcast(tx).unwrap();
wallet.sync(noop_progress(), None).unwrap();
assert_eq!(wallet.get_balance().unwrap(), details.received);
assert_eq!(wallet.list_transactions(false).unwrap().len(), 2);
assert_eq!(wallet.list_unspent().unwrap().len(), 1);
}
#[test]
#[serial]
fn test_sync_outgoing_from_scratch() {
let (wallet, descriptors, mut test_client) = init_single_sig();
let node_addr = test_client.get_node_address(None);
let received_txid = test_client.receive(testutils! {
@tx ( (@external descriptors, 0) => 50_000 )
});
wallet.sync(noop_progress(), None).unwrap();
assert_eq!(wallet.get_balance().unwrap(), 50_000);
let (psbt, details) = wallet.create_tx(TxBuilder::with_recipients(vec![(node_addr.script_pubkey(), 25_000)])).unwrap();
let (psbt, finalized) = wallet.sign(psbt, None).unwrap();
assert!(finalized, "Cannot finalize transaction");
let sent_txid = wallet.broadcast(psbt.extract_tx()).unwrap();
wallet.sync(noop_progress(), None).unwrap();
assert_eq!(wallet.get_balance().unwrap(), details.received);
// empty wallet
let wallet = get_wallet_from_descriptors(&descriptors);
wallet.sync(noop_progress(), None).unwrap();
let tx_map = wallet.list_transactions(false).unwrap().into_iter().map(|tx| (tx.txid, tx)).collect::<std::collections::HashMap<_, _>>();
let received = tx_map.get(&received_txid).unwrap();
assert_eq!(received.received, 50_000);
assert_eq!(received.sent, 0);
let sent = tx_map.get(&sent_txid).unwrap();
assert_eq!(sent.received, details.received);
assert_eq!(sent.sent, details.sent);
assert_eq!(sent.fees, details.fees);
}
#[test]
#[serial]
// Builds a chain of 5 unconfirmed transactions spending our own change and
// checks that both the live wallet and a wallet restored from scratch agree
// on the final balance.
fn test_sync_long_change_chain() {
    let (wallet, descriptors, mut test_client) = init_single_sig();
    let node_addr = test_client.get_node_address(None);
    test_client.receive(testutils! {
        @tx ( (@external descriptors, 0) => 50_000 )
    });
    wallet.sync(noop_progress(), None).unwrap();
    assert_eq!(wallet.get_balance().unwrap(), 50_000);
    let mut total_sent = 0;
    for _ in 0..5 {
        // `Address::script_pubkey()` already returns an owned `Script`,
        // so the previous `.clone()` on it was redundant
        let (psbt, details) = wallet.create_tx(TxBuilder::with_recipients(vec![(node_addr.script_pubkey(), 5_000)])).unwrap();
        let (psbt, finalized) = wallet.sign(psbt, None).unwrap();
        assert!(finalized, "Cannot finalize transaction");
        wallet.broadcast(psbt.extract_tx()).unwrap();
        wallet.sync(noop_progress(), None).unwrap();
        total_sent += 5_000 + details.fees;
    }
    wallet.sync(noop_progress(), None).unwrap();
    assert_eq!(wallet.get_balance().unwrap(), 50_000 - total_sent);
    // empty wallet
    let wallet = get_wallet_from_descriptors(&descriptors);
    wallet.sync(noop_progress(), None).unwrap();
    assert_eq!(wallet.get_balance().unwrap(), 50_000 - total_sent);
}
#[test]
#[serial]
// Creates an RBF transaction, bumps its fee, and verifies the wallet balance
// reflects the replacement (same recipient amount, strictly higher fee).
fn test_sync_bump_fee() {
    let (wallet, descriptors, mut test_client) = init_single_sig();
    let node_addr = test_client.get_node_address(None);
    test_client.receive(testutils! {
        @tx ( (@external descriptors, 0) => 50_000 ) (@confirmations 1)
    });
    wallet.sync(noop_progress(), None).unwrap();
    assert_eq!(wallet.get_balance().unwrap(), 50_000);
    // `Address::script_pubkey()` already returns an owned `Script`,
    // so the previous `.clone()` on it was redundant
    let (psbt, details) = wallet.create_tx(TxBuilder::with_recipients(vec![(node_addr.script_pubkey(), 5_000)]).enable_rbf()).unwrap();
    let (psbt, finalized) = wallet.sign(psbt, None).unwrap();
    assert!(finalized, "Cannot finalize transaction");
    wallet.broadcast(psbt.extract_tx()).unwrap();
    wallet.sync(noop_progress(), None).unwrap();
    assert_eq!(wallet.get_balance().unwrap(), 50_000 - details.fees - 5_000);
    assert_eq!(wallet.get_balance().unwrap(), details.received);
    // Replace the original transaction with a higher fee-rate version
    let (new_psbt, new_details) = wallet.bump_fee(&details.txid, TxBuilder::new().fee_rate(FeeRate::from_sat_per_vb(2.1))).unwrap();
    let (new_psbt, finalized) = wallet.sign(new_psbt, None).unwrap();
    assert!(finalized, "Cannot finalize transaction");
    wallet.broadcast(new_psbt.extract_tx()).unwrap();
    wallet.sync(noop_progress(), None).unwrap();
    assert_eq!(wallet.get_balance().unwrap(), 50_000 - new_details.fees - 5_000);
    assert_eq!(wallet.get_balance().unwrap(), new_details.received);
    assert!(new_details.fees > details.fees);
}
#[test]
#[serial]
// Bumping the fee of a near-sweep RBF transaction should drop the small
// change output entirely, leaving the wallet with zero balance.
fn test_sync_bump_fee_remove_change() {
    let (wallet, descriptors, mut test_client) = init_single_sig();
    let node_addr = test_client.get_node_address(None);
    test_client.receive(testutils! {
        @tx ( (@external descriptors, 0) => 50_000 ) (@confirmations 1)
    });
    wallet.sync(noop_progress(), None).unwrap();
    assert_eq!(wallet.get_balance().unwrap(), 50_000);
    // `Address::script_pubkey()` already returns an owned `Script`,
    // so the previous `.clone()` on it was redundant
    let (psbt, details) = wallet.create_tx(TxBuilder::with_recipients(vec![(node_addr.script_pubkey(), 49_000)]).enable_rbf()).unwrap();
    let (psbt, finalized) = wallet.sign(psbt, None).unwrap();
    assert!(finalized, "Cannot finalize transaction");
    wallet.broadcast(psbt.extract_tx()).unwrap();
    wallet.sync(noop_progress(), None).unwrap();
    assert_eq!(wallet.get_balance().unwrap(), 1_000 - details.fees);
    assert_eq!(wallet.get_balance().unwrap(), details.received);
    // The higher fee rate consumes the entire ~1k change output
    let (new_psbt, new_details) = wallet.bump_fee(&details.txid, TxBuilder::new().fee_rate(FeeRate::from_sat_per_vb(5.0))).unwrap();
    let (new_psbt, finalized) = wallet.sign(new_psbt, None).unwrap();
    assert!(finalized, "Cannot finalize transaction");
    wallet.broadcast(new_psbt.extract_tx()).unwrap();
    wallet.sync(noop_progress(), None).unwrap();
    assert_eq!(wallet.get_balance().unwrap(), 0);
    assert_eq!(new_details.received, 0);
    assert!(new_details.fees > details.fees);
}
#[test]
#[serial]
// Bumping the fee when the original inputs can't cover it should pull in the
// second 25k UTXO, raising `sent` to the full 75k.
fn test_sync_bump_fee_add_input() {
    let (wallet, descriptors, mut test_client) = init_single_sig();
    let node_addr = test_client.get_node_address(None);
    test_client.receive(testutils! {
        @tx ( (@external descriptors, 0) => 50_000, (@external descriptors, 1) => 25_000 ) (@confirmations 1)
    });
    wallet.sync(noop_progress(), None).unwrap();
    assert_eq!(wallet.get_balance().unwrap(), 75_000);
    // `Address::script_pubkey()` already returns an owned `Script`,
    // so the previous `.clone()` on it was redundant
    let (psbt, details) = wallet.create_tx(TxBuilder::with_recipients(vec![(node_addr.script_pubkey(), 49_000)]).enable_rbf()).unwrap();
    let (psbt, finalized) = wallet.sign(psbt, None).unwrap();
    assert!(finalized, "Cannot finalize transaction");
    wallet.broadcast(psbt.extract_tx()).unwrap();
    wallet.sync(noop_progress(), None).unwrap();
    assert_eq!(wallet.get_balance().unwrap(), 26_000 - details.fees);
    assert_eq!(details.received, 1_000 - details.fees);
    // The much higher fee rate forces the bump to add the 25k input
    let (new_psbt, new_details) = wallet.bump_fee(&details.txid, TxBuilder::new().fee_rate(FeeRate::from_sat_per_vb(10.0))).unwrap();
    let (new_psbt, finalized) = wallet.sign(new_psbt, None).unwrap();
    assert!(finalized, "Cannot finalize transaction");
    wallet.broadcast(new_psbt.extract_tx()).unwrap();
    wallet.sync(noop_progress(), None).unwrap();
    assert_eq!(new_details.sent, 75_000);
    assert_eq!(wallet.get_balance().unwrap(), new_details.received);
}
#[test]
#[serial]
// Like `test_sync_bump_fee_add_input`, but with a fee rate so high that the
// bumped transaction has no change output at all: the wallet ends at zero.
fn test_sync_bump_fee_add_input_no_change() {
    let (wallet, descriptors, mut test_client) = init_single_sig();
    let node_addr = test_client.get_node_address(None);
    test_client.receive(testutils! {
        @tx ( (@external descriptors, 0) => 50_000, (@external descriptors, 1) => 25_000 ) (@confirmations 1)
    });
    wallet.sync(noop_progress(), None).unwrap();
    assert_eq!(wallet.get_balance().unwrap(), 75_000);
    // `Address::script_pubkey()` already returns an owned `Script`,
    // so the previous `.clone()` on it was redundant
    let (psbt, details) = wallet.create_tx(TxBuilder::with_recipients(vec![(node_addr.script_pubkey(), 49_000)]).enable_rbf()).unwrap();
    let (psbt, finalized) = wallet.sign(psbt, None).unwrap();
    assert!(finalized, "Cannot finalize transaction");
    wallet.broadcast(psbt.extract_tx()).unwrap();
    wallet.sync(noop_progress(), None).unwrap();
    assert_eq!(wallet.get_balance().unwrap(), 26_000 - details.fees);
    assert_eq!(details.received, 1_000 - details.fees);
    // 123 sat/vB eats both inputs entirely (leftover debug println removed)
    let (new_psbt, new_details) = wallet.bump_fee(&details.txid, TxBuilder::new().fee_rate(FeeRate::from_sat_per_vb(123.0))).unwrap();
    let (new_psbt, finalized) = wallet.sign(new_psbt, None).unwrap();
    assert!(finalized, "Cannot finalize transaction");
    wallet.broadcast(new_psbt.extract_tx()).unwrap();
    wallet.sync(noop_progress(), None).unwrap();
    assert_eq!(new_details.sent, 75_000);
    assert_eq!(wallet.get_balance().unwrap(), 0);
    assert_eq!(new_details.received, 0);
}
}
};
output.into()
}
}
}

View File

@@ -1,2 +0,0 @@
target/
Cargo.lock

View File

@@ -1,25 +0,0 @@
[package]
name = "magical-testutils"
version = "0.1.0-beta.1"
authors = ["Alekos Filini <alekos.filini@gmail.com>"]
edition = "2018"
[lib]
name = "testutils"
# See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html
# The latest bitcoincore-rpc depends on an older version of bitcoin, which in turn depends on an
# older version of secp256k1, which causes conflicts during linking. Use my fork right now, we can
# switch back to crates.io as soon as rust-bitcoin is updated in rust-bitcoincore-rpc.
#
# Tracking issue: https://github.com/rust-bitcoin/rust-bitcoincore-rpc/pull/80
[dependencies]
log = "0.4.8"
serde = { version = "1.0", features = ["derive"] }
serde_json = "1.0"
serial_test = "0.4"
bitcoin = "0.23"
bitcoincore-rpc = "0.11"
electrum-client = "0.2.0-beta.1"

View File

@@ -1,530 +0,0 @@
// Magical Bitcoin Library
// Written in 2020 by
// Alekos Filini <alekos.filini@gmail.com>
//
// Copyright (c) 2020 Magical Bitcoin
//
// Permission is hereby granted, free of charge, to any person obtaining a copy
// of this software and associated documentation files (the "Software"), to deal
// in the Software without restriction, including without limitation the rights
// to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
// copies of the Software, and to permit persons to whom the Software is
// furnished to do so, subject to the following conditions:
//
// The above copyright notice and this permission notice shall be included in all
// copies or substantial portions of the Software.
//
// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
// IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
// FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
// AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
// LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
// OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
// SOFTWARE.
#[macro_use]
extern crate serde_json;
#[macro_use]
extern crate serial_test;
pub use serial_test::serial;
use std::collections::HashMap;
use std::env;
use std::ops::Deref;
use std::path::PathBuf;
use std::str::FromStr;
use std::time::Duration;
#[allow(unused_imports)]
use log::{debug, error, info, trace};
use bitcoin::consensus::encode::{deserialize, serialize};
use bitcoin::hashes::hex::{FromHex, ToHex};
use bitcoin::hashes::sha256d;
use bitcoin::{Address, Amount, Script, Transaction, Txid};
pub use bitcoincore_rpc::bitcoincore_rpc_json::AddressType;
pub use bitcoincore_rpc::{Auth, Client as RpcClient, RpcApi};
pub use electrum_client::{Client as ElectrumClient, ElectrumApi};
// TODO: we currently only support env vars, we could also parse a toml file
/// Build bitcoind RPC credentials from the environment.
///
/// If `MAGICAL_RPC_AUTH=USER_PASS`, the `MAGICAL_RPC_USER` and
/// `MAGICAL_RPC_PASS` variables are required (panics if missing); otherwise
/// a cookie file is used, with its path optionally overridden through
/// `MAGICAL_RPC_COOKIEFILE`.
fn get_auth() -> Auth {
    match env::var("MAGICAL_RPC_AUTH").as_ref().map(String::as_ref) {
        Ok("USER_PASS") => Auth::UserPass(
            env::var("MAGICAL_RPC_USER").unwrap(),
            env::var("MAGICAL_RPC_PASS").unwrap(),
        ),
        // `unwrap_or_else` avoids allocating the default path string when the
        // env var is present (clippy `or_fun_call`)
        _ => Auth::CookieFile(PathBuf::from(
            env::var("MAGICAL_RPC_COOKIEFILE")
                .unwrap_or_else(|_| "/home/user/.bitcoin/regtest/.cookie".to_string()),
        )),
    }
}
/// Return the Electrum server URL, honoring the `MAGICAL_ELECTRUM_URL`
/// environment variable and defaulting to a local regtest electrs instance.
pub fn get_electrum_url() -> String {
    // `unwrap_or_else` defers building the default string until it is
    // actually needed (clippy `or_fun_call`)
    env::var("MAGICAL_ELECTRUM_URL").unwrap_or_else(|_| "tcp://127.0.0.1:50001".to_string())
}
/// Test harness pairing a bitcoind RPC client with an Electrum client; used
/// by the integration tests to fund wallets, mine/invalidate blocks and wait
/// for electrs to catch up.
pub struct TestClient {
    // bitcoind JSON-RPC connection (also exposed via Deref)
    client: RpcClient,
    // electrs connection, used for broadcasting and to wait for indexing
    electrum: ElectrumClient,
}
#[derive(Clone, Debug)]
/// One output of a transaction to be created by `TestClient::receive`.
pub struct TestIncomingOutput {
    // amount in satoshis
    pub value: u64,
    // destination address, stored in string form
    pub to_address: String,
}
impl TestIncomingOutput {
pub fn new(value: u64, to_address: Address) -> Self {
Self {
value,
to_address: to_address.to_string(),
}
}
}
#[derive(Clone, Debug)]
/// Full description of a transaction that `TestClient::receive` should
/// create, fund, sign and broadcast.
pub struct TestIncomingTx {
    // outputs the transaction must pay
    pub output: Vec<TestIncomingOutput>,
    // number of blocks to mine after broadcasting, if any
    pub min_confirmations: Option<u64>,
    // optional nLockTime to set on the transaction
    pub locktime: Option<i64>,
    // opt-in RBF signaling
    pub replaceable: Option<bool>,
}
impl TestIncomingTx {
pub fn new(
output: Vec<TestIncomingOutput>,
min_confirmations: Option<u64>,
locktime: Option<i64>,
replaceable: Option<bool>,
) -> Self {
Self {
output,
min_confirmations,
locktime,
replaceable,
}
}
pub fn add_output(&mut self, output: TestIncomingOutput) {
self.output.push(output);
}
}
/// Helpers shared by the integration tests: derive addresses from
/// descriptors, describe incoming transactions, generate random keys and
/// build descriptors with aliased keys substituted in.
#[macro_export]
macro_rules! testutils {
    // Derive the regtest address at index `$child` from the *external*
    // descriptor (element 0 of the descriptors tuple)
    ( @external $descriptors:expr, $child:expr ) => ({
        use miniscript::descriptor::{Descriptor, DescriptorPublicKey};

        let parsed = Descriptor::<DescriptorPublicKey>::parse_secret(&$descriptors.0).expect("Failed to parse descriptor in `testutils!(@external)`").0;
        parsed.derive(&[bitcoin::util::bip32::ChildNumber::from_normal_idx($child).unwrap()]).address(bitcoin::Network::Regtest).expect("No address form")
    });
    // Same, for the *internal* (change) descriptor — panics if there is none
    ( @internal $descriptors:expr, $child:expr ) => ({
        use miniscript::descriptor::{Descriptor, DescriptorPublicKey};

        let parsed = Descriptor::<DescriptorPublicKey>::parse_secret(&$descriptors.1.expect("Missing internal descriptor")).expect("Failed to parse descriptor in `testutils!(@internal)`").0;
        parsed.derive(&[bitcoin::util::bip32::ChildNumber::from_normal_idx($child).unwrap()]).address(bitcoin::Network::Regtest).expect("No address form")
    });
    // Short aliases for the two arms above
    ( @e $descriptors:expr, $child:expr ) => ({ testutils!(@external $descriptors, $child) });
    ( @i $descriptors:expr, $child:expr ) => ({ testutils!(@internal $descriptors, $child) });

    // Build a `TestIncomingTx` from `(address) => amount` pairs plus optional
    // locktime / confirmations / replaceable modifiers
    ( @tx ( $( ( $( $addr:tt )* ) => $amount:expr ),+ ) $( ( @locktime $locktime:expr ) )* $( ( @confirmations $confirmations:expr ) )* $( ( @replaceable $replaceable:expr ) )* ) => ({
        let mut outs = Vec::new();
        $( outs.push(testutils::TestIncomingOutput::new($amount, testutils!( $($addr)* ))); )+

        // Each `$( ... )*` below matches zero or one modifier; the last
        // assignment wins if a modifier is (incorrectly) repeated
        let mut locktime = None::<i64>;
        $( locktime = Some($locktime); )*

        let mut min_confirmations = None::<u64>;
        $( min_confirmations = Some($confirmations); )*

        let mut replaceable = None::<bool>;
        $( replaceable = Some($replaceable); )*

        testutils::TestIncomingTx::new(outs, min_confirmations, locktime, replaceable)
    });

    // A key given literally as a string, with no derivation paths
    ( @literal $key:expr ) => ({
        let key = $key.to_string();
        (key, None::<String>, None::<String>)
    });
    // Generate a random testnet master xprv, with optional external/internal
    // derivation path suffixes
    ( @generate_xprv $( $external_path:expr )* $( ,$internal_path:expr )* ) => ({
        use rand::Rng;

        let mut seed = [0u8; 32];
        rand::thread_rng().fill(&mut seed[..]);

        let key = bitcoin::util::bip32::ExtendedPrivKey::new_master(
            bitcoin::Network::Testnet,
            &seed,
        );

        let mut external_path = None::<String>;
        $( external_path = Some($external_path.to_string()); )*

        let mut internal_path = None::<String>;
        $( internal_path = Some($internal_path.to_string()); )*

        (key.unwrap().to_string(), external_path, internal_path)
    });
    // Generate a random compressed testnet WIF key (no derivation paths)
    ( @generate_wif ) => ({
        use rand::Rng;

        let mut key = [0u8; bitcoin::secp256k1::constants::SECRET_KEY_SIZE];
        rand::thread_rng().fill(&mut key[..]);

        (bitcoin::PrivateKey {
            compressed: true,
            network: bitcoin::Network::Testnet,
            key: bitcoin::secp256k1::SecretKey::from_slice(&key).unwrap(),
        }.to_string(), None::<String>, None::<String>)
    });

    // Build a map of `alias -> (key, external_path, internal_path)` entries
    ( @keys ( $( $alias:expr => ( $( $key_type:tt )* ) ),+ ) ) => ({
        let mut map = std::collections::HashMap::new();
        $(
            let alias: &str = $alias;
            map.insert(alias, testutils!( $($key_type)* ));
        )+

        map
    });

    // Produce `(external, Option<internal>)` descriptor strings, substituting
    // any `@keys` aliases (appending the matching derivation path suffix)
    ( @descriptors ( $external_descriptor:expr ) $( ( $internal_descriptor:expr ) )* $( ( @keys $( $keys:tt )* ) )* ) => ({
        use std::str::FromStr;
        use std::collections::HashMap;
        use std::convert::TryInto;
        use miniscript::descriptor::{Descriptor, DescriptorPublicKey};

        let mut keys: HashMap<&'static str, (String, Option<String>, Option<String>)> = HashMap::new();
        $(
            keys = testutils!{ @keys $( $keys )* };
        )*

        // Replace aliased keys in the external descriptor; unknown names are
        // passed through unchanged
        let external: Descriptor<String> = FromStr::from_str($external_descriptor).unwrap();
        let external: Descriptor<String> = external.translate_pk::<_, _, _, &'static str>(|k| {
            if let Some((key, ext_path, _)) = keys.get(&k.as_str()) {
                Ok(format!("{}{}", key, ext_path.as_ref().unwrap_or(&"".into())))
            } else {
                Ok(k.clone())
            }
        }, |kh| {
            if let Some((key, ext_path, _)) = keys.get(&kh.as_str()) {
                Ok(format!("{}{}", key, ext_path.as_ref().unwrap_or(&"".into())))
            } else {
                Ok(kh.clone())
            }
        }).unwrap();
        let external = external.to_string();

        // Same substitution for the optional internal descriptor, using the
        // internal derivation path suffix instead
        let mut internal = None::<String>;
        $(
            let string_internal: Descriptor<String> = FromStr::from_str($internal_descriptor).unwrap();

            let string_internal: Descriptor<String> = string_internal.translate_pk::<_, _, _, &'static str>(|k| {
                if let Some((key, _, int_path)) = keys.get(&k.as_str()) {
                    Ok(format!("{}{}", key, int_path.as_ref().unwrap_or(&"".into())))
                } else {
                    Ok(k.clone())
                }
            }, |kh| {
                if let Some((key, _, int_path)) = keys.get(&kh.as_str()) {
                    Ok(format!("{}{}", key, int_path.as_ref().unwrap_or(&"".into())))
                } else {
                    Ok(kh.clone())
                }
            }).unwrap();
            internal = Some(string_internal.to_string());
        )*

        (external, internal)
    })
}
/// Repeatedly invoke `poll` until it yields a value, returning that value.
/// Sleeps between attempts with an exponentially growing delay that starts
/// at 64ms and stops doubling once it reaches ~512ms.
fn exponential_backoff_poll<T, F>(mut poll: F) -> T
where
    F: FnMut() -> Option<T>,
{
    let mut delay = Duration::from_millis(64);

    loop {
        if let Some(data) = poll() {
            // Success: return immediately without a final sleep
            return data;
        }

        // Double the delay until it saturates at roughly half a second
        if delay.as_millis() < 512 {
            delay = delay.mul_f32(2.0);
        }
        std::thread::sleep(delay);
    }
}
/// High-level helpers on top of the raw RPC/Electrum clients. Note that the
/// `Deref<Target = RpcClient>` impl below makes every `RpcApi` method
/// callable directly on `TestClient`.
impl TestClient {
    /// Connect to bitcoind and electrs. The RPC URL comes from
    /// `MAGICAL_RPC_URL` (default `127.0.0.1:18443`), credentials from
    /// `get_auth()`, and the Electrum URL from `get_electrum_url()`.
    pub fn new() -> Self {
        let url = env::var("MAGICAL_RPC_URL").unwrap_or("127.0.0.1:18443".to_string());
        let client = RpcClient::new(format!("http://{}", url), get_auth()).unwrap();
        let electrum = ElectrumClient::new(&get_electrum_url(), None).unwrap();

        TestClient { client, electrum }
    }

    /// Block until electrs reports `txid` in the history of `monitor_script`.
    fn wait_for_tx(&mut self, txid: Txid, monitor_script: &Script) {
        // wait for electrs to index the tx
        exponential_backoff_poll(|| {
            trace!("wait_for_tx {}", txid);

            // `position` yields Some(..) once the txid appears; the index
            // itself is discarded — we only poll for existence
            self.electrum
                .script_get_history(monitor_script)
                .unwrap()
                .iter()
                .position(|entry| entry.tx_hash == txid)
        });
    }

    /// Block until electrs announces a chain tip at height >= `min_height`.
    fn wait_for_block(&mut self, min_height: usize) {
        self.electrum.block_headers_subscribe().unwrap();

        loop {
            let header = exponential_backoff_poll(|| {
                // the ping keeps the connection alive while we wait
                self.electrum.ping().unwrap();
                self.electrum.block_headers_pop().unwrap()
            });
            if header.height >= min_height {
                break;
            }
        }
    }

    /// Create, fund, sign and broadcast the transaction described by
    /// `meta_tx`, optionally mining `min_confirmations` blocks afterwards,
    /// then wait for electrs to index it. Returns the new txid.
    ///
    /// Panics if `meta_tx` has no outputs or if bitcoind lacks funds.
    pub fn receive(&mut self, meta_tx: TestIncomingTx) -> Txid {
        assert!(
            meta_tx.output.len() > 0,
            "can't create a transaction with no outputs"
        );

        let mut map = HashMap::new();

        let mut required_balance = 0;
        for out in &meta_tx.output {
            required_balance += out.value;
            map.insert(out.to_address.clone(), Amount::from_sat(out.value));
        }

        // NOTE(review): "Plase" typo in this panic message (runtime string,
        // intentionally left untouched here)
        if self.get_balance(None, None).unwrap() < Amount::from_sat(required_balance) {
            panic!("Insufficient funds in bitcoind. Plase generate a few blocks with: `bitcoin-cli generatetoaddress 10 {}`", self.get_new_address(None, None).unwrap());
        }

        // FIXME: core can't create a tx with two outputs to the same address
        let tx = self
            .create_raw_transaction_hex(&[], &map, meta_tx.locktime, meta_tx.replaceable)
            .unwrap();
        let tx = self.fund_raw_transaction(tx, None, None).unwrap();
        let mut tx: Transaction = deserialize(&tx.hex).unwrap();

        if let Some(true) = meta_tx.replaceable {
            // for some reason core doesn't set this field right
            for input in &mut tx.input {
                input.sequence = 0xFFFFFFFD;
            }
        }

        let tx = self
            .sign_raw_transaction_with_wallet(&serialize(&tx), None, None)
            .unwrap();

        // broadcast through electrum so that it caches the tx immediately
        let txid = self
            .electrum
            .transaction_broadcast(&deserialize(&tx.hex).unwrap())
            .unwrap();

        if let Some(num) = meta_tx.min_confirmations {
            self.generate(num);
        }

        // watch the first output's address to know when electrs catches up
        let monitor_script = Address::from_str(&meta_tx.output[0].to_address)
            .unwrap()
            .script_pubkey();
        self.wait_for_tx(txid, &monitor_script);

        debug!("Sent tx: {}", txid);

        txid
    }

    /// Ask bitcoind to RBF-bump `txid` via the `bumpfee` RPC and wait for the
    /// replacement to be indexed. Panics if the tx is already confirmed.
    pub fn bump_fee(&mut self, txid: &Txid) -> Txid {
        let tx = self.get_raw_transaction_info(txid, None).unwrap();
        assert!(
            tx.confirmations.is_none(),
            "Can't bump tx {} because it's already confirmed",
            txid
        );

        let bumped: serde_json::Value = self.call("bumpfee", &[txid.to_string().into()]).unwrap();
        let new_txid = Txid::from_str(&bumped["txid"].as_str().unwrap().to_string()).unwrap();

        // monitor the first output's address of the original tx
        let monitor_script =
            tx.vout[0].script_pub_key.addresses.as_ref().unwrap()[0].script_pubkey();
        self.wait_for_tx(new_txid, &monitor_script);

        debug!("Bumped {}, new txid {}", txid, new_txid);

        new_txid
    }

    /// Hand-assemble a block containing `txs` (plus a subsidy-burning
    /// coinbase), grind trivial regtest PoW, submit it, and wait for electrs
    /// to see it. Returns the new block hash as hex.
    pub fn generate_manually(&mut self, txs: Vec<Transaction>) -> String {
        use bitcoin::blockdata::block::{Block, BlockHeader};
        use bitcoin::blockdata::script::Builder;
        use bitcoin::blockdata::transaction::{OutPoint, TxIn, TxOut};
        use bitcoin::hash_types::{BlockHash, TxMerkleNode};

        let block_template: serde_json::Value = self
            .call("getblocktemplate", &[json!({"rules": ["segwit"]})])
            .unwrap();
        trace!("getblocktemplate: {:#?}", block_template);

        let header = BlockHeader {
            version: block_template["version"].as_u64().unwrap() as u32,
            prev_blockhash: BlockHash::from_hex(
                block_template["previousblockhash"].as_str().unwrap(),
            )
            .unwrap(),
            // placeholder — the real merkle root is set after the coinbase
            // gets its witness commitment output
            merkle_root: TxMerkleNode::default(),
            time: block_template["curtime"].as_u64().unwrap() as u32,
            bits: u32::from_str_radix(block_template["bits"].as_str().unwrap(), 16).unwrap(),
            nonce: 0,
        };
        debug!("header: {:#?}", header);

        let height = block_template["height"].as_u64().unwrap() as i64;
        // all-zero witness reserved value carried in the coinbase witness
        let witness_reserved_value: Vec<u8> = sha256d::Hash::default().as_ref().into();
        // burn block subsidy and fees, not a big deal
        let mut coinbase_tx = Transaction {
            version: 1,
            lock_time: 0,
            input: vec![TxIn {
                previous_output: OutPoint::null(),
                // the coinbase scriptSig pushes the block height (BIP34 form)
                script_sig: Builder::new().push_int(height).into_script(),
                sequence: 0xFFFFFFFF,
                witness: vec![witness_reserved_value],
            }],
            output: vec![],
        };

        let mut txdata = vec![coinbase_tx.clone()];
        txdata.extend_from_slice(&txs);

        let mut block = Block { header, txdata };

        let witness_root = block.witness_root();
        let witness_commitment =
            Block::compute_witness_commitment(&witness_root, &coinbase_tx.input[0].witness[0]);

        // now update and replace the coinbase tx
        // 0x6a24aa21a9ed is the standard witness-commitment OP_RETURN prefix
        let mut coinbase_witness_commitment_script = vec![0x6a, 0x24, 0xaa, 0x21, 0xa9, 0xed];
        coinbase_witness_commitment_script.extend_from_slice(&witness_commitment);

        coinbase_tx.output.push(TxOut {
            value: 0,
            script_pubkey: coinbase_witness_commitment_script.into(),
        });
        block.txdata[0] = coinbase_tx;

        // set merkle root
        let merkle_root = block.merkle_root();
        block.header.merkle_root = merkle_root;

        assert!(block.check_merkle_root());
        assert!(block.check_witness_commitment());

        // now do PoW :)
        let target = block.header.target();
        while block.header.validate_pow(&target).is_err() {
            block.header.nonce = block.header.nonce.checked_add(1).unwrap(); // panic if we run out of nonces
        }

        let block_hex: String = serialize(&block).to_hex();
        debug!("generated block hex: {}", block_hex);

        // subscribe before submitting so wait_for_block sees the new tip
        self.electrum.block_headers_subscribe().unwrap();

        let submit_result: serde_json::Value =
            self.call("submitblock", &[block_hex.into()]).unwrap();
        debug!("submitblock: {:?}", submit_result);
        assert!(
            submit_result.is_null(),
            "submitblock error: {:?}",
            submit_result.as_str()
        );

        self.wait_for_block(height as usize);

        block.header.block_hash().to_hex()
    }

    /// Mine `num_blocks` blocks to a fresh node address and wait for electrs
    /// to reach the new tip.
    pub fn generate(&mut self, num_blocks: u64) {
        let our_addr = self.get_new_address(None, None).unwrap();
        let hashes = self.generate_to_address(num_blocks, &our_addr).unwrap();
        let best_hash = hashes.last().unwrap();
        let height = self.get_block_info(best_hash).unwrap().height;

        self.wait_for_block(height);

        debug!("Generated blocks to new height {}", height);
    }

    /// Invalidate the top `num_blocks` blocks and wait for electrs to roll
    /// back to the shortened chain.
    pub fn invalidate(&mut self, num_blocks: u64) {
        self.electrum.block_headers_subscribe().unwrap();

        let best_hash = self.get_best_block_hash().unwrap();
        let initial_height = self.get_block_info(&best_hash).unwrap().height;

        // invalidate from the tip downwards, one block at a time
        let mut to_invalidate = best_hash;
        for i in 1..=num_blocks {
            trace!(
                "Invalidating block {}/{} ({})",
                i,
                num_blocks,
                to_invalidate
            );

            self.invalidate_block(&to_invalidate).unwrap();
            to_invalidate = self.get_best_block_hash().unwrap();
        }

        self.wait_for_block(initial_height - num_blocks as usize);

        debug!(
            "Invalidated {} blocks to new height of {}",
            num_blocks,
            initial_height - num_blocks as usize
        );
    }

    /// Force a reorg: invalidate `num_blocks` and re-mine the same number.
    pub fn reorg(&mut self, num_blocks: u64) {
        self.invalidate(num_blocks);
        self.generate(num_blocks);
    }

    /// Get a fresh address owned by the bitcoind node wallet.
    pub fn get_node_address(&self, address_type: Option<AddressType>) -> Address {
        Address::from_str(
            &self
                .get_new_address(None, address_type)
                .unwrap()
                .to_string(),
        )
        .unwrap()
    }
}
/// Forward method calls to the inner RPC client, so every `RpcApi` method is
/// available directly on `TestClient`.
impl Deref for TestClient {
    type Target = RpcClient;

    fn deref(&self) -> &Self::Target {
        &self.client
    }
}