Compare commits
993 Commits
v0.2.0
...
v1.0.0-alp
| Author | SHA1 | Date | |
|---|---|---|---|
|
|
30c11904a7 | ||
|
|
82f9caddab | ||
|
|
3b68a7bcc0 | ||
|
|
919e74aa8d | ||
|
|
72b1e2a485 | ||
|
|
2ae69ca10b | ||
|
|
877b658787 | ||
|
|
82f5d9c81e | ||
|
|
24df03afd6 | ||
|
|
cd4945af3a | ||
|
|
bc3e05c6c6 | ||
|
|
352f95f558 | ||
|
|
2fcf9c4adb | ||
|
|
5dd4ce74cf | ||
|
|
ae9b19d84c | ||
|
|
def0c9ed39 | ||
|
|
26ab2e2d6c | ||
|
|
ab9242d10d | ||
|
|
0aaf420f6d | ||
|
|
47faa881fb | ||
|
|
9d26121dbc | ||
|
|
eddd748870 | ||
|
|
0505cd7242 | ||
|
|
de9457fce6 | ||
|
|
69cf6d7924 | ||
|
|
b3836cb308 | ||
|
|
b082932268 | ||
|
|
d267517dbd | ||
|
|
0c7a0abb19 | ||
|
|
dfcbafd6b1 | ||
|
|
0ba41c5751 | ||
|
|
a38f63359d | ||
|
|
38ef170ed1 | ||
|
|
3a5d727899 | ||
|
|
96d932c830 | ||
|
|
5708bf0c8c | ||
|
|
5acee82496 | ||
|
|
8c9bcebc71 | ||
|
|
c61b3604e1 | ||
|
|
1805bd35c0 | ||
|
|
3f5a78ae3b | ||
|
|
303a1703c9 | ||
|
|
b5559767db | ||
|
|
2e82cd8c04 | ||
|
|
c069b0fb41 | ||
|
|
949608ab1f | ||
|
|
03deafb553 | ||
|
|
37dfa77d9d | ||
|
|
f2188f9dcd | ||
|
|
1c970a9295 | ||
|
|
94a084aafd | ||
|
|
9edbdf54c9 | ||
|
|
20e45b7af0 | ||
|
|
6d05598407 | ||
|
|
b60820a7b5 | ||
|
|
22bec6d363 | ||
|
|
8a6de3aa2d | ||
|
|
fdfc9b9ede | ||
|
|
e1eb0253cf | ||
|
|
3baf9721ec | ||
|
|
b310a7afdd | ||
|
|
5985706c1a | ||
|
|
57538e53e4 | ||
|
|
a40da9ba6c | ||
|
|
aab2b12f7a | ||
|
|
544c397a38 | ||
|
|
ced2d05e64 | ||
|
|
843807b08f | ||
|
|
231a1fba61 | ||
|
|
74119e70c3 | ||
|
|
8b2943c49b | ||
|
|
a1a70a5011 | ||
|
|
2d173a17f7 | ||
|
|
5b9e0e392a | ||
|
|
c2a42493fd | ||
|
|
0c2570ae07 | ||
|
|
e83bb7c4dc | ||
|
|
46273fe72f | ||
|
|
0b26fa75dc | ||
|
|
35bbe2beef | ||
|
|
9e7bad8afa | ||
|
|
4ada11f358 | ||
|
|
cf8cd2f2b4 | ||
|
|
147a4ed141 | ||
|
|
97f8fe3fd1 | ||
|
|
f0cec015b5 | ||
|
|
ff72078095 | ||
|
|
e678aad3c7 | ||
|
|
41dc7f7d0d | ||
|
|
6b92a169ab | ||
|
|
45d41416ed | ||
|
|
9019793bd4 | ||
|
|
32912eaa05 | ||
|
|
2e7a220e39 | ||
|
|
b02bfb347d | ||
|
|
0cce1ce982 | ||
|
|
fb76c9ed9a | ||
|
|
3a782b3b0d | ||
|
|
eac739d395 | ||
|
|
6e5873ebba | ||
|
|
3205f0c16d | ||
|
|
5f0870a741 | ||
|
|
5a483472c1 | ||
|
|
8d4cc3920a | ||
|
|
14bc9c0e35 | ||
|
|
2451c00268 | ||
|
|
4cad18bbca | ||
|
|
634a0575cb | ||
|
|
d3d07564f2 | ||
|
|
0b768d6f0b | ||
|
|
ec9aefac6b | ||
|
|
d72aa7ebc0 | ||
|
|
99930af12e | ||
|
|
d6e730f18a | ||
|
|
d1e5b87bfc | ||
|
|
c101dea460 | ||
|
|
9ddd502538 | ||
|
|
a5d345fff2 | ||
|
|
11dcc14374 | ||
|
|
4c5ceaff14 | ||
|
|
b5fcddcf1a | ||
|
|
d570ff2c65 | ||
|
|
21c96c9c81 | ||
|
|
c51d544932 | ||
|
|
5e56c3b3c1 | ||
|
|
235961a934 | ||
|
|
df905a8d5e | ||
|
|
8b68cf9546 | ||
|
|
150f4d6f41 | ||
|
|
1c95ca33a8 | ||
|
|
108edc3a6b | ||
|
|
f99a6b9f43 | ||
|
|
aedbc8c97d | ||
|
|
5c42102c79 | ||
|
|
5d5b2fb88c | ||
|
|
9cb6f70fc0 | ||
|
|
5720e38033 | ||
|
|
e9bbb8724f | ||
|
|
648282e602 | ||
|
|
c7a43d941f | ||
|
|
1ffd59d469 | ||
|
|
ae4f4e5416 | ||
|
|
9854fd34ea | ||
|
|
60057a7bf7 | ||
|
|
ea47d7a35b | ||
|
|
f2181f5467 | ||
|
|
34987d58ec | ||
|
|
1c76084db8 | ||
|
|
68dd6d2031 | ||
|
|
1437e1ecfe | ||
|
|
1a71eb1f47 | ||
|
|
0695e9fb3e | ||
|
|
a4a43ea860 | ||
|
|
b627455b8f | ||
|
|
1331193800 | ||
|
|
7de8be46c0 | ||
|
|
55145f57a1 | ||
|
|
8e8fd49e04 | ||
|
|
d7bfe68e2d | ||
|
|
b11c86d074 | ||
|
|
e2a4a5884b | ||
|
|
fd34956c29 | ||
|
|
b5b92248c7 | ||
|
|
cf2bc388f2 | ||
|
|
5baf46f84d | ||
|
|
a8cf34e809 | ||
|
|
af0b3698c6 | ||
|
|
92ad4876c4 | ||
|
|
b14e4ee3a0 | ||
|
|
e6f2d029fa | ||
|
|
bbf524b3f9 | ||
|
|
dbf6bf5fdf | ||
|
|
aff41d6e1c | ||
|
|
e2bf9734b1 | ||
|
|
c3faf05be9 | ||
|
|
aad5461ee1 | ||
|
|
0a7a1f4ef2 | ||
|
|
5e9965fca7 | ||
|
|
54d768412a | ||
|
|
97b6fb06aa | ||
|
|
da7670801b | ||
|
|
e1fa0b6695 | ||
|
|
dfeb08fa00 | ||
|
|
8963e8c9f4 | ||
|
|
562cb81cad | ||
|
|
b12dec3620 | ||
|
|
e65edbf53c | ||
|
|
88307045b0 | ||
|
|
e06c3f945c | ||
|
|
ab41679368 | ||
|
|
7b12f35698 | ||
|
|
aa0ea6aeff | ||
|
|
c3a7bbb3ff | ||
|
|
1c4d47825b | ||
|
|
fa998de4b1 | ||
|
|
06310f1dd0 | ||
|
|
8dd02094df | ||
|
|
0010ecd94a | ||
|
|
690411722e | ||
|
|
7001b14b4c | ||
|
|
13cf72ffa7 | ||
|
|
d7163c3a97 | ||
|
|
cf13c80991 | ||
|
|
7c57965999 | ||
|
|
3d69f1c291 | ||
|
|
3451d1c12e | ||
|
|
4fbd8520e6 | ||
|
|
bfd7b2f65d | ||
|
|
061f15af00 | ||
|
|
369e17b801 | ||
|
|
2bff4e5e56 | ||
|
|
138acc3b7d | ||
|
|
d6e1dd1040 | ||
|
|
76034772cb | ||
|
|
12507c707f | ||
|
|
de358f8cdc | ||
|
|
08668ac462 | ||
|
|
0a3734ed2b | ||
|
|
a5d1a3d65c | ||
|
|
7bc2980905 | ||
|
|
34e792e193 | ||
|
|
7b1ad1b629 | ||
|
|
a8f9f6c43a | ||
|
|
c9b1b6d076 | ||
|
|
cd078903a7 | ||
|
|
588c17ff69 | ||
|
|
baf7eaace6 | ||
|
|
8026bd9476 | ||
|
|
e2bd96012a | ||
|
|
9be63e66ec | ||
|
|
9f9ffd0efd | ||
|
|
2db881519a | ||
|
|
d9adfbe047 | ||
|
|
74e2c477f1 | ||
|
|
c5952dd09a | ||
|
|
134b19a9cb | ||
|
|
2c01b6118f | ||
|
|
03d3c786f2 | ||
|
|
0f03831274 | ||
|
|
dc7adb7161 | ||
|
|
5eeba6cced | ||
|
|
5eb74af414 | ||
|
|
ac19c19f21 | ||
|
|
ef03da0a76 | ||
|
|
9d85c9667f | ||
|
|
85bd126c6c | ||
|
|
7fdacdbad4 | ||
|
|
9c0a769675 | ||
|
|
11865fddff | ||
|
|
e8df3d2d91 | ||
|
|
a63c51f35d | ||
|
|
1730e0150f | ||
|
|
5a415979af | ||
|
|
a713a5a062 | ||
|
|
419dc248b6 | ||
|
|
632dabaa07 | ||
|
|
2756411ef7 | ||
|
|
50af51da5a | ||
|
|
ae919061e2 | ||
|
|
7ac87b8f99 | ||
|
|
ac051d7ae9 | ||
|
|
00d426b885 | ||
|
|
42fde6d457 | ||
|
|
8e0d00a3ea | ||
|
|
235011feef | ||
|
|
a1477405d1 | ||
|
|
558e37afa7 | ||
|
|
6bae52e6f2 | ||
|
|
32ae95f463 | ||
|
|
3644a452c1 | ||
|
|
5c940c33cb | ||
|
|
277e18f5cb | ||
|
|
8d3b2a9581 | ||
|
|
45a4ae5828 | ||
|
|
6db5b4a094 | ||
|
|
9d2024434e | ||
|
|
9165faef95 | ||
|
|
46c344feb0 | ||
|
|
78d26f6eb3 | ||
|
|
844856d39e | ||
|
|
b5a120c649 | ||
|
|
92b9597f8b | ||
|
|
556105780b | ||
|
|
af6bde3997 | ||
|
|
4bd1fd2441 | ||
|
|
45db468c9b | ||
|
|
2c02a44586 | ||
|
|
01141bed5a | ||
|
|
87e8646743 | ||
|
|
dd51380520 | ||
|
|
73d4f6d3b1 | ||
|
|
2af678aa84 | ||
|
|
1c94108d7e | ||
|
|
5d00f82388 | ||
|
|
98748906f6 | ||
|
|
dd832cb57a | ||
|
|
e3a17f67d9 | ||
|
|
c2e4ba8cbd | ||
|
|
1d9fdd01fa | ||
|
|
db9d43ed2f | ||
|
|
ec22fa2ad0 | ||
|
|
0e92820af4 | ||
|
|
e85aa247cb | ||
|
|
612da165f8 | ||
|
|
1fd62a7afc | ||
|
|
8a5f89e129 | ||
|
|
063d51fd75 | ||
|
|
0e0d5a0e95 | ||
|
|
bb55923a7d | ||
|
|
f184557fa0 | ||
|
|
77c7d0aae9 | ||
|
|
5ff8320e3b | ||
|
|
e68d3b9e63 | ||
|
|
97bc9dc717 | ||
|
|
6a15036867 | ||
|
|
17d0ae0f71 | ||
|
|
d020dede37 | ||
|
|
5c566bb05e | ||
|
|
b289c4ec2d | ||
|
|
2283444f72 | ||
|
|
a0e5820c32 | ||
|
|
04dc28d2b4 | ||
|
|
fa4c73a4d1 | ||
|
|
2bf8121b18 | ||
|
|
688ff96c8e | ||
|
|
ed3ef94071 | ||
|
|
ed78d18f60 | ||
|
|
e1a1372bae | ||
|
|
3283a200bc | ||
|
|
3f9b4cdca9 | ||
|
|
a85ef62698 | ||
|
|
32699234b6 | ||
|
|
8fbe40a918 | ||
|
|
d9b9b3dc46 | ||
|
|
20d36c71d4 | ||
|
|
ef08fbd3c7 | ||
|
|
5320c8353e | ||
|
|
c1bfaf9b1e | ||
|
|
0643f76c1f | ||
|
|
89cb425e69 | ||
|
|
461397e590 | ||
|
|
c67116fb55 | ||
|
|
572c3ee70d | ||
|
|
ff1abc63e0 | ||
|
|
308708952b | ||
|
|
fe1877fb18 | ||
|
|
cdc7057813 | ||
|
|
c121dd0252 | ||
|
|
8553821133 | ||
|
|
8a5a87b075 | ||
|
|
1312184ed7 | ||
|
|
906598ad92 | ||
|
|
fbd98b4c5a | ||
|
|
87b07456bd | ||
|
|
82de8b50da | ||
|
|
35feb107ed | ||
|
|
2471908151 | ||
|
|
0b1a399f4e | ||
|
|
cea79872d7 | ||
|
|
4c1749a13a | ||
|
|
939a1156c6 | ||
|
|
e5486536ae | ||
|
|
00164588f2 | ||
|
|
a16c18255c | ||
|
|
7aa2746c51 | ||
|
|
616aa8259a | ||
|
|
8795da4839 | ||
|
|
9c405e9c70 | ||
|
|
2d83af4905 | ||
|
|
b4100a7189 | ||
|
|
cfb67fc25b | ||
|
|
7201e09db9 | ||
|
|
e7a56a9268 | ||
|
|
4628a10191 | ||
|
|
2f325328c5 | ||
|
|
cca69481eb | ||
|
|
6e8744d59d | ||
|
|
79f73df545 | ||
|
|
9db8d3a410 | ||
|
|
b5c8ce924b | ||
|
|
e3ce50059f | ||
|
|
8a2a6bbcee | ||
|
|
122e6e7140 | ||
|
|
1018bb2b17 | ||
|
|
9ed36875f1 | ||
|
|
502882d27c | ||
|
|
a328607d27 | ||
|
|
f90e3f978e | ||
|
|
68e1b32d81 | ||
|
|
44758f9483 | ||
|
|
c350064dae | ||
|
|
92746440db | ||
|
|
e4eb95fb9c | ||
|
|
c307bacb9c | ||
|
|
a111d25476 | ||
|
|
c752ccbdde | ||
|
|
0621ca89d5 | ||
|
|
adef166b22 | ||
|
|
213f18f7b7 | ||
|
|
8cd055090d | ||
|
|
1b9014846c | ||
|
|
9c0141b5e3 | ||
|
|
2698fc0219 | ||
|
|
6931d0bd1f | ||
|
|
545beec743 | ||
|
|
bac15bb207 | ||
|
|
06b80fdb15 | ||
|
|
ff6db18726 | ||
|
|
86abd8698f | ||
|
|
0d9c2f76e0 | ||
|
|
63d5bcee93 | ||
|
|
8a98e69e78 | ||
|
|
c6eeb7b989 | ||
|
|
3334c8da07 | ||
|
|
ce09203431 | ||
|
|
cac312d34f | ||
|
|
4b1be68965 | ||
|
|
559cfc4373 | ||
|
|
1e9a684b54 | ||
|
|
52bc63e48f | ||
|
|
9a6db15d26 | ||
|
|
52bcd105eb | ||
|
|
1803f5ea8a | ||
|
|
f2f0efc0b3 | ||
|
|
3e4678d8e3 | ||
|
|
0cc4700bd6 | ||
|
|
660faab1e2 | ||
|
|
45767fcaf7 | ||
|
|
d03aa85108 | ||
|
|
adf7d0c126 | ||
|
|
4291f84d79 | ||
|
|
f0188f49a8 | ||
|
|
edf2f0ce06 | ||
|
|
364ad95e85 | ||
|
|
fbb50ad1c8 | ||
|
|
035307ef54 | ||
|
|
c0e75fc1a8 | ||
|
|
dcd90f8b61 | ||
|
|
410a51355b | ||
|
|
326bfe82a8 | ||
|
|
b23a0747b5 | ||
|
|
022256c91a | ||
|
|
00f0901bac | ||
|
|
19f028714b | ||
|
|
ad65dd5c23 | ||
|
|
1999d97aeb | ||
|
|
0195bc0636 | ||
|
|
760a6ca1a1 | ||
|
|
552765bb58 | ||
|
|
f3e479fa7f | ||
|
|
5698c683c6 | ||
|
|
a83aa0461c | ||
|
|
bfd0d13779 | ||
|
|
128c37595c | ||
|
|
5c5bb7833c | ||
|
|
b04bb590f3 | ||
|
|
0efbece41a | ||
|
|
b6fe01c466 | ||
|
|
1d7ea89d8a | ||
|
|
b05ee78c73 | ||
|
|
53c30b0479 | ||
|
|
6a09075d1a | ||
|
|
61a95d0d15 | ||
|
|
08f312a82f | ||
|
|
acbf0ae08e | ||
|
|
4761155707 | ||
|
|
98a3b3282a | ||
|
|
e745122bf5 | ||
|
|
07c270db03 | ||
|
|
375674ffff | ||
|
|
fcf422752b | ||
|
|
6fb42fdea1 | ||
|
|
3f65e8c64b | ||
|
|
3f0101d317 | ||
|
|
b1346d4ccf | ||
|
|
5107ff80c1 | ||
|
|
5ac51dfe74 | ||
|
|
04d58f7903 | ||
|
|
380a4f2588 | ||
|
|
9e30a79027 | ||
|
|
fdb272e039 | ||
|
|
d2b6b5545e | ||
|
|
db6ffb90f0 | ||
|
|
947a9c29db | ||
|
|
61ee2a9c1c | ||
|
|
44e4c5dac5 | ||
|
|
e09aaf055a | ||
|
|
c40898ba08 | ||
|
|
2f98db8549 | ||
|
|
4d7c4bc810 | ||
|
|
a0c140bb29 | ||
|
|
bf5994b14a | ||
|
|
ca682819b3 | ||
|
|
ee41d88f25 | ||
|
|
beb1e4114d | ||
|
|
af047f90db | ||
|
|
d01ec6d259 | ||
|
|
77bce06caf | ||
|
|
98c26a1ad9 | ||
|
|
1a907f8a53 | ||
|
|
e82edbb7ac | ||
|
|
57a1185aef | ||
|
|
64e88f0e00 | ||
|
|
f7f9bd2409 | ||
|
|
68a3d2b1cc | ||
|
|
aa13186fb0 | ||
|
|
02980881ac | ||
|
|
69b184a0a4 | ||
|
|
084ec036a5 | ||
|
|
c1af456e58 | ||
|
|
d20b649eb8 | ||
|
|
fed4a59728 | ||
|
|
c175dd2aae | ||
|
|
8534cd3943 | ||
|
|
3a07614fdb | ||
|
|
b2ac4a0dfd | ||
|
|
7f8103dd76 | ||
|
|
b9fc06195b | ||
|
|
a630685a0a | ||
|
|
2fc8114180 | ||
|
|
6b1cbcc4b7 | ||
|
|
afa1ab4ff8 | ||
|
|
632422a3ab | ||
|
|
54f61d17f2 | ||
|
|
5830226216 | ||
|
|
2c77329333 | ||
|
|
3e5bb077ac | ||
|
|
7c06f52a07 | ||
|
|
12e51b3c06 | ||
|
|
2892edf94b | ||
|
|
9c5770831d | ||
|
|
0f0a01a742 | ||
|
|
1a64fd9c95 | ||
|
|
d3779fac73 | ||
|
|
d39401162f | ||
|
|
dfb63d389b | ||
|
|
188d9a4a8b | ||
|
|
5eadf5ccf9 | ||
|
|
aaad560a91 | ||
|
|
e7c13575c8 | ||
|
|
808d7d8463 | ||
|
|
732166fcb6 | ||
|
|
3f5cb6997f | ||
|
|
aa075f0b2f | ||
|
|
8010d692e9 | ||
|
|
b2d7412d6d | ||
|
|
fd51029197 | ||
|
|
711510006b | ||
|
|
d21b6e47ab | ||
|
|
5922c216a1 | ||
|
|
9e29e2d2b1 | ||
|
|
16e832533c | ||
|
|
7f91bcdf1a | ||
|
|
35695d8795 | ||
|
|
756858e882 | ||
|
|
d2ce2714f2 | ||
|
|
3b2b559910 | ||
|
|
3c8416bf31 | ||
|
|
f6f736609f | ||
|
|
5cb0726780 | ||
|
|
8781599740 | ||
|
|
ee8b992f8b | ||
|
|
3d8efbf8bf | ||
|
|
a2e26f1b57 | ||
|
|
5f5744e897 | ||
|
|
e106136227 | ||
|
|
d75d221540 | ||
|
|
548e43d928 | ||
|
|
a348dbdcfe | ||
|
|
b638039655 | ||
|
|
7e085a86dd | ||
|
|
59f795f176 | ||
|
|
2da10382e7 | ||
|
|
6d18502733 | ||
|
|
81b263f235 | ||
|
|
2f38d3e526 | ||
|
|
2ee125655b | ||
|
|
22c39b7b78 | ||
|
|
18f1107c41 | ||
|
|
763bcc22ab | ||
|
|
9e4ca516a8 | ||
|
|
b60465f31e | ||
|
|
1469a3487a | ||
|
|
8c21bcf40a | ||
|
|
c9ed8bdf6c | ||
|
|
919522a456 | ||
|
|
678607e673 | ||
|
|
c06d9f1d33 | ||
|
|
5a6a2cefdd | ||
|
|
3fe2380d6c | ||
|
|
eea8b135a4 | ||
|
|
a685b22aa6 | ||
|
|
c601ae3271 | ||
|
|
c23692824d | ||
|
|
46f7b440f5 | ||
|
|
562fde7953 | ||
|
|
9e508748a3 | ||
|
|
84b8579df5 | ||
|
|
7cb0116c44 | ||
|
|
6e12468b12 | ||
|
|
326b64de3a | ||
|
|
5edf663f3d | ||
|
|
e3dd755396 | ||
|
|
b500cfe4e5 | ||
|
|
10b53a56d7 | ||
|
|
8d1d92e71e | ||
|
|
a41a0030dc | ||
|
|
2459740f72 | ||
|
|
5694b98304 | ||
|
|
aa786fbb21 | ||
|
|
8c570ae7eb | ||
|
|
56a7bc9874 | ||
|
|
dd4bd96f79 | ||
|
|
2caa590438 | ||
|
|
2a53cfc23f | ||
|
|
cf1815a1c0 | ||
|
|
acf157a99a | ||
|
|
fb813427eb | ||
|
|
721748e98f | ||
|
|
976e641ba6 | ||
|
|
7117557dea | ||
|
|
fa013aeb83 | ||
|
|
470d02c81c | ||
|
|
38d1d0b0e2 | ||
|
|
582d2f3814 | ||
|
|
5e0011e1a8 | ||
|
|
39d2bd0d21 | ||
|
|
0e10952b80 | ||
|
|
19d74955e2 | ||
|
|
73a7faf144 | ||
|
|
ea56a87b4b | ||
|
|
67f5f45e07 | ||
|
|
b8680b299d | ||
|
|
a5d3a4d31a | ||
|
|
d03d3c0dbd | ||
|
|
9aba3196ff | ||
|
|
c8593ecf70 | ||
|
|
cbec0b0bcf | ||
|
|
e80be49d1e | ||
|
|
fe30716fa2 | ||
|
|
e52550cfec | ||
|
|
f57c0ca98e | ||
|
|
c54e1e9652 | ||
|
|
5cdc5fb58a | ||
|
|
27cd9bbcd6 | ||
|
|
f37e735b43 | ||
|
|
adceafa40c | ||
|
|
2b0c4f0817 | ||
|
|
e9428433a0 | ||
|
|
63592f169f | ||
|
|
27600f4a11 | ||
|
|
77eae76459 | ||
|
|
ad69702aa3 | ||
|
|
fd254536d3 | ||
|
|
c4d5dd14fa | ||
|
|
13bed2667a | ||
|
|
2db24fb8c5 | ||
|
|
d2d37fc06d | ||
|
|
2986fce7c6 | ||
|
|
1dc648508c | ||
|
|
474620e6a5 | ||
|
|
a5919f4ab0 | ||
|
|
7e986fd904 | ||
|
|
77379e9262 | ||
|
|
ea699a6ec1 | ||
|
|
81c1ccb185 | ||
|
|
4f4802b0f3 | ||
|
|
bab9d99a00 | ||
|
|
22f4db0de1 | ||
|
|
a6ce75fa2d | ||
|
|
7597645ed6 | ||
|
|
618e0d3700 | ||
|
|
44d0e8d07c | ||
|
|
7a9b691f68 | ||
|
|
4e813e8869 | ||
|
|
53409ef3ae | ||
|
|
f8a6e1c3f4 | ||
|
|
c1077b95cf | ||
|
|
fa5103b0eb | ||
|
|
e5d4994329 | ||
|
|
d1658a2eda | ||
|
|
879e5cf319 | ||
|
|
928f9c6112 | ||
|
|
814ab4c855 | ||
|
|
58cf46050f | ||
|
|
b6beef77e7 | ||
|
|
7ed0676e44 | ||
|
|
595e1bdbe1 | ||
|
|
7555d3b430 | ||
|
|
fbdee52f2f | ||
|
|
50597fd73f | ||
|
|
975905c8ea | ||
|
|
a67aca32c0 | ||
|
|
7873dd5e40 | ||
|
|
a186d82f9a | ||
|
|
7109f7d9b4 | ||
|
|
f52fda4b4b | ||
|
|
a6be470fe4 | ||
|
|
8e41c4587d | ||
|
|
2ecae348ea | ||
|
|
f4ecfa0d49 | ||
|
|
696647b893 | ||
|
|
18dcda844f | ||
|
|
6394c3e209 | ||
|
|
42adad7dbd | ||
|
|
4498e0f7f8 | ||
|
|
476fa3fd7d | ||
|
|
2755b09e7b | ||
|
|
5e6286a493 | ||
|
|
67714adc80 | ||
|
|
9ff86ea37c | ||
|
|
ceeb3a40cf | ||
|
|
e3316aee4c | ||
|
|
c2567b61aa | ||
|
|
e1a77b87ab | ||
|
|
5bf758b03a | ||
|
|
0bbfa5f989 | ||
|
|
18254110c6 | ||
|
|
44217539e5 | ||
|
|
33b45ebe82 | ||
|
|
2faed425ed | ||
|
|
2cc05c07a5 | ||
|
|
fe371f9d92 | ||
|
|
12de13b95c | ||
|
|
9205295332 | ||
|
|
3b446c9e14 | ||
|
|
378167efca | ||
|
|
224be27aa8 | ||
|
|
4a23070cc8 | ||
|
|
ba2e3042cc | ||
|
|
f8117c0f9f | ||
|
|
1639984b56 | ||
|
|
ab54a17eb7 | ||
|
|
ae5aa06586 | ||
|
|
ab98283159 | ||
|
|
81851190f0 | ||
|
|
e1b037a921 | ||
|
|
9b7ed08891 | ||
|
|
dffb753ce3 | ||
|
|
0b969657cd | ||
|
|
bfef2e3cfe | ||
|
|
0ec064ef13 | ||
|
|
6b60914ca1 | ||
|
|
881ca8d1e3 | ||
|
|
5633475ce8 | ||
|
|
ea8488b2a7 | ||
|
|
d2a981efee | ||
|
|
4c92daf517 | ||
|
|
aba2a05d83 | ||
|
|
5b194c268d | ||
|
|
00bdf08f2a | ||
|
|
38b0470b14 | ||
|
|
d60c5003bf | ||
|
|
fcae5adabd | ||
|
|
9f04a9d82d | ||
|
|
465ef6e674 | ||
|
|
aaa9943a5f | ||
|
|
3897e29740 | ||
|
|
8f06e45872 | ||
|
|
766570abfd | ||
|
|
934ec366d9 | ||
|
|
d0733e9496 | ||
|
|
3c7a1f5918 | ||
|
|
85aadaccd2 | ||
|
|
fad0fe9f30 | ||
|
|
6546b77c08 | ||
|
|
e1066e955c | ||
|
|
7f06dc3330 | ||
|
|
de40351710 | ||
|
|
de811bea30 | ||
|
|
74cc80d127 | ||
|
|
009f68a06a | ||
|
|
47f26447da | ||
|
|
12641b9e8f | ||
|
|
aa3707b5b4 | ||
|
|
f6631e35b8 | ||
|
|
3608ff9f14 | ||
|
|
7fdb98e147 | ||
|
|
9aea90bd81 | ||
|
|
898dfe6cf1 | ||
|
|
7961ae7f8e | ||
|
|
8bf77c8f07 | ||
|
|
3c7bae9ce9 | ||
|
|
17bcd8ed7d | ||
|
|
b5e9589803 | ||
|
|
1d628d84b5 | ||
|
|
b84fd6ea5c | ||
|
|
8fe4222c33 | ||
|
|
e626f2e255 | ||
|
|
5a0c150ff9 | ||
|
|
00f07818f9 | ||
|
|
136a4bddb2 | ||
|
|
ff7b74ec27 | ||
|
|
8c00326990 | ||
|
|
afcd26032d | ||
|
|
8f422a1bf9 | ||
|
|
45983d2166 | ||
|
|
89cb4de7f6 | ||
|
|
7ca0e0e2bd | ||
|
|
2bddd9baed | ||
|
|
0135ba29c5 | ||
|
|
549cd24812 | ||
|
|
a841b5d635 | ||
|
|
16ceb6cb30 | ||
|
|
edfd7d454c | ||
|
|
1d874e50c2 | ||
|
|
98127cc5da | ||
|
|
e243107bb6 | ||
|
|
237a8d4e69 | ||
|
|
7f4042ba1b | ||
|
|
3ed44ce8cf | ||
|
|
8e7d8312a9 | ||
|
|
4da7488dc4 | ||
|
|
e37680af96 | ||
|
|
5f873ae500 | ||
|
|
2380634496 | ||
|
|
af98b8da06 | ||
|
|
b68ec050e2 | ||
|
|
ac7df09200 | ||
|
|
192965413c | ||
|
|
745be7bea8 | ||
|
|
b6007e05c1 | ||
|
|
f53654d9f4 | ||
|
|
e5ecc7f541 | ||
|
|
882a9c27cc | ||
|
|
1e6b8e12b2 | ||
|
|
b226658977 | ||
|
|
6d6776eb58 | ||
|
|
f1f844a5b6 | ||
|
|
a3e45358de | ||
|
|
07e79f6e8a | ||
|
|
d94b8f87a3 | ||
|
|
fdb895d26c | ||
|
|
7041e96737 | ||
|
|
199f716ebb | ||
|
|
b12e358c1d | ||
|
|
f786f0e624 | ||
|
|
71e0472dc9 | ||
|
|
f7944e871b | ||
|
|
2fea1761c1 | ||
|
|
fa27ae210f | ||
|
|
46fa41470e | ||
|
|
c456a252f8 | ||
|
|
d837a762fc | ||
|
|
e82dfa971e | ||
|
|
cc17ac8859 | ||
|
|
3798b4d115 | ||
|
|
2d0f6c4ec5 | ||
|
|
f3b475ff0e | ||
|
|
41ae202d02 | ||
|
|
fef6176275 | ||
|
|
8ebe7f0ea5 | ||
|
|
eb85390846 | ||
|
|
dc83db273a | ||
|
|
201bd6ee02 | ||
|
|
396ffb42f9 | ||
|
|
9cf62ce874 | ||
|
|
9c6b98d98b | ||
|
|
14ae64e09d | ||
|
|
48215675b0 | ||
|
|
37fa35b24a | ||
|
|
23ec9c3ba0 | ||
|
|
e33a6a12c1 | ||
|
|
12ae1c3479 | ||
|
|
fdde0e691e | ||
|
|
1cbd47b988 | ||
|
|
e0183ed5c7 | ||
|
|
dae900cc59 | ||
|
|
4c2042ab01 | ||
|
|
2f0ca206f3 | ||
|
|
ac7c1bd97b | ||
|
|
d9a102afa9 | ||
|
|
7c1dcd8a72 | ||
|
|
1fbfeabd77 | ||
|
|
9a918f285d | ||
|
|
a7183f34ef | ||
|
|
bda416df0a | ||
|
|
a838c2bacc | ||
|
|
d2a094aa4c | ||
|
|
bdb2a53597 | ||
|
|
97ad0f1b4f | ||
|
|
2b5e177ab2 | ||
|
|
bfe29c4ef6 | ||
|
|
e35601bb19 | ||
|
|
24df438607 | ||
|
|
cb3b8cf21b | ||
|
|
0e6add0cfb | ||
|
|
343e97da0e | ||
|
|
ba8ce7233d | ||
|
|
35184e6908 | ||
|
|
824b00c9e0 | ||
|
|
79cab93d49 | ||
|
|
2afc9faa08 | ||
|
|
0e99d02fbe | ||
|
|
3a0a1e6d4a | ||
|
|
2057c35468 | ||
|
|
5eaa3b0916 | ||
|
|
4ad0f54c30 | ||
|
|
eeff3b5049 | ||
|
|
5e352489a0 | ||
|
|
7ee262ef4b | ||
|
|
2759231f7b | ||
|
|
e3f893dbd1 | ||
|
|
3f5513a2d6 | ||
|
|
fcf5e971a6 | ||
|
|
cdf7b33104 | ||
|
|
7bbff79d4b | ||
|
|
3a2b8bdb85 | ||
|
|
7843732e17 | ||
|
|
fa5a5c8c05 | ||
|
|
6092c6e789 | ||
|
|
7fe5a30424 | ||
|
|
a82b2155e9 | ||
|
|
b61427c07b | ||
|
|
fa2610538f | ||
|
|
d0ffcdd009 | ||
|
|
1c6864aee8 | ||
|
|
d638da2f10 | ||
|
|
2f7513753c | ||
|
|
c90a1f70a6 | ||
|
|
04348d0090 | ||
|
|
eda23491c0 | ||
|
|
dccf09861c | ||
|
|
02b9eda6fa | ||
|
|
6611ef0e5f | ||
|
|
db5e663f05 | ||
|
|
c4f21799a6 | ||
|
|
fedd92c022 | ||
|
|
19eca4e2d1 | ||
|
|
023dabd9b2 | ||
|
|
b44d1f7a92 | ||
|
|
3d9d6fee07 | ||
|
|
4c36020e95 | ||
|
|
6d01c51c63 | ||
|
|
693fb24e02 | ||
|
|
6689384c8a | ||
|
|
35a61f5759 | ||
|
|
d0ffd5606a | ||
|
|
c2b1268675 | ||
|
|
ccbbad3e9e | ||
|
|
dbf8cf7674 | ||
|
|
eb96ac374b | ||
|
|
c431a60171 | ||
|
|
2e0ca4fe05 | ||
|
|
df32c849bb | ||
|
|
33426d4c3a | ||
|
|
03e6e8126d | ||
|
|
ff10aa5ceb | ||
|
|
21d382315a | ||
|
|
6fe3be0243 | ||
|
|
10fcba9439 | ||
|
|
890d6191a1 | ||
|
|
735db02850 | ||
|
|
7bf46c7d71 | ||
|
|
8319b32466 | ||
|
|
0faca43744 | ||
|
|
6f66de3d16 | ||
|
|
5fb7fdffe1 | ||
|
|
7553b905c4 | ||
|
|
f74f17e227 | ||
|
|
7566904926 | ||
|
|
1420cf8d0f | ||
|
|
bddd418c8e | ||
|
|
49db898acb | ||
|
|
01585227c5 | ||
|
|
03b7c1b46b | ||
|
|
4686ebb420 | ||
|
|
082db351c0 | ||
|
|
84db6ce453 | ||
|
|
52b45c5b89 | ||
|
|
5c82789e57 | ||
|
|
7bc8c3c380 | ||
|
|
813c1ddcd0 | ||
|
|
733355a6ae | ||
|
|
6955a7776d | ||
|
|
bf04a2cf69 | ||
|
|
2b669afd3e | ||
|
|
8510b2b86e | ||
|
|
a95a9f754c | ||
|
|
3980b90bff | ||
|
|
b2bd1b5831 | ||
|
|
aa31c96821 | ||
|
|
f74bfdd493 | ||
|
|
5034ca2267 | ||
|
|
8094263028 | ||
|
|
0c9c0716a4 | ||
|
|
c2b2da7601 | ||
|
|
407f14add9 | ||
|
|
656c9c9da8 | ||
|
|
a578d20282 | ||
|
|
2e222c7ad9 | ||
|
|
7d6cd6d4f5 | ||
|
|
e31bd812ed |
26
.github/ISSUE_TEMPLATE/bug_report.md
vendored
Normal file
26
.github/ISSUE_TEMPLATE/bug_report.md
vendored
Normal file
@@ -0,0 +1,26 @@
|
||||
---
|
||||
name: Bug report
|
||||
about: Create a report to help us improve
|
||||
title: ''
|
||||
labels: 'bug'
|
||||
assignees: ''
|
||||
|
||||
---
|
||||
|
||||
**Describe the bug**
|
||||
<!-- A clear and concise description of what the bug is. -->
|
||||
|
||||
**To Reproduce**
|
||||
<!-- Steps or code to reproduce the behavior. -->
|
||||
|
||||
**Expected behavior**
|
||||
<!-- A clear and concise description of what you expected to happen. -->
|
||||
|
||||
**Build environment**
|
||||
- BDK tag/commit: <!-- e.g. v0.13.0, 3a07614 -->
|
||||
- OS+version: <!-- e.g. ubuntu 20.04.01, macOS 12.0.1, windows -->
|
||||
- Rust/Cargo version: <!-- e.g. 1.56.0 -->
|
||||
- Rust/Cargo target: <!-- e.g. x86_64-apple-darwin, x86_64-unknown-linux-gnu, etc. -->
|
||||
|
||||
**Additional context**
|
||||
<!-- Add any other context about the problem here. -->
|
||||
17
.github/ISSUE_TEMPLATE/enhancement_request.md
vendored
Normal file
17
.github/ISSUE_TEMPLATE/enhancement_request.md
vendored
Normal file
@@ -0,0 +1,17 @@
|
||||
---
|
||||
name: Enhancement request
|
||||
about: Request a new feature or change to an existing feature
|
||||
title: ''
|
||||
labels: 'enhancement'
|
||||
assignees: ''
|
||||
|
||||
---
|
||||
|
||||
**Describe the enhancement**
|
||||
<!-- A clear and concise description of what you would like added or changed. -->
|
||||
|
||||
**Use case**
|
||||
<!-- Tell us how you or others will use this new feature or change to an existing feature. -->
|
||||
|
||||
**Additional context**
|
||||
<!-- Add any other context about the enhancement here. -->
|
||||
99
.github/ISSUE_TEMPLATE/minor_release.md
vendored
Normal file
99
.github/ISSUE_TEMPLATE/minor_release.md
vendored
Normal file
@@ -0,0 +1,99 @@
|
||||
---
|
||||
name: Minor Release
|
||||
about: Create a new minor release [for release managers only]
|
||||
title: 'Release MAJOR.MINOR+1.0'
|
||||
labels: 'release'
|
||||
assignees: ''
|
||||
|
||||
---
|
||||
|
||||
## Create a new minor release
|
||||
|
||||
### Summary
|
||||
|
||||
<!--release summary to be used in announcements-->
|
||||
|
||||
### Commit
|
||||
|
||||
<!--latest commit ID to include in this release-->
|
||||
|
||||
### Changelog
|
||||
|
||||
<!--add notices from PRs merged since the prior release, see ["keep a changelog"]-->
|
||||
|
||||
### Checklist
|
||||
|
||||
Release numbering must follow [Semantic Versioning]. These steps assume the current `master`
|
||||
branch **development** version is *MAJOR.MINOR.0*.
|
||||
|
||||
#### On the day of the feature freeze
|
||||
|
||||
Change the `master` branch to the next MINOR+1 version:
|
||||
|
||||
- [ ] Switch to the `master` branch.
|
||||
- [ ] Create a new PR branch called `bump_dev_MAJOR_MINOR+1`, eg. `bump_dev_0_22`.
|
||||
- [ ] Bump the `bump_dev_MAJOR_MINOR+1` branch to the next development MINOR+1 version.
|
||||
- Change the `Cargo.toml` version value to `MAJOR.MINOR+1.0`.
|
||||
- Update the `CHANGELOG.md` file.
|
||||
- The commit message should be "Bump version to MAJOR.MINOR+1.0".
|
||||
- [ ] Create PR and merge the `bump_dev_MAJOR_MINOR+1` branch to `master`.
|
||||
- Title PR "Bump version to MAJOR.MINOR+1.0".
|
||||
|
||||
Create a new release branch and release candidate tag:
|
||||
|
||||
- [ ] Double check that your local `master` is up-to-date with the upstream repo.
|
||||
- [ ] Create a new branch called `release/MAJOR.MINOR+1` from `master`.
|
||||
- [ ] Bump the `release/MAJOR.MINOR+1` branch to `MAJOR.MINOR+1.0-rc.1` version.
|
||||
- Change the `Cargo.toml` version value to `MAJOR.MINOR+1.0-rc.1`.
|
||||
- The commit message should be "Bump version to MAJOR.MINOR+1.0-rc.1".
|
||||
- [ ] Add a tag to the `HEAD` commit in the `release/MAJOR.MINOR+1` branch.
|
||||
- The tag name should be `vMAJOR.MINOR+1.0-rc.1`
|
||||
- Use message "Release MAJOR.MINOR+1.0 rc.1".
|
||||
- Make sure the tag is signed, for extra safety use the explicit `--sign` flag.
|
||||
- [ ] Push the `release/MAJOR.MINOR+1` branch and new tag to the `bitcoindevkit/bdk` repo.
|
||||
- Use `git push --tags` option to push the new `vMAJOR.MINOR+1.0-rc.1` tag.
|
||||
|
||||
If any issues need to be fixed before the *MAJOR.MINOR+1.0* version is released:
|
||||
|
||||
- [ ] Merge fix PRs to the `master` branch.
|
||||
- [ ] Git cherry-pick fix commits to the `release/MAJOR.MINOR+1` branch.
|
||||
- [ ] Verify fixes in `release/MAJOR.MINOR+1` branch.
|
||||
- [ ] Bump the `release/MAJOR.MINOR+1` branch to `MAJOR.MINOR+1.0-rc.x+1` version.
|
||||
- Change the `Cargo.toml` version value to `MAJOR.MINOR+1.0-rc.x+1`.
|
||||
- The commit message should be "Bump version to MAJOR.MINOR+1.0-rc.x+1".
|
||||
- [ ] Add a tag to the `HEAD` commit in the `release/MAJOR.MINOR+1` branch.
|
||||
- The tag name should be `vMAJOR.MINOR+1.0-rc.x+1`, where x is the current release candidate number.
|
||||
- Use tag message "Release MAJOR.MINOR+1.0 rc.x+1".
|
||||
- Make sure the tag is signed, for extra safety use the explicit `--sign` flag.
|
||||
- [ ] Push the new tag to the `bitcoindevkit/bdk` repo.
|
||||
- Use `git push --tags` option to push the new `vMAJOR.MINOR+1.0-rc.x+1` tag.
|
||||
|
||||
#### On the day of the release
|
||||
|
||||
Tag and publish new release:
|
||||
|
||||
- [ ] Bump the `release/MAJOR.MINOR+1` branch to `MAJOR.MINOR+1.0` version.
|
||||
- Change the `Cargo.toml` version value to `MAJOR.MINOR+1.0`.
|
||||
- The commit message should be "Bump version to MAJOR.MINOR+1.0".
|
||||
- [ ] Add a tag to the `HEAD` commit in the `release/MAJOR.MINOR+1` branch.
|
||||
- The tag name should be `vMAJOR.MINOR+1.0`
|
||||
- The first line of the tag message should be "Release MAJOR.MINOR+1.0".
|
||||
- In the body of the tag message put a copy of the **Summary** and **Changelog** for the release.
|
||||
- Make sure the tag is signed, for extra safety use the explicit `--sign` flag.
|
||||
- [ ] Wait for the CI to finish one last time.
|
||||
- [ ] Push the new tag to the `bitcoindevkit/bdk` repo.
|
||||
- [ ] Publish **all** the updated crates to crates.io.
|
||||
- [ ] Create the release on GitHub.
|
||||
- Go to "tags", click on the dots on the right and select "Create Release".
|
||||
- Set the title to `Release MAJOR.MINOR+1.0`.
|
||||
- In the release notes body put the **Summary** and **Changelog**.
|
||||
- Use the "+ Auto-generate release notes" button to add details from included PRs.
|
||||
- Until we reach a `1.0.0` release check the "Pre-release" box.
|
||||
- [ ] Make sure the new release shows up on [crates.io] and that the docs are built correctly on [docs.rs].
|
||||
- [ ] Announce the release, using the **Summary**, on Discord, Twitter and Mastodon.
|
||||
- [ ] Celebrate 🎉
|
||||
|
||||
[Semantic Versioning]: https://semver.org/
|
||||
[crates.io]: https://crates.io/crates/bdk
|
||||
[docs.rs]: https://docs.rs/bdk/latest/bdk
|
||||
["keep a changelog"]: https://keepachangelog.com/en/1.0.0/
|
||||
71
.github/ISSUE_TEMPLATE/patch_release.md
vendored
Normal file
71
.github/ISSUE_TEMPLATE/patch_release.md
vendored
Normal file
@@ -0,0 +1,71 @@
|
||||
---
|
||||
name: Patch Release
|
||||
about: Create a new patch release [for release managers only]
|
||||
title: 'Release MAJOR.MINOR.PATCH+1'
|
||||
labels: 'release'
|
||||
assignees: ''
|
||||
|
||||
---
|
||||
|
||||
## Create a new patch release
|
||||
|
||||
### Summary
|
||||
|
||||
<!--release summary to be used in announcements-->
|
||||
|
||||
### Commit
|
||||
|
||||
<!--latest commit ID to include in this release-->
|
||||
|
||||
### Changelog
|
||||
|
||||
<!--add notices from PRs merged since the prior release, see ["keep a changelog"]-->
|
||||
|
||||
### Checklist
|
||||
|
||||
Release numbering must follow [Semantic Versioning]. These steps assume the current `master`
|
||||
branch **development** version is *MAJOR.MINOR.PATCH*.
|
||||
|
||||
#### On the day of the patch release
|
||||
|
||||
Change the `master` branch to the new PATCH+1 version:
|
||||
|
||||
- [ ] Switch to the `master` branch.
|
||||
- [ ] Create a new PR branch called `bump_dev_MAJOR_MINOR_PATCH+1`, eg. `bump_dev_0_22_1`.
|
||||
- [ ] Bump the `bump_dev_MAJOR_MINOR_PATCH+1` branch to the next development PATCH+1 version.
|
||||
- Change the `Cargo.toml` version value to `MAJOR.MINOR.PATCH+1`.
|
||||
- Update the `CHANGELOG.md` file.
|
||||
- The commit message should be "Bump version to MAJOR.MINOR.PATCH+1".
|
||||
- [ ] Create PR and merge the `bump_dev_MAJOR_MINOR_PATCH+1` branch to `master`.
|
||||
- Title PR "Bump version to MAJOR.MINOR.PATCH+1".
|
||||
|
||||
Cherry-pick, tag and publish new PATCH+1 release:
|
||||
|
||||
- [ ] Merge fix PRs to the `master` branch.
|
||||
- [ ] Git cherry-pick fix commits to the `release/MAJOR.MINOR` branch to be patched.
|
||||
- [ ] Verify fixes in `release/MAJOR.MINOR` branch.
|
||||
- [ ] Bump the `release/MAJOR.MINOR` branch to `MAJOR.MINOR.PATCH+1` version.
|
||||
- Change the `Cargo.toml` version value to `MAJOR.MINOR.PATCH+1`.
|
||||
- The commit message should be "Bump version to MAJOR.MINOR.PATCH+1".
|
||||
- [ ] Add a tag to the `HEAD` commit in the `release/MAJOR.MINOR` branch.
|
||||
- The tag name should be `vMAJOR.MINOR.PATCH+1`
|
||||
- The first line of the tag message should be "Release MAJOR.MINOR.PATCH+1".
|
||||
- In the body of the tag message put a copy of the **Summary** and **Changelog** for the release.
|
||||
- Make sure the tag is signed, for extra safety use the explicit `--sign` flag.
|
||||
- [ ] Wait for the CI to finish one last time.
|
||||
- [ ] Push the new tag to the `bitcoindevkit/bdk` repo.
|
||||
- [ ] Publish **all** the updated crates to crates.io.
|
||||
- [ ] Create the release on GitHub.
|
||||
- Go to "tags", click on the dots on the right and select "Create Release".
|
||||
- Set the title to `Release MAJOR.MINOR.PATCH+1`.
|
||||
- In the release notes body put the **Summary** and **Changelog**.
|
||||
- Use the "+ Auto-generate release notes" button to add details from included PRs.
|
||||
- Until we reach a `1.0.0` release check the "Pre-release" box.
|
||||
- [ ] Make sure the new release shows up on [crates.io] and that the docs are built correctly on [docs.rs].
|
||||
- [ ] Announce the release, using the **Summary**, on Discord, Twitter and Mastodon.
|
||||
- [ ] Celebrate 🎉
|
||||
|
||||
[Semantic Versioning]: https://semver.org/
|
||||
[crates.io]: https://crates.io/crates/bdk
|
||||
[docs.rs]: https://docs.rs/bdk/latest/bdk
|
||||
["keep a changelog"]: https://keepachangelog.com/en/1.0.0/
|
||||
77
.github/ISSUE_TEMPLATE/summer_project.md
vendored
Normal file
77
.github/ISSUE_TEMPLATE/summer_project.md
vendored
Normal file
@@ -0,0 +1,77 @@
|
||||
---
|
||||
name: Summer of Bitcoin Project
|
||||
about: Template to suggest a new https://www.summerofbitcoin.org/ project.
|
||||
title: ''
|
||||
labels: 'summer-of-bitcoin'
|
||||
assignees: ''
|
||||
|
||||
---
|
||||
|
||||
<!--
|
||||
## Overview
|
||||
|
||||
Project ideas are scoped for a university-level student with a basic background in CS and bitcoin
|
||||
fundamentals - achievable over 12-weeks. Below are just a few types of ideas:
|
||||
|
||||
- Low-hanging fruit: Relatively short projects with clear goals; requires basic technical knowledge
|
||||
and minimal familiarity with the codebase.
|
||||
- Core development: These projects derive from the ongoing work from the core of your development
|
||||
team. The list of features and bugs is never-ending, and help is always welcome.
|
||||
- Risky/Exploratory: These projects push the scope boundaries of your development effort. They
|
||||
might require expertise in an area not covered by your current development team. They might take
|
||||
advantage of a new technology. There is a reasonable chance that the project might be less
|
||||
successful, but the potential rewards make it worth the attempt.
|
||||
- Infrastructure/Automation: These projects are the code that your organization uses to get its
|
||||
development work done; for example, projects that improve the automation of releases, regression
|
||||
tests and automated builds. This is a category where a Summer of Bitcoin student can be really
|
||||
helpful, doing work that the development team has been putting off while they focus on core
|
||||
development.
|
||||
- Quality Assurance/Testing: Projects that work on and test your project's software development
|
||||
process. Additionally, projects that involve a thorough test and review of individual PRs.
|
||||
- Fun/Peripheral: These projects might not be related to the current core development focus, but
|
||||
create new innovations and new perspectives for your project.
|
||||
-->
|
||||
|
||||
**Description**
|
||||
<!-- Description: 3-7 sentences describing the project background and tasks to be done. -->
|
||||
|
||||
**Expected Outcomes**
|
||||
<!-- Short bullet list describing what is to be accomplished -->
|
||||
|
||||
**Resources**
|
||||
<!-- 2-3 reading materials for candidate to learn about the repo, project, scope etc -->
|
||||
<!-- Recommended reading such as a developer/contributor guide -->
|
||||
<!-- [Another example a paper citation](https://arxiv.org/pdf/1802.08091.pdf) -->
|
||||
<!-- [Another example an existing issue](https://github.com/opencv/opencv/issues/11013) -->
|
||||
<!-- [An existing related module](https://github.com/opencv/opencv_contrib/tree/master/modules/optflow) -->
|
||||
|
||||
**Skills Required**
|
||||
<!-- 3-4 technical skills that the candidate should know -->
|
||||
<!-- hands on experience with git -->
|
||||
<!-- mastery plus experience coding in C++ -->
|
||||
<!-- basic knowledge in matrix and tensor computations, college course work in cryptography -->
|
||||
<!-- strong mathematical background -->
|
||||
<!-- Bonus - has experience with React Native. Best if you have also worked with OSSFuzz -->
|
||||
|
||||
**Mentor(s)**
|
||||
<!-- names of mentor(s) for this project go here -->
|
||||
|
||||
**Difficulty**
|
||||
<!-- Easy, Medium, Hard -->
|
||||
|
||||
**Competency Test (optional)**
|
||||
<!-- 2-3 technical tasks related to the project idea or repository you’d like a candidate to
|
||||
perform in order to demonstrate competency, good first bugs, warm-up exercises -->
|
||||
<!-- ex. Read the instructions here to get Bitcoin core running on your machine -->
|
||||
<!-- ex. pick an issue labeled as “newcomer” in the repository, and send a merge request to the
|
||||
repository. You can also suggest some other improvement that we did not think of yet, or
|
||||
something that you find interesting or useful -->
|
||||
<!-- ex. fixes for coding style are usually easy to do, and are good issues for first time
|
||||
contributions for those learning how to interact with the project. After you are done with the
|
||||
coding style issue, try making a different contribution. -->
|
||||
<!-- ex. setup a full Debian packaging development environment and learn the basics of Debian
|
||||
packaging. Then identify and package the missing dependencies to package Specter Desktop -->
|
||||
<!-- ex. write a pull parser for CSV files. You'll be judged by the decisions to store the parser
|
||||
state and how flexible it is to wrap this parser in other scenarios. -->
|
||||
<!-- ex. Stretch Goal: Implement some basic metaprogram/app to prove you're very familiar with BDK.
|
||||
Be prepared to make adjustments as we judge your solution. -->
|
||||
6
.github/pull_request_template.md
vendored
6
.github/pull_request_template.md
vendored
@@ -9,6 +9,11 @@
|
||||
<!-- In this section you can include notes directed to the reviewers, like explaining why some parts
|
||||
of the PR were done in a specific way -->
|
||||
|
||||
### Changelog notice
|
||||
|
||||
<!-- Notice the release manager should include in the release tag message changelog -->
|
||||
<!-- See https://keepachangelog.com/en/1.0.0/ for examples -->
|
||||
|
||||
### Checklists
|
||||
|
||||
#### All Submissions:
|
||||
@@ -21,7 +26,6 @@ of the PR were done in a specific way -->
|
||||
|
||||
* [ ] I've added tests for the new feature
|
||||
* [ ] I've added docs for the new feature
|
||||
* [ ] I've updated `CHANGELOG.md`
|
||||
|
||||
#### Bugfixes:
|
||||
|
||||
|
||||
63
.github/workflows/code_coverage.yml
vendored
63
.github/workflows/code_coverage.yml
vendored
@@ -1,27 +1,54 @@
|
||||
on: [push]
|
||||
on: [push, pull_request]
|
||||
|
||||
name: Code Coverage
|
||||
|
||||
jobs:
|
||||
tarpaulin-codecov:
|
||||
name: Tarpaulin to codecov.io
|
||||
Codecov:
|
||||
name: Code Coverage
|
||||
runs-on: ubuntu-latest
|
||||
env:
|
||||
RUSTFLAGS: "-Cinstrument-coverage"
|
||||
RUSTDOCFLAGS: "-Cinstrument-coverage"
|
||||
LLVM_PROFILE_FILE: "report-%p-%m.profraw"
|
||||
|
||||
steps:
|
||||
- name: Checkout
|
||||
uses: actions/checkout@v2
|
||||
|
||||
- name: Set default toolchain
|
||||
run: rustup default nightly
|
||||
- name: Set profile
|
||||
run: rustup set profile minimal
|
||||
|
||||
- name: Install tarpaulin
|
||||
run: cargo install cargo-tarpaulin
|
||||
- name: Tarpaulin
|
||||
run: cargo tarpaulin --features all-keys,cli-utils,compiler,esplora,compact_filters --run-types Tests,Doctests --exclude-files "testutils/*" --out Xml
|
||||
|
||||
- name: Publish to codecov.io
|
||||
uses: codecov/codecov-action@v1.0.15
|
||||
- name: Install lcov tools
|
||||
run: sudo apt-get install lcov -y
|
||||
- name: Install Rust toolchain
|
||||
uses: actions-rs/toolchain@v1
|
||||
with:
|
||||
fail_ci_if_error: true
|
||||
file: ./cobertura.xml
|
||||
toolchain: "1.65.0"
|
||||
override: true
|
||||
profile: minimal
|
||||
components: llvm-tools-preview
|
||||
- name: Rust Cache
|
||||
uses: Swatinem/rust-cache@v2.2.1
|
||||
- name: Install grcov
|
||||
run: if [[ ! -e ~/.cargo/bin/grcov ]]; then cargo install grcov; fi
|
||||
- name: Build simulator image
|
||||
run: docker build -t hwi/ledger_emulator ./ci -f ci/Dockerfile.ledger
|
||||
- name: Run simulator image
|
||||
run: docker run --name simulator --network=host hwi/ledger_emulator &
|
||||
- name: Install Python
|
||||
uses: actions/setup-python@v4
|
||||
with:
|
||||
python-version: '3.9'
|
||||
- name: Install python dependencies
|
||||
run: pip install hwi==2.1.1 protobuf==3.20.1
|
||||
- name: Test
|
||||
run: cargo test --all-features
|
||||
- name: Run grcov
|
||||
run: mkdir coverage; grcov . --binary-path ./target/debug/ -s . -t lcov --branch --ignore-not-existing --ignore '/*' -o ./coverage/lcov.info
|
||||
- name: Generate HTML coverage report
|
||||
run: genhtml -o coverage-report.html ./coverage/lcov.info
|
||||
# - name: Coveralls upload
|
||||
# uses: coverallsapp/github-action@master
|
||||
# with:
|
||||
# github-token: ${{ secrets.GITHUB_TOKEN }}
|
||||
- name: Upload artifact
|
||||
uses: actions/upload-artifact@v2
|
||||
with:
|
||||
name: coverage-report
|
||||
path: coverage-report.html
|
||||
|
||||
167
.github/workflows/cont_integration.yml
vendored
167
.github/workflows/cont_integration.yml
vendored
@@ -10,127 +10,57 @@ jobs:
|
||||
strategy:
|
||||
matrix:
|
||||
rust:
|
||||
- stable
|
||||
- 1.45.0 # MSRV
|
||||
- version: stable
|
||||
clippy: true
|
||||
- version: 1.57.0 # MSRV
|
||||
features:
|
||||
- default
|
||||
- minimal
|
||||
- all-keys
|
||||
- minimal,esplora
|
||||
- key-value-db
|
||||
- electrum
|
||||
- compact_filters
|
||||
- cli-utils,esplora,key-value-db,electrum
|
||||
- compiler
|
||||
- --no-default-features
|
||||
- --all-features
|
||||
steps:
|
||||
- name: checkout
|
||||
uses: actions/checkout@v2
|
||||
- name: Generate cache key
|
||||
run: echo "${{ matrix.rust }} ${{ matrix.features }}" | tee .cache_key
|
||||
- name: cache
|
||||
uses: actions/cache@v2
|
||||
- name: Install Rust toolchain
|
||||
uses: actions-rs/toolchain@v1
|
||||
with:
|
||||
path: |
|
||||
~/.cargo/registry
|
||||
~/.cargo/git
|
||||
target
|
||||
key: ${{ runner.os }}-cargo-${{ hashFiles('.cache_key') }}-${{ hashFiles('**/Cargo.toml','**/Cargo.lock') }}
|
||||
- name: Set default toolchain
|
||||
run: rustup default ${{ matrix.rust }}
|
||||
- name: Set profile
|
||||
run: rustup set profile minimal
|
||||
- name: Add clippy
|
||||
run: rustup component add clippy
|
||||
toolchain: ${{ matrix.rust.version }}
|
||||
override: true
|
||||
profile: minimal
|
||||
- name: Rust Cache
|
||||
uses: Swatinem/rust-cache@v2.2.1
|
||||
- name: Build
|
||||
run: cargo build --features ${{ matrix.features }} --no-default-features
|
||||
- name: Clippy
|
||||
run: cargo clippy -- -D warnings
|
||||
run: cargo build ${{ matrix.features }}
|
||||
- name: Test
|
||||
run: cargo test --features ${{ matrix.features }} --no-default-features
|
||||
|
||||
test-readme-examples:
|
||||
name: Test README.md examples
|
||||
runs-on: ubuntu-latest
|
||||
steps:
|
||||
- name: checkout
|
||||
uses: actions/checkout@v2
|
||||
- name: cache
|
||||
uses: actions/cache@v2
|
||||
with:
|
||||
path: |
|
||||
~/.cargo/registry
|
||||
~/.cargo/git
|
||||
target
|
||||
key: ${{ runner.os }}-cargo-test-md-docs-${{ hashFiles('**/Cargo.toml','**/Cargo.lock') }}
|
||||
- name: Set default toolchain
|
||||
run: rustup default nightly
|
||||
- name: Set profile
|
||||
run: rustup set profile minimal
|
||||
- name: Test
|
||||
run: cargo test --features test-md-docs --no-default-features -- doctest::ReadmeDoctests
|
||||
|
||||
test-electrum:
|
||||
name: Test electrum
|
||||
runs-on: ubuntu-16.04
|
||||
container: bitcoindevkit/electrs
|
||||
env:
|
||||
MAGICAL_RPC_AUTH: USER_PASS
|
||||
MAGICAL_RPC_USER: admin
|
||||
MAGICAL_RPC_PASS: passw
|
||||
MAGICAL_RPC_URL: 127.0.0.1:18443
|
||||
MAGICAL_ELECTRUM_URL: tcp://127.0.0.1:60401
|
||||
steps:
|
||||
- name: Checkout
|
||||
uses: actions/checkout@v2
|
||||
- name: Cache
|
||||
uses: actions/cache@v2
|
||||
with:
|
||||
path: |
|
||||
~/.cargo/registry
|
||||
~/.cargo/git
|
||||
target
|
||||
key: ${{ runner.os }}-cargo-${{ github.job }}-${{ hashFiles('**/Cargo.toml','**/Cargo.lock') }}
|
||||
- name: Install rustup
|
||||
run: curl https://sh.rustup.rs -sSf | sh -s -- -y
|
||||
- name: Set default toolchain
|
||||
run: $HOME/.cargo/bin/rustup default stable
|
||||
- name: Set profile
|
||||
run: $HOME/.cargo/bin/rustup set profile minimal
|
||||
- name: Start core
|
||||
run: ./ci/start-core.sh
|
||||
- name: Test
|
||||
run: $HOME/.cargo/bin/cargo test --features test-electrum --no-default-features
|
||||
run: cargo test ${{ matrix.features }}
|
||||
|
||||
check-wasm:
|
||||
name: Check WASM
|
||||
runs-on: ubuntu-16.04
|
||||
runs-on: ubuntu-20.04
|
||||
env:
|
||||
CC: clang-10
|
||||
CFLAGS: -I/usr/include
|
||||
steps:
|
||||
- name: Checkout
|
||||
uses: actions/checkout@v2
|
||||
- name: Cache
|
||||
uses: actions/cache@v2
|
||||
with:
|
||||
path: |
|
||||
~/.cargo/registry
|
||||
~/.cargo/git
|
||||
target
|
||||
key: ${{ runner.os }}-cargo-${{ github.job }}-${{ hashFiles('**/Cargo.toml','**/Cargo.lock') }}
|
||||
# Install a recent version of clang that supports wasm32
|
||||
- run: wget -O - https://apt.llvm.org/llvm-snapshot.gpg.key | sudo apt-key add - || exit 1
|
||||
- run: sudo apt-add-repository "deb http://apt.llvm.org/xenial/ llvm-toolchain-xenial-10 main" || exit 1
|
||||
- run: sudo apt-add-repository "deb http://apt.llvm.org/focal/ llvm-toolchain-focal-10 main" || exit 1
|
||||
- run: sudo apt-get update || exit 1
|
||||
- run: sudo apt-get install -y clang-10 libc6-dev-i386 || exit 1
|
||||
- name: Set default toolchain
|
||||
run: rustup default stable
|
||||
- name: Set profile
|
||||
run: rustup set profile minimal
|
||||
- name: Add target wasm32
|
||||
run: rustup target add wasm32-unknown-unknown
|
||||
- name: Check
|
||||
run: cargo check --target wasm32-unknown-unknown --features cli-utils,esplora --no-default-features
|
||||
- run: sudo apt-get install -y libclang-common-10-dev clang-10 libc6-dev-i386 || exit 1
|
||||
- name: Install Rust toolchain
|
||||
uses: actions-rs/toolchain@v1
|
||||
with:
|
||||
toolchain: stable
|
||||
override: true
|
||||
profile: minimal
|
||||
target: "wasm32-unknown-unknown"
|
||||
- name: Rust Cache
|
||||
uses: Swatinem/rust-cache@v2.2.1
|
||||
- name: Check bdk
|
||||
working-directory: ./crates/bdk
|
||||
run: cargo check --target wasm32-unknown-unknown --features dev-getrandom-wasm
|
||||
- name: Check esplora
|
||||
working-directory: ./crates/esplora
|
||||
run: cargo check --target wasm32-unknown-unknown --features async --no-default-features
|
||||
|
||||
fmt:
|
||||
name: Rust fmt
|
||||
@@ -138,11 +68,30 @@ jobs:
|
||||
steps:
|
||||
- name: Checkout
|
||||
uses: actions/checkout@v2
|
||||
- name: Set default toolchain
|
||||
run: rustup default stable
|
||||
- name: Set profile
|
||||
run: rustup set profile minimal
|
||||
- name: Add clippy
|
||||
run: rustup component add rustfmt
|
||||
- name: Install Rust toolchain
|
||||
uses: actions-rs/toolchain@v1
|
||||
with:
|
||||
toolchain: stable
|
||||
override: true
|
||||
profile: minimal
|
||||
components: rustfmt
|
||||
- name: Check fmt
|
||||
run: cargo fmt --all -- --check
|
||||
run: cargo fmt --all -- --config format_code_in_doc_comments=true --check
|
||||
|
||||
clippy_check:
|
||||
runs-on: ubuntu-latest
|
||||
steps:
|
||||
- uses: actions/checkout@v1
|
||||
- uses: actions-rs/toolchain@v1
|
||||
with:
|
||||
# we pin clippy instead of using "stable" so that our CI doesn't break
|
||||
# at each new cargo release
|
||||
toolchain: "1.67.0"
|
||||
components: clippy
|
||||
override: true
|
||||
- name: Rust Cache
|
||||
uses: Swatinem/rust-cache@v2.2.1
|
||||
- uses: actions-rs/clippy-check@v1
|
||||
with:
|
||||
token: ${{ secrets.GITHUB_TOKEN }}
|
||||
args: --all-features --all-targets -- -D warnings
|
||||
|
||||
37
.github/workflows/nightly_docs.yml
vendored
37
.github/workflows/nightly_docs.yml
vendored
@@ -9,25 +9,18 @@ jobs:
|
||||
steps:
|
||||
- name: Checkout sources
|
||||
uses: actions/checkout@v2
|
||||
- name: Setup cache
|
||||
uses: actions/cache@v2
|
||||
with:
|
||||
path: |
|
||||
~/.cargo/registry
|
||||
~/.cargo/git
|
||||
target
|
||||
key: nightly-docs-${{ hashFiles('**/Cargo.toml','**/Cargo.lock') }}
|
||||
- name: Install nightly toolchain
|
||||
uses: actions-rs/toolchain@v1
|
||||
with:
|
||||
profile: minimal
|
||||
toolchain: nightly
|
||||
override: true
|
||||
- name: Set default toolchain
|
||||
run: rustup default nightly-2022-12-14
|
||||
- name: Set profile
|
||||
run: rustup set profile minimal
|
||||
- name: Update toolchain
|
||||
run: rustup update
|
||||
- name: Rust Cache
|
||||
uses: Swatinem/rust-cache@v2.2.1
|
||||
- name: Build docs
|
||||
uses: actions-rs/cargo@v1
|
||||
with:
|
||||
command: rustdoc
|
||||
args: --verbose --features=compiler,electrum,esplora,compact_filters,key-value-db,all-keys -- --cfg docsrs
|
||||
run: cargo doc --no-deps
|
||||
env:
|
||||
RUSTDOCFLAGS: '--cfg docsrs -Dwarnings'
|
||||
- name: Upload artifact
|
||||
uses: actions/upload-artifact@v2
|
||||
with:
|
||||
@@ -47,18 +40,18 @@ jobs:
|
||||
repository: bitcoindevkit/bitcoindevkit.org
|
||||
ref: master
|
||||
- name: Create directories
|
||||
run: mkdir -p ./static/docs-rs/bdk/nightly
|
||||
run: mkdir -p ./docs/.vuepress/public/docs-rs/bdk/nightly
|
||||
- name: Remove old latest
|
||||
run: rm -rf ./static/docs-rs/bdk/nightly/latest
|
||||
run: rm -rf ./docs/.vuepress/public/docs-rs/bdk/nightly/latest
|
||||
- name: Download built docs
|
||||
uses: actions/download-artifact@v1
|
||||
with:
|
||||
name: built-docs
|
||||
path: ./static/docs-rs/bdk/nightly/latest
|
||||
path: ./docs/.vuepress/public/docs-rs/bdk/nightly/latest
|
||||
- name: Configure git
|
||||
run: git config user.email "github-actions@github.com" && git config user.name "github-actions"
|
||||
- name: Commit
|
||||
continue-on-error: true # If there's nothing to commit this step fails, but it's fine
|
||||
run: git add ./static && git commit -m "Publish autogenerated nightly docs"
|
||||
run: git add ./docs/.vuepress/public/docs-rs && git commit -m "Publish autogenerated nightly docs"
|
||||
- name: Push
|
||||
run: git push origin master
|
||||
|
||||
1
.gitignore
vendored
1
.gitignore
vendored
@@ -1,5 +1,6 @@
|
||||
/target
|
||||
Cargo.lock
|
||||
/.vscode
|
||||
|
||||
*.swp
|
||||
.idea
|
||||
|
||||
436
CHANGELOG.md
436
CHANGELOG.md
@@ -1,11 +1,414 @@
|
||||
# Changelog
|
||||
All notable changes to this project will be documented in this file.
|
||||
|
||||
All notable changes to this project can be found here and in each release's git tag and can be viewed with `git tag -ln100 "v*"`. See also [DEVELOPMENT_CYCLE.md](DEVELOPMENT_CYCLE.md) for more details.
|
||||
|
||||
Contributors do not need to change this file but do need to add changelog details in their PR descriptions. The person making the next release will collect changelog details from included PRs and edit this file prior to each release.
|
||||
|
||||
The format is based on [Keep a Changelog](https://keepachangelog.com/en/1.0.0/),
|
||||
and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0.html).
|
||||
|
||||
## [Unreleased]
|
||||
|
||||
## [v0.27.1]
|
||||
|
||||
### Summary
|
||||
|
||||
Fixes [RUSTSEC-2022-0090], this issue is only applicable if you are using the optional sqlite database feature.
|
||||
|
||||
[RUSTSEC-2022-0090]: https://rustsec.org/advisories/RUSTSEC-2022-0090
|
||||
|
||||
### Changed
|
||||
|
||||
- Update optional sqlite dependency from 0.27.0 to 0.28.0. #867
|
||||
|
||||
## [v0.27.0]
|
||||
|
||||
### Summary
|
||||
|
||||
A maintenance release with a bump in project MSRV to 1.57.0, updated dependencies and a few developer-oriented improvements. Improvements include better error formatting, not defaulting to async/await for wasm32, and adding derived PartialEq and Eq on SyncTime.
|
||||
|
||||
### Changed
|
||||
|
||||
- Improve display error formatting #814
|
||||
- Don't default to use async/await on wasm32 #831
|
||||
- Project MSRV changed from 1.56.1 to 1.57.0 #842
|
||||
- Update rust-miniscript dependency to latest bug fix release 9.0 #844
|
||||
|
||||
### Added
|
||||
|
||||
- Derive PartialEq, Eq on SyncTime #837
|
||||
|
||||
## [v0.26.0]
|
||||
|
||||
### Summary
|
||||
|
||||
This release improves Fulcrum electrum server compatibility and fixes public descriptor template key origin paths. We also snuck in small enhancements to configure the electrum client to validate the domain using SSL and sort TransactionDetails by block height and timestamp.
|
||||
|
||||
### Fixed
|
||||
|
||||
- Make electrum blockchain client `save_tx` function order independent to work with Fulcrum servers. #808
|
||||
- Fix wrong testnet key origin path in public descriptor templates. #818
|
||||
- Make README.md code examples compile without errors. #820
|
||||
|
||||
### Changed
|
||||
|
||||
- Bump `hwi` dependency to `0.4.0`. #825
|
||||
- Bump `esplora-client` dependency to `0.3` #830
|
||||
|
||||
### Added
|
||||
|
||||
- For electrum blockchain client, allow user to configure whether to validate the domain using SSL. #805
|
||||
- Implement ordering for `TransactionDetails`. #812
|
||||
|
||||
## [v0.25.0]
|
||||
|
||||
### Summary
|
||||
|
||||
This release fixes slow sync time and big script_pubkeys table with SQLite, the wallet rescan height for the FullyNodedExport and setting the network for keys in the KeyMap when using descriptor templates. Also added are new blockchain and mnemonic examples.
|
||||
|
||||
### Fixed
|
||||
|
||||
- Slow sync time and big script_pubkeys table with SQLite.
|
||||
- Wallet rescan height for the FullyNodedExport.
|
||||
- Setting the network for keys in the KeyMap when using descriptor templates.
|
||||
|
||||
### Added
|
||||
|
||||
- Examples for connecting to Esplora, Electrum Server, Neutrino and Bitcoin Core.
|
||||
- Example for using a mnemonic in a descriptor.
|
||||
|
||||
## [v0.24.0]
|
||||
|
||||
### Summary
|
||||
|
||||
This release contains important dependency updates for `rust-bitcoin` to `0.29` and `rust-miniscript` to `8.0`, plus related crates that also depend on the latest version of `rust-bitcoin`. The release also includes a breaking change to the BDK signer which now produces low-R signatures by default, saving one byte. A bug was found in the `get_checksum` and `get_checksum_bytes` functions, which are now deprecated in favor of fixed versions called `calc_checksum` and `calc_checksum_bytes`. And finally a new `hardware-signer` features was added that re-exports the `hwi` crate, along with a new `hardware_signers.rs` example file.
|
||||
|
||||
### Changed
|
||||
|
||||
- Updated dependency versions for `rust-bitcoin` to `0.29` and `rust-miniscript` to `8.0`, plus all related crates. @afilini #770
|
||||
- BDK Signer now produces low-R signatures by default, saving one byte. If you want to preserve the original behavior, set allow_grinding in the SignOptions to false. @vladimirfomene #779
|
||||
- Deprecated `get_checksum` and `get_checksum_bytes` due to a bug where they calculate the checksum of a descriptor that already has a checksum. Use `calc_checksum` and `calc_checksum_bytes` instead. @evanlinjin #765
|
||||
- Remove deprecated "address validators". @afilini #770
|
||||
|
||||
### Added
|
||||
|
||||
- New `calc_checksum` and `calc_checksum_bytes`, replace deprecated `get_checksum` and `get_checksum_bytes`. @evanlinjin #765
|
||||
- Re-export the hwi crate when the feature hardware-signer is on. @danielabrozzoni #758
|
||||
- New examples/hardware_signer.rs. @danielabrozzoni #758
|
||||
- Make psbt module public to expose PsbtUtils trait to downstream projects. @notmandatory #782
|
||||
|
||||
## [v0.23.0]
|
||||
|
||||
### Summary
|
||||
|
||||
This release brings new utilities functions on PSBTs like `fee_amount()` and `fee_rate()` and migrates BDK to use our new external esplora client library.
|
||||
As always many bug fixes, docs and tests improvement are also included.
|
||||
|
||||
### Changed
|
||||
|
||||
- Update electrum-client to 0.11.0 by @afilini in https://github.com/bitcoindevkit/bdk/pull/737
|
||||
- Change configs for source-base code coverage by @wszdexdrf in https://github.com/bitcoindevkit/bdk/pull/708
|
||||
- Improve docs regarding PSBT finalization by @tnull in https://github.com/bitcoindevkit/bdk/pull/753
|
||||
- Update compiler example to a Policy example by @rajarshimaitra in https://github.com/bitcoindevkit/bdk/pull/730
|
||||
- Fix the release process by @afilini in https://github.com/bitcoindevkit/bdk/pull/754
|
||||
- Remove redundant duplicated keys check by @afilini in https://github.com/bitcoindevkit/bdk/pull/761
|
||||
- Remove genesis_block lazy initialization by @shobitb in https://github.com/bitcoindevkit/bdk/pull/756
|
||||
- Fix `Wallet::descriptor_checksum` to actually return the checksum by @evanlinjin in https://github.com/bitcoindevkit/bdk/pull/763
|
||||
- Use the esplora client crate by @afilini in https://github.com/bitcoindevkit/bdk/pull/764
|
||||
|
||||
### Added
|
||||
|
||||
- Run code coverage on every PR by @danielabrozzoni in https://github.com/bitcoindevkit/bdk/pull/747
|
||||
- Add psbt_signer.rs example by @notmandatory in https://github.com/bitcoindevkit/bdk/pull/744
|
||||
- Add fee_amount() and fee_rate() functions to PsbtUtils trait by @notmandatory in https://github.com/bitcoindevkit/bdk/pull/728
|
||||
- Add tests to improve coverage by @vladimirfomene in https://github.com/bitcoindevkit/bdk/pull/745
|
||||
- Enable signing taproot transactions with only `non_witness_utxos` by @afilini in https://github.com/bitcoindevkit/bdk/pull/757
|
||||
- Add datatype for is_spent sqlite column by @vladimirfomene in https://github.com/bitcoindevkit/bdk/pull/713
|
||||
- Add vscode filter to gitignore by @evanlinjin in https://github.com/bitcoindevkit/bdk/pull/762
|
||||
|
||||
## [v0.22.0]
|
||||
|
||||
### Summary
|
||||
|
||||
This release brings support for hardware signers on desktop through the HWI library.
|
||||
It also includes fixes and improvements which are part of our ongoing effort of integrating
|
||||
BDK and LDK together.
|
||||
|
||||
### Changed
|
||||
|
||||
- FeeRate function name as_sat_vb to as_sat_per_vb. #678
|
||||
- Verify signatures after signing. #718
|
||||
- Dependency electrum-client to 0.11.0. #737
|
||||
|
||||
### Added
|
||||
|
||||
- Functions to create FeeRate from sats/kvbytes and sats/kwu. #678
|
||||
- Custom hardware wallet signer HwiSigner in wallet::hardwaresigner module. #682
|
||||
- Function allow_dust on TxBuilder. #689
|
||||
- Implementation of Deref<Target=UrlClient> for EsploraBlockchain. #722
|
||||
- Implementation of Deref<Target=Client> for ElectrumBlockchain #705
|
||||
- Implementation of Deref<Target=Client> for RpcBlockchain. #731
|
||||
|
||||
## [v0.21.0]
|
||||
|
||||
- Add `descriptor::checksum::get_checksum_bytes` method.
|
||||
- Add `Excess` enum to handle remaining amount after coin selection.
|
||||
- Move change creation from `Wallet::create_tx` to `CoinSelectionAlgorithm::coin_select`.
|
||||
- Change the interface of `SqliteDatabase::new` to accept any type that implement AsRef<Path>
|
||||
- Add the ability to specify which leaves to sign in a taproot transaction through `TapLeavesOptions` in `SignOptions`
|
||||
- Add the ability to specify whether a taproot transaction should be signed using the internal key or not, using `sign_with_tap_internal_key` in `SignOptions`
|
||||
- Consolidate params `fee_amount` and `amount_needed` in `target_amount` in `CoinSelectionAlgorithm::coin_select` signature.
|
||||
- Change the meaning of the `fee_amount` field inside `CoinSelectionResult`: from now on the `fee_amount` will represent only the fees associated with the utxos in the `selected` field of `CoinSelectionResult`.
|
||||
- New `RpcBlockchain` implementation with various fixes.
|
||||
- Return balance in separate categories, namely `confirmed`, `trusted_pending`, `untrusted_pending` & `immature`.
|
||||
|
||||
## [v0.20.0]
|
||||
|
||||
- New MSRV set to `1.56.1`
|
||||
- Fee sniping discouraging through nLockTime - if the user specifies a `current_height`, we use that as a nlocktime, otherwise we use the last sync height (or 0 if we never synced)
|
||||
- Fix hang when `ElectrumBlockchainConfig::stop_gap` is zero.
|
||||
- Set coin type in BIP44, BIP49, and BIP84 templates
|
||||
- Get block hash given a block height - A `get_block_hash` method is now defined on the `GetBlockHash` trait and implemented on every blockchain backend. This method expects a block height and returns the corresponding block hash.
|
||||
- Add `remove_partial_sigs` and `try_finalize` to `SignOptions`
|
||||
- Deprecate `AddressValidator`
|
||||
- Fix Electrum wallet sync potentially causing address index decrement - compare proposed index and current index before applying batch operations during sync.
|
||||
|
||||
## [v0.19.0]
|
||||
|
||||
- added `OldestFirstCoinSelection` impl to `CoinSelectionAlgorithm`
|
||||
- New MSRV set to `1.56`
|
||||
- Unpinned tokio to `1`
|
||||
- Add traits to reuse `Blockchain`s across multiple wallets (`BlockchainFactory` and `StatelessBlockchain`).
|
||||
- Upgrade to rust-bitcoin `0.28`
|
||||
- If using the `sqlite-db` feature all cached wallet data is deleted due to a possible UTXO inconsistency, a wallet.sync will recreate it
|
||||
- Update `PkOrF` in the policy module to become an enum
|
||||
- Add experimental support for Taproot, including:
|
||||
- Support for `tr()` descriptors with complex tapscript trees
|
||||
- Creation of Taproot PSBTs (BIP-371)
|
||||
- Signing Taproot PSBTs (key spend and script spend)
|
||||
- Support for `tr()` descriptors in the `descriptor!()` macro
|
||||
- Add support for Bitcoin Core 23.0 when using the `rpc` blockchain
|
||||
|
||||
## [v0.18.0]
|
||||
|
||||
- Add `sqlite-bundled` feature for deployments that need a bundled version of sqlite, i.e. for mobile platforms.
|
||||
- Added `Wallet::get_signers()`, `Wallet::descriptor_checksum()` and `Wallet::get_address_validators()`, exposed the `AsDerived` trait.
|
||||
- Deprecate `database::Database::flush()`, the function is only needed for the sled database on mobile, instead for mobile use the sqlite database.
|
||||
- Add `keychain: KeychainKind` to `wallet::AddressInfo`.
|
||||
- Improve key generation traits
|
||||
- Rename `WalletExport` to `FullyNodedExport`, deprecate the former.
|
||||
- Bump `miniscript` dependency version to `^6.1`.
|
||||
|
||||
## [v0.17.0]
|
||||
|
||||
- Removed default verification from `wallet::sync`. sync-time verification is added in `script_sync` and is activated by `verify` feature flag.
|
||||
- `verify` flag removed from `TransactionDetails`.
|
||||
- Add `get_internal_address` to allow you to get internal addresses just as you get external addresses.
|
||||
- added `ensure_addresses_cached` to `Wallet` to let offline wallets load and cache addresses in their database
|
||||
- Add `is_spent` field to `LocalUtxo`; when we notice that a utxo has been spent we set `is_spent` field to true instead of deleting it from the db.
|
||||
|
||||
### Sync API change
|
||||
|
||||
To decouple the `Wallet` from the `Blockchain` we've made major changes:
|
||||
|
||||
- Removed `Blockchain` from Wallet.
|
||||
- Removed `Wallet::broadcast` (just use `Blockchain::broadcast`)
|
||||
- Deprecated `Wallet::new_offline` (all wallets are offline now)
|
||||
- Changed `Wallet::sync` to take a `Blockchain`.
|
||||
- Stop making a request for the block height when calling `Wallet:new`.
|
||||
- Added `SyncOptions` to capture extra (future) arguments to `Wallet::sync`.
|
||||
- Removed `max_addresses` sync parameter which determined how many addresses to cache before syncing since this can just be done with `ensure_addresses_cached`.
|
||||
- remove `flush` method from the `Database` trait.
|
||||
|
||||
## [v0.16.1]
|
||||
|
||||
- Pin tokio dependency version to ~1.14 to prevent errors due to their new MSRV 1.49.0
|
||||
|
||||
## [v0.16.0]
|
||||
|
||||
- Disable `reqwest` default features.
|
||||
- Added `reqwest-default-tls` feature: Use this to restore the TLS defaults of reqwest if you don't want to add a dependency to it in your own manifest.
|
||||
- Use dust_value from rust-bitcoin
|
||||
- Fixed generating WIF in the correct network format.
|
||||
|
||||
## [v0.15.0]
|
||||
|
||||
- Overhauled sync logic for electrum and esplora.
|
||||
- Unify ureq and reqwest esplora backends to have the same configuration parameters. This means reqwest now has a timeout parameter and ureq has a concurrency parameter.
|
||||
- Fixed esplora fee estimation.
|
||||
|
||||
## [v0.14.0]
|
||||
|
||||
- BIP39 implementation dependency, in `keys::bip39` changed from tiny-bip39 to rust-bip39.
|
||||
- Add new method on the `TxBuilder` to embed data in the transaction via `OP_RETURN`. To allow that a fix to check the dust only on spendable output has been introduced.
|
||||
- Update the `Database` trait to store the last sync timestamp and block height
|
||||
- Rename `ConfirmationTime` to `BlockTime`
|
||||
|
||||
## [v0.13.0]
|
||||
|
||||
- Exposed `get_tx()` method from `Database` to `Wallet`.
|
||||
|
||||
## [v0.12.0]
|
||||
|
||||
- Activate `miniscript/use-serde` feature to allow consumers of the library to access it via the re-exported `miniscript` crate.
|
||||
- Add support for proxies in `EsploraBlockchain`
|
||||
- Added `SqliteDatabase` that implements `Database` backed by a sqlite database using `rusqlite` crate.
|
||||
|
||||
## [v0.11.0]
|
||||
|
||||
- Added `flush` method to the `Database` trait to explicitly flush to disk latest changes on the db.
|
||||
|
||||
## [v0.10.0]
|
||||
|
||||
- Added `RpcBlockchain` in the `AnyBlockchain` struct to allow using Rpc backend where `AnyBlockchain` is used (eg `bdk-cli`)
|
||||
- Removed hard dependency on `tokio`.
|
||||
|
||||
### Wallet
|
||||
|
||||
- Removed and replaced `set_single_recipient` with more general `drain_to` and replaced `maintain_single_recipient` with `allow_shrinking`.
|
||||
|
||||
### Blockchain
|
||||
|
||||
- Removed `stop_gap` from `Blockchain` trait and added it to only `ElectrumBlockchain` and `EsploraBlockchain` structs.
|
||||
- Added a `ureq` backend for use when not using feature `async-interface` or target WASM. `ureq` is a blocking HTTP client.
|
||||
|
||||
## [v0.9.0]
|
||||
|
||||
### Wallet
|
||||
|
||||
- Added Bitcoin Core RPC as a blockchain backend
|
||||
- Added a `verify` feature that can be enabled to verify the unconfirmed txs we download against the consensus rules
|
||||
|
||||
## [v0.8.0]
|
||||
|
||||
### Wallet
|
||||
- Added an option that must be explicitly enabled to allow signing using non-`SIGHASH_ALL` sighashes (#350)
|
||||
#### Changed
|
||||
`get_address` now returns an `AddressInfo` struct that includes the index and derefs to `Address`.
|
||||
|
||||
## [v0.7.0]
|
||||
|
||||
### Policy
|
||||
#### Changed
|
||||
Removed `fill_satisfaction` method in favor of enum parameter in `extract_policy` method
|
||||
|
||||
#### Added
|
||||
Timelocks are considered (optionally) in building the `satisfaction` field
|
||||
|
||||
### Wallet
|
||||
|
||||
- Changed `Wallet::{sign, finalize_psbt}` now take a `&mut psbt` rather than consuming it.
|
||||
- Require and validate `non_witness_utxo` for SegWit signatures by default, can be adjusted with `SignOptions`
|
||||
- Replace the opt-in builder option `force_non_witness_utxo` with the opposite `only_witness_utxo`. From now on we will provide the `non_witness_utxo`, unless explicitly asked not to.
|
||||
|
||||
## [v0.6.0]
|
||||
|
||||
### Misc
|
||||
#### Changed
|
||||
- New minimum supported rust version is 1.46.0
|
||||
- Changed `AnyBlockchainConfig` to use serde tagged representation.
|
||||
|
||||
### Descriptor
|
||||
#### Added
|
||||
- Added ability to analyze a `PSBT` to check which and how many signatures are already available
|
||||
|
||||
### Wallet
|
||||
#### Changed
|
||||
- `get_new_address()` refactored to `get_address(AddressIndex::New)` to support different `get_address()` index selection strategies
|
||||
|
||||
#### Added
|
||||
- Added `get_address(AddressIndex::LastUnused)` which returns the last derived address if it has not been used or if used in a received transaction returns a new address
|
||||
- Added `get_address(AddressIndex::Peek(u32))` which returns a derived address for a specified descriptor index but does not change the current index
|
||||
- Added `get_address(AddressIndex::Reset(u32))` which returns a derived address for a specified descriptor index and resets current index to the given value
|
||||
- Added `get_psbt_input` to create the corresponding psbt input for a local utxo.
|
||||
|
||||
#### Fixed
|
||||
- Fixed `coin_select` calculation for UTXOs where `value < fee` that caused over-/underflow errors.
|
||||
|
||||
## [v0.5.1]
|
||||
|
||||
### Misc
|
||||
#### Changed
|
||||
- Pin `hyper` to `=0.14.4` to make it compile on Rust 1.45
|
||||
|
||||
## [v0.5.0]
|
||||
|
||||
### Misc
|
||||
#### Changed
|
||||
- Updated `electrum-client` to version `0.7`
|
||||
|
||||
### Wallet
|
||||
#### Changed
|
||||
- `FeeRate` constructors `from_sat_per_vb` and `default_min_relay_fee` are now `const` functions
|
||||
|
||||
## [v0.4.0]
|
||||
|
||||
### Keys
|
||||
#### Changed
|
||||
- Renamed `DerivableKey::add_metadata()` to `DerivableKey::into_descriptor_key()`
|
||||
- Renamed `ToDescriptorKey::to_descriptor_key()` to `IntoDescriptorKey::into_descriptor_key()`
|
||||
#### Added
|
||||
- Added an `ExtendedKey` type that is an enum of `bip32::ExtendedPubKey` and `bip32::ExtendedPrivKey`
|
||||
- Added `DerivableKey::into_extended_key()` as the only method that needs to be implemented
|
||||
|
||||
### Misc
|
||||
#### Removed
|
||||
- Removed the `parse_descriptor` example, since it wasn't demonstrating any bdk-specific API anymore.
|
||||
#### Changed
|
||||
- Updated `bitcoin` to `0.26`, `miniscript` to `5.1` and `electrum-client` to `0.6`
|
||||
#### Added
|
||||
- Added support for the `signet` network (issue #62)
|
||||
- Added a function to get the version of BDK at runtime
|
||||
|
||||
### Wallet
|
||||
#### Changed
|
||||
- Removed the explicit `id` argument from `Wallet::add_signer()` since that's now part of `Signer` itself
|
||||
- Renamed `ToWalletDescriptor::to_wallet_descriptor()` to `IntoWalletDescriptor::into_wallet_descriptor()`
|
||||
|
||||
### Policy
|
||||
#### Changed
|
||||
- Removed unneeded `Result<(), PolicyError>` return type for `Satisfaction::finalize()`
|
||||
- Removed the `TooManyItemsSelected` policy error (see commit message for more details)
|
||||
|
||||
## [v0.3.0]
|
||||
|
||||
### Descriptor
|
||||
#### Changed
|
||||
- Added an alias `DescriptorError` for `descriptor::error::Error`
|
||||
- Changed the error returned by `descriptor!()` and `fragment!()` to `DescriptorError`
|
||||
- Changed the error type in `ToWalletDescriptor` to `DescriptorError`
|
||||
- Improved checks on descriptors built using the macros
|
||||
|
||||
### Blockchain
|
||||
#### Changed
|
||||
- Remove `BlockchainMarker`, `OfflineClient` and `OfflineWallet` in favor of just using the unit
|
||||
type to mark for a missing client.
|
||||
- Upgrade `tokio` to `1.0`.
|
||||
|
||||
### Transaction Creation Overhaul
|
||||
|
||||
The `TxBuilder` is now created from the `build_tx` or `build_fee_bump` functions on wallet and the
|
||||
final transaction is created by calling `finish` on the builder.
|
||||
|
||||
- Removed `TxBuilder::utxos` in favor of `TxBuilder::add_utxos`
|
||||
- Added `Wallet::build_tx` to replace `Wallet::create_tx`
|
||||
- Added `Wallet::build_fee_bump` to replace `Wallet::bump_fee`
|
||||
- Added `Wallet::get_utxo`
|
||||
- Added `Wallet::get_descriptor_for_keychain`
|
||||
|
||||
### `add_foreign_utxo`
|
||||
|
||||
- Renamed `UTXO` to `LocalUtxo`
|
||||
- Added `WeightedUtxo` to replace floating `(UTXO, usize)`.
|
||||
- Added `Utxo` enum to incorporate both local utxos and foreign utxos
|
||||
- Added `TxBuilder::add_foreign_utxo` which allows adding a utxo external to the wallet.
|
||||
|
||||
### CLI
|
||||
#### Changed
|
||||
- Remove `cli.rs` module, `cli-utils` feature and `repl.rs` example; moved to new [`bdk-cli`](https://github.com/bitcoindevkit/bdk-cli) repository
|
||||
|
||||
## [v0.2.0]
|
||||
|
||||
### Project
|
||||
#### Added
|
||||
- Add CONTRIBUTING.md
|
||||
@@ -209,5 +612,34 @@ and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0
|
||||
- Use `MemoryDatabase` in the compiler example
|
||||
- Make the REPL return JSON
|
||||
|
||||
[unreleased]: https://github.com/bitcoindevkit/bdk/compare/0.1.0-beta.1...HEAD
|
||||
[0.1.0-beta.1]: https://github.com/bitcoindevkit/bdk/compare/96c87ea5...0.1.0-beta.1
|
||||
[v0.2.0]: https://github.com/bitcoindevkit/bdk/compare/0.1.0-beta.1...v0.2.0
|
||||
[v0.3.0]: https://github.com/bitcoindevkit/bdk/compare/v0.2.0...v0.3.0
|
||||
[v0.4.0]: https://github.com/bitcoindevkit/bdk/compare/v0.3.0...v0.4.0
|
||||
[v0.5.0]: https://github.com/bitcoindevkit/bdk/compare/v0.4.0...v0.5.0
|
||||
[v0.5.1]: https://github.com/bitcoindevkit/bdk/compare/v0.5.0...v0.5.1
|
||||
[v0.6.0]: https://github.com/bitcoindevkit/bdk/compare/v0.5.1...v0.6.0
|
||||
[v0.7.0]: https://github.com/bitcoindevkit/bdk/compare/v0.6.0...v0.7.0
|
||||
[v0.8.0]: https://github.com/bitcoindevkit/bdk/compare/v0.7.0...v0.8.0
|
||||
[v0.9.0]: https://github.com/bitcoindevkit/bdk/compare/v0.8.0...v0.9.0
|
||||
[v0.10.0]: https://github.com/bitcoindevkit/bdk/compare/v0.9.0...v0.10.0
|
||||
[v0.11.0]: https://github.com/bitcoindevkit/bdk/compare/v0.10.0...v0.11.0
|
||||
[v0.12.0]: https://github.com/bitcoindevkit/bdk/compare/v0.11.0...v0.12.0
|
||||
[v0.13.0]: https://github.com/bitcoindevkit/bdk/compare/v0.12.0...v0.13.0
|
||||
[v0.14.0]: https://github.com/bitcoindevkit/bdk/compare/v0.13.0...v0.14.0
|
||||
[v0.15.0]: https://github.com/bitcoindevkit/bdk/compare/v0.14.0...v0.15.0
|
||||
[v0.16.0]: https://github.com/bitcoindevkit/bdk/compare/v0.15.0...v0.16.0
|
||||
[v0.16.1]: https://github.com/bitcoindevkit/bdk/compare/v0.16.0...v0.16.1
|
||||
[v0.17.0]: https://github.com/bitcoindevkit/bdk/compare/v0.16.1...v0.17.0
|
||||
[v0.18.0]: https://github.com/bitcoindevkit/bdk/compare/v0.17.0...v0.18.0
|
||||
[v0.19.0]: https://github.com/bitcoindevkit/bdk/compare/v0.18.0...v0.19.0
|
||||
[v0.20.0]: https://github.com/bitcoindevkit/bdk/compare/v0.19.0...v0.20.0
|
||||
[v0.21.0]: https://github.com/bitcoindevkit/bdk/compare/v0.20.0...v0.21.0
|
||||
[v0.22.0]: https://github.com/bitcoindevkit/bdk/compare/v0.21.0...v0.22.0
|
||||
[v0.23.0]: https://github.com/bitcoindevkit/bdk/compare/v0.22.0...v0.23.0
|
||||
[v0.24.0]: https://github.com/bitcoindevkit/bdk/compare/v0.23.0...v0.24.0
|
||||
[v0.25.0]: https://github.com/bitcoindevkit/bdk/compare/v0.24.0...v0.25.0
|
||||
[v0.26.0]: https://github.com/bitcoindevkit/bdk/compare/v0.25.0...v0.26.0
|
||||
[v0.27.0]: https://github.com/bitcoindevkit/bdk/compare/v0.26.0...v0.27.0
|
||||
[v0.27.1]: https://github.com/bitcoindevkit/bdk/compare/v0.27.0...v0.27.1
|
||||
[Unreleased]: https://github.com/bitcoindevkit/bdk/compare/v0.27.1...HEAD
|
||||
|
||||
@@ -10,7 +10,7 @@ Anyone is invited to contribute without regard to technical experience,
|
||||
cryptocurrencies demands a high level of rigor, adversarial thinking, thorough
|
||||
testing and risk-minimization.
|
||||
Any bug may cost users real money. That being said, we deeply welcome people
|
||||
contributing for the first time to an open source project or pick up Rust while
|
||||
contributing for the first time to an open source project or picking up Rust while
|
||||
contributing. Don't be shy, you'll learn.
|
||||
|
||||
Communications Channels
|
||||
@@ -46,7 +46,7 @@ Every new feature should be covered by functional tests where possible.
|
||||
When refactoring, structure your PR to make it easy to review and don't
|
||||
hesitate to split it into multiple small, focused PRs.
|
||||
|
||||
The Minimal Supported Rust Version is 1.45 (enforced by our CI).
|
||||
The Minimal Supported Rust Version is 1.46 (enforced by our CI).
|
||||
|
||||
Commits should cover both the issue fixed and the solution's rationale.
|
||||
These [guidelines](https://chris.beams.io/posts/git-commit/) should be kept in mind.
|
||||
@@ -57,6 +57,21 @@ comment suggesting that you're working on it. If someone is already assigned,
|
||||
don't hesitate to ask if the assigned party or previous commenters are still
|
||||
working on it if it has been awhile.
|
||||
|
||||
Deprecation policy
|
||||
------------------
|
||||
|
||||
Where possible, breaking existing APIs should be avoided. Instead, add new APIs and
|
||||
use [`#[deprecated]`](https://github.com/rust-lang/rfcs/blob/master/text/1270-deprecation.md)
|
||||
to discourage use of the old one.
|
||||
|
||||
Deprecated APIs are typically maintained for one release cycle. In other words, an
|
||||
API that has been deprecated with the 0.10 release can be expected to be removed in the
|
||||
0.11 release. This allows for smoother upgrades without incurring too much technical
|
||||
debt inside this library.
|
||||
|
||||
If you deprecated an API as part of a contribution, we encourage you to "own" that API
|
||||
and send a follow-up to remove it as part of the next release cycle.
|
||||
|
||||
Peer review
|
||||
-----------
|
||||
|
||||
|
||||
110
Cargo.toml
110
Cargo.toml
@@ -1,96 +1,18 @@
|
||||
[package]
|
||||
name = "bdk"
|
||||
version = "0.2.0"
|
||||
edition = "2018"
|
||||
authors = ["Alekos Filini <alekos.filini@gmail.com>", "Riccardo Casatta <riccardo@casatta.it>"]
|
||||
homepage = "https://bitcoindevkit.org"
|
||||
repository = "https://github.com/bitcoindevkit/bdk"
|
||||
documentation = "https://docs.rs/bdk"
|
||||
description = "A modern, lightweight, descriptor-based wallet library"
|
||||
keywords = ["bitcoin", "wallet", "descriptor", "psbt"]
|
||||
readme = "README.md"
|
||||
license-file = "LICENSE"
|
||||
|
||||
[dependencies]
|
||||
bdk-macros = "0.2"
|
||||
log = "^0.4"
|
||||
miniscript = "4.0"
|
||||
bitcoin = { version = "^0.25.2", features = ["use-serde"] }
|
||||
serde = { version = "^1.0", features = ["derive"] }
|
||||
serde_json = { version = "^1.0" }
|
||||
rand = "^0.7"
|
||||
|
||||
# Optional dependencies
|
||||
sled = { version = "0.34", optional = true }
|
||||
electrum-client = { version = "0.4.0-beta.1", optional = true }
|
||||
reqwest = { version = "0.10", optional = true, features = ["json"] }
|
||||
futures = { version = "0.3", optional = true }
|
||||
clap = { version = "2.33", optional = true }
|
||||
base64 = { version = "^0.11", optional = true }
|
||||
async-trait = { version = "0.1", optional = true }
|
||||
rocksdb = { version = "0.14", optional = true }
|
||||
# pin cc version to 1.0.62 because 1.0.63 break rocksdb build
|
||||
cc = { version = "=1.0.62", optional = true }
|
||||
socks = { version = "0.3", optional = true }
|
||||
lazy_static = { version = "1.4", optional = true }
|
||||
tiny-bip39 = { version = "^0.8", optional = true }
|
||||
structopt = { version = "^0.3", optional = true }
|
||||
|
||||
# Platform-specific dependencies
|
||||
[target.'cfg(not(target_arch = "wasm32"))'.dependencies]
|
||||
tokio = { version = "0.2", features = ["rt-core"] }
|
||||
|
||||
[target.'cfg(target_arch = "wasm32")'.dependencies]
|
||||
async-trait = "0.1"
|
||||
js-sys = "0.3"
|
||||
rand = { version = "^0.7", features = ["wasm-bindgen"] }
|
||||
|
||||
[features]
|
||||
minimal = []
|
||||
compiler = ["clap", "miniscript/compiler"]
|
||||
default = ["key-value-db", "electrum"]
|
||||
electrum = ["electrum-client"]
|
||||
esplora = ["reqwest", "futures"]
|
||||
compact_filters = ["rocksdb", "socks", "lazy_static", "cc"]
|
||||
key-value-db = ["sled"]
|
||||
cli-utils = ["clap", "base64", "structopt"]
|
||||
async-interface = ["async-trait"]
|
||||
all-keys = ["keys-bip39"]
|
||||
keys-bip39 = ["tiny-bip39"]
|
||||
|
||||
# Debug/Test features
|
||||
debug-proc-macros = ["bdk-macros/debug", "bdk-testutils-macros/debug"]
|
||||
test-electrum = ["electrum"]
|
||||
test-md-docs = ["base64", "electrum"]
|
||||
|
||||
[dev-dependencies]
|
||||
bdk-testutils = "0.2"
|
||||
bdk-testutils-macros = "0.2"
|
||||
serial_test = "0.4"
|
||||
lazy_static = "1.4"
|
||||
rustyline = "6.0"
|
||||
dirs-next = "2.0"
|
||||
env_logger = "0.7"
|
||||
|
||||
[[example]]
|
||||
name = "repl"
|
||||
required-features = ["cli-utils"]
|
||||
[[example]]
|
||||
name = "parse_descriptor"
|
||||
[[example]]
|
||||
name = "address_validator"
|
||||
|
||||
[[example]]
|
||||
name = "miniscriptc"
|
||||
path = "examples/compiler.rs"
|
||||
required-features = ["compiler"]
|
||||
|
||||
[workspace]
|
||||
members = ["macros", "testutils", "testutils-macros"]
|
||||
members = [
|
||||
"crates/bdk",
|
||||
"crates/chain",
|
||||
"crates/file_store",
|
||||
"crates/electrum",
|
||||
"example-crates/keychain_tracker_electrum",
|
||||
"example-crates/keychain_tracker_esplora",
|
||||
"example-crates/keychain_tracker_example_cli",
|
||||
"example-crates/wallet_electrum",
|
||||
"example-crates/wallet_esplora",
|
||||
"example-crates/wallet_esplora_async",
|
||||
"nursery/tmp_plan",
|
||||
"nursery/coin_select"
|
||||
]
|
||||
|
||||
# Generate docs with nightly to add the "features required" badge
|
||||
# https://stackoverflow.com/questions/61417452/how-to-get-a-feature-requirement-tag-in-the-documentation-generated-by-cargo-do
|
||||
[package.metadata.docs.rs]
|
||||
features = ["compiler", "electrum", "esplora", "compact_filters", "key-value-db", "all-keys"]
|
||||
# defines the configuration attribute `docsrs`
|
||||
rustdoc-args = ["--cfg", "docsrs"]
|
||||
[workspace.package]
|
||||
authors = ["Bitcoin Dev Kit Developers"]
|
||||
|
||||
@@ -1,46 +1,16 @@
|
||||
# Development Cycle
|
||||
|
||||
This project follows a regular releasing schedule similar to the one [used by the Rust language](https://doc.rust-lang.org/book/appendix-07-nightly-rust.html). In short, this means that a new release is made at a regular
|
||||
cadence, with all the feature/bugfixes that made it to `master` in time. This ensures that we don't keep delaying releases waiting for "just one more little thing".
|
||||
This project follows a regular releasing schedule similar to the one [used by the Rust language]. In short, this means that a new release is made at a regular cadence, with all the feature/bugfixes that made it to `master` in time. This ensures that we don't keep delaying releases waiting for "just one more little thing".
|
||||
|
||||
We decided to maintain a faster release cycle while the library is still in "beta", i.e. before release `1.0.0`: since we are constantly adding new features and, even more importantly, fixing issues, we want developers
|
||||
to have access to those updates as fast as possible. For this reason we will make a release **every 4 weeks**.
|
||||
This project uses [Semantic Versioning], but is currently at MAJOR version zero (0.y.z) meaning it is still in initial development. Anything MAY change at any time. The public API SHOULD NOT be considered stable. Until we reach version `1.0.0` we will do our best to document any breaking API changes in the changelog info attached to each release tag.
|
||||
|
||||
Once the project will have reached a more mature state (>= `1.0.0`), we will very likely switch to longer release cycles of **6 weeks**.
|
||||
We decided to maintain a faster release cycle while the library is still in "beta", i.e. before release `1.0.0`: since we are constantly adding new features and, even more importantly, fixing issues, we want developers to have access to those updates as fast as possible. For this reason we will make a release **every 4 weeks**.
|
||||
|
||||
The "feature freeze" will happen **one week before the release date**. This means a new branch will be created originating from the `master` tip at that time, and in that branch we will stop adding new features and only focus
|
||||
on ensuring the ones we've added are working properly.
|
||||
Once the project reaches a more mature state (>= `1.0.0`), we will very likely switch to longer release cycles of **6 weeks**.
|
||||
|
||||
```
|
||||
master: - - - - * - - - * - - - - - - * - - - * ...
|
||||
| / | |
|
||||
release/0.x.0: * - - # | |
|
||||
| /
|
||||
release/0.y.0: * - - #
|
||||
```
|
||||
The "feature freeze" will happen **one week before the release date**. This means a new branch will be created originating from the `master` tip at that time, and in that branch we will stop adding new features and only focus on ensuring the ones we've added are working properly.
|
||||
|
||||
As soon as the release is tagged and published, the `release` branch will be merged back into `master` to update the version in the `Cargo.toml` to apply the new `Cargo.toml` version and all the other fixes made during the feature
|
||||
freeze window.
|
||||
To create a new release a release manager will create a new issue using the `Release` template and follow the template instructions.
|
||||
|
||||
## Making the Release
|
||||
|
||||
What follows are notes and procedures that maintainers can refer to when making releases. All the commits and tags must be signed and, ideally, also [timestamped](https://github.com/opentimestamps/opentimestamps-client/blob/master/doc/git-integration.md).
|
||||
|
||||
Pre-`v1.0.0` our "major" releases only affect the "minor" semver value. Accordingly, our "minor" releases will only affect the "patch" value.
|
||||
|
||||
1. Create a new branch called `release/x.y.z` from `master`. Double check that your local `master` is up-to-date with the upstream repo before doing so.
|
||||
2. Make a commit on the release branch to bump the version to `x.y.z-rc.1`. The message should be "Bump version to x.y.z-rc.1".
|
||||
3. Push the new branch to `bitcoindevkit/bdk` on GitHub.
|
||||
4. During the one week of feature freeze run additional tests on the release branch
|
||||
5. If a bug is found:
|
||||
- If it's a minor issue you can just fix it in the release branch, since it will be merged back to `master` eventually
|
||||
- For bigger issues you can fix them on `master` and then *cherry-pick* the commit to the release branch
|
||||
6. On release day, make a commit on the release branch to bump the version to `x.y.z`. The message should be "Bump version to x.y.z".
|
||||
7. Add a tag to this commit. The tag name should be `vx.y.z` (for example `v0.5.0`), and the message "Release x.y.z". Make sure the tag is signed, for extra safety use the explicit `--sign` flag.
|
||||
8. Push the new commits to the upstream release branch, wait for the CI to finish one last time.
|
||||
9. Publish **all** the updated crates to crates.io.
|
||||
10. Make a new commit to bump the version value to `x.y.(z+1)-dev`. The message should be "Bump version to x.y.(z+1)-dev".
|
||||
11. Merge the release branch back into `master`.
|
||||
12. Make sure the new release shows up on crates.io and that the docs are built correctly on docs.rs.
|
||||
13. Announce the release on Twitter, Discord and Telegram.
|
||||
14. Celebrate :tada:
|
||||
[used by the Rust language]: https://doc.rust-lang.org/book/appendix-07-nightly-rust.html
|
||||
[Semantic Versioning]: https://semver.org/
|
||||
|
||||
21
LICENSE
21
LICENSE
@@ -1,21 +0,0 @@
|
||||
MIT License
|
||||
|
||||
Copyright (c) 2020 Magical Bitcoin
|
||||
|
||||
Permission is hereby granted, free of charge, to any person obtaining a copy
|
||||
of this software and associated documentation files (the "Software"), to deal
|
||||
in the Software without restriction, including without limitation the rights
|
||||
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
|
||||
copies of the Software, and to permit persons to whom the Software is
|
||||
furnished to do so, subject to the following conditions:
|
||||
|
||||
The above copyright notice and this permission notice shall be included in all
|
||||
copies or substantial portions of the Software.
|
||||
|
||||
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
|
||||
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
|
||||
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
|
||||
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
|
||||
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
|
||||
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
|
||||
SOFTWARE.
|
||||
148
README.md
148
README.md
@@ -1,148 +1,54 @@
|
||||
# The Bitcoin Dev Kit
|
||||
|
||||
<div align="center">
|
||||
<h1>BDK</h1>
|
||||
|
||||
<img src="./static/bdk.svg" width="220" />
|
||||
<img src="./static/bdk.png" width="220" />
|
||||
|
||||
<p>
|
||||
<strong>A modern, lightweight, descriptor-based wallet library written in Rust!</strong>
|
||||
</p>
|
||||
|
||||
<p>
|
||||
<!-- <a href="https://crates.io/crates/magical"><img alt="Crate Info" src="https://img.shields.io/crates/v/magical.svg"/></a> -->
|
||||
<a href="https://crates.io/crates/bdk"><img alt="Crate Info" src="https://img.shields.io/crates/v/bdk.svg"/></a>
|
||||
<a href="https://github.com/bitcoindevkit/bdk/blob/master/LICENSE"><img alt="MIT or Apache-2.0 Licensed" src="https://img.shields.io/badge/license-MIT%2FApache--2.0-blue.svg"/></a>
|
||||
<a href="https://github.com/bitcoindevkit/bdk/actions?query=workflow%3ACI"><img alt="CI Status" src="https://github.com/bitcoindevkit/bdk/workflows/CI/badge.svg"></a>
|
||||
<a href="https://codecov.io/gh/bitcoindevkit/bdk"><img src="https://codecov.io/gh/bitcoindevkit/bdk/branch/master/graph/badge.svg"/></a>
|
||||
<a href="https://bitcoindevkit.org/docs-rs/bdk"><img alt="API Docs" src="https://img.shields.io/badge/docs.rs-bdk-green"/></a>
|
||||
<a href="https://blog.rust-lang.org/2020/07/16/Rust-1.45.0.html"><img alt="Rustc Version 1.45+" src="https://img.shields.io/badge/rustc-1.45%2B-lightgrey.svg"/></a>
|
||||
<a href="https://coveralls.io/github/bitcoindevkit/bdk?branch=master"><img src="https://coveralls.io/repos/github/bitcoindevkit/bdk/badge.svg?branch=master"/></a>
|
||||
<a href="https://docs.rs/bdk"><img alt="API Docs" src="https://img.shields.io/badge/docs.rs-bdk-green"/></a>
|
||||
<a href="https://blog.rust-lang.org/2021/12/02/Rust-1.57.0.html"><img alt="Rustc Version 1.57.0+" src="https://img.shields.io/badge/rustc-1.57.0%2B-lightgrey.svg"/></a>
|
||||
<a href="https://discord.gg/d7NkDKm"><img alt="Chat on Discord" src="https://img.shields.io/discord/753336465005608961?logo=discord"></a>
|
||||
</p>
|
||||
|
||||
<h4>
|
||||
<a href="https://bitcoindevkit.org">Project Homepage</a>
|
||||
<span> | </span>
|
||||
<a href="https://bitcoindevkit.org/docs-rs/bdk">Documentation</a>
|
||||
<a href="https://docs.rs/bdk">Documentation</a>
|
||||
</h4>
|
||||
</div>
|
||||
|
||||
## About
|
||||
|
||||
The `bdk` library aims to be the core building block for Bitcoin wallets of any kind.
|
||||
The `bdk` libraries aims to provide well engineered and reviewed components for Bitcoin based applications.
|
||||
It is built upon the excellent [`rust-bitcoin`] and [`rust-miniscript`] crates.
|
||||
|
||||
* It uses [Miniscript](https://github.com/rust-bitcoin/rust-miniscript) to support descriptors with generalized conditions. This exact same library can be used to build
|
||||
single-sig wallets, multisigs, timelocked contracts and more.
|
||||
* It supports multiple blockchain backends and databases, allowing developers to choose exactly what's right for their projects.
|
||||
* It's built to be cross-platform: the core logic works on desktop, mobile, and even WebAssembly.
|
||||
* It's very easy to extend: developers can implement customized logic for blockchain backends, databases, signers, coin selection, and more, without having to fork and modify this library.
|
||||
> ⚠ The Bitcoin Dev Kit developers are in the process of releasing a `v1.0` which is a fundamental re-write of how the library works.
|
||||
> See for some background on this project: https://bitcoindevkit.org/blog/road-to-bdk-1/ (ignore the timeline 😁)
|
||||
> For a release timeline see the [`bdk_core_staging`] repo where a lot of the component work is being done. The plan is that everything in the `bdk_core_staging` repo will be moved into the `crates` directory here.
|
||||
|
||||
## Examples
|
||||
## Architecture
|
||||
|
||||
### Sync the balance of a descriptor
|
||||
The project is split up into several crates in the `/crates` directory:
|
||||
|
||||
```rust,no_run
|
||||
use bdk::Wallet;
|
||||
use bdk::database::MemoryDatabase;
|
||||
use bdk::blockchain::{noop_progress, ElectrumBlockchain};
|
||||
- [`bdk`](./crates/bdk): Contains the central high level `Wallet` type that is built from the low-level mechanisms provided by the other components
|
||||
- [`chain`](./crates/chain): Tools for storing and indexing chain data
|
||||
- [`file_store`](./crates/file_store): An (experimental) persistence backend for storing chain data in a single file.
|
||||
- [`esplora`](./crates/esplora): Extends the [`esplora-client`] crate with methods to fetch chain data from an esplora HTTP server in the form that [`bdk_chain`] and `Wallet` can consume.
|
||||
- [`electrum`](./crates/electrum): Extends the [`electrum-client`] crate with methods to fetch chain data from an electrum server in the form that [`bdk_chain`] and `Wallet` can consume.
|
||||
|
||||
use bdk::electrum_client::Client;
|
||||
Fully working examples of how to use these components are in `/example-crates`
|
||||
|
||||
fn main() -> Result<(), bdk::Error> {
|
||||
let client = Client::new("ssl://electrum.blockstream.info:60002")?;
|
||||
let wallet = Wallet::new(
|
||||
"wpkh([c258d2e4/84h/1h/0h]tpubDDYkZojQFQjht8Tm4jsS3iuEmKjTiEGjG6KnuFNKKJb5A6ZUCUZKdvLdSDWofKi4ToRCwb9poe1XdqfUnP4jaJjCB2Zwv11ZLgSbnZSNecE/0/*)",
|
||||
Some("wpkh([c258d2e4/84h/1h/0h]tpubDDYkZojQFQjht8Tm4jsS3iuEmKjTiEGjG6KnuFNKKJb5A6ZUCUZKdvLdSDWofKi4ToRCwb9poe1XdqfUnP4jaJjCB2Zwv11ZLgSbnZSNecE/1/*)"),
|
||||
bitcoin::Network::Testnet,
|
||||
MemoryDatabase::default(),
|
||||
ElectrumBlockchain::from(client)
|
||||
)?;
|
||||
|
||||
wallet.sync(noop_progress(), None)?;
|
||||
|
||||
println!("Descriptor balance: {} SAT", wallet.get_balance()?);
|
||||
|
||||
Ok(())
|
||||
}
|
||||
```
|
||||
|
||||
### Generate a few addresses
|
||||
|
||||
```rust
|
||||
use bdk::{Wallet, OfflineWallet};
|
||||
use bdk::database::MemoryDatabase;
|
||||
|
||||
fn main() -> Result<(), bdk::Error> {
|
||||
let wallet: OfflineWallet<_> = Wallet::new_offline(
|
||||
"wpkh([c258d2e4/84h/1h/0h]tpubDDYkZojQFQjht8Tm4jsS3iuEmKjTiEGjG6KnuFNKKJb5A6ZUCUZKdvLdSDWofKi4ToRCwb9poe1XdqfUnP4jaJjCB2Zwv11ZLgSbnZSNecE/0/*)",
|
||||
Some("wpkh([c258d2e4/84h/1h/0h]tpubDDYkZojQFQjht8Tm4jsS3iuEmKjTiEGjG6KnuFNKKJb5A6ZUCUZKdvLdSDWofKi4ToRCwb9poe1XdqfUnP4jaJjCB2Zwv11ZLgSbnZSNecE/1/*)"),
|
||||
bitcoin::Network::Testnet,
|
||||
MemoryDatabase::default(),
|
||||
)?;
|
||||
|
||||
println!("Address #0: {}", wallet.get_new_address()?);
|
||||
println!("Address #1: {}", wallet.get_new_address()?);
|
||||
println!("Address #2: {}", wallet.get_new_address()?);
|
||||
|
||||
Ok(())
|
||||
}
|
||||
```
|
||||
|
||||
### Create a transaction
|
||||
|
||||
```rust,no_run
|
||||
use bdk::{FeeRate, TxBuilder, Wallet};
|
||||
use bdk::database::MemoryDatabase;
|
||||
use bdk::blockchain::{noop_progress, ElectrumBlockchain};
|
||||
|
||||
use bdk::electrum_client::Client;
|
||||
|
||||
use bitcoin::consensus::serialize;
|
||||
|
||||
fn main() -> Result<(), bdk::Error> {
|
||||
let client = Client::new("ssl://electrum.blockstream.info:60002")?;
|
||||
let wallet = Wallet::new(
|
||||
"wpkh([c258d2e4/84h/1h/0h]tpubDDYkZojQFQjht8Tm4jsS3iuEmKjTiEGjG6KnuFNKKJb5A6ZUCUZKdvLdSDWofKi4ToRCwb9poe1XdqfUnP4jaJjCB2Zwv11ZLgSbnZSNecE/0/*)",
|
||||
Some("wpkh([c258d2e4/84h/1h/0h]tpubDDYkZojQFQjht8Tm4jsS3iuEmKjTiEGjG6KnuFNKKJb5A6ZUCUZKdvLdSDWofKi4ToRCwb9poe1XdqfUnP4jaJjCB2Zwv11ZLgSbnZSNecE/1/*)"),
|
||||
bitcoin::Network::Testnet,
|
||||
MemoryDatabase::default(),
|
||||
ElectrumBlockchain::from(client)
|
||||
)?;
|
||||
|
||||
wallet.sync(noop_progress(), None)?;
|
||||
|
||||
let send_to = wallet.get_new_address()?;
|
||||
let (psbt, details) = wallet.create_tx(
|
||||
TxBuilder::with_recipients(vec![(send_to.script_pubkey(), 50_000)])
|
||||
.enable_rbf()
|
||||
.do_not_spend_change()
|
||||
.fee_rate(FeeRate::from_sat_per_vb(5.0))
|
||||
)?;
|
||||
|
||||
println!("Transaction details: {:#?}", details);
|
||||
println!("Unsigned PSBT: {}", base64::encode(&serialize(&psbt)));
|
||||
|
||||
Ok(())
|
||||
}
|
||||
```
|
||||
|
||||
### Sign a transaction
|
||||
|
||||
```rust,no_run
|
||||
use bdk::{Wallet, OfflineWallet};
|
||||
use bdk::database::MemoryDatabase;
|
||||
|
||||
use bitcoin::consensus::deserialize;
|
||||
|
||||
fn main() -> Result<(), bdk::Error> {
|
||||
let wallet: OfflineWallet<_> = Wallet::new_offline(
|
||||
"wpkh([c258d2e4/84h/1h/0h]tprv8griRPhA7342zfRyB6CqeKF8CJDXYu5pgnj1cjL1u2ngKcJha5jjTRimG82ABzJQ4MQe71CV54xfn25BbhCNfEGGJZnxvCDQCd6JkbvxW6h/0/*)",
|
||||
Some("wpkh([c258d2e4/84h/1h/0h]tprv8griRPhA7342zfRyB6CqeKF8CJDXYu5pgnj1cjL1u2ngKcJha5jjTRimG82ABzJQ4MQe71CV54xfn25BbhCNfEGGJZnxvCDQCd6JkbvxW6h/1/*)"),
|
||||
bitcoin::Network::Testnet,
|
||||
MemoryDatabase::default(),
|
||||
)?;
|
||||
|
||||
let psbt = "...";
|
||||
let psbt = deserialize(&base64::decode(psbt).unwrap())?;
|
||||
|
||||
let (signed_psbt, finalized) = wallet.sign(psbt, None)?;
|
||||
|
||||
Ok(())
|
||||
}
|
||||
```
|
||||
[`bdk_core_staging`]: https://github.com/LLFourn/bdk_core_staging
|
||||
[`rust-miniscript`]: https://github.com/rust-bitcoin/rust-miniscript
|
||||
[`rust-bitcoin`]: https://github.com/rust-bitcoin/rust-bitcoin
|
||||
[`esplora-client`]: https://docs.rs/esplora-client/0.3.0/esplora_client/
|
||||
[`electrum-client`]: https://docs.rs/electrum-client/0.13.0/electrum_client/
|
||||
|
||||
9
ci/Dockerfile.ledger
Normal file
9
ci/Dockerfile.ledger
Normal file
@@ -0,0 +1,9 @@
|
||||
# Taken from bitcoindevkit/rust-hwi
|
||||
FROM ghcr.io/ledgerhq/speculos
|
||||
|
||||
RUN apt-get update
|
||||
RUN apt-get install wget -y
|
||||
RUN wget "https://github.com/LedgerHQ/speculos/blob/master/apps/nanos%23btc%232.1%231c8db8da.elf?raw=true" -O /speculos/btc.elf
|
||||
ADD automation.json /speculos/automation.json
|
||||
|
||||
ENTRYPOINT ["python", "./speculos.py", "--automation", "file:automation.json", "--model", "nanos", "--display", "headless", "--vnc-port", "41000", "btc.elf"]
|
||||
30
ci/automation.json
Normal file
30
ci/automation.json
Normal file
@@ -0,0 +1,30 @@
|
||||
{
|
||||
"version": 1,
|
||||
"rules": [
|
||||
{
|
||||
"regexp": "Address \\(\\d/\\d\\)|Message hash \\(\\d/\\d\\)|Confirm|Fees|Review|Amount",
|
||||
"actions": [
|
||||
[ "button", 2, true ],
|
||||
[ "button", 2, false ]
|
||||
]
|
||||
},
|
||||
{
|
||||
"text": "Sign",
|
||||
"conditions": [
|
||||
[ "seen", false ]
|
||||
],
|
||||
"actions": [
|
||||
[ "button", 2, true ],
|
||||
[ "button", 2, false ],
|
||||
[ "setbool", "seen", true ]
|
||||
]
|
||||
},
|
||||
{
|
||||
"regexp": "Approve|Sign|Accept",
|
||||
"actions": [
|
||||
[ "button", 3, true ],
|
||||
[ "button", 3, false ]
|
||||
]
|
||||
}
|
||||
]
|
||||
}
|
||||
@@ -1,17 +1,14 @@
|
||||
#!/usr/bin/env sh
|
||||
|
||||
echo "Starting bitcoin node."
|
||||
/root/bitcoind -regtest -server -daemon -fallbackfee=0.0002 -rpcuser=admin -rpcpassword=passw -rpcallowip=0.0.0.0/0 -rpcbind=0.0.0.0
|
||||
mkdir $GITHUB_WORKSPACE/.bitcoin
|
||||
/root/bitcoind -regtest -server -daemon -datadir=$GITHUB_WORKSPACE/.bitcoin -fallbackfee=0.0002 -rpcallowip=0.0.0.0/0 -rpcbind=0.0.0.0 -blockfilterindex=1 -peerblockfilters=1
|
||||
|
||||
echo "Waiting for bitcoin node."
|
||||
until /root/bitcoin-cli -regtest -rpcuser=admin -rpcpassword=passw getblockchaininfo; do
|
||||
until /root/bitcoin-cli -regtest -datadir=$GITHUB_WORKSPACE/.bitcoin getblockchaininfo; do
|
||||
sleep 1
|
||||
done
|
||||
|
||||
/root/bitcoin-cli -regtest -datadir=$GITHUB_WORKSPACE/.bitcoin createwallet $BDK_RPC_WALLET
|
||||
echo "Generating 150 bitcoin blocks."
|
||||
ADDR=$(/root/bitcoin-cli -regtest -rpcuser=admin -rpcpassword=passw getnewaddress)
|
||||
/root/bitcoin-cli -regtest -rpcuser=admin -rpcpassword=passw generatetoaddress 150 $ADDR
|
||||
|
||||
echo "Starting electrs node."
|
||||
nohup /root/electrs --network regtest --jsonrpc-import &
|
||||
sleep 5
|
||||
ADDR=$(/root/bitcoin-cli -regtest -datadir=$GITHUB_WORKSPACE/.bitcoin -rpcwallet=$BDK_RPC_WALLET getnewaddress)
|
||||
/root/bitcoin-cli -regtest -datadir=$GITHUB_WORKSPACE/.bitcoin generatetoaddress 150 $ADDR
|
||||
|
||||
1
clippy.toml
Normal file
1
clippy.toml
Normal file
@@ -0,0 +1 @@
|
||||
msrv="1.57.0"
|
||||
69
crates/bdk/Cargo.toml
Normal file
69
crates/bdk/Cargo.toml
Normal file
@@ -0,0 +1,69 @@
|
||||
[package]
|
||||
name = "bdk"
|
||||
homepage = "https://bitcoindevkit.org"
|
||||
version = "1.0.0-alpha.0"
|
||||
repository = "https://github.com/bitcoindevkit/bdk"
|
||||
documentation = "https://docs.rs/bdk"
|
||||
description = "A modern, lightweight, descriptor-based wallet library"
|
||||
keywords = ["bitcoin", "wallet", "descriptor", "psbt"]
|
||||
readme = "README.md"
|
||||
license = "MIT OR Apache-2.0"
|
||||
authors = ["Bitcoin Dev Kit Developers"]
|
||||
edition = "2021"
|
||||
rust-version = "1.57"
|
||||
|
||||
[dependencies]
|
||||
log = "^0.4"
|
||||
rand = "^0.8"
|
||||
miniscript = { version = "9", features = ["serde"] }
|
||||
bitcoin = { version = "0.29", features = ["serde", "base64", "rand"] }
|
||||
serde = { version = "^1.0", features = ["derive"] }
|
||||
serde_json = { version = "^1.0" }
|
||||
bdk_chain = { path = "../chain", version = "0.4.0", features = ["miniscript", "serde"] }
|
||||
|
||||
# Optional dependencies
|
||||
hwi = { version = "0.5", optional = true, features = [ "use-miniscript"] }
|
||||
bip39 = { version = "1.0.1", optional = true }
|
||||
|
||||
[target.'cfg(target_arch = "wasm32")'.dependencies]
|
||||
getrandom = "0.2"
|
||||
js-sys = "0.3"
|
||||
|
||||
|
||||
[features]
|
||||
default = ["std"]
|
||||
std = []
|
||||
compiler = ["miniscript/compiler"]
|
||||
all-keys = ["keys-bip39"]
|
||||
keys-bip39 = ["bip39"]
|
||||
hardware-signer = ["hwi"]
|
||||
test-hardware-signer = ["hardware-signer"]
|
||||
|
||||
|
||||
# This feature is used to run `cargo check` in our CI targeting wasm. It's not recommended
|
||||
# for libraries to explicitly include the "getrandom/js" feature, so we only do it when
|
||||
# necessary for running our CI. See: https://docs.rs/getrandom/0.2.8/getrandom/#webassembly-support
|
||||
dev-getrandom-wasm = ["getrandom/js"]
|
||||
|
||||
[dev-dependencies]
|
||||
lazy_static = "1.4"
|
||||
env_logger = "0.7"
|
||||
# Move back to importing from rust-bitcoin once https://github.com/rust-bitcoin/rust-bitcoin/pull/1342 is released
|
||||
base64 = "^0.13"
|
||||
assert_matches = "1.5.0"
|
||||
|
||||
|
||||
[package.metadata.docs.rs]
|
||||
all-features = true
|
||||
rustdoc-args = ["--cfg", "docsrs"]
|
||||
|
||||
|
||||
[[example]]
|
||||
name = "mnemonic_to_descriptors"
|
||||
path = "examples/mnemonic_to_descriptors.rs"
|
||||
required-features = ["all-keys"]
|
||||
|
||||
[[example]]
|
||||
name = "miniscriptc"
|
||||
path = "examples/compiler.rs"
|
||||
required-features = ["compiler"]
|
||||
14
crates/bdk/LICENSE
Normal file
14
crates/bdk/LICENSE
Normal file
@@ -0,0 +1,14 @@
|
||||
This software is licensed under [Apache 2.0](LICENSE-APACHE) or
|
||||
[MIT](LICENSE-MIT), at your option.
|
||||
|
||||
Some files retain their own copyright notice, however, for full authorship
|
||||
information, see version control history.
|
||||
|
||||
Except as otherwise noted in individual files, all files in this repository are
|
||||
licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
|
||||
http://www.apache.org/licenses/LICENSE-2.0> or the MIT license <LICENSE-MIT or
|
||||
http://opensource.org/licenses/MIT>, at your option.
|
||||
|
||||
You may not use, copy, modify, merge, publish, distribute, sublicense, and/or
|
||||
sell copies of this software or any files in this repository except in
|
||||
accordance with one or both of these licenses.
|
||||
201
crates/bdk/LICENSE-APACHE
Normal file
201
crates/bdk/LICENSE-APACHE
Normal file
@@ -0,0 +1,201 @@
|
||||
Apache License
|
||||
Version 2.0, January 2004
|
||||
http://www.apache.org/licenses/
|
||||
|
||||
TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION
|
||||
|
||||
1. Definitions.
|
||||
|
||||
"License" shall mean the terms and conditions for use, reproduction,
|
||||
and distribution as defined by Sections 1 through 9 of this document.
|
||||
|
||||
"Licensor" shall mean the copyright owner or entity authorized by
|
||||
the copyright owner that is granting the License.
|
||||
|
||||
"Legal Entity" shall mean the union of the acting entity and all
|
||||
other entities that control, are controlled by, or are under common
|
||||
control with that entity. For the purposes of this definition,
|
||||
"control" means (i) the power, direct or indirect, to cause the
|
||||
direction or management of such entity, whether by contract or
|
||||
otherwise, or (ii) ownership of fifty percent (50%) or more of the
|
||||
outstanding shares, or (iii) beneficial ownership of such entity.
|
||||
|
||||
"You" (or "Your") shall mean an individual or Legal Entity
|
||||
exercising permissions granted by this License.
|
||||
|
||||
"Source" form shall mean the preferred form for making modifications,
|
||||
including but not limited to software source code, documentation
|
||||
source, and configuration files.
|
||||
|
||||
"Object" form shall mean any form resulting from mechanical
|
||||
transformation or translation of a Source form, including but
|
||||
not limited to compiled object code, generated documentation,
|
||||
and conversions to other media types.
|
||||
|
||||
"Work" shall mean the work of authorship, whether in Source or
|
||||
Object form, made available under the License, as indicated by a
|
||||
copyright notice that is included in or attached to the work
|
||||
(an example is provided in the Appendix below).
|
||||
|
||||
"Derivative Works" shall mean any work, whether in Source or Object
|
||||
form, that is based on (or derived from) the Work and for which the
|
||||
editorial revisions, annotations, elaborations, or other modifications
|
||||
represent, as a whole, an original work of authorship. For the purposes
|
||||
of this License, Derivative Works shall not include works that remain
|
||||
separable from, or merely link (or bind by name) to the interfaces of,
|
||||
the Work and Derivative Works thereof.
|
||||
|
||||
"Contribution" shall mean any work of authorship, including
|
||||
the original version of the Work and any modifications or additions
|
||||
to that Work or Derivative Works thereof, that is intentionally
|
||||
submitted to Licensor for inclusion in the Work by the copyright owner
|
||||
or by an individual or Legal Entity authorized to submit on behalf of
|
||||
the copyright owner. For the purposes of this definition, "submitted"
|
||||
means any form of electronic, verbal, or written communication sent
|
||||
to the Licensor or its representatives, including but not limited to
|
||||
communication on electronic mailing lists, source code control systems,
|
||||
and issue tracking systems that are managed by, or on behalf of, the
|
||||
Licensor for the purpose of discussing and improving the Work, but
|
||||
excluding communication that is conspicuously marked or otherwise
|
||||
designated in writing by the copyright owner as "Not a Contribution."
|
||||
|
||||
"Contributor" shall mean Licensor and any individual or Legal Entity
|
||||
on behalf of whom a Contribution has been received by Licensor and
|
||||
subsequently incorporated within the Work.
|
||||
|
||||
2. Grant of Copyright License. Subject to the terms and conditions of
|
||||
this License, each Contributor hereby grants to You a perpetual,
|
||||
worldwide, non-exclusive, no-charge, royalty-free, irrevocable
|
||||
copyright license to reproduce, prepare Derivative Works of,
|
||||
publicly display, publicly perform, sublicense, and distribute the
|
||||
Work and such Derivative Works in Source or Object form.
|
||||
|
||||
3. Grant of Patent License. Subject to the terms and conditions of
|
||||
this License, each Contributor hereby grants to You a perpetual,
|
||||
worldwide, non-exclusive, no-charge, royalty-free, irrevocable
|
||||
(except as stated in this section) patent license to make, have made,
|
||||
use, offer to sell, sell, import, and otherwise transfer the Work,
|
||||
where such license applies only to those patent claims licensable
|
||||
by such Contributor that are necessarily infringed by their
|
||||
Contribution(s) alone or by combination of their Contribution(s)
|
||||
with the Work to which such Contribution(s) was submitted. If You
|
||||
institute patent litigation against any entity (including a
|
||||
cross-claim or counterclaim in a lawsuit) alleging that the Work
|
||||
or a Contribution incorporated within the Work constitutes direct
|
||||
or contributory patent infringement, then any patent licenses
|
||||
granted to You under this License for that Work shall terminate
|
||||
as of the date such litigation is filed.
|
||||
|
||||
4. Redistribution. You may reproduce and distribute copies of the
|
||||
Work or Derivative Works thereof in any medium, with or without
|
||||
modifications, and in Source or Object form, provided that You
|
||||
meet the following conditions:
|
||||
|
||||
(a) You must give any other recipients of the Work or
|
||||
Derivative Works a copy of this License; and
|
||||
|
||||
(b) You must cause any modified files to carry prominent notices
|
||||
stating that You changed the files; and
|
||||
|
||||
(c) You must retain, in the Source form of any Derivative Works
|
||||
that You distribute, all copyright, patent, trademark, and
|
||||
attribution notices from the Source form of the Work,
|
||||
excluding those notices that do not pertain to any part of
|
||||
the Derivative Works; and
|
||||
|
||||
(d) If the Work includes a "NOTICE" text file as part of its
|
||||
distribution, then any Derivative Works that You distribute must
|
||||
include a readable copy of the attribution notices contained
|
||||
within such NOTICE file, excluding those notices that do not
|
||||
pertain to any part of the Derivative Works, in at least one
|
||||
of the following places: within a NOTICE text file distributed
|
||||
as part of the Derivative Works; within the Source form or
|
||||
documentation, if provided along with the Derivative Works; or,
|
||||
within a display generated by the Derivative Works, if and
|
||||
wherever such third-party notices normally appear. The contents
|
||||
of the NOTICE file are for informational purposes only and
|
||||
do not modify the License. You may add Your own attribution
|
||||
notices within Derivative Works that You distribute, alongside
|
||||
or as an addendum to the NOTICE text from the Work, provided
|
||||
that such additional attribution notices cannot be construed
|
||||
as modifying the License.
|
||||
|
||||
You may add Your own copyright statement to Your modifications and
|
||||
may provide additional or different license terms and conditions
|
||||
for use, reproduction, or distribution of Your modifications, or
|
||||
for any such Derivative Works as a whole, provided Your use,
|
||||
reproduction, and distribution of the Work otherwise complies with
|
||||
the conditions stated in this License.
|
||||
|
||||
5. Submission of Contributions. Unless You explicitly state otherwise,
|
||||
any Contribution intentionally submitted for inclusion in the Work
|
||||
by You to the Licensor shall be under the terms and conditions of
|
||||
this License, without any additional terms or conditions.
|
||||
Notwithstanding the above, nothing herein shall supersede or modify
|
||||
the terms of any separate license agreement you may have executed
|
||||
with Licensor regarding such Contributions.
|
||||
|
||||
6. Trademarks. This License does not grant permission to use the trade
|
||||
names, trademarks, service marks, or product names of the Licensor,
|
||||
except as required for reasonable and customary use in describing the
|
||||
origin of the Work and reproducing the content of the NOTICE file.
|
||||
|
||||
7. Disclaimer of Warranty. Unless required by applicable law or
|
||||
agreed to in writing, Licensor provides the Work (and each
|
||||
Contributor provides its Contributions) on an "AS IS" BASIS,
|
||||
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
|
||||
implied, including, without limitation, any warranties or conditions
|
||||
of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A
|
||||
PARTICULAR PURPOSE. You are solely responsible for determining the
|
||||
appropriateness of using or redistributing the Work and assume any
|
||||
risks associated with Your exercise of permissions under this License.
|
||||
|
||||
8. Limitation of Liability. In no event and under no legal theory,
|
||||
whether in tort (including negligence), contract, or otherwise,
|
||||
unless required by applicable law (such as deliberate and grossly
|
||||
negligent acts) or agreed to in writing, shall any Contributor be
|
||||
liable to You for damages, including any direct, indirect, special,
|
||||
incidental, or consequential damages of any character arising as a
|
||||
result of this License or out of the use or inability to use the
|
||||
Work (including but not limited to damages for loss of goodwill,
|
||||
work stoppage, computer failure or malfunction, or any and all
|
||||
other commercial damages or losses), even if such Contributor
|
||||
has been advised of the possibility of such damages.
|
||||
|
||||
9. Accepting Warranty or Additional Liability. While redistributing
|
||||
the Work or Derivative Works thereof, You may choose to offer,
|
||||
and charge a fee for, acceptance of support, warranty, indemnity,
|
||||
or other liability obligations and/or rights consistent with this
|
||||
License. However, in accepting such obligations, You may act only
|
||||
on Your own behalf and on Your sole responsibility, not on behalf
|
||||
of any other Contributor, and only if You agree to indemnify,
|
||||
defend, and hold each Contributor harmless for any liability
|
||||
incurred by, or claims asserted against, such Contributor by reason
|
||||
of your accepting any such warranty or additional liability.
|
||||
|
||||
END OF TERMS AND CONDITIONS
|
||||
|
||||
APPENDIX: How to apply the Apache License to your work.
|
||||
|
||||
To apply the Apache License to your work, attach the following
|
||||
boilerplate notice, with the fields enclosed by brackets "[]"
|
||||
replaced with your own identifying information. (Don't include
|
||||
the brackets!) The text should be enclosed in the appropriate
|
||||
comment syntax for the file format. We also recommend that a
|
||||
file or class name and description of purpose be included on the
|
||||
same "printed page" as the copyright notice for easier
|
||||
identification within third-party archives.
|
||||
|
||||
Copyright [yyyy] [name of copyright owner]
|
||||
|
||||
Licensed under the Apache License, Version 2.0 (the "License");
|
||||
you may not use this file except in compliance with the License.
|
||||
You may obtain a copy of the License at
|
||||
|
||||
http://www.apache.org/licenses/LICENSE-2.0
|
||||
|
||||
Unless required by applicable law or agreed to in writing, software
|
||||
distributed under the License is distributed on an "AS IS" BASIS,
|
||||
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
See the License for the specific language governing permissions and
|
||||
limitations under the License.
|
||||
16
crates/bdk/LICENSE-MIT
Normal file
16
crates/bdk/LICENSE-MIT
Normal file
@@ -0,0 +1,16 @@
|
||||
Permission is hereby granted, free of charge, to any person obtaining a copy of
|
||||
this software and associated documentation files (the "Software"), to deal in
|
||||
the Software without restriction, including without limitation the rights to
|
||||
use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of
|
||||
the Software, and to permit persons to whom the Software is furnished to do so,
|
||||
subject to the following conditions:
|
||||
|
||||
The above copyright notice and this permission notice shall be included in all
|
||||
copies or substantial portions of the Software.
|
||||
|
||||
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
|
||||
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS
|
||||
FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR
|
||||
COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER
|
||||
IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN
|
||||
CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
|
||||
227
crates/bdk/README.md
Normal file
227
crates/bdk/README.md
Normal file
@@ -0,0 +1,227 @@
|
||||
<div align="center">
|
||||
<h1>BDK</h1>
|
||||
|
||||
<img src="https://raw.githubusercontent.com/bitcoindevkit/bdk/master/static/bdk.png" width="220" />
|
||||
|
||||
<p>
|
||||
<strong>A modern, lightweight, descriptor-based wallet library written in Rust!</strong>
|
||||
</p>
|
||||
|
||||
<p>
|
||||
<a href="https://crates.io/crates/bdk"><img alt="Crate Info" src="https://img.shields.io/crates/v/bdk.svg"/></a>
|
||||
<a href="https://github.com/bitcoindevkit/bdk/blob/master/LICENSE"><img alt="MIT or Apache-2.0 Licensed" src="https://img.shields.io/badge/license-MIT%2FApache--2.0-blue.svg"/></a>
|
||||
<a href="https://github.com/bitcoindevkit/bdk/actions?query=workflow%3ACI"><img alt="CI Status" src="https://github.com/bitcoindevkit/bdk/workflows/CI/badge.svg"></a>
|
||||
<a href="https://coveralls.io/github/bitcoindevkit/bdk?branch=master"><img src="https://coveralls.io/repos/github/bitcoindevkit/bdk/badge.svg?branch=master"/></a>
|
||||
<a href="https://docs.rs/bdk"><img alt="API Docs" src="https://img.shields.io/badge/docs.rs-bdk-green"/></a>
|
||||
<a href="https://blog.rust-lang.org/2021/12/02/Rust-1.57.0.html"><img alt="Rustc Version 1.57.0+" src="https://img.shields.io/badge/rustc-1.57.0%2B-lightgrey.svg"/></a>
|
||||
<a href="https://discord.gg/d7NkDKm"><img alt="Chat on Discord" src="https://img.shields.io/discord/753336465005608961?logo=discord"></a>
|
||||
</p>
|
||||
|
||||
<h4>
|
||||
<a href="https://bitcoindevkit.org">Project Homepage</a>
|
||||
<span> | </span>
|
||||
<a href="https://docs.rs/bdk">Documentation</a>
|
||||
</h4>
|
||||
</div>
|
||||
|
||||
## `bdk`
|
||||
|
||||
The `bdk` crate provides the [`Wallet`](`crate::Wallet`) type which is a simple, high-level
|
||||
interface built from the low-level components of [`bdk_chain`]. `Wallet` is a good starting point
|
||||
for many simple applications as well as a good demonstration of how to use the other mechanisms to
|
||||
construct a wallet. It has two keychains (external and internal) which are defined by
|
||||
[miniscript descriptors][`rust-miniscript`] and uses them to generate addresses. When you give it
|
||||
chain data it also uses the descriptors to find transaction outputs owned by them. From there, you
|
||||
can create and sign transactions.
|
||||
|
||||
For more information, see the [`Wallet`'s documentation](https://docs.rs/bdk/latest/bdk/wallet/struct.Wallet.html).
|
||||
|
||||
### Blockchain data
|
||||
|
||||
In order to get blockchain data for `Wallet` to consume, you have to put it into particular form.
|
||||
Right now this is [`KeychainScan`] which is defined in [`bdk_chain`].
|
||||
|
||||
This can be created manually or from blockchain-scanning crates.
|
||||
|
||||
**Blockchain Data Sources**
|
||||
|
||||
* [`bdk_esplora`]: Grabs blockchain data from Esplora for updating BDK structures.
|
||||
* [`bdk_electrum`]: Grabs blockchain data from Electrum for updating BDK structures.
|
||||
|
||||
**Examples**
|
||||
|
||||
* [`example-crates/wallet_esplora`](https://github.com/bitcoindevkit/bdk/tree/master/example-crates/wallet_esplora)
|
||||
* [`example-crates/wallet_electrum`](https://github.com/bitcoindevkit/bdk/tree/master/example-crates/wallet_electrum)
|
||||
|
||||
### Persistence
|
||||
|
||||
To persist the `Wallet` on disk, `Wallet` needs to be constructed with a
|
||||
[`Persist`](https://docs.rs/bdk_chain/latest/bdk_chain/keychain/struct.KeychainPersist.html) implementation.
|
||||
|
||||
**Implementations**
|
||||
|
||||
* [`bdk_file_store`]: a simple flat-file implementation of `Persist`.
|
||||
|
||||
**Example**
|
||||
|
||||
```rust
|
||||
use bdk::{bitcoin::Network, wallet::{AddressIndex, Wallet}};
|
||||
|
||||
fn main() {
|
||||
// a type that implements `Persist`
|
||||
let db = ();
|
||||
|
||||
let descriptor = "wpkh(tprv8ZgxMBicQKsPdy6LMhUtFHAgpocR8GC6QmwMSFpZs7h6Eziw3SpThFfczTDh5rW2krkqffa11UpX3XkeTTB2FvzZKWXqPY54Y6Rq4AQ5R8L/84'/0'/0'/0/*)";
|
||||
let mut wallet = Wallet::new(descriptor, None, db, Network::Testnet).expect("should create");
|
||||
|
||||
// get a new address (this increments revealed derivation index)
|
||||
println!("revealed address: {}", wallet.get_address(AddressIndex::New));
|
||||
println!("staged changes: {:?}", wallet.staged());
|
||||
// persist changes
|
||||
wallet.commit().expect("must save");
|
||||
}
|
||||
```
|
||||
|
||||
<!-- ### Sync the balance of a descriptor -->
|
||||
|
||||
<!-- ```rust,no_run -->
|
||||
<!-- use bdk::Wallet; -->
|
||||
<!-- use bdk::blockchain::ElectrumBlockchain; -->
|
||||
<!-- use bdk::SyncOptions; -->
|
||||
<!-- use bdk::electrum_client::Client; -->
|
||||
<!-- use bdk::bitcoin::Network; -->
|
||||
|
||||
<!-- fn main() -> Result<(), bdk::Error> { -->
|
||||
<!-- let blockchain = ElectrumBlockchain::from(Client::new("ssl://electrum.blockstream.info:60002")?); -->
|
||||
<!-- let wallet = Wallet::new( -->
|
||||
<!-- "wpkh([c258d2e4/84h/1h/0h]tpubDDYkZojQFQjht8Tm4jsS3iuEmKjTiEGjG6KnuFNKKJb5A6ZUCUZKdvLdSDWofKi4ToRCwb9poe1XdqfUnP4jaJjCB2Zwv11ZLgSbnZSNecE/0/*)", -->
|
||||
<!-- Some("wpkh([c258d2e4/84h/1h/0h]tpubDDYkZojQFQjht8Tm4jsS3iuEmKjTiEGjG6KnuFNKKJb5A6ZUCUZKdvLdSDWofKi4ToRCwb9poe1XdqfUnP4jaJjCB2Zwv11ZLgSbnZSNecE/1/*)"), -->
|
||||
<!-- Network::Testnet, -->
|
||||
<!-- )?; -->
|
||||
|
||||
<!-- wallet.sync(&blockchain, SyncOptions::default())?; -->
|
||||
|
||||
<!-- println!("Descriptor balance: {} SAT", wallet.get_balance()?); -->
|
||||
|
||||
<!-- Ok(()) -->
|
||||
<!-- } -->
|
||||
<!-- ``` -->
|
||||
<!-- ### Generate a few addresses -->
|
||||
|
||||
<!-- ```rust -->
|
||||
<!-- use bdk::Wallet; -->
|
||||
<!-- use bdk::wallet::AddressIndex::New; -->
|
||||
<!-- use bdk::bitcoin::Network; -->
|
||||
|
||||
<!-- fn main() -> Result<(), bdk::Error> { -->
|
||||
<!-- let wallet = Wallet::new_no_persist( -->
|
||||
<!-- "wpkh([c258d2e4/84h/1h/0h]tpubDDYkZojQFQjht8Tm4jsS3iuEmKjTiEGjG6KnuFNKKJb5A6ZUCUZKdvLdSDWofKi4ToRCwb9poe1XdqfUnP4jaJjCB2Zwv11ZLgSbnZSNecE/0/*)", -->
|
||||
<!-- Some("wpkh([c258d2e4/84h/1h/0h]tpubDDYkZojQFQjht8Tm4jsS3iuEmKjTiEGjG6KnuFNKKJb5A6ZUCUZKdvLdSDWofKi4ToRCwb9poe1XdqfUnP4jaJjCB2Zwv11ZLgSbnZSNecE/1/*)"), -->
|
||||
<!-- Network::Testnet, -->
|
||||
<!-- )?; -->
|
||||
|
||||
<!-- println!("Address #0: {}", wallet.get_address(New)); -->
|
||||
<!-- println!("Address #1: {}", wallet.get_address(New)); -->
|
||||
<!-- println!("Address #2: {}", wallet.get_address(New)); -->
|
||||
|
||||
<!-- Ok(()) -->
|
||||
<!-- } -->
|
||||
<!-- ``` -->
|
||||
|
||||
<!-- ### Create a transaction -->
|
||||
|
||||
<!-- ```rust,no_run -->
|
||||
<!-- use bdk::{FeeRate, Wallet, SyncOptions}; -->
|
||||
<!-- use bdk::blockchain::ElectrumBlockchain; -->
|
||||
|
||||
<!-- use bdk::electrum_client::Client; -->
|
||||
<!-- use bdk::wallet::AddressIndex::New; -->
|
||||
|
||||
<!-- use base64; -->
|
||||
<!-- use bdk::bitcoin::consensus::serialize; -->
|
||||
<!-- use bdk::bitcoin::Network; -->
|
||||
|
||||
<!-- fn main() -> Result<(), bdk::Error> { -->
|
||||
<!-- let blockchain = ElectrumBlockchain::from(Client::new("ssl://electrum.blockstream.info:60002")?); -->
|
||||
<!-- let wallet = Wallet::new_no_persist( -->
|
||||
<!-- "wpkh([c258d2e4/84h/1h/0h]tpubDDYkZojQFQjht8Tm4jsS3iuEmKjTiEGjG6KnuFNKKJb5A6ZUCUZKdvLdSDWofKi4ToRCwb9poe1XdqfUnP4jaJjCB2Zwv11ZLgSbnZSNecE/0/*)", -->
|
||||
<!-- Some("wpkh([c258d2e4/84h/1h/0h]tpubDDYkZojQFQjht8Tm4jsS3iuEmKjTiEGjG6KnuFNKKJb5A6ZUCUZKdvLdSDWofKi4ToRCwb9poe1XdqfUnP4jaJjCB2Zwv11ZLgSbnZSNecE/1/*)"), -->
|
||||
<!-- Network::Testnet, -->
|
||||
<!-- )?; -->
|
||||
|
||||
<!-- wallet.sync(&blockchain, SyncOptions::default())?; -->
|
||||
|
||||
<!-- let send_to = wallet.get_address(New); -->
|
||||
<!-- let (psbt, details) = { -->
|
||||
<!-- let mut builder = wallet.build_tx(); -->
|
||||
<!-- builder -->
|
||||
<!-- .add_recipient(send_to.script_pubkey(), 50_000) -->
|
||||
<!-- .enable_rbf() -->
|
||||
<!-- .do_not_spend_change() -->
|
||||
<!-- .fee_rate(FeeRate::from_sat_per_vb(5.0)); -->
|
||||
<!-- builder.finish()? -->
|
||||
<!-- }; -->
|
||||
|
||||
<!-- println!("Transaction details: {:#?}", details); -->
|
||||
<!-- println!("Unsigned PSBT: {}", base64::encode(&serialize(&psbt))); -->
|
||||
|
||||
<!-- Ok(()) -->
|
||||
<!-- } -->
|
||||
<!-- ``` -->
|
||||
|
||||
<!-- ### Sign a transaction -->
|
||||
|
||||
<!-- ```rust,no_run -->
|
||||
<!-- use bdk::{Wallet, SignOptions}; -->
|
||||
|
||||
<!-- use base64; -->
|
||||
<!-- use bdk::bitcoin::consensus::deserialize; -->
|
||||
<!-- use bdk::bitcoin::Network; -->
|
||||
|
||||
<!-- fn main() -> Result<(), bdk::Error> { -->
|
||||
<!-- let wallet = Wallet::new_no_persist( -->
|
||||
<!-- "wpkh([c258d2e4/84h/1h/0h]tprv8griRPhA7342zfRyB6CqeKF8CJDXYu5pgnj1cjL1u2ngKcJha5jjTRimG82ABzJQ4MQe71CV54xfn25BbhCNfEGGJZnxvCDQCd6JkbvxW6h/0/*)", -->
|
||||
<!-- Some("wpkh([c258d2e4/84h/1h/0h]tprv8griRPhA7342zfRyB6CqeKF8CJDXYu5pgnj1cjL1u2ngKcJha5jjTRimG82ABzJQ4MQe71CV54xfn25BbhCNfEGGJZnxvCDQCd6JkbvxW6h/1/*)"), -->
|
||||
<!-- Network::Testnet, -->
|
||||
<!-- )?; -->
|
||||
|
||||
<!-- let psbt = "..."; -->
|
||||
<!-- let mut psbt = deserialize(&base64::decode(psbt).unwrap())?; -->
|
||||
|
||||
<!-- let _finalized = wallet.sign(&mut psbt, SignOptions::default())?; -->
|
||||
|
||||
<!-- Ok(()) -->
|
||||
<!-- } -->
|
||||
<!-- ``` -->
|
||||
|
||||
## Testing
|
||||
|
||||
### Unit testing
|
||||
|
||||
```bash
|
||||
cargo test
|
||||
```
|
||||
|
||||
## License
|
||||
|
||||
Licensed under either of
|
||||
|
||||
* Apache License, Version 2.0
|
||||
([LICENSE-APACHE](LICENSE-APACHE) or <https://www.apache.org/licenses/LICENSE-2.0>)
|
||||
* MIT license
|
||||
([LICENSE-MIT](LICENSE-MIT) or <https://opensource.org/licenses/MIT>)
|
||||
|
||||
at your option.
|
||||
|
||||
## Contribution
|
||||
|
||||
Unless you explicitly state otherwise, any contribution intentionally submitted
|
||||
for inclusion in the work by you, as defined in the Apache-2.0 license, shall be
|
||||
dual licensed as above, without any additional terms or conditions.
|
||||
|
||||
[`bdk_chain`]: https://docs.rs/bdk_chain/latest
|
||||
[`bdk_file_store`]: https://docs.rs/bdk_file_store/latest
|
||||
[`bdk_electrum`]: https://docs.rs/bdk_electrum/latest
|
||||
[`bdk_esplora`]: https://docs.rs/bdk_esplora/latest
|
||||
[`KeychainScan`]: https://docs.rs/bdk_chain/latest/bdk_chain/keychain/struct.KeychainScan.html
|
||||
[`rust-miniscript`]: https://docs.rs/miniscript/latest/miniscript/index.html
|
||||
73
crates/bdk/examples/compiler.rs
Normal file
73
crates/bdk/examples/compiler.rs
Normal file
@@ -0,0 +1,73 @@
|
||||
// Bitcoin Dev Kit
|
||||
// Written in 2020 by Alekos Filini <alekos.filini@gmail.com>
|
||||
//
|
||||
// Copyright (c) 2020-2021 Bitcoin Dev Kit Developers
|
||||
//
|
||||
// This file is licensed under the Apache License, Version 2.0 <LICENSE-APACHE
|
||||
// or http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
|
||||
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your option.
|
||||
// You may not use this file except in accordance with one or both of these
|
||||
// licenses.
|
||||
|
||||
extern crate bdk;
|
||||
extern crate bitcoin;
|
||||
extern crate log;
|
||||
extern crate miniscript;
|
||||
extern crate serde_json;
|
||||
|
||||
use std::error::Error;
|
||||
use std::str::FromStr;
|
||||
|
||||
use log::info;
|
||||
|
||||
use bitcoin::Network;
|
||||
use miniscript::policy::Concrete;
|
||||
use miniscript::Descriptor;
|
||||
|
||||
use bdk::wallet::AddressIndex::New;
|
||||
use bdk::{KeychainKind, Wallet};
|
||||
|
||||
/// Miniscript policy is a high level abstraction of spending conditions. Defined in the
|
||||
/// rust-miniscript library here https://docs.rs/miniscript/7.0.0/miniscript/policy/index.html
|
||||
/// rust-miniscript provides a `compile()` function that can be used to compile any miniscript policy
|
||||
/// into a descriptor. This descriptor then in turn can be used in bdk a fully functioning wallet
|
||||
/// can be derived from the policy.
|
||||
///
|
||||
/// This example demonstrates the interaction between a bdk wallet and miniscript policy.
|
||||
|
||||
fn main() -> Result<(), Box<dyn Error>> {
|
||||
env_logger::init_from_env(
|
||||
env_logger::Env::default().filter_or(env_logger::DEFAULT_FILTER_ENV, "info"),
|
||||
);
|
||||
|
||||
// We start with a generic miniscript policy string
|
||||
let policy_str = "or(10@thresh(4,pk(029ffbe722b147f3035c87cb1c60b9a5947dd49c774cc31e94773478711a929ac0),pk(025f05815e3a1a8a83bfbb03ce016c9a2ee31066b98f567f6227df1d76ec4bd143),pk(025625f41e4a065efc06d5019cbbd56fe8c07595af1231e7cbc03fafb87ebb71ec),pk(02a27c8b850a00f67da3499b60562673dcf5fdfb82b7e17652a7ac54416812aefd),pk(03e618ec5f384d6e19ca9ebdb8e2119e5bef978285076828ce054e55c4daf473e2)),1@and(older(4209713),thresh(2,pk(03deae92101c790b12653231439f27b8897264125ecb2f46f48278603102573165),pk(033841045a531e1adf9910a6ec279589a90b3b8a904ee64ffd692bd08a8996c1aa),pk(02aebf2d10b040eb936a6f02f44ee82f8b34f5c1ccb20ff3949c2b28206b7c1068))))";
|
||||
info!("Compiling policy: \n{}", policy_str);
|
||||
|
||||
// Parse the string as a [`Concrete`] type miniscript policy.
|
||||
let policy = Concrete::<String>::from_str(policy_str)?;
|
||||
|
||||
// Create a `wsh` type descriptor from the policy.
|
||||
// `policy.compile()` returns the resulting miniscript from the policy.
|
||||
let descriptor = Descriptor::new_wsh(policy.compile()?)?;
|
||||
|
||||
info!("Compiled into following Descriptor: \n{}", descriptor);
|
||||
|
||||
// Create a new wallet from this descriptor
|
||||
let mut wallet = Wallet::new_no_persist(&format!("{}", descriptor), None, Network::Regtest)?;
|
||||
|
||||
info!(
|
||||
"First derived address from the descriptor: \n{}",
|
||||
wallet.get_address(New)
|
||||
);
|
||||
|
||||
// BDK also has it's own `Policy` structure to represent the spending condition in a more
|
||||
// human readable json format.
|
||||
let spending_policy = wallet.policies(KeychainKind::External)?;
|
||||
info!(
|
||||
"The BDK spending policy: \n{}",
|
||||
serde_json::to_string_pretty(&spending_policy)?
|
||||
);
|
||||
|
||||
Ok(())
|
||||
}
|
||||
60
crates/bdk/examples/mnemonic_to_descriptors.rs
Normal file
60
crates/bdk/examples/mnemonic_to_descriptors.rs
Normal file
@@ -0,0 +1,60 @@
|
||||
// Copyright (c) 2020-2021 Bitcoin Dev Kit Developers
|
||||
//
|
||||
// This file is licensed under the Apache License, Version 2.0 <LICENSE-APACHE
|
||||
// or http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
|
||||
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your option.
|
||||
// You may not use this file except in accordance with one or both of these
|
||||
// licenses.
|
||||
|
||||
use bdk::bitcoin::secp256k1::Secp256k1;
|
||||
use bdk::bitcoin::util::bip32::DerivationPath;
|
||||
use bdk::bitcoin::Network;
|
||||
use bdk::descriptor;
|
||||
use bdk::descriptor::IntoWalletDescriptor;
|
||||
use bdk::keys::bip39::{Language, Mnemonic, WordCount};
|
||||
use bdk::keys::{GeneratableKey, GeneratedKey};
|
||||
use bdk::miniscript::Tap;
|
||||
use bdk::Error as BDK_Error;
|
||||
use std::error::Error;
|
||||
use std::str::FromStr;
|
||||
|
||||
/// This example demonstrates how to generate a mnemonic phrase
|
||||
/// using BDK and use that to generate a descriptor string.
|
||||
fn main() -> Result<(), Box<dyn Error>> {
|
||||
let secp = Secp256k1::new();
|
||||
|
||||
// In this example we are generating a 12 words mnemonic phrase
|
||||
// but it is also possible generate 15, 18, 21 and 24 words
|
||||
// using their respective `WordCount` variant.
|
||||
let mnemonic: GeneratedKey<_, Tap> =
|
||||
Mnemonic::generate((WordCount::Words12, Language::English))
|
||||
.map_err(|_| BDK_Error::Generic("Mnemonic generation error".to_string()))?;
|
||||
|
||||
println!("Mnemonic phrase: {}", *mnemonic);
|
||||
let mnemonic_with_passphrase = (mnemonic, None);
|
||||
|
||||
// define external and internal derivation key path
|
||||
let external_path = DerivationPath::from_str("m/86h/0h/0h/0").unwrap();
|
||||
let internal_path = DerivationPath::from_str("m/86h/0h/0h/1").unwrap();
|
||||
|
||||
// generate external and internal descriptor from mnemonic
|
||||
let (external_descriptor, ext_keymap) =
|
||||
descriptor!(tr((mnemonic_with_passphrase.clone(), external_path)))?
|
||||
.into_wallet_descriptor(&secp, Network::Testnet)?;
|
||||
let (internal_descriptor, int_keymap) =
|
||||
descriptor!(tr((mnemonic_with_passphrase, internal_path)))?
|
||||
.into_wallet_descriptor(&secp, Network::Testnet)?;
|
||||
|
||||
println!("tpub external descriptor: {}", external_descriptor);
|
||||
println!("tpub internal descriptor: {}", internal_descriptor);
|
||||
println!(
|
||||
"tprv external descriptor: {}",
|
||||
external_descriptor.to_string_with_secret(&ext_keymap)
|
||||
);
|
||||
println!(
|
||||
"tprv internal descriptor: {}",
|
||||
internal_descriptor.to_string_with_secret(&int_keymap)
|
||||
);
|
||||
|
||||
Ok(())
|
||||
}
|
||||
66
crates/bdk/examples/policy.rs
Normal file
66
crates/bdk/examples/policy.rs
Normal file
@@ -0,0 +1,66 @@
|
||||
// Bitcoin Dev Kit
|
||||
// Written in 2020 by Alekos Filini <alekos.filini@gmail.com>
|
||||
//
|
||||
// Copyright (c) 2020-2021 Bitcoin Dev Kit Developers
|
||||
//
|
||||
// This file is licensed under the Apache License, Version 2.0 <LICENSE-APACHE
|
||||
// or http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
|
||||
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your option.
|
||||
// You may not use this file except in accordance with one or both of these
|
||||
// licenses.
|
||||
|
||||
extern crate bdk;
|
||||
extern crate env_logger;
|
||||
extern crate log;
|
||||
use std::error::Error;
|
||||
|
||||
use bdk::bitcoin::Network;
|
||||
use bdk::descriptor::{policy::BuildSatisfaction, ExtractPolicy, IntoWalletDescriptor};
|
||||
use bdk::wallet::signer::SignersContainer;
|
||||
|
||||
/// This example describes the use of the BDK's [`bdk::descriptor::policy`] module.
|
||||
///
|
||||
/// Policy is higher abstraction representation of the wallet descriptor spending condition.
|
||||
/// This is useful to express complex miniscript spending conditions into more human readable form.
|
||||
/// The resulting `Policy` structure can be used to derive spending conditions the wallet is capable
|
||||
/// to spend from.
|
||||
///
|
||||
/// This example demos a Policy output for a 2of2 multisig between between 2 parties, where the wallet holds
|
||||
/// one of the Extend Private key.
|
||||
|
||||
fn main() -> Result<(), Box<dyn Error>> {
|
||||
env_logger::init_from_env(
|
||||
env_logger::Env::default().filter_or(env_logger::DEFAULT_FILTER_ENV, "info"),
|
||||
);
|
||||
|
||||
let secp = bitcoin::secp256k1::Secp256k1::new();
|
||||
|
||||
// The descriptor used in the example
|
||||
// The form is "wsh(multi(2, <privkey>, <pubkey>))"
|
||||
let desc = "wsh(multi(2,tprv8ZgxMBicQKsPdpkqS7Eair4YxjcuuvDPNYmKX3sCniCf16tHEVrjjiSXEkFRnUH77yXc6ZcwHHcLNfjdi5qUvw3VDfgYiH5mNsj5izuiu2N/1/*,tpubD6NzVbkrYhZ4XHndKkuB8FifXm8r5FQHwrN6oZuWCz13qb93rtgKvD4PQsqC4HP4yhV3tA2fqr2RbY5mNXfM7RxXUoeABoDtsFUq2zJq6YK/1/*))";
|
||||
|
||||
// Use the descriptor string to derive the full descriptor and a keymap.
|
||||
// The wallet descriptor can be used to create a new bdk::wallet.
|
||||
// While the `keymap` can be used to create a `SignerContainer`.
|
||||
//
|
||||
// The `SignerContainer` can sign for `PSBT`s.
|
||||
// a bdk::wallet internally uses these to handle transaction signing.
|
||||
// But they can be used as independent tools also.
|
||||
let (wallet_desc, keymap) = desc.into_wallet_descriptor(&secp, Network::Testnet)?;
|
||||
|
||||
log::info!("Example Descriptor for policy analysis : {}", wallet_desc);
|
||||
|
||||
// Create the signer with the keymap and descriptor.
|
||||
let signers_container = SignersContainer::build(keymap, &wallet_desc, &secp);
|
||||
|
||||
// Extract the Policy from the given descriptor and signer.
|
||||
// Note that Policy is a wallet specific structure. It depends on the the descriptor, and
|
||||
// what the concerned wallet with a given signer can sign for.
|
||||
let policy = wallet_desc
|
||||
.extract_policy(&signers_container, BuildSatisfaction::None, &secp)?
|
||||
.expect("We expect a policy");
|
||||
|
||||
log::info!("Derived Policy for the descriptor {:#?}", policy);
|
||||
|
||||
Ok(())
|
||||
}
|
||||
182
crates/bdk/src/descriptor/checksum.rs
Normal file
182
crates/bdk/src/descriptor/checksum.rs
Normal file
@@ -0,0 +1,182 @@
|
||||
// Bitcoin Dev Kit
|
||||
// Written in 2020 by Alekos Filini <alekos.filini@gmail.com>
|
||||
//
|
||||
// Copyright (c) 2020-2021 Bitcoin Dev Kit Developers
|
||||
//
|
||||
// This file is licensed under the Apache License, Version 2.0 <LICENSE-APACHE
|
||||
// or http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
|
||||
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your option.
|
||||
// You may not use this file except in accordance with one or both of these
|
||||
// licenses.
|
||||
|
||||
//! Descriptor checksum
|
||||
//!
|
||||
//! This module contains a re-implementation of the function used by Bitcoin Core to calculate the
|
||||
//! checksum of a descriptor
|
||||
|
||||
use crate::descriptor::DescriptorError;
|
||||
use alloc::string::String;
|
||||
|
||||
const INPUT_CHARSET: &[u8] = b"0123456789()[],'/*abcdefgh@:$%{}IJKLMNOPQRSTUVWXYZ&+-.;<=>?!^_|~ijklmnopqrstuvwxyzABCDEFGH`#\"\\ ";
|
||||
const CHECKSUM_CHARSET: &[u8] = b"qpzry9x8gf2tvdw0s3jn54khce6mua7l";
|
||||
|
||||
fn poly_mod(mut c: u64, val: u64) -> u64 {
|
||||
let c0 = c >> 35;
|
||||
c = ((c & 0x7ffffffff) << 5) ^ val;
|
||||
if c0 & 1 > 0 {
|
||||
c ^= 0xf5dee51989
|
||||
};
|
||||
if c0 & 2 > 0 {
|
||||
c ^= 0xa9fdca3312
|
||||
};
|
||||
if c0 & 4 > 0 {
|
||||
c ^= 0x1bab10e32d
|
||||
};
|
||||
if c0 & 8 > 0 {
|
||||
c ^= 0x3706b1677a
|
||||
};
|
||||
if c0 & 16 > 0 {
|
||||
c ^= 0x644d626ffd
|
||||
};
|
||||
|
||||
c
|
||||
}
|
||||
|
||||
/// Computes the checksum bytes of a descriptor.
|
||||
/// `exclude_hash = true` ignores all data after the first '#' (inclusive).
|
||||
pub(crate) fn calc_checksum_bytes_internal(
|
||||
mut desc: &str,
|
||||
exclude_hash: bool,
|
||||
) -> Result<[u8; 8], DescriptorError> {
|
||||
let mut c = 1;
|
||||
let mut cls = 0;
|
||||
let mut clscount = 0;
|
||||
|
||||
let mut original_checksum = None;
|
||||
if exclude_hash {
|
||||
if let Some(split) = desc.split_once('#') {
|
||||
desc = split.0;
|
||||
original_checksum = Some(split.1);
|
||||
}
|
||||
}
|
||||
|
||||
for ch in desc.as_bytes() {
|
||||
let pos = INPUT_CHARSET
|
||||
.iter()
|
||||
.position(|b| b == ch)
|
||||
.ok_or(DescriptorError::InvalidDescriptorCharacter(*ch))? as u64;
|
||||
c = poly_mod(c, pos & 31);
|
||||
cls = cls * 3 + (pos >> 5);
|
||||
clscount += 1;
|
||||
if clscount == 3 {
|
||||
c = poly_mod(c, cls);
|
||||
cls = 0;
|
||||
clscount = 0;
|
||||
}
|
||||
}
|
||||
if clscount > 0 {
|
||||
c = poly_mod(c, cls);
|
||||
}
|
||||
(0..8).for_each(|_| c = poly_mod(c, 0));
|
||||
c ^= 1;
|
||||
|
||||
let mut checksum = [0_u8; 8];
|
||||
for j in 0..8 {
|
||||
checksum[j] = CHECKSUM_CHARSET[((c >> (5 * (7 - j))) & 31) as usize];
|
||||
}
|
||||
|
||||
// if input data already had a checksum, check calculated checksum against original checksum
|
||||
if let Some(original_checksum) = original_checksum {
|
||||
if original_checksum.as_bytes() != checksum {
|
||||
return Err(DescriptorError::InvalidDescriptorChecksum);
|
||||
}
|
||||
}
|
||||
|
||||
Ok(checksum)
|
||||
}
|
||||
|
||||
/// Compute the checksum bytes of a descriptor, excludes any existing checksum in the descriptor string from the calculation
|
||||
pub fn calc_checksum_bytes(desc: &str) -> Result<[u8; 8], DescriptorError> {
|
||||
calc_checksum_bytes_internal(desc, true)
|
||||
}
|
||||
|
||||
/// Compute the checksum of a descriptor, excludes any existing checksum in the descriptor string from the calculation
|
||||
pub fn calc_checksum(desc: &str) -> Result<String, DescriptorError> {
|
||||
// unsafe is okay here as the checksum only uses bytes in `CHECKSUM_CHARSET`
|
||||
calc_checksum_bytes_internal(desc, true)
|
||||
.map(|b| unsafe { String::from_utf8_unchecked(b.to_vec()) })
|
||||
}
|
||||
|
||||
// TODO in release 0.25.0, remove get_checksum_bytes and get_checksum
|
||||
// TODO in release 0.25.0, consolidate calc_checksum_bytes_internal into calc_checksum_bytes
|
||||
|
||||
/// Compute the checksum bytes of a descriptor
|
||||
#[deprecated(
|
||||
since = "0.24.0",
|
||||
note = "Use new `calc_checksum_bytes` function which excludes any existing checksum in the descriptor string before calculating the checksum hash bytes. See https://github.com/bitcoindevkit/bdk/pull/765."
|
||||
)]
|
||||
pub fn get_checksum_bytes(desc: &str) -> Result<[u8; 8], DescriptorError> {
|
||||
calc_checksum_bytes_internal(desc, false)
|
||||
}
|
||||
|
||||
/// Compute the checksum of a descriptor
|
||||
#[deprecated(
|
||||
since = "0.24.0",
|
||||
note = "Use new `calc_checksum` function which excludes any existing checksum in the descriptor string before calculating the checksum hash. See https://github.com/bitcoindevkit/bdk/pull/765."
|
||||
)]
|
||||
pub fn get_checksum(desc: &str) -> Result<String, DescriptorError> {
|
||||
// unsafe is okay here as the checksum only uses bytes in `CHECKSUM_CHARSET`
|
||||
calc_checksum_bytes_internal(desc, false)
|
||||
.map(|b| unsafe { String::from_utf8_unchecked(b.to_vec()) })
|
||||
}
|
||||
|
||||
#[cfg(test)]
|
||||
mod test {
|
||||
use super::*;
|
||||
use crate::descriptor::calc_checksum;
|
||||
use assert_matches::assert_matches;
|
||||
|
||||
// test calc_checksum() function; it should return the same value as Bitcoin Core
|
||||
#[test]
|
||||
fn test_calc_checksum() {
|
||||
let desc = "wpkh(tprv8ZgxMBicQKsPdpkqS7Eair4YxjcuuvDPNYmKX3sCniCf16tHEVrjjiSXEkFRnUH77yXc6ZcwHHcLNfjdi5qUvw3VDfgYiH5mNsj5izuiu2N/1/2/*)";
|
||||
assert_eq!(calc_checksum(desc).unwrap(), "tqz0nc62");
|
||||
|
||||
let desc = "pkh(tpubD6NzVbkrYhZ4XHndKkuB8FifXm8r5FQHwrN6oZuWCz13qb93rtgKvD4PQsqC4HP4yhV3tA2fqr2RbY5mNXfM7RxXUoeABoDtsFUq2zJq6YK/44'/1'/0'/0/*)";
|
||||
assert_eq!(calc_checksum(desc).unwrap(), "lasegmfs");
|
||||
}
|
||||
|
||||
// test calc_checksum() function; it should return the same value as Bitcoin Core even if the
|
||||
// descriptor string includes a checksum hash
|
||||
#[test]
|
||||
fn test_calc_checksum_with_checksum_hash() {
|
||||
let desc = "wpkh(tprv8ZgxMBicQKsPdpkqS7Eair4YxjcuuvDPNYmKX3sCniCf16tHEVrjjiSXEkFRnUH77yXc6ZcwHHcLNfjdi5qUvw3VDfgYiH5mNsj5izuiu2N/1/2/*)#tqz0nc62";
|
||||
assert_eq!(calc_checksum(desc).unwrap(), "tqz0nc62");
|
||||
|
||||
let desc = "pkh(tpubD6NzVbkrYhZ4XHndKkuB8FifXm8r5FQHwrN6oZuWCz13qb93rtgKvD4PQsqC4HP4yhV3tA2fqr2RbY5mNXfM7RxXUoeABoDtsFUq2zJq6YK/44'/1'/0'/0/*)#lasegmfs";
|
||||
assert_eq!(calc_checksum(desc).unwrap(), "lasegmfs");
|
||||
|
||||
let desc = "wpkh(tprv8ZgxMBicQKsPdpkqS7Eair4YxjcuuvDPNYmKX3sCniCf16tHEVrjjiSXEkFRnUH77yXc6ZcwHHcLNfjdi5qUvw3VDfgYiH5mNsj5izuiu2N/1/2/*)#tqz0nc26";
|
||||
assert_matches!(
|
||||
calc_checksum(desc),
|
||||
Err(DescriptorError::InvalidDescriptorChecksum)
|
||||
);
|
||||
|
||||
let desc = "pkh(tpubD6NzVbkrYhZ4XHndKkuB8FifXm8r5FQHwrN6oZuWCz13qb93rtgKvD4PQsqC4HP4yhV3tA2fqr2RbY5mNXfM7RxXUoeABoDtsFUq2zJq6YK/44'/1'/0'/0/*)#lasegmsf";
|
||||
assert_matches!(
|
||||
calc_checksum(desc),
|
||||
Err(DescriptorError::InvalidDescriptorChecksum)
|
||||
);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_calc_checksum_invalid_character() {
|
||||
let sparkle_heart = unsafe { core::str::from_utf8_unchecked(&[240, 159, 146, 150]) };
|
||||
let invalid_desc = format!("wpkh(tprv8ZgxMBicQKsPdpkqS7Eair4YxjcuuvDPNYmKX3sCniCf16tHEVrjjiSXEkFRnUH77yXc6ZcwHHcL{}fjdi5qUvw3VDfgYiH5mNsj5izuiu2N/1/2/*)", sparkle_heart);
|
||||
|
||||
assert_matches!(
|
||||
calc_checksum(&invalid_desc),
|
||||
Err(DescriptorError::InvalidDescriptorCharacter(invalid_char)) if invalid_char == sparkle_heart.as_bytes()[0]
|
||||
);
|
||||
}
|
||||
}
|
||||
@@ -1,26 +1,13 @@
|
||||
// Magical Bitcoin Library
|
||||
// Written in 2020 by
|
||||
// Alekos Filini <alekos.filini@gmail.com>
|
||||
// Bitcoin Dev Kit
|
||||
// Written in 2020 by Alekos Filini <alekos.filini@gmail.com>
|
||||
//
|
||||
// Copyright (c) 2020 Magical Bitcoin
|
||||
// Copyright (c) 2020-2021 Bitcoin Dev Kit Developers
|
||||
//
|
||||
// Permission is hereby granted, free of charge, to any person obtaining a copy
|
||||
// of this software and associated documentation files (the "Software"), to deal
|
||||
// in the Software without restriction, including without limitation the rights
|
||||
// to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
|
||||
// copies of the Software, and to permit persons to whom the Software is
|
||||
// furnished to do so, subject to the following conditions:
|
||||
//
|
||||
// The above copyright notice and this permission notice shall be included in all
|
||||
// copies or substantial portions of the Software.
|
||||
//
|
||||
// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
|
||||
// IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
|
||||
// FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
|
||||
// AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
|
||||
// LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
|
||||
// OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
|
||||
// SOFTWARE.
|
||||
// This file is licensed under the Apache License, Version 2.0 <LICENSE-APACHE
|
||||
// or http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
|
||||
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your option.
|
||||
// You may not use this file except in accordance with one or both of these
|
||||
// licenses.
|
||||
|
||||
//! Descriptors DSL
|
||||
|
||||
@@ -28,46 +15,102 @@
|
||||
#[macro_export]
|
||||
macro_rules! impl_top_level_sh {
|
||||
// disallow `sortedmulti` in `bare()`
|
||||
( Bare, Bare, sortedmulti $( $inner:tt )* ) => {
|
||||
( Bare, new, new, Legacy, sortedmulti $( $inner:tt )* ) => {
|
||||
compile_error!("`bare()` descriptors can't contain any `sortedmulti()` operands");
|
||||
};
|
||||
( Bare, Bare, sortedmulti_vec $( $inner:tt )* ) => {
|
||||
( Bare, new, new, Legacy, sortedmulti_vec $( $inner:tt )* ) => {
|
||||
compile_error!("`bare()` descriptors can't contain any `sortedmulti_vec()` operands");
|
||||
};
|
||||
|
||||
( $descriptor_variant:ident, $sortedmulti_variant:ident, sortedmulti $( $inner:tt )* ) => {
|
||||
$crate::impl_sortedmulti!(sortedmulti $( $inner )*)
|
||||
.and_then(|(inner, key_map, valid_networks)| Ok(($crate::miniscript::Descriptor::$sortedmulti_variant(inner), key_map, valid_networks)))
|
||||
};
|
||||
( $descriptor_variant:ident, $sortedmulti_variant:ident, sortedmulti_vec $( $inner:tt )* ) => {
|
||||
$crate::impl_sortedmulti!(sortedmulti_vec $( $inner )*)
|
||||
.and_then(|(inner, key_map, valid_networks)| Ok(($crate::miniscript::Descriptor::$sortedmulti_variant(inner), key_map, valid_networks)))
|
||||
};
|
||||
( $inner_struct:ident, $constructor:ident, $sortedmulti_constructor:ident, $ctx:ident, sortedmulti $( $inner:tt )* ) => {{
|
||||
use core::marker::PhantomData;
|
||||
|
||||
use $crate::miniscript::descriptor::{$inner_struct, Descriptor, DescriptorPublicKey};
|
||||
use $crate::miniscript::$ctx;
|
||||
|
||||
let build_desc = |k, pks| {
|
||||
Ok((Descriptor::<DescriptorPublicKey>::$inner_struct($inner_struct::$sortedmulti_constructor(k, pks)?), PhantomData::<$ctx>))
|
||||
};
|
||||
|
||||
$crate::impl_sortedmulti!(build_desc, sortedmulti $( $inner )*)
|
||||
}};
|
||||
( $inner_struct:ident, $constructor:ident, $sortedmulti_constructor:ident, $ctx:ident, sortedmulti_vec $( $inner:tt )* ) => {{
|
||||
use core::marker::PhantomData;
|
||||
|
||||
use $crate::miniscript::descriptor::{$inner_struct, Descriptor, DescriptorPublicKey};
|
||||
use $crate::miniscript::$ctx;
|
||||
|
||||
let build_desc = |k, pks| {
|
||||
Ok((Descriptor::<DescriptorPublicKey>::$inner_struct($inner_struct::$sortedmulti_constructor(k, pks)?), PhantomData::<$ctx>))
|
||||
};
|
||||
|
||||
$crate::impl_sortedmulti!(build_desc, sortedmulti_vec $( $inner )*)
|
||||
}};
|
||||
|
||||
( $inner_struct:ident, $constructor:ident, $sortedmulti_constructor:ident, $ctx:ident, $( $minisc:tt )* ) => {{
|
||||
use $crate::miniscript::descriptor::{$inner_struct, Descriptor, DescriptorPublicKey};
|
||||
|
||||
( $descriptor_variant:ident, $sortedmulti_variant:ident, $( $minisc:tt )* ) => {
|
||||
$crate::fragment!($( $minisc )*)
|
||||
.map(|(minisc, keymap, networks)|($crate::miniscript::Descriptor::<$crate::miniscript::descriptor::DescriptorPublicKey>::$descriptor_variant(minisc), keymap, networks))
|
||||
};
|
||||
.and_then(|(minisc, keymap, networks)| Ok(($inner_struct::$constructor(minisc)?, keymap, networks)))
|
||||
.and_then(|(inner, key_map, valid_networks)| Ok((Descriptor::<DescriptorPublicKey>::$inner_struct(inner), key_map, valid_networks)))
|
||||
}};
|
||||
}
|
||||
|
||||
#[doc(hidden)]
|
||||
#[macro_export]
|
||||
macro_rules! impl_top_level_pk {
|
||||
( $descriptor_variant:ident, $ctx:ty, $key:expr ) => {{
|
||||
( $inner_type:ident, $ctx:ty, $key:expr ) => {{
|
||||
use $crate::miniscript::descriptor::$inner_type;
|
||||
|
||||
#[allow(unused_imports)]
|
||||
use $crate::keys::{DescriptorKey, ToDescriptorKey};
|
||||
use $crate::keys::{DescriptorKey, IntoDescriptorKey};
|
||||
let secp = $crate::bitcoin::secp256k1::Secp256k1::new();
|
||||
|
||||
$key.to_descriptor_key()
|
||||
$key.into_descriptor_key()
|
||||
.and_then(|key: DescriptorKey<$ctx>| key.extract(&secp))
|
||||
.map(|(pk, key_map, valid_networks)| {
|
||||
(
|
||||
$crate::miniscript::Descriptor::<
|
||||
$crate::miniscript::descriptor::DescriptorPublicKey,
|
||||
>::$descriptor_variant(pk),
|
||||
.map_err($crate::descriptor::DescriptorError::Key)
|
||||
.map(|(pk, key_map, valid_networks)| ($inner_type::new(pk), key_map, valid_networks))
|
||||
}};
|
||||
}
|
||||
|
||||
#[doc(hidden)]
|
||||
#[macro_export]
|
||||
macro_rules! impl_top_level_tr {
|
||||
( $internal_key:expr, $tap_tree:expr ) => {{
|
||||
use $crate::miniscript::descriptor::{
|
||||
Descriptor, DescriptorPublicKey, KeyMap, TapTree, Tr,
|
||||
};
|
||||
use $crate::miniscript::Tap;
|
||||
|
||||
#[allow(unused_imports)]
|
||||
use $crate::keys::{DescriptorKey, IntoDescriptorKey, ValidNetworks};
|
||||
|
||||
let secp = $crate::bitcoin::secp256k1::Secp256k1::new();
|
||||
|
||||
$internal_key
|
||||
.into_descriptor_key()
|
||||
.and_then(|key: DescriptorKey<Tap>| key.extract(&secp))
|
||||
.map_err($crate::descriptor::DescriptorError::Key)
|
||||
.and_then(|(pk, mut key_map, mut valid_networks)| {
|
||||
let tap_tree = $tap_tree.map(
|
||||
|(tap_tree, tree_keymap, tree_networks): (
|
||||
TapTree<DescriptorPublicKey>,
|
||||
KeyMap,
|
||||
ValidNetworks,
|
||||
)| {
|
||||
key_map.extend(tree_keymap.into_iter());
|
||||
valid_networks =
|
||||
$crate::keys::merge_networks(&valid_networks, &tree_networks);
|
||||
|
||||
tap_tree
|
||||
},
|
||||
);
|
||||
|
||||
Ok((
|
||||
Descriptor::<DescriptorPublicKey>::Tr(Tr::new(pk, tap_tree)?),
|
||||
key_map,
|
||||
valid_networks,
|
||||
)
|
||||
))
|
||||
})
|
||||
}};
|
||||
}
|
||||
@@ -75,11 +118,17 @@ macro_rules! impl_top_level_pk {
|
||||
#[doc(hidden)]
|
||||
#[macro_export]
|
||||
macro_rules! impl_leaf_opcode {
|
||||
( $terminal_variant:ident ) => {
|
||||
( $terminal_variant:ident ) => {{
|
||||
use $crate::descriptor::CheckMiniscript;
|
||||
|
||||
$crate::miniscript::Miniscript::from_ast(
|
||||
$crate::miniscript::miniscript::decode::Terminal::$terminal_variant,
|
||||
)
|
||||
.map_err($crate::Error::Miniscript)
|
||||
.map_err($crate::descriptor::DescriptorError::Miniscript)
|
||||
.and_then(|minisc| {
|
||||
minisc.check_miniscript()?;
|
||||
Ok(minisc)
|
||||
})
|
||||
.map(|minisc| {
|
||||
(
|
||||
minisc,
|
||||
@@ -87,17 +136,23 @@ macro_rules! impl_leaf_opcode {
|
||||
$crate::keys::any_network(),
|
||||
)
|
||||
})
|
||||
};
|
||||
}};
|
||||
}
|
||||
|
||||
#[doc(hidden)]
|
||||
#[macro_export]
|
||||
macro_rules! impl_leaf_opcode_value {
|
||||
( $terminal_variant:ident, $value:expr ) => {
|
||||
( $terminal_variant:ident, $value:expr ) => {{
|
||||
use $crate::descriptor::CheckMiniscript;
|
||||
|
||||
$crate::miniscript::Miniscript::from_ast(
|
||||
$crate::miniscript::miniscript::decode::Terminal::$terminal_variant($value),
|
||||
)
|
||||
.map_err($crate::Error::Miniscript)
|
||||
.map_err($crate::descriptor::DescriptorError::Miniscript)
|
||||
.and_then(|minisc| {
|
||||
minisc.check_miniscript()?;
|
||||
Ok(minisc)
|
||||
})
|
||||
.map(|minisc| {
|
||||
(
|
||||
minisc,
|
||||
@@ -105,17 +160,23 @@ macro_rules! impl_leaf_opcode_value {
|
||||
$crate::keys::any_network(),
|
||||
)
|
||||
})
|
||||
};
|
||||
}};
|
||||
}
|
||||
|
||||
#[doc(hidden)]
|
||||
#[macro_export]
|
||||
macro_rules! impl_leaf_opcode_value_two {
|
||||
( $terminal_variant:ident, $one:expr, $two:expr ) => {
|
||||
( $terminal_variant:ident, $one:expr, $two:expr ) => {{
|
||||
use $crate::descriptor::CheckMiniscript;
|
||||
|
||||
$crate::miniscript::Miniscript::from_ast(
|
||||
$crate::miniscript::miniscript::decode::Terminal::$terminal_variant($one, $two),
|
||||
)
|
||||
.map_err($crate::Error::Miniscript)
|
||||
.map_err($crate::descriptor::DescriptorError::Miniscript)
|
||||
.and_then(|minisc| {
|
||||
minisc.check_miniscript()?;
|
||||
Ok(minisc)
|
||||
})
|
||||
.map(|minisc| {
|
||||
(
|
||||
minisc,
|
||||
@@ -123,13 +184,15 @@ macro_rules! impl_leaf_opcode_value_two {
|
||||
$crate::keys::any_network(),
|
||||
)
|
||||
})
|
||||
};
|
||||
}};
|
||||
}
|
||||
|
||||
#[doc(hidden)]
|
||||
#[macro_export]
|
||||
macro_rules! impl_node_opcode_two {
|
||||
( $terminal_variant:ident, $( $inner:tt )* ) => ({
|
||||
use $crate::descriptor::CheckMiniscript;
|
||||
|
||||
let inner = $crate::fragment_internal!( @t $( $inner )* );
|
||||
let (a, b) = $crate::descriptor::dsl::TupleTwo::from(inner).flattened();
|
||||
|
||||
@@ -139,10 +202,14 @@ macro_rules! impl_node_opcode_two {
|
||||
// join key_maps
|
||||
a_keymap.extend(b_keymap.into_iter());
|
||||
|
||||
Ok(($crate::miniscript::Miniscript::from_ast($crate::miniscript::miniscript::decode::Terminal::$terminal_variant(
|
||||
std::sync::Arc::new(a_minisc),
|
||||
std::sync::Arc::new(b_minisc),
|
||||
))?, a_keymap, $crate::keys::merge_networks(&a_networks, &b_networks)))
|
||||
let minisc = $crate::miniscript::Miniscript::from_ast($crate::miniscript::miniscript::decode::Terminal::$terminal_variant(
|
||||
$crate::alloc::sync::Arc::new(a_minisc),
|
||||
$crate::alloc::sync::Arc::new(b_minisc),
|
||||
))?;
|
||||
|
||||
minisc.check_miniscript()?;
|
||||
|
||||
Ok((minisc, a_keymap, $crate::keys::merge_networks(&a_networks, &b_networks)))
|
||||
})
|
||||
});
|
||||
}
|
||||
@@ -150,7 +217,9 @@ macro_rules! impl_node_opcode_two {
|
||||
#[doc(hidden)]
|
||||
#[macro_export]
|
||||
macro_rules! impl_node_opcode_three {
|
||||
( $terminal_variant:ident, $( $inner:tt )* ) => {
|
||||
( $terminal_variant:ident, $( $inner:tt )* ) => ({
|
||||
use $crate::descriptor::CheckMiniscript;
|
||||
|
||||
let inner = $crate::fragment_internal!( @t $( $inner )* );
|
||||
let (a, b, c) = $crate::descriptor::dsl::TupleThree::from(inner).flattened();
|
||||
|
||||
@@ -164,53 +233,117 @@ macro_rules! impl_node_opcode_three {
|
||||
let networks = $crate::keys::merge_networks(&a_networks, &b_networks);
|
||||
let networks = $crate::keys::merge_networks(&networks, &c_networks);
|
||||
|
||||
Ok(($crate::miniscript::Miniscript::from_ast($crate::miniscript::miniscript::decode::Terminal::$terminal_variant(
|
||||
std::sync::Arc::new(a_minisc),
|
||||
std::sync::Arc::new(b_minisc),
|
||||
std::sync::Arc::new(c_minisc),
|
||||
))?, a_keymap, networks))
|
||||
let minisc = $crate::miniscript::Miniscript::from_ast($crate::miniscript::miniscript::decode::Terminal::$terminal_variant(
|
||||
$crate::alloc::sync::Arc::new(a_minisc),
|
||||
$crate::alloc::sync::Arc::new(b_minisc),
|
||||
$crate::alloc::sync::Arc::new(c_minisc),
|
||||
))?;
|
||||
|
||||
minisc.check_miniscript()?;
|
||||
|
||||
Ok((minisc, a_keymap, networks))
|
||||
})
|
||||
};
|
||||
});
|
||||
}
|
||||
|
||||
#[doc(hidden)]
|
||||
#[macro_export]
|
||||
macro_rules! impl_sortedmulti {
|
||||
( sortedmulti_vec ( $thresh:expr, $keys:expr ) ) => ({
|
||||
( $build_desc:expr, sortedmulti_vec ( $thresh:expr, $keys:expr ) ) => ({
|
||||
let secp = $crate::bitcoin::secp256k1::Secp256k1::new();
|
||||
$crate::keys::make_sortedmulti_inner($thresh, $keys, &secp)
|
||||
$crate::keys::make_sortedmulti($thresh, $keys, $build_desc, &secp)
|
||||
});
|
||||
( sortedmulti ( $thresh:expr $(, $key:expr )+ ) ) => ({
|
||||
use $crate::keys::ToDescriptorKey;
|
||||
( $build_desc:expr, sortedmulti ( $thresh:expr $(, $key:expr )+ ) ) => ({
|
||||
use $crate::keys::IntoDescriptorKey;
|
||||
let secp = $crate::bitcoin::secp256k1::Secp256k1::new();
|
||||
|
||||
let mut keys = vec![];
|
||||
$(
|
||||
keys.push($key.to_descriptor_key());
|
||||
)*
|
||||
let keys = vec![
|
||||
$(
|
||||
$key.into_descriptor_key(),
|
||||
)*
|
||||
];
|
||||
|
||||
keys.into_iter().collect::<Result<Vec<_>, _>>()
|
||||
.and_then(|keys| $crate::keys::make_sortedmulti_inner($thresh, keys, &secp))
|
||||
keys.into_iter().collect::<Result<$crate::alloc::vec::Vec<_>, _>>()
|
||||
.map_err($crate::descriptor::DescriptorError::Key)
|
||||
.and_then(|keys| $crate::keys::make_sortedmulti($thresh, keys, $build_desc, &secp))
|
||||
});
|
||||
|
||||
}
|
||||
|
||||
#[doc(hidden)]
|
||||
#[macro_export]
|
||||
macro_rules! parse_tap_tree {
|
||||
( @merge $tree_a:expr, $tree_b:expr) => {{
|
||||
use $crate::alloc::sync::Arc;
|
||||
use $crate::miniscript::descriptor::TapTree;
|
||||
|
||||
$tree_a
|
||||
.and_then(|tree_a| Ok((tree_a, $tree_b?)))
|
||||
.and_then(|((a_tree, mut a_keymap, a_networks), (b_tree, b_keymap, b_networks))| {
|
||||
a_keymap.extend(b_keymap.into_iter());
|
||||
Ok((TapTree::Tree(Arc::new(a_tree), Arc::new(b_tree)), a_keymap, $crate::keys::merge_networks(&a_networks, &b_networks)))
|
||||
})
|
||||
|
||||
}};
|
||||
|
||||
// Two sub-trees
|
||||
( { { $( $tree_a:tt )* }, { $( $tree_b:tt )* } } ) => {{
|
||||
let tree_a = $crate::parse_tap_tree!( { $( $tree_a )* } );
|
||||
let tree_b = $crate::parse_tap_tree!( { $( $tree_b )* } );
|
||||
|
||||
$crate::parse_tap_tree!(@merge tree_a, tree_b)
|
||||
}};
|
||||
|
||||
// One leaf and a sub-tree
|
||||
( { $op_a:ident ( $( $minisc_a:tt )* ), { $( $tree_b:tt )* } } ) => {{
|
||||
let tree_a = $crate::parse_tap_tree!( $op_a ( $( $minisc_a )* ) );
|
||||
let tree_b = $crate::parse_tap_tree!( { $( $tree_b )* } );
|
||||
|
||||
$crate::parse_tap_tree!(@merge tree_a, tree_b)
|
||||
}};
|
||||
( { { $( $tree_a:tt )* }, $op_b:ident ( $( $minisc_b:tt )* ) } ) => {{
|
||||
let tree_a = $crate::parse_tap_tree!( { $( $tree_a )* } );
|
||||
let tree_b = $crate::parse_tap_tree!( $op_b ( $( $minisc_b )* ) );
|
||||
|
||||
$crate::parse_tap_tree!(@merge tree_a, tree_b)
|
||||
}};
|
||||
|
||||
// Two leaves
|
||||
( { $op_a:ident ( $( $minisc_a:tt )* ), $op_b:ident ( $( $minisc_b:tt )* ) } ) => {{
|
||||
let tree_a = $crate::parse_tap_tree!( $op_a ( $( $minisc_a )* ) );
|
||||
let tree_b = $crate::parse_tap_tree!( $op_b ( $( $minisc_b )* ) );
|
||||
|
||||
$crate::parse_tap_tree!(@merge tree_a, tree_b)
|
||||
}};
|
||||
|
||||
// Single leaf
|
||||
( $op:ident ( $( $minisc:tt )* ) ) => {{
|
||||
use $crate::alloc::sync::Arc;
|
||||
use $crate::miniscript::descriptor::TapTree;
|
||||
|
||||
$crate::fragment!( $op ( $( $minisc )* ) )
|
||||
.map(|(a_minisc, a_keymap, a_networks)| (TapTree::Leaf(Arc::new(a_minisc)), a_keymap, a_networks))
|
||||
}};
|
||||
}
|
||||
|
||||
#[doc(hidden)]
|
||||
#[macro_export]
|
||||
macro_rules! apply_modifier {
|
||||
( $terminal_variant:ident, $inner:expr ) => {{
|
||||
use $crate::descriptor::CheckMiniscript;
|
||||
|
||||
$inner
|
||||
.map_err(|e| -> $crate::Error { e.into() })
|
||||
.map_err(|e| -> $crate::descriptor::DescriptorError { e.into() })
|
||||
.and_then(|(minisc, keymap, networks)| {
|
||||
Ok((
|
||||
$crate::miniscript::Miniscript::from_ast(
|
||||
$crate::miniscript::miniscript::decode::Terminal::$terminal_variant(
|
||||
std::sync::Arc::new(minisc),
|
||||
),
|
||||
)?,
|
||||
keymap,
|
||||
networks,
|
||||
))
|
||||
let minisc = $crate::miniscript::Miniscript::from_ast(
|
||||
$crate::miniscript::miniscript::decode::Terminal::$terminal_variant(
|
||||
$crate::alloc::sync::Arc::new(minisc),
|
||||
),
|
||||
)?;
|
||||
|
||||
minisc.check_miniscript()?;
|
||||
|
||||
Ok((minisc, keymap, networks))
|
||||
})
|
||||
}};
|
||||
|
||||
@@ -241,8 +374,8 @@ macro_rules! apply_modifier {
|
||||
$inner.and_then(|(a_minisc, a_keymap, a_networks)| {
|
||||
$crate::impl_leaf_opcode_value_two!(
|
||||
AndV,
|
||||
std::sync::Arc::new(a_minisc),
|
||||
std::sync::Arc::new($crate::fragment!(true).unwrap().0)
|
||||
$crate::alloc::sync::Arc::new(a_minisc),
|
||||
$crate::alloc::sync::Arc::new($crate::fragment!(true).unwrap().0)
|
||||
)
|
||||
.map(|(minisc, _, _)| (minisc, a_keymap, a_networks))
|
||||
})
|
||||
@@ -251,8 +384,8 @@ macro_rules! apply_modifier {
|
||||
$inner.and_then(|(a_minisc, a_keymap, a_networks)| {
|
||||
$crate::impl_leaf_opcode_value_two!(
|
||||
OrI,
|
||||
std::sync::Arc::new($crate::fragment!(false).unwrap().0),
|
||||
std::sync::Arc::new(a_minisc)
|
||||
$crate::alloc::sync::Arc::new($crate::fragment!(false).unwrap().0),
|
||||
$crate::alloc::sync::Arc::new(a_minisc)
|
||||
)
|
||||
.map(|(minisc, _, _)| (minisc, a_keymap, a_networks))
|
||||
})
|
||||
@@ -261,8 +394,8 @@ macro_rules! apply_modifier {
|
||||
$inner.and_then(|(a_minisc, a_keymap, a_networks)| {
|
||||
$crate::impl_leaf_opcode_value_two!(
|
||||
OrI,
|
||||
std::sync::Arc::new(a_minisc),
|
||||
std::sync::Arc::new($crate::fragment!(false).unwrap().0)
|
||||
$crate::alloc::sync::Arc::new(a_minisc),
|
||||
$crate::alloc::sync::Arc::new($crate::fragment!(false).unwrap().0)
|
||||
)
|
||||
.map(|(minisc, _, _)| (minisc, a_keymap, a_networks))
|
||||
})
|
||||
@@ -272,18 +405,18 @@ macro_rules! apply_modifier {
|
||||
/// Macro to write full descriptors with code
|
||||
///
|
||||
/// This macro expands to a `Result` of
|
||||
/// [`DescriptorTemplateOut`](super::template::DescriptorTemplateOut) and [`Error`](crate::Error)
|
||||
/// [`DescriptorTemplateOut`](super::template::DescriptorTemplateOut) and [`DescriptorError`](crate::descriptor::DescriptorError)
|
||||
///
|
||||
/// The syntax is very similar to the normal descriptor syntax, with the exception that modifiers
|
||||
/// cannot be grouped together. For instance, a descriptor fragment like `sdv:older(144)` has to be
|
||||
/// broken up to `s:d:v:older(144)`.
|
||||
///
|
||||
/// The `pk()`, `pk_k()` and `pk_h()` operands can take as argument any type that implements
|
||||
/// [`ToDescriptorKey`]. This means that keys can also be written inline as strings, but in that
|
||||
/// [`IntoDescriptorKey`]. This means that keys can also be written inline as strings, but in that
|
||||
/// case they must be wrapped in quotes, which is another difference compared to the standard
|
||||
/// descriptor syntax.
|
||||
///
|
||||
/// [`ToDescriptorKey`]: crate::keys::ToDescriptorKey
|
||||
/// [`IntoDescriptorKey`]: crate::keys::IntoDescriptorKey
|
||||
///
|
||||
/// ## Example
|
||||
///
|
||||
@@ -301,26 +434,30 @@ macro_rules! apply_modifier {
|
||||
/// syntax is more suitable for a fixed number of items known at compile time, while the other accepts a
|
||||
/// [`Vec`] of items, which makes it more suitable for writing dynamic descriptors.
|
||||
///
|
||||
/// They both produce the descriptor: `wsh(thresh(2,pk(...),s:pk(...),sdv:older(...)))`
|
||||
/// They both produce the descriptor: `wsh(thresh(2,pk(...),s:pk(...),sndv:older(...)))`
|
||||
///
|
||||
/// ```
|
||||
/// # use std::str::FromStr;
|
||||
/// let my_key_1 = bitcoin::PublicKey::from_str("02e96fe52ef0e22d2f131dd425ce1893073a3c6ad20e8cac36726393dfb4856a4c")?;
|
||||
/// let my_key_2 = bitcoin::PrivateKey::from_wif("cVt4o7BGAig1UXywgGSmARhxMdzP5qvQsxKkSsc1XEkw3tDTQFpy")?;
|
||||
/// let my_key_1 = bitcoin::PublicKey::from_str(
|
||||
/// "02e96fe52ef0e22d2f131dd425ce1893073a3c6ad20e8cac36726393dfb4856a4c",
|
||||
/// )?;
|
||||
/// let my_key_2 =
|
||||
/// bitcoin::PrivateKey::from_wif("cVt4o7BGAig1UXywgGSmARhxMdzP5qvQsxKkSsc1XEkw3tDTQFpy")?;
|
||||
/// let my_timelock = 50;
|
||||
///
|
||||
/// let (descriptor_a, key_map_a, networks) = bdk::descriptor! {
|
||||
/// wsh (
|
||||
/// thresh(2, pk(my_key_1), s:pk(my_key_2), s:d:v:older(my_timelock))
|
||||
/// thresh(2, pk(my_key_1), s:pk(my_key_2), s:n:d:v:older(my_timelock))
|
||||
/// )
|
||||
/// }?;
|
||||
///
|
||||
/// #[rustfmt::skip]
|
||||
/// let b_items = vec![
|
||||
/// bdk::fragment!(pk(my_key_1))?,
|
||||
/// bdk::fragment!(s:pk(my_key_2))?,
|
||||
/// bdk::fragment!(s:d:v:older(my_timelock))?,
|
||||
/// bdk::fragment!(s:n:d:v:older(my_timelock))?,
|
||||
/// ];
|
||||
/// let (descriptor_b, mut key_map_b, networks) = bdk::descriptor!(wsh(thresh_vec(2,b_items)))?;
|
||||
/// let (descriptor_b, mut key_map_b, networks) = bdk::descriptor!(wsh(thresh_vec(2, b_items)))?;
|
||||
///
|
||||
/// assert_eq!(descriptor_a, descriptor_b);
|
||||
/// assert_eq!(key_map_a.len(), key_map_b.len());
|
||||
@@ -358,37 +495,60 @@ macro_rules! apply_modifier {
|
||||
/// let (descriptor, key_map, networks) = bdk::descriptor!(wpkh(my_key))?;
|
||||
/// # Ok::<(), Box<dyn std::error::Error>>(())
|
||||
/// ```
|
||||
///
|
||||
/// [`Vec`]: alloc::vec::Vec
|
||||
#[macro_export]
|
||||
macro_rules! descriptor {
|
||||
( bare ( $( $minisc:tt )* ) ) => ({
|
||||
$crate::impl_top_level_sh!(Bare, Bare, $( $minisc )*)
|
||||
$crate::impl_top_level_sh!(Bare, new, new, Legacy, $( $minisc )*)
|
||||
});
|
||||
( sh ( wsh ( $( $minisc:tt )* ) ) ) => ({
|
||||
$crate::descriptor!(shwsh ($( $minisc )*))
|
||||
});
|
||||
( shwsh ( $( $minisc:tt )* ) ) => ({
|
||||
$crate::impl_top_level_sh!(ShWsh, ShWshSortedMulti, $( $minisc )*)
|
||||
$crate::impl_top_level_sh!(Sh, new_wsh, new_wsh_sortedmulti, Segwitv0, $( $minisc )*)
|
||||
});
|
||||
( pk ( $key:expr ) ) => ({
|
||||
$crate::impl_top_level_pk!(Pk, $crate::miniscript::Legacy, $key)
|
||||
// `pk()` is actually implemented as `bare(pk())`
|
||||
$crate::descriptor!( bare ( pk ( $key ) ) )
|
||||
});
|
||||
( pkh ( $key:expr ) ) => ({
|
||||
$crate::impl_top_level_pk!(Pkh,$crate::miniscript::Legacy, $key)
|
||||
use $crate::miniscript::descriptor::{Descriptor, DescriptorPublicKey};
|
||||
|
||||
$crate::impl_top_level_pk!(Pkh, $crate::miniscript::Legacy, $key)
|
||||
.map(|(a, b, c)| (Descriptor::<DescriptorPublicKey>::Pkh(a), b, c))
|
||||
});
|
||||
( wpkh ( $key:expr ) ) => ({
|
||||
use $crate::miniscript::descriptor::{Descriptor, DescriptorPublicKey};
|
||||
|
||||
$crate::impl_top_level_pk!(Wpkh, $crate::miniscript::Segwitv0, $key)
|
||||
.and_then(|(a, b, c)| Ok((a?, b, c)))
|
||||
.map(|(a, b, c)| (Descriptor::<DescriptorPublicKey>::Wpkh(a), b, c))
|
||||
});
|
||||
( sh ( wpkh ( $key:expr ) ) ) => ({
|
||||
$crate::descriptor!(shwpkh ( $key ))
|
||||
});
|
||||
( shwpkh ( $key:expr ) ) => ({
|
||||
$crate::impl_top_level_pk!(ShWpkh, $crate::miniscript::Segwitv0, $key)
|
||||
use $crate::miniscript::descriptor::{Descriptor, DescriptorPublicKey, Sh};
|
||||
|
||||
$crate::impl_top_level_pk!(Wpkh, $crate::miniscript::Segwitv0, $key)
|
||||
.and_then(|(a, b, c)| Ok((a?, b, c)))
|
||||
.and_then(|(a, b, c)| Ok((Descriptor::<DescriptorPublicKey>::Sh(Sh::new_wpkh(a.into_inner())?), b, c)))
|
||||
});
|
||||
( sh ( $( $minisc:tt )* ) ) => ({
|
||||
$crate::impl_top_level_sh!(Sh, ShSortedMulti, $( $minisc )*)
|
||||
$crate::impl_top_level_sh!(Sh, new, new_sortedmulti, Legacy, $( $minisc )*)
|
||||
});
|
||||
( wsh ( $( $minisc:tt )* ) ) => ({
|
||||
$crate::impl_top_level_sh!(Wsh, WshSortedMulti, $( $minisc )*)
|
||||
$crate::impl_top_level_sh!(Wsh, new, new_sortedmulti, Segwitv0, $( $minisc )*)
|
||||
});
|
||||
|
||||
( tr ( $internal_key:expr ) ) => ({
|
||||
$crate::impl_top_level_tr!($internal_key, None)
|
||||
});
|
||||
( tr ( $internal_key:expr, $( $taptree:tt )* ) ) => ({
|
||||
let tap_tree = $crate::parse_tap_tree!( $( $taptree )* );
|
||||
tap_tree
|
||||
.and_then(|tap_tree| $crate::impl_top_level_tr!($internal_key, Some(tap_tree)))
|
||||
});
|
||||
}
|
||||
|
||||
@@ -429,6 +589,23 @@ impl<A, B, C> From<(A, (B, (C, ())))> for TupleThree<A, B, C> {
|
||||
}
|
||||
}
|
||||
|
||||
#[doc(hidden)]
|
||||
#[macro_export]
|
||||
macro_rules! group_multi_keys {
|
||||
( $( $key:expr ),+ ) => {{
|
||||
use $crate::keys::IntoDescriptorKey;
|
||||
|
||||
let keys = vec![
|
||||
$(
|
||||
$key.into_descriptor_key(),
|
||||
)*
|
||||
];
|
||||
|
||||
keys.into_iter().collect::<Result<$crate::alloc::vec::Vec<_>, _>>()
|
||||
.map_err($crate::descriptor::DescriptorError::Key)
|
||||
}};
|
||||
}
|
||||
|
||||
#[doc(hidden)]
|
||||
#[macro_export]
|
||||
macro_rules! fragment_internal {
|
||||
@@ -470,7 +647,7 @@ macro_rules! fragment_internal {
|
||||
// three operands it's (X, (X, (X, ()))), etc.
|
||||
//
|
||||
// To check that the right number of arguments has been passed we can "cast" those tuples to
|
||||
// more convenient structures like `TupleTwo`. If the conversion succedes, the right number of
|
||||
// more convenient structures like `TupleTwo`. If the conversion succeeds, the right number of
|
||||
// args was passed. Otherwise the compilation fails entirely.
|
||||
( @t $op:ident ( $( $args:tt )* ) $( $tail:tt )* ) => ({
|
||||
($crate::fragment!( $op ( $( $args )* ) ), $crate::fragment_internal!( @t $( $tail )* ))
|
||||
@@ -484,9 +661,7 @@ macro_rules! fragment_internal {
|
||||
( @t , $( $tail:tt )* ) => ({
|
||||
$crate::fragment_internal!( @t $( $tail )* )
|
||||
});
|
||||
( @t ) => ({
|
||||
()
|
||||
});
|
||||
( @t ) => ({});
|
||||
|
||||
// Fallback to calling `fragment!()`
|
||||
( $( $tokens:tt )* ) => ({
|
||||
@@ -496,7 +671,7 @@ macro_rules! fragment_internal {
|
||||
|
||||
/// Macro to write descriptor fragments with code
|
||||
///
|
||||
/// This macro will be expanded to an object of type `Result<(Miniscript<DescriptorPublicKey, _>, KeyMap, ValidNetworks), Error>`. It allows writing
|
||||
/// This macro will be expanded to an object of type `Result<(Miniscript<DescriptorPublicKey, _>, KeyMap, ValidNetworks), DescriptorError>`. It allows writing
|
||||
/// fragments of larger descriptors that can be pieced together using `fragment!(thresh_vec(m, ...))`.
|
||||
///
|
||||
/// The syntax to write macro fragment is the same as documented for the [`descriptor`] macro.
|
||||
@@ -522,14 +697,15 @@ macro_rules! fragment {
|
||||
( pk ( $key:expr ) ) => ({
|
||||
$crate::fragment!(c:pk_k ( $key ))
|
||||
});
|
||||
( pk_h ( $key_hash:expr ) ) => ({
|
||||
$crate::impl_leaf_opcode_value!(PkH, $key_hash)
|
||||
( pk_h ( $key:expr ) ) => ({
|
||||
let secp = $crate::bitcoin::secp256k1::Secp256k1::new();
|
||||
$crate::keys::make_pkh($key, &secp)
|
||||
});
|
||||
( after ( $value:expr ) ) => ({
|
||||
$crate::impl_leaf_opcode_value!(After, $value)
|
||||
$crate::impl_leaf_opcode_value!(After, $crate::bitcoin::PackedLockTime($value)) // TODO!! https://github.com/rust-bitcoin/rust-bitcoin/issues/1302
|
||||
});
|
||||
( older ( $value:expr ) ) => ({
|
||||
$crate::impl_leaf_opcode_value!(Older, $value)
|
||||
$crate::impl_leaf_opcode_value!(Older, $crate::bitcoin::Sequence($value)) // TODO!!
|
||||
});
|
||||
( sha256 ( $hash:expr ) ) => ({
|
||||
$crate::impl_leaf_opcode_value!(Sha256, $hash)
|
||||
@@ -552,6 +728,9 @@ macro_rules! fragment {
|
||||
( and_or ( $( $inner:tt )* ) ) => ({
|
||||
$crate::impl_node_opcode_three!(AndOr, $( $inner )*)
|
||||
});
|
||||
( andor ( $( $inner:tt )* ) ) => ({
|
||||
$crate::impl_node_opcode_three!(AndOr, $( $inner )*)
|
||||
});
|
||||
( or_b ( $( $inner:tt )* ) ) => ({
|
||||
$crate::impl_node_opcode_two!(OrB, $( $inner )*)
|
||||
});
|
||||
@@ -567,8 +746,8 @@ macro_rules! fragment {
|
||||
( thresh_vec ( $thresh:expr, $items:expr ) ) => ({
|
||||
use $crate::miniscript::descriptor::KeyMap;
|
||||
|
||||
let (items, key_maps_networks): (Vec<_>, Vec<_>) = $items.into_iter().map(|(a, b, c)| (a, (b, c))).unzip();
|
||||
let items = items.into_iter().map(std::sync::Arc::new).collect();
|
||||
let (items, key_maps_networks): ($crate::alloc::vec::Vec<_>, $crate::alloc::vec::Vec<_>) = $items.into_iter().map(|(a, b, c)| (a, (b, c))).unzip();
|
||||
let items = items.into_iter().map($crate::alloc::sync::Arc::new).collect();
|
||||
|
||||
let (key_maps, valid_networks) = key_maps_networks.into_iter().fold((KeyMap::default(), $crate::keys::any_network()), |(mut keys_acc, net_acc), (key, net)| {
|
||||
keys_acc.extend(key.into_iter());
|
||||
@@ -583,23 +762,26 @@ macro_rules! fragment {
|
||||
( thresh ( $thresh:expr, $( $inner:tt )* ) ) => ({
|
||||
let items = $crate::fragment_internal!( @v $( $inner )* );
|
||||
|
||||
items.into_iter().collect::<Result<Vec<_>, _>>()
|
||||
items.into_iter().collect::<Result<$crate::alloc::vec::Vec<_>, _>>()
|
||||
.and_then(|items| $crate::fragment!(thresh_vec($thresh, items)))
|
||||
});
|
||||
( multi_vec ( $thresh:expr, $keys:expr ) ) => ({
|
||||
$crate::keys::make_multi($thresh, $keys)
|
||||
});
|
||||
( multi ( $thresh:expr $(, $key:expr )+ ) ) => ({
|
||||
use $crate::keys::ToDescriptorKey;
|
||||
let secp = $crate::bitcoin::secp256k1::Secp256k1::new();
|
||||
|
||||
let mut keys = vec![];
|
||||
$(
|
||||
keys.push($key.to_descriptor_key());
|
||||
)*
|
||||
$crate::keys::make_multi($thresh, $crate::miniscript::Terminal::Multi, $keys, &secp)
|
||||
});
|
||||
( multi ( $thresh:expr $(, $key:expr )+ ) ) => ({
|
||||
$crate::group_multi_keys!( $( $key ),* )
|
||||
.and_then(|keys| $crate::fragment!( multi_vec ( $thresh, keys ) ))
|
||||
});
|
||||
( multi_a_vec ( $thresh:expr, $keys:expr ) ) => ({
|
||||
let secp = $crate::bitcoin::secp256k1::Secp256k1::new();
|
||||
|
||||
keys.into_iter().collect::<Result<Vec<_>, _>>()
|
||||
.and_then(|keys| $crate::keys::make_multi($thresh, keys, &secp))
|
||||
$crate::keys::make_multi($thresh, $crate::miniscript::Terminal::MultiA, $keys, &secp)
|
||||
});
|
||||
( multi_a ( $thresh:expr $(, $key:expr )+ ) ) => ({
|
||||
$crate::group_multi_keys!( $( $key ),* )
|
||||
.and_then(|keys| $crate::fragment!( multi_a_vec ( $thresh, keys ) ))
|
||||
});
|
||||
|
||||
// `sortedmulti()` is handled separately
|
||||
@@ -613,54 +795,51 @@ macro_rules! fragment {
|
||||
|
||||
#[cfg(test)]
|
||||
mod test {
|
||||
use alloc::string::ToString;
|
||||
use bitcoin::hashes::hex::ToHex;
|
||||
use bitcoin::secp256k1::Secp256k1;
|
||||
use miniscript::descriptor::{DescriptorPublicKey, DescriptorPublicKeyCtx, KeyMap};
|
||||
use miniscript::descriptor::{DescriptorPublicKey, KeyMap};
|
||||
use miniscript::{Descriptor, Legacy, Segwitv0};
|
||||
|
||||
use std::str::FromStr;
|
||||
use core::str::FromStr;
|
||||
|
||||
use crate::descriptor::DescriptorMeta;
|
||||
use crate::keys::{DescriptorKey, KeyError, ToDescriptorKey, ValidNetworks};
|
||||
use bitcoin::network::constants::Network::{Bitcoin, Regtest, Testnet};
|
||||
use crate::descriptor::{DescriptorError, DescriptorMeta};
|
||||
use crate::keys::{DescriptorKey, IntoDescriptorKey, ValidNetworks};
|
||||
use bitcoin::network::constants::Network::{Bitcoin, Regtest, Signet, Testnet};
|
||||
use bitcoin::util::bip32;
|
||||
use bitcoin::util::bip32::ChildNumber;
|
||||
use bitcoin::PrivateKey;
|
||||
|
||||
// test the descriptor!() macro
|
||||
|
||||
// verify descriptor generates expected script(s) (if bare or pk) or address(es)
|
||||
fn check(
|
||||
desc: Result<(Descriptor<DescriptorPublicKey>, KeyMap, ValidNetworks), KeyError>,
|
||||
desc: Result<(Descriptor<DescriptorPublicKey>, KeyMap, ValidNetworks), DescriptorError>,
|
||||
is_witness: bool,
|
||||
is_fixed: bool,
|
||||
expected: &[&str],
|
||||
) {
|
||||
let secp = Secp256k1::new();
|
||||
let deriv_ctx = DescriptorPublicKeyCtx::new(&secp, ChildNumber::Normal { index: 0 });
|
||||
|
||||
let (desc, _key_map, _networks) = desc.unwrap();
|
||||
assert_eq!(desc.is_witness(), is_witness);
|
||||
assert_eq!(desc.is_fixed(), is_fixed);
|
||||
assert_eq!(!desc.has_wildcard(), is_fixed);
|
||||
for i in 0..expected.len() {
|
||||
let index = i as u32;
|
||||
let child_desc = if desc.is_fixed() {
|
||||
desc.clone()
|
||||
let child_desc = if !desc.has_wildcard() {
|
||||
desc.at_derivation_index(0)
|
||||
} else {
|
||||
desc.derive(ChildNumber::from_normal_idx(index).unwrap())
|
||||
desc.at_derivation_index(index)
|
||||
};
|
||||
let address = child_desc.address(Regtest, deriv_ctx);
|
||||
if let Some(address) = address {
|
||||
let address = child_desc.address(Regtest);
|
||||
if let Ok(address) = address {
|
||||
assert_eq!(address.to_string(), *expected.get(i).unwrap());
|
||||
} else {
|
||||
let script = child_desc.script_pubkey(deriv_ctx);
|
||||
let script = child_desc.script_pubkey();
|
||||
assert_eq!(script.to_hex().as_str(), *expected.get(i).unwrap());
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// - at least one of each "type" of operator; ie. one modifier, one leaf_opcode, one leaf_opcode_value, etc.
|
||||
// - mixing up key types that implement ToDescriptorKey in multi() or thresh()
|
||||
// - at least one of each "type" of operator; i.e. one modifier, one leaf_opcode, one leaf_opcode_value, etc.
|
||||
// - mixing up key types that implement IntoDescriptorKey in multi() or thresh()
|
||||
|
||||
// expected script for pk and bare manually created
|
||||
// expected addresses created with `bitcoin-cli getdescriptorinfo` (for hash) and `bitcoin-cli deriveaddresses`
|
||||
@@ -739,12 +918,31 @@ mod test {
|
||||
);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_fixed_threeop_descriptors() {
|
||||
let redeem_key = bitcoin::PublicKey::from_str(
|
||||
"03a34b99f22c790c4e36b2b3c2c35a36db06226e41c692fc82b8b56ac1c540c5bd",
|
||||
)
|
||||
.unwrap();
|
||||
let move_key = bitcoin::PublicKey::from_str(
|
||||
"032e58afe51f9ed8ad3cc7897f634d881fdbe49a81564629ded8156bebd2ffd1af",
|
||||
)
|
||||
.unwrap();
|
||||
|
||||
check(
|
||||
descriptor!(sh(wsh(and_or(pk(redeem_key), older(1000), pk(move_key))))),
|
||||
true,
|
||||
true,
|
||||
&["2MypGwr5eQWAWWJtiJgUEToVxc4zuokjQRe"],
|
||||
);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_bip32_legacy_descriptors() {
|
||||
let xprv = bip32::ExtendedPrivKey::from_str("tprv8ZgxMBicQKsPcx5nBGsR63Pe8KnRUqmbJNENAfGftF3yuXoMMoVJJcYeUw5eVkm9WBPjWYt6HMWYJNesB5HaNVBaFc1M6dRjWSYnmewUMYy").unwrap();
|
||||
|
||||
let path = bip32::DerivationPath::from_str("m/0").unwrap();
|
||||
let desc_key = (xprv, path.clone()).to_descriptor_key().unwrap();
|
||||
let desc_key = (xprv, path.clone()).into_descriptor_key().unwrap();
|
||||
check(
|
||||
descriptor!(pk(desc_key)),
|
||||
false,
|
||||
@@ -756,7 +954,7 @@ mod test {
|
||||
],
|
||||
);
|
||||
|
||||
let desc_key = (xprv, path.clone()).to_descriptor_key().unwrap();
|
||||
let desc_key = (xprv, path.clone()).into_descriptor_key().unwrap();
|
||||
check(
|
||||
descriptor!(pkh(desc_key)),
|
||||
false,
|
||||
@@ -769,8 +967,8 @@ mod test {
|
||||
);
|
||||
|
||||
let path2 = bip32::DerivationPath::from_str("m/2147483647'/0").unwrap();
|
||||
let desc_key1 = (xprv, path).to_descriptor_key().unwrap();
|
||||
let desc_key2 = (xprv, path2).to_descriptor_key().unwrap();
|
||||
let desc_key1 = (xprv, path).into_descriptor_key().unwrap();
|
||||
let desc_key2 = (xprv, path2).into_descriptor_key().unwrap();
|
||||
|
||||
check(
|
||||
descriptor!(sh(multi(1, desc_key1, desc_key2))),
|
||||
@@ -789,7 +987,7 @@ mod test {
|
||||
let xprv = bip32::ExtendedPrivKey::from_str("tprv8ZgxMBicQKsPcx5nBGsR63Pe8KnRUqmbJNENAfGftF3yuXoMMoVJJcYeUw5eVkm9WBPjWYt6HMWYJNesB5HaNVBaFc1M6dRjWSYnmewUMYy").unwrap();
|
||||
|
||||
let path = bip32::DerivationPath::from_str("m/0").unwrap();
|
||||
let desc_key = (xprv, path.clone()).to_descriptor_key().unwrap();
|
||||
let desc_key = (xprv, path.clone()).into_descriptor_key().unwrap();
|
||||
check(
|
||||
descriptor!(wpkh(desc_key)),
|
||||
true,
|
||||
@@ -801,7 +999,7 @@ mod test {
|
||||
],
|
||||
);
|
||||
|
||||
let desc_key = (xprv, path.clone()).to_descriptor_key().unwrap();
|
||||
let desc_key = (xprv, path.clone()).into_descriptor_key().unwrap();
|
||||
check(
|
||||
descriptor!(sh(wpkh(desc_key))),
|
||||
true,
|
||||
@@ -814,8 +1012,8 @@ mod test {
|
||||
);
|
||||
|
||||
let path2 = bip32::DerivationPath::from_str("m/2147483647'/0").unwrap();
|
||||
let desc_key1 = (xprv, path.clone()).to_descriptor_key().unwrap();
|
||||
let desc_key2 = (xprv, path2.clone()).to_descriptor_key().unwrap();
|
||||
let desc_key1 = (xprv, path.clone()).into_descriptor_key().unwrap();
|
||||
let desc_key2 = (xprv, path2.clone()).into_descriptor_key().unwrap();
|
||||
check(
|
||||
descriptor!(wsh(multi(1, desc_key1, desc_key2))),
|
||||
true,
|
||||
@@ -827,8 +1025,8 @@ mod test {
|
||||
],
|
||||
);
|
||||
|
||||
let desc_key1 = (xprv, path).to_descriptor_key().unwrap();
|
||||
let desc_key2 = (xprv, path2).to_descriptor_key().unwrap();
|
||||
let desc_key1 = (xprv, path).into_descriptor_key().unwrap();
|
||||
let desc_key2 = (xprv, path2).into_descriptor_key().unwrap();
|
||||
check(
|
||||
descriptor!(sh(wsh(multi(1, desc_key1, desc_key2)))),
|
||||
true,
|
||||
@@ -904,14 +1102,17 @@ mod test {
|
||||
fn test_valid_networks() {
|
||||
let xprv = bip32::ExtendedPrivKey::from_str("tprv8ZgxMBicQKsPcx5nBGsR63Pe8KnRUqmbJNENAfGftF3yuXoMMoVJJcYeUw5eVkm9WBPjWYt6HMWYJNesB5HaNVBaFc1M6dRjWSYnmewUMYy").unwrap();
|
||||
let path = bip32::DerivationPath::from_str("m/0").unwrap();
|
||||
let desc_key = (xprv, path.clone()).to_descriptor_key().unwrap();
|
||||
let desc_key = (xprv, path).into_descriptor_key().unwrap();
|
||||
|
||||
let (_desc, _key_map, valid_networks) = descriptor!(pkh(desc_key)).unwrap();
|
||||
assert_eq!(valid_networks, [Testnet, Regtest].iter().cloned().collect());
|
||||
assert_eq!(
|
||||
valid_networks,
|
||||
[Testnet, Regtest, Signet].iter().cloned().collect()
|
||||
);
|
||||
|
||||
let xprv = bip32::ExtendedPrivKey::from_str("xprv9s21ZrQH143K3QTDL4LXw2F7HEK3wJUD2nW2nRk4stbPy6cq3jPPqjiChkVvvNKmPGJxWUtg6LnF5kejMRNNU3TGtRBeJgk33yuGBxrMPHi").unwrap();
|
||||
let path = bip32::DerivationPath::from_str("m/10/20/30/40").unwrap();
|
||||
let desc_key = (xprv, path.clone()).to_descriptor_key().unwrap();
|
||||
let desc_key = (xprv, path).into_descriptor_key().unwrap();
|
||||
|
||||
let (_desc, _key_map, valid_networks) = descriptor!(wpkh(desc_key)).unwrap();
|
||||
assert_eq!(valid_networks, [Bitcoin].iter().cloned().collect());
|
||||
@@ -924,26 +1125,23 @@ mod test {
|
||||
|
||||
let xprv1 = bip32::ExtendedPrivKey::from_str("tprv8ZgxMBicQKsPcx5nBGsR63Pe8KnRUqmbJNENAfGftF3yuXoMMoVJJcYeUw5eVkm9WBPjWYt6HMWYJNesB5HaNVBaFc1M6dRjWSYnmewUMYy").unwrap();
|
||||
let path1 = bip32::DerivationPath::from_str("m/0").unwrap();
|
||||
let desc_key1 = (xprv1, path1.clone()).to_descriptor_key().unwrap();
|
||||
let desc_key1 = (xprv1, path1.clone()).into_descriptor_key().unwrap();
|
||||
|
||||
let xprv2 = bip32::ExtendedPrivKey::from_str("tprv8ZgxMBicQKsPegBHHnq7YEgM815dG24M2Jk5RVqipgDxF1HJ1tsnT815X5Fd5FRfMVUs8NZs9XCb6y9an8hRPThnhfwfXJ36intaekySHGF").unwrap();
|
||||
let path2 = bip32::DerivationPath::from_str("m/2147483647'/0").unwrap();
|
||||
let desc_key2 = (xprv2, path2.clone()).to_descriptor_key().unwrap();
|
||||
let desc_key2 = (xprv2, path2.clone()).into_descriptor_key().unwrap();
|
||||
|
||||
let xprv3 = bip32::ExtendedPrivKey::from_str("tprv8ZgxMBicQKsPdZXrcHNLf5JAJWFAoJ2TrstMRdSKtEggz6PddbuSkvHKM9oKJyFgZV1B7rw8oChspxyYbtmEXYyg1AjfWbL3ho3XHDpHRZf").unwrap();
|
||||
let path3 = bip32::DerivationPath::from_str("m/10/20/30/40").unwrap();
|
||||
let desc_key3 = (xprv3, path3.clone()).to_descriptor_key().unwrap();
|
||||
let desc_key3 = (xprv3, path3.clone()).into_descriptor_key().unwrap();
|
||||
|
||||
let (_desc, key_map, _valid_networks) =
|
||||
descriptor!(sh(wsh(multi(2, desc_key1, desc_key2, desc_key3)))).unwrap();
|
||||
assert_eq!(key_map.len(), 3);
|
||||
|
||||
let desc_key1: DescriptorKey<Segwitv0> =
|
||||
(xprv1, path1.clone()).to_descriptor_key().unwrap();
|
||||
let desc_key2: DescriptorKey<Segwitv0> =
|
||||
(xprv2, path2.clone()).to_descriptor_key().unwrap();
|
||||
let desc_key3: DescriptorKey<Segwitv0> =
|
||||
(xprv3, path3.clone()).to_descriptor_key().unwrap();
|
||||
let desc_key1: DescriptorKey<Segwitv0> = (xprv1, path1).into_descriptor_key().unwrap();
|
||||
let desc_key2: DescriptorKey<Segwitv0> = (xprv2, path2).into_descriptor_key().unwrap();
|
||||
let desc_key3: DescriptorKey<Segwitv0> = (xprv3, path3).into_descriptor_key().unwrap();
|
||||
|
||||
let (key1, _key_map, _valid_networks) = desc_key1.extract(&secp).unwrap();
|
||||
let (key2, _key_map, _valid_networks) = desc_key2.extract(&secp).unwrap();
|
||||
@@ -953,19 +1151,19 @@ mod test {
|
||||
assert_eq!(key_map.get(&key3).unwrap().to_string(), "tprv8ZgxMBicQKsPdZXrcHNLf5JAJWFAoJ2TrstMRdSKtEggz6PddbuSkvHKM9oKJyFgZV1B7rw8oChspxyYbtmEXYyg1AjfWbL3ho3XHDpHRZf/10/20/30/40/*");
|
||||
}
|
||||
|
||||
// - verify the ScriptContext is correctly validated (i.e. passing a type that only impl ToDescriptorKey<Segwitv0> to a pkh() descriptor should throw a compilation error
|
||||
// - verify the ScriptContext is correctly validated (i.e. passing a type that only impl IntoDescriptorKey<Segwitv0> to a pkh() descriptor should throw a compilation error
|
||||
#[test]
|
||||
fn test_script_context_validation() {
|
||||
// this compiles
|
||||
let xprv = bip32::ExtendedPrivKey::from_str("tprv8ZgxMBicQKsPcx5nBGsR63Pe8KnRUqmbJNENAfGftF3yuXoMMoVJJcYeUw5eVkm9WBPjWYt6HMWYJNesB5HaNVBaFc1M6dRjWSYnmewUMYy").unwrap();
|
||||
let path = bip32::DerivationPath::from_str("m/0").unwrap();
|
||||
let desc_key: DescriptorKey<Legacy> = (xprv, path.clone()).to_descriptor_key().unwrap();
|
||||
let desc_key: DescriptorKey<Legacy> = (xprv, path).into_descriptor_key().unwrap();
|
||||
|
||||
let (desc, _key_map, _valid_networks) = descriptor!(pkh(desc_key)).unwrap();
|
||||
assert_eq!(desc.to_string(), "pkh(tpubD6NzVbkrYhZ4WR7a4vY1VT3khMJMeAxVsfq9TBJyJWrNk247zCJtV7AWf6UJP7rAVsn8NNKdJi3gFyKPTmWZS9iukb91xbn2HbFSMQm2igY/0/*)");
|
||||
assert_eq!(desc.to_string(), "pkh(tpubD6NzVbkrYhZ4WR7a4vY1VT3khMJMeAxVsfq9TBJyJWrNk247zCJtV7AWf6UJP7rAVsn8NNKdJi3gFyKPTmWZS9iukb91xbn2HbFSMQm2igY/0/*)#yrnz9pp2");
|
||||
|
||||
// as expected this does not compile due to invalid context
|
||||
//let desc_key:DescriptorKey<Segwitv0> = (xprv, path.clone()).to_descriptor_key().unwrap();
|
||||
//let desc_key:DescriptorKey<Segwitv0> = (xprv, path.clone()).into_descriptor_key().unwrap();
|
||||
//let (desc, _key_map, _valid_networks) = descriptor!(pkh(desc_key)).unwrap();
|
||||
}
|
||||
|
||||
@@ -974,8 +1172,51 @@ mod test {
|
||||
let private_key =
|
||||
PrivateKey::from_wif("cSQPHDBwXGjVzWRqAHm6zfvQhaTuj1f2bFH58h55ghbjtFwvmeXR").unwrap();
|
||||
let (descriptor, _, _) =
|
||||
descriptor!(wsh(thresh(2,d:v:older(1),s:pk(private_key),s:pk(private_key)))).unwrap();
|
||||
descriptor!(wsh(thresh(2,n:d:v:older(1),s:pk(private_key),s:pk(private_key)))).unwrap();
|
||||
|
||||
assert_eq!(descriptor.to_string(), "wsh(thresh(2,dv:older(1),s:pk(02e96fe52ef0e22d2f131dd425ce1893073a3c6ad20e8cac36726393dfb4856a4c),s:pk(02e96fe52ef0e22d2f131dd425ce1893073a3c6ad20e8cac36726393dfb4856a4c)))")
|
||||
assert_eq!(descriptor.to_string(), "wsh(thresh(2,ndv:older(1),s:pk(02e96fe52ef0e22d2f131dd425ce1893073a3c6ad20e8cac36726393dfb4856a4c),s:pk(02e96fe52ef0e22d2f131dd425ce1893073a3c6ad20e8cac36726393dfb4856a4c)))#zzk3ux8g")
|
||||
}
|
||||
|
||||
#[test]
|
||||
#[should_panic(
|
||||
expected = "Miniscript(ContextError(CompressedOnly(\"04b4632d08485ff1df2db55b9dafd23347d1c47a457072a1e87be26896549a87378ec38ff91d43e8c2092ebda601780485263da089465619e0358a5c1be7ac91f4\")))"
|
||||
)]
|
||||
fn test_dsl_miniscript_checks() {
|
||||
let mut uncompressed_pk =
|
||||
PrivateKey::from_wif("L5EZftvrYaSudiozVRzTqLcHLNDoVn7H5HSfM9BAN6tMJX8oTWz6").unwrap();
|
||||
uncompressed_pk.compressed = false;
|
||||
|
||||
descriptor!(wsh(v: pk(uncompressed_pk))).unwrap();
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_dsl_tr_only_key() {
|
||||
let private_key =
|
||||
PrivateKey::from_wif("cSQPHDBwXGjVzWRqAHm6zfvQhaTuj1f2bFH58h55ghbjtFwvmeXR").unwrap();
|
||||
let (descriptor, _, _) = descriptor!(tr(private_key)).unwrap();
|
||||
|
||||
assert_eq!(
|
||||
descriptor.to_string(),
|
||||
"tr(02e96fe52ef0e22d2f131dd425ce1893073a3c6ad20e8cac36726393dfb4856a4c)#heq9m95v"
|
||||
)
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_dsl_tr_simple_tree() {
|
||||
let private_key =
|
||||
PrivateKey::from_wif("cSQPHDBwXGjVzWRqAHm6zfvQhaTuj1f2bFH58h55ghbjtFwvmeXR").unwrap();
|
||||
let (descriptor, _, _) =
|
||||
descriptor!(tr(private_key, { pk(private_key), pk(private_key) })).unwrap();
|
||||
|
||||
assert_eq!(descriptor.to_string(), "tr(02e96fe52ef0e22d2f131dd425ce1893073a3c6ad20e8cac36726393dfb4856a4c,{pk(02e96fe52ef0e22d2f131dd425ce1893073a3c6ad20e8cac36726393dfb4856a4c),pk(02e96fe52ef0e22d2f131dd425ce1893073a3c6ad20e8cac36726393dfb4856a4c)})#xy5fjw6d")
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_dsl_tr_single_leaf() {
|
||||
let private_key =
|
||||
PrivateKey::from_wif("cSQPHDBwXGjVzWRqAHm6zfvQhaTuj1f2bFH58h55ghbjtFwvmeXR").unwrap();
|
||||
let (descriptor, _, _) = descriptor!(tr(private_key, pk(private_key))).unwrap();
|
||||
|
||||
assert_eq!(descriptor.to_string(), "tr(02e96fe52ef0e22d2f131dd425ce1893073a3c6ad20e8cac36726393dfb4856a4c,pk(02e96fe52ef0e22d2f131dd425ce1893073a3c6ad20e8cac36726393dfb4856a4c))#lzl2vmc7")
|
||||
}
|
||||
}
|
||||
87
crates/bdk/src/descriptor/error.rs
Normal file
87
crates/bdk/src/descriptor/error.rs
Normal file
@@ -0,0 +1,87 @@
|
||||
// Bitcoin Dev Kit
|
||||
// Written in 2020 by Alekos Filini <alekos.filini@gmail.com>
|
||||
//
|
||||
// Copyright (c) 2020-2021 Bitcoin Dev Kit Developers
|
||||
//
|
||||
// This file is licensed under the Apache License, Version 2.0 <LICENSE-APACHE
|
||||
// or http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
|
||||
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your option.
|
||||
// You may not use this file except in accordance with one or both of these
|
||||
// licenses.
|
||||
|
||||
//! Descriptor errors
|
||||
|
||||
/// Errors related to the parsing and usage of descriptors
#[derive(Debug)]
pub enum Error {
    /// Invalid HD Key path, such as having a wildcard but a length != 1
    InvalidHdKeyPath,
    /// The provided descriptor doesn't match its checksum
    InvalidDescriptorChecksum,
    /// The descriptor contains hardened derivation steps on public extended keys
    HardenedDerivationXpub,

    /// Error thrown while working with [`keys`](crate::keys)
    Key(crate::keys::KeyError),
    /// Error while extracting and manipulating policies
    Policy(crate::descriptor::policy::PolicyError),

    /// Invalid byte found in the descriptor checksum
    InvalidDescriptorCharacter(u8),

    // The variants below wrap errors from the underlying libraries; each has a
    // matching `impl_error!`/`From` conversion at the bottom of this file.
    /// BIP32 error
    Bip32(bitcoin::util::bip32::Error),
    /// Error during base58 decoding
    Base58(bitcoin::util::base58::Error),
    /// Key-related error
    Pk(bitcoin::util::key::Error),
    /// Miniscript error
    Miniscript(miniscript::Error),
    /// Hex decoding error
    Hex(bitcoin::hashes::hex::Error),
}
|
||||
|
||||
impl From<crate::keys::KeyError> for Error {
|
||||
fn from(key_error: crate::keys::KeyError) -> Error {
|
||||
match key_error {
|
||||
crate::keys::KeyError::Miniscript(inner) => Error::Miniscript(inner),
|
||||
crate::keys::KeyError::Bip32(inner) => Error::Bip32(inner),
|
||||
e => Error::Key(e),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl std::fmt::Display for Error {
|
||||
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
|
||||
match self {
|
||||
Self::InvalidHdKeyPath => write!(f, "Invalid HD key path"),
|
||||
Self::InvalidDescriptorChecksum => {
|
||||
write!(f, "The provided descriptor doesn't match its checksum")
|
||||
}
|
||||
Self::HardenedDerivationXpub => write!(
|
||||
f,
|
||||
"The descriptor contains hardened derivation steps on public extended keys"
|
||||
),
|
||||
Self::Key(err) => write!(f, "Key error: {}", err),
|
||||
Self::Policy(err) => write!(f, "Policy error: {}", err),
|
||||
Self::InvalidDescriptorCharacter(char) => {
|
||||
write!(f, "Invalid descriptor character: {}", char)
|
||||
}
|
||||
Self::Bip32(err) => write!(f, "BIP32 error: {}", err),
|
||||
Self::Base58(err) => write!(f, "Base58 error: {}", err),
|
||||
Self::Pk(err) => write!(f, "Key-related error: {}", err),
|
||||
Self::Miniscript(err) => write!(f, "Miniscript error: {}", err),
|
||||
Self::Hex(err) => write!(f, "Hex decoding error: {}", err),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// `std::error::Error` is only available with the standard library, so this
// impl is gated on the `std` feature.
#[cfg(feature = "std")]
impl std::error::Error for Error {}

// NOTE(review): `impl_error!` is defined elsewhere in the crate — presumably it
// generates the `From<$source> for Error` conversion into the given variant;
// confirm against the macro definition.
impl_error!(bitcoin::util::bip32::Error, Bip32);
impl_error!(bitcoin::util::base58::Error, Base58);
impl_error!(bitcoin::util::key::Error, Pk);
impl_error!(miniscript::Error, Miniscript);
impl_error!(bitcoin::hashes::hex::Error, Hex);
|
||||
876
crates/bdk/src/descriptor/mod.rs
Normal file
876
crates/bdk/src/descriptor/mod.rs
Normal file
@@ -0,0 +1,876 @@
|
||||
// Bitcoin Dev Kit
|
||||
// Written in 2020 by Alekos Filini <alekos.filini@gmail.com>
|
||||
//
|
||||
// Copyright (c) 2020-2021 Bitcoin Dev Kit Developers
|
||||
//
|
||||
// This file is licensed under the Apache License, Version 2.0 <LICENSE-APACHE
|
||||
// or http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
|
||||
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your option.
|
||||
// You may not use this file except in accordance with one or both of these
|
||||
// licenses.
|
||||
|
||||
//! Descriptors
|
||||
//!
|
||||
//! This module contains generic utilities to work with descriptors, plus some re-exported types
|
||||
//! from [`miniscript`].
|
||||
|
||||
use crate::collections::BTreeMap;
|
||||
use alloc::string::String;
|
||||
use alloc::vec::Vec;
|
||||
|
||||
use bitcoin::util::bip32::{ChildNumber, DerivationPath, ExtendedPubKey, Fingerprint, KeySource};
|
||||
use bitcoin::util::{psbt, taproot};
|
||||
use bitcoin::{secp256k1, PublicKey, XOnlyPublicKey};
|
||||
use bitcoin::{Network, TxOut};
|
||||
|
||||
use miniscript::descriptor::{
|
||||
DefiniteDescriptorKey, DescriptorSecretKey, DescriptorType, InnerXKey, SinglePubKey,
|
||||
};
|
||||
pub use miniscript::{
|
||||
descriptor::DescriptorXKey, descriptor::KeyMap, descriptor::Wildcard, Descriptor,
|
||||
DescriptorPublicKey, Legacy, Miniscript, ScriptContext, Segwitv0,
|
||||
};
|
||||
use miniscript::{ForEachKey, MiniscriptKey, TranslatePk};
|
||||
|
||||
use crate::descriptor::policy::BuildSatisfaction;
|
||||
|
||||
pub mod checksum;
|
||||
#[doc(hidden)]
|
||||
pub mod dsl;
|
||||
pub mod error;
|
||||
pub mod policy;
|
||||
pub mod template;
|
||||
|
||||
pub use self::checksum::calc_checksum;
|
||||
use self::checksum::calc_checksum_bytes;
|
||||
pub use self::error::Error as DescriptorError;
|
||||
pub use self::policy::Policy;
|
||||
use self::template::DescriptorTemplateOut;
|
||||
use crate::keys::{IntoDescriptorKey, KeyError};
|
||||
use crate::wallet::signer::SignersContainer;
|
||||
use crate::wallet::utils::SecpCtx;
|
||||
|
||||
/// Alias for a [`Descriptor`] that can contain extended keys using [`DescriptorPublicKey`]
pub type ExtendedDescriptor = Descriptor<DescriptorPublicKey>;

/// Alias for a [`Descriptor`] that contains extended **derived** keys
pub type DerivedDescriptor = Descriptor<DefiniteDescriptorKey>;

/// Alias for the type of maps that represent derivation paths in a [`psbt::Input`] or
/// [`psbt::Output`]
///
/// [`psbt::Input`]: bitcoin::util::psbt::Input
/// [`psbt::Output`]: bitcoin::util::psbt::Output
pub type HdKeyPaths = BTreeMap<secp256k1::PublicKey, KeySource>;

/// Alias for the type of maps that represent taproot key origins in a [`psbt::Input`] or
/// [`psbt::Output`]
///
/// Maps each x-only key to the taproot leaf hashes it appears in, together
/// with its BIP32 key source.
///
/// [`psbt::Input`]: bitcoin::util::psbt::Input
/// [`psbt::Output`]: bitcoin::util::psbt::Output
pub type TapKeyOrigins = BTreeMap<bitcoin::XOnlyPublicKey, (Vec<taproot::TapLeafHash>, KeySource)>;
|
||||
|
||||
/// Trait for types which can be converted into an [`ExtendedDescriptor`] and a [`KeyMap`] usable by a wallet in a specific [`Network`]
pub trait IntoWalletDescriptor {
    /// Convert to wallet descriptor
    ///
    /// Returns the public descriptor, with its keys validated (and, where
    /// applicable, fixed up) for `network`, plus a map of any secret keys that
    /// were embedded in the input.
    fn into_wallet_descriptor(
        self,
        secp: &SecpCtx,
        network: Network,
    ) -> Result<(ExtendedDescriptor, KeyMap), DescriptorError>;
}
|
||||
|
||||
impl IntoWalletDescriptor for &str {
|
||||
fn into_wallet_descriptor(
|
||||
self,
|
||||
secp: &SecpCtx,
|
||||
network: Network,
|
||||
) -> Result<(ExtendedDescriptor, KeyMap), DescriptorError> {
|
||||
let descriptor = match self.split_once('#') {
|
||||
Some((desc, original_checksum)) => {
|
||||
let checksum = calc_checksum_bytes(desc)?;
|
||||
if original_checksum.as_bytes() != checksum {
|
||||
return Err(DescriptorError::InvalidDescriptorChecksum);
|
||||
}
|
||||
desc
|
||||
}
|
||||
None => self,
|
||||
};
|
||||
|
||||
ExtendedDescriptor::parse_descriptor(secp, descriptor)?
|
||||
.into_wallet_descriptor(secp, network)
|
||||
}
|
||||
}
|
||||
|
||||
impl IntoWalletDescriptor for &String {
    fn into_wallet_descriptor(
        self,
        secp: &SecpCtx,
        network: Network,
    ) -> Result<(ExtendedDescriptor, KeyMap), DescriptorError> {
        // Delegate to the `&str` implementation (which handles checksum
        // verification and parsing).
        self.as_str().into_wallet_descriptor(secp, network)
    }
}
|
||||
|
||||
impl IntoWalletDescriptor for ExtendedDescriptor {
    fn into_wallet_descriptor(
        self,
        secp: &SecpCtx,
        network: Network,
    ) -> Result<(ExtendedDescriptor, KeyMap), DescriptorError> {
        // A bare descriptor carries no secret keys: pair it with an empty key
        // map and reuse the tuple impl, which performs the per-key network
        // check.
        (self, KeyMap::default()).into_wallet_descriptor(secp, network)
    }
}
|
||||
|
||||
// Validates that every key in the descriptor is usable on `network`, using a
// dummy-key translation purely as a traversal mechanism (the descriptor itself
// is returned unchanged).
impl IntoWalletDescriptor for (ExtendedDescriptor, KeyMap) {
    fn into_wallet_descriptor(
        self,
        secp: &SecpCtx,
        network: Network,
    ) -> Result<(ExtendedDescriptor, KeyMap), DescriptorError> {
        use crate::keys::DescriptorKey;

        // Translator that maps every key to `DummyKey`, failing if any key is
        // not valid for `self.network`. Only its error side effect matters.
        struct Translator<'s, 'd> {
            secp: &'s SecpCtx,
            descriptor: &'d ExtendedDescriptor,
            network: Network,
        }

        impl<'s, 'd>
            miniscript::Translator<DescriptorPublicKey, miniscript::DummyKey, DescriptorError>
            for Translator<'s, 'd>
        {
            fn pk(
                &mut self,
                pk: &DescriptorPublicKey,
            ) -> Result<miniscript::DummyKey, DescriptorError> {
                let secp = &self.secp;

                // Re-derive the key's set of valid networks under the script
                // context the descriptor actually uses (taproot / segwit-v0 /
                // legacy), since context can affect key conversion.
                let (_, _, networks) = if self.descriptor.is_taproot() {
                    let descriptor_key: DescriptorKey<miniscript::Tap> =
                        pk.clone().into_descriptor_key()?;
                    descriptor_key.extract(secp)?
                } else if self.descriptor.is_witness() {
                    let descriptor_key: DescriptorKey<miniscript::Segwitv0> =
                        pk.clone().into_descriptor_key()?;
                    descriptor_key.extract(secp)?
                } else {
                    let descriptor_key: DescriptorKey<miniscript::Legacy> =
                        pk.clone().into_descriptor_key()?;
                    descriptor_key.extract(secp)?
                };

                if networks.contains(&self.network) {
                    Ok(miniscript::DummyKey)
                } else {
                    Err(DescriptorError::Key(KeyError::InvalidNetwork))
                }
            }
            // The hash translators are irrelevant for the network check and
            // just return dummy defaults.
            fn sha256(
                &mut self,
                _sha256: &<DescriptorPublicKey as MiniscriptKey>::Sha256,
            ) -> Result<miniscript::DummySha256Hash, DescriptorError> {
                Ok(Default::default())
            }
            fn hash256(
                &mut self,
                _hash256: &<DescriptorPublicKey as MiniscriptKey>::Hash256,
            ) -> Result<miniscript::DummyHash256Hash, DescriptorError> {
                Ok(Default::default())
            }
            fn ripemd160(
                &mut self,
                _ripemd160: &<DescriptorPublicKey as MiniscriptKey>::Ripemd160,
            ) -> Result<miniscript::DummyRipemd160Hash, DescriptorError> {
                Ok(Default::default())
            }
            fn hash160(
                &mut self,
                _hash160: &<DescriptorPublicKey as MiniscriptKey>::Hash160,
            ) -> Result<miniscript::DummyHash160Hash, DescriptorError> {
                Ok(Default::default())
            }
        }

        // check the network for the keys
        self.0.translate_pk(&mut Translator {
            secp,
            network,
            descriptor: &self.0,
        })?;

        Ok(self)
    }
}
|
||||
|
||||
// Template output carries its own set of valid networks, so after checking
// membership we rewrite the network field on every xkey (in both the
// descriptor and the key map) to the requested one.
impl IntoWalletDescriptor for DescriptorTemplateOut {
    fn into_wallet_descriptor(
        self,
        _secp: &SecpCtx,
        network: Network,
    ) -> Result<(ExtendedDescriptor, KeyMap), DescriptorError> {
        // Translator that rewrites the network of every xpub in the descriptor.
        struct Translator {
            network: Network,
        }

        impl miniscript::Translator<DescriptorPublicKey, DescriptorPublicKey, DescriptorError>
            for Translator
        {
            fn pk(
                &mut self,
                pk: &DescriptorPublicKey,
            ) -> Result<DescriptorPublicKey, DescriptorError> {
                // workaround for xpubs generated by other key types, like bip39: since when the
                // conversion is made one network has to be chosen, what we generally choose
                // "mainnet", but then override the set of valid networks to specify that all of
                // them are valid. here we reset the network to make sure the wallet struct gets a
                // descriptor with the right network everywhere.
                let pk = match pk {
                    DescriptorPublicKey::XPub(ref xpub) => {
                        let mut xpub = xpub.clone();
                        xpub.xkey.network = self.network;

                        DescriptorPublicKey::XPub(xpub)
                    }
                    other => other.clone(),
                };

                Ok(pk)
            }
            miniscript::translate_hash_clone!(
                DescriptorPublicKey,
                DescriptorPublicKey,
                DescriptorError
            );
        }

        let (desc, keymap, networks) = self;

        if !networks.contains(&network) {
            return Err(DescriptorError::Key(KeyError::InvalidNetwork));
        }

        // fixup the network for keys that need it in the descriptor
        let translated = desc.translate_pk(&mut Translator { network })?;
        // ...and in the key map
        let fixed_keymap = keymap
            .into_iter()
            .map(|(mut k, mut v)| {
                match (&mut k, &mut v) {
                    // xprv/xpub pairs: rewrite the network on both sides
                    (DescriptorPublicKey::XPub(xpub), DescriptorSecretKey::XPrv(xprv)) => {
                        xpub.xkey.network = network;
                        xprv.xkey.network = network;
                    }
                    // single (WIF) private keys also embed a network
                    (_, DescriptorSecretKey::Single(key)) => {
                        key.key.network = network;
                    }
                    _ => {}
                }

                (k, v)
            })
            .collect();

        Ok((translated, fixed_keymap))
    }
}
|
||||
|
||||
/// Wrapper for `IntoWalletDescriptor` that performs additional checks on the keys contained in the
|
||||
/// descriptor
|
||||
pub(crate) fn into_wallet_descriptor_checked<T: IntoWalletDescriptor>(
|
||||
inner: T,
|
||||
secp: &SecpCtx,
|
||||
network: Network,
|
||||
) -> Result<(ExtendedDescriptor, KeyMap), DescriptorError> {
|
||||
let (descriptor, keymap) = inner.into_wallet_descriptor(secp, network)?;
|
||||
|
||||
// Ensure the keys don't contain any hardened derivation steps or hardened wildcards
|
||||
let descriptor_contains_hardened_steps = descriptor.for_any_key(|k| {
|
||||
if let DescriptorPublicKey::XPub(DescriptorXKey {
|
||||
derivation_path,
|
||||
wildcard,
|
||||
..
|
||||
}) = k
|
||||
{
|
||||
return *wildcard == Wildcard::Hardened
|
||||
|| derivation_path.into_iter().any(ChildNumber::is_hardened);
|
||||
}
|
||||
|
||||
false
|
||||
});
|
||||
if descriptor_contains_hardened_steps {
|
||||
return Err(DescriptorError::HardenedDerivationXpub);
|
||||
}
|
||||
|
||||
// Run miniscript's sanity check, which will look for duplicated keys and other potential
|
||||
// issues
|
||||
descriptor.sanity_check()?;
|
||||
|
||||
Ok((descriptor, keymap))
|
||||
}
|
||||
|
||||
#[doc(hidden)]
/// Used internally mainly by the `descriptor!()` and `fragment!()` macros
pub trait CheckMiniscript<Ctx: miniscript::ScriptContext> {
    /// Validate the miniscript against the rules of its script context
    fn check_miniscript(&self) -> Result<(), miniscript::Error>;
}

impl<Ctx: miniscript::ScriptContext, Pk: miniscript::MiniscriptKey> CheckMiniscript<Ctx>
    for miniscript::Miniscript<Pk, Ctx>
{
    fn check_miniscript(&self) -> Result<(), miniscript::Error> {
        // Delegate to the script context's global validity check.
        Ctx::check_global_validity(self)?;

        Ok(())
    }
}
|
||||
|
||||
/// Trait implemented on [`Descriptor`]s to add a method to extract the spending [`policy`]
pub trait ExtractPolicy {
    /// Extract the spending [`policy`]
    ///
    /// `signers` identifies which policy items our own signers can satisfy;
    /// `psbt` controls how satisfaction information is computed (see
    /// [`BuildSatisfaction`]). Returns `None` when no policy can be extracted.
    fn extract_policy(
        &self,
        signers: &SignersContainer,
        psbt: BuildSatisfaction,
        secp: &SecpCtx,
    ) -> Result<Option<Policy>, DescriptorError>;
}
|
||||
|
||||
pub(crate) trait XKeyUtils {
|
||||
fn root_fingerprint(&self, secp: &SecpCtx) -> Fingerprint;
|
||||
}
|
||||
|
||||
impl<T> XKeyUtils for DescriptorXKey<T>
|
||||
where
|
||||
T: InnerXKey,
|
||||
{
|
||||
fn root_fingerprint(&self, secp: &SecpCtx) -> Fingerprint {
|
||||
match self.origin {
|
||||
Some((fingerprint, _)) => fingerprint,
|
||||
None => self.xkey.xkey_fingerprint(secp),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
pub(crate) trait DescriptorMeta {
    /// Whether the descriptor's top-level type spends through segwit v0
    fn is_witness(&self) -> bool;
    /// Whether the descriptor is a taproot (`tr()`) descriptor
    fn is_taproot(&self) -> bool;
    /// Collect every extended public key appearing in the descriptor
    fn get_extended_keys(&self) -> Vec<DescriptorXKey<ExtendedPubKey>>;
    /// Try to find the derivation index from a PSBT `bip32_derivation` map and
    /// return the descriptor derived at that index
    fn derive_from_hd_keypaths(
        &self,
        hd_keypaths: &HdKeyPaths,
        secp: &SecpCtx,
    ) -> Option<DerivedDescriptor>;
    /// Same as [`Self::derive_from_hd_keypaths`] but for taproot
    /// `tap_key_origins` maps
    fn derive_from_tap_key_origins(
        &self,
        tap_key_origins: &TapKeyOrigins,
        secp: &SecpCtx,
    ) -> Option<DerivedDescriptor>;
    /// Shared implementation: find the derivation index from a
    /// fingerprint-indexed map of (path, expected key) pairs
    fn derive_from_psbt_key_origins(
        &self,
        key_origins: BTreeMap<Fingerprint, (&DerivationPath, SinglePubKey)>,
        secp: &SecpCtx,
    ) -> Option<DerivedDescriptor>;
    /// Derive the descriptor that matches a PSBT input, using its key origin
    /// maps and, failing that, its scripts/utxo
    fn derive_from_psbt_input(
        &self,
        psbt_input: &psbt::Input,
        utxo: Option<TxOut>,
        secp: &SecpCtx,
    ) -> Option<DerivedDescriptor>;
}
|
||||
|
||||
impl DescriptorMeta for ExtendedDescriptor {
    fn is_witness(&self) -> bool {
        // True for all segwit-v0 top-level descriptor types (taproot is
        // handled separately by `is_taproot`).
        matches!(
            self.desc_type(),
            DescriptorType::Wpkh
                | DescriptorType::ShWpkh
                | DescriptorType::Wsh
                | DescriptorType::ShWsh
                | DescriptorType::ShWshSortedMulti
                | DescriptorType::WshSortedMulti
        )
    }

    fn is_taproot(&self) -> bool {
        self.desc_type() == DescriptorType::Tr
    }

    fn get_extended_keys(&self) -> Vec<DescriptorXKey<ExtendedPubKey>> {
        let mut answer = Vec::new();

        // Visit every key; only xpubs are collected, single keys are skipped.
        self.for_each_key(|pk| {
            if let DescriptorPublicKey::XPub(xpub) = pk {
                answer.push(xpub.clone());
            }

            true
        });

        answer
    }

    fn derive_from_psbt_key_origins(
        &self,
        key_origins: BTreeMap<Fingerprint, (&DerivationPath, SinglePubKey)>,
        secp: &SecpCtx,
    ) -> Option<DerivedDescriptor> {
        // Ensure that deriving `xpub` with `path` yields `expected`
        let verify_key = |xpub: &DescriptorXKey<ExtendedPubKey>,
                          path: &DerivationPath,
                          expected: &SinglePubKey| {
            let derived = xpub
                .xkey
                .derive_pub(secp, path)
                .expect("The path should never contain hardened derivation steps")
                .public_key;

            // Compare against either the full key or its x-only form,
            // depending on what the PSBT map stored.
            match expected {
                SinglePubKey::FullKey(pk) if &PublicKey::new(derived) == pk => true,
                SinglePubKey::XOnly(pk) if &XOnlyPublicKey::from(derived) == pk => true,
                _ => false,
            }
        };

        let mut path_found = None;

        // using `for_any_key` should make this stop as soon as we return `true`
        self.for_any_key(|key| {
            if let DescriptorPublicKey::XPub(xpub) = key {
                // Check if the key matches one entry in our `key_origins`. If it does, `matches()` will
                // return the "prefix" that matched, so we remove that prefix from the full path
                // found in `key_origins` and save it in `derive_path`. We expect this to be a derivation
                // path of length 1 if the key is `wildcard` and an empty path otherwise.
                let root_fingerprint = xpub.root_fingerprint(secp);
                let derive_path = key_origins
                    .get_key_value(&root_fingerprint)
                    .and_then(|(fingerprint, (path, expected))| {
                        xpub.matches(&(*fingerprint, (*path).clone()), secp)
                            .zip(Some((path, expected)))
                    })
                    .and_then(|(prefix, (full_path, expected))| {
                        let derive_path = full_path
                            .into_iter()
                            .skip(prefix.into_iter().count())
                            .cloned()
                            .collect::<DerivationPath>();

                        // `derive_path` only contains the replacement index for the wildcard, if present, or
                        // an empty path for fixed descriptors. To verify the key we also need the normal steps
                        // that come before the wildcard, so we take them directly from `xpub` and then append
                        // the final index
                        if verify_key(
                            xpub,
                            &xpub.derivation_path.extend(derive_path.clone()),
                            expected,
                        ) {
                            Some(derive_path)
                        } else {
                            log::debug!(
                                "Key `{}` derived with {} yields an unexpected key",
                                root_fingerprint,
                                derive_path
                            );
                            None
                        }
                    });

                match derive_path {
                    Some(path) if xpub.wildcard != Wildcard::None && path.len() == 1 => {
                        // Ignore hardened wildcards
                        if let ChildNumber::Normal { index } = path[0] {
                            path_found = Some(index);
                            return true;
                        }
                    }
                    // Fixed (no-wildcard) keys matched with an empty remainder
                    // derive at index 0 by convention.
                    Some(path) if xpub.wildcard == Wildcard::None && path.is_empty() => {
                        path_found = Some(0);
                        return true;
                    }
                    _ => {}
                }
            }

            false
        });

        path_found.map(|path| self.at_derivation_index(path))
    }

    fn derive_from_hd_keypaths(
        &self,
        hd_keypaths: &HdKeyPaths,
        secp: &SecpCtx,
    ) -> Option<DerivedDescriptor> {
        // "Convert" an hd_keypaths map to the format required by `derive_from_psbt_key_origins`
        let key_origins = hd_keypaths
            .iter()
            .map(|(pk, (fingerprint, path))| {
                (
                    *fingerprint,
                    (path, SinglePubKey::FullKey(PublicKey::new(*pk))),
                )
            })
            .collect();
        self.derive_from_psbt_key_origins(key_origins, secp)
    }

    fn derive_from_tap_key_origins(
        &self,
        tap_key_origins: &TapKeyOrigins,
        secp: &SecpCtx,
    ) -> Option<DerivedDescriptor> {
        // "Convert" a tap_key_origins map to the format required by `derive_from_psbt_key_origins`
        // (the leaf-hash list is not needed for derivation and is dropped).
        let key_origins = tap_key_origins
            .iter()
            .map(|(pk, (_, (fingerprint, path)))| (*fingerprint, (path, SinglePubKey::XOnly(*pk))))
            .collect();
        self.derive_from_psbt_key_origins(key_origins, secp)
    }

    fn derive_from_psbt_input(
        &self,
        psbt_input: &psbt::Input,
        utxo: Option<TxOut>,
        secp: &SecpCtx,
    ) -> Option<DerivedDescriptor> {
        // First try the key-origin maps (legacy/segwit, then taproot).
        if let Some(derived) = self.derive_from_hd_keypaths(&psbt_input.bip32_derivation, secp) {
            return Some(derived);
        }
        if let Some(derived) = self.derive_from_tap_key_origins(&psbt_input.tap_key_origins, secp) {
            return Some(derived);
        }
        if self.has_wildcard() {
            // We can't try to bruteforce the derivation index, exit here
            return None;
        }

        // No wildcard: derive at index 0 and try to match the input by its
        // script_pubkey / redeem script / witness script.
        let descriptor = self.at_derivation_index(0);
        match descriptor.desc_type() {
            // TODO: add pk() here
            DescriptorType::Pkh
            | DescriptorType::Wpkh
            | DescriptorType::ShWpkh
            | DescriptorType::Tr
                if utxo.is_some()
                    && descriptor.script_pubkey() == utxo.as_ref().unwrap().script_pubkey =>
            {
                Some(descriptor)
            }
            DescriptorType::Bare | DescriptorType::Sh | DescriptorType::ShSortedMulti
                if psbt_input.redeem_script.is_some()
                    && &descriptor.explicit_script().unwrap()
                        == psbt_input.redeem_script.as_ref().unwrap() =>
            {
                Some(descriptor)
            }
            DescriptorType::Wsh
            | DescriptorType::ShWsh
            | DescriptorType::ShWshSortedMulti
            | DescriptorType::WshSortedMulti
                if psbt_input.witness_script.is_some()
                    && &descriptor.explicit_script().unwrap()
                        == psbt_input.witness_script.as_ref().unwrap() =>
            {
                Some(descriptor)
            }
            _ => None,
        }
    }
}
|
||||
|
||||
#[cfg(test)]
|
||||
mod test {
|
||||
use alloc::string::ToString;
|
||||
use core::str::FromStr;
|
||||
|
||||
use assert_matches::assert_matches;
|
||||
use bitcoin::consensus::encode::deserialize;
|
||||
use bitcoin::hashes::hex::FromHex;
|
||||
use bitcoin::secp256k1::Secp256k1;
|
||||
use bitcoin::util::{bip32, psbt};
|
||||
use bitcoin::Script;
|
||||
|
||||
use super::*;
|
||||
use crate::psbt::PsbtUtils;
|
||||
|
||||
#[test]
fn test_derive_from_psbt_input_wpkh_wif() {
    // Fixed (non-wildcard) wpkh descriptor: derivation should succeed by
    // matching the PSBT input's witness utxo script_pubkey.
    let descriptor = Descriptor::<DescriptorPublicKey>::from_str(
        "wpkh(02b4632d08485ff1df2db55b9dafd23347d1c47a457072a1e87be26896549a8737)",
    )
    .unwrap();
    let psbt: psbt::PartiallySignedTransaction = deserialize(
        &Vec::<u8>::from_hex(
            "70736274ff010052010000000162307be8e431fbaff807cdf9cdc3fde44d7402\
             11bc8342c31ffd6ec11fe35bcc0100000000ffffffff01328601000000000016\
             001493ce48570b55c42c2af816aeaba06cfee1224fae000000000001011fa086\
             01000000000016001493ce48570b55c42c2af816aeaba06cfee1224fae010304\
             010000000000",
        )
        .unwrap(),
    )
    .unwrap();

    assert!(descriptor
        .derive_from_psbt_input(&psbt.inputs[0], psbt.get_utxo_for(0), &Secp256k1::new())
        .is_some());
}
|
||||
|
||||
#[test]
fn test_derive_from_psbt_input_pkh_tpub() {
    // Wildcard pkh descriptor with origin info: derivation succeeds via the
    // input's bip32_derivation map.
    let descriptor = Descriptor::<DescriptorPublicKey>::from_str(
        "pkh([0f056943/44h/0h/0h]tpubDDpWvmUrPZrhSPmUzCMBHffvC3HyMAPnWDSAQNBTnj1iZeJa7BZQEttFiP4DS4GCcXQHezdXhn86Hj6LHX5EDstXPWrMaSneRWM8yUf6NFd/10/*)",
    )
    .unwrap();
    let psbt: psbt::PartiallySignedTransaction = deserialize(
        &Vec::<u8>::from_hex(
            "70736274ff010053010000000145843b86be54a3cd8c9e38444e1162676c00df\
             e7964122a70df491ea12fd67090100000000ffffffff01c19598000000000017\
             a91432bb94283282f72b2e034709e348c44d5a4db0ef8700000000000100f902\
             0000000001010167e99c0eb67640f3a1b6805f2d8be8238c947f8aaf49eb0a9c\
             bee6a42c984200000000171600142b29a22019cca05b9c2b2d283a4c4489e1cf\
             9f8ffeffffff02a01dced06100000017a914e2abf033cadbd74f0f4c74946201\
             decd20d5c43c8780969800000000001976a9148b0fce5fb1264e599a65387313\
             3c95478b902eb288ac02473044022015d9211576163fa5b001e84dfa3d44efd9\
             86b8f3a0d3d2174369288b2b750906022048dacc0e5d73ae42512fd2b97e2071\
             a8d0bce443b390b1fe0b8128fe70ec919e01210232dad1c5a67dcb0116d407e2\
             52584228ab7ec00e8b9779d0c3ffe8114fc1a7d2c80600000103040100000022\
             0603433b83583f8c4879b329dd08bbc7da935e4cc02f637ff746e05f0466ffb2\
             a6a2180f0569432c00008000000080000000800a000000000000000000",
        )
        .unwrap(),
    )
    .unwrap();

    assert!(descriptor
        .derive_from_psbt_input(&psbt.inputs[0], psbt.get_utxo_for(0), &Secp256k1::new())
        .is_some());
}
|
||||
|
||||
#[test]
fn test_derive_from_psbt_input_wsh() {
    // Non-wildcard wsh descriptor: derivation succeeds by matching the
    // input's witness_script.
    let descriptor = Descriptor::<DescriptorPublicKey>::from_str(
        "wsh(and_v(v:pk(03b6633fef2397a0a9de9d7b6f23aef8368a6e362b0581f0f0af70d5ecfd254b14),older(6)))",
    )
    .unwrap();
    let psbt: psbt::PartiallySignedTransaction = deserialize(
        &Vec::<u8>::from_hex(
            "70736274ff01005302000000011c8116eea34408ab6529223c9a176606742207\
             67a1ff1d46a6e3c4a88243ea6e01000000000600000001109698000000000017\
             a914ad105f61102e0d01d7af40d06d6a5c3ae2f7fde387000000000001012b80\
             969800000000002200203ca72f106a72234754890ca7640c43f65d2174e44d33\
             336030f9059345091044010304010000000105252103b6633fef2397a0a9de9d\
             7b6f23aef8368a6e362b0581f0f0af70d5ecfd254b14ad56b20000",
        )
        .unwrap(),
    )
    .unwrap();

    assert!(descriptor
        .derive_from_psbt_input(&psbt.inputs[0], psbt.get_utxo_for(0), &Secp256k1::new())
        .is_some());
}
|
||||
|
||||
#[test]
fn test_derive_from_psbt_input_sh() {
    // Non-wildcard sh descriptor: derivation succeeds by matching the input's
    // redeem_script.
    let descriptor = Descriptor::<DescriptorPublicKey>::from_str(
        "sh(and_v(v:pk(021403881a5587297818fcaf17d239cefca22fce84a45b3b1d23e836c4af671dbb),after(630000)))",
    )
    .unwrap();
    let psbt: psbt::PartiallySignedTransaction = deserialize(
        &Vec::<u8>::from_hex(
            "70736274ff0100530100000001bc8c13df445dfadcc42afa6dc841f85d22b01d\
             a6270ebf981740f4b7b1d800390000000000feffffff01ba9598000000000017\
             a91457b148ba4d3e5fa8608a8657875124e3d1c9390887f09c0900000100e002\
             0000000001016ba1bbe05cc93574a0d611ec7d93ad0ab6685b28d0cd80e8a82d\
             debb326643c90100000000feffffff02809698000000000017a914d9a6e8c455\
             8e16c8253afe53ce37ad61cf4c38c487403504cf6100000017a9144044fb6e0b\
             757dfc1b34886b6a95aef4d3db137e870247304402202a9b72d939bcde8ba2a1\
             e0980597e47af4f5c152a78499143c3d0a78ac2286a602207a45b1df9e93b8c9\
             6f09f5c025fe3e413ca4b905fe65ee55d32a3276439a9b8f012102dc1fcc2636\
             4da1aa718f03d8d9bd6f2ff410ed2cf1245a168aa3bcc995ac18e0a806000001\
             03040100000001042821021403881a5587297818fcaf17d239cefca22fce84a4\
             5b3b1d23e836c4af671dbbad03f09c09b10000",
        )
        .unwrap(),
    )
    .unwrap();

    assert!(descriptor
        .derive_from_psbt_input(&psbt.inputs[0], psbt.get_utxo_for(0), &Secp256k1::new())
        .is_some());
}
|
||||
|
||||
#[test]
fn test_to_wallet_descriptor_fixup_networks() {
    // Keys marked valid for "any network" (e.g. produced by bip39 conversion)
    // must be rewritten to the wallet's network by `into_wallet_descriptor`,
    // in both the descriptor and the key map.
    use crate::keys::{any_network, IntoDescriptorKey};

    let secp = Secp256k1::new();

    let xprv = bip32::ExtendedPrivKey::from_str("xprv9s21ZrQH143K3c3gF1DUWpWNr2SG2XrG8oYPpqYh7hoWsJy9NjabErnzriJPpnGHyKz5NgdXmq1KVbqS1r4NXdCoKitWg5e86zqXHa8kxyB").unwrap();
    let path = bip32::DerivationPath::from_str("m/0").unwrap();

    // here `to_descriptor_key` will set the valid networks for the key to only mainnet, since
    // we are using an "xpub"
    let key = (xprv, path.clone()).into_descriptor_key().unwrap();
    // override it with any. this happens in some key conversions, like bip39
    let key = key.override_valid_networks(any_network());

    // make a descriptor out of it
    let desc = crate::descriptor!(wpkh(key)).unwrap();
    // this should convert the key that supports "any_network" to the right network (testnet)
    let (wallet_desc, keymap) = desc
        .into_wallet_descriptor(&secp, Network::Testnet)
        .unwrap();

    // Build the expected testnet key independently and compare.
    let mut xprv_testnet = xprv;
    xprv_testnet.network = Network::Testnet;

    let xpub_testnet = bip32::ExtendedPubKey::from_priv(&secp, &xprv_testnet);
    let desc_pubkey = DescriptorPublicKey::XPub(DescriptorXKey {
        xkey: xpub_testnet,
        origin: None,
        derivation_path: path,
        wildcard: Wildcard::Unhardened,
    });

    assert_eq!(wallet_desc.to_string(), "wpkh(tpubD6NzVbkrYhZ4XtJzoDja5snUjBNQRP5B3f4Hyn1T1x6PVPxzzVjvw6nJx2D8RBCxog9GEVjZoyStfepTz7TtKoBVdkCtnc7VCJh9dD4RAU9/0/*)#a3svx0ha");
    assert_eq!(
        keymap
            .get(&desc_pubkey)
            .map(|key| key.to_public(&secp).unwrap()),
        Some(desc_pubkey)
    );
}
|
||||
|
||||
// test IntoWalletDescriptor trait from &str with and without checksum appended
#[test]
fn test_descriptor_from_str_with_checksum() {
    let secp = Secp256k1::new();

    // correct checksum accepted, missing checksum tolerated — for both the
    // private (tprv) and public (tpub) form of the descriptor
    let desc = "wpkh(tprv8ZgxMBicQKsPdpkqS7Eair4YxjcuuvDPNYmKX3sCniCf16tHEVrjjiSXEkFRnUH77yXc6ZcwHHcLNfjdi5qUvw3VDfgYiH5mNsj5izuiu2N/1/2/*)#tqz0nc62"
        .into_wallet_descriptor(&secp, Network::Testnet);
    assert!(desc.is_ok());

    let desc = "wpkh(tprv8ZgxMBicQKsPdpkqS7Eair4YxjcuuvDPNYmKX3sCniCf16tHEVrjjiSXEkFRnUH77yXc6ZcwHHcLNfjdi5qUvw3VDfgYiH5mNsj5izuiu2N/1/2/*)"
        .into_wallet_descriptor(&secp, Network::Testnet);
    assert!(desc.is_ok());

    let desc = "wpkh(tpubD6NzVbkrYhZ4XHndKkuB8FifXm8r5FQHwrN6oZuWCz13qb93rtgKvD4PQsqC4HP4yhV3tA2fqr2RbY5mNXfM7RxXUoeABoDtsFUq2zJq6YK/1/2/*)#67ju93jw"
        .into_wallet_descriptor(&secp, Network::Testnet);
    assert!(desc.is_ok());

    let desc = "wpkh(tpubD6NzVbkrYhZ4XHndKkuB8FifXm8r5FQHwrN6oZuWCz13qb93rtgKvD4PQsqC4HP4yhV3tA2fqr2RbY5mNXfM7RxXUoeABoDtsFUq2zJq6YK/1/2/*)"
        .into_wallet_descriptor(&secp, Network::Testnet);
    assert!(desc.is_ok());

    // wrong checksum rejected: the tprv descriptor with the tpub's checksum...
    let desc = "wpkh(tprv8ZgxMBicQKsPdpkqS7Eair4YxjcuuvDPNYmKX3sCniCf16tHEVrjjiSXEkFRnUH77yXc6ZcwHHcLNfjdi5qUvw3VDfgYiH5mNsj5izuiu2N/1/2/*)#67ju93jw"
        .into_wallet_descriptor(&secp, Network::Testnet);
    assert_matches!(desc, Err(DescriptorError::InvalidDescriptorChecksum));

    // ...and the tpub descriptor with the tprv's checksum. (This assertion
    // previously duplicated the one above byte-for-byte, leaving this
    // direction untested.)
    let desc = "wpkh(tpubD6NzVbkrYhZ4XHndKkuB8FifXm8r5FQHwrN6oZuWCz13qb93rtgKvD4PQsqC4HP4yhV3tA2fqr2RbY5mNXfM7RxXUoeABoDtsFUq2zJq6YK/1/2/*)#tqz0nc62"
        .into_wallet_descriptor(&secp, Network::Testnet);
    assert_matches!(desc, Err(DescriptorError::InvalidDescriptorChecksum));
}
|
||||
|
||||
// test IntoWalletDescriptor trait from &str with keys from right and wrong network
#[test]
fn test_descriptor_from_str_with_keys_network() {
    let secp = Secp256k1::new();

    // testnet keys (tprv/tpub) are accepted on Testnet and Regtest...
    let desc = "wpkh(tprv8ZgxMBicQKsPdpkqS7Eair4YxjcuuvDPNYmKX3sCniCf16tHEVrjjiSXEkFRnUH77yXc6ZcwHHcLNfjdi5qUvw3VDfgYiH5mNsj5izuiu2N/1/2/*)"
        .into_wallet_descriptor(&secp, Network::Testnet);
    assert!(desc.is_ok());

    let desc = "wpkh(tprv8ZgxMBicQKsPdpkqS7Eair4YxjcuuvDPNYmKX3sCniCf16tHEVrjjiSXEkFRnUH77yXc6ZcwHHcLNfjdi5qUvw3VDfgYiH5mNsj5izuiu2N/1/2/*)"
        .into_wallet_descriptor(&secp, Network::Regtest);
    assert!(desc.is_ok());

    let desc = "wpkh(tpubD6NzVbkrYhZ4XHndKkuB8FifXm8r5FQHwrN6oZuWCz13qb93rtgKvD4PQsqC4HP4yhV3tA2fqr2RbY5mNXfM7RxXUoeABoDtsFUq2zJq6YK/1/2/*)"
        .into_wallet_descriptor(&secp, Network::Testnet);
    assert!(desc.is_ok());

    let desc = "wpkh(tpubD6NzVbkrYhZ4XHndKkuB8FifXm8r5FQHwrN6oZuWCz13qb93rtgKvD4PQsqC4HP4yhV3tA2fqr2RbY5mNXfM7RxXUoeABoDtsFUq2zJq6YK/1/2/*)"
        .into_wallet_descriptor(&secp, Network::Regtest);
    assert!(desc.is_ok());

    // ...bare public keys carry no network and work anywhere...
    let desc = "sh(wpkh(02864bb4ad00cefa806098a69e192bbda937494e69eb452b87bb3f20f6283baedb))"
        .into_wallet_descriptor(&secp, Network::Testnet);
    assert!(desc.is_ok());

    let desc = "sh(wpkh(02864bb4ad00cefa806098a69e192bbda937494e69eb452b87bb3f20f6283baedb))"
        .into_wallet_descriptor(&secp, Network::Bitcoin);
    assert!(desc.is_ok());

    // ...but testnet extended keys are rejected on mainnet.
    let desc = "wpkh(tprv8ZgxMBicQKsPdpkqS7Eair4YxjcuuvDPNYmKX3sCniCf16tHEVrjjiSXEkFRnUH77yXc6ZcwHHcLNfjdi5qUvw3VDfgYiH5mNsj5izuiu2N/1/2/*)"
        .into_wallet_descriptor(&secp, Network::Bitcoin);
    assert_matches!(desc, Err(DescriptorError::Key(KeyError::InvalidNetwork)));

    let desc = "wpkh(tpubD6NzVbkrYhZ4XHndKkuB8FifXm8r5FQHwrN6oZuWCz13qb93rtgKvD4PQsqC4HP4yhV3tA2fqr2RbY5mNXfM7RxXUoeABoDtsFUq2zJq6YK/1/2/*)"
        .into_wallet_descriptor(&secp, Network::Bitcoin);
    assert_matches!(desc, Err(DescriptorError::Key(KeyError::InvalidNetwork)));
}
|
||||
|
||||
// test IntoWalletDescriptor trait from the output of the descriptor!() macro
|
||||
#[test]
|
||||
fn test_descriptor_from_str_from_output_of_macro() {
|
||||
let secp = Secp256k1::new();
|
||||
|
||||
let tpub = bip32::ExtendedPubKey::from_str("tpubD6NzVbkrYhZ4XHndKkuB8FifXm8r5FQHwrN6oZuWCz13qb93rtgKvD4PQsqC4HP4yhV3tA2fqr2RbY5mNXfM7RxXUoeABoDtsFUq2zJq6YK").unwrap();
|
||||
let path = bip32::DerivationPath::from_str("m/1/2").unwrap();
|
||||
let key = (tpub, path).into_descriptor_key().unwrap();
|
||||
|
||||
// make a descriptor out of it
|
||||
let desc = crate::descriptor!(wpkh(key)).unwrap();
|
||||
|
||||
let (wallet_desc, _) = desc
|
||||
.into_wallet_descriptor(&secp, Network::Testnet)
|
||||
.unwrap();
|
||||
let wallet_desc_str = wallet_desc.to_string();
|
||||
assert_eq!(wallet_desc_str, "wpkh(tpubD6NzVbkrYhZ4XHndKkuB8FifXm8r5FQHwrN6oZuWCz13qb93rtgKvD4PQsqC4HP4yhV3tA2fqr2RbY5mNXfM7RxXUoeABoDtsFUq2zJq6YK/1/2/*)#67ju93jw");
|
||||
|
||||
let (wallet_desc2, _) = wallet_desc_str
|
||||
.into_wallet_descriptor(&secp, Network::Testnet)
|
||||
.unwrap();
|
||||
assert_eq!(wallet_desc, wallet_desc2)
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_into_wallet_descriptor_checked() {
|
||||
let secp = Secp256k1::new();
|
||||
|
||||
let descriptor = "wpkh(tpubD6NzVbkrYhZ4XHndKkuB8FifXm8r5FQHwrN6oZuWCz13qb93rtgKvD4PQsqC4HP4yhV3tA2fqr2RbY5mNXfM7RxXUoeABoDtsFUq2zJq6YK/0'/1/2/*)";
|
||||
let result = into_wallet_descriptor_checked(descriptor, &secp, Network::Testnet);
|
||||
|
||||
assert_matches!(result, Err(DescriptorError::HardenedDerivationXpub));
|
||||
|
||||
let descriptor = "wsh(multi(2,tpubD6NzVbkrYhZ4XHndKkuB8FifXm8r5FQHwrN6oZuWCz13qb93rtgKvD4PQsqC4HP4yhV3tA2fqr2RbY5mNXfM7RxXUoeABoDtsFUq2zJq6YK/0/*,tpubD6NzVbkrYhZ4XHndKkuB8FifXm8r5FQHwrN6oZuWCz13qb93rtgKvD4PQsqC4HP4yhV3tA2fqr2RbY5mNXfM7RxXUoeABoDtsFUq2zJq6YK/0/*))";
|
||||
let result = into_wallet_descriptor_checked(descriptor, &secp, Network::Testnet);
|
||||
|
||||
assert!(result.is_err());
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_sh_wsh_sortedmulti_redeemscript() {
|
||||
use miniscript::psbt::PsbtInputExt;
|
||||
|
||||
let secp = Secp256k1::new();
|
||||
|
||||
let descriptor = "sh(wsh(sortedmulti(3,tpubDEsqS36T4DVsKJd9UH8pAKzrkGBYPLEt9jZMwpKtzh1G6mgYehfHt9WCgk7MJG5QGSFWf176KaBNoXbcuFcuadAFKxDpUdMDKGBha7bY3QM/0/*,tpubDF3cpwfs7fMvXXuoQbohXtLjNM6ehwYT287LWtmLsd4r77YLg6MZg4vTETx5MSJ2zkfigbYWu31VA2Z2Vc1cZugCYXgS7FQu6pE8V6TriEH/0/*,tpubDE1SKfcW76Tb2AASv5bQWMuScYNAdoqLHoexw13sNDXwmUhQDBbCD3QAedKGLhxMrWQdMDKENzYtnXPDRvexQPNuDrLj52wAjHhNEm8sJ4p/0/*,tpubDFLc6oXwJmhm3FGGzXkfJNTh2KitoY3WhmmQvuAjMhD8YbyWn5mAqckbxXfm2etM3p5J6JoTpSrMqRSTfMLtNW46poDaEZJ1kjd3csRSjwH/0/*,tpubDEWD9NBeWP59xXmdqSNt4VYdtTGwbpyP8WS962BuqpQeMZmX9Pur14dhXdZT5a7wR1pK6dPtZ9fP5WR493hPzemnBvkfLLYxnUjAKj1JCQV/0/*,tpubDEHyZkkwd7gZWCTgQuYQ9C4myF2hMEmyHsBCCmLssGqoqUxeT3gzohF5uEVURkf9TtmeepJgkSUmteac38FwZqirjApzNX59XSHLcwaTZCH/0/*,tpubDEqLouCekwnMUWN486kxGzD44qVgeyuqHyxUypNEiQt5RnUZNJe386TKPK99fqRV1vRkZjYAjtXGTECz98MCsdLcnkM67U6KdYRzVubeCgZ/0/*)))";
|
||||
let (descriptor, _) =
|
||||
into_wallet_descriptor_checked(descriptor, &secp, Network::Testnet).unwrap();
|
||||
|
||||
let descriptor = descriptor.at_derivation_index(0);
|
||||
|
||||
let script = Script::from_str("5321022f533b667e2ea3b36e21961c9fe9dca340fbe0af5210173a83ae0337ab20a57621026bb53a98e810bd0ee61a0ed1164ba6c024786d76554e793e202dc6ce9c78c4ea2102d5b8a7d66a41ffdb6f4c53d61994022e886b4f45001fb158b95c9164d45f8ca3210324b75eead2c1f9c60e8adeb5e7009fec7a29afcdb30d829d82d09562fe8bae8521032d34f8932200833487bd294aa219dcbe000b9f9b3d824799541430009f0fa55121037468f8ea99b6c64788398b5ad25480cad08f4b0d65be54ce3a55fd206b5ae4722103f72d3d96663b0ea99b0aeb0d7f273cab11a8de37885f1dddc8d9112adb87169357ae").unwrap();
|
||||
|
||||
let mut psbt_input = psbt::Input::default();
|
||||
psbt_input
|
||||
.update_with_descriptor_unchecked(&descriptor)
|
||||
.unwrap();
|
||||
|
||||
assert_eq!(psbt_input.redeem_script, Some(script.to_v0_p2wsh()));
|
||||
assert_eq!(psbt_input.witness_script, Some(script));
|
||||
}
|
||||
}
|
||||
1886
crates/bdk/src/descriptor/policy.rs
Normal file
1886
crates/bdk/src/descriptor/policy.rs
Normal file
File diff suppressed because it is too large
Load Diff
@@ -1,26 +1,13 @@
|
||||
// Magical Bitcoin Library
|
||||
// Written in 2020 by
|
||||
// Alekos Filini <alekos.filini@gmail.com>
|
||||
// Bitcoin Dev Kit
|
||||
// Written in 2020 by Alekos Filini <alekos.filini@gmail.com>
|
||||
//
|
||||
// Copyright (c) 2020 Magical Bitcoin
|
||||
// Copyright (c) 2020-2021 Bitcoin Dev Kit Developers
|
||||
//
|
||||
// Permission is hereby granted, free of charge, to any person obtaining a copy
|
||||
// of this software and associated documentation files (the "Software"), to deal
|
||||
// in the Software without restriction, including without limitation the rights
|
||||
// to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
|
||||
// copies of the Software, and to permit persons to whom the Software is
|
||||
// furnished to do so, subject to the following conditions:
|
||||
//
|
||||
// The above copyright notice and this permission notice shall be included in all
|
||||
// copies or substantial portions of the Software.
|
||||
//
|
||||
// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
|
||||
// IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
|
||||
// FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
|
||||
// AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
|
||||
// LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
|
||||
// OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
|
||||
// SOFTWARE.
|
||||
// This file is licensed under the Apache License, Version 2.0 <LICENSE-APACHE
|
||||
// or http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
|
||||
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your option.
|
||||
// You may not use this file except in accordance with one or both of these
|
||||
// licenses.
|
||||
|
||||
//! Descriptor templates
|
||||
//!
|
||||
@@ -32,8 +19,10 @@ use bitcoin::Network;
|
||||
|
||||
use miniscript::{Legacy, Segwitv0};
|
||||
|
||||
use super::{ExtendedDescriptor, KeyMap, ToWalletDescriptor};
|
||||
use crate::keys::{DerivableKey, KeyError, ToDescriptorKey, ValidNetworks};
|
||||
use super::{ExtendedDescriptor, IntoWalletDescriptor, KeyMap};
|
||||
use crate::descriptor::DescriptorError;
|
||||
use crate::keys::{DerivableKey, IntoDescriptorKey, ValidNetworks};
|
||||
use crate::wallet::utils::SecpCtx;
|
||||
use crate::{descriptor, KeychainKind};
|
||||
|
||||
/// Type alias for the return type of [`DescriptorTemplate`], [`descriptor!`](crate::descriptor!) and others
|
||||
@@ -41,37 +30,40 @@ pub type DescriptorTemplateOut = (ExtendedDescriptor, KeyMap, ValidNetworks);
|
||||
|
||||
/// Trait for descriptor templates that can be built into a full descriptor
|
||||
///
|
||||
/// Since [`ToWalletDescriptor`] is implemented for any [`DescriptorTemplate`], they can also be
|
||||
/// Since [`IntoWalletDescriptor`] is implemented for any [`DescriptorTemplate`], they can also be
|
||||
/// passed directly to the [`Wallet`](crate::Wallet) constructor.
|
||||
///
|
||||
/// ## Example
|
||||
///
|
||||
/// ```
|
||||
/// use bdk::keys::{KeyError, ToDescriptorKey};
|
||||
/// use bdk::descriptor::error::Error as DescriptorError;
|
||||
/// use bdk::keys::{IntoDescriptorKey, KeyError};
|
||||
/// use bdk::miniscript::Legacy;
|
||||
/// use bdk::template::{DescriptorTemplate, DescriptorTemplateOut};
|
||||
/// use bitcoin::Network;
|
||||
///
|
||||
/// struct MyP2PKH<K: ToDescriptorKey<Legacy>>(K);
|
||||
/// struct MyP2PKH<K: IntoDescriptorKey<Legacy>>(K);
|
||||
///
|
||||
/// impl<K: ToDescriptorKey<Legacy>> DescriptorTemplate for MyP2PKH<K> {
|
||||
/// fn build(self) -> Result<DescriptorTemplateOut, KeyError> {
|
||||
/// impl<K: IntoDescriptorKey<Legacy>> DescriptorTemplate for MyP2PKH<K> {
|
||||
/// fn build(self, network: Network) -> Result<DescriptorTemplateOut, DescriptorError> {
|
||||
/// Ok(bdk::descriptor!(pkh(self.0))?)
|
||||
/// }
|
||||
/// }
|
||||
/// ```
|
||||
pub trait DescriptorTemplate {
|
||||
/// Build the complete descriptor
|
||||
fn build(self) -> Result<DescriptorTemplateOut, KeyError>;
|
||||
fn build(self, network: Network) -> Result<DescriptorTemplateOut, DescriptorError>;
|
||||
}
|
||||
|
||||
/// Turns a [`DescriptorTemplate`] into a valid wallet descriptor by calling its
|
||||
/// [`build`](DescriptorTemplate::build) method
|
||||
impl<T: DescriptorTemplate> ToWalletDescriptor for T {
|
||||
fn to_wallet_descriptor(
|
||||
impl<T: DescriptorTemplate> IntoWalletDescriptor for T {
|
||||
fn into_wallet_descriptor(
|
||||
self,
|
||||
secp: &SecpCtx,
|
||||
network: Network,
|
||||
) -> Result<(ExtendedDescriptor, KeyMap), KeyError> {
|
||||
Ok(self.build()?.to_wallet_descriptor(network)?)
|
||||
) -> Result<(ExtendedDescriptor, KeyMap), DescriptorError> {
|
||||
self.build(network)?.into_wallet_descriptor(secp, network)
|
||||
}
|
||||
}
|
||||
|
||||
@@ -81,30 +73,25 @@ impl<T: DescriptorTemplate> ToWalletDescriptor for T {
|
||||
///
|
||||
/// ```
|
||||
/// # use bdk::bitcoin::{PrivateKey, Network};
|
||||
/// # use bdk::{Wallet, OfflineWallet};
|
||||
/// # use bdk::database::MemoryDatabase;
|
||||
/// use bdk::template::P2PKH;
|
||||
/// # use bdk::Wallet;
|
||||
/// # use bdk::wallet::AddressIndex::New;
|
||||
/// use bdk::template::P2Pkh;
|
||||
///
|
||||
/// let key =
|
||||
/// bitcoin::PrivateKey::from_wif("cTc4vURSzdx6QE6KVynWGomDbLaA75dNALMNyfjh3p8DRRar84Um")?;
|
||||
/// let wallet: OfflineWallet<_> = Wallet::new_offline(
|
||||
/// P2PKH(key),
|
||||
/// None,
|
||||
/// Network::Testnet,
|
||||
/// MemoryDatabase::default(),
|
||||
/// )?;
|
||||
/// let mut wallet = Wallet::new_no_persist(P2Pkh(key), None, Network::Testnet)?;
|
||||
///
|
||||
/// assert_eq!(
|
||||
/// wallet.get_new_address()?.to_string(),
|
||||
/// wallet.get_address(New).to_string(),
|
||||
/// "mwJ8hxFYW19JLuc65RCTaP4v1rzVU8cVMT"
|
||||
/// );
|
||||
/// # Ok::<_, Box<dyn std::error::Error>>(())
|
||||
/// ```
|
||||
pub struct P2PKH<K: ToDescriptorKey<Legacy>>(pub K);
|
||||
pub struct P2Pkh<K: IntoDescriptorKey<Legacy>>(pub K);
|
||||
|
||||
impl<K: ToDescriptorKey<Legacy>> DescriptorTemplate for P2PKH<K> {
|
||||
fn build(self) -> Result<DescriptorTemplateOut, KeyError> {
|
||||
Ok(descriptor!(pkh(self.0))?)
|
||||
impl<K: IntoDescriptorKey<Legacy>> DescriptorTemplate for P2Pkh<K> {
|
||||
fn build(self, _network: Network) -> Result<DescriptorTemplateOut, DescriptorError> {
|
||||
descriptor!(pkh(self.0))
|
||||
}
|
||||
}
|
||||
|
||||
@@ -114,31 +101,26 @@ impl<K: ToDescriptorKey<Legacy>> DescriptorTemplate for P2PKH<K> {
|
||||
///
|
||||
/// ```
|
||||
/// # use bdk::bitcoin::{PrivateKey, Network};
|
||||
/// # use bdk::{Wallet, OfflineWallet};
|
||||
/// # use bdk::database::MemoryDatabase;
|
||||
/// use bdk::template::P2WPKH_P2SH;
|
||||
/// # use bdk::Wallet;
|
||||
/// use bdk::template::P2Wpkh_P2Sh;
|
||||
/// use bdk::wallet::AddressIndex;
|
||||
///
|
||||
/// let key =
|
||||
/// bitcoin::PrivateKey::from_wif("cTc4vURSzdx6QE6KVynWGomDbLaA75dNALMNyfjh3p8DRRar84Um")?;
|
||||
/// let wallet: OfflineWallet<_> = Wallet::new_offline(
|
||||
/// P2WPKH_P2SH(key),
|
||||
/// None,
|
||||
/// Network::Testnet,
|
||||
/// MemoryDatabase::default(),
|
||||
/// )?;
|
||||
/// let mut wallet = Wallet::new_no_persist(P2Wpkh_P2Sh(key), None, Network::Testnet)?;
|
||||
///
|
||||
/// assert_eq!(
|
||||
/// wallet.get_new_address()?.to_string(),
|
||||
/// wallet.get_address(AddressIndex::New).to_string(),
|
||||
/// "2NB4ox5VDRw1ecUv6SnT3VQHPXveYztRqk5"
|
||||
/// );
|
||||
/// # Ok::<_, Box<dyn std::error::Error>>(())
|
||||
/// ```
|
||||
#[allow(non_camel_case_types)]
|
||||
pub struct P2WPKH_P2SH<K: ToDescriptorKey<Segwitv0>>(pub K);
|
||||
pub struct P2Wpkh_P2Sh<K: IntoDescriptorKey<Segwitv0>>(pub K);
|
||||
|
||||
impl<K: ToDescriptorKey<Segwitv0>> DescriptorTemplate for P2WPKH_P2SH<K> {
|
||||
fn build(self) -> Result<DescriptorTemplateOut, KeyError> {
|
||||
Ok(descriptor!(sh(wpkh(self.0)))?)
|
||||
impl<K: IntoDescriptorKey<Segwitv0>> DescriptorTemplate for P2Wpkh_P2Sh<K> {
|
||||
fn build(self, _network: Network) -> Result<DescriptorTemplateOut, DescriptorError> {
|
||||
descriptor!(sh(wpkh(self.0)))
|
||||
}
|
||||
}
|
||||
|
||||
@@ -148,75 +130,69 @@ impl<K: ToDescriptorKey<Segwitv0>> DescriptorTemplate for P2WPKH_P2SH<K> {
|
||||
///
|
||||
/// ```
|
||||
/// # use bdk::bitcoin::{PrivateKey, Network};
|
||||
/// # use bdk::{Wallet, OfflineWallet};
|
||||
/// # use bdk::database::MemoryDatabase;
|
||||
/// use bdk::template::P2WPKH;
|
||||
/// # use bdk::{Wallet};
|
||||
/// use bdk::template::P2Wpkh;
|
||||
/// use bdk::wallet::AddressIndex::New;
|
||||
///
|
||||
/// let key =
|
||||
/// bitcoin::PrivateKey::from_wif("cTc4vURSzdx6QE6KVynWGomDbLaA75dNALMNyfjh3p8DRRar84Um")?;
|
||||
/// let wallet: OfflineWallet<_> = Wallet::new_offline(
|
||||
/// P2WPKH(key),
|
||||
/// None,
|
||||
/// Network::Testnet,
|
||||
/// MemoryDatabase::default(),
|
||||
/// )?;
|
||||
/// let mut wallet = Wallet::new_no_persist(P2Wpkh(key), None, Network::Testnet)?;
|
||||
///
|
||||
/// assert_eq!(
|
||||
/// wallet.get_new_address()?.to_string(),
|
||||
/// wallet.get_address(New).to_string(),
|
||||
/// "tb1q4525hmgw265tl3drrl8jjta7ayffu6jf68ltjd"
|
||||
/// );
|
||||
/// # Ok::<_, Box<dyn std::error::Error>>(())
|
||||
/// ```
|
||||
pub struct P2WPKH<K: ToDescriptorKey<Segwitv0>>(pub K);
|
||||
pub struct P2Wpkh<K: IntoDescriptorKey<Segwitv0>>(pub K);
|
||||
|
||||
impl<K: ToDescriptorKey<Segwitv0>> DescriptorTemplate for P2WPKH<K> {
|
||||
fn build(self) -> Result<DescriptorTemplateOut, KeyError> {
|
||||
Ok(descriptor!(wpkh(self.0))?)
|
||||
impl<K: IntoDescriptorKey<Segwitv0>> DescriptorTemplate for P2Wpkh<K> {
|
||||
fn build(self, _network: Network) -> Result<DescriptorTemplateOut, DescriptorError> {
|
||||
descriptor!(wpkh(self.0))
|
||||
}
|
||||
}
|
||||
|
||||
/// BIP44 template. Expands to `pkh(key/44'/0'/0'/{0,1}/*)`
|
||||
/// BIP44 template. Expands to `pkh(key/44'/{0,1}'/0'/{0,1}/*)`
|
||||
///
|
||||
/// Since there are hardened derivation steps, this template requires a private derivable key (generally a `xprv`/`tprv`).
|
||||
///
|
||||
/// See [`BIP44Public`] for a template that can work with a `xpub`/`tpub`.
|
||||
/// See [`Bip44Public`] for a template that can work with a `xpub`/`tpub`.
|
||||
///
|
||||
/// ## Example
|
||||
///
|
||||
/// ```
|
||||
/// # use std::str::FromStr;
|
||||
/// # use bdk::bitcoin::{PrivateKey, Network};
|
||||
/// # use bdk::{Wallet, OfflineWallet, KeychainKind};
|
||||
/// # use bdk::database::MemoryDatabase;
|
||||
/// use bdk::template::BIP44;
|
||||
/// # use bdk::{Wallet, KeychainKind};
|
||||
/// # use bdk::wallet::AddressIndex::New;
|
||||
/// use bdk::template::Bip44;
|
||||
///
|
||||
/// let key = bitcoin::util::bip32::ExtendedPrivKey::from_str("tprv8ZgxMBicQKsPeZRHk4rTG6orPS2CRNFX3njhUXx5vj9qGog5ZMH4uGReDWN5kCkY3jmWEtWause41CDvBRXD1shKknAMKxT99o9qUTRVC6m")?;
|
||||
/// let wallet: OfflineWallet<_> = Wallet::new_offline(
|
||||
/// BIP44(key.clone(), KeychainKind::External),
|
||||
/// Some(BIP44(key, KeychainKind::Internal)),
|
||||
/// let mut wallet = Wallet::new_no_persist(
|
||||
/// Bip44(key.clone(), KeychainKind::External),
|
||||
/// Some(Bip44(key, KeychainKind::Internal)),
|
||||
/// Network::Testnet,
|
||||
/// MemoryDatabase::default()
|
||||
/// )?;
|
||||
///
|
||||
/// assert_eq!(wallet.get_new_address()?.to_string(), "miNG7dJTzJqNbFS19svRdTCisC65dsubtR");
|
||||
/// assert_eq!(wallet.public_descriptor(KeychainKind::External)?.unwrap().to_string(), "pkh([c55b303f/44'/0'/0']tpubDDDzQ31JkZB7VxUr9bjvBivDdqoFLrDPyLWtLapArAi51ftfmCb2DPxwLQzX65iNcXz1DGaVvyvo6JQ6rTU73r2gqdEo8uov9QKRb7nKCSU/0/*)");
|
||||
/// assert_eq!(wallet.get_address(New).to_string(), "mmogjc7HJEZkrLqyQYqJmxUqFaC7i4uf89");
|
||||
/// assert_eq!(wallet.public_descriptor(KeychainKind::External).unwrap().to_string(), "pkh([c55b303f/44'/1'/0']tpubDCuorCpzvYS2LCD75BR46KHE8GdDeg1wsAgNZeNr6DaB5gQK1o14uErKwKLuFmeemkQ6N2m3rNgvctdJLyr7nwu2yia7413Hhg8WWE44cgT/0/*)#5wrnv0xt");
|
||||
/// # Ok::<_, Box<dyn std::error::Error>>(())
|
||||
/// ```
|
||||
pub struct BIP44<K: DerivableKey<Legacy>>(pub K, pub KeychainKind);
|
||||
pub struct Bip44<K: DerivableKey<Legacy>>(pub K, pub KeychainKind);
|
||||
|
||||
impl<K: DerivableKey<Legacy>> DescriptorTemplate for BIP44<K> {
|
||||
fn build(self) -> Result<DescriptorTemplateOut, KeyError> {
|
||||
Ok(P2PKH(legacy::make_bipxx_private(44, self.0, self.1)?).build()?)
|
||||
impl<K: DerivableKey<Legacy>> DescriptorTemplate for Bip44<K> {
|
||||
fn build(self, network: Network) -> Result<DescriptorTemplateOut, DescriptorError> {
|
||||
P2Pkh(legacy::make_bipxx_private(44, self.0, self.1, network)?).build(network)
|
||||
}
|
||||
}
|
||||
|
||||
/// BIP44 public template. Expands to `pkh(key/{0,1}/*)`
|
||||
///
|
||||
/// This assumes that the key used has already been derived with `m/44'/0'/0'`.
|
||||
/// This assumes that the key used has already been derived with `m/44'/0'/0'` for Mainnet or `m/44'/1'/0'` for Testnet.
|
||||
///
|
||||
/// This template requires the parent fingerprint to populate correctly the metadata of PSBTs.
|
||||
///
|
||||
/// See [`BIP44`] for a template that does the full derivation, but requires private data
|
||||
/// See [`Bip44`] for a template that does the full derivation, but requires private data
|
||||
/// for the key.
|
||||
///
|
||||
/// ## Example
|
||||
@@ -224,73 +200,74 @@ impl<K: DerivableKey<Legacy>> DescriptorTemplate for BIP44<K> {
|
||||
/// ```
|
||||
/// # use std::str::FromStr;
|
||||
/// # use bdk::bitcoin::{PrivateKey, Network};
|
||||
/// # use bdk::{Wallet, OfflineWallet, KeychainKind};
|
||||
/// # use bdk::database::MemoryDatabase;
|
||||
/// use bdk::template::BIP44Public;
|
||||
/// # use bdk::{Wallet, KeychainKind};
|
||||
/// # use bdk::wallet::AddressIndex::New;
|
||||
/// use bdk::template::Bip44Public;
|
||||
///
|
||||
/// let key = bitcoin::util::bip32::ExtendedPubKey::from_str("tpubDDDzQ31JkZB7VxUr9bjvBivDdqoFLrDPyLWtLapArAi51ftfmCb2DPxwLQzX65iNcXz1DGaVvyvo6JQ6rTU73r2gqdEo8uov9QKRb7nKCSU")?;
|
||||
/// let fingerprint = bitcoin::util::bip32::Fingerprint::from_str("c55b303f")?;
|
||||
/// let wallet: OfflineWallet<_> = Wallet::new_offline(
|
||||
/// BIP44Public(key.clone(), fingerprint, KeychainKind::External),
|
||||
/// Some(BIP44Public(key, fingerprint, KeychainKind::Internal)),
|
||||
/// let mut wallet = Wallet::new_no_persist(
|
||||
/// Bip44Public(key.clone(), fingerprint, KeychainKind::External),
|
||||
/// Some(Bip44Public(key, fingerprint, KeychainKind::Internal)),
|
||||
/// Network::Testnet,
|
||||
/// MemoryDatabase::default()
|
||||
/// )?;
|
||||
///
|
||||
/// assert_eq!(wallet.get_new_address()?.to_string(), "miNG7dJTzJqNbFS19svRdTCisC65dsubtR");
|
||||
/// assert_eq!(wallet.public_descriptor(KeychainKind::External)?.unwrap().to_string(), "pkh([c55b303f/44'/0'/0']tpubDDDzQ31JkZB7VxUr9bjvBivDdqoFLrDPyLWtLapArAi51ftfmCb2DPxwLQzX65iNcXz1DGaVvyvo6JQ6rTU73r2gqdEo8uov9QKRb7nKCSU/0/*)");
|
||||
/// assert_eq!(wallet.get_address(New).to_string(), "miNG7dJTzJqNbFS19svRdTCisC65dsubtR");
|
||||
/// assert_eq!(wallet.public_descriptor(KeychainKind::External).unwrap().to_string(), "pkh([c55b303f/44'/1'/0']tpubDDDzQ31JkZB7VxUr9bjvBivDdqoFLrDPyLWtLapArAi51ftfmCb2DPxwLQzX65iNcXz1DGaVvyvo6JQ6rTU73r2gqdEo8uov9QKRb7nKCSU/0/*)#cfhumdqz");
|
||||
/// # Ok::<_, Box<dyn std::error::Error>>(())
|
||||
/// ```
|
||||
pub struct BIP44Public<K: DerivableKey<Legacy>>(pub K, pub bip32::Fingerprint, pub KeychainKind);
|
||||
pub struct Bip44Public<K: DerivableKey<Legacy>>(pub K, pub bip32::Fingerprint, pub KeychainKind);
|
||||
|
||||
impl<K: DerivableKey<Legacy>> DescriptorTemplate for BIP44Public<K> {
|
||||
fn build(self) -> Result<DescriptorTemplateOut, KeyError> {
|
||||
Ok(P2PKH(legacy::make_bipxx_public(44, self.0, self.1, self.2)?).build()?)
|
||||
impl<K: DerivableKey<Legacy>> DescriptorTemplate for Bip44Public<K> {
|
||||
fn build(self, network: Network) -> Result<DescriptorTemplateOut, DescriptorError> {
|
||||
P2Pkh(legacy::make_bipxx_public(
|
||||
44, self.0, self.1, self.2, network,
|
||||
)?)
|
||||
.build(network)
|
||||
}
|
||||
}
|
||||
|
||||
/// BIP49 template. Expands to `sh(wpkh(key/49'/0'/0'/{0,1}/*))`
|
||||
/// BIP49 template. Expands to `sh(wpkh(key/49'/{0,1}'/0'/{0,1}/*))`
|
||||
///
|
||||
/// Since there are hardened derivation steps, this template requires a private derivable key (generally a `xprv`/`tprv`).
|
||||
///
|
||||
/// See [`BIP49Public`] for a template that can work with a `xpub`/`tpub`.
|
||||
/// See [`Bip49Public`] for a template that can work with a `xpub`/`tpub`.
|
||||
///
|
||||
/// ## Example
|
||||
///
|
||||
/// ```
|
||||
/// # use std::str::FromStr;
|
||||
/// # use bdk::bitcoin::{PrivateKey, Network};
|
||||
/// # use bdk::{Wallet, OfflineWallet, KeychainKind};
|
||||
/// # use bdk::database::MemoryDatabase;
|
||||
/// use bdk::template::BIP49;
|
||||
/// # use bdk::{Wallet, KeychainKind};
|
||||
/// # use bdk::wallet::AddressIndex::New;
|
||||
/// use bdk::template::Bip49;
|
||||
///
|
||||
/// let key = bitcoin::util::bip32::ExtendedPrivKey::from_str("tprv8ZgxMBicQKsPeZRHk4rTG6orPS2CRNFX3njhUXx5vj9qGog5ZMH4uGReDWN5kCkY3jmWEtWause41CDvBRXD1shKknAMKxT99o9qUTRVC6m")?;
|
||||
/// let wallet: OfflineWallet<_> = Wallet::new_offline(
|
||||
/// BIP49(key.clone(), KeychainKind::External),
|
||||
/// Some(BIP49(key, KeychainKind::Internal)),
|
||||
/// let mut wallet = Wallet::new_no_persist(
|
||||
/// Bip49(key.clone(), KeychainKind::External),
|
||||
/// Some(Bip49(key, KeychainKind::Internal)),
|
||||
/// Network::Testnet,
|
||||
/// MemoryDatabase::default()
|
||||
/// )?;
|
||||
///
|
||||
/// assert_eq!(wallet.get_new_address()?.to_string(), "2N3K4xbVAHoiTQSwxkZjWDfKoNC27pLkYnt");
|
||||
/// assert_eq!(wallet.public_descriptor(KeychainKind::External)?.unwrap().to_string(), "sh(wpkh([c55b303f/49\'/0\'/0\']tpubDC49r947KGK52X5rBWS4BLs5m9SRY3pYHnvRrm7HcybZ3BfdEsGFyzCMzayi1u58eT82ZeyFZwH7DD6Q83E3fM9CpfMtmnTygnLfP59jL9L/0/*))");
|
||||
/// assert_eq!(wallet.get_address(New).to_string(), "2N4zkWAoGdUv4NXhSsU8DvS5MB36T8nKHEB");
|
||||
/// assert_eq!(wallet.public_descriptor(KeychainKind::External).unwrap().to_string(), "sh(wpkh([c55b303f/49'/1'/0']tpubDDYr4kdnZgjjShzYNjZUZXUUtpXaofdkMaipyS8ThEh45qFmhT4hKYways7UXmg6V7het1QiFo9kf4kYUXyDvV4rHEyvSpys9pjCB3pukxi/0/*))#s9vxlc8e");
|
||||
/// # Ok::<_, Box<dyn std::error::Error>>(())
|
||||
/// ```
|
||||
pub struct BIP49<K: DerivableKey<Segwitv0>>(pub K, pub KeychainKind);
|
||||
pub struct Bip49<K: DerivableKey<Segwitv0>>(pub K, pub KeychainKind);
|
||||
|
||||
impl<K: DerivableKey<Segwitv0>> DescriptorTemplate for BIP49<K> {
|
||||
fn build(self) -> Result<DescriptorTemplateOut, KeyError> {
|
||||
Ok(P2WPKH_P2SH(segwit_v0::make_bipxx_private(49, self.0, self.1)?).build()?)
|
||||
impl<K: DerivableKey<Segwitv0>> DescriptorTemplate for Bip49<K> {
|
||||
fn build(self, network: Network) -> Result<DescriptorTemplateOut, DescriptorError> {
|
||||
P2Wpkh_P2Sh(segwit_v0::make_bipxx_private(49, self.0, self.1, network)?).build(network)
|
||||
}
|
||||
}
|
||||
|
||||
/// BIP49 public template. Expands to `sh(wpkh(key/{0,1}/*))`
|
||||
///
|
||||
/// This assumes that the key used has already been derived with `m/49'/0'/0'`.
|
||||
/// This assumes that the key used has already been derived with `m/49'/0'/0'` for Mainnet or `m/49'/1'/0'` for Testnet.
|
||||
///
|
||||
/// This template requires the parent fingerprint to populate correctly the metadata of PSBTs.
|
||||
///
|
||||
/// See [`BIP49`] for a template that does the full derivation, but requires private data
|
||||
/// See [`Bip49`] for a template that does the full derivation, but requires private data
|
||||
/// for the key.
|
||||
///
|
||||
/// ## Example
|
||||
@@ -298,73 +275,74 @@ impl<K: DerivableKey<Segwitv0>> DescriptorTemplate for BIP49<K> {
|
||||
/// ```
|
||||
/// # use std::str::FromStr;
|
||||
/// # use bdk::bitcoin::{PrivateKey, Network};
|
||||
/// # use bdk::{Wallet, OfflineWallet, KeychainKind};
|
||||
/// # use bdk::database::MemoryDatabase;
|
||||
/// use bdk::template::BIP49Public;
|
||||
/// # use bdk::{Wallet, KeychainKind};
|
||||
/// # use bdk::wallet::AddressIndex::New;
|
||||
/// use bdk::template::Bip49Public;
|
||||
///
|
||||
/// let key = bitcoin::util::bip32::ExtendedPubKey::from_str("tpubDC49r947KGK52X5rBWS4BLs5m9SRY3pYHnvRrm7HcybZ3BfdEsGFyzCMzayi1u58eT82ZeyFZwH7DD6Q83E3fM9CpfMtmnTygnLfP59jL9L")?;
|
||||
/// let fingerprint = bitcoin::util::bip32::Fingerprint::from_str("c55b303f")?;
|
||||
/// let wallet: OfflineWallet<_> = Wallet::new_offline(
|
||||
/// BIP49Public(key.clone(), fingerprint, KeychainKind::External),
|
||||
/// Some(BIP49Public(key, fingerprint, KeychainKind::Internal)),
|
||||
/// let mut wallet = Wallet::new_no_persist(
|
||||
/// Bip49Public(key.clone(), fingerprint, KeychainKind::External),
|
||||
/// Some(Bip49Public(key, fingerprint, KeychainKind::Internal)),
|
||||
/// Network::Testnet,
|
||||
/// MemoryDatabase::default()
|
||||
/// )?;
|
||||
///
|
||||
/// assert_eq!(wallet.get_new_address()?.to_string(), "2N3K4xbVAHoiTQSwxkZjWDfKoNC27pLkYnt");
|
||||
/// assert_eq!(wallet.public_descriptor(KeychainKind::External)?.unwrap().to_string(), "sh(wpkh([c55b303f/49\'/0\'/0\']tpubDC49r947KGK52X5rBWS4BLs5m9SRY3pYHnvRrm7HcybZ3BfdEsGFyzCMzayi1u58eT82ZeyFZwH7DD6Q83E3fM9CpfMtmnTygnLfP59jL9L/0/*))");
|
||||
/// assert_eq!(wallet.get_address(New).to_string(), "2N3K4xbVAHoiTQSwxkZjWDfKoNC27pLkYnt");
|
||||
/// assert_eq!(wallet.public_descriptor(KeychainKind::External).unwrap().to_string(), "sh(wpkh([c55b303f/49'/1'/0']tpubDC49r947KGK52X5rBWS4BLs5m9SRY3pYHnvRrm7HcybZ3BfdEsGFyzCMzayi1u58eT82ZeyFZwH7DD6Q83E3fM9CpfMtmnTygnLfP59jL9L/0/*))#3tka9g0q");
|
||||
/// # Ok::<_, Box<dyn std::error::Error>>(())
|
||||
/// ```
|
||||
pub struct BIP49Public<K: DerivableKey<Segwitv0>>(pub K, pub bip32::Fingerprint, pub KeychainKind);
|
||||
pub struct Bip49Public<K: DerivableKey<Segwitv0>>(pub K, pub bip32::Fingerprint, pub KeychainKind);
|
||||
|
||||
impl<K: DerivableKey<Segwitv0>> DescriptorTemplate for BIP49Public<K> {
|
||||
fn build(self) -> Result<DescriptorTemplateOut, KeyError> {
|
||||
Ok(P2WPKH_P2SH(segwit_v0::make_bipxx_public(49, self.0, self.1, self.2)?).build()?)
|
||||
impl<K: DerivableKey<Segwitv0>> DescriptorTemplate for Bip49Public<K> {
|
||||
fn build(self, network: Network) -> Result<DescriptorTemplateOut, DescriptorError> {
|
||||
P2Wpkh_P2Sh(segwit_v0::make_bipxx_public(
|
||||
49, self.0, self.1, self.2, network,
|
||||
)?)
|
||||
.build(network)
|
||||
}
|
||||
}
|
||||
|
||||
/// BIP84 template. Expands to `wpkh(key/84'/0'/0'/{0,1}/*)`
|
||||
/// BIP84 template. Expands to `wpkh(key/84'/{0,1}'/0'/{0,1}/*)`
|
||||
///
|
||||
/// Since there are hardened derivation steps, this template requires a private derivable key (generally a `xprv`/`tprv`).
|
||||
///
|
||||
/// See [`BIP84Public`] for a template that can work with a `xpub`/`tpub`.
|
||||
/// See [`Bip84Public`] for a template that can work with a `xpub`/`tpub`.
|
||||
///
|
||||
/// ## Example
|
||||
///
|
||||
/// ```
|
||||
/// # use std::str::FromStr;
|
||||
/// # use bdk::bitcoin::{PrivateKey, Network};
|
||||
/// # use bdk::{Wallet, OfflineWallet, KeychainKind};
|
||||
/// # use bdk::database::MemoryDatabase;
|
||||
/// use bdk::template::BIP84;
|
||||
/// # use bdk::{Wallet, KeychainKind};
|
||||
/// # use bdk::wallet::AddressIndex::New;
|
||||
/// use bdk::template::Bip84;
|
||||
///
|
||||
/// let key = bitcoin::util::bip32::ExtendedPrivKey::from_str("tprv8ZgxMBicQKsPeZRHk4rTG6orPS2CRNFX3njhUXx5vj9qGog5ZMH4uGReDWN5kCkY3jmWEtWause41CDvBRXD1shKknAMKxT99o9qUTRVC6m")?;
|
||||
/// let wallet: OfflineWallet<_> = Wallet::new_offline(
|
||||
/// BIP84(key.clone(), KeychainKind::External),
|
||||
/// Some(BIP84(key, KeychainKind::Internal)),
|
||||
/// let mut wallet = Wallet::new_no_persist(
|
||||
/// Bip84(key.clone(), KeychainKind::External),
|
||||
/// Some(Bip84(key, KeychainKind::Internal)),
|
||||
/// Network::Testnet,
|
||||
/// MemoryDatabase::default()
|
||||
/// )?;
|
||||
///
|
||||
/// assert_eq!(wallet.get_new_address()?.to_string(), "tb1qedg9fdlf8cnnqfd5mks6uz5w4kgpk2pr6y4qc7");
|
||||
/// assert_eq!(wallet.public_descriptor(KeychainKind::External)?.unwrap().to_string(), "wpkh([c55b303f/84\'/0\'/0\']tpubDC2Qwo2TFsaNC4ju8nrUJ9mqVT3eSgdmy1yPqhgkjwmke3PRXutNGRYAUo6RCHTcVQaDR3ohNU9we59brGHuEKPvH1ags2nevW5opEE9Z5Q/0/*)");
|
||||
/// assert_eq!(wallet.get_address(New).to_string(), "tb1qhl85z42h7r4su5u37rvvw0gk8j2t3n9y7zsg4n");
|
||||
/// assert_eq!(wallet.public_descriptor(KeychainKind::External).unwrap().to_string(), "wpkh([c55b303f/84'/1'/0']tpubDDc5mum24DekpNw92t6fHGp8Gr2JjF9J7i4TZBtN6Vp8xpAULG5CFaKsfugWa5imhrQQUZKXe261asP5koDHo5bs3qNTmf3U3o4v9SaB8gg/0/*)#6kfecsmr");
|
||||
/// # Ok::<_, Box<dyn std::error::Error>>(())
|
||||
/// ```
|
||||
pub struct BIP84<K: DerivableKey<Segwitv0>>(pub K, pub KeychainKind);
|
||||
pub struct Bip84<K: DerivableKey<Segwitv0>>(pub K, pub KeychainKind);
|
||||
|
||||
impl<K: DerivableKey<Segwitv0>> DescriptorTemplate for BIP84<K> {
|
||||
fn build(self) -> Result<DescriptorTemplateOut, KeyError> {
|
||||
Ok(P2WPKH(segwit_v0::make_bipxx_private(84, self.0, self.1)?).build()?)
|
||||
impl<K: DerivableKey<Segwitv0>> DescriptorTemplate for Bip84<K> {
|
||||
fn build(self, network: Network) -> Result<DescriptorTemplateOut, DescriptorError> {
|
||||
P2Wpkh(segwit_v0::make_bipxx_private(84, self.0, self.1, network)?).build(network)
|
||||
}
|
||||
}
|
||||
|
||||
/// BIP84 public template. Expands to `wpkh(key/{0,1}/*)`
|
||||
///
|
||||
/// This assumes that the key used has already been derived with `m/84'/0'/0'`.
|
||||
/// This assumes that the key used has already been derived with `m/84'/0'/0'` for Mainnet or `m/84'/1'/0'` for Testnet.
|
||||
///
|
||||
/// This template requires the parent fingerprint to populate correctly the metadata of PSBTs.
|
||||
///
|
||||
/// See [`BIP84`] for a template that does the full derivation, but requires private data
|
||||
/// See [`Bip84`] for a template that does the full derivation, but requires private data
|
||||
/// for the key.
|
||||
///
|
||||
/// ## Example
|
||||
@@ -372,28 +350,30 @@ impl<K: DerivableKey<Segwitv0>> DescriptorTemplate for BIP84<K> {
|
||||
/// ```
|
||||
/// # use std::str::FromStr;
|
||||
/// # use bdk::bitcoin::{PrivateKey, Network};
|
||||
/// # use bdk::{Wallet, OfflineWallet, KeychainKind};
|
||||
/// # use bdk::database::MemoryDatabase;
|
||||
/// use bdk::template::BIP84Public;
|
||||
/// # use bdk::{Wallet, KeychainKind};
|
||||
/// # use bdk::wallet::AddressIndex::New;
|
||||
/// use bdk::template::Bip84Public;
|
||||
///
|
||||
/// let key = bitcoin::util::bip32::ExtendedPubKey::from_str("tpubDC2Qwo2TFsaNC4ju8nrUJ9mqVT3eSgdmy1yPqhgkjwmke3PRXutNGRYAUo6RCHTcVQaDR3ohNU9we59brGHuEKPvH1ags2nevW5opEE9Z5Q")?;
|
||||
/// let fingerprint = bitcoin::util::bip32::Fingerprint::from_str("c55b303f")?;
|
||||
/// let wallet: OfflineWallet<_> = Wallet::new_offline(
|
||||
/// BIP84Public(key.clone(), fingerprint, KeychainKind::External),
|
||||
/// Some(BIP84Public(key, fingerprint, KeychainKind::Internal)),
|
||||
/// let mut wallet = Wallet::new_no_persist(
|
||||
/// Bip84Public(key.clone(), fingerprint, KeychainKind::External),
|
||||
/// Some(Bip84Public(key, fingerprint, KeychainKind::Internal)),
|
||||
/// Network::Testnet,
|
||||
/// MemoryDatabase::default()
|
||||
/// )?;
|
||||
///
|
||||
/// assert_eq!(wallet.get_new_address()?.to_string(), "tb1qedg9fdlf8cnnqfd5mks6uz5w4kgpk2pr6y4qc7");
|
||||
/// assert_eq!(wallet.public_descriptor(KeychainKind::External)?.unwrap().to_string(), "wpkh([c55b303f/84\'/0\'/0\']tpubDC2Qwo2TFsaNC4ju8nrUJ9mqVT3eSgdmy1yPqhgkjwmke3PRXutNGRYAUo6RCHTcVQaDR3ohNU9we59brGHuEKPvH1ags2nevW5opEE9Z5Q/0/*)");
|
||||
/// assert_eq!(wallet.get_address(New).to_string(), "tb1qedg9fdlf8cnnqfd5mks6uz5w4kgpk2pr6y4qc7");
|
||||
/// assert_eq!(wallet.public_descriptor(KeychainKind::External).unwrap().to_string(), "wpkh([c55b303f/84'/1'/0']tpubDC2Qwo2TFsaNC4ju8nrUJ9mqVT3eSgdmy1yPqhgkjwmke3PRXutNGRYAUo6RCHTcVQaDR3ohNU9we59brGHuEKPvH1ags2nevW5opEE9Z5Q/0/*)#dhu402yv");
|
||||
/// # Ok::<_, Box<dyn std::error::Error>>(())
|
||||
/// ```
|
||||
pub struct BIP84Public<K: DerivableKey<Segwitv0>>(pub K, pub bip32::Fingerprint, pub KeychainKind);
|
||||
pub struct Bip84Public<K: DerivableKey<Segwitv0>>(pub K, pub bip32::Fingerprint, pub KeychainKind);
|
||||
|
||||
impl<K: DerivableKey<Segwitv0>> DescriptorTemplate for BIP84Public<K> {
|
||||
fn build(self) -> Result<DescriptorTemplateOut, KeyError> {
|
||||
Ok(P2WPKH(segwit_v0::make_bipxx_public(84, self.0, self.1, self.2)?).build()?)
|
||||
impl<K: DerivableKey<Segwitv0>> DescriptorTemplate for Bip84Public<K> {
|
||||
fn build(self, network: Network) -> Result<DescriptorTemplateOut, DescriptorError> {
|
||||
P2Wpkh(segwit_v0::make_bipxx_public(
|
||||
84, self.0, self.1, self.2, network,
|
||||
)?)
|
||||
.build(network)
|
||||
}
|
||||
}
|
||||
|
||||
@@ -406,10 +386,19 @@ macro_rules! expand_make_bipxx {
|
||||
bip: u32,
|
||||
key: K,
|
||||
keychain: KeychainKind,
|
||||
) -> Result<impl ToDescriptorKey<$ctx>, KeyError> {
|
||||
let mut derivation_path = Vec::with_capacity(4);
|
||||
network: Network,
|
||||
) -> Result<impl IntoDescriptorKey<$ctx>, DescriptorError> {
|
||||
let mut derivation_path = alloc::vec::Vec::with_capacity(4);
|
||||
derivation_path.push(bip32::ChildNumber::from_hardened_idx(bip)?);
|
||||
derivation_path.push(bip32::ChildNumber::from_hardened_idx(0)?);
|
||||
|
||||
match network {
|
||||
Network::Bitcoin => {
|
||||
derivation_path.push(bip32::ChildNumber::from_hardened_idx(0)?);
|
||||
}
|
||||
_ => {
|
||||
derivation_path.push(bip32::ChildNumber::from_hardened_idx(1)?);
|
||||
}
|
||||
}
|
||||
derivation_path.push(bip32::ChildNumber::from_hardened_idx(0)?);
|
||||
|
||||
match keychain {
|
||||
@@ -430,17 +419,21 @@ macro_rules! expand_make_bipxx {
|
||||
key: K,
|
||||
parent_fingerprint: bip32::Fingerprint,
|
||||
keychain: KeychainKind,
|
||||
) -> Result<impl ToDescriptorKey<$ctx>, KeyError> {
|
||||
network: Network,
|
||||
) -> Result<impl IntoDescriptorKey<$ctx>, DescriptorError> {
|
||||
let derivation_path: bip32::DerivationPath = match keychain {
|
||||
KeychainKind::External => vec![bip32::ChildNumber::from_normal_idx(0)?].into(),
|
||||
KeychainKind::Internal => vec![bip32::ChildNumber::from_normal_idx(1)?].into(),
|
||||
};
|
||||
|
||||
let mut source_path = Vec::with_capacity(3);
|
||||
source_path.push(bip32::ChildNumber::from_hardened_idx(bip)?);
|
||||
source_path.push(bip32::ChildNumber::from_hardened_idx(0)?);
|
||||
source_path.push(bip32::ChildNumber::from_hardened_idx(0)?);
|
||||
let source_path: bip32::DerivationPath = source_path.into();
|
||||
let source_path = bip32::DerivationPath::from(vec![
|
||||
bip32::ChildNumber::from_hardened_idx(bip)?,
|
||||
match network {
|
||||
Network::Bitcoin => bip32::ChildNumber::from_hardened_idx(0)?,
|
||||
_ => bip32::ChildNumber::from_hardened_idx(1)?,
|
||||
},
|
||||
bip32::ChildNumber::from_hardened_idx(0)?,
|
||||
]);
|
||||
|
||||
Ok((key, (parent_fingerprint, source_path), derivation_path))
|
||||
}
|
||||
@@ -455,38 +448,69 @@ expand_make_bipxx!(segwit_v0, Segwitv0);
|
||||
mod test {
|
||||
// test existing descriptor templates, make sure they are expanded to the right descriptors
|
||||
|
||||
use alloc::{string::ToString, vec::Vec};
|
||||
use core::str::FromStr;
|
||||
|
||||
use super::*;
|
||||
use crate::descriptor::DescriptorMeta;
|
||||
use crate::keys::{KeyError, ValidNetworks};
|
||||
use bitcoin::hashes::core::str::FromStr;
|
||||
use crate::descriptor::{DescriptorError, DescriptorMeta};
|
||||
use crate::keys::ValidNetworks;
|
||||
use assert_matches::assert_matches;
|
||||
use bitcoin::network::constants::Network::Regtest;
|
||||
use bitcoin::secp256k1::Secp256k1;
|
||||
use bitcoin::util::bip32::ChildNumber;
|
||||
use miniscript::descriptor::{DescriptorPublicKey, DescriptorPublicKeyCtx, KeyMap};
|
||||
use miniscript::descriptor::{DescriptorPublicKey, KeyMap};
|
||||
use miniscript::Descriptor;
|
||||
|
||||
// BIP44 `pkh(key/44'/{0,1}'/0'/{0,1}/*)`
|
||||
#[test]
|
||||
fn test_bip44_template_cointype() {
|
||||
use bitcoin::util::bip32::ChildNumber::{self, Hardened};
|
||||
|
||||
let xprvkey = bitcoin::util::bip32::ExtendedPrivKey::from_str("xprv9s21ZrQH143K2fpbqApQL69a4oKdGVnVN52R82Ft7d1pSqgKmajF62acJo3aMszZb6qQ22QsVECSFxvf9uyxFUvFYQMq3QbtwtRSMjLAhMf").unwrap();
|
||||
assert_eq!(Network::Bitcoin, xprvkey.network);
|
||||
let xdesc = Bip44(xprvkey, KeychainKind::Internal)
|
||||
.build(Network::Bitcoin)
|
||||
.unwrap();
|
||||
|
||||
if let ExtendedDescriptor::Pkh(pkh) = xdesc.0 {
|
||||
let path: Vec<ChildNumber> = pkh.into_inner().full_derivation_path().into();
|
||||
let purpose = path.get(0).unwrap();
|
||||
assert_matches!(purpose, Hardened { index: 44 });
|
||||
let coin_type = path.get(1).unwrap();
|
||||
assert_matches!(coin_type, Hardened { index: 0 });
|
||||
}
|
||||
|
||||
let tprvkey = bitcoin::util::bip32::ExtendedPrivKey::from_str("tprv8ZgxMBicQKsPcx5nBGsR63Pe8KnRUqmbJNENAfGftF3yuXoMMoVJJcYeUw5eVkm9WBPjWYt6HMWYJNesB5HaNVBaFc1M6dRjWSYnmewUMYy").unwrap();
|
||||
assert_eq!(Network::Testnet, tprvkey.network);
|
||||
let tdesc = Bip44(tprvkey, KeychainKind::Internal)
|
||||
.build(Network::Testnet)
|
||||
.unwrap();
|
||||
|
||||
if let ExtendedDescriptor::Pkh(pkh) = tdesc.0 {
|
||||
let path: Vec<ChildNumber> = pkh.into_inner().full_derivation_path().into();
|
||||
let purpose = path.get(0).unwrap();
|
||||
assert_matches!(purpose, Hardened { index: 44 });
|
||||
let coin_type = path.get(1).unwrap();
|
||||
assert_matches!(coin_type, Hardened { index: 1 });
|
||||
}
|
||||
}
|
||||
|
||||
// verify template descriptor generates expected address(es)
|
||||
fn check(
|
||||
desc: Result<(Descriptor<DescriptorPublicKey>, KeyMap, ValidNetworks), KeyError>,
|
||||
desc: Result<(Descriptor<DescriptorPublicKey>, KeyMap, ValidNetworks), DescriptorError>,
|
||||
is_witness: bool,
|
||||
is_fixed: bool,
|
||||
expected: &[&str],
|
||||
) {
|
||||
let secp = Secp256k1::new();
|
||||
let deriv_ctx =
|
||||
DescriptorPublicKeyCtx::new(&secp, ChildNumber::from_normal_idx(0).unwrap());
|
||||
|
||||
let (desc, _key_map, _networks) = desc.unwrap();
|
||||
assert_eq!(desc.is_witness(), is_witness);
|
||||
assert_eq!(desc.is_fixed(), is_fixed);
|
||||
assert_eq!(!desc.has_wildcard(), is_fixed);
|
||||
for i in 0..expected.len() {
|
||||
let index = i as u32;
|
||||
let child_desc = if desc.is_fixed() {
|
||||
desc.clone()
|
||||
let child_desc = if !desc.has_wildcard() {
|
||||
desc.at_derivation_index(0)
|
||||
} else {
|
||||
desc.derive(ChildNumber::from_normal_idx(index).unwrap())
|
||||
desc.at_derivation_index(index)
|
||||
};
|
||||
let address = child_desc.address(Regtest, deriv_ctx).unwrap();
|
||||
let address = child_desc.address(Regtest).unwrap();
|
||||
assert_eq!(address.to_string(), *expected.get(i).unwrap());
|
||||
}
|
||||
}
|
||||
@@ -498,7 +522,7 @@ mod test {
|
||||
bitcoin::PrivateKey::from_wif("cTc4vURSzdx6QE6KVynWGomDbLaA75dNALMNyfjh3p8DRRar84Um")
|
||||
.unwrap();
|
||||
check(
|
||||
P2PKH(prvkey).build(),
|
||||
P2Pkh(prvkey).build(Network::Bitcoin),
|
||||
false,
|
||||
true,
|
||||
&["mwJ8hxFYW19JLuc65RCTaP4v1rzVU8cVMT"],
|
||||
@@ -509,7 +533,7 @@ mod test {
|
||||
)
|
||||
.unwrap();
|
||||
check(
|
||||
P2PKH(pubkey).build(),
|
||||
P2Pkh(pubkey).build(Network::Bitcoin),
|
||||
false,
|
||||
true,
|
||||
&["muZpTpBYhxmRFuCjLc7C6BBDF32C8XVJUi"],
|
||||
@@ -523,7 +547,7 @@ mod test {
|
||||
bitcoin::PrivateKey::from_wif("cTc4vURSzdx6QE6KVynWGomDbLaA75dNALMNyfjh3p8DRRar84Um")
|
||||
.unwrap();
|
||||
check(
|
||||
P2WPKH_P2SH(prvkey).build(),
|
||||
P2Wpkh_P2Sh(prvkey).build(Network::Bitcoin),
|
||||
true,
|
||||
true,
|
||||
&["2NB4ox5VDRw1ecUv6SnT3VQHPXveYztRqk5"],
|
||||
@@ -534,7 +558,7 @@ mod test {
|
||||
)
|
||||
.unwrap();
|
||||
check(
|
||||
P2WPKH_P2SH(pubkey).build(),
|
||||
P2Wpkh_P2Sh(pubkey).build(Network::Bitcoin),
|
||||
true,
|
||||
true,
|
||||
&["2N5LiC3CqzxDamRTPG1kiNv1FpNJQ7x28sb"],
|
||||
@@ -548,7 +572,7 @@ mod test {
|
||||
bitcoin::PrivateKey::from_wif("cTc4vURSzdx6QE6KVynWGomDbLaA75dNALMNyfjh3p8DRRar84Um")
|
||||
.unwrap();
|
||||
check(
|
||||
P2WPKH(prvkey).build(),
|
||||
P2Wpkh(prvkey).build(Network::Bitcoin),
|
||||
true,
|
||||
true,
|
||||
&["bcrt1q4525hmgw265tl3drrl8jjta7ayffu6jfcwxx9y"],
|
||||
@@ -559,7 +583,7 @@ mod test {
|
||||
)
|
||||
.unwrap();
|
||||
check(
|
||||
P2WPKH(pubkey).build(),
|
||||
P2Wpkh(pubkey).build(Network::Bitcoin),
|
||||
true,
|
||||
true,
|
||||
&["bcrt1qngw83fg8dz0k749cg7k3emc7v98wy0c7azaa6h"],
|
||||
@@ -571,7 +595,7 @@ mod test {
|
||||
fn test_bip44_template() {
|
||||
let prvkey = bitcoin::util::bip32::ExtendedPrivKey::from_str("tprv8ZgxMBicQKsPcx5nBGsR63Pe8KnRUqmbJNENAfGftF3yuXoMMoVJJcYeUw5eVkm9WBPjWYt6HMWYJNesB5HaNVBaFc1M6dRjWSYnmewUMYy").unwrap();
|
||||
check(
|
||||
BIP44(prvkey, KeychainKind::External).build(),
|
||||
Bip44(prvkey, KeychainKind::External).build(Network::Bitcoin),
|
||||
false,
|
||||
false,
|
||||
&[
|
||||
@@ -581,7 +605,7 @@ mod test {
|
||||
],
|
||||
);
|
||||
check(
|
||||
BIP44(prvkey, KeychainKind::Internal).build(),
|
||||
Bip44(prvkey, KeychainKind::Internal).build(Network::Bitcoin),
|
||||
false,
|
||||
false,
|
||||
&[
|
||||
@@ -598,7 +622,7 @@ mod test {
|
||||
let pubkey = bitcoin::util::bip32::ExtendedPubKey::from_str("tpubDDDzQ31JkZB7VxUr9bjvBivDdqoFLrDPyLWtLapArAi51ftfmCb2DPxwLQzX65iNcXz1DGaVvyvo6JQ6rTU73r2gqdEo8uov9QKRb7nKCSU").unwrap();
|
||||
let fingerprint = bitcoin::util::bip32::Fingerprint::from_str("c55b303f").unwrap();
|
||||
check(
|
||||
BIP44Public(pubkey, fingerprint, KeychainKind::External).build(),
|
||||
Bip44Public(pubkey, fingerprint, KeychainKind::External).build(Network::Bitcoin),
|
||||
false,
|
||||
false,
|
||||
&[
|
||||
@@ -608,7 +632,7 @@ mod test {
|
||||
],
|
||||
);
|
||||
check(
|
||||
BIP44Public(pubkey, fingerprint, KeychainKind::Internal).build(),
|
||||
Bip44Public(pubkey, fingerprint, KeychainKind::Internal).build(Network::Bitcoin),
|
||||
false,
|
||||
false,
|
||||
&[
|
||||
@@ -624,7 +648,7 @@ mod test {
|
||||
fn test_bip49_template() {
|
||||
let prvkey = bitcoin::util::bip32::ExtendedPrivKey::from_str("tprv8ZgxMBicQKsPcx5nBGsR63Pe8KnRUqmbJNENAfGftF3yuXoMMoVJJcYeUw5eVkm9WBPjWYt6HMWYJNesB5HaNVBaFc1M6dRjWSYnmewUMYy").unwrap();
|
||||
check(
|
||||
BIP49(prvkey, KeychainKind::External).build(),
|
||||
Bip49(prvkey, KeychainKind::External).build(Network::Bitcoin),
|
||||
true,
|
||||
false,
|
||||
&[
|
||||
@@ -634,7 +658,7 @@ mod test {
|
||||
],
|
||||
);
|
||||
check(
|
||||
BIP49(prvkey, KeychainKind::Internal).build(),
|
||||
Bip49(prvkey, KeychainKind::Internal).build(Network::Bitcoin),
|
||||
true,
|
||||
false,
|
||||
&[
|
||||
@@ -651,7 +675,7 @@ mod test {
|
||||
let pubkey = bitcoin::util::bip32::ExtendedPubKey::from_str("tpubDC49r947KGK52X5rBWS4BLs5m9SRY3pYHnvRrm7HcybZ3BfdEsGFyzCMzayi1u58eT82ZeyFZwH7DD6Q83E3fM9CpfMtmnTygnLfP59jL9L").unwrap();
|
||||
let fingerprint = bitcoin::util::bip32::Fingerprint::from_str("c55b303f").unwrap();
|
||||
check(
|
||||
BIP49Public(pubkey, fingerprint, KeychainKind::External).build(),
|
||||
Bip49Public(pubkey, fingerprint, KeychainKind::External).build(Network::Bitcoin),
|
||||
true,
|
||||
false,
|
||||
&[
|
||||
@@ -661,7 +685,7 @@ mod test {
|
||||
],
|
||||
);
|
||||
check(
|
||||
BIP49Public(pubkey, fingerprint, KeychainKind::Internal).build(),
|
||||
Bip49Public(pubkey, fingerprint, KeychainKind::Internal).build(Network::Bitcoin),
|
||||
true,
|
||||
false,
|
||||
&[
|
||||
@@ -677,7 +701,7 @@ mod test {
|
||||
fn test_bip84_template() {
|
||||
let prvkey = bitcoin::util::bip32::ExtendedPrivKey::from_str("tprv8ZgxMBicQKsPcx5nBGsR63Pe8KnRUqmbJNENAfGftF3yuXoMMoVJJcYeUw5eVkm9WBPjWYt6HMWYJNesB5HaNVBaFc1M6dRjWSYnmewUMYy").unwrap();
|
||||
check(
|
||||
BIP84(prvkey, KeychainKind::External).build(),
|
||||
Bip84(prvkey, KeychainKind::External).build(Network::Bitcoin),
|
||||
true,
|
||||
false,
|
||||
&[
|
||||
@@ -687,7 +711,7 @@ mod test {
|
||||
],
|
||||
);
|
||||
check(
|
||||
BIP84(prvkey, KeychainKind::Internal).build(),
|
||||
Bip84(prvkey, KeychainKind::Internal).build(Network::Bitcoin),
|
||||
true,
|
||||
false,
|
||||
&[
|
||||
@@ -704,7 +728,7 @@ mod test {
|
||||
let pubkey = bitcoin::util::bip32::ExtendedPubKey::from_str("tpubDC2Qwo2TFsaNC4ju8nrUJ9mqVT3eSgdmy1yPqhgkjwmke3PRXutNGRYAUo6RCHTcVQaDR3ohNU9we59brGHuEKPvH1ags2nevW5opEE9Z5Q").unwrap();
|
||||
let fingerprint = bitcoin::util::bip32::Fingerprint::from_str("c55b303f").unwrap();
|
||||
check(
|
||||
BIP84Public(pubkey, fingerprint, KeychainKind::External).build(),
|
||||
Bip84Public(pubkey, fingerprint, KeychainKind::External).build(Network::Bitcoin),
|
||||
true,
|
||||
false,
|
||||
&[
|
||||
@@ -714,7 +738,7 @@ mod test {
|
||||
],
|
||||
);
|
||||
check(
|
||||
BIP84Public(pubkey, fingerprint, KeychainKind::Internal).build(),
|
||||
Bip84Public(pubkey, fingerprint, KeychainKind::Internal).build(Network::Bitcoin),
|
||||
true,
|
||||
false,
|
||||
&[
|
||||
199
crates/bdk/src/error.rs
Normal file
199
crates/bdk/src/error.rs
Normal file
@@ -0,0 +1,199 @@
|
||||
// Bitcoin Dev Kit
|
||||
// Written in 2020 by Alekos Filini <alekos.filini@gmail.com>
|
||||
//
|
||||
// Copyright (c) 2020-2021 Bitcoin Dev Kit Developers
|
||||
//
|
||||
// This file is licensed under the Apache License, Version 2.0 <LICENSE-APACHE
|
||||
// or http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
|
||||
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your option.
|
||||
// You may not use this file except in accordance with one or both of these
|
||||
// licenses.
|
||||
|
||||
use crate::bitcoin::Network;
|
||||
use crate::{descriptor, wallet};
|
||||
use alloc::{string::String, vec::Vec};
|
||||
use bitcoin::{OutPoint, Txid};
|
||||
use core::fmt;
|
||||
|
||||
/// Errors that can be thrown by the [`Wallet`](crate::wallet::Wallet)
|
||||
#[derive(Debug)]
|
||||
pub enum Error {
|
||||
/// Generic error
|
||||
Generic(String),
|
||||
/// Cannot build a tx without recipients
|
||||
NoRecipients,
|
||||
/// `manually_selected_only` option is selected but no utxo has been passed
|
||||
NoUtxosSelected,
|
||||
/// Output created is under the dust limit, 546 satoshis
|
||||
OutputBelowDustLimit(usize),
|
||||
/// Wallet's UTXO set is not enough to cover recipient's requested plus fee
|
||||
InsufficientFunds {
|
||||
/// Sats needed for some transaction
|
||||
needed: u64,
|
||||
/// Sats available for spending
|
||||
available: u64,
|
||||
},
|
||||
/// Branch and bound coin selection possible attempts with sufficiently big UTXO set could grow
|
||||
/// exponentially, thus a limit is set, and when hit, this error is thrown
|
||||
BnBTotalTriesExceeded,
|
||||
/// Branch and bound coin selection tries to avoid needing a change by finding the right inputs for
|
||||
/// the desired outputs plus fee, if there is not such combination this error is thrown
|
||||
BnBNoExactMatch,
|
||||
/// Happens when trying to spend an UTXO that is not in the internal database
|
||||
UnknownUtxo,
|
||||
/// Thrown when a tx is not found in the internal database
|
||||
TransactionNotFound,
|
||||
/// Happens when trying to bump a transaction that is already confirmed
|
||||
TransactionConfirmed,
|
||||
/// Trying to replace a tx that has a sequence >= `0xFFFFFFFE`
|
||||
IrreplaceableTransaction,
|
||||
/// When bumping a tx the fee rate requested is lower than required
|
||||
FeeRateTooLow {
|
||||
/// Required fee rate (satoshi/vbyte)
|
||||
required: crate::types::FeeRate,
|
||||
},
|
||||
/// When bumping a tx the absolute fee requested is lower than replaced tx absolute fee
|
||||
FeeTooLow {
|
||||
/// Required fee absolute value (satoshi)
|
||||
required: u64,
|
||||
},
|
||||
/// Node doesn't have data to estimate a fee rate
|
||||
FeeRateUnavailable,
|
||||
/// In order to use the [`TxBuilder::add_global_xpubs`] option every extended
|
||||
/// key in the descriptor must either be a master key itself (having depth = 0) or have an
|
||||
/// explicit origin provided
|
||||
///
|
||||
/// [`TxBuilder::add_global_xpubs`]: crate::wallet::tx_builder::TxBuilder::add_global_xpubs
|
||||
MissingKeyOrigin(String),
|
||||
/// Error while working with [`keys`](crate::keys)
|
||||
Key(crate::keys::KeyError),
|
||||
/// Descriptor checksum mismatch
|
||||
ChecksumMismatch,
|
||||
/// Spending policy is not compatible with this [`KeychainKind`](crate::types::KeychainKind)
|
||||
SpendingPolicyRequired(crate::types::KeychainKind),
|
||||
/// Error while extracting and manipulating policies
|
||||
InvalidPolicyPathError(crate::descriptor::policy::PolicyError),
|
||||
/// Signing error
|
||||
Signer(crate::wallet::signer::SignerError),
|
||||
/// Requested outpoint doesn't exist in the tx (vout greater than available outputs)
|
||||
InvalidOutpoint(OutPoint),
|
||||
/// Error related to the parsing and usage of descriptors
|
||||
Descriptor(crate::descriptor::error::Error),
|
||||
/// Miniscript error
|
||||
Miniscript(miniscript::Error),
|
||||
/// Miniscript PSBT error
|
||||
MiniscriptPsbt(MiniscriptPsbtError),
|
||||
/// BIP32 error
|
||||
Bip32(bitcoin::util::bip32::Error),
|
||||
/// Partially signed bitcoin transaction error
|
||||
Psbt(bitcoin::util::psbt::Error),
|
||||
}
|
||||
|
||||
/// Errors returned by miniscript when updating inconsistent PSBTs
|
||||
#[derive(Debug, Clone)]
|
||||
pub enum MiniscriptPsbtError {
|
||||
Conversion(miniscript::descriptor::ConversionError),
|
||||
UtxoUpdate(miniscript::psbt::UtxoUpdateError),
|
||||
OutputUpdate(miniscript::psbt::OutputUpdateError),
|
||||
}
|
||||
|
||||
impl fmt::Display for MiniscriptPsbtError {
|
||||
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
|
||||
match self {
|
||||
Self::Conversion(err) => write!(f, "Conversion error: {}", err),
|
||||
Self::UtxoUpdate(err) => write!(f, "UTXO update error: {}", err),
|
||||
Self::OutputUpdate(err) => write!(f, "Output update error: {}", err),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl std::error::Error for MiniscriptPsbtError {}
|
||||
|
||||
impl fmt::Display for Error {
|
||||
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
|
||||
match self {
|
||||
Self::Generic(err) => write!(f, "Generic error: {}", err),
|
||||
Self::NoRecipients => write!(f, "Cannot build tx without recipients"),
|
||||
Self::NoUtxosSelected => write!(f, "No UTXO selected"),
|
||||
Self::OutputBelowDustLimit(limit) => {
|
||||
write!(f, "Output below the dust limit: {}", limit)
|
||||
}
|
||||
Self::InsufficientFunds { needed, available } => write!(
|
||||
f,
|
||||
"Insufficient funds: {} sat available of {} sat needed",
|
||||
available, needed
|
||||
),
|
||||
Self::BnBTotalTriesExceeded => {
|
||||
write!(f, "Branch and bound coin selection: total tries exceeded")
|
||||
}
|
||||
Self::BnBNoExactMatch => write!(f, "Branch and bound coin selection: not exact match"),
|
||||
Self::UnknownUtxo => write!(f, "UTXO not found in the internal database"),
|
||||
Self::TransactionNotFound => {
|
||||
write!(f, "Transaction not found in the internal database")
|
||||
}
|
||||
Self::TransactionConfirmed => write!(f, "Transaction already confirmed"),
|
||||
Self::IrreplaceableTransaction => write!(f, "Transaction can't be replaced"),
|
||||
Self::FeeRateTooLow { required } => write!(
|
||||
f,
|
||||
"Fee rate too low: required {} sat/vbyte",
|
||||
required.as_sat_per_vb()
|
||||
),
|
||||
Self::FeeTooLow { required } => write!(f, "Fee to low: required {} sat", required),
|
||||
Self::FeeRateUnavailable => write!(f, "Fee rate unavailable"),
|
||||
Self::MissingKeyOrigin(err) => write!(f, "Missing key origin: {}", err),
|
||||
Self::Key(err) => write!(f, "Key error: {}", err),
|
||||
Self::ChecksumMismatch => write!(f, "Descriptor checksum mismatch"),
|
||||
Self::SpendingPolicyRequired(keychain_kind) => {
|
||||
write!(f, "Spending policy required: {:?}", keychain_kind)
|
||||
}
|
||||
Self::InvalidPolicyPathError(err) => write!(f, "Invalid policy path: {}", err),
|
||||
Self::Signer(err) => write!(f, "Signer error: {}", err),
|
||||
Self::InvalidOutpoint(outpoint) => write!(
|
||||
f,
|
||||
"Requested outpoint doesn't exist in the tx: {}",
|
||||
outpoint
|
||||
),
|
||||
Self::Descriptor(err) => write!(f, "Descriptor error: {}", err),
|
||||
Self::Miniscript(err) => write!(f, "Miniscript error: {}", err),
|
||||
Self::MiniscriptPsbt(err) => write!(f, "Miniscript PSBT error: {}", err),
|
||||
Self::Bip32(err) => write!(f, "BIP32 error: {}", err),
|
||||
Self::Psbt(err) => write!(f, "PSBT error: {}", err),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
#[cfg(feature = "std")]
|
||||
impl std::error::Error for Error {}
|
||||
|
||||
macro_rules! impl_error {
|
||||
( $from:ty, $to:ident ) => {
|
||||
impl_error!($from, $to, Error);
|
||||
};
|
||||
( $from:ty, $to:ident, $impl_for:ty ) => {
|
||||
impl core::convert::From<$from> for $impl_for {
|
||||
fn from(err: $from) -> Self {
|
||||
<$impl_for>::$to(err)
|
||||
}
|
||||
}
|
||||
};
|
||||
}
|
||||
|
||||
impl_error!(descriptor::error::Error, Descriptor);
|
||||
impl_error!(descriptor::policy::PolicyError, InvalidPolicyPathError);
|
||||
impl_error!(wallet::signer::SignerError, Signer);
|
||||
|
||||
impl From<crate::keys::KeyError> for Error {
|
||||
fn from(key_error: crate::keys::KeyError) -> Error {
|
||||
match key_error {
|
||||
crate::keys::KeyError::Miniscript(inner) => Error::Miniscript(inner),
|
||||
crate::keys::KeyError::Bip32(inner) => Error::Bip32(inner),
|
||||
crate::keys::KeyError::InvalidChecksum => Error::ChecksumMismatch,
|
||||
e => Error::Key(e),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl_error!(miniscript::Error, Miniscript);
|
||||
impl_error!(MiniscriptPsbtError, MiniscriptPsbt);
|
||||
impl_error!(bitcoin::util::bip32::Error, Bip32);
|
||||
impl_error!(bitcoin::util::psbt::Error, Psbt);
|
||||
227
crates/bdk/src/keys/bip39.rs
Normal file
227
crates/bdk/src/keys/bip39.rs
Normal file
@@ -0,0 +1,227 @@
|
||||
// Bitcoin Dev Kit
|
||||
// Written in 2020 by Alekos Filini <alekos.filini@gmail.com>
|
||||
//
|
||||
// Copyright (c) 2020-2021 Bitcoin Dev Kit Developers
|
||||
//
|
||||
// This file is licensed under the Apache License, Version 2.0 <LICENSE-APACHE
|
||||
// or http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
|
||||
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your option.
|
||||
// You may not use this file except in accordance with one or both of these
|
||||
// licenses.
|
||||
|
||||
//! BIP-0039
|
||||
|
||||
// TODO: maybe write our own implementation of bip39? Seems stupid to have an extra dependency for
|
||||
// something that should be fairly simple to re-implement.
|
||||
|
||||
use alloc::string::String;
|
||||
use bitcoin::util::bip32;
|
||||
use bitcoin::Network;
|
||||
|
||||
use miniscript::ScriptContext;
|
||||
|
||||
pub use bip39::{Error, Language, Mnemonic};
|
||||
|
||||
type Seed = [u8; 64];
|
||||
|
||||
/// Type describing entropy length (aka word count) in the mnemonic
|
||||
pub enum WordCount {
|
||||
/// 12 words mnemonic (128 bits entropy)
|
||||
Words12 = 128,
|
||||
/// 15 words mnemonic (160 bits entropy)
|
||||
Words15 = 160,
|
||||
/// 18 words mnemonic (192 bits entropy)
|
||||
Words18 = 192,
|
||||
/// 21 words mnemonic (224 bits entropy)
|
||||
Words21 = 224,
|
||||
/// 24 words mnemonic (256 bits entropy)
|
||||
Words24 = 256,
|
||||
}
|
||||
|
||||
use super::{
|
||||
any_network, DerivableKey, DescriptorKey, ExtendedKey, GeneratableKey, GeneratedKey, KeyError,
|
||||
};
|
||||
|
||||
fn set_valid_on_any_network<Ctx: ScriptContext>(
|
||||
descriptor_key: DescriptorKey<Ctx>,
|
||||
) -> DescriptorKey<Ctx> {
|
||||
// We have to pick one network to build the xprv, but since the bip39 standard doesn't
|
||||
// encode the network, the xprv we create is actually valid everywhere. So we override the
|
||||
// valid networks with `any_network()`.
|
||||
descriptor_key.override_valid_networks(any_network())
|
||||
}
|
||||
|
||||
/// Type for a BIP39 mnemonic with an optional passphrase
|
||||
pub type MnemonicWithPassphrase = (Mnemonic, Option<String>);
|
||||
|
||||
#[cfg_attr(docsrs, doc(cfg(feature = "keys-bip39")))]
|
||||
impl<Ctx: ScriptContext> DerivableKey<Ctx> for Seed {
|
||||
fn into_extended_key(self) -> Result<ExtendedKey<Ctx>, KeyError> {
|
||||
Ok(bip32::ExtendedPrivKey::new_master(Network::Bitcoin, &self[..])?.into())
|
||||
}
|
||||
|
||||
fn into_descriptor_key(
|
||||
self,
|
||||
source: Option<bip32::KeySource>,
|
||||
derivation_path: bip32::DerivationPath,
|
||||
) -> Result<DescriptorKey<Ctx>, KeyError> {
|
||||
let descriptor_key = self
|
||||
.into_extended_key()?
|
||||
.into_descriptor_key(source, derivation_path)?;
|
||||
|
||||
Ok(set_valid_on_any_network(descriptor_key))
|
||||
}
|
||||
}
|
||||
|
||||
#[cfg_attr(docsrs, doc(cfg(feature = "keys-bip39")))]
|
||||
impl<Ctx: ScriptContext> DerivableKey<Ctx> for MnemonicWithPassphrase {
|
||||
fn into_extended_key(self) -> Result<ExtendedKey<Ctx>, KeyError> {
|
||||
let (mnemonic, passphrase) = self;
|
||||
let seed: Seed = mnemonic.to_seed(passphrase.as_deref().unwrap_or(""));
|
||||
|
||||
seed.into_extended_key()
|
||||
}
|
||||
|
||||
fn into_descriptor_key(
|
||||
self,
|
||||
source: Option<bip32::KeySource>,
|
||||
derivation_path: bip32::DerivationPath,
|
||||
) -> Result<DescriptorKey<Ctx>, KeyError> {
|
||||
let descriptor_key = self
|
||||
.into_extended_key()?
|
||||
.into_descriptor_key(source, derivation_path)?;
|
||||
|
||||
Ok(set_valid_on_any_network(descriptor_key))
|
||||
}
|
||||
}
|
||||
|
||||
#[cfg_attr(docsrs, doc(cfg(feature = "keys-bip39")))]
|
||||
impl<Ctx: ScriptContext> DerivableKey<Ctx> for (GeneratedKey<Mnemonic, Ctx>, Option<String>) {
|
||||
fn into_extended_key(self) -> Result<ExtendedKey<Ctx>, KeyError> {
|
||||
let (mnemonic, passphrase) = self;
|
||||
(mnemonic.into_key(), passphrase).into_extended_key()
|
||||
}
|
||||
|
||||
fn into_descriptor_key(
|
||||
self,
|
||||
source: Option<bip32::KeySource>,
|
||||
derivation_path: bip32::DerivationPath,
|
||||
) -> Result<DescriptorKey<Ctx>, KeyError> {
|
||||
let (mnemonic, passphrase) = self;
|
||||
(mnemonic.into_key(), passphrase).into_descriptor_key(source, derivation_path)
|
||||
}
|
||||
}
|
||||
|
||||
#[cfg_attr(docsrs, doc(cfg(feature = "keys-bip39")))]
|
||||
impl<Ctx: ScriptContext> DerivableKey<Ctx> for Mnemonic {
|
||||
fn into_extended_key(self) -> Result<ExtendedKey<Ctx>, KeyError> {
|
||||
(self, None).into_extended_key()
|
||||
}
|
||||
|
||||
fn into_descriptor_key(
|
||||
self,
|
||||
source: Option<bip32::KeySource>,
|
||||
derivation_path: bip32::DerivationPath,
|
||||
) -> Result<DescriptorKey<Ctx>, KeyError> {
|
||||
let descriptor_key = self
|
||||
.into_extended_key()?
|
||||
.into_descriptor_key(source, derivation_path)?;
|
||||
|
||||
Ok(set_valid_on_any_network(descriptor_key))
|
||||
}
|
||||
}
|
||||
|
||||
#[cfg_attr(docsrs, doc(cfg(feature = "keys-bip39")))]
|
||||
impl<Ctx: ScriptContext> GeneratableKey<Ctx> for Mnemonic {
|
||||
type Entropy = [u8; 32];
|
||||
|
||||
type Options = (WordCount, Language);
|
||||
type Error = Option<bip39::Error>;
|
||||
|
||||
fn generate_with_entropy(
|
||||
(word_count, language): Self::Options,
|
||||
entropy: Self::Entropy,
|
||||
) -> Result<GeneratedKey<Self, Ctx>, Self::Error> {
|
||||
let entropy = &entropy.as_ref()[..(word_count as usize / 8)];
|
||||
let mnemonic = Mnemonic::from_entropy_in(language, entropy)?;
|
||||
|
||||
Ok(GeneratedKey::new(mnemonic, any_network()))
|
||||
}
|
||||
}
|
||||
|
||||
#[cfg(test)]
|
||||
mod test {
|
||||
use alloc::string::ToString;
|
||||
use core::str::FromStr;
|
||||
|
||||
use bitcoin::util::bip32;
|
||||
|
||||
use bip39::{Language, Mnemonic};
|
||||
|
||||
use crate::keys::{any_network, GeneratableKey, GeneratedKey};
|
||||
|
||||
use super::WordCount;
|
||||
|
||||
#[test]
|
||||
fn test_keys_bip39_mnemonic() {
|
||||
let mnemonic =
|
||||
"aim bunker wash balance finish force paper analyst cabin spoon stable organ";
|
||||
let mnemonic = Mnemonic::parse_in(Language::English, mnemonic).unwrap();
|
||||
let path = bip32::DerivationPath::from_str("m/44'/0'/0'/0").unwrap();
|
||||
|
||||
let key = (mnemonic, path);
|
||||
let (desc, keys, networks) = crate::descriptor!(wpkh(key)).unwrap();
|
||||
assert_eq!(desc.to_string(), "wpkh([be83839f/44'/0'/0']xpub6DCQ1YcqvZtSwGWMrwHELPehjWV3f2MGZ69yBADTxFEUAoLwb5Mp5GniQK6tTp3AgbngVz9zEFbBJUPVnkG7LFYt8QMTfbrNqs6FNEwAPKA/0/*)#0r8v4nkv");
|
||||
assert_eq!(keys.len(), 1);
|
||||
assert_eq!(networks.len(), 4);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_keys_bip39_mnemonic_passphrase() {
|
||||
let mnemonic =
|
||||
"aim bunker wash balance finish force paper analyst cabin spoon stable organ";
|
||||
let mnemonic = Mnemonic::parse_in(Language::English, mnemonic).unwrap();
|
||||
let path = bip32::DerivationPath::from_str("m/44'/0'/0'/0").unwrap();
|
||||
|
||||
let key = ((mnemonic, Some("passphrase".into())), path);
|
||||
let (desc, keys, networks) = crate::descriptor!(wpkh(key)).unwrap();
|
||||
assert_eq!(desc.to_string(), "wpkh([8f6cb80c/44'/0'/0']xpub6DWYS8bbihFevy29M4cbw4ZR3P5E12jB8R88gBDWCTCNpYiDHhYWNywrCF9VZQYagzPmsZpxXpytzSoxynyeFr4ZyzheVjnpLKuse4fiwZw/0/*)#h0j0tg5m");
|
||||
assert_eq!(keys.len(), 1);
|
||||
assert_eq!(networks.len(), 4);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_keys_generate_bip39() {
|
||||
let generated_mnemonic: GeneratedKey<_, miniscript::Segwitv0> =
|
||||
Mnemonic::generate_with_entropy(
|
||||
(WordCount::Words12, Language::English),
|
||||
crate::keys::test::TEST_ENTROPY,
|
||||
)
|
||||
.unwrap();
|
||||
assert_eq!(generated_mnemonic.valid_networks, any_network());
|
||||
assert_eq!(
|
||||
generated_mnemonic.to_string(),
|
||||
"primary fetch primary fetch primary fetch primary fetch primary fetch primary fever"
|
||||
);
|
||||
|
||||
let generated_mnemonic: GeneratedKey<_, miniscript::Segwitv0> =
|
||||
Mnemonic::generate_with_entropy(
|
||||
(WordCount::Words24, Language::English),
|
||||
crate::keys::test::TEST_ENTROPY,
|
||||
)
|
||||
.unwrap();
|
||||
assert_eq!(generated_mnemonic.valid_networks, any_network());
|
||||
assert_eq!(generated_mnemonic.to_string(), "primary fetch primary fetch primary fetch primary fetch primary fetch primary fetch primary fetch primary fetch primary fetch primary fetch primary fetch primary foster");
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_keys_generate_bip39_random() {
|
||||
let generated_mnemonic: GeneratedKey<_, miniscript::Segwitv0> =
|
||||
Mnemonic::generate((WordCount::Words12, Language::English)).unwrap();
|
||||
assert_eq!(generated_mnemonic.valid_networks, any_network());
|
||||
|
||||
let generated_mnemonic: GeneratedKey<_, miniscript::Segwitv0> =
|
||||
Mnemonic::generate((WordCount::Words24, Language::English)).unwrap();
|
||||
assert_eq!(generated_mnemonic.valid_networks, any_network());
|
||||
}
|
||||
}
|
||||
@@ -1,48 +1,38 @@
|
||||
// Magical Bitcoin Library
|
||||
// Written in 2020 by
|
||||
// Alekos Filini <alekos.filini@gmail.com>
|
||||
// Bitcoin Dev Kit
|
||||
// Written in 2020 by Alekos Filini <alekos.filini@gmail.com>
|
||||
//
|
||||
// Copyright (c) 2020 Magical Bitcoin
|
||||
// Copyright (c) 2020-2021 Bitcoin Dev Kit Developers
|
||||
//
|
||||
// Permission is hereby granted, free of charge, to any person obtaining a copy
|
||||
// of this software and associated documentation files (the "Software"), to deal
|
||||
// in the Software without restriction, including without limitation the rights
|
||||
// to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
|
||||
// copies of the Software, and to permit persons to whom the Software is
|
||||
// furnished to do so, subject to the following conditions:
|
||||
//
|
||||
// The above copyright notice and this permission notice shall be included in all
|
||||
// copies or substantial portions of the Software.
|
||||
//
|
||||
// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
|
||||
// IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
|
||||
// FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
|
||||
// AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
|
||||
// LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
|
||||
// OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
|
||||
// SOFTWARE.
|
||||
// This file is licensed under the Apache License, Version 2.0 <LICENSE-APACHE
|
||||
// or http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
|
||||
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your option.
|
||||
// You may not use this file except in accordance with one or both of these
|
||||
// licenses.
|
||||
|
||||
//! Key formats
|
||||
|
||||
use std::any::TypeId;
|
||||
use std::collections::HashSet;
|
||||
use std::marker::PhantomData;
|
||||
use std::ops::Deref;
|
||||
use std::str::FromStr;
|
||||
use crate::collections::HashSet;
|
||||
use alloc::string::{String, ToString};
|
||||
use alloc::vec::Vec;
|
||||
use core::any::TypeId;
|
||||
use core::marker::PhantomData;
|
||||
use core::ops::Deref;
|
||||
use core::str::FromStr;
|
||||
|
||||
use bitcoin::secp256k1;
|
||||
use bitcoin::secp256k1::{self, Secp256k1, Signing};
|
||||
|
||||
use bitcoin::util::bip32;
|
||||
use bitcoin::{Network, PrivateKey, PublicKey};
|
||||
use bitcoin::{Network, PrivateKey, PublicKey, XOnlyPublicKey};
|
||||
|
||||
use miniscript::descriptor::{Descriptor, DescriptorXKey, Wildcard};
|
||||
pub use miniscript::descriptor::{
|
||||
DescriptorPublicKey, DescriptorSecretKey, DescriptorSinglePriv, DescriptorSinglePub,
|
||||
DescriptorPublicKey, DescriptorSecretKey, KeyMap, SinglePriv, SinglePub, SinglePubKey,
|
||||
SortedMultiVec,
|
||||
};
|
||||
use miniscript::descriptor::{DescriptorXKey, KeyMap};
|
||||
pub use miniscript::ScriptContext;
|
||||
use miniscript::{Miniscript, Terminal};
|
||||
|
||||
use crate::descriptor::{CheckMiniscript, DescriptorError};
|
||||
use crate::wallet::utils::SecpCtx;
|
||||
|
||||
#[cfg(feature = "keys-bip39")]
|
||||
@@ -52,11 +42,16 @@ pub mod bip39;
|
||||
/// Set of valid networks for a key
|
||||
pub type ValidNetworks = HashSet<Network>;
|
||||
|
||||
/// Create a set containing mainnet, testnet and regtest
|
||||
/// Create a set containing mainnet, testnet, signet, and regtest
|
||||
pub fn any_network() -> ValidNetworks {
|
||||
vec![Network::Bitcoin, Network::Testnet, Network::Regtest]
|
||||
.into_iter()
|
||||
.collect()
|
||||
vec![
|
||||
Network::Bitcoin,
|
||||
Network::Testnet,
|
||||
Network::Regtest,
|
||||
Network::Signet,
|
||||
]
|
||||
.into_iter()
|
||||
.collect()
|
||||
}
|
||||
/// Create a set only containing mainnet
|
||||
pub fn mainnet_network() -> ValidNetworks {
|
||||
@@ -64,7 +59,7 @@ pub fn mainnet_network() -> ValidNetworks {
|
||||
}
|
||||
/// Create a set containing testnet and regtest
|
||||
pub fn test_networks() -> ValidNetworks {
|
||||
vec![Network::Testnet, Network::Regtest]
|
||||
vec![Network::Testnet, Network::Regtest, Network::Signet]
|
||||
.into_iter()
|
||||
.collect()
|
||||
}
|
||||
@@ -102,7 +97,7 @@ impl<Ctx: ScriptContext> DescriptorKey<Ctx> {
|
||||
}
|
||||
|
||||
// This method is used internally by `bdk::fragment!` and `bdk::descriptor!`. It has to be
|
||||
// public because it is effectively called by external crates, once the macros are expanded,
|
||||
// public because it is effectively called by external crates once the macros are expanded,
|
||||
// but since it is not meant to be part of the public api we hide it from the docs.
|
||||
#[doc(hidden)]
|
||||
pub fn extract(
|
||||
@@ -117,7 +112,7 @@ impl<Ctx: ScriptContext> DescriptorKey<Ctx> {
|
||||
let mut key_map = KeyMap::with_capacity(1);
|
||||
|
||||
let public = secret
|
||||
.as_public(secp)
|
||||
.to_public(secp)
|
||||
.map_err(|e| miniscript::Error::Unexpected(e.to_string()))?;
|
||||
key_map.insert(public.clone(), secret);
|
||||
|
||||
@@ -134,6 +129,8 @@ pub enum ScriptContextEnum {
|
||||
Legacy,
|
||||
/// Segwitv0 scripts
|
||||
Segwitv0,
|
||||
/// Taproot scripts
|
||||
Tap,
|
||||
}
|
||||
|
||||
impl ScriptContextEnum {
|
||||
@@ -146,6 +143,11 @@ impl ScriptContextEnum {
|
||||
pub fn is_segwit_v0(&self) -> bool {
|
||||
self == &ScriptContextEnum::Segwitv0
|
||||
}
|
||||
|
||||
/// Returns whether the script context is [`ScriptContextEnum::Tap`]
|
||||
pub fn is_taproot(&self) -> bool {
|
||||
self == &ScriptContextEnum::Tap
|
||||
}
|
||||
}
|
||||
|
||||
/// Trait that adds extra useful methods to [`ScriptContext`]s
|
||||
@@ -162,6 +164,11 @@ pub trait ExtScriptContext: ScriptContext {
|
||||
fn is_segwit_v0() -> bool {
|
||||
Self::as_enum().is_segwit_v0()
|
||||
}
|
||||
|
||||
/// Returns whether the script context is [`Tap`](miniscript::Tap), aka Taproot or Segwit V1
|
||||
fn is_taproot() -> bool {
|
||||
Self::as_enum().is_taproot()
|
||||
}
|
||||
}
|
||||
|
||||
impl<Ctx: ScriptContext + 'static> ExtScriptContext for Ctx {
|
||||
@@ -169,6 +176,7 @@ impl<Ctx: ScriptContext + 'static> ExtScriptContext for Ctx {
|
||||
match TypeId::of::<Ctx>() {
|
||||
t if t == TypeId::of::<miniscript::Legacy>() => ScriptContextEnum::Legacy,
|
||||
t if t == TypeId::of::<miniscript::Segwitv0>() => ScriptContextEnum::Segwitv0,
|
||||
t if t == TypeId::of::<miniscript::Tap>() => ScriptContextEnum::Tap,
|
||||
_ => unimplemented!("Unknown ScriptContext type"),
|
||||
}
|
||||
}
|
||||
@@ -199,15 +207,15 @@ impl<Ctx: ScriptContext + 'static> ExtScriptContext for Ctx {
|
||||
/// ```
|
||||
/// use bdk::bitcoin::PublicKey;
|
||||
///
|
||||
/// use bdk::keys::{DescriptorKey, KeyError, ScriptContext, ToDescriptorKey};
|
||||
/// use bdk::keys::{DescriptorKey, IntoDescriptorKey, KeyError, ScriptContext};
|
||||
///
|
||||
/// pub struct MyKeyType {
|
||||
/// pubkey: PublicKey,
|
||||
/// }
|
||||
///
|
||||
/// impl<Ctx: ScriptContext> ToDescriptorKey<Ctx> for MyKeyType {
|
||||
/// fn to_descriptor_key(self) -> Result<DescriptorKey<Ctx>, KeyError> {
|
||||
/// self.pubkey.to_descriptor_key()
|
||||
/// impl<Ctx: ScriptContext> IntoDescriptorKey<Ctx> for MyKeyType {
|
||||
/// fn into_descriptor_key(self) -> Result<DescriptorKey<Ctx>, KeyError> {
|
||||
/// self.pubkey.into_descriptor_key()
|
||||
/// }
|
||||
/// }
|
||||
/// ```
|
||||
@@ -218,20 +226,20 @@ impl<Ctx: ScriptContext + 'static> ExtScriptContext for Ctx {
|
||||
/// use bdk::bitcoin::PublicKey;
|
||||
///
|
||||
/// use bdk::keys::{
|
||||
/// mainnet_network, DescriptorKey, DescriptorPublicKey, DescriptorSinglePub, KeyError,
|
||||
/// ScriptContext, ToDescriptorKey,
|
||||
/// mainnet_network, DescriptorKey, DescriptorPublicKey, IntoDescriptorKey, KeyError,
|
||||
/// ScriptContext, SinglePub, SinglePubKey,
|
||||
/// };
|
||||
///
|
||||
/// pub struct MyKeyType {
|
||||
/// pubkey: PublicKey,
|
||||
/// }
|
||||
///
|
||||
/// impl<Ctx: ScriptContext> ToDescriptorKey<Ctx> for MyKeyType {
|
||||
/// fn to_descriptor_key(self) -> Result<DescriptorKey<Ctx>, KeyError> {
|
||||
/// impl<Ctx: ScriptContext> IntoDescriptorKey<Ctx> for MyKeyType {
|
||||
/// fn into_descriptor_key(self) -> Result<DescriptorKey<Ctx>, KeyError> {
|
||||
/// Ok(DescriptorKey::from_public(
|
||||
/// DescriptorPublicKey::SinglePub(DescriptorSinglePub {
|
||||
/// DescriptorPublicKey::Single(SinglePub {
|
||||
/// origin: None,
|
||||
/// key: self.pubkey,
|
||||
/// key: SinglePubKey::FullKey(self.pubkey),
|
||||
/// }),
|
||||
/// mainnet_network(),
|
||||
/// ))
|
||||
@@ -244,17 +252,17 @@ impl<Ctx: ScriptContext + 'static> ExtScriptContext for Ctx {
|
||||
/// ```
|
||||
/// use bdk::bitcoin::PublicKey;
|
||||
///
|
||||
/// use bdk::keys::{DescriptorKey, ExtScriptContext, KeyError, ScriptContext, ToDescriptorKey};
|
||||
/// use bdk::keys::{DescriptorKey, ExtScriptContext, IntoDescriptorKey, KeyError, ScriptContext};
|
||||
///
|
||||
/// pub struct MyKeyType {
|
||||
/// is_legacy: bool,
|
||||
/// pubkey: PublicKey,
|
||||
/// }
|
||||
///
|
||||
/// impl<Ctx: ScriptContext + 'static> ToDescriptorKey<Ctx> for MyKeyType {
|
||||
/// fn to_descriptor_key(self) -> Result<DescriptorKey<Ctx>, KeyError> {
|
||||
/// impl<Ctx: ScriptContext + 'static> IntoDescriptorKey<Ctx> for MyKeyType {
|
||||
/// fn into_descriptor_key(self) -> Result<DescriptorKey<Ctx>, KeyError> {
|
||||
/// if Ctx::is_legacy() == self.is_legacy {
|
||||
/// self.pubkey.to_descriptor_key()
|
||||
/// self.pubkey.into_descriptor_key()
|
||||
/// } else {
|
||||
/// Err(KeyError::InvalidScriptContext)
|
||||
/// }
|
||||
@@ -271,17 +279,17 @@ impl<Ctx: ScriptContext + 'static> ExtScriptContext for Ctx {
|
||||
///
|
||||
/// ```compile_fail
|
||||
/// use bdk::bitcoin::PublicKey;
|
||||
/// use std::str::FromStr;
|
||||
/// use core::str::FromStr;
|
||||
///
|
||||
/// use bdk::keys::{DescriptorKey, KeyError, ToDescriptorKey};
|
||||
/// use bdk::keys::{DescriptorKey, IntoDescriptorKey, KeyError};
|
||||
///
|
||||
/// pub struct MySegwitOnlyKeyType {
|
||||
/// pubkey: PublicKey,
|
||||
/// }
|
||||
///
|
||||
/// impl ToDescriptorKey<bdk::miniscript::Segwitv0> for MySegwitOnlyKeyType {
|
||||
/// fn to_descriptor_key(self) -> Result<DescriptorKey<bdk::miniscript::Segwitv0>, KeyError> {
|
||||
/// self.pubkey.to_descriptor_key()
|
||||
/// impl IntoDescriptorKey<bdk::miniscript::Segwitv0> for MySegwitOnlyKeyType {
|
||||
/// fn into_descriptor_key(self) -> Result<DescriptorKey<bdk::miniscript::Segwitv0>, KeyError> {
|
||||
/// self.pubkey.into_descriptor_key()
|
||||
/// }
|
||||
/// }
|
||||
///
|
||||
@@ -293,61 +301,233 @@ impl<Ctx: ScriptContext + 'static> ExtScriptContext for Ctx {
|
||||
///
|
||||
/// # Ok::<_, Box<dyn std::error::Error>>(())
|
||||
/// ```
|
||||
pub trait ToDescriptorKey<Ctx: ScriptContext>: Sized {
|
||||
pub trait IntoDescriptorKey<Ctx: ScriptContext>: Sized {
|
||||
/// Turn the key into a [`DescriptorKey`] within the requested [`ScriptContext`]
|
||||
fn to_descriptor_key(self) -> Result<DescriptorKey<Ctx>, KeyError>;
|
||||
fn into_descriptor_key(self) -> Result<DescriptorKey<Ctx>, KeyError>;
|
||||
}
|
||||
|
||||
/// Enum for extended keys that can be either `xprv` or `xpub`
|
||||
///
|
||||
/// An instance of [`ExtendedKey`] can be constructed from an [`ExtendedPrivKey`](bip32::ExtendedPrivKey)
|
||||
/// or an [`ExtendedPubKey`](bip32::ExtendedPubKey) by using the `From` trait.
|
||||
///
|
||||
/// Defaults to the [`Legacy`](miniscript::Legacy) context.
|
||||
pub enum ExtendedKey<Ctx: ScriptContext = miniscript::Legacy> {
|
||||
/// A private extended key, aka an `xprv`
|
||||
Private((bip32::ExtendedPrivKey, PhantomData<Ctx>)),
|
||||
/// A public extended key, aka an `xpub`
|
||||
Public((bip32::ExtendedPubKey, PhantomData<Ctx>)),
|
||||
}
|
||||
|
||||
impl<Ctx: ScriptContext> ExtendedKey<Ctx> {
|
||||
/// Return whether or not the key contains the private data
|
||||
pub fn has_secret(&self) -> bool {
|
||||
match self {
|
||||
ExtendedKey::Private(_) => true,
|
||||
ExtendedKey::Public(_) => false,
|
||||
}
|
||||
}
|
||||
|
||||
/// Transform the [`ExtendedKey`] into an [`ExtendedPrivKey`](bip32::ExtendedPrivKey) for the
|
||||
/// given [`Network`], if the key contains the private data
|
||||
pub fn into_xprv(self, network: Network) -> Option<bip32::ExtendedPrivKey> {
|
||||
match self {
|
||||
ExtendedKey::Private((mut xprv, _)) => {
|
||||
xprv.network = network;
|
||||
Some(xprv)
|
||||
}
|
||||
ExtendedKey::Public(_) => None,
|
||||
}
|
||||
}
|
||||
|
||||
/// Transform the [`ExtendedKey`] into an [`ExtendedPubKey`](bip32::ExtendedPubKey) for the
|
||||
/// given [`Network`]
|
||||
pub fn into_xpub<C: Signing>(
|
||||
self,
|
||||
network: bitcoin::Network,
|
||||
secp: &Secp256k1<C>,
|
||||
) -> bip32::ExtendedPubKey {
|
||||
let mut xpub = match self {
|
||||
ExtendedKey::Private((xprv, _)) => bip32::ExtendedPubKey::from_priv(secp, &xprv),
|
||||
ExtendedKey::Public((xpub, _)) => xpub,
|
||||
};
|
||||
|
||||
xpub.network = network;
|
||||
xpub
|
||||
}
|
||||
}
|
||||
|
||||
impl<Ctx: ScriptContext> From<bip32::ExtendedPubKey> for ExtendedKey<Ctx> {
|
||||
fn from(xpub: bip32::ExtendedPubKey) -> Self {
|
||||
ExtendedKey::Public((xpub, PhantomData))
|
||||
}
|
||||
}
|
||||
|
||||
impl<Ctx: ScriptContext> From<bip32::ExtendedPrivKey> for ExtendedKey<Ctx> {
|
||||
fn from(xprv: bip32::ExtendedPrivKey) -> Self {
|
||||
ExtendedKey::Private((xprv, PhantomData))
|
||||
}
|
||||
}
|
||||
|
||||
/// Trait for keys that can be derived.
|
||||
///
|
||||
/// When extra metadata are provided, a [`DerivableKey`] can be transofrmed into a
|
||||
/// [`DescriptorKey`]: the trait [`ToDescriptorKey`] is automatically implemented
|
||||
/// When extra metadata are provided, a [`DerivableKey`] can be transformed into a
|
||||
/// [`DescriptorKey`]: the trait [`IntoDescriptorKey`] is automatically implemented
|
||||
/// for `(DerivableKey, DerivationPath)` and
|
||||
/// `(DerivableKey, KeySource, DerivationPath)` tuples.
|
||||
///
|
||||
/// For key types that don't encode any indication about the path to use (like bip39), it's
|
||||
/// generally recommended to implemented this trait instead of [`ToDescriptorKey`]. The same
|
||||
/// generally recommended to implement this trait instead of [`IntoDescriptorKey`]. The same
|
||||
/// rules regarding script context and valid networks apply.
|
||||
///
|
||||
/// ## Examples
|
||||
///
|
||||
/// Key types that can be directly converted into an [`ExtendedPrivKey`] or
|
||||
/// an [`ExtendedPubKey`] can implement only the required `into_extended_key()` method.
|
||||
///
|
||||
/// ```
|
||||
/// use bdk::bitcoin;
|
||||
/// use bdk::bitcoin::util::bip32;
|
||||
/// use bdk::keys::{DerivableKey, ExtendedKey, KeyError, ScriptContext};
|
||||
///
|
||||
/// struct MyCustomKeyType {
|
||||
/// key_data: bitcoin::PrivateKey,
|
||||
/// chain_code: Vec<u8>,
|
||||
/// network: bitcoin::Network,
|
||||
/// }
|
||||
///
|
||||
/// impl<Ctx: ScriptContext> DerivableKey<Ctx> for MyCustomKeyType {
|
||||
/// fn into_extended_key(self) -> Result<ExtendedKey<Ctx>, KeyError> {
|
||||
/// let xprv = bip32::ExtendedPrivKey {
|
||||
/// network: self.network,
|
||||
/// depth: 0,
|
||||
/// parent_fingerprint: bip32::Fingerprint::default(),
|
||||
/// private_key: self.key_data.inner,
|
||||
/// chain_code: bip32::ChainCode::from(self.chain_code.as_ref()),
|
||||
/// child_number: bip32::ChildNumber::Normal { index: 0 },
|
||||
/// };
|
||||
///
|
||||
/// xprv.into_extended_key()
|
||||
/// }
|
||||
/// }
|
||||
/// ```
|
||||
///
|
||||
/// Types that don't internally encode the [`Network`](bitcoin::Network) in which they are valid need some extra
|
||||
/// steps to override the set of valid networks, otherwise only the network specified in the
|
||||
/// [`ExtendedPrivKey`] or [`ExtendedPubKey`] will be considered valid.
|
||||
///
|
||||
/// ```
|
||||
/// use bdk::bitcoin;
|
||||
/// use bdk::bitcoin::util::bip32;
|
||||
/// use bdk::keys::{
|
||||
/// any_network, DerivableKey, DescriptorKey, ExtendedKey, KeyError, ScriptContext,
|
||||
/// };
|
||||
///
|
||||
/// struct MyCustomKeyType {
|
||||
/// key_data: bitcoin::PrivateKey,
|
||||
/// chain_code: Vec<u8>,
|
||||
/// }
|
||||
///
|
||||
/// impl<Ctx: ScriptContext> DerivableKey<Ctx> for MyCustomKeyType {
|
||||
/// fn into_extended_key(self) -> Result<ExtendedKey<Ctx>, KeyError> {
|
||||
/// let xprv = bip32::ExtendedPrivKey {
|
||||
/// network: bitcoin::Network::Bitcoin, // pick an arbitrary network here
|
||||
/// depth: 0,
|
||||
/// parent_fingerprint: bip32::Fingerprint::default(),
|
||||
/// private_key: self.key_data.inner,
|
||||
/// chain_code: bip32::ChainCode::from(self.chain_code.as_ref()),
|
||||
/// child_number: bip32::ChildNumber::Normal { index: 0 },
|
||||
/// };
|
||||
///
|
||||
/// xprv.into_extended_key()
|
||||
/// }
|
||||
///
|
||||
/// fn into_descriptor_key(
|
||||
/// self,
|
||||
/// source: Option<bip32::KeySource>,
|
||||
/// derivation_path: bip32::DerivationPath,
|
||||
/// ) -> Result<DescriptorKey<Ctx>, KeyError> {
|
||||
/// let descriptor_key = self
|
||||
/// .into_extended_key()?
|
||||
/// .into_descriptor_key(source, derivation_path)?;
|
||||
///
|
||||
/// // Override the set of valid networks here
|
||||
/// Ok(descriptor_key.override_valid_networks(any_network()))
|
||||
/// }
|
||||
/// }
|
||||
/// ```
|
||||
///
|
||||
/// [`DerivationPath`]: (bip32::DerivationPath)
|
||||
pub trait DerivableKey<Ctx: ScriptContext> {
|
||||
/// Add a extra metadata, consume `self` and turn it into a [`DescriptorKey`]
|
||||
fn add_metadata(
|
||||
self,
|
||||
origin: Option<bip32::KeySource>,
|
||||
derivation_path: bip32::DerivationPath,
|
||||
) -> Result<DescriptorKey<Ctx>, KeyError>;
|
||||
}
|
||||
/// [`ExtendedPrivKey`]: (bip32::ExtendedPrivKey)
|
||||
/// [`ExtendedPubKey`]: (bip32::ExtendedPubKey)
|
||||
pub trait DerivableKey<Ctx: ScriptContext = miniscript::Legacy>: Sized {
|
||||
/// Consume `self` and turn it into an [`ExtendedKey`]
|
||||
#[cfg_attr(
|
||||
feature = "keys-bip39",
|
||||
doc = r##"
|
||||
This can be used to get direct access to `xprv`s and `xpub`s for types that implement this trait,
|
||||
like [`Mnemonic`](bip39::Mnemonic) when the `keys-bip39` feature is enabled.
|
||||
```rust
|
||||
use bdk::bitcoin::Network;
|
||||
use bdk::keys::{DerivableKey, ExtendedKey};
|
||||
use bdk::keys::bip39::{Mnemonic, Language};
|
||||
|
||||
impl<Ctx: ScriptContext> DerivableKey<Ctx> for bip32::ExtendedPubKey {
|
||||
fn add_metadata(
|
||||
# fn main() -> Result<(), Box<dyn std::error::Error>> {
|
||||
let xkey: ExtendedKey =
|
||||
Mnemonic::parse_in(
|
||||
Language::English,
|
||||
"jelly crash boy whisper mouse ecology tuna soccer memory million news short",
|
||||
)?
|
||||
.into_extended_key()?;
|
||||
let xprv = xkey.into_xprv(Network::Bitcoin).unwrap();
|
||||
# Ok(()) }
|
||||
```
|
||||
"##
|
||||
)]
|
||||
fn into_extended_key(self) -> Result<ExtendedKey<Ctx>, KeyError>;
|
||||
|
||||
/// Consume `self` and turn it into a [`DescriptorKey`] by adding the extra metadata, such as
|
||||
/// key origin and derivation path
|
||||
fn into_descriptor_key(
|
||||
self,
|
||||
origin: Option<bip32::KeySource>,
|
||||
derivation_path: bip32::DerivationPath,
|
||||
) -> Result<DescriptorKey<Ctx>, KeyError> {
|
||||
DescriptorPublicKey::XPub(DescriptorXKey {
|
||||
origin,
|
||||
xkey: self,
|
||||
derivation_path,
|
||||
is_wildcard: true,
|
||||
})
|
||||
.to_descriptor_key()
|
||||
match self.into_extended_key()? {
|
||||
ExtendedKey::Private((xprv, _)) => DescriptorSecretKey::XPrv(DescriptorXKey {
|
||||
origin,
|
||||
xkey: xprv,
|
||||
derivation_path,
|
||||
wildcard: Wildcard::Unhardened,
|
||||
})
|
||||
.into_descriptor_key(),
|
||||
ExtendedKey::Public((xpub, _)) => DescriptorPublicKey::XPub(DescriptorXKey {
|
||||
origin,
|
||||
xkey: xpub,
|
||||
derivation_path,
|
||||
wildcard: Wildcard::Unhardened,
|
||||
})
|
||||
.into_descriptor_key(),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
/// Identity conversion
|
||||
impl<Ctx: ScriptContext> DerivableKey<Ctx> for ExtendedKey<Ctx> {
|
||||
fn into_extended_key(self) -> Result<ExtendedKey<Ctx>, KeyError> {
|
||||
Ok(self)
|
||||
}
|
||||
}
|
||||
|
||||
impl<Ctx: ScriptContext> DerivableKey<Ctx> for bip32::ExtendedPubKey {
|
||||
fn into_extended_key(self) -> Result<ExtendedKey<Ctx>, KeyError> {
|
||||
Ok(self.into())
|
||||
}
|
||||
}
|
||||
|
||||
impl<Ctx: ScriptContext> DerivableKey<Ctx> for bip32::ExtendedPrivKey {
|
||||
fn add_metadata(
|
||||
self,
|
||||
origin: Option<bip32::KeySource>,
|
||||
derivation_path: bip32::DerivationPath,
|
||||
) -> Result<DescriptorKey<Ctx>, KeyError> {
|
||||
DescriptorSecretKey::XPrv(DescriptorXKey {
|
||||
origin,
|
||||
xkey: self,
|
||||
derivation_path,
|
||||
is_wildcard: true,
|
||||
})
|
||||
.to_descriptor_key()
|
||||
fn into_extended_key(self) -> Result<ExtendedKey<Ctx>, KeyError> {
|
||||
Ok(self.into())
|
||||
}
|
||||
}
|
||||
|
||||
@@ -381,6 +561,16 @@ impl<K, Ctx: ScriptContext> Deref for GeneratedKey<K, Ctx> {
|
||||
}
|
||||
}
|
||||
|
||||
impl<K: Clone, Ctx: ScriptContext> Clone for GeneratedKey<K, Ctx> {
|
||||
fn clone(&self) -> GeneratedKey<K, Ctx> {
|
||||
GeneratedKey {
|
||||
key: self.key.clone(),
|
||||
valid_networks: self.valid_networks.clone(),
|
||||
phantom: self.phantom,
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// Make generated "derivable" keys themselves "derivable". Also make sure they are assigned the
|
||||
// right `valid_networks`.
|
||||
impl<Ctx, K> DerivableKey<Ctx> for GeneratedKey<K, Ctx>
|
||||
@@ -388,45 +578,49 @@ where
|
||||
Ctx: ScriptContext,
|
||||
K: DerivableKey<Ctx>,
|
||||
{
|
||||
fn add_metadata(
|
||||
fn into_extended_key(self) -> Result<ExtendedKey<Ctx>, KeyError> {
|
||||
self.key.into_extended_key()
|
||||
}
|
||||
|
||||
fn into_descriptor_key(
|
||||
self,
|
||||
origin: Option<bip32::KeySource>,
|
||||
derivation_path: bip32::DerivationPath,
|
||||
) -> Result<DescriptorKey<Ctx>, KeyError> {
|
||||
let descriptor_key = self.key.add_metadata(origin, derivation_path)?;
|
||||
let descriptor_key = self.key.into_descriptor_key(origin, derivation_path)?;
|
||||
Ok(descriptor_key.override_valid_networks(self.valid_networks))
|
||||
}
|
||||
}
|
||||
|
||||
// Make generated keys directly usable in descriptors, and make sure they get assigned the right
|
||||
// `valid_networks`.
|
||||
impl<Ctx, K> ToDescriptorKey<Ctx> for GeneratedKey<K, Ctx>
|
||||
impl<Ctx, K> IntoDescriptorKey<Ctx> for GeneratedKey<K, Ctx>
|
||||
where
|
||||
Ctx: ScriptContext,
|
||||
K: ToDescriptorKey<Ctx>,
|
||||
K: IntoDescriptorKey<Ctx>,
|
||||
{
|
||||
fn to_descriptor_key(self) -> Result<DescriptorKey<Ctx>, KeyError> {
|
||||
let desc_key = self.key.to_descriptor_key()?;
|
||||
fn into_descriptor_key(self) -> Result<DescriptorKey<Ctx>, KeyError> {
|
||||
let desc_key = self.key.into_descriptor_key()?;
|
||||
Ok(desc_key.override_valid_networks(self.valid_networks))
|
||||
}
|
||||
}
|
||||
|
||||
/// Trait for keys that can be generated
|
||||
///
|
||||
/// The same rules about [`ScriptContext`] and [`ValidNetworks`] from [`ToDescriptorKey`] apply.
|
||||
/// The same rules about [`ScriptContext`] and [`ValidNetworks`] from [`IntoDescriptorKey`] apply.
|
||||
///
|
||||
/// This trait is particularly useful when combined with [`DerivableKey`]: if `Self`
|
||||
/// implements it, the returned [`GeneratedKey`] will also implement it. The same is true for
|
||||
/// [`ToDescriptorKey`]: the generated keys can be directly used in descriptors if `Self` is also
|
||||
/// [`ToDescriptorKey`].
|
||||
/// [`IntoDescriptorKey`]: the generated keys can be directly used in descriptors if `Self` is also
|
||||
/// [`IntoDescriptorKey`].
|
||||
pub trait GeneratableKey<Ctx: ScriptContext>: Sized {
|
||||
/// Type specifying the amount of entropy required e.g. [u8;32]
|
||||
/// Type specifying the amount of entropy required e.g. `[u8;32]`
|
||||
type Entropy: AsMut<[u8]> + Default;
|
||||
|
||||
/// Extra options required by the `generate_with_entropy`
|
||||
type Options;
|
||||
/// Returned error in case of failure
|
||||
type Error: std::fmt::Debug;
|
||||
type Error: core::fmt::Debug;
|
||||
|
||||
/// Generate a key given the extra options and the entropy
|
||||
fn generate_with_entropy(
|
||||
@@ -517,38 +711,40 @@ impl<Ctx: ScriptContext> GeneratableKey<Ctx> for PrivateKey {
|
||||
entropy: Self::Entropy,
|
||||
) -> Result<GeneratedKey<Self, Ctx>, Self::Error> {
|
||||
// pick a arbitrary network here, but say that we support all of them
|
||||
let key = secp256k1::SecretKey::from_slice(&entropy)?;
|
||||
let inner = secp256k1::SecretKey::from_slice(&entropy)?;
|
||||
let private_key = PrivateKey {
|
||||
compressed: options.compressed,
|
||||
network: Network::Bitcoin,
|
||||
key,
|
||||
inner,
|
||||
};
|
||||
|
||||
Ok(GeneratedKey::new(private_key, any_network()))
|
||||
}
|
||||
}
|
||||
|
||||
impl<Ctx: ScriptContext, T: DerivableKey<Ctx>> ToDescriptorKey<Ctx> for (T, bip32::DerivationPath) {
|
||||
fn to_descriptor_key(self) -> Result<DescriptorKey<Ctx>, KeyError> {
|
||||
self.0.add_metadata(None, self.1)
|
||||
impl<Ctx: ScriptContext, T: DerivableKey<Ctx>> IntoDescriptorKey<Ctx>
|
||||
for (T, bip32::DerivationPath)
|
||||
{
|
||||
fn into_descriptor_key(self) -> Result<DescriptorKey<Ctx>, KeyError> {
|
||||
self.0.into_descriptor_key(None, self.1)
|
||||
}
|
||||
}
|
||||
|
||||
impl<Ctx: ScriptContext, T: DerivableKey<Ctx>> ToDescriptorKey<Ctx>
|
||||
impl<Ctx: ScriptContext, T: DerivableKey<Ctx>> IntoDescriptorKey<Ctx>
|
||||
for (T, bip32::KeySource, bip32::DerivationPath)
|
||||
{
|
||||
fn to_descriptor_key(self) -> Result<DescriptorKey<Ctx>, KeyError> {
|
||||
self.0.add_metadata(Some(self.1), self.2)
|
||||
fn into_descriptor_key(self) -> Result<DescriptorKey<Ctx>, KeyError> {
|
||||
self.0.into_descriptor_key(Some(self.1), self.2)
|
||||
}
|
||||
}
|
||||
|
||||
fn expand_multi_keys<Pk: ToDescriptorKey<Ctx>, Ctx: ScriptContext>(
|
||||
fn expand_multi_keys<Pk: IntoDescriptorKey<Ctx>, Ctx: ScriptContext>(
|
||||
pks: Vec<Pk>,
|
||||
secp: &SecpCtx,
|
||||
) -> Result<(Vec<DescriptorPublicKey>, KeyMap, ValidNetworks), KeyError> {
|
||||
let (pks, key_maps_networks): (Vec<_>, Vec<_>) = pks
|
||||
.into_iter()
|
||||
.map(|key| Ok::<_, KeyError>(key.to_descriptor_key()?.extract(secp)?))
|
||||
.map(|key| key.into_descriptor_key()?.extract(secp))
|
||||
.collect::<Result<Vec<_>, _>>()?
|
||||
.into_iter()
|
||||
.map(|(a, b, c)| (a, (b, c)))
|
||||
@@ -569,65 +765,85 @@ fn expand_multi_keys<Pk: ToDescriptorKey<Ctx>, Ctx: ScriptContext>(
|
||||
|
||||
// Used internally by `bdk::fragment!` to build `pk_k()` fragments
|
||||
#[doc(hidden)]
|
||||
pub fn make_pk<Pk: ToDescriptorKey<Ctx>, Ctx: ScriptContext>(
|
||||
pub fn make_pk<Pk: IntoDescriptorKey<Ctx>, Ctx: ScriptContext>(
|
||||
descriptor_key: Pk,
|
||||
secp: &SecpCtx,
|
||||
) -> Result<(Miniscript<DescriptorPublicKey, Ctx>, KeyMap, ValidNetworks), KeyError> {
|
||||
let (key, key_map, valid_networks) = descriptor_key.to_descriptor_key()?.extract(secp)?;
|
||||
) -> Result<(Miniscript<DescriptorPublicKey, Ctx>, KeyMap, ValidNetworks), DescriptorError> {
|
||||
let (key, key_map, valid_networks) = descriptor_key.into_descriptor_key()?.extract(secp)?;
|
||||
let minisc = Miniscript::from_ast(Terminal::PkK(key))?;
|
||||
|
||||
Ok((
|
||||
Miniscript::from_ast(Terminal::PkK(key))?,
|
||||
key_map,
|
||||
valid_networks,
|
||||
))
|
||||
minisc.check_miniscript()?;
|
||||
|
||||
Ok((minisc, key_map, valid_networks))
|
||||
}
|
||||
|
||||
// Used internally by `bdk::fragment!` to build `pk_h()` fragments
|
||||
#[doc(hidden)]
|
||||
pub fn make_pkh<Pk: IntoDescriptorKey<Ctx>, Ctx: ScriptContext>(
|
||||
descriptor_key: Pk,
|
||||
secp: &SecpCtx,
|
||||
) -> Result<(Miniscript<DescriptorPublicKey, Ctx>, KeyMap, ValidNetworks), DescriptorError> {
|
||||
let (key, key_map, valid_networks) = descriptor_key.into_descriptor_key()?.extract(secp)?;
|
||||
let minisc = Miniscript::from_ast(Terminal::PkH(key))?;
|
||||
|
||||
minisc.check_miniscript()?;
|
||||
|
||||
Ok((minisc, key_map, valid_networks))
|
||||
}
|
||||
|
||||
// Used internally by `bdk::fragment!` to build `multi()` fragments
|
||||
#[doc(hidden)]
|
||||
pub fn make_multi<Pk: ToDescriptorKey<Ctx>, Ctx: ScriptContext>(
|
||||
pub fn make_multi<
|
||||
Pk: IntoDescriptorKey<Ctx>,
|
||||
Ctx: ScriptContext,
|
||||
V: Fn(usize, Vec<DescriptorPublicKey>) -> Terminal<DescriptorPublicKey, Ctx>,
|
||||
>(
|
||||
thresh: usize,
|
||||
variant: V,
|
||||
pks: Vec<Pk>,
|
||||
secp: &SecpCtx,
|
||||
) -> Result<(Miniscript<DescriptorPublicKey, Ctx>, KeyMap, ValidNetworks), KeyError> {
|
||||
) -> Result<(Miniscript<DescriptorPublicKey, Ctx>, KeyMap, ValidNetworks), DescriptorError> {
|
||||
let (pks, key_map, valid_networks) = expand_multi_keys(pks, secp)?;
|
||||
let minisc = Miniscript::from_ast(variant(thresh, pks))?;
|
||||
|
||||
Ok((
|
||||
Miniscript::from_ast(Terminal::Multi(thresh, pks))?,
|
||||
key_map,
|
||||
valid_networks,
|
||||
))
|
||||
minisc.check_miniscript()?;
|
||||
|
||||
Ok((minisc, key_map, valid_networks))
|
||||
}
|
||||
|
||||
// Used internally by `bdk::descriptor!` to build `sortedmulti()` fragments
|
||||
#[doc(hidden)]
|
||||
pub fn make_sortedmulti_inner<Pk: ToDescriptorKey<Ctx>, Ctx: ScriptContext>(
|
||||
pub fn make_sortedmulti<Pk, Ctx, F>(
|
||||
thresh: usize,
|
||||
pks: Vec<Pk>,
|
||||
build_desc: F,
|
||||
secp: &SecpCtx,
|
||||
) -> Result<
|
||||
(
|
||||
SortedMultiVec<DescriptorPublicKey, Ctx>,
|
||||
KeyMap,
|
||||
ValidNetworks,
|
||||
),
|
||||
KeyError,
|
||||
> {
|
||||
) -> Result<(Descriptor<DescriptorPublicKey>, KeyMap, ValidNetworks), DescriptorError>
|
||||
where
|
||||
Pk: IntoDescriptorKey<Ctx>,
|
||||
Ctx: ScriptContext,
|
||||
F: Fn(
|
||||
usize,
|
||||
Vec<DescriptorPublicKey>,
|
||||
) -> Result<(Descriptor<DescriptorPublicKey>, PhantomData<Ctx>), DescriptorError>,
|
||||
{
|
||||
let (pks, key_map, valid_networks) = expand_multi_keys(pks, secp)?;
|
||||
let descriptor = build_desc(thresh, pks)?.0;
|
||||
|
||||
Ok((SortedMultiVec::new(thresh, pks)?, key_map, valid_networks))
|
||||
Ok((descriptor, key_map, valid_networks))
|
||||
}
|
||||
|
||||
/// The "identity" conversion is used internally by some `bdk::fragment`s
|
||||
impl<Ctx: ScriptContext> ToDescriptorKey<Ctx> for DescriptorKey<Ctx> {
|
||||
fn to_descriptor_key(self) -> Result<DescriptorKey<Ctx>, KeyError> {
|
||||
impl<Ctx: ScriptContext> IntoDescriptorKey<Ctx> for DescriptorKey<Ctx> {
|
||||
fn into_descriptor_key(self) -> Result<DescriptorKey<Ctx>, KeyError> {
|
||||
Ok(self)
|
||||
}
|
||||
}
|
||||
|
||||
impl<Ctx: ScriptContext> ToDescriptorKey<Ctx> for DescriptorPublicKey {
|
||||
fn to_descriptor_key(self) -> Result<DescriptorKey<Ctx>, KeyError> {
|
||||
impl<Ctx: ScriptContext> IntoDescriptorKey<Ctx> for DescriptorPublicKey {
|
||||
fn into_descriptor_key(self) -> Result<DescriptorKey<Ctx>, KeyError> {
|
||||
let networks = match self {
|
||||
DescriptorPublicKey::SinglePub(_) => any_network(),
|
||||
DescriptorPublicKey::Single(_) => any_network(),
|
||||
DescriptorPublicKey::XPub(DescriptorXKey { xkey, .. })
|
||||
if xkey.network == Network::Bitcoin =>
|
||||
{
|
||||
@@ -640,20 +856,30 @@ impl<Ctx: ScriptContext> ToDescriptorKey<Ctx> for DescriptorPublicKey {
|
||||
}
|
||||
}
|
||||
|
||||
impl<Ctx: ScriptContext> ToDescriptorKey<Ctx> for PublicKey {
|
||||
fn to_descriptor_key(self) -> Result<DescriptorKey<Ctx>, KeyError> {
|
||||
DescriptorPublicKey::SinglePub(DescriptorSinglePub {
|
||||
key: self,
|
||||
impl<Ctx: ScriptContext> IntoDescriptorKey<Ctx> for PublicKey {
|
||||
fn into_descriptor_key(self) -> Result<DescriptorKey<Ctx>, KeyError> {
|
||||
DescriptorPublicKey::Single(SinglePub {
|
||||
key: SinglePubKey::FullKey(self),
|
||||
origin: None,
|
||||
})
|
||||
.to_descriptor_key()
|
||||
.into_descriptor_key()
|
||||
}
|
||||
}
|
||||
|
||||
impl<Ctx: ScriptContext> ToDescriptorKey<Ctx> for DescriptorSecretKey {
|
||||
fn to_descriptor_key(self) -> Result<DescriptorKey<Ctx>, KeyError> {
|
||||
impl<Ctx: ScriptContext> IntoDescriptorKey<Ctx> for XOnlyPublicKey {
|
||||
fn into_descriptor_key(self) -> Result<DescriptorKey<Ctx>, KeyError> {
|
||||
DescriptorPublicKey::Single(SinglePub {
|
||||
key: SinglePubKey::XOnly(self),
|
||||
origin: None,
|
||||
})
|
||||
.into_descriptor_key()
|
||||
}
|
||||
}
|
||||
|
||||
impl<Ctx: ScriptContext> IntoDescriptorKey<Ctx> for DescriptorSecretKey {
|
||||
fn into_descriptor_key(self) -> Result<DescriptorKey<Ctx>, KeyError> {
|
||||
let networks = match &self {
|
||||
DescriptorSecretKey::SinglePriv(sk) if sk.key.network == Network::Bitcoin => {
|
||||
DescriptorSecretKey::Single(sk) if sk.key.network == Network::Bitcoin => {
|
||||
mainnet_network()
|
||||
}
|
||||
DescriptorSecretKey::XPrv(DescriptorXKey { xkey, .. })
|
||||
@@ -668,21 +894,21 @@ impl<Ctx: ScriptContext> ToDescriptorKey<Ctx> for DescriptorSecretKey {
|
||||
}
|
||||
}
|
||||
|
||||
impl<Ctx: ScriptContext> ToDescriptorKey<Ctx> for &'_ str {
|
||||
fn to_descriptor_key(self) -> Result<DescriptorKey<Ctx>, KeyError> {
|
||||
impl<Ctx: ScriptContext> IntoDescriptorKey<Ctx> for &'_ str {
|
||||
fn into_descriptor_key(self) -> Result<DescriptorKey<Ctx>, KeyError> {
|
||||
DescriptorSecretKey::from_str(self)
|
||||
.map_err(|e| KeyError::Message(e.to_string()))?
|
||||
.to_descriptor_key()
|
||||
.into_descriptor_key()
|
||||
}
|
||||
}
|
||||
|
||||
impl<Ctx: ScriptContext> ToDescriptorKey<Ctx> for PrivateKey {
|
||||
fn to_descriptor_key(self) -> Result<DescriptorKey<Ctx>, KeyError> {
|
||||
DescriptorSecretKey::SinglePriv(DescriptorSinglePriv {
|
||||
impl<Ctx: ScriptContext> IntoDescriptorKey<Ctx> for PrivateKey {
|
||||
fn into_descriptor_key(self) -> Result<DescriptorKey<Ctx>, KeyError> {
|
||||
DescriptorSecretKey::Single(SinglePriv {
|
||||
key: self,
|
||||
origin: None,
|
||||
})
|
||||
.to_descriptor_key()
|
||||
.into_descriptor_key()
|
||||
}
|
||||
}
|
||||
|
||||
@@ -700,20 +926,28 @@ pub enum KeyError {
|
||||
Message(String),
|
||||
|
||||
/// BIP32 error
|
||||
BIP32(bitcoin::util::bip32::Error),
|
||||
Bip32(bitcoin::util::bip32::Error),
|
||||
/// Miniscript error
|
||||
Miniscript(miniscript::Error),
|
||||
}
|
||||
|
||||
impl_error!(miniscript::Error, Miniscript, KeyError);
|
||||
impl_error!(bitcoin::util::bip32::Error, BIP32, KeyError);
|
||||
impl_error!(bitcoin::util::bip32::Error, Bip32, KeyError);
|
||||
|
||||
impl std::fmt::Display for KeyError {
|
||||
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
|
||||
write!(f, "{:?}", self)
|
||||
match self {
|
||||
Self::InvalidScriptContext => write!(f, "Invalid script context"),
|
||||
Self::InvalidNetwork => write!(f, "Invalid network"),
|
||||
Self::InvalidChecksum => write!(f, "Invalid checksum"),
|
||||
Self::Message(err) => write!(f, "{}", err),
|
||||
Self::Bip32(err) => write!(f, "BIP32 error: {}", err),
|
||||
Self::Miniscript(err) => write!(f, "Miniscript error: {}", err),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
#[cfg(feature = "std")]
|
||||
impl std::error::Error for KeyError {}
|
||||
|
||||
#[cfg(test)]
|
||||
@@ -744,4 +978,19 @@ pub mod test {
|
||||
"L2wTu6hQrnDMiFNWA5na6jB12ErGQqtXwqpSL7aWquJaZG8Ai3ch"
|
||||
);
|
||||
}
|
||||
|
||||
#[cfg(feature = "keys-bip39")]
|
||||
#[test]
|
||||
fn test_keys_wif_network_bip39() {
|
||||
let xkey: ExtendedKey = bip39::Mnemonic::parse_in(
|
||||
bip39::Language::English,
|
||||
"jelly crash boy whisper mouse ecology tuna soccer memory million news short",
|
||||
)
|
||||
.unwrap()
|
||||
.into_extended_key()
|
||||
.unwrap();
|
||||
let xprv = xkey.into_xprv(Network::Testnet).unwrap();
|
||||
|
||||
assert_eq!(xprv.network, Network::Testnet);
|
||||
}
|
||||
}
|
||||
46
crates/bdk/src/lib.rs
Normal file
46
crates/bdk/src/lib.rs
Normal file
@@ -0,0 +1,46 @@
|
||||
#![doc = include_str!("../README.md")]
|
||||
#![no_std]
|
||||
#[cfg(feature = "std")]
|
||||
#[macro_use]
|
||||
extern crate std;
|
||||
|
||||
#[doc(hidden)]
|
||||
#[macro_use]
|
||||
pub extern crate alloc;
|
||||
|
||||
pub extern crate bitcoin;
|
||||
#[cfg(feature = "hardware-signer")]
|
||||
pub extern crate hwi;
|
||||
extern crate log;
|
||||
pub extern crate miniscript;
|
||||
extern crate serde;
|
||||
extern crate serde_json;
|
||||
|
||||
#[cfg(feature = "keys-bip39")]
|
||||
extern crate bip39;
|
||||
|
||||
#[allow(unused_imports)]
|
||||
#[macro_use]
|
||||
pub(crate) mod error;
|
||||
pub mod descriptor;
|
||||
pub mod keys;
|
||||
pub mod psbt;
|
||||
pub(crate) mod types;
|
||||
pub mod wallet;
|
||||
|
||||
pub use descriptor::template;
|
||||
pub use descriptor::HdKeyPaths;
|
||||
pub use error::Error;
|
||||
pub use types::*;
|
||||
pub use wallet::signer;
|
||||
pub use wallet::signer::SignOptions;
|
||||
pub use wallet::tx_builder::TxBuilder;
|
||||
pub use wallet::Wallet;
|
||||
|
||||
/// Get the version of BDK at runtime
|
||||
pub fn version() -> &'static str {
|
||||
env!("CARGO_PKG_VERSION", "unknown")
|
||||
}
|
||||
|
||||
pub use bdk_chain as chain;
|
||||
pub(crate) use bdk_chain::collections;
|
||||
79
crates/bdk/src/psbt/mod.rs
Normal file
79
crates/bdk/src/psbt/mod.rs
Normal file
@@ -0,0 +1,79 @@
|
||||
// Bitcoin Dev Kit
|
||||
// Written in 2020 by Alekos Filini <alekos.filini@gmail.com>
|
||||
//
|
||||
// Copyright (c) 2020-2021 Bitcoin Dev Kit Developers
|
||||
//
|
||||
// This file is licensed under the Apache License, Version 2.0 <LICENSE-APACHE
|
||||
// or http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
|
||||
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your option.
|
||||
// You may not use this file except in accordance with one or both of these
|
||||
// licenses.
|
||||
|
||||
//! Additional functions on the `rust-bitcoin` `PartiallySignedTransaction` structure.
|
||||
|
||||
use crate::FeeRate;
|
||||
use alloc::vec::Vec;
|
||||
use bitcoin::util::psbt::PartiallySignedTransaction as Psbt;
|
||||
use bitcoin::TxOut;
|
||||
|
||||
// TODO upstream the functions here to `rust-bitcoin`?
|
||||
|
||||
/// Trait to add functions to extract utxos and calculate fees.
|
||||
pub trait PsbtUtils {
|
||||
/// Get the `TxOut` for the specified input index, if it doesn't exist in the PSBT `None` is returned.
|
||||
fn get_utxo_for(&self, input_index: usize) -> Option<TxOut>;
|
||||
|
||||
/// The total transaction fee amount, sum of input amounts minus sum of output amounts, in sats.
|
||||
/// If the PSBT is missing a TxOut for an input returns None.
|
||||
fn fee_amount(&self) -> Option<u64>;
|
||||
|
||||
/// The transaction's fee rate. This value will only be accurate if calculated AFTER the
|
||||
/// `PartiallySignedTransaction` is finalized and all witness/signature data is added to the
|
||||
/// transaction.
|
||||
/// If the PSBT is missing a TxOut for an input returns None.
|
||||
fn fee_rate(&self) -> Option<FeeRate>;
|
||||
}
|
||||
|
||||
impl PsbtUtils for Psbt {
|
||||
#[allow(clippy::all)] // We want to allow `manual_map` but it is too new.
|
||||
fn get_utxo_for(&self, input_index: usize) -> Option<TxOut> {
|
||||
let tx = &self.unsigned_tx;
|
||||
|
||||
if input_index >= tx.input.len() {
|
||||
return None;
|
||||
}
|
||||
|
||||
if let Some(input) = self.inputs.get(input_index) {
|
||||
if let Some(wit_utxo) = &input.witness_utxo {
|
||||
Some(wit_utxo.clone())
|
||||
} else if let Some(in_tx) = &input.non_witness_utxo {
|
||||
Some(in_tx.output[tx.input[input_index].previous_output.vout as usize].clone())
|
||||
} else {
|
||||
None
|
||||
}
|
||||
} else {
|
||||
None
|
||||
}
|
||||
}
|
||||
|
||||
fn fee_amount(&self) -> Option<u64> {
|
||||
let tx = &self.unsigned_tx;
|
||||
let utxos: Option<Vec<TxOut>> = (0..tx.input.len()).map(|i| self.get_utxo_for(i)).collect();
|
||||
|
||||
utxos.map(|inputs| {
|
||||
let input_amount: u64 = inputs.iter().map(|i| i.value).sum();
|
||||
let output_amount: u64 = self.unsigned_tx.output.iter().map(|o| o.value).sum();
|
||||
input_amount
|
||||
.checked_sub(output_amount)
|
||||
.expect("input amount must be greater than output amount")
|
||||
})
|
||||
}
|
||||
|
||||
fn fee_rate(&self) -> Option<FeeRate> {
|
||||
let fee_amount = self.fee_amount();
|
||||
fee_amount.map(|fee| {
|
||||
let weight = self.clone().extract_tx().weight();
|
||||
FeeRate::from_wu(fee, weight)
|
||||
})
|
||||
}
|
||||
}
|
||||
333
crates/bdk/src/types.rs
Normal file
333
crates/bdk/src/types.rs
Normal file
@@ -0,0 +1,333 @@
|
||||
// Bitcoin Dev Kit
|
||||
// Written in 2020 by Alekos Filini <alekos.filini@gmail.com>
|
||||
//
|
||||
// Copyright (c) 2020-2021 Bitcoin Dev Kit Developers
|
||||
//
|
||||
// This file is licensed under the Apache License, Version 2.0 <LICENSE-APACHE
|
||||
// or http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
|
||||
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your option.
|
||||
// You may not use this file except in accordance with one or both of these
|
||||
// licenses.
|
||||
|
||||
use alloc::boxed::Box;
|
||||
use core::convert::AsRef;
|
||||
use core::ops::Sub;
|
||||
|
||||
use bdk_chain::ConfirmationTime;
|
||||
use bitcoin::blockdata::transaction::{OutPoint, Transaction, TxOut};
|
||||
use bitcoin::{hash_types::Txid, util::psbt};
|
||||
|
||||
use serde::{Deserialize, Serialize};
|
||||
|
||||
/// Types of keychains
|
||||
#[derive(Serialize, Deserialize, Debug, Clone, Copy, PartialEq, Eq, Hash, Ord, PartialOrd)]
|
||||
pub enum KeychainKind {
|
||||
/// External
|
||||
External = 0,
|
||||
/// Internal, usually used for change outputs
|
||||
Internal = 1,
|
||||
}
|
||||
|
||||
impl KeychainKind {
|
||||
/// Return [`KeychainKind`] as a byte
|
||||
pub fn as_byte(&self) -> u8 {
|
||||
match self {
|
||||
KeychainKind::External => b'e',
|
||||
KeychainKind::Internal => b'i',
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl AsRef<[u8]> for KeychainKind {
|
||||
fn as_ref(&self) -> &[u8] {
|
||||
match self {
|
||||
KeychainKind::External => b"e",
|
||||
KeychainKind::Internal => b"i",
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
/// Fee rate
|
||||
#[derive(Debug, Copy, Clone, PartialEq, PartialOrd)]
|
||||
// Internally stored as satoshi/vbyte
|
||||
pub struct FeeRate(f32);
|
||||
|
||||
impl FeeRate {
|
||||
/// Create a new instance checking the value provided
|
||||
///
|
||||
/// ## Panics
|
||||
///
|
||||
/// Panics if the value is not [normal](https://doc.rust-lang.org/std/primitive.f32.html#method.is_normal) (except if it's a positive zero) or negative.
|
||||
fn new_checked(value: f32) -> Self {
|
||||
assert!(value.is_normal() || value == 0.0);
|
||||
assert!(value.is_sign_positive());
|
||||
|
||||
FeeRate(value)
|
||||
}
|
||||
|
||||
/// Create a new instance of [`FeeRate`] given a float fee rate in sats/kwu
|
||||
pub fn from_sat_per_kwu(sat_per_kwu: f32) -> Self {
|
||||
FeeRate::new_checked(sat_per_kwu / 250.0_f32)
|
||||
}
|
||||
|
||||
/// Create a new instance of [`FeeRate`] given a float fee rate in sats/kvb
|
||||
pub fn from_sat_per_kvb(sat_per_kvb: f32) -> Self {
|
||||
FeeRate::new_checked(sat_per_kvb / 1000.0_f32)
|
||||
}
|
||||
|
||||
/// Create a new instance of [`FeeRate`] given a float fee rate in btc/kvbytes
|
||||
///
|
||||
/// ## Panics
|
||||
///
|
||||
/// Panics if the value is not [normal](https://doc.rust-lang.org/std/primitive.f32.html#method.is_normal) (except if it's a positive zero) or negative.
|
||||
pub fn from_btc_per_kvb(btc_per_kvb: f32) -> Self {
|
||||
FeeRate::new_checked(btc_per_kvb * 1e5)
|
||||
}
|
||||
|
||||
/// Create a new instance of [`FeeRate`] given a float fee rate in satoshi/vbyte
|
||||
///
|
||||
/// ## Panics
|
||||
///
|
||||
/// Panics if the value is not [normal](https://doc.rust-lang.org/std/primitive.f32.html#method.is_normal) (except if it's a positive zero) or negative.
|
||||
pub fn from_sat_per_vb(sat_per_vb: f32) -> Self {
|
||||
FeeRate::new_checked(sat_per_vb)
|
||||
}
|
||||
|
||||
/// Create a new [`FeeRate`] with the default min relay fee value
|
||||
pub const fn default_min_relay_fee() -> Self {
|
||||
FeeRate(1.0)
|
||||
}
|
||||
|
||||
/// Calculate fee rate from `fee` and weight units (`wu`).
|
||||
pub fn from_wu(fee: u64, wu: usize) -> FeeRate {
|
||||
Self::from_vb(fee, wu.vbytes())
|
||||
}
|
||||
|
||||
/// Calculate fee rate from `fee` and `vbytes`.
|
||||
pub fn from_vb(fee: u64, vbytes: usize) -> FeeRate {
|
||||
let rate = fee as f32 / vbytes as f32;
|
||||
Self::from_sat_per_vb(rate)
|
||||
}
|
||||
|
||||
/// Return the value as satoshi/vbyte
|
||||
pub fn as_sat_per_vb(&self) -> f32 {
|
||||
self.0
|
||||
}
|
||||
|
||||
/// Calculate absolute fee in Satoshis using size in weight units.
|
||||
pub fn fee_wu(&self, wu: usize) -> u64 {
|
||||
self.fee_vb(wu.vbytes())
|
||||
}
|
||||
|
||||
/// Calculate absolute fee in Satoshis using size in virtual bytes.
|
||||
pub fn fee_vb(&self, vbytes: usize) -> u64 {
|
||||
(self.as_sat_per_vb() * vbytes as f32).ceil() as u64
|
||||
}
|
||||
}
|
||||
|
||||
impl Default for FeeRate {
|
||||
fn default() -> Self {
|
||||
FeeRate::default_min_relay_fee()
|
||||
}
|
||||
}
|
||||
|
||||
impl Sub for FeeRate {
|
||||
type Output = Self;
|
||||
|
||||
fn sub(self, other: FeeRate) -> Self::Output {
|
||||
FeeRate(self.0 - other.0)
|
||||
}
|
||||
}
|
||||
|
||||
/// Trait implemented by types that can be used to measure weight units.
|
||||
pub trait Vbytes {
|
||||
/// Convert weight units to virtual bytes.
|
||||
fn vbytes(self) -> usize;
|
||||
}
|
||||
|
||||
impl Vbytes for usize {
|
||||
fn vbytes(self) -> usize {
|
||||
// ref: https://github.com/bitcoin/bips/blob/master/bip-0141.mediawiki#transaction-size-calculations
|
||||
(self as f32 / 4.0).ceil() as usize
|
||||
}
|
||||
}
|
||||
|
||||
/// An unspent output owned by a [`Wallet`].
|
||||
///
|
||||
/// [`Wallet`]: crate::Wallet
|
||||
#[derive(Serialize, Deserialize, Debug, Clone, PartialEq, Eq, Hash)]
|
||||
pub struct LocalUtxo {
|
||||
/// Reference to a transaction output
|
||||
pub outpoint: OutPoint,
|
||||
/// Transaction output
|
||||
pub txout: TxOut,
|
||||
/// Type of keychain
|
||||
pub keychain: KeychainKind,
|
||||
/// Whether this UTXO is spent or not
|
||||
pub is_spent: bool,
|
||||
/// The derivation index for the script pubkey in the wallet
|
||||
pub derivation_index: u32,
|
||||
/// The confirmation time for transaction containing this utxo
|
||||
pub confirmation_time: ConfirmationTime,
|
||||
}
|
||||
|
||||
/// A [`Utxo`] with its `satisfaction_weight`.
|
||||
#[derive(Debug, Clone, PartialEq, Eq)]
|
||||
pub struct WeightedUtxo {
|
||||
/// The weight of the witness data and `scriptSig` expressed in [weight units]. This is used to
|
||||
/// properly maintain the feerate when adding this input to a transaction during coin selection.
|
||||
///
|
||||
/// [weight units]: https://en.bitcoin.it/wiki/Weight_units
|
||||
pub satisfaction_weight: usize,
|
||||
/// The UTXO
|
||||
pub utxo: Utxo,
|
||||
}
|
||||
|
||||
#[derive(Debug, Clone, PartialEq, Eq)]
|
||||
/// An unspent transaction output (UTXO).
|
||||
pub enum Utxo {
|
||||
/// A UTXO owned by the local wallet.
|
||||
Local(LocalUtxo),
|
||||
/// A UTXO owned by another wallet.
|
||||
Foreign {
|
||||
/// The location of the output.
|
||||
outpoint: OutPoint,
|
||||
/// The information about the input we require to add it to a PSBT.
|
||||
// Box it to stop the type being too big.
|
||||
psbt_input: Box<psbt::Input>,
|
||||
},
|
||||
}
|
||||
|
||||
impl Utxo {
|
||||
/// Get the location of the UTXO
|
||||
pub fn outpoint(&self) -> OutPoint {
|
||||
match &self {
|
||||
Utxo::Local(local) => local.outpoint,
|
||||
Utxo::Foreign { outpoint, .. } => *outpoint,
|
||||
}
|
||||
}
|
||||
|
||||
/// Get the `TxOut` of the UTXO
|
||||
pub fn txout(&self) -> &TxOut {
|
||||
match &self {
|
||||
Utxo::Local(local) => &local.txout,
|
||||
Utxo::Foreign {
|
||||
outpoint,
|
||||
psbt_input,
|
||||
} => {
|
||||
if let Some(prev_tx) = &psbt_input.non_witness_utxo {
|
||||
return &prev_tx.output[outpoint.vout as usize];
|
||||
}
|
||||
|
||||
if let Some(txout) = &psbt_input.witness_utxo {
|
||||
return txout;
|
||||
}
|
||||
|
||||
unreachable!("Foreign UTXOs will always have one of these set")
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
/// A wallet transaction
|
||||
#[derive(Serialize, Deserialize, Debug, Clone, PartialEq, Eq)]
|
||||
pub struct TransactionDetails {
|
||||
/// Optional transaction
|
||||
pub transaction: Option<Transaction>,
|
||||
/// Transaction id
|
||||
pub txid: Txid,
|
||||
/// Received value (sats)
|
||||
/// Sum of owned outputs of this transaction.
|
||||
pub received: u64,
|
||||
/// Sent value (sats)
|
||||
/// Sum of owned inputs of this transaction.
|
||||
pub sent: u64,
|
||||
/// Fee value in sats if it was available.
|
||||
pub fee: Option<u64>,
|
||||
/// If the transaction is confirmed, contains height and Unix timestamp of the block containing the
|
||||
/// transaction, unconfirmed transaction contains `None`.
|
||||
pub confirmation_time: ConfirmationTime,
|
||||
}
|
||||
|
||||
impl PartialOrd for TransactionDetails {
|
||||
fn partial_cmp(&self, other: &Self) -> Option<core::cmp::Ordering> {
|
||||
Some(self.cmp(other))
|
||||
}
|
||||
}
|
||||
|
||||
impl Ord for TransactionDetails {
|
||||
fn cmp(&self, other: &Self) -> core::cmp::Ordering {
|
||||
self.confirmation_time
|
||||
.cmp(&other.confirmation_time)
|
||||
.then_with(|| self.txid.cmp(&other.txid))
|
||||
}
|
||||
}
|
||||
|
||||
#[cfg(test)]
|
||||
mod tests {
|
||||
use super::*;
|
||||
|
||||
#[test]
|
||||
fn can_store_feerate_in_const() {
|
||||
const _MIN_RELAY: FeeRate = FeeRate::default_min_relay_fee();
|
||||
}
|
||||
|
||||
#[test]
|
||||
#[should_panic]
|
||||
fn test_invalid_feerate_neg_zero() {
|
||||
let _ = FeeRate::from_sat_per_vb(-0.0);
|
||||
}
|
||||
|
||||
#[test]
|
||||
#[should_panic]
|
||||
fn test_invalid_feerate_neg_value() {
|
||||
let _ = FeeRate::from_sat_per_vb(-5.0);
|
||||
}
|
||||
|
||||
#[test]
|
||||
#[should_panic]
|
||||
fn test_invalid_feerate_nan() {
|
||||
let _ = FeeRate::from_sat_per_vb(f32::NAN);
|
||||
}
|
||||
|
||||
#[test]
|
||||
#[should_panic]
|
||||
fn test_invalid_feerate_inf() {
|
||||
let _ = FeeRate::from_sat_per_vb(f32::INFINITY);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_valid_feerate_pos_zero() {
|
||||
let _ = FeeRate::from_sat_per_vb(0.0);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_fee_from_btc_per_kvb() {
|
||||
let fee = FeeRate::from_btc_per_kvb(1e-5);
|
||||
assert!((fee.as_sat_per_vb() - 1.0).abs() < f32::EPSILON);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_fee_from_sat_per_vbyte() {
|
||||
let fee = FeeRate::from_sat_per_vb(1.0);
|
||||
assert!((fee.as_sat_per_vb() - 1.0).abs() < f32::EPSILON);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_fee_default_min_relay_fee() {
|
||||
let fee = FeeRate::default_min_relay_fee();
|
||||
assert!((fee.as_sat_per_vb() - 1.0).abs() < f32::EPSILON);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_fee_from_sat_per_kvb() {
|
||||
let fee = FeeRate::from_sat_per_kvb(1000.0);
|
||||
assert!((fee.as_sat_per_vb() - 1.0).abs() < f32::EPSILON);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_fee_from_sat_per_kwu() {
|
||||
let fee = FeeRate::from_sat_per_kwu(250.0);
|
||||
assert!((fee.as_sat_per_vb() - 1.0).abs() < f32::EPSILON);
|
||||
}
|
||||
}
|
||||
1437
crates/bdk/src/wallet/coin_selection.rs
Normal file
1437
crates/bdk/src/wallet/coin_selection.rs
Normal file
File diff suppressed because it is too large
Load Diff
@@ -1,26 +1,13 @@
|
||||
// Magical Bitcoin Library
|
||||
// Written in 2020 by
|
||||
// Alekos Filini <alekos.filini@gmail.com>
|
||||
// Bitcoin Dev Kit
|
||||
// Written in 2020 by Alekos Filini <alekos.filini@gmail.com>
|
||||
//
|
||||
// Copyright (c) 2020 Magical Bitcoin
|
||||
// Copyright (c) 2020-2021 Bitcoin Dev Kit Developers
|
||||
//
|
||||
// Permission is hereby granted, free of charge, to any person obtaining a copy
|
||||
// of this software and associated documentation files (the "Software"), to deal
|
||||
// in the Software without restriction, including without limitation the rights
|
||||
// to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
|
||||
// copies of the Software, and to permit persons to whom the Software is
|
||||
// furnished to do so, subject to the following conditions:
|
||||
//
|
||||
// The above copyright notice and this permission notice shall be included in all
|
||||
// copies or substantial portions of the Software.
|
||||
//
|
||||
// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
|
||||
// IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
|
||||
// FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
|
||||
// AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
|
||||
// LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
|
||||
// OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
|
||||
// SOFTWARE.
|
||||
// This file is licensed under the Apache License, Version 2.0 <LICENSE-APACHE
|
||||
// or http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
|
||||
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your option.
|
||||
// You may not use this file except in accordance with one or both of these
|
||||
// licenses.
|
||||
|
||||
//! Wallet export
|
||||
//!
|
||||
@@ -33,7 +20,6 @@
|
||||
//! ```
|
||||
//! # use std::str::FromStr;
|
||||
//! # use bitcoin::*;
|
||||
//! # use bdk::database::*;
|
||||
//! # use bdk::wallet::export::*;
|
||||
//! # use bdk::*;
|
||||
//! let import = r#"{
|
||||
@@ -42,51 +28,52 @@
|
||||
//! "label":"testnet"
|
||||
//! }"#;
|
||||
//!
|
||||
//! let import = WalletExport::from_str(import)?;
|
||||
//! let wallet: OfflineWallet<_> = Wallet::new_offline(
|
||||
//! let import = FullyNodedExport::from_str(import)?;
|
||||
//! let wallet = Wallet::new_no_persist(
|
||||
//! &import.descriptor(),
|
||||
//! import.change_descriptor().as_ref(),
|
||||
//! Network::Testnet,
|
||||
//! MemoryDatabase::default(),
|
||||
//! )?;
|
||||
//! # Ok::<_, bdk::Error>(())
|
||||
//! # Ok::<_, Box<dyn std::error::Error>>(())
|
||||
//! ```
|
||||
//!
|
||||
//! ### Export a `Wallet`
|
||||
//! ```
|
||||
//! # use bitcoin::*;
|
||||
//! # use bdk::database::*;
|
||||
//! # use bdk::wallet::export::*;
|
||||
//! # use bdk::*;
|
||||
//! let wallet: OfflineWallet<_> = Wallet::new_offline(
|
||||
//! let wallet = Wallet::new_no_persist(
|
||||
//! "wpkh([c258d2e4/84h/1h/0h]tpubDD3ynpHgJQW8VvWRzQ5WFDCrs4jqVFGHB3vLC3r49XHJSqP8bHKdK4AriuUKLccK68zfzowx7YhmDN8SiSkgCDENUFx9qVw65YyqM78vyVe/0/*)",
|
||||
//! Some("wpkh([c258d2e4/84h/1h/0h]tpubDD3ynpHgJQW8VvWRzQ5WFDCrs4jqVFGHB3vLC3r49XHJSqP8bHKdK4AriuUKLccK68zfzowx7YhmDN8SiSkgCDENUFx9qVw65YyqM78vyVe/1/*)"),
|
||||
//! Network::Testnet,
|
||||
//! MemoryDatabase::default()
|
||||
//! )?;
|
||||
//! let export = WalletExport::export_wallet(&wallet, "exported wallet", true)
|
||||
//! .map_err(ToString::to_string)
|
||||
//! .map_err(bdk::Error::Generic)?;
|
||||
//! let export = FullyNodedExport::export_wallet(&wallet, "exported wallet", true).unwrap();
|
||||
//!
|
||||
//! println!("Exported: {}", export.to_string());
|
||||
//! # Ok::<_, bdk::Error>(())
|
||||
//! # Ok::<_, Box<dyn std::error::Error>>(())
|
||||
//! ```
|
||||
|
||||
use std::str::FromStr;
|
||||
use core::str::FromStr;
|
||||
|
||||
use alloc::string::{String, ToString};
|
||||
use bdk_chain::sparse_chain::ChainPosition;
|
||||
use serde::{Deserialize, Serialize};
|
||||
|
||||
use miniscript::{Descriptor, DescriptorPublicKey, ScriptContext, Terminal};
|
||||
use miniscript::descriptor::{ShInner, WshInner};
|
||||
use miniscript::{Descriptor, ScriptContext, Terminal};
|
||||
|
||||
use crate::blockchain::BlockchainMarker;
|
||||
use crate::database::BatchDatabase;
|
||||
use crate::types::KeychainKind;
|
||||
use crate::wallet::Wallet;
|
||||
|
||||
/// Alias for [`FullyNodedExport`]
|
||||
#[deprecated(since = "0.18.0", note = "Please use [`FullyNodedExport`] instead")]
|
||||
pub type WalletExport = FullyNodedExport;
|
||||
|
||||
/// Structure that contains the export of a wallet
|
||||
///
|
||||
/// For a usage example see [this module](crate::wallet::export)'s documentation.
|
||||
#[derive(Debug, Serialize, Deserialize)]
|
||||
pub struct WalletExport {
|
||||
pub struct FullyNodedExport {
|
||||
descriptor: String,
|
||||
/// Earliest block to rescan when looking for the wallet's transactions
|
||||
pub blockheight: u32,
|
||||
@@ -94,13 +81,13 @@ pub struct WalletExport {
|
||||
pub label: String,
|
||||
}
|
||||
|
||||
impl ToString for WalletExport {
|
||||
impl ToString for FullyNodedExport {
|
||||
fn to_string(&self) -> String {
|
||||
serde_json::to_string(self).unwrap()
|
||||
}
|
||||
}
|
||||
|
||||
impl FromStr for WalletExport {
|
||||
impl FromStr for FullyNodedExport {
|
||||
type Err = serde_json::Error;
|
||||
|
||||
fn from_str(s: &str) -> Result<Self, Self::Err> {
|
||||
@@ -108,7 +95,11 @@ impl FromStr for WalletExport {
|
||||
}
|
||||
}
|
||||
|
||||
impl WalletExport {
|
||||
fn remove_checksum(s: String) -> String {
|
||||
s.split_once('#').map(|(a, _)| String::from(a)).unwrap()
|
||||
}
|
||||
|
||||
impl FullyNodedExport {
|
||||
/// Export a wallet
|
||||
///
|
||||
/// This function returns an error if it determines that the `wallet`'s descriptor(s) are not
|
||||
@@ -120,40 +111,51 @@ impl WalletExport {
|
||||
///
|
||||
/// If the database is empty or `include_blockheight` is false, the `blockheight` field
|
||||
/// returned will be `0`.
|
||||
pub fn export_wallet<B: BlockchainMarker, D: BatchDatabase>(
|
||||
wallet: &Wallet<B, D>,
|
||||
pub fn export_wallet<D>(
|
||||
wallet: &Wallet<D>,
|
||||
label: &str,
|
||||
include_blockheight: bool,
|
||||
) -> Result<Self, &'static str> {
|
||||
let descriptor = wallet
|
||||
.descriptor
|
||||
.to_string_with_secret(&wallet.signers.as_key_map(wallet.secp_ctx()));
|
||||
.get_descriptor_for_keychain(KeychainKind::External)
|
||||
.to_string_with_secret(
|
||||
&wallet
|
||||
.get_signers(KeychainKind::External)
|
||||
.as_key_map(wallet.secp_ctx()),
|
||||
);
|
||||
let descriptor = remove_checksum(descriptor);
|
||||
Self::is_compatible_with_core(&descriptor)?;
|
||||
|
||||
let blockheight = match wallet.database.borrow().iter_txs(false) {
|
||||
_ if !include_blockheight => 0,
|
||||
Err(_) => 0,
|
||||
Ok(txs) => {
|
||||
let mut heights = txs
|
||||
.into_iter()
|
||||
.map(|tx| tx.height.unwrap_or(0))
|
||||
.collect::<Vec<_>>();
|
||||
heights.sort_unstable();
|
||||
|
||||
*heights.last().unwrap_or(&0)
|
||||
}
|
||||
let blockheight = if include_blockheight {
|
||||
wallet
|
||||
.transactions()
|
||||
.next()
|
||||
.and_then(|(pos, _)| pos.height().into())
|
||||
.unwrap_or(0)
|
||||
} else {
|
||||
0
|
||||
};
|
||||
|
||||
let export = WalletExport {
|
||||
let export = FullyNodedExport {
|
||||
descriptor,
|
||||
label: label.into(),
|
||||
blockheight,
|
||||
};
|
||||
|
||||
let desc_to_string = |d: &Descriptor<DescriptorPublicKey>| {
|
||||
d.to_string_with_secret(&wallet.change_signers.as_key_map(wallet.secp_ctx()))
|
||||
let change_descriptor = match wallet.public_descriptor(KeychainKind::Internal).is_some() {
|
||||
false => None,
|
||||
true => {
|
||||
let descriptor = wallet
|
||||
.get_descriptor_for_keychain(KeychainKind::Internal)
|
||||
.to_string_with_secret(
|
||||
&wallet
|
||||
.get_signers(KeychainKind::Internal)
|
||||
.as_key_map(wallet.secp_ctx()),
|
||||
);
|
||||
Some(remove_checksum(descriptor))
|
||||
}
|
||||
};
|
||||
if export.change_descriptor() != wallet.change_descriptor.as_ref().map(desc_to_string) {
|
||||
if export.change_descriptor() != change_descriptor {
|
||||
return Err("Incompatible change descriptor");
|
||||
}
|
||||
|
||||
@@ -162,7 +164,7 @@ impl WalletExport {
|
||||
|
||||
fn is_compatible_with_core(descriptor: &str) -> Result<(), &'static str> {
|
||||
fn check_ms<Ctx: ScriptContext>(
|
||||
terminal: Terminal<String, Ctx>,
|
||||
terminal: &Terminal<String, Ctx>,
|
||||
) -> Result<(), &'static str> {
|
||||
if let Terminal::Multi(_, _) = terminal {
|
||||
Ok(())
|
||||
@@ -171,13 +173,22 @@ impl WalletExport {
|
||||
}
|
||||
}
|
||||
|
||||
// pkh(), wpkh(), sh(wpkh()) are always fine, as well as multi() and sortedmulti()
|
||||
match Descriptor::<String>::from_str(descriptor).map_err(|_| "Invalid descriptor")? {
|
||||
Descriptor::Pk(_)
|
||||
| Descriptor::Pkh(_)
|
||||
| Descriptor::Wpkh(_)
|
||||
| Descriptor::ShWpkh(_) => Ok(()),
|
||||
Descriptor::Sh(ms) => check_ms(ms.node),
|
||||
Descriptor::Wsh(ms) | Descriptor::ShWsh(ms) => check_ms(ms.node),
|
||||
Descriptor::Pkh(_) | Descriptor::Wpkh(_) => Ok(()),
|
||||
Descriptor::Sh(sh) => match sh.as_inner() {
|
||||
ShInner::Wpkh(_) => Ok(()),
|
||||
ShInner::SortedMulti(_) => Ok(()),
|
||||
ShInner::Wsh(wsh) => match wsh.as_inner() {
|
||||
WshInner::SortedMulti(_) => Ok(()),
|
||||
WshInner::Ms(ms) => check_ms(&ms.node),
|
||||
},
|
||||
ShInner::Ms(ms) => check_ms(&ms.node),
|
||||
},
|
||||
Descriptor::Wsh(wsh) => match wsh.as_inner() {
|
||||
WshInner::SortedMulti(_) => Ok(()),
|
||||
WshInner::Ms(ms) => check_ms(&ms.node),
|
||||
},
|
||||
_ => Err("The descriptor is not compatible with Bitcoin Core"),
|
||||
}
|
||||
}
|
||||
@@ -201,32 +212,43 @@ impl WalletExport {
|
||||
|
||||
#[cfg(test)]
|
||||
mod test {
|
||||
use std::str::FromStr;
|
||||
use core::str::FromStr;
|
||||
|
||||
use bitcoin::{Network, Txid};
|
||||
use bdk_chain::{BlockId, ConfirmationTime};
|
||||
use bitcoin::hashes::Hash;
|
||||
use bitcoin::{BlockHash, Network, Transaction};
|
||||
|
||||
use super::*;
|
||||
use crate::database::{memory::MemoryDatabase, BatchOperations};
|
||||
use crate::types::TransactionDetails;
|
||||
use crate::wallet::{OfflineWallet, Wallet};
|
||||
use crate::wallet::Wallet;
|
||||
|
||||
fn get_test_db() -> MemoryDatabase {
|
||||
let mut db = MemoryDatabase::new();
|
||||
db.set_tx(&TransactionDetails {
|
||||
transaction: None,
|
||||
txid: Txid::from_str(
|
||||
"4ddff1fa33af17f377f62b72357b43107c19110a8009b36fb832af505efed98a",
|
||||
fn get_test_wallet(
|
||||
descriptor: &str,
|
||||
change_descriptor: Option<&str>,
|
||||
network: Network,
|
||||
) -> Wallet<()> {
|
||||
let mut wallet = Wallet::new_no_persist(descriptor, change_descriptor, network).unwrap();
|
||||
let transaction = Transaction {
|
||||
input: vec![],
|
||||
output: vec![],
|
||||
version: 0,
|
||||
lock_time: bitcoin::PackedLockTime::ZERO,
|
||||
};
|
||||
wallet
|
||||
.insert_checkpoint(BlockId {
|
||||
height: 5001,
|
||||
hash: BlockHash::all_zeros(),
|
||||
})
|
||||
.unwrap();
|
||||
wallet
|
||||
.insert_tx(
|
||||
transaction,
|
||||
ConfirmationTime::Confirmed {
|
||||
height: 5000,
|
||||
time: 0,
|
||||
},
|
||||
)
|
||||
.unwrap(),
|
||||
timestamp: 12345678,
|
||||
received: 100_000,
|
||||
sent: 0,
|
||||
fees: 500,
|
||||
height: Some(5000),
|
||||
})
|
||||
.unwrap();
|
||||
|
||||
db
|
||||
.unwrap();
|
||||
wallet
|
||||
}
|
||||
|
||||
#[test]
|
||||
@@ -234,14 +256,8 @@ mod test {
|
||||
let descriptor = "wpkh(xprv9s21ZrQH143K4CTb63EaMxja1YiTnSEWKMbn23uoEnAzxjdUJRQkazCAtzxGm4LSoTSVTptoV9RbchnKPW9HxKtZumdyxyikZFDLhogJ5Uj/44'/0'/0'/0/*)";
|
||||
let change_descriptor = "wpkh(xprv9s21ZrQH143K4CTb63EaMxja1YiTnSEWKMbn23uoEnAzxjdUJRQkazCAtzxGm4LSoTSVTptoV9RbchnKPW9HxKtZumdyxyikZFDLhogJ5Uj/44'/0'/0'/1/*)";
|
||||
|
||||
let wallet: OfflineWallet<_> = Wallet::new_offline(
|
||||
descriptor,
|
||||
Some(change_descriptor),
|
||||
Network::Bitcoin,
|
||||
get_test_db(),
|
||||
)
|
||||
.unwrap();
|
||||
let export = WalletExport::export_wallet(&wallet, "Test Label", true).unwrap();
|
||||
let wallet = get_test_wallet(descriptor, Some(change_descriptor), Network::Bitcoin);
|
||||
let export = FullyNodedExport::export_wallet(&wallet, "Test Label", true).unwrap();
|
||||
|
||||
assert_eq!(export.descriptor(), descriptor);
|
||||
assert_eq!(export.change_descriptor(), Some(change_descriptor.into()));
|
||||
@@ -258,9 +274,8 @@ mod test {
|
||||
|
||||
let descriptor = "wpkh(xprv9s21ZrQH143K4CTb63EaMxja1YiTnSEWKMbn23uoEnAzxjdUJRQkazCAtzxGm4LSoTSVTptoV9RbchnKPW9HxKtZumdyxyikZFDLhogJ5Uj/44'/0'/0'/0/*)";
|
||||
|
||||
let wallet: OfflineWallet<_> =
|
||||
Wallet::new_offline(descriptor, None, Network::Bitcoin, get_test_db()).unwrap();
|
||||
WalletExport::export_wallet(&wallet, "Test Label", true).unwrap();
|
||||
let wallet = get_test_wallet(descriptor, None, Network::Bitcoin);
|
||||
FullyNodedExport::export_wallet(&wallet, "Test Label", true).unwrap();
|
||||
}
|
||||
|
||||
#[test]
|
||||
@@ -272,14 +287,8 @@ mod test {
|
||||
let descriptor = "wpkh(xprv9s21ZrQH143K4CTb63EaMxja1YiTnSEWKMbn23uoEnAzxjdUJRQkazCAtzxGm4LSoTSVTptoV9RbchnKPW9HxKtZumdyxyikZFDLhogJ5Uj/44'/0'/0'/0/*)";
|
||||
let change_descriptor = "wpkh(xprv9s21ZrQH143K4CTb63EaMxja1YiTnSEWKMbn23uoEnAzxjdUJRQkazCAtzxGm4LSoTSVTptoV9RbchnKPW9HxKtZumdyxyikZFDLhogJ5Uj/50'/0'/1/*)";
|
||||
|
||||
let wallet: OfflineWallet<_> = Wallet::new_offline(
|
||||
descriptor,
|
||||
Some(change_descriptor),
|
||||
Network::Bitcoin,
|
||||
get_test_db(),
|
||||
)
|
||||
.unwrap();
|
||||
WalletExport::export_wallet(&wallet, "Test Label", true).unwrap();
|
||||
let wallet = get_test_wallet(descriptor, Some(change_descriptor), Network::Bitcoin);
|
||||
FullyNodedExport::export_wallet(&wallet, "Test Label", true).unwrap();
|
||||
}
|
||||
|
||||
#[test]
|
||||
@@ -295,14 +304,8 @@ mod test {
|
||||
[c98b1535/48'/0'/0'/2']tpubDCDi5W4sP6zSnzJeowy8rQDVhBdRARaPhK1axABi8V1661wEPeanpEXj4ZLAUEoikVtoWcyK26TKKJSecSfeKxwHCcRrge9k1ybuiL71z4a/1/*\
|
||||
))";
|
||||
|
||||
let wallet: OfflineWallet<_> = Wallet::new_offline(
|
||||
descriptor,
|
||||
Some(change_descriptor),
|
||||
Network::Testnet,
|
||||
get_test_db(),
|
||||
)
|
||||
.unwrap();
|
||||
let export = WalletExport::export_wallet(&wallet, "Test Label", true).unwrap();
|
||||
let wallet = get_test_wallet(descriptor, Some(change_descriptor), Network::Testnet);
|
||||
let export = FullyNodedExport::export_wallet(&wallet, "Test Label", true).unwrap();
|
||||
|
||||
assert_eq!(export.descriptor(), descriptor);
|
||||
assert_eq!(export.change_descriptor(), Some(change_descriptor.into()));
|
||||
@@ -315,14 +318,8 @@ mod test {
|
||||
let descriptor = "wpkh(xprv9s21ZrQH143K4CTb63EaMxja1YiTnSEWKMbn23uoEnAzxjdUJRQkazCAtzxGm4LSoTSVTptoV9RbchnKPW9HxKtZumdyxyikZFDLhogJ5Uj/44'/0'/0'/0/*)";
|
||||
let change_descriptor = "wpkh(xprv9s21ZrQH143K4CTb63EaMxja1YiTnSEWKMbn23uoEnAzxjdUJRQkazCAtzxGm4LSoTSVTptoV9RbchnKPW9HxKtZumdyxyikZFDLhogJ5Uj/44'/0'/0'/1/*)";
|
||||
|
||||
let wallet: OfflineWallet<_> = Wallet::new_offline(
|
||||
descriptor,
|
||||
Some(change_descriptor),
|
||||
Network::Bitcoin,
|
||||
get_test_db(),
|
||||
)
|
||||
.unwrap();
|
||||
let export = WalletExport::export_wallet(&wallet, "Test Label", true).unwrap();
|
||||
let wallet = get_test_wallet(descriptor, Some(change_descriptor), Network::Bitcoin);
|
||||
let export = FullyNodedExport::export_wallet(&wallet, "Test Label", true).unwrap();
|
||||
|
||||
assert_eq!(export.to_string(), "{\"descriptor\":\"wpkh(xprv9s21ZrQH143K4CTb63EaMxja1YiTnSEWKMbn23uoEnAzxjdUJRQkazCAtzxGm4LSoTSVTptoV9RbchnKPW9HxKtZumdyxyikZFDLhogJ5Uj/44\'/0\'/0\'/0/*)\",\"blockheight\":5000,\"label\":\"Test Label\"}");
|
||||
}
|
||||
@@ -333,7 +330,7 @@ mod test {
|
||||
let change_descriptor = "wpkh(xprv9s21ZrQH143K4CTb63EaMxja1YiTnSEWKMbn23uoEnAzxjdUJRQkazCAtzxGm4LSoTSVTptoV9RbchnKPW9HxKtZumdyxyikZFDLhogJ5Uj/44'/0'/0'/1/*)";
|
||||
|
||||
let import_str = "{\"descriptor\":\"wpkh(xprv9s21ZrQH143K4CTb63EaMxja1YiTnSEWKMbn23uoEnAzxjdUJRQkazCAtzxGm4LSoTSVTptoV9RbchnKPW9HxKtZumdyxyikZFDLhogJ5Uj/44\'/0\'/0\'/0/*)\",\"blockheight\":5000,\"label\":\"Test Label\"}";
|
||||
let export = WalletExport::from_str(import_str).unwrap();
|
||||
let export = FullyNodedExport::from_str(import_str).unwrap();
|
||||
|
||||
assert_eq!(export.descriptor(), descriptor);
|
||||
assert_eq!(export.change_descriptor(), Some(change_descriptor.into()));
|
||||
98
crates/bdk/src/wallet/hardwaresigner.rs
Normal file
98
crates/bdk/src/wallet/hardwaresigner.rs
Normal file
@@ -0,0 +1,98 @@
|
||||
// Bitcoin Dev Kit
|
||||
// Written in 2020 by Alekos Filini <alekos.filini@gmail.com>
|
||||
//
|
||||
// Copyright (c) 2020-2021 Bitcoin Dev Kit Developers
|
||||
//
|
||||
// This file is licensed under the Apache License, Version 2.0 <LICENSE-APACHE
|
||||
// or http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
|
||||
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your option.
|
||||
// You may not use this file except in accordance with one or both of these
|
||||
// licenses.
|
||||
|
||||
//! HWI Signer
|
||||
//!
|
||||
//! This module contains HWISigner, an implementation of a [TransactionSigner] to be
|
||||
//! used with hardware wallets.
|
||||
//! ```no_run
|
||||
//! # use bdk::bitcoin::Network;
|
||||
//! # use bdk::signer::SignerOrdering;
|
||||
//! # use bdk::wallet::hardwaresigner::HWISigner;
|
||||
//! # use bdk::wallet::AddressIndex::New;
|
||||
//! # use bdk::{FeeRate, KeychainKind, SignOptions, Wallet};
|
||||
//! # use hwi::{types::HWIChain, HWIClient};
|
||||
//! # use std::sync::Arc;
|
||||
//! #
|
||||
//! # fn main() -> Result<(), Box<dyn std::error::Error>> {
|
||||
//! let mut devices = HWIClient::enumerate()?;
|
||||
//! if devices.is_empty() {
|
||||
//! panic!("No devices found!");
|
||||
//! }
|
||||
//! let first_device = devices.remove(0)?;
|
||||
//! let custom_signer = HWISigner::from_device(&first_device, HWIChain::Test)?;
|
||||
//!
|
||||
//! # let mut wallet = Wallet::new_no_persist(
|
||||
//! # "",
|
||||
//! # None,
|
||||
//! # Network::Testnet,
|
||||
//! # )?;
|
||||
//! #
|
||||
//! // Adding the hardware signer to the BDK wallet
|
||||
//! wallet.add_signer(
|
||||
//! KeychainKind::External,
|
||||
//! SignerOrdering(200),
|
||||
//! Arc::new(custom_signer),
|
||||
//! );
|
||||
//!
|
||||
//! # Ok(())
|
||||
//! # }
|
||||
//! ```
|
||||
|
||||
use bitcoin::psbt::PartiallySignedTransaction;
|
||||
use bitcoin::secp256k1::{All, Secp256k1};
|
||||
use bitcoin::util::bip32::Fingerprint;
|
||||
|
||||
use hwi::error::Error;
|
||||
use hwi::types::{HWIChain, HWIDevice};
|
||||
use hwi::HWIClient;
|
||||
|
||||
use crate::signer::{SignerCommon, SignerError, SignerId, TransactionSigner};
|
||||
|
||||
#[derive(Debug)]
|
||||
/// Custom signer for Hardware Wallets
|
||||
///
|
||||
/// This ignores `sign_options` and leaves the decisions up to the hardware wallet.
|
||||
pub struct HWISigner {
|
||||
fingerprint: Fingerprint,
|
||||
client: HWIClient,
|
||||
}
|
||||
|
||||
impl HWISigner {
|
||||
/// Create a instance from the specified device and chain
|
||||
pub fn from_device(device: &HWIDevice, chain: HWIChain) -> Result<HWISigner, Error> {
|
||||
let client = HWIClient::get_client(device, false, chain)?;
|
||||
Ok(HWISigner {
|
||||
fingerprint: device.fingerprint,
|
||||
client,
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
impl SignerCommon for HWISigner {
|
||||
fn id(&self, _secp: &Secp256k1<All>) -> SignerId {
|
||||
SignerId::Fingerprint(self.fingerprint)
|
||||
}
|
||||
}
|
||||
|
||||
/// This implementation ignores `sign_options`
|
||||
impl TransactionSigner for HWISigner {
|
||||
fn sign_transaction(
|
||||
&self,
|
||||
psbt: &mut PartiallySignedTransaction,
|
||||
_sign_options: &crate::SignOptions,
|
||||
_secp: &crate::wallet::utils::SecpCtx,
|
||||
) -> Result<(), SignerError> {
|
||||
psbt.combine(self.client.sign_tx(psbt)?.psbt)
|
||||
.expect("Failed to combine HW signed psbt with passed PSBT");
|
||||
Ok(())
|
||||
}
|
||||
}
|
||||
1803
crates/bdk/src/wallet/mod.rs
Normal file
1803
crates/bdk/src/wallet/mod.rs
Normal file
File diff suppressed because it is too large
Load Diff
1140
crates/bdk/src/wallet/signer.rs
Normal file
1140
crates/bdk/src/wallet/signer.rs
Normal file
File diff suppressed because it is too large
Load Diff
944
crates/bdk/src/wallet/tx_builder.rs
Normal file
944
crates/bdk/src/wallet/tx_builder.rs
Normal file
@@ -0,0 +1,944 @@
|
||||
// Bitcoin Dev Kit
|
||||
// Written in 2020 by Alekos Filini <alekos.filini@gmail.com>
|
||||
//
|
||||
// Copyright (c) 2020-2021 Bitcoin Dev Kit Developers
|
||||
//
|
||||
// This file is licensed under the Apache License, Version 2.0 <LICENSE-APACHE
|
||||
// or http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
|
||||
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your option.
|
||||
// You may not use this file except in accordance with one or both of these
|
||||
// licenses.
|
||||
|
||||
//! Transaction builder
|
||||
//!
|
||||
//! ## Example
|
||||
//!
|
||||
//! ```
|
||||
//! # use std::str::FromStr;
|
||||
//! # use bitcoin::*;
|
||||
//! # use bdk::*;
|
||||
//! # use bdk::wallet::tx_builder::CreateTx;
|
||||
//! # let to_address = Address::from_str("2N4eQYCbKUHCCTUjBJeHcJp9ok6J2GZsTDt").unwrap();
|
||||
//! # let mut wallet = doctest_wallet!();
|
||||
//! // create a TxBuilder from a wallet
|
||||
//! let mut tx_builder = wallet.build_tx();
|
||||
//!
|
||||
//! tx_builder
|
||||
//! // Create a transaction with one output to `to_address` of 50_000 satoshi
|
||||
//! .add_recipient(to_address.script_pubkey(), 50_000)
|
||||
//! // With a custom fee rate of 5.0 satoshi/vbyte
|
||||
//! .fee_rate(FeeRate::from_sat_per_vb(5.0))
|
||||
//! // Only spend non-change outputs
|
||||
//! .do_not_spend_change()
|
||||
//! // Turn on RBF signaling
|
||||
//! .enable_rbf();
|
||||
//! let (psbt, tx_details) = tx_builder.finish()?;
|
||||
//! # Ok::<(), bdk::Error>(())
|
||||
//! ```
|
||||
|
||||
use crate::collections::BTreeMap;
|
||||
use crate::collections::HashSet;
|
||||
use alloc::{boxed::Box, rc::Rc, string::String, vec::Vec};
|
||||
use bdk_chain::ConfirmationTime;
|
||||
use core::cell::RefCell;
|
||||
use core::marker::PhantomData;
|
||||
|
||||
use bitcoin::util::psbt::{self, PartiallySignedTransaction as Psbt};
|
||||
use bitcoin::{LockTime, OutPoint, Script, Sequence, Transaction};
|
||||
|
||||
use super::coin_selection::{CoinSelectionAlgorithm, DefaultCoinSelectionAlgorithm};
|
||||
use super::persist;
|
||||
use crate::{
|
||||
types::{FeeRate, KeychainKind, LocalUtxo, WeightedUtxo},
|
||||
TransactionDetails,
|
||||
};
|
||||
use crate::{Error, Utxo, Wallet};
|
||||
/// Context in which the [`TxBuilder`] is valid
|
||||
pub trait TxBuilderContext: core::fmt::Debug + Default + Clone {}
|
||||
|
||||
/// Marker type to indicate the [`TxBuilder`] is being used to create a new transaction (as opposed
|
||||
/// to bumping the fee of an existing one).
|
||||
#[derive(Debug, Default, Clone)]
|
||||
pub struct CreateTx;
|
||||
impl TxBuilderContext for CreateTx {}
|
||||
|
||||
/// Marker type to indicate the [`TxBuilder`] is being used to bump the fee of an existing transaction.
|
||||
#[derive(Debug, Default, Clone)]
|
||||
pub struct BumpFee;
|
||||
impl TxBuilderContext for BumpFee {}
|
||||
|
||||
/// A transaction builder
|
||||
///
|
||||
/// A `TxBuilder` is created by calling [`build_tx`] or [`build_fee_bump`] on a wallet. After
|
||||
/// assigning it, you set options on it until finally calling [`finish`] to consume the builder and
|
||||
/// generate the transaction.
|
||||
///
|
||||
/// Each option setting method on `TxBuilder` takes and returns `&mut self` so you can chain calls
|
||||
/// as in the following example:
|
||||
///
|
||||
/// ```
|
||||
/// # use bdk::*;
|
||||
/// # use bdk::wallet::tx_builder::*;
|
||||
/// # use bitcoin::*;
|
||||
/// # use core::str::FromStr;
|
||||
/// # let mut wallet = doctest_wallet!();
|
||||
/// # let addr1 = Address::from_str("2N4eQYCbKUHCCTUjBJeHcJp9ok6J2GZsTDt").unwrap();
|
||||
/// # let addr2 = addr1.clone();
|
||||
/// // chaining
|
||||
/// let (psbt1, details) = {
|
||||
/// let mut builder = wallet.build_tx();
|
||||
/// builder
|
||||
/// .ordering(TxOrdering::Untouched)
|
||||
/// .add_recipient(addr1.script_pubkey(), 50_000)
|
||||
/// .add_recipient(addr2.script_pubkey(), 50_000);
|
||||
/// builder.finish()?
|
||||
/// };
|
||||
///
|
||||
/// // non-chaining
|
||||
/// let (psbt2, details) = {
|
||||
/// let mut builder = wallet.build_tx();
|
||||
/// builder.ordering(TxOrdering::Untouched);
|
||||
/// for addr in &[addr1, addr2] {
|
||||
/// builder.add_recipient(addr.script_pubkey(), 50_000);
|
||||
/// }
|
||||
/// builder.finish()?
|
||||
/// };
|
||||
///
|
||||
/// assert_eq!(psbt1.unsigned_tx.output[..2], psbt2.unsigned_tx.output[..2]);
|
||||
/// # Ok::<(), bdk::Error>(())
|
||||
/// ```
|
||||
///
|
||||
/// At the moment [`coin_selection`] is an exception to the rule as it consumes `self`.
|
||||
/// This means it is usually best to call [`coin_selection`] on the return value of `build_tx` before assigning it.
|
||||
///
|
||||
/// For further examples see [this module](super::tx_builder)'s documentation;
|
||||
///
|
||||
/// [`build_tx`]: Wallet::build_tx
|
||||
/// [`build_fee_bump`]: Wallet::build_fee_bump
|
||||
/// [`finish`]: Self::finish
|
||||
/// [`coin_selection`]: Self::coin_selection
|
||||
#[derive(Debug)]
|
||||
pub struct TxBuilder<'a, D, Cs, Ctx> {
|
||||
pub(crate) wallet: Rc<RefCell<&'a mut Wallet<D>>>,
|
||||
pub(crate) params: TxParams,
|
||||
pub(crate) coin_selection: Cs,
|
||||
pub(crate) phantom: PhantomData<Ctx>,
|
||||
}
|
||||
|
||||
/// The parameters for transaction creation sans coin selection algorithm.
|
||||
//TODO: TxParams should eventually be exposed publicly.
|
||||
#[derive(Default, Debug, Clone)]
|
||||
pub(crate) struct TxParams {
|
||||
pub(crate) recipients: Vec<(Script, u64)>,
|
||||
pub(crate) drain_wallet: bool,
|
||||
pub(crate) drain_to: Option<Script>,
|
||||
pub(crate) fee_policy: Option<FeePolicy>,
|
||||
pub(crate) internal_policy_path: Option<BTreeMap<String, Vec<usize>>>,
|
||||
pub(crate) external_policy_path: Option<BTreeMap<String, Vec<usize>>>,
|
||||
pub(crate) utxos: Vec<WeightedUtxo>,
|
||||
pub(crate) unspendable: HashSet<OutPoint>,
|
||||
pub(crate) manually_selected_only: bool,
|
||||
pub(crate) sighash: Option<psbt::PsbtSighashType>,
|
||||
pub(crate) ordering: TxOrdering,
|
||||
pub(crate) locktime: Option<LockTime>,
|
||||
pub(crate) rbf: Option<RbfValue>,
|
||||
pub(crate) version: Option<Version>,
|
||||
pub(crate) change_policy: ChangeSpendPolicy,
|
||||
pub(crate) only_witness_utxo: bool,
|
||||
pub(crate) add_global_xpubs: bool,
|
||||
pub(crate) include_output_redeem_witness_script: bool,
|
||||
pub(crate) bumping_fee: Option<PreviousFee>,
|
||||
pub(crate) current_height: Option<LockTime>,
|
||||
pub(crate) allow_dust: bool,
|
||||
}
|
||||
|
||||
#[derive(Clone, Copy, Debug)]
|
||||
pub(crate) struct PreviousFee {
|
||||
pub absolute: u64,
|
||||
pub rate: f32,
|
||||
}
|
||||
|
||||
#[derive(Debug, Clone, Copy)]
|
||||
pub(crate) enum FeePolicy {
|
||||
FeeRate(FeeRate),
|
||||
FeeAmount(u64),
|
||||
}
|
||||
|
||||
impl Default for FeePolicy {
|
||||
fn default() -> Self {
|
||||
FeePolicy::FeeRate(FeeRate::default_min_relay_fee())
|
||||
}
|
||||
}
|
||||
|
||||
impl<'a, D, Cs: Clone, Ctx> Clone for TxBuilder<'a, D, Cs, Ctx> {
|
||||
fn clone(&self) -> Self {
|
||||
TxBuilder {
|
||||
wallet: self.wallet.clone(),
|
||||
params: self.params.clone(),
|
||||
coin_selection: self.coin_selection.clone(),
|
||||
phantom: PhantomData,
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// methods supported by both contexts, for any CoinSelectionAlgorithm
|
||||
impl<'a, D, Cs: CoinSelectionAlgorithm, Ctx: TxBuilderContext> TxBuilder<'a, D, Cs, Ctx> {
|
||||
/// Set a custom fee rate
|
||||
pub fn fee_rate(&mut self, fee_rate: FeeRate) -> &mut Self {
|
||||
self.params.fee_policy = Some(FeePolicy::FeeRate(fee_rate));
|
||||
self
|
||||
}
|
||||
|
||||
/// Set an absolute fee
|
||||
pub fn fee_absolute(&mut self, fee_amount: u64) -> &mut Self {
|
||||
self.params.fee_policy = Some(FeePolicy::FeeAmount(fee_amount));
|
||||
self
|
||||
}
|
||||
|
||||
/// Set the policy path to use while creating the transaction for a given keychain.
|
||||
///
|
||||
/// This method accepts a map where the key is the policy node id (see
|
||||
/// [`Policy::id`](crate::descriptor::Policy::id)) and the value is the list of the indexes of
|
||||
/// the items that are intended to be satisfied from the policy node (see
|
||||
/// [`SatisfiableItem::Thresh::items`](crate::descriptor::policy::SatisfiableItem::Thresh::items)).
|
||||
///
|
||||
/// ## Example
|
||||
///
|
||||
/// An example of when the policy path is needed is the following descriptor:
|
||||
/// `wsh(thresh(2,pk(A),sj:and_v(v:pk(B),n:older(6)),snj:and_v(v:pk(C),after(630000))))`,
|
||||
/// derived from the miniscript policy `thresh(2,pk(A),and(pk(B),older(6)),and(pk(C),after(630000)))`.
|
||||
/// It declares three descriptor fragments, and at the top level it uses `thresh()` to
|
||||
/// ensure that at least two of them are satisfied. The individual fragments are:
|
||||
///
|
||||
/// 1. `pk(A)`
|
||||
/// 2. `and(pk(B),older(6))`
|
||||
/// 3. `and(pk(C),after(630000))`
|
||||
///
|
||||
/// When those conditions are combined in pairs, it's clear that the transaction needs to be created
|
||||
/// differently depending on how the user intends to satisfy the policy afterwards:
|
||||
///
|
||||
/// * If fragments `1` and `2` are used, the transaction will need to use a specific
|
||||
/// `n_sequence` in order to spend an `OP_CSV` branch.
|
||||
/// * If fragments `1` and `3` are used, the transaction will need to use a specific `locktime`
|
||||
/// in order to spend an `OP_CLTV` branch.
|
||||
/// * If fragments `2` and `3` are used, the transaction will need both.
|
||||
///
|
||||
/// When the spending policy is represented as a tree (see
|
||||
/// [`Wallet::policies`](super::Wallet::policies)), every node
|
||||
/// is assigned a unique identifier that can be used in the policy path to specify which of
|
||||
/// the node's children the user intends to satisfy: for instance, assuming the `thresh()`
|
||||
/// root node of this example has an id of `aabbccdd`, the policy path map would look like:
|
||||
///
|
||||
/// `{ "aabbccdd" => [0, 1] }`
|
||||
///
|
||||
/// where the key is the node's id, and the value is a list of the children that should be
|
||||
/// used, in no particular order.
|
||||
///
|
||||
/// If a particularly complex descriptor has multiple ambiguous thresholds in its structure,
|
||||
/// multiple entries can be added to the map, one for each node that requires an explicit path.
|
||||
///
|
||||
/// ```
|
||||
/// # use std::str::FromStr;
|
||||
/// # use std::collections::BTreeMap;
|
||||
/// # use bitcoin::*;
|
||||
/// # use bdk::*;
|
||||
/// # let to_address = Address::from_str("2N4eQYCbKUHCCTUjBJeHcJp9ok6J2GZsTDt").unwrap();
|
||||
/// # let mut wallet = doctest_wallet!();
|
||||
/// let mut path = BTreeMap::new();
|
||||
/// path.insert("aabbccdd".to_string(), vec![0, 1]);
|
||||
///
|
||||
/// let builder = wallet
|
||||
/// .build_tx()
|
||||
/// .add_recipient(to_address.script_pubkey(), 50_000)
|
||||
/// .policy_path(path, KeychainKind::External);
|
||||
///
|
||||
/// # Ok::<(), bdk::Error>(())
|
||||
/// ```
|
||||
pub fn policy_path(
|
||||
&mut self,
|
||||
policy_path: BTreeMap<String, Vec<usize>>,
|
||||
keychain: KeychainKind,
|
||||
) -> &mut Self {
|
||||
let to_update = match keychain {
|
||||
KeychainKind::Internal => &mut self.params.internal_policy_path,
|
||||
KeychainKind::External => &mut self.params.external_policy_path,
|
||||
};
|
||||
|
||||
*to_update = Some(policy_path);
|
||||
self
|
||||
}
|
||||
|
||||
/// Add the list of outpoints to the internal list of UTXOs that **must** be spent.
|
||||
///
|
||||
/// If an error occurs while adding any of the UTXOs then none of them are added and the error is returned.
|
||||
///
|
||||
/// These have priority over the "unspendable" utxos, meaning that if a utxo is present both in
|
||||
/// the "utxos" and the "unspendable" list, it will be spent.
|
||||
pub fn add_utxos(&mut self, outpoints: &[OutPoint]) -> Result<&mut Self, Error> {
|
||||
{
|
||||
let wallet = self.wallet.borrow();
|
||||
let utxos = outpoints
|
||||
.iter()
|
||||
.map(|outpoint| wallet.get_utxo(*outpoint).ok_or(Error::UnknownUtxo))
|
||||
.collect::<Result<Vec<_>, _>>()?;
|
||||
|
||||
for utxo in utxos {
|
||||
let descriptor = wallet.get_descriptor_for_keychain(utxo.keychain);
|
||||
let satisfaction_weight = descriptor.max_satisfaction_weight().unwrap();
|
||||
self.params.utxos.push(WeightedUtxo {
|
||||
satisfaction_weight,
|
||||
utxo: Utxo::Local(utxo),
|
||||
});
|
||||
}
|
||||
}
|
||||
|
||||
Ok(self)
|
||||
}
|
||||
|
||||
/// Add a utxo to the internal list of utxos that **must** be spent
|
||||
///
|
||||
/// These have priority over the "unspendable" utxos, meaning that if a utxo is present both in
|
||||
/// the "utxos" and the "unspendable" list, it will be spent.
|
||||
pub fn add_utxo(&mut self, outpoint: OutPoint) -> Result<&mut Self, Error> {
|
||||
self.add_utxos(&[outpoint])
|
||||
}
|
||||
|
||||
/// Add a foreign UTXO i.e. a UTXO not owned by this wallet.
|
||||
///
|
||||
/// At a minimum to add a foreign UTXO we need:
|
||||
///
|
||||
/// 1. `outpoint`: To add it to the raw transaction.
|
||||
/// 2. `psbt_input`: To know the value.
|
||||
/// 3. `satisfaction_weight`: To know how much weight/vbytes the input will add to the transaction for fee calculation.
|
||||
///
|
||||
/// There are several security concerns about adding foreign UTXOs that application
|
||||
/// developers should consider. First, how do you know the value of the input is correct? If a
|
||||
/// `non_witness_utxo` is provided in the `psbt_input` then this method implicitly verifies the
|
||||
/// value by checking it against the transaction. If only a `witness_utxo` is provided then this
|
||||
/// method doesn't verify the value but just takes it as a given -- it is up to you to check
|
||||
/// that whoever sent you the `input_psbt` was not lying!
|
||||
///
|
||||
/// Secondly, you must somehow provide `satisfaction_weight` of the input. Depending on your
|
||||
/// application it may be important that this be known precisely. If not, a malicious
|
||||
/// counterparty may fool you into putting in a value that is too low, giving the transaction a
|
||||
/// lower than expected feerate. They could also fool you into putting a value that is too high
|
||||
/// causing you to pay a fee that is too high. The party who is broadcasting the transaction can
|
||||
/// of course check the real input weight matches the expected weight prior to broadcasting.
|
||||
///
|
||||
/// To guarantee the `satisfaction_weight` is correct, you can require the party providing the
|
||||
/// `psbt_input` provide a miniscript descriptor for the input so you can check it against the
|
||||
/// `script_pubkey` and then ask it for the [`max_satisfaction_weight`].
|
||||
///
|
||||
/// This is an **EXPERIMENTAL** feature, API and other major changes are expected.
|
||||
///
|
||||
/// # Errors
|
||||
///
|
||||
/// This method returns errors in the following circumstances:
|
||||
///
|
||||
/// 1. The `psbt_input` does not contain a `witness_utxo` or `non_witness_utxo`.
|
||||
/// 2. The data in `non_witness_utxo` does not match what is in `outpoint`.
|
||||
///
|
||||
/// Note unless you set [`only_witness_utxo`] any non-taproot `psbt_input` you pass to this
|
||||
/// method must have `non_witness_utxo` set otherwise you will get an error when [`finish`]
|
||||
/// is called.
|
||||
///
|
||||
/// [`only_witness_utxo`]: Self::only_witness_utxo
|
||||
/// [`finish`]: Self::finish
|
||||
/// [`max_satisfaction_weight`]: miniscript::Descriptor::max_satisfaction_weight
|
||||
pub fn add_foreign_utxo(
|
||||
&mut self,
|
||||
outpoint: OutPoint,
|
||||
psbt_input: psbt::Input,
|
||||
satisfaction_weight: usize,
|
||||
) -> Result<&mut Self, Error> {
|
||||
if psbt_input.witness_utxo.is_none() {
|
||||
match psbt_input.non_witness_utxo.as_ref() {
|
||||
Some(tx) => {
|
||||
if tx.txid() != outpoint.txid {
|
||||
return Err(Error::Generic(
|
||||
"Foreign utxo outpoint does not match PSBT input".into(),
|
||||
));
|
||||
}
|
||||
if tx.output.len() <= outpoint.vout as usize {
|
||||
return Err(Error::InvalidOutpoint(outpoint));
|
||||
}
|
||||
}
|
||||
None => {
|
||||
return Err(Error::Generic(
|
||||
"Foreign utxo missing witness_utxo or non_witness_utxo".into(),
|
||||
))
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
self.params.utxos.push(WeightedUtxo {
|
||||
satisfaction_weight,
|
||||
utxo: Utxo::Foreign {
|
||||
outpoint,
|
||||
psbt_input: Box::new(psbt_input),
|
||||
},
|
||||
});
|
||||
|
||||
Ok(self)
|
||||
}
|
||||
|
||||
/// Only spend utxos added by [`add_utxo`].
|
||||
///
|
||||
/// The wallet will **not** add additional utxos to the transaction even if they are needed to
|
||||
/// make the transaction valid.
|
||||
///
|
||||
/// [`add_utxo`]: Self::add_utxo
|
||||
pub fn manually_selected_only(&mut self) -> &mut Self {
|
||||
self.params.manually_selected_only = true;
|
||||
self
|
||||
}
|
||||
|
||||
/// Replace the internal list of unspendable utxos with a new list
|
||||
///
|
||||
/// It's important to note that the "must-be-spent" utxos added with [`TxBuilder::add_utxo`]
|
||||
/// have priority over these. See the docs of the two linked methods for more details.
|
||||
pub fn unspendable(&mut self, unspendable: Vec<OutPoint>) -> &mut Self {
|
||||
self.params.unspendable = unspendable.into_iter().collect();
|
||||
self
|
||||
}
|
||||
|
||||
/// Add a utxo to the internal list of unspendable utxos
|
||||
///
|
||||
/// It's important to note that the "must-be-spent" utxos added with [`TxBuilder::add_utxo`]
|
||||
/// have priority over this. See the docs of the two linked methods for more details.
|
||||
pub fn add_unspendable(&mut self, unspendable: OutPoint) -> &mut Self {
|
||||
self.params.unspendable.insert(unspendable);
|
||||
self
|
||||
}
|
||||
|
||||
/// Sign with a specific sig hash
|
||||
///
|
||||
/// **Use this option very carefully**
|
||||
pub fn sighash(&mut self, sighash: psbt::PsbtSighashType) -> &mut Self {
|
||||
self.params.sighash = Some(sighash);
|
||||
self
|
||||
}
|
||||
|
||||
/// Choose the ordering for inputs and outputs of the transaction
|
||||
pub fn ordering(&mut self, ordering: TxOrdering) -> &mut Self {
|
||||
self.params.ordering = ordering;
|
||||
self
|
||||
}
|
||||
|
||||
/// Use a specific nLockTime while creating the transaction
|
||||
///
|
||||
/// This can cause conflicts if the wallet's descriptors contain an "after" (OP_CLTV) operator.
|
||||
pub fn nlocktime(&mut self, locktime: LockTime) -> &mut Self {
|
||||
self.params.locktime = Some(locktime);
|
||||
self
|
||||
}
|
||||
|
||||
/// Build a transaction with a specific version
|
||||
///
|
||||
/// The `version` should always be greater than `0` and greater than `1` if the wallet's
|
||||
/// descriptors contain an "older" (OP_CSV) operator.
|
||||
pub fn version(&mut self, version: i32) -> &mut Self {
|
||||
self.params.version = Some(Version(version));
|
||||
self
|
||||
}
|
||||
|
||||
/// Do not spend change outputs
|
||||
///
|
||||
/// This effectively adds all the change outputs to the "unspendable" list. See
|
||||
/// [`TxBuilder::unspendable`].
|
||||
pub fn do_not_spend_change(&mut self) -> &mut Self {
|
||||
self.params.change_policy = ChangeSpendPolicy::ChangeForbidden;
|
||||
self
|
||||
}
|
||||
|
||||
/// Only spend change outputs
|
||||
///
|
||||
/// This effectively adds all the non-change outputs to the "unspendable" list. See
|
||||
/// [`TxBuilder::unspendable`].
|
||||
pub fn only_spend_change(&mut self) -> &mut Self {
|
||||
self.params.change_policy = ChangeSpendPolicy::OnlyChange;
|
||||
self
|
||||
}
|
||||
|
||||
/// Set a specific [`ChangeSpendPolicy`]. See [`TxBuilder::do_not_spend_change`] and
|
||||
/// [`TxBuilder::only_spend_change`] for some shortcuts.
|
||||
pub fn change_policy(&mut self, change_policy: ChangeSpendPolicy) -> &mut Self {
|
||||
self.params.change_policy = change_policy;
|
||||
self
|
||||
}
|
||||
|
||||
/// Only Fill-in the [`psbt::Input::witness_utxo`](bitcoin::util::psbt::Input::witness_utxo) field when spending from
|
||||
/// SegWit descriptors.
|
||||
///
|
||||
/// This reduces the size of the PSBT, but some signers might reject them due to the lack of
|
||||
/// the `non_witness_utxo`.
|
||||
pub fn only_witness_utxo(&mut self) -> &mut Self {
|
||||
self.params.only_witness_utxo = true;
|
||||
self
|
||||
}
|
||||
|
||||
/// Fill-in the [`psbt::Output::redeem_script`](bitcoin::util::psbt::Output::redeem_script) and
|
||||
/// [`psbt::Output::witness_script`](bitcoin::util::psbt::Output::witness_script) fields.
|
||||
///
|
||||
/// This is useful for signers which always require it, like ColdCard hardware wallets.
|
||||
pub fn include_output_redeem_witness_script(&mut self) -> &mut Self {
|
||||
self.params.include_output_redeem_witness_script = true;
|
||||
self
|
||||
}
|
||||
|
||||
/// Fill-in the `PSBT_GLOBAL_XPUB` field with the extended keys contained in both the external
|
||||
/// and internal descriptors
|
||||
///
|
||||
/// This is useful for offline signers that take part to a multisig. Some hardware wallets like
|
||||
/// BitBox and ColdCard are known to require this.
|
||||
pub fn add_global_xpubs(&mut self) -> &mut Self {
|
||||
self.params.add_global_xpubs = true;
|
||||
self
|
||||
}
|
||||
|
||||
/// Spend all the available inputs. This respects filters like [`TxBuilder::unspendable`] and the change policy.
|
||||
pub fn drain_wallet(&mut self) -> &mut Self {
|
||||
self.params.drain_wallet = true;
|
||||
self
|
||||
}
|
||||
|
||||
/// Choose the coin selection algorithm
|
||||
///
|
||||
/// Overrides the [`DefaultCoinSelectionAlgorithm`](super::coin_selection::DefaultCoinSelectionAlgorithm).
|
||||
///
|
||||
/// Note that this function consumes the builder and returns it so it is usually best to put this as the first call on the builder.
|
||||
pub fn coin_selection<P: CoinSelectionAlgorithm>(
|
||||
self,
|
||||
coin_selection: P,
|
||||
) -> TxBuilder<'a, D, P, Ctx> {
|
||||
TxBuilder {
|
||||
wallet: self.wallet,
|
||||
params: self.params,
|
||||
coin_selection,
|
||||
phantom: PhantomData,
|
||||
}
|
||||
}
|
||||
|
||||
/// Finish building the transaction.
|
||||
///
|
||||
/// Returns the [`BIP174`] "PSBT" and summary details about the transaction.
|
||||
///
|
||||
/// [`BIP174`]: https://github.com/bitcoin/bips/blob/master/bip-0174.mediawiki
|
||||
pub fn finish(self) -> Result<(Psbt, TransactionDetails), Error>
|
||||
where
|
||||
D: persist::PersistBackend<KeychainKind, ConfirmationTime>,
|
||||
{
|
||||
self.wallet
|
||||
.borrow_mut()
|
||||
.create_tx(self.coin_selection, self.params)
|
||||
}
|
||||
|
||||
/// Enable signaling RBF
|
||||
///
|
||||
/// This will use the default nSequence value of `0xFFFFFFFD`.
|
||||
pub fn enable_rbf(&mut self) -> &mut Self {
|
||||
self.params.rbf = Some(RbfValue::Default);
|
||||
self
|
||||
}
|
||||
|
||||
    /// Enable signaling RBF with a specific nSequence value
    ///
    /// This can cause conflicts if the wallet's descriptors contain an "older" (OP_CSV) operator
    /// and the given `nsequence` is lower than the CSV value.
    ///
    /// If the `nsequence` is higher than `0xFFFFFFFD` an error will be thrown, since it would not
    /// be a valid nSequence to signal RBF.
    pub fn enable_rbf_with_sequence(&mut self, nsequence: Sequence) -> &mut Self {
        // Validation of the value happens later; here we just record it.
        self.params.rbf = Some(RbfValue::Value(nsequence));
        self
    }
|
||||
|
||||
    /// Set the current blockchain height.
    ///
    /// This will be used to:
    /// 1. Set the nLockTime for preventing fee sniping.
    ///    **Note**: This will be ignored if you manually specify a nlocktime using [`TxBuilder::nlocktime`].
    /// 2. Decide whether coinbase outputs are mature or not. If the coinbase outputs are not
    ///    mature at `current_height`, we ignore them in the coin selection.
    ///    If you want to create a transaction that spends immature coinbase inputs, manually
    ///    add them using [`TxBuilder::add_utxos`].
    ///
    /// In both cases, if you don't provide a current height, we use the last sync height.
    ///
    /// # Panics
    ///
    /// Panics if `height` is rejected by [`LockTime::from_height`] (i.e. it is not a valid
    /// block-height locktime value).
    pub fn current_height(&mut self, height: u32) -> &mut Self {
        self.params.current_height = Some(LockTime::from_height(height).expect("Invalid height"));
        self
    }
|
||||
|
||||
    /// Set whether or not the dust limit is checked.
    ///
    /// **Note**: by avoiding a dust limit check you may end up with a transaction that is non-standard.
    pub fn allow_dust(&mut self, allow_dust: bool) -> &mut Self {
        self.params.allow_dust = allow_dust;
        self
    }
|
||||
}
|
||||
|
||||
// Methods available only when building a brand new transaction (`CreateTx` context).
impl<'a, D, Cs: CoinSelectionAlgorithm> TxBuilder<'a, D, Cs, CreateTx> {
    /// Replace the recipients already added with a new list
    pub fn set_recipients(&mut self, recipients: Vec<(Script, u64)>) -> &mut Self {
        self.params.recipients = recipients;
        self
    }

    /// Add a recipient to the internal list
    pub fn add_recipient(&mut self, script_pubkey: Script, amount: u64) -> &mut Self {
        self.params.recipients.push((script_pubkey, amount));
        self
    }

    /// Add data as an output, using OP_RETURN
    ///
    /// The data is embedded in a zero-value output appended to the recipient list.
    pub fn add_data(&mut self, data: &[u8]) -> &mut Self {
        let script = Script::new_op_return(data);
        self.add_recipient(script, 0u64);
        self
    }

    /// Sets the address to *drain* excess coins to.
    ///
    /// Usually, when there are excess coins they are sent to a change address generated by the
    /// wallet. This option replaces the usual change address with an arbitrary `script_pubkey` of
    /// your choosing. Just as with a change output, if the drain output is not needed (the excess
    /// coins are too small) it will not be included in the resulting transaction. The only
    /// difference is that it is valid to use `drain_to` without setting any ordinary recipients
    /// with [`add_recipient`] (but it is perfectly fine to add recipients as well).
    ///
    /// If you choose not to set any recipients, you should either provide the utxos that the
    /// transaction should spend via [`add_utxos`], or set [`drain_wallet`] to spend all of them.
    ///
    /// When bumping the fees of a transaction made with this option, you probably want to
    /// use [`allow_shrinking`] to allow this output to be reduced to pay for the extra fees.
    ///
    /// # Example
    ///
    /// `drain_to` is very useful for draining all the coins in a wallet with [`drain_wallet`] to a
    /// single address.
    ///
    /// ```
    /// # use std::str::FromStr;
    /// # use bitcoin::*;
    /// # use bdk::*;
    /// # use bdk::wallet::tx_builder::CreateTx;
    /// # let to_address = Address::from_str("2N4eQYCbKUHCCTUjBJeHcJp9ok6J2GZsTDt").unwrap();
    /// # let mut wallet = doctest_wallet!();
    /// let mut tx_builder = wallet.build_tx();
    ///
    /// tx_builder
    ///     // Spend all outputs in this wallet.
    ///     .drain_wallet()
    ///     // Send the excess (which is all the coins minus the fee) to this address.
    ///     .drain_to(to_address.script_pubkey())
    ///     .fee_rate(FeeRate::from_sat_per_vb(5.0))
    ///     .enable_rbf();
    /// let (psbt, tx_details) = tx_builder.finish()?;
    /// # Ok::<(), bdk::Error>(())
    /// ```
    ///
    /// [`allow_shrinking`]: Self::allow_shrinking
    /// [`add_recipient`]: Self::add_recipient
    /// [`add_utxos`]: Self::add_utxos
    /// [`drain_wallet`]: Self::drain_wallet
    pub fn drain_to(&mut self, script_pubkey: Script) -> &mut Self {
        self.params.drain_to = Some(script_pubkey);
        self
    }
}
|
||||
|
||||
// methods supported only by bump_fee
|
||||
impl<'a, D> TxBuilder<'a, D, DefaultCoinSelectionAlgorithm, BumpFee> {
|
||||
/// Explicitly tells the wallet that it is allowed to reduce the amount of the output matching this
|
||||
/// `script_pubkey` in order to bump the transaction fee. Without specifying this the wallet
|
||||
/// will attempt to find a change output to shrink instead.
|
||||
///
|
||||
/// **Note** that the output may shrink to below the dust limit and therefore be removed. If it is
|
||||
/// preserved then it is currently not guaranteed to be in the same position as it was
|
||||
/// originally.
|
||||
///
|
||||
/// Returns an `Err` if `script_pubkey` can't be found among the recipients of the
|
||||
/// transaction we are bumping.
|
||||
pub fn allow_shrinking(&mut self, script_pubkey: Script) -> Result<&mut Self, Error> {
|
||||
match self
|
||||
.params
|
||||
.recipients
|
||||
.iter()
|
||||
.position(|(recipient_script, _)| *recipient_script == script_pubkey)
|
||||
{
|
||||
Some(position) => {
|
||||
self.params.recipients.remove(position);
|
||||
self.params.drain_to = Some(script_pubkey);
|
||||
Ok(self)
|
||||
}
|
||||
None => Err(Error::Generic(format!(
|
||||
"{} was not in the original transaction",
|
||||
script_pubkey
|
||||
))),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
/// Ordering of the transaction's inputs and outputs
#[derive(Debug, Ord, PartialOrd, Eq, PartialEq, Hash, Clone, Copy)]
pub enum TxOrdering {
    /// Randomized (default)
    Shuffle,
    /// Unchanged — inputs and outputs are kept in the order they were added
    Untouched,
    /// BIP69 / Lexicographic
    Bip69Lexicographic,
}
|
||||
|
||||
impl Default for TxOrdering {
|
||||
fn default() -> Self {
|
||||
TxOrdering::Shuffle
|
||||
}
|
||||
}
|
||||
|
||||
impl TxOrdering {
|
||||
/// Sort transaction inputs and outputs by [`TxOrdering`] variant
|
||||
pub fn sort_tx(&self, tx: &mut Transaction) {
|
||||
match self {
|
||||
TxOrdering::Untouched => {}
|
||||
TxOrdering::Shuffle => {
|
||||
use rand::seq::SliceRandom;
|
||||
let mut rng = rand::thread_rng();
|
||||
tx.input.shuffle(&mut rng);
|
||||
tx.output.shuffle(&mut rng);
|
||||
}
|
||||
TxOrdering::Bip69Lexicographic => {
|
||||
tx.input.sort_unstable_by_key(|txin| {
|
||||
(txin.previous_output.txid, txin.previous_output.vout)
|
||||
});
|
||||
tx.output
|
||||
.sort_unstable_by_key(|txout| (txout.value, txout.script_pubkey.clone()));
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
/// Transaction version
///
/// Has a default value of `1`
#[derive(Debug, Ord, PartialOrd, Eq, PartialEq, Hash, Clone, Copy)]
pub(crate) struct Version(pub(crate) i32);
||||
|
||||
impl Default for Version {
|
||||
fn default() -> Self {
|
||||
Version(1)
|
||||
}
|
||||
}
|
||||
|
||||
/// RBF nSequence value
///
/// Has a default value of `0xFFFFFFFD`
#[derive(Debug, Ord, PartialOrd, Eq, PartialEq, Hash, Clone, Copy)]
pub(crate) enum RbfValue {
    /// Use the library default nSequence for RBF signaling
    Default,
    /// Use a caller-provided nSequence
    Value(Sequence),
}
|
||||
|
||||
impl RbfValue {
|
||||
pub(crate) fn get_value(&self) -> Sequence {
|
||||
match self {
|
||||
RbfValue::Default => Sequence::ENABLE_RBF_NO_LOCKTIME,
|
||||
RbfValue::Value(v) => *v,
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
/// Policy regarding the use of change outputs when creating a transaction
#[derive(Debug, Ord, PartialOrd, Eq, PartialEq, Hash, Clone, Copy)]
pub enum ChangeSpendPolicy {
    /// Use both change and non-change outputs (default)
    ChangeAllowed,
    /// Only use change outputs (see [`TxBuilder::only_spend_change`])
    OnlyChange,
    /// Only use non-change outputs (see [`TxBuilder::do_not_spend_change`])
    ChangeForbidden,
}
|
||||
|
||||
impl Default for ChangeSpendPolicy {
|
||||
fn default() -> Self {
|
||||
ChangeSpendPolicy::ChangeAllowed
|
||||
}
|
||||
}
|
||||
|
||||
impl ChangeSpendPolicy {
|
||||
pub(crate) fn is_satisfied_by(&self, utxo: &LocalUtxo) -> bool {
|
||||
match self {
|
||||
ChangeSpendPolicy::ChangeAllowed => true,
|
||||
ChangeSpendPolicy::OnlyChange => utxo.keychain == KeychainKind::Internal,
|
||||
ChangeSpendPolicy::ChangeForbidden => utxo.keychain == KeychainKind::External,
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
#[cfg(test)]
mod test {
    // Raw hex of a 3-input / 3-output transaction used as a fixture for the
    // ordering tests below.
    const ORDERING_TEST_TX: &str = "0200000003c26f3eb7932f7acddc5ddd26602b77e7516079b03090a16e2c2f54\
                                    85d1fd600f0100000000ffffffffc26f3eb7932f7acddc5ddd26602b77e75160\
                                    79b03090a16e2c2f5485d1fd600f0000000000ffffffff571fb3e02278217852\
                                    dd5d299947e2b7354a639adc32ec1fa7b82cfb5dec530e0500000000ffffffff\
                                    03e80300000000000002aaeee80300000000000001aa200300000000000001ff\
                                    00000000";
    // Deserialize the fixture hex into a `bitcoin::Transaction`.
    macro_rules! ordering_test_tx {
        () => {
            deserialize::<bitcoin::Transaction>(&Vec::<u8>::from_hex(ORDERING_TEST_TX).unwrap())
                .unwrap()
        };
    }

    use bdk_chain::ConfirmationTime;
    use bitcoin::consensus::deserialize;
    use bitcoin::hashes::hex::FromHex;

    use super::*;

    #[test]
    fn test_output_ordering_default_shuffle() {
        assert_eq!(TxOrdering::default(), TxOrdering::Shuffle);
    }

    #[test]
    fn test_output_ordering_untouched() {
        let original_tx = ordering_test_tx!();
        let mut tx = original_tx.clone();

        TxOrdering::Untouched.sort_tx(&mut tx);

        assert_eq!(original_tx, tx);
    }

    #[test]
    fn test_output_ordering_shuffle() {
        let original_tx = ordering_test_tx!();
        let mut tx = original_tx.clone();

        // Shuffling is random: retry up to 40 times and require that the input
        // order changes at least once.
        (0..40)
            .find(|_| {
                TxOrdering::Shuffle.sort_tx(&mut tx);
                original_tx.input != tx.input
            })
            .expect("it should have moved the inputs at least once");

        // Same check for the outputs, starting from a fresh copy.
        let mut tx = original_tx.clone();
        (0..40)
            .find(|_| {
                TxOrdering::Shuffle.sort_tx(&mut tx);
                original_tx.output != tx.output
            })
            .expect("it should have moved the outputs at least once");
    }

    #[test]
    fn test_output_ordering_bip69() {
        use core::str::FromStr;

        let original_tx = ordering_test_tx!();
        let mut tx = original_tx;

        TxOrdering::Bip69Lexicographic.sort_tx(&mut tx);

        // Inputs must come out ordered by (txid, vout).
        assert_eq!(
            tx.input[0].previous_output,
            bitcoin::OutPoint::from_str(
                "0e53ec5dfb2cb8a71fec32dc9a634a35b7e24799295ddd5278217822e0b31f57:5"
            )
            .unwrap()
        );
        assert_eq!(
            tx.input[1].previous_output,
            bitcoin::OutPoint::from_str(
                "0f60fdd185542f2c6ea19030b0796051e7772b6026dd5ddccd7a2f93b73e6fc2:0"
            )
            .unwrap()
        );
        assert_eq!(
            tx.input[2].previous_output,
            bitcoin::OutPoint::from_str(
                "0f60fdd185542f2c6ea19030b0796051e7772b6026dd5ddccd7a2f93b73e6fc2:1"
            )
            .unwrap()
        );

        // Outputs must come out ordered by (value, script_pubkey).
        assert_eq!(tx.output[0].value, 800);
        assert_eq!(tx.output[1].script_pubkey, From::from(vec![0xAA]));
        assert_eq!(tx.output[2].script_pubkey, From::from(vec![0xAA, 0xEE]));
    }

    // Two fake UTXOs — one on the External keychain, one on the Internal
    // (change) keychain — used by the `ChangeSpendPolicy` tests.
    fn get_test_utxos() -> Vec<LocalUtxo> {
        use bitcoin::hashes::Hash;

        vec![
            LocalUtxo {
                outpoint: OutPoint {
                    txid: bitcoin::Txid::from_inner([0; 32]),
                    vout: 0,
                },
                txout: Default::default(),
                keychain: KeychainKind::External,
                is_spent: false,
                confirmation_time: ConfirmationTime::Unconfirmed,
                derivation_index: 0,
            },
            LocalUtxo {
                outpoint: OutPoint {
                    txid: bitcoin::Txid::from_inner([0; 32]),
                    vout: 1,
                },
                txout: Default::default(),
                keychain: KeychainKind::Internal,
                is_spent: false,
                confirmation_time: ConfirmationTime::Confirmed {
                    height: 32,
                    time: 42,
                },
                derivation_index: 1,
            },
        ]
    }

    #[test]
    fn test_change_spend_policy_default() {
        // The default policy allows both UTXOs through.
        let change_spend_policy = ChangeSpendPolicy::default();
        let filtered = get_test_utxos()
            .into_iter()
            .filter(|u| change_spend_policy.is_satisfied_by(u))
            .count();

        assert_eq!(filtered, 2);
    }

    #[test]
    fn test_change_spend_policy_no_internal() {
        let change_spend_policy = ChangeSpendPolicy::ChangeForbidden;
        let filtered = get_test_utxos()
            .into_iter()
            .filter(|u| change_spend_policy.is_satisfied_by(u))
            .collect::<Vec<_>>();

        assert_eq!(filtered.len(), 1);
        assert_eq!(filtered[0].keychain, KeychainKind::External);
    }

    #[test]
    fn test_change_spend_policy_only_internal() {
        let change_spend_policy = ChangeSpendPolicy::OnlyChange;
        let filtered = get_test_utxos()
            .into_iter()
            .filter(|u| change_spend_policy.is_satisfied_by(u))
            .collect::<Vec<_>>();

        assert_eq!(filtered.len(), 1);
        assert_eq!(filtered[0].keychain, KeychainKind::Internal);
    }

    #[test]
    fn test_default_tx_version_1() {
        let version = Version::default();
        assert_eq!(version.0, 1);
    }
}
|
||||
181
crates/bdk/src/wallet/utils.rs
Normal file
181
crates/bdk/src/wallet/utils.rs
Normal file
@@ -0,0 +1,181 @@
|
||||
// Bitcoin Dev Kit
|
||||
// Written in 2020 by Alekos Filini <alekos.filini@gmail.com>
|
||||
//
|
||||
// Copyright (c) 2020-2021 Bitcoin Dev Kit Developers
|
||||
//
|
||||
// This file is licensed under the Apache License, Version 2.0 <LICENSE-APACHE
|
||||
// or http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
|
||||
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your option.
|
||||
// You may not use this file except in accordance with one or both of these
|
||||
// licenses.
|
||||
|
||||
use bitcoin::secp256k1::{All, Secp256k1};
|
||||
use bitcoin::{LockTime, Script, Sequence};
|
||||
|
||||
use miniscript::{MiniscriptKey, Satisfier, ToPublicKey};
|
||||
|
||||
/// Trait to check if a value is below the dust limit.
/// We are performing dust value calculation for a given script public key using rust-bitcoin to
/// keep it compatible with network dust rate
// we implement this trait to make sure we don't mess up the comparison with off-by-one like a <
// instead of a <= etc.
pub trait IsDust {
    /// Check whether or not a value is below dust limit
    fn is_dust(&self, script: &Script) -> bool;
}
|
||||
|
||||
impl IsDust for u64 {
|
||||
fn is_dust(&self, script: &Script) -> bool {
|
||||
*self < script.dust_value().to_sat()
|
||||
}
|
||||
}
|
||||
|
||||
/// Satisfier for absolute timelocks (miniscript `after` / OP_CLTV).
pub struct After {
    /// Current chain height, if known.
    pub current_height: Option<u32>,
    /// Fallback used when `current_height` is `None`: whether to assume the
    /// locking height has already been reached.
    pub assume_height_reached: bool,
}
|
||||
|
||||
impl After {
    /// Build an [`After`] satisfier from the current height and fallback policy.
    pub(crate) fn new(current_height: Option<u32>, assume_height_reached: bool) -> After {
        After {
            current_height,
            assume_height_reached,
        }
    }
}
|
||||
|
||||
/// Check that `rbf` is a valid nSequence for an input constrained by the CSV
/// value `csv`: it must (1) enable relative timelocks, (2) use the same unit
/// (block-height vs. time) as `csv`, and (3) be at least `csv`.
pub(crate) fn check_nsequence_rbf(rbf: Sequence, csv: Sequence) -> bool {
    rbf.is_relative_lock_time()
        && rbf.is_time_locked() == csv.is_time_locked()
        && rbf >= csv
}
|
||||
|
||||
impl<Pk: MiniscriptKey + ToPublicKey> Satisfier<Pk> for After {
    // An absolute lock is considered satisfied once the current height reaches
    // the lock's consensus value; without a known height, fall back to the
    // `assume_height_reached` policy.
    fn check_after(&self, n: LockTime) -> bool {
        if let Some(current_height) = self.current_height {
            // NOTE(review): this compares the raw consensus value against a
            // block height even when `n` is a time-based locktime — confirm
            // that only height-based locks reach this satisfier.
            current_height >= n.to_consensus_u32()
        } else {
            self.assume_height_reached
        }
    }
}
|
||||
|
||||
/// Satisfier for relative timelocks (miniscript `older` / OP_CSV).
pub struct Older {
    /// Current chain height, if known.
    pub current_height: Option<u32>,
    /// Height at which the UTXO being spent was created, if known.
    pub create_height: Option<u32>,
    /// Fallback used when `current_height` is `None`: whether to assume the
    /// required height has already been reached.
    pub assume_height_reached: bool,
}
|
||||
|
||||
impl Older {
    /// Build an [`Older`] satisfier from the current/creation heights and
    /// the fallback policy.
    pub(crate) fn new(
        current_height: Option<u32>,
        create_height: Option<u32>,
        assume_height_reached: bool,
    ) -> Older {
        Older {
            current_height,
            create_height,
            assume_height_reached,
        }
    }
}
|
||||
|
||||
impl<Pk: MiniscriptKey + ToPublicKey> Satisfier<Pk> for Older {
    // A relative lock is satisfied once the chain has advanced `n` blocks past
    // the creation height (an unknown creation height is treated as 0).
    fn check_older(&self, n: Sequence) -> bool {
        if let Some(current_height) = self.current_height {
            // TODO: test >= / >
            // NOTE(review): `checked_add(...).expect(...)` panics on overflow
            // rather than returning false — confirm this is acceptable here.
            current_height
                >= self
                    .create_height
                    .unwrap_or(0)
                    .checked_add(n.to_consensus_u32())
                    .expect("Overflowing addition")
        } else {
            self.assume_height_reached
        }
    }
}
|
||||
|
||||
/// Secp256k1 context capable of all operations (signing and verification).
pub(crate) type SecpCtx = Secp256k1<All>;
|
||||
|
||||
#[cfg(test)]
mod test {
    // When nSequence is lower than this flag the timelock is interpreted as block-height-based,
    // otherwise it's time-based
    pub(crate) const SEQUENCE_LOCKTIME_TYPE_FLAG: u32 = 1 << 22;

    use super::{check_nsequence_rbf, IsDust};
    use crate::bitcoin::{Address, Sequence};
    use core::str::FromStr;

    #[test]
    fn test_is_dust() {
        // P2PKH threshold: 545 sats is dust, 546 is not.
        let script_p2pkh = Address::from_str("1GNgwA8JfG7Kc8akJ8opdNWJUihqUztfPe")
            .unwrap()
            .script_pubkey();
        assert!(script_p2pkh.is_p2pkh());
        assert!(545.is_dust(&script_p2pkh));
        assert!(!546.is_dust(&script_p2pkh));

        // P2WPKH threshold: 293 sats is dust, 294 is not.
        let script_p2wpkh = Address::from_str("bc1qxlh2mnc0yqwas76gqq665qkggee5m98t8yskd8")
            .unwrap()
            .script_pubkey();
        assert!(script_p2wpkh.is_v0_p2wpkh());
        assert!(293.is_dust(&script_p2wpkh));
        assert!(!294.is_dust(&script_p2wpkh));
    }

    #[test]
    fn test_check_nsequence_rbf_msb_set() {
        // MSB set disables the relative locktime, so it can't satisfy a CSV.
        let result = check_nsequence_rbf(Sequence(0x80000000), Sequence(5000));
        assert!(!result);
    }

    #[test]
    fn test_check_nsequence_rbf_lt_csv() {
        // A sequence lower than the CSV value must be rejected.
        let result = check_nsequence_rbf(Sequence(4000), Sequence(5000));
        assert!(!result);
    }

    #[test]
    fn test_check_nsequence_rbf_different_unit() {
        // Time-based sequence vs. height-based CSV must be rejected.
        let result =
            check_nsequence_rbf(Sequence(SEQUENCE_LOCKTIME_TYPE_FLAG + 5000), Sequence(5000));
        assert!(!result);
    }

    #[test]
    fn test_check_nsequence_rbf_mask() {
        let result = check_nsequence_rbf(Sequence(0x3f + 10_000), Sequence(5000));
        assert!(result);
    }

    #[test]
    fn test_check_nsequence_rbf_same_unit_blocks() {
        let result = check_nsequence_rbf(Sequence(10_000), Sequence(5000));
        assert!(result);
    }

    #[test]
    fn test_check_nsequence_rbf_same_unit_time() {
        let result = check_nsequence_rbf(
            Sequence(SEQUENCE_LOCKTIME_TYPE_FLAG + 10_000),
            Sequence(SEQUENCE_LOCKTIME_TYPE_FLAG + 5000),
        );
        assert!(result);
    }
}
|
||||
93
crates/bdk/tests/common.rs
Normal file
93
crates/bdk/tests/common.rs
Normal file
@@ -0,0 +1,93 @@
|
||||
#![allow(unused)]
|
||||
use bdk::{wallet::AddressIndex, Wallet};
|
||||
use bdk_chain::{BlockId, ConfirmationTime};
|
||||
use bitcoin::hashes::Hash;
|
||||
use bitcoin::{BlockHash, Network, Transaction, TxOut};
|
||||
|
||||
/// Return a fake wallet that appears to be funded for testing.
///
/// The wallet receives a single 50_000-sat output paying to its first address,
/// recorded as confirmed at height 1_000. The funding txid is returned too.
pub fn get_funded_wallet_with_change(
    descriptor: &str,
    change: Option<&str>,
) -> (Wallet, bitcoin::Txid) {
    let mut wallet = Wallet::new_no_persist(descriptor, change, Network::Regtest).unwrap();
    let address = wallet.get_address(AddressIndex::New).address;

    // Input-free transaction paying the wallet's first external address.
    let tx = Transaction {
        version: 1,
        lock_time: bitcoin::PackedLockTime(0),
        input: vec![],
        output: vec![TxOut {
            value: 50_000,
            script_pubkey: address.script_pubkey(),
        }],
    };

    // Anchor a checkpoint first so the transaction can be inserted as confirmed.
    wallet
        .insert_checkpoint(BlockId {
            height: 1_000,
            hash: BlockHash::all_zeros(),
        })
        .unwrap();
    wallet
        .insert_tx(
            tx.clone(),
            ConfirmationTime::Confirmed {
                height: 1_000,
                time: 100,
            },
        )
        .unwrap();

    (wallet, tx.txid())
}
|
||||
|
||||
/// Like [`get_funded_wallet_with_change`], but without a change descriptor.
pub fn get_funded_wallet(descriptor: &str) -> (Wallet, bitcoin::Txid) {
    get_funded_wallet_with_change(descriptor, None)
}
|
||||
|
||||
/// Test descriptor: single-sig P2WPKH with a fixed private key.
pub fn get_test_wpkh() -> &'static str {
    "wpkh(cVpPVruEDdmutPzisEsYvtST1usBR3ntr8pXSyt6D2YYqXRyPcFW)"
}
|
||||
|
||||
/// Test descriptor: single sig gated by a 6-block relative timelock (CSV).
pub fn get_test_single_sig_csv() -> &'static str {
    // and(pk(Alice),older(6))
    "wsh(and_v(v:pk(cVpPVruEDdmutPzisEsYvtST1usBR3ntr8pXSyt6D2YYqXRyPcFW),older(6)))"
}
|
||||
|
||||
/// Test descriptor: Alice can spend immediately, Bob only after a 144-block CSV.
pub fn get_test_a_or_b_plus_csv() -> &'static str {
    // or(pk(Alice),and(pk(Bob),older(144)))
    "wsh(or_d(pk(cRjo6jqfVNP33HhSS76UhXETZsGTZYx8FMFvR9kpbtCSV1PmdZdu),and_v(v:pk(cMnkdebixpXMPfkcNEjjGin7s94hiehAH4mLbYkZoh9KSiNNmqC8),older(144))))"
}
|
||||
|
||||
/// Test descriptor: single sig gated by an absolute timelock at height 100000 (CLTV).
pub fn get_test_single_sig_cltv() -> &'static str {
    // and(pk(Alice),after(100000))
    "wsh(and_v(v:pk(cVpPVruEDdmutPzisEsYvtST1usBR3ntr8pXSyt6D2YYqXRyPcFW),after(100000)))"
}
|
||||
|
||||
/// Test descriptor: taproot single-sig (key-path only).
pub fn get_test_tr_single_sig() -> &'static str {
    "tr(cNJmN3fH9DDbDt131fQNkVakkpzawJBSeybCUNmP1BovpmGQ45xG)"
}
|
||||
|
||||
/// Test descriptor: taproot with a two-leaf taptree (one private, one x-only public key).
pub fn get_test_tr_with_taptree() -> &'static str {
    "tr(b511bd5771e47ee27558b1765e87b541668304ec567721c7b880edc0a010da55,{pk(cPZzKuNmpuUjD1e8jUU4PVzy2b5LngbSip8mBsxf4e7rSFZVb4Uh),pk(8aee2b8120a5f157f1223f72b5e62b825831a27a9fdf427db7cc697494d4a642)})"
}
|
||||
|
||||
/// Test descriptor: taproot with a two-leaf taptree, both leaves using private keys.
pub fn get_test_tr_with_taptree_both_priv() -> &'static str {
    "tr(b511bd5771e47ee27558b1765e87b541668304ec567721c7b880edc0a010da55,{pk(cPZzKuNmpuUjD1e8jUU4PVzy2b5LngbSip8mBsxf4e7rSFZVb4Uh),pk(cNaQCDwmmh4dS9LzCgVtyy1e1xjCJ21GUDHe9K98nzb689JvinGV)})"
}
|
||||
|
||||
/// Test descriptor: taproot taptree reusing the same key in two leaves with
/// different absolute timelocks.
pub fn get_test_tr_repeated_key() -> &'static str {
    "tr(b511bd5771e47ee27558b1765e87b541668304ec567721c7b880edc0a010da55,{and_v(v:pk(cVpPVruEDdmutPzisEsYvtST1usBR3ntr8pXSyt6D2YYqXRyPcFW),after(100)),and_v(v:pk(cVpPVruEDdmutPzisEsYvtST1usBR3ntr8pXSyt6D2YYqXRyPcFW),after(200))})"
}
|
||||
|
||||
/// Test descriptor: taproot single-sig with a ranged xprv key-path key.
pub fn get_test_tr_single_sig_xprv() -> &'static str {
    "tr(tprv8ZgxMBicQKsPdDArR4xSAECuVxeX1jwwSXR4ApKbkYgZiziDc4LdBy2WvJeGDfUSE4UT4hHhbgEwbdq8ajjUHiKDegkwrNU6V55CxcxonVN/*)"
}
|
||||
|
||||
/// Test descriptor: taproot taptree with one ranged xprv leaf and one x-only public key leaf.
pub fn get_test_tr_with_taptree_xprv() -> &'static str {
    "tr(cNJmN3fH9DDbDt131fQNkVakkpzawJBSeybCUNmP1BovpmGQ45xG,{pk(tprv8ZgxMBicQKsPdDArR4xSAECuVxeX1jwwSXR4ApKbkYgZiziDc4LdBy2WvJeGDfUSE4UT4hHhbgEwbdq8ajjUHiKDegkwrNU6V55CxcxonVN/*),pk(8aee2b8120a5f157f1223f72b5e62b825831a27a9fdf427db7cc697494d4a642)})"
}
|
||||
|
||||
/// Test descriptor: taproot taptree with the same public key duplicated in both leaves.
pub fn get_test_tr_dup_keys() -> &'static str {
    "tr(cNJmN3fH9DDbDt131fQNkVakkpzawJBSeybCUNmP1BovpmGQ45xG,{pk(8aee2b8120a5f157f1223f72b5e62b825831a27a9fdf427db7cc697494d4a642),pk(8aee2b8120a5f157f1223f72b5e62b825831a27a9fdf427db7cc697494d4a642)})"
}
|
||||
158
crates/bdk/tests/psbt.rs
Normal file
158
crates/bdk/tests/psbt.rs
Normal file
@@ -0,0 +1,158 @@
|
||||
use bdk::bitcoin::TxIn;
|
||||
use bdk::wallet::AddressIndex;
|
||||
use bdk::wallet::AddressIndex::New;
|
||||
use bdk::{psbt, FeeRate, SignOptions};
|
||||
use bitcoin::util::psbt::PartiallySignedTransaction as Psbt;
|
||||
use core::str::FromStr;
|
||||
mod common;
|
||||
use common::*;
|
||||
|
||||
// from bip 174
|
||||
const PSBT_STR: &str = "cHNidP8BAKACAAAAAqsJSaCMWvfEm4IS9Bfi8Vqz9cM9zxU4IagTn4d6W3vkAAAAAAD+////qwlJoIxa98SbghL0F+LxWrP1wz3PFTghqBOfh3pbe+QBAAAAAP7///8CYDvqCwAAAAAZdqkUdopAu9dAy+gdmI5x3ipNXHE5ax2IrI4kAAAAAAAAGXapFG9GILVT+glechue4O/p+gOcykWXiKwAAAAAAAEHakcwRAIgR1lmF5fAGwNrJZKJSGhiGDR9iYZLcZ4ff89X0eURZYcCIFMJ6r9Wqk2Ikf/REf3xM286KdqGbX+EhtdVRs7tr5MZASEDXNxh/HupccC1AaZGoqg7ECy0OIEhfKaC3Ibi1z+ogpIAAQEgAOH1BQAAAAAXqRQ1RebjO4MsRwUPJNPuuTycA5SLx4cBBBYAFIXRNTfy4mVAWjTbr6nj3aAfuCMIAAAA";
|
||||
|
||||
#[test]
#[should_panic(expected = "InputIndexOutOfRange")]
fn test_psbt_malformed_psbt_input_legacy() {
    // Append a foreign legacy PSBT input (from the BIP174 vector) that has no
    // matching tx input; signing must fail with `InputIndexOutOfRange`.
    let psbt_bip = Psbt::from_str(PSBT_STR).unwrap();
    let (mut wallet, _) = get_funded_wallet(get_test_wpkh());
    let send_to = wallet.get_address(AddressIndex::New);
    let mut builder = wallet.build_tx();
    builder.add_recipient(send_to.script_pubkey(), 10_000);
    let (mut psbt, _) = builder.finish().unwrap();
    psbt.inputs.push(psbt_bip.inputs[0].clone());
    let options = SignOptions {
        trust_witness_utxo: true,
        ..Default::default()
    };
    let _ = wallet.sign(&mut psbt, options).unwrap();
}
|
||||
|
||||
#[test]
#[should_panic(expected = "InputIndexOutOfRange")]
fn test_psbt_malformed_psbt_input_segwit() {
    // Same as the legacy case but appending the segwit input of the BIP174
    // vector; signing must fail with `InputIndexOutOfRange`.
    let psbt_bip = Psbt::from_str(PSBT_STR).unwrap();
    let (mut wallet, _) = get_funded_wallet(get_test_wpkh());
    let send_to = wallet.get_address(AddressIndex::New);
    let mut builder = wallet.build_tx();
    builder.add_recipient(send_to.script_pubkey(), 10_000);
    let (mut psbt, _) = builder.finish().unwrap();
    psbt.inputs.push(psbt_bip.inputs[1].clone());
    let options = SignOptions {
        trust_witness_utxo: true,
        ..Default::default()
    };
    let _ = wallet.sign(&mut psbt, options).unwrap();
}
|
||||
|
||||
#[test]
#[should_panic(expected = "InputIndexOutOfRange")]
fn test_psbt_malformed_tx_input() {
    // Append an extra input to the unsigned tx without a matching PSBT input;
    // signing must fail with `InputIndexOutOfRange`.
    let (mut wallet, _) = get_funded_wallet(get_test_wpkh());
    let send_to = wallet.get_address(AddressIndex::New);
    let mut builder = wallet.build_tx();
    builder.add_recipient(send_to.script_pubkey(), 10_000);
    let (mut psbt, _) = builder.finish().unwrap();
    psbt.unsigned_tx.input.push(TxIn::default());
    let options = SignOptions {
        trust_witness_utxo: true,
        ..Default::default()
    };
    let _ = wallet.sign(&mut psbt, options).unwrap();
}
|
||||
|
||||
#[test]
fn test_psbt_sign_with_finalized() {
    // Signing a PSBT that already contains a finalized foreign input (with its
    // matching tx input) must not error.
    let psbt_bip = Psbt::from_str(PSBT_STR).unwrap();
    let (mut wallet, _) = get_funded_wallet(get_test_wpkh());
    let send_to = wallet.get_address(AddressIndex::New);
    let mut builder = wallet.build_tx();
    builder.add_recipient(send_to.script_pubkey(), 10_000);
    let (mut psbt, _) = builder.finish().unwrap();

    // add a finalized input
    psbt.inputs.push(psbt_bip.inputs[0].clone());
    psbt.unsigned_tx
        .input
        .push(psbt_bip.unsigned_tx.input[0].clone());

    let _ = wallet.sign(&mut psbt, SignOptions::default()).unwrap();
}
|
||||
|
||||
#[test]
fn test_psbt_fee_rate_with_witness_utxo() {
    use psbt::PsbtUtils;

    let expected_fee_rate = 1.2345;

    let (mut wallet, _) = get_funded_wallet("wpkh(tprv8ZgxMBicQKsPd3EupYiPRhaMooHKUHJxNsTfYuScep13go8QFfHdtkG9nRkFGb7busX4isf6X9dURGCoKgitaApQ6MupRhZMcELAxTBRJgS/*)");
    let addr = wallet.get_address(New);
    let mut builder = wallet.build_tx();
    builder.drain_to(addr.script_pubkey()).drain_wallet();
    builder.fee_rate(FeeRate::from_sat_per_vb(expected_fee_rate));
    let (mut psbt, _) = builder.finish().unwrap();
    let fee_amount = psbt.fee_amount();
    assert!(fee_amount.is_some());

    let unfinalized_fee_rate = psbt.fee_rate().unwrap();

    let finalized = wallet.sign(&mut psbt, Default::default()).unwrap();
    assert!(finalized);

    // Finalizing fills in real witnesses, so the tx grows and the effective
    // fee rate drops, while still staying at or above the requested rate.
    let finalized_fee_rate = psbt.fee_rate().unwrap();
    assert!(finalized_fee_rate.as_sat_per_vb() >= expected_fee_rate);
    assert!(finalized_fee_rate.as_sat_per_vb() < unfinalized_fee_rate.as_sat_per_vb());
}
|
||||
|
||||
#[test]
fn test_psbt_fee_rate_with_nonwitness_utxo() {
    use psbt::PsbtUtils;

    let expected_fee_rate = 1.2345;

    let (mut wallet, _) = get_funded_wallet("pkh(tprv8ZgxMBicQKsPd3EupYiPRhaMooHKUHJxNsTfYuScep13go8QFfHdtkG9nRkFGb7busX4isf6X9dURGCoKgitaApQ6MupRhZMcELAxTBRJgS/*)");
    let addr = wallet.get_address(New);
    let mut builder = wallet.build_tx();
    builder.drain_to(addr.script_pubkey()).drain_wallet();
    builder.fee_rate(FeeRate::from_sat_per_vb(expected_fee_rate));
    let (mut psbt, _) = builder.finish().unwrap();
    let fee_amount = psbt.fee_amount();
    assert!(fee_amount.is_some());
    let unfinalized_fee_rate = psbt.fee_rate().unwrap();

    let finalized = wallet.sign(&mut psbt, Default::default()).unwrap();
    assert!(finalized);

    // Same invariant as the witness case, but for a legacy (pkh) input whose
    // UTXO is provided via `non_witness_utxo`.
    let finalized_fee_rate = psbt.fee_rate().unwrap();
    assert!(finalized_fee_rate.as_sat_per_vb() >= expected_fee_rate);
    assert!(finalized_fee_rate.as_sat_per_vb() < unfinalized_fee_rate.as_sat_per_vb());
}
|
||||
|
||||
#[test]
fn test_psbt_fee_rate_with_missing_txout() {
    use psbt::PsbtUtils;

    let expected_fee_rate = 1.2345;

    // Without either `witness_utxo` or `non_witness_utxo` the input value is
    // unknown, so no fee amount or fee rate can be computed.
    let (mut wpkh_wallet, _) = get_funded_wallet("wpkh(tprv8ZgxMBicQKsPd3EupYiPRhaMooHKUHJxNsTfYuScep13go8QFfHdtkG9nRkFGb7busX4isf6X9dURGCoKgitaApQ6MupRhZMcELAxTBRJgS/*)");
    let addr = wpkh_wallet.get_address(New);
    let mut builder = wpkh_wallet.build_tx();
    builder.drain_to(addr.script_pubkey()).drain_wallet();
    builder.fee_rate(FeeRate::from_sat_per_vb(expected_fee_rate));
    let (mut wpkh_psbt, _) = builder.finish().unwrap();

    wpkh_psbt.inputs[0].witness_utxo = None;
    wpkh_psbt.inputs[0].non_witness_utxo = None;
    assert!(wpkh_psbt.fee_amount().is_none());
    assert!(wpkh_psbt.fee_rate().is_none());

    let (mut pkh_wallet, _) = get_funded_wallet("pkh(tprv8ZgxMBicQKsPd3EupYiPRhaMooHKUHJxNsTfYuScep13go8QFfHdtkG9nRkFGb7busX4isf6X9dURGCoKgitaApQ6MupRhZMcELAxTBRJgS/*)");
    let addr = pkh_wallet.get_address(New);
    let mut builder = pkh_wallet.build_tx();
    builder.drain_to(addr.script_pubkey()).drain_wallet();
    builder.fee_rate(FeeRate::from_sat_per_vb(expected_fee_rate));
    let (mut pkh_psbt, _) = builder.finish().unwrap();

    pkh_psbt.inputs[0].non_witness_utxo = None;
    assert!(pkh_psbt.fee_amount().is_none());
    assert!(pkh_psbt.fee_rate().is_none());
}
|
||||
3309
crates/bdk/tests/wallet.rs
Normal file
3309
crates/bdk/tests/wallet.rs
Normal file
File diff suppressed because it is too large
Load Diff
30
crates/chain/Cargo.toml
Normal file
30
crates/chain/Cargo.toml
Normal file
@@ -0,0 +1,30 @@
|
||||
[package]
|
||||
name = "bdk_chain"
|
||||
version = "0.4.0"
|
||||
edition = "2021"
|
||||
rust-version = "1.57"
|
||||
homepage = "https://bitcoindevkit.org"
|
||||
repository = "https://github.com/bitcoindevkit/bdk"
|
||||
documentation = "https://docs.rs/bdk_chain"
|
||||
description = "Collection of core structures for Bitcoin Dev Kit."
|
||||
license = "MIT OR Apache-2.0"
|
||||
readme = "README.md"
|
||||
|
||||
# See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html
|
||||
|
||||
[dependencies]
|
||||
bitcoin = { version = "0.29" }
|
||||
serde_crate = { package = "serde", version = "1", optional = true, features = ["derive"] }
|
||||
|
||||
# Use hashbrown as a feature flag to have HashSet and HashMap from it.
|
||||
# note: version 0.13 breaks our MSRV.
|
||||
hashbrown = { version = "0.12", optional = true, features = ["serde"] }
|
||||
miniscript = { version = "9.0.0", optional = true }
|
||||
|
||||
[dev-dependencies]
|
||||
rand = "0.8"
|
||||
|
||||
[features]
|
||||
default = ["std", "miniscript"]
|
||||
std = []
|
||||
serde = ["serde_crate", "bitcoin/serde" ]
|
||||
3
crates/chain/README.md
Normal file
3
crates/chain/README.md
Normal file
@@ -0,0 +1,3 @@
|
||||
# BDK Chain
|
||||
|
||||
BDK keychain tracker, tools for storing and indexing chain data.
|
||||
218
crates/chain/src/chain_data.rs
Normal file
218
crates/chain/src/chain_data.rs
Normal file
@@ -0,0 +1,218 @@
|
||||
use bitcoin::{hashes::Hash, BlockHash, OutPoint, TxOut, Txid};
|
||||
|
||||
use crate::{
|
||||
sparse_chain::{self, ChainPosition},
|
||||
COINBASE_MATURITY,
|
||||
};
|
||||
|
||||
/// Represents the height at which a transaction is confirmed.
#[derive(Debug, Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash)]
#[cfg_attr(
    feature = "serde",
    derive(serde::Deserialize, serde::Serialize),
    serde(crate = "serde_crate")
)]
pub enum TxHeight {
    /// Confirmed in the block at the contained height.
    Confirmed(u32),
    /// Not (yet) in a block. Note the derived `Ord` sorts `Unconfirmed`
    /// after every `Confirmed(_)` height (variant order matters).
    Unconfirmed,
}
|
||||
|
||||
impl Default for TxHeight {
|
||||
fn default() -> Self {
|
||||
Self::Unconfirmed
|
||||
}
|
||||
}
|
||||
|
||||
impl core::fmt::Display for TxHeight {
|
||||
fn fmt(&self, f: &mut core::fmt::Formatter<'_>) -> core::fmt::Result {
|
||||
match self {
|
||||
Self::Confirmed(h) => core::write!(f, "confirmed_at({})", h),
|
||||
Self::Unconfirmed => core::write!(f, "unconfirmed"),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl From<Option<u32>> for TxHeight {
|
||||
fn from(opt: Option<u32>) -> Self {
|
||||
match opt {
|
||||
Some(h) => Self::Confirmed(h),
|
||||
None => Self::Unconfirmed,
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl From<TxHeight> for Option<u32> {
|
||||
fn from(height: TxHeight) -> Self {
|
||||
match height {
|
||||
TxHeight::Confirmed(h) => Some(h),
|
||||
TxHeight::Unconfirmed => None,
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl crate::sparse_chain::ChainPosition for TxHeight {
    /// A `TxHeight` is already just a height.
    fn height(&self) -> TxHeight {
        *self
    }

    // A bare height carries no finer-grained ordering information, so the greatest and
    // least positions at a height are both the height itself.
    fn max_ord_of_height(height: TxHeight) -> Self {
        height
    }

    fn min_ord_of_height(height: TxHeight) -> Self {
        height
    }
}
|
||||
|
||||
impl TxHeight {
    /// Returns whether this position represents a confirmed height.
    pub fn is_confirmed(&self) -> bool {
        matches!(self, Self::Confirmed(_))
    }
}
|
||||
|
||||
/// Block height and timestamp at which a transaction is confirmed.
#[derive(Debug, Clone, PartialEq, Eq, Copy, PartialOrd, Ord, core::hash::Hash)]
#[cfg_attr(
    feature = "serde",
    derive(serde::Deserialize, serde::Serialize),
    serde(crate = "serde_crate")
)]
pub enum ConfirmationTime {
    /// Confirmed at `height` with timestamp `time` (presumably the block's header time —
    /// NOTE(review): confirm the timestamp's source at the call sites).
    Confirmed { height: u32, time: u64 },
    /// Not (yet) in a block.
    Unconfirmed,
}
|
||||
|
||||
impl sparse_chain::ChainPosition for ConfirmationTime {
    /// Project the position down to its block height, discarding the timestamp.
    fn height(&self) -> TxHeight {
        match self {
            ConfirmationTime::Confirmed { height, .. } => TxHeight::Confirmed(*height),
            ConfirmationTime::Unconfirmed => TxHeight::Unconfirmed,
        }
    }

    // The derived `Ord` on `ConfirmationTime` compares `height` first and then `time`,
    // so the greatest possible position at a given height pins `time` to `u64::MAX`...
    fn max_ord_of_height(height: TxHeight) -> Self {
        match height {
            TxHeight::Confirmed(height) => Self::Confirmed {
                height,
                time: u64::MAX,
            },
            TxHeight::Unconfirmed => Self::Unconfirmed,
        }
    }

    // ...and the least possible position pins `time` to `u64::MIN`.
    fn min_ord_of_height(height: TxHeight) -> Self {
        match height {
            TxHeight::Confirmed(height) => Self::Confirmed {
                height,
                time: u64::MIN,
            },
            TxHeight::Unconfirmed => Self::Unconfirmed,
        }
    }
}
|
||||
|
||||
impl ConfirmationTime {
    /// Returns whether this position represents a confirmed transaction.
    pub fn is_confirmed(&self) -> bool {
        matches!(self, Self::Confirmed { .. })
    }
}
|
||||
|
||||
/// A reference to a block in the canonical chain.
///
/// The derived `Ord` sorts by `height` first, then by `hash`.
#[derive(Debug, Clone, PartialEq, Eq, Copy, PartialOrd, Ord)]
#[cfg_attr(
    feature = "serde",
    derive(serde::Deserialize, serde::Serialize),
    serde(crate = "serde_crate")
)]
pub struct BlockId {
    /// The height of the block.
    pub height: u32,
    /// The hash of the block.
    pub hash: BlockHash,
}
|
||||
|
||||
impl Default for BlockId {
    /// Height 0 with the all-zero block hash — a placeholder value, not a real block.
    fn default() -> Self {
        Self {
            height: Default::default(),
            hash: BlockHash::from_inner([0u8; 32]),
        }
    }
}
|
||||
|
||||
impl From<(u32, BlockHash)> for BlockId {
|
||||
fn from((height, hash): (u32, BlockHash)) -> Self {
|
||||
Self { height, hash }
|
||||
}
|
||||
}
|
||||
|
||||
impl From<BlockId> for (u32, BlockHash) {
|
||||
fn from(block_id: BlockId) -> Self {
|
||||
(block_id.height, block_id.hash)
|
||||
}
|
||||
}
|
||||
|
||||
impl From<(&u32, &BlockHash)> for BlockId {
    /// Builds a `BlockId` by copying out of borrowed components.
    fn from((height, hash): (&u32, &BlockHash)) -> Self {
        let (height, hash) = (*height, *hash);
        Self { height, hash }
    }
}
|
||||
|
||||
/// A `TxOut` with as much data as we can retrieve about it.
///
/// Generic over `I`, the chain-position type (e.g. `TxHeight` or `ConfirmationTime`).
#[derive(Debug, Clone, PartialEq)]
pub struct FullTxOut<I> {
    /// The location of the `TxOut`.
    pub outpoint: OutPoint,
    /// The `TxOut`.
    pub txout: TxOut,
    /// The position of the transaction in `outpoint` in the overall chain.
    pub chain_position: I,
    /// The txid and chain position of the transaction (if any) that has spent this output.
    pub spent_by: Option<(I, Txid)>,
    /// Whether this output is on a coinbase transaction.
    pub is_on_coinbase: bool,
}
|
||||
|
||||
impl<I: ChainPosition> FullTxOut<I> {
    /// Whether the utxo is/was/will be spendable at `height`.
    ///
    /// It is spendable if it is not an immature coinbase output and no spending tx has been
    /// confirmed by that height.
    pub fn is_spendable_at(&self, height: u32) -> bool {
        if !self.is_mature(height) {
            return false;
        }

        // The output itself must be confirmed at or before `height`
        // (an unconfirmed position also compares greater than any confirmed height).
        if self.chain_position.height() > TxHeight::Confirmed(height) {
            return false;
        }

        match &self.spent_by {
            // Still spendable at `height` only if the spend confirms strictly after it.
            Some((spending_height, _)) => spending_height.height() > TxHeight::Confirmed(height),
            None => true,
        }
    }

    /// Whether the output has passed the coinbase maturity rule at `height`.
    ///
    /// Non-coinbase outputs are always mature.
    pub fn is_mature(&self, height: u32) -> bool {
        if self.is_on_coinbase {
            let tx_height = match self.chain_position.height() {
                TxHeight::Confirmed(tx_height) => tx_height,
                TxHeight::Unconfirmed => {
                    // A coinbase output only exists once its block exists; treat this as a
                    // caller bug (debug panic) but fail safe in release builds.
                    debug_assert!(false, "coinbase tx can never be unconfirmed");
                    return false;
                }
            };
            // `age + 1` is the confirmation count (a tx confirmed at `height` has 1
            // confirmation), so this rejects outputs with fewer than COINBASE_MATURITY
            // confirmations.
            let age = height.saturating_sub(tx_height);
            if age + 1 < COINBASE_MATURITY {
                return false;
            }
        }

        true
    }
}
|
||||
|
||||
// TODO: make test
|
||||
639
crates/chain/src/chain_graph.rs
Normal file
639
crates/chain/src/chain_graph.rs
Normal file
@@ -0,0 +1,639 @@
|
||||
//! Module for structures that combine the features of [`sparse_chain`] and [`tx_graph`].
|
||||
use crate::{
|
||||
collections::HashSet,
|
||||
sparse_chain::{self, ChainPosition, SparseChain},
|
||||
tx_graph::{self, TxGraph},
|
||||
BlockId, ForEachTxOut, FullTxOut, TxHeight,
|
||||
};
|
||||
use alloc::{string::ToString, vec::Vec};
|
||||
use bitcoin::{OutPoint, Transaction, TxOut, Txid};
|
||||
use core::fmt::Debug;
|
||||
|
||||
/// A consistent combination of a [`SparseChain<P>`] and a [`TxGraph<T>`].
|
||||
///
|
||||
/// `SparseChain` only keeps track of transaction ids and their position in the chain, but you often
|
||||
/// want to store the full transactions as well. Additionally, you want to make sure that everything
|
||||
/// in the chain is consistent with the full transaction data. `ChainGraph` enforces these two
|
||||
/// invariants:
|
||||
///
|
||||
/// 1. Every transaction that is in the chain is also in the graph (you always have the full
|
||||
/// transaction).
|
||||
/// 2. No transactions in the chain conflict with each other, i.e., they don't double spend each
|
||||
/// other or have ancestors that double spend each other.
|
||||
///
|
||||
/// Note that the `ChainGraph` guarantees a 1:1 mapping between transactions in the `chain` and
|
||||
/// `graph` but not the other way around. Transactions may fall out of the *chain* (via re-org or
|
||||
/// mempool eviction) but will remain in the *graph*.
|
||||
#[derive(Clone, Debug, PartialEq)]
pub struct ChainGraph<P = TxHeight> {
    // Txids and their positions; every txid here must have its full tx in `graph`.
    chain: SparseChain<P>,
    // Full transaction data; may contain more than what `chain` references.
    graph: TxGraph,
}
|
||||
|
||||
impl<P> Default for ChainGraph<P> {
|
||||
fn default() -> Self {
|
||||
Self {
|
||||
chain: Default::default(),
|
||||
graph: Default::default(),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl<P> AsRef<SparseChain<P>> for ChainGraph<P> {
    /// Borrow the internal sparse chain.
    fn as_ref(&self) -> &SparseChain<P> {
        &self.chain
    }
}
|
||||
|
||||
impl<P> AsRef<TxGraph> for ChainGraph<P> {
    /// Borrow the internal transaction graph.
    fn as_ref(&self) -> &TxGraph {
        &self.graph
    }
}
|
||||
|
||||
impl<P> AsRef<ChainGraph<P>> for ChainGraph<P> {
    /// Identity borrow, so generic code taking `impl AsRef<ChainGraph<P>>` accepts `Self`.
    fn as_ref(&self) -> &ChainGraph<P> {
        self
    }
}
|
||||
|
||||
impl<P> ChainGraph<P> {
    /// Returns a reference to the internal [`SparseChain`].
    pub fn chain(&self) -> &SparseChain<P> {
        &self.chain
    }

    /// Returns a reference to the internal [`TxGraph`].
    pub fn graph(&self) -> &TxGraph {
        &self.graph
    }
}
|
||||
|
||||
impl<P> ChainGraph<P>
where
    P: ChainPosition,
{
    /// Create a new chain graph from a `chain` and a `graph`.
    ///
    /// There are two reasons this can return an `Err`:
    ///
    /// 1. There is a transaction in the `chain` that does not have its corresponding full
    ///    transaction in `graph`.
    /// 2. The `chain` has two transactions that are allegedly in it, but they conflict in the
    ///    `graph` (so could not possibly be in the same chain).
    pub fn new(chain: SparseChain<P>, graph: TxGraph) -> Result<Self, NewError<P>> {
        let mut missing = HashSet::default();
        for (pos, txid) in chain.txids() {
            if let Some(tx) = graph.get_tx(*txid) {
                // Report the first conflicting chain tx found while walking conflicts of `tx`.
                let conflict = graph
                    .walk_conflicts(tx, |_, txid| Some((chain.tx_position(txid)?.clone(), txid)))
                    .next();
                if let Some((conflict_pos, conflict)) = conflict {
                    return Err(NewError::Conflict {
                        a: (pos.clone(), *txid),
                        b: (conflict_pos, conflict),
                    });
                }
            } else {
                // Collect *all* missing txids before erroring so the caller sees the full set.
                missing.insert(*txid);
            }
        }

        if !missing.is_empty() {
            return Err(NewError::Missing(missing));
        }

        Ok(Self { chain, graph })
    }

    /// Take an update in the form of a [`SparseChain<P>`][`SparseChain`] and attempt to turn it
    /// into a chain graph by filling in full transactions from `self` and from `new_txs`. This
    /// returns a `ChainGraph<P, Cow<T>>` where the [`Cow<'a, T>`] will borrow the transaction if it
    /// got it from `self`.
    ///
    /// This is useful when interacting with services like an electrum server which returns a list
    /// of txids and heights when calling [`script_get_history`], which can easily be inserted into a
    /// [`SparseChain<TxHeight>`][`SparseChain`]. From there, you need to figure out which full
    /// transactions you are missing in your chain graph and form `new_txs`. You then use
    /// `inflate_update` to turn this into an update `ChainGraph<P, Cow<Transaction>>` and finally
    /// use [`determine_changeset`] to generate the changeset from it.
    ///
    /// [`SparseChain`]: crate::sparse_chain::SparseChain
    /// [`Cow<'a, T>`]: std::borrow::Cow
    /// [`script_get_history`]: https://docs.rs/electrum-client/latest/electrum_client/trait.ElectrumApi.html#tymethod.script_get_history
    /// [`determine_changeset`]: Self::determine_changeset
    pub fn inflate_update(
        &self,
        update: SparseChain<P>,
        new_txs: impl IntoIterator<Item = Transaction>,
    ) -> Result<ChainGraph<P>, NewError<P>> {
        let mut inflated_chain = SparseChain::default();
        let mut inflated_graph = TxGraph::default();

        // Carry over every checkpoint of the update verbatim.
        for (height, hash) in update.checkpoints().clone().into_iter() {
            let _ = inflated_chain
                .insert_checkpoint(BlockId { height, hash })
                .expect("must insert");
        }

        // [TODO] @evanlinjin: These need better comments
        // - copy transactions that have changed positions into the graph
        // - add new transactions to an inflated chain
        for (pos, txid) in update.txids() {
            match self.chain.tx_position(*txid) {
                Some(original_pos) => {
                    if original_pos != pos {
                        // Position changed: we already hold the full tx, copy it across.
                        let tx = self
                            .graph
                            .get_tx(*txid)
                            .expect("tx must exist as it is referenced in sparsechain")
                            .clone();
                        let _ = inflated_chain
                            .insert_tx(*txid, pos.clone())
                            .expect("must insert since this was already in update");
                        let _ = inflated_graph.insert_tx(tx);
                    }
                }
                None => {
                    // Unknown tx: record its position; the full tx must come via `new_txs`
                    // (or `ChainGraph::new` below will report it as missing).
                    let _ = inflated_chain
                        .insert_tx(*txid, pos.clone())
                        .expect("must insert since this was already in update");
                }
            }
        }

        for tx in new_txs {
            let _ = inflated_graph.insert_tx(tx);
        }

        // `new` re-validates the two invariants (completeness + no conflicts).
        ChainGraph::new(inflated_chain, inflated_graph)
    }

    /// Gets the checkpoint limit.
    ///
    /// Refer to [`SparseChain::checkpoint_limit`] for more.
    pub fn checkpoint_limit(&self) -> Option<usize> {
        self.chain.checkpoint_limit()
    }

    /// Sets the checkpoint limit.
    ///
    /// Refer to [`SparseChain::set_checkpoint_limit`] for more.
    pub fn set_checkpoint_limit(&mut self, limit: Option<usize>) {
        self.chain.set_checkpoint_limit(limit)
    }

    /// Determines the changes required to invalidate checkpoints `from_height` (inclusive) and
    /// above. Displaced transactions will have their positions moved to [`TxHeight::Unconfirmed`].
    pub fn invalidate_checkpoints_preview(&self, from_height: u32) -> ChangeSet<P> {
        ChangeSet {
            chain: self.chain.invalidate_checkpoints_preview(from_height),
            // Graph data is never removed by an invalidation.
            ..Default::default()
        }
    }

    /// Invalidate checkpoints `from_height` (inclusive) and above. Displaced transactions will be
    /// re-positioned to [`TxHeight::Unconfirmed`].
    ///
    /// This is equivalent to calling [`Self::invalidate_checkpoints_preview`] and
    /// [`Self::apply_changeset`] in sequence.
    pub fn invalidate_checkpoints(&mut self, from_height: u32) -> ChangeSet<P>
    where
        ChangeSet<P>: Clone,
    {
        let changeset = self.invalidate_checkpoints_preview(from_height);
        self.apply_changeset(changeset.clone());
        changeset
    }

    /// Get a transaction currently in the underlying [`SparseChain`].
    ///
    /// This does not necessarily mean that it is *confirmed* in the blockchain; it might just be in
    /// the unconfirmed transaction list within the [`SparseChain`].
    pub fn get_tx_in_chain(&self, txid: Txid) -> Option<(&P, &Transaction)> {
        let position = self.chain.tx_position(txid)?;
        // Invariant 1: everything in `chain` has its full tx in `graph`.
        let full_tx = self.graph.get_tx(txid).expect("must exist");
        Some((position, full_tx))
    }

    /// Determines the changes required to insert a transaction into the inner [`ChainGraph`] and
    /// [`SparseChain`] at the given `position`.
    ///
    /// If inserting it into the chain `position` will result in conflicts, the returned
    /// [`ChangeSet`] should evict conflicting transactions.
    pub fn insert_tx_preview(
        &self,
        tx: Transaction,
        pos: P,
    ) -> Result<ChangeSet<P>, InsertTxError<P>> {
        let mut changeset = ChangeSet {
            chain: self.chain.insert_tx_preview(tx.txid(), pos)?,
            graph: self.graph.insert_tx_preview(tx),
        };
        self.fix_conflicts(&mut changeset)?;
        Ok(changeset)
    }

    /// Inserts [`Transaction`] at the given chain position.
    ///
    /// This is equivalent to calling [`Self::insert_tx_preview`] and [`Self::apply_changeset`] in
    /// sequence.
    pub fn insert_tx(&mut self, tx: Transaction, pos: P) -> Result<ChangeSet<P>, InsertTxError<P>> {
        let changeset = self.insert_tx_preview(tx, pos)?;
        self.apply_changeset(changeset.clone());
        Ok(changeset)
    }

    /// Determines the changes required to insert a [`TxOut`] into the internal [`TxGraph`].
    pub fn insert_txout_preview(&self, outpoint: OutPoint, txout: TxOut) -> ChangeSet<P> {
        ChangeSet {
            chain: Default::default(),
            graph: self.graph.insert_txout_preview(outpoint, txout),
        }
    }

    /// Inserts a [`TxOut`] into the internal [`TxGraph`].
    ///
    /// This is equivalent to calling [`Self::insert_txout_preview`] and [`Self::apply_changeset`]
    /// in sequence.
    pub fn insert_txout(&mut self, outpoint: OutPoint, txout: TxOut) -> ChangeSet<P> {
        let changeset = self.insert_txout_preview(outpoint, txout);
        self.apply_changeset(changeset.clone());
        changeset
    }

    /// Determines the changes required to insert a `block_id` (a height and block hash) into the
    /// chain.
    ///
    /// If a checkpoint with a different hash already exists at that height, this will return an error.
    pub fn insert_checkpoint_preview(
        &self,
        block_id: BlockId,
    ) -> Result<ChangeSet<P>, InsertCheckpointError> {
        self.chain
            .insert_checkpoint_preview(block_id)
            .map(|chain_changeset| ChangeSet {
                chain: chain_changeset,
                ..Default::default()
            })
    }

    /// Inserts checkpoint into [`Self`].
    ///
    /// This is equivalent to calling [`Self::insert_checkpoint_preview`] and
    /// [`Self::apply_changeset`] in sequence.
    pub fn insert_checkpoint(
        &mut self,
        block_id: BlockId,
    ) -> Result<ChangeSet<P>, InsertCheckpointError> {
        let changeset = self.insert_checkpoint_preview(block_id)?;
        self.apply_changeset(changeset.clone());
        Ok(changeset)
    }

    /// Calculates the difference between self and `update` in the form of a [`ChangeSet`].
    pub fn determine_changeset(
        &self,
        update: &ChainGraph<P>,
    ) -> Result<ChangeSet<P>, UpdateError<P>> {
        let chain_changeset = self
            .chain
            .determine_changeset(&update.chain)
            .map_err(UpdateError::Chain)?;

        let mut changeset = ChangeSet {
            chain: chain_changeset,
            graph: self.graph.determine_additions(&update.graph),
        };

        self.fix_conflicts(&mut changeset)?;
        Ok(changeset)
    }

    /// Given a transaction, return an iterator of `txid`s that conflict with it (spends at least
    /// one of the same inputs). This iterator includes all descendants of conflicting transactions.
    ///
    /// This method only returns conflicts that exist in the [`SparseChain`] as transactions that
    /// are not included in [`SparseChain`] are already considered as evicted.
    pub fn tx_conflicts_in_chain<'a>(
        &'a self,
        tx: &'a Transaction,
    ) -> impl Iterator<Item = (&'a P, Txid)> + 'a {
        self.graph.walk_conflicts(tx, move |_, conflict_txid| {
            // Returning `None` filters out conflicts not present in the chain.
            self.chain
                .tx_position(conflict_txid)
                .map(|conflict_pos| (conflict_pos, conflict_txid))
        })
    }

    /// Fix changeset conflicts.
    ///
    /// **WARNING:** If there are any missing full txs, conflict resolution will not be complete. In
    /// debug mode, this will result in panic.
    fn fix_conflicts(&self, changeset: &mut ChangeSet<P>) -> Result<(), UnresolvableConflict<P>> {
        let mut chain_conflicts = vec![];

        // Phase 1: gather every (new tx, existing chain tx) conflict pair.
        for (&txid, pos_change) in &changeset.chain.txids {
            let pos = match pos_change {
                Some(pos) => {
                    // Ignore txs that are still in the chain -- we only care about new ones
                    if self.chain.tx_position(txid).is_some() {
                        continue;
                    }
                    pos
                }
                // Ignore txids that are being deleted by the change (they can't conflict)
                None => continue,
            };

            // The full tx may come from our graph or from the changeset's graph additions.
            let mut full_tx = self.graph.get_tx(txid);

            if full_tx.is_none() {
                full_tx = changeset.graph.tx.iter().find(|tx| tx.txid() == txid)
            }

            debug_assert!(full_tx.is_some(), "should have full tx at this point");

            let full_tx = match full_tx {
                Some(full_tx) => full_tx,
                None => continue,
            };

            for (conflict_pos, conflict_txid) in self.tx_conflicts_in_chain(full_tx) {
                chain_conflicts.push((pos.clone(), txid, conflict_pos, conflict_txid))
            }
        }

        // Phase 2: resolve each conflict, or fail if a confirmed tx would need evicting.
        for (update_pos, update_txid, conflicting_pos, conflicting_txid) in chain_conflicts {
            // We have found a tx that conflicts with our update txid. Only allow this when the
            // conflicting tx will be positioned as "unconfirmed" after the update is applied.
            // If so, we will modify the changeset to evict the conflicting txid.

            // determine the position of the conflicting txid after the current changeset is applied
            let conflicting_new_pos = changeset
                .chain
                .txids
                .get(&conflicting_txid)
                .map(Option::as_ref)
                .unwrap_or(Some(conflicting_pos));

            match conflicting_new_pos {
                None => {
                    // conflicting txid will be deleted, can ignore
                }
                Some(existing_new_pos) => match existing_new_pos.height() {
                    TxHeight::Confirmed(_) => {
                        // the new position of the conflicting tx is "confirmed", therefore cannot be
                        // evicted, return error
                        return Err(UnresolvableConflict {
                            already_confirmed_tx: (conflicting_pos.clone(), conflicting_txid),
                            update_tx: (update_pos, update_txid),
                        });
                    }
                    TxHeight::Unconfirmed => {
                        // the new position of the conflicting tx is "unconfirmed", therefore it can
                        // be evicted
                        changeset.chain.txids.insert(conflicting_txid, None);
                    }
                },
            };
        }

        Ok(())
    }

    /// Applies `changeset` to `self`.
    ///
    /// **Warning** this method assumes that the changeset is correctly formed. If it is not, the
    /// chain graph may behave incorrectly in the future and panic unexpectedly.
    pub fn apply_changeset(&mut self, changeset: ChangeSet<P>) {
        self.chain.apply_changeset(changeset.chain);
        self.graph.apply_additions(changeset.graph);
    }

    /// Applies the `update` chain graph. Note this is shorthand for calling
    /// [`Self::determine_changeset()`] and [`Self::apply_changeset()`] in sequence.
    pub fn apply_update(&mut self, update: ChainGraph<P>) -> Result<ChangeSet<P>, UpdateError<P>> {
        let changeset = self.determine_changeset(&update)?;
        self.apply_changeset(changeset.clone());
        Ok(changeset)
    }

    /// Get the full transaction output at an outpoint if it exists in the chain and the graph.
    pub fn full_txout(&self, outpoint: OutPoint) -> Option<FullTxOut<P>> {
        self.chain.full_txout(&self.graph, outpoint)
    }

    /// Iterate over the full transactions and their position in the chain ordered by their position
    /// in ascending order.
    pub fn transactions_in_chain(&self) -> impl DoubleEndedIterator<Item = (&P, &Transaction)> {
        self.chain
            .txids()
            .map(move |(pos, txid)| (pos, self.graph.get_tx(*txid).expect("must exist")))
    }

    /// Find the transaction in the chain that spends `outpoint`.
    ///
    /// This uses the input/output relationships in the internal `graph`. Note that the transaction
    /// which includes `outpoint` does not need to be in the `graph` or the `chain` for this to
    /// return `Some(_)`.
    pub fn spent_by(&self, outpoint: OutPoint) -> Option<(&P, Txid)> {
        self.chain.spent_by(&self.graph, outpoint)
    }

    /// Whether the chain graph contains any data whatsoever.
    pub fn is_empty(&self) -> bool {
        self.chain.is_empty() && self.graph.is_empty()
    }
}
|
||||
|
||||
/// Represents changes to [`ChainGraph`].
///
/// This is essentially a combination of [`sparse_chain::ChangeSet`] and [`tx_graph::Additions`].
#[derive(Debug, Clone, PartialEq)]
#[cfg_attr(
    feature = "serde",
    derive(serde::Deserialize, serde::Serialize),
    serde(
        crate = "serde_crate",
        bound(
            deserialize = "P: serde::Deserialize<'de>",
            serialize = "P: serde::Serialize"
        )
    )
)]
#[must_use]
pub struct ChangeSet<P> {
    /// Chain changes: txid -> new position (`None` means the txid is evicted).
    pub chain: sparse_chain::ChangeSet<P>,
    /// Additions to the transaction graph (graph data is only ever added, never removed).
    pub graph: tx_graph::Additions,
}
|
||||
|
||||
impl<P> ChangeSet<P> {
|
||||
/// Returns `true` if this [`ChangeSet`] records no changes.
|
||||
pub fn is_empty(&self) -> bool {
|
||||
self.chain.is_empty() && self.graph.is_empty()
|
||||
}
|
||||
|
||||
/// Returns `true` if this [`ChangeSet`] contains transaction evictions.
|
||||
pub fn contains_eviction(&self) -> bool {
|
||||
self.chain
|
||||
.txids
|
||||
.iter()
|
||||
.any(|(_, new_pos)| new_pos.is_none())
|
||||
}
|
||||
|
||||
/// Appends the changes in `other` into self such that applying `self` afterward has the same
|
||||
/// effect as sequentially applying the original `self` and `other`.
|
||||
pub fn append(&mut self, other: ChangeSet<P>)
|
||||
where
|
||||
P: ChainPosition,
|
||||
{
|
||||
self.chain.append(other.chain);
|
||||
self.graph.append(other.graph);
|
||||
}
|
||||
}
|
||||
|
||||
impl<P> Default for ChangeSet<P> {
    /// The empty changeset (applying it is a no-op).
    fn default() -> Self {
        Self {
            chain: Default::default(),
            graph: Default::default(),
        }
    }
}
|
||||
|
||||
impl<P> ForEachTxOut for ChainGraph<P> {
    /// Visits every txout known to the internal graph (not just those in the chain).
    fn for_each_txout(&self, f: impl FnMut((OutPoint, &TxOut))) {
        self.graph.for_each_txout(f)
    }
}
|
||||
|
||||
impl<P> ForEachTxOut for ChangeSet<P> {
    /// Visits every txout contained in the changeset's graph additions.
    fn for_each_txout(&self, f: impl FnMut((OutPoint, &TxOut))) {
        self.graph.for_each_txout(f)
    }
}
|
||||
|
||||
/// Error that may occur when calling [`ChainGraph::new`].
#[derive(Clone, Debug, PartialEq)]
pub enum NewError<P> {
    /// Two transactions within the sparse chain conflicted with each other
    Conflict { a: (P, Txid), b: (P, Txid) },
    /// One or more transactions in the chain were not in the graph
    Missing(HashSet<Txid>),
}
|
||||
|
||||
impl<P: core::fmt::Debug> core::fmt::Display for NewError<P> {
|
||||
fn fmt(&self, f: &mut core::fmt::Formatter<'_>) -> core::fmt::Result {
|
||||
match self {
|
||||
NewError::Conflict { a, b } => write!(
|
||||
f,
|
||||
"Unable to inflate sparse chain to chain graph since transactions {:?} and {:?}",
|
||||
a, b
|
||||
),
|
||||
NewError::Missing(missing) => write!(
|
||||
f,
|
||||
"missing full transactions for {}",
|
||||
missing
|
||||
.iter()
|
||||
.map(|txid| txid.to_string())
|
||||
.collect::<Vec<_>>()
|
||||
.join(", ")
|
||||
),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// `std::error::Error` is only available with the `std` feature (crate supports no_std).
#[cfg(feature = "std")]
impl<P: core::fmt::Debug> std::error::Error for NewError<P> {}
|
||||
|
||||
/// Error that may occur when inserting a transaction.
///
/// Refer to [`ChainGraph::insert_tx_preview`] and [`ChainGraph::insert_tx`].
#[derive(Clone, Debug, PartialEq)]
pub enum InsertTxError<P> {
    /// The underlying sparse chain rejected the insertion.
    Chain(sparse_chain::InsertTxError<P>),
    /// The transaction conflicts with an already-confirmed transaction.
    UnresolvableConflict(UnresolvableConflict<P>),
}
|
||||
|
||||
impl<P: core::fmt::Debug> core::fmt::Display for InsertTxError<P> {
|
||||
fn fmt(&self, f: &mut core::fmt::Formatter<'_>) -> core::fmt::Result {
|
||||
match self {
|
||||
InsertTxError::Chain(inner) => core::fmt::Display::fmt(inner, f),
|
||||
InsertTxError::UnresolvableConflict(inner) => core::fmt::Display::fmt(inner, f),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl<P> From<sparse_chain::InsertTxError<P>> for InsertTxError<P> {
    /// Wraps a sparse-chain insertion error (enables `?` conversion).
    fn from(inner: sparse_chain::InsertTxError<P>) -> Self {
        Self::Chain(inner)
    }
}
|
||||
|
||||
// `std::error::Error` is only available with the `std` feature.
#[cfg(feature = "std")]
impl<P: core::fmt::Debug> std::error::Error for InsertTxError<P> {}
|
||||
|
||||
/// A nice alias of [`sparse_chain::InsertCheckpointError`] (checkpoint insertion needs no
/// extra failure modes at the chain-graph level).
pub type InsertCheckpointError = sparse_chain::InsertCheckpointError;
|
||||
|
||||
/// Represents an update failure.
#[derive(Clone, Debug, PartialEq)]
pub enum UpdateError<P> {
    /// The update chain was inconsistent with the existing chain
    Chain(sparse_chain::UpdateError<P>),
    /// A transaction in the update spent the same input as an already confirmed transaction
    UnresolvableConflict(UnresolvableConflict<P>),
}
|
||||
|
||||
impl<P: core::fmt::Debug> core::fmt::Display for UpdateError<P> {
|
||||
fn fmt(&self, f: &mut core::fmt::Formatter<'_>) -> core::fmt::Result {
|
||||
match self {
|
||||
UpdateError::Chain(inner) => core::fmt::Display::fmt(inner, f),
|
||||
UpdateError::UnresolvableConflict(inner) => core::fmt::Display::fmt(inner, f),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl<P> From<sparse_chain::UpdateError<P>> for UpdateError<P> {
    /// Wraps a sparse-chain update error (enables `?` conversion).
    fn from(inner: sparse_chain::UpdateError<P>) -> Self {
        Self::Chain(inner)
    }
}
|
||||
|
||||
// `std::error::Error` is only available with the `std` feature.
#[cfg(feature = "std")]
impl<P: core::fmt::Debug> std::error::Error for UpdateError<P> {}
|
||||
|
||||
/// Represents an unresolvable conflict between an update's transaction and an
/// already-confirmed transaction.
#[derive(Clone, Debug, PartialEq)]
pub struct UnresolvableConflict<P> {
    /// Position and txid of the confirmed transaction that cannot be evicted.
    pub already_confirmed_tx: (P, Txid),
    /// Position and txid of the update transaction that caused the conflict.
    pub update_tx: (P, Txid),
}
|
||||
|
||||
impl<P: core::fmt::Debug> core::fmt::Display for UnresolvableConflict<P> {
    fn fmt(&self, f: &mut core::fmt::Formatter<'_>) -> core::fmt::Result {
        let Self {
            already_confirmed_tx,
            update_tx,
        } = self;
        // `.1` is the txid, `.0` the chain position (printed via Debug since P is generic).
        write!(f, "update transaction {} at height {:?} conflicts with an already confirmed transaction {} at height {:?}",
            update_tx.1, update_tx.0, already_confirmed_tx.1, already_confirmed_tx.0)
    }
}
|
||||
|
||||
impl<P> From<UnresolvableConflict<P>> for UpdateError<P> {
    /// Wraps an unresolvable conflict as an update error (enables `?` conversion).
    fn from(inner: UnresolvableConflict<P>) -> Self {
        Self::UnresolvableConflict(inner)
    }
}
|
||||
|
||||
impl<P> From<UnresolvableConflict<P>> for InsertTxError<P> {
    /// Wraps an unresolvable conflict as an insertion error (enables `?` conversion).
    fn from(inner: UnresolvableConflict<P>) -> Self {
        Self::UnresolvableConflict(inner)
    }
}
|
||||
|
||||
// `std::error::Error` is only available with the `std` feature.
#[cfg(feature = "std")]
impl<P: core::fmt::Debug> std::error::Error for UnresolvableConflict<P> {}
|
||||
16
crates/chain/src/descriptor_ext.rs
Normal file
16
crates/chain/src/descriptor_ext.rs
Normal file
@@ -0,0 +1,16 @@
|
||||
use crate::miniscript::{Descriptor, DescriptorPublicKey};
|
||||
|
||||
/// A trait to extend the functionality of a miniscript descriptor.
pub trait DescriptorExt {
    /// Returns the minimum value (in satoshis) at which an output is broadcastable.
    ///
    /// Outputs with a value below this threshold are considered dust under
    /// standard relay policy.
    fn dust_value(&self) -> u64;
}
|
||||
|
||||
impl DescriptorExt for Descriptor<DescriptorPublicKey> {
    fn dust_value(&self) -> u64 {
        // Index 0 is an arbitrary representative derivation; we only need a
        // concrete script pubkey to ask for its dust threshold.
        let spk = self.at_derivation_index(0).script_pubkey();
        spk.dust_value().to_sat()
    }
}
|
||||
30
crates/chain/src/example_utils.rs
Normal file
30
crates/chain/src/example_utils.rs
Normal file
@@ -0,0 +1,30 @@
|
||||
#![allow(unused)]
|
||||
use alloc::vec::Vec;
|
||||
use bitcoin::{
|
||||
consensus,
|
||||
hashes::{hex::FromHex, Hash},
|
||||
Transaction,
|
||||
};
|
||||
|
||||
use crate::BlockId;
|
||||
|
||||
// Consensus-encoded transactions (hex) used as fixtures by examples/docs.
// Decode them with `tx_from_hex` below.
pub const RAW_TX_1: &str = "0200000000010116d6174da7183d70d0a7d4dc314d517a7d135db79ad63515028b293a76f4f9d10000000000feffffff023a21fc8350060000160014531c405e1881ef192294b8813631e258bf98ea7a1027000000000000225120a60869f0dbcf1dc659c9cecbaf8050135ea9e8cdc487053f1dc6880949dc684c024730440220591b1a172a122da49ba79a3e79f98aaa03fd7a372f9760da18890b6a327e6010022013e82319231da6c99abf8123d7c07e13cf9bd8d76e113e18dc452e5024db156d012102318a2d558b2936c52e320decd6d92a88d7f530be91b6fe0af5caf41661e77da3ef2e0100";
pub const RAW_TX_2: &str = "02000000000101a688607020cfae91a61e7c516b5ef1264d5d77f17200c3866826c6c808ebf1620000000000feffffff021027000000000000225120a60869f0dbcf1dc659c9cecbaf8050135ea9e8cdc487053f1dc6880949dc684c20fd48ff530600001600146886c525e41d4522042bd0b159dfbade2504a6bb024730440220740ff7e665cd20565d4296b549df8d26b941be3f1e3af89a0b60e50c0dbeb69a02206213ab7030cf6edc6c90d4ccf33010644261e029950a688dc0b1a9ebe6ddcc5a012102f2ac6b396a97853cb6cd62242c8ae4842024742074475023532a51e9c53194253e760100";
pub const RAW_TX_3: &str = "0200000000010135d67ee47b557e68b8c6223958f597381965ed719f1207ee2b9e20432a24a5dc0100000000feffffff021027000000000000225120a82f29944d65b86ae6b5e5cc75e294ead6c59391a1edc5e016e3498c67fc7bbb62215a5055060000160014070df7671dea67a50c4799a744b5c9be8f4bac690247304402207ebf8d29f71fd03e7e6977b3ea78ca5fcc5c49a42ae822348fc401862fdd766c02201d7e4ff0684ecb008b6142f36ead1b0b4d615524c4f58c261113d361f4427e25012103e6a75e2fab85e5ecad641afc4ffba7222f998649d9f18cac92f0fcc8618883b3ee760100";
pub const RAW_TX_4: &str = "02000000000101d00e8f76ed313e19b339ee293c0f52b0325c95e24c8f3966fa353fb2bedbcf580100000000feffffff021027000000000000225120882d74e5d0572d5a816cef0041a96b6c1de832f6f9676d9605c44d5e9a97d3dc9cda55fe53060000160014852b5864b8edd42fab4060c87f818e50780865ff0247304402201dccbb9bed7fba924b6d249c5837cc9b37470c0e3d8fbea77cb59baba3efe6fa0220700cc170916913b9bfc2bc0fefb6af776e8b542c561702f136cddc1c7aa43141012103acec3fc79dbbca745815c2a807dc4e81010c80e308e84913f59cb42a275dad97f3760100";
|
||||
|
||||
pub fn tx_from_hex(s: &str) -> Transaction {
|
||||
let raw = Vec::from_hex(s).expect("data must be in hex");
|
||||
consensus::deserialize(raw.as_slice()).expect("must deserialize")
|
||||
}
|
||||
|
||||
pub fn new_hash<H: Hash>(s: &str) -> H {
|
||||
<H as bitcoin::hashes::Hash>::hash(s.as_bytes())
|
||||
}
|
||||
|
||||
pub fn new_block_id(height: u32, hash: &str) -> BlockId {
|
||||
BlockId {
|
||||
height,
|
||||
hash: new_hash(hash),
|
||||
}
|
||||
}
|
||||
309
crates/chain/src/keychain.rs
Normal file
309
crates/chain/src/keychain.rs
Normal file
@@ -0,0 +1,309 @@
|
||||
//! Module for keychain related structures.
|
||||
//!
|
||||
//! A keychain here is a set of application-defined indexes for a miniscript descriptor where we can
|
||||
//! derive script pubkeys at a particular derivation index. The application's index is simply
|
||||
//! anything that implements `Ord`.
|
||||
//!
|
||||
//! [`KeychainTxOutIndex`] indexes script pubkeys of keychains and scans in relevant outpoints (that
|
||||
//! have a `txout` containing an indexed script pubkey). Internally, this uses [`SpkTxOutIndex`], but
|
||||
//! also maintains "revealed" and "lookahead" index counts per keychain.
|
||||
//!
|
||||
//! [`KeychainTracker`] combines [`ChainGraph`] and [`KeychainTxOutIndex`] and enforces atomic
|
||||
//! changes between both these structures. [`KeychainScan`] is a structure used to update a
|
||||
//! [`KeychainTracker`] and changes made on a [`KeychainTracker`] are reported by
|
||||
//! [`KeychainChangeSet`]s.
|
||||
//!
|
||||
//! [`SpkTxOutIndex`]: crate::SpkTxOutIndex
|
||||
use crate::{
|
||||
chain_graph::{self, ChainGraph},
|
||||
collections::BTreeMap,
|
||||
sparse_chain::ChainPosition,
|
||||
tx_graph::TxGraph,
|
||||
ForEachTxOut,
|
||||
};
|
||||
|
||||
#[cfg(feature = "miniscript")]
|
||||
pub mod persist;
|
||||
#[cfg(feature = "miniscript")]
|
||||
pub use persist::*;
|
||||
#[cfg(feature = "miniscript")]
|
||||
mod tracker;
|
||||
#[cfg(feature = "miniscript")]
|
||||
pub use tracker::*;
|
||||
#[cfg(feature = "miniscript")]
|
||||
mod txout_index;
|
||||
#[cfg(feature = "miniscript")]
|
||||
pub use txout_index::*;
|
||||
|
||||
/// Represents updates to the derivation index of a [`KeychainTxOutIndex`].
|
||||
///
|
||||
/// It can be applied to [`KeychainTxOutIndex`] with [`apply_additions`]. [`DerivationAdditions`] are
|
||||
/// monotone in that they will never decrease the revealed derivation index.
|
||||
///
|
||||
/// [`KeychainTxOutIndex`]: crate::keychain::KeychainTxOutIndex
|
||||
/// [`apply_additions`]: crate::keychain::KeychainTxOutIndex::apply_additions
|
||||
#[derive(Clone, Debug, PartialEq)]
#[cfg_attr(
    feature = "serde",
    derive(serde::Deserialize, serde::Serialize),
    serde(
        crate = "serde_crate",
        bound(
            deserialize = "K: Ord + serde::Deserialize<'de>",
            serialize = "K: Ord + serde::Serialize"
        )
    )
)]
#[must_use]
// Maps each keychain `K` to its new last-revealed derivation index.
pub struct DerivationAdditions<K>(pub BTreeMap<K, u32>);
|
||||
|
||||
impl<K> DerivationAdditions<K> {
|
||||
/// Returns whether the additions are empty.
|
||||
pub fn is_empty(&self) -> bool {
|
||||
self.0.is_empty()
|
||||
}
|
||||
|
||||
/// Get the inner map of the keychain to its new derivation index.
|
||||
pub fn as_inner(&self) -> &BTreeMap<K, u32> {
|
||||
&self.0
|
||||
}
|
||||
}
|
||||
|
||||
impl<K: Ord> DerivationAdditions<K> {
|
||||
/// Append another [`DerivationAdditions`] into self.
|
||||
///
|
||||
/// If the keychain already exists, increase the index when the other's index > self's index.
|
||||
/// If the keychain did not exist, append the new keychain.
|
||||
pub fn append(&mut self, mut other: Self) {
|
||||
self.0.iter_mut().for_each(|(key, index)| {
|
||||
if let Some(other_index) = other.0.remove(key) {
|
||||
*index = other_index.max(*index);
|
||||
}
|
||||
});
|
||||
|
||||
self.0.append(&mut other.0);
|
||||
}
|
||||
}
|
||||
|
||||
impl<K> Default for DerivationAdditions<K> {
|
||||
fn default() -> Self {
|
||||
Self(Default::default())
|
||||
}
|
||||
}
|
||||
|
||||
impl<K> AsRef<BTreeMap<K, u32>> for DerivationAdditions<K> {
    /// Borrows the inner keychain-to-index map.
    fn as_ref(&self) -> &BTreeMap<K, u32> {
        self.as_inner()
    }
}
|
||||
|
||||
/// An update that includes the last active indexes of each keychain.
#[derive(Clone, Debug, PartialEq)]
pub struct KeychainScan<K, P> {
    /// The update data in the form of a chain that could be applied
    pub update: ChainGraph<P>,
    /// The last active indexes of each keychain
    pub last_active_indices: BTreeMap<K, u32>,
}
|
||||
|
||||
impl<K, P> Default for KeychainScan<K, P> {
|
||||
fn default() -> Self {
|
||||
Self {
|
||||
update: Default::default(),
|
||||
last_active_indices: Default::default(),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl<K, P> From<ChainGraph<P>> for KeychainScan<K, P> {
|
||||
fn from(update: ChainGraph<P>) -> Self {
|
||||
KeychainScan {
|
||||
update,
|
||||
last_active_indices: Default::default(),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
/// Represents changes to a [`KeychainTracker`].
///
/// This is essentially a combination of [`DerivationAdditions`] and [`chain_graph::ChangeSet`].
#[derive(Clone, Debug)]
#[cfg_attr(
    feature = "serde",
    derive(serde::Deserialize, serde::Serialize),
    serde(
        crate = "serde_crate",
        bound(
            deserialize = "K: Ord + serde::Deserialize<'de>, P: serde::Deserialize<'de>",
            serialize = "K: Ord + serde::Serialize, P: serde::Serialize"
        )
    )
)]
#[must_use]
pub struct KeychainChangeSet<K, P> {
    /// The changes in local keychain derivation indices
    pub derivation_indices: DerivationAdditions<K>,
    /// The changes that have occurred in the blockchain
    pub chain_graph: chain_graph::ChangeSet<P>,
}
|
||||
|
||||
impl<K, P> Default for KeychainChangeSet<K, P> {
|
||||
fn default() -> Self {
|
||||
Self {
|
||||
chain_graph: Default::default(),
|
||||
derivation_indices: Default::default(),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl<K, P> KeychainChangeSet<K, P> {
|
||||
/// Returns whether the [`KeychainChangeSet`] is empty (no changes recorded).
|
||||
pub fn is_empty(&self) -> bool {
|
||||
self.chain_graph.is_empty() && self.derivation_indices.is_empty()
|
||||
}
|
||||
|
||||
/// Appends the changes in `other` into `self` such that applying `self` afterward has the same
|
||||
/// effect as sequentially applying the original `self` and `other`.
|
||||
///
|
||||
/// Note the derivation indices cannot be decreased, so `other` will only change the derivation
|
||||
/// index for a keychain, if it's value is higher than the one in `self`.
|
||||
pub fn append(&mut self, other: KeychainChangeSet<K, P>)
|
||||
where
|
||||
K: Ord,
|
||||
P: ChainPosition,
|
||||
{
|
||||
self.derivation_indices.append(other.derivation_indices);
|
||||
self.chain_graph.append(other.chain_graph);
|
||||
}
|
||||
}
|
||||
|
||||
impl<K, P> From<chain_graph::ChangeSet<P>> for KeychainChangeSet<K, P> {
|
||||
fn from(changeset: chain_graph::ChangeSet<P>) -> Self {
|
||||
Self {
|
||||
chain_graph: changeset,
|
||||
..Default::default()
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl<K, P> From<DerivationAdditions<K>> for KeychainChangeSet<K, P> {
|
||||
fn from(additions: DerivationAdditions<K>) -> Self {
|
||||
Self {
|
||||
derivation_indices: additions,
|
||||
..Default::default()
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl<K, P> AsRef<TxGraph> for KeychainScan<K, P> {
    /// Borrows the transaction graph underlying the scan's update.
    fn as_ref(&self) -> &TxGraph {
        self.update.graph()
    }
}
|
||||
|
||||
impl<K, P> ForEachTxOut for KeychainChangeSet<K, P> {
    /// Visits every txout contained in the chain-graph part of the changeset.
    fn for_each_txout(&self, f: impl FnMut((bitcoin::OutPoint, &bitcoin::TxOut))) {
        self.chain_graph.for_each_txout(f)
    }
}
|
||||
|
||||
/// Balance, differentiated into various categories.
///
/// All amounts are in satoshis.
#[derive(Debug, PartialEq, Eq, Clone, Default)]
#[cfg_attr(
    feature = "serde",
    derive(serde::Deserialize, serde::Serialize),
    serde(crate = "serde_crate",)
)]
pub struct Balance {
    /// All coinbase outputs not yet matured
    pub immature: u64,
    /// Unconfirmed UTXOs generated by a wallet tx
    pub trusted_pending: u64,
    /// Unconfirmed UTXOs received from an external wallet
    pub untrusted_pending: u64,
    /// Confirmed and immediately spendable balance
    pub confirmed: u64,
}
|
||||
|
||||
impl Balance {
|
||||
/// Get sum of trusted_pending and confirmed coins.
|
||||
///
|
||||
/// This is the balance you can spend right now that shouldn't get cancelled via another party
|
||||
/// double spending it.
|
||||
pub fn trusted_spendable(&self) -> u64 {
|
||||
self.confirmed + self.trusted_pending
|
||||
}
|
||||
|
||||
/// Get the whole balance visible to the wallet.
|
||||
pub fn total(&self) -> u64 {
|
||||
self.confirmed + self.trusted_pending + self.untrusted_pending + self.immature
|
||||
}
|
||||
}
|
||||
|
||||
impl core::fmt::Display for Balance {
|
||||
fn fmt(&self, f: &mut core::fmt::Formatter<'_>) -> core::fmt::Result {
|
||||
write!(
|
||||
f,
|
||||
"{{ immature: {}, trusted_pending: {}, untrusted_pending: {}, confirmed: {} }}",
|
||||
self.immature, self.trusted_pending, self.untrusted_pending, self.confirmed
|
||||
)
|
||||
}
|
||||
}
|
||||
|
||||
impl core::ops::Add for Balance {
|
||||
type Output = Self;
|
||||
|
||||
fn add(self, other: Self) -> Self {
|
||||
Self {
|
||||
immature: self.immature + other.immature,
|
||||
trusted_pending: self.trusted_pending + other.trusted_pending,
|
||||
untrusted_pending: self.untrusted_pending + other.untrusted_pending,
|
||||
confirmed: self.confirmed + other.confirmed,
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
#[cfg(test)]
mod test {
    use crate::TxHeight;

    use super::*;

    // Verifies the merge semantics of `KeychainChangeSet::append` for the
    // derivation-index map: per-keychain maximum wins, missing keys are added.
    #[test]
    fn append_keychain_derivation_indices() {
        #[derive(Ord, PartialOrd, Eq, PartialEq, Clone, Debug)]
        enum Keychain {
            One,
            Two,
            Three,
            Four,
        }
        let mut lhs_di = BTreeMap::<Keychain, u32>::default();
        let mut rhs_di = BTreeMap::<Keychain, u32>::default();
        lhs_di.insert(Keychain::One, 7);
        lhs_di.insert(Keychain::Two, 0);
        rhs_di.insert(Keychain::One, 3);
        rhs_di.insert(Keychain::Two, 5);
        lhs_di.insert(Keychain::Three, 3);
        rhs_di.insert(Keychain::Four, 4);
        let mut lhs = KeychainChangeSet {
            derivation_indices: DerivationAdditions(lhs_di),
            chain_graph: chain_graph::ChangeSet::<TxHeight>::default(),
        };

        let rhs = KeychainChangeSet {
            derivation_indices: DerivationAdditions(rhs_di),
            chain_graph: chain_graph::ChangeSet::<TxHeight>::default(),
        };

        lhs.append(rhs);

        // Existing index doesn't update if the new index in `other` is lower than `self`.
        assert_eq!(lhs.derivation_indices.0.get(&Keychain::One), Some(&7));
        // Existing index updates if the new index in `other` is higher than `self`.
        assert_eq!(lhs.derivation_indices.0.get(&Keychain::Two), Some(&5));
        // Existing index is unchanged if keychain doesn't exist in `other`.
        assert_eq!(lhs.derivation_indices.0.get(&Keychain::Three), Some(&3));
        // New keychain gets added if the keychain is in `other` but not in `self`.
        assert_eq!(lhs.derivation_indices.0.get(&Keychain::Four), Some(&4));
    }
}
|
||||
108
crates/chain/src/keychain/persist.rs
Normal file
108
crates/chain/src/keychain/persist.rs
Normal file
@@ -0,0 +1,108 @@
|
||||
//! Persistence for changes made to a [`KeychainTracker`].
|
||||
//!
|
||||
//! BDK's [`KeychainTracker`] needs somewhere to persist changes it makes during operation.
|
||||
//! Operations like giving out a new address are crucial to persist so that next time the
|
||||
//! application is loaded, it can find transactions related to that address.
|
||||
//!
|
||||
//! Note that the [`KeychainTracker`] does not read this persisted data during operation since it
|
||||
//! always has a copy in memory.
|
||||
//!
|
||||
//! [`KeychainTracker`]: crate::keychain::KeychainTracker
|
||||
|
||||
use crate::{keychain, sparse_chain::ChainPosition};
|
||||
|
||||
/// `Persist` wraps a [`PersistBackend`] to create a convenient staging area for changes before they
|
||||
/// are persisted. Not all changes made to the [`KeychainTracker`] need to be written to disk right
|
||||
/// away so you can use [`Persist::stage`] to *stage* it first and then [`Persist::commit`] to
|
||||
/// finally write it to disk.
|
||||
///
|
||||
/// [`KeychainTracker`]: keychain::KeychainTracker
|
||||
#[derive(Debug)]
pub struct Persist<K, P, B> {
    // The storage backend that staged changes are ultimately written to.
    backend: B,
    // Changes accumulated via `stage` but not yet written by `commit`.
    stage: keychain::KeychainChangeSet<K, P>,
}
|
||||
|
||||
impl<K, P, B> Persist<K, P, B> {
|
||||
/// Create a new `Persist` from a [`PersistBackend`].
|
||||
pub fn new(backend: B) -> Self {
|
||||
Self {
|
||||
backend,
|
||||
stage: Default::default(),
|
||||
}
|
||||
}
|
||||
|
||||
/// Stage a `changeset` to later persistence with [`commit`].
|
||||
///
|
||||
/// [`commit`]: Self::commit
|
||||
pub fn stage(&mut self, changeset: keychain::KeychainChangeSet<K, P>)
|
||||
where
|
||||
K: Ord,
|
||||
P: ChainPosition,
|
||||
{
|
||||
self.stage.append(changeset)
|
||||
}
|
||||
|
||||
/// Get the changes that haven't been committed yet
|
||||
pub fn staged(&self) -> &keychain::KeychainChangeSet<K, P> {
|
||||
&self.stage
|
||||
}
|
||||
|
||||
/// Commit the staged changes to the underlying persistence backend.
|
||||
///
|
||||
/// Returns a backend-defined error if this fails.
|
||||
pub fn commit(&mut self) -> Result<(), B::WriteError>
|
||||
where
|
||||
B: PersistBackend<K, P>,
|
||||
{
|
||||
self.backend.append_changeset(&self.stage)?;
|
||||
self.stage = Default::default();
|
||||
Ok(())
|
||||
}
|
||||
}
|
||||
|
||||
/// A persistence backend for [`Persist`].
pub trait PersistBackend<K, P> {
    /// The error the backend returns when it fails to write.
    type WriteError: core::fmt::Debug;

    /// The error the backend returns when it fails to load.
    type LoadError: core::fmt::Debug;

    /// Appends a new changeset to the persistent backend.
    ///
    /// It is up to the backend what it does with this. It could store every changeset in a list or
    /// insert the actual changes into a more structured database. All it needs to guarantee is
    /// that [`load_into_keychain_tracker`] restores a keychain tracker to what it should be if all
    /// changesets had been applied sequentially.
    ///
    /// [`load_into_keychain_tracker`]: Self::load_into_keychain_tracker
    fn append_changeset(
        &mut self,
        changeset: &keychain::KeychainChangeSet<K, P>,
    ) -> Result<(), Self::WriteError>;

    /// Applies all the changesets the backend has received to `tracker`.
    fn load_into_keychain_tracker(
        &mut self,
        tracker: &mut keychain::KeychainTracker<K, P>,
    ) -> Result<(), Self::LoadError>;
}
|
||||
|
||||
// The unit type is a no-op backend: writes are discarded and loads restore
// nothing. Useful when persistence is not required.
impl<K, P> PersistBackend<K, P> for () {
    type WriteError = ();
    type LoadError = ();

    fn append_changeset(
        &mut self,
        _changeset: &keychain::KeychainChangeSet<K, P>,
    ) -> Result<(), Self::WriteError> {
        Ok(())
    }
    fn load_into_keychain_tracker(
        &mut self,
        _tracker: &mut keychain::KeychainTracker<K, P>,
    ) -> Result<(), Self::LoadError> {
        Ok(())
    }
}
|
||||
308
crates/chain/src/keychain/tracker.rs
Normal file
308
crates/chain/src/keychain/tracker.rs
Normal file
@@ -0,0 +1,308 @@
|
||||
use bitcoin::Transaction;
|
||||
use miniscript::{Descriptor, DescriptorPublicKey};
|
||||
|
||||
use crate::{
|
||||
chain_graph::{self, ChainGraph},
|
||||
collections::*,
|
||||
keychain::{KeychainChangeSet, KeychainScan, KeychainTxOutIndex},
|
||||
sparse_chain::{self, SparseChain},
|
||||
tx_graph::TxGraph,
|
||||
BlockId, FullTxOut, TxHeight,
|
||||
};
|
||||
|
||||
use super::{Balance, DerivationAdditions};
|
||||
|
||||
/// A convenient combination of a [`KeychainTxOutIndex`] and a [`ChainGraph`].
///
/// The [`KeychainTracker`] atomically updates its [`KeychainTxOutIndex`] whenever new chain data is
/// incorporated into its internal [`ChainGraph`].
#[derive(Clone, Debug)]
pub struct KeychainTracker<K, P> {
    /// Index between script pubkeys to transaction outputs
    pub txout_index: KeychainTxOutIndex<K>,
    // Kept private so chain mutations go through tracker methods, which also
    // keep `txout_index` in sync (see `apply_changeset`).
    chain_graph: ChainGraph<P>,
}
|
||||
|
||||
impl<K, P> KeychainTracker<K, P>
where
    P: sparse_chain::ChainPosition,
    K: Ord + Clone + core::fmt::Debug,
{
    /// Add a keychain to the tracker's `txout_index` with a descriptor to derive addresses.
    /// This is just shorthand for calling [`KeychainTxOutIndex::add_keychain`] on the internal
    /// `txout_index`.
    ///
    /// Adding a keychain means you will be able to derive new script pubkeys under that keychain
    /// and the tracker will discover transaction outputs with those script pubkeys.
    pub fn add_keychain(&mut self, keychain: K, descriptor: Descriptor<DescriptorPublicKey>) {
        self.txout_index.add_keychain(keychain, descriptor)
    }

    /// Get the internal map of keychains to their descriptors. This is just shorthand for calling
    /// [`KeychainTxOutIndex::keychains`] on the internal `txout_index`.
    pub fn keychains(&mut self) -> &BTreeMap<K, Descriptor<DescriptorPublicKey>> {
        self.txout_index.keychains()
    }

    /// Get the checkpoint limit of the internal [`SparseChain`].
    ///
    /// Refer to [`SparseChain::checkpoint_limit`] for more.
    pub fn checkpoint_limit(&self) -> Option<usize> {
        self.chain_graph.checkpoint_limit()
    }

    /// Set the checkpoint limit of the internal [`SparseChain`].
    ///
    /// Refer to [`SparseChain::set_checkpoint_limit`] for more.
    pub fn set_checkpoint_limit(&mut self, limit: Option<usize>) {
        self.chain_graph.set_checkpoint_limit(limit)
    }

    /// Determines the resultant [`KeychainChangeSet`] if the given [`KeychainScan`] is applied.
    ///
    /// Internally, we call [`ChainGraph::determine_changeset`] and also determine the additions of
    /// [`KeychainTxOutIndex`].
    pub fn determine_changeset(
        &self,
        scan: &KeychainScan<K, P>,
    ) -> Result<KeychainChangeSet<K, P>, chain_graph::UpdateError<P>> {
        // TODO: `KeychainTxOutIndex::determine_additions`
        let mut derivation_indices = scan.last_active_indices.clone();
        // Keep only indices that actually advance past what is already
        // revealed, so the resulting additions remain monotone.
        derivation_indices.retain(|keychain, index| {
            match self.txout_index.last_revealed_index(keychain) {
                Some(existing) => *index > existing,
                None => true,
            }
        });

        Ok(KeychainChangeSet {
            derivation_indices: DerivationAdditions(derivation_indices),
            chain_graph: self.chain_graph.determine_changeset(&scan.update)?,
        })
    }

    /// Directly applies a [`KeychainScan`] on [`KeychainTracker`].
    ///
    /// This is equivalent to calling [`determine_changeset`] and [`apply_changeset`] in sequence.
    ///
    /// [`determine_changeset`]: Self::determine_changeset
    /// [`apply_changeset`]: Self::apply_changeset
    pub fn apply_update(
        &mut self,
        scan: KeychainScan<K, P>,
    ) -> Result<KeychainChangeSet<K, P>, chain_graph::UpdateError<P>> {
        let changeset = self.determine_changeset(&scan)?;
        self.apply_changeset(changeset.clone());
        Ok(changeset)
    }

    /// Applies the changes in `changeset` to [`KeychainTracker`].
    ///
    /// Internally, this calls [`KeychainTxOutIndex::apply_additions`] and
    /// [`ChainGraph::apply_changeset`] in sequence.
    pub fn apply_changeset(&mut self, changeset: KeychainChangeSet<K, P>) {
        let KeychainChangeSet {
            derivation_indices,
            chain_graph,
        } = changeset;
        self.txout_index.apply_additions(derivation_indices);
        // Scan the incoming chain data first so the txout index learns about
        // any newly relevant outputs before the chain graph is mutated.
        let _ = self.txout_index.scan(&chain_graph);
        self.chain_graph.apply_changeset(chain_graph)
    }

    /// Iterates through [`FullTxOut`]s that are considered to exist in our representation of the
    /// blockchain/mempool.
    ///
    /// In other words, these are `txout`s of confirmed and in-mempool transactions, based on our
    /// view of the blockchain/mempool.
    pub fn full_txouts(&self) -> impl Iterator<Item = (&(K, u32), FullTxOut<P>)> + '_ {
        self.txout_index
            .txouts()
            .filter_map(move |(spk_i, op, _)| Some((spk_i, self.chain_graph.full_txout(op)?)))
    }

    /// Iterates through [`FullTxOut`]s that are unspent outputs.
    ///
    /// Refer to [`full_txouts`] for more.
    ///
    /// [`full_txouts`]: Self::full_txouts
    pub fn full_utxos(&self) -> impl Iterator<Item = (&(K, u32), FullTxOut<P>)> + '_ {
        self.full_txouts()
            .filter(|(_, txout)| txout.spent_by.is_none())
    }

    /// Returns a reference to the internal [`ChainGraph`].
    pub fn chain_graph(&self) -> &ChainGraph<P> {
        &self.chain_graph
    }

    /// Returns a reference to the internal [`TxGraph`] (which is part of the [`ChainGraph`]).
    pub fn graph(&self) -> &TxGraph {
        self.chain_graph().graph()
    }

    /// Returns a reference to the internal [`SparseChain`] (which is part of the [`ChainGraph`]).
    pub fn chain(&self) -> &SparseChain<P> {
        self.chain_graph().chain()
    }

    /// Determines the changes as a result of inserting `block_id` (a height and block hash) into the
    /// tracker.
    ///
    /// The caller is responsible for guaranteeing that a block exists at that height. If a
    /// checkpoint already exists at that height with a different hash, this will return an error.
    /// Otherwise, it returns the changeset resulting from inserting the checkpoint (empty if the
    /// checkpoint was already present).
    ///
    /// Note: this is only a preview — nothing is applied until the returned changeset is passed to
    /// [`apply_changeset`] (or use [`insert_checkpoint`] directly).
    ///
    /// [`apply_changeset`]: Self::apply_changeset
    /// [`insert_checkpoint`]: Self::insert_checkpoint
    pub fn insert_checkpoint_preview(
        &self,
        block_id: BlockId,
    ) -> Result<KeychainChangeSet<K, P>, chain_graph::InsertCheckpointError> {
        Ok(KeychainChangeSet {
            chain_graph: self.chain_graph.insert_checkpoint_preview(block_id)?,
            ..Default::default()
        })
    }

    /// Directly insert a `block_id` into the tracker.
    ///
    /// This is equivalent of calling [`insert_checkpoint_preview`] and [`apply_changeset`] in
    /// sequence.
    ///
    /// [`insert_checkpoint_preview`]: Self::insert_checkpoint_preview
    /// [`apply_changeset`]: Self::apply_changeset
    pub fn insert_checkpoint(
        &mut self,
        block_id: BlockId,
    ) -> Result<KeychainChangeSet<K, P>, chain_graph::InsertCheckpointError> {
        let changeset = self.insert_checkpoint_preview(block_id)?;
        self.apply_changeset(changeset.clone());
        Ok(changeset)
    }

    /// Determines the changes as a result of inserting a transaction into the inner [`ChainGraph`]
    /// and optionally into the inner chain at `position`.
    ///
    /// Note: this is only a preview — nothing is applied until the returned changeset is passed to
    /// [`apply_changeset`] (or use [`insert_tx`] directly).
    ///
    /// [`apply_changeset`]: Self::apply_changeset
    /// [`insert_tx`]: Self::insert_tx
    pub fn insert_tx_preview(
        &self,
        tx: Transaction,
        pos: P,
    ) -> Result<KeychainChangeSet<K, P>, chain_graph::InsertTxError<P>> {
        Ok(KeychainChangeSet {
            chain_graph: self.chain_graph.insert_tx_preview(tx, pos)?,
            ..Default::default()
        })
    }

    /// Directly insert a transaction into the inner [`ChainGraph`] and optionally into the inner
    /// chain at `position`.
    ///
    /// This is equivalent of calling [`insert_tx_preview`] and [`apply_changeset`] in sequence.
    ///
    /// [`insert_tx_preview`]: Self::insert_tx_preview
    /// [`apply_changeset`]: Self::apply_changeset
    pub fn insert_tx(
        &mut self,
        tx: Transaction,
        pos: P,
    ) -> Result<KeychainChangeSet<K, P>, chain_graph::InsertTxError<P>> {
        let changeset = self.insert_tx_preview(tx, pos)?;
        self.apply_changeset(changeset.clone());
        Ok(changeset)
    }

    /// Returns the *balance* of the keychain, i.e., the value of unspent transaction outputs tracked.
    ///
    /// The caller provides a `should_trust` predicate which must decide whether the value of
    /// unconfirmed outputs on this keychain are guaranteed to be realized or not. For example:
    ///
    /// - For an *internal* (change) keychain, `should_trust` should generally be `true` since even if
    /// you lose an internal output due to eviction, you will always gain back the value from whatever output the
    /// unconfirmed transaction was spending (since that output is presumably from your wallet).
    /// - For an *external* keychain, you might want `should_trust` to return `false` since someone may cancel (by double spending)
    /// a payment made to addresses on that keychain.
    ///
    /// When in doubt set `should_trust` to return false. This doesn't do anything other than change
    /// where the unconfirmed output's value is accounted for in `Balance`.
    pub fn balance(&self, mut should_trust: impl FnMut(&K) -> bool) -> Balance {
        let mut immature = 0;
        let mut trusted_pending = 0;
        let mut untrusted_pending = 0;
        let mut confirmed = 0;
        let last_sync_height = self.chain().latest_checkpoint().map(|latest| latest.height);
        for ((keychain, _), utxo) in self.full_utxos() {
            let chain_position = &utxo.chain_position;

            match chain_position.height() {
                TxHeight::Confirmed(_) => {
                    if utxo.is_on_coinbase {
                        // Coinbase outputs only count as spendable once mature
                        // (relative to the latest checkpoint height).
                        if utxo.is_mature(
                            last_sync_height
                                .expect("since it's confirmed we must have a checkpoint"),
                        ) {
                            confirmed += utxo.txout.value;
                        } else {
                            immature += utxo.txout.value;
                        }
                    } else {
                        confirmed += utxo.txout.value;
                    }
                }
                TxHeight::Unconfirmed => {
                    // The caller decides whether this keychain's unconfirmed
                    // funds count as trusted or untrusted.
                    if should_trust(keychain) {
                        trusted_pending += utxo.txout.value;
                    } else {
                        untrusted_pending += utxo.txout.value;
                    }
                }
            }
        }

        Balance {
            immature,
            trusted_pending,
            untrusted_pending,
            confirmed,
        }
    }

    /// Returns the balance of all spendable confirmed unspent outputs of this tracker at a
    /// particular height.
    pub fn balance_at(&self, height: u32) -> u64 {
        self.full_txouts()
            .filter(|(_, full_txout)| full_txout.is_spendable_at(height))
            .map(|(_, full_txout)| full_txout.txout.value)
            .sum()
    }
}
|
||||
|
||||
impl<K, P> Default for KeychainTracker<K, P> {
|
||||
fn default() -> Self {
|
||||
Self {
|
||||
txout_index: Default::default(),
|
||||
chain_graph: Default::default(),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl<K, P> AsRef<SparseChain<P>> for KeychainTracker<K, P> {
    /// Borrows the sparse chain inside the tracker's chain graph.
    fn as_ref(&self) -> &SparseChain<P> {
        self.chain_graph.chain()
    }
}
|
||||
|
||||
impl<K, P> AsRef<TxGraph> for KeychainTracker<K, P> {
    /// Borrows the transaction graph inside the tracker's chain graph.
    fn as_ref(&self) -> &TxGraph {
        self.chain_graph.graph()
    }
}
|
||||
|
||||
impl<K, P> AsRef<ChainGraph<P>> for KeychainTracker<K, P> {
    /// Borrows the tracker's internal chain graph.
    fn as_ref(&self) -> &ChainGraph<P> {
        &self.chain_graph
    }
}
|
||||
590
crates/chain/src/keychain/txout_index.rs
Normal file
590
crates/chain/src/keychain/txout_index.rs
Normal file
@@ -0,0 +1,590 @@
|
||||
use crate::{
|
||||
collections::*,
|
||||
miniscript::{Descriptor, DescriptorPublicKey},
|
||||
ForEachTxOut, SpkTxOutIndex,
|
||||
};
|
||||
use alloc::{borrow::Cow, vec::Vec};
|
||||
use bitcoin::{secp256k1::Secp256k1, OutPoint, Script, TxOut};
|
||||
use core::{fmt::Debug, ops::Deref};
|
||||
|
||||
use super::DerivationAdditions;
|
||||
|
||||
/// Maximum [BIP32](https://bips.xyz/32) derivation index.
// 2^31 - 1: indices with the top bit set denote hardened derivation.
pub const BIP32_MAX_INDEX: u32 = (1 << 31) - 1;
|
||||
|
||||
/// A convenient wrapper around [`SpkTxOutIndex`] that relates script pubkeys to miniscript public
|
||||
/// [`Descriptor`]s.
|
||||
///
|
||||
/// Descriptors are referenced by the provided keychain generic (`K`).
|
||||
///
|
||||
/// Script pubkeys for a descriptor are revealed chronologically from index 0. I.e., If the last
|
||||
/// revealed index of a descriptor is 5; scripts of indices 0 to 4 are guaranteed to be already
|
||||
/// revealed. In addition to revealed scripts, we have a `lookahead` parameter for each keychain,
|
||||
/// which defines the number of script pubkeys to store ahead of the last revealed index.
|
||||
///
|
||||
/// Methods that could update the last revealed index will return [`DerivationAdditions`] to report
|
||||
/// these changes. This can be persisted for future recovery.
|
||||
///
|
||||
/// ## Synopsis
|
||||
///
|
||||
/// ```
|
||||
/// use bdk_chain::keychain::KeychainTxOutIndex;
|
||||
/// # use bdk_chain::{ miniscript::{Descriptor, DescriptorPublicKey} };
|
||||
/// # use core::str::FromStr;
|
||||
///
|
||||
/// // imagine our service has internal and external addresses but also addresses for users
|
||||
/// #[derive(Clone, Debug, PartialEq, Eq, Ord, PartialOrd)]
|
||||
/// enum MyKeychain {
|
||||
/// External,
|
||||
/// Internal,
|
||||
/// MyAppUser {
|
||||
/// user_id: u32
|
||||
/// }
|
||||
/// }
|
||||
///
|
||||
/// let mut txout_index = KeychainTxOutIndex::<MyKeychain>::default();
|
||||
///
|
||||
/// # let secp = bdk_chain::bitcoin::secp256k1::Secp256k1::signing_only();
|
||||
/// # let (external_descriptor,_) = Descriptor::<DescriptorPublicKey>::parse_descriptor(&secp, "tr([73c5da0a/86'/0'/0']xprv9xgqHN7yz9MwCkxsBPN5qetuNdQSUttZNKw1dcYTV4mkaAFiBVGQziHs3NRSWMkCzvgjEe3n9xV8oYywvM8at9yRqyaZVz6TYYhX98VjsUk/0/*)").unwrap();
|
||||
/// # let (internal_descriptor,_) = Descriptor::<DescriptorPublicKey>::parse_descriptor(&secp, "tr([73c5da0a/86'/0'/0']xprv9xgqHN7yz9MwCkxsBPN5qetuNdQSUttZNKw1dcYTV4mkaAFiBVGQziHs3NRSWMkCzvgjEe3n9xV8oYywvM8at9yRqyaZVz6TYYhX98VjsUk/1/*)").unwrap();
|
||||
/// # let descriptor_for_user_42 = external_descriptor.clone();
|
||||
/// txout_index.add_keychain(MyKeychain::External, external_descriptor);
|
||||
/// txout_index.add_keychain(MyKeychain::Internal, internal_descriptor);
|
||||
/// txout_index.add_keychain(MyKeychain::MyAppUser { user_id: 42 }, descriptor_for_user_42);
|
||||
///
|
||||
/// let new_spk_for_user = txout_index.reveal_next_spk(&MyKeychain::MyAppUser{ user_id: 42 });
|
||||
/// ```
|
||||
///
|
||||
/// [`Ord`]: core::cmp::Ord
|
||||
/// [`SpkTxOutIndex`]: crate::spk_txout_index::SpkTxOutIndex
|
||||
/// [`Descriptor`]: crate::miniscript::Descriptor
|
||||
#[derive(Clone, Debug)]
pub struct KeychainTxOutIndex<K> {
    // underlying spk index, keyed by `(keychain, derivation_index)`
    inner: SpkTxOutIndex<(K, u32)>,
    // descriptors of each keychain
    keychains: BTreeMap<K, Descriptor<DescriptorPublicKey>>,
    // last revealed indexes; keychains with nothing revealed are absent
    last_revealed: BTreeMap<K, u32>,
    // lookahead settings for each keychain; absent means a lookahead of 0
    lookahead: BTreeMap<K, u32>,
}
|
||||
|
||||
impl<K> Default for KeychainTxOutIndex<K> {
|
||||
fn default() -> Self {
|
||||
Self {
|
||||
inner: SpkTxOutIndex::default(),
|
||||
keychains: BTreeMap::default(),
|
||||
last_revealed: BTreeMap::default(),
|
||||
lookahead: BTreeMap::default(),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl<K> Deref for KeychainTxOutIndex<K> {
    type Target = SpkTxOutIndex<(K, u32)>;

    // Deref to the inner `SpkTxOutIndex` so its read-only methods can be called
    // directly on a `KeychainTxOutIndex`.
    fn deref(&self) -> &Self::Target {
        &self.inner
    }
}
|
||||
|
||||
impl<K: Clone + Ord + Debug> KeychainTxOutIndex<K> {
|
||||
/// Scans an object for relevant outpoints, which are stored and indexed internally.
|
||||
///
|
||||
/// If the matched script pubkey is part of the lookahead, the last stored index is updated for
|
||||
/// the script pubkey's keychain and the [`DerivationAdditions`] returned will reflect the
|
||||
/// change.
|
||||
///
|
||||
/// Typically, this method is used in two situations:
|
||||
///
|
||||
/// 1. After loading transaction data from the disk, you may scan over all the txouts to restore all
|
||||
/// your txouts.
|
||||
/// 2. When getting new data from the chain, you usually scan it before incorporating it into
|
||||
/// your chain state (i.e., `SparseChain`, `ChainGraph`).
|
||||
///
|
||||
/// See [`ForEachTxout`] for the types that support this.
|
||||
///
|
||||
/// [`ForEachTxout`]: crate::ForEachTxOut
|
||||
pub fn scan(&mut self, txouts: &impl ForEachTxOut) -> DerivationAdditions<K> {
|
||||
let mut additions = DerivationAdditions::<K>::default();
|
||||
txouts.for_each_txout(|(op, txout)| additions.append(self.scan_txout(op, txout)));
|
||||
additions
|
||||
}
|
||||
|
||||
/// Scan a single outpoint for a matching script pubkey.
|
||||
///
|
||||
/// If it matches, this will store and index it.
|
||||
pub fn scan_txout(&mut self, op: OutPoint, txout: &TxOut) -> DerivationAdditions<K> {
|
||||
match self.inner.scan_txout(op, txout).cloned() {
|
||||
Some((keychain, index)) => self.reveal_to_target(&keychain, index).1,
|
||||
None => DerivationAdditions::default(),
|
||||
}
|
||||
}
|
||||
|
||||
/// Return a reference to the internal [`SpkTxOutIndex`].
///
/// This is read-only access; mutation must go through [`KeychainTxOutIndex`] methods so
/// the revealed-index bookkeeping stays consistent.
pub fn inner(&self) -> &SpkTxOutIndex<(K, u32)> {
    &self.inner
}
|
||||
|
||||
/// Return a reference to the internal map of the keychain to descriptors.
///
/// Entries are added via [`add_keychain`](Self::add_keychain) and never removed.
pub fn keychains(&self) -> &BTreeMap<K, Descriptor<DescriptorPublicKey>> {
    &self.keychains
}
|
||||
|
||||
/// Add a keychain to the tracker's `txout_index` with a descriptor to derive addresses.
|
||||
///
|
||||
/// Adding a keychain means you will be able to derive new script pubkeys under that keychain
|
||||
/// and the txout index will discover transaction outputs with those script pubkeys.
|
||||
///
|
||||
/// # Panics
|
||||
///
|
||||
/// This will panic if a different `descriptor` is introduced to the same `keychain`.
|
||||
pub fn add_keychain(&mut self, keychain: K, descriptor: Descriptor<DescriptorPublicKey>) {
|
||||
let old_descriptor = &*self.keychains.entry(keychain).or_insert(descriptor.clone());
|
||||
assert_eq!(
|
||||
&descriptor, old_descriptor,
|
||||
"keychain already contains a different descriptor"
|
||||
);
|
||||
}
|
||||
|
||||
/// Return the lookahead setting for each keychain.
///
/// Keychains without an explicit setting are absent from the map (treated as a
/// lookahead of 0).
///
/// Refer to [`set_lookahead`] for a deeper explanation of the `lookahead`.
///
/// [`set_lookahead`]: Self::set_lookahead
pub fn lookaheads(&self) -> &BTreeMap<K, u32> {
    &self.lookahead
}
|
||||
|
||||
/// Convenience method to call [`set_lookahead`] for all keychains.
///
/// [`set_lookahead`]: Self::set_lookahead
pub fn set_lookahead_for_all(&mut self, lookahead: u32) {
    // Keys are collected up front: `replenish_lookahead` borrows `self` mutably,
    // so we cannot call it while still iterating over `self.keychains`.
    for keychain in &self.keychains.keys().cloned().collect::<Vec<_>>() {
        self.lookahead.insert(keychain.clone(), lookahead);
        self.replenish_lookahead(keychain);
    }
}
|
||||
|
||||
/// Set the lookahead count for `keychain`.
///
/// The lookahead is the number of scripts to cache ahead of the last stored script index. This
/// is useful during a scan via [`scan`] or [`scan_txout`].
///
/// # Panics
///
/// This will panic if the `keychain` does not exist.
///
/// [`scan`]: Self::scan
/// [`scan_txout`]: Self::scan_txout
pub fn set_lookahead(&mut self, keychain: &K, lookahead: u32) {
    // record the new setting, then derive and store any scripts needed to honor it
    self.lookahead.insert(keychain.clone(), lookahead);
    self.replenish_lookahead(keychain);
}
|
||||
|
||||
/// Convenience method to call [`lookahead_to_target`] for multiple keychains.
|
||||
///
|
||||
/// [`lookahead_to_target`]: Self::lookahead_to_target
|
||||
pub fn lookahead_to_target_multi(&mut self, target_indexes: BTreeMap<K, u32>) {
|
||||
for (keychain, target_index) in target_indexes {
|
||||
self.lookahead_to_target(&keychain, target_index)
|
||||
}
|
||||
}
|
||||
|
||||
/// Store lookahead scripts until `target_index`.
///
/// This does not change the `lookahead` setting.
pub fn lookahead_to_target(&mut self, keychain: &K, target_index: u32) {
    let next_index = self.next_store_index(keychain);
    // only do work when `target_index` is strictly beyond what is already stored
    if let Some(temp_lookahead) = target_index.checked_sub(next_index).filter(|&v| v > 0) {
        // temporarily raise the lookahead so `replenish_lookahead` derives and
        // stores scripts up to `target_index` ...
        let old_lookahead = self.lookahead.insert(keychain.clone(), temp_lookahead);
        self.replenish_lookahead(keychain);

        // revert: ... then restore the previous lookahead setting (or remove the
        // entry if there was none)
        match old_lookahead {
            Some(lookahead) => self.lookahead.insert(keychain.clone(), lookahead),
            None => self.lookahead.remove(keychain),
        };
    }
}
|
||||
|
||||
// Derives and stores script pubkeys so that `lookahead` scripts exist beyond the last
// revealed index of `keychain`.
//
// Panics if `keychain` has not been added via `add_keychain`.
fn replenish_lookahead(&mut self, keychain: &K) {
    let descriptor = self.keychains.get(keychain).expect("keychain must exist");
    let next_store_index = self.next_store_index(keychain);
    let next_reveal_index = self.last_revealed.get(keychain).map_or(0, |v| *v + 1);
    let lookahead = self.lookahead.get(keychain).map_or(0, |v| *v);

    // derive every index that should now be stored but is not yet
    for (new_index, new_spk) in range_descriptor_spks(
        Cow::Borrowed(descriptor),
        next_store_index..next_reveal_index + lookahead,
    ) {
        let _inserted = self
            .inner
            .insert_spk((keychain.clone(), new_index), new_spk);
        // the range starts at `next_store_index`, so none of these can collide
        debug_assert!(_inserted, "replenish lookahead: must not have existing spk: keychain={:?}, lookahead={}, next_store_index={}, next_reveal_index={}", keychain, lookahead, next_store_index, next_reveal_index);
    }
}
|
||||
|
||||
// The lowest derivation index of `keychain` with no stored script pubkey
// (one past the highest stored index, or 0 when nothing is stored).
fn next_store_index(&self, keychain: &K) -> u32 {
    self.inner()
        .all_spks()
        // stored indexes never exceed `BIP32_MAX_INDEX`, so the exclusive
        // `u32::MAX` upper bound cannot miss any entry
        .range((keychain.clone(), u32::MIN)..(keychain.clone(), u32::MAX))
        .last()
        .map_or(0, |((_, v), _)| *v + 1)
}
|
||||
|
||||
/// Generates script pubkey iterators for every `keychain`. The iterators iterate over all
|
||||
/// derivable script pubkeys.
|
||||
pub fn spks_of_all_keychains(
|
||||
&self,
|
||||
) -> BTreeMap<K, impl Iterator<Item = (u32, Script)> + Clone> {
|
||||
self.keychains
|
||||
.iter()
|
||||
.map(|(keychain, descriptor)| {
|
||||
(
|
||||
keychain.clone(),
|
||||
range_descriptor_spks(Cow::Owned(descriptor.clone()), 0..),
|
||||
)
|
||||
})
|
||||
.collect()
|
||||
}
|
||||
|
||||
/// Generates a script pubkey iterator for the given `keychain`'s descriptor (if it exists). The
|
||||
/// iterator iterates over all derivable scripts of the keychain's descriptor.
|
||||
///
|
||||
/// # Panics
|
||||
///
|
||||
/// This will panic if the `keychain` does not exist.
|
||||
pub fn spks_of_keychain(&self, keychain: &K) -> impl Iterator<Item = (u32, Script)> + Clone {
|
||||
let descriptor = self
|
||||
.keychains
|
||||
.get(keychain)
|
||||
.expect("keychain must exist")
|
||||
.clone();
|
||||
range_descriptor_spks(Cow::Owned(descriptor), 0..)
|
||||
}
|
||||
|
||||
/// Convenience method to get [`revealed_spks_of_keychain`] of all keychains.
|
||||
///
|
||||
/// [`revealed_spks_of_keychain`]: Self::revealed_spks_of_keychain
|
||||
pub fn revealed_spks_of_all_keychains(
|
||||
&self,
|
||||
) -> BTreeMap<K, impl Iterator<Item = (u32, &Script)> + Clone> {
|
||||
self.keychains
|
||||
.keys()
|
||||
.map(|keychain| (keychain.clone(), self.revealed_spks_of_keychain(keychain)))
|
||||
.collect()
|
||||
}
|
||||
|
||||
/// Iterates over the script pubkeys revealed by this index under `keychain`.
pub fn revealed_spks_of_keychain(
    &self,
    keychain: &K,
) -> impl DoubleEndedIterator<Item = (u32, &Script)> + Clone {
    // one past the last revealed index (0 when nothing is revealed yet)
    let next_index = self.last_revealed.get(keychain).map_or(0, |v| *v + 1);
    self.inner
        .all_spks()
        // scripts stored for lookahead only (index >= next_index) are excluded
        .range((keychain.clone(), u32::MIN)..(keychain.clone(), next_index))
        .map(|((_, derivation_index), spk)| (*derivation_index, spk))
}
|
||||
|
||||
/// Get the next derivation index for `keychain`. The next index is the index after the last revealed
/// derivation index.
///
/// The second field in the returned tuple represents whether the next derivation index is new.
/// There are two scenarios where the next derivation index is reused (not new):
///
/// 1. The keychain's descriptor has no wildcard, and a script has already been revealed.
/// 2. The number of revealed scripts has already reached 2^31 (refer to BIP-32).
///
/// Not checking the second field of the tuple may result in address reuse.
///
/// # Panics
///
/// Panics if the `keychain` does not exist.
pub fn next_index(&self, keychain: &K) -> (u32, bool) {
    let descriptor = self.keychains.get(keychain).expect("keychain must exist");
    let last_index = self.last_revealed.get(keychain).cloned();

    // we can only get the next index if the wildcard exists.
    let has_wildcard = descriptor.has_wildcard();

    match last_index {
        // if there is no index, next_index is always 0.
        None => (0, true),
        // descriptors without wildcards can only have one index.
        Some(_) if !has_wildcard => (0, false),
        // derivation index must be < 2^31 (BIP-32).
        Some(index) if index > BIP32_MAX_INDEX => {
            // revealed indexes are never stored above `BIP32_MAX_INDEX`, so this
            // branch indicates internal state corruption
            unreachable!("index is out of bounds")
        }
        // the maximum index is already revealed: it must be reused.
        Some(index) if index == BIP32_MAX_INDEX => (index, false),
        // get the next derivation index.
        Some(index) => (index + 1, true),
    }
}
|
||||
|
||||
/// Get the last derivation index that is revealed for each keychain.
///
/// Keychains with no revealed indices will not be included in the returned [`BTreeMap`].
pub fn last_revealed_indices(&self) -> &BTreeMap<K, u32> {
    &self.last_revealed
}
|
||||
|
||||
/// Get the last derivation index revealed for `keychain`.
///
/// Returns `None` if nothing has been revealed for the keychain yet.
pub fn last_revealed_index(&self, keychain: &K) -> Option<u32> {
    self.last_revealed.get(keychain).cloned()
}
|
||||
|
||||
/// Convenience method to call [`Self::reveal_to_target`] on multiple keychains.
|
||||
pub fn reveal_to_target_multi(
|
||||
&mut self,
|
||||
keychains: &BTreeMap<K, u32>,
|
||||
) -> (
|
||||
BTreeMap<K, impl Iterator<Item = (u32, Script)>>,
|
||||
DerivationAdditions<K>,
|
||||
) {
|
||||
let mut additions = DerivationAdditions::default();
|
||||
let mut spks = BTreeMap::new();
|
||||
|
||||
for (keychain, &index) in keychains {
|
||||
let (new_spks, new_additions) = self.reveal_to_target(keychain, index);
|
||||
if !new_additions.is_empty() {
|
||||
spks.insert(keychain.clone(), new_spks);
|
||||
additions.append(new_additions);
|
||||
}
|
||||
}
|
||||
|
||||
(spks, additions)
|
||||
}
|
||||
|
||||
/// Reveals script pubkeys of the `keychain`'s descriptor **up to and including** the
/// `target_index`.
///
/// If the `target_index` cannot be reached (due to the descriptor having no wildcard and/or
/// the `target_index` is in the hardened index range), this method will make a best-effort and
/// reveal up to the last possible index.
///
/// This returns an iterator of newly revealed indices (alongside their scripts) and a
/// [`DerivationAdditions`], which reports updates to the latest revealed index. If no new script
/// pubkeys are revealed, then both of these will be empty.
///
/// # Panics
///
/// Panics if `keychain` does not exist.
pub fn reveal_to_target(
    &mut self,
    keychain: &K,
    target_index: u32,
) -> (impl Iterator<Item = (u32, Script)>, DerivationAdditions<K>) {
    let descriptor = self.keychains.get(keychain).expect("keychain must exist");
    let has_wildcard = descriptor.has_wildcard();

    // a non-wildcard descriptor only ever has index 0
    let target_index = if has_wildcard { target_index } else { 0 };
    let next_reveal_index = self.last_revealed.get(keychain).map_or(0, |v| *v + 1);
    let lookahead = self.lookahead.get(keychain).map_or(0, |v| *v);

    // invariant: everything below `next_reveal_index + lookahead` is already stored
    debug_assert_eq!(
        next_reveal_index + lookahead,
        self.next_store_index(keychain)
    );

    // if we need to reveal new indices, the latest revealed index goes here
    let mut reveal_to_index = None;

    // if the target is not yet revealed, but is already stored (due to lookahead), we need to
    // set the `reveal_to_index` as target here (as the `for` loop below only updates
    // `reveal_to_index` for indexes that are NOT stored)
    if next_reveal_index <= target_index && target_index < next_reveal_index + lookahead {
        reveal_to_index = Some(target_index);
    }

    // we range over indexes that are not stored
    let range = next_reveal_index + lookahead..=target_index + lookahead;
    for (new_index, new_spk) in range_descriptor_spks(Cow::Borrowed(descriptor), range) {
        let _inserted = self
            .inner
            .insert_spk((keychain.clone(), new_index), new_spk);
        // the range starts past everything stored, so no collision is possible
        debug_assert!(_inserted, "must not have existing spk",);

        // everything after `target_index` is stored for lookahead only
        if new_index <= target_index {
            reveal_to_index = Some(new_index);
        }
    }

    match reveal_to_index {
        Some(index) => {
            // record the new last-revealed index; it must strictly increase
            let _old_index = self.last_revealed.insert(keychain.clone(), index);
            debug_assert!(_old_index < Some(index));
            (
                // re-derive the newly revealed range for the caller
                range_descriptor_spks(
                    Cow::Owned(descriptor.clone()),
                    next_reveal_index..index + 1,
                ),
                DerivationAdditions(core::iter::once((keychain.clone(), index)).collect()),
            )
        }
        // nothing newly revealed: an empty iterator and empty additions
        None => (
            range_descriptor_spks(
                Cow::Owned(descriptor.clone()),
                next_reveal_index..next_reveal_index,
            ),
            DerivationAdditions::default(),
        ),
    }
}
|
||||
|
||||
/// Attempts to reveal the next script pubkey for `keychain`.
///
/// Returns the derivation index of the revealed script pubkey, the revealed script pubkey and a
/// [`DerivationAdditions`] which represents changes in the last revealed index (if any).
///
/// When a new script cannot be revealed, we return the last revealed script and an empty
/// [`DerivationAdditions`]. There are two scenarios when a new script pubkey cannot be derived:
///
/// 1. The descriptor has no wildcard and already has one script revealed.
/// 2. The descriptor has already revealed scripts up to the numeric bound.
///
/// # Panics
///
/// Panics if the `keychain` does not exist.
pub fn reveal_next_spk(&mut self, keychain: &K) -> ((u32, &Script), DerivationAdditions<K>) {
    let (next_index, _) = self.next_index(keychain);
    let additions = self.reveal_to_target(keychain, next_index).1;
    let script = self
        .inner
        .spk_at_index(&(keychain.clone(), next_index))
        // `reveal_to_target` above guarantees the spk at `next_index` is stored
        .expect("script must already be stored");
    ((next_index, script), additions)
}
|
||||
|
||||
/// Gets the next unused script pubkey in the keychain. I.e., the script pubkey with the lowest
/// index that has not been used yet.
///
/// This will derive and reveal a new script pubkey if no more unused script pubkeys exist.
///
/// If the descriptor has no wildcard and already has a used script pubkey or if a descriptor
/// has used all scripts up to the derivation bounds, then the last derived script pubkey will be
/// returned.
///
/// # Panics
///
/// Panics if `keychain` has never been added to the index
pub fn next_unused_spk(&mut self, keychain: &K) -> ((u32, &Script), DerivationAdditions<K>) {
    let need_new = self.unused_spks_of_keychain(keychain).next().is_none();
    // this rather strange branch is needed because of some lifetime issues:
    // the first `unused_spks_of_keychain` borrow must end before we can borrow
    // `self` mutably in `reveal_next_spk`, so the lookup is repeated in the
    // `else` branch instead of reusing the iterator
    if need_new {
        self.reveal_next_spk(keychain)
    } else {
        (
            self.unused_spks_of_keychain(keychain)
                .next()
                .expect("we already know next exists"),
            DerivationAdditions::default(),
        )
    }
}
|
||||
|
||||
/// Marks the script pubkey at `index` as used even though the tracker hasn't seen an output with it.
/// This only has an effect when the `index` had been added to `self` already and was unused.
///
/// Returns whether the `index` was initially present as `unused`.
///
/// This is useful when you want to reserve a script pubkey for something but don't want to add
/// the transaction output using it to the index yet. Other callers will consider `index` on
/// `keychain` used until you call [`unmark_used`].
///
/// [`unmark_used`]: Self::unmark_used
pub fn mark_used(&mut self, keychain: &K, index: u32) -> bool {
    // delegate to the inner index using the composite `(keychain, index)` key
    self.inner.mark_used(&(keychain.clone(), index))
}
|
||||
|
||||
/// Undoes the effect of [`mark_used`]. Returns whether the `index` is inserted back into
/// `unused`.
///
/// Note that if `self` has scanned an output with this script pubkey, then this will have no
/// effect.
///
/// [`mark_used`]: Self::mark_used
pub fn unmark_used(&mut self, keychain: &K, index: u32) -> bool {
    // delegate to the inner index using the composite `(keychain, index)` key
    self.inner.unmark_used(&(keychain.clone(), index))
}
|
||||
|
||||
/// Iterates over all unused script pubkeys for a `keychain` stored in the index.
|
||||
pub fn unused_spks_of_keychain(
|
||||
&self,
|
||||
keychain: &K,
|
||||
) -> impl DoubleEndedIterator<Item = (u32, &Script)> {
|
||||
let next_index = self.last_revealed.get(keychain).map_or(0, |&v| v + 1);
|
||||
let range = (keychain.clone(), u32::MIN)..(keychain.clone(), next_index);
|
||||
self.inner
|
||||
.unused_spks(range)
|
||||
.map(|((_, i), script)| (*i, script))
|
||||
}
|
||||
|
||||
/// Iterates over all the [`OutPoint`] that have a `TxOut` with a script pubkey derived from
/// `keychain`.
pub fn txouts_of_keychain(
    &self,
    keychain: &K,
) -> impl DoubleEndedIterator<Item = (u32, OutPoint)> + '_ {
    self.inner
        // derivation indexes are capped at `BIP32_MAX_INDEX`, so the exclusive
        // `u32::MAX` upper bound cannot miss any stored output
        .outputs_in_range((keychain.clone(), u32::MIN)..(keychain.clone(), u32::MAX))
        .map(|((_, i), op)| (*i, op))
}
|
||||
|
||||
/// Returns the highest derivation index of the `keychain` where [`KeychainTxOutIndex`] has
|
||||
/// found a [`TxOut`] with it's script pubkey.
|
||||
pub fn last_used_index(&self, keychain: &K) -> Option<u32> {
|
||||
self.txouts_of_keychain(keychain).last().map(|(i, _)| i)
|
||||
}
|
||||
|
||||
/// Returns the highest derivation index of each keychain that [`KeychainTxOutIndex`] has found
|
||||
/// a [`TxOut`] with it's script pubkey.
|
||||
pub fn last_used_indices(&self) -> BTreeMap<K, u32> {
|
||||
self.keychains
|
||||
.iter()
|
||||
.filter_map(|(keychain, _)| {
|
||||
self.last_used_index(keychain)
|
||||
.map(|index| (keychain.clone(), index))
|
||||
})
|
||||
.collect()
|
||||
}
|
||||
|
||||
/// Applies the derivation additions to the [`KeychainTxOutIndex`], extending the number of
/// derived scripts per keychain, as specified in the `additions`.
pub fn apply_additions(&mut self, additions: DerivationAdditions<K>) {
    // reveal up to each recorded index; the returned iterators and additions
    // are not needed here
    let _ = self.reveal_to_target_multi(&additions.0);
}
|
||||
}
|
||||
|
||||
// Returns an iterator of `(derivation_index, script_pubkey)` pairs for `descriptor` over the
// given index `range`.
//
// Iteration ends early at the first non-derivable index: beyond index 0 for non-wildcard
// descriptors, beyond `BIP32_MAX_INDEX`, or at the first key-conversion error.
fn range_descriptor_spks<'a, R>(
    descriptor: Cow<'a, Descriptor<DescriptorPublicKey>>,
    range: R,
) -> impl Iterator<Item = (u32, Script)> + Clone + Send + 'a
where
    R: Iterator<Item = u32> + Clone + Send + 'a,
{
    let secp = Secp256k1::verification_only();
    let has_wildcard = descriptor.has_wildcard();
    range
        .into_iter()
        // non-wildcard descriptors can only have one derivation index (0)
        .take_while(move |&index| has_wildcard || index == 0)
        // we can only iterate over non-hardened indices
        .take_while(|&index| index <= BIP32_MAX_INDEX)
        .map(
            move |index| -> Result<_, miniscript::descriptor::ConversionError> {
                Ok((
                    index,
                    descriptor
                        .at_derivation_index(index)
                        .derived_descriptor(&secp)?
                        .script_pubkey(),
                ))
            },
        )
        // stop silently at the first conversion error; the `unwrap` below cannot
        // panic because errors never make it past this `take_while`
        .take_while(Result::is_ok)
        .map(Result::unwrap)
}
|
||||
89
crates/chain/src/lib.rs
Normal file
89
crates/chain/src/lib.rs
Normal file
@@ -0,0 +1,89 @@
|
||||
//! This crate is a collection of core structures for [Bitcoin Dev Kit] (alpha release).
|
||||
//!
|
||||
//! The goal of this crate is to give wallets the mechanisms needed to:
|
||||
//!
|
||||
//! 1. Figure out what data they need to fetch.
|
||||
//! 2. Process the data in a way that never leads to inconsistent states.
|
||||
//! 3. Fully index that data and expose it to be consumed without friction.
|
||||
//!
|
||||
//! Our design goals for these mechanisms are:
|
||||
//!
|
||||
//! 1. Data source agnostic -- nothing in `bdk_chain` cares about where you get data from or whether
|
||||
//! you do it synchronously or asynchronously. If you know a fact about the blockchain, you can just
|
||||
//! tell `bdk_chain`'s APIs about it, and that information will be integrated, if it can be done
|
||||
//! consistently.
|
||||
//! 2. Error-free APIs.
|
||||
//! 3. Data persistence agnostic -- `bdk_chain` does not care where you cache on-chain data, what you
|
||||
//! cache or how you fetch it.
|
||||
//!
|
||||
//! [Bitcoin Dev Kit]: https://bitcoindevkit.org/
|
||||
#![no_std]
|
||||
pub use bitcoin;
|
||||
pub mod chain_graph;
|
||||
mod spk_txout_index;
|
||||
pub use spk_txout_index::*;
|
||||
mod chain_data;
|
||||
pub use chain_data::*;
|
||||
pub mod keychain;
|
||||
pub mod sparse_chain;
|
||||
mod tx_data_traits;
|
||||
pub mod tx_graph;
|
||||
pub use tx_data_traits::*;
|
||||
|
||||
#[doc(hidden)]
|
||||
pub mod example_utils;
|
||||
|
||||
#[cfg(feature = "miniscript")]
|
||||
pub use miniscript;
|
||||
#[cfg(feature = "miniscript")]
|
||||
mod descriptor_ext;
|
||||
#[cfg(feature = "miniscript")]
|
||||
pub use descriptor_ext::DescriptorExt;
|
||||
|
||||
#[allow(unused_imports)]
|
||||
#[macro_use]
|
||||
extern crate alloc;
|
||||
|
||||
#[cfg(feature = "serde")]
|
||||
pub extern crate serde_crate as serde;
|
||||
|
||||
#[cfg(feature = "bincode")]
|
||||
extern crate bincode;
|
||||
|
||||
#[cfg(feature = "std")]
|
||||
#[macro_use]
|
||||
extern crate std;
|
||||
|
||||
#[cfg(all(not(feature = "std"), feature = "hashbrown"))]
|
||||
extern crate hashbrown;
|
||||
|
||||
// When no-std use `alloc`'s Hash collections. This is activated by default
#[cfg(all(not(feature = "std"), not(feature = "hashbrown")))]
#[doc(hidden)]
pub mod collections {
    #![allow(dead_code)]
    // B-tree maps/sets stand in for hash collections in pure-`alloc` builds.
    pub type HashSet<K> = alloc::collections::BTreeSet<K>;
    pub type HashMap<K, V> = alloc::collections::BTreeMap<K, V>;
    pub use alloc::collections::{btree_map as hash_map, *};
}
|
||||
|
||||
// When we have std, use `std`'s all collections
#[cfg(all(feature = "std", not(feature = "hashbrown")))]
#[doc(hidden)]
pub mod collections {
    // re-export the real hash collections alongside the b-tree ones
    pub use std::collections::{hash_map, *};
}
|
||||
|
||||
// With this special feature `hashbrown`, use `hashbrown`'s hash collections, and else from `alloc`.
#[cfg(feature = "hashbrown")]
#[doc(hidden)]
pub mod collections {
    #![allow(dead_code)]
    // hashbrown supplies the hash collections; b-tree types still come from `alloc`
    pub type HashSet<K> = hashbrown::HashSet<K>;
    pub type HashMap<K, V> = hashbrown::HashMap<K, V>;
    pub use alloc::collections::*;
    pub use hashbrown::hash_map;
}
|
||||
|
||||
/// How many confirmations are needed for a coinbase output to be spent.
pub const COINBASE_MATURITY: u32 = 100;
|
||||
1102
crates/chain/src/sparse_chain.rs
Normal file
1102
crates/chain/src/sparse_chain.rs
Normal file
File diff suppressed because it is too large
Load Diff
309
crates/chain/src/spk_txout_index.rs
Normal file
309
crates/chain/src/spk_txout_index.rs
Normal file
@@ -0,0 +1,309 @@
|
||||
use core::ops::RangeBounds;
|
||||
|
||||
use crate::{
|
||||
collections::{hash_map::Entry, BTreeMap, BTreeSet, HashMap},
|
||||
ForEachTxOut,
|
||||
};
|
||||
use bitcoin::{self, OutPoint, Script, Transaction, TxOut, Txid};
|
||||
|
||||
/// An index storing [`TxOut`]s that have a script pubkey that matches those in a list.
|
||||
///
|
||||
/// The basic idea is that you insert script pubkeys you care about into the index with
|
||||
/// [`insert_spk`] and then when you call [`scan`], the index will look at any txouts you pass in and
|
||||
/// store and index any txouts matching one of its script pubkeys.
|
||||
///
|
||||
/// Each script pubkey is associated with an application-defined index script index `I`, which must be
|
||||
/// [`Ord`]. Usually, this is used to associate the derivation index of the script pubkey or even a
|
||||
/// combination of `(keychain, derivation_index)`.
|
||||
///
|
||||
/// Note there is no harm in scanning transactions that disappear from the blockchain or were never
|
||||
/// in there in the first place. `SpkTxOutIndex` is intentionally *monotone* -- you cannot delete or
|
||||
/// modify txouts that have been indexed. To find out which txouts from the index are actually in the
|
||||
/// chain or unspent, you must use other sources of information like a [`SparseChain`].
|
||||
///
|
||||
/// [`TxOut`]: bitcoin::TxOut
|
||||
/// [`insert_spk`]: Self::insert_spk
|
||||
/// [`Ord`]: core::cmp::Ord
|
||||
/// [`scan`]: Self::scan
|
||||
/// [`SparseChain`]: crate::sparse_chain::SparseChain
|
||||
#[derive(Clone, Debug)]
pub struct SpkTxOutIndex<I> {
    /// Script pubkeys ordered by index.
    spks: BTreeMap<I, Script>,
    /// A reverse lookup from spk to spk index.
    spk_indices: HashMap<Script, I>,
    /// The set of unused indexes (no matching txout has been scanned yet).
    unused: BTreeSet<I>,
    /// Lookup index and txout by outpoint.
    txouts: BTreeMap<OutPoint, (I, TxOut)>,
    /// Lookup from spk index to outpoints that had that spk.
    spk_txouts: BTreeSet<(I, OutPoint)>,
}
|
||||
|
||||
impl<I> Default for SpkTxOutIndex<I> {
|
||||
fn default() -> Self {
|
||||
Self {
|
||||
txouts: Default::default(),
|
||||
spks: Default::default(),
|
||||
spk_indices: Default::default(),
|
||||
spk_txouts: Default::default(),
|
||||
unused: Default::default(),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
/// This macro is used instead of a member function of `SpkTxOutIndex`, which would result in a
/// compiler error[E0521]: "borrowed data escapes out of closure" when we attempt to take a
/// reference out of the `ForEachTxOut` closure during scanning.
macro_rules! scan_txout {
    ($self:ident, $op:expr, $txout:expr) => {{
        // look up whether this txout's script pubkey is one we track
        let spk_i = $self.spk_indices.get(&$txout.script_pubkey);
        if let Some(spk_i) = spk_i {
            // record the txout under its outpoint and spk index, and mark the
            // spk index as used
            $self.txouts.insert($op, (spk_i.clone(), $txout.clone()));
            $self.spk_txouts.insert((spk_i.clone(), $op));
            $self.unused.remove(&spk_i);
        }
        spk_i
    }};
}
|
||||
|
||||
impl<I: Clone + Ord> SpkTxOutIndex<I> {
|
||||
/// Scans an object containing many txouts.
///
/// Typically, this is used in two situations:
///
/// 1. After loading transaction data from the disk, you may scan over all the txouts to restore all
/// your txouts.
/// 2. When getting new data from the chain, you usually scan it before incorporating it into your chain state.
///
/// Returns the set of spk indexes that matched at least one scanned txout.
///
/// See [`ForEachTxout`] for the types that support this.
///
/// [`ForEachTxout`]: crate::ForEachTxOut
pub fn scan(&mut self, txouts: &impl ForEachTxOut) -> BTreeSet<I> {
    let mut scanned_indices = BTreeSet::new();

    txouts.for_each_txout(|(op, txout)| {
        // `scan_txout!` returns the matching spk index (if any) for this txout
        if let Some(spk_i) = scan_txout!(self, op, txout) {
            scanned_indices.insert(spk_i.clone());
        }
    });

    scanned_indices
}
|
||||
|
||||
/// Scan a single `TxOut` for a matching script pubkey and returns the index that matches the
/// script pubkey (if any).
pub fn scan_txout(&mut self, op: OutPoint, txout: &TxOut) -> Option<&I> {
    // delegate to the macro so the returned reference can borrow from `self`
    scan_txout!(self, op, txout)
}
|
||||
|
||||
/// Iterate over all known txouts that spend to tracked script pubkeys.
|
||||
pub fn txouts(
|
||||
&self,
|
||||
) -> impl DoubleEndedIterator<Item = (&I, OutPoint, &TxOut)> + ExactSizeIterator {
|
||||
self.txouts
|
||||
.iter()
|
||||
.map(|(op, (index, txout))| (index, *op, txout))
|
||||
}
|
||||
|
||||
/// Finds all txouts on a transaction that has previously been scanned and indexed.
|
||||
pub fn txouts_in_tx(
|
||||
&self,
|
||||
txid: Txid,
|
||||
) -> impl DoubleEndedIterator<Item = (&I, OutPoint, &TxOut)> {
|
||||
self.txouts
|
||||
.range(OutPoint::new(txid, u32::MIN)..=OutPoint::new(txid, u32::MAX))
|
||||
.map(|(op, (index, txout))| (index, *op, txout))
|
||||
}
|
||||
|
||||
/// Iterates over all the outputs with script pubkeys in an index range.
|
||||
pub fn outputs_in_range(
|
||||
&self,
|
||||
range: impl RangeBounds<I>,
|
||||
) -> impl DoubleEndedIterator<Item = (&I, OutPoint)> {
|
||||
use bitcoin::hashes::Hash;
|
||||
use core::ops::Bound::*;
|
||||
let min_op = OutPoint {
|
||||
txid: Txid::from_inner([0x00; 32]),
|
||||
vout: u32::MIN,
|
||||
};
|
||||
let max_op = OutPoint {
|
||||
txid: Txid::from_inner([0xff; 32]),
|
||||
vout: u32::MAX,
|
||||
};
|
||||
|
||||
let start = match range.start_bound() {
|
||||
Included(index) => Included((index.clone(), min_op)),
|
||||
Excluded(index) => Excluded((index.clone(), max_op)),
|
||||
Unbounded => Unbounded,
|
||||
};
|
||||
|
||||
let end = match range.end_bound() {
|
||||
Included(index) => Included((index.clone(), max_op)),
|
||||
Excluded(index) => Excluded((index.clone(), min_op)),
|
||||
Unbounded => Unbounded,
|
||||
};
|
||||
|
||||
self.spk_txouts.range((start, end)).map(|(i, op)| (i, *op))
|
||||
}
|
||||
|
||||
/// Returns the txout and script pubkey index of the `TxOut` at `OutPoint`.
|
||||
///
|
||||
/// Returns `None` if the `TxOut` hasn't been scanned or if nothing matching was found there.
|
||||
pub fn txout(&self, outpoint: OutPoint) -> Option<(&I, &TxOut)> {
|
||||
self.txouts
|
||||
.get(&outpoint)
|
||||
.map(|(spk_i, txout)| (spk_i, txout))
|
||||
}
|
||||
|
||||
/// Returns the script that has been inserted at the `index`.
|
||||
///
|
||||
/// If that index hasn't been inserted yet, it will return `None`.
|
||||
pub fn spk_at_index(&self, index: &I) -> Option<&Script> {
|
||||
self.spks.get(index)
|
||||
}
|
||||
|
||||
/// The script pubkeys that are being tracked by the index.
|
||||
pub fn all_spks(&self) -> &BTreeMap<I, Script> {
|
||||
&self.spks
|
||||
}
|
||||
|
||||
/// Adds a script pubkey to scan for. Returns `false` and does nothing if spk already exists in the map
|
||||
///
|
||||
/// the index will look for outputs spending to this spk whenever it scans new data.
|
||||
pub fn insert_spk(&mut self, index: I, spk: Script) -> bool {
|
||||
match self.spk_indices.entry(spk.clone()) {
|
||||
Entry::Vacant(value) => {
|
||||
value.insert(index.clone());
|
||||
self.spks.insert(index.clone(), spk);
|
||||
self.unused.insert(index);
|
||||
true
|
||||
}
|
||||
Entry::Occupied(_) => false,
|
||||
}
|
||||
}
|
||||
|
||||
/// Iterates over all unused script pubkeys in an index range.
|
||||
///
|
||||
/// Here, "unused" means that after the script pubkey was stored in the index, the index has
|
||||
/// never scanned a transaction output with it.
|
||||
///
|
||||
/// # Example
|
||||
///
|
||||
/// ```rust
|
||||
/// # use bdk_chain::SpkTxOutIndex;
|
||||
///
|
||||
/// // imagine our spks are indexed like (keychain, derivation_index).
|
||||
/// let txout_index = SpkTxOutIndex::<(u32, u32)>::default();
|
||||
/// let all_unused_spks = txout_index.unused_spks(..);
|
||||
/// let change_index = 1;
|
||||
/// let unused_change_spks =
|
||||
/// txout_index.unused_spks((change_index, u32::MIN)..(change_index, u32::MAX));
|
||||
/// ```
|
||||
pub fn unused_spks<R>(&self, range: R) -> impl DoubleEndedIterator<Item = (&I, &Script)>
|
||||
where
|
||||
R: RangeBounds<I>,
|
||||
{
|
||||
self.unused
|
||||
.range(range)
|
||||
.map(move |index| (index, self.spk_at_index(index).expect("must exist")))
|
||||
}
|
||||
|
||||
/// Returns whether the script pubkey at `index` has been used or not.
|
||||
///
|
||||
/// Here, "unused" means that after the script pubkey was stored in the index, the index has
|
||||
/// never scanned a transaction output with it.
|
||||
pub fn is_used(&self, index: &I) -> bool {
|
||||
self.unused.get(index).is_none()
|
||||
}
|
||||
|
||||
/// Marks the script pubkey at `index` as used even though it hasn't seen an output spending to it.
|
||||
/// This only affects when the `index` had already been added to `self` and was unused.
|
||||
///
|
||||
/// Returns whether the `index` was initially present as `unused`.
|
||||
///
|
||||
/// This is useful when you want to reserve a script pubkey for something but don't want to add
|
||||
/// the transaction output using it to the index yet. Other callers will consider the `index` used
|
||||
/// until you call [`unmark_used`].
|
||||
///
|
||||
/// [`unmark_used`]: Self::unmark_used
|
||||
pub fn mark_used(&mut self, index: &I) -> bool {
|
||||
self.unused.remove(index)
|
||||
}
|
||||
|
||||
/// Undoes the effect of [`mark_used`]. Returns whether the `index` is inserted back into
|
||||
/// `unused`.
|
||||
///
|
||||
/// Note that if `self` has scanned an output with this script pubkey then this will have no
|
||||
/// effect.
|
||||
///
|
||||
/// [`mark_used`]: Self::mark_used
|
||||
pub fn unmark_used(&mut self, index: &I) -> bool {
|
||||
// we cannot set the index as unused when it does not exist
|
||||
if !self.spks.contains_key(index) {
|
||||
return false;
|
||||
}
|
||||
// we cannot set the index as unused when txouts are indexed under it
|
||||
if self.outputs_in_range(index..=index).next().is_some() {
|
||||
return false;
|
||||
}
|
||||
self.unused.insert(index.clone())
|
||||
}
|
||||
|
||||
/// Returns the index associated with the script pubkey.
|
||||
pub fn index_of_spk(&self, script: &Script) -> Option<&I> {
|
||||
self.spk_indices.get(script)
|
||||
}
|
||||
|
||||
/// Computes total input value going from script pubkeys in the index (sent) and the total output
|
||||
/// value going to script pubkeys in the index (received) in `tx`. For the `sent` to be computed
|
||||
/// correctly, the output being spent must have already been scanned by the index. Calculating
|
||||
/// received just uses the transaction outputs directly, so it will be correct even if it has not
|
||||
/// been scanned.
|
||||
pub fn sent_and_received(&self, tx: &Transaction) -> (u64, u64) {
|
||||
let mut sent = 0;
|
||||
let mut received = 0;
|
||||
|
||||
for txin in &tx.input {
|
||||
if let Some((_, txout)) = self.txout(txin.previous_output) {
|
||||
sent += txout.value;
|
||||
}
|
||||
}
|
||||
for txout in &tx.output {
|
||||
if self.index_of_spk(&txout.script_pubkey).is_some() {
|
||||
received += txout.value;
|
||||
}
|
||||
}
|
||||
|
||||
(sent, received)
|
||||
}
|
||||
|
||||
/// Computes the net value that this transaction gives to the script pubkeys in the index and
|
||||
/// *takes* from the transaction outputs in the index. Shorthand for calling
|
||||
/// [`sent_and_received`] and subtracting sent from received.
|
||||
///
|
||||
/// [`sent_and_received`]: Self::sent_and_received
|
||||
pub fn net_value(&self, tx: &Transaction) -> i64 {
|
||||
let (sent, received) = self.sent_and_received(tx);
|
||||
received as i64 - sent as i64
|
||||
}
|
||||
|
||||
/// Whether any of the inputs of this transaction spend a txout tracked or whether any output
|
||||
/// matches one of our script pubkeys.
|
||||
///
|
||||
/// It is easily possible to misuse this method and get false negatives by calling it before you
|
||||
/// have scanned the `TxOut`s the transaction is spending. For example, if you want to filter out
|
||||
/// all the transactions in a block that are irrelevant, you **must first scan all the
|
||||
/// transactions in the block** and only then use this method.
|
||||
pub fn is_relevant(&self, tx: &Transaction) -> bool {
|
||||
let input_matches = tx
|
||||
.input
|
||||
.iter()
|
||||
.any(|input| self.txouts.contains_key(&input.previous_output));
|
||||
let output_matches = tx
|
||||
.output
|
||||
.iter()
|
||||
.any(|output| self.spk_indices.contains_key(&output.script_pubkey));
|
||||
input_matches || output_matches
|
||||
}
|
||||
}
|
||||
33
crates/chain/src/tx_data_traits.rs
Normal file
33
crates/chain/src/tx_data_traits.rs
Normal file
@@ -0,0 +1,33 @@
|
||||
use bitcoin::{Block, OutPoint, Transaction, TxOut};
|
||||
|
||||
/// Trait to do something with every txout contained in a structure.
///
/// We would prefer to just work with things that can give us an `Iterator<Item=(OutPoint, &TxOut)>`
/// here, but rust's type system makes it extremely hard to do this (without trait objects).
pub trait ForEachTxOut {
    /// The provided closure `f` will be called with each `outpoint/txout` pair.
    ///
    /// Note that `f` receives the pair as a single tuple argument, not as two
    /// separate parameters.
    fn for_each_txout(&self, f: impl FnMut((OutPoint, &TxOut)));
}
|
||||
|
||||
impl ForEachTxOut for Block {
|
||||
fn for_each_txout(&self, mut f: impl FnMut((OutPoint, &TxOut))) {
|
||||
for tx in self.txdata.iter() {
|
||||
tx.for_each_txout(&mut f)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl ForEachTxOut for Transaction {
|
||||
fn for_each_txout(&self, mut f: impl FnMut((OutPoint, &TxOut))) {
|
||||
let txid = self.txid();
|
||||
for (i, txout) in self.output.iter().enumerate() {
|
||||
f((
|
||||
OutPoint {
|
||||
txid,
|
||||
vout: i as u32,
|
||||
},
|
||||
txout,
|
||||
))
|
||||
}
|
||||
}
|
||||
}
|
||||
581
crates/chain/src/tx_graph.rs
Normal file
581
crates/chain/src/tx_graph.rs
Normal file
@@ -0,0 +1,581 @@
|
||||
//! Module for structures that store and traverse transactions.
|
||||
//!
|
||||
//! [`TxGraph`] is a monotone structure that inserts transactions and indexes the spends. The
|
||||
//! [`Additions`] structure reports changes of [`TxGraph`] but can also be applied to a
|
||||
//! [`TxGraph`] as well. Lastly, [`TxDescendants`] is an [`Iterator`] that traverses descendants of
|
||||
//! a given transaction.
|
||||
//!
|
||||
//! Conflicting transactions are allowed to coexist within a [`TxGraph`]. This is useful for
|
||||
//! identifying and traversing conflicts and descendants of a given transaction.
|
||||
//!
|
||||
//! # Previewing and applying changes
|
||||
//!
|
||||
//! Methods that either preview or apply changes to [`TxGraph`] will return [`Additions`].
|
||||
//! [`Additions`] can be applied back to a [`TxGraph`] or be used to inform persistent storage
|
||||
//! of the changes to [`TxGraph`].
|
||||
//!
|
||||
//! ```
|
||||
//! # use bdk_chain::tx_graph::TxGraph;
|
||||
//! # use bdk_chain::example_utils::*;
|
||||
//! # use bitcoin::Transaction;
|
||||
//! # let tx_a = tx_from_hex(RAW_TX_1);
|
||||
//! # let tx_b = tx_from_hex(RAW_TX_2);
|
||||
//! let mut graph = TxGraph::default();
|
||||
//!
|
||||
//! // preview a transaction insertion (not actually inserted)
|
||||
//! let additions = graph.insert_tx_preview(tx_a);
|
||||
//! // apply the insertion
|
||||
//! graph.apply_additions(additions);
|
||||
//!
|
||||
//! // you can also insert a transaction directly
|
||||
//! let already_applied_additions = graph.insert_tx(tx_b);
|
||||
//! ```
|
||||
//!
|
||||
//! A [`TxGraph`] can also be updated with another [`TxGraph`].
|
||||
//!
|
||||
//! ```
|
||||
//! # use bdk_chain::tx_graph::TxGraph;
|
||||
//! # use bdk_chain::example_utils::*;
|
||||
//! # use bitcoin::Transaction;
|
||||
//! # let tx_a = tx_from_hex(RAW_TX_1);
|
||||
//! # let tx_b = tx_from_hex(RAW_TX_2);
|
||||
//! let mut graph = TxGraph::default();
|
||||
//! let update = TxGraph::new(vec![tx_a, tx_b]);
|
||||
//!
|
||||
//! // preview additions as the result of the update
|
||||
//! let additions = graph.determine_additions(&update);
|
||||
//! // apply the additions
|
||||
//! graph.apply_additions(additions);
|
||||
//!
|
||||
//! // we can also apply the update graph directly
|
||||
//! // the additions will be empty as we have already applied the same update above
|
||||
//! let additions = graph.apply_update(update);
|
||||
//! assert!(additions.is_empty());
|
||||
//! ```
|
||||
use crate::{collections::*, ForEachTxOut};
|
||||
use alloc::vec::Vec;
|
||||
use bitcoin::{OutPoint, Transaction, TxOut, Txid};
|
||||
use core::ops::RangeInclusive;
|
||||
|
||||
/// A graph of transactions and spends.
///
/// See the [module-level documentation] for more.
///
/// [module-level documentation]: crate::tx_graph
#[derive(Clone, Debug, PartialEq, Default)]
pub struct TxGraph {
    // All known tx nodes keyed by txid; each node is either a whole
    // transaction or just a partial set of its outputs (see `TxNode`).
    txs: HashMap<Txid, TxNode>,
    // Maps an outpoint to the set of txids whose inputs spend that outpoint.
    // Conflicting spenders may coexist here.
    spends: BTreeMap<OutPoint, HashSet<Txid>>,

    // This atrocity exists so that `TxGraph::outspends()` can return a reference.
    // FIXME: This can be removed once `HashSet::new` is a const fn.
    empty_outspends: HashSet<Txid>,
}
|
||||
|
||||
/// Node of a [`TxGraph`]. This can either be a whole transaction, or a partial transaction (where
/// we only have select outputs).
#[derive(Clone, Debug, PartialEq)]
enum TxNode {
    // The full transaction is known.
    Whole(Transaction),
    // Only some outputs are known, keyed by their vout.
    Partial(BTreeMap<u32, TxOut>),
}
|
||||
|
||||
impl Default for TxNode {
    /// Defaults to an empty partial transaction (no outputs known yet), which
    /// lets `apply_additions` use `Entry::or_insert_with(TxNode::default)`.
    fn default() -> Self {
        Self::Partial(BTreeMap::new())
    }
}
|
||||
|
||||
impl TxGraph {
    /// Iterate over all tx outputs known by [`TxGraph`].
    pub fn all_txouts(&self) -> impl Iterator<Item = (OutPoint, &TxOut)> {
        self.txs.iter().flat_map(|(txid, tx)| match tx {
            // Each arm collects into a `Vec` so both arms yield the same
            // concrete iterator type for `flat_map`.
            TxNode::Whole(tx) => tx
                .output
                .iter()
                .enumerate()
                .map(|(vout, txout)| (OutPoint::new(*txid, vout as _), txout))
                .collect::<Vec<_>>(),
            TxNode::Partial(txouts) => txouts
                .iter()
                .map(|(vout, txout)| (OutPoint::new(*txid, *vout as _), txout))
                .collect::<Vec<_>>(),
        })
    }

    /// Iterate over all full transactions in the graph.
    pub fn full_transactions(&self) -> impl Iterator<Item = &Transaction> {
        self.txs.iter().filter_map(|(_, tx)| match tx {
            TxNode::Whole(tx) => Some(tx),
            TxNode::Partial(_) => None,
        })
    }

    /// Get a transaction by txid. This only returns `Some` for full transactions.
    ///
    /// Refer to [`get_txout`] for getting a specific [`TxOut`].
    ///
    /// [`get_txout`]: Self::get_txout
    pub fn get_tx(&self, txid: Txid) -> Option<&Transaction> {
        match self.txs.get(&txid)? {
            TxNode::Whole(tx) => Some(tx),
            // a partial node has no complete transaction to hand out
            TxNode::Partial(_) => None,
        }
    }

    /// Obtains a single tx output (if any) at the specified outpoint.
    pub fn get_txout(&self, outpoint: OutPoint) -> Option<&TxOut> {
        match self.txs.get(&outpoint.txid)? {
            TxNode::Whole(tx) => tx.output.get(outpoint.vout as usize),
            TxNode::Partial(txouts) => txouts.get(&outpoint.vout),
        }
    }

    /// Returns a [`BTreeMap`] of vout to output of the provided `txid`.
    ///
    /// Returns `None` when the graph has no node for `txid` at all.
    pub fn txouts(&self, txid: Txid) -> Option<BTreeMap<u32, &TxOut>> {
        Some(match self.txs.get(&txid)? {
            TxNode::Whole(tx) => tx
                .output
                .iter()
                .enumerate()
                .map(|(vout, txout)| (vout as u32, txout))
                .collect::<BTreeMap<_, _>>(),
            TxNode::Partial(txouts) => txouts
                .iter()
                .map(|(vout, txout)| (*vout, txout))
                .collect::<BTreeMap<_, _>>(),
        })
    }

    /// Calculates the fee of a given transaction. Returns 0 if `tx` is a coinbase transaction.
    /// Returns `Some(_)` if we have all the `TxOut`s being spent by `tx` in the graph (either as
    /// the full transactions or individual txouts). If the returned value is negative, then the
    /// transaction is invalid according to the graph.
    ///
    /// Returns `None` if we're missing an input for the tx in the graph.
    ///
    /// Note `tx` does not have to be in the graph for this to work.
    pub fn calculate_fee(&self, tx: &Transaction) -> Option<i64> {
        if tx.is_coin_base() {
            return Some(0);
        }
        // Summing `Option<i64>`s short-circuits to `None` on the first input
        // whose previous output is not in the graph.
        let inputs_sum = tx
            .input
            .iter()
            .map(|txin| {
                self.get_txout(txin.previous_output)
                    .map(|txout| txout.value as i64)
            })
            .sum::<Option<i64>>()?;

        let outputs_sum = tx
            .output
            .iter()
            .map(|txout| txout.value as i64)
            .sum::<i64>();

        Some(inputs_sum - outputs_sum)
    }
}
|
||||
|
||||
impl TxGraph {
    /// Construct a new [`TxGraph`] from a list of transactions.
    pub fn new(txs: impl IntoIterator<Item = Transaction>) -> Self {
        let mut new = Self::default();
        for tx in txs.into_iter() {
            // additions are discarded; the caller asked for the final graph only
            let _ = new.insert_tx(tx);
        }
        new
    }

    /// Inserts the given [`TxOut`] at [`OutPoint`].
    ///
    /// Note this will ignore the action if we already have the full transaction that the txout is
    /// alleged to be on (even if it doesn't match it!).
    pub fn insert_txout(&mut self, outpoint: OutPoint, txout: TxOut) -> Additions {
        let additions = self.insert_txout_preview(outpoint, txout);
        self.apply_additions(additions.clone());
        additions
    }

    /// Inserts the given transaction into [`TxGraph`].
    ///
    /// The [`Additions`] returned will be empty if `tx` already exists.
    pub fn insert_tx(&mut self, tx: Transaction) -> Additions {
        let additions = self.insert_tx_preview(tx);
        self.apply_additions(additions.clone());
        additions
    }

    /// Extends this graph with another so that `self` becomes the union of the two sets of
    /// transactions.
    ///
    /// The returned [`Additions`] is the set difference between `update` and `self` (transactions that
    /// exist in `update` but not in `self`).
    pub fn apply_update(&mut self, update: TxGraph) -> Additions {
        let additions = self.determine_additions(&update);
        self.apply_additions(additions.clone());
        additions
    }

    /// Applies [`Additions`] to [`TxGraph`].
    pub fn apply_additions(&mut self, additions: Additions) {
        for tx in additions.tx {
            let txid = tx.txid();

            // index the spends of this tx before moving it into `txs`
            tx.input
                .iter()
                .map(|txin| txin.previous_output)
                // coinbase spends are not to be counted
                .filter(|outpoint| !outpoint.is_null())
                // record spend as this tx has spent this outpoint
                .for_each(|outpoint| {
                    self.spends.entry(outpoint).or_default().insert(txid);
                });

            if let Some(TxNode::Whole(old_tx)) = self.txs.insert(txid, TxNode::Whole(tx)) {
                debug_assert_eq!(
                    old_tx.txid(),
                    txid,
                    "old tx of the same txid should not be different."
                );
            }
        }

        for (outpoint, txout) in additions.txout {
            let tx_entry = self
                .txs
                .entry(outpoint.txid)
                .or_insert_with(TxNode::default);

            match tx_entry {
                TxNode::Whole(_) => { /* do nothing since we already have full tx */ }
                TxNode::Partial(txouts) => {
                    txouts.insert(outpoint.vout, txout);
                }
            }
        }
    }

    /// Previews the resultant [`Additions`] when [`Self`] is updated against the `update` graph.
    ///
    /// The [`Additions`] would be the set difference between `update` and `self` (transactions that
    /// exist in `update` but not in `self`).
    pub fn determine_additions(&self, update: &TxGraph) -> Additions {
        let mut additions = Additions::default();

        for (&txid, update_tx) in &update.txs {
            // a whole tx we already hold can gain nothing from the update
            if self.get_tx(txid).is_some() {
                continue;
            }

            match update_tx {
                TxNode::Whole(tx) => {
                    // NOTE(review): the `continue` above already guarantees we do
                    // not hold this tx as `Whole`, so this guard appears to be
                    // always true — confirm before simplifying.
                    if matches!(self.txs.get(&txid), None | Some(TxNode::Partial(_))) {
                        additions.tx.insert(tx.clone());
                    }
                }
                TxNode::Partial(partial) => {
                    for (&vout, update_txout) in partial {
                        let outpoint = OutPoint::new(txid, vout);

                        // only record txouts we don't already have (or that differ)
                        if self.get_txout(outpoint) != Some(update_txout) {
                            additions.txout.insert(outpoint, update_txout.clone());
                        }
                    }
                }
            }
        }

        additions
    }

    /// Returns the resultant [`Additions`] if the given transaction is inserted. Does not actually
    /// mutate [`Self`].
    ///
    /// The [`Additions`] result will be empty if `tx` already exists in `self`.
    pub fn insert_tx_preview(&self, tx: Transaction) -> Additions {
        // build a one-tx graph and diff it against `self`
        let mut update = Self::default();
        update.txs.insert(tx.txid(), TxNode::Whole(tx));
        self.determine_additions(&update)
    }

    /// Returns the resultant [`Additions`] if the given `txout` is inserted at `outpoint`. Does not
    /// mutate `self`.
    ///
    /// The [`Additions`] result will be empty if the `outpoint` (or a full transaction containing
    /// the `outpoint`) already existed in `self`.
    pub fn insert_txout_preview(&self, outpoint: OutPoint, txout: TxOut) -> Additions {
        // build a one-txout graph and diff it against `self`
        let mut update = Self::default();
        update.txs.insert(
            outpoint.txid,
            TxNode::Partial([(outpoint.vout, txout)].into()),
        );
        self.determine_additions(&update)
    }
}
|
||||
|
||||
impl TxGraph {
|
||||
/// The transactions spending from this output.
|
||||
///
|
||||
/// `TxGraph` allows conflicting transactions within the graph. Obviously the transactions in
|
||||
/// the returned set will never be in the same active-chain.
|
||||
pub fn outspends(&self, outpoint: OutPoint) -> &HashSet<Txid> {
|
||||
self.spends.get(&outpoint).unwrap_or(&self.empty_outspends)
|
||||
}
|
||||
|
||||
/// Iterates over the transactions spending from `txid`.
|
||||
///
|
||||
/// The iterator item is a union of `(vout, txid-set)` where:
|
||||
///
|
||||
/// - `vout` is the provided `txid`'s outpoint that is being spent
|
||||
/// - `txid-set` is the set of txids spending the `vout`.
|
||||
pub fn tx_outspends(
|
||||
&self,
|
||||
txid: Txid,
|
||||
) -> impl DoubleEndedIterator<Item = (u32, &HashSet<Txid>)> + '_ {
|
||||
let start = OutPoint { txid, vout: 0 };
|
||||
let end = OutPoint {
|
||||
txid,
|
||||
vout: u32::MAX,
|
||||
};
|
||||
self.spends
|
||||
.range(start..=end)
|
||||
.map(|(outpoint, spends)| (outpoint.vout, spends))
|
||||
}
|
||||
|
||||
/// Iterate over all partial transactions (outputs only) in the graph.
|
||||
pub fn partial_transactions(&self) -> impl Iterator<Item = (Txid, &BTreeMap<u32, TxOut>)> {
|
||||
self.txs.iter().filter_map(|(txid, tx)| match tx {
|
||||
TxNode::Whole(_) => None,
|
||||
TxNode::Partial(partial) => Some((*txid, partial)),
|
||||
})
|
||||
}
|
||||
|
||||
/// Creates an iterator that filters and maps descendants from the starting `txid`.
|
||||
///
|
||||
/// The supplied closure takes in two inputs `(depth, descendant_txid)`:
|
||||
///
|
||||
/// * `depth` is the distance between the starting `txid` and the `descendant_txid`. I.e., if the
|
||||
/// descendant is spending an output of the starting `txid`; the `depth` will be 1.
|
||||
/// * `descendant_txid` is the descendant's txid which we are considering to walk.
|
||||
///
|
||||
/// The supplied closure returns an `Option<T>`, allowing the caller to map each node it vists
|
||||
/// and decide whether to visit descendants.
|
||||
pub fn walk_descendants<'g, F, O>(&'g self, txid: Txid, walk_map: F) -> TxDescendants<F>
|
||||
where
|
||||
F: FnMut(usize, Txid) -> Option<O> + 'g,
|
||||
{
|
||||
TxDescendants::new_exclude_root(self, txid, walk_map)
|
||||
}
|
||||
|
||||
/// Creates an iterator that both filters and maps conflicting transactions (this includes
|
||||
/// descendants of directly-conflicting transactions, which are also considered conflicts).
|
||||
///
|
||||
/// Refer to [`Self::walk_descendants`] for `walk_map` usage.
|
||||
pub fn walk_conflicts<'g, F, O>(&'g self, tx: &'g Transaction, walk_map: F) -> TxDescendants<F>
|
||||
where
|
||||
F: FnMut(usize, Txid) -> Option<O> + 'g,
|
||||
{
|
||||
let txids = self.direct_conflicts_of_tx(tx).map(|(_, txid)| txid);
|
||||
TxDescendants::from_multiple_include_root(self, txids, walk_map)
|
||||
}
|
||||
|
||||
/// Given a transaction, return an iterator of txids that directly conflict with the given
|
||||
/// transaction's inputs (spends). The conflicting txids are returned with the given
|
||||
/// transaction's vin (in which it conflicts).
|
||||
///
|
||||
/// Note that this only returns directly conflicting txids and does not include descendants of
|
||||
/// those txids (which are technically also conflicting).
|
||||
pub fn direct_conflicts_of_tx<'g>(
|
||||
&'g self,
|
||||
tx: &'g Transaction,
|
||||
) -> impl Iterator<Item = (usize, Txid)> + '_ {
|
||||
let txid = tx.txid();
|
||||
tx.input
|
||||
.iter()
|
||||
.enumerate()
|
||||
.filter_map(move |(vin, txin)| self.spends.get(&txin.previous_output).zip(Some(vin)))
|
||||
.flat_map(|(spends, vin)| core::iter::repeat(vin).zip(spends.iter().cloned()))
|
||||
.filter(move |(_, conflicting_txid)| *conflicting_txid != txid)
|
||||
}
|
||||
|
||||
/// Whether the graph has any transactions or outputs in it.
|
||||
pub fn is_empty(&self) -> bool {
|
||||
self.txs.is_empty()
|
||||
}
|
||||
}
|
||||
|
||||
/// A structure that represents changes to a [`TxGraph`].
///
/// It is named "additions" because [`TxGraph`] is monotone, so transactions can only be added and
/// not removed.
///
/// Refer to [module-level documentation] for more.
///
/// [module-level documentation]: crate::tx_graph
#[derive(Debug, Clone, PartialEq, Default)]
#[cfg_attr(
    feature = "serde",
    derive(serde::Deserialize, serde::Serialize),
    serde(crate = "serde_crate")
)]
#[must_use]
pub struct Additions {
    /// Whole transactions added to the graph.
    pub tx: BTreeSet<Transaction>,
    /// Individual floating txouts added to the graph, keyed by the outpoint
    /// they reside at.
    pub txout: BTreeMap<OutPoint, TxOut>,
}
|
||||
|
||||
impl Additions {
|
||||
/// Returns true if the [`Additions`] is empty (no transactions or txouts).
|
||||
pub fn is_empty(&self) -> bool {
|
||||
self.tx.is_empty() && self.txout.is_empty()
|
||||
}
|
||||
|
||||
/// Iterates over all outpoints contained within [`Additions`].
|
||||
pub fn txouts(&self) -> impl Iterator<Item = (OutPoint, &TxOut)> {
|
||||
self.tx
|
||||
.iter()
|
||||
.flat_map(|tx| {
|
||||
tx.output
|
||||
.iter()
|
||||
.enumerate()
|
||||
.map(move |(vout, txout)| (OutPoint::new(tx.txid(), vout as _), txout))
|
||||
})
|
||||
.chain(self.txout.iter().map(|(op, txout)| (*op, txout)))
|
||||
}
|
||||
|
||||
/// Appends the changes in `other` into self such that applying `self` afterward has the same
|
||||
/// effect as sequentially applying the original `self` and `other`.
|
||||
pub fn append(&mut self, mut other: Additions) {
|
||||
self.tx.append(&mut other.tx);
|
||||
self.txout.append(&mut other.txout);
|
||||
}
|
||||
}
|
||||
|
||||
impl AsRef<TxGraph> for TxGraph {
    // Trivial identity impl: lets generic APIs that take `impl AsRef<TxGraph>`
    // accept a `TxGraph` directly.
    fn as_ref(&self) -> &TxGraph {
        self
    }
}
|
||||
|
||||
impl ForEachTxOut for Additions {
    /// Calls `f` with every outpoint/txout pair yielded by [`Additions::txouts`].
    fn for_each_txout(&self, f: impl FnMut((OutPoint, &TxOut))) {
        self.txouts().for_each(f)
    }
}
|
||||
|
||||
impl ForEachTxOut for TxGraph {
    /// Calls `f` with every outpoint/txout pair yielded by [`TxGraph::all_txouts`].
    fn for_each_txout(&self, f: impl FnMut((OutPoint, &TxOut))) {
        self.all_txouts().for_each(f)
    }
}
|
||||
|
||||
/// An iterator that traverses transaction descendants.
///
/// This `struct` is created by the [`walk_descendants`] method of [`TxGraph`].
///
/// [`walk_descendants`]: TxGraph::walk_descendants
pub struct TxDescendants<'g, F> {
    // The graph whose spend index is being traversed.
    graph: &'g TxGraph,
    // Txids already considered (whether yielded or filtered out); prevents
    // visiting the same transaction twice.
    visited: HashSet<Txid>,
    // DFS stack of `(depth, txid)` pairs still to be visited.
    stack: Vec<(usize, Txid)>,
    // Caller-supplied closure that filters and maps each visited node.
    filter_map: F,
}
|
||||
|
||||
impl<'g, F> TxDescendants<'g, F> {
    /// Creates a `TxDescendants` that includes the starting `txid` when iterating.
    #[allow(unused)]
    pub(crate) fn new_include_root(graph: &'g TxGraph, txid: Txid, filter_map: F) -> Self {
        Self {
            graph,
            visited: Default::default(),
            // seed the stack with the root itself at depth 0
            stack: [(0, txid)].into(),
            filter_map,
        }
    }

    /// Creates a `TxDescendants` that excludes the starting `txid` when iterating.
    pub(crate) fn new_exclude_root(graph: &'g TxGraph, txid: Txid, filter_map: F) -> Self {
        let mut descendants = Self {
            graph,
            visited: Default::default(),
            stack: Default::default(),
            filter_map,
        };
        // seed the stack with the root's direct spenders at depth 1,
        // skipping the root itself
        descendants.populate_stack(1, txid);
        descendants
    }

    /// Creates a `TxDescendants` from multiple starting transactions that include the starting
    /// `txid`s when iterating.
    pub(crate) fn from_multiple_include_root<I>(graph: &'g TxGraph, txids: I, filter_map: F) -> Self
    where
        I: IntoIterator<Item = Txid>,
    {
        Self {
            graph,
            visited: Default::default(),
            // every root goes on the stack at depth 0
            stack: txids.into_iter().map(|txid| (0, txid)).collect(),
            filter_map,
        }
    }

    /// Creates a `TxDescendants` from multiple starting transactions that excludes the starting
    /// `txid`s when iterating.
    #[allow(unused)]
    pub(crate) fn from_multiple_exclude_root<I>(graph: &'g TxGraph, txids: I, filter_map: F) -> Self
    where
        I: IntoIterator<Item = Txid>,
    {
        let mut descendants = Self {
            graph,
            visited: Default::default(),
            stack: Default::default(),
            filter_map,
        };
        // seed with each root's direct spenders at depth 1
        for txid in txids {
            descendants.populate_stack(1, txid);
        }
        descendants
    }
}
|
||||
|
||||
impl<'g, F> TxDescendants<'g, F> {
    /// Pushes every txid that spends any output of `txid` onto the stack,
    /// tagged with the given `depth`.
    fn populate_stack(&mut self, depth: usize, txid: Txid) {
        let spend_paths = self
            .graph
            .spends
            // all outpoints of `txid` fall within this contiguous key range
            .range(tx_outpoint_range(txid))
            .flat_map(|(_, spends)| spends)
            .map(|&txid| (depth, txid));
        self.stack.extend(spend_paths);
    }
}
|
||||
|
||||
impl<'g, F, O> Iterator for TxDescendants<'g, F>
where
    F: FnMut(usize, Txid) -> Option<O>,
{
    type Item = O;

    /// Depth-first traversal step: pops nodes until one passes the
    /// deduplication and `filter_map` checks, queues that node's own
    /// spenders one level deeper, then yields the mapped item.
    fn next(&mut self) -> Option<Self::Item> {
        let (op_spends, txid, item) = loop {
            // we have exhausted all paths when stack is empty
            let (op_spends, txid) = self.stack.pop()?;
            // we do not want to visit the same transaction twice
            if self.visited.insert(txid) {
                // ignore paths when user filters them out
                if let Some(item) = (self.filter_map)(op_spends, txid) {
                    break (op_spends, txid, item);
                }
            }
        };

        // descend into this node's spenders before yielding it
        self.populate_stack(op_spends + 1, txid);
        Some(item)
    }
}
|
||||
|
||||
fn tx_outpoint_range(txid: Txid) -> RangeInclusive<OutPoint> {
|
||||
OutPoint::new(txid, u32::MIN)..=OutPoint::new(txid, u32::MAX)
|
||||
}
|
||||
60
crates/chain/tests/common/mod.rs
Normal file
60
crates/chain/tests/common/mod.rs
Normal file
@@ -0,0 +1,60 @@
|
||||
#[allow(unused_macros)]
macro_rules! h {
    // Hashes the string literal's bytes via `bitcoin::hashes::Hash::hash`,
    // giving tests a deterministic hash value from a short label. The
    // concrete hash type is inferred from the call site.
    ($index:literal) => {{
        bitcoin::hashes::Hash::hash($index.as_bytes())
    }};
}
|
||||
|
||||
#[allow(unused_macros)]
macro_rules! chain {
    // Shorthand: a bare list of checkpoints expands to the `checkpoints:` form.
    ($([$($tt:tt)*]),*) => { chain!( checkpoints: [$([$($tt)*]),*] ) };
    // Shorthand: defaults the chain-position index type to `TxHeight`.
    (checkpoints: $($tail:tt)*) => { chain!( index: TxHeight, checkpoints: $($tail)*) };
    // Full form: builds a `SparseChain<$ind>` from `[height, block_hash]`
    // checkpoints and optionally inserts `(txid, tx_height)` pairs into it.
    (index: $ind:ty, checkpoints: [ $([$height:expr, $block_hash:expr]),* ] $(,txids: [$(($txid:expr, $tx_height:expr)),*])?) => {{
        #[allow(unused_mut)]
        let mut chain = bdk_chain::sparse_chain::SparseChain::<$ind>::from_checkpoints([$(($height, $block_hash).into()),*]);

        $(
            $(
                // test fixture: insertion failure is a broken fixture, so panic
                let _ = chain.insert_tx($txid, $tx_height).expect("should succeed");
            )*
        )?

        chain
    }};
}
|
||||
|
||||
// Construct a `sparse_chain::ChangeSet` literal for test assertions from
// checkpoint changes and (optionally) txid position changes.
#[allow(unused_macros)]
macro_rules! changeset {
    // default the index type to `TxHeight` when none is given
    (checkpoints: $($tail:tt)*) => { changeset!(index: TxHeight, checkpoints: $($tail)*) };
    (
        index: $ind:ty,
        checkpoints: [ $(( $height:expr, $cp_to:expr )),* ]
        $(,txids: [ $(( $txid:expr, $tx_to:expr )),* ])?
    ) => {{
        use bdk_chain::collections::BTreeMap;

        #[allow(unused_mut)]
        bdk_chain::sparse_chain::ChangeSet::<$ind> {
            checkpoints: {
                let mut changes = BTreeMap::default();
                $(changes.insert($height, $cp_to);)*
                changes
            },
            txids: {
                let mut changes = BTreeMap::default();
                // `$tx_to` is an `Option<TxHeight>`; map heights into the
                // chain's index type
                $($(changes.insert($txid, $tx_to.map(|h: TxHeight| h.into()));)*)?
                changes
            }
        }
    }};
}
|
||||
|
||||
#[allow(unused)]
|
||||
pub fn new_tx(lt: u32) -> bitcoin::Transaction {
|
||||
bitcoin::Transaction {
|
||||
version: 0x00,
|
||||
lock_time: bitcoin::PackedLockTime(lt),
|
||||
input: vec![],
|
||||
output: vec![],
|
||||
}
|
||||
}
|
||||
653
crates/chain/tests/test_chain_graph.rs
Normal file
653
crates/chain/tests/test_chain_graph.rs
Normal file
@@ -0,0 +1,653 @@
|
||||
#[macro_use]
|
||||
mod common;
|
||||
|
||||
use bdk_chain::{
|
||||
chain_graph::*,
|
||||
collections::HashSet,
|
||||
sparse_chain,
|
||||
tx_graph::{self, TxGraph},
|
||||
BlockId, TxHeight,
|
||||
};
|
||||
use bitcoin::{OutPoint, PackedLockTime, Script, Sequence, Transaction, TxIn, TxOut, Witness};
|
||||
|
||||
// Two chain-graphs that share `tx1` but spend its sole output with different
// transactions must each report their own spender for the outpoint.
#[test]
fn test_spent_by() {
    let tx1 = Transaction {
        version: 0x01,
        lock_time: PackedLockTime(0),
        input: vec![],
        output: vec![TxOut::default()],
    };

    // the single output of tx1, spent by both tx2 and tx3 below
    let op = OutPoint {
        txid: tx1.txid(),
        vout: 0,
    };

    let tx2 = Transaction {
        version: 0x01,
        lock_time: PackedLockTime(0),
        input: vec![TxIn {
            previous_output: op,
            ..Default::default()
        }],
        output: vec![],
    };
    // differs from tx2 only by lock_time, so it has a distinct txid
    let tx3 = Transaction {
        version: 0x01,
        lock_time: PackedLockTime(42),
        input: vec![TxIn {
            previous_output: op,
            ..Default::default()
        }],
        output: vec![],
    };

    let mut cg1 = ChainGraph::default();
    let _ = cg1
        .insert_tx(tx1, TxHeight::Unconfirmed)
        .expect("should insert");
    // clone before diverging: cg1 gets tx2, cg2 gets tx3
    let mut cg2 = cg1.clone();
    let _ = cg1
        .insert_tx(tx2.clone(), TxHeight::Unconfirmed)
        .expect("should insert");
    let _ = cg2
        .insert_tx(tx3.clone(), TxHeight::Unconfirmed)
        .expect("should insert");

    assert_eq!(cg1.spent_by(op), Some((&TxHeight::Unconfirmed, tx2.txid())));
    assert_eq!(cg2.spent_by(op), Some((&TxHeight::Unconfirmed, tx3.txid())));
}
|
||||
|
||||
// Applying an update containing a transaction that conflicts with a mempool
// (or reorged-out) transaction must evict the conflicting one; evicting a
// transaction from a still-valid block must fail.
#[test]
fn update_evicts_conflicting_tx() {
    let cp_a = BlockId {
        height: 0,
        hash: h!("A"),
    };
    let cp_b = BlockId {
        height: 1,
        hash: h!("B"),
    };
    // replacement block at the same height as cp_b
    let cp_b2 = BlockId {
        height: 1,
        hash: h!("B'"),
    };

    let tx_a = Transaction {
        version: 0x01,
        lock_time: PackedLockTime(0),
        input: vec![],
        output: vec![TxOut::default()],
    };

    let tx_b = Transaction {
        version: 0x01,
        lock_time: PackedLockTime(0),
        input: vec![TxIn {
            previous_output: OutPoint::new(tx_a.txid(), 0),
            script_sig: Script::new(),
            sequence: Sequence::default(),
            witness: Witness::new(),
        }],
        output: vec![TxOut::default()],
    };

    // conflicts with tx_b: spends the same outpoint of tx_a
    let tx_b2 = Transaction {
        version: 0x02,
        lock_time: PackedLockTime(0),
        input: vec![TxIn {
            previous_output: OutPoint::new(tx_a.txid(), 0),
            script_sig: Script::new(),
            sequence: Sequence::default(),
            witness: Witness::new(),
        }],
        output: vec![TxOut::default(), TxOut::default()],
    };
    // Case 1: tx_b is unconfirmed, so tx_b2 evicts it from the mempool.
    {
        let mut cg1 = {
            let mut cg = ChainGraph::default();
            let _ = cg.insert_checkpoint(cp_a).expect("should insert cp");
            let _ = cg
                .insert_tx(tx_a.clone(), TxHeight::Confirmed(0))
                .expect("should insert tx");
            let _ = cg
                .insert_tx(tx_b.clone(), TxHeight::Unconfirmed)
                .expect("should insert tx");
            cg
        };
        let cg2 = {
            let mut cg = ChainGraph::default();
            let _ = cg
                .insert_tx(tx_b2.clone(), TxHeight::Unconfirmed)
                .expect("should insert tx");
            cg
        };

        // expected: tx_b removed (None), tx_b2 added as unconfirmed
        let changeset = ChangeSet::<TxHeight> {
            chain: sparse_chain::ChangeSet {
                checkpoints: Default::default(),
                txids: [
                    (tx_b.txid(), None),
                    (tx_b2.txid(), Some(TxHeight::Unconfirmed)),
                ]
                .into(),
            },
            graph: tx_graph::Additions {
                tx: [tx_b2.clone()].into(),
                txout: [].into(),
            },
        };
        assert_eq!(
            cg1.determine_changeset(&cg2),
            Ok(changeset.clone()),
            "tx should be evicted from mempool"
        );

        cg1.apply_changeset(changeset);
    }

    // Case 2: tx_b is confirmed in a block the update does NOT invalidate,
    // so the conflict is unresolvable and the update must error.
    {
        let cg1 = {
            let mut cg = ChainGraph::default();
            let _ = cg.insert_checkpoint(cp_a).expect("should insert cp");
            let _ = cg.insert_checkpoint(cp_b).expect("should insert cp");
            let _ = cg
                .insert_tx(tx_a.clone(), TxHeight::Confirmed(0))
                .expect("should insert tx");
            let _ = cg
                .insert_tx(tx_b.clone(), TxHeight::Confirmed(1))
                .expect("should insert tx");
            cg
        };
        let cg2 = {
            let mut cg = ChainGraph::default();
            let _ = cg
                .insert_tx(tx_b2.clone(), TxHeight::Unconfirmed)
                .expect("should insert tx");
            cg
        };
        assert_eq!(
            cg1.determine_changeset(&cg2),
            Err(UpdateError::UnresolvableConflict(UnresolvableConflict {
                already_confirmed_tx: (TxHeight::Confirmed(1), tx_b.txid()),
                update_tx: (TxHeight::Unconfirmed, tx_b2.txid()),
            })),
            "fail if tx is evicted from valid block"
        );
    }

    {
        // Given 2 blocks `{A, B}`, and an update that invalidates block B with
        // `{A, B'}`, we expect txs that exist in `B` that conflicts with txs
        // introduced in the update to be successfully evicted.
        let mut cg1 = {
            let mut cg = ChainGraph::default();
            let _ = cg.insert_checkpoint(cp_a).expect("should insert cp");
            let _ = cg.insert_checkpoint(cp_b).expect("should insert cp");
            let _ = cg
                .insert_tx(tx_a, TxHeight::Confirmed(0))
                .expect("should insert tx");
            let _ = cg
                .insert_tx(tx_b.clone(), TxHeight::Confirmed(1))
                .expect("should insert tx");
            cg
        };
        let cg2 = {
            let mut cg = ChainGraph::default();
            let _ = cg.insert_checkpoint(cp_a).expect("should insert cp");
            let _ = cg.insert_checkpoint(cp_b2).expect("should insert cp");
            let _ = cg
                .insert_tx(tx_b2.clone(), TxHeight::Unconfirmed)
                .expect("should insert tx");
            cg
        };

        // expected: checkpoint at height 1 replaced by B', tx_b evicted,
        // tx_b2 added as unconfirmed
        let changeset = ChangeSet::<TxHeight> {
            chain: sparse_chain::ChangeSet {
                checkpoints: [(1, Some(h!("B'")))].into(),
                txids: [
                    (tx_b.txid(), None),
                    (tx_b2.txid(), Some(TxHeight::Unconfirmed)),
                ]
                .into(),
            },
            graph: tx_graph::Additions {
                tx: [tx_b2].into(),
                txout: [].into(),
            },
        };
        assert_eq!(
            cg1.determine_changeset(&cg2),
            Ok(changeset.clone()),
            "tx should be evicted from B",
        );

        cg1.apply_changeset(changeset);
    }
}
|
||||
|
||||
// `ChainGraph::new` must reject a chain that references txids whose full
// transactions are absent from the graph, and report exactly which are
// missing; a bare txout is not enough to satisfy the constraint.
#[test]
fn chain_graph_new_missing() {
    let tx_a = Transaction {
        version: 0x01,
        lock_time: PackedLockTime(0),
        input: vec![],
        output: vec![TxOut::default()],
    };
    let tx_b = Transaction {
        version: 0x02,
        lock_time: PackedLockTime(0),
        input: vec![],
        output: vec![TxOut::default()],
    };

    // chain claims both txs are confirmed at height 0
    let update = chain!(
        index: TxHeight,
        checkpoints: [[0, h!("A")]],
        txids: [
            (tx_a.txid(), TxHeight::Confirmed(0)),
            (tx_b.txid(), TxHeight::Confirmed(0))
        ]
    );
    let mut graph = TxGraph::default();

    // both txs are absent from the graph at first
    let mut expected_missing = HashSet::new();
    expected_missing.insert(tx_a.txid());
    expected_missing.insert(tx_b.txid());

    assert_eq!(
        ChainGraph::new(update.clone(), graph.clone()),
        Err(NewError::Missing(expected_missing.clone()))
    );

    // adding tx_b shrinks the missing set to just tx_a
    let _ = graph.insert_tx(tx_b.clone());
    expected_missing.remove(&tx_b.txid());

    assert_eq!(
        ChainGraph::new(update.clone(), graph.clone()),
        Err(NewError::Missing(expected_missing.clone()))
    );

    let _ = graph.insert_txout(
        OutPoint {
            txid: tx_a.txid(),
            vout: 0,
        },
        tx_a.output[0].clone(),
    );

    assert_eq!(
        ChainGraph::new(update.clone(), graph.clone()),
        Err(NewError::Missing(expected_missing)),
        "inserting an output instead of full tx doesn't satisfy constraint"
    );

    // with the full tx_a inserted, construction finally succeeds
    let _ = graph.insert_tx(tx_a.clone());

    let new_graph = ChainGraph::new(update.clone(), graph.clone()).unwrap();
    let expected_graph = {
        let mut cg = ChainGraph::<TxHeight>::default();
        let _ = cg
            .insert_checkpoint(update.latest_checkpoint().unwrap())
            .unwrap();
        let _ = cg.insert_tx(tx_a, TxHeight::Confirmed(0)).unwrap();
        let _ = cg.insert_tx(tx_b, TxHeight::Confirmed(0)).unwrap();
        cg
    };

    assert_eq!(new_graph, expected_graph);
}
|
||||
|
||||
// `ChainGraph::new` must reject a chain that positions two mutually
// conflicting transactions (double-spends of the same outpoint) as both
// being in the chain.
#[test]
fn chain_graph_new_conflicts() {
    let tx_a = Transaction {
        version: 0x01,
        lock_time: PackedLockTime(0),
        input: vec![],
        output: vec![TxOut::default()],
    };

    let tx_b = Transaction {
        version: 0x01,
        lock_time: PackedLockTime(0),
        input: vec![TxIn {
            previous_output: OutPoint::new(tx_a.txid(), 0),
            script_sig: Script::new(),
            sequence: Sequence::default(),
            witness: Witness::new(),
        }],
        output: vec![TxOut::default()],
    };

    // spends the same outpoint as tx_b -> unresolvable conflict
    let tx_b2 = Transaction {
        version: 0x02,
        lock_time: PackedLockTime(0),
        input: vec![TxIn {
            previous_output: OutPoint::new(tx_a.txid(), 0),
            script_sig: Script::new(),
            sequence: Sequence::default(),
            witness: Witness::new(),
        }],
        output: vec![TxOut::default(), TxOut::default()],
    };

    // chain (incorrectly) claims both tx_b and tx_b2 are confirmed
    let chain = chain!(
        index: TxHeight,
        checkpoints: [[5, h!("A")]],
        txids: [
            (tx_a.txid(), TxHeight::Confirmed(1)),
            (tx_b.txid(), TxHeight::Confirmed(2)),
            (tx_b2.txid(), TxHeight::Confirmed(3))
        ]
    );

    let graph = TxGraph::new([tx_a, tx_b, tx_b2]);

    assert!(matches!(
        ChainGraph::new(chain, graph),
        Err(NewError::Conflict { .. })
    ));
}
|
||||
|
||||
/// A transaction inserted as unconfirmed must be retrievable by txid together
/// with its recorded chain position.
#[test]
fn test_get_tx_in_chain() {
    let tx = Transaction {
        version: 0x01,
        lock_time: PackedLockTime(0),
        input: Vec::new(),
        output: vec![TxOut::default()],
    };

    let mut chain_graph = ChainGraph::default();
    let _ = chain_graph
        .insert_tx(tx.clone(), TxHeight::Unconfirmed)
        .unwrap();

    assert_eq!(
        chain_graph.get_tx_in_chain(tx.txid()),
        Some((&TxHeight::Unconfirmed, &tx))
    );
}
|
||||
|
||||
// Transactions are iterated in chain order (ascending confirmation height,
// unconfirmed last) regardless of insertion order.
#[test]
fn test_iterate_transactions() {
    let mut cg = ChainGraph::default();
    // three distinct txs, distinguished only by `version`
    let txs = (0..3)
        .map(|i| Transaction {
            version: i,
            lock_time: PackedLockTime(0),
            input: vec![],
            output: vec![TxOut::default()],
        })
        .collect::<Vec<_>>();
    let _ = cg
        .insert_checkpoint(BlockId {
            height: 1,
            hash: h!("A"),
        })
        .unwrap();
    // inserted out of chain order on purpose
    let _ = cg
        .insert_tx(txs[0].clone(), TxHeight::Confirmed(1))
        .unwrap();
    let _ = cg.insert_tx(txs[1].clone(), TxHeight::Unconfirmed).unwrap();
    let _ = cg
        .insert_tx(txs[2].clone(), TxHeight::Confirmed(0))
        .unwrap();

    assert_eq!(
        cg.transactions_in_chain().collect::<Vec<_>>(),
        vec![
            (&TxHeight::Confirmed(0), &txs[2]),
            (&TxHeight::Confirmed(1), &txs[0]),
            (&TxHeight::Unconfirmed, &txs[1]),
        ]
    );
}
|
||||
|
||||
/// Start with: block1, block2a, tx1, tx2a
/// Update 1: block2a -> block2b , tx2a -> tx2b
/// Update 2: block2b -> block2c , tx2b -> tx2a
#[test]
fn test_apply_changes_reintroduce_tx() {
    let block1 = BlockId {
        height: 1,
        hash: h!("block 1"),
    };
    let block2a = BlockId {
        height: 2,
        hash: h!("block 2a"),
    };
    let block2b = BlockId {
        height: 2,
        hash: h!("block 2b"),
    };
    let block2c = BlockId {
        height: 2,
        hash: h!("block 2c"),
    };

    let tx1 = Transaction {
        version: 0,
        lock_time: PackedLockTime(1),
        input: Vec::new(),
        output: [TxOut {
            value: 1,
            script_pubkey: Script::new(),
        }]
        .into(),
    };

    // tx2a and tx2b both spend tx1:0 and differ only by lock_time
    let tx2a = Transaction {
        version: 0,
        lock_time: PackedLockTime('a'.into()),
        input: [TxIn {
            previous_output: OutPoint::new(tx1.txid(), 0),
            ..Default::default()
        }]
        .into(),
        output: [TxOut {
            value: 0,
            ..Default::default()
        }]
        .into(),
    };

    let tx2b = Transaction {
        lock_time: PackedLockTime('b'.into()),
        ..tx2a.clone()
    };

    // block1, block2a, tx1, tx2a
    let mut cg = {
        let mut cg = ChainGraph::default();
        let _ = cg.insert_checkpoint(block1).unwrap();
        let _ = cg.insert_checkpoint(block2a).unwrap();
        let _ = cg.insert_tx(tx1, TxHeight::Confirmed(1)).unwrap();
        let _ = cg.insert_tx(tx2a.clone(), TxHeight::Confirmed(2)).unwrap();
        cg
    };

    // block2a -> block2b , tx2a -> tx2b
    let update = {
        let mut update = ChainGraph::default();
        let _ = update.insert_checkpoint(block1).unwrap();
        let _ = update.insert_checkpoint(block2b).unwrap();
        let _ = update
            .insert_tx(tx2b.clone(), TxHeight::Confirmed(2))
            .unwrap();
        update
    };
    assert_eq!(
        cg.apply_update(update).expect("should update"),
        ChangeSet {
            chain: changeset! {
                checkpoints: [(2, Some(block2b.hash))],
                txids: [(tx2a.txid(), None), (tx2b.txid(), Some(TxHeight::Confirmed(2)))]
            },
            graph: tx_graph::Additions {
                tx: [tx2b.clone()].into(),
                ..Default::default()
            },
        }
    );

    // block2b -> block2c , tx2b -> tx2a
    let update = {
        let mut update = ChainGraph::default();
        let _ = update.insert_checkpoint(block1).unwrap();
        let _ = update.insert_checkpoint(block2c).unwrap();
        let _ = update
            .insert_tx(tx2a.clone(), TxHeight::Confirmed(2))
            .unwrap();
        update
    };
    // graph additions are empty here: tx2a is already present in the graph,
    // so reintroducing it only changes chain positions
    assert_eq!(
        cg.apply_update(update).expect("should update"),
        ChangeSet {
            chain: changeset! {
                checkpoints: [(2, Some(block2c.hash))],
                txids: [(tx2b.txid(), None), (tx2a.txid(), Some(TxHeight::Confirmed(2)))]
            },
            ..Default::default()
        }
    );
}
|
||||
|
||||
// When an update evicts a transaction (tx_2) via a conflict, every descendant
// of the evicted tx (tx_3, tx_4, tx_5) must be evicted along with it.
#[test]
fn test_evict_descendants() {
    let block_1 = BlockId {
        height: 1,
        hash: h!("block 1"),
    };

    let block_2a = BlockId {
        height: 2,
        hash: h!("block 2 a"),
    };

    // replacement block at height 2, used by the update
    let block_2b = BlockId {
        height: 2,
        hash: h!("block 2 b"),
    };

    let tx_1 = Transaction {
        input: vec![TxIn {
            previous_output: OutPoint::new(h!("fake tx"), 0),
            ..Default::default()
        }],
        output: vec![TxOut {
            value: 10_000,
            script_pubkey: Script::new(),
        }],
        ..common::new_tx(1)
    };
    let tx_2 = Transaction {
        input: vec![TxIn {
            previous_output: OutPoint::new(tx_1.txid(), 0),
            ..Default::default()
        }],
        output: vec![
            TxOut {
                value: 20_000,
                script_pubkey: Script::new(),
            },
            TxOut {
                value: 30_000,
                script_pubkey: Script::new(),
            },
        ],
        ..common::new_tx(2)
    };
    let tx_3 = Transaction {
        input: vec![TxIn {
            previous_output: OutPoint::new(tx_2.txid(), 0),
            ..Default::default()
        }],
        output: vec![TxOut {
            value: 40_000,
            script_pubkey: Script::new(),
        }],
        ..common::new_tx(3)
    };
    let tx_4 = Transaction {
        input: vec![TxIn {
            previous_output: OutPoint::new(tx_2.txid(), 1),
            ..Default::default()
        }],
        output: vec![TxOut {
            value: 40_000,
            script_pubkey: Script::new(),
        }],
        ..common::new_tx(4)
    };
    let tx_5 = Transaction {
        input: vec![TxIn {
            previous_output: OutPoint::new(tx_4.txid(), 0),
            ..Default::default()
        }],
        output: vec![TxOut {
            value: 40_000,
            script_pubkey: Script::new(),
        }],
        ..common::new_tx(5)
    };

    // double-spends tx_1:0, so it conflicts with tx_2
    let tx_conflict = Transaction {
        input: vec![TxIn {
            previous_output: OutPoint::new(tx_1.txid(), 0),
            ..Default::default()
        }],
        output: vec![TxOut {
            value: 12345,
            script_pubkey: Script::new(),
        }],
        ..common::new_tx(6)
    };

    // 1 is spent by 2, 2 is spent by 3 and 4, 4 is spent by 5
    let _txid_1 = tx_1.txid();
    let txid_2 = tx_2.txid();
    let txid_3 = tx_3.txid();
    let txid_4 = tx_4.txid();
    let txid_5 = tx_5.txid();

    // this tx conflicts with 2
    let txid_conflict = tx_conflict.txid();

    let cg = {
        let mut cg = ChainGraph::<TxHeight>::default();
        let _ = cg.insert_checkpoint(block_1);
        let _ = cg.insert_checkpoint(block_2a);
        let _ = cg.insert_tx(tx_1, TxHeight::Confirmed(1));
        let _ = cg.insert_tx(tx_2, TxHeight::Confirmed(2));
        let _ = cg.insert_tx(tx_3, TxHeight::Confirmed(2));
        let _ = cg.insert_tx(tx_4, TxHeight::Confirmed(2));
        let _ = cg.insert_tx(tx_5, TxHeight::Confirmed(2));
        cg
    };

    // update replaces block 2a with 2b and confirms the conflicting tx there
    let update = {
        let mut cg = ChainGraph::<TxHeight>::default();
        let _ = cg.insert_checkpoint(block_1);
        let _ = cg.insert_checkpoint(block_2b);
        let _ = cg.insert_tx(tx_conflict.clone(), TxHeight::Confirmed(2));
        cg
    };

    // expected: tx_2 and all its descendants evicted, conflict tx added
    assert_eq!(
        cg.determine_changeset(&update),
        Ok(ChangeSet {
            chain: changeset! {
                checkpoints: [(2, Some(block_2b.hash))],
                txids: [(txid_2, None), (txid_3, None), (txid_4, None), (txid_5, None), (txid_conflict, Some(TxHeight::Confirmed(2)))]
            },
            graph: tx_graph::Additions {
                tx: [tx_conflict.clone()].into(),
                ..Default::default()
            }
        })
    );

    // inserting the conflict directly (without invalidating block 2a) must
    // fail because tx_2 remains confirmed in a valid block
    let err = cg
        .insert_tx_preview(tx_conflict, TxHeight::Unconfirmed)
        .expect_err("must fail due to conflicts");
    assert!(matches!(err, InsertTxError::UnresolvableConflict(_)));
}
|
||||
239
crates/chain/tests/test_keychain_tracker.rs
Normal file
239
crates/chain/tests/test_keychain_tracker.rs
Normal file
@@ -0,0 +1,239 @@
|
||||
#![cfg(feature = "miniscript")]
|
||||
#[macro_use]
|
||||
mod common;
|
||||
use bdk_chain::{
|
||||
keychain::{Balance, KeychainTracker},
|
||||
miniscript::{
|
||||
bitcoin::{secp256k1::Secp256k1, OutPoint, PackedLockTime, Transaction, TxOut},
|
||||
Descriptor,
|
||||
},
|
||||
BlockId, ConfirmationTime, TxHeight,
|
||||
};
|
||||
use bitcoin::TxIn;
|
||||
|
||||
// Inserting a tx that pays a revealed keychain script must surface the tx in
// the chain-graph and index its txout under the correct derivation index.
#[test]
fn test_insert_tx() {
    let mut tracker = KeychainTracker::default();
    let secp = Secp256k1::new();
    let (descriptor, _) = Descriptor::parse_descriptor(&secp, "tr([73c5da0a/86'/0'/0']xprv9xgqHN7yz9MwCkxsBPN5qetuNdQSUttZNKw1dcYTV4mkaAFiBVGQziHs3NRSWMkCzvgjEe3n9xV8oYywvM8at9yRqyaZVz6TYYhX98VjsUk/0/*)").unwrap();
    tracker.add_keychain((), descriptor.clone());
    // pays the script at derivation index 5
    let txout = TxOut {
        value: 100_000,
        script_pubkey: descriptor.at_derivation_index(5).script_pubkey(),
    };

    let tx = Transaction {
        version: 0x01,
        lock_time: PackedLockTime(0),
        input: vec![],
        output: vec![txout],
    };

    // reveal scripts up to index 5 so the txout above is recognized
    let _ = tracker.txout_index.reveal_to_target(&(), 5);

    let changeset = tracker
        .insert_tx_preview(tx.clone(), ConfirmationTime::Unconfirmed)
        .unwrap();
    tracker.apply_changeset(changeset);
    assert_eq!(
        tracker
            .chain_graph()
            .transactions_in_chain()
            .collect::<Vec<_>>(),
        vec![(&ConfirmationTime::Unconfirmed, &tx)]
    );

    assert_eq!(
        tracker
            .txout_index
            .txouts_of_keychain(&())
            .collect::<Vec<_>>(),
        vec![(
            5,
            OutPoint {
                txid: tx.txid(),
                vout: 0
            }
        )]
    );
}
|
||||
|
||||
// Exercises `KeychainTracker::balance` across the full lifecycle: untrusted
// vs trusted pending, coinbase immaturity (100-block rule), confirmation,
// and the historical `balance_at` view.
#[test]
fn test_balance() {
    use core::str::FromStr;
    #[derive(Debug, Clone, PartialEq, Eq, Ord, PartialOrd)]
    enum Keychain {
        One,
        Two,
    }
    let mut tracker = KeychainTracker::<Keychain, TxHeight>::default();
    let one = Descriptor::from_str("tr([73c5da0a/86'/0'/0']xpub6BgBgsespWvERF3LHQu6CnqdvfEvtMcQjYrcRzx53QJjSxarj2afYWcLteoGVky7D3UKDP9QyrLprQ3VCECoY49yfdDEHGCtMMj92pReUsQ/0/*)#rg247h69").unwrap();
    let two = Descriptor::from_str("tr([73c5da0a/86'/0'/0']xpub6BgBgsespWvERF3LHQu6CnqdvfEvtMcQjYrcRzx53QJjSxarj2afYWcLteoGVky7D3UKDP9QyrLprQ3VCECoY49yfdDEHGCtMMj92pReUsQ/1/*)#ju05rz2a").unwrap();
    tracker.add_keychain(Keychain::One, one);
    tracker.add_keychain(Keychain::Two, two);

    // 13_000 sats to keychain One (untrusted, see `should_trust` below)
    let tx1 = Transaction {
        version: 0x01,
        lock_time: PackedLockTime(0),
        input: vec![],
        output: vec![TxOut {
            value: 13_000,
            script_pubkey: tracker
                .txout_index
                .reveal_next_spk(&Keychain::One)
                .0
                .1
                .clone(),
        }],
    };

    // 7_000 sats to keychain Two (trusted)
    let tx2 = Transaction {
        version: 0x01,
        lock_time: PackedLockTime(0),
        input: vec![],
        output: vec![TxOut {
            value: 7_000,
            script_pubkey: tracker
                .txout_index
                .reveal_next_spk(&Keychain::Two)
                .0
                .1
                .clone(),
        }],
    };

    // 11_000 sats coinbase to keychain Two — immature until 100 blocks deep
    let tx_coinbase = Transaction {
        version: 0x01,
        lock_time: PackedLockTime(0),
        input: vec![TxIn::default()],
        output: vec![TxOut {
            value: 11_000,
            script_pubkey: tracker
                .txout_index
                .reveal_next_spk(&Keychain::Two)
                .0
                .1
                .clone(),
        }],
    };

    assert!(tx_coinbase.is_coin_base());

    let _ = tracker
        .insert_checkpoint(BlockId {
            height: 5,
            hash: h!("1"),
        })
        .unwrap();

    // only keychain Two's unconfirmed outputs count as trusted pending
    let should_trust = |keychain: &Keychain| match *keychain {
        Keychain::One => false,
        Keychain::Two => true,
    };

    assert_eq!(tracker.balance(should_trust), Balance::default());

    let _ = tracker
        .insert_tx(tx1.clone(), TxHeight::Unconfirmed)
        .unwrap();

    assert_eq!(
        tracker.balance(should_trust),
        Balance {
            untrusted_pending: 13_000,
            ..Default::default()
        }
    );

    let _ = tracker
        .insert_tx(tx2.clone(), TxHeight::Unconfirmed)
        .unwrap();

    assert_eq!(
        tracker.balance(should_trust),
        Balance {
            trusted_pending: 7_000,
            untrusted_pending: 13_000,
            ..Default::default()
        }
    );

    let _ = tracker
        .insert_tx(tx_coinbase, TxHeight::Confirmed(0))
        .unwrap();

    assert_eq!(
        tracker.balance(should_trust),
        Balance {
            trusted_pending: 7_000,
            untrusted_pending: 13_000,
            immature: 11_000,
            ..Default::default()
        }
    );

    let _ = tracker.insert_tx(tx1, TxHeight::Confirmed(1)).unwrap();

    assert_eq!(
        tracker.balance(should_trust),
        Balance {
            trusted_pending: 7_000,
            untrusted_pending: 0,
            immature: 11_000,
            confirmed: 13_000,
        }
    );

    let _ = tracker.insert_tx(tx2, TxHeight::Confirmed(2)).unwrap();

    assert_eq!(
        tracker.balance(should_trust),
        Balance {
            trusted_pending: 0,
            untrusted_pending: 0,
            immature: 11_000,
            confirmed: 20_000,
        }
    );

    // at tip height 98 the coinbase (height 0) is still 99 blocks deep ->
    // immature
    let _ = tracker
        .insert_checkpoint(BlockId {
            height: 98,
            hash: h!("98"),
        })
        .unwrap();

    assert_eq!(
        tracker.balance(should_trust),
        Balance {
            trusted_pending: 0,
            untrusted_pending: 0,
            immature: 11_000,
            confirmed: 20_000,
        }
    );

    // at tip height 99 the coinbase reaches 100 confirmations -> matures
    let _ = tracker
        .insert_checkpoint(BlockId {
            height: 99,
            hash: h!("99"),
        })
        .unwrap();

    assert_eq!(
        tracker.balance(should_trust),
        Balance {
            trusted_pending: 0,
            untrusted_pending: 0,
            immature: 0,
            confirmed: 31_000,
        }
    );

    // historical confirmed balance at each height
    assert_eq!(tracker.balance_at(0), 0);
    assert_eq!(tracker.balance_at(1), 13_000);
    assert_eq!(tracker.balance_at(2), 20_000);
    assert_eq!(tracker.balance_at(98), 20_000);
    assert_eq!(tracker.balance_at(99), 31_000);
    assert_eq!(tracker.balance_at(100), 31_000);
}
|
||||
369
crates/chain/tests/test_keychain_txout_index.rs
Normal file
369
crates/chain/tests/test_keychain_txout_index.rs
Normal file
@@ -0,0 +1,369 @@
|
||||
#![cfg(feature = "miniscript")]
|
||||
|
||||
#[macro_use]
|
||||
mod common;
|
||||
use bdk_chain::{
|
||||
collections::BTreeMap,
|
||||
keychain::{DerivationAdditions, KeychainTxOutIndex},
|
||||
};
|
||||
|
||||
use bitcoin::{secp256k1::Secp256k1, OutPoint, Script, Transaction, TxOut};
|
||||
use miniscript::{Descriptor, DescriptorPublicKey};
|
||||
|
||||
// Two-keychain fixture (external/internal) used across the txout-index tests.
#[derive(Clone, Debug, PartialEq, Eq, Ord, PartialOrd)]
enum TestKeychain {
    External,
    Internal,
}
|
||||
|
||||
// Build a `KeychainTxOutIndex` with one external and one internal tr()
// descriptor registered, returning the index plus both descriptors so tests
// can derive expected scripts.
fn init_txout_index() -> (
    bdk_chain::keychain::KeychainTxOutIndex<TestKeychain>,
    Descriptor<DescriptorPublicKey>,
    Descriptor<DescriptorPublicKey>,
) {
    let mut txout_index = bdk_chain::keychain::KeychainTxOutIndex::<TestKeychain>::default();

    let secp = bdk_chain::bitcoin::secp256k1::Secp256k1::signing_only();
    let (external_descriptor,_) = Descriptor::<DescriptorPublicKey>::parse_descriptor(&secp, "tr([73c5da0a/86'/0'/0']xprv9xgqHN7yz9MwCkxsBPN5qetuNdQSUttZNKw1dcYTV4mkaAFiBVGQziHs3NRSWMkCzvgjEe3n9xV8oYywvM8at9yRqyaZVz6TYYhX98VjsUk/0/*)").unwrap();
    let (internal_descriptor,_) = Descriptor::<DescriptorPublicKey>::parse_descriptor(&secp, "tr([73c5da0a/86'/0'/0']xprv9xgqHN7yz9MwCkxsBPN5qetuNdQSUttZNKw1dcYTV4mkaAFiBVGQziHs3NRSWMkCzvgjEe3n9xV8oYywvM8at9yRqyaZVz6TYYhX98VjsUk/1/*)").unwrap();

    txout_index.add_keychain(TestKeychain::External, external_descriptor.clone());
    txout_index.add_keychain(TestKeychain::Internal, internal_descriptor.clone());

    (txout_index, external_descriptor, internal_descriptor)
}
|
||||
|
||||
fn spk_at_index(descriptor: &Descriptor<DescriptorPublicKey>, index: u32) -> Script {
|
||||
descriptor
|
||||
.derived_descriptor(&Secp256k1::verification_only(), index)
|
||||
.expect("must derive")
|
||||
.script_pubkey()
|
||||
}
|
||||
|
||||
// `reveal_to_target_multi` must report the revealed indices as additions the
// first time, and report no additions when called again with the same
// targets (idempotence).
#[test]
fn test_set_all_derivation_indices() {
    let (mut txout_index, _, _) = init_txout_index();
    let derive_to: BTreeMap<_, _> =
        [(TestKeychain::External, 12), (TestKeychain::Internal, 24)].into();
    assert_eq!(
        txout_index.reveal_to_target_multi(&derive_to).1.as_inner(),
        &derive_to
    );
    assert_eq!(txout_index.last_revealed_indices(), &derive_to);
    assert_eq!(
        txout_index.reveal_to_target_multi(&derive_to).1,
        DerivationAdditions::default(),
        "no changes if we set to the same thing"
    );
}
|
||||
|
||||
#[test]
|
||||
fn test_lookahead() {
|
||||
let (mut txout_index, external_desc, internal_desc) = init_txout_index();
|
||||
|
||||
// ensure it does not break anything if lookahead is set multiple times
|
||||
(0..=10).for_each(|lookahead| txout_index.set_lookahead(&TestKeychain::External, lookahead));
|
||||
(0..=20)
|
||||
.filter(|v| v % 2 == 0)
|
||||
.for_each(|lookahead| txout_index.set_lookahead(&TestKeychain::Internal, lookahead));
|
||||
|
||||
assert_eq!(txout_index.inner().all_spks().len(), 30);
|
||||
|
||||
// given:
|
||||
// - external lookahead set to 10
|
||||
// - internal lookahead set to 20
|
||||
// when:
|
||||
// - set external derivation index to value higher than last, but within the lookahead value
|
||||
// expect:
|
||||
// - scripts cached in spk_txout_index should increase correctly
|
||||
// - stored scripts of external keychain should be of expected counts
|
||||
for index in (0..20).skip_while(|i| i % 2 == 1) {
|
||||
let (revealed_spks, revealed_additions) =
|
||||
txout_index.reveal_to_target(&TestKeychain::External, index);
|
||||
assert_eq!(
|
||||
revealed_spks.collect::<Vec<_>>(),
|
||||
vec![(index, spk_at_index(&external_desc, index))],
|
||||
);
|
||||
assert_eq!(
|
||||
revealed_additions.as_inner(),
|
||||
&[(TestKeychain::External, index)].into()
|
||||
);
|
||||
|
||||
assert_eq!(
|
||||
txout_index.inner().all_spks().len(),
|
||||
10 /* external lookahead */ +
|
||||
20 /* internal lookahead */ +
|
||||
index as usize + 1 /* `derived` count */
|
||||
);
|
||||
assert_eq!(
|
||||
txout_index
|
||||
.revealed_spks_of_keychain(&TestKeychain::External)
|
||||
.count(),
|
||||
index as usize + 1,
|
||||
);
|
||||
assert_eq!(
|
||||
txout_index
|
||||
.revealed_spks_of_keychain(&TestKeychain::Internal)
|
||||
.count(),
|
||||
0,
|
||||
);
|
||||
assert_eq!(
|
||||
txout_index
|
||||
.unused_spks_of_keychain(&TestKeychain::External)
|
||||
.count(),
|
||||
index as usize + 1,
|
||||
);
|
||||
assert_eq!(
|
||||
txout_index
|
||||
.unused_spks_of_keychain(&TestKeychain::Internal)
|
||||
.count(),
|
||||
0,
|
||||
);
|
||||
}
|
||||
|
||||
// given:
|
||||
// - internal lookahead is 20
|
||||
// - internal derivation index is `None`
|
||||
// when:
|
||||
// - derivation index is set ahead of current derivation index + lookahead
|
||||
// expect:
|
||||
// - scripts cached in spk_txout_index should increase correctly, a.k.a. no scripts are skipped
|
||||
let (revealed_spks, revealed_additions) =
|
||||
txout_index.reveal_to_target(&TestKeychain::Internal, 24);
|
||||
assert_eq!(
|
||||
revealed_spks.collect::<Vec<_>>(),
|
||||
(0..=24)
|
||||
.map(|index| (index, spk_at_index(&internal_desc, index)))
|
||||
.collect::<Vec<_>>(),
|
||||
);
|
||||
assert_eq!(
|
||||
revealed_additions.as_inner(),
|
||||
&[(TestKeychain::Internal, 24)].into()
|
||||
);
|
||||
assert_eq!(
|
||||
txout_index.inner().all_spks().len(),
|
||||
10 /* external lookahead */ +
|
||||
20 /* internal lookahead */ +
|
||||
20 /* external stored index count */ +
|
||||
25 /* internal stored index count */
|
||||
);
|
||||
assert_eq!(
|
||||
txout_index
|
||||
.revealed_spks_of_keychain(&TestKeychain::Internal)
|
||||
.count(),
|
||||
25,
|
||||
);
|
||||
|
||||
// ensure derivation indices are expected for each keychain
|
||||
let last_external_index = txout_index
|
||||
.last_revealed_index(&TestKeychain::External)
|
||||
.expect("already derived");
|
||||
let last_internal_index = txout_index
|
||||
.last_revealed_index(&TestKeychain::Internal)
|
||||
.expect("already derived");
|
||||
assert_eq!(last_external_index, 19);
|
||||
assert_eq!(last_internal_index, 24);
|
||||
|
||||
// when:
|
||||
// - scanning txouts with spks within stored indexes
|
||||
// expect:
|
||||
// - no changes to stored index counts
|
||||
let external_iter = 0..=last_external_index;
|
||||
let internal_iter = last_internal_index - last_external_index..=last_internal_index;
|
||||
for (external_index, internal_index) in external_iter.zip(internal_iter) {
|
||||
let tx = Transaction {
|
||||
output: vec![
|
||||
TxOut {
|
||||
script_pubkey: external_desc
|
||||
.at_derivation_index(external_index)
|
||||
.script_pubkey(),
|
||||
value: 10_000,
|
||||
},
|
||||
TxOut {
|
||||
script_pubkey: internal_desc
|
||||
.at_derivation_index(internal_index)
|
||||
.script_pubkey(),
|
||||
value: 10_000,
|
||||
},
|
||||
],
|
||||
..common::new_tx(external_index)
|
||||
};
|
||||
assert_eq!(txout_index.scan(&tx), DerivationAdditions::default());
|
||||
assert_eq!(
|
||||
txout_index.last_revealed_index(&TestKeychain::External),
|
||||
Some(last_external_index)
|
||||
);
|
||||
assert_eq!(
|
||||
txout_index.last_revealed_index(&TestKeychain::Internal),
|
||||
Some(last_internal_index)
|
||||
);
|
||||
assert_eq!(
|
||||
txout_index
|
||||
.revealed_spks_of_keychain(&TestKeychain::External)
|
||||
.count(),
|
||||
last_external_index as usize + 1,
|
||||
);
|
||||
assert_eq!(
|
||||
txout_index
|
||||
.revealed_spks_of_keychain(&TestKeychain::Internal)
|
||||
.count(),
|
||||
last_internal_index as usize + 1,
|
||||
);
|
||||
}
|
||||
}
|
||||
|
||||
// when:
// - scanning txouts with spks above the last stored index
// expect:
// - spks within the lookahead window are indexed; last revealed index and
//   last used index both advance to the scanned index
// - spks beyond the lookahead window are NOT indexed
#[test]
fn test_scan_with_lookahead() {
    let (mut txout_index, external_desc, _) = init_txout_index();
    txout_index.set_lookahead_for_all(10);

    // each target index is within `lookahead` (10) of the previously scanned one
    let spks: BTreeMap<u32, Script> = [0, 10, 20, 30]
        .into_iter()
        .map(|i| (i, external_desc.at_derivation_index(i).script_pubkey()))
        .collect();

    for (&spk_i, spk) in &spks {
        let op = OutPoint::new(h!("fake tx"), spk_i);
        let txout = TxOut {
            script_pubkey: spk.clone(),
            value: 0,
        };

        // scanning the txout should reveal up to `spk_i` and mark it used
        let additions = txout_index.scan_txout(op, &txout);
        assert_eq!(
            additions.as_inner(),
            &[(TestKeychain::External, spk_i)].into()
        );
        assert_eq!(
            txout_index.last_revealed_index(&TestKeychain::External),
            Some(spk_i)
        );
        assert_eq!(
            txout_index.last_used_index(&TestKeychain::External),
            Some(spk_i)
        );
    }

    // now try with index 41: it is more than `lookahead` above the last
    // revealed index (30), so we expect the txout to not be indexed
    let spk_41 = external_desc.at_derivation_index(41).script_pubkey();
    let op = OutPoint::new(h!("fake tx"), 41);
    let txout = TxOut {
        script_pubkey: spk_41,
        value: 0,
    };
    let additions = txout_index.scan_txout(op, &txout);
    assert!(additions.is_empty());
}
|
||||
|
||||
// Exercises `next_index`, `reveal_next_spk` and `next_unused_spk` on a
// wildcard descriptor as scripts are progressively revealed and marked used.
#[test]
fn test_wildcard_derivations() {
    let (mut txout_index, external_desc, _) = init_txout_index();
    let external_spk_0 = external_desc.at_derivation_index(0).script_pubkey();
    let external_spk_16 = external_desc.at_derivation_index(16).script_pubkey();
    let external_spk_26 = external_desc.at_derivation_index(26).script_pubkey();
    let external_spk_27 = external_desc.at_derivation_index(27).script_pubkey();

    // - nothing is derived
    // - unused list is also empty
    //
    // - next_derivation_index() == (0, true)
    // - derive_new() == ((0, <spk>), DerivationAdditions)
    // - next_unused() == ((0, <spk>), DerivationAdditions::is_empty())
    assert_eq!(txout_index.next_index(&TestKeychain::External), (0, true));
    let (spk, changeset) = txout_index.reveal_next_spk(&TestKeychain::External);
    assert_eq!(spk, (0_u32, &external_spk_0));
    assert_eq!(changeset.as_inner(), &[(TestKeychain::External, 0)].into());
    let (spk, changeset) = txout_index.next_unused_spk(&TestKeychain::External);
    assert_eq!(spk, (0_u32, &external_spk_0));
    assert_eq!(changeset.as_inner(), &[].into());

    // - derived till 25
    // - used all spks till 15.
    // - used list : [0..=15, 17, 20, 23]
    // - unused list: [16, 18, 19, 21, 22, 24, 25]

    // - next_derivation_index() = (26, true)
    // - derive_new() = ((26, <spk>), DerivationAdditions)
    // - next_unused() == ((16, <spk>), DerivationAdditions::is_empty())
    let _ = txout_index.reveal_to_target(&TestKeychain::External, 25);

    // ranges and arrays are already `IntoIterator`: no `.into_iter()` calls
    // or intermediate `vec!` allocation needed
    (0..=15)
        .chain([17, 20, 23])
        .for_each(|index| assert!(txout_index.mark_used(&TestKeychain::External, index)));

    assert_eq!(txout_index.next_index(&TestKeychain::External), (26, true));

    let (spk, changeset) = txout_index.reveal_next_spk(&TestKeychain::External);
    assert_eq!(spk, (26, &external_spk_26));

    assert_eq!(changeset.as_inner(), &[(TestKeychain::External, 26)].into());

    let (spk, changeset) = txout_index.next_unused_spk(&TestKeychain::External);
    assert_eq!(spk, (16, &external_spk_16));
    assert_eq!(changeset.as_inner(), &[].into());

    // - Use all the derived till 26.
    // - next_unused() = ((27, <spk>), DerivationAdditions)
    for index in 0..=26 {
        txout_index.mark_used(&TestKeychain::External, index);
    }

    let (spk, changeset) = txout_index.next_unused_spk(&TestKeychain::External);
    assert_eq!(spk, (27, &external_spk_27));
    assert_eq!(changeset.as_inner(), &[(TestKeychain::External, 27)].into());
}
|
||||
|
||||
// A non-wildcard descriptor can only ever produce the script at index 0; all
// reveal/derive operations must keep returning that one script, and asking to
// reveal far past it must be a no-op.
#[test]
fn test_non_wildcard_derivations() {
    let mut txout_index = KeychainTxOutIndex::<TestKeychain>::default();

    let secp = bitcoin::secp256k1::Secp256k1::signing_only();
    // descriptor ends in a fixed path (`/1/0`), i.e. no `/*` wildcard
    let (no_wildcard_descriptor, _) = Descriptor::<DescriptorPublicKey>::parse_descriptor(&secp, "wpkh([73c5da0a/86'/0'/0']xprv9xgqHN7yz9MwCkxsBPN5qetuNdQSUttZNKw1dcYTV4mkaAFiBVGQziHs3NRSWMkCzvgjEe3n9xV8oYywvM8at9yRqyaZVz6TYYhX98VjsUk/1/0)").unwrap();
    let external_spk = no_wildcard_descriptor
        .at_derivation_index(0)
        .script_pubkey();

    txout_index.add_keychain(TestKeychain::External, no_wildcard_descriptor);

    // given:
    // - `txout_index` with no stored scripts
    // expect:
    // - next derivation index should be new
    // - when we derive a new script, script @ index 0
    // - when we get the next unused script, script @ index 0
    assert_eq!(txout_index.next_index(&TestKeychain::External), (0, true));
    let (spk, changeset) = txout_index.reveal_next_spk(&TestKeychain::External);
    assert_eq!(spk, (0, &external_spk));
    assert_eq!(changeset.as_inner(), &[(TestKeychain::External, 0)].into());

    let (spk, changeset) = txout_index.next_unused_spk(&TestKeychain::External);
    assert_eq!(spk, (0, &external_spk));
    assert_eq!(changeset.as_inner(), &[].into());

    // given:
    // - the non-wildcard descriptor already has a stored and used script
    // expect:
    // - next derivation index should not be new
    // - derive new and next unused should return the old script
    // - store_up_to should not panic and return empty additions
    assert_eq!(txout_index.next_index(&TestKeychain::External), (0, false));
    txout_index.mark_used(&TestKeychain::External, 0);

    let (spk, changeset) = txout_index.reveal_next_spk(&TestKeychain::External);
    assert_eq!(spk, (0, &external_spk));
    assert_eq!(changeset.as_inner(), &[].into());

    let (spk, changeset) = txout_index.next_unused_spk(&TestKeychain::External);
    assert_eq!(spk, (0, &external_spk));
    assert_eq!(changeset.as_inner(), &[].into());
    // revealing to a target far beyond index 0 must reveal nothing new
    let (revealed_spks, revealed_additions) =
        txout_index.reveal_to_target(&TestKeychain::External, 200);
    assert_eq!(revealed_spks.count(), 0);
    assert!(revealed_additions.is_empty());
}
|
||||
773
crates/chain/tests/test_sparse_chain.rs
Normal file
773
crates/chain/tests/test_sparse_chain.rs
Normal file
@@ -0,0 +1,773 @@
|
||||
#[macro_use]
|
||||
mod common;
|
||||
|
||||
use bdk_chain::{collections::BTreeSet, sparse_chain::*, BlockId, TxHeight};
|
||||
use bitcoin::{hashes::Hash, Txid};
|
||||
use core::ops::Bound;
|
||||
|
||||
/// A composite chain position used by these tests: transactions are ordered
/// first by [`TxHeight`], then by an arbitrary `u32` tie-breaker within the
/// same height (derives `Ord` on the tuple, so ordering is lexicographic).
#[derive(Debug, Clone, Copy, PartialEq, Eq, Ord, PartialOrd, Hash)]
pub struct TestIndex(TxHeight, u32);

impl ChainPosition for TestIndex {
    fn height(&self) -> TxHeight {
        self.0
    }

    // greatest possible position at `height`: tie-breaker saturated to MAX
    fn max_ord_of_height(height: TxHeight) -> Self {
        Self(height, u32::MAX)
    }

    // least possible position at `height`: tie-breaker at MIN
    fn min_ord_of_height(height: TxHeight) -> Self {
        Self(height, u32::MIN)
    }
}

impl TestIndex {
    /// Construct a position from anything convertible into [`TxHeight`] plus
    /// the tie-breaking extension value.
    pub fn new<H>(height: H, ext: u32) -> Self
    where
        H: Into<TxHeight>,
    {
        Self(height.into(), ext)
    }
}
|
||||
|
||||
// Adding the very first checkpoint to an empty chain must yield a changeset
// introducing exactly that block and nothing else.
#[test]
fn add_first_checkpoint() {
    let empty = SparseChain::default();
    let update = chain!([0, h!("A")]);
    let expected = changeset! {
        checkpoints: [(0, Some(h!("A")))],
        txids: []
    };
    assert_eq!(
        empty.determine_changeset(&update),
        Ok(expected),
        "add first tip"
    );
}
|
||||
|
||||
// Extending a one-block chain with one more block yields a changeset that
// contains only the new tip.
#[test]
fn add_second_tip() {
    let original = chain!([0, h!("A")]);
    let update = chain!([0, h!("A")], [1, h!("B")]);
    let expected = changeset! {
        checkpoints: [(1, Some(h!("B")))],
        txids: []
    };
    assert_eq!(
        original.determine_changeset(&update),
        Ok(expected),
        "extend tip by one"
    );
}
|
||||
|
||||
// Chains sharing no checkpoint cannot be reconciled: the update fails to
// connect at the original's checkpoint height 0.
#[test]
fn two_disjoint_chains_cannot_merge() {
    let original = chain!([0, h!("A")]);
    let disjoint_update = chain!([1, h!("B")]);
    let result = original.determine_changeset(&disjoint_update);
    assert_eq!(result, Err(UpdateError::NotConnected(0)));
}
|
||||
|
||||
// An update identical to the original chain merges as a no-op (empty
// changeset).
#[test]
fn duplicate_chains_should_merge() {
    let original = chain!([0, h!("A")]);
    let identical_update = chain!([0, h!("A")]);
    let result = original.determine_changeset(&identical_update);
    assert_eq!(result, Ok(ChangeSet::default()));
}
|
||||
|
||||
// Identical chains that also carry the same tx at the same height merge with
// an empty changeset.
#[test]
fn duplicate_chains_with_txs_should_merge() {
    let original =
        chain!(checkpoints: [[0,h!("A")]], txids: [(h!("tx0"), TxHeight::Confirmed(0))]);
    let identical_update =
        chain!(checkpoints: [[0,h!("A")]], txids: [(h!("tx0"), TxHeight::Confirmed(0))]);
    let result = original.determine_changeset(&identical_update);
    assert_eq!(result, Ok(ChangeSet::default()));
}
|
||||
|
||||
// Identical checkpoints but a tx only present in the update: the changeset
// should introduce just that new tx.
#[test]
fn duplicate_chains_with_different_txs_should_merge() {
    let chain1 = chain!(checkpoints: [[0,h!("A")]], txids: [(h!("tx0"), TxHeight::Confirmed(0))]);
    let chain2 = chain!(checkpoints: [[0,h!("A")]], txids: [(h!("tx1"), TxHeight::Confirmed(0))]);
    assert_eq!(
        chain1.determine_changeset(&chain2),
        Ok(changeset! {
            checkpoints: [],
            txids: [(h!("tx1"), Some(TxHeight::Confirmed(0)))]
        })
    );
}
|
||||
|
||||
// Replacing the sole checkpoint's hash (A -> A') while the tx stays at the
// same height: only the checkpoint changes.
#[test]
fn invalidate_first_and_only_checkpoint_without_tx_changes() {
    let chain1 = chain!(checkpoints: [[0,h!("A")]], txids: [(h!("tx0"), TxHeight::Confirmed(0))]);
    let chain2 = chain!(checkpoints: [[0,h!("A'")]], txids: [(h!("tx0"), TxHeight::Confirmed(0))]);
    assert_eq!(
        chain1.determine_changeset(&chain2),
        Ok(changeset! {
            checkpoints: [(0, Some(h!("A'")))],
            txids: []
        },)
    );
}
|
||||
|
||||
// Reorg of the sole checkpoint that also moves the tx to a higher block:
// the changeset replaces block 0, adds block 1, and repositions the tx.
#[test]
fn invalidate_first_and_only_checkpoint_with_tx_move_forward() {
    let chain1 = chain!(checkpoints: [[0,h!("A")]], txids: [(h!("tx0"), TxHeight::Confirmed(0))]);
    let chain2 = chain!(checkpoints: [[0,h!("A'")],[1, h!("B")]], txids: [(h!("tx0"), TxHeight::Confirmed(1))]);
    assert_eq!(
        chain1.determine_changeset(&chain2),
        Ok(changeset! {
            checkpoints: [(0, Some(h!("A'"))), (1, Some(h!("B")))],
            txids: [(h!("tx0"), Some(TxHeight::Confirmed(1)))]
        },)
    );
}
|
||||
|
||||
// Reorg of the sole checkpoint that moves the tx to a *lower* block: the
// changeset adds block 0, replaces block 1, and repositions the tx.
#[test]
fn invalidate_first_and_only_checkpoint_with_tx_move_backward() {
    let chain1 = chain!(checkpoints: [[1,h!("B")]], txids: [(h!("tx0"), TxHeight::Confirmed(1))]);
    let chain2 = chain!(checkpoints: [[0,h!("A")],[1, h!("B'")]], txids: [(h!("tx0"), TxHeight::Confirmed(0))]);
    assert_eq!(
        chain1.determine_changeset(&chain2),
        Ok(changeset! {
            checkpoints: [(0, Some(h!("A"))), (1, Some(h!("B'")))],
            txids: [(h!("tx0"), Some(TxHeight::Confirmed(0)))]
        },)
    );
}
|
||||
|
||||
// The tx sits at height 0, which is *not* invalidated (only block 1 is), so
// moving it to height 1 contradicts the original chain and must fail.
#[test]
fn invalidate_a_checkpoint_and_try_and_move_tx_when_it_wasnt_within_invalidation() {
    let chain1 = chain!(checkpoints: [[0, h!("A")], [1, h!("B")]], txids: [(h!("tx0"), TxHeight::Confirmed(0))]);
    let chain2 = chain!(checkpoints: [[0, h!("A")], [1, h!("B'")]], txids: [(h!("tx0"), TxHeight::Confirmed(1))]);
    assert_eq!(
        chain1.determine_changeset(&chain2),
        Err(UpdateError::TxInconsistent {
            txid: h!("tx0"),
            original_pos: TxHeight::Confirmed(0),
            update_pos: TxHeight::Confirmed(1),
        })
    );
}
|
||||
|
||||
/// This test doesn't make much sense. We're invalidating a block at height 1 and moving it to
/// height 0. It should be impossible for it to be at height 1 at any point if it was at height 0
/// all along.
///
/// It documents current behavior: because block 1 is invalidated, the tx's
/// old position is no longer trusted and the move is accepted.
#[test]
fn move_invalidated_tx_into_earlier_checkpoint() {
    let chain1 = chain!(checkpoints: [[0, h!("A")], [1, h!("B")]], txids: [(h!("tx0"), TxHeight::Confirmed(1))]);
    let chain2 = chain!(checkpoints: [[0, h!("A")], [1, h!("B'")]], txids: [(h!("tx0"), TxHeight::Confirmed(0))]);
    assert_eq!(
        chain1.determine_changeset(&chain2),
        Ok(changeset! {
            checkpoints: [(1, Some(h!("B'")))],
            txids: [(h!("tx0"), Some(TxHeight::Confirmed(0)))]
        },)
    );
}
|
||||
|
||||
// Reorg of the sole checkpoint with the tx falling back to the mempool: the
// changeset replaces the block and marks the tx unconfirmed.
#[test]
fn invalidate_first_and_only_checkpoint_with_tx_move_to_mempool() {
    let chain1 = chain!(checkpoints: [[0,h!("A")]], txids: [(h!("tx0"), TxHeight::Confirmed(0))]);
    let chain2 = chain!(checkpoints: [[0,h!("A'")]], txids: [(h!("tx0"), TxHeight::Unconfirmed)]);
    assert_eq!(
        chain1.determine_changeset(&chain2),
        Ok(changeset! {
            checkpoints: [(0, Some(h!("A'")))],
            txids: [(h!("tx0"), Some(TxHeight::Unconfirmed))]
        },)
    );
}
|
||||
|
||||
// A mempool tx can be confirmed into an existing block without any new
// checkpoints: the changeset touches only the tx's position.
#[test]
fn confirm_tx_without_extending_chain() {
    let original =
        chain!(checkpoints: [[0,h!("A")]], txids: [(h!("tx0"), TxHeight::Unconfirmed)]);
    let update =
        chain!(checkpoints: [[0,h!("A")]], txids: [(h!("tx0"), TxHeight::Confirmed(0))]);
    let expected = changeset! {
        checkpoints: [],
        txids: [(h!("tx0"), Some(TxHeight::Confirmed(0)))]
    };
    assert_eq!(original.determine_changeset(&update), Ok(expected));
}
|
||||
|
||||
// The update both extends the chain (adds block 1) and confirms the mempool
// tx into the already-known block 0.
#[test]
fn confirm_tx_backwards_while_extending_chain() {
    let chain1 = chain!(checkpoints: [[0,h!("A")]], txids: [(h!("tx0"), TxHeight::Unconfirmed)]);
    let chain2 = chain!(checkpoints: [[0,h!("A")],[1,h!("B")]], txids: [(h!("tx0"), TxHeight::Confirmed(0))]);
    assert_eq!(
        chain1.determine_changeset(&chain2),
        Ok(changeset! {
            checkpoints: [(1, Some(h!("B")))],
            txids: [(h!("tx0"), Some(TxHeight::Confirmed(0)))]
        },)
    );
}
|
||||
|
||||
// The update extends the chain with block 1 and confirms the mempool tx into
// that newly introduced block.
#[test]
fn confirm_tx_in_new_block() {
    let chain1 = chain!(checkpoints: [[0,h!("A")]], txids: [(h!("tx0"), TxHeight::Unconfirmed)]);
    let chain2 = chain! {
        checkpoints: [[0,h!("A")], [1,h!("B")]],
        txids: [(h!("tx0"), TxHeight::Confirmed(1))]
    };
    assert_eq!(
        chain1.determine_changeset(&chain2),
        Ok(changeset! {
            checkpoints: [(1, Some(h!("B")))],
            txids: [(h!("tx0"), Some(TxHeight::Confirmed(1)))]
        },)
    );
}
|
||||
|
||||
// Two chains with no checkpoints at all (mempool-only) can still merge; the
// update's extra unconfirmed tx is introduced.
#[test]
fn merging_mempool_of_empty_chains_doesnt_fail() {
    let chain1 = chain!(checkpoints: [], txids: [(h!("tx0"), TxHeight::Unconfirmed)]);
    let chain2 = chain!(checkpoints: [], txids: [(h!("tx1"), TxHeight::Unconfirmed)]);

    assert_eq!(
        chain1.determine_changeset(&chain2),
        Ok(changeset! {
            checkpoints: [],
            txids: [(h!("tx1"), Some(TxHeight::Unconfirmed))]
        },)
    );
}
|
||||
|
||||
// A confirmed tx cannot be inserted into a chain that has no checkpoints:
// its height would be above the (non-existent) tip.
#[test]
fn cannot_insert_confirmed_tx_without_checkpoints() {
    let chain = SparseChain::default();
    let result = chain.insert_tx_preview(h!("A"), TxHeight::Confirmed(0));
    let expected_err = InsertTxError::TxTooHigh {
        txid: h!("A"),
        tx_height: 0,
        tip_height: None,
    };
    assert_eq!(result, Err(expected_err));
}
|
||||
|
||||
// A checkpoint-less update can still contribute unconfirmed txs to a chain
// that does have checkpoints.
#[test]
fn empty_chain_can_add_unconfirmed_transactions() {
    let chain1 = chain!(checkpoints: [[0, h!("A")]], txids: []);
    let chain2 = chain!(checkpoints: [], txids: [(h!("tx0"), TxHeight::Unconfirmed)]);

    assert_eq!(
        chain1.determine_changeset(&chain2),
        Ok(changeset! {
            checkpoints: [],
            txids: [ (h!("tx0"), Some(TxHeight::Unconfirmed)) ]
        },)
    );
}
|
||||
|
||||
// An update that stops below the original's tip (but connects at block 1)
// does not invalidate the tip; it only contributes its tx.
#[test]
fn can_update_with_shorter_chain() {
    let chain1 = chain!(checkpoints: [[1, h!("B")],[2, h!("C")]], txids: []);
    let chain2 = chain!(checkpoints: [[1, h!("B")]], txids: [(h!("tx0"), TxHeight::Confirmed(1))]);

    assert_eq!(
        chain1.determine_changeset(&chain2),
        Ok(changeset! {
            checkpoints: [],
            txids: [(h!("tx0"), Some(TxHeight::Confirmed(1)))]
        },)
    )
}
|
||||
|
||||
// An update may add checkpoints *below* the original chain's range as long as
// it connects (here at block 2); only the new block 1 enters the changeset.
#[test]
fn can_introduce_older_checkpoints() {
    let original = chain!(checkpoints: [[2, h!("C")], [3, h!("D")]], txids: []);
    let update = chain!(checkpoints: [[1, h!("B")], [2, h!("C")]], txids: []);
    let expected = changeset! {
        checkpoints: [(1, Some(h!("B")))],
        txids: []
    };
    assert_eq!(original.determine_changeset(&update), Ok(expected));
}
|
||||
|
||||
// The chains agree at block 1, so the update may correct the hash of the
// earlier block 0 without being treated as disconnected.
#[test]
fn fix_blockhash_before_agreement_point() {
    let chain1 = chain!([0, h!("im-wrong")], [1, h!("we-agree")]);
    let chain2 = chain!([0, h!("fix")], [1, h!("we-agree")]);

    assert_eq!(
        chain1.determine_changeset(&chain2),
        Ok(changeset! {
            checkpoints: [(0, Some(h!("fix")))],
            txids: []
        },)
    )
}
|
||||
|
||||
// TODO: Use macro
// A confirmed tx's extension index (the `u32` tie-breaker of `TestIndex`)
// cannot change while its block is untouched — that is an inconsistency.
#[test]
fn cannot_change_ext_index_of_confirmed_tx() {
    let chain1 = chain!(
        index: TestIndex,
        checkpoints: [[1, h!("A")]],
        txids: [(h!("tx0"), TestIndex(TxHeight::Confirmed(1), 10))]
    );
    let chain2 = chain!(
        index: TestIndex,
        checkpoints: [[1, h!("A")]],
        txids: [(h!("tx0"), TestIndex(TxHeight::Confirmed(1), 20))]
    );

    assert_eq!(
        chain1.determine_changeset(&chain2),
        Err(UpdateError::TxInconsistent {
            txid: h!("tx0"),
            original_pos: TestIndex(TxHeight::Confirmed(1), 10),
            update_pos: TestIndex(TxHeight::Confirmed(1), 20),
        }),
    )
}
|
||||
|
||||
// Unlike a confirmed tx, an *unconfirmed* tx's extension index may change
// freely; the changeset records the new position.
#[test]
fn can_change_index_of_unconfirmed_tx() {
    let chain1 = chain!(
        index: TestIndex,
        checkpoints: [[1, h!("A")]],
        txids: [(h!("tx1"), TestIndex(TxHeight::Unconfirmed, 10))]
    );
    let chain2 = chain!(
        index: TestIndex,
        checkpoints: [[1, h!("A")]],
        txids: [(h!("tx1"), TestIndex(TxHeight::Unconfirmed, 20))]
    );

    assert_eq!(
        chain1.determine_changeset(&chain2),
        Ok(ChangeSet {
            checkpoints: [].into(),
            txids: [(h!("tx1"), Some(TestIndex(TxHeight::Unconfirmed, 20)),)].into()
        },),
    )
}
|
||||
|
||||
/// B and C are in both chain and update
/// ```
///        | 0 | 1 | 2 | 3 | 4
/// chain  |     B   C
/// update | A   B   C   D
/// ```
/// This should succeed with the point of agreement being C and A should be added in addition.
#[test]
fn two_points_of_agreement() {
    let chain1 = chain!([1, h!("B")], [2, h!("C")]);
    let chain2 = chain!([0, h!("A")], [1, h!("B")], [2, h!("C")], [3, h!("D")]);

    assert_eq!(
        chain1.determine_changeset(&chain2),
        Ok(changeset! {
            checkpoints: [(0, Some(h!("A"))), (3, Some(h!("D")))]
        },),
    );
}
|
||||
|
||||
/// Update and chain does not connect:
/// ```
///        | 0 | 1 | 2 | 3 | 4
/// chain  |     B   C
/// update | A   B       D
/// ```
/// This should fail as we cannot figure out whether C & D are on the same chain
#[test]
fn update_and_chain_does_not_connect() {
    let chain1 = chain!([1, h!("B")], [2, h!("C")]);
    let chain2 = chain!([0, h!("A")], [1, h!("B")], [3, h!("D")]);

    assert_eq!(
        chain1.determine_changeset(&chain2),
        // height 2 (C) is the original checkpoint the update fails to cover
        Err(UpdateError::NotConnected(2)),
    );
}
|
||||
|
||||
/// Transient invalidation:
/// ```
///        | 0 | 1 | 2 | 3 | 4 | 5
/// chain  | A       B   C       E
/// update | A       B'  C'  D
/// ```
/// This should succeed and invalidate B,C and E with point of agreement being A.
/// It should also invalidate transactions at height 1.
#[test]
fn transitive_invalidation_applies_to_checkpoints_higher_than_invalidation() {
    let chain1 = chain! {
        checkpoints: [[0, h!("A")], [2, h!("B")], [3, h!("C")], [5, h!("E")]],
        txids: [
            (h!("a"), TxHeight::Confirmed(0)),
            (h!("b1"), TxHeight::Confirmed(1)),
            (h!("b2"), TxHeight::Confirmed(2)),
            (h!("d"), TxHeight::Confirmed(3)),
            (h!("e"), TxHeight::Confirmed(5))
        ]
    };
    let chain2 = chain! {
        checkpoints: [[0, h!("A")], [2, h!("B'")], [3, h!("C'")], [4, h!("D")]],
        txids: [(h!("b1"), TxHeight::Confirmed(4)), (h!("b2"), TxHeight::Confirmed(3))]
    };

    assert_eq!(
        chain1.determine_changeset(&chain2),
        Ok(changeset! {
            checkpoints: [
                (2, Some(h!("B'"))),
                (3, Some(h!("C'"))),
                (4, Some(h!("D"))),
                // E sits above the invalidation point and the update does not
                // re-assert it, so it is removed
                (5, None)
            ],
            txids: [
                // explicitly repositioned by the update
                (h!("b1"), Some(TxHeight::Confirmed(4))),
                (h!("b2"), Some(TxHeight::Confirmed(3))),
                // were in invalidated blocks and not re-confirmed: fall back
                // to the mempool
                (h!("d"), Some(TxHeight::Unconfirmed)),
                (h!("e"), Some(TxHeight::Unconfirmed))
            ]
        },)
    );
}
|
||||
|
||||
/// Transient invalidation:
/// ```
///        | 0 | 1 | 2 | 3 | 4
/// chain  |     B   C       E
/// update |     B'  C'  D
/// ```
///
/// This should succeed and invalidate B, C and E with no point of agreement
#[test]
fn transitive_invalidation_applies_to_checkpoints_higher_than_invalidation_no_point_of_agreement() {
    let chain1 = chain!([1, h!("B")], [2, h!("C")], [4, h!("E")]);
    let chain2 = chain!([1, h!("B'")], [2, h!("C'")], [3, h!("D")]);

    assert_eq!(
        chain1.determine_changeset(&chain2),
        Ok(changeset! {
            checkpoints: [
                (1, Some(h!("B'"))),
                (2, Some(h!("C'"))),
                (3, Some(h!("D"))),
                // E is above the invalidation and not re-asserted: removed
                (4, None)
            ]
        },)
    )
}
|
||||
|
||||
/// Transient invalidation:
/// ```
///        | 0 | 1 | 2 | 3 | 4
/// chain  | A   B   C       E
/// update |     B'  C'  D
/// ```
///
/// This should fail since although it tells us that B and C are invalid it doesn't tell us whether
/// A was invalid.
#[test]
fn invalidation_but_no_connection() {
    let chain1 = chain!([0, h!("A")], [1, h!("B")], [2, h!("C")], [4, h!("E")]);
    let chain2 = chain!([1, h!("B'")], [2, h!("C'")], [3, h!("D")]);

    assert_eq!(
        chain1.determine_changeset(&chain2),
        Err(UpdateError::NotConnected(0))
    )
}
|
||||
|
||||
// Setting a checkpoint limit prunes existing checkpoints down to the limit,
// and the limit keeps being enforced as further checkpoints are inserted or
// applied via a changeset.
#[test]
fn checkpoint_limit_is_respected() {
    let mut chain = SparseChain::default();
    let _ = chain
        .apply_update(chain!(
            [1, h!("A")],
            [2, h!("B")],
            [3, h!("C")],
            [4, h!("D")],
            [5, h!("E")]
        ))
        .unwrap();
    assert_eq!(chain.checkpoints().len(), 5);

    // shrinking the limit prunes immediately
    chain.set_checkpoint_limit(Some(4));
    assert_eq!(chain.checkpoints().len(), 4);

    // a direct insert keeps the count pinned at the limit
    let new_block = BlockId {
        height: 6,
        hash: h!("F"),
    };
    let _ = chain.insert_checkpoint(new_block).unwrap();
    assert_eq!(chain.checkpoints().len(), 4);

    // applying a changeset also keeps the count pinned at the limit
    let changeset = chain.determine_changeset(&chain!([6, h!("F")], [7, h!("G")]));
    assert_eq!(changeset, Ok(changeset!(checkpoints: [(7, Some(h!("G")))])));
    chain.apply_changeset(changeset.unwrap());
    assert_eq!(chain.checkpoints().len(), 4);
}
|
||||
|
||||
// `range_txids_by_height` with every bound shape: the four fixture txs are
// two per height, ordered within a height by the `u32` tie-breaker.
#[test]
fn range_txids_by_height() {
    let mut chain = chain!(index: TestIndex, checkpoints: [[1, h!("block 1")], [2, h!("block 2")]]);

    let txids: [(TestIndex, Txid); 4] = [
        (
            TestIndex(TxHeight::Confirmed(1), u32::MIN),
            Txid::from_inner([0x00; 32]),
        ),
        (
            TestIndex(TxHeight::Confirmed(1), u32::MAX),
            Txid::from_inner([0xfe; 32]),
        ),
        (
            TestIndex(TxHeight::Confirmed(2), u32::MIN),
            Txid::from_inner([0x01; 32]),
        ),
        (
            TestIndex(TxHeight::Confirmed(2), u32::MAX),
            Txid::from_inner([0xff; 32]),
        ),
    ];

    // populate chain with txids
    for (index, txid) in txids {
        let _ = chain.insert_tx(txid, index).expect("should succeed");
    }

    // inclusive start
    assert_eq!(
        chain
            .range_txids_by_height(TxHeight::Confirmed(1)..)
            .collect::<Vec<_>>(),
        txids.iter().collect::<Vec<_>>(),
    );

    // exclusive start
    assert_eq!(
        chain
            .range_txids_by_height((Bound::Excluded(TxHeight::Confirmed(1)), Bound::Unbounded,))
            .collect::<Vec<_>>(),
        txids[2..].iter().collect::<Vec<_>>(),
    );

    // inclusive end
    // NOTE(review): `txids[..4]` is the whole fixture, so this case does not
    // distinguish an inclusive end from an unbounded one — consider a fixture
    // with a third height to tighten it
    assert_eq!(
        chain
            .range_txids_by_height((Bound::Unbounded, Bound::Included(TxHeight::Confirmed(2))))
            .collect::<Vec<_>>(),
        txids[..4].iter().collect::<Vec<_>>(),
    );

    // exclusive end
    assert_eq!(
        chain
            .range_txids_by_height(..TxHeight::Confirmed(2))
            .collect::<Vec<_>>(),
        txids[..2].iter().collect::<Vec<_>>(),
    );
}
|
||||
|
||||
// `range_txids_by_position` with full `TestIndex` positions: unlike the
// by-height ranges, bounds here resolve down to the `u32` tie-breaker, so
// adjacent positions within one height can be split.
#[test]
fn range_txids_by_index() {
    let mut chain = chain!(index: TestIndex, checkpoints: [[1, h!("block 1")],[2, h!("block 2")]]);

    let txids: [(TestIndex, Txid); 4] = [
        (TestIndex(TxHeight::Confirmed(1), u32::MIN), h!("tx 1 min")),
        (TestIndex(TxHeight::Confirmed(1), u32::MAX), h!("tx 1 max")),
        (TestIndex(TxHeight::Confirmed(2), u32::MIN), h!("tx 2 min")),
        (TestIndex(TxHeight::Confirmed(2), u32::MAX), h!("tx 2 max")),
    ];

    // populate chain with txids
    for (index, txid) in txids {
        let _ = chain.insert_tx(txid, index).expect("should succeed");
    }

    // inclusive start
    assert_eq!(
        chain
            .range_txids_by_position(TestIndex(TxHeight::Confirmed(1), u32::MIN)..)
            .collect::<Vec<_>>(),
        txids.iter().collect::<Vec<_>>(),
    );
    assert_eq!(
        chain
            .range_txids_by_position(TestIndex(TxHeight::Confirmed(1), u32::MAX)..)
            .collect::<Vec<_>>(),
        txids[1..].iter().collect::<Vec<_>>(),
    );

    // exclusive start
    assert_eq!(
        chain
            .range_txids_by_position((
                Bound::Excluded(TestIndex(TxHeight::Confirmed(1), u32::MIN)),
                Bound::Unbounded
            ))
            .collect::<Vec<_>>(),
        txids[1..].iter().collect::<Vec<_>>(),
    );
    assert_eq!(
        chain
            .range_txids_by_position((
                Bound::Excluded(TestIndex(TxHeight::Confirmed(1), u32::MAX)),
                Bound::Unbounded
            ))
            .collect::<Vec<_>>(),
        txids[2..].iter().collect::<Vec<_>>(),
    );

    // inclusive end
    assert_eq!(
        chain
            .range_txids_by_position((
                Bound::Unbounded,
                Bound::Included(TestIndex(TxHeight::Confirmed(2), u32::MIN))
            ))
            .collect::<Vec<_>>(),
        txids[..3].iter().collect::<Vec<_>>(),
    );
    assert_eq!(
        chain
            .range_txids_by_position((
                Bound::Unbounded,
                Bound::Included(TestIndex(TxHeight::Confirmed(2), u32::MAX))
            ))
            .collect::<Vec<_>>(),
        txids[..4].iter().collect::<Vec<_>>(),
    );

    // exclusive end
    assert_eq!(
        chain
            .range_txids_by_position(..TestIndex(TxHeight::Confirmed(2), u32::MIN))
            .collect::<Vec<_>>(),
        txids[..2].iter().collect::<Vec<_>>(),
    );
    assert_eq!(
        chain
            .range_txids_by_position(..TestIndex(TxHeight::Confirmed(2), u32::MAX))
            .collect::<Vec<_>>(),
        txids[..3].iter().collect::<Vec<_>>(),
    );
}
|
||||
|
||||
// `range_txids` over 100 unconfirmed txs: since every tx shares the same
// position (`Unconfirmed`), ranging on `(position, txid)` must agree with
// ranging the plain txid set for every bound shape.
#[test]
fn range_txids() {
    let mut chain = SparseChain::default();

    let txids = (0..100)
        .map(|v| Txid::hash(v.to_string().as_bytes()))
        .collect::<BTreeSet<Txid>>();

    // populate chain
    for txid in &txids {
        let _ = chain
            .insert_tx(*txid, TxHeight::Unconfirmed)
            .expect("should succeed");
    }

    for txid in &txids {
        assert_eq!(
            chain
                .range_txids((TxHeight::Unconfirmed, *txid)..)
                .map(|(_, txid)| txid)
                .collect::<Vec<_>>(),
            txids.range(*txid..).collect::<Vec<_>>(),
            "range with inclusive start should succeed"
        );

        assert_eq!(
            chain
                .range_txids((
                    Bound::Excluded((TxHeight::Unconfirmed, *txid)),
                    Bound::Unbounded,
                ))
                .map(|(_, txid)| txid)
                .collect::<Vec<_>>(),
            txids
                .range((Bound::Excluded(*txid), Bound::Unbounded,))
                .collect::<Vec<_>>(),
            "range with exclusive start should succeed"
        );

        assert_eq!(
            chain
                .range_txids(..(TxHeight::Unconfirmed, *txid))
                .map(|(_, txid)| txid)
                .collect::<Vec<_>>(),
            txids.range(..*txid).collect::<Vec<_>>(),
            "range with exclusive end should succeed"
        );

        assert_eq!(
            chain
                .range_txids((
                    Bound::Included((TxHeight::Unconfirmed, *txid)),
                    Bound::Unbounded,
                ))
                .map(|(_, txid)| txid)
                .collect::<Vec<_>>(),
            txids
                .range((Bound::Included(*txid), Bound::Unbounded,))
                .collect::<Vec<_>>(),
            "range with inclusive end should succeed"
        );
    }
}
|
||||
|
||||
// When a reorg (B -> B') invalidates blocks 1 and 2, txs confirmed in those
// blocks fall back to the mempool; txs below the reorg point are untouched.
#[test]
fn invalidated_txs_move_to_unconfirmed() {
    let chain1 = chain! {
        checkpoints: [[0, h!("A")], [1, h!("B")], [2, h!("C")]],
        txids: [
            (h!("a"), TxHeight::Confirmed(0)),
            (h!("b"), TxHeight::Confirmed(1)),
            (h!("c"), TxHeight::Confirmed(2)),
            (h!("d"), TxHeight::Unconfirmed)
        ]
    };

    let chain2 = chain!([0, h!("A")], [1, h!("B'")]);

    assert_eq!(
        chain1.determine_changeset(&chain2),
        Ok(changeset! {
            checkpoints: [
                (1, Some(h!("B'"))),
                // block 2 is above the invalidation and not re-asserted
                (2, None)
            ],
            txids: [
                (h!("b"), Some(TxHeight::Unconfirmed)),
                (h!("c"), Some(TxHeight::Unconfirmed))
            ]
        },)
    );
}
|
||||
|
||||
#[test]
fn change_tx_position_from_unconfirmed_to_confirmed() {
    let mut chain = SparseChain::<TxHeight>::default();
    let txid = h!("txid");

    // Insert the tx as unconfirmed first.
    let _ = chain.insert_tx(txid, TxHeight::Unconfirmed).unwrap();

    assert_eq!(chain.tx_position(txid), Some(&TxHeight::Unconfirmed));
    // A checkpoint at height 0 is needed before a tx can be confirmed there.
    let _ = chain
        .insert_checkpoint(BlockId {
            height: 0,
            hash: h!("0"),
        })
        .unwrap();
    // Re-inserting the same txid with a confirmed height moves its position.
    let _ = chain.insert_tx(txid, TxHeight::Confirmed(0)).unwrap();

    assert_eq!(chain.tx_position(txid), Some(&TxHeight::Confirmed(0)));
}
|
||||
100
crates/chain/tests/test_spk_txout_index.rs
Normal file
100
crates/chain/tests/test_spk_txout_index.rs
Normal file
@@ -0,0 +1,100 @@
|
||||
use bdk_chain::SpkTxOutIndex;
|
||||
use bitcoin::{hashes::hex::FromHex, OutPoint, PackedLockTime, Script, Transaction, TxIn, TxOut};
|
||||
|
||||
#[test]
fn spk_txout_sent_and_received() {
    let spk1 = Script::from_hex("001404f1e52ce2bab3423c6a8c63b7cd730d8f12542c").unwrap();
    let spk2 = Script::from_hex("00142b57404ae14f08c3a0c903feb2af7830605eb00f").unwrap();

    let mut index = SpkTxOutIndex::default();
    index.insert_spk(0, spk1.clone());
    index.insert_spk(1, spk2.clone());

    // tx1 pays 42_000 to the indexed spk1 and spends nothing we track.
    let tx1 = Transaction {
        version: 0x02,
        lock_time: PackedLockTime(0),
        input: vec![],
        output: vec![TxOut {
            value: 42_000,
            script_pubkey: spk1.clone(),
        }],
    };

    // sent = 0 (no tracked inputs), received = 42_000.
    assert_eq!(index.sent_and_received(&tx1), (0, 42_000));
    assert_eq!(index.net_value(&tx1), 42_000);
    index.scan(&tx1);
    assert_eq!(
        index.sent_and_received(&tx1),
        (0, 42_000),
        "shouldn't change after scanning"
    );

    // tx2 spends tx1's 42_000 output (counts as "sent") and pays
    // 20_000 + 30_000 back to indexed spks (counts as "received").
    let tx2 = Transaction {
        version: 0x1,
        lock_time: PackedLockTime(0),
        input: vec![TxIn {
            previous_output: OutPoint {
                txid: tx1.txid(),
                vout: 0,
            },
            ..Default::default()
        }],
        output: vec![
            TxOut {
                value: 20_000,
                script_pubkey: spk2,
            },
            TxOut {
                script_pubkey: spk1,
                value: 30_000,
            },
        ],
    };

    // net_value = received - sent = 50_000 - 42_000 = 8_000.
    assert_eq!(index.sent_and_received(&tx2), (42_000, 50_000));
    assert_eq!(index.net_value(&tx2), 8_000);
}
|
||||
|
||||
#[test]
fn mark_used() {
    let spk1 = Script::from_hex("001404f1e52ce2bab3423c6a8c63b7cd730d8f12542c").unwrap();
    let spk2 = Script::from_hex("00142b57404ae14f08c3a0c903feb2af7830605eb00f").unwrap();

    let mut spk_index = SpkTxOutIndex::default();
    spk_index.insert_spk(1, spk1.clone());
    spk_index.insert_spk(2, spk2);

    // Manual mark/unmark toggles the flag while no tx references the spk.
    assert!(!spk_index.is_used(&1));
    spk_index.mark_used(&1);
    assert!(spk_index.is_used(&1));
    spk_index.unmark_used(&1);
    assert!(!spk_index.is_used(&1));
    spk_index.mark_used(&1);
    assert!(spk_index.is_used(&1));

    // A tx paying to spk1 makes index 1 "used" by observation.
    let tx1 = Transaction {
        version: 0x02,
        lock_time: PackedLockTime(0),
        input: vec![],
        output: vec![TxOut {
            value: 42_000,
            script_pubkey: spk1,
        }],
    };

    spk_index.scan(&tx1);
    spk_index.unmark_used(&1);
    assert!(
        spk_index.is_used(&1),
        "even though we unmark_used it doesn't matter because there was a tx scanned that used it"
    );
}
|
||||
|
||||
#[test]
fn unmark_used_does_not_result_in_invalid_representation() {
    let mut spk_index = SpkTxOutIndex::default();
    // Unmarking indices that were never inserted must be a no-op returning
    // `false`, and must not conjure phantom entries into `unused_spks`.
    assert!(!spk_index.unmark_used(&0));
    assert!(!spk_index.unmark_used(&1));
    assert!(!spk_index.unmark_used(&2));
    assert!(spk_index.unused_spks(..).collect::<Vec<_>>().is_empty());
}
|
||||
512
crates/chain/tests/test_tx_graph.rs
Normal file
512
crates/chain/tests/test_tx_graph.rs
Normal file
@@ -0,0 +1,512 @@
|
||||
#[macro_use]
|
||||
mod common;
|
||||
use bdk_chain::{
|
||||
collections::*,
|
||||
tx_graph::{Additions, TxGraph},
|
||||
};
|
||||
use bitcoin::{hashes::Hash, OutPoint, PackedLockTime, Script, Transaction, TxIn, TxOut, Txid};
|
||||
use core::iter;
|
||||
|
||||
#[test]
fn insert_txouts() {
    // Two floating txouts that seed the original graph.
    let original_ops = [
        (
            OutPoint::new(h!("tx1"), 1),
            TxOut {
                value: 10_000,
                script_pubkey: Script::new(),
            },
        ),
        (
            OutPoint::new(h!("tx1"), 2),
            TxOut {
                value: 20_000,
                script_pubkey: Script::new(),
            },
        ),
    ];

    // One new floating txout introduced by the update graph.
    let update_ops = [(
        OutPoint::new(h!("tx2"), 0),
        TxOut {
            value: 20_000,
            script_pubkey: Script::new(),
        },
    )];

    let mut graph = {
        let mut graph = TxGraph::default();
        for (outpoint, txout) in &original_ops {
            // Each insertion reports exactly the inserted txout as an addition.
            assert_eq!(
                graph.insert_txout(*outpoint, txout.clone()),
                Additions {
                    txout: [(*outpoint, txout.clone())].into(),
                    ..Default::default()
                }
            );
        }
        graph
    };

    let update = {
        let mut graph = TxGraph::default();
        for (outpoint, txout) in &update_ops {
            assert_eq!(
                graph.insert_txout(*outpoint, txout.clone()),
                Additions {
                    txout: [(*outpoint, txout.clone())].into(),
                    ..Default::default()
                }
            );
        }
        graph
    };

    // Only the update's txout is new relative to `graph`.
    let additions = graph.determine_additions(&update);

    assert_eq!(
        additions,
        Additions {
            tx: [].into(),
            txout: update_ops.into(),
        }
    );

    graph.apply_additions(additions);
    // 3 floating txouts across 2 partial (txout-only) transactions; no full txs.
    assert_eq!(graph.all_txouts().count(), 3);
    assert_eq!(graph.full_transactions().count(), 0);
    assert_eq!(graph.partial_transactions().count(), 2);
}
|
||||
|
||||
#[test]
fn insert_tx_graph_doesnt_count_coinbase_as_spent() {
    // A coinbase tx's sole input references the null outpoint; it does not
    // spend a real previous output.
    let tx = Transaction {
        version: 0x01,
        lock_time: PackedLockTime(0),
        input: vec![TxIn {
            previous_output: OutPoint::null(),
            ..Default::default()
        }],
        output: vec![],
    };

    let mut graph = TxGraph::default();
    let _ = graph.insert_tx(tx);
    // The null outpoint must not be recorded as spent, and the all-zero txid
    // must have no outspends.
    assert!(graph.outspends(OutPoint::null()).is_empty());
    assert!(graph.tx_outspends(Txid::all_zeros()).next().is_none());
}
|
||||
|
||||
#[test]
fn insert_tx_graph_keeps_track_of_spend() {
    let tx1 = Transaction {
        version: 0x01,
        lock_time: PackedLockTime(0),
        input: vec![],
        output: vec![TxOut::default()],
    };

    let op = OutPoint {
        txid: tx1.txid(),
        vout: 0,
    };

    // tx2 spends tx1's only output.
    let tx2 = Transaction {
        version: 0x01,
        lock_time: PackedLockTime(0),
        input: vec![TxIn {
            previous_output: op,
            ..Default::default()
        }],
        output: vec![],
    };

    let mut graph1 = TxGraph::default();
    let mut graph2 = TxGraph::default();

    // insert in different order — the spend index must be order-independent
    let _ = graph1.insert_tx(tx1.clone());
    let _ = graph1.insert_tx(tx2.clone());

    let _ = graph2.insert_tx(tx2.clone());
    let _ = graph2.insert_tx(tx1);

    assert_eq!(
        graph1.outspends(op),
        &iter::once(tx2.txid()).collect::<HashSet<_>>()
    );
    assert_eq!(graph2.outspends(op), graph1.outspends(op));
}
|
||||
|
||||
#[test]
fn insert_tx_can_retrieve_full_tx_from_graph() {
    let tx = Transaction {
        version: 0x01,
        lock_time: PackedLockTime(0),
        input: vec![TxIn {
            previous_output: OutPoint::null(),
            ..Default::default()
        }],
        output: vec![TxOut::default()],
    };

    let mut graph = TxGraph::default();
    let _ = graph.insert_tx(tx.clone());
    // A fully-inserted tx is retrievable by txid, by reference.
    assert_eq!(graph.get_tx(tx.txid()), Some(&tx));
}
|
||||
|
||||
#[test]
fn insert_tx_displaces_txouts() {
    let mut tx_graph = TxGraph::default();
    let tx = Transaction {
        version: 0x01,
        lock_time: PackedLockTime(0),
        input: vec![],
        output: vec![TxOut {
            value: 42_000,
            script_pubkey: Script::default(),
        }],
    };

    // Insert two (conflicting) floating txouts at the same outpoint the full
    // tx will later occupy.
    let _ = tx_graph.insert_txout(
        OutPoint {
            txid: tx.txid(),
            vout: 0,
        },
        TxOut {
            value: 1_337_000,
            script_pubkey: Script::default(),
        },
    );

    let _ = tx_graph.insert_txout(
        OutPoint {
            txid: tx.txid(),
            vout: 0,
        },
        TxOut {
            value: 1_000_000_000,
            script_pubkey: Script::default(),
        },
    );

    // Inserting the full tx must displace the floating txouts...
    let _additions = tx_graph.insert_tx(tx.clone());

    // ...so the outpoint now reports the full tx's value (42_000), not the
    // previously inserted floating values.
    assert_eq!(
        tx_graph
            .get_txout(OutPoint {
                txid: tx.txid(),
                vout: 0
            })
            .unwrap()
            .value,
        42_000
    );
    // And a vout the tx does not have stays absent.
    assert_eq!(
        tx_graph.get_txout(OutPoint {
            txid: tx.txid(),
            vout: 1
        }),
        None
    );
}
|
||||
|
||||
#[test]
fn insert_txout_does_not_displace_tx() {
    let mut tx_graph = TxGraph::default();
    let tx = Transaction {
        version: 0x01,
        lock_time: PackedLockTime(0),
        input: vec![],
        output: vec![TxOut {
            value: 42_000,
            script_pubkey: Script::default(),
        }],
    };

    // Full tx goes in first this time.
    let _additions = tx_graph.insert_tx(tx.clone());

    // Later floating-txout insertions at the same outpoint must NOT override
    // the full tx's data (inverse of `insert_tx_displaces_txouts`).
    let _ = tx_graph.insert_txout(
        OutPoint {
            txid: tx.txid(),
            vout: 0,
        },
        TxOut {
            value: 1_337_000,
            script_pubkey: Script::default(),
        },
    );

    let _ = tx_graph.insert_txout(
        OutPoint {
            txid: tx.txid(),
            vout: 0,
        },
        TxOut {
            value: 1_000_000_000,
            script_pubkey: Script::default(),
        },
    );

    // The full tx's value wins.
    assert_eq!(
        tx_graph
            .get_txout(OutPoint {
                txid: tx.txid(),
                vout: 0
            })
            .unwrap()
            .value,
        42_000
    );
    assert_eq!(
        tx_graph.get_txout(OutPoint {
            txid: tx.txid(),
            vout: 1
        }),
        None
    );
}
|
||||
|
||||
#[test]
fn test_calculate_fee() {
    let mut graph = TxGraph::default();
    // Two full txs providing inputs of 100 and 200 sats.
    let intx1 = Transaction {
        version: 0x01,
        lock_time: PackedLockTime(0),
        input: vec![],
        output: vec![TxOut {
            value: 100,
            ..Default::default()
        }],
    };
    let intx2 = Transaction {
        version: 0x02,
        lock_time: PackedLockTime(0),
        input: vec![],
        output: vec![TxOut {
            value: 200,
            ..Default::default()
        }],
    };

    // A floating txout (no containing full tx in the graph) worth 300 sats.
    let intxout1 = (
        OutPoint {
            txid: h!("dangling output"),
            vout: 0,
        },
        TxOut {
            value: 300,
            ..Default::default()
        },
    );

    let _ = graph.insert_tx(intx1.clone());
    let _ = graph.insert_tx(intx2.clone());
    let _ = graph.insert_txout(intxout1.0, intxout1.1);

    // Spends all three known inputs (100 + 200 + 300 = 600) and outputs 500.
    let mut tx = Transaction {
        version: 0x01,
        lock_time: PackedLockTime(0),
        input: vec![
            TxIn {
                previous_output: OutPoint {
                    txid: intx1.txid(),
                    vout: 0,
                },
                ..Default::default()
            },
            TxIn {
                previous_output: OutPoint {
                    txid: intx2.txid(),
                    vout: 0,
                },
                ..Default::default()
            },
            TxIn {
                previous_output: intxout1.0,
                ..Default::default()
            },
        ],
        output: vec![TxOut {
            value: 500,
            ..Default::default()
        }],
    };

    // fee = 600 - 500 = 100
    assert_eq!(graph.calculate_fee(&tx), Some(100));

    // Drop the 300-sat input: inputs now 300, outputs 500.
    tx.input.remove(2);

    // fee would be negative
    assert_eq!(graph.calculate_fee(&tx), Some(-200));

    // If we have an unknown outpoint, fee should return None.
    tx.input.push(TxIn {
        previous_output: OutPoint {
            txid: h!("unknown_txid"),
            vout: 0,
        },
        ..Default::default()
    });
    assert_eq!(graph.calculate_fee(&tx), None);
}
|
||||
|
||||
#[test]
fn test_calculate_fee_on_coinbase() {
    // Coinbase txs have no real inputs, so their "fee" is defined as 0 even
    // though the graph knows nothing about the (null) previous output.
    let tx = Transaction {
        version: 0x01,
        lock_time: PackedLockTime(0),
        input: vec![TxIn {
            previous_output: OutPoint::null(),
            ..Default::default()
        }],
        output: vec![TxOut::default()],
    };

    let graph = TxGraph::default();

    assert_eq!(graph.calculate_fee(&tx), Some(0));
}
|
||||
|
||||
#[test]
fn test_conflicting_descendants() {
    let previous_output = OutPoint::new(h!("op"), 2);

    // tx_a spends previous_output
    let tx_a = Transaction {
        input: vec![TxIn {
            previous_output,
            ..TxIn::default()
        }],
        output: vec![TxOut::default()],
        ..common::new_tx(0)
    };

    // tx_a2 spends previous_output and conflicts with tx_a
    let tx_a2 = Transaction {
        input: vec![TxIn {
            previous_output,
            ..TxIn::default()
        }],
        output: vec![TxOut::default(), TxOut::default()],
        ..common::new_tx(1)
    };

    // tx_b spends tx_a
    let tx_b = Transaction {
        input: vec![TxIn {
            previous_output: OutPoint::new(tx_a.txid(), 0),
            ..TxIn::default()
        }],
        output: vec![TxOut::default()],
        ..common::new_tx(2)
    };

    let txid_a = tx_a.txid();
    let txid_b = tx_b.txid();

    let mut graph = TxGraph::default();
    let _ = graph.insert_tx(tx_a);
    let _ = graph.insert_tx(tx_b);

    // Walking conflicts of tx_a2 must yield the direct conflict (tx_a, depth
    // 0) and its descendant (tx_b, depth 1), which is invalidated with it.
    assert_eq!(
        graph
            .walk_conflicts(&tx_a2, |depth, txid| Some((depth, txid)))
            .collect::<Vec<_>>(),
        vec![(0_usize, txid_a), (1_usize, txid_b),],
    );
}
|
||||
|
||||
#[test]
fn test_descendants_no_repeat() {
    // Diamond-ish DAG: tx_a -> txs_b[0..3] -> txs_c[0..2] -> tx_d -> tx_e.
    // tx_d spends two of tx_a's descendants, so a naive walk would visit the
    // tx_d/tx_e subtree twice — this test pins that each descendant is
    // reported exactly once.
    let tx_a = Transaction {
        output: vec![TxOut::default(), TxOut::default(), TxOut::default()],
        ..common::new_tx(0)
    };

    // One child per output of tx_a.
    let txs_b = (0..3)
        .map(|vout| Transaction {
            input: vec![TxIn {
                previous_output: OutPoint::new(tx_a.txid(), vout),
                ..TxIn::default()
            }],
            output: vec![TxOut::default()],
            ..common::new_tx(1)
        })
        .collect::<Vec<_>>();

    // Grandchildren spending txs_b[0] and txs_b[1].
    let txs_c = (0..2)
        .map(|vout| Transaction {
            input: vec![TxIn {
                previous_output: OutPoint::new(txs_b[vout as usize].txid(), vout),
                ..TxIn::default()
            }],
            output: vec![TxOut::default()],
            ..common::new_tx(2)
        })
        .collect::<Vec<_>>();

    // tx_d merges both txs_c branches (the repeat hazard).
    let tx_d = Transaction {
        input: vec![
            TxIn {
                previous_output: OutPoint::new(txs_c[0].txid(), 0),
                ..TxIn::default()
            },
            TxIn {
                previous_output: OutPoint::new(txs_c[1].txid(), 0),
                ..TxIn::default()
            },
        ],
        output: vec![TxOut::default()],
        ..common::new_tx(3)
    };

    let tx_e = Transaction {
        input: vec![TxIn {
            previous_output: OutPoint::new(tx_d.txid(), 0),
            ..TxIn::default()
        }],
        output: vec![TxOut::default()],
        ..common::new_tx(4)
    };

    // Unrelated txs spending nonexistent outpoints — must not appear.
    let txs_not_connected = (10..20)
        .map(|v| Transaction {
            input: vec![TxIn {
                previous_output: OutPoint::new(h!("tx_does_not_exist"), v),
                ..TxIn::default()
            }],
            output: vec![TxOut::default()],
            ..common::new_tx(v)
        })
        .collect::<Vec<_>>();

    let mut graph = TxGraph::default();
    let mut expected_txids = BTreeSet::new();

    // these are NOT descendants of `tx_a`
    for tx in txs_not_connected {
        let _ = graph.insert_tx(tx.clone());
    }

    // these are the expected descendants of `tx_a`
    for tx in txs_b
        .iter()
        .chain(&txs_c)
        .chain(core::iter::once(&tx_d))
        .chain(core::iter::once(&tx_e))
    {
        let _ = graph.insert_tx(tx.clone());
        assert!(expected_txids.insert(tx.txid()));
    }

    let descendants = graph
        .walk_descendants(tx_a.txid(), |_, txid| Some(txid))
        .collect::<Vec<_>>();

    // Same count + removable from the expected set = each visited exactly once.
    assert_eq!(descendants.len(), expected_txids.len());

    for txid in descendants {
        assert!(expected_txids.remove(&txid));
    }
    assert!(expected_txids.is_empty());
}
|
||||
16
crates/electrum/Cargo.toml
Normal file
16
crates/electrum/Cargo.toml
Normal file
@@ -0,0 +1,16 @@
|
||||
[package]
|
||||
name = "bdk_electrum"
|
||||
version = "0.2.0"
|
||||
edition = "2021"
|
||||
homepage = "https://bitcoindevkit.org"
|
||||
repository = "https://github.com/bitcoindevkit/bdk"
|
||||
documentation = "https://docs.rs/bdk_electrum"
|
||||
description = "Fetch data from electrum in the form BDK accepts"
|
||||
license = "MIT OR Apache-2.0"
|
||||
readme = "README.md"
|
||||
|
||||
# See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html
|
||||
|
||||
[dependencies]
|
||||
bdk_chain = { path = "../chain", version = "0.4.0", features = ["serde", "miniscript"] }
|
||||
electrum-client = { version = "0.12" }
|
||||
3
crates/electrum/README.md
Normal file
3
crates/electrum/README.md
Normal file
@@ -0,0 +1,3 @@
|
||||
# BDK Electrum
|
||||
|
||||
BDK Electrum client library for updating the keychain tracker.
|
||||
588
crates/electrum/src/lib.rs
Normal file
588
crates/electrum/src/lib.rs
Normal file
@@ -0,0 +1,588 @@
|
||||
//! This crate is used for updating structures of the [`bdk_chain`] crate with data from electrum.
|
||||
//!
|
||||
//! The star of the show is the [`ElectrumExt::scan`] method, which scans for relevant blockchain
|
||||
//! data (via electrum) and outputs an [`ElectrumUpdate`].
|
||||
//!
|
||||
//! An [`ElectrumUpdate`] only includes `txid`s and no full transactions. The caller is responsible
|
||||
//! for obtaining full transactions before applying. This can be done with
|
||||
//! these steps:
|
||||
//!
|
||||
//! 1. Determine which full transactions are missing. The method [`missing_full_txs`] of
|
||||
//! [`ElectrumUpdate`] can be used.
|
||||
//!
|
||||
//! 2. Obtaining the full transactions. To do this via electrum, the method
|
||||
//! [`batch_transaction_get`] can be used.
|
||||
//!
|
||||
//! Refer to [`bdk_electrum_example`] for a complete example.
|
||||
//!
|
||||
//! [`ElectrumExt::scan`]: ElectrumExt::scan
|
||||
//! [`missing_full_txs`]: ElectrumUpdate::missing_full_txs
|
||||
//! [`batch_transaction_get`]: ElectrumApi::batch_transaction_get
|
||||
//! [`bdk_electrum_example`]: https://github.com/LLFourn/bdk_core_staging/tree/master/bdk_electrum_example
|
||||
|
||||
use std::{
|
||||
collections::{BTreeMap, HashMap},
|
||||
fmt::Debug,
|
||||
};
|
||||
|
||||
pub use bdk_chain;
|
||||
use bdk_chain::{
|
||||
bitcoin::{hashes::hex::FromHex, BlockHash, OutPoint, Script, Transaction, Txid},
|
||||
chain_graph::{self, ChainGraph},
|
||||
keychain::KeychainScan,
|
||||
sparse_chain::{self, ChainPosition, SparseChain},
|
||||
tx_graph::TxGraph,
|
||||
BlockId, ConfirmationTime, TxHeight,
|
||||
};
|
||||
pub use electrum_client;
|
||||
use electrum_client::{Client, ElectrumApi, Error};
|
||||
|
||||
/// Trait to extend [`electrum_client::Client`] functionality.
///
/// Refer to [crate-level documentation] for more.
///
/// [crate-level documentation]: crate
pub trait ElectrumExt {
    /// Fetch the latest block height.
    fn get_tip(&self) -> Result<(u32, BlockHash), Error>;

    /// Scan the blockchain (via electrum) for the data specified. This returns a [`ElectrumUpdate`]
    /// which can be transformed into a [`KeychainScan`] after we find all the missing full
    /// transactions.
    ///
    /// - `local_chain`: the most recent block hashes present locally
    /// - `keychain_spks`: keychains that we want to scan transactions for
    /// - `txids`: transactions for which we want the updated [`ChainPosition`]s
    /// - `outpoints`: transactions associated with these outpoints (residing, spending) that we
    ///     want included in the update
    fn scan<K: Ord + Clone>(
        &self,
        local_chain: &BTreeMap<u32, BlockHash>,
        keychain_spks: BTreeMap<K, impl IntoIterator<Item = (u32, Script)>>,
        txids: impl IntoIterator<Item = Txid>,
        outpoints: impl IntoIterator<Item = OutPoint>,
        stop_gap: usize,
        batch_size: usize,
    ) -> Result<ElectrumUpdate<K, TxHeight>, Error>;

    /// Convenience method to call [`scan`] without requiring a keychain.
    ///
    /// [`scan`]: ElectrumExt::scan
    fn scan_without_keychain(
        &self,
        local_chain: &BTreeMap<u32, BlockHash>,
        misc_spks: impl IntoIterator<Item = Script>,
        txids: impl IntoIterator<Item = Txid>,
        outpoints: impl IntoIterator<Item = OutPoint>,
        batch_size: usize,
    ) -> Result<SparseChain, Error> {
        // Wrap the flat spk list as a single `()` keychain with synthetic
        // indices so the keychain-based `scan` can be reused.
        let spk_iter = misc_spks
            .into_iter()
            .enumerate()
            .map(|(i, spk)| (i as u32, spk));

        // `stop_gap` of `usize::MAX` effectively disables gap-limit
        // termination: every provided spk gets scanned.
        self.scan(
            local_chain,
            [((), spk_iter)].into(),
            txids,
            outpoints,
            usize::MAX,
            batch_size,
        )
        .map(|u| u.chain_update)
    }
}
|
||||
|
||||
impl ElectrumExt for Client {
    fn get_tip(&self) -> Result<(u32, BlockHash), Error> {
        // TODO: unsubscribe when added to the client, or is there a better call to use here?
        self.block_headers_subscribe()
            .map(|data| (data.height as u32, data.header.block_hash()))
    }

    fn scan<K: Ord + Clone>(
        &self,
        local_chain: &BTreeMap<u32, BlockHash>,
        keychain_spks: BTreeMap<K, impl IntoIterator<Item = (u32, Script)>>,
        txids: impl IntoIterator<Item = Txid>,
        outpoints: impl IntoIterator<Item = OutPoint>,
        stop_gap: usize,
        batch_size: usize,
    ) -> Result<ElectrumUpdate<K, TxHeight>, Error> {
        // Keep per-keychain spk iterators alive across retry iterations so a
        // reorg-triggered restart resumes from where the iterator left off,
        // re-checking already-scanned spks via `scanned_spks` instead.
        let mut request_spks = keychain_spks
            .into_iter()
            .map(|(k, s)| {
                let iter = s.into_iter();
                (k, iter)
            })
            .collect::<BTreeMap<K, _>>();
        // (keychain, spk index) -> (spk, whether it has any tx history)
        let mut scanned_spks = BTreeMap::<(K, u32), (Script, bool)>::new();

        let txids = txids.into_iter().collect::<Vec<_>>();
        let outpoints = outpoints.into_iter().collect::<Vec<_>>();

        // Retry loop: any detected reorg restarts the whole scan against a
        // freshly prepared update template.
        let update = loop {
            let mut update = prepare_update(self, local_chain)?;

            if !request_spks.is_empty() {
                if !scanned_spks.is_empty() {
                    // Re-scan spks from previous (aborted) iterations first.
                    let mut scanned_spk_iter = scanned_spks
                        .iter()
                        .map(|(i, (spk, _))| (i.clone(), spk.clone()));
                    match populate_with_spks::<K, _, _>(
                        self,
                        &mut update,
                        &mut scanned_spk_iter,
                        stop_gap,
                        batch_size,
                    ) {
                        Err(InternalError::Reorg) => continue,
                        Err(InternalError::ElectrumError(e)) => return Err(e),
                        Ok(mut spks) => scanned_spks.append(&mut spks),
                    };
                }
                for (keychain, keychain_spks) in &mut request_spks {
                    match populate_with_spks::<K, u32, _>(
                        self,
                        &mut update,
                        keychain_spks,
                        stop_gap,
                        batch_size,
                    ) {
                        // NOTE(review): this `continue` targets the inner
                        // `for` loop, not the outer retry `loop`, so on a
                        // reorg the remaining keychains are still scanned
                        // against the stale update. The tip re-check at the
                        // bottom of the retry loop appears to catch this and
                        // restart — confirm this is the intended behavior.
                        Err(InternalError::Reorg) => continue,
                        Err(InternalError::ElectrumError(e)) => return Err(e),
                        Ok(spks) => scanned_spks.extend(
                            spks.into_iter()
                                .map(|(spk_i, spk)| ((keychain.clone(), spk_i), spk)),
                        ),
                    };
                }
            }

            match populate_with_txids(self, &mut update, &mut txids.iter().cloned()) {
                Err(InternalError::Reorg) => continue,
                Err(InternalError::ElectrumError(e)) => return Err(e),
                Ok(_) => {}
            }

            match populate_with_outpoints(self, &mut update, &mut outpoints.iter().cloned()) {
                Err(InternalError::Reorg) => continue,
                Err(InternalError::ElectrumError(e)) => return Err(e),
                Ok(_txs) => { /* [TODO] cache full txs to reduce bandwidth */ }
            }

            // check for reorgs during scan process
            let our_tip = update
                .latest_checkpoint()
                .expect("update must have atleast one checkpoint");
            let server_blockhash = self.block_header(our_tip.height as usize)?.block_hash();
            if our_tip.hash != server_blockhash {
                continue; // reorg
            } else {
                break update;
            }
        };

        // For each keychain, the highest spk index that had tx history.
        let last_active_index = request_spks
            .into_keys()
            .filter_map(|k| {
                scanned_spks
                    .range((k.clone(), u32::MIN)..=(k.clone(), u32::MAX))
                    .rev()
                    .find(|(_, (_, active))| *active)
                    .map(|((_, i), _)| (k, *i))
            })
            .collect::<BTreeMap<_, _>>();

        Ok(ElectrumUpdate {
            chain_update: update,
            last_active_indices: last_active_index,
        })
    }
}
|
||||
|
||||
/// The result of [`ElectrumExt::scan`].
pub struct ElectrumUpdate<K, P> {
    /// The internal [`SparseChain`] update.
    pub chain_update: SparseChain<P>,
    /// The last keychain script pubkey indices, which had transaction histories.
    pub last_active_indices: BTreeMap<K, u32>,
}
|
||||
|
||||
// Manual impl: `#[derive(Default)]` would require `K: Default + P: Default`,
// which the field types (`SparseChain<P>`, `BTreeMap<K, u32>`) do not need.
impl<K, P> Default for ElectrumUpdate<K, P> {
    fn default() -> Self {
        Self {
            chain_update: Default::default(),
            last_active_indices: Default::default(),
        }
    }
}
|
||||
|
||||
// Allow borrowing the contained sparse-chain update wherever an
// `AsRef<SparseChain<P>>` is accepted.
impl<K, P> AsRef<SparseChain<P>> for ElectrumUpdate<K, P> {
    fn as_ref(&self) -> &SparseChain<P> {
        &self.chain_update
    }
}
|
||||
|
||||
impl<K: Ord + Clone + Debug, P: ChainPosition> ElectrumUpdate<K, P> {
    /// Return a list of missing full transactions that are required to [`inflate_update`].
    ///
    /// [`inflate_update`]: bdk_chain::chain_graph::ChainGraph::inflate_update
    pub fn missing_full_txs<G>(&self, graph: G) -> Vec<&Txid>
    where
        G: AsRef<TxGraph>,
    {
        // A txid is "missing" when the update references it but the caller's
        // graph has no full transaction for it.
        self.chain_update
            .txids()
            .filter(|(_, txid)| graph.as_ref().get_tx(*txid).is_none())
            .map(|(_, txid)| txid)
            .collect()
    }

    /// Transform the [`ElectrumUpdate`] into a [`KeychainScan`], which can be applied to a
    /// `tracker`.
    ///
    /// This will fail if there are missing full transactions not provided via `new_txs`.
    pub fn into_keychain_scan<CG>(
        self,
        new_txs: Vec<Transaction>,
        chain_graph: &CG,
    ) -> Result<KeychainScan<K, P>, chain_graph::NewError<P>>
    where
        CG: AsRef<ChainGraph<P>>,
    {
        Ok(KeychainScan {
            update: chain_graph
                .as_ref()
                .inflate_update(self.chain_update, new_txs)?,
            last_active_indices: self.last_active_indices,
        })
    }
}
|
||||
|
||||
impl<K: Ord + Clone + Debug> ElectrumUpdate<K, TxHeight> {
    /// Creates [`ElectrumUpdate<K, ConfirmationTime>`] from [`ElectrumUpdate<K, TxHeight>`].
    pub fn into_confirmation_time_update(
        self,
        client: &electrum_client::Client,
    ) -> Result<ElectrumUpdate<K, ConfirmationTime>, Error> {
        // Collect the heights of all confirmed txs (everything below
        // `TxHeight::Unconfirmed` in the ordering).
        let heights = self
            .chain_update
            .range_txids_by_height(..TxHeight::Unconfirmed)
            .map(|(h, _)| match h {
                TxHeight::Confirmed(h) => *h,
                _ => unreachable!("already filtered out unconfirmed"),
            })
            .collect::<Vec<u32>>();

        // Fetch the block headers for those heights in one batch and map each
        // height to its block time.
        let height_to_time = heights
            .clone()
            .into_iter()
            .zip(
                client
                    .batch_block_header(heights)?
                    .into_iter()
                    .map(|bh| bh.time as u64),
            )
            .collect::<HashMap<u32, u64>>();

        // Rebuild the sparse chain with the same checkpoints but
        // `ConfirmationTime` positions.
        let mut new_update = SparseChain::<ConfirmationTime>::from_checkpoints(
            self.chain_update.range_checkpoints(..),
        );

        for &(tx_height, txid) in self.chain_update.txids() {
            let conf_time = match tx_height {
                TxHeight::Confirmed(height) => ConfirmationTime::Confirmed {
                    height,
                    // Every confirmed height was collected above, so the
                    // lookup cannot miss.
                    time: height_to_time[&height],
                },
                TxHeight::Unconfirmed => ConfirmationTime::Unconfirmed,
            };
            let _ = new_update.insert_tx(txid, conf_time).expect("must insert");
        }

        Ok(ElectrumUpdate {
            chain_update: new_update,
            last_active_indices: self.last_active_indices,
        })
    }
}
|
||||
|
||||
/// Internal scan error: distinguishes a server error (fatal, propagated to
/// the caller) from a detected reorg (non-fatal, triggers a scan restart).
#[derive(Debug)]
enum InternalError {
    ElectrumError(Error),
    Reorg,
}
|
||||
|
||||
// Lets `?` convert electrum client errors into the internal error type.
impl From<electrum_client::Error> for InternalError {
    fn from(value: electrum_client::Error) -> Self {
        Self::ElectrumError(value)
    }
}
|
||||
|
||||
/// Fetch the server's current chain tip as `(height, hash)`.
///
/// NOTE(review): duplicates the body of `ElectrumExt::get_tip` — presumably a
/// free function so the private helpers below need not go through the trait;
/// consider having one delegate to the other.
fn get_tip(client: &Client) -> Result<(u32, BlockHash), Error> {
    // TODO: unsubscribe when added to the client, or is there a better call to use here?
    client
        .block_headers_subscribe()
        .map(|data| (data.height as u32, data.header.block_hash()))
}
|
||||
|
||||
/// Prepare an update sparsechain "template" based on the checkpoints of the `local_chain`.
fn prepare_update(
    client: &Client,
    local_chain: &BTreeMap<u32, BlockHash>,
) -> Result<SparseChain, Error> {
    let mut update = SparseChain::default();

    // Find the local chain block that is still there so our update can connect to the local chain.
    // Walk local checkpoints from highest to lowest, recording the server's
    // current hash at each height, and stop at the first height where local
    // and server agree (the point of agreement).
    for (&existing_height, &existing_hash) in local_chain.iter().rev() {
        // TODO: a batch request may be safer, as a reorg that happens when we are obtaining
        //       `block_header`s will result in inconsistencies
        let current_hash = client.block_header(existing_height as usize)?.block_hash();
        let _ = update
            .insert_checkpoint(BlockId {
                height: existing_height,
                hash: current_hash,
            })
            .expect("This never errors because we are working with a fresh chain");

        if current_hash == existing_hash {
            break;
        }
    }

    // Insert the new tip so new transactions will be accepted into the sparsechain.
    let tip = {
        let (height, hash) = get_tip(client)?;
        BlockId { height, hash }
    };
    if let Err(failure) = update.insert_checkpoint(tip) {
        match failure {
            sparse_chain::InsertCheckpointError::HashNotMatching { .. } => {
                // There has been a re-org before we even begin scanning addresses.
                // Just recursively call (this should never happen).
                return prepare_update(client, local_chain);
            }
        }
    }

    Ok(update)
}
|
||||
|
||||
/// This atrocity is required because electrum thinks a height of 0 means "unconfirmed", but there
/// is such thing as a genesis block.
///
/// We contain an expectation for the genesis coinbase txid to always have a chain position of
/// [`TxHeight::Confirmed(0)`].
fn determine_tx_height(raw_height: i32, tip_height: u32, txid: Txid) -> TxHeight {
    // Special-case the (hard-coded mainnet) genesis coinbase txid, which
    // electrum would otherwise report as height 0 == "unconfirmed".
    if txid
        == Txid::from_hex("4a5e1e4baab89f3a32518a88c31bc87f618f76673e2cc77ab2127b7afdeda33b")
            .expect("must deserialize genesis coinbase txid")
    {
        return TxHeight::Confirmed(0);
    }
    match raw_height {
        // Electrum: 0 = unconfirmed, -1 = unconfirmed with unconfirmed parents.
        h if h <= 0 => {
            debug_assert!(
                h == 0 || h == -1,
                "unexpected height ({}) from electrum server",
                h
            );
            TxHeight::Unconfirmed
        }
        h => {
            let h = h as u32;
            // A height above our recorded tip means the server is ahead of the
            // update's checkpoint; treat as unconfirmed for consistency.
            if h > tip_height {
                TxHeight::Unconfirmed
            } else {
                TxHeight::Confirmed(h)
            }
        }
    }
}
|
||||
|
||||
/// Populates the update [`SparseChain`] with related transactions and associated [`ChainPosition`]s
/// of the provided `outpoints` (this is the tx which contains the outpoint and the one spending the
/// outpoint).
///
/// Unfortunately, this is awkward to implement as electrum does not provide such an API. Instead, we
/// will get the tx history of the outpoint's spk and try to find the containing tx and the
/// spending tx.
///
/// Returns a map of all full transactions fetched while searching (keyed by txid), so callers can
/// reuse them without re-downloading.
fn populate_with_outpoints(
    client: &Client,
    update: &mut SparseChain,
    outpoints: &mut impl Iterator<Item = OutPoint>,
) -> Result<HashMap<Txid, Transaction>, InternalError> {
    let tip = update
        .latest_checkpoint()
        .expect("update must atleast have one checkpoint");

    let mut full_txs = HashMap::new();
    for outpoint in outpoints {
        let txid = outpoint.txid;
        let tx = client.transaction_get(&txid)?;
        debug_assert_eq!(tx.txid(), txid);
        // An out-of-range vout means the outpoint cannot exist; skip it silently.
        let txout = match tx.output.get(outpoint.vout as usize) {
            Some(txout) => txout,
            None => continue,
        };

        // attempt to find the following transactions (alongside their chain positions), and
        // add to our sparsechain `update`:
        let mut has_residing = false; // tx in which the outpoint resides
        let mut has_spending = false; // tx that spends the outpoint
        for res in client.script_get_history(&txout.script_pubkey)? {
            // Both targets found: no need to look at the rest of the spk's history.
            if has_residing && has_spending {
                break;
            }

            if res.tx_hash == txid {
                // Candidate for the residing tx (the tx that contains the outpoint).
                if has_residing {
                    continue;
                }
                has_residing = true;
                full_txs.insert(res.tx_hash, tx.clone());
            } else {
                // Candidate for the spending tx; we must fetch its full body (caching it in
                // `full_txs`) to check whether any input actually spends our outpoint.
                if has_spending {
                    continue;
                }
                let res_tx = match full_txs.get(&res.tx_hash) {
                    Some(tx) => tx,
                    None => {
                        let res_tx = client.transaction_get(&res.tx_hash)?;
                        full_txs.insert(res.tx_hash, res_tx);
                        full_txs.get(&res.tx_hash).expect("just inserted")
                    }
                };
                has_spending = res_tx
                    .input
                    .iter()
                    .any(|txin| txin.previous_output == outpoint);
                // This history entry is unrelated to the outpoint; do not insert it below.
                if !has_spending {
                    continue;
                }
            };

            // Reaching here means `res` is either the residing or the spending tx: record its
            // chain position in the update.
            let tx_height = determine_tx_height(res.height, tip.height, res.tx_hash);

            if let Err(failure) = update.insert_tx(res.tx_hash, tx_height) {
                match failure {
                    sparse_chain::InsertTxError::TxTooHigh { .. } => {
                        unreachable!("we should never encounter this as we ensured height <= tip");
                    }
                    sparse_chain::InsertTxError::TxMovedUnexpectedly { .. } => {
                        // A tx changed position mid-scan: a reorg happened under us.
                        return Err(InternalError::Reorg);
                    }
                }
            }
        }
    }
    Ok(full_txs)
}
|
||||
|
||||
/// Populate an update [`SparseChain`] with transactions (and associated block positions) from
|
||||
/// the given `txids`.
|
||||
fn populate_with_txids(
|
||||
client: &Client,
|
||||
update: &mut SparseChain,
|
||||
txids: &mut impl Iterator<Item = Txid>,
|
||||
) -> Result<(), InternalError> {
|
||||
let tip = update
|
||||
.latest_checkpoint()
|
||||
.expect("update must have atleast one checkpoint");
|
||||
for txid in txids {
|
||||
let tx = match client.transaction_get(&txid) {
|
||||
Ok(tx) => tx,
|
||||
Err(electrum_client::Error::Protocol(_)) => continue,
|
||||
Err(other_err) => return Err(other_err.into()),
|
||||
};
|
||||
|
||||
let spk = tx
|
||||
.output
|
||||
.get(0)
|
||||
.map(|txo| &txo.script_pubkey)
|
||||
.expect("tx must have an output");
|
||||
|
||||
let tx_height = match client
|
||||
.script_get_history(spk)?
|
||||
.into_iter()
|
||||
.find(|r| r.tx_hash == txid)
|
||||
{
|
||||
Some(r) => determine_tx_height(r.height, tip.height, r.tx_hash),
|
||||
None => continue,
|
||||
};
|
||||
|
||||
if let Err(failure) = update.insert_tx(txid, tx_height) {
|
||||
match failure {
|
||||
sparse_chain::InsertTxError::TxTooHigh { .. } => {
|
||||
unreachable!("we should never encounter this as we ensured height <= tip");
|
||||
}
|
||||
sparse_chain::InsertTxError::TxMovedUnexpectedly { .. } => {
|
||||
return Err(InternalError::Reorg);
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
Ok(())
|
||||
}
|
||||
|
||||
/// Populate an update [`SparseChain`] with transactions (and associated block positions) from
/// the transaction history of the provided `spk`s.
///
/// Scanning stops once more than `stop_gap` consecutive spks with no history have been seen, or
/// once `spks` is exhausted. Histories are requested from electrum in batches of `batch_size`.
/// Returns the scanned spks keyed by index, with a flag indicating whether each spk has history.
///
/// NOTE(review): the `K` type parameter appears unused in this signature — presumably kept for
/// symmetry with keychain-aware callers; confirm before removing.
fn populate_with_spks<K, I, S>(
    client: &Client,
    update: &mut SparseChain,
    spks: &mut S,
    stop_gap: usize,
    batch_size: usize,
) -> Result<BTreeMap<I, (Script, bool)>, InternalError>
where
    K: Ord + Clone,
    I: Ord + Clone,
    S: Iterator<Item = (I, Script)>,
{
    // No checkpoint means an empty chain; fall back to height 0 for the confirmation bound.
    let tip = update.latest_checkpoint().map_or(0, |cp| cp.height);
    let mut unused_spk_count = 0_usize; // consecutive spks seen with empty history
    let mut scanned_spks = BTreeMap::new();

    loop {
        // Pull up to `batch_size` spks from the iterator for one batched request.
        let spks = (0..batch_size)
            .map_while(|_| spks.next())
            .collect::<Vec<_>>();
        if spks.is_empty() {
            return Ok(scanned_spks);
        }

        let spk_histories = client.batch_script_get_history(spks.iter().map(|(_, s)| s))?;

        for ((spk_index, spk), spk_history) in spks.into_iter().zip(spk_histories) {
            if spk_history.is_empty() {
                scanned_spks.insert(spk_index, (spk, false));
                unused_spk_count += 1;
                // Strictly greater: `stop_gap` empty spks in a row are tolerated; one more ends
                // the scan.
                if unused_spk_count > stop_gap {
                    return Ok(scanned_spks);
                }
                continue;
            } else {
                scanned_spks.insert(spk_index, (spk, true));
                // Any used spk resets the gap counter.
                unused_spk_count = 0;
            }

            for tx in spk_history {
                let tx_height = determine_tx_height(tx.height, tip, tx.tx_hash);

                if let Err(failure) = update.insert_tx(tx.tx_hash, tx_height) {
                    match failure {
                        sparse_chain::InsertTxError::TxTooHigh { .. } => {
                            unreachable!(
                                "we should never encounter this as we ensured height <= tip"
                            );
                        }
                        sparse_chain::InsertTxError::TxMovedUnexpectedly { .. } => {
                            // A tx changed position mid-scan: a reorg happened under us.
                            return Err(InternalError::Reorg);
                        }
                    }
                }
            }
        }
    }
}
|
||||
24
crates/esplora/Cargo.toml
Normal file
24
crates/esplora/Cargo.toml
Normal file
@@ -0,0 +1,24 @@
|
||||
[package]
|
||||
name = "bdk_esplora"
|
||||
version = "0.2.0"
|
||||
edition = "2021"
|
||||
homepage = "https://bitcoindevkit.org"
|
||||
repository = "https://github.com/bitcoindevkit/bdk"
|
||||
documentation = "https://docs.rs/bdk_esplora"
|
||||
description = "Fetch data from esplora in the form that bdk_chain accepts"
|
||||
license = "MIT OR Apache-2.0"
|
||||
readme = "README.md"
|
||||
|
||||
# See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html
|
||||
|
||||
[dependencies]
|
||||
bdk_chain = { path = "../chain", version = "0.4.0", features = ["serde", "miniscript"] }
|
||||
esplora-client = { version = "0.3", default-features = false }
|
||||
async-trait = { version = "0.1.66", optional = true }
|
||||
futures = { version = "0.3.26", optional = true }
|
||||
|
||||
[features]
|
||||
default = ["async-https", "blocking"]
|
||||
async = ["async-trait", "futures", "esplora-client/async"]
|
||||
async-https = ["async", "esplora-client/async-https"]
|
||||
blocking = ["esplora-client/blocking"]
|
||||
33
crates/esplora/README.md
Normal file
33
crates/esplora/README.md
Normal file
@@ -0,0 +1,33 @@
|
||||
# BDK Esplora
|
||||
|
||||
BDK Esplora extends [`esplora_client`](crate::esplora_client) to update [`bdk_chain`] structures
|
||||
from an Esplora server.
|
||||
|
||||
## Usage
|
||||
|
||||
There are two versions of the extension trait (blocking and async).
|
||||
|
||||
For blocking-only:
|
||||
```toml
|
||||
bdk_esplora = { version = "0.1", features = ["blocking"] }
|
||||
```
|
||||
|
||||
For async-only:
|
||||
```toml
|
||||
bdk_esplora = { version = "0.1", features = ["async"] }
|
||||
```
|
||||
|
||||
For async-only (with https):
|
||||
```toml
|
||||
bdk_esplora = { version = "0.1", features = ["async-https"] }
|
||||
```
|
||||
|
||||
To use the extension traits:
|
||||
```rust
|
||||
// for blocking
|
||||
use bdk_esplora::EsploraExt;
|
||||
// for async
|
||||
use bdk_esplora::EsploraAsyncExt;
|
||||
```
|
||||
|
||||
For full examples, refer to [`example-crates/wallet_esplora`](https://github.com/bitcoindevkit/bdk/tree/master/example-crates/wallet_esplora) (blocking) and [`example-crates/wallet_esplora_async`](https://github.com/bitcoindevkit/bdk/tree/master/example-crates/wallet_esplora_async).
|
||||
316
crates/esplora/src/async_ext.rs
Normal file
316
crates/esplora/src/async_ext.rs
Normal file
@@ -0,0 +1,316 @@
|
||||
use std::collections::BTreeMap;
|
||||
|
||||
use async_trait::async_trait;
|
||||
use bdk_chain::{
|
||||
bitcoin::{BlockHash, OutPoint, Script, Txid},
|
||||
chain_graph::ChainGraph,
|
||||
keychain::KeychainScan,
|
||||
sparse_chain, BlockId, ConfirmationTime,
|
||||
};
|
||||
use esplora_client::{Error, OutputStatus};
|
||||
use futures::stream::{FuturesOrdered, TryStreamExt};
|
||||
|
||||
use crate::map_confirmation_time;
|
||||
|
||||
/// Trait to extend [`esplora_client::AsyncClient`] functionality.
///
/// This is the async version of [`EsploraExt`]. Refer to
/// [crate-level documentation] for more.
///
/// [`EsploraExt`]: crate::EsploraExt
/// [crate-level documentation]: crate
#[cfg(feature = "async")]
#[cfg_attr(target_arch = "wasm32", async_trait(?Send))]
#[cfg_attr(not(target_arch = "wasm32"), async_trait)]
pub trait EsploraAsyncExt {
    /// Scan the blockchain (via esplora) for the data specified and returns a [`KeychainScan`].
    ///
    /// - `local_chain`: the most recent block hashes present locally
    /// - `keychain_spks`: keychains that we want to scan transactions for
    /// - `txids`: transactions for which we want updated [`ChainPosition`]s
    /// - `outpoints`: transactions associated with these outpoints (residing, spending) that we
    ///     want to be included in the update
    ///
    /// The scan for each keychain stops after a gap of `stop_gap` script pubkeys with no associated
    /// transactions. `parallel_requests` specifies the max number of HTTP requests to make in
    /// parallel.
    ///
    /// [`ChainPosition`]: bdk_chain::sparse_chain::ChainPosition
    #[allow(clippy::result_large_err)] // FIXME
    async fn scan<K: Ord + Clone + Send>(
        &self,
        local_chain: &BTreeMap<u32, BlockHash>,
        keychain_spks: BTreeMap<
            K,
            impl IntoIterator<IntoIter = impl Iterator<Item = (u32, Script)> + Send> + Send,
        >,
        txids: impl IntoIterator<IntoIter = impl Iterator<Item = Txid> + Send> + Send,
        outpoints: impl IntoIterator<IntoIter = impl Iterator<Item = OutPoint> + Send> + Send,
        stop_gap: usize,
        parallel_requests: usize,
    ) -> Result<KeychainScan<K, ConfirmationTime>, Error>;

    /// Convenience method to call [`scan`] without requiring a keychain.
    ///
    /// [`scan`]: EsploraAsyncExt::scan
    #[allow(clippy::result_large_err)] // FIXME
    async fn scan_without_keychain(
        &self,
        local_chain: &BTreeMap<u32, BlockHash>,
        misc_spks: impl IntoIterator<IntoIter = impl Iterator<Item = Script> + Send> + Send,
        txids: impl IntoIterator<IntoIter = impl Iterator<Item = Txid> + Send> + Send,
        outpoints: impl IntoIterator<IntoIter = impl Iterator<Item = OutPoint> + Send> + Send,
        parallel_requests: usize,
    ) -> Result<ChainGraph<ConfirmationTime>, Error> {
        // Delegate to `scan` with a single unit keychain `()` and an effectively-infinite
        // stop gap, then discard the (trivial) keychain indices from the result.
        let wallet_scan = self
            .scan(
                local_chain,
                [(
                    (),
                    misc_spks
                        .into_iter()
                        .enumerate()
                        .map(|(i, spk)| (i as u32, spk)),
                )]
                .into(),
                txids,
                outpoints,
                usize::MAX,
                parallel_requests,
            )
            .await?;

        Ok(wallet_scan.update)
    }
}
|
||||
|
||||
#[cfg(feature = "async")]
#[cfg_attr(target_arch = "wasm32", async_trait(?Send))]
#[cfg_attr(not(target_arch = "wasm32"), async_trait)]
impl EsploraAsyncExt for esplora_client::AsyncClient {
    #[allow(clippy::result_large_err)] // FIXME
    async fn scan<K: Ord + Clone + Send>(
        &self,
        local_chain: &BTreeMap<u32, BlockHash>,
        keychain_spks: BTreeMap<
            K,
            impl IntoIterator<IntoIter = impl Iterator<Item = (u32, Script)> + Send> + Send,
        >,
        txids: impl IntoIterator<IntoIter = impl Iterator<Item = Txid> + Send> + Send,
        outpoints: impl IntoIterator<IntoIter = impl Iterator<Item = OutPoint> + Send> + Send,
        stop_gap: usize,
        parallel_requests: usize,
    ) -> Result<KeychainScan<K, ConfirmationTime>, Error> {
        let txids = txids.into_iter();
        let outpoints = outpoints.into_iter();
        // Guard against a zero value, which would make the request loop spin forever.
        let parallel_requests = parallel_requests.max(1);
        let mut scan = KeychainScan::default();
        let update = &mut scan.update;
        let last_active_indices = &mut scan.last_active_indices;

        // Walk our local checkpoints newest-first, mirroring the server's current hash for
        // each height; stop at the first height where server and local chain still agree.
        for (&height, &original_hash) in local_chain.iter().rev() {
            let update_block_id = BlockId {
                height,
                hash: self.get_block_hash(height).await?,
            };
            let _ = update
                .insert_checkpoint(update_block_id)
                .expect("cannot repeat height here");
            if update_block_id.hash == original_hash {
                break;
            }
        }
        let tip_at_start = BlockId {
            height: self.get_height().await?,
            hash: self.get_tip_hash().await?,
        };
        if let Err(failure) = update.insert_checkpoint(tip_at_start) {
            match failure {
                sparse_chain::InsertCheckpointError::HashNotMatching { .. } => {
                    // there was a re-org before we started scanning. We haven't consumed any iterators, so calling this function recursively is safe.
                    return EsploraAsyncExt::scan(
                        self,
                        local_chain,
                        keychain_spks,
                        txids,
                        outpoints,
                        stop_gap,
                        parallel_requests,
                    )
                    .await;
                }
            }
        }

        for (keychain, spks) in keychain_spks {
            let mut spks = spks.into_iter();
            let mut last_active_index = None;
            let mut empty_scripts = 0; // consecutive spks seen with no history
            type IndexWithTxs = (u32, Vec<esplora_client::Tx>);

            loop {
                // Fire up to `parallel_requests` history fetches concurrently;
                // `FuturesOrdered` preserves spk order in the results.
                let futures: FuturesOrdered<_> = (0..parallel_requests)
                    .filter_map(|_| {
                        let (index, script) = spks.next()?;
                        let client = self.clone();
                        Some(async move {
                            let mut related_txs = client.scripthash_txs(&script, None).await?;

                            let n_confirmed =
                                related_txs.iter().filter(|tx| tx.status.confirmed).count();
                            // esplora pages on 25 confirmed transactions. If there are 25 or more we
                            // keep requesting to see if there's more.
                            if n_confirmed >= 25 {
                                loop {
                                    let new_related_txs = client
                                        .scripthash_txs(
                                            &script,
                                            Some(related_txs.last().unwrap().txid),
                                        )
                                        .await?;
                                    let n = new_related_txs.len();
                                    related_txs.extend(new_related_txs);
                                    // we've reached the end
                                    if n < 25 {
                                        break;
                                    }
                                }
                            }

                            Result::<_, esplora_client::Error>::Ok((index, related_txs))
                        })
                    })
                    .collect();

                let n_futures = futures.len();

                let idx_with_tx: Vec<IndexWithTxs> = futures.try_collect().await?;

                for (index, related_txs) in idx_with_tx {
                    if related_txs.is_empty() {
                        empty_scripts += 1;
                    } else {
                        last_active_index = Some(index);
                        empty_scripts = 0;
                    }
                    for tx in related_txs {
                        let confirmation_time =
                            map_confirmation_time(&tx.status, tip_at_start.height);

                        if let Err(failure) = update.insert_tx(tx.to_tx(), confirmation_time) {
                            use bdk_chain::{
                                chain_graph::InsertTxError, sparse_chain::InsertTxError::*,
                            };
                            match failure {
                                InsertTxError::Chain(TxTooHigh { .. }) => {
                                    unreachable!("chain position already checked earlier")
                                }
                                InsertTxError::Chain(TxMovedUnexpectedly { .. })
                                | InsertTxError::UnresolvableConflict(_) => {
                                    /* implies reorg during a scan. We deal with that below */
                                }
                            }
                        }
                    }
                }

                // Stop when the spk iterator is exhausted or the stop gap is reached.
                if n_futures == 0 || empty_scripts >= stop_gap {
                    break;
                }
            }

            if let Some(last_active_index) = last_active_index {
                last_active_indices.insert(keychain, last_active_index);
            }
        }

        // Caller-requested txids: fetch each tx and its status, skipping unknown ones.
        for txid in txids {
            let (tx, tx_status) =
                match (self.get_tx(&txid).await?, self.get_tx_status(&txid).await?) {
                    (Some(tx), Some(tx_status)) => (tx, tx_status),
                    _ => continue,
                };

            let confirmation_time = map_confirmation_time(&tx_status, tip_at_start.height);

            if let Err(failure) = update.insert_tx(tx, confirmation_time) {
                use bdk_chain::{chain_graph::InsertTxError, sparse_chain::InsertTxError::*};
                match failure {
                    InsertTxError::Chain(TxTooHigh { .. }) => {
                        unreachable!("chain position already checked earlier")
                    }
                    InsertTxError::Chain(TxMovedUnexpectedly { .. })
                    | InsertTxError::UnresolvableConflict(_) => {
                        /* implies reorg during a scan. We deal with that below */
                    }
                }
            }
        }

        // Caller-requested outpoints: include both the residing tx and (if any) the tx that
        // spends the outpoint, as reported by the esplora output-status endpoint.
        for op in outpoints {
            let mut op_txs = Vec::with_capacity(2);
            if let (Some(tx), Some(tx_status)) = (
                self.get_tx(&op.txid).await?,
                self.get_tx_status(&op.txid).await?,
            ) {
                op_txs.push((tx, tx_status));
                if let Some(OutputStatus {
                    txid: Some(txid),
                    status: Some(spend_status),
                    ..
                }) = self.get_output_status(&op.txid, op.vout as _).await?
                {
                    if let Some(spend_tx) = self.get_tx(&txid).await? {
                        op_txs.push((spend_tx, spend_status));
                    }
                }
            }

            for (tx, status) in op_txs {
                let confirmation_time = map_confirmation_time(&status, tip_at_start.height);

                if let Err(failure) = update.insert_tx(tx, confirmation_time) {
                    use bdk_chain::{chain_graph::InsertTxError, sparse_chain::InsertTxError::*};
                    match failure {
                        InsertTxError::Chain(TxTooHigh { .. }) => {
                            unreachable!("chain position already checked earlier")
                        }
                        InsertTxError::Chain(TxMovedUnexpectedly { .. })
                        | InsertTxError::UnresolvableConflict(_) => {
                            /* implies reorg during a scan. We deal with that below */
                        }
                    }
                }
            }
        }

        // Detect a reorg that happened while we were scanning: our latest checkpoint's hash
        // must still match what the server reports for that height.
        let reorg_occurred = {
            if let Some(checkpoint) = update.chain().latest_checkpoint() {
                self.get_block_hash(checkpoint.height).await? != checkpoint.hash
            } else {
                false
            }
        };

        if reorg_occurred {
            // A reorg occurred, so let's find out where all the txids we found are in the chain now.
            // XXX: collect required because of weird type naming issues
            let txids_found = update
                .chain()
                .txids()
                .map(|(_, txid)| *txid)
                .collect::<Vec<_>>();
            scan.update = EsploraAsyncExt::scan_without_keychain(
                self,
                local_chain,
                [],
                txids_found,
                [],
                parallel_requests,
            )
            .await?;
        }

        Ok(scan)
    }
}
|
||||
290
crates/esplora/src/blocking_ext.rs
Normal file
290
crates/esplora/src/blocking_ext.rs
Normal file
@@ -0,0 +1,290 @@
|
||||
use std::collections::BTreeMap;
|
||||
|
||||
use bdk_chain::{
|
||||
bitcoin::{BlockHash, OutPoint, Script, Txid},
|
||||
chain_graph::ChainGraph,
|
||||
keychain::KeychainScan,
|
||||
sparse_chain, BlockId, ConfirmationTime,
|
||||
};
|
||||
use esplora_client::{Error, OutputStatus};
|
||||
|
||||
use crate::map_confirmation_time;
|
||||
|
||||
/// Trait to extend [`esplora_client::BlockingClient`] functionality.
///
/// Refer to [crate-level documentation] for more.
///
/// [crate-level documentation]: crate
pub trait EsploraExt {
    /// Scan the blockchain (via esplora) for the data specified and returns a [`KeychainScan`].
    ///
    /// - `local_chain`: the most recent block hashes present locally
    /// - `keychain_spks`: keychains that we want to scan transactions for
    /// - `txids`: transactions for which we want updated [`ChainPosition`]s
    /// - `outpoints`: transactions associated with these outpoints (residing, spending) that we
    ///     want to be included in the update
    ///
    /// The scan for each keychain stops after a gap of `stop_gap` script pubkeys with no associated
    /// transactions. `parallel_requests` specifies the max number of HTTP requests to make in
    /// parallel.
    ///
    /// [`ChainPosition`]: bdk_chain::sparse_chain::ChainPosition
    #[allow(clippy::result_large_err)] // FIXME
    fn scan<K: Ord + Clone>(
        &self,
        local_chain: &BTreeMap<u32, BlockHash>,
        keychain_spks: BTreeMap<K, impl IntoIterator<Item = (u32, Script)>>,
        txids: impl IntoIterator<Item = Txid>,
        outpoints: impl IntoIterator<Item = OutPoint>,
        stop_gap: usize,
        parallel_requests: usize,
    ) -> Result<KeychainScan<K, ConfirmationTime>, Error>;

    /// Convenience method to call [`scan`] without requiring a keychain.
    ///
    /// [`scan`]: EsploraExt::scan
    #[allow(clippy::result_large_err)] // FIXME
    fn scan_without_keychain(
        &self,
        local_chain: &BTreeMap<u32, BlockHash>,
        misc_spks: impl IntoIterator<Item = Script>,
        txids: impl IntoIterator<Item = Txid>,
        outpoints: impl IntoIterator<Item = OutPoint>,
        parallel_requests: usize,
    ) -> Result<ChainGraph<ConfirmationTime>, Error> {
        // Delegate to `scan` with a single unit keychain `()` and an effectively-infinite
        // stop gap, then discard the (trivial) keychain indices from the result.
        let wallet_scan = self.scan(
            local_chain,
            [(
                (),
                misc_spks
                    .into_iter()
                    .enumerate()
                    .map(|(i, spk)| (i as u32, spk)),
            )]
            .into(),
            txids,
            outpoints,
            usize::MAX,
            parallel_requests,
        )?;

        Ok(wallet_scan.update)
    }
}
|
||||
|
||||
impl EsploraExt for esplora_client::BlockingClient {
    fn scan<K: Ord + Clone>(
        &self,
        local_chain: &BTreeMap<u32, BlockHash>,
        keychain_spks: BTreeMap<K, impl IntoIterator<Item = (u32, Script)>>,
        txids: impl IntoIterator<Item = Txid>,
        outpoints: impl IntoIterator<Item = OutPoint>,
        stop_gap: usize,
        parallel_requests: usize,
    ) -> Result<KeychainScan<K, ConfirmationTime>, Error> {
        // Guard against a zero value, which would make the request loop spin forever.
        let parallel_requests = parallel_requests.max(1);
        let mut scan = KeychainScan::default();
        let update = &mut scan.update;
        let last_active_indices = &mut scan.last_active_indices;

        // Walk our local checkpoints newest-first, mirroring the server's current hash for
        // each height; stop at the first height where server and local chain still agree.
        for (&height, &original_hash) in local_chain.iter().rev() {
            let update_block_id = BlockId {
                height,
                hash: self.get_block_hash(height)?,
            };
            let _ = update
                .insert_checkpoint(update_block_id)
                .expect("cannot repeat height here");
            if update_block_id.hash == original_hash {
                break;
            }
        }
        let tip_at_start = BlockId {
            height: self.get_height()?,
            hash: self.get_tip_hash()?,
        };
        if let Err(failure) = update.insert_checkpoint(tip_at_start) {
            match failure {
                sparse_chain::InsertCheckpointError::HashNotMatching { .. } => {
                    // there was a re-org before we started scanning. We haven't consumed any iterators, so calling this function recursively is safe.
                    return EsploraExt::scan(
                        self,
                        local_chain,
                        keychain_spks,
                        txids,
                        outpoints,
                        stop_gap,
                        parallel_requests,
                    );
                }
            }
        }

        for (keychain, spks) in keychain_spks {
            let mut spks = spks.into_iter();
            let mut last_active_index = None;
            let mut empty_scripts = 0; // consecutive spks seen with no history
            type IndexWithTxs = (u32, Vec<esplora_client::Tx>);

            loop {
                // Spawn up to `parallel_requests` threads, each fetching one spk's history;
                // results are consumed in spawn order so spk ordering is preserved.
                let handles = (0..parallel_requests)
                    .filter_map(
                        |_| -> Option<std::thread::JoinHandle<Result<IndexWithTxs, _>>> {
                            let (index, script) = spks.next()?;
                            let client = self.clone();
                            Some(std::thread::spawn(move || {
                                let mut related_txs = client.scripthash_txs(&script, None)?;

                                let n_confirmed =
                                    related_txs.iter().filter(|tx| tx.status.confirmed).count();
                                // esplora pages on 25 confirmed transactions. If there are 25 or more we
                                // keep requesting to see if there's more.
                                if n_confirmed >= 25 {
                                    loop {
                                        let new_related_txs = client.scripthash_txs(
                                            &script,
                                            Some(related_txs.last().unwrap().txid),
                                        )?;
                                        let n = new_related_txs.len();
                                        related_txs.extend(new_related_txs);
                                        // we've reached the end
                                        if n < 25 {
                                            break;
                                        }
                                    }
                                }

                                Result::<_, esplora_client::Error>::Ok((index, related_txs))
                            }))
                        },
                    )
                    .collect::<Vec<_>>();

                let n_handles = handles.len();

                for handle in handles {
                    let (index, related_txs) = handle.join().unwrap()?; // TODO: don't unwrap
                    if related_txs.is_empty() {
                        empty_scripts += 1;
                    } else {
                        last_active_index = Some(index);
                        empty_scripts = 0;
                    }
                    for tx in related_txs {
                        let confirmation_time =
                            map_confirmation_time(&tx.status, tip_at_start.height);

                        if let Err(failure) = update.insert_tx(tx.to_tx(), confirmation_time) {
                            use bdk_chain::{
                                chain_graph::InsertTxError, sparse_chain::InsertTxError::*,
                            };
                            match failure {
                                InsertTxError::Chain(TxTooHigh { .. }) => {
                                    unreachable!("chain position already checked earlier")
                                }
                                InsertTxError::Chain(TxMovedUnexpectedly { .. })
                                | InsertTxError::UnresolvableConflict(_) => {
                                    /* implies reorg during a scan. We deal with that below */
                                }
                            }
                        }
                    }
                }

                // Stop when the spk iterator is exhausted or the stop gap is reached.
                if n_handles == 0 || empty_scripts >= stop_gap {
                    break;
                }
            }

            if let Some(last_active_index) = last_active_index {
                last_active_indices.insert(keychain, last_active_index);
            }
        }

        // Caller-requested txids: fetch each tx and its status, skipping unknown ones.
        for txid in txids.into_iter() {
            let (tx, tx_status) = match (self.get_tx(&txid)?, self.get_tx_status(&txid)?) {
                (Some(tx), Some(tx_status)) => (tx, tx_status),
                _ => continue,
            };

            let confirmation_time = map_confirmation_time(&tx_status, tip_at_start.height);

            if let Err(failure) = update.insert_tx(tx, confirmation_time) {
                use bdk_chain::{chain_graph::InsertTxError, sparse_chain::InsertTxError::*};
                match failure {
                    InsertTxError::Chain(TxTooHigh { .. }) => {
                        unreachable!("chain position already checked earlier")
                    }
                    InsertTxError::Chain(TxMovedUnexpectedly { .. })
                    | InsertTxError::UnresolvableConflict(_) => {
                        /* implies reorg during a scan. We deal with that below */
                    }
                }
            }
        }

        // Caller-requested outpoints: include both the residing tx and (if any) the tx that
        // spends the outpoint, as reported by the esplora output-status endpoint.
        for op in outpoints.into_iter() {
            let mut op_txs = Vec::with_capacity(2);
            if let (Some(tx), Some(tx_status)) =
                (self.get_tx(&op.txid)?, self.get_tx_status(&op.txid)?)
            {
                op_txs.push((tx, tx_status));
                if let Some(OutputStatus {
                    txid: Some(txid),
                    status: Some(spend_status),
                    ..
                }) = self.get_output_status(&op.txid, op.vout as _)?
                {
                    if let Some(spend_tx) = self.get_tx(&txid)? {
                        op_txs.push((spend_tx, spend_status));
                    }
                }
            }

            for (tx, status) in op_txs {
                let confirmation_time = map_confirmation_time(&status, tip_at_start.height);

                if let Err(failure) = update.insert_tx(tx, confirmation_time) {
                    use bdk_chain::{chain_graph::InsertTxError, sparse_chain::InsertTxError::*};
                    match failure {
                        InsertTxError::Chain(TxTooHigh { .. }) => {
                            unreachable!("chain position already checked earlier")
                        }
                        InsertTxError::Chain(TxMovedUnexpectedly { .. })
                        | InsertTxError::UnresolvableConflict(_) => {
                            /* implies reorg during a scan. We deal with that below */
                        }
                    }
                }
            }
        }

        // Detect a reorg that happened while we were scanning: our latest checkpoint's hash
        // must still match what the server reports for that height.
        let reorg_occurred = {
            if let Some(checkpoint) = update.chain().latest_checkpoint() {
                self.get_block_hash(checkpoint.height)? != checkpoint.hash
            } else {
                false
            }
        };

        if reorg_occurred {
            // A reorg occurred, so let's find out where all the txids we found are now in the chain.
            // XXX: collect required because of weird type naming issues
            let txids_found = update
                .chain()
                .txids()
                .map(|(_, txid)| *txid)
                .collect::<Vec<_>>();
            scan.update = EsploraExt::scan_without_keychain(
                self,
                local_chain,
                [],
                txids_found,
                [],
                parallel_requests,
            )?;
        }

        Ok(scan)
    }
}
|
||||
27
crates/esplora/src/lib.rs
Normal file
27
crates/esplora/src/lib.rs
Normal file
@@ -0,0 +1,27 @@
|
||||
#![doc = include_str!("../README.md")]
|
||||
use bdk_chain::ConfirmationTime;
|
||||
use esplora_client::TxStatus;
|
||||
|
||||
pub use esplora_client;
|
||||
|
||||
#[cfg(feature = "blocking")]
|
||||
mod blocking_ext;
|
||||
#[cfg(feature = "blocking")]
|
||||
pub use blocking_ext::*;
|
||||
|
||||
#[cfg(feature = "async")]
|
||||
mod async_ext;
|
||||
#[cfg(feature = "async")]
|
||||
pub use async_ext::*;
|
||||
|
||||
pub(crate) fn map_confirmation_time(
|
||||
tx_status: &TxStatus,
|
||||
height_at_start: u32,
|
||||
) -> ConfirmationTime {
|
||||
match (tx_status.block_time, tx_status.block_height) {
|
||||
(Some(time), Some(height)) if height <= height_at_start => {
|
||||
ConfirmationTime::Confirmed { height, time }
|
||||
}
|
||||
_ => ConfirmationTime::Unconfirmed,
|
||||
}
|
||||
}
|
||||
18
crates/file_store/Cargo.toml
Normal file
18
crates/file_store/Cargo.toml
Normal file
@@ -0,0 +1,18 @@
|
||||
[package]
|
||||
name = "bdk_file_store"
|
||||
version = "0.1.0"
|
||||
edition = "2021"
|
||||
license = "MIT OR Apache-2.0"
|
||||
repository = "https://github.com/bitcoindevkit/bdk"
|
||||
documentation = "https://docs.rs/bdk_file_store"
|
||||
keywords = ["bitcoin", "persist", "persistence", "bdk", "file", "store"]
|
||||
authors = ["Bitcoin Dev Kit Developers"]
|
||||
readme = "README.md"
|
||||
|
||||
[dependencies]
|
||||
bdk_chain = { path = "../chain", version = "0.4.0", features = [ "serde", "miniscript" ] }
|
||||
bincode = { version = "1" }
|
||||
serde = { version = "1", features = ["derive"] }
|
||||
|
||||
[dev-dependencies]
|
||||
tempfile = "3"
|
||||
10
crates/file_store/README.md
Normal file
10
crates/file_store/README.md
Normal file
@@ -0,0 +1,10 @@
|
||||
# BDK File Store
|
||||
|
||||
This is a simple append-only flat file implementation of
|
||||
[`Persist`](`bdk_chain::keychain::persist::Persist`).
|
||||
|
||||
The main structure is [`KeychainStore`](`crate::KeychainStore`), which can be used with [`bdk`]'s
|
||||
`Wallet` to persist wallet data into a flat file.
|
||||
|
||||
[`bdk`]: https://docs.rs/bdk/latest
|
||||
[`bdk_chain`]: https://docs.rs/bdk_chain/latest
|
||||
404
crates/file_store/src/file_store.rs
Normal file
404
crates/file_store/src/file_store.rs
Normal file
@@ -0,0 +1,404 @@
|
||||
//! Module for persisting data on disk.
|
||||
//!
|
||||
//! The star of the show is [`KeychainStore`], which maintains an append-only file of
|
||||
//! [`KeychainChangeSet`]s which can be used to restore a [`KeychainTracker`].
|
||||
use bdk_chain::{
|
||||
keychain::{KeychainChangeSet, KeychainTracker},
|
||||
sparse_chain,
|
||||
};
|
||||
use bincode::{DefaultOptions, Options};
|
||||
use core::marker::PhantomData;
|
||||
use std::{
|
||||
fs::{File, OpenOptions},
|
||||
io::{self, Read, Seek, Write},
|
||||
path::Path,
|
||||
};
|
||||
|
||||
/// BDK File Store magic bytes length.
|
||||
const MAGIC_BYTES_LEN: usize = 12;
|
||||
|
||||
/// BDK File Store magic bytes.
|
||||
const MAGIC_BYTES: [u8; MAGIC_BYTES_LEN] = [98, 100, 107, 102, 115, 48, 48, 48, 48, 48, 48, 48];
|
||||
|
||||
/// Persists an append only list of `KeychainChangeSet<K,P>` to a single file.
|
||||
/// [`KeychainChangeSet<K,P>`] record the changes made to a [`KeychainTracker<K,P>`].
|
||||
#[derive(Debug)]
|
||||
pub struct KeychainStore<K, P> {
|
||||
db_file: File,
|
||||
changeset_type_params: core::marker::PhantomData<(K, P)>,
|
||||
}
|
||||
|
||||
fn bincode() -> impl bincode::Options {
|
||||
DefaultOptions::new().with_varint_encoding()
|
||||
}
|
||||
|
||||
impl<K, P> KeychainStore<K, P>
|
||||
where
|
||||
K: Ord + Clone + core::fmt::Debug,
|
||||
P: sparse_chain::ChainPosition,
|
||||
KeychainChangeSet<K, P>: serde::Serialize + serde::de::DeserializeOwned,
|
||||
{
|
||||
/// Creates a new store from a [`File`].
|
||||
///
|
||||
/// The file must have been opened with read and write permissions.
|
||||
///
|
||||
/// [`File`]: std::fs::File
|
||||
pub fn new(mut file: File) -> Result<Self, FileError> {
|
||||
file.rewind()?;
|
||||
|
||||
let mut magic_bytes = [0_u8; MAGIC_BYTES_LEN];
|
||||
file.read_exact(&mut magic_bytes)?;
|
||||
|
||||
if magic_bytes != MAGIC_BYTES {
|
||||
return Err(FileError::InvalidMagicBytes(magic_bytes));
|
||||
}
|
||||
|
||||
Ok(Self {
|
||||
db_file: file,
|
||||
changeset_type_params: Default::default(),
|
||||
})
|
||||
}
|
||||
|
||||
/// Creates or loads a store from `db_path`. If no file exists there, it will be created.
|
||||
pub fn new_from_path<D: AsRef<Path>>(db_path: D) -> Result<Self, FileError> {
|
||||
let already_exists = db_path.as_ref().exists();
|
||||
|
||||
let mut db_file = OpenOptions::new()
|
||||
.read(true)
|
||||
.write(true)
|
||||
.create(true)
|
||||
.open(db_path)?;
|
||||
|
||||
if !already_exists {
|
||||
db_file.write_all(&MAGIC_BYTES)?;
|
||||
}
|
||||
|
||||
Self::new(db_file)
|
||||
}
|
||||
|
||||
/// Iterates over the stored changeset from first to last, changing the seek position at each
|
||||
/// iteration.
|
||||
///
|
||||
/// The iterator may fail to read an entry and therefore return an error. However, the first time
|
||||
/// it returns an error will be the last. After doing so, the iterator will always yield `None`.
|
||||
///
|
||||
/// **WARNING**: This method changes the write position in the underlying file. You should
|
||||
/// always iterate over all entries until `None` is returned if you want your next write to go
|
||||
/// at the end; otherwise, you will write over existing entries.
|
||||
pub fn iter_changesets(&mut self) -> Result<EntryIter<'_, KeychainChangeSet<K, P>>, io::Error> {
|
||||
self.db_file
|
||||
.seek(io::SeekFrom::Start(MAGIC_BYTES_LEN as _))?;
|
||||
|
||||
Ok(EntryIter::new(&mut self.db_file))
|
||||
}
|
||||
|
||||
/// Loads all the changesets that have been stored as one giant changeset.
|
||||
///
|
||||
/// This function returns a tuple of the aggregate changeset and a result that indicates
|
||||
/// whether an error occurred while reading or deserializing one of the entries. If so the
|
||||
/// changeset will consist of all of those it was able to read.
|
||||
///
|
||||
/// You should usually check the error. In many applications, it may make sense to do a full
|
||||
/// wallet scan with a stop-gap after getting an error, since it is likely that one of the
|
||||
/// changesets it was unable to read changed the derivation indices of the tracker.
|
||||
///
|
||||
/// **WARNING**: This method changes the write position of the underlying file. The next
|
||||
/// changeset will be written over the erroring entry (or the end of the file if none existed).
|
||||
pub fn aggregate_changeset(&mut self) -> (KeychainChangeSet<K, P>, Result<(), IterError>) {
|
||||
let mut changeset = KeychainChangeSet::default();
|
||||
let result = (|| {
|
||||
let iter_changeset = self.iter_changesets()?;
|
||||
for next_changeset in iter_changeset {
|
||||
changeset.append(next_changeset?);
|
||||
}
|
||||
Ok(())
|
||||
})();
|
||||
|
||||
(changeset, result)
|
||||
}
|
||||
|
||||
/// Reads and applies all the changesets stored sequentially to the tracker, stopping when it fails
|
||||
/// to read the next one.
|
||||
///
|
||||
/// **WARNING**: This method changes the write position of the underlying file. The next
|
||||
/// changeset will be written over the erroring entry (or the end of the file if none existed).
|
||||
pub fn load_into_keychain_tracker(
|
||||
&mut self,
|
||||
tracker: &mut KeychainTracker<K, P>,
|
||||
) -> Result<(), IterError> {
|
||||
for changeset in self.iter_changesets()? {
|
||||
tracker.apply_changeset(changeset?)
|
||||
}
|
||||
Ok(())
|
||||
}
|
||||
|
||||
/// Append a new changeset to the file and truncate the file to the end of the appended changeset.
|
||||
///
|
||||
/// The truncation is to avoid the possibility of having a valid but inconsistent changeset
|
||||
/// directly after the appended changeset.
|
||||
pub fn append_changeset(
|
||||
&mut self,
|
||||
changeset: &KeychainChangeSet<K, P>,
|
||||
) -> Result<(), io::Error> {
|
||||
if changeset.is_empty() {
|
||||
return Ok(());
|
||||
}
|
||||
|
||||
bincode()
|
||||
.serialize_into(&mut self.db_file, changeset)
|
||||
.map_err(|e| match *e {
|
||||
bincode::ErrorKind::Io(inner) => inner,
|
||||
unexpected_err => panic!("unexpected bincode error: {}", unexpected_err),
|
||||
})?;
|
||||
|
||||
// truncate file after this changeset addition
|
||||
// if this is not done, data after this changeset may represent valid changesets, however
|
||||
// applying those changesets on top of this one may result in an inconsistent state
|
||||
let pos = self.db_file.stream_position()?;
|
||||
self.db_file.set_len(pos)?;
|
||||
|
||||
// We want to make sure that derivation indices changes are written to disk as soon as
|
||||
// possible, so you know about the write failure before you give out the address in the application.
|
||||
if !changeset.derivation_indices.is_empty() {
|
||||
self.db_file.sync_data()?;
|
||||
}
|
||||
|
||||
Ok(())
|
||||
}
|
||||
}
|
||||
|
||||
/// Error that occurs due to problems encountered with the file.
|
||||
#[derive(Debug)]
|
||||
pub enum FileError {
|
||||
/// IO error, this may mean that the file is too short.
|
||||
Io(io::Error),
|
||||
/// Magic bytes do not match what is expected.
|
||||
InvalidMagicBytes([u8; MAGIC_BYTES_LEN]),
|
||||
}
|
||||
|
||||
impl core::fmt::Display for FileError {
|
||||
fn fmt(&self, f: &mut core::fmt::Formatter<'_>) -> core::fmt::Result {
|
||||
match self {
|
||||
Self::Io(e) => write!(f, "io error trying to read file: {}", e),
|
||||
Self::InvalidMagicBytes(b) => write!(
|
||||
f,
|
||||
"file has invalid magic bytes: expected={:?} got={:?}",
|
||||
MAGIC_BYTES, b
|
||||
),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl From<io::Error> for FileError {
|
||||
fn from(value: io::Error) -> Self {
|
||||
Self::Io(value)
|
||||
}
|
||||
}
|
||||
|
||||
impl std::error::Error for FileError {}
|
||||
|
||||
/// Error type for [`EntryIter`].
|
||||
#[derive(Debug)]
|
||||
pub enum IterError {
|
||||
/// Failure to read from the file.
|
||||
Io(io::Error),
|
||||
/// Failure to decode data from the file.
|
||||
Bincode(bincode::ErrorKind),
|
||||
}
|
||||
|
||||
impl core::fmt::Display for IterError {
|
||||
fn fmt(&self, f: &mut core::fmt::Formatter<'_>) -> core::fmt::Result {
|
||||
match self {
|
||||
IterError::Io(e) => write!(f, "io error trying to read entry {}", e),
|
||||
IterError::Bincode(e) => write!(f, "bincode error while reading entry {}", e),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl std::error::Error for IterError {}
|
||||
|
||||
/// Iterator over entries in a file store.
|
||||
///
|
||||
/// Reads and returns an entry each time [`next`] is called. If an error occurs while reading the
|
||||
/// iterator will yield a `Result::Err(_)` instead and then `None` for the next call to `next`.
|
||||
///
|
||||
/// [`next`]: Self::next
|
||||
pub struct EntryIter<'a, V> {
|
||||
db_file: &'a mut File,
|
||||
types: PhantomData<V>,
|
||||
error_exit: bool,
|
||||
}
|
||||
|
||||
impl<'a, V> EntryIter<'a, V> {
|
||||
pub fn new(db_file: &'a mut File) -> Self {
|
||||
Self {
|
||||
db_file,
|
||||
types: PhantomData,
|
||||
error_exit: false,
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl<'a, V> Iterator for EntryIter<'a, V>
|
||||
where
|
||||
V: serde::de::DeserializeOwned,
|
||||
{
|
||||
type Item = Result<V, IterError>;
|
||||
|
||||
fn next(&mut self) -> Option<Self::Item> {
|
||||
let result = (|| {
|
||||
let pos = self.db_file.stream_position()?;
|
||||
|
||||
match bincode().deserialize_from(&mut self.db_file) {
|
||||
Ok(changeset) => Ok(Some(changeset)),
|
||||
Err(e) => {
|
||||
if let bincode::ErrorKind::Io(inner) = &*e {
|
||||
if inner.kind() == io::ErrorKind::UnexpectedEof {
|
||||
let eof = self.db_file.seek(io::SeekFrom::End(0))?;
|
||||
if pos == eof {
|
||||
return Ok(None);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
self.db_file.seek(io::SeekFrom::Start(pos))?;
|
||||
Err(IterError::Bincode(*e))
|
||||
}
|
||||
}
|
||||
})();
|
||||
|
||||
let result = result.transpose();
|
||||
|
||||
if let Some(Err(_)) = &result {
|
||||
self.error_exit = true;
|
||||
}
|
||||
|
||||
result
|
||||
}
|
||||
}
|
||||
|
||||
impl From<io::Error> for IterError {
|
||||
fn from(value: io::Error) -> Self {
|
||||
IterError::Io(value)
|
||||
}
|
||||
}
|
||||
|
||||
#[cfg(test)]
|
||||
mod test {
|
||||
use super::*;
|
||||
use bdk_chain::{
|
||||
keychain::{DerivationAdditions, KeychainChangeSet},
|
||||
TxHeight,
|
||||
};
|
||||
use std::{
|
||||
io::{Read, Write},
|
||||
vec::Vec,
|
||||
};
|
||||
use tempfile::NamedTempFile;
|
||||
#[derive(
|
||||
Debug,
|
||||
Clone,
|
||||
Copy,
|
||||
PartialOrd,
|
||||
Ord,
|
||||
PartialEq,
|
||||
Eq,
|
||||
Hash,
|
||||
serde::Serialize,
|
||||
serde::Deserialize,
|
||||
)]
|
||||
enum TestKeychain {
|
||||
External,
|
||||
Internal,
|
||||
}
|
||||
|
||||
impl core::fmt::Display for TestKeychain {
|
||||
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
|
||||
match self {
|
||||
Self::External => write!(f, "external"),
|
||||
Self::Internal => write!(f, "internal"),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn magic_bytes() {
|
||||
assert_eq!(&MAGIC_BYTES, "bdkfs0000000".as_bytes());
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn new_fails_if_file_is_too_short() {
|
||||
let mut file = NamedTempFile::new().unwrap();
|
||||
file.write_all(&MAGIC_BYTES[..MAGIC_BYTES_LEN - 1])
|
||||
.expect("should write");
|
||||
|
||||
match KeychainStore::<TestKeychain, TxHeight>::new(file.reopen().unwrap()) {
|
||||
Err(FileError::Io(e)) => assert_eq!(e.kind(), std::io::ErrorKind::UnexpectedEof),
|
||||
unexpected => panic!("unexpected result: {:?}", unexpected),
|
||||
};
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn new_fails_if_magic_bytes_are_invalid() {
|
||||
let invalid_magic_bytes = "ldkfs0000000";
|
||||
|
||||
let mut file = NamedTempFile::new().unwrap();
|
||||
file.write_all(invalid_magic_bytes.as_bytes())
|
||||
.expect("should write");
|
||||
|
||||
match KeychainStore::<TestKeychain, TxHeight>::new(file.reopen().unwrap()) {
|
||||
Err(FileError::InvalidMagicBytes(b)) => {
|
||||
assert_eq!(b, invalid_magic_bytes.as_bytes())
|
||||
}
|
||||
unexpected => panic!("unexpected result: {:?}", unexpected),
|
||||
};
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn append_changeset_truncates_invalid_bytes() {
|
||||
// initial data to write to file (magic bytes + invalid data)
|
||||
let mut data = [255_u8; 2000];
|
||||
data[..MAGIC_BYTES_LEN].copy_from_slice(&MAGIC_BYTES);
|
||||
|
||||
let changeset = KeychainChangeSet {
|
||||
derivation_indices: DerivationAdditions(
|
||||
vec![(TestKeychain::External, 42)].into_iter().collect(),
|
||||
),
|
||||
chain_graph: Default::default(),
|
||||
};
|
||||
|
||||
let mut file = NamedTempFile::new().unwrap();
|
||||
file.write_all(&data).expect("should write");
|
||||
|
||||
let mut store = KeychainStore::<TestKeychain, TxHeight>::new(file.reopen().unwrap())
|
||||
.expect("should open");
|
||||
match store.iter_changesets().expect("seek should succeed").next() {
|
||||
Some(Err(IterError::Bincode(_))) => {}
|
||||
unexpected_res => panic!("unexpected result: {:?}", unexpected_res),
|
||||
}
|
||||
|
||||
store.append_changeset(&changeset).expect("should append");
|
||||
|
||||
drop(store);
|
||||
|
||||
let got_bytes = {
|
||||
let mut buf = Vec::new();
|
||||
file.reopen()
|
||||
.unwrap()
|
||||
.read_to_end(&mut buf)
|
||||
.expect("should read");
|
||||
buf
|
||||
};
|
||||
|
||||
let expected_bytes = {
|
||||
let mut buf = MAGIC_BYTES.to_vec();
|
||||
DefaultOptions::new()
|
||||
.with_varint_encoding()
|
||||
.serialize_into(&mut buf, &changeset)
|
||||
.expect("should encode");
|
||||
buf
|
||||
};
|
||||
|
||||
assert_eq!(got_bytes, expected_bytes);
|
||||
}
|
||||
}
|
||||
32
crates/file_store/src/lib.rs
Normal file
32
crates/file_store/src/lib.rs
Normal file
@@ -0,0 +1,32 @@
|
||||
#![doc = include_str!("../README.md")]
|
||||
mod file_store;
|
||||
use bdk_chain::{
|
||||
keychain::{KeychainChangeSet, KeychainTracker, PersistBackend},
|
||||
sparse_chain::ChainPosition,
|
||||
};
|
||||
pub use file_store::*;
|
||||
|
||||
impl<K, P> PersistBackend<K, P> for KeychainStore<K, P>
|
||||
where
|
||||
K: Ord + Clone + core::fmt::Debug,
|
||||
P: ChainPosition,
|
||||
KeychainChangeSet<K, P>: serde::Serialize + serde::de::DeserializeOwned,
|
||||
{
|
||||
type WriteError = std::io::Error;
|
||||
|
||||
type LoadError = IterError;
|
||||
|
||||
fn append_changeset(
|
||||
&mut self,
|
||||
changeset: &KeychainChangeSet<K, P>,
|
||||
) -> Result<(), Self::WriteError> {
|
||||
KeychainStore::append_changeset(self, changeset)
|
||||
}
|
||||
|
||||
fn load_into_keychain_tracker(
|
||||
&mut self,
|
||||
tracker: &mut KeychainTracker<K, P>,
|
||||
) -> Result<(), Self::LoadError> {
|
||||
KeychainStore::load_into_keychain_tracker(self, tracker)
|
||||
}
|
||||
}
|
||||
1
crates/file_store/tests/test_file_store.rs
Normal file
1
crates/file_store/tests/test_file_store.rs
Normal file
@@ -0,0 +1 @@
|
||||
|
||||
1
example-crates/keychain_tracker_electrum/.gitignore
vendored
Normal file
1
example-crates/keychain_tracker_electrum/.gitignore
vendored
Normal file
@@ -0,0 +1 @@
|
||||
/target
|
||||
9
example-crates/keychain_tracker_electrum/Cargo.toml
Normal file
9
example-crates/keychain_tracker_electrum/Cargo.toml
Normal file
@@ -0,0 +1,9 @@
|
||||
[package]
|
||||
name = "keychain_tracker_electrum_example"
|
||||
version = "0.1.0"
|
||||
edition = "2021"
|
||||
|
||||
[dependencies]
|
||||
bdk_chain = { path = "../../crates/chain", features = ["serde"] }
|
||||
bdk_electrum = { path = "../../crates/electrum" }
|
||||
keychain_tracker_example_cli = { path = "../keychain_tracker_example_cli"}
|
||||
6
example-crates/keychain_tracker_electrum/README.md
Normal file
6
example-crates/keychain_tracker_electrum/README.md
Normal file
@@ -0,0 +1,6 @@
|
||||
# Keychain Tracker with electrum
|
||||
|
||||
This example shows how you use the `KeychainTracker` from `bdk_chain` to create a simple command
|
||||
line wallet.
|
||||
|
||||
|
||||
245
example-crates/keychain_tracker_electrum/src/main.rs
Normal file
245
example-crates/keychain_tracker_electrum/src/main.rs
Normal file
@@ -0,0 +1,245 @@
|
||||
use bdk_chain::bitcoin::{Address, OutPoint, Txid};
|
||||
use bdk_electrum::bdk_chain::{self, bitcoin::Network, TxHeight};
|
||||
use bdk_electrum::{
|
||||
electrum_client::{self, ElectrumApi},
|
||||
ElectrumExt, ElectrumUpdate,
|
||||
};
|
||||
use keychain_tracker_example_cli::{
|
||||
self as cli,
|
||||
anyhow::{self, Context},
|
||||
clap::{self, Parser, Subcommand},
|
||||
};
|
||||
use std::{collections::BTreeMap, fmt::Debug, io, io::Write};
|
||||
|
||||
#[derive(Subcommand, Debug, Clone)]
|
||||
enum ElectrumCommands {
|
||||
/// Scans the addresses in the wallet using the esplora API.
|
||||
Scan {
|
||||
/// When a gap this large has been found for a keychain, it will stop.
|
||||
#[clap(long, default_value = "5")]
|
||||
stop_gap: usize,
|
||||
#[clap(flatten)]
|
||||
scan_options: ScanOptions,
|
||||
},
|
||||
/// Scans particular addresses using the esplora API.
|
||||
Sync {
|
||||
/// Scan all the unused addresses.
|
||||
#[clap(long)]
|
||||
unused_spks: bool,
|
||||
/// Scan every address that you have derived.
|
||||
#[clap(long)]
|
||||
all_spks: bool,
|
||||
/// Scan unspent outpoints for spends or changes to confirmation status of residing tx.
|
||||
#[clap(long)]
|
||||
utxos: bool,
|
||||
/// Scan unconfirmed transactions for updates.
|
||||
#[clap(long)]
|
||||
unconfirmed: bool,
|
||||
#[clap(flatten)]
|
||||
scan_options: ScanOptions,
|
||||
},
|
||||
}
|
||||
|
||||
#[derive(Parser, Debug, Clone, PartialEq)]
|
||||
pub struct ScanOptions {
|
||||
/// Set batch size for each script_history call to electrum client.
|
||||
#[clap(long, default_value = "25")]
|
||||
pub batch_size: usize,
|
||||
}
|
||||
|
||||
fn main() -> anyhow::Result<()> {
|
||||
let (args, keymap, tracker, db) = cli::init::<ElectrumCommands, _>()?;
|
||||
|
||||
let electrum_url = match args.network {
|
||||
Network::Bitcoin => "ssl://electrum.blockstream.info:50002",
|
||||
Network::Testnet => "ssl://electrum.blockstream.info:60002",
|
||||
Network::Regtest => "tcp://localhost:60401",
|
||||
Network::Signet => "tcp://signet-electrumx.wakiyamap.dev:50001",
|
||||
};
|
||||
let config = electrum_client::Config::builder()
|
||||
.validate_domain(matches!(args.network, Network::Bitcoin))
|
||||
.build();
|
||||
|
||||
let client = electrum_client::Client::from_config(electrum_url, config)?;
|
||||
|
||||
let electrum_cmd = match args.command.clone() {
|
||||
cli::Commands::ChainSpecific(electrum_cmd) => electrum_cmd,
|
||||
general_command => {
|
||||
return cli::handle_commands(
|
||||
general_command,
|
||||
|transaction| {
|
||||
let _txid = client.transaction_broadcast(transaction)?;
|
||||
Ok(())
|
||||
},
|
||||
&tracker,
|
||||
&db,
|
||||
args.network,
|
||||
&keymap,
|
||||
)
|
||||
}
|
||||
};
|
||||
|
||||
let response = match electrum_cmd {
|
||||
ElectrumCommands::Scan {
|
||||
stop_gap,
|
||||
scan_options: scan_option,
|
||||
} => {
|
||||
let (spk_iterators, local_chain) = {
|
||||
// Get a short lock on the tracker to get the spks iterators
|
||||
// and local chain state
|
||||
let tracker = &*tracker.lock().unwrap();
|
||||
let spk_iterators = tracker
|
||||
.txout_index
|
||||
.spks_of_all_keychains()
|
||||
.into_iter()
|
||||
.map(|(keychain, iter)| {
|
||||
let mut first = true;
|
||||
let spk_iter = iter.inspect(move |(i, _)| {
|
||||
if first {
|
||||
eprint!("\nscanning {}: ", keychain);
|
||||
first = false;
|
||||
}
|
||||
|
||||
eprint!("{} ", i);
|
||||
let _ = io::stdout().flush();
|
||||
});
|
||||
(keychain, spk_iter)
|
||||
})
|
||||
.collect::<BTreeMap<_, _>>();
|
||||
let local_chain = tracker.chain().checkpoints().clone();
|
||||
(spk_iterators, local_chain)
|
||||
};
|
||||
|
||||
// we scan the spks **without** a lock on the tracker
|
||||
client.scan(
|
||||
&local_chain,
|
||||
spk_iterators,
|
||||
core::iter::empty(),
|
||||
core::iter::empty(),
|
||||
stop_gap,
|
||||
scan_option.batch_size,
|
||||
)?
|
||||
}
|
||||
ElectrumCommands::Sync {
|
||||
mut unused_spks,
|
||||
mut utxos,
|
||||
mut unconfirmed,
|
||||
all_spks,
|
||||
scan_options,
|
||||
} => {
|
||||
// Get a short lock on the tracker to get the spks we're interested in
|
||||
let tracker = tracker.lock().unwrap();
|
||||
|
||||
if !(all_spks || unused_spks || utxos || unconfirmed) {
|
||||
unused_spks = true;
|
||||
unconfirmed = true;
|
||||
utxos = true;
|
||||
} else if all_spks {
|
||||
unused_spks = false;
|
||||
}
|
||||
|
||||
let mut spks: Box<dyn Iterator<Item = bdk_chain::bitcoin::Script>> =
|
||||
Box::new(core::iter::empty());
|
||||
if all_spks {
|
||||
let all_spks = tracker
|
||||
.txout_index
|
||||
.all_spks()
|
||||
.iter()
|
||||
.map(|(k, v)| (*k, v.clone()))
|
||||
.collect::<Vec<_>>();
|
||||
spks = Box::new(spks.chain(all_spks.into_iter().map(|(index, script)| {
|
||||
eprintln!("scanning {:?}", index);
|
||||
script
|
||||
})));
|
||||
}
|
||||
if unused_spks {
|
||||
let unused_spks = tracker
|
||||
.txout_index
|
||||
.unused_spks(..)
|
||||
.map(|(k, v)| (*k, v.clone()))
|
||||
.collect::<Vec<_>>();
|
||||
spks = Box::new(spks.chain(unused_spks.into_iter().map(|(index, script)| {
|
||||
eprintln!(
|
||||
"Checking if address {} {:?} has been used",
|
||||
Address::from_script(&script, args.network).unwrap(),
|
||||
index
|
||||
);
|
||||
|
||||
script
|
||||
})));
|
||||
}
|
||||
|
||||
let mut outpoints: Box<dyn Iterator<Item = OutPoint>> = Box::new(core::iter::empty());
|
||||
|
||||
if utxos {
|
||||
let utxos = tracker
|
||||
.full_utxos()
|
||||
.map(|(_, utxo)| utxo)
|
||||
.collect::<Vec<_>>();
|
||||
outpoints = Box::new(
|
||||
utxos
|
||||
.into_iter()
|
||||
.inspect(|utxo| {
|
||||
eprintln!(
|
||||
"Checking if outpoint {} (value: {}) has been spent",
|
||||
utxo.outpoint, utxo.txout.value
|
||||
);
|
||||
})
|
||||
.map(|utxo| utxo.outpoint),
|
||||
);
|
||||
};
|
||||
|
||||
let mut txids: Box<dyn Iterator<Item = Txid>> = Box::new(core::iter::empty());
|
||||
|
||||
if unconfirmed {
|
||||
let unconfirmed_txids = tracker
|
||||
.chain()
|
||||
.range_txids_by_height(TxHeight::Unconfirmed..)
|
||||
.map(|(_, txid)| *txid)
|
||||
.collect::<Vec<_>>();
|
||||
|
||||
txids = Box::new(unconfirmed_txids.into_iter().inspect(|txid| {
|
||||
eprintln!("Checking if {} is confirmed yet", txid);
|
||||
}));
|
||||
}
|
||||
|
||||
let local_chain = tracker.chain().checkpoints().clone();
|
||||
// drop lock on tracker
|
||||
drop(tracker);
|
||||
|
||||
// we scan the spks **without** a lock on the tracker
|
||||
ElectrumUpdate {
|
||||
chain_update: client
|
||||
.scan_without_keychain(
|
||||
&local_chain,
|
||||
spks,
|
||||
txids,
|
||||
outpoints,
|
||||
scan_options.batch_size,
|
||||
)
|
||||
.context("scanning the blockchain")?,
|
||||
..Default::default()
|
||||
}
|
||||
}
|
||||
};
|
||||
|
||||
let missing_txids = response.missing_full_txs(&*tracker.lock().unwrap());
|
||||
|
||||
// fetch the missing full transactions **without** a lock on the tracker
|
||||
let new_txs = client
|
||||
.batch_transaction_get(missing_txids)
|
||||
.context("fetching full transactions")?;
|
||||
|
||||
{
|
||||
// Get a final short lock to apply the changes
|
||||
let mut tracker = tracker.lock().unwrap();
|
||||
let changeset = {
|
||||
let scan = response.into_keychain_scan(new_txs, &*tracker)?;
|
||||
tracker.determine_changeset(&scan)?
|
||||
};
|
||||
db.lock().unwrap().append_changeset(&changeset)?;
|
||||
tracker.apply_changeset(changeset);
|
||||
};
|
||||
|
||||
Ok(())
|
||||
}
|
||||
3
example-crates/keychain_tracker_esplora/.gitignore
vendored
Normal file
3
example-crates/keychain_tracker_esplora/.gitignore
vendored
Normal file
@@ -0,0 +1,3 @@
|
||||
/target
|
||||
Cargo.lock
|
||||
.bdk_example_db
|
||||
11
example-crates/keychain_tracker_esplora/Cargo.toml
Normal file
11
example-crates/keychain_tracker_esplora/Cargo.toml
Normal file
@@ -0,0 +1,11 @@
|
||||
[package]
|
||||
name = "keychain_tracker_esplora_example"
|
||||
version = "0.1.0"
|
||||
edition = "2021"
|
||||
|
||||
# See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html
|
||||
|
||||
[dependencies]
|
||||
bdk_chain = { path = "../../crates/chain", features = ["serde", "miniscript"] }
|
||||
bdk_esplora = { path = "../../crates/esplora" }
|
||||
keychain_tracker_example_cli = { path = "../keychain_tracker_example_cli" }
|
||||
241
example-crates/keychain_tracker_esplora/src/main.rs
Normal file
241
example-crates/keychain_tracker_esplora/src/main.rs
Normal file
@@ -0,0 +1,241 @@
|
||||
use bdk_chain::bitcoin::{Address, OutPoint, Txid};
|
||||
use bdk_chain::{bitcoin::Network, TxHeight};
|
||||
use bdk_esplora::esplora_client;
|
||||
use bdk_esplora::EsploraExt;
|
||||
|
||||
use std::io::{self, Write};
|
||||
|
||||
use keychain_tracker_example_cli::{
|
||||
self as cli,
|
||||
anyhow::{self, Context},
|
||||
clap::{self, Parser, Subcommand},
|
||||
};
|
||||
|
||||
#[derive(Subcommand, Debug, Clone)]
|
||||
enum EsploraCommands {
|
||||
/// Scans the addresses in the wallet using the esplora API.
|
||||
Scan {
|
||||
/// When a gap this large has been found for a keychain, it will stop.
|
||||
#[clap(long, default_value = "5")]
|
||||
stop_gap: usize,
|
||||
|
||||
#[clap(flatten)]
|
||||
scan_options: ScanOptions,
|
||||
},
|
||||
/// Scans particular addresses using esplora API.
|
||||
Sync {
|
||||
/// Scan all the unused addresses.
|
||||
#[clap(long)]
|
||||
unused_spks: bool,
|
||||
/// Scan every address that you have derived.
|
||||
#[clap(long)]
|
||||
all_spks: bool,
|
||||
/// Scan unspent outpoints for spends or changes to confirmation status of residing tx.
|
||||
#[clap(long)]
|
||||
utxos: bool,
|
||||
/// Scan unconfirmed transactions for updates.
|
||||
#[clap(long)]
|
||||
unconfirmed: bool,
|
||||
|
||||
#[clap(flatten)]
|
||||
scan_options: ScanOptions,
|
||||
},
|
||||
}
|
||||
|
||||
#[derive(Parser, Debug, Clone, PartialEq)]
|
||||
pub struct ScanOptions {
|
||||
#[clap(long, default_value = "5")]
|
||||
pub parallel_requests: usize,
|
||||
}
|
||||
|
||||
fn main() -> anyhow::Result<()> {
|
||||
let (args, keymap, keychain_tracker, db) = cli::init::<EsploraCommands, _>()?;
|
||||
let esplora_url = match args.network {
|
||||
Network::Bitcoin => "https://mempool.space/api",
|
||||
Network::Testnet => "https://mempool.space/testnet/api",
|
||||
Network::Regtest => "http://localhost:3002",
|
||||
Network::Signet => "https://mempool.space/signet/api",
|
||||
};
|
||||
|
||||
let client = esplora_client::Builder::new(esplora_url).build_blocking()?;
|
||||
|
||||
let esplora_cmd = match args.command {
|
||||
cli::Commands::ChainSpecific(esplora_cmd) => esplora_cmd,
|
||||
general_command => {
|
||||
return cli::handle_commands(
|
||||
general_command,
|
||||
|transaction| Ok(client.broadcast(transaction)?),
|
||||
&keychain_tracker,
|
||||
&db,
|
||||
args.network,
|
||||
&keymap,
|
||||
)
|
||||
}
|
||||
};
|
||||
|
||||
match esplora_cmd {
|
||||
EsploraCommands::Scan {
|
||||
stop_gap,
|
||||
scan_options,
|
||||
} => {
|
||||
let (spk_iterators, local_chain) = {
|
||||
// Get a short lock on the tracker to get the spks iterators
|
||||
// and local chain state
|
||||
let tracker = &*keychain_tracker.lock().unwrap();
|
||||
let spk_iterators = tracker
|
||||
.txout_index
|
||||
.spks_of_all_keychains()
|
||||
.into_iter()
|
||||
.map(|(keychain, iter)| {
|
||||
let mut first = true;
|
||||
(
|
||||
keychain,
|
||||
iter.inspect(move |(i, _)| {
|
||||
if first {
|
||||
eprint!("\nscanning {}: ", keychain);
|
||||
first = false;
|
||||
}
|
||||
|
||||
eprint!("{} ", i);
|
||||
let _ = io::stdout().flush();
|
||||
}),
|
||||
)
|
||||
})
|
||||
.collect();
|
||||
|
||||
let local_chain = tracker.chain().checkpoints().clone();
|
||||
(spk_iterators, local_chain)
|
||||
};
|
||||
|
||||
// we scan the iterators **without** a lock on the tracker
|
||||
let wallet_scan = client
|
||||
.scan(
|
||||
&local_chain,
|
||||
spk_iterators,
|
||||
core::iter::empty(),
|
||||
core::iter::empty(),
|
||||
stop_gap,
|
||||
scan_options.parallel_requests,
|
||||
)
|
||||
.context("scanning the blockchain")?;
|
||||
eprintln!();
|
||||
|
||||
{
|
||||
// we take a short lock to apply results to tracker and db
|
||||
let tracker = &mut *keychain_tracker.lock().unwrap();
|
||||
let db = &mut *db.lock().unwrap();
|
||||
let changeset = tracker.apply_update(wallet_scan)?;
|
||||
db.append_changeset(&changeset)?;
|
||||
}
|
||||
}
|
||||
EsploraCommands::Sync {
|
||||
mut unused_spks,
|
||||
mut utxos,
|
||||
mut unconfirmed,
|
||||
all_spks,
|
||||
scan_options,
|
||||
} => {
|
||||
// Get a short lock on the tracker to get the spks we're interested in
|
||||
let tracker = keychain_tracker.lock().unwrap();
|
||||
|
||||
if !(all_spks || unused_spks || utxos || unconfirmed) {
|
||||
unused_spks = true;
|
||||
unconfirmed = true;
|
||||
utxos = true;
|
||||
} else if all_spks {
|
||||
unused_spks = false;
|
||||
}
|
||||
|
||||
let mut spks: Box<dyn Iterator<Item = bdk_chain::bitcoin::Script>> =
|
||||
Box::new(core::iter::empty());
|
||||
if all_spks {
|
||||
let all_spks = tracker
|
||||
.txout_index
|
||||
.all_spks()
|
||||
.iter()
|
||||
.map(|(k, v)| (*k, v.clone()))
|
||||
.collect::<Vec<_>>();
|
||||
spks = Box::new(spks.chain(all_spks.into_iter().map(|(index, script)| {
|
||||
eprintln!("scanning {:?}", index);
|
||||
script
|
||||
})));
|
||||
}
|
||||
if unused_spks {
|
||||
let unused_spks = tracker
|
||||
.txout_index
|
||||
.unused_spks(..)
|
||||
.map(|(k, v)| (*k, v.clone()))
|
||||
.collect::<Vec<_>>();
|
||||
spks = Box::new(spks.chain(unused_spks.into_iter().map(|(index, script)| {
|
||||
eprintln!(
|
||||
"Checking if address {} {:?} has been used",
|
||||
Address::from_script(&script, args.network).unwrap(),
|
||||
index
|
||||
);
|
||||
|
||||
script
|
||||
})));
|
||||
}
|
||||
|
||||
let mut outpoints: Box<dyn Iterator<Item = OutPoint>> = Box::new(core::iter::empty());
|
||||
|
||||
if utxos {
|
||||
let utxos = tracker
|
||||
.full_utxos()
|
||||
.map(|(_, utxo)| utxo)
|
||||
.collect::<Vec<_>>();
|
||||
outpoints = Box::new(
|
||||
utxos
|
||||
.into_iter()
|
||||
.inspect(|utxo| {
|
||||
eprintln!(
|
||||
"Checking if outpoint {} (value: {}) has been spent",
|
||||
utxo.outpoint, utxo.txout.value
|
||||
);
|
||||
})
|
||||
.map(|utxo| utxo.outpoint),
|
||||
);
|
||||
};
|
||||
|
||||
let mut txids: Box<dyn Iterator<Item = Txid>> = Box::new(core::iter::empty());
|
||||
|
||||
if unconfirmed {
|
||||
let unconfirmed_txids = tracker
|
||||
.chain()
|
||||
.range_txids_by_height(TxHeight::Unconfirmed..)
|
||||
.map(|(_, txid)| *txid)
|
||||
.collect::<Vec<_>>();
|
||||
|
||||
txids = Box::new(unconfirmed_txids.into_iter().inspect(|txid| {
|
||||
eprintln!("Checking if {} is confirmed yet", txid);
|
||||
}));
|
||||
}
|
||||
|
||||
let local_chain = tracker.chain().checkpoints().clone();
|
||||
|
||||
// drop lock on tracker
|
||||
drop(tracker);
|
||||
|
||||
// we scan the desired spks **without** a lock on the tracker
|
||||
let scan = client
|
||||
.scan_without_keychain(
|
||||
&local_chain,
|
||||
spks,
|
||||
txids,
|
||||
outpoints,
|
||||
scan_options.parallel_requests,
|
||||
)
|
||||
.context("scanning the blockchain")?;
|
||||
|
||||
{
|
||||
// we take a short lock to apply the results to the tracker and db
|
||||
let tracker = &mut *keychain_tracker.lock().unwrap();
|
||||
let changeset = tracker.apply_update(scan.into())?;
|
||||
let db = &mut *db.lock().unwrap();
|
||||
db.append_changeset(&changeset)?;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
Ok(())
|
||||
}
|
||||
1
example-crates/keychain_tracker_example_cli/.gitignore
vendored
Normal file
1
example-crates/keychain_tracker_example_cli/.gitignore
vendored
Normal file
@@ -0,0 +1 @@
|
||||
/target
|
||||
16
example-crates/keychain_tracker_example_cli/Cargo.toml
Normal file
16
example-crates/keychain_tracker_example_cli/Cargo.toml
Normal file
@@ -0,0 +1,16 @@
|
||||
[package]
|
||||
name = "keychain_tracker_example_cli"
|
||||
version = "0.1.0"
|
||||
edition = "2021"
|
||||
|
||||
# See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html
|
||||
[dependencies]
|
||||
bdk_chain = { path = "../../crates/chain", features = ["serde", "miniscript"]}
|
||||
bdk_file_store = { path = "../../crates/file_store" }
|
||||
bdk_tmp_plan = { path = "../../nursery/tmp_plan" }
|
||||
bdk_coin_select = { path = "../../nursery/coin_select" }
|
||||
|
||||
clap = { version = "3.2.23", features = ["derive", "env"] }
|
||||
anyhow = "1"
|
||||
serde = { version = "1", features = ["derive"] }
|
||||
serde_json = { version = "^1.0" }
|
||||
1
example-crates/keychain_tracker_example_cli/README.md
Normal file
1
example-crates/keychain_tracker_example_cli/README.md
Normal file
@@ -0,0 +1 @@
|
||||
Provides common command line processing logic between examples using the `KeychainTracker`
|
||||
692
example-crates/keychain_tracker_example_cli/src/lib.rs
Normal file
692
example-crates/keychain_tracker_example_cli/src/lib.rs
Normal file
@@ -0,0 +1,692 @@
|
||||
pub extern crate anyhow;
|
||||
use anyhow::{anyhow, Context, Result};
|
||||
use bdk_chain::{
|
||||
bitcoin::{
|
||||
secp256k1::Secp256k1,
|
||||
util::sighash::{Prevouts, SighashCache},
|
||||
Address, LockTime, Network, Sequence, Transaction, TxIn, TxOut,
|
||||
},
|
||||
chain_graph::InsertTxError,
|
||||
keychain::{DerivationAdditions, KeychainChangeSet, KeychainTracker},
|
||||
miniscript::{
|
||||
descriptor::{DescriptorSecretKey, KeyMap},
|
||||
Descriptor, DescriptorPublicKey,
|
||||
},
|
||||
sparse_chain::{self, ChainPosition},
|
||||
DescriptorExt, FullTxOut,
|
||||
};
|
||||
use bdk_coin_select::{coin_select_bnb, CoinSelector, CoinSelectorOpt, WeightedValue};
|
||||
use bdk_file_store::KeychainStore;
|
||||
use clap::{Parser, Subcommand};
|
||||
use std::{
|
||||
cmp::Reverse, collections::HashMap, fmt::Debug, path::PathBuf, sync::Mutex, time::Duration,
|
||||
};
|
||||
|
||||
pub use bdk_file_store;
|
||||
pub use clap;
|
||||
|
||||
// Top-level CLI arguments shared by all the example binaries. `C` is the
// chain-source-specific subcommand type each example supplies.
// NOTE: new comments here are deliberately `//` (not `///`) so clap's
// derived help text is unchanged.
#[derive(Parser)]
#[clap(author, version, about, long_about = None)]
#[clap(propagate_version = true)]
pub struct Args<C: clap::Subcommand> {
    // The wallet's external (receive) descriptor.
    #[clap(env = "DESCRIPTOR")]
    pub descriptor: String,
    // Optional internal (change) descriptor; when absent the external
    // keychain is reused for change (see `create_tx`).
    #[clap(env = "CHANGE_DESCRIPTOR")]
    pub change_descriptor: Option<String>,

    // Bitcoin network to operate on (defaults to signet).
    #[clap(env = "BITCOIN_NETWORK", long, default_value = "signet")]
    pub network: Network,

    // Path of the flat-file changeset database.
    #[clap(env = "BDK_DB_PATH", long, default_value = ".bdk_example_db")]
    pub db_path: PathBuf,

    // Checkpoint limit applied to the tracker's sparse chain.
    #[clap(env = "BDK_CP_LIMIT", long, default_value = "20")]
    pub cp_limit: usize,

    // The subcommand to run.
    #[clap(subcommand)]
    pub command: Commands<C>,
}
|
||||
|
||||
// The set of commands common to every example, plus a flattened slot
// (`ChainSpecific`) for commands provided by the concrete chain source.
// NOTE: new comments are `//` (not `///`) to avoid altering clap help text.
#[derive(Subcommand, Debug, Clone)]
pub enum Commands<C: clap::Subcommand> {
    // Commands defined by the example binary (e.g. sync/scan).
    #[clap(flatten)]
    ChainSpecific(C),
    /// Address generation and inspection.
    Address {
        #[clap(subcommand)]
        addr_cmd: AddressCmd,
    },
    /// Get the wallet balance.
    Balance,
    /// TxOut related commands.
    #[clap(name = "txout")]
    TxOut {
        #[clap(subcommand)]
        txout_cmd: TxOutCmd,
    },
    /// Send coins to an address.
    Send {
        // Amount to send, in satoshis.
        value: u64,
        // Recipient address (must match the configured network).
        address: Address,
        // Coin selection strategy; parsed via `CoinSelectionAlgo::from_str`.
        #[clap(short, default_value = "largest-first")]
        coin_select: CoinSelectionAlgo,
    },
}
|
||||
|
||||
/// Coin-selection strategies offered by the `send` command.
///
/// The first four simply sort the candidate utxos before greedy selection;
/// `BranchAndBound` runs a bounded BnB search (see `create_tx`).
#[derive(Clone, Debug)]
pub enum CoinSelectionAlgo {
    /// Spend the largest-value utxos first.
    LargestFirst,
    /// Spend the smallest-value utxos first.
    SmallestFirst,
    /// Spend utxos with the oldest chain position first.
    OldestFirst,
    /// Spend utxos with the newest chain position first.
    NewestFirst,
    /// Branch-and-bound selection; falls back to greedy if no solution is found.
    BranchAndBound,
}
|
||||
|
||||
impl Default for CoinSelectionAlgo {
|
||||
fn default() -> Self {
|
||||
Self::LargestFirst
|
||||
}
|
||||
}
|
||||
|
||||
impl core::str::FromStr for CoinSelectionAlgo {
|
||||
type Err = anyhow::Error;
|
||||
|
||||
fn from_str(s: &str) -> Result<Self, Self::Err> {
|
||||
use CoinSelectionAlgo::*;
|
||||
Ok(match s {
|
||||
"largest-first" => LargestFirst,
|
||||
"smallest-first" => SmallestFirst,
|
||||
"oldest-first" => OldestFirst,
|
||||
"newest-first" => NewestFirst,
|
||||
"bnb" => BranchAndBound,
|
||||
unknown => return Err(anyhow!("unknown coin selection algorithm '{}'", unknown)),
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
impl core::fmt::Display for CoinSelectionAlgo {
|
||||
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
|
||||
use CoinSelectionAlgo::*;
|
||||
write!(
|
||||
f,
|
||||
"{}",
|
||||
match self {
|
||||
LargestFirst => "largest-first",
|
||||
SmallestFirst => "smallest-first",
|
||||
OldestFirst => "oldest-first",
|
||||
NewestFirst => "newest-first",
|
||||
BranchAndBound => "bnb",
|
||||
}
|
||||
)
|
||||
}
|
||||
}
|
||||
|
||||
// Subcommands of `address`. New comments are `//` so clap help is unchanged.
#[derive(Subcommand, Debug, Clone)]
pub enum AddressCmd {
    /// Get the next unused address.
    Next,
    /// Get a new address regardless of the existing unused addresses.
    New,
    /// List all addresses
    List {
        // List change (internal) addresses instead of external ones.
        #[clap(long)]
        change: bool,
    },
    // Prints the last revealed derivation index of each keychain.
    Index,
}
|
||||
|
||||
// Subcommands of `txout`. New comments are `//` so clap help is unchanged.
// The spent/unspent and confirmed/unconfirmed flags are independent filters;
// see `run_txo_cmd` for how flag combinations are resolved.
#[derive(Subcommand, Debug, Clone)]
pub enum TxOutCmd {
    List {
        /// Return only spent outputs.
        #[clap(short, long)]
        spent: bool,
        /// Return only unspent outputs.
        #[clap(short, long)]
        unspent: bool,
        /// Return only confirmed outputs.
        #[clap(long)]
        confirmed: bool,
        /// Return only unconfirmed outputs.
        #[clap(long)]
        unconfirmed: bool,
    },
}
|
||||
|
||||
/// Identifies which of the wallet's two descriptors an spk belongs to.
///
/// `Ord` matters: `External < Internal` by declaration order.
#[derive(
    Debug, Clone, Copy, PartialOrd, Ord, PartialEq, Eq, serde::Deserialize, serde::Serialize,
)]
pub enum Keychain {
    /// Receive (public-facing) keychain.
    External,
    /// Change keychain.
    Internal,
}
|
||||
|
||||
impl core::fmt::Display for Keychain {
|
||||
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
|
||||
match self {
|
||||
Keychain::External => write!(f, "external"),
|
||||
Keychain::Internal => write!(f, "internal"),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
/// A structure defining the output of an [`AddressCmd`] execution.
#[derive(serde::Serialize, serde::Deserialize)]
pub struct AddrsOutput {
    // Keychain the address belongs to ("external"/"internal").
    keychain: String,
    // Derivation index of the address within its keychain.
    index: u32,
    // The derived address itself.
    addrs: Address,
    // Whether the address has been marked used.
    used: bool,
}
|
||||
|
||||
/// Executes an [`AddressCmd`] against the shared tracker and database.
///
/// `Next`/`New` derive an address from the external keychain and persist the
/// resulting derivation additions *before* printing the address, so a handed-out
/// address is never lost on restart. `List`/`Index` are read-only.
pub fn run_address_cmd<P>(
    tracker: &Mutex<KeychainTracker<Keychain, P>>,
    db: &Mutex<KeychainStore<Keychain, P>>,
    addr_cmd: AddressCmd,
    network: Network,
) -> Result<()>
where
    P: bdk_chain::sparse_chain::ChainPosition,
    KeychainChangeSet<Keychain, P>: serde::Serialize + serde::de::DeserializeOwned,
{
    let mut tracker = tracker.lock().unwrap();
    let txout_index = &mut tracker.txout_index;

    // Only the deriving commands produce an ((index, spk), additions) pair;
    // the read-only commands are handled in the match further down.
    let addr_cmmd_output = match addr_cmd {
        AddressCmd::Next => Some(txout_index.next_unused_spk(&Keychain::External)),
        AddressCmd::New => Some(txout_index.reveal_next_spk(&Keychain::External)),
        _ => None,
    };

    if let Some(((index, spk), additions)) = addr_cmmd_output {
        let mut db = db.lock().unwrap();
        // update database since we're about to give out a new address
        db.append_changeset(&additions.into())?;

        // Clone to drop the borrow of `txout_index`.
        let spk = spk.clone();
        let address =
            Address::from_script(&spk, network).expect("should always be able to derive address");
        // Index goes to stderr, address to stdout, so the address is script-friendly.
        eprintln!("This is the address at index {}", index);
        println!("{}", address);
    }

    match addr_cmd {
        AddressCmd::Next | AddressCmd::New => {
            /* covered */
            Ok(())
        }
        AddressCmd::Index => {
            // Print the last revealed derivation index per keychain.
            for (keychain, derivation_index) in txout_index.last_revealed_indices() {
                println!("{:?}: {}", keychain, derivation_index);
            }
            Ok(())
        }
        AddressCmd::List { change } => {
            let target_keychain = match change {
                true => Keychain::Internal,
                false => Keychain::External,
            };
            for (index, spk) in txout_index.revealed_spks_of_keychain(&target_keychain) {
                let address = Address::from_script(spk, network)
                    .expect("should always be able to derive address");
                println!(
                    "{:?} {} used:{}",
                    index,
                    address,
                    txout_index.is_used(&(target_keychain, index))
                );
            }
            Ok(())
        }
    }
}
|
||||
|
||||
/// Prints the wallet's confirmed and unconfirmed balances, in satoshis.
pub fn run_balance_cmd<P: ChainPosition>(tracker: &Mutex<KeychainTracker<Keychain, P>>) {
    let tracker = tracker.lock().unwrap();

    // Tally every unspent txout into one of the two buckets based on
    // whether its chain position has a confirmed height.
    let mut confirmed = 0;
    let mut unconfirmed = 0;
    for (_, utxo) in tracker.full_utxos() {
        if utxo.chain_position.height().is_confirmed() {
            confirmed += utxo.txout.value;
        } else {
            unconfirmed += utxo.txout.value;
        }
    }

    println!("confirmed: {}", confirmed);
    println!("unconfirmed: {}", unconfirmed);
}
|
||||
|
||||
/// Lists tracked txouts, filtered by spentness and confirmation status.
///
/// Note on flag combinations: passing both flags of a pair (e.g. `--spent`
/// and `--unspent`), or neither, disables that filter entirely — the `_`
/// arms below fall back to the unfiltered iterator.
pub fn run_txo_cmd<K: Debug + Clone + Ord, P: ChainPosition>(
    txout_cmd: TxOutCmd,
    tracker: &Mutex<KeychainTracker<K, P>>,
    network: Network,
) {
    match txout_cmd {
        TxOutCmd::List {
            unspent,
            spent,
            confirmed,
            unconfirmed,
        } => {
            let tracker = tracker.lock().unwrap();
            // First pass: spent/unspent filter.
            #[allow(clippy::type_complexity)] // FIXME
            let txouts: Box<dyn Iterator<Item = (&(K, u32), FullTxOut<P>)>> = match (unspent, spent)
            {
                (true, false) => Box::new(tracker.full_utxos()),
                (false, true) => Box::new(
                    tracker
                        .full_txouts()
                        .filter(|(_, txout)| txout.spent_by.is_some()),
                ),
                _ => Box::new(tracker.full_txouts()),
            };

            // Second pass: confirmed/unconfirmed filter, layered on the first.
            #[allow(clippy::type_complexity)] // FIXME
            let txouts: Box<dyn Iterator<Item = (&(K, u32), FullTxOut<P>)>> =
                match (confirmed, unconfirmed) {
                    (true, false) => Box::new(
                        txouts.filter(|(_, txout)| txout.chain_position.height().is_confirmed()),
                    ),
                    (false, true) => Box::new(
                        txouts.filter(|(_, txout)| !txout.chain_position.height().is_confirmed()),
                    ),
                    _ => txouts,
                };

            // One line per txout: index, value, outpoint, address, spend status.
            for (spk_index, full_txout) in txouts {
                let address =
                    Address::from_script(&full_txout.txout.script_pubkey, network).unwrap();

                println!(
                    "{:?} {} {} {} spent:{:?}",
                    spk_index,
                    full_txout.txout.value,
                    full_txout.outpoint,
                    address,
                    full_txout.spent_by
                )
            }
        }
    }
}
|
||||
|
||||
/// Builds and signs a transaction paying `value` sats to `address`.
///
/// Returns the signed transaction plus, when a change output was added, the
/// derivation additions and `(keychain, index)` of the change spk so the
/// caller can persist/mark them (see `handle_commands`).
///
/// The statement order below is load-bearing: candidates are planned, sorted,
/// selected, then a change output is derived before selection options are
/// finalized, and signing happens against a frozen clone of the transaction.
#[allow(clippy::type_complexity)] // FIXME
pub fn create_tx<P: ChainPosition>(
    value: u64,
    address: Address,
    coin_select: CoinSelectionAlgo,
    keychain_tracker: &mut KeychainTracker<Keychain, P>,
    keymap: &HashMap<DescriptorPublicKey, DescriptorSecretKey>,
) -> Result<(
    Transaction,
    Option<(DerivationAdditions<Keychain>, (Keychain, u32))>,
)> {
    let mut additions = DerivationAdditions::default();

    // We can satisfy any input whose key we hold in the keymap.
    let assets = bdk_tmp_plan::Assets {
        keys: keymap.iter().map(|(pk, _)| pk.clone()).collect(),
        ..Default::default()
    };

    // TODO use planning module
    let mut candidates = planned_utxos(keychain_tracker, &assets).collect::<Vec<_>>();

    // apply coin selection algorithm
    // (BnB does not pre-sort; everything else sorts then selects greedily)
    match coin_select {
        CoinSelectionAlgo::LargestFirst => {
            candidates.sort_by_key(|(_, utxo)| Reverse(utxo.txout.value))
        }
        CoinSelectionAlgo::SmallestFirst => candidates.sort_by_key(|(_, utxo)| utxo.txout.value),
        CoinSelectionAlgo::OldestFirst => {
            candidates.sort_by_key(|(_, utxo)| utxo.chain_position.clone())
        }
        CoinSelectionAlgo::NewestFirst => {
            candidates.sort_by_key(|(_, utxo)| Reverse(utxo.chain_position.clone()))
        }
        CoinSelectionAlgo::BranchAndBound => {}
    }

    // turn the txos we chose into weight and value
    let wv_candidates = candidates
        .iter()
        .map(|(plan, utxo)| {
            WeightedValue::new(
                utxo.txout.value,
                plan.expected_weight() as _,
                plan.witness_version().is_some(),
            )
        })
        .collect();

    let mut outputs = vec![TxOut {
        value,
        script_pubkey: address.script_pubkey(),
    }];

    // Change goes to the internal keychain if one was configured,
    // otherwise back to the external keychain.
    let internal_keychain = if keychain_tracker
        .txout_index
        .keychains()
        .get(&Keychain::Internal)
        .is_some()
    {
        Keychain::Internal
    } else {
        Keychain::External
    };

    let ((change_index, change_script), change_additions) = keychain_tracker
        .txout_index
        .next_unused_spk(&internal_keychain);
    additions.append(change_additions);

    // Clone to drop the immutable reference.
    let change_script = change_script.clone();

    // Plan how the change output would be spent, to size it for fee purposes.
    let change_plan = bdk_tmp_plan::plan_satisfaction(
        &keychain_tracker
            .txout_index
            .keychains()
            .get(&internal_keychain)
            .expect("must exist")
            .at_derivation_index(change_index),
        &assets,
    )
    .expect("failed to obtain change plan");

    // Placeholder change output; its value is filled in after selection.
    let mut change_output = TxOut {
        value: 0,
        script_pubkey: change_script,
    };

    let cs_opts = CoinSelectorOpt {
        target_feerate: 0.5,
        // Never create change below the change descriptor's dust threshold.
        min_drain_value: keychain_tracker
            .txout_index
            .keychains()
            .get(&internal_keychain)
            .expect("must exist")
            .dust_value(),
        ..CoinSelectorOpt::fund_outputs(
            &outputs,
            &change_output,
            change_plan.expected_weight() as u32,
        )
    };

    // TODO: How can we make it easy to shuffle in order of inputs and outputs here?
    // apply coin selection by saying we need to fund these outputs
    let mut coin_selector = CoinSelector::new(&wv_candidates, &cs_opts);

    // just select coins in the order provided until we have enough
    // only use the first result (least waste)
    let selection = match coin_select {
        CoinSelectionAlgo::BranchAndBound => {
            // Bounded 10s BnB search; fall back to greedy if it finds nothing.
            coin_select_bnb(Duration::from_secs(10), coin_selector.clone())
                .map_or_else(|| coin_selector.select_until_finished(), |cs| cs.finish())?
        }
        _ => coin_selector.select_until_finished()?,
    };
    let (_, selection_meta) = selection.best_strategy();

    // get the selected utxos
    let selected_txos = selection.apply_selection(&candidates).collect::<Vec<_>>();

    if let Some(drain_value) = selection_meta.drain_value {
        change_output.value = drain_value;
        // if the selection tells us to use change and the change value is sufficient, we add it as an output
        outputs.push(change_output)
    }

    let mut transaction = Transaction {
        version: 0x02,
        // Anti-fee-sniping: lock to the current tip height when known.
        lock_time: keychain_tracker
            .chain()
            .latest_checkpoint()
            .and_then(|block_id| LockTime::from_height(block_id.height).ok())
            .unwrap_or(LockTime::ZERO)
            .into(),
        input: selected_txos
            .iter()
            .map(|(_, utxo)| TxIn {
                previous_output: utxo.outpoint,
                sequence: Sequence::ENABLE_RBF_NO_LOCKTIME,
                ..Default::default()
            })
            .collect(),
        output: outputs,
    };

    let prevouts = selected_txos
        .iter()
        .map(|(_, utxo)| utxo.txout.clone())
        .collect::<Vec<_>>();
    let sighash_prevouts = Prevouts::All(&prevouts);

    // first, set tx values for the plan so that we don't change them while signing
    for (i, (plan, _)) in selected_txos.iter().enumerate() {
        if let Some(sequence) = plan.required_sequence() {
            transaction.input[i].sequence = sequence
        }
    }

    // create a short lived transaction
    // (a frozen clone so the sighash stays stable while we mutate `transaction`)
    let _sighash_tx = transaction.clone();
    let mut sighash_cache = SighashCache::new(&_sighash_tx);

    for (i, (plan, _)) in selected_txos.iter().enumerate() {
        let requirements = plan.requirements();
        let mut auth_data = bdk_tmp_plan::SatisfactionMaterial::default();
        assert!(
            !requirements.requires_hash_preimages(),
            "can't have hash pre-images since we didn't provide any."
        );
        assert!(
            requirements.signatures.sign_with_keymap(
                i,
                keymap,
                &sighash_prevouts,
                None,
                None,
                &mut sighash_cache,
                &mut auth_data,
                &Secp256k1::default(),
            )?,
            "we should have signed with this input."
        );

        // Turn the satisfaction material into final witness/script_sig.
        match plan.try_complete(&auth_data) {
            bdk_tmp_plan::PlanState::Complete {
                final_script_sig,
                final_script_witness,
            } => {
                if let Some(witness) = final_script_witness {
                    transaction.input[i].witness = witness;
                }

                if let Some(script_sig) = final_script_sig {
                    transaction.input[i].script_sig = script_sig;
                }
            }
            bdk_tmp_plan::PlanState::Incomplete(_) => {
                return Err(anyhow!(
                    "we weren't able to complete the plan with our keys."
                ));
            }
        }
    }

    // Only report change info when a change output actually exists.
    let change_info = if selection_meta.drain_value.is_some() {
        Some((additions, (internal_keychain, change_index)))
    } else {
        None
    };

    Ok((transaction, change_info))
}
|
||||
|
||||
/// Dispatches a parsed [`Commands`] value to the matching handler.
///
/// The `Send` arm is the interesting one: change derivation is persisted
/// *before* broadcasting (so a restart can still find the tx), the change
/// address is marked used while the broadcast is in flight, and on broadcast
/// failure the mark is rolled back.
pub fn handle_commands<C: clap::Subcommand, P>(
    command: Commands<C>,
    broadcast: impl FnOnce(&Transaction) -> Result<()>,
    // we Mutex around these not because we need them for a simple CLI app but to demonstrate how
    // all the stuff we're doing can be made thread-safe and not keep locks up over an IO bound.
    tracker: &Mutex<KeychainTracker<Keychain, P>>,
    store: &Mutex<KeychainStore<Keychain, P>>,
    network: Network,
    keymap: &HashMap<DescriptorPublicKey, DescriptorSecretKey>,
) -> Result<()>
where
    P: ChainPosition,
    KeychainChangeSet<Keychain, P>: serde::Serialize + serde::de::DeserializeOwned,
{
    match command {
        // TODO: Make these functions return stuffs
        Commands::Address { addr_cmd } => run_address_cmd(tracker, store, addr_cmd, network),
        Commands::Balance => {
            run_balance_cmd(tracker);
            Ok(())
        }
        Commands::TxOut { txout_cmd } => {
            run_txo_cmd(txout_cmd, tracker, network);
            Ok(())
        }
        Commands::Send {
            value,
            address,
            coin_select,
        } => {
            let (transaction, change_index) = {
                // take mutable ref to construct tx -- it is only open for a short time while building it.
                let tracker = &mut *tracker.lock().unwrap();
                let (transaction, change_info) =
                    create_tx(value, address, coin_select, tracker, keymap)?;

                if let Some((change_derivation_changes, (change_keychain, index))) = change_info {
                    // We must first persist to disk the fact that we've got a new address from the
                    // change keychain so future scans will find the tx we're about to broadcast.
                    // If we're unable to persist this, then we don't want to broadcast.
                    let store = &mut *store.lock().unwrap();
                    store.append_changeset(&change_derivation_changes.into())?;

                    // We don't want other callers/threads to use this address while we're using it
                    // but we also don't want to scan the tx we just created because it's not
                    // technically in the blockchain yet.
                    tracker.txout_index.mark_used(&change_keychain, index);
                    (transaction, Some((change_keychain, index)))
                } else {
                    (transaction, None)
                }
            };
            // Note: no lock is held across the (IO-bound) broadcast call.

            match (broadcast)(&transaction) {
                Ok(_) => {
                    println!("Broadcasted Tx : {}", transaction.txid());
                    let mut tracker = tracker.lock().unwrap();
                    match tracker.insert_tx(transaction.clone(), P::unconfirmed()) {
                        Ok(changeset) => {
                            let store = &mut *store.lock().unwrap();
                            // We know the tx is at least unconfirmed now. Note if persisting here fails,
                            // it's not a big deal since we can always find it again from the
                            // blockchain.
                            store.append_changeset(&changeset)?;
                            Ok(())
                        }
                        Err(e) => match e {
                            InsertTxError::Chain(e) => match e {
                                // TODO: add insert_unconfirmed_tx to the chaingraph and sparsechain
                                sparse_chain::InsertTxError::TxTooHigh { .. } => unreachable!("we are inserting at unconfirmed position"),
                                sparse_chain::InsertTxError::TxMovedUnexpectedly { txid, original_pos, ..} => Err(anyhow!("the tx we created {} has already been confirmed at block {:?}", txid, original_pos)),
                            },
                            InsertTxError::UnresolvableConflict(e) => Err(e).context("another tx that conflicts with the one we tried to create has been confirmed"),
                        }
                    }
                }
                Err(e) => {
                    let tracker = &mut *tracker.lock().unwrap();
                    if let Some((keychain, index)) = change_index {
                        // We failed to broadcast, so allow our change address to be used in the future
                        tracker.txout_index.unmark_used(&keychain, index);
                    }
                    Err(e)
                }
            }
        }
        Commands::ChainSpecific(_) => {
            todo!("example code is meant to handle this!")
        }
    }
}
|
||||
|
||||
/// Parses CLI args and environment, builds the keychain tracker, and loads
/// any persisted changesets from the flat-file store.
///
/// On a partial/failed DB load this warns and continues with whatever was
/// recovered, rather than erroring out — a rescan can repair the rest.
#[allow(clippy::type_complexity)] // FIXME
pub fn init<C: clap::Subcommand, P>() -> anyhow::Result<(
    Args<C>,
    KeyMap,
    // These don't need to have mutexes around them, but we want the cli example code to make it obvious how they
    // are thread-safe, forcing the example developers to show where they would lock and unlock things.
    Mutex<KeychainTracker<Keychain, P>>,
    Mutex<KeychainStore<Keychain, P>>,
)>
where
    P: sparse_chain::ChainPosition,
    KeychainChangeSet<Keychain, P>: serde::Serialize + serde::de::DeserializeOwned,
{
    let args = Args::<C>::parse();
    let secp = Secp256k1::default();
    // Parse the external descriptor; its secret keys seed the keymap.
    let (descriptor, mut keymap) =
        Descriptor::<DescriptorPublicKey>::parse_descriptor(&secp, &args.descriptor)?;

    let mut tracker = KeychainTracker::default();
    tracker.set_checkpoint_limit(Some(args.cp_limit));

    tracker
        .txout_index
        .add_keychain(Keychain::External, descriptor);

    // The change descriptor is optional; when given, merge its keys into the
    // same keymap and register it as the internal keychain.
    let internal = args
        .change_descriptor
        .clone()
        .map(|descriptor| Descriptor::<DescriptorPublicKey>::parse_descriptor(&secp, &descriptor))
        .transpose()?;
    if let Some((internal_descriptor, internal_keymap)) = internal {
        keymap.extend(internal_keymap);
        tracker
            .txout_index
            .add_keychain(Keychain::Internal, internal_descriptor);
    };

    let mut db = KeychainStore::<Keychain, P>::new_from_path(args.db_path.as_path())?;

    // Best-effort load: report how far we got and keep going on failure.
    if let Err(e) = db.load_into_keychain_tracker(&mut tracker) {
        match tracker.chain().latest_checkpoint() {
            Some(checkpoint) => eprintln!("Failed to load all changesets from {}. Last checkpoint was at height {}. Error: {}", args.db_path.display(), checkpoint.height, e),
            None => eprintln!("Failed to load any checkpoints from {}: {}", args.db_path.display(), e),

        }
        eprintln!("⚠ Consider running a rescan of chain data.");
    }

    Ok((args, keymap, Mutex::new(tracker), Mutex::new(db)))
}
|
||||
|
||||
pub fn planned_utxos<'a, AK: bdk_tmp_plan::CanDerive + Clone, P: ChainPosition>(
|
||||
tracker: &'a KeychainTracker<Keychain, P>,
|
||||
assets: &'a bdk_tmp_plan::Assets<AK>,
|
||||
) -> impl Iterator<Item = (bdk_tmp_plan::Plan<AK>, FullTxOut<P>)> + 'a {
|
||||
tracker
|
||||
.full_utxos()
|
||||
.filter_map(move |((keychain, derivation_index), full_txout)| {
|
||||
Some((
|
||||
bdk_tmp_plan::plan_satisfaction(
|
||||
&tracker
|
||||
.txout_index
|
||||
.keychains()
|
||||
.get(keychain)
|
||||
.expect("must exist since we have a utxo for it")
|
||||
.at_derivation_index(*derivation_index),
|
||||
assets,
|
||||
)?,
|
||||
full_txout,
|
||||
))
|
||||
})
|
||||
}
|
||||
9
example-crates/wallet_electrum/Cargo.toml
Normal file
9
example-crates/wallet_electrum/Cargo.toml
Normal file
@@ -0,0 +1,9 @@
|
||||
[package]
|
||||
name = "wallet_electrum_example"
|
||||
version = "0.1.0"
|
||||
edition = "2021"
|
||||
|
||||
[dependencies]
|
||||
bdk = { path = "../../crates/bdk" }
|
||||
bdk_electrum = { path = "../../crates/electrum" }
|
||||
bdk_file_store = { path = "../../crates/file_store" }
|
||||
104
example-crates/wallet_electrum/src/main.rs
Normal file
104
example-crates/wallet_electrum/src/main.rs
Normal file
@@ -0,0 +1,104 @@
|
||||
use std::{io::Write, str::FromStr};
|
||||
|
||||
use bdk::{
|
||||
bitcoin::{Address, Network},
|
||||
SignOptions, Wallet,
|
||||
};
|
||||
use bdk_electrum::{
|
||||
electrum_client::{self, ElectrumApi},
|
||||
ElectrumExt,
|
||||
};
|
||||
use bdk_file_store::KeychainStore;
|
||||
|
||||
// Amount (sats) the demo sends back to the faucet.
const SEND_AMOUNT: u64 = 5000;
// Stop scanning a keychain after this many consecutive unused spks.
const STOP_GAP: usize = 50;
// Number of script pubkeys per Electrum batch request.
const BATCH_SIZE: usize = 5;

/// End-to-end Electrum demo: open/create a testnet wallet, sync it against
/// a public Electrum server, then (if funded) send `SEND_AMOUNT` sats back
/// to the signet/testnet faucet address.
fn main() -> Result<(), Box<dyn std::error::Error>> {
    println!("Hello, world!");

    // Persist changesets in a temp-dir flat file so runs are resumable.
    let db_path = std::env::temp_dir().join("bdk-electrum-example");
    let db = KeychainStore::new_from_path(db_path)?;
    // Hard-coded demo descriptors (testnet keys) — external and change.
    let external_descriptor = "wpkh(tprv8ZgxMBicQKsPdy6LMhUtFHAgpocR8GC6QmwMSFpZs7h6Eziw3SpThFfczTDh5rW2krkqffa11UpX3XkeTTB2FvzZKWXqPY54Y6Rq4AQ5R8L/84'/0'/0'/0/*)";
    let internal_descriptor = "wpkh(tprv8ZgxMBicQKsPdy6LMhUtFHAgpocR8GC6QmwMSFpZs7h6Eziw3SpThFfczTDh5rW2krkqffa11UpX3XkeTTB2FvzZKWXqPY54Y6Rq4AQ5R8L/84'/0'/0'/1/*)";

    let mut wallet = Wallet::new(
        external_descriptor,
        Some(internal_descriptor),
        db,
        Network::Testnet,
    )?;

    let address = wallet.get_address(bdk::wallet::AddressIndex::New);
    println!("Generated Address: {}", address);

    let balance = wallet.get_balance();
    println!("Wallet balance before syncing: {} sats", balance.total());

    print!("Syncing...");
    // Scanning the chain...
    let electrum_url = "ssl://electrum.blockstream.info:60002";
    let client = electrum_client::Client::new(electrum_url)?;
    let local_chain = wallet.checkpoints();
    // Wrap each keychain's spk iterator so scan progress is printed live.
    let spks = wallet
        .spks_of_all_keychains()
        .into_iter()
        .map(|(k, spks)| {
            let mut first = true;
            (
                k,
                spks.inspect(move |(spk_i, _)| {
                    if first {
                        first = false;
                        print!("\nScanning keychain [{:?}]:", k);
                    }
                    print!(" {}", spk_i);
                    let _ = std::io::stdout().flush();
                }),
            )
        })
        .collect();
    // Full scan (no extra txids/outpoints), then resolve confirmation times.
    let electrum_update = client
        .scan(
            local_chain,
            spks,
            core::iter::empty(),
            core::iter::empty(),
            STOP_GAP,
            BATCH_SIZE,
        )?
        .into_confirmation_time_update(&client)?;
    println!();
    // Fetch any full transactions the update references but we don't have yet.
    let new_txs = client.batch_transaction_get(electrum_update.missing_full_txs(&wallet))?;
    let update = electrum_update.into_keychain_scan(new_txs, &wallet)?;
    wallet.apply_update(update)?;
    wallet.commit()?;

    let balance = wallet.get_balance();
    println!("Wallet balance after syncing: {} sats", balance.total());

    // Exit cleanly (not an error) when the wallet is not funded yet.
    if balance.total() < SEND_AMOUNT {
        println!(
            "Please send at least {} sats to the receiving address",
            SEND_AMOUNT
        );
        std::process::exit(0);
    }

    let faucet_address = Address::from_str("mkHS9ne12qx9pS9VojpwU5xtRd4T7X7ZUt")?;

    let mut tx_builder = wallet.build_tx();
    tx_builder
        .add_recipient(faucet_address.script_pubkey(), SEND_AMOUNT)
        .enable_rbf();

    let (mut psbt, _) = tx_builder.finish()?;
    let finalized = wallet.sign(&mut psbt, SignOptions::default())?;
    assert!(finalized);

    let tx = psbt.extract_tx();
    client.transaction_broadcast(&tx)?;
    println!("Tx broadcasted! Txid: {}", tx.txid());

    Ok(())
}
|
||||
12
example-crates/wallet_esplora/Cargo.toml
Normal file
12
example-crates/wallet_esplora/Cargo.toml
Normal file
@@ -0,0 +1,12 @@
|
||||
[package]
|
||||
name = "bdk-esplora-wallet-example"
|
||||
version = "0.1.0"
|
||||
edition = "2021"
|
||||
publish = false
|
||||
|
||||
# See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html
|
||||
|
||||
[dependencies]
|
||||
bdk = { path = "../../crates/bdk" }
|
||||
bdk_esplora = { path = "../../crates/esplora", features = ["blocking"] }
|
||||
bdk_file_store = { path = "../../crates/file_store" }
|
||||
96
example-crates/wallet_esplora/src/main.rs
Normal file
96
example-crates/wallet_esplora/src/main.rs
Normal file
@@ -0,0 +1,96 @@
|
||||
use bdk::{
|
||||
bitcoin::{Address, Network},
|
||||
wallet::AddressIndex,
|
||||
SignOptions, Wallet,
|
||||
};
|
||||
use bdk_esplora::esplora_client;
|
||||
use bdk_esplora::EsploraExt;
|
||||
use bdk_file_store::KeychainStore;
|
||||
use std::{io::Write, str::FromStr};
|
||||
|
||||
// Amount (sats) the demo sends back to the faucet.
const SEND_AMOUNT: u64 = 5000;
// Stop scanning a keychain after this many consecutive unused spks.
const STOP_GAP: usize = 50;
// Number of concurrent Esplora HTTP requests.
const PARALLEL_REQUESTS: usize = 5;

/// End-to-end Esplora demo: open/create a testnet wallet, sync it against a
/// public Esplora instance, then (if funded) send `SEND_AMOUNT` sats back to
/// the faucet address.
fn main() -> Result<(), Box<dyn std::error::Error>> {
    // Persist changesets in a temp-dir flat file so runs are resumable.
    let db_path = std::env::temp_dir().join("bdk-esplora-example");
    let db = KeychainStore::new_from_path(db_path)?;
    // Hard-coded demo descriptors (testnet keys) — external and change.
    let external_descriptor = "wpkh(tprv8ZgxMBicQKsPdy6LMhUtFHAgpocR8GC6QmwMSFpZs7h6Eziw3SpThFfczTDh5rW2krkqffa11UpX3XkeTTB2FvzZKWXqPY54Y6Rq4AQ5R8L/84'/0'/0'/0/*)";
    let internal_descriptor = "wpkh(tprv8ZgxMBicQKsPdy6LMhUtFHAgpocR8GC6QmwMSFpZs7h6Eziw3SpThFfczTDh5rW2krkqffa11UpX3XkeTTB2FvzZKWXqPY54Y6Rq4AQ5R8L/84'/0'/0'/1/*)";

    let mut wallet = Wallet::new(
        external_descriptor,
        Some(internal_descriptor),
        db,
        Network::Testnet,
    )?;

    let address = wallet.get_address(AddressIndex::New);
    println!("Generated Address: {}", address);

    let balance = wallet.get_balance();
    println!("Wallet balance before syncing: {} sats", balance.total());

    print!("Syncing...");
    // Scanning the chain...
    let esplora_url = "https://mempool.space/testnet/api";
    let client = esplora_client::Builder::new(esplora_url).build_blocking()?;
    let checkpoints = wallet.checkpoints();
    // Wrap each keychain's spk iterator so scan progress is printed live.
    let spks = wallet
        .spks_of_all_keychains()
        .into_iter()
        .map(|(k, spks)| {
            let mut first = true;
            (
                k,
                spks.inspect(move |(spk_i, _)| {
                    if first {
                        first = false;
                        print!("\nScanning keychain [{:?}]:", k);
                    }
                    print!(" {}", spk_i);
                    let _ = std::io::stdout().flush();
                }),
            )
        })
        .collect();
    // Full scan; no extra txids/outpoints to watch beyond the keychain spks.
    let update = client.scan(
        checkpoints,
        spks,
        core::iter::empty(),
        core::iter::empty(),
        STOP_GAP,
        PARALLEL_REQUESTS,
    )?;
    println!();
    wallet.apply_update(update)?;
    wallet.commit()?;

    let balance = wallet.get_balance();
    println!("Wallet balance after syncing: {} sats", balance.total());

    // Exit cleanly (not an error) when the wallet is not funded yet.
    if balance.total() < SEND_AMOUNT {
        println!(
            "Please send at least {} sats to the receiving address",
            SEND_AMOUNT
        );
        std::process::exit(0);
    }

    let faucet_address = Address::from_str("mkHS9ne12qx9pS9VojpwU5xtRd4T7X7ZUt")?;

    let mut tx_builder = wallet.build_tx();
    tx_builder
        .add_recipient(faucet_address.script_pubkey(), SEND_AMOUNT)
        .enable_rbf();

    let (mut psbt, _) = tx_builder.finish()?;
    let finalized = wallet.sign(&mut psbt, SignOptions::default())?;
    assert!(finalized);

    let tx = psbt.extract_tx();
    client.broadcast(&tx)?;
    println!("Tx broadcasted! Txid: {}", tx.txid());

    Ok(())
}
|
||||
Some files were not shown because too many files have changed in this diff Show More
Reference in New Issue
Block a user