#!/usr/bin/env python
#
# Electrum - lightweight Bitcoin client
# Copyright (C) 2014 Thomas Voegtlin
#
# Permission is hereby granted, free of charge, to any person
# obtaining a copy of this software and associated documentation files
# (the "Software"), to deal in the Software without restriction,
# including without limitation the rights to use, copy, modify, merge,
# publish, distribute, sublicense, and/or sell copies of the Software,
# and to permit persons to whom the Software is furnished to do so,
# subject to the following conditions:
#
# The above copyright notice and this permission notice shall be
# included in all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
# EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
# MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS
# BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN
# ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN
# CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
# SOFTWARE.
import asyncio
import hashlib
from typing import Dict, List, TYPE_CHECKING, Tuple
from collections import defaultdict
import logging

from aiorpcx import TaskGroup, run_in_thread, RPCError

from .transaction import Transaction
from .util import bh2u, make_aiohttp_session, NetworkJobOnDefaultServer
from .bitcoin import address_to_scripthash, is_address
from .network import UntrustedServerReturnedError
from .logging import Logger
from .interface import GracefulDisconnect

if TYPE_CHECKING:
    from .network import Network
    from .address_synchronizer import AddressSynchronizer


class SynchronizerFailure(Exception): pass


def history_status(h):
    if not h:
        return None
    status = ''
    for tx_hash, height in h:
        status += tx_hash + ':%d:' % height
    return bh2u(hashlib.sha256(status.encode('ascii')).digest())
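
# --- Illustrative example (not part of the original module) ---
# history_status() mirrors the "status" string an Electrum server reports for a
# scripthash subscription: "txid:height:" pairs concatenated in history order,
# hashed with SHA256 and hex-encoded. The txids below are made-up placeholders.
def _history_status_example():
    fake_hist = [
        ('00' * 32, 100),  # hypothetical confirmed tx at height 100
        ('11' * 32, 0),    # hypothetical mempool tx (height 0)
    ]
    return history_status(fake_hist)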


class SynchronizerBase(NetworkJobOnDefaultServer):
    """Subscribe over the network to a set of addresses, and monitor their statuses.
    Every time a status changes, run a coroutine provided by the subclass.
    """
    def __init__(self, network: 'Network'):
        self.asyncio_loop = network.asyncio_loop
        NetworkJobOnDefaultServer.__init__(self, network)
        self._reset_request_counters()

    def _reset(self):
        super()._reset()
        self.requested_addrs = set()
        self.scripthash_to_address = {}
        self._processed_some_notifications = False  # so that we don't miss them
        self._reset_request_counters()
        # Queues
        self.add_queue = asyncio.Queue()
        self.status_queue = asyncio.Queue()

    async def _start_tasks(self):
        try:
            async with self.group as group:
                await group.spawn(self.send_subscriptions())
                await group.spawn(self.handle_status())
                await group.spawn(self.main())
        finally:
            # we are being cancelled now
            self.session.unsubscribe(self.status_queue)

    def _reset_request_counters(self):
        self._requests_sent = 0
        self._requests_answered = 0

    def add(self, addr):
        asyncio.run_coroutine_threadsafe(self._add_address(addr), self.asyncio_loop)

    async def _add_address(self, addr: str):
        if not is_address(addr): raise ValueError(f"invalid bitcoin address {addr}")
        if addr in self.requested_addrs: return
        self.requested_addrs.add(addr)
        await self.add_queue.put(addr)

    async def _on_address_status(self, addr, status):
        """Handle the change of the status of an address."""
        raise NotImplementedError()  # implemented by subclasses

    async def send_subscriptions(self):
        async def subscribe_to_address(addr):
            h = address_to_scripthash(addr)
            self.scripthash_to_address[h] = addr
            self._requests_sent += 1
            try:
                await self.session.subscribe('blockchain.scripthash.subscribe', [h], self.status_queue)
            except RPCError as e:
                if e.message == 'history too large':  # no unique error code
                    raise GracefulDisconnect(e, log_level=logging.ERROR) from e
                raise
            self._requests_answered += 1
            self.requested_addrs.remove(addr)

        while True:
            addr = await self.add_queue.get()
            await self.group.spawn(subscribe_to_address, addr)

    async def handle_status(self):
        while True:
            h, status = await self.status_queue.get()
            addr = self.scripthash_to_address[h]
            await self.group.spawn(self._on_address_status, addr, status)
            self._processed_some_notifications = True

    def num_requests_sent_and_answered(self) -> Tuple[int, int]:
        return self._requests_sent, self._requests_answered

    async def main(self):
        raise NotImplementedError()  # implemented by subclasses
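
# --- Illustrative sketch (not part of the original module) ---
# SynchronizerBase is abstract: a subclass supplies _on_address_status() and
# main(). A hypothetical minimal subclass that only logs status changes could
# look like this; Synchronizer and Notifier below are the real implementations.
class _LoggingWatcher(SynchronizerBase):
    async def _on_address_status(self, addr, status):
        self.logger.info(f"status of {addr} changed: {status}")

    async def main(self):
        while True:  # nothing to bootstrap; just keep the job's task alive
            await asyncio.sleep(60)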


class Synchronizer(SynchronizerBase):
    '''The synchronizer keeps the wallet up-to-date with its set of
    addresses and their transactions. It subscribes over the network
    to wallet addresses, gets the wallet to generate new addresses
    when necessary, requests the transaction history of any addresses
    we don't have the full history of, and requests binary transaction
    data of any transactions the wallet doesn't have.
    '''
    def __init__(self, wallet: 'AddressSynchronizer'):
        self.wallet = wallet
        SynchronizerBase.__init__(self, wallet.network)

    def _reset(self):
        super()._reset()
        self.requested_tx = {}
        self.requested_histories = set()

    def diagnostic_name(self):
        return self.wallet.diagnostic_name()

    def is_up_to_date(self):
        return (not self.requested_addrs
                and not self.requested_histories
                and not self.requested_tx)

    async def _on_address_status(self, addr, status):
        history = self.wallet.db.get_addr_history(addr)
        if history_status(history) == status:
            return
        if (addr, status) in self.requested_histories:
            return
        # request address history
        self.requested_histories.add((addr, status))
        h = address_to_scripthash(addr)
        self._requests_sent += 1
        result = await self.network.get_history_for_scripthash(h)
        self._requests_answered += 1
        self.logger.info(f"receiving history {addr} {len(result)}")
        hashes = set(map(lambda item: item['tx_hash'], result))
        hist = list(map(lambda item: (item['tx_hash'], item['height']), result))
        # tx_fees
        tx_fees = [(item['tx_hash'], item.get('fee')) for item in result]
        tx_fees = dict(filter(lambda x: x[1] is not None, tx_fees))
        # Check that txids are unique
        if len(hashes) != len(result):
            self.logger.info(f"error: server history has non-unique txids: {addr}")
        # Check that the status corresponds to what was announced
        elif history_status(hist) != status:
            self.logger.info(f"error: status mismatch: {addr}")
        else:
            # Store received history
            self.wallet.receive_history_callback(addr, hist, tx_fees)
            # Request transactions we don't have
            await self._request_missing_txs(hist)
        # Remove request; this allows up_to_date to be True
        self.requested_histories.discard((addr, status))

    async def _request_missing_txs(self, hist, *, allow_server_not_finding_tx=False):
        # "hist" is a list of [tx_hash, tx_height] lists
        transaction_hashes = []
        for tx_hash, tx_height in hist:
            if tx_hash in self.requested_tx:
                continue
            if self.wallet.db.get_transaction(tx_hash):
                continue
            transaction_hashes.append(tx_hash)
            self.requested_tx[tx_hash] = tx_height

        if not transaction_hashes: return
        async with TaskGroup() as group:
            for tx_hash in transaction_hashes:
                await group.spawn(self._get_transaction(tx_hash, allow_server_not_finding_tx=allow_server_not_finding_tx))

    async def _get_transaction(self, tx_hash, *, allow_server_not_finding_tx=False):
        self._requests_sent += 1
        try:
            result = await self.network.get_transaction(tx_hash)
        except UntrustedServerReturnedError as e:
            # most likely, "No such mempool or blockchain transaction"
            if allow_server_not_finding_tx:
                self.requested_tx.pop(tx_hash)
                return
            else:
                raise
        finally:
            self._requests_answered += 1
        tx = Transaction(result)
        try:
            tx.deserialize()  # see if raises
        except Exception as e:
            # possible scenarios:
            # 1: server is sending garbage
            # 2: there is a bug in the deserialization code
            # 3: there was a segwit-like upgrade that changed the tx structure
            #    that we don't know about
            raise SynchronizerFailure(f"cannot deserialize transaction {tx_hash}") from e
        if tx_hash != tx.txid():
            raise SynchronizerFailure(f"received tx does not match expected txid ({tx_hash} != {tx.txid()})")
        tx_height = self.requested_tx.pop(tx_hash)
        self.wallet.receive_tx_callback(tx_hash, tx, tx_height)
        self.logger.info(f"received tx {tx_hash} height: {tx_height} bytes: {len(tx.raw)}")
        # callbacks
        self.wallet.network.trigger_callback('new_transaction', self.wallet, tx)

    async def main(self):
        self.wallet.set_up_to_date(False)
        # request missing txns, if any
        for addr in self.wallet.db.get_history():
            history = self.wallet.db.get_addr_history(addr)
            # Old electrum servers returned ['*'] when all history for the address
            # was pruned. This no longer happens but may remain in old wallets.
            if history == ['*']: continue
            await self._request_missing_txs(history, allow_server_not_finding_tx=True)

        # add addresses to bootstrap
        for addr in self.wallet.get_addresses():
            await self._add_address(addr)

        # main loop
        while True:
            await asyncio.sleep(0.1)
            await run_in_thread(self.wallet.synchronize)
            up_to_date = self.is_up_to_date()
            if (up_to_date != self.wallet.is_up_to_date()
                    or up_to_date and self._processed_some_notifications):
                self._processed_some_notifications = False
                if up_to_date:
                    self._reset_request_counters()
                self.wallet.set_up_to_date(up_to_date)
                self.wallet.network.trigger_callback('wallet_updated', self.wallet)
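
# --- Illustrative sketch (not part of the original module) ---
# Assumed calling convention: a wallet-side caller only needs to construct the
# Synchronizer; NetworkJobOnDefaultServer (imported from .util above) takes care
# of attaching the job to the currently selected server. Progress can then be
# inspected with is_up_to_date() and num_requests_sent_and_answered().
def _start_synchronizer_example(wallet: 'AddressSynchronizer') -> Synchronizer:
    synchronizer = Synchronizer(wallet)  # wallet.network must already be set
    return synchronizer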


class Notifier(SynchronizerBase):
    """Watch addresses. Every time the status of an address changes,
    an HTTP POST is sent to the corresponding URL.
    """
    def __init__(self, network):
        SynchronizerBase.__init__(self, network)
        self.watched_addresses = defaultdict(list)  # type: Dict[str, List[str]]
        self.start_watching_queue = asyncio.Queue()

    async def main(self):
        # resend existing subscriptions if we were restarted
        for addr in self.watched_addresses:
            await self._add_address(addr)
        # main loop
        while True:
            addr, url = await self.start_watching_queue.get()
            self.watched_addresses[addr].append(url)
            await self._add_address(addr)

    async def _on_address_status(self, addr, status):
        self.logger.info(f'new status for addr {addr}')
        headers = {'content-type': 'application/json'}
        data = {'address': addr, 'status': status}
        for url in self.watched_addresses[addr]:
            try:
                async with make_aiohttp_session(proxy=self.network.proxy, headers=headers) as session:
                    async with session.post(url, json=data, headers=headers) as resp:
                        await resp.text()
            except Exception as e:
                self.logger.info(str(e))
            else:
                self.logger.info(f'Got Response for {addr}')
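
# --- Illustrative sketch (not part of the original module) ---
# Notifier.main() consumes (address, url) pairs from start_watching_queue, so a
# hypothetical caller running on the network's event loop registers a webhook
# like this:
async def _watch_address_example(notifier: Notifier, addr: str, url: str) -> None:
    await notifier.start_watching_queue.put((addr, url))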